Dataset schema (column, dtype, observed range or class count):

    repo_name        stringlengths   5 to 92
    path             stringlengths   4 to 221
    copies           stringclasses   19 values
    size             stringlengths   4 to 6
    content          stringlengths   766 to 896k
    license          stringclasses   15 values
    hash             int64           -9,223,277,421,539,062,000 to 9,223,102,107B
    line_mean        float64         6.51 to 99.9
    line_max         int64           32 to 997
    alpha_frac       float64         0.25 to 0.96
    autogenerated    bool            1 class
    ratio            float64         1.5 to 13.6
    config_test      bool            2 classes
    has_no_keywords  bool            2 classes
    few_assignments  bool            1 class
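Each record below lists these fields in order, with the source file carried in the
content column. As a minimal sketch, a dump with this schema could be loaded and
filtered with the Hugging Face datasets library; the parquet path below is a
hypothetical placeholder, not part of this dump:

    from datasets import load_dataset  # pip install datasets

    # Hypothetical data files; point this at wherever the dump actually lives.
    ds = load_dataset("parquet", data_files="data/*.parquet", split="train")

    # Example filter: keep non-autogenerated files with moderate line lengths.
    ds = ds.filter(lambda row: not row["autogenerated"] and row["line_max"] <= 120)
    print(ds[0]["repo_name"], ds[0]["path"], ds[0]["license"])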
Urumasi/Flask-Bones
app/data/models/oauth.py
1
5170
from flask_login import UserMixin
from app.extensions import cache, bcrypt
import bcrypt as bcr
from .. import db
from ..mixins import CRUDMixin
import datetime
from rauth import OAuth1Service, OAuth2Service
from flask import current_app, url_for, request, redirect, session


class Oauth(CRUDMixin, UserMixin, db.Model):
    __tablename__ = 'oauth'
    id = db.Column(db.Integer, primary_key=True)
    #user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    #oauth = db.relationship("Oauth", back_populates="users")
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    social_id = db.Column(db.String(64), nullable=False, unique=True)
    nickname = db.Column(db.String(64), nullable=True)
    email = db.Column(db.String(64), nullable=True)
    jmeno = db.Column(db.String(128), nullable=False)
    prijmeni = db.Column(db.String(128), nullable=False)
    profile_url = db.Column(db.String(128), nullable=False)
    image_url = db.Column(db.String(128), nullable=False)


class OAuthSignIn(object):
    providers = None

    def __init__(self, provider_name):
        self.provider_name = provider_name
        credentials = current_app.config['OAUTH_CREDENTIALS'][provider_name]
        self.consumer_id = credentials['id']
        self.consumer_secret = credentials['secret']

    def authorize(self):
        pass

    def callback(self):
        pass

    def get_callback_url(self):
        return url_for('public.oauth_callback', provider=self.provider_name,
                       _external=True)

    @classmethod
    def get_provider(cls, provider_name):
        if cls.providers is None:
            cls.providers = {}
            for provider_class in cls.__subclasses__():
                provider = provider_class()
                cls.providers[provider.provider_name] = provider
        return cls.providers[provider_name]


class FacebookSignIn(OAuthSignIn):
    def __init__(self):
        super(FacebookSignIn, self).__init__('facebook')
        self.service = OAuth2Service(
            name='facebook',
            client_id=self.consumer_id,
            client_secret=self.consumer_secret,
            authorize_url='https://graph.facebook.com/oauth/authorize',
            access_token_url='https://graph.facebook.com/oauth/access_token',
            base_url='https://graph.facebook.com/'
        )

    def authorize(self):
        return redirect(self.service.get_authorize_url(
            scope='email',
            response_type='code',
            redirect_uri=self.get_callback_url())
        )

    def callback(self):
        if 'code' not in request.args:
            return None, None, None, None, None, None, None
        oauth_session = self.service.get_auth_session(
            data={'code': request.args['code'],
                  'grant_type': 'authorization_code',
                  'redirect_uri': self.get_callback_url()}
        )
        me = oauth_session.get('me?fields=id,email,name').json()
        profile_url = "http://facebook.com/profile.php?id=%s" % me['id']
        image_url = "http://graph.facebook.com/%s/picture" % me['id']
        return (
            'facebook$' + me['id'],
            me.get('email').split('@')[0] if me.get('email') is not None
            else "anon" + me['id'],
            me.get('email'),
            me['name'].split(' ')[0],
            me['name'].split(' ')[1],
            profile_url,
            image_url
        )


class TwitterSignIn(OAuthSignIn):
    def __init__(self):
        super(TwitterSignIn, self).__init__('twitter')
        self.service = OAuth1Service(
            name='twitter',
            consumer_key=self.consumer_id,
            consumer_secret=self.consumer_secret,
            request_token_url='https://api.twitter.com/oauth/request_token',
            authorize_url='https://api.twitter.com/oauth/authorize',
            access_token_url='https://api.twitter.com/oauth/access_token',
            base_url='https://api.twitter.com/1.1/'
        )

    def authorize(self):
        request_token = self.service.get_request_token(
            params={'oauth_callback': self.get_callback_url()}
        )
        session['request_token'] = request_token
        return redirect(self.service.get_authorize_url(request_token[0]))

    def callback(self):
        request_token = session.pop('request_token')
        if 'oauth_verifier' not in request.args:
            return None, None, None, None, None, None, None
        oauth_session = self.service.get_auth_session(
            request_token[0],
            request_token[1],
            data={'oauth_verifier': request.args['oauth_verifier']}
        )
        me = oauth_session.get('account/verify_credentials.json').json()
        social_id = 'twitter$' + str(me.get('id'))
        username = me.get('screen_name')
        name = me.get('name').split(' ')
        return (
            social_id,
            username,
            None,
            name[0],
            name[1] if len(name) > 1 else '',
            # '@%s' % me.get('screen_name') - display name (@Atheloses)
            "http://twitter.com/%s" % me.get('screen_name'),
            me.get('profile_image_url')
        )
mit
-6,923,497,179,262,909,000
37.303704
94
0.589555
false
3.804268
false
false
false
elainenaomi/sciwonc-dataflow-examples
sbbd2016/experiments/1-postgres/3_workflow_full_10files_primary_nosh_nors_annot_with_proj_3s/pegasus.bDkvI/pegasus-4.6.0/lib/python2.7/dist-packages/Pegasus/monitoring/notifications.py
1
34263
""" Class for managing notifications in pegasus-monitord. """ ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Import Python modules import os import sys import math import time import shlex import signal import logging import tempfile import subprocess from Pegasus.tools import utils NOTIFICATION_FILE = "monitord-notifications.log" # filename for writing the output of notification scripts WAIT_CHILD_FINISH = 5 # in seconds logger = logging.getLogger(__name__) class Notifications: """ This object contains all functions needed for managing notifications and starting notification scripts. """ def __init__(self, notification_file_prefix, max_parallel_notifications=10, notifications_timeout=0): """ This function initialized the Notifications class. """ self._active_notifications = [] self._pending_notifications = [] self._max_parallel_notifications = max_parallel_notifications self._notifications_timeout = notifications_timeout self._notifications_fn = os.path.join(notification_file_prefix, NOTIFICATION_FILE) self._notifications_log = None self._notifications = {} # Open notifications' log file try: self._notifications_log = open(self._notifications_fn, 'a') except IOError: logger.critical("cannot create notifications' log file... exiting...") sys.exit(1) def has_pending_notifications(self): """ This function returns True if we have pending notifications. """ return len(self._pending_notifications) > 0 def has_active_notifications(self): """ This function returns True if we have active notifications. """ return len(self._active_notifications) > 0 def terminate_notification(self, my_entry): """ This function terminates a notification process, and cleans up its output/error files. """ my_p = my_entry["subp"] my_pid = my_entry["pid"] my_notification = my_entry["notification"] my_out_fn = my_entry["out_fn"] my_err_fn = my_entry["err_fn"] my_out_fd = my_entry["out_fd"] my_err_fd = my_entry["err_fd"] my_action = my_entry["action"] my_p.poll() # If process hasn't finished... if my_p.returncode is None: # Send SIGTERM first... try: os.kill(my_pid, signal.SIGTERM) except OSError: logger.info("error sending SIGTERM to notification script...") # Wait for child to finish logger.warning("waiting for notification process to finish: %s - %s" % (my_notification, my_action)) time.sleep(WAIT_CHILD_FINISH) my_p.poll() if my_p.returncode is None: # Send SIGKILL now... logger.warning("killing notification process to finish: %s - %s" % (my_notification, my_action)) try: os.kill(my_pid, signal.SIGKILL) except OSError: logger.info("error sending SIGKILL to notification script...") # Finally, clean up files... try: os.unlink(my_out_fn) os.unlink(my_err_fn) except OSError: # No error here... pass logger.warning("notification terminated: %s - %s" % (my_notification, my_action)) def service_notifications(self): """ This function services notifications. It chekcs the notifications in the active list to see if they have finished. 
If so, it copies the stdout/stderr from these notifications to the monitord-notifications.log file. For notifications in the pending_notifications list, it starts the notification scripts, unless there are already too many notifications running in the system. """ logger.info("active notifications %d, pending notifications: %d" % (len(self._active_notifications), len(self._pending_notifications))) # Step 1: Look at existing notifications if len(self._active_notifications) > 0: # We have active notifications, let's check on their statuses my_notif_index = 0 while my_notif_index < len(self._active_notifications): my_active_notif = self._active_notifications[my_notif_index] # Get subprocess object my_active_p = my_active_notif["subp"] my_status = my_active_p.poll() if my_status is not None: # Process finished notification my_finished_out_fn = my_active_notif["out_fn"] my_finished_err_fn = my_active_notif["err_fn"] my_finished_out_fd = my_active_notif["out_fd"] my_finished_err_fd = my_active_notif["err_fd"] my_finished_notification = my_active_notif["notification"] my_finished_action = my_active_notif["action"] my_finished_notification_params = my_active_notif["params"] # Close out/err files, if not already closed... try: my_finished_out_fd.close() except IOError: logger.warning("error closing stdout file for notification %s... continuing..." % (my_finished_notification)) try: my_finished_err_fd.close() except IOError: logger.warning("error closing stderr file for notification %s... continuing..." % (my_finished_notification)) if self._notifications_log is not None: if logger.isEnabledFor(logging.INFO): self._notifications_log.write("%s\n" % ('-' * 80)) self._notifications_log.write("Notification time : %s\n" % (utils.isodate())) self._notifications_log.write("Notification event : %s\n" % (my_finished_notification)) self._notifications_log.write("Notification action: %s\n" % (my_finished_action)) self._notifications_log.write("Notification status: %s\n" % (my_status)) self._notifications_log.write("\n") self._notifications_log.write("Notification environment\n") for k in my_finished_notification_params: self._notifications_log.write("%s : %s\n" % (k, my_finished_notification_params[k])) self._notifications_log.write("\n") self._notifications_log.write("stdout:\n") try: my_f = open(my_finished_out_fn, 'r') for line in my_f: self._notifications_log.write(line) except IOError: logger.warning("error processing notification stdout file: %s. continuing..." % (my_finished_out_fn)) else: my_f.close() self._notifications_log.write("\n") self._notifications_log.write("stderr:\n") try: my_f = open(my_finished_err_fn, 'r') for line in my_f: self._notifications_log.write(line) except IOError: logger.warning("error processing notification stderr file: %s. continuing..." % (my_finished_err_fn)) else: my_f.close() self._notifications_log.write("\n") self._notifications_log.write("\n") else: # Only log a one-liner so we can debug things later if we need to self._notifications_log.write("%s - %s - %s - %s\n" % (utils.isodate(), my_finished_notification, my_finished_action, my_status)) else: logger.critical("notifications' output log file not initialized... exiting...") sys.exit(1) # Now, delete output and error files try: os.unlink(my_finished_out_fn) except OSError: logger.warning("error deleting notification stdout file: %s. continuing..." % (my_finished_out_fn)) try: os.unlink(my_finished_err_fn) except OSError: logger.warning("error deleting notification stderr file: %s. continuing..." 
% (my_finished_err_fn)) # Delete this notification from our list my_deleted_entry = self._active_notifications.pop(my_notif_index) else: # Process still going... leave it... my_notif_index = my_notif_index + 1 # Step 2: Look at our notification queue while len(self._pending_notifications) > 0: # Ok we have notifications to service... # print "pending notifications: %s" % (len(self._pending_notifications)) logger.debug("pending notifications: %s" % (len(self._pending_notifications))) # Check if we have reached the maximum number of concurrent notifications if len(self._active_notifications) > self._max_parallel_notifications: # print "reaching maximum number of concurrent notifications... waiting until next cycle..." logger.info("reaching maximum number of concurrent notifications... waiting until next cycle...") break # Get first notification from the list try: my_action, my_env = self._pending_notifications.pop(0) except IndexError: logger.error("error processing notification list... exiting!") sys.exit(1) # Merge default environment with notification-specific environment my_complete_env = os.environ.copy() my_complete_env.update(my_env) try: my_notification = "%s - %s" % (my_env["PEGASUS_JOBID"], my_env["PEGASUS_EVENT"]) except KeyError: logger.warning("notification missing PEGASUS_JOBID or PEGASUS_EVENT... skipping...") continue # Split arguments my_args = shlex.split(my_action) # Create output and error files for the notification script to use try: my_temp_out = tempfile.mkstemp(prefix="notification-", suffix="-out.log", dir="/tmp") my_temp_err = tempfile.mkstemp(prefix="notification-", suffix="-err.log", dir="/tmp") os.close(my_temp_out[0]) os.close(my_temp_err[0]) my_out_fn = my_temp_out[1] my_err_fn = my_temp_err[1] except OSError: logger.warning("cannot create temp files for notification: %s... skipping..." % (my_notification)) continue # Open output and error files for the notification script try: my_f_out = open(my_out_fn, 'w') my_f_err = open(my_err_fn, 'w') except IOError: logger.warning("cannot open temp files for notification: %s... skipping..." % (my_notification)) try: os.unlink(my_out_fn) os.unlink(my_err_fn) except OSError: # No error here... pass continue # Ok, here we go... try: my_p = subprocess.Popen(my_args, stdout=my_f_out, stderr=my_f_err, env=my_complete_env) except OSError: logger.warning("cannot start notification executable: %s... skipping..." % (my_notification)) try: my_f_out.close() my_f_err.close() os.unlink(my_out_fn) os.unlink(my_err_fn) except OSError: logger.warning("found problem cleaning up notification: %s... skipping..." % (my_notification)) continue # Clean up ok, just continue continue except: logger.warning("problem starting notification: %s... skipping..." % (my_notification)) try: my_f_out.close() my_f_err.close() os.unlink(my_out_fn) os.unlink(my_err_fn) except OSError: logger.warning("found problem cleaning up notification: %s... skipping..." 
% (my_notification)) continue # Clean up ok, just continue continue # Let's keep everything we need for the future my_started_notification = {} my_started_notification["pid"] = my_p.pid my_started_notification["subp"] = my_p my_started_notification["env"] = my_complete_env my_started_notification["params"] = my_env my_started_notification["args"] = my_args my_started_notification["action"] = my_action my_started_notification["out_fd"] = my_f_out my_started_notification["err_fd"] = my_f_err my_started_notification["out_fn"] = my_out_fn my_started_notification["err_fn"] = my_err_fn my_started_notification["notification"] = my_notification my_started_notification["time"] = time.time() # Add to the active list, and done! self._active_notifications.append(my_started_notification) logger.info("started notification for: %s" % (my_notification)) # Step 3: Check if any notifications ran over the allowed time if self._notifications_timeout > 0: # Only go through the list if a timeout was specified # Get current time now = int(math.floor(time.time())) # Go through our list my_index = 0 while my_index < len(self._active_notifications): my_entry = self._active_notifications[my_index] my_exp_time = my_entry["time"] + self._notifications_timeout # Check if notification has expired if my_exp_time < now: # Notification has expired... kill it... logger.warning("notification expired... terminating it...") self.terminate_notification(my_entry) # Delete this notification from our list my_deleted_entry = self._active_notifications.pop(my_index) else: # Notification hasn't expired yet, move to next one... my_index = my_index + 1 def finish_notifications(self): """ This function flushes all notifications, and closes the notifications' log file. It also logs all pending (but not yet issued) notifications. """ # Take care of active notifications if len(self._active_notifications) > 0: for my_entry in self._active_notifications: self.terminate_notification(my_entry) # Take care of pending notifications if len(self._pending_notifications) > 0: for my_action, my_env in self._pending_notifications: try: my_notification = "%s - %s" % (my_env["PEGASUS_JOBID"], my_env["PEGASUS_EVENT"]) except KeyError: logger.warning("notification missing PEGASUS_JOBID or PEGASUS_EVENT... skipping...") continue logger.warning("pending notification skipped: %s - %s" % (my_notification, my_action)) # Close notifications' log file if self._notifications_log is not None: try: self._notifications_log.close() except IOError: logger.warning("error closing notifications' log file...") self._notifications_log = None def read_notification_file(self, notify_file, wf_uuid): """ This function reads the notification file, parsing all notifications and creating our list of events to track. It returns the number of notifications read from the notifications' file. 
""" if notify_file is None: return 0 logger.info("loading notifications from %s" % (notify_file)) # Open file try: NOTIFY = open(notify_file, "r") except IOError: logger.warning("cannot load notification file %s, continuing without notifications" % (notify_file)) return 0 # Start with empty dictionaries for the three types of notifications my_notifications_read = 0 my_notifications = {"workflow" : {}, "job" : {}, "invocation": {}} # For workflow and job notifications, we have a dict(workflow_id|job_id, dict(cond, [actions])) # For invocation notifications, we have a dict(job_id, dict(inv_id, dict(cond, [actions]))) # Process notifications for line in NOTIFY: line = line.strip() # Skip blank lines if len(line) == 0: continue # Skip comments if line.startswith("#"): continue # Check if we split it in 4 or 5 pieces if line.lower().startswith("invocation"): # This is an invocation notification, split and get all pieces my_entry = line.split(None, 4) if len(my_entry) != 5: logger.warning("cannot parse notification: %s, skipping..." % (line)) continue my_type = my_entry[0].lower() my_id = my_entry[1] try: my_inv = int(my_entry[2]) except ValueError: logger.warning("cannot parse notification: %s, skipping..." % (line)) continue my_condition = my_entry[3] my_action = my_entry[4] else: # This is a workflow/job notification, split and get all pieces my_entry = line.split(None, 3) if len(my_entry) != 4: logger.warning("cannot parse notification: %s, skipping..." % (line)) continue my_type = my_entry[0].lower() my_id = my_entry[1] my_condition = my_entry[2] my_action = my_entry[3] # Pick the right dictionary, depending on event type if my_type == "workflow": my_dict = my_notifications["workflow"] if my_id != wf_uuid: logger.warning("workflow notification has id %s, our id is %s, skipping..." % (my_id, wf_uuid)) continue elif my_type == "job" or my_type == "daxjob" or my_type == "dagjob": my_dict = my_notifications["job"] elif my_type == "invocation": my_dict = my_notifications["invocation"] else: logger.warning("unknown notification type: %s, skipping..." % (line)) continue logger.debug("loading notification: %s" % (line)) my_notifications_read = my_notifications_read + 1 # Make sure id is in dictionary if not my_id in my_dict: my_dict[my_id] = {} # For invocations, one extra level... if my_type == "invocation": my_dict = my_dict[my_id] if not my_inv in my_dict: my_dict[my_inv] = {} # Now add the notification condition, action pair if not my_condition in my_dict[my_inv]: # No actions, start with the list my_dict[my_inv][my_condition] = [my_action] else: # We already have an action(s), let's add the new one to the list my_dict[my_inv][my_condition].append(my_action) else: # Now add the notification condition, action pair if not my_condition in my_dict[my_id]: my_dict[my_id][my_condition] = [my_action] else: my_dict[my_id][my_condition].append(my_action) # Save our notifications for later use... if wf_uuid in self._notifications: logger.debug("reloaded notifications for workflow %s" % (wf_uuid)) self._notifications[wf_uuid] = my_notifications # Close file try: NOTIFY.close() except IOError: pass # Return number of notifications read logger.debug("loaded %d notifications for workflow %s" % (my_notifications_read, wf_uuid)) return my_notifications_read def process_workflow_notifications(self, wf, state): """ This function takes care of processing workflow-level notifications. 
""" # Check if we have notifications for this workflow if not wf._wf_uuid in self._notifications: return # Get the notifications' dictionary for this workflow id wf_notifications = self._notifications[wf._wf_uuid] if "workflow" in wf_notifications: my_dict = wf_notifications["workflow"] if len(my_dict) == 0: # No workflow notifications return else: logger.warning("notification structure missing workflow entry...") return # Our workflow is must be in there... if wf._wf_uuid in my_dict: my_notifications = my_dict[wf._wf_uuid] else: logger.warning("notification has mismatching workflow id: %s different from %s" % (wf._wf_uuid, str(my_dict))) return # Sanity check the state... if state != "start" and state != "end": logger.warning("unknown workflow state %s, continuing..." % (state)) return # Now, match the workflow state to the conditions in the notifications... for k in my_notifications: # Look up the actions for this notification now my_actions = my_notifications[k] if state == "start": if k != "start" and k != "all": continue # Change k == 'all' to 'start' k = "start" if state == "end": if k == "on_error": if wf._dagman_exit_code == 0: continue elif k == "on_success": if wf._dagman_exit_code != 0: continue elif k != "at_end" and k != "all": continue if k == "all": k = "at_end" # Ok, we have a match! for action in my_actions: # Create dictionary with needed environment variables my_env = {} my_env["PEGASUS_EVENT"] = k my_env["PEGASUS_EVENT_TIMESTAMP"] = str(wf._current_timestamp) my_env["PEGASUS_EVENT_TIMESTAMP_ISO"] = utils.isodate(wf._current_timestamp) my_env["PEGASUS_SUBMIT_DIR"] = wf._original_submit_dir my_env["PEGASUS_STDOUT"] = wf._out_file my_env["PEGASUS_JOBID"] = wf._wf_uuid my_env["PEGASUS_WFID"] = ((wf._dax_label or "unknown") + "-" + (wf._dax_index or "unknown")) if state == "end": # Workflow status is already in plain format, no need for conversion my_env["PEGASUS_STATUS"] = str(wf._dagman_exit_code) # Done, queue the notification self._pending_notifications.append((action, my_env)) # print "WORKFLOW NOTIFICATION ---> ", action, my_env def process_job_notifications(self, wf, state, job, status): """ This function takes care of processing job-level notifications. """ # Check if we have notifications for this workflow if not wf._wf_uuid in self._notifications: return # Get the notifications' dictionary for this workflow id wf_notifications = self._notifications[wf._wf_uuid] if "job" in wf_notifications: my_dict = wf_notifications["job"] else: logger.warning("notification structure missing job entry...") return # Check if we have notifications for this job if not job._exec_job_id in my_dict: return my_notifications = my_dict[job._exec_job_id] if job._exec_job_id in wf._job_info: if wf._job_info[job._exec_job_id][3] is None: job_has_post_script = False else: job_has_post_script = True else: logger.warning("cannot find job %s in job_info database... skipping notification..." % (job._exec_job_id)) return # Now, match the job state to the conditions in the notifications... for k in my_notifications: # Look up the actions for this notification now my_actions = my_notifications[k] if state == "EXECUTE": if k != "start" and k != "all": continue # Change k to "start" k = "start" my_status = None elif state == "JOB_SUCCESS": if job_has_post_script: # Wait till postscript... 
continue if k == "start" or k == "on_error": continue if k == "all": k = "at_end" my_status = "0" elif state == "POST_SCRIPT_SUCCESS": if k == "start" or k == "on_error": continue if k == "all": k = "at_end" my_status = "0" elif state == "JOB_FAILURE": if job_has_post_script: # Wait till postscript... continue if k == "start" or k == "on_success": continue if k == "all": k = "at_end" my_status = status elif state == "POST_SCRIPT_FAILURE": if k == "start" or k == "on_success": continue if k == "all": k = "at_end" my_status = status else: # We are in some other state... continue my_output = os.path.join(wf._original_submit_dir, job._output_file) my_error = os.path.join(wf._original_submit_dir, job._error_file) # Use the rotated file names if at the end of the job if k != "start": my_output = my_output + ".%03d" % (job._job_output_counter) my_error = my_error + ".%03d" % (job._job_output_counter) # Ok, we have a match! for action in my_actions: # Create dictionary with needed environment variables my_env = {} my_env["PEGASUS_EVENT"] = k my_env["PEGASUS_EVENT_TIMESTAMP"] = str(wf._current_timestamp) my_env["PEGASUS_EVENT_TIMESTAMP_ISO"] = utils.isodate(wf._current_timestamp) my_env["PEGASUS_SUBMIT_DIR"] = wf._original_submit_dir my_env["PEGASUS_JOBID"] = job._exec_job_id my_env["PEGASUS_WFID"] = ((wf._dax_label or "unknown") + "-" + (wf._dax_index or "unknown")) my_env["PEGASUS_STDOUT"] = my_output my_env["PEGASUS_STDERR"] = my_error if my_status is not None: my_env["PEGASUS_STATUS"] = str(my_status) # Done, queue the notification self._pending_notifications.append((action, my_env)) # print "JOB NOTIFICATION ---> ", action, my_env def process_invocation_notifications(self, wf, job, task_id, record=None): """ This function takes care of processing invocation-level notifications. """ if record is None: record = {} # Check if we have notifications for this workflow if not wf._wf_uuid in self._notifications: return # Get the notifications' dictionary for this workflow id wf_notifications = self._notifications[wf._wf_uuid] if "invocation" in wf_notifications: my_dict = wf_notifications["invocation"] else: logger.warning("notification structure missing invocation entry...") return # Check if we have notifications for this job if not job._exec_job_id in my_dict: return # Advance to the task dictionary my_dict = my_dict[job._exec_job_id] # Check if we have notifications for this invocation if not task_id in my_dict: return my_notifications = my_dict[task_id] # Now, match the invocation state to the condition in the notification for k in my_notifications: # Look up the actions for this notification now my_actions = my_notifications[k] if "raw" in record: my_status = record["raw"] else: my_status = job._main_job_exitcode # Convert exitcode to int try: my_status = int(my_status) except ValueError: pass # Now, compare to the notification condition(s) if my_status == 0: if k == "on_error": continue if my_status != 0: if k == "on_success": continue if k == "all": k = "at_end" # Here, we always use the rotated file names as the invocation has already finished... my_output = os.path.join(wf._original_submit_dir, job._output_file) + ".%03d" % (job._job_output_counter) my_error = os.path.join(wf._original_submit_dir, job._error_file) + ".%03d" % (job._job_output_counter) # Ok, we have a match! 
for action in my_actions: # Create dictionary with needed environment variables my_env = {} my_env["PEGASUS_EVENT"] = k my_env["PEGASUS_EVENT_TIMESTAMP"] = str(wf._current_timestamp) my_env["PEGASUS_EVENT_TIMESTAMP_ISO"] = utils.isodate(wf._current_timestamp) my_env["PEGASUS_SUBMIT_DIR"] = wf._original_submit_dir my_env["PEGASUS_JOBID"] = job._exec_job_id my_env["PEGASUS_INVID"] = str(task_id) my_env["PEGASUS_WFID"] = ((wf._dax_label or "unknown") + "-" + (wf._dax_index or "unknown")) my_env["PEGASUS_STDOUT"] = my_output my_env["PEGASUS_STDERR"] = my_error if k != "start": # Convert raw exitcode into human-parseable format my_env["PEGASUS_STATUS"] = str(utils.raw_to_regular(my_status)) # Done, queue the notification self._pending_notifications.append((action, my_env)) # print "INVOCATION NOTIFICATION ---> ", action, my_env def remove_notifications(self, wf_uuid): """ This function removes the notifications for workflow wf_uuid from our _notifications dictionary. """ # Check if we have notifications for this workflow if not wf_uuid in self._notifications: return logger.debug("deleting notifications for workflow %s..." % (wf_uuid)) # Delete them from our dictionary del self._notifications[wf_uuid]
gpl-3.0
1,688,029,379,072,213,500
43.096525
118
0.516038
false
4.653402
false
false
false
GuillaumeDD/AdventOfCode2016
day08.py
1
5304
from __future__ import print_function  # print utilities without systematic '\n' at EOL
import io
import re

# --- Day 8: Two-Factor Authentication ---
#
# You come across a door implementing what you can only assume is an
# implementation of two-factor authentication after a long game of
# requirements telephone.
#
# To get past the door, you first swipe a keycard (no problem; there was
# one on a nearby desk). Then, it displays a code on a little screen,
# and you type that code on a keypad. Then, presumably, the door
# unlocks.
#
# Unfortunately, the screen has been smashed. After a few minutes,
# you've taken everything apart and figured out how it works. Now you
# just have to work out what the screen would have displayed.
#
# The magnetic strip on the card you swiped encodes a series of
# instructions for the screen; these instructions are your puzzle
# input. The screen is 50 pixels wide and 6 pixels tall, all of which
# start off, and is capable of three somewhat peculiar operations:
#
# rect AxB turns on all of the pixels in a rectangle at the top-left of the screen which is A wide and B tall.
#
# rotate row y=A by B shifts all of the pixels in row A (0 is the top
# row) right by B pixels. Pixels that would fall off the right end
# appear at the left end of the row.
#
# rotate column x=A by B shifts all of the pixels in column A (0 is
# the left column) down by B pixels. Pixels that would fall off the
# bottom appear at the top of the column.
#
# For example, here is a simple sequence on a smaller screen:
#
# rect 3x2 creates a small rectangle in the top-left corner:
#
# ###....
# ###....
# .......
#
# rotate column x=1 by 1 rotates the second column down by one pixel:
#
# #.#....
# ###....
# .#.....
#
# rotate row y=0 by 4 rotates the top row right by four pixels:
#
# ....#.#
# ###....
# .#.....
#
# rotate column x=1 by 1 again rotates the second column down by one
# pixel, causing the bottom pixel to wrap back to the top:
#
# .#..#.#
# #.#....
# .#.....
#
# As you can see, this display technology is extremely powerful, and
# will soon dominate the tiny-code-displaying-screen market. That's what
# the advertisement on the back of the display tries to convince you,
# anyway.
#
# There seems to be an intermediate check of the voltage used by the
# display: after you swipe your card, if the screen did work, how many
# pixels should be lit?
#
# --- Part Two ---
#
# You notice that the screen is only capable of displaying capital
# letters; in the font it uses, each letter is 5 pixels wide and 6 tall.
#
# After you swipe your card, what code is the screen trying to display?

pattern_rect = re.compile('rect ([0-9]+)x([0-9]+)')
pattern_rotate_row = re.compile('rotate row y=([0-9]+) by ([0-9]+)')
pattern_rotate_column = re.compile('rotate column x=([0-9]+) by ([0-9]+)')

# Light statuses
ON = '#'
OFF = '.'

# beware of this initialisation!
# -> every cell should be a different string
SCREEN_WIDTH = 50
SCREEN_HEIGHT = 6
SCREEN = [[OFF for _ in range(SCREEN_WIDTH)] for _ in range(SCREEN_HEIGHT)]


def print_screen():
    for line in SCREEN:
        for col in line:
            print(col, end='')
        print()


def switch(light_status):
    if light_status == ON:
        return OFF
    else:
        return ON


def nb_ON():
    """
    Computes the number of 'ON' lights in SCREEN
    """
    count = 0
    for i in range(SCREEN_HEIGHT):
        for j in range(SCREEN_WIDTH):
            if SCREEN[i][j] == ON:
                count += 1
    return count


def apply_command(command_line):
    """
    Apply a given command line on SCREEN
    """
    global SCREEN
    rect = pattern_rect.match(command_line)
    if rect is not None:
        # RECT command
        width = int(rect.group(1))
        height = int(rect.group(2))
        for i in range(height):
            for j in range(width):
                SCREEN[i][j] = switch(SCREEN[i][j])
    else:
        # ROTATE ROW command
        rotate_row = pattern_rotate_row.match(command_line)
        if rotate_row is not None:
            y = int(rotate_row.group(1))
            by = int(rotate_row.group(2))
            new_line = [OFF for _ in range(SCREEN_WIDTH)]
            for j in range(SCREEN_WIDTH):
                next_j = (j + by) % SCREEN_WIDTH
                new_line[next_j] = SCREEN[y][j]
            for j, light in enumerate(new_line):
                SCREEN[y][j] = light
        else:
            # ROTATE COLUMN command
            rotate_column = pattern_rotate_column.match(command_line)
            if rotate_column is not None:
                x = int(rotate_column.group(1))
                by = int(rotate_column.group(2))
                new_column = [OFF for _ in range(SCREEN_HEIGHT)]
                for i in range(SCREEN_HEIGHT):
                    next_i = (i + by) % SCREEN_HEIGHT
                    new_column[next_i] = SCREEN[i][x]
                for i, light in enumerate(new_column):
                    SCREEN[i][x] = light
            else:
                print('Unable to match command')


with io.open('inputs/day08.txt', 'r') as f:
    for line in f:
        command = line.strip()
        apply_command(command)

print_screen()
print('Number of pixels lit: {}'.format(nb_ON()))
gpl-3.0
-4,303,923,578,873,608,000
30.760479
113
0.614065
false
3.471204
false
false
false
tomlepaine/bottle-skeleton
app.py
1
1076
import argparse

from bottle import get, run, response, static_file, redirect
from jinja2 import Environment, PackageLoader

import config

parser = argparse.ArgumentParser(prog=config.name,
                                 description=config.description)
parser.add_argument('--port', type=int, default=8080,
                    help='Port where gui is running.')
args = parser.parse_args()

# Setup globals
PORT = args.port
ENV = Environment(loader=PackageLoader(config.package_name,
                                       config.template_dir))


@get('/')
def index():
    redirect('/hello-world')


@get('/hello-world')
def hello_world():
    template = ENV.get_template('hello-world.html')
    page = template.render()
    return page


@get('/page')
def page():
    template = ENV.get_template('not-implemented.html')
    page = template.render()
    return page


@get('/frame/<index:int>.jpeg')
def frame(index):
    # NOTE: VIDEO is never defined in this file; this route assumes it is
    # provided elsewhere (it will raise NameError as the skeleton stands).
    response.content_type = "image/jpeg"
    return VIDEO.get_frame(index)


run(host='localhost', port=PORT)
bsd-2-clause
3,597,068,652,393,336,000
19.692308
64
0.620818
false
3.870504
false
false
false
whiteclover/Breeze
breeze/chatcenter/room.py
1
1148
import time
import logging

LOGGER = logging.getLogger(__name__)


class Room(object):

    def __init__(self, name):
        self.name = name
        self.peers = {}

    def broadcast(self, msg):
        if msg:
            for peer in self.peers.values():
                if peer != msg.user:
                    LOGGER.info('peer: %s', peer)
                    peer.send(msg)

    def add_peer(self, peer):
        if peer.uid in self.peers:
            raise Exception('in')
        peer.add_room(self)
        self.peers[peer.uid] = peer

    def remove_peer(self, peer):
        peer.remove_room(self)
        del self.peers[peer.uid]


class RoomManager(object):

    def __init__(self):
        self.rooms = {}

    def add_room(self, room):
        self.rooms[room.name] = room

    def remove_room(self, room):
        if room.name in self.rooms:
            del self.rooms[room.name]

    def add_peer_to_room(self, room_name, peer):
        room = self.rooms.get(room_name)
        if not room:
            room = Room(room_name)
            self.rooms[room_name] = room
        room.add_peer(peer)

    def remove_peer_from_room(self, room_name, peer):
        room = self.rooms.get(room_name)
        if room:
            room.remove_peer(peer)

    def broadcast(self, room_name, msg):
        room = self.rooms.get(room_name)
        if room:
            room.broadcast(msg)
gpl-2.0
-523,038,310,736,264,300
19.5
50
0.658537
false
2.701176
false
false
false
ge0rgi/cinder
cinder/tests/unit/volume/drivers/dell_emc/scaleio/test_delete_snapshot.py
1
3977
# Copyright (c) 2013 - 2015 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from six.moves import urllib

from cinder import context
from cinder import exception
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit.fake_snapshot import fake_snapshot_obj
from cinder.tests.unit.volume.drivers.dell_emc import scaleio
from cinder.tests.unit.volume.drivers.dell_emc.scaleio import mocks


class TestDeleteSnapShot(scaleio.TestScaleIODriver):
    """Test cases for ``ScaleIODriver.delete_snapshot()``"""

    def setUp(self):
        """Setup a test case environment.

        Creates fake volume and snapshot objects and sets up the required
        API responses.
        """
        super(TestDeleteSnapShot, self).setUp()
        ctx = context.RequestContext('fake', 'fake', auth_token=True)

        self.snapshot = fake_snapshot_obj(
            ctx, **{'provider_id': fake.SNAPSHOT_ID})
        self.snapshot_name_2x_enc = urllib.parse.quote(
            urllib.parse.quote(
                self.driver._id_to_base64(self.snapshot.id)
            )
        )

        self.HTTPS_MOCK_RESPONSES = {
            self.RESPONSE_MODE.Valid: {
                'types/Volume/instances/getByName::' +
                self.snapshot_name_2x_enc: self.snapshot.id,
                'instances/Volume::{}/action/removeMappedSdc'.format(
                    self.snapshot.provider_id
                ): self.snapshot.id,
                'instances/Volume::{}/action/removeVolume'.format(
                    self.snapshot.provider_id
                ): self.snapshot.id,
            },
            self.RESPONSE_MODE.BadStatus: {
                'types/Volume/instances/getByName::' +
                self.snapshot_name_2x_enc: self.BAD_STATUS_RESPONSE,
                'instances/Volume::{}/action/removeVolume'.format(
                    self.snapshot.provider_id
                ): self.BAD_STATUS_RESPONSE,
            },
            self.RESPONSE_MODE.Invalid: {
                'types/Volume/instances/getByName::' +
                self.snapshot_name_2x_enc: mocks.MockHTTPSResponse(
                    {
                        'errorCode': self.OLD_VOLUME_NOT_FOUND_ERROR,
                        'message': 'Test Delete Invalid Snapshot',
                    }, 400
                ),
                'instances/Volume::{}/action/removeVolume'.format(
                    self.snapshot.provider_id): mocks.MockHTTPSResponse(
                    {
                        'errorCode': self.OLD_VOLUME_NOT_FOUND_ERROR,
                        'message': 'Test Delete Invalid Snapshot',
                    }, 400,
                )
            },
        }

    def test_bad_login(self):
        self.set_https_response_mode(self.RESPONSE_MODE.BadStatus)
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.delete_snapshot,
                          self.snapshot)

    def test_delete_invalid_snapshot(self):
        self.set_https_response_mode(self.RESPONSE_MODE.Valid)
        self.driver.delete_snapshot(self.snapshot)

    def test_delete_snapshot(self):
        """Setting the unmap volume before delete flag for tests """
        self.driver.configuration.set_override(
            'sio_unmap_volume_before_deletion',
            override=True)
        self.set_https_response_mode(self.RESPONSE_MODE.Valid)
        self.driver.delete_snapshot(self.snapshot)
apache-2.0
8,501,585,848,030,189,000
40.863158
78
0.602967
false
4.235357
true
false
false
rafafigueroa/compass-gait
hasimpy.py
1
9216
# !/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: Rafael Figueroa
"""

dp = True

import numpy as np

DEBUG = False


class H:
    """Hybrid Automata Model"""

    def __init__(self, Q, Init_X, Init_qID, state_names=None):
        self.q = Q  # list of q
        self.Init_X = Init_X
        self.Init_qID = Init_qID
        self.states = state_names
        self.Ts = None

    def mode_tracker_guard_check(self, qID, X):
        # Called by mode_tracker to set the mode
        q = self.q[qID]
        g = q.E.G    # guard list
        oe = q.E.OE  # out edges list
        [g_activated, oID_activated_g] = guard_check(g, X)
        # return new qID when a guard is activated
        if g_activated:
            qID_activated_g = oe[oID_activated_g]
        else:
            qID_activated_g = qID
        return qID_activated_g

    def sim(self, qID, X, u, t0, tlim,
            haws_flag=False, debug_flag=False, Ts=1e-4):
        self.Ts = Ts
        # t0 refers to the initial time of
        # each continuous dynamic time interval
        sr = SimResult(self.states)  # Initialize class
        q = self.q[qID]  # get a ref to current mode

        global DEBUG
        DEBUG = debug_flag  # change global DEBUG variable

        while t0 < tlim:
            # get values from current q object
            f = q.f  # continuous dynamics func
            # when simulating is requested by haws
            # with a forced input
            if not haws_flag:
                u = q.u
            g = q.E.G        # guard list
            r = q.E.R        # reset map list
            oe = q.E.OE      # out edges list
            dom = q.Dom      # discrete mode domain
            avoid = q.Avoid  # discrete mode avoid

            if DEBUG:
                print '\n*** New Discrete State *** \n'
                print 'f=', f, '\ng=', g, '\nr=', r, '\noe=', oe, '\ndom=', dom
                print 'Avoid=', avoid
                print 'qID=', q.qID, '\nX=', X, '\nu=', u
                print '\n*** domain check *** \n'

            if not dom(X):
                errorString = 'Outside domain!'
                print errorString
                # raise NameError(errorString)

            if DEBUG:
                print '\n*** continuous dynamics *** \n'

            # simulate continuous dynamics
            T, Y, oID_activated_g, \
                avoid_activated, tlim_activated = \
                odeeul(f, u, g, avoid, X, t0, tlim, Ts)

            # store this time interval
            # in the simulation results
            sr.newTimeInterval(T, Y, q)

            # when inside the avoid set, simulation stops
            # and the information is stored in the simulation results
            if avoid_activated:
                sr.avoid_activated = True
                sr.timeToAvoid = T[-1]
                break  # while loop

            if tlim_activated:
                break  # while loop

            # *** after guard is activated ***
            # prepare data for the next loop
            t0 = T[-1]  # reset initial time to the end of
                        # last time interval
            last_state = np.array(Y[-1])

            if DEBUG:
                print '\n *** reset map *** \n'
                print 'last state =', last_state

            X = r[oID_activated_g](last_state)  # reset map
            qID_activated_g = oe[oID_activated_g]

            # guard activated print out
            if DEBUG:
                print 'sim -- guard activated'
                print 'sim -- from q =', q.qID, 'to q =', qID_activated_g
                print 'sim -- State =', X

            # get new q
            q = self.q[qID_activated_g]

        return sr


class Q:

    def __init__(self, qID, f, u, E,
                 Dom=lambda X: True,
                 Avoid=lambda X: False,
                 TC=True):
        self.qID = qID
        self.f = f
        self.u = u
        self.E = E
        self.Dom = Dom
        self.Avoid = Avoid
        self.TC = TC


class E:

    def __init__(self, OE, G, R):
        self.OE = OE
        self.G = G
        self.R = R


def guard_check(g, X):
    guard_list = []
    # evaluate every guard in g
    # g is the list of guards for this q
    # store the results in guard_list
    for guard in g:
        guard_list.append(guard(X))

    oID_activated_g = None
    g_activated = False

    # check if any result in guard_list is True
    # if it is, store the index
    for oID, guard in enumerate(guard_list):
        if guard:
            oID_activated_g = oID  # outside q which tripped the guard
            g_activated = True
            break

    return [g_activated, oID_activated_g]


def avoid_check(avoid, X):
    'avoid returns True when inside the avoid set'
    return avoid(X)


def odeeul(f, u, g, avoid, X0, t0, tlim, Ts):
    X = np.array(X0)
    Y = np.array(X0)
    T = np.array([t0])

    if DEBUG:
        print 'State=', X

    g_activated, oID_activated_g = guard_check(g, X)
    avoid_activated = avoid_check(avoid, X)
    tlim_activated = (t0 >= tlim)

    if g_activated:
        print 'instant jump'

    if DEBUG:
        print 'First checks:'
        print '\tg_activated:', g_activated
        print '\tavoid_activated', avoid_activated
        print '\ttlim_activated', tlim_activated

    while not (g_activated or avoid_activated or tlim_activated):
        # Evolve continuously until a
        # termination condition is activated
        X = Ts * f(X, u) + X
        Y = np.vstack((Y, X))
        tnew = np.array([T[-1] + Ts])
        T = np.concatenate([T, tnew])

        # termination checks
        g_activated, oID_activated_g = guard_check(g, X)
        avoid_activated = avoid_check(avoid, X)
        tlim_activated = (tnew >= tlim)

        if DEBUG:
            print 'Running checks:'
            print '\tg_activated:', g_activated
            print '\tavoid_activated', avoid_activated
            print '\ttlim_activated', tlim_activated

    return [T, Y, oID_activated_g, avoid_activated, tlim_activated]


class SimResult:
    """Output from one simulation run"""

    def __init__(self, states=None):
        self.I = []
        self.j = 0
        self.timesteps = 0
        self.timeToAvoid = None
        self.avoid_activated = False
        self.path = None
        self.time = None
        self.mode = None
        self.states = states

        for yi in range(0, len(states)):
            self.states[yi] = "$" + self.states[yi] + "$"
            self.states[yi] = self.states[yi].encode('string-escape')
            self.states[yi] = self.states[yi].replace("\\\\", "\\")

    def newTimeInterval(self, T, Y, qID):
        """Simulation is broken into continuous chunks
        Here the chunks are put together"""
        if self.j == 0:
            # First interval
            self.path = Y
            self.time = T
            self.mode = np.array([qID])
        else:
            self.path = np.vstack((self.path, Y))
            self.time = np.concatenate((self.time, T))
            self.mode = np.concatenate((self.mode, np.array([qID])))

        self.j = self.j + 1
        self.timesteps = self.timesteps + np.size(T)
        self.I.append(TimeInterval(T, Y, self.j))

    def simPlot(self):
        Y_plot = self.path
        T_plot = self.time

        import matplotlib.pyplot as plt
        # TODO: Configurate at install?
        # user might not want latex
        from matplotlib import rc
        rc('text', usetex=True)

        nstates = np.size(Y_plot, 1)
        f, axarr = plt.subplots(nstates, sharex=True)

        if nstates > 1:
            for yi in range(nstates):
                axarr[yi].plot(T_plot, Y_plot[:, yi])
                if self.states is not None:
                    axarr[nstates - 1].set_xlabel(r'time(s)')
                    axarr[yi].set_ylabel(self.states[yi], fontsize=20)
                    axarr[yi].yaxis.set_label_coords(-0.08, 0.5)
        else:
            axarr.plot(T_plot, Y_plot)
            if self.states is not None:
                axarr.set_xlabel('time(s)')
                axarr.set_ylabel(self.states[0])

        plt.ion()
        plt.show()

    def phasePlot(self, plotStates):
        # TODO: check size of Y, plotStates
        X1_plot = self.path[:, plotStates[0]]
        X2_plot = self.path[:, plotStates[1]]

        import matplotlib.pyplot as plt
        # figx = plt.figure()
        f, axarr = plt.subplots(1, sharex=True)
        axarr.plot(X1_plot, X2_plot)

        if self.states is not None:
            axarr.set_xlabel(self.states[plotStates[0]], fontsize=20)
            axarr.set_ylabel(self.states[plotStates[1]], fontsize=20)
            axarr.yaxis.set_label_coords(-0.08, 0.5)

        plt.ion()
        plt.show()


class TimeInterval:

    def __init__(self, T, Y, j):
        self.T = T
        self.Y = Y
        self.j = j


def idem(X):
    return X


def tolEqual(a, b, tol=1e-2):
    return abs(a - b) < tol


def last_row(Y):
    print 'shape', np.shape(Y)
    rows = np.shape(Y)[0]
    print 'rows', rows
    if rows > 1:
        return Y[-1]
    else:
        return Y
gpl-2.0
5,182,060,363,343,078,000
26.927273
73
0.513129
false
3.649901
false
false
false
scribusproject/scribus-tools
md_import.py
1
1176
run_script = True

import scribus
from tempfile import NamedTemporaryFile

try:
    import markdown
except:
    scribus.messageBox('python-markdown not installed',
                       'You need to install python-markdown for this script to work',
                       scribus.ICON_WARNING)
    run_script = False

run_script &= bool(scribus.getSelectedObject(0))  # We must have at least one selected object

if run_script and scribus.getSelectedObject(1):
    result = scribus.messageBox('', 'More than one item selected, load all?',
                                button1=scribus.BUTTON_CANCEL,
                                button2=scribus.BUTTON_YES)
    if result == scribus.BUTTON_CANCEL:
        run_script = False


def main():
    md_name = scribus.fileDialog("Select a file", 'Markdown (*.md)')
    if not md_name:
        return
    f = NamedTemporaryFile(suffix='.html')
    markdown.markdownFromFile(md_name, f)
    f.flush()
    html_name = f.name
    i = 0
    while True:
        ob_name = scribus.getSelectedObject(i)
        if not ob_name:
            break
        if scribus.getObjectType(ob_name) == 'TextFrame':
            scribus.insertHtmlText(html_name, ob_name)
        i += 1


if run_script:
    main()
gpl-3.0
-3,829,810,779,273,234,400
24.565217
93
0.643707
false
3.618462
false
false
false
Code4SA/pmgbilltracker
pmg_scrapers/pmg/scrape_pdf.py
1
2621
import requests
import json
import os
import tempfile
import re

url = "http://db3sqepoi5n3s.cloudfront.net/files/pmb5_2013.pdf"
url = "http://db3sqepoi5n3s.cloudfront.net/files/130416pmb3-2013.pdf"
url = "http://db3sqepoi5n3s.cloudfront.net/files/131031b18b-2013.pdf"
url = "http://db3sqepoi5n3s.cloudfront.net/files/130621b15-2013.pdf"
url = "http://db3sqepoi5n3s.cloudfront.net/files/131118b55-2013public_administration_management.pdf"

reg_section1 = re.compile(r"section\s+(?:74|75|76|77)\s+bill", re.IGNORECASE)
reg_section2 = re.compile(r"section\s+(?:74|75|76|77)\b", re.IGNORECASE)

reg_introduced_by1 = re.compile(r"""
    # Search for something that looks like (Minister of Finance)
    \( ( Minister [^)]+ ) \)
""", re.VERBOSE | re.IGNORECASE)

reg_introduced_by2 = re.compile(r"""
    # Search for something that looks like (Ms J Jacobson MP)
    \( ( [^)]+ MP) \)
""", re.VERBOSE | re.IGNORECASE)

reg_introduced_by3 = re.compile(r"""
    # Search for something that looks like (Select committee on Cooperative ....)
    \( ([^)]*Committee[^)]*) \)
""", re.VERBOSE | re.IGNORECASE)


def get_pdf(url, chunk_size=1000):
    fp = tempfile.NamedTemporaryFile("rw", prefix="pmg_", suffix=".pdf", delete=False)
    with open(fp.name, "wb") as fp:
        resp = requests.get(url, stream=True)
        for chunk in resp.iter_content(chunk_size):
            fp.write(chunk)
    return fp.name


def convert_to_text(path):
    cmd = "pdftotext %s" % path
    os.system(cmd)
    return path.replace(".pdf", ".txt")


def extract_section(text):
    match = reg_section1.search(text)
    if not match:
        match = reg_section2.search(text)
    if not match:
        return None
    section = match.group()
    if "74" in section:
        return 74
    if "75" in section:
        return 75
    if "76" in section:
        return 76
    if "77" in section:
        return 77


def extract_introduced_by(text):
    match = reg_introduced_by1.search(text)
    if not match:
        match = reg_introduced_by2.search(text)
    if not match:
        match = reg_introduced_by3.search(text)
    if not match:
        return "Boom!!"
    return match.groups()[0]


def extract_introduction_location(text):
    return "NA"


def scrape_pdf(url):
    pdf_path = get_pdf(url)
    text_path = convert_to_text(pdf_path)
    text = open(text_path).read()[0:2000]
    js = {
        "section": extract_section(text),
        "introduced_by": extract_introduced_by(text),
        "introduced_at": extract_introduction_location(text)
    }
    print json.dumps(js, indent=4)


scrape_pdf(url)
apache-2.0
1,909,753,005,026,915,800
25.474747
100
0.638688
false
2.971655
false
false
false
xmdy/ibstats
src/stats/models.py
1
1877
from __future__ import unicode_literals

from django.db import models
from django.utils.translation import ugettext as _

import random


def get_random_value(start=100, end=100000):
    def get_random():
        return random.randint(start, end) * 0.01
    return get_random


class Trader(models.Model):
    name = models.CharField(verbose_name=_('name'), max_length=128)
    balance = models.FloatField(verbose_name=_('balance'), default=get_random_value())

    class Meta:
        verbose_name = _('trader')
        verbose_name_plural = _('traders')
        ordering = ['-id']

    def __unicode__(self):
        return '<Trader: %s, %s>' % (self.id, self.name)


class Transaction(models.Model):
    trader = models.ForeignKey(Trader, verbose_name=_('trader'))
    time = models.DateTimeField(verbose_name=_('time'), auto_created=True)
    amount = models.FloatField(verbose_name=_('amount'), default=get_random_value())
    type = models.IntegerField(verbose_name=_('type'), default=1, db_index=True)

    class Meta:
        verbose_name = _('transaction')
        verbose_name_plural = _('transactions')
        ordering = ['-id']

    def __unicode__(self):
        return '<Transaction: %s, %s, %s, %s>' % (self.id, self.time, self.amount, self.trader_id)


class Deal(models.Model):
    trader = models.ForeignKey(Trader, verbose_name=_('trader'))
    time = models.DateTimeField(verbose_name=_('time'), db_index=True, auto_created=True)
    amount = models.FloatField(verbose_name=_('amount'), default=get_random_value())
    result_amount = models.FloatField(verbose_name=_('result amount'), default=get_random_value(-100000))

    class Meta:
        verbose_name = _('deal')
        verbose_name_plural = _('deals')
        ordering = ['-id']

    def __unicode__(self):
        return '<Deal: %s, %s, %s, %s>' % (self.id, self.time, self.amount, self.trader_id)
gpl-3.0
-8,023,529,222,036,301,000
34.433962
105
0.639851
false
3.623552
false
false
false
jcpeterson/avoidr
player.py
1
3915
import pygame, os


class Player:
    # this takes in screen width/height to calculate the player's starting position (center of screen)
    # it also takes in the background color to compute its own color (inverted background color)
    def __init__(self, screenWidth, screenHeight):
        self.posRangeX = screenWidth
        self.posRangeY = screenHeight
        self.posX = screenWidth/2
        self.posY = screenHeight/2
        self.speed = 10
        self.sizeMax = 80
        self.sizeMin = 42  #40
        # might as well start out at the minimum size
        self.size = self.sizeMin
        self.state = 'growing'
        # make the player color white; it doesn't really matter how it starts
        self.color = (255, 255, 255)
        self.isJumping = False
        self.goingUp = True
        self.killed = False
        self.exploding = False
        self.rect = pygame.Rect((self.posX, self.posY), (self.size, self.size))
        # load the jump sound
        self.jumpSound = pygame.mixer.Sound(os.path.join('audio', 'jump.wav'))

    def updateSize(self):
        # # player size changes
        # if self.state == 'growing' and self.size >= self.sizeMin:
        #     self.size += 1
        #     if self.size >= self.sizeMax:
        #         self.state = 'shrinking'
        # if self.state == 'shrinking' and self.size <= self.sizeMax:
        #     self.size -= 1
        #     if self.size <= self.sizeMin:
        #         self.state = 'growing'

        if self.isJumping:
            self.speed = 3
            # player size changes when jumping
            if self.goingUp:
                self.size += 1
                if self.size == self.sizeMax:
                    self.goingUp = False
            if not self.goingUp:
                self.size -= 1
                if self.size == self.sizeMin:
                    self.isJumping = False
                    self.goingUp = True
                    self.speed = 10

        self.rect.width = self.size
        self.rect.height = self.size

        # sounds/animations for this have not been implemented yet
        if self.killed:
            self.exploding = True

    def updatePos(self, keys):
        # left border collision detection
        if (self.posX != 0 + self.sizeMax) and (self.posX > 0 + self.sizeMax + 5):
            # player movement input
            if keys[pygame.K_LEFT]:
                self.posX = self.posX - self.speed
        # right border collision detection
        if (self.posX != self.posRangeX - self.sizeMax) and (self.posX < self.posRangeX - (self.sizeMax + 5)):
            # player movement input
            if keys[pygame.K_RIGHT]:
                self.posX = self.posX + self.speed
        # vertical border collision detection
        if (self.posY != 0 + self.sizeMax) and (self.posY > 0 + self.sizeMax + 5):
            # player movement input
            if keys[pygame.K_UP]:
                self.posY = self.posY - self.speed
        # vertical border collision detection
        if (self.posY != self.posRangeY - self.sizeMax) and (self.posY < self.posRangeY - (self.sizeMax + 5)):
            # player movement input
            if keys[pygame.K_DOWN]:
                self.posY = self.posY + self.speed

        # MOVE THIS OVER TO THE SIZE FUNCTION SOON!!!
        if keys[pygame.K_SPACE]:
            if not self.isJumping:
                # play the jump sound
                self.jumpSound.play()
                self.isJumping = True

        self.rect.x = self.posX
        self.rect.y = self.posY

    # use something like this for input handling later
    # for e in pygame.event.get():
    #     if e.type == QUIT: raise SystemExit, "QUIT"
    #     if e.type == KEYDOWN and e.key == K_ESCAPE:
    #         raise SystemExit, "ESCAPE"
    # pressed = pygame.key.get_pressed()
    # up, left, right = [pressed[key] for key in (K_UP, K_LEFT, K_RIGHT)]

    def updateColor(self, gameBgColor):
        # update the player color with the inverted current background color
        self.color = (255 - gameBgColor[0], 255 - gameBgColor[1], 255 - gameBgColor[2])
gpl-3.0
-8,821,727,489,908,777,000
33.052174
108
0.591315
false
3.655462
false
false
false
staranjeet/fjord
fjord/redirector/tests/test_dummy.py
1
1413
from fjord.base.tests import reverse, TestCase
from fjord.redirector import get_redirectors
from fjord.redirector.base import build_redirect_url
from fjord.redirector.providers.dummy import DummyRedirector
from fjord.redirector.tests import RedirectorTestMixin


class DummyRedirectorLoadingTestCase(RedirectorTestMixin, TestCase):
    redirectors = []

    def test_didnt_load(self):
        dummy_providers = [
            prov for prov in get_redirectors()
            if isinstance(prov, DummyRedirector)
        ]
        assert len(dummy_providers) == 0


class DummyRedirectorTestCase(RedirectorTestMixin, TestCase):
    redirectors = [
        'fjord.redirector.providers.dummy.DummyRedirector'
    ]

    def test_load(self):
        dummy_redirectors = [
            prov for prov in get_redirectors()
            if isinstance(prov, DummyRedirector)
        ]
        assert len(dummy_redirectors) == 1

    def test_handle_redirect(self):
        resp = self.client.get(build_redirect_url('dummy:ou812'))
        assert resp.status_code == 302
        assert resp['Location'] == 'http://example.com/ou812'

    def test_nothing_handled_it_404(self):
        resp = self.client.get(build_redirect_url('notdummy:ou812'))
        assert resp.status_code == 404

    def test_no_redirect_specified_404(self):
        resp = self.client.get(reverse('redirect-view'))
        assert resp.status_code == 404
bsd-3-clause
70,996,547,842,896,990
32.642857
68
0.678698
false
3.728232
true
false
false
lucidfrontier45/scikit-learn
examples/manifold/plot_manifold_sphere.py
1
4572
# !/usr/bin/python
# -*- coding: utf-8 -*-
"""
=============================================
Manifold Learning methods on a severed sphere
=============================================

An application of the different :ref:`manifold` techniques
on a spherical data-set. Here one can see the use of
dimensionality reduction in order to gain some intuition
regarding the Manifold learning methods. Regarding the dataset,
the poles are cut from the sphere, as well as a thin slice down its
side. This enables the manifold learning techniques to
'spread it open' whilst projecting it onto two dimensions.

For a similar example, where the methods are applied to the
S-curve dataset, see :ref:`example_manifold_plot_compare_methods.py`

Note that the purpose of the :ref:`MDS <multidimensional_scaling>` is
to find a low-dimensional representation of the data (here 2D) in
which the distances respect well the distances in the original
high-dimensional space; unlike other manifold-learning algorithms,
it does not seek an isotropic representation of the data in
the low-dimensional space. Here the manifold problem matches fairly
that of representing a flat map of the Earth, as with
`map projection <http://en.wikipedia.org/wiki/Map_projection>`_
"""

# Author: Jaques Grobler <[email protected]>
# License: BSD

print __doc__

from time import time

import numpy as np
import pylab as pl
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import NullFormatter

from sklearn import manifold
from sklearn.utils import check_random_state

# Next line to silence pyflakes.
Axes3D

# Variables for manifold learning.
n_neighbors = 10
n_samples = 1000

# Create our sphere.
random_state = check_random_state(0)
p = random_state.rand(n_samples) * (2 * np.pi - 0.55)
t = random_state.rand(n_samples) * np.pi

# Sever the poles from the sphere.
indices = ((t < (np.pi - (np.pi / 8))) & (t > ((np.pi / 8))))
colors = p[indices]
x, y, z = np.sin(t[indices]) * np.cos(p[indices]), \
    np.sin(t[indices]) * np.sin(p[indices]), \
    np.cos(t[indices])

# Plot our dataset.
fig = pl.figure(figsize=(15, 8))
pl.suptitle("Manifold Learning with %i points, %i neighbors"
            % (1000, n_neighbors), fontsize=14)

ax = fig.add_subplot(241, projection='3d')
ax.scatter(x, y, z, c=p[indices], cmap=pl.cm.rainbow)
try:
    # compatibility matplotlib < 1.0
    ax.view_init(40, -10)
except:
    pass

sphere_data = np.array([x, y, z]).T

# Perform Locally Linear Embedding Manifold learning
methods = ['standard', 'ltsa', 'hessian', 'modified']
labels = ['LLE', 'LTSA', 'Hessian LLE', 'Modified LLE']

for i, method in enumerate(methods):
    t0 = time()
    trans_data = manifold\
        .LocallyLinearEmbedding(n_neighbors, 2,
                                method=method).fit_transform(sphere_data).T
    t1 = time()
    print "%s: %.2g sec" % (methods[i], t1 - t0)

    ax = fig.add_subplot(242 + i)
    pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
    pl.title("%s (%.2g sec)" % (labels[i], t1 - t0))
    ax.xaxis.set_major_formatter(NullFormatter())
    ax.yaxis.set_major_formatter(NullFormatter())
    pl.axis('tight')

# Perform Isomap Manifold learning.
t0 = time()
trans_data = manifold.Isomap(n_neighbors, n_components=2)\
    .fit_transform(sphere_data).T
t1 = time()
print "%s: %.2g sec" % ('ISO', t1 - t0)

ax = fig.add_subplot(246)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("%s (%.2g sec)" % ('Isomap', t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')

# Perform Multi-dimensional scaling.
t0 = time()
mds = manifold.MDS(2, max_iter=100, n_init=1)
trans_data = mds.fit_transform(sphere_data).T
t1 = time()
print "MDS: %.2g sec" % (t1 - t0)

ax = fig.add_subplot(247)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("MDS (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')

# Perform Spectral Embedding.
t0 = time()
se = manifold.SpectralEmbedding(n_components=2,
                                n_neighbors=n_neighbors)
trans_data = se.fit_transform(sphere_data).T
t1 = time()
print "Spectral Embedding: %.2g sec" % (t1 - t0)

ax = fig.add_subplot(248)
pl.scatter(trans_data[0], trans_data[1], c=colors, cmap=pl.cm.rainbow)
pl.title("Spectral Embedding (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
pl.axis('tight')

pl.show()
bsd-3-clause
-6,762,097,957,016,355,000
31.425532
75
0.680446
false
3.009875
false
false
false
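The example above drives every sklearn.manifold estimator through the same construct-then-fit_transform pattern. A minimal sketch of that pattern on arbitrary data; the toy array here is a hypothetical stand-in for any (n_samples, n_features) dataset:

import numpy as np
from sklearn import manifold

# Hypothetical toy data standing in for any (n_samples, n_features) array.
X = np.random.RandomState(0).rand(100, 3)
# Same call pattern as the example: construct the estimator, then fit_transform.
embedding = manifold.Isomap(n_neighbors=10, n_components=2)
X_2d = embedding.fit_transform(X)  # shape (100, 2)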
iulian787/spack
var/spack/repos/builtin/packages/linktest/package.py
2
1299
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Linktest(MakefilePackage):
    """Performance tool to generate communication matrix using parallel
    ping-pong benchmark"""

    homepage = "https://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/LinkTest/_node.html"
    url      = "http://apps.fz-juelich.de/jsc/linktest/download.php?version=1.2p1"

    maintainers = ['pramodk']

    version('1.2p1', sha256='981b96da1d5bf214507b8e219a36e8d0183d8bd5c10539b26f660b2c83e5269d',
            extension='tar.gz')

    depends_on('mpi')
    depends_on('sionlib')

    def edit(self, spec, prefix):
        with working_dir('src'):
            makefile = FileFilter('Makefile')
            makefile.filter('= gcc', '= cc')
            makefile.filter('mpicc', spec['mpi'].mpicc)
            makefile.filter('#SIONLIB_INST=.*', 'SIONLIB_INST=%s' % spec['sionlib'].prefix)

    def build(self, spec, prefix):
        with working_dir('src'):
            make()

    def install(self, spec, prefix):
        mkdir(prefix.bin)
        install('src/mpilinktest', prefix.bin)
        install('src/pingponganalysis', prefix.bin)
lgpl-2.1
-1,271,115,290,490,346,200
35.083333
115
0.647421
false
3.347938
false
false
false
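The package above follows Spack's MakefilePackage convention: edit() patches the Makefile, the inherited build() runs make, and install() copies the artifacts. A hedged sketch of the same three-phase skeleton; the package name, URL, and checksum below are placeholders, not a real package:

from spack import *

class Mytool(MakefilePackage):
    """Hypothetical example of the edit/(default) build/install pattern."""
    homepage = "https://example.org/mytool"
    url      = "https://example.org/mytool-1.0.tar.gz"

    version('1.0', sha256='0' * 64)  # placeholder checksum

    def edit(self, spec, prefix):
        # Patch the Makefile before the default build() phase runs make.
        FileFilter('Makefile').filter('= gcc', '= cc')

    def install(self, spec, prefix):
        mkdir(prefix.bin)
        install('mytool', prefix.bin)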
nomad-vino/SPSE-1
Module 5/5.4.py
1
1362
#!/usr/bin/python print " __ " print " |__|____ ___ __ " print " | \__ \\\\ \/ / " print " | |/ __ \\\\ / " print " /\__| (____ /\_/ " print " \______| \/ " print " " print 'Module 5' print 'Exploitation Techniques' print 'Part 4' print """ Playing with processes in IDB """ import immlib imm = immlib.Debugger() # # # # Main application DESC = 'Playing with processes' def main(args): # open closed process #exe = 'E:\\Module 6\\Server-Strcpy.exe' #imm.openProcess(exe) # attach to running process - not the one opened in immunity # -> !script_name PID #imm.Attach(int(args[0])) #PID #imm.restartProcess() # find all modules in running process modules_table = imm.createTable('Module Information', ['Name', 'Base', 'Entry', 'Size', 'Version']) # get list of modules module_dict = imm.getAllModules() # fill table for entity in module_dict.values() : # Libs.debugtypes => Module modules_table.add(0, [ entity.getName(), '%08X'%entity.getBaseAddress(), '%08X'%entity.getEntry(), '%08X'%entity.getSize(), entity.getVersion() ]) # print the state of registers in logs imm.log(str(imm.getRegs())) return 'Done'
gpl-3.0
3,683,522,595,518,480,000
22.084746
100
0.526432
false
3.167442
false
false
false
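The script above uses the standard Immunity Debugger PyCommand layout: a module-level immlib.Debugger() handle, a DESC string, and a main(args) entry point that returns a status string (invoked from the command bar as !script_name, per the script's own comment). A minimal skeleton of that layout, using only calls that already appear in the script:

import immlib

imm = immlib.Debugger()

DESC = 'Minimal example command'

def main(args):
    # args holds whatever was typed after the command name.
    imm.log("Arguments: " + str(args))
    return 'Done'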
mancoast/CPythonPyc_test
fail/313_test_bigmem.py
1
41534
from test import support from test.support import bigmemtest, _1G, _2G, _4G, precisionbigmemtest import unittest import operator import sys import functools # Bigmem testing houserules: # # - Try not to allocate too many large objects. It's okay to rely on # refcounting semantics, but don't forget that 's = create_largestring()' # doesn't release the old 's' (if it exists) until well after its new # value has been created. Use 'del s' before the create_largestring call. # # - Do *not* compare large objects using assertEqual or similar. It's a # lengthy operation and the errormessage will be utterly useless due to # its size. To make sure whether a result has the right contents, better # to use the strip or count methods, or compare meaningful slices. # # - Don't forget to test for large indices, offsets and results and such, # in addition to large sizes. # # - When repeating an object (say, a substring, or a small list) to create # a large object, make the subobject of a length that is not a power of # 2. That way, int-wrapping problems are more easily detected. # # - While the bigmemtest decorator speaks of 'minsize', all tests will # actually be called with a much smaller number too, in the normal # test run (5Kb currently.) This is so the tests themselves get frequent # testing. Consequently, always make all large allocations based on the # passed-in 'size', and don't rely on the size being very large. Also, # memuse-per-size should remain sane (less than a few thousand); if your # test uses more, adjust 'size' upward, instead. # BEWARE: it seems that one failing test can yield other subsequent tests to # fail as well. I do not know whether it is due to memory fragmentation # issues, or other specifics of the platform malloc() routine. character_size = 4 if sys.maxunicode > 0xFFFF else 2 class BaseStrTest: @bigmemtest(minsize=_2G, memuse=2) def test_capitalize(self, size): _ = self.from_latin1 SUBSTR = self.from_latin1(' abc def ghi') s = _('-') * size + SUBSTR caps = s.capitalize() self.assertEqual(caps[-len(SUBSTR):], SUBSTR.capitalize()) self.assertEqual(caps.lstrip(_('-')), SUBSTR) @bigmemtest(minsize=_2G + 10, memuse=1) def test_center(self, size): SUBSTR = self.from_latin1(' abc def ghi') s = SUBSTR.center(size) self.assertEqual(len(s), size) lpadsize = rpadsize = (len(s) - len(SUBSTR)) // 2 if len(s) % 2: lpadsize += 1 self.assertEqual(s[lpadsize:-rpadsize], SUBSTR) self.assertEqual(s.strip(), SUBSTR.strip()) @bigmemtest(minsize=_2G, memuse=2) def test_count(self, size): _ = self.from_latin1 SUBSTR = _(' abc def ghi') s = _('.') * size + SUBSTR self.assertEqual(s.count(_('.')), size) s += _('.') self.assertEqual(s.count(_('.')), size + 1) self.assertEqual(s.count(_(' ')), 3) self.assertEqual(s.count(_('i')), 1) self.assertEqual(s.count(_('j')), 0) @bigmemtest(minsize=_2G, memuse=2) def test_endswith(self, size): _ = self.from_latin1 SUBSTR = _(' abc def ghi') s = _('-') * size + SUBSTR self.assertTrue(s.endswith(SUBSTR)) self.assertTrue(s.endswith(s)) s2 = _('...') + s self.assertTrue(s2.endswith(s)) self.assertFalse(s.endswith(_('a') + SUBSTR)) self.assertFalse(SUBSTR.endswith(s)) @bigmemtest(minsize=_2G + 10, memuse=2) def test_expandtabs(self, size): _ = self.from_latin1 s = _('-') * size tabsize = 8 self.assertEqual(s.expandtabs(), s) del s slen, remainder = divmod(size, tabsize) s = _(' \t') * slen s = s.expandtabs(tabsize) self.assertEqual(len(s), size - remainder) self.assertEqual(len(s.strip(_(' '))), 0) @bigmemtest(minsize=_2G, memuse=2) def test_find(self, size): _ = 
self.from_latin1 SUBSTR = _(' abc def ghi') sublen = len(SUBSTR) s = _('').join([SUBSTR, _('-') * size, SUBSTR]) self.assertEqual(s.find(_(' ')), 0) self.assertEqual(s.find(SUBSTR), 0) self.assertEqual(s.find(_(' '), sublen), sublen + size) self.assertEqual(s.find(SUBSTR, len(SUBSTR)), sublen + size) self.assertEqual(s.find(_('i')), SUBSTR.find(_('i'))) self.assertEqual(s.find(_('i'), sublen), sublen + size + SUBSTR.find(_('i'))) self.assertEqual(s.find(_('i'), size), sublen + size + SUBSTR.find(_('i'))) self.assertEqual(s.find(_('j')), -1) @bigmemtest(minsize=_2G, memuse=2) def test_index(self, size): _ = self.from_latin1 SUBSTR = _(' abc def ghi') sublen = len(SUBSTR) s = _('').join([SUBSTR, _('-') * size, SUBSTR]) self.assertEqual(s.index(_(' ')), 0) self.assertEqual(s.index(SUBSTR), 0) self.assertEqual(s.index(_(' '), sublen), sublen + size) self.assertEqual(s.index(SUBSTR, sublen), sublen + size) self.assertEqual(s.index(_('i')), SUBSTR.index(_('i'))) self.assertEqual(s.index(_('i'), sublen), sublen + size + SUBSTR.index(_('i'))) self.assertEqual(s.index(_('i'), size), sublen + size + SUBSTR.index(_('i'))) self.assertRaises(ValueError, s.index, _('j')) @bigmemtest(minsize=_2G, memuse=2) def test_isalnum(self, size): _ = self.from_latin1 SUBSTR = _('123456') s = _('a') * size + SUBSTR self.assertTrue(s.isalnum()) s += _('.') self.assertFalse(s.isalnum()) @bigmemtest(minsize=_2G, memuse=2) def test_isalpha(self, size): _ = self.from_latin1 SUBSTR = _('zzzzzzz') s = _('a') * size + SUBSTR self.assertTrue(s.isalpha()) s += _('.') self.assertFalse(s.isalpha()) @bigmemtest(minsize=_2G, memuse=2) def test_isdigit(self, size): _ = self.from_latin1 SUBSTR = _('123456') s = _('9') * size + SUBSTR self.assertTrue(s.isdigit()) s += _('z') self.assertFalse(s.isdigit()) @bigmemtest(minsize=_2G, memuse=2) def test_islower(self, size): _ = self.from_latin1 chars = _(''.join( chr(c) for c in range(255) if not chr(c).isupper())) repeats = size // len(chars) + 2 s = chars * repeats self.assertTrue(s.islower()) s += _('A') self.assertFalse(s.islower()) @bigmemtest(minsize=_2G, memuse=2) def test_isspace(self, size): _ = self.from_latin1 whitespace = _(' \f\n\r\t\v') repeats = size // len(whitespace) + 2 s = whitespace * repeats self.assertTrue(s.isspace()) s += _('j') self.assertFalse(s.isspace()) @bigmemtest(minsize=_2G, memuse=2) def test_istitle(self, size): _ = self.from_latin1 SUBSTR = _('123456') s = _('').join([_('A'), _('a') * size, SUBSTR]) self.assertTrue(s.istitle()) s += _('A') self.assertTrue(s.istitle()) s += _('aA') self.assertFalse(s.istitle()) @bigmemtest(minsize=_2G, memuse=2) def test_isupper(self, size): _ = self.from_latin1 chars = _(''.join( chr(c) for c in range(255) if not chr(c).islower())) repeats = size // len(chars) + 2 s = chars * repeats self.assertTrue(s.isupper()) s += _('a') self.assertFalse(s.isupper()) @bigmemtest(minsize=_2G, memuse=2) def test_join(self, size): _ = self.from_latin1 s = _('A') * size x = s.join([_('aaaaa'), _('bbbbb')]) self.assertEqual(x.count(_('a')), 5) self.assertEqual(x.count(_('b')), 5) self.assertTrue(x.startswith(_('aaaaaA'))) self.assertTrue(x.endswith(_('Abbbbb'))) @bigmemtest(minsize=_2G + 10, memuse=1) def test_ljust(self, size): _ = self.from_latin1 SUBSTR = _(' abc def ghi') s = SUBSTR.ljust(size) self.assertTrue(s.startswith(SUBSTR + _(' '))) self.assertEqual(len(s), size) self.assertEqual(s.strip(), SUBSTR.strip()) @bigmemtest(minsize=_2G + 10, memuse=2) def test_lower(self, size): _ = self.from_latin1 s = _('A') * size s = s.lower() 
self.assertEqual(len(s), size) self.assertEqual(s.count(_('a')), size) @bigmemtest(minsize=_2G + 10, memuse=1) def test_lstrip(self, size): _ = self.from_latin1 SUBSTR = _('abc def ghi') s = SUBSTR.rjust(size) self.assertEqual(len(s), size) self.assertEqual(s.lstrip(), SUBSTR.lstrip()) del s s = SUBSTR.ljust(size) self.assertEqual(len(s), size) # Type-specific optimization if isinstance(s, (str, bytes)): stripped = s.lstrip() self.assertTrue(stripped is s) @bigmemtest(minsize=_2G + 10, memuse=2) def test_replace(self, size): _ = self.from_latin1 replacement = _('a') s = _(' ') * size s = s.replace(_(' '), replacement) self.assertEqual(len(s), size) self.assertEqual(s.count(replacement), size) s = s.replace(replacement, _(' '), size - 4) self.assertEqual(len(s), size) self.assertEqual(s.count(replacement), 4) self.assertEqual(s[-10:], _(' aaaa')) @bigmemtest(minsize=_2G, memuse=2) def test_rfind(self, size): _ = self.from_latin1 SUBSTR = _(' abc def ghi') sublen = len(SUBSTR) s = _('').join([SUBSTR, _('-') * size, SUBSTR]) self.assertEqual(s.rfind(_(' ')), sublen + size + SUBSTR.rfind(_(' '))) self.assertEqual(s.rfind(SUBSTR), sublen + size) self.assertEqual(s.rfind(_(' '), 0, size), SUBSTR.rfind(_(' '))) self.assertEqual(s.rfind(SUBSTR, 0, sublen + size), 0) self.assertEqual(s.rfind(_('i')), sublen + size + SUBSTR.rfind(_('i'))) self.assertEqual(s.rfind(_('i'), 0, sublen), SUBSTR.rfind(_('i'))) self.assertEqual(s.rfind(_('i'), 0, sublen + size), SUBSTR.rfind(_('i'))) self.assertEqual(s.rfind(_('j')), -1) @bigmemtest(minsize=_2G, memuse=2) def test_rindex(self, size): _ = self.from_latin1 SUBSTR = _(' abc def ghi') sublen = len(SUBSTR) s = _('').join([SUBSTR, _('-') * size, SUBSTR]) self.assertEqual(s.rindex(_(' ')), sublen + size + SUBSTR.rindex(_(' '))) self.assertEqual(s.rindex(SUBSTR), sublen + size) self.assertEqual(s.rindex(_(' '), 0, sublen + size - 1), SUBSTR.rindex(_(' '))) self.assertEqual(s.rindex(SUBSTR, 0, sublen + size), 0) self.assertEqual(s.rindex(_('i')), sublen + size + SUBSTR.rindex(_('i'))) self.assertEqual(s.rindex(_('i'), 0, sublen), SUBSTR.rindex(_('i'))) self.assertEqual(s.rindex(_('i'), 0, sublen + size), SUBSTR.rindex(_('i'))) self.assertRaises(ValueError, s.rindex, _('j')) @bigmemtest(minsize=_2G + 10, memuse=1) def test_rjust(self, size): _ = self.from_latin1 SUBSTR = _(' abc def ghi') s = SUBSTR.ljust(size) self.assertTrue(s.startswith(SUBSTR + _(' '))) self.assertEqual(len(s), size) self.assertEqual(s.strip(), SUBSTR.strip()) @bigmemtest(minsize=_2G + 10, memuse=1) def test_rstrip(self, size): _ = self.from_latin1 SUBSTR = _(' abc def ghi') s = SUBSTR.ljust(size) self.assertEqual(len(s), size) self.assertEqual(s.rstrip(), SUBSTR.rstrip()) del s s = SUBSTR.rjust(size) self.assertEqual(len(s), size) # Type-specific optimization if isinstance(s, (str, bytes)): stripped = s.rstrip() self.assertTrue(stripped is s) # The test takes about size bytes to build a string, and then about # sqrt(size) substrings of sqrt(size) in size and a list to # hold sqrt(size) items. It's close but just over 2x size. 
@bigmemtest(minsize=_2G, memuse=2.1) def test_split_small(self, size): _ = self.from_latin1 # Crudely calculate an estimate so that the result of s.split won't # take up an inordinate amount of memory chunksize = int(size ** 0.5 + 2) SUBSTR = _('a') + _(' ') * chunksize s = SUBSTR * chunksize l = s.split() self.assertEqual(len(l), chunksize) expected = _('a') for item in l: self.assertEqual(item, expected) del l l = s.split(_('a')) self.assertEqual(len(l), chunksize + 1) expected = _(' ') * chunksize for item in filter(None, l): self.assertEqual(item, expected) # Allocates a string of twice size (and briefly two) and a list of # size. Because of internal affairs, the s.split() call produces a # list of size times the same one-character string, so we only # suffer for the list size. (Otherwise, it'd cost another 48 times # size in bytes!) Nevertheless, a list of size takes # 8*size bytes. @bigmemtest(minsize=_2G + 5, memuse=10) def test_split_large(self, size): _ = self.from_latin1 s = _(' a') * size + _(' ') l = s.split() self.assertEqual(len(l), size) self.assertEqual(set(l), set([_('a')])) del l l = s.split(_('a')) self.assertEqual(len(l), size + 1) self.assertEqual(set(l), set([_(' ')])) @bigmemtest(minsize=_2G, memuse=2.1) def test_splitlines(self, size): _ = self.from_latin1 # Crudely calculate an estimate so that the result of s.split won't # take up an inordinate amount of memory chunksize = int(size ** 0.5 + 2) // 2 SUBSTR = _(' ') * chunksize + _('\n') + _(' ') * chunksize + _('\r\n') s = SUBSTR * chunksize l = s.splitlines() self.assertEqual(len(l), chunksize * 2) expected = _(' ') * chunksize for item in l: self.assertEqual(item, expected) @bigmemtest(minsize=_2G, memuse=2) def test_startswith(self, size): _ = self.from_latin1 SUBSTR = _(' abc def ghi') s = _('-') * size + SUBSTR self.assertTrue(s.startswith(s)) self.assertTrue(s.startswith(_('-') * size)) self.assertFalse(s.startswith(SUBSTR)) @bigmemtest(minsize=_2G, memuse=1) def test_strip(self, size): _ = self.from_latin1 SUBSTR = _(' abc def ghi ') s = SUBSTR.rjust(size) self.assertEqual(len(s), size) self.assertEqual(s.strip(), SUBSTR.strip()) del s s = SUBSTR.ljust(size) self.assertEqual(len(s), size) self.assertEqual(s.strip(), SUBSTR.strip()) @bigmemtest(minsize=_2G, memuse=2) def test_swapcase(self, size): _ = self.from_latin1 SUBSTR = _("aBcDeFG12.'\xa9\x00") sublen = len(SUBSTR) repeats = size // sublen + 2 s = SUBSTR * repeats s = s.swapcase() self.assertEqual(len(s), sublen * repeats) self.assertEqual(s[:sublen * 3], SUBSTR.swapcase() * 3) self.assertEqual(s[-sublen * 3:], SUBSTR.swapcase() * 3) @bigmemtest(minsize=_2G, memuse=2) def test_title(self, size): _ = self.from_latin1 SUBSTR = _('SpaaHAaaAaham') s = SUBSTR * (size // len(SUBSTR) + 2) s = s.title() self.assertTrue(s.startswith((SUBSTR * 3).title())) self.assertTrue(s.endswith(SUBSTR.lower() * 3)) @bigmemtest(minsize=_2G, memuse=2) def test_translate(self, size): _ = self.from_latin1 SUBSTR = _('aZz.z.Aaz.') if isinstance(SUBSTR, str): trans = { ord(_('.')): _('-'), ord(_('a')): _('!'), ord(_('Z')): _('$'), } else: trans = bytes.maketrans(b'.aZ', b'-!$') sublen = len(SUBSTR) repeats = size // sublen + 2 s = SUBSTR * repeats s = s.translate(trans) self.assertEqual(len(s), repeats * sublen) self.assertEqual(s[:sublen], SUBSTR.translate(trans)) self.assertEqual(s[-sublen:], SUBSTR.translate(trans)) self.assertEqual(s.count(_('.')), 0) self.assertEqual(s.count(_('!')), repeats * 2) self.assertEqual(s.count(_('z')), repeats * 3) @bigmemtest(minsize=_2G + 5, 
memuse=2) def test_upper(self, size): _ = self.from_latin1 s = _('a') * size s = s.upper() self.assertEqual(len(s), size) self.assertEqual(s.count(_('A')), size) @bigmemtest(minsize=_2G + 20, memuse=1) def test_zfill(self, size): _ = self.from_latin1 SUBSTR = _('-568324723598234') s = SUBSTR.zfill(size) self.assertTrue(s.endswith(_('0') + SUBSTR[1:])) self.assertTrue(s.startswith(_('-0'))) self.assertEqual(len(s), size) self.assertEqual(s.count(_('0')), size - len(SUBSTR)) # This test is meaningful even with size < 2G, as long as the # doubled string is > 2G (but it tests more if both are > 2G :) @bigmemtest(minsize=_1G + 2, memuse=3) def test_concat(self, size): _ = self.from_latin1 s = _('.') * size self.assertEqual(len(s), size) s = s + s self.assertEqual(len(s), size * 2) self.assertEqual(s.count(_('.')), size * 2) # This test is meaningful even with size < 2G, as long as the # repeated string is > 2G (but it tests more if both are > 2G :) @bigmemtest(minsize=_1G + 2, memuse=3) def test_repeat(self, size): _ = self.from_latin1 s = _('.') * size self.assertEqual(len(s), size) s = s * 2 self.assertEqual(len(s), size * 2) self.assertEqual(s.count(_('.')), size * 2) @bigmemtest(minsize=_2G + 20, memuse=2) def test_slice_and_getitem(self, size): _ = self.from_latin1 SUBSTR = _('0123456789') sublen = len(SUBSTR) s = SUBSTR * (size // sublen) stepsize = len(s) // 100 stepsize = stepsize - (stepsize % sublen) for i in range(0, len(s) - stepsize, stepsize): self.assertEqual(s[i], SUBSTR[0]) self.assertEqual(s[i:i + sublen], SUBSTR) self.assertEqual(s[i:i + sublen:2], SUBSTR[::2]) if i > 0: self.assertEqual(s[i + sublen - 1:i - 1:-3], SUBSTR[sublen::-3]) # Make sure we do some slicing and indexing near the end of the # string, too. self.assertEqual(s[len(s) - 1], SUBSTR[-1]) self.assertEqual(s[-1], SUBSTR[-1]) self.assertEqual(s[len(s) - 10], SUBSTR[0]) self.assertEqual(s[-sublen], SUBSTR[0]) self.assertEqual(s[len(s):], _('')) self.assertEqual(s[len(s) - 1:], SUBSTR[-1:]) self.assertEqual(s[-1:], SUBSTR[-1:]) self.assertEqual(s[len(s) - sublen:], SUBSTR) self.assertEqual(s[-sublen:], SUBSTR) self.assertEqual(len(s[:]), len(s)) self.assertEqual(len(s[:len(s) - 5]), len(s) - 5) self.assertEqual(len(s[5:-5]), len(s) - 10) self.assertRaises(IndexError, operator.getitem, s, len(s)) self.assertRaises(IndexError, operator.getitem, s, len(s) + 1) self.assertRaises(IndexError, operator.getitem, s, len(s) + 1<<31) @bigmemtest(minsize=_2G, memuse=2) def test_contains(self, size): _ = self.from_latin1 SUBSTR = _('0123456789') edge = _('-') * (size // 2) s = _('').join([edge, SUBSTR, edge]) del edge self.assertTrue(SUBSTR in s) self.assertFalse(SUBSTR * 2 in s) self.assertTrue(_('-') in s) self.assertFalse(_('a') in s) s += _('a') self.assertTrue(_('a') in s) @bigmemtest(minsize=_2G + 10, memuse=2) def test_compare(self, size): _ = self.from_latin1 s1 = _('-') * size s2 = _('-') * size self.assertEqual(s1, s2) del s2 s2 = s1 + _('a') self.assertFalse(s1 == s2) del s2 s2 = _('.') * size self.assertFalse(s1 == s2) @bigmemtest(minsize=_2G + 10, memuse=1) def test_hash(self, size): # Not sure if we can do any meaningful tests here... Even if we # start relying on the exact algorithm used, the result will be # different depending on the size of the C 'long int'. Even this # test is dodgy (there's no *guarantee* that the two things should # have a different hash, even if they, in the current # implementation, almost always do.) 
_ = self.from_latin1 s = _('\x00') * size h1 = hash(s) del s s = _('\x00') * (size + 1) self.assertFalse(h1 == hash(s)) class StrTest(unittest.TestCase, BaseStrTest): def from_latin1(self, s): return s def basic_encode_test(self, size, enc, c='.', expectedsize=None): if expectedsize is None: expectedsize = size s = c * size self.assertEqual(len(s.encode(enc)), expectedsize) def setUp(self): # HACK: adjust memory use of tests inherited from BaseStrTest # according to character size. self._adjusted = {} for name in dir(BaseStrTest): if not name.startswith('test_'): continue meth = getattr(type(self), name) try: memuse = meth.memuse except AttributeError: continue meth.memuse = character_size * memuse self._adjusted[name] = memuse def tearDown(self): for name, memuse in self._adjusted.items(): getattr(type(self), name).memuse = memuse @bigmemtest(minsize=_2G + 2, memuse=character_size + 1) def test_encode(self, size): return self.basic_encode_test(size, 'utf-8') @precisionbigmemtest(size=_4G // 6 + 2, memuse=character_size + 1) def test_encode_raw_unicode_escape(self, size): try: return self.basic_encode_test(size, 'raw_unicode_escape') except MemoryError: pass # acceptable on 32-bit @precisionbigmemtest(size=_4G // 5 + 70, memuse=character_size + 1) def test_encode_utf7(self, size): try: return self.basic_encode_test(size, 'utf7') except MemoryError: pass # acceptable on 32-bit @precisionbigmemtest(size=_4G // 4 + 5, memuse=character_size + 4) def test_encode_utf32(self, size): try: return self.basic_encode_test(size, 'utf32', expectedsize=4*size+4) except MemoryError: pass # acceptable on 32-bit @precisionbigmemtest(size=_2G - 1, memuse=character_size + 1) def test_encode_ascii(self, size): return self.basic_encode_test(size, 'ascii', c='A') @precisionbigmemtest(size=_4G // 5, memuse=character_size * (6 + 1)) def test_unicode_repr_overflow(self, size): try: s = "\uAAAA"*size r = repr(s) except MemoryError: pass # acceptable on 32-bit else: self.assertTrue(s == eval(r)) @bigmemtest(minsize=_2G + 10, memuse=character_size * 2) def test_format(self, size): s = '-' * size sf = '%s' % (s,) self.assertEqual(s, sf) del sf sf = '..%s..' % (s,) self.assertEqual(len(sf), len(s) + 4) self.assertTrue(sf.startswith('..-')) self.assertTrue(sf.endswith('-..')) del s, sf size //= 2 edge = '-' * size s = ''.join([edge, '%s', edge]) del edge s = s % '...' self.assertEqual(len(s), size * 2 + 3) self.assertEqual(s.count('.'), 3) self.assertEqual(s.count('-'), size * 2) @bigmemtest(minsize=_2G + 10, memuse=character_size * 2) def test_repr_small(self, size): s = '-' * size s = repr(s) self.assertEqual(len(s), size + 2) self.assertEqual(s[0], "'") self.assertEqual(s[-1], "'") self.assertEqual(s.count('-'), size) del s # repr() will create a string four times as large as this 'binary # string', but we don't want to allocate much more than twice # size in total. 
(We do extra testing in test_repr_large()) size = size // 5 * 2 s = '\x00' * size s = repr(s) self.assertEqual(len(s), size * 4 + 2) self.assertEqual(s[0], "'") self.assertEqual(s[-1], "'") self.assertEqual(s.count('\\'), size) self.assertEqual(s.count('0'), size * 2) @bigmemtest(minsize=_2G + 10, memuse=character_size * 5) def test_repr_large(self, size): s = '\x00' * size s = repr(s) self.assertEqual(len(s), size * 4 + 2) self.assertEqual(s[0], "'") self.assertEqual(s[-1], "'") self.assertEqual(s.count('\\'), size) self.assertEqual(s.count('0'), size * 2) @bigmemtest(minsize=2**32 / 5, memuse=character_size * 7) def test_unicode_repr(self, size): s = "\uAAAA" * size for f in (repr, ascii): r = f(s) self.assertTrue(len(r) > size) self.assertTrue(r.endswith(r"\uaaaa'"), r[-10:]) del r # The character takes 4 bytes even in UCS-2 builds because it will # be decomposed into surrogates. @bigmemtest(minsize=2**32 / 5, memuse=4 + character_size * 9) def test_unicode_repr_wide(self, size): s = "\U0001AAAA" * size for f in (repr, ascii): r = f(s) self.assertTrue(len(r) > size) self.assertTrue(r.endswith(r"\U0001aaaa'"), r[-12:]) del r class BytesTest(unittest.TestCase, BaseStrTest): def from_latin1(self, s): return s.encode("latin1") @bigmemtest(minsize=_2G + 2, memuse=1 + character_size) def test_decode(self, size): s = self.from_latin1('.') * size self.assertEqual(len(s.decode('utf-8')), size) class BytearrayTest(unittest.TestCase, BaseStrTest): def from_latin1(self, s): return bytearray(s.encode("latin1")) @bigmemtest(minsize=_2G + 2, memuse=1 + character_size) def test_decode(self, size): s = self.from_latin1('.') * size self.assertEqual(len(s.decode('utf-8')), size) test_hash = None test_split_large = None class TupleTest(unittest.TestCase): # Tuples have a small, fixed-sized head and an array of pointers to # data. Since we're testing 64-bit addressing, we can assume that the # pointers are 8 bytes, and that thus that the tuples take up 8 bytes # per size. # As a side-effect of testing long tuples, these tests happen to test # having more than 2<<31 references to any given object. Hence the # use of different types of objects as contents in different tests. @bigmemtest(minsize=_2G + 2, memuse=16) def test_compare(self, size): t1 = ('',) * size t2 = ('',) * size self.assertEqual(t1, t2) del t2 t2 = ('',) * (size + 1) self.assertFalse(t1 == t2) del t2 t2 = (1,) * size self.assertFalse(t1 == t2) # Test concatenating into a single tuple of more than 2G in length, # and concatenating a tuple of more than 2G in length separately, so # the smaller test still gets run even if there isn't memory for the # larger test (but we still let the tester know the larger test is # skipped, in verbose mode.) 
def basic_concat_test(self, size): t = ((),) * size self.assertEqual(len(t), size) t = t + t self.assertEqual(len(t), size * 2) @bigmemtest(minsize=_2G // 2 + 2, memuse=24) def test_concat_small(self, size): return self.basic_concat_test(size) @bigmemtest(minsize=_2G + 2, memuse=24) def test_concat_large(self, size): return self.basic_concat_test(size) @bigmemtest(minsize=_2G // 5 + 10, memuse=8 * 5) def test_contains(self, size): t = (1, 2, 3, 4, 5) * size self.assertEqual(len(t), size * 5) self.assertTrue(5 in t) self.assertFalse((1, 2, 3, 4, 5) in t) self.assertFalse(0 in t) @bigmemtest(minsize=_2G + 10, memuse=8) def test_hash(self, size): t1 = (0,) * size h1 = hash(t1) del t1 t2 = (0,) * (size + 1) self.assertFalse(h1 == hash(t2)) @bigmemtest(minsize=_2G + 10, memuse=8) def test_index_and_slice(self, size): t = (None,) * size self.assertEqual(len(t), size) self.assertEqual(t[-1], None) self.assertEqual(t[5], None) self.assertEqual(t[size - 1], None) self.assertRaises(IndexError, operator.getitem, t, size) self.assertEqual(t[:5], (None,) * 5) self.assertEqual(t[-5:], (None,) * 5) self.assertEqual(t[20:25], (None,) * 5) self.assertEqual(t[-25:-20], (None,) * 5) self.assertEqual(t[size - 5:], (None,) * 5) self.assertEqual(t[size - 5:size], (None,) * 5) self.assertEqual(t[size - 6:size - 2], (None,) * 4) self.assertEqual(t[size:size], ()) self.assertEqual(t[size:size+5], ()) # Like test_concat, split in two. def basic_test_repeat(self, size): t = ('',) * size self.assertEqual(len(t), size) t = t * 2 self.assertEqual(len(t), size * 2) @bigmemtest(minsize=_2G // 2 + 2, memuse=24) def test_repeat_small(self, size): return self.basic_test_repeat(size) @bigmemtest(minsize=_2G + 2, memuse=24) def test_repeat_large(self, size): return self.basic_test_repeat(size) @bigmemtest(minsize=_1G - 1, memuse=12) def test_repeat_large_2(self, size): return self.basic_test_repeat(size) @precisionbigmemtest(size=_1G - 1, memuse=9) def test_from_2G_generator(self, size): try: t = tuple(range(size)) except MemoryError: pass # acceptable on 32-bit else: count = 0 for item in t: self.assertEqual(item, count) count += 1 self.assertEqual(count, size) @precisionbigmemtest(size=_1G - 25, memuse=9) def test_from_almost_2G_generator(self, size): try: t = tuple(range(size)) count = 0 for item in t: self.assertEqual(item, count) count += 1 self.assertEqual(count, size) except MemoryError: pass # acceptable, expected on 32-bit # Like test_concat, split in two. def basic_test_repr(self, size): t = (0,) * size s = repr(t) # The repr of a tuple of 0's is exactly three times the tuple length. self.assertEqual(len(s), size * 3) self.assertEqual(s[:5], '(0, 0') self.assertEqual(s[-5:], '0, 0)') self.assertEqual(s.count('0'), size) @bigmemtest(minsize=_2G // 3 + 2, memuse=8 + 3) def test_repr_small(self, size): return self.basic_test_repr(size) @bigmemtest(minsize=_2G + 2, memuse=8 + 3) def test_repr_large(self, size): return self.basic_test_repr(size) class ListTest(unittest.TestCase): # Like tuples, lists have a small, fixed-sized head and an array of # pointers to data, so 8 bytes per size. Also like tuples, we make the # lists hold references to various objects to test their refcount # limits. 
@bigmemtest(minsize=_2G + 2, memuse=16) def test_compare(self, size): l1 = [''] * size l2 = [''] * size self.assertEqual(l1, l2) del l2 l2 = [''] * (size + 1) self.assertFalse(l1 == l2) del l2 l2 = [2] * size self.assertFalse(l1 == l2) # Test concatenating into a single list of more than 2G in length, # and concatenating a list of more than 2G in length separately, so # the smaller test still gets run even if there isn't memory for the # larger test (but we still let the tester know the larger test is # skipped, in verbose mode.) def basic_test_concat(self, size): l = [[]] * size self.assertEqual(len(l), size) l = l + l self.assertEqual(len(l), size * 2) @bigmemtest(minsize=_2G // 2 + 2, memuse=24) def test_concat_small(self, size): return self.basic_test_concat(size) @bigmemtest(minsize=_2G + 2, memuse=24) def test_concat_large(self, size): return self.basic_test_concat(size) def basic_test_inplace_concat(self, size): l = [sys.stdout] * size l += l self.assertEqual(len(l), size * 2) self.assertTrue(l[0] is l[-1]) self.assertTrue(l[size - 1] is l[size + 1]) @bigmemtest(minsize=_2G // 2 + 2, memuse=24) def test_inplace_concat_small(self, size): return self.basic_test_inplace_concat(size) @bigmemtest(minsize=_2G + 2, memuse=24) def test_inplace_concat_large(self, size): return self.basic_test_inplace_concat(size) @bigmemtest(minsize=_2G // 5 + 10, memuse=8 * 5) def test_contains(self, size): l = [1, 2, 3, 4, 5] * size self.assertEqual(len(l), size * 5) self.assertTrue(5 in l) self.assertFalse([1, 2, 3, 4, 5] in l) self.assertFalse(0 in l) @bigmemtest(minsize=_2G + 10, memuse=8) def test_hash(self, size): l = [0] * size self.assertRaises(TypeError, hash, l) @bigmemtest(minsize=_2G + 10, memuse=8) def test_index_and_slice(self, size): l = [None] * size self.assertEqual(len(l), size) self.assertEqual(l[-1], None) self.assertEqual(l[5], None) self.assertEqual(l[size - 1], None) self.assertRaises(IndexError, operator.getitem, l, size) self.assertEqual(l[:5], [None] * 5) self.assertEqual(l[-5:], [None] * 5) self.assertEqual(l[20:25], [None] * 5) self.assertEqual(l[-25:-20], [None] * 5) self.assertEqual(l[size - 5:], [None] * 5) self.assertEqual(l[size - 5:size], [None] * 5) self.assertEqual(l[size - 6:size - 2], [None] * 4) self.assertEqual(l[size:size], []) self.assertEqual(l[size:size+5], []) l[size - 2] = 5 self.assertEqual(len(l), size) self.assertEqual(l[-3:], [None, 5, None]) self.assertEqual(l.count(5), 1) self.assertRaises(IndexError, operator.setitem, l, size, 6) self.assertEqual(len(l), size) l[size - 7:] = [1, 2, 3, 4, 5] size -= 2 self.assertEqual(len(l), size) self.assertEqual(l[-7:], [None, None, 1, 2, 3, 4, 5]) l[:7] = [1, 2, 3, 4, 5] size -= 2 self.assertEqual(len(l), size) self.assertEqual(l[:7], [1, 2, 3, 4, 5, None, None]) del l[size - 1] size -= 1 self.assertEqual(len(l), size) self.assertEqual(l[-1], 4) del l[-2:] size -= 2 self.assertEqual(len(l), size) self.assertEqual(l[-1], 2) del l[0] size -= 1 self.assertEqual(len(l), size) self.assertEqual(l[0], 2) del l[:2] size -= 2 self.assertEqual(len(l), size) self.assertEqual(l[0], 4) # Like test_concat, split in two. 
def basic_test_repeat(self, size): l = [] * size self.assertFalse(l) l = [''] * size self.assertEqual(len(l), size) l = l * 2 self.assertEqual(len(l), size * 2) @bigmemtest(minsize=_2G // 2 + 2, memuse=24) def test_repeat_small(self, size): return self.basic_test_repeat(size) @bigmemtest(minsize=_2G + 2, memuse=24) def test_repeat_large(self, size): return self.basic_test_repeat(size) def basic_test_inplace_repeat(self, size): l = [''] l *= size self.assertEqual(len(l), size) self.assertTrue(l[0] is l[-1]) del l l = [''] * size l *= 2 self.assertEqual(len(l), size * 2) self.assertTrue(l[size - 1] is l[-1]) @bigmemtest(minsize=_2G // 2 + 2, memuse=16) def test_inplace_repeat_small(self, size): return self.basic_test_inplace_repeat(size) @bigmemtest(minsize=_2G + 2, memuse=16) def test_inplace_repeat_large(self, size): return self.basic_test_inplace_repeat(size) def basic_test_repr(self, size): l = [0] * size s = repr(l) # The repr of a list of 0's is exactly three times the list length. self.assertEqual(len(s), size * 3) self.assertEqual(s[:5], '[0, 0') self.assertEqual(s[-5:], '0, 0]') self.assertEqual(s.count('0'), size) @bigmemtest(minsize=_2G // 3 + 2, memuse=8 + 3) def test_repr_small(self, size): return self.basic_test_repr(size) @bigmemtest(minsize=_2G + 2, memuse=8 + 3) def test_repr_large(self, size): return self.basic_test_repr(size) # list overallocates ~1/8th of the total size (on first expansion) so # the single list.append call puts memuse at 9 bytes per size. @bigmemtest(minsize=_2G, memuse=9) def test_append(self, size): l = [object()] * size l.append(object()) self.assertEqual(len(l), size+1) self.assertTrue(l[-3] is l[-2]) self.assertFalse(l[-2] is l[-1]) @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5) def test_count(self, size): l = [1, 2, 3, 4, 5] * size self.assertEqual(l.count(1), size) self.assertEqual(l.count("1"), 0) def basic_test_extend(self, size): l = [object] * size l.extend(l) self.assertEqual(len(l), size * 2) self.assertTrue(l[0] is l[-1]) self.assertTrue(l[size - 1] is l[size + 1]) @bigmemtest(minsize=_2G // 2 + 2, memuse=16) def test_extend_small(self, size): return self.basic_test_extend(size) @bigmemtest(minsize=_2G + 2, memuse=16) def test_extend_large(self, size): return self.basic_test_extend(size) @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5) def test_index(self, size): l = [1, 2, 3, 4, 5] * size size *= 5 self.assertEqual(l.index(1), 0) self.assertEqual(l.index(5, size - 5), size - 1) self.assertEqual(l.index(5, size - 5, size), size - 1) self.assertRaises(ValueError, l.index, 1, size - 4, size) self.assertRaises(ValueError, l.index, 6) # This tests suffers from overallocation, just like test_append. 
@bigmemtest(minsize=_2G + 10, memuse=9) def test_insert(self, size): l = [1.0] * size l.insert(size - 1, "A") size += 1 self.assertEqual(len(l), size) self.assertEqual(l[-3:], [1.0, "A", 1.0]) l.insert(size + 1, "B") size += 1 self.assertEqual(len(l), size) self.assertEqual(l[-3:], ["A", 1.0, "B"]) l.insert(1, "C") size += 1 self.assertEqual(len(l), size) self.assertEqual(l[:3], [1.0, "C", 1.0]) self.assertEqual(l[size - 3:], ["A", 1.0, "B"]) @bigmemtest(minsize=_2G // 5 + 4, memuse=8 * 5) def test_pop(self, size): l = ["a", "b", "c", "d", "e"] * size size *= 5 self.assertEqual(len(l), size) item = l.pop() size -= 1 self.assertEqual(len(l), size) self.assertEqual(item, "e") self.assertEqual(l[-2:], ["c", "d"]) item = l.pop(0) size -= 1 self.assertEqual(len(l), size) self.assertEqual(item, "a") self.assertEqual(l[:2], ["b", "c"]) item = l.pop(size - 2) size -= 1 self.assertEqual(len(l), size) self.assertEqual(item, "c") self.assertEqual(l[-2:], ["b", "d"]) @bigmemtest(minsize=_2G + 10, memuse=8) def test_remove(self, size): l = [10] * size self.assertEqual(len(l), size) l.remove(10) size -= 1 self.assertEqual(len(l), size) # Because of the earlier l.remove(), this append doesn't trigger # a resize. l.append(5) size += 1 self.assertEqual(len(l), size) self.assertEqual(l[-2:], [10, 5]) l.remove(5) size -= 1 self.assertEqual(len(l), size) self.assertEqual(l[-2:], [10, 10]) @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5) def test_reverse(self, size): l = [1, 2, 3, 4, 5] * size l.reverse() self.assertEqual(len(l), size * 5) self.assertEqual(l[-5:], [5, 4, 3, 2, 1]) self.assertEqual(l[:5], [5, 4, 3, 2, 1]) @bigmemtest(minsize=_2G // 5 + 2, memuse=8 * 5) def test_sort(self, size): l = [1, 2, 3, 4, 5] * size l.sort() self.assertEqual(len(l), size * 5) self.assertEqual(l.count(1), size) self.assertEqual(l[:10], [1] * 10) self.assertEqual(l[-10:], [5] * 10) def test_main(): support.run_unittest(StrTest, BytesTest, BytearrayTest, TupleTest, ListTest) if __name__ == '__main__': if len(sys.argv) > 1: support.set_memlimit(sys.argv[1]) test_main()
gpl-3.0
2,800,543,251,407,446,000
34.590403
79
0.552608
false
3.34197
true
false
false
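The houserules comment block in the file above boils down to: scale every allocation by the passed-in size, declare memuse honestly, and verify results with count() or slices rather than comparing whole huge objects. A sketch of a new test written to those rules, mirroring the decorator usage in the file:

from test.support import bigmemtest, _2G
import unittest

class ExampleBigMemTest(unittest.TestCase):
    # memuse=2: the peak holds the string plus its upper-cased copy.
    @bigmemtest(minsize=_2G, memuse=2)
    def test_upper_count(self, size):
        s = 'a' * size
        s = s.upper()
        # Check length and character counts, not whole-object equality.
        self.assertEqual(len(s), size)
        self.assertEqual(s.count('A'), size)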
dwhagar/snowboard
snowboard/connection.py
1
6948
# This file is part of snowboard.
#
# snowboard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# snowboard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with snowboard.  If not, see <http://www.gnu.org/licenses/>.

'''
Connection object, designed to be the only object to directly interface
with the server.

See https://github.com/dwhagar/snowboard/wiki/Class-Docs for documentation.
'''

import time
import socket
import ssl
import sys

from . import debug
from . import server


class Connection:
    def __init__(self, srv):
        self.host = srv.host
        self.port = srv.port
        self.__socket = None
        self.__ssl = None
        self.__connected = False
        self.ssl = srv.ssl
        self.sslVerify = True
        self.retries = 3  # Number of times to retry a connection
        self.delay = 1    # Delay between connection attempts

    def connected(self):
        '''Returns the state of the connection.'''
        return self.__connected

    def connect(self):
        '''Connect to the configured server.'''
        # Keep track of attempts.
        attempt = 0

        # Try until the connection succeeds or no more tries are left.
        while (not self.__connected) and (attempt < self.retries):
            # Attempt to establish a connection.
            debug.message("Attempting connection to " + self.host + ":" + str(self.port) + ".")
            try:
                # setdefaulttimeout() returns None, so it must not be
                # assigned to the socket itself.
                socket.setdefaulttimeout(30)
                self.__socket = socket.create_connection((self.host, self.port))

                # Handle SSL
                if self.ssl:
                    self.__context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
                    self.__context.options |= ssl.OP_NO_SSLv2
                    self.__context.options |= ssl.OP_NO_SSLv3
                    if self.sslVerify:
                        self.__context.verify_mode = ssl.CERT_REQUIRED
                    else:
                        self.__context.verify_mode = ssl.CERT_NONE
                    self.__ssl = self.__context.wrap_socket(self.__socket)
                    self.__ssl.setblocking(False)
                # Handle not SSL
                else:
                    self.__socket.setblocking(False)

                self.__connected = True
            # Assume connection errors are no big deal but do display an error.
            except ConnectionAbortedError:
                debug.error("Connection to " + self.host + " aborted by server.")
            except ConnectionRefusedError:
                debug.error("Connection to " + self.host + " refused by server.")
            except TimeoutError:
                debug.error("Connection to " + self.host + " timed out.")
            except socket.gaierror:
                debug.error("Failed to resolve " + self.host + ".")
            except OSError as err:
                debug.error("Failed to connect '" + str(err.errno) + "' " + err.strerror + ".")

            attempt += 1
            time.sleep(self.delay)

        return self.__connected

    def disconnect(self):
        '''Disconnect from the server.'''
        debug.message("Disconnected from " + self.host + ":" + str(self.port) + ".")
        if self.ssl:
            if not self.__ssl is None:
                self.__ssl.close()
            self.__ssl = None
        else:
            if not self.__socket is None:
                self.__socket.close()
            self.__socket = None
        self.__connected = False

    def read(self):
        '''Read a line of data from the server, if any.'''
        # Only do something if we're connected.
        if self.__connected:
            done = False
            received = ""

            while not done:
                try:
                    if self.ssl:
                        data = self.__ssl.recv(1)
                    else:
                        data = self.__socket.recv(1)
                except (ssl.SSLWantReadError, BlockingIOError):
                    received = None
                    break
                except OSError as err:
                    debug.error("Error #" + str(err.errno) + ": '" + err.strerror + "' disconnecting.")
                    data = False

                # Process the data.
                # socket.recv is supposed to return a False if the connection
                # has been broken.
                if not data:
                    self.disconnect()
                    done = True
                    received = None
                else:
                    text = data.decode('utf-8', 'replace')
                    if text == '\n':
                        done = True
                    else:
                        received += text
        else:
            received = None

        # Remove the trailing carriage return character (cr/lf pair)
        if not received is None:
            received = received.strip('\r')
            if len(received) > 0:
                if received[0] == ':':
                    received = received[1:]

            # Bug fix for Issue #18, do not return blank lines.
            if received == "":
                received = None

        return received

    def write(self, data):
        '''Sends data to the server.'''
        # Encode the data for the server.
        data += '\n'
        data = data.encode('utf-8')

        # Prepare to keep track of what is being sent.
        dataSent = 0
        bufferSize = len(data)

        if self.__connected:
            # Loop to send the data.
            while dataSent < bufferSize:
                try:
                    if self.ssl:
                        sentNow = self.__ssl.send(data[dataSent:])
                    else:
                        sentNow = self.__socket.send(data[dataSent:])
                except OSError as err:
                    debug.error("Error #" + str(err.errno) + ": '" + err.strerror + "' disconnecting.")
                    self.disconnect()
                    return False

                # If nothing gets sent, we are disconnected from the server.
                if sentNow == 0:
                    debug.error("Data could not be sent for an unknown reason, disconnecting.")
                    self.disconnect()
                    return False

                # Keep track of the data.
                dataSent += sentNow
        else:
            sent = False

        # If sending completed, set the flag to true.
        if dataSent == bufferSize:
            sent = True

        return sent
gpl-3.0
5,802,513,024,660,214,000
34.454082
103
0.517559
false
4.814969
false
false
false
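A hedged usage sketch for the Connection class above, assuming srv is any object exposing the .host, .port, and .ssl attributes the constructor reads:

conn = Connection(srv)
if conn.connect():
    conn.write("NICK snowboard")  # write() appends '\n' and encodes for the wire
    line = conn.read()            # one decoded line, or None when nothing is pending
    if line is not None:
        print(line)
    conn.disconnect()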
Southpaw-TACTIC/Team
src/python/Lib/site-packages/PySide/examples/demos/qtdemo/demotextitem.py
1
2157
from PySide import QtCore, QtGui from demoitem import DemoItem class DemoTextItem(DemoItem): STATIC_TEXT, DYNAMIC_TEXT = range(2) def __init__(self, text, font, textColor, textWidth, scene=None, parent=None, type=STATIC_TEXT, bgColor=QtGui.QColor()): super(DemoTextItem, self).__init__(scene, parent) self.type = type self.text = text self.font = font self.textColor = textColor self.bgColor = bgColor self.textWidth = textWidth self.noSubPixeling = True def setText(self, text): self.text = text self.update() def createImage(self, matrix): if self.type == DemoTextItem.DYNAMIC_TEXT: return None sx = min(matrix.m11(), matrix.m22()) sy = max(matrix.m22(), sx) textItem = QtGui.QGraphicsTextItem() textItem.setHtml(self.text) textItem.setTextWidth(self.textWidth) textItem.setFont(self.font) textItem.setDefaultTextColor(self.textColor) textItem.document().setDocumentMargin(2) w = textItem.boundingRect().width() h = textItem.boundingRect().height() image = QtGui.QImage(int(w * sx), int(h * sy), QtGui.QImage.Format_ARGB32_Premultiplied) image.fill(QtGui.QColor(0, 0, 0, 0).rgba()) painter = QtGui.QPainter(image) painter.scale(sx, sy) style = QtGui.QStyleOptionGraphicsItem() textItem.paint(painter, style, None) return image def animationStarted(self, id=0): self.noSubPixeling = False def animationStopped(self, id=0): self.noSubPixeling = True def boundingRect(self): if self.type == DemoTextItem.STATIC_TEXT: return super(DemoTextItem, self).boundingRect() # Sorry for using magic number. return QtCore.QRectF(0, 0, 50, 20) def paint(self, painter, option, widget): if self.type == DemoTextItem.STATIC_TEXT: super(DemoTextItem, self).paint(painter, option, widget) return painter.setPen(self.textColor) painter.drawText(0, 0, self.text)
epl-1.0
-7,139,952,236,623,638,000
29.814286
68
0.618915
false
3.712565
false
false
false
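A hedged sketch of placing a DemoTextItem in a scene; the scene and font setup are illustrative, and only the constructor signature shown above is assumed:

from PySide import QtGui
from demotextitem import DemoTextItem

scene = QtGui.QGraphicsScene()
item = DemoTextItem("<b>Hello</b>", QtGui.QFont('Arial', 12),
                    QtGui.QColor(0, 0, 0), textWidth=200, scene=scene)
item.setText("<i>Updated</i> text")  # triggers a repaint via update()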
Jumpscale/core9
JumpScale9/data/serializers/SerializerYAML.py
1
3063
import yaml from collections import OrderedDict from js9 import j from .SerializerBase import SerializerBase testtoml=""" name = 'something' multiline = ''' these are multiple lines next line ''' nr = 87 nr2 = 34.4 """ # from .PrettyYAMLDumper import PrettyYaml class SerializerYAML(SerializerBase): def __init__(self): SerializerBase.__init__(self) def dumps(self, obj): return yaml.dump(obj, default_flow_style=False, default_style='',indent=4,line_break="\n") def loads(self, s): # out=cStringIO.StringIO(s) try: return yaml.load(s) except Exception as e: error = "error:%s\n" % e error += "\nyaml could not parse:\n%s\n" % s raise j.exceptions.Input(message=error, level=1, source="", tags="", msgpub="") def load(self, path): try: s = j.sal.fs.readFile(path) except Exception as e: error = "error:%s\n" % e error += '\npath:%s\n' % path raise j.exceptions.Input(message=error, level=1, source="", tags="", msgpub="") try: return yaml.load(s) except Exception as e: error = "error:%s\n" % e error += "\nyaml could not parse:\n%s\n" % s raise j.exceptions.Input(message=error, level=1, source="", tags="", msgpub="") def ordered_load(self, stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict): """ load a yaml stream and keep the order """ class OrderedLoader(Loader): pass def construct_mapping(loader, node): loader.flatten_mapping(node) return object_pairs_hook(loader.construct_pairs(node)) OrderedLoader.add_constructor( yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping) return yaml.load(stream, OrderedLoader) def ordered_dump(self, data, stream=None, Dumper=yaml.Dumper, **kwds): """ dump a yaml stream with keeping the order """ class OrderedDumper(Dumper): pass def _dict_representer(dumper, data): return dumper.represent_mapping( yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items()) OrderedDumper.add_representer(OrderedDict, _dict_representer) return yaml.dump(data, stream, OrderedDumper, **kwds) def test(self): ddict=j.data.serializer.toml.loads(testtoml) #TODO:*3 write some test # from js9 import j # from yaml import load, dump # try: # from yaml import CLoader as Loader, CDumper as Dumper # except ImportError: # from yaml import Loader, Dumper # class YAMLTool: # def decode(self,string): # """ # decode yaml string to python object # """ # return load(string) # def encode(self,obj,width=120): # """ # encode python (simple) objects to yaml # """ # return dump(obj, width=width, default_flow_style=False) #
apache-2.0
5,639,020,786,048,849,000
28.171429
98
0.588965
false
3.843162
false
false
false
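ordered_load()/ordered_dump() above preserve mapping order by routing every YAML mapping through an OrderedDict. A sketch of the round trip, assuming SerializerBase needs no setup beyond what the constructor shows:

s = SerializerYAML()
text = "b: 2\na: 1\nc: 3\n"
data = s.ordered_load(text)   # OrderedDict keeping b, a, c in source order
assert list(data) == ['b', 'a', 'c']
print(s.ordered_dump(data, default_flow_style=False))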
rsalmaso/django-cms
cms/toolbar/items.py
1
16937
import json from abc import ABCMeta from collections import defaultdict from django.template.loader import render_to_string from django.utils.encoding import force_str from django.utils.functional import Promise from cms.constants import RIGHT, LEFT, REFRESH_PAGE, URL_CHANGE class ItemSearchResult: def __init__(self, item, index): self.item = item self.index = index def __add__(self, other): return ItemSearchResult(self.item, self.index + other) def __sub__(self, other): return ItemSearchResult(self.item, self.index - other) def __int__(self): return self.index def may_be_lazy(thing): if isinstance(thing, Promise): return thing._proxy____args[0] else: return thing class ToolbarAPIMixin(metaclass=ABCMeta): REFRESH_PAGE = REFRESH_PAGE URL_CHANGE = URL_CHANGE LEFT = LEFT RIGHT = RIGHT def __init__(self): self.items = [] self.menus = {} self._memo = defaultdict(list) def _memoize(self, item): self._memo[item.__class__].append(item) def _unmemoize(self, item): self._memo[item.__class__].remove(item) def _item_position(self, item): return self.items.index(item) def _add_item(self, item, position): if position is not None: self.items.insert(position, item) else: self.items.append(item) def _remove_item(self, item): if item in self.items: self.items.remove(item) else: raise KeyError("Item %r not found" % item) def get_item_count(self): return len(self.items) def add_item(self, item, position=None): if not isinstance(item, BaseItem): raise ValueError("Items must be subclasses of cms.toolbar.items.BaseItem, %r isn't" % item) if isinstance(position, ItemSearchResult): position = position.index elif isinstance(position, BaseItem): position = self._item_position(position) elif not (position is None or isinstance(position, (int,))): raise ValueError("Position must be None, an integer, an item or an ItemSearchResult, got %r instead" % position) self._add_item(item, position) self._memoize(item) return item def find_items(self, item_type, **attributes): results = [] attr_items = attributes.items() notfound = object() for candidate in self._memo[item_type]: if all(may_be_lazy(getattr(candidate, key, notfound)) == value for key, value in attr_items): results.append(ItemSearchResult(candidate, self._item_position(candidate))) return results def find_first(self, item_type, **attributes): try: return self.find_items(item_type, **attributes)[0] except IndexError: return None # # This will only work if it is used to determine the insert position for # all items in the same menu. # def get_alphabetical_insert_position(self, new_menu_name, item_type, default=0): results = self.find_items(item_type) # No items yet? 
Use the default value provided if not len(results): return default last_position = 0 for result in sorted(results, key=lambda x: x.item.name): if result.item.name > new_menu_name: return result.index if result.index > last_position: last_position = result.index else: return last_position + 1 def remove_item(self, item): self._remove_item(item) self._unmemoize(item) def add_sideframe_item(self, name, url, active=False, disabled=False, extra_classes=None, on_close=None, side=LEFT, position=None): item = SideframeItem(name, url, active=active, disabled=disabled, extra_classes=extra_classes, on_close=on_close, side=side, ) self.add_item(item, position=position) return item def add_modal_item(self, name, url, active=False, disabled=False, extra_classes=None, on_close=REFRESH_PAGE, side=LEFT, position=None): item = ModalItem(name, url, active=active, disabled=disabled, extra_classes=extra_classes, on_close=on_close, side=side, ) self.add_item(item, position=position) return item def add_link_item(self, name, url, active=False, disabled=False, extra_classes=None, side=LEFT, position=None): item = LinkItem(name, url, active=active, disabled=disabled, extra_classes=extra_classes, side=side ) self.add_item(item, position=position) return item def add_ajax_item(self, name, action, active=False, disabled=False, extra_classes=None, data=None, question=None, side=LEFT, position=None, on_success=None, method='POST'): item = AjaxItem(name, action, self.csrf_token, active=active, disabled=disabled, extra_classes=extra_classes, data=data, question=question, side=side, on_success=on_success, method=method, ) self.add_item(item, position=position) return item class BaseItem(metaclass=ABCMeta): toolbar = None template = None def __init__(self, side=LEFT): self.side = side @property def right(self): return self.side is RIGHT def render(self): if self.toolbar: template = self.toolbar.templates.get_cached_template(self.template) return template.render(self.get_context()) # Backwards compatibility return render_to_string(self.template, self.get_context()) def get_context(self): return {} class TemplateItem(BaseItem): def __init__(self, template, extra_context=None, side=LEFT): super().__init__(side) self.template = template self.extra_context = extra_context def get_context(self): if self.extra_context: return self.extra_context return {} class SubMenu(ToolbarAPIMixin, BaseItem): template = "cms/toolbar/items/menu.html" sub_level = True active = False def __init__(self, name, csrf_token, disabled=False, side=LEFT): ToolbarAPIMixin.__init__(self) BaseItem.__init__(self, side) self.name = name self.disabled = disabled self.csrf_token = csrf_token def __repr__(self): return '<Menu:%s>' % force_str(self.name) def add_break(self, identifier=None, position=None): item = Break(identifier) self.add_item(item, position=position) return item def get_items(self): items = self.items for item in items: item.toolbar = self.toolbar if hasattr(item, 'disabled'): item.disabled = self.disabled or item.disabled return items def get_context(self): return { 'active': self.active, 'disabled': self.disabled, 'items': self.get_items(), 'title': self.name, 'sub_level': self.sub_level } class Menu(SubMenu): sub_level = False def get_or_create_menu(self, key, verbose_name, disabled=False, side=LEFT, position=None): if key in self.menus: return self.menus[key] menu = SubMenu(verbose_name, self.csrf_token, disabled=disabled, side=side) self.menus[key] = menu self.add_item(menu, position=position) return menu class LinkItem(BaseItem): template 
= "cms/toolbar/items/item_link.html" def __init__(self, name, url, active=False, disabled=False, extra_classes=None, side=LEFT): super().__init__(side) self.name = name self.url = url self.active = active self.disabled = disabled self.extra_classes = extra_classes or [] def __repr__(self): return '<LinkItem:%s>' % force_str(self.name) def get_context(self): return { 'url': self.url, 'name': self.name, 'active': self.active, 'disabled': self.disabled, 'extra_classes': self.extra_classes, } class FrameItem(BaseItem): # Be sure to define the correct template def __init__(self, name, url, active=False, disabled=False, extra_classes=None, on_close=None, side=LEFT): super().__init__(side) self.name = "%s..." % force_str(name) self.url = url self.active = active self.disabled = disabled self.extra_classes = extra_classes or [] self.on_close = on_close def __repr__(self): # Should be overridden return '<FrameItem:%s>' % force_str(self.name) def get_context(self): return { 'url': self.url, 'name': self.name, 'active': self.active, 'disabled': self.disabled, 'extra_classes': self.extra_classes, 'on_close': self.on_close, } class SideframeItem(FrameItem): template = "cms/toolbar/items/item_sideframe.html" def __repr__(self): return '<SideframeItem:%s>' % force_str(self.name) class ModalItem(FrameItem): template = "cms/toolbar/items/item_modal.html" def __repr__(self): return '<ModalItem:%s>' % force_str(self.name) class AjaxItem(BaseItem): template = "cms/toolbar/items/item_ajax.html" def __init__(self, name, action, csrf_token, data=None, active=False, disabled=False, extra_classes=None, question=None, side=LEFT, on_success=None, method='POST'): super().__init__(side) self.name = name self.action = action self.active = active self.disabled = disabled self.csrf_token = csrf_token self.data = data or {} self.extra_classes = extra_classes or [] self.question = question self.on_success = on_success self.method = method def __repr__(self): return '<AjaxItem:%s>' % force_str(self.name) def get_context(self): data = self.data.copy() if self.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'): data['csrfmiddlewaretoken'] = self.csrf_token return { 'action': self.action, 'name': self.name, 'active': self.active, 'disabled': self.disabled, 'extra_classes': self.extra_classes, 'data': json.dumps(data), 'question': self.question, 'on_success': self.on_success, 'method': self.method, } class Break(BaseItem): template = "cms/toolbar/items/break.html" def __init__(self, identifier=None): self.identifier = identifier class BaseButton(metaclass=ABCMeta): toolbar = None template = None def render(self): if self.toolbar: template = self.toolbar.templates.get_cached_template(self.template) return template.render(self.get_context()) # Backwards compatibility return render_to_string(self.template, self.get_context()) def get_context(self): return {} class Button(BaseButton): template = "cms/toolbar/items/button.html" def __init__(self, name, url, active=False, disabled=False, extra_classes=None): self.name = name self.url = url self.active = active self.disabled = disabled self.extra_classes = extra_classes or [] def __repr__(self): return '<Button:%s>' % force_str(self.name) def get_context(self): return { 'name': self.name, 'url': self.url, 'active': self.active, 'disabled': self.disabled, 'extra_classes': self.extra_classes, } class ModalButton(Button): template = "cms/toolbar/items/button_modal.html" def __init__(self, name, url, active=False, disabled=False, extra_classes=None, on_close=None): self.name = name 
self.url = url self.active = active self.disabled = disabled self.extra_classes = extra_classes or [] self.on_close = on_close def __repr__(self): return '<ModalButton:%s>' % force_str(self.name) def get_context(self): return { 'name': self.name, 'url': self.url, 'active': self.active, 'disabled': self.disabled, 'extra_classes': self.extra_classes, 'on_close': self.on_close, } class SideframeButton(ModalButton): template = "cms/toolbar/items/button_sideframe.html" def __repr__(self): return '<SideframeButton:%s>' % force_str(self.name) class ButtonList(BaseItem): template = "cms/toolbar/items/button_list.html" def __init__(self, identifier=None, extra_classes=None, side=LEFT): super().__init__(side) self.extra_classes = extra_classes or [] self.buttons = [] self.identifier = identifier def __repr__(self): return '<ButtonList:%s>' % self.identifier def add_item(self, item): if not isinstance(item, Button): raise ValueError("Expected instance of cms.toolbar.items.Button, got %r instead" % item) self.buttons.append(item) def add_button(self, name, url, active=False, disabled=False, extra_classes=None): item = Button(name, url, active=active, disabled=disabled, extra_classes=extra_classes ) self.buttons.append(item) return item def add_modal_button(self, name, url, active=False, disabled=False, extra_classes=None, on_close=REFRESH_PAGE): item = ModalButton(name, url, active=active, disabled=disabled, extra_classes=extra_classes, on_close=on_close, ) self.buttons.append(item) return item def add_sideframe_button(self, name, url, active=False, disabled=False, extra_classes=None, on_close=None): item = SideframeButton(name, url, active=active, disabled=disabled, extra_classes=extra_classes, on_close=on_close, ) self.buttons.append(item) return item def get_buttons(self): for button in self.buttons: button.toolbar = self.toolbar yield button def get_context(self): context = { 'buttons': list(self.get_buttons()), 'extra_classes': self.extra_classes } if self.toolbar: context['cms_structure_on'] = self.toolbar.structure_mode_url_on return context class Dropdown(ButtonList): template = "cms/toolbar/items/dropdown.html" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.primary_button = None def __repr__(self): return '<Dropdown:%s>' % force_str(self.name) def add_primary_button(self, button): self.primary_button = button def get_buttons(self): for button in self.buttons: button.toolbar = self.toolbar button.is_in_dropdown = True yield button def get_context(self): return { 'primary_button': self.primary_button, 'buttons': list(self.get_buttons()), 'extra_classes': self.extra_classes, } class DropdownToggleButton(BaseButton): template = "cms/toolbar/items/dropdown_button.html" has_no_action = True def __init__(self, name, active=False, disabled=False, extra_classes=None): self.name = name self.active = active self.disabled = disabled self.extra_classes = extra_classes or [] def __repr__(self): return '<DropdownToggleButton:%s>' % force_str(self.name) def get_context(self): return { 'name': self.name, 'active': self.active, 'disabled': self.disabled, 'extra_classes': self.extra_classes, }
bsd-3-clause
1,316,855,163,184,581,600
29.46223
124
0.56598
false
4.066507
false
false
false
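The toolbar-items record above opens with the slot-finding routine used when a named menu is inserted among existing items. A minimal, self-contained sketch of that logic (the Slot container and the sample data are illustrative assumptions, not part of the source): given existing (index, name) pairs, return the index of the first alphabetically later item, or one past the highest index seen.

from collections import namedtuple

Slot = namedtuple('Slot', 'index name')

def next_menu_position(slots, new_name, default=0):
    # Use the provided default when there are no existing slots.
    if not slots:
        return default
    last_position = 0
    for slot in sorted(slots, key=lambda s: s.name):
        if slot.name > new_name:
            return slot.index       # insert before the first later name
        if slot.index > last_position:
            last_position = slot.index
    return last_position + 1        # append after everything else

slots = [Slot(0, 'Admin'), Slot(3, 'Pages')]
assert next_menu_position(slots, 'History') == 3   # slots in before 'Pages'
assert next_menu_position(slots, 'Users') == 4     # goes after the last item
assert next_menu_position([], 'Anything', default=7) == 7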
krissrex/python_projects
Projects/Oving10-itgk/main.py
1
6889
# -*- coding: utf-8 -*- """ Created on Sun Nov 9 00:06:24 2014 @author: kristian """ from skumleskogen import * import time ################## OPTIONS ################## debug_on = True write_to_file = True hukommelse = {} sti_totalt = ["inn"] noder_med_lås = set() forrige_retning = [] file = None try: del print except: pass _print = print class Print_To_File(object): def __init__(self, *text): _print(text) string = "" for t in text: string += str(t) if file: file.write("\n" + string) if write_to_file: print = Print_To_File file = open("output.txt", mode="a") class MovementException(Exception): def __init__(self, error): self.error = error def __str__(self): return str(self.error) def start_solving(): print("Er inngang:", er_inngang()) nøkler = 0 while True: debug() husk_node() if er_stank(): if gaa_tilbake(): sti_totalt.append("STANK! tilbake til " + str(nummer())) kom_fra_retning = forrige_retning.pop(len(forrige_retning) - 1) continue if er_nokkel(): if plukk_opp(): nøkler += 1 sti_totalt.append("plukket nøkkel " + str(nøkler)) continue if (not hukommelse[nummer()]["venstre"]) \ or kan_låse_opp(nummer(), nøkler, "venstre"): try: hukommelse[nummer()]["lås"][0] = False hukommelse[nummer()]["superlås"][0] = False besøk_node("venstre") except MovementException as ex: print(ex) else: forrige_retning.append("venstre") sti_totalt.append("venstre " + str(nummer())) continue if (not hukommelse[nummer()]["høyre"]) \ or kan_låse_opp(nummer(), nøkler, "høyre"): try: hukommelse[nummer()]["lås"][1] = False hukommelse[nummer()]["superlås"][1] = False besøk_node("høyre") except MovementException as ex: print(ex) else: forrige_retning.append("høyre") sti_totalt.append("høyre " + str(nummer())) continue if er_laas(): noder_med_lås.add(nummer()) if er_superlaas(): if nøkler >= 2: utfall = laas_opp() if utfall: nøkler -= 2 sti_totalt.append("låste opp sl " + str(nøkler)) if nummer() in noder_med_lås: noder_med_lås.remove(nummer()) continue else: noder_med_lås.add(nummer()) else: if nøkler >= 1: utfall = laas_opp() if utfall: nøkler -= 1 sti_totalt.append("låste opp s " + str(nøkler)) if nummer() in noder_med_lås: noder_med_lås.remove(nummer()) continue if er_utgang(): gaa_ut() return # Vi er stuck. Noen noder må være låste. har_lås = er_laas() har_superlås = er_superlaas() if har_lås and har_superlås: # Låsen var ikke en vanlig lås, men superlås. 
har_lås = False if barn_har_lås(nummer()): har_lås = True if barn_har_superlås(nummer()): har_superlås = True if gaa_tilbake(): sti_totalt.append("tilbake til " + str(nummer())) kom_fra_retning = forrige_retning.pop(len(forrige_retning) - 1) print("kom fra:", kom_fra_retning) if har_lås: print("har lås") if kom_fra_retning == "venstre": hukommelse[nummer()]["lås"][0] = True else: hukommelse[nummer()]["lås"][1] = True if har_superlås: print("har superlås") if kom_fra_retning == "venstre": hukommelse[nummer()]["superlås"][0] = True else: hukommelse[nummer()]["superlås"][1] = True print(hukommelse[nummer()]) else: print("KLARTE IKKE Å GÅ TILBAKE!!!") return def kan_låse_opp(n, nøkler, retning): indeks = 0 if retning == "høyre": indeks = 1 if hukommelse[n]["lås"][indeks] and (nøkler >= 1): return True if hukommelse[n]["superlås"][indeks] and (nøkler >= 2): return True return False def barn_har_lås(n): return hukommelse[n]["lås"][0] or hukommelse[n]["lås"][1] def barn_har_superlås(n): return hukommelse[n]["superlås"][0] or hukommelse[n]["superlås"][1] def husk_node(): n = nummer() if n not in hukommelse: hukommelse[n] = {"venstre": False, "høyre": False, "lås": [False, False], "superlås": [False, False]} def besøk_node(retning): n = nummer() utfall = False if retning == "venstre": utfall = gaa_venstre() elif retning == "høyre": utfall = gaa_hoyre() else: print("Ugyldig retning oppgitt!", n, retning) return if utfall: hukommelse[n][retning] = True else: if er_laas(): raise MovementException("Er låst") else: raise MovementException("Er blindvei") def debug(): if debug_on: print("/"*25 + "DEBUG:" + "/"*25) print(("Nummer: {n}\n" + "Type:\n " + "i: {i}, l: {l}, sl: {sl}, st: {st}, nk: {nk}, v: {v}, u: {u}" + "\nLabel: {la}") .format(n=nummer(), i=er_inngang(), l=er_laas(), sl=er_superlaas(), st=er_stank(), u=er_utgang(), v=er_vanlig(), nk=er_nokkel(), la=label(nummer()))) def main(): # Initialisation. def get_hours(): return time.asctime().split(' ')[4] start_time = time.time() print("Starting. Time:", get_hours()) # Start solving the maze. try: start_solving() # In case of failure, e.g. a rabbit ate you. except Exception as e: print("Exception occured:") print(e) print("Exciting. Time:", get_hours()) # Done, do final actions. finally: print("\nRan for {0} seconds.".format( abs( round(start_time - time.time(), 4)))) print("Maze completed.") print(sti_totalt) if __name__ == "__main__": main() if file: file.close()
mit
-6,669,817,222,215,976,000
26.135458
79
0.474233
false
3.179739
false
false
false
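The maze-solver record above deletes and shadows the built-in print with a class so every message is mirrored into output.txt. A hedged alternative that gets the same effect with a plain wrapper function instead of an instantiated class (the names make_tee_print and tee_print are assumptions for illustration):

import builtins

def make_tee_print(log_file):
    real_print = builtins.print
    def tee_print(*args, **kwargs):
        kwargs.pop('file', None)             # always write to both targets
        real_print(*args, **kwargs)          # normal console output
        real_print(*args, file=log_file)     # mirrored to the log, as in the record
    return tee_print

with open('output.txt', mode='a') as log:
    print = make_tee_print(log)              # shadow the builtin, like the source does
    print('node', 42, 'visited')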
sajuptpm/neutron-ipam
neutron/services/firewall/drivers/linux/iptables_fwaas.py
1
11774
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright 2013 Dell Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # @author: Rajesh Mohan, [email protected], DELL Inc. from neutron.agent.linux import iptables_manager from neutron.extensions import firewall as fw_ext from neutron.openstack.common import log as logging from neutron.services.firewall.drivers import fwaas_base LOG = logging.getLogger(__name__) FWAAS_DRIVER_NAME = 'Fwaas iptables driver' FWAAS_CHAIN = 'fwaas' FWAAS_DEFAULT_CHAIN = 'fwaas-default-policy' INGRESS_DIRECTION = 'ingress' EGRESS_DIRECTION = 'egress' CHAIN_NAME_PREFIX = {INGRESS_DIRECTION: 'i', EGRESS_DIRECTION: 'o'} """ Firewall rules are applied on internal-interfaces of Neutron router. The packets ingressing tenant's network will be on the output direction on internal-interfaces. """ IPTABLES_DIR = {INGRESS_DIRECTION: '-o', EGRESS_DIRECTION: '-i'} IPV4 = 'ipv4' IPV6 = 'ipv6' IP_VER_TAG = {IPV4: 'v4', IPV6: 'v6'} class IptablesFwaasDriver(fwaas_base.FwaasDriverBase): """IPTables driver for Firewall As A Service.""" def __init__(self): LOG.debug(_("Initializing fwaas iptables driver")) def create_firewall(self, apply_list, firewall): LOG.debug(_('Creating firewall %(fw_id)s for tenant %(tid)s)'), {'fw_id': firewall['id'], 'tid': firewall['tenant_id']}) try: if firewall['admin_state_up']: self._setup_firewall(apply_list, firewall) else: self.apply_default_policy(apply_list, firewall) except (LookupError, RuntimeError): # catch known library exceptions and raise Fwaas generic exception LOG.exception(_("Failed to create firewall: %s"), firewall['id']) raise fw_ext.FirewallInternalDriverError(driver=FWAAS_DRIVER_NAME) def delete_firewall(self, apply_list, firewall): LOG.debug(_('Deleting firewall %(fw_id)s for tenant %(tid)s)'), {'fw_id': firewall['id'], 'tid': firewall['tenant_id']}) fwid = firewall['id'] try: for router_info in apply_list: ipt_mgr = router_info.iptables_manager self._remove_chains(fwid, ipt_mgr) self._remove_default_chains(ipt_mgr) # apply the changes immediately (no defer in firewall path) ipt_mgr.defer_apply_off() except (LookupError, RuntimeError): # catch known library exceptions and raise Fwaas generic exception LOG.exception(_("Failed to delete firewall: %s"), fwid) raise fw_ext.FirewallInternalDriverError(driver=FWAAS_DRIVER_NAME) def update_firewall(self, apply_list, firewall): LOG.debug(_('Updating firewall %(fw_id)s for tenant %(tid)s)'), {'fw_id': firewall['id'], 'tid': firewall['tenant_id']}) try: if firewall['admin_state_up']: self._setup_firewall(apply_list, firewall) else: self.apply_default_policy(apply_list, firewall) except (LookupError, RuntimeError): # catch known library exceptions and raise Fwaas generic exception LOG.exception(_("Failed to update firewall: %s"), firewall['id']) raise fw_ext.FirewallInternalDriverError(driver=FWAAS_DRIVER_NAME) def apply_default_policy(self, apply_list, firewall): LOG.debug(_('Applying firewall %(fw_id)s for tenant %(tid)s)'), {'fw_id': firewall['id'], 'tid': 
firewall['tenant_id']}) fwid = firewall['id'] try: for router_info in apply_list: ipt_mgr = router_info.iptables_manager # the following only updates local memory; no hole in FW self._remove_chains(fwid, ipt_mgr) self._remove_default_chains(ipt_mgr) # create default 'DROP ALL' policy chain self._add_default_policy_chain_v4v6(ipt_mgr) self._enable_policy_chain(fwid, ipt_mgr) # apply the changes immediately (no defer in firewall path) ipt_mgr.defer_apply_off() except (LookupError, RuntimeError): # catch known library exceptions and raise Fwaas generic exception LOG.exception(_("Failed to apply default policy on firewall: %s"), fwid) raise fw_ext.FirewallInternalDriverError(driver=FWAAS_DRIVER_NAME) def _setup_firewall(self, apply_list, firewall): fwid = firewall['id'] for router_info in apply_list: ipt_mgr = router_info.iptables_manager # the following only updates local memory; no hole in FW self._remove_chains(fwid, ipt_mgr) self._remove_default_chains(ipt_mgr) # create default 'DROP ALL' policy chain self._add_default_policy_chain_v4v6(ipt_mgr) #create chain based on configured policy self._setup_chains(firewall, ipt_mgr) # apply the changes immediately (no defer in firewall path) ipt_mgr.defer_apply_off() def _get_chain_name(self, fwid, ver, direction): return '%s%s%s' % (CHAIN_NAME_PREFIX[direction], IP_VER_TAG[ver], fwid) def _setup_chains(self, firewall, ipt_mgr): """Create Fwaas chain using the rules in the policy """ fw_rules_list = firewall['firewall_rule_list'] fwid = firewall['id'] #default rules for invalid packets and established sessions invalid_rule = self._drop_invalid_packets_rule() est_rule = self._allow_established_rule() for ver in [IPV4, IPV6]: if ver == IPV4: table = ipt_mgr.ipv4['filter'] else: table = ipt_mgr.ipv6['filter'] ichain_name = self._get_chain_name(fwid, ver, INGRESS_DIRECTION) ochain_name = self._get_chain_name(fwid, ver, EGRESS_DIRECTION) for name in [ichain_name, ochain_name]: table.add_chain(name) table.add_rule(name, invalid_rule) table.add_rule(name, est_rule) for rule in fw_rules_list: if not rule['enabled']: continue iptbl_rule = self._convert_fwaas_to_iptables_rule(rule) if rule['ip_version'] == 4: ver = IPV4 table = ipt_mgr.ipv4['filter'] else: ver = IPV6 table = ipt_mgr.ipv6['filter'] ichain_name = self._get_chain_name(fwid, ver, INGRESS_DIRECTION) ochain_name = self._get_chain_name(fwid, ver, EGRESS_DIRECTION) table.add_rule(ichain_name, iptbl_rule) table.add_rule(ochain_name, iptbl_rule) self._enable_policy_chain(fwid, ipt_mgr) def _remove_default_chains(self, nsid): """Remove fwaas default policy chain.""" self._remove_chain_by_name(IPV4, FWAAS_DEFAULT_CHAIN, nsid) self._remove_chain_by_name(IPV6, FWAAS_DEFAULT_CHAIN, nsid) def _remove_chains(self, fwid, ipt_mgr): """Remove fwaas policy chain.""" for ver in [IPV4, IPV6]: for direction in [INGRESS_DIRECTION, EGRESS_DIRECTION]: chain_name = self._get_chain_name(fwid, ver, direction) self._remove_chain_by_name(ver, chain_name, ipt_mgr) def _add_default_policy_chain_v4v6(self, ipt_mgr): ipt_mgr.ipv4['filter'].add_chain(FWAAS_DEFAULT_CHAIN) ipt_mgr.ipv4['filter'].add_rule(FWAAS_DEFAULT_CHAIN, '-j DROP') ipt_mgr.ipv6['filter'].add_chain(FWAAS_DEFAULT_CHAIN) ipt_mgr.ipv6['filter'].add_rule(FWAAS_DEFAULT_CHAIN, '-j DROP') def _remove_chain_by_name(self, ver, chain_name, ipt_mgr): if ver == IPV4: ipt_mgr.ipv4['filter'].ensure_remove_chain(chain_name) else: ipt_mgr.ipv6['filter'].ensure_remove_chain(chain_name) def _add_rules_to_chain(self, ipt_mgr, ver, chain_name, rules): if ver == IPV4: table = 
ipt_mgr.ipv4['filter'] else: table = ipt_mgr.ipv6['filter'] for rule in rules: table.add_rule(chain_name, rule) def _enable_policy_chain(self, fwid, ipt_mgr): bname = iptables_manager.binary_name for (ver, tbl) in [(IPV4, ipt_mgr.ipv4['filter']), (IPV6, ipt_mgr.ipv6['filter'])]: for direction in [INGRESS_DIRECTION, EGRESS_DIRECTION]: chain_name = self._get_chain_name(fwid, ver, direction) chain_name = iptables_manager.get_chain_name(chain_name) if chain_name in tbl.chains: jump_rule = ['%s qr-+ -j %s-%s' % (IPTABLES_DIR[direction], bname, chain_name)] self._add_rules_to_chain(ipt_mgr, ver, 'FORWARD', jump_rule) #jump to DROP_ALL policy chain_name = iptables_manager.get_chain_name(FWAAS_DEFAULT_CHAIN) jump_rule = ['-o qr-+ -j %s-%s' % (bname, chain_name)] self._add_rules_to_chain(ipt_mgr, IPV4, 'FORWARD', jump_rule) self._add_rules_to_chain(ipt_mgr, IPV6, 'FORWARD', jump_rule) #jump to DROP_ALL policy chain_name = iptables_manager.get_chain_name(FWAAS_DEFAULT_CHAIN) jump_rule = ['-i qr-+ -j %s-%s' % (bname, chain_name)] self._add_rules_to_chain(ipt_mgr, IPV4, 'FORWARD', jump_rule) self._add_rules_to_chain(ipt_mgr, IPV6, 'FORWARD', jump_rule) def _convert_fwaas_to_iptables_rule(self, rule): action = rule.get('action') == 'allow' and 'ACCEPT' or 'DROP' args = [self._protocol_arg(rule.get('protocol')), self._port_arg('dport', rule.get('protocol'), rule.get('destination_port')), self._port_arg('sport', rule.get('protocol'), rule.get('source_port')), self._ip_prefix_arg('s', rule.get('source_ip_address')), self._ip_prefix_arg('d', rule.get('destination_ip_address')), self._action_arg(action)] iptables_rule = ' '.join(args) return iptables_rule def _drop_invalid_packets_rule(self): return '-m state --state INVALID -j DROP' def _allow_established_rule(self): return '-m state --state ESTABLISHED,RELATED -j ACCEPT' def _action_arg(self, action): if action: return '-j %s' % action return '' def _protocol_arg(self, protocol): if protocol: return '-p %s' % protocol return '' def _port_arg(self, direction, protocol, port): if not (protocol in ['udp', 'tcp'] and port): return '' return '--%s %s' % (direction, port) def _ip_prefix_arg(self, direction, ip_prefix): if ip_prefix: return '-%s %s' % (direction, ip_prefix) return ''
apache-2.0
4,463,777,469,175,995,000
41.65942
79
0.58213
false
3.746102
false
false
false
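The driver above assembles each iptables rule in _convert_fwaas_to_iptables_rule by turning every optional FWaaS rule field into one argument fragment. A standalone reconstruction of that mapping under the same assumptions ('allow' becomes ACCEPT, anything else DROP, and port arguments apply only to tcp/udp); unlike the source, empty fragments are skipped rather than joined in:

def fwaas_rule_to_iptables(rule):
    action = 'ACCEPT' if rule.get('action') == 'allow' else 'DROP'
    args = []
    if rule.get('protocol'):
        args.append('-p %s' % rule['protocol'])
    if rule.get('protocol') in ('tcp', 'udp'):
        if rule.get('destination_port'):
            args.append('--dport %s' % rule['destination_port'])
        if rule.get('source_port'):
            args.append('--sport %s' % rule['source_port'])
    if rule.get('source_ip_address'):
        args.append('-s %s' % rule['source_ip_address'])
    if rule.get('destination_ip_address'):
        args.append('-d %s' % rule['destination_ip_address'])
    args.append('-j %s' % action)
    return ' '.join(args)

rule = {'action': 'allow', 'protocol': 'tcp',
        'destination_port': '443', 'source_ip_address': '10.0.0.0/24'}
print(fwaas_rule_to_iptables(rule))
# -p tcp --dport 443 -s 10.0.0.0/24 -j ACCEPT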
ant31/kpm
kpm/commands/kexec.py
1
1587
from kpm.console import KubernetesExec
from kpm.commands.command_base import CommandBase


class ExecCmd(CommandBase):
    name = 'exec'
    help_message = ("exec a command in a pod from the RC or RS name. "
                    "It executes the command on the first matching pod")

    def __init__(self, options):
        self.output = options.output
        self.kind = options.kind
        self.container = options.container
        self.namespace = options.namespace
        self.resource = options.name
        self.cmd = options.cmd
        self.result = None
        super(ExecCmd, self).__init__(options)

    @classmethod
    def _add_arguments(cls, parser):
        parser.add_argument('cmd', nargs='+', help="command to execute")
        parser.add_argument("--namespace", nargs="?",
                            help="kubernetes namespace", default='default')
        parser.add_argument('-k', '--kind', choices=['deployment', 'rs', 'rc'],
                            nargs="?", help="deployment, rc or rs", default='rc')
        parser.add_argument('-n', '--name', help="resource name", default='rs')
        parser.add_argument('-c', '--container', nargs='?',
                            help="container name", default=None)

    def _call(self):
        c = KubernetesExec(self.resource,
                           cmd=" ".join(self.cmd),
                           namespace=self.namespace,
                           container=self.container,
                           kind=self.kind)
        self.result = c.call()

    def _render_json(self):
        pass

    def _render_console(self):
        print(self.result)  # was a Python 2 print statement; parenthesized for Python 3
apache-2.0
6,065,838,701,070,110,000
35.068182
96
0.570258
false
4.408333
false
false
false
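The command above follows a pattern where each CLI command class contributes its own argparse flags through a classmethod and the dispatcher builds one subparser per command. A minimal self-contained sketch of that wiring (EchoCmd and the dispatcher below are illustrative, not part of kpm):

import argparse

class EchoCmd:
    name = 'echo'

    @classmethod
    def add_arguments(cls, parser):
        parser.add_argument('words', nargs='+', help='text to echo')
        parser.add_argument('--upper', action='store_true')

    @classmethod
    def run(cls, options):
        text = ' '.join(options.words)
        print(text.upper() if options.upper else text)

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(dest='command', required=True)  # Python 3.7+
sub = subparsers.add_parser(EchoCmd.name)
EchoCmd.add_arguments(sub)
options = parser.parse_args(['echo', 'hello', 'world', '--upper'])
EchoCmd.run(options)  # prints: HELLO WORLD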
jim-easterbrook/pywws
src/pywws/process.py
1
29244
# pywws - Python software for USB Wireless Weather Stations # http://github.com/jim-easterbrook/pywws # Copyright (C) 2008-21 pywws contributors # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """Generate hourly, daily & monthly summaries of raw weather station data :: %s This module takes raw weather station data (typically sampled every five or ten minutes) and generates hourly, daily and monthly summary data, which is useful when creating tables and graphs. Before computing the data summaries, raw data is "calibrated" using a user-programmable function. See :doc:`pywws.calib` for details. The hourly data is derived from all the records in one hour, e.g. from 18:00:00 to 18:59:59, and is given the index of the last complete record in that hour. The daily data summarises the weather over a 24 hour period typically ending at 2100 or 0900 hours, local (non DST) time, though midnight is another popular convention. It is also indexed by the last complete record in the period. Daytime and nighttime, as used when computing maximum and minimum temperatures, are assumed to start at 0900 and 2100 local time, or 1000 and 2200 when DST is in effect, regardless of the meteorological day. To adjust the meteorological day to your preference, or that used by your local official weather station, edit the "day end hour" line in your ``weather.ini`` file, then run :mod:`pywws.reprocess` to regenerate the summaries. Monthly summary data is computed from the daily summary data. If the meteorological day does not end at midnight, then each month may begin and end up to 12 hours before or after midnight. Wind speed data is averaged over the hour (or day) and the maximum gust speed during the hour (or day) is recorded. The predominant wind direction is calculated using vector arithmetic. Rainfall is converted from the raw "total since last reset" figure to a more useful total in the last hour, day or month. 
""" from __future__ import absolute_import, print_function __docformat__ = "restructuredtext en" __usage__ = """ usage: python -m pywws.process [options] data_dir options are: -h or --help display this help -v or --verbose increase number of informative messages data_dir is the root directory of the weather data """ __doc__ %= __usage__ __usage__ = __doc__.split('\n')[0] + __usage__ from ast import literal_eval from collections import deque from datetime import date, datetime, timedelta import getopt import logging import math import os import sys from pywws.calib import Calib from pywws.constants import HOUR, DAY, SECOND import pywws.logger import pywws.storage from pywws.timezone import time_zone logger = logging.getLogger(__name__) TIME_ERR = timedelta(seconds=45) MINUTEx5 = timedelta(minutes=5) HOURx3 = timedelta(hours=3) WEEK = timedelta(days=7) class Average(object): """Compute average of multiple data values.""" def __init__(self): self.acc = 0.0 self.count = 0 def add(self, value): if value is None: return self.acc += value self.count += 1 def result(self): if self.count == 0: return None return self.acc / float(self.count) class Minimum(object): """Compute minimum value and timestamp of multiple data values.""" def __init__(self): self.value = None self.time = None def add(self, value, time): if not self.time or value <= self.value: self.value = value self.time = time def result(self): if self.time: return self.value, self.time return None, None class Maximum(object): """Compute maximum value and timestamp of multiple data values.""" def __init__(self): self.value = None self.time = None def add(self, value, time): if not self.time or value > self.value: self.value = value self.time = time def result(self): if self.time: return self.value, self.time return None, None sin_LUT = list(map( lambda x: math.sin(math.radians(float(x * 360) / 16.0)), range(16))) cos_LUT = list(map( lambda x: math.cos(math.radians(float(x * 360) / 16.0)), range(16))) class WindFilter(object): """Compute average wind speed and direction. The wind speed and direction of each data item is converted to a vector before averaging, so the result reflects the dominant wind direction during the time period covered by the data. Setting the ``decay`` parameter converts the filter from a simple averager to one where the most recent sample carries the highest weight, and earlier samples have a lower weight according to how long ago they were. This process is an approximation of "exponential smoothing". See `Wikipedia <http://en.wikipedia.org/wiki/Exponential_smoothing>`_ for a detailed discussion. The parameter ``decay`` corresponds to the value ``(1 - alpha)`` in the Wikipedia description. Because the weather data being smoothed may not be at regular intervals this parameter is the decay over 5 minutes. Weather data at other intervals will have its weight scaled accordingly. The return value is a (speed, direction) tuple. :param decay: filter coefficient decay rate. 
:type decay: float :rtype: (float, float) """ def __init__(self, decay=1.0): self.decay = decay self.Ve = None self.Vn = 0.0 self.total = 0.0 self.weight = 1.0 self.total_weight = 0.0 self.last_idx = None def add(self, data): speed = data['wind_ave'] if speed is None: return if self.last_idx and self.decay != 1.0: interval = data['idx'] - self.last_idx assert interval.days == 0 decay = self.decay if interval != MINUTEx5: decay = decay ** (float(interval.seconds) / float(MINUTEx5.seconds)) self.weight = self.weight / decay self.last_idx = data['idx'] speed = speed * self.weight self.total += speed self.total_weight += self.weight direction = data['wind_dir'] if direction is None: return if self.Ve is None: self.Ve = 0.0 if isinstance(direction, int): self.Ve -= speed * sin_LUT[direction] self.Vn -= speed * cos_LUT[direction] else: direction = math.radians(float(direction) * 22.5) self.Ve -= speed * math.sin(direction) self.Vn -= speed * math.cos(direction) def result(self): if self.total_weight == 0.0: return (None, None) if self.Ve is None: return (self.total / self.total_weight, None) return (self.total / self.total_weight, (math.degrees(math.atan2(self.Ve, self.Vn)) + 180.0) / 22.5) class HourAcc(object): """'Accumulate' raw weather data to produce hourly summary. Compute average wind speed and maximum wind gust, find dominant wind direction and compute total rainfall. """ def __init__(self, last_rain): self.last_rain = last_rain self.copy_keys = ['idx', 'hum_in', 'temp_in', 'hum_out', 'temp_out', 'abs_pressure', 'rel_pressure'] self.reset() def reset(self): self.wind_fil = WindFilter() self.wind_gust = (-2.0, None) self.rain = 0.0 self.retval = {} def add_raw(self, data): idx = data['idx'] self.wind_fil.add(data) wind_gust = data['wind_gust'] if wind_gust is not None and wind_gust > self.wind_gust[0]: self.wind_gust = (wind_gust, idx) rain = data['rain'] if rain is not None: if self.last_rain is not None: diff = rain - self.last_rain if diff < -0.001: logger.warning( '%s rain reset %.1f -> %.1f', str(idx), self.last_rain, rain) elif diff > float(data['delay'] * 5): # rain exceeds 5mm / minute, assume corrupt data and ignore it logger.warning( '%s rain jump %.1f -> %.1f', str(idx), self.last_rain, rain) else: self.rain += max(0.0, diff) self.last_rain = rain # copy some current readings if 'illuminance' in data and not 'illuminance' in self.copy_keys: self.copy_keys.append('illuminance') self.copy_keys.append('uv') # if already have data to return, ignore 'lost contact' readings if data['temp_out'] is not None or not self.retval: for key in self.copy_keys: self.retval[key] = data[key] def result(self): if not self.retval: return None self.retval['wind_ave'], self.retval['wind_dir'] = self.wind_fil.result() if self.wind_gust[1]: self.retval['wind_gust'] = self.wind_gust[0] else: self.retval['wind_gust'] = None self.retval['rain'] = self.rain return self.retval class DayAcc(object): """'Accumulate' weather data to produce daily summary. Compute average wind speed, maximum wind gust and daytime max & nighttime min temperatures, find dominant wind direction and compute total rainfall. Daytime is assumed to be 0900-2100 and nighttime to be 2100-0900, local time (1000-2200 and 2200-1000 during DST), regardless of the "day end hour" setting. 
""" def __init__(self): self.has_illuminance = False self.ave = {} self.max = {} self.min = {} self.reset() def reset(self): self.wind_fil = WindFilter() self.wind_gust = (-1.0, None) self.rain = 0.0 for i in ('temp_in', 'temp_out', 'hum_in', 'hum_out', 'abs_pressure', 'rel_pressure'): self.ave[i] = Average() self.max[i] = Maximum() self.min[i] = Minimum() for i in ('illuminance', 'uv'): self.ave[i] = Average() self.max[i] = Maximum() self.retval = dict() def add_raw(self, data): idx = data['idx'] local_hour = time_zone.utc_to_nodst(idx).hour wind_gust = data['wind_gust'] if wind_gust is not None and wind_gust > self.wind_gust[0]: self.wind_gust = (wind_gust, idx) for i in ('temp_in', 'temp_out'): temp = data[i] if temp is not None: self.ave[i].add(temp) if local_hour >= 9 and local_hour < 21: # daytime max temperature self.max[i].add(temp, idx) else: # nighttime min temperature self.min[i].add(temp, idx) for i in ('hum_in', 'hum_out', 'abs_pressure', 'rel_pressure'): value = data[i] if value is not None: self.ave[i].add(value) self.max[i].add(value, idx) self.min[i].add(value, idx) if 'illuminance' in data: self.has_illuminance = True for i in ('illuminance', 'uv'): value = data[i] if value is not None: self.ave[i].add(value) self.max[i].add(value, idx) def add_hourly(self, data): self.wind_fil.add(data) rain = data['rain'] if rain is not None: self.rain += rain self.retval['idx'] = data['idx'] def result(self): if not self.retval: return None self.retval['wind_ave'], self.retval['wind_dir'] = self.wind_fil.result() if self.wind_gust[1]: self.retval['wind_gust'] = self.wind_gust[0] else: self.retval['wind_gust'] = None self.retval['wind_gust_t'] = self.wind_gust[1] self.retval['rain'] = self.rain for i in ('temp_in', 'temp_out', 'hum_in', 'hum_out', 'abs_pressure', 'rel_pressure'): self.retval['%s_ave' % i] = self.ave[i].result() (self.retval['%s_max' % i], self.retval['%s_max_t' % i]) = self.max[i].result() (self.retval['%s_min' % i], self.retval['%s_min_t' % i]) = self.min[i].result() if self.has_illuminance: for i in ('illuminance', 'uv'): self.retval['%s_ave' % i] = self.ave[i].result() (self.retval['%s_max' % i], self.retval['%s_max_t' % i]) = self.max[i].result() return self.retval class MonthAcc(object): """'Accumulate' daily weather data to produce monthly summary. Compute daytime max & nighttime min temperatures. 
""" def __init__(self, rain_day_threshold): self.rain_day_threshold = rain_day_threshold self.has_illuminance = False self.ave = {} self.min = {} self.max = {} self.min_lo = {} self.min_hi = {} self.min_ave = {} self.max_lo = {} self.max_hi = {} self.max_ave = {} self.reset() def reset(self): for i in ('temp_in', 'temp_out'): self.ave[i] = Average() self.min_lo[i] = Minimum() self.min_hi[i] = Maximum() self.min_ave[i] = Average() self.max_lo[i] = Minimum() self.max_hi[i] = Maximum() self.max_ave[i] = Average() for i in ('hum_in', 'hum_out', 'abs_pressure', 'rel_pressure'): self.ave[i] = Average() self.max[i] = Maximum() self.min[i] = Minimum() for i in ('illuminance', 'uv'): self.ave[i] = Average() self.max_lo[i] = Minimum() self.max_hi[i] = Maximum() self.max_ave[i] = Average() self.wind_fil = WindFilter() self.wind_gust = (-1.0, None) self.rain = 0.0 self.rain_days = 0 self.valid = False def add_daily(self, data): self.idx = data['idx'] for i in ('temp_in', 'temp_out'): temp = data['%s_ave' % i] if temp is not None: self.ave[i].add(temp) temp = data['%s_min' % i] if temp is not None: self.min_lo[i].add(temp, data['%s_min_t' % i]) self.min_hi[i].add(temp, data['%s_min_t' % i]) self.min_ave[i].add(temp) temp = data['%s_max' % i] if temp is not None: self.max_lo[i].add(temp, data['%s_max_t' % i]) self.max_hi[i].add(temp, data['%s_max_t' % i]) self.max_ave[i].add(temp) for i in ('hum_in', 'hum_out', 'abs_pressure', 'rel_pressure'): value = data['%s_ave' % i] if value is not None: self.ave[i].add(value) value = data['%s_min' % i] if value is not None: self.min[i].add(value, data['%s_min_t' % i]) value = data['%s_max' % i] if value is not None: self.max[i].add(value, data['%s_max_t' % i]) self.wind_fil.add(data) wind_gust = data['wind_gust'] if wind_gust is not None and wind_gust > self.wind_gust[0]: self.wind_gust = (wind_gust, data['wind_gust_t']) if 'illuminance_ave' in data: self.has_illuminance = True for i in ('illuminance', 'uv'): value = data['%s_ave' % i] if value is not None: self.ave[i].add(value) value = data['%s_max' % i] if value is not None: self.max_lo[i].add(value, data['%s_max_t' % i]) self.max_hi[i].add(value, data['%s_max_t' % i]) self.max_ave[i].add(value) self.rain += data['rain'] if data['rain'] >= self.rain_day_threshold: self.rain_days += 1 self.valid = True def result(self): if not self.valid: return None result = {} result['idx'] = self.idx result['rain'] = self.rain result['rain_days'] = self.rain_days for i in ('temp_in', 'temp_out'): result['%s_ave' % i] = self.ave[i].result() result['%s_min_ave' % i] = self.min_ave[i].result() (result['%s_min_lo' % i], result['%s_min_lo_t' % i]) = self.min_lo[i].result() (result['%s_min_hi' % i], result['%s_min_hi_t' % i]) = self.min_hi[i].result() result['%s_max_ave' % i] = self.max_ave[i].result() (result['%s_max_lo' % i], result['%s_max_lo_t' % i]) = self.max_lo[i].result() (result['%s_max_hi' % i], result['%s_max_hi_t' % i]) = self.max_hi[i].result() for i in ('hum_in', 'hum_out', 'abs_pressure', 'rel_pressure'): result['%s_ave' % i] = self.ave[i].result() (result['%s_max' % i], result['%s_max_t' % i]) = self.max[i].result() (result['%s_min' % i], result['%s_min_t' % i]) = self.min[i].result() result['wind_ave'], result['wind_dir'] = self.wind_fil.result() if self.wind_gust[1]: result['wind_gust'] = self.wind_gust[0] else: result['wind_gust'] = None result['wind_gust_t'] = self.wind_gust[1] if self.has_illuminance: for i in ('illuminance', 'uv'): result['%s_ave' % i] = self.ave[i].result() result['%s_max_ave' % i] = 
self.max_ave[i].result() (result['%s_max_lo' % i], result['%s_max_lo_t' % i]) = self.max_lo[i].result() (result['%s_max_hi' % i], result['%s_max_hi_t' % i]) = self.max_hi[i].result() return result def calibrate_data(params, raw_data, calib_data): """'Calibrate' raw data, using a user-supplied function.""" start = calib_data.before(datetime.max) if start is None: start = datetime.min start = raw_data.after(start + SECOND) if start is None: return start del calib_data[start:] calibrator = Calib(params, raw_data) def calibgen(inputdata): """Internal generator function""" count = 0 for data in inputdata: idx = data['idx'] count += 1 if count % 10000 == 0: logger.info("calib: %s", idx.isoformat(' ')) elif count % 500 == 0: logger.debug("calib: %s", idx.isoformat(' ')) for key in ('rain', 'abs_pressure', 'temp_in'): if data[key] is None: logger.error('Ignoring invalid data at %s', idx.isoformat(' ')) break else: yield calibrator.calib(data) calib_data.update(calibgen(raw_data[start:])) return start def generate_hourly(calib_data, hourly_data, process_from): """Generate hourly summaries from calibrated data.""" start = hourly_data.before(datetime.max) if start is None: start = datetime.min start = calib_data.after(start + SECOND) if process_from: if start: start = min(start, process_from) else: start = process_from if start is None: return start # set start of hour in local time (not all time offsets are integer hours) start = time_zone.hour_start(start) del hourly_data[start:] # preload pressure history, and find last valid rain prev = None pressure_history = deque() last_rain = None for data in calib_data[start - HOURx3:start]: if data['rel_pressure']: pressure_history.append((data['idx'], data['rel_pressure'])) if data['rain'] is not None: last_rain = data['rain'] prev = data # iterate over data in one hour chunks stop = calib_data.before(datetime.max) acc = HourAcc(last_rain) def hourlygen(inputdata, prev): """Internal generator function""" hour_start = start count = 0 while hour_start <= stop: count += 1 if count % 1008 == 0: logger.info("hourly: %s", hour_start.isoformat(' ')) elif count % 24 == 0: logger.debug("hourly: %s", hour_start.isoformat(' ')) hour_end = hour_start + HOUR acc.reset() for data in inputdata[hour_start:hour_end]: if data['rel_pressure']: pressure_history.append((data['idx'], data['rel_pressure'])) if prev: err = data['idx'] - prev['idx'] if abs(err - timedelta(minutes=data['delay'])) > TIME_ERR: logger.info('unexpected data interval %s %s', data['idx'].isoformat(' '), str(err)) acc.add_raw(data) prev = data new_data = acc.result() if new_data and (new_data['idx'] - hour_start) >= timedelta(minutes=9): # compute pressure trend new_data['pressure_trend'] = None if new_data['rel_pressure']: target = new_data['idx'] - HOURx3 while (len(pressure_history) >= 2 and abs(pressure_history[0][0] - target) > abs(pressure_history[1][0] - target)): pressure_history.popleft() if (pressure_history and abs(pressure_history[0][0] - target) < HOUR): new_data['pressure_trend'] = ( new_data['rel_pressure'] - pressure_history[0][1]) # store new hourly data yield new_data hour_start = hour_end hourly_data.update(hourlygen(calib_data, prev)) return start def generate_daily(day_end_hour, use_dst, calib_data, hourly_data, daily_data, process_from): """Generate daily summaries from calibrated and hourly data.""" start = daily_data.before(datetime.max) if start is None: start = datetime.min start = calib_data.after(start + SECOND) if process_from: if start: start = min(start, process_from) else: 
start = process_from if start is None: return start # round to start of this day, in local time start = time_zone.day_start(start, day_end_hour, use_dst=use_dst) del daily_data[start:] stop = calib_data.before(datetime.max) acc = DayAcc() def dailygen(inputdata): """Internal generator function""" day_start = start count = 0 while day_start <= stop: count += 1 if count % 30 == 0: logger.info("daily: %s", day_start.isoformat(' ')) else: logger.debug("daily: %s", day_start.isoformat(' ')) day_end = day_start + DAY if use_dst: # day might be 23 or 25 hours long day_end = time_zone.day_start( day_end + HOURx3, day_end_hour, use_dst=use_dst) acc.reset() for data in inputdata[day_start:day_end]: acc.add_raw(data) for data in hourly_data[day_start:day_end]: acc.add_hourly(data) new_data = acc.result() if new_data: new_data['start'] = day_start yield new_data day_start = day_end daily_data.update(dailygen(calib_data)) return start def generate_monthly(rain_day_threshold, day_end_hour, use_dst, daily_data, monthly_data, process_from): """Generate monthly summaries from daily data.""" start = monthly_data.before(datetime.max) if start is None: start = datetime.min start = daily_data.after(start + SECOND) if process_from: if start: start = min(start, process_from) else: start = process_from if start is None: return start # set start to noon on start of first day of month (local time) local_start = time_zone.utc_to_local(start).replace(tzinfo=None) local_start = local_start.replace(day=1, hour=12, minute=0, second=0) # go back to UTC and get start of day (which might be previous day) start = time_zone.local_to_utc(local_start) start = time_zone.day_start(start, day_end_hour, use_dst=use_dst) del monthly_data[start:] stop = daily_data.before(datetime.max) if stop is None: return None def monthlygen(inputdata, start, local_start): """Internal generator function""" acc = MonthAcc(rain_day_threshold) month_start = start count = 0 while month_start <= stop: count += 1 if count % 12 == 0: logger.info("monthly: %s", month_start.isoformat(' ')) else: logger.debug("monthly: %s", month_start.isoformat(' ')) if local_start.month < 12: local_start = local_start.replace(month=local_start.month+1) else: local_start = local_start.replace( month=1, year=local_start.year+1) month_end = time_zone.local_to_utc(local_start) month_end = time_zone.day_start( month_end, day_end_hour, use_dst=use_dst) acc.reset() for data in inputdata[month_start:month_end]: acc.add_daily(data) new_data = acc.result() if new_data: new_data['start'] = month_start yield new_data month_start = month_end monthly_data.update(monthlygen(daily_data, start, local_start)) return start def get_day_end_hour(params): # get daytime end hour (in local time) day_end_hour, use_dst = literal_eval( params.get('config', 'day end hour', '9, False')) day_end_hour = day_end_hour % 24 return day_end_hour, use_dst def process_data(context): """Generate summaries from raw weather station data. The meteorological day end (typically 2100 or 0900 local time) is set in the preferences file ``weather.ini``. The default value is 2100 (2200 during DST), following the historical convention for weather station readings. """ logger.info('Generating summary data') # get time of last record last_raw = context.raw_data.before(datetime.max) if last_raw is None: raise IOError('No data found. 
Check data directory parameter.') # get daytime end hour (in local time) day_end_hour, use_dst = get_day_end_hour(context.params) # get other config rain_day_threshold = float( context.params.get('config', 'rain day threshold', '0.2')) # calibrate raw data start = calibrate_data(context.params, context.raw_data, context.calib_data) # generate hourly data start = generate_hourly(context.calib_data, context.hourly_data, start) # generate daily data start = generate_daily(day_end_hour, use_dst, context.calib_data, context.hourly_data, context.daily_data, start) # generate monthly data generate_monthly(rain_day_threshold, day_end_hour, use_dst, context.daily_data, context.monthly_data, start) return 0 def main(argv=None): if argv is None: argv = sys.argv try: opts, args = getopt.getopt(argv[1:], "hv", ['help', 'verbose']) except getopt.error as msg: print('Error: %s\n' % msg, file=sys.stderr) print(__usage__.strip(), file=sys.stderr) return 1 # process options verbose = 0 for o, a in opts: if o in ('-h', '--help'): print(__usage__.strip()) return 0 elif o in ('-v', '--verbose'): verbose += 1 # check arguments if len(args) != 1: print('Error: 1 argument required\n', file=sys.stderr) print(__usage__.strip(), file=sys.stderr) return 2 pywws.logger.setup_handler(verbose) data_dir = args[0] with pywws.storage.pywws_context(data_dir) as context: return process_data(context) if __name__ == "__main__": sys.exit(main())
gpl-2.0
-1,985,312,193,686,834,200
35.784906
94
0.559328
false
3.663284
false
false
false
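The WindFilter docstring above explains why wind is averaged as vectors: a naive mean of 350 and 10 degrees would give a nonsensical 180. A compact standalone sketch of that idea, taking and returning plain degrees (the source works in 22.5-degree compass steps and supports decay weighting, both omitted here):

import math

def average_wind(samples):
    # samples: iterable of (speed, direction_in_degrees) pairs
    ve = vn = total = 0.0
    count = 0
    for speed, direction in samples:
        rad = math.radians(direction)
        ve -= speed * math.sin(rad)   # east-west component
        vn -= speed * math.cos(rad)   # north-south component
        total += speed
        count += 1
    if count == 0:
        return None, None
    direction = (math.degrees(math.atan2(ve, vn)) + 180.0) % 360.0
    return total / count, direction

speed, direction = average_wind([(4.0, 350.0), (6.0, 10.0)])
print(round(speed, 1), round(direction, 1))  # 5.0 2.0 -- between 350 and 10, not 180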
brickfiestastem/brickfiesta
shop/views.py
1
11431
import datetime import json import urllib.error import urllib.parse import urllib.request import uuid from django.conf import settings from django.contrib import messages from django.contrib.auth.models import User from django.core.mail import EmailMessage from django.core.mail import send_mail from django.shortcuts import render, redirect from django.template import loader from django.urls import reverse from django.utils.html import format_html from django.views import View from django.views.generic import DetailView, FormView from django.views.generic.detail import SingleObjectMixin from django.views.generic.list import ListView from event.models import Event from shop.utils import check_recaptcha from .cart import ShoppingCart from .forms import CartItemForm from .models import Product, Order, OrderItem from .utils import add_attendee_fan_badge_shirt # Create your views here. class EventListView(ListView): queryset = Event.objects.all().order_by('start_date').filter( start_date__gt=datetime.date.today()) template_name = 'shop/event_list.html' class EventProductView(View): def get(self, request, event_id): obj_products = Product.objects.filter( event__id__exact=event_id, is_public=True).order_by('product_type').extra( select={'is_top': "product_type = '" + Product.EXHIBITION + "'"}) date_two_weeks = datetime.date.today() + datetime.timedelta(days=14) if obj_products.first().event.start_date <= date_two_weeks: obj_products = obj_products.extra( order_by=['-is_top', 'product_type']) return render(request, 'shop/product_list.html', {'object_list': obj_products, 'first': obj_products.first()}) class CartTestView(View): def get(self, request): str_checkout_id = request.GET.get('checkoutId', None) str_reference_id = request.GET.get('referenceId', None) if str_reference_id: request.session['cart_id'] = str_reference_id if str_checkout_id: request.session['checkout_id'] = str_checkout_id obj_cart = ShoppingCart(request) return render(request, 'shop/cart_contents.html', {'error_message': obj_cart.get_debug(request), 'cart': obj_cart.get_basket(), 'cart_total': obj_cart.total()}) class CartCheckoutView(View): def get(self, request): list_message = list() obj_cart = ShoppingCart(request) str_checkout_id = request.GET.get('checkoutId', "INVALID") str_reference_id = request.GET.get('referenceId', "INVALID") str_transaction_id = request.GET.get('transactionId', "INVALID") if obj_cart.check_checkout_id(str_checkout_id): # valid save everything in the users obj_order = None obj_basket = obj_cart.get_basket() for obj_item in obj_basket: obj_user = None try: obj_user = User.objects.get(email=obj_item.email) list_message.append( "Found existing customer information " + obj_item.email + ".") except User.DoesNotExist: obj_user = User.objects.create_user(username=obj_item.email, email=obj_item.email, first_name=obj_item.first_name, last_name=obj_item.last_name, password=uuid.uuid4()) list_message.append( "Created a user for " + obj_item.email + ". 
Please check your email for password instructions.") send_mail(subject="Brick Fiesta - New Account Created", message=loader.render_to_string( "afol/new_account_email.html"), from_email=settings.DEFAULT_FROM_EMAIL, recipient_list=[obj_item.email]) if obj_order is None: if request.user.is_authenticated: obj_order = Order(user=request.user, transaction_id=str_transaction_id, reference_id=str_reference_id, guest="") else: obj_order = Order(user=obj_user, transaction_id=str_transaction_id, reference_id=str_reference_id, guest="") obj_order.save() list_message.append( "Order associated with " + obj_item.email + ".") obj_order_item = OrderItem(order=obj_order, user=obj_user, first_name=obj_item.first_name, last_name=obj_item.last_name, product=obj_item.product, price=obj_item.product.price) # if obj_item.product.quantity_available > 0: # obj_product = obj_item.product # obj_product.quantity_available -= 1 # obj_product.save() obj_order_item.save() list_message.append( "Order item " + obj_order_item.product.title + " associated with " + obj_item.email + ".") add_attendee_fan_badge_shirt(request, obj_order_item) obj_cart.clear() else: list_message.append( "It looks like there was an problem with your cart and processing it.") list_message.append( "We have gathered the data and have sent an email to look into the issue.") list_message.append( "If you do not hear back in a few days please contact us using the contact form.") str_body = "JSON: " + obj_cart.get_debug(request) + "\n\nReference: " + str_reference_id + \ "\n\nTransaction: " + str_transaction_id email = EmailMessage( 'Brick Fiesta - URGENT - Cart Error', str_body, to=[settings.DEFAULT_FROM_EMAIL]) email.send() obj_cart.clear() return render(request, 'shop/cart_complete.html', {'message': list_message, }) class CartView(View): def post(self, request, *args, **kwargs): str_error_message = False obj_cart = ShoppingCart(request) if 'cart_item' in request.POST: obj_cart.remove(request.POST['cart_item']) if 'cart' in request.POST: # generate json objects str_json = obj_cart.get_json() str_json = str_json.encode('utf-8') print(str_json) str_url = "https://connect.squareup.com/v2/locations/" + \ settings.SQUARE_LOCATION_KEY + "/checkouts" # send request for objects obj_request = urllib.request.Request(url=str_url) obj_request.add_header( 'Authorization', 'Bearer ' + settings.SQUARE_CART_KEY) obj_request.add_header( 'Content-Type', 'application/json; charset=utf-8') obj_request.add_header('Accept', 'application/json') # get response obj_response = "" try: obj_response = urllib.request.urlopen( obj_request, data=str_json) except urllib.error.URLError as obj_error: # print(obj_error.reason) str_error_message = "Unable to reach payment server. Please try again later." str_body = "URL: " + str_url + "\n\nJSON: " + \ str_json.decode('ascii') + "\n\nRESPONSE:" + obj_response email = EmailMessage( 'Brick Fiesta - Check Out URL Error', str_body, to=[settings.DEFAULT_FROM_EMAIL]) email.send() pass except urllib.error.HTTPError as obj_error: str_error_message = "Unable to process payment correctly. Error sent to event organizers." 
str_body = "URL: " + str_url + "\n\nJSON: " + \ str_json.decode('ascii') + "\n\nRESPONSE:" + obj_response email = EmailMessage( 'Brick Fiesta - Check Out HTTP Error', str_body, to=[settings.DEFAULT_FROM_EMAIL]) email.send() # print(obj_error.code) # print(obj_error.read()) pass else: result = json.loads(obj_response.read().decode()) # print(result) obj_cart.set_checkout_id(request, result['checkout']['id']) return redirect(result['checkout']['checkout_page_url']) return render(request, 'shop/cart_contents.html', {'error_message': str_error_message, 'cart': obj_cart.get_basket(), 'cart_total': obj_cart.total()}) def get(self, request, token=None): if token: request.session['cart'] = str(token) obj_cart = ShoppingCart(request) return render(request, 'shop/cart_contents.html', {'cart': obj_cart.get_basket(), 'cart_total': obj_cart.total()}) class ProductDetailView(DetailView): model = Product def get_context_data(self, **kwargs): context = super(ProductDetailView, self).get_context_data(**kwargs) context['form'] = CartItemForm() return context class ProductCartItemView(SingleObjectMixin, FormView): template_name = 'shop/product_detail.html' form_class = CartItemForm model = Product def post(self, request, *args, **kwargs): cart = ShoppingCart(request) self.object = self.get_object() form = CartItemForm(request.POST) if not check_recaptcha(request): form.add_error( None, 'You failed the human test. Try the reCAPTCHA again.') if form.is_valid(): cart.add(first_name=form.cleaned_data['first_name'], last_name=form.cleaned_data['last_name'], email=form.cleaned_data['email'], product=self.object) messages.info(request, format_html( 'Product added to <a href="{}">cart</a>.', reverse('shop:cart'))) return super(ProductCartItemView, self).post(request, *args, **kwargs) def get_success_url(self): return reverse('shop:event', kwargs={'event_id': self.object.event.id}) class ProductDetail(View): def get(self, request, *args, **kwargs): view = ProductDetailView.as_view() return view(request, *args, **kwargs) def post(self, request, *args, **kwargs): view = ProductCartItemView.as_view() return view(request, *args, **kwargs)
agpl-3.0
1,022,110,666,750,949,400
44.181818
120
0.53906
false
4.321739
false
false
false
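CartView above posts the cart to the Square checkout endpoint with urllib.request, setting a bearer token and JSON headers by hand. A generic sketch of that request pattern (the URL and token below are placeholders, not real Square values):

import json
import urllib.request

def post_json(url, payload, token):
    data = json.dumps(payload).encode('utf-8')
    request = urllib.request.Request(url=url, data=data)   # POST because data is set
    request.add_header('Authorization', 'Bearer ' + token)
    request.add_header('Content-Type', 'application/json; charset=utf-8')
    request.add_header('Accept', 'application/json')
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read().decode())

# result = post_json('https://example.invalid/v2/checkouts',
#                    {'idempotency_key': 'abc123'}, 'PLACEHOLDER_TOKEN')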
kyubifire/softlayer-python
SoftLayer/CLI/user/permissions.py
1
1893
"""List A users permissions.""" import click import SoftLayer from SoftLayer.CLI import environment from SoftLayer.CLI import formatting from SoftLayer.CLI import helpers @click.command() @click.argument('identifier') @environment.pass_env def cli(env, identifier): """User Permissions. TODO change to list all permissions, and which users have them""" mgr = SoftLayer.UserManager(env.client) user_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'username') object_mask = "mask[id, permissions, isMasterUserFlag, roles]" user = mgr.get_user(user_id, object_mask) all_permissions = mgr.get_all_permissions() user_permissions = perms_to_dict(user['permissions']) if user['isMasterUserFlag']: click.secho('This account is the Master User and has all permissions enabled', fg='green') env.fout(roles_table(user)) env.fout(permission_table(user_permissions, all_permissions)) def perms_to_dict(perms): """Takes a list of permissions and transforms it into a dictionary for better searching""" permission_dict = {} for perm in perms: permission_dict[perm['keyName']] = True return permission_dict def permission_table(user_permissions, all_permissions): """Creates a table of available permissions""" table = formatting.Table(['Description', 'KeyName', 'Assigned']) table.align['KeyName'] = 'l' table.align['Description'] = 'l' table.align['Assigned'] = 'l' for perm in all_permissions: assigned = user_permissions.get(perm['keyName'], False) table.add_row([perm['name'], perm['keyName'], assigned]) return table def roles_table(user): """Creates a table for a users roles""" table = formatting.Table(['id', 'Role Name', 'Description']) for role in user['roles']: table.add_row([role['id'], role['name'], role['description']]) return table
mit
-6,047,882,941,413,080,000
32.210526
98
0.688325
false
3.879098
false
false
false
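The module above indexes the user's permissions once with perms_to_dict so each per-permission lookup is O(1) instead of rescanning the list. A standalone sketch of that pairing with sample data (permission_rows is illustrative; the real code renders a formatting.Table):

def perms_to_dict(perms):
    return {perm['keyName']: True for perm in perms}

def permission_rows(user_perms, all_perms):
    assigned = perms_to_dict(user_perms)
    return [(perm['name'], perm['keyName'], assigned.get(perm['keyName'], False))
            for perm in all_perms]

user = [{'keyName': 'TICKET_VIEW'}]
catalog = [{'name': 'View tickets', 'keyName': 'TICKET_VIEW'},
           {'name': 'Add server', 'keyName': 'SERVER_ADD'}]
for row in permission_rows(user, catalog):
    print(row)
# ('View tickets', 'TICKET_VIEW', True)
# ('Add server', 'SERVER_ADD', False)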
dracidoupe/graveyard
ddcz/migrations/0018_auto_20180617_1740.py
1
2511
# Generated by Django 2.0.2 on 2018-06-17 15:40

import ddcz.models.magic
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("ddcz", "0017_auto_20180617_1604"),
    ]

    operations = [
        migrations.AddField(
            model_name="gallerypicture",
            name="hodnota_hlasovani",
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="gallerypicture",
            name="pocet_hlasujicich",
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="gallerypicture",
            name="precteno",
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name="gallerypicture",
            name="tisknuto",
            field=models.IntegerField(default=0),
        ),
        migrations.AlterField(
            model_name="gallerypicture",
            name="autmail",
            field=ddcz.models.magic.MisencodedCharField(
                blank=True, max_length=50, null=True
            ),
        ),
        migrations.AlterField(
            model_name="gallerypicture",
            name="autor",
            field=ddcz.models.magic.MisencodedCharField(
                blank=True, max_length=50, null=True
            ),
        ),
        migrations.AlterField(
            model_name="gallerypicture",
            name="datum",
            field=models.DateTimeField(auto_now_add=True),
        ),
        migrations.AlterField(
            model_name="gallerypicture",
            name="id",
            field=models.AutoField(
                auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
            ),
        ),
        migrations.AlterField(
            model_name="gallerypicture",
            name="schvaleno",
            field=ddcz.models.magic.MisencodedCharField(
                choices=[("a", "Schváleno"), ("n", "Neschváleno")], max_length=1
            ),
        ),
        migrations.AlterField(
            model_name="gallerypicture",
            name="zdrojmail",
            field=ddcz.models.magic.MisencodedCharField(
                blank=True, max_length=30, null=True
            ),
        ),
        migrations.AlterField(
            model_name="gallerypicture",
            name="pochvez",
            field=ddcz.models.magic.MisencodedIntegerField(max_length=5),
        ),
    ]
mit
5,232,469,323,310,818,000
30.759494
87
0.540454
false
4.238176
false
false
false
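The migration above implies roughly the following shape for the gallerypicture model afterwards. This is a hedged reconstruction for illustration only: the field names come from the operations, while the standalone settings bootstrap, the class body, and the use of CharField in place of ddcz.models.magic.MisencodedCharField are assumptions:

import django
from django.conf import settings

# Minimal standalone setup so the model class below can be defined outside a
# real project; an actual deployment already has settings configured.
settings.configure(INSTALLED_APPS=['django.contrib.contenttypes',
                                   'django.contrib.auth'])
django.setup()

from django.db import models

class GalleryPicture(models.Model):
    hodnota_hlasovani = models.IntegerField(blank=True, null=True)
    pocet_hlasujicich = models.IntegerField(blank=True, null=True)
    precteno = models.IntegerField(default=0)
    tisknuto = models.IntegerField(default=0)
    datum = models.DateTimeField(auto_now_add=True)
    schvaleno = models.CharField(  # stand-in for MisencodedCharField
        max_length=1, choices=[("a", "Schváleno"), ("n", "Neschváleno")])

    class Meta:
        app_label = 'ddcz'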
magenta-aps/mox
oio_rest/oio_rest/validate.py
1
13865
# Copyright (C) 2015-2019 Magenta ApS, https://magenta.dk. # Contact: [email protected]. # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import copy import jsonschema from . import settings # A very nice reference explaining the JSON schema syntax can be found # here: https://spacetelescope.github.io/understanding-json-schema/ # JSON schema types BOOLEAN = {'type': 'boolean'} INTEGER = {'type': 'integer'} STRING = {'type': 'string'} def _generate_schema_array(items, maxItems=None): schema_array = { 'type': 'array', 'items': items } if maxItems: schema_array['maxItems'] = maxItems return schema_array def _generate_schema_object(properties, required, kwargs=None): schema_obj = { 'type': 'object', 'properties': properties, 'additionalProperties': False } # passing an empty array causes the schema to fail validation... if required: schema_obj['required'] = required if kwargs: schema_obj.update(kwargs) return schema_obj # Mapping from DATABASE_STRUCTURE types to JSON schema types TYPE_MAP = { 'aktoerattr': _generate_schema_object( { 'accepteret': STRING, 'obligatorisk': STRING, 'repraesentation_uuid': {'$ref': '#/definitions/uuid'}, }, ['accepteret', 'obligatorisk', 'repraesentation_uuid'] ), 'boolean': BOOLEAN, 'date': STRING, 'int': INTEGER, 'interval(0)': STRING, 'journaldokument': _generate_schema_object( { 'dokumenttitel': STRING, 'offentlighedundtaget': { '$ref': '#/definitions/offentlighedundtaget'} }, ['dokumenttitel', 'offentlighedundtaget'] ), 'journalnotat': _generate_schema_object( { 'titel': STRING, 'notat': STRING, 'format': STRING, }, ['titel', 'notat', 'format'] ), 'offentlighedundtagettype': { '$ref': '#/definitions/offentlighedundtaget'}, 'soegeord': _generate_schema_array(_generate_schema_array(STRING), 2), 'text[]': _generate_schema_array(STRING), 'timestamptz': STRING, 'vaerdirelationattr': _generate_schema_object( { 'forventet': BOOLEAN, 'nominelvaerdi': STRING }, ['forventet', 'nominelvaerdi'] ) } def _get_metadata(obj, metadata_type, key): """ Get the metadata for a given attribute :param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc. :param metadata_type: Must be either 'attributter' or 'relationer' :param key: The attribute to get the metadata from, e.g. 'egenskaber' :return: Dictionary containing the metadata for the attribute fields """ metadata = settings.REAL_DB_STRUCTURE[obj].get( '{}_metadata'.format(metadata_type), []) if not metadata or key not in metadata: return metadata return metadata[key] def _get_mandatory(obj, attribute_name): """ Get a list of mandatory attribute fields for a given attribute. :param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc. :param attribute_name: The attribute to get the fields from, e.g. 'egenskaber' :return: Sorted list of mandatory attribute keys """ attribute = _get_metadata(obj, 'attributter', attribute_name) mandatory = sorted( key for key in attribute if attribute[key].get('mandatory', False) ) return mandatory def _handle_attribute_metadata(obj, fields, attribute_name): """ Update the types of the attribute fields. :param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc. :param fields: A dictionary of attribute fields to update. :param attribute_name: The name of the attribute fields :return: Dictionary of updated attribute fields. 
""" attribute = _get_metadata(obj, 'attributter', attribute_name) fields.update( { key: TYPE_MAP[attribute[key]['type']] for key in attribute if attribute[key].get('type', False) } ) return fields def _generate_attributter(obj): """ Generate the 'attributter' part of the JSON schema. :param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc. :return: Dictionary representing the 'attributter' part of the JSON schema. """ db_attributter = settings.REAL_DB_STRUCTURE[obj]['attributter'] attrs = {} required = [] for attrname, attrval in db_attributter.items(): full_name = '{}{}'.format(obj, attrname) schema = { key: STRING for key in attrval } schema.update({'virkning': {'$ref': '#/definitions/virkning'}}) schema = _handle_attribute_metadata(obj, schema, attrname) mandatory = _get_mandatory(obj, attrname) attrs[full_name] = _generate_schema_array( _generate_schema_object( schema, mandatory + ['virkning'], ), ) if mandatory: required.append(full_name) return _generate_schema_object(attrs, required) def _generate_tilstande(obj): """ Generate the 'tilstande' part of the JSON schema. :param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc. :return: Dictionary representing the 'tilstande' part of the JSON schema. """ tilstande = dict(settings.REAL_DB_STRUCTURE[obj]['tilstande']) properties = {} required = [] for key in sorted(tilstande): tilstand_name = obj + key properties[tilstand_name] = _generate_schema_array( _generate_schema_object( { key: { 'type': 'string', 'enum': tilstande[key] }, 'virkning': {'$ref': '#/definitions/virkning'}, }, [key, 'virkning'] ) ) required.append(tilstand_name) return _generate_schema_object(properties, required) def _handle_relation_metadata_all(obj, relation): """ Update relations an their metadata (e.g. types) for all relations of the given LoRa object. :param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc. :param relation: The base relation to update. :return: Dictionary representing the updated relation. """ metadata_all = _get_metadata(obj, 'relationer', '*') for key in metadata_all: if 'type' in metadata_all[key]: relation['items']['oneOf'][0]['properties'][key] = TYPE_MAP[ metadata_all[key]['type']] relation['items']['oneOf'][1]['properties'][key] = TYPE_MAP[ metadata_all[key]['type']] return relation def _handle_relation_metadata_specific(obj, relation_schema): """ Update relations an their metadata (e.g. types) for specific relations of the given LoRa object. :param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc. :param relation_schema: Dictionary representing the 'relationer' part of the JSON schema. :return: Dictionary representing the updated 'relationer' part of the JSON schema. 
""" metadata_specific = ( settings.REAL_DB_STRUCTURE[obj].get('relationer_metadata', []) ) for relation in [key for key in metadata_specific if not key == '*']: for i in range(2): properties = relation_schema[relation]['items']['oneOf'][i][ 'properties'] metadata = metadata_specific[relation] for key in metadata: if 'type' in metadata[key]: properties[key] = TYPE_MAP[metadata[key]['type']] if 'enum' in metadata[key]: # Enum implies type = text properties[key] = { 'type': 'string', 'enum': metadata[key]['enum'] } if metadata[key].get('mandatory', False): relation_schema[relation]['items']['oneOf'][i][ 'required'].append(key) if obj == 'tilstand': # Handle special case for 'tilstand' where UUID not allowed item = relation_schema['tilstandsvaerdi']['items']['oneOf'][0] del item['properties']['uuid'] item['required'].remove('uuid') relation_schema['tilstandsvaerdi']['items'] = item return relation_schema def _generate_relationer(obj): """ Generate the 'relationer' part of the JSON schema. :param obj: The type of LoRa object, i.e. 'bruger', 'organisation' etc. :return: Dictionary representing the 'relationer' part of the JSON schema. """ relationer_nul_til_en = \ settings.REAL_DB_STRUCTURE[obj]['relationer_nul_til_en'] relationer_nul_til_mange = settings.REAL_DB_STRUCTURE[obj][ 'relationer_nul_til_mange'] relation_nul_til_mange = _generate_schema_array( { 'oneOf': [ _generate_schema_object( { 'uuid': {'$ref': '#/definitions/uuid'}, 'virkning': {'$ref': '#/definitions/virkning'}, 'objekttype': STRING }, ['uuid', 'virkning'] ), _generate_schema_object( { 'urn': {'$ref': '#/definitions/urn'}, 'virkning': {'$ref': '#/definitions/virkning'}, 'objekttype': STRING }, ['urn', 'virkning'] ) ] } ) relation_nul_til_mange = _handle_relation_metadata_all( obj, relation_nul_til_mange) relation_schema = { relation: copy.deepcopy(relation_nul_til_mange) for relation in relationer_nul_til_mange } relation_nul_til_en = copy.deepcopy(relation_nul_til_mange) relation_nul_til_en['items']['oneOf'][0]['properties'].pop('indeks', None) relation_nul_til_en['items']['oneOf'][1]['properties'].pop('indeks', None) relation_nul_til_en['maxItems'] = 1 for relation in relationer_nul_til_en: relation_schema[relation] = relation_nul_til_en relation_schema = _handle_relation_metadata_specific(obj, relation_schema) return { 'type': 'object', 'properties': relation_schema, 'additionalProperties': False } def _generate_varianter(): """ Function to generate the special 'varianter' section of the JSON schema used for the the 'Dokument' LoRa object type. """ return _generate_schema_array(_generate_schema_object( { 'egenskaber': _generate_schema_array(_generate_schema_object( { 'varianttekst': STRING, 'arkivering': BOOLEAN, 'delvisscannet': BOOLEAN, 'offentliggoerelse': BOOLEAN, 'produktion': BOOLEAN, 'virkning': {'$ref': '#/definitions/virkning'} }, ['varianttekst', 'virkning'] )) }, ['egenskaber'] )) def generate_json_schema(obj): """ Generate the JSON schema corresponding to LoRa object type. :param obj: The LoRa object type, i.e. 'bruger', 'organisation',... :return: Dictionary representing the JSON schema. """ if obj == 'dokument': # Due to an inconsistency between the way LoRa handles # "DokumentVariantEgenskaber" and the specs' we will have to do # this for now, i.e. we allow any JSON-object for "Dokument". 
return {'type': 'object'} schema = _generate_schema_object( { 'attributter': _generate_attributter(obj), 'tilstande': _generate_tilstande(obj), 'relationer': _generate_relationer(obj), 'note': STRING, }, ['attributter', 'tilstande'] ) schema['$schema'] = 'http://json-schema.org/schema#' schema['id'] = 'http://github.com/magenta-aps/mox' schema['definitions'] = { 'urn': { 'type': 'string', 'pattern': '^urn:.' }, 'uuid': { 'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-' '[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$' }, 'virkning': _generate_schema_object( { 'from': STRING, 'to': STRING, 'from_included': BOOLEAN, 'to_included': BOOLEAN, 'aktoerref': {'$ref': '#/definitions/uuid'}, 'aktoertypekode': STRING, 'notetekst': STRING, }, ['from', 'to'] ), 'offentlighedundtaget': _generate_schema_object( { 'alternativtitel': STRING, 'hjemmel': STRING }, ['alternativtitel', 'hjemmel'] ) } return schema SCHEMAS = {} def get_schema(obj_type): try: return SCHEMAS[obj_type] except KeyError: pass schema = SCHEMAS[obj_type] = copy.deepcopy(generate_json_schema(obj_type)) return schema def validate(input_json, obj_type): """ Validate request JSON according to JSON schema. :param input_json: The request JSON :raise jsonschema.exceptions.ValidationError: If the request JSON is not valid according to the JSON schema. """ jsonschema.validate(input_json, get_schema(obj_type))
mpl-2.0
8,253,058,164,997,057,000
30.227477
79
0.567039
false
3.872905
false
false
false
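Note on the validate.py record above: the module assembles per-object JSON schemas from REAL_DB_STRUCTURE, caches them in SCHEMAS, and funnels every request through jsonschema.validate. A minimal standalone sketch of that validation pattern, using a toy schema in place of the generated LoRa one:

    import jsonschema

    # Toy stand-in for the output of generate_json_schema(obj); the real
    # schema additionally carries '$schema', 'id' and shared 'definitions'.
    schema = {
        'type': 'object',
        'properties': {'note': {'type': 'string'}},
        'required': ['note'],
        'additionalProperties': False,
    }

    jsonschema.validate({'note': 'ok'}, schema)  # passes silently
    try:
        jsonschema.validate({'note': 42}, schema)
    except jsonschema.exceptions.ValidationError as err:
        print(err.message)  # 42 is not of type 'string'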
OpenSPA/dvbapp
lib/python/Screens/MessageBox.py
1
4589
from Screen import Screen from Components.ActionMap import ActionMap from Components.Label import Label from Components.Pixmap import Pixmap from Components.Sources.StaticText import StaticText from Components.MenuList import MenuList from enigma import eTimer class MessageBox(Screen): TYPE_YESNO = 0 TYPE_INFO = 1 TYPE_WARNING = 2 TYPE_ERROR = 3 TYPE_MESSAGE = 4 def __init__(self, session, text, type=TYPE_YESNO, timeout=-1, close_on_any_key=False, default=True, enable_input=True, msgBoxID=None, picon=None, simple=False, list=[], timeout_default=None): self.type = type Screen.__init__(self, session) self.skinName = ["MessageBox"] if self.type == self.TYPE_YESNO: self.setTitle(_("Question")) elif self.type == self.TYPE_INFO: self.setTitle(_("Information")) elif self.type == self.TYPE_WARNING: self.setTitle(_("Warning")) elif self.type == self.TYPE_ERROR: self.setTitle(_("Error")) else: self.setTitle(_("Message")) if simple: self.skinName="MessageBoxSimple" self.msgBoxID = msgBoxID self["text"] = Label(text) self["Text"] = StaticText(text) self["selectedChoice"] = StaticText() self.text = text self.close_on_any_key = close_on_any_key self.timeout_default = timeout_default self["ErrorPixmap"] = Pixmap() self["QuestionPixmap"] = Pixmap() self["InfoPixmap"] = Pixmap() self["WarningPixmap"] = Pixmap() self.timerRunning = False self.initTimeout(timeout) picon = picon or type if picon != self.TYPE_ERROR: self["ErrorPixmap"].hide() if picon != self.TYPE_YESNO: self["QuestionPixmap"].hide() if picon != self.TYPE_INFO: self["InfoPixmap"].hide() if picon != self.TYPE_WARNING: self["WarningPixmap"].hide() self.title = self.type < self.TYPE_MESSAGE and [_("Question"), _("Information"), _("Warning"), _("Error")][self.type] or _("Message") if type == self.TYPE_YESNO: if list: self.list = list elif default == True: self.list = [ (_("yes"), True), (_("no"), False) ] else: self.list = [ (_("no"), False), (_("yes"), True) ] else: self.list = [] self["list"] = MenuList(self.list) if self.list: self["selectedChoice"].setText(self.list[0][0]) else: self["list"].hide() if enable_input: self["actions"] = ActionMap(["MsgBoxActions", "DirectionActions"], { "cancel": self.cancel, "ok": self.ok, "alwaysOK": self.alwaysOK, "up": self.up, "down": self.down, "left": self.left, "right": self.right, "upRepeated": self.up, "downRepeated": self.down, "leftRepeated": self.left, "rightRepeated": self.right }, -1) self.onLayoutFinish.append(self.layoutFinished) def layoutFinished(self): self.setTitle(self.title) def initTimeout(self, timeout): self.timeout = timeout if timeout > 0: self.timer = eTimer() self.timer.callback.append(self.timerTick) self.onExecBegin.append(self.startTimer) self.origTitle = None if self.execing: self.timerTick() else: self.onShown.append(self.__onShown) self.timerRunning = True else: self.timerRunning = False def __onShown(self): self.onShown.remove(self.__onShown) self.timerTick() def startTimer(self): self.timer.start(1000) def stopTimer(self): if self.timerRunning: del self.timer self.onExecBegin.remove(self.startTimer) self.setTitle(self.origTitle) self.timerRunning = False def timerTick(self): if self.execing: self.timeout -= 1 if self.origTitle is None: self.origTitle = self.instance.getTitle() self.setTitle(self.origTitle + " (" + str(self.timeout) + ")") if self.timeout == 0: self.timer.stop() self.timerRunning = False self.timeoutCallback() def timeoutCallback(self): print "Timeout!" 
if self.timeout_default is not None: self.close(self.timeout_default) else: self.ok() def cancel(self): self.close(False) def ok(self): if self.list: self.close(self["list"].getCurrent()[1]) else: self.close(True) def alwaysOK(self): self.close(True) def up(self): self.move(self["list"].instance.moveUp) def down(self): self.move(self["list"].instance.moveDown) def left(self): self.move(self["list"].instance.pageUp) def right(self): self.move(self["list"].instance.pageDown) def move(self, direction): if self.close_on_any_key: self.close(True) self["list"].instance.moveSelection(direction) if self.list: self["selectedChoice"].setText(self["list"].getCurrent()[0]) self.stopTimer() def __repr__(self): return str(type(self)) + "(" + self.text + ")"
gpl-2.0
843,997,252,445,277,600
24.780899
193
0.665504
false
3.015112
false
false
false
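Note on the MessageBox record above: the screen is normally opened through the enigma2 session rather than instantiated directly. A sketch of a typical call site, assuming the surrounding Screen and its session exist in the enigma2 runtime (for TYPE_YESNO the callback receives True/False, or the second element of the chosen list entry):

    from Screens.Screen import Screen
    from Screens.MessageBox import MessageBox

    class AnyScreen(Screen):  # hypothetical caller inside the enigma2 runtime
        def askDelete(self):
            # timeout=10 counts down in the window title; timeout_default
            # is returned if the user never answers
            self.session.openWithCallback(
                self.deleteConfirmed, MessageBox, _("Really delete?"),
                MessageBox.TYPE_YESNO, timeout=10, timeout_default=False)

        def deleteConfirmed(self, answer):
            if answer:
                print "deleting..."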
HaoboGu/Structure-Similarity
Drugbank.py
1
6376
import random import numpy as np import time from sklearn.linear_model import LogisticRegression from sklearn.metrics import roc_auc_score def read_drugbank_data(): # read interaction data interaction_file = open('data/interacts.csv') interact_dict = {} line = interaction_file.readline() while line: db_id1, db_id2, interact_level = line[0:-1].split('\t') interact_dict[db_id1, db_id2] = int(interact_level) # use multiple keys line = interaction_file.readline() interaction_file.close() # read similarity data similarity_file = open('data/chemicalsimilarity.csv') similarity_dict = {} line = similarity_file.readline() while line: db_id1, db_id2, similarity = line[0:-1].split('\t') similarity_dict[db_id1, db_id2] = float(similarity) line = similarity_file.readline() similarity_file.close() return interact_dict, similarity_dict class Validation: def __init__(self, interact_dict, similarity_dict): self.interaction = interact_dict self.similarity = similarity_dict self.train_set = {} self.validation_set = {} self.sim_link = {} self.positive_train = {} self.max_sim_with_positive_link = {} self.max_sim_with_positive_link_for_val = {} def divide_data(self): self.train_set = {} self.validation_set = {} index = random.sample(range(0, 9892), 989) # randomly select 1/10 interactions as test_set flag = 0 for i in self.interaction: if flag in index: self.validation_set[i] = self.interaction[i] else: self.train_set[i] = self.interaction[i] flag += 1 # create known ddi dict: for key in self.train_set: if self.train_set[key] == 1: self.positive_train[key] = 1 def compute_link_sim(self, key1, key2): link_sim1 = (self.similarity[key1[0], key2[0]] + self.similarity[key1[1], key2[1]]) / 2.0 link_sim2 = (self.similarity[key1[0], key2[1]] + self.similarity[key1[1], key2[0]]) / 2.0 return max(link_sim1, link_sim2) def create_simlink(self): self.sim_link = {} # num = 1 for inter_key in self.train_set: max_link_sim = 0 for inter_key2 in self.positive_train: if inter_key[0] in inter_key2 and inter_key[1] in inter_key2: continue else: link_sim = self.compute_link_sim(inter_key, inter_key2) if link_sim > max_link_sim: max_link_sim = link_sim self.sim_link[inter_key] = inter_key2 self.max_sim_with_positive_link[inter_key] = max_link_sim # print('iter', num) # num += 1 def create_simlink_for_val(self): self.sim_link = {} # num = 1 for inter_key in self.validation_set: max_link_sim = 0 for inter_key2 in self.positive_train: if inter_key[0] in inter_key2 and inter_key[1] in inter_key2: continue else: link_sim = self.compute_link_sim(inter_key, inter_key2) if link_sim > max_link_sim: max_link_sim = link_sim # self.sim_link[inter_key] = inter_key2 self.max_sim_with_positive_link_for_val[inter_key] = max_link_sim sim_list = [] inter_list = [] for inter_key in self.validation_set: feature = self.max_sim_with_positive_link_for_val[inter_key] sim_list.append(feature) inter_list.append(self.validation_set[inter_key]) return sim_list, inter_list def create_train_array(self): sim_list = [] inter_list = [] num = 0 for inter_key in self.train_set: if self.train_set[inter_key] == 1: feature = self.max_sim_with_positive_link[inter_key] sim_list.append(feature) inter_list.append(self.train_set[inter_key]) num += 1 print('num of positive samples in train set: ', num) num = num * 3 for inter_key in self.train_set: if self.train_set[inter_key] == 0: feature = self.max_sim_with_positive_link[inter_key] sim_list.append(feature) inter_list.append(self.train_set[inter_key]) num = num - 1 if num == 0: break return sim_list, inter_list def lr(self, 
sim_list, inter_list): lr = LogisticRegression(solver='sag') sim_list = np.array(sim_list) sim_list = sim_list.reshape(sim_list.shape[0], 1) inter_list = np.array(inter_list) inter_list = inter_list.reshape(inter_list.shape[0], 1) lr.fit(sim_list, inter_list) val_sim, val_inter = self.create_simlink_for_val() val_sim = np.array(val_sim) val_sim = val_sim.reshape(val_sim.shape[0], 1) val_inter = np.array(val_inter).reshape(val_inter.__len__(), 1) result = lr.predict(val_sim) prob_re = lr.predict_proba(val_sim) prob_re = prob_re.transpose() auroc = roc_auc_score(val_inter, prob_re[1]) print('roc score:', auroc) return result, prob_re, val_inter start = time.time() interact_dict, sim_dict = read_drugbank_data() v = Validation(interact_dict, sim_dict) v.divide_data() v.create_simlink() sim_list, inter_list = v.create_train_array() result, prob_re, val_inter = v.lr(sim_list, inter_list) TP = 0 # predict 1, actual 1 FP = 0 # predict 1, actual 0 TN = 0 # predict 0, actual 0 FN = 0 # predict 0, actual 1 for i in range(0, 989): if result[i] == 0 and val_inter[i] == 0: TN += 1 elif result[i] == 0 and val_inter[i] == 1: FN += 1 elif result[i] == 1 and val_inter[i] == 0: FP += 1 elif result[i] == 1 and val_inter[i] == 1: TP += 1 print('tp:', TP, ' tn:', TN, ' fp:', FP, ' fn:', FN) precision = TP / (TP + FP) recall = TP / (TP + FN) print('precision:', precision) print('recall:', recall) print('f-score: ', 2 * precision * recall / (precision + recall)) end = time.time() print(end-start)
mit
7,838,667,354,194,536,000
35.649425
99
0.559285
false
3.402348
false
false
false
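Note on the Drugbank.py record above: the core scoring idea in compute_link_sim is that a candidate drug pair is compared against a known interacting pair under both possible alignments, keeping the better one. A self-contained toy illustration with a hand-made similarity dict (values are hypothetical):

    # Hypothetical similarity values, keyed like self.similarity above
    sim = {('a', 'c'): 0.9, ('b', 'd'): 0.5, ('a', 'd'): 0.2, ('b', 'c'): 0.4}

    def link_sim(pair, known):
        # align (a->c, b->d) or (a->d, b->c), keep the better average
        s1 = (sim[pair[0], known[0]] + sim[pair[1], known[1]]) / 2.0
        s2 = (sim[pair[0], known[1]] + sim[pair[1], known[0]]) / 2.0
        return max(s1, s2)

    print(link_sim(('a', 'b'), ('c', 'd')))  # max(0.7, 0.3) -> 0.7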
dunkenj/smpy
scripts/data/fitting.py
1
27412
import numpy as np import array import os, sys import re import time import multiprocessing import h5py import logging from astropy.table import Table, Column from astropy import units as u import argparse parser = argparse.ArgumentParser() parser.add_argument("-p","--params", type=str, help = "Parameter file") parser.add_argument("-q", "--quiet", help = "Suppress extra outputs", action = "store_true") args = parser.parse_args() quiet = args.quiet params_root = re.split(".py", args.params)[0] if os.path.isfile(params_root+".pyc"): os.remove(params_root+".pyc") import importlib try: params = importlib.import_module(params_root) print('Successfully loaded "{0}" as params'.format(args.params)) importlib.reload(params) except: print('Failed to load "{0}" as params'.format(args.params)) raise if quiet: quietprint = lambda *a: None else: def quietprint(*args): for arg in args: print(arg, end=' ') print() # Fitting function definition for later use by Processes def galaxyFit(inputQueue, printQueue, printlock): for gal in iter(inputQueue.get, 'STOP'): j = np.argmin(np.abs(z-zobs[gal])) # Find closest model redshift flux_obs = obs[gal,:] flux_err = obs_err[gal,:] #flux_obs[flux_obs <= 0.] = 0. # Set negative fluxes to zero I = np.where(flux_err > 0.)[0] # Find bands with no observation if len(I) == 0: if include_rest: M_scaled = np.ones(len(flux_obs)) * -99. restframe_output = ' '.join(M_scaled.astype('str')) output_string = '{0} {1} {2} {3} {4} {5} {6} {7}' \ ' {8} {9} {10} {11} {12} {13} {14} {15} {16}'.format(gal+1,ID[gal],zobs[gal],-99,-99,-99,-99,-99,-99, -99, -99,-99,len(I),-99,z[j],restframe_output,'\n') else: output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14}'.format(gal+1,ID[gal],zobs[gal],-99,-99,-99,-99,-99,-99,-99, -99,-99,len(I),-99,'\n') printQueue.put(output_string) continue flux_obs = flux_obs[I] # and exclude from fit flux_err = flux_err[I] flux_models = f[j,I,:] tot_err = np.sqrt(flux_err**2 + (0.1*flux_obs)**2) top = 0. bottom = 0. for i in range(len(flux_obs)): top += (flux_models[i,:]*flux_obs[i])/(tot_err[i]**2) bottom += (flux_models[i,:]**2)/(tot_err[i]**2) scale = top/bottom scale = np.reshape(scale, (n_metal, n_tg, n_tau, n_tauv, n_fesc)) chisq = 0. for i in range(len(flux_obs)): chisq += ((np.abs(scale*flux_models[i,:]-flux_obs[i])**2)/(flux_err[i])**2) chimin, minind = np.nanmin(chisq), np.nanargmin(chisq) if np.isinf(chimin) or np.isnan(minind): if include_rest: M_scaled = np.ones(len(flux_obs)) * -99. restframe_output = ' '.join(M_scaled.astype('str')) output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14} {15} {16}'.format(gal+1,ID[gal],zobs[gal],-99,-99,-99,-99,-99,-99, -99, -99,-99,len(I),-99,z[j],restframe_output,'\n') else: output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14}'.format(gal+1,ID[gal],zobs[gal],-99,-99,-99,-99,-99,-99,-99, -99,-99,len(I),-99,'\n') printQueue.put(output_string) continue #Find the coordinate of the model with the bestfit mass mi, tgi, ti, tvi, fi = np.unravel_index(minind, (n_metal, n_tg, n_tau, n_tauv, n_fesc)) Bestfit_Mass = np.log10(scale[mi, tgi, ti, tvi, fi]*flux_corr) Bestfit_SFR = (scale[mi, tgi, ti, tvi, fi] * SFR[mi, tgi, ti, tvi, fi]*flux_corr) #Bestfit_Beta = beta[tgi,tvi,ti,mi] Bestfit_Beta = -99. 
#Scale the observed tot_mag band of the template to be the same as the observed tot_mag band of the galaxy #Convert the templates so they are no longer units of per stellar mass F_rest = f[0,:]*scale[mi, tgi, ti, tvi, fi]*flux_corr restframeMags = 23.9 - 2.5*np.log10(F_rest) #UV_rest = UV_flux[0]*scale[tgi,tvi,ti,mi]*flux_corr #restframeMUV = 23.9 - 2.5*np.log10(UV_rest) M_scaled = restframeMags[:, mi, tgi, ti, tvi, fi] #MUV_scaled = restframeMUV[tgi,tvi,ti,mi] MUV_scaled = -99. if np.isnan(Bestfit_Mass) or np.isinf(chimin): Bestfit_Mass = -99 #M_scaled[:] = -99 tgs = -99 tvs = -99 taus = -99 mis = -99 escape_fraction = -99 else: tgs = tg[tgi]/1e9 tvs = tv[tvi] taus = tau[ti] mis = metallicities[mi] escape_fraction = fesc[fi] printlock.acquire() print('{0:6d} {1:8d} {2:>5.2f} {3:>7.2f} {4:>8.1f} {5:>8.3f} {6:>5.1f} {7:>8.2f} {8:>4.2f} {9:>5.2f}'.format(gal+1,ID[gal], zobs[gal],Bestfit_Mass,chimin,tgs,tvs,taus,mis,np.log10(Bestfit_SFR))) if include_rest: restframe_output = ' '.join(M_scaled.astype('str')) output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14} {15} {16}'.format(gal+1,ID[gal],zobs[gal],Bestfit_Mass,chimin,tgs,tvs,taus,mis, MUV_scaled, minind,Bestfit_SFR,len(I),Bestfit_Beta,z[j],restframe_output,'\n') else: output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14}'.format(gal+1,ID[gal],zobs[gal],Bestfit_Mass,chimin,tgs,tvs,taus,mis, MUV_scaled, minind,Bestfit_SFR,len(I),Bestfit_Beta,'\n') printlock.release() printQueue.put(output_string) def galaxyFit2(inputQueue, printQueue, printlock): for gal in iter(inputQueue.get, 'STOP'): output_string = '{0[0]} {0[1]} {0[2]} {0[3]} {0[4]} {0[5]} ' + \ '{0[6]} {0[7]} {0[8]} {0[9]} {0[10]} {0[11]} ' + \ '{0[12]} {0[13]} {0[14]}' j = np.argmin(np.abs(z-zobs[gal])) # Find closest model redshift log_mass_min, log_mass_max = 7, 13 log_sfr_min, log_sfr_max = -3, 4 flux_obs = obs[gal,:] flux_err = obs_err[gal,:] #flux_obs[fo <= 0.] = 0. # Set negative fluxes to zero I = np.where(flux_err > 0.)[0] # Find bands with no observation if len(I) == 0: output_array = [gal+1, ID[gal], zobs[gal], z[j], -99, -99, -99, -99, -99, -99, -99, -99,-99,len(I),-99,'\n'] output = output_string.format(output_array) if include_rest: M_scaled = np.ones(len(flux_obs)) * -99. restframe_output = ' '.join(M_scaled.astype('str')) output = output + restframe_output + ' \n' else: output = output + ' \n' printQueue.put(output_string) continue flux_obs = flux_obs[I] # and exclude from fit flux_err = flux_err[I] flux_models = f[j,I,:] tot_err = np.sqrt(flux_err**2 + (params.flux_err*flux_obs)**2) top = 0. bottom = 0. for i in range(len(flux_obs)): top += (flux_models[i,:]*flux_obs[i])/(tot_err[i]**2) bottom += (flux_models[i,:]**2)/(tot_err[i]**2) scale = top/bottom scale = np.reshape(scale, (n_metal, n_tg, n_tau, n_tauv, n_fesc)) chisq = 0. for i in range(len(flux_obs)): chisq += ((np.abs(scale*flux_models[i,:]-flux_obs[i])**2)/(tot_err[i])**2) chimin, minind = np.nanmin(chisq), np.nanargmin(chisq) likelihood = np.reshape(np.exp(-0.5*chisq), (n_metal, n_tg, n_tau, n_tauv, n_fesc)) likelihood[np.isnan(likelihood)] = 0. 
likelihood = np.abs(likelihood/likelihood.sum()) if np.isinf(chimin) or np.isnan(minind): output_array = [gal+1, ID[gal], zobs[gal], z[j], -99, -99, -99, -99, -99, -99, -99, -99,-99,len(I),-99,'\n'] output = output_string.format(output_array) else: #Find the coordinate of the model with the bestfit mass mi, tgi, ti, tvi, fi = np.unravel_index(minind, (n_metal, n_tg, n_tau, n_tauv, n_fesc)) Masses = np.abs(np.log10(scale*flux_corr)) SFRs = np.abs(np.log10(scale * SFR * flux_corr)) mass_hist = np.histogram(Masses.flatten(), range = (log_mass_min, log_mass_max), bins = 120, weights = likelihood.flatten(), density = True) sfr_hist = np.histogram(SFRs.flatten(), range = (log_sfr_min, log_sfr_max), bins = 140, weights = likelihood.flatten(), density = True) Bestfit_Mass = np.abs(np.log10(scale[mi, tgi, ti, tvi, fi]*flux_corr)) Bestfit_SFR = np.abs(np.log10(scale[mi, tgi, ti, tvi, fi] * SFR[mi, tgi, ti, tvi, fi]*flux_corr)) if np.isnan(Bestfit_Mass) or np.isinf(chimin): Bestfit_Mass = -99 #M_scaled[:] = -99 tgs = -99 tvs = -99 taus = -99 mis = -99 escape_fraction = -99 else: tgs = tg[tgi]/1e9 tvs = tv[tvi] taus = tau[ti] mis = metallicities[mi] escape_fraction = fesc[fi] m16, m50, m84 = weighted_quantile(Masses.flatten(), [0.16, 0.5, 0.84], sample_weight=likelihood.flatten(), values_sorted=False) s16, s50, s84 = weighted_quantile(SFRs.flatten(), [0.16, 0.5, 0.84], sample_weight=likelihood.flatten(), values_sorted=False) printlock.acquire() MUV_scaled = -99. Bestfit_Beta = -99. print_string = "{0[0]:6d} {0[1]:8d} {0[2]:>5.2f} " + \ "{0[3]:>7.2f} {0[4]:>8.1f} {0[5]:>8.3f} " + \ "{0[6]:>5.1f} {0[7]:>8.2f} {0[8]:>4.2f} " + \ "{0[9]:>5.2f}" print_array = [gal+1, ID[gal], zobs[gal], Bestfit_Mass, chimin, tgs, tvs, taus, mis, Bestfit_SFR] print(print_string.format(print_array)) output_string = '{n} {id} {zobs} {ztemp} {mass_best} {sfr_best} '+ \ '{chi_best} {tg} {tvs} {taus} {mis} {fesc} '+ \ '{mass_med} {mass_l68} {mass_u68} ' + \ '{sfr_med} {sfr_l68} {sfr_u68} ' + \ '{nfilts} ' output_values = {'n': gal+1, 'id': ID[gal], 'zobs': zobs[gal], 'ztemp':z[j], 'mass_best': Bestfit_Mass, 'sfr_best': Bestfit_SFR, 'chi_best': chimin, 'tg': tgs, 'tvs': tvs, 'taus': taus, 'mis': mis, 'fesc': escape_fraction, 'mass_med': m50, 'mass_l68': m16, 'mass_u68': m84, 'sfr_med': s50, 'sfr_l68': s16, 'sfr_u68': s84, 'nfilts': len(I)} output_array = [gal+1, ID[gal], zobs[gal], Bestfit_Mass, chimin, tgs, tvs, taus, mis, MUV_scaled, minind, Bestfit_SFR, len(I), -99., '\n'] output = output_string.format(**output_values) if include_rest: if np.isinf(chimin) or np.isnan(minind): M_scaled = np.ones(len(flux_obs)) * -99. restframe_output = ' '.join(M_scaled.astype('str')) output = output + restframe_output + ' \n' else: F_rest = np.array(f[0, :, mi, tgi, ti, tvi, fi] * scale[mi, tgi, ti, tvi, fi] * flux_corr) restframeMags = 23.9 - 2.5*np.log10(F_rest) restframe_output = ' '.join(restframeMags.astype('str')) output = output + restframe_output + ' \n' else: output = output + ' \n' printlock.release() printQueue.put([output, mass_hist, sfr_hist]) def galaxyFitPlus(inputQueue, printQueue, printlock): for gal in iter(inputQueue.get, 'STOP'): mass_range = 7, 13 log_sfr_min, log_sfr_max = -3, 4 j = np.argmin(np.abs(z-zobs[gal])) # Find closest model redshift fo = obs[gal,:] ferr = obs_err[gal,:] flux_obs[fo <= 0.] = 0. # Set negative fluxes to zero #print fo I = (ferr > 0.)*(ferr < 1e6) # Find bands with no observation fo = flux_obs[I] # and exclude from fit ferr = flux_err[I] fm = f[I,j,:] #print flux_models[:,0,0,0,0] top = 0. 
bottom = 0. for i in range(len(fo)): top += (flux_models[i,:]*flux_obs[i])/(flux_err[i]**2) bottom += (flux_models[i,:]**2)/(flux_err[i]**2) scale = top/bottom scale = np.reshape(scale, (n_metal, n_tg, n_tau, n_tauv, n_fesc)) chisq = 0. for i in range(len(fo)): chisq += ((np.abs(scale*flux_models[i,:]-flux_obs[i])**2)/(flux_err[i])**2) chimin, minind = np.nanmin(chisq), np.nanargmin(chisq) chisq -= (chisq.min() - 1) likelihood = np.exp(-0.5*chisq) likelihood /= likelihood.sum() if np.isinf(chimin) or np.isnan(minind) or len(fo) == 0: output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} \ {10} {11} {12} {13} {14} {15} {16} {17} {18}'.format(gal+1,ID[gal],zobs[gal], -99,-99,-99,-99,-99,-99, -99, -99, -99, -99,-99,-99,-99, len(I),-99,'\n') massLikelihood = np.zeros(mass_bins+1) massLikelihood[0] = gal muvLikelihood = np.zeros(muv_bins+1) muvLikelihood[0] = gal betaLikelihood = np.zeros(beta_bins+1) betaLikelihood[0] = gal #tauLikelihood = np.zeros(n_tau) #tauLikelihood = np.insert(tauLikelihood,0,gal) printQueue.put([output_string,massLikelihood,muvLikelihood,betaLikelihood]) continue #Find the coordinate of the model with the bestfit mass si,tgi,tvi,ti,mi = np.unravel_index(minind,(mass_bins,n_tg,n_tauv,n_tau,n_ssp)) Bestfit_Mass = np.log10(mass_range[si]*flux_corr) Bestfit_SFR = (mass_range[si]*SFR[tgi,ti,mi]*flux_corr) Bestfit_Beta = beta[tgi,tvi,ti,mi] F_rest = f[:,0]*mass_range[likelihood.argmax(0)]*flux_corr restframeMags = 23.9 - 2.5*np.log10(F_rest) UV_rest = UV_flux[0]*mass_range[likelihood.argmax(0)]*flux_corr restframeMUV = 23.9 - 2.5*np.log10(UV_rest) Bestfit_restframeMags = restframeMags[:,tgi,tvi,ti,mi] Bestfit_restframeMUV = restframeMUV[tgi,tvi,ti,mi] if np.isnan(Bestfit_Mass) or np.isinf(chimin): Bestfit_Mass = -99 #M_scaled[:] = -99 tgs = -99 tvs = -99 taus = -99 mis = -99 else: tgs = tg[tgi]/1.e9 tvs = tv[tvi] taus = tau[ti]/1.e9 mis = mi """ Likelihood array section: """ mass_hist = np.histogram(np.log10(mass_)) printlock.acquire() if calc_mode: print('{0:4d} {1:6d} {2:>6.2f} {3:>8.1f} {4:>6.2f}'.format(gal+1,ID[gal],Bestfit_Mass,chimin, np.log10(Mode_Mass), '/n')) else: print('{0:6d} {1:8f} {2:>5.2f} {3:>7.2f} {4:>8.1f} {5:>8.3f} {6:>5.1f} {7:>8.2f} {8:>3d} {9:>5.2f}'.format(gal+1,int(ID[gal]),zobs[gal],Bestfit_Mass,chimin,tgs,tvs,taus,mis,np.log10(Bestfit_SFR))) output_string = '{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12} {13} {14} {15}'.format(gal+1,int(ID[gal]),zobs[gal],Bestfit_Mass,chimin,tgs,tvs,taus,mis,Bestfit_restframeMags[tot],Bestfit_restframeMUV,minind,Bestfit_SFR,len(I),Bestfit_Beta,'\n') printlock.release() printQueue.put([output_string, massLikelihoods, muvLikelihoods, betaLikelihoods]) def getObservations(inputpath): input_data = Table.read(inputpath,format=input_format) column_names = list(input_data.columns.keys()) ID = input_data[ID_col] zobs = input_data[z_col] filter_names = [] k,l = 0,0 for ii in range(len(column_names)): if column_names[ii].lower().endswith(flux_col_end.lower()): if k == 0: fluxes = input_data[column_names[ii]] else: fluxes = np.column_stack((fluxes,input_data[column_names[ii]])) k+=1 filter_names.append(column_names[ii]) if column_names[ii].lower().endswith(fluxerr_col_end.lower()): if l == 0: fluxerrs = input_data[column_names[ii]] else: fluxerrs = np.column_stack((fluxerrs,input_data[column_names[ii]])) l+=1 """ if filts_used != None: try: fluxes = fluxes[:,filts_used] fluxerrs = fluxerrs[:,filts_used] except:r print('Filter mismatch 1') # Array slicing fail """ return ID, zobs, fluxes, fluxerrs, k, filter_names 
class _function_wrapper(object): """ This is a hack to make the likelihood function pickleable when ``args`` or ``kwargs`` are also included. Stolen from emcee """ def __init__(self, f, args, kwargs): self.f = f self.args = args self.kwargs = kwargs def __call__(self, x): try: return self.f(x, *self.args, **self.kwargs) except: import traceback print("emcee: Exception while calling your likelihood function:") print(" params:", x) print(" args:", self.args) print(" kwargs:", self.kwargs) print(" exception:") traceback.print_exc() raise def weighted_quantile(values, quantiles, sample_weight=None, values_sorted=False, old_style=False): """ Very close to np.percentile, but supports weights. NOTE: quantiles should be in [0, 1]! :param values: np.array with data :param quantiles: array-like with many quantiles needed :param sample_weight: array-like of the same length as `array` :param values_sorted: bool, if True, then will avoid sorting of initial array :param old_style: if True, will correct output to be consistent with np.percentile. :return: np.array with computed quantiles. """ values = np.array(values) quantiles = np.array(quantiles) if sample_weight is None: sample_weight = np.ones(len(values)) sample_weight = np.array(sample_weight) assert np.all(quantiles >= 0) and np.all(quantiles <= 1), 'quantiles should be in [0, 1]' if not values_sorted: sorter = np.argsort(values) values = values[sorter] sample_weight = sample_weight[sorter] weighted_quantiles = np.cumsum(sample_weight) - 0.5 * sample_weight if old_style: # To be convenient with np.percentile weighted_quantiles -= weighted_quantiles[0] weighted_quantiles /= weighted_quantiles[-1] else: weighted_quantiles /= np.sum(sample_weight) return np.interp(quantiles, weighted_quantiles, values) if __name__ == '__main__': logfile = open("error.log", "w") original_stderr = sys.stderr sys.stderr = logfile start = time.time() """ SECTION 1 """ model_path = params.model_path input_catalog = params.input_catalog input_format = params.input_format z_col = params.z_col ID_col = params.ID_col flux_col_end = params.flux_col_end fluxerr_col_end = params.fluxerr_col_end ncpus = params.ncpus filts_used = params.filts_used include_rest = params.include_rest output_path = params.output_catalog_path output_format = params.output_format output_hdf_path = params.output_hdf_path calc_mode = params.fitting_mode flux_corr = params.flux_corr ID, zobs, obs, obs_err, filters_found, filter_names = getObservations(input_catalog) """ Section 2 """ print("Loading synthetic mags and mass array:") models = h5py.File(model_path, 'r') tg = models['ages'].value tv = models['dust'].value tau = models['sfh'].value metallicities = models['metallicities'].value fesc = models['fesc'].value Mshape = models['fluxes'].shape z = models['z'] nfilts = Mshape[1] n_metal = Mshape[2] n_tg = Mshape[3] n_tau = Mshape[4] n_tauv = Mshape[5] n_fesc = Mshape[6] #UV_flux = synmags['UV_flux'] SFR = models['SFR'] Ms = models['Ms'] if (nfilts == filters_found) and (filts_used == None): f = models['fluxes'] elif filts_used != None: try: f = models['fluxes'][:,filts_used] obs = obs[:,filts_used] obs_err = obs_err[:,filts_used] filter_names = np.array(filter_names)[filts_used] except: print('Mis-match between model and observed filter numbers') raise # Slice fail print ("Done.") """ SECTION 3 """ if os.path.isfile(output_path+".temp_output.txt"): os.remove(output_path+".temp_output.txt") temp_file = open(output_path+".temp_output.txt","w") """ SECTION 4 Chi-sq calculation """ out_string = '{0:6s} 
{1:8s} {2:>5s} {3:>7s} {4:>8s} {5:>8s}' + \ '{6:>5s} {7:>8s} {8:>4s} {9:>5s}' print(out_string.format('N','ID','zobs','Best', 'chimin', 'tg', 'tauv','tau','met', 'sfr')) loop_start = time.time() ncpus = np.clip(ncpus, 1, multiprocessing.cpu_count()) inputQueue = multiprocessing.Queue() printQueue = multiprocessing.Queue() printlock = multiprocessing.Lock() if calc_mode == 'hist': output_hdf = h5py.File(output_hdf_path, 'w') output_hdf.create_dataset("mass_pdf", (len(ID), 120), dtype="f") output_hdf.create_dataset("sfr_pdf", (len(ID), 140), dtype="f") fitFunction = galaxyFit2 else: fitFunction = galaxyFit for i in range( ncpus ): multiprocessing.Process(target = fitFunction, args = (inputQueue, printQueue, printlock)).start() # Put elements in the send queue for processing for gal in range( len(ID) ): inputQueue.put( gal ) if calc_mode == 'hist': for i, gal in enumerate(ID): printout, mass_hist, sfr_hist = printQueue.get() if i == 0: mass_centers = 0.5*(mass_hist[1][1:] + mass_hist[1][:-1]) sfr_centers = 0.5*(sfr_hist[1][1:] + sfr_hist[1][:-1]) output_hdf.create_dataset("mass_bins", data = mass_centers) output_hdf.create_dataset("sfr_bins", data = sfr_centers) output_hdf["mass_pdf"][i] = mass_hist[0] output_hdf["sfr_pdf"][i] = sfr_hist[0] temp_file.write( printout ) #tau_array.tofile(tau_file) else: for i, gal in enumerate(ID): printout = printQueue.get() temp_file.write( printout ) #print len(mass_array), len(muv_array), len(beta_array) # Stop all the running processes for i in range( ncpus ): inputQueue.put( 'STOP' ) # Close both send and receive queues inputQueue.close() printQueue.close() temp_file.close() models.close() if calc_mode == 'hist': output_hdf.close() print("Fitting time taken: {0:.2f} {1}".format(time.time()-loop_start, '\n')) """ Section 3 Reload, format and save output table """ while temp_file.closed == False: time.sleep(0.1) data = np.loadtxt(output_path+".temp_output.txt") try: rows, cols = data.shape except: cols = len(data) output = Table() names = ['N', 'ID', 'z', 'zmodel', 'Mass_best', 'SFR_best', 'chi_best', 'Age_best','Dust_best', 'SFH_best', 'Metallicity_best', 'fesc_best', 'Mass_median', 'Mass_l68', 'Mass_u68', 'SFR_median', 'SFR_l68', 'SFR_u68', 'Nfilts'] units = [None, None, None, None, u.Msun, u.Msun/u.yr, None, u.Gyr, None, None, None, None, u.Msun, u.Msun, u.Msun, u.Msun/u.yr, u.Msun/u.yr, u.Msun/u.yr, None] types = ['i4', 'i4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'f4', 'i4'] if include_rest: for name in filter_names: names.append(name[:-len(flux_col_end)]+'_rest') units.append(u.mag) types.append('f4') for col in range(cols): column = Column( data[:,col], name = names[col], unit=units[col], dtype=types[col]) output.add_column(column) table_format = 'ascii.commented_header' output.sort('ID') if os.path.isfile(output_path): os.remove(output_path) output.write(output_path,format=table_format) print('Catalog saved') os.remove(temp_file.name) print() print("Total time taken: "+str(time.time()-start)) sys.stderr = original_stderr logfile.close()
mit
8,329,940,140,997,636,000
35.844086
261
0.489311
false
3.233691
false
false
false
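Note on the fitting.py record above: weighted_quantile reduces to interpolating the requested quantiles over a weighted empirical CDF. A small self-contained check of that logic (same arithmetic, toy data):

    import numpy as np

    values = np.array([1.0, 2.0, 3.0, 4.0])
    weights = np.array([1.0, 1.0, 1.0, 9.0])  # most of the mass sits on 4.0
    sorter = np.argsort(values)
    v, w = values[sorter], weights[sorter]
    wq = (np.cumsum(w) - 0.5 * w) / w.sum()
    # median and 68% interval are pulled towards the heavily weighted sample
    print(np.interp([0.16, 0.5, 0.84], wq, v))  # approx [2.4, 3.7, 4.0]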
kszys/num2words
num2words/lang_ID.py
1
6232
# Copyright (c) 2003, Taro Ogawa. All Rights Reserved. # Copyright (c) 2013, Savoir-faire Linux inc. All Rights Reserved. # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301 USA from __future__ import unicode_literals, print_function class Num2Word_ID(): BASE = {0: [], 1: ["satu"], 2: ["dua"], 3: ["tiga"], 4: ["empat"], 5: ["lima"], 6: ["enam"], 7: ["tujuh"], 8: ["delapan"], 9: ["sembilan"]} TENS_TO = {3: "ribu", 6: "juta", 9: "miliar", 12: "triliun", 15: "kuadriliun", 18: "kuantiliun", 21: "sekstiliun", 24: "septiliun", 27: "oktiliun", 30: "noniliun", 33: "desiliun"} errmsg_floatord = "Cannot treat float number %s as ordinal" errmsg_negord = "Cannot treat negative number %s as ordinal" errmsg_toobig = "Too large: abs(%s) must be less than %s" max_num = 10**36 def split_by_koma(self, number): return str(number).split('.') def split_by_3(self, number): """ starting here, it groups the number by three from the tail '1234567' -> (('1',),('234',),('567',)) :param number:str :rtype:tuple """ blocks = () length = len(number) if length < 3: blocks += ((number,),) else: len_of_first_block = length % 3 if len_of_first_block > 0: first_block = number[0:len_of_first_block], blocks += first_block, for i in range(len_of_first_block, length, 3): next_block = (number[i:i+3],), blocks += next_block return blocks def spell(self, blocks): """ it adds the list of spelling to the blocks (('1',),('034',)) -> (('1',['satu']),('034',['tiga', 'puluh', 'empat'])) :param blocks: tuple :rtype: tuple """ word_blocks = () first_block = blocks[0] if len(first_block[0]) == 1: if first_block[0] == '0': spelling = ['nol'] else: spelling = self.BASE[int(first_block[0])] elif len(first_block[0]) == 2: spelling = self.puluh(first_block[0]) else: spelling = self.ratus(first_block[0][0]) + self.puluh(first_block[0][1:3]) word_blocks += (first_block[0], spelling), for block in blocks[1:]: spelling = self.ratus(block[0][0]) + self.puluh(block[0][1:3]) block += spelling, word_blocks += block, return word_blocks def ratus(self, number): # it is used to spell if number == '1': return ['seratus'] elif number == '0': return [] else: return self.BASE[int(number)]+['ratus'] def puluh(self, number): # it is used to spell if number[0] == '1': if number[1] == '0': return ['sepuluh'] elif number[1] == '1': return ['sebelas'] else: return self.BASE[int(number[1])]+['belas'] elif number[0] == '0': return self.BASE[int(number[1])] else: return self.BASE[int(number[0])]+['puluh']+self.BASE[int(number[1])] def spell_float(self, float_part): # spell the float number word_list = [] for n in float_part: if n == '0': word_list += ['nol'] continue word_list += self.BASE[int(n)] return ' '.join(['','koma']+word_list) def join(self, word_blocks, float_part): """ join the words by first join lists in the tuple :param word_blocks: tuple :rtype: str """ word_list = [] length = len(word_blocks)-1 first_block = word_blocks[0], start = 0 
if length == 1 and first_block[0][0] == '1': word_list += ['seribu'] start = 1 for i in range(start, length+1, 1): word_list += word_blocks[i][1] if not word_blocks[i][1]: continue if i == length: break word_list += [self.TENS_TO[(length-i)*3]] return ' '.join(word_list)+float_part def to_cardinal(self, number): if number >= self.max_num: raise OverflowError(self.errmsg_toobig % (number, self.max_num)) minus = '' if number < 0: minus = 'min ' float_word = '' n = self.split_by_koma(abs(number)) if len(n) == 2: float_word = self.spell_float(n[1]) return minus + self.join(self.spell(self.split_by_3(n[0])), float_word) def to_ordinal(self, number): self.verify_ordinal(number) out_word = self.to_cardinal(number) if out_word == "satu": return "pertama" return "ke" + out_word def to_ordinal_num(self, number): self.verify_ordinal(number) return "ke-" + str(number) def to_currency(self, value): return self.to_cardinal(value)+" rupiah" def to_year(self, value): return self.to_cardinal(value) def verify_ordinal(self, value): if not value == int(value): raise TypeError(self.errmsg_floatord % value) if not abs(value) == value: raise TypeError(self.errmsg_negord % value)
lgpl-2.1
64,732,259,284,972,610
30.795918
86
0.516367
false
3.705113
false
false
false
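Note on the lang_ID.py record above: a few representative calls, assuming the class is importable; the expected outputs are traced directly from the code paths (the seribu special case, koma for decimals, pertama for the first ordinal):

    n2w = Num2Word_ID()
    print(n2w.to_cardinal(1234))   # seribu dua ratus tiga puluh empat
    print(n2w.to_cardinal(-2.05))  # min dua koma nol lima
    print(n2w.to_ordinal(1))       # pertama
    print(n2w.to_ordinal_num(5))   # ke-5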
Qubad786/pr-code-review
gitcodereview/settings.py
1
3117
""" Django settings for gitcodereview project. """ import os from os.path import abspath, dirname, join import dj_database_url from django.core.urlresolvers import reverse_lazy # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # SECURITY WARNING: don't run with debug turned on in production! DEBUG = os.environ.get('DEBUG', True) ALLOWED_HOSTS = ['*'] # Application definition INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'web', 'web.pullrequest', 'web.user', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'gitcodereview.urls' # Custom user model AUTH_USER_MODEL = "user.User" AUTHENTICATION_BACKENDS = [ 'web.user.auth_backend.UserAuthenticationBackend', ] TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')] , 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'gitcodereview.wsgi.application' # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_ROOT = 'staticfiles' STATIC_URL = '/static/' # Login URL LOGIN_URL = reverse_lazy('index') # Login Redirect URL LOGIN_REDIRECT_URL = reverse_lazy('dashboard') # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Secret key used in production secret. SECRET_KEY = os.environ.get('SECRET_KEY', 'secret_key') # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { 'default': dj_database_url.config() } # Github Oauth settings OAUTH_SETTINGS = { 'CLIENT_ID': os.environ.get('CLIENT_ID', None), 'CLIENT_SECRET': os.environ.get('CLIENT_SECRET', None), 'BASE_URL': os.environ.get('BASE_URL', None), 'ACCESS_TOKEN_URL': os.environ.get('ACCESS_TOKEN_URL', None), 'REDIRECT_URL': os.environ.get('REDIRECT_URL', None), } # Use developer's overrides if environment variables are not set. if os.path.isfile(join(dirname(abspath(__file__)), 'private.py')): from private import *
mit
6,610,438,276,086,385,000
25.87069
71
0.688803
false
3.506187
false
false
false
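Note on the settings.py record above: everything environment-specific comes from process environment variables, and dj_database_url.config() parses a DATABASE_URL. A sketch of the expected environment (placeholder values, not real credentials):

    import os
    os.environ['SECRET_KEY'] = 'change-me'                                # hypothetical
    os.environ['DATABASE_URL'] = 'postgres://user:pass@localhost/review'  # hypothetical
    os.environ['CLIENT_ID'] = 'github-oauth-app-id'                       # hypothetical

    import dj_database_url
    print(dj_database_url.config()['NAME'])  # review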
BadrYoubiIdrissi/TIPE-Algorithme-Genetique
Source/NEAT/test.py
1
2640
# -*- coding: utf-8 -*- """ Created on Wed Oct 12 11:36:14 2016 @author: Badr Youbi Idrissi """ import pygame import pygame.gfxdraw import numpy as np from pygame.locals import * from individu import Individu from phenotype import Phenotype from population import Population from datadisplay import DataDisplay import utilitaires as ut import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D pygame.init() screen = pygame.display.set_mode((860, 600), DOUBLEBUF | RESIZABLE) pygame.display.set_caption("Test") f = pygame.font.SysFont(pygame.font.get_default_font(), 20) clock = pygame.time.Clock() nb_e = 3 nb_s = 1 pop = Population(10, nb_e, nb_s) pop.generer() status = DataDisplay((0,0), padding = 20) status.add("FPS", lambda : clock.get_fps()) status.add("Current generation", lambda : pop.generationCount) status.add("Number of species", lambda : len(pop.especes)) status.add("Best fitness", pop.getBestFitness) status.add("Best shared fitness", pop.getBestSharedFitness) status.add("Average fitness", lambda : pop.averageFitness) evol = False while True: clock.tick() screen.fill((255,255,255)) for event in pygame.event.get(): if event.type == QUIT: pygame.quit() exit() elif event.type == KEYDOWN and event.key == K_UP: nbPoints = 100 X,Y = np.meshgrid(np.linspace(0,1,nbPoints),np.linspace(0,1,nbPoints)) Z = np.zeros((nbPoints,nbPoints)) for i in range(nbPoints): for j in range(nbPoints): pop.best[-1].phenotype.evaluate(ut.entree('1;'+str(X[i,j])+';'+str(Y[i,j]))) Z[i,j] = pop.best[-1].output() fig = plt.figure() ax = fig.gca(projection='3d') surf = ax.plot_surface(X, Y, Z) plt.show() elif event.type == KEYDOWN and event.key == K_DOWN: l = [pop.contenu[i].fitness for i in range(pop.length)] l2 = [pop.contenu[i].sharedFitness for i in range(pop.length)] plt.plot(range(pop.length), l) plt.plot(range(pop.length), l2) plt.show() elif event.type == KEYDOWN and event.key == K_e: evol = not(evol) elif event.type == VIDEORESIZE: pygame.display.set_mode((event.w, event.h), DOUBLEBUF | RESIZABLE) if evol: pop.evoluer() if (pop.generationCount % 10 == 0): pop.updateBest() pop.draw(status.police) status.draw() pygame.display.flip()
gpl-3.0
4,906,027,643,944,131,000
29.697674
96
0.595455
false
3.312422
false
false
false
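Note on the test.py record above: the K_UP handler samples the best phenotype on a 100x100 grid over the unit square and plots the response surface. The same plotting pattern in isolation, with a toy function standing in for the evolved network's output:

    import numpy as np
    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D  # noqa: F401, registers the 3d projection

    nbPoints = 50
    X, Y = np.meshgrid(np.linspace(0, 1, nbPoints), np.linspace(0, 1, nbPoints))
    Z = np.sin(3 * X) * np.cos(3 * Y)  # stand-in for pop.best[-1].output()

    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.plot_surface(X, Y, Z)
    plt.show()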
tu-darmstadt-ros-pkg/hector_flexbe_behavior
behaviors/behavior_pathdrivemission/src/behavior_pathdrivemission/pathdrivemission_sm.py
1
2883
#!/usr/bin/env python ########################################################### # WARNING: Generated code! # # ************************** # # Manual changes may get lost if file is generated again. # # Only code inside the [MANUAL] tags will be kept. # ########################################################### import roslib; roslib.load_manifest('behavior_pathdrivemission') from flexbe_core import Behavior, Autonomy, OperatableStateMachine, ConcurrencyContainer, PriorityContainer, Logger from hector_flexbe_states.create_path import CreatePath from hector_flexbe_states.invert_path import InvertPath from hector_flexbe_states.move_along_path import MoveAlongPath from hector_flexbe_states.sparse_path import SparsePath # Additional imports can be added inside the following tags # [MANUAL_IMPORT] from geometry_msgs.msg import PoseStamped # [/MANUAL_IMPORT] ''' Created on Thu Jun 02 2016 @author: Gabriel, Elisa ''' class PathDriveMissionSM(Behavior): ''' Robot moves along a given path ''' def __init__(self): super(PathDriveMissionSM, self).__init__() self.name = 'PathDriveMission' # parameters of this behavior # references to used behaviors # Additional initialization code can be added inside the following tags # [MANUAL_INIT] # [/MANUAL_INIT] # Behavior comments: def create(self): # x:52 y:481, x:179 y:505 _state_machine = OperatableStateMachine(outcomes=['finished', 'failed']) _state_machine.userdata.speed = 0.2 # Additional creation code can be added inside the following tags # [MANUAL_CREATE] # [/MANUAL_CREATE] with _state_machine: # x:169 y:61 OperatableStateMachine.add('Create_Path', CreatePath(), transitions={'succeeded': 'Invert_Path', 'retry': 'Create_Path'}, autonomy={'succeeded': Autonomy.Off, 'retry': Autonomy.Off}, remapping={'path': 'path'}) # x:309 y:56 OperatableStateMachine.add('Invert_Path', InvertPath(), transitions={'reached': 'Sparse_Path', 'failed': 'failed'}, autonomy={'reached': Autonomy.Off, 'failed': Autonomy.Off}, remapping={'path': 'path'}) # x:670 y:162 OperatableStateMachine.add('Move_Along_Path', MoveAlongPath(), transitions={'reached': 'finished', 'failed': 'failed'}, autonomy={'reached': Autonomy.Off, 'failed': Autonomy.Off}, remapping={'path': 'path', 'speed': 'speed'}) # x:482 y:64 OperatableStateMachine.add('Sparse_Path', SparsePath(max_dist=.2, max_angle=.2, min_dist=.1), transitions={'done': 'Move_Along_Path'}, autonomy={'done': Autonomy.Off}, remapping={'path': 'path'}) return _state_machine # Private functions can be added inside the following tags # [MANUAL_FUNC] # [/MANUAL_FUNC]
bsd-3-clause
-1,531,267,439,377,354,000
29.347368
115
0.623656
false
3.520147
false
false
false
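Note on the pathdrivemission_sm.py record above: stripped of the generated scaffolding, the behavior is a straight chain of states wired inside the machine's context manager. A pared-down two-state variant (dropping the invert and sparse steps), using only constructs that appear in the generated code:

    from flexbe_core import Autonomy, OperatableStateMachine
    from hector_flexbe_states.create_path import CreatePath
    from hector_flexbe_states.move_along_path import MoveAlongPath

    sm = OperatableStateMachine(outcomes=['finished', 'failed'])
    sm.userdata.speed = 0.2
    with sm:
        OperatableStateMachine.add('Create_Path', CreatePath(),
                                   transitions={'succeeded': 'Move', 'retry': 'Create_Path'},
                                   autonomy={'succeeded': Autonomy.Off, 'retry': Autonomy.Off},
                                   remapping={'path': 'path'})
        OperatableStateMachine.add('Move', MoveAlongPath(),
                                   transitions={'reached': 'finished', 'failed': 'failed'},
                                   autonomy={'reached': Autonomy.Off, 'failed': Autonomy.Off},
                                   remapping={'path': 'path', 'speed': 'speed'})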
napalm-automation/napalm-yang
napalm_yang/models/openconfig/network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global_/hellos/__init__.py
1
18060
# -*- coding: utf-8 -*- from operator import attrgetter from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType from pyangbind.lib.yangtypes import RestrictedClassType from pyangbind.lib.yangtypes import TypedListType from pyangbind.lib.yangtypes import YANGBool from pyangbind.lib.yangtypes import YANGListType from pyangbind.lib.yangtypes import YANGDynClass from pyangbind.lib.yangtypes import ReferenceType from pyangbind.lib.base import PybindBase from collections import OrderedDict from decimal import Decimal from bitarray import bitarray import six # PY3 support of some PY2 keywords (needs improved) if six.PY3: import builtins as __builtin__ long = int elif six.PY2: import __builtin__ from . import config from . import state class hellos(PybindBase): """ This class was auto-generated by the PythonClass plugin for PYANG from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/mpls/signaling-protocols/rsvp-te/global/hellos. Each member element of the container is represented as a class variable - with a specific YANG type. YANG Description: Top level container for RSVP hello parameters """ __slots__ = ("_path_helper", "_extmethods", "__config", "__state") _yang_name = "hellos" _pybind_generated_by = "container" def __init__(self, *args, **kwargs): self._path_helper = False self._extmethods = False self.__config = YANGDynClass( base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__state = YANGDynClass( base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path() + [self._yang_name] else: return [ "network-instances", "network-instance", "mpls", "signaling-protocols", "rsvp-te", "global", "hellos", ] def _get_config(self): """ Getter method for config, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/config (container) YANG Description: Configuration parameters relating to RSVP hellos """ return self.__config def _set_config(self, v, load=False): """ Setter method for config, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/config (container) If this variable is read-only (config: false) in the source YANG file, then _set_config is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_config() directly. 
YANG Description: Configuration parameters relating to RSVP hellos """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """config must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__config = t if hasattr(self, "_set"): self._set() def _unset_config(self): self.__config = YANGDynClass( base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def _get_state(self): """ Getter method for state, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/state (container) YANG Description: State information associated with RSVP hellos """ return self.__state def _set_state(self, v, load=False): """ Setter method for state, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/state (container) If this variable is read-only (config: false) in the source YANG file, then _set_state is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_state() directly. 
YANG Description: State information associated with RSVP hellos """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """state must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__state = t if hasattr(self, "_set"): self._set() def _unset_state(self): self.__state = YANGDynClass( base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) config = __builtin__.property(_get_config, _set_config) state = __builtin__.property(_get_state, _set_state) _pyangbind_elements = OrderedDict([("config", config), ("state", state)]) from . import config from . import state class hellos(PybindBase): """ This class was auto-generated by the PythonClass plugin for PYANG from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/mpls/signaling-protocols/rsvp-te/global/hellos. Each member element of the container is represented as a class variable - with a specific YANG type. 
YANG Description: Top level container for RSVP hello parameters """ __slots__ = ("_path_helper", "_extmethods", "__config", "__state") _yang_name = "hellos" _pybind_generated_by = "container" def __init__(self, *args, **kwargs): self._path_helper = False self._extmethods = False self.__config = YANGDynClass( base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) self.__state = YANGDynClass( base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path() + [self._yang_name] else: return [ "network-instances", "network-instance", "mpls", "signaling-protocols", "rsvp-te", "global", "hellos", ] def _get_config(self): """ Getter method for config, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/config (container) YANG Description: Configuration parameters relating to RSVP hellos """ return self.__config def _set_config(self, v, load=False): """ Setter method for config, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/config (container) If this variable is read-only (config: false) in the source YANG file, then _set_config is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_config() directly. 
YANG Description: Configuration parameters relating to RSVP hellos """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """config must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__config = t if hasattr(self, "_set"): self._set() def _unset_config(self): self.__config = YANGDynClass( base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) def _get_state(self): """ Getter method for state, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/state (container) YANG Description: State information associated with RSVP hellos """ return self.__state def _set_state(self, v, load=False): """ Setter method for state, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/rsvp_te/global/hellos/state (container) If this variable is read-only (config: false) in the source YANG file, then _set_state is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_state() directly. 
YANG Description: State information associated with RSVP hellos """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """state must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""", } ) self.__state = t if hasattr(self, "_set"): self._set() def _unset_state(self): self.__state = YANGDynClass( base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/network-instance", defining_module="openconfig-network-instance", yang_type="container", is_config=True, ) config = __builtin__.property(_get_config, _set_config) state = __builtin__.property(_get_state, _set_state) _pyangbind_elements = OrderedDict([("config", config), ("state", state)])
apache-2.0
4,378,248,771,271,248,000
37.181818
377
0.577187
false
4.380306
true
false
false
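Editor's note: the generated class above hides each YANG container behind a private attribute and exposes it through a property whose setter type-checks assignments. A minimal hand-written sketch of that pattern (plain Python for illustration, not the pyangbind API itself):

class Container(object):
    """Stand-in for the generated config/state container type."""


class hellos_sketch(object):
    def __init__(self):
        self.__config = Container()

    def _get_config(self):
        return self.__config

    def _set_config(self, v):
        # mirror the generated setter: reject values of an incompatible type
        if not isinstance(v, Container):
            raise ValueError("config must be of a type compatible with container")
        self.__config = v

    config = property(_get_config, _set_config)


h = hellos_sketch()
h.config = Container()   # accepted by the typed setter
# h.config = 42          # would raise ValueError, like the generated code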
GodotNativeTools/godot-cpp
binding_generator.py
1
27665
#!/usr/bin/env python import json # comment. # Convenience function for using template get_node def correct_method_name(method_list): for method in method_list: if method["name"] == "get_node": method["name"] = "get_node_internal" classes = [] def generate_bindings(path, use_template_get_node): global classes classes = json.load(open(path)) icalls = set() for c in classes: # print c['name'] used_classes = get_used_classes(c) if use_template_get_node and c["name"] == "Node": correct_method_name(c["methods"]) header = generate_class_header(used_classes, c, use_template_get_node) impl = generate_class_implementation(icalls, used_classes, c, use_template_get_node) header_file = open("include/gen/" + strip_name(c["name"]) + ".hpp", "w+") header_file.write(header) source_file = open("src/gen/" + strip_name(c["name"]) + ".cpp", "w+") source_file.write(impl) icall_header_file = open("include/gen/__icalls.hpp", "w+") icall_header_file.write(generate_icall_header(icalls)) register_types_file = open("src/gen/__register_types.cpp", "w+") register_types_file.write(generate_type_registry(classes)) init_method_bindings_file = open("src/gen/__init_method_bindings.cpp", "w+") init_method_bindings_file.write(generate_init_method_bindings(classes)) def is_reference_type(t): for c in classes: if c['name'] != t: continue if c['is_reference']: return True return False def make_gdnative_type(t, ref_allowed): if is_enum(t): return remove_enum_prefix(t) + " " elif is_class_type(t): if is_reference_type(t) and ref_allowed: return "Ref<" + strip_name(t) + "> " else: return strip_name(t) + " *" else: if t == "int": return "int64_t " if t == "float" or t == "real": return "real_t " return strip_name(t) + " " def generate_class_header(used_classes, c, use_template_get_node): source = [] source.append("#ifndef GODOT_CPP_" + strip_name(c["name"]).upper() + "_HPP") source.append("#define GODOT_CPP_" + strip_name(c["name"]).upper() + "_HPP") source.append("") source.append("") source.append("#include <gdnative_api_struct.gen.h>") source.append("#include <stdint.h>") source.append("") source.append("#include <core/CoreTypes.hpp>") class_name = strip_name(c["name"]) # Ref<T> is not included in object.h in Godot either, # so don't include it here because it's not needed if class_name != "Object" and class_name != "Reference": source.append("#include <core/Ref.hpp>") ref_allowed = True else: source.append("#include <core/TagDB.hpp>") ref_allowed = False included = [] for used_class in used_classes: if is_enum(used_class) and is_nested_type(used_class): used_class_name = remove_enum_prefix(extract_nested_type(used_class)) if used_class_name not in included: included.append(used_class_name) source.append("#include \"" + used_class_name + ".hpp\"") elif is_enum(used_class) and is_nested_type(used_class) and not is_nested_type(used_class, class_name): used_class_name = remove_enum_prefix(used_class) if used_class_name not in included: included.append(used_class_name) source.append("#include \"" + used_class_name + ".hpp\"") source.append("") if c["base_class"] != "": source.append("#include \"" + strip_name(c["base_class"]) + ".hpp\"") source.append("namespace godot {") source.append("") for used_type in used_classes: if is_enum(used_type) or is_nested_type(used_type, class_name): continue else: source.append("class " + strip_name(used_type) + ";") source.append("") vararg_templates = "" # generate the class definition here source.append("class " + class_name + (" : public _Wrapped" if c["base_class"] == "" else (" : public " + 
strip_name(c["base_class"])) ) + " {") if c["base_class"] == "": source.append("public: enum { ___CLASS_IS_SCRIPT = 0, };") source.append("") source.append("private:") if c["singleton"]: source.append("\tstatic " + class_name + " *_singleton;") source.append("") source.append("\t" + class_name + "();") source.append("") # Generate method table source.append("\tstruct ___method_bindings {") for method in c["methods"]: source.append("\t\tgodot_method_bind *mb_" + method["name"] + ";") source.append("\t};") source.append("\tstatic ___method_bindings ___mb;") source.append("\tstatic void *_detail_class_tag;") source.append("") source.append("public:") source.append("\tstatic void ___init_method_bindings();") # class id from core engine for casting source.append("\tinline static size_t ___get_id() { return (size_t)_detail_class_tag; }") source.append("") if c["singleton"]: source.append("\tstatic inline " + class_name + " *get_singleton()") source.append("\t{") source.append("\t\tif (!" + class_name + "::_singleton) {") source.append("\t\t\t" + class_name + "::_singleton = new " + class_name + ";") source.append("\t\t}") source.append("\t\treturn " + class_name + "::_singleton;") source.append("\t}") source.append("") # godot::api->godot_global_get_singleton((char *) \"" + strip_name(c["name"]) + "\");" # ___get_class_name source.append("\tstatic inline const char *___get_class_name() { return (const char *) \"" + strip_name(c["name"]) + "\"; }") source.append("\tstatic inline Object *___get_from_variant(Variant a) { godot_object *o = (godot_object*) a; return (o) ? (Object *) godot::nativescript_1_1_api->godot_nativescript_get_instance_binding_data(godot::_RegisterState::language_index, o) : nullptr; }") enum_values = [] source.append("\n\t// enums") for enum in c["enums"]: source.append("\tenum " + strip_name(enum["name"]) + " {") for value in enum["values"]: source.append("\t\t" + remove_nested_type_prefix(value) + " = " + str(enum["values"][value]) + ",") enum_values.append(value) source.append("\t};") source.append("\n\t// constants") for name in c["constants"]: if name not in enum_values: source.append("\tconst static int " + name + " = " + str(c["constants"][name]) + ";") if c["instanciable"]: source.append("") source.append("") source.append("\tstatic " + class_name + " *_new();") source.append("\n\t// methods") if class_name == "Object": source.append("#ifndef GODOT_CPP_NO_OBJECT_CAST") source.append("\ttemplate<class T>") source.append("\tstatic T *cast_to(const Object *obj);") source.append("#endif") source.append("") for method in c["methods"]: method_signature = "" # TODO decide what to do about virtual methods # method_signature += "virtual " if method["is_virtual"] else "" method_signature += make_gdnative_type(method["return_type"], ref_allowed) method_name = escape_cpp(method["name"]) method_signature += method_name + "(" has_default_argument = False method_arguments = "" for i, argument in enumerate(method["arguments"]): method_signature += "const " + make_gdnative_type(argument["type"], ref_allowed) argument_name = escape_cpp(argument["name"]) method_signature += argument_name method_arguments += argument_name # default arguments def escape_default_arg(_type, default_value): if _type == "Color": return "Color(" + default_value + ")" if _type == "bool" or _type == "int": return default_value.lower() if _type == "Array": return "Array()" if _type in ["PoolVector2Array", "PoolStringArray", "PoolVector3Array", "PoolColorArray", "PoolIntArray", "PoolRealArray"]: return _type + 
"()" if _type == "Vector2": return "Vector2" + default_value if _type == "Vector3": return "Vector3" + default_value if _type == "Transform": return "Transform()" if _type == "Transform2D": return "Transform2D()" if _type == "Rect2": return "Rect2" + default_value if _type == "Variant": return "Variant()" if default_value == "Null" else default_value if _type == "String": return "\"" + default_value + "\"" if _type == "RID": return "RID()" if default_value == "Null" or default_value == "[Object:null]": return "nullptr" return default_value if argument["has_default_value"] or has_default_argument: method_signature += " = " + escape_default_arg(argument["type"], argument["default_value"]) has_default_argument = True if i != len(method["arguments"]) - 1: method_signature += ", " method_arguments += "," if method["has_varargs"]: if len(method["arguments"]) > 0: method_signature += ", " method_arguments += ", " vararg_templates += "\ttemplate <class... Args> " + method_signature + "Args... args){\n\t\treturn " + method_name + "(" + method_arguments + "Array::make(args...));\n\t}\n""" method_signature += "const Array& __var_args = Array()" method_signature += ")" + (" const" if method["is_const"] else "") source.append("\t" + method_signature + ";") source.append(vararg_templates) if use_template_get_node and class_name == "Node": # Extra definition for template get_node that calls the renamed get_node_internal; has a default template parameter for backwards compatibility. source.append("\ttemplate <class T = Node>") source.append("\tT *get_node(const NodePath path) const {") source.append("\t\treturn Object::cast_to<T>(get_node_internal(path));") source.append("\t}") source.append("};") source.append("") # ...And a specialized version so we don't unnecessarily cast when using the default. 
source.append("template <>") source.append("inline Node *Node::get_node<Node>(const NodePath path) const {") source.append("\treturn get_node_internal(path);") source.append("}") source.append("") else: source.append("};") source.append("") source.append("}") source.append("") source.append("#endif") return "\n".join(source) def generate_class_implementation(icalls, used_classes, c, use_template_get_node): class_name = strip_name(c["name"]) ref_allowed = class_name != "Object" and class_name != "Reference" source = [] source.append("#include \"" + class_name + ".hpp\"") source.append("") source.append("") source.append("#include <core/GodotGlobal.hpp>") source.append("#include <core/CoreTypes.hpp>") source.append("#include <core/Ref.hpp>") source.append("#include <core/Godot.hpp>") source.append("") source.append("#include \"__icalls.hpp\"") source.append("") source.append("") for used_class in used_classes: if is_enum(used_class): continue else: source.append("#include \"" + strip_name(used_class) + ".hpp\"") source.append("") source.append("") source.append("namespace godot {") core_object_name = "this" source.append("") source.append("") if c["singleton"]: source.append("" + class_name + " *" + class_name + "::_singleton = NULL;") source.append("") source.append("") # FIXME Test if inlining has a huge impact on binary size source.append(class_name + "::" + class_name + "() {") source.append("\t_owner = godot::api->godot_global_get_singleton((char *) \"" + strip_name(c["name"]) + "\");") source.append("}") source.append("") source.append("") # Method table initialization source.append(class_name + "::___method_bindings " + class_name + "::___mb = {};") source.append("") source.append("void *" + class_name + "::_detail_class_tag = nullptr;") source.append("") source.append("void " + class_name + "::___init_method_bindings() {") for method in c["methods"]: source.append("\t___mb.mb_" + method["name"] + " = godot::api->godot_method_bind_get_method(\"" + c["name"] + "\", \"" + ("get_node" if use_template_get_node and method["name"] == "get_node_internal" else method["name"]) + "\");") source.append("\tgodot_string_name class_name;") source.append("\tgodot::api->godot_string_name_new_data(&class_name, \"" + c["name"] + "\");") source.append("\t_detail_class_tag = godot::core_1_2_api->godot_get_class_tag(&class_name);") source.append("\tgodot::api->godot_string_name_destroy(&class_name);") source.append("}") source.append("") if c["instanciable"]: source.append(class_name + " *" + strip_name(c["name"]) + "::_new()") source.append("{") source.append("\treturn (" + class_name + " *) godot::nativescript_1_1_api->godot_nativescript_get_instance_binding_data(godot::_RegisterState::language_index, godot::api->godot_get_class_constructor((char *)\"" + c["name"] + "\")());") source.append("}") for method in c["methods"]: method_signature = "" method_signature += make_gdnative_type(method["return_type"], ref_allowed) method_signature += strip_name(c["name"]) + "::" + escape_cpp(method["name"]) + "(" for i, argument in enumerate(method["arguments"]): method_signature += "const " + make_gdnative_type(argument["type"], ref_allowed) method_signature += escape_cpp(argument["name"]) if i != len(method["arguments"]) - 1: method_signature += ", " if method["has_varargs"]: if len(method["arguments"]) > 0: method_signature += ", " method_signature += "const Array& __var_args" method_signature += ")" + (" const" if method["is_const"] else "") source.append(method_signature + " {") if method["name"] == "free": # 
dirty hack because Object::free is marked virtual but doesn't actually exist... source.append("\tgodot::api->godot_object_destroy(_owner);") source.append("}") source.append("") continue return_statement = "" return_type_is_ref = is_reference_type(method["return_type"]) and ref_allowed if method["return_type"] != "void": if is_class_type(method["return_type"]): if is_enum(method["return_type"]): return_statement += "return (" + remove_enum_prefix(method["return_type"]) + ") " elif return_type_is_ref: return_statement += "return Ref<" + strip_name(method["return_type"]) + ">::__internal_constructor("; else: return_statement += "return " + ("(" + strip_name(method["return_type"]) + " *) " if is_class_type(method["return_type"]) else "") else: return_statement += "return " def get_icall_type_name(name): if is_enum(name): return "int" if is_class_type(name): return "Object" return name if method["has_varargs"]: if len(method["arguments"]) != 0: source.append("\tVariant __given_args[" + str(len(method["arguments"])) + "];") for i, argument in enumerate(method["arguments"]): source.append("\tgodot::api->godot_variant_new_nil((godot_variant *) &__given_args[" + str(i) + "]);") source.append("") for i, argument in enumerate(method["arguments"]): source.append("\t__given_args[" + str(i) + "] = " + escape_cpp(argument["name"]) + ";") source.append("") size = "" if method["has_varargs"]: size = "(__var_args.size() + " + str(len(method["arguments"])) + ")" else: size = "(" + str(len(method["arguments"])) + ")" source.append("\tgodot_variant **__args = (godot_variant **) alloca(sizeof(godot_variant *) * " + size + ");") source.append("") for i, argument in enumerate(method["arguments"]): source.append("\t__args[" + str(i) + "] = (godot_variant *) &__given_args[" + str(i) + "];") source.append("") if method["has_varargs"]: source.append("\tfor (int i = 0; i < __var_args.size(); i++) {") source.append("\t\t__args[i + " + str(len(method["arguments"])) + "] = (godot_variant *) &((Array &) __var_args)[i];") source.append("\t}") source.append("") source.append("\tVariant __result;") source.append("\t*(godot_variant *) &__result = godot::api->godot_method_bind_call(___mb.mb_" + method["name"] + ", ((const Object *) " + core_object_name + ")->_owner, (const godot_variant **) __args, " + size + ", nullptr);") source.append("") if is_class_type(method["return_type"]): source.append("\tObject *obj = Object::___get_from_variant(__result);") source.append("\tif (obj->has_method(\"reference\"))") source.append("\t\tobj->callv(\"reference\", Array());") source.append("") for i, argument in enumerate(method["arguments"]): source.append("\tgodot::api->godot_variant_destroy((godot_variant *) &__given_args[" + str(i) + "]);") source.append("") if method["return_type"] != "void": cast = "" if is_class_type(method["return_type"]): if return_type_is_ref: cast += "Ref<" + strip_name(method["return_type"]) + ">::__internal_constructor(__result);" else: cast += "(" + strip_name(method["return_type"]) + " *) " + strip_name(method["return_type"] + "::___get_from_variant(") + "__result);" else: cast += "__result;" source.append("\treturn " + cast) else: args = [] for arg in method["arguments"]: args.append(get_icall_type_name(arg["type"])) icall_ret_type = get_icall_type_name(method["return_type"]) icall_sig = tuple((icall_ret_type, tuple(args))) icalls.add(icall_sig) icall_name = get_icall_name(icall_sig) return_statement += icall_name + "(___mb.mb_" + method["name"] + ", (const Object *) " + core_object_name for arg in 
method["arguments"]: arg_is_ref = is_reference_type(arg["type"]) and ref_allowed return_statement += ", " + escape_cpp(arg["name"]) + (".ptr()" if arg_is_ref else "") return_statement += ")" if return_type_is_ref: return_statement += ")" source.append("\t" + return_statement + ";") source.append("}") source.append("") source.append("}") return "\n".join(source) def generate_icall_header(icalls): source = [] source.append("#ifndef GODOT_CPP__ICALLS_HPP") source.append("#define GODOT_CPP__ICALLS_HPP") source.append("") source.append("#include <gdnative_api_struct.gen.h>") source.append("#include <stdint.h>") source.append("") source.append("#include <core/GodotGlobal.hpp>") source.append("#include <core/CoreTypes.hpp>") source.append("#include \"Object.hpp\"") source.append("") source.append("") source.append("namespace godot {") source.append("") for icall in icalls: ret_type = icall[0] args = icall[1] method_signature = "static inline " method_signature += get_icall_return_type(ret_type) + get_icall_name(icall) + "(godot_method_bind *mb, const Object *inst" for i, arg in enumerate(args): method_signature += ", const " if is_core_type(arg): method_signature += arg + "&" elif arg == "int": method_signature += "int64_t " elif arg == "float": method_signature += "double " elif is_primitive(arg): method_signature += arg + " " else: method_signature += "Object *" method_signature += "arg" + str(i) method_signature += ")" source.append(method_signature + " {") if ret_type != "void": source.append("\t" + ("godot_object *" if is_class_type(ret_type) else get_icall_return_type(ret_type)) + "ret;") if is_class_type(ret_type): source.append("\tret = nullptr;") source.append("\tconst void *args[" + ("1" if len(args) == 0 else "") + "] = {") for i, arg in enumerate(args): wrapped_argument = "\t\t" if is_primitive(arg) or is_core_type(arg): wrapped_argument += "(void *) &arg" + str(i) else: wrapped_argument += "(void *) (arg" + str(i) + ") ? 
arg" + str(i) + "->_owner : nullptr" wrapped_argument += "," source.append(wrapped_argument) source.append("\t};") source.append("") source.append("\tgodot::api->godot_method_bind_ptrcall(mb, inst->_owner, args, " + ("nullptr" if ret_type == "void" else "&ret") + ");") if ret_type != "void": if is_class_type(ret_type): source.append("\tif (ret) {") source.append("\t\treturn (Object *) godot::nativescript_1_1_api->godot_nativescript_get_instance_binding_data(godot::_RegisterState::language_index, ret);") source.append("\t}") source.append("") source.append("\treturn (Object *) ret;") else: source.append("\treturn ret;") source.append("}") source.append("") source.append("}") source.append("") source.append("#endif") return "\n".join(source) def generate_type_registry(classes): source = [] source.append("#include \"TagDB.hpp\"") source.append("#include <typeinfo>") source.append("\n") for c in classes: source.append("#include <" + strip_name(c["name"]) + ".hpp>") source.append("") source.append("") source.append("namespace godot {") source.append("void ___register_types()") source.append("{") for c in classes: class_name = strip_name(c["name"]) base_class_name = strip_name(c["base_class"]) class_type_hash = "typeid(" + class_name + ").hash_code()" base_class_type_hash = "typeid(" + base_class_name + ").hash_code()" if base_class_name == "": base_class_type_hash = "0" source.append("\tgodot::_TagDB::register_global_type(\"" + c["name"] + "\", " + class_type_hash + ", " + base_class_type_hash + ");") source.append("}") source.append("") source.append("}") return "\n".join(source) def generate_init_method_bindings(classes): source = [] for c in classes: source.append("#include <" + strip_name(c["name"]) + ".hpp>") source.append("") source.append("") source.append("namespace godot {") source.append("void ___init_method_bindings()") source.append("{") for c in classes: class_name = strip_name(c["name"]) source.append("\t" + strip_name(c["name"]) + "::___init_method_bindings();") source.append("}") source.append("") source.append("}") return "\n".join(source) def get_icall_return_type(t): if is_class_type(t): return "Object *" if t == "int": return "int64_t " if t == "float" or t == "real": return "double " return t + " " def get_icall_name(sig): ret_type = sig[0] args = sig[1] name = "___godot_icall_" name += strip_name(ret_type) for arg in args: name += "_" + strip_name(arg) return name def get_used_classes(c): classes = [] for method in c["methods"]: if is_class_type(method["return_type"]) and not (method["return_type"] in classes): classes.append(method["return_type"]) for arg in method["arguments"]: if is_class_type(arg["type"]) and not (arg["type"] in classes): classes.append(arg["type"]) return classes def strip_name(name): if len(name) == 0: return name if name[0] == '_': return name[1:] return name def extract_nested_type(nested_type): return strip_name(nested_type[:nested_type.find("::")]) def remove_nested_type_prefix(name): return name if name.find("::") == -1 else strip_name(name[name.find("::") + 2:]) def remove_enum_prefix(name): return strip_name(name[name.find("enum.") + 5:]) def is_nested_type(name, type = ""): return name.find(type + "::") != -1 def is_enum(name): return name.find("enum.") == 0 def is_class_type(name): return not is_core_type(name) and not is_primitive(name) def is_core_type(name): core_types = ["Array", "Basis", "Color", "Dictionary", "Error", "NodePath", "Plane", "PoolByteArray", "PoolIntArray", "PoolRealArray", "PoolStringArray", "PoolVector2Array", 
"PoolVector3Array", "PoolColorArray", "PoolIntArray", "PoolRealArray", "Quat", "Rect2", "AABB", "RID", "String", "Transform", "Transform2D", "Variant", "Vector2", "Vector3"] return name in core_types def is_primitive(name): core_types = ["int", "bool", "real", "float", "void"] return name in core_types def escape_cpp(name): escapes = { "class": "_class", "char": "_char", "short": "_short", "bool": "_bool", "int": "_int", "default": "_default", "case": "_case", "switch": "_switch", "export": "_export", "template": "_template", "new": "new_", "operator": "_operator", "typename": "_typename" } if name in escapes: return escapes[name] return name
mit
4,136,963,166,776,560,600
31.547059
267
0.53396
false
3.733468
false
false
false
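Editor's note: a hedged usage sketch for the generator above. It assumes a Godot API dump named godot_api.json is available locally (the filename is a placeholder), and that the include/gen and src/gen output directories exist, since generate_bindings() opens files there without creating them:

import os

for d in ("include/gen", "src/gen"):
    os.makedirs(d, exist_ok=True)   # generate_bindings() assumes these exist

generate_bindings("godot_api.json", use_template_get_node=True)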
iwhiz/Algorist
fibonacci_dp.py
1
1163
# This function uses the dynamic programming concept, where it stores previously computed values rather
# than computing them every time as in normal recursion.
import time

# This version of fibonacci uses the dynamic programming concept.
# First check the normal fibonacci.py for better understanding.

memo = {}  # module-level cache, so stored values persist across recursive calls
           # (the original created memo = {} inside the function, so nothing was ever reused)


def fibo_dp(n: int):  # pass 'n' to this function and it should be of type int
    if n in memo:  # if 'n' has already been computed, its value is stored in memo
        return memo[n]  # then return that value
    if n <= 2:
        f = 1  # if 'n' is at most 2 then the value of f is 1
    else:
        f = fibo_dp(n - 1) + fibo_dp(n - 2)  # otherwise call the recursion
    memo[n] = f  # store it in memo for future use
    return f  # return the value to the calling function


if __name__ == "__main__":
    start_time = time.perf_counter()  # starting the timer (time.clock() was removed in Python 3.8)
    print(fibo_dp(15))  # calling the function here to find the 15th number in the Fibonacci series
    print(time.perf_counter() - start_time)  # print the elapsed time
mit
-8,367,374,644,856,106,000
45.52
108
0.66466
false
3.715655
false
false
false
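Editor's note: the memoization above can also come from the standard library; a minimal equivalent using functools.lru_cache, for comparison:

import functools


@functools.lru_cache(maxsize=None)
def fibo_cached(n):
    # same recurrence, with the cache managed by the decorator
    if n <= 2:
        return 1
    return fibo_cached(n - 1) + fibo_cached(n - 2)


assert fibo_cached(15) == 610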
tedye/leetcode
Python/leetcode.037.sudoku-solver.py
1
2045
class Solution(object):
    def solveSudoku(self, board):
        """
        :type board: List[List[str]]
        :rtype: void Do not return anything, modify board in-place instead.
        """
        hset = [{'1','2','3','4','5','6','7','8','9'} for _ in range(9)]
        vset = [{'1','2','3','4','5','6','7','8','9'} for _ in range(9)]
        boxset = [{'1','2','3','4','5','6','7','8','9'} for _ in range(9)]
        temp = self.solver(board, hset, vset, boxset)
        board[:] = temp[:]

    def solver(self, board, h, v, b):
        q = []
        for i in range(9):
            for j in range(9):
                if board[i][j] == '.':
                    q.append([(i, j), h[i], v[j], b[(i//3) * 3 + j // 3]])
                else:
                    num = board[i][j]
                    h[i] -= {num}
                    v[j] -= {num}
                    b[(i//3) * 3 + j // 3] -= {num}
        while q:
            q.sort(key=lambda x: len(x[1] & x[2] & x[3]))
            cur = q.pop(0)
            avail = cur[1] & cur[2] & cur[3]
            i = cur[0][0]
            j = cur[0][1]
            if len(avail) == 0:
                return []
            elif len(avail) == 1:
                num = avail.pop()
                h[i] -= {num}
                v[j] -= {num}
                b[(i//3) * 3 + j // 3] -= {num}
                board[i][j] = num
            else:
                l = len(avail)
                for k in range(l):
                    num = avail.pop()
                    h[i] -= {num}
                    v[j] -= {num}
                    b[(i//3) * 3 + j // 3] -= {num}
                    board[i][j] = num
                    temp = self.solver([x[:] for x in board],
                                       [set(a) for a in h],
                                       [set(a) for a in v],
                                       [set(a) for a in b])
                    if temp:
                        return temp
                    board[i][j] = '.'
                    h[i].add(num)
                    v[j].add(num)
                    b[(i//3) * 3 + j // 3].add(num)
                return []
        return board
mit
-8,874,471,319,951,807,000
36.888889
124
0.326161
false
3.385762
false
false
false
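Editor's note: a small usage sketch for the solver above. An all-empty board is used so the example does not depend on a particular puzzle; the solver fills in any valid completed grid:

board = [['.'] * 9 for _ in range(9)]   # '.' marks an empty cell
Solution().solveSudoku(board)
for row in board:
    print(''.join(row))                 # each row, column and box holds 1-9 once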
SheffieldML/TVB
likelihoods.py
1
1637
# Copyright (c) 2014, James Hensman, Max Zwiessele
# Distributed under the terms of the GNU General public License, see LICENSE.txt
import numpy as np
from scipy.special import gamma, digamma
from scipy import stats


class student_t():
    def __init__(self):
        self._set_params(np.ones(2))

    def _set_params(self, p):
        self.nu, self.lamb = p
        # compute some constants so that they don't appear in a loop
        self._pdf_const = gamma((self.nu + 1) / 2.) / gamma(self.nu / 2.) * np.sqrt(self.lamb / (self.nu * np.pi))
        self._dnu_const = 0.5 * digamma((self.nu + 1.) / 2.) - 0.5 * digamma(self.nu / 2.) - 0.5 / self.nu

    def _get_params(self):
        return np.array([self.nu, self.lamb])

    def _get_param_names(self):
        return ['nu', 'lambda']

    def pdf(self, x, Y):
        x2 = np.square(x - Y)
        return self._pdf_const * np.power(1 + self.lamb * x2 / self.nu, -(self.nu + 1.) / 2.)

    def dlnpdf_dtheta(self, x, Y):
        x2 = np.square(x - Y)
        dnu = self._dnu_const - 0.5 * np.log(1. + self.lamb * x2 / self.nu) \
            + 0.5 * (self.nu + 1.) * (self.lamb * x2 / self.nu ** 2) / (1. + self.lamb * x2 / self.nu)
        dlamb = 0.5 / self.lamb - 0.5 * (self.nu + 1.) * (x2 / self.nu / (1. + self.lamb * x2 / self.nu))
        return np.vstack((dnu, dlamb))

    def predictive_values(self, mu, var, percentiles):
        if len(percentiles) == 0:
            return mu, []
        # the sample count must be an int (40e3 is a float, which np.random.randn rejects),
        # and only the noise should be scaled by sqrt(var); the original also scaled mu
        samples = np.random.randn(40000, *mu.shape) * np.sqrt(var) + mu
        samples = stats.t.rvs(self.nu, loc=samples, scale=np.array(self.lamb).reshape(1, 1))
        qs = [stats.scoreatpercentile(samples, q, axis=0) for q in percentiles]
        return samples.mean(0), qs
gpl-3.0
8,509,867,237,343,463,000
45.771429
144
0.602932
false
2.719269
false
false
false
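Editor's note: a brief usage sketch of the Student-t likelihood above; nu=4 and lambda=1 are arbitrary illustration values:

import numpy as np

lik = student_t()
lik._set_params(np.array([4.0, 1.0]))          # nu, lambda
x = np.linspace(-3, 3, 5)
print(lik.pdf(x, np.zeros_like(x)))            # density of the residual x - Y
print(lik.dlnpdf_dtheta(x, np.zeros_like(x)))  # gradients w.r.t. nu and lambda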
bulax41/Commands
scripts/mcast_listen.py
1
2340
#!/bin/python
import socket
import struct
import sys
import signal
import time
import datetime
import argparse
import threading


class McastSocket(socket.socket):
    def __init__(self, local_port='', reuse=False):
        socket.socket.__init__(self, socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
        if (reuse):
            self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            if hasattr(socket, "SO_REUSEPORT"):
                self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
        self.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 8388608)
        self.bind(('', local_port))

    def mcast_add(self, addr, iface):
        self.setsockopt(
            socket.IPPROTO_IP,
            socket.IP_ADD_MEMBERSHIP,
            socket.inet_aton(addr) + socket.inet_aton(iface))


def signal_handler(signal, frame):
    global estop
    estop.set()
    sys.exit(0)


def join_group(group, args, event):
    global count
    (mcast_group, mcast_port) = group.split(":")
    sock = McastSocket(local_port=int(mcast_port), reuse=1)
    sock.mcast_add(mcast_group, args.interface)
    stime = datetime.datetime.now()
    print "Joining %s:%s at %s" % (mcast_group, mcast_port, stime.strftime("%b %d %Y %X.%f"))
    while not event.isSet():
        msg, source = sock.recvfrom(1500)
        count[group] += 1
    print "Exiting Group %s... %s" % (group, datetime.datetime.now().strftime("%b %d %Y %X.%f"))


def main():
    parser = argparse.ArgumentParser(description='Subscribe and decode multicast for CME or LMAX')
    parser.add_argument('-g', '--group', action="append", required=True,
                        help="Group to join in IP:Port format, may be used more than once")
    parser.add_argument('-i', '--interface', required=True,
                        help="IP address of the Interface to join on")
    parser.add_argument('-q', '--quiet', action="count",
                        help="Do not print packet count")
    args = parser.parse_args()

    global estop, count
    count = {}
    signal.signal(signal.SIGINT, signal_handler)
    estop = threading.Event()

    threads = []
    for group in args.group:
        count[group] = 0
        t = threading.Thread(target=join_group, args=(group, args, estop))
        threads.append(t)
        t.start()

    while True:
        time.sleep(1)
        for c, v in count.items():
            print "%s: %s" % (c, v),
        print "\r",


if __name__ == '__main__':
    main()
gpl-3.0
1,310,441,972,813,679,400
30.2
137
0.641026
false
3.347639
false
false
false
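Editor's note: an invocation example for the listener above plus a minimal standard-library test sender; the group address, port and interface address are placeholders:

# shell: python mcast_listen.py -g 239.1.1.1:5000 -i 192.168.1.10

# minimal test sender (standard library only)
import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 1)
sock.sendto(b"ping", ("239.1.1.1", 5000))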
McIntyre-Lab/papers
newman_events_2017/python_workflow/programs/build_intron2border_junction_index.py
1
5945
#!/usr/bin/env python3 ####################################################################################################################### # # DATE: 2017-12-15 # NAME: build_Event2Transcript_index.py # AUTHOR: Jeremy R. B. Newman ([email protected]) # # DESCRIPTION: This script creates an intron-to-border junction index file used by Event Analysis to report # the read coverage of introns, their associated border junctions and flanking exonic regions (fusions), to aid # the user in deciding whether there is evidence on intron retention, alternative/novel splice usage, etc. # It takes the annotation CSVs for junctions, exonic regions and introns to assemble a complete intron/border index, # where each border junction and intron are assigned to a single intron event, flanked by its neighboring # exonic regions. Where the exonic regions of intron events can be assigned to multiple genes, then the output of this # intron event is suppressed, to avoid instances of overlapping intron events. # # REQUIRED PACKAGES: pandas (tested with v0.19.2) # argparse (tested with v1.1) # logging (tested with v0.5.1.2) # ####################################################################################################################### # Import required packages import pandas as pd import logging import argparse import sqlite3 def getOptions(): # Parse command line arguments parser = argparse.ArgumentParser(description="Generate an intron-to-border-junction index file for" "interpreting read coverage of intronic regions") parser.add_argument('--intron-annotation-file', dest="inIntrons", required=True, help="Input intron annotation CSV") parser.add_argument("--junction-annotation-file", dest="inJunctions", required=True, help="Input junction annotation CSV") parser.add_argument("--output-intron-index", dest="outCSV", required=True, help="Output event index CSV") args = parser.parse_args() return args def main(): # Connect to SQL database con = sqlite3.connect(":memory:") cur = con.cursor() # Import intron and junction annotations logger.info("Importing intron and junction annotations") intronDF = pd.read_csv(args.inIntrons, usecols=('intron_id','chr','intron_start','intron_stop','gene_id', 'exonic_region_id_5prime','exonic_region_id_3prime')) juncDF = pd.read_csv(args.inJunctions, usecols=('junction_id','chr','donor_stop','acceptor_start','transcript_id', 'gene_id','flag_border_junction')) # Convert to SQL tables intronDF.to_sql("intronInfo", con, if_exists="replace") juncDF.to_sql("juncInfo", con, if_exists="replace") # So border junctions and introns can be merged, donor_stop and acceptor start need to renamed to intron_start # and intron_stop respectively. When the "donor exon" is an intron, donor_stop = intron_stop # When the "acceptor exon" is an intron, acceptor_start = intron_start # I am going to first map 5' border junctions to the 5' end of introns, then 3' # border junctions for the 3' end of the introns. # First, I want to expand concatenated gene IDs. 
Junctions with multiple gene ID shouldn't be retained in the # final output, but iterate over these for completeness cur.execute("""Select junction_id, chr , donor_stop , acceptor_start , gene_id from juncInfo WHERE flag_border_junction = 1""") allBorders = cur.fetchall() cur.execute("""CREATE TABLE IF NOT EXISTS borderInfo (junction_id TEXT, chr TEXT, donor_stop INT, acceptor_start INT, gene_id TEXT)""") for border in allBorders: genes = border[4].split("|") for gn in genes: cur.execute("INSERT INTO borderInfo VALUES(:junction_id, :chr, :donor_stop, :acceptor_start, :gene_id)", {"junction_id": border[0], "chr": border[1], "donor_stop": border[2], "acceptor_start": border[3], "gene_id":gn}) # Merge INNER with intron table on chromosome, gene, and acceptor_start (as intron_start) cur.execute("CREATE TABLE intronWstart AS SELECT in1.intron_id, in1.chr, in1.intron_start, in1.intron_stop, " "in1.gene_id, in1.exonic_region_id_5prime, in2.junction_id AS border_junction_id_5prime " "FROM intronInfo in1 INNER JOIN borderInfo in2 " "ON in1.chr = in2.chr AND in1.gene_id = in2.gene_id AND in1.intron_start = in2.acceptor_start ;") # Merge INNER with intron table on chromosome, gene, and donor_stop (as intron_stop) cur.execute("CREATE TABLE intronWstop AS SELECT in1.intron_id, in1.chr, in1.gene_id, " "in1.exonic_region_id_3prime, in2.junction_id AS border_junction_id_3prime " "FROM intronInfo in1 INNER JOIN borderInfo in2 " "ON in1.chr = in2.chr AND in1.gene_id = in2.gene_id AND in1.intron_stop = in2.donor_stop ;") cur.execute("CREATE TABLE intronBorderIndex AS SELECT in1.*, in2.exonic_region_id_3prime," "in2.border_junction_id_3prime FROM intronWstart in1 " "INNER JOIN intronWstop in2 ON in1.gene_id = in2.gene_id AND in1.intron_id = in2.intron_id ;") intronBorderIndexDF = pd.read_sql("SELECT * FROM intronBorderIndex ORDER BY chr, intron_start, intron_stop ;", con) # Write output index with open(args.outCSV, 'w') as outIndex: intronBorderIndexDF.to_csv(outIndex, encoding='utf-8', index=False) if __name__ == '__main__': # Parse command line arguments global args args = getOptions() # Setting up logger logger = logging.getLogger() logger.info('Starting script') # Calling main script main() logger.info('Script complete: index created!')
lgpl-3.0
2,977,821,361,398,456,300
56.728155
131
0.639024
false
3.690255
false
false
false
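Editor's note: the script above leans on pandas.DataFrame.to_sql plus an in-memory SQLite join to match border junctions to introns. A toy reduction of that join, with made-up coordinates, for illustration:

import sqlite3
import pandas as pd

con = sqlite3.connect(":memory:")
introns = pd.DataFrame({"intron_id": ["i1"], "chr": ["2L"], "gene_id": ["g1"],
                        "intron_start": [100], "intron_stop": [200]})
borders = pd.DataFrame({"junction_id": ["j5"], "chr": ["2L"], "gene_id": ["g1"],
                        "acceptor_start": [100]})
introns.to_sql("intronInfo", con, if_exists="replace")
borders.to_sql("borderInfo", con, if_exists="replace")
# same join condition as the script: chromosome, gene, and 5' coordinate match
print(pd.read_sql("SELECT i.intron_id, b.junction_id "
                  "FROM intronInfo i JOIN borderInfo b "
                  "ON i.chr = b.chr AND i.gene_id = b.gene_id "
                  "AND i.intron_start = b.acceptor_start", con))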
cragusa/cocoma
bin/Logger.py
1
5704
#!/usr/bin/env python
#Copyright 2012-2013 SAP Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is part of the COCOMA framework
#
# COCOMA is a framework for COntrolled COntentious and MAlicious patterns
#
import psutil, time, Library, logging, EMQproducer
from datetime import datetime as dt
from logging import handlers
from EMQproducer import Producer

global producer
producer = Producer()
global myName
myName = "Logger"
emulationEndLogger = None


def singleLogger(elementName, level=None, filename=None):
    # file writing handler
    producer = Producer()
    HOMEPATH = Library.getHomepath()
    global emulationEndLogger
    emulationEndLogger = Library.loggerSet("Logger")

    def logLevelGet():
        LOG_LEVEL = logging.INFO
        LogLevel = Library.readLogLevel("coreloglevel")
        if LogLevel == "info":
            LOG_LEVEL = logging.INFO
        if LogLevel == "debug":
            LOG_LEVEL = logging.DEBUG
        else:
            LOG_LEVEL = logging.INFO
        return LOG_LEVEL

    if level == None:
        level = logLevelGet()

    fileLogger = logging.getLogger(elementName)
    fileLogger.setLevel(level)

    # we do not add additional handlers if they are there
    if not len(fileLogger.handlers):
        # adding producer handler
        #bHandler = EMQproducer.BroadcastLogHandler(elementName, producer)
        #fileLogger.addHandler(bHandler)
        #EMQproducer.StreamAndBroadcastHandler("TEST", producer)
        if filename == None:
            # setting log rotation for 10 files each up to 10000000 bytes (10MB)
            fileHandler = handlers.RotatingFileHandler(HOMEPATH + "/logs/COCOMAlogfile.csv", 'a', 10000000, 10)
            fileLoggerFormatter = logging.Formatter('%(asctime)s;%(name)s;%(levelname)s;%(message)s', datefmt='%m/%d/%Y %H:%M:%S')
            fileHandler.setFormatter(fileLoggerFormatter)
            fileLogger.addHandler(fileHandler)
            # cli writing handler
            cliLoggerFormatter = logging.Formatter('%(asctime)s - [%(name)s] - %(levelname)s : %(message)s', datefmt='%m/%d/%Y %H:%M:%S')
            cliHandler = logging.StreamHandler()
            cliHandler.setFormatter(cliLoggerFormatter)
            fileLogger.addHandler(cliHandler)
        else:
            fileHandler = logging.FileHandler(HOMEPATH + "/logs/" + str(filename))
            fileLoggerFormatter = logging.Formatter('%(asctime)s;%(name)s;%(levelname)s;%(message)s', datefmt='%m/%d/%Y %H:%M:%S')
            fileHandler.setFormatter(fileLoggerFormatter)
            fileLogger.addHandler(fileHandler)
    return fileLogger


# Logger job that collects system stats during emulation, run by scheduler
def emulationEnd(emulationName):
    """
    IN: job that executes at the end of emulation
    DOING: just producing logger notification
    OUT: nothing
    """
    try:
        print "Emulation Time expired, removing extra jobs and stopping running processes"
        global emulationEndLogger
        msg = {"Action": "Emulation finished", "EmulationName": str(emulationName)}
        producer.sendmsg(myName, msg)
        emulationEndLogger.info(msg)
        #emulationEndLogger.info("Emulation '"+str(emulationName)+"' finished.")
        Library.removeExtraJobs(emulationName)
        Library.killRemainingProcesses()
        Library.deleteFiles("/tmp/stressapptestFile", "*")  # Remove any stressappTest files left behind from I/O loading
        return True
    except:
        return False


def loadMon(duration, interval, emulationID, emulationName, emuStartTime):
    HOMEPATH = Library.getHomepath()
    emulationName = str(emulationName)
    interval = int(interval)
    '''
    starting cpu monitoring in the loop
    '''
    iterationsNo = int(duration) / int(interval)
    try:
        f = open(HOMEPATH + "/logs/" + str(emulationID) + "-" + str(emulationName) + "-res" + "_" + str(emuStartTime) + ".csv", 'a')
        f.write(emulationName + ";\nCountdown;Time;CPU(%);MEM(%);IOread(bytes);IOwrite(bytes);NET(bytes_sent)\n")
        # start time
        initTime = time.time()
        while iterationsNo != 0:
            CPU = str(psutil.cpu_percent(interval, False))
            #MEM = str(psutil.virtual_memory().percent)
            MEM = str(psutil.avail_virtmem())
            IOr = str(psutil.disk_io_counters().read_time)
            IOw = str(psutil.disk_io_counters().write_time)
            NET = str(psutil.network_io_counters(False).bytes_sent)
            #print (emulationName+";\nTime;CPU(%);MEM(%);IOread(bytes);IOwrite(bytes);NET(bytes_sent)\n"+str(time.time())+";"+CPU+";"+MEM+";"+IOr+";"+IOw+";"+NET)
            probeTime = time.time() - initTime
            timeStamp = dt.now()
            f.write(str(int(probeTime)) + ";" + str(timeStamp.strftime("%Y-%m-%d %H:%M:%S.%f")) + ";" + CPU + ";" + MEM + ";" + IOr + ";" + IOw + ";" + NET + "\n")
            iterationsNo = iterationsNo - 1
        f.close()  # the original evaluated f.closed here, which only reads the attribute and never closes the file
    except Exception, e:
        print "Unable to create log file\nError: ", e


if __name__ == '__main__':
    duration = 20
    interval = 1
    emulationName = "Emulation-1"
    # the original call omitted emulationID and emuStartTime, which loadMon
    # requires; 0 and the current time are placeholder test values
    loadMon(duration, interval, 0, emulationName, int(time.time()))
    pass
apache-2.0
-399,721,959,556,241,200
32.162791
162
0.645863
false
3.950139
false
false
false
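Editor's note: singleLogger() above depends on the framework's Library module; the core of what it sets up is a size-rotated CSV log, which the standard library can reproduce on its own. A self-contained sketch of just that handler configuration:

import logging
from logging import handlers

logger = logging.getLogger("demo")
logger.setLevel(logging.INFO)
if not logger.handlers:   # same guard as singleLogger: avoid duplicate handlers
    h = handlers.RotatingFileHandler("demo.csv", "a", 10000000, 10)
    h.setFormatter(logging.Formatter(
        "%(asctime)s;%(name)s;%(levelname)s;%(message)s",
        datefmt="%m/%d/%Y %H:%M:%S"))
    logger.addHandler(h)
logger.info("rotates after 10 MB, keeping 10 backups")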
karolciba/playground
markov/baumwelch.py
1
4027
import numpy as np

# functions and classes go here
def fb_alg(A_mat, O_mat, observ):
    # set up
    k = observ.size
    (n, m) = O_mat.shape
    prob_mat = np.zeros((n, k))
    fw = np.zeros((n, k + 1))
    bw = np.zeros((n, k + 1))
    # forward part
    fw[:, 0] = 1.0 / n
    for obs_ind in xrange(k):
        f_row_vec = np.matrix(fw[:, obs_ind])
        fw[:, obs_ind + 1] = f_row_vec * \
                             np.matrix(A_mat) * \
                             np.matrix(np.diag(O_mat[:, observ[obs_ind]]))
        fw[:, obs_ind + 1] = fw[:, obs_ind + 1] / np.sum(fw[:, obs_ind + 1])
    # backward part
    bw[:, -1] = 1.0
    for obs_ind in xrange(k, 0, -1):
        b_col_vec = np.matrix(bw[:, obs_ind]).transpose()
        bw[:, obs_ind - 1] = (np.matrix(A_mat) * \
                              np.matrix(np.diag(O_mat[:, observ[obs_ind - 1]])) * \
                              b_col_vec).transpose()
        bw[:, obs_ind - 1] = bw[:, obs_ind - 1] / np.sum(bw[:, obs_ind - 1])
    # combine it
    prob_mat = np.array(fw) * np.array(bw)
    prob_mat = prob_mat / np.sum(prob_mat, 0)
    # get out
    return prob_mat, fw, bw


def baum_welch(num_states, num_obs, observ):
    # allocate
    # A_mat = np.ones( (num_states, num_states) )
    A_mat = np.random.random((num_states, num_states))
    A_mat = A_mat / np.sum(A_mat, 1)[:, None]
    # O_mat = np.ones( (num_states, num_obs) )
    O_mat = np.random.random((num_states, num_obs))
    O_mat = O_mat / np.sum(O_mat, 1)[:, None]
    theta = np.zeros((num_states, num_states, observ.size))
    while True:
        old_A = A_mat
        old_O = O_mat
        A_mat = np.ones((num_states, num_states))
        O_mat = np.ones((num_states, num_obs))
        # A_mat = np.random.random( (num_states, num_states) )
        # A_mat = A_mat / np.sum(A_mat,1)[:,None]
        # O_mat = np.random.random( (num_states, num_obs) )
        # O_mat = O_mat / np.sum(O_mat,1)[:,None]
        # expectation step, forward and backward probs
        P, F, B = fb_alg(old_A, old_O, observ)
        # need to get transitional probabilities at each time step too
        for a_ind in xrange(num_states):
            for b_ind in xrange(num_states):
                for t_ind in xrange(observ.size):
                    theta[a_ind, b_ind, t_ind] = \
                        F[a_ind, t_ind] * \
                        B[b_ind, t_ind + 1] * \
                        old_A[a_ind, b_ind] * \
                        old_O[b_ind, observ[t_ind]]
        # form A_mat and O_mat
        for a_ind in xrange(num_states):
            for b_ind in xrange(num_states):
                A_mat[a_ind, b_ind] = np.sum(theta[a_ind, b_ind, :]) / \
                                      np.sum(P[a_ind, :])
        # row-normalize; the original divided by np.sum(A_mat, 1) without
        # [:, None], which broadcasts along the wrong axis
        A_mat = A_mat / np.sum(A_mat, 1)[:, None]
        for a_ind in xrange(num_states):
            for o_ind in xrange(num_obs):
                right_obs_ind = np.array(np.where(observ == o_ind)) + 1
                O_mat[a_ind, o_ind] = np.sum(P[a_ind, right_obs_ind]) / \
                                      np.sum(P[a_ind, 1:])
        O_mat = O_mat / np.sum(O_mat, 1)[:, None]
        # compare
        if np.linalg.norm(old_A - A_mat) < .00001 and np.linalg.norm(old_O - O_mat) < .00001:
            break
    # get out
    return A_mat, O_mat


import casino

num_obs = 100
g = casino.casino()
observations1 = [1 if g.next()[0].name == 'H' else 0 for x in xrange(num_obs)]
observations1 = np.array(observations1)
# observations1 = np.random.randn( num_obs )
# observations1[observations1>0] = 1
# observations1[observations1<=0] = 0
# import pdb; pdb.set_trace()
A_mat, O_mat = baum_welch(2, 2, observations1)
print "observation 1"
print observations1[:30]
print "trans"
print A_mat
print "emiss"
print O_mat

# observations2 = np.random.random(num_obs)
# observations2[observations2>.15] = 1
# observations2[observations2<=.85] = 0
# A_mat, O_mat = baum_welch(2,2,observations2)
# print "observations2"
# print observations2[:30]
# print A_mat
# print O_mat
# A_mat, O_mat = baum_welch(2,2,np.hstack( (observations1, observations2) ) )
# print A_mat
# print O_mat
unlicense
-5,241,369,529,496,385,000
36.635514
89
0.534145
false
2.765797
false
false
false
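Editor's note: a small forward-backward check for fb_alg above on a hand-built two-state HMM (assuming fb_alg from the file above is in scope; it is Python 2 as written, given xrange). The matrices are arbitrary but row-stochastic, so each column of the returned posterior should sum to one:

import numpy as np

A = np.array([[0.9, 0.1],
              [0.2, 0.8]])          # transition matrix, rows sum to 1
O = np.array([[0.7, 0.3],
              [0.4, 0.6]])          # emission matrix, rows sum to 1
obs = np.array([0, 1, 1, 0])
P, F, B = fb_alg(A, O, obs)
print(P)                            # columns are per-step state posteriors
print(np.sum(P, 0))                 # each entry should be 1.0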
schef/schef.github.io
source/14/mc-14-04-whf-pid.py
1
2581
#!/usr/bin/python
# Written by Stjepan Horvat
# ( [email protected] )
# by the exercises from David Lucas Burge - Perfect Pitch Ear Training Supercourse
# Thanks to Wojciech M. Zabolotny ( [email protected] ) for snd-virmidi example
# ( [email protected] )

import random
import re
import sys
sys.path.append("/home/schef/github/schef.github.io/source/")
from pptraning import *

print ("Exercise: 14-03")
print ("White harmonic fours. Pitch identification drill. OVR.")

runda = 0
try:
    while True:
        runda += 1
        print ("Possible commands: 1-again, 2-play, 3-next, 4-compare-to-c:")
        while True:
            notes = []
            for i in range(0, 4):
                notes.append(random.choice(whiteNotes[7:28]))
            # if len(list(set(notes))) == 4:
            #     break
            if (len(list(set(notes))) == 4 \
                and (notes[0]%12 != notes[1]%12) \
                and (notes[0]%12 != notes[2]%12) \
                and (notes[0]%12 != notes[3]%12) \
                and (notes[1]%12 != notes[2]%12) \
                and (notes[1]%12 != notes[3]%12) \
                and (notes[2]%12 != notes[3]%12) \
                ):
                break
        #notes.sort()
        match = False
        noteError = None
        while not match:
            # here starts the practice
            done = False
            #playFourNotes(notes)
            playNote(notes[0])
            playNote(notes[1])
            playNote(notes[2])
            playNote(notes[3])
            while not done:
                n = input("? ")
                if n == "1":
                    #playFourNotes(notes)
                    playNote(notes[0])
                    playNote(notes[1])
                    playNote(notes[2])
                    playNote(notes[3])
                elif n == "3":
                    print ("Next")
                    print (str(runda) + ". round.")
                    done = True
                    match = True
                elif n == "5":
                    print (num2Name(notes[0]), num2Name(notes[1]), num2Name(notes[2]), num2Name(notes[3]))
                elif n == "4":
                    print ("C the comparer")
                    playNote(name2Num("c"))
                elif n == "2":
                    print (num2Name(notes[0]), num2Name(notes[1]), num2Name(notes[2]), num2Name(notes[3]))
                elif re.compile("^[0-3] [0-3]$").match(n):
                    splited = n.split()
                    playTwoNotes(notes[int(splited[0])], notes[int(splited[1])])
                elif re.compile("^[0-3] [0-3] [0-3]$").match(n):
                    splited = n.split()
                    playThreeNotes(notes[int(splited[0])], notes[int(splited[1])], notes[int(splited[2])])
                elif splitFour.match(n):
                    splitNote = n.split()
                    if splitNote[0] == num2Name(notes[0]).lower() and splitNote[1] == num2Name(notes[1]).lower() and splitNote[2] == num2Name(notes[2]).lower() and splitNote[3] == num2Name(notes[3]).lower():
                        print ("Next")
                        print (str(runda) + ". round.")
                        done = True
                        match = True
except KeyboardInterrupt:
    pass
mit
1,993,438,828,375,399,400
30.47561
193
0.590469
false
2.802389
false
false
false
Zephor5/zspider
zspider/crawler.py
1
5759
# coding=utf-8 import json import logging from queue import Queue from pooled_pika import PooledConn from scrapy.crawler import CrawlerProcess from scrapy.settings import Settings from scrapy.utils.log import log_scrapy_info from scrapy.utils.ossignal import install_shutdown_handlers from twisted.internet import defer from twisted.internet.error import ConnectionDone from zspider.confs.conf import AMQP_PARAM from zspider.confs.conf import EXCHANGE_PARAMS from zspider.confs.conf import TASK_BIND_PARAMS from zspider.confs.conf import TASK_Q_PARAMS __author__ = "zephor" logger = logging.getLogger("crawler") class TestCrawler(CrawlerProcess): def __init__(self): from zspider.confs import crawl_conf as p_settings settings = Settings() settings.setmodule(p_settings) super(CrawlerProcess, self).__init__(settings) self.task_q = defer.DeferredQueue() self.res_q = Queue() self.task_q.get().addCallback(self.crawl) def crawl(self, kwargs): spider_name = kwargs.pop("spider_name", "") crawler = self._create_crawler(spider_name) self.crawlers.add(crawler) d = crawler.crawl(**kwargs) self._active.add(d) def _done(_): self.crawlers.discard(crawler) self._active.discard(d) try: result = crawler.spider.test_result del crawler.spider.test_result except AttributeError: result = None # spider may be None in case Failure self.res_q.put(result) return _ d.addBoth(_done) d.addErrback(lambda _: logger.error(_)) d.addCallback(lambda _: self.task_q.get().addCallback(self.crawl)) return d def debug(_=None): """ for debug use """ import objgraph # with open('logs/test', 'w') as f: # objs = objgraph.get_leaking_objects() # for o in objs: # f.write('%s\n' % o.encode('utf-8') if isinstance(o, unicode) else str(o)) leak_ref = objgraph.by_type("Newspaper") objgraph.show_backrefs(leak_ref, max_depth=10, filename="my_leak.png") class CrawlerDaemon(CrawlerProcess): def __init__(self): from zspider.confs import crawl_conf as p_settings settings = Settings() settings.setmodule(p_settings) super(CrawlerProcess, self).__init__( settings ) # 跳过CrawlerProcess的初始日志配置,由init.py处理 install_shutdown_handlers(self._signal_shutdown) log_scrapy_info(self.settings) self.__task_queue = None self._pconn = PooledConn(AMQP_PARAM) self._set_up() def _set_up(self, _=None): d = self._pconn.acquire() d.addCallbacks(self._on_conn, self._on_err_conn) d.addErrback(self._on_err) @defer.inlineCallbacks def _on_conn(self, conn): # in case the connection is lost; mostly closed by the mq server conn.ready.addErrback(self.__clear) conn.ready.addCallback(self._set_up) self._conn = conn channel = self._channel = yield conn.channel() # do some setup yield channel.exchange_declare(**EXCHANGE_PARAMS) yield channel.queue_declare(**TASK_Q_PARAMS) yield channel.queue_bind(**TASK_BIND_PARAMS) self.__task_queue, consumer_tag = yield channel.basic_consume( queue=TASK_Q_PARAMS["queue"], auto_ack=False ) yield self._on_get() @staticmethod def _on_err_conn(err): logger.fatal(err) @staticmethod def _on_err(err): if err.type is ConnectionDone: logger.info("connection lost when waiting, handled..") else: logger.error(err) @defer.inlineCallbacks def _on_get(self): ch, method, properties, body = yield self.__task_queue.get() d = self._on_msg(body) yield ch.basic_ack(delivery_tag=method.delivery_tag) if isinstance(d, defer.Deferred): self._channel.close() self._pconn.release(self._conn) d.addCallback(self._set_up) else: d = self._on_get() yield d def _on_msg(self, body): logger.info("_on_msg %s" % body) try: msg = json.loads(body) self.settings.set("COOKIES_ENABLED", 
msg["is_login"], "spider") d = self.crawl( msg["spider"], parser=msg["parser"], task_id=msg["id"], task_name=msg["name"], ) # d.addCallback(lambda som: reactor.callLater(2, debug)) d.addErrback(lambda err: logger.error(err)) except Exception as e: logger.error(repr(e)) if len(self._active) > 1: return self.join() def __clear(self, _=None): if self.__task_queue is not None: self.__task_queue.close(ConnectionDone("done")) def crawl(self, spider_name, *args, **kwargs): crawler = self._create_crawler(spider_name) self.crawlers.add(crawler) d = crawler.crawl(*args, **kwargs) self._active.add(d) def _done(result): self.crawlers.discard(crawler) self._active.discard(d) # parser may hold large memory, release it manually try: del crawler.spider.parser except AttributeError: pass # spider may be None in case Failure return result return d.addBoth(_done) def main(): from zspider import init init.init("crawler") if init.done: p = CrawlerDaemon() p.start(stop_after_crawl=False) if __name__ == "__main__": main()
mit
-4,324,731,660,645,133,300
29.333333
87
0.593232
false
3.737288
false
false
false
andymckay/zamboni
mkt/inapp/views.py
1
3169
import json

from django.db import transaction
from django.shortcuts import get_object_or_404

from rest_framework.permissions import AllowAny
from rest_framework.viewsets import ModelViewSet

import commonware.log

from mkt.api.authentication import (RestAnonymousAuthentication,
                                    RestOAuthAuthentication,
                                    RestSharedSecretAuthentication)
from mkt.api.authorization import AllowAuthor, ByHttpMethod
from mkt.api.base import CORSMixin, MarketplaceView
from mkt.inapp.models import InAppProduct
from mkt.inapp.serializers import InAppProductSerializer
from mkt.prices.models import Price
from mkt.webapps.models import Webapp

log = commonware.log.getLogger('z.inapp')


class InAppProductViewSet(CORSMixin, MarketplaceView, ModelViewSet):
    serializer_class = InAppProductSerializer
    cors_allowed_methods = ('get', 'post', 'put', 'patch', 'delete')
    lookup_field = 'guid'
    permission_classes = [ByHttpMethod({
        'options': AllowAny,  # Needed for CORS.
        'get': AllowAny,
        'post': AllowAuthor,
        'put': AllowAuthor,
        'patch': AllowAuthor,
    })]
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]

    def destroy(self, request, *args, **kwargs):
        # NotImplemented is a constant, not an exception, so the original
        # raise NotImplemented(...) would itself fail with a TypeError;
        # raise NotImplementedError instead, with the DRF method signature.
        raise NotImplementedError('destroy is not allowed')

    def pre_save(self, in_app_product):
        in_app_product.webapp = self.get_app()

    def get_queryset(self):
        return InAppProduct.objects.filter(webapp=self.get_app())

    def get_app(self):
        if not hasattr(self, 'app'):
            self.app = get_object_or_404(Webapp,
                                         app_domain=self.kwargs['origin'])
        return self.app

    def get_authors(self):
        return self.get_app().authors.all()


class StubInAppProductViewSet(CORSMixin, MarketplaceView, ModelViewSet):
    serializer_class = InAppProductSerializer
    lookup_field = 'guid'
    cors_allowed_methods = ('get',)
    allowed_methods = ('GET',)
    permission_classes = [AllowAny]
    authentication_classes = []

    def _queryset(self):
        return InAppProduct.objects.filter(stub=True)

    def get_queryset(self):
        qs = self._queryset()
        # Since caching count() is unreliable, this optimizes for the case of
        # having already created stub products.
        if not len(qs):
            with transaction.atomic():
                self._create_stub_products()
            qs = self._queryset()
        return qs

    def _create_stub_products(self):
        for name, amount in (('Kiwi', '0.99'),
                             ('Unicorn', '1.99')):
            log.info('Creating stub in-app product {n} {p}'
                     .format(n=name, p=amount))
            # TODO: make this adjustable.
            simulate = json.dumps({'result': 'postback'})
            InAppProduct.objects.create(stub=True, simulate=simulate,
                                        name=name,
                                        price=Price.objects.get(price=amount))
bsd-3-clause
-7,194,169,135,948,572,000
35.011364
78
0.61218
false
4.317439
false
false
false
do-mpc/do-mpc
documentation/source/release_overview.py
1
1138
import requests
import os


def get_overview():
    # Use the Github REST API to get releases:
    release_dict = requests.get('https://api.github.com/repos/do-mpc/do-mpc/releases').json()

    text = ''
    text += '# Release notes'
    text += '\n'
    text += 'This content is autogenerated from our Github [release notes](https://github.com/do-mpc/do-mpc/releases).'
    text += '\n'

    for release_i in release_dict:
        name_i = release_i['name']
        body_i = release_i['body']
        body_i = body_i.replace('# ', '### ')
        print(name_i)

        text += '## {}'.format(name_i)
        text += '\n'
        text += body_i
        text += '\n'

        try:
            if release_i['assets']:
                text += '### Example files'
                text += '\n'
                text += 'Please download the example files for release {} [here]({}).'.format(
                    name_i, release_i['assets'][0]['browser_download_url'])
                text += '\n'
        except (KeyError, IndexError):
            print('Could not provide download link for example files.')

    with open('release_notes.md', 'w') as f:
        f.write(text)
lgpl-3.0
2,997,779,202,889,332,700
28.179487
149
0.529877
false
3.612698
false
false
false
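A minimal usage sketch for the release-notes generator above (my addition, not part of the record): it assumes the file is importable as release_overview and that api.github.com is reachable from the working directory.

# Hypothetical driver; the module name and working directory are assumptions.
import release_overview

release_overview.get_overview()        # fetches releases, writes release_notes.md
with open('release_notes.md') as f:
    print(f.read(300))                 # preview the generated markdown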
PetterS/easy-IP
examples/run_nurses.py
1
1028
#!/usr/bin/env python3
from glob import glob
import os

# Set this to the location of NSPLib.
nsplib = r"C:\Users\Petter\Dropbox\Datasets\NSPLib"


def run_solver(data_set, case):
    case_file = os.path.join(nsplib, "Cases", str(case) + ".gen")
    log_file = data_set + "." + str(case) + ".output.log"

    files = glob(os.path.join(nsplib, data_set, "*.nsp"))
    names = [f.split(".")[0] for f in files]
    names = [n.split(os.path.sep)[-1] for n in names]
    nums = sorted([int(n) for n in names])
    files = [os.path.join(nsplib, data_set, str(n) + ".nsp") for n in nums]

    try:
        os.unlink(log_file)
    except FileNotFoundError:
        pass

    for f in files:
        print(case_file)
        print(f)
        print(log_file)
        # This may need to change depending on shell.
        os.system("nurses " + f + " " + case_file + " >> " + log_file)


for data_set in ["N25", "N50", "N75", "N100"]:
    for case in [1, 2, 3, 4, 5, 6, 7, 8]:
        run_solver(data_set, case)

for data_set in ["N30", "N60"]:
    for case in [9, 10, 11, 12, 13, 14, 15, 16]:
        run_solver(data_set, case)
bsd-2-clause
2,813,238,958,239,927,000
26.052632
72
0.620623
false
2.39627
false
false
false
mxOBS/deb-pkg_trusty_chromium-browser
native_client/src/trusted/validator_ragel/proof_tools.py
1
15125
# Copyright (c) 2014 The Native Client Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Tools and utilities for creating proofs about tries.""" import itertools import multiprocessing import optparse import spec import trie import validator class Operands(object): """Contains parts of the disassembly of a single instruction. Also holds the implied restriction state. input_rr means that register must have the MSB 32 bits 0 before the instruction executes. Such a register can be used by this instruction as the index register for a memory operation in x86_64. There can only be one memory operand per instruction. Some AVX instructions allow a vector register to be used as an index register, impling multiple index values. However, we currently have no way to sandbox such instructions. output_rr means that the instruction produces a restricted register, i.e zeroes out the top 32 bits of a register. Can also hold partial information about an instruction while incrementally building up a full instruction. e.g. vaddpd 0x0(%r15,%r11,8),%ymm3,%ymm2 in ATT syntax is represented as: -> disasms: ('vaddpd', '0x0(%r15,%r11,8)', '%ymm3', '%ymm2') -> input_rr: r11 (for x86_64) (or None for x86_32) -> output_rr: None When building up partial state, could be: e.g. just (disasms: ('0x0(%r15,%r11,8)', '%ymm3), input_rr: '%r111', output_rr: None) from example above. """ __slots__ = ('disasms', 'input_rr', 'output_rr') def __init__(self, disasms=(), input_rr=None, output_rr=None): assert isinstance(disasms, tuple), disasms self.disasms = disasms self.input_rr = input_rr self.output_rr = output_rr def __repr__(self): return str((self.disasms, self.input_rr, self.output_rr)) def __eq__(self, other): return (self.disasms == other.disasms and self.input_rr == other.input_rr and self.output_rr == other.output_rr) def __hash__(self): return hash((self.disasms, self.input_rr, self.output_rr)) def MergeOperands(ops1, ops2): """Combine two different Operands (disassembly parts and implications).""" assert ops1.input_rr is None or ops2.input_rr is None assert ops1.output_rr is None or ops2.output_rr is None return Operands(ops1.disasms + ops2.disasms, ops1.input_rr if ops1.input_rr else ops2.input_rr, ops1.output_rr if ops1.output_rr else ops2.output_rr) def AllXMMOperands(bitness): """Returns the set of all XMM registers as individual Operands objects.""" assert bitness in (32, 64), bitness return set([Operands(disasms=('%xmm{}'.format(i),)) for i in xrange(8 if bitness == 32 else 16)]) def AllYMMOperands(bitness): """Returns the set of all YMM registers as individual Operands objects.""" assert bitness in (32, 64), bitness return set([Operands(disasms=('%ymm{}'.format(i),)) for i in xrange(8 if bitness == 32 else 16)]) def GprOperands(bitness, operand_size, is_write_for_64_bit=True, can_restrict=False): """Returns all gpr operands as an operand set. Args: bitness: architecture bitness to distinguish x86_32/x86_64: (32, 64) operand_size: size of register to be used in write. is_write_for_64_bit: if bitness == 64, and operand_size == 64, exclude special registers rsp, rbp, r15 for sandbox reasons. If bitness == 64 and operand_size == 32, exclude 'esp', 'ebp', and 'r15d' if it's not can_restrict. If can_restrict, then just exclude 'r15d' can_restrict: if true and bitness == 64, and operand_size == 32, and is_write_for_64_bit == True, disallow r15 write, and produce restricted register. 
""" regs = [] operand_to_restriction_map = { '%eax': '%rax', '%ebx' : '%rbx', '%ecx' : '%rcx', '%edx': '%rdx', '%ebp': '%rbp', '%edi': '%rdi', '%esi': '%rsi', '%esp': '%rsp', '%r8d': '%r8', '%r9d': '%r9', '%r10d' : '%r10', '%r11d': '%r11', '%r12d': '%r12', '%r13d': '%r13', '%r14d' : '%r14', } restricts = False if operand_size == 16 and bitness == 32: regs = ['%ax', '%bx', '%cx', '%dx', '%bp', '%sp', '%di', '%si'] elif operand_size == 32 and bitness == 32: regs = ['%eax', '%ebp', '%ebx', '%ecx', '%edi', '%edx', '%esi', '%esp'] elif bitness == 64 and operand_size == 32: regs = ['%eax', '%ebx', '%ecx', '%edi', '%edx', '%esi', '%r8d', '%r9d', '%r10d', '%r11d', '%r12d', '%r13d', '%r14d'] # Don't include '%ebp', '%esp', '%r15d' in allowed registers when # is_write_for_64_bit == True. if is_write_for_64_bit == False: regs += ['%esp', '%ebp', '%r15d'] elif can_restrict == True: regs += ['%esp', '%ebp'] restricts = True elif bitness == 64 and operand_size == 64: regs = ['%rax', '%rbx', '%rcx', '%rdi', '%rdx', '%rsi', '%r8', '%r9', '%r10', '%r11', '%r12', '%r13', '%r14'] # Don't include '%ebp', '%esp', '%r15d' in allowed registers when # is_write_for_64_bit == True. if is_write_for_64_bit == False: regs += ['%rsp', '%rbp', '%r15'] else: raise AssertionError("Unimplemented") if restricts: return set([ Operands(disasms=(reg,), output_rr=operand_to_restriction_map[reg]) for reg in regs]) else: return set([Operands(disasms=(reg,)) for reg in regs]) def MnemonicOp(name): """Returns the mnemonic as an operand set.""" assert isinstance(name, str) return set([Operands(disasms=(name,))]) def ImmOp(): """Returns an immediate as an operand set.""" # When walking the DFA, immediates are currently returned as 0x0. return set([Operands(disasms=('$0x0',))]) def LockPrefix(): """Returns the lock prefix as an operand set.""" return set([Operands(disasms=('lock',))]) def MemoryOperandsTemplate(disp, base, index, scale, bitness): """Returns all the possible different memory operands using given parameters. Returns list of Operands instances. e.g. for disp='0x0', base='%eax', index='%ebx', scale=2 [ '(%ebx)', # Base Register Only '0x0', # Displacement Only '(%ebx,%eax',2)', # Base Register + Index register * scale. '0x0(,%eax,2)', # Displacement + Index Register * scale. '0x0(%ebx)', # Displacement + Base Register. '0x0(%ebx,%eax,2), # Displacement + Base Register + Index Register * scale ] Note that Base register must be used for x86_64. Within the returned Operands objects, the input RR is set to the index register if the index is used for x86_64. Args: disp: displacement to use in memory operand. base: string register name to use for base register in addressing. index: string register name to use for index register in addressing. scale: integer scale to use to multiply index register by in addressing. bitness: 32 or 64 Returns: list of Operands instances representing all ways to use the parameters. """ assert bitness in (32, 64), bitness input_rr = None # Note: %riz is a fake register that always reads 0. It is allowed as an # index register (though it is redundant). However, because it is always # 0, we don't encode that it needs to be restricted. if bitness == 64 and index != '%riz': input_rr = index base_only_encoding = [] # There is no way to encode base without displacement with ebp/rbp. # Have to use 0x0+%ebp. if base not in ('%ebp', '%rbp'): base_only_encoding = [Operands(disasms=('({})'.format(base),))] base_plus_index_scale_encoding = [] # There is no way to encode base without displacement with ebp/rbp. 
# Have to use 0x0+%ebp. if base not in ('%ebp', '%rbp'): base_plus_index_scale_encoding = [ Operands(disasms=('({},{},{})'.format(base, index, scale),), input_rr=input_rr)] disp_only_encoding = [Operands(disasms=(disp,))] disp_plus_index_scale_encoding = [ Operands(disasms=('{}(,{},{})'.format(disp, index, scale),), input_rr=input_rr)] disp_plus_base_encoding = [ Operands(disasms=('{}({})'.format(disp, base),))] disp_plus_base_plus_index_scale_encoding = [ Operands( disasms=('{}({},{},{})'.format(disp, base, index, scale),), input_rr=input_rr)] # Redundant %eiz/%riz encoding isn't available with scale == 1. if (base in ('%esp', '%rsp') and index in ('%eiz', '%riz') and scale == 1): return [] if bitness == 32: return (base_only_encoding + disp_only_encoding + base_plus_index_scale_encoding + disp_plus_index_scale_encoding + disp_plus_base_encoding + disp_plus_base_plus_index_scale_encoding) else: # Note: x86_64 allows rip relative addressing (x86_32 doesn't_). # However, not all of the different addressing modes are available # for rip relative addressing (only disp + rip). This is # MOD==b'00, RM==b'101 if base == '%rip': return disp_plus_base_encoding else: # x86_64 memory disasms must always include base register, so the # Disp() and DispPlusIndexScale() options available for x86_32 aren't # permitted. return (base_only_encoding + disp_plus_base_encoding + base_plus_index_scale_encoding + disp_plus_base_plus_index_scale_encoding) def AllMemoryOperands(bitness): """The set of all possible memory operands as individual Operands objects.""" assert bitness in (32, 64), bitness displacements = ['0x0'] scales = [1, 2, 4, 8] if bitness == 32: bases = set(['%eax', '%ebp', '%ebx', '%ecx', '%edi', '%edx', '%esi', '%esp']) indexes = (bases | set(['%eiz'])) - set(['%esp']) elif bitness == 64: indexes = set(['%rax', '%rbx', '%rcx', '%rdi', '%rdx', '%rsi', '%r8', '%r9', '%r10', '%r11', '%r12', '%r13', '%r14', '%r15', '%riz']) bases = set(['%rsp', '%rbp', '%r15', '%rip']) result = set() for (d, b, i, s) in itertools.product(displacements, bases, indexes, scales): result.update(MemoryOperandsTemplate(disp=d, base=b, index=i, scale=s, bitness=bitness)) return result def OpsProd(*args): """A version of itertools.product that builds Operands. e.g. XMM = (Operands(disasms=('%xmm1',)), Operands(disasms=('%xmm2',))) REG = (Operands(disasms=('%rax',)), Operands(disasms=('%rbx',))) OpsProd(XMM, REG) -> set([Operands(disasms=('%xmm1', '%rax')), Operands(disasms=('%xmm1', '%rbx')), Operands(disasms=('%xmm2', '%rax')), Operands(disasms=('%xmm2', '%rbx'))]) Args: *args: each input is a collection of Operands. Returns: set of Operands instances, where each instance is a merge of Operands objects, one taken from each input iterator. 
""" result = set([Operands()]) for pool in args: result = set([MergeOperands(x, y) for (x,y) in itertools.product(result, pool)]) return result def GetRRInfoFromTrie(trie_state, bitness): """Convert rr info from trie to format suitable for Operands instance.""" input_rr = trie_state.input_rr output_rr = trie_state.output_rr if bitness == 32: assert not input_rr, input_rr assert not output_rr, output_rr if input_rr == 'any_nonspecial' or not input_rr: input_rr = None if output_rr == 'None' or not output_rr: output_rr = None return input_rr, output_rr def Disassemble((bitness, (byte_tuple, accept_info1, accept_info2))): """Disassembles byte sequence and returns it in old or new trie.""" global the_validator old_trie_set = set() new_trie_set = set() disassembly = the_validator.DisassembleChunk( ''.join([chr(int(x)) for x in byte_tuple]), bitness=bitness) assert len(disassembly) == 1 prefixes, mnemonic, operands = (spec.ParseInstruction(disassembly[0])) full_operands = tuple(prefixes + [mnemonic] + operands) if accept_info1 is not None: input_rr, output_rr = GetRRInfoFromTrie(accept_info1, bitness) old_trie_set.add(Operands(disasms=full_operands, input_rr=input_rr, output_rr=output_rr)) if accept_info2 is not None: input_rr, output_rr = GetRRInfoFromTrie(accept_info2, bitness) new_trie_set.add(Operands(disasms=full_operands, input_rr=input_rr, output_rr=output_rr)) return old_trie_set, new_trie_set def ParseStandardOpts(): """Parses a standard set of options for validator proofs from command line.""" parser = optparse.OptionParser( usage='%prog --bitness=[32,64] --old=path1 --new=path2') parser.add_option('--old', help='Path of the old trie') parser.add_option('--new', help='Path of the new trie') parser.add_option('--bitness', choices=['32', '64']) parser.add_option('--validator_dll', help='Path of the validator library') parser.add_option('--decoder_dll', help='Path of the decoder library') options, _ = parser.parse_args() return options def RunProof(standard_opts, proof_func): """Validates that trie diffs conform to to a proof. Args: standard_opts: command line options describing the two tries to be diffed, arch type, etc. (as returned by ParseStandardOpts) proof_func: Callback of (TrieDiffSet, bitness) to run to prove the diff. Returns: None """ # The validator itself must be passed to the other processes as a global # as it is c object that must be passed via forking and not as an argument # which means the validator must support being via pickled. global the_validator the_validator = validator.Validator( validator_dll=standard_opts.validator_dll, decoder_dll=standard_opts.decoder_dll) bitness = int(standard_opts.bitness) adds = set() removes = set() tasks = itertools.izip(itertools.repeat(bitness), trie.DiffTrieFiles(standard_opts.new, standard_opts.old)) pool = multiprocessing.Pool() results = pool.imap_unordered(Disassemble, tasks, chunksize=10000) for new, old in results: adds |= new removes |= old proof_func((adds, removes), bitness) def AssertDiffSetEquals((adds, removes), expected_adds, expected_removes): """Assert that diffs is composed of expected_adds and expected_removes.""" if adds != expected_adds: raise AssertionError('falsely added instructions: ', adds - expected_adds, 'unadded instructions: ', expected_adds - adds) if removes != expected_removes: raise AssertionError('falsely removed instructions: ', removes - expected_removes, 'missing instructions: ', expected_removes - removes)
bsd-3-clause
-8,105,469,482,561,294,000
37.388325
80
0.618182
false
3.442194
false
false
false
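To illustrate how the operand-set helpers in proof_tools.py compose, here is a hedged sketch (my addition): it assumes the module is importable and runs under the Python 2 environment it was written for.

# Builds every "addpd %xmmN, %xmmM" disassembly tuple for x86-64.
import proof_tools

mnemonic = proof_tools.MnemonicOp('addpd')
xmms = proof_tools.AllXMMOperands(64)
patterns = proof_tools.OpsProd(mnemonic, xmms, xmms)
print len(patterns)   # 1 * 16 * 16 = 256 Operands instances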
NicoSantangelo/sublime-gulp
status_bar.py
1
1362
import sublime

is_sublime_text_3 = int(sublime.version()) >= 3000

if is_sublime_text_3:
    from .settings import Settings
    from .caches import ProcessCache
    from .timeout import defer_sync
else:
    from settings import Settings
    from caches import ProcessCache
    from timeout import defer_sync


class StatusBar():
    def __init__(self, window):
        self.window = window
        self.settings = Settings()

    def update(self):
        if ProcessCache.empty():
            return self.erase()

        status_bar_tasks = self.settings.get('status_bar_tasks', False)

        if status_bar_tasks:
            task_names = set([process.get_task_name() for process in ProcessCache.get()])

            if status_bar_tasks is not True:
                if not isinstance(status_bar_tasks, list):
                    status_bar_tasks = [status_bar_tasks]

                task_names = task_names.intersection(set(status_bar_tasks))

            if task_names:
                defer_sync(lambda: self.set(', '.join(task_names)))

    def set(self, text):
        text_format = self.settings.get('status_bar_format', '{task_name}')
        status = text_format.format(task_name=text)
        self.window.active_view().set_status(Settings.PACKAGE_NAME, status)

    def erase(self):
        self.window.active_view().erase_status(Settings.PACKAGE_NAME)
mit
7,182,725,561,763,034,000
29.266667
89
0.623348
false
3.913793
false
false
false
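As a usage sketch (my addition; the command name is hypothetical, not part of the plugin): StatusBar is constructed with a window and polled whenever the process cache changes.

import sublime_plugin

class ExampleShowStatusCommand(sublime_plugin.WindowCommand):
    # Hypothetical command illustrating the intended call pattern.
    def run(self):
        StatusBar(self.window).update()   # shows running task names, or erases status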
fedora-conary/rbuild
plugins/buildpackages.py
1
4531
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from rbuild import errors
from rbuild import pluginapi
from rbuild.pluginapi import command

from rbuild_plugins.build import packages
from rbuild_plugins.build import refresh


class BuildPackagesCommand(command.BaseCommand):
    """
    Builds or rebuilds specified packages, or all checked-out packages
    if none are specified.

    Additionally, rebuilds any other packages in the product group that
    depend on the built packages.
    """

    help = 'Build edited packages for this stage'
    paramHelp = '[package]*'
    docs = {'refresh': 'refreshes the source of specified packages, or all '
                       'checked-out packages if none are specified',
            'message': 'message describing why the commit was performed',
            'no-watch': 'do not watch the job after starting the build',
            'no-commit': 'do not automatically commit successful builds',
            'no-recurse': 'default behavior left for backwards compatibility',
            'recurse': 'build every package listed on the '
                       'command line plus all of its dependencies',
            }

    def addLocalParameters(self, argDef):
        argDef['no-watch'] = command.NO_PARAM
        argDef['no-commit'] = command.NO_PARAM
        argDef['no-recurse'] = command.NO_PARAM
        argDef['recurse'] = command.NO_PARAM
        argDef['refresh'] = command.NO_PARAM
        argDef['message'] = '-m', command.ONE_PARAM

    # pylint: disable-msg=R0201,R0903
    # could be a function, and too few public methods
    def runCommand(self, handle, argSet, args):
        watch = not argSet.pop('no-watch', False)
        commit = not argSet.pop('no-commit', False)
        recurse = argSet.pop('recurse', False)
        argSet.pop('no-recurse', False)  # ignored, now the default
        refreshArg = argSet.pop('refresh', False)
        message = argSet.pop('message', None)
        success = True
        _, packageList, = self.requireParameters(args, allowExtra=True)
        if not packageList:
            if refreshArg:
                handle.BuildPackages.refreshAllPackages()
            jobId = handle.BuildPackages.buildAllPackages()
        else:
            if refreshArg:
                handle.BuildPackages.refreshPackages(packageList)
            jobId = handle.BuildPackages.buildPackages(packageList, recurse)
        if watch and commit:
            success = handle.Build.watchAndCommitJob(jobId, message)
        elif watch:
            success = handle.Build.watchJob(jobId)
        if not success:
            raise errors.PluginError('Package build failed')


class BuildPackages(pluginapi.Plugin):
    def initialize(self):
        self.handle.Commands.getCommandClass('build').registerSubCommand(
            'packages', BuildPackagesCommand, aliases=['package', ])

    def buildAllPackages(self):
        self.handle.Build.warnIfOldProductDefinition('building all packages')
        job = self.createJobForAllPackages()
        jobId = self.handle.facade.rmake.buildJob(job)
        self.handle.productStore.setPackageJobId(jobId)
        return jobId

    def buildPackages(self, packageList, recurse=True):
        self.handle.Build.warnIfOldProductDefinition('building packages')
        job = self.createJobForPackages(packageList, recurse)
        jobId = self.handle.facade.rmake.buildJob(job)
        self.handle.productStore.setPackageJobId(jobId)
        return jobId

    def createJobForAllPackages(self):
        return packages.createRmakeJobForAllPackages(self.handle)

    def createJobForPackages(self, packageList, recurse=True):
        return packages.createRmakeJobForPackages(self.handle, packageList,
                                                  recurse)

    def refreshPackages(self, packageList=None):
        return refresh.refreshPackages(self.handle, packageList)

    def refreshAllPackages(self):
        return refresh.refreshAllPackages(self.handle)
apache-2.0
-3,516,606,443,660,765,000
38.4
79
0.672699
false
4.262465
false
false
false
Autodesk/molecular-design-toolkit
moldesign/helpers/qmmm.py
1
3277
# Copyright 2016 Autodesk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import moldesign as mdt

LINKBONDRATIO = 0.709  # fixed ratio of C-C to C-H bond length for link atoms


def create_link_atoms(mol, qmatoms):
    """ Create hydrogen caps for bonds between QM and MM regions.

    Each link atom will have ``metadata.mmatom``, ``metadata.mmpartner``
    attributes to identify the atom it replaces and the atom it's bonded to
    in the MM system.

    Raises:
        ValueError: if any MM/QM atom is bonded to more than one QM/MM atom,
            or the bond order is not one

    Returns:
        List[mdt.Atom]: list of link atoms
    """
    linkatoms = []
    qmset = set(qmatoms)
    for qmatom in qmatoms:
        mmatom = _get_mm_nbr(mol, qmatom, qmset)
        if mmatom is None:
            continue

        la = mdt.Atom(atnum=1, name='HL%d' % len(linkatoms),
                      metadata={'mmatom': mmatom, 'mmpartner': qmatom})
        linkatoms.append(la)

    set_link_atom_positions(linkatoms)
    return linkatoms


def _get_mm_nbr(mol, qmatom, qmset):
    mm_nbrs = [nbr for nbr in qmatom.bonded_atoms
               if nbr not in qmset]
    if len(mm_nbrs) == 0:
        return None

    # everything below is sanity checks
    mmatom = mm_nbrs[0]
    if len(mm_nbrs) != 1:
        raise ValueError('QM atom %s is bonded to more than one MM atom' % qmatom)
    if mol.bond_graph[qmatom][mmatom] != 1:
        raise ValueError('Bond crossing QM/MM boundary (%s - %s) does not have order 1'
                         % (qmatom, mmatom))
    if qmatom.atnum != 6 or mmatom.atnum != 6:
        print ('WARNING: QM/MM bond involving non-carbon atoms: %s - %s'
               % (qmatom, mmatom))

    mm_qm_nbrs = [qmnbr for qmnbr in mmatom.bonded_atoms
                  if qmnbr in qmset]
    if len(mm_qm_nbrs) != 1:
        raise ValueError('MM atom %s is bonded to more than one QM atom' % mmatom)
    return mmatom


def set_link_atom_positions(linkatoms):
    """ Set link atom positions using a fixed ratio of MM bond length to QM bond length

    Warnings:
        - This is only valid for
        - Presumably, the most "correct" way to do this is to place the
          hydrogen in order to match the force exerted on the QM atom by the
          MM atom. This is not currently supported.

    Args:
        linkatoms (List[mdt.Atom]): list of link atoms to set positions for

    References:
        http://www.nwchem-sw.org/index.php/Qmmm_link_atoms
    """
    for atom in linkatoms:
        nbr = atom.metadata.mmpartner
        proxy = atom.metadata.mmatom

        dist = LINKBONDRATIO * nbr.distance(proxy)
        atom.position = (nbr.position +
                         dist * mdt.mathutils.normalized(proxy.position - nbr.position))
apache-2.0
3,375,231,192,028,924,000
34.619565
99
0.641745
false
3.445846
false
false
false
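An illustrative call into the helper above (my addition): the molecule and the QM/MM split are placeholders, and mdt.from_name may require network access to resolve the name.

import moldesign as mdt
from moldesign.helpers import qmmm

mol = mdt.from_name('butane')                # hypothetical small test system
qmatoms = mol.atoms[:len(mol.atoms) // 2]    # arbitrary "QM" half of the molecule
for link in qmmm.create_link_atoms(mol, qmatoms):
    print(link.name, link.metadata.mmpartner, link.metadata.mmatom)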
USGSDenverPychron/pychron
pychron/hardware/fusions/fusions_motor_configurer.py
1
1639
# ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================

'''
@author: Jake Ross
@copyright: 2009
@license: Educational Community License 1.0
'''

# =============enthought library imports=======================
from traits.api import HasTraits, List
from traitsui.api import View, Item, Group

# =============standard library imports ========================
# =============local library imports  ==========================


class FusionsMotorConfigurer(HasTraits):
    '''
        G{classtree}
    '''
    motors = List

    def traits_view(self):
        '''
        '''
        motorgroup = Group(layout='tabbed')
        for m in self.motors:
            n = m.name
            self.add_trait(n, m)
            i = Item(n, style='custom', show_label=False)
            motorgroup.content.append(i)

        return View(motorgroup,
                    resizable=True,
                    title='Configure Motors',
                    buttons=['OK', 'Cancel', 'Revert'],
                    )
apache-2.0
3,609,808,186,760,802,300
29.351852
81
0.546065
false
4.394102
false
false
false
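A hedged usage sketch (my addition): motor_a and motor_b are placeholder HasTraits objects with a .name attribute, which is what the view code above expects.

configurer = FusionsMotorConfigurer(motors=[motor_a, motor_b])
configurer.configure_traits()   # opens the tabbed "Configure Motors" dialog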
bccp/nbodykit
nbodykit/source/catalog/subvolumes.py
1
2079
from nbodykit.base.catalog import CatalogSource
from pmesh.domain import GridND
from nbodykit.utils import split_size_3d

import numpy


class SubVolumesCatalog(CatalogSource):
    """
    A catalog that distributes the particles spatially into subvolumes
    per MPI rank.

    Attributes
    ----------
    domain : :class:`pmesh.domain.GridND`
        The domain object for decomposition. If None, generate a domain
        to decompose the catalog into a 3d grid.
    layout :
        A large object that holds which particle belongs to which rank.
    source :
        the original source object

    Parameters
    ----------
    columns : list
        a list of columns to already exchange
    """
    def __init__(self, source, domain=None, position='Position', columns=None):
        comm = source.comm

        if domain is None:
            # determine processor division for domain decomposition
            np = split_size_3d(comm.size)

            if comm.rank == 0:
                self.logger.info("using cpu grid decomposition: %s" % str(np))

            grid = [
                numpy.linspace(0, source.attrs['BoxSize'][0], np[0] + 1, endpoint=True),
                numpy.linspace(0, source.attrs['BoxSize'][1], np[1] + 1, endpoint=True),
                numpy.linspace(0, source.attrs['BoxSize'][2], np[2] + 1, endpoint=True),
            ]
            domain = GridND(grid, comm=comm)

        self.domain = domain
        self.source = source

        layout = domain.decompose(source[position].compute())

        self._size = layout.recvlength

        CatalogSource.__init__(self, comm=comm)
        self.attrs.update(source.attrs)

        self._frozen = {}
        if columns is None:
            columns = source.columns

        for column in columns:
            data = source[column].compute()
            self._frozen[column] = self.make_column(layout.exchange(data))

    @property
    def hardcolumns(self):
        return sorted(list(self._frozen.keys()))

    def get_hardcolumn(self, col):
        return self._frozen[col]
gpl-3.0
3,959,098,747,508,798,500
31.484375
88
0.599327
false
4.295455
false
false
false
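An illustrative usage sketch (my addition), built on nbodykit's synthetic UniformCatalog; the nbar/BoxSize/seed values are arbitrary.

from nbodykit.lab import UniformCatalog
from nbodykit.source.catalog.subvolumes import SubVolumesCatalog

cat = UniformCatalog(nbar=100, BoxSize=1.0, seed=42)
sub = SubVolumesCatalog(cat, columns=['Position'])
print(sub.comm.rank, sub.size)   # each rank now holds one spatial subvolume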
DANS-KNAW/dariah-contribute
dariah_static_data/migrations/0003_auto__del_field_country_iso3166_2__del_field_country_uri__add_field_co.py
1
4571
# -*- coding: utf-8 -*-
"""
DARIAH Contribute - DARIAH-EU Contribute: edit your DARIAH contributions.
Copyright 2014 Data Archiving and Networked Services

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Deleting field 'Country.iso3166_2'
        db.delete_column(u'dariah_static_data_country', 'iso3166_2')

        # Deleting field 'Country.uri'
        db.delete_column(u'dariah_static_data_country', 'uri')

        # Adding field 'Country.geonameid'
        db.add_column(u'dariah_static_data_country', 'geonameid',
                      self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
                      keep_default=False)

    def backwards(self, orm):
        # Adding field 'Country.iso3166_2'
        db.add_column(u'dariah_static_data_country', 'iso3166_2',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=2),
                      keep_default=False)

        # Adding field 'Country.uri'
        db.add_column(u'dariah_static_data_country', 'uri',
                      self.gf('django.db.models.fields.URLField')(default='', max_length=200),
                      keep_default=False)

        # Deleting field 'Country.geonameid'
        db.delete_column(u'dariah_static_data_country', 'geonameid')

    models = {
        u'dariah_static_data.activitygroupname': {
            'Meta': {'object_name': 'ActivityGroupName'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'dariah_static_data.country': {
            'Meta': {'object_name': 'Country'},
            'geonameid': ('django.db.models.fields.PositiveIntegerField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'dariah_static_data.tadirahactivity': {
            'Meta': {'object_name': 'TADIRAHActivity'},
            'activity_group_name': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tadirah_activities'", 'to': u"orm['dariah_static_data.ActivityGroupName']"}),
            'activity_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        },
        u'dariah_static_data.tadirahobject': {
            'Meta': {'object_name': 'TADIRAHObject'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        },
        u'dariah_static_data.tadirahtechnique': {
            'Meta': {'object_name': 'TADIRAHTechnique'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        },
        u'dariah_static_data.vcc': {
            'Meta': {'object_name': 'VCC'},
            'description': ('django.db.models.fields.TextField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        }
    }

    complete_apps = ['dariah_static_data']
apache-2.0
5,867,931,267,306,300,000
46.134021
182
0.589805
false
3.689266
false
false
false
puttarajubr/commcare-hq
custom/succeed/reports/patient_task_list.py
1
14071
from datetime import datetime import logging from django.core.urlresolvers import reverse from django.utils import html from django.utils.translation import ugettext as _, ugettext_noop import json from corehq.apps.api.es import ReportCaseES from corehq.apps.cloudcare.api import get_cloudcare_app, get_cloudcare_form_url from corehq.apps.reports.datatables import DataTablesHeader, DataTablesColumn from corehq.apps.reports.filters.search import SearchFilter from corehq.apps.reports.generic import ElasticProjectInspectionReport from corehq.apps.reports.standard import CustomProjectReport, ProjectReportParametersMixin from corehq.apps.reports.standard.cases.data_sources import CaseDisplay from corehq.elastic import es_query from corehq.pillows.base import restore_property_dict from corehq.pillows.mappings.reportcase_mapping import REPORT_CASE_INDEX from custom.succeed.reports.patient_Info import PatientInfoReport from custom.succeed.reports import VISIT_SCHEDULE, LAST_INTERACTION_LIST, EMPTY_FIELD, \ INPUT_DATE_FORMAT, OUTPUT_DATE_FORMAT, CM_APP_UPDATE_VIEW_TASK_MODULE, CM_UPDATE_TASK, TASK_RISK_FACTOR, TASK_ACTIVITY from custom.succeed.utils import is_succeed_admin, has_any_role, SUCCEED_CM_APPNAME, get_app_build from casexml.apps.case.models import CommCareCase from dimagi.utils.decorators.memoized import memoized class PatientTaskListReportDisplay(CaseDisplay): def __init__(self, report, case_dict): next_visit = VISIT_SCHEDULE[0] last_inter = None for action in case_dict['actions']: if action['xform_xmlns'] in LAST_INTERACTION_LIST: last_inter = action for visit_key, visit in enumerate(VISIT_SCHEDULE): for key, action in enumerate(case_dict['actions']): if visit['xmlns'] == action['xform_xmlns']: try: next_visit = VISIT_SCHEDULE[visit_key + 1] del case_dict['actions'][key] break except IndexError: next_visit = 'last' self.next_visit = next_visit if last_inter: self.last_interaction = last_inter['date'] self.domain = report.domain self.app_dict = get_cloudcare_app(self.domain, SUCCEED_CM_APPNAME) self.latest_build = get_app_build(self.app_dict) super(PatientTaskListReportDisplay, self).__init__(report, case_dict) def get_property(self, key): if key in self.case: return self.case[key] else: return EMPTY_FIELD def get_link(self, url, field): if url: return html.mark_safe("<a class='ajax_dialog' href='%s' target='_blank'>%s</a>" % (url, html.escape(field))) else: return "%s (bad ID format)" % self.case["indices"][0]["referenced_id"] def get_form_url(self, app_dict, app_build_id, module_idx, form, case_id=None): try: module = app_dict['modules'][module_idx] form_idx = [ix for (ix, f) in enumerate(module['forms']) if f['xmlns'] == form][0] except IndexError: form_idx = None return html.escape(get_cloudcare_form_url(domain=self.domain, app_build_id=app_build_id, module_id=module_idx, form_id=form_idx, case_id=case_id) + '/enter/') @property @memoized def full_name(self): return CommCareCase.get(self.get_property("indices")[0]["referenced_id"])["full_name"] @property def full_name_url(self): return html.escape( PatientInfoReport.get_url(*[self.case["domain"]]) + "?patient_id=%s" % self.case["indices"][0]["referenced_id"]) @property def full_name_link(self): return self.get_link(self.full_name_url, self.full_name) @property def name(self): return self.get_property("name") @property def name_url(self): if self.status == "Closed": url = reverse('case_details', args=[self.domain, self.get_property("_id")]) return url + '#!history' else: return self.get_form_url(self.app_dict, self.latest_build, 
CM_APP_UPDATE_VIEW_TASK_MODULE, CM_UPDATE_TASK, self.get_property("_id")) @property def name_link(self): return self.get_link(self.name_url, self.name) @property def task_responsible(self): return self.get_property("task_responsible") @property def case_filter(self): filters = [] care_site = self.request_params.get('task_responsible', '') if care_site != '': filters.append({'term': {'task_responsible.#value': care_site.lower()}}) return {'and': filters} if filters else {} @property def status(self): return self.get_property("closed") and "Closed" or "Open" @property def task_due(self): rand_date = self.get_property("task_due") if rand_date and rand_date != EMPTY_FIELD: date = datetime.strptime(rand_date, INPUT_DATE_FORMAT) return date.strftime(OUTPUT_DATE_FORMAT) else: return EMPTY_FIELD @property def last_modified(self): rand_date = self.get_property("last_updated") if rand_date and rand_date != EMPTY_FIELD: date = datetime.strptime(rand_date, INPUT_DATE_FORMAT) return date.strftime(OUTPUT_DATE_FORMAT) else: return EMPTY_FIELD @property def task_activity(self): key = self.case.get("task_activity", EMPTY_FIELD) return TASK_ACTIVITY.get(key, key) @property def task_risk_factor(self): key = self.case.get("task_risk_factor", EMPTY_FIELD) return TASK_RISK_FACTOR.get(key, key) @property def task_details(self): return self.get_property("task_details") class PatientTaskListReport(CustomProjectReport, ElasticProjectInspectionReport, ProjectReportParametersMixin): ajax_pagination = True name = ugettext_noop('Patient Tasks') slug = 'patient_task_list' default_sort = {'task_due.#value': 'asc'} base_template_filters = 'succeed/report.html' case_type = 'task' fields = ['custom.succeed.fields.ResponsibleParty', 'custom.succeed.fields.PatientName', 'custom.succeed.fields.TaskStatus', 'corehq.apps.reports.standard.cases.filters.CaseSearchFilter'] @classmethod def show_in_navigation(cls, domain=None, project=None, user=None): return True @property @memoized def rendered_report_title(self): return self.name @property @memoized def case_es(self): return ReportCaseES(self.domain) @property def case_filter(self): filters = [] care_site = self.request_params.get('care_site', '') if care_site != '': filters.append({'term': {'care_site.#value': care_site.lower()}}) return {'and': filters} if filters else {} @property def headers(self): headers = DataTablesHeader( DataTablesColumn(_("Patient Name"), sortable=False), DataTablesColumn(_("Task Name"), prop_name="name"), DataTablesColumn(_("Responsible Party"), prop_name="task_responsible", sortable=False), DataTablesColumn(_("Status"), prop_name='status', sortable=False), DataTablesColumn(_("Action Due"), prop_name="task_due.#value"), DataTablesColumn(_("Last Update"), prop_name='last_updated.#value'), DataTablesColumn(_("Task Type"), prop_name="task_activity.#value"), DataTablesColumn(_("Associated Risk Factor"), prop_name="task_risk_factor.#value"), DataTablesColumn(_("Details"), prop_name="task_details", sortable=False), ) return headers @property @memoized def es_results(self): q = { "query": { "filtered": { "query": { "match_all": {} }, "filter": { "and": [ {"term": { "domain.exact": "succeed" }}, ] } } }, 'sort': self.get_sorting_block(), 'from': self.pagination.start if self.pagination else None, 'size': self.pagination.count if self.pagination else None, } search_string = SearchFilter.get_value(self.request, self.domain) es_filters = q["query"]["filtered"]["filter"] responsible_party = self.request_params.get('responsible_party', '') if responsible_party != 
'': if responsible_party == 'Care Manager': es_filters["and"].append({"term": {"task_responsible.#value": "cm"}}) else: es_filters["and"].append({"term": {"task_responsible.#value": "chw"}}) task_status = self.request_params.get('task_status', '') if task_status != '': if task_status == 'closed': es_filters["and"].append({"term": {"closed": True}}) else: es_filters["and"].append({"term": {"closed": False}}) patient_id = self.request_params.get('patient_id', '') if patient_id != '': es_filters["and"].append({"term": {"indices.referenced_id": patient_id}}) def _filter_gen(key, flist): return {"terms": { key: [item.lower() for item in flist if item] }} user = self.request.couch_user if not user.is_web_user(): owner_ids = user.get_group_ids() user_ids = [user._id] owner_filters = _filter_gen('owner_id', owner_ids) user_filters = _filter_gen('user_id', user_ids) filters = filter(None, [owner_filters, user_filters]) subterms = [] subterms.append({'or': filters}) es_filters["and"].append({'and': subterms} if subterms else {}) if self.case_type: es_filters["and"].append({"term": {"type.exact": 'task'}}) if search_string: query_block = {"queryString": {"query": "*" + search_string + "*"}} q["query"]["filtered"]["query"] = query_block sorting_block = self.get_sorting_block()[0].keys()[0] if len(self.get_sorting_block()) != 0 else None order = self.get_sorting_block()[0].values()[0] if len(self.get_sorting_block()) != 0 else None if sorting_block == 'task_risk_factor.#value': sort = { "_script": { "script": """ foreach(String key : task_risk_factor_list.keySet()) { String value = _source.task_risk_factor.get('#value'); if (value == null) { return ''; } else { return task_risk_factor_list.get(value); } } return '' """, "type": "string", "params": { "task_risk_factor_list": TASK_RISK_FACTOR }, "order": order } } q['sort'] = sort if sorting_block == 'task_activity.#value': sort = { "_script": { "script": """ foreach(String key : task_activity_list.keySet()) { String value = _source.task_activity.get('#value'); if (value == null) { return value; } else { return task_activity_list.get(value); } } return '' """, "type": "string", "params": { "task_activity_list": TASK_ACTIVITY }, "order": order } } q['sort'] = sort logging.info("ESlog: [%s.%s] ESquery: %s" % (self.__class__.__name__, self.domain, json.dumps(q))) if self.pagination: return es_query(q=q, es_url=REPORT_CASE_INDEX + '/_search', dict_only=False, start_at=self.pagination.start) else: return es_query(q=q, es_url=REPORT_CASE_INDEX + '/_search', dict_only=False) @property def get_all_rows(self): return self.rows @property def rows(self): case_displays = (PatientTaskListReportDisplay(self, restore_property_dict(self.get_case(case))) for case in self.es_results['hits'].get('hits', [])) for disp in case_displays: yield [ disp.full_name_link, disp.name_link, disp.task_responsible, disp.status, disp.task_due, disp.last_modified, disp.task_activity, disp.task_risk_factor, disp.task_details ] @property def user_filter(self): return super(PatientTaskListReport, self).user_filter def get_case(self, row): if '_source' in row: case_dict = row['_source'] else: raise ValueError("Case object is not in search result %s" % row) if case_dict['domain'] != self.domain: raise Exception("case.domain != self.domain; %r and %r, respectively" % (case_dict['domain'], self.domain)) return case_dict
bsd-3-clause
-7,832,998,788,708,410,000
37.763085
144
0.542108
false
4.120351
false
false
false
mahmoudShaheen/PyMedox
packages/arduino.py
1
1286
#!/usr/bin/env python
#################################
#   @author: Mahmoud Shaheen   #
#   MedicalBox IOT Project     #
#   Arduino                    #
#################################

# functions for serial communication with Arduino
# called from controlHardware module

import serial
import data
import time

ser = serial.Serial(data.arduinoPort)
ser.baudrate = data.baudRate
time.sleep(5)  # wait for serial communication to start


# encodes string and sends it on serial port for Arduino
def sendSerial(serialString):
    # checks if the port is closed to re-open it
    # (isOpen must be called; the bare method reference is always truthy)
    if not ser.isOpen():
        ser.open()
        time.sleep(5)
    serialString = str(serialString)  # makes sure that the data is string "convert any to string"
    serialString = serialString.encode()  # encodes the string "converts string to byte array"
    print "serial to write: " + serialString
    ser.write(serialString)


# gets a line from serial port from Arduino
def getSerial():
    if not ser.isOpen():  # checks if the port is closed to re-open it
        ser.open()
        time.sleep(5)
    line = ser.readline()  # get a line from serial terminated by \n
    line = line.strip()  # removes \r\n at the end of the string
    line = line.decode("utf-8")  # removes b at the start of the string "converts byte to string"
    print "serial received: ", line
    return line
mit
-8,242,292,681,083,054,000
31.974359
93
0.691291
false
3.297436
false
false
false
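A hypothetical round trip with the module above (my addition): it assumes data.arduinoPort points at a live board whose sketch replies with a newline-terminated status string.

import arduino

arduino.sendSerial("LED:ON")
reply = arduino.getSerial()   # blocks until a full line arrives
print "reply:", reply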
a3qz/networked_platformer
editor/editor.py
1
4284
import random
import time
import data
import sprite
import sys
import pygame
import constants
import collectable
import client
import wall
from struct import *
import board


class Game:
    def __init__(self, s):
        self.objects = []  # start with a list of no objects
        self.screen = s  # get the screen surface
        # make a player ship to use to control the view
        self.player = Ship(self, 100, 100, "91913")
        # load a font for drawing our typed string
        self.bigfont = pygame.font.Font("./fonts/megaman_2.ttf", 32)
        # make a board and load in the level editor level
        self.board = board.Board(self)
        self.board.parse("./levels/beta.lvl")

    def tick(self):
        # handle just our player for the editor
        self.player.tick()

    def draw(self):
        # draw the background
        self.screen.fill(constants.GREEN)
        # draw the objects in reversed order, for depth reasons
        for b in reversed(self.objects):
            b.draw()

    # handle player actions
    def handleKeyDown(self, k):
        self.player.handleKeyDown(k)

    def handleKeyUp(self, k):
        self.player.handleKeyUp(k)

    # no longer used
    def handleMUP(self, xxt, yyt):
        pass

    def handleMDOWN(self, xxt, yyt, event):
        # figure out which grid space the player clicked on
        x = int((16 + xxt - self.player.view[0]) / 32) * 32
        y = int((16 + yyt - self.player.view[1]) / 32) * 32
        # check if they are left clicking or not
        if event.button == 1:
            # if left click, add a thing to the board based off
            # where you clicked and what the user typed
            self.board.ref[int(self.player.toadd)](self, x, y, int(self.player.toadd))
        else:
            # otherwise, make a rectangle and figure out who you clicked on
            rect = pygame.Rect(0, 0, 1, 1)
            l1 = self.objects
            l2 = [w.rect for w in l1]
            # check the objects for collision
            i = rect.move(x, y).collidelist(l2)
            # if we clicked on a valid thing to remove, remove it
            if i != -1 and not isinstance(l1[i], Ship):
                self.objects = [o for o in self.objects if o != l1[i]]


class Ship(sprite.Sprite):
    def __init__(self, game, x, y, descriptor):
        super(Ship, self).__init__(game)
        self.rect.move_ip(x, y)  # move to the correct coordinates
        # load an image
        self.img = pygame.image.load(
            'imgs/cards/smaller_pngs/{}'.format(data.num_as_key[descriptor])).convert_alpha()
        # set up our game's viewport
        self.view = (0, 0)
        # start a string for typing
        self.toadd = ''
        # make us our correct size
        self.rect.inflate_ip(100, 145)
        # we aren't pressing anything
        self.keys = 0

    def tick(self):
        # move us based off our velocity
        self.rect.move_ip(self.vx, self.vy)
        # move our view to the right place
        self.view = (constants.WIDTH / 2 - self.rect.x,
                     (constants.HEIGHT * 3) / 4 - self.rect.y)
        # handle keys
        self.fly()

    def draw(self):
        self.game.screen.blit(self.img, self.rect.move(*self.view))
        label = self.game.bigfont.render(self.toadd, 1, (255, 255, 255))
        self.game.screen.blit(label, (10, 10))

    def handleKeyDown(self, k):
        # asdw control flight
        if k == 'a':
            self.keys |= 1
        elif k == 'd':
            self.keys |= 2
        elif k == 'w':
            self.keys |= 4
        elif k == 's':
            self.keys |= 8
        elif k.isdigit() and len(k) == 1:
            # if we did a single digit, type it
            self.toadd = self.toadd + k
        elif k == 'backspace':
            # if we backspaced, delete a char from our string
            self.toadd = self.toadd[:-1]

    # stop flying when releasing keys
    def handleKeyUp(self, k):
        if k == 'a':
            self.keys &= ~1
        elif k == 'd':
            self.keys &= ~2
        elif k == 'w':
            self.keys &= ~4
        elif k == 's':
            self.keys &= ~8

    def fly(self):
        # handle our velocities
        self.vx = (((self.keys & 2) >> 1) - ((self.keys & 1) >> 0)) * 7
        self.vy = (((self.keys & 4) >> 2) - ((self.keys & 8) >> 3)) * -7
gpl-3.0
-2,789,428,229,882,804,700
33.272
118
0.558123
false
3.567027
false
false
false
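A hedged driver loop for the editor's Game class (my addition): it assumes the module is importable as editor and that the fonts/, imgs/ and levels/ assets referenced by Game and Ship exist relative to the working directory.

import pygame
import constants
from editor import Game

pygame.init()
screen = pygame.display.set_mode((constants.WIDTH, constants.HEIGHT))
game = Game(screen)
clock = pygame.time.Clock()
while True:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            raise SystemExit
    game.tick()
    game.draw()
    pygame.display.flip()
    clock.tick(60)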
bwhite/hadoopy
examples/l4-vision-and-image-processing-with-hadoop/ex0-face-finder/face_finder.py
1
3141
#!/usr/bin/env python
# (C) Copyright 2011 Brandyn A. White
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""Hadoopy Face Finding Demo"""

__author__ = 'Brandyn A. White <[email protected]>'
__license__ = 'GPL V3'

import hadoopy
import Image
import imfeat
import cStringIO as StringIO
import os
import cv


class Mapper(object):

    def __init__(self):
        path = 'haarcascade_frontalface_default.xml'
        if os.path.exists(path):
            self._cascade = cv.Load(path)
        else:
            path = 'fixtures/haarcascade_frontalface_default.xml'
            if os.path.exists(path):
                self._cascade = cv.Load(path)
            else:
                raise ValueError("Can't find .xml file!")

    def _detect_faces(self, img):
        min_size = (20, 20)
        image_scale = 2
        haar_scale = 1.2
        min_neighbors = 2
        haar_flags = 0
        if img.nChannels == 3:
            gray = cv.CreateImage((img.width, img.height), 8, 1)
            cv.CvtColor(img, gray, cv.CV_BGR2GRAY)
        else:
            gray = img
        small_img = cv.CreateImage((cv.Round(img.width / image_scale),
                                    cv.Round(img.height / image_scale)), 8, 1)
        cv.Resize(gray, small_img, cv.CV_INTER_LINEAR)
        cv.EqualizeHist(small_img, small_img)
        faces = cv.HaarDetectObjects(small_img, self._cascade,
                                     cv.CreateMemStorage(0),
                                     haar_scale, min_neighbors, haar_flags,
                                     min_size)
        return [((x * image_scale, y * image_scale,
                  w * image_scale, h * image_scale), n)
                for (x, y, w, h), n in faces]

    def _load_cv_image(self, value):
        return imfeat.convert_image(Image.open(StringIO.StringIO(value)),
                                    [('opencv', 'rgb', 8)])

    def map(self, key, value):
        """
        Args:
            key: Image name
            value: Image as jpeg byte data

        Yields:
            A tuple in the form of (key, value)
            key: Image name
            value: (image, faces) where image is the input value and faces is
                a list of ((x, y, w, h), n)
        """
        try:
            image = self._load_cv_image(value)
        except:
            hadoopy.counter('DATA_ERRORS', 'ImageLoadError')
            return
        faces = self._detect_faces(image)
        if faces:
            yield key, (value, faces)


if __name__ == "__main__":
    hadoopy.run(Mapper, doc=__doc__)
gpl-3.0
-3,343,407,461,399,199,000
32.774194
78
0.562241
false
3.770708
false
false
false
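An illustrative launch for the mapper above (my addition): the HDFS paths are placeholders, and the cascade file is shipped alongside the script via the files argument.

import hadoopy

hadoopy.launch_frozen('/user/me/image_seqfile', '/user/me/face_out',
                      'face_finder.py',
                      files=['haarcascade_frontalface_default.xml'])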
ctools/ctools
cscripts/csphagen.py
1
44281
#! /usr/bin/env python # ========================================================================== # Computes the PHA spectra for source/background and ARF/RMF files using the # reflected region method # # Copyright (C) 2017-2021 Luigi Tibaldo # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # ========================================================================== import gammalib import ctools import math import sys from cscripts import mputils # =============== # # csfindobs class # # =============== # class csphagen(ctools.csobservation): """ Generate PHA, ARF and RMF files for classical IACT spectral analysis """ # Constructor def __init__(self, *argv): """ Constructor """ # Initialise application by calling the appropriate class constructor self._init_csobservation(self.__class__.__name__, ctools.__version__, argv) # Initialise other variables self._obs_off = gammalib.GObservations() self._ebounds = gammalib.GEbounds() self._etruebounds = gammalib.GEbounds() self._src_dir = gammalib.GSkyDir() self._src_reg = gammalib.GSkyRegions() self._models = gammalib.GModels() self._srcname = '' self._bkg_regs = [] self._excl_reg = None self._has_exclusion = False self._srcshape = '' self._rad = 0.0 self._reg_width = 0.0 self._reg_height = 0.0 self._reg_posang = 0.0 self._nthreads = 0 # Return return # State methods por pickling def __getstate__(self): """ Extend ctools.csobservation getstate method to include some members Returns ------- state : dict Pickled instance """ # Set pickled dictionary state = {'base' : ctools.csobservation.__getstate__(self), 'obs_off' : self._obs_off, 'ebounds' : self._ebounds, 'etruebounds' : self._etruebounds, 'src_dir' : self._src_dir, 'src_reg' : self._src_reg, 'models' : self._models, 'srcname' : self._srcname, 'bkg_regs' : self._bkg_regs, 'excl_reg' : self._excl_reg, 'has_exclusion' : self._has_exclusion, 'srcshape' : self._srcshape, 'rad' : self._rad, 'reg_width' : self._reg_width, 'reg_height' : self._reg_height, 'reg_posang' : self._reg_posang, 'nthreads' : self._nthreads} # Return pickled dictionary return state def __setstate__(self, state): """ Extend ctools.csobservation setstate method to include some members Parameters ---------- state : dict Pickled instance """ ctools.csobservation.__setstate__(self, state['base']) self._obs_off = state['obs_off'] self._ebounds = state['ebounds'] self._etruebounds = state['etruebounds'] self._src_dir = state['src_dir'] self._src_reg = state['src_reg'] self._models = state['models'] self._srcname = state['srcname'] self._bkg_regs = state['bkg_regs'] self._excl_reg = state['excl_reg'] self._has_exclusion = state['has_exclusion'] self._srcshape = state['srcshape'] self._rad = state['rad'] self._reg_width = state['reg_width'] self._reg_height = state['reg_height'] self._reg_posang = state['reg_posang'] self._nthreads = state['nthreads'] # Return return # Private methods def _query_src_direction(self): """ Set up the source direction parameter 
""" # Initialise source direction self._src_dir = gammalib.GSkyDir() # Get coordinate systel coordsys = self['coordsys'].string() # If coordinate system is celestial then query "ra" and "dec" if coordsys == 'CEL': ra = self['ra'].real() dec = self['dec'].real() self._src_dir.radec_deg(ra, dec) # ... otherwise, if coordinate system is galactic then query "glon" # and "glat" elif coordsys == 'GAL': glon = self['glon'].real() glat = self['glat'].real() self._src_dir.lb_deg(glon, glat) # Return return def _compute_posang(self, pnt_dir, a, b): """ Compute the difference in position angle wrt the pointing in degrees Parameters ---------- pnt_dir : `~gammalib.GSkyDir` Pointing direction a : `~gammalib.GSkyDir` First sky direction a : `~gammalib.GSkyDir` Second sky direction Returns ------- posang : float Position angle (degrees) """ # Compute position angles posang_a = pnt_dir.posang_deg(a) % 360 posang_b = pnt_dir.posang_deg(b) % 360 # Compute difference posang = abs(posang_a - posang_b) # Return position angle return posang def _get_regions(self, filename): """ Get regions from DS9 file or FITS file Parameters ---------- filename : `~gammalib.GFilename` Filename Returns ------- regs : `~gammalib.GSkyRegions` Region container """ # If filename is a FITS file then load region map and append to # list of regions if filename.is_fits(): map = gammalib.GSkyRegionMap(filename) regs = gammalib.GSkyRegions() regs.append(map) # ... otherwise load DS9 file else: regs = gammalib.GSkyRegions(filename) # Return region container return regs def _get_source_parameters(self): """ Get parameters to define source/On region """ # Get source shape self._srcshape = self['srcshape'].string() # Query source direction self._query_src_direction() # If source shape is a circle the append GSkyRegionCircle if self._srcshape == 'CIRCLE': # Set circular source region self._rad = self['rad'].real() self._src_reg.append(gammalib.GSkyRegionCircle(self._src_dir, self._rad)) # ... otherwise if source shape is a rectangle then append # GSkyRegionRectangle elif self._srcshape == 'RECT': # Set rectangular source region self._reg_width = self['width'].real() self._reg_height = self['height'].real() self._reg_posang = self['posang'].real() self._src_reg.append(gammalib.GSkyRegionRectangle(self._src_dir, self._reg_width, self._reg_height, self._reg_posang)) # Return return def _get_parameters_bkgmethod_reflected(self): """ Get parameters for REFLECTED background method """ # Query parameters for source/On region definition self._get_source_parameters() # Query minimum number of background regions and # number of background regions to skip next to On region self['bkgregmin'].integer() self['bkgregskip'].integer() # Return return def _get_parameters_bkgmethod_custom(self): """ Get parameters for CUSTOM background method Raises ------ RuntimeError Only one On region is allowed """ # Set up source region filename = self['srcregfile'].filename() self._src_reg = self._get_regions(filename) # Raise an exception if there is more than one source region if len(self._src_reg) != 1: raise RuntimeError('Only one On region is allowed') # Set up source direction. Query parameters if neccessary. 
if self._models.is_empty(): if isinstance(self._src_reg[0], gammalib.GSkyRegionCircle): self._src_dir = self._src_reg[0].centre() self._rad = self._src_reg[0].radius() else: self._query_src_direction() # Make sure that all CTA observations have an Off region by loading the # Off region region the parameter 'bkgregfile' for all CTA observations # without Off region for obs in self.obs(): if obs.classname() == 'GCTAObservation': if obs.off_regions().is_empty(): filename = self['bkgregfile'].filename() regions = self._get_regions(filename) obs.off_regions(regions) # Return return def _get_parameters_bkgmethod_off(self): """ Get parameters for OFF background method Raises ------ RuntimeError On and Off observations must have same size RuntimeError Off observations must be event lists """ # Set up Off observations. If there are no Off observations in the # container then load them via user parameters if self.obs_off().is_empty(): # Get Off observation file name filename = self['inobsoff'].filename() # If Off observation is a FITS file then load observation and # append it to the Off observation container if gammalib.GFilename(filename).is_fits(): self._obs_off.append(gammalib.GCTAObservation(filename)) # ... otherwise load XML file into Off observation container else: self._obs_off.load(filename) # Check that size of On and Off observations are the same, otherwise # raise error if self.obs().size() != self.obs_off().size(): raise RuntimeError('On and Off observations must have the same size') # Loop through observations for obs in self.obs_off(): # Check that observation is event list, otherwise throw error if obs.eventtype() != "EventList": raise RuntimeError('Off observations must be event lists') # Check that they have response, otherwise assign based on user parameter if obs.has_response() == False: # Get database and IRF database = self["caldb"].string() irf = self["irf"].string() # Create an XML element for response parameter = "parameter name=\"Calibration\"" +\ " database=\"" + database + "\"" +\ " response=\"" + irf + "\"" xml = gammalib.GXmlElement() xml.append(parameter) # Create CTA response response = gammalib.GCTAResponseIrf(xml) # Attach response to observation obs.response(response) # Add models from Off observations to model container for model in self.obs_off().models(): self._models.append(model) # Query parameters for source/On region definition self._get_source_parameters() # Return return def _get_parameters_bkgmethod(self): """ Get background method parameters """ # Get background method bkgmethod = self['bkgmethod'].string() # Get background method dependent parameters if bkgmethod == 'REFLECTED': self._get_parameters_bkgmethod_reflected() elif bkgmethod == 'CUSTOM': self._get_parameters_bkgmethod_custom() elif bkgmethod == 'OFF': self._get_parameters_bkgmethod_off() # Query parameters that are needed for all background methods self['maxoffset'].real() self['use_model_bkg'].boolean() # Return return def _get_parameters(self): """ Get parameters from parfile and setup observations """ # Clear source models self._models.clear() # Setup observations (require response and allow event list, don't # allow counts cube) self._setup_observations(self.obs(), True, True, False) # Get source model and source name. First try to extract models from # observation container. 
    def _get_parameters_bkgmethod(self):
        """
        Get background method parameters
        """
        # Get background method
        bkgmethod = self['bkgmethod'].string()

        # Get background method dependent parameters
        if bkgmethod == 'REFLECTED':
            self._get_parameters_bkgmethod_reflected()
        elif bkgmethod == 'CUSTOM':
            self._get_parameters_bkgmethod_custom()
        elif bkgmethod == 'OFF':
            self._get_parameters_bkgmethod_off()

        # Query parameters that are needed for all background methods
        self['maxoffset'].real()
        self['use_model_bkg'].boolean()

        # Return
        return

    def _get_parameters(self):
        """
        Get parameters from parfile and setup observations
        """
        # Clear source models
        self._models.clear()

        # Setup observations (require response and allow event list, don't
        # allow counts cube)
        self._setup_observations(self.obs(), True, True, False)

        # Get source model and source name. First try to extract models from
        # observation container. If this does not work then try creating
        # model from the inmodel parameter
        if self.obs().models().size() > 0:
            self._models  = self.obs().models().clone()
            self._srcname = self['srcname'].string()
        elif self['inmodel'].is_valid():
            inmodel       = self['inmodel'].filename()
            self._models  = gammalib.GModels(inmodel)
            self._srcname = self['srcname'].string()

        # Set energy bounds
        self._ebounds = self._create_ebounds()

        # Initialize empty src regions container
        self._src_reg = gammalib.GSkyRegions()

        # Exclusion map
        if (self._excl_reg is not None) and (self._excl_reg.map().npix() > 0):
            # Exclusion map set and is not empty
            self._has_exclusion = True
        elif self['inexclusion'].is_valid():
            inexclusion = self['inexclusion'].filename()
            # If the user has not specified the extension to use
            # and there is an extension called 'EXCLUSION' ...
            if not inexclusion.has_extname()\
               and not inexclusion.has_extno()\
               and gammalib.GFits(inexclusion).contains('EXCLUSION'):
                # ... choose it for the exclusion map
                extname = inexclusion.url() + '[EXCLUSION]'
                inexclusion = gammalib.GFilename(extname)
            # Otherwise will pick the default (primary) HDU
            self._excl_reg = gammalib.GSkyRegionMap(inexclusion)
            self._has_exclusion = True
        else:
            self._has_exclusion = False

        # Get background method parameters (have to come after setting up of
        # observations and models)
        self._get_parameters_bkgmethod()

        # If there are multiple observations query whether to stack them
        if self.obs().size() > 1:
            self['stack'].boolean()

        # Query ahead output parameters
        if self._read_ahead():
            self['outobs'].filename()
            self['outmodel'].filename()
            self['prefix'].string()

        # Write input parameters into logger
        self._log_parameters(gammalib.TERSE)

        # Set number of processes for multiprocessing
        self._nthreads = mputils.nthreads(self)

        # If we have no model then create now a dummy model
        if self._models.is_empty():
            spatial  = gammalib.GModelSpatialPointSource(self._src_dir)
            spectral = gammalib.GModelSpectralPlaw(1.0e-18, -2.0,
                                                   gammalib.GEnergy(1.0, 'TeV'))
            model    = gammalib.GModelSky(spatial, spectral)
            model.name('Dummy')
            self._models.append(model)
            self._srcname = 'Dummy'
            self['use_model_bkg'].boolean(False)

        # Return
        return
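    # Worked example for the separation computed below (illustrative
    # numbers): a circular On region of radius 0.1 deg at an offset of
    # 1.0 deg from the pointing has an apparent diameter of
    # 2*0.1/1.0 = 0.2 radians, i.e. about 11.5 deg between the centres of
    # neighbouring reflected regions.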
    def _compute_region_separation(self, pnt_dir):
        """
        Compute the separation angle for reflected off regions in radians

        Returns
        -------
        angle : float
            Separation angle of two off regions (radians)
        """
        # Initialise the result
        separation = -1.0

        # Compute offset of reflected regions to pointing position
        offset = pnt_dir.dist_deg(self._src_dir)

        # If shape is a circle then compute apparent diameter of the circle
        # as separation
        if self._srcshape == 'CIRCLE':
            separation = 2.0 * self._rad / offset

        # ... otherwise if shape is a rectangle then compute the opening angle
        # towards combinations of rectangle corners. This method overestimates
        # the real need of space between the rectangles, so the method may be
        # optimised to gain more off regions! Anyway, it is assured that the
        # off regions will never overlap.
        elif self._srcshape == 'RECT':

            # Get the sky directions of the corners of the rectangle
            cs = [self._src_reg[0].corner(icorner) for icorner in range(4)]

            # Compute the 6 opening angles
            combinations = [[0,1], [0,2], [0,3], [1,2], [1,3], [2,3]]
            angles = [self._compute_posang(pnt_dir, cs[i], cs[j])
                      for i, j in combinations]

            # The desired separation is the maximum opening angle
            separation = max(angles) * gammalib.deg2rad

        # Return
        return separation

    def _reflected_regions(self, obs):
        """
        Calculate list of reflected regions for a single observation (pointing)

        Parameters
        ----------
        obs : `~gammalib.GCTAObservation()`
            CTA observation

        Returns
        -------
        regions : `~gammalib.GSkyRegions`
            List of reflected regions
        """
        # Initialise list of reflected regions
        regions = gammalib.GSkyRegions()

        # Get offset angle of source
        pnt_dir = obs.pointing().dir()
        offset  = pnt_dir.dist_deg(self._src_dir)

        # Skip observation if it is too close to source
        if self._src_reg.contains(pnt_dir):
            msg = ' Skip because observation is pointed at %.3f deg from source'\
                  % (offset)
            if self._srcshape == 'CIRCLE':
                msg += ' (circle rad=%.3f).' % (self._rad)
            self._log_string(gammalib.NORMAL, msg)

        # ... otherwise
        else:
            posang = pnt_dir.posang_deg(self._src_dir)
            if (self._srcshape == 'CIRCLE') or (self._srcshape == 'RECT'):

                # Determine number of background regions to skip
                N_skip = self['bkgregskip'].integer()
                N_lim  = 1 + 2 * N_skip

                # Compute the angular separation of reflected regions wrt
                # camera center. The factor 1.05 ensures background regions
                # do not overlap due to numerical precision issues
                alpha = 1.05 * self._compute_region_separation(pnt_dir)

                # Compute number of reflected regions by dividing the angular
                # separation by 2 pi.
                N = int(2.0 * math.pi / alpha)

                # If there are not enough reflected regions then skip the
                # observation ...
                if N < self['bkgregmin'].integer() + N_lim:
                    msg = ' Skip because the number %d of reflected regions '\
                          'for background estimation is smaller than '\
                          '"bkgregmin"=%d.' % (N-N_lim, self['bkgregmin'].integer())
                    self._log_string(gammalib.NORMAL, msg)

                # ... otherwise loop over position angle to create reflected
                # regions
                else:

                    # Append reflected regions
                    alpha    = 360.0 / N
                    dphi_max = 360.0 - alpha * (1 + N_skip)
                    dphi     = alpha * (1 + N_skip)
                    while dphi <= dphi_max:
                        ctr_dir = pnt_dir.clone()
                        ctr_dir.rotate_deg(posang + dphi, offset)
                        if self._srcshape == 'CIRCLE':
                            region = gammalib.GSkyRegionCircle(ctr_dir, self._rad)
                        elif self._srcshape == 'RECT':
                            # Adjust the posang of the rectangle correspondingly
                            region = gammalib.GSkyRegionRectangle(ctr_dir,
                                                                  self._reg_width,
                                                                  self._reg_height,
                                                                  self._reg_posang + dphi)
                        if self._has_exclusion:
                            if self._excl_reg.overlaps(region):

                                # Signal region overlap
                                msg = ' Reflected region overlaps with '\
                                      'exclusion region.'
                                self._log_string(gammalib.EXPLICIT, msg)

                                # If region overlaps with exclusion region
                                # try to increment by 10% of angular step
                                dphi += 0.1 * alpha

                            else:
                                regions.append(region)
                                dphi += alpha
                        else:
                            regions.append(region)
                            dphi += alpha

                    # Check again that we have enough background regions
                    # now that we have checked for overlap with exclusion region
                    if regions.size() >= self['bkgregmin'].integer():

                        # Log number of reflected regions
                        msg = ' Use %d reflected regions.' % (regions.size())
                        self._log_string(gammalib.NORMAL, msg)

                    # Otherwise log observation skipped and return empty
                    # region container
                    else:
                        msg = ' Skip because the number %d of regions '\
                              'for background estimation not overlapping '\
                              'with the exclusion region is smaller than '\
                              '"bkgregmin"=%d.' % \
                              (regions.size(), self['bkgregmin'].integer())
                        self._log_string(gammalib.NORMAL, msg)
                        regions = gammalib.GSkyRegions()

        # Return reflected regions
        return regions
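    # Continuing the example above: a separation of 0.2 rad gives
    # N = int(2*pi / (1.05*0.2)) = 29 candidate positions on the offset
    # circle. With 'bkgregskip'=1 the On position and the two regions next
    # to it are excluded (N_lim = 3), leaving at most 26 reflected Off
    # regions before the exclusion-map check.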
    def _instrument_regions(self, obs, obs_off):
        """
        Compute background regions for Off observation

        Calculate background region in Off observation that corresponds to
        the source region in the On observation in instrument coordinates

        Parameters
        ----------
        obs : `~gammalib.GCTAObservation()`
            On CTA observation
        obs_off : `~gammalib.GCTAObservation()`
            Off CTA observation

        Returns
        -------
        regions : `~gammalib.GSkyRegions`
            Container with background region
        """
        # Initialise region container
        regions = gammalib.GSkyRegions()

        # Convert source position in On observation to instrument coordinates
        instdir = obs.pointing().instdir(self._src_dir)

        # Convert instrument position to sky direction for Off observation
        off_dir = obs_off.pointing().skydir(instdir)

        # Build region according to shape specified by user
        # If circle
        if self._srcshape == 'CIRCLE':
            region = gammalib.GSkyRegionCircle(off_dir, self._rad)

        # ... otherwise if rectangle
        elif self._srcshape == 'RECT':
            # Instrument coordinates take sky direction as reference
            # so no need to change the position angle
            region = gammalib.GSkyRegionRectangle(off_dir,
                                                  self._reg_width,
                                                  self._reg_height,
                                                  self._reg_posang)

        # Check if background region overlaps with exclusion region
        is_valid = True
        if self._has_exclusion:
            if self._excl_reg.overlaps(region):

                # Signal region overlap
                msg = ' Background region overlaps with exclusion region.'
                self._log_string(gammalib.EXPLICIT, msg)
                is_valid = False

        # If region is valid then append it to container
        if is_valid:
            regions.append(region)

        # Return
        return regions
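    # The round trip used above, schematically:
    #
    #     instdir = obs.pointing().instdir(self._src_dir)    # sky -> camera
    #     off_dir = obs_off.pointing().skydir(instdir)       # camera -> sky
    #
    # places the Off region at the same detector coordinates in the Off run
    # as the On region occupies in the On run, so that the camera acceptance
    # largely cancels in the On/Off comparison.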
    def _set_models(self, results):
        """
        Set models for On/Off fitting

        The method does the following
        - append "OnOff" to the instrument name of all background models
        - fix all spatial and temporal parameters

        Parameters
        ----------
        results : list of dict
            Result dictionaries

        Returns
        -------
        models : `~gammalib.GModels()`
            Model container
        """
        # Write header
        self._log_header1(gammalib.NORMAL, 'Set models')

        # Initialise model container
        models = gammalib.GModels()

        # Initialise stacked model flag
        has_stacked_model = False

        # Loop over all models in observation and append "OnOff" to instrument
        # names
        for model in self._models:

            # Initialise model usage
            use_model = False

            # If model is a background model then check if it will be
            # used
            if 'GCTA' in model.classname():

                # Skip model if background model should not be used
                if not self['use_model_bkg'].boolean():
                    self._log_string(gammalib.NORMAL, ' Skip "%s" model "%s" (%s)' %
                                     (model.instruments(), model.name(), model.ids()))
                    continue

                # Check if model corresponds to one of the relevant
                # observations
                for result in results:
                    if model.is_valid(result['instrument'], result['id']):
                        if result['bkg_reg'].size() > 0:
                            use_model = True
                        break

                # If stacked analysis is requested then just use one model
                # and remove instrument ID
                if self['stack'].boolean():

                    # If there is already a model for stacked analysis then
                    # skip this one
                    if has_stacked_model:
                        msg = ' Skip "%s" model "%s" (%s). There is already ' \
                              'a model for stacked analysis.' % \
                              (model.instruments(), model.name(), model.ids())
                        self._log_string(gammalib.NORMAL, msg)
                        continue

                    # ... otherwise use model for stacked analysis
                    else:
                        has_stacked_model = True
                        use_model         = True
                        model.ids('')

                # Append "OnOff" to instrument name
                model.instruments(model.instruments()+'OnOff')

            # ... otherwise, if model is not a background model then use it
            else:
                use_model = True

            # If model is relevant then append it now to the model
            # container
            if use_model:

                # Log model usage
                self._log_string(gammalib.NORMAL, ' Use "%s" model "%s" (%s)' %
                                 (model.instruments(), model.name(), model.ids()))

                # Append model to container
                models.append(model)

            # ... otherwise signal that model is skipped
            else:
                self._log_string(gammalib.NORMAL, ' Skip "%s" model "%s" (%s)' %
                                 (model.instruments(), model.name(), model.ids()))

        # Return model container
        return models

    def _set_statistic(self, obs):
        """
        Set statistic for observation

        If "use_model_bkg" is true then set statistic to "cstat",
        otherwise set it to "wstat"

        Parameters
        ----------
        obs : `~gammalib.GObservation()`
            Observation

        Returns
        -------
        obs : `~gammalib.GObservation()`
            Observation
        """
        # Set statistic according to background model usage
        if self['use_model_bkg'].boolean():
            obs.statistic('cstat')
        else:
            obs.statistic('wstat')

        # Return observation
        return obs

    def _etrue_ebounds(self):
        """
        Set true energy boundaries

        Returns
        -------
        ebounds : `~gammalib.GEbounds()`
            True energy boundaries
        """
        # Determine minimum and maximum energies
        emin = self._ebounds.emin() * 0.5
        emax = self._ebounds.emax() * 1.2
        if emin.TeV() < self['etruemin'].real():
            emin = gammalib.GEnergy(self['etruemin'].real(), 'TeV')
        if emax.TeV() > self['etruemax'].real():
            emax = gammalib.GEnergy(self['etruemax'].real(), 'TeV')

        # Determine number of energy bins
        n_decades = (emax.log10TeV() - emin.log10TeV())
        n_bins    = int(n_decades * float(self['etruebins'].integer()) + 0.5)
        if n_bins < 1:
            n_bins = 1

        # Set energy boundaries
        ebounds = gammalib.GEbounds(n_bins, emin, emax)

        # Write header
        self._log_header1(gammalib.TERSE, 'True energy binning')

        # Log true energy bins
        for i in range(ebounds.size()):
            value = '%s - %s' % (str(ebounds.emin(i)), str(ebounds.emax(i)))
            self._log_value(gammalib.TERSE, 'Bin %d' % (i+1), value)

        # Return energy boundaries
        return ebounds
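    # Worked example for the true energy binning above (illustrative
    # numbers, assuming 'etruemin'/'etruemax' do not clip): reconstructed
    # energies of 1-100 TeV give emin = 0.5 TeV and emax = 120 TeV; with
    # 'etruebins'=10 bins per decade this spans log10(120/0.5) = 2.38
    # decades and yields int(23.8 + 0.5) = 24 bins.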
    def _set_background_regions(self, obs, obs_off=None):
        """
        Set background regions for an observation

        Parameters
        ----------
        obs : `~gammalib.GCTAObservation()`
            CTA observation
        obs_off : `~gammalib.GCTAObservation()`, optional
            Off CTA observation

        Returns
        -------
        regions : `~gammalib.GSkyRegions()`
            Background regions
        """
        # Initialise empty background regions for this observation
        bkg_reg = gammalib.GSkyRegions()

        # If reflected background is requested then create reflected
        # background regions
        if self['bkgmethod'].string() == 'REFLECTED':
            bkg_reg = self._reflected_regions(obs)

        # ... otherwise if custom background is requested then get the
        # background regions from the observation. We use a copy here since
        # otherwise the background regions go out of scope once the
        # observations are replaced by the On/Off observations.
        elif self['bkgmethod'].string() == 'CUSTOM':
            bkg_reg = obs.off_regions().copy()

        # ... otherwise if dedicated Off runs are used then use background
        # regions that correspond to the same instrument coordinates
        elif self['bkgmethod'].string() == 'OFF':
            bkg_reg = self._instrument_regions(obs, obs_off)

        # Return background regions
        return bkg_reg

    def _process_observation(self, i):
        """
        Generate On/Off spectra for individual observation

        Parameters
        ----------
        i : int
            Observation number

        Returns
        -------
        result : dict
            On/Off spectra, background regions, observation id
        """
        # Retrieve observation from container
        onoff   = None
        bkg_reg = None
        obs     = self.obs()[i]

        # Retrieve dedicated Off observation if it exists
        if not self.obs_off().is_empty():
            obs_off = self.obs_off()[i]

        # Otherwise use the same as On
        else:
            obs_off = self.obs()[i]

        # Log header
        self._log_header3(gammalib.NORMAL, '%s observation "%s"' %
                          (obs.instrument(), obs.id()))

        # Skip non CTA observations
        if obs.classname() != 'GCTAObservation':
            self._log_string(gammalib.NORMAL, ' Skip because not a "GCTAObservation"')

        # Otherwise calculate On/Off spectra
        else:

            # Get background model usage flag and log flag
            use_model_bkg = self['use_model_bkg'].boolean()
            if use_model_bkg:
                msg = ' Use background model.'
            else:
                msg = ' Background model not used, assume constant background rate.'
            self._log_string(gammalib.NORMAL, msg)

            # Get offset angle of source
            pnt_dir = obs.pointing().dir()
            offset  = pnt_dir.dist_deg(self._src_dir)

            # Skip observation if it is pointed too far from the source
            if offset >= self['maxoffset'].real():
                msg = ' Skip because observation is pointed at %.3f deg >= ' \
                      '"maxoffset=%.3f" from source.' \
                      % (offset, self['maxoffset'].real())
                self._log_string(gammalib.NORMAL, msg)

            # ... otherwise continue to process
            else:

                # Set background regions for this observation
                bkg_reg = self._set_background_regions(obs, obs_off)

                # If there are any background regions then create On/Off
                # observation and append it to the output container
                if bkg_reg.size() > 0:

                    # Create On/Off observation
                    onoff = gammalib.GCTAOnOffObservation(obs, obs_off,
                                                          self._models,
                                                          self._srcname,
                                                          self._etruebounds,
                                                          self._ebounds,
                                                          self._src_reg,
                                                          bkg_reg,
                                                          use_model_bkg)

                    # Set On/Off observation ID
                    onoff.id(obs.id())

                # Otherwise log observation skipped
                else:
                    msg = ' Skip because no valid Off regions could be determined'
                    self._log_string(gammalib.NORMAL, msg)

        # Construct dictionary with results
        result = {'onoff'     : onoff,
                  'bkg_reg'   : bkg_reg,
                  'instrument': obs.instrument(),
                  'id'        : obs.id()}

        # Return results
        return result

    def _unpack_result(self, outobs, result):
        """
        Unpack result from calculation of On/Off regions

        Parameters
        ----------
        outobs : `~gammalib.GObservations`
            Observation container
        result : dict
            On/Off spectra, background regions, observation id

        Returns
        -------
        outobs : `~gammalib.GObservations`
            Observation container with result appended
        """
        # Continue only if result is valid
        if result['onoff'] is not None:

            # If the results contain an On/Off observation
            if result['onoff'].classname() == 'GCTAOnOffObservation':

                # Set statistic according to background model usage
                obs = self._set_statistic(result['onoff'])

                # Append observation to observation container
                outobs.append(obs)

                # Append background regions
                self._bkg_regs.append({'regions': result['bkg_reg'],
                                       'id': result['id']})

        # Return observation container
        return outobs
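    # The public run() method below ties the private pieces together: query
    # the parameters, set up the true energy binning, build the On/Off
    # spectra per observation (in parallel when nthreads > 1), optionally
    # stack the resulting observations, and finally attach the models
    # prepared for On/Off fitting.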
    # Public methods
    def run(self):
        """
        Run the script
        """
        # Switch screen logging on in debug mode
        if self._logDebug():
            self._log.cout(True)

        # Get parameters
        self._get_parameters()

        # Write observation into logger
        self._log_observations(gammalib.NORMAL, self.obs(), 'Observation')
        if not self.obs_off().is_empty():
            self._log_observations(gammalib.NORMAL, self._obs_off, 'Off Observation')

        # Set true energy bins
        self._etruebounds = self._etrue_ebounds()

        # Write header
        self._log_header1(gammalib.TERSE, 'Spectral binning')

        # Log reconstructed energy bins
        for i in range(self._ebounds.size()):
            value = '%s - %s' % (str(self._ebounds.emin(i)),
                                 str(self._ebounds.emax(i)))
            self._log_value(gammalib.TERSE, 'Bin %d' % (i+1), value)

        # Write header
        self._log_header1(gammalib.NORMAL,
                          'Generation of source and background spectra')

        # Initialise run variables
        outobs         = gammalib.GObservations()
        self._bkg_regs = []
        results        = []

        # If there is more than one observation and we use multiprocessing
        if self._nthreads > 1 and self.obs().size() > 1:

            # Compute observations
            args        = [(self, '_process_observation', i)
                           for i in range(self.obs().size())]
            poolresults = mputils.process(self._nthreads, mputils.mpfunc, args)

            # Construct results
            for i in range(self.obs().size()):
                result = poolresults[i][0]
                outobs = self._unpack_result(outobs, result)
                results.append(result)
                self._log_string(gammalib.TERSE, poolresults[i][1]['log'], False)

        # Otherwise, loop through observations and generate pha, arf, rmf files
        else:
            for i in range(self.obs().size()):

                # Process individual observation
                result = self._process_observation(i)
                outobs = self._unpack_result(outobs, result)
                results.append(result)

        # Stack observations
        if outobs.size() > 1 and self['stack'].boolean():

            # Write header
            self._log_header1(gammalib.NORMAL,
                              'Stacking %d observations' % (outobs.size()))

            # Stack observations
            stacked_obs = gammalib.GCTAOnOffObservation(outobs)

            # Set statistic according to background model usage
            stacked_obs = self._set_statistic(stacked_obs)

            # Put stacked observations in output container
            outobs = gammalib.GObservations()
            outobs.append(stacked_obs)

        # Create models that allow On/Off fitting
        models = self._set_models(results)

        # Set models in output container
        outobs.models(models)

        # Set observation container
        self.obs(outobs)

        # Return
        return
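    # For illustration (hypothetical values): with prefix='onoff' and a single
    # observation, save() below writes onoff_pha_on.fits, onoff_pha_off.fits,
    # onoff_arf.fits and onoff_rmf.fits; with several observations the
    # observation id is inserted (e.g. onoff_000001_pha_on.fits), and in
    # stacked mode the id is replaced by 'stacked'.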
    def save(self):
        """
        Save data
        """
        # Write header
        self._log_header1(gammalib.TERSE, 'Save data')

        # Get XML output filename, prefix and clobber
        outobs   = self['outobs'].filename()
        outmodel = self['outmodel'].filename()
        prefix   = self['prefix'].string()
        clobber  = self['clobber'].boolean()

        # Loop over all observation in container
        for obs in self.obs():

            # Set filenames
            if self['stack'].boolean():
                onname  = prefix + '_stacked_pha_on.fits'
                offname = prefix + '_stacked_pha_off.fits'
                arfname = prefix + '_stacked_arf.fits'
                rmfname = prefix + '_stacked_rmf.fits'
            elif self.obs().size() > 1:
                onname  = prefix + '_%s_pha_on.fits' % (obs.id())
                offname = prefix + '_%s_pha_off.fits' % (obs.id())
                arfname = prefix + '_%s_arf.fits' % (obs.id())
                rmfname = prefix + '_%s_rmf.fits' % (obs.id())
            else:
                onname  = prefix + '_pha_on.fits'
                offname = prefix + '_pha_off.fits'
                arfname = prefix + '_arf.fits'
                rmfname = prefix + '_rmf.fits'

            # Set background and response file names in On spectrum
            obs.on_spec().backfile(offname)
            obs.on_spec().respfile(rmfname)
            obs.on_spec().ancrfile(arfname)

            # Save files
            obs.on_spec().save(onname, clobber)
            obs.off_spec().save(offname, clobber)
            obs.arf().save(arfname, clobber)
            obs.rmf().save(rmfname, clobber)

            # Stamp files
            self._stamp(onname)
            self._stamp(offname)
            self._stamp(arfname)
            self._stamp(rmfname)

            # Log file names
            self._log_value(gammalib.NORMAL, 'PHA on file', onname)
            self._log_value(gammalib.NORMAL, 'PHA off file', offname)
            self._log_value(gammalib.NORMAL, 'ARF file', arfname)
            self._log_value(gammalib.NORMAL, 'RMF file', rmfname)

        # Save observation definition XML file
        self.obs().save(outobs)

        # Save model definition XML file
        self.obs().models().save(outmodel)

        # Log file names
        self._log_value(gammalib.NORMAL, 'Obs. definition XML file', outobs.url())
        self._log_value(gammalib.NORMAL, 'Model definition XML file', outmodel.url())

        # Save ds9 On region file
        regname = prefix + '_on.reg'
        self._src_reg.save(regname)
        self._log_value(gammalib.NORMAL, 'On region file', regname)

        # Save ds9 Off region files
        for bkg_reg in self._bkg_regs:

            # Set filename
            if len(self._bkg_regs) > 1:
                regname = prefix + '_%s_off.reg' % (bkg_reg['id'])
            else:
                regname = prefix + '_off.reg'

            # Save ds9 region file
            bkg_reg['regions'].save(regname)

            # Log file name
            self._log_value(gammalib.NORMAL, 'Off region file', regname)

        # Return
        return

    def exclusion_map(self, object=None):
        """
        Return and optionally set the exclusion regions map

        Parameters
        ----------
        object : `~gammalib.GSkyRegion` or `~gammalib.GSkyMap` or `~gammalib.GFilename`
            Exclusion regions object

        Returns
        -------
        region : `~gammalib.GSkyRegionMap`
            Exclusion regions map
        """
        # If a regions object is provided then set the regions ...
        if object is not None:
            self._excl_reg = gammalib.GSkyRegionMap(object)

        # Return
        return self._excl_reg

    def obs_off(self, obs=None):
        """
        Return and optionally set the Off observations

        Parameters
        ----------
        obs : `~gammalib.GCTAObservations`
            Off observations container

        Returns
        -------
        observation container : `~gammalib.GCTAObservations`
            Off observations container
        """
        # If an observation container is provided then set the Off observations ...
        if obs is not None:
            self._obs_off = obs

        # Return
        return self._obs_off


# ======================== #
# Main routine entry point #
# ======================== #
if __name__ == '__main__':

    # Create instance of application
    app = csphagen(sys.argv)

    # Execute application
    app.execute()
gpl-3.0
-5,513,716,661,560,983,000
34.768174
99
0.522549
false
4.524934
false
false
false
uclouvain/osis
base/tests/views/test_learning_unit_proposal.py
1
55572
##############################################################################
#
#    OSIS stands for Open Student Information System. It's an application
#    designed to manage the core business of higher education institutions,
#    such as universities, faculties, institutes and professional schools.
#    The core business involves the administration of students, teachers,
#    courses, programs and so on.
#
#    Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    A copy of this license - GNU General Public License - is available
#    at the root of the source code of this program.  If not,
#    see http://www.gnu.org/licenses/.
#
##############################################################################
import datetime
from unittest import mock

from django.contrib import messages
from django.contrib.messages import get_messages
from django.contrib.messages.storage.fallback import FallbackStorage
from django.http import HttpResponseNotFound, HttpResponse, HttpResponseForbidden
from django.test import TestCase, RequestFactory
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from waffle.testutils import override_flag

from attribution.tests.factories.attribution_charge_new import AttributionChargeNewFactory
from attribution.tests.factories.attribution_new import AttributionNewFactory
from base.business import learning_unit_proposal as proposal_business
from base.business.learning_unit_proposal import INITIAL_DATA_FIELDS, copy_learning_unit_data
from base.forms.learning_unit.edition import LearningUnitProposalEndDateForm
from base.forms.learning_unit_proposal import ProposalLearningUnitForm
from base.models import proposal_learning_unit
from base.models.academic_year import AcademicYear
from base.models.enums import groups
from base.models.enums import learning_component_year_type
from base.models.enums import learning_unit_year_periodicity
from base.models.enums import organization_type, entity_type, \
    learning_unit_year_subtypes, proposal_type, learning_container_year_types, proposal_state
from base.models.enums.proposal_state import ProposalState, LimitedProposalState
from base.models.enums.proposal_type import ProposalType
from base.tests.factories import campus as campus_factory, organization as organization_factory, \
    person as person_factory
from base.tests.factories.academic_calendar import generate_proposal_calendars, \
    generate_proposal_calendars_without_start_and_end_date
from base.tests.factories.academic_year import create_current_academic_year, \
    AcademicYearFactory
from base.tests.factories.business.learning_units import GenerateContainer
from base.tests.factories.campus import CampusFactory
from base.tests.factories.entity import EntityFactory
from base.tests.factories.entity_version import EntityVersionFactory
from base.tests.factories.group import CentralManagerGroupFactory, FacultyManagerGroupFactory
from base.tests.factories.learning_component_year import LearningComponentYearFactory
from base.tests.factories.learning_container_year import LearningContainerYearFactory
from base.tests.factories.learning_unit import LearningUnitFactory
from base.tests.factories.learning_unit_year import LearningUnitYearFactory
from base.tests.factories.learning_unit_year import LearningUnitYearFakerFactory
from base.tests.factories.organization import OrganizationFactory
from base.tests.factories.person import PersonFactory
from base.tests.factories.proposal_learning_unit import ProposalLearningUnitFactory
from base.tests.factories.tutor import TutorFactory
from base.tests.factories.user import UserFactory
from base.views.learning_units.proposal.update import update_learning_unit_proposal, \
    learning_unit_modification_proposal, learning_unit_suppression_proposal
from base.views.learning_units.search.proposal import ACTION_CONSOLIDATE, ACTION_BACK_TO_INITIAL, ACTION_FORCE_STATE
from learning_unit.tests.factories.central_manager import CentralManagerFactory
from learning_unit.tests.factories.faculty_manager import FacultyManagerFactory
from reference.tests.factories.language import LanguageFactory, FrenchLanguageFactory

LABEL_VALUE_BEFORE_PROPOSAL = _('Value before proposal')


@override_flag('learning_unit_proposal_update', active=True)
class TestLearningUnitModificationProposal(TestCase):
    @classmethod
    def setUpTestData(cls):
        academic_years = AcademicYearFactory.produce(number_past=3, number_future=10)
        an_organization = OrganizationFactory(type=organization_type.MAIN)
        current_academic_year = create_current_academic_year()
        generate_proposal_calendars_without_start_and_end_date(academic_years)

        cls.entity_version = EntityVersionFactory(
            entity__organization=an_organization,
            entity_type=entity_type.FACULTY,
            start_date=current_academic_year.start_date,
            end_date=current_academic_year.end_date
        )
        learning_container_year = LearningContainerYearFactory(
            acronym="LOSIS1212",
            academic_year=current_academic_year,
            container_type=learning_container_year_types.COURSE,
            requirement_entity=cls.entity_version.entity,
            allocation_entity=cls.entity_version.entity,
            additional_entity_1=cls.entity_version.entity,
            additional_entity_2=cls.entity_version.entity,
        )
        cls.learning_unit_year = LearningUnitYearFakerFactory(
            acronym=learning_container_year.acronym,
            subtype=learning_unit_year_subtypes.FULL,
            academic_year=current_academic_year,
            learning_container_year=learning_container_year,
            quadrimester=None,
            specific_title_english="title english",
            campus=CampusFactory(organization=an_organization, is_administration=True),
            internship_subtype=None
        )

        cls.person = FacultyManagerFactory(entity=cls.entity_version.entity).person

        cls.url = reverse(learning_unit_modification_proposal, args=[cls.learning_unit_year.id])
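        # The form data below represents a complete submission of the
        # modification form: besides the learning unit and container fields
        # it carries the Django management form of the component formset
        # ('component-TOTAL_FORMS' etc.) plus the volumes of the two
        # component rows.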
cls.entity_version.id, "folder_id": "1", "state": proposal_state.ProposalState.FACULTY.name, 'requirement_entity': cls.entity_version.id, 'allocation_entity': cls.entity_version.id, 'additional_entity_1': cls.entity_version.id, 'additionanl_entity_2': cls.entity_version.id, # Learning component year data model form 'component-TOTAL_FORMS': '2', 'component-INITIAL_FORMS': '0', 'component-MAX_NUM_FORMS': '2', 'component-0-hourly_volume_total_annual': 20, 'component-0-hourly_volume_partial_q1': 10, 'component-0-hourly_volume_partial_q2': 10, 'component-0-planned_classes': 1, 'component-1-hourly_volume_total_annual': 20, 'component-1-hourly_volume_partial_q1': 10, 'component-1-hourly_volume_partial_q2': 10, 'component-1-planned_classes': 1, } cls.academic_year_for_suppression_proposal = AcademicYear.objects.filter( year=cls.learning_unit_year.academic_year.year - 1) def setUp(self): self.client.force_login(self.person.user) def test_user_not_logged(self): self.client.logout() response = self.client.get(self.url) self.assertRedirects(response, '/login/?next={}'.format(self.url)) def test_user_has_not_permission(self): person = person_factory.PersonFactory() self.client.force_login(person.user) response = self.client.get(self.url) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) self.assertTemplateUsed(response, "access_denied.html") def test_get_request(self): response = self.client.get(self.url) self.assertEqual(response.status_code, HttpResponse.status_code) self.assertTemplateUsed(response, 'learning_unit/proposal/create_modification.html') self.assertEqual(response.context['learning_unit_year'], self.learning_unit_year) self.assertEqual(response.context['person'], self.person) self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm) luy_initial = response.context['learning_unit_year_form'].initial lcy_initial = response.context['learning_container_year_form'].initial self.assertEqual(luy_initial['academic_year'], self.learning_unit_year.academic_year.id) self.assertEqual(luy_initial['acronym'], [ self.learning_unit_year.acronym[0], self.learning_unit_year.acronym[1:]]) self.assertEqual(luy_initial['specific_title'], self.learning_unit_year.specific_title) self.assertEqual(lcy_initial['container_type'], self.learning_unit_year. 
    def test_get_request(self):
        response = self.client.get(self.url)

        self.assertEqual(response.status_code, HttpResponse.status_code)
        self.assertTemplateUsed(response, 'learning_unit/proposal/create_modification.html')

        self.assertEqual(response.context['learning_unit_year'], self.learning_unit_year)
        self.assertEqual(response.context['person'], self.person)

        self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)

        luy_initial = response.context['learning_unit_year_form'].initial
        lcy_initial = response.context['learning_container_year_form'].initial
        self.assertEqual(luy_initial['academic_year'], self.learning_unit_year.academic_year.id)
        self.assertEqual(luy_initial['acronym'],
                         [self.learning_unit_year.acronym[0], self.learning_unit_year.acronym[1:]])
        self.assertEqual(luy_initial['specific_title'], self.learning_unit_year.specific_title)
        self.assertEqual(lcy_initial['container_type'],
                         self.learning_unit_year.learning_container_year.container_type)
        self.assertEqual(luy_initial['credits'], self.learning_unit_year.credits)
        self.assertEqual(luy_initial['periodicity'], self.learning_unit_year.periodicity)
        self.assertEqual(luy_initial['status'], self.learning_unit_year.status)
        self.assertEqual(luy_initial['language'], self.learning_unit_year.language.pk)
        self.assertEqual(luy_initial['campus'], self.learning_unit_year.campus.id)

    def test_post_request_with_invalid_form(self):
        response = self.client.post(self.url, data={})

        self.assertEqual(response.status_code, HttpResponse.status_code)
        self.assertTemplateUsed(response, 'learning_unit/proposal/create_modification.html')

        self.assertEqual(response.context['learning_unit_year'], self.learning_unit_year)
        self.assertEqual(response.context['person'], self.person)
        self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)

    def test_post_request(self):
        response = self.client.post(self.url, data=self.form_data)

        redirected_url = reverse("learning_unit", args=[self.learning_unit_year.id])
        self.assertRedirects(response, redirected_url, fetch_redirect_response=False)

        a_proposal_learning_unit = proposal_learning_unit.find_by_learning_unit_year(self.learning_unit_year)
        self.assertTrue(a_proposal_learning_unit)
        self.assertEqual(a_proposal_learning_unit.author, self.person)

        messages_list = [str(message) for message in get_messages(response.wsgi_request)]
        self.assertIn(
            _("You proposed a modification of type %(type)s for the learning unit %(acronym)s.") % {
                'type': proposal_type.ProposalType.MODIFICATION.value,
                'acronym': self.learning_unit_year.acronym
            },
            list(messages_list))

    def test_initial_data_fields(self):
        expected_initial_data_fields = {
            'learning_container_year': [
                "id", "acronym", "common_title", "container_type", "in_charge",
                "common_title_english", "team",
                'requirement_entity', 'allocation_entity',
                'additional_entity_1', 'additional_entity_2',
            ],
            'learning_unit': [
                "id", "end_year",
            ],
            'learning_unit_year': [
                "id", "acronym", "specific_title", "internship_subtype", "credits",
                "campus", "language", "periodicity", "status", "professional_integration",
                "specific_title", "specific_title_english", "quadrimester", "session",
                "faculty_remark", "other_remark", "other_remark_english"
            ],
            'learning_component_year': [
                "id", "acronym", "hourly_volume_total_annual", "hourly_volume_partial_q1",
                "hourly_volume_partial_q2", "planned_classes", "type",
                "repartition_volume_requirement_entity",
                "repartition_volume_additional_entity_1",
                "repartition_volume_additional_entity_2"
            ],
        }
        self.assertEqual(expected_initial_data_fields, INITIAL_DATA_FIELDS)

    def test_proposal_already_exists(self):
        ProposalLearningUnitFactory(learning_unit_year=self.learning_unit_year)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
        self.assertTemplateUsed(response, "access_denied.html")
@override_flag('learning_unit_proposal_update', active=True)
class TestLearningUnitSuppressionProposal(TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.academic_years = AcademicYearFactory.produce(number_past=3, number_future=10)
        an_organization = OrganizationFactory(type=organization_type.MAIN)
        cls.current_academic_year = cls.academic_years[4]
        cls.next_academic_year = cls.academic_years[5]
        cls.previous_academic_year = cls.academic_years[3]
        generate_proposal_calendars(cls.academic_years)

        cls.entity_version = EntityVersionFactory(
            entity__organization=an_organization,
            entity_type=entity_type.FACULTY,
            start_date=cls.academic_years[0].start_date,
            end_date=cls.academic_years[-1].end_date
        )
        learning_container_years = [
            LearningContainerYearFactory(
                academic_year=year,
                container_type=learning_container_year_types.COURSE,
                requirement_entity=cls.entity_version.entity,
                allocation_entity=cls.entity_version.entity,
            ) for year in [cls.previous_academic_year, cls.current_academic_year]
        ]

        cls.learning_unit = LearningUnitFactory(
            start_year=AcademicYear.objects.first(),
            end_year=None
        )
        cls.learning_unit_year = LearningUnitYearFakerFactory(
            acronym="LOSIS1212",
            subtype=learning_unit_year_subtypes.FULL,
            academic_year=cls.current_academic_year,
            learning_container_year=learning_container_years[1],
            quadrimester=None,
            learning_unit=cls.learning_unit,
            campus=CampusFactory(
                organization=an_organization,
                is_administration=True
            ),
            periodicity=learning_unit_year_periodicity.ANNUAL
        )
        cls.previous_learning_unit_year = LearningUnitYearFakerFactory(
            acronym="LOSIS1212",
            subtype=learning_unit_year_subtypes.FULL,
            academic_year=cls.previous_academic_year,
            learning_container_year=learning_container_years[0],
            quadrimester=None,
            learning_unit=cls.learning_unit,
            campus=cls.learning_unit_year.campus,
            periodicity=learning_unit_year_periodicity.ANNUAL
        )

        cls.person = CentralManagerFactory(entity=cls.entity_version.entity).person
        cls.url = reverse(learning_unit_suppression_proposal, args=[cls.learning_unit_year.id])
        cls.academic_year_for_suppression_proposal = AcademicYear.objects.filter(
            year=cls.learning_unit_year.academic_year.year - 1)
        cls.form_data = {
            "academic_year": cls.academic_year_for_suppression_proposal.first().id,
            "entity": cls.entity_version.id,
            "folder_id": "1",
            "state": ProposalState.FACULTY.name
        }

    def setUp(self):
        self.client.force_login(self.person.user)

    def test_get_request(self):
        response = self.client.get(self.url)

        self.assertEqual(response.status_code, HttpResponse.status_code)
        self.assertTemplateUsed(response, 'learning_unit/proposal/create_suppression.html')

        self.assertEqual(response.context['learning_unit_year'], self.learning_unit_year)
        self.assertEqual(response.context['person'], self.person)

        self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)
        self.assertIsInstance(response.context['form_end_date'], LearningUnitProposalEndDateForm)
        self.assertCountEqual(
            list(response.context['form_end_date'].fields['academic_year'].queryset),
            list(self.academic_year_for_suppression_proposal)
        )

        form_proposal = response.context['form_proposal']
        form_end_date = response.context['form_end_date']

        self.assertEqual(form_end_date.fields['academic_year'].initial, None)
        self.assertTrue(form_end_date.fields['academic_year'].required)

        self.assertEqual(form_proposal.fields['folder_id'].initial, None)
        self.assertEqual(form_proposal.fields['entity'].initial, None)

    def test_get_request_first_year_of_UE(self):
        url = reverse(learning_unit_suppression_proposal, args=[self.previous_learning_unit_year.id])
        response = self.client.get(url)

        redirected_url = reverse("learning_unit", args=[self.previous_learning_unit_year.id])
        self.assertRedirects(response, redirected_url)

        msgs = [str(message) for message in get_messages(response.wsgi_request)]
        self.assertEqual(
            msgs[0],
            _("You cannot put in proposal for ending date on the first year of the learning unit.")
        )

    def test_get_request_on_UE_with_end_date(self):
        self.learning_unit.end_year = self.next_academic_year
        self.learning_unit.save()
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, HttpResponse.status_code)
    def test_get_request_academic_year_list_in_form_for_central_manager(self):
        person_factory.add_person_to_groups(self.person, [groups.CENTRAL_MANAGER_GROUP])
        response = self.client.get(self.url)
        self.assertCountEqual(
            list(response.context['form_end_date'].fields['academic_year'].queryset),
            list(self.academic_year_for_suppression_proposal)
        )

    def test_post_request(self):
        response = self.client.post(self.url, data=self.form_data)

        redirected_url = reverse("learning_unit", args=[self.learning_unit_year.id])
        self.assertRedirects(response, redirected_url, fetch_redirect_response=False)

        a_proposal_learning_unit = proposal_learning_unit.find_by_learning_unit_year(self.learning_unit_year)
        self.assertTrue(a_proposal_learning_unit)
        self.assertEqual(a_proposal_learning_unit.author, self.person)

        messages = [str(message) for message in get_messages(response.wsgi_request)]
        self.assertIn(
            _("You proposed a modification of type %(type)s for the learning unit %(acronym)s.") % {
                'type': proposal_type.ProposalType.SUPPRESSION.value,
                'acronym': self.learning_unit_year.acronym
            },
            list(messages)
        )

        self.learning_unit.refresh_from_db()
        self.assertEqual(self.learning_unit.end_year, self.academic_year_for_suppression_proposal.first())


class TestLearningUnitProposalSearch(TestCase):
    @classmethod
    def setUpTestData(cls):
        AcademicYearFactory.produce(number_past=3, number_future=10)
        cls.person = person_factory.PersonWithPermissionsFactory("can_propose_learningunit",
                                                                 "can_access_learningunit")
        ac_years = AcademicYearFactory.produce_in_future(quantity=3)
        cls.an_entity = EntityFactory()
        cls.entity_version = EntityVersionFactory(entity=cls.an_entity,
                                                  entity_type=entity_type.SCHOOL,
                                                  start_date=ac_years[0].start_date,
                                                  end_date=ac_years[1].end_date)
        cls.proposals = [_create_proposal_learning_unit("LOSIS1211"),
                         _create_proposal_learning_unit("LOSIS1212"),
                         _create_proposal_learning_unit("LOSIS1213")]

    def setUp(self):
        self.client.force_login(self.person.user)

    def test_learning_units_proposal_search(self):
        url = reverse("learning_units_proposal")
        response = self.client.get(url, data={'acronym': self.proposals[0].learning_unit_year.acronym})
        self.assertEqual(response.context['learning_units_count'], 1)

    def test_learning_units_proposal_search_by_tutor(self):
        proposal = _create_proposal_learning_unit("LOSIS1214")
        tutor = TutorFactory(person=self.person)
        attribution = AttributionNewFactory(tutor=tutor)
        learning_unit_component = LearningComponentYearFactory(learning_unit_year=proposal.learning_unit_year)
        AttributionChargeNewFactory(attribution=attribution,
                                    learning_component_year=learning_unit_component)
        url = reverse("learning_units_proposal")
        response = self.client.get(url, data={'tutor': self.person.first_name})
        self.assertEqual(response.context['learning_units_count'], 1)

    def test_learning_units_proposal_force_state_available_choices_as_faculty_manager(self):
        url = reverse("learning_units_proposal")
        self.person.user.groups.add(FacultyManagerGroupFactory())
        response = self.client.get(url, data={'acronym': self.proposals[0].learning_unit_year.acronym})
        state_choices = response.context['form_proposal_state'].fields['state'].choices
        self.assertEqual(state_choices, list(LimitedProposalState.choices()))
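    # The two force-state tests contrast the available choices: a faculty
    # manager may only force the limited set of states (LimitedProposalState),
    # whereas a central manager may force any ProposalState, as asserted
    # above and below.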
    def test_learning_units_proposal_force_state_available_choices_as_central_manager(self):
        url = reverse("learning_units_proposal")
        self.person.user.groups.add(CentralManagerGroupFactory())
        response = self.client.get(url, data={'acronym': self.proposals[0].learning_unit_year.acronym})
        state_choices = response.context['form_proposal_state'].fields['state'].choices
        self.assertEqual(state_choices, list(ProposalState.choices()))


class TestGroupActionsOnProposals(TestCase):
    @classmethod
    def setUpTestData(cls):
        AcademicYearFactory.produce(number_past=3, number_future=10)
        cls.person = person_factory.PersonWithPermissionsFactory("can_access_learningunit")
        cls.proposals = [_create_proposal_learning_unit("LOSIS1211"),
                         _create_proposal_learning_unit("LOSIS1212"),
                         _create_proposal_learning_unit("LOSIS1213")]
        cls.url = reverse("learning_units_proposal")
        AcademicYearFactory.produce_in_future(quantity=3)

    def setUp(self):
        self.client.force_login(self.person.user)

    def test_when_no_proposals_selected(self):
        response = self.client.post(self.url, data={"action": ACTION_BACK_TO_INITIAL}, follow=True)
        messages = [str(message) for message in response.context["messages"]]
        self.assertIn(_("No proposals was selected."), messages)

    @mock.patch("base.business.learning_unit_proposal.cancel_proposals_and_send_report",
                side_effect=lambda proposals, author, research_criteria: {})
    def test_when_action_is_back_to_initial(self, mock_cancel_proposals):
        post_data = {
            "action": ACTION_BACK_TO_INITIAL,
            "selected_action": [self.proposals[0].learning_unit_year.acronym]
        }
        self.client.post(self.url, data=post_data, follow=True)

        proposals, author, research_criteria = mock_cancel_proposals.call_args[0]
        self.assertEqual(list(proposals), [self.proposals[0]])
        self.assertEqual(author, self.person)
        self.assertFalse(research_criteria)

    @mock.patch("base.business.learning_unit_proposal.consolidate_proposals_and_send_report",
                side_effect=lambda proposals, author, research_criteria: {})
    def test_when_action_is_consolidate(self, mock_consolidate):
        post_data = {
            "action": ACTION_CONSOLIDATE,
            "selected_action": [self.proposals[0].learning_unit_year.acronym]
        }
        self.client.post(self.url, data=post_data, follow=True)

        proposals, author, research_criteria = mock_consolidate.call_args[0]
        self.assertEqual(list(proposals), [self.proposals[0]])
        self.assertEqual(author, self.person)
        self.assertFalse(research_criteria)

    @mock.patch("base.business.learning_unit_proposal.force_state_of_proposals",
                side_effect=lambda proposals, author, research_criteria: {})
    def test_when_action_is_force_state_but_no_new_state(self, mock_force_state):
        post_data = {
            "action": ACTION_FORCE_STATE,
            "selected_action": [self.proposals[0].learning_unit_year.acronym]
        }
        self.client.post(self.url, data=post_data, follow=True)

        self.assertFalse(mock_force_state.called)

    @mock.patch("base.business.learning_unit_proposal.force_state_of_proposals",
                side_effect=lambda proposals, author, research_criteria: {})
    def test_when_action_is_force_state(self, mock_force_state):
        post_data = {
            "action": ACTION_FORCE_STATE,
            "selected_action": [self.proposals[0].learning_unit_year.acronym,
                                self.proposals[2].learning_unit_year.acronym],
            "state": proposal_state.ProposalState.ACCEPTED.name
        }
        self.client.post(self.url, data=post_data, follow=True)

        proposals, author, new_state = mock_force_state.call_args[0]
        self.assertCountEqual(list(proposals), [self.proposals[0], self.proposals[2]])
        self.assertEqual(author, self.person)
        self.assertEqual(new_state, proposal_state.ProposalState.ACCEPTED.name)
_create_proposal_learning_unit("LOSIS1211") cls.learning_unit_year = cls.learning_unit_proposal.learning_unit_year cls.person = FacultyManagerFactory( entity=cls.learning_unit_year.learning_container_year.requirement_entity ).person def setUp(self): self.url = reverse('learning_unit_cancel_proposal', args=[self.learning_unit_year.id]) self.client.force_login(self.person.user) def test_user_not_logged(self): self.client.logout() response = self.client.get(self.url) self.assertRedirects(response, '/login/?next={}'.format(self.url)) def test_user_has_not_permission(self): person = PersonFactory() self.client.force_login(person.user) response = self.client.get(self.url) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) self.assertTemplateUsed(response, "access_denied.html") def test_with_non_existent_learning_unit_year(self): self.learning_unit_proposal_to_delete = _create_proposal_learning_unit("LOSIS1211D") self.learning_unit_year_to_delete = self.learning_unit_proposal_to_delete.learning_unit_year self.person_to_delete = FacultyManagerFactory( entity=self.learning_unit_year_to_delete.learning_container_year.requirement_entity ).person self.url = reverse('learning_unit_cancel_proposal', args=[self.learning_unit_year_to_delete.id]) self.client.force_login(self.person_to_delete.user) self.learning_unit_year_to_delete.delete() response = self.client.get(self.url) self.assertEqual(response.status_code, HttpResponseNotFound.status_code) self.assertTemplateUsed(response, "page_not_found.html") def test_with_none_person(self): user = UserFactory() self.client.force_login(user) response = self.client.get(self.url) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) self.assertTemplateUsed(response, "access_denied.html") def test_with_no_proposal(self): self.learning_unit_proposal_to_delete = _create_proposal_learning_unit("LOSIS1211D") self.learning_unit_year_to_delete = self.learning_unit_proposal_to_delete.learning_unit_year self.person_to_delete = FacultyManagerFactory( entity=self.learning_unit_year_to_delete.learning_container_year.requirement_entity ).person self.url = reverse('learning_unit_cancel_proposal', args=[self.learning_unit_year_to_delete.id]) self.client.force_login(self.person_to_delete.user) self.learning_unit_proposal_to_delete.delete() response = self.client.get(self.url) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) self.assertTemplateUsed(response, "access_denied.html") def test_with_proposal_of_state_different_than_faculty(self): self.learning_unit_proposal.state = proposal_state.ProposalState.CENTRAL.name self.learning_unit_proposal.save() response = self.client.get(self.url) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) self.assertTemplateUsed(response, "access_denied.html") def test_user_not_linked_to_current_requirement_entity(self): person = PersonFactory() self.client.force_login(person.user) response = self.client.get(self.url) self.assertEqual(response.status_code, HttpResponseForbidden.status_code) self.assertTemplateUsed(response, "access_denied.html") def test_context_after_valid_get_request(self): response = self.client.get(self.url) redirected_url = reverse('learning_unit', args=[self.learning_unit_year.id]) self.assertRedirects(response, redirected_url, fetch_redirect_response=False) messages = [str(message) for message in get_messages(response.wsgi_request)] self.assertIn(_("Proposal %(acronym)s (%(academic_year)s) successfully canceled.") % { "acronym": 
    def test_context_after_valid_get_request(self):
        response = self.client.get(self.url)

        redirected_url = reverse('learning_unit', args=[self.learning_unit_year.id])
        self.assertRedirects(response, redirected_url, fetch_redirect_response=False)

        messages = [str(message) for message in get_messages(response.wsgi_request)]
        self.assertIn(_("Proposal %(acronym)s (%(academic_year)s) successfully canceled.") % {
            "acronym": self.learning_unit_year.acronym,
            "academic_year": self.learning_unit_year.academic_year
        }, messages)

    def test_models_after_cancellation_of_proposal(self):
        _modify_learning_unit_year_data(self.learning_unit_year)
        _modify_entities_linked_to_learning_container_year(self.learning_unit_year.learning_container_year)
        new_entity = self.learning_unit_year.learning_container_year.requirement_entity
        FacultyManagerFactory(entity=new_entity, person=self.person)
        self.client.get(self.url)

        self.learning_unit_year.refresh_from_db()
        self.learning_unit_year.learning_container_year.refresh_from_db()
        initial_data = self.learning_unit_proposal.initial_data
        self.assertTrue(_test_attributes_equal(self.learning_unit_year, initial_data["learning_unit_year"]))
        self.assertTrue(_test_attributes_equal(self.learning_unit_year.learning_unit, initial_data["learning_unit"]))
        self.assertTrue(_test_attributes_equal(self.learning_unit_year.learning_container_year,
                                               initial_data["learning_container_year"]))


def _test_attributes_equal(obj, attribute_values_dict):
    for key, value in attribute_values_dict.items():
        attr_value = getattr(obj, key)
        foreign_key_fields = [
            "campus", "language", 'requirement_entity',
            'allocation_entity', 'additional_entity_1', 'additional_entity_2'
        ]
        if key == "credits":
            if float(attr_value) != float(value):
                return False
        elif attr_value and key in foreign_key_fields:
            if attr_value.pk != value:
                return False
        elif attr_value != value:
            return False
    return True
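# Illustrative usage of the helper above (hypothetical objects): for a
# learning unit year luy with 5 credits and language fr,
#
#     _test_attributes_equal(luy, {'credits': 5.0, 'language': fr.pk})
#
# returns True; pk values are compared for the foreign key fields listed in
# the helper, floats for credits, and plain equality otherwise.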
def _create_proposal_learning_unit(acronym):
    a_learning_unit_year = LearningUnitYearFactory(
        acronym=acronym,
        subtype=learning_unit_year_subtypes.FULL,
        learning_container_year__requirement_entity=EntityVersionFactory().entity,
    )
    learning_component_lecturing = LearningComponentYearFactory(
        learning_unit_year=a_learning_unit_year,
        type=learning_component_year_type.LECTURING
    )
    learning_component_practical = LearningComponentYearFactory(
        learning_unit_year=a_learning_unit_year,
        type=learning_component_year_type.PRACTICAL_EXERCISES)

    container_year = a_learning_unit_year.learning_container_year
    initial_data = {
        "learning_container_year": {
            "id": container_year.id,
            "acronym": a_learning_unit_year.acronym,
            "common_title": a_learning_unit_year.specific_title,
            "common_title_english": a_learning_unit_year.specific_title_english,
            "container_type": container_year.container_type,
            "in_charge": container_year.in_charge,
            "requirement_entity": container_year.requirement_entity.id,
            "allocation_entity": None,
            "additional_entity_1": None,
            "additional_entity_2": None,
        },
        "learning_unit_year": {
            "id": a_learning_unit_year.id,
            "acronym": a_learning_unit_year.acronym,
            "specific_title": a_learning_unit_year.specific_title,
            "specific_title_english": a_learning_unit_year.specific_title_english,
            "internship_subtype": a_learning_unit_year.internship_subtype,
            "credits": float(a_learning_unit_year.credits),
            "language": a_learning_unit_year.language.pk,
            "campus": a_learning_unit_year.campus.id,
            "periodicity": a_learning_unit_year.periodicity
        },
        "learning_unit": {
            "id": a_learning_unit_year.learning_unit.id,
        },
        "learning_component_years": [
            {"id": learning_component_lecturing.id,
             "planned_classes": learning_component_lecturing.planned_classes,
             "hourly_volume_partial_q1": learning_component_lecturing.hourly_volume_partial_q1,
             "hourly_volume_partial_q2": learning_component_lecturing.hourly_volume_partial_q2,
             "hourly_volume_total_annual": learning_component_lecturing.hourly_volume_total_annual
             },
            {"id": learning_component_practical.id,
             "planned_classes": learning_component_practical.planned_classes,
             "hourly_volume_partial_q1": learning_component_practical.hourly_volume_partial_q1,
             "hourly_volume_partial_q2": learning_component_practical.hourly_volume_partial_q2,
             "hourly_volume_total_annual": learning_component_practical.hourly_volume_total_annual
             }
        ]
    }
    return ProposalLearningUnitFactory(learning_unit_year=a_learning_unit_year,
                                       type=proposal_type.ProposalType.MODIFICATION.name,
                                       state=proposal_state.ProposalState.FACULTY.name,
                                       initial_data=initial_data,
                                       entity=container_year.requirement_entity)


def _modify_learning_unit_year_data(a_learning_unit_year):
    a_learning_unit_year.specific_title = "New title"
    a_learning_unit_year.specific_title_english = "New english title"
    a_learning_unit_year.acronym = "LNEW456"
    a_learning_unit_year.credits = 123
    a_learning_unit_year.language = LanguageFactory()
    a_learning_unit_year.save()

    a_learning_container = a_learning_unit_year.learning_container_year
    a_learning_container.campus = CampusFactory()
    a_learning_container.save()


def _modify_entities_linked_to_learning_container_year(a_learning_container_year):
    a_learning_container_year.requirement_entity = EntityFactory()
    a_learning_container_year.save()


@override_flag('learning_unit_proposal_update', active=True)
class TestEditProposal(TestCase):
    @classmethod
    def setUpTestData(cls):
        today = datetime.date.today()
        cls.academic_years = AcademicYearFactory.produce_in_future(quantity=5)
        cls.current_academic_year = cls.academic_years[0]
        end_year = AcademicYearFactory(year=cls.current_academic_year.year + 10)
        generate_proposal_calendars(cls.academic_years)

        cls.language = FrenchLanguageFactory()
        cls.organization = organization_factory.OrganizationFactory(type=organization_type.MAIN)
        cls.campus = campus_factory.CampusFactory(organization=cls.organization, is_administration=True)
        cls.entity = EntityFactory(organization=cls.organization)
        cls.entity_version = EntityVersionFactory(entity=cls.entity, entity_type=entity_type.FACULTY,
                                                  start_date=today.replace(year=1900), end_date=None)

        cls.generated_container = GenerateContainer(cls.current_academic_year, end_year, parent_entity=cls.entity)
        cls.generated_container_first_year = cls.generated_container.generated_container_years[1]
        cls.learning_unit_year = cls.generated_container_first_year.learning_unit_year_full
        cls.requirement_entity_of_luy = cls.generated_container_first_year.requirement_entity_container_year
        cls.person = FacultyManagerFactory(entity=cls.entity, with_child=True).person

        cls.url = reverse(update_learning_unit_proposal, args=[cls.learning_unit_year.id])
        cls.academic_year_for_suppression_proposal = AcademicYear.objects.filter(
            year=cls.learning_unit_year.academic_year.year - 1)

    def setUp(self):
        self.proposal = ProposalLearningUnitFactory(learning_unit_year=self.learning_unit_year,
                                                    state=ProposalState.FACULTY.name,
                                                    folder_id=1,
                                                    entity=self.entity,
                                                    type=proposal_type.ProposalType.MODIFICATION.name)
        self.client.force_login(self.person.user)

    def test_edit_proposal_get_no_permission(self):
        person = person_factory.PersonFactory()
        self.client.force_login(person.user)
        response = self.client.get(self.url)

        self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
        self.assertTemplateUsed(response, 'access_denied.html')
    def test_edit_proposal_get_regular_user_with_permission(self):
        person = FacultyManagerFactory().person
        self.client.force_login(person.user)
        response = self.client.get(self.url)

        self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
        self.assertTemplateUsed(response, 'access_denied.html')

    def test_edit_proposal_get_as_faculty_manager(self):
        response = self.client.get(self.url)

        self.assertTemplateUsed(response, 'learning_unit/proposal/update_modification.html')
        self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)

    def test_edit_proposal_get_as_central_manager_with_instance(self):
        central_manager = person_factory.CentralManagerForUEFactory("can_edit_learning_unit_proposal")
        self.client.logout()
        self.client.force_login(central_manager.user)
        response = self.client.get(self.url)

        self.assertTemplateUsed(response, 'learning_unit/proposal/update_modification.html')
        self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)
        self.assertEqual(response.context['form_proposal'].initial['state'], str(ProposalState.FACULTY.name))

    def get_valid_data(self):
        return {
            'acronym_0': 'L',
            'acronym_1': 'TAU2000',
            "subtype": learning_unit_year_subtypes.FULL,
            "container_type": learning_container_year_types.COURSE,
            "academic_year": self.current_academic_year.id,
            "status": True,
            "credits": "5",
            "campus": self.campus.id,
            "common_title": "Common UE title",
            "language": self.language.pk,
            "periodicity": learning_unit_year_periodicity.ANNUAL,
            "entity": self.entity_version.pk,
            "folder_id": 1,
            'requirement_entity': self.entity_version.pk,
            'allocation_entity': self.entity_version.pk,
            'additional_entity_1': '',

            # Learning component year data model form
            'component-TOTAL_FORMS': '2',
            'component-INITIAL_FORMS': '0',
            'component-MAX_NUM_FORMS': '2',
            'component-0-hourly_volume_total_annual': 20,
            'component-0-hourly_volume_partial_q1': 10,
            'component-0-hourly_volume_partial_q2': 10,
            'component-0-planned_classes': 1,
            'component-1-hourly_volume_total_annual': 20,
            'component-1-hourly_volume_partial_q1': 10,
            'component-1-hourly_volume_partial_q2': 10,
            'component-1-planned_classes': 1,
        }

    def get_modify_data(self):
        modifydict = dict(self.get_valid_data())
        modifydict["state"] = ProposalState.CENTRAL.value
        return modifydict

    def get_faulty_data(self):
        faultydict = dict(self.get_valid_data())
        faultydict["state"] = "bad_choice"
        return faultydict

    def test_edit_proposal_post_as_faculty_manager(self):
        initial_data = copy_learning_unit_data(self.learning_unit_year)
        self.proposal.initial_data = initial_data
        request_factory = RequestFactory()

        request = request_factory.post(self.url, data=self.get_modify_data())
        request.user = self.person.user
        request.session = self.client.session
        request._messages = FallbackStorage(request)

        update_learning_unit_proposal(request, learning_unit_year_id=self.learning_unit_year.id)

        msg = [m.message for m in get_messages(request)]
        msg_level = [m.level for m in get_messages(request)]
        self.assertIn(messages.SUCCESS, msg_level, msg)
        self.assertEqual(len(msg), 1)

        self.proposal.refresh_from_db()
        self.assertEqual(self.proposal.state, 'FACULTY')

    def test_edit_proposal_post_wrong_data(self):
        self.person.user.groups.add(CentralManagerGroupFactory())

        response = self.client.post(self.url, data=self.get_faulty_data())

        self.assertTemplateUsed(response, 'learning_unit/proposal/update_modification.html')
        self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm)

        form = response.context['form_proposal']
        self.assertEqual(len(form.errors), 1)

        self.proposal.refresh_from_db()
        self.assertEqual(self.proposal.state, ProposalState.FACULTY.name)
'learning_unit/proposal/update_suppression.html') self.assertIsInstance(response.context['form_end_date'], LearningUnitProposalEndDateForm) self.assertCountEqual( list(response.context['form_end_date'].fields['academic_year'].queryset), list(self.academic_year_for_suppression_proposal) ) self.assertIsInstance(response.context['form_proposal'], ProposalLearningUnitForm) def test_edit_suppression_proposal_post(self): self.proposal.type = ProposalType.SUPPRESSION.name self.proposal.save() request_factory = RequestFactory() request = request_factory.post(self.url, data={"academic_year": self.academic_year_for_suppression_proposal.first().id, "entity": self.entity_version.id, "folder_id": 12}) request.user = self.person.user request.session = 'session' request._messages = FallbackStorage(request) update_learning_unit_proposal(request, learning_unit_year_id=self.learning_unit_year.id) msg = [m.message for m in get_messages(request)] msg_level = [m.level for m in get_messages(request)] self.assertEqual(len(msg), 1) self.assertIn(messages.SUCCESS, msg_level) self.proposal.refresh_from_db() self.assertEqual(self.proposal.folder_id, 12) def test_edit_suppression_proposal_wrong_post(self): self.proposal.type = ProposalType.SUPPRESSION.name self.proposal.save() response = self.client.post(self.url, data={"academic_year": self.academic_years[3].id, "entity": self.entity_version.id}) self.assertEqual(self.url, response.request['PATH_INFO']) class TestLearningUnitProposalDisplay(TestCase): @classmethod def setUpTestData(cls): cls.language_pt = LanguageFactory(code='PT', name="Portugais") cls.language_it = LanguageFactory(code='IT', name="Italien") cls.campus = CampusFactory() cls.academic_year = create_current_academic_year() cls.l_container_year = LearningContainerYearFactory( acronym="LBIR1212", academic_year=cls.academic_year, ) cls.learning_unit = LearningUnitFactory(learning_container=cls.l_container_year.learning_container) cls.learning_unit_yr = LearningUnitYearFactory( acronym="LBIR1212", learning_unit=cls.learning_unit, learning_container_year=cls.l_container_year, academic_year=cls.academic_year, subtype=learning_unit_year_subtypes.FULL, status=True, quadrimester="Q3", credits=4, campus=cls.campus, language=cls.language_pt, periodicity=learning_unit_year_periodicity.BIENNIAL_EVEN ) cls.proposal_learning_unit = ProposalLearningUnitFactory(learning_unit_year=cls.learning_unit_yr) cls.initial_credits = 3.0 cls.initial_quadrimester = 'Q1' cls.initial_language = cls.language_it.pk cls.initial_periodicity = learning_unit_year_periodicity.ANNUAL cls.initial_data_learning_unit_year = {'credits': cls.initial_credits, 'periodicity': cls.initial_periodicity} organization_main = OrganizationFactory(type=organization_type.MAIN) cls.entity_from_main_organization = EntityFactory(organization=organization_main) cls.entity_version = EntityVersionFactory(entity=cls.entity_from_main_organization) organization_not_main = OrganizationFactory(type=organization_type.ACADEMIC_PARTNER) cls.entity_from_not_main_organization = EntityFactory(organization=organization_not_main) cls.entity_version_not_main = EntityVersionFactory(entity=cls.entity_from_not_main_organization) cls.initial_language_en = cls.language_it end_year = AcademicYearFactory(year=cls.academic_year.year + 1) cls.generator_learning_container = GenerateContainer(start_year=cls.academic_year, end_year=end_year) cls.l_container_year_with_entities = cls.generator_learning_container.generated_container_years[0] def test_is_foreign_key(self): current_data = 
{"language{}".format(proposal_business.END_FOREIGN_KEY_NAME): self.language_it.pk} self.assertTrue(proposal_business._is_foreign_key("language", current_data)) def test_is_not_foreign_key(self): current_data = {"credits": self.language_it.pk} self.assertFalse(proposal_business._is_foreign_key("credits", current_data)) def test_check_differences(self): proposal = ProposalLearningUnitFactory() proposal.initial_data = {'learning_unit_year': { 'credits': self.initial_credits }} proposal.learning_unit_year.credits = self.learning_unit_yr.credits differences = proposal_business.get_difference_of_proposal(proposal, proposal.learning_unit_year) self.assertEqual(float(differences.get('credits')), self.initial_credits) def test_get_the_old_value(self): differences = proposal_business._get_the_old_value('credits', {"credits": self.initial_credits + 1}, {'credits': self.initial_credits}) self.assertEqual(differences, "{}".format(self.initial_credits)) def test_get_the_old_value_no_initial_value(self): differences = proposal_business._get_the_old_value('credits', {"credits": self.initial_credits + 1}, {}) self.assertEqual(differences, proposal_business.NO_PREVIOUS_VALUE) def test_get_the_old_value_for_foreign_key(self): initial_data_learning_unit_year = {'language': self.language_pt.pk} current_data = {"language_id": self.language_it.pk} differences = proposal_business._get_the_old_value('language', current_data, initial_data_learning_unit_year) self.assertEqual(differences, str(self.language_pt)) def test_get_the_old_value_for_foreign_key_no_previous_value(self): initial_data = {"language": None} current_data = {"language_id": self.language_it.pk} differences = proposal_business._get_the_old_value('language', current_data, initial_data) self.assertEqual(differences, proposal_business.NO_PREVIOUS_VALUE) initial_data = {} differences = proposal_business._get_the_old_value('language', current_data, initial_data) self.assertEqual(differences, proposal_business.NO_PREVIOUS_VALUE) def test_get_the_old_value_with_translation(self): key = proposal_business.VALUES_WHICH_NEED_TRANSLATION[0] initial_data = {key: learning_unit_year_periodicity.ANNUAL} current_data = {key: learning_unit_year_periodicity.BIENNIAL_EVEN} differences = proposal_business._get_the_old_value(key, current_data, initial_data) self.assertEqual(differences, _(learning_unit_year_periodicity.ANNUAL)) def test_get_str_representing_old_data_from_foreign_key(self): differences = proposal_business._get_str_representing_old_data_from_foreign_key('campus', self.campus.id) self.assertEqual(differences, str(self.campus.name)) def test_get_str_representing_old_data_from_foreign_key_equals_no_value(self): differences = proposal_business._get_str_representing_old_data_from_foreign_key( 'campus', proposal_business.NO_PREVIOUS_VALUE) self.assertEqual(differences, proposal_business.NO_PREVIOUS_VALUE) def test_get_old_value_of_foreign_key_for_campus(self): differences = proposal_business._get_old_value_of_foreign_key('campus', self.campus.id) self.assertEqual(differences, str(self.campus.name)) def test_get_old_value_of_foreign_key_for_language(self): differences = proposal_business._get_old_value_of_foreign_key('language', self.language_it.pk) self.assertEqual(differences, str(self.language_it)) def test_get_old_value_of_foreign_key_for_additional_requirement_entity_main_organization(self): differences = proposal_business._get_old_value_of_foreign_key('ADDITIONAL_REQUIREMENT_ENTITY_1', self.entity_from_main_organization.pk) self.assertEqual(differences, 
str(self.entity_from_main_organization.most_recent_entity_version.acronym)) def test_get_old_value_of_foreign_key_for_additional_requirement_entity_not_main_organization(self): differences = proposal_business._get_old_value_of_foreign_key('ADDITIONAL_REQUIREMENT_ENTITY_1', self.entity_from_not_main_organization.pk) self.assertEqual(differences, str(self.entity_from_not_main_organization.most_recent_entity_version.title)) def test_get_status_initial_value(self): self.assertEqual(proposal_business._get_status_initial_value(True), proposal_business.LABEL_ACTIVE) self.assertEqual(proposal_business._get_status_initial_value(False), proposal_business.LABEL_INACTIVE) def test_get_old_value_for_periodicity(self): differences = proposal_business._get_the_old_value('periodicity', {"periodicity": self.learning_unit_yr.periodicity}, {'periodicity': self.initial_periodicity}) self.assertEqual(differences, dict(learning_unit_year_periodicity.PERIODICITY_TYPES)[self.initial_periodicity]) @override_flag('learning_unit_proposal_delete', active=True) class TestCreationProposalCancel(TestCase): @mock.patch('base.utils.send_mail.send_mail_cancellation_learning_unit_proposals') def test_cancel_proposal_of_learning_unit(self, mock_send_mail): a_proposal = _create_proposal_learning_unit("LOSIS1211") luy = a_proposal.learning_unit_year url = reverse('learning_unit_cancel_proposal', args=[luy.id]) generate_proposal_calendars_without_start_and_end_date([luy.academic_year]) self.central_manager = CentralManagerFactory(entity=luy.learning_container_year.requirement_entity) self.client.force_login(self.central_manager.person.user) response = self.client.post(url, data={}) redirected_url = reverse('learning_unit', args=[luy.id]) msgs = [str(message) for message in get_messages(response.wsgi_request)] self.assertRedirects(response, redirected_url, fetch_redirect_response=False) self.assertEqual(len(msgs), 2) self.assertTrue(mock_send_mail.called)
agpl-3.0
-6,050,080,561,337,178,000
49.473206
120
0.670134
false
3.813285
true
false
false
christianwengert/mailclient
src/bin/__init__.py
1
2230
# coding=utf-8
import subprocess

from imapclient import IMAPClient

HOST = 'mail.netzone.ch'
USERNAME = '[email protected]'
PASSWORD = subprocess.check_output(["/usr/local/bin/pass", "mail/[email protected]"])
PASSWORD = PASSWORD.split()[0].decode('utf8')
KEYMAPPING = {}

ssl = True


class Signature:
    pass


class Account:
    # username, password, First name, Name, host, port, ssl
    pass


class Mailbox:
    # name, account
    pass


class Search:
    flags = ''
    searchterms = ''
    date = ''


class Message:
    id = ''
    flags = ''  # is replied and forwarded here?
    attachments = ''
    subject = ''
    content = ''
    date = ''
    mailbox = ''
    label = ''


# The functions below are unimplemented stubs for the planned client commands.

def save_search():
    pass


def new_mailbox():
    pass


def delete_mailbox():
    pass


def rename_mailbox():
    pass


def reply():
    pass


def forward():
    pass


def mark_unread():
    pass


def label():
    pass


def move():
    pass


def search():
    pass


def flag():
    pass


def delete():
    pass


def compose():
    pass


def clean_database():
    pass


def sync_database():
    # fetch
    pass


def main():
    server = IMAPClient(HOST, use_uid=True, ssl=ssl)
    server.login(USERNAME, PASSWORD)

    select_info = server.select_folder('INBOX')
    print('%d messages in INBOX' % select_info[b'EXISTS'])

    messages = server.search(['NOT', 'DELETED'])
    print("%d messages that aren't deleted" % len(messages))

    print()
    print("Messages:")
    response = server.fetch(messages, ['FLAGS', 'RFC822', 'RFC822.SIZE', 'INTERNALDATE'])
    for msgid, data in response.items():
        print('   ID %d: %d bytes, flags=%s' % (msgid, data[b'RFC822.SIZE'],
                                                data[b'FLAGS']))


if __name__ == "__main__":
    # parser = argparse.ArgumentParser(description='Command line mail client.')
    #
    # parser.add_argument('--host', dest='accumulate', action='store_const',
    #                     const=sum, default=max,
    #                     help='sum the integers (default: find the max)')
    #
    # args = parser.parse_args()
    # print(args.accumulate(args.integers))
    main()
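# --- Illustrative addition (not part of the original file) ---
# A minimal sketch of how the logged-in IMAPClient from main() could back the
# unimplemented mark_unread()/flag() stubs above. The criteria and flag names
# are standard IMAP; `server` is assumed to be a connected, logged-in
# IMAPClient, and `demo_flagging` is a hypothetical helper.
from imapclient import SEEN


def demo_flagging(server):
    unseen = server.search(['UNSEEN'])    # UIDs of unread messages
    server.add_flags(unseen, [SEEN])      # mark them as read
    server.remove_flags(unseen, [SEEN])   # ...and back to unread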
bsd-3-clause
-5,255,035,904,629,647,000
14.594406
89
0.565471
false
3.673806
false
false
false
ThomasMcVay/MediaApp
MediaAppKnobs/KnobElements/RectButton.py
1
1663
#===============================================================================
# @Author: Madison Aster
# @ModuleDescription:
# @License:
#     MediaApp Library - Python Package framework for developing robust Media
#     Applications with Qt Library
#     Copyright (C) 2013 Madison Aster
#
#     This library is free software; you can redistribute it and/or
#     modify it under the terms of the GNU Lesser General Public
#     License version 2.1 as published by the Free Software Foundation;
#
#     This library is distributed in the hope that it will be useful,
#     but WITHOUT ANY WARRANTY; without even the implied warranty of
#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
#     Lesser General Public License for more details.
#
#     You should have received a copy of the GNU Lesser General Public
#     License along with this library; if not, write to the Free Software
#     Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
#     See LICENSE in the root directory of this library for copy of
#     GNU Lesser General Public License and other license details.
#===============================================================================
from Qt import QtGui, QtCore, QtWidgets


class RectButton(QtWidgets.QPushButton):
    def __init__(self, *args):
        # Guard against construction with no arguments before indexing args.
        if args and isinstance(args[0], str):
            text = args[0]
        else:
            text = ''
        super(RectButton, self).__init__(text)
        # QSizePolicy lives in QtWidgets under Qt5-style bindings; the original
        # referenced it through QtGui, which fails with PySide2/Qt.py shims.
        self.setSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)

    def sizeHint(self):
        return QtCore.QSize(600, 16)
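# --- Illustrative addition (not part of the original file) ---
# A minimal usage sketch for RectButton, assuming the same Qt shim import as
# above and a Python 3 interpreter; the button label and slot are hypothetical.
from Qt import QtWidgets

if __name__ == '__main__':
    app = QtWidgets.QApplication([])
    button = RectButton('Render')
    # clicked is the standard QPushButton signal inherited by RectButton.
    button.clicked.connect(lambda: print('render requested'))
    button.show()
    app.exec_()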
lgpl-2.1
5,703,280,998,694,598,000
42.945946
83
0.607336
false
4.342037
false
false
false
kontron/python-ipmi
pyipmi/sensor.py
1
7640
# Copyright (c) 2014 Kontron Europe GmbH
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from __future__ import absolute_import

from .utils import check_completion_code
from .msgs import create_request_by_name
from .helper import get_sdr_data_helper, get_sdr_chunk_helper
from . import sdr

# THRESHOLD BASED STATES
EVENT_READING_TYPE_CODE_THRESHOLD = 0x01
# DMI-based "Usage States" STATES
EVENT_READING_TYPE_CODE_DISCRETE = 0x02
# DIGITAL/DISCRETE EVENT STATES
EVENT_READING_TYPE_CODE_STATE = 0x03
EVENT_READING_TYPE_CODE_PREDICTIVE_FAILURE = 0x04
EVENT_READING_TYPE_CODE_LIMIT = 0x05
EVENT_READING_TYPE_CODE_PERFORMANCE = 0x06

# Sensor Types
SENSOR_TYPE_TEMPERATURE = 0x01
SENSOR_TYPE_VOLTAGE = 0x02
SENSOR_TYPE_CURRENT = 0x03
SENSOR_TYPE_FAN = 0x04
SENSOR_TYPE_CHASSIS_INTRUSION = 0x05
SENSOR_TYPE_PLATFORM_SECURITY = 0x06
SENSOR_TYPE_PROCESSOR = 0x07
SENSOR_TYPE_POWER_SUPPLY = 0x08
SENSOR_TYPE_POWER_UNIT = 0x09
SENSOR_TYPE_COOLING_DEVICE = 0x0a
SENSOR_TYPE_OTHER_UNITS_BASED_SENSOR = 0x0b
SENSOR_TYPE_MEMORY = 0x0c
SENSOR_TYPE_DRIVE_SLOT = 0x0d
SENSOR_TYPE_POST_MEMORY_RESIZE = 0x0e
SENSOR_TYPE_SYSTEM_FIRMWARE_PROGRESS = 0x0f
SENSOR_TYPE_EVENT_LOGGING_DISABLED = 0x10
SENSOR_TYPE_WATCHDOG_1 = 0x11
SENSOR_TYPE_SYSTEM_EVENT = 0x12
SENSOR_TYPE_CRITICAL_INTERRUPT = 0x13
SENSOR_TYPE_BUTTON = 0x14
SENSOR_TYPE_MODULE_BOARD = 0x15
SENSOR_TYPE_MICROCONTROLLER_COPROCESSOR = 0x16
SENSOR_TYPE_ADD_IN_CARD = 0x17
SENSOR_TYPE_CHASSIS = 0x18
SENSOR_TYPE_CHIP_SET = 0x19
SENSOR_TYPE_OTHER_FRU = 0x1a
SENSOR_TYPE_CABLE_INTERCONNECT = 0x1b
SENSOR_TYPE_TERMINATOR = 0x1c
SENSOR_TYPE_SYSTEM_BOOT_INITIATED = 0x1d
SENSOR_TYPE_BOOT_ERROR = 0x1e
SENSOR_TYPE_OS_BOOT = 0x1f
SENSOR_TYPE_OS_CRITICAL_STOP = 0x20
SENSOR_TYPE_SLOT_CONNECTOR = 0x21
SENSOR_TYPE_SYSTEM_ACPI_POWER_STATE = 0x22
SENSOR_TYPE_WATCHDOG_2 = 0x23
SENSOR_TYPE_PLATFORM_ALERT = 0x24
SENSOR_TYPE_ENTITY_PRESENT = 0x25
SENSOR_TYPE_MONITOR_ASIC_IC = 0x26
SENSOR_TYPE_LAN = 0x27
SENSOR_TYPE_MANGEMENT_SUBSYSTEM_HEALTH = 0x28
SENSOR_TYPE_BATTERY = 0x29
SENSOR_TYPE_SESSION_AUDIT = 0x2a
SENSOR_TYPE_VERSION_CHANGE = 0x2b
SENSOR_TYPE_FRU_STATE = 0x2c
SENSOR_TYPE_FRU_HOT_SWAP = 0xf0
SENSOR_TYPE_IPMB_PHYSICAL_LINK = 0xf1
SENSOR_TYPE_MODULE_HOT_SWAP = 0xf2
SENSOR_TYPE_POWER_CHANNEL_NOTIFICATION = 0xf3
SENSOR_TYPE_TELCO_ALARM_INPUT = 0xf4
SENSOR_TYPE_OEM_KONTRON_FRU_INFORMATION_AGENT = 0xc5
SENSOR_TYPE_OEM_KONTRON_POST_VALUE = 0xc6
SENSOR_TYPE_OEM_KONTRON_FW_UPGRADE = 0xc7
SENSOR_TYPE_OEM_KONTRON_DIAGNOSTIC = 0xc9
SENSOR_TYPE_OEM_KONTRON_SYSTEM_FIRMWARE_UPGRADE = 0xca
SENSOR_TYPE_OEM_KONTRON_POWER_DENIED = 0xcd
SENSOR_TYPE_OEM_KONTRON_RESET = 0xcf


class Sensor(object):
    def reserve_device_sdr_repository(self):
        rsp = self.send_message_with_name('ReserveDeviceSdrRepository')
        return rsp.reservation_id

    def _get_device_sdr_chunk(self, reservation_id, record_id, offset, length):
        req = create_request_by_name('GetDeviceSdr')
        req.reservation_id = reservation_id
        req.record_id = record_id
        req.offset = offset
        req.bytes_to_read = length
        rsp = get_sdr_chunk_helper(self.send_message, req,
                                   self.reserve_device_sdr_repository)
        return (rsp.next_record_id, rsp.record_data)

    def get_device_sdr(self, record_id, reservation_id=None):
        """Collect all data from the sensor device to get the SDR.

        `record_id` the Record ID.

        `reservation_id=None` can be set. If None, the reservation ID will
        be determined.
        """
        (next_id, record_data) = \
            get_sdr_data_helper(self.reserve_device_sdr_repository,
                                self._get_device_sdr_chunk,
                                record_id, reservation_id)
        return sdr.SdrCommon.from_data(record_data, next_id)

    def device_sdr_entries(self):
        """A generator that returns the SDR list.

        Iteration starts with ID=0x0000 and ends when ID=0xffff is returned.
        """
        reservation_id = self.reserve_device_sdr_repository()
        record_id = 0

        while True:
            record = self.get_device_sdr(record_id, reservation_id)
            yield record
            if record.next_id == 0xffff:
                break
            record_id = record.next_id

    def get_device_sdr_list(self, reservation_id=None):
        """Return the complete SDR list."""
        return list(self.device_sdr_entries())

    def rearm_sensor_events(self, sensor_number):
        """Rearm sensor events for the given sensor number."""
        self.send_message_with_name('RearmSensorEvents',
                                    sensor_number=sensor_number)

    def get_sensor_reading(self, sensor_number, lun=0):
        """Return the sensor reading and the assertion states.

        `sensor_number`

        Returns a tuple with `raw reading` and `assertion states`.
        """
        rsp = self.send_message_with_name('GetSensorReading',
                                          sensor_number=sensor_number,
                                          lun=lun)
        reading = rsp.sensor_reading
        if rsp.config.initial_update_in_progress:
            reading = None

        states = None
        if rsp.states1 is not None:
            states = rsp.states1
        if rsp.states2 is not None:
            states |= (rsp.states2 << 8)
        return (reading, states)

    def set_sensor_thresholds(self, sensor_number, lun=0,
                              unr=None, ucr=None, unc=None,
                              lnc=None, lcr=None, lnr=None):
        """Set the sensor thresholds that are not 'None'.

        `sensor_number`
        `unr` for upper non-recoverable
        `ucr` for upper critical
        `unc` for upper non-critical
        `lnc` for lower non-critical
        `lcr` for lower critical
        `lnr` for lower non-recoverable
        """
        req = create_request_by_name('SetSensorThresholds')
        req.sensor_number = sensor_number
        req.lun = lun

        thresholds = dict(unr=unr, ucr=ucr, unc=unc,
                          lnc=lnc, lcr=lcr, lnr=lnr)
        for key, value in thresholds.items():
            if value is not None:
                setattr(req.set_mask, key, 1)
                setattr(req.threshold, key, value)

        rsp = self.send_message(req)
        check_completion_code(rsp.completion_code)

    def get_sensor_thresholds(self, sensor_number, lun=0):
        """Return the readable sensor thresholds as a dict."""
        rsp = self.send_message_with_name('GetSensorThresholds',
                                          sensor_number=sensor_number,
                                          lun=lun)
        thresholds = {}
        threshold_list = ('unr', 'ucr', 'unc', 'lnc', 'lcr', 'lnr')
        for threshold in threshold_list:
            if hasattr(rsp.readable_mask, threshold):
                if getattr(rsp.readable_mask, threshold):
                    thresholds[threshold] = getattr(rsp.threshold, threshold)
        return thresholds
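# --- Illustrative addition (not part of the original file) ---
# A sketch of the Sensor mixin in use, since it is mixed into the main Ipmi
# connection class. Connection setup loosely follows the pyipmi README; the
# host, credentials, target address and sensor number are all placeholders.
import pyipmi
import pyipmi.interfaces

interface = pyipmi.interfaces.create_interface('ipmitool', interface_type='lan')
ipmi = pyipmi.create_connection(interface)
ipmi.target = pyipmi.Target(0x20)
ipmi.session.set_session_type_rmcp('10.0.0.1', port=623)
ipmi.session.set_auth_type_user('admin', 'admin')
ipmi.session.establish()

reading, states = ipmi.get_sensor_reading(0x42)   # (raw reading, assertion states)
ipmi.set_sensor_thresholds(0x42, unc=50, ucr=60)  # only non-None thresholds are set
print(ipmi.get_sensor_thresholds(0x42))
ipmi.session.close()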
lgpl-2.1
-3,930,262,043,866,632,700
34.868545
79
0.657984
false
3.339161
false
false
false
henriquegemignani/randovania
randovania/gui/tracker_window.py
1
32424
import collections import functools import json import typing from pathlib import Path from random import Random from typing import Optional, Dict, Set, List, Tuple, Iterator, Union import matplotlib.pyplot as plt import networkx from PySide2 import QtWidgets from PySide2.QtCore import Qt from PySide2.QtWidgets import QMainWindow, QTreeWidgetItem, QCheckBox, QLabel, QGridLayout, QWidget, QMessageBox from matplotlib.axes import Axes from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar from matplotlib.figure import Figure from randovania.game_description.area_location import AreaLocation from randovania.game_description.game_description import GameDescription from randovania.game_description.item.item_category import ItemCategory from randovania.game_description.node import Node, ResourceNode, TranslatorGateNode, TeleporterNode, DockNode from randovania.game_description.resources.item_resource_info import ItemResourceInfo from randovania.game_description.resources.pickup_entry import PickupEntry from randovania.game_description.resources.resource_info import add_resource_gain_to_current_resources from randovania.game_description.resources.translator_gate import TranslatorGate from randovania.game_description.world import World from randovania.games.game import RandovaniaGame from randovania.games.prime import patcher_file from randovania.generator import generator from randovania.gui.generated.tracker_window_ui import Ui_TrackerWindow from randovania.gui.lib.common_qt_lib import set_default_window_icon from randovania.gui.lib.custom_spin_box import CustomSpinBox from randovania.layout import translator_configuration from randovania.layout.echoes_configuration import EchoesConfiguration from randovania.layout.teleporters import TeleporterShuffleMode from randovania.layout.translator_configuration import LayoutTranslatorRequirement from randovania.resolver.bootstrap import logic_bootstrap from randovania.resolver.logic import Logic from randovania.resolver.resolver_reach import ResolverReach from randovania.resolver.state import State, add_pickup_to_state class InvalidLayoutForTracker(Exception): pass def _load_previous_state(persistence_path: Path, layout_configuration: EchoesConfiguration, ) -> Optional[dict]: previous_layout_path = persistence_path.joinpath("layout_configuration.json") try: with previous_layout_path.open() as previous_layout_file: previous_layout = EchoesConfiguration.from_json(json.load(previous_layout_file)) except (FileNotFoundError, TypeError, KeyError, ValueError, json.JSONDecodeError): return None if previous_layout != layout_configuration: return None previous_state_path = persistence_path.joinpath("state.json") try: with previous_state_path.open() as previous_state_file: return json.load(previous_state_file) except (FileNotFoundError, json.JSONDecodeError): return None class MatplotlibWidget(QtWidgets.QWidget): ax: Axes def __init__(self, parent=None): super().__init__(parent) fig = Figure(figsize=(7, 5), dpi=65, facecolor=(1, 1, 1), edgecolor=(0, 0, 0)) self.canvas = FigureCanvas(fig) self.toolbar = NavigationToolbar(self.canvas, self) lay = QtWidgets.QVBoxLayout(self) lay.addWidget(self.toolbar) lay.addWidget(self.canvas) self.ax = fig.add_subplot(111) self.line, *_ = self.ax.plot([]) class TrackerWindow(QMainWindow, Ui_TrackerWindow): # Tracker state _collected_pickups: Dict[PickupEntry, int] _actions: List[Node] # Tracker configuration logic: 
Logic game_description: GameDescription layout_configuration: EchoesConfiguration persistence_path: Path _initial_state: State _elevator_id_to_combo: Dict[int, QtWidgets.QComboBox] _translator_gate_to_combo: Dict[TranslatorGate, QtWidgets.QComboBox] _starting_nodes: Set[ResourceNode] _undefined_item = ItemResourceInfo(-1, "Undefined", "Undefined", 0, None) # UI tools _asset_id_to_item: Dict[int, QTreeWidgetItem] _node_to_item: Dict[Node, QTreeWidgetItem] _widget_for_pickup: Dict[PickupEntry, Union[QCheckBox, CustomSpinBox]] _during_setup = False def __init__(self, persistence_path: Path, layout_configuration: EchoesConfiguration): super().__init__() self.setupUi(self) set_default_window_icon(self) self._collected_pickups = {} self._widget_for_pickup = {} self._actions = [] self._asset_id_to_item = {} self._node_to_item = {} self.layout_configuration = layout_configuration self.persistence_path = persistence_path player_pool = generator.create_player_pool(Random(0), self.layout_configuration, 0, 1) pool_patches = player_pool.patches self.game_description, self._initial_state = logic_bootstrap(layout_configuration, player_pool.game, pool_patches) self.logic = Logic(self.game_description, layout_configuration) self._initial_state.resources["add_self_as_requirement_to_resources"] = 1 self.menu_reset_action.triggered.connect(self._confirm_reset) self.resource_filter_check.stateChanged.connect(self.update_locations_tree_for_reachable_nodes) self.hide_collected_resources_check.stateChanged.connect(self.update_locations_tree_for_reachable_nodes) self.undo_last_action_button.clicked.connect(self._undo_last_action) self.configuration_label.setText("Trick Level: {}; Starts with:\n{}".format( layout_configuration.trick_level.pretty_description, ", ".join( resource.short_name for resource in pool_patches.starting_items.keys() ) )) self.setup_pickups_box(player_pool.pickups) self.setup_possible_locations_tree() self.setup_elevators() self.setup_translator_gates() self.matplot_widget = MatplotlibWidget(self.tab_graph_map) self.tab_graph_map_layout.addWidget(self.matplot_widget) self._world_to_node_positions = {} self.map_tab_widget.currentChanged.connect(self._on_tab_changed) for world in self.game_description.world_list.worlds: self.graph_map_world_combo.addItem(world.name, world) self.graph_map_world_combo.currentIndexChanged.connect(self.on_graph_map_world_combo) persistence_path.mkdir(parents=True, exist_ok=True) previous_state = _load_previous_state(persistence_path, layout_configuration) if not self.apply_previous_state(previous_state): self.setup_starting_location(None) with persistence_path.joinpath("layout_configuration.json").open("w") as layout_file: json.dump(layout_configuration.as_json, layout_file) self._add_new_action(self._initial_state.node) def apply_previous_state(self, previous_state: Optional[dict]) -> bool: if previous_state is None: return False starting_location = None needs_starting_location = len(self.layout_configuration.starting_location.locations) > 1 resource_db = self.game_description.resource_database translator_gates = {} try: pickup_name_to_pickup = {pickup.name: pickup for pickup in self._collected_pickups.keys()} quantity_to_change = { pickup_name_to_pickup[pickup_name]: quantity for pickup_name, quantity in previous_state["collected_pickups"].items() } previous_actions = [ self.game_description.world_list.all_nodes[index] for index in previous_state["actions"] ] if needs_starting_location: starting_location = 
AreaLocation.from_json(previous_state["starting_location"]) elevators = { int(elevator_id): AreaLocation.from_json(location) if location is not None else None for elevator_id, location in previous_state["elevators"].items() } if self.layout_configuration.game == RandovaniaGame.PRIME2: translator_gates = { TranslatorGate(int(gate)): (resource_db.get_item(item) if item is not None else self._undefined_item) for gate, item in previous_state["translator_gates"].items() } except KeyError: return False self.setup_starting_location(starting_location) for elevator_id, area_location in elevators.items(): combo = self._elevator_id_to_combo[elevator_id] if area_location is None: combo.setCurrentIndex(0) continue for i in range(combo.count()): if area_location == combo.itemData(i): combo.setCurrentIndex(i) break for gate, item in translator_gates.items(): combo = self._translator_gate_to_combo[gate] for i in range(combo.count()): if item == combo.itemData(i): combo.setCurrentIndex(i) break self.bulk_change_quantity(quantity_to_change) self._add_new_actions(previous_actions) return True def reset(self): self.bulk_change_quantity({ pickup: 0 for pickup in self._collected_pickups.keys() }) while len(self._actions) > 1: self._actions.pop() self.actions_list.takeItem(len(self._actions)) for elevator in self._elevator_id_to_combo.values(): elevator.setCurrentIndex(0) for elevator in self._translator_gate_to_combo.values(): elevator.setCurrentIndex(0) self._refresh_for_new_action() def _confirm_reset(self): reply = QMessageBox.question(self, "Reset Tracker?", "Do you want to reset the tracker progression?", QMessageBox.Yes | QMessageBox.No, QMessageBox.No) if reply == QMessageBox.Yes: self.reset() @property def _show_only_resource_nodes(self) -> bool: return self.resource_filter_check.isChecked() @property def _hide_collected_resources(self) -> bool: return self.hide_collected_resources_check.isChecked() @property def _collected_nodes(self) -> Set[ResourceNode]: return self._starting_nodes | set(action for action in self._actions if action.is_resource_node) def _pretty_node_name(self, node: Node) -> str: world_list = self.game_description.world_list return "{} / {}".format(world_list.area_name(world_list.nodes_to_area(node)), node.name) def _refresh_for_new_action(self): self.undo_last_action_button.setEnabled(len(self._actions) > 1) self.current_location_label.setText("Current location: {}".format(self._pretty_node_name(self._actions[-1]))) self.update_locations_tree_for_reachable_nodes() def _add_new_action(self, node: Node): self._add_new_actions([node]) def _add_new_actions(self, nodes: Iterator[Node]): for node in nodes: self.actions_list.addItem(self._pretty_node_name(node)) self._actions.append(node) self._refresh_for_new_action() def _undo_last_action(self): self._actions.pop() self.actions_list.takeItem(len(self._actions)) self._refresh_for_new_action() def _on_tree_node_double_clicked(self, item: QTreeWidgetItem, _): node: Optional[Node] = getattr(item, "node", None) if not item.isDisabled() and node is not None and node != self._actions[-1]: self._add_new_action(node) def _positions_for_world(self, world: World): g = networkx.DiGraph() world_list = self.game_description.world_list state = self.state_for_current_configuration() for area in world.areas: g.add_node(area) for area in world.areas: nearby_areas = set() for node in area.nodes: if isinstance(node, DockNode): try: target_node = world_list.resolve_dock_node(node, state.patches) nearby_areas.add(world_list.nodes_to_area(target_node)) except 
IndexError as e: print(f"For {node.name} in {area.name}, received {e}") continue for other_area in nearby_areas: g.add_edge(area, other_area) return networkx.drawing.spring_layout(g) def update_matplot_widget(self, nodes_in_reach: Set[Node]): g = networkx.DiGraph() world_list = self.game_description.world_list state = self.state_for_current_configuration() world = self.graph_map_world_combo.currentData() for area in world.areas: g.add_node(area) for area in world.areas: nearby_areas = set() for node in area.nodes: if node not in nodes_in_reach: continue if isinstance(node, DockNode): # TODO: respect is_blast_shield: if already opened once, no requirement needed. # Includes opening form behind with different criteria try: target_node = world_list.resolve_dock_node(node, state.patches) dock_weakness = state.patches.dock_weakness.get((area.area_asset_id, node.dock_index), node.default_dock_weakness) if dock_weakness.requirement.satisfied(state.resources, state.energy): nearby_areas.add(world_list.nodes_to_area(target_node)) except IndexError as e: print(f"For {node.name} in {area.name}, received {e}") continue for other_area in nearby_areas: g.add_edge(area, other_area) self.matplot_widget.ax.clear() cf = self.matplot_widget.ax.get_figure() cf.set_facecolor("w") if world.world_asset_id not in self._world_to_node_positions: self._world_to_node_positions[world.world_asset_id] = self._positions_for_world(world) pos = self._world_to_node_positions[world.world_asset_id] networkx.draw_networkx_nodes(g, pos, ax=self.matplot_widget.ax) networkx.draw_networkx_edges(g, pos, arrows=True, ax=self.matplot_widget.ax) networkx.draw_networkx_labels(g, pos, ax=self.matplot_widget.ax, labels={area: area.name for area in world.areas}, verticalalignment='top') self.matplot_widget.ax.set_axis_off() plt.draw_if_interactive() self.matplot_widget.canvas.draw() def on_graph_map_world_combo(self): nodes_in_reach = self.current_nodes_in_reach(self.state_for_current_configuration()) self.update_matplot_widget(nodes_in_reach) def current_nodes_in_reach(self, state): if state is None: nodes_in_reach = set() else: reach = ResolverReach.calculate_reach(self.logic, state) nodes_in_reach = set(reach.nodes) nodes_in_reach.add(state.node) return nodes_in_reach def _on_tab_changed(self): if self.map_tab_widget.currentWidget() == self.tab_graph_map: self.on_graph_map_world_combo() def update_locations_tree_for_reachable_nodes(self): state = self.state_for_current_configuration() nodes_in_reach = self.current_nodes_in_reach(state) if self.map_tab_widget.currentWidget() == self.tab_graph_map: self.update_matplot_widget(nodes_in_reach) all_nodes = self.game_description.world_list.all_nodes for world in self.game_description.world_list.worlds: for area in world.areas: area_is_visible = False for node in area.nodes: is_collected = node in self._collected_nodes is_visible = node in nodes_in_reach and not (self._hide_collected_resources and is_collected) if self._show_only_resource_nodes: is_visible = is_visible and node.is_resource_node node_item = self._node_to_item[node] node_item.setHidden(not is_visible) if node.is_resource_node: resource_node = typing.cast(ResourceNode, node) node_item.setDisabled(not resource_node.can_collect(state.patches, state.resources, all_nodes)) node_item.setCheckState(0, Qt.Checked if is_collected else Qt.Unchecked) area_is_visible = area_is_visible or is_visible self._asset_id_to_item[area.area_asset_id].setHidden(not area_is_visible) # Persist the current state self.persist_current_state() def 
persist_current_state(self): world_list = self.game_description.world_list with self.persistence_path.joinpath("state.json").open("w") as state_file: json.dump( { "actions": [ node.index for node in self._actions ], "collected_pickups": { pickup.name: quantity for pickup, quantity in self._collected_pickups.items() }, "elevators": { str(elevator_id): combo.currentData().as_json if combo.currentIndex() > 0 else None for elevator_id, combo in self._elevator_id_to_combo.items() }, "translator_gates": { str(gate.index): combo.currentData().index if combo.currentIndex() > 0 else None for gate, combo in self._translator_gate_to_combo.items() }, "starting_location": world_list.node_to_area_location(self._initial_state.node).as_json, }, state_file ) def setup_possible_locations_tree(self): """ Creates the possible_locations_tree with all worlds, areas and nodes. """ self.possible_locations_tree.itemDoubleClicked.connect(self._on_tree_node_double_clicked) # TODO: Dark World names for world in self.game_description.world_list.worlds: world_item = QTreeWidgetItem(self.possible_locations_tree) world_item.setText(0, world.name) world_item.setExpanded(True) self._asset_id_to_item[world.world_asset_id] = world_item for area in world.areas: area_item = QTreeWidgetItem(world_item) area_item.area = area area_item.setText(0, area.name) area_item.setHidden(True) self._asset_id_to_item[area.area_asset_id] = area_item for node in area.nodes: node_item = QTreeWidgetItem(area_item) if isinstance(node, TranslatorGateNode): node_item.setText(0, "{} ({})".format(node.name, node.gate)) else: node_item.setText(0, node.name) node_item.node = node if node.is_resource_node: node_item.setFlags(node_item.flags() & ~Qt.ItemIsUserCheckable) self._node_to_item[node] = node_item def setup_elevators(self): world_list = self.game_description.world_list nodes_by_world: Dict[str, List[TeleporterNode]] = collections.defaultdict(list) self._elevator_id_to_combo = {} areas_to_not_change = { 2278776548, # Sky Temple Gateway 2068511343, # Sky Temple Energy Controller 3136899603, # Aerie Transport Station 1564082177, # Aerie } targets = {} for world, area, node in world_list.all_worlds_areas_nodes: if isinstance(node, TeleporterNode) and node.editable and area.area_asset_id not in areas_to_not_change: name = world.correct_name(area.in_dark_aether) nodes_by_world[name].append(node) location = AreaLocation(world.world_asset_id, area.area_asset_id) targets[patcher_file.elevator_area_name(world_list, location, True)] = location if self.layout_configuration.elevators.mode == TeleporterShuffleMode.ONE_WAY_ANYTHING: targets = {} for world in world_list.worlds: for area in world.areas: name = world.correct_name(area.in_dark_aether) targets[f"{name} - {area.name}"] = AreaLocation(world.world_asset_id, area.area_asset_id) combo_targets = sorted(targets.items(), key=lambda it: it[0]) for world_name in sorted(nodes_by_world.keys()): nodes = nodes_by_world[world_name] nodes_locations = [AreaLocation(world_list.nodes_to_world(node).world_asset_id, world_list.nodes_to_area(node).area_asset_id) for node in nodes] nodes_names = [patcher_file.elevator_area_name(world_list, location, True) for location in nodes_locations] nodes = sorted(nodes_by_world[world_name], key=lambda it: world_list.nodes_to_area(it).name) group = QtWidgets.QGroupBox(self.elevators_scroll_contents) group.setTitle(world_name) self.elevators_scroll_layout.addWidget(group) layout = QtWidgets.QGridLayout(group) for i, (node, location, name) in enumerate(sorted(zip(nodes, 
nodes_locations, nodes_names), key=lambda it: it[2])): node_name = QtWidgets.QLabel(group) node_name.setText(name) layout.addWidget(node_name, i, 0) combo = QtWidgets.QComboBox(group) if self.layout_configuration.elevators.is_vanilla: combo.addItem("Vanilla", node.default_connection) combo.setEnabled(False) else: combo.addItem("Undefined", location) for target_name, connection in combo_targets: combo.addItem(target_name, connection) combo.currentIndexChanged.connect(self.update_locations_tree_for_reachable_nodes) self._elevator_id_to_combo[node.teleporter_instance_id] = combo layout.addWidget(combo, i, 1) def setup_translator_gates(self): world_list = self.game_description.world_list resource_db = self.game_description.resource_database self._translator_gate_to_combo = {} if self.layout_configuration.game != RandovaniaGame.PRIME2: return gates = { f"{area.name} ({node.gate.index})": node.gate for world, area, node in world_list.all_worlds_areas_nodes if isinstance(node, TranslatorGateNode) } translator_requirement = self.layout_configuration.translator_configuration.translator_requirement for i, (gate_name, gate) in enumerate(sorted(gates.items(), key=lambda it: it[0])): node_name = QtWidgets.QLabel(self.translator_gate_scroll_contents) node_name.setText(gate_name) self.translator_gate_scroll_layout.addWidget(node_name, i, 0) combo = QtWidgets.QComboBox(self.translator_gate_scroll_contents) gate_requirement = translator_requirement[gate] if gate_requirement in (LayoutTranslatorRequirement.RANDOM, LayoutTranslatorRequirement.RANDOM_WITH_REMOVED): combo.addItem("Undefined", self._undefined_item) for translator, index in translator_configuration.ITEM_INDICES.items(): combo.addItem(translator.long_name, resource_db.get_item(index)) else: combo.addItem(gate_requirement.long_name, resource_db.get_item(gate_requirement.item_index)) combo.setEnabled(False) combo.currentIndexChanged.connect(self.update_locations_tree_for_reachable_nodes) self._translator_gate_to_combo[gate] = combo self.translator_gate_scroll_layout.addWidget(combo, i, 1) def setup_starting_location(self, area_location: Optional[AreaLocation]): world_list = self.game_description.world_list if len(self.layout_configuration.starting_location.locations) > 1: if area_location is None: area_locations = sorted(self.layout_configuration.starting_location.locations, key=lambda it: world_list.area_name(world_list.area_by_area_location(it))) location_names = [world_list.area_name(world_list.area_by_area_location(it)) for it in area_locations] selected_name = QtWidgets.QInputDialog.getItem(self, "Starting Location", "Select starting location", location_names, 0, False) area_location = area_locations[location_names.index(selected_name[0])] self._initial_state.node = world_list.resolve_teleporter_connection(area_location) self._starting_nodes = { node for node in world_list.all_nodes if node.is_resource_node and node.resource() in self._initial_state.resources } def _change_item_quantity(self, pickup: PickupEntry, use_quantity_as_bool: bool, quantity: int): if use_quantity_as_bool: if bool(quantity): quantity = 1 else: quantity = 0 self._collected_pickups[pickup] = quantity if not self._during_setup: self.update_locations_tree_for_reachable_nodes() def bulk_change_quantity(self, new_quantity: Dict[PickupEntry, int]): self._during_setup = True for pickup, quantity in new_quantity.items(): widget = self._widget_for_pickup[pickup] if isinstance(widget, QCheckBox): widget.setChecked(quantity > 0) else: widget.setValue(quantity) self._during_setup = 
False def _create_widgets_with_quantity(self, pickup: PickupEntry, parent_widget: QWidget, parent_layout: QGridLayout, row: int, quantity: int, ): label = QLabel(parent_widget) label.setText(pickup.name) parent_layout.addWidget(label, row, 0) spin_bix = CustomSpinBox(parent_widget) spin_bix.setMaximumWidth(50) spin_bix.setMaximum(quantity) spin_bix.valueChanged.connect(functools.partial(self._change_item_quantity, pickup, False)) self._widget_for_pickup[pickup] = spin_bix parent_layout.addWidget(spin_bix, row, 1) def setup_pickups_box(self, item_pool: List[PickupEntry]): parent_widgets: Dict[ItemCategory, Tuple[QWidget, QGridLayout]] = { ItemCategory.EXPANSION: (self.expansions_box, self.expansions_layout), ItemCategory.ENERGY_TANK: (self.expansions_box, self.expansions_layout), ItemCategory.TRANSLATOR: (self.translators_box, self.translators_layout), ItemCategory.TEMPLE_KEY: (self.keys_box, self.keys_layout), ItemCategory.SKY_TEMPLE_KEY: (self.keys_box, self.keys_layout), } major_pickup_parent_widgets = (self.upgrades_box, self.upgrades_layout) row_for_parent = { self.expansions_box: 0, self.translators_box: 0, self.upgrades_box: 0, self.keys_box: 0, } column_for_parent = { self.translators_box: 0, self.upgrades_box: 0, self.keys_box: 0, } k_column_count = 2 pickup_by_name = {} pickup_with_quantity = {} for pickup in item_pool: if pickup.name in pickup_by_name: pickup_with_quantity[pickup_by_name[pickup.name]] += 1 else: pickup_by_name[pickup.name] = pickup pickup_with_quantity[pickup] = 1 non_expansions_with_quantity = [] for pickup, quantity in pickup_with_quantity.items(): self._collected_pickups[pickup] = 0 parent_widget, parent_layout = parent_widgets.get(pickup.item_category, major_pickup_parent_widgets) row = row_for_parent[parent_widget] if parent_widget is self.expansions_box: self._create_widgets_with_quantity(pickup, parent_widget, parent_layout, row, quantity) row_for_parent[parent_widget] += 1 else: if quantity > 1: non_expansions_with_quantity.append((parent_widget, parent_layout, pickup, quantity)) else: check_box = QCheckBox(parent_widget) check_box.setText(pickup.name) check_box.stateChanged.connect(functools.partial(self._change_item_quantity, pickup, True)) self._widget_for_pickup[pickup] = check_box column = column_for_parent[parent_widget] parent_layout.addWidget(check_box, row, column) column += 1 if column >= k_column_count: column = 0 row += 1 row_for_parent[parent_widget] = row column_for_parent[parent_widget] = column for parent_widget, parent_layout, pickup, quantity in non_expansions_with_quantity: if column_for_parent[parent_widget] != 0: column_for_parent[parent_widget] = 0 row_for_parent[parent_widget] += 1 self._create_widgets_with_quantity(pickup, parent_widget, parent_layout, row_for_parent[parent_widget], quantity) row_for_parent[parent_widget] += 1 def state_for_current_configuration(self) -> Optional[State]: all_nodes = self.game_description.world_list.all_nodes state = self._initial_state.copy() if self._actions: state.node = self._actions[-1] for teleporter, combo in self._elevator_id_to_combo.items(): assert combo.currentData() is not None state.patches.elevator_connection[teleporter] = combo.currentData() for gate, item in self._translator_gate_to_combo.items(): state.patches.translator_gates[gate] = item.currentData() for pickup, quantity in self._collected_pickups.items(): for _ in range(quantity): add_pickup_to_state(state, pickup) for node in self._collected_nodes: 
add_resource_gain_to_current_resources(node.resource_gain_on_collect(state.patches, state.resources, all_nodes), state.resources) return state
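# --- Illustrative addition (not part of the original file) ---
# Shape of the state.json written by persist_current_state() above. The keys
# mirror the json.dump call exactly; all values below are placeholders.
EXAMPLE_PERSISTED_STATE = {
    "actions": [0, 17],                    # node indices, in visit order
    "collected_pickups": {"Missile Expansion": 2},
    "elevators": {"1": None},              # teleporter instance id -> AreaLocation json or None
    "translator_gates": {"9": None},       # gate index -> item index or None
    "starting_location": {},               # world_list.node_to_area_location(...).as_json
}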
gpl-3.0
5,743,560,068,089,182,000
43.599725
119
0.599926
false
4.092906
true
false
false
tkw1536/GitManager
tests/commands/test_state.py
1
4328
import unittest
import unittest.mock

from GitManager.commands import state
from GitManager.repo import description
from GitManager.utils import format
from GitManager.repo import implementation


class TestState(unittest.TestCase):
    """ Tests that the state command works properly """

    @unittest.mock.patch(
        'GitManager.repo.implementation.LocalRepository')
    @unittest.mock.patch(
        'builtins.print')
    def test_run(self, builtins_print: unittest.mock.Mock,
                 implementation_LocalRepository: unittest.mock.Mock):
        # create a repository
        repo = description.RepositoryDescription('/path/to/source',
                                                 '/path/to/clone')

        # create a line
        line = format.TerminalLine()

        # and a command instance
        cmd = state.State(line, [repo], "--no-update")

        # if we are up-to-date, nothing should have been printed
        implementation_LocalRepository.return_value.exists.return_value = True
        implementation_LocalRepository.return_value.remote_status \
            .return_value = implementation.RemoteStatus.UP_TO_DATE
        self.assertTrue(cmd.run(repo))
        implementation_LocalRepository.return_value.remote_status \
            .assert_called_with(False)
        builtins_print.assert_not_called()

        # reset the mock
        implementation_LocalRepository.reset_mock()
        builtins_print.reset_mock()

        # create another command instance
        cmd = state.State(line, [repo], "--update")

        # if the local repository does not exist, the run should fail
        implementation_LocalRepository.return_value.exists.return_value = False
        self.assertFalse(cmd.run(repo))

        # reset the mock
        implementation_LocalRepository.reset_mock()
        builtins_print.reset_mock()

        # if we are up-to-date, nothing should have been printed
        implementation_LocalRepository.return_value.exists.return_value = True
        implementation_LocalRepository.return_value.remote_status \
            .return_value = implementation.RemoteStatus.UP_TO_DATE
        self.assertTrue(cmd.run(repo))
        implementation_LocalRepository.return_value.remote_status \
            .assert_called_with(True)
        builtins_print.assert_not_called()

        # reset the mock
        implementation_LocalRepository.reset_mock()
        builtins_print.reset_mock()

        # we need to pull
        implementation_LocalRepository.return_value.exists.return_value = True
        implementation_LocalRepository.return_value.remote_status \
            .return_value = implementation.RemoteStatus.REMOTE_NEWER
        self.assertFalse(cmd.run(repo))
        implementation_LocalRepository.return_value.remote_status \
            .assert_called_with(True)
        builtins_print.assert_called_with(
            format.Format.yellow('Upstream is ahead of your branch, '
                                 'pull required. '))

        # reset the mock
        implementation_LocalRepository.reset_mock()
        builtins_print.reset_mock()

        # we need to push
        implementation_LocalRepository.return_value.exists.return_value = True
        implementation_LocalRepository.return_value.remote_status \
            .return_value = implementation.RemoteStatus.LOCAL_NEWER
        self.assertFalse(cmd.run(repo))
        implementation_LocalRepository.return_value.remote_status \
            .assert_called_with(True)
        builtins_print.assert_called_with(
            format.Format.green('Your branch is ahead of upstream, '
                                'push required.'))

        # reset the mock
        implementation_LocalRepository.reset_mock()
        builtins_print.reset_mock()

        # divergence
        implementation_LocalRepository.return_value.exists.return_value = True
        implementation_LocalRepository.return_value.remote_status \
            .return_value = implementation.RemoteStatus.DIVERGENCE
        self.assertFalse(cmd.run(repo))
        implementation_LocalRepository.return_value.remote_status \
            .assert_called_with(True)
        builtins_print.assert_called_with(
            format.Format.red('Your branch and upstream have diverged, '
                              'merge or rebase required. '))
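# --- Illustrative addition (not part of the original file) ---
# The staging pattern used throughout test_run in miniature: patching a name
# replaces it with a MagicMock whose attribute chain can be primed with return
# values before the code under test runs. This is a generic stdlib sketch, not
# GitManager's API.
import subprocess
import unittest.mock

with unittest.mock.patch('subprocess.run') as run_mock:
    run_mock.return_value.returncode = 0
    assert subprocess.run(['true']).returncode == 0
    run_mock.assert_called_with(['true'])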
mit
-7,933,464,358,307,545,000
39.830189
79
0.654575
false
4.570222
true
false
false
muffinresearch/amo-validator
validator/testcases/javascript/actions.py
1
39965
from copy import deepcopy from functools import partial import sys import types # Global import of predefinedentities will cause an import loop import instanceactions from validator.constants import (BUGZILLA_BUG, DESCRIPTION_TYPES, FENNEC_GUID, FIREFOX_GUID, MAX_STR_SIZE) from validator.decorator import version_range from jstypes import JSArray, JSContext, JSLiteral, JSObject, JSWrapper NUMERIC_TYPES = (int, long, float, complex) # None of these operations (or their augmented assignment counterparts) should # be performed on non-numeric data. Any time we get non-numeric data for these # guys, we just return window.NaN. NUMERIC_OPERATORS = ('-', '*', '/', '%', '<<', '>>', '>>>', '|', '^', '&') NUMERIC_OPERATORS += tuple('%s=' % op for op in NUMERIC_OPERATORS) def get_NaN(traverser): # If we've cached the traverser's NaN instance, just use that. ncache = getattr(traverser, 'NAN_CACHE', None) if ncache is not None: return ncache # Otherwise, we need to import GLOBAL_ENTITIES and build a raw copy. from predefinedentities import GLOBAL_ENTITIES ncache = traverser._build_global('NaN', GLOBAL_ENTITIES[u'NaN']) # Cache it so we don't need to do this again. traverser.NAN_CACHE = ncache return ncache def _get_member_exp_property(traverser, node): """Return the string value of a member expression's property.""" if node['property']['type'] == 'Identifier' and not node.get('computed'): return unicode(node['property']['name']) else: eval_exp = traverser._traverse_node(node['property']) return _get_as_str(eval_exp.get_literal_value()) def _expand_globals(traverser, node): """Expands a global object that has a lambda value.""" if node.is_global and callable(node.value.get('value')): result = node.value['value'](traverser) if isinstance(result, dict): output = traverser._build_global('--', result) elif isinstance(result, JSWrapper): output = result else: output = JSWrapper(result, traverser) # Set the node context. if 'context' in node.value: traverser._debug('CONTEXT>>%s' % node.value['context']) output.context = node.value['context'] else: traverser._debug('CONTEXT>>INHERITED') output.context = node.context return output return node def trace_member(traverser, node, instantiate=False): 'Traces a MemberExpression and returns the appropriate object' traverser._debug('TESTING>>%s' % node['type']) if node['type'] == 'MemberExpression': # x.y or x[y] # x = base base = trace_member(traverser, node['object'], instantiate) base = _expand_globals(traverser, base) identifier = _get_member_exp_property(traverser, node) # Handle the various global entity properties. if base.is_global: # If we've got an XPCOM wildcard, return a copy of the entity. if 'xpcom_wildcard' in base.value: traverser._debug('MEMBER_EXP>>XPCOM_WILDCARD') from predefinedentities import CONTRACT_ENTITIES if identifier in CONTRACT_ENTITIES: kw = dict(err_id=('js', 'actions', 'dangerous_contract'), warning='Dangerous XPCOM contract ID') kw.update(CONTRACT_ENTITIES[identifier]) traverser.warning(**kw) base.value = base.value.copy() del base.value['xpcom_wildcard'] return base test_identifier(traverser, identifier) traverser._debug('MEMBER_EXP>>PROPERTY: %s' % identifier) output = base.get( traverser=traverser, instantiate=instantiate, name=identifier) output.context = base.context if base.is_global: # In the cases of XPCOM objects, methods generally # remain bound to their parent objects, even when called # indirectly. 
            output.parent = base
        return output

    elif node['type'] == 'Identifier':
        traverser._debug('MEMBER_EXP>>ROOT:IDENTIFIER')
        test_identifier(traverser, node['name'])

        # If we're supposed to instantiate the object and it doesn't already
        # exist, instantiate the object.
        if instantiate and not traverser._is_defined(node['name']):
            output = JSWrapper(JSObject(), traverser=traverser)
            traverser.contexts[0].set(node['name'], output)
        else:
            output = traverser._seek_variable(node['name'])

        return _expand_globals(traverser, output)
    else:
        traverser._debug('MEMBER_EXP>>ROOT:EXPRESSION')
        # It's an expression, so just try your damndest.
        return traverser._traverse_node(node)


def test_identifier(traverser, name):
    'Tests whether an identifier is banned'
    import predefinedentities
    if name in predefinedentities.BANNED_IDENTIFIERS:
        traverser.err.warning(
            err_id=('js', 'actions', 'banned_identifier'),
            warning='Banned or deprecated JavaScript Identifier',
            description=predefinedentities.BANNED_IDENTIFIERS[name],
            filename=traverser.filename,
            line=traverser.line,
            column=traverser.position,
            context=traverser.context)


def _function(traverser, node):
    'Prevents code duplication'

    def wrap(traverser, node):
        me = JSObject()

        traverser.function_collection.append([])

        # Replace the current context with a prototypeable JS object.
        traverser._pop_context()
        me.type_ = 'default'  # Treat the function as a normal object.
        traverser._push_context(me)
        traverser._debug('THIS_PUSH')
        traverser.this_stack.append(me)  # Allow references to "this"

        # Declare parameters in the local scope
        params = []
        for param in node['params']:
            if param['type'] == 'Identifier':
                params.append(param['name'])
            elif param['type'] == 'ArrayPattern':
                for element in param['elements']:
                    # Array destructuring in function prototypes? LOL!
                    if element is None or element['type'] != 'Identifier':
                        continue
                    params.append(element['name'])

        local_context = traverser._peek_context(1)
        for param in params:
            var = JSWrapper(lazy=True, traverser=traverser)

            # We can assume that the params are static because we don't care
            # about what calls the function. We want to know whether the
            # function solely returns static values. If so, it is a static
            # function.
            local_context.set(param, var)

        traverser._traverse_node(node['body'])

        # Since we need to manually manage the "this" stack, pop off that
        # context.
        traverser._debug('THIS_POP')
        traverser.this_stack.pop()

        # Call all of the function collection's members to traverse all of
        # the child functions.
        func_coll = traverser.function_collection.pop()
        for func in func_coll:
            func()

    # Put the function off for traversal at the end of the current block scope.
traverser.function_collection[-1].append(partial(wrap, traverser, node)) return JSWrapper(traverser=traverser, callable=True, dirty=True) def _define_function(traverser, node): me = _function(traverser, node) traverser._peek_context(2).set(node['id']['name'], me) return me def _func_expr(traverser, node): 'Represents a lambda function' return _function(traverser, node) def _define_with(traverser, node): 'Handles `with` statements' object_ = traverser._traverse_node(node['object']) if isinstance(object_, JSWrapper) and isinstance(object_.value, JSObject): traverser.contexts[-1] = object_.value traverser.contexts.append(JSContext('block')) return def _define_var(traverser, node): 'Creates a local context variable' traverser._debug('VARIABLE_DECLARATION') traverser.debug_level += 1 declarations = (node['declarations'] if 'declarations' in node else node['head']) kind = node.get('kind', 'let') for declaration in declarations: # It could be deconstruction of variables :( if declaration['id']['type'] == 'ArrayPattern': vars = [] for element in declaration['id']['elements']: # NOTE : Multi-level array destructuring sucks. Maybe implement # it someday if you're bored, but it's so rarely used and it's # so utterly complex, there's probably no need to ever code it # up. if element is None or element['type'] != 'Identifier': vars.append(None) continue vars.append(element['name']) # The variables are not initialized if declaration['init'] is None: # Simple instantiation; no initialization for var in vars: if not var: continue traverser._declare_variable(var, None) # The variables are declared inline elif declaration['init']['type'] == 'ArrayPattern': # TODO : Test to make sure len(values) == len(vars) for value in declaration['init']['elements']: if vars[0]: traverser._declare_variable( vars[0], JSWrapper(traverser._traverse_node(value), traverser=traverser)) vars = vars[1:] # Pop off the first value # It's being assigned by a JSArray (presumably) elif declaration['init']['type'] == 'ArrayExpression': assigner = traverser._traverse_node(declaration['init']) for value in assigner.value.elements: if vars[0]: traverser._declare_variable(vars[0], value) vars = vars[1:] elif declaration['id']['type'] == 'ObjectPattern': init = traverser._traverse_node(declaration['init']) def _proc_objpattern(init_obj, properties): for prop in properties: # Get the name of the init obj's member if prop['key']['type'] == 'Literal': prop_name = prop['key']['value'] elif prop['key']['type'] == 'Identifier': prop_name = prop['key']['name'] else: continue if prop['value']['type'] == 'Identifier': traverser._declare_variable( prop['value']['name'], init_obj.get(traverser, prop_name)) elif prop['value']['type'] == 'ObjectPattern': _proc_objpattern(init_obj.get(traverser, prop_name), prop['value']['properties']) if init is not None: _proc_objpattern(init_obj=init, properties=declaration['id']['properties']) else: var_name = declaration['id']['name'] traverser._debug('NAME>>%s' % var_name) var_value = traverser._traverse_node(declaration['init']) traverser._debug('VALUE>>%s' % (var_value.output() if var_value is not None else 'None')) if not isinstance(var_value, JSWrapper): var = JSWrapper(value=var_value, const=kind == 'const', traverser=traverser) else: var = var_value var.const = kind == 'const' traverser._declare_variable(var_name, var, type_=kind) if 'body' in node: traverser._traverse_node(node['body']) traverser.debug_level -= 1 # The "Declarations" branch contains custom elements. 
return True def _define_obj(traverser, node): 'Creates a local context object' var = JSObject() for prop in node['properties']: if prop['type'] == 'PrototypeMutation': var_name = 'prototype' else: key = prop['key'] if key['type'] == 'Literal': var_name = key['value'] elif isinstance(key['name'], basestring): var_name = key['name'] else: if 'property' in key['name']: name = key['name'] else: name = {'property': key['name']} var_name = _get_member_exp_property(traverser, name) var_value = traverser._traverse_node(prop['value']) var.set(var_name, var_value, traverser) # TODO: Observe "kind" if not isinstance(var, JSWrapper): return JSWrapper(var, lazy=True, traverser=traverser) var.lazy = True return var def _define_array(traverser, node): """Instantiate an array object from the parse tree.""" arr = JSArray() arr.elements = map(traverser._traverse_node, node['elements']) return arr def _define_template_strings(traverser, node): """Instantiate an array of raw and cooked template strings.""" cooked = JSArray() cooked.elements = map(traverser._traverse_node, node['cooked']) raw = JSArray() raw.elements = map(traverser._traverse_node, node['raw']) cooked.set('raw', raw, traverser) return cooked def _define_template(traverser, node): """Instantiate a template literal.""" elements = map(traverser._traverse_node, node['elements']) return reduce(partial(_binary_op, '+', traverser=traverser), elements) def _define_literal(traverser, node): """ Convert a literal node in the parse tree to its corresponding interpreted value. """ value = node['value'] if isinstance(value, dict): return JSWrapper(JSObject(), traverser=traverser, dirty=True) wrapper = JSWrapper(value if value is not None else JSLiteral(None), traverser=traverser) test_literal(traverser, wrapper) return wrapper def test_literal(traverser, wrapper): """ Test the value of a literal, in particular only a string literal at the moment, against possibly dangerous patterns. """ value = wrapper.get_literal_value() if isinstance(value, basestring): # Local import to prevent import loop. from validator.testcases.regex import (validate_compat_pref, validate_string) validate_string(value, traverser, wrapper=wrapper) validate_compat_pref(value, traverser, wrapper=wrapper) def _call_expression(traverser, node): args = node['arguments'] for arg in args: traverser._traverse_node(arg, source='arguments') member = traverser._traverse_node(node['callee']) if (traverser.filename.startswith('defaults/preferences/') and ('name' not in node['callee'] or node['callee']['name'] not in (u'pref', u'user_pref'))): traverser.err.warning( err_id=('testcases_javascript_actions', '_call_expression', 'complex_prefs_defaults_code'), warning='Complex code should not appear in preference defaults ' 'files', description="Calls to functions other than 'pref' and 'user_pref' " 'should not appear in defaults/preferences/ files.', filename=traverser.filename, line=traverser.line, column=traverser.position, context=traverser.context) if member.is_global and callable(member.value.get('dangerous', None)): result = member.value['dangerous'](a=args, t=traverser._traverse_node, e=traverser.err) name = member.value.get('name', '') if result and name: kwargs = { 'err_id': ('testcases_javascript_actions', '_call_expression', 'called_dangerous_global'), 'warning': '`%s` called in potentially dangerous manner' % member.value['name'], 'description': 'The global `%s` function was called using a set ' 'of dangerous parameters. Calls of this nature ' 'are deprecated.' 
% member.value['name']} if isinstance(result, DESCRIPTION_TYPES): kwargs['description'] = result elif isinstance(result, dict): kwargs.update(result) traverser.warning(**kwargs) elif (node['callee']['type'] == 'MemberExpression' and node['callee']['property']['type'] == 'Identifier'): # If we can identify the function being called on any member of any # instance, we can use that to either generate an output value or test # for additional conditions. identifier_name = node['callee']['property']['name'] if identifier_name in instanceactions.INSTANCE_DEFINITIONS: result = instanceactions.INSTANCE_DEFINITIONS[identifier_name]( args, traverser, node, wrapper=member) return result if member.is_global and 'return' in member.value: if 'object' in node['callee']: member.parent = trace_member(traverser, node['callee']['object']) return member.value['return'](wrapper=member, arguments=args, traverser=traverser) return JSWrapper(JSObject(), dirty=True, traverser=traverser) def _call_settimeout(a, t, e): """ Handler for setTimeout and setInterval. Should determine whether a[0] is a lambda function or a string. Strings are banned, lambda functions are ok. Since we can't do reliable type testing on other variables, we flag those, too. """ if not a: return if a[0]['type'] in ('FunctionExpression', 'ArrowFunctionExpression'): return if t(a[0]).callable: return return {'err_id': ('javascript', 'dangerous_global', 'eval'), 'description': 'In order to prevent vulnerabilities, the `setTimeout` ' 'and `setInterval` functions should be called only with ' 'function expressions as their first argument.', 'signing_help': ( 'Please do not ever call `setTimeout` or `setInterval` with ' 'string arguments. If you are passing a function which is ' 'not being correctly detected as such, please consider ' 'passing a closure or arrow function, which in turn calls ' 'the original function.'), 'signing_severity': 'high'} def _call_require(a, t, e): """ Tests for unsafe uses of `require()` in SDK add-ons. """ args, traverse, err = a, t, e # Only analyze `require()` calls in Jetpack add-ons, and only when # arguments are actually passed; otherwise args[0] below would fail. if not err.metadata.get('is_jetpack') or not args: return module = traverse(args[0]).get_literal_value() if not isinstance(module, basestring): return if module.startswith('sdk/'): module = module[len('sdk/'):] LOW_LEVEL = { # Added from bugs 689340, 731109 'chrome', 'window-utils', 'observer-service', # Added from bug 845492 'window/utils', 'sdk/window/utils', 'sdk/deprecated/window-utils', 'tab/utils', 'sdk/tab/utils', 'system/events', 'sdk/system/events', } if module in LOW_LEVEL: err.metadata['requires_chrome'] = True return {'warning': 'Usage of low-level or non-SDK interface', 'description': 'Your add-on uses an interface which bypasses ' 'the high-level protections of the add-on SDK. ' 'This interface should be avoided, and its use ' 'may significantly complicate your review ' 'process.'} if module == 'widget': return {'warning': 'Use of deprecated SDK module', 'description': "The 'widget' module has been deprecated due to a number " 'of performance and usability issues, and has been ' 'removed from the SDK as of Firefox 40. Please use the ' "'sdk/ui/button/action' or 'sdk/ui/button/toggle' module " 'instead. See ' 'https://developer.mozilla.org/Add-ons/SDK/High-Level_APIs' '/ui for more information.'} def _call_create_pref(a, t, e): """ Handler for pref() and user_pref() calls in defaults/preferences/*.js files to ensure that they don't touch preferences outside of the "extensions." branch. """ # We really need to clean up the arguments passed to these functions.
traverser = t.im_self if not traverser.filename.startswith('defaults/preferences/') or not a: return instanceactions.set_preference(JSWrapper(JSLiteral(None), traverser=traverser), a, traverser) value = _get_as_str(t(a[0])) return test_preference(value) def test_preference(value): for branch in 'extensions.', 'services.sync.prefs.sync.extensions.': if value.startswith(branch) and value.rindex('.') > len(branch): return return ('Extensions should not alter preferences outside of the ' "'extensions.' preference branch. Please make sure that " "all of your extension's preferences are prefixed with " "'extensions.add-on-name.', where 'add-on-name' is a " 'distinct string unique to and indicative of your add-on.') def _readonly_top(traverser, right, node_right): """Handle the readonly callback for window.top.""" traverser.notice( err_id=('testcases_javascript_actions', '_readonly_top'), notice='window.top is a reserved variable', description='The `top` global variable is reserved and cannot be ' 'assigned any values starting with Gecko 6. Review your ' 'code for any uses of the `top` global, and refer to ' '%s for more information.' % BUGZILLA_BUG % 654137, for_appversions={FIREFOX_GUID: version_range('firefox', '6.0a1', '7.0a1'), FENNEC_GUID: version_range('fennec', '6.0a1', '7.0a1')}, compatibility_type='warning', tier=5) def _expression(traverser, node): """ This is a helper method that allows node definitions to point at `_traverse_node` without needing a reference to a traverser. """ return traverser._traverse_node(node['expression']) def _get_this(traverser, node): 'Returns the `this` object' if not traverser.this_stack: from predefinedentities import GLOBAL_ENTITIES return traverser._build_global('window', GLOBAL_ENTITIES[u'window']) return traverser.this_stack[-1] def _new(traverser, node): 'Returns a new copy of a node.' # We don't actually process the arguments as part of the flow because of # the Angry T-Rex effect. For now, we just traverse them to ensure they # don't contain anything dangerous. args = node['arguments'] if isinstance(args, list): for arg in args: traverser._traverse_node(arg, source='arguments') else: traverser._traverse_node(args) elem = traverser._traverse_node(node['callee']) if not isinstance(elem, JSWrapper): elem = JSWrapper(elem, traverser=traverser) if elem.is_global: traverser._debug('Making overwritable') elem.value = deepcopy(elem.value) elem.value['overwritable'] = True return elem def _ident(traverser, node): 'Initiates an object lookup on the traverser based on an identifier token' name = node['name'] # Ban bits like "newThread" test_identifier(traverser, name) if traverser._is_defined(name): return traverser._seek_variable(name) return JSWrapper(JSObject(), traverser=traverser, dirty=True) def _expr_assignment(traverser, node): """Evaluate an AssignmentExpression node.""" traverser._debug('ASSIGNMENT_EXPRESSION') traverser.debug_level += 1 traverser._debug('ASSIGNMENT>>PARSING RIGHT') right = traverser._traverse_node(node['right']) right = JSWrapper(right, traverser=traverser) # Treat direct assignment different than augmented assignment. if node['operator'] == '=': from predefinedentities import GLOBAL_ENTITIES, is_shared_scope global_overwrite = False readonly_value = is_shared_scope(traverser) node_left = node['left'] traverser._debug('ASSIGNMENT:DIRECT(%s)' % node_left['type']) if node_left['type'] == 'Identifier': # Identifiers just need the ID name and a value to push. # Raise a global overwrite issue if the identifier is global. 
global_overwrite = traverser._is_global(node_left['name']) # Get the readonly attribute and store its value if is_global if global_overwrite: global_dict = GLOBAL_ENTITIES[node_left['name']] if 'readonly' in global_dict: readonly_value = global_dict['readonly'] traverser._declare_variable(node_left['name'], right, type_='glob') elif node_left['type'] == 'MemberExpression': member_object = trace_member(traverser, node_left['object'], instantiate=True) global_overwrite = (member_object.is_global and not ('overwritable' in member_object.value and member_object.value['overwritable'])) member_property = _get_member_exp_property(traverser, node_left) traverser._debug('ASSIGNMENT:MEMBER_PROPERTY(%s)' % member_property) traverser._debug('ASSIGNMENT:GLOB_OV::%s' % global_overwrite) # Don't do the assignment if we're facing a global. if not member_object.is_global: if member_object.value is None: member_object.value = JSObject() if not member_object.is_global: member_object.value.set(member_property, right, traverser) else: # It's probably better to do nothing. pass elif 'value' in member_object.value: member_object_value = _expand_globals(traverser, member_object).value if member_property in member_object_value['value']: # If it's a global and the actual member exists, test # whether it can be safely overwritten. member = member_object_value['value'][member_property] if 'readonly' in member: global_overwrite = True readonly_value = member['readonly'] traverser._debug('ASSIGNMENT:DIRECT:GLOB_OVERWRITE %s' % global_overwrite) traverser._debug('ASSIGNMENT:DIRECT:READONLY %r' % readonly_value) if callable(readonly_value): readonly_value = readonly_value(traverser, right, node['right']) if readonly_value and global_overwrite: kwargs = dict( err_id=('testcases_javascript_actions', '_expr_assignment', 'global_overwrite'), warning='Global variable overwrite', description='An attempt was made to overwrite a global ' 'variable in some JavaScript code.') if isinstance(readonly_value, DESCRIPTION_TYPES): kwargs['description'] = readonly_value elif isinstance(readonly_value, dict): kwargs.update(readonly_value) traverser.warning(**kwargs) return right lit_right = right.get_literal_value() traverser._debug('ASSIGNMENT>>PARSING LEFT') left = traverser._traverse_node(node['left']) traverser._debug('ASSIGNMENT>>DONE PARSING LEFT') traverser.debug_level -= 1 if isinstance(left, JSWrapper): if left.dirty: return left lit_left = left.get_literal_value() token = node['operator'] # Don't perform an operation on None. Python freaks out if lit_left is None: lit_left = 0 if lit_right is None: lit_right = 0 # Give them default values so we have them in scope. gleft, gright = 0, 0 # All of the assignment operators operators = {'=': lambda: right, '+=': lambda: lit_left + lit_right, '-=': lambda: gleft - gright, '*=': lambda: gleft * gright, '/=': lambda: 0 if gright == 0 else (gleft / gright), '%=': lambda: 0 if gright == 0 else (gleft % gright), '<<=': lambda: int(gleft) << int(gright), '>>=': lambda: int(gleft) >> int(gright), '>>>=': lambda: float(abs(int(gleft)) >> gright), '|=': lambda: int(gleft) | int(gright), '^=': lambda: int(gleft) ^ int(gright), '&=': lambda: int(gleft) & int(gright)} # If we're modifying a non-numeric type with a numeric operator, return # NaN. if (not isinstance(lit_left, NUMERIC_TYPES) and token in NUMERIC_OPERATORS): left.set_value(get_NaN(traverser), traverser=traverser) return left # If either side of the assignment operator is a string, both sides # need to be casted to strings first. 
if (isinstance(lit_left, types.StringTypes) or isinstance(lit_right, types.StringTypes)): lit_left = _get_as_str(lit_left) lit_right = _get_as_str(lit_right) gleft, gright = _get_as_num(left), _get_as_num(right) traverser._debug('ASSIGNMENT>>OPERATION:%s' % token) if token not in operators: # We don't support that operator. (yet?) traverser._debug('ASSIGNMENT>>OPERATOR NOT FOUND', 1) return left elif token in ('<<=', '>>=', '>>>=') and gright < 0: # The user is doing weird bitshifting that will return 0 in JS but # not in Python. left.set_value(0, traverser=traverser) return left elif (token in ('<<=', '>>=', '>>>=', '|=', '^=', '&=') and (abs(gleft) == float('inf') or abs(gright) == float('inf'))): # Don't bother handling infinity for integer-converted operations. left.set_value(get_NaN(traverser), traverser=traverser) return left traverser._debug('ASSIGNMENT::L-value global? (%s)' % ('Y' if left.is_global else 'N'), 1) try: new_value = operators[token]() except Exception: traverser.system_error(exc_info=sys.exc_info()) new_value = None # Cap the length of analyzed strings. if (isinstance(new_value, types.StringTypes) and len(new_value) > MAX_STR_SIZE): new_value = new_value[:MAX_STR_SIZE] traverser._debug('ASSIGNMENT::New value >> %s' % new_value, 1) left.set_value(new_value, traverser=traverser) return left # Though it would otherwise be a syntax error, we say that 4=5 should # evaluate out to 5. return right def _expr_binary(traverser, node): 'Evaluates a BinaryExpression node.' traverser.debug_level += 1 # Select the proper operator. operator = node['operator'] traverser._debug('BIN_OPERATOR>>%s' % operator) # Traverse the left half of the binary expression. with traverser._debug('BIN_EXP>>l-value'): if (node['left']['type'] == 'BinaryExpression' and '__traversal' not in node['left']): # Process the left branch of the binary expression directly. This # keeps the recursion cap in line and speeds up processing of # large chains of binary expressions. left = _expr_binary(traverser, node['left']) node['left']['__traversal'] = left else: left = traverser._traverse_node(node['left']) # Traverse the right half of the binary expression. with traverser._debug('BIN_EXP>>r-value'): if (operator == 'instanceof' and node['right']['type'] == 'Identifier' and node['right']['name'] == 'Function'): # We make an exception for instanceof's r-value if it's a # dangerous global, specifically Function. return JSWrapper(True, traverser=traverser) else: right = traverser._traverse_node(node['right']) traverser._debug('Is dirty? %r' % right.dirty, 1) return _binary_op(operator, left, right, traverser) def _binary_op(operator, left, right, traverser): """Perform a binary operation on two pre-traversed nodes.""" # Dirty l or r values mean we can skip the expression. A dirty value # indicates that a lazy operation took place that introduced some # nondeterminacy. # FIXME(Kris): We should process these as if they're strings anyway. if left.dirty: return left elif right.dirty: return right # Binary expressions are only executed on literals. left = left.get_literal_value() right_wrap = right right = right.get_literal_value() # Coerce the literals to numbers for numeric operations. gleft = _get_as_num(left) gright = _get_as_num(right) operators = { '==': lambda: left == right or gleft == gright, '!=': lambda: left != right, '===': lambda: left == right, # Be flexible. 
'!==': lambda: type(left) != type(right) or left != right, '>': lambda: left > right, '<': lambda: left < right, '<=': lambda: left <= right, '>=': lambda: left >= right, '<<': lambda: int(gleft) << int(gright), '>>': lambda: int(gleft) >> int(gright), '>>>': lambda: float(abs(int(gleft)) >> int(gright)), '+': lambda: left + right, '-': lambda: gleft - gright, '*': lambda: gleft * gright, '/': lambda: 0 if gright == 0 else (gleft / gright), '%': lambda: 0 if gright == 0 else (gleft % gright), 'in': lambda: right_wrap.contains(left), # TODO : implement instanceof # FIXME(Kris): Treat instanceof the same as `QueryInterface` } output = None if (operator in ('>>', '<<', '>>>') and (left is None or right is None or gright < 0)): output = False elif operator in operators: # Concatenation can be silly, so always turn undefineds into empty # strings and if there are strings, make everything strings. if operator == '+': if left is None: left = '' if right is None: right = '' if isinstance(left, basestring) or isinstance(right, basestring): left = _get_as_str(left) right = _get_as_str(right) # Don't even bother handling infinity if it's a numeric computation. if (operator in ('<<', '>>', '>>>') and (abs(gleft) == float('inf') or abs(gright) == float('inf'))): return get_NaN(traverser) try: output = operators[operator]() except Exception: traverser.system_error(exc_info=sys.exc_info()) output = None # Cap the length of analyzed strings. if (isinstance(output, types.StringTypes) and len(output) > MAX_STR_SIZE): output = output[:MAX_STR_SIZE] wrapper = JSWrapper(output, traverser=traverser) # Test the newly-created literal for dangerous values. # This may cause duplicate warnings for strings which # already match a dangerous value prior to concatenation. test_literal(traverser, wrapper) return wrapper return JSWrapper(output, traverser=traverser) def _expr_unary(traverser, node): """Evaluate a UnaryExpression node.""" expr = traverser._traverse_node(node['argument']) expr_lit = expr.get_literal_value() expr_num = _get_as_num(expr_lit) operators = {'-': lambda: -1 * expr_num, '+': lambda: expr_num, '!': lambda: not expr_lit, '~': lambda: -1 * (expr_num + 1), 'void': lambda: None, 'typeof': lambda: _expr_unary_typeof(expr), 'delete': lambda: None} # We never want to empty the context if node['operator'] in operators: output = operators[node['operator']]() else: output = None if not isinstance(output, JSWrapper): output = JSWrapper(output, traverser=traverser) return output def _expr_unary_typeof(wrapper): """Evaluate the "typeof" value for a JSWrapper object.""" if (wrapper.callable or (wrapper.is_global and 'return' in wrapper.value and 'value' not in wrapper.value)): return 'function' value = wrapper.value if value is None: return 'undefined' elif isinstance(value, JSLiteral): value = value.value if isinstance(value, bool): return 'boolean' elif isinstance(value, (int, long, float)): return 'number' elif isinstance(value, types.StringTypes): return 'string' return 'object' def _get_as_num(value): """Return the JS numeric equivalent for a value.""" if isinstance(value, JSWrapper): value = value.get_literal_value() if value is None: return 0 try: if isinstance(value, types.StringTypes): if value.startswith('0x'): return int(value, 16) else: return float(value) elif isinstance(value, (int, float, long)): return value else: return int(value) except (ValueError, TypeError): return 0 def _get_as_str(value): """Return the JS string equivalent for a literal value.""" if isinstance(value, JSWrapper): value = 
value.get_literal_value() if value is None: return '' if isinstance(value, bool): return u'true' if value else u'false' elif isinstance(value, (int, float, long)): if value == float('inf'): return u'Infinity' elif value == float('-inf'): return u'-Infinity' # Try to see if we can shave off some trailing significant figures. try: if int(value) == value: return unicode(int(value)) except ValueError: pass return unicode(value)
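# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: _get_as_num() and
# _get_as_str() above mirror JavaScript's loose coercion rules. The mini-copy
# below is a simplification that exists only so the behaviour can be
# demonstrated without the validator's JSWrapper machinery.
def _demo_get_as_num(value):
    if value is None:
        return 0  # null coerces to 0, as in JS
    try:
        if isinstance(value, str) and value.startswith('0x'):
            return int(value, 16)  # hex string literals are honoured
        return float(value)
    except (ValueError, TypeError):
        return 0  # unparseable input falls back to 0 rather than raising

assert _demo_get_as_num(None) == 0
assert _demo_get_as_num('0x10') == 16
assert _demo_get_as_num('bogus') == 0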
bsd-3-clause
-7,636,647,010,772,091,000
36.490619
79
0.573702
false
4.217052
true
false
false
gplssm/europepstrans
europepstrans/results/__init__.py
1
13654
""" TimeFrameResults steals methods from oemof.outputlib adapted to the structure applied here. Most relevant difference is results data stored in self.data """ from oemof.outputlib import DataFramePlot, ResultsDataFrame import pickle from matplotlib import pyplot as plt import logging import pandas as pd class TimeFrameResults: """ Container for results of one time frame (i.e. one year) Attributes ---------- data : DataFrame Structure multi-indexed result data """ def __init__(self, **kwargs): """ Initializes data object based on oemof results class """ results_file = kwargs.get('results_file', None) self.subset = kwargs.get('subset', None) self.ax = kwargs.get('ax') if results_file is None: # self.data = DataFramePlot(energy_system=kwargs.get('energy_system')) self.data = ResultsDataFrame(energy_system=kwargs.get('energy_system')) else: self.data = pickle.load(open(results_file, 'rb')) self.reformat_data() def preview(self): """ Print short preview of data """ return self.data.head() def reformat_data(self): """ Extract region information from bus label put into separate index label """ # TODO: get regions list from elsewhere regions = ['deu', 'xfra', 'xbnl'] regions_leading_underscore = ['_' + x for x in regions] # put bus_label to column (required to work on) self.data.reset_index(level='bus_label', inplace=True) self.data.reset_index(level='obj_label', inplace=True) # extra region from bus label and write to new column self.data['region'] = self.data['bus_label'].str.extract( r"(?=(" + '|'.join(regions) + r"))", expand=True) self.data['region'].fillna('global', inplace=True) # remove region from bus_label and obj_label self.data['bus_label'] = self.data['bus_label'].str.replace( r"(" + '|'.join(regions_leading_underscore) + r")", '') self.data['obj_label'] = self.data['obj_label'].str.replace( r"(" + '|'.join(regions_leading_underscore) + r")", '') # put bus_label back to index self.data = self.data.set_index(['bus_label', 'region', 'obj_label'], append=True) # reorder and resort levels level_order = ['bus_label', 'type', 'obj_label', 'region', 'datetime'] self.data = self.data.reorder_levels(level_order) def slice_by(self, **kwargs): r""" Method for slicing the ResultsDataFrame. A subset is returned. Parameters ---------- bus_label : string type : string (to_bus/from_bus/other) obj_label: string date_from : string Start date selection e.g. "2016-01-01 00:00:00". If not set, the whole time range will be plotted. date_to : string End date selection e.g. "2016-03-01 00:00:00". If not set, the whole time range will be plotted. """ kwargs.setdefault('bus_label', slice(None)) kwargs.setdefault('type', slice(None)) kwargs.setdefault('obj_label', slice(None)) kwargs.setdefault( 'date_from', self.data.index.get_level_values('datetime')[0]) kwargs.setdefault( 'date_to', self.data.index.get_level_values('datetime')[-1]) # slicing idx = pd.IndexSlice subset = self.data.loc[idx[ kwargs['bus_label'], kwargs['type'], kwargs['obj_label'], slice(pd.Timestamp(kwargs['date_from']), pd.Timestamp(kwargs['date_to']))], :] return subset def slice_unstacked(self, unstacklevel='obj_label', formatted=False, **kwargs): r"""Method for slicing the ResultsDataFrame. An unstacked subset is returned. Parameters ---------- unstacklevel : string (default: 'obj_label') Level to unstack the subset of the DataFrame. formatted : boolean missing... 
""" subset = self.slice_by(**kwargs) subset = subset.unstack(level=unstacklevel) if formatted is True: subset.reset_index(level=['bus_label', 'type'], drop=True, inplace=True) # user standard insteadt of multi-indexed columns subset.columns = subset.columns.get_level_values(1).unique() # return subset self.subset = subset def plot(self, **kwargs): r""" Passing the data attribute to the pandas plotting method. All parameters will be directly passed to pandas.DataFrame.plot(). See http://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.plot.html for more information. Returns ------- self """ self.ax = self.subset.plot(**kwargs) return self def io_plot(self, bus_label, cdict, line_kwa=None, lineorder=None, bar_kwa=None, barorder=None, **kwargs): r""" Plotting a combined bar and line plot to see the fitting of in- and outcomming flows of a bus balance. Parameters ---------- bus_label : string Uid of the bus to plot the balance. cdict : dictionary A dictionary that has all possible components as keys and its colors as items. line_kwa : dictionary Keyword arguments to be passed to the pandas line plot. bar_kwa : dictionary Keyword arguments to be passed to the pandas bar plot. lineorder : list Order of columns to plot the line plot barorder : list Order of columns to plot the bar plot Note ---- Further keyword arguments will be passed to the :class:`slice_unstacked method <DataFramePlot.slice_unstacked>`. Returns ------- handles, labels Manipulated labels to correct the unsual construction of the stack line plot. You can use them for further maipulations. """ self.ax = kwargs.get('ax', self.ax) if bar_kwa is None: bar_kwa = dict() if line_kwa is None: line_kwa = dict() if self.ax is None: fig = plt.figure() self.ax = fig.add_subplot(1, 1, 1) # Create a bar plot for all input flows self.slice_unstacked(bus_label=bus_label, type='to_bus', **kwargs) if barorder is not None: self.rearrange_subset(barorder) self.subset.plot(kind='bar', linewidth=0, stacked=True, width=1, ax=self.ax, color=self.color_from_dict(cdict), **bar_kwa) # Create a line plot for all output flows self.slice_unstacked(bus_label=bus_label, type='from_bus', **kwargs) if lineorder is not None: self.rearrange_subset(lineorder) # The following changes are made to have the bottom line on top layer # of all lines. Normally the bottom line is the first line that is # plotted and will be on the lowest layer. This is difficult to read. 
new_df = pd.DataFrame(index=self.subset.index) n = 0 tmp = 0 for col in self.subset.columns: if n < 1: new_df[col] = self.subset[col] else: new_df[col] = self.subset[col] + tmp tmp = new_df[col] n += 1 if lineorder is None: new_df.sort_index(axis=1, ascending=False, inplace=True) else: lineorder.reverse() new_df = new_df[lineorder] colorlist = self.color_from_dict(cdict) if isinstance(colorlist, list): colorlist.reverse() separator = len(colorlist) new_df.plot(kind='line', ax=self.ax, color=colorlist, drawstyle='steps-mid', **line_kwa) # Adapt the legend to the new order handles, labels = self.ax.get_legend_handles_labels() tmp_lab = [x for x in reversed(labels[0:separator])] tmp_hand = [x for x in reversed(handles[0:separator])] handles = tmp_hand + handles[separator:] labels = tmp_lab + labels[separator:] labels.reverse() handles.reverse() self.ax.legend(handles, labels) return handles, labels def rearrange_subset(self, order): r""" Change the order of the subset DataFrame Parameters ---------- order : list New order of columns Returns ------- self """ cols = list(self.subset.columns.values) neworder = [x for x in list(order) if x in set(cols)] missing = [x for x in list(cols) if x not in set(order)] if len(missing) > 0: logging.warning( "Columns that are not part of the order list are removed: " + str(missing)) self.subset = self.subset[neworder] def color_from_dict(self, colordict): r""" Method to convert a dictionary containing the components and their colors to a color list that can be directly used with the color parameter of the pandas plotting method. Parameters ---------- colordict : dictionary A dictionary that has all possible components as keys and their colors as items. Returns ------- list Containing the colors of all components of the subset attribute """ tmplist = list( map(colordict.get, list(self.subset.columns))) tmplist = ['#00FFFF' if v is None else v for v in tmplist] if len(tmplist) == 1: colorlist = tmplist[0] else: colorlist = tmplist return colorlist def set_datetime_ticks(self, tick_distance=None, number_autoticks=3, date_format='%d-%m-%Y %H:%M'): r""" Set configurable ticks for the time axis. One can choose the number of ticks or the distance between ticks and the format. Parameters ---------- tick_distance : real The distance between two ticks in hours. If not set, autoticks are used (see number_autoticks). number_autoticks : int (default: 3) The number of ticks on the time axis, independent of the time range. The higher the number of ticks is, the shorter should be the date_format string. date_format : string (default: '%d-%m-%Y %H:%M') The string to define the format of the date and time. See https://docs.python.org/3/library/datetime.html#strftime-and-strptime-behavior for more information. """ dates = self.subset.index.get_level_values('datetime').unique() if tick_distance is None: tick_distance = int(len(dates) / number_autoticks) - 1 self.ax.set_xticks(range(0, len(dates), tick_distance), minor=False) self.ax.set_xticklabels( [item.strftime(date_format) for item in dates.tolist()[0::tick_distance]], rotation=0, minor=False) def outside_legend(self, reverse=False, plotshare=0.9, **kwargs): r""" Move the legend outside the plot. Based on the ideas of Joe Kington. See http://stackoverflow.com/questions/4700614/how-to-put-the-legend-out-of-the-plot for more information. Parameters ---------- reverse : boolean (default: False) Print out the legend in reverse order. This is interesting for stack-plots to have the legend in the same order as the stacks.
plotshare : real (default: 0.9) Share of the plot area to create space for the legend (0 to 1). loc : string (default: 'center left') Location of the legend. bbox_to_anchor : tuple (default: (1, 0.5)) Set the anchor for the legend. ncol : integer (default: 1) Number of columns of the legend. handles : list of handles A list of handles if they are already modified by another function or method. Normally these handles will be automatically taken from the artist object. labels : list of labels A list of labels if they are already modified by another function or method. Normally these labels will be automatically taken from the artist object. Note ---- All keyword arguments (kwargs) will be directly passed to the matplotlib legend class. See http://matplotlib.org/api/legend_api.html#matplotlib.legend.Legend for more parameters. """ kwargs.setdefault('loc', 'center left') kwargs.setdefault('bbox_to_anchor', (1, 0.5)) kwargs.setdefault('ncol', 1) handles = kwargs.pop('handles', self.ax.get_legend_handles_labels()[0]) labels = kwargs.pop('labels', self.ax.get_legend_handles_labels()[1]) if reverse: handles.reverse() labels.reverse() box = self.ax.get_position() self.ax.set_position([box.x0, box.y0, box.width * plotshare, box.height]) self.ax.legend(handles, labels, **kwargs) if __name__ == '__main__': pass
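# ---------------------------------------------------------------------------
# Illustrative usage sketch, not part of the original module. The bus label
# and colour mapping below are hypothetical placeholders; `results` is
# assumed to be a TimeFrameResults instance built elsewhere from a pickle
# file or an oemof energy system.
def plot_bus_balance(results, bus_label='elec', cdict=None):
    """Show the intended call sequence for a bus balance plot."""
    cdict = cdict or {'pp_coal': '#755d5d', 'demand_elec': '#ce4aff'}
    handles, labels = results.io_plot(bus_label, cdict,
                                      line_kwa={'linewidth': 2},
                                      bar_kwa={'alpha': 0.8})
    results.set_datetime_ticks(number_autoticks=4)
    results.outside_legend(handles=handles, labels=labels, reverse=True)
    return results.ax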
gpl-3.0
-7,392,969,019,323,245,000
35.413333
90
0.576388
false
4.224629
false
false
false
bollu/polymage
sandbox/apps/python/img_proc/interpolate/init.py
2
2104
import sys import os.path from PIL import Image import numpy as np from arg_parser import parse_args from printer import print_header, print_usage, print_line def init_images(app_data): print("[init.py] : initializing images...") app_args = app_data['app_args'] # input image: img_path = app_args.img_file image = np.array(Image.open(img_path)) img_path2 = app_args.alpha_file alpha = np.array(Image.open(img_path2)) if image.shape[0] != alpha.shape[0] or image.shape[1] != alpha.shape[1]: print("Please use alpha image with the same shape as the image") sys.exit(0) R = image.shape[0] C = image.shape[1] image_flip = np.rollaxis(image, 2) # add alpha channel to image along with other colour channels imgalpha = np.append(image_flip, alpha) imgalpha = imgalpha.reshape(4, R, C) imgalpha_region = imgalpha[0:4, 0:R, 0:C] # add ghost region imgalpha_ghost = np.empty((4, R+2, C+2), np.float32) imgalpha_ghost[0:4, 1:R+1, 1:C+1] = imgalpha_region # convert input image to floating point imgalpha_f = np.float32(imgalpha_ghost) / 255.0 # result array res = np.empty((3, R, C), np.float32) img_data = {} img_data['IN'] = imgalpha_f img_data['OUT'] = res app_data['img_data'] = img_data app_data['R'] = R app_data['C'] = C return def get_input(app_data): # parse the command-line arguments app_args = parse_args() app_data['app_args'] = app_args app_data['mode'] = app_args.mode app_data['runs'] = int(app_args.runs) app_data['graph_gen'] = bool(app_args.graph_gen) app_data['timer'] = app_args.timer # storage optimization app_data['optimize_storage'] = bool(app_args.optimize_storage) # early freeing of allocated arrays app_data['early_free'] = bool(app_args.early_free) # pool allocate option app_data['pool_alloc'] = bool(app_args.pool_alloc) return def init_all(app_data): pipe_data = {} app_data['pipe_data'] = pipe_data get_input(app_data) init_images(app_data) return
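# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: the shape bookkeeping
# performed by init_images(), replayed on a tiny synthetic image so it can
# run without input files. All names are local to this demo.
def _demo_ghost_shapes():
    R, C = 4, 5
    image = np.zeros((R, C, 3), np.uint8)   # RGB input
    alpha = np.zeros((R, C), np.uint8)      # alpha plane of matching shape
    # channels-first stack of R, G, B plus alpha, exactly as above
    imgalpha = np.append(np.rollaxis(image, 2), alpha).reshape(4, R, C)
    ghost = np.empty((4, R + 2, C + 2), np.float32)
    ghost[0:4, 1:R+1, 1:C+1] = imgalpha     # one-pixel halo on every side
    assert ghost.shape == (4, R + 2, C + 2)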
apache-2.0
-8,058,180,493,064,463,000
24.658537
76
0.63308
false
3.05814
false
false
false
MadeiraCloud/salt
sources/salt/states/locale.py
1
1224
# -*- coding: utf-8 -*- ''' Management of languages/locales =============================== The locale can be managed for the system: .. code-block:: yaml en_US.UTF-8: locale.system ''' def __virtual__(): ''' Only load if the locale module is available in __salt__ ''' return 'locale' if 'locale.get_locale' in __salt__ else False def system(name): ''' Set the locale for the system name The name of the locale to use ''' ret = {'name': name, 'changes': {}, 'result': None, 'comment': '', 'state_stdout': ''} if __salt__['locale.get_locale']() == name: ret['result'] = True ret['comment'] = 'System locale {0} already set'.format(name) return ret if __opts__['test']: ret['comment'] = 'System locale {0} needs to be set'.format(name) return ret if __salt__['locale.set_locale'](name, state_ret=ret): ret['changes'] = {'locale': name} ret['result'] = True ret['comment'] = 'Set system locale {0}'.format(name) return ret else: ret['result'] = False ret['comment'] = 'Failed to set system locale' return ret
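# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: how this state
# behaves when the locale already matches. The stub mapping below is a
# hypothetical stand-in for Salt's real __salt__/__opts__ dunders, which the
# loader normally injects.
#
#     __salt__ = {'locale.get_locale': lambda: 'en_US.UTF-8',
#                 'locale.set_locale': lambda name, state_ret=None: True}
#     __opts__ = {'test': False}
#     ret = system('en_US.UTF-8')
#     assert ret['result'] is True and ret['changes'] == {}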
apache-2.0
5,171,256,469,383,106,000
23.979592
73
0.521242
false
3.885714
false
false
false
Alberto-Beralix/Beralix
i386-squashfs-root/usr/lib/python2.7/trace.py
1
30152
#! /usr/bin/python2.7 # portions copyright 2001, Autonomous Zones Industries, Inc., all rights... # err... reserved and offered to the public under the terms of the # Python 2.2 license. # Author: Zooko O'Whielacronx # http://zooko.com/ # mailto:[email protected] # # Copyright 2000, Mojam Media, Inc., all rights reserved. # Author: Skip Montanaro # # Copyright 1999, Bioreason, Inc., all rights reserved. # Author: Andrew Dalke # # Copyright 1995-1997, Automatrix, Inc., all rights reserved. # Author: Skip Montanaro # # Copyright 1991-1995, Stichting Mathematisch Centrum, all rights reserved. # # # Permission to use, copy, modify, and distribute this Python software and # its associated documentation for any purpose without fee is hereby # granted, provided that the above copyright notice appears in all copies, # and that both that copyright notice and this permission notice appear in # supporting documentation, and that the name of neither Automatrix, # Bioreason or Mojam Media be used in advertising or publicity pertaining to # distribution of the software without specific, written prior permission. # """program/module to trace Python program or function execution Sample use, command line: trace.py -c -f counts --ignore-dir '$prefix' spam.py eggs trace.py -t --ignore-dir '$prefix' spam.py eggs trace.py --trackcalls spam.py eggs Sample use, programmatically import sys # create a Trace object, telling it what to ignore, and whether to # do tracing or line-counting or both. tracer = trace.Trace(ignoredirs=[sys.prefix, sys.exec_prefix,], trace=0, count=1) # run the new command using the given tracer tracer.run('main()') # make a report, placing output in /tmp r = tracer.results() r.write_results(show_missing=True, coverdir="/tmp") """ import linecache import os import re import sys import time import token import tokenize import inspect import gc import dis try: import cPickle pickle = cPickle except ImportError: import pickle try: import threading except ImportError: _settrace = sys.settrace def _unsettrace(): sys.settrace(None) else: def _settrace(func): threading.settrace(func) sys.settrace(func) def _unsettrace(): sys.settrace(None) threading.settrace(None) def usage(outfile): outfile.write("""Usage: %s [OPTIONS] <file> [ARGS] Meta-options: --help Display this help then exit. --version Output version information then exit. Otherwise, exactly one of the following three options must be given: -t, --trace Print each line to sys.stdout before it is executed. -c, --count Count the number of times each line is executed and write the counts to <module>.cover for each module executed, in the module's directory. See also `--coverdir', `--file', `--no-report' below. -l, --listfuncs Keep track of which functions are executed at least once and write the results to sys.stdout after the program exits. -T, --trackcalls Keep track of caller/called pairs and write the results to sys.stdout after the program exits. -r, --report Generate a report from a counts file; do not execute any code. `--file' must specify the results file to read, which must have been created in a previous run with `--count --file=FILE'. Modifiers: -f, --file=<file> File to accumulate counts over several runs. -R, --no-report Do not generate the coverage report files. Useful if you want to accumulate over several runs. -C, --coverdir=<dir> Directory where the report files. The coverage report for <package>.<module> is written to file <dir>/<package>/<module>.cover. 
-m, --missing Annotate executable lines that were not executed with '>>>>>> '. -s, --summary Write a brief summary on stdout for each file. (Can only be used with --count or --report.) -g, --timing Prefix each line with the time since the program started. Only used while tracing. Filters, may be repeated multiple times: --ignore-module=<mod> Ignore the given module(s) and its submodules (if it is a package). Accepts comma separated list of module names --ignore-dir=<dir> Ignore files in the given directory (multiple directories can be joined by os.pathsep). """ % sys.argv[0]) PRAGMA_NOCOVER = "#pragma NO COVER" # Simple rx to find lines with no code. rx_blank = re.compile(r'^\s*(#.*)?$') class Ignore: def __init__(self, modules = None, dirs = None): self._mods = modules or [] self._dirs = dirs or [] self._dirs = map(os.path.normpath, self._dirs) self._ignore = { '<string>': 1 } def names(self, filename, modulename): if modulename in self._ignore: return self._ignore[modulename] # haven't seen this one before, so see if the module name is # on the ignore list. Need to take some care since ignoring # "cmp" mustn't mean ignoring "cmpcache" but ignoring # "Spam" must also mean ignoring "Spam.Eggs". for mod in self._mods: if mod == modulename: # Identical names, so ignore self._ignore[modulename] = 1 return 1 # check if the module is a proper submodule of something on # the ignore list n = len(mod) # (will not overflow since if the first n characters are the # same and the name has not already occurred, then the size # of "name" is greater than that of "mod") if mod == modulename[:n] and modulename[n] == '.': self._ignore[modulename] = 1 return 1 # Now check that __file__ isn't in one of the directories if filename is None: # must be a built-in, so we must ignore self._ignore[modulename] = 1 return 1 # Ignore a file when it contains one of the ignorable paths for d in self._dirs: # The '+ os.sep' is to ensure that d is a parent directory, # as compared to cases like: # d = "/usr/local" # filename = "/usr/local.py" # or # d = "/usr/local.py" # filename = "/usr/local.py" if filename.startswith(d + os.sep): self._ignore[modulename] = 1 return 1 # Tried the different ways, so we don't ignore this module self._ignore[modulename] = 0 return 0 def modname(path): """Return a plausible module name for the path.""" base = os.path.basename(path) filename, ext = os.path.splitext(base) return filename def fullmodname(path): """Return a plausible module name for the path.""" # If the file 'path' is part of a package, then the filename isn't # enough to uniquely identify it. Try to do the right thing by # looking in sys.path for the longest matching prefix. We'll # assume that the rest is the package name.
comparepath = os.path.normcase(path) longest = "" for dir in sys.path: dir = os.path.normcase(dir) if comparepath.startswith(dir) and comparepath[len(dir)] == os.sep: if len(dir) > len(longest): longest = dir if longest: base = path[len(longest) + 1:] else: base = path # the drive letter is never part of the module name drive, base = os.path.splitdrive(base) base = base.replace(os.sep, ".") if os.altsep: base = base.replace(os.altsep, ".") filename, ext = os.path.splitext(base) return filename.lstrip(".") class CoverageResults: def __init__(self, counts=None, calledfuncs=None, infile=None, callers=None, outfile=None): self.counts = counts if self.counts is None: self.counts = {} self.counter = self.counts.copy() # map (filename, lineno) to count self.calledfuncs = calledfuncs if self.calledfuncs is None: self.calledfuncs = {} self.calledfuncs = self.calledfuncs.copy() self.callers = callers if self.callers is None: self.callers = {} self.callers = self.callers.copy() self.infile = infile self.outfile = outfile if self.infile: # Try to merge existing counts file. try: counts, calledfuncs, callers = \ pickle.load(open(self.infile, 'rb')) self.update(self.__class__(counts, calledfuncs, callers)) except (IOError, EOFError, ValueError), err: print >> sys.stderr, ("Skipping counts file %r: %s" % (self.infile, err)) def update(self, other): """Merge in the data from another CoverageResults""" counts = self.counts calledfuncs = self.calledfuncs callers = self.callers other_counts = other.counts other_calledfuncs = other.calledfuncs other_callers = other.callers for key in other_counts.keys(): counts[key] = counts.get(key, 0) + other_counts[key] for key in other_calledfuncs.keys(): calledfuncs[key] = 1 for key in other_callers.keys(): callers[key] = 1 def write_results(self, show_missing=True, summary=False, coverdir=None): """ @param coverdir """ if self.calledfuncs: print print "functions called:" calls = self.calledfuncs.keys() calls.sort() for filename, modulename, funcname in calls: print ("filename: %s, modulename: %s, funcname: %s" % (filename, modulename, funcname)) if self.callers: print print "calling relationships:" calls = self.callers.keys() calls.sort() lastfile = lastcfile = "" for ((pfile, pmod, pfunc), (cfile, cmod, cfunc)) in calls: if pfile != lastfile: print print "***", pfile, "***" lastfile = pfile lastcfile = "" if cfile != pfile and lastcfile != cfile: print " -->", cfile lastcfile = cfile print " %s.%s -> %s.%s" % (pmod, pfunc, cmod, cfunc) # turn the counts data ("(filename, lineno) = count") into something # accessible on a per-file basis per_file = {} for filename, lineno in self.counts.keys(): lines_hit = per_file[filename] = per_file.get(filename, {}) lines_hit[lineno] = self.counts[(filename, lineno)] # accumulate summary info, if needed sums = {} for filename, count in per_file.iteritems(): # skip some "files" we don't care about... 
if filename == "<string>": continue if filename.startswith("<doctest "): continue if filename.endswith((".pyc", ".pyo")): filename = filename[:-1] if coverdir is None: dir = os.path.dirname(os.path.abspath(filename)) modulename = modname(filename) else: dir = coverdir if not os.path.exists(dir): os.makedirs(dir) modulename = fullmodname(filename) # If desired, get a list of the line numbers which represent # executable content (returned as a dict for better lookup speed) if show_missing: lnotab = find_executable_linenos(filename) else: lnotab = {} source = linecache.getlines(filename) coverpath = os.path.join(dir, modulename + ".cover") n_hits, n_lines = self.write_results_file(coverpath, source, lnotab, count) if summary and n_lines: percent = 100 * n_hits // n_lines sums[modulename] = n_lines, percent, modulename, filename if summary and sums: mods = sums.keys() mods.sort() print "lines cov% module (path)" for m in mods: n_lines, percent, modulename, filename = sums[m] print "%5d %3d%% %s (%s)" % sums[m] if self.outfile: # try and store counts and module info into self.outfile try: pickle.dump((self.counts, self.calledfuncs, self.callers), open(self.outfile, 'wb'), 1) except IOError, err: print >> sys.stderr, "Can't save counts files because %s" % err def write_results_file(self, path, lines, lnotab, lines_hit): """Return a coverage results file in path.""" try: outfile = open(path, "w") except IOError, err: print >> sys.stderr, ("trace: Could not open %r for writing: %s" "- skipping" % (path, err)) return 0, 0 n_lines = 0 n_hits = 0 for i, line in enumerate(lines): lineno = i + 1 # do the blank/comment match to try to mark more lines # (help the reader find stuff that hasn't been covered) if lineno in lines_hit: outfile.write("%5d: " % lines_hit[lineno]) n_hits += 1 n_lines += 1 elif rx_blank.match(line): outfile.write(" ") else: # lines preceded by no marks weren't hit # Highlight them if so indicated, unless the line contains # #pragma: NO COVER if lineno in lnotab and not PRAGMA_NOCOVER in lines[i]: outfile.write(">>>>>> ") n_lines += 1 else: outfile.write(" ") outfile.write(lines[i].expandtabs(8)) outfile.close() return n_hits, n_lines def find_lines_from_code(code, strs): """Return dict where keys are lines in the line number table.""" linenos = {} for _, lineno in dis.findlinestarts(code): if lineno not in strs: linenos[lineno] = 1 return linenos def find_lines(code, strs): """Return lineno dict for all code objects reachable from code.""" # get all of the lineno information from the code of this scope level linenos = find_lines_from_code(code, strs) # and check the constants for references to other code objects for c in code.co_consts: if inspect.iscode(c): # find another code object, so recurse into it linenos.update(find_lines(c, strs)) return linenos def find_strings(filename): """Return a dict of possible docstring positions. The dict maps line numbers to strings. There is an entry for line that contains only a string or a part of a triple-quoted string. """ d = {} # If the first token is a string, then it's the module docstring. # Add this special case so that the test in the loop passes. 
prev_ttype = token.INDENT f = open(filename) for ttype, tstr, start, end, line in tokenize.generate_tokens(f.readline): if ttype == token.STRING: if prev_ttype == token.INDENT: sline, scol = start eline, ecol = end for i in range(sline, eline + 1): d[i] = 1 prev_ttype = ttype f.close() return d def find_executable_linenos(filename): """Return dict where keys are line numbers in the line number table.""" try: prog = open(filename, "rU").read() except IOError, err: print >> sys.stderr, ("Not printing coverage data for %r: %s" % (filename, err)) return {} code = compile(prog, filename, "exec") strs = find_strings(filename) return find_lines(code, strs) class Trace: def __init__(self, count=1, trace=1, countfuncs=0, countcallers=0, ignoremods=(), ignoredirs=(), infile=None, outfile=None, timing=False): """ @param count true iff it should count number of times each line is executed @param trace true iff it should print out each line that is being counted @param countfuncs true iff it should just output a list of (filename, modulename, funcname,) for functions that were called at least once; This overrides `count' and `trace' @param ignoremods a list of the names of modules to ignore @param ignoredirs a list of the names of directories to ignore all of the (recursive) contents of @param infile file from which to read stored counts to be added into the results @param outfile file in which to write the results @param timing true iff timing information be displayed """ self.infile = infile self.outfile = outfile self.ignore = Ignore(ignoremods, ignoredirs) self.counts = {} # keys are (filename, linenumber) self.blabbed = {} # for debugging self.pathtobasename = {} # for memoizing os.path.basename self.donothing = 0 self.trace = trace self._calledfuncs = {} self._callers = {} self._caller_cache = {} self.start_time = None if timing: self.start_time = time.time() if countcallers: self.globaltrace = self.globaltrace_trackcallers elif countfuncs: self.globaltrace = self.globaltrace_countfuncs elif trace and count: self.globaltrace = self.globaltrace_lt self.localtrace = self.localtrace_trace_and_count elif trace: self.globaltrace = self.globaltrace_lt self.localtrace = self.localtrace_trace elif count: self.globaltrace = self.globaltrace_lt self.localtrace = self.localtrace_count else: # Ahem -- do nothing? Okay. 
self.donothing = 1 def run(self, cmd): import __main__ dict = __main__.__dict__ if not self.donothing: threading.settrace(self.globaltrace) sys.settrace(self.globaltrace) try: exec cmd in dict, dict finally: if not self.donothing: sys.settrace(None) threading.settrace(None) def runctx(self, cmd, globals=None, locals=None): if globals is None: globals = {} if locals is None: locals = {} if not self.donothing: _settrace(self.globaltrace) try: exec cmd in globals, locals finally: if not self.donothing: _unsettrace() def runfunc(self, func, *args, **kw): result = None if not self.donothing: sys.settrace(self.globaltrace) try: result = func(*args, **kw) finally: if not self.donothing: sys.settrace(None) return result def file_module_function_of(self, frame): code = frame.f_code filename = code.co_filename if filename: modulename = modname(filename) else: modulename = None funcname = code.co_name clsname = None if code in self._caller_cache: if self._caller_cache[code] is not None: clsname = self._caller_cache[code] else: self._caller_cache[code] = None ## use of gc.get_referrers() was suggested by Michael Hudson # all functions which refer to this code object funcs = [f for f in gc.get_referrers(code) if inspect.isfunction(f)] # require len(func) == 1 to avoid ambiguity caused by calls to # new.function(): "In the face of ambiguity, refuse the # temptation to guess." if len(funcs) == 1: dicts = [d for d in gc.get_referrers(funcs[0]) if isinstance(d, dict)] if len(dicts) == 1: classes = [c for c in gc.get_referrers(dicts[0]) if hasattr(c, "__bases__")] if len(classes) == 1: # ditto for new.classobj() clsname = classes[0].__name__ # cache the result - assumption is that new.* is # not called later to disturb this relationship # _caller_cache could be flushed if functions in # the new module get called. self._caller_cache[code] = clsname if clsname is not None: funcname = "%s.%s" % (clsname, funcname) return filename, modulename, funcname def globaltrace_trackcallers(self, frame, why, arg): """Handler for call events. Adds information about who called who to the self._callers dict. """ if why == 'call': # XXX Should do a better job of identifying methods this_func = self.file_module_function_of(frame) parent_func = self.file_module_function_of(frame.f_back) self._callers[(parent_func, this_func)] = 1 def globaltrace_countfuncs(self, frame, why, arg): """Handler for call events. Adds (filename, modulename, funcname) to the self._calledfuncs dict. """ if why == 'call': this_func = self.file_module_function_of(frame) self._calledfuncs[this_func] = 1 def globaltrace_lt(self, frame, why, arg): """Handler for call events. If the code block being entered is to be ignored, returns `None', else returns self.localtrace. 
""" if why == 'call': code = frame.f_code filename = frame.f_globals.get('__file__', None) if filename: # XXX modname() doesn't work right for packages, so # the ignore support won't work right for packages modulename = modname(filename) if modulename is not None: ignore_it = self.ignore.names(filename, modulename) if not ignore_it: if self.trace: print (" --- modulename: %s, funcname: %s" % (modulename, code.co_name)) return self.localtrace else: return None def localtrace_trace_and_count(self, frame, why, arg): if why == "line": # record the file name and line number of every trace filename = frame.f_code.co_filename lineno = frame.f_lineno key = filename, lineno self.counts[key] = self.counts.get(key, 0) + 1 if self.start_time: print '%.2f' % (time.time() - self.start_time), bname = os.path.basename(filename) print "%s(%d): %s" % (bname, lineno, linecache.getline(filename, lineno)), return self.localtrace def localtrace_trace(self, frame, why, arg): if why == "line": # record the file name and line number of every trace filename = frame.f_code.co_filename lineno = frame.f_lineno if self.start_time: print '%.2f' % (time.time() - self.start_time), bname = os.path.basename(filename) print "%s(%d): %s" % (bname, lineno, linecache.getline(filename, lineno)), return self.localtrace def localtrace_count(self, frame, why, arg): if why == "line": filename = frame.f_code.co_filename lineno = frame.f_lineno key = filename, lineno self.counts[key] = self.counts.get(key, 0) + 1 return self.localtrace def results(self): return CoverageResults(self.counts, infile=self.infile, outfile=self.outfile, calledfuncs=self._calledfuncs, callers=self._callers) def _err_exit(msg): sys.stderr.write("%s: %s\n" % (sys.argv[0], msg)) sys.exit(1) def main(argv=None): import getopt if argv is None: argv = sys.argv try: opts, prog_argv = getopt.getopt(argv[1:], "tcrRf:d:msC:lTg", ["help", "version", "trace", "count", "report", "no-report", "summary", "file=", "missing", "ignore-module=", "ignore-dir=", "coverdir=", "listfuncs", "trackcalls", "timing"]) except getopt.error, msg: sys.stderr.write("%s: %s\n" % (sys.argv[0], msg)) sys.stderr.write("Try `%s --help' for more information\n" % sys.argv[0]) sys.exit(1) trace = 0 count = 0 report = 0 no_report = 0 counts_file = None missing = 0 ignore_modules = [] ignore_dirs = [] coverdir = None summary = 0 listfuncs = False countcallers = False timing = False for opt, val in opts: if opt == "--help": usage(sys.stdout) sys.exit(0) if opt == "--version": sys.stdout.write("trace 2.0\n") sys.exit(0) if opt == "-T" or opt == "--trackcalls": countcallers = True continue if opt == "-l" or opt == "--listfuncs": listfuncs = True continue if opt == "-g" or opt == "--timing": timing = True continue if opt == "-t" or opt == "--trace": trace = 1 continue if opt == "-c" or opt == "--count": count = 1 continue if opt == "-r" or opt == "--report": report = 1 continue if opt == "-R" or opt == "--no-report": no_report = 1 continue if opt == "-f" or opt == "--file": counts_file = val continue if opt == "-m" or opt == "--missing": missing = 1 continue if opt == "-C" or opt == "--coverdir": coverdir = val continue if opt == "-s" or opt == "--summary": summary = 1 continue if opt == "--ignore-module": for mod in val.split(","): ignore_modules.append(mod.strip()) continue if opt == "--ignore-dir": for s in val.split(os.pathsep): s = os.path.expandvars(s) # should I also call expanduser? 
(after all, could use $HOME) s = s.replace("$prefix", os.path.join(sys.prefix, "lib", "python" + sys.version[:3])) s = s.replace("$exec_prefix", os.path.join(sys.exec_prefix, "lib", "python" + sys.version[:3])) s = os.path.normpath(s) ignore_dirs.append(s) continue assert 0, "Should never get here" if listfuncs and (count or trace): _err_exit("cannot specify both --listfuncs and (--trace or --count)") if not (count or trace or report or listfuncs or countcallers): _err_exit("must specify one of --trace, --count, --report, " "--listfuncs, or --trackcalls") if report and no_report: _err_exit("cannot specify both --report and --no-report") if report and not counts_file: _err_exit("--report requires a --file") if no_report and len(prog_argv) == 0: _err_exit("missing name of file to run") # everything is ready if report: results = CoverageResults(infile=counts_file, outfile=counts_file) results.write_results(missing, summary=summary, coverdir=coverdir) else: sys.argv = prog_argv progname = prog_argv[0] sys.path[0] = os.path.split(progname)[0] t = Trace(count, trace, countfuncs=listfuncs, countcallers=countcallers, ignoremods=ignore_modules, ignoredirs=ignore_dirs, infile=counts_file, outfile=counts_file, timing=timing) try: with open(progname) as fp: code = compile(fp.read(), progname, 'exec') # try to emulate __main__ namespace as much as possible globs = { '__file__': progname, '__name__': '__main__', '__package__': None, '__cached__': None, } t.runctx(code, globs, globs) except IOError, err: _err_exit("Cannot run file %r because: %s" % (sys.argv[0], err)) except SystemExit: pass results = t.results() if not no_report: results.write_results(missing, summary=summary, coverdir=coverdir) if __name__=='__main__': main()
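# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: programmatic use of
# the Trace class, mirroring the module docstring. The traced callable is an
# arbitrary placeholder.
def _demo_trace():
    tracer = Trace(count=1, trace=0)          # count lines, don't print them
    tracer.runfunc(max, [3, 1, 4, 1, 5])      # run any callable under trace
    results = tracer.results()
    # results.write_results(show_missing=True, coverdir='/tmp') would then
    # emit per-module .cover files for everything executed.
    return results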
gpl-3.0
-8,048,764,754,056,504,000
35.459492
79
0.541457
false
4.272637
false
false
false
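The trace module above is driven from the command line through main(), but the Trace class can also be used directly. A minimal sketch, assuming the file is importable in the same way as the standard-library trace module it mirrors; the fib function and output directory are illustrative:

import trace

def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

tracer = trace.Trace(count=1, trace=0)  # count line executions, no live trace output
tracer.runfunc(fib, 6)                  # run the target function under the tracer

results = tracer.results()              # a CoverageResults, as assembled in results()
results.write_results(show_missing=True, coverdir="coverage_out")

This is equivalent to the --count path of main(): hit counts accumulate in self.counts keyed by (filename, lineno) and are written out as annotated .cover files.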
twbarber/pygooglevoice
googlevoice/voice.py
1
11310
from conf import config from util import * import settings import base64 qpat = re.compile(r'\?') if settings.DEBUG: import logging logging.basicConfig() log = logging.getLogger('PyGoogleVoice') log.setLevel(logging.DEBUG) else: log = None class Voice(object): """ Main voice instance for interacting with the Google Voice service Handles login/logout and most of the baser HTTP methods """ def __init__(self): install_opener(build_opener(HTTPCookieProcessor(CookieJar()))) for name in settings.FEEDS: setattr(self, name, self.__get_xml_page(name)) setattr(self, 'message', self.__get_xml_page('message')) ###################### # Some handy methods ###################### def special(self): """ Returns special identifier for your session (if logged in) """ if hasattr(self, '_special') and getattr(self, '_special'): return self._special try: try: regex = bytes("('_rnr_se':) '(.+)'", 'utf8') except TypeError: regex = bytes("('_rnr_se':) '(.+)'") except NameError: regex = r"('_rnr_se':) '(.+)'" try: sp = re.search(regex, urlopen(settings.INBOX).read()).group(2) except AttributeError: sp = None self._special = sp return sp special = property(special) def login(self, email=None, passwd=None, smsKey=None): """ Login to the service using your Google Voice account Credentials will be propmpted for if not given as args or in the ``~/.gvoice`` config file """ if hasattr(self, '_special') and getattr(self, '_special'): return self if email is None: email = config.email if email is None: email = input('Email address: ') if passwd is None: passwd = config.password if passwd is None: from getpass import getpass passwd = getpass() content = self.__do_page('login').read() # holy hackjob galx = re.search(r"type=\"hidden\"\s+name=\"GALX\"\s+value=\"(.+)\"", content).group(1) result = self.__do_page('login', {'Email': email, 'Passwd': passwd, 'GALX': galx}) if result.geturl().startswith(getattr(settings, "SMSAUTH")): content = self.__smsAuth(smsKey) try: smsToken = re.search(r"name=\"smsToken\"\s+value=\"([^\"]+)\"", content).group(1) galx = re.search(r"name=\"GALX\"\s+value=\"([^\"]+)\"", content).group(1) content = self.__do_page('login', {'smsToken': smsToken, 'service': "grandcentral", 'GALX': galx}) except AttributeError: raise LoginError del smsKey, smsToken, galx del email, passwd try: assert self.special except (AssertionError, AttributeError): raise LoginError return self def __smsAuth(self, smsKey=None): if smsKey is None: smsKey = config.smsKey if smsKey is None: from getpass import getpass smsPin = getpass("SMS PIN: ") content = self.__do_page('smsauth', {'smsUserPin': smsPin}).read() else: smsKey = base64.b32decode(re.sub(r' ', '', smsKey), casefold=True).encode("hex") content = self.__oathtoolAuth(smsKey) try_count = 1 while "The code you entered didn&#39;t verify." 
in content and try_count < 5: sleep_seconds = 10 try_count += 1 print('invalid code, retrying after %s seconds (attempt %s)' % (sleep_seconds, try_count)) import time time.sleep(sleep_seconds) content = self.__oathtoolAuth(smsKey) del smsKey return content def __oathtoolAuth(self, smsKey): import commands smsPin = commands.getstatusoutput('oathtool --totp ' + smsKey)[1] content = self.__do_page('smsauth', {'smsUserPin': smsPin}).read() del smsPin return content def logout(self): """ Logs out an instance and makes sure it does not still have a session """ self.__do_page('logout') del self._special assert self.special == None return self def call(self, outgoingNumber, forwardingNumber=None, phoneType=None, subscriberNumber=None): """ Make a call to an ``outgoingNumber`` from your ``forwardingNumber`` (optional). If you pass in your ``forwardingNumber``, please also pass in the correct ``phoneType`` """ if forwardingNumber is None: forwardingNumber = config.forwardingNumber if phoneType is None: phoneType = config.phoneType self.__validate_special_page('call', { 'outgoingNumber': outgoingNumber, 'forwardingNumber': forwardingNumber, 'subscriberNumber': subscriberNumber or 'undefined', 'phoneType': phoneType, 'remember': '1' }) __call__ = call def cancel(self, outgoingNumber=None, forwardingNumber=None): """ Cancels a call matching outgoing and forwarding numbers (if given). Will raise an error if no matching call is being placed """ self.__validate_special_page('cancel', { 'outgoingNumber': outgoingNumber or 'undefined', 'forwardingNumber': forwardingNumber or 'undefined', 'cancelType': 'C2C', }) def phones(self): """ Returns a list of ``Phone`` instances attached to your account. """ return [Phone(self, data) for data in self.contacts['phones'].values()] phones = property(phones) def settings(self): """ Dict of current Google Voice settings """ return AttrDict(self.contacts['settings']) settings = property(settings) def send_sms(self, phoneNumber, text): """ Send an SMS message to a given ``phoneNumber`` with the given ``text`` message """ self.__validate_special_page('sms', {'phoneNumber': phoneNumber, 'text': text}) def search(self, query): """ Search your Google Voice Account history for calls, voicemails, and sms Returns ``Folder`` instance containting matching messages """ return self.__get_xml_page('search', data='?q=%s' % quote(query))() def archive(self, msg, archive=1): """ Archive the specified message by removing it from the Inbox. """ if isinstance(msg, Message): msg = msg.id assert is_sha1(msg), 'Message id not a SHA1 hash' self.__messages_post('archive', msg, archive=archive) def delete(self, msg, trash=1): """ Moves this message to the Trash. Use ``message.delete(0)`` to move it out of the Trash. """ if isinstance(msg, Message): msg = msg.id assert is_sha1(msg), 'Message id not a SHA1 hash' self.__messages_post('delete', msg, trash=trash) def download(self, msg, adir=None): """ Download a voicemail or recorded call MP3 matching the given ``msg`` which can either be a ``Message`` instance, or a SHA1 identifier. Saves files to ``adir`` (defaults to current directory). Message hashes can be found in ``self.voicemail().messages`` for example. Returns location of saved file. 
""" from os import path, getcwd if isinstance(msg, Message): msg = msg.id assert is_sha1(msg), 'Message id not a SHA1 hash' if adir is None: adir = getcwd() try: response = self.__do_page('download', msg) except: raise DownloadError fn = path.join(adir, '%s.mp3' % msg) with open(fn, 'wb') as fo: fo.write(response.read()) return fn def contacts(self): """ Partial data of your Google Account Contacts related to your Voice account. For a more comprehensive suite of APIs, check out http://code.google.com/apis/contacts/docs/1.0/developers_guide_python.html """ if hasattr(self, '_contacts'): return self._contacts self._contacts = self.__get_xml_page('contacts')() return self._contacts contacts = property(contacts) ###################### # Helper methods ###################### def __do_page(self, page, data=None, headers={}, terms={}): """ Loads a page out of the settings and pass it on to urllib Request """ page = page.upper() if isinstance(data, dict) or isinstance(data, tuple): data = urlencode(data) headers.update({'User-Agent': 'PyGoogleVoice/0.5'}) if log: log.debug('%s?%s - %s' % (getattr(settings, page)[22:], data or '', headers)) if page in ('DOWNLOAD', 'XML_SEARCH'): return urlopen(Request(getattr(settings, page) + data, None, headers)) if data: headers.update({'Content-type': 'application/x-www-form-urlencoded;charset=utf-8'}) pageuri = getattr(settings, page) if len(terms) > 0: m = qpat.match(page) if m: pageuri += '&' else: pageuri += '?' for i, k in enumerate(terms.keys()): pageuri += k + '=' + terms[k] if i < len(terms) - 1: pageuri += '&' return urlopen(Request(pageuri, data, headers)) def __validate_special_page(self, page, data={}, **kwargs): """ Validates a given special page for an 'ok' response """ data.update(kwargs) load_and_validate(self.__do_special_page(page, data)) _Phone__validate_special_page = __validate_special_page def __do_special_page(self, page, data=None, headers={}, terms={}): """ Add self.special to request data """ assert self.special, 'You must login before using this page' if isinstance(data, tuple): data += ('_rnr_se', self.special) elif isinstance(data, dict): data.update({'_rnr_se': self.special}) return self.__do_page(page, data, headers, terms) _Phone__do_special_page = __do_special_page def __get_xml_page(self, page, data=None, headers={}): """ Return XMLParser instance generated from given page """ return XMLParser(self, page, lambda terms={}: self.__do_special_page('XML_%s' % page.upper(), data, headers, terms).read()) def __messages_post(self, page, *msgs, **kwargs): """ Performs message operations, eg deleting,staring,moving """ data = kwargs.items() for msg in msgs: if isinstance(msg, Message): msg = msg.id assert is_sha1(msg), 'Message id not a SHA1 hash' data += (('messages', msg),) return self.__do_special_page(page, dict(data)) _Message__messages_post = __messages_post
bsd-3-clause
1,199,714,198,842,147,000
34.01548
132
0.558267
false
4.151982
true
false
false
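A hedged usage sketch for the Voice class above. The import path is assumed from the file location (googlevoice/voice.py), the credentials and numbers are placeholders, and the Google Voice endpoints this code targets may no longer accept these requests:

from googlevoice import Voice  # assumed package layout

voice = Voice()
voice.login(email="user@example.com", passwd="secret")  # prompts if omitted

voice.send_sms("+15551234567", "Hello from PyGoogleVoice")
voice.call(outgoingNumber="+15551234567",
           forwardingNumber="+15557654321",
           phoneType=2)  # phone type code; its meaning is defined by the service

voice.logout()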
jtakayama/makahiki-draft
install/run_initialize_instance.py
1
7542
import os import sys import subprocess import shlex import sys import StringIO import datetime sys.path.append(os.path.dirname(os.path.realpath(__file__)) + os.sep + os.pardir + os.sep + "makahiki" + os.sep) from apps.utils import script_utils def termination_string(): """ Gets the current system time and appends it to a termination notice. """ now = datetime.datetime.now() time = now.strftime("%Y-%m-%d %H:%M:%S") end_time = "Script exiting at %s\n" % time return end_time # Modified from manage_py_dir() in script_utils.py def local_manage_py_dir(): """Returns the directory holding the manage.py file as a string.""" return os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + os.sep + os.pardir + os.sep + "makahiki") # Modified from local_reset_db(heroku_app) in script_utils.py def local_reset_db(logfile): """reset db. Returns a tuple result_tuple. result_tuple[0] has the logfile. result_tuple[1] is True if the reset was aborted, and False if was not. """ local_reset_db_cancel = False logfile.write("WARNING: This command will reset the database. " \ "All existing data will be deleted. This process is irreversible.\n") print "WARNING: This command will reset the database. " \ "All existing data will be deleted. This process is irreversible.\n" value = raw_input("Do you wish to continue (Y/n)? ") while value != "Y" and value != "n": logfile.write("Invalid option %s\n" % value) print "Invalid option %s\n" % value value = raw_input("Do you wish to continue (Y/n)? ") if value == "n": logfile.write("Do you wish to continue (Y/n)? %s\n" % value) logfile.write("Operation cancelled.") print "Operation cancelled.\n" local_reset_db_cancel = True result_tuple = [logfile, local_reset_db_cancel] return result_tuple elif value =="Y": logfile.write("Do you wish to continue (Y/n)? %s\n" % value) print "resetting the db..." os.system("cd " + local_manage_py_dir() + "; python scripts/initialize_postgres.py") result_tuple = [logfile, local_reset_db_cancel] return result_tuple def run(logfile): """ Initializes the Makahiki database with default options and logs the output to a file. This should only be used to initialize local installations. """ now = datetime.datetime.now() time = now.strftime("%Y-%m-%d %H:%M:%S") start_time = "Makahiki instance initialization script started at %s\n" % time logfile.write(start_time) print start_time try: # Retrieve the user's home directory USER_HOME = subprocess.check_output(["echo $HOME"], stderr=subprocess.STDOUT, shell=True) # Remove newline from expected "/home/<username>\n" USER_HOME = USER_HOME[:-1] USER_PROJECT_HOME = USER_HOME + os.sep + "makahiki" # cd to makahiki directory os.chdir(USER_PROJECT_HOME) # Capture console output from script_utils functions: normal_stdout = sys.stdout output_capturer = StringIO.StringIO() sys.stdout = output_capturer # Runs the initialization scripts in same order as # makahiki/makahiki/scripts/initialize_instance.py instance_type = None heroku_app = None manage_py = script_utils.manage_py_command() manage_command = "python " + manage_py fixture_path = "makahiki" + os.sep + "fixtures" # Install requirements script_utils.install_requirements() # Switch back to standard I/O sys.stdout = normal_stdout output = output_capturer.getvalue() logfile.write(output) print(output) # Clear the logfile buffer. 
logfile.flush() os.fsync(logfile) # Reset the database reset_db_result = local_reset_db(logfile) # If successful, write the output of local_reset_db to a logfile logfile = reset_db_result[0] local_reset_db_cancel = reset_db_result[1] if local_reset_db_cancel: logfile.write("Makahiki instance initialization was cancelled by the user.") print "Makahiki instance initialization was cancelled by the user." end_time = termination_string() logfile.write(end_time) print end_time return logfile else: # Resume capturing I/O normal_stdout = sys.stdout output_capturer = StringIO.StringIO() sys.stdout = output_capturer # Sync the database script_utils.syncdb(manage_command) # Switch I/O back, write output to logfile sys.stdout = normal_stdout output = output_capturer.getvalue() logfile.write(output) print(output) # Clear the logfile buffer. logfile.flush() os.fsync(logfile) # Resume capturing I/O normal_stdout = sys.stdout output_capturer = StringIO.StringIO() sys.stdout = output_capturer # Copy static files script_utils.copy_static_media(heroku_app) # Switch I/O back, write output to logfile sys.stdout = normal_stdout output = output_capturer.getvalue() logfile.write(output) print(output) # Clear the logfile buffer. logfile.flush() os.fsync(logfile) # Resume capturing I/O normal_stdout = sys.stdout output_capturer = StringIO.StringIO() sys.stdout = output_capturer # Load data script_utils.load_data(manage_command, instance_type, fixture_path) # Switch I/O back, write output to logfile sys.stdout = normal_stdout output = output_capturer.getvalue() logfile.write(output) print(output) # Clear the logfile buffer. logfile.flush() os.fsync(logfile) # Print a closing message closing = "\nMakahiki initialization script has completed.\n" logfile.write(closing) print closing end_time = termination_string() logfile.write(end_time) print end_time return logfile except subprocess.CalledProcessError as cpe: logfile.write("CalledProcessError: ") print "CalledProcessError: " logfile.write(cpe.output) print cpe.output logfile.write("Warning: Makahiki initialization did not complete successfully.") print "Warning: Makahiki initialization did not complete successfully." end_time = termination_string() logfile.write(end_time) print end_time return logfile except OSError as ose: logfile.write("OSError: ") print "OSError: " oserror_output = " errno: %s\n filename: %s\n strerror: %s\n" % (ose.errno, ose.filename, ose.strerror) logfile.write(oserror_output) print oserror_output logfile.write("Warning: Makahiki initialization did not complete successfully.") print "Warning: Makahiki initialization did not complete successfully." end_time = termination_string() logfile.write(end_time) print end_time return logfile
mit
-7,530,258,053,230,034,000
37.284264
115
0.60647
false
4.119061
false
false
false
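The run() function above repeats one pattern several times: swap sys.stdout for a StringIO buffer, run a step from script_utils, restore stdout, then log whatever was printed. A standalone sketch of that pattern (Python 2, matching the script); the try/finally is an addition so stdout is restored even when the step raises:

import sys
import StringIO

def run_captured(step, logfile):
    normal_stdout = sys.stdout
    buf = StringIO.StringIO()
    sys.stdout = buf
    try:
        step()                      # any callable that prints to stdout
    finally:
        sys.stdout = normal_stdout  # always restore the real stdout
    output = buf.getvalue()
    logfile.write(output)
    print(output)
    logfile.flush()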
jcberquist/SublimeText-Lucee
src/inline_documentation.py
1
2952
import sublime, sublime_plugin, webbrowser
from os.path import dirname, realpath
from . import utils
from collections import namedtuple

Documentation = namedtuple('Documentation', 'doc_html_variables on_navigate priority')

FILE_PATH = dirname(realpath(__file__)).replace("\\", "/")
DOC_TEMPLATE = ""
PAGINATION_TEMPLATE = ""
documentation_sources = []


def add_documentation_source(callback):
    documentation_sources.append(callback)


def get_inline_documentation(view, position):
    docs = []
    for callback in documentation_sources:
        inline_doc = callback(view, position)
        if inline_doc:
            docs.append(inline_doc)
    return docs


def plugin_loaded():
    global DOC_TEMPLATE, PAGINATION_TEMPLATE
    DOC_TEMPLATE = sublime.load_resource("Packages/" + utils.get_plugin_name() + "/templates/inline_documentation.html")
    PAGINATION_TEMPLATE = sublime.load_resource("Packages/" + utils.get_plugin_name() + "/templates/pagination.html")


def build_links(links):
    html_links = ['<a class="link" href="' + link["href"] + '">' + link["text"] + '</a>' for link in links]
    return "<br>".join(html_links)


def build_pagination(current_index, total_pages):
    pagination_variables = {"current_page": str(current_index + 1), "total_pages": str(total_pages)}
    previous_index = current_index - 1 if current_index > 0 else total_pages - 1
    pagination_variables["prev"] = "page_" + str(previous_index)
    next_index = current_index + 1 if current_index < total_pages - 1 else 0
    pagination_variables["next"] = "page_" + str(next_index)
    return sublime.expand_variables(PAGINATION_TEMPLATE, pagination_variables)


def build_doc_html(inline_doc):
    return sublime.expand_variables(DOC_TEMPLATE, inline_doc)


def get_on_navigate(view, docs, current_index):
    def on_navigate(href):
        if href.startswith("page_"):
            new_index = int(href.split("_").pop())
            display_documentation(view, docs, new_index)
        elif docs[current_index].on_navigate:
            docs[current_index].on_navigate(href)
        else:
            webbrowser.open_new_tab(href)
    return on_navigate


def generate_documentation(docs, current_index):
    doc_html_variables = dict(docs[current_index].doc_html_variables)
    doc_html_variables["pagination"] = build_pagination(current_index, len(docs)) if len(docs) > 1 else ""
    doc_html_variables["links"] = build_links(doc_html_variables["links"]) if "links" in doc_html_variables else ""
    return build_doc_html(doc_html_variables)


def display_documentation(view, docs, current_index=0):
    doc_html = generate_documentation(docs, current_index)
    on_navigate = get_on_navigate(view, docs, current_index)
    view.show_popup(doc_html, max_width=640, max_height=320, on_navigate=on_navigate)


class LuceeInlineDocumentationCommand(sublime_plugin.TextCommand):

    def run(self, edit):
        position = self.view.sel()[0].begin()
        docs = get_inline_documentation(self.view, position)
        if len(docs) > 0:
            display_documentation(self.view, sorted(docs, key=lambda doc: doc.priority, reverse=True))
mit
6,132,070,834,389,892,000
36.379747
117
0.732724
false
3.205212
false
false
false
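A hypothetical documentation source for the plugin above. The callback contract comes from get_inline_documentation(): it receives (view, position) and returns a Documentation namedtuple (doc_html_variables, on_navigate, priority) or a falsy value. The import path and the word being documented are assumptions:

from .inline_documentation import Documentation, add_documentation_source  # assumed path

def my_doc_source(view, position):
    word = view.substr(view.word(position))  # Sublime API: word under the cursor
    if word != "arrayAppend":
        return None
    variables = {
        "header": "arrayAppend()",
        "body": "Appends an element to the end of an array.",
        "links": [{"href": "https://docs.lucee.org/", "text": "docs.lucee.org"}],
    }
    return Documentation(variables, None, priority=1)

add_documentation_source(my_doc_source)

LuceeInlineDocumentationCommand sorts the collected docs descending by priority, so higher-priority sources are shown first in the popup.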
pivotal-jbarrett/geode-native
tools/gnmsg/server_message_decoder.py
1
9484
#!/usr/local/bin/python3 # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import re import struct from dateutil import parser from server_messages import parse_server_message from decoder_base import DecoderBase from message_types import message_types from numeric_conversion import to_hex_digit class ServerMessageDecoder(DecoderBase): def __init__(self, output_queue): super(ServerMessageDecoder, self).__init__(output_queue) self.STATE_NEUTRAL_ = 0 self.STATE_WAITING_FOR_MESSAGE_BODY_ = 1 self.receive_trace_parts_retriever_ = None self.receive_trace_parser_ = None self.connection_states_ = {} self.last_header_ = {} self.nc_version_ = None self.get_receive_trace_parts_functions_ = { "0.0.42": self.get_receive_trace_header_base, "10.0.3": self.get_receive_trace_header_base, "10.1.1": self.get_receive_trace_header_base, "10.1.2": self.get_receive_trace_header_base, "10.1.3": self.get_receive_trace_header_base, "9.1.1": self.get_receive_trace_header_v911, } self.receive_trace_parsers_ = { "0.0.42": self.parse_response_fields_base, "10.0.3": self.parse_response_fields_base, "10.1.1": self.parse_response_fields_base, "10.1.2": self.parse_response_fields_base, "10.1.3": self.parse_response_fields_base, "9.1.1": self.parse_response_fields_v911, } def search_for_version(self, line): if self.nc_version_ == None: expression = re.compile(r"Product version:.*Native (\d+)\.(\d+)\.(\d+)-") match = expression.search(line) if match: major = match.group(1) minor = match.group(2) patch = match.group(3) self.nc_version_ = major + "." + minor + "." 
+ patch self.receive_trace_parts_retriever_ = self.get_receive_trace_parts_functions_[ self.nc_version_ ] self.receive_trace_parser_ = self.receive_trace_parsers_[ self.nc_version_ ] def get_receive_trace_header_with_pointer(self, line, parts): result = False expression = re.compile( r"(\d\d:\d\d:\d\d\.\d+).*TcrConnection::readMessage:\s*\[([\d|a-f|A-F|x|X]+).*received header from endpoint.*bytes:\s*([\d|a-f|A-F]+)" ) match = expression.search(line) if match: parts.append(match.group(1)) parts.append(match.group(2)) parts.append(match.group(3)) result = True return result def get_receive_trace_header_without_pointer(self, line, parts): result = False expression = re.compile( r"(\d\d:\d\d:\d\d\.\d+).*TcrConnection::readMessage:\s*received header from endpoint.*bytes:\s*([\d|a-f|A-F]+)" ) match = expression.search(line) if match: parts.append(match.group(1)) parts.append("0") parts.append(match.group(2)) result = True return result def get_receive_trace_header_base(self, line, parts): result = self.get_receive_trace_header_with_pointer(line, parts) if not result: result = self.get_receive_trace_header_without_pointer(line, parts) return result def get_receive_trace_header_v911(self, line, parts): result = False expression = re.compile( r"(\d\d:\d\d:\d\d\.\d+).*TcrConnection::readMessage: received header from endpoint.*bytes:\s*([\d| ]+)" ) match = expression.search(line) if match: parts.append(parser.parse(match.group(1))) parts.append("0") parts.append(match.group(2)) result = True return result def get_receive_trace_body_parts(self, line, parts): result = False expression = re.compile( "received message body from endpoint.*bytes:\s*([\d|a-f|A-F]+)" ) match = expression.search(line) if match: message = match.group(1) parts.append(message) result = True return result def get_receive_trace_parts(self, line, parts): if self.receive_trace_parts_retriever_ is not None: return self.receive_trace_parts_retriever_(line, parts) def get_add_security_trace_parts(self, line, parts): result = False expression = re.compile( r"(\d\d:\d\d:\d\d\.\d+).*TcrMessage::addSecurityPart\s*\[(0x[\d|a-f|A-F]*).*length\s*=\s*(\d+)\s*,\s*encrypted\s+ID\s*=\s*([\d|a-f|A-F]+)" ) match = expression.search(line) if match: parts.append(parser.parse(match.group(1))) parts.append(match.group(2)) parts.append(match.group(3)) parts.append(match.group(4)) result = True return result def decimal_string_to_hex_string(self, byte): high_nibble = int(int(byte) / 16) low_nibble = int(byte) % 16 return to_hex_digit[high_nibble] + to_hex_digit[low_nibble] def format_bytes_as_hex_v911(self, message_bytes): byte_list = message_bytes.split(" ") hex_string = "" for byte in byte_list: if byte: hex_string += self.decimal_string_to_hex_string(byte) return hex_string def parse_response_fields_base(self, message_bytes): message_type = message_types[int(message_bytes[0:8], 16)] message_length = int(message_bytes[8:16], 16) message_number_of_parts = int(message_bytes[16:24], 16) message_transaction_id = struct.unpack( ">i", bytes.fromhex(message_bytes[24:32]) )[0] message_security_flag = (int(message_bytes[32:34], 16) & 0x02) >> 1 return ( message_type, message_length, message_number_of_parts, message_transaction_id, message_security_flag, ) def parse_response_fields_v911(self, message_bytes): hex_message_bytes = self.format_bytes_as_hex_v911(message_bytes) message_type = message_types[int(hex_message_bytes[0:8], 16)] message_length = int(hex_message_bytes[8:16], 16) message_number_of_parts = int(hex_message_bytes[16:24], 16) 
message_transaction_id = struct.unpack( ">i", bytes.fromhex(hex_message_bytes[24:32]) )[0] message_security_flag = (int(hex_message_bytes[32:34], 16) & 0x02) >> 1 return ( message_type, message_length, message_number_of_parts, message_transaction_id, message_security_flag, ) def parse_response_fields(self, message_bytes): if self.receive_trace_parser_ is not None: return self.receive_trace_parser_(message_bytes) def process_line(self, line): connection = None message_bytes = None message_body = None self.search_for_version(line) parts = [] if self.get_receive_trace_parts(line, parts): ( self.last_header_["Timestamp"], self.last_header_["Connection"], message_bytes, ) = parts elif self.get_receive_trace_body_parts(line, parts): message_body = parts[0] elif self.get_add_security_trace_parts(line, parts): connection = parts[1] else: return if connection not in self.connection_states_: self.connection_states_[connection] = self.STATE_NEUTRAL_ if self.connection_states_[connection] == self.STATE_NEUTRAL_: if message_bytes: self.last_header_["Direction"] = "<---" ( self.last_header_["Type"], self.last_header_["Length"], self.last_header_["Parts"], self.last_header_["TransactionId"], self.last_header_["SecurityFlag"], ) = self.parse_response_fields(message_bytes) self.connection_states_[ connection ] = self.STATE_WAITING_FOR_MESSAGE_BODY_ elif ( self.connection_states_[connection] == self.STATE_WAITING_FOR_MESSAGE_BODY_ ): if message_body: receive_trace = self.last_header_ self.last_header_ = {} parse_server_message(receive_trace, message_body) self.connection_states_[connection] = self.STATE_NEUTRAL_ self.output_queue_.put({"message": receive_trace})
apache-2.0
-3,065,811,444,816,776,000
37.552846
150
0.575812
false
3.714845
false
false
false
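A standalone check of the header slicing in parse_response_fields_base above: the hex text encodes a 4-byte message type, a 4-byte length, a 4-byte part count, a signed 4-byte transaction id, then a flags byte whose 0x02 bit is the security flag. The values are toy data:

import struct

header_hex = "00000051" + "0000001a" + "00000002" + "fffffffe" + "02"

msg_type = int(header_hex[0:8], 16)                                 # 81
length = int(header_hex[8:16], 16)                                  # 26
parts = int(header_hex[16:24], 16)                                  # 2
txn_id = struct.unpack(">i", bytes.fromhex(header_hex[24:32]))[0]   # -2 (signed)
security = (int(header_hex[32:34], 16) & 0x02) >> 1                 # 1

print(msg_type, length, parts, txn_id, security)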
Azure/azure-sdk-for-python
sdk/managedservices/azure-mgmt-managedservices/azure/mgmt/managedservices/aio/operations/_marketplace_registration_definitions_without_scope_operations.py
1
8182
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class MarketplaceRegistrationDefinitionsWithoutScopeOperations: """MarketplaceRegistrationDefinitionsWithoutScopeOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.managedservices.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def list( self, filter: Optional[str] = None, **kwargs ) -> AsyncIterable["_models.MarketplaceRegistrationDefinitionList"]: """Gets a list of the marketplace registration definitions for the marketplace identifier. :param filter: The filter query parameter. Might be used to filter marketplace registration definition by plan identifier, publisher, version etc. 
:type filter: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either MarketplaceRegistrationDefinitionList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.managedservices.models.MarketplaceRegistrationDefinitionList] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.MarketplaceRegistrationDefinitionList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore # Construct parameters query_parameters = {} # type: Dict[str, Any] if filter is not None: query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('MarketplaceRegistrationDefinitionList', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list.metadata = {'url': '/providers/Microsoft.ManagedServices/marketplaceRegistrationDefinitions'} # type: ignore async def get( self, marketplace_identifier: str, **kwargs ) -> "_models.MarketplaceRegistrationDefinition": """Get the marketplace registration definition for the marketplace identifier. :param marketplace_identifier: Market place identifier. Expected Formats - {publisher}.{product[-preview]}.{planName}.{version} or {publisher}.{product[-preview]}.{planName} or {publisher}.{product[-preview]} or {publisher}). 
:type marketplace_identifier: str :keyword callable cls: A custom type or function that will be passed the direct response :return: MarketplaceRegistrationDefinition, or the result of cls(response) :rtype: ~azure.mgmt.managedservices.models.MarketplaceRegistrationDefinition :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.MarketplaceRegistrationDefinition"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'marketplaceIdentifier': self._serialize.url("marketplace_identifier", marketplace_identifier, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('MarketplaceRegistrationDefinition', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/providers/Microsoft.ManagedServices/marketplaceRegistrationDefinitions/{marketplaceIdentifier}'} # type: ignore
mit
-8,387,111,801,339,155,000
47.994012
141
0.664141
false
4.654152
false
false
false
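A hedged sketch of consuming these operations. How the client object is constructed is not shown in this file, so client is assumed to be an already-built azure-mgmt-managedservices async client exposing this operations group; the filter string and marketplace identifier are placeholders:

async def show_definitions(client):
    pager = client.marketplace_registration_definitions_without_scope.list(
        filter="planIdentifier eq 'plan1'")
    async for definition in pager:  # AsyncItemPaged yields the items in .value
        print(definition.id)

    one = await client.marketplace_registration_definitions_without_scope.get(
        "publisher.product.plan.1.0.0")
    print(one.name)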
westernx/vee
vee/environmentrepo.py
1
6434
from subprocess import CalledProcessError

import os
import re

from vee import log
from vee.cli import style_note, style_warning, style_error, style
from vee.environment import Environment
from vee.exceptions import CliMixin
from vee.git import GitRepo
from vee.packageset import PackageSet
from vee.requirements import Requirements, Header
from vee.utils import cached_property, makedirs


class EnvironmentRepo(GitRepo):

    def __init__(self, dbrow, home):
        super(EnvironmentRepo, self).__init__(
            work_tree=dbrow['path'] or home._abs_path('repos', dbrow['name']),
            remote_name=dbrow['remote'],
            branch_name=dbrow['branch'],
        )
        self.id = dbrow['id']
        self.name = dbrow['name']
        self.home = home
        self._req_path = os.path.join(self.work_tree, 'requirements.txt')

    def fetch(self):
        return super(EnvironmentRepo, self).fetch(self.remote_name, self.branch_name)

    def checkout(self, force=False):
        super(EnvironmentRepo, self).checkout(
            revision='%s/%s' % (self.remote_name, self.branch_name),
            branch=self.branch_name,
            force=force
        )

    def get_environment(self):
        return Environment(repo=self, home=self.home)

    def load_requirements(self, revision=None):
        reqs = Requirements(env_repo=self, home=self.home)
        if revision is not None:
            contents = self.show(revision, 'requirements.txt')
            if contents:
                reqs.parse_file(contents.splitlines())
        else:
            if os.path.exists(self._req_path):
                reqs.parse_file(self._req_path)
        return reqs

    def dump_requirements(self, req_set):
        tmp = self._req_path + '.tmp'
        with open(tmp, 'wb') as fh:
            for line in req_set.iter_dump():
                fh.write(line)
        os.rename(tmp, self._req_path)

    def commit(self, message, semver_level=None):
        self.git('add', self._req_path, silent=True)
        status = list(self.status())
        if not status:
            raise RuntimeError('nothing to commit')

        # Make sure there are no other changes.
        for idx, tree, name in status:
            if tree.strip():
                raise RuntimeError('work-tree is dirty')

        req_set = self.load_requirements()
        version_header = req_set.headers.get('Version')
        if not version_header:
            version_header = req_set.add_header('Version', '0.0.0')

        if semver_level is not None:
            version = []
            for i, x in enumerate(re.split(r'[.-]', version_header.value)):
                try:
                    version.append(int(x))
                except ValueError:
                    version.append(x)
            while len(version) <= semver_level:
                version.append(0)
            version[semver_level] = version[semver_level] + 1
            for i in xrange(semver_level + 1, len(version)):
                version[i] = 0
            version_header.value = '.'.join(str(x) for x in version)

        from vee import __about__ as about
        req_set.set_header('Vee-Revision', about.__version__ + '+' + about.__revision__)

        self.dump_requirements(req_set)
        self.git('add', self._req_path, silent=True)
        self.git('commit', '-m', message, silent=True)

    def update(self, force=False):
        log.info(style_note('Updating repo', self.name))
        self.clone_if_not_exists()
        if self.remote_name not in self.remotes():
            log.warning(style_warning('"%s" does not have remote "%s"' % (self.name, self.remote_name)))
            return True
        rev = self.fetch()
        if not force and not self.check_ff_safety(rev):
            log.error('Cannot fast-forward; skipping.')
            return False
        self.checkout(force=force)
        return True

    def upgrade(self, dirty=False, subset=None, reinstall=False, relink=False,
                no_deps=False, force_branch_link=True):
        self.clone_if_not_exists()

        try:
            head = self.head
        except CalledProcessError:
            log.warning(style_warning('no commits in repository'))
            head = None

        try:
            remote_head = self.rev_parse('%s/%s' % (self.remote_name, self.branch_name))
        except ValueError:
            log.warning(style_warning('tracked %s/%s does not exist in self' % (self.remote_name, self.branch_name)))
            remote_head = None

        if remote_head and head != remote_head:
            log.warning(style_warning('%s repo not checked out to %s/%s' % (
                self.name, self.remote_name, self.branch_name)))

        dirty = bool(list(self.status()))
        if not dirty and self.is_dirty():
            log.error('%s repo is dirty; force with --dirty' % self.name)
            return False

        env = self.get_environment()
        req_set = self.load_requirements()
        pkg_set = PackageSet(env=env, home=self.home)

        # Register the whole set, so that dependencies are pulled from here instead
        # of weakly resolved from installed packages.
        # TODO: This blanket reinstalls things, even if no_deps is set.
        pkg_set.resolve_set(req_set, check_existing=not reinstall)

        # Install and/or link.
        pkg_set.install(subset or None, link_env=env, reinstall=reinstall,
                        relink=relink, no_deps=no_deps)

        if pkg_set._errored and not force_branch_link:
            log.warning(style_warning("Not creating branch or version links; force with --force-branch-link"))
            return False

        # Create a symlink by branch.
        path_by_branch = self.home._abs_path('environments', self.name, self.branch_name)
        if os.path.lexists(path_by_branch):
            os.unlink(path_by_branch)
        makedirs(os.path.dirname(path_by_branch))
        os.symlink(env.path, path_by_branch)

        # Create a symlink by version.
        version = req_set.headers.get('Version')
        if version:
            path_by_version = self.home._abs_path('environments', self.name, 'versions',
                                                  version.value + ('-dirty' if dirty else ''))
            if os.path.lexists(path_by_version):
                os.unlink(path_by_version)
            makedirs(os.path.dirname(path_by_version))
            os.symlink(env.path, path_by_version)

        return True
bsd-3-clause
6,445,287,524,280,731,000
34.546961
133
0.591389
false
3.875904
false
false
false
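The version bump inside EnvironmentRepo.commit() is worth isolating: split the Version header on '.' or '-', increment the field at semver_level, and zero everything after it. A standalone sketch of just that rule (it shares the original's assumption that the bumped field is numeric):

import re

def bump(version, semver_level):
    parts = []
    for x in re.split(r'[.-]', version):
        try:
            parts.append(int(x))
        except ValueError:
            parts.append(x)
    while len(parts) <= semver_level:
        parts.append(0)
    parts[semver_level] = parts[semver_level] + 1
    for i in range(semver_level + 1, len(parts)):
        parts[i] = 0
    return '.'.join(str(x) for x in parts)

print(bump('1.4.2', 1))  # -> '1.5.0'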
kparal/anaconda
pyanaconda/localization.py
1
23995
# Localization classes and functions # # Copyright (C) 2012-2013 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # # Red Hat Author(s): Martin Gracik <[email protected]> # Vratislav Podzimek <[email protected]> # import gettext import os import re import langtable import locale as locale_mod import glob from collections import namedtuple from pyanaconda import constants from pyanaconda.iutil import upcase_first_letter, setenv, execWithRedirect from pyanaconda.iutil import open # pylint: disable=redefined-builtin import logging log = logging.getLogger("anaconda") LOCALE_CONF_FILE_PATH = "/etc/locale.conf" SCRIPTS_SUPPORTED_BY_CONSOLE = {'Latn', 'Cyrl', 'Grek'} #e.g. 'SR_RS.UTF-8@latin' LANGCODE_RE = re.compile(r'(?P<language>[A-Za-z]+)' r'(_(?P<territory>[A-Za-z]+))?' r'(\.(?P<encoding>[-A-Za-z0-9]+))?' r'(@(?P<script>[-A-Za-z0-9]+))?') class LocalizationConfigError(Exception): """Exception class for localization configuration related problems""" pass class InvalidLocaleSpec(LocalizationConfigError): """Exception class for the errors related to invalid locale specs""" pass def parse_langcode(langcode): """ For a given langcode (e.g. 'SR_RS.UTF-8@latin') returns a dictionary with the following keys and example values: 'language' : 'SR' 'territory' : 'RS' 'encoding' : 'UTF-8' 'script' : 'latin' or None if the given string doesn't match the LANGCODE_RE. """ if not langcode: return None match = LANGCODE_RE.match(langcode) if match: return match.groupdict() else: return None def is_supported_locale(locale): """ Function that tells if the given locale is supported by the Anaconda or not. We consider locales supported by the langtable as supported by the Anaconda. :param locale: locale to test :type locale: str :return: whether the given locale is supported or not :rtype: bool :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE) """ en_name = get_english_name(locale) return bool(en_name) def locale_supported_in_console(locale): """ Function that tells if the given locale can be displayed by the Linux console. The Linux console can display Latin, Cyrillic and Greek characters reliably, but others such as Japanese, can't be correctly installed. :param str locale: locale to test :return: whether the given locale is supported by the console or not :rtype: bool :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE) """ locale_scripts = get_locale_scripts(locale) return set(locale_scripts).issubset(SCRIPTS_SUPPORTED_BY_CONSOLE) def langcode_matches_locale(langcode, locale): """ Function that tells if the given langcode matches the given locale. I.e. 
if all parts of appearing in the langcode (language, territory, script and encoding) are the same as the matching parts of the locale. :param langcode: a langcode (e.g. en, en_US, en_US@latin, etc.) :type langcode: str :param locale: a valid locale (e.g. en_US.UTF-8 or sr_RS.UTF-8@latin, etc.) :type locale: str :return: whether the given langcode matches the given locale or not :rtype: bool """ langcode_parts = parse_langcode(langcode) locale_parts = parse_langcode(locale) if not langcode_parts or not locale_parts: # to match, both need to be valid langcodes (need to have at least # language specified) return False # Check parts one after another. If some part appears in the langcode and # doesn't match the one from the locale (or is missing in the locale), # return False, otherwise they match for part in ("language", "territory", "script", "encoding"): if langcode_parts[part] and langcode_parts[part] != locale_parts.get(part): return False return True def find_best_locale_match(locale, langcodes): """ Find the best match for the locale in a list of langcodes. This is useful when e.g. pt_BR is a locale and there are possibilities to choose an item (e.g. rnote) for a list containing both pt and pt_BR or even also pt_PT. :param locale: a valid locale (e.g. en_US.UTF-8 or sr_RS.UTF-8@latin, etc.) :type locale: str :param langcodes: a list or generator of langcodes (e.g. en, en_US, en_US@latin, etc.) :type langcodes: list(str) or generator(str) :return: the best matching langcode from the list of None if none matches :rtype: str or None """ SCORE_MAP = {"language" : 1000, "territory": 100, "script" : 10, "encoding" : 1} def get_match_score(locale, langcode): score = 0 locale_parts = parse_langcode(locale) langcode_parts = parse_langcode(langcode) if not locale_parts or not langcode_parts: return score for part, part_score in SCORE_MAP.items(): if locale_parts[part] and langcode_parts[part]: if locale_parts[part] == langcode_parts[part]: # match score += part_score else: # not match score -= part_score elif langcode_parts[part] and not locale_parts[part]: # langcode has something the locale doesn't have score -= part_score return score scores = [] # get score for each langcode for langcode in langcodes: scores.append((langcode, get_match_score(locale, langcode))) # find the best one sorted_langcodes = sorted(scores, key=lambda item_score: item_score[1], reverse=True) # matches matching only script or encoding or both are not useful if sorted_langcodes and sorted_langcodes[0][1] > SCORE_MAP["territory"]: return sorted_langcodes[0][0] else: return None def setup_locale(locale, lang=None, text_mode=False): """ Procedure setting the system to use the given locale and store it in to the ksdata.lang object (if given). DOES NOT PERFORM ANY CHECKS OF THE GIVEN LOCALE. $LANG must be set by the caller in order to set the language used by gettext. Doing this in a thread-safe way is up to the caller. We also try to set a proper console font for the locale in text mode. If the font for the locale can't be displayed in the Linux console, we fall back to the English locale. 
:param str locale: locale to setup :param lang: ksdata.lang object or None :param bool text_mode: if the locale is being setup for text mode :return: None :rtype: None """ if lang: lang.lang = locale # not all locales might be displayable in text mode if text_mode: # check if the script corresponding to the locale/language # can be displayed by the Linux console # * all scripts for the given locale/language need to be # supported by the linux console # * otherwise users might get a screen full of white rectangles # (also known as "tofu") in text mode # then we also need to check if we have information about what # font to use for correctly displaying the given language/locale script_supported = locale_supported_in_console(locale) log.debug("scripts found for locale %s: %s", locale, get_locale_scripts(locale)) console_fonts = get_locale_console_fonts(locale) log.debug("console fonts found for locale %s: %s", locale, console_fonts) font_set = False if script_supported and console_fonts: # try to set console font for font in console_fonts: if set_console_font(font): # console font set successfully, skip the rest font_set = True break if not font_set: log.warning("can't set console font for locale %s", locale) # report what exactly went wrong if not(script_supported): log.warning("script not supported by console for locale %s", locale) if not(console_fonts): # no fonts known for locale log.warning("no console font found for locale %s", locale) if script_supported and console_fonts: log.warning("none of the suggested fonts can be set for locale %s", locale) log.warning("falling back to the English locale") locale = constants.DEFAULT_LANG os.environ["LANG"] = locale # pylint: disable=environment-modify # set the locale to the value we have selected log.debug("setting locale to: %s", locale) setenv("LANG", locale) locale_mod.setlocale(locale_mod.LC_ALL, locale) def get_english_name(locale): """ Function returning english name for the given locale. :param locale: locale to return english name for :type locale: str :return: english name for the locale or empty string if unknown :rtype: st :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE) """ parts = parse_langcode(locale) if "language" not in parts: raise InvalidLocaleSpec("'%s' is not a valid locale" % locale) name = langtable.language_name(languageId=parts["language"], territoryId=parts.get("territory", ""), scriptId=parts.get("script", ""), languageIdQuery="en") return upcase_first_letter(name) def get_native_name(locale): """ Function returning native name for the given locale. :param locale: locale to return native name for :type locale: str :return: english name for the locale or empty string if unknown :rtype: st :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE) """ parts = parse_langcode(locale) if "language" not in parts: raise InvalidLocaleSpec("'%s' is not a valid locale" % locale) name = langtable.language_name(languageId=parts["language"], territoryId=parts.get("territory", ""), scriptId=parts.get("script", ""), languageIdQuery=parts["language"], territoryIdQuery=parts.get("territory", ""), scriptIdQuery=parts.get("script", "")) return upcase_first_letter(name) def get_available_translations(localedir=None): """ Method that generates (i.e. returns a generator) available translations for the installer in the given localedir. 
:type localedir: str :return: generator yielding available translations (languages) :rtype: generator yielding strings """ localedir = localedir or gettext._default_localedir # usually there are no message files for en messagefiles = sorted(glob.glob(localedir + "/*/LC_MESSAGES/anaconda.mo") + ["blob/en/blob/blob"]) trans_gen = (path.split(os.path.sep)[-3] for path in messagefiles) langs = set() for trans in trans_gen: parts = parse_langcode(trans) lang = parts.get("language", "") if lang and lang not in langs: langs.add(lang) # check if there are any locales for the language locales = get_language_locales(lang) if not locales: continue yield lang def get_language_locales(lang): """ Function returning all locales available for the given language. :param lang: language to get available locales for :type lang: str :return: a list of available locales :rtype: list of strings :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE) """ parts = parse_langcode(lang) if "language" not in parts: raise InvalidLocaleSpec("'%s' is not a valid language" % lang) return langtable.list_locales(languageId=parts["language"], territoryId=parts.get("territory", ""), scriptId=parts.get("script", "")) def get_territory_locales(territory): """ Function returning list of locales for the given territory. The list is sorted from the most probable locale to the least probable one (based on langtable's ranking. :param territory: territory to return locales for :type territory: str :return: list of locales :rtype: list of strings """ return langtable.list_locales(territoryId=territory) def get_locale_keyboards(locale): """ Function returning preferred keyboard layouts for the given locale. :param locale: locale string (see LANGCODE_RE) :type locale: str :return: list of preferred keyboard layouts :rtype: list of strings :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE) """ parts = parse_langcode(locale) if "language" not in parts: raise InvalidLocaleSpec("'%s' is not a valid locale" % locale) return langtable.list_keyboards(languageId=parts["language"], territoryId=parts.get("territory", ""), scriptId=parts.get("script", "")) def get_locale_timezones(locale): """ Function returning preferred timezones for the given locale. :param locale: locale string (see LANGCODE_RE) :type locale: str :return: list of preferred timezones :rtype: list of strings :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE) """ parts = parse_langcode(locale) if "language" not in parts: raise InvalidLocaleSpec("'%s' is not a valid locale" % locale) return langtable.list_timezones(languageId=parts["language"], territoryId=parts.get("territory", ""), scriptId=parts.get("script", "")) def get_locale_territory(locale): """ Function returning locale's territory. :param locale: locale string (see LANGCODE_RE) :type locale: str :return: territory or None :rtype: str or None :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE) """ parts = parse_langcode(locale) if "language" not in parts: raise InvalidLocaleSpec("'%s' is not a valid locale" % locale) return parts.get("territory", None) def get_locale_console_fonts(locale): """ Function returning preferred console fonts for the given locale. 
:param str locale: locale string (see LANGCODE_RE) :return: list of preferred console fonts :rtype: list of strings :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE) """ parts = parse_langcode(locale) if "language" not in parts: raise InvalidLocaleSpec("'%s' is not a valid locale" % locale) return langtable.list_consolefonts(languageId=parts["language"], territoryId=parts.get("territory", ""), scriptId=parts.get("script", "")) def get_locale_scripts(locale): """ Function returning preferred scripts (writing systems) for the given locale. :param locale: locale string (see LANGCODE_RE) :type locale: str :return: list of preferred scripts :rtype: list of strings :raise InvalidLocaleSpec: if an invalid locale is given (see LANGCODE_RE) """ parts = parse_langcode(locale) if "language" not in parts: raise InvalidLocaleSpec("'%s' is not a valid locale" % locale) return langtable.list_scripts(languageId=parts["language"], territoryId=parts.get("territory", ""), scriptId=parts.get("script", "")) def get_xlated_timezone(tz_spec_part): """ Function returning translated name of a region, city or complete timezone name according to the current value of the $LANG variable. :param tz_spec_part: a region, city or complete timezone name :type tz_spec_part: str :return: translated name of the given region, city or timezone :rtype: str """ locale = os.environ.get("LANG", constants.DEFAULT_LANG) parts = parse_langcode(locale) if "language" not in parts: raise InvalidLocaleSpec("'%s' is not a valid locale" % locale) xlated = langtable.timezone_name(tz_spec_part, languageIdQuery=parts["language"], territoryIdQuery=parts.get("territory", ""), scriptIdQuery=parts.get("script", "")) return xlated def write_language_configuration(lang, root): """ Write language configuration to the $root/etc/locale.conf file. :param lang: ksdata.lang object :param root: path to the root of the installed system """ try: fpath = os.path.normpath(root + LOCALE_CONF_FILE_PATH) with open(fpath, "w") as fobj: fobj.write('LANG="%s"\n' % lang.lang) except IOError as ioerr: msg = "Cannot write language configuration file: %s" % ioerr.strerror raise LocalizationConfigError(msg) def load_firmware_language(lang, text_mode=False): """ Procedure that loads firmware language information (if any). It stores the information in the given ksdata.lang object and sets the $LANG environment variable. This method must be run before any other threads are started. :param lang: ksdata.lang object :return: None :rtype: None """ if lang.lang and lang.seen: # set in kickstart, do not override return try: n = "/sys/firmware/efi/efivars/PlatformLang-8be4df61-93ca-11d2-aa0d-00e098032b8c" d = open(n, 'r', 0).read() except IOError: return # the contents of the file are: # 4-bytes of attribute data that we don't care about # NUL terminated ASCII string like 'en-US'. if len(d) < 10: log.debug("PlatformLang was too short") return d = d[4:] if d[2] != '-': log.debug("PlatformLang was malformed") return # they use - and we use _, so fix it... d = d[:2] + '_' + d[3:-1] # UEFI 2.3.1 Errata C specifies 2 aliases in common use that # aren't part of RFC 4646, but are allowed in PlatformLang. # Because why make anything simple? 
if d.startswith('zh_chs'): d = 'zh_Hans' elif d.startswith('zh_cht'): d = 'zh_Hant' d += '.UTF-8' if not is_supported_locale(d): log.debug("PlatformLang was '%s', which is unsupported.", d) return locales = get_language_locales(d) if not locales: log.debug("No locales found for the PlatformLang '%s'.", d) return log.debug("Using UEFI PlatformLang '%s' ('%s') as our language.", d, locales[0]) setup_locale(locales[0], lang, text_mode) os.environ["LANG"] = locales[0] # pylint: disable=environment-modify _DateFieldSpec = namedtuple("DateFieldSpec", ["format", "suffix"]) def resolve_date_format(year, month, day, fail_safe=True): """ Puts the year, month and day objects in the right order according to the currently set locale and provides format specification for each of the fields. :param year: any object or value representing year :type year: any :param month: any object or value representing month :type month: any :param day: any object or value representing day :type day: any :param bool fail_safe: whether to fall back to default in case of invalid format or raise exception instead :returns: a pair where the first field contains a tuple with the year, month and day objects/values put in the right order and where the second field contains a tuple with three :class:`_DateFieldSpec` objects specifying formats respectively to the first (year, month, day) field, e.g. ((year, month, day), (y_fmt, m_fmt, d_fmt)) :rtype: tuple :raise ValueError: in case currently set locale has unsupported date format and fail_safe is set to False """ FAIL_SAFE_DEFAULT = "%Y-%m-%d" def order_terms_formats(fmt_str): # see date (1), 'O' (not '0') is a mystery, 'E' is Buddhist calendar, '(.*)' # is an arbitrary suffix field_spec_re = re.compile(r'([-_0OE^#]*)([yYmbBde])(.*)') # see date (1) fmt_str = fmt_str.replace("%F", "%Y-%m-%d") # e.g. "%d.%m.%Y" -> ['d.', 'm.', 'Y'] fields = fmt_str.split("%")[1:] ordered_terms = [] ordered_formats = [] for field in fields: match = field_spec_re.match(field) if not match: # ignore fields we are not interested in (like %A for weekday name, etc.) continue prefix, item, suffix = match.groups() if item in ("d", "e"): # "e" is the same as "_d" ordered_terms.append(day) elif item in ("Y", "y"): # 4-digit year, 2-digit year ordered_terms.append(year) elif item in ("m", "b", "B"): # month number, short month name, long month name ordered_terms.append(month) # "%" + prefix + item gives a format for date/time formatting functions ordered_formats.append(_DateFieldSpec("%" + prefix + item, suffix.strip())) if len(ordered_terms) != 3 or len(ordered_formats) != 3: raise ValueError("Not all fields successfully identified in the format '%s'" % fmt_str) return (tuple(ordered_terms), tuple(ordered_formats)) fmt_str = locale_mod.nl_langinfo(locale_mod.D_FMT) if not fmt_str or "%" not in fmt_str: if fail_safe: # use some sane default fmt_str = FAIL_SAFE_DEFAULT else: raise ValueError("Invalid date format string for current locale: '%s'" % fmt_str) try: return order_terms_formats(fmt_str) except ValueError: if not fail_safe: raise else: # if this call fails too, something is going terribly wrong and we # should be informed about it return order_terms_formats(FAIL_SAFE_DEFAULT) def set_console_font(font): """ Try to set console font to the given value. 
:param str font: console font name :returns: True on success, False on failure :rtype: Bool """ log.debug("setting console font to %s", font) rc = execWithRedirect("setfont", [font]) if rc == 0: log.debug("console font set successfully to %s", font) return True else: log.error("setting console font to %s failed", font) return False
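
# --- Editor's usage sketch (hedged; not part of the original module) ---
# Demonstrates resolve_date_format() from above with placeholder values;
# the printed order depends on the current locale, and with the fail-safe
# default format "%Y-%m-%d" it is always (year, month, day).
if __name__ == "__main__":
    terms, formats = resolve_date_format("YYYY", "MM", "DD", fail_safe=True)
    print(terms)                              # e.g. ('MM', 'DD', 'YYYY') for en_US
    print([spec.format for spec in formats])  # e.g. ['%m', '%d', '%y']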
gpl-2.0
3,819,219,907,003,919,000
33.978134
99
0.628506
false
4.057322
false
false
false
hsolbrig/SNOMEDToOWL
SNOMEDCTToOWL/RF2Files/Transitive.py
1
2725
from typing import Dict, Set from SNOMEDCTToOWL.SNOMEDToOWLConstants import RelationshipFilePrefix class Transitive: relationship_prefix = RelationshipFilePrefix def __init__(self): self._children = {} # parent -> set(children) Dict[int, Set[int]] self._parents = {} # child -> set(parents) Dict[int, Set[int]] self.__desc_cache = {} # parent -> set(descendants) self.__ancestor_cache = {} # child -> set(ancestors) @classmethod def filtr(cls, fname: str) -> bool: """ Return true if this is a computed relationship file. Transitivity is always based on computed :param fname: file name to test :return: true if it should be processed """ return fname.startswith(cls.relationship_prefix) def add(self, row: Dict) -> None: """ Add an RF2 relationship row to the Transitive file :param row: row to add -- already tested for active """ child = int(row["sourceId"]) parent = int(row["destinationId"]) self._children.setdefault(parent, set()).add(child) self._parents.setdefault(child, set()).add(parent) def descendants_of(self, parent: int) -> Set[int]: """ Return all descendants of parent :param parent: parent concept :return: set of concepts """ return self._children.get(parent, set())\ .union(*[self.descendants_of(x) for x in self._children.get(parent, set())]) def is_descendant_of(self, desc: int, parent: int) -> bool: """ Determine whether desc is a descendant of parent :param desc: descendant to test :param parent: parent concept :return: True or False """ if parent not in self.__desc_cache: self.__desc_cache[parent] = self.descendants_of(parent) return desc in self.__desc_cache[parent] def is_descendant_or_self_of(self, desc: int, parent: int) -> bool: """ Determine whether desc is a descendant of the parent or is the parent itself :param desc: descendant to test :param parent: parent concept :return: True or False """ return self.is_descendant_of(desc, parent) or desc == parent def ancestors_of(self, child: int) -> Set[int]: return self._parents.get(child, set())\ .union(*[self.ancestors_of(x) for x in self._parents.get(child, set())]) def is_ancestor_of(self, ancestor: int, child: int) -> bool: if child not in self.__ancestor_cache: self.__ancestor_cache[child] = self.ancestors_of(child) return ancestor in self.__ancestor_cache[child]
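
# --- Editor's usage sketch (hedged; not part of the original module) ---
# Builds a tiny two-level hierarchy by hand; the dicts mimic the RF2
# relationship columns that add() reads (sourceId = child concept,
# destinationId = parent concept).
if __name__ == "__main__":
    t = Transitive()
    t.add({"sourceId": "2", "destinationId": "1"})  # 2 is-a 1
    t.add({"sourceId": "3", "destinationId": "2"})  # 3 is-a 2
    assert t.descendants_of(1) == {2, 3}
    assert t.is_ancestor_of(1, 3)
    assert t.is_descendant_or_self_of(2, 2)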
apache-2.0
-6,966,169,800,768,780,000
38.492754
102
0.605872
false
4.05506
false
false
false
ConservationInternational/ldmp-qgis-plugin
LDMP/processing_provider/carbon.py
1
8231
import numpy as np from osgeo import gdal, osr from qgis import processing from qgis.core import (QgsGeometry, QgsProcessing, QgsProcessingAlgorithm, QgsProcessingException, QgsProcessingParameterFile, QgsProcessingParameterFileDestination, QgsProcessingParameterNumber, QgsProcessingOutputString, QgsProcessingOutputNumber) from qgis.PyQt.QtCore import QCoreApplication from LDMP import log from LDMP.summary import calc_cell_area class TCSummary(QgsProcessingAlgorithm): """ Used for summarizing results of output of the carbon change analysis. """ def tr(self, string): return QCoreApplication.translate('processing\\carbon', string) def createInstance(self): # Must return a new copy of your algorithm. return TCSummary() def name(self): return 'carbon_summary' def displayName(self): return self.tr('Carbon change summary') def group(self): return self.tr('Carbon change') def groupId(self): return 'trendsearth' def shortHelpString(self): return self.tr('Summarize output of a carbon change analysis') def initAlgorithm(self, config=None): # Inputs self.addParameter( QgsProcessingParameterFile( 'INPUT', self.tr('Input carbon analysis file') ) ) self.addParameter( QgsProcessingParameterNumber( 'YEAR_START', self.tr('Starting year') ) ) self.addParameter( QgsProcessingParameterNumber( 'YEAR_END', self.tr('Ending year') ) ) # Outputs self.addOutput( QgsProcessingOutputString( 'FOREST_LOSS', self.tr('Forest loss per year in sq km.') ) ) self.addOutput( QgsProcessingOutputString( 'CARBON_LOSS', self.tr('Carbon loss per year in tonnes of C') ) ) self.addOutput( QgsProcessingOutputNumber( 'CARBON_INITIAL', self.tr('Initial tonnes of C') ) ) self.addOutput( QgsProcessingOutputNumber( 'AREA_FOREST', self.tr('Area of forest in sq km') ) ) self.addOutput( QgsProcessingOutputNumber( 'AREA_NON_FOREST', self.tr('Area of non-forest in sq km') ) ) self.addOutput( QgsProcessingOutputNumber( 'AREA_MISSING', self.tr('Area of missing data in sq km') ) ) self.addOutput( QgsProcessingOutputNumber( 'AREA_WATER', self.tr('Area of water in sq km') ) ) self.addOutput( QgsProcessingOutputNumber( 'AREA_SITE', self.tr('Area of site in sq km') ) ) def processAlgorithm(self, parameters, context, feedback): src_file = self.parameterAsFile(parameters,'INPUT', context) year_start = self.parameterAsInt(parameters,'YEAR_START', context) year_end = self.parameterAsInt(parameters,'YEAR_END', context) src_ds = gdal.Open(src_file) band_f_loss = src_ds.GetRasterBand(1) band_tc = src_ds.GetRasterBand(2) block_sizes = band_f_loss.GetBlockSize() xsize = band_f_loss.XSize ysize = band_f_loss.YSize n_out_bands = 1 x_block_size = block_sizes[0] y_block_size = block_sizes[1] src_gt = src_ds.GetGeoTransform() # Width of cells in longitude long_width = src_gt[1] # Set initial lat ot the top left corner latitude lat = src_gt[3] # Width of cells in latitude pixel_height = src_gt[5] area_missing = 0 area_non_forest = 0 area_water = 0 area_site = 0 initial_forest_area = 0 initial_carbon_total = 0 forest_loss = np.zeros(year_end - year_start) carbon_loss = np.zeros(year_end - year_start) blocks = 0 for y in range(0, ysize, y_block_size): if y + y_block_size < ysize: rows = y_block_size else: rows = ysize - y for x in range(0, xsize, x_block_size): if feedback.isCanceled(): log("Processing of {} killed by user after processing {} out of {} blocks.".format(src_file, y, ysize)) break feedback.setProgress(100 * (float(y) + (float(x)/xsize)*y_block_size) / ysize) if x + x_block_size < xsize: cols = x_block_size else: cols = xsize - x f_loss_array = 
band_f_loss.ReadAsArray(x, y, cols, rows)
                tc_array = band_tc.ReadAsArray(x, y, cols, rows)

                # Calculate cell area for each horizontal line
                cell_areas = np.array([calc_cell_area(lat + pixel_height*n, lat + pixel_height*(n + 1), long_width) for n in range(rows)])
                cell_areas.shape = (cell_areas.size, 1)
                # Make an array of the same size as the input arrays containing
                # the area of each cell (which is identical for all cells in a
                # given row - cell areas only vary among rows)
                cell_areas_array = np.repeat(cell_areas, cols, axis=1)

                initial_forest_pixels = (f_loss_array == 0) | (f_loss_array > (year_start - 2000))
                # The site area includes everything that isn't masked
                area_missing = area_missing + np.sum(((f_loss_array == -32768) | (tc_array == -32768)) * cell_areas_array)
                area_water = area_water + np.sum((f_loss_array == -2) * cell_areas_array)
                area_non_forest = area_non_forest + np.sum((f_loss_array == -1) * cell_areas_array)
                area_site = area_site + np.sum((f_loss_array != -32767) * cell_areas_array)
                initial_forest_area = initial_forest_area + np.sum(initial_forest_pixels * cell_areas_array)
                initial_carbon_total = initial_carbon_total + np.sum(initial_forest_pixels * tc_array * (tc_array >= 0) * cell_areas_array)

                for n in range(year_end - year_start):
                    # Note the codes are year - 2000
                    forest_loss[n] = forest_loss[n] + np.sum((f_loss_array == year_start - 2000 + n + 1) * cell_areas_array)
                    # Check units here - is tc_array in per m or per ha?
                    carbon_loss[n] = carbon_loss[n] + np.sum((f_loss_array == year_start - 2000 + n + 1) * tc_array * (tc_array >= 0) * cell_areas_array)

                blocks += 1
            lat += pixel_height * rows
        feedback.setProgress(100)

        if feedback.isCanceled():
            return {}
        else:
            # Convert all area tables from square meters to hectares
            forest_loss = forest_loss * 1e-4
            # Note that carbon is scaled by 10
            carbon_loss = carbon_loss * 1e-4 / 10
            area_missing = area_missing * 1e-4
            area_water = area_water * 1e-4
            area_non_forest = area_non_forest * 1e-4
            area_site = area_site * 1e-4
            initial_forest_area = initial_forest_area * 1e-4
            # Note that carbon is scaled by 10
            initial_carbon_total = initial_carbon_total * 1e-4 / 10

            return {'FOREST_LOSS': np.array2string(forest_loss),
                    'CARBON_LOSS': np.array2string(carbon_loss),
                    'CARBON_INITIAL': initial_carbon_total,
                    'AREA_FOREST': initial_forest_area,
                    'AREA_NON_FOREST': area_non_forest,
                    'AREA_WATER': area_water,
                    'AREA_MISSING': area_missing,
                    'AREA_SITE': area_site}
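
# --- Editor's illustration (hedged; not part of the plugin) ---
# Shows the per-row cell-area broadcast used in processAlgorithm() with a
# stand-in area function; `fake_cell_area` is hypothetical, the real
# calc_cell_area() lives in LDMP.summary.
if __name__ == "__main__":
    rows, cols = 3, 4

    def fake_cell_area(lat_top, lat_bottom, long_width):  # hypothetical stub
        return abs(lat_top - lat_bottom) * long_width

    areas = np.array([fake_cell_area(10 - 0.5 * n, 10 - 0.5 * (n + 1), 0.5)
                      for n in range(rows)])
    areas.shape = (areas.size, 1)
    # One area value per pixel, constant along each row (areas vary only by row)
    areas_array = np.repeat(areas, cols, axis=1)
    assert areas_array.shape == (rows, cols)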
gpl-2.0
3,302,916,721,898,712,000
35.910314
153
0.532864
false
4.082837
false
false
false
hasgeek/funnel
migrations/versions/887db555cca9_adding_uuid_to_commentset.py
1
1769
"""Adding uuid to commentset. Revision ID: 887db555cca9 Revises: 222b78a8508d Create Date: 2020-05-08 19:16:15.324555 """ from uuid import uuid4 from alembic import op from sqlalchemy.sql import column, table from sqlalchemy_utils import UUIDType import sqlalchemy as sa from progressbar import ProgressBar import progressbar.widgets # revision identifiers, used by Alembic. revision = '887db555cca9' down_revision = '222b78a8508d' branch_labels = None depends_on = None commentset = table( 'commentset', column('id', sa.Integer()), column('uuid', UUIDType(binary=False)) ) def get_progressbar(label, maxval): return ProgressBar( maxval=maxval, widgets=[ label, ': ', progressbar.widgets.Percentage(), ' ', progressbar.widgets.Bar(), ' ', progressbar.widgets.ETA(), ' ', ], ) def upgrade(): conn = op.get_bind() op.add_column( 'commentset', sa.Column('uuid', UUIDType(binary=False), nullable=True) ) count = conn.scalar(sa.select([sa.func.count('*')]).select_from(commentset)) progress = get_progressbar("Commentsets", count) progress.start() items = conn.execute(sa.select([commentset.c.id])) for counter, item in enumerate(items): conn.execute( sa.update(commentset).where(commentset.c.id == item.id).values(uuid=uuid4()) ) progress.update(counter) progress.finish() op.alter_column('commentset', 'uuid', nullable=False) op.create_unique_constraint('commentset_uuid_key', 'commentset', ['uuid']) def downgrade(): op.drop_constraint('commentset_uuid_key', 'commentset', type_='unique') op.drop_column('commentset', 'uuid')
agpl-3.0
-8,246,667,750,301,554,000
23.915493
88
0.642736
false
3.580972
false
false
false
brigittebigi/proceed
proceed/src/wxgui/frames/import_wizard.py
1
18211
#!/usr/bin/python # -*- coding: UTF-8 -*- # --------------------------------------------------------------------------- # ___ __ ___ ___ ____ ____ __ # | \ | \ | | / | | | \ Automatic # |__/ |__/ | | | |__ |__ | | Conference # | |\_ | | | | | | | Proceedings # | | \ |___| \___ |___ |___ |__/ Generator # ========================================================== # # http://www.lpl-aix.fr/~bigi/ # # --------------------------------------------------------------------------- # developed at: # # Laboratoire Parole et Langage # # Copyright (C) 2013-2014 Brigitte Bigi # # Use of this software is governed by the GPL, v3 # This banner notice must not be removed # --------------------------------------------------------------------------- # # Proceed is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Proceed is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Proceed. If not, see <http://www.gnu.org/licenses/>. # # --------------------------------------------------------------------------- __docformat__ = "epytext" # --------------------------------------------------------------------------- import wx import wx.lib.newevent import wx.wizard import logging import os.path import sys sys.path.append( os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__))))), "src") ) from wxgui.cutils.imageutils import spBitmap from wxgui.sp_consts import HEADER_FONTSIZE from wxgui.sp_consts import FRAME_STYLE from wxgui.sp_consts import FRAME_TITLE from wxgui.sp_icons import IMPORT_EXPORT_ICON from wxgui.sp_icons import GRID_ICON from wxgui.sp_icons import TEX_ICON from wxgui.sp_icons import WWW_ICON from DataIO.Read.reader import Reader from DataIO.Write.writer import Writer from structs.prefs import Preferences from structs.abstracts_themes import all_themes from wxgui.frames.processprogress import ProcessProgressDialog # --------------------------------------------------------------------------- ImportFinishedEvent, EVT_IMPORT_WIZARD_FINISHED = wx.lib.newevent.NewEvent() ImportFinishedCommandEvent, EVT_IMPORT_WIZARD_FINISHED_COMMAND = wx.lib.newevent.NewCommandEvent() # --------------------------------------------------------------------------- class ImportWizard( wx.wizard.Wizard ): def __init__(self, parent): wx.wizard.Wizard.__init__(self, parent, -1, title=FRAME_TITLE+" - Import", style=FRAME_STYLE) self.output = "" self.page0 = InputPage(self) self.page0.SetName("input") self.page1 = OutputPage(self) self.page1.SetName("output") self.page2 = LatexPage(self) self.page2.SetName("latex") wx.wizard.WizardPageSimple.Chain(self.page0, self.page1) wx.wizard.WizardPageSimple.Chain(self.page1, self.page2) self.Bind(wx.wizard.EVT_WIZARD_PAGE_CHANGED, self.onPageChanged) self.Bind(wx.wizard.EVT_WIZARD_FINISHED, self.onFinished) wx.CallAfter(self.SetSize,(520,440)) self.RunWizard(self.page0) self.Destroy() #---------------------------------------------------------------------- def onPageChanged(self, event): """""" page = event.GetPage() if page.GetName() == "output": if not os.path.exists(self.page0.urlFld.GetValue()): wx.MessageBox("A 
valid input file name is required.", 'Info', wx.OK | wx.ICON_INFORMATION) self.RunWizard(self.page0) return else: p = ProcessProgressDialog(self) p.Show() arguments = {} arguments['readername'] = self.page0.confname arguments['filename'] = self.page0.urlFld.GetValue() arguments['authorsfilename'] = self.page0.urlauthFld.GetValue() arguments['progress'] = p try: self.reader = Reader( arguments ) p.close() except Exception as e: wx.MessageBox("Error while reading file:\n%s"%str(e), 'Info', wx.OK | wx.ICON_INFORMATION) self.Destroy() elif page.GetName() == "latex": # if len(self.page1.urlFld.GetValue().strip()): # wx.MessageBox("A directory is required.", 'Info', wx.OK | wx.ICON_INFORMATION) # self.RunWizard(self.page1) # return self.output = self.page1.urlFld.GetValue().strip() if not os.path.exists( self.output ): try: os.mkdir( self.output ) except Exception as e: wx.MessageBox("Error while creating output directory:\n%s"%str(e), 'Info', wx.OK | wx.ICON_INFORMATION) self.RunWizard(self.page1) return try: self.writer = Writer( self.reader.docs ) self.writer.set_status( self.page1.status ) if self.page1.exportcsv: self.writer.writeCSV( self.output ) if self.page1.exporthtml: self.writer.writeHTML( self.output ) except Exception as e: wx.MessageBox("Error while creating output files:\n%s"%str(e), 'Info', wx.OK | wx.ICON_INFORMATION) self.RunWizard(self.page1) return #---------------------------------------------------------------------- def onFinished(self, event): """""" if self.page2.export is True: # Create preferences prefs = Preferences() theme = all_themes.get_theme( self.page2.theme ) prefs.SetTheme( theme ) prefs.SetValue('COMPILER', 'str', self.page2.compiler.strip()) # Write as LaTeX in the same dir as proceed CSV files p = ProcessProgressDialog(self) p.Show() self.writer.set_progress(p) self.writer.writeLaTeX_as_Dir( self.output, prefs ) self.writer.set_progress(None) p.close() evt = ImportFinishedEvent(path=self.output) evt.SetEventObject(self) wx.PostEvent(self.GetParent(), evt) #---------------------------------------------------------------------- # ---------------------------------------------------------------------------- class InputPage(wx.wizard.WizardPageSimple): """ Parameters for the input data. """ def __init__(self, parent): """ Constructor. 
""" wx.wizard.WizardPageSimple.__init__(self, parent) sizer = wx.BoxSizer(wx.VERTICAL) self.dirname = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))) title_layout = wx.BoxSizer(wx.HORIZONTAL) bmp = wx.BitmapButton(self, bitmap=spBitmap(IMPORT_EXPORT_ICON, 32), style=wx.NO_BORDER) font = wx.Font(HEADER_FONTSIZE, wx.MODERN, wx.NORMAL, wx.BOLD, False, u'Consolas') title_label = wx.StaticText(self, label="File to import and related information:", style=wx.ALIGN_CENTER) title_label.SetFont( font ) title_layout.Add(bmp, flag=wx.TOP|wx.RIGHT|wx.ALIGN_RIGHT, border=5) title_layout.Add(title_label, flag=wx.EXPAND|wx.ALL|wx.ALIGN_CENTER_VERTICAL, border=5) sizer.Add(title_layout, 0, flag=wx.ALL, border=0) sizer.Add((-1, 10)) # --------- Conference web site confnames = ['sciencesconf', 'easychair'] self.confname = 'sciencesconf' readername = wx.RadioBox(self, label=" The file to import comes from: ", size=(410,-1), choices=confnames, majorDimension=1) readername.SetSelection( 0 ) readername.Bind(wx.EVT_RADIOBOX, self.onConfName) sizer.Add(readername, 0, flag=wx.ALL, border=0) sizer.Add((-1, 10)) # --------- Input file name hBox = wx.BoxSizer(wx.HORIZONTAL) hBox.Add(wx.StaticText(self, label="File name:", size=(100,30)), flag=wx.TOP|wx.ALIGN_CENTER_VERTICAL, border=5) self.urlFld = wx.TextCtrl(self, size=(300,30)) hBox.Add(self.urlFld, 1, flag=wx.LEFT, border=2) checkBtn = wx.Button(self, -1, "Choose...", size=(80,30)) checkBtn.Bind(wx.EVT_BUTTON, lambda evt, temp="input": self.onOpen(evt, temp) ) hBox.Add(checkBtn, 0, flag=wx.LEFT, border=10) sizer.Add(hBox, flag=wx.EXPAND | wx.LEFT | wx.RIGHT | wx.TOP) sizer.Add((-1, 10)) # --------- Input file name for authors hBox = wx.BoxSizer(wx.HORIZONTAL) self.authtext = wx.StaticText(self, label="Authors file:", size=(100,30)) hBox.Add(self.authtext, flag=wx.TOP|wx.ALIGN_CENTER_VERTICAL, border=5) self.urlauthFld = wx.TextCtrl(self, size=(300,30)) hBox.Add(self.urlauthFld, 1, flag=wx.LEFT, border=2) self.checkauthBtn = wx.Button(self, -1, "Choose...", size=(80,30)) self.checkauthBtn.Bind(wx.EVT_BUTTON, lambda evt, temp="author": self.onOpen(evt, temp) ) hBox.Add(self.checkauthBtn, 0, flag=wx.LEFT, border=10) sizer.Add(hBox, flag=wx.EXPAND | wx.LEFT | wx.RIGHT | wx.TOP) self.enable() self.Layout() self.SetSizerAndFit(sizer) def onOpen(self, event, temp): filename = self.file_open() if filename: if temp == "input": self.urlFld.SetValue(filename) else: self.urlauthFld.SetValue(filename) def onConfName(self, event): o = event.GetEventObject() self.confname = o.GetStringSelection() self.enable() def enable(self): if self.confname == 'easychair': self.authtext.SetForegroundColour( wx.Colour(180,80,80)) self.checkauthBtn.Enable(True) else: self.authtext.SetForegroundColour( wx.Colour(128,128,128)) self.checkauthBtn.Enable(False) def file_open(self): with wx.FileDialog(self, "Choose a file to import", self.dirname, "", "*.*", wx.OPEN) as dlg: if dlg.ShowModal() == wx.ID_OK: directory, filename = dlg.GetDirectory(), dlg.GetFilename() return os.path.join(directory, filename) return None # ---------------------------------------------------------------------------- class OutputPage(wx.wizard.WizardPageSimple): """ Parameters for the output data. """ def __init__(self, parent): """ Constructor. 
""" wx.wizard.WizardPageSimple.__init__(self, parent) self.urlFld = "" self.dirname = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))) sizer = wx.BoxSizer(wx.VERTICAL) title_layout = wx.BoxSizer(wx.HORIZONTAL) bmp = wx.BitmapButton(self, bitmap=spBitmap(GRID_ICON, 32), style=wx.NO_BORDER) font = wx.Font(HEADER_FONTSIZE, wx.MODERN, wx.NORMAL, wx.BOLD, False, u'Consolas') title_label = wx.StaticText(self, label="Where to save:", style=wx.ALIGN_CENTER) title_label.SetFont( font ) title_layout.Add(bmp, flag=wx.TOP|wx.RIGHT|wx.ALIGN_RIGHT, border=5) title_layout.Add(title_label, flag=wx.EXPAND|wx.ALL|wx.ALIGN_CENTER_VERTICAL, border=5) sizer.Add(title_layout, 0, flag=wx.ALL, border=0) sizer.Add((-1, 10)) # --------- Output directory hBox = wx.BoxSizer(wx.HORIZONTAL) hBox.Add(wx.StaticText(self, label="Directory:", size=(100,30)), flag=wx.TOP|wx.ALIGN_CENTER_VERTICAL, border=5) self.urlFld = wx.TextCtrl(self, size=(300,30)) hBox.Add(self.urlFld, 1, flag=wx.LEFT, border=2) checkBtn = wx.Button(self, -1, "Choose...", size=(80,30)) checkBtn.Bind(wx.EVT_BUTTON, self.onDirectory ) hBox.Add(checkBtn, 0, flag=wx.LEFT, border=10) sizer.Add(hBox, flag=wx.EXPAND | wx.LEFT | wx.RIGHT | wx.TOP) sizer.Add((-1, 10)) self.SetSizer(sizer) # ---------- Status allstatus = ['init papers (status=0)', 'only accepted papers (status=1)'] self.status = 1 statusradio = wx.RadioBox(self, label=" Choose papers to save: ", size=(410,-1), choices=allstatus, majorDimension=1) statusradio.SetSelection( 1 ) statusradio.Bind(wx.EVT_RADIOBOX, self.onStatus) sizer.Add(statusradio, 0, flag=wx.ALL, border=0) sizer.Add((-1, 20)) # ----------CSV self.exportcsv = True cbp = wx.CheckBox(self, label="Save as CSV files for Proceed", size=(300,-1)) cbp.SetValue(True) cbp.Bind(wx.EVT_CHECKBOX, self.onExportAsCSV) sizer.Add(cbp, 0, flag=wx.LEFT, border=0) sizer.Add((-1, 10)) # ----------HTML self.exporthtml = False cbp = wx.CheckBox(self, label="Save the list of papers in HTML", size=(300,-1)) cbp.SetValue(False) cbp.Bind(wx.EVT_CHECKBOX, self.onExportAsHTML) sizer.Add(cbp, 0, flag=wx.LEFT, border=0) self.SetSizerAndFit(sizer) def onDirectory(self, event): with wx.DirDialog(self, "Choose a directory to save in", self.dirname, style=wx.DD_CHANGE_DIR) as dlg: if dlg.ShowModal() == wx.ID_OK: self.urlFld.SetValue( dlg.GetPath() ) def onStatus(self, event): o = event.GetEventObject() self.status = o.GetSelection() def onExportAsCSV(self, event): o = event.GetEventObject() self.exportcsv = bool( o.GetValue() ) def onExportAsHTML(self, event): o = event.GetEventObject() self.exporthtml = bool( o.GetValue() ) # ---------------------------------------------------------------------------- class LatexPage(wx.wizard.WizardPageSimple): """ Process the data. """ def __init__(self, parent): """ Constructor. 
""" wx.wizard.WizardPageSimple.__init__(self, parent) self.urlFld = "" self.dirname = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))) sizer = wx.BoxSizer(wx.VERTICAL) title_layout = wx.BoxSizer(wx.HORIZONTAL) bmp = wx.BitmapButton(self, bitmap=spBitmap(GRID_ICON, 32), style=wx.NO_BORDER) font = wx.Font(HEADER_FONTSIZE, wx.MODERN, wx.NORMAL, wx.BOLD, False, u'Consolas') title_label = wx.StaticText(self, label="Save abstracts as LaTeX...", style=wx.ALIGN_CENTER) title_label.SetFont( font ) title_layout.Add(bmp, flag=wx.TOP|wx.RIGHT|wx.ALIGN_RIGHT, border=5) title_layout.Add(title_label, flag=wx.EXPAND|wx.ALL|wx.ALIGN_CENTER_VERTICAL, border=5) sizer.Add(title_layout, 0, flag=wx.ALL, border=0) sizer.Add((-1, 10)) # ----------CHECK self.export = False cbp = wx.CheckBox(self, label="Create each abstract as a LaTeX file", size=(300,-1)) cbp.SetValue(False) cbp.Bind(wx.EVT_CHECKBOX, self.onExport) sizer.Add(cbp, 0, flag=wx.LEFT, border=0) sizer.Add((-1, 10)) # ------------- Theme self.theme = 'basic' thlist = sorted(all_themes.get_themes().keys()) self.themeradio = wx.RadioBox(self, label=" Choose a style: ", size=(410,-1), choices=thlist, majorDimension=1) self.themeradio.SetSelection( thlist.index( 'basic' ) ) self.themeradio.Bind(wx.EVT_RADIOBOX, self.onTheme) sizer.Add(self.themeradio, 0, flag=wx.LEFT, border=40) sizer.Add((-1, 10)) # ------------- Compiler self.compilers = ['pdflatex', 'xetex'] self.compiler = 'pdflatex' self.comradio = wx.RadioBox(self, label=" Choose the LaTeX compiler: ", size=(410,-1), choices=self.compilers, majorDimension=1) self.comradio.SetSelection( 0 ) self.comradio.Bind(wx.EVT_RADIOBOX, self.onCompiler) sizer.Add(self.comradio, 0, flag=wx.LEFT, border=40) sizer.Add((-1, 10)) # ------------- PDF self.pdf = True self.cbp = wx.CheckBox(self, label="Compile the LaTeX files", size=(300,-1)) self.cbp.SetValue(True) self.cbp.Bind(wx.EVT_CHECKBOX, self.onPDFChange) sizer.Add(self.cbp, 0, flag=wx.LEFT, border=40) self.enable(False) self.SetSizerAndFit(sizer) def onCompiler(self, event): o = event.GetEventObject() self.compiler = o.GetStringSelection() def onTheme(self, event): o = event.GetEventObject() self.theme = o.GetStringSelection() def onPDFChange(self, event): o = event.GetEventObject() self.pdf = bool( o.GetValue() ) def onExport(self, event): o = event.GetEventObject() self.export = bool( o.GetValue() ) self.enable(self.export) def enable(self, value): if value is False: self.themeradio.SetForegroundColour(wx.Colour(128,128,128)) self.comradio.SetForegroundColour(wx.Colour(128,128,128)) else: self.themeradio.SetForegroundColour(wx.Colour(80,80,200)) self.comradio.SetForegroundColour(wx.Colour(80,80,200)) for i in range(len(all_themes.get_themes().keys())): self.themeradio.EnableItem(i,value) for i in range(len(self.compilers)): self.comradio.EnableItem(i,value) self.cbp.Enable(value) # ---------------------------------------------------------------------------- if __name__ == "__main__": app = wx.App(False) ImportWizard(None) app.MainLoop() #----------------------------------------------------------------------
gpl-3.0
-3,954,575,004,273,499,000
40.768349
143
0.557905
false
3.648037
false
false
false
mch/python-ant
src/ant/core/message.py
1
18462
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (c) 2011, Martín Raúl Villalba # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # ############################################################################## # pylint: disable=missing-docstring from __future__ import division, absolute_import, print_function, unicode_literals from struct import pack, unpack from six import with_metaclass from ant.core import constants from ant.core.constants import MESSAGE_TX_SYNC, RESPONSE_NO_ERROR from ant.core.exceptions import MessageError class MessageType(type): def __init__(cls, name, bases, dict_): super(MessageType, cls).__init__(name, bases, dict_) type_ = cls.type if type_ is not None: cls.TYPES[type_] = cls def __call__(cls, *args, **kwargs): if cls.type is not None: return super(MessageType, cls).__call__(*args, **kwargs) type_ = kwargs.get('type') if type_ is None: raise RuntimeError("Message' cannot be untyped") del kwargs['type'] msgType = cls.TYPES.get(type_) if msgType is not None: return msgType(*args, **kwargs) if 0x00 <= type_ <= 0xFF: msg = super(MessageType, cls).__call__(*args, **kwargs) msg.type = type_ return msg else: raise MessageError('Could not set type (type out of range).', internal=Message.CORRUPTED) MSG_HEADER_SIZE = 3 MSG_FOOTER_SIZE = 1 class Message(with_metaclass(MessageType)): TYPES = {} type = None INCOMPLETE = 'incomplete' CORRUPTED = 'corrupted' MALFORMED = 'malformed' def __init__(self, payload=None): self._payload = None self.payload = payload if payload is not None else bytearray() @property def payload(self): return self._payload @payload.setter def payload(self, payload): if len(payload) > 9: raise MessageError('Could not set payload (payload too long).', internal=Message.MALFORMED) self._payload = payload @property def checksum(self): checksum = MESSAGE_TX_SYNC ^ len(self._payload) ^ self.type for byte in self._payload: checksum ^= byte return checksum def encode(self): raw, payload = bytearray(len(self)), self._payload raw[0:MSG_HEADER_SIZE-1] = (MESSAGE_TX_SYNC, len(payload), self.type) raw[MSG_HEADER_SIZE:-MSG_FOOTER_SIZE] = payload raw[-1] = self.checksum return raw @classmethod def decode(cls, raw): raw = bytearray(raw) if len(raw) < 5: raise MessageError('Could not decode. Message length should be >=5 bytes but was %d.' % len(raw), internal=Message.INCOMPLETE) sync, length, type_ = raw[:MSG_HEADER_SIZE] if sync != MESSAGE_TX_SYNC: raise MessageError('Could not decode. 
Expected TX sync but got 0x%.2x.' % sync, internal=Message.CORRUPTED) if len(raw) < (length + MSG_HEADER_SIZE + MSG_FOOTER_SIZE): raise MessageError('Could not decode. Message length should be %d but was %d.' % (length + MSG_HEADER_SIZE + MSG_FOOTER_SIZE, len(raw)), internal=Message.INCOMPLETE) msg = Message(type=type_) # pylint: disable=unexpected-keyword-arg msg.payload = raw[MSG_HEADER_SIZE:length + MSG_HEADER_SIZE] if msg.checksum != raw[length + MSG_HEADER_SIZE]: raise MessageError('Could not decode. Checksum should be 0x%.2x but was 0x%.2x.' % (raw[length + MSG_HEADER_SIZE], msg.checksum), internal=Message.CORRUPTED) return msg def __len__(self): return len(self._payload) + MSG_HEADER_SIZE + MSG_FOOTER_SIZE def __str__(self, data=None): rawstr = '<' + self.__class__.__name__ if data is not None: rawstr += ': ' + data return rawstr + '>' class ChannelMessage(Message): def __init__(self, payload=b'', number=0x00): super(ChannelMessage, self).__init__(bytearray(1) + payload) self.channelNumber = number @property def channelNumber(self): return self._payload[0] @channelNumber.setter def channelNumber(self, number): if (number > 0xFF) or (number < 0x00): raise MessageError('Could not set channel number. Should be 0 to 255 but was %s.' % number) self._payload[0] = number def __str__(self, data=None): rawstr = "C(%d)" % self.channelNumber if data is not None: rawstr += ': ' + data return super(ChannelMessage, self).__str__(data=rawstr) # Config messages class ChannelUnassignMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_UNASSIGN def __init__(self, number=0x00): super(ChannelUnassignMessage, self).__init__(number=number) class ChannelAssignMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_ASSIGN def __init__(self, number=0x00, channelType=0x00, network=0x00): super(ChannelAssignMessage, self).__init__(payload=bytearray(2), number=number) self.channelType = channelType self.networkNumber = network @property def channelType(self): return self._payload[1] @channelType.setter def channelType(self, type_): self._payload[1] = type_ @property def networkNumber(self): return self._payload[2] @networkNumber.setter def networkNumber(self, number): self._payload[2] = number class ChannelIDMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_ID def __init__(self, number=0x00, device_number=0x0000, device_type=0x00, trans_type=0x00): super(ChannelIDMessage, self).__init__(payload=bytearray(4), number=number) self.deviceNumber = device_number self.deviceType = device_type self.transmissionType = trans_type @property def deviceNumber(self): return unpack(b'<H', bytes(self._payload[1:3]))[0] @deviceNumber.setter def deviceNumber(self, device_number): self._payload[1:3] = pack(b'<H', device_number) @property def deviceType(self): return self._payload[3] @deviceType.setter def deviceType(self, device_type): self._payload[3] = device_type @property def transmissionType(self): return self._payload[4] @transmissionType.setter def transmissionType(self, trans_type): self._payload[4] = trans_type class ChannelPeriodMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_PERIOD def __init__(self, number=0x00, period=8192): super(ChannelPeriodMessage, self).__init__(payload=bytearray(2), number=number) self.channelPeriod = period @property def channelPeriod(self): return unpack('<H', bytes(self._payload[1:3]))[0] @channelPeriod.setter def channelPeriod(self, period): self._payload[1:3] = pack('<H', period) class ChannelSearchTimeoutMessage(ChannelMessage): type = 
constants.MESSAGE_CHANNEL_SEARCH_TIMEOUT def __init__(self, number=0x00, timeout=0xFF): super(ChannelSearchTimeoutMessage, self).__init__(payload=bytearray(1), number=number) self.timeout = timeout @property def timeout(self): return self._payload[1] @timeout.setter def timeout(self, timeout): self._payload[1] = timeout class ChannelFrequencyMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_FREQUENCY def __init__(self, number=0x00, frequency=66): super(ChannelFrequencyMessage, self).__init__(payload=bytearray(1), number=number) self.frequency = frequency @property def frequency(self): return self._payload[1] @frequency.setter def frequency(self, frequency): self._payload[1] = frequency class ChannelTXPowerMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_TX_POWER def __init__(self, number=0x00, power=0x00): super(ChannelTXPowerMessage, self).__init__(payload=bytearray(1), number=number) self.power = power @property def power(self): return self._payload[1] @power.setter def power(self, power): self._payload[1] = power class NetworkKeyMessage(Message): type = constants.MESSAGE_NETWORK_KEY def __init__(self, number=0x00, key=b'\x00' * 8): super(NetworkKeyMessage, self).__init__(payload=bytearray(9)) self.number = number self.key = key @property def number(self): return self._payload[0] @number.setter def number(self, number): self._payload[0] = number @property def key(self): return self._payload[1:] @key.setter def key(self, key): self._payload[1:] = key class TXPowerMessage(Message): type = constants.MESSAGE_TX_POWER def __init__(self, power=0x00): super(TXPowerMessage, self).__init__(payload=bytearray(2)) self.power = power @property def power(self): return self._payload[1] @power.setter def power(self, power): self._payload[1] = power # Control messages class SystemResetMessage(Message): type = constants.MESSAGE_SYSTEM_RESET def __init__(self): super(SystemResetMessage, self).__init__(payload=bytearray(1)) class ChannelOpenMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_OPEN def __init__(self, number=0x00): super(ChannelOpenMessage, self).__init__(number=number) class ChannelCloseMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_CLOSE def __init__(self, number=0x00): super(ChannelCloseMessage, self).__init__(number=number) class ChannelRequestMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_REQUEST def __init__(self, number=0x00, messageID=constants.MESSAGE_CHANNEL_STATUS): super(ChannelRequestMessage, self).__init__(payload=bytearray(1), number=number) self.messageID = messageID @property def messageID(self): return self._payload[1] @messageID.setter def messageID(self, messageID): if (messageID > 0xFF) or (messageID < 0x00): raise MessageError('Could not set message ID. Should be 0 to 255 but was %s.' 
% messageID) self._payload[1] = messageID # Data messages class ChannelBroadcastDataMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_BROADCAST_DATA def __init__(self, number=0x00, data=b'\x00' * 7): super(ChannelBroadcastDataMessage, self).__init__(payload=data, number=number) @property def data(self): return self._payload[1:9] class ChannelAcknowledgedDataMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_ACKNOWLEDGED_DATA def __init__(self, number=0x00, data=b'\x00' * 7): super(ChannelAcknowledgedDataMessage, self).__init__(payload=data, number=number) @property def data(self): return self._payload[1:9] class ChannelBurstDataMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_BURST_DATA def __init__(self, number=0x00, data=b'\x00' * 7): super(ChannelBurstDataMessage, self).__init__(payload=data, number=number) @property def data(self): return self._payload[1:9] # Channel event messages class ChannelEventResponseMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_EVENT def __init__(self, number=0x00, message_id=0x00, message_code=0x00): super(ChannelEventResponseMessage, self).__init__(payload=bytearray(2), number=number) self.messageID = message_id self.messageCode = message_code @property def messageID(self): return self._payload[1] @messageID.setter def messageID(self, message_id): if (message_id > 0xFF) or (message_id < 0x00): raise MessageError('Could not set message ID. Should be 0 to 255 but was %s.' % message_id) self._payload[1] = message_id @property def messageCode(self): return self._payload[2] @messageCode.setter def messageCode(self, message_code): if (message_code > 0xFF) or (message_code < 0x00): raise MessageError('Could not set message code. Should be 0 to 255 but was %s.' % message_code) self._payload[2] = message_code def __str__(self): # pylint: disable=W0221 msgCode = self.messageCode if self.messageID != 1: return "<ChannelResponse: '%s' on C(%d): %s>" % ( self.TYPES[self.messageID].__name__, self.channelNumber, 'OK' if msgCode == RESPONSE_NO_ERROR else '0x%.2x' % msgCode) return "<ChannelEvent: C(%d): 0x%.2x>" % (self.channelNumber, msgCode) # Requested response messages class ChannelStatusMessage(ChannelMessage): type = constants.MESSAGE_CHANNEL_STATUS def __init__(self, number=0x00, status=0x00): super(ChannelStatusMessage, self).__init__(payload=bytearray(1), number=number) self.status = status @property def status(self): return self._payload[1] @status.setter def status(self, status): if (status > 0xFF) or (status < 0x00): raise MessageError('Could not set channel status. Should be 0 to 255 but was %s.' % status) self._payload[1] = status class VersionMessage(Message): type = constants.MESSAGE_VERSION def __init__(self, version=b'\x00' * 9): super(VersionMessage, self).__init__(payload=bytearray(9)) self.version = version @property def version(self): return self._payload @version.setter def version(self, version): if len(version) != 9: raise MessageError('Could not set ANT version (expected 9 bytes).') self.payload = bytearray(version) class StartupMessage(Message): type = constants.MESSAGE_STARTUP def __init__(self, startupMessage=0x00): super(StartupMessage, self).__init__(payload=bytearray(1)) self.startupMessage = startupMessage @property def startupMessage(self): return self._payload[0] @startupMessage.setter def startupMessage(self, startupMessage): if (startupMessage > 0xFF) or (startupMessage < 0x00): raise MessageError('Could not set start-up message. Should be 0 to 255 but was %s.' 
% startupMessage) self._payload[0] = startupMessage class CapabilitiesMessage(Message): type = constants.MESSAGE_CAPABILITIES def __init__(self, max_channels=0x00, max_nets=0x00, std_opts=0x00, adv_opts=0x00, adv_opts2=0x00): super(CapabilitiesMessage, self).__init__(payload=bytearray(4)) self.maxChannels = max_channels self.maxNetworks = max_nets self.stdOptions = std_opts self.advOptions = adv_opts if adv_opts2 is not None: self.advOptions2 = adv_opts2 @property def maxChannels(self): return self._payload[0] @maxChannels.setter def maxChannels(self, num): if (num > 0xFF) or (num < 0x00): raise MessageError('Could not set max channels. Should be 0 to 255 but was %s.' % num) self._payload[0] = num @property def maxNetworks(self): return self._payload[1] @maxNetworks.setter def maxNetworks(self, num): if (num > 0xFF) or (num < 0x00): raise MessageError('Could not set max networks. Should be 0 to 255 but was %s.' % num) self._payload[1] = num @property def stdOptions(self): return self._payload[2] @stdOptions.setter def stdOptions(self, num): if (num > 0xFF) or (num < 0x00): raise MessageError('Could not set std options. Should be 0 to 255 but was %s.' % num) self._payload[2] = num @property def advOptions(self): return self._payload[3] @advOptions.setter def advOptions(self, num): if (num > 0xFF) or (num < 0x00): raise MessageError('Could not set adv options. Should be 0 to 255 but was %s.' % num) self._payload[3] = num @property def advOptions2(self): return self._payload[4] if len(self._payload) == 5 else 0x00 @advOptions2.setter def advOptions2(self, num): if (num > 0xFF) or (num < 0x00): raise MessageError('Could not set adv options 2. Should be 0 to 255 but was %s.' % num) if len(self._payload) == 4: self._payload.append(0) self._payload[4] = num class SerialNumberMessage(Message): type = constants.MESSAGE_SERIAL_NUMBER def __init__(self, serial=b'\x00' * 4): super(SerialNumberMessage, self).__init__() self.serialNumber = serial @property def serialNumber(self): return self._payload @serialNumber.setter def serialNumber(self, serial): if len(serial) != 4: raise MessageError('Could not set serial number (expected 4 bytes).') self.payload = bytearray(serial)
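
# --- Editor's usage sketch (hedged; not part of the original module) ---
# Round-trips a channel ID message through encode()/decode();
# Message.decode() re-dispatches to ChannelIDMessage via the MessageType
# metaclass, so the decoded object has the typed accessors again.
if __name__ == "__main__":
    msg = ChannelIDMessage(number=1, device_number=0x1234,
                           device_type=0x78, trans_type=0x05)
    raw = msg.encode()
    same = Message.decode(raw)
    assert isinstance(same, ChannelIDMessage)
    assert same.deviceNumber == 0x1234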
mit
603,550,664,031,524,400
31.329247
113
0.626002
false
3.926824
false
false
false
dakrauth/picker
picker/migrations/0003_auto_20180801_0800.py
1
5687
# Generated by Django 2.0.7 on 2018-08-01 12:00 from django.conf import settings from django.db import migrations, models import django.db.models.deletion import picker.models class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('picker', '0002_auto_20160720_0917'), ] operations = [ migrations.CreateModel( name='PickerFavorite', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), migrations.CreateModel( name='PickerGrouping', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=75, unique=True)), ('status', models.CharField(choices=[('ACTV', 'Active'), ('IDLE', 'Inactive')], default='ACTV', max_length=4)), ], ), migrations.CreateModel( name='PickerMembership', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('status', models.CharField(choices=[('ACTV', 'Active'), ('IDLE', 'Inactive'), ('SUSP', 'Suspended'), ('MNGT', 'Manager')], default='ACTV', max_length=4)), ('autopick', models.CharField(choices=[('NONE', 'None'), ('RAND', 'Random')], default='RAND', max_length=4)), ('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='members', to='picker.PickerGrouping')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='picker_memberships', to=settings.AUTH_USER_MODEL)), ], ), migrations.AddField( model_name='league', name='current_season', field=models.IntegerField(blank=True, null=True), ), migrations.AddField( model_name='league', name='slug', field=models.SlugField(default=picker.models.temp_slug), ), migrations.AddField( model_name='pickset', name='is_winner', field=models.BooleanField(default=False), ), migrations.AlterField( model_name='game', name='category', field=models.CharField(choices=[('REG', 'Regular Season'), ('POST', 'Post Season'), ('PRE', 'Pre Season'), ('FRND', 'Friendly')], default='REG', max_length=4), ), migrations.AlterField( model_name='game', name='status', field=models.CharField(choices=[('U', 'Unplayed'), ('T', 'Tie'), ('H', 'Home Win'), ('A', 'Away Win'), ('X', 'Cancelled')], default='U', max_length=1), ), migrations.AlterField( model_name='game', name='tv', field=models.CharField(blank=True, max_length=8, verbose_name='TV'), ), migrations.AlterField( model_name='gameset', name='byes', field=models.ManyToManyField(blank=True, related_name='bye_set', to='picker.Team', verbose_name='Bye Teams'), ), migrations.AlterField( model_name='league', name='logo', field=models.ImageField(blank=True, null=True, upload_to='picker/logos'), ), migrations.AlterField( model_name='pickset', name='strategy', field=models.CharField(choices=[('USER', 'User'), ('RAND', 'Random'), ('HOME', 'Home Team'), ('BEST', 'Best Record')], default='USER', max_length=4), ), migrations.AlterField( model_name='playoffpicks', name='user', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='preference', name='autopick', field=models.CharField(choices=[('NONE', 'None'), ('RAND', 'Random')], default='RAND', max_length=4), ), migrations.AlterField( model_name='team', name='logo', field=models.ImageField(blank=True, null=True, upload_to='picker/logos'), ), migrations.AlterUniqueTogether( name='preference', unique_together=set(), ), migrations.AddField( 
model_name='pickergrouping', name='leagues', field=models.ManyToManyField(blank=True, to='picker.League'), ), migrations.AddField( model_name='pickerfavorite', name='league', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='picker.League'), ), migrations.AddField( model_name='pickerfavorite', name='team', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='picker.Team'), ), migrations.AddField( model_name='pickerfavorite', name='user', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), migrations.RemoveField( model_name='preference', name='favorite_team', ), migrations.RemoveField( model_name='preference', name='league', ), migrations.RemoveField( model_name='preference', name='status', ), ]
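
# --- Editor's note (hedged; not part of the migration) ---
# This revision moves per-user data off Preference (favorite_team, league,
# and status are removed above) onto the new PickerFavorite, PickerGrouping,
# and PickerMembership models. Apply or roll back with the standard commands:
#   python manage.py migrate picker 0003
#   python manage.py migrate picker 0002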
mit
9,125,686,761,335,229,000
40.510949
171
0.55706
false
4.157164
false
false
false
vlegoff/tsunami
src/primaires/joueur/commandes/montrer/niveaux.py
1
2438
# -*-coding:Utf-8 -*

# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
#   may be used to endorse or promote products derived from this software
#   without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

"""Module containing the 'montrer niveaux' (show levels) command."""

from primaires.interpreteur.masque.parametre import Parametre
from primaires.perso.montrer.niveaux import MontrerNiveaux


class PrmNiveaux(Parametre):

    """The 'montrer niveaux' (show levels) command."""

    def __init__(self):
        """Parameter constructor."""
        Parametre.__init__(self, "niveaux", "levels")
        self.tronquer = True
        self.schema = "<nom_joueur>"
        self.aide_courte = "affiche les niveaux d'un joueur"
        self.aide_longue = \
            "Cette commande montre les niveaux d'un joueur. Vous devez " \
            "simplement préciser le nom du joueur en paramètre."

    def interpreter(self, personnage, dic_masques):
        """Command interpretation method."""
        joueur = dic_masques["nom_joueur"].joueur
        personnage << MontrerNiveaux.montrer(joueur)
bsd-3-clause
7,708,689,610,241,708,000
44.90566
79
0.737772
false
3.849684
false
false
false
cryvate/project-euler
project_euler/library/number_theory/pells_equation.py
1
1029
from .continued_fractions import convergents_sqrt

from typing import Generator, Tuple


def solve_pells_equation(n: int) -> Generator[Tuple[int, int], None, None]:
    convergents = convergents_sqrt(n)

    for convergent in convergents:
        h = convergent.numerator
        k = convergent.denominator

        # The fundamental solution of x^2 - ny^2 = 1 is the first
        # convergent h/k of sqrt(n) that satisfies the equation.
        if h ** 2 - n * (k ** 2) == 1:
            break

    x, y = h, k

    while True:
        yield x, y

        # Multiplying (x + y*sqrt(n)) by the fundamental unit
        # (h + k*sqrt(n)) yields the next solution.
        x, y = h * x + n * k * y, h * y + k * x


def solve_negative_pells_equation(n: int) -> \
        Generator[Tuple[int, int], None, None]:
    convergents = convergents_sqrt(n)

    for convergent in convergents:
        h = convergent.numerator
        k = convergent.denominator

        if h ** 2 - n * (k ** 2) == -1:
            break

        # If a solution of the *positive* equation appears before any
        # solution of the negative one, the continued-fraction period is
        # even and x^2 - ny^2 = -1 has no solution at all.
        if h ** 2 - n * (k ** 2) == 1:
            raise ValueError(f"Equation x^2 - {n}y^2 = -1 has no solution")

    x, y = h, k

    while True:
        yield x, y

        # Applying the unit (h + k*sqrt(n)) once maps a solution of
        # x^2 - ny^2 = -1 to one of x^2 - ny^2 = +1, so it is applied
        # twice to land back on the -1 equation; the doubled line below
        # is intentional, not a copy-paste error.
        x, y = h * x + n * k * y, h * y + k * x
        x, y = h * x + n * k * y, h * y + k * x
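
# --- Editor's usage sketch (hedged; not part of the original module) ---
# First solutions for n = 2, taking finite slices of the infinite
# generators: x^2 - 2y^2 = 1 gives (3, 2), (17, 12), (99, 70) and
# x^2 - 2y^2 = -1 gives (1, 1), (7, 5), (41, 29). Because of the relative
# import above, run this as a module, e.g.
# `python -m project_euler.library.number_theory.pells_equation`.
if __name__ == "__main__":
    from itertools import islice
    print(list(islice(solve_pells_equation(2), 3)))
    print(list(islice(solve_negative_pells_equation(2), 3)))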
mit
-1,582,267,298,741,601,800
23.5
75
0.512148
false
3.118182
false
false
false
dmlc/xgboost
tests/python/test_with_pandas.py
1
10402
# -*- coding: utf-8 -*- import numpy as np import xgboost as xgb import testing as tm import pytest try: import pandas as pd except ImportError: pass pytestmark = pytest.mark.skipif(**tm.no_pandas()) dpath = 'demo/data/' rng = np.random.RandomState(1994) class TestPandas: def test_pandas(self): df = pd.DataFrame([[1, 2., True], [2, 3., False]], columns=['a', 'b', 'c']) dm = xgb.DMatrix(df, label=pd.Series([1, 2])) assert dm.feature_names == ['a', 'b', 'c'] assert dm.feature_types == ['int', 'float', 'i'] assert dm.num_row() == 2 assert dm.num_col() == 3 np.testing.assert_array_equal(dm.get_label(), np.array([1, 2])) # overwrite feature_names and feature_types dm = xgb.DMatrix(df, label=pd.Series([1, 2]), feature_names=['x', 'y', 'z'], feature_types=['q', 'q', 'q']) assert dm.feature_names == ['x', 'y', 'z'] assert dm.feature_types == ['q', 'q', 'q'] assert dm.num_row() == 2 assert dm.num_col() == 3 # incorrect dtypes df = pd.DataFrame([[1, 2., 'x'], [2, 3., 'y']], columns=['a', 'b', 'c']) with pytest.raises(ValueError): xgb.DMatrix(df) # numeric columns df = pd.DataFrame([[1, 2., True], [2, 3., False]]) dm = xgb.DMatrix(df, label=pd.Series([1, 2])) assert dm.feature_names == ['0', '1', '2'] assert dm.feature_types == ['int', 'float', 'i'] assert dm.num_row() == 2 assert dm.num_col() == 3 np.testing.assert_array_equal(dm.get_label(), np.array([1, 2])) df = pd.DataFrame([[1, 2., 1], [2, 3., 1]], columns=[4, 5, 6]) dm = xgb.DMatrix(df, label=pd.Series([1, 2])) assert dm.feature_names == ['4', '5', '6'] assert dm.feature_types == ['int', 'float', 'int'] assert dm.num_row() == 2 assert dm.num_col() == 3 df = pd.DataFrame({'A': ['X', 'Y', 'Z'], 'B': [1, 2, 3]}) dummies = pd.get_dummies(df) # B A_X A_Y A_Z # 0 1 1 0 0 # 1 2 0 1 0 # 2 3 0 0 1 result, _, _ = xgb.data._transform_pandas_df(dummies, enable_categorical=False) exp = np.array([[1., 1., 0., 0.], [2., 0., 1., 0.], [3., 0., 0., 1.]]) np.testing.assert_array_equal(result, exp) dm = xgb.DMatrix(dummies) assert dm.feature_names == ['B', 'A_X', 'A_Y', 'A_Z'] assert dm.feature_types == ['int', 'int', 'int', 'int'] assert dm.num_row() == 3 assert dm.num_col() == 4 df = pd.DataFrame({'A=1': [1, 2, 3], 'A=2': [4, 5, 6]}) dm = xgb.DMatrix(df) assert dm.feature_names == ['A=1', 'A=2'] assert dm.feature_types == ['int', 'int'] assert dm.num_row() == 3 assert dm.num_col() == 2 df_int = pd.DataFrame([[1, 1.1], [2, 2.2]], columns=[9, 10]) dm_int = xgb.DMatrix(df_int) df_range = pd.DataFrame([[1, 1.1], [2, 2.2]], columns=range(9, 11, 1)) dm_range = xgb.DMatrix(df_range) assert dm_int.feature_names == ['9', '10'] # assert not "9 " assert dm_int.feature_names == dm_range.feature_names # test MultiIndex as columns df = pd.DataFrame( [ (1, 2, 3, 4, 5, 6), (6, 5, 4, 3, 2, 1) ], columns=pd.MultiIndex.from_tuples(( ('a', 1), ('a', 2), ('a', 3), ('b', 1), ('b', 2), ('b', 3), )) ) dm = xgb.DMatrix(df) assert dm.feature_names == ['a 1', 'a 2', 'a 3', 'b 1', 'b 2', 'b 3'] assert dm.feature_types == ['int', 'int', 'int', 'int', 'int', 'int'] assert dm.num_row() == 2 assert dm.num_col() == 6 def test_slice(self): rng = np.random.RandomState(1994) rows = 100 X = rng.randint(3, 7, size=rows) X = pd.DataFrame({'f0': X}) y = rng.randn(rows) ridxs = [1, 2, 3, 4, 5, 6] m = xgb.DMatrix(X, y) sliced = m.slice(ridxs) assert m.feature_types == sliced.feature_types def test_pandas_categorical(self): rng = np.random.RandomState(1994) rows = 100 X = rng.randint(3, 7, size=rows) X = pd.Series(X, dtype="category") X = pd.DataFrame({'f0': X}) y = rng.randn(rows) m = xgb.DMatrix(X, y, 
enable_categorical=True) assert m.feature_types[0] == 'categorical' def test_pandas_sparse(self): import pandas as pd rows = 100 X = pd.DataFrame( {"A": pd.arrays.SparseArray(np.random.randint(0, 10, size=rows)), "B": pd.arrays.SparseArray(np.random.randn(rows)), "C": pd.arrays.SparseArray(np.random.permutation( [True, False] * (rows // 2)))} ) y = pd.Series(pd.arrays.SparseArray(np.random.randn(rows))) dtrain = xgb.DMatrix(X, y) booster = xgb.train({}, dtrain, num_boost_round=4) predt_sparse = booster.predict(xgb.DMatrix(X)) predt_dense = booster.predict(xgb.DMatrix(X.sparse.to_dense())) np.testing.assert_allclose(predt_sparse, predt_dense) def test_pandas_label(self): # label must be a single column df = pd.DataFrame({'A': ['X', 'Y', 'Z'], 'B': [1, 2, 3]}) with pytest.raises(ValueError): xgb.data._transform_pandas_df(df, False, None, None, 'label', 'float') # label must be supported dtype df = pd.DataFrame({'A': np.array(['a', 'b', 'c'], dtype=object)}) with pytest.raises(ValueError): xgb.data._transform_pandas_df(df, False, None, None, 'label', 'float') df = pd.DataFrame({'A': np.array([1, 2, 3], dtype=int)}) result, _, _ = xgb.data._transform_pandas_df(df, False, None, None, 'label', 'float') np.testing.assert_array_equal(result, np.array([[1.], [2.], [3.]], dtype=float)) dm = xgb.DMatrix(np.random.randn(3, 2), label=df) assert dm.num_row() == 3 assert dm.num_col() == 2 def test_pandas_weight(self): kRows = 32 kCols = 8 X = np.random.randn(kRows, kCols) y = np.random.randn(kRows) w = np.random.uniform(size=kRows).astype(np.float32) w_pd = pd.DataFrame(w) data = xgb.DMatrix(X, y, w_pd) assert data.num_row() == kRows assert data.num_col() == kCols np.testing.assert_array_equal(data.get_weight(), w) def test_cv_as_pandas(self): dm = xgb.DMatrix(dpath + 'agaricus.txt.train') params = {'max_depth': 2, 'eta': 1, 'verbosity': 0, 'objective': 'binary:logistic', 'eval_metric': 'error'} cv = xgb.cv(params, dm, num_boost_round=10, nfold=10) assert isinstance(cv, pd.DataFrame) exp = pd.Index([u'test-error-mean', u'test-error-std', u'train-error-mean', u'train-error-std']) assert len(cv.columns.intersection(exp)) == 4 # show progress log (result is the same as above) cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, verbose_eval=True) assert isinstance(cv, pd.DataFrame) exp = pd.Index([u'test-error-mean', u'test-error-std', u'train-error-mean', u'train-error-std']) assert len(cv.columns.intersection(exp)) == 4 cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, verbose_eval=True, show_stdv=False) assert isinstance(cv, pd.DataFrame) exp = pd.Index([u'test-error-mean', u'test-error-std', u'train-error-mean', u'train-error-std']) assert len(cv.columns.intersection(exp)) == 4 params = {'max_depth': 2, 'eta': 1, 'verbosity': 0, 'objective': 'binary:logistic', 'eval_metric': 'auc'} cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, as_pandas=True) assert 'eval_metric' in params assert 'auc' in cv.columns[0] params = {'max_depth': 2, 'eta': 1, 'verbosity': 0, 'objective': 'binary:logistic', 'eval_metric': ['auc']} cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, as_pandas=True) assert 'eval_metric' in params assert 'auc' in cv.columns[0] params = {'max_depth': 2, 'eta': 1, 'verbosity': 0, 'objective': 'binary:logistic', 'eval_metric': ['auc']} cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, as_pandas=True, early_stopping_rounds=1) assert 'eval_metric' in params assert 'auc' in cv.columns[0] assert cv.shape[0] < 10 params = {'max_depth': 2, 'eta': 1, 'verbosity': 0, 'objective': 
'binary:logistic'} cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, as_pandas=True, metrics='auc') assert 'auc' in cv.columns[0] params = {'max_depth': 2, 'eta': 1, 'verbosity': 0, 'objective': 'binary:logistic'} cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, as_pandas=True, metrics=['auc']) assert 'auc' in cv.columns[0] params = {'max_depth': 2, 'eta': 1, 'verbosity': 0, 'objective': 'binary:logistic', 'eval_metric': ['auc']} cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, as_pandas=True, metrics='error') assert 'eval_metric' in params assert 'auc' not in cv.columns[0] assert 'error' in cv.columns[0] cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, as_pandas=True, metrics=['error']) assert 'eval_metric' in params assert 'auc' not in cv.columns[0] assert 'error' in cv.columns[0] params = list(params.items()) cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, as_pandas=True, metrics=['error']) assert isinstance(params, list) assert 'auc' not in cv.columns[0] assert 'error' in cv.columns[0]
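
# --- Editor's usage note (hedged; not part of the test suite) ---
# The class above is collected by pytest; the core DataFrame -> DMatrix
# conversion it exercises can also be reproduced standalone:
if __name__ == "__main__":
    df = pd.DataFrame([[1, 2.0, True], [2, 3.0, False]], columns=["a", "b", "c"])
    dm = xgb.DMatrix(df, label=pd.Series([1, 2]))
    print(dm.feature_names, dm.feature_types)  # ['a', 'b', 'c'] ['int', 'float', 'i']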
apache-2.0
5,201,633,062,039,068,000
39.007692
82
0.506441
false
3.253675
true
false
false
welshjf/bitnomon
bitnomon/formatting.py
1
1555
# Copyright 2015 Jacob Welsh # # This file is part of Bitnomon; see the README for license information. """Text/number formatting""" class ByteCountFormatter(object): #pylint: disable=too-few-public-methods """Human-readable display of byte counts in various formats. By default, the formatter uses SI and bytes, so 1000 => "1.00 kB". All combinations of (byte, bit) x (SI, binary) are supported, though you probably shouldn't use bits with the binary prefixes. Attributes: unit_bits True for bits or False for bytes prefix_si True for SI or False for binary prefixes """ SI_prefixes = ('k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') binary_prefixes = ('Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi') def __init__(self): self.unit_bits = False self.prefix_si = True def __call__(self, count): """Formats a byte count using the configured settings.""" if self.unit_bits: count *= 8 unit = 'b' else: unit = 'B' if self.prefix_si: factor = 1000. prefixes = self.SI_prefixes else: factor = 1024. prefixes = self.binary_prefixes if abs(count) < factor: return u'%d %c' % (count, unit) size = float(count) prefix_index = 0 while abs(size) >= factor and prefix_index < len(prefixes): size /= factor prefix_index += 1 return u'%.2f %s%c' % (size, prefixes[prefix_index-1], unit)
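A brief usage sketch for the formatter above; the expected outputs in the comments follow directly from the code as written:

fmt = ByteCountFormatter()
print(fmt(999))    # u'999 B'    (below the factor, no prefix)
print(fmt(1500))   # u'1.50 kB'  (default: SI prefixes, bytes)
fmt.prefix_si = False
print(fmt(1536))   # u'1.50 KiB' (binary prefixes)
fmt.unit_bits = True
print(fmt(1024))   # u'8.00 Kib' (1024 bytes = 8192 bits)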
apache-2.0
5,630,058,581,634,303,000
28.903846
72
0.560129
false
3.783455
false
false
false
warp1337/opencv_facerecognizer
src/ocvfacerec/facerec/classifier.py
1
9086
# Copyright (c) 2015. # Philipp Wagner <bytefish[at]gmx[dot]de> and # Florian Lier <flier[at]techfak.uni-bielefeld.de> and # Norman Koester <nkoester[at]techfak.uni-bielefeld.de> # # # Released to public domain under terms of the BSD Simplified license. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the organization nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # See <http://www.opensource.org/licenses/bsd-license> # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from ocvfacerec.facerec.distance import EuclideanDistance from ocvfacerec.facerec.util import as_row_matrix import logging import numpy as np import operator as op class AbstractClassifier(object): def compute(self, X, y): raise NotImplementedError("Every AbstractClassifier must implement the compute method.") def predict(self, X): raise NotImplementedError("Every AbstractClassifier must implement the predict method.") def update(self, X, y): raise NotImplementedError("This classifier cannot be updated.") class NearestNeighbor(AbstractClassifier): """ Implements a k-Nearest Neighbor Model with a generic distance metric. """ def __init__(self, dist_metric=EuclideanDistance(), k=1): AbstractClassifier.__init__(self) self.k = k self.dist_metric = dist_metric self.X = [] self.y = np.array([], dtype=np.int32) def update(self, X, y): """ Updates the classifier. """ self.X.append(X) self.y = np.append(self.y, y) def compute(self, X, y): self.X = X self.y = np.asarray(y) def predict(self, q): """ Predicts the k-nearest neighbor for a given query in q. Args: q: The given query sample, which is an array. Returns: A list with the classifier output. In this framework it is assumed, that the predicted class is always returned as first element. Moreover, this class returns the distances for the first k-Nearest Neighbors. Example: [ 0, { 'labels' : [ 0, 0, 1 ], 'distances' : [ 10.132, 10.341, 13.314 ] } ] So if you want to perform a thresholding operation, you could pick the distances in the second array of the generic classifier output. """ distances = [] for xi in self.X: xi = xi.reshape(-1, 1) d = self.dist_metric(xi, q) distances.append(d) if len(distances) > len(self.y): raise Exception("More distances than classes. 
Is your distance metric correct?") distances = np.asarray(distances) # Get the indices in an ascending sort order: idx = np.argsort(distances) # Sort the labels and distances accordingly: sorted_y = self.y[idx] sorted_distances = distances[idx] # Take only the k first items: sorted_y = sorted_y[0:self.k] sorted_distances = sorted_distances[0:self.k] # Make a histogram of them: hist = dict((key, val) for key, val in enumerate(np.bincount(sorted_y)) if val) # And get the bin with the maximum frequency: predicted_label = max(hist.iteritems(), key=op.itemgetter(1))[0] # A classifier should output a list with the label as first item and # generic data behind. The k-nearest neighbor classifier outputs the # distance of the k first items. So imagine you have a 1-NN and you # want to perform a threshold against it, you should take the first # item return [predicted_label, {'labels': sorted_y, 'distances': sorted_distances}] def __repr__(self): return "NearestNeighbor (k=%s, dist_metric=%s)" % (self.k, repr(self.dist_metric)) # libsvm try: from svmutil import * except ImportError: logger = logging.getLogger("facerec.classifier.SVM") logger.debug("Import Error: libsvm bindings not available.") except: logger = logging.getLogger("facerec.classifier.SVM") logger.debug("Import Error: libsvm bindings not available.") import sys from StringIO import StringIO bkp_stdout = sys.stdout class SVM(AbstractClassifier): """ This class is just a simple wrapper to use libsvm in the CrossValidation module. If you don't use this framework use the validation methods coming with LibSVM, they are much easier to access (simply pass the correct class labels in svm_predict and you are done...). The grid search method in this class is somewhat similar to libsvm grid.py, as it performs a parameter search over a logarithmic scale. Again if you don't use this framework, use the libsvm tools as they are much easier to access. Please keep in mind to normalize your input data, as expected for the model. There's no way to assume a generic normalization step. """ def __init__(self, param=None): AbstractClassifier.__init__(self) self.logger = logging.getLogger("facerec.classifier.SVM") self.param = param self.svm = svm_model() self.param = param if self.param is None: self.param = svm_parameter("-q") def compute(self, X, y): self.logger.debug("SVM TRAINING (C=%.2f,gamma=%.2f,p=%.2f,nu=%.2f,coef=%.2f,degree=%.2f)" % ( self.param.C, self.param.gamma, self.param.p, self.param.nu, self.param.coef0, self.param.degree)) # turn data into a row vector (needed for libsvm) X = as_row_matrix(X) y = np.asarray(y) problem = svm_problem(y, X.tolist()) self.svm = svm_train(problem, self.param) self.y = y def predict(self, X): """ Args: X: The query image, which is an array. Returns: A list with the classifier output. In this framework it is assumed, that the predicted class is always returned as first element. Moreover, this class returns the libsvm output for p_labels, p_acc and p_vals. The libsvm help states: p_labels: a list of predicted labels p_acc: a tuple including accuracy (for classification), mean-squared error, and squared correlation coefficient (for regression). p_vals: a list of decision values or probability estimates (if '-b 1' is specified). If k is the number of classes, for decision values, each element includes results of predicting k(k-1)/2 binary-class SVMs. For probabilities, each element contains k values indicating the probability that the testing instance is in each class. 
Note that the order of classes here is the same as 'model.label' field in the model structure. """ X = np.asarray(X).reshape(1, -1) sys.stdout = StringIO() p_lbl, p_acc, p_val = svm_predict([0], X.tolist(), self.svm) sys.stdout = bkp_stdout predicted_label = int(p_lbl[0]) return [predicted_label, {'p_lbl': p_lbl, 'p_acc': p_acc, 'p_val': p_val}] def __repr__(self): return "Support Vector Machine (kernel_type=%s, C=%.2f,gamma=%.2f,p=%.2f,nu=%.2f,coef=%.2f,degree=%.2f)" % ( KERNEL_TYPE[self.param.kernel_type], self.param.C, self.param.gamma, self.param.p, self.param.nu, self.param.coef0, self.param.degree)
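A usage sketch for the NearestNeighbor classifier above, on hypothetical toy data; note the module targets Python 2 (dict.iteritems, StringIO), so the example does too:

import numpy as np
from ocvfacerec.facerec.classifier import NearestNeighbor

X = [np.array([0.0, 0.1]), np.array([0.2, 0.0]), np.array([1.0, 1.1])]
y = [0, 0, 1]
knn = NearestNeighbor(k=3)  # default EuclideanDistance metric
knn.compute(X, y)
# predict() returns [label, generic_data], as its docstring describes
label, info = knn.predict(np.array([0.1, 0.1]).reshape(-1, 1))
print label, info['distances']  # expect label 0, two small distances first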
bsd-3-clause
3,765,346,731,640,892,000
39.5625
116
0.633832
false
4.133758
false
false
false
debbiedub/bcdef
features/steps/application.py
1
1080
import logging from multiprocessing import Process from bc import BCMain from fcp.CommunicationQueues import comm def run_create_first_block(queues, *args): global comm comm.set(queues=queues) try: logging.getLogger().setLevel(logging.DEBUG) # logging.getLogger().addHandler(comm.get_handler()) logging.info("Started logging") bc = BCMain(*args) bc.participants.round_timeout = 1 bc.create_first_block() finally: comm.empty_queues() @when(u'the application is started to create the first block') def step_impl(context): global comm context.bc_process = Process(target=run_create_first_block, args=(comm, "Me",)) context.bc_process.start() context.node_simulator.expect("hello") context.node_simulator.respond(("olleh",)) context.node_simulator.expect_wot("Ping") context.node_simulator.respond_wot({"Message":"Pong"}) context.node_simulator.expect_wot("GetOwnIdentities") context.node_simulator.respond_wot({"Replies.Amount": "0"})
gpl-3.0
6,960,105,033,797,665,000
29.857143
63
0.671296
false
3.789474
false
false
false
google/capirca
capirca/lib/windows.py
1
12745
# Copyright 2016 Google Inc. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Generic Windows security policy generator; requires subclassing.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import datetime import string from absl import logging from capirca.lib import aclgenerator from capirca.lib import nacaddr CMD_PREFIX = 'netsh ipsec static add ' class Term(aclgenerator.Term): """Generate generic windows policy terms.""" _PLATFORM = 'windows' _COMMENT_FORMAT = string.Template(': $comment') # filter rules _ACTION_TABLE = {} def __init__(self, term, filter_name, filter_action, af='inet'): """Setup a new term. Args: term: A policy.Term object to represent in windows_ipsec. filter_name: The name of the filter chain to attach the term to. filter_action: The default action of the filter. af: Which address family ('inet' or 'inet6') to apply the term to. Raises: UnsupportedFilterError: Filter is not supported. """ super(Term, self).__init__(term) self.term = term # term object self.filter = filter_name # actual name of filter self.default_action = filter_action self.options = [] self.af = af if af == 'inet6': self._all_ips = nacaddr.IPv6('::/0') else: self._all_ips = nacaddr.IPv4('0.0.0.0/0') self.term_name = '%s_%s' % (self.filter[:1], self.term.name) def __str__(self): # Verify platform specific terms. Skip whole term if platform does not # match. 
if self.term.platform: if self._PLATFORM not in self.term.platform: return '' if self.term.platform_exclude: if self._PLATFORM in self.term.platform_exclude: return '' ret_str = [] # Don't render icmpv6 protocol terms under inet, or icmp under inet6 if ((self.af == 'inet6' and 'icmp' in self.term.protocol) or (self.af == 'inet' and 'icmpv6' in self.term.protocol)): logging.debug(self.NO_AF_LOG_PROTO.substitute(term=self.term.name, proto=self.term.protocol, af=self.af)) return '' # append comments to output ret_str.append(self._COMMENT_FORMAT.substitute(filter=self.filter, term=self.term_name, comment=self.term.comment)) # if term does not specify action, use filter default action if not self.term.action: self.term.action[0].value = self.default_action if self.term.action[0] == 'next': return '' if len(self.term.action) > 1: raise aclgenerator.UnsupportedFilterError('\n%s %s %s %s' % ( 'Multiple actions unsupported by', self._PLATFORM, '\nError in term:', self.term.name)) # protocol if self.term.protocol: protocols = self.term.protocol else: protocols = ['any'] # addresses src_addr = self.term.source_address if not src_addr: src_addr = [self._all_ips] dst_addr = self.term.destination_address if not dst_addr: dst_addr = [self._all_ips] if (self.term.source_address_exclude or self.term.destination_address_exclude): raise aclgenerator.UnsupportedFilterError('\n%s %s %s %s' % ( 'address exclusions unsupported by', self._PLATFORM, '\nError in term:', self.term.name)) # ports = Map the ports in a straight list since multiports aren't supported (src_ports, dst_ports) = self._HandlePorts(self.term.source_port, self.term.destination_port) # The windows ipsec driver requires either 'tcp' or 'udp' to be specified # if a srcport or dstport is specified. Fail if src or dst ports are # specified and the protocols are not exactly one or both of 'tcp' # or 'udp'. if ((not set(protocols).issubset(set(['tcp', 'udp']))) and (len(src_ports) > 1 or len(dst_ports) > 1)): raise aclgenerator.UnsupportedFilterError('%s %s %s' % ( '\n', self.term.name, 'src or dst ports may only be specified with "tcp" and/or "udp".')) # icmp-types (icmp_types, protocols) = self._HandleIcmpTypes(self.term.icmp_type, protocols) ret_str = [] self._HandlePreRule(ret_str) self._CartesianProduct(src_addr, dst_addr, protocols, icmp_types, src_ports, dst_ports, ret_str) self._HandlePostRule(ret_str) return '\n'.join(str(v) for v in ret_str if v) def _HandleIcmpTypes(self, icmp_types, protocols): """Perform implementation-specific icmp_type and protocol transforms. Note that icmp_types or protocols are passed as parameters in case they are to be munged prior to this function call, and may not be identical to self.term.* parameters. Args: icmp_types: a list of icmp types, e.g., self.term.icmp_types protocols: a list of protocols, e.g., self.term.protocols Returns: A pair of lists of (icmp_types, protocols) """ return None, None def _HandlePorts(self, src_ports, dst_ports): """Perform implementation-specific port transforms. Note that src_ports or dst_ports are passed as parameters in case they are to be munged prior to this function call, and may not be identical to self.term.* parameters. Args: src_ports: list of source port range tuples, e.g., self.term.source_port dst_ports: list of destination port range tuples Returns: A pair of lists of (src_ports, dst_ports) """ return None, None def _HandlePreRule(self, ret_str): """Perform any pre-cartesian product transforms on the ret_str array. 
Args: ret_str: an array of strings that will eventually be joined to form the string output for the term. """ pass def _CartesianProduct(self, src_addr, dst_addr, protocol, icmp_types, src_ports, dst_ports, ret_str): """Perform the appropriate cartesian product of the input parameters. Args: src_addr: a type(IP) list of the source addresses dst_addr: a type(IP) list of the destination addresses protocol: a string list of the protocols icmp_types: a numeric list of the icmp_types src_ports: a (start, end) list of the source ports dst_ports: a (start,end) list of the destination ports ret_str: an array of strings that will eventually be joined to form the string output for the term. """ pass def _HandlePostRule(self, ret_str): """Perform any post-cartesian product transforms on the ret_str array. Args: ret_str: an array of strings that will eventually be joined to form the string output for the term. """ pass class WindowsGenerator(aclgenerator.ACLGenerator): """Generates filters and terms from provided policy object.""" _PLATFORM = 'windows' _DEFAULT_PROTOCOL = 'all' SUFFIX = '.bat' _RENDER_PREFIX = None _DEFAULT_ACTION = 'block' _TERM = Term _GOOD_AFS = ['inet', 'inet6'] def _BuildTokens(self): """Build supported tokens for platform. Returns: tuple containing both supported tokens and sub tokens """ supported_tokens, supported_sub_tokens = super( WindowsGenerator, self)._BuildTokens() supported_tokens |= {'option'} supported_tokens -= {'verbatim'} supported_sub_tokens.update({'action': {'accept', 'deny'}}) del supported_sub_tokens['option'] return supported_tokens, supported_sub_tokens def _TranslatePolicy(self, pol, exp_info): """Translate a policy from objects into strings.""" self.windows_policies = [] current_date = datetime.datetime.utcnow().date() exp_info_date = current_date + datetime.timedelta(weeks=exp_info) default_action = None good_default_actions = ['permit', 'block'] good_options = [] for header, terms in pol.filters: filter_type = None if self._PLATFORM not in header.platforms: continue filter_options = header.FilterOptions(self._PLATFORM)[1:] filter_name = header.FilterName(self._PLATFORM) # ensure all options after the filter name are expected for opt in filter_options: if opt not in good_default_actions + self._GOOD_AFS + good_options: raise aclgenerator.UnsupportedTargetOptionError('%s %s %s %s' % ( '\nUnsupported option found in', self._PLATFORM, 'target definition:', opt)) # Check for matching af for address_family in self._GOOD_AFS: if address_family in filter_options: # should not specify more than one AF in options if filter_type is not None: raise aclgenerator.UnsupportedFilterError('%s %s %s %s' % ( '\nMay only specify one of', self._GOOD_AFS, 'in filter options:', filter_options)) filter_type = address_family if filter_type is None: filter_type = 'inet' # does this policy override the default filter actions? 
for next_target in header.target: if next_target.platform == self._PLATFORM: if len(next_target.options) > 1: for arg in next_target.options: if arg in good_default_actions: default_action = arg if default_action and default_action not in good_default_actions: raise aclgenerator.UnsupportedTargetOptionError('%s %s %s %s %s' % ( '\nOnly', ', '.join(good_default_actions), 'default filter action allowed;', default_action, 'used.')) # add the terms new_terms = [] term_names = set() for term in terms: if term.name in term_names: raise aclgenerator.DuplicateTermError( 'You have a duplicate term: %s' % term.name) term_names.add(term.name) if term.expiration: if term.expiration <= exp_info_date: logging.info('INFO: Term %s in policy %s expires ' 'in less than two weeks.', term.name, filter_name) if term.expiration <= current_date: logging.warning('WARNING: Term %s in policy %s is expired and ' 'will not be rendered.', term.name, filter_name) continue if 'established' in term.option or 'tcp-established' in term.option: continue new_terms.append(self._TERM(term, filter_name, default_action, filter_type)) self.windows_policies.append((header, filter_name, filter_type, default_action, new_terms)) def __str__(self): target = [] pretty_platform = '%s%s' % (self._PLATFORM[0].upper(), self._PLATFORM[1:]) if self._RENDER_PREFIX: target.append(self._RENDER_PREFIX) for header, _, filter_type, default_action, terms in self.windows_policies: # Add comments for this filter target.append(': %s %s Policy' % (pretty_platform, header.FilterName(self._PLATFORM))) self._HandlePolicyHeader(header, target) # reformat long text comments, if needed comments = aclgenerator.WrapWords(header.comment, 70) if comments and comments[0]: for line in comments: target.append(': %s' % line) target.append(':') # add the p4 tags target.extend(aclgenerator.AddRepositoryTags(': ')) target.append(': ' + filter_type) if default_action: raise aclgenerator.UnsupportedTargetOptionError( 'Windows generator does not support default actions') # add the terms for term in terms: term_str = str(term) if term_str: target.append(term_str) self._HandleTermFooter(header, term, target) target.append('') return '\n'.join(target) def _HandlePolicyHeader(self, header, target): pass def _HandleTermFooter(self, header, term, target): pass
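Since the module docstring says this generator requires subclassing, a hypothetical minimal subclass (all names here are illustrative, not from capirca) would override the hook methods that are deliberate no-ops above:

class ToyTerm(Term):
    def _CartesianProduct(self, src_addr, dst_addr, protocol, icmp_types,
                          src_ports, dst_ports, ret_str):
        # Emit one rule line per (protocol, src, dst) combination.
        for proto in protocol:
            for src in src_addr:
                for dst in dst_addr:
                    ret_str.append('%srule: %s %s -> %s' %
                                   (CMD_PREFIX, proto, src, dst))

class ToyGenerator(WindowsGenerator):
    """Concrete generator wired to the toy term above."""
    _TERM = ToyTerm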
apache-2.0
6,289,347,090,319,784,000
34.013736
80
0.626991
false
4.028129
false
false
false
AlfredNeverKog/BrainCarya
src/my/kadenze/lesson3/mnist_autoencoder.py
1
2610
from mnist import MNIST import numpy as np import tensorflow as tf from src.my.lib.utils import montage import matplotlib.pyplot as plt from PIL import Image src = '../../../../data/mnist/' output='./content/1/%s.jpg' mndata = MNIST(src) data = np.array(mndata.load_testing()) X = data[0] Y = data[1] items = 100 imgs = np.array([i for i in np.array(X[:items])]).reshape(items,28,28) n_features = 784 n_input = n_features Y = imgs.reshape(items,n_features).astype(float) current_input = imgs.reshape(items,n_features).astype(float) Ws = [] Bs = [] dimensions = [512,256,128,64] for layer_i,n_ouputs in enumerate(dimensions): with tf.variable_scope("encoder/variable/%s" % layer_i): W = tf.get_variable(name="weight%s" % layer_i, dtype=tf.float64, initializer=tf.contrib.layers.xavier_initializer(), shape=[n_input, n_ouputs]) #B = tf.get_variable(name='bias%s' % layer_i, dtype=tf.float64, # initializer=tf.random_normal_initializer(mean=0.0, stddev=1.1), # shape=[n_ouputs]) #h = tf.nn.bias_add(value=tf.matmul(current_input, W), # bias=B) h = tf.matmul(current_input, W) current_input = h current_input = tf.nn.relu(current_input) n_input = n_ouputs Ws.append(W) #Bs.append() Ws = Ws[::-1]#reverse Bs = Bs[::-1]#reverse #dimensions = dimensions[::1][1:].append(n_features) dimensions = dimensions[::-1][1:] +[n_features] #Build DECODER for layer_i,n_ouputs in enumerate(dimensions): with tf.variable_scope("encoder/variable/%s" % layer_i): ##128x64 -> 64x128 h = tf.matmul(current_input, tf.transpose(Ws[layer_i])) if layer_i + 1 < len(Bs): h = tf.nn.bias_add(h,bias=Bs[layer_i + 1]) current_input = h current_input = tf.nn.relu(current_input) n_input = n_ouputs loss_func = tf.reduce_mean(tf.squared_difference(current_input, Y), 1) optimizer = tf.train.AdamOptimizer(learning_rate=0.00001) train = optimizer.minimize(loss_func) counter = 0 with tf.Session() as sess: sess.run(tf.initialize_all_variables()) for i in range(50000): sess.run(train) if i % 15 == 0: Image.fromarray(montage(sess.run(current_input).reshape(items,28,28)).astype(np.uint8)) \ .save(output % ("0"*(5 - len(str(counter))) + str(counter))) print(sess.run(tf.reduce_mean(loss_func))) counter += 1
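The decoder above relies on tied weights: each decoding step multiplies by the transpose of the matching encoder matrix, so the 784 -> 512 -> 256 -> 128 -> 64 encoding is walked back in reverse order. A small NumPy shape check of that idea (illustrative only):

import numpy as np

W = np.random.randn(784, 512)   # first encoder weight
x = np.random.randn(1, 784)     # one flattened 28x28 image
h = x.dot(W)                    # encoded, shape (1, 512)
x_hat = h.dot(W.T)              # decoded with the transpose, shape (1, 784)
assert x_hat.shape == x.shape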
mit
2,853,203,673,216,453,000
28
101
0.589272
false
3.107143
false
false
false
icydoge/AdventOfCodeSolutions2
day3.py
1
1161
################################### # Many lines # # Such O(n) # # Very Doge # ################################### # By icydoge <[email protected]> # ################################### with open("inputs/day3-1.txt") as f: content = f.readlines() # Part 1 lines = list(map(str.strip, content)) triangles = [] for line in lines: split = line.split(' ') triangles.append([int(i) for i in split if i != '']) valid = 0 for triangle in triangles: if (triangle[0] + triangle[1]) > triangle[2] and (triangle[1] + triangle[2]) > triangle[0] and (triangle[0] + triangle[2]) > triangle[1]: valid += 1 print("Final answer for Part 1: %d" % (valid)) # Part 2 triangles2 = [] for i in range(0, len(triangles) - 2, 3): for j in range(0, 3): triangles2.append([triangles[i][j], triangles[i + 1][j], triangles[i + 2][j]]) valid = 0 for triangle in triangles2: if (triangle[0] + triangle[1]) > triangle[2] and (triangle[1] + triangle[2]) > triangle[0] and (triangle[0] + triangle[2]) > triangle[1]: valid += 1 print("Final answer for Part 2: %d" % (valid))
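Both parts use the same validity test, the triangle inequality applied to all three side pairs; a worked check with a hypothetical helper:

def is_valid(a, b, c):
    return a + b > c and b + c > a and a + c > b

assert is_valid(5, 10, 12)      # 5 + 10 > 12, and the other pairs hold
assert not is_valid(5, 10, 25)  # 5 + 10 <= 25, so not a triangle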
mit
-4,814,290,426,159,412,000
30.378378
141
0.51938
false
3.146341
false
false
false
shapiromatron/bmds-server
bmds_server/analysis/transforms.py
1
3196
from enum import Enum from typing import Dict, List, Union import bmds from bmds.bmds3.sessions import get_model from bmds.bmds3.types.continuous import ContinuousModelSettings from bmds.bmds3.types.dichotomous import DichotomousModelSettings from bmds.bmds3.types.priors import PriorClass, get_continuous_prior, get_dichotomous_prior from bmds.constants import Dtype from .validators.datasets import AdverseDirection class PriorEnum(str, Enum): frequentist_restricted = "frequentist_restricted" frequentist_unrestricted = "frequentist_unrestricted" bayesian = "bayesian" # TODO - remove these maps; use constants from bmds bmd3_prior_map = { PriorEnum.frequentist_restricted: PriorClass.frequentist_restricted, PriorEnum.frequentist_unrestricted: PriorClass.frequentist_unrestricted, PriorEnum.bayesian: PriorClass.bayesian, } is_increasing_map = { AdverseDirection.AUTOMATIC: None, AdverseDirection.UP: True, AdverseDirection.DOWN: False, } def build_model_settings( bmds_version: str, dataset_type: str, model_name: str, prior_class: str, options: Dict, dataset_options: Dict, ) -> Union[DichotomousModelSettings, ContinuousModelSettings]: model = get_model(bmds_version, dataset_type, model_name) prior_class = bmd3_prior_map[prior_class] if dataset_type in bmds.constants.DICHOTOMOUS_DTYPES: return DichotomousModelSettings( bmr=options["bmr_value"], alpha=1.0 - options["confidence_level"], bmr_type=options["bmr_type"], degree=dataset_options["degree"], priors=get_dichotomous_prior(model.bmd_model_class, prior_class), ) elif dataset_type in bmds.constants.CONTINUOUS_DTYPES: return ContinuousModelSettings( bmr=options["bmr_value"], alpha=1.0 - options["confidence_level"], tailProb=options["tail_probability"], bmr_type=options["bmr_type"], disttype=options["dist_type"], degree=dataset_options["degree"], is_increasing=is_increasing_map[dataset_options["adverse_direction"]], priors=get_continuous_prior(model.bmd_model_class, prior_class), ) else: raise ValueError(f"Unknown dataset_type: {dataset_type}") def build_dataset(dataset_type: str, dataset: Dict[str, List[float]]) -> bmds.datasets.DatasetType: if dataset_type == Dtype.CONTINUOUS: schema = bmds.datasets.ContinuousDatasetSchema elif dataset_type == Dtype.CONTINUOUS_INDIVIDUAL: schema = bmds.datasets.ContinuousIndividualDatasetSchema elif dataset_type == Dtype.DICHOTOMOUS: schema = bmds.datasets.DichotomousDatasetSchema else: raise ValueError(f"Unknown dataset type: {dataset_type}") return schema.parse_obj(dataset).deserialize() def remap_exponential(models: List[str]) -> List[str]: # expand a user-specified "exponential" model into M3 and M5 in place if bmds.constants.M_Exponential in models: pos = models.index(bmds.constants.M_Exponential) models[pos : pos + 1] = (bmds.constants.M_ExponentialM3, bmds.constants.M_ExponentialM5) return models
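A usage sketch for remap_exponential above (it needs the bmds package on the path; the comment shows what the in-place slice assignment implies):

models = [bmds.constants.M_Power, bmds.constants.M_Exponential]
print(remap_exponential(models))
# [M_Power, M_ExponentialM3, M_ExponentialM5] -- the "Exponential" entry
# is replaced in place, so its position in the list is preserved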
mit
4,329,694,989,769,762,300
37.506024
99
0.707447
false
3.481481
false
false
false
V155/qutebrowser
qutebrowser/components/zoomcommands.py
1
3262
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2018 Florian Bruhin (The Compiler) <[email protected]> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """Zooming-related commands.""" from qutebrowser.api import cmdutils, apitypes, message, config @cmdutils.register() @cmdutils.argument('tab', value=cmdutils.Value.cur_tab) @cmdutils.argument('count', value=cmdutils.Value.count) def zoom_in(tab: apitypes.Tab, count: int = 1, quiet: bool = False) -> None: """Increase the zoom level for the current tab. Args: count: How many steps to zoom in. quiet: Don't show a zoom level message. """ try: perc = tab.zoom.apply_offset(count) except ValueError as e: raise cmdutils.CommandError(e) if not quiet: message.info("Zoom level: {}%".format(int(perc)), replace=True) @cmdutils.register() @cmdutils.argument('tab', value=cmdutils.Value.cur_tab) @cmdutils.argument('count', value=cmdutils.Value.count) def zoom_out(tab: apitypes.Tab, count: int = 1, quiet: bool = False) -> None: """Decrease the zoom level for the current tab. Args: count: How many steps to zoom out. quiet: Don't show a zoom level message. """ try: perc = tab.zoom.apply_offset(-count) except ValueError as e: raise cmdutils.CommandError(e) if not quiet: message.info("Zoom level: {}%".format(int(perc)), replace=True) @cmdutils.register() @cmdutils.argument('tab', value=cmdutils.Value.cur_tab) @cmdutils.argument('count', value=cmdutils.Value.count) def zoom(tab: apitypes.Tab, level: str = None, count: int = None, quiet: bool = False) -> None: """Set the zoom level for the current tab. The zoom can be given as argument or as [count]. If neither is given, the zoom is set to the default zoom. If both are given, use [count]. Args: level: The zoom percentage to set. count: The zoom percentage to set. quiet: Don't show a zoom level message. """ if count is not None: int_level = count elif level is not None: try: int_level = int(level.rstrip('%')) except ValueError: raise cmdutils.CommandError("zoom: Invalid int value {}" .format(level)) else: int_level = int(config.val.zoom.default) try: tab.zoom.set_factor(int_level / 100) except ValueError: raise cmdutils.CommandError("Can't zoom {}%!".format(int_level)) if not quiet: message.info("Zoom level: {}%".format(int_level), replace=True)
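A sketch of the level parsing inside zoom() above: a trailing percent sign is tolerated, and anything else non-numeric is reported to the user as a CommandError:

for level in ('150', '150%'):
    print(int(level.rstrip('%')))  # 150 both times
try:
    int('abc'.rstrip('%'))
except ValueError:
    print('surfaced as cmdutils.CommandError in zoom()')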
gpl-3.0
-1,385,101,638,705,507,600
33.336842
77
0.658492
false
3.690045
false
false
false
bocajspear1/vulnfeed
vulnfeed/sender.py
1
6613
# This is the part of the code that sends the emails import os import threading from datetime import datetime, timedelta, date import calendar import re import time from util.email_sender import send_email from database.user import get_users, User from database.feed import get_feed_reports from database.rules import fill_rules from scorer.parser import VulnFeedRuleParser from config import Config CONFIG = Config() # Sender master breaks users off into groups of 50 to be processed on different threads class SenderMaster(): def __init__(self): self.threads = [] def start_senders(self): offset = 0 length = 50 user_chunk = get_users(offset, length) while len(user_chunk) > 0: worker_thread = SenderWorker(user_chunk) worker_thread.start() self.threads.append(worker_thread) offset += length user_chunk = get_users(offset, length) for thread in self.threads: thread.join() # Works on a chunk of users class SenderWorker(threading.Thread): def __init__(self, user_chunk): threading.Thread.__init__(self) self.user_chunk = user_chunk def check_report(self, report_map, report, rules): for rule_item in rules: parser = VulnFeedRuleParser() parser.parse_rule(rule_item['rule']) title_score, _ = parser.process_text(report['title'], report['title_freq']) print(title_score) contents_score, words = parser.process_text(report['contents'], report['contents_freq']) print(words) print("Score: ", contents_score) small_report = { "title": report['raw_title'], "contents": report['raw_contents'], "link": report['link'], "id": report['report_id'] } if not report['id'] in report_map: report_map[report['id']] = { "report": small_report, "score": 0 } base_score = contents_score + (title_score * 2) if rule_item['weight'] == 'high': base_score *= 2 elif rule_item['weight'] == 'medium': base_score += (base_score * 0.5) report_map[report['id']]['score'] += base_score if contents_score > 0: for word in words: # Check if contains HTML if "<" in report_map[report['id']]['report']['contents']: boldify = re.compile('([>][^<]+)(' + word + ')', re.IGNORECASE) report_map[report['id']]['report']['contents'] = boldify.sub(r"\1<strong>\2</strong>", report_map[report['id']]['report']['contents']) else: boldify = re.compile('(' + word + ')', re.IGNORECASE) report_map[report['id']]['report']['contents'] = boldify.sub(r"<strong>\1</strong>", report_map[report['id']]['report']['contents']) def process_user(self, user_email): # Get object u = User(user_email) if u.is_confirmed() == False: print("Ignoring " + user_email) return days_to_run = u.get_days() # Last run is day of year last_day = u.last_run # Get the current day current_time = datetime.combine(date.today(), datetime.min.time()) current_day = int(current_time.strftime("%w")) + 1 current_day_of_year = int(current_time.strftime("%j")) # Check if today is a day set by the user if current_day not in days_to_run: return # Check if same day if current_day_of_year == last_day: return day_diff = 2 if last_day > 0: # If the last day is greater than the current day # we have had a new year! 
if last_day > current_day_of_year: leap_day = 0 if calendar.isleap(current_time.year - 1): leap_day = 1 day_diff = (current_day_of_year + 365 + leap_day) - last_day else: day_diff = current_day_of_year - last_day # Get reports between the time requested plus some buffer time query_time = current_time - timedelta(hours=(day_diff*24)+2) reports = get_feed_reports(query_time) # Get rule data rules = u.get_rules() filled_rules = fill_rules(rules) # Score the reports report_map = {} for report in reports: self.check_report(report_map, report, filled_rules) # Sort the reports sorted_reports = sorted(report_map, key=lambda item: report_map[item]['score'], reverse=True) # Separate reports into scored and unscored scored_reports = [] unscored_reports = [] # Clear the last report info u.last_scored_list = [] u.last_unscored_list = [] for item in sorted_reports: if report_map[item]['score'] > 0: scored_reports.append(report_map[item]['report']) u.last_scored_list.append(report_map[item]) else: unscored_reports.append(report_map[item]['report']) u.last_unscored_list.append(report_map[item]) # for item in sorted_reports: # print(report_map[item]['score']) # print(report_map[item]['report']['contents']) report_count = len(sorted_reports) # Prepare to render the email template render_map = { "vulncount": report_count, "scored_reports": scored_reports, "unscored_reports": unscored_reports } print(scored_reports) print("Sending for " + user_email) response = send_email("reports_email.html", "VulnFeed Report for " + time.strftime("%m/%d/%Y"), render_map, user_email) # Update the user's last sent day u.last_run = current_day_of_year u.last_status = "Status: " + str(response.status_code) + ", " + response.status_text.decode("utf-8") u.last_status = "Okay" u.update() # Process each user def run(self): for user_email in self.user_chunk: self.process_user(user_email) sm = SenderMaster() sm.start_senders()
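A sketch of the keyword-highlighting step in check_report() above, with a hypothetical matched word: in the plain-text branch every hit is wrapped in <strong> tags, while the HTML branch adds a text-node guard so markup and attributes are left alone:

import re

word = 'overflow'  # hypothetical word returned by the rule parser
plain = 'A buffer overflow was reported upstream.'
boldify = re.compile('(' + word + ')', re.IGNORECASE)
print(boldify.sub(r"<strong>\1</strong>", plain))
# A buffer <strong>overflow</strong> was reported upstream.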
gpl-3.0
5,944,331,318,391,569,000
32.573604
127
0.536217
false
4.057055
false
false
false
philippbosch/django-tellafriend
docs/source/conf.py
1
7092
# -*- coding: utf-8 -*- # # django-tellafriend documentation build configuration file, created by # sphinx-quickstart on Fri Aug 6 20:14:06 2010. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.ifconfig'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'django-tellafriend' copyright = u'2010, Philipp Bosch' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.0.1' # The full version, including alpha/beta/rc tags. release = '0.0.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". 
#html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'django-tellafrienddoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'django-tellafriend.tex', u'django-tellafriend Documentation', u'Philipp Bosch', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'django-tellafriend', u'django-tellafriend Documentation', [u'Philipp Bosch'], 1) ]
mit
5,453,983,071,573,978,000
31.833333
80
0.710378
false
3.728707
true
false
false
AndyDiamondstein/vitess
py/vttest/run_local_database.py
1
5488
#!/usr/bin/env python """Command-line tool for starting a local Vitess database for testing. USAGE: $ run_local_database --port 12345 \ --topology test_keyspace/-80:test_keyspace_0,test_keyspace/80-:test_keyspace_1 \ --schema_dir /path/to/schema/dir It will run the tool, logging to stderr. On stdout, a small json structure can be waited on and then parsed by the caller to figure out how to reach the vtgate process. Once done with the test, send an empty line to this process for it to clean-up, and then just wait for it to exit. """ import json import logging import optparse import os import re import sys from vttest import environment from vttest import local_database from vttest import mysql_flavor from vttest import vt_processes from vttest import init_data_options shard_exp = re.compile(r'(.+)/(.+):(.+)') def main(cmdline_options): shards = [] for shard in cmdline_options.topology.split(','): m = shard_exp.match(shard) if m: shards.append( vt_processes.ShardInfo(m.group(1), m.group(2), m.group(3))) else: sys.stderr.write('invalid --shard flag format: %s\n' % shard) sys.exit(1) environment.base_port = cmdline_options.port init_data_opts = None if cmdline_options.initialize_with_random_data: init_data_opts = init_data_options.InitDataOptions() init_data_opts.rng_seed = cmdline_options.rng_seed init_data_opts.min_table_shard_size = cmdline_options.min_table_shard_size init_data_opts.max_table_shard_size = cmdline_options.max_table_shard_size init_data_opts.null_probability = cmdline_options.null_probability with local_database.LocalDatabase( shards, cmdline_options.schema_dir, cmdline_options.vschema, cmdline_options.mysql_only, init_data_opts, web_dir=cmdline_options.web_dir) as local_db: print json.dumps(local_db.config()) sys.stdout.flush() try: raw_input() except EOFError: sys.stderr.write( 'WARNING: %s: No empty line was received on stdin.' ' Instead, stdin was closed and the cluster will be shut down now.' ' Make sure to send the empty line instead to proactively shutdown' ' the local cluster. For example, did you forget the shutdown in' ' your test\'s tearDown()?\n' % os.path.basename(__file__)) if __name__ == '__main__': parser = optparse.OptionParser() parser.add_option( '-p', '--port', type='int', help='Port to use for vtcombo. If this is 0, a random port ' 'will be chosen.') parser.add_option( '-t', '--topology', help='Define which shards exist in the test topology in the' ' form <keyspace>/<shardrange>:<dbname>,... The dbname' ' must be unique among all shards, since they share' ' a MySQL instance in the test environment.') parser.add_option( '-s', '--schema_dir', help='Directory for initial schema files. Within this dir,' ' there should be a subdir for each keyspace. Within' ' each keyspace dir, each file is executed as SQL' ' after the database is created on each shard.' ' If the directory contains a vschema.json file, it' ' will be used as the vschema for the V3 API.') parser.add_option( '-e', '--vschema', help='If this file is specified, it will be used' ' as the vschema for the V3 API.') parser.add_option( '-m', '--mysql_only', action='store_true', help='If this flag is set only mysql is initialized.' ' The rest of the vitess components are not started.' ' Also, the output specifies the mysql unix socket' ' instead of the vtgate port.') parser.add_option( '-r', '--initialize_with_random_data', action='store_true', help='If this flag is set, each table-shard will be initialized' ' with random data. 
See also the "rng_seed" and "min_shard_size"' ' and "max_shard_size" flags.') parser.add_option( '-d', '--rng_seed', type='int', default=123, help='The random number generator seed to use when initializing' ' with random data (see also --initialize_with_random_data).' ' Multiple runs with the same seed will result with the same' ' initial data.') parser.add_option( '-x', '--min_table_shard_size', type='int', default=1000, help='The minimum number of initial rows in a table shard. Ignored if' '--initialize_with_random_data is false. The actual number is chosen' ' randomly.') parser.add_option( '-y', '--max_table_shard_size', type='int', default=10000, help='The maximum number of initial rows in a table shard. Ignored if' '--initialize_with_random_data is false. The actual number is chosen' ' randomly') parser.add_option( '-n', '--null_probability', type='float', default=0.1, help='The probability to initialize a field with "NULL" ' ' if --initialize_with_random_data is true. Only applies to fields' ' that can contain NULL values.') parser.add_option( '-w', '--web_dir', help='location of the vtctld web server files.') parser.add_option( '-v', '--verbose', action='store_true', help='Display extra error messages.') (options, args) = parser.parse_args() if options.verbose: logging.getLogger().setLevel(logging.DEBUG) # This will set the flavor based on the MYSQL_FLAVOR env var, # or default to MariaDB. mysql_flavor.set_mysql_flavor(None) main(options)
bsd-3-clause
1,329,981,277,944,915,700
36.081081
84
0.662719
false
3.700607
true
false
false
flyingbanana1024102/transmission-line-simulator
src/views/contextmenu.py
1
3701
# # Transmission Line Simulator # # Author(s): Jiacong Xu # Created: Jul-10-2017 # from materialwidget import MaterialWidget from materialbutton import MaterialButton from kivy.properties import * from kivy.lang.builder import Builder from util.constants import * from kivy.animation import Animation from kivy.clock import Clock class ContextMenu(MaterialWidget): """ A contextual menu that displays text and icons. """ _container = ObjectProperty(None) def __init__(self, titles, actions, icons = None, **kwargs): """ Initializes this menu. Does not yet display it. titles: list of strings for each item in the menu. actions: list of callbacks that that takes no arguments. icons: list of unicode strings for icons of each item. Default None. Eg. [unichr(0xf26b)] """ super(ContextMenu, self).__init__(**kwargs) # Generate buttons according to title and icon for i in range(len(titles)): btn = MaterialButton() btn.changeStyle('flat') btn.title = titles[i] if icons != None: btn.icon = icons[i] else: btn.icon = '' btn.onClick.append(actions[i]) btn.onClick.append(lambda: self.dismiss(True)) btn.size_hint_y = None btn.height = 60 btn.titleLabel.color = TEXT_BLACK self._container.add_widget(btn) self._anim = None Clock.schedule_once(self._completeLayout, 0) def _completeLayout(self, dt): w = 0 for child in self._container.children: w = max(w, child.width) for child in self._container.children: child.width = w def show(self, layer, pos, animated): # Determine orientation self.orientation = 'upright' h = len(self._container.children) * 60 if pos[1] + h > layer.height: self.orientation = 'downright' layer.add_widget(self) self.pos = pos self._cachedPos = pos if animated: self.size = 0, 0 self.opacity = 0.0 self._animate(True) else: self.size = self._container.minimum_size self.opacity = 1.0 if self.orientation == 'downright': self.y = pos[1] - h def dismiss(self, animated): if not animated: self.parent.remove_widget(self) self.size = 0, 0 self.opacity = 0.0 else: self._animate(False) self._anim.on_complete = self._animComplete def _animComplete(self, x): if self.parent != None: self.parent.remove_widget(self) def _animate(self, isEntering): if self._anim != None: self._anim.cancel(self) if isEntering: if self.orientation == 'downright': h = self._cachedPos[1] - self._container.minimum_height self._anim = Animation(size = self._container.minimum_size, y = h, opacity = 1.0, d = 0.2, t = 'in_out_quad') else: self._anim = Animation(size = self._container.minimum_size, opacity = 1.0, d = 0.2, t = 'in_out_quad') self._anim.start(self) else: self._anim = Animation(size = [0, 0], pos = self._cachedPos, d = 0.2, opacity = 0.0, t = 'in_out_quad') self._anim.start(self) def on_touch_down(self, touch): if not self.collide_point(touch.pos[0], touch.pos[1]): self.dismiss(True) return super(ContextMenu, self).on_touch_down(touch)
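A usage sketch for the menu above, assuming a running Kivy app; the layer widget and the callbacks here are hypothetical:

def on_copy():
    print('copy tapped')

def on_delete():
    print('delete tapped')

menu = ContextMenu(['Copy', 'Delete'], [on_copy, on_delete],
                   icons=[unichr(0xf0c5), unichr(0xf1f8)])
# layer is whatever widget should host the menu, e.g. the root layout
menu.show(layer, pos=(100, 200), animated=True)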
mit
-6,667,060,846,733,342,000
27.689922
125
0.560929
false
3.883526
false
false
false