| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 2-1.05M | stringlengths 5-104 | stringlengths 4-251 | stringclasses 1 value | stringclasses 15 values | int32 2-1.05M |
# Github-flavored markdown parsing
# Thanks to http://blog.freedomsponsors.org/markdown_formatting/
import itertools
import misaka
from markupsafe import Markup
from pygments import highlight
from pygments.formatters.html import HtmlFormatter
from pygments.lexers import get_lexer_by_name
from pygments.util import ClassNotFound
class HighlighterRenderer(misaka.HtmlRenderer):
def blockcode(self, text, lang):
if not lang:
lang = "text"
try:
lexer = get_lexer_by_name(lang, stripall=True)
except ClassNotFound:
lexer = get_lexer_by_name("text")
formatter = HtmlFormatter()
return highlight(text, lexer, formatter)
def table(self, content):
return '<table class="table">\n' + content + '\n</table>'
# And use the renderer
renderer = HighlighterRenderer(flags=misaka.HTML_ESCAPE)
md = misaka.Markdown(renderer, extensions=(
'tables', 'fenced-code', 'autolink', 'underline', 'no-intra-emphasis',
'highlight', 'disable-indented-code', 'space-headers'
))
def markdown(text):
return md(text)
def markdown_filter(s):
text_lines = str(s).split('\n')
if not text_lines:
return "" # there aren't any lines, so return nothing
# this gets the first non-empty string, so we can get indentation from it
initial_line = next((s for s in text_lines if s), "")
# count leading spaces on first line, so we can strip the indentation spaces from all other lines
leading_spaces = sum(1 for _ in itertools.takewhile(str.isspace, initial_line))
# strip indentation spaces from lines so they work correctly inlined in html.
# Note that we are removing an exact number of spaces so that we only strip spaces used for initial indentation in
# html, not spaces used for marking code sections for instance.
text_lines = (line[leading_spaces:] for line in text_lines)
br_processed = (line[:-len('<br/>')] + ' '
if line.endswith('<br/>')
else line for line in text_lines)
# reconstruct text
text = '\n'.join(br_processed)
return Markup(markdown(text))
def register(app):
app.add_template_filter(markdown_filter, name="markdown")
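# Hedged usage sketch (not part of the original module; assumes misaka,
# markupsafe and pygments are installed): shows markdown_filter stripping
# the common leading indentation before rendering.
if __name__ == '__main__':
    sample = """
    # A heading

    Some text with `inline code`.
    """
    print(markdown_filter(sample))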
| daboross/dabo.guru | content/util/gh_markdown.py | Python | apache-2.0 | 2,238 |
"""Allow users to set and activate scenes."""
import importlib
import logging
import voluptuous as vol
from homeassistant.const import CONF_PLATFORM, SERVICE_TURN_ON
from homeassistant.core import DOMAIN as HA_DOMAIN
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
# mypy: allow-untyped-defs, no-check-untyped-defs
DOMAIN = "scene"
STATE = "scening"
STATES = "states"
def _hass_domain_validator(config):
"""Validate platform in config for homeassistant domain."""
if CONF_PLATFORM not in config:
config = {CONF_PLATFORM: HA_DOMAIN, STATES: config}
return config
def _platform_validator(config):
"""Validate it is a valid platform."""
try:
platform = importlib.import_module(
".{}".format(config[CONF_PLATFORM]), __name__
)
except ImportError:
try:
platform = importlib.import_module(
"homeassistant.components.{}.scene".format(config[CONF_PLATFORM])
)
except ImportError:
raise vol.Invalid("Invalid platform specified") from None
if not hasattr(platform, "PLATFORM_SCHEMA"):
return config
return platform.PLATFORM_SCHEMA(config)
PLATFORM_SCHEMA = vol.Schema(
vol.All(
_hass_domain_validator,
vol.Schema({vol.Required(CONF_PLATFORM): str}, extra=vol.ALLOW_EXTRA),
_platform_validator,
),
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up the scenes."""
logger = logging.getLogger(__name__)
component = hass.data[DOMAIN] = EntityComponent(logger, DOMAIN, hass)
await component.async_setup(config)
# Ensure Home Assistant platform always loaded.
await component.async_setup_platform(
HA_DOMAIN, {"platform": "homeassistant", STATES: []}
)
component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_activate")
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class Scene(Entity):
"""A scene is a group of entities and the states we want them to be."""
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def state(self):
"""Return the state of the scene."""
return STATE
def activate(self):
"""Activate scene. Try to get entities into requested state."""
raise NotImplementedError()
def async_activate(self):
"""Activate scene. Try to get entities into requested state.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(self.activate)
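# Illustrative sketch (hypothetical class, not shipped with this component):
# platforms provide scenes by subclassing Scene and implementing activate()
# (or overriding async_activate) to push entities into the stored states.
class _ExampleScene(Scene):
    """Example scene that would restore a fixed set of entity states."""

    @property
    def name(self):
        return "example"

    def activate(self):
        # A real platform would call services here to reproduce the states.
        pass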
| leppa/home-assistant | homeassistant/components/scene/__init__.py | Python | apache-2.0 | 2,906 |
"""
Given an array of numbers nums, in which exactly two elements appear only once and all the other elements appear exactly twice. Find the two elements that appear only once.
Example:
Input: [1,2,1,3,2,5]
Output: [3,5]
Note:
The order of the result is not important. So in the above example, [5, 3] is also correct.
Your algorithm should run in linear runtime complexity. Could you implement it using only constant space complexity?
"""
class Solution(object):
def singleNumber(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
"""
mapping = {}
for num in nums:
if num not in mapping:
mapping[num] = True
else:
del(mapping[num])
return mapping.keys()
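# Constant-space sketch for the follow-up: XOR of all numbers gives a ^ b
# for the two unique values; the lowest set bit of that XOR distinguishes
# them, so partitioning the array by that bit isolates each one.
class SolutionXor(object):
    def singleNumber(self, nums):
        """
        :type nums: List[int]
        :rtype: List[int]
        """
        xor_all = 0
        for num in nums:
            xor_all ^= num
        lowest_bit = xor_all & -xor_all  # lowest bit where the two differ
        a = b = 0
        for num in nums:
            if num & lowest_bit:
                a ^= num
            else:
                b ^= num
        return [a, b]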
| danielsunzhongyuan/my_leetcode_in_python | single_number_iii_260.py | Python | apache-2.0 | 781 |
__author__ = 'saeedamen' # Saeed Amen
#
# Copyright 2016-2020 Cuemacro - https://www.cuemacro.com / @cuemacro
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and limitations under the License.
#
import numpy as np
import pandas as pd
from numba import guvectorize
from findatapy.timeseries import Calendar
from findatapy.util import LoggerManager
from finmarketpy.util.marketconstants import MarketConstants
from finmarketpy.curve.abstractpricer import AbstractPricer
from finmarketpy.curve.rates.fxforwardspricer import FXForwardsPricer
from financepy.finutils.FinDate import FinDate
from financepy.models.FinModelBlackScholes import FinModelBlackScholes
from financepy.products.fx.FinFXVanillaOption import FinFXVanillaOption
from financepy.finutils.FinGlobalTypes import FinOptionTypes
from financepy.products.fx.FinFXMktConventions import *
market_constants = MarketConstants()
class FXOptionsPricer(AbstractPricer):
"""Prices various vanilla FX options, using FinancePy underneath.
"""
def __init__(self, fx_vol_surface=None, premium_output=market_constants.fx_options_premium_output,
delta_output=market_constants.fx_options_delta_output):
self._calendar = Calendar()
self._fx_vol_surface = fx_vol_surface
self._fx_forwards_pricer = FXForwardsPricer()
self._premium_output = premium_output
self._delta_output = delta_output
def price_instrument(self, cross, horizon_date, strike, expiry_date=None, vol=None, notional=1000000,
contract_type='european-call', tenor=None,
fx_vol_surface=None, premium_output=None, delta_output=None, depo_tenor=None, use_atm_quoted=False,
return_as_df=True):
"""Prices FX options for horizon dates/expiry dates given by the user from FX spot rates, FX volatility surface
and deposit rates.
Parameters
----------
cross : str
Currency pair
horizon_date : DateTimeIndex
Horizon dates for options
strike : np.ndarray, float or str
Strike of option
eg. 'atm' - at-the-money
eg. 'atmf' - at-the-money forward
eg. 'atms' - at-the-money spot
eg. '25d-otm' - out-of-the-money 25d
eg. '10d-otm' - out-of-the-money 10d
expiry_date : DateTimeIndex (optional)
Expiry dates for options
vol : np.ndarray (optional)
Implied vol for options
notional : float
Notional in base currency of the option
contract_type : str
What type of option are we pricing?
eg. 'european-call'
tenor : str (optional)
Tenor of option
fx_vol_surface : FXVolSurface
Interpolates FX vol surface
premium_output : str
'pct-for' (in base currency pct) or 'pct-dom' (in terms currency pct)
delta_output : str
Delta convention to output, eg. 'pips-spot-delta' or 'pct-fwd-delta-prem-adj'
depo_tenor : str
Tenor of the deposit to use in the option pricing
use_atm_quoted : bool
True - takes the direct market quote
False - uses interpolated version
return_as_df : bool
True - returns output as DataFrame
False - returns output as np.ndarray
Returns
-------
DataFrame
"""
# if market_df is None: market_df = self._market_df
if fx_vol_surface is None: fx_vol_surface = self._fx_vol_surface
if premium_output is None: premium_output = self._premium_output
if delta_output is None: delta_output = self._delta_output
logger = LoggerManager().getLogger(__name__)
field = fx_vol_surface._field
# Make horizon date and expiry date pandas DatetimeIndex
if isinstance(horizon_date, pd.Timestamp):
horizon_date = pd.DatetimeIndex([horizon_date])
else:
horizon_date = pd.DatetimeIndex(horizon_date)
if expiry_date is not None:
if isinstance(expiry_date, pd.Timestamp):
expiry_date = pd.DatetimeIndex([expiry_date])
else:
expiry_date = pd.DatetimeIndex(expiry_date)
else:
expiry_date = self._calendar.get_expiry_date_from_horizon_date(horizon_date, tenor, cal=cross)
# If the strike hasn't been supplied need to work this out
if not(isinstance(strike, np.ndarray)):
old_strike = strike
if isinstance(strike, str):
strike = np.empty(len(horizon_date), dtype=object)
else:
strike = np.empty(len(horizon_date))
strike.fill(old_strike)
# If the vol hasn't been supplied need to work this out
if not(isinstance(vol, np.ndarray)):
if vol is None:
vol = np.nan
old_vol = vol
vol = np.empty(len(horizon_date))
vol.fill(old_vol)
option_values = np.zeros(len(horizon_date))
spot = np.zeros(len(horizon_date))
delta = np.zeros(len(horizon_date))
intrinsic_values = np.zeros(len(horizon_date))
def _price_option(contract_type_, contract_type_fin_):
for i in range(len(expiry_date)):
built_vol_surface = False
# If we have a "key strike" need to fit the vol surface
if isinstance(strike[i], str):
if not(built_vol_surface):
fx_vol_surface.build_vol_surface(horizon_date[i])
fx_vol_surface.extract_vol_surface(num_strike_intervals=None)
built_vol_surface = True
# Delta neutral strike/or whatever strike is quoted as ATM
# usually this is ATM delta neutral strike, but can sometimes be ATMF for some Latam
# Take the vol directly quoted, rather than getting it from building vol surface
if strike[i] == 'atm':
strike[i] = fx_vol_surface.get_atm_strike(tenor)
if use_atm_quoted:
vol[i] = fx_vol_surface.get_atm_quoted_vol(tenor) / 100.0
else:
vol[i] = fx_vol_surface.get_atm_vol(tenor) / 100.0 # interpolated
elif strike[i] == 'atms':
strike[i] = fx_vol_surface.get_spot() # Interpolate vol later
elif strike[i] == 'atmf':
# Quoted tenor, no need to interpolate
strike[i] = float(fx_vol_surface.get_all_market_data()[cross + ".close"][horizon_date[i]]) \
+ (float(fx_vol_surface.get_all_market_data()[cross + tenor + ".close"][horizon_date[i]]) \
/ self._fx_forwards_pricer.get_forwards_divisor(cross[3:6]))
# Interpolate vol later
# TODO: work on 25d and 10d strikes
elif strike[i] == '25d-otm':
if 'call' in contract_type_:
strike[i] = fx_vol_surface.get_25d_call_strike(tenor)
vol[i] = fx_vol_surface.get_25d_call_vol(tenor) / 100.0
elif 'put' in contract_type_:
strike[i] = fx_vol_surface.get_25d_put_strike(tenor)
vol[i] = fx_vol_surface.get_25d_put_vol(tenor) / 100.0
elif strike[i] == '10d-otm':
if 'call' in contract_type_:
strike[i] = fx_vol_surface.get_10d_call_strike(tenor)
vol[i] = fx_vol_surface.get_10d_call_vol(tenor) / 100.0
elif 'put' in contract_type_:
strike[i] = fx_vol_surface.get_10d_put_strike(tenor)
vol[i] = fx_vol_surface.get_10d_put_vol(tenor) / 100.0
if not(built_vol_surface):
try:
fx_vol_surface.build_vol_surface(horizon_date[i])
except Exception:
logger.warn("Failed to build vol surface for " + str(horizon_date) + ", won't be able to interpolate vol")
# fx_vol_surface.extract_vol_surface(num_strike_intervals=None)
# If an implied vol hasn't been provided, interpolate that one, fit the vol surface (if hasn't already been
# done)
if np.isnan(vol[i]):
if tenor is None:
vol[i] = fx_vol_surface.calculate_vol_for_strike_expiry(strike[i], expiry_date=expiry_date[i], tenor=None)
else:
vol[i] = fx_vol_surface.calculate_vol_for_strike_expiry(strike[i], expiry_date=None, tenor=tenor)
model = FinModelBlackScholes(float(vol[i]))
logger.info("Pricing " + contract_type_ + " option, horizon date = " + str(horizon_date[i]) + ", expiry date = "
+ str(expiry_date[i]))
option = FinFXVanillaOption(self._findate(expiry_date[i]), strike[i],
cross, contract_type_fin_, notional, cross[0:3])
spot[i] = fx_vol_surface.get_spot()
""" FinancePy will return the value in the following dictionary for values
{'v': vdf,
"cash_dom": cash_dom,
"cash_for": cash_for,
"pips_dom": pips_dom,
"pips_for": pips_for,
"pct_dom": pct_dom,
"pct_for": pct_for,
"not_dom": notional_dom,
"not_for": notional_for,
"ccy_dom": self._domName,
"ccy_for": self._forName}
"""
option_values[i] = option_values[i] + option.value(self._findate(horizon_date[i]),
spot[i], fx_vol_surface.get_dom_discount_curve(),
fx_vol_surface.get_for_discount_curve(),
model)[premium_output.replace('-', '_')]
intrinsic_values[i] = intrinsic_values[i] + option.value(self._findate(expiry_date[i]),
spot[i], fx_vol_surface.get_dom_discount_curve(),
fx_vol_surface.get_for_discount_curve(),
model)[premium_output.replace('-', '_')]
"""FinancePy returns this dictionary for deltas
{"pips_spot_delta": pips_spot_delta,
"pips_fwd_delta": pips_fwd_delta,
"pct_spot_delta_prem_adj": pct_spot_delta_prem_adj,
"pct_fwd_delta_prem_adj": pct_fwd_delta_prem_adj}
"""
delta[i] = delta[i] + option.delta(self._findate(horizon_date[i]),
spot[i], fx_vol_surface.get_dom_discount_curve(),
fx_vol_surface.get_for_discount_curve(), model)[delta_output.replace('-', '_')]
if contract_type == 'european-call':
contract_type_fin = FinOptionTypes.EUROPEAN_CALL
_price_option(contract_type, contract_type_fin)
elif contract_type == 'european-put':
contract_type_fin = FinOptionTypes.EUROPEAN_PUT
_price_option(contract_type, contract_type_fin)
elif contract_type == 'european-straddle' or contract_type == 'european-strangle':
contract_type = 'european-call'
contract_type_fin = FinOptionTypes.EUROPEAN_CALL
_price_option(contract_type, contract_type_fin)
contract_type = 'european-put'
contract_type_fin = FinOptionTypes.EUROPEAN_PUT
_price_option(contract_type, contract_type_fin)
if return_as_df:
option_prices_df = pd.DataFrame(index=horizon_date)
option_prices_df[cross + '-option-price.' + field] = option_values
option_prices_df[cross + '.' + field] = spot
option_prices_df[cross + '-strike.' + field] = strike
option_prices_df[cross + '-vol.' + field] = vol
option_prices_df[cross + '-delta.' + field] = delta
option_prices_df[cross + '.expiry-date'] = expiry_date
option_prices_df[cross + '-intrinsic-value.' + field] = intrinsic_values
return option_prices_df
return option_values, spot, strike, vol, delta, expiry_date, intrinsic_values
def get_day_count_conv(self, currency):
if currency in market_constants.currencies_with_365_basis:
return 365.0
return 360.0
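# Hedged usage sketch (names like `vol_surface` are assumptions; building
# one needs market data via findatapy, so this is illustrative only):
#   pricer = FXOptionsPricer(fx_vol_surface=vol_surface)
#   df = pricer.price_instrument('EURUSD', pd.Timestamp('2020-06-01'), 'atm',
#                                contract_type='european-call', tenor='1M')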
def _findate(self, timestamp):
return FinDate(timestamp.day, timestamp.month, timestamp.year,
hh=timestamp.hour, mm=timestamp.minute, ss=timestamp.second)
| cuemacro/finmarketpy | finmarketpy/curve/volatility/fxoptionspricer.py | Python | apache-2.0 | 13,613 |
#!/usr/bin/env python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the GCI Task Model.
"""
__authors__ = [
'"Madhusudan.C.S" <[email protected]>',
'"Daniel Hans" <[email protected]>',
'"Lennard de Rijk" <[email protected]>',
]
from google.appengine.ext import db
from django.utils.translation import ugettext
from taggable.taggable import Tag
from taggable.taggable import Taggable
from taggable.taggable import tag_property
import soc.models.linkable
import soc.models.role
import soc.models.student
import soc.models.user
import soc.modules.gci.models.program
class TaskTag(Tag):
"""Model for storing all Task tags.
"""
order = db.IntegerProperty(required=True, default=0)
@classmethod
def get_by_scope(cls, scope):
"""Get the list of tag objects that have the given scope, sorted by
their order values.
"""
tags = db.Query(cls).filter('scope =', scope).order('order').fetch(1000)
return tags
@classmethod
def get_highest_order(cls, scope):
"""Get the highest order value among the tags in the given scope.
"""
tag = db.Query(cls).filter('scope =', scope).order('-order').get()
if tag:
return tag.order
else:
return -1
@classmethod
def update_order(cls, scope, tag_name, order):
"""Updates the order of the tag.
"""
tag = cls.get_by_scope_and_name(scope, tag_name)
if tag:
tag.order = order
tag.put()
return tag
@classmethod
def get_or_create(cls, scope, tag_name, order=0):
"""Get the Tag object with the given tag_name, creating it if missing.
"""
tag_key_name = cls._key_name(scope.key().name(), tag_name)
existing_tag = cls.get_by_key_name(tag_key_name)
if existing_tag is None:
# the tag does not yet exist, so create it.
if not order:
order = cls.get_highest_order(scope=scope) + 1
def create_tag_txn():
new_tag = cls(key_name=tag_key_name, tag=tag_name,
scope=scope, order=order)
new_tag.put()
return new_tag
existing_tag = db.run_in_transaction(create_tag_txn)
return existing_tag
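# Usage sketch (`org_entity` is a hypothetical scope entity, not defined
# here): lookups are idempotent and creation runs in a transaction, so
# concurrent callers converge on a single tag with the next order value.
#   tag = TaskTag.get_or_create(org_entity, 'documentation')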
class TaskTypeTag(TaskTag):
"""Model for storing of task type tags.
"""
pass
class TaskDifficultyTag(TaskTag):
"""Model for storing of task difficulty level tags.
"""
value = db.IntegerProperty(default=0, verbose_name=ugettext('value'))
class TaskArbitraryTag(TaskTag):
"""Model for storing of arbitrary tags.
"""
def __init__(self, *args, **kwds):
"""Initialization function.
"""
TaskTag.__init__(self, *args, **kwds)
self.auto_delete = True
class GCITask(Taggable, soc.models.linkable.Linkable):
"""Model for a task used in GCI workflow.
The scope property of Linkable will be set to the Organization to which
this task belongs. A link_id will be generated automatically and will
have no specific meaning other than identification.
"""
#: Required field indicating the "title" of the task
title = db.StringProperty(required=True,
verbose_name=ugettext('Title'))
title.help_text = ugettext('Title of the task')
#: Required field containing the description of the task
description = db.TextProperty(required=True,
verbose_name=ugettext('Description'))
description.help_text = ugettext('Complete description of the task')
#: Field indicating the difficulty level of the Task. This is not
#: mandatory so that it can be assigned at any later stage.
#: The options are configured by a Program Admin.
difficulty = tag_property('difficulty')
#: Required field which contains the type of the task. These types are
#: configured by a Program Admin.
task_type = tag_property('task_type')
#: Field which contains the arbitrary tags for the task. These tags can
#: be assigned by Org Admins and mentors.
arbit_tag = tag_property('arbit_tag')
#: A field which contains time allowed for completing the task (in hours)
#: from the moment that this task has been assigned to a Student
time_to_complete = db.IntegerProperty(required=True,
verbose_name=('Time to Complete'))
time_to_complete.help_text = ugettext(
'Time allowed to complete the task, in hours, once it is claimed')
#: List of Mentors assigned to this task. A Mentor who creates this
#: task is assigned as the Mentor by default. An Org Admin will have
#: to assign a Mentor upon task creation.
mentors = db.ListProperty(item_type=db.Key, default=[])
#: User profile by whom this task has been claimed by. This field
#: is mandatory for claimed tasks
user = db.ReferenceProperty(reference_class=soc.models.user.User,
required=False,
collection_name='assigned_tasks')
#: Student profile to whom this task is currently assigned to. If the user
#: has registered as a Student than this field will be filled in. This field
#: is mandatory for all Tasks in the closed state.
student = db.ReferenceProperty(reference_class=soc.models.student.Student,
required=False,
collection_name='assigned_tasks')
#: Program in which this Task has been created
program = db.ReferenceProperty(
reference_class=soc.modules.gci.models.program.GCIProgram,
required=True, collection_name='tasks')
#: Required property which holds the state, the Task is currently in.
#: This is a hidden field not shown on forms. Handled by logic internally.
#: The state can be one of the following:
#: Unapproved: If Task is created by a Mentor, this is the automatically
#: assigned state.
#: Unpublished: This Task is not published yet.
#: Open: This Task is open and ready to be claimed.
#: Reopened: This Task has been claimed but never finished and has been
#: reopened.
#: ClaimRequested: A Student has requested to claim this task.
#: Claimed: This Task has been claimed and someone is working on it.
#: ActionNeeded: Work on this Task must be submitted for review within
#: 24 hours.
#: Closed: Work on this Task has been completed to the org's content.
#: AwaitingRegistration: Student has completed work on this task, but
#: needs to complete Student registration before this task is closed.
#: NeedsWork: The work on this Task needs a bit more brushing up. This
#: state is followed by a Mentor review.
#: NeedsReview: Student has submitted work for this task and it should
#: be reviewed by a Mentor.
#: Invalid: The Task is deleted either by an Org Admin/Mentor
status = db.StringProperty(
required=True, verbose_name=ugettext('Status'),
choices=['Unapproved', 'Unpublished', 'Open', 'Reopened',
'ClaimRequested', 'Claimed', 'ActionNeeded',
'Closed', 'AwaitingRegistration', 'NeedsWork',
'NeedsReview', 'Invalid'],
default='Unapproved')
#: Indicates when the Task was closed. Its value is None before it is
#: completed.
closed_on = db.DateTimeProperty(required=False,
verbose_name=ugettext('Closed on'))
#: This field is set to the next deadline that will have consequences for
#: this Task. For instance this will store a DateTime property which will
#: tell when this Task should be completed.
deadline = db.DateTimeProperty(required=False,
verbose_name=ugettext('Deadline'))
#: Required field containing the Mentor/Org Admin who created this task.
#: If site developer has created the task, it is empty.
created_by = db.ReferenceProperty(reference_class=soc.models.role.Role,
required=False,
collection_name='created_tasks',
verbose_name=ugettext('Created by'))
#: Date when the proposal was created
created_on = db.DateTimeProperty(required=True, auto_now_add=True,
verbose_name=ugettext('Created on'))
#: Required field containing the Mentor/Org Admin who last edited this
#: task. It changes only when Mentor/Org Admin changes title, description,
#: difficulty, task_type, time_to_complete. If site developer has modified
#: the task, it is empty.
modified_by = db.ReferenceProperty(reference_class=soc.models.role.Role,
required=False,
collection_name='edited_tasks',
verbose_name=ugettext('Modified by'))
#: Date when the proposal was last modified, should be set manually on edit
modified_on = db.DateTimeProperty(required=True, auto_now_add=True,
verbose_name=ugettext('Modified on'))
#: A field which holds the entire history of this task in JSON. The
#: structure of this JSON string is as follows:
#: {
#: timestamp1: {
#: "user": User reference
#: "student": Student reference
#: ...
#: "state": "Unapproved"
#: ...
#: "edited_by": Role reference
#:
#: }
#: timestamp2: {
#: "state": "Unpublished"
#: }
#: }
#: First dictionary item holds the values for all the properties in this
#: model. The subsequent items hold the properties that changed at the
#: timestamp given by the key.
#: Reference properties will be stored by calling str() on their Key.
history = db.TextProperty(required=False, default='')
def __init__(self, parent=None, key_name=None,
app=None, **entity_values):
"""Constructor for GCITask Model.
Args:
See Google App Engine APIs.
"""
# explicitly call the AppEngine datastore Model constructor
# pylint: disable=W0233
db.Model.__init__(self, parent, key_name, app, **entity_values)
# call the Taggable constructor to initialize the tags specified as
# keyword arguments
Taggable.__init__(self, task_type=TaskTypeTag,
difficulty=TaskDifficultyTag,
arbit_tag=TaskArbitraryTag)
def taskDifficulty(self, all_difficulties=None):
if all_difficulties:
key = self.key()
difficulties = [i for i in all_difficulties if key in i.tagged]
else:
difficulties = self.difficulty
if len(difficulties) == 1:
return difficulties[0]
self.difficulty = {'tags': ['Unknown'], 'scope': self.program}
return self.difficulty[0]
def taskType(self, all_types=None, ret_list=False):
if all_types:
key = self.key()
types = [i for i in all_types if key in i.tagged]
else:
types = self.task_type
return self.tags_string(types, ret_list=ret_list)
def taskArbitTag(self, ret_list=False):
return self.tags_string(self.arbit_tag, ret_list=ret_list)
def taskDifficultyValue(self, all_difficulties=None):
difficulty = self.taskDifficulty(all_difficulties)
return "%s (%s)" % (difficulty.value, difficulty.tag)
def taskTimeToComplete(self):
days = self.time_to_complete / 24
hours = self.time_to_complete % 24
result = []
if days == 1:
result.append("1 day")
if days > 1:
result.append("%d days" % days)
if days and hours:
result.append(" and ")
if hours == 1:
result.append("1 hour")
if hours > 1:
result.append("%d hours" % hours)
return "".join(result)
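# Illustration of taskTimeToComplete's formatting: time_to_complete=30
# yields "1 day and 6 hours", 48 yields "2 days", and 1 yields "1 hour".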
| SRabbelier/Melange | app/soc/modules/gci/models/task.py | Python | apache-2.0 | 12,127 |
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import time
from pants.util.dirutil import touch
from pants_test.tasks.task_test_base import TaskTestBase
from pants.contrib.go.targets.go_library import GoLibrary
from pants.contrib.go.tasks.go_compile import GoCompile
class GoCompileTest(TaskTestBase):
@classmethod
def task_type(cls):
return GoCompile
def setUp(self):
super(GoCompileTest, self).setUp()
self.go_compile = self.create_task(self.context())
def _create_binary(self, target):
p = os.path.join(self.go_compile.get_gopath(target), 'pkg', target.address.spec)
touch(p)
return p
def _create_lib_binary_map(self, *args):
m = {}
for target in args:
m[target] = self._create_binary(target)
return m
def test_sync_binary_dep_links_basic(self):
b = self.make_target(spec='libB', target_type=GoLibrary)
a = self.make_target(spec='libA', target_type=GoLibrary, dependencies=[b])
lib_binary_map = self._create_lib_binary_map(a, b)
a_gopath = self.go_compile.get_gopath(a)
self.go_compile._sync_binary_dep_links(a, a_gopath, lib_binary_map)
b_link = os.path.join(a_gopath, 'pkg', b.address.spec)
self.assertTrue(os.path.islink(b_link))
self.assertEqual(os.readlink(b_link), lib_binary_map[b])
def test_sync_binary_dep_links_removes_unused_links(self):
b = self.make_target(spec='libB', target_type=GoLibrary)
a = self.make_target(spec='libA', target_type=GoLibrary, dependencies=[b])
lib_binary_map = self._create_lib_binary_map(a, b)
a_gopath = self.go_compile.get_gopath(a)
self.go_compile._sync_binary_dep_links(a, a_gopath, lib_binary_map)
b_link = os.path.join(a_gopath, 'pkg', b.address.spec)
# Remove b as dependency of a and assert a's pkg/ dir no longer contains link to b.
self.reset_build_graph()
b = self.make_target(spec='libB', target_type=GoLibrary)
a = self.make_target(spec='libA', target_type=GoLibrary)
self.go_compile._sync_binary_dep_links(a, a_gopath, lib_binary_map)
self.assertFalse(os.path.islink(b_link))
def test_sync_binary_dep_links_refreshes_links(self):
c = self.make_target(spec='libC', target_type=GoLibrary)
b = self.make_target(spec='libB', target_type=GoLibrary)
a = self.make_target(spec='libA', target_type=GoLibrary, dependencies=[b, c])
lib_binary_map = self._create_lib_binary_map(a, b, c)
a_gopath = self.go_compile.get_gopath(a)
self.go_compile._sync_binary_dep_links(a, a_gopath, lib_binary_map)
# Ensure future links are newer than the original links by at least 1.5 seconds.
time.sleep(1.5)
# "Modify" b's binary.
touch(lib_binary_map[b])
self.go_compile._sync_binary_dep_links(a, a_gopath, lib_binary_map)
mtime = lambda t: os.lstat(os.path.join(os.path.join(a_gopath, 'pkg', t.address.spec))).st_mtime
# Make sure c's link was untouched, while b's link was refreshed.
self.assertLessEqual(mtime(c), mtime(b) - 1)
def test_split_build_flags_simple(self):
actual = GoCompile._split_build_flags("-v -race")
expected = ['-v', '-race']
self.assertEqual(actual, expected)
def test_split_build_flags_single_quoted(self):
actual = GoCompile._split_build_flags("-v -race -tags 'tag list'")
expected = ['-v', '-race', '-tags', "tag list"]
self.assertEqual(actual, expected)
def test_split_build_flags_nested_quotes(self):
actual = GoCompile._split_build_flags("--ldflags \'-extldflags \"-static\"\'")
expected = ['--ldflags', '-extldflags "-static"']
self.assertEqual(actual, expected)
def test_split_build_flags_ldflags(self):
actual = GoCompile._split_build_flags(' '.join([
'compile',
'contrib/go/examples/src/go/server',
'--compile-go-build-flags="--ldflags \'-extldflags \"-static\"\'"'
]))
expected = [
'compile',
'contrib/go/examples/src/go/server',
"--compile-go-build-flags=--ldflags '-extldflags -static'",
]
self.assertEqual(actual, expected)
| UnrememberMe/pants | contrib/go/tests/python/pants_test/contrib/go/tasks/test_go_compile.py | Python | apache-2.0 | 4,270 |
# licensed under the Apache License 2.0
# github.com/masoncodes
# masoncodes.me
import os
import shutil
print("This installer is PROOF OF CONCEPT AND IS NOT REQUIRED TO BE RUN.")
print("Before you begin, please ensure you have downloaded the latest version of masonCLI.")
print("Where should masonCLI be installed? (please specify a folder for masonCLI as well.)")
direc = input(">> ")
if not os.path.exists(direc):
os.makedirs(direc)
shutil.move("masonCLI.py", direc)
shutil.move("apps.py", direc)
shutil.move("core.py", direc)
print("masonCLI has been successfully installed to:", direc+"!")
else:
print("You already installed masonCLI!")
exit(0)
| masoncodes/masonCLI | installer.py | Python | apache-2.0 | 699 |
import json
import gzip as gz
class TextCleaner(object):
def __init__(self):
characters_to_replace = ',.&!+:;?"#()\'*+,./<=>@[\\]^_`{|}~\n'
self.remove_white_map = dict((ord(char), u' ') for char in characters_to_replace)
def clean_text(self, text):
"""
Replaces some characters with white space
:param text: String
:return: Text with chars_to_replace replaced with white space
"""
return text.translate(self.remove_white_map)
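# Example (Python 2; translate() with an ord->space map requires a unicode
# string): punctuation in the replacement set becomes whitespace.
#   TextCleaner().clean_text(u"call me! 555-1234")  ->  u"call me  555-1234"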
def load_files(file_names=None, max_lines=None):
"""
:param file_names: List of files paths to load
:param max_lines: Max number of lines to return
:return text: List of text, one string per ad
:return labels: List of labels, one per ad
:return ad_id: List of ad ids, one per ad
:return phone: List of tuples, each tuple contains strings of each phone number in ad
"""
if file_names is None:
file_names = ['data/ht_training_UPDATED.gz', 'data/ht_training_2.gz']
text, labels, ad_id, phone = zip(*(d for d in _extract_data(file_names, max_lines=max_lines)))
return text, labels, ad_id, phone
def _extract_data(filenames, max_lines=None):
"""
Extracts the ad text, id, and label (0 or 1) for each ad
:param filenames: gz files containing json objects
"""
count = 0
for file_name in filenames:
if count == max_lines:
break
with gz.open(file_name, 'r') as f:
for line in f:
d = json.loads(line)
try:
if 'extracted_text' in d['ad']:
text = d['ad']['extracted_text']
else:
text = d['ad']['extractions']['text']['results'][0]
if 'class' in d:
if d['class'] == 'positive':
yield text.encode('utf8'), 1, d['ad']['_id'], tuple(d['phone'])
else:
yield text.encode('utf8'), 0, d['ad']['_id'], tuple(d['phone'])
else:
yield text.encode('utf8'), None, d['ad']['_id'], tuple(d['phone'])
count += 1
except Exception:
print d
if count == max_lines:
break
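# Hedged usage sketch (assumes the default gzipped JSON files exist under
# data/): returns parallel tuples of ad text, labels, ids and phone numbers.
#   text, labels, ad_id, phone = load_files(max_lines=1000)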
| benbo/QPR_CP1 | load_data.py | Python | apache-2.0 | 2,364 |
# Copyright 2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file implements some wrappers to linear algebra functions.
"""
from ctypes import c_int
import numpy
from numpy.ctypeslib import ndpointer
from numpy import ndarray as Nparray
from fqe.lib import lib_fqe, c_double_complex
def _zimatadd(outp: 'Nparray', inp: 'Nparray', alpha: complex):
"""Wrapper to C function `zimatadd`.
Performs `outp += alpha * inp.T`
Args:
outp (Nparray): output array
inp (Nparray): input array
alpha (complex): scalar coefficient multiplied with the input
"""
func = lib_fqe.zimatadd
if outp.ndim != 2:
raise ValueError(f'outp of shape {outp.shape} not two-dimensional')
dim1, dim2 = outp.shape
func.argtypes = [
c_int,
c_int,
ndpointer(shape=(dim1, dim2),
dtype=numpy.complex128,
flags=('C_CONTIGUOUS', 'ALIGNED')),
ndpointer(shape=(dim2, dim1),
dtype=numpy.complex128,
flags=('C_CONTIGUOUS', 'ALIGNED')),
c_double_complex,
]
alpha_c = c_double_complex(alpha.real, alpha.imag)
func(outp.shape[0], outp.shape[1], outp, inp, alpha_c)
def _transpose(outp: 'Nparray', inp: 'Nparray'):
"""Wrapper to C function `transpose`.
Performs `outp = inp.T`
Args:
outp (Nparray): output array
inp (Nparray): input array
"""
func = lib_fqe.transpose
if outp.ndim != 2:
raise ValueError(f'outp of shape {outp.shape} not two-dimensional')
dim1, dim2 = outp.shape
func.argtypes = [
c_int, c_int,
ndpointer(shape=(dim1, dim2),
dtype=numpy.complex128,
flags=('C_CONTIGUOUS', 'ALIGNED')),
ndpointer(shape=(dim2, dim1),
dtype=numpy.complex128,
flags=('C_CONTIGUOUS', 'ALIGNED'))
]
func(outp.shape[0], outp.shape[1], outp, inp)
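# Pure-NumPy reference for the wrappers above (useful when testing against
# the compiled fqe.lib extension):
#   _zimatadd(out, inp, alpha)  computes  out += alpha * inp.T
#   _transpose(out, inp)        computes  out[:] = inp.T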
| quantumlib/OpenFermion-FQE | src/fqe/lib/linalg.py | Python | apache-2.0 | 2,498 |
#!/usr/bin/python
#
# Copyright 2008 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: [email protected] (Mingyu Wu)
"""A basic shell runner class which implements FrameworkAdaptor.
This module contains only one class, BaseRunner, which implements
PyreRingFrameworkAdaptor.
"""
__author__ = '[email protected] (Mingyu Wu)'
import logging
import os
import re
import sys
import time
import traceback
from lib import common_util
from lib import constants
from lib import emailmessage
from lib import filesystemhandlerextend
from lib import pyreringconfig
from lib import pyreringutil
from lib import reporter_txt
from lib import scanscripts
global_settings = pyreringconfig.GlobalPyreRingConfig.settings
logger = logging.getLogger('PyreRing')
DEBUG = common_util.DebugLog
ScanScriptsError = scanscripts.ScanScriptsError
WARNING_PATTERN = re.compile(r'warn', re.I)
ERROR_PATTERN = re.compile(r'error', re.I)
FATAL_PATTERN = re.compile(r'fatal', re.I)
CATCHING_LIST = [FATAL_PATTERN, ERROR_PATTERN, WARNING_PATTERN]
SETUP_SUITE = ['SETUP.sh', 'SETUP.py', 'SETUP.par', 'SETUP.suite']
SETUP_SUITE_SET = set(SETUP_SUITE)
TEARDOWN_SUITE = ['TEARDOWN.sh', 'TEARDOWN.py', 'TEARDOWN.par',
'TEARDOWN.suite']
TEARDOWN_SUITE_SET = set(TEARDOWN_SUITE)
class BaseRunner(pyreringutil.PyreRingFrameworkAdaptor):
"""The basic shell runner.
This class implements some PyreRingFrameworkAdaptor methods to be able
to run simple shell commands.
It implements Prepare, CleanUp, Run, GetFrameworkName, GetFrameworkType.
"""
@DEBUG
def __init__(self,
name='/bin/sh',
scanner=None,
email_message=emailmessage.EmailMessage(),
filesystem=filesystemhandlerextend.FileSystemHandlerExtend(),
reporter=None):
"""Init the BaseRunner with a name.
It should get a name from the init call of this instance. The name is not
used currently.
Args:
name: a unique name to identify this instance, the default is /bin/sh.
scanner: an initialized scanner with source_dir.
email_message: An email constructor object.
filesystem: an instance of filesystem. Default to Matt's filesystem.
reporter: a report file generator.
"""
super(BaseRunner, self).__init__()
# framework_type should be the command to invoke the framework
self.framework_type = '/bin/sh'
self.name = name
# Get a reference to the global prop
self.prop = global_settings
# Init a scanner using the source_dir
if scanner:
self.scanner = scanner
else:
self.scanner = scanscripts.ScanScripts(self.prop['source_dir'])
# Init a filesystem for interact with shell
self.filesystem = filesystem
self.email_message = email_message
# Init a reporter for generating a report
self.reporter = reporter or (
reporter_txt.TxtReporter(global_settings['project_name']))
# Set the file_errors boolean.
self.file_errors = global_settings['file_errors']
self.failed = 0
self.passed = 0
self.timeout = 0
self.error = 0
self.notrun = 0
@DEBUG
def Prepare(self):
"""This is to prepare the test run.
Gets the reporter ready to write the report.
"""
log_name = '%s_%s' % (global_settings['host_name'],
global_settings['log_file'])
result_name = ''.join([os.path.splitext(log_name)[0],
global_settings['time'],
'.txt'])
report_file = os.path.join(global_settings['report_dir'], result_name)
self.reporter.SetReportFile(report_file)
@DEBUG
def CleanUp(self):
"""This is used to clean up after itself.
For the base runner there is nothing to do, so the method is implemented
with an empty body.
"""
pass
def _RunSuites(self, suites):
"""Run a list of suites.
It runs the given suites. If any test in SETUP_SUITE fails, it will return 1
to mark a setup failure. Otherwise, returns 0.
Args:
suites: <list> names of test suite/cases.
Returns:
int: 1 if a setup test case failed; otherwise 0
"""
for test in suites:
try:
logger.debug('running %s' % test)
test_fail_flag = self._RunSingleSuite(test)[0]
# if the test failed and test is one of SETUP_SUITE, stop the rest of
# testing.
if test_fail_flag and test in SETUP_SUITE_SET:
logger.warning('Setup test "%s" failed. No other test executed.' %
test)
return 1
except ScanScriptsError:
# If the test doesn't exist or not supported, _RunSingleSuite will
# throw out ScanScriptsError. Ignore the error, just go do next test
# case.
continue
return 0
def _AddUserInfo(self):
"""Add user specific information to report file.
It will try to find a pre-defined header_file and attach it to the report
file header part.
"""
header_info_file = os.path.join(global_settings['source_dir'],
global_settings['header_file'])
try:
header_info = open(header_info_file).readlines()
self.reporter.AttachHeader(''.join(header_info))
logger.info('user info file: %s added to the report' % header_info_file)
except IOError:
# It is OK if I don't find this file, log and ignore the error.
logger.info('Did not find header_file: %s, did not attach it to the'
' report. Moving on.' % header_info_file)
def _Run(self, suite_list):
"""Actually run the given suite list.
Args:
suite_list: a list of suite/testcases.
Returns:
None.
"""
self.reporter.StartTest(str(suite_list),
global_settings['host_name'],
global_settings['tester'],
os.getuid(),
str(os.uname()))
# Now try to find and run the setup test case if any.
if global_settings['skip_setup']:
logger.info('setup skipped')
else:
logger.info('setup suite runs')
result = self._RunSuites(SETUP_SUITE)
# If the SETUP_SUITE has any failed test cases, stop the test right away.
if result:
return result
# Now is a good time to attach some header info,
# if user wants to plug in some header info into the report.
# It is OK, if this file does not exist. It will just ignore it and move
# on.
self._AddUserInfo()
# TODO(mwu): need to remove setup and teardown test cases from
# suite_list. So I will not run them twice.
self._RunSuites(suite_list)
if global_settings['skip_setup']:
logger.info('teardown skipped')
else:
self._RunSuites(TEARDOWN_SUITE)
self._SummaryToLog()
self.reporter.EndTest()
log_messages = [
'End of this test run.',
'=====' * 10,
]
for message in log_messages:
logger.info(message)
def _SummaryToLog(self):
"""Summary the test result and write to log."""
total_test = (self.failed + self.timeout + self.error + self.notrun +
self.passed)
log_messages = [
'TOTAL TESTS: %d' % total_test,
'%s: %d' % (constants.PASS, self.passed),
'%s: %d' % (constants.FAIL, self.failed),
'%s: %d' % (constants.TIMEOUT, self.timeout),
'%s: %d' % (constants.ERROR, self.error),
'%s: %d' % (constants.NOTRUN, self.notrun),
]
for message in log_messages:
logger.info(message)
@DEBUG
def Run(self, suite_list, email_flag=True):
"""The public method to invoke the test.
The suite list should be run and reports will be generated under report_dir
directory as set in the global settings. Also email should go out if
email_flag is set to True.
Args:
suite_list: a list of suites/tests understandable to the framework.
email_flag: a boolean value to indicate if an email is needed for the
report.
Returns:
The count of non-successful test cases.
"""
try:
self._Run(suite_list)
finally:
if email_flag and (self.failed + self.timeout + self.error + self.notrun):
self._SendMail(suite_list)
else:
if email_flag:
log_message = 'email is not sent since all test cases have passed.'
else:
log_message = 'email is not sent since email_flag is not set.'
logger.info(log_message)
return self.failed + self.timeout + self.error + self.notrun
def _SendMail(self, suite_list):
"""Send out email after test.
Args:
suite_list: the user input list of suites.
Returns:
None.
"""
from_address = self.prop['tester']
to_address = self.prop['email_recipients']
title = 'project:%s suites:%s' %(self.prop['project_name'], suite_list)
if self.failed or self.timeout or self.error or self.notrun:
title = '%s is RED' % title
else:
title = '%s is GREEN' % title
body = 'Here is the report:\n'
content_file = self.reporter.GetReportFile()
self.email_message.SetMessage(from_address,
to_address,
title,
body,
content_file)
log_message = self.email_message.Send()
if log_message:
logger.info(log_message)
else:
logger.info('email sent to %s' % to_address)
@DEBUG
def _RunSingleSuite(self, one_suite):
"""Runs a given suite/test case one by one.
This method disassembles the given suite into test cases, runs each test
case in a subshell sequentially, collects the return codes, writes the
results to the report, and logs all output to log_pipe for further
inspection.
Args:
one_suite: a test suite/test case name.
Returns:
A tuple of an overall return code and a dict of individual return codes
"""
results = {}
# This is used to check the suite pass or fail.
suite_fail_flag = False
for one_script_dict in self.scanner.BaseScan(one_suite):
try:
result = 0
cmd = one_script_dict['TEST_SCRIPT']
time_out = one_script_dict['TIMEOUT']
args = ''
logger.info('Test: %s......' % cmd)
result = self._CommandStreamer(cmd, args, time_out)
except KeyboardInterrupt:
err_msg = 'Keyboard interrupt'
logger.critical('Test: %s got Keyboard interrupt: %s' % (cmd, err_msg))
self.reporter.TestCaseReport(cmd, constants.ERROR)
self.error += 1
suite_fail_flag = True
# Set this test as ERROR out.
result = one_script_dict['ERROR']
try:
err_msg = """
Current test %s was interrupted by keyboard interrupt. Another ctrl+c
in 5 seconds will stop PyreRing, otherwise test will move on to the
next test.
""" % cmd
print err_msg
logger.info(err_msg)
time.sleep(5)
except KeyboardInterrupt:
err_msg = """PyreRing stopped by KeyboardInterrupt."""
print err_msg
logger.critical(err_msg)
raise
# Eat any other exceptions and keep going to the next test.
except Exception:
err_msg = ('Exception[%s] on command[%s]. \n\tSTACK TRACE:\n%s'
% (sys.exc_type, cmd, traceback.format_exc()))
log_message = 'Test: %s got Exception %s' % (cmd, err_msg)
logger.warn(log_message)
# Here the exception must come from executing the test, since I can't
# decide what might be the cause here. Just fail it and keep going to
# the next test.
self.reporter.TestCaseReport(cmd, constants.ERROR)
self.error += 1
# Set this test as ERROR out.
result = one_script_dict['ERROR']
suite_fail_flag = True
continue
results[cmd] = result
# Be careful about short circuit "or", need to _ReportTestCase first.
suite_fail_flag = (self._CheckAndReportResult(one_script_dict, result) or
suite_fail_flag
)
if suite_fail_flag:
self.reporter.SuiteReport(one_suite, constants.FAIL)
else:
self.reporter.SuiteReport(one_suite, constants.PASS)
return suite_fail_flag, results
def _CheckAndReportResult(self, one_script_dict, result):
"""Check and report test result to reporter.
After the run finishes for the test, send the result to the reporter and
return a decision on whether the test passed or not.
In a Unix shell the exit code is stored in one byte, so when a cmd exits
with a negative number, the return code reported by the shell is actually
that value mod 256. So when we compare the results with expected values,
we need to take the expected value mod 256 to make them match.
For example: -1 is stored in two's complement as 11111111, which reads
back as 255. So for the command 'exit -1', the actual value you will get
from the shell is -1 % 256 = 255. If the expected value is set as '-1', we
can't compare the 2 values directly; (result == expected % 256) is the
right way to do it. Mod 256 has no effect on non-negative values under
256, which is desired. This is true on a 32 bit system, not verified on a
64 bit system yet.
Args:
one_script_dict: <dict> test case dictionary.
result: None/int based on the test return.
Returns:
Boolean: True if the test is not a pass.
"""
test_fail_flag = False
cmd = one_script_dict['TEST_SCRIPT']
if result is None:
# If it is timeout, None is returned.
logger.warn('Test: %s timeout' % cmd)
self.reporter.TestCaseReport(cmd, constants.TIMEOUT)
self.timeout += 1
test_fail_flag = True
elif result == one_script_dict['EXPECTED_RETURN']%256:
# This is a pass.
logger.info('Test: %s %d' % (cmd, result))
self.reporter.TestCaseReport(cmd, constants.PASS)
self.passed += 1
elif result == one_script_dict['ERROR']%256:
# This is a test error.
logger.warn('Test: %s %d' % (cmd, result))
self.reporter.TestCaseReport(cmd, constants.ERROR)
self.error += 1
test_fail_flag = True
else:
logger.warn('Test: %s %d' % (cmd, result))
self.reporter.TestCaseReport(cmd, constants.FAIL)
self.failed += 1
test_fail_flag = True
return test_fail_flag
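# Quick sanity check of the mod-256 rule above, in plain Python:
#   >>> -1 % 256
#   255
#   >>> 255 == -1 % 256
#   True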
@DEBUG
def _CommandStreamer(self, cmd, args, time_out):
"""Run the given command with a timeout.
This method will spawn a subshell to run the command and log the output to
the log_pipe.
Args:
cmd: <string> the sys command to execute
args: <string> the args to follow the command
time_out: <int> a time limit for this cmd in seconds
Returns:
the return code of the execution.
"""
logger.info('-----running test %s %s... with timeout:%s' % (cmd, args,
time_out))
# Back up current path.
current_path = os.getcwd()
# Run into a bug: if cmd = 'touch /pyrering.txt' and no args. Then
# os.path.split(cmd)[0] will return a head = 'touch' and try to
# ChDir(head), which would cause a problem. So in case the cmd already has
# some arguments embedded, split the cmd first.
head = os.path.split(cmd.split()[0])[0]
# Go to the script dir to run it locally.
if head:
self.filesystem.ChDir(head)
try:
# Now run the test and collect return code and output message.
ret, message = self.filesystem.RunCommandToLoggerWithTimeout(
cmd, time_out)
fatal_strings = global_settings.get('FATAL_STRING').split(',')
# If the screen output contains any FATAL_STRING, the test should be failed
# automatically, no matter what the return code is.
for line in message.splitlines():
if not ret:
for fatal_string in fatal_strings:
if fatal_string and fatal_string in line:
ret = -1
self.reporter.ExtraMessage('%s failed by fatal string:\n\t%s\n' %
(cmd, line))
logger.warn('%s failed by fatal string:\n\t%s' % (cmd, line))
break
else:
for catch_string in CATCHING_LIST:
# Catch suspicious output messages to log and reporter.
if catch_string.search(line):
self.reporter.ExtraMessage('%s:\n\t%s\n' % (cmd, line))
logger.warn('Caught one suspicious string: %s' % line)
break
logger.info('-----completed test %s %s with return code %s' % (cmd,
args,
ret))
# If file_errors is True, create a separate output file for each non zero
# return code.
if self.file_errors and ret != 0:
test_cmd = cmd.split()[0]
testcase = os.path.basename(test_cmd)
path = os.path.join(global_settings['report_dir'], testcase) + '.out'
self.reporter.SendTestOutput(path, testcase, message)
finally:
self.filesystem.ChDir(current_path)
return ret
@DEBUG
def GetFrameworkName(self):
"""Return the instance's name.
Returns:
the instance's name
"""
return self.name
@DEBUG
def GetFrameworkType(self):
"""Return the framework type which is the command to invoke the framework.
Returns:
the string command to invoke the framework
"""
return self.framework_type
@DEBUG
def CheckFramework(self):
"""Return True always."""
return True
| kdlucas/pyrering | lib/baserunner.py | Python | apache-2.0 | 18,344 |
from __future__ import division
import numpy as np
import math
import sys
import copy
from typing import Dict, Tuple, List
class NeuralNetwork:
"""
A neural network implementation that uses NumPy matrices to do most of the calculations
"""
def __init__(self, num_inputs, num_outputs, num_in_hidden_layer_1=None, num_in_hidden_layer_2=None):
"""
Assign the structure of the network.
Give the number of inputs, outputs, and number per hidden layer.
Then constructs the network.
"""
self.num_inputs = num_inputs
self.num_outputs = num_outputs
self.num_in_hidden_layer_1 = num_in_hidden_layer_1
self.num_in_hidden_layer_2 = num_in_hidden_layer_2
self.init()
def init(self):
"""
Allows the network to be reset in between runs.
Randomly generates weights that conform to the specified number of nodes in each layer.
Actually creates the weights for the network.
self.weights stores the weight matrices in order from input to output.
"""
num_inputs = self.num_inputs
num_outputs = self.num_outputs
num_in_hidden_layer_1 = self.num_in_hidden_layer_1
num_in_hidden_layer_2 = self.num_in_hidden_layer_2
self.layer_outputs = []
if num_in_hidden_layer_1 is None:
self.output_weight = np.random.rand(num_outputs, num_inputs + 1)
self.weights = np.array([self.output_weight])
elif num_in_hidden_layer_1 is not None and num_in_hidden_layer_2 is None:
self.layer1_weights = np.random.rand(num_in_hidden_layer_1, num_inputs + 1)
self.output_weight = np.random.rand(num_outputs, num_in_hidden_layer_1 + 1)
self.weights = np.array([self.layer1_weights, self.output_weight])
else:
self.layer1_weights = np.random.rand(num_in_hidden_layer_1, num_inputs + 1)
self.layer2_weights = np.random.rand(num_in_hidden_layer_2, num_in_hidden_layer_1 + 1)
self.output_weight = np.random.rand(num_outputs, num_in_hidden_layer_2 + 1)
self.weights = np.array([self.layer1_weights, self.layer2_weights, self.output_weight])
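# Shape illustration (hypothetical sizes): NeuralNetwork(2, 1, 3) builds
# layer1_weights with shape (3, 3), i.e. 3 hidden nodes by (2 inputs + 1
# bias), and output_weight with shape (1, 4), i.e. 1 output by (3 hidden
# + 1 bias).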
def estimate_output(self, input_vector):
"""
This is the forward pass of the network.
Applies the network to the input vector to get the estimated output.
Saves the outputs per layer so that backprop can be done afterwards when training.
"""
self.layer_outputs = []
input_value = np.copy(input_vector[:-1])
for layer in self.weights:
# Add Bias to the last position of the inputs for all layers
input_value = np.append(input_value, [1])
node_outputs = self.compute_node_value(input_value, layer)
self.layer_outputs.append(np.copy(node_outputs))
input_value = node_outputs
return copy.copy(self.layer_outputs[-1])
def compute_node_value(self, input_vector, layer):
"""
Computes the output value for a given layer given a vector input.
A layer is a layer of WEIGHTS, not nodes, which allows the matrix multiplication to work.
Applies the activation function to the nodes as well.
Hands back a vector of node outputs.
"""
result_vector = np.inner(input_vector, layer)
node_results = np.apply_along_axis(self.logistic_function, 0, result_vector)
return node_results
def logistic_function(self, value):
return 1/(1 + math.e**(-1*value))
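# Note: backprop never calls a separate derivative function; it relies on
# the identity sigma'(x) = sigma(x) * (1 - sigma(x)), which shows up as
# the y_hat * (1 - y_hat) factors in the error calculations below.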
def learn(self, data):
return self.learn_model(data)
def learn_model(self, data):
"""
Perform Gradient Descent to learn the weights for the nodes.
Does online training where backprop is done after each datapoint passes through the network.
Also ensures that the error is decreasing and not increasing.
"""
epsilon = .000001
previous_error = sys.maxsize
current_error = self.get_total_error_average(data)
ittor_count = 0
while (not self.can_stop(current_error, previous_error, epsilon)) and abs(current_error) < abs(previous_error):
for data_point in data:
node_outputs = self.estimate_output(data_point)
# do back prop to update weights
self.weights = self.back_prop(self.weights, node_outputs, np.array(self.layer_outputs), data_point[-1], data_point[:-1])
previous_error = current_error
current_error = self.get_total_error_average(data)
# if ittor_count % 1000 == 0:
# print("Count: {0} \n Current Error: {1}".format(ittor_count, current_error))
# print("Count: {0} \n Current Error: {1}".format(ittor_count, current_error))
ittor_count += 1
#
# if abs(current_error) > abs(previous_error):
# print("SOMETHING WENT WRONG")
# print("Current error: {}".format(abs(current_error)))
# print("previous error: {}".format(abs(previous_error)))
# print("Count: {0} \n Current Error: {1}".format(ittor_count, current_error))
return self.weights
def can_stop(self, current_error, previous_error, epsilon):
"""
Simple check to see if the network can stop
"""
if abs(current_error - previous_error) > epsilon:
return False
return True
def back_prop(self, weights: np.ndarray, node_outputs: np.ndarray, layer_outputs:np.ndarray,
actual_result, data_input):
"""
Main work horse for the back propagation algorithm.
First we calculate all of the errors for each layer (a matrix of those),
then apply the weight-update rule based on those errors.
"""
weight_error_matrix = self.calculate_layer_errors(weights, node_outputs, layer_outputs, actual_result)
new_weights = self.update_weights(np.copy(weights), data_input, layer_outputs, weight_error_matrix)
return new_weights
def calculate_layer_errors(self, weights: np.ndarray, node_outputs: np.ndarray, layer_outputs:np.ndarray,
actual_result):
"""
calculate the error terms for the different weight layers
output layer uses a different error calculation than the rest.
Errors are added to the matrix
Errors for hidden layer bias nodes are removed since they do not propagate backwards
"""
layer_errors = np.empty(len(weights), dtype=object)
for layer_index in range(len(weights)-1, -1, -1):
# if this is the output later, calculate the error differently
if layer_index == len(weights)-1:
# deltas
output_layer_weights = self.calculate_output_node_error(actual_result, node_outputs)
layer_errors[layer_index] = output_layer_weights
else:
# Get the error for a hidden layer.
# need all weights, the layer_index, the layer_outputs and the delta/error for the previous layer.
hidden_layer_error = self.calculate_hidden_node_error(weights[layer_index+1], layer_outputs[layer_index],
layer_errors[layer_index + 1])
# Remove the hidden layer error for the Bias node (IT IS NOT REUSED)
layer_errors[layer_index] = np.delete(hidden_layer_error, len(hidden_layer_error)-1)
return layer_errors
def calculate_hidden_node_error(self, previous_layer_weights: np.ndarray, layer_outputs: np.ndarray, layer_errors: np.ndarray):
"""
        Calculates the error for the hidden nodes.
        Takes into account the contribution to different output nodes.
        Then the derivative of the activation function is applied to the sum of the error contributions.
"""
# calc contribution to error
# contrib_to_error = layer_errors * weights
# contrib_to_error = np.sum(contrib_to_error, axis=1)
# contrib_to_error = np.inner(layer_errors, previous_layer_weights)
contrib_to_error = np.empty(len(previous_layer_weights[0]), dtype=object)
for node_index in range(len(previous_layer_weights[0])):
            # for the ith node in the hidden layer, get the ith weight for every node it fed into.
weights_that_contribed = previous_layer_weights[:, node_index]
theta_error = np.inner(layer_errors, weights_that_contribed)
contrib_to_error[node_index] = theta_error
        # layer_output_mults = y_hat * (1 - y_hat)
layer_output_with_bias = np.append(layer_outputs, [1]) # add BIAS back to layer outputs
layer_output_mults = np.apply_along_axis(lambda y_hat: y_hat * (1-y_hat), 0, layer_output_with_bias)
hidden_layer_errors = layer_output_mults * contrib_to_error
return hidden_layer_errors
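    # Illustrative sketch, not part of the original source: the value computed
    # above is the standard hidden-layer delta,
    #     delta_h = y_h * (1 - y_h) * sum_k(delta_k * w_kh),
    # where delta_k are the next layer's errors and w_kh the weights leading
    # from node h into node k. Assuming hypothetical values y_h = 0.6,
    # next-layer deltas [0.1, -0.2] and weights from h of [0.5, 0.25]:
    #     contribution = 0.1 * 0.5 + (-0.2) * 0.25 = 0.0
    #     delta_h = 0.6 * 0.4 * 0.0 = 0.0
    # so a node whose downstream error contributions cancel gets no update.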
    def update_weights(self, weights: np.ndarray, data_input: np.ndarray, layer_outputs: np.ndarray, weight_error_matrix: np.ndarray, alpha=.1):
"""
        Update weights given the error matrix.
        Each layer's weights are updated using the inputs that fed that layer: the previous layer's outputs, or the raw data input for the first layer.
"""
for layer_index in range(len(weights)-1, -1, -1):
layer_weights = weights[layer_index]
# get the previous layer INPUTS
if layer_index == 0:
layer_output = np.array(data_input)
else:
# get the previous layer INPUTS
layer_output = layer_outputs[layer_index - 1]
layer_error = weight_error_matrix[layer_index]
# Add BIAS to input values, in last position, as done in estimating
layer_output = np.append(layer_output, [1])
# weight_delta = layer_output * layer_error
layer_output = layer_output.reshape(1, len(layer_output))
layer_error = layer_error.reshape(1, len(layer_error))
weight_delta = layer_output * layer_error.transpose()
weight_delta = alpha * weight_delta
weights[layer_index] = layer_weights + weight_delta
return weights
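    # Illustrative sketch, not part of the original source: update_weights
    # applies the delta rule w_new = w + alpha * delta * input to every weight.
    # Assuming a hypothetical weight w = 0.3, alpha = 0.1, node error
    # delta = 0.032 and an input of 1.0 (e.g. the appended bias input):
    #     w_new = 0.3 + 0.1 * 0.032 * 1.0 = 0.3032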
def get_total_error_average(self, data: List[list]):
"""
        data[0][-1] are the expected values for the output of the network.
        For a network with more than 1 output node, this value will be an array.
        For a network with 1 output node, this value will be a single value corresponding to the actual output.
"""
if type(data[0][-1]) != list:
output_errors = np.zeros((len(data), 1))
else:
output_errors = np.zeros((len(data), len(data[0][-1])))
for index in range(len(data)):
node_outputs = self.estimate_output(data[index])
# error_per_output_node = self.calculate_output_node_error(data[index][-1], node_outputs)
squared_error = self.calc_output_node_squared_error(data[index][-1], node_outputs)
output_errors[index] = squared_error
error_totals = np.sum(output_errors, axis=0)
error_average = np.apply_along_axis(lambda error_total: error_total/len(data), 0, error_totals)
mean_summed_error = np.sum(error_average)
return abs(mean_summed_error)
def calc_output_node_squared_error(self, actual_output_vector: list, predicted_output_vector: np.ndarray):
"""
        Calculates the output node squared error.
        Error is calculated for all output nodes.
"""
if type(actual_output_vector) != list:
actual_output_vector = [actual_output_vector]
error_list = []
for actual_value, predicted_value in zip(actual_output_vector, predicted_output_vector):
error_list.append((actual_value - predicted_value)**2)
error_sum = sum(error_list) / float(2)
return error_sum
def calculate_output_node_error(self, actual_output_vector: list, predicted_output_vector: np.ndarray) -> np.ndarray:
"""
        Calculates the derivative of the error function and gives the error of the output node
that is to be used during UPDATING / backpropagation.
"""
if type(actual_output_vector) != list:
actual_output_vector = [actual_output_vector]
error_list = []
for actual_value, predicted_value in zip(actual_output_vector, predicted_output_vector):
error = predicted_value * (1 - predicted_value) * (actual_value - predicted_value)
error_list.append(error)
return np.array(error_list)
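    # Illustrative sketch, not part of the original source: the expression
    # above is the logistic output delta, delta = y_hat * (1 - y_hat) * (y - y_hat).
    # Assuming a hypothetical prediction y_hat = 0.8 against target y = 1:
    #     delta = 0.8 * 0.2 * 0.2 = 0.032
    # while calc_output_node_squared_error reports (1 - 0.8)**2 / 2 = 0.02
    # for the same pair.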
# <editor-fold desc="Classify">
def classify(self, nn_model, test_data):
"""
        Classify based on one-hot encoding, or a single value for a 1-node output.
        Uses a .5 cutoff to determine whether the value is part of the class or not.
        Works for both arrays and single values.
"""
self.weights = nn_model
results = []
for data_point in test_data:
estimate = self.estimate_output(data_point)
if len(estimate) == 1:
if estimate > .5:
results.append(1)
else:
results.append(0)
else:
max_index = np.array(estimate).argmax()
result = np.zeros(len(estimate))
result[max_index] = 1
results.append(result.tolist())
return results
# </editor-fold>
# <editor-fold desc="Preprocess Data">
"""
    Pre-processes the data for a given test run.
    The data is preprocessed by taking a positive class label, and modifying the in-memory data to replace the
    positive_class_name with a 1, and all other classification names with a 0, negative.
    This allows for easier binary classification.
    input:
    + data: list of feature vectors
    + positive_class_name: String, class to be the positive set.
"""
def pre_process(self, data, positive_class_name):
new_data = []
for record in data:
current_class = record[-1]
if current_class == positive_class_name:
record[-1] = 1
else:
record[-1] = 0
new_data.append(record)
return new_data
# </editor-fold>
# <editor-fold desc="Tests">
# nn = NeuralNetwork(num_inputs=2, num_outputs=1, num_in_hidden_layer_1=5, num_in_hidden_layer_2=2)
# nn = NeuralNetwork(num_inputs=2, num_outputs=1, num_in_hidden_layer_1=3)
# nn = NeuralNetwork(num_inputs=2, num_outputs=1)
# nn = NeuralNetwork(num_inputs=2, num_outputs=1, num_in_hidden_layer_1=3)
# input_vector = np.random.rand(5)
# input2 = np.ones((2, 2))
# layer = np.ones((1, 90))
# result = nn.compute_node_value(input_vector, layer)
# result = nn.estimate_output(input_vector)
# print(result)
# model = nn.learn_model(np.random.rand(1,5))
#
# print(model)
# nn.layer1_weights = np.array([[.26, -.42, .01], [.78, .19, -.05], [-.23, .37, .42]])
# nn.output_weight = np.array([[.61, .12, -.9, .2], [.28, -.34, .10, .3]])
# nn.weights = np.array([nn.layer1_weights, nn.output_weight])
#
# estimate = nn.estimate_output([.52, -.97, [1,0]])
# layer_outputs = nn.layer_outputs
#
# print(estimate)
# print(layer_outputs)
#
# new_weights = nn.back_prop(nn.weights, estimate, np.array(layer_outputs), [1, 0], [.52, -.97])
# print(new_weights)
# nn.learn_model([[.52, -.97, 1], [.6, -1, 1], [1, -.77, 1], [.3, -.31, 1]])
# </editor-fold>
| MaxRobinson/CS449 | project6/NeuralNetworkFast.py | Python | apache-2.0 | 15,693 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from selenium.webdriver.common import by
from openstack_horizon.test.integration_tests.pages import basepage
from openstack_horizon.test.integration_tests.pages import pageobject
class ChangePasswordPage(basepage.BasePage):
@property
def modal(self):
return ChangePasswordPage.ChangePasswordModal(self.driver,
self.conf)
def change_password(self, current, new):
self.fill_field_element(
current, self.modal.current_password)
self.fill_field_element(
new, self.modal.new_password)
self.fill_field_element(
new, self.modal.confirm_new_password)
self.modal.click_on_change_button()
def reset_to_default_password(self, current):
if self.topbar.user.text == self.conf.identity.admin_username:
return self.change_password(current,
self.conf.identity.admin_password)
else:
return self.change_password(current,
self.conf.identity.password)
class ChangePasswordModal(pageobject.PageObject):
_current_password_locator = (by.By.CSS_SELECTOR,
'input#id_current_password')
_new_password_locator = (by.By.CSS_SELECTOR,
'input#id_new_password')
_confirm_new_password_locator = (by.By.CSS_SELECTOR,
'input#id_confirm_password')
_change_submit_button_locator = (by.By.CSS_SELECTOR,
'div.modal-footer button.btn')
@property
def current_password(self):
return self.get_element(*self._current_password_locator)
@property
def new_password(self):
return self.get_element(*self._new_password_locator)
@property
def confirm_new_password(self):
return self.get_element(*self._confirm_new_password_locator)
@property
def change_button(self):
return self.get_element(*self._change_submit_button_locator)
def click_on_change_button(self):
self.change_button.click()
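# Illustrative usage sketch (assumed constructor arguments, inferred from the
# modal's instantiation above; not from the original source):
#     page = ChangePasswordPage(driver, conf)
#     page.change_password('old-secret', 'new-secret')
# fill_field_element and click_on_change_button then drive the inputs located
# by the CSS selectors declared on ChangePasswordModal.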
| mrunge/openstack_horizon | openstack_horizon/test/integration_tests/pages/changepasswordpage.py | Python | apache-2.0 | 2,977 |
import json
from pathlib import Path
from resolwe import process
from resolwe.process import (
Cmd,
DataField,
DirField,
FileField,
FloatField,
GroupField,
IntegerField,
JsonField,
ListField,
Persistence,
Process,
SchedulingClass,
StringField,
UrlField,
)
from resolwe.process.models import Collection, Data, Entity
from resolwe.process.models import Process as ProcessM
class EntityProcess(Process):
slug = "entity-process"
name = "Entity process"
data_name = "Data with entity"
version = "1.0.0"
process_type = "data:entity"
entity = {
"type": "sample",
}
class Output:
list_string = ListField(StringField(), label="My list")
optional = StringField("Optional output", required=False)
def run(self, inputs, outputs):
outputs.list_string = ["foo", "bar"]
class PythonProcess(Process):
"""This is a process description."""
slug = "test-python-process"
name = "Test Python Process"
version = "0.1.2"
process_type = "data:python"
category = "analyses"
scheduling_class = SchedulingClass.BATCH
persistence = Persistence.CACHED
data_name = "Foo: {{input_data | name}}"
entity = {
"type": "sample",
"descriptor_schema": "sample",
"input": "input_data",
}
requirements = {
"expression-engine": "jinja",
"executor": {
"docker": {
"image": "public.ecr.aws/s4q6j6e8/resolwe/base:ubuntu-20.04",
}
},
}
class Input:
"""Input fields."""
my_field = StringField(label="My field")
my_list = ListField(StringField(), label="My list")
input_data = DataField("test:save", label="My input data")
input_entity_data = DataField("entity", label="My entity data")
bar = DataField(data_type="test:save", label="My bar", relation_type="group")
baz = DataField(
data_type="test:save",
required=False,
label="My bar",
relation_type="group",
relation_npartitions=1,
)
baz_list = ListField(
DataField(data_type="test:save"),
required=False,
label="My baz list",
relation_type="group",
relation_npartitions=1,
)
url = UrlField(UrlField.DOWNLOAD, label="My URL")
integer = IntegerField(label="My integer", range=[0, 100])
my_float = FloatField(label="My float", range=[0.0, 1.0])
my_json = JsonField(label="Blah blah")
my_optional = StringField(
label="Optional", required=False, default="default value"
)
my_optional_no_default = StringField(
label="Optional no default", required=False
)
class MyGroup:
foo = IntegerField(label="Foo")
bar = StringField(label="Bar")
group_optional_no_default = StringField(
label="Group optional no default", required=False
)
class SubGroup:
foo = IntegerField(label="Foo", default=2)
subgroup = GroupField(SubGroup, label="Subgroup")
my_group = GroupField(MyGroup, label="My group")
class Output:
string_output = StringField(label="My string output")
list_string_output = ListField(StringField(), label="My list string output")
file_output = FileField(label="My output")
list_file_output = ListField(FileField(), label="My list output")
dir_output = DirField(label="My output")
input_data_name = StringField(label="Input data name")
input_entity_name = StringField(label="Input entity name")
docker_image = StringField(label="Docker image")
def run(self, inputs, outputs):
print("All inputs are:", inputs)
print("Input data:", inputs.input_data)
print("Input data ID:", inputs.input_data.id)
print("Input data file output:", inputs.input_data.output.saved_file.path)
print("Input data type:", inputs.input_data.type)
print("Input data descriptor:", inputs.input_data.descriptor)
print("Group bar:", inputs.my_group.bar)
print("Group foo:", inputs.my_group.foo)
print("Group subgroup foo: ", inputs.my_group.subgroup.foo)
print("Entity name of the input:", inputs.input_entity_data.entity.name)
print("Docker image:", self.requirements.executor.docker.image)
if inputs.my_optional:
print("My optional:", inputs.my_optional)
if inputs.my_optional_no_default:
raise AttributeError("inputs.my_optional_no_default should not exist.")
if inputs.my_group.group_optional_no_default:
raise AttributeError(
"inputs.my_group.group_optional_no_default should not exist."
)
if inputs.input_entity_data.output.optional:
raise AttributeError("inputs.list_string_output.optional should not exist.")
try:
inputs.invalid_input
except AttributeError as err:
if "Inputs have no field invalid_input" in str(err):
pass
try:
inputs.input_entity_data.output.invalid_field
except AttributeError as err:
if "DataField has no member invalid_field" in str(err):
pass
bar = Cmd["ls"]["-l", "-a", "/"] | Cmd["grep"]["python"]
print("hello world:\n", bar())
Cmd["mkdir"]["test"]()
(Cmd["echo"]['"Some content"'] > "test/testfile.txt")()
(Cmd["echo"]['"Some more content"'] > "testfile2.txt")()
outputs.file_output = "test/testfile.txt"
outputs.list_file_output = ["test/testfile.txt", "testfile2.txt"]
outputs.dir_output = "test/"
outputs.input_data_name = inputs.input_data.name
outputs.input_entity_name = inputs.input_entity_data.entity.name
outputs.docker_image = self.requirements.executor.docker.image
outputs.string_output = "OK"
outputs.list_string_output = ["foo", "bar"]
class PythonProcessGroup(Process):
"""Process with Group fields."""
slug = "test-python-process-group-field"
name = "Test Python Process for GroupField"
version = "0.1.2"
process_type = "data:python:group"
class Input:
"""Input fields."""
class MyGroup:
foo = IntegerField(label="Foo", required=False, default=42)
bar = StringField(label="Bar", required=False)
class MyGroup2:
foo = IntegerField(label="Foo", required=False)
class MySubGroup:
class SubGroup:
foo = IntegerField(label="Foo", default=2)
subgroup = GroupField(SubGroup, label="Subgroup foo")
my_group = GroupField(MyGroup, label="My group")
my_group2 = GroupField(
MyGroup2, label="My group2 that has all elements without defaults."
)
my_subgroup = GroupField(MySubGroup, label="My subgroup")
class Output:
"""Output fields."""
out_foo = IntegerField(label="Foo.", required=False)
out_bar = StringField(label="Bar.", required=False)
out_foo2 = IntegerField(label="Foo2.", required=False)
out_subgroup = IntegerField(label="SubGroupFoo", required=True)
def run(self, inputs, outputs):
if inputs.my_group.foo:
outputs.out_foo = inputs.my_group.foo
if inputs.my_group.bar:
outputs.out_bar = inputs.my_group.bar
if inputs.my_group2.foo:
outputs.out_foo2 = inputs.my_group2.foo
outputs.out_subgroup = inputs.my_subgroup.subgroup.foo
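# Illustrative sketch of the expected defaults behaviour (assumed semantics,
# not from the original source): running PythonProcessGroup with empty group
# inputs should surface the declared defaults, e.g. out_foo = 42 (MyGroup.foo)
# and out_subgroup = 2 (SubGroup.foo), while out_bar and out_foo2 stay unset
# because those fields declare no default.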
class PythonProcess2(process.Process):
"""Inherit from 'module.Class'."""
slug = "test-python-process-2"
name = "Test Python Process 2"
version = "0.0.1"
process_type = "data:python"
def run(self, inputs, outputs):
pass
class ErrorProcess(Process):
slug = "test-python-process-error"
name = "Test Python Process Error"
version = "0.0.1"
process_type = "data:python:error"
def run(self, inputs, outputs):
raise ValueError("Value error in ErrorProcess")
class AnnotateProcess(Process):
slug = "test-python-process-annotate-entity"
name = "Test Python Process Annotate Entity"
version = "0.0.1"
process_type = "data:python:annotate"
entity = {
"type": "sample",
}
def run(self, inputs, outputs):
annotations = {"general.description": "desc", "general.species": "Valid"}
self.update_entity_descriptor(annotations)
class FileProcess(Process):
slug = "test-python-process-file"
name = "Test Python Process File"
version = "0.0.1"
process_type = "data:python:file"
class Input:
"""Input fields."""
src = FileField(label="Input file")
class Output:
"""Input fields."""
dst = FileField(label="Output file")
def run(self, inputs, outputs):
file_name = inputs.src.import_file()
outputs.dst = file_name
class RequirementsProcess(Process):
slug = "test-python-process-requirements"
name = "Test Python Process Requirements"
version = "0.0.1"
process_type = "data:python:requirements"
requirements = {
"resources": {
"cores": 2,
"memory": 4096,
"storage": 200,
},
}
class Output:
"""Input fields."""
cores = IntegerField(label="Cores")
memory = IntegerField(label="Memory")
storage = IntegerField(label="Storage")
def run(self, inputs, outputs):
outputs.cores = self.requirements["resources"]["cores"]
outputs.memory = self.requirements["resources"]["memory"]
outputs.cores = self.requirements["resources"]["cores"]
outputs.storage = self.requirements["resources"]["storage"]
print("Cores:", outputs.cores)
print("Memory:", outputs.memory)
print("Storage:", outputs.storage)
class ProcessWithWorkflowInput(Process):
slug = "process-with-workflow-input"
name = "Process with workflow input"
version = "1.0.0"
process_type = "data:test"
class Input:
"""Input fields."""
data = DataField(data_type="", label="Data.")
def run(self, inputs, outputs):
pass
class ProcessWithChoicesInput(Process):
slug = "process-with-choices-input"
name = "Process with choices input"
version = "1.0.0"
process_type = "data:test"
class Input:
"""Input fields."""
string_input = StringField(
label="Input field",
allow_custom_choice=True,
choices=[
("foo", "foo"),
("bar", "bar"),
],
)
class Output:
"""Output fields."""
string_output = StringField(label="Output field")
def run(self, inputs, outputs):
outputs.string_output = inputs.string_input
class RelationsProcess(Process):
slug = "test-process-relations"
name = "Test Python Process relations"
version = "0.0.1"
process_type = "data:python:relations"
requirements = {
"relations": [{"type": "series"}],
}
class Input:
"""Input fields."""
data = ListField(DataField(data_type=""), label="Data.")
class Output:
"""Output fields."""
relation_id = IntegerField(label="Relation id")
relation_type = StringField(label="Relation type")
relation_ordered = StringField(label="Relation ordering")
relation_category = StringField(label="Relation category")
relation_unit = StringField(label="Relation unit")
relation_partition_label = StringField(label="Relation partition label")
relation_partition_position = IntegerField(label="Relation partition label")
def run(self, inputs, outputs):
# Access relation attributes
outputs.relation_id = inputs.data[0].relations[0].id
outputs.relation_type = inputs.data[0].relations[0].type
outputs.relation_ordered = str(inputs.data[0].relations[0].ordered)
outputs.relation_category = inputs.data[0].relations[0].category
outputs.relation_unit = inputs.data[0].relations[0].unit
# Access relation partition attributes
outputs.relation_partition_label = (
inputs.data[0].relations[0].partitions[0].label
)
outputs.relation_partition_position = (
inputs.data[0].relations[0].partitions[0].position
)
class DataNameProcess(Process):
slug = "data-name-process"
name = "Data name process"
data_name = "{{ data_input | name | default('?') }}"
version = "1.0.0"
process_type = "data:name"
requirements = {"expression-engine": "jinja"}
class Input:
data_input = DataField(data_type="", label="Data input")
class Output:
name = StringField(label="Name")
def run(self, inputs, outputs):
outputs.name = self.name
class CreateCollection(Process):
slug = "create-collection"
name = "Create collection"
data_name = "{{ data_input | name | default('?') }}"
version = "1.0.0"
process_type = "data:name"
requirements = {"expression-engine": "jinja"}
class Input:
collection_name = StringField(label="Collection name")
def run(self, inputs, outputs):
Collection.create(name=inputs.collection_name)
class GetLatestProcess(Process):
slug = "get-latest-process"
name = "Get process by slug"
data_name = "{{ data_input | name | default('?') }}"
version = "1.0.0"
process_type = "data:name"
requirements = {"expression-engine": "jinja"}
class Input:
process_slug = StringField(label="Process slug")
class Output:
process_pk = IntegerField(label="Process primary key")
def run(self, inputs, outputs):
process = ProcessM.get_latest(inputs.process_slug)
outputs.process_pk = process.id
class GetCollection(Process):
slug = "get-collection"
name = "Get collection"
data_name = "{{ data_input | name | default('?') }}"
version = "1.0.0"
process_type = "data:name"
requirements = {"expression-engine": "jinja"}
class Input:
collection_name = StringField(label="Collection name")
class Output:
collection_slug = StringField(label="Collection slug")
def run(self, inputs, outputs):
collection = Collection.get(name=inputs.collection_name)
outputs.collection_slug = collection.slug
class FilterCollection(Process):
slug = "filter-collection"
name = "Filter collection"
data_name = "{{ data_input | name | default('?') }}"
version = "1.0.0"
process_type = "data:name"
requirements = {"expression-engine": "jinja"}
class Input:
collection_name = StringField(label="Collection name")
class Output:
number_of_collections = IntegerField(label="Number of collections")
def run(self, inputs, outputs):
collections = Collection.filter(name=inputs.collection_name)
outputs.number_of_collections = len(collections)
class CreateData(Process):
slug = "create-data"
name = "Create data object"
data_name = "{{ data_input | name | default('?') }}"
version = "1.0.0"
process_type = "data:name"
requirements = {"expression-engine": "jinja"}
class Input:
data_name = StringField(label="Data name")
collection_name = StringField(label="Collection name")
def run(self, inputs, outputs):
process = ProcessM.get(slug="create-collection")
Data.create(
process=process,
name=inputs.data_name,
input={"collection_name": inputs.collection_name},
)
class CreateEntityTags(Process):
slug = "assign-entity-tags"
name = "Create entity tags"
data_name = "{{ data_input | name | default('?') }}"
version = "1.0.0"
process_type = "data:name"
requirements = {"expression-engine": "jinja"}
class Input:
data_name = StringField(label="Data name")
        sample_name = StringField(label="Sample name")
tags = ListField(StringField(), label="My tags")
def run(self, inputs, outputs):
sample = Entity.create(name=inputs.sample_name)
sample.tags = inputs.tags
self.data.name = inputs.data_name
self.data.entity = sample
class ChangeEntityName(Process):
slug = "change-entity-name"
name = "Rename entity"
data_name = "{{ data_input | name | default('?') }}"
version = "1.0.0"
process_type = "data:name"
requirements = {"expression-engine": "jinja"}
class Input:
entity_id = IntegerField(label="Entity id")
entity_name = StringField(label="New entity name")
def run(self, inputs, outputs):
entity = Entity.get(pk=inputs.entity_id)
entity.name = inputs.entity_name
class ChangeEntityDescriptor(Process):
slug = "change-entity-descriptor"
name = "Change entity descriptor"
data_name = "{{ data_input | name | default('?') }}"
version = "1.0.0"
process_type = "data:name"
requirements = {"expression-engine": "jinja"}
class Input:
entity_id = IntegerField(label="Entity id")
description = StringField(label="New description")
def run(self, inputs, outputs):
entity = Entity.get(pk=inputs.entity_id)
entity.descriptor = {"Description": inputs.description}
class TestStorage(Process):
slug = "storage-objects-test"
name = "Test working with storage objects"
data_name = "{{ data_input | name | default('?') }}"
version = "1.0.0"
process_type = "data:name"
requirements = {"expression-engine": "jinja"}
class Output:
output_string = JsonField(label="Output string")
output_file = JsonField(label="Output file")
def run(self, inputs, outputs):
# Test update of storage objects.
outputs.output_string = ["first", "valid", "json"]
outputs.output_string = ["valid", "json"]
        # Test storage objects are read from file (if it exists).
json_filename = "json_file.out"
Path(json_filename).write_text(json.dumps(["valid", "json", "file"]))
outputs.output_file = json_filename
| genialis/resolwe | resolwe/process/tests/processes/python_test.py | Python | apache-2.0 | 18,394 |
import json
import logging
import tornado.web
import tornado.gen
import tornado.iostream
from raven.contrib.tornado import SentryMixin
from waterbutler import tasks
from waterbutler.core import utils
from waterbutler.core import signing
from waterbutler.core import exceptions
from waterbutler.server import settings
from waterbutler.core import remote_logging
from waterbutler.server.auth import AuthHandler
from waterbutler.core.log_payload import LogPayload
from waterbutler.server import utils as server_utils
def list_or_value(value):
assert isinstance(value, list)
if len(value) == 0:
return None
if len(value) == 1:
        # Single value; decode it and return it directly
return value[0].decode('utf-8')
return [item.decode('utf-8') for item in value]
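# Illustrative sketch of list_or_value (assumed inputs, not from the original
# source). Tornado's query_arguments maps each key to a list of raw byte
# strings, so:
#     list_or_value([]) -> None
#     list_or_value([b'folder']) -> 'folder'
#     list_or_value([b'a', b'b']) -> ['a', 'b']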
logger = logging.getLogger(__name__)
auth_handler = AuthHandler(settings.AUTH_HANDLERS)
signer = signing.Signer(settings.HMAC_SECRET, settings.HMAC_ALGORITHM)
class BaseHandler(server_utils.CORsMixin, server_utils.UtilMixin, tornado.web.RequestHandler, SentryMixin):
"""Base Handler to inherit from when defining a new view.
    Handles CORS headers, additional status codes, and translating
:class:`waterbutler.core.exceptions.ProviderError`s into http responses
.. note::
        For IE compatibility, passing a ?method=<httpmethod> will cause that request, regardless of the
actual method, to be interpreted as the specified method.
"""
ACTION_MAP = {}
def write_error(self, status_code, exc_info):
self.captureException(exc_info)
etype, exc, _ = exc_info
if issubclass(etype, exceptions.PluginError):
self.set_status(int(exc.code))
if exc.data:
self.finish(exc.data)
else:
self.finish({
'code': exc.code,
'message': exc.message
})
elif issubclass(etype, tasks.WaitTimeOutError):
# TODO
self.set_status(202)
else:
self.finish({
'code': status_code,
'message': self._reason,
})
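# Illustrative sketch (assumed attribute values, not from the original source):
# a PluginError subclass whose instance carries code=404, message='Not found'
# and no data would be rendered by write_error above as an HTTP 404 response
# with body {"code": 404, "message": "Not found"}.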
class BaseProviderHandler(BaseHandler):
async def prepare(self):
self.arguments = {
key: list_or_value(value)
for key, value in self.request.query_arguments.items()
}
try:
self.arguments['action'] = self.ACTION_MAP[self.request.method]
except KeyError:
return
self.payload = await auth_handler.fetch(self.request, self.arguments)
self.provider = utils.make_provider(
self.arguments['provider'],
self.payload['auth'],
self.payload['credentials'],
self.payload['settings'],
)
self.path = await self.provider.validate_path(**self.arguments)
self.arguments['path'] = self.path # TODO Not this
def _send_hook(self, action, metadata=None, path=None):
source = LogPayload(self.arguments['nid'], self.provider, metadata=metadata, path=path)
remote_logging.log_file_action(action, source=source, api_version='v0',
request=remote_logging._serialize_request(self.request),
bytes_downloaded=self.bytes_downloaded,
bytes_uploaded=self.bytes_uploaded)
class BaseCrossProviderHandler(BaseHandler):
JSON_REQUIRED = False
async def prepare(self):
try:
self.action = self.ACTION_MAP[self.request.method]
except KeyError:
return
self.source_provider = await self.make_provider(prefix='from', **self.json['source'])
self.destination_provider = await self.make_provider(prefix='to', **self.json['destination'])
self.json['source']['path'] = await self.source_provider.validate_path(**self.json['source'])
self.json['destination']['path'] = await self.destination_provider.validate_path(**self.json['destination'])
async def make_provider(self, provider, prefix='', **kwargs):
payload = await auth_handler.fetch(
self.request,
dict(kwargs, provider=provider, action=self.action + prefix)
)
self.auth = payload
return utils.make_provider(provider, **payload)
@property
def json(self):
try:
return self._json
except AttributeError:
pass
try:
self._json = json.loads(self.request.body.decode('utf-8'))
except ValueError:
if self.JSON_REQUIRED:
raise Exception # TODO
self._json = None
return self._json
def _send_hook(self, action, metadata):
source = LogPayload(self.json['source']['nid'], self.source_provider,
path=self.json['source']['path'])
destination = LogPayload(self.json['destination']['nid'], self.destination_provider,
metadata=metadata)
remote_logging.log_file_action(action, source=source, destination=destination, api_version='v0',
request=remote_logging._serialize_request(self.request),
bytes_downloaded=self.bytes_downloaded,
bytes_uploaded=self.bytes_uploaded)
| Johnetordoff/waterbutler | waterbutler/server/api/v0/core.py | Python | apache-2.0 | 5,410 |
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import torch
try:
import torchmetrics
except ImportError:
raise ImportError("please install torchmetrics: pip install torchmetrics")
from abc import ABC, abstractmethod
def _unify_input_formats(preds, target):
if not (preds.ndim == target.ndim or preds.ndim == target.ndim + 1):
raise ValueError("preds the same or one more dimensions than targets")
if preds.ndim == target.ndim + 1:
preds = torch.argmax(preds, dim=-1)
if preds.ndim == target.ndim and preds.is_floating_point():
preds = (preds >= 0.5).long()
return preds, target
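# Illustrative sketch of _unify_input_formats (assumed shapes, not from the
# original source):
#     preds of shape [N, C] (class scores) with targets of shape [N]
#         -> argmax over the last dim yields integer predictions of shape [N]
#     floating-point preds of shape [N] with targets of shape [N]
#         -> thresholded at 0.5 into {0, 1} labels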
def _check_same_shape(preds, targets):
if preds.shape != targets.shape:
raise RuntimeError("preds and targets are expected to have the same shape")
class PytorchMetric(ABC):
"""
Base class for all pytorch metrics
"""
@abstractmethod
def __call__(self, preds, targets):
pass
@abstractmethod
def compute(self):
pass
class Accuracy(PytorchMetric):
"""Calculates how often predictions matches labels.
For example, if `y_true` is tensor([1, 2, 3, 4])_ and `y_pred` is tensor([0, 2, 3, 4])
then the accuracy is 3/4 or .75. If the weights were specified as
tensor([1, 1, 0, 0]) then the accuracy would be 1/2 or .5.
Usage:
```python
acc = Accuracy()
acc(torch.tensor([0, 2, 3, 4]), torch.tensor([1, 2, 3, 4]))
assert acc.compute() == 0.75
```
"""
def __init__(self):
self.correct = torch.tensor(0)
self.total = torch.tensor(0)
def __call__(self, preds, targets):
        preds, targets = _unify_input_formats(preds, targets)
        self.correct += torch.sum(torch.eq(preds, targets))
        self.total += targets.numel()
def compute(self):
return self.correct.float() / self.total
class SparseCategoricalAccuracy(PytorchMetric):
"""Calculates how often predictions matches integer labels.
For example, if `y_true` is tensor([[2], [1]]) and `y_pred` is
tensor([[0.1, 0.9, 0.8], [0.05, 0.95, 0]]) then the categorical accuracy is 1/2 or .5.
If the weights were specified as tensor([0.7, 0.3]) then the categorical accuracy
would be .3. You can provide logits of classes as `y_pred`, since argmax of
logits and probabilities are same.
Usage:
```python
acc = SparseCategoricalAccuracy()
acc(torch.tensor([[0.1, 0.9, 0.8], [0.05, 0.95, 0]]), torch.tensor([[2], [1]]))
assert acc.compute() == 0.5
```
"""
def __init__(self):
self.total = torch.tensor(0)
self.correct = torch.tensor(0)
def __call__(self, preds, targets):
batch_size = targets.size(0)
if preds.ndim == targets.ndim:
targets = torch.squeeze(targets, dim=-1)
preds = torch.argmax(preds, dim=-1)
preds = preds.type_as(targets)
self.correct += torch.sum(torch.eq(preds, targets))
self.total += batch_size
def compute(self):
return self.correct.float() / self.total
class CategoricalAccuracy(PytorchMetric):
"""Calculates how often predictions matches integer labels.
For example, if `y_true` is torch.tensor([[0, 0, 1], [0, 1, 0]]) and `y_pred` is
torch.tensor([[0.1, 0.9, 0.8], [0.05, 0.95, 0]]) then the categorical accuracy is 1/2 or .5.
If the weights were specified as tensor([0.7, 0.3]) then the categorical accuracy
would be .3. You can provide logits of classes as `y_pred`, since argmax of
logits and probabilities are same.
Usage:
```python
pred = torch.tensor([[0.1, 0.9, 0.8], [0.05, 0.95, 0]])
target = torch.tensor([[0, 0, 1], [0, 1, 0]])
cacc = CategoricalAccuracy()
cacc(pred, target)
```
"""
def __init__(self):
self.total = torch.tensor(0)
self.correct = torch.tensor(0)
def __call__(self, preds, targets):
batch_size = targets.size(0)
self.correct += torch.sum(
torch.eq(
torch.argmax(preds, dim=-1), torch.argmax(targets, dim=-1)))
self.total += batch_size
def compute(self):
return self.correct.float() / self.total
class BinaryAccuracy(PytorchMetric):
"""Calculates how often predictions matches labels.
For example, if `y_true` is tensor([1, 1, 0, 0]) and `y_pred` is tensor([0.98, 1, 0, 0.6])
then the binary accuracy is 3/4 or .75. If the weights were specified as
[1, 0, 0, 1] then the binary accuracy would be 1/2 or .5.
Usage:
```python
target = torch.tensor([1, 1, 0, 0])
pred = torch.tensor([0.98, 1, 0, 0.6])
bac = BinaryAccuracy()
bac(pred, target)
assert bac.compute() == 0.75
```
"""
def __init__(self):
self.total = torch.tensor(0)
self.correct = torch.tensor(0)
def __call__(self, preds, targets, threshold=0.5):
batch_size = targets.size(0)
threshold = torch.tensor(threshold)
self.correct += torch.sum(
torch.eq(
torch.gt(preds, threshold), targets))
self.total += batch_size
def compute(self):
return self.correct.float() / self.total
class Top5Accuracy(PytorchMetric):
"""Computes how often integer targets are in the top `K` predictions.
Usage:
```python
pred = torch.tensor([[0.1, 0.9, 0.8, 0.4, 0.5, 0.2],
[0.05, 0.95, 0, 0.4, 0.5, 0.2]])
target = torch.tensor([2, 2])
top5acc = Top5Accuracy()
top5acc(pred, target)
assert top5acc.compute() == 0.5
```
"""
def __init__(self):
self.total = torch.tensor(0)
self.correct = torch.tensor(0)
def __call__(self, preds, targets):
batch_size = targets.size(0)
_, preds = preds.topk(5, dim=-1, largest=True, sorted=True)
preds = preds.type_as(targets).t()
targets = targets.view(1, -1).expand_as(preds)
# torch.view requests Elements of tensors are stored
# as a long contiguous vector in memory.
# So need to call contiguous() before view().
self.correct += preds.eq(targets).contiguous().view(-1).sum()
self.total += batch_size
def compute(self):
return self.correct.float() / self.total
class MSE(PytorchMetric):
"""Computes the mean square error between labels and predictions.
    `loss = mean(square(y_true - y_pred), axis=-1)`
Usage:
```python
pred = torch.tensor([[1, -2], [1, 1]])
target = torch.tensor([[0, 1], [0, 1]])
m = MSE()
m(pred, target)
print(m.compute()) # tensor(2.7500)
```
"""
def __init__(self):
self.total = torch.tensor(0)
self.sum_squared_error = torch.tensor(0.0)
def __call__(self, preds, targets):
_check_same_shape(preds, targets)
self.sum_squared_error += torch.sum(torch.square(torch.sub(preds, targets)))
self.total += targets.numel()
def compute(self):
return self.sum_squared_error / self.total
class MAE(PytorchMetric):
"""Computes the mean absolute error between labels and predictions.
`loss = mean(abs(y_true - y_pred), axis=-1)`
Usage:
```python
pred = torch.tensor([[1, -2], [1, 1]])
target = torch.tensor([[0, 1], [0, 1]])
m = MAE()
m(pred, target)
print(m.compute()) # tensor(1.2500)
```
"""
def __init__(self):
self.total = torch.tensor(0)
self.sum_abs_error = torch.tensor(0.0)
def __call__(self, preds, targets):
_check_same_shape(preds, targets)
self.sum_abs_error += torch.sum(torch.abs(torch.sub(preds, targets)))
self.total += targets.numel()
def compute(self):
return self.sum_abs_error / self.total
class BinaryCrossEntropy(PytorchMetric):
"""Computes the crossentropy metric between the labels and predictions.
This is used when there are only two labels (0 and 1).
Usage:
```python
pred = torch.tensor([[0.6, 0.4], [0.4, 0.6]])
target = torch.tensor([[0, 1], [0, 0]])
entropy = BinaryCrossEntropy()
entropy(pred, target)
assert abs(entropy.compute() - 0.81492424) < 1e-6
```
"""
def __init__(self):
self.total = torch.tensor(0)
self.crossentropy = torch.tensor(0)
def __call__(self, preds, targets):
# Avoid problems with logarithm
epsilon = 1e-7
preds[preds <= 0] = epsilon
preds[preds >= 1] = 1 - epsilon
output_size = targets.view(-1).size(0)
self.crossentropy = self.crossentropy + \
(- targets * torch.log(preds) - (1-targets) * torch.log(1-preds)).view(-1).sum()
self.total += output_size
def compute(self):
return self.crossentropy.float() / self.total
class CategoricalCrossEntropy(PytorchMetric):
"""Computes the crossentropy metric between the labels and predictions.
    This is used when there are multiple labels. The labels should be in
the form of one-hot vectors.
Usage:
```python
pred = torch.tensor([[0.05, 0.95, 0], [0.1, 0.8, 0.1]])
target = torch.tensor([[0, 1, 0], [0, 0, 1]])
entropy = CategoricalCrossEntropy()
entropy(pred, target)
assert abs(entropy.compute() - 1.1769392) < 1e-6
```
"""
def __init__(self):
self.total = torch.tensor(0)
self.crossentropy = torch.tensor(0)
def __call__(self, preds, targets):
# Avoid problems with logarithm
epsilon = 1e-7
preds[preds <= 0] = epsilon
preds[preds >= 1] = 1 - epsilon
output_size = targets.size(0)
self.crossentropy = self.crossentropy + \
(-preds.log() * targets).sum()
self.total += output_size
def compute(self):
return self.crossentropy.float() / self.total
class SparseCategoricalCrossEntropy(PytorchMetric):
"""Computes the crossentropy metric between the labels and predictions.
    This is used when there are multiple labels. The labels should be in
the form of integers, instead of one-hot vectors.
Usage:
```python
pred = torch.tensor([[0.05, 0.95, 0], [0.1, 0.8, 0.1]])
target = torch.tensor([1, 2])
entropy = SparseCategoricalCrossEntropy()
entropy(pred, target)
assert abs(entropy.compute() - 1.1769392) < 1e-6
```
"""
def __init__(self):
self.total = torch.tensor(0)
self.crossentropy = torch.tensor(0)
def __call__(self, preds, targets):
# Avoid problems with logarithm
epsilon = 1e-7
preds[preds <= 0] = epsilon
preds[preds >= 1] = 1 - epsilon
output_size = targets.size(0)
self.crossentropy = self.crossentropy + \
(-preds.log() * torch.nn.functional.one_hot(targets)).sum()
self.total += output_size
def compute(self):
return self.crossentropy.float() / self.total
class KLDivergence(PytorchMetric):
"""Computes the Kullback-Liebler divergence metric between labels and
predictions.
Usage:
```python
pred = torch.tensor([[0.6, 0.4], [0.4, 0.6]])
target = torch.tensor([[0, 1], [0, 0]])
div = KLDivergence()
div(pred, target)
assert abs(div.compute() - 0.45814306) < 1e-6
```
"""
def __init__(self):
self.total = torch.tensor(0)
self.divergence = torch.tensor(0)
def __call__(self, preds, targets):
# Avoid problems with dividing zero
epsilon = 1e-7
_check_same_shape(preds, targets)
output_size = targets.size(0)
self.divergence = self.divergence + \
(targets * (targets / preds + epsilon).log()).sum()
self.total += output_size
def compute(self):
return self.divergence.float() / self.total
class Poisson(PytorchMetric):
"""Computes the Poisson metric between labels and
predictions.
Usage:
```python
pred = torch.tensor([[1, 1], [0, 0]])
target = torch.tensor([[0, 1], [0, 0]])
poisson = Poisson()
poisson(pred, target)
assert abs(poisson.compute() - 0.49999997) < 1e-6
```
"""
def __init__(self):
self.total = torch.tensor(0)
self.poisson = torch.tensor(0)
def __call__(self, preds, targets):
# Avoid problems with dividing zero
epsilon = 1e-7
_check_same_shape(preds, targets)
output_size = targets.view(-1).size(0)
self.poisson = self.poisson + \
(preds - targets * torch.log(preds + epsilon)).sum()
self.total += output_size
def compute(self):
return self.poisson.float() / self.total
class AUROC(PytorchMetric):
"""Computes the AUROC metric between labels and
predictions.
Usage:
```python
pred = torch.tensor([0.3, 0.4, 0.2, 0.5, 0.6, 0.7, 0.8])
target = torch.tensor([0, 1, 0, 1, 1, 1, 1.0])
auc = AUROC()
auc(pred, target)
assert (auc.compute() - 1.0) < 1e-6
```
"""
def __init__(self):
self.internal_auc = torchmetrics.AUROC()
def __call__(self, preds, targets):
self.internal_auc.update(preds, targets.to(torch.int64))
def compute(self):
return self.internal_auc.compute()
class ROC(PytorchMetric):
"""Computes the ROC metric between labels and
predictions.
Usage:
```python
>>> pred = torch.tensor([0.3, 0.6, 0.7, 0.8])
>>> target = torch.tensor([0, 1, 1, 1.0])
>>> auc = ROC()
>>> auc(pred, target)
>>> fpr, tpr, thresholds = auc.compute()
>>> fpr
tensor([0., 0., 0., 0., 1.])
>>> tpr
tensor([0.0000, 0.3333, 0.6667, 1.0000, 1.0000])
>>> thresholds
tensor([1.8000, 0.8000, 0.7000, 0.6000, 0.3000])
    ```
"""
def __init__(self):
self.internal_roc = torchmetrics.ROC()
def __call__(self, preds, targets):
self.internal_roc.update(preds, targets.to(torch.int64))
def compute(self):
return self.internal_roc.compute()
class F1Score(PytorchMetric):
"""Computes the F1score metric between labels and
predictions.
Usage:
```python
target = torch.tensor([0, 1, 2, 0, 1, 2])
preds = torch.tensor([0, 2, 1, 0, 0, 1])
f1 = F1Score()
f1(preds, target)
```
"""
def __init__(self):
self.internal_f1 = torchmetrics.F1Score()
def __call__(self, preds, targets):
self.internal_f1.update(preds, targets.to(torch.int64))
def compute(self):
return self.internal_f1.compute()
class Precision(PytorchMetric):
"""Computes the Precision metric between labels and
predictions.
Usage:
```python
target = torch.tensor([0, 1, 1, 0, 1, 1])
preds = torch.tensor([0, 0.2, 1.0, 0.8, 0.6, 0.5])
precision = Precision()
precision(preds, target)
assert (precision.compute() - 0.75 < 10e-6)
```
"""
def __init__(self):
self.internal_precision = torchmetrics.Precision()
def __call__(self, preds, targets):
self.internal_precision.update(preds, targets.to(torch.int64))
def compute(self):
return self.internal_precision.compute()
class Recall(PytorchMetric):
"""Computes the Recall metric between labels and
predictions.
Usage:
```python
target = torch.tensor([0, 1, 1, 0, 1, 1])
preds = torch.tensor([0, 0.2, 1.0, 0.8, 0.6, 0.5])
recall = Recall()
recall(preds, target)
assert (recall.compute() - 0.75 < 10e-6)
```
"""
def __init__(self):
self.internal_recall = torchmetrics.Recall()
def __call__(self, preds, targets):
self.internal_recall.update(preds, targets.to(torch.int64))
def compute(self):
return self.internal_recall.compute()
class PrecisionRecallCurve(PytorchMetric):
"""Computes the PrecisionRecallCurve metric between labels and
predictions.
Usage:
```python
>>> target = torch.tensor([0, 1, 1, 0, 1, 1])
>>> preds = torch.tensor([0, 0.2, 1.0, 0.8, 0.6, 0.5])
>>> curve = PrecisionRecallCurve()
>>> curve(preds, target)
>>> precision, recall, thresholds = curve.compute()
>>> precision
    tensor([0.8000, 0.7500, 0.6667, 0.5000, 1.0000, 1.0000])
>>> recall
tensor([1.0000, 0.7500, 0.5000, 0.2500, 0.2500, 0.0000])
>>> thresholds
    tensor([0.2000, 0.5000, 0.6000, 0.8000, 1.0000])
    ```
"""
def __init__(self):
self.internal_curve = torchmetrics.PrecisionRecallCurve()
def __call__(self, preds, targets):
self.internal_curve.update(preds, targets.to(torch.int64))
def compute(self):
return self.internal_curve.compute()
| intel-analytics/BigDL | python/orca/src/bigdl/orca/learn/pytorch/pytorch_metrics.py | Python | apache-2.0 | 17,141 |
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.nn.functional as F
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest, ExecutionMode
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_data_feed()
self.set_feed_attr()
self.set_op_attrs()
@property
def fp16_enabled(self):
return True
def set_data_feed(self):
data = np.random.uniform(size=[1, 3, 10, 10])
self.feed_fp32 = {'in_0': data.astype(np.float32)}
self.feed_fp16 = {'in_0': data.astype(np.float16)}
self.feed_list = list(self.feed_fp32.keys())
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed_fp32.values()]
self.feed_list = list(self.feed_fp32.keys())
self.feed_dtype = [x.dtype for x in self.feed_fp32.values()]
def set_op_attrs(self):
self.attrs = {"axis": -1}
def _test_base(self, exec_mode):
scope = paddle.static.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
main_prog.random_seed = self.SEED
startup_prog.random_seed = self.SEED
with paddle.static.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
x = paddle.static.data(
name=self.feed_list[0],
shape=self.feed_shape[0],
dtype='float32')
out = F.log_softmax(x, **self.attrs)
fetch_list = [out.name]
if exec_mode == ExecutionMode.CPU_FP32:
place = paddle.CPUPlace()
else:
place = paddle.IPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)
if exec_mode != ExecutionMode.CPU_FP32:
feed_list = self.feed_list
ipu_strategy = paddle.static.IpuStrategy()
ipu_strategy.set_graph_config(is_training=self.is_training)
if exec_mode == ExecutionMode.IPU_POPART_FP16:
ipu_strategy.set_precision_config(enable_fp16=True)
program = paddle.static.IpuCompiledProgram(
main_prog,
ipu_strategy=ipu_strategy).compile(feed_list, fetch_list)
else:
program = main_prog
feed = self.feed_fp32
if exec_mode > ExecutionMode.IPU_FP32:
feed = self.feed_fp16
result = exe.run(program, feed=feed, fetch_list=fetch_list)
return result[0]
def test(self):
output_dict = {}
for mode in ExecutionMode:
if mode > ExecutionMode.IPU_FP32 and not self.fp16_enabled:
break
output_dict[mode] = self._test_base(mode).flatten()
self.check(output_dict)
class TestCase1(TestBase):
    def set_op_attrs(self):
self.attrs = {"axis": 1}
if __name__ == "__main__":
unittest.main()
| PaddlePaddle/Paddle | python/paddle/fluid/tests/unittests/ipu/test_log_softmax_op_ipu.py | Python | apache-2.0 | 3,785 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
import uuid
from openstack.load_balancer.v2 import listener
IDENTIFIER = 'IDENTIFIER'
EXAMPLE = {
'admin_state_up': True,
'connection_limit': '2',
'default_pool_id': uuid.uuid4(),
'description': 'test description',
'id': IDENTIFIER,
'insert_headers': {"X-Forwarded-For": "true"},
'l7policies': [{'id': uuid.uuid4()}],
'loadbalancers': [{'id': uuid.uuid4()}],
'name': 'test_listener',
'project_id': uuid.uuid4(),
'protocol': 'TEST_PROTOCOL',
'protocol_port': 10,
'default_tls_container_ref': ('http://198.51.100.10:9311/v1/containers/'
'a570068c-d295-4780-91d4-3046a325db51'),
'sni_container_refs': [],
'created_at': '2017-07-17T12:14:57.233772',
'updated_at': '2017-07-17T12:16:57.233772',
'operating_status': 'ONLINE',
'provisioning_status': 'ACTIVE',
}
class TestListener(testtools.TestCase):
def test_basic(self):
test_listener = listener.Listener()
self.assertEqual('listener', test_listener.resource_key)
self.assertEqual('listeners', test_listener.resources_key)
self.assertEqual('/v2.0/lbaas/listeners', test_listener.base_path)
self.assertEqual('load-balancer', test_listener.service.service_type)
self.assertTrue(test_listener.allow_create)
self.assertTrue(test_listener.allow_get)
self.assertTrue(test_listener.allow_update)
self.assertTrue(test_listener.allow_delete)
self.assertTrue(test_listener.allow_list)
def test_make_it(self):
test_listener = listener.Listener(**EXAMPLE)
self.assertTrue(test_listener.is_admin_state_up)
self.assertEqual(EXAMPLE['connection_limit'],
test_listener.connection_limit)
self.assertEqual(EXAMPLE['default_pool_id'],
test_listener.default_pool_id)
self.assertEqual(EXAMPLE['description'], test_listener.description)
self.assertEqual(EXAMPLE['id'], test_listener.id)
self.assertEqual(EXAMPLE['insert_headers'],
test_listener.insert_headers)
self.assertEqual(EXAMPLE['l7policies'],
test_listener.l7_policies)
self.assertEqual(EXAMPLE['loadbalancers'],
test_listener.load_balancers)
self.assertEqual(EXAMPLE['name'], test_listener.name)
self.assertEqual(EXAMPLE['project_id'], test_listener.project_id)
self.assertEqual(EXAMPLE['protocol'], test_listener.protocol)
self.assertEqual(EXAMPLE['protocol_port'], test_listener.protocol_port)
self.assertEqual(EXAMPLE['default_tls_container_ref'],
test_listener.default_tls_container_ref)
self.assertEqual(EXAMPLE['sni_container_refs'],
test_listener.sni_container_refs)
self.assertEqual(EXAMPLE['created_at'], test_listener.created_at)
self.assertEqual(EXAMPLE['updated_at'], test_listener.updated_at)
self.assertEqual(EXAMPLE['provisioning_status'],
test_listener.provisioning_status)
self.assertEqual(EXAMPLE['operating_status'],
test_listener.operating_status)
| briancurtin/python-openstacksdk | openstack/tests/unit/load_balancer/test_listener.py | Python | apache-2.0 | 3,791 |
#!/usr/bin/env python
import datetime
import email
import imaplib
import psycopg2
import re
import smtplib
import sys
import time
import yaml
from email.mime.text import MIMEText
CHANGELOG_TIMELIMIT = datetime.timedelta(minutes=30)
def generate_changelog_name():
d = datetime.datetime.now()
offset = d.day % 7
if offset == 0:
offset = 7
d = d - datetime.timedelta(days=offset)
week_letter = ['A', 'B', 'C', 'D', 'E'][int((d.day-1)/7)]
return "CHANGELOG-%s%s" % (d.strftime("%Y%m"), week_letter)
with open('/email.yml') as email_yml:
email_credentials = yaml.load(email_yml)
latest_changelog = generate_changelog_name()
listserv_address = '[email protected]'
msg = MIMEText("GET ZOONIVERSE.%s" % latest_changelog)
msg['from'] = email_credentials['user']
msg['to'] = listserv_address
print "Requesting %s" % latest_changelog
s = smtplib.SMTP_SSL(email_credentials['smtp_host'])
s.login(email_credentials['user'], email_credentials['pass'])
s.sendmail(email_credentials['user'], [listserv_address], msg.as_string())
s.quit()
expected_subject = "File: ZOONIVERSE %s" % latest_changelog
print "Waiting for changelog to arrive",
changelog_cutoff = datetime.datetime.now() + CHANGELOG_TIMELIMIT
while True:
if datetime.datetime.now() > changelog_cutoff:
print "\nChangelog didn't arrive in time. Giving up."
sys.exit(1)
try:
M = imaplib.IMAP4_SSL(email_credentials['host'])
M.login(email_credentials['user'], email_credentials['pass'])
M.select()
typ, data = M.search(None, '(FROM "%s")' % listserv_address,
'(HEADER Subject "%s")' % expected_subject)
if len(data) == 0 or len(data[0]) == 0:
print ".",
sys.stdout.flush()
time.sleep(60)
continue
num = data[0]
typ, data = M.fetch(num, '(RFC822)')
message = email.message_from_string(data[0][1])
changelog = message.as_string()
M.store(num, '+FLAGS', '\\Deleted')
break
finally:
M.close()
M.logout()
print ""
changes = {}
for line in changelog.split('\n'):
m = re.match(
r'(?P<timestamp>\d{14}) (?P<action>\w+) (?P<email>[^\s]+).*',
line
)
if not m:
continue
    timestamp, action, email_addr = m.groups()
    changes.setdefault(action, set()).add(email_addr)
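# Illustrative sketch of a changelog line the regex above would match (assumed
# format, not taken from a real changelog):
#     20150610120000 SIGNOFF user@example.com
# which records 'user@example.com' under the 'SIGNOFF' action.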
removed_addresses = (
list(changes.get('AUTODEL', [])) + list(changes.get('SIGNOFF', []))
)
removed_addresses = map(lambda s: s.lower(), removed_addresses)
print "Unsubscribing: "
for e in removed_addresses:
print "* %s" % e
with open('/database.yml') as db_yaml:
db_credentials = yaml.load(db_yaml)
prod = db_credentials['production']
conn = psycopg2.connect(
host=prod['host'], user=prod['username'], password=prod['password'],
dbname=prod['database']
)
try:
cur = conn.cursor()
cur.execute(
("UPDATE users SET global_email_communication = FALSE "
"WHERE LOWER(email) = ANY(%s)"),
(removed_addresses,)
)
conn.commit()
print "Updated %d matching rows." % cur.rowcount
finally:
cur.close()
conn.close()
| zooniverse/jiscmail-bounces | process_bounces.py | Python | apache-2.0 | 3,188 |
#!/usr/bin/python
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from StringIO import StringIO
import difflib
import os
import sys
import time
from conary.lib import util
from conary_test import recipes
from rbuild_test import rbuildhelp
class CheckoutTest(rbuildhelp.CommandTest):
def assertEquals(self, v1, v2):
try:
rbuildhelp.CommandTest.assertEquals(self, v1, v2)
except AssertionError:
for line in difflib.unified_diff(
StringIO(v1).readlines(),
StringIO(v2).readlines(),
"v1",
"v2",
):
sys.stdout.write(line)
raise
def testCheckoutNoOption(self):
self.openRepository()
self.initProductDirectory('foo')
# bogus top level group with nothing relevant in it.
self.addCollection('group-dist', ['simple:run'])
os.chdir('foo/devel')
txt = self.runCommand('checkout package')
self.assertEquals(txt, "Created new package 'package' in './package'\n")
os.chdir('package')
assert('@NEW@' in open('CONARY').read())
def testCheckoutShadow(self):
self.openRepository()
self.addComponent('simple:source',
[('simple.recipe', recipes.simpleRecipe)])
self.addComponent('simple:runtime')
self.addCollection('simple', [':runtime'])
trv = self.addCollection('group-dist', ['simple'])
self.initProductDirectory('foo')
os.chdir('foo/devel')
txt = self.runCommand('checkout simple', exitCode=1)
expectedText = '\n'.join((
'error: The upstream source provides a version of this package.',
'Please specify:',
' --shadow to shadow this package',
' --derive to derive from it',
' --new to replace it with a new version',
''))
assert txt == expectedText
txt = self.runCommand('checkout simple --shadow')
self.assertEquals(txt, "Shadowed package 'simple' in './simple'\n")
os.chdir('simple')
assert('@NEW@' not in open('CONARY').read())
trv = self.findAndGetTrove('simple:source=localhost@foo:foo-1-devel')
self.assertEquals(str(trv.getVersion()),
'/localhost@rpl:linux//foo:foo-1-devel/1.0-1')
os.chdir('..')
util.rmtree('simple')
txt = self.runCommand('checkout simple')
self.assertEquals(txt, "Checked out existing package 'simple' in './simple'\n")
os.chdir('simple')
assert('@NEW@' not in open('CONARY').read())
def testDerive(self):
self.openRepository()
self.addComponent('simple:source',
[('simple.recipe', recipes.simpleRecipe),
('extrafile', 'foo'),
('subdir/foo', 'bar')])
self.addComponent('simple:runtime',
[('/some/file', 'contents\n')])
self.addCollection('simple', [':runtime'])
trv = self.addCollection('group-dist', ['simple'])
self.initProductDirectory('foo')
os.chdir('foo/devel')
txt = self.runCommand('checkout simple --derive')
self.assertEquals(txt, '''\
Shadowing simple=/localhost@rpl:linux/1.0-1-1[] onto localhost@foo:foo-1-devel
Derived 'simple' in '%s/foo/devel/simple' from simple=/localhost@rpl:linux/1.0-1-1[]
Edit the recipe to add your changes to the binary package.
''' %(self.workDir))
os.chdir('simple')
state = open('CONARY').read()
self.failIf('extrafile' in state,
'Extra files not removed from checkout')
assert(os.path.exists('_ROOT_'))
self.verifyFile('_ROOT_/some/file', 'contents\n')
def testCheckoutNew(self):
self.openRepository()
self.addComponent('simple:source',
[('simple.recipe', recipes.simpleRecipe)])
self.addComponent('simple:runtime')
self.addCollection('simple', [':runtime'])
self.addCollection('group-dist', ['simple'])
self.initProductDirectory('foo')
os.chdir('foo/devel')
txt = self.runCommand('checkout simple --new', stdin='Y\n')
self.failUnlessEqual(txt, "Do you want to replace the upstream "
"version? (Y/N): (Default: Y): "
"Created new package 'simple' in './simple'\n"
"warning: Package simple exists upstream.\n")
os.chdir('simple')
assert('@NEW@' in open('CONARY').read())
self.assertEquals(open('simple.recipe').read(),'''\
#
# Copyright (c) %s Test (http://bugzilla.rpath.com/)
#
class Simple(PackageRecipe):
name = 'simple'
version = ''
buildRequires = []
def setup(r):
pass
''' % time.localtime().tm_year)
def testCheckoutNewTemplate(self):
self.openRepository()
self.addComponent('simple:source',
[('simple.recipe', recipes.simpleRecipe)])
self.addComponent('simple:runtime')
self.addCollection('simple', [':runtime'])
self.addCollection('group-dist', ['simple'])
self.initProductDirectory('foo')
os.chdir('foo/devel')
#Do templates exist in the environment?
txt = self.runCommand('checkout simple --new --template=rpath',
stdin='Y\n')
self.failUnlessEqual(txt, "Do you want to replace the upstream "
"version? (Y/N): (Default: Y): "
"Created new package 'simple' in './simple'\n"
"warning: Package simple exists upstream.\n")
os.chdir('simple')
assert('@NEW@' in open('CONARY').read())
self.assertEquals(open('simple.recipe').read(),'''\
#
# Copyright (c) %s rPath, Inc.
# This file is distributed under the terms of the MIT License.
# A copy is available at http://www.rpath.com/permanent/mit-license.html
#
class Simple(PackageRecipe):
name = 'simple'
version = ''
buildRequires = []
def setup(r):
pass
''' % time.localtime().tm_year)
def testCheckoutTemplate(self):
self.openRepository()
self.initProductDirectory('foo')
# bogus top level group with nothing relevant in it.
self.addCollection('group-dist', ['simple:run'])
os.chdir('foo/devel')
txt = self.runCommand('checkout package --template=rpath')
self.assertEquals(txt, "Created new package 'package' in './package'\n")
os.chdir('package')
assert('@NEW@' in open('CONARY').read())
self.assertEquals(open('package.recipe').read(),'''\
#
# Copyright (c) %s rPath, Inc.
# This file is distributed under the terms of the MIT License.
# A copy is available at http://www.rpath.com/permanent/mit-license.html
#
class Package(PackageRecipe):
name = 'package'
version = ''
buildRequires = []
def setup(r):
pass
''' % time.localtime().tm_year)
def testCheckoutGroup(self):
self.openRepository()
self.initProductDirectory('foo')
# bogus top level group with nothing relevant in it.
self.addCollection('group-dist', ['simple:run'])
os.chdir('foo/devel')
txt = self.runCommand('checkout group-foo')
self.assertEquals(txt, "Created new package 'group-foo' in './group-foo'\n")
os.chdir('group-foo')
assert('@NEW@' in open('CONARY').read())
self.assertEquals(open('group-foo.recipe').read(),"""\
#
# Copyright (c) %s Test (http://bugzilla.rpath.com/)
#
class GroupFoo(GroupSetRecipe):
name = 'group-foo'
version = ''
checkPathConflicts = True
depCheck = True
imageGroup = True
# packages to be added to group
packages = []
def makeSearchPath(r):
'''
Constructs a search path using the buildLabel and the product
definition, if available. If additional search paths are required,
add them to the sps list below
'''
# add additional search paths
sps = []
buildLabel = r.cfg.buildLabel
repo = r.Repository(buildLabel, r.flavor)
if 'productDefinitionSearchPath' in r.macros:
for specs in r.macros.productDefinitionSearchPath.split('\\n'):
if isinstance(specs, basestring):
specs = [specs]
sps.append(repo.find(*specs))
return r.SearchPath(repo, *sps)
def setup(r):
sp = r.makeSearchPath()
packages = sp.find(*r.packages)
if r.depCheck:
# Checks against upstream searchpaths
deps = packages.depsNeeded(sp)
if r.imageGroup:
# For a bootable image (hopefully)
packages += deps
r.Group(packages, checkPathConflicts=r.checkPathConflicts)
""" % time.localtime().tm_year)
def testCheckoutGroupAppliance(self):
self.openRepository()
self.initProductDirectory('foo')
# bogus top level group with nothing relevant in it.
self.addCollection('group-dist', ['simple:run'])
os.chdir('foo/devel')
txt = self.runCommand('checkout group-foo-appliance')
self.assertEquals(txt, "Created new package 'group-foo-appliance' in './group-foo-appliance'\n")
os.chdir('group-foo-appliance')
assert('@NEW@' in open('CONARY').read())
self.assertEquals(open('group-foo-appliance.recipe').read(), '''\
#
# Copyright (c) %s Test (http://bugzilla.rpath.com/)
#
loadSuperClass("group-set-appliance=centos6.rpath.com@rpath:centos-6-common")
class GroupFooAppliance(GroupSetAppliance):
name = "group-foo-appliance"
version = ""
buildRequires = []
# add additional search path groups here
additionalSearchPath = []
def addPackages(r):
"""
Here is where you define your appliance by manipulating the
packages included in the appliance and scripts that are run.
Packages may be added, removed or replaced
Add application packages by calling r.add("pkgname")
Remove packages from the appliance by calling r.remove("pkgname")
Replace upstream packages by calling r.replace("pkgname")
Scripts may be added by calling the appropriate method with the
text of the script. The available methods are:
r.addPostInstallScript(txt)
r.addPostUpdateScript(txt)
r.addPostRollbackScript(txt)
r.addPreInstallScript(txt)
r.addPreUpdateScript(txt)
r.addPreRollbackScript(txt)
"""
pass
''' % time.localtime().tm_year)
| sassoftware/rbuild | rbuild_test/functionaltest/commandbasedtest/checkouttest.py | Python | apache-2.0 | 11,234 |
# -*- coding: utf-8 -*-
from __future__ import (
print_function,
division,
unicode_literals,
absolute_import
)
# Local imports.
from natsort.compat.py23 import PY_VERSION, cmp_to_key
# On Python 2, build the strxfrm function from strcoll.
# locale's strxfrm can be buggy (especially on BSD-based systems),
# so prefer icu if available.
try:
import icu
from locale import getlocale
null_string = b''
def dumb_sort():
return False
# If using icu, get the locale from the current global locale.
def get_icu_locale():
try:
return icu.Locale('.'.join(getlocale()))
except TypeError: # pragma: no cover
return icu.Locale()
def get_strxfrm():
return icu.Collator.createInstance(get_icu_locale()).getSortKey
def get_thousands_sep():
sep = icu.DecimalFormatSymbols.kGroupingSeparatorSymbol
return icu.DecimalFormatSymbols(get_icu_locale()).getSymbol(sep)
def get_decimal_point():
sep = icu.DecimalFormatSymbols.kDecimalSeparatorSymbol
return icu.DecimalFormatSymbols(get_icu_locale()).getSymbol(sep)
except ImportError:
import locale
if PY_VERSION < 3:
from locale import strcoll
strxfrm = cmp_to_key(strcoll)
null_string = strxfrm('')
else:
from locale import strxfrm
null_string = ''
# On some systems, locale is broken and does not sort in the expected
# order. We will try to detect this and compensate.
def dumb_sort():
return strxfrm('A') < strxfrm('a')
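    # Illustrative check (derived from the code above): in the C locale,
    # strcoll compares bytewise, so 'A' (65) sorts before 'a' (97) and the
    # locale is flagged as "dumb":
    #   locale.setlocale(locale.LC_ALL, 'C')
    #   dumb_sort()  # -> True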
def get_strxfrm():
return strxfrm
def get_thousands_sep():
sep = locale.localeconv()['thousands_sep']
# If this locale library is broken, some of the thousands separator
# characters are incorrectly blank. Here is a lookup table of the
# corrections I am aware of.
if dumb_sort():
try:
loc = '.'.join(locale.getlocale())
except TypeError: # No locale loaded, default to ','
return ','
return {'de_DE.ISO8859-15': '.',
'es_ES.ISO8859-1': '.',
'de_AT.ISO8859-1': '.',
'de_at': '\xa0',
'nl_NL.UTF-8': '.',
'es_es': '.',
'fr_CH.ISO8859-15': '\xa0',
'fr_CA.ISO8859-1': '\xa0',
'de_CH.ISO8859-1': '.',
'fr_FR.ISO8859-15': '\xa0',
'nl_NL.ISO8859-1': '.',
'ca_ES.UTF-8': '.',
'nl_NL.ISO8859-15': '.',
'de_ch': "'",
'ca_es': '.',
'de_AT.ISO8859-15': '.',
'ca_ES.ISO8859-1': '.',
'de_AT.UTF-8': '.',
'es_ES.UTF-8': '.',
'fr_fr': '\xa0',
'es_ES.ISO8859-15': '.',
'de_DE.ISO8859-1': '.',
'nl_nl': '.',
'fr_ch': '\xa0',
'fr_ca': '\xa0',
'de_DE.UTF-8': '.',
'ca_ES.ISO8859-15': '.',
'de_CH.ISO8859-15': '.',
'fr_FR.ISO8859-1': '\xa0',
'fr_CH.ISO8859-1': '\xa0',
'de_de': '.',
'fr_FR.UTF-8': '\xa0',
'fr_CA.ISO8859-15': '\xa0',
}.get(loc, sep)
else:
return sep
def get_decimal_point():
return locale.localeconv()['decimal_point']
| NixaSoftware/CVis | venv/lib/python2.7/site-packages/natsort/compat/locale.py | Python | apache-2.0 | 3,591 |
def add_new_idx_to_hand(base_idx, new_idx, add_to_sensor, const):
if add_new(base_idx, new_idx, const, 'thumb', 'all'):
pass
elif add_new(base_idx, new_idx, const, 'finger_1', 'all'):
pass
elif add_new(base_idx, new_idx, const, 'finger_2', 'all'):
pass
elif add_new(base_idx, new_idx, const, 'finger_3', 'all'):
pass
elif add_new(base_idx, new_idx, const, 'finger_4', 'all'):
pass
elif add_new(base_idx, new_idx, const, 'wrist', 'all'):
if add_new(base_idx, new_idx, const, 'wrist', 'flex'):
pass
elif add_new(base_idx, new_idx, const, 'wrist', 'imu'):
pass
elif add_new(base_idx, new_idx, const, 'palm', 'all'):
pass
else:
print "(basis) fatal: hand index not found {}".format(base_idx)
# then add back to sensor:
if add_to_sensor:
if add_new(base_idx, new_idx, const, 'flex', 'all'):
if add_new(base_idx, new_idx, const, 'flex', 'row_1'):
pass
elif add_new(base_idx, new_idx, const, 'flex', 'row_2'):
pass
elif add_new(base_idx, new_idx, const, 'pressure'):
pass
elif add_new(base_idx, new_idx, const, 'accel'):
pass
elif add_new(base_idx, new_idx, const, 'gyro'):
pass
elif add_new(base_idx, new_idx, const, 'magnetometer'):
pass
elif add_new(base_idx, new_idx, const, 'lin_accel'):
pass
else:
print "(basis) fatal: sensor index not found {}".format(new_idx)
def add_new(base_idx, new_idx, const, parent_field, field=None):
    if field is None:
return add_new_direct(base_idx, new_idx, const, parent_field)
else:
        return add_new_hierarchy(base_idx, new_idx, const, parent_field, field)
def add_new_hierarchy(base_idx, new_idx, const, parent_field, field):
if base_idx in const.raw_indices[parent_field][field]:
if not new_idx in const.raw_indices[parent_field][field]:
const.raw_indices[parent_field][field].append(new_idx)
return True
return False
def add_new_direct(base_idx, new_idx, const, field):
if base_idx in const.raw_indices[field]:
if not new_idx in const.raw_indices[field]:
const.raw_indices[field].append(new_idx)
return True
return False
def add_new_idx_of_feature_to_hand(sensor_idx, feature_idx, const, debug_header, debug_feature):
    # first add to part of hand:
if add(sensor_idx, feature_idx, const, 'thumb', 'all'):
pass
elif add(sensor_idx, feature_idx, const, 'finger_1', 'all'):
pass
elif add(sensor_idx, feature_idx, const, 'finger_2', 'all'):
pass
elif add(sensor_idx, feature_idx, const, 'finger_3', 'all'):
pass
elif add(sensor_idx, feature_idx, const, 'finger_4', 'all'):
pass
elif add(sensor_idx, feature_idx, const, 'wrist','all'):
if add(sensor_idx, feature_idx, const, 'wrist','flex'):
pass
elif add(sensor_idx, feature_idx, const, 'wrist','imu'):
pass
elif add(sensor_idx, feature_idx, const, 'palm', 'all'):
pass
else:
print "(feature) fatal: hand index not found {} (new: {})".format(sensor_idx, feature_idx)
print "(feature) fatal: header: {} (feature: {})".format(debug_header, debug_feature)
# then add back to sensor:
if add(sensor_idx, feature_idx, const, 'flex','all'):
if add(sensor_idx, feature_idx, const, 'flex','row_1'):
pass
elif add(sensor_idx, feature_idx, const, 'flex','row_2'):
pass
elif add(sensor_idx, feature_idx, const, 'pressure'):
pass
elif add(sensor_idx, feature_idx, const, 'accel'):
pass
elif add(sensor_idx, feature_idx, const, 'gyro'):
pass
elif add(sensor_idx, feature_idx, const, 'magnetometer'):
pass
elif add(sensor_idx, feature_idx, const, 'lin_accel'):
pass
elif add(sensor_idx, feature_idx, const, 'direction_cosine'):
pass
elif add(sensor_idx, feature_idx, const, 'absolute_froce'):
pass
elif add(sensor_idx, feature_idx, const, 'absolute_lin_froce'):
pass
else:
print "(feature) fatal: sensor index not found {} (new: {})".format(sensor_idx, feature_idx)
print "(feature) fatal: header: {} (feature: {})".format(debug_header, debug_feature)
def add(sensor_idx, feature_idx, const, parent_field, field=None):
    if field is None:
return add_direct(sensor_idx, feature_idx, const, parent_field)
else:
        return add_hierarchy(sensor_idx, feature_idx, const, parent_field, field)
def add_hierarchy(sensor_idx, feature_idx, const, parent_field, field):
if sensor_idx in const.raw_indices[parent_field][field]:
if not feature_idx in const.feature_indices[parent_field][field]:
const.feature_indices[parent_field][field].append(feature_idx)
return True
return False
def add_direct(sensor_idx, feature_idx, const, field):
if sensor_idx in const.raw_indices[field]:
if not feature_idx in const.feature_indices[field]:
const.feature_indices[field].append(feature_idx)
return True
return False
def dict_feature_sortet(feature_indexes, const):
    # first add to part of hand:
dict = const.index_dict()
for feature_idx in feature_indexes:
if feature_idx in const.feature_indices['thumb']['all']:
dict['thumb']['all'].append(feature_idx)
elif feature_idx in const.feature_indices['finger_1']['all']:
dict['finger_1']['all'].append(feature_idx)
elif feature_idx in const.feature_indices['finger_2']['all']:
dict['finger_2']['all'].append(feature_idx)
elif feature_idx in const.feature_indices['finger_3']['all']:
dict['finger_3']['all'].append(feature_idx)
elif feature_idx in const.feature_indices['finger_4']['all']:
dict['finger_4']['all'].append(feature_idx)
elif feature_idx in const.feature_indices['wrist']['all']:
dict['wrist']['all'].append(feature_idx)
if feature_idx in const.feature_indices['wrist']['flex']:
dict['wrist']['flex'].append(feature_idx)
elif feature_idx in const.feature_indices['wrist']['imu']:
dict['wrist']['imu'].append(feature_idx)
elif feature_idx in const.feature_indices['palm']['all']:
dict['palm']['all'].append(feature_idx)
else:
print "(trace) fatal: hand index not found {}".format(feature_idx)
print "(trace) fatal: header: {}".format(const.feature_headers[feature_idx])
# then add back to sensor:
if feature_idx in const.feature_indices['flex']['all']:
dict['flex']['all'].append(feature_idx)
if feature_idx in const.feature_indices['flex']['row_1']:
dict['flex']['row_1'].append(feature_idx)
elif feature_idx in const.feature_indices['flex']['row_2']:
dict['flex']['row_2'].append(feature_idx)
elif feature_idx in const.feature_indices['pressure']:
dict['pressure'].append(feature_idx)
elif feature_idx in const.feature_indices['accel']:
dict['accel'].append(feature_idx)
elif feature_idx in const.feature_indices['gyro']:
dict['gyro'].append(feature_idx)
elif feature_idx in const.feature_indices['magnetometer']:
dict['magnetometer'].append(feature_idx)
elif feature_idx in const.feature_indices['lin_accel']:
dict['lin_accel'].append(feature_idx)
else:
print "(trace) fatal: sensor index not found {}".format(feature_idx)
print "(trace) fatal: header: {}".format(const.feature_headers[feature_idx])
return dict | joergsimon/gesture-analysis | utils/index_management.py | Python | apache-2.0 | 7,884 |
#!/usr/bin/env python
import logging
import argparse
import sqlline
parser = argparse.ArgumentParser()
parser.add_argument('--debug', '-d', action='store_true')
parser.add_argument('url')
args = parser.parse_args()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
with sqlline.SqlLine() as sqlline:
sqlline.connect('pyphoenix', args.url)
sqlline.connection.autocommit = True
sqlline.run()
| Pirionfr/pyPhoenix | examples/shell.py | Python | apache-2.0 | 416 |
# stdlib
from typing import Dict
# third party
from fastapi import FastAPI
from fastapi.responses import JSONResponse
from starlette.middleware.cors import CORSMiddleware
# grid absolute
from grid.api.router import api_router
from grid.core.config import settings
from grid.logger.handler import get_log_handler
app = FastAPI(
title=settings.PROJECT_NAME,
openapi_url=f"{settings.API_V1_STR}/openapi.json",
)
app.add_event_handler("startup", get_log_handler().init_logger)
# Set all CORS enabled origins
if settings.BACKEND_CORS_ORIGINS:
app.add_middleware(
CORSMiddleware,
allow_origins=[str(origin) for origin in settings.BACKEND_CORS_ORIGINS],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(api_router, prefix=settings.API_V1_STR)
# needed for Google Kubernetes Engine LoadBalancer Healthcheck
@app.get(
"/",
name="healthcheck",
status_code=200,
response_class=JSONResponse,
)
def healthcheck() -> Dict[str, str]:
"""
    Currently, all service backends must satisfy either of the following
    requirements to pass the HTTP health checks sent to them by the GCE load
    balancer: 1. Respond with a 200 on '/' (the content does not matter), or
    2. Expose an arbitrary URL as a readiness probe on the pods backing the
    Service.
"""
return {"status": "ok"}
| OpenMined/PySyft | packages/grid/backend/grid/main.py | Python | apache-2.0 | 1,388 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.backend.jvm.subsystems.jvm_platform import JvmPlatform
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
class JavaTests(JvmTarget):
"""JUnit tests."""
def __init__(self, cwd=None, test_platform=None, payload=None, timeout=None,
extra_jvm_options=None, extra_env_vars=None, **kwargs):
"""
:param str cwd: working directory (relative to the build root) for the tests under this
target. If unspecified (None), the working directory will be controlled by junit_run's --cwd.
:param str test_platform: The name of the platform (defined under the jvm-platform subsystem) to
use for running tests (that is, a key into the --jvm-platform-platforms dictionary). If
unspecified, the platform will default to the same one used for compilation.
:param list extra_jvm_options: A list of key value pairs of jvm options to use when running the
tests. Example: ['-Dexample.property=1'] If unspecified, no extra jvm options will be added.
:param dict extra_env_vars: A map of environment variables to set when running the tests, e.g.
{ 'FOOBAR': 12 }. Using `None` as the value will cause the variable to be unset.
"""
self.cwd = cwd
payload = payload or Payload()
if extra_env_vars is None:
extra_env_vars = {}
for key, value in extra_env_vars.items():
if value is not None:
extra_env_vars[key] = str(value)
payload.add_fields({
'test_platform': PrimitiveField(test_platform),
'extra_jvm_options': PrimitiveField(tuple(extra_jvm_options or ())),
'extra_env_vars': PrimitiveField(tuple(extra_env_vars.items())),
})
self._timeout = timeout
super(JavaTests, self).__init__(payload=payload, **kwargs)
# TODO(John Sirois): These could be scala, clojure, etc. 'jvm' and 'tests' are the only truly
# applicable labels - fixup the 'java' misnomer.
self.add_labels('java', 'tests')
@property
def test_platform(self):
if self.payload.test_platform:
return JvmPlatform.global_instance().get_platform_by_name(self.payload.test_platform)
return self.platform
@property
def timeout(self):
return self._timeout
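# Example BUILD file usage (illustrative sketch; target names and field
# values are assumptions, not from the source):
#   java_tests(
#     name='tests',
#     sources=globs('*Test.java'),
#     dependencies=[':lib'],
#     cwd='src/java/example',
#     extra_jvm_options=['-Dexample.property=1'],
#     extra_env_vars={'FOOBAR': 12},
#   )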
| dturner-tw/pants | src/python/pants/backend/jvm/targets/java_tests.py | Python | apache-2.0 | 2,581 |
# Copyright 2015 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, types
from pathlib import PurePath
from .. import build
from .. import dependencies
from ..dependencies.misc import ThreadDependency
from .. import mesonlib
from .. import mlog
from . import ModuleReturnValue
from . import ExtensionModule
from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs
already_warned_objs = set()
class DependenciesHelper:
def __init__(self, state, name):
self.state = state
self.name = name
self.pub_libs = []
self.pub_reqs = []
self.priv_libs = []
self.priv_reqs = []
self.cflags = []
self.version_reqs = {}
self.link_whole_targets = []
def add_pub_libs(self, libs):
libs, reqs, cflags = self._process_libs(libs, True)
self.pub_libs = libs + self.pub_libs # prepend to preserve dependencies
self.pub_reqs += reqs
self.cflags += cflags
def add_priv_libs(self, libs):
libs, reqs, _ = self._process_libs(libs, False)
self.priv_libs = libs + self.priv_libs
self.priv_reqs += reqs
def add_pub_reqs(self, reqs):
self.pub_reqs += self._process_reqs(reqs)
def add_priv_reqs(self, reqs):
self.priv_reqs += self._process_reqs(reqs)
def _check_generated_pc_deprecation(self, obj):
if not hasattr(obj, 'generated_pc_warn'):
return
name = obj.generated_pc_warn[0]
if (name, obj.name) in already_warned_objs:
return
mlog.deprecation('Library', mlog.bold(obj.name), 'was passed to the '
'"libraries" keyword argument of a previous call '
'to generate() method instead of first positional '
'argument.', 'Adding', mlog.bold(obj.generated_pc),
'to "Requires" field, but this is a deprecated '
'behaviour that will change in a future version '
'of Meson. Please report the issue if this '
'warning cannot be avoided in your case.',
location=obj.generated_pc_warn[1])
already_warned_objs.add((name, obj.name))
def _process_reqs(self, reqs):
'''Returns string names of requirements'''
processed_reqs = []
for obj in mesonlib.unholder(mesonlib.listify(reqs)):
if not isinstance(obj, str):
FeatureNew.single_use('pkgconfig.generate requirement from non-string object', '0.46.0', self.state.subproject)
if hasattr(obj, 'generated_pc'):
self._check_generated_pc_deprecation(obj)
processed_reqs.append(obj.generated_pc)
elif hasattr(obj, 'pcdep'):
pcdeps = mesonlib.listify(obj.pcdep)
for d in pcdeps:
processed_reqs.append(d.name)
self.add_version_reqs(d.name, obj.version_reqs)
elif isinstance(obj, dependencies.PkgConfigDependency):
if obj.found():
processed_reqs.append(obj.name)
self.add_version_reqs(obj.name, obj.version_reqs)
elif isinstance(obj, str):
name, version_req = self.split_version_req(obj)
processed_reqs.append(name)
self.add_version_reqs(name, version_req)
elif isinstance(obj, dependencies.Dependency) and not obj.found():
pass
elif isinstance(obj, ThreadDependency):
pass
else:
raise mesonlib.MesonException('requires argument not a string, '
'library with pkgconfig-generated file '
'or pkgconfig-dependency object, '
'got {!r}'.format(obj))
return processed_reqs
def add_cflags(self, cflags):
self.cflags += mesonlib.stringlistify(cflags)
def _process_libs(self, libs, public):
libs = mesonlib.unholder(mesonlib.listify(libs))
processed_libs = []
processed_reqs = []
processed_cflags = []
for obj in libs:
shared_library_only = getattr(obj, 'shared_library_only', False)
if hasattr(obj, 'pcdep'):
pcdeps = mesonlib.listify(obj.pcdep)
for d in pcdeps:
processed_reqs.append(d.name)
self.add_version_reqs(d.name, obj.version_reqs)
elif hasattr(obj, 'generated_pc'):
self._check_generated_pc_deprecation(obj)
processed_reqs.append(obj.generated_pc)
elif isinstance(obj, dependencies.PkgConfigDependency):
if obj.found():
processed_reqs.append(obj.name)
self.add_version_reqs(obj.name, obj.version_reqs)
elif isinstance(obj, dependencies.InternalDependency):
if obj.found():
processed_libs += obj.get_link_args()
processed_cflags += obj.get_compile_args()
self._add_lib_dependencies(obj.libraries, obj.whole_libraries, obj.ext_deps, public, private_external_deps=True)
elif isinstance(obj, dependencies.Dependency):
if obj.found():
processed_libs += obj.get_link_args()
processed_cflags += obj.get_compile_args()
elif isinstance(obj, build.SharedLibrary) and shared_library_only:
# Do not pull dependencies for shared libraries because they are
# only required for static linking. Adding private requires has
# the side effect of exposing their cflags, which is the
# intended behaviour of pkg-config but force Debian to add more
# than needed build deps.
# See https://bugs.freedesktop.org/show_bug.cgi?id=105572
processed_libs.append(obj)
elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)):
processed_libs.append(obj)
# If there is a static library in `Libs:` all its deps must be
# public too, otherwise the generated pc file will never be
# usable without --static.
self._add_lib_dependencies(obj.link_targets,
obj.link_whole_targets,
obj.external_deps,
isinstance(obj, build.StaticLibrary) and public)
elif isinstance(obj, str):
processed_libs.append(obj)
else:
raise mesonlib.MesonException('library argument not a string, library or dependency object.')
return processed_libs, processed_reqs, processed_cflags
def _add_lib_dependencies(self, link_targets, link_whole_targets, external_deps, public, private_external_deps=False):
add_libs = self.add_pub_libs if public else self.add_priv_libs
# Recursively add all linked libraries
for t in link_targets:
# Internal libraries (uninstalled static library) will be promoted
# to link_whole, treat them as such here.
if t.is_internal():
self._add_link_whole(t, public)
else:
add_libs([t])
for t in link_whole_targets:
self._add_link_whole(t, public)
# And finally its external dependencies
if private_external_deps:
self.add_priv_libs(external_deps)
else:
add_libs(external_deps)
def _add_link_whole(self, t, public):
# Don't include static libraries that we link_whole. But we still need to
# include their dependencies: a static library we link_whole
# could itself link to a shared library or an installed static library.
# Keep track of link_whole_targets so we can remove them from our
# lists in case a library is link_with and link_whole at the same time.
# See remove_dups() below.
self.link_whole_targets.append(t)
self._add_lib_dependencies(t.link_targets, t.link_whole_targets, t.external_deps, public)
def add_version_reqs(self, name, version_reqs):
if version_reqs:
if name not in self.version_reqs:
self.version_reqs[name] = set()
# Note that pkg-config is picky about whitespace.
# 'foo > 1.2' is ok but 'foo>1.2' is not.
            # 'foo, bar' is ok, but 'foo,bar' is not.
new_vreqs = [s for s in mesonlib.stringlistify(version_reqs)]
self.version_reqs[name].update(new_vreqs)
def split_version_req(self, s):
for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
pos = s.find(op)
if pos > 0:
return s[0:pos].strip(), s[pos:].strip()
return s, None
def format_vreq(self, vreq):
# vreq are '>=1.0' and pkgconfig wants '>= 1.0'
for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
if vreq.startswith(op):
return op + ' ' + vreq[len(op):]
return vreq
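    # Illustrative behaviour (derived from the two helpers above):
    #   split_version_req('foo >= 1.2') -> ('foo', '>= 1.2')
    #   format_vreq('>=1.0') -> '>= 1.0'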
def format_reqs(self, reqs):
result = []
for name in reqs:
vreqs = self.version_reqs.get(name, None)
if vreqs:
result += [name + ' ' + self.format_vreq(vreq) for vreq in vreqs]
else:
result += [name]
return ', '.join(result)
def remove_dups(self):
# Set of ids that have already been handled and should not be added any more
exclude = set()
# We can't just check if 'x' is excluded because we could have copies of
# the same SharedLibrary object for example.
def _ids(x):
if hasattr(x, 'generated_pc'):
yield x.generated_pc
if isinstance(x, build.Target):
yield x.get_id()
yield x
# Exclude 'x' in all its forms and return if it was already excluded
def _add_exclude(x):
was_excluded = False
for i in _ids(x):
if i in exclude:
was_excluded = True
else:
exclude.add(i)
return was_excluded
# link_whole targets are already part of other targets, exclude them all.
for t in self.link_whole_targets:
_add_exclude(t)
def _fn(xs, libs=False):
# Remove duplicates whilst preserving original order
result = []
for x in xs:
# Don't de-dup unknown strings to avoid messing up arguments like:
# ['-framework', 'CoreAudio', '-framework', 'CoreMedia']
known_flags = ['-pthread']
cannot_dedup = libs and isinstance(x, str) and \
not x.startswith(('-l', '-L')) and \
x not in known_flags
if not cannot_dedup and _add_exclude(x):
continue
result.append(x)
return result
# Handle lists in priority order: public items can be excluded from
# private and Requires can excluded from Libs.
self.pub_reqs = _fn(self.pub_reqs)
self.pub_libs = _fn(self.pub_libs, True)
self.priv_reqs = _fn(self.priv_reqs)
self.priv_libs = _fn(self.priv_libs, True)
# Reset exclude list just in case some values can be both cflags and libs.
exclude = set()
self.cflags = _fn(self.cflags)
class PkgConfigModule(ExtensionModule):
def _get_lname(self, l, msg, pcfile):
# Nothing special
if not l.name_prefix_set:
return l.name
# Sometimes people want the library to start with 'lib' everywhere,
# which is achieved by setting name_prefix to '' and the target name to
# 'libfoo'. In that case, try to get the pkg-config '-lfoo' arg correct.
if l.prefix == '' and l.name.startswith('lib'):
return l.name[3:]
# If the library is imported via an import library which is always
# named after the target name, '-lfoo' is correct.
if isinstance(l, build.SharedLibrary) and l.import_filename:
return l.name
# In other cases, we can't guarantee that the compiler will be able to
# find the library via '-lfoo', so tell the user that.
mlog.warning(msg.format(l.name, 'name_prefix', l.name, pcfile))
return l.name
def _escape(self, value):
'''
We cannot use quote_arg because it quotes with ' and " which does not
work with pkg-config and pkgconf at all.
'''
# We should always write out paths with / because pkg-config requires
# spaces to be quoted with \ and that messes up on Windows:
# https://bugs.freedesktop.org/show_bug.cgi?id=103203
if isinstance(value, PurePath):
value = value.as_posix()
return value.replace(' ', r'\ ')
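    # Illustrative behaviour (derived from the code above):
    #   _escape('C:/Program Files') -> r'C:/Program\ Files'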
def _make_relative(self, prefix, subdir):
if isinstance(prefix, PurePath):
prefix = prefix.as_posix()
if isinstance(subdir, PurePath):
subdir = subdir.as_posix()
try:
if os.path.commonpath([prefix, subdir]) == prefix:
skip = len(prefix) + 1
subdir = subdir[skip:]
except ValueError:
pass
return subdir
def generate_pkgconfig_file(self, state, deps, subdirs, name, description,
url, version, pcfile, conflicts, variables,
uninstalled=False, dataonly=False):
coredata = state.environment.get_coredata()
if uninstalled:
outdir = os.path.join(state.environment.build_dir, 'meson-uninstalled')
if not os.path.exists(outdir):
os.mkdir(outdir)
prefix = PurePath(state.environment.get_build_dir())
srcdir = PurePath(state.environment.get_source_dir())
else:
outdir = state.environment.scratch_dir
prefix = PurePath(coredata.get_option(mesonlib.OptionKey('prefix')))
# These always return paths relative to prefix
libdir = PurePath(coredata.get_option(mesonlib.OptionKey('libdir')))
incdir = PurePath(coredata.get_option(mesonlib.OptionKey('includedir')))
fname = os.path.join(outdir, pcfile)
with open(fname, 'w', encoding='utf-8') as ofile:
if not dataonly:
ofile.write('prefix={}\n'.format(self._escape(prefix)))
if uninstalled:
ofile.write('srcdir={}\n'.format(self._escape(srcdir)))
ofile.write('libdir={}\n'.format(self._escape('${prefix}' / libdir)))
ofile.write('includedir={}\n'.format(self._escape('${prefix}' / incdir)))
if variables:
ofile.write('\n')
for k, v in variables:
ofile.write('{}={}\n'.format(k, self._escape(v)))
ofile.write('\n')
ofile.write('Name: %s\n' % name)
if len(description) > 0:
ofile.write('Description: %s\n' % description)
if len(url) > 0:
ofile.write('URL: %s\n' % url)
ofile.write('Version: %s\n' % version)
reqs_str = deps.format_reqs(deps.pub_reqs)
if len(reqs_str) > 0:
ofile.write(f'Requires: {reqs_str}\n')
reqs_str = deps.format_reqs(deps.priv_reqs)
if len(reqs_str) > 0:
ofile.write(f'Requires.private: {reqs_str}\n')
if len(conflicts) > 0:
ofile.write('Conflicts: {}\n'.format(' '.join(conflicts)))
def generate_libs_flags(libs):
msg = 'Library target {0!r} has {1!r} set. Compilers ' \
'may not find it from its \'-l{2}\' linker flag in the ' \
'{3!r} pkg-config file.'
Lflags = []
for l in libs:
if isinstance(l, str):
yield l
else:
if uninstalled:
install_dir = os.path.dirname(state.backend.get_target_filename_abs(l))
else:
install_dir = l.get_custom_install_dir()[0]
if install_dir is False:
continue
if 'cs' in l.compilers:
if isinstance(install_dir, str):
Lflag = '-r${{prefix}}/{}/{}'.format(self._escape(self._make_relative(prefix, install_dir)), l.filename)
else: # install_dir is True
Lflag = '-r${libdir}/%s' % l.filename
else:
if isinstance(install_dir, str):
Lflag = '-L${prefix}/%s' % self._escape(self._make_relative(prefix, install_dir))
else: # install_dir is True
Lflag = '-L${libdir}'
if Lflag not in Lflags:
Lflags.append(Lflag)
yield Lflag
lname = self._get_lname(l, msg, pcfile)
# If using a custom suffix, the compiler may not be able to
# find the library
if l.name_suffix_set:
mlog.warning(msg.format(l.name, 'name_suffix', lname, pcfile))
if 'cs' not in l.compilers:
yield '-l%s' % lname
def get_uninstalled_include_dirs(libs):
result = []
for l in libs:
if isinstance(l, str):
continue
if l.get_subdir() not in result:
result.append(l.get_subdir())
for i in l.get_include_dirs():
curdir = i.get_curdir()
for d in i.get_incdirs():
path = os.path.join(curdir, d)
if path not in result:
result.append(path)
return result
def generate_uninstalled_cflags(libs):
for d in get_uninstalled_include_dirs(libs):
for basedir in ['${prefix}', '${srcdir}']:
path = os.path.join(basedir, d)
yield '-I%s' % self._escape(path)
if len(deps.pub_libs) > 0:
ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(deps.pub_libs))))
if len(deps.priv_libs) > 0:
ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs))))
cflags = []
if uninstalled:
cflags += generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs)
else:
for d in subdirs:
if d == '.':
cflags.append('-I${includedir}')
else:
cflags.append(self._escape(PurePath('-I${includedir}') / d))
cflags += [self._escape(f) for f in deps.cflags]
if cflags and not dataonly:
ofile.write('Cflags: {}\n'.format(' '.join(cflags)))
@FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['uninstalled_variables'])
@FeatureNewKwargs('pkgconfig.generate', '0.42.0', ['extra_cflags'])
@FeatureNewKwargs('pkgconfig.generate', '0.41.0', ['variables'])
@FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['dataonly'])
@permittedKwargs({'libraries', 'version', 'name', 'description', 'filebase',
'subdirs', 'requires', 'requires_private', 'libraries_private',
'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions',
'dataonly', 'conflicts', 'uninstalled_variables'})
def generate(self, state, args, kwargs):
default_version = state.project_version['version']
default_install_dir = None
default_description = None
default_name = None
mainlib = None
default_subdirs = ['.']
if not args and 'version' not in kwargs:
FeatureNew.single_use('pkgconfig.generate implicit version keyword', '0.46.0', state.subproject)
elif len(args) == 1:
FeatureNew.single_use('pkgconfig.generate optional positional argument', '0.46.0', state.subproject)
mainlib = getattr(args[0], 'held_object', args[0])
if not isinstance(mainlib, (build.StaticLibrary, build.SharedLibrary)):
raise mesonlib.MesonException('Pkgconfig_gen first positional argument must be a library object')
default_name = mainlib.name
default_description = state.project_name + ': ' + mainlib.name
install_dir = mainlib.get_custom_install_dir()[0]
if isinstance(install_dir, str):
default_install_dir = os.path.join(install_dir, 'pkgconfig')
elif len(args) > 1:
raise mesonlib.MesonException('Too many positional arguments passed to Pkgconfig_gen.')
dataonly = kwargs.get('dataonly', False)
if dataonly:
default_subdirs = []
blocked_vars = ['libraries', 'libraries_private', 'require_private', 'extra_cflags', 'subdirs']
if len(set(kwargs) & set(blocked_vars)) > 0:
raise mesonlib.MesonException(f'Cannot combine dataonly with any of {blocked_vars}')
subdirs = mesonlib.stringlistify(kwargs.get('subdirs', default_subdirs))
version = kwargs.get('version', default_version)
if not isinstance(version, str):
raise mesonlib.MesonException('Version must be specified.')
name = kwargs.get('name', default_name)
if not isinstance(name, str):
raise mesonlib.MesonException('Name not specified.')
filebase = kwargs.get('filebase', name)
if not isinstance(filebase, str):
raise mesonlib.MesonException('Filebase must be a string.')
description = kwargs.get('description', default_description)
if not isinstance(description, str):
raise mesonlib.MesonException('Description is not a string.')
url = kwargs.get('url', '')
if not isinstance(url, str):
raise mesonlib.MesonException('URL is not a string.')
conflicts = mesonlib.stringlistify(kwargs.get('conflicts', []))
# Prepend the main library to public libraries list. This is required
# so dep.add_pub_libs() can handle dependency ordering correctly and put
# extra libraries after the main library.
libraries = mesonlib.extract_as_list(kwargs, 'libraries')
if mainlib:
libraries = [mainlib] + libraries
deps = DependenciesHelper(state, filebase)
deps.add_pub_libs(libraries)
deps.add_priv_libs(kwargs.get('libraries_private', []))
deps.add_pub_reqs(kwargs.get('requires', []))
deps.add_priv_reqs(kwargs.get('requires_private', []))
deps.add_cflags(kwargs.get('extra_cflags', []))
dversions = kwargs.get('d_module_versions', None)
if dversions:
compiler = state.environment.coredata.compilers.host.get('d')
if compiler:
deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None))
deps.remove_dups()
def parse_variable_list(vardict):
reserved = ['prefix', 'libdir', 'includedir']
variables = []
for name, value in vardict.items():
if name in reserved:
raise mesonlib.MesonException(f'Variable "{name}" is reserved')
variables.append((name, value))
return variables
variables = self.interpreter.extract_variables(kwargs, dict_new=True)
variables = parse_variable_list(variables)
pcfile = filebase + '.pc'
pkgroot = kwargs.get('install_dir', default_install_dir)
if pkgroot is None:
if mesonlib.is_freebsd():
pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('prefix')), 'libdata', 'pkgconfig')
else:
pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'pkgconfig')
if not isinstance(pkgroot, str):
raise mesonlib.MesonException('Install_dir must be a string.')
self.generate_pkgconfig_file(state, deps, subdirs, name, description, url,
version, pcfile, conflicts, variables,
False, dataonly)
res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), pcfile)], pkgroot, None, state.subproject)
variables = self.interpreter.extract_variables(kwargs, argname='uninstalled_variables', dict_new=True)
variables = parse_variable_list(variables)
pcfile = filebase + '-uninstalled.pc'
self.generate_pkgconfig_file(state, deps, subdirs, name, description, url,
version, pcfile, conflicts, variables,
uninstalled=True, dataonly=dataonly)
# Associate the main library with this generated pc file. If the library
# is used in any subsequent call to the generated, it will generate a
# 'Requires:' or 'Requires.private:'.
# Backward compatibility: We used to set 'generated_pc' on all public
# libraries instead of just the main one. Keep doing that but warn if
# anyone is relying on that deprecated behaviour.
if mainlib:
if not hasattr(mainlib, 'generated_pc'):
mainlib.generated_pc = filebase
else:
mlog.warning('Already generated a pkg-config file for', mlog.bold(mainlib.name))
else:
for lib in deps.pub_libs:
if not isinstance(lib, str) and not hasattr(lib, 'generated_pc'):
lib.generated_pc = filebase
location = state.current_node
lib.generated_pc_warn = [name, location]
return ModuleReturnValue(res, [res])
def initialize(*args, **kwargs):
return PkgConfigModule(*args, **kwargs)
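# Typical meson.build usage of this module (illustrative sketch; the library
# name and keyword values are assumptions, not from the source):
#   pkg = import('pkgconfig')
#   pkg.generate(mylib,
#     description : 'A demo library',
#     subdirs : 'mylib',
#     requires : ['glib-2.0 >= 2.40'],
#   )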
| pexip/meson | mesonbuild/modules/pkgconfig.py | Python | apache-2.0 | 27,270 |
import logging
import pynetbox
import requests
from django.conf import settings
class NetBox(object):
"""
Class used to interact with the NetBox API.
"""
logger = logging.getLogger("peering.manager.netbox")
def __init__(self, *args, **kwargs):
self.api = None
if settings.NETBOX_API:
# pynetbox adds /api on its own. strip it off here to maintain
# backward compatibility with earlier Peering Manager behavior
base_url = settings.NETBOX_API.strip("/")
if base_url.endswith("/api"):
base_url = base_url[:-3]
self.api = pynetbox.api(
base_url,
token=settings.NETBOX_API_TOKEN,
threading=settings.NETBOX_API_THREADING,
)
# Disable SSL verification on user request
if not settings.NETBOX_API_VERIFY_SSL:
self.api.http_session.verify = False
def get_devices(self):
"""
Return all devices found with the NetBox API.
"""
self.logger.debug(
f"calling dcim.devices.filter: role={settings.NETBOX_DEVICE_ROLES}"
)
return self.api.dcim.devices.filter(role=settings.NETBOX_DEVICE_ROLES)
def napalm(self, device_id, method):
"""
Runs the given NAPALM method on the device via the NetBox API.
"""
self.logger.debug(f"calling dcim.devices.get: {device_id}")
device = self.api.dcim.devices.get(device_id)
self.logger.debug(f"calling napalm: {method}")
result = device.napalm.list(method=method)
return next(result)[method]
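# Minimal usage sketch (assumes the NETBOX_* Django settings are configured;
# the device id below is an assumption):
#   nb = NetBox()
#   for device in nb.get_devices():
#       print(device.name)
#   facts = nb.napalm(device_id=1, method='get_facts')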
| respawner/peering-manager | netbox/api.py | Python | apache-2.0 | 1,658 |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import model
from google.appengine.api import users
from google.appengine.ext import ndb
def AuthUser(user):
"""Validates the passed in user.
Args:
    user: An App Engine / endpoints user object.
Returns:
The user model entity if the user is authorized, and None otherwise.
"""
if not user:
return None
user_db = model.AuthorizedUser.get_by_id(user.email())
if not user_db and users.is_current_user_admin():
# Admins can automatically become users.
logging.info('Adding a new admin user %s', user.email())
user_db = model.AuthorizedUser(id=user.email())
user_db.put()
return user_db
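# Illustrative use inside a Cloud Endpoints method (the exception choice is
# an assumption, not from the source):
#   user_db = AuthUser(endpoints.get_current_user())
#   if not user_db:
#     raise endpoints.UnauthorizedException('Access denied.')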
| google/pinotify | appengine/auth.py | Python | apache-2.0 | 1,262 |
# file test_binfile/test_eudora.py
#
# Copyright 2011 Emory University Libraries
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
from eulcommon.binfile import eudora
TEST_ROOT = os.path.dirname(__file__)
def fixture(fname):
return os.path.join(TEST_ROOT, 'fixtures', fname)
class TestEudora(unittest.TestCase):
def test_members(self):
fname = fixture('In.toc')
obj = eudora.Toc(fname)
self.assertEqual(obj.version, 1)
self.assertEqual(obj.name, 'In')
messages = list(obj.messages)
self.assertEqual(len(messages), 2)
# note: we don't actually test all of the fields here. it's not
# clear what a few of them actually are, so we only test the ones we
# know how to interpret.
self.assertTrue(isinstance(messages[0], eudora.Message))
self.assertEqual(messages[0].offset, 0)
self.assertEqual(messages[0].size, 1732)
self.assertEqual(messages[0].body_offset, 955)
self.assertEqual(messages[0].to, 'Somebody ')
self.assertEqual(messages[0].subject, 'Welcome')
# second message isn't *necessarily* immediately after first, but
# in this case it is.
self.assertEqual(messages[1].offset, 1732)
if __name__ == '__main__':
    unittest.main()
| emory-libraries/eulcommon | test/test_binfile/test_eudora.py | Python | apache-2.0 | 1,830 |
# -*- coding: iso-8859-15 -*-
__author__ = 'daniel'
import logging
from Transaction import Buy, Deposit, Dividend, Earning, Fee, Sell, Split, Tax, Transfer, Withdrawal
def _convert_units_by_transaction_type(transaction_type, units):
if transaction_type.lower().startswith(u'övf till'):
return -units
return units
def _transaction_is_transfer(transaction_type):
# print transaction_type, transaction_type.startswith(u'Överföring'),
# transaction_type.startswith(u'Övf')
return transaction_type.startswith(u'Överföring') or transaction_type.lower().startswith(u'övf')
def _ignore_transaction(account, transaction_type):
logging.debug("Transaction type: %s", transaction_type)
return account == "Paulina ISK" or "1455005" in transaction_type or "Roger" in transaction_type
def _transaction_is_buy(transaction_type):
if transaction_type == u"Köp":
return True
startswith_list = ["Teckningslikvid", "OMVANDLING", "BANCO SANT VP UTD",
"VP-UTD", "VPU AV MTG B", "Avknoppning", "Inl"]
for transaction_str in startswith_list:
if transaction_type.startswith(transaction_str):
return True
return False
class Parser:
def parse_row(self, date, account, transaction_type, description, units, price,
amount, fee, currency, isin=None):
logging.debug(account)
if date == "Datum" or account == "Konto" or "Paulina" in account:
logging.debug(account)
return None
logging.debug((date, account, transaction_type, description, units,
price, amount, fee, currency))
units = self.num(units)
price = self.num(price)
amount = self.num(amount)
fee = self.num(fee)
logging.debug("%s == %s => %s", transaction_type, u"Utdelning",
(transaction_type == u"Utdelning"))
if _ignore_transaction(account, transaction_type):
logging.debug("Ignoring transaction %s", [date, account, transaction_type, description,
units, price, amount, fee, currency])
return None
if transaction_type == u"Utdelning":
return Dividend(description, date, price, units)
elif _transaction_is_buy(transaction_type):
return Buy(description, date, price, units, amount)
elif transaction_type in (u"Sälj", u"Köp, rättelse"):
return Sell(description, date, price, units, amount)
elif transaction_type in (u"Split", u"Omvänd split", u"Övrigt"):
return Split(description, date, units)
elif _transaction_is_transfer(transaction_type):
units = _convert_units_by_transaction_type(transaction_type, units)
return Transfer(description, date, units)
elif transaction_type == u"Insättning":
return Deposit(date, amount)
elif transaction_type == u"Uttag":
return Withdrawal(date, amount)
elif transaction_type == u"Prelskatt utdelningar" or \
(transaction_type == u"Övrigt" and "källskatt" in description) or \
transaction_type.startswith(u"Utländsk källskatt") or \
transaction_type == u"Preliminärskatt" or \
(transaction_type == u"Övrigt" and description == u"Avkastningsskatt"):
return Tax(date, amount)
elif transaction_type == u"Övrigt" and description == u"Riskpremie":
return Fee(date, amount)
elif transaction_type == u"Räntor" or \
(
transaction_type == u"Övrigt" and description == u"Överföring ränta "
u"kapitalmedelskonto"):
return Earning(date, amount)
logging.error("Unknown transaction %s", [date, account, transaction_type, description,
units, price, amount, fee, currency])
return None
@staticmethod
def num(my_str):
try:
my_str = my_str.replace(',', '.')
if len(my_str) == 1:
my_str = my_str.replace('-', '0')
return int(my_str)
except ValueError:
logging.debug("Error when converting to int, trying float instead: %s", my_str)
return float(my_str)
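    # Illustrative behaviour of num() (derived from the code above):
    #   num('12')  -> 12
    #   num('3,5') -> 3.5
    #   num('-')   -> 0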
class AvanzaTransactionParser(Parser):
pass
| dahuuhad/Stocks | parser/Parser.py | Python | apache-2.0 | 4,457 |
# Copyright 2022 The Deluca Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shallow boundary module for first few time steps."""
import flax.linen as nn
class ShallowBoundaryModel(nn.Module):
"""Shallow boundary module."""
out_dim: int = 1
hidden_dim: int = 100
model_num: int = 0
@nn.compact
def __call__(self, x):
# need to flatten extra dimensions required by CNN and LSTM
x = x.squeeze()
x = nn.Dense(
features=self.hidden_dim,
use_bias=False,
name=f"shallow_fc{1}_model" + str(self.model_num),
)(
x)
x = nn.tanh(x)
x = nn.Dense(
features=self.out_dim,
use_bias=True,
name=f"shallow_fc{2}_model" + str(self.model_num))(
x)
return x.squeeze() # squeeze for consistent shape w/ boundary model output
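# Minimal usage sketch (the input shape is an assumption, not from the
# source):
#   import jax
#   import jax.numpy as jnp
#   model = ShallowBoundaryModel(out_dim=1, hidden_dim=100, model_num=0)
#   params = model.init(jax.random.PRNGKey(0), jnp.ones((1, 3)))
#   y = model.apply(params, jnp.ones((1, 3)))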
| google/deluca | deluca/lung/utils/nn/shallow_boundary_model.py | Python | apache-2.0 | 1,325 |
#!/usr/bin/env python
import base64
import rsa
import six
from st2common.runners.base_action import Action
class AwsDecryptPasswordData(Action):
def run(self, private_key, password_data):
# copied from:
# https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122
self.logger.debug("Decrypting password data using private_key")
value = password_data
if not value:
return ''
# Note: Somewhere in the param transformation pipeline line break and
        # carriage return characters get messed up
value = value.strip('\\r').strip('\\n')
self.logger.debug('Encrypted value: "%s"' % (value))
value = base64.b64decode(value)
try:
rsa_private_key = rsa.PrivateKey.load_pkcs1(six.b(private_key))
value = rsa.decrypt(value, rsa_private_key)
return value.decode('utf-8')
except Exception:
msg = ('Unable to decrypt password data using provided private key')
self.logger.debug(msg, exc_info=True)
raise ValueError(msg)
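# Illustrative CLI invocation (the action ref and instance id are
# assumptions, not from the source):
#   st2 run aws.decrypt_password_data \
#       private_key="$(cat key.pem)" \
#       password_data="$(aws ec2 get-password-data --instance-id i-123 \
#           --query PasswordData --output text)"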
| StackStorm/st2cd | actions/aws_decrypt_password_data.py | Python | apache-2.0 | 1,133 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
topology_lib_tcpdump communication library implementation.
"""
from __future__ import unicode_literals, absolute_import
from __future__ import print_function, division
from re import match
from re import search
from datetime import datetime
from time import sleep
def tcpdump_rate(enode, interface_name):
"""
Get packet rate in packets per second after capture has been run.
:param enode: Engine node to communicate with.
:type enode: topology.platforms.base.BaseNode
:param str interface_name: Interface name on which capture was run.
:rtype: int
:return: The rate of packets catured in packets per second.
"""
rate = 0
total_packets = 0
output = enode('cat /tmp/{}.cap | wc -l'.format(interface_name),
shell='bash')
total_lines = output.split("\n")[0]
for i in range(1, int(total_lines)):
cmd = ('tail -{line_num} /tmp/{interface_name}.cap |'
' head -1').format(line_num=i, interface_name=interface_name)
packet_info = enode(cmd, shell='bash')
if "packets captured" in packet_info:
total_packets = packet_info.split()[0]
time = match(r"^\d\d?:\d\d?:\d\d?\.\d+", packet_info)
if time:
fields = packet_info.split()
timestamp = datetime.strptime(fields[0],
'%H:%M:%S.%f').time()
break
msec = (timestamp.hour * 60 * 60 + timestamp.minute * 60 +
timestamp.second) * 1000 + (timestamp.microsecond / 1000)
rate = int(total_packets) * 1000 / msec
return rate
def tcpdump_capture_interface(enode, interface_name, capture_time,
options='', num_cpu_samples=0, namespace=None):
"""
Start packet capture using tcpdump.
:param enode: Engine node to communicate with.
:type enode: topology.platforms.base.BaseNode
:param str options: The filter options to be passed to tcpdump.
:param str interface_name: interface name.
:param int capture_time: Time in seconds to capture with tcpdump.
:param int num_cpu_samples: Number of CPU samples to get CPU utilization.
:param str namespace: The network namespace in which to run the capture.
:rtype: dict
:return: Dictionary of any metadata with information collected
during the capture.
"""
cmd = [
'tcpdump -D',
]
if namespace:
cmd.insert(0, 'ip netns exec {} '.format(namespace))
cmd_output = enode(' '.join(cmd), shell='bash')
interface_re = (r'(?P<linux_interface>\d)\.' + str(interface_name) +
                    r'\s\[Up, Running\]')
re_result = search(interface_re, cmd_output)
assert re_result
result = re_result.groupdict()
cmd = [
'tcpdump -ni ',
result['linux_interface'],
' ',
options,
' -ttttt > /tmp/',
interface_name,
'.cap 2>&1 &'
]
if namespace:
cmd.insert(0, 'ip netns exec {} '.format(namespace))
enode(''.join(cmd), shell='bash')
sleep(capture_time)
cpu_util = 0.0
if num_cpu_samples:
cmd = ('top -bn{num_samples}'
'| grep "Cpu(s)" | sed "s/.*: *\\([0-9.]*\)%* '
'us.*/\\1/"').format(num_samples=(num_cpu_samples + 1))
top_output = enode(cmd, shell='bash')
cpu_samples = top_output.split('\n')
if 'top' in cpu_samples[0]:
del cpu_samples[0]
del cpu_samples[0]
for cpu_us in cpu_samples:
if 'tcpdump' not in cpu_us:
cpu_util = cpu_util + float(cpu_us)
cpu_util = cpu_util/num_cpu_samples
enode('killall tcpdump &', shell='bash')
return {'cpu_util': cpu_util}
__all__ = [
'tcpdump_capture_interface',
'tcpdump_rate'
]
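# Minimal usage sketch (the node object and interface name are assumptions,
# not from the source):
#   meta = tcpdump_capture_interface(sw1, 'eth1', capture_time=10,
#                                    options='-c 1000', num_cpu_samples=3)
#   pps = tcpdump_rate(sw1, 'eth1')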
| vivekramamoorthy/topology_lib_tcpdump | lib/topology_lib_tcpdump/library.py | Python | apache-2.0 | 4,430 |
from __future__ import unicode_literals
from moto.core.responses import BaseResponse
from .models import iam_backend, User
class IamResponse(BaseResponse):
def attach_role_policy(self):
policy_arn = self._get_param('PolicyArn')
role_name = self._get_param('RoleName')
iam_backend.attach_role_policy(policy_arn, role_name)
template = self.response_template(ATTACH_ROLE_POLICY_TEMPLATE)
return template.render()
def detach_role_policy(self):
role_name = self._get_param('RoleName')
policy_arn = self._get_param('PolicyArn')
iam_backend.detach_role_policy(policy_arn, role_name)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name="DetachRolePolicyResponse")
def attach_user_policy(self):
policy_arn = self._get_param('PolicyArn')
user_name = self._get_param('UserName')
iam_backend.attach_user_policy(policy_arn, user_name)
template = self.response_template(ATTACH_USER_POLICY_TEMPLATE)
return template.render()
def detach_user_policy(self):
policy_arn = self._get_param('PolicyArn')
user_name = self._get_param('UserName')
iam_backend.detach_user_policy(policy_arn, user_name)
template = self.response_template(DETACH_USER_POLICY_TEMPLATE)
return template.render()
def create_policy(self):
description = self._get_param('Description')
path = self._get_param('Path')
policy_document = self._get_param('PolicyDocument')
policy_name = self._get_param('PolicyName')
policy = iam_backend.create_policy(
description, path, policy_document, policy_name)
template = self.response_template(CREATE_POLICY_TEMPLATE)
return template.render(policy=policy)
def list_attached_role_policies(self):
marker = self._get_param('Marker')
max_items = self._get_int_param('MaxItems', 100)
path_prefix = self._get_param('PathPrefix', '/')
role_name = self._get_param('RoleName')
policies, marker = iam_backend.list_attached_role_policies(
role_name, marker=marker, max_items=max_items, path_prefix=path_prefix)
template = self.response_template(LIST_ATTACHED_ROLE_POLICIES_TEMPLATE)
return template.render(policies=policies, marker=marker)
def list_attached_user_policies(self):
marker = self._get_param('Marker')
max_items = self._get_int_param('MaxItems', 100)
path_prefix = self._get_param('PathPrefix', '/')
user_name = self._get_param('UserName')
policies, marker = iam_backend.list_attached_user_policies(
user_name, marker=marker, max_items=max_items,
path_prefix=path_prefix)
template = self.response_template(LIST_ATTACHED_USER_POLICIES_TEMPLATE)
return template.render(policies=policies, marker=marker)
def list_policies(self):
marker = self._get_param('Marker')
max_items = self._get_int_param('MaxItems', 100)
only_attached = self._get_bool_param('OnlyAttached', False)
path_prefix = self._get_param('PathPrefix', '/')
scope = self._get_param('Scope', 'All')
policies, marker = iam_backend.list_policies(
marker, max_items, only_attached, path_prefix, scope)
template = self.response_template(LIST_POLICIES_TEMPLATE)
return template.render(policies=policies, marker=marker)
def create_role(self):
role_name = self._get_param('RoleName')
path = self._get_param('Path')
assume_role_policy_document = self._get_param(
'AssumeRolePolicyDocument')
role = iam_backend.create_role(
role_name, assume_role_policy_document, path)
template = self.response_template(CREATE_ROLE_TEMPLATE)
return template.render(role=role)
def get_role(self):
role_name = self._get_param('RoleName')
role = iam_backend.get_role(role_name)
template = self.response_template(GET_ROLE_TEMPLATE)
return template.render(role=role)
def delete_role(self):
role_name = self._get_param('RoleName')
iam_backend.delete_role(role_name)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name="DeleteRoleResponse")
def list_role_policies(self):
role_name = self._get_param('RoleName')
role_policies_names = iam_backend.list_role_policies(role_name)
template = self.response_template(LIST_ROLE_POLICIES)
return template.render(role_policies=role_policies_names)
def put_role_policy(self):
role_name = self._get_param('RoleName')
policy_name = self._get_param('PolicyName')
policy_document = self._get_param('PolicyDocument')
iam_backend.put_role_policy(role_name, policy_name, policy_document)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name="PutRolePolicyResponse")
def delete_role_policy(self):
role_name = self._get_param('RoleName')
policy_name = self._get_param('PolicyName')
iam_backend.delete_role_policy(role_name, policy_name)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name="DeleteRolePolicyResponse")
def get_role_policy(self):
role_name = self._get_param('RoleName')
policy_name = self._get_param('PolicyName')
policy_name, policy_document = iam_backend.get_role_policy(
role_name, policy_name)
template = self.response_template(GET_ROLE_POLICY_TEMPLATE)
return template.render(role_name=role_name,
policy_name=policy_name,
policy_document=policy_document)
def update_assume_role_policy(self):
role_name = self._get_param('RoleName')
role = iam_backend.get_role(role_name)
role.assume_role_policy_document = self._get_param('PolicyDocument')
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name="UpdateAssumeRolePolicyResponse")
def create_policy_version(self):
policy_arn = self._get_param('PolicyArn')
policy_document = self._get_param('PolicyDocument')
set_as_default = self._get_param('SetAsDefault')
policy_version = iam_backend.create_policy_version(policy_arn, policy_document, set_as_default)
template = self.response_template(CREATE_POLICY_VERSION_TEMPLATE)
return template.render(policy_version=policy_version)
def get_policy_version(self):
policy_arn = self._get_param('PolicyArn')
version_id = self._get_param('VersionId')
policy_version = iam_backend.get_policy_version(policy_arn, version_id)
template = self.response_template(GET_POLICY_VERSION_TEMPLATE)
return template.render(policy_version=policy_version)
def list_policy_versions(self):
policy_arn = self._get_param('PolicyArn')
policy_versions = iam_backend.list_policy_versions(policy_arn)
template = self.response_template(LIST_POLICY_VERSIONS_TEMPLATE)
return template.render(policy_versions=policy_versions)
def delete_policy_version(self):
policy_arn = self._get_param('PolicyArn')
version_id = self._get_param('VersionId')
iam_backend.delete_policy_version(policy_arn, version_id)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name='DeletePolicyVersion')
def create_instance_profile(self):
profile_name = self._get_param('InstanceProfileName')
path = self._get_param('Path')
profile = iam_backend.create_instance_profile(
profile_name, path, role_ids=[])
template = self.response_template(CREATE_INSTANCE_PROFILE_TEMPLATE)
return template.render(profile=profile)
def get_instance_profile(self):
profile_name = self._get_param('InstanceProfileName')
profile = iam_backend.get_instance_profile(profile_name)
template = self.response_template(GET_INSTANCE_PROFILE_TEMPLATE)
return template.render(profile=profile)
def add_role_to_instance_profile(self):
profile_name = self._get_param('InstanceProfileName')
role_name = self._get_param('RoleName')
iam_backend.add_role_to_instance_profile(profile_name, role_name)
template = self.response_template(
ADD_ROLE_TO_INSTANCE_PROFILE_TEMPLATE)
return template.render()
def remove_role_from_instance_profile(self):
profile_name = self._get_param('InstanceProfileName')
role_name = self._get_param('RoleName')
iam_backend.remove_role_from_instance_profile(profile_name, role_name)
template = self.response_template(
REMOVE_ROLE_FROM_INSTANCE_PROFILE_TEMPLATE)
return template.render()
def list_roles(self):
roles = iam_backend.get_roles()
template = self.response_template(LIST_ROLES_TEMPLATE)
return template.render(roles=roles)
def list_instance_profiles(self):
profiles = iam_backend.get_instance_profiles()
template = self.response_template(LIST_INSTANCE_PROFILES_TEMPLATE)
return template.render(instance_profiles=profiles)
def list_instance_profiles_for_role(self):
role_name = self._get_param('RoleName')
profiles = iam_backend.get_instance_profiles_for_role(
role_name=role_name)
template = self.response_template(
LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE)
return template.render(instance_profiles=profiles)
def upload_server_certificate(self):
cert_name = self._get_param('ServerCertificateName')
cert_body = self._get_param('CertificateBody')
path = self._get_param('Path')
private_key = self._get_param('PrivateKey')
        cert_chain = self._get_param('CertificateChain')
cert = iam_backend.upload_server_cert(
cert_name, cert_body, private_key, cert_chain=cert_chain, path=path)
template = self.response_template(UPLOAD_CERT_TEMPLATE)
return template.render(certificate=cert)
def list_server_certificates(self, marker=None):
certs = iam_backend.get_all_server_certs(marker=marker)
template = self.response_template(LIST_SERVER_CERTIFICATES_TEMPLATE)
return template.render(server_certificates=certs)
def get_server_certificate(self):
cert_name = self._get_param('ServerCertificateName')
cert = iam_backend.get_server_certificate(cert_name)
template = self.response_template(GET_SERVER_CERTIFICATE_TEMPLATE)
return template.render(certificate=cert)
def create_group(self):
group_name = self._get_param('GroupName')
path = self._get_param('Path')
group = iam_backend.create_group(group_name, path)
template = self.response_template(CREATE_GROUP_TEMPLATE)
return template.render(group=group)
def get_group(self):
group_name = self._get_param('GroupName')
group = iam_backend.get_group(group_name)
template = self.response_template(GET_GROUP_TEMPLATE)
return template.render(group=group)
def list_groups(self):
groups = iam_backend.list_groups()
template = self.response_template(LIST_GROUPS_TEMPLATE)
return template.render(groups=groups)
def list_groups_for_user(self):
user_name = self._get_param('UserName')
groups = iam_backend.get_groups_for_user(user_name)
template = self.response_template(LIST_GROUPS_FOR_USER_TEMPLATE)
return template.render(groups=groups)
def put_group_policy(self):
group_name = self._get_param('GroupName')
policy_name = self._get_param('PolicyName')
policy_document = self._get_param('PolicyDocument')
iam_backend.put_group_policy(group_name, policy_name, policy_document)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name="PutGroupPolicyResponse")
def list_group_policies(self):
group_name = self._get_param('GroupName')
marker = self._get_param('Marker')
max_items = self._get_param('MaxItems')
policies = iam_backend.list_group_policies(group_name,
marker=marker, max_items=max_items)
template = self.response_template(LIST_GROUP_POLICIES_TEMPLATE)
return template.render(name="ListGroupPoliciesResponse",
policies=policies,
marker=marker)
def get_group_policy(self):
group_name = self._get_param('GroupName')
policy_name = self._get_param('PolicyName')
policy_result = iam_backend.get_group_policy(group_name, policy_name)
template = self.response_template(GET_GROUP_POLICY_TEMPLATE)
return template.render(name="GetGroupPolicyResponse", **policy_result)
def create_user(self):
user_name = self._get_param('UserName')
path = self._get_param('Path')
user = iam_backend.create_user(user_name, path)
template = self.response_template(USER_TEMPLATE)
return template.render(action='Create', user=user)
def get_user(self):
user_name = self._get_param('UserName')
if user_name:
user = iam_backend.get_user(user_name)
else:
user = User(name='default_user')
            # If no user is specified, IAM returns the currently authenticated user
template = self.response_template(USER_TEMPLATE)
return template.render(action='Get', user=user)
def list_users(self):
path_prefix = self._get_param('PathPrefix')
marker = self._get_param('Marker')
max_items = self._get_param('MaxItems')
users = iam_backend.list_users(path_prefix, marker, max_items)
template = self.response_template(LIST_USERS_TEMPLATE)
return template.render(action='List', users=users)
def create_login_profile(self):
user_name = self._get_param('UserName')
        password = self._get_param('Password')
user = iam_backend.create_login_profile(user_name, password)
template = self.response_template(CREATE_LOGIN_PROFILE_TEMPLATE)
return template.render(user=user)
def get_login_profile(self):
user_name = self._get_param('UserName')
user = iam_backend.get_login_profile(user_name)
template = self.response_template(GET_LOGIN_PROFILE_TEMPLATE)
return template.render(user=user)
def update_login_profile(self):
user_name = self._get_param('UserName')
password = self._get_param('Password')
password_reset_required = self._get_param('PasswordResetRequired')
user = iam_backend.update_login_profile(user_name, password, password_reset_required)
template = self.response_template(UPDATE_LOGIN_PROFILE_TEMPLATE)
return template.render(user=user)
def add_user_to_group(self):
group_name = self._get_param('GroupName')
user_name = self._get_param('UserName')
iam_backend.add_user_to_group(group_name, user_name)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name='AddUserToGroup')
def remove_user_from_group(self):
group_name = self._get_param('GroupName')
user_name = self._get_param('UserName')
iam_backend.remove_user_from_group(group_name, user_name)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name='RemoveUserFromGroup')
def get_user_policy(self):
user_name = self._get_param('UserName')
policy_name = self._get_param('PolicyName')
policy_document = iam_backend.get_user_policy(user_name, policy_name)
template = self.response_template(GET_USER_POLICY_TEMPLATE)
return template.render(
user_name=user_name,
policy_name=policy_name,
policy_document=policy_document
)
def list_user_policies(self):
user_name = self._get_param('UserName')
policies = iam_backend.list_user_policies(user_name)
template = self.response_template(LIST_USER_POLICIES_TEMPLATE)
return template.render(policies=policies)
def put_user_policy(self):
user_name = self._get_param('UserName')
policy_name = self._get_param('PolicyName')
policy_document = self._get_param('PolicyDocument')
iam_backend.put_user_policy(user_name, policy_name, policy_document)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name='PutUserPolicy')
def delete_user_policy(self):
user_name = self._get_param('UserName')
policy_name = self._get_param('PolicyName')
iam_backend.delete_user_policy(user_name, policy_name)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name='DeleteUserPolicy')
def create_access_key(self):
user_name = self._get_param('UserName')
key = iam_backend.create_access_key(user_name)
template = self.response_template(CREATE_ACCESS_KEY_TEMPLATE)
return template.render(key=key)
def list_access_keys(self):
user_name = self._get_param('UserName')
keys = iam_backend.get_all_access_keys(user_name)
template = self.response_template(LIST_ACCESS_KEYS_TEMPLATE)
return template.render(user_name=user_name, keys=keys)
def delete_access_key(self):
user_name = self._get_param('UserName')
access_key_id = self._get_param('AccessKeyId')
iam_backend.delete_access_key(access_key_id, user_name)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name='DeleteAccessKey')
def deactivate_mfa_device(self):
user_name = self._get_param('UserName')
serial_number = self._get_param('SerialNumber')
iam_backend.deactivate_mfa_device(user_name, serial_number)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name='DeactivateMFADevice')
def enable_mfa_device(self):
user_name = self._get_param('UserName')
serial_number = self._get_param('SerialNumber')
authentication_code_1 = self._get_param('AuthenticationCode1')
authentication_code_2 = self._get_param('AuthenticationCode2')
iam_backend.enable_mfa_device(
user_name,
serial_number,
authentication_code_1,
authentication_code_2
)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name='EnableMFADevice')
def list_mfa_devices(self):
user_name = self._get_param('UserName')
devices = iam_backend.list_mfa_devices(user_name)
template = self.response_template(LIST_MFA_DEVICES_TEMPLATE)
return template.render(user_name=user_name, devices=devices)
def delete_user(self):
user_name = self._get_param('UserName')
iam_backend.delete_user(user_name)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name='DeleteUser')
def delete_login_profile(self):
user_name = self._get_param('UserName')
iam_backend.delete_login_profile(user_name)
template = self.response_template(GENERIC_EMPTY_TEMPLATE)
return template.render(name='DeleteLoginProfile')
def generate_credential_report(self):
if iam_backend.report_generated():
template = self.response_template(CREDENTIAL_REPORT_GENERATED)
else:
template = self.response_template(CREDENTIAL_REPORT_GENERATING)
iam_backend.generate_report()
return template.render()
def get_credential_report(self):
report = iam_backend.get_credential_report()
template = self.response_template(CREDENTIAL_REPORT)
return template.render(report=report)
def list_account_aliases(self):
aliases = iam_backend.list_account_aliases()
template = self.response_template(LIST_ACCOUNT_ALIASES_TEMPLATE)
return template.render(aliases=aliases)
def create_account_alias(self):
alias = self._get_param('AccountAlias')
iam_backend.create_account_alias(alias)
template = self.response_template(CREATE_ACCOUNT_ALIAS_TEMPLATE)
return template.render()
def delete_account_alias(self):
alias = self._get_param('AccountAlias')
iam_backend.delete_account_alias(alias)
template = self.response_template(DELETE_ACCOUNT_ALIAS_TEMPLATE)
return template.render()
ATTACH_ROLE_POLICY_TEMPLATE = """<AttachRolePolicyResponse>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</AttachRolePolicyResponse>"""
DETACH_ROLE_POLICY_TEMPLATE = """<DetachRolePolicyResponse>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</DetachRolePolicyResponse>"""
ATTACH_USER_POLICY_TEMPLATE = """<AttachUserPolicyResponse>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</AttachUserPolicyResponse>"""
DETACH_USER_POLICY_TEMPLATE = """<DetachUserPolicyResponse>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</DetachUserPolicyResponse>"""
CREATE_POLICY_TEMPLATE = """<CreatePolicyResponse>
<CreatePolicyResult>
<Policy>
<Arn>{{ policy.arn }}</Arn>
<AttachmentCount>{{ policy.attachment_count }}</AttachmentCount>
<CreateDate>{{ policy.create_datetime.isoformat() }}</CreateDate>
<DefaultVersionId>{{ policy.default_version_id }}</DefaultVersionId>
<Path>{{ policy.path }}</Path>
<PolicyId>{{ policy.id }}</PolicyId>
<PolicyName>{{ policy.name }}</PolicyName>
<UpdateDate>{{ policy.update_datetime.isoformat() }}</UpdateDate>
</Policy>
</CreatePolicyResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</CreatePolicyResponse>"""
LIST_ATTACHED_ROLE_POLICIES_TEMPLATE = """<ListAttachedRolePoliciesResponse>
<ListAttachedRolePoliciesResult>
{% if marker is none %}
<IsTruncated>false</IsTruncated>
{% else %}
<IsTruncated>true</IsTruncated>
<Marker>{{ marker }}</Marker>
{% endif %}
<AttachedPolicies>
{% for policy in policies %}
<member>
<PolicyName>{{ policy.name }}</PolicyName>
<PolicyArn>{{ policy.arn }}</PolicyArn>
</member>
{% endfor %}
</AttachedPolicies>
</ListAttachedRolePoliciesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListAttachedRolePoliciesResponse>"""
LIST_ATTACHED_USER_POLICIES_TEMPLATE = """<ListAttachedUserPoliciesResponse>
<ListAttachedUserPoliciesResult>
{% if marker is none %}
<IsTruncated>false</IsTruncated>
{% else %}
<IsTruncated>true</IsTruncated>
<Marker>{{ marker }}</Marker>
{% endif %}
<AttachedPolicies>
{% for policy in policies %}
<member>
<PolicyName>{{ policy.name }}</PolicyName>
<PolicyArn>{{ policy.arn }}</PolicyArn>
</member>
{% endfor %}
</AttachedPolicies>
</ListAttachedUserPoliciesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListAttachedUserPoliciesResponse>"""
LIST_POLICIES_TEMPLATE = """<ListPoliciesResponse>
<ListPoliciesResult>
{% if marker is none %}
<IsTruncated>false</IsTruncated>
{% else %}
<IsTruncated>true</IsTruncated>
<Marker>{{ marker }}</Marker>
{% endif %}
<Policies>
{% for policy in policies %}
<member>
<Arn>{{ policy.arn }}</Arn>
<AttachmentCount>{{ policy.attachment_count }}</AttachmentCount>
<CreateDate>{{ policy.create_datetime.isoformat() }}</CreateDate>
<DefaultVersionId>{{ policy.default_version_id }}</DefaultVersionId>
<Path>{{ policy.path }}</Path>
<PolicyId>{{ policy.id }}</PolicyId>
<PolicyName>{{ policy.name }}</PolicyName>
<UpdateDate>{{ policy.update_datetime.isoformat() }}</UpdateDate>
</member>
{% endfor %}
</Policies>
</ListPoliciesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListPoliciesResponse>"""
GENERIC_EMPTY_TEMPLATE = """<{{ name }}Response>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</{{ name }}Response>"""
CREATE_INSTANCE_PROFILE_TEMPLATE = """<CreateInstanceProfileResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<CreateInstanceProfileResult>
<InstanceProfile>
<InstanceProfileId>{{ profile.id }}</InstanceProfileId>
<Roles/>
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn>
<CreateDate>2012-05-09T16:11:10.222Z</CreateDate>
</InstanceProfile>
</CreateInstanceProfileResult>
<ResponseMetadata>
<RequestId>974142ee-99f1-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</CreateInstanceProfileResponse>"""
GET_INSTANCE_PROFILE_TEMPLATE = """<GetInstanceProfileResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<GetInstanceProfileResult>
<InstanceProfile>
<InstanceProfileId>{{ profile.id }}</InstanceProfileId>
<Roles>
{% for role in profile.roles %}
<member>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
</Roles>
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn>
<CreateDate>2012-05-09T16:11:10Z</CreateDate>
</InstanceProfile>
</GetInstanceProfileResult>
<ResponseMetadata>
<RequestId>37289fda-99f2-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</GetInstanceProfileResponse>"""
CREATE_ROLE_TEMPLATE = """<CreateRoleResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<CreateRoleResult>
<Role>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-08T23:34:01.495Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</Role>
</CreateRoleResult>
<ResponseMetadata>
<RequestId>4a93ceee-9966-11e1-b624-b1aEXAMPLE7c</RequestId>
</ResponseMetadata>
</CreateRoleResponse>"""
GET_ROLE_POLICY_TEMPLATE = """<GetRolePolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<GetRolePolicyResult>
<PolicyName>{{ policy_name }}</PolicyName>
<RoleName>{{ role_name }}</RoleName>
<PolicyDocument>{{ policy_document }}</PolicyDocument>
</GetRolePolicyResult>
<ResponseMetadata>
<RequestId>7e7cd8bc-99ef-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</GetRolePolicyResponse>"""
GET_ROLE_TEMPLATE = """<GetRoleResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<GetRoleResult>
<Role>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-08T23:34:01Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</Role>
</GetRoleResult>
<ResponseMetadata>
<RequestId>df37e965-9967-11e1-a4c3-270EXAMPLE04</RequestId>
</ResponseMetadata>
</GetRoleResponse>"""
ADD_ROLE_TO_INSTANCE_PROFILE_TEMPLATE = """<AddRoleToInstanceProfileResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ResponseMetadata>
<RequestId>12657608-99f2-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</AddRoleToInstanceProfileResponse>"""
REMOVE_ROLE_FROM_INSTANCE_PROFILE_TEMPLATE = """<RemoveRoleFromInstanceProfileResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ResponseMetadata>
<RequestId>12657608-99f2-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</RemoveRoleFromInstanceProfileResponse>"""
LIST_ROLES_TEMPLATE = """<ListRolesResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ListRolesResult>
<IsTruncated>false</IsTruncated>
<Roles>
{% for role in roles %}
<member>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
</Roles>
</ListRolesResult>
<ResponseMetadata>
<RequestId>20f7279f-99ee-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</ListRolesResponse>"""
LIST_ROLE_POLICIES = """<ListRolePoliciesResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ListRolePoliciesResult>
<PolicyNames>
{% for policy_name in role_policies %}
<member>{{ policy_name }}</member>
{% endfor %}
</PolicyNames>
<IsTruncated>false</IsTruncated>
</ListRolePoliciesResult>
<ResponseMetadata>
<RequestId>8c7e1816-99f0-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</ListRolePoliciesResponse>"""
CREATE_POLICY_VERSION_TEMPLATE = """<CreatePolicyVersionResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<CreatePolicyVersionResult>
<PolicyVersion>
<Document>{{ policy_version.document }}</Document>
<VersionId>{{ policy_version.version_id }}</VersionId>
<IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
</PolicyVersion>
</CreatePolicyVersionResult>
<ResponseMetadata>
<RequestId>20f7279f-99ee-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</CreatePolicyVersionResponse>"""
GET_POLICY_VERSION_TEMPLATE = """<GetPolicyVersionResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<GetPolicyVersionResult>
<PolicyVersion>
<Document>{{ policy_version.document }}</Document>
<VersionId>{{ policy_version.version_id }}</VersionId>
<IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
</PolicyVersion>
</GetPolicyVersionResult>
<ResponseMetadata>
<RequestId>20f7279f-99ee-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</GetPolicyVersionResponse>"""
LIST_POLICY_VERSIONS_TEMPLATE = """<ListPolicyVersionsResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ListPolicyVersionsResult>
<IsTruncated>false</IsTruncated>
<Versions>
{% for policy_version in policy_versions %}
<member>
<Document>{{ policy_version.document }}</Document>
<VersionId>{{ policy_version.version_id }}</VersionId>
<IsDefaultVersion>{{ policy_version.is_default }}</IsDefaultVersion>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
</member>
{% endfor %}
</Versions>
</ListPolicyVersionsResult>
<ResponseMetadata>
<RequestId>20f7279f-99ee-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</ListPolicyVersionsResponse>"""
LIST_INSTANCE_PROFILES_TEMPLATE = """<ListInstanceProfilesResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ListInstanceProfilesResult>
<IsTruncated>false</IsTruncated>
<InstanceProfiles>
{% for instance in instance_profiles %}
<member>
<Id>{{ instance.id }}</Id>
<Roles>
{% for role in instance.roles %}
<member>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
</Roles>
<InstanceProfileName>{{ instance.name }}</InstanceProfileName>
<Path>{{ instance.path }}</Path>
<Arn>{{ instance.arn }}</Arn>
<CreateDate>2012-05-09T16:27:03Z</CreateDate>
</member>
{% endfor %}
</InstanceProfiles>
</ListInstanceProfilesResult>
<ResponseMetadata>
<RequestId>fd74fa8d-99f3-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</ListInstanceProfilesResponse>"""
UPLOAD_CERT_TEMPLATE = """<UploadServerCertificateResponse>
<UploadServerCertificateResult>
<ServerCertificateMetadata>
<ServerCertificateName>{{ certificate.cert_name }}</ServerCertificateName>
{% if certificate.path %}
<Path>{{ certificate.path }}</Path>
{% endif %}
<Arn>{{ certificate.arn }}</Arn>
<UploadDate>2010-05-08T01:02:03.004Z</UploadDate>
<ServerCertificateId>ASCACKCEVSQ6C2EXAMPLE</ServerCertificateId>
<Expiration>2012-05-08T01:02:03.004Z</Expiration>
</ServerCertificateMetadata>
</UploadServerCertificateResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</UploadServerCertificateResponse>"""
LIST_SERVER_CERTIFICATES_TEMPLATE = """<ListServerCertificatesResponse>
<ListServerCertificatesResult>
<IsTruncated>false</IsTruncated>
<ServerCertificateMetadataList>
{% for certificate in server_certificates %}
<member>
<ServerCertificateName>{{ certificate.cert_name }}</ServerCertificateName>
{% if certificate.path %}
<Path>{{ certificate.path }}</Path>
{% endif %}
<Arn>{{ certificate.arn }}</Arn>
<UploadDate>2010-05-08T01:02:03.004Z</UploadDate>
<ServerCertificateId>ASCACKCEVSQ6C2EXAMPLE</ServerCertificateId>
<Expiration>2012-05-08T01:02:03.004Z</Expiration>
</member>
{% endfor %}
</ServerCertificateMetadataList>
</ListServerCertificatesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListServerCertificatesResponse>"""
GET_SERVER_CERTIFICATE_TEMPLATE = """<GetServerCertificateResponse>
<GetServerCertificateResult>
<ServerCertificate>
<ServerCertificateMetadata>
<ServerCertificateName>{{ certificate.cert_name }}</ServerCertificateName>
{% if certificate.path %}
<Path>{{ certificate.path }}</Path>
{% endif %}
<Arn>{{ certificate.arn }}</Arn>
<UploadDate>2010-05-08T01:02:03.004Z</UploadDate>
<ServerCertificateId>ASCACKCEVSQ6C2EXAMPLE</ServerCertificateId>
<Expiration>2012-05-08T01:02:03.004Z</Expiration>
</ServerCertificateMetadata>
<CertificateBody>{{ certificate.cert_body }}</CertificateBody>
</ServerCertificate>
</GetServerCertificateResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</GetServerCertificateResponse>"""
CREATE_GROUP_TEMPLATE = """<CreateGroupResponse>
<CreateGroupResult>
<Group>
<Path>{{ group.path }}</Path>
<GroupName>{{ group.name }}</GroupName>
<GroupId>{{ group.id }}</GroupId>
<Arn>{{ group.arn }}</Arn>
</Group>
</CreateGroupResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</CreateGroupResponse>"""
GET_GROUP_TEMPLATE = """<GetGroupResponse>
<GetGroupResult>
<Group>
<Path>{{ group.path }}</Path>
<GroupName>{{ group.name }}</GroupName>
<GroupId>{{ group.id }}</GroupId>
<Arn>{{ group.arn }}</Arn>
</Group>
<Users>
{% for user in group.users %}
<member>
<Path>{{ user.path }}</Path>
<UserName>{{ user.name }}</UserName>
<UserId>{{ user.id }}</UserId>
<Arn>{{ user.arn }}</Arn>
</member>
{% endfor %}
</Users>
<IsTruncated>false</IsTruncated>
</GetGroupResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</GetGroupResponse>"""
LIST_GROUPS_TEMPLATE = """<ListGroupsResponse>
<ListGroupsResult>
<Groups>
{% for group in groups %}
<member>
<Path>{{ group.path }}</Path>
<GroupName>{{ group.name }}</GroupName>
<GroupId>{{ group.id }}</GroupId>
<Arn>{{ group.arn }}</Arn>
</member>
{% endfor %}
</Groups>
<IsTruncated>false</IsTruncated>
</ListGroupsResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListGroupsResponse>"""
LIST_GROUPS_FOR_USER_TEMPLATE = """<ListGroupsForUserResponse>
<ListGroupsForUserResult>
<Groups>
{% for group in groups %}
<member>
<Path>{{ group.path }}</Path>
<GroupName>{{ group.name }}</GroupName>
<GroupId>{{ group.id }}</GroupId>
<Arn>{{ group.arn }}</Arn>
</member>
{% endfor %}
</Groups>
<IsTruncated>false</IsTruncated>
</ListGroupsForUserResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListGroupsForUserResponse>"""
LIST_GROUP_POLICIES_TEMPLATE = """<ListGroupPoliciesResponse>
<ListGroupPoliciesResult>
{% if marker is none %}
<IsTruncated>false</IsTruncated>
{% else %}
<IsTruncated>true</IsTruncated>
<Marker>{{ marker }}</Marker>
{% endif %}
<PolicyNames>
{% for policy in policies %}
<member>{{ policy }}</member>
{% endfor %}
</PolicyNames>
</ListGroupPoliciesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListGroupPoliciesResponse>"""
GET_GROUP_POLICY_TEMPLATE = """<GetGroupPolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<GetGroupPolicyResult>
<PolicyName>{{ policy_name }}</PolicyName>
<GroupName>{{ group_name }}</GroupName>
<PolicyDocument>{{ policy_document }}</PolicyDocument>
</GetGroupPolicyResult>
<ResponseMetadata>
<RequestId>7e7cd8bc-99ef-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</GetGroupPolicyResponse>"""
USER_TEMPLATE = """<{{ action }}UserResponse>
<{{ action }}UserResult>
<User>
<Path>{{ user.path }}</Path>
<UserName>{{ user.name }}</UserName>
<UserId>{{ user.id }}</UserId>
<CreateDate>{{ user.created_iso_8601 }}</CreateDate>
<Arn>{{ user.arn }}</Arn>
</User>
</{{ action }}UserResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</{{ action }}UserResponse>"""
LIST_USERS_TEMPLATE = """<{{ action }}UsersResponse>
<{{ action }}UsersResult>
<Users>
{% for user in users %}
<member>
<UserId>{{ user.id }}</UserId>
<Path>{{ user.path }}</Path>
<UserName>{{ user.name }}</UserName>
<CreateDate>{{ user.created_iso_8601 }}</CreateDate>
<Arn>{{ user.arn }}</Arn>
</member>
{% endfor %}
</Users>
</{{ action }}UsersResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</{{ action }}UsersResponse>"""
CREATE_LOGIN_PROFILE_TEMPLATE = """<CreateLoginProfileResponse>
<CreateLoginProfileResult>
<LoginProfile>
<UserName>{{ user.name }}</UserName>
<CreateDate>{{ user.created_iso_8601 }}</CreateDate>
</LoginProfile>
</CreateLoginProfileResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</CreateLoginProfileResponse>
"""
GET_LOGIN_PROFILE_TEMPLATE = """<GetLoginProfileResponse>
<GetLoginProfileResult>
<LoginProfile>
<UserName>{{ user.name }}</UserName>
<CreateDate>{{ user.created_iso_8601 }}</CreateDate>
{% if user.password_reset_required %}
<PasswordResetRequired>true</PasswordResetRequired>
{% endif %}
</LoginProfile>
</GetLoginProfileResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</GetLoginProfileResponse>
"""
UPDATE_LOGIN_PROFILE_TEMPLATE = """<UpdateLoginProfileResponse>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</UpdateLoginProfileResponse>
"""
GET_USER_POLICY_TEMPLATE = """<GetUserPolicyResponse>
<GetUserPolicyResult>
<UserName>{{ user_name }}</UserName>
<PolicyName>{{ policy_name }}</PolicyName>
<PolicyDocument>
{{ policy_document }}
</PolicyDocument>
</GetUserPolicyResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</GetUserPolicyResponse>"""
LIST_USER_POLICIES_TEMPLATE = """<ListUserPoliciesResponse>
<ListUserPoliciesResult>
<PolicyNames>
{% for policy in policies %}
<member>{{ policy }}</member>
{% endfor %}
</PolicyNames>
      <IsTruncated>false</IsTruncated>
   </ListUserPoliciesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListUserPoliciesResponse>"""
CREATE_ACCESS_KEY_TEMPLATE = """<CreateAccessKeyResponse>
<CreateAccessKeyResult>
<AccessKey>
<UserName>{{ key.user_name }}</UserName>
<AccessKeyId>{{ key.access_key_id }}</AccessKeyId>
<Status>{{ key.status }}</Status>
<SecretAccessKey>
{{ key.secret_access_key }}
</SecretAccessKey>
</AccessKey>
</CreateAccessKeyResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</CreateAccessKeyResponse>"""
LIST_ACCESS_KEYS_TEMPLATE = """<ListAccessKeysResponse>
<ListAccessKeysResult>
<UserName>{{ user_name }}</UserName>
<AccessKeyMetadata>
{% for key in keys %}
<member>
<UserName>{{ user_name }}</UserName>
<AccessKeyId>{{ key.access_key_id }}</AccessKeyId>
<Status>{{ key.status }}</Status>
<CreateDate>{{ key.create_date }}</CreateDate>
</member>
{% endfor %}
</AccessKeyMetadata>
<IsTruncated>false</IsTruncated>
</ListAccessKeysResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListAccessKeysResponse>"""
CREDENTIAL_REPORT_GENERATING = """
<GenerateCredentialReportResponse>
<GenerateCredentialReportResult>
<state>STARTED</state>
<description>No report exists. Starting a new report generation task</description>
</GenerateCredentialReportResult>
<ResponseMetadata>
      <RequestId>fa788a82-aa8a-11e4-a278-1786c418872b</RequestId>
</ResponseMetadata>
</GenerateCredentialReportResponse>"""
CREDENTIAL_REPORT_GENERATED = """<GenerateCredentialReportResponse>
<GenerateCredentialReportResult>
<state>COMPLETE</state>
</GenerateCredentialReportResult>
<ResponseMetadata>
      <RequestId>fa788a82-aa8a-11e4-a278-1786c418872b</RequestId>
</ResponseMetadata>
</GenerateCredentialReportResponse>"""
CREDENTIAL_REPORT = """<GetCredentialReportResponse>
<GetCredentialReportResult>
<content>{{ report }}</content>
<GeneratedTime>2015-02-02T20:02:02Z</GeneratedTime>
<ReportFormat>text/csv</ReportFormat>
</GetCredentialReportResult>
<ResponseMetadata>
      <RequestId>fa788a82-aa8a-11e4-a278-1786c418872b</RequestId>
</ResponseMetadata>
</GetCredentialReportResponse>"""
LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """<ListInstanceProfilesForRoleResponse>
<ListInstanceProfilesForRoleResult>
<IsTruncated>false</IsTruncated>
<InstanceProfiles>
{% for profile in instance_profiles %}
<member>
<Id>{{ profile.id }}</Id>
<Roles>
{% for role in profile.roles %}
<member>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
          <AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
</Roles>
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn>
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
</member>
{% endfor %}
</InstanceProfiles>
</ListInstanceProfilesForRoleResult>
<ResponseMetadata>
<RequestId>6a8c3992-99f4-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</ListInstanceProfilesForRoleResponse>"""
LIST_MFA_DEVICES_TEMPLATE = """<ListMFADevicesResponse>
<ListMFADevicesResult>
<MFADevices>
{% for device in devices %}
<member>
<UserName>{{ user_name }}</UserName>
<SerialNumber>{{ device.serial_number }}</SerialNumber>
</member>
{% endfor %}
</MFADevices>
<IsTruncated>false</IsTruncated>
</ListMFADevicesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListMFADevicesResponse>"""
LIST_ACCOUNT_ALIASES_TEMPLATE = """<ListAccountAliasesResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ListAccountAliasesResult>
<IsTruncated>false</IsTruncated>
<AccountAliases>
{% for alias in aliases %}
<member>{{ alias }}</member>
{% endfor %}
</AccountAliases>
</ListAccountAliasesResult>
<ResponseMetadata>
<RequestId>c5a076e9-f1b0-11df-8fbe-45274EXAMPLE</RequestId>
</ResponseMetadata>
</ListAccountAliasesResponse>"""
CREATE_ACCOUNT_ALIAS_TEMPLATE = """<CreateAccountAliasResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ResponseMetadata>
<RequestId>36b5db08-f1b0-11df-8fbe-45274EXAMPLE</RequestId>
</ResponseMetadata>
</CreateAccountAliasResponse>"""
DELETE_ACCOUNT_ALIAS_TEMPLATE = """<DeleteAccountAliasResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</DeleteAccountAliasResponse>"""
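# A minimal usage sketch for the mocked endpoints above (an assumption: the
# standard public moto decorator API plus boto3, neither of which this module
# imports):
#
#     from moto import mock_iam
#     import boto3
#
#     @mock_iam
#     def test_create_role():
#         client = boto3.client('iam', region_name='us-east-1')
#         client.create_role(RoleName='demo', AssumeRolePolicyDocument='{}')
#         assert client.list_roles()['Roles'][0]['RoleName'] == 'demo'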
| kefo/moto | moto/iam/responses.py | Python | apache-2.0 | 47,155 |
import os
import shutil
from crawler.crawler import ArchiveCrawler
c = ArchiveCrawler()
def crawler_sample_archive_test():
try:
c.sample(1)
print "crawler_sample_archive_test: PASS"
except Exception as err:
print err
print "crawler_sample_archive_test: FAIL"
def crawler_get_permissions_test():
try:
c.get_permissions()
print "crawler_get_permissions_test: PASS"
except Exception as err:
print err
print "crawler_get_permissions_test: FAIL"
def crawler_download_test():
try:
c.download()
print "crawler_download_test: PASS"
except Exception as err:
print err
print "crawler_download_test: FAIL"
def crawler_test_cleanup():
# delete database
os.remove("apekit.db")
# delete downloaded apks
shutil.rmtree("apks/")
print "crawler_test_cleanup: DONE"
| ksparakis/apekit | tests/crawler_test.py | Python | apache-2.0 | 937 |
#!/usr/bin/env python
"""
Copyright 2017 Matt Settles
Created June 8, 2017
"""
from optparse import OptionParser
import os
import sys
import time
import traceback
import signal
from subprocess import Popen
from subprocess import PIPE
def sp_bwa_index(ref, overwrite=False):
if os.path.isfile(ref):
if os.path.isfile(ref + '.sa') and not overwrite:
            sys.stderr.write('MAPPING\tNOTE\tFound existing bwa index for %s\n' % ref)
return 0
else:
FNULL = open(os.devnull, 'w')
call = 'bwa index'
call = call + ' ' + ref
sys.stdout.write(call + '\n')
            p = Popen(['bwa', 'index', ref],
                      stdout=FNULL,
                      stderr=FNULL,
                      bufsize=-1,
                      preexec_fn=lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL))
            p.communicate()
            if p.returncode:
                sys.stderr.write('MAPPING\tERROR\tSomething in bwa index went wrong\n')
                raise RuntimeError('bwa index failed for %s' % ref)
# system call, check for return
sys.stderr.write('MAPPING\tNOTE\tSuccessfully indexed %s\n' % ref)
return 0
    else:
        sys.stderr.write("MAPPING\tERROR\t%s Reference file not found\n" % ref)
        return 1
def sp_bwa_map_bc(reads, ref, overwrite=False, sensitivity=0, procs=1):
# build the call,
if sp_bwa_index(ref, overwrite) != 0:
sys.exit(1)
    call = 'bwa mem -p -a -t ' + str(procs)
    p = Popen(call + ' ' + ref,
              stdin=PIPE,
              stdout=PIPE,
              stderr=None,
              bufsize=-1,
              shell=True,
              executable='/bin/bash',
              preexec_fn=lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL))
    stdout_data, _ = p.communicate(input=reads)
    if p.returncode:
        raise RuntimeError('bwa mem failed')
    return stdout_data
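# Hypothetical call: sp_bwa_map_bc(interleaved_fastq_text, 'ref.fa', procs=4)
# indexes ref.fa if needed, streams the reads through 'bwa mem -p -a', and
# returns the SAM text that bwa writes to stdout.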
def reverseComplement(s):
"""
given a seqeucne with 'A', 'C', 'T', and 'G' return the reverse complement
"""
basecomplement = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'N': 'N'}
letters = list(s)
try:
letters = [basecomplement[base] for base in letters]
except:
raise
return ''.join(letters[::-1])
def reverse(s):
"""
given a sequence return the reverse
"""
letters = list(s)
return ''.join(letters[::-1])
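# Examples: reverseComplement('AACG') == 'CGTT'; reverse('AACG') == 'GCAA'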
class bcProcessing:
    def __init__(self, minMQ=40, verbose=False):
        self.verbose = verbose
        self.minMQ = minMQ
        # per-instance state (class-level lists would be shared mutable state)
        self.orig_bc_lines = []
        self.remap_reads = []
        self.remap_lines = []
        self.ok_bc_lines = []
        self.R1 = []
        self.R2 = []
        self.mcount = 0
def addLine(self, line):
self.orig_bc_lines.append(line)
def clearbc(self):
self.orig_bc_lines = []
self.remap_lines = []
self.ok_bc_lines = []
    def addRead(self, read):
        """
        Add a read pair to the output queue; read[0] is the R1 fastq record
        and read[1] is the R2 fastq record
        """
        self.R1.append(read[0])
        self.R2.append(read[1])
        self.mcount += 1
def mapReads(self):
"""
Write the paired reads in the queue to the output files
"""
if (len(self.R1) == 0):
pass
else:
if not self.isOpen:
try:
if self.open() == 1:
sys.stderr.write('MAPPING\tERROR:[IlluminaFourReadOutput] ERROR Opening files for writing\n')
raise
except Exception:
raise
try:
self.R1f.write('\n'.join(self.R1) + '\n')
self.R4f.write('\n'.join(self.R2) + '\n')
except Exception:
sys.stderr.write('MAPPING\tERROR:[IlluminaFourReadOutput] Cannot write reads to file with prefix: %s\n' % self.output_prefix)
raise
self.R1 = []
self.R2 = []
self.B1 = []
self.B2 = []
self.close()
def process(self, refDB):
"""
process reads against a reference fasta file
"""
try:
bc = self.orig_bc_lines[0].split(":")[0]
mapped_pairs_count = 0
remapped_pairs_count = 0
mapped_singles_count = 0
remapped_singles_count = 0
secondary_alignment = 0
count = 0
PE1 = {}
PE2 = {}
for line in self.orig_bc_lines:
# 0x1 template having multiple segments in sequencing
# 0x2 each segment properly aligned according to the aligner
# 0x4 segment unmapped
# 0x8 next segment in the template unmapped
# 0x10 SEQ being reverse complemented
# 0x20 SEQ of the next segment in the template being reversed
# 0x40 the first segment in the template
# 0x80 the last segment in the template
# 0x100 secondary alignment
# 0x200 not passing quality controls
# 0x400 PCR or optical duplicate
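                # e.g. flag 99 = 0x1 + 0x2 + 0x20 + 0x40: a properly paired,
                # first-in-template read whose mate maps reverse complemented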
lbc = line.split(":")[0]
if lbc != bc:
                    sys.stderr.write("Something went wrong, more than one barcode in process barcodes\n")
count += 1
line2 = line.strip().split()
flag = int(line2[1])
# Secondary alignment
if (flag & 0x100): # not sure what to do with secondary alignment yet, for now ignore
secondary_alignment += 1
continue
mq = int(line2[4])
# if mq < self.minMQ:
# # add to those that need to be remapped
# self.remap_lines.append(line)
# Handle SE:
# mapped SE reads have 0x1 set to 0, and 0x4 (third bit) set to 1
if not (flag & 0x1): # SE READ, shouldn't see singles, maybe handle them later
# if not (flag & 0x4 and mq >= self.minMQ): # MAPPED
# self.ok_bc_lines.append(line)
# # TODO: NEED to determine read cloud for read
# mapped_singles_count += 1
# else: # UNMAPPED or Poor minMQ, remap the read
# if (flag & 0x10): # reverse complement
# line2[9] = reverseComplement(line2[9])
# line2[10] = reverse(line2[10])
# self.addRead(['\n'.join(['@' + line2[0] + ' 1:N:O', line2[9], '+', line2[10]])])
# remapped_singles_count += 1
continue
# Handle PE:
# logic: 0x1 = multiple segments in sequencing, 0x4 = segment unmapped, 0x8 = next segment unmapped
if (flag & 0x1): # PE READ
if (not (flag & 0x4) and not (flag & 0x8)): # both pairs mapped
if (flag & 0x40): # is this PE1 (first segment in template)
# PE1 read, check that PE2 is in dict
ID = line2[0]
if ID in PE2:
if mq >= self.minMQ and int(PE2[ID].strip().split()[4]) >= self.minMQ: # check MQ of both reads
self.ok_bc_lines.append(line)
self.ok_bc_lines.append(PE2[ID])
del PE2[ID]
# TODO: NEED to determine read cloud for read
mapped_pairs_count += 1
else:
if (flag & 0x10): # reverse complement
line2[9] = reverseComplement(line2[9])
line2[10] = reverse(line2[10])
r1 = '\n'.join(['@' + line2[0] + ' 1:N:O', line2[9], '+', line2[10]]) # sequence + qual
rl2 = PE2[ID].strip().split()
if (int(rl2[1]) & 0x10): # reverse complement
rl2[9] = reverseComplement(rl2[9])
rl2[10] = reverse(rl2[10])
r2 = '\n'.join(['@' + rl2[0] + ' 2:N:O', rl2[9], '+', rl2[10]]) # sequence + qual
                                    self.addRead([r1, r2])
del PE2[ID]
remapped_pairs_count += 1
else:
PE1[ID] = line
elif (flag & 0x80): # is this PE2 (last segment in template)
# PE2 read, check that PE1 is in dict and write out
ID = line2[0]
if ID in PE1:
if mq >= self.minMQ and int(PE1[ID].strip().split()[4]) >= self.minMQ: # check MQ of both reads
self.ok_bc_lines.append(line)
self.ok_bc_lines.append(PE1[ID])
del PE1[ID]
# TODO: NEED to determine read cloud for read
mapped_pairs_count += 1
else:
if (flag & 0x10): # reverse complement
line2[9] = reverseComplement(line2[9])
line2[10] = reverse(line2[10])
r2 = '\n'.join(['@' + line2[0] + ' 2:N:O', line2[9], '+', line2[10]]) # sequence + qual
rl1 = PE1[ID].strip().split()
if (int(rl1[1]) & 0x10): # reverse complement
rl1[9] = reverseComplement(rl1[9])
rl1[10] = reverse(rl1[10])
r1 = '\n'.join(['@' + rl1[0] + ' 1:N:O', rl1[9], '+', rl1[10]]) # sequence + qual
                                    self.addRead([r1, r2])
del PE1[ID]
remapped_pairs_count += 1
else:
PE2[ID] = line
else: # an 'unmapped' pair, at least 1 unmapped
if (flag & 0x40): # is this PE1 (first segment in template)
# PE1 read, check that PE2 is in dict and write out
ID = line2[0]
if ID in PE2:
if (flag & 0x10): # reverse complement
line2[9] = reverseComplement(line2[9])
line2[10] = reverse(line2[10])
r1 = '\n'.join(['@' + line2[0] + ' 1:N:O', line2[9], '+', line2[10]]) # sequence + qual
rl2 = PE2[ID].strip().split()
if (int(rl2[1]) & 0x10): # reverse complement
rl2[9] = reverseComplement(rl2[9])
rl2[10] = reverse(rl2[10])
r2 = '\n'.join(['@' + rl2[0] + ' 2:N:O', rl2[9], '+', rl2[10]]) # sequence + qual
                                self.addRead([r1, r2])
del PE2[ID]
remapped_pairs_count += 1
else:
PE1[ID] = line
elif (flag & 0x80): # is this PE2 (last segment in template)
# PE2 read, check that PE1 is in dict and write out
ID = line2[0]
                            if ID in PE1:
                                if (flag & 0x10):  # reverse complement
                                    line2[9] = reverseComplement(line2[9])
                                    line2[10] = reverse(line2[10])
                                r2 = '\n'.join(['@' + line2[0] + ' 2:N:O', line2[9], '+', line2[10]])  # sequence + qual
                                rl1 = PE1[ID].strip().split()
                                if (int(rl1[1]) & 0x10):  # reverse complement
                                    rl1[9] = reverseComplement(rl1[9])
                                    rl1[10] = reverse(rl1[10])
                                r1 = '\n'.join(['@' + rl1[0] + ' 1:N:O', rl1[9], '+', rl1[10]])  # sequence + qual
                                self.addRead([r1, r2])
                                del PE1[ID]
                                remapped_pairs_count += 1
else:
PE2[ID] = line
except (KeyboardInterrupt, SystemExit):
sys.stderr.write("MAPPING\tERROR\t%s unexpectedly terminated\n" % (__name__))
return 1
except:
sys.stderr.write("".join(traceback.format_exception(*sys.exc_info())))
return 1
def main(insam, outsam, output_all, verbose):
global file_path
refDict = {}
    proc_bc = bcProcessing(verbose=verbose)
line_count = 0
current_bc = None
current_bc_count = 0
bc_count = 0
for line in insam:
# Comment/header lines start with @
if line[0] != "@" and len(line.strip().split()) > 2:
line_count += 1
bc = line.split(":")[0]
# instead check the ST:Z:GOOD for GOOD or MATCH or MISMATCH1
if line.split()[15][5:] not in ['GOOD', 'MATCH', 'MISMATCH1']:
# if seqToHash(bc) not in gbcDict:
# barcode does not match whitelist
if output_all:
# if output_all pass line directly to output
outsam.write(line)
elif bc == current_bc:
# add line to bc processing
proc_bc.addLine(line)
current_bc_count += 1
elif current_bc is None:
current_bc = bc
# add line to bc processing
proc_bc.addLine(line)
current_bc_count += 1
else:
# this is a new barcode
# can add a check to see if seen bc before, which is a no-no
# process the bc
                proc_bc.process(refDB)
# record having processed the barcode
# output to sam file
bc_count += 1
proc_bc.clearbc()
current_bc = bc
# add line to bc processing
current_bc_count = 1
proc_bc.addLine(line)
else:
# pass header directly to output
outsam.write(line)
if line[0:3] == "@SQ":
# reference sequence id
sp = line.split()
refDict[sp[1][3:]] = int(sp[2][3:])
        if line_count % 100000 == 0 and line_count > 0 and verbose:
            print "Records processed: %s" % (line_count)
    # flush the final barcode group; the loop above only processes a group
    # when the next barcode is seen
    if current_bc is not None:
        proc_bc.process(refDB)
        bc_count += 1
        proc_bc.clearbc()
#####################################
# Parse options and setup #
usage = "usage %prog -o [output file prefix (path + name)] -(a) --quiet samfile"
usage += "%prog will process alignment file produced by processing_10xReads and do some stuff, assumes sam file is sorted by read name"
parser = OptionParser(usage=usage, version="%prog 0.0.1")
parser.add_option('-o', '--output', help="Directory + filename to output sam file, or stdout",
action="store", type="str", dest="outfile", default="stdout")
parser.add_option('-a', '--all', help="output all alignment, not just those with valid gem barcode (STATUS is UNKNOWN, or AMBIGUOUS)",
action="store_true", dest="output_all", default=False)
parser.add_option('--quiet', help="turn off verbose output",
action="store_false", dest="verbose", default=True)
(options, args) = parser.parse_args()
if len(args) == 1:
infile = args[0]
# Start opening input/output files:
if not os.path.exists(infile):
sys.exit("Error, can't find input file %s" % infile)
insam = open(infile, 'r')
else:
# reading from stdin
insam = sys.stdin
outfile = options.outfile
if outfile == "stdout":
outsam = sys.stdout
else:
    outsam = open(outfile, 'w')
output_all = options.output_all
verbose = options.verbose
# need to check, can write to output folder
# global variables
file_path = os.path.dirname(os.path.realpath(__file__))
stime = time.time()
main(insam, outsam, output_all, verbose)
sys.exit(0)
| ucdavis-bioinformatics/proc10xG | process_mapping.py | Python | apache-2.0 | 16,762 |
from nn_wtf.data_sets import DataSets
from nn_wtf.neural_network_graph import NeuralNetworkGraph
from nn_wtf.tests.util import create_train_data_set, train_data_input, \
create_vector, allow_fail, train_neural_network
import unittest
__author__ = 'Lene Preuss <[email protected]>'
# pylint: disable=missing-docstring
class PredictorTest(unittest.TestCase):
@allow_fail(max_times_fail=1)
def test_all_prediction_functions_at_once_to_save_computing_time(self):
"""Training takes time, if I run tests separately I have to train for each test."""
graph = train_neural_network(create_train_data_set())
self.assertEqual(0, graph.get_predictor().predict(train_data_input(0)))
self.assertEqual(1, graph.get_predictor().predict(train_data_input(1)))
probabilities_for_0 = graph.get_predictor().prediction_probabilities(train_data_input(0))
self.assertGreater(probabilities_for_0[0], probabilities_for_0[1])
probabilities_for_1 = graph.get_predictor().prediction_probabilities(train_data_input(1))
self.assertGreater(probabilities_for_1[1], probabilities_for_1[0])
self._check_multiple_values_get_predicted(graph, [0, 1])
self._check_multiple_values_get_predicted(graph, [1, 0])
self._check_multiple_values_get_predicted(graph, [0, 1, 0])
def _check_multiple_values_get_predicted(self, graph, train_data):
predictions = graph.get_predictor().predict_multiple(
generate_train_data(train_data), len(train_data)
)
self.assertEqual(train_data, predictions)
def repeat_list_items(data, num_repeats=2):
from itertools import repeat
return [x for item in data for x in repeat(item, num_repeats)]
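# e.g. repeat_list_items([0, 1]) == [0, 0, 1, 1]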
def generate_train_data(values):
return create_vector(repeat_list_items(values, 2))
| lene/nn-wtf | nn_wtf/tests/predictor_test.py | Python | apache-2.0 | 1,834 |
"""Hello World API implemented using Google Cloud Endpoints.
Defined here are the ProtoRPC messages needed to define Schemas for methods
as well as those methods defined in an API.
"""
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
package = "blaxcy"
class Greeting(messages.Message):
message = messages.StringField(1)
class GreetingCollection(messages.Message):
    items = messages.MessageField(Greeting, 1, repeated=True)
STORED_GREETINGS = GreetingCollection(items=[Greeting(message="hello"), Greeting(message="WORLD")])
@endpoints.api(name="helloworld",version="v1")
class Helloworldapi(remote.Service):
    @endpoints.method(message_types.VoidMessage, GreetingCollection,
                      path="hellogreeting", http_method="GET",
                      name="greetings.listGreeting")
def greetings_list(self,unused_request):
return STORED_GREETINGS
    ID_RESOURCE = endpoints.ResourceContainer(
        message_types.VoidMessage,
        id=messages.IntegerField(1, variant=messages.Variant.INT32))
    @endpoints.method(ID_RESOURCE, Greeting, path="hellogreeting/{id}",
                      http_method="GET", name="greetings.getGreeting")
def greeting_get(self,request):
try:
return STORED_GREETINGS.items[request.id]
        except (IndexError, TypeError):
            raise endpoints.NotFoundException("Greeting %s not found" % (request.id,))
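# Example request against a deployed app (URL layout assumed from standard
# Cloud Endpoints routing): GET /_ah/api/helloworld/v1/hellogreeting/0
# returns {"message": "hello"} from STORED_GREETINGS.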
APPLICATION = endpoints.api_server([Helloworldapi])
| sers-cy/blaxcy | helloworld_api.py | Python | apache-2.0 | 1,510 |
# Copyright 2016 Quora, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from asynq.py3.decorators import async
values = {}
@async(pure=True)
def get(key):
global values
value = values.get(key)
print('Get %s -> %s' % (str(key), str(value)))
return value
@async(pure=True)
def set(key, value):
global values
values[key] = value
print('Set %s <- %s' % (str(key), str(value)))
@async(pure=True)
def get_and_set(key_from, key_to, depends_on):
yield depends_on
value = yield get(key_from)
yield set(key_to, value)
@async(pure=True)
def order_test():
global values
values = {}
prev_task = set(0, 'value')
tasks = []
for i in range(0, 10):
task = get_and_set(i, i + 1, prev_task) # No need to yield!
prev_task = task
tasks.append(task)
assert len(values) == 0 # Nothing is executed yet!
yield tasks
assert len(values) == 11 # Done at this point
def test():
order_test().value()
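# Constructing the tasks in order_test() executes nothing by itself; the
# Get/Set prints appear only once the batch is yielded (see the asserts).
if __name__ == '__main__':
    test()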
| manannayak/asynq | asynq/py3/test_base.py | Python | apache-2.0 | 1,487 |
from quark_runtime import *
import test1
import test2
import test3
import test4
def call_main(): import sys; main(_List(sys.argv[1:]))
def main(args):
test1.go();
test2.go();
test3.go();
test4.go();
| bozzzzo/quark | quarkc/test/emit/expected/py/constructors/constructors/__init__.py | Python | apache-2.0 | 218 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import config
from tempest.openstack.common import log as logging
from tempest.scenario import manager
from tempest.scenario import utils as test_utils
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
load_tests = test_utils.load_tests_input_scenario_utils
class TestServerBasicOps(manager.ScenarioTest):
"""
This smoke test case follows this basic set of operations:
* Create a keypair for use in launching an instance
* Create a security group to control network access in instance
* Add simple permissive rules to the security group
* Launch an instance
* Perform ssh to instance
* Terminate the instance
"""
def setUp(self):
super(TestServerBasicOps, self).setUp()
# Setup image and flavor the test instance
# Support both configured and injected values
if not hasattr(self, 'image_ref'):
self.image_ref = CONF.compute.image_ref
if not hasattr(self, 'flavor_ref'):
self.flavor_ref = CONF.compute.flavor_ref
self.image_utils = test_utils.ImageUtils()
if not self.image_utils.is_flavor_enough(self.flavor_ref,
self.image_ref):
raise self.skipException(
'{image} does not fit in {flavor}'.format(
image=self.image_ref, flavor=self.flavor_ref
)
)
self.run_ssh = CONF.compute.run_ssh and \
self.image_utils.is_sshable_image(self.image_ref)
self.ssh_user = self.image_utils.ssh_user(self.image_ref)
LOG.debug('Starting test for i:{image}, f:{flavor}. '
'Run ssh: {ssh}, user: {ssh_user}'.format(
image=self.image_ref, flavor=self.flavor_ref,
ssh=self.run_ssh, ssh_user=self.ssh_user))
def add_keypair(self):
self.keypair = self.create_keypair()
def boot_instance(self):
# Create server with image and flavor from input scenario
security_groups = [{'name': self.security_group['name']}]
create_kwargs = {
'key_name': self.keypair['name'],
'security_groups': security_groups
}
self.instance = self.create_server(image=self.image_ref,
flavor=self.flavor_ref,
create_kwargs=create_kwargs)
def verify_ssh(self):
if self.run_ssh:
# Obtain a floating IP
floating_ip = self.floating_ips_client.create_floating_ip()
self.addCleanup(self.delete_wrapper,
self.floating_ips_client.delete_floating_ip,
floating_ip['id'])
# Attach a floating IP
self.floating_ips_client.associate_floating_ip_to_server(
floating_ip['ip'], self.instance['id'])
# Check ssh
self.get_remote_client(
server_or_ip=floating_ip['ip'],
username=self.image_utils.ssh_user(self.image_ref),
private_key=self.keypair['private_key'])
@test.idempotent_id('7fff3fb3-91d8-4fd0-bd7d-0204f1f180ba')
@test.services('compute', 'network')
def test_server_basicops(self):
self.add_keypair()
self.security_group = self._create_security_group()
self.boot_instance()
self.verify_ssh()
self.servers_client.delete_server(self.instance['id'])
| rzarzynski/tempest | tempest/scenario/test_server_basic_ops.py | Python | apache-2.0 | 4,155 |
# Copyright 2015 Metaswitch Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from netaddr import IPAddress, IPNetwork
import json
import logging
from pycalico import PyCalicoError
from pycalico.util import get_hostname
_log = logging.getLogger(__name__)
_log.addHandler(logging.NullHandler())
BITS_BY_VERSION = {4: 32, 6: 128}
BLOCK_SIZE_BITS = 6
BLOCK_PREFIXLEN = {4: 32 - BLOCK_SIZE_BITS,
6: 128 - BLOCK_SIZE_BITS}
BLOCK_SIZE = 2 ** BLOCK_SIZE_BITS
PREFIX_MASK = {4: (IPAddress("255.255.255.255") ^ (BLOCK_SIZE - 1)),
6: (IPAddress("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff") ^
(BLOCK_SIZE - 1))}
class AllocationBlock(object):
"""
A block of IP addresses from which to allocate for IPAM clients.
Blocks are identified by IP prefix. Each block is a single, keyed object
in etcd and the value of the block object in the datastore encodes all the
allocations for all the IP addresses within that prefix.
Thus, allocations and releases of IP addresses correspond to changes in the
block's value. Compare-and-swap atomicity is used to ensure allocations
and releases are consistent operations.
If another process updates the Block in the data store, then we will fail
to write this one. The owning code will need to
- drop the invalid instance,
- re-read a new instance from the data store,
- recompute the required modifications, and
- try the compare-and-swap operation again.
"""
CIDR = "cidr"
AFFINITY = "affinity"
HOST_AFFINITY_T = "host:%s"
ALLOCATIONS = "allocations"
ATTRIBUTES = "attributes"
ATTR_HANDLE_ID = "handle_id"
ATTR_SECONDARY = "secondary"
def __init__(self, cidr_prefix, host_affinity):
assert isinstance(cidr_prefix, IPNetwork)
assert cidr_prefix.cidr == cidr_prefix
# Make sure the block is the right size.
assert cidr_prefix.prefixlen == (BLOCK_PREFIXLEN[cidr_prefix.version])
self.cidr = cidr_prefix
self.db_result = None
self.host_affinity = host_affinity
"""
Both to minimize collisions, where multiple hosts attempt to change a
single block, and to support route aggregation, each block has affinity
to a single Calico host. That host does not hold exclusive rights to
modify the block; any host may still do that. The host with affinity
simply uses the block as the place where it first searches if the user
asked to have the IP assigned automatically.
"""
self.allocations = [None] * BLOCK_SIZE
"""
A fixed length array with one entry for every address in the block.
None means unallocated. A non-negative integer indicates the address
is allocated, and is the index into the `attributes` array for the
attributes assigned to the allocation.
"""
self.attributes = []
"""
List of dictionaries of attributes for allocations.
Each has the format:
{
            ATTR_HANDLE_ID: <handle ID>,
ATTR_SECONDARY: {...}
}
"""
def to_json(self):
"""
Convert to a JSON representation for writing to etcd.
"""
json_dict = {AllocationBlock.CIDR: str(self.cidr),
AllocationBlock.AFFINITY:
AllocationBlock.HOST_AFFINITY_T % self.host_affinity,
AllocationBlock.ALLOCATIONS: self.allocations,
AllocationBlock.ATTRIBUTES: self.attributes}
return json.dumps(json_dict)
@classmethod
def from_etcd_result(cls, etcd_result):
"""
Convert a JSON representation into an instance of AllocationBlock.
"""
json_dict = json.loads(etcd_result.value)
cidr_prefix = IPNetwork(json_dict[AllocationBlock.CIDR])
# Parse out the host. For now, it's in the form host:<hostname>
affinity = json_dict[AllocationBlock.AFFINITY]
assert affinity[:5] == "host:"
host_affinity = affinity[5:]
block = cls(cidr_prefix, host_affinity)
block.db_result = etcd_result
# Process & check allocations
allocations = json_dict[AllocationBlock.ALLOCATIONS]
assert len(allocations) == BLOCK_SIZE
block.allocations = allocations
# Process & check attributes
attributes = json_dict[AllocationBlock.ATTRIBUTES]
block.attributes = attributes
assert (block._verify_attributes())
return block
def update_result(self):
"""
Return the EtcdResult with any changes to the object written to
result.value.
:return:
"""
self.db_result.value = self.to_json()
return self.db_result
def auto_assign(self, num, handle_id, attributes, affinity_check=True):
"""
Automatically pick and assign the given number of IP addresses.
:param num: Number of addresses to request
:param handle_id: allocation handle ID for this request. You can
query this key using get_assignments_by_handle() or release all
addresses with this key using release_by_handle().
:param attributes: Contents of this dict will be stored with the
assignment and can be queried using get_assignment_attributes(). Must
be JSON serializable.
:param affinity_check: If true, verify that this block's affinity is
this host and throw a NoHostAffinityWarning if it isn't. Set to false
to disable this check.
:return: List of assigned addresses. When the block is at or near
full, this method may return fewer than requested IPs.
"""
assert num >= 0
if affinity_check and get_hostname() != self.host_affinity:
raise NoHostAffinityWarning("Host affinity is %s" %
self.host_affinity)
ordinals = []
# Walk the allocations until we find enough.
for o in xrange(BLOCK_SIZE):
if len(ordinals) == num:
break
if self.allocations[o] is None:
ordinals.append(o)
ips = []
if ordinals:
# We found some addresses, now we need to set up attributes.
attr_index = self._find_or_add_attrs(handle_id, attributes)
# Perform the allocation.
for o in ordinals:
assert self.allocations[o] is None
self.allocations[o] = attr_index
# Convert ordinal to IP.
ip = IPAddress(self.cidr.first + o, version=self.cidr.version)
ips.append(ip)
return ips
def assign(self, address, handle_id, attributes):
"""
Assign the given address. Throws AlreadyAssignedError if the address
is taken.
:param address: IPAddress to assign.
:param handle_id: allocation handle ID for this request. You can
query this key using get_assignments_by_handle() or release all addresses
with this key using release_by_handle().
:param attributes: Contents of this dict will be stored with the
assignment and can be queried using get_assignment_attributes(). Must
be JSON serializable.
:return: None.
"""
assert isinstance(address, IPAddress)
# Convert to an ordinal
ordinal = int(address - self.cidr.first)
        assert 0 <= ordinal < BLOCK_SIZE, "Address not in block."
# Check if allocated
if self.allocations[ordinal] is not None:
raise AlreadyAssignedError("%s is already assigned in block %s" % (
address, self.cidr))
# Set up attributes
attr_index = self._find_or_add_attrs(handle_id, attributes)
self.allocations[ordinal] = attr_index
return
def count_free_addresses(self):
"""
Count the number of free addresses in this block.
:return: Number of free addresses.
"""
count = 0
for a in self.allocations:
if a is None:
count += 1
return count
def release(self, addresses):
"""
Release the given addresses.
:param addresses: Set of IPAddresses to release.
:return: (unallocated, handles_with_counts) Where:
        - unallocated is a set of IPAddresses. If any of the requested
addresses were not allocated, they are returned so the caller can
handle appropriately.
- handles_with_counts is a dictionary of handle_ids and the number of
addresses released for that handle. They are returned so the
caller can decrement the affected handles.
"""
assert isinstance(addresses, (set, frozenset))
deleting_ref_counts = {}
ordinals = []
unallocated = set()
handles_with_counts = {}
for address in addresses:
assert isinstance(address, IPAddress)
# Convert to an ordinal
ordinal = int(address - self.cidr.first)
            assert 0 <= ordinal < BLOCK_SIZE, "Address not in block."
# Check if allocated
attr_idx = self.allocations[ordinal]
if attr_idx is None:
_log.warning("Asked to release %s in block %s, but it was not "
"allocated.", address, self.cidr)
unallocated.add(address)
continue
ordinals.append(ordinal)
old_count = deleting_ref_counts.get(attr_idx, 0)
deleting_ref_counts[attr_idx] = old_count + 1
# Increment our count of addresses by handle.
            handle_id = (
                self.attributes[attr_idx][AllocationBlock.ATTR_HANDLE_ID])
handle_count = handles_with_counts.setdefault(handle_id, 0)
handle_count += 1
handles_with_counts[handle_id] = handle_count
# Compute which attributes need to be cleaned up. We do this by
# reference counting. If we're deleting all the references, then it
# needs to be cleaned up.
attr_indexes_to_delete = set()
ref_counts = self._get_attribute_ref_counts()
for idx, refs in deleting_ref_counts.iteritems():
if ref_counts[idx] == refs:
attr_indexes_to_delete.add(idx)
# Delete attributes if necessary
if attr_indexes_to_delete:
self._delete_attributes(attr_indexes_to_delete, ordinals)
# All attributes updated. Finally, release all the requested
        # addresses.
for ordinal in ordinals:
self.allocations[ordinal] = None
return unallocated, handles_with_counts
def release_by_handle(self, handle_id):
"""
Release all addresses with the given handle ID.
:param handle_id: The handle ID to release.
:return: Number of addresses released.
"""
attr_indexes_to_delete = self._get_attr_indexes_by_handle(handle_id)
if attr_indexes_to_delete:
# Get the ordinals of IPs to release
ordinals = []
for o in xrange(BLOCK_SIZE):
if self.allocations[o] in attr_indexes_to_delete:
ordinals.append(o)
# Clean and renumber remaining attributes.
self._delete_attributes(attr_indexes_to_delete, ordinals)
# Release the addresses.
for ordinal in ordinals:
self.allocations[ordinal] = None
return len(ordinals)
else:
# Nothing to release.
return 0
def get_ip_assignments_by_handle(self, handle_id):
"""
Get the IP Addresses assigned to a particular handle.
:param handle_id: The handle ID to search for.
:return: List of IPAddress objects.
"""
attr_indexes = self._get_attr_indexes_by_handle(handle_id)
ips = []
for o in xrange(BLOCK_SIZE):
if self.allocations[o] in attr_indexes:
ip = IPAddress(self.cidr.first + o,
version=self.cidr.version)
ips.append(ip)
return ips
def get_attributes_for_ip(self, address):
"""
Get the attributes and handle ID for an IP address.
:param address: The IPAddress object to query.
:return: (handle_id, attributes)
"""
assert isinstance(address, IPAddress)
# Convert to an ordinal
ordinal = int(address - self.cidr.first)
        assert 0 <= ordinal < BLOCK_SIZE, "Address not in block."
# Check if allocated
attr_index = self.allocations[ordinal]
if attr_index is None:
raise AddressNotAssignedError("%s is not assigned in block %s" % (
address, self.cidr))
else:
# Allocated. Look up attributes.
assert isinstance(attr_index, int)
attr = self.attributes[attr_index]
return (attr[AllocationBlock.ATTR_HANDLE_ID],
attr[AllocationBlock.ATTR_SECONDARY])
def _get_attr_indexes_by_handle(self, handle_id):
"""
Get the attribute indexes for a given handle.
:param handle_id: The handle ID to search for.
:return: List of attribute indexes.
"""
attr_indexes = []
for ii, attr in enumerate(self.attributes):
if attr[AllocationBlock.ATTR_HANDLE_ID] == handle_id:
attr_indexes.append(ii)
return attr_indexes
def _delete_attributes(self, attr_indexes_to_delete, ordinals):
"""
Delete some attributes (used during release processing).
This removes the attributes from the self.attributes list, and updates
the allocation list with the new indexes.
:param attr_indexes_to_delete: set of indexes of attributes to delete
:param ordinals: list of ordinals of IPs to release (for debugging)
:return: None.
"""
new_indexes = range(len(self.attributes))
new_attributes = []
y = 0 # next free slot in new attributes list.
for x in xrange(len(self.attributes)):
if x in attr_indexes_to_delete:
# current attr at x being deleted.
new_indexes[x] = None
else:
# current attr at x is kept.
new_indexes[x] = y
y += 1
new_attributes.append(self.attributes[x])
self.attributes = new_attributes
# Spin through all the allocations and update indexes
for i in xrange(BLOCK_SIZE):
if self.allocations[i] is not None:
new_index = new_indexes[self.allocations[i]]
self.allocations[i] = new_index
# If the new index is None, we better be releasing that
# address
assert new_index is not None or i in ordinals
def _get_attribute_ref_counts(self):
"""
Walk the allocations and get a dictionary of reference counts to each
set of attributes.
"""
ref_counts = {}
for a in self.allocations:
old_counts = ref_counts.get(a, 0)
ref_counts[a] = old_counts + 1
return ref_counts
def _find_or_add_attrs(self, primary_key, attributes):
"""
Check if the key and attributes match existing and return the index, or
if they don't exist, add them and return the index.
"""
assert json.dumps(attributes), \
"Attributes aren't JSON serializable."
attr = {AllocationBlock.ATTR_HANDLE_ID: primary_key,
AllocationBlock.ATTR_SECONDARY: attributes}
attr_index = None
for index, exist_attr in enumerate(self.attributes):
if cmp(attr, exist_attr) == 0:
attr_index = index
break
if attr_index is None:
# Attributes are new, add them.
attr_index = len(self.attributes)
self.attributes.append(attr)
return attr_index
def _verify_attributes(self):
"""
Verify the integrity of attribute & allocations.
This is a debug-only function to detect errors.
"""
attr_indexes = set(self.allocations)
max_attr = max(attr_indexes)
if max_attr is None:
# Empty block. Just assert empty attrs and exit.
assert len(self.attributes) == 0
return True
# All attributes present?
assert len(self.attributes) == max_attr + 1
# All attributes actually used?
for x in xrange(max_attr + 1):
assert x in attr_indexes
# All assignments point to attributes or None.
for assignment in self.allocations:
assert assignment is None or isinstance(assignment, int)
return True
def get_block_cidr_for_address(address):
"""
Get the block ID to which a given address belongs.
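    For example, with 64-address blocks, IPAddress("10.11.12.70") belongs
    to block 10.11.12.64/26.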
:param address: IPAddress
"""
prefix = PREFIX_MASK[address.version] & address
block_id = "%s/%s" % (prefix, BLOCK_PREFIXLEN[address.version])
return IPNetwork(block_id)
class BlockError(PyCalicoError):
"""
Base exception class for AllocationBlocks.
"""
pass
class NoHostAffinityWarning(BlockError):
"""
Tried to auto-assign in a block this host didn't own. This exception can
be explicitly disabled.
"""
pass
class AlreadyAssignedError(BlockError):
"""
Tried to assign an address, but the address is already taken.
"""
pass
class AddressNotAssignedError(BlockError):
"""
Tried to query an address that isn't assigned.
"""
pass
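# --- Illustrative sketch (assumptions flagged below) -------------------------
# The AllocationBlock docstring describes the compare-and-swap retry loop
# that callers must implement. The helper below is a minimal sketch of that
# loop; `etcd_client` is a hypothetical object exposing read(key) and
# update(result) with CAS semantics, and `cas_error` stands in for whatever
# exception the real client raises when the compare-and-swap fails.
def _example_cas_assign(etcd_client, key, address, handle_id, cas_error):
    """Assign `address` in its block, retrying on concurrent modification."""
    while True:
        # Always work on a fresh copy of the block.
        block = AllocationBlock.from_etcd_result(etcd_client.read(key))
        block.assign(address, handle_id, {})
        try:
            # update_result() re-serialises the block into the EtcdResult,
            # so the write fails if another host changed it since the read.
            etcd_client.update(block.update_result())
            return
        except cas_error:
            continue  # drop the stale instance, re-read and recompute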
| alexhersh/libcalico | calico_containers/pycalico/block.py | Python | apache-2.0 | 18,415 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers
from google.api_core import operations_v1
from google.api_core import gapic_v1
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.domains_v1beta1.types import domains
from google.longrunning import operations_pb2 # type: ignore
from .base import DomainsTransport, DEFAULT_CLIENT_INFO
class DomainsGrpcTransport(DomainsTransport):
"""gRPC backend transport for Domains.
The Cloud Domains API enables management and configuration of
domain names.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(
self,
*,
host: str = "domains.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client: Optional[operations_v1.OperationsClient] = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "domains.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Quick check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
# Return the client from cache.
return self._operations_client
@property
def search_domains(
self,
) -> Callable[[domains.SearchDomainsRequest], domains.SearchDomainsResponse]:
r"""Return a callable for the search domains method over gRPC.
Searches for available domain names similar to the provided
query.
Availability results from this method are approximate; call
``RetrieveRegisterParameters`` on a domain before registering to
confirm availability.
Returns:
Callable[[~.SearchDomainsRequest],
~.SearchDomainsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "search_domains" not in self._stubs:
self._stubs["search_domains"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/SearchDomains",
request_serializer=domains.SearchDomainsRequest.serialize,
response_deserializer=domains.SearchDomainsResponse.deserialize,
)
return self._stubs["search_domains"]
@property
def retrieve_register_parameters(
self,
) -> Callable[
[domains.RetrieveRegisterParametersRequest],
domains.RetrieveRegisterParametersResponse,
]:
r"""Return a callable for the retrieve register parameters method over gRPC.
Gets parameters needed to register a new domain name, including
price and up-to-date availability. Use the returned values to
call ``RegisterDomain``.
Returns:
Callable[[~.RetrieveRegisterParametersRequest],
~.RetrieveRegisterParametersResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "retrieve_register_parameters" not in self._stubs:
self._stubs["retrieve_register_parameters"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/RetrieveRegisterParameters",
request_serializer=domains.RetrieveRegisterParametersRequest.serialize,
response_deserializer=domains.RetrieveRegisterParametersResponse.deserialize,
)
return self._stubs["retrieve_register_parameters"]
@property
def register_domain(
self,
) -> Callable[[domains.RegisterDomainRequest], operations_pb2.Operation]:
r"""Return a callable for the register domain method over gRPC.
Registers a new domain name and creates a corresponding
``Registration`` resource.
Call ``RetrieveRegisterParameters`` first to check availability
of the domain name and determine parameters like price that are
needed to build a call to this method.
A successful call creates a ``Registration`` resource in state
``REGISTRATION_PENDING``, which resolves to ``ACTIVE`` within
1-2 minutes, indicating that the domain was successfully
registered. If the resource ends up in state
``REGISTRATION_FAILED``, it indicates that the domain was not
registered successfully, and you can safely delete the resource
and retry registration.
Returns:
Callable[[~.RegisterDomainRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "register_domain" not in self._stubs:
self._stubs["register_domain"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/RegisterDomain",
request_serializer=domains.RegisterDomainRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["register_domain"]
@property
def retrieve_transfer_parameters(
self,
) -> Callable[
[domains.RetrieveTransferParametersRequest],
domains.RetrieveTransferParametersResponse,
]:
r"""Return a callable for the retrieve transfer parameters method over gRPC.
Gets parameters needed to transfer a domain name from another
registrar to Cloud Domains. For domains managed by Google
Domains, transferring to Cloud Domains is not supported.
Use the returned values to call ``TransferDomain``.
Returns:
Callable[[~.RetrieveTransferParametersRequest],
~.RetrieveTransferParametersResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "retrieve_transfer_parameters" not in self._stubs:
self._stubs["retrieve_transfer_parameters"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/RetrieveTransferParameters",
request_serializer=domains.RetrieveTransferParametersRequest.serialize,
response_deserializer=domains.RetrieveTransferParametersResponse.deserialize,
)
return self._stubs["retrieve_transfer_parameters"]
@property
def transfer_domain(
self,
) -> Callable[[domains.TransferDomainRequest], operations_pb2.Operation]:
r"""Return a callable for the transfer domain method over gRPC.
Transfers a domain name from another registrar to Cloud Domains.
For domains managed by Google Domains, transferring to Cloud
Domains is not supported.
Before calling this method, go to the domain's current registrar
to unlock the domain for transfer and retrieve the domain's
transfer authorization code. Then call
``RetrieveTransferParameters`` to confirm that the domain is
unlocked and to get values needed to build a call to this
method.
A successful call creates a ``Registration`` resource in state
``TRANSFER_PENDING``. It can take several days to complete the
transfer process. The registrant can often speed up this process
by approving the transfer through the current registrar, either
by clicking a link in an email from the registrar or by visiting
the registrar's website.
A few minutes after transfer approval, the resource transitions
to state ``ACTIVE``, indicating that the transfer was
successful. If the transfer is rejected or the request expires
without being approved, the resource can end up in state
``TRANSFER_FAILED``. If transfer fails, you can safely delete
the resource and retry the transfer.
Returns:
Callable[[~.TransferDomainRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "transfer_domain" not in self._stubs:
self._stubs["transfer_domain"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/TransferDomain",
request_serializer=domains.TransferDomainRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["transfer_domain"]
@property
def list_registrations(
self,
) -> Callable[
[domains.ListRegistrationsRequest], domains.ListRegistrationsResponse
]:
r"""Return a callable for the list registrations method over gRPC.
Lists the ``Registration`` resources in a project.
Returns:
Callable[[~.ListRegistrationsRequest],
~.ListRegistrationsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_registrations" not in self._stubs:
self._stubs["list_registrations"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/ListRegistrations",
request_serializer=domains.ListRegistrationsRequest.serialize,
response_deserializer=domains.ListRegistrationsResponse.deserialize,
)
return self._stubs["list_registrations"]
@property
def get_registration(
self,
) -> Callable[[domains.GetRegistrationRequest], domains.Registration]:
r"""Return a callable for the get registration method over gRPC.
Gets the details of a ``Registration`` resource.
Returns:
Callable[[~.GetRegistrationRequest],
~.Registration]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_registration" not in self._stubs:
self._stubs["get_registration"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/GetRegistration",
request_serializer=domains.GetRegistrationRequest.serialize,
response_deserializer=domains.Registration.deserialize,
)
return self._stubs["get_registration"]
@property
def update_registration(
self,
) -> Callable[[domains.UpdateRegistrationRequest], operations_pb2.Operation]:
r"""Return a callable for the update registration method over gRPC.
Updates select fields of a ``Registration`` resource, notably
``labels``. To update other fields, use the appropriate custom
update method:
- To update management settings, see
``ConfigureManagementSettings``
- To update DNS configuration, see ``ConfigureDnsSettings``
- To update contact information, see
``ConfigureContactSettings``
Returns:
Callable[[~.UpdateRegistrationRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_registration" not in self._stubs:
self._stubs["update_registration"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/UpdateRegistration",
request_serializer=domains.UpdateRegistrationRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_registration"]
@property
def configure_management_settings(
self,
) -> Callable[
[domains.ConfigureManagementSettingsRequest], operations_pb2.Operation
]:
r"""Return a callable for the configure management settings method over gRPC.
Updates a ``Registration``'s management settings.
Returns:
Callable[[~.ConfigureManagementSettingsRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "configure_management_settings" not in self._stubs:
self._stubs[
"configure_management_settings"
] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/ConfigureManagementSettings",
request_serializer=domains.ConfigureManagementSettingsRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["configure_management_settings"]
@property
def configure_dns_settings(
self,
) -> Callable[[domains.ConfigureDnsSettingsRequest], operations_pb2.Operation]:
r"""Return a callable for the configure dns settings method over gRPC.
Updates a ``Registration``'s DNS settings.
Returns:
Callable[[~.ConfigureDnsSettingsRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "configure_dns_settings" not in self._stubs:
self._stubs["configure_dns_settings"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/ConfigureDnsSettings",
request_serializer=domains.ConfigureDnsSettingsRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["configure_dns_settings"]
@property
def configure_contact_settings(
self,
) -> Callable[[domains.ConfigureContactSettingsRequest], operations_pb2.Operation]:
r"""Return a callable for the configure contact settings method over gRPC.
Updates a ``Registration``'s contact settings. Some changes
        require confirmation by the domain's registrant contact.
Returns:
Callable[[~.ConfigureContactSettingsRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "configure_contact_settings" not in self._stubs:
self._stubs["configure_contact_settings"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/ConfigureContactSettings",
request_serializer=domains.ConfigureContactSettingsRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["configure_contact_settings"]
@property
def export_registration(
self,
) -> Callable[[domains.ExportRegistrationRequest], operations_pb2.Operation]:
r"""Return a callable for the export registration method over gRPC.
Exports a ``Registration`` resource, such that it is no longer
managed by Cloud Domains.
When an active domain is successfully exported, you can continue
to use the domain in `Google
Domains <https://domains.google/>`__ until it expires. The
calling user becomes the domain's sole owner in Google Domains,
and permissions for the domain are subsequently managed there.
The domain does not renew automatically unless the new owner
sets up billing in Google Domains.
Returns:
Callable[[~.ExportRegistrationRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "export_registration" not in self._stubs:
self._stubs["export_registration"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/ExportRegistration",
request_serializer=domains.ExportRegistrationRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["export_registration"]
@property
def delete_registration(
self,
) -> Callable[[domains.DeleteRegistrationRequest], operations_pb2.Operation]:
r"""Return a callable for the delete registration method over gRPC.
Deletes a ``Registration`` resource.
This method works on any ``Registration`` resource using
`Subscription or Commitment
billing </domains/pricing#billing-models>`__, provided that the
resource was created at least 1 day in the past.
For ``Registration`` resources using `Monthly
billing </domains/pricing#billing-models>`__, this method works
if:
- ``state`` is ``EXPORTED`` with ``expire_time`` in the past
- ``state`` is ``REGISTRATION_FAILED``
- ``state`` is ``TRANSFER_FAILED``
When an active registration is successfully deleted, you can
continue to use the domain in `Google
Domains <https://domains.google/>`__ until it expires. The
calling user becomes the domain's sole owner in Google Domains,
and permissions for the domain are subsequently managed there.
The domain does not renew automatically unless the new owner
sets up billing in Google Domains.
Returns:
Callable[[~.DeleteRegistrationRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_registration" not in self._stubs:
self._stubs["delete_registration"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/DeleteRegistration",
request_serializer=domains.DeleteRegistrationRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_registration"]
@property
def retrieve_authorization_code(
self,
) -> Callable[
[domains.RetrieveAuthorizationCodeRequest], domains.AuthorizationCode
]:
r"""Return a callable for the retrieve authorization code method over gRPC.
Gets the authorization code of the ``Registration`` for the
purpose of transferring the domain to another registrar.
You can call this method only after 60 days have elapsed since
the initial domain registration.
Returns:
Callable[[~.RetrieveAuthorizationCodeRequest],
~.AuthorizationCode]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "retrieve_authorization_code" not in self._stubs:
self._stubs["retrieve_authorization_code"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/RetrieveAuthorizationCode",
request_serializer=domains.RetrieveAuthorizationCodeRequest.serialize,
response_deserializer=domains.AuthorizationCode.deserialize,
)
return self._stubs["retrieve_authorization_code"]
@property
def reset_authorization_code(
self,
) -> Callable[[domains.ResetAuthorizationCodeRequest], domains.AuthorizationCode]:
r"""Return a callable for the reset authorization code method over gRPC.
Resets the authorization code of the ``Registration`` to a new
random string.
You can call this method only after 60 days have elapsed since
the initial domain registration.
Returns:
Callable[[~.ResetAuthorizationCodeRequest],
~.AuthorizationCode]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "reset_authorization_code" not in self._stubs:
self._stubs["reset_authorization_code"] = self.grpc_channel.unary_unary(
"/google.cloud.domains.v1beta1.Domains/ResetAuthorizationCode",
request_serializer=domains.ResetAuthorizationCodeRequest.serialize,
response_deserializer=domains.AuthorizationCode.deserialize,
)
return self._stubs["reset_authorization_code"]
def close(self):
self.grpc_channel.close()
__all__ = ("DomainsGrpcTransport",)
| googleapis/python-domains | google/cloud/domains_v1beta1/services/domains/transports/grpc.py | Python | apache-2.0 | 33,443 |
# Make sure to include any Metrics you want exported below!
| great-expectations/great_expectations | contrib/capitalone_dataprofiler_expectations/capitalone_dataprofiler_expectations/metrics/__init__.py | Python | apache-2.0 | 61 |
# Copyright 2016 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The module contains the / endpoint object for Arestor API."""
import os
import cherrypy
from arestor.api import base as api_base
from arestor.api import v1 as api_v1
from arestor import config as arestor_config
CONFIG = arestor_config.CONFIG
class Root(api_base.BaseAPI):
"""The / endpoint for the Arestor API."""
resources = [("v1", api_v1.ArestorV1)]
@classmethod
def config(cls):
"""Prepare the configurations for the current metadata service."""
return {
'global': {
'server.socket_host': CONFIG.api.host,
'server.socket_port': CONFIG.api.port,
'environment': CONFIG.api.environment,
'log.screen': False,
'log.error_file': os.path.join(CONFIG.log_dir or "",
"arestor-api-error.log"),
'server.thread_pool': CONFIG.api.thread_pool,
},
'/': {
'request.dispatch': cherrypy.dispatch.MethodDispatcher()
}
}
| alexcoman/arestor | arestor/api/__init__.py | Python | apache-2.0 | 1,680 |
#
# Copyright (c) 2013-2016 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
""" Helpers for python 2 and python 3 compatibility
This file should be imported in all modules
"""
import sys
import time
if sys.version_info >= (3,):
str = str
unicode = str
bytes = bytes
basestring = (str, bytes)
else:
str = str
unicode = unicode
bytes = str
basestring = basestring
def timestamp():
""" On some systems, time.time() returns an int
instead of a float. This function always returns a float
rounded at 2 decimals
:rtype: float
:return: the current timestamp
"""
return round(time.time(), 2)
| hirokihamasaki/irma | common/common/compat.py | Python | apache-2.0 | 1,100 |
import sys
from JumpScale import j
import JumpScale.lib.ms1
import JumpScale.baselib.redis
redis = j.clients.redis.getRedisClient("localhost", 9999)
secret = j.tools.ms1.getCloudspaceSecret(
    login=j.console.askString("ms1 login"),
    password=j.console.askString("ms1 passwd"),
    cloudspace_name=j.console.askString("cloudspace name", defaultparam="default"),
    location=j.console.askString("location (ca1,us2)", defaultparam="ca1"))
redis.set("ms1:secret", secret)
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from dataclasses import dataclass
from pants.backend.go.go_sources import load_go_binary
from pants.backend.go.go_sources.load_go_binary import LoadedGoBinary, LoadedGoBinaryRequest
from pants.backend.go.subsystems.golang import GoRoot
from pants.engine.fs import Digest
from pants.engine.internals.selectors import Get
from pants.engine.rules import collect_rules, rule
@dataclass(frozen=True)
class PackageAnalyzerSetup:
digest: Digest
path: str
@rule
async def setup_go_package_analyzer(goroot: GoRoot) -> PackageAnalyzerSetup:
binary_path = "./package_analyzer"
sources = (
"main.go",
"read.go",
"build_context.go",
"string_utils.go",
"syslist.go",
"tags.go1.17.go" if goroot.is_compatible_version("1.17") else "tags.go",
)
binary = await Get(
LoadedGoBinary,
LoadedGoBinaryRequest(
"analyze_package",
sources,
binary_path,
),
)
return PackageAnalyzerSetup(
digest=binary.digest,
path=binary_path,
)
def rules():
return (
*collect_rules(),
*load_go_binary.rules(),
)
| pantsbuild/pants | src/python/pants/backend/go/util_rules/pkg_analyzer.py | Python | apache-2.0 | 1,294 |
def my_function(a, b):
"""Returns a * b.
>>> my_function(['A', 'B'], 3) #doctest: +NORMALIZE_WHITESPACE
['A', 'B',
'A', 'B',
'A', 'B']
This does not match because of the extra space after the [ in
the list.
>>> my_function(['A', 'B'], 2) #doctest: +NORMALIZE_WHITESPACE
[ 'A', 'B',
'A', 'B', ]
"""
return a * b
| jasonwee/asus-rt-n14uhp-mrtg | src/lesson_developer_tools/doctest_normalize_whitespace.py | Python | apache-2.0 | 367 |
def permutationIsPalindrome(word):
    # One slot per lowercase letter; ~ toggles each slot between 0 (seen an
    # even number of times so far) and -1 (seen an odd number of times).
    alphabet = [0]*26
    for letter in word:
        alphabet[translateLetterInIx(letter)] = ~alphabet[translateLetterInIx(letter)]
    # sum(alphabet) equals minus the number of odd-count letters; a
    # palindrome permutation allows at most one such letter.
    if sum(alphabet) < -1:
        return False
    else:
        return True
def translateLetterInIx(letter):
    # Map 'a'..'z' to 0..25 (assumes lowercase ASCII input)
    return ord(letter) - 97
print(permutationIsPalindrome("civic"))#True
print(permutationIsPalindrome("ivicc"))#True
print(permutationIsPalindrome("civil"))#False
print(permutationIsPalindrome("livci"))#False
# IC (Interview Cake) approach
def permutationIsPalindrome2(word):
    # Track the letters seen an odd number of times: seeing a letter again
    # removes it, so the set always holds exactly the odd-count letters.
    characters = set()
    for letter in word:
        if letter in characters:
            characters.remove(letter)
        else:
            characters.add(letter)
    # At most one odd-count letter is allowed (the middle of the palindrome).
    if len(characters) > 1:
        return False
    return True
print(permutationIsPalindrome2("civic"))#True
print(permutationIsPalindrome2("ivicc"))#True
print(permutationIsPalindrome2("civil"))#False
print(permutationIsPalindrome2("livci"))#False
| katchengli/tech-interview-prep | interview_cake/ic30.py | Python | apache-2.0 | 964 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2014 Netheos (http://www.netheos.net)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# OAuth2 tokens bootstrapper:
# manually retrieving tokens
# (we usually fetch refresh_tokens, if the provider supports these;
# if not, access_tokens are returned, but with a long lifetime)
#
# This small utility program is to be used for bootstrapping UserCredentialsRepository
from __future__ import absolute_import, unicode_literals, print_function
class OAuth2BootStrapper(object):
"""Utility class to retrieve initial token and populate a UserCredentialsRepository"""
def __init__(self, storage):
self._storage = storage
def do_code_workflow(self):
url, state = self._storage._session_manager.get_authorize_url()
print("Authorize URL:\n\n%s\n\n" % url)
print("Copy paste in browser, authorize, then input full callback URL or authorization code:")
code_or_url = raw_input()
user_credentials = self._storage._session_manager.fetch_user_credentials(code_or_url, state)
# (user_id is still unknown in UserCredentials, so we won't be able to save it yet)
# So at first we have to retrieve user_id:
user_id = self._storage.get_user_id()
print("Retrieved user_id = ", user_id)
user_credentials.user_id = user_id
# By now we can save credentials:
self._storage._session_manager._user_credentials_repository.save(user_credentials)
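# Illustrative bootstrap flow (a sketch; the concrete storage object is
# provider-specific and assumed to expose _session_manager and get_user_id()):
#
#   storage = ...  # e.g. a provider storage wired with a UserCredentialsRepository
#   OAuth2BootStrapper(storage).do_code_workflow()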
| netheosgithub/pcs_api | python/pcs_api/oauth/oauth2_bootstrap.py | Python | apache-2.0 | 1,976 |
#!/usr/bin/env python3
""" 航空器事故数据转换
:author Wang Weiwei <email>[email protected] / [email protected]</email>
:sine 2017/8/20
:version 1.0
"""
import datetime
import gzip
import os
import pickle
import sys
# Magic number at the start of gzip files
GZIP_MAGIC = b"\x1F\x8B"
# The table-driven import_() is used by default; set this flag to True to
# use the verbose if/elif version that is kept below for comparison.
USE_LONG_WINDED_IMPORT_FUNCTION = False
class IncidentError(Exception):
pass
class Incident:
"""航空器事故对象"""
    def __init__(self, report_id, date, airport, aircraft_id, aircraft_type,
                 pilot_percent_hours_on_type, pilot_total_hours, midair,
                 narrative=""):
assert len(report_id) >= 8 and len(report_id.split()) == 1, "Invalid report ID"
self.__report_id = report_id
self.__date = date
self.__airport = airport
self.__aircraft_id = aircraft_id
self.__aircraft_type = aircraft_type
        self.__pilot_percent_hours_on_type = pilot_percent_hours_on_type
self.__pilot_total_hours = pilot_total_hours
self.__midair = midair
self.__narrative = narrative
    @property
    def report_id(self):
        """The incident's unique report ID (read-only)"""
        return self.__report_id
    @property
    def date(self):
        return self.__date
@date.setter
def date(self, date):
assert isinstance(date, datetime.date), "invalid date"
self.__date = date
@property
def pilot_percent_hours_on_type(self):
"""The percentage of total hours flown on this aircraft type"""
return self.__pilot_percent_hours_on_type
@pilot_percent_hours_on_type.setter
def pilot_percent_hours_on_type(self, percent):
assert 0.0 <= percent <= 100.0, "out of range percentage"
self.__pilot_percent_hours_on_type = percent
@property
def pilot_total_hours(self):
"""The total hours this pilot has flown"""
return self.__pilot_total_hours
@pilot_total_hours.setter
def pilot_total_hours(self, hours):
assert hours > 0, "invalid number of hours"
self.__pilot_total_hours = hours
@property
def approximate_hours_on_type(self):
return int(self.__pilot_total_hours *
(self.__pilot_percent_hours_on_type / 100))
@property
def midair(self):
"""Whether the incident involved another aircraft"""
return self.__midair
@midair.setter
def midair(self, value):
assert isinstance(value, bool), "invalid midair value"
self.__midair = value
@property
def airport(self):
"""The incident's airport"""
return self.__airport
@airport.setter
def airport(self, airport):
assert airport and "\n" not in airport, "invalid airport"
self.__airport = airport
@property
def aircraft_id(self):
"""The aircraft ID"""
return self.__aircraft_id
@aircraft_id.setter
def aircraft_id(self, aircraft_id):
assert aircraft_id and "\n" not in aircraft_id, \
"invalid aircraft ID"
self.__aircraft_id = aircraft_id
@property
def aircraft_type(self):
"""The aircraft type"""
return self.__aircraft_type
@aircraft_type.setter
def aircraft_type(self, aircraft_type):
assert aircraft_type and "\n" not in aircraft_type, \
"invalid aircraft type"
self.__aircraft_type = aircraft_type
@property
def narrative(self):
"The incident's narrative"
return self.__narrative
@narrative.setter
def narrative(self, narrative):
self.__narrative = narrative
    def __repr__(self):
        return ("Incident({0.report_id!r}, {0.date!r}, "
                "{0.airport!r}, {0.aircraft_id!r}, "
                "{0.aircraft_type!r}, "
                "{0.pilot_percent_hours_on_type!r}, "
                "{0.pilot_total_hours!r}, {0.midair!r}, "
                "'''{0.narrative}''')".format(self))
class IncidentCollection(dict):
"""
>>> kwargs = dict(report_id="2007061289X")
>>> kwargs["date"] = datetime.date(2007, 6, 12)
>>> kwargs["airport"] = "Los Angeles"
>>> kwargs["aircraft_id"] = "8184XK"
>>> kwargs["aircraft_type"] = "CVS91"
>>> kwargs["pilot_percent_hours_on_type"] = 17.5
>>> kwargs["pilot_total_hours"] = 1258
>>> kwargs["midair"] = False
>>> incidents = IncidentCollection()
>>> incident = Incident(**kwargs)
>>> incidents[incident.report_id] = incident
>>> kwargs["report_id"] = "2007061989K"
>>> kwargs["date"] = datetime.date(2007, 6, 19)
>>> kwargs["pilot_percent_hours_on_type"] = 20
>>> kwargs["pilot_total_hours"] = 17521
>>> incident = Incident(**kwargs)
>>> incidents[incident.report_id] = incident
>>> kwargs["report_id"] = "2007052989V"
>>> kwargs["date"] = datetime.date(2007, 5, 29)
>>> kwargs["pilot_total_hours"] = 1875
>>> incident = Incident(**kwargs)
>>> incidents[incident.report_id] = incident
>>> for incident in incidents.values():
... print(incident.report_id, incident.date.isoformat())
2007052989V 2007-05-29
2007061289X 2007-06-12
2007061989K 2007-06-19
>>> for report_id in reversed(incidents):
... print(report_id, incidents[report_id].date.isoformat())
2007061989K 2007-06-19
2007061289X 2007-06-12
2007052989V 2007-05-29
"""
def values(self):
for report_id in self.keys():
yield self[report_id]
def items(self):
for report_id in self.keys():
yield (report_id, self[report_id])
def __iter__(self):
for report_id in sorted(super().keys()):
yield report_id
keys = __iter__
def __reversed__(self):
for report_id in sorted(super().keys(), reverse=True):
yield report_id
def export(self, filename, writer=None, compress=False):
extension = os.path.splitext(filename)[1].lower()
if extension == ".aix":
if writer == "dom":
return self.export_xml_dom(filename)
elif writer == "etree":
return self.export_xml_etree(filename)
elif writer == "manual":
return self.export_xml_manual(filename)
elif extension == ".ait":
return self.export_text(filename)
elif extension == ".aib":
return self.export_binary(filename, compress)
elif extension == ".aip":
return self.export_pickle(filename, compress)
elif extension in (".htm", ".html"):
return self.export_html(filename)
def import_(self, filename, reader=None):
extension = os.path.splitext(filename)[1].lower()
call = {(".aix", "dom"): self.import_xml_dom,
(".aix", "etree"): self.import_xml_etree,
(".aix", "sax"): self.import_xml_sax,
(".ait", "manual"): self.import_text_manual,
(".ait", "regex"): self.import_text_regex,
(".aib", None): self.import_binary,
(".aip", None): self.import_pickle}
result = call[extension, reader](filename)
if not result:
self.clear()
return result
if USE_LONG_WINDED_IMPORT_FUNCTION:
def import_(self, filename, reader=None):
extension = os.path.splitext(filename)[1].lower()
result = False
if extension == ".aix":
if reader == "dom":
result = self.import_xml_dom(filename)
elif reader == "etree":
result = self.import_xml_etree(filename)
elif reader == "sax":
result = self.import_xml_sax(filename)
elif extension == ".ait":
if reader == "manual":
result = self.import_text_manual(filename)
elif reader == "regex":
result = self.import_text_regex(filename)
elif extension == ".aib":
result = self.import_binary(filename)
elif extension == ".aip":
result = self.import_pickle(filename)
            if not result:
self.clear()
return result
def export_pickle(self, filename, compress=False):
"""将事故文件保存到pickle中,使用protocol3协议"""
fh = None
try:
if compress:
fh = gzip.open(filename, "wb")
else:
fh = open(filename, "wb")
pickle.dump(self, fh, pickle.HIGHEST_PROTOCOL)
return True
except (EnvironmentError, pickle.PicklingError) as err:
print("{0}: export error : {1}".format(os.path.basename(sys.argv[0]), err))
return False
finally:
if fh is not None:
fh.close()
def import_pickle(self, filename):
"""从事故pickle中读取数据"""
fh = None
try:
fh = open(filename, "rb")
magic = fh.read(len(GZIP_MAGIC))
if magic == GZIP_MAGIC:
fh.close()
fh = gzip.open(filename, "rb")
else:
fh.seek(0)
self.clear()
self.update(pickle.load(fh))
return True
except (EnvironmentError, pickle.UnpicklingError) as err:
print("{0}: import error : {1}".format(os.path.basename(sys.argv[0]), err))
return False
finally:
if fh is not None:
fh.close()
| weiwei02/Technical--Documentation | python/src/base/file/convert_incidents.py | Python | apache-2.0 | 9,364 |
# -*- coding: utf-8 -*-
#
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from difflib import SequenceMatcher
from mycroft.util.time import now_local
from mycroft.util.lang.parse_en import *
from mycroft.util.lang.parse_pt import *
from mycroft.util.lang.parse_es import *
from mycroft.util.lang.parse_it import *
from mycroft.util.lang.parse_sv import *
from mycroft.util.lang.parse_de import extractnumber_de
from mycroft.util.lang.parse_de import extract_numbers_de
from mycroft.util.lang.parse_de import extract_datetime_de
from mycroft.util.lang.parse_de import normalize_de
from mycroft.util.lang.parse_fr import extractnumber_fr
from mycroft.util.lang.parse_fr import extract_numbers_fr
from mycroft.util.lang.parse_fr import extract_datetime_fr
from mycroft.util.lang.parse_fr import normalize_fr
from mycroft.util.lang.parse_common import *
from .log import LOG
def fuzzy_match(x, against):
"""Perform a 'fuzzy' comparison between two strings.
Returns:
float: match percentage -- 1.0 for perfect match,
down to 0.0 for no match at all.
"""
return SequenceMatcher(None, x, against).ratio()
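# Illustrative behavior (values approximate, not from the original module):
#   fuzzy_match("kitten", "sitting")  # roughly 0.62
#   fuzzy_match("hello", "hello")     # 1.0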
def match_one(query, choices):
"""
Find best match from a list or dictionary given an input
Arguments:
query: string to test
choices: list or dictionary of choices
Returns: tuple with best match, score
"""
if isinstance(choices, dict):
_choices = list(choices.keys())
elif isinstance(choices, list):
_choices = choices
else:
raise ValueError('a list or dict of choices must be provided')
best = (_choices[0], fuzzy_match(query, _choices[0]))
for c in _choices[1:]:
score = fuzzy_match(query, c)
if score > best[1]:
best = (c, score)
if isinstance(choices, dict):
return (choices[best[0]], best[1])
else:
return best
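# Illustrative usage (assumed inputs): with a list the best string and score
# are returned; with a dict the matched key's value is returned instead:
#   match_one("frank", ["kate", "harry", "frank"])  # -> ("frank", 1.0)
#   match_one("frank", {"frank": 1, "francis": 2})  # -> (1, 1.0)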
def extract_numbers(text, short_scale=True, ordinals=False, lang="en-us"):
"""
Takes in a string and extracts a list of numbers.
Args:
text (str): the string to extract a number from
short_scale (bool): Use "short scale" or "long scale" for large
numbers -- over a million. The default is short scale, which
is now common in most English speaking countries.
See https://en.wikipedia.org/wiki/Names_of_large_numbers
ordinals (bool): consider ordinal numbers, e.g. third=3 instead of 1/3
lang (str): the BCP-47 code for the language to use
Returns:
list: list of extracted numbers as floats
"""
if lang.startswith("en"):
return extract_numbers_en(text, short_scale, ordinals)
elif lang.startswith("de"):
return extract_numbers_de(text, short_scale, ordinals)
elif lang.startswith("fr"):
return extract_numbers_fr(text, short_scale, ordinals)
elif lang.startswith("it"):
return extract_numbers_it(text, short_scale, ordinals)
return []
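# Illustrative (assumed English output): extract_numbers("two beers for
# three people") is expected to return [2.0, 3.0].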
def extract_number(text, short_scale=True, ordinals=False, lang="en-us"):
"""Takes in a string and extracts a number.
Args:
text (str): the string to extract a number from
short_scale (bool): Use "short scale" or "long scale" for large
numbers -- over a million. The default is short scale, which
is now common in most English speaking countries.
See https://en.wikipedia.org/wiki/Names_of_large_numbers
ordinals (bool): consider ordinal numbers, e.g. third=3 instead of 1/3
lang (str): the BCP-47 code for the language to use
Returns:
(int, float or False): The number extracted or False if the input
text contains no numbers
"""
lang_lower = str(lang).lower()
if lang_lower.startswith("en"):
return extractnumber_en(text, short_scale=short_scale,
ordinals=ordinals)
elif lang_lower.startswith("es"):
return extractnumber_es(text)
elif lang_lower.startswith("pt"):
return extractnumber_pt(text)
elif lang_lower.startswith("it"):
return extractnumber_it(text)
elif lang_lower.startswith("fr"):
return extractnumber_fr(text)
elif lang_lower.startswith("sv"):
return extractnumber_sv(text)
elif lang_lower.startswith("de"):
return extractnumber_de(text)
# TODO: extractnumber_xx for other languages
LOG.warning('Language "{}" not recognized! Please make sure your '
'language is one of the following: '
'en, es, pt, it, fr, sv, de.'.format(lang_lower))
return text
def extract_datetime(text, anchorDate=None, lang="en-us", default_time=None):
"""
Extracts date and time information from a sentence. Parses many of the
common ways that humans express dates and times, including relative dates
like "5 days from today", "tomorrow', and "Tuesday".
Vague terminology are given arbitrary values, like:
- morning = 8 AM
- afternoon = 3 PM
- evening = 7 PM
If a time isn't supplied or implied, the function defaults to 12 AM
Args:
text (str): the text to be interpreted
anchorDate (:obj:`datetime`, optional): the date to be used for
relative dating (for example, what does "tomorrow" mean?).
Defaults to the current local date/time.
lang (string): the BCP-47 code for the language to use
default_time (datetime.time): time to use if none was found in
the input string.
Returns:
[:obj:`datetime`, :obj:`str`]: 'datetime' is the extracted date
as a datetime object in the user's local timezone.
'leftover_string' is the original phrase with all date and time
related keywords stripped out. See examples for further
clarification
Returns 'None' if no date or time related text is found.
Examples:
>>> extract_datetime(
... "What is the weather like the day after tomorrow?",
        ... datetime(2017, 6, 30, 0, 0)
... )
[datetime.datetime(2017, 7, 2, 0, 0), 'what is weather like']
>>> extract_datetime(
... "Set up an appointment 2 weeks from Sunday at 5 pm",
        ... datetime(2016, 2, 19, 0, 0)
... )
[datetime.datetime(2016, 3, 6, 17, 0), 'set up appointment']
>>> extract_datetime(
... "Set up an appointment",
        ... datetime(2016, 2, 19, 0, 0)
... )
None
"""
lang_lower = str(lang).lower()
if not anchorDate:
anchorDate = now_local()
if lang_lower.startswith("en"):
return extract_datetime_en(text, anchorDate, default_time)
elif lang_lower.startswith("es"):
return extract_datetime_es(text, anchorDate, default_time)
elif lang_lower.startswith("pt"):
return extract_datetime_pt(text, anchorDate, default_time)
elif lang_lower.startswith("it"):
return extract_datetime_it(text, anchorDate, default_time)
elif lang_lower.startswith("fr"):
return extract_datetime_fr(text, anchorDate, default_time)
elif lang_lower.startswith("sv"):
return extract_datetime_sv(text, anchorDate, default_time)
elif lang_lower.startswith("de"):
return extract_datetime_de(text, anchorDate, default_time)
# TODO: extract_datetime for other languages
LOG.warning('Language "{}" not recognized! Please make sure your '
'language is one of the following: '
'en, es, pt, it, fr, sv, de.'.format(lang_lower))
return text
# ==============================================================
def normalize(text, lang="en-us", remove_articles=True):
"""Prepare a string for parsing
This function prepares the given text for parsing by making
numbers consistent, getting rid of contractions, etc.
Args:
text (str): the string to normalize
lang (str): the code for the language text is in
remove_articles (bool): whether to remove articles (like 'a', or
'the'). True by default.
Returns:
(str): The normalized string.
"""
lang_lower = str(lang).lower()
if lang_lower.startswith("en"):
return normalize_en(text, remove_articles)
elif lang_lower.startswith("es"):
return normalize_es(text, remove_articles)
elif lang_lower.startswith("pt"):
return normalize_pt(text, remove_articles)
elif lang_lower.startswith("it"):
return normalize_it(text, remove_articles)
elif lang_lower.startswith("fr"):
return normalize_fr(text, remove_articles)
elif lang_lower.startswith("sv"):
return normalize_sv(text, remove_articles)
elif lang_lower.startswith("de"):
return normalize_de(text, remove_articles)
# TODO: Normalization for other languages
LOG.warning('Language "{}" not recognized! Please make sure your '
'language is one of the following: '
'en, es, pt, it, fr, sv, de.'.format(lang_lower))
return text
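# Illustrative (assumed English behavior): normalize("this is a test")
# -> "this is test" (article removed); normalize("it's a test")
# -> "it is test" (contraction expanded).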
def get_gender(word, input_string="", lang="en-us"):
    '''
    Guess the grammatical gender of a word, optionally using the raw input
    text for context.
    Returns "m" if the word is masculine, "f" if feminine, False if unknown.
    '''
if "pt" in lang or "es" in lang:
        # Spanish follows the same grammatical-gender rules as Portuguese
return get_gender_pt(word, input_string)
elif "it" in lang:
return get_gender_it(word, input_string)
return False
| linuxipho/mycroft-core | mycroft/util/parse.py | Python | apache-2.0 | 10,108 |
#from: https://www.pyimagesearch.com/2015/03/30/accessing-the-raspberry-pi-camera-with-opencv-and-python/
# import the necessary packages
from picamera.array import PiRGBArray
from picamera import PiCamera
import time
import cv2
# initialize the camera and grab a reference to the raw camera capture
camera = PiCamera()
rawCapture = PiRGBArray(camera)
# allow the camera to warmup
time.sleep(0.1)
# grab an image from the camera
camera.capture(rawCapture, format="bgr")
image = rawCapture.array
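# rawCapture.array is a NumPy uint8 array of shape (height, width, 3) in
# BGR channel order, so it can be passed straight to OpenCV functions.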
# display the image on screen and wait for a keypress
cv2.imshow("Image", image)
cv2.waitKey(0)
| weedge/doubanFmSpeackerPi | test/simpleImage.py | Python | apache-2.0 | 595 |
#MenuTitle: Save Selected Glyphs as PNG
# -*- coding: utf-8 -*-
__doc__="""
Saves selected glyphs as PNGs. Uses ascender and descender for top and bottom edges of the images.
"""
def transform(shiftX=0.0, shiftY=0.0, rotate=0.0, scale=1.0):
"""
Returns an NSAffineTransform object for transforming layers.
"""
myTransform = NSAffineTransform.transform()
if rotate:
myTransform.rotateByDegrees_(rotate)
if scale != 1.0:
myTransform.scaleBy_(scale)
if not (shiftX == 0.0 and shiftY == 0.0):
myTransform.translateXBy_yBy_(shiftX,shiftY)
return myTransform
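# Illustrative use (assumed values): shift a copied path up 100 units at
# half size before drawing:
#   path.transformUsingAffineTransform_(transform(shiftY=100, scale=0.5))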
def saveLayerAsPNG( thisLayer, baseurl ):
# create a bitmap 1u = 1px, width of glyph, from descender to ascender
thisMaster = thisLayer.associatedFontMaster()
offscreenRect = NSMakeRect(
0.0, 0.0, # x y
thisLayer.width, # width
thisMaster.ascender-thisMaster.descender # height
)
bitmap = NSBitmapImageRep.alloc().initWithBitmapDataPlanes_pixelsWide_pixelsHigh_bitsPerSample_samplesPerPixel_hasAlpha_isPlanar_colorSpaceName_bitmapFormat_bytesPerRow_bitsPerPixel_(
None, #BitmapDataPlanes
int(offscreenRect.size.width), #pixelsWide
int(offscreenRect.size.height), #pixelsHigh
8, #bitsPerSample
4, #samplesPerPixel
True, #hasAlpha
False, #isPlanar
NSCalibratedRGBColorSpace, #colorSpaceName
0, #bitmapFormat
int(4 * offscreenRect.size.width), #bytesPerRow
32 #bitsPerPixel
)
# save the current graphics context and lock focus on the bitmap
originalContext = NSGraphicsContext.currentContext()
NSGraphicsContext.setCurrentContext_(NSGraphicsContext.graphicsContextWithBitmapImageRep_(bitmap))
NSGraphicsContext.saveGraphicsState()
# draw the image
NSBezierPath.bezierPathWithRect_(offscreenRect).addClip() # set the rectangle as a clipping path
baselineShift = -thisMaster.descender
shiftTransform = transform(shiftY=baselineShift) # shift from bottom edge (y=0 in PNG) to baseline (y=0 in glyph)
image = thisLayer.bezierPath.copy() # otherwise your glyphs start floating in mid air :)
image.transformUsingAffineTransform_(shiftTransform)
image.fill()
# restore the original graphics context
NSGraphicsContext.restoreGraphicsState()
NSGraphicsContext.setCurrentContext_(originalContext)
# file name (prefix uppercase letters with an underscore to avoid overwrite)
thisGlyph = thisLayer.parent
pngName = thisGlyph.name
if thisGlyph.subCategory == "Uppercase":
pngName = "_%s" % pngName
# construct filepath URL and write the file
fullURL = "%s/%s.png" % (baseurl, pngName)
url = NSURL.fileURLWithPath_( fullURL )
pngData = bitmap.representationUsingType_properties_( NSPNGFileType, None ) # get PNG data from image rep
pngData.writeToURL_options_error_( url, NSDataWritingAtomic, None ) # write
return fullURL # for status message in Macro Window
thisFont = Glyphs.font # frontmost font
selectedLayers = thisFont.selectedLayers # active layers of selected glyphs
saveDir = GetFolder(message="Select a folder for saving the images:", allowsMultipleSelection = False)
# brings macro window to front and clears its log:
Glyphs.clearLog()
Glyphs.showMacroWindow()
print "Saving PNG files:"
for thisLayer in selectedLayers:
print saveLayerAsPNG( thisLayer, saveDir )
| schriftgestalt/Mekka-Scripts | App/Save as PNGs.py | Python | apache-2.0 | 3,198 |
# -*- coding: utf-8 -*-
'''
Installation of NPM Packages
============================
These states manage the installed packages for node.js using the Node Package
Manager (npm). Note that npm must be installed for these states to be
available, so npm states should include a requisite to a pkg.installed state
for the package which provides npm (simply ``npm`` in most cases). Example:
.. code-block:: yaml
    npm:
      pkg.installed
    yaml:
      npm.installed:
        - require:
          - pkg: npm
'''
# Import salt libs
import salt.utils
from salt.exceptions import CommandExecutionError, CommandNotFoundError
def __virtual__():
'''
Only load if the npm module is available in __salt__
'''
return 'npm' if 'npm.list' in __salt__ else False
def installed(name,
dir=None,
runas=None,
user=None,
force_reinstall=False,
**kwargs):
'''
Verify that the given package is installed and is at the correct version
(if specified).
dir
The target directory in which to install the package, or None for
global installation
runas
The user to run NPM with
.. deprecated:: 0.17.0
user
The user to run NPM with
.. versionadded:: 0.17.0
force_reinstall
Install the package even if it is already installed
'''
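    # Illustrative SLS usage (the package, path, and user names below are
    # examples, not taken from this module):
    #
    #     underscore:
    #       npm.installed:
    #         - dir: /var/www/app
    #         - user: deploy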
ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}
salt.utils.warn_until(
'Hydrogen',
'Please remove \'runas\' support at this stage. \'user\' support was '
'added in 0.17.0',
_dont_call_warnings=True
)
if runas:
# Warn users about the deprecation
ret.setdefault('warnings', []).append(
'The \'runas\' argument is being deprecated in favor of \'user\', '
'please update your state files.'
)
if user is not None and runas is not None:
# user wins over runas but let warn about the deprecation.
ret.setdefault('warnings', []).append(
'Passed both the \'runas\' and \'user\' arguments. Please don\'t. '
'\'runas\' is being ignored in favor of \'user\'.'
)
runas = None
elif runas is not None:
# Support old runas usage
user = runas
runas = None
prefix = name.split('@')[0].strip()
try:
installed_pkgs = __salt__['npm.list'](pkg=name, dir=dir)
except (CommandNotFoundError, CommandExecutionError) as err:
ret['result'] = False
ret['comment'] = 'Error installing \'{0}\': {1}'.format(name, err)
return ret
installed_pkgs = dict((p.lower(), info) for p, info in installed_pkgs.items())
if prefix.lower() in installed_pkgs:
if force_reinstall is False:
ret['result'] = True
ret['comment'] = 'Package {0} satisfied by {1}@{2}'.format(
name, prefix, installed_pkgs[prefix.lower()]['version'])
return ret
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'NPM package {0} is set to be installed'.format(name)
return ret
try:
call = __salt__['npm.install'](
pkg=name,
dir=dir,
runas=user
)
except (CommandNotFoundError, CommandExecutionError) as err:
ret['result'] = False
ret['comment'] = 'Error installing \'{0}\': {1}'.format(name, err)
return ret
if call or isinstance(call, list) or isinstance(call, dict):
ret['result'] = True
version = call[0]['version']
pkg_name = call[0]['name']
ret['changes']['{0}@{1}'.format(pkg_name, version)] = 'Installed'
ret['comment'] = 'Package {0} was successfully installed'.format(name)
else:
ret['result'] = False
ret['comment'] = 'Could not install package'
return ret
def removed(name,
dir=None,
runas=None,
user=None,
**kwargs):
'''
Verify that the given package is not installed.
dir
The target directory in which to install the package, or None for
global installation
runas
The user to run NPM with
.. deprecated:: 0.17.0
user
The user to run NPM with
.. versionadded:: 0.17.0
'''
ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}
salt.utils.warn_until(
'Hydrogen',
'Please remove \'runas\' support at this stage. \'user\' support was '
'added in 0.17.0',
_dont_call_warnings=True
)
if runas:
# Warn users about the deprecation
ret.setdefault('warnings', []).append(
'The \'runas\' argument is being deprecated in favor of \'user\', '
'please update your state files.'
)
if user is not None and runas is not None:
# user wins over runas but let warn about the deprecation.
ret.setdefault('warnings', []).append(
'Passed both the \'runas\' and \'user\' arguments. Please don\'t. '
'\'runas\' is being ignored in favor of \'user\'.'
)
runas = None
elif runas is not None:
# Support old runas usage
user = runas
runas = None
try:
installed_pkgs = __salt__['npm.list'](dir=dir)
except (CommandExecutionError, CommandNotFoundError) as err:
ret['result'] = False
ret['comment'] = 'Error uninstalling \'{0}\': {1}'.format(name, err)
return ret
if name not in installed_pkgs:
ret['result'] = True
ret['comment'] = 'Package is not installed.'
return ret
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'Package {0} is set to be removed'.format(name)
return ret
if __salt__['npm.uninstall'](pkg=name, dir=dir, runas=user):
ret['result'] = True
ret['changes'][name] = 'Removed'
ret['comment'] = 'Package was successfully removed.'
else:
ret['result'] = False
ret['comment'] = 'Error removing package.'
return ret
def bootstrap(name,
runas=None,
user=None):
'''
    Bootstraps a node.js application.
    Will execute ``npm install --json`` in the specified directory.
runas
The user to run NPM with
.. deprecated:: 0.17.0
user
The user to run NPM with
.. versionadded:: 0.17.0
'''
ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}
salt.utils.warn_until(
'Hydrogen',
'Please remove \'runas\' support at this stage. \'user\' support was '
'added in 0.17.0',
_dont_call_warnings=True
)
if runas:
# Warn users about the deprecation
ret.setdefault('warnings', []).append(
'The \'runas\' argument is being deprecated in favor of \'user\', '
'please update your state files.'
)
if user is not None and runas is not None:
# user wins over runas but let warn about the deprecation.
ret.setdefault('warnings', []).append(
'Passed both the \'runas\' and \'user\' arguments. Please don\'t. '
'\'runas\' is being ignored in favor of \'user\'.'
)
runas = None
elif runas is not None:
# Support old runas usage
user = runas
runas = None
try:
call = __salt__['npm.install'](dir=name, runas=user, pkg=None)
except (CommandNotFoundError, CommandExecutionError) as err:
ret['result'] = False
ret['comment'] = 'Error Bootstrapping \'{0}\': {1}'.format(name, err)
return ret
# npm.install will return a string if it can't parse a JSON result
if isinstance(call, str):
ret['result'] = False
ret['comment'] = 'Could not bootstrap directory'
else:
ret['result'] = True
        ret['changes'][name] = 'Bootstrapped'
ret['comment'] = 'Directory was successfully bootstrapped'
return ret
| victorywang80/Maintenance | saltstack/src/salt/states/npm.py | Python | apache-2.0 | 8,055 |
import logging
from airflow.configuration import conf
from airflow.executors.base_executor import BaseExecutor
from airflow.executors.local_executor import LocalExecutor
from airflow.executors.sequential_executor import SequentialExecutor
# TODO Fix this emergency fix
try:
    from airflow.executors.celery_executor import CeleryExecutor
except Exception:  # the import may fail for reasons beyond ImportError
    pass
try:
    from airflow.contrib.executors.mesos_executor import MesosExecutor
except Exception:
    pass
from airflow.utils import AirflowException
_EXECUTOR = conf.get('core', 'EXECUTOR')
if _EXECUTOR == 'LocalExecutor':
DEFAULT_EXECUTOR = LocalExecutor()
elif _EXECUTOR == 'CeleryExecutor':
DEFAULT_EXECUTOR = CeleryExecutor()
elif _EXECUTOR == 'SequentialExecutor':
DEFAULT_EXECUTOR = SequentialExecutor()
elif _EXECUTOR == 'MesosExecutor':
DEFAULT_EXECUTOR = MesosExecutor()
else:
# Loading plugins
from airflow.plugins_manager import executors as _executors
for _executor in _executors:
globals()[_executor.__name__] = _executor
if _EXECUTOR in globals():
DEFAULT_EXECUTOR = globals()[_EXECUTOR]()
else:
raise AirflowException("Executor {0} not supported.".format(_EXECUTOR))
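# Illustrative: with ``executor = LocalExecutor`` under ``[core]`` in
# airflow.cfg, DEFAULT_EXECUTOR above ends up bound to a LocalExecutor
# instance.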
logging.info("Using executor " + _EXECUTOR)
| jason-z-hang/airflow | airflow/executors/__init__.py | Python | apache-2.0 | 1,235 |
# -*- coding: utf-8 -*-
# Copyright (2017-2018) Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Python libs
import collections
# Modules own libs
from oneview_redfish_toolkit.api.redfish_json_validator \
import RedfishJsonValidator
import oneview_redfish_toolkit.api.status_mapping as status_mapping
class Manager(RedfishJsonValidator):
"""Creates a Manager Redfish dict
Populates self.redfish with some hardcoded Manager
values and with the response from OneView.
"""
SCHEMA_NAME = 'Manager'
def __init__(self, oneview_appliance_info, oneview_appliance_state,
oneview_appliance_health_status):
"""Manager constructor
Populates self.redfish with some hardcoded Manager
values and with the response from OneView.
Args:
oneview_appliance_info: An Oneview's appliance info dict
oneview_appliance_state: An Oneview's appliance status dict
oneview_appliance_health_status: An Oneview's appliance
health state dict
"""
super().__init__(self.SCHEMA_NAME)
self.redfish["@odata.type"] = self.get_odata_type()
self.redfish["Id"] = oneview_appliance_info['uuid']
self.redfish["Description"] = oneview_appliance_info["family"]
self.redfish["ManagerType"] = "Service"
self.redfish["FirmwareVersion"] = \
oneview_appliance_info["softwareVersion"]
self.redfish["Status"] = collections.OrderedDict()
state = status_mapping.APPLIANCE_STATE_TO_REDFISH_STATE.\
get(oneview_appliance_state["state"])
health = self._get_highest_health_state(
oneview_appliance_health_status["members"])
self.redfish["Status"]["State"] = state
self.redfish["Status"]["Health"] = health
self.redfish["@odata.context"] = \
"/redfish/v1/$metadata#Manager.Manager"
self.redfish["@odata.id"] = \
"/redfish/v1/Managers/" + oneview_appliance_info['uuid']
@staticmethod
def _get_highest_health_state(health_state_members):
health_state_result = None
highest_status = 0
for member in health_state_members:
redfish_health_state = status_mapping.MANAGER_HEALTH_STATE. \
get(member["severity"])
current_status = \
status_mapping.CRITICALITY_STATUS[redfish_health_state]
if current_status > highest_status:
highest_status = current_status
health_state_result = redfish_health_state
return health_state_result
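    # Illustrative (assumed severity mappings): for members whose severities
    # map to "OK" and "Critical", the method returns "Critical", since it
    # carries the higher CRITICALITY_STATUS rank.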
| HewlettPackard/oneview-redfish-toolkit | oneview_redfish_toolkit/api/manager.py | Python | apache-2.0 | 3,184 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
from os import listdir
from os import walk
import os,sys
# Path to the directory to scan
path = '/s3mnt2/all_years/'
# Empty list for the file names
lstFiles = []
# Empty list for directory + file name paths
lstFiles2 = []
# Walk the directory tree to gather all of its files
lstDir = os.walk(path)  # os.walk() yields directories and files
# Define a helper function, called further below
def creandobigfile(nombreFusionar):
    print 'appending: ' + nombreFusionar
    # append this file's rows onto the big combined file
    fuente = open(nombreFusionar, 'rb')
    reader = csv.reader(fuente)
    f = open("todos-crimenes.csv", "a")
    for row in reader:
        # print str(row)
        # join the field list into one comma-separated line,
        # which also drops the list brackets
        cad = ",".join(row)
        f.write(cad + '\n')
    f.close()
    fuente.close()
# Build the list of .csv files that exist in the directory tree.
for root, dirs, files in lstDir:
for fichero in files:
(nombreFichero, extension) = os.path.splitext(fichero)
if(extension == ".csv"):
lstFiles.append(nombreFichero+extension)
lstFiles2.append(root+'/'+nombreFichero+extension)
print 'Files to merge into a single one:'
for item in lstFiles:
print item
print ('LISTING FINISHED')
print "list length = ", len(lstFiles)
# pass the list of full paths (directory plus file name)
for item in lstFiles2:
creandobigfile(item)
# end
| eilarraza/CrimePrevention- | scripts/unionMasiva.py | Python | apache-2.0 | 1,526 |
# Copyright 2021 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import re
import uuid
from typing import List, cast, Any
import numpy as np
import pytest
import cirq
import cirq_google as cg
from cirq_google.workflow.quantum_executable_test import _get_quantum_executables, _get_example_spec
def cg_assert_equivalent_repr(value):
"""cirq.testing.assert_equivalent_repr with cirq_google.workflow imported."""
return cirq.testing.assert_equivalent_repr(
value,
global_vals={
'cirq_google': cg,
},
)
def test_shared_runtime_info():
shared_rtinfo = cg.SharedRuntimeInfo(run_id='my run')
cg_assert_equivalent_repr(shared_rtinfo)
def test_runtime_info():
rtinfo = cg.RuntimeInfo(execution_index=5)
cg_assert_equivalent_repr(rtinfo)
def test_executable_result():
rtinfo = cg.RuntimeInfo(execution_index=5)
er = cg.ExecutableResult(
spec=_get_example_spec(name='test-spec'),
runtime_info=rtinfo,
raw_data=cirq.ResultDict(
params=cirq.ParamResolver(), measurements={'z': np.ones((1_000, 4))}
),
)
cg_assert_equivalent_repr(er)
def _assert_json_roundtrip(o, tmpdir):
cirq.to_json_gzip(o, f'{tmpdir}/o.json')
o2 = cirq.read_json_gzip(f'{tmpdir}/o.json')
assert o == o2
def test_quantum_runtime_configuration():
rt_config = cg.QuantumRuntimeConfiguration(
processor_record=cg.SimulatedProcessorWithLocalDeviceRecord('rainbow'),
run_id='unit-test',
)
sampler = rt_config.processor_record.get_sampler()
result = sampler.run(cirq.Circuit(cirq.measure(cirq.GridQubit(5, 3), key='z')))
assert isinstance(result, cirq.Result)
assert isinstance(rt_config.processor_record.get_device(), cirq.Device)
def test_quantum_runtime_configuration_serialization(tmpdir):
rt_config = cg.QuantumRuntimeConfiguration(
processor_record=cg.SimulatedProcessorWithLocalDeviceRecord('rainbow'),
run_id='unit-test',
)
cg_assert_equivalent_repr(rt_config)
_assert_json_roundtrip(rt_config, tmpdir)
def test_executable_group_result(tmpdir):
egr = cg.ExecutableGroupResult(
runtime_configuration=cg.QuantumRuntimeConfiguration(
processor_record=cg.SimulatedProcessorWithLocalDeviceRecord('rainbow'),
run_id='unit-test',
),
shared_runtime_info=cg.SharedRuntimeInfo(run_id='my run'),
executable_results=[
cg.ExecutableResult(
spec=_get_example_spec(name=f'test-spec-{i}'),
runtime_info=cg.RuntimeInfo(execution_index=i),
raw_data=cirq.ResultDict(
params=cirq.ParamResolver(), measurements={'z': np.ones((1_000, 4))}
),
)
for i in range(3)
],
)
cg_assert_equivalent_repr(egr)
assert len(egr.executable_results) == 3
_assert_json_roundtrip(egr, tmpdir)
def _load_result_by_hand(tmpdir: str, run_id: str) -> cg.ExecutableGroupResult:
"""Load `ExecutableGroupResult` "by hand" without using
`ExecutableGroupResultFilesystemRecord`."""
rt_config = cirq.read_json_gzip(f'{tmpdir}/{run_id}/QuantumRuntimeConfiguration.json.gz')
shared_rt_info = cirq.read_json_gzip(f'{tmpdir}/{run_id}/SharedRuntimeInfo.json.gz')
fns = glob.glob(f'{tmpdir}/{run_id}/ExecutableResult.*.json.gz')
fns = sorted(
fns,
key=lambda s: int(cast(Any, re.search(r'ExecutableResult\.(\d+)\.json\.gz$', s)).group(1)),
)
assert len(fns) == 3
exe_results: List[cg.ExecutableResult] = [cirq.read_json_gzip(fn) for fn in fns]
return cg.ExecutableGroupResult(
runtime_configuration=rt_config,
shared_runtime_info=shared_rt_info,
executable_results=exe_results,
)
@pytest.mark.parametrize('run_id_in', ['unit_test_runid', None])
def test_execute(tmpdir, run_id_in):
rt_config = cg.QuantumRuntimeConfiguration(
processor_record=cg.SimulatedProcessorWithLocalDeviceRecord('rainbow'),
run_id=run_id_in,
qubit_placer=cg.NaiveQubitPlacer(),
)
executable_group = cg.QuantumExecutableGroup(_get_quantum_executables())
returned_exegroup_result = cg.execute(
rt_config=rt_config, executable_group=executable_group, base_data_dir=tmpdir
)
run_id = returned_exegroup_result.shared_runtime_info.run_id
if run_id_in is not None:
assert run_id_in == run_id
else:
assert isinstance(uuid.UUID(run_id), uuid.UUID)
manual_exegroup_result = _load_result_by_hand(tmpdir, run_id)
egr_record: cg.ExecutableGroupResultFilesystemRecord = cirq.read_json_gzip(
f'{tmpdir}/{run_id}/ExecutableGroupResultFilesystemRecord.json.gz'
)
exegroup_result: cg.ExecutableGroupResult = egr_record.load(base_data_dir=tmpdir)
helper_loaded_result = cg.ExecutableGroupResultFilesystemRecord.from_json(
run_id=run_id, base_data_dir=tmpdir
).load(base_data_dir=tmpdir)
# TODO(gh-4699): Don't null-out device once it's serializable.
assert isinstance(returned_exegroup_result.shared_runtime_info.device, cirq.Device)
returned_exegroup_result.shared_runtime_info.device = None
assert returned_exegroup_result == exegroup_result
assert manual_exegroup_result == exegroup_result
assert helper_loaded_result == exegroup_result
| quantumlib/Cirq | cirq-google/cirq_google/workflow/quantum_runtime_test.py | Python | apache-2.0 | 5,893 |
from wtforms import Form, BooleanField, StringField, PasswordField, validators
from wtforms.validators import DataRequired, Email
class LoginForm(Form):
username = StringField('Username', [validators.Length(min=4, max=20)])
password = PasswordField('Password', [
validators.Length(min=6),
validators.DataRequired()
])
class RegistrationForm(Form):
username = StringField('Username', [validators.Length(min=4, max=20)])
email = StringField('Email Address', [
        validators.Length(min=6, max=50),
validators.Email()
])
password = PasswordField('Password', [
validators.Length(min=6),
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat Password')
accept_tos = BooleanField('I accept the Terms of Service', [validators.DataRequired()])
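# Illustrative Flask-style handling (assumed, not part of this module):
#   form = RegistrationForm(request.form)
#   if request.method == 'POST' and form.validate():
#       ...create the user from form.username.data etc...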
class ChangePwdForm(Form):
old_pwd = PasswordField('Old Password', [
validators.DataRequired()
])
new_pwd = PasswordField('New Password', [
validators.Length(min=6),
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat New Password')
class EnterEmail(Form):
email = StringField('Email Address', [
validators.Length(min=6, max=50),
validators.Email(),
validators.DataRequired()
])
class ForgotPass(Form):
new_pwd = PasswordField('New Password', [
validators.Length(min=6),
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat New Password')
| disfear86/mushcloud | app/forms.py | Python | apache-2.0 | 1,728 |
from collections import defaultdict
from operator import itemgetter
urls = open('all_github_urls.txt', 'r').read().splitlines()
url_dict = defaultdict(int)
for url in urls:
url_dict[url] += 1
sorted_list = sorted(url_dict.items(), key=itemgetter(1),reverse=True)
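# sorted_list now looks like [('https://github.com/user/repo', 42), ...]
# in descending count order (the URL shown is a made-up example).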
print(len(sorted_list))  # number of distinct URLs
| chaconnewu/awesome-augmented | replicates.py | Python | apache-2.0 | 302 |
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Script for running backend tests in parallel.
This should not be run directly. Instead, navigate to the oppia/ folder and
execute:
python -m scripts.run_backend_tests
You can also append the following options to the above command:
--verbose prints the output of the tests to the console.
--test_target=core.controllers.editor_test runs only the tests in the
core.controllers.editor_test module. (You can change
"core.controllers.editor_test" to any valid module path.)
--test_path=core/controllers runs all tests in test files in the
core/controllers directory. (You can change "core/controllers" to any
valid subdirectory path.)
--generate_coverage_report generates a coverage report as part of the final
test output (but it makes the tests slower).
Note: If you've made some changes and tests are failing to run at all, this
might mean that you have introduced a circular dependency (e.g. module A
imports module B, which imports module C, which imports module A). This needs
to be fixed before the tests will run.
"""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import argparse
import importlib
import inspect
import multiprocessing
import os
import re
import subprocess
import sys
import threading
import time
import unittest
import python_utils
from . import common
from . import concurrent_task_utils
from . import install_third_party_libs
DIRS_TO_ADD_TO_SYS_PATH = [
os.path.join(common.OPPIA_TOOLS_DIR, 'pylint-1.9.4'),
common.GOOGLE_APP_ENGINE_SDK_HOME,
os.path.join(common.OPPIA_TOOLS_DIR, 'webtest-%s' % common.WEBTEST_VERSION),
os.path.join(common.OPPIA_TOOLS_DIR, 'Pillow-%s' % common.PILLOW_VERSION),
os.path.join(
common.OPPIA_TOOLS_DIR, 'protobuf-%s' % common.PROTOBUF_VERSION),
os.path.join(common.OPPIA_TOOLS_DIR, 'psutil-%s' % common.PSUTIL_VERSION),
os.path.join(
common.OPPIA_TOOLS_DIR, 'PyGithub-%s' % common.PYGITHUB_VERSION),
os.path.join(
common.OPPIA_TOOLS_DIR, 'pip-tools-%s' % common.PIP_TOOLS_VERSION),
common.CURR_DIR,
common.THIRD_PARTY_PYTHON_LIBS_DIR
]
COVERAGE_DIR = os.path.join(
os.getcwd(), os.pardir, 'oppia_tools',
'coverage-%s' % common.COVERAGE_VERSION)
COVERAGE_MODULE_PATH = os.path.join(
os.getcwd(), os.pardir, 'oppia_tools',
'coverage-%s' % common.COVERAGE_VERSION, 'coverage')
TEST_RUNNER_PATH = os.path.join(os.getcwd(), 'core', 'tests', 'gae_suite.py')
# This should be the same as core.test_utils.LOG_LINE_PREFIX.
LOG_LINE_PREFIX = 'LOG_INFO_TEST: '
_LOAD_TESTS_DIR = os.path.join(os.getcwd(), 'core', 'tests', 'load_tests')
_PARSER = argparse.ArgumentParser(
description="""
Run this script from the oppia root folder:
python -m scripts.run_backend_tests
IMPORTANT: Only one of --test_path and --test_target should be specified.
""")
_EXCLUSIVE_GROUP = _PARSER.add_mutually_exclusive_group()
_EXCLUSIVE_GROUP.add_argument(
'--test_target',
help='optional dotted module name of the test(s) to run',
type=python_utils.UNICODE)
_EXCLUSIVE_GROUP.add_argument(
'--test_path',
help='optional subdirectory path containing the test(s) to run',
type=python_utils.UNICODE)
_PARSER.add_argument(
'--generate_coverage_report',
help='optional; if specified, generates a coverage report',
action='store_true')
_PARSER.add_argument(
'--exclude_load_tests',
help='optional; if specified, exclude load tests from being run',
action='store_true')
_PARSER.add_argument(
'-v',
'--verbose',
help='optional; if specified, display the output of the tests being run',
action='store_true')
def run_shell_cmd(exe, stdout=subprocess.PIPE, stderr=subprocess.PIPE):
"""Runs a shell command and captures the stdout and stderr output.
If the cmd fails, raises Exception. Otherwise, returns a string containing
the concatenation of the stdout and stderr logs.
"""
p = subprocess.Popen(exe, stdout=stdout, stderr=stderr)
last_stdout_str, last_stderr_str = p.communicate()
# Converting to unicode to stay compatible with the rest of the strings.
last_stdout_str = last_stdout_str.decode(encoding='utf-8')
last_stderr_str = last_stderr_str.decode(encoding='utf-8')
last_stdout = last_stdout_str.split('\n')
if LOG_LINE_PREFIX in last_stdout_str:
concurrent_task_utils.log('')
for line in last_stdout:
if line.startswith(LOG_LINE_PREFIX):
concurrent_task_utils.log(
'INFO: %s' % line[len(LOG_LINE_PREFIX):])
concurrent_task_utils.log('')
result = '%s%s' % (last_stdout_str, last_stderr_str)
if p.returncode != 0:
raise Exception('Error %s\n%s' % (p.returncode, result))
return result
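# Illustrative (assumed command): run_shell_cmd([sys.executable, '-c',
# 'print("ok")']) returns 'ok\n'; a non-zero exit status raises Exception
# with the combined stdout/stderr in its message.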
class TestingTaskSpec(python_utils.OBJECT):
"""Executes a set of tests given a test class name."""
def __init__(self, test_target, generate_coverage_report):
self.test_target = test_target
self.generate_coverage_report = generate_coverage_report
def run(self):
"""Runs all tests corresponding to the given test target."""
test_target_flag = '--test_target=%s' % self.test_target
if self.generate_coverage_report:
exc_list = [
sys.executable, COVERAGE_MODULE_PATH, 'run', '-p',
TEST_RUNNER_PATH, test_target_flag]
else:
exc_list = [sys.executable, TEST_RUNNER_PATH, test_target_flag]
result = run_shell_cmd(exc_list)
return [concurrent_task_utils.TaskResult(
None, None, None, [result])]
def _get_all_test_targets(test_path=None, include_load_tests=True):
"""Returns a list of test targets for all classes under test_path
containing tests.
"""
def _get_test_target_classes(path):
"""Returns a list of all test classes in a given test file path.
Args:
path: str. The path of the test file from which all test classes
are to be extracted.
Returns:
list. A list of all test classes in a given test file path.
"""
class_names = []
test_target_path = os.path.relpath(
path, os.getcwd())[:-3].replace('/', '.')
python_module = importlib.import_module(test_target_path)
for name, clazz in inspect.getmembers(
python_module, predicate=inspect.isclass):
if unittest.TestCase in inspect.getmro(clazz):
class_names.append(name)
return [
'%s.%s' % (test_target_path, class_name)
for class_name in class_names]
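    # Illustrative: a file like core/domain/exp_domain_test.py yields targets
    # such as 'core.domain.exp_domain_test.ExplorationDomainUnitTests'
    # (the class name here is an assumed example).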
base_path = os.path.join(os.getcwd(), test_path or '')
result = []
excluded_dirs = ['.git', 'third_party', 'core/tests', 'node_modules']
for root in os.listdir(base_path):
        if any(s in root for s in excluded_dirs):
continue
if root.endswith('_test.py'):
result = result + (
_get_test_target_classes(os.path.join(base_path, root)))
for subroot, _, files in os.walk(os.path.join(base_path, root)):
if _LOAD_TESTS_DIR in subroot and include_load_tests:
for f in files:
if f.endswith('_test.py'):
result = result + (
_get_test_target_classes(os.path.join(subroot, f)))
for f in files:
if (f.endswith('_test.py') and
os.path.join('core', 'tests') not in subroot):
result = result + (
_get_test_target_classes(os.path.join(subroot, f)))
return result
def main(args=None):
"""Run the tests."""
parsed_args = _PARSER.parse_args(args=args)
# Make sure that third-party libraries are up-to-date before running tests,
# otherwise import errors may result.
install_third_party_libs.main()
for directory in DIRS_TO_ADD_TO_SYS_PATH:
if not os.path.exists(os.path.dirname(directory)):
raise Exception('Directory %s does not exist.' % directory)
# The directories should only be inserted starting at index 1. See
# https://stackoverflow.com/a/10095099 and
# https://stackoverflow.com/q/10095037 for more details.
sys.path.insert(1, directory)
import dev_appserver
dev_appserver.fix_sys_path()
if parsed_args.generate_coverage_report:
python_utils.PRINT(
'Checking whether coverage is installed in %s'
% common.OPPIA_TOOLS_DIR)
if not os.path.exists(
os.path.join(
common.OPPIA_TOOLS_DIR,
'coverage-%s' % common.COVERAGE_VERSION)):
raise Exception(
'Coverage is not installed, please run the start script.')
pythonpath_components = [COVERAGE_DIR]
if os.environ.get('PYTHONPATH'):
pythonpath_components.append(os.environ.get('PYTHONPATH'))
os.environ['PYTHONPATH'] = os.pathsep.join(pythonpath_components)
if parsed_args.test_target and parsed_args.test_path:
raise Exception(
'At most one of test_path and test_target should be specified.')
if parsed_args.test_path and '.' in parsed_args.test_path:
raise Exception('The delimiter in test_path should be a slash (/)')
if parsed_args.test_target and '/' in parsed_args.test_target:
raise Exception('The delimiter in test_target should be a dot (.)')
if parsed_args.test_target:
if '_test' in parsed_args.test_target:
all_test_targets = [parsed_args.test_target]
else:
python_utils.PRINT('')
python_utils.PRINT(
'---------------------------------------------------------')
python_utils.PRINT(
'WARNING : test_target flag should point to the test file.')
python_utils.PRINT(
'---------------------------------------------------------')
python_utils.PRINT('')
time.sleep(3)
python_utils.PRINT('Redirecting to its corresponding test file...')
all_test_targets = [parsed_args.test_target + '_test']
else:
include_load_tests = not parsed_args.exclude_load_tests
all_test_targets = _get_all_test_targets(
test_path=parsed_args.test_path,
include_load_tests=include_load_tests)
# Prepare tasks.
max_concurrent_runs = 25
concurrent_count = min(multiprocessing.cpu_count(), max_concurrent_runs)
semaphore = threading.Semaphore(concurrent_count)
task_to_taskspec = {}
tasks = []
for test_target in all_test_targets:
test = TestingTaskSpec(
test_target, parsed_args.generate_coverage_report)
task = concurrent_task_utils.create_task(
test.run, parsed_args.verbose, semaphore, name=test_target,
report_enabled=False)
task_to_taskspec[task] = test
tasks.append(task)
task_execution_failed = False
try:
concurrent_task_utils.execute_tasks(tasks, semaphore)
except Exception:
task_execution_failed = True
for task in tasks:
if task.exception:
concurrent_task_utils.log(
python_utils.convert_to_bytes(task.exception.args[0]))
python_utils.PRINT('')
python_utils.PRINT('+------------------+')
python_utils.PRINT('| SUMMARY OF TESTS |')
python_utils.PRINT('+------------------+')
python_utils.PRINT('')
# Check we ran all tests as expected.
total_count = 0
total_errors = 0
total_failures = 0
for task in tasks:
spec = task_to_taskspec[task]
if not task.finished:
python_utils.PRINT('CANCELED %s' % spec.test_target)
test_count = 0
elif (task.exception and
'No tests were run' in python_utils.convert_to_bytes(
task.exception.args[0])):
python_utils.PRINT(
'ERROR %s: No tests found.' % spec.test_target)
test_count = 0
elif task.exception:
exc_str = python_utils.convert_to_bytes(task.exception.args[0])
python_utils.PRINT(exc_str[exc_str.find('='): exc_str.rfind('-')])
tests_failed_regex_match = re.search(
r'Test suite failed: ([0-9]+) tests run, ([0-9]+) errors, '
'([0-9]+) failures',
python_utils.convert_to_bytes(task.exception.args[0]))
try:
test_count = int(tests_failed_regex_match.group(1))
errors = int(tests_failed_regex_match.group(2))
failures = int(tests_failed_regex_match.group(3))
total_errors += errors
total_failures += failures
python_utils.PRINT('FAILED %s: %s errors, %s failures' % (
spec.test_target, errors, failures))
except AttributeError:
# There was an internal error, and the tests did not run (The
# error message did not match `tests_failed_regex_match`).
test_count = 0
total_errors += 1
python_utils.PRINT('')
python_utils.PRINT(
'------------------------------------------------------')
python_utils.PRINT(
' WARNING: FAILED TO RUN %s' % spec.test_target)
python_utils.PRINT('')
python_utils.PRINT(
' This is most likely due to an import error.')
python_utils.PRINT(
'------------------------------------------------------')
else:
try:
tests_run_regex_match = re.search(
r'Ran ([0-9]+) tests? in ([0-9\.]+)s',
task.task_results[0].get_report()[0])
test_count = int(tests_run_regex_match.group(1))
test_time = float(tests_run_regex_match.group(2))
python_utils.PRINT(
'SUCCESS %s: %d tests (%.1f secs)' %
(spec.test_target, test_count, test_time))
except Exception:
python_utils.PRINT(
'An unexpected error occurred. '
'Task output:\n%s' % task.task_results[0].get_report()[0])
total_count += test_count
python_utils.PRINT('')
if total_count == 0:
raise Exception('WARNING: No tests were run.')
python_utils.PRINT('Ran %s test%s in %s test class%s.' % (
total_count, '' if total_count == 1 else 's',
len(tasks), '' if len(tasks) == 1 else 'es'))
if total_errors or total_failures:
python_utils.PRINT(
'(%s ERRORS, %s FAILURES)' % (total_errors, total_failures))
else:
python_utils.PRINT('All tests passed.')
if task_execution_failed:
raise Exception('Task execution failed.')
elif total_errors or total_failures:
raise Exception(
'%s errors, %s failures' % (total_errors, total_failures))
if parsed_args.generate_coverage_report:
subprocess.check_call([sys.executable, COVERAGE_MODULE_PATH, 'combine'])
process = subprocess.Popen(
[sys.executable, COVERAGE_MODULE_PATH, 'report',
'--omit="%s*","third_party/*","/usr/share/*"'
% common.OPPIA_TOOLS_DIR, '--show-missing'],
stdout=subprocess.PIPE)
report_stdout, _ = process.communicate()
python_utils.PRINT(report_stdout)
coverage_result = re.search(
r'TOTAL\s+(\d+)\s+(\d+)\s+(?P<total>\d+)%\s+', report_stdout)
if coverage_result.group('total') != '100':
raise Exception('Backend test coverage is not 100%')
python_utils.PRINT('')
python_utils.PRINT('Done!')
if __name__ == '__main__':
main()
| prasanna08/oppia | scripts/run_backend_tests.py | Python | apache-2.0 | 16,674 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-31 05:58
from __future__ import unicode_literals
import django.core.validators
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('partner', '0011_partnerprofile_acronym'),
]
operations = [
migrations.AddField(
model_name='partnerprofile',
name='partnership_collaborate_institution',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='partnerprofile',
name='partnership_collaborate_institution_desc',
field=models.CharField(blank=True, max_length=200, null=True),
),
migrations.AlterField(
model_name='partnerbudget',
name='year',
field=models.PositiveSmallIntegerField(help_text='Enter valid year.', validators=[django.core.validators.MinValueValidator(1800)], verbose_name='Weight in percentage'),
),
migrations.AlterField(
model_name='partnerotherdocument',
name='partner',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='other_documents', to='partner.Partner'),
),
]
| unicef/un-partner-portal | backend/unpp_api/apps/partner/migrations/0012_auto_20170831_0558.py | Python | apache-2.0 | 1,314 |
"""
.. module: security_monkey.auditors.CIS4_124_Auditor
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Pritam D. Gautam <[email protected]> @nuagedm
"""
from security_monkey.auditor import Auditor, Entity
from security_monkey.watchers.security_group import SecurityGroup
from security_monkey import app
from security_monkey.watchers.vpc.peering import Peering
def _check_empty_security_group(sg_item):
if app.config.get('SECURITYGROUP_INSTANCE_DETAIL', None) in ['SUMMARY', 'FULL'] and \
not sg_item.config.get("assigned_to", None):
return 0
return 1
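# The return value is used as a score multiplier: 0 mutes an issue for
# security groups that instance-detail collection shows are unattached,
# while 1 keeps the auditor's full severity.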
class CIS_4_124_Auditor(Auditor):
INTERNET_ACCESSIBLE_NOTES_SG = '{entity} Access: [{access}]'
index = SecurityGroup.index
i_am_singular = SecurityGroup.i_am_singular
i_am_plural = SecurityGroup.i_am_plural
def __init__(self, accounts=None, debug=False):
super(CIS_4_124_Auditor, self).__init__(accounts=accounts, debug=debug)
def _port_for_rule(self, rule):
"""
Looks at the from_port and to_port and returns a sane representation.
"""
phrase = '{direction}:{protocol}:{port}'
direction = rule.get('rule_type')
protocol = rule['ip_protocol']
port_range = '{0}-{1}'.format(rule['from_port'], rule['to_port'])
if protocol == '-1':
protocol = 'all_protocols'
port_range = 'all_ports'
elif rule['from_port'] == rule['to_port']:
port_range = str(rule['from_port'])
return phrase.format(direction=direction, protocol=protocol, port=port_range)
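    # Illustrative: an ingress TCP rule on ports 20-22 renders as
    # 'ingress:tcp:20-22'; a single-port rule as 'ingress:tcp:22'; and
    # protocol '-1' as 'ingress:all_protocols:all_ports'.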
def check_cis_4_1(self, item):
"""
alert if EC2 SG contains an ingress from 0.0.0.0/0 to port 22 either explicit or implicit.
As per AWS CIS Guide:
It is recommended that no security group allows unrestricted ingress access to port 22.
Note: A Port value of ALL or a port range such as 0-1024 are inclusive of port 22.
"""
tag = "CIS 4.1 Security Group permits unrestricted ingress access to port 22"
severity = 10
multiplier = _check_empty_security_group(item)
score = severity * multiplier
direction = 'ingress' # check for ingress traffic rules only
for rule in item.config.get("rules", []):
actions = self._port_for_rule(rule)
cidr = rule.get("cidr_ip")
from_port = rule['from_port']
to_port = rule['to_port']
protocol = rule['ip_protocol']
entity = Entity(category='cidr', value=cidr)
if protocol == '-1':
cidr = '0.0.0.0/0'
from_port = 0
to_port = 65535
app.logger.debug("Checking {}/{}/{}".format(self.index, actions, entity))
if not rule.get("rule_type") == direction:
# Skip egress rules
continue
if not str(cidr).endswith('/0'):
# Skip rules that do not end with /0
continue
if not (from_port <= 22 <= to_port):
# Skip rules which do not have reference to port 22
continue
notes = self.INTERNET_ACCESSIBLE_NOTES_SG
notes = notes.format(entity=entity, access=actions)
self.add_issue(score, tag, item, notes=notes)
def check_cis_4_2(self, item):
"""
alert if EC2 SG contains an ingress from 0.0.0.0/0 to port 3389 either explicit or implicit.
As per AWS CIS Guide:
It is recommended that no security group allows unrestricted ingress access to port 3389.
Note: A Port value of ALL or a port range such as 1024-4000 are inclusive of port 3389.
"""
tag = "CIS 4.2 Security Group permits unrestricted ingress access to port 3389"
severity = 10
multiplier = _check_empty_security_group(item)
score = severity * multiplier
direction = 'ingress' # check for ingress traffic rules only
for rule in item.config.get("rules", []):
actions = self._port_for_rule(rule)
cidr = rule.get("cidr_ip")
from_port = rule['from_port']
to_port = rule['to_port']
protocol = rule['ip_protocol']
entity = Entity(category='cidr', value=cidr)
if protocol == '-1':
cidr = '0.0.0.0/0'
from_port = 0
to_port = 65535
app.logger.debug("Checking {}/{}/{}".format(self.index, actions, entity))
if not rule.get("rule_type") == direction:
# Skip egress rules
continue
if not str(cidr).endswith('/0'):
# Skip rules that do not end with /0
continue
if not (from_port <= 3389 <= to_port):
# Skip rules which do not have reference to port 3389
continue
notes = self.INTERNET_ACCESSIBLE_NOTES_SG
notes = notes.format(entity=entity, access=actions)
self.add_issue(score, tag, item, notes=notes)
def check_cis_4_4(self, item):
"""
alert if EC2 default Security Group contains any ingress or egress rules.
"""
severity = 10
multiplier = _check_empty_security_group(item)
score = severity * multiplier
for rule in item.config.get("rules", []):
if rule.get('name') != 'default':
continue
tag = "CIS 4.4 Security Group permits unrestricted {} access".format(rule.get("rule_type"))
actions = self._port_for_rule(rule)
cidr = rule.get("cidr_ip")
entity = Entity(category='cidr', value=cidr)
app.logger.debug("Checking {}/{}/{}".format(self.index, actions, entity))
notes = self.INTERNET_ACCESSIBLE_NOTES_SG
notes = notes.format(entity=entity, access=actions)
self.add_issue(score, tag, item, notes=notes)
class CIS_4_5_Auditor(Auditor):
index = Peering.index
i_am_singular = Peering.i_am_singular
i_am_plural = Peering.i_am_plural
def __init__(self, accounts=None, debug=False):
super(CIS_4_5_Auditor, self).__init__(accounts=accounts, debug=debug)
def check_cis_4_5(self, item):
"""
4.5 Ensure routing tables for VPC peering are "least access" (Not Scored)
"""
score = 10
tag = "CIS 4.5 Ensure routing tables for VPC peering are least access"
note = "Requester {req_cidr}, Acceptor {acceptor_cidr} has {status} status"
requester = item.config.get("requester_vpc_info")
acceptor = item.config.get("accepter_vpc_info")
vpc_peering_id = item.config.get("vpc_peering_connection_id")
vpc_peering_status = item.config["status"]["Code"]
        requester_cidr = Entity(category='cidr', value=requester.get("CidrBlock"))
        acceptor_cidr = Entity(category='cidr', value=acceptor.get("CidrBlock"))
        note = note.format(
            req_cidr=requester_cidr,
            acceptor_cidr=acceptor_cidr,
            status=vpc_peering_status)
        app.logger.debug("Checking {}/{}/{}/{}/{}".format(
            self.index, vpc_peering_id, requester_cidr, acceptor_cidr,
            vpc_peering_status))
self.add_issue(score, tag, item, notes=note)
| stackArmor/security_monkey | security_monkey/auditors/custom/cis/cis4x.py | Python | apache-2.0 | 7,403 |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the ProjectDao."""
import json
from google.apputils import basetest
import mock
from MySQLdb import DataError
from google.cloud.security.common.data_access import _db_connector
from google.cloud.security.common.data_access import errors
from google.cloud.security.common.data_access import project_dao
from google.cloud.security.common.data_access.sql_queries import select_data
from google.cloud.security.common.gcp_type import project
from google.cloud.security.common.gcp_type import resource
from tests.common.gcp_type.test_data import fake_projects
class ProjectDaoTest(basetest.TestCase):
"""Tests for the ProjectDao."""
@mock.patch.object(_db_connector.DbConnector, '__init__', autospec=True)
def setUp(self, mock_db_connector):
mock_db_connector.return_value = None
self.project_dao = project_dao.ProjectDao()
self.fetch_mock = mock.MagicMock()
self.project_dao.execute_sql_with_fetch = self.fetch_mock
self.resource_name = 'projects'
self.fake_timestamp = '12345'
self.fake_projects_db_rows = fake_projects.FAKE_PROJECTS_DB_ROWS
self.fake_projects_bad_iam_db_rows = \
fake_projects.FAKE_PROJECTS_BAD_IAM_DB_ROWS
self.fake_projects_iam_rows = \
fake_projects.FAKE_PROJECTS_OK_IAM_DB_ROWS
def test_get_project_numbers(self):
"""Test get_project_numbers().
Setup:
Format the fake query.
Expect:
execute_sql_with_fetch() called once.
"""
fake_query = select_data.PROJECT_NUMBERS.format(self.fake_timestamp)
self.project_dao.get_project_numbers(
self.resource_name, self.fake_timestamp)
self.fetch_mock.assert_called_once_with(
self.resource_name, fake_query, ())
def test_get_project_numbers_raises_error(self):
"""Test get_project_numbers() raises a MySQLError.
Setup:
Set execute_sql_with_fetch() side effect to MySQLError.
Expect:
get_project_numbers() raises a MySQLError.
"""
self.fetch_mock.side_effect = errors.MySQLError(
self.resource_name, mock.MagicMock())
with self.assertRaises(errors.MySQLError):
self.project_dao.get_project_numbers(
self.resource_name, self.fake_timestamp)
def test_get_project(self):
"""Test that get_project() returns expected data.
Setup:
Mock execute_sql_with_fetch() return value.
Create fake row of project data.
Expect:
get_project() call returns expected data: a single Project.
"""
fake_project = self.fake_projects_db_rows[0]
self.fetch_mock.return_value = [fake_project]
actual = self.project_dao.get_project(
fake_project['project_id'],
self.fake_timestamp)
self.assertEqual(
self.project_dao.map_row_to_object(fake_project),
actual)
def test_get_project_iam_policies(self):
"""Test that get_project_iam_policies() database methods are called.
Setup:
Format the fake query.
Expect:
execute_sql_with_fetch() called once.
"""
fake_query = select_data.PROJECT_IAM_POLICIES_RAW.format(
self.fake_timestamp, self.fake_timestamp)
self.project_dao.get_project_policies(
resource.ResourceType.PROJECT, self.fake_timestamp)
self.fetch_mock.assert_called_once_with(
resource.ResourceType.PROJECT, fake_query, ())
def test_get_project_policies(self):
"""Test that get_project_policies() returns expected data.
Setup:
Create magic mock for execute_sql_with_fetch().
Create fake row of project data.
Expect:
* get_project_policies() call returns expected data: a dict of
Projects and their IAM policies.
"""
self.fetch_mock.return_value = self.fake_projects_iam_rows
actual = self.project_dao.get_project_policies(
'projects', self.fake_timestamp)
expected_projects = [self.project_dao.map_row_to_object(r)
for r in self.fake_projects_iam_rows]
expected_iam = [json.loads(p['iam_policy'])
for p in self.fake_projects_iam_rows]
expected = dict(zip(expected_projects, expected_iam))
self.assertEqual(expected, actual)
def test_get_project_policies_query_failed_handles_error(self):
"""Test that a failed get_project_policies() handles the error.
Setup:
Set execute_sql_with_fetch() side effect to MySQLError.
Expect:
get_project_policies() raises a MySQLError.
"""
self.fetch_mock.side_effect = errors.MySQLError(
self.resource_name, mock.MagicMock())
with self.assertRaises(errors.MySQLError):
self.project_dao.get_project_policies(
self.resource_name, self.fake_timestamp)
def test_get_project_iam_policies_malformed_json_error_handled(self):
"""Test malformed json error is handled in get_project_policies().
Setup:
Set execute_sql_with_fetch() return value to fake data with bad
malformed json.
Expect:
Log a warning and skip the row, such that the output only contains
1 result (out of 2).
"""
self.fetch_mock.return_value = self.fake_projects_bad_iam_db_rows
project_dao.LOGGER = mock.MagicMock()
ok_row = self.fake_projects_bad_iam_db_rows[0]
expected_project = self.project_dao.map_row_to_object(ok_row)
expected_iam = json.loads(ok_row['iam_policy'])
expected = {
expected_project: expected_iam
}
actual = self.project_dao.get_project_policies(
self.resource_name, self.fake_timestamp)
self.assertEqual(1, project_dao.LOGGER.warn.call_count)
self.assertEqual(expected, actual)
def test_get_projects(self):
"""Test get_projects().
Setup:
Set execute_sql_with_fetch() return value to fake data.
Expected:
Expected projects equal actual.
"""
self.fetch_mock.return_value = self.fake_projects_iam_rows
actual = self.project_dao.get_projects(self.fake_timestamp)
expected = [self.project_dao.map_row_to_object(r)
for r in self.fake_projects_iam_rows]
self.assertEqual(expected, actual)
def test_get_projects_raises_error_on_fetch_error(self):
"""Test get_projects() raises MySQLError on fetch error.
Setup:
Set execute_sql_with_fetch() side effect to MySQLError.
Expected:
get_projects() raises MySQLError.
"""
self.fetch_mock.side_effect = errors.MySQLError(
self.resource_name, mock.MagicMock())
with self.assertRaises(errors.MySQLError):
self.project_dao.get_projects(self.fake_timestamp)
if __name__ == '__main__':
basetest.main()
| felixbb/forseti-security | tests/common/data_access/project_dao_test.py | Python | apache-2.0 | 7,808 |
'''
From BIFF8 on, strings are always stored using UTF-16LE text encoding. The
character array is a sequence of 16-bit values. Additionally it is
possible to use a compressed format, which omits the high bytes of all
characters, if they are all zero.
The following tables describe the standard format of the entire string, but
in many records the strings differ from this format. This will be mentioned
separately. It is possible (but not required) to store Rich-Text formatting
information and Asian phonetic information inside a Unicode string. This
results in four different ways to store a string. The character array
is not zero-terminated.
The string consists of the character count (as usual an 8-bit value or
a 16-bit value), option flags, the character array and optional formatting
information. If the string is empty, sometimes the option flags field will
not occur. This is mentioned at the respective place.
Offset Size Contents
0 1 or 2 Length of the string (character count, ln)
1 or 2 1 Option flags:
Bit Mask Contents
0 01H Character compression (ccompr):
0 = Compressed (8-bit characters)
1 = Uncompressed (16-bit characters)
2 04H Asian phonetic settings (phonetic):
0 = Does not contain Asian phonetic settings
1 = Contains Asian phonetic settings
3 08H Rich-Text settings (richtext):
0 = Does not contain Rich-Text settings
1 = Contains Rich-Text settings
[2 or 3] 2 (optional, only if richtext=1) Number of Rich-Text formatting runs (rt)
[var.] 4 (optional, only if phonetic=1) Size of Asian phonetic settings block (in bytes, sz)
var. ln or
2·ln Character array (8-bit characters or 16-bit characters, dependent on ccompr)
[var.] 4·rt (optional, only if richtext=1) List of rt formatting runs
[var.] sz (optional, only if phonetic=1) Asian Phonetic Settings Block
'''
from struct import pack
def upack2(s, encoding='ascii'):
# If not unicode, make it so.
if isinstance(s, str):
us = s
else:
us = str(s, encoding)
# Limit is based on number of content characters
# (not on number of bytes in packed result)
len_us = len(us)
if len_us > 65535:
raise Exception('String longer than 65535 characters')
try:
encs = us.encode('latin1')
# Success here means all chars are in U+0000 to U+00FF
# inclusive, meaning that we can use "compressed format".
flag = 0
n_items = len_us
except UnicodeEncodeError:
encs = us.encode('utf_16_le')
flag = 1
n_items = len(encs) // 2
# n_items is the number of "double byte characters" i.e. MS C wchars
# Can't use len(us).
# len(u"\U0001D400") -> 1 on a wide-unicode build
# and 2 on a narrow-unicode build.
# We need n_items == 2 in this case.
return pack('<HB', n_items, flag) + encs
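# A quick illustration of upack2's two storage modes (a sketch, not a doctest):
#   upack2('abc')    -> pack('<HB', 3, 0) + b'abc'    # all chars fit Latin-1
#   upack2('\u20ac') -> pack('<HB', 1, 1) + '\u20ac'.encode('utf_16_le')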
def upack2rt(rt, encoding='ascii'):
us = ''
fr = b''
offset = 0
# convert rt strings to unicode if not already unicode
# also generate the formatting run for the styles added
for s, fontx in rt:
if not isinstance(s, str):
s = str(s, encoding)
us += s
if fontx is not None:
# code in Rows.py ensures that
# fontx can be None only for the first piece
fr += pack('<HH', offset, fontx)
# offset is the number of MS C wchar characters.
# That is 1 if c <= u'\uFFFF' else 2
offset += len(s.encode('utf_16_le')) // 2
num_fr = len(fr) // 4 # ensure result is int
if offset > 32767:
raise Exception('String longer than 32767 characters')
try:
encs = us.encode('latin1')
# Success here means all chars are in U+0000 to U+00FF
# inclusive, meaning that we can use "compressed format".
flag = 0 | 8
n_items = len(encs)
except UnicodeEncodeError:
encs = us.encode('utf_16_le')
flag = 1 | 8
n_items = len(encs) // 2 # see comments in upack2 function above
return pack('<HBH', n_items, flag, num_fr) + encs, fr
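# Rich-text sketch (font index 5 is arbitrary; per the comment above, only the
# first piece may carry fontx=None):
#   upack2rt([('plain', None), ('bold', 5)]) returns
#     pack('<HBH', 9, 0 | 8, 1) + b'plainbold'  # 9 chars, richtext flag, 1 run
#   together with the formatting run pack('<HH', 5, 5)  # offset 5, font 5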
def upack1(s, encoding='ascii'):
# Same as upack2(), but with a one-byte length field.
if isinstance(s, str):
us = s
else:
us = str(s, encoding)
len_us = len(us)
if len_us > 255:
raise Exception('String longer than 255 characters')
try:
encs = us.encode('latin1')
flag = 0
n_items = len_us
except UnicodeEncodeError:
encs = us.encode('utf_16_le')
flag = 1
n_items = len(encs) // 2
return pack('<BB', n_items, flag) + encs
| sternshus/arelle2.7 | svr-2.7/arelle/xlwt/UnicodeUtils.py | Python | apache-2.0 | 4,919 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Tool for mobile testapps to Test on iOS Simulator / Android Emulator locally.
Usage:
python test_simulator.py --testapp_dir ~/testapps
This will recursively search ~/testapps for apps,
test on local simulators/emulators, and validate their results. The validation is
specific to the structure of the Firebase Unity and C++ testapps.
----iOS only----
Requires simulators to be installed. iOS simulators can be installed via this tool:
https://github.com/xcpretty/xcode-install#simulators
If you wish to specify a particular iOS device to test on, you will need the model
id and version (OS version for iOS). These change over time. You can list all
available simulators (supported models and versions) with the following command:
xcrun simctl list
Device Information is stored in TEST_DEVICES in print_matrix_configuration.py
Example:
iPhone 8, OS 12.0:
--ios_name "iPhone 8" --ios_version "12.0"
Alternatively, to set an iOS device, use one of the values below:
[simulator_min, simulator_target, simulator_latest]
Example:
--ios_device "simulator_target"
----Android only----
Java 8 is required
Environment Variables (on MacOS)
JAVA_HOME=/Library/Java/JavaVirtualMachines/jdk-8-latest/Contents/Home
ANDROID_HOME=/Users/user_name/Library/Android/sdk
Environment Variables (on Linux)
JAVA_HOME=/usr/local/buildtools/java/jdk8/
ANDROID_HOME=~/Android/Sdk
If you wish to specify a particular Android device to test on, you will need
the sdk id and build tool version. These change over time. You can list all
available tools with the following command:
$ANDROID_HOME/tools/bin/sdkmanager --list
Device Information is stored in TEST_DEVICES in print_matrix_configuration.py
Example:
sdk id "system-images;android-29;google_apis;x86":
--android_sdk "system-images;android-29;google_apis;x86" --build_tools_version "29.0.2"
Alternatively, to set an Android device, use one of the values below:
[emulator_min, emulator_target, emulator_latest]
Example:
--android_device "emulator_target" --build_tools_version "29.0.2"
Returns:
1: No iOS/Android integration_test apps found
20: Invalid ios_device flag
  21: iOS simulator creation failed
22: iOS gameloop app not found
23: build_testapps.json file not found
30: Invalid android_device flag
31: For android test, JAVA_HOME is not set to java 8
"""
import json
import os
import pathlib
import subprocess
import time
from absl import app
from absl import flags
from absl import logging
import attr
from integration_testing import test_validation
from print_matrix_configuration import TEST_DEVICES
_GAMELOOP_PACKAGE = "com.google.firebase.gameloop"
_RESULT_FILE = "Results1.json"
_TEST_RETRY = 3
_CMD_TIMEOUT = 300
_DEVICE_NONE = "None"
_DEVICE_ANDROID = "Android"
_DEVICE_APPLE = "Apple"
_RESET_TYPE_REBOOT = 1
_RESET_TYPE_WIPE_REBOOT = 2
FLAGS = flags.FLAGS
flags.DEFINE_string(
"testapp_dir", None,
"Testapps in this directory will be tested.")
flags.DEFINE_string(
"ios_device", None,
"iOS device, which is a combination of device name and os version"
"See module docstring for details on how to set and get this id. "
"If none, will use ios_name and ios_version.")
flags.DEFINE_string(
"ios_name", "iPhone 8",
"See module docstring for details on how to set and get this name.")
flags.DEFINE_string(
"ios_version", "12.0",
"See module docstring for details on how to set and get this version.")
flags.DEFINE_string(
"tvos_device", None,
"tvOS device, which is a combination of device name and os version"
"See module docstring for details on how to set and get this id. "
"If none, will use ios_name and ios_version.")
flags.DEFINE_string(
"tvos_name", "Apple TV",
"See module docstring for details on how to set and get this name.")
flags.DEFINE_string(
"tvos_version", "14.0",
"See module docstring for details on how to set and get this version.")
flags.DEFINE_string(
"android_device", None,
"Android device, which is the sdk id of an emulator image"
"See module docstring for details on how to set and get this id."
"If none, will use android_sdk.")
flags.DEFINE_string(
"android_sdk", "system-images;android-29;google_apis;x86",
"See module docstring for details on how to set and get this id.")
flags.DEFINE_string(
"build_tools_version", "29.0.2",
"android build_tools_version")
flags.DEFINE_string(
"logfile_name", "simulator-test",
"Create test log artifact test-results-$logfile_name.log."
" logfile will be created and placed in testapp_dir.")
flags.DEFINE_boolean(
"ci", False,
"If this script used in a CI system, set True.")
@attr.s(frozen=False, eq=False)
class Test(object):
"""Holds data related to the testing of one testapp."""
testapp_path = attr.ib()
logs = attr.ib()
def main(argv):
if len(argv) > 1:
raise app.UsageError("Too many command-line arguments.")
current_dir = pathlib.Path(__file__).parent.absolute()
testapp_dir = os.path.abspath(os.path.expanduser(FLAGS.testapp_dir))
ios_testapps = []
tvos_testapps = []
android_testapps = []
for file_dir, directories, file_names in os.walk(testapp_dir):
# .app is treated as a directory, not a file in MacOS
for directory in directories:
full_path = os.path.join(file_dir, directory)
if directory.endswith("integration_test.app"):
ios_testapps.append(full_path)
elif directory.endswith("integration_test_tvos.app"):
tvos_testapps.append(full_path)
for file_name in file_names:
full_path = os.path.join(file_dir, file_name)
if file_name.endswith(".apk"):
android_testapps.append(full_path)
if not ios_testapps and not tvos_testapps and not android_testapps:
logging.info("No testapps found")
return 1
tests = []
if ios_testapps:
logging.info("iOS Testapps found: %s", "\n".join(path for path in ios_testapps))
if FLAGS.ios_device:
device_info = TEST_DEVICES.get(FLAGS.ios_device)
if not device_info:
logging.error("Not a valid ios device: %s" % FLAGS.ios_device)
return 20
device_name = device_info.get("name")
device_os = device_info.get("version")
else:
device_name = FLAGS.ios_name
device_os = FLAGS.ios_version
device_id = _create_and_boot_simulator("iOS", device_name, device_os)
if not device_id:
logging.error("simulator created fail")
return 21
# A tool that enable game-loop test. This is a XCode project
ios_gameloop_project = os.path.join(current_dir, "integration_testing", "gameloop_apple")
ios_gameloop_app = _build_ios_gameloop(ios_gameloop_project, device_name, device_os)
if not ios_gameloop_app:
logging.error("gameloop app not found")
return 22
config_path = os.path.join(current_dir, "integration_testing", "build_testapps.json")
with open(config_path, "r") as configFile:
config = json.load(configFile)
if not config:
logging.error("No config file found")
return 23
for app_path in ios_testapps:
bundle_id = _get_bundle_id(app_path, config)
      logs = _run_apple_gameloop_test(bundle_id, app_path, ios_gameloop_app, device_id, _TEST_RETRY)
tests.append(Test(testapp_path=app_path, logs=logs))
_shutdown_simulator()
if tvos_testapps:
logging.info("tvOS Testapps found: %s", "\n".join(path for path in tvos_testapps))
if FLAGS.tvos_device:
device_info = TEST_DEVICES.get(FLAGS.tvos_device)
if not device_info:
logging.error("Not a valid tvos device: %s" % FLAGS.tvos_device)
return 20
device_name = device_info.get("name")
device_os = device_info.get("version")
else:
device_name = FLAGS.tvos_name
device_os = FLAGS.tvos_version
device_id = _create_and_boot_simulator("tvOS", device_name, device_os)
if not device_id:
logging.error("simulator created fail")
return 21
# A tool that enable game-loop test. This is a XCode project
tvos_gameloop_project = os.path.join(current_dir, "integration_testing", "gameloop_apple")
tvos_gameloop_app = _build_tvos_gameloop(tvos_gameloop_project, device_name, device_os)
if not tvos_gameloop_app:
logging.error("gameloop app not found")
return 22
config_path = os.path.join(current_dir, "integration_testing", "build_testapps.json")
with open(config_path, "r") as configFile:
config = json.load(configFile)
if not config:
logging.error("No config file found")
return 23
for app_path in tvos_testapps:
bundle_id = _get_bundle_id(app_path, config)
      logs = _run_apple_gameloop_test(bundle_id, app_path, tvos_gameloop_app, device_id, _TEST_RETRY)
tests.append(Test(testapp_path=app_path, logs=logs))
_shutdown_simulator()
if android_testapps:
logging.info("Android Testapps found: %s", "\n".join(path for path in android_testapps))
if FLAGS.android_device:
device_info = TEST_DEVICES.get(FLAGS.android_device)
if not device_info:
logging.error("Not a valid android device: %s" % FLAGS.android_device)
return 30
sdk_id = device_info.get("image")
else:
sdk_id = FLAGS.android_sdk
platform_version = sdk_id.split(";")[1]
build_tool_version = FLAGS.build_tools_version
if not _check_java_version():
logging.error("Please set JAVA_HOME to java 8")
return 31
_setup_android(platform_version, build_tool_version, sdk_id)
_create_and_boot_emulator(sdk_id)
android_gameloop_project = os.path.join(current_dir, "integration_testing", "gameloop_android")
_install_android_gameloop_app(android_gameloop_project)
for app_path in android_testapps:
package_name = _get_package_name(app_path)
      logs = _run_android_gameloop_test(package_name, app_path, android_gameloop_project, _TEST_RETRY)
tests.append(Test(testapp_path=app_path, logs=logs))
_shutdown_emulator()
return test_validation.summarize_test_results(
tests,
test_validation.CPP,
testapp_dir,
file_name="test-results-" + FLAGS.logfile_name + ".log",
extra_info=" (ON SIMULATOR/EMULATOR)")
# -------------------Apple Only-------------------
def _build_ios_gameloop(gameloop_project, device_name, device_os):
"""Build gameloop UI Test app.
This gameloop app can run integration_test app automatically.
"""
project_path = os.path.join(gameloop_project, "gameloop.xcodeproj")
output_path = os.path.join(gameloop_project, "Build")
"""Build the gameloop app for test."""
args = ["xcodebuild", "-project", project_path,
"-scheme", "gameloop",
"build-for-testing",
"-destination", "platform=iOS Simulator,name=%s,OS=%s" % (device_name, device_os),
"SYMROOT=%s" % output_path]
logging.info("Building game-loop test: %s", " ".join(args))
subprocess.run(args=args, check=True)
for file_dir, _, file_names in os.walk(output_path):
for file_name in file_names:
if file_name.endswith(".xctestrun") and "iphonesimulator" in file_name:
return os.path.join(file_dir, file_name)
def _build_tvos_gameloop(gameloop_project, device_name, device_os):
"""Build gameloop UI Test app.
This gameloop app can run integration_test app automatically.
"""
project_path = os.path.join(gameloop_project, "gameloop.xcodeproj")
output_path = os.path.join(gameloop_project, "Build")
"""Build the gameloop app for test."""
args = ["xcodebuild", "-project", project_path,
"-scheme", "gameloop_tvos",
"build-for-testing",
"-destination", "platform=tvOS Simulator,name=%s,OS=%s" % (device_name, device_os),
"SYMROOT=%s" % output_path]
logging.info("Building game-loop test: %s", " ".join(args))
subprocess.run(args=args, check=True)
for file_dir, _, file_names in os.walk(output_path):
for file_name in file_names:
if file_name.endswith(".xctestrun") and "appletvsimulator" in file_name:
return os.path.join(file_dir, file_name)
def _run_xctest(gameloop_app, device_id):
"""Run the gameloop UI Test app.
This gameloop app can run integration_test app automatically.
"""
args = ["xcodebuild", "test-without-building",
"-xctestrun", gameloop_app,
"-destination", "id=%s" % device_id]
logging.info("Running game-loop test: %s", " ".join(args))
result = subprocess.run(args=args, capture_output=True, text=True, check=False)
if not result.stdout:
logging.info("No xctest result")
return
result = result.stdout.splitlines()
log_path = next((line for line in result if ".xcresult" in line), None)
logging.info("game-loop xctest result: %s", log_path)
return log_path
def _shutdown_simulator():
args = ["xcrun", "simctl", "shutdown", "all"]
logging.info("Shutdown all simulators: %s", " ".join(args))
subprocess.run(args=args, check=True)
def _create_and_boot_simulator(apple_platform, device_name, device_os):
"""Create a simulator locally. Will wait until this simulator booted."""
_shutdown_simulator()
command = "xcrun xctrace list devices 2>&1 | grep \"%s (%s)\" | awk -F'[()]' '{print $4}'" % (device_name, device_os)
logging.info("Get test simulator: %s", command)
result = subprocess.Popen(command, universal_newlines=True, shell=True, stdout=subprocess.PIPE)
device_id = result.stdout.read().strip()
if not device_id:
# download and create device
os.environ["GEM_HOME"] = "$HOME/.gem"
args = ["gem", "install", "xcode-install"]
logging.info("Download xcode-install: %s", " ".join(args))
subprocess.run(args=args, check=True)
args = ["xcversion", "simulators", "--install=%s %s" % (apple_platform, device_os)]
logging.info("Download simulator: %s", " ".join(args))
subprocess.run(args=args, check=False)
args = ["xcrun", "simctl", "create", "test_simulator", device_name, "%s%s" % (apple_platform, device_os)]
logging.info("Create test simulator: %s", " ".join(args))
result = subprocess.run(args=args, capture_output=True, text=True, check=True)
device_id = result.stdout.strip()
args = ["xcrun", "simctl", "boot", device_id]
logging.info("Boot my simulator: %s", " ".join(args))
subprocess.run(args=args, check=True)
args = ["xcrun", "simctl", "bootstatus", device_id]
logging.info("Wait for simulator to boot: %s", " ".join(args))
subprocess.run(args=args, check=True)
return device_id
def _delete_simulator(device_id):
"""Delete the created simulator."""
args = ["xcrun", "simctl", "delete", device_id]
logging.info("Delete created simulator: %s", " ".join(args))
subprocess.run(args=args, check=True)
def _reset_simulator_on_error(device_id, type=_RESET_TYPE_REBOOT):
_shutdown_simulator()
if type == _RESET_TYPE_WIPE_REBOOT:
args = ["xcrun", "simctl", "erase", device_id]
logging.info("Erase my simulator: %s", " ".join(args))
subprocess.run(args=args, check=True)
# reboot simulator: _RESET_TYPE_WIPE_REBOOT, _RESET_TYPE_REBOOT
args = ["xcrun", "simctl", "boot", device_id]
logging.info("Reboot my simulator: %s", " ".join(args))
subprocess.run(args=args, check=True)
def _get_bundle_id(app_path, config):
"""Get app bundle id from build_testapps.json file."""
for api in config["apis"]:
if api["name"] != "app" and (api["name"] in app_path or api["full_name"] in app_path):
return api["bundle_id"]
def _run_apple_gameloop_test(bundle_id, app_path, gameloop_app, device_id, retry=1):
"""Run gameloop test and collect test result."""
logging.info("Running apple gameloop test: %s, %s, %s, %s", bundle_id, app_path, gameloop_app, device_id)
_install_apple_app(app_path, device_id)
_run_xctest(gameloop_app, device_id)
log = _get_apple_test_log(bundle_id, app_path, device_id)
_uninstall_apple_app(bundle_id, device_id)
if retry > 1:
result = test_validation.validate_results(log, test_validation.CPP)
if not result.complete:
logging.info("Retry _run_apple_gameloop_test. Remaining retry: %s", retry-1)
return _run_apple_gameloop_test(bundle_id, app_path, gameloop_app, device_id, retry=retry-1)
return log
def _install_apple_app(app_path, device_id):
"""Install integration_test app into the simulator."""
args = ["xcrun", "simctl", "install", device_id, app_path]
logging.info("Install testapp: %s", " ".join(args))
_run_with_retry(args, device=device_id, type=_RESET_TYPE_WIPE_REBOOT)
def _uninstall_apple_app(bundle_id, device_id):
"""Uninstall integration_test app from the simulator."""
args = ["xcrun", "simctl", "uninstall", device_id, bundle_id]
logging.info("Uninstall testapp: %s", " ".join(args))
_run_with_retry(args, device=device_id, type=_RESET_TYPE_REBOOT)
def _get_apple_test_log(bundle_id, app_path, device_id):
"""Read integration_test app testing result."""
args=["xcrun", "simctl", "get_app_container", device_id, bundle_id, "data"]
logging.info("Get test result: %s", " ".join(args))
result = subprocess.run(
args=args,
capture_output=True, text=True, check=False)
if not result.stdout:
logging.info("No test Result")
return None
log_path = os.path.join(result.stdout.strip(), "Documents", "GameLoopResults", _RESULT_FILE)
log = _read_file(log_path)
logging.info("Apple test result: %s", log)
return log
def _read_file(path):
"""Extracts the contents of a file."""
if os.path.isfile(path):
with open(path, "r") as f:
test_result = f.read()
logging.info("Reading file: %s", path)
logging.info("File content: %s", test_result)
return test_result
# -------------------Android Only-------------------
def _check_java_version():
command = "java -version 2>&1 | awk -F[\\\"_] 'NR==1{print $2}'"
logging.info("Get java version: %s", command)
result = subprocess.Popen(command, universal_newlines=True, shell=True, stdout=subprocess.PIPE)
java_version = result.stdout.read().strip()
logging.info("Java version: %s", java_version)
return "1.8" in java_version
def _setup_android(platform_version, build_tool_version, sdk_id):
android_home = os.environ["ANDROID_HOME"]
pathlist = [os.path.join(android_home, "emulator"),
os.path.join(android_home, "tools"),
os.path.join(android_home, "tools", "bin"),
os.path.join(android_home, "platform-tools"),
os.path.join(android_home, "build-tools", build_tool_version)]
os.environ["PATH"] += os.pathsep + os.pathsep.join(pathlist)
args = ["sdkmanager",
"emulator", "platform-tools",
"platforms;%s" % platform_version,
"build-tools;%s" % build_tool_version]
logging.info("Install packages: %s", " ".join(args))
_run_with_retry(args)
command = "yes | sdkmanager --licenses"
logging.info("Accept all licenses: %s", command)
_run_with_retry(command, shell=True, check=False)
args = ["sdkmanager", sdk_id]
logging.info("Download an emulator: %s", " ".join(args))
_run_with_retry(args)
args = ["sdkmanager", "--update"]
logging.info("Update all installed packages: %s", " ".join(args))
_run_with_retry(args, check=False)
def _shutdown_emulator():
command = "adb devices | grep emulator | cut -f1 | while read line; do adb -s $line emu kill; done"
logging.info("Kill all running emulator: %s", command)
subprocess.Popen(command, universal_newlines=True, shell=True, stdout=subprocess.PIPE)
time.sleep(5)
args = ["adb", "kill-server"]
logging.info("Kill adb server: %s", " ".join(args))
subprocess.run(args=args, check=False)
time.sleep(5)
def _create_and_boot_emulator(sdk_id):
_shutdown_emulator()
command = "echo no | avdmanager -s create avd -n test_emulator -k '%s' -f" % sdk_id
logging.info("Create an emulator: %s", command)
subprocess.run(command, shell=True, check=True)
args = ["adb", "start-server"]
logging.info("Start adb server: %s", " ".join(args))
subprocess.run(args=args, check=True)
if not FLAGS.ci:
command = "$ANDROID_HOME/emulator/emulator -avd test_emulator &"
else:
command = "$ANDROID_HOME/emulator/emulator -avd test_emulator -no-window -no-audio -no-boot-anim -gpu swiftshader_indirect &"
logging.info("Boot test emulator: %s", command)
subprocess.Popen(command, universal_newlines=True, shell=True, stdout=subprocess.PIPE)
args = ["adb", "wait-for-device"]
logging.info("Wait for emulator to boot: %s", " ".join(args))
subprocess.run(args=args, check=True)
if FLAGS.ci:
    # wait an extra 210 seconds to ensure the emulator is fully booted.
time.sleep(210)
else:
time.sleep(45)
def _reset_emulator_on_error(type=_RESET_TYPE_REBOOT):
if type == _RESET_TYPE_WIPE_REBOOT:
# wipe emulator data
args = ["adb", "shell", "recovery", "--wipe_data"]
logging.info("Erase my Emulator: %s", " ".join(args))
subprocess.run(args=args, check=True)
# reboot emulator: _RESET_TYPE_WIPE_REBOOT, _RESET_TYPE_REBOOT
logging.info("game-loop test error!!! reboot emualtor...")
args = ["adb", "-e", "reboot"]
logging.info("Reboot android emulator: %s", " ".join(args))
subprocess.run(args=args, check=True)
args = ["adb", "wait-for-device"]
logging.info("Wait for emulator to boot: %s", " ".join(args))
subprocess.run(args=args, check=True)
if FLAGS.ci:
    # wait an extra 210 seconds to ensure the emulator is fully booted.
time.sleep(210)
else:
time.sleep(45)
def _get_package_name(app_path):
command = "aapt dump badging %s | awk -v FS=\"'\" '/package: name=/{print $2}'" % app_path
logging.info("Get package_name: %s", command)
result = subprocess.Popen(command, universal_newlines=True, shell=True, stdout=subprocess.PIPE)
package_name = result.stdout.read().strip()
return package_name
def _run_android_gameloop_test(package_name, app_path, gameloop_project, retry=1):
logging.info("Running android gameloop test: %s, %s, %s", package_name, app_path, gameloop_project)
_install_android_app(app_path)
_run_instrumented_test()
log = _get_android_test_log(package_name)
_uninstall_android_app(package_name)
if retry > 1:
result = test_validation.validate_results(log, test_validation.CPP)
if not result.complete:
logging.info("Retry _run_android_gameloop_test. Remaining retry: %s", retry-1)
return _run_android_gameloop_test(package_name, app_path, gameloop_project, retry=retry-1)
return log
def _install_android_app(app_path):
"""Install integration_test app into the emulator."""
args = ["adb", "install", app_path]
logging.info("Install testapp: %s", " ".join(args))
_run_with_retry(args, device=_DEVICE_ANDROID, type=_RESET_TYPE_WIPE_REBOOT)
def _uninstall_android_app(package_name):
"""Uninstall integration_test app from the emulator."""
args = ["adb", "uninstall", package_name]
logging.info("Uninstall testapp: %s", " ".join(args))
_run_with_retry(args, device=_DEVICE_ANDROID, type=_RESET_TYPE_REBOOT)
def _install_android_gameloop_app(gameloop_project):
os.chdir(gameloop_project)
logging.info("cd to gameloop_project: %s", gameloop_project)
args = ["adb", "uninstall", "com.google.firebase.gameloop"]
_run_with_retry(args, check=False, device=_DEVICE_ANDROID, type=_RESET_TYPE_REBOOT)
args = ["./gradlew", "clean"]
logging.info("Clean game-loop cache: %s", " ".join(args))
_run_with_retry(args, check=False, device=_DEVICE_ANDROID, type=_RESET_TYPE_REBOOT)
args = ["./gradlew", "installDebug", "installDebugAndroidTest"]
logging.info("Installing game-loop app and test: %s", " ".join(args))
_run_with_retry(args, device=_DEVICE_ANDROID, type=_RESET_TYPE_REBOOT)
def _run_instrumented_test():
"""Run the gameloop UI Test app.
This gameloop app can run integration_test app automatically.
"""
args = ["adb", "shell", "am", "instrument",
"-w", "%s.test/androidx.test.runner.AndroidJUnitRunner" % _GAMELOOP_PACKAGE]
logging.info("Running game-loop test: %s", " ".join(args))
result = subprocess.run(args=args, capture_output=True, text=True, check=False)
if "FAILURES!!!" in result.stdout:
_reset_emulator_on_error(_RESET_TYPE_REBOOT)
def _get_android_test_log(test_package):
"""Read integration_test app testing result."""
# getFilesDir() -> /data/data/package/files
path = "/data/data/%s/files/%s/%s" % (_GAMELOOP_PACKAGE, test_package, _RESULT_FILE)
args = ["adb", "shell", "su", "0", "cat", path]
logging.info("Get android test result: %s", " ".join(args))
result = subprocess.run(args=args, capture_output=True, text=True, check=False)
logging.info("Android test result: %s", result.stdout)
return result.stdout
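# Shared retry helper: runs `args` up to `retry_time` times. On failure it
# resets the target before recursing (the Android emulator, or the Apple
# simulator whose id is passed in `device`; skipped for _DEVICE_NONE), using
# the given reset `type`. The final attempt runs unguarded so the original
# exception propagates to the caller.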
def _run_with_retry(args, shell=False, check=True, timeout=_CMD_TIMEOUT, retry_time=_TEST_RETRY, device=_DEVICE_NONE, type=_RESET_TYPE_REBOOT):
logging.info("run_with_retry: %s; remaining retry: %s", args, retry_time)
if retry_time > 1:
try:
subprocess.run(args, shell=shell, check=check, timeout=timeout)
    except Exception:
if device == _DEVICE_NONE:
pass
elif device == _DEVICE_ANDROID:
# Android
_reset_emulator_on_error(type)
else:
# Apple
_reset_simulator_on_error(device, type)
_run_with_retry(args, shell, check, timeout, retry_time-1, device, type)
else:
subprocess.run(args, shell=shell, check=check, timeout=timeout)
if __name__ == '__main__':
flags.mark_flag_as_required("testapp_dir")
app.run(main)
| firebase/firebase-cpp-sdk | scripts/gha/test_simulator.py | Python | apache-2.0 | 25,967 |
# Copyright 2016 Autodesk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import json
from . import args_from
# TODO: defined JSON types that we can serialize directly into MDT objects OR
# use a JSON "pickling" library (only if there's more complexity than covered here already)
class JsonEncoder(json.JSONEncoder):
def default(self, obj):
if hasattr(obj, 'to_json'):
return obj.to_json()
elif hasattr(obj, 'tolist'):
return obj.tolist()
else:
raise TypeError('No seralizer for object "%s" (class: %s)'
% (obj,obj.__class__.__name__))
@args_from(json.dump)
def json_dump(*args, **kwargs):
return json.dump(*args, cls=JsonEncoder, **kwargs)
@args_from(json.dumps)
def json_dumps(*args, **kwargs):
return json.dumps(*args, cls=JsonEncoder, **kwargs)
| Autodesk/molecular-design-toolkit | moldesign/utils/json_extension.py | Python | apache-2.0 | 1,405 |
"""This file has only one function: to provide a correctly configured
DcosApiSession object that will be injected into the pytest 'dcos_api_session' fixture
via the make_session_fixture() method
"""
from dcos_test_utils.dcos_api_session import DcosApiSession, DcosUser
from dcos_test_utils.helpers import CI_CREDENTIALS
from test_helpers import expanded_config
def make_session_fixture():
args = DcosApiSession.get_args_from_env()
exhibitor_admin_password = None
if expanded_config['exhibitor_admin_password_enabled'] == 'true':
exhibitor_admin_password = expanded_config['exhibitor_admin_password']
dcos_api_session = DcosApiSession(
auth_user=DcosUser(CI_CREDENTIALS),
exhibitor_admin_password=exhibitor_admin_password,
**args)
dcos_api_session.wait_for_dcos()
return dcos_api_session
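# Typical wiring (illustrative only; the fixture name comes from the module
# docstring above):
#   import pytest
#
#   @pytest.fixture(scope='session')
#   def dcos_api_session():
#       return make_session_fixture()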
| asridharan/dcos | packages/dcos-integration-test/extra/api_session_fixture.py | Python | apache-2.0 | 848 |
#!/usr/bin/env python
# Copyright 2016 Criteo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cache for the accessors."""
from __future__ import absolute_import
from __future__ import print_function
import abc
import hashlib
import cachetools
class AccessorCache(object):
"""A cache that can be given to an accessor.
It looks like Django's cache.
https://docs.djangoproject.com/en/1.11/topics/cache/#the-low-level-cache-api
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def set(self, key, value, timeout=None, version=None):
"""Set a key in the cache."""
pass
@abc.abstractmethod
def get(self, key, default=None, version=None):
"""Get a single key."""
pass
def set_many(self, data, timeout=None, version=None):
"""Set a bunch of keys in the cache."""
for key, value in data.items():
self.set(key, value, timeout=timeout, version=version)
def get_many(self, keys, version=None):
"""Fetch a bunch of keys from the cache.
Args:
keys: a list of keys.
version: an optional version.
"""
d = {}
for k in keys:
val = self.get(k, version=version)
if val is not None:
d[k] = val
return d
class MemoryCache(AccessorCache):
"""A per-process memory cache."""
def __init__(self, size, ttl):
"""Initialize the memory cache."""
super(MemoryCache, self).__init__()
self.__size = size
self.__ttl = ttl
self.__cache = cachetools.TTLCache(maxsize=self.__size, ttl=self.__ttl)
def _make_key(self, key, version):
return str(version) + "-" + key
def set(self, key, value, timeout=None, version=None):
"""Set a key in the cache."""
self.__cache[self._make_key(key, version)] = value
def get(self, key, default=None, version=None):
"""Get a single key."""
return self.__cache.get(self._make_key(key, version), default=default)
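# Minimal usage sketch (the size/ttl values are arbitrary):
#   cache = MemoryCache(size=10000, ttl=60)
#   cache.set("a.b.c", 42, version=1)
#   assert cache.get("a.b.c", version=1) == 42
#   assert cache.get_many(["a.b.c", "missing"], version=1) == {"a.b.c": 42}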
class DjangoCache(AccessorCache):
"""Django cache, but safe."""
def __init__(self, django_cache):
"""Initialize the cache."""
self.__cache = django_cache
def _make_key(self, key):
"""Construct a clean key from a key."""
return hashlib.md5(key).hexdigest()
def set(self, key, value, timeout=None, version=None):
"""Set a key."""
key = self._make_key(key)
return self.__cache.set(key, value, timeout=timeout, version=version)
    def get(self, key, default=None, version=None):
        """Get a key."""
        key = self._make_key(key)
        return self.__cache.get(key, default=default, version=version)
def set_many(self, data, timeout=None, version=None):
"""Set a bunch of keys in the cache."""
new_data = {self._make_key(key): value for key, value in data.items()}
        return self.__cache.set_many(new_data, timeout=timeout, version=version)
def get_many(self, keys, version=None):
"""Fetch a bunch of keys from the cache."""
keymap = {self._make_key(key): key for key in keys}
        data = self.__cache.get_many(keymap.keys(), version=version)
return {keymap[key]: value for key, value in data.items()}
| iksaif/biggraphite | biggraphite/accessor_cache.py | Python | apache-2.0 | 3,791 |
# -*- coding: utf-8 -*-
from django.db import models
import random
import string
class myURL(models.Model):
url = models.URLField(verbose_name='Your base url', unique=True)
secret = models.CharField(verbose_name='My Secret Key', max_length=6, unique=True)
username = models.CharField(verbose_name='Nickname', max_length=255, blank=True, null=True)
    accessNb = models.FloatField(default=0, verbose_name='Number of accesses allowed')
date = models.DateTimeField(verbose_name='Added', auto_now_add=True)
def __unicode__(self):
return u'[{0}] {1}'.format(self.secret, self.url)
def save(self, *args, **kwargs):
if self.pk is None:
self.generate(6)
super(myURL, self).save(*args, **kwargs)
def generate(self, N):
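        # Draws N characters from [a-zA-Z0-9]. `secret` is unique=True, so a
        # (rare) collision surfaces as an IntegrityError when save() hits the
        # database.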
char = string.letters + string.digits
randomize = [random.choice(char) for _ in xrange(N)]
self.secret = ''.join(randomize)
class Meta:
verbose_name="URL Reductor" | Tosta-Mixta/urlreductor | mini_url/models.py | Python | apache-2.0 | 981 |
import math
import random
class Point(object):
def __init__(self, x, y, z=0):
self.x = x
self.y = y
self.z = z
def get_x(self):
return self.x
def get_y(self):
return self.y
def get_z(self):
return self.z
def set_x(self, x):
self.x = x
return self
def set_y(self, y):
self.y = y
return self
def set_z(self, z):
self.z = z
return self
def dist_to(self, other_point):
return math.sqrt(
pow(self.x - other_point.x, 2) +
pow(self.y - other_point.y, 2) +
pow(self.z - other_point.z, 2)
)
def to_unit_vector(self):
mag = self.dist_to(Point(0, 0, 0))
if mag == 0:
return Point(0, 0, 0)
else:
return Point(self.x / mag, self.y / mag, self.z / mag)
def to_list(self):
return [self.x, self.y, self.z]
def __str__(self):
return "X: {0}, Y: {1}, Z: {2}".format(self.x, self.y, self.z)
def __repr__(self):
return "Point({0}, {1}, z={2})".format(self.x, self.y, self.z)
def __hash__(self):
return hash(str(self))
def __sub__(self, other):
return Point(
self.x - other.x,
self.y - other.y,
self.z - other.z
)
def __mul__(self, scalar):
return Point(
scalar * self.x,
scalar * self.y,
scalar * self.z
)
def __eq__(self, val):
try:
return val.x == self.x and val.y == self.y and val.z == self.z
        except AttributeError:
return False
def get_random_point(width, height):
x = random.randint(0, width)
y = random.randint(0, height)
return Point(x, y)
def get_random_point_3d(width, height, depth):
p = get_random_point(width, height)
p.set_z(random.randint(0, depth))
return p
def get_noisy_point(std):
p = Point(0, 0, 0)
p.set_x(random.gauss(0, std))
p.set_y(random.gauss(0, std))
return p
def get_noisy_point_3d(std):
p = Point(0, 0, 0)
p.set_x(random.gauss(0, std))
p.set_y(random.gauss(0, std))
p.set_z(random.gauss(0, std))
return p
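# Quick sanity check (illustrative):
#   p = Point(3, 4)
#   assert p.dist_to(Point(0, 0)) == 5.0
#   assert p.to_unit_vector() == Point(0.6, 0.8)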
| wallarelvo/rover | rover/point.py | Python | apache-2.0 | 2,222 |
# coding=utf-8
# Copyright 2022 GradMax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data pipeline.
Forked from simclr/tf2 codebase.
"""
from absl import logging
import tensorflow.compat.v2 as tf
import tensorflow_datasets as tfds
def build_input_fn(
builder,
global_batch_size,
topology,
is_training,
    cache_dataset=True):
"""Build input function.
Args:
builder: TFDS builder for specified dataset.
global_batch_size: Global batch size.
topology: An instance of `tf.tpu.experimental.Topology` or None.
is_training: Whether to build in training mode.
cache_dataset: bool, whether to cache the dataset.
Returns:
A function that accepts a dict of params and returns a tuple of images and
features, to be used as the input_fn in TPUEstimator.
"""
def _input_fn(input_context):
"""Inner input function."""
batch_size = input_context.get_per_replica_batch_size(global_batch_size)
logging.info('Global batch size: %d', global_batch_size)
logging.info('Per-replica batch size: %d', batch_size)
def map_fn(image, label):
"""Produces multiple transformations of the same batch."""
if is_training:
image_shape = tf.shape(image)
        # Pad the image by 2 pixels on each side, then random-crop back to the original size.
image = tf.image.resize_with_crop_or_pad(
image, image_shape[0] + 4, image_shape[1] + 4)
image = tf.image.random_crop(image, (image_shape[0], image_shape[0], 3))
image = tf.image.random_flip_left_right(image)
image = tf.image.convert_image_dtype(image, tf.float32)
return image, label
dataset = builder.as_dataset(
split='train' if is_training else 'test',
shuffle_files=is_training,
as_supervised=True)
logging.info('num_input_pipelines: %d', input_context.num_input_pipelines)
# The dataset is always sharded by number of hosts.
# num_input_pipelines is the number of hosts rather than number of cores.
if input_context.num_input_pipelines > 1:
dataset = dataset.shard(input_context.num_input_pipelines,
input_context.input_pipeline_id)
if cache_dataset:
dataset = dataset.cache()
if is_training:
dataset = dataset.shuffle(50000)
dataset = dataset.repeat(-1)
dataset = dataset.map(
map_fn, num_parallel_calls=tf.data.experimental.AUTOTUNE)
dataset = dataset.batch(batch_size, drop_remainder=is_training)
prefetch_buffer_size = 2 * topology.num_tpus_per_task if topology else 2
dataset = dataset.prefetch(prefetch_buffer_size)
return dataset
return _input_fn
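# Illustrative wiring (a sketch; the builder and strategy objects are
# assumptions, not part of this module; distribute_datasets_from_function
# requires TF 2.4+):
#   builder = tfds.builder('cifar10')
#   builder.download_and_prepare()
#   input_fn = build_input_fn(builder, global_batch_size=512, topology=None,
#                             is_training=True)
#   ds = strategy.distribute_datasets_from_function(input_fn)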
| google-research/growneuron | growneuron/cifar/data.py | Python | apache-2.0 | 3,185 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import bz2
import errno
import filecmp
import gzip
import logging
import shutil
import tempfile
import unittest
from airflow.utils import compression
class TestCompression(unittest.TestCase):
def setUp(self):
self.file_names = {}
try:
header = b"Sno\tSome,Text \n"
line1 = b"1\tAirflow Test\n"
line2 = b"2\tCompressionUtil\n"
self.tmp_dir = tempfile.mkdtemp(prefix='test_utils_compression_')
# create sample txt, gz and bz2 files
with tempfile.NamedTemporaryFile(mode='wb+',
dir=self.tmp_dir,
delete=False) as f_txt:
self._set_fn(f_txt.name, '.txt')
f_txt.writelines([header, line1, line2])
fn_gz = self._get_fn('.txt') + ".gz"
with gzip.GzipFile(filename=fn_gz,
mode="wb") as f_gz:
self._set_fn(fn_gz, '.gz')
f_gz.writelines([header, line1, line2])
fn_bz2 = self._get_fn('.txt') + '.bz2'
with bz2.BZ2File(filename=fn_bz2, mode="wb") as f_bz2:
self._set_fn(fn_bz2, '.bz2')
f_bz2.writelines([header, line1, line2])
# Base Exception so it catches Keyboard Interrupt
except BaseException as e:
logging.error(e)
self.tearDown()
def tearDown(self):
try:
shutil.rmtree(self.tmp_dir)
except OSError as e:
# ENOENT - no such file or directory
if e.errno != errno.ENOENT:
raise e
# Helper method to create a dictionary of file names and
# file extension
def _set_fn(self, fn, ext):
self.file_names[ext] = fn
# Helper method to fetch a file of a
# certain extension
def _get_fn(self, ext):
return self.file_names[ext]
def test_uncompress_file(self):
# Testing txt file type
self.assertRaisesRegex(NotImplementedError,
"^Received .txt format. Only gz and bz2.*",
compression.uncompress_file,
**{'input_file_name': None,
'file_extension': '.txt',
'dest_dir': None
})
# Testing gz file type
fn_txt = self._get_fn('.txt')
fn_gz = self._get_fn('.gz')
txt_gz = compression.uncompress_file(fn_gz, '.gz', self.tmp_dir)
self.assertTrue(filecmp.cmp(txt_gz, fn_txt, shallow=False),
msg="Uncompressed file doest match original")
# Testing bz2 file type
fn_bz2 = self._get_fn('.bz2')
txt_bz2 = compression.uncompress_file(fn_bz2, '.bz2', self.tmp_dir)
self.assertTrue(filecmp.cmp(txt_bz2, fn_txt, shallow=False),
msg="Uncompressed file doest match original")
| wooga/airflow | tests/utils/test_compression.py | Python | apache-2.0 | 3,779 |
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import glob
import os
from resource_management.core import shell, sudo
from resource_management.core.logger import Logger
from resource_management.core.resources import Directory
from resource_management.core.resources.system import Execute, File
from resource_management.core.source import InlineTemplate
from resource_management.libraries import XmlConfig
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.check_process_status import check_process_status
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.script.script import Script
class Master(Script):
def install(self, env):
import params
env.set_params(params)
self.install_packages(env)
self.create_zeppelin_log_dir(env)
if params.spark_version:
Execute('echo spark_version:' + str(params.spark_version) + ' detected for spark_home: '
+ params.spark_home + ' >> ' + params.zeppelin_log_file, user=params.zeppelin_user)
if params.spark2_version:
Execute('echo spark2_version:' + str(params.spark2_version) + ' detected for spark2_home: '
+ params.spark2_home + ' >> ' + params.zeppelin_log_file, user=params.zeppelin_user)
def create_zeppelin_dir(self, params):
params.HdfsResource(format("/user/{zeppelin_user}"),
type="directory",
action="create_on_execute",
owner=params.zeppelin_user,
recursive_chown=True,
recursive_chmod=True
)
params.HdfsResource(format("/user/{zeppelin_user}/test"),
type="directory",
action="create_on_execute",
owner=params.zeppelin_user,
recursive_chown=True,
recursive_chmod=True
)
params.HdfsResource(format("/apps/zeppelin"),
type="directory",
action="create_on_execute",
owner=params.zeppelin_user,
recursive_chown=True,
recursive_chmod=True
)
spark_deps_full_path = self.get_zeppelin_spark_dependencies()[0]
spark_dep_file_name = os.path.basename(spark_deps_full_path)
params.HdfsResource(params.spark_jar_dir + "/" + spark_dep_file_name,
type="file",
action="create_on_execute",
source=spark_deps_full_path,
group=params.zeppelin_group,
owner=params.zeppelin_user,
mode=0444,
replace_existing_files=True,
)
params.HdfsResource(None, action="execute")
def create_zeppelin_log_dir(self, env):
import params
env.set_params(params)
Directory([params.zeppelin_log_dir],
owner=params.zeppelin_user,
group=params.zeppelin_group,
cd_access="a",
create_parents=True,
mode=0755
)
def create_zeppelin_hdfs_conf_dir(self, env):
import params
env.set_params(params)
Directory([params.external_dependency_conf],
owner=params.zeppelin_user,
group=params.zeppelin_group,
cd_access="a",
create_parents=True,
mode=0755
)
def chown_zeppelin_pid_dir(self, env):
import params
env.set_params(params)
Execute(("chown", "-R", format("{zeppelin_user}") + ":" + format("{zeppelin_group}"), params.zeppelin_pid_dir),
sudo=True)
def configure(self, env):
import params
import status_params
env.set_params(params)
env.set_params(status_params)
self.create_zeppelin_log_dir(env)
# create the pid and zeppelin dirs
Directory([params.zeppelin_pid_dir, params.zeppelin_dir],
owner=params.zeppelin_user,
group=params.zeppelin_group,
cd_access="a",
create_parents=True,
mode=0755
)
self.chown_zeppelin_pid_dir(env)
# write out zeppelin-site.xml
XmlConfig("zeppelin-site.xml",
conf_dir=params.conf_dir,
configurations=params.config['configurations']['zeppelin-config'],
owner=params.zeppelin_user,
group=params.zeppelin_group
)
# write out zeppelin-env.sh
env_content = InlineTemplate(params.zeppelin_env_content)
File(format("{params.conf_dir}/zeppelin-env.sh"), content=env_content,
owner=params.zeppelin_user, group=params.zeppelin_group)
# write out shiro.ini
shiro_ini_content = InlineTemplate(params.shiro_ini_content)
File(format("{params.conf_dir}/shiro.ini"), content=shiro_ini_content,
owner=params.zeppelin_user, group=params.zeppelin_group)
# write out log4j.properties
File(format("{params.conf_dir}/log4j.properties"), content=params.log4j_properties_content,
owner=params.zeppelin_user, group=params.zeppelin_group)
self.create_zeppelin_hdfs_conf_dir(env)
if len(params.hbase_master_hosts) > 0 and params.is_hbase_installed:
# copy hbase-site.xml
XmlConfig("hbase-site.xml",
conf_dir=params.external_dependency_conf,
configurations=params.config['configurations']['hbase-site'],
configuration_attributes=params.config['configuration_attributes']['hbase-site'],
owner=params.zeppelin_user,
group=params.zeppelin_group,
mode=0644)
XmlConfig("hdfs-site.xml",
conf_dir=params.external_dependency_conf,
configurations=params.config['configurations']['hdfs-site'],
configuration_attributes=params.config['configuration_attributes']['hdfs-site'],
owner=params.zeppelin_user,
group=params.zeppelin_group,
mode=0644)
XmlConfig("core-site.xml",
conf_dir=params.external_dependency_conf,
configurations=params.config['configurations']['core-site'],
configuration_attributes=params.config['configuration_attributes']['core-site'],
owner=params.zeppelin_user,
group=params.zeppelin_group,
mode=0644)
def check_and_copy_notebook_in_hdfs(self, params):
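    # Resolve the notebook dir (an absolute path, or a path relative to the
    # zeppelin user's HDFS home), probe it with `hdfs dfs -test -e` (exit
    # status 1 means missing), and seed it from the local notebook dir on
    # first run.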
if params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir'].startswith("/"):
notebook_directory = params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir']
else:
notebook_directory = "/user/" + format("{zeppelin_user}") + "/" + \
params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir']
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
notebook_directory_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -e {notebook_directory};echo $?"),
user=params.zeppelin_user)[1]
    # If there is no kerberos setup, the string will contain "-bash: kinit: command not found"
if "\n" in notebook_directory_exists:
notebook_directory_exists = notebook_directory_exists.split("\n")[1]
    # '1' means it does not exist
if notebook_directory_exists == '1':
# hdfs dfs -mkdir {notebook_directory}
params.HdfsResource(format("{notebook_directory}"),
type="directory",
action="create_on_execute",
owner=params.zeppelin_user,
recursive_chown=True,
recursive_chmod=True
)
# hdfs dfs -put /usr/hdp/current/zeppelin-server/notebook/ {notebook_directory}
params.HdfsResource(format("{notebook_directory}"),
type="directory",
action="create_on_execute",
source=params.notebook_dir,
owner=params.zeppelin_user,
recursive_chown=True,
recursive_chmod=True
)
def stop(self, env, upgrade_type=None):
import params
self.create_zeppelin_log_dir(env)
self.chown_zeppelin_pid_dir(env)
Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh stop >> ' + params.zeppelin_log_file,
user=params.zeppelin_user)
def start(self, env, upgrade_type=None):
import params
import status_params
self.configure(env)
Execute(("chown", "-R", format("{zeppelin_user}") + ":" + format("{zeppelin_group}"), "/etc/zeppelin"),
sudo=True)
Execute(("chown", "-R", format("{zeppelin_user}") + ":" + format("{zeppelin_group}"),
os.path.join(params.zeppelin_dir, "notebook")), sudo=True)
if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \
and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
self.check_and_copy_notebook_in_hdfs(params)
if params.security_enabled:
zeppelin_kinit_cmd = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ")
Execute(zeppelin_kinit_cmd, user=params.zeppelin_user)
zeppelin_spark_dependencies = self.get_zeppelin_spark_dependencies()
if zeppelin_spark_dependencies and os.path.exists(zeppelin_spark_dependencies[0]):
self.create_zeppelin_dir(params)
# if first_setup:
if not glob.glob(params.conf_dir + "/interpreter.json") and \
not os.path.exists(params.conf_dir + "/interpreter.json"):
self.create_interpreter_json()
self.update_zeppelin_interpreter()
if params.zeppelin_interpreter_config_upgrade == True:
self.reset_interpreter_settings()
self.update_zeppelin_interpreter()
Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh restart >> '
+ params.zeppelin_log_file, user=params.zeppelin_user)
pidfile = glob.glob(os.path.join(status_params.zeppelin_pid_dir,
'zeppelin-' + params.zeppelin_user + '*.pid'))[0]
Logger.info(format("Pid file is: {pidfile}"))
def status(self, env):
import status_params
env.set_params(status_params)
try:
pid_file = glob.glob(status_params.zeppelin_pid_dir + '/zeppelin-' +
status_params.zeppelin_user + '*.pid')[0]
except IndexError:
pid_file = ''
check_process_status(pid_file)
def reset_interpreter_settings(self):
import json
import interpreter_json_template
interpreter_json_template = json.loads(interpreter_json_template.template)['interpreterSettings']
config_data = self.get_interpreter_settings()
interpreter_settings = config_data['interpreterSettings']
for setting_key in interpreter_json_template.keys():
if setting_key not in interpreter_settings:
interpreter_settings[setting_key] = interpreter_json_template[
setting_key]
self.set_interpreter_settings(config_data)
def get_interpreter_settings(self):
import params
import json
interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
config_content = sudo.read_file(interpreter_config)
config_data = json.loads(config_content)
return config_data
def pre_upgrade_restart(self, env, upgrade_type=None):
Logger.info("Executing Stack Upgrade pre-restart")
import params
env.set_params(params)
if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)):
stack_select.select_packages(params.version)
def set_interpreter_settings(self, config_data):
import params
import json
interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
File(interpreter_config,
group=params.zeppelin_group,
owner=params.zeppelin_user,
content=json.dumps(config_data, indent=2)
)
def update_kerberos_properties(self):
import params
config_data = self.get_interpreter_settings()
interpreter_settings = config_data['interpreterSettings']
for interpreter_setting in interpreter_settings:
interpreter = interpreter_settings[interpreter_setting]
if interpreter['group'] == 'livy' and params.livy_livyserver_host:
if params.zeppelin_kerberos_principal and params.zeppelin_kerberos_keytab and params.security_enabled:
interpreter['properties']['zeppelin.livy.principal'] = params.zeppelin_kerberos_principal
interpreter['properties']['zeppelin.livy.keytab'] = params.zeppelin_kerberos_keytab
else:
interpreter['properties']['zeppelin.livy.principal'] = ""
interpreter['properties']['zeppelin.livy.keytab'] = ""
elif interpreter['group'] == 'spark':
if params.zeppelin_kerberos_principal and params.zeppelin_kerberos_keytab and params.security_enabled:
interpreter['properties']['spark.yarn.principal'] = params.zeppelin_kerberos_principal
interpreter['properties']['spark.yarn.keytab'] = params.zeppelin_kerberos_keytab
else:
interpreter['properties']['spark.yarn.principal'] = ""
interpreter['properties']['spark.yarn.keytab'] = ""
elif interpreter['group'] == 'jdbc':
if params.zeppelin_kerberos_principal and params.zeppelin_kerberos_keytab and params.security_enabled:
interpreter['properties']['zeppelin.jdbc.auth.type'] = "KERBEROS"
interpreter['properties']['zeppelin.jdbc.principal'] = params.zeppelin_kerberos_principal
interpreter['properties']['zeppelin.jdbc.keytab.location'] = params.zeppelin_kerberos_keytab
if params.zookeeper_znode_parent \
and params.hbase_zookeeper_quorum \
and 'phoenix.url' in interpreter['properties'] \
and params.zookeeper_znode_parent not in interpreter['properties']['phoenix.url']:
interpreter['properties']['phoenix.url'] = "jdbc:phoenix:" + \
params.hbase_zookeeper_quorum + ':' + \
params.zookeeper_znode_parent
else:
interpreter['properties']['zeppelin.jdbc.auth.type'] = "SIMPLE"
interpreter['properties']['zeppelin.jdbc.principal'] = ""
interpreter['properties']['zeppelin.jdbc.keytab.location'] = ""
elif interpreter['group'] == 'sh':
if params.zeppelin_kerberos_principal and params.zeppelin_kerberos_keytab and params.security_enabled:
interpreter['properties']['zeppelin.shell.auth.type'] = "KERBEROS"
interpreter['properties']['zeppelin.shell.principal'] = params.zeppelin_kerberos_principal
interpreter['properties']['zeppelin.shell.keytab.location'] = params.zeppelin_kerberos_keytab
else:
interpreter['properties']['zeppelin.shell.auth.type'] = ""
interpreter['properties']['zeppelin.shell.principal'] = ""
interpreter['properties']['zeppelin.shell.keytab.location'] = ""
self.set_interpreter_settings(config_data)
def update_zeppelin_interpreter(self):
import params
config_data = self.get_interpreter_settings()
interpreter_settings = config_data['interpreterSettings']
if 'spark2-defaults' in params.config['configurations']:
spark2_config = self.get_spark2_interpreter_config()
config_id = spark2_config["id"]
interpreter_settings[config_id] = spark2_config
if params.livy2_livyserver_host:
livy2_config = self.get_livy2_interpreter_config()
config_id = livy2_config["id"]
interpreter_settings[config_id] = livy2_config
if params.zeppelin_interpreter:
settings_to_delete = []
for settings_key, interpreter in interpreter_settings.items():
if interpreter['group'] not in params.zeppelin_interpreter:
settings_to_delete.append(settings_key)
for key in settings_to_delete:
del interpreter_settings[key]
hive_interactive_properties_key = 'hive_interactive'
for setting_key in interpreter_settings.keys():
interpreter = interpreter_settings[setting_key]
if interpreter['group'] == 'jdbc':
interpreter['dependencies'] = []
if not params.hive_server_host and params.hive_server_interactive_hosts:
hive_interactive_properties_key = 'hive'
if params.hive_server_host:
interpreter['properties']['hive.driver'] = 'org.apache.hive.jdbc.HiveDriver'
interpreter['properties']['hive.user'] = 'hive'
interpreter['properties']['hive.password'] = ''
interpreter['properties']['hive.proxy.user.property'] = 'hive.server2.proxy.user'
if params.hive_server2_support_dynamic_service_discovery:
interpreter['properties']['hive.url'] = 'jdbc:hive2://' + \
params.hive_zookeeper_quorum + \
'/;' + 'serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=' + \
params.hive_zookeeper_namespace
else:
interpreter['properties']['hive.url'] = 'jdbc:hive2://' + \
params.hive_server_host + \
':' + params.hive_server_port
if params.hive_server_interactive_hosts:
interpreter['properties'][hive_interactive_properties_key + '.driver'] = 'org.apache.hive.jdbc.HiveDriver'
interpreter['properties'][hive_interactive_properties_key + '.user'] = 'hive'
interpreter['properties'][hive_interactive_properties_key + '.password'] = ''
interpreter['properties'][hive_interactive_properties_key + '.proxy.user.property'] = 'hive.server2.proxy.user'
if params.hive_server2_support_dynamic_service_discovery:
interpreter['properties'][hive_interactive_properties_key + '.url'] = 'jdbc:hive2://' + \
params.hive_zookeeper_quorum + \
'/;' + 'serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=' + \
params.hive_interactive_zookeeper_namespace
else:
interpreter['properties'][hive_interactive_properties_key + '.url'] = 'jdbc:hive2://' + \
params.hive_server_interactive_hosts + \
':' + params.hive_server_port
if params.spark_thrift_server_hosts:
interpreter['properties']['spark.driver'] = 'org.apache.hive.jdbc.HiveDriver'
interpreter['properties']['spark.user'] = 'hive'
interpreter['properties']['spark.password'] = ''
interpreter['properties']['spark.proxy.user.property'] = 'hive.server2.proxy.user'
interpreter['properties']['spark.url'] = 'jdbc:hive2://' + \
params.spark_thrift_server_hosts + ':' + params.spark_hive_thrift_port + '/'
if params.spark_hive_principal:
interpreter['properties']['spark.url'] += ';principal=' + params.spark_hive_principal
if params.spark2_thrift_server_hosts:
interpreter['properties']['spark2.driver'] = 'org.apache.hive.jdbc.HiveDriver'
interpreter['properties']['spark2.user'] = 'hive'
interpreter['properties']['spark2.password'] = ''
interpreter['properties']['spark2.proxy.user.property'] = 'hive.server2.proxy.user'
interpreter['properties']['spark2.url'] = 'jdbc:hive2://' + \
params.spark2_thrift_server_hosts + ':' + params.spark2_hive_thrift_port + '/'
if params.spark_hive_principal:
interpreter['properties']['spark2.url'] += ';principal=' + params.spark2_hive_principal
if params.zookeeper_znode_parent \
and params.hbase_zookeeper_quorum:
interpreter['properties']['phoenix.driver'] = 'org.apache.phoenix.jdbc.PhoenixDriver'
interpreter['properties']['phoenix.hbase.client.retries.number'] = '1'
interpreter['properties']['phoenix.user'] = 'phoenixuser'
interpreter['properties']['phoenix.password'] = ''
interpreter['properties']['phoenix.url'] = "jdbc:phoenix:" + \
params.hbase_zookeeper_quorum + ':' + \
params.zookeeper_znode_parent
elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy':
if params.livy_livyserver_host:
interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy_livyserver_host + \
":" + params.livy_livyserver_port
else:
del interpreter_settings[setting_key]
elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy2':
if params.livy2_livyserver_host:
interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy2_livyserver_host + \
":" + params.livy2_livyserver_port
else:
del interpreter_settings[setting_key]
elif interpreter['group'] == 'spark' and interpreter['name'] == 'spark':
if 'spark-env' in params.config['configurations']:
interpreter['properties']['master'] = "yarn-client"
interpreter['properties']['SPARK_HOME'] = "/usr/hdp/current/spark-client/"
else:
del interpreter_settings[setting_key]
elif interpreter['group'] == 'spark' and interpreter['name'] == 'spark2':
if 'spark2-env' in params.config['configurations']:
interpreter['properties']['master'] = "yarn-client"
interpreter['properties']['SPARK_HOME'] = "/usr/hdp/current/spark2-client/"
else:
del interpreter_settings[setting_key]
self.set_interpreter_settings(config_data)
self.update_kerberos_properties()
def create_interpreter_json(self):
import interpreter_json_template
import params
interpreter_json = interpreter_json_template.template
File(format("{params.conf_dir}/interpreter.json"), content=interpreter_json,
owner=params.zeppelin_user, group=params.zeppelin_group)
def get_zeppelin_spark_dependencies(self):
import params
return glob.glob(params.zeppelin_dir + '/interpreter/spark/dep/zeppelin-spark-dependencies*.jar')
def get_spark2_interpreter_config(self):
import spark2_config_template
import json
return json.loads(spark2_config_template.template)
def get_livy2_interpreter_config(self):
import livy2_config_template
import json
return json.loads(livy2_config_template.template)
if __name__ == "__main__":
Master().execute()
| arenadata/ambari | ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/master.py | Python | apache-2.0 | 24,600 |
from django.conf.urls.defaults import patterns # noqa
from django.conf.urls.defaults import url # noqa
from openstack_dashboard.dashboards.fogbow.members import views
from openstack_dashboard.dashboards.fogbow.members.views import IndexView
urlpatterns = patterns('',
url(r'^$', IndexView.as_view(), name='index'),
url(r'^(?P<member_id>.*)/quota$', views.getSpecificMemberQuota, name='quota'),
)
| fogbow/fogbow-dashboard | openstack_dashboard/dashboards/fogbow/members/urls.py | Python | apache-2.0 | 408 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from eventlet import greenlet
from eventlet import greenpool
from eventlet import greenthread
from mtaaas.openstack.common import log as logging
from mtaaas.openstack.common import loopingcall
LOG = logging.getLogger(__name__)
def _thread_done(gt, *args, **kwargs):
""" Callback function to be passed to GreenThread.link() when we spawn()
Calls the :class:`ThreadGroup` to notify if.
"""
kwargs['group'].thread_done(kwargs['thread'])
class Thread(object):
""" Wrapper around a greenthread, that holds a reference to the
:class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when
it has done so it can be removed from the threads list.
"""
def __init__(self, thread, group):
self.thread = thread
self.thread.link(_thread_done, group=group, thread=self)
def stop(self):
self.thread.kill()
def wait(self):
return self.thread.wait()
class ThreadGroup(object):
""" The point of the ThreadGroup classis to:
* keep track of timers and greenthreads (making it easier to stop them
when need be).
* provide an easy API to add timers.
"""
def __init__(self, thread_pool_size=10):
self.pool = greenpool.GreenPool(thread_pool_size)
self.threads = []
self.timers = []
def add_timer(self, interval, callback, initial_delay=None,
*args, **kwargs):
pulse = loopingcall.FixedIntervalLoopingCall(callback, *args, **kwargs)
pulse.start(interval=interval,
initial_delay=initial_delay)
self.timers.append(pulse)
def add_thread(self, callback, *args, **kwargs):
gt = self.pool.spawn(callback, *args, **kwargs)
th = Thread(gt, self)
self.threads.append(th)
def thread_done(self, thread):
self.threads.remove(thread)
def stop(self):
current = greenthread.getcurrent()
for x in self.threads:
if x is current:
# don't kill the current thread.
continue
try:
x.stop()
except Exception as ex:
LOG.exception(ex)
for x in self.timers:
try:
x.stop()
except Exception as ex:
LOG.exception(ex)
self.timers = []
def wait(self):
for x in self.timers:
try:
x.wait()
except greenlet.GreenletExit:
pass
except Exception as ex:
LOG.exception(ex)
current = greenthread.getcurrent()
for x in self.threads:
if x is current:
continue
try:
x.wait()
except greenlet.GreenletExit:
pass
except Exception as ex:
LOG.exception(ex)
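# Minimal usage sketch (hypothetical, not part of the original module): run a
# worker greenthread and a heartbeat timer, then shut the group down. The
# worker/heartbeat callables are illustrative only.
def _example_usage():
    def worker(name):
        greenthread.sleep(0.1)
        LOG.info('%s finished', name)

    def heartbeat():
        LOG.info('still alive')

    group = ThreadGroup(thread_pool_size=2)
    group.add_thread(worker, 'worker-1')
    group.add_timer(1.0, heartbeat)
    group.stop()   # stops timers and kills threads (except the caller's)
    group.wait()   # joins whatever remains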
| townbull/mtaaas-openstack | mtaaas/openstack/common/threadgroup.py | Python | apache-2.0 | 3,514 |
# Copyright 2021, Kay Hayen, mailto:[email protected]
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Reformulation of "yield" and "yield from" expressions.
Consult the developer manual for information. TODO: Add ability to sync
source code comments with developer manual sections.
"""
import ast
from nuitka.nodes.ConstantRefNodes import ExpressionConstantNoneRef
from nuitka.nodes.YieldNodes import ExpressionYield, ExpressionYieldFrom
from nuitka.PythonVersions import python_version
from .SyntaxErrors import raiseSyntaxError
from .TreeHelpers import buildNode
def _checkInsideGenerator(provider, node, source_ref):
if provider.isCompiledPythonModule():
raiseSyntaxError(
"'yield' outside function", source_ref.atColumnNumber(node.col_offset)
)
    # A plain "yield" in an async generator is forbidden in 3.5 but allowed
    # in 3.6; "yield from" is allowed in neither.
if provider.isExpressionAsyncgenObjectBody() and (
node.__class__ is not ast.Yield or python_version < 0x360
):
raiseSyntaxError(
"'%s' inside async function"
% ("yield" if node.__class__ is ast.Yield else "yield from",),
source_ref.atColumnNumber(node.col_offset),
)
if (
python_version >= 0x380
and provider.isExpressionGeneratorObjectBody()
and provider.name == "<genexpr>"
):
raiseSyntaxError(
"'%s' inside generator expression"
% ("yield" if node.__class__ is ast.Yield else "yield from",),
provider.getSourceReference(),
)
assert (
provider.isExpressionGeneratorObjectBody()
or provider.isExpressionAsyncgenObjectBody()
), provider
def buildYieldNode(provider, node, source_ref):
_checkInsideGenerator(provider, node, source_ref)
if node.value is not None:
return ExpressionYield(
expression=buildNode(provider, node.value, source_ref),
source_ref=source_ref,
)
else:
return ExpressionYield(
expression=ExpressionConstantNoneRef(source_ref=source_ref),
source_ref=source_ref,
)
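# For illustration: a bare "yield" statement is reformulated as if it read
# "yield None" - the missing value becomes the ExpressionConstantNoneRef
# child built in the "else" branch above.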
def buildYieldFromNode(provider, node, source_ref):
assert python_version >= 0x300
_checkInsideGenerator(provider, node, source_ref)
return ExpressionYieldFrom(
expression=buildNode(provider, node.value, source_ref), source_ref=source_ref
)
| kayhayen/Nuitka | nuitka/tree/ReformulationYieldExpressions.py | Python | apache-2.0 | 3,088 |
"""Provides functionality to interact with climate devices."""
from abc import abstractmethod
from datetime import timedelta
import functools as ft
import logging
from typing import Any, Dict, List, Optional
import voluptuous as vol
from homeassistant.const import (
ATTR_TEMPERATURE,
PRECISION_TENTHS,
PRECISION_WHOLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
make_entity_service_schema,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.temperature import display_temp as show_temp
from homeassistant.helpers.typing import ConfigType, HomeAssistantType, ServiceDataType
from homeassistant.util.temperature import convert as convert_temperature
from .const import (
ATTR_AUX_HEAT,
ATTR_CURRENT_HUMIDITY,
ATTR_CURRENT_TEMPERATURE,
ATTR_FAN_MODE,
ATTR_FAN_MODES,
ATTR_HUMIDITY,
ATTR_HVAC_ACTION,
ATTR_HVAC_MODE,
ATTR_HVAC_MODES,
ATTR_MAX_HUMIDITY,
ATTR_MAX_TEMP,
ATTR_MIN_HUMIDITY,
ATTR_MIN_TEMP,
ATTR_PRESET_MODE,
ATTR_PRESET_MODES,
ATTR_SWING_MODE,
ATTR_SWING_MODES,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
ATTR_TARGET_TEMP_STEP,
DOMAIN,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
HVAC_MODES,
SERVICE_SET_AUX_HEAT,
SERVICE_SET_FAN_MODE,
SERVICE_SET_HUMIDITY,
SERVICE_SET_HVAC_MODE,
SERVICE_SET_PRESET_MODE,
SERVICE_SET_SWING_MODE,
SERVICE_SET_TEMPERATURE,
SUPPORT_AUX_HEAT,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_HUMIDITY,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
DEFAULT_MIN_TEMP = 7
DEFAULT_MAX_TEMP = 35
DEFAULT_MIN_HUMIDITY = 30
DEFAULT_MAX_HUMIDITY = 99
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SCAN_INTERVAL = timedelta(seconds=60)
CONVERTIBLE_ATTRIBUTE = [ATTR_TEMPERATURE, ATTR_TARGET_TEMP_LOW, ATTR_TARGET_TEMP_HIGH]
_LOGGER = logging.getLogger(__name__)
SET_TEMPERATURE_SCHEMA = vol.All(
cv.has_at_least_one_key(
ATTR_TEMPERATURE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW
),
make_entity_service_schema(
{
vol.Exclusive(ATTR_TEMPERATURE, "temperature"): vol.Coerce(float),
vol.Inclusive(ATTR_TARGET_TEMP_HIGH, "temperature"): vol.Coerce(float),
vol.Inclusive(ATTR_TARGET_TEMP_LOW, "temperature"): vol.Coerce(float),
vol.Optional(ATTR_HVAC_MODE): vol.In(HVAC_MODES),
}
),
)
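# For illustration (hypothetical service payloads): this schema accepts either
# {"temperature": 21.5} for single-setpoint devices or
# {"target_temp_low": 18, "target_temp_high": 24} for ranged ones;
# vol.Exclusive keeps "temperature" from being combined with the high/low
# pair, and vol.Inclusive requires high and low to appear together.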
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
"""Set up climate devices."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on")
component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off")
component.async_register_entity_service(
SERVICE_SET_HVAC_MODE,
{vol.Required(ATTR_HVAC_MODE): vol.In(HVAC_MODES)},
"async_set_hvac_mode",
)
component.async_register_entity_service(
SERVICE_SET_PRESET_MODE,
{vol.Required(ATTR_PRESET_MODE): cv.string},
"async_set_preset_mode",
)
component.async_register_entity_service(
SERVICE_SET_AUX_HEAT,
{vol.Required(ATTR_AUX_HEAT): cv.boolean},
async_service_aux_heat,
)
component.async_register_entity_service(
SERVICE_SET_TEMPERATURE, SET_TEMPERATURE_SCHEMA, async_service_temperature_set,
)
component.async_register_entity_service(
SERVICE_SET_HUMIDITY,
{vol.Required(ATTR_HUMIDITY): vol.Coerce(float)},
"async_set_humidity",
)
component.async_register_entity_service(
SERVICE_SET_FAN_MODE,
{vol.Required(ATTR_FAN_MODE): cv.string},
"async_set_fan_mode",
)
component.async_register_entity_service(
SERVICE_SET_SWING_MODE,
{vol.Required(ATTR_SWING_MODE): cv.string},
"async_set_swing_mode",
)
return True
async def async_setup_entry(hass: HomeAssistantType, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass: HomeAssistantType, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class ClimateDevice(Entity):
"""Representation of a climate device."""
@property
def state(self) -> str:
"""Return the current state."""
return self.hvac_mode
@property
def precision(self) -> float:
"""Return the precision of the system."""
if self.hass.config.units.temperature_unit == TEMP_CELSIUS:
return PRECISION_TENTHS
return PRECISION_WHOLE
@property
def state_attributes(self) -> Dict[str, Any]:
"""Return the optional state attributes."""
supported_features = self.supported_features
data = {
ATTR_HVAC_MODES: self.hvac_modes,
ATTR_CURRENT_TEMPERATURE: show_temp(
self.hass,
self.current_temperature,
self.temperature_unit,
self.precision,
),
ATTR_MIN_TEMP: show_temp(
self.hass, self.min_temp, self.temperature_unit, self.precision
),
ATTR_MAX_TEMP: show_temp(
self.hass, self.max_temp, self.temperature_unit, self.precision
),
}
if self.target_temperature_step:
data[ATTR_TARGET_TEMP_STEP] = self.target_temperature_step
if supported_features & SUPPORT_TARGET_TEMPERATURE:
data[ATTR_TEMPERATURE] = show_temp(
self.hass,
self.target_temperature,
self.temperature_unit,
self.precision,
)
if supported_features & SUPPORT_TARGET_TEMPERATURE_RANGE:
data[ATTR_TARGET_TEMP_HIGH] = show_temp(
self.hass,
self.target_temperature_high,
self.temperature_unit,
self.precision,
)
data[ATTR_TARGET_TEMP_LOW] = show_temp(
self.hass,
self.target_temperature_low,
self.temperature_unit,
self.precision,
)
if self.current_humidity is not None:
data[ATTR_CURRENT_HUMIDITY] = self.current_humidity
if supported_features & SUPPORT_TARGET_HUMIDITY:
data[ATTR_HUMIDITY] = self.target_humidity
data[ATTR_MIN_HUMIDITY] = self.min_humidity
data[ATTR_MAX_HUMIDITY] = self.max_humidity
if supported_features & SUPPORT_FAN_MODE:
data[ATTR_FAN_MODE] = self.fan_mode
data[ATTR_FAN_MODES] = self.fan_modes
if self.hvac_action:
data[ATTR_HVAC_ACTION] = self.hvac_action
if supported_features & SUPPORT_PRESET_MODE:
data[ATTR_PRESET_MODE] = self.preset_mode
data[ATTR_PRESET_MODES] = self.preset_modes
if supported_features & SUPPORT_SWING_MODE:
data[ATTR_SWING_MODE] = self.swing_mode
data[ATTR_SWING_MODES] = self.swing_modes
if supported_features & SUPPORT_AUX_HEAT:
data[ATTR_AUX_HEAT] = STATE_ON if self.is_aux_heat else STATE_OFF
return data
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement used by the platform."""
raise NotImplementedError()
@property
def current_humidity(self) -> Optional[int]:
"""Return the current humidity."""
return None
@property
def target_humidity(self) -> Optional[int]:
"""Return the humidity we try to reach."""
return None
@property
@abstractmethod
def hvac_mode(self) -> str:
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
@property
@abstractmethod
def hvac_modes(self) -> List[str]:
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
@property
def hvac_action(self) -> Optional[str]:
"""Return the current running hvac operation if supported.
Need to be one of CURRENT_HVAC_*.
"""
return None
@property
def current_temperature(self) -> Optional[float]:
"""Return the current temperature."""
return None
@property
def target_temperature(self) -> Optional[float]:
"""Return the temperature we try to reach."""
return None
@property
def target_temperature_step(self) -> Optional[float]:
"""Return the supported step of target temperature."""
return None
@property
def target_temperature_high(self) -> Optional[float]:
"""Return the highbound target temperature we try to reach.
Requires SUPPORT_TARGET_TEMPERATURE_RANGE.
"""
raise NotImplementedError
@property
def target_temperature_low(self) -> Optional[float]:
"""Return the lowbound target temperature we try to reach.
Requires SUPPORT_TARGET_TEMPERATURE_RANGE.
"""
raise NotImplementedError
@property
def preset_mode(self) -> Optional[str]:
"""Return the current preset mode, e.g., home, away, temp.
Requires SUPPORT_PRESET_MODE.
"""
raise NotImplementedError
@property
def preset_modes(self) -> Optional[List[str]]:
"""Return a list of available preset modes.
Requires SUPPORT_PRESET_MODE.
"""
raise NotImplementedError
@property
def is_aux_heat(self) -> Optional[bool]:
"""Return true if aux heater.
Requires SUPPORT_AUX_HEAT.
"""
raise NotImplementedError
@property
def fan_mode(self) -> Optional[str]:
"""Return the fan setting.
Requires SUPPORT_FAN_MODE.
"""
raise NotImplementedError
@property
def fan_modes(self) -> Optional[List[str]]:
"""Return the list of available fan modes.
Requires SUPPORT_FAN_MODE.
"""
raise NotImplementedError
@property
def swing_mode(self) -> Optional[str]:
"""Return the swing setting.
Requires SUPPORT_SWING_MODE.
"""
raise NotImplementedError
@property
def swing_modes(self) -> Optional[List[str]]:
"""Return the list of available swing modes.
Requires SUPPORT_SWING_MODE.
"""
raise NotImplementedError
def set_temperature(self, **kwargs) -> None:
"""Set new target temperature."""
raise NotImplementedError()
async def async_set_temperature(self, **kwargs) -> None:
"""Set new target temperature."""
await self.hass.async_add_executor_job(
ft.partial(self.set_temperature, **kwargs)
)
def set_humidity(self, humidity: int) -> None:
"""Set new target humidity."""
raise NotImplementedError()
async def async_set_humidity(self, humidity: int) -> None:
"""Set new target humidity."""
await self.hass.async_add_executor_job(self.set_humidity, humidity)
def set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""
raise NotImplementedError()
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""
await self.hass.async_add_executor_job(self.set_fan_mode, fan_mode)
def set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target hvac mode."""
raise NotImplementedError()
async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target hvac mode."""
await self.hass.async_add_executor_job(self.set_hvac_mode, hvac_mode)
def set_swing_mode(self, swing_mode: str) -> None:
"""Set new target swing operation."""
raise NotImplementedError()
async def async_set_swing_mode(self, swing_mode: str) -> None:
"""Set new target swing operation."""
await self.hass.async_add_executor_job(self.set_swing_mode, swing_mode)
def set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
raise NotImplementedError()
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
await self.hass.async_add_executor_job(self.set_preset_mode, preset_mode)
def turn_aux_heat_on(self) -> None:
"""Turn auxiliary heater on."""
raise NotImplementedError()
async def async_turn_aux_heat_on(self) -> None:
"""Turn auxiliary heater on."""
await self.hass.async_add_executor_job(self.turn_aux_heat_on)
def turn_aux_heat_off(self) -> None:
"""Turn auxiliary heater off."""
raise NotImplementedError()
async def async_turn_aux_heat_off(self) -> None:
"""Turn auxiliary heater off."""
await self.hass.async_add_executor_job(self.turn_aux_heat_off)
async def async_turn_on(self) -> None:
"""Turn the entity on."""
if hasattr(self, "turn_on"):
# pylint: disable=no-member
await self.hass.async_add_executor_job(self.turn_on)
return
# Fake turn on
for mode in (HVAC_MODE_HEAT_COOL, HVAC_MODE_HEAT, HVAC_MODE_COOL):
if mode not in self.hvac_modes:
continue
await self.async_set_hvac_mode(mode)
break
async def async_turn_off(self) -> None:
"""Turn the entity off."""
if hasattr(self, "turn_off"):
# pylint: disable=no-member
await self.hass.async_add_executor_job(self.turn_off)
return
# Fake turn off
if HVAC_MODE_OFF in self.hvac_modes:
await self.async_set_hvac_mode(HVAC_MODE_OFF)
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
raise NotImplementedError()
@property
def min_temp(self) -> float:
"""Return the minimum temperature."""
return convert_temperature(
DEFAULT_MIN_TEMP, TEMP_CELSIUS, self.temperature_unit
)
@property
def max_temp(self) -> float:
"""Return the maximum temperature."""
return convert_temperature(
DEFAULT_MAX_TEMP, TEMP_CELSIUS, self.temperature_unit
)
@property
def min_humidity(self) -> int:
"""Return the minimum humidity."""
return DEFAULT_MIN_HUMIDITY
@property
def max_humidity(self) -> int:
"""Return the maximum humidity."""
return DEFAULT_MAX_HUMIDITY
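# Minimal subclass sketch (hypothetical, for illustration only): a fixed,
# heat-only device showing the smallest surface a platform must implement.
class _ExampleClimateDevice(ClimateDevice):
    """Illustrative heat-only climate device with a fixed state."""

    @property
    def temperature_unit(self) -> str:
        return TEMP_CELSIUS

    @property
    def hvac_mode(self) -> str:
        return HVAC_MODE_HEAT

    @property
    def hvac_modes(self) -> List[str]:
        return [HVAC_MODE_HEAT, HVAC_MODE_OFF]

    @property
    def supported_features(self) -> int:
        return SUPPORT_TARGET_TEMPERATURE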
async def async_service_aux_heat(
entity: ClimateDevice, service: ServiceDataType
) -> None:
"""Handle aux heat service."""
if service.data[ATTR_AUX_HEAT]:
await entity.async_turn_aux_heat_on()
else:
await entity.async_turn_aux_heat_off()
async def async_service_temperature_set(
entity: ClimateDevice, service: ServiceDataType
) -> None:
"""Handle set temperature service."""
hass = entity.hass
kwargs = {}
for value, temp in service.data.items():
if value in CONVERTIBLE_ATTRIBUTE:
kwargs[value] = convert_temperature(
temp, hass.config.units.temperature_unit, entity.temperature_unit
)
else:
kwargs[value] = temp
await entity.async_set_temperature(**kwargs)
| leppa/home-assistant | homeassistant/components/climate/__init__.py | Python | apache-2.0 | 15,863 |
# -*- encoding: utf-8 -*-
from opensearchsdk.apiclient import api_base
class SearchManager(api_base.Manager):
"""Search resource manage class"""
def search(self, query=None, index_name=None, fetch_fields=None, qp=None,
disable=None, first_formula_name=None, formula_name=None,
summary=None, scroll=None, search_type=None, scroll_id=None):
"""
        do search with given parameters
        :param query: query string
        :param index_name: application name(s), separated by ';'
        :param fetch_fields: fields to return, separated by ';'
        :param qp: query analysis rules, separated by ','
        :param disable: whether to turn off query analysis
        :param first_formula_name:
        :param formula_name:
        :param summary:
        :param scroll: scroll expiry time; the default unit is ms
        :param search_type: scan
        :param scroll_id: scroll id of the previous search; None for the first
            search
        :return: dict, search result
"""
body = {}
def _simple_search():
body['index_name'] = index_name
body['query'] = query
if fetch_fields:
body['fetch_fields'] = fetch_fields
if qp:
body['qp'] = qp
if disable:
body['disable'] = disable
if first_formula_name:
body['first_formula_name'] = first_formula_name
if formula_name:
body['formula_name'] = formula_name
if summary:
body['summary'] = summary
        # check whether to do a combined (scroll) search
if scroll:
body['scroll'] = scroll
            # if not the first time, must include the last scroll id
if scroll_id:
body['scroll_id'] = scroll_id
else:
                # first time doing a combined search
body['search_type'] = search_type
_simple_search()
else:
_simple_search()
return self.send_get(body)
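# Usage sketch (hypothetical wiring and response shape): a plain search, then
# a scan/scroll search whose returned scroll id feeds the follow-up call.
#
#   mgr = SearchManager(api_client)  # constructor arguments assumed
#   hits = mgr.search(query="query=default:'book'", index_name="my_app",
#                     fetch_fields="id;title")
#   first = mgr.search(query="query=default:'book'", index_name="my_app",
#                      scroll="1m", search_type="scan")
#   more = mgr.search(scroll="1m", scroll_id=first["result"]["scroll_id"])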
| yanheven/ali-opensearch-sdk | opensearchsdk/v2/search.py | Python | apache-2.0 | 2,022 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import OsLoginServiceClient
from .async_client import OsLoginServiceAsyncClient
__all__ = (
"OsLoginServiceClient",
"OsLoginServiceAsyncClient",
)
| googleapis/python-oslogin | google/cloud/oslogin_v1/services/os_login_service/__init__.py | Python | apache-2.0 | 769 |
# -*- coding: utf-8 -*-
"""demo_app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [path("admin/", admin.site.urls), path("", include("example_app.urls"))]
| pivotal-energy-solutions/django-datatable-view | demo_app/demo_app/urls.py | Python | apache-2.0 | 816 |
# (C) Copyright 2019 Fujitsu Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
from mock import patch
from oslotest import base
from monasca_persister.repositories.influxdb.alarm_state_history_repository \
import AlarmStateHistInfluxdbRepository
from monasca_persister.repositories.influxdb import abstract_repository
class TestInfluxdbAlarmStateHistoryRepo(base.BaseTestCase):
def setUp(self):
super(TestInfluxdbAlarmStateHistoryRepo, self).setUp()
with patch.object(abstract_repository.cfg, 'CONF', return_value=Mock()):
self.alarm_state_repo = AlarmStateHistInfluxdbRepository()
def tearDown(self):
super(TestInfluxdbAlarmStateHistoryRepo, self).tearDown()
def test_process_message(self):
message = Mock()
message.value.return_value = """{
"alarm-transitioned": {
"alarmId": "dummyid",
"metrics": "dummymetrics",
"newState": "dummynewState",
"oldState": "dummyoldState",
"link": "dummylink",
"lifecycleState": "dummylifecycleState",
"stateChangeReason": "dummystateChangeReason",
"tenantId": "dummytenantId",
"timestamp": "10",
"subAlarms": {
"subAlarmExpression": "dummy_sub_alarm",
"currentValues": "dummy_values",
"metricDefinition": "dummy_definition",
"subAlarmState": "dummy_state"
}
}
}"""
expected_output = u'alarm_state_history,tenant_id=dummytenantId ' \
u'tenant_id="dummytenantId",alarm_id="dummyid",' \
u'metrics="\\"dummymetrics\\"",new_state="dummynewState"' \
u',old_state="dummyoldState",link="dummylink",' \
u'lifecycle_state="dummylifecycleState",' \
u'reason="dummystateChangeReason",reason_data="{}"'
expected_dict = ['\\"sub_alarm_expression\\":\\"dummy_sub_alarm\\"',
'\\"metric_definition\\":\\"dummy_definition\\"',
'\\"sub_alarm_state\\":\\"dummy_state\\"',
'\\"current_values\\":\\"dummy_values\\"']
actual_output = self.alarm_state_repo.process_message(message)
self.assertIn(expected_output, actual_output)
for elem in expected_dict:
self.assertIn(elem, actual_output)
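        # The reason_data fields are checked one by one because dict key
        # ordering is not guaranteed when the repository serializes the
        # subAlarms payload into the line-protocol string.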
| stackforge/monasca-persister | monasca_persister/tests/test_influxdb_alarm_state_history_repository.py | Python | apache-2.0 | 3,046 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tempfile import mkstemp
import numpy as np
import tensorflow as tf
from official.resnet import cifar10_main
tf.logging.set_verbosity(tf.logging.ERROR)
_BATCH_SIZE = 128
_HEIGHT = 32
_WIDTH = 32
_NUM_CHANNELS = 3
class BaseTest(tf.test.TestCase):
def test_dataset_input_fn(self):
fake_data = bytearray()
fake_data.append(7)
for i in range(_NUM_CHANNELS):
for _ in range(_HEIGHT * _WIDTH):
fake_data.append(i)
_, filename = mkstemp(dir=self.get_temp_dir())
data_file = open(filename, 'wb')
data_file.write(fake_data)
data_file.close()
fake_dataset = tf.data.FixedLengthRecordDataset(
filename, cifar10_main._RECORD_BYTES)
fake_dataset = fake_dataset.map(
lambda val: cifar10_main.parse_record(val, False))
image, label = fake_dataset.make_one_shot_iterator().get_next()
self.assertAllEqual(label.shape, (10,))
self.assertAllEqual(image.shape, (_HEIGHT, _WIDTH, _NUM_CHANNELS))
with self.test_session() as sess:
image, label = sess.run([image, label])
self.assertAllEqual(label, np.array([int(i == 7) for i in range(10)]))
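      # The expected values below follow from per-image standardization
      # (assuming parse_record applies tf.image.per_image_standardization):
      # the channels are the constants 0, 1 and 2, so mean = 1 and
      # stddev = sqrt(2/3) ~= 0.8165, giving (x - 1) / 0.8165, i.e. roughly
      # -1.225, 0.0 and 1.225.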
for row in image:
for pixel in row:
self.assertAllClose(pixel, np.array([-1.225, 0., 1.225]), rtol=1e-3)
def cifar10_model_fn_helper(self, mode, version, multi_gpu=False):
input_fn = cifar10_main.get_synth_input_fn()
dataset = input_fn(True, '', _BATCH_SIZE)
iterator = dataset.make_one_shot_iterator()
features, labels = iterator.get_next()
spec = cifar10_main.cifar10_model_fn(
features, labels, mode, {
'resnet_size': 32,
'data_format': 'channels_last',
'batch_size': _BATCH_SIZE,
'version': version,
'multi_gpu': multi_gpu
})
predictions = spec.predictions
self.assertAllEqual(predictions['probabilities'].shape,
(_BATCH_SIZE, 10))
self.assertEqual(predictions['probabilities'].dtype, tf.float32)
self.assertAllEqual(predictions['classes'].shape, (_BATCH_SIZE,))
self.assertEqual(predictions['classes'].dtype, tf.int64)
if mode != tf.estimator.ModeKeys.PREDICT:
loss = spec.loss
self.assertAllEqual(loss.shape, ())
self.assertEqual(loss.dtype, tf.float32)
if mode == tf.estimator.ModeKeys.EVAL:
eval_metric_ops = spec.eval_metric_ops
self.assertAllEqual(eval_metric_ops['accuracy'][0].shape, ())
self.assertAllEqual(eval_metric_ops['accuracy'][1].shape, ())
self.assertEqual(eval_metric_ops['accuracy'][0].dtype, tf.float32)
self.assertEqual(eval_metric_ops['accuracy'][1].dtype, tf.float32)
def test_cifar10_model_fn_train_mode_v1(self):
self.cifar10_model_fn_helper(tf.estimator.ModeKeys.TRAIN, version=1)
  def test_cifar10_model_fn_train_mode_v2(self):
self.cifar10_model_fn_helper(tf.estimator.ModeKeys.TRAIN, version=2)
def test_cifar10_model_fn_train_mode_multi_gpu_v1(self):
self.cifar10_model_fn_helper(tf.estimator.ModeKeys.TRAIN, version=1,
multi_gpu=True)
def test_cifar10_model_fn_train_mode_multi_gpu_v2(self):
self.cifar10_model_fn_helper(tf.estimator.ModeKeys.TRAIN, version=2,
multi_gpu=True)
def test_cifar10_model_fn_eval_mode_v1(self):
self.cifar10_model_fn_helper(tf.estimator.ModeKeys.EVAL, version=1)
def test_cifar10_model_fn_eval_mode_v2(self):
self.cifar10_model_fn_helper(tf.estimator.ModeKeys.EVAL, version=2)
def test_cifar10_model_fn_predict_mode_v1(self):
self.cifar10_model_fn_helper(tf.estimator.ModeKeys.PREDICT, version=1)
def test_cifar10_model_fn_predict_mode_v2(self):
self.cifar10_model_fn_helper(tf.estimator.ModeKeys.PREDICT, version=2)
  def test_cifar10_model_shape(self):
batch_size = 135
num_classes = 246
for version in (1, 2):
model = cifar10_main.Cifar10Model(32, data_format='channels_last',
num_classes=num_classes, version=version)
fake_input = tf.random_uniform([batch_size, _HEIGHT, _WIDTH, _NUM_CHANNELS])
output = model(fake_input, training=True)
self.assertAllEqual(output.shape, (batch_size, num_classes))
if __name__ == '__main__':
tf.test.main()
| jiaphuan/models | official/resnet/cifar10_test.py | Python | apache-2.0 | 5,068 |
#!/usr/bin/python
"""
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import gzip
import logging
import math
import os
import time
# try a fast json parser if it is installed
try:
import ujson as json
except ImportError:
import json
########################################################################################################################
# Trace processing
########################################################################################################################
class Trace():
def __init__(self):
self.thread_stack = {}
self.ignore_threads = {}
self.threads = {}
self.user_timing = []
self.event_names = {}
self.event_name_lookup = {}
self.scripts = None
self.timeline_events = []
self.trace_events = []
self.interactive = []
self.interactive_start = 0
self.interactive_end = None
self.start_time = None
self.end_time = None
self.cpu = {'main_thread': None}
self.feature_usage = None
self.feature_usage_start_time = None
self.netlog = {'bytes_in': 0, 'bytes_out': 0}
return
########################################################################################################################
# Output Logging
########################################################################################################################
def WriteJson(self, file, json_data):
try:
file_name, ext = os.path.splitext(file)
if ext.lower() == '.gz':
with gzip.open(file, 'wb') as f:
json.dump(json_data, f)
else:
with open(file, 'w') as f:
json.dump(json_data, f)
    except Exception:
logging.critical("Error writing to " + file)
def WriteUserTiming(self, file):
self.WriteJson(file, self.user_timing)
def WriteCPUSlices(self, file):
self.WriteJson(file, self.cpu)
def WriteScriptTimings(self, file):
if self.scripts is not None:
self.WriteJson(file, self.scripts)
def WriteFeatureUsage(self, file):
self.WriteJson(file, self.feature_usage)
def WriteInteractive(self, file):
self.WriteJson(file, self.interactive)
def WriteNetlog(self, file):
self.WriteJson(file, self.netlog)
########################################################################################################################
# Top-level processing
########################################################################################################################
def Process(self, trace):
f = None
line_mode = False
self.__init__()
try:
file_name, ext = os.path.splitext(trace)
if ext.lower() == '.gz':
f = gzip.open(trace, 'rb')
else:
f = open(trace, 'r')
for line in f:
try:
trace_event = json.loads(line.strip("\r\n\t ,"))
if not line_mode and 'traceEvents' in trace_event:
for sub_event in trace_event['traceEvents']:
self.FilterTraceEvent(sub_event)
else:
line_mode = True
self.FilterTraceEvent(trace_event)
        except Exception:
pass
    except Exception:
logging.critical("Error processing trace " + trace)
if f is not None:
f.close()
self.ProcessTraceEvents()
def ProcessTimeline(self, timeline):
self.__init__()
self.cpu['main_thread'] = '0'
self.threads['0'] = {}
events = None
f = None
try:
file_name, ext = os.path.splitext(timeline)
if ext.lower() == '.gz':
f = gzip.open(timeline, 'rb')
else:
f = open(timeline, 'r')
events = json.load(f)
if events:
# convert the old format timeline events into our internal representation
for event in events:
if 'method' in event and 'params' in event:
if self.start_time is None:
if event['method'] == 'Network.requestWillBeSent' and 'timestamp' in event['params']:
self.start_time = event['params']['timestamp'] * 1000000.0
self.end_time = event['params']['timestamp'] * 1000000.0
else:
if 'timestamp' in event['params']:
t = event['params']['timestamp'] * 1000000.0
if t > self.end_time:
self.end_time = t
if event['method'] == 'Timeline.eventRecorded' and 'record' in event['params']:
e = self.ProcessOldTimelineEvent(event['params']['record'], None)
if e is not None:
self.timeline_events.append(e)
self.ProcessTimelineEvents()
    except Exception:
logging.critical("Error processing timeline " + timeline)
if f is not None:
f.close()
def FilterTraceEvent(self, trace_event):
cat = trace_event['cat']
if cat == 'toplevel' or cat == 'ipc,toplevel':
return
if cat == 'devtools.timeline' or \
cat.find('devtools.timeline') >= 0 or \
cat.find('blink.feature_usage') >= 0 or \
cat.find('blink.user_timing') >= 0:
self.trace_events.append(trace_event)
def ProcessTraceEvents(self):
#sort the raw trace events by timestamp and then process them
if len(self.trace_events):
self.trace_events.sort(key=lambda trace_event: trace_event['ts'])
for trace_event in self.trace_events:
self.ProcessTraceEvent(trace_event)
self.trace_events = []
# Do the post-processing on timeline events
self.ProcessTimelineEvents()
def ProcessTraceEvent(self, trace_event):
cat = trace_event['cat']
if cat == 'devtools.timeline' or cat.find('devtools.timeline') >= 0:
self.ProcessTimelineTraceEvent(trace_event)
elif cat.find('blink.feature_usage') >= 0:
self.ProcessFeatureUsageEvent(trace_event)
elif cat.find('blink.user_timing') >= 0:
self.user_timing.append(trace_event)
#Netlog support is still in progress
#elif cat.find('netlog') >= 0:
# self.ProcessNetlogEvent(trace_event)
########################################################################################################################
# Timeline
########################################################################################################################
def ProcessTimelineTraceEvent(self, trace_event):
thread = '{0}:{1}'.format(trace_event['pid'], trace_event['tid'])
# Keep track of the main thread
if self.cpu['main_thread'] is None and trace_event['name'] == 'ResourceSendRequest' and 'args' in trace_event and \
'data' in trace_event['args'] and 'url' in trace_event['args']['data']:
if trace_event['args']['data']['url'][:21] == 'http://127.0.0.1:8888':
self.ignore_threads[thread] = True
else:
if thread not in self.threads:
self.threads[thread] = {}
if self.start_time is None or trace_event['ts'] < self.start_time:
self.start_time = trace_event['ts']
self.cpu['main_thread'] = thread
if 'dur' not in trace_event:
trace_event['dur'] = 1
# Make sure each thread has a numerical ID
if self.cpu['main_thread'] is not None and thread not in self.threads and thread not in self.ignore_threads and \
trace_event['name'] != 'Program':
self.threads[thread] = {}
# Build timeline events on a stack. 'B' begins an event, 'E' ends an event
if (thread in self.threads and ('dur' in trace_event or trace_event['ph'] == 'B' or trace_event['ph'] == 'E')):
trace_event['thread'] = self.threads[thread]
if thread not in self.thread_stack:
self.thread_stack[thread] = []
if trace_event['name'] not in self.event_names:
self.event_names[trace_event['name']] = len(self.event_names)
self.event_name_lookup[self.event_names[trace_event['name']]] = trace_event['name']
if trace_event['name'] not in self.threads[thread]:
self.threads[thread][trace_event['name']] = self.event_names[trace_event['name']]
e = None
if trace_event['ph'] == 'E':
if len(self.thread_stack[thread]) > 0:
e = self.thread_stack[thread].pop()
if e['n'] == self.event_names[trace_event['name']]:
e['e'] = trace_event['ts']
else:
e = {'t': thread, 'n': self.event_names[trace_event['name']], 's': trace_event['ts']}
if (trace_event['name'] == 'EvaluateScript' or trace_event['name'] == 'v8.compile' or trace_event['name'] == 'v8.parseOnBackground')\
and 'args' in trace_event and 'data' in trace_event['args'] and 'url' in trace_event['args']['data'] and\
trace_event['args']['data']['url'].startswith('http'):
e['js'] = trace_event['args']['data']['url']
if trace_event['name'] == 'FunctionCall' and 'args' in trace_event and 'data' in trace_event['args']:
if 'scriptName' in trace_event['args']['data'] and trace_event['args']['data']['scriptName'].startswith('http'):
e['js'] = trace_event['args']['data']['scriptName']
elif 'url' in trace_event['args']['data'] and trace_event['args']['data']['url'].startswith('http'):
e['js'] = trace_event['args']['data']['url']
if trace_event['ph'] == 'B':
self.thread_stack[thread].append(e)
e = None
elif 'dur' in trace_event:
e['e'] = e['s'] + trace_event['dur']
if e is not None and 'e' in e and e['s'] >= self.start_time and e['e'] >= e['s']:
if self.end_time is None or e['e'] > self.end_time:
self.end_time = e['e']
# attach it to a parent event if there is one
if len(self.thread_stack[thread]) > 0:
parent = self.thread_stack[thread].pop()
if 'c' not in parent:
parent['c'] = []
parent['c'].append(e)
self.thread_stack[thread].append(parent)
else:
self.timeline_events.append(e)
def ProcessOldTimelineEvent(self, event, type):
e = None
thread = '0'
if 'type' in event:
type = event['type']
if type not in self.event_names:
self.event_names[type] = len(self.event_names)
self.event_name_lookup[self.event_names[type]] = type
if type not in self.threads[thread]:
self.threads[thread][type] = self.event_names[type]
start = None
end = None
if 'startTime' in event and 'endTime' in event:
start = event['startTime'] * 1000000.0
end = event['endTime'] * 1000000.0
if 'callInfo' in event:
if 'startTime' in event['callInfo'] and 'endTime' in event['callInfo']:
start = event['callInfo']['startTime'] * 1000000.0
end = event['callInfo']['endTime'] * 1000000.0
if start is not None and end is not None and end >= start and type is not None:
if end > self.end_time:
self.end_time = end
e = {'t': thread, 'n': self.event_names[type], 's': start, 'e': end}
if 'callInfo' in event and 'url' in event and event['url'].startswith('http'):
e['js'] = event['url']
# Process profile child events
if 'data' in event and 'profile' in event['data'] and 'rootNodes' in event['data']['profile']:
for child in event['data']['profile']['rootNodes']:
c = self.ProcessOldTimelineEvent(child, type)
if c is not None:
if 'c' not in e:
e['c'] = []
e['c'].append(c)
# recursively process any child events
if 'children' in event:
for child in event['children']:
c = self.ProcessOldTimelineEvent(child, type)
if c is not None:
if 'c' not in e:
e['c'] = []
e['c'].append(c)
return e
def ProcessTimelineEvents(self):
if len(self.timeline_events) and self.end_time > self.start_time:
# Figure out how big each slice should be in usecs. Size it to a power of 10 where we have at least 2000 slices
exp = 0
last_exp = 0
slice_count = self.end_time - self.start_time
while slice_count > 2000:
last_exp = exp
exp += 1
slice_count = int(math.ceil(float(self.end_time - self.start_time) / float(pow(10, exp))))
self.cpu['total_usecs'] = self.end_time - self.start_time
self.cpu['slice_usecs'] = int(pow(10, last_exp))
slice_count = int(math.ceil(float(self.end_time - self.start_time) / float(self.cpu['slice_usecs'])))
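      # Worked example (hypothetical numbers): a 3.5 s trace spans 3,500,000
      # usecs, so the loop exits at exp=4 (350 slices <= 2000), leaving
      # last_exp=3, i.e. slice_usecs=1000 (1 ms) and about 3500 slices.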
# Create the empty time slices for all of the threads
self.cpu['slices'] = {}
for thread in self.threads.keys():
self.cpu['slices'][thread] = {'total': [0.0] * slice_count}
for name in self.threads[thread].keys():
self.cpu['slices'][thread][name] = [0.0] * slice_count
# Go through all of the timeline events recursively and account for the time they consumed
for timeline_event in self.timeline_events:
self.ProcessTimelineEvent(timeline_event, None)
if self.interactive_end is not None and self.interactive_end - self.interactive_start > 500000:
self.interactive.append([int(math.ceil(self.interactive_start / 1000.0)), int(math.floor(self.interactive_end / 1000.0))])
# Go through all of the fractional times and convert the float fractional times to integer usecs
for thread in self.cpu['slices'].keys():
del self.cpu['slices'][thread]['total']
for name in self.cpu['slices'][thread].keys():
for slice in range(len(self.cpu['slices'][thread][name])):
self.cpu['slices'][thread][name][slice] =\
int(self.cpu['slices'][thread][name][slice] * self.cpu['slice_usecs'])
def ProcessTimelineEvent(self, timeline_event, parent):
start = timeline_event['s'] - self.start_time
end = timeline_event['e'] - self.start_time
if end > start:
elapsed = end - start
thread = timeline_event['t']
name = self.event_name_lookup[timeline_event['n']]
# Keep track of periods on the main thread where at least 500ms are available with no tasks longer than 50ms
if 'main_thread' in self.cpu and thread == self.cpu['main_thread']:
if elapsed > 50000:
if start - self.interactive_start > 500000:
self.interactive.append([int(math.ceil(self.interactive_start / 1000.0)), int(math.floor(start / 1000.0))])
self.interactive_start = end
self.interactive_end = None
else:
self.interactive_end = end
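      # For illustration (hypothetical timings): a 60ms task starting 700ms
      # after interactive_start records [interactive_start, task_start] as an
      # interactive window (>500ms with no long task) and moves the start of
      # the search to the task's end.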
if 'js' in timeline_event:
script = timeline_event['js']
s = start / 1000.0
e = end / 1000.0
if self.scripts is None:
self.scripts = {}
if 'main_thread' not in self.scripts and 'main_thread' in self.cpu:
self.scripts['main_thread'] = self.cpu['main_thread']
if thread not in self.scripts:
self.scripts[thread] = {}
if script not in self.scripts[thread]:
self.scripts[thread][script] = {}
if name not in self.scripts[thread][script]:
self.scripts[thread][script][name] = []
# make sure the script duration isn't already covered by a parent event
new_duration = True
if len(self.scripts[thread][script][name]):
for period in self.scripts[thread][script][name]:
if s >= period[0] and e <= period[1]:
new_duration = False
break
if new_duration:
self.scripts[thread][script][name].append([s, e])
slice_usecs = self.cpu['slice_usecs']
first_slice = int(float(start) / float(slice_usecs))
last_slice = int(float(end) / float(slice_usecs))
for slice_number in xrange(first_slice, last_slice + 1):
slice_start = slice_number * slice_usecs
slice_end = slice_start + slice_usecs
used_start = max(slice_start, start)
used_end = min(slice_end, end)
slice_elapsed = used_end - used_start
self.AdjustTimelineSlice(thread, slice_number, name, parent, slice_elapsed)
# Recursively process any child events
if 'c' in timeline_event:
for child in timeline_event['c']:
self.ProcessTimelineEvent(child, name)
# Add the time to the given slice and subtract the time from a parent event
def AdjustTimelineSlice(self, thread, slice_number, name, parent, elapsed):
try:
# Don't bother adjusting if both the current event and parent are the same category
# since they would just cancel each other out.
if name != parent:
fraction = min(1.0, float(elapsed) / float(self.cpu['slice_usecs']))
self.cpu['slices'][thread][name][slice_number] += fraction
self.cpu['slices'][thread]['total'][slice_number] += fraction
if parent is not None and self.cpu['slices'][thread][parent][slice_number] >= fraction:
self.cpu['slices'][thread][parent][slice_number] -= fraction
self.cpu['slices'][thread]['total'][slice_number] -= fraction
# Make sure we didn't exceed 100% in this slice
self.cpu['slices'][thread][name][slice_number] = min(1.0, self.cpu['slices'][thread][name][slice_number])
# make sure we don't exceed 100% for any slot
if self.cpu['slices'][thread]['total'][slice_number] > 1.0:
available = max(0.0, 1.0 - fraction)
for slice_name in self.cpu['slices'][thread].keys():
if slice_name != name:
self.cpu['slices'][thread][slice_name][slice_number] =\
min(self.cpu['slices'][thread][slice_name][slice_number], available)
available = max(0.0, available - self.cpu['slices'][thread][slice_name][slice_number])
self.cpu['slices'][thread]['total'][slice_number] = min(1.0, max(0.0, 1.0 - available))
    except Exception:
pass
########################################################################################################################
# Blink Features
########################################################################################################################
def ProcessFeatureUsageEvent(self, trace_event):
global BLINK_FEATURES
if 'name' in trace_event and\
'args' in trace_event and\
'feature' in trace_event['args'] and\
(trace_event['name'] == 'FeatureFirstUsed' or trace_event['name'] == 'CSSFirstUsed'):
if self.feature_usage is None:
self.feature_usage = {'Features': {}, 'CSSFeatures': {}}
if self.feature_usage_start_time is None:
if self.start_time is not None:
self.feature_usage_start_time = self.start_time
else:
self.feature_usage_start_time = trace_event['ts']
id = '{0:d}'.format(trace_event['args']['feature'])
timestamp = float('{0:0.3f}'.format((trace_event['ts'] - self.feature_usage_start_time) / 1000.0))
if trace_event['name'] == 'FeatureFirstUsed':
if id in BLINK_FEATURES:
name = BLINK_FEATURES[id]
else:
name = 'Feature_{0}'.format(id)
if name not in self.feature_usage['Features']:
self.feature_usage['Features'][name] = timestamp
elif trace_event['name'] == 'CSSFirstUsed':
if id in CSS_FEATURES:
name = CSS_FEATURES[id]
else:
name = 'CSSFeature_{0}'.format(id)
if name not in self.feature_usage['CSSFeatures']:
self.feature_usage['CSSFeatures'][name] = timestamp
########################################################################################################################
# Netlog
########################################################################################################################
def ProcessNetlogEvent(self, trace_event):
if 'args' in trace_event and 'id' in trace_event and 'name' in trace_event and 'source_type' in trace_event['args']:
            # Convert the source dependency id to hex if one exists
if 'params' in trace_event['args'] and 'source_dependency' in trace_event['args']['params'] and 'id' in trace_event['args']['params']['source_dependency']:
dependency_id = int(trace_event['args']['params']['source_dependency']['id'])
trace_event['args']['params']['source_dependency']['id'] = 'x%X' % dependency_id
if trace_event['args']['source_type'] == 'SOCKET':
self.ProcessNetlogSocketEvent(trace_event)
if trace_event['args']['source_type'] == 'HTTP2_SESSION':
self.ProcessNetlogHTTP2SessionEvent(trace_event)
def ProcessNetlogSocketEvent(self, s):
if 'sockets' not in self.netlog:
self.netlog['sockets'] = {}
if s['id'] not in self.netlog['sockets']:
self.netlog['sockets'][s['id']] = {'bytes_in': 0, 'bytes_out': 0}
if s['name'] == 'SOCKET_BYTES_RECEIVED' and 'params' in s['args'] and 'byte_count' in s['args']['params']:
self.netlog['sockets'][s['id']]['bytes_in'] += s['args']['params']['byte_count']
self.netlog['bytes_in'] += s['args']['params']['byte_count']
if s['name'] == 'SOCKET_BYTES_SENT' and 'params' in s['args'] and 'byte_count' in s['args']['params']:
self.netlog['sockets'][s['id']]['bytes_out'] += s['args']['params']['byte_count']
self.netlog['bytes_out'] += s['args']['params']['byte_count']
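    # After processing, self.netlog['sockets'] maps each socket id to running
    # byte counts, e.g. {'170': {'bytes_in': 15230, 'bytes_out': 981}} (id value
    # hypothetical), while self.netlog['bytes_in'] / self.netlog['bytes_out']
    # carry the page-wide totals (assumed initialized to 0 elsewhere).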
def ProcessNetlogHTTP2SessionEvent(self, s):
if 'params' in s['args'] and 'stream_id' in s['args']['params']:
if 'http2' not in self.netlog:
self.netlog['http2'] = {'bytes_in': 0, 'bytes_out': 0}
if s['id'] not in self.netlog['http2']:
self.netlog['http2'][s['id']] = {'bytes_in': 0, 'bytes_out': 0, 'streams':{}}
stream = '{0:d}'.format(s['args']['params']['stream_id'])
if stream not in self.netlog['http2'][s['id']]['streams']:
self.netlog['http2'][s['id']]['streams'][stream] = {'start': s['tts'], 'end': s['tts'], 'bytes_in': 0, 'bytes_out': 0}
if s['tts'] > self.netlog['http2'][s['id']]['streams'][stream]['end']:
self.netlog['http2'][s['id']]['streams'][stream]['end'] = s['tts']
if s['name'] == 'HTTP2_SESSION_SEND_HEADERS' and 'params' in s['args']:
if 'request' not in self.netlog['http2'][s['id']]['streams'][stream]:
self.netlog['http2'][s['id']]['streams'][stream]['request'] = {}
if 'headers' in s['args']['params']:
self.netlog['http2'][s['id']]['streams'][stream]['request']['headers'] = s['args']['params']['headers']
if 'parent_stream_id' in s['args']['params']:
self.netlog['http2'][s['id']]['streams'][stream]['request']['parent_stream_id'] = s['args']['params']['parent_stream_id']
if 'exclusive' in s['args']['params']:
self.netlog['http2'][s['id']]['streams'][stream]['request']['exclusive'] = s['args']['params']['exclusive']
if 'priority' in s['args']['params']:
self.netlog['http2'][s['id']]['streams'][stream]['request']['priority'] = s['args']['params']['priority']
if s['name'] == 'HTTP2_SESSION_RECV_HEADERS' and 'params' in s['args']:
if 'first_byte' not in self.netlog['http2'][s['id']]['streams'][stream]:
self.netlog['http2'][s['id']]['streams'][stream]['first_byte'] = s['tts']
if 'response' not in self.netlog['http2'][s['id']]['streams'][stream]:
self.netlog['http2'][s['id']]['streams'][stream]['response'] = {}
if 'headers' in s['args']['params']:
                    self.netlog['http2'][s['id']]['streams'][stream]['response']['headers'] = s['args']['params']['headers']
if s['name'] == 'HTTP2_SESSION_RECV_DATA' and 'params' in s['args'] and 'size' in s['args']['params']:
if 'first_byte' not in self.netlog['http2'][s['id']]['streams'][stream]:
self.netlog['http2'][s['id']]['streams'][stream]['first_byte'] = s['tts']
self.netlog['http2'][s['id']]['streams'][stream]['bytes_in'] += s['args']['params']['size']
self.netlog['http2'][s['id']]['bytes_in'] += s['args']['params']['size']
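    # Each accumulated HTTP/2 stream record ends up shaped roughly like this
    # (hypothetical values):
    #   self.netlog['http2'][session_id]['streams']['1'] = {
    #       'start': 1234, 'end': 5678, 'first_byte': 2345,
    #       'bytes_in': 20480, 'bytes_out': 0,
    #       'request': {'headers': [...], 'priority': 0},
    #       'response': {'headers': [...]}}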
########################################################################################################################
# Main Entry Point
########################################################################################################################
def main():
import argparse
parser = argparse.ArgumentParser(description='Chrome trace parser.',
prog='trace-parser')
parser.add_argument('-v', '--verbose', action='count',
help="Increase verbosity (specify multiple times for more). -vvvv for full debug output.")
parser.add_argument('-t', '--trace', help="Input trace file.")
parser.add_argument('-l', '--timeline', help="Input timeline file (iOS or really old Chrome).")
parser.add_argument('-c', '--cpu', help="Output CPU time slices file.")
    parser.add_argument('-j', '--js', help="Output JavaScript per-script parse/evaluate/execute timings.")
parser.add_argument('-u', '--user', help="Output user timing file.")
parser.add_argument('-f', '--features', help="Output blink feature usage file.")
parser.add_argument('-i', '--interactive', help="Output list of interactive times.")
parser.add_argument('-n', '--netlog', help="Output netlog details file.")
options, unknown = parser.parse_known_args()
# Set up logging
log_level = logging.CRITICAL
if options.verbose == 1:
log_level = logging.ERROR
elif options.verbose == 2:
log_level = logging.WARNING
elif options.verbose == 3:
log_level = logging.INFO
elif options.verbose >= 4:
log_level = logging.DEBUG
logging.basicConfig(level=log_level, format="%(asctime)s.%(msecs)03d - %(message)s", datefmt="%H:%M:%S")
if not options.trace and not options.timeline:
parser.error("Input trace or timeline file is not specified.")
start = time.time()
trace = Trace()
if options.trace:
trace.Process(options.trace)
elif options.timeline:
trace.ProcessTimeline(options.timeline)
if options.user:
trace.WriteUserTiming(options.user)
if options.cpu:
trace.WriteCPUSlices(options.cpu)
if options.js:
trace.WriteScriptTimings(options.js)
if options.features:
trace.WriteFeatureUsage(options.features)
if options.interactive:
trace.WriteInteractive(options.interactive)
if options.netlog:
trace.WriteNetlog(options.netlog)
end = time.time()
elapsed = end - start
logging.debug("Elapsed Time: {0:0.4f}".format(elapsed))
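# Example invocation (hypothetical file names, assuming this module is saved
# as trace_parser.py):
#   python trace_parser.py -vvv -t trace.json -c cpu.json -j js.json \
#       -u user_timing.json -f features.json -n netlog.json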
########################################################################################################################
# Blink feature names from https://cs.chromium.org/chromium/src/third_party/WebKit/Source/core/frame/UseCounter.h
########################################################################################################################
BLINK_FEATURES = {
"0": "PageDestruction",
"3": "PrefixedIndexedDB",
"4": "WorkerStart",
"5": "SharedWorkerStart",
"9": "UnprefixedIndexedDB",
"10": "OpenWebDatabase",
"13": "UnprefixedRequestAnimationFrame",
"14": "PrefixedRequestAnimationFrame",
"15": "ContentSecurityPolicy",
"16": "ContentSecurityPolicyReportOnly",
"18": "PrefixedTransitionEndEvent",
"19": "UnprefixedTransitionEndEvent",
"20": "PrefixedAndUnprefixedTransitionEndEvent",
"21": "AutoFocusAttribute",
"23": "DataListElement",
"24": "FormAttribute",
"25": "IncrementalAttribute",
"26": "InputTypeColor",
"27": "InputTypeDate",
"29": "InputTypeDateTimeFallback",
"30": "InputTypeDateTimeLocal",
"31": "InputTypeEmail",
"32": "InputTypeMonth",
"33": "InputTypeNumber",
"34": "InputTypeRange",
"35": "InputTypeSearch",
"36": "InputTypeTel",
"37": "InputTypeTime",
"38": "InputTypeURL",
"39": "InputTypeWeek",
"40": "InputTypeWeekFallback",
"41": "ListAttribute",
"42": "MaxAttribute",
"43": "MinAttribute",
"44": "PatternAttribute",
"45": "PlaceholderAttribute",
"47": "PrefixedDirectoryAttribute",
"49": "RequiredAttribute",
"51": "StepAttribute",
"52": "PageVisits",
"53": "HTMLMarqueeElement",
"55": "Reflection",
"57": "PrefixedStorageInfo",
"58": "XFrameOptions",
"59": "XFrameOptionsSameOrigin",
"60": "XFrameOptionsSameOriginWithBadAncestorChain",
"61": "DeprecatedFlexboxWebContent",
"62": "DeprecatedFlexboxChrome",
"63": "DeprecatedFlexboxChromeExtension",
"65": "UnprefixedPerformanceTimeline",
"67": "UnprefixedUserTiming",
"69": "WindowEvent",
"70": "ContentSecurityPolicyWithBaseElement",
"74": "DocumentClear",
"77": "XMLDocument",
"78": "XSLProcessingInstruction",
"79": "XSLTProcessor",
"80": "SVGSwitchElement",
"83": "DocumentAll",
"84": "FormElement",
"85": "DemotedFormElement",
"90": "SVGAnimationElement",
"96": "LineClamp",
"97": "SubFrameBeforeUnloadRegistered",
"98": "SubFrameBeforeUnloadFired",
"102": "ConsoleMarkTimeline",
"111": "DocumentCreateAttribute",
"112": "DocumentCreateAttributeNS",
"113": "DocumentCreateCDATASection",
"115": "DocumentXMLEncoding",
"116": "DocumentXMLStandalone",
"117": "DocumentXMLVersion",
"123": "NavigatorProductSub",
"124": "NavigatorVendor",
"125": "NavigatorVendorSub",
"128": "PrefixedAnimationEndEvent",
"129": "UnprefixedAnimationEndEvent",
"130": "PrefixedAndUnprefixedAnimationEndEvent",
"131": "PrefixedAnimationStartEvent",
"132": "UnprefixedAnimationStartEvent",
"133": "PrefixedAndUnprefixedAnimationStartEvent",
"134": "PrefixedAnimationIterationEvent",
"135": "UnprefixedAnimationIterationEvent",
"136": "PrefixedAndUnprefixedAnimationIterationEvent",
"137": "EventReturnValue",
"138": "SVGSVGElement",
"143": "DOMSubtreeModifiedEvent",
"144": "DOMNodeInsertedEvent",
"145": "DOMNodeRemovedEvent",
"146": "DOMNodeRemovedFromDocumentEvent",
"147": "DOMNodeInsertedIntoDocumentEvent",
"148": "DOMCharacterDataModifiedEvent",
"150": "DocumentAllLegacyCall",
"152": "HTMLEmbedElementLegacyCall",
"153": "HTMLObjectElementLegacyCall",
"155": "GetMatchedCSSRules",
"160": "AttributeOwnerElement",
"162": "AttributeSpecified",
"164": "PrefixedAudioDecodedByteCount",
"165": "PrefixedVideoDecodedByteCount",
"166": "PrefixedVideoSupportsFullscreen",
"167": "PrefixedVideoDisplayingFullscreen",
"168": "PrefixedVideoEnterFullscreen",
"169": "PrefixedVideoExitFullscreen",
"170": "PrefixedVideoEnterFullScreen",
"171": "PrefixedVideoExitFullScreen",
"172": "PrefixedVideoDecodedFrameCount",
"173": "PrefixedVideoDroppedFrameCount",
"176": "PrefixedElementRequestFullscreen",
"177": "PrefixedElementRequestFullScreen",
"178": "BarPropLocationbar",
"179": "BarPropMenubar",
"180": "BarPropPersonalbar",
"181": "BarPropScrollbars",
"182": "BarPropStatusbar",
"183": "BarPropToolbar",
"184": "InputTypeEmailMultiple",
"185": "InputTypeEmailMaxLength",
"186": "InputTypeEmailMultipleMaxLength",
"190": "InputTypeText",
"191": "InputTypeTextMaxLength",
"192": "InputTypePassword",
"193": "InputTypePasswordMaxLength",
"196": "PrefixedPageVisibility",
"198": "CSSStyleSheetInsertRuleOptionalArg",
"200": "DocumentBeforeUnloadRegistered",
"201": "DocumentBeforeUnloadFired",
"202": "DocumentUnloadRegistered",
"203": "DocumentUnloadFired",
"204": "SVGLocatableNearestViewportElement",
"205": "SVGLocatableFarthestViewportElement",
"209": "SVGPointMatrixTransform",
"211": "DOMFocusInOutEvent",
"212": "FileGetLastModifiedDate",
"213": "HTMLElementInnerText",
"214": "HTMLElementOuterText",
"215": "ReplaceDocumentViaJavaScriptURL",
"217": "ElementPrefixedMatchesSelector",
"219": "CSSStyleSheetRules",
"220": "CSSStyleSheetAddRule",
"221": "CSSStyleSheetRemoveRule",
"222": "InitMessageEvent",
"233": "PrefixedDevicePixelRatioMediaFeature",
"234": "PrefixedMaxDevicePixelRatioMediaFeature",
"235": "PrefixedMinDevicePixelRatioMediaFeature",
"237": "PrefixedTransform3dMediaFeature",
"240": "PrefixedStorageQuota",
"243": "ResetReferrerPolicy",
"244": "CaseInsensitiveAttrSelectorMatch",
"246": "FormNameAccessForImageElement",
"247": "FormNameAccessForPastNamesMap",
"248": "FormAssociationByParser",
"250": "SVGSVGElementInDocument",
"251": "SVGDocumentRootElement",
"257": "WorkerSubjectToCSP",
"258": "WorkerAllowedByChildBlockedByScript",
"260": "DeprecatedWebKitGradient",
"261": "DeprecatedWebKitLinearGradient",
"262": "DeprecatedWebKitRepeatingLinearGradient",
"263": "DeprecatedWebKitRadialGradient",
"264": "DeprecatedWebKitRepeatingRadialGradient",
"267": "PrefixedImageSmoothingEnabled",
"268": "UnprefixedImageSmoothingEnabled",
"274": "TextAutosizing",
"276": "HTMLAnchorElementPingAttribute",
"279": "SVGClassName",
"281": "HTMLMediaElementSeekToFragmentStart",
"282": "HTMLMediaElementPauseAtFragmentEnd",
"283": "PrefixedWindowURL",
"285": "WindowOrientation",
"286": "DOMStringListContains",
"287": "DocumentCaptureEvents",
"288": "DocumentReleaseEvents",
"289": "WindowCaptureEvents",
"290": "WindowReleaseEvents",
"295": "DocumentXPathCreateExpression",
"296": "DocumentXPathCreateNSResolver",
"297": "DocumentXPathEvaluate",
"298": "AttrGetValue",
"299": "AttrSetValue",
"300": "AnimationConstructorKeyframeListEffectObjectTiming",
"302": "AnimationConstructorKeyframeListEffectNoTiming",
"303": "AttrSetValueWithElement",
"304": "PrefixedCancelAnimationFrame",
"305": "PrefixedCancelRequestAnimationFrame",
"306": "NamedNodeMapGetNamedItem",
"307": "NamedNodeMapSetNamedItem",
"308": "NamedNodeMapRemoveNamedItem",
"309": "NamedNodeMapItem",
"310": "NamedNodeMapGetNamedItemNS",
"311": "NamedNodeMapSetNamedItemNS",
"312": "NamedNodeMapRemoveNamedItemNS",
"318": "PrefixedDocumentIsFullscreen",
"320": "PrefixedDocumentCurrentFullScreenElement",
"321": "PrefixedDocumentCancelFullScreen",
"322": "PrefixedDocumentFullscreenEnabled",
"323": "PrefixedDocumentFullscreenElement",
"324": "PrefixedDocumentExitFullscreen",
"325": "SVGForeignObjectElement",
"327": "SelectionSetPosition",
"328": "AnimationFinishEvent",
"329": "SVGSVGElementInXMLDocument",
"341": "PrefixedPerformanceClearResourceTimings",
"342": "PrefixedPerformanceSetResourceTimingBufferSize",
"343": "EventSrcElement",
"344": "EventCancelBubble",
"345": "EventPath",
"347": "NodeIteratorDetach",
"348": "AttrNodeValue",
"349": "AttrTextContent",
"350": "EventGetReturnValueTrue",
"351": "EventGetReturnValueFalse",
"352": "EventSetReturnValueTrue",
"353": "EventSetReturnValueFalse",
"356": "WindowOffscreenBuffering",
"357": "WindowDefaultStatus",
"358": "WindowDefaultstatus",
"361": "PrefixedTransitionEventConstructor",
"362": "PrefixedMutationObserverConstructor",
"363": "PrefixedIDBCursorConstructor",
"364": "PrefixedIDBDatabaseConstructor",
"365": "PrefixedIDBFactoryConstructor",
"366": "PrefixedIDBIndexConstructor",
"367": "PrefixedIDBKeyRangeConstructor",
"368": "PrefixedIDBObjectStoreConstructor",
"369": "PrefixedIDBRequestConstructor",
"370": "PrefixedIDBTransactionConstructor",
"371": "NotificationPermission",
"372": "RangeDetach",
"386": "PrefixedFileRelativePath",
"387": "DocumentCaretRangeFromPoint",
"389": "ElementScrollIntoViewIfNeeded",
"393": "RangeExpand",
"396": "HTMLImageElementX",
"397": "HTMLImageElementY",
"400": "SelectionBaseNode",
"401": "SelectionBaseOffset",
"402": "SelectionExtentNode",
"403": "SelectionExtentOffset",
"404": "SelectionType",
"405": "SelectionModify",
"406": "SelectionSetBaseAndExtent",
"407": "SelectionEmpty",
"409": "VTTCue",
"410": "VTTCueRender",
"411": "VTTCueRenderVertical",
"412": "VTTCueRenderSnapToLinesFalse",
"413": "VTTCueRenderLineNotAuto",
"414": "VTTCueRenderPositionNot50",
"415": "VTTCueRenderSizeNot100",
"416": "VTTCueRenderAlignNotMiddle",
"417": "ElementRequestPointerLock",
"418": "VTTCueRenderRtl",
"419": "PostMessageFromSecureToInsecure",
"420": "PostMessageFromInsecureToSecure",
"421": "DocumentExitPointerLock",
"422": "DocumentPointerLockElement",
"424": "PrefixedCursorZoomIn",
"425": "PrefixedCursorZoomOut",
"429": "TextEncoderConstructor",
"430": "TextEncoderEncode",
"431": "TextDecoderConstructor",
"432": "TextDecoderDecode",
"433": "FocusInOutEvent",
"434": "MouseEventMovementX",
"435": "MouseEventMovementY",
"440": "DocumentFonts",
"441": "MixedContentFormsSubmitted",
"442": "FormsSubmitted",
"443": "TextInputEventOnInput",
"444": "TextInputEventOnTextArea",
"445": "TextInputEventOnContentEditable",
"446": "TextInputEventOnNotNode",
"447": "WebkitBeforeTextInsertedOnInput",
"448": "WebkitBeforeTextInsertedOnTextArea",
"449": "WebkitBeforeTextInsertedOnContentEditable",
"450": "WebkitBeforeTextInsertedOnNotNode",
"451": "WebkitEditableContentChangedOnInput",
"452": "WebkitEditableContentChangedOnTextArea",
"453": "WebkitEditableContentChangedOnContentEditable",
"454": "WebkitEditableContentChangedOnNotNode",
"455": "HTMLImports",
"456": "ElementCreateShadowRoot",
"457": "DocumentRegisterElement",
"458": "EditingAppleInterchangeNewline",
"459": "EditingAppleConvertedSpace",
"460": "EditingApplePasteAsQuotation",
"461": "EditingAppleStyleSpanClass",
"462": "EditingAppleTabSpanClass",
"463": "HTMLImportsAsyncAttribute",
"465": "XMLHttpRequestSynchronous",
"466": "CSSSelectorPseudoUnresolved",
"467": "CSSSelectorPseudoShadow",
"468": "CSSSelectorPseudoContent",
"469": "CSSSelectorPseudoHost",
"470": "CSSSelectorPseudoHostContext",
"471": "CSSDeepCombinator",
"473": "UseAsm",
"475": "DOMWindowOpen",
"476": "DOMWindowOpenFeatures",
"478": "MediaStreamTrackGetSources",
"479": "AspectRatioFlexItem",
"480": "DetailsElement",
"481": "DialogElement",
"482": "MapElement",
"483": "MeterElement",
"484": "ProgressElement",
"490": "PrefixedHTMLElementDropzone",
"491": "WheelEventWheelDeltaX",
"492": "WheelEventWheelDeltaY",
"493": "WheelEventWheelDelta",
"494": "SendBeacon",
"495": "SendBeaconQuotaExceeded",
"501": "SVGSMILElementInDocument",
"502": "MouseEventOffsetX",
"503": "MouseEventOffsetY",
"504": "MouseEventX",
"505": "MouseEventY",
"506": "MouseEventFromElement",
"507": "MouseEventToElement",
"508": "RequestFileSystem",
"509": "RequestFileSystemWorker",
"510": "RequestFileSystemSyncWorker",
"519": "SVGStyleElementTitle",
"520": "PictureSourceSrc",
"521": "Picture",
"522": "Sizes",
"523": "SrcsetXDescriptor",
"524": "SrcsetWDescriptor",
"525": "SelectionContainsNode",
"529": "XMLExternalResourceLoad",
"530": "MixedContentPrivateHostnameInPublicHostname",
"531": "LegacyProtocolEmbeddedAsSubresource",
"532": "RequestedSubresourceWithEmbeddedCredentials",
"533": "NotificationCreated",
"534": "NotificationClosed",
"535": "NotificationPermissionRequested",
"538": "ConsoleTimeline",
"539": "ConsoleTimelineEnd",
"540": "SRIElementWithMatchingIntegrityAttribute",
"541": "SRIElementWithNonMatchingIntegrityAttribute",
"542": "SRIElementWithUnparsableIntegrityAttribute",
"545": "V8Animation_StartTime_AttributeGetter",
"546": "V8Animation_StartTime_AttributeSetter",
"547": "V8Animation_CurrentTime_AttributeGetter",
"548": "V8Animation_CurrentTime_AttributeSetter",
"549": "V8Animation_PlaybackRate_AttributeGetter",
"550": "V8Animation_PlaybackRate_AttributeSetter",
"551": "V8Animation_PlayState_AttributeGetter",
"552": "V8Animation_Finish_Method",
"553": "V8Animation_Play_Method",
"554": "V8Animation_Pause_Method",
"555": "V8Animation_Reverse_Method",
"556": "BreakIterator",
"557": "ScreenOrientationAngle",
"558": "ScreenOrientationType",
"559": "ScreenOrientationLock",
"560": "ScreenOrientationUnlock",
"561": "GeolocationSecureOrigin",
"562": "GeolocationInsecureOrigin",
"563": "NotificationSecureOrigin",
"564": "NotificationInsecureOrigin",
"565": "NotificationShowEvent",
"569": "SVGTransformListConsolidate",
"570": "SVGAnimatedTransformListBaseVal",
"571": "QuotedAnimationName",
"572": "QuotedKeyframesRule",
"573": "SrcsetDroppedCandidate",
"574": "WindowPostMessage",
"575": "WindowPostMessageWithLegacyTargetOriginArgument",
"576": "RenderRuby",
"578": "ScriptElementWithInvalidTypeHasSrc",
"581": "XMLHttpRequestSynchronousInNonWorkerOutsideBeforeUnload",
"582": "CSSSelectorPseudoScrollbar",
"583": "CSSSelectorPseudoScrollbarButton",
"584": "CSSSelectorPseudoScrollbarThumb",
"585": "CSSSelectorPseudoScrollbarTrack",
"586": "CSSSelectorPseudoScrollbarTrackPiece",
"587": "LangAttribute",
"588": "LangAttributeOnHTML",
"589": "LangAttributeOnBody",
"590": "LangAttributeDoesNotMatchToUILocale",
"591": "InputTypeSubmit",
"592": "InputTypeSubmitWithValue",
"593": "SetReferrerPolicy",
"595": "MouseEventWhich",
"598": "UIEventWhich",
"599": "TextWholeText",
"603": "NotificationCloseEvent",
"606": "StyleMedia",
"607": "StyleMediaType",
"608": "StyleMediaMatchMedium",
"609": "MixedContentPresent",
"610": "MixedContentBlockable",
"611": "MixedContentAudio",
"612": "MixedContentDownload",
"613": "MixedContentFavicon",
"614": "MixedContentImage",
"615": "MixedContentInternal",
"616": "MixedContentPlugin",
"617": "MixedContentPrefetch",
"618": "MixedContentVideo",
"620": "AudioListenerDopplerFactor",
"621": "AudioListenerSpeedOfSound",
"622": "AudioListenerSetVelocity",
"628": "CSSSelectorPseudoFullScreenAncestor",
"629": "CSSSelectorPseudoFullScreen",
"630": "WebKitCSSMatrix",
"631": "AudioContextCreateAnalyser",
"632": "AudioContextCreateBiquadFilter",
"633": "AudioContextCreateBufferSource",
"634": "AudioContextCreateChannelMerger",
"635": "AudioContextCreateChannelSplitter",
"636": "AudioContextCreateConvolver",
"637": "AudioContextCreateDelay",
"638": "AudioContextCreateDynamicsCompressor",
"639": "AudioContextCreateGain",
"640": "AudioContextCreateMediaElementSource",
"641": "AudioContextCreateMediaStreamDestination",
"642": "AudioContextCreateMediaStreamSource",
"643": "AudioContextCreateOscillator",
"645": "AudioContextCreatePeriodicWave",
"646": "AudioContextCreateScriptProcessor",
"647": "AudioContextCreateStereoPanner",
"648": "AudioContextCreateWaveShaper",
"649": "AudioContextDecodeAudioData",
"650": "AudioContextResume",
"651": "AudioContextSuspend",
"652": "AudioContext",
"653": "OfflineAudioContext",
"654": "PrefixedAudioContext",
"655": "PrefixedOfflineAudioContext",
"661": "MixedContentInNonHTTPSFrameThatRestrictsMixedContent",
"662": "MixedContentInSecureFrameThatDoesNotRestrictMixedContent",
"663": "MixedContentWebSocket",
"664": "SyntheticKeyframesInCompositedCSSAnimation",
"665": "MixedContentFormPresent",
"666": "GetUserMediaInsecureOrigin",
"667": "GetUserMediaSecureOrigin",
"668": "DeviceMotionInsecureOrigin",
"669": "DeviceMotionSecureOrigin",
"670": "DeviceOrientationInsecureOrigin",
"671": "DeviceOrientationSecureOrigin",
"672": "SandboxViaIFrame",
"673": "SandboxViaCSP",
"674": "BlockedSniffingImageToScript",
"675": "Fetch",
"676": "FetchBodyStream",
"677": "XMLHttpRequestAsynchronous",
"679": "WhiteSpacePreFromXMLSpace",
"680": "WhiteSpaceNowrapFromXMLSpace",
"685": "SVGSVGElementForceRedraw",
"686": "SVGSVGElementSuspendRedraw",
"687": "SVGSVGElementUnsuspendRedraw",
"688": "SVGSVGElementUnsuspendRedrawAll",
"689": "AudioContextClose",
"691": "CSSZoomNotEqualToOne",
"694": "ClientRectListItem",
"695": "WindowClientInformation",
"696": "WindowFind",
"697": "WindowScreenLeft",
"698": "WindowScreenTop",
"699": "V8Animation_Cancel_Method",
"700": "V8Animation_Onfinish_AttributeGetter",
"701": "V8Animation_Onfinish_AttributeSetter",
"707": "V8Window_WebKitAnimationEvent_ConstructorGetter",
"710": "CryptoGetRandomValues",
"711": "SubtleCryptoEncrypt",
"712": "SubtleCryptoDecrypt",
"713": "SubtleCryptoSign",
"714": "SubtleCryptoVerify",
"715": "SubtleCryptoDigest",
"716": "SubtleCryptoGenerateKey",
"717": "SubtleCryptoImportKey",
"718": "SubtleCryptoExportKey",
"719": "SubtleCryptoDeriveBits",
"720": "SubtleCryptoDeriveKey",
"721": "SubtleCryptoWrapKey",
"722": "SubtleCryptoUnwrapKey",
"723": "CryptoAlgorithmAesCbc",
"724": "CryptoAlgorithmHmac",
"725": "CryptoAlgorithmRsaSsaPkcs1v1_5",
"726": "CryptoAlgorithmSha1",
"727": "CryptoAlgorithmSha256",
"728": "CryptoAlgorithmSha384",
"729": "CryptoAlgorithmSha512",
"730": "CryptoAlgorithmAesGcm",
"731": "CryptoAlgorithmRsaOaep",
"732": "CryptoAlgorithmAesCtr",
"733": "CryptoAlgorithmAesKw",
"734": "CryptoAlgorithmRsaPss",
"735": "CryptoAlgorithmEcdsa",
"736": "CryptoAlgorithmEcdh",
"737": "CryptoAlgorithmHkdf",
"738": "CryptoAlgorithmPbkdf2",
"739": "DocumentSetDomain",
"740": "UpgradeInsecureRequestsEnabled",
"741": "UpgradeInsecureRequestsUpgradedRequest",
"742": "DocumentDesignMode",
"743": "GlobalCacheStorage",
"744": "NetInfo",
"745": "BackgroundSync",
"748": "LegacyConst",
"750": "V8Permissions_Query_Method",
"754": "V8HTMLInputElement_Autocapitalize_AttributeGetter",
"755": "V8HTMLInputElement_Autocapitalize_AttributeSetter",
"756": "V8HTMLTextAreaElement_Autocapitalize_AttributeGetter",
"757": "V8HTMLTextAreaElement_Autocapitalize_AttributeSetter",
"758": "SVGHrefBaseVal",
"759": "SVGHrefAnimVal",
"760": "V8CSSRuleList_Item_Method",
"761": "V8MediaList_Item_Method",
"762": "V8StyleSheetList_Item_Method",
"763": "StyleSheetListAnonymousNamedGetter",
"764": "AutocapitalizeAttribute",
"765": "FullscreenSecureOrigin",
"766": "FullscreenInsecureOrigin",
"767": "DialogInSandboxedContext",
"768": "SVGSMILAnimationInImageRegardlessOfCache",
"770": "EncryptedMediaSecureOrigin",
"771": "EncryptedMediaInsecureOrigin",
"772": "PerformanceFrameTiming",
"773": "V8Element_Animate_Method",
"778": "V8SVGSVGElement_GetElementById_Method",
"779": "ElementCreateShadowRootMultiple",
"780": "V8MessageChannel_Constructor",
"781": "V8MessagePort_PostMessage_Method",
"782": "V8MessagePort_Start_Method",
"783": "V8MessagePort_Close_Method",
"784": "MessagePortsTransferred",
"785": "CSSKeyframesRuleAnonymousIndexedGetter",
"786": "V8Screen_AvailLeft_AttributeGetter",
"787": "V8Screen_AvailTop_AttributeGetter",
"791": "V8SVGFEConvolveMatrixElement_PreserveAlpha_AttributeGetter",
"798": "V8SVGStyleElement_Disabled_AttributeGetter",
"799": "V8SVGStyleElement_Disabled_AttributeSetter",
"801": "InputTypeFileSecureOrigin",
"802": "InputTypeFileInsecureOrigin",
"804": "ElementAttachShadow",
"806": "V8SecurityPolicyViolationEvent_DocumentURI_AttributeGetter",
"807": "V8SecurityPolicyViolationEvent_BlockedURI_AttributeGetter",
"808": "V8SecurityPolicyViolationEvent_StatusCode_AttributeGetter",
"809": "HTMLLinkElementDisabled",
"810": "V8HTMLLinkElement_Disabled_AttributeGetter",
"811": "V8HTMLLinkElement_Disabled_AttributeSetter",
"812": "V8HTMLStyleElement_Disabled_AttributeGetter",
"813": "V8HTMLStyleElement_Disabled_AttributeSetter",
"816": "V8DOMError_Constructor",
"817": "V8DOMError_Name_AttributeGetter",
"818": "V8DOMError_Message_AttributeGetter",
"823": "V8Location_AncestorOrigins_AttributeGetter",
"824": "V8IDBDatabase_ObjectStoreNames_AttributeGetter",
"825": "V8IDBObjectStore_IndexNames_AttributeGetter",
"826": "V8IDBTransaction_ObjectStoreNames_AttributeGetter",
"830": "TextInputFired",
"831": "V8TextEvent_Data_AttributeGetter",
"832": "V8TextEvent_InitTextEvent_Method",
"833": "V8SVGSVGElement_UseCurrentView_AttributeGetter",
"834": "V8SVGSVGElement_CurrentView_AttributeGetter",
"835": "ClientHintsDPR",
"836": "ClientHintsResourceWidth",
"837": "ClientHintsViewportWidth",
"838": "SRIElementIntegrityAttributeButIneligible",
"839": "FormDataAppendFile",
"840": "FormDataAppendFileWithFilename",
"841": "FormDataAppendBlob",
"842": "FormDataAppendBlobWithFilename",
"843": "FormDataAppendNull",
"844": "HTMLDocumentCreateAttributeNameNotLowercase",
"845": "NonHTMLElementSetAttributeNodeFromHTMLDocumentNameNotLowercase",
"846": "DOMStringList_Item_AttributeGetter_IndexedDB",
"847": "DOMStringList_Item_AttributeGetter_Location",
"848": "DOMStringList_Contains_Method_IndexedDB",
"849": "DOMStringList_Contains_Method_Location",
"850": "NavigatorVibrate",
"851": "NavigatorVibrateSubFrame",
"853": "V8XPathEvaluator_Constructor",
"854": "V8XPathEvaluator_CreateExpression_Method",
"855": "V8XPathEvaluator_CreateNSResolver_Method",
"856": "V8XPathEvaluator_Evaluate_Method",
"857": "RequestMIDIAccess",
"858": "V8MouseEvent_LayerX_AttributeGetter",
"859": "V8MouseEvent_LayerY_AttributeGetter",
"860": "InnerTextWithShadowTree",
"861": "SelectionToStringWithShadowTree",
"862": "WindowFindWithShadowTree",
"863": "V8CompositionEvent_InitCompositionEvent_Method",
"864": "V8CustomEvent_InitCustomEvent_Method",
"865": "V8DeviceMotionEvent_InitDeviceMotionEvent_Method",
"866": "V8DeviceOrientationEvent_InitDeviceOrientationEvent_Method",
"867": "V8Event_InitEvent_Method",
"868": "V8KeyboardEvent_InitKeyboardEvent_Method",
"869": "V8MouseEvent_InitMouseEvent_Method",
"870": "V8MutationEvent_InitMutationEvent_Method",
"871": "V8StorageEvent_InitStorageEvent_Method",
"872": "V8TouchEvent_InitTouchEvent_Method",
"873": "V8UIEvent_InitUIEvent_Method",
"874": "V8Document_CreateTouch_Method",
"876": "RequestFileSystemNonWebbyOrigin",
"879": "V8MemoryInfo_TotalJSHeapSize_AttributeGetter",
"880": "V8MemoryInfo_UsedJSHeapSize_AttributeGetter",
"881": "V8MemoryInfo_JSHeapSizeLimit_AttributeGetter",
"882": "V8Performance_Timing_AttributeGetter",
"883": "V8Performance_Navigation_AttributeGetter",
"884": "V8Performance_Memory_AttributeGetter",
"885": "V8SharedWorker_WorkerStart_AttributeGetter",
"886": "HTMLKeygenElement",
"892": "HTMLMediaElementPreloadNone",
"893": "HTMLMediaElementPreloadMetadata",
"894": "HTMLMediaElementPreloadAuto",
"895": "HTMLMediaElementPreloadDefault",
"896": "MixedContentBlockableAllowed",
"897": "PseudoBeforeAfterForInputElement",
"898": "V8Permissions_Revoke_Method",
"899": "LinkRelDnsPrefetch",
"900": "LinkRelPreconnect",
"901": "LinkRelPreload",
"902": "LinkHeaderDnsPrefetch",
"903": "LinkHeaderPreconnect",
"904": "ClientHintsMetaAcceptCH",
"905": "HTMLElementDeprecatedWidth",
"906": "ClientHintsContentDPR",
"907": "ElementAttachShadowOpen",
"908": "ElementAttachShadowClosed",
"909": "AudioParamSetValueAtTime",
"910": "AudioParamLinearRampToValueAtTime",
"911": "AudioParamExponentialRampToValueAtTime",
"912": "AudioParamSetTargetAtTime",
"913": "AudioParamSetValueCurveAtTime",
"914": "AudioParamCancelScheduledValues",
"915": "V8Permissions_Request_Method",
"917": "LinkRelPrefetch",
"918": "LinkRelPrerender",
"919": "LinkRelNext",
"920": "PrefixedPerformanceResourceTimingBufferFull",
"921": "CSSValuePrefixedMinContent",
"922": "CSSValuePrefixedMaxContent",
"923": "CSSValuePrefixedFitContent",
"924": "CSSValuePrefixedFillAvailable",
"926": "PresentationDefaultRequest",
"927": "PresentationAvailabilityChangeEventListener",
"928": "PresentationRequestConstructor",
"929": "PresentationRequestStart",
"930": "PresentationRequestReconnect",
"931": "PresentationRequestGetAvailability",
"932": "PresentationRequestConnectionAvailableEventListener",
"933": "PresentationConnectionTerminate",
"934": "PresentationConnectionSend",
"936": "PresentationConnectionMessageEventListener",
"937": "CSSAnimationsStackedNeutralKeyframe",
"938": "ReadingCheckedInClickHandler",
"939": "FlexboxIntrinsicSizeAlgorithmIsDifferent",
"940": "HTMLImportsHasStyleSheets",
"944": "ClipPathOfPositionedElement",
"945": "ClipCssOfPositionedElement",
"946": "NetInfoType",
"947": "NetInfoDownlinkMax",
"948": "NetInfoOnChange",
"949": "NetInfoOnTypeChange",
"950": "V8Window_Alert_Method",
"951": "V8Window_Confirm_Method",
"952": "V8Window_Prompt_Method",
"953": "V8Window_Print_Method",
"954": "V8Window_RequestIdleCallback_Method",
"955": "FlexboxPercentagePaddingVertical",
"956": "FlexboxPercentageMarginVertical",
"957": "BackspaceNavigatedBack",
"958": "BackspaceNavigatedBackAfterFormInteraction",
"959": "CSPSourceWildcardWouldMatchExactHost",
"960": "CredentialManagerGet",
"961": "CredentialManagerGetWithUI",
"962": "CredentialManagerGetWithoutUI",
"963": "CredentialManagerStore",
"964": "CredentialManagerRequireUserMediation",
"966": "BlockableMixedContentInSubframeBlocked",
"967": "AddEventListenerThirdArgumentIsObject",
"968": "RemoveEventListenerThirdArgumentIsObject",
"969": "CSSAtRuleCharset",
"970": "CSSAtRuleFontFace",
"971": "CSSAtRuleImport",
"972": "CSSAtRuleKeyframes",
"973": "CSSAtRuleMedia",
"974": "CSSAtRuleNamespace",
"975": "CSSAtRulePage",
"976": "CSSAtRuleSupports",
"977": "CSSAtRuleViewport",
"978": "CSSAtRuleWebkitKeyframes",
"979": "V8HTMLFieldSetElement_Elements_AttributeGetter",
"980": "HTMLMediaElementPreloadForcedNone",
"981": "ExternalAddSearchProvider",
"982": "ExternalIsSearchProviderInstalled",
"983": "V8Permissions_RequestAll_Method",
"987": "DeviceOrientationAbsoluteInsecureOrigin",
"988": "DeviceOrientationAbsoluteSecureOrigin",
"989": "FontFaceConstructor",
"990": "ServiceWorkerControlledPage",
"993": "MeterElementWithMeterAppearance",
"994": "MeterElementWithNoneAppearance",
"997": "SelectionAnchorNode",
"998": "SelectionAnchorOffset",
"999": "SelectionFocusNode",
"1000": "SelectionFocusOffset",
"1001": "SelectionIsCollapsed",
"1002": "SelectionRangeCount",
"1003": "SelectionGetRangeAt",
"1004": "SelectionAddRange",
"1005": "SelectionRemoveAllRanges",
"1006": "SelectionCollapse",
"1007": "SelectionCollapseToStart",
"1008": "SelectionCollapseToEnd",
"1009": "SelectionExtend",
"1010": "SelectionSelectAllChildren",
"1011": "SelectionDeleteDromDocument",
"1012": "SelectionDOMString",
"1013": "InputTypeRangeVerticalAppearance",
"1014": "CSSFilterReference",
"1015": "CSSFilterGrayscale",
"1016": "CSSFilterSepia",
"1017": "CSSFilterSaturate",
"1018": "CSSFilterHueRotate",
"1019": "CSSFilterInvert",
"1020": "CSSFilterOpacity",
"1021": "CSSFilterBrightness",
"1022": "CSSFilterContrast",
"1023": "CSSFilterBlur",
"1024": "CSSFilterDropShadow",
"1025": "BackgroundSyncRegister",
"1027": "ExecCommandOnInputOrTextarea",
"1028": "V8History_ScrollRestoration_AttributeGetter",
"1029": "V8History_ScrollRestoration_AttributeSetter",
"1030": "SVG1DOMFilter",
"1031": "OfflineAudioContextStartRendering",
"1032": "OfflineAudioContextSuspend",
"1033": "OfflineAudioContextResume",
"1034": "AttrCloneNode",
"1035": "SVG1DOMPaintServer",
"1036": "SVGSVGElementFragmentSVGView",
"1037": "SVGSVGElementFragmentSVGViewElement",
"1038": "PresentationConnectionClose",
"1039": "SVG1DOMShape",
"1040": "SVG1DOMText",
"1041": "RTCPeerConnectionConstructorConstraints",
"1042": "RTCPeerConnectionConstructorCompliant",
"1044": "RTCPeerConnectionCreateOfferLegacyFailureCallback",
"1045": "RTCPeerConnectionCreateOfferLegacyConstraints",
"1046": "RTCPeerConnectionCreateOfferLegacyOfferOptions",
"1047": "RTCPeerConnectionCreateOfferLegacyCompliant",
"1049": "RTCPeerConnectionCreateAnswerLegacyFailureCallback",
"1050": "RTCPeerConnectionCreateAnswerLegacyConstraints",
"1051": "RTCPeerConnectionCreateAnswerLegacyCompliant",
"1052": "RTCPeerConnectionSetLocalDescriptionLegacyNoSuccessCallback",
"1053": "RTCPeerConnectionSetLocalDescriptionLegacyNoFailureCallback",
"1054": "RTCPeerConnectionSetLocalDescriptionLegacyCompliant",
"1055": "RTCPeerConnectionSetRemoteDescriptionLegacyNoSuccessCallback",
"1056": "RTCPeerConnectionSetRemoteDescriptionLegacyNoFailureCallback",
"1057": "RTCPeerConnectionSetRemoteDescriptionLegacyCompliant",
"1058": "RTCPeerConnectionGetStatsLegacyNonCompliant",
"1059": "NodeFilterIsFunction",
"1060": "NodeFilterIsObject",
"1062": "CSSSelectorInternalPseudoListBox",
"1063": "CSSSelectorInternalMediaControlsCastButton",
"1064": "CSSSelectorInternalMediaControlsOverlayCastButton",
"1065": "CSSSelectorInternalPseudoSpatialNavigationFocus",
"1066": "SameOriginTextScript",
"1067": "SameOriginApplicationScript",
"1068": "SameOriginOtherScript",
"1069": "CrossOriginTextScript",
"1070": "CrossOriginApplicationScript",
"1071": "CrossOriginOtherScript",
"1072": "SVG1DOMSVGTests",
"1073": "V8SVGViewElement_ViewTarget_AttributeGetter",
"1074": "DisableRemotePlaybackAttribute",
"1075": "V8SloppyMode",
"1076": "V8StrictMode",
"1077": "V8StrongMode",
"1078": "AudioNodeConnectToAudioNode",
"1079": "AudioNodeConnectToAudioParam",
"1080": "AudioNodeDisconnectFromAudioNode",
"1081": "AudioNodeDisconnectFromAudioParam",
"1082": "V8CSSFontFaceRule_Style_AttributeGetter",
"1083": "SelectionCollapseNull",
"1084": "SelectionSetBaseAndExtentNull",
"1085": "V8SVGSVGElement_CreateSVGNumber_Method",
"1086": "V8SVGSVGElement_CreateSVGLength_Method",
"1087": "V8SVGSVGElement_CreateSVGAngle_Method",
"1088": "V8SVGSVGElement_CreateSVGPoint_Method",
"1089": "V8SVGSVGElement_CreateSVGMatrix_Method",
"1090": "V8SVGSVGElement_CreateSVGRect_Method",
"1091": "V8SVGSVGElement_CreateSVGTransform_Method",
"1092": "V8SVGSVGElement_CreateSVGTransformFromMatrix_Method",
"1093": "FormNameAccessForNonDescendantImageElement",
"1095": "V8SVGSVGElement_Viewport_AttributeGetter",
"1096": "V8RegExpPrototypeStickyGetter",
"1097": "V8RegExpPrototypeToString",
"1098": "V8InputDeviceCapabilities_FiresTouchEvents_AttributeGetter",
"1099": "DataElement",
"1100": "TimeElement",
"1101": "SVG1DOMUriReference",
"1102": "SVG1DOMZoomAndPan",
"1103": "V8SVGGraphicsElement_Transform_AttributeGetter",
"1104": "MenuItemElement",
"1105": "MenuItemCloseTag",
"1106": "SVG1DOMMarkerElement",
"1107": "SVG1DOMUseElement",
"1108": "SVG1DOMMaskElement",
"1109": "V8SVGAElement_Target_AttributeGetter",
"1110": "V8SVGClipPathElement_ClipPathUnits_AttributeGetter",
"1111": "SVG1DOMFitToViewBox",
"1112": "SVG1DOMCursorElement",
"1113": "V8SVGPathElement_PathLength_AttributeGetter",
"1114": "SVG1DOMSVGElement",
"1115": "SVG1DOMImageElement",
"1116": "SVG1DOMForeignObjectElement",
"1117": "AudioContextCreateIIRFilter",
"1118": "CSSSelectorPseudoSlotted",
"1119": "MediaDevicesEnumerateDevices",
"1120": "NonSecureSharedWorkerAccessedFromSecureContext",
"1121": "SecureSharedWorkerAccessedFromNonSecureContext",
"1123": "EventComposedPath",
"1124": "LinkHeaderPreload",
"1125": "MouseWheelEvent",
"1126": "WheelEvent",
"1127": "MouseWheelAndWheelEvent",
"1128": "BodyScrollsInAdditionToViewport",
"1129": "DocumentDesignModeEnabeld",
"1130": "ContentEditableTrue",
"1131": "ContentEditableTrueOnHTML",
"1132": "ContentEditablePlainTextOnly",
"1133": "V8RegExpPrototypeUnicodeGetter",
"1134": "V8IntlV8Parse",
"1135": "V8IntlPattern",
"1136": "V8IntlResolved",
"1137": "V8PromiseChain",
"1138": "V8PromiseAccept",
"1139": "V8PromiseDefer",
"1140": "EventComposed",
"1141": "GeolocationInsecureOriginIframe",
"1142": "GeolocationSecureOriginIframe",
"1143": "RequestMIDIAccessIframe",
"1144": "GetUserMediaInsecureOriginIframe",
"1145": "GetUserMediaSecureOriginIframe",
"1146": "ElementRequestPointerLockIframe",
"1147": "NotificationAPIInsecureOriginIframe",
"1148": "NotificationAPISecureOriginIframe",
"1149": "WebSocket",
"1150": "MediaStreamConstraintsNameValue",
"1151": "MediaStreamConstraintsFromDictionary",
"1152": "MediaStreamConstraintsConformant",
"1153": "CSSSelectorIndirectAdjacent",
"1156": "CreateImageBitmap",
"1157": "PresentationConnectionConnectEventListener",
"1158": "PresentationConnectionCloseEventListener",
"1159": "PresentationConnectionTerminateEventListener",
"1160": "DocumentCreateEventFontFaceSetLoadEvent",
"1161": "DocumentCreateEventMediaQueryListEvent",
"1162": "DocumentCreateEventAnimationEvent",
"1164": "DocumentCreateEventApplicationCacheErrorEvent",
"1166": "DocumentCreateEventBeforeUnloadEvent",
"1167": "DocumentCreateEventClipboardEvent",
"1168": "DocumentCreateEventCompositionEvent",
"1169": "DocumentCreateEventDragEvent",
"1170": "DocumentCreateEventErrorEvent",
"1171": "DocumentCreateEventFocusEvent",
"1172": "DocumentCreateEventHashChangeEvent",
"1173": "DocumentCreateEventMutationEvent",
"1174": "DocumentCreateEventPageTransitionEvent",
"1176": "DocumentCreateEventPopStateEvent",
"1177": "DocumentCreateEventProgressEvent",
"1178": "DocumentCreateEventPromiseRejectionEvent",
"1180": "DocumentCreateEventResourceProgressEvent",
"1181": "DocumentCreateEventSecurityPolicyViolationEvent",
"1182": "DocumentCreateEventTextEvent",
"1183": "DocumentCreateEventTransitionEvent",
"1184": "DocumentCreateEventWheelEvent",
"1186": "DocumentCreateEventTrackEvent",
"1187": "DocumentCreateEventWebKitAnimationEvent",
"1188": "DocumentCreateEventMutationEvents",
"1189": "DocumentCreateEventOrientationEvent",
"1190": "DocumentCreateEventSVGEvents",
"1191": "DocumentCreateEventWebKitTransitionEvent",
"1192": "DocumentCreateEventBeforeInstallPromptEvent",
"1193": "DocumentCreateEventSyncEvent",
"1195": "DocumentCreateEventDeviceMotionEvent",
"1196": "DocumentCreateEventDeviceOrientationEvent",
"1197": "DocumentCreateEventMediaEncryptedEvent",
"1198": "DocumentCreateEventMediaKeyMessageEvent",
"1199": "DocumentCreateEventGamepadEvent",
"1201": "DocumentCreateEventIDBVersionChangeEvent",
"1202": "DocumentCreateEventBlobEvent",
"1203": "DocumentCreateEventMediaStreamEvent",
"1204": "DocumentCreateEventMediaStreamTrackEvent",
"1205": "DocumentCreateEventRTCDTMFToneChangeEvent",
"1206": "DocumentCreateEventRTCDataChannelEvent",
"1207": "DocumentCreateEventRTCIceCandidateEvent",
"1209": "DocumentCreateEventNotificationEvent",
"1210": "DocumentCreateEventPresentationConnectionAvailableEvent",
"1211": "DocumentCreateEventPresentationConnectionCloseEvent",
"1212": "DocumentCreateEventPushEvent",
"1213": "DocumentCreateEventExtendableEvent",
"1214": "DocumentCreateEventExtendableMessageEvent",
"1215": "DocumentCreateEventFetchEvent",
"1217": "DocumentCreateEventServiceWorkerMessageEvent",
"1218": "DocumentCreateEventSpeechRecognitionError",
"1219": "DocumentCreateEventSpeechRecognitionEvent",
"1220": "DocumentCreateEventSpeechSynthesisEvent",
"1221": "DocumentCreateEventStorageEvent",
"1222": "DocumentCreateEventAudioProcessingEvent",
"1223": "DocumentCreateEventOfflineAudioCompletionEvent",
"1224": "DocumentCreateEventWebGLContextEvent",
"1225": "DocumentCreateEventMIDIConnectionEvent",
"1226": "DocumentCreateEventMIDIMessageEvent",
"1227": "DocumentCreateEventCloseEvent",
"1228": "DocumentCreateEventKeyboardEvents",
"1229": "HTMLMediaElement",
"1230": "HTMLMediaElementInDocument",
"1231": "HTMLMediaElementControlsAttribute",
"1233": "V8Animation_Oncancel_AttributeGetter",
"1234": "V8Animation_Oncancel_AttributeSetter",
"1235": "V8HTMLCommentInExternalScript",
"1236": "V8HTMLComment",
"1237": "V8SloppyModeBlockScopedFunctionRedefinition",
"1238": "V8ForInInitializer",
"1239": "V8Animation_Id_AttributeGetter",
"1240": "V8Animation_Id_AttributeSetter",
"1243": "WebAnimationHyphenatedProperty",
"1244": "FormControlsCollectionReturnsRadioNodeListForFieldSet",
"1245": "ApplicationCacheManifestSelectInsecureOrigin",
"1246": "ApplicationCacheManifestSelectSecureOrigin",
"1247": "ApplicationCacheAPIInsecureOrigin",
"1248": "ApplicationCacheAPISecureOrigin",
"1249": "CSSAtRuleApply",
"1250": "CSSSelectorPseudoAny",
"1251": "PannerNodeSetVelocity",
"1252": "DocumentAllItemNoArguments",
"1253": "DocumentAllItemNamed",
"1254": "DocumentAllItemIndexed",
"1255": "DocumentAllItemIndexedWithNonNumber",
"1256": "DocumentAllLegacyCallNoArguments",
"1257": "DocumentAllLegacyCallNamed",
"1258": "DocumentAllLegacyCallIndexed",
"1259": "DocumentAllLegacyCallIndexedWithNonNumber",
"1260": "DocumentAllLegacyCallTwoArguments",
"1263": "HTMLLabelElementControlForNonFormAssociatedElement",
"1265": "HTMLMediaElementLoadNetworkEmptyNotPaused",
"1267": "V8Window_WebkitSpeechGrammar_ConstructorGetter",
"1268": "V8Window_WebkitSpeechGrammarList_ConstructorGetter",
"1269": "V8Window_WebkitSpeechRecognition_ConstructorGetter",
"1270": "V8Window_WebkitSpeechRecognitionError_ConstructorGetter",
"1271": "V8Window_WebkitSpeechRecognitionEvent_ConstructorGetter",
"1272": "V8Window_SpeechSynthesis_AttributeGetter",
"1273": "V8IDBFactory_WebkitGetDatabaseNames_Method",
"1274": "ImageDocument",
"1275": "ScriptPassesCSPDynamic",
"1277": "CSPWithStrictDynamic",
"1278": "ScrollAnchored",
"1279": "AddEventListenerFourArguments",
"1280": "RemoveEventListenerFourArguments",
"1281": "InvalidReportUriDirectiveInMetaCSP",
"1282": "InvalidSandboxDirectiveInMetaCSP",
"1283": "InvalidFrameAncestorsDirectiveInMetaCSP",
"1287": "SVGCalcModeDiscrete",
"1288": "SVGCalcModeLinear",
"1289": "SVGCalcModePaced",
"1290": "SVGCalcModeSpline",
"1291": "FormSubmissionStarted",
"1292": "FormValidationStarted",
"1293": "FormValidationAbortedSubmission",
"1294": "FormValidationShowedMessage",
"1295": "WebAnimationsEasingAsFunctionLinear",
"1296": "WebAnimationsEasingAsFunctionOther",
"1297": "V8Document_Images_AttributeGetter",
"1298": "V8Document_Embeds_AttributeGetter",
"1299": "V8Document_Plugins_AttributeGetter",
"1300": "V8Document_Links_AttributeGetter",
"1301": "V8Document_Forms_AttributeGetter",
"1302": "V8Document_Scripts_AttributeGetter",
"1303": "V8Document_Anchors_AttributeGetter",
"1304": "V8Document_Applets_AttributeGetter",
"1305": "XMLHttpRequestCrossOriginWithCredentials",
"1306": "MediaStreamTrackRemote",
"1307": "V8Node_IsConnected_AttributeGetter",
"1308": "ShadowRootDelegatesFocus",
"1309": "MixedShadowRootV0AndV1",
"1310": "ImageDocumentInFrame",
"1311": "MediaDocument",
"1312": "MediaDocumentInFrame",
"1313": "PluginDocument",
"1314": "PluginDocumentInFrame",
"1315": "SinkDocument",
"1316": "SinkDocumentInFrame",
"1317": "TextDocument",
"1318": "TextDocumentInFrame",
"1319": "ViewSourceDocument",
"1320": "FileAPINativeLineEndings",
"1321": "PointerEventAttributeCount",
"1322": "CompositedReplication",
"1323": "EncryptedMediaAllSelectedContentTypesHaveCodecs",
"1324": "EncryptedMediaAllSelectedContentTypesMissingCodecs",
"1325": "V8DataTransferItem_WebkitGetAsEntry_Method",
"1326": "V8HTMLInputElement_WebkitEntries_AttributeGetter",
"1327": "Entry_Filesystem_AttributeGetter_IsolatedFileSystem",
"1328": "Entry_GetMetadata_Method_IsolatedFileSystem",
"1329": "Entry_MoveTo_Method_IsolatedFileSystem",
"1330": "Entry_CopyTo_Method_IsolatedFileSystem",
"1331": "Entry_Remove_Method_IsolatedFileSystem",
"1332": "Entry_GetParent_Method_IsolatedFileSystem",
"1333": "Entry_ToURL_Method_IsolatedFileSystem",
"1334": "During_Microtask_Alert",
"1335": "During_Microtask_Confirm",
"1336": "During_Microtask_Print",
"1337": "During_Microtask_Prompt",
"1338": "During_Microtask_SyncXHR",
"1342": "CredentialManagerGetReturnedCredential",
"1343": "GeolocationInsecureOriginDeprecatedNotRemoved",
"1344": "GeolocationInsecureOriginIframeDeprecatedNotRemoved",
"1345": "ProgressElementWithNoneAppearance",
"1346": "ProgressElementWithProgressBarAppearance",
"1347": "PointerEventAddListenerCount",
"1348": "EventCancelBubbleAffected",
"1349": "EventCancelBubbleWasChangedToTrue",
"1350": "EventCancelBubbleWasChangedToFalse",
"1351": "CSSValueAppearanceNone",
"1352": "CSSValueAppearanceNotNone",
"1353": "CSSValueAppearanceOthers",
"1354": "CSSValueAppearanceButton",
"1355": "CSSValueAppearanceCaret",
"1356": "CSSValueAppearanceCheckbox",
"1357": "CSSValueAppearanceMenulist",
"1358": "CSSValueAppearanceMenulistButton",
"1359": "CSSValueAppearanceListbox",
"1360": "CSSValueAppearanceRadio",
"1361": "CSSValueAppearanceSearchField",
"1362": "CSSValueAppearanceTextField",
"1363": "AudioContextCreatePannerAutomated",
"1364": "PannerNodeSetPosition",
"1365": "PannerNodeSetOrientation",
"1366": "AudioListenerSetPosition",
"1367": "AudioListenerSetOrientation",
"1368": "IntersectionObserver_Constructor",
"1369": "DurableStoragePersist",
"1370": "DurableStoragePersisted",
"1371": "DurableStorageEstimate",
"1372": "UntrustedEventDefaultHandled",
"1375": "CSSDeepCombinatorAndShadow",
"1376": "OpacityWithPreserve3DQuirk",
"1377": "CSSSelectorPseudoReadOnly",
"1378": "CSSSelectorPseudoReadWrite",
"1379": "UnloadHandler_Navigation",
"1380": "TouchStartUserGestureUtilized",
"1381": "TouchMoveUserGestureUtilized",
"1382": "TouchEndDuringScrollUserGestureUtilized",
"1383": "CSSSelectorPseudoDefined",
"1384": "RTCPeerConnectionAddIceCandidatePromise",
"1385": "RTCPeerConnectionAddIceCandidateLegacy",
"1386": "RTCIceCandidateDefaultSdpMLineIndex",
"1389": "MediaStreamConstraintsOldAndNew",
"1390": "V8ArrayProtectorDirtied",
"1391": "V8ArraySpeciesModified",
"1392": "V8ArrayPrototypeConstructorModified",
"1393": "V8ArrayInstanceProtoModified",
"1394": "V8ArrayInstanceConstructorModified",
"1395": "V8LegacyFunctionDeclaration",
"1396": "V8RegExpPrototypeSourceGetter",
"1397": "V8RegExpPrototypeOldFlagGetter",
"1398": "V8DecimalWithLeadingZeroInStrictMode",
"1399": "FormSubmissionNotInDocumentTree",
"1400": "GetUserMediaPrefixed",
"1401": "GetUserMediaLegacy",
"1402": "GetUserMediaPromise",
"1403": "CSSFilterFunctionNoArguments",
"1404": "V8LegacyDateParser",
"1405": "OpenSearchInsecureOriginInsecureTarget",
"1406": "OpenSearchInsecureOriginSecureTarget",
"1407": "OpenSearchSecureOriginInsecureTarget",
"1408": "OpenSearchSecureOriginSecureTarget",
"1409": "RegisterProtocolHandlerSecureOrigin",
"1410": "RegisterProtocolHandlerInsecureOrigin",
"1411": "CrossOriginWindowAlert",
"1412": "CrossOriginWindowConfirm",
"1413": "CrossOriginWindowPrompt",
"1414": "CrossOriginWindowPrint",
"1415": "MediaStreamOnActive",
"1416": "MediaStreamOnInactive",
"1417": "AddEventListenerPassiveTrue",
"1418": "AddEventListenerPassiveFalse",
"1419": "CSPReferrerDirective",
"1420": "DocumentOpen",
"1421": "ElementRequestPointerLockInShadow",
"1422": "ShadowRootPointerLockElement",
"1423": "DocumentPointerLockElementInV0Shadow",
"1424": "TextAreaMaxLength",
"1425": "TextAreaMinLength",
"1426": "TopNavigationFromSubFrame",
"1427": "PrefixedElementRequestFullscreenInShadow",
"1428": "MediaSourceAbortRemove",
"1429": "MediaSourceDurationTruncatingBuffered",
"1430": "AudioContextCrossOriginIframe",
"1431": "PointerEventSetCapture",
"1432": "PointerEventDispatch",
"1433": "MIDIMessageEventReceivedTime",
"1434": "SummaryElementWithDisplayBlockAuthorRule",
"1435": "V8MediaStream_Active_AttributeGetter",
"1436": "BeforeInstallPromptEvent",
"1437": "BeforeInstallPromptEventUserChoice",
"1438": "BeforeInstallPromptEventPreventDefault",
"1439": "BeforeInstallPromptEventPrompt",
"1440": "ExecCommandAltersHTMLStructure",
"1441": "SecureContextCheckPassed",
"1442": "SecureContextCheckFailed",
"1443": "SecureContextCheckForSandboxedOriginPassed",
"1444": "SecureContextCheckForSandboxedOriginFailed",
"1445": "V8DefineGetterOrSetterWouldThrow",
"1446": "V8FunctionConstructorReturnedUndefined",
"1447": "V8BroadcastChannel_Constructor",
"1448": "V8BroadcastChannel_PostMessage_Method",
"1449": "V8BroadcastChannel_Close_Method",
"1450": "TouchStartFired",
"1451": "MouseDownFired",
"1452": "PointerDownFired",
"1453": "PointerDownFiredForTouch",
"1454": "PointerEventDispatchPointerDown",
"1455": "SVGSMILBeginOrEndEventValue",
"1456": "SVGSMILBeginOrEndSyncbaseValue",
"1457": "SVGSMILElementInsertedAfterLoad",
"1458": "V8VisualViewport_ScrollLeft_AttributeGetter",
"1459": "V8VisualViewport_ScrollTop_AttributeGetter",
"1460": "V8VisualViewport_PageX_AttributeGetter",
"1461": "V8VisualViewport_PageY_AttributeGetter",
"1462": "V8VisualViewport_ClientWidth_AttributeGetter",
"1463": "V8VisualViewport_ClientHeight_AttributeGetter",
"1464": "V8VisualViewport_Scale_AttributeGetter",
"1465": "VisualViewportScrollFired",
"1466": "VisualViewportResizeFired",
"1467": "NodeGetRootNode",
"1468": "SlotChangeEventAddListener",
"1469": "CSSValueAppearanceButtonRendered",
"1470": "CSSValueAppearanceButtonForAnchor",
"1471": "CSSValueAppearanceButtonForButton",
"1472": "CSSValueAppearanceButtonForOtherButtons",
"1473": "CSSValueAppearanceTextFieldRendered",
"1474": "CSSValueAppearanceTextFieldForSearch",
"1475": "CSSValueAppearanceTextFieldForTextField",
"1476": "RTCPeerConnectionGetStats",
"1477": "SVGSMILAnimationAppliedEffect",
"1478": "PerformanceResourceTimingSizes",
"1479": "EventSourceDocument",
"1480": "EventSourceWorker",
"1481": "SingleOriginInTimingAllowOrigin",
"1482": "MultipleOriginsInTimingAllowOrigin",
"1483": "StarInTimingAllowOrigin",
"1484": "SVGSMILAdditiveAnimation",
"1485": "SendBeaconWithNonSimpleContentType",
"1486": "ChromeLoadTimesRequestTime",
"1487": "ChromeLoadTimesStartLoadTime",
"1488": "ChromeLoadTimesCommitLoadTime",
"1489": "ChromeLoadTimesFinishDocumentLoadTime",
"1490": "ChromeLoadTimesFinishLoadTime",
"1491": "ChromeLoadTimesFirstPaintTime",
"1492": "ChromeLoadTimesFirstPaintAfterLoadTime",
"1493": "ChromeLoadTimesNavigationType",
"1494": "ChromeLoadTimesWasFetchedViaSpdy",
"1495": "ChromeLoadTimesWasNpnNegotiated",
"1496": "ChromeLoadTimesNpnNegotiatedProtocol",
"1497": "ChromeLoadTimesWasAlternateProtocolAvailable",
"1498": "ChromeLoadTimesConnectionInfo",
"1499": "ChromeLoadTimesUnknown",
"1500": "SVGViewElement",
"1501": "WebShareShare",
"1502": "AuxclickAddListenerCount",
"1503": "HTMLCanvasElement",
"1504": "SVGSMILAnimationElementTiming",
"1505": "SVGSMILBeginEndAnimationElement",
"1506": "SVGSMILPausing",
"1507": "SVGSMILCurrentTime",
"1508": "HTMLBodyElementOnSelectionChangeAttribute",
"1509": "ForeignFetchInterception",
"1510": "MapNameMatchingStrict",
"1511": "MapNameMatchingASCIICaseless",
"1512": "MapNameMatchingUnicodeLower",
"1513": "RadioNameMatchingStrict",
"1514": "RadioNameMatchingASCIICaseless",
"1515": "RadioNameMatchingCaseFolding",
"1517": "InputSelectionGettersThrow",
"1519": "UsbGetDevices",
"1520": "UsbRequestDevice",
"1521": "UsbDeviceOpen",
"1522": "UsbDeviceClose",
"1523": "UsbDeviceSelectConfiguration",
"1524": "UsbDeviceClaimInterface",
"1525": "UsbDeviceReleaseInterface",
"1526": "UsbDeviceSelectAlternateInterface",
"1527": "UsbDeviceControlTransferIn",
"1528": "UsbDeviceControlTransferOut",
"1529": "UsbDeviceClearHalt",
"1530": "UsbDeviceTransferIn",
"1531": "UsbDeviceTransferOut",
"1532": "UsbDeviceIsochronousTransferIn",
"1533": "UsbDeviceIsochronousTransferOut",
"1534": "UsbDeviceReset",
"1535": "PointerEnterLeaveFired",
"1536": "PointerOverOutFired",
"1539": "DraggableAttribute",
"1540": "CleanScriptElementWithNonce",
"1541": "PotentiallyInjectedScriptElementWithNonce",
"1542": "PendingStylesheetAddedAfterBodyStarted",
"1543": "UntrustedMouseDownEventDispatchedToSelect",
"1544": "BlockedSniffingAudioToScript",
"1545": "BlockedSniffingVideoToScript",
"1546": "BlockedSniffingCSVToScript",
"1547": "MetaSetCookie",
"1548": "MetaRefresh",
"1549": "MetaSetCookieWhenCSPBlocksInlineScript",
"1550": "MetaRefreshWhenCSPBlocksInlineScript",
"1551": "MiddleClickAutoscrollStart",
"1552": "ClipCssOfFixedPositionElement",
"1553": "RTCPeerConnectionCreateOfferOptionsOfferToReceive",
"1554": "DragAndDropScrollStart",
"1555": "PresentationConnectionListConnectionAvailableEventListener",
"1556": "WebAudioAutoplayCrossOriginIframe",
"1557": "ScriptInvalidTypeOrLanguage",
"1558": "VRGetDisplays",
"1559": "VRPresent",
"1560": "VRDeprecatedGetPose",
"1561": "WebAudioAnalyserNode",
"1562": "WebAudioAudioBuffer",
"1563": "WebAudioAudioBufferSourceNode",
"1564": "WebAudioBiquadFilterNode",
"1565": "WebAudioChannelMergerNode",
"1566": "WebAudioChannelSplitterNode",
"1567": "WebAudioConvolverNode",
"1568": "WebAudioDelayNode",
"1569": "WebAudioDynamicsCompressorNode",
"1570": "WebAudioGainNode",
"1571": "WebAudioIIRFilterNode",
"1572": "WebAudioMediaElementAudioSourceNode",
"1573": "WebAudioOscillatorNode",
"1574": "WebAudioPannerNode",
"1575": "WebAudioPeriodicWave",
"1576": "WebAudioStereoPannerNode",
"1577": "WebAudioWaveShaperNode",
"1578": "CSSZoomReset",
"1579": "CSSZoomDocument",
"1580": "PaymentAddressCareOf",
"1581": "XSSAuditorBlockedScript",
"1582": "XSSAuditorBlockedEntirePage",
"1583": "XSSAuditorDisabled",
"1584": "XSSAuditorEnabledFilter",
"1585": "XSSAuditorEnabledBlock",
"1586": "XSSAuditorInvalid",
"1587": "SVGCursorElement",
"1588": "SVGCursorElementHasClient",
"1589": "TextInputEventOnInput",
"1590": "TextInputEventOnTextArea",
"1591": "TextInputEventOnContentEditable",
"1592": "TextInputEventOnNotNode",
"1593": "WebkitBeforeTextInsertedOnInput",
"1594": "WebkitBeforeTextInsertedOnTextArea",
"1595": "WebkitBeforeTextInsertedOnContentEditable",
"1596": "WebkitBeforeTextInsertedOnNotNode",
"1597": "WebkitEditableContentChangedOnInput",
"1598": "WebkitEditableContentChangedOnTextArea",
"1599": "WebkitEditableContentChangedOnContentEditable",
"1600": "WebkitEditableContentChangedOnNotNode",
"1601": "V8NavigatorUserMediaError_ConstraintName_AttributeGetter",
"1602": "V8HTMLMediaElement_SrcObject_AttributeGetter",
"1603": "V8HTMLMediaElement_SrcObject_AttributeSetter",
"1604": "CreateObjectURLBlob",
"1605": "CreateObjectURLMediaSource",
"1606": "CreateObjectURLMediaStream",
"1607": "DocumentCreateTouchWindowNull",
"1608": "DocumentCreateTouchWindowWrongType",
"1609": "DocumentCreateTouchTargetNull",
"1610": "DocumentCreateTouchTargetWrongType",
"1611": "DocumentCreateTouchLessThanSevenArguments",
"1612": "DocumentCreateTouchMoreThanSevenArguments",
"1613": "EncryptedMediaCapabilityProvided",
"1614": "EncryptedMediaCapabilityNotProvided",
"1615": "LongTaskObserver",
"1616": "CSSMotionInEffect",
"1617": "CSSOffsetInEffect",
"1618": "VRGetDisplaysInsecureOrigin",
"1619": "VRRequestPresent",
"1620": "VRRequestPresentInsecureOrigin",
"1621": "VRDeprecatedFieldOfView",
"1622": "VideoInCanvas",
"1623": "HiddenAutoplayedVideoInCanvas",
"1624": "OffscreenCanvas",
"1625": "GamepadPose",
"1626": "GamepadHand",
"1627": "GamepadDisplayId",
"1628": "GamepadButtonTouched",
"1629": "GamepadPoseHasOrientation",
"1630": "GamepadPoseHasPosition",
"1631": "GamepadPosePosition",
"1632": "GamepadPoseLinearVelocity",
"1633": "GamepadPoseLinearAcceleration",
"1634": "GamepadPoseOrientation",
"1635": "GamepadPoseAngularVelocity",
"1636": "GamepadPoseAngularAcceleration",
"1638": "V8RTCDataChannel_MaxRetransmitTime_AttributeGetter",
"1639": "V8RTCDataChannel_MaxRetransmits_AttributeGetter",
"1640": "V8RTCDataChannel_Reliable_AttributeGetter",
"1641": "V8RTCPeerConnection_AddStream_Method",
"1642": "V8RTCPeerConnection_CreateDTMFSender_Method",
"1643": "V8RTCPeerConnection_GetLocalStreams_Method",
"1644": "V8RTCPeerConnection_GetRemoteStreams_Method",
"1645": "V8RTCPeerConnection_GetStreamById_Method",
"1646": "V8RTCPeerConnection_RemoveStream_Method",
"1647": "V8RTCPeerConnection_UpdateIce_Method",
"1648": "RTCPeerConnectionCreateDataChannelMaxRetransmitTime",
"1649": "RTCPeerConnectionCreateDataChannelMaxRetransmits",
"1650": "AudioContextCreateConstantSource",
"1651": "WebAudioConstantSourceNode",
"1652": "LoopbackEmbeddedInSecureContext",
"1653": "LoopbackEmbeddedInNonSecureContext",
"1654": "BlinkMacSystemFont",
"1655": "RTCConfigurationIceTransportsNone",
"1656": "RTCIceServerURL",
"1657": "RTCIceServerURLs",
"1658": "OffscreenCanvasTransferToImageBitmap2D",
"1659": "OffscreenCanvasTransferToImageBitmapWebGL",
"1660": "OffscreenCanvasCommit2D",
"1661": "OffscreenCanvasCommitWebGL",
"1662": "RTCConfigurationIceTransportPolicy",
"1663": "RTCConfigurationIceTransportPolicyNone",
"1664": "RTCConfigurationIceTransports",
"1665": "DocumentFullscreenElementInV0Shadow",
"1666": "ScriptWithCSPBypassingSchemeParserInserted",
"1667": "ScriptWithCSPBypassingSchemeNotParserInserted",
"1668": "DocumentCreateElement2ndArgStringHandling",
"1669": "V8MediaRecorder_Start_Method",
"1670": "WebBluetoothRequestDevice",
"1671": "UnitlessPerspectiveInPerspectiveProperty",
"1672": "UnitlessPerspectiveInTransformProperty",
"1673": "V8RTCSessionDescription_Type_AttributeGetter",
"1674": "V8RTCSessionDescription_Type_AttributeSetter",
"1675": "V8RTCSessionDescription_Sdp_AttributeGetter",
"1676": "V8RTCSessionDescription_Sdp_AttributeSetter",
"1677": "RTCSessionDescriptionInitNoType",
"1678": "RTCSessionDescriptionInitNoSdp",
"1679": "HTMLMediaElementPreloadForcedMetadata",
"1680": "GenericSensorStart",
"1681": "GenericSensorStop",
"1682": "TouchEventPreventedNoTouchAction",
"1683": "TouchEventPreventedForcedDocumentPassiveNoTouchAction",
"1684": "V8Event_StopPropagation_Method",
"1685": "V8Event_StopImmediatePropagation_Method",
"1686": "ImageCaptureConstructor",
"1687": "V8Document_RootScroller_AttributeGetter",
"1688": "V8Document_RootScroller_AttributeSetter",
"1689": "CustomElementRegistryDefine",
"1690": "LinkHeaderServiceWorker",
"1691": "CSSShadowPiercingDescendantCombinator",
"1692": "CSSFlexibleBox",
"1693": "CSSGridLayout",
"1694": "V8BarcodeDetector_Detect_Method",
"1695": "V8FaceDetector_Detect_Method"
}
########################################################################################################################
# CSS feature names from https://cs.chromium.org/chromium/src/third_party/WebKit/Source/core/frame/UseCounter.cpp
########################################################################################################################
CSS_FEATURES = {
"2": "CSSPropertyColor",
"3": "CSSPropertyDirection",
"4": "CSSPropertyDisplay",
"5": "CSSPropertyFont",
"6": "CSSPropertyFontFamily",
"7": "CSSPropertyFontSize",
"8": "CSSPropertyFontStyle",
"9": "CSSPropertyFontVariant",
"10": "CSSPropertyFontWeight",
"11": "CSSPropertyTextRendering",
"12": "CSSPropertyAliasWebkitFontFeatureSettings",
"13": "CSSPropertyFontKerning",
"14": "CSSPropertyWebkitFontSmoothing",
"15": "CSSPropertyFontVariantLigatures",
"16": "CSSPropertyWebkitLocale",
"17": "CSSPropertyWebkitTextOrientation",
"18": "CSSPropertyWebkitWritingMode",
"19": "CSSPropertyZoom",
"20": "CSSPropertyLineHeight",
"21": "CSSPropertyBackground",
"22": "CSSPropertyBackgroundAttachment",
"23": "CSSPropertyBackgroundClip",
"24": "CSSPropertyBackgroundColor",
"25": "CSSPropertyBackgroundImage",
"26": "CSSPropertyBackgroundOrigin",
"27": "CSSPropertyBackgroundPosition",
"28": "CSSPropertyBackgroundPositionX",
"29": "CSSPropertyBackgroundPositionY",
"30": "CSSPropertyBackgroundRepeat",
"31": "CSSPropertyBackgroundRepeatX",
"32": "CSSPropertyBackgroundRepeatY",
"33": "CSSPropertyBackgroundSize",
"34": "CSSPropertyBorder",
"35": "CSSPropertyBorderBottom",
"36": "CSSPropertyBorderBottomColor",
"37": "CSSPropertyBorderBottomLeftRadius",
"38": "CSSPropertyBorderBottomRightRadius",
"39": "CSSPropertyBorderBottomStyle",
"40": "CSSPropertyBorderBottomWidth",
"41": "CSSPropertyBorderCollapse",
"42": "CSSPropertyBorderColor",
"43": "CSSPropertyBorderImage",
"44": "CSSPropertyBorderImageOutset",
"45": "CSSPropertyBorderImageRepeat",
"46": "CSSPropertyBorderImageSlice",
"47": "CSSPropertyBorderImageSource",
"48": "CSSPropertyBorderImageWidth",
"49": "CSSPropertyBorderLeft",
"50": "CSSPropertyBorderLeftColor",
"51": "CSSPropertyBorderLeftStyle",
"52": "CSSPropertyBorderLeftWidth",
"53": "CSSPropertyBorderRadius",
"54": "CSSPropertyBorderRight",
"55": "CSSPropertyBorderRightColor",
"56": "CSSPropertyBorderRightStyle",
"57": "CSSPropertyBorderRightWidth",
"58": "CSSPropertyBorderSpacing",
"59": "CSSPropertyBorderStyle",
"60": "CSSPropertyBorderTop",
"61": "CSSPropertyBorderTopColor",
"62": "CSSPropertyBorderTopLeftRadius",
"63": "CSSPropertyBorderTopRightRadius",
"64": "CSSPropertyBorderTopStyle",
"65": "CSSPropertyBorderTopWidth",
"66": "CSSPropertyBorderWidth",
"67": "CSSPropertyBottom",
"68": "CSSPropertyBoxShadow",
"69": "CSSPropertyBoxSizing",
"70": "CSSPropertyCaptionSide",
"71": "CSSPropertyClear",
"72": "CSSPropertyClip",
"73": "CSSPropertyAliasWebkitClipPath",
"74": "CSSPropertyContent",
"75": "CSSPropertyCounterIncrement",
"76": "CSSPropertyCounterReset",
"77": "CSSPropertyCursor",
"78": "CSSPropertyEmptyCells",
"79": "CSSPropertyFloat",
"80": "CSSPropertyFontStretch",
"81": "CSSPropertyHeight",
"82": "CSSPropertyImageRendering",
"83": "CSSPropertyLeft",
"84": "CSSPropertyLetterSpacing",
"85": "CSSPropertyListStyle",
"86": "CSSPropertyListStyleImage",
"87": "CSSPropertyListStylePosition",
"88": "CSSPropertyListStyleType",
"89": "CSSPropertyMargin",
"90": "CSSPropertyMarginBottom",
"91": "CSSPropertyMarginLeft",
"92": "CSSPropertyMarginRight",
"93": "CSSPropertyMarginTop",
"94": "CSSPropertyMaxHeight",
"95": "CSSPropertyMaxWidth",
"96": "CSSPropertyMinHeight",
"97": "CSSPropertyMinWidth",
"98": "CSSPropertyOpacity",
"99": "CSSPropertyOrphans",
"100": "CSSPropertyOutline",
"101": "CSSPropertyOutlineColor",
"102": "CSSPropertyOutlineOffset",
"103": "CSSPropertyOutlineStyle",
"104": "CSSPropertyOutlineWidth",
"105": "CSSPropertyOverflow",
"106": "CSSPropertyOverflowWrap",
"107": "CSSPropertyOverflowX",
"108": "CSSPropertyOverflowY",
"109": "CSSPropertyPadding",
"110": "CSSPropertyPaddingBottom",
"111": "CSSPropertyPaddingLeft",
"112": "CSSPropertyPaddingRight",
"113": "CSSPropertyPaddingTop",
"114": "CSSPropertyPage",
"115": "CSSPropertyPageBreakAfter",
"116": "CSSPropertyPageBreakBefore",
"117": "CSSPropertyPageBreakInside",
"118": "CSSPropertyPointerEvents",
"119": "CSSPropertyPosition",
"120": "CSSPropertyQuotes",
"121": "CSSPropertyResize",
"122": "CSSPropertyRight",
"123": "CSSPropertySize",
"124": "CSSPropertySrc",
"125": "CSSPropertySpeak",
"126": "CSSPropertyTableLayout",
"127": "CSSPropertyTabSize",
"128": "CSSPropertyTextAlign",
"129": "CSSPropertyTextDecoration",
"130": "CSSPropertyTextIndent",
"136": "CSSPropertyTextOverflow",
"142": "CSSPropertyTextShadow",
"143": "CSSPropertyTextTransform",
"149": "CSSPropertyTop",
"150": "CSSPropertyTransition",
"151": "CSSPropertyTransitionDelay",
"152": "CSSPropertyTransitionDuration",
"153": "CSSPropertyTransitionProperty",
"154": "CSSPropertyTransitionTimingFunction",
"155": "CSSPropertyUnicodeBidi",
"156": "CSSPropertyUnicodeRange",
"157": "CSSPropertyVerticalAlign",
"158": "CSSPropertyVisibility",
"159": "CSSPropertyWhiteSpace",
"160": "CSSPropertyWidows",
"161": "CSSPropertyWidth",
"162": "CSSPropertyWordBreak",
"163": "CSSPropertyWordSpacing",
"164": "CSSPropertyWordWrap",
"165": "CSSPropertyZIndex",
"166": "CSSPropertyAliasWebkitAnimation",
"167": "CSSPropertyAliasWebkitAnimationDelay",
"168": "CSSPropertyAliasWebkitAnimationDirection",
"169": "CSSPropertyAliasWebkitAnimationDuration",
"170": "CSSPropertyAliasWebkitAnimationFillMode",
"171": "CSSPropertyAliasWebkitAnimationIterationCount",
"172": "CSSPropertyAliasWebkitAnimationName",
"173": "CSSPropertyAliasWebkitAnimationPlayState",
"174": "CSSPropertyAliasWebkitAnimationTimingFunction",
"175": "CSSPropertyWebkitAppearance",
"176": "CSSPropertyWebkitAspectRatio",
"177": "CSSPropertyAliasWebkitBackfaceVisibility",
"178": "CSSPropertyWebkitBackgroundClip",
"179": "CSSPropertyWebkitBackgroundComposite",
"180": "CSSPropertyWebkitBackgroundOrigin",
"181": "CSSPropertyAliasWebkitBackgroundSize",
"182": "CSSPropertyWebkitBorderAfter",
"183": "CSSPropertyWebkitBorderAfterColor",
"184": "CSSPropertyWebkitBorderAfterStyle",
"185": "CSSPropertyWebkitBorderAfterWidth",
"186": "CSSPropertyWebkitBorderBefore",
"187": "CSSPropertyWebkitBorderBeforeColor",
"188": "CSSPropertyWebkitBorderBeforeStyle",
"189": "CSSPropertyWebkitBorderBeforeWidth",
"190": "CSSPropertyWebkitBorderEnd",
"191": "CSSPropertyWebkitBorderEndColor",
"192": "CSSPropertyWebkitBorderEndStyle",
"193": "CSSPropertyWebkitBorderEndWidth",
"194": "CSSPropertyWebkitBorderFit",
"195": "CSSPropertyWebkitBorderHorizontalSpacing",
"196": "CSSPropertyWebkitBorderImage",
"197": "CSSPropertyAliasWebkitBorderRadius",
"198": "CSSPropertyWebkitBorderStart",
"199": "CSSPropertyWebkitBorderStartColor",
"200": "CSSPropertyWebkitBorderStartStyle",
"201": "CSSPropertyWebkitBorderStartWidth",
"202": "CSSPropertyWebkitBorderVerticalSpacing",
"203": "CSSPropertyWebkitBoxAlign",
"204": "CSSPropertyWebkitBoxDirection",
"205": "CSSPropertyWebkitBoxFlex",
"206": "CSSPropertyWebkitBoxFlexGroup",
"207": "CSSPropertyWebkitBoxLines",
"208": "CSSPropertyWebkitBoxOrdinalGroup",
"209": "CSSPropertyWebkitBoxOrient",
"210": "CSSPropertyWebkitBoxPack",
"211": "CSSPropertyWebkitBoxReflect",
"212": "CSSPropertyAliasWebkitBoxShadow",
"215": "CSSPropertyWebkitColumnBreakAfter",
"216": "CSSPropertyWebkitColumnBreakBefore",
"217": "CSSPropertyWebkitColumnBreakInside",
"218": "CSSPropertyAliasWebkitColumnCount",
"219": "CSSPropertyAliasWebkitColumnGap",
"220": "CSSPropertyWebkitColumnProgression",
"221": "CSSPropertyAliasWebkitColumnRule",
"222": "CSSPropertyAliasWebkitColumnRuleColor",
"223": "CSSPropertyAliasWebkitColumnRuleStyle",
"224": "CSSPropertyAliasWebkitColumnRuleWidth",
"225": "CSSPropertyAliasWebkitColumnSpan",
"226": "CSSPropertyAliasWebkitColumnWidth",
"227": "CSSPropertyAliasWebkitColumns",
"228": "CSSPropertyWebkitBoxDecorationBreak",
"229": "CSSPropertyWebkitFilter",
"230": "CSSPropertyAlignContent",
"231": "CSSPropertyAlignItems",
"232": "CSSPropertyAlignSelf",
"233": "CSSPropertyFlex",
"234": "CSSPropertyFlexBasis",
"235": "CSSPropertyFlexDirection",
"236": "CSSPropertyFlexFlow",
"237": "CSSPropertyFlexGrow",
"238": "CSSPropertyFlexShrink",
"239": "CSSPropertyFlexWrap",
"240": "CSSPropertyJustifyContent",
"241": "CSSPropertyWebkitFontSizeDelta",
"242": "CSSPropertyGridTemplateColumns",
"243": "CSSPropertyGridTemplateRows",
"244": "CSSPropertyGridColumnStart",
"245": "CSSPropertyGridColumnEnd",
"246": "CSSPropertyGridRowStart",
"247": "CSSPropertyGridRowEnd",
"248": "CSSPropertyGridColumn",
"249": "CSSPropertyGridRow",
"250": "CSSPropertyGridAutoFlow",
"251": "CSSPropertyWebkitHighlight",
"252": "CSSPropertyWebkitHyphenateCharacter",
"257": "CSSPropertyWebkitLineBoxContain",
"258": "CSSPropertyWebkitLineAlign",
"259": "CSSPropertyWebkitLineBreak",
"260": "CSSPropertyWebkitLineClamp",
"261": "CSSPropertyWebkitLineGrid",
"262": "CSSPropertyWebkitLineSnap",
"263": "CSSPropertyWebkitLogicalWidth",
"264": "CSSPropertyWebkitLogicalHeight",
"265": "CSSPropertyWebkitMarginAfterCollapse",
"266": "CSSPropertyWebkitMarginBeforeCollapse",
"267": "CSSPropertyWebkitMarginBottomCollapse",
"268": "CSSPropertyWebkitMarginTopCollapse",
"269": "CSSPropertyWebkitMarginCollapse",
"270": "CSSPropertyWebkitMarginAfter",
"271": "CSSPropertyWebkitMarginBefore",
"272": "CSSPropertyWebkitMarginEnd",
"273": "CSSPropertyWebkitMarginStart",
"280": "CSSPropertyWebkitMask",
"281": "CSSPropertyWebkitMaskBoxImage",
"282": "CSSPropertyWebkitMaskBoxImageOutset",
"283": "CSSPropertyWebkitMaskBoxImageRepeat",
"284": "CSSPropertyWebkitMaskBoxImageSlice",
"285": "CSSPropertyWebkitMaskBoxImageSource",
"286": "CSSPropertyWebkitMaskBoxImageWidth",
"287": "CSSPropertyWebkitMaskClip",
"288": "CSSPropertyWebkitMaskComposite",
"289": "CSSPropertyWebkitMaskImage",
"290": "CSSPropertyWebkitMaskOrigin",
"291": "CSSPropertyWebkitMaskPosition",
"292": "CSSPropertyWebkitMaskPositionX",
"293": "CSSPropertyWebkitMaskPositionY",
"294": "CSSPropertyWebkitMaskRepeat",
"295": "CSSPropertyWebkitMaskRepeatX",
"296": "CSSPropertyWebkitMaskRepeatY",
"297": "CSSPropertyWebkitMaskSize",
"298": "CSSPropertyWebkitMaxLogicalWidth",
"299": "CSSPropertyWebkitMaxLogicalHeight",
"300": "CSSPropertyWebkitMinLogicalWidth",
"301": "CSSPropertyWebkitMinLogicalHeight",
"303": "CSSPropertyOrder",
"304": "CSSPropertyWebkitPaddingAfter",
"305": "CSSPropertyWebkitPaddingBefore",
"306": "CSSPropertyWebkitPaddingEnd",
"307": "CSSPropertyWebkitPaddingStart",
"308": "CSSPropertyAliasWebkitPerspective",
"309": "CSSPropertyAliasWebkitPerspectiveOrigin",
"310": "CSSPropertyWebkitPerspectiveOriginX",
"311": "CSSPropertyWebkitPerspectiveOriginY",
"312": "CSSPropertyWebkitPrintColorAdjust",
"313": "CSSPropertyWebkitRtlOrdering",
"314": "CSSPropertyWebkitRubyPosition",
"315": "CSSPropertyWebkitTextCombine",
"316": "CSSPropertyWebkitTextDecorationsInEffect",
"317": "CSSPropertyWebkitTextEmphasis",
"318": "CSSPropertyWebkitTextEmphasisColor",
"319": "CSSPropertyWebkitTextEmphasisPosition",
"320": "CSSPropertyWebkitTextEmphasisStyle",
"321": "CSSPropertyWebkitTextFillColor",
"322": "CSSPropertyWebkitTextSecurity",
"323": "CSSPropertyWebkitTextStroke",
"324": "CSSPropertyWebkitTextStrokeColor",
"325": "CSSPropertyWebkitTextStrokeWidth",
"326": "CSSPropertyAliasWebkitTransform",
"327": "CSSPropertyAliasWebkitTransformOrigin",
"328": "CSSPropertyWebkitTransformOriginX",
"329": "CSSPropertyWebkitTransformOriginY",
"330": "CSSPropertyWebkitTransformOriginZ",
"331": "CSSPropertyAliasWebkitTransformStyle",
"332": "CSSPropertyAliasWebkitTransition",
"333": "CSSPropertyAliasWebkitTransitionDelay",
"334": "CSSPropertyAliasWebkitTransitionDuration",
"335": "CSSPropertyAliasWebkitTransitionProperty",
"336": "CSSPropertyAliasWebkitTransitionTimingFunction",
"337": "CSSPropertyWebkitUserDrag",
"338": "CSSPropertyWebkitUserModify",
"339": "CSSPropertyAliasWebkitUserSelect",
"340": "CSSPropertyWebkitFlowInto",
"341": "CSSPropertyWebkitFlowFrom",
"342": "CSSPropertyWebkitRegionFragment",
"343": "CSSPropertyWebkitRegionBreakAfter",
"344": "CSSPropertyWebkitRegionBreakBefore",
"345": "CSSPropertyWebkitRegionBreakInside",
"346": "CSSPropertyShapeInside",
"347": "CSSPropertyShapeOutside",
"348": "CSSPropertyShapeMargin",
"349": "CSSPropertyShapePadding",
"350": "CSSPropertyWebkitWrapFlow",
"351": "CSSPropertyWebkitWrapThrough",
"355": "CSSPropertyClipPath",
"356": "CSSPropertyClipRule",
"357": "CSSPropertyMask",
"359": "CSSPropertyFilter",
"360": "CSSPropertyFloodColor",
"361": "CSSPropertyFloodOpacity",
"362": "CSSPropertyLightingColor",
"363": "CSSPropertyStopColor",
"364": "CSSPropertyStopOpacity",
"365": "CSSPropertyColorInterpolation",
"366": "CSSPropertyColorInterpolationFilters",
"367": "CSSPropertyColorProfile",
"368": "CSSPropertyColorRendering",
"369": "CSSPropertyFill",
"370": "CSSPropertyFillOpacity",
"371": "CSSPropertyFillRule",
"372": "CSSPropertyMarker",
"373": "CSSPropertyMarkerEnd",
"374": "CSSPropertyMarkerMid",
"375": "CSSPropertyMarkerStart",
"376": "CSSPropertyMaskType",
"377": "CSSPropertyShapeRendering",
"378": "CSSPropertyStroke",
"379": "CSSPropertyStrokeDasharray",
"380": "CSSPropertyStrokeDashoffset",
"381": "CSSPropertyStrokeLinecap",
"382": "CSSPropertyStrokeLinejoin",
"383": "CSSPropertyStrokeMiterlimit",
"384": "CSSPropertyStrokeOpacity",
"385": "CSSPropertyStrokeWidth",
"386": "CSSPropertyAlignmentBaseline",
"387": "CSSPropertyBaselineShift",
"388": "CSSPropertyDominantBaseline",
"392": "CSSPropertyTextAnchor",
"393": "CSSPropertyVectorEffect",
"394": "CSSPropertyWritingMode",
"399": "CSSPropertyWebkitBlendMode",
"400": "CSSPropertyWebkitBackgroundBlendMode",
"401": "CSSPropertyTextDecorationLine",
"402": "CSSPropertyTextDecorationStyle",
"403": "CSSPropertyTextDecorationColor",
"404": "CSSPropertyTextAlignLast",
"405": "CSSPropertyTextUnderlinePosition",
"406": "CSSPropertyMaxZoom",
"407": "CSSPropertyMinZoom",
"408": "CSSPropertyOrientation",
"409": "CSSPropertyUserZoom",
"412": "CSSPropertyWebkitAppRegion",
"413": "CSSPropertyAliasWebkitFilter",
"414": "CSSPropertyWebkitBoxDecorationBreak",
"415": "CSSPropertyWebkitTapHighlightColor",
"416": "CSSPropertyBufferedRendering",
"417": "CSSPropertyGridAutoRows",
"418": "CSSPropertyGridAutoColumns",
"419": "CSSPropertyBackgroundBlendMode",
"420": "CSSPropertyMixBlendMode",
"421": "CSSPropertyTouchAction",
"422": "CSSPropertyGridArea",
"423": "CSSPropertyGridTemplateAreas",
"424": "CSSPropertyAnimation",
"425": "CSSPropertyAnimationDelay",
"426": "CSSPropertyAnimationDirection",
"427": "CSSPropertyAnimationDuration",
"428": "CSSPropertyAnimationFillMode",
"429": "CSSPropertyAnimationIterationCount",
"430": "CSSPropertyAnimationName",
"431": "CSSPropertyAnimationPlayState",
"432": "CSSPropertyAnimationTimingFunction",
"433": "CSSPropertyObjectFit",
"434": "CSSPropertyPaintOrder",
"435": "CSSPropertyMaskSourceType",
"436": "CSSPropertyIsolation",
"437": "CSSPropertyObjectPosition",
"438": "CSSPropertyInternalCallback",
"439": "CSSPropertyShapeImageThreshold",
"440": "CSSPropertyColumnFill",
"441": "CSSPropertyTextJustify",
"443": "CSSPropertyJustifySelf",
"444": "CSSPropertyScrollBehavior",
"445": "CSSPropertyWillChange",
"446": "CSSPropertyTransform",
"447": "CSSPropertyTransformOrigin",
"448": "CSSPropertyTransformStyle",
"449": "CSSPropertyPerspective",
"450": "CSSPropertyPerspectiveOrigin",
"451": "CSSPropertyBackfaceVisibility",
"452": "CSSPropertyGridTemplate",
"453": "CSSPropertyGrid",
"454": "CSSPropertyAll",
"455": "CSSPropertyJustifyItems",
"457": "CSSPropertyAliasMotionPath",
"458": "CSSPropertyAliasMotionOffset",
"459": "CSSPropertyAliasMotionRotation",
"460": "CSSPropertyMotion",
"461": "CSSPropertyX",
"462": "CSSPropertyY",
"463": "CSSPropertyRx",
"464": "CSSPropertyRy",
"465": "CSSPropertyFontSizeAdjust",
"466": "CSSPropertyCx",
"467": "CSSPropertyCy",
"468": "CSSPropertyR",
"469": "CSSPropertyAliasEpubCaptionSide",
"470": "CSSPropertyAliasEpubTextCombine",
"471": "CSSPropertyAliasEpubTextEmphasis",
"472": "CSSPropertyAliasEpubTextEmphasisColor",
"473": "CSSPropertyAliasEpubTextEmphasisStyle",
"474": "CSSPropertyAliasEpubTextOrientation",
"475": "CSSPropertyAliasEpubTextTransform",
"476": "CSSPropertyAliasEpubWordBreak",
"477": "CSSPropertyAliasEpubWritingMode",
"478": "CSSPropertyAliasWebkitAlignContent",
"479": "CSSPropertyAliasWebkitAlignItems",
"480": "CSSPropertyAliasWebkitAlignSelf",
"481": "CSSPropertyAliasWebkitBorderBottomLeftRadius",
"482": "CSSPropertyAliasWebkitBorderBottomRightRadius",
"483": "CSSPropertyAliasWebkitBorderTopLeftRadius",
"484": "CSSPropertyAliasWebkitBorderTopRightRadius",
"485": "CSSPropertyAliasWebkitBoxSizing",
"486": "CSSPropertyAliasWebkitFlex",
"487": "CSSPropertyAliasWebkitFlexBasis",
"488": "CSSPropertyAliasWebkitFlexDirection",
"489": "CSSPropertyAliasWebkitFlexFlow",
"490": "CSSPropertyAliasWebkitFlexGrow",
"491": "CSSPropertyAliasWebkitFlexShrink",
"492": "CSSPropertyAliasWebkitFlexWrap",
"493": "CSSPropertyAliasWebkitJustifyContent",
"494": "CSSPropertyAliasWebkitOpacity",
"495": "CSSPropertyAliasWebkitOrder",
"496": "CSSPropertyAliasWebkitShapeImageThreshold",
"497": "CSSPropertyAliasWebkitShapeMargin",
"498": "CSSPropertyAliasWebkitShapeOutside",
"499": "CSSPropertyScrollSnapType",
"500": "CSSPropertyScrollSnapPointsX",
"501": "CSSPropertyScrollSnapPointsY",
"502": "CSSPropertyScrollSnapCoordinate",
"503": "CSSPropertyScrollSnapDestination",
"504": "CSSPropertyTranslate",
"505": "CSSPropertyRotate",
"506": "CSSPropertyScale",
"507": "CSSPropertyImageOrientation",
"508": "CSSPropertyBackdropFilter",
"509": "CSSPropertyTextCombineUpright",
"510": "CSSPropertyTextOrientation",
"511": "CSSPropertyGridColumnGap",
"512": "CSSPropertyGridRowGap",
"513": "CSSPropertyGridGap",
"514": "CSSPropertyFontFeatureSettings",
"515": "CSSPropertyVariable",
"516": "CSSPropertyFontDisplay",
"517": "CSSPropertyContain",
"518": "CSSPropertyD",
"519": "CSSPropertySnapHeight",
"520": "CSSPropertyBreakAfter",
"521": "CSSPropertyBreakBefore",
"522": "CSSPropertyBreakInside",
"523": "CSSPropertyColumnCount",
"524": "CSSPropertyColumnGap",
"525": "CSSPropertyColumnRule",
"526": "CSSPropertyColumnRuleColor",
"527": "CSSPropertyColumnRuleStyle",
"528": "CSSPropertyColumnRuleWidth",
"529": "CSSPropertyColumnSpan",
"530": "CSSPropertyColumnWidth",
"531": "CSSPropertyColumns",
"532": "CSSPropertyApplyAtRule",
"533": "CSSPropertyFontVariantCaps",
"534": "CSSPropertyHyphens",
"535": "CSSPropertyFontVariantNumeric",
"536": "CSSPropertyTextSizeAdjust",
"537": "CSSPropertyAliasWebkitTextSizeAdjust",
"538": "CSSPropertyOverflowAnchor",
"539": "CSSPropertyUserSelect",
"540": "CSSPropertyOffsetDistance",
"541": "CSSPropertyOffsetPath",
"542": "CSSPropertyOffsetRotation",
"543": "CSSPropertyOffset",
"544": "CSSPropertyOffsetAnchor",
"545": "CSSPropertyOffsetPosition",
"546": "CSSPropertyTextDecorationSkip",
"547": "CSSPropertyCaretColor",
"548": "CSSPropertyOffsetRotate"
}
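# Illustrative helper (an addition for clarity, not part of the original
# script): map a numeric use-counter ID from a trace back to its name.
# Note the dict keys are strings, so the ID is stringified before lookup.
def lookup_css_feature(feature_id):
    """Return the CSS feature name for a numeric ID, or None if unknown."""
    return CSS_FEATURES.get(str(feature_id))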
if '__main__' == __name__:
# import cProfile
# cProfile.run('main()', None, 2)
main()
| xeonchen/cdpagent | webpagetest/support/trace-parser.py | Python | apache-2.0 | 102,619 |
"""
WSGI config for gdelt project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "gdelt.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gdelt.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
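# Minimal sketch of such a wrapper (illustrative only, so it is left commented
# out; the class name is an assumption, not part of this project):
# class PassThroughMiddleware(object):
#     def __init__(self, app):
#         self.app = app
#     def __call__(self, environ, start_response):
#         return self.app(environ, start_response)
# application = PassThroughMiddleware(application)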
| Berico-Technologies/CLAVIN-contrib | gdelt/gdelt/gdelt/wsgi.py | Python | apache-2.0 | 1,416 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2009-2010 Ars Aperta, Itaapy, Pierlis, Talend.
#
# Authors: Hervé Cauwelier <[email protected]>
# David Versmisse <[email protected]>
#
# This file is part of Lpod (see: http://lpod-project.org).
# Lpod is free software; you can redistribute it and/or modify it under
# the terms of either:
#
# a) the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option)
# any later version.
# Lpod is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Lpod. If not, see <http://www.gnu.org/licenses/>.
#
# b) the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Import from the standard library
from optparse import OptionParser
from os.path import basename, splitext, exists
from sys import exit, stdout
# Import from lpod
from lpod import __version__, ODF_MANIFEST
from lpod.const import ODF_TEXT, ODF_SPREADSHEET, ODF_PRESENTATION
from lpod.document import odf_new_document, odf_get_document
from lpod.element import FIRST_CHILD
from lpod.table import import_from_csv
from lpod.toc import odf_create_toc
from lpod.scriptutils import add_option_output, StdoutWriter
from lpod.scriptutils import printerr, printinfo, get_mimetype
from lpod.scriptutils import check_target_file
CSV_SHORT = 'text/csv'
CSV_LONG = 'text/comma-separated-values'
def init_doc(filename, mimetype):
if mimetype in (ODF_TEXT, ODF_SPREADSHEET, ODF_PRESENTATION):
output_doc = odf_get_document(filename)
if mimetype == ODF_TEXT:
# Extra for text: begin with a TOC
output_body = output_doc.get_body()
output_body.insert(odf_create_toc(), FIRST_CHILD)
elif mimetype in (CSV_SHORT, CSV_LONG):
output_doc = odf_new_document('spreadsheet')
add_csv(filename, output_doc)
else:
raise NotImplementedError, mimetype
return output_doc
def _add_pictures(document, output_doc):
# Copy extra parts (images...)
manifest = output_doc.get_part(ODF_MANIFEST)
document_manifest = document.get_part(ODF_MANIFEST)
for partname in document.get_parts():
if partname.startswith('Pictures/'):
data = document.get_part(partname)
# Manually add the part to keep the name (suppose uniqueness)
output_doc.set_part(partname, data)
media_type = document_manifest.get_media_type(partname)
manifest.add_full_path(partname, media_type)
def add_odt(filename, output_doc):
document = odf_get_document(filename)
# Copy content
src_body = document.get_body()
output_body = output_doc.get_body()
for element in src_body.get_children():
tagname = element.get_tag()
# Skip TOC, etc.
if tagname in ('text:sequence-decls', 'text:table-of-content'):
continue
# Copy the rest recursively
output_body.append(element.clone())
# Add pictures/
_add_pictures(document, output_doc)
def _get_table_name(name, output_body):
if isinstance(name, str):
encoding = stdout.encoding or 'utf8'
name = unicode(name, encoding)
already_names = set([ table.get_name()
for table in output_body.get_tables() ])
if name in already_names:
i = 1
while True:
new_name = u"%s_%d" % (name, i)
if new_name not in already_names:
return new_name
i += 1
else:
return name
def add_ods(filename, output_doc):
document = odf_get_document(filename)
# Add the sheets
output_body = output_doc.get_body()
ods_body = document.get_body()
for table in ods_body.get_tables():
name = table.get_name()
name = _get_table_name(name, output_body)
table.set_name(name)
output_body.append(table)
# Add pictures/
_add_pictures(document, output_doc)
def add_csv(filename, output_doc):
output_body = output_doc.get_body()
# Make the name
name = splitext(basename(filename))[0]
name = _get_table_name(name, output_body)
table = import_from_csv(filename, name)
output_body.append(table)
def add_odp(filename, output_doc):
document = odf_get_document(filename)
# Add the pages
output_body = output_doc.get_body()
already_names = set([ page.get_name()
for page in output_body.get_draw_pages() ])
odp_body = document.get_body()
for page in odp_body.get_draw_pages():
name = page.get_name()
if name in already_names:
i = 1
while True:
new_name = u"%s_%d" % (name, i)
if new_name not in already_names:
name = new_name
break
i += 1
page.set_name(name)
already_names.add(name)
output_body.append(page)
# Add pictures/
_add_pictures(document, output_doc)
def print_incompatible(filename, type):
printerr('Cannot merge "%s" in %s document, skipping.' % (filename, type))
if __name__ == '__main__':
# Options initialisation
usage = "%prog -o FILE <file1> [<file2> ...]"
description = "Merge all input files in an unique OpenDocument file"
parser = OptionParser(usage, version=__version__,
description=description)
# --output
add_option_output(parser, complement='("-" for stdout)')
# Parse !
options, filenames = parser.parse_args()
# Arguments
if not filenames:
parser.print_help()
exit(1)
target = options.output
if target is None:
printerr('"-o" option mandatory (use "-" to print to stdout)')
exit(1)
check_target_file(target)
output_doc = None
output_type = None
# Concatenate content in the output doc
for filename in filenames:
# Exists ?
if not exists(filename):
printerr("Skip", filename, "not existing")
continue
# A good file => Only text, spreadsheet and CSV
mimetype = get_mimetype(filename)
if mimetype not in (ODF_TEXT, ODF_SPREADSHEET, ODF_PRESENTATION,
CSV_SHORT, CSV_LONG):
printerr('Skip "%s" with unknown mimetype "%s"' % (filename,
mimetype))
continue
# Not yet an output_doc ?
if output_doc is None:
# Use the first doc as the output_doc
output_doc = init_doc(filename, mimetype)
output_type = output_doc.get_type()
printinfo('%s document detected' % output_type.title())
elif mimetype == ODF_TEXT:
# Add a text doc
if output_type != 'text':
print_incompatible(filename, output_type)
continue
add_odt(filename, output_doc)
elif mimetype in (ODF_SPREADSHEET, CSV_SHORT, CSV_LONG):
# Add a spreadsheet doc
if output_type != 'spreadsheet':
print_incompatible(filename, output_type)
continue
# CSV?
if mimetype in (CSV_SHORT, CSV_LONG):
add_csv(filename, output_doc)
else:
add_ods(filename, output_doc)
elif mimetype == ODF_PRESENTATION:
# Add a presentation doc
if output_type != 'presentation':
print_incompatible(filename, output_type)
continue
add_odp(filename, output_doc)
printinfo('Add "%s"' % filename)
# Extra for odt
if output_type == 'text':
output_body = output_doc.get_body()
toc = output_body.get_toc()
toc.toc_fill()
# Save
if output_doc is not None:
if target == "-":
target = StdoutWriter()
output_doc.save(target=target, pretty=True)
if options.output:
printinfo('Document "%s" generated' % options.output)
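# Example invocations (illustrative; assumes the script is installed as
# lpod-merge on the PATH):
#   lpod-merge -o merged.odt chapter1.odt chapter2.odt
#   lpod-merge -o merged.ods table1.csv table2.ods
#   lpod-merge -o - slides1.odp slides2.odp > merged.odp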
| Agicia/lpod-python | scripts/lpod-merge.py | Python | apache-2.0 | 8,413 |
# Copyright 2014 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from rosidl_adapter.parser import Constant
def test_constant_constructor():
value = Constant('bool', 'FOO', '1')
assert value
with pytest.raises(TypeError):
Constant('pkg/Foo', 'FOO', '')
with pytest.raises(NameError):
Constant('bool', 'FOO BAR', '')
with pytest.raises(ValueError):
Constant('bool', 'FOO', None)
def test_constant_methods():
assert Constant('bool', 'FOO', '1') != 23
assert Constant('bool', 'FOO', '1') == Constant('bool', 'FOO', '1')
assert Constant('bool', 'FOO', '1') != Constant('bool', 'FOO', '0')
assert Constant('bool', 'FOO', '1') != Constant('bool', 'BAR', '1')
assert Constant('bool', 'FOO', '1') != Constant('byte', 'FOO', '1')
assert str(Constant('bool', 'FOO', '1')) == 'bool FOO=True'
assert str(Constant('string', 'FOO', 'foo')) == "string FOO='foo'"
assert str(Constant('wstring', 'FOO', 'foo')) == "wstring FOO='foo'"
| ros2/rosidl | rosidl_adapter/test/test_constant.py | Python | apache-2.0 | 1,555 |
from .technical import (
AnnualizedVolatility,
Aroon,
AverageDollarVolume,
BollingerBands,
EWMA,
EWMSTD,
ExponentialWeightedMovingAverage,
ExponentialWeightedMovingStdDev,
FastStochasticOscillator,
IchimokuKinkoHyo,
LinearWeightedMovingAverage,
MACDSignal,
MaxDrawdown,
MovingAverageConvergenceDivergenceSignal,
RateOfChangePercentage,
Returns,
RSI,
SimpleMovingAverage,
TrueRange,
VWAP,
WeightedAverageValue,
)
__all__ = [
'AnnualizedVolatility',
'Aroon',
'AverageDollarVolume',
'BollingerBands',
'EWMA',
'EWMSTD',
'ExponentialWeightedMovingAverage',
'ExponentialWeightedMovingStdDev',
'FastStochasticOscillator',
'IchimokuKinkoHyo',
'LinearWeightedMovingAverage',
'MACDSignal',
'MaxDrawdown',
'MovingAverageConvergenceDivergenceSignal',
'RateOfChangePercentage',
'Returns',
'RSI',
'SimpleMovingAverage',
'TrueRange',
'VWAP',
'WeightedAverageValue',
]
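# Usage sketch (illustrative; the pricing dataset and its column names are
# assumptions and depend on the catalyst data bundle in use):
#   from catalyst.pipeline import Pipeline
#   sma = SimpleMovingAverage(inputs=[some_pricing.close], window_length=30)
#   pipe = Pipeline(columns={'sma': sma})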
| enigmampc/catalyst | catalyst/pipeline/factors/crypto/__init__.py | Python | apache-2.0 | 1,022 |
# Copyright 2019 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START composer_trigger]
from google.auth.transport.requests import Request
from google.oauth2 import id_token
import requests
IAM_SCOPE = 'https://www.googleapis.com/auth/iam'
OAUTH_TOKEN_URI = 'https://www.googleapis.com/oauth2/v4/token'
# If you are using the stable API, set this value to False
# For more info about Airflow APIs see https://cloud.google.com/composer/docs/access-airflow-api
USE_EXPERIMENTAL_API = True
def trigger_dag(data, context=None):
"""Makes a POST request to the Composer DAG Trigger API
When called via Google Cloud Functions (GCF),
data and context are Background function parameters.
For more info, refer to
https://cloud.google.com/functions/docs/writing/background#functions_background_parameters-python
To call this function from a Python script, omit the ``context`` argument
and pass in a non-null value for the ``data`` argument.
This function is currently only compatible with Composer v1 environments.
"""
# Fill in with your Composer info here
# Navigate to your webserver's login page and get this from the URL
# Or use the script found at
# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/composer/rest/get_client_id.py
client_id = 'YOUR-CLIENT-ID'
# This should be part of your webserver's URL:
# {tenant-project-id}.appspot.com
webserver_id = 'YOUR-TENANT-PROJECT'
# The name of the DAG you wish to trigger
dag_name = 'composer_sample_trigger_response_dag'
if USE_EXPERIMENTAL_API:
endpoint = f'api/experimental/dags/{dag_name}/dag_runs'
json_data = {'conf': data, 'replace_microseconds': 'false'}
else:
endpoint = f'api/v1/dags/{dag_name}/dagRuns'
json_data = {'conf': data}
webserver_url = (
'https://'
+ webserver_id
+ '.appspot.com/'
+ endpoint
)
# Make a POST request to IAP which then Triggers the DAG
make_iap_request(
webserver_url, client_id, method='POST', json=json_data)
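# Example of calling trigger_dag directly from a Python script, as the
# docstring above describes (the payload contents are an assumption):
# if __name__ == '__main__':
#     trigger_dag(data={'bucket': 'example-bucket', 'name': 'trigger-file.txt'})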
# This code is copied from
# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/iap/make_iap_request.py
# START COPIED IAP CODE
def make_iap_request(url, client_id, method='GET', **kwargs):
"""Makes a request to an application protected by Identity-Aware Proxy.
Args:
url: The Identity-Aware Proxy-protected URL to fetch.
client_id: The client ID used by Identity-Aware Proxy.
method: The request method to use
('GET', 'OPTIONS', 'HEAD', 'POST', 'PUT', 'PATCH', 'DELETE')
**kwargs: Any of the parameters defined for the request function:
https://github.com/requests/requests/blob/master/requests/api.py
If no timeout is provided, it is set to 90 by default.
Returns:
The page body, or raises an exception if the page couldn't be retrieved.
"""
# Set the default timeout, if missing
if 'timeout' not in kwargs:
kwargs['timeout'] = 90
# Obtain an OpenID Connect (OIDC) token from metadata server or using service
# account.
google_open_id_connect_token = id_token.fetch_id_token(Request(), client_id)
# Fetch the Identity-Aware Proxy-protected URL, including an
# Authorization header containing "Bearer " followed by a
# Google-issued OpenID Connect token for the service account.
resp = requests.request(
method, url,
headers={'Authorization': 'Bearer {}'.format(
google_open_id_connect_token)}, **kwargs)
if resp.status_code == 403:
raise Exception('Service account does not have permission to '
'access the IAP-protected application.')
elif resp.status_code != 200:
raise Exception(
'Bad response from application: {!r} / {!r} / {!r}'.format(
resp.status_code, resp.headers, resp.text))
else:
return resp.text
# END COPIED IAP CODE
# [END composer_trigger]
| GoogleCloudPlatform/python-docs-samples | composer/functions/composer_storage_trigger.py | Python | apache-2.0 | 4,541 |
# Copyright 2015 Infoblox Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from heat.tests import common
from heat_infoblox import connector
from heat_infoblox import resource_utils
class ResourceUtilsTest(common.HeatTestCase):
def setUp(self):
super(ResourceUtilsTest, self).setUp()
def test_wapi_config_file(self):
connector.Infoblox = mock.MagicMock()
resource_utils.connect_to_infoblox({'url': 'test_wapi_url',
'username': 'test_username',
'password': 'test_password',
'sslverify': False})
connector.Infoblox.assert_called_with({'url': 'test_wapi_url',
'username': 'test_username',
'password': 'test_password',
'sslverify': False})
def test_get_vrrp_mac(self):
# For IPv4 should be '00:00:5E:00:01:00' with last octet = VRID
mac_v4 = resource_utils.get_vrrp_mac(123, True)
self.assertEqual(mac_v4, '00:00:5E:00:01:7B')
# For IPv6 should be '00:00:5E:00:02:00' with last octet = VRID
        mac_v6 = resource_utils.get_vrrp_mac(153, False)
        self.assertEqual(mac_v6, '00:00:5E:00:02:99')
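        # For reference, the mapping exercised above amounts to (a sketch,
        # not the actual implementation):
        #   '00:00:5E:00:0%d:%02X' % (1 if use_ipv4 else 2, vrid)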
# Check VRID type validation
self.assertRaises(ValueError, resource_utils.get_vrrp_mac, None, True)
self.assertRaises(ValueError, resource_utils.get_vrrp_mac, '11', True)
def test_get_ip_address(self):
vip = {
'ipv4': {'address': '1.1.1.1'},
'ipv6': {'virtual_ip': u'1234:5678:90ab:cdef::1'}
}
# Check get IPv4 address
self.assertEqual(vip['ipv4']['address'],
resource_utils.get_ip_address(vip, True, 'vip'))
# Check get IPv6 address
self.assertEqual(vip['ipv6']['virtual_ip'],
resource_utils.get_ip_address(vip, False, 'vip'))
# Check ip validation
vip['ipv4']['address'] = 1
vip['ipv6']['virtual_ip'] = None
self.assertRaises(ValueError, resource_utils.get_ip_address,
vip, True, 'vip')
self.assertRaises(ValueError, resource_utils.get_ip_address,
vip, False, 'vip')
vip['ipv4'] = None
vip['ipv6'] = None
self.assertRaises(ValueError, resource_utils.get_ip_address,
vip, True, 'vip')
self.assertRaises(ValueError, resource_utils.get_ip_address,
vip, False, 'vip')
vip = {}
self.assertRaises(ValueError, resource_utils.get_ip_address,
vip, True, 'vip')
self.assertRaises(ValueError, resource_utils.get_ip_address,
vip, False, 'vip')
vip = 1244
self.assertRaises(ValueError, resource_utils.get_ip_address,
vip, True, 'vip')
self.assertRaises(ValueError, resource_utils.get_ip_address,
vip, False, 'vip')
vip = None
self.assertRaises(ValueError, resource_utils.get_ip_address,
vip, True, 'vip')
self.assertRaises(ValueError, resource_utils.get_ip_address,
vip, False, 'vip')
def test_fix_ha_ports_mac(self):
neutron = mock.MagicMock()
vip = {
'ipv4': {'address': '1.1.1.1'},
'ipv6': {'virtual_ip': '1234:5678:90ab:cdef::1'}
}
ports = ['port_1', 'port_2']
vrid = 123
mac_addr = '00:00:5E:00:01:7B'
resource_utils.fix_ha_ports_mac(neutron, vip, vrid, use_ipv4=True,
ports=ports)
self.assertEqual(
[mock.call(
'port_1',
{'port': {'allowed_address_pairs': [{
'ip_address': vip['ipv4']['address'],
'mac_address': mac_addr}]}}),
mock.call(
'port_2',
{'port': {'allowed_address_pairs': [{
'ip_address': vip['ipv4']['address'],
'mac_address': mac_addr}]}})
],
neutron.update_port.call_args_list
)
neutron = mock.MagicMock()
vrid = 153
mac_addr = '00:00:5E:00:02:99'
resource_utils.fix_ha_ports_mac(neutron, vip, vrid, use_ipv4=False,
ports=ports)
self.assertEqual(
[mock.call(
'port_1',
{'port': {'allowed_address_pairs': [{
'ip_address': vip['ipv6']['virtual_ip'],
'mac_address': mac_addr}]}}),
mock.call(
'port_2',
{'port': {'allowed_address_pairs': [{
'ip_address': vip['ipv6']['virtual_ip'],
'mac_address': mac_addr}]}})
],
neutron.update_port.call_args_list
)
| infobloxopen/heat-infoblox | heat_infoblox/tests/test_resource_utils.py | Python | apache-2.0 | 5,663 |
#!/usr/bin/env python
#
# Copyright 2017-2021 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from Pegasus.db.admin.versions.base_version import BaseVersion
DB_VERSION = 9
log = logging.getLogger(__name__)
class Version(BaseVersion):
def __init__(self, connection):
super().__init__(connection)
def update(self, force=False):
"""
:param force:
:return:
"""
log.info("Updating to version %s" % DB_VERSION)
# this update is not necessary any longer
def downgrade(self, force=False):
"""
        Downgrade is not necessary, as the integrity_meta table does not affect the system
"""
| pegasus-isi/pegasus | packages/pegasus-python/src/Pegasus/db/admin/versions/v9.py | Python | apache-2.0 | 1,223 |
# -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from solar.core.handlers.ansible_template import AnsibleTemplate
from solar.core.handlers.ansible_playbook import AnsiblePlaybook
from solar.core.handlers.base import Empty
from solar.core.handlers.puppet import Puppet
from solar.core.handlers.shell import Shell
HANDLERS = {'ansible': AnsibleTemplate,
'ansible_playbook': AnsiblePlaybook,
'shell': Shell,
'puppet': Puppet,
'none': Empty}
def get(handler_name):
handler = HANDLERS.get(handler_name, None)
if handler:
return handler
raise Exception('Handler {0} does not exist'.format(handler_name))
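# Usage sketch (illustrative):
#   handler_cls = get('shell')   # -> Shell
#   handler_cls = get('none')    # -> Empty
#   get('unknown')               # raises Exception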
| torgartor21/solar | solar/solar/core/handlers/__init__.py | Python | apache-2.0 | 1,254 |
from markdown import Extension
from markdown.treeprocessors import Treeprocessor
class BS4Extension(Extension):
def extendMarkdown(self, md): # noqa: N802
md.registerExtension(self)
md.treeprocessors.register(BS4Treeprocessor(md), "bs4_extension", 0)
class BS4Treeprocessor(Treeprocessor):
def run(self, root):
for el in root.iter():
if el.tag == "img":
el.set("class", "img-fluid")
elif el.tag == "blockquote":
el.set("class", "blockquote")
elif el.tag == "table":
el.set("class", "table table-hover table-borderless")
elif el.tag == "thead":
el.set("class", "thead-light")
class LinkBlankTargetExtension(Extension):
def extendMarkdown(self, md): # noqa: N802
md.registerExtension(self)
md.treeprocessors.register(
LinkBlankTargetTreeprocessor(md), "link_blank_target_extension", 0
)
class LinkBlankTargetTreeprocessor(Treeprocessor):
def run(self, root):
for el in root.iter():
if el.tag == "a":
el.set("target", "_blank")
el.set("rel", "noopener")
| comic/comic-django | app/grandchallenge/core/utils/markdown.py | Python | apache-2.0 | 1,204 |
import json
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, Http404, HttpResponse
from django import http
from django.contrib.auth.decorators import login_required
from django.template.loader import get_template
from django.template import Context
from django.utils.translation import ugettext as _
from django.utils.html import strip_tags
from django.core.serializers.json import DjangoJSONEncoder
from students.models import *
from finance.models import Responsable, Autorizado
from students.forms import AlumnoForm, InformeForm
import ho.pisa as pisa
import cStringIO as StringIO
import cgi
from django.forms.models import model_to_dict
import forms
@login_required
def students_list(request):
user = request.user
tipo = user.empleado.puesto
if tipo == 3:
resp = user.empleado.responsable
students = resp.alumnos.all()
elif tipo == 2:
maestra = user.empleado.maestra
salas_maestra = maestra.salas.all()
students = []
for sala in salas_maestra:
            alumnos = sala.get_alumnos()
            if alumnos:
                students.extend(alumnos)
else:
students = Alumno.objects.all()
context = {
'students' : students, 'tipo': tipo,
}
return render_to_response(
'students/st_list.html',
context,
context_instance = RequestContext(request),
)
@login_required
def new_student(request):
if request.method == 'POST':
form = AlumnoForm(data=request.POST)
if form.is_valid():
form.save()
return HttpResponseRedirect(reverse('st_students_list'))
else:
form = AlumnoForm()
context = {
'student_form': form,
}
return render_to_response(
'students/st_new_student.html',
context,
context_instance = RequestContext(request),
)
@login_required
def nuevo_informe(request, st_id):
st = Alumno.objects.get(pk=st_id)
if request.method == 'POST':
form = InformeForm(data=request.POST)
if form.is_valid():
informe = form.save(commit=False)
informe.alumno = Alumno.objects.get(pk=st_id)
informe.maestra = request.user.empleado.maestra
informe.texto = strip_tags(informe.texto)
form.save()
return HttpResponseRedirect(reverse('st_students_list'))
else:
form = InformeForm()
context = {
'informe_form': form, 'st': st,
}
return render_to_response(
'students/st_nuevo_informe.html',
context,
context_instance = RequestContext(request),
)
@login_required
def modificar_informe(request, inf_id):
inf = Informe.objects.get(id=inf_id)
dictionary = model_to_dict(inf, fields=[], exclude=[])
#form = forms.InformeForm(dictionary)
if request.method == 'POST':
form = InformeForm(data=request.POST)
if form.is_valid():
inf.titulo = form.cleaned_data['titulo']
inf.texto = strip_tags(form.cleaned_data['texto'])
inf.fecha = form.cleaned_data['fecha']
inf.save()
return HttpResponseRedirect(reverse('st_students_list'))
else:
form = forms.InformeForm(dictionary)
context = {
'informe_form': form,
}
return render_to_response(
'students/st_modificar_informe.html',
context,
context_instance = RequestContext(request),
)
@login_required
def student_info(request, st_id):
student = Alumno.objects.get(pk=st_id)
context = {
'student': student,
}
return render_to_response(
'students/st_info.html',
context,
context_instance = RequestContext(request),
)
#igual que anterior, solo cambia template
@login_required
def student_personal_info(request, st_id):
tstudent = Alumno.objects.get(pk=st_id)
    student = AlumnoForm(data=model_to_dict(tstudent))
    # FIXME: clean up this ugly hack
context = {
'student': student , 'st': tstudent,
'tipo': request.user.empleado.puesto,
}
return render_to_response(
'students/st_personal_info.html',
context,
context_instance = RequestContext(request),
)
# vista que se repite
@login_required
def student_reports(request, st_id):
student = Alumno.objects.get(pk=st_id)
context = {
'student': student,
}
return render_to_response(
'students/st_reports.html',
context,
context_instance = RequestContext(request),
)
@login_required
def student_report(request, inf_id):
informe = Informe.objects.get(pk=inf_id)
context = {
'informe': informe,
}
return render_to_response(
'students/st_report.html',
context,
context_instance = RequestContext(request),
)
@login_required
def salas(request):
salas = Sala.objects.all()
salas_list = []
for sala in salas:
conf_varones = sala.alumnosala_set.filter(estado=0, alumno__sexo=0).count()
conf_nenas = sala.alumnosala_set.filter(estado=0, alumno__sexo=1).count()
en_espera = sala.alumnosala_set.filter(estado=1).count()
confirmados = conf_varones + conf_nenas
vacantes = sala.capacidad - confirmados
salas_list.append([sala, conf_varones, conf_nenas, vacantes, en_espera])
context = {
'salas': salas_list,
}
return render_to_response(
'students/salas.html',
context,
context_instance = RequestContext(request),
)
@login_required
def info_sala(request, sala_id):
sala = Sala.objects.get(pk=sala_id)
conf_varones = sala.alumnosala_set.filter(estado=0, alumno__sexo=0)
conf_nenas = sala.alumnosala_set.filter(estado=0, alumno__sexo=1)
en_espera = sala.alumnosala_set.filter(estado=1)
confirmados = len(conf_varones) + len(conf_nenas)
vacantes = sala.capacidad - confirmados
context = {
'sala': sala, 'varones': conf_varones, 'nenas': conf_nenas, 'vacantes': vacantes,
'cant_alumnos': confirmados, 'en_espera': en_espera,
}
return render_to_response(
'students/info_sala.html',
context,
context_instance = RequestContext(request),
)
@login_required
def get_informe_as_pdf(request, inf_id):
i = Informe.objects.get(pk=inf_id) #get_object_or_404
return write_pdf('students/s_informe_pdf.html',
{ 'i': i, 'pagesize': 'A4'})
def write_pdf(template_src, context_dict):
template = get_template(template_src)
context = Context(context_dict)
html = template.render(context)
result = StringIO.StringIO()
pdf = pisa.pisaDocument(StringIO.StringIO(
html.encode("ISO-8859-1")), result)
if not pdf.err:
return http.HttpResponse(result.getvalue(), \
mimetype='application/pdf')
return http.HttpResponse("Error creando el pdf %s" % cgi.escape(html))
def get_hermano_info(request):
if request.GET and 'hermano_id' in request.GET:
hermano = Alumno.objects.get(pk=request.GET['hermano_id'])
responsables = Responsable.objects.filter(alumnos=hermano.pk)
autorizados = Autorizado.objects.filter(alumnos=hermano.pk)
response = {'apellido': hermano.apellido,
'responsables':[r.pk for r in responsables],
'autorizados': [a.pk for a in autorizados], 'traslado_emergencia':hermano.traslado_emergencia,
'telefono_emergencia':hermano.telefono_emergencia}
data = json.dumps(response, cls=DjangoJSONEncoder)
return HttpResponse(data, mimetype='application/json') | mfalcon/edujango | students/views.py | Python | apache-2.0 | 8,057 |
import numpy as np
import matplotlib.pyplot as plt
import scipy.stats as stats
import random
import math
##############################################
def sepLine(w, x):
return -((w[0]+w[1]*x)/w[2])
#end
def drawSepLine(w, minX, maxX):
sepx = range(minX, maxX)
sepy = []
for e in sepx:
tmp = sepLine(w, e)
sepy.append( tmp )
#end for
plt.plot(sepx, sepy )
#end drawSepLine
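# Note (added for clarity): the separating line above follows from the decision
# boundary w0 + w1*x + w2*y = 0, i.e. y = -(w0 + w1*x)/w2, assuming w2 != 0.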
##############################################
"""
Get the label.
Note: x must already include the constant (bias) item.
The return value is 1 or -1; if 1, then x belongs to the class of w.
"""
def genLabel(x, w):
t = np.inner(w, x)
ty = np.sign(t)
return ty;
#end
##############################################
#diamond
gdiamond=np.array([
[1.0, 12.0, 1.],
[1.5, 12.5, 1.],
[3.5, 11.5, 1.],
[4.5, 14.0, 1.],
[5.5, 16.0, 1.],
[6.0, 11.5, 1.],
[7.0, 10.5, 1.]
])
#rectangle
grectangle=np.array([
[9.5, 13.0, 2.],
[10.0, 11.5, 2.],
[10.5, 11.5, 2.],
[11.0, 13.0, 2.],
[12.0, 12.0, 2.],
[12.5, 12.5, 2.],
[13.0, 11.0, 2.],
[14.0, 10.0, 2.],
[15.0, 10.5, 2.],
[15.5, 10.6, 2.]
])
#triangle
gtriangle=np.array([
[1.0, 2.5, 3.],
[2.0, 6.0, 3.],
[3.0, 2.0, 3.],
[3.0, 5.0, 3.],
[4.0, 2.2, 3.],
[4.0, 5.5, 3.],
[6.0, 2.0, 3.],
[6.0, 5.5, 3.],
[6.5, 2.0, 3.],
[6.7, 0.5, 3.]
])
#star
gstar=np.array([
[9.5, 8.5, 4.],
[10.0, 1.5, 4.],
[11.0, 6.0, 4.],
[7.7, 6.0, 4.],
[8.0, 4.5, 4.],
[8.2, 4.0, 4.],
[9.0, 1.5, 4.],
[9.0, 4.5, 4.],
[9.5, 5.0, 4.],
[11.0, 1.5, 4.],
])
grtd = np.concatenate((gdiamond,grectangle, gtriangle, gstar))
gminX = (int)(np.min(grtd[:,:1]))-3
gmaxX = (int)(np.max(grtd[:,:1]))+3
gminY = np.min(grtd[:,1:2])-3
gmaxY = np.max(grtd[:,1:2])+3
grtestData = np.array([
[15.0, 15.0, 2.],
[13.0, 4.0, 4.],
[8.0, 8.0, 0.],
[10.0, 9.0, 0.],
[1.5, 7.0, 13.],
[2.0, 6.0, 13.],
[16.0, 7.0, 24.],
])
###plot the data
plt.xlim( (gminX, gmaxX) )
plt.ylim( (gminY, gmaxY) )
plt.plot(gdiamond[:,:1], gdiamond[:, 1:2], '.')
plt.plot(grectangle[:,:1], grectangle[:, 1:2], '1')
plt.plot(gtriangle[:,:1], gtriangle[:, 1:2], '+')
plt.plot(gstar[:,:1], gstar[:, 1:2], '*')
################
"""
Here we use the cyclic PLA to do binary classification of two classes.
"""
def cyclic_pla(td):
x0 = np.zeros( (len(td), 1) )
x0[:]=1.0
td = np.concatenate( (x0, td[:,:1], td[:,1:2], td[:,2:3]), 1 )
    #The initial value of w. td[0] includes y, so we need to subtract 1
w=np.zeros( len(td[0])-1 );
#
    #ensure all points are classified correctly
stage=0;
while(True):
stage = stage+1;
#print("stage "+str(stage), w );
pass
isModifing=False;
#check each point for w
for idx in range(len(td)):
sample = td[idx]
sx = sample[:len(sample)-1]; sy=sample[len(sample)-1]
t = np.inner(w, sx)
ty = np.sign(t)
#print(idx, ty, sy)
if(ty!=sy):
#failed, we need to update w
#print("In stage "+str(stage)+".we need to update w ", w);
print(idx, ty, sy)
w = w + sy*sx
isModifing = True
#end if
#end for
print("The w is ", w)
if(isModifing==False):
break;
#end while
return w
#end
################
"""
Here we use the pocket algorithm to do binary classification of two classes.
"""
def pocket(td):
#The this initial value of w. td[0] include y. so we need to minus 1
w=np.zeros( len(td[0])-1 );
#todo:we can set it as max of float
weighOfPocket=1000000000.0
wPocket=w #w in pocket, that current best w.
#
#ensure all point corret
maxIter=900000
maxIter=1200000
maxIter=42000
weighOfPocketThres=0.05
#calc weight for w
def calWeight(w, td):
weight=0.;
for idx in range(len(td)):
sample = td[idx]
sx = sample[:len(sample)-1]; sy=sample[len(sample)-1]
t = np.inner(w, sx)
ty = np.sign(t)
#print(idx, ty, sy)
if(ty!=sy):
weight += 1.0;
#end for
return weight;
#end
curIter=0
while(curIter<maxIter):
curIter = curIter +1;
#pick up an element in sample to try to improve w
rndIdx=random.randint(0, len(td)-1)
sample = td[rndIdx]
sx = sample[:len(sample)-1]; sy=sample[len(sample)-1]
t = np.inner(w, sx)
ty = np.sign(t)
print(rndIdx, ty, sy)
if(ty!=sy):
#failed, we need to update w
w = w + sy*sx
#print("The w is ", w, sy, sx)
weight = calWeight(w, td)
#if the new w is better than stuff in pocket, then update stuff in pocket
if(weight<weighOfPocket):
weighOfPocket = weight
wPocket = w
#end if
if(weighOfPocket<weighOfPocketThres):
break;
#end if
#print("The curIter is ", curIter)
print("The weighOfPocket is ", weighOfPocket)
print("The w is ", w)
#drawSepLine(w, gminX, gmaxX)
#end while
return wPocket;
#end
################
"""
If the y in an element of the training data is not equal to label, set it
to -1; this forms the training data for one-versus-all classification.
Note: it must be set to -1 rather than 0, to match the sign-based formula used here.
"""
def formOneVesusAll(td, label):
ntd = td.copy()
labelIdx = len(ntd[0])-1
for e in ntd:
if(e[labelIdx]!=label):
e[labelIdx]=-1 #IMPORTANT
else:
e[labelIdx]=1 #IMPORTANT
#end
return ntd
#end
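# Quick sanity check of the relabelling (illustrative):
#   formOneVesusAll(np.array([[0., 0., 1.], [1., 1., 2.]]), 1)
#   returns rows whose last-column labels become [1., -1.]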
"""
Use one-versus-all to calculate all w; store them in ws.
"""
def oneVersusAllHard(td, ws):
pass;
    labels=[1,2,3,4] #we could derive this from rtd[:,2:3]; we just hard-code it here
for label in labels:
nrtd = formOneVesusAll(td, label);
        #w=cyclic_pla(nrtd) #does not work, since nrtd is not strictly linearly separable!!
w = pocket(nrtd)
ws.append(w)
print("w for label ", label, " is ", w)
pass;
#end for
#end
################
#add constant two the training data
x0 = np.zeros( (len(grtd), 1) )
x0[:]=1.0
gtd = np.concatenate( (x0, grtd[:,:1], grtd[:,1:2], grtd[:,2:3]), 1 )
gw=[];
oneVersusAllHard(gtd, gw);
#plot the line
for w in gw:
print("w :", w)
drawSepLine(w, gminX, gmaxX)
#end for
#gw : 1, 2, 3, 4
#label: 1, 2, 3, 4
#plot test data
plt.plot(grtestData[:,:1], grtestData[:, 1:2], '_')
#update the test data
xt0 = np.zeros( (len(grtestData), 1) )
xt0[:]=1.0
gtestData = np.concatenate( (xt0, grtestData[:,:1], grtestData[:,1:2], grtestData[:,2:3]), 1 )
#test
for e in gtestData:
x = e[:len(e)-1]; y=e[len(e)-1]
msg = "For "+str(x)+" expented label:"+str(y)+", actual:"
for w in gw:
actualY=genLabel(x, w)
msg += str(actualY) + ";";
#end for
print(msg)
#end for
pass
################
| YuncyYe/ml | mlf/oneVersusAll_hard.py | Python | apache-2.0 | 7,007 |
from configobj import ConfigObj
from utils_log import utils_log
sys_config_log = utils_log(name='utils_sys_config')
class utils_sys_config():
""" Use ConfigObj to generate config file """
def __init__(self, filename):
self.filename = filename
self.config = ConfigObj(self.filename, list_values=False)
def write_to_file(self, file_name):
self.config.write()
orig_filename = self.config.filename
self.config.filename = file_name
self.config.write()
self.config.filename = orig_filename
def exist(self, section, keyword=None):
if section not in self.config.keys():
return False
if keyword is None:
# Only check section exists
return True
if keyword not in self.config[section].keys():
return False
return True
def get_all_sections(self):
return self.config.keys()
def get_keyword_value_by_section(self, section):
sys_config_log.logger.debug('section -> %s' % section)
if section in self.config.keys():
return self.config[section]
else:
sys_config_log.logger.error('section:%s does not exist' % (section))
return None
def get_value_by_section_keyword(self, section, keyword, default=None):
sys_config_log.logger.debug('section , keyword-> %s , %s' % (section, keyword))
if section in self.config.keys():
if keyword in self.config[section].keys():
return self.config[section][keyword]
else:
sys_config_log.logger.error('keyword:%s of section:%s does not exist' % (keyword, section))
return default
else:
sys_config_log.logger.error('section:%s does not exist' % (section))
return default
def add_section(self, section, keyword=None, value=None):
# just to add section
if keyword is None and value is None:
self.config[section] = {}
self.add_comment(section,'\n')
# want to add section, keyword, and value
else:
# section name already exists, to add or modify keyword and value
if section in self.config.keys():
section1 = self.config[section]
section1[keyword] = value
# new section, new keywords, and new value
else:
self.config[section] = {}
self.config[section][keyword] = value
self.add_comment(section,'\n')
self.config.write()
return True
def add_comment(self, section, comment):
        ''' the comment will be written above the section
example:
# test comments
[section]
'''
self.config.comments[section] = [comment]
return True
def del_section(self, section):
try:
del self.config[section]
except KeyError:
return True
self.config.write()
return True
def add_keyword(self, keyword, value):
self.config[keyword]=value
self.config.write()
return True
def get_keyword(self, keyword):
if keyword in self.config.keys():
return self.config[keyword]
else:
return None
def del_keyword(self, keyword):
if keyword in self.config.keys():
del self.config[keyword]
self.config.write()
return True
def edit_multi_level_section(self, section ,keyword, value):
'''
section value: ['section1','section2']
example:
[section1]
[section2]
keyword=value
'''
        if not isinstance(section, list):
            print 'Input section type must be a list'
            return False
        sub_section = self.config
        for _section in section:
            try:
                sub_section = sub_section[_section]
            except KeyError:
                print 'Wrong section name %s' % _section
                return False
        sub_section[keyword] = value
self.config.write()
return True
def write_script_type_file(self, fileName):
try:
#Write the file contents
with open(fileName, 'w+') as file:
#Loop through the file to change with new values in dict
for _key in self.get_all_sections():
line = _key + "=" + self.get_keyword(_key) + "\n"
file.write(line)
return True
except IOError as e:
print "ERROR opening file " + fileName + ": " + e.strerror + "\n"
return False
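    # Hedged example (illustrative; names are made up): after
    # add_keyword('HOSTNAME', 'node1') and add_keyword('MTU', '9000'),
    # write_script_type_file('/tmp/env.sh') would emit shell-style lines:
    #   HOSTNAME=node1
    #   MTU=9000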
def update_section_keyvalues(self, sec_name, keyvalues):
"""
Update key value of a section
"""
section = self.config[sec_name]
for key in keyvalues:
section[key] = keyvalues[key]
self.config.write()
return True
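# Hedged usage sketch (illustrative; the path and values are made up):
#
#   cfg = utils_sys_config('/tmp/demo.conf')
#   cfg.add_section('network', 'mtu', '9000')   # creates [network] with mtu = 9000
#   cfg.update_section_keyvalues('network', {'mtu': '1500', 'bond': 'bond0'})
#   print cfg.get_value_by_section_keyword('network', 'mtu')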
if __name__ == '__main__':
network = utils_sys_config('/etc/sysconfig/network-scripts/ifcfg-eth0')
print network.config | AnsonShie/system_monitor | utils/utils_sys_config.py | Python | apache-2.0 | 5,301 |
# time: 30 mins
# used time: 8 mins
# this time, try using recursion to solve the problem
# thought: every element is either in a subset or not; if I know all subsets of S[:-1], I can build all subsets of S
class Solution:
"""
@param S: The set of numbers.
@return: A list of lists. See example.
"""
def subsets(self, S):
# implement it recursively
if S is None:
return []
        if len(S) == 0:
return []
if len(S) == 1:
return [[], S]
# recursive
S.sort()
# non-descending..
all_sets = []
for subset in self.subsets(S[:-1]):
all_sets.append(subset + [S[-1]])
all_sets.append(subset)
return all_sets
# runtime:
# sort is called at every recursion level: N levels * O(N log N) = O(N^2 log N)
# building the output dominates: there are 2^N subsets of average length N/2, so constructing them costs O(N * 2^N) overall
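# Hedged usage sketch (illustrative):
#   s = Solution()
#   print(s.subsets([3, 1, 2]))
#   # 2**3 = 8 subsets of the sorted input [1, 2, 3]; the order falls out
#   # of the recursion: [[2, 3], [2], [3], [], [1, 2, 3], [1, 2], [1, 3], [1]]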
| stonemary/lintcode_solutions | subsets/3.py | Python | apache-2.0 | 956 |
import os
import sys
from recommonmark.parser import CommonMarkParser
source_parsers = {
'.md': CommonMarkParser,
}
source_suffix = ['.rst', '.md']
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
]
templates_path = ['_templates']
master_doc = 'index'
project = u'HoverPy'
copyright = u'2017, SpectoLabs'
author = u'SpectoLabs'
version = '0.2.2'
# The full version, including alpha/beta/rc tags.
release = version
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
todo_include_todos = False
if 'READTHEDOCS' not in os.environ:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_static_path = ['_static']
html_context = {
'css_files': [
'https://media.readthedocs.org/css/sphinx_rtd_theme.css',
'https://media.readthedocs.org/css/readthedocs-doc-embed.css',
'_static/theme_overrides.css',
],
}
html_show_sphinx = False
html_show_copyright = True
htmlhelp_basename = 'hoverpydoc'
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
latex_documents = [
(master_doc, 'hoverpy.tex', u'HoverPy Documentation',
u'SpectoLabs', 'manual'),
]
man_pages = [
(master_doc, 'HoverPy', u'HoverPy Documentation',
[author], 1)
]
texinfo_documents = [
(master_doc, 'HoverPy', u'HoverPy Documentation',
author, 'HoverPy', 'Python library for Hoverfly API simulation tool',
'Miscellaneous'),
]
| SpectoLabs/hoverpy | docs/conf.py | Python | apache-2.0 | 2,160 |
# Copyright 2016 Big Switch Networks, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Adapted from neutron.tests.unit.db.test_agents_db.py
from networking_bigswitch.plugins.bigswitch.db import reachability_test_db
from neutron.tests.unit import testlib_api
from neutron_lib import context
class TestReachabilityTestDbMixin(testlib_api.SqlTestCase):
def setUp(self):
super(TestReachabilityTestDbMixin, self).setUp()
self.context = context.get_admin_context()
self.dbmixin = reachability_test_db.ReachabilityTestDbMixin()
def _assert_ref_fields_are_equal(self, reference, result):
"""Compare (key, value) pairs of a reference dict with the result
Note: the result MAY have additional keys
"""
for field, value in reference.items():
self.assertEqual(value, result[field], field)
def test_create_reachabilitytest(self):
reachabilitytest_dict = {
'reachabilitytest': {
'tenant_id': 'admin_tenant_id',
'name': 'test1',
'src_tenant_id': 'admin_tenant_id',
'src_tenant_name': 'admin',
'src_segment_id': 'web_segment_id',
'src_segment_name': 'web',
'src_ip': '10.1.1.2',
'dst_ip': '10.2.1.2',
'expected_result': 'dropped'
}
}
reachabilitytest = self.dbmixin.create_reachabilitytest(
self.context, reachabilitytest_dict)
self._assert_ref_fields_are_equal(
reachabilitytest_dict['reachabilitytest'], reachabilitytest)
def test_get_reachabilitytest(self):
pass
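    # Hedged sketch of what this stub might exercise, assuming the mixin
    # follows the usual Neutron get_<resource>(context, id) convention
    # (the exact signature is not confirmed by this file):
    #
    #   created = self.dbmixin.create_reachabilitytest(
    #       self.context, reachabilitytest_dict)
    #   fetched = self.dbmixin.get_reachabilitytest(
    #       self.context, created['id'])
    #   self._assert_ref_fields_are_equal(created, fetched)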
def test_get_reachabilitytests(self):
pass
def test_update_reachabilitytest(self):
pass
def test_delete_reachabilitytest(self):
pass
| wolverineav/networking-bigswitch | networking_bigswitch/tests/unit/bigswitch/test_reachabilitytest_db.py | Python | apache-2.0 | 2,406 |