py | b40ca5d02e8e1bd68edbd17aca5c40b89c2e1f5b | """Provides the `ExportSystemDesign` class."""
__author__ = "Rob Hammond"
__copyright__ = "Copyright 2020, National Renewable Energy Laboratory"
__maintainer__ = "Rob Hammond"
__email__ = "[email protected]"
import numpy as np
from wisdem.orbit.phases.design._cables import CableSystem
class ExportSystemDesign(CableSystem):
"""
Design phase for the export cabling system.
Attributes
----------
num_cables : int
Total number of cables required for transmitting power.
length : float
Length of a single cable connecting the OSS to the interconnection in km.
mass : float
Mass of `length` in tonnes.
cable : `Cable`
Instance of `ORBIT.phases.design.Cable`. An export system will
only require a single type of cable.
total_length : float
        Total length of cable required to transmit power.
total_mass : float
Total mass of cable required to transmit power.
    sections_cables : np.ndarray, shape: (`num_cables`, )
        An array of `cable`.
    sections_lengths : np.ndarray, shape: (`num_cables`, )
        An array of `length`.
"""
expected_config = {
"site": {"distance_to_landfall": "km", "depth": "m"},
"landfall": {"interconnection_distance": "km (optional)"},
"plant": {"num_turbines": "int"},
"turbine": {"turbine_rating": "MW"},
"export_system_design": {
"cables": "str",
"num_redundant": "int (optional)",
"percent_added_length": "float (optional)",
},
}
output_config = {
"export_system": {
"cable": {
"linear_density": "t/km",
"number": "int",
"sections": "list",
}
}
}
def __init__(self, config, **kwargs):
"""
        Defines the cables and sections required to install an offshore wind
        farm.
Parameters
----------
config : dict
            Configuration dictionary for the export cabling system. See
`expected_config` for details on what is required.
"""
super().__init__(config, "export", **kwargs)
# For export cables there should only be one cable type due to the
# custom nature of their design
for name in self.expected_config["site"]:
setattr(self, "".join(("_", name)), config["site"][name])
self._depth = config["site"]["depth"]
self._plant_capacity = (
self.config["plant"]["num_turbines"]
* self.config["turbine"]["turbine_rating"]
)
self._distance_to_landfall = config["site"]["distance_to_landfall"]
try:
self._distance_to_interconnection = config["landfall"][
"interconnection_distance"
]
except KeyError:
self._distance_to_interconnection = 3
def run(self):
"""
Instantiates the export cable system and runs all the required methods.
"""
self._initialize_cables()
self.cable = self.cables[[*self.cables][0]]
self.compute_number_cables()
self.compute_cable_length()
self.compute_cable_mass()
self.compute_total_cable()
@property
def total_cable_cost(self):
"""Returns total array system cable cost."""
return sum(self.cost_by_type.values())
@property
def detailed_output(self):
"""Returns export system design outputs."""
_output = {
**self.design_result,
"export_system_total_mass": self.total_mass,
"export_system_total_length": self.total_length,
"export_system_total_cost": self.total_cable_cost,
"export_system_cable_power": self.cable.cable_power,
}
return _output
def compute_number_cables(self):
"""
Calculate the total number of required and redundant cables to
transmit power to the onshore interconnection.
"""
num_required = np.ceil(self._plant_capacity / self.cable.cable_power)
num_redundant = self._design.get("num_redundant", 0)
self.num_cables = int(num_required + num_redundant)
def compute_cable_length(self):
"""
Calculates the total distance an export cable must travel.
"""
added_length = 1.0 + self._design.get("percent_added_length", 0.0)
self.length = round(
(
(self._depth / 1000.0) # convert to km
+ self._distance_to_landfall
+ self._distance_to_interconnection
)
* added_length,
10,
)
def compute_cable_mass(self):
"""
Calculates the total mass of a single length of export cable.
"""
self.mass = round(self.length * self.cable.linear_density, 10)
def compute_total_cable(self):
"""
Calculates the total length and mass of cables required to fully
connect the OSS to the interconnection point.
"""
self.total_length = round(self.num_cables * self.length, 10)
self.total_mass = round(self.num_cables * self.mass, 10)
@property
def sections_cable_lengths(self):
"""
Creates an array of section lengths to work with `CableSystem`
Returns
-------
np.ndarray
Array of `length` with shape (`num_cables`, ).
"""
return np.full(self.num_cables, self.length)
@property
def sections_cables(self):
"""
Creates an array of cable names to work with `CableSystem`.
Returns
-------
np.ndarray
Array of `cable.name` with shape (`num_cables`, ).
"""
return np.full(self.num_cables, self.cable.name)
@property
def design_result(self):
"""
        A dictionary describing the cable type, section lengths, and
        linear density.
Returns
-------
output : dict
Dictionary containing the output export system. Contains:
- 'linear_density': 't/km'
- 'sections': 'list [self.length]'
- 'number': 'int'
"""
if self.cables is None:
raise Exception(f"Has {self.__class__.__name__} been ran?")
output = {
"export_system": {
"interconnection_distance": self._distance_to_interconnection
}
}
for name, cable in self.cables.items():
output["export_system"]["cable"] = {
"linear_density": cable.linear_density,
"sections": [self.length],
"number": self.num_cables,
}
return output
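# A minimal usage sketch (not part of the module; the config values and the
# cable name below are illustrative, not real ORBIT library data):
#
#   config = {
#       "site": {"distance_to_landfall": 50, "depth": 20},
#       "plant": {"num_turbines": 80},
#       "turbine": {"turbine_rating": 12},
#       "export_system_design": {"cables": "XLPE_500mm_132kV"},
#   }
#   design = ExportSystemDesign(config)
#   design.run()
#   print(design.design_result)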
py | b40ca5f6cb4d95be7bb8e4a1f67c3dc3b34fde34 | """
Scan a single account authorization file
"""
# Copyright (c) 2020, salesforce.com, inc.
# All rights reserved.
# Licensed under the BSD 3-Clause license.
# For full license text, see the LICENSE file in the repo root
# or https://opensource.org/licenses/BSD-3-Clause
import logging
import os
import webbrowser
import json
from pathlib import Path
from typing import Dict, Any, List
import yaml
import click
from policy_sentry.util.arns import get_account_from_arn
from cloudsplaining.shared.constants import EXCLUSIONS_FILE
from cloudsplaining.shared.validation import check_authorization_details_schema
from cloudsplaining.shared.exclusions import Exclusions, DEFAULT_EXCLUSIONS
from cloudsplaining.scan.authorization_details import AuthorizationDetails
from cloudsplaining.shared.utils import write_results_data_file
from cloudsplaining.output.report import HTMLReport
from cloudsplaining import set_log_level
logger = logging.getLogger(__name__)
@click.command(
short_help="Scan a single file containing AWS IAM account authorization details and generate report on "
"IAM security posture. "
)
@click.option(
"--input-file",
"-i",
type=click.Path(exists=True),
required=True,
help="Path of IAM account authorization details file",
)
@click.option(
"--exclusions-file",
"-e",
help="A yaml file containing a list of policy names to exclude from the scan.",
type=click.Path(exists=True),
required=False,
default=EXCLUSIONS_FILE,
)
@click.option(
"--output",
"-o",
required=False,
type=click.Path(exists=True),
default=os.getcwd(),
help="Output directory.",
)
@click.option(
"--skip-open-report",
"-s",
required=False,
default=False,
is_flag=True,
help="Don't open the HTML report in the web browser after creating. "
"This helps when running the report in automation.",
)
@click.option(
"--minimize",
"-m",
required=False,
default=False,
is_flag=True,
help="Reduce the size of the HTML Report by pulling the Cloudsplaining Javascript code over the internet.",
)
@click.option("--verbose", "-v", "verbosity", count=True)
def scan(
input_file: str,
exclusions_file: str,
output: str,
skip_open_report: bool,
minimize: bool,
verbosity: int,
) -> None: # pragma: no cover
"""
Given the path to account authorization details files and the exclusions config file, scan all inline and
managed policies in the account to identify actions that do not leverage resource constraints.
"""
set_log_level(verbosity)
if exclusions_file:
# Get the exclusions configuration
with open(exclusions_file, "r") as yaml_file:
try:
exclusions_cfg = yaml.safe_load(yaml_file)
            except yaml.YAMLError as exc:
                logger.critical(exc)
                raise  # a malformed exclusions file would otherwise leave exclusions_cfg undefined
        exclusions = Exclusions(exclusions_cfg)
else:
exclusions = DEFAULT_EXCLUSIONS
if os.path.isfile(input_file):
account_name = Path(input_file).stem
with open(input_file) as f:
contents = f.read()
account_authorization_details_cfg = json.loads(contents)
rendered_html_report = scan_account_authorization_details(
account_authorization_details_cfg,
exclusions,
account_name,
output,
write_data_files=True,
minimize=minimize,
)
html_output_file = os.path.join(output, f"iam-report-{account_name}.html")
logger.info("Saving the report to %s", html_output_file)
if os.path.exists(html_output_file):
os.remove(html_output_file)
with open(html_output_file, "w", encoding="utf-8") as f:
f.write(rendered_html_report)
print(f"Wrote HTML results to: {html_output_file}")
# Open the report by default
if not skip_open_report:
print("Opening the HTML report")
url = "file://%s" % os.path.abspath(html_output_file)
webbrowser.open(url, new=2)
if os.path.isdir(input_file):
logger.info(
"The path given is a directory. Scanning for account authorization files and generating report."
)
input_files = get_authorization_files_in_directory(input_file)
for file in input_files:
logger.info(f"Scanning file: {file}")
with open(file) as f:
contents = f.read()
account_authorization_details_cfg = json.loads(contents)
account_name = Path(file).stem
# Scan the Account Authorization Details config
rendered_html_report = scan_account_authorization_details(
account_authorization_details_cfg,
exclusions,
account_name,
output,
write_data_files=True,
minimize=minimize,
)
html_output_file = os.path.join(output, f"iam-report-{account_name}.html")
logger.info("Saving the report to %s", html_output_file)
if os.path.exists(html_output_file):
os.remove(html_output_file)
with open(html_output_file, "w") as f:
f.write(rendered_html_report)
print(f"Wrote HTML results to: {html_output_file}")
# Open the report by default
if not skip_open_report:
print("Opening the HTML report")
url = "file://%s" % os.path.abspath(html_output_file)
webbrowser.open(url, new=2)
def scan_account_authorization_details(
account_authorization_details_cfg: Dict[str, Any],
exclusions: Exclusions,
account_name: str = "default",
output_directory: str = os.getcwd(),
write_data_files: bool = False,
minimize: bool = False,
) -> str: # pragma: no cover
"""
Given the path to account authorization details files and the exclusions config file, scan all inline and
managed policies in the account to identify actions that do not leverage resource constraints.
"""
logger.debug(
"Identifying modify-only actions that are not leveraging "
"resource constraints..."
)
check_authorization_details_schema(account_authorization_details_cfg)
authorization_details = AuthorizationDetails(
account_authorization_details_cfg, exclusions
)
results = authorization_details.results
# Lazy method to get an account ID
account_id = ""
for role in results.get("roles", []):
if "arn:aws:iam::aws:" not in results["roles"][role]["arn"]:
account_id = get_account_from_arn(results["roles"][role]["arn"])
break
html_report = HTMLReport(
account_id=account_id,
account_name=account_name,
results=results,
minimize=minimize,
)
rendered_report = html_report.get_html_report()
# Raw data file
if write_data_files:
if output_directory is None:
output_directory = os.getcwd()
results_data_file = os.path.join(
output_directory, f"iam-results-{account_name}.json"
)
results_data_filepath = write_results_data_file(
authorization_details.results, results_data_file
)
print(f"Results data saved: {results_data_filepath}")
findings_data_file = os.path.join(
output_directory, f"iam-findings-{account_name}.json"
)
findings_data_filepath = write_results_data_file(results, findings_data_file)
print(f"Findings data file saved: {findings_data_filepath}")
return rendered_report
def get_authorization_files_in_directory(
directory: str,
) -> List[str]: # pragma: no cover
"""Get a list of download-account-authorization-files in a directory"""
file_list_with_full_path = [
file.absolute() for file in Path(directory).glob("*.json")
]
new_file_list = []
for file in file_list_with_full_path:
contents = file.read_text()
        account_authorization_details_cfg = json.loads(contents)  # json.loads has no 'default' kwarg; that belongs to json.dumps
valid_schema = check_authorization_details_schema(
account_authorization_details_cfg
)
if valid_schema:
new_file_list.append(str(file))
return new_file_list
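# A minimal usage sketch (file and directory names are illustrative, assuming
# the package's `cloudsplaining` console entry point):
#
#   cloudsplaining scan --input-file default.json --output ./reports
#
# or invoking the scan logic directly from Python:
#
#   with open("default.json") as f:
#       cfg = json.load(f)
#   html = scan_account_authorization_details(cfg, DEFAULT_EXCLUSIONS,
#                                             account_name="default")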
py | b40ca67652c064b3aa8ebe6e00888915ad9f12aa | """A shared library of classes and functions useful for many different applications."""
def fix_path_seperators(path):
    """Replaces unix-style path separators with Windows-style path separators.
    Args:
        path: A path string to check for unix-style path separators.
    Returns:
        The given path with the separators fixed."""
return path.replace('/', '\\') if path is not None else ''
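# Usage sketch:
#   fix_path_seperators('logs/run1/out.txt')  # -> 'logs\\run1\\out.txt'
#   fix_path_seperators(None)                 # -> ''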
py | b40ca67e57950c05220cf6431e3a184b170b885c | import json
import numpy as np
from periodictable import elements
import h5py as h5
SYMBOL_TO_NUMBER = {el.symbol: el.number for el in elements}
def xyz_to_numpy(xyz_file):
number_of_atoms = int(xyz_file.readline().strip())
xyz_file.readline()
    elements = []  # local list of element symbols (shadows the periodictable import; safe since SYMBOL_TO_NUMBER is precomputed)
positions = np.ndarray(shape=(3, number_of_atoms))
    for i, line in enumerate(xyz_file):
        (e, x, y, z) = line.split()
        elements.append(e.strip())
        positions[0][i] = float(x.strip())
        positions[1][i] = float(y.strip())
        positions[2][i] = float(z.strip())
        i += 1  # after the loop ends, i equals the number of parsed rows
    if i != number_of_atoms:
        raise Exception('XYZ states that there are %s atoms and there are only %s'
                        % (number_of_atoms, i))
atomic_numbers = [SYMBOL_TO_NUMBER[e] for e in set(elements)]
atomic_numbers.sort()
atomic_spec = [atomic_numbers.index(SYMBOL_TO_NUMBER[e]) for e in elements]
    atomic_spec_np = np.ndarray(shape=(1, number_of_atoms), dtype=int)  # np.int was removed in NumPy 1.24+
atomic_spec_np[0] = np.asarray(atomic_spec)
return (positions, atomic_spec_np, atomic_numbers)
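# Usage sketch (hypothetical file; the XYZ format is: atom count, a comment
# line, then one "Element x y z" row per atom):
#
#   with open('molecule.xyz') as f:
#       positions, atomic_spec, atomic_numbers = xyz_to_numpy(f)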
def recon_to_numpy(emd_file):
with h5.File(emd_file, 'r') as f:
emdgrp = f['data/tomography']
data = emdgrp['data'][:]
return data
def proj_to_numpy(emd_file):
with h5.File(emd_file, 'r') as f:
emdgrp = f['data/tomography']
proj = emdgrp['data'][:]
angle = emdgrp['dim3'][:]
return (proj, angle)
def json_to_reconstruction_params(json_file):
# with open(str(json_file)) as f:
# parameters = json.load(f)
parameters = json.load(json_file)
assert('resolution' in parameters)
resolution = parameters['resolution']
assert('cropHalfWidth' in parameters)
crop_half_width = parameters['cropHalfWidth']
assert('volumeSize' in parameters)
volume_size = parameters['volumeSize']
assert('zDirection' in parameters)
z_direction = parameters['zDirection']
assert('bFactor' in parameters)
b_factor = parameters['bFactor']
assert('hFactor' in parameters)
h_factor = parameters['hFactor']
assert('axisConvention' in parameters)
axis_convention = parameters['axisConvention']
return (resolution, crop_half_width, volume_size,
z_direction, b_factor, h_factor, axis_convention)
py | b40ca7eba18f3f06497beae13c8b97850653b35a | # emacs: -*- mode: python; py-indent-offset: 4; tab-width: 4; indent-tabs-mode: nil -*-
# ex: set sts=4 ts=4 sw=4 noet:
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the datalad package for the
# copyright and license terms.
#
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
from datalad.tests.utils import skip_if_no_module
skip_if_no_module('scrapy')  # e.g. not present under Python3
py | b40ca801b8fdfff7d90ed51fd6fae852fa317eed | from getratings.models.ratings import Ratings
class NA_Warwick_Bot_Aatrox(Ratings):
pass
class NA_Warwick_Bot_Ahri(Ratings):
pass
class NA_Warwick_Bot_Akali(Ratings):
pass
class NA_Warwick_Bot_Alistar(Ratings):
pass
class NA_Warwick_Bot_Amumu(Ratings):
pass
class NA_Warwick_Bot_Anivia(Ratings):
pass
class NA_Warwick_Bot_Annie(Ratings):
pass
class NA_Warwick_Bot_Ashe(Ratings):
pass
class NA_Warwick_Bot_AurelionSol(Ratings):
pass
class NA_Warwick_Bot_Azir(Ratings):
pass
class NA_Warwick_Bot_Bard(Ratings):
pass
class NA_Warwick_Bot_Blitzcrank(Ratings):
pass
class NA_Warwick_Bot_Brand(Ratings):
pass
class NA_Warwick_Bot_Braum(Ratings):
pass
class NA_Warwick_Bot_Caitlyn(Ratings):
pass
class NA_Warwick_Bot_Camille(Ratings):
pass
class NA_Warwick_Bot_Cassiopeia(Ratings):
pass
class NA_Warwick_Bot_Chogath(Ratings):
pass
class NA_Warwick_Bot_Corki(Ratings):
pass
class NA_Warwick_Bot_Darius(Ratings):
pass
class NA_Warwick_Bot_Diana(Ratings):
pass
class NA_Warwick_Bot_Draven(Ratings):
pass
class NA_Warwick_Bot_DrMundo(Ratings):
pass
class NA_Warwick_Bot_Ekko(Ratings):
pass
class NA_Warwick_Bot_Elise(Ratings):
pass
class NA_Warwick_Bot_Evelynn(Ratings):
pass
class NA_Warwick_Bot_Ezreal(Ratings):
pass
class NA_Warwick_Bot_Fiddlesticks(Ratings):
pass
class NA_Warwick_Bot_Fiora(Ratings):
pass
class NA_Warwick_Bot_Fizz(Ratings):
pass
class NA_Warwick_Bot_Galio(Ratings):
pass
class NA_Warwick_Bot_Gangplank(Ratings):
pass
class NA_Warwick_Bot_Garen(Ratings):
pass
class NA_Warwick_Bot_Gnar(Ratings):
pass
class NA_Warwick_Bot_Gragas(Ratings):
pass
class NA_Warwick_Bot_Graves(Ratings):
pass
class NA_Warwick_Bot_Hecarim(Ratings):
pass
class NA_Warwick_Bot_Heimerdinger(Ratings):
pass
class NA_Warwick_Bot_Illaoi(Ratings):
pass
class NA_Warwick_Bot_Irelia(Ratings):
pass
class NA_Warwick_Bot_Ivern(Ratings):
pass
class NA_Warwick_Bot_Janna(Ratings):
pass
class NA_Warwick_Bot_JarvanIV(Ratings):
pass
class NA_Warwick_Bot_Jax(Ratings):
pass
class NA_Warwick_Bot_Jayce(Ratings):
pass
class NA_Warwick_Bot_Jhin(Ratings):
pass
class NA_Warwick_Bot_Jinx(Ratings):
pass
class NA_Warwick_Bot_Kalista(Ratings):
pass
class NA_Warwick_Bot_Karma(Ratings):
pass
class NA_Warwick_Bot_Karthus(Ratings):
pass
class NA_Warwick_Bot_Kassadin(Ratings):
pass
class NA_Warwick_Bot_Katarina(Ratings):
pass
class NA_Warwick_Bot_Kayle(Ratings):
pass
class NA_Warwick_Bot_Kayn(Ratings):
pass
class NA_Warwick_Bot_Kennen(Ratings):
pass
class NA_Warwick_Bot_Khazix(Ratings):
pass
class NA_Warwick_Bot_Kindred(Ratings):
pass
class NA_Warwick_Bot_Kled(Ratings):
pass
class NA_Warwick_Bot_KogMaw(Ratings):
pass
class NA_Warwick_Bot_Leblanc(Ratings):
pass
class NA_Warwick_Bot_LeeSin(Ratings):
pass
class NA_Warwick_Bot_Leona(Ratings):
pass
class NA_Warwick_Bot_Lissandra(Ratings):
pass
class NA_Warwick_Bot_Lucian(Ratings):
pass
class NA_Warwick_Bot_Lulu(Ratings):
pass
class NA_Warwick_Bot_Lux(Ratings):
pass
class NA_Warwick_Bot_Malphite(Ratings):
pass
class NA_Warwick_Bot_Malzahar(Ratings):
pass
class NA_Warwick_Bot_Maokai(Ratings):
pass
class NA_Warwick_Bot_MasterYi(Ratings):
pass
class NA_Warwick_Bot_MissFortune(Ratings):
pass
class NA_Warwick_Bot_MonkeyKing(Ratings):
pass
class NA_Warwick_Bot_Mordekaiser(Ratings):
pass
class NA_Warwick_Bot_Morgana(Ratings):
pass
class NA_Warwick_Bot_Nami(Ratings):
pass
class NA_Warwick_Bot_Nasus(Ratings):
pass
class NA_Warwick_Bot_Nautilus(Ratings):
pass
class NA_Warwick_Bot_Nidalee(Ratings):
pass
class NA_Warwick_Bot_Nocturne(Ratings):
pass
class NA_Warwick_Bot_Nunu(Ratings):
pass
class NA_Warwick_Bot_Olaf(Ratings):
pass
class NA_Warwick_Bot_Orianna(Ratings):
pass
class NA_Warwick_Bot_Ornn(Ratings):
pass
class NA_Warwick_Bot_Pantheon(Ratings):
pass
class NA_Warwick_Bot_Poppy(Ratings):
pass
class NA_Warwick_Bot_Quinn(Ratings):
pass
class NA_Warwick_Bot_Rakan(Ratings):
pass
class NA_Warwick_Bot_Rammus(Ratings):
pass
class NA_Warwick_Bot_RekSai(Ratings):
pass
class NA_Warwick_Bot_Renekton(Ratings):
pass
class NA_Warwick_Bot_Rengar(Ratings):
pass
class NA_Warwick_Bot_Riven(Ratings):
pass
class NA_Warwick_Bot_Rumble(Ratings):
pass
class NA_Warwick_Bot_Ryze(Ratings):
pass
class NA_Warwick_Bot_Sejuani(Ratings):
pass
class NA_Warwick_Bot_Shaco(Ratings):
pass
class NA_Warwick_Bot_Shen(Ratings):
pass
class NA_Warwick_Bot_Shyvana(Ratings):
pass
class NA_Warwick_Bot_Singed(Ratings):
pass
class NA_Warwick_Bot_Sion(Ratings):
pass
class NA_Warwick_Bot_Sivir(Ratings):
pass
class NA_Warwick_Bot_Skarner(Ratings):
pass
class NA_Warwick_Bot_Sona(Ratings):
pass
class NA_Warwick_Bot_Soraka(Ratings):
pass
class NA_Warwick_Bot_Swain(Ratings):
pass
class NA_Warwick_Bot_Syndra(Ratings):
pass
class NA_Warwick_Bot_TahmKench(Ratings):
pass
class NA_Warwick_Bot_Taliyah(Ratings):
pass
class NA_Warwick_Bot_Talon(Ratings):
pass
class NA_Warwick_Bot_Taric(Ratings):
pass
class NA_Warwick_Bot_Teemo(Ratings):
pass
class NA_Warwick_Bot_Thresh(Ratings):
pass
class NA_Warwick_Bot_Tristana(Ratings):
pass
class NA_Warwick_Bot_Trundle(Ratings):
pass
class NA_Warwick_Bot_Tryndamere(Ratings):
pass
class NA_Warwick_Bot_TwistedFate(Ratings):
pass
class NA_Warwick_Bot_Twitch(Ratings):
pass
class NA_Warwick_Bot_Udyr(Ratings):
pass
class NA_Warwick_Bot_Urgot(Ratings):
pass
class NA_Warwick_Bot_Varus(Ratings):
pass
class NA_Warwick_Bot_Vayne(Ratings):
pass
class NA_Warwick_Bot_Veigar(Ratings):
pass
class NA_Warwick_Bot_Velkoz(Ratings):
pass
class NA_Warwick_Bot_Vi(Ratings):
pass
class NA_Warwick_Bot_Viktor(Ratings):
pass
class NA_Warwick_Bot_Vladimir(Ratings):
pass
class NA_Warwick_Bot_Volibear(Ratings):
pass
class NA_Warwick_Bot_Warwick(Ratings):
pass
class NA_Warwick_Bot_Xayah(Ratings):
pass
class NA_Warwick_Bot_Xerath(Ratings):
pass
class NA_Warwick_Bot_XinZhao(Ratings):
pass
class NA_Warwick_Bot_Yasuo(Ratings):
pass
class NA_Warwick_Bot_Yorick(Ratings):
pass
class NA_Warwick_Bot_Zac(Ratings):
pass
class NA_Warwick_Bot_Zed(Ratings):
pass
class NA_Warwick_Bot_Ziggs(Ratings):
pass
class NA_Warwick_Bot_Zilean(Ratings):
pass
class NA_Warwick_Bot_Zyra(Ratings):
pass
py | b40ca8244a4d360af670157f2d955a1e8635c6c3 | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetSqlPoolVulnerabilityAssessmentResult',
'AwaitableGetSqlPoolVulnerabilityAssessmentResult',
'get_sql_pool_vulnerability_assessment',
]
@pulumi.output_type
class GetSqlPoolVulnerabilityAssessmentResult:
"""
A Sql pool vulnerability assessment.
"""
def __init__(__self__, id=None, name=None, recurring_scans=None, storage_account_access_key=None, storage_container_path=None, storage_container_sas_key=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if recurring_scans and not isinstance(recurring_scans, dict):
raise TypeError("Expected argument 'recurring_scans' to be a dict")
pulumi.set(__self__, "recurring_scans", recurring_scans)
if storage_account_access_key and not isinstance(storage_account_access_key, str):
raise TypeError("Expected argument 'storage_account_access_key' to be a str")
pulumi.set(__self__, "storage_account_access_key", storage_account_access_key)
if storage_container_path and not isinstance(storage_container_path, str):
raise TypeError("Expected argument 'storage_container_path' to be a str")
pulumi.set(__self__, "storage_container_path", storage_container_path)
if storage_container_sas_key and not isinstance(storage_container_sas_key, str):
raise TypeError("Expected argument 'storage_container_sas_key' to be a str")
pulumi.set(__self__, "storage_container_sas_key", storage_container_sas_key)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="recurringScans")
def recurring_scans(self) -> Optional['outputs.VulnerabilityAssessmentRecurringScansPropertiesResponse']:
"""
The recurring scans settings
"""
return pulumi.get(self, "recurring_scans")
@property
@pulumi.getter(name="storageAccountAccessKey")
def storage_account_access_key(self) -> Optional[str]:
"""
Specifies the identifier key of the storage account for vulnerability assessment scan results. If 'StorageContainerSasKey' isn't specified, storageAccountAccessKey is required.
"""
return pulumi.get(self, "storage_account_access_key")
@property
@pulumi.getter(name="storageContainerPath")
def storage_container_path(self) -> Optional[str]:
"""
A blob storage container path to hold the scan results (e.g. https://myStorage.blob.core.windows.net/VaScans/). It is required if server level vulnerability assessment policy doesn't set
"""
return pulumi.get(self, "storage_container_path")
@property
@pulumi.getter(name="storageContainerSasKey")
def storage_container_sas_key(self) -> Optional[str]:
"""
A shared access signature (SAS Key) that has write access to the blob container specified in 'storageContainerPath' parameter. If 'storageAccountAccessKey' isn't specified, StorageContainerSasKey is required.
"""
return pulumi.get(self, "storage_container_sas_key")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
class AwaitableGetSqlPoolVulnerabilityAssessmentResult(GetSqlPoolVulnerabilityAssessmentResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSqlPoolVulnerabilityAssessmentResult(
id=self.id,
name=self.name,
recurring_scans=self.recurring_scans,
storage_account_access_key=self.storage_account_access_key,
storage_container_path=self.storage_container_path,
storage_container_sas_key=self.storage_container_sas_key,
type=self.type)
def get_sql_pool_vulnerability_assessment(resource_group_name: Optional[str] = None,
sql_pool_name: Optional[str] = None,
vulnerability_assessment_name: Optional[str] = None,
workspace_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSqlPoolVulnerabilityAssessmentResult:
"""
A Sql pool vulnerability assessment.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str sql_pool_name: SQL pool name
:param str vulnerability_assessment_name: The name of the vulnerability assessment.
:param str workspace_name: The name of the workspace
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['sqlPoolName'] = sql_pool_name
__args__['vulnerabilityAssessmentName'] = vulnerability_assessment_name
__args__['workspaceName'] = workspace_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:synapse/v20190601preview:getSqlPoolVulnerabilityAssessment', __args__, opts=opts, typ=GetSqlPoolVulnerabilityAssessmentResult).value
return AwaitableGetSqlPoolVulnerabilityAssessmentResult(
id=__ret__.id,
name=__ret__.name,
recurring_scans=__ret__.recurring_scans,
storage_account_access_key=__ret__.storage_account_access_key,
storage_container_path=__ret__.storage_container_path,
storage_container_sas_key=__ret__.storage_container_sas_key,
type=__ret__.type)
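# A minimal usage sketch (resource names are illustrative):
#
#   result = get_sql_pool_vulnerability_assessment(
#       resource_group_name="my-rg",
#       sql_pool_name="my-pool",
#       vulnerability_assessment_name="default",
#       workspace_name="my-workspace")
#   print(result.storage_container_path)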
py | b40ca84e36ab1d3d3dada24fe295dd4b0f9d5ada | # copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import paddle.fluid as fluid
from paddle.fluid.param_attr import ParamAttr
from paddle.fluid.regularizer import L2Decay
from collections import OrderedDict
from ppdet.core.workspace import register
__all__ = ["GhostNet"]
@register
class GhostNet(object):
"""
scale (float): scaling factor for convolution groups proportion of GhostNet.
feature_maps (list): index of stages whose feature maps are returned.
conv_decay (float): weight decay for convolution layer weights.
extra_block_filters (list): number of filter for each extra block.
    lr_mult_list (list): learning rate ratio of different blocks; a lower ratio is
                         needed for pretrained models obtained via distillation
                         (default: [1.0, 1.0, 1.0, 1.0, 1.0]).
"""
def __init__(
self,
scale,
feature_maps=[5, 6, 7, 8, 9, 10],
conv_decay=0.00001,
extra_block_filters=[[256, 512], [128, 256], [128, 256], [64, 128]],
lr_mult_list=[1.0, 1.0, 1.0, 1.0, 1.0],
freeze_norm=False):
self.scale = scale
self.feature_maps = feature_maps
self.extra_block_filters = extra_block_filters
self.end_points = []
self.block_stride = 0
self.conv_decay = conv_decay
self.lr_mult_list = lr_mult_list
self.freeze_norm = freeze_norm
self.curr_stage = 0
self.cfgs = [
            # k: kernel size, t: expansion size, c: output channels, se: use SE block, s: stride
[3, 16, 16, 0, 1],
[3, 48, 24, 0, 2],
[3, 72, 24, 0, 1],
[5, 72, 40, 1, 2],
[5, 120, 40, 1, 1],
[3, 240, 80, 0, 2],
[3, 200, 80, 0, 1],
[3, 184, 80, 0, 1],
[3, 184, 80, 0, 1],
[3, 480, 112, 1, 1],
[3, 672, 112, 1, 1],
[5, 672, 160, 1, 2],
[5, 960, 160, 0, 1],
[5, 960, 160, 1, 1],
[5, 960, 160, 0, 1],
[5, 960, 160, 1, 1]
]
def _conv_bn_layer(self,
input,
num_filters,
filter_size,
stride=1,
groups=1,
act=None,
name=None):
lr_idx = self.curr_stage // 3
lr_idx = min(lr_idx, len(self.lr_mult_list) - 1)
lr_mult = self.lr_mult_list[lr_idx]
norm_lr = 0. if self.freeze_norm else lr_mult
x = fluid.layers.conv2d(
input=input,
num_filters=num_filters,
filter_size=filter_size,
stride=stride,
padding=(filter_size - 1) // 2,
groups=groups,
act=None,
param_attr=ParamAttr(
regularizer=L2Decay(self.conv_decay),
learning_rate=lr_mult,
initializer=fluid.initializer.MSRA(),
name=name + "_weights"),
bias_attr=False)
bn_name = name + "_bn"
x = fluid.layers.batch_norm(
input=x,
act=act,
param_attr=ParamAttr(
name=bn_name + "_scale",
learning_rate=norm_lr,
regularizer=L2Decay(0.0)),
bias_attr=ParamAttr(
name=bn_name + "_offset",
learning_rate=norm_lr,
regularizer=L2Decay(0.0)),
            moving_mean_name=bn_name + "_mean",
            moving_variance_name=bn_name + "_variance")
return x
def se_block(self, input, num_channels, reduction_ratio=4, name=None):
lr_idx = self.curr_stage // 3
lr_idx = min(lr_idx, len(self.lr_mult_list) - 1)
lr_mult = self.lr_mult_list[lr_idx]
pool = fluid.layers.pool2d(
input=input, pool_type='avg', global_pooling=True, use_cudnn=False)
stdv = 1.0 / math.sqrt(pool.shape[1] * 1.0)
squeeze = fluid.layers.fc(
input=pool,
size=num_channels // reduction_ratio,
act='relu',
param_attr=ParamAttr(
learning_rate=lr_mult,
initializer=fluid.initializer.Uniform(-stdv, stdv),
name=name + '_1_weights'),
bias_attr=ParamAttr(
name=name + '_1_offset', learning_rate=lr_mult))
stdv = 1.0 / math.sqrt(squeeze.shape[1] * 1.0)
excitation = fluid.layers.fc(
input=squeeze,
size=num_channels,
act=None,
param_attr=ParamAttr(
learning_rate=lr_mult,
initializer=fluid.initializer.Uniform(-stdv, stdv),
name=name + '_2_weights'),
bias_attr=ParamAttr(
name=name + '_2_offset', learning_rate=lr_mult))
excitation = fluid.layers.clip(x=excitation, min=0, max=1)
se_scale = fluid.layers.elementwise_mul(x=input, y=excitation, axis=0)
return se_scale
def depthwise_conv(self,
input,
output,
kernel_size,
stride=1,
relu=False,
name=None):
return self._conv_bn_layer(
input=input,
num_filters=output,
filter_size=kernel_size,
stride=stride,
groups=input.shape[1],
act="relu" if relu else None,
name=name + "_depthwise")
def ghost_module(self,
input,
output,
kernel_size=1,
ratio=2,
dw_size=3,
stride=1,
relu=True,
name=None):
self.output = output
init_channels = int(math.ceil(output / ratio))
new_channels = int(init_channels * (ratio - 1))
primary_conv = self._conv_bn_layer(
input=input,
num_filters=init_channels,
filter_size=kernel_size,
stride=stride,
groups=1,
act="relu" if relu else None,
name=name + "_primary_conv")
cheap_operation = self._conv_bn_layer(
input=primary_conv,
num_filters=new_channels,
filter_size=dw_size,
stride=1,
groups=init_channels,
act="relu" if relu else None,
name=name + "_cheap_operation")
out = fluid.layers.concat([primary_conv, cheap_operation], axis=1)
return out
def ghost_bottleneck(self,
input,
hidden_dim,
output,
kernel_size,
stride,
use_se,
name=None):
inp_channels = input.shape[1]
x = self.ghost_module(
input=input,
output=hidden_dim,
kernel_size=1,
stride=1,
relu=True,
name=name + "_ghost_module_1")
if self.block_stride == 4 and stride == 2:
self.block_stride += 1
if self.block_stride in self.feature_maps:
self.end_points.append(x)
if stride == 2:
x = self.depthwise_conv(
input=x,
output=hidden_dim,
kernel_size=kernel_size,
stride=stride,
relu=False,
name=name + "_depthwise")
if use_se:
x = self.se_block(
input=x, num_channels=hidden_dim, name=name + "_se")
x = self.ghost_module(
input=x,
output=output,
kernel_size=1,
relu=False,
name=name + "_ghost_module_2")
if stride == 1 and inp_channels == output:
shortcut = input
else:
shortcut = self.depthwise_conv(
input=input,
output=inp_channels,
kernel_size=kernel_size,
stride=stride,
relu=False,
name=name + "_shortcut_depthwise")
shortcut = self._conv_bn_layer(
input=shortcut,
num_filters=output,
filter_size=1,
stride=1,
groups=1,
act=None,
name=name + "_shortcut_conv")
return fluid.layers.elementwise_add(x=x, y=shortcut, axis=-1)
def _extra_block_dw(self,
input,
num_filters1,
num_filters2,
stride,
name=None):
pointwise_conv = self._conv_bn_layer(
input=input,
filter_size=1,
num_filters=int(num_filters1),
stride=1,
act='relu6',
name=name + "_extra1")
depthwise_conv = self._conv_bn_layer(
input=pointwise_conv,
filter_size=3,
num_filters=int(num_filters2),
stride=stride,
groups=int(num_filters1),
act='relu6',
name=name + "_extra2_dw")
normal_conv = self._conv_bn_layer(
input=depthwise_conv,
filter_size=1,
num_filters=int(num_filters2),
stride=1,
act='relu6',
name=name + "_extra2_sep")
return normal_conv
def _make_divisible(self, v, divisor=8, min_value=None):
if min_value is None:
min_value = divisor
new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
if new_v < 0.9 * v:
new_v += divisor
return new_v
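    # Worked examples (divisor=8): 17 -> 16 and 20 -> 24 (round to the nearest
    # multiple of 8), while 11 -> 16 because the 0.9 rule forbids rounding
    # down by more than ~10%.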
def __call__(self, input):
# build first layer
output_channel = int(self._make_divisible(16 * self.scale, 4))
x = self._conv_bn_layer(
input=input,
num_filters=output_channel,
filter_size=3,
stride=2,
groups=1,
act="relu",
name="conv1")
# build inverted residual blocks
idx = 0
for k, exp_size, c, use_se, s in self.cfgs:
if s == 2:
self.block_stride += 1
if self.block_stride in self.feature_maps:
self.end_points.append(x)
output_channel = int(self._make_divisible(c * self.scale, 4))
hidden_channel = int(self._make_divisible(exp_size * self.scale, 4))
x = self.ghost_bottleneck(
input=x,
hidden_dim=hidden_channel,
output=output_channel,
kernel_size=k,
stride=s,
use_se=use_se,
name="_ghostbottleneck_" + str(idx))
idx += 1
self.curr_stage += 1
        self.block_stride += 1
        if self.block_stride in self.feature_maps:
            self.end_points.append(x)  # fixed: 'conv' was undefined here; the current tensor is x
# extra block
# check whether conv_extra is needed
if self.block_stride < max(self.feature_maps):
conv_extra = self._conv_bn_layer(
x,
num_filters=self._make_divisible(self.scale * self.cfgs[-1][1]),
filter_size=1,
stride=1,
groups=1,
act='relu6',
name='conv' + str(idx + 2))
self.block_stride += 1
if self.block_stride in self.feature_maps:
self.end_points.append(conv_extra)
idx += 1
for block_filter in self.extra_block_filters:
conv_extra = self._extra_block_dw(conv_extra, block_filter[0],
block_filter[1], 2,
'conv' + str(idx + 2))
self.block_stride += 1
if self.block_stride in self.feature_maps:
self.end_points.append(conv_extra)
idx += 1
        return OrderedDict([('ghost_{}'.format(idx), feat)
                            for idx, feat in enumerate(self.end_points)])
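# A minimal usage sketch (arguments are illustrative; the input is assumed to
# be a paddle.fluid image tensor):
#
#   backbone = GhostNet(scale=1.0, feature_maps=[5, 6, 7, 8, 9, 10])
#   feats = backbone(image)  # OrderedDict of features keyed 'ghost_0', 'ghost_1', ...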
py | b40ca8f07068421118fb8e46d3ac36932e74a4c0 | # Write a program that plays odds-or-evens ("par ou ímpar") against the computer. The game only ends when the player loses, showing the total number of consecutive wins at the end.
from random import randint
cont = vitorias = soma = 0
flag = True  # initialize with a boolean value (was "flag = bool", which assigns the type itself)
print('༺═─────────────═༻')
print(' Par ou ímpar')
print('༺═─────────────═༻')
while True:
opçao = str(input('Par ou Ímpar? [P/I]: ')).strip().upper()
if opçao == 'P':
numero1 = int(input('Digite um valor: '))
numero2 = randint(0, 10)
print('❢◥ ▬▬▬▬▬▬▬▬▬▬▬▬▬ ◆ ▬▬▬▬▬▬▬▬▬▬▬▬▬ ◤❢')
print(f'Você jogou \033[34m{numero1}\033[m e o computador jogou \033[31m{numero2}\033[m')
print('❢◥ ▬▬▬▬▬▬▬▬▬▬▬▬▬ ◆ ▬▬▬▬▬▬▬▬▬▬▬▬▬ ◤❢')
soma = numero1 + numero2
print(f'O total é {soma}')
if soma % 2 != 0:
print('Você perdeu')
print(f'Você venceu o total de {vitorias} vezes')
flag = False
break
else:
print('Você venceu')
vitorias += 1
flag = True
if opçao == 'I':
numero1 = int(input('Digite um valor: '))
numero2 = randint(1, 10)
print('❢◥ ▬▬▬▬▬▬▬▬▬ ◆ ▬▬▬▬▬▬▬▬▬ ◤❢')
print(f'Você jogou {numero1} e o computador jogou {numero2}')
print('❢◥ ▬▬▬▬▬▬▬▬▬ ◆ ▬▬▬▬▬▬▬▬▬ ◤❢')
soma = numero1 + numero2
print(f'O total é {soma}')
if soma % 2 == 0:
print('você perdeu')
print(f'Você venceu o total de {vitorias} vezes')
flag = False
break
else:
print('Você venceu')
vitorias += 1
flag = True
if flag == False:
print(f'Você venceu o total de {vitorias} vezes')
break
py | b40ca91b9e4b9a6a78fd35cb510622b6a3561694 | def limpar(cursor, banco10):
    """Clears the temporary order tables.

    Note: this preserves the original behaviour, which deletes only the rows
    matching the *last* id_cliente fetched from each table.
    """
    tables = [
        "temp_inteiro", "temp_metade1", "temp_terco1", "temp_quarto1",
        "temp_adc", "semAdc", "temp_esfihas", "temp_lata", "temp_600",
        "temp_1L", "temp_1Lmeio", "temp_2L", "temp_2Lmeio", "temp_outros",
    ]
    for table in tables:
        cursor.execute("select id_cliente from %s" % table)
        rows = cursor.fetchall()
        if rows:
            last_id = rows[-1][0]
            cursor.execute("delete from %s where id_cliente = %s" % (table, last_id))
            banco10.commit()
py | b40ca9767d8b72d02d034181ae4aaa0d0019f9f4 | """
API operations on a history.
.. seealso:: :class:`galaxy.model.History`
"""
import pkg_resources
pkg_resources.require( "Paste" )
from galaxy import exceptions
from galaxy.web import _future_expose_api as expose_api
from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
from galaxy.web import _future_expose_api_raw as expose_api_raw
from galaxy.web.base.controller import BaseAPIController
from galaxy.web.base.controller import UsesHistoryMixin
from galaxy.web.base.controller import UsesTagsMixin
from galaxy.web.base.controller import ExportsHistoryMixin
from galaxy.web.base.controller import ImportsHistoryMixin
from galaxy.managers import histories, citations
from galaxy import util
from galaxy.util import string_as_bool
from galaxy.util import restore_text
from galaxy.web import url_for
import logging
log = logging.getLogger( __name__ )
class HistoriesController( BaseAPIController, UsesHistoryMixin, UsesTagsMixin,
ExportsHistoryMixin, ImportsHistoryMixin ):
def __init__( self, app ):
super( HistoriesController, self ).__init__( app )
self.citations_manager = citations.CitationsManager( app )
self.mgrs = util.bunch.Bunch(
histories=histories.HistoryManager()
)
def _decode_id( self, trans, id ):
try:
return trans.security.decode_id( id )
except:
raise exceptions.MalformedId( "Malformed History id ( %s ) specified, unable to decode"
% ( str( id ) ), type='error' )
@expose_api_anonymous
def index( self, trans, deleted='False', **kwd ):
"""
index( trans, deleted='False' )
* GET /api/histories:
return undeleted histories for the current user
* GET /api/histories/deleted:
return deleted histories for the current user
.. note:: Anonymous users are allowed to get their current history
:type deleted: boolean
:param deleted: if True, show only deleted histories, if False, non-deleted
:rtype: list
:returns: list of dictionaries containing summary history information
"""
#TODO: query (by name, date, etc.)
rval = []
deleted = string_as_bool( deleted )
histories = self.mgrs.histories.by_user( trans, user=trans.user, only_deleted=deleted )
for history in histories:
item = history.to_dict( value_mapper={ 'id': trans.security.encode_id } )
item['url'] = url_for( 'history', id=trans.security.encode_id( history.id ) )
rval.append( item )
return rval
@expose_api_anonymous
def show( self, trans, id, deleted='False', **kwd ):
"""
show( trans, id, deleted='False' )
* GET /api/histories/{id}:
return the history with ``id``
* GET /api/histories/deleted/{id}:
return the deleted history with ``id``
* GET /api/histories/most_recently_used:
return the most recently used history
:type id: an encoded id string
:param id: the encoded id of the history to query or the string 'most_recently_used'
:type deleted: boolean
:param deleted: if True, allow information on a deleted history to be shown.
:rtype: dictionary
:returns: detailed history information from
:func:`galaxy.web.base.controller.UsesHistoryMixin.get_history_dict`
"""
history_id = id
deleted = string_as_bool( deleted )
if history_id == "most_recently_used":
if not trans.user or len( trans.user.galaxy_sessions ) <= 0:
return None
# Most recent active history for user sessions, not deleted
history = trans.user.galaxy_sessions[0].histories[-1].history
else:
history = self.mgrs.histories.get( trans, self._decode_id( trans, history_id ),
check_ownership=False, check_accessible=True, deleted=deleted )
history_data = self.get_history_dict( trans, history )
history_data[ 'contents_url' ] = url_for( 'history_contents', history_id=history_id )
return history_data
@expose_api_anonymous
def citations( self, trans, history_id, **kwd ):
history = self.mgrs.histories.get( trans, self._decode_id( trans, history_id ), check_ownership=False, check_accessible=True )
tool_ids = set([])
for dataset in history.datasets:
job = dataset.creating_job
if not job:
continue
tool_id = job.tool_id
if not tool_id:
continue
tool_ids.add(tool_id)
return map( lambda citation: citation.to_dict( "bibtex" ), self.citations_manager.citations_for_tool_ids( tool_ids ) )
@expose_api
def create( self, trans, payload, **kwd ):
"""
create( trans, payload )
* POST /api/histories:
create a new history
:type payload: dict
:param payload: (optional) dictionary structure containing:
* name: the new history's name
* history_id: the id of the history to copy
* archive_source: the url that will generate the archive to import
* archive_type: 'url' (default)
:rtype: dict
:returns: element view of new history
"""
hist_name = None
if payload.get( 'name', None ):
hist_name = restore_text( payload['name'] )
copy_this_history_id = payload.get( 'history_id', None )
if "archive_source" in payload:
archive_source = payload[ "archive_source" ]
archive_type = payload.get( "archive_type", "url" )
self.queue_history_import( trans, archive_type=archive_type, archive_source=archive_source )
return {}
new_history = None
# if a history id was passed, copy that history
if copy_this_history_id:
original_history = self.mgrs.histories.get( trans, self._decode_id( trans, copy_this_history_id ),
check_ownership=False, check_accessible=True )
hist_name = hist_name or ( "Copy of '%s'" % original_history.name )
new_history = original_history.copy( name=hist_name, target_user=trans.user )
# otherwise, create a new empty history
else:
new_history = trans.app.model.History( user=trans.user, name=hist_name )
trans.sa_session.add( new_history )
trans.sa_session.flush()
item = {}
item = self.get_history_dict( trans, new_history )
item['url'] = url_for( 'history', id=item['id'] )
return item
@expose_api
def delete( self, trans, id, **kwd ):
"""
delete( self, trans, id, **kwd )
* DELETE /api/histories/{id}
delete the history with the given ``id``
.. note:: Stops all active jobs in the history if purge is set.
:type id: str
:param id: the encoded id of the history to delete
:type kwd: dict
:param kwd: (optional) dictionary structure containing:
* payload: a dictionary itself containing:
* purge: if True, purge the history and all of its HDAs
:rtype: dict
:returns: an error object if an error occurred or a dictionary containing:
* id: the encoded id of the history,
* deleted: if the history was marked as deleted,
* purged: if the history was purged
"""
history_id = id
# a request body is optional here
purge = False
if kwd.get( 'payload', None ):
purge = string_as_bool( kwd['payload'].get( 'purge', False ) )
rval = { 'id' : history_id }
history = self.mgrs.histories.get( trans, self._decode_id( trans, history_id ),
check_ownership=True, check_accessible=False )
history.deleted = True
if purge:
if not trans.app.config.allow_user_dataset_purge and not trans.user_is_admin():
raise exceptions.ConfigDoesNotAllowException( 'This instance does not allow user dataset purging' )
# First purge all the datasets
for hda in history.datasets:
if hda.purged:
continue
if hda.creating_job_associations:
job = hda.creating_job_associations[0].job
job.mark_deleted( self.app.config.track_jobs_in_database )
self.app.job_manager.job_stop_queue.put( job.id )
hda.purged = True
trans.sa_session.add( hda )
trans.sa_session.flush()
if hda.dataset.user_can_purge:
try:
hda.dataset.full_delete()
trans.sa_session.add( hda.dataset )
except:
pass
# flush now to preserve deleted state in case of later interruption
trans.sa_session.flush()
# Now mark the history as purged
history.purged = True
            trans.sa_session.add( history )
rval[ 'purged' ] = True
trans.sa_session.flush()
rval[ 'deleted' ] = True
return rval
@expose_api
def undelete( self, trans, id, **kwd ):
"""
undelete( self, trans, id, **kwd )
* POST /api/histories/deleted/{id}/undelete:
undelete history (that hasn't been purged) with the given ``id``
:type id: str
:param id: the encoded id of the history to undelete
:rtype: str
:returns: 'OK' if the history was undeleted
"""
history_id = id
history = self.mgrs.histories.get( trans, self._decode_id( trans, history_id ),
check_ownership=True, check_accessible=False, deleted=True )
history.deleted = False
trans.sa_session.add( history )
trans.sa_session.flush()
return 'OK'
@expose_api
def update( self, trans, id, payload, **kwd ):
"""
update( self, trans, id, payload, **kwd )
* PUT /api/histories/{id}
updates the values for the history with the given ``id``
:type id: str
        :param id: the encoded id of the history to update
:type payload: dict
:param payload: a dictionary containing any or all the
fields in :func:`galaxy.model.History.to_dict` and/or the following:
* annotation: an annotation for the history
:rtype: dict
:returns: an error object if an error occurred or a dictionary containing
any values that were different from the original and, therefore, updated
"""
#TODO: PUT /api/histories/{encoded_history_id} payload = { rating: rating } (w/ no security checks)
history_id = id
history = self.mgrs.histories.get( trans, self._decode_id( trans, history_id ),
check_ownership=True, check_accessible=True )
# validation handled here and some parsing, processing, and conversion
payload = self._validate_and_parse_update_payload( payload )
# additional checks here (security, etc.)
changed = self.set_history_from_dict( trans, history, payload )
return changed
@expose_api
def archive_export( self, trans, id, **kwds ):
"""
export_archive( self, trans, id, payload )
* PUT /api/histories/{id}/exports:
start job (if needed) to create history export for corresponding
history.
:type id: str
        :param id: the encoded id of the history to export
:rtype: dict
:returns: object containing url to fetch export from.
"""
# PUT instead of POST because multiple requests should just result
# in one object being created.
history_id = id
history = self.mgrs.histories.get( trans, self._decode_id( trans, history_id ),
check_ownership=False, check_accessible=True )
jeha = history.latest_export
up_to_date = jeha and jeha.up_to_date
if 'force' in kwds:
            up_to_date = False  # temp hack to force rebuild every time during dev
if not up_to_date:
# Need to create new JEHA + job.
gzip = kwds.get( "gzip", True )
include_hidden = kwds.get( "include_hidden", False )
include_deleted = kwds.get( "include_deleted", False )
self.queue_history_export( trans, history, gzip=gzip, include_hidden=include_hidden, include_deleted=include_deleted )
if up_to_date and jeha.ready:
jeha_id = trans.security.encode_id( jeha.id )
return dict( download_url=url_for( "history_archive_download", id=id, jeha_id=jeha_id ) )
else:
# Valid request, just resource is not ready yet.
trans.response.status = "202 Accepted"
return ''
@expose_api_raw
def archive_download( self, trans, id, jeha_id, **kwds ):
"""
export_download( self, trans, id, jeha_id )
* GET /api/histories/{id}/exports/{jeha_id}:
If ready and available, return raw contents of exported history.
Use/poll "PUT /api/histories/{id}/exports" to initiate the creation
of such an export - when ready that route will return 200 status
code (instead of 202) with a JSON dictionary containing a
`download_url`.
"""
        # Seems silly to put jeha_id in here, but we want GET to be immutable,
        # and this accomplishes that.
history_id = id
history = self.mgrs.histories.get( trans, trans.security.decode_id( history_id ),
check_ownership=False, check_accessible=True )
matching_exports = filter( lambda e: trans.security.encode_id( e.id ) == jeha_id, history.exports )
if not matching_exports:
raise exceptions.ObjectNotFound()
jeha = matching_exports[ 0 ]
if not jeha.ready:
            # User should not have been given this URL; PUT export should have
            # returned a 202.
raise exceptions.MessageException( "Export not available or not yet ready." )
return self.serve_ready_history_export( trans, jeha )
def _validate_and_parse_update_payload( self, payload ):
"""
        Validate and parse incoming data payload for a history.
"""
# This layer handles (most of the stricter idiot proofing):
# - unknown/unallowed keys
# - changing data keys from api key to attribute name
# - protection against bad data form/type
# - protection against malicious data content
# all other conversions and processing (such as permissions, etc.) should happen down the line
# keys listed here don't error when attempting to set, but fail silently
# this allows PUT'ing an entire model back to the server without attribute errors on uneditable attrs
valid_but_uneditable_keys = (
'id', 'model_class', 'nice_size', 'contents_url', 'purged', 'tags',
'state', 'state_details', 'state_ids'
)
validated_payload = {}
for key, val in payload.items():
if val is None:
continue
if key in ( 'name', 'genome_build', 'annotation' ):
validated_payload[ key ] = self.validate_and_sanitize_basestring( key, val )
            elif key in ( 'deleted', 'published', 'importable' ):
validated_payload[ key ] = self.validate_boolean( key, val )
elif key == 'tags':
validated_payload[ key ] = self.validate_and_sanitize_basestring_list( key, val )
elif key not in valid_but_uneditable_keys:
pass
#log.warn( 'unknown key: %s', str( key ) )
return validated_payload
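    # An illustrative payload accepted by update()/_validate_and_parse_update_payload
    # (values are made up):
    #
    #   { "name": "My analysis", "annotation": "QC pass 2",
    #     "tags": ["rna-seq"], "published": true }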
py | b40ca9abb7e0d501ef254dae32b571d3265a9219 | import json
import logging
logger = logging.getLogger(__name__)
DEFAULT_CFG = """{
"client_id": "",
"oauth": "oauth:",
"username": "BOT_USERNAME",
"secret": "",
"user_id": 123456789,
"log_level":"INFO",
"bttv": true
}
"""
_no_default = object()
class ReloadException(Exception):
pass
class ConfigError(Exception):
pass
class Config():
# Singleton stuff
instance = None
    def __new__(cls):
        if not Config.instance:
            Config.instance = Config.__Config()
        return Config.instance
def __getattr__(self, name):
return getattr(self.instance, name)
# Actual class def
class __Config():
def __init__(self):
self._config = {}
self._load()
def __contains__(self, key):
return key in self._config.keys()
def __getitem__(self, key):
if key not in self._config.keys():
raise ValueError(f"{key} not configured")
return self._config[key]
def _load(self):
try:
with open('config.json') as f:
self._config = json.load(f)
except Exception:
with open('config.json', 'w') as f:
f.write(DEFAULT_CFG)
raise ConfigError("No config.json found")
return self
def get(self, key, default=_no_default):
if key not in self._config.keys():
if default is _no_default:
raise ValueError(f"{key} not configured")
else:
return default
return self._config[key]
|
py | b40ca9bab6a345f435b8126075433bd1d8a406d2 | from textacy import load_spacy_lang
from textacy import cache
def test_cache_clear():
cache.clear()
assert len(cache.LRU_CACHE.keys()) == 0
def test_cache_size():
# check cache size; low thresh but still larger than if the size of
# loaded data was not being correctly assessed
# NOTE: should come *after* the function that clears the cache
_ = load_spacy_lang("en_core_web_sm")
assert cache.LRU_CACHE.currsize >= 1000
py | b40caa35e7bb4d61d65583b8113b1b6d6f12a74f | ''' Encapsulation : Part 1
Encapsulation is the process of restricting access to the methods and variables in a class in order to prevent direct data modification, which protects against accidental changes.
Encapsulation basically allows the internal representation of an object to be hidden from view outside of the object's definition.
Public methods and variables can be accessed from anywhere in the program.
'''
py | b40caa58f7027dd022937a8452d8450429971b65 | """Classic graph generators from NetworkX."""
import snapx as sx
from snapx.utils import nodes_or_number
from snapx.utils import pairwise
@nodes_or_number(0)
def empty_graph(n=0, create_using=None, default=sx.Graph):
"""PORTED FROM NETWORKX
Returns the empty graph with n nodes and zero edges.
Parameters
----------
n : int or iterable container of nodes (default = 0)
If n is an integer, nodes are from `range(n)`.
If n is a container of nodes, those nodes appear in the graph.
create_using : Graph Instance, Constructor or None
Indicator of type of graph to return.
If a Graph-type instance, then clear and use it.
If None, use the `default` constructor.
If a constructor, call it to create an empty graph.
default : Graph constructor (optional, default = nx.Graph)
The constructor to use if create_using is None.
If None, then nx.Graph is used.
This is used when passing an unknown `create_using` value
through your home-grown function to `empty_graph` and
you want a default constructor other than nx.Graph.
Examples
--------
>>> G = nx.empty_graph(10)
>>> G.number_of_nodes()
10
>>> G.number_of_edges()
0
>>> G = nx.empty_graph("ABC")
>>> G.number_of_nodes()
3
>>> sorted(G)
['A', 'B', 'C']
Notes
-----
The variable create_using should be a Graph Constructor or a
"graph"-like object. Constructors, e.g. `nx.Graph` or `nx.MultiGraph`
will be used to create the returned graph. "graph"-like objects
will be cleared (nodes and edges will be removed) and refitted as
an empty "graph" with nodes specified in n. This capability
is useful for specifying the class-nature of the resulting empty
"graph" (i.e. Graph, DiGraph, MyWeirdGraphClass, etc.).
The variable create_using has three main uses:
Firstly, the variable create_using can be used to create an
empty digraph, multigraph, etc. For example,
>>> n = 10
>>> G = nx.empty_graph(n, create_using=nx.DiGraph)
will create an empty digraph on n nodes.
Secondly, one can pass an existing graph (digraph, multigraph,
etc.) via create_using. For example, if G is an existing graph
(resp. digraph, multigraph, etc.), then empty_graph(n, create_using=G)
will empty G (i.e. delete all nodes and edges using G.clear())
and then add n nodes and zero edges, and return the modified graph.
Thirdly, when constructing your home-grown graph creation function
you can use empty_graph to construct the graph by passing a user
defined create_using to empty_graph. In this case, if you want the
default constructor to be other than nx.Graph, specify `default`.
>>> def mygraph(n, create_using=None):
... G = nx.empty_graph(n, create_using, nx.MultiGraph)
... G.add_edges_from([(0, 1), (0, 1)])
... return G
>>> G = mygraph(3)
>>> G.is_multigraph()
True
>>> G = mygraph(3, nx.Graph)
>>> G.is_multigraph()
False
See also create_empty_copy(G).
"""
if create_using is None:
G = default()
elif hasattr(create_using, 'adj'):
# create_using is a SnapX style Graph
create_using.clear()
G = create_using
else:
# try create_using as constructor
G = create_using()
n_name, nodes = n
G.add_nodes_from(nodes)
return G
@nodes_or_number(0)
def path_graph(n, create_using=None):
"""PORTED FROM NETWORKX
Returns the Path graph `P_n` of linearly connected nodes.
Parameters
----------
n : int or iterable
If an integer, node labels are 0 to n - 1.
If an iterable of nodes, those nodes appear in the graph in order.
create_using : NetworkX graph constructor, optional (default=nx.Graph)
Graph type to create. If graph instance, then cleared before populated.
"""
n_name, nodes = n
G = empty_graph(nodes, create_using)
G.add_edges_from(pairwise(nodes))
return G
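# Small demonstration of the generators above (illustrative; assumes the
# snapx Graph exposes the networkx-style counting API referenced in the
# docstrings). A path on n nodes has exactly n - 1 edges.
if __name__ == "__main__":
    G = path_graph(4)
    assert G.number_of_nodes() == 4
    assert G.number_of_edges() == 3
    H = empty_graph(5)
    assert H.number_of_edges() == 0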
|
py | b40caa9f37dcbed9e3ac96e72bd1a0bb1a5068cf | import unittest
class ClipboardTestCase(unittest.TestCase):
def setUp(self):
from kivy.core.clipboard import Clipboard
self._clippy = Clipboard
clippy_types = Clipboard.get_types()
cliptype = clippy_types[0]
if 'UTF8_STRING' in clippy_types:
cliptype = 'UTF8_STRING'
self._cliptype = cliptype
def test_clipboard_not_dummy(self):
clippy = self._clippy
if clippy.__class__.__name__ == 'ClipboardDummy':
self.fail('Something went wrong: the "dummy" clipboard is being used')
def test_clipboard_paste(self):
clippy = self._clippy
try:
clippy.paste()
except Exception:
self.fail(
'Cannot get data from the clipboard')
def test_clipboard_copy(self):
clippy = self._clippy
try:
clippy.copy("Hello World")
except Exception:
self.fail(
'Cannot put data to the clipboard')
|
py | b40caade4d617816bc05843efc03675ee4a87be1 | import socket
from urllib.parse import urlparse
import re
import MockHttpClient
class Parameter:
url = None
verbose = False
headers = None
bodyData = None
writeFileName = None
port = 80
@staticmethod
def reInit():
Parameter.url = None
Parameter.verbose = False
Parameter.headers = None
Parameter.bodyData = None
Parameter.writeFileName = None
def writeFile(fileName, content):
with open(fileName, 'w') as f:
f.write(content)
print("Write reponse to the file: " + fileName)
def showHelpMenu():
print("httpc is a curl-like application but supports HTTP protocol only." + "\n" +
"Usage: " + "\n" +
"httpc command [arguments]" + "\n" +
"The commands are: " + "\n" +
"get executes a HTTP GET request and prints the response." + "\n" +
"post executes a HTTP POST request and prints the resonse." + "\n" +
"help prints this screen." + "\n")
def sendHttpRequest(command):
if ("-o" in command):
Parameter.writeFileName = command.split(" -o ")[1]
command = command.split(" -o ")[0]
if ("-v" in command):
Parameter.verbose = True
if ("-h" in command):
Parameter.headers = getHeaders(command)
urlString = command.split(" ")[-1]
if("'" in urlString):
Parameter.url = urlString[1:-1]
else:
Parameter.url = urlString
# Get Usage: httpc get [-v] [-h key:value] URL
# Post Usage: httpc post [-v] [-h key:value] [-d inline-data] [-f file] URL
if (command.startswith("get") or command.startswith("post")):
o = urlparse(Parameter.url)
host = o.hostname
if(o.port is None):
port = Parameter.port
else:
port = o.port
while(True):
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((host, port))
if(command.startswith("post")):
if ("-d" in command and "-f" not in command):
infos = command.split(" -d ")[1].split(" ")
Parameter.bodyData = (infos[0] + infos[1])[1:-1]
if ("-f" in command and "-d" not in command):
readFileName = command.split(" -f ")[1].split(" ")[0]
with open(readFileName, 'r') as f:
Parameter.bodyData = f.read()
request = MockHttpClient.HttpRequest(host, o.path, Parameter.bodyData, Parameter.headers)
#print(request.getPost().decode('utf-8'))
s.sendall(request.getPost())
else:
request = MockHttpClient.HttpRequest(host, o.path, o.query, Parameter.headers)
#print(request.getGet().decode('utf-8'))
s.sendall(request.getGet())
data = recvall(s)
response = MockHttpClient.HttpResponse(data)
if(response.code == MockHttpClient.HttpCode.redirect):
host = response.location
else:
break
if Parameter.verbose:
print(response.text)
if(Parameter.writeFileName != None):
writeFile(Parameter.writeFileName, response.text)
else:
print(response.body)
if(Parameter.writeFileName != None):
writeFile(Parameter.writeFileName, response.text)
# Invalid command
else:
print("Invalid command.")
def getHeaders(command):
pairs = re.findall("-h (.+?:.+?) ", command)
return "\r\n".join(pairs)
#return command.split(" -h ")[1].split(" ")[0]
def recvall(sock):
BUFF_SIZE = 1024 # 1 KiB
data = b''
while True:
part = sock.recv(BUFF_SIZE)
data += part
if len(part) < BUFF_SIZE:
# either 0 or end of data
break
return data
def execute(command):
if ("help" in command):
showHelpMenu()
else:
sendHttpRequest(command)
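# Example command strings the parser above is meant to handle
# (illustrative; httpbin.org is just a placeholder endpoint):
# get -v 'http://httpbin.org/get?course=networking'
# get -v -o result.txt http://httpbin.org/get
# post -h Content-Type:application/json -d '{"Assignment": 1}' http://httpbin.org/post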
# program entrance
while True:
command = input("\nplease enter the command; enter 'quit' or 'exit' or 'bye' to quit:\n" + "httpc ")
if("quit" in command or "exit" in command or "bye" in command):
break
Parameter.reInit()
execute(command)
|
py | b40cab48f2d1f34c938ca8f7b8c368c88066d111 | from typing import List
from pydantic import BaseModel, validator
from tracardi.service.plugin.domain.register import Plugin, Spec, MetaData, Form, FormGroup, FormField, FormComponent, \
Documentation, PortDoc
from tracardi.service.plugin.domain.result import Result
from tracardi.service.plugin.runner import ActionRunner
from tracardi.domain.event import Event
from tracardi.domain.profile import Profile
from tracardi.domain.session import Session
class DeleteTraitConfiguration(BaseModel):
delete: List[str]
@validator("delete")
def list_must_not_be_empty(cls, value):
if not len(value) > 0:
raise ValueError("List to delete must not be empty.")
return value
def validate(config: dict):
return DeleteTraitConfiguration(**config)
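# Illustrative sketch of the validation above (the dotted paths are
# made-up examples, not required names):
# validate({"delete": ["event@properties.temp", "profile@traits.email"]})
# -> DeleteTraitConfiguration(delete=[...])
# validate({"delete": []})
# -> raises pydantic's ValidationError via list_must_not_be_empty.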
class DeleteTraitAction(ActionRunner):
def __init__(self, **kwargs):
self.config = validate(kwargs)
async def run(self, payload: dict):
dot = self._get_dot_accessor(payload if isinstance(payload, dict) else None)
for value in self.config.delete:
try:
del dot[value]
except KeyError as e:
self.console.warning("Could not delete value {} due to error: {}".format(value, str(e)))
if self.event.metadata.profile_less is False:
profile = Profile(**dot.profile)
self.profile.replace(profile)
if 'id' in dot.session:
session = Session(**dot.session)
self.session.replace(session)
event = Event(**dot.event)
self.event.replace(event)
return Result(port="payload", value=payload)
def register() -> Plugin:
return Plugin(
start=False,
spec=Spec(
module='tracardi.process_engine.action.v1.traits.delete_trait_action',
className='DeleteTraitAction',
inputs=['payload'],
outputs=["payload"],
init={
"delete": []
},
form=Form(groups=[
FormGroup(
fields=[
FormField(
id="delete",
name="Delete fields",
description="Type a list of fields that must be deleted.",
component=FormComponent(type="listOfDotPaths", props={"label": "Path to field"})
)
]
),
]),
version='0.1',
license="MIT",
author="Risto Kowaczewski"
),
metadata=MetaData(
name='Delete Trait',
desc='Deletes traits from profile or payload. Accepts dotted notation as definition of a field to be '
'deleted. Returns payload.',
icon='remove',
group=["Data processing"],
documentation=Documentation(
inputs={
"payload": PortDoc(desc="This port takes any JSON-like object.")
},
outputs={
"payload": PortDoc(desc="This port returns given payload with selected fields deleted.")
}
)
)
)
|
py | b40cac5051b511f75d6fd21d5e17b2612cf5ada2 | # yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
dict(type='TensorboardLoggerHook')
])
# yapf:enable
dist_params = dict(backend='nccl')
log_level = 'INFO'
load_from = None
resume_from = None
workflow = [('train', 1)]
|
py | b40cb01f0da5f094d2a8cddc64109323fc993c9e | """
rosegraphics.py - a simple Graphics library for Python.
Its key feature is:
-- USING this library provides a simple introduction to USING objects.
Other key features include:
-- It has a rich set of classes, methods and instance variables.
-- In addition to classes like Circles that are natural for
students, it has other kinds of classes like RoseWindow
and FortuneTeller to provide a richer set of examples
than "just" a graphics library.
-- It allows one to do a reasonable set of graphics operations
with reasonable efficiency. The API mimics Java's Shape API
for the most part.
-- It is built on top of tkinter and its extension ttk
(the standard graphics libraries that come with Python).
-- Unlike tkinter, it is NOT event-driven and hence can be used
before students see that paradigm. (There is a behind-the-scenes
facility for listening for and responding to events,
for those who want to do so.)
-- It attempts to be as bullet-proof as possible, to make it easy
for beginners to use it. In particular, it attempts to provide
reasonable error messages when a student misuses the API.
-- It was inspired by zellegraphics but is a complete re-implementation
that attempts to:
-- Be more bullet-proof.
-- Provide a richer set of examples for using objects.
-- Have an API that is more like Java's Shape API than tkinter's
(older) API.
-- While it can serve as an example for defining classes,
it is NOT intended to do so for beginners.
It is excellent for helping students learn to USE objects;
it is NOT perfect for helping students learn to WRITE CLASSES.
See the MAIN function below for typical examples of its use.
Authors: David Mutchler, Mark Hays, Michael Wollowswki, Matt Boutell,
Chandan Rupakheti, Claude Anderson and their colleagues,
with thanks to John Zelle for inspiration and hints.
First completed version: September 2014.
"""
# FIXME (errors):
# -- clone() does not really make a copy; it just makes a new one
# but without cloning all the attributes.
# -- _ShapeWithCenter claims that things like Ellipse are subclasses,
# but they are not at this point, I think. In general, need to
# deal with overlap between _ShapeWithCenter and _RectangularShape.
# KEEP both of them to have some classes have corner_1 and corner_2
# while others have center and ...
# FIXME (things that have yet to be implemented):
# -- Allow multiple canvasses.
# -- Better close_on ... ala zellegraphics.
# -- Keyboard.
# -- Better Mouse.
# -- Add type hints.
# -- Catch all Exceptions and react appropriately.
# -- Implement unimplemented classes.
# -- Add and allow FortuneTellers and other non-canvas classes.
import tkinter
from tkinter import font as tkinter_font
import time
import turtle
# ----------------------------------------------------------------------
# All the windows that are constructed during a run share the single
# _master_Tk (a tkinter.Tk object)
# as their common root. The first construction of a RoseWindow
# sets this _master_Tk to a Tkinter.Tk object.
# ----------------------------------------------------------------------
_master_Tk = None
# ----------------------------------------------------------------------
# At the risk of not being Pythonic, we provide a simple type-checking
# facility that attempts to provide meaningful error messages to
# students when they pass arguments that are not of the expected type.
# ----------------------------------------------------------------------
class WrongTypeException(Exception):
""" Not yet implemented. """
pass
def check_types(pairs):
""" Not yet implemented fully. """
for pair in pairs:
value = pair[0]
expected_type = pair[1]
if not isinstance(value, expected_type):
raise WrongTypeException(pair)
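# Illustrative sketch of the facility above:
# check_types([(3.5, (int, float)), ('hi', str)]) passes silently, while
# check_types([('oops', int)]) raises WrongTypeException(('oops', int)).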
# ----------------------------------------------------------------------
# Serialization facility
# ----------------------------------------------------------------------
def _serialize_shapes(self):
"""Returns a list of strings representing the shapes in sorted order."""
# Idea: dump all the stats on all shapes, return a sorted list for easy comparison.
# Problem: the order in which keys appear in dictionaries is random!
# Solution: sort keys and manually print
shapes = [shape.__dict__ for shape in self.initial_canvas.shapes]
keys_by_shape = [sorted(shape) for shape in shapes]
for k in range(len(shapes)):
shapes[k]['_method_for_drawing'] = None
shapes[k]['shape_id_by_canvas'] = None
result = []
for k in range(len(keys_by_shape)):
shape = shapes[k]
result.append([])
for key in keys_by_shape[k]:
result[-1].append(str(key) + ":" + str(shape[key]))
result[-1] = str(result[-1])
return "\n".join(sorted(result))
# ----------------------------------------------------------------------
# RoseWindow is the top-level object.
# It starts with a single RoseCanvas.
# ----------------------------------------------------------------------
class RoseWindow(object):
"""
A RoseWindow is a window that pops up when constructed.
It can have RoseWidgets on it and starts by default with
a single RoseCanvas upon which one can draw shapes.
To construct a RoseWindow, use:
- rg.RoseWindow()
or use any of its optional arguments, as in these examples:
window = rg.RoseWindow(400, 300) # 400 wide by 300 tall
window = rg.RoseWindow(400, 300, 'Funny window') # with a title
Instance variables include:
width: width of this window (in pixels)
height: width of this window (in pixels)
title: displayed on the window's bar
widgets: the things attached to this window
"""
def __init__(self, width=400, height=300, title='Rose Graphics',
color='black', canvas_color=None,
make_initial_canvas=True):
"""
Pops up a tkinter.Toplevel window with (by default)
a RoseCanvas (and associated tkinter.Canvas) on it.
Arguments are:
-- width, height: dimensions of the window (in pixels).
-- title: title displayed on the window.
-- color: background color of the window
-- canvas_color: background color of the canvas
displayed on the window by default
-- make_initial_canvas:
-- If True, a default canvas is placed on the window.
-- Otherwise, no default canvas is placed on the window.
If this is the first RoseWindow constructed, then a
hidden Tk object is constructed to control the event loop.
Preconditions:
:type width: int
:type height: int
:type title: str
:type color: Color
:type canvas_color: Color
:type make_initial_canvas: bool
"""
# check_types([(width, (int, float)),
# (height, (int, float)),
# (title, (Color, str)
# --------------------------------------------------------------
# The _master_Tk controls the mainloop for ALL the RoseWindows.
# If this is the first RoseWindow constructed in this run,
# then construct the _master_Tk object.
# --------------------------------------------------------------
global _master_Tk
if not _master_Tk:
_master_Tk = tkinter.Tk()
_master_Tk.withdraw()
else:
time.sleep(0.1) # Helps the window appear on TOP of Eclipse
# --------------------------------------------------------------
# Has a tkinter.Toplevel, and a tkinter.Canvas on the Toplevel.
# --------------------------------------------------------------
self.toplevel = tkinter.Toplevel(_master_Tk,
background=color,
width=width, height=height)
self.toplevel.title(title)
self._is_closed = False
self.toplevel.protocol("WM_DELETE_WINDOW", self.close)
# FIXME: The next two need to be properties to have
# setting happen correctly. Really belongs to RoseCanvas.
# See comments elsewhere on this.
self.width = width
self.height = height
if make_initial_canvas:
self.initial_canvas = RoseCanvas(self, width, height,
canvas_color)
else:
self.initial_canvas = None
self.widgets = [self.initial_canvas]
# FIXME: Do any other tailoring of the toplevel as desired,
# e.g. borderwidth and style...
# --------------------------------------------------------------
# Catch mouse clicks and key presses.
# --------------------------------------------------------------
self.mouse = Mouse()
self.keyboard = Keyboard()
self.toplevel.bind('<Button>', self._on_mouse_click)
self.toplevel.bind('<KeyPress>', self._on_key_press)
self.update()
def close(self):
""" Closes this RoseWindow. """
if self.toplevel:
self.toplevel.destroy()
self.toplevel = None
self.update()
self._is_closed = True
def update(self):
"""
Checks for and handles events that have happened
in this RoseWindow (e.g. mouse clicks, drawing shapes).
"""
global _master_Tk
_master_Tk.update()
def render(self, seconds_to_pause=None):
"""
Updates all the Shapes attached to RoseCanvas objects associated with this RoseWindow, then draws all those Shapes.
After doing so, pauses the given number of seconds.
:type seconds_to_pause: int
"""
for widget in self.widgets:
if type(widget) == RoseCanvas:
widget.render()
self.update()
if seconds_to_pause:
time.sleep(seconds_to_pause)
def close_on_mouse_click(self):
"""
Displays a message at the bottom center of the window and waits for the user to click the mouse anywhere in the window.
Then closes this RoseWindow.
Returns an rg.Point that specifies where the user clicked the mouse.
"""
message = 'To exit, click anywhere in this window'
click_position = self.continue_on_mouse_click(message=message,
close_it=True)
return click_position
def continue_on_mouse_click(self,
message='To continue, click anywhere in this window',
x_position=None,
y_position=None,
close_it=False,
erase_it=True):
"""
Displays a message at the bottom center of the window and waits for the user to click the mouse, then erases the message.
Optional parameters let you:
-- Display a different message
-- Place the message at a different place in the window (xpos and ypos are as in Text)
-- Close the window after the mouse is clicked (and ignore the GraphicsError that results if the user instead chooses to click the X in the window)
-- NOT erase the message when done
"""
if self._is_closed:
return
if x_position is None:
x_position = self.width / 2
if y_position is None:
y_position = self.height - 20
anchor_point = Point(x_position, y_position)
text = Text(anchor_point, message)
# FIXME: Really should do all this on a per-RoseCanvas basis.
if self.initial_canvas:
text.attach_to(self.initial_canvas)
self.initial_canvas._renderShape(text, render_NOW=True)
click_position = self.get_next_mouse_click()
if erase_it and self.initial_canvas:
text.detach_from(self.initial_canvas)
if close_it:
self.close() # then close the window
return click_position
def get_next_mouse_click(self):
"""
Waits for the user to click in the window.
Then returns the rg.Point that represents the point where the user clicked.
Example:
If this method is called and then the user clicks near the upper-right corner of a 300 x 500 window,
this function would return something like rg.Point(295, 5).
"""
self.mouse.position = None
while True:
if self._is_closed:
return None
if self.mouse.position is not None:
break
self.update()
time.sleep(.05) # allow time for other events to be handled
click_point = self.mouse.position
self.mouse.position = None
return click_point
def _on_mouse_click(self, event):
self.mouse._update(event)
def _on_key_press(self, event):
self.keyboard._update(event)
# def add_canvas(self, width=None, height=None, background_color=0):
# FIXME: Set defaults based on the main canvas.
# new_canvas = RoseCanvas(self, background_color='white')
# self.widgets.append(new_canvas)
#
# _root.update()
def __serialize_shapes(self):
"""Returns a list of strings representing the shapes in sorted order."""
return _serialize_shapes(self)
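# Illustrative end-to-end sketch of the class above (assumes the module
# is imported as rg, matching the docstring examples in this file):
#
# window = rg.RoseWindow(400, 300, 'Demo')
# circle = rg.Circle(rg.Point(200, 150), 40)
# circle.fill_color = 'green'
# circle.attach_to(window)
# window.render()
# window.close_on_mouse_click()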
class RoseWidget():
"""
A Widget is a thing that one can put on a Window,
e.g. a Canvas, FortuneTeller, etc.
"""
def __init__(self, window):
self._window = window
def get_window(self):
return self._window
class RoseCanvas(RoseWidget):
"""
A RoseCanvas is a RoseWidget (i.e., a thing on a RoseWindow)
upon which one can draw shapes and other Drawable things.
"""
defaults = {'colors': [None, 'yellow', 'light blue', 'dark grey']}
count = 0
def __init__(self, window, width=200, height=200,
background_color=0):
super().__init__(window)
RoseCanvas.count = RoseCanvas.count + 1
# FIXME: Deal with default background colors.
# FIXME: Store background color as a property
# so that modifying it changes the tkinter canvas.
# Ditto width and height.
# if background_color == 0:
# index = RoseCanvas.count % len(defaults['colors'])
# self.background_color = defaults['colors'][index]
# else:
# self.background_color = background_color
tk_canvas = tkinter.Canvas(window.toplevel,
width=width, height=height,
background=background_color)
self._tkinter_canvas = tk_canvas
# FIXME: Automate gridding better.
self._tkinter_canvas.grid(padx=5, pady=5)
self.shapes = []
def render(self, seconds_to_pause=None):
"""
Updates all the Shapes attached to this RoseCanvas, then draws all those Shapes.
After doing so, pauses the given number of seconds.
:type seconds_to_pause: int
"""
self._update_shapes()
self._window.update()
if seconds_to_pause:
time.sleep(seconds_to_pause)
def _renderShape(self, shape, render_NOW=False):
"""Renders a shape."""
coordinates = shape._get_coordinates_for_drawing()
options = shape._get_options_for_drawing()
if shape.shape_id_by_canvas[self] is None:
shape.shape_id_by_canvas[self] = \
shape._method_for_drawing(self._tkinter_canvas, *coordinates)
try:
self._tkinter_canvas.coords(shape.shape_id_by_canvas[self],
*coordinates)
except tkinter.TclError:
msg = 'Could not place the shape\n'
msg += 'on the given window.\n'
msg += 'Did you accidentally close a window\n'
msg += 'that later needed to be rendered again?'
raise Exception(msg) from None
self._tkinter_canvas.itemconfigure(shape.shape_id_by_canvas[self],
options)
if render_NOW:
# redraw NOW
self._window.update()
def _draw(self, shape):
"""Queues a shape for being drawn. Does NOT draw it just yet."""
shapeInList = False
for listShape in self.shapes:
if listShape is shape:
shapeInList = True
break
if not shapeInList:
shape.shape_id_by_canvas[self] = None
self.shapes.append(shape)
def _undraw(self, shape):
if shape in self.shapes:
for i in range(len(self.shapes)):
if self.shapes[i] is shape:
self._tkinter_canvas.delete(shape.shape_id_by_canvas[self])
del self.shapes[i]
break
def _update_shapes(self):
for shape in self.shapes:
self._renderShape(shape)
class Mouse(object):
def __init__(self):
self.position = None
def _update(self, event):
self.position = Point(event.x, event.y)
class Keyboard(object):
def __init__(self):
self.key_pressed = None
def _update(self, event):
pass
class __FreezeClass__ (type):
"""Prevents class variable assignment."""
def __setattr__(self, name, _ignored): # last parameter is the value
err = "You tried to set the instance variable '" + name + "'\n"
err += " on the CLASS '" + self.__name__ + "'"
err += ", which is not an OBJECT.\n"
err += " Did you forget the () after the word "
err += self.__name__ + ",\n"
err += " on the line where you constructed the object?"
raise SyntaxError(err)
class _Shape(object, metaclass=__FreezeClass__):
"""
A Shape is a thing that can be drawn on a RoseCanvas
(which itself draws on a tkinter Canvas).
Its constructor provides the tkinter method to be used to
draw this Shape.
This abstract type has concrete subclasses that include:
Arc, Bitmap, Circle, Ellipse, Image, Line, Path, Polygon,
Rectangle, RoundedRectangle, Square, Text and Window.
Public data attributes: None.
Public methods: attach_to.
"""
def __init__(self, method_for_drawing):
""" Arguments:
-- the tkinter method for drawing the Shape.
"""
self._method_for_drawing = method_for_drawing
self.shape_id_by_canvas = {}
def __eq__(self, other):
"""
Two Shape objects are equal (==) if all their attributes
are equal to each other.
"""
# check before we go deleting keys that may or may not exist
if(not isinstance(other, self.__class__)):
return False
self_dict = self.__dict__.copy()
other_dict = other.__dict__.copy()
del self_dict["shape_id_by_canvas"]
del other_dict["shape_id_by_canvas"]
return (self_dict == other_dict)
def __ne__(self, other):
return not self.__eq__(other)
def attach_to(self, window_or_canvas):
"""
'draws' this Shape. More precisely:
Attaches this Shape to the given
RoseWindow or RoseCanvas. When that
RoseWindow/RoseCanvas is rendered, this shape
will appear on that RoseWindow/RoseCanvas.
"""
if isinstance(window_or_canvas, RoseWindow):
window_or_canvas = window_or_canvas.initial_canvas
window_or_canvas._draw(self)
def detach_from(self, rose_canvas):
"""
'undraws' this Shape. More precisely:
Detaches this Shape from the given
RoseWindow or RoseCanvas. When that
RoseWindow/RoseCanvas is rendered,
this shape will no longer appear
on that RoseWindow/RoseCanvas.
"""
if type(rose_canvas) == RoseWindow:
rose_canvas = rose_canvas.initial_canvas
rose_canvas._undraw(self)
class _ShapeWithOutline(object):
"""
A Shape that has an interior (which can be filled with a color)
and an outline (which has a color and thickness).
This abstract type has concrete subclasses that include:
Arc, Circle, Ellipse, Image, Line, Path,
Polygon, Rectangle, Square, Text and Window.
Public data attributes: fill_color, outline_color, outline_thickness.
Public methods: _initialize_options.
"""
defaults = {'fill_color': None,
'outline_color': 'black',
'outline_thickness': 1}
def _initialize_options(self):
self.fill_color = _ShapeWithOutline.defaults['fill_color']
self.outline_color = _ShapeWithOutline.defaults['outline_color']
self.outline_thickness = _ShapeWithOutline.defaults[
'outline_thickness']
def _get_options_for_drawing(self):
options = {'fill': self.fill_color,
'outline': self.outline_color,
'width': self.outline_thickness}
# If a color is None, that means transparent here:
for option in ('fill', 'outline'):
if not options[option]:
options[option] = ''
return options
class _ShapeWithThickness(object):
"""
A Shape that can be (and almost always is) filled with a color
and has a thickness but no outline.
This abstract type has concrete subclasses that include:
Line and Path.
Public data attributes: color, thickness.
Public methods: _initialize_options.
"""
defaults = {'color': 'black',
'thickness': 1,
'arrow': None}
def _initialize_options(self):
self.color = _ShapeWithThickness.defaults['color']
self.thickness = _ShapeWithThickness.defaults['thickness']
self.arrow = _ShapeWithThickness.defaults['arrow']
def _get_options_for_drawing(self):
options = {'fill': self.color,
'width': self.thickness,
'arrow': self.arrow}
# If a color is None, that means 'black' here:
if options['fill'] is None:
options['fill'] = 'black'
return options
class _ShapeWithText(object):
"""
A Shape that has text and a font for displaying that text.
This abstract type has concrete subclasses that include:
Text.
Public data attributes: font_family, font_size,
is_bold, is_italic, is_underline, is_overstrike.
Public methods: _initialize_options.
"""
# FIXME: Add more to the above docstring.
defaults = {'font_family': 'helvetica',
'font_size': 14,
'weight': 'normal',
'slant': 'roman',
'underline': 0,
'overstrike': 0,
'justify': tkinter.CENTER,
'text_box_width': None,
'text_color': 'black',
'text': ''}
def _initialize_options(self):
self.font_family = _ShapeWithText.defaults['font_family']
self.font_size = _ShapeWithText.defaults['font_size']
self.is_bold = _ShapeWithText.defaults['weight'] == 'bold'
self.is_italic = _ShapeWithText.defaults['slant'] == 'italic'
self.is_underline = _ShapeWithText.defaults['underline'] == 1
self.is_overstrike = _ShapeWithText.defaults['overstrike'] == 1
self.justify = _ShapeWithText.defaults['justify']
self.text_box_width = _ShapeWithText.defaults['text_box_width']
self.text_color = _ShapeWithText.defaults['text_color']
self.text = _ShapeWithText.defaults['text']
def _get_options_for_drawing(self):
weight = 'bold' if self.is_bold else 'normal'
slant = 'italic' if self.is_italic else 'roman'
underline = 1 if self.is_underline else 0
overstrike = 1 if self.is_overstrike else 0
font = tkinter_font.Font(family=self.font_family,
size=self.font_size,
weight=weight,
slant=slant,
underline=underline,
overstrike=overstrike)
options = {'font': font,
'justify': self.justify,
'fill': self.text_color,
'text': self.text}
if self.text_box_width:
options['width'] = self.text_box_width
return options
class _ShapeWithCenter(_Shape):
"""
A Shape that has a center (and for which moving its center
moves the entire Shape). Its constructor provides the center
of the Shape along with its method for drawing this Shape.
This abstract type has concrete subclasses that include:
Arc, Bitmap, Circle, Ellipse, Image,
Rectangle, RoundedRectangle, Square, Text and Window.
Public data attributes: center.
Public methods: move_by, move_center_to.
"""
def __init__(self, center, method_for_drawing):
"""
Arguments:
-- the Point that is the center of the Shape
(the Shape stores a CLONE of that Point)
-- the tkinter method for drawing the Shape.
"""
# Clone the center argument, so that if the caller
# mutates the argument, it does NOT affect this Shape.
super().__init__(method_for_drawing)
self.center = center.clone()
def move_by(self, dx, dy):
"""
Moves this _Shape to the right by dx and down by dy.
Negative values move it to the left/up instead.
Does NOT return a value; instead, it mutates this shape.
:type dx: float
:type dy: float
"""
self.center.move_by(dx, dy)
def move_center_to(self, x, y):
"""
Moves this _Shape's center to (x, y),
thus translating the entire Shape
by however much its center moved.
:type x: float
:type y: float
"""
self.center.move_to(x, y)
class _RectangularShape(_Shape):
"""
A _Shape determined by its rectangular bounding box (plus possibly
other information).
Concrete sub-classes include: rg.Ellipse, rg.Rectangle.
Examples:
These all assume that the variable shape is a _RectangularShape
(e.g. an rg.Ellipse or a rg.Rectangle):
The methods in these examples all return rg.Point objects that are
copies of a corner/center of the _RectangularShape:
ul = shape.get_upper_left_corner()
ur = shape.get_upper_right_corner()
ll = shape.get_lower_left_corner()
lr = shape.get_lower_right_corner()
center = shape.get_center()
The methods in these examples return a positive number:
h = shape.get_height()
w = shape.get_width()
The method in this example returns an rg.Rectangle that encloses
this _RectangularShape:
bbox = shape.get_bounding_box()
This example moves this _RectangularShape right 100 and up 50:
shape.move_by(100, -50)
This example does the same thing another way:
shape.corner_1.move_by(100, -50)
shape.corner_2.move_by(100, -50)
"""
def __init__(self, corner_1, corner_2, method_for_drawing):
"""
:type corner_1: Point
:type corner_2: Point
:type method_for_drawing: callable(int, int, int, int) -> int
"""
super().__init__(method_for_drawing)
self.corner_1 = corner_1.clone()
self.corner_2 = corner_2.clone()
self._update_corners()
def __repr__(self):
""" Returns a string representation of this shape. """
f_string = ''
f_string += '{}: corner_1=({}, {}), corner_2=({}, {}),'
f_string += ' fill_color={},'
f_string += ' outline_color={}, outline_thickness={}.'
return f_string.format(self.__class__.__name__,
self.corner_1.x, self.corner_1.y,
self.corner_2.x, self.corner_2.y,
self.fill_color, self.outline_color,
self.outline_thickness)
def move_by(self, dx, dy):
"""
Moves this _Shape to the right by dx and down by dy.
Negative values move it to the left/up instead.
Does NOT return a value; instead, it mutates this shape.
:type dx: float
:type dy: float
"""
self.corner_1.x += dx
self.corner_1.y += dy
self.corner_2.x += dx
self.corner_2.y += dy
def clone(self):
"""
Returns a copy of this _RectangularShape.
"""
return self.__class__(self.corner_1.clone(),
self.corner_2.clone())
def get_upper_left_corner(self):
"""
Returns a copy of the ** upper-left **
corner of this _RectangularShape.
The returned value is an rg.Point.
"""
self._update_corners()
return self._upper_left_corner
def get_lower_left_corner(self):
"""
Returns a copy of the ** lower-left **
corner of this _RectangularShape.
The returned value is an rg.Point.
"""
self._update_corners()
return self._lower_left_corner
def get_upper_right_corner(self):
"""
Returns a copy of the ** upper-right **
corner of this _RectangularShape.
The returned value is an rg.Point.
"""
self._update_corners()
return self._upper_right_corner
def get_lower_right_corner(self):
"""
Returns a copy of the ** lower-right **
corner of this _RectangularShape.
The returned value is an rg.Point.
"""
self._update_corners()
return self._lower_right_corner
def get_center(self):
"""
Returns a copy of the ** center ** of this _RectangularShape.
The returned value is an rg.Point.
"""
return Point((self.corner_1.x + self.corner_2.x) / 2,
(self.corner_1.y + self.corner_2.y) / 2)
def get_height(self):
"""
Returns the height (i.e., the size in
the y-direction) of this _RectangularShape.
The returned value is always positive.
"""
return abs(self.corner_1.y - self.corner_2.y)
def get_width(self):
"""
Returns the width (i.e., the size in
the x-direction) of this _RectangularShape.
The returned value is always positive.
"""
return abs(self.corner_1.x - self.corner_2.x)
def get_bounding_box(self):
"""
Returns an rg.Rectangle that encloses this _RectangularShape.
"""
return Rectangle(self.corner_1, self.corner_2)
def _update_corners(self):
min_x = min(self.corner_1.x, self.corner_2.x)
min_y = min(self.corner_1.y, self.corner_2.y)
max_x = max(self.corner_1.x, self.corner_2.x)
max_y = max(self.corner_1.y, self.corner_2.y)
self._upper_left_corner = Point(min_x, min_y)
self._upper_right_corner = Point(max_x, min_y)
self._lower_left_corner = Point(min_x, max_y)
self._lower_right_corner = Point(max_x, max_y)
def _get_coordinates_for_drawing(self):
return [self.get_upper_left_corner().x,
self.get_upper_left_corner().y,
self.get_lower_right_corner().x,
self.get_lower_right_corner().y]
class Arc(_RectangularShape, _ShapeWithOutline):
""" Not yet implemented. """
class Bitmap(_Shape):
""" Not yet implemented. """
class Circle(_ShapeWithCenter, _ShapeWithOutline):
"""
A Shape that is a circle.
To construct a Circle, use:
- rg.Circle(center, radius)
where center is an rg.Point object
and radius is a positive integer.
For example:
- rg.Circle(rg.Point(100, 75), 30)
specifies the circle whose center
is at (100, 75) and whose radius is 30.
Instance variables include:
center: An rg.Point that specifies
the center of the Circle.
radius: The radius of the Circle.
fill_color:
The Circle is filled with this color.
Example: circle.fill_color = 'green'
outline_color:
The outline of the Circle is this color.
Example: circle.outline_color = 'blue'
outline_thickness: The thickness (in pixels)
of the outline of the Circle.
Examples:
circle = rg.Circle(rg.Point(100, 75), 30)
print(circle.center, circle.radius)
circle.fill_color = 'blue'
circle.outline_color = 'black'
circle.outline_thickness = 5
window = rg.RoseWindow()
circle.attach_to(window)
circle.move_center_to(300, 200)
circle.move_by(-50, 60)
# Another way to move the Circle:
x = circle.center.x
y = circle.center.y
circle.center = rg.Point(x - 50, y + 60)
"""
def __init__(self, center, radius):
"""
:type center: rg.Point
:type radius: int
"""
# The following sets instance variable
# self.center
# to a clone (copy) of the given rg.Point.
super().__init__(center, tkinter.Canvas.create_oval)
# The following sets default values for:
# self.fill_color
# self.outline_color
# self.outline_thickness
super()._initialize_options()
# The radius is also stored in an instance variable:
self.radius = radius
def __repr__(self):
""" Returns a string representation of this Circle. """
f_string = ''
f_string += 'Circle: center=({}, {}), radius={}, fill_color={}, '
f_string += 'outline_color={}, outline_thickness={}.'
return f_string.format(self.center.x, self.center.y,
self.radius,
self.fill_color, self.outline_color,
self.outline_thickness)
def clone(self):
""" Returns a copy of this Circle. """
return Circle(self.center, self.radius)
def get_bounding_box(self):
"""
Returns an rg.Rectangle that encloses this Circle.
"""
c1 = Point(self.center.x - self.radius,
self.center.y - self.radius)
c2 = Point(self.center.x + self.radius,
self.center.y + self.radius)
return Rectangle(c1, c2)
def _get_coordinates_for_drawing(self):
return self.get_bounding_box()._get_coordinates_for_drawing()
class Ellipse(_RectangularShape, _ShapeWithOutline):
"""
A Shape that is an ellipse (aka oval).
To construct an Ellipse, use:
- rg.Ellipse(corner1, corner2)
where corner1 and corner2 are
rg.Point objects that specify opposite
corners of the imaginary rectangle that
encloses the Ellipse.
For example:
- rg.Ellipse(rg.Point(100, 50),
- rg.Point(300, 200))
specifies the ellipse whose imaginary
rectangle that encloses the ellipse:
- has upper-left corner (100, 50) and
- lower-right corner(300, 200).
Another example:
- rg.Ellipse(rg.Point(300, 50),
- rg.Point(100, 200))
specifies the same ellipse.
Any two opposite corners can be used.
Instance variables include:
corner_1: An rg.Point that specifies
one corner of the imaginary rectangle
that encloses the Ellipse.
corner_2: An rg.Point that specifies an
opposite corner of the imaginary rectangle
that encloses the Ellipse.
fill_color:
The Ellipse is filled with this color.
Example: ellipse.fill_color = 'green'
outline_color:
The outline of the Ellipse is this color.
Example: ellipse.outline_color = 'blue'
outline_thickness: The thickness (in pixels)
of the outline of the Ellipse.
Examples:
p1 = rg.Point(100, 50)
p2 = rg.Point(300, 200)
ellipse = rg.Ellipse(p1, p2)
print(ellipse.corner_1, ellipse.corner_2)
ellipse.fill_color = 'blue'
ellipse.outline_color = 'black'
ellipse.outline_thickness = 5
window = rg.RoseWindow()
ellipse.attach_to(window)
ellipse.move_by(-50, 60)
# Another way to move the Ellipse:
ellipse.corner_1.move_by(-50, 60)
ellipse.corner_2.move_by(-50, 60)
# To get rg.Points for the corners/center:
ul = ellipse.get_upper_left_corner()
ur = ellipse.get_upper_right_corner()
ll = ellipse.get_lower_left_corner()
lr = ellipse.get_lower_right_corner()
center = ellipse.get_center()
# To get the width/height (always positive):
h = ellipse.get_height()
w = ellipse.get_width()
"""
def __init__(self, corner_1, corner_2):
"""
:type corner_1: rg.Point
:type corner_2: rg.Point
"""
# The following sets instance variables
# self.corner_1
# self.corner_2
# to clones (copies) of the given rg.Points.
super().__init__(corner_1, corner_2,
tkinter.Canvas.create_oval)
# The following sets default values for:
# self.fill_color
# self.outline_color
# self.outline_thickness
super()._initialize_options()
class Line(_Shape, _ShapeWithThickness):
"""
A Shape that is a line segment.
To construct a Line, use:
- rg.Line(start, end)
where start and end are rg.Point objects
that specify the endpoints of the Line.
For example:
- rg.Line(rg.Point(100, 50),
- rg.Point(200, 30))
specifies the Line that starts at (100, 50)
and ends at (200, 30).
Another example:
- rg.Line(rg.Point(200, 30),
- rg.Point(100, 50))
specifies the Line that is the same as the
previous example except that the start and
end points are reversed. This is important
if the Line's "arrow" type is not None.
Instance variables include:
start:
The rg.Point that is one end of the Line.
end:
The rg.Point that is the other end of the Line.
color: The Line is drawn with this color.
thickness: The thickness (in pixels) of the Line.
arrow: Specifies whether or not the Line
is drawn as an arrow. Possible values are:
- None draw the Line without arrow-heads
- 'first' draw an arrow-head at the start
- 'last' draw an arrow-head at the end
- 'both' draw an arrow-head at both
For example, if my_line is a Line, then
- my_line.arrow = 'last'
makes the Line be drawn as an arrow
from its start point to its end point.
Examples:
start = rg.Point(100, 50)
end = rg.Point(200, 30)
line = rg.Line(start, end)
line.color = 'blue'
line.thickness = 3
line.arrow = 'both' # A double-sided arrow
line.arrow = None # Just a line (no arrow)
line.arrow = 'first' # Arrow from end to start
line.arrow = 'last' # Arrow from start to end
window = rg.RoseWindow()
line.attach_to(window)
line.move_by(-50, 60)
"""
def __init__(self, start, end):
"""
:type start: rg.Point
:type end: rg.Point
"""
super().__init__(tkinter.Canvas.create_line)
# The following sets default values for:
# self.color
# self.thickness
# self.arrow
super()._initialize_options()
# The other instance variables are the endpoints:
self.start = start.clone()
self.end = end.clone()
def __repr__(self):
""" Returns a string representation of this Line. """
f_string = ''
f_string += 'Line: start=({}, {}), end=({}, {}), color={}, '
f_string += 'thickness={}, arrow={}.'
return f_string.format(self.start.x, self.start.y,
self.end.x, self.end.y,
self.color, self.thickness, self.arrow)
def clone(self):
""" Returns a copy of this Line. """
return Line(self.start, self.end)
def move_by(self, dx, dy):
"""
Moves both endpoints of this Line
(and hence the entire Line as well)
to the right by dx and down by dy.
Negative values move it to the left/up instead.
Does NOT return a value; instead, it mutates this Line.
:type dx: float
:type dy: float
"""
self.start.move_by(dx, dy)
self.end.move_by(dx, dy)
def get_midpoint(self):
"""
Returns an rg.Point at the midpoint (center) of this Line.
"""
return Point((self.start.x + self.end.x) / 2,
(self.start.y + self.end.y) / 2)
def _get_coordinates_for_drawing(self):
return [self.start.x,
self.start.y,
self.end.x,
self.end.y]
class Path(_Shape, _ShapeWithThickness):
""" Not yet implemented. """
class Point(_Shape, _ShapeWithOutline):
"""
A Shape that is a point in two-dimensional space.
It is drawn as a small circle (dot).
To construct a Point, use:
- rg.Point(x, y)
where x and y are the Point's coordinates.
For example:
- rg.Point(100, 50)
specifies the point whose x value is 100
and whose y value is 50.
Instance variables include the following:
x: The x-coordinate of the Point.
y: The y-coordinate of the Point.
fill_color:
The Point is filled with this color.
Note that a Point is drawn as a small, filled
circle, which is why it has a fill_color, etc.
Example: p.fill_color = 'green'
outline_color:
The outline of the Point is this color.
Example: p.outline_color = 'blue'
outline_thickness: The thickness (in pixels)
of the outline of the Point.
Examples:
p = rg.Point(100, 50)
print(p.x, p.y)
window = rg.RoseWindow()
p.attach_to(window)
p.move_to(300, 200)
p.move_by(-50, 60)
# Another way to move the Point:
p.x = p.x - 50
p.y = p.y + 60
p.fill_color = 'blue'
p.outline_color = 'black'
p.outline_thickness = 1
"""
defaults = {'width_for_drawing': 5,
'height_for_drawing': 5,
'fill_color': 'black',
'outline_color': 'black',
'outline_thickness': 1}
def __init__(self, x, y):
"""
:type x: float
:type y: float
"""
super().__init__(tkinter.Canvas.create_oval)
self.fill_color = Point.defaults['fill_color']
self.outline_color = Point.defaults['outline_color']
self.outline_thickness = Point.defaults['outline_thickness']
self.x = x
self.y = y
self.width_for_drawing = Point.defaults['width_for_drawing']
self.height_for_drawing = Point.defaults['height_for_drawing']
def __repr__(self):
""" Returns a string representation of this Point. """
return 'Point({:.1f}, {:.1f})'.format(self.x, self.y)
def clone(self):
""" Returns a copy of this Point. """
return Point(self.x, self.y)
def move_by(self, dx, dy):
"""
Moves this Point to the right by dx and down by dy.
Negative values move it to the left/up instead.
Does NOT return a value; instead, it mutates this Point.
:type dx: float
:type dy: float
"""
self.x = self.x + dx
self.y = self.y + dy
def move_to(self, x, y):
"""
Moves this Point to (x, y).
Does NOT return a value; instead, it mutates this Point.
:type x: float
:type y: float
"""
self.x = x
self.y = y
def get_bounding_box(self):
"""
Returns an rg.Rectangle that encloses
this Point (viewing it as a dot).
"""
c1 = Point(self.x - self.width_for_drawing / 2,
self.y - self.height_for_drawing / 2)
c2 = Point(self.x + self.width_for_drawing / 2,
self.y + self.height_for_drawing / 2)
return Rectangle(c1, c2)
def _get_coordinates_for_drawing(self):
return self.get_bounding_box()._get_coordinates_for_drawing()
class Polygon(_Shape, _ShapeWithOutline):
""" Not yet implemented. """
class Rectangle(_RectangularShape, _ShapeWithOutline):
"""
A Shape that is a rectangle.
To construct a Rectangle, use:
- rg.Rectangle(corner1, corner2)
where corner1 and corner2 are
rg.Point objects that specify opposite
corners of the rectangle.
For example:
- rg.Rectangle(rg.Point(100, 50),
- rg.Point(300, 200))
specifies the rectangle:
- whose upper-left corner is (100, 50) and
- whose lower-right corner is (300, 200).
Another example:
- rg.Rectangle(rg.Point(300, 50),
- rg.Point(100, 200))
specifies the same rectangle.
Any two opposite corners can be used.
Instance variables include:
corner_1: An rg.Point that specifies
one corner of the Rectangle.
corner_2: An rg.Point that specifies
an opposite corner of the Rectangle.
fill_color:
The Rectangle is filled with this color.
Example: rect.fill_color = 'green'
outline_color:
The outline of the Rectangle is this color.
Example: rect.outline_color = 'blue'
outline_thickness: The thickness (in pixels)
of the outline of the Rectangle.
Examples:
p1 = rg.Point(100, 50)
p2 = rg.Point(300, 200)
rect = rg.Rectangle(p1, p2)
print(rect.corner_1, rect.corner_2)
rect.fill_color = 'blue'
rect.outline_color = 'black'
rect.outline_thickness = 5
window = rg.RoseWindow()
rect.attach_to(window)
rect.move_by(-50, 60)
# Another way to move the Rectangle:
rect.corner_1.move_by(-50, 60)
rect.corner_2.move_by(-50, 60)
# To get rg.Points for the corners/center:
ul = rect.get_upper_left_corner()
ur = rect.get_upper_right_corner()
ll = rect.get_lower_left_corner()
lr = rect.get_lower_right_corner()
center = rect.get_center()
# To get the width/height (always positive):
h = rect.get_height()
w = rect.get_width()
"""
def __init__(self, corner_1, corner_2):
"""
:type corner_1: rg.Point
:type corner_2: rg.Point
"""
# The following sets instance variables
# self.corner_1
# self.corner_2
# to clones (copies) of the given rg.Points.
super().__init__(corner_1, corner_2,
tkinter.Canvas.create_rectangle)
# The following sets default values for:
# self.fill_color
# self.outline_color
# self.outline_thickness
super()._initialize_options()
def get_bounding_box(self):
"""
Returns a new rg.Rectangle with the same corners as this one.
"""
return self.clone()
class RoundedRectangle(_RectangularShape, _ShapeWithOutline):
""" Not yet implemented. """
class Square(_ShapeWithCenter, _ShapeWithOutline):
"""
A Shape that is a square.
To construct a Square, use:
- rg.Square(center, length_of_each_side)
where center is an rg.Point object
and length_of_each_side is a positive integer.
For example:
- rg.Square(rg.Point(100, 75), 60)
specifies the square whose center
is at (100, 75) and whose length of
each side is 60. Its corners are at:
(70, 45), (70, 105), (130, 45), (130, 105).
Instance variables include:
center: An rg.Point that specifies
the center of the Square.
length_of_each_side: The length of each side of the Square.
fill_color:
The Square is filled with this color.
Example: square.fill_color = 'green'
outline_color:
The outline of the Square is this color.
Example: square.outline_color = 'blue'
outline_thickness: The thickness (in pixels)
of the outline of the Square.
Examples:
square = rg.Square(rg.Point(100, 75), 60)
print(square.center, square.length_of_each_side)
square.fill_color = 'blue'
square.outline_color = 'black'
square.outline_thickness = 5
window = rg.RoseWindow()
square.attach_to(window)
square.move_center_to(300, 200)
square.move_by(-50, 60)
# Another way to move the Square:
x = square.center.x
y = square.center.y
square.center = rg.Point(x - 50, y + 60)
"""
def __init__(self, center, length_of_each_side):
"""
:type center: rg.Point
:type length_of_each_side: int
"""
# The following sets instance variable
# self.center
# to a clone (copy) of the given rg.Point.
super().__init__(center, tkinter.Canvas.create_rectangle)
# The following sets default values for:
# self.fill_color
# self.outline_color
# self.outline_thickness
super()._initialize_options()
# The length of each side is also stored in an instance variable
self.length_of_each_side = length_of_each_side
def __repr__(self):
""" Returns a string representation of this Square. """
f_string = ''
f_string += 'Square: center=({}, {}), side-lengths={}, '
f_string += 'fill_color={}, outline_color={}, outline_thickness={}.'
return f_string.format(self.center.x, self.center.y,
self.length_of_each_side,
self.fill_color, self.outline_color,
self.outline_thickness)
def clone(self):
""" Returns a copy of this Square. """
return Square(self.center, self.length_of_each_side)
def get_bounding_box(self):
"""
Returns a rg.Rectangle with the same corners as this Square.
"""
c1 = Point(self.center.x - self.length_of_each_side / 2,
self.center.y - self.length_of_each_side / 2)
c2 = Point(self.center.x + self.length_of_each_side / 2,
self.center.y + self.length_of_each_side / 2)
return Rectangle(c1, c2)
def _get_coordinates_for_drawing(self):
return self.get_bounding_box()._get_coordinates_for_drawing()
class Text(_ShapeWithCenter, _ShapeWithText):
"""
A Shape that has a string of text on it, displayed horizontally.
Its constructor specifies the rg.Point at which the text
is centered and the string that is to be displayed.
Public data attributes: center (an rg.Point),
font_size (an integer, 5 to 80 or so are reasonable values),
is_bold (True if the text is to be displayed in BOLD, else False),
is_italic (True or False),
is_underline (True or False),
is_overstrike (True or False),
text_color (color used to display the text, default is 'black')
text (the string to be displayed).
Public methods: attach_to, move_by, move_center_to.
"""
def __init__(self, center, text):
"""
The first argument must be a rg.Point.
The second argument must be a string.
When this Text object is rendered on a window,
the string (2nd argument) is drawn horizontally on the window,
centered at the rg.Point that is the 1st argument.
Preconditions:
:type center: rg.Point
:type text: str
"""
super().__init__(center, tkinter.Canvas.create_text)
super()._initialize_options()
self.text = text
# FIXME: Allow __init__ to set the options.
def __repr__(self):
return "Text displaying '{}' at position {}".format(self.text,
self.center)
# FIXME: Have repr include characteristics??
# FIXME: Do a clone?
# def clone(self):
# return Square(self.center, self.length_of_each_side)
# def get_bounding_box(self):
# return Rectangle(self.center,
# 2 * self.length_of_each_side,
# 2 * self.length_of_each_side)
# FIXME: Implement bounding_box using the tkinter function for it.
def _get_coordinates_for_drawing(self):
return [self.center.x, self.center.y]
# Mark: Window/RoseWindow naming collision is causing mass confusion.
# class Window(_Shape):
# """ Not yet implemented. """
# default_options = {}
# CONSIDER: Are these right for here?
class Button(_Shape):
""" Not yet implemented. """
default_options = {}
class Entry(_Shape):
""" Not yet implemented. """
default_options = {}
class Color(object):
"""
A Color represents a fill or outline color created from custom
amounts of red, green, and blue light. The arguments are:
- The RED component (0-255),
- the GREEN component (0-255),
- the BLUE component (0-255).
This Color can be passed to RoseGraphics colors
such as fill_color and outline_color.
"""
def __init__(self, red, green=None, blue=None):
self.red = red
self.green = green
self.blue = blue
def __repr__(self):
return "#{:02x}{:02x}{:02x}".format(self.red, self.green, self.blue)
# begin STUB code for testing
class _RoseWindowStub(RoseWindow):
def __init__(self, width=400, height=300, title='Rose Graphics',
color='black', canvas_color=None,
make_initial_canvas=True):
canvas_color = "white" # FIXME
self._is_closed = False
self.width = width
self.height = height
self.initial_canvas = _RoseCanvasStub(
self, width, height, canvas_color)
def render(self, seconds_to_pause=None):
pass
def get_next_mouse_click(self):
return Point(0, 0)
def close_on_mouse_click(self):
return None
def continue_on_mouse_click(self,
message='To continue, click anywhere in this window',
x_position=None,
y_position=None,
close_it=False,
erase_it=True):
return None
def _serialize_shapes(self):
"""Returns a list of strings representing the shapes in sorted order."""
return _serialize_shapes(self)
class _RoseCanvasStub(RoseCanvas):
def __init__(self, window, width, height, canvas_color):
# super().__init__(window, width, height, canvas_color)
# canvases.append(self)
self.shapes = []
def _draw(self, shape):
# super()._draw(shape)
self.shapes.append(shape)
def render(self, seconds_to_pause=None):
# super().render() # don't pause
pass
class TurtleWindow(object):
def __init__(self):
self._screen = turtle.Screen()
turtle.Turtle._screen = self._screen
def close_on_mouse_click(self):
self._screen.exitonclick()
# We may need the statement:
# turtle.TurtleScreen._RUNNING = True
# in case we open a subsequent TurtleWindow during this run.
# The turtle library seems not to allow for that possibility
# (it uses a CLASS variable _RUNNING where I would have expected
# an INSTANCE variable).
# The next statement appeared to have a visible effect
# (something flashed) but nothing worse. At one time it
# was commented-out, since we need only a single TurtleWindow.
turtle.TurtleScreen._RUNNING = True
def delay(self, milliseconds=None):
self._screen.delay(milliseconds)
def tracer(self, n=None, delay=None):
self._screen.tracer(n, delay)
class ShapesWindow(RoseWindow):
pass
class SimpleTurtle(object):
"""
A SimpleTurtle is a Turtle with restricted (simpler) functionality.
It can move forward/backward (units are pixels), turn (spin)
left/right (units are degrees), and more.
To construct a SimpleTurtle, use:
rg.SimpleTurtle(shape)
where shape is OPTIONAL and can be any of: 'turtle'
'arrow' 'classic' 'square' 'circle' 'triangle' 'blank'
Instance variables include:
speed: An integer from 1 (slowest) to 10 (fastest) that
determines how fast the SimpleTurtle moves.
pen: an rg.Pen object (see example below) that determines
the color and thickness of the line
that the SimpleTurtle draws when moving
paint_bucket: an rg.PaintBucket object (see example below)
that determines the color with which the SimpleTurtle
"fills" shapes indicated by using the begin_fill and
end_fill methods.
Examples:
natacha = rg.SimpleTurtle()
natacha.forward(100)
boris = rg.SimpleTurtle('turtle')
boris.speed = 8
boris.pen = rg.Pen('blue', 5) # blue line 5 pixels thick
boris.paint_bucket = rg.PaintBucket('red')
# Moves with pen down, then with pen up, then with pen down again:
boris.left(90)
boris.forward(-300)
boris.pen_up()
boris.go_to(rg.Point(100, -50))
boris.pen_down()
boris.backward(75)
# Moves with the enclosed space "filled" with the paint_bucket
boris.begin_fill()
... movements ...
boris.end_fill()
"""
def __init__(self, shape='classic'):
"""
What comes in:
A turtle.Shape that determines how the Turtle looks. Defaults to
a Bitmap of the "classic" Turtle (an arrowhead) from early Turtle Graphics.
Side effects: Constructs and stores in self._turtle the "real" Turtle
to do all the work on behalf of this SimpleTurtle. This (purposely)
restricts what this SimpleTurtle knows and can do.
:type shape: str
"""
self.speed = 1
self.pen = Pen('black', 1)
self.paint_bucket = PaintBucket('black')
self._turtle = turtle.Turtle(shape)
self._update_real_turtle()
def forward(self, distance):
"""
Makes this SimpleTurtle go forward the given distance
(in pixels). Example (assuming sally is an rg.SimpleTurtle):
sally.forward(200)
"""
self._update_real_turtle()
self._turtle.forward(distance)
def backward(self, distance):
"""
Makes this SimpleTurtle go backward the given distance
(in pixels). Example (assuming sally is an rg.SimpleTurtle):
sally.backward(200)
"""
self._update_real_turtle()
self._turtle.backward(distance)
def left(self, angle):
"""
Makes this SimpleTurtle turn (i.e. spin) left the given angle
(in degrees). Example (assuming sally is an rg.SimpleTurtle):
sally.left(45)
"""
self._update_real_turtle()
self._turtle.left(angle)
def right(self, angle):
"""
Makes this SimpleTurtle turn (i.e. spin) right the given angle
(in degrees). Example (assuming sally is an rg.SimpleTurtle):
sally.right(45)
"""
self._update_real_turtle()
self._turtle.right(angle)
def go_to(self, point):
"""
Makes this SimpleTurtle go to the given rg.Point.
(0, 0) is at the center of the window.
Example (assuming sally is an rg.SimpleTurtle):
sally.go_to(rg.Point(100, -50))
"""
self._update_real_turtle()
self._turtle.goto(point.x, point.y)
def draw_circle(self, radius):
"""
Makes this SimpleTurtle draw a circle with the given radius.
Example (assuming sally is an rg.SimpleTurtle):
sally.draw_circle(40)
"""
self._update_real_turtle()
self._turtle.circle(radius)
def draw_square(self, length_of_sides):
"""
Makes this SimpleTurtle draw a square with the given value
for the length of each of its sides.
Example (assuming sally is an rg.SimpleTurtle):
sally.draw_square(100)
"""
for _ in range(4):
self.forward(length_of_sides)
self.left(90)
def draw_regular_polygon(self, number_of_sides, length_of_sides):
"""
Makes this SimpleTurtle draw a regular polygon with the given
number of sides and the given length for each of its sides.
Example (assuming sally is an rg.SimpleTurtle):
sally.draw_regular_polygon(8, 75)   # octagon
sally.draw_regular_polygon(3, 75)   # triangle
"""
for _ in range(number_of_sides):
self.forward(length_of_sides)
self.left(360 / number_of_sides)
def pen_up(self):
"""
Lifts up this SimpleTurtle's pen. Subsequent movements
will NOT draw a line (until pen_down is called).
Example (assuming sally is an rg.SimpleTurtle):
sally.pen_up()
"""
self._update_real_turtle()
self._turtle.penup()
def pen_down(self):
"""
Puts down this SimpleTurtle's pen. Subsequent movements
WILL draw a line using this SimpleTurtle's pen (until pen_up
is called). Example (assuming sally is an rg.SimpleTurtle):
sally.pen_down()
"""
self._update_real_turtle()
self._turtle.pendown()
def x_cor(self):
"""
Returns the x-coordinate of this SimpleTurtle's current position.
Example (assuming sally is an rg.SimpleTurtle):
x = sally.x_cor()
"""
return self._turtle.xcor()
def y_cor(self):
"""
Returns the y-coordinate of this SimpleTurtle's current position.
Example (assuming sally is an rg.SimpleTurtle):
y = sally.y_cor()
"""
return self._turtle.ycor()
def begin_fill(self):
"""
Begins "filling" the shape that this SimpleTurtle draws,
using this SimpleTurtle's paint_bucket as the fill.
Example (assuming sally is an rg.SimpleTurtle) that fills
a triangle with green:
sally.paint_bucket = rg.PaintBucket('green')
sally.begin_fill()
sally.forward(100)
sally.left(120)
sally.forward(100)
sally.left(120)
sally.forward(100)
sally.end_fill()
"""
self._update_real_turtle()
self._turtle.begin_fill()
def end_fill(self):
"""
Completes "filling" the shape that this SimpleTurtle draws,
using this SimpleTurtle's paint_bucket as the fill.
Example (assuming sally is an rg.SimpleTurtle) that fills
a triangle with green:
sally.paint_bucket = rg.PaintBucket('green')
sally.begin_fill()
sally.forward(100)
sally.left(120)
sally.forward(100)
sally.left(120)
sally.forward(100)
sally.end_fill()
"""
self._update_real_turtle()
self._turtle.end_fill()
def clear(self):
""" Not yet implemented. """
def clone(self):
""" Not yet implemented. """
pass
def write_text(self):
""" Not yet implemented. """
pass
def _update_real_turtle(self):
self._turtle.pencolor(self.pen.color)
self._turtle.pensize(self.pen.thickness)
self._turtle.fillcolor(self.paint_bucket.color)
self._turtle.speed(self.speed)
class Pen(object):
"""
A Pen has a color and thickness.
SimpleTurtles use a Pen for drawing lines.
To construct a Pen, use:
rg.Pen(color, thickness)
where color is a color (e.g. 'red')
and thickness is a small positive integer.
Instance variables are:
color: The color of the Pen
thickness: The thickness of the Pen
Examples:
thick_blue = rg.Pen('blue', 14)
thin_red = rg.Pen('red', 1)
"""
def __init__(self, color, thickness):
self.thickness = thickness
self.color = color
class PaintBucket(object):
"""
A PaintBucket has a color.
SimpleTurtles use a PaintBucket for filling shapes with color.
To construct a PaintBucket, use:
rg.PaintBucket(color)
where color is a color (e.g. 'red').
Instance variables are:
color: The color of the PaintBucket
Example:
paint = rg.PaintBucket('green')
"""
def __init__(self, color):
self.color = color
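# A brief, hedged demo of the classes above (SimpleTurtle, Pen and
# PaintBucket); the turtle name and colors are illustrative only, not
# part of the library API. Opening a turtle-graphics window needs a display.
if __name__ == '__main__':
    demo_window = TurtleWindow()
    dana = SimpleTurtle('turtle')
    dana.speed = 5
    dana.pen = Pen('blue', 3)
    dana.paint_bucket = PaintBucket('yellow')
    dana.begin_fill()
    dana.draw_regular_polygon(6, 60)  # a hexagon, filled with yellow
    dana.end_fill()
    demo_window.close_on_mouse_click()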
|
py | b40cb1596740d7e5763f08f94d57c66b7e748762 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2019 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import io
import os
import sys
import unittest
from trac.config import ConfigurationError
from trac.db.api import DatabaseManager
from trac.db.schema import Column, Index, Table
from trac.env import Environment
from trac.test import EnvironmentStub, MockRequest, get_dburi, mkdtemp, rmtree
from trac.util import translation
class DatabaseFileTestCase(unittest.TestCase):
stdout = None
stderr = None
devnull = None
@classmethod
def setUpClass(cls):
cls.stdout = sys.stdout
cls.stderr = sys.stderr
cls.devnull = io.open(os.devnull, 'wb')
sys.stdout = sys.stderr = cls.devnull
@classmethod
def tearDownClass(cls):
cls.devnull.close()
sys.stdout = cls.stdout
sys.stderr = cls.stderr
def setUp(self):
self.env_path = mkdtemp()
self.db_path = os.path.join(self.env_path, 'db', 'trac.db')
self.env = None
def tearDown(self):
if self.env:
self.env.shutdown()
rmtree(self.env_path)
def _create_env(self):
env = Environment(self.env_path, create=True)
env.shutdown()
def _db_query(self, env):
with env.db_query as db:
db("SELECT name FROM " + db.quote('system'))
def test_missing_tracdb(self):
self._create_env()
os.remove(self.db_path)
self.env = Environment(self.env_path)
try:
self._db_query(self.env)
self.fail('ConfigurationError not raised')
except ConfigurationError as e:
self.assertIn('Database "', unicode(e))
self.assertIn('" not found.', unicode(e))
def test_no_permissions(self):
self._create_env()
os.chmod(self.db_path, 0o444)
self.env = Environment(self.env_path)
try:
self._db_query(self.env)
self.fail('ConfigurationError not raised')
except ConfigurationError as e:
self.assertIn('requires read _and_ write permissions', unicode(e))
if os.name == 'posix' and os.getuid() == 0:
del test_no_permissions # For root, os.access() always returns True
def test_error_with_lazy_translation(self):
self._create_env()
os.remove(self.db_path)
self.env = Environment(self.env_path)
req = MockRequest(self.env, authname='trac_auth=1234567890')
translation.make_activable(lambda: req.locale, self.env.path)
try:
self._db_query(self.env)
self.fail('ConfigurationError not raised')
except ConfigurationError as e:
message = unicode(e)
self.assertIn('Database "', message)
self.assertIn('" not found.', message)
finally:
translation.deactivate()
class SQLiteConnectionTestCase(unittest.TestCase):
def setUp(self):
self.env = EnvironmentStub()
self.schema = [
Table('test_simple', key='id')[
Column('id', auto_increment=True),
Column('username'),
Column('email'),
Column('enabled', type='int'),
Column('extra'),
Index(['username'], unique=True),
Index(['email'], unique=False),
],
Table('test_composite', key=['id', 'name'])[
Column('id', type='int'),
Column('name'),
Column('value'),
Column('enabled', type='int'),
Index(['name', 'value'], unique=False),
Index(['name', 'enabled'], unique=True),
],
]
self.dbm = DatabaseManager(self.env)
self.dbm.drop_tables(self.schema)
self.dbm.create_tables(self.schema)
self.dbm.insert_into_tables([
('test_simple',
('username', 'email', 'enabled'),
[('joe', '[email protected]', 1), (u'joé', '[email protected]', 0)]),
('test_composite',
('id', 'name', 'value', 'enabled'),
[(1, 'foo', '42', 1),
(1, 'bar', '42', 1),
(2, 'foo', '43', 0),
(2, 'bar', '43', 0)]),
])
def tearDown(self):
DatabaseManager(self.env).drop_tables(self.schema)
self.env.reset_db()
def _table_info(self, table):
names = ('column', 'type', 'notnull', 'default', 'pk')
with self.env.db_query as db:
cursor = db.cursor()
cursor.execute("PRAGMA table_info(%s)" % db.quote(table))
return [dict(zip(names, row[1:6])) for row in cursor]
def _index_info(self, table):
with self.env.db_query as db:
cursor = db.cursor()
cursor.execute("PRAGMA index_list(%s)" % db.quote(table))
results = {row[1]: {'unique': row[2]} for row in cursor}
for index, info in results.iteritems():
cursor.execute("PRAGMA index_info(%s)" % db.quote(index))
info['columns'] = [row[2] for row in cursor]
return results
def _drop_column(self, table, column):
with self.env.db_transaction as db:
db.drop_column(table, column)
def _query(self, stmt, *args):
return self.env.db_query(stmt, args)
def test_remove_simple_keys(self):
coldef = {
'id': {'column': 'id', 'type': 'integer', 'notnull': 0,
'default': None, 'pk': 1},
'username': {'column': 'username', 'type': 'text',
'notnull': 0, 'default': None, 'pk': 0},
'email': {'column': 'email', 'type': 'text', 'notnull': 0,
'default': None, 'pk': 0},
'enabled': {'column': 'enabled', 'type': 'integer',
'notnull': 0, 'default': None, 'pk': 0},
'extra': {'column': 'extra', 'type': 'text',
'notnull': 0, 'default': None, 'pk': 0},
}
columns_0 = self._table_info('test_simple')
self.assertEqual([coldef['id'], coldef['username'], coldef['email'],
coldef['enabled'], coldef['extra']], columns_0)
indices_0 = self._index_info('test_simple')
self.assertEqual(['test_simple_email_idx', 'test_simple_username_idx'],
sorted(indices_0))
self._drop_column('test_simple', 'extra')
columns_1 = self._table_info('test_simple')
indices_1 = self._index_info('test_simple')
self.assertEqual([coldef['id'], coldef['username'], coldef['email'],
coldef['enabled']], columns_1)
self.assertEqual(indices_1, indices_0)
self._drop_column('test_simple', 'id')
columns_2 = self._table_info('test_simple')
indices_2 = self._index_info('test_simple')
self.assertEqual([coldef['username'], coldef['email'],
coldef['enabled']], columns_2)
self.assertEqual(indices_2, indices_0)
self._drop_column('test_simple', 'username')
columns_3 = self._table_info('test_simple')
indices_3 = self._index_info('test_simple')
self.assertEqual([coldef['email'], coldef['enabled']], columns_3)
self.assertEqual(['test_simple_email_idx'], sorted(indices_3))
self._drop_column('test_simple', 'email')
columns_4 = self._table_info('test_simple')
indices_4 = self._index_info('test_simple')
self.assertEqual([coldef['enabled']], columns_4)
self.assertEqual({}, indices_4)
def test_remove_composite_keys(self):
indices_0 = self._index_info('test_composite')
self.assertEqual(['sqlite_autoindex_test_composite_1',
'test_composite_name_enabled_idx',
'test_composite_name_value_idx'],
sorted(indices_0))
self.assertEqual({'unique': 1, 'columns': ['id', 'name']},
indices_0['sqlite_autoindex_test_composite_1'])
self.assertEqual({'unique': 0, 'columns': ['name', 'value']},
indices_0['test_composite_name_value_idx'])
self.assertEqual({'unique': 1, 'columns': ['name', 'enabled']},
indices_0['test_composite_name_enabled_idx'])
self._drop_column('test_composite', 'id')
indices_1 = self._index_info('test_composite')
self.assertEqual(['test_composite_name_enabled_idx',
'test_composite_name_value_idx'],
sorted(indices_1))
self.assertEqual(indices_0['test_composite_name_value_idx'],
indices_1['test_composite_name_value_idx'])
self.assertEqual(indices_0['test_composite_name_enabled_idx'],
indices_1['test_composite_name_enabled_idx'])
rows = self._query("""SELECT * FROM test_composite
ORDER BY name, value, enabled""")
self.assertEqual([('bar', '42', 1), ('bar', '43', 0),
('foo', '42', 1), ('foo', '43', 0)], rows)
self._drop_column('test_composite', 'name')
self.assertEqual({}, self._index_info('test_composite'))
rows = self._query("""SELECT * FROM test_composite
ORDER BY value, enabled""")
self.assertEqual([('42', 1), ('42', 1), ('43', 0), ('43', 0)], rows)
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(DatabaseFileTestCase))
if get_dburi().startswith('sqlite:'):
suite.addTest(unittest.makeSuite(SQLiteConnectionTestCase))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
|
py | b40cb19e29483b0e02f20916545241687a773889 | """BugReport CLI Commands."""
|
py | b40cb1aa3d4dc66d9e8a231a37ed0707224a4884 | # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.node._loop``.
"""
from uuid import uuid4
from eliot.testing import validate_logging, assertHasAction, assertHasMessage
from machinist import LOG_FSM_TRANSITION
from twisted.trial.unittest import SynchronousTestCase
from twisted.test.proto_helpers import StringTransport, MemoryReactorClock
from twisted.internet.protocol import Protocol, ReconnectingClientFactory
from twisted.internet.defer import succeed, Deferred, fail
from twisted.internet.task import Clock
from twisted.internet.ssl import ClientContextFactory
from twisted.protocols.tls import TLSMemoryBIOFactory, TLSMemoryBIOProtocol
from ...testtools import FakeAMPClient
from .._loop import (
build_cluster_status_fsm, ClusterStatusInputs, _ClientStatusUpdate,
_StatusUpdate, _ConnectedToControlService, ConvergenceLoopInputs,
ConvergenceLoopStates, build_convergence_loop_fsm, AgentLoopService,
LOG_SEND_TO_CONTROL_SERVICE,
LOG_CONVERGE, LOG_CALCULATED_ACTIONS,
)
from ..testtools import ControllableDeployer, ControllableAction, to_node
from ...control import (
NodeState, Deployment, Manifestation, Dataset, DeploymentState,
)
from ...control._protocol import NodeStateCommand, AgentAMP
from ...control.test.test_protocol import iconvergence_agent_tests_factory
def build_protocol():
"""
:return: ``Protocol`` hooked up to transport.
"""
p = Protocol()
p.makeConnection(StringTransport())
return p
class StubFSM(object):
"""
A finite state machine look-alike that just records inputs.
"""
def __init__(self):
self.inputted = []
def receive(self, symbol):
self.inputted.append(symbol)
class ClusterStatusFSMTests(SynchronousTestCase):
"""
Tests for the cluster status FSM.
"""
def setUp(self):
self.convergence_loop = StubFSM()
self.fsm = build_cluster_status_fsm(self.convergence_loop)
def assertConvergenceLoopInputted(self, expected):
"""
Assert that the given set of symbols was input to the agent
operation FSM.
"""
self.assertEqual(self.convergence_loop.inputted, expected)
def test_creation_no_side_effects(self):
"""
Creating the FSM has no side effects.
"""
self.assertConvergenceLoopInputted([])
def test_first_status_update(self):
"""
Once the client has been connected and a status update received, it
notifies the convergence loop FSM of this.
"""
client = object()
desired = object()
state = object()
self.fsm.receive(_ConnectedToControlService(client=client))
self.fsm.receive(_StatusUpdate(configuration=desired, state=state))
self.assertConvergenceLoopInputted(
[_ClientStatusUpdate(client=client, configuration=desired,
state=state)])
def test_second_status_update(self):
"""
Further status updates are also passed to the convergence loop FSM.
"""
client = object()
desired1 = object()
state1 = object()
desired2 = object()
state2 = object()
self.fsm.receive(_ConnectedToControlService(client=client))
# Initially some other status:
self.fsm.receive(_StatusUpdate(configuration=desired1, state=state1))
self.fsm.receive(_StatusUpdate(configuration=desired2, state=state2))
self.assertConvergenceLoopInputted(
[_ClientStatusUpdate(client=client, configuration=desired1,
state=state1),
_ClientStatusUpdate(client=client, configuration=desired2,
state=state2)])
def test_status_update_no_disconnect(self):
"""
Neither new connections nor status updates cause the client to be
disconnected.
"""
client = build_protocol()
self.fsm.receive(_ConnectedToControlService(client=client))
self.fsm.receive(_StatusUpdate(configuration=object(),
state=object()))
self.assertFalse(client.transport.disconnecting)
def test_disconnect_before_status_update(self):
"""
If the client disconnects before a status update is received then no
notification is needed for the convergence loop FSM.
"""
self.fsm.receive(_ConnectedToControlService(client=build_protocol()))
self.fsm.receive(ClusterStatusInputs.DISCONNECTED_FROM_CONTROL_SERVICE)
self.assertConvergenceLoopInputted([])
def test_disconnect_after_status_update(self):
"""
If the client disconnects after a status update is received then the
convergence loop FSM is notified.
"""
client = object()
desired = object()
state = object()
self.fsm.receive(_ConnectedToControlService(client=client))
self.fsm.receive(_StatusUpdate(configuration=desired, state=state))
self.fsm.receive(ClusterStatusInputs.DISCONNECTED_FROM_CONTROL_SERVICE)
self.assertConvergenceLoopInputted(
[_ClientStatusUpdate(client=client, configuration=desired,
state=state),
ConvergenceLoopInputs.STOP])
def test_status_update_after_reconnect(self):
"""
If the client disconnects, reconnects, and a new status update is
received then the convergence loop FSM is notified.
"""
client = object()
desired = object()
state = object()
self.fsm.receive(_ConnectedToControlService(client=client))
self.fsm.receive(_StatusUpdate(configuration=desired, state=state))
self.fsm.receive(ClusterStatusInputs.DISCONNECTED_FROM_CONTROL_SERVICE)
client2 = object()
desired2 = object()
state2 = object()
self.fsm.receive(_ConnectedToControlService(client=client2))
self.fsm.receive(_StatusUpdate(configuration=desired2, state=state2))
self.assertConvergenceLoopInputted(
[_ClientStatusUpdate(client=client, configuration=desired,
state=state),
ConvergenceLoopInputs.STOP,
_ClientStatusUpdate(client=client2, configuration=desired2,
state=state2)])
def test_shutdown_before_connect(self):
"""
If the FSM is shutdown before a connection is made nothing happens.
"""
self.fsm.receive(ClusterStatusInputs.SHUTDOWN)
self.assertConvergenceLoopInputted([])
def test_shutdown_after_connect(self):
"""
If the FSM is shutdown after connection but before status update is
received then it disconnects but does not notify the agent
operation FSM.
"""
client = build_protocol()
self.fsm.receive(_ConnectedToControlService(client=client))
self.fsm.receive(ClusterStatusInputs.SHUTDOWN)
self.assertEqual((client.transport.disconnecting,
self.convergence_loop.inputted),
(True, []))
def test_shutdown_after_status_update(self):
"""
If the FSM is shutdown after connection and status update is received
then it disconnects and also notifies the convergence loop FSM that
it should stop.
"""
client = build_protocol()
desired = object()
state = object()
self.fsm.receive(_ConnectedToControlService(client=client))
self.fsm.receive(_StatusUpdate(configuration=desired, state=state))
self.fsm.receive(ClusterStatusInputs.SHUTDOWN)
self.assertEqual((client.transport.disconnecting,
self.convergence_loop.inputted[-1]),
(True, ConvergenceLoopInputs.STOP))
def test_shutdown_fsm_ignores_disconnection(self):
"""
If the FSM has been shutdown it ignores disconnection event.
"""
client = build_protocol()
desired = object()
state = object()
self.fsm.receive(_ConnectedToControlService(client=client))
self.fsm.receive(_StatusUpdate(configuration=desired, state=state))
self.fsm.receive(ClusterStatusInputs.SHUTDOWN)
self.fsm.receive(ClusterStatusInputs.DISCONNECTED_FROM_CONTROL_SERVICE)
self.assertConvergenceLoopInputted([
_ClientStatusUpdate(client=client, configuration=desired,
state=state),
# This is caused by the shutdown... and the disconnect results
# in no further messages:
ConvergenceLoopInputs.STOP])
def test_shutdown_fsm_ignores_cluster_status(self):
"""
If the FSM has been shutdown it ignores cluster status update.
"""
client = build_protocol()
desired = object()
state = object()
self.fsm.receive(_ConnectedToControlService(client=client))
self.fsm.receive(ClusterStatusInputs.SHUTDOWN)
self.fsm.receive(_StatusUpdate(configuration=desired, state=state))
# We never send anything to convergence loop FSM:
self.assertConvergenceLoopInputted([])
class ConvergenceLoopFSMTests(SynchronousTestCase):
"""
Tests for FSM created by ``build_convergence_loop_fsm``.
"""
def test_new_stopped(self):
"""
A newly created FSM is stopped.
"""
loop = build_convergence_loop_fsm(
Clock(), ControllableDeployer(u"192.168.1.1", [], [])
)
self.assertEqual(loop.state, ConvergenceLoopStates.STOPPED)
def test_new_status_update_starts_discovery(self):
"""
A stopped FSM that receives a status update starts discovery.
"""
deployer = ControllableDeployer(u"192.168.1.1", [Deferred()], [])
loop = build_convergence_loop_fsm(Clock(), deployer)
loop.receive(_ClientStatusUpdate(client=FakeAMPClient(),
configuration=Deployment(),
state=DeploymentState()))
self.assertEqual(len(deployer.local_states), 0) # Discovery started
def successful_amp_client(self, local_states):
"""
Create AMP client that can respond successfully to a
``NodeStateCommand``.
:param local_states: The node states we expect to be able to send.
:return FakeAMPClient: Fake AMP client appropriately setup.
"""
client = FakeAMPClient()
for local_state in local_states:
client.register_response(
NodeStateCommand, dict(state_changes=(local_state,)),
{"result": None})
return client
@validate_logging(assertHasAction, LOG_SEND_TO_CONTROL_SERVICE, True)
def test_convergence_done_notify(self, logger):
"""
An FSM doing convergence that gets a discovery result sends the
discovered state to the control service using the last received
client.
"""
local_state = NodeState(hostname=u"192.0.2.123")
client = self.successful_amp_client([local_state])
action = ControllableAction(result=Deferred())
deployer = ControllableDeployer(
local_state.hostname, [succeed(local_state)], [action]
)
loop = build_convergence_loop_fsm(Clock(), deployer)
self.patch(loop, "logger", logger)
loop.receive(
_ClientStatusUpdate(
client=client,
configuration=Deployment(
nodes=frozenset([to_node(local_state)])
),
state=DeploymentState(
nodes=frozenset([local_state])
)
)
)
self.assertEqual(client.calls, [(NodeStateCommand,
dict(state_changes=(local_state,)))])
@validate_logging(assertHasMessage, LOG_CALCULATED_ACTIONS)
def test_convergence_done_update_local_state(self, logger):
"""
An FSM doing convergence that gets a discovery result supplies an
updated ``cluster_state`` to ``calculate_necessary_state_changes``.
"""
local_node_hostname = u'192.0.2.123'
# Control service reports that this node has no manifestations.
received_node = NodeState(hostname=local_node_hostname)
received_cluster_state = DeploymentState(nodes=[received_node])
discovered_manifestation = Manifestation(
dataset=Dataset(dataset_id=uuid4()),
primary=True
)
local_node_state = NodeState(
hostname=local_node_hostname,
manifestations={discovered_manifestation.dataset_id:
discovered_manifestation},
devices={}, paths={},
)
client = self.successful_amp_client([local_node_state])
action = ControllableAction(result=Deferred())
deployer = ControllableDeployer(
local_node_hostname, [succeed(local_node_state)], [action]
)
fsm = build_convergence_loop_fsm(Clock(), deployer)
self.patch(fsm, "logger", logger)
fsm.receive(
_ClientStatusUpdate(
client=client,
# Configuration is unimportant here, but we are recreating a
# situation where the local state now matches the desired
# configuration but the control service is not yet aware that
# convergence has been reached.
configuration=Deployment(nodes=[to_node(local_node_state)]),
state=received_cluster_state
)
)
expected_local_cluster_state = DeploymentState(
nodes=[local_node_state])
[calculate_necessary_state_changes_inputs] = deployer.calculate_inputs
(actual_local_state,
actual_desired_configuration,
actual_cluster_state) = calculate_necessary_state_changes_inputs
self.assertEqual(expected_local_cluster_state, actual_cluster_state)
def test_convergence_done_changes(self):
"""
A FSM doing convergence that gets a discovery result starts applying
calculated changes using last received desired configuration and
cluster state.
"""
local_state = NodeState(hostname=u'192.0.2.123')
configuration = object()
received_state = DeploymentState(nodes=[])
# Since this Deferred is unfired we never proceed to next
# iteration; if we did we'd get exception from discovery since we
# only configured one discovery result.
action = ControllableAction(result=Deferred())
deployer = ControllableDeployer(
local_state.hostname, [succeed(local_state)], [action]
)
loop = build_convergence_loop_fsm(Clock(), deployer)
loop.receive(_ClientStatusUpdate(
client=self.successful_amp_client([local_state]),
configuration=configuration, state=received_state))
expected_local_state = DeploymentState(nodes=[local_state])
# Calculating actions happened, and result was run:
self.assertEqual(
(deployer.calculate_inputs, action.called),
([(local_state, configuration, expected_local_state)], True))
def assert_full_logging(self, logger):
"""
A convergence action is logged inside the finite state machine
logging.
"""
transition = assertHasAction(self, logger, LOG_FSM_TRANSITION, True)
converge = assertHasAction(
self, logger, LOG_CONVERGE, True,
{u"cluster_state": self.cluster_state,
u"desired_configuration": self.configuration})
self.assertIn(converge, transition.children)
send = assertHasAction(self, logger, LOG_SEND_TO_CONTROL_SERVICE, True,
{u"local_changes": [self.local_state]})
self.assertIn(send, converge.children)
calculate = assertHasMessage(
self, logger, LOG_CALCULATED_ACTIONS,
{u"calculated_actions": self.action})
self.assertIn(calculate, converge.children)
@validate_logging(assert_full_logging)
def test_convergence_done_delays_new_iteration(self, logger):
"""
An FSM completing the changes from one convergence iteration doesn't
instantly start another iteration.
"""
self.local_state = local_state = NodeState(hostname=u'192.0.2.123')
self.configuration = configuration = Deployment()
self.cluster_state = received_state = DeploymentState(nodes=[])
self.action = action = ControllableAction(result=succeed(None))
deployer = ControllableDeployer(
local_state.hostname, [succeed(local_state)], [action]
)
client = self.successful_amp_client([local_state])
reactor = Clock()
loop = build_convergence_loop_fsm(reactor, deployer)
self.patch(loop, "logger", logger)
loop.receive(_ClientStatusUpdate(
client=client, configuration=configuration, state=received_state))
expected_cluster_state = DeploymentState(
nodes=[local_state])
# Calculating actions happened and the result was run.
self.assertTupleEqual(
(deployer.calculate_inputs, client.calls),
([(local_state, configuration, expected_cluster_state)],
[(NodeStateCommand, dict(state_changes=(local_state,)))])
)
def test_convergence_done_start_new_iteration(self):
"""
After a short delay, an FSM completing the changes from one convergence
iteration starts another iteration.
"""
local_state = NodeState(hostname=u'192.0.2.123')
local_state2 = NodeState(hostname=u'192.0.2.123')
configuration = Deployment(nodes=frozenset([to_node(local_state)]))
state = DeploymentState(nodes=[local_state])
action = ControllableAction(result=succeed(None))
# Because the second action result is unfired Deferred, the second
# iteration will never finish; applying its changes waits for this
# Deferred to fire.
action2 = ControllableAction(result=Deferred())
deployer = ControllableDeployer(
local_state.hostname,
[succeed(local_state), succeed(local_state2)],
[action, action2])
client = self.successful_amp_client([local_state, local_state2])
reactor = Clock()
loop = build_convergence_loop_fsm(reactor, deployer)
loop.receive(_ClientStatusUpdate(
client=client, configuration=configuration, state=state))
reactor.advance(1.0)
# Calculating actions happened, result was run... and then we did
# whole thing again:
self.assertTupleEqual(
(deployer.calculate_inputs, client.calls),
([(local_state, configuration, state),
(local_state2, configuration, state)],
[(NodeStateCommand, dict(state_changes=(local_state,))),
(NodeStateCommand, dict(state_changes=(local_state2,)))])
)
@validate_logging(lambda test_case, logger: test_case.assertEqual(
len(logger.flush_tracebacks(RuntimeError)), 1))
def test_convergence_error_start_new_iteration(self, logger):
"""
Even if the convergence fails, a new iteration is started anyway.
"""
local_state = NodeState(hostname=u'192.0.2.123')
configuration = Deployment(nodes=frozenset([to_node(local_state)]))
state = DeploymentState(nodes=[local_state])
action = ControllableAction(result=fail(RuntimeError()))
# First discovery succeeds, leading to failing action; second
# discovery will just wait for Deferred to fire. Thus we expect to
# finish test in discovery state.
deployer = ControllableDeployer(
local_state.hostname,
[succeed(local_state), Deferred()],
[action])
client = self.successful_amp_client([local_state])
reactor = Clock()
loop = build_convergence_loop_fsm(reactor, deployer)
self.patch(loop, "logger", logger)
loop.receive(_ClientStatusUpdate(
client=client, configuration=configuration, state=state))
reactor.advance(1.0)
# Calculating actions happened, result was run and caused error...
# but we started on loop again and are thus in discovery state,
# which we can tell because all faked local states have been
# consumed:
self.assertEqual(len(deployer.local_states), 0)
def test_convergence_status_update(self):
"""
An FSM doing convergence that receives a status update stores the
client, desired configuration and cluster state, which are then
used in the next convergence iteration.
"""
local_state = NodeState(hostname=u'192.0.2.123')
local_state2 = NodeState(hostname=u'192.0.2.123')
configuration = Deployment(nodes=frozenset([to_node(local_state)]))
state = DeploymentState(nodes=[local_state])
# Until this Deferred fires the first iteration won't finish:
action = ControllableAction(result=Deferred())
# Until this Deferred fires the second iteration won't finish:
action2 = ControllableAction(result=Deferred())
deployer = ControllableDeployer(
local_state.hostname,
[succeed(local_state), succeed(local_state2)],
[action, action2])
client = self.successful_amp_client([local_state])
reactor = Clock()
loop = build_convergence_loop_fsm(reactor, deployer)
loop.receive(_ClientStatusUpdate(
client=client, configuration=configuration, state=state))
# Calculating actions happened, action is run, but waits for
# Deferred to be fired... Meanwhile a new status update appears!
client2 = self.successful_amp_client([local_state2])
configuration2 = Deployment(nodes=frozenset([to_node(local_state)]))
state2 = DeploymentState(nodes=[local_state])
loop.receive(_ClientStatusUpdate(
client=client2, configuration=configuration2, state=state2))
# Action finally finishes, and we can move on to next iteration,
# which happens with second set of client, desired configuration
# and cluster state:
action.result.callback(None)
reactor.advance(1.0)
self.assertTupleEqual(
(deployer.calculate_inputs, client.calls, client2.calls),
([(local_state, configuration, state),
(local_state2, configuration2, state2)],
[(NodeStateCommand, dict(state_changes=(local_state,)))],
[(NodeStateCommand, dict(state_changes=(local_state2,)))]))
def test_convergence_stop(self):
"""
An FSM doing convergence that receives a stop input stops when the
convergence iteration finishes.
"""
local_state = NodeState(hostname=u'192.0.2.123')
configuration = Deployment(nodes=frozenset([to_node(local_state)]))
state = DeploymentState(nodes=[local_state])
# Until this Deferred fires the first iteration won't finish:
action = ControllableAction(result=Deferred())
# Only one discovery result is configured, so a second attempt at
# discovery would fail:
deployer = ControllableDeployer(
local_state.hostname, [succeed(local_state)],
[action]
)
client = self.successful_amp_client([local_state])
reactor = Clock()
loop = build_convergence_loop_fsm(reactor, deployer)
loop.receive(_ClientStatusUpdate(
client=client, configuration=configuration, state=state))
# Calculating actions happened, action is run, but waits for
# Deferred to be fired... Meanwhile a stop input is received!
loop.receive(ConvergenceLoopInputs.STOP)
# Action finally finishes:
action.result.callback(None)
reactor.advance(1.0)
# Check what ran and that no further work is scheduled:
expected = (
# The actions are calculated
[(local_state, configuration, state)],
# And the result is run
[(NodeStateCommand, dict(state_changes=(local_state,)))],
# The state machine gets to the desired state.
ConvergenceLoopStates.STOPPED,
# And no subsequent work is scheduled to be run.
[],
)
actual = (
deployer.calculate_inputs,
client.calls,
loop.state,
reactor.getDelayedCalls(),
)
self.assertTupleEqual(expected, actual)
def test_convergence_stop_then_status_update(self):
"""
An FSM doing convergence that receives a stop input and then a status
update continues on to the next convergence iteration (i.e. the stop
ends up being ignored).
"""
local_state = NodeState(hostname=u'192.0.2.123')
local_state2 = NodeState(hostname=u'192.0.2.123')
configuration = Deployment(nodes=frozenset([to_node(local_state)]))
state = DeploymentState(nodes=[local_state])
# Until this Deferred fires the first iteration won't finish:
action = ControllableAction(result=Deferred())
# Until this Deferred fires the second iteration won't finish:
action2 = ControllableAction(result=Deferred())
deployer = ControllableDeployer(
local_state.hostname,
[succeed(local_state), succeed(local_state2)],
[action, action2]
)
client = self.successful_amp_client([local_state])
reactor = Clock()
loop = build_convergence_loop_fsm(reactor, deployer)
loop.receive(_ClientStatusUpdate(
client=client, configuration=configuration, state=state))
# Calculating actions happened, action is run, but waits for
# Deferred to be fired... Meanwhile a new status update appears!
client2 = self.successful_amp_client([local_state2])
configuration2 = Deployment(nodes=frozenset([to_node(local_state)]))
state2 = DeploymentState(nodes=[local_state])
loop.receive(ConvergenceLoopInputs.STOP)
# And then another status update!
loop.receive(_ClientStatusUpdate(
client=client2, configuration=configuration2, state=state2))
# Action finally finishes, and we can move on to next iteration,
# which happens with second set of client, desired configuration
# and cluster state:
action.result.callback(None)
reactor.advance(1.0)
self.assertTupleEqual(
(deployer.calculate_inputs, client.calls, client2.calls),
([(local_state, configuration, state),
(local_state2, configuration2, state2)],
[(NodeStateCommand, dict(state_changes=(local_state,)))],
[(NodeStateCommand, dict(state_changes=(local_state2,)))]))
class AgentLoopServiceTests(SynchronousTestCase):
"""
Tests for ``AgentLoopService``.
"""
def setUp(self):
self.deployer = ControllableDeployer(u"127.0.0.1", [], [])
self.reactor = MemoryReactorClock()
self.service = AgentLoopService(
reactor=self.reactor, deployer=self.deployer, host=u"example.com",
port=1234, context_factory=ClientContextFactory())
def test_start_service(self):
"""
Starting the service starts a reconnecting TCP client to given host
and port which calls ``build_agent_client`` with the service when
connected.
"""
service = self.service
service.startService()
host, port, factory = self.reactor.tcpClients[0][:3]
protocol = factory.buildProtocol(None)
self.assertEqual((host, port, factory.__class__,
service.reconnecting_factory.__class__,
service.reconnecting_factory.continueTrying,
protocol.__class__,
protocol.wrappedProtocol.__class__,
service.running),
(u"example.com", 1234, TLSMemoryBIOFactory,
ReconnectingClientFactory,
True, TLSMemoryBIOProtocol, AgentAMP, True))
def test_stop_service(self):
"""
Stopping the service stops the reconnecting TCP client and inputs
shutdown event to the cluster status FSM.
"""
service = self.service
service.cluster_status = fsm = StubFSM()
service.startService()
service.stopService()
self.assertEqual((service.reconnecting_factory.continueTrying,
fsm.inputted, service.running),
(False, [ClusterStatusInputs.SHUTDOWN], False))
def test_connected(self):
"""
When ``connected()`` is called a ``_ConnectedToControlService`` input
is passed to the cluster status FSM.
"""
service = self.service
service.cluster_status = fsm = StubFSM()
client = object()
service.connected(client)
self.assertEqual(fsm.inputted,
[_ConnectedToControlService(client=client)])
def test_disconnected(self):
"""
When ``disconnected()`` is called a
``ClusterStatusInputs.DISCONNECTED_FROM_CONTROL_SERVICE`` input is
passed to the cluster status FSM.
"""
service = self.service
service.cluster_status = fsm = StubFSM()
service.disconnected()
self.assertEqual(
fsm.inputted,
[ClusterStatusInputs.DISCONNECTED_FROM_CONTROL_SERVICE])
def test_cluster_updated(self):
"""
When ``cluster_updated()`` is called a ``_StatusUpdate`` input is
passed to the cluster status FSM.
"""
service = self.service
service.cluster_status = fsm = StubFSM()
config = object()
state = object()
service.cluster_updated(config, state)
self.assertEqual(fsm.inputted, [_StatusUpdate(configuration=config,
state=state)])
def _build_service(test):
"""
Fixture for creating ``AgentLoopService``.
"""
service = AgentLoopService(
reactor=None, deployer=object(), host=u"example.com", port=1234,
context_factory=ClientContextFactory())
service.cluster_status = StubFSM()
return service
class AgentLoopServiceInterfaceTests(
iconvergence_agent_tests_factory(_build_service)):
"""
``IConvergenceAgent`` tests for ``AgentLoopService``.
"""
|
py | b40cb2a49d6d03f4ac87bc56bd8664c504e2c622 | #!/usr/bin/env python
"""
This is a class that can be imported as a module. It implements the logging functionality.
Works with Python 2.7+
"""
import os, signal
import sys
import logging
from os.path import isfile
class Logger(object):
"""
This class implements the logging functionality.
"""
def __init__(self, logfile, environment, log_file_dir, log_title):
self.logfile = logfile
self.environment = environment
self.log_file_dir = log_file_dir
self.log_title = log_title
def create_logging_directory(self):
print("[INFO] Creating logging directory")
try:
if not os.path.exists("%s" % (self.log_file_dir)):
os.mkdir("%s" % (self.log_file_dir))
if not os.path.exists(self.log_file_dir + self.environment):
os.mkdir("%s" % (self.log_file_dir + self.environment))
print("[INFO] Logging directory created")
except Exception as e:
print("[ERROR] Unable to create logging directory or log file. Error is: %s" % str(sys.exc_info()[1]))
raise
def create_logger_instance(self):
logger_inst = None
file_handler = None
try:
logger_inst = logging.getLogger(self.log_title)
logger_inst.setLevel(logging.INFO)
lg_f = self.log_file_dir + self.environment + "/" + self.logfile
# create the logging file handler
file_handler = logging.FileHandler(lg_f)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
file_handler.setFormatter(formatter)
# add handler to logger_inst object
logger_inst.addHandler(file_handler)
except Exception as e:
print("[ERROR] Error with creating a logger instance for logging.", sys.exc_info()[1])
print("%s %s %s" % ('Error on line {}'.format(sys.exc_info()[-1].tb_lineno), type(e), e))
raise
else:
return (logger_inst, file_handler)
def add_logstash_handler(self, logger, logstash_handler=None):
"""
This method adds a logstash handler to the logger object that is passed to it.
It requires that the logger object has already been created by instantiating this class and then calling the create_logger_instance() method.
"""
try:
if logstash_handler != None:
# Adding a logstash handler to send logs to logstash
logger.addHandler(logstash_handler)
logger.info("Added logstash handler to the logger instance")
except Exception as e:
print("Unable to add logstash handler to the logger instance. Shipping script activity logs to logstash will not work.")
logger.error("Unable to add logstash handler to the logger instance. Shipping script activity logs to logstash will not work.")
logger.error(sys.exc_info()[1])
# The script can run without the logstash handler, so there is no reason to stop execution if adding it fails.
pass
# No need to return a value here as the 'logger' object is being passed by reference, not by value.
def clear_log_file(self):
"""
This method removes the log file that this script creates.
"""
try:
lg_f = self.log_file_dir + self.environment + "/" + self.logfile
print("[INFO] Removing log file %s from the previous run" % (lg_f))
if os.path.isfile(lg_f):
os.remove(lg_f)
print("[INFO] Log file from previous run removed successfully. Another one will be created in this run")
else:
print("[INFO] No such file %s, so not removing anything" % (lg_f))
except OSError as e:
print ("[ERROR] Failed to remove logfile %s" % (lg_f))
raise
except Exception as e:
print("[ERROR] An error occurred: %s" % str(sys.exc_info()[1]))
raise
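# A minimal usage sketch for the Logger class above; the paths and names
# are hypothetical, so adjust them for your environment. Note that
# create_logger_instance() returns both the logger and its file handler.
if __name__ == '__main__':
    log = Logger(logfile="activity.log", environment="dev",
                 log_file_dir="/tmp/example-logs/", log_title="example")
    log.clear_log_file()               # harmless if no log file exists yet
    log.create_logging_directory()
    logger_inst, file_handler = log.create_logger_instance()
    log.add_logstash_handler(logger_inst)  # no-op when no handler is given
    logger_inst.info("Logger smoke test succeeded")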
|
py | b40cb2d0f5756cb90f4fb7d63e64d3a417901d66 | #!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import re
import os.path
from io import open
from setuptools import find_packages, setup
# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-mgmt-batch"
PACKAGE_PPRINT_NAME = "Batch Management"
# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')
# azure v0.x is not compatible with this package
# azure v0.x used to have a __version__ attribute (newer versions don't)
try:
import azure
try:
ver = azure.__version__
raise Exception(
'This package is incompatible with azure=={}. '.format(ver) +
'Uninstall it with "pip uninstall azure".'
)
except AttributeError:
pass
except ImportError:
pass
# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py'), 'r') as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.rst', encoding='utf-8') as f:
readme = f.read()
with open('HISTORY.rst', encoding='utf-8') as f:
history = f.read()
setup(
name=PACKAGE_NAME,
version=version,
description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
long_description=readme + '\n\n' + history,
license='MIT License',
author='Microsoft Corporation',
author_email='[email protected]',
url='https://github.com/Azure/azure-sdk-for-python',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'License :: OSI Approved :: MIT License',
],
zip_safe=False,
packages=find_packages(exclude=[
'tests',
# Exclude packages that will be covered by PEP420 or nspkg
'azure',
'azure.mgmt',
]),
install_requires=[
'msrest>=0.5.0',
'msrestazure>=0.4.32,<2.0.0',
'azure-common~=1.1',
],
extras_require={
":python_version<'3.0'": ['azure-mgmt-nspkg'],
}
)
|
py | b40cb3d3d01293664bb68763f64b4863b23ab24a | import os
from src.processing import Processing
import pandas as pd
import re
from sklearn.decomposition import PCA
# counts triplets and the number of nucleotides in each k-mer
# df: given dataframe
# all_triplets: all 64 triplet combinations of A,C,G,T
def fillDataFrame(df, all_triplets):
alphabet = ['A', 'C', 'G', 'T']
top_list_df = df.copy()
del top_list_df['File']
# add columns
for b in alphabet:
top_list_df[b] = 0
for tpl in all_triplets:
top_list_df[tpl] = 0
# counts nucleotides in k-mer
for b in alphabet:
top_list_df[b] = [kmer.upper().count(b) for kmer in top_list_df.index.tolist()]
# counts triplets in k-mer
for trpl in all_triplets:
top_list_df[trpl] = [sum(1 for _ in re.finditer('(?={})'.format(trpl), kmer.upper())) for kmer in
top_list_df.index.tolist()]
return top_list_df
# inherits from process
class KMerPCAData(Processing):
def __init__(self, data, selected, k, peak, top, feature, cmd, secStruct_data, no_sec_peak):
super().__init__(data, selected, k, peak, top, feature, cmd, secStruct_data, no_sec_peak)
# processes data to display pca as scatterplot
def processData(self):
pca_dimension = 2
top_kmer = self.getTopKmer()
all_triplets = self.getAllTriplets()
file_name1 = os.path.basename(self.getProfileObj1().getName()) # get filenames
file_name2 = os.path.basename(self.getProfileObj2().getName())
top_list_file1 = top_kmer.query('File==@file_name1') # get top k-mer
top_list_file2 = top_kmer.query('File==@file_name2') # get top k-mer
pca = PCA(n_components=pca_dimension)
pca_df1 = None
pca_df2 = None
top_list_df1 = None
top_list_df2 = None
# create dataframe
if len(top_list_file1) > 1:
try:
top_list_df1 = fillDataFrame(top_list_file1, all_triplets) # fill remaining data
pca_data1 = pca.fit_transform(top_list_df1)
pca_df1 = pd.DataFrame(data=pca_data1, columns=['PC1', 'PC2'], index=top_list_df1.index)
except ValueError:
pca_df1 = None
if len(top_list_file2) > 1:
try:
top_list_df2 = fillDataFrame(top_list_file2, all_triplets)
pca_data2 = pca.fit_transform(top_list_df2)
pca_df2 = pd.DataFrame(data=pca_data2, columns=['PC1', 'PC2'], index=top_list_df2.index)
except ValueError:
pca_df2 = None
return [pca_df1, pca_df2, file_name1, file_name2, top_list_df1, top_list_df2]
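# An illustrative check of fillDataFrame with a toy top-k-mer table; this
# input is hypothetical (real data comes from Processing.getTopKmer()).
# Each row index is a k-mer and the 'File' column names its source file.
if __name__ == '__main__':
    from itertools import product
    toy = pd.DataFrame({'File': ['a.fasta', 'a.fasta']},
                       index=['ACGTA', 'AAAAA'])
    triplets = [''.join(t) for t in product('ACGT', repeat=3)]
    counts = fillDataFrame(toy, triplets)
    # 'ACGTA' has two A's and contains the triplets ACG, CGT and GTA once each
    print(counts.loc['ACGTA', ['A', 'C', 'ACG', 'CGT']])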
|
py | b40cb3e424d07eb1fb34eb75c3a634e39d197357 | import numpy as np
from .moments import moments
from .connectivity import neighbors, normalize_knn_graph
from .dynamics import dynamics
from .dimension_reduction import reduceDimension
from .cell_velocities import cell_velocities
from .utils import set_transition_genes
from ..preprocessing.utils import pca
from ..configuration import DynamoAdataConfig
# add recipe_csc_data()
def recipe_kin_data(
adata,
tkey=None,
reset_X=True,
X_total_layers=False,
splicing_total_layers=False,
n_top_genes=1000,
keep_filtered_cells=None,
keep_filtered_genes=None,
keep_raw_layers=None,
del_2nd_moments=None,
ekey="M_t",
vkey="velocity_T",
basis="umap",
rm_kwargs={},
):
"""An analysis recipe that properly pre-processes different layers for an kinetics experiment with both labeling and
splicing or only labeling data.
Parameters
----------
adata: :class:`~anndata.AnnData`
AnnData object that stores data for the kinetics experiment, which must include the
four different layers `uu, ul, su, sl`.
tkey: `str` or None (default: None)
The column key for the labeling time of cells in .obs. Used for labeling based scRNA-seq data (will also
support conventional scRNA-seq data). Note that `tkey` will be saved to adata.uns['pp']['tkey'] and used
in `dyn.tl.dynamics`, in which, when `group` is None, `tkey` will also be used for calculating 1st/2nd moments
or covariance. We recommend using hours as the unit of `time`.
reset_X: bool (default: `True`)
Whether to let dynamo reset `adata.X` based on the layers stored in your experiment. One
critical functionality of dynamo is about visualizing RNA velocity vector flows which requires proper data
into which the high dimensional RNA velocity vectors will be projected.
(1) For `kinetics` experiment, we recommend the use of `total` layer as `adata.X`;
(2) For `degradation/conventional` experiment scRNA-seq, we recommend using `splicing` layer as `adata.X`.
Set `reset_X` to `True` to set those default values if you are not sure.
splicing_total_layers: bool (default `False`)
Whether to also normalize spliced / unspliced layers by size factor from total RNA. Parameter to
the `recipe_monocle` function.
X_total_layers: bool (default `False`)
Whether to also normalize adata.X by size factor from total RNA. Parameter to the `recipe_monocle` function.
n_top_genes: `int` (default: `1000`)
How many top genes based on scoring method (specified by sort_by) will be selected as feature genes.
Argument required by the `recipe_monocle` function.
keep_filtered_cells: `bool` (default: `False`)
Whether to keep cells that don't pass the filtering in the returned adata object. Used in `recipe_monocle`.
keep_filtered_genes: `bool` (default: `False`)
Whether to keep genes that don't pass the filtering in the returned adata object. Used in `recipe_monocle`.
keep_raw_layers: `bool` (default: `False`)
Whether to keep layers with raw measurements in the returned adata object. Used in `recipe_monocle`.
del_2nd_moments: `bool` (default: `None`)
Whether to remove second moments or covariances. Defaults to `None`. Argument used for the `dynamics` function.
tkey: `str` (default: `time`)
The column key for the time label of cells in .obs. Used for the "kinetic"
mode with labeled data. When `group` is None, `tkey` will also be used for calculating 1st/2nd moments or
covariance. `{tkey}` column must exist in your adata object and indicates the labeling time period.
Parameters required for `dynamics` function.
ekey: str or None (optional, default None)
The dictionary key that corresponds to the gene expression in the layer attribute. By default, ekey and vkey
will be automatically detected from the adata object. Parameters required by `cell_velocities`.
vkey: str or None (optional, default None)
The dictionary key that corresponds to the estimated velocity values in the layers attribute. Parameters
required by `cell_velocities`
basis: str (optional, default `umap`)
The dictionary key that corresponds to the reduced dimension in `.obsm` attribute. Can be `X_spliced_umap`
or `X_total_umap`, etc. Parameters required by `cell_velocities`
rm_kwargs: `dict` or None (default: `None`)
Other Parameters passed into the pp.recipe_monocle function.
Returns
-------
An updated adata object that went through a proper and typical time-resolved RNA velocity analysis.
"""
from ..preprocessing import recipe_monocle
from ..preprocessing.utils import pca, detect_datatype
keep_filtered_cells = DynamoAdataConfig.check_config_var(
keep_filtered_cells, DynamoAdataConfig.RECIPE_KEEP_FILTERED_CELLS_KEY
)
keep_filtered_genes = DynamoAdataConfig.check_config_var(
keep_filtered_genes, DynamoAdataConfig.RECIPE_KEEP_FILTERED_GENES_KEY
)
keep_raw_layers = DynamoAdataConfig.check_config_var(keep_raw_layers, DynamoAdataConfig.RECIPE_KEEP_RAW_LAYERS_KEY)
del_2nd_moments = DynamoAdataConfig.check_config_var(del_2nd_moments, DynamoAdataConfig.RECIPE_DEL_2ND_MOMENTS_KEY)
has_splicing, has_labeling, splicing_labeling, _ = detect_datatype(adata)
if has_splicing and has_labeling and splicing_labeling:
layers = ["X_new", "X_total", "X_uu", "X_ul", "X_su", "X_sl"]
elif has_labeling:
layers = ["X_new", "X_total"]
if not has_labeling:
raise Exception(
"This recipe is only applicable to kinetics experiment datasets that have "
"labeling data (at least either with `'uu', 'ul', 'su', 'sl'` or `'new', 'total'` "
"layers."
)
if has_splicing and has_labeling:
# new, total (and uu, ul, su, sl if existed) layers will be normalized with size factor calculated with total
# layers spliced / unspliced layers will be normalized independently.
recipe_monocle(
adata,
tkey=tkey,
experiment_type="kin",
reset_X=reset_X,
X_total_layers=X_total_layers,
splicing_total_layers=splicing_total_layers,
n_top_genes=n_top_genes,
total_layers=True,
keep_filtered_cells=keep_filtered_cells,
keep_filtered_genes=keep_filtered_genes,
keep_raw_layers=keep_raw_layers,
**rm_kwargs,
)
tkey = adata.uns["pp"]["tkey"]
# first calculate moments for labeling data relevant layers using total based connectivity graph
moments(adata, group=tkey, layers=layers)
# then we want to calculate moments for spliced and unspliced layers based on connectivity graph from spliced
# data.
# first get X_spliced based pca embedding
CM = np.log1p(adata[:, adata.var.use_for_pca].layers["X_spliced"].A)
cm_genesums = CM.sum(axis=0)
valid_ind = np.logical_and(np.isfinite(cm_genesums), cm_genesums != 0)
valid_ind = np.array(valid_ind).flatten()
pca(adata, CM[:, valid_ind], pca_key="X_spliced_pca")
# then get neighbors graph based on X_spliced_pca
neighbors(adata, X_data=adata.obsm["X_spliced_pca"], layer="X_spliced")
# then normalize neighbors graph so that each row sums up to be 1
conn = normalize_knn_graph(adata.obsp["connectivities"] > 0)
# then calculate moments for spliced related layers using spliced based connectivity graph
moments(adata, conn=conn, layers=["X_spliced", "X_unspliced"])
# then perform kinetic estimations with properly preprocessed layers for either the labeling or the splicing
# data
dynamics(
adata,
model="deterministic",
est_method="twostep",
del_2nd_moments=del_2nd_moments,
)
# then perform dimension reduction
reduceDimension(adata, reduction_method=basis)
# lastly, project RNA velocity to low dimensional embedding.
cell_velocities(adata, enforce=True, vkey=vkey, ekey=ekey, basis=basis)
else:
recipe_monocle(
adata,
tkey=tkey,
experiment_type="kin",
reset_X=reset_X,
X_total_layers=X_total_layers,
splicing_total_layers=splicing_total_layers,
n_top_genes=n_top_genes,
total_layers=True,
keep_filtered_cells=keep_filtered_cells,
keep_filtered_genes=keep_filtered_genes,
keep_raw_layers=keep_raw_layers,
**rm_kwargs,
)
dynamics(
adata,
model="deterministic",
est_method="twostep",
del_2nd_moments=del_2nd_moments,
)
reduceDimension(adata, reduction_method=basis)
cell_velocities(adata, basis=basis)
return adata
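# A minimal usage sketch for recipe_kin_data (the file name and `tkey`
# column are hypothetical; the .h5ad must carry 'new'/'total' or
# 'uu'/'ul'/'su'/'sl' layers plus a labeling-time column in .obs):
#
#     import anndata
#     adata = anndata.read_h5ad("kin_experiment.h5ad")
#     adata = recipe_kin_data(adata, tkey="time", basis="umap")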
def recipe_deg_data(
adata,
tkey=None,
reset_X=True,
X_total_layers=False,
splicing_total_layers=False,
n_top_genes=1000,
keep_filtered_cells=None,
keep_filtered_genes=None,
keep_raw_layers=None,
del_2nd_moments=True,
fraction_for_deg=False,
ekey="M_s",
vkey="velocity_S",
basis="umap",
rm_kwargs={},
):
"""An analysis recipe that properly pre-processes different layers for a degradation experiment with both
labeling and splicing data, or labeling data only. Functions need to be updated.
Parameters
----------
adata: :class:`~anndata.AnnData`
AnnData object that stores data for the degradation experiment, which must include the
four different layers `uu, ul, su, sl`.
tkey: `str` or None (default: None)
The column key for the labeling time of cells in .obs. Used for labeling based scRNA-seq data (will also
support conventional scRNA-seq data). Note that `tkey` will be saved to adata.uns['pp']['tkey'] and used
in `dyn.tl.dynamics`, in which, when `group` is None, `tkey` will also be used for calculating 1st/2nd moments
or covariance. We recommend using hours as the unit of `time`.
reset_X: bool (default: `True`)
Whether to let dynamo reset `adata.X` based on the layers stored in your experiment. One
critical functionality of dynamo is about visualizing RNA velocity vector flows which requires proper data
into which the high dimensional RNA velocity vectors will be projected.
(1) For `kinetics` experiment, we recommend the use of `total` layer as `adata.X`;
(2) For `degradation/conventional` experiment scRNA-seq, we recommend using `splicing` layer as `adata.X`.
Set `reset_X` to `True` to set those default values if you are not sure.
splicing_total_layers: bool (default `False`)
Whether to also normalize spliced / unspliced layers by size factor from total RNA. Parameter to
the `recipe_monocle` function.
X_total_layers: bool (default `False`)
Whether to also normalize adata.X by size factor from total RNA. Parameter to the `recipe_monocle` function.
n_top_genes: `int` (default: `1000`)
How many top genes based on scoring method (specified by sort_by) will be selected as feature genes.
Argument required by the `recipe_monocle` function.
keep_filtered_cells: `bool` (default: `False`)
Whether to keep cells that don't pass the filtering in the returned adata object. Used in `recipe_monocle`.
keep_filtered_genes: `bool` (default: `False`)
Whether to keep genes that don't pass the filtering in the returned adata object. Used in `recipe_monocle`.
keep_raw_layers: `bool` (default: `False`)
Whether to keep layers with raw measurements in the returned adata object. Used in `recipe_monocle`.
del_2nd_moments: `bool` (default: `True`)
Whether to remove second moments or covariances. Defaults to `True`. Argument used for the `dynamics` function.
fraction_for_deg: `bool` (default: `False`)
Whether to use the fraction of labeled RNA instead of the raw labeled RNA to estimate the degradation parameter.
tkey: `str` (default: `time`)
The column key for the time label of cells in .obs. Used for the "kinetic" model.
mode with labeled data. When `group` is None, `tkey` will also be used for calculating 1st/2st moment or
covariance. `{tkey}` column must exist in your adata object and indicates the labeling time period.
Parameters required for `dynamics` function.
ekey: str or None (optional, default None)
The dictionary key that corresponds to the gene expression in the layer attribute. By default, ekey and vkey
will be automatically detected from the adata object. Parameters required by `cell_velocities`.
vkey: str or None (optional, default None)
The dictionary key that corresponds to the estimated velocity values in the layers attribute. Parameters
required by `cell_velocities`
basis: int (optional, default `umap`)
The dictionary key that corresponds to the reduced dimension in `.obsm` attribute. Can be `X_spliced_umap`
or `X_total_umap`, etc. Parameters required by `cell_velocities`
rm_kwargs: `dict` or None (default: `None`)
Other Parameters passed into the pp.recipe_monocle function.
Returns
-------
An updated adata object that went through a proper and typical time-resolved RNA velocity analysis.
"""
from ..preprocessing import recipe_monocle
from ..preprocessing.utils import pca, detect_datatype
keep_filtered_cells = DynamoAdataConfig.check_config_var(
keep_filtered_cells, DynamoAdataConfig.RECIPE_KEEP_FILTERED_CELLS_KEY
)
keep_filtered_genes = DynamoAdataConfig.check_config_var(
keep_filtered_genes, DynamoAdataConfig.RECIPE_KEEP_FILTERED_GENES_KEY
)
keep_raw_layers = DynamoAdataConfig.check_config_var(keep_raw_layers, DynamoAdataConfig.RECIPE_KEEP_RAW_LAYERS_KEY)
has_splicing, has_labeling, splicing_labeling, _ = detect_datatype(adata)
if has_splicing and has_labeling and splicing_labeling:
layers = ["X_new", "X_total", "X_uu", "X_ul", "X_su", "X_sl"]
elif has_labeling:
layers = ["X_new", "X_total"]
if not has_labeling:
raise Exception(
"This recipe is only applicable to kinetics experiment datasets that have "
"labeling data (at least either with `'uu', 'ul', 'su', 'sl'` or `'new', 'total'` "
"layers."
)
if has_splicing and has_labeling:
# new, total (and uu, ul, su, sl if they exist) layers will be normalized with the size factor calculated
# with total layers; spliced / unspliced layers will be normalized independently.
recipe_monocle(
adata,
tkey=tkey,
experiment_type="deg",
reset_X=reset_X,
X_total_layers=X_total_layers,
splicing_total_layers=splicing_total_layers,
n_top_genes=n_top_genes,
total_layers=True,
keep_filtered_cells=keep_filtered_cells,
keep_filtered_genes=keep_filtered_genes,
keep_raw_layers=keep_raw_layers,
**rm_kwargs,
)
tkey = adata.uns["pp"]["tkey"]
# first calculate moments for spliced related layers using spliced based connectivity graph
moments(adata, layers=["X_spliced", "X_unspliced"])
# then calculate moments for labeling data relevant layers using total based connectivity graph
# first get X_total based pca embedding
CM = np.log1p(adata[:, adata.var.use_for_pca].layers["X_total"].A)
cm_genesums = CM.sum(axis=0)
valid_ind = np.logical_and(np.isfinite(cm_genesums), cm_genesums != 0)
valid_ind = np.array(valid_ind).flatten()
pca(adata, CM[:, valid_ind], pca_key="X_total_pca")
# then get neighbors graph based on X_total_pca
neighbors(adata, X_data=adata.obsm["X_total_pca"], layer="X_total")
# then normalize neighbors graph so that each row sums to 1
conn = normalize_knn_graph(adata.obsp["connectivities"] > 0)
moments(adata, conn=conn, group=tkey, layers=layers)
# then perform kinetic estimations with properly preprocessed layers for either the labeling or the splicing
# data
dynamics(
adata,
model="deterministic",
est_method="twostep",
del_2nd_moments=del_2nd_moments,
fraction_for_deg=fraction_for_deg,
)
# then perform dimension reduction
reduceDimension(adata, reduction_method=basis)
# lastly, project RNA velocity to low dimensional embedding.
try:
set_transition_genes(adata)
cell_velocities(adata, enforce=True, vkey=vkey, ekey=ekey, basis=basis)
except BaseException:
cell_velocities(
adata,
min_r2=adata.var.gamma_r2.min(),
enforce=True,
vkey=vkey,
ekey=ekey,
basis=basis,
)
else:
recipe_monocle(
adata,
tkey=tkey,
experiment_type="deg",
reset_X=reset_X,
X_total_layers=X_total_layers,
splicing_total_layers=splicing_total_layers,
n_top_genes=n_top_genes,
total_layers=True,
keep_filtered_cells=keep_filtered_cells,
keep_filtered_genes=keep_filtered_genes,
keep_raw_layers=keep_raw_layers,
**rm_kwargs,
)
dynamics(
adata,
model="deterministic",
del_2nd_moments=del_2nd_moments,
fraction_for_deg=fraction_for_deg,
)
reduceDimension(adata, reduction_method=basis)
return adata
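# Usage sketch for `recipe_deg_data` (illustrative comments only; the file name and the
# `time` column in .obs are hypothetical):
#
#   import anndata
#   adata = anndata.read_h5ad("deg_labeling.h5ad")
#   adata = recipe_deg_data(adata, tkey="time", fraction_for_deg=True)
#   adata.var["gamma"]  # estimated degradation rates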
def recipe_mix_kin_deg_data(
adata,
tkey=None,
reset_X=True,
X_total_layers=False,
splicing_total_layers=False,
n_top_genes=1000,
keep_filtered_cells=None,
keep_filtered_genes=None,
keep_raw_layers=None,
del_2nd_moments=None,
ekey="M_t",
vkey="velocity_T",
basis="umap",
rm_kwargs={},
):
"""An analysis recipe that properly pre-processes different layers for an mixture kinetics and degradation
experiment with both labeling and splicing or only labeling data.
Parameters
----------
adata: :class:`~anndata.AnnData`
AnnData object that stores data for the the kinetics experiment, must include `uu, ul, su, sl` four
different layers.
tkey: `str` or None (default: None)
The column key for the labeling time of cells in .obs. Used for labeling based scRNA-seq data (will also
support for conventional scRNA-seq data). Note that `tkey` will be saved to adata.uns['pp']['tkey'] and used
in `dyn.tl.dynamics` in which when `group` is None, `tkey` will also be used for calculating 1st/2st moment
or covariance. We recommend to use hour as the unit of `time`.
reset_X: bool (default: `False`)
Whether do you want to let dynamo reset `adata.X` data based on layers stored in your experiment. One
critical functionality of dynamo is about visualizing RNA velocity vector flows which requires proper data
into which the high dimensional RNA velocity vectors will be projected.
(1) For `kinetics` experiment, we recommend the use of `total` layer as `adata.X`;
(2) For `degradation/conventional` experiment scRNA-seq, we recommend using `splicing` layer as `adata.X`.
Set `reset_X` to `True` to set those default values if you are not sure.
splicing_total_layers: bool (default `False`)
Whether to also normalize spliced / unspliced layers by size factor from total RNA. Paramter to
`recipe_monocle` function.
X_total_layers: bool (default `False`)
Whether to also normalize adata.X by size factor from total RNA. Paramter to `recipe_monocle` function.
n_top_genes: `int` (default: `1000`)
How many top genes based on scoring method (specified by sort_by) will be selected as feature genes.
Arguments required by the `recipe_monocle` function.
keep_filtered_cells: `bool` (default: `False`)
Whether to keep genes that don't pass the filtering in the returned adata object. Used in `recipe_monocle`.
keep_filtered_genes: `bool` (default: `False`)
Whether to keep genes that don't pass the filtering in the returned adata object. Used in `recipe_monocle`.
keep_raw_layers: `bool` (default: `False`)
Whether to keep layers with raw measurements in the returned adata object. Used in `recipe_monocle`.
del_2nd_moments: `bool` (default: `None`)
Whether to remove second moments or covariances. Default it is `None` rgument used for `dynamics` function.
tkey: `str` (default: `time`)
The column key for the time label of cells in .obs. Used for the "kinetic" model.
mode with labeled data. When `group` is None, `tkey` will also be used for calculating 1st/2st moment or
covariance. `{tkey}` column must exist in your adata object and indicates the labeling time period.
Parameters required for `dynamics` function.
ekey: str or None (optional, default None)
The dictionary key that corresponds to the gene expression in the layer attribute. By default, ekey and vkey
will be automatically detected from the adata object. Parameters required by `cell_velocities`.
vkey: str or None (optional, default None)
The dictionary key that corresponds to the estimated velocity values in the layers attribute. Parameters
required by `cell_velocities`
basis: int (optional, default `umap`)
The dictionary key that corresponds to the reduced dimension in `.obsm` attribute. Can be `X_spliced_umap`
or `X_total_umap`, etc. Parameters required by `cell_velocities`
rm_kwargs: `dict` or None (default: `None`)
Other Parameters passed into the pp.recipe_monocle function.
Returns
-------
An updated adata object that went through a proper and typical time-resolved RNA velocity analysis.
"""
from ..preprocessing import recipe_monocle
from ..preprocessing.utils import pca, detect_datatype
keep_filtered_cells = DynamoAdataConfig.check_config_var(
keep_filtered_cells, DynamoAdataConfig.RECIPE_KEEP_FILTERED_CELLS_KEY
)
keep_filtered_genes = DynamoAdataConfig.check_config_var(
keep_filtered_genes, DynamoAdataConfig.RECIPE_KEEP_FILTERED_GENES_KEY
)
keep_raw_layers = DynamoAdataConfig.check_config_var(keep_raw_layers, DynamoAdataConfig.RECIPE_KEEP_RAW_LAYERS_KEY)
del_2nd_moments = DynamoAdataConfig.check_config_var(del_2nd_moments, DynamoAdataConfig.RECIPE_DEL_2ND_MOMENTS_KEY)
has_splicing, has_labeling, splicing_labeling, _ = detect_datatype(adata)
if has_splicing and has_labeling and splicing_labeling:
layers = ["X_new", "X_total", "X_uu", "X_ul", "X_su", "X_sl"]
elif has_labeling:
layers = ["X_new", "X_total"]
if not has_labeling:
raise Exception(
"This recipe is only applicable to kinetics experiment datasets that have "
"labeling data (at least either with `'uu', 'ul', 'su', 'sl'` or `'new', 'total'` "
"layers."
)
if has_splicing and has_labeling:
# new, total (and uu, ul, su, sl if they exist) layers will be normalized with the size factor calculated
# with total layers; spliced / unspliced layers will be normalized independently.
recipe_monocle(
adata,
tkey=tkey,
experiment_type="mix_pulse_chase",
reset_X=reset_X,
X_total_layers=X_total_layers,
splicing_total_layers=splicing_total_layers,
n_top_genes=n_top_genes,
total_layers=True,
keep_filtered_cells=keep_filtered_cells,
keep_filtered_genes=keep_filtered_genes,
keep_raw_layers=keep_raw_layers,
**rm_kwargs,
)
tkey = adata.uns["pp"]["tkey"]
# first calculate moments for labeling data relevant layers using total based connectivity graph
moments(adata, group=tkey, layers=layers)
# then we want to calculate moments for spliced and unspliced layers based on connectivity graph from spliced
# data.
# first get X_spliced based pca embedding
CM = np.log1p(adata[:, adata.var.use_for_pca].layers["X_spliced"].A)
cm_genesums = CM.sum(axis=0)
valid_ind = np.logical_and(np.isfinite(cm_genesums), cm_genesums != 0)
valid_ind = np.array(valid_ind).flatten()
pca(adata, CM[:, valid_ind], pca_key="X_spliced_pca")
# then get neighbors graph based on X_spliced_pca
neighbors(adata, X_data=adata.obsm["X_spliced_pca"], layer="X_spliced")
# then normalize neighbors graph so that each row sums to 1
conn = normalize_knn_graph(adata.obsp["connectivities"] > 0)
# then calculate moments for spliced related layers using spliced based connectivity graph
moments(adata, conn=conn, layers=["X_spliced", "X_unspliced"])
# then perform kinetic estimations with properly preprocessed layers for either the labeling or the splicing
# data
dynamics(
adata,
model="deterministic",
est_method="twostep",
del_2nd_moments=del_2nd_moments,
)
# then perform dimension reduction
reduceDimension(adata, reduction_method=basis)
# lastly, project RNA velocity to low dimensional embedding.
cell_velocities(adata, enforce=True, vkey=vkey, ekey=ekey, basis=basis)
else:
recipe_monocle(
adata,
tkey=tkey,
experiment_type="mix_pulse_chase",
reset_X=reset_X,
X_total_layers=X_total_layers,
splicing_total_layers=splicing_total_layers,
n_top_genes=n_top_genes,
total_layers=True,
keep_filtered_cells=keep_filtered_cells,
keep_filtered_genes=keep_filtered_genes,
keep_raw_layers=keep_raw_layers,
**rm_kwargs,
)
dynamics(
adata,
model="deterministic",
est_method="twostep",
del_2nd_moments=del_2nd_moments,
)
reduceDimension(adata, reduction_method=basis)
cell_velocities(adata, enforce=True, vkey=vkey, ekey=ekey, basis=basis)
return adata
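# Usage sketch for `recipe_mix_kin_deg_data` (illustrative comments only; the file name
# and the `time` column are hypothetical):
#
#   import anndata
#   adata = anndata.read_h5ad("pulse_chase.h5ad")
#   adata = recipe_mix_kin_deg_data(adata, tkey="time", basis="umap")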
# supports using just the spliced/unspliced/new/total 4 layers, as well as the uu, ul, su, sl layers
def recipe_one_shot_data(
adata,
tkey=None,
reset_X=True,
X_total_layers=False,
splicing_total_layers=False,
n_top_genes=1000,
keep_filtered_cells=None,
keep_filtered_genes=None,
keep_raw_layers=None,
one_shot_method="sci-fate",
del_2nd_moments=None,
ekey="M_t",
vkey="velocity_T",
basis="umap",
rm_kwargs={},
):
"""An analysis recipe that properly pre-processes different layers for an one-shot experiment with both labeling and
splicing data.
Parameters
----------
adata: :class:`~anndata.AnnData`
AnnData object that stores data for the the kinetics experiment, must include `uu, ul, su, sl` four
different layers.
tkey: `str` or None (default: None)
The column key for the labeling time of cells in .obs. Used for labeling based scRNA-seq data (will also
support for conventional scRNA-seq data). Note that `tkey` will be saved to adata.uns['pp']['tkey'] and used
in `dyn.tl.dynamics` in which when `group` is None, `tkey` will also be used for calculating 1st/2st moment
or covariance. We recommend to use hour as the unit of `time`.
reset_X: bool (default: `False`)
Whether do you want to let dynamo reset `adata.X` data based on layers stored in your experiment. One
critical functionality of dynamo is about visualizing RNA velocity vector flows which requires proper data
into which the high dimensional RNA velocity vectors will be projected.
(1) For `kinetics` experiment, we recommend the use of `total` layer as `adata.X`;
(2) For `degradation/conventional` experiment scRNA-seq, we recommend using `splicing` layer as `adata.X`.
Set `reset_X` to `True` to set those default values if you are not sure.
splicing_total_layers: bool (default `False`)
Whether to also normalize spliced / unspliced layers by size factor from total RNA. Paramter to
`recipe_monocle` function.
X_total_layers: bool (default `False`)
Whether to also normalize adata.X by size factor from total RNA. Paramter to `recipe_monocle` function.
n_top_genes: `int` (default: `1000`)
How many top genes based on scoring method (specified by sort_by) will be selected as feature genes.
Arguments required by the `recipe_monocle` function.
keep_filtered_cells: `bool` (default: `False`)
Whether to keep genes that don't pass the filtering in the returned adata object. Used in `recipe_monocle`.
keep_filtered_genes: `bool` (default: `False`)
Whether to keep genes that don't pass the filtering in the returned adata object. Used in `recipe_monocle`.
keep_raw_layers: `bool` (default: `False`)
Whether to keep layers with raw measurements in the returned adata object. Used in `recipe_monocle`.
one_shot_method: `str` (default: `sci-fate`)
The method to use for calculate the absolute labeling and splicing velocity for the one-shot data of use.
del_2nd_moments: `bool` (default: `None`)
Whether to remove second moments or covariances. Default it is `None` rgument used for `dynamics` function.
tkey: `str` (default: `time`)
The column key for the time label of cells in .obs. Used for the "kinetic" model.
mode with labeled data. When `group` is None, `tkey` will also be used for calculating 1st/2st moment or
covariance. `{tkey}` column must exist in your adata object and indicates the labeling time period.
Parameters required for `dynamics` function.
ekey: str or None (optional, default None)
The dictionary key that corresponds to the gene expression in the layer attribute. By default, ekey and vkey
will be automatically detected from the adata object. Parameters required by `cell_velocities`.
vkey: str or None (optional, default None)
The dictionary key that corresponds to the estimated velocity values in the layers attribute. Parameters
required by `cell_velocities`
basis: int (optional, default `umap`)
The dictionary key that corresponds to the reduced dimension in `.obsm` attribute. Can be `X_spliced_umap`
or `X_total_umap`, etc. Parameters required by `cell_velocities`
rm_kwargs: `dict` or None (default: `None`)
Other Parameters passed into the pp.recipe_monocle function.
Returns
-------
An updated adata object that went through a proper and typical time-resolved RNA velocity analysis.
"""
from ..preprocessing import recipe_monocle
from ..preprocessing.utils import pca, detect_datatype
keep_filtered_cells = DynamoAdataConfig.check_config_var(
keep_filtered_cells, DynamoAdataConfig.RECIPE_KEEP_FILTERED_CELLS_KEY
)
keep_filtered_genes = DynamoAdataConfig.check_config_var(
keep_filtered_genes, DynamoAdataConfig.RECIPE_KEEP_FILTERED_GENES_KEY
)
keep_raw_layers = DynamoAdataConfig.check_config_var(keep_raw_layers, DynamoAdataConfig.RECIPE_KEEP_RAW_LAYERS_KEY)
del_2nd_moments = DynamoAdataConfig.check_config_var(del_2nd_moments, DynamoAdataConfig.RECIPE_DEL_2ND_MOMENTS_KEY)
has_splicing, has_labeling, splicing_labeling, _ = detect_datatype(adata)
if has_splicing and has_labeling and splicing_labeling:
layers = ["X_new", "X_total", "X_uu", "X_ul", "X_su", "X_sl"]
elif has_labeling:
layers = ["X_new", "X_total"]
if not has_labeling:
raise Exception(
"This recipe is only applicable to kinetics experiment datasets that have "
"labeling data (at least either with `'uu', 'ul', 'su', 'sl'` or `'new', 'total'` "
"layers."
)
if has_splicing and has_labeling:
# new, total (and uu, ul, su, sl if they exist) layers will be normalized with the size factor calculated
# with total layers; spliced / unspliced layers will be normalized independently.
recipe_monocle(
adata,
tkey=tkey,
experiment_type="one-shot",
reset_X=reset_X,
X_total_layers=X_total_layers,
splicing_total_layers=splicing_total_layers,
n_top_genes=n_top_genes,
total_layers=True,
keep_filtered_cells=keep_filtered_cells,
keep_filtered_genes=keep_filtered_genes,
keep_raw_layers=keep_raw_layers,
**rm_kwargs,
)
tkey = adata.uns["pp"]["tkey"]
# first calculate moments for labeling data relevant layers using total based connectivity graph
moments(adata, group=tkey, layers=layers)
# then we want to calculate moments for spliced and unspliced layers based on connectivity graph from spliced
# data.
# first get X_spliced based pca embedding
CM = np.log1p(adata[:, adata.var.use_for_pca].layers["X_spliced"].A)
cm_genesums = CM.sum(axis=0)
valid_ind = np.logical_and(np.isfinite(cm_genesums), cm_genesums != 0)
valid_ind = np.array(valid_ind).flatten()
pca(adata, CM[:, valid_ind], pca_key="X_spliced_pca")
# then get neighbors graph based on X_spliced_pca
neighbors(adata, X_data=adata.obsm["X_spliced_pca"], layer="X_spliced")
# then normalize neighbors graph so that each row sums to 1
conn = normalize_knn_graph(adata.obsp["connectivities"] > 0)
# then calculate moments for spliced related layers using spliced based connectivity graph
moments(adata, conn=conn, layers=["X_spliced", "X_unspliced"])
# then perform kinetic estimations with properly preprocessed layers for either the labeling or the splicing
# data
dynamics(
adata,
model="deterministic",
one_shot_method=one_shot_method,
del_2nd_moments=del_2nd_moments,
)
# then perform dimension reduction
reduceDimension(adata, reduction_method=basis)
# lastly, project RNA velocity to low dimensional embedding.
cell_velocities(adata, enforce=True, vkey=vkey, ekey=ekey, basis=basis)
else:
recipe_monocle(
adata,
tkey=tkey,
experiment_type="one-shot",
reset_X=reset_X,
X_total_layers=X_total_layers,
splicing_total_layers=splicing_total_layers,
n_top_genes=n_top_genes,
total_layers=True,
keep_filtered_cells=keep_filtered_cells,
keep_filtered_genes=keep_filtered_genes,
keep_raw_layers=keep_raw_layers,
**rm_kwargs,
)
dynamics(
adata,
model="deterministic",
one_shot_method=one_shot_method,
del_2nd_moments=del_2nd_moments,
)
reduceDimension(adata, reduction_method=basis)
cell_velocities(adata, enforce=True, vkey=vkey, ekey=ekey, basis=basis)
return adata
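# Usage sketch for `recipe_one_shot_data` (illustrative comments only; the file name and
# the `time` column are hypothetical):
#
#   import anndata
#   adata = anndata.read_h5ad("one_shot.h5ad")
#   adata = recipe_one_shot_data(adata, tkey="time", one_shot_method="sci-fate")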
def velocity_N(
adata,
group=None,
recalculate_pca=True,
recalculate_umap=True,
del_2nd_moments=None,
):
"""use new RNA based pca, umap, for velocity calculation and projection for kinetics or one-shot experiment.
Note that currently velocity_N function only considers labeling data and removes splicing data if they exist.
Parameters
----------
adata: :class:`~anndata.AnnData`
AnnData object that stores data for the the kinetics or one-shot experiment, must include `X_new, X_total`
layers.
group: `str` or None (default: None)
The cell group that will be used to calculate velocity in each separate group. This is useful if your data
comes from different labeling condition, etc.
recalculate_pca: `bool` (default: True)
Whether to recalculate pca with the new RNA data. If setting to be False, you need to make sure the pca is
already generated via new RNA.
recalculate_umap: `bool` (default: True)
Whether to recalculate umap with the new RNA data. If setting to be False, you need to make sure the umap is
already generated via new RNA.
del_2nd_moments: `None` or `bool`
Whether to remove second moments or covariances. Default it is `None` rgument used for `dynamics` function.
Returns
-------
Nothing but the adata object is updated with the low dimensional (umap or pca) velocity projections with the
new RNA or pca based RNA velocities.
"""
del_2nd_moments = DynamoAdataConfig.check_config_var(del_2nd_moments, DynamoAdataConfig.RECIPE_DEL_2ND_MOMENTS_KEY)
var_columns = adata.var.columns
layer_keys = adata.layers.keys()
# check that the X_new and X_total layers exist
if not np.all([i in layer_keys for i in ["X_new", "X_total"]]):
raise Exception("The `X_new` and `X_total` layers have to exist in your data before running the velocity_N function.")
# delete the moments and velocities that were generated via total RNA
for i in ["M_t", "M_tt", "M_n", "M_tn", "M_nn", "velocity_N", "velocity_T"]:
if i in layer_keys:
del adata.layers[i]
# delete the kinetic parameters that were generated via total RNA
for i in [
"alpha",
"beta",
"gamma",
"half_life",
"alpha_b",
"alpha_r2",
"gamma_b",
"gamma_r2",
"gamma_logLL",
"delta_b",
"delta_r2",
"bs",
"bf",
"uu0",
"ul0",
"su0",
"sl0",
"U0",
"S0",
"total0",
"beta_k",
"gamma_k",
]:
if i in var_columns:
del adata.var[i]
if group is not None:
group_prefixes = [group + "_" + str(i) + "_" for i in adata.obs[group].unique()]
for i in group_prefixes:
for j in [
"alpha",
"beta",
"gamma",
"half_life",
"alpha_b",
"alpha_r2",
"gamma_b",
"gamma_r2",
"gamma_logLL",
"delta_b",
"delta_r2",
"bs",
"bf",
"uu0",
"ul0",
"su0",
"sl0",
"U0",
"S0",
"total0",
"beta_k",
"gamma_k",
]:
if i + j in var_columns:
del adata.var[i + j]
# now let us first run pca with new RNA
if recalculate_pca:
pca(adata, np.log1p(adata[:, adata.var.use_for_pca].layers["X_new"].A), pca_key="X_pca")
# if there are unspliced / spliced data, delete them for now:
for i in ["spliced", "unspliced", "X_spliced", "X_unspliced"]:
if i in layer_keys:
del adata.layers[i]
# now redo the RNA velocity analysis with moments generated with pca space of new RNA
# let us also check whether it is a one-shot or kinetics experiment
if adata.uns["pp"]["experiment_type"] == "one-shot":
dynamics(
adata,
one_shot_method="sci_fate",
model="deterministic",
group=group,
del_2nd_moments=del_2nd_moments,
)
elif adata.uns["pp"]["experiment_type"] == "kin":
dynamics(
adata,
model="deterministic",
est_method="twostep",
group=group,
del_2nd_moments=del_2nd_moments,
)
else:
raise Exception(
f"velocity_N function only supports either the one-shot or kinetics (kin) metabolic labeling "
f"experiment."
)
# umap based on new RNA
if recalculate_umap:
reduceDimension(adata, enforce=True)
# project new RNA velocity to new RNA pca
cell_velocities(
adata,
basis="pca",
X=adata.layers["M_n"],
V=adata.layers["velocity_N"],
enforce=True,
)
# project new RNA velocity to new RNA umap
cell_velocities(
adata,
basis="umap",
X=adata.layers["M_n"],
V=adata.layers["velocity_N"],
enforce=True,
)
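# Usage sketch for `velocity_N` (illustrative comments only; assumes `adata` was produced
# by one of the labeling recipes above so the `X_new`/`X_total` layers and
# adata.uns["pp"]["experiment_type"] are present):
#
#   velocity_N(adata, group=None, recalculate_pca=True, recalculate_umap=True)
#   adata.obsm["velocity_umap"]  # new RNA based velocity projection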
|
py | b40cb48aa9c790e3f3035db788c726a1ec9c2fa3 | #!/usr/bin/env python
# coding=utf-8
import random
import sacred.optional as opt
from sacred.utils import (module_is_in_cache, get_package_version,
parse_version)
SEEDRANGE = (1, int(1e9))
def get_seed(rnd=None):
if rnd is None:
return random.randint(*SEEDRANGE)
return rnd.randint(*SEEDRANGE)
def create_rnd(seed):
assert isinstance(seed, int), \
"Seed has to be integer but was {} {}".format(repr(seed), type(seed))
if opt.has_numpy:
return opt.np.random.RandomState(seed)
else:
return random.Random(seed)
def set_global_seed(seed):
random.seed(seed)
if opt.has_numpy:
opt.np.random.seed(seed)
if module_is_in_cache('tensorflow'):
# Ensures backward and forward compatibility with TensorFlow 1 and 2.
if get_package_version('tensorflow') < parse_version('1.13.1'):
import warnings
warnings.warn("Use of TensorFlow 1.12 and older is deprecated. "
"Use Tensorflow 1.13 or newer instead.",
DeprecationWarning)
import tensorflow as tf
else:
import tensorflow.compat.v1 as tf
tf.set_random_seed(seed)
if module_is_in_cache('torch'):
import torch
torch.manual_seed(seed)
if torch.cuda.is_available():
torch.cuda.manual_seed_all(seed)
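# Usage sketch (illustrative comments only):
#
#   seed = get_seed()        # draw a fresh seed from the global random module
#   rnd = create_rnd(seed)   # numpy RandomState if numpy is available, else random.Random
#   set_global_seed(seed)    # also seeds numpy/tensorflow/torch when they are in use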
|
py | b40cb507c476f0becc53cca6a1004171ab7ba26c | """app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('api/user/', include('user.urls')),
path('api/recipe/', include('recipe.urls'))
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
py | b40cb554c69d63631d78c62fa6c4d86099a2dad7 | from functools import partial
from django.db import connections, models, router
from django.db.models.deletion import Collector
import bleach
import olympia.core.logger
from olympia.amo.fields import PositiveAutoField
from olympia.amo.models import ManagerBase, ModelBase
from olympia.amo.urlresolvers import linkify_bounce_url_callback
from . import utils
log = olympia.core.logger.getLogger('z.translations')
class TranslationManager(ManagerBase):
def remove_for(self, obj, locale):
"""Remove a locale for the given object."""
ids = [getattr(obj, f.attname) for f in obj._meta.translated_fields]
qs = Translation.objects.filter(id__in=filter(None, ids),
locale=locale)
qs.update(localized_string=None, localized_string_clean=None)
class Translation(ModelBase):
"""
Translation model.
Use :class:`translations.fields.TranslatedField` instead of a plain foreign
key to this model.
"""
autoid = PositiveAutoField(primary_key=True)
id = models.PositiveIntegerField()
locale = models.CharField(max_length=10)
localized_string = models.TextField(null=True)
localized_string_clean = models.TextField(null=True)
objects = TranslationManager()
class Meta:
db_table = 'translations'
unique_together = ('id', 'locale')
def __unicode__(self):
return self.localized_string and unicode(self.localized_string) or ''
def __nonzero__(self):
# __nonzero__ is called to evaluate an object in a boolean context. We
# want Translations to be falsy if their string is empty.
return (bool(self.localized_string) and
bool(self.localized_string.strip()))
def __eq__(self, other):
# Django implements an __eq__ that only checks pks. We need to check
# the strings if we're dealing with existing vs. unsaved Translations.
return self.__cmp__(other) == 0
def __cmp__(self, other):
if hasattr(other, 'localized_string'):
return cmp(self.localized_string, other.localized_string)
else:
return cmp(self.localized_string, other)
def clean(self):
if self.localized_string:
self.localized_string = self.localized_string.strip()
def save(self, **kwargs):
self.clean()
return super(Translation, self).save(**kwargs)
def delete(self, using=None):
# FIXME: if the Translation is the one used as default/fallback,
# then deleting it will mean the corresponding field on the related
# model will stay empty even if there are translations in other
# languages!
cls = self.__class__
using = using or router.db_for_write(cls, instance=self)
# Look for all translations for the same string (id=self.id) except the
# current one (autoid=self.autoid).
qs = cls.objects.filter(id=self.id).exclude(autoid=self.autoid)
if qs.using(using).exists():
# If other Translations for the same id exist, we just need to
# delete this one and *only* this one, without letting Django
# collect dependencies (it'd remove the others, which we want to
# keep).
assert self._get_pk_val() is not None
collector = Collector(using=using)
collector.collect([self], collect_related=False)
# In addition, because we have FK pointing to a non-unique column,
# we need to force MySQL to ignore constraints because it's dumb
# and would otherwise complain even if there are remaining rows
# that match the FK.
with connections[using].constraint_checks_disabled():
collector.delete()
else:
# If no other Translations with that id exist, then we should let
# django behave normally. It should find the related model and set
# the FKs to NULL.
return super(Translation, self).delete(using=using)
delete.alters_data = True
@classmethod
def new(cls, string, locale, id=None):
"""
Jumps through all the right hoops to create a new translation.
If ``id`` is not given a new id will be created using
``translations_seq``. Otherwise, the id will be used to add strings to
an existing translation.
To increment IDs we use a setting on MySQL. This is to support multiple
database masters -- it's just crazy enough to work! See bug 756242.
"""
if id is None:
# Get a sequence key for the new translation.
with connections['default'].cursor() as cursor:
cursor.execute("""
UPDATE translations_seq
SET id=LAST_INSERT_ID(
id + @@global.auto_increment_increment
)
""")
# The sequence table should never be empty. But alas, if it is,
# let's fix it.
if not cursor.rowcount > 0:
cursor.execute("""
INSERT INTO translations_seq (id)
VALUES(LAST_INSERT_ID(
id + @@global.auto_increment_increment
))
""")
cursor.execute('SELECT LAST_INSERT_ID()')
id = cursor.fetchone()[0]
# Update if one exists, otherwise create a new one.
q = {'id': id, 'locale': locale}
try:
trans = cls.objects.get(**q)
trans.localized_string = string
except cls.DoesNotExist:
trans = cls(localized_string=string, **q)
return trans
class PurifiedTranslation(Translation):
"""Run the string through bleach to get a safe version."""
allowed_tags = [
'a',
'abbr',
'acronym',
'b',
'blockquote',
'code',
'em',
'i',
'li',
'ol',
'strong',
'ul',
]
allowed_attributes = {
'a': ['href', 'title', 'rel'],
'abbr': ['title'],
'acronym': ['title'],
}
class Meta:
proxy = True
def __unicode__(self):
if not self.localized_string_clean:
self.clean()
return unicode(self.localized_string_clean)
def __html__(self):
return unicode(self)
def __truncate__(self, length, killwords, end):
return utils.truncate(unicode(self), length, killwords, end)
def clean(self):
from olympia.amo.utils import clean_nl
super(PurifiedTranslation, self).clean()
cleaned = self.clean_localized_string()
self.localized_string_clean = clean_nl(cleaned).strip()
def clean_localized_string(self):
# All links (text and markup) are normalized.
linkify_filter = partial(
bleach.linkifier.LinkifyFilter,
callbacks=[linkify_bounce_url_callback, bleach.callbacks.nofollow])
# Keep only the allowed tags and attributes, escape the rest.
cleaner = bleach.Cleaner(
tags=self.allowed_tags, attributes=self.allowed_attributes,
filters=[linkify_filter])
return cleaner.clean(unicode(self.localized_string))
class LinkifiedTranslation(PurifiedTranslation):
"""Run the string through bleach to get a linkified version."""
allowed_tags = ['a']
class Meta:
proxy = True
class NoLinksNoMarkupTranslation(LinkifiedTranslation):
"""Run the string through bleach, escape markup and strip all the links."""
class Meta:
proxy = True
def clean_localized_string(self):
# First pass: bleach everything, but leave links untouched.
cleaned = super(LinkifiedTranslation, self).clean_localized_string()
# Second pass: call linkify to empty the inner text of all links.
emptied_links = bleach.linkify(
cleaned, callbacks=[lambda attrs, new: {'_text': ''}])
# Third pass: now strip links (only links will be stripped; other
# forbidden tags are already bleached/escaped).
allowed_tags = self.allowed_tags[:] # Make a copy.
allowed_tags.remove('a')
return bleach.clean(emptied_links, tags=allowed_tags, strip=True)
class TranslationSequence(models.Model):
"""
The translations_seq table, so migrations will create it during testing.
"""
id = models.IntegerField(primary_key=True)
class Meta:
db_table = 'translations_seq'
def delete_translation(obj, fieldname):
field = obj._meta.get_field(fieldname)
trans_id = getattr(obj, field.attname)
obj.update(**{field.name: None})
if trans_id:
Translation.objects.filter(id=trans_id).delete()
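# Usage sketch (illustrative comments only; `some_addon` and the field name are
# hypothetical objects from the surrounding application):
#
#   trans = Translation.new(u'Hello', 'en-US')  # allocates an id via translations_seq
#   trans.save()
#   delete_translation(some_addon, 'description')  # clears the FK, then deletes rows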
|
py | b40cb64a0a7ebfd2e92d41ed1330bc85ba753107 | # Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT)
# Bespoke Link to Instruments and Small Satellites (BLISS)
#
# Copyright 2016, by the California Institute of Technology. ALL RIGHTS
# RESERVED. United States Government Sponsorship acknowledged. Any
# commercial use must be negotiated with the Office of Technology Transfer
# at the California Institute of Technology.
#
# This software may be subject to U.S. export control laws. By accepting
# this software, the user agrees to comply with all applicable U.S. export
# laws and regulations. User has the responsibility to obtain export licenses,
# or other export authority as may be required before exporting such
# information to foreign countries or providing access to foreign persons.
import gevent.monkey
gevent.monkey.patch_all()
import time
import datetime
import os
import os.path
from unittest import mock
import pytest
import ait.core
from ait.core import dmc
LEAPSECOND_DATA_RESPONSE = """#
# Updated through IERS Bulletin C55
# File expires on: 28 December 2018
#
#@ 3754944000
#
2272060800 10 # 1 Jan 1972
2287785600 11 # 1 Jul 1972
2303683200 12 # 1 Jan 1973
2335219200 13 # 1 Jan 1974
2366755200 14 # 1 Jan 1975
2398291200 15 # 1 Jan 1976
2429913600 16 # 1 Jan 1977
2461449600 17 # 1 Jan 1978
2492985600 18 # 1 Jan 1979
2524521600 19 # 1 Jan 1980
2571782400 20 # 1 Jul 1981
2603318400 21 # 1 Jul 1982
2634854400 22 # 1 Jul 1983
2698012800 23 # 1 Jul 1985
2776982400 24 # 1 Jan 1988
2840140800 25 # 1 Jan 1990
2871676800 26 # 1 Jan 1991
2918937600 27 # 1 Jul 1992
2950473600 28 # 1 Jul 1993
2982009600 29 # 1 Jul 1994
3029443200 30 # 1 Jan 1996
3076704000 31 # 1 Jul 1997
"""
class MockResponse:
def __init__(self, text, status_code):
self.text = text
self.status_code = status_code
def test_getTimestampUTC():
expected = time.strftime("%Y-%j", time.gmtime())
actual = time.strftime("%Y-%j", time.gmtime(dmc.get_timestamp_utc()[0]))
assert actual == expected
def test_getUTCDatetimeDOY_w_days():
days = 1
t = datetime.datetime.utcnow() + datetime.timedelta(days=days)
timestamp = t.timetuple()
exp_year = timestamp.tm_year
exp_day = "%03d" % timestamp.tm_yday
dtime = dmc.get_utc_datetime_doy(days=days).split("T")[0].split("-")
assert str(exp_year) == dtime[0]
assert str(exp_day) == dtime[1]
def test_leap_second_attrs():
ait.config.leapseconds._config["filename"] = os.path.join(
os.path.dirname(__file__), "testdata", "dmc", "leapseconds.dat"
)
ls = dmc.LeapSeconds
ls._load_leap_second_data()
assert ls.leapseconds == ls._data["leapseconds"]
assert ls.valid_date == ls._data["valid"]
assert ls.get_current_gps_offset() == ls.leapseconds[-1][-1]
def test_leap_second_by_date_invalid_gps_date():
ait.config.leapseconds._config["filename"] = os.path.join(
os.path.dirname(__file__), "testdata", "dmc", "leapseconds.dat"
)
dmc.LeapSeconds._load_leap_second_data()
with pytest.raises(ValueError):
dmc.LeapSeconds.get_gps_offset_for_date(datetime.datetime(1980, 1, 1))
def test_leap_second_by_date():
ait.config.leapseconds._config["filename"] = os.path.join(
os.path.dirname(__file__), "testdata", "dmc", "leapseconds.dat"
)
ls = dmc.LeapSeconds
ls._load_leap_second_data()
assert ls.get_gps_offset_for_date(datetime.datetime(1981, 1, 1)) == 0
assert ls.get_gps_offset_for_date(datetime.datetime(1981, 7, 1)) == 1
assert ls.get_gps_offset_for_date(datetime.datetime(1982, 7, 1)) == 2
assert ls.get_gps_offset_for_date(datetime.datetime(1983, 7, 1)) == 3
assert ls.get_gps_offset_for_date(datetime.datetime(1985, 7, 1)) == 4
assert ls.get_gps_offset_for_date(datetime.datetime(1988, 1, 1)) == 5
assert ls.get_gps_offset_for_date(datetime.datetime(1990, 1, 1)) == 6
assert ls.get_gps_offset_for_date(datetime.datetime(1991, 1, 1)) == 7
assert ls.get_gps_offset_for_date(datetime.datetime(1992, 7, 1)) == 8
assert ls.get_gps_offset_for_date(datetime.datetime(1993, 7, 1)) == 9
assert ls.get_gps_offset_for_date(datetime.datetime(1994, 7, 1)) == 10
assert ls.get_gps_offset_for_date(datetime.datetime(1996, 1, 1)) == 11
assert ls.get_gps_offset_for_date(datetime.datetime(1997, 7, 1)) == 12
assert ls.get_gps_offset_for_date(datetime.datetime(1999, 1, 1)) == 13
assert ls.get_gps_offset_for_date(datetime.datetime(2006, 1, 1)) == 14
assert ls.get_gps_offset_for_date(datetime.datetime(2009, 1, 1)) == 15
assert ls.get_gps_offset_for_date(datetime.datetime(2012, 7, 1)) == 16
assert ls.get_gps_offset_for_date(datetime.datetime(2015, 7, 1)) == 17
assert ls.get_gps_offset_for_date(datetime.datetime(2017, 1, 1)) == 18
# Make sure not supplying a date returns the offset for the current date
assert (
ls.get_gps_offset_for_date(datetime.datetime.utcnow())
== ls.get_gps_offset_for_date()
)
def test_leap_second_data_load():
ait.config.leapseconds._config["filename"] = os.path.join(
os.path.dirname(__file__), "testdata", "dmc", "leapseconds.dat"
)
assert type(dmc.LeapSeconds.leapseconds) == type([])
assert dmc.LeapSeconds.leapseconds[0] == (datetime.datetime(1981, 7, 1), 1)
assert type(dmc.LeapSeconds.valid_date) == type(datetime.datetime.now())
@mock.patch(
"requests.get",
mock.MagicMock(return_value=MockResponse(LEAPSECOND_DATA_RESPONSE, 400)),
)
def test_failed_leapsecond_load_and_update():
ait.config.leapseconds._config["filename"] = os.path.join(
os.path.dirname(__file__), "invalidpath", "leapseconds.dat"
)
dmc.LeapSeconds._data = None
with pytest.raises(ValueError):
dmc.LeapSeconds._load_leap_second_data()
@mock.patch(
"requests.get",
mock.MagicMock(return_value=MockResponse(LEAPSECOND_DATA_RESPONSE, 200)),
)
def test_update_leap_second_data():
ait.config.leapseconds._config["filename"] = os.path.join(
os.path.dirname(__file__), "testdata", "dmc", "tmp_leapseconds.out"
)
dmc.LeapSeconds._data = None
dmc.LeapSeconds._update_leap_second_data()
assert type(dmc.LeapSeconds.leapseconds) == type([])
assert dmc.LeapSeconds.leapseconds[0] == (datetime.datetime(1981, 7, 1), 1)
assert type(dmc.LeapSeconds.valid_date) == type(datetime.datetime.now())
assert os.path.isfile(ait.config.leapseconds.filename)
os.remove(ait.config.leapseconds.filename)
@mock.patch(
"requests.get",
mock.MagicMock(return_value=MockResponse(LEAPSECOND_DATA_RESPONSE, 400)),
)
def test_unable_to_pull_leapsecond_data():
ait.config.leapseconds._config["filename"] = os.path.join(
os.path.dirname(__file__), "testdata", "dmc", "tmp_leapseconds.out"
)
dmc.LeapSeconds._data = None
with pytest.raises(ValueError):
dmc.LeapSeconds._update_leap_second_data()
|
py | b40cb6d4ac157051c18f6142409271f4e30eb133 | import torch
import torch.nn as nn
from scipy.special import beta
class PoincareGLU(nn.Module):
def __init__(self, ball=None):
super().__init__()
self.ball = ball
self.scale = nn.Parameter(torch.zeros(1))
def forward(self, x, dim=-1):
channels = x.size(dim)
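# Beta-function ratio rescales the log-mapped vector before splitting the channels in
# half, compensating for the dimensionality change (a beta-split trick from the
# hyperbolic neural network literature; this reading is an inference, not stated here).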
beta_n = beta(channels / 2, 1 / 2)
beta_ni = beta(channels / 4, 1 / 2)
xa, xb = (self.ball.logmap0(x, dim=dim) * beta_ni / beta_n).chunk(2, dim=dim)
return self.ball.expmap0(xa * (xb * (channels ** 0.5) * self.scale.exp()).sigmoid(), dim=dim)
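# Usage sketch (illustrative comments only; assumes a geoopt-style Poincare ball object
# exposing logmap0/expmap0, which this module does not construct itself):
#
#   from geoopt import PoincareBall
#   ball = PoincareBall(c=1.0)
#   glu = PoincareGLU(ball=ball)
#   x = ball.expmap0(torch.randn(8, 16) * 1e-2)
#   y = glu(x)  # channel dimension is halved: (8, 16) -> (8, 8)
|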
py | b40cb7caa87695f54b6859d09a12b8705ac9f0fb | import json
from polypod.tensorflow import TensorflowSpawner
from schemas import TaskType
class PolyaxonSpawner(TensorflowSpawner):
@staticmethod
def _get_schedule(task_type):
if task_type == TaskType.MASTER:
return 'train_and_evaluate'
if task_type == TaskType.WORKER:
return 'train'
if task_type == TaskType.PS:
return 'run_std_server'
def get_pod_command_args(self, task_type, task_idx):
spec_data = json.dumps(self.spec.parsed_data)
schedule = self._get_schedule(task_type=task_type)
args = [
"from polyaxon.polyaxonfile.local_runner import start_experiment_run; "
"start_experiment_run('{polyaxonfile}', '{experiment_id}', "
"'{task_type}', {task_idx}, '{schedule}')".format(
polyaxonfile=spec_data,
experiment_id=0,
task_type=task_type,
task_idx=task_idx,
schedule=schedule)]
return ["python3", "-c"], args
|
py | b40cb7caf4f5b0fa6786c661ae0f325770fe33c1 | from goza import Chart
class Scatter(Chart):
"""
Plot scatter chart using x and y data points.
"""
def __init__(self, x, y, title=None, xlabel=None, ylabel=None, figsize=None):
self.x = x
self.y = y
# Initiate chart
Chart.__init__(self, title, xlabel, ylabel, figsize)
# Plot scatter plot
self.plot_scatter()
def plot_scatter(self, color="#d32323", size=3):
self.ax.scatter(self.x, self.y, color=color, s=size)
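# Usage sketch (illustrative comments only; Chart's constructor signature is inferred
# from the call above):
#
#   chart = Scatter([1, 2, 3], [2, 4, 9], title="Demo", xlabel="x", ylabel="y")
#   chart.plot_scatter(color="#3366cc", size=10)  # re-plot with different styling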
|
py | b40cb8676a217aca80b548320fa1ad96665b82c3 | from pymacaron.log import pymlogger
import os
from pymacaron.config import get_config
from pymacaron.utils import get_container_version
log = pymlogger(__name__)
# Which monitoring methods to use
use_scout = False
def monitor_init(app=None, config=None, celery=False):
if not config:
config = get_config()
global use_scout
# Note: at this point, pym_monitor should have been called earlier on to
# start any agent daemon required by the monitoring app.
# Enable scoutapp monitoring
if hasattr(config, 'scout_key'):
use_scout = True
appname = os.environ.get('PYM_ENV', 'dev')
if hasattr(config, 'scout_app_name'):
appname = config.scout_app_name
scout_key = config.scout_key
version = get_container_version()
if celery:
log.info("Setting up scoutapp monitoring for Celery jobs")
import scout_apm.celery
from scout_apm.api import Config
Config.set(
key=scout_key,
name=appname,
monitor=True,
revision_sha=version,
)
scout_apm.celery.install()
elif app:
# Enable Flask monitoring for scoutapp
log.info("Setting up scoutapp monitoring for Flask app")
from scout_apm.flask import ScoutApm
ScoutApm(app)
app.config['SCOUT_KEY'] = scout_key
app.config['SCOUT_NAME'] = appname
app.config['SCOUT_MONITOR'] = True
app.config['SCOUT_REVISION_SHA'] = version
# END OF scoutapp support
class monitor():
def __init__(self, kind='Unknown', method='Unknown'):
self.kind = kind
self.method = method
def __enter__(self):
global use_scout
if use_scout:
log.debug("START MONITOR %s/%s" % (self.kind, self.method))
import scout_apm.api
self.scout_decorator = scout_apm.api.instrument(self.method, tags={}, kind=self.kind)
self.scout_decorator.__enter__()
def __exit__(self, type, value, traceback):
global use_scout
if use_scout:
log.debug("STOP MONITOR %s/%s" % (self.kind, self.method))
self.scout_decorator.__exit__(type, value, traceback)
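# Usage sketch (illustrative comments only; assumes monitor_init() ran during app setup
# so scout is configured, and `run_query` is a hypothetical workload):
#
#   with monitor(kind='sql', method='fetch_user'):
#       rows = run_query()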
|
py | b40cb883be9348841e4c19ced5695a8247fa3b16 | import numpy as np
from htm.bindings.sdr import SDR
from typing import Type, Sequence, Union
class Decoder:
def __init__(self, default_value):
self.default_value = default_value
def decode(self, pattern: Sequence):
return self.default_value
class IntBucketDecoder(Decoder):
def __init__(self, n_values, bucket_size, bucket_step=None, default_value: Union[str, int] = 'random', seed=None):
super(IntBucketDecoder, self).__init__(default_value)
self._rng = np.random.default_rng(seed)
if bucket_step is not None:
raise NotImplementedError("bucket_step is not supported yet")
self.n_values = n_values
self.bucket_size = bucket_size
self.bucket_step = bucket_step
def decode(self, pattern: Sequence):
if len(pattern) > 0:
buckets, counts = np.unique(
np.array(pattern)//self.bucket_size,
return_counts=True)
buckets = buckets[counts == counts.max()]
if buckets.size > 1:
value = self._rng.choice(buckets)
else:
value = buckets[0]
else:
if self.default_value == 'random':
value = self._rng.integers(self.n_values)
else:
value = self.default_value
return int(value)
class DecoderStack:
def __init__(self):
self.decoders = list()
def add_decoder(self, decoder: Type[Decoder], bit_range: (int, int)):
self.decoders.append((decoder, bit_range))
def decode(self, pattern: np.ndarray):
values = list()
for decoder, bit_range in self.decoders:
# keep bits that fall inside this decoder's half-open range [start, end)
mask = (pattern >= bit_range[0]) & (pattern < bit_range[1])
active_bits = pattern[mask] - bit_range[0]
value = decoder.decode(active_bits)
values.append(value)
return values
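# Usage sketch (illustrative comments only):
#
#   stack = DecoderStack()
#   stack.add_decoder(IntBucketDecoder(n_values=4, bucket_size=5, seed=0), (0, 20))
#   stack.decode(np.array([1, 2, 3]))  # -> [0]: all active bits fall in bucket 0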
|
py | b40cb91656a2fa698425b13a26dfdff600c2e4f1 | import glob
import os
import re
import shutil
import traceback
try:
import OpenImageIO as oiio
except ImportError:
traceback.print_exc()
raise RuntimeError('OpenImageIO library could not be found')
def dst_path(src, output_path, overwrite):
"""
Resolves the destination path for the wrapping operation
:param src: single input file path
:param output_path: output sequence path
:param overwrite: overwrite output option
:return: single output file path
"""
dst = output_path
frame = frame_of(src)
if frame and '%' in output_path:
dst = output_path % frame
if os.path.isfile(dst) and not overwrite:
print('Output file {dst} already exists. Skipping...'.format(dst=dst))
return None
else:
return dst
def frame_of(path):
"""
Get the frame number of a specific image path
:param path:
:return: frame as int
"""
frame = re.findall(r'.+[\._](\d{4,8})\.[A-Za-z]{3,4}', path)
if frame and len(frame) == 1:
return int(frame[0])
def bu_dir(input_path, create=True):
"""
Builds the path of the directory for backed up input files and creates it.
:param input_path: input exr sequence
:param create: boolean if the directory should be created
:return: path to the backup directory
"""
backup_dir = os.path.join(os.path.dirname(input_path), '_BAK')
if create:
try:
os.mkdir(backup_dir)
prev_backup = False
except OSError:
prev_backup = True
else:
prev_backup = os.path.isdir(backup_dir)
return backup_dir, prev_backup
def detect_sequence(path):
"""
Detects a sequence naming convention based on a path's embedded frame padding
:param path: path to a single file
:return: path to a corresponding sequence in printf notation (e.g. /path/to/image.%04d.exr)
"""
re_find = re.findall(r'(\d{4,8}|#{4,8})\.(exr|EXR)', path)
if re_find:
padding = re_find[0][0]
path = path.replace(padding, '%{:02d}d'.format(len(padding)))
return path
def find_sequence(path):
"""
Searches the filesystem to find the corresponding sequence for a single file.
:param path: path to a single image file
:return: image sequence as a tuple (path, first frame, last frame)
"""
path = detect_sequence(path)
files = find_files(path)
if files:
first = frame_of(files[0])
last = frame_of(files[-1])
return path, first, last
else:
return path, 0, 0
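# Usage sketch (illustrative comments only; the paths are hypothetical):
#
#   detect_sequence('/shots/sh010/comp.1001.exr')  # -> '/shots/sh010/comp.%04d.exr'
#   find_sequence('/shots/sh010/comp.1001.exr')    # -> (printf path, first, last frame)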
def find_files(input_path, framerange=None):
"""
Discovers files on the filesystem.
:param input_path: Path to the file sequence
:param framerange: optional framerange
:return: array of single file paths
"""
files = []
if '%' not in input_path:
return [input_path]
if framerange:
for part_range in framerange.split(','):
if '-' in part_range:
first, last = part_range.split('-')
for i in range(int(first), int(last) + 1):
files.append(input_path % i)
else:
files.append(input_path % int(part_range))
else:
input_path = re.sub(r'(%0[4-8]d)(\.(?:exr|EXR))', r'*\2', input_path)
files = glob.glob(input_path)
files = sorted(files)
return files
def extract_manifest(spec, image_path):
"""
Extract cryptomatte manifest from metadata to sidecar file.
:param spec: oiio.ImageSpec object containing manifest metadata
:param image_path: images's path to derive sidecar's path from
:return:
"""
cryptomattes = {}
for attribute in spec.extra_attribs:
attrib_split = attribute.name.split('/')
if len(attrib_split) == 3 and attrib_split[0] == 'cryptomatte':
id = attrib_split[1]
cryptomattes[id] = cryptomattes.get(id, {})
if attrib_split[2] == 'name':
cryptomattes[id]['name'] = attribute.value
if attrib_split[2] == 'manifest':
cryptomattes[id]['manifest'] = attribute.value
for id in cryptomattes:
crypto_name = cryptomattes[id]['name']
manifest_path = re.sub(r'(\.\d{3,8})?\.\w{3}', '_{}.json'.format(crypto_name), image_path)
manifest_data = cryptomattes[id].get('manifest')
if manifest_data:
try:
spec.erase_attribute('cryptomatte/{id}/manifest'.format(id=id))
except TypeError as e:
raise RuntimeWarning('Error while removing manifest')
spec.attribute('cryptomatte/{id}/manif_file'.format(id=id), os.path.basename(manifest_path))
import json
manifest_data = json.loads(manifest_data)
try:
with open(manifest_path) as json_file:
existing_data = json.load(json_file)
manifest_data.update(existing_data)
except IOError as e:
pass
with open(manifest_path, 'w') as manifest_file:
json.dump(manifest_data, manifest_file)
def rename_channels(src_channel_names, fix=True, strip=False):
new_channel_names = []
for channel_name in src_channel_names:
if fix and 'depth.z' in channel_name:
print('Correcting channel name: {}'.format(channel_name))
channel_name = 'depth.Z'
if strip:
channel_name = channel_name.split('.', 1)[-1]
new_channel_names.append(channel_name)
return new_channel_names
def split_subimages(image_in, properties):
"""
Splits an image into various subimages based on layer names
:param image_in: input as oiio.ImageBuf
:param properties: dictionary of additional parameters
:return: identical dictionary extend with arrays of sub_names, sub_specs and sub_pixels
"""
properties['roi'].chend = 4
channelindex = 0
for channel_name in image_in.nativespec().channelnames:
if channel_name in ['R', 'G', 'B', 'A']:
properties['current_sub'] = 'rgba'
else:
properties['current_sub'] = channel_name.split('.')[0]
# new subimage is found
if (properties['recent_sub'] and properties['current_sub'] != properties[
'recent_sub']) or channelindex + 1 == image_in.nativespec().nchannels:
# case last channel
if channelindex + 1 == image_in.nativespec().nchannels:
properties['sub_ch_count'] += 1
channelindex += 1
properties['sub_start'] = channelindex - properties['sub_ch_count']
properties['sub_end'] = channelindex - 1
if properties['verbose']:
print('Subimage found: {recent_sub} on channels: '
'{sub_start}-{sub_end}, channelcount: {sub_ch_count}'.format(**properties))
if image_in.nativespec().channelformats:
typedesc = image_in.nativespec().channelformats[properties['sub_start']]
else:
typedesc = image_in.nativespec().format
subimage_spec = oiio.ImageSpec(image_in.nativespec().width, image_in.nativespec().height,
properties['sub_ch_count'], typedesc)
subimage_spec.roi = properties['roi']
subimage_spec.full_width = image_in.nativespec().full_width
subimage_spec.full_height = image_in.nativespec().full_height
subimage_spec.depth = image_in.nativespec().depth
subimage_spec.full_x = image_in.nativespec().full_x
subimage_spec.full_y = image_in.nativespec().full_y
subimage_spec.full_z = image_in.nativespec().full_z
# copy metadata for the first subimage
if properties['sub_start'] == 0:
for i in range(len(image_in.nativespec().extra_attribs)):
if image_in.nativespec().extra_attribs[i].type in ['string', 'int', 'float']:
subimage_spec.attribute(image_in.nativespec().extra_attribs[i].name,
image_in.nativespec().extra_attribs[i].value)
else:
subimage_spec.attribute(image_in.nativespec().extra_attribs[i].name,
image_in.nativespec().extra_attribs[i].type,
image_in.nativespec().extra_attribs[i].value)
if properties['ex_manifest']:
extract_manifest(subimage_spec, properties['dst'])
if properties.get('compression'):
subimage_spec.attribute('compression', properties['compression'].strip("'"))
else:
subimage_spec.attribute('compression', image_in.nativespec().getattribute('compression'))
src_channel_names = image_in.nativespec().channelnames[properties['sub_start']:properties['sub_end'] + 1]
subimage_spec.channelnames = rename_channels(src_channel_names, fix=properties.get('fix_channels'), strip=True)
subimage_spec.attribute('name', properties['recent_sub'])
properties['sub_names'].append(properties['recent_sub'])
properties['sub_specs'].append(subimage_spec)
out_buffer = oiio.ImageBufAlgo.channels(image_in, tuple(src_channel_names))
out_buffer = oiio.ImageBufAlgo.cut(out_buffer, properties['roi'])
properties['sub_pixels'].append(out_buffer)
properties['sub_ch_count'] = 0
channelindex += 1
properties['recent_sub'] = properties['current_sub']
properties['sub_ch_count'] += 1
if len(properties['sub_specs']) != len(properties['sub_pixels']):
print('Internal error. Mismatch between subimage specs and pixel data.')
return
return properties
def rewrap(src, dst, autocrop=False, multipart=False, ex_manifest=False, fix_channels=False, compression=None,
verbose=False, *args, **kwargs):
"""
:param src: source image
:param dst: destination image
:param autocrop: set data window to non-empty pixel in rgb
:param multipart: split subimages
:param ex_manifest: prune exr metadata
:param fix_channels: make channel names Nuke-digestible (e.g. depth.Z)
:param compression: change compression, keeps the current one if None given
:param verbose: write verbose information to the terminal
:return: boolean if conversion was successful
"""
def update_specs(spec, properties):
spec.roi = properties['roi']
if properties['ex_manifest']:
extract_manifest(spec, dst)
if properties['compression']:
spec["Compression"] = compression.strip("'")
return spec
properties = locals()
image_in = oiio.ImageBuf(src)
properties['roi'] = image_in.roi
if properties['autocrop']:
properties['roi'] = oiio.ImageBufAlgo.nonzero_region(image_in, roi=properties['roi'])
if properties['compression'] == 'keep':
properties['compression'] = compression = None
properties['recent_sub'] = ''
properties['sub_ch_count'] = 0
properties['sub_specs'] = []
properties['sub_pixels'] = []
properties['sub_names'] = []
# set data window
if verbose:
print('Setting data window to: {roi}'.format(**properties))
print('{n} channels found'.format(n=len(image_in.nativespec().channelnames)))
properties['nsubimages'] = image_in.nsubimages
if properties['nsubimages'] > 1:
if properties.get('multipart', False):
print('Input file {src} already has {nsubimages} subimages.'.format(**properties))
properties['sub_specs'] = []
properties['sub_pixels'] = []
properties['sub_names'] = []
for i in range(0, properties['nsubimages']):
image_in = oiio.ImageBuf(src, i, 0)
spec = image_in.nativespec()
update_specs(spec, properties)
properties['sub_specs'].append(spec)
buffer = oiio.ImageBufAlgo.cut(image_in, properties['roi'])
properties['sub_pixels'].append(buffer)
properties['sub_names'].append(image_in.nativespec().getattribute('name'))
elif properties.get('multipart', False):
properties = split_subimages(image_in, properties)
else:
print('Writing single-part exr')
image_out = oiio.ImageOutput.create(dst)
if properties.get('sub_specs'):
ok = image_out.open(dst, tuple(properties['sub_specs']))
if not ok:
print('OIIO error while opening {} for writing parts: {} '.format(dst, image_out.geterror()))
return False
i = 0
for pixels in properties['sub_pixels']:
if verbose:
print('Writing subimage index {}: {}'.format(i, properties['sub_names'][i]))
if i != 0:
ok = image_out.open(dst, properties['sub_specs'][i], "AppendSubimage")
if not ok:
print('OIIO error while appending subimage {}: {}'.format(dst, image_out.geterror()))
return False
image_out.write_image(pixels.get_pixels())
i += 1
else:
spec = image_in.nativespec()
update_specs(spec, properties)
src_channel_names = spec.channelnames
spec.channelnames = rename_channels(src_channel_names, fix=properties.get('fix_channels'))
ok = image_out.open(dst, spec)
if not ok:
print('OIIO error while opening {} for writing image: {}'.format(dst, image_out.geterror()))
return False
buffer = oiio.ImageBufAlgo.cut(image_in, properties['roi'])
image_out.write_image(buffer.get_pixels())
image_out.close()
return True
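# Usage sketch (illustrative comments only; the paths and compression choice are
# hypothetical):
#
#   ok = rewrap('/tmp/in.1001.exr', '/tmp/out.1001.exr',
#               autocrop=True, multipart=True, compression='zips', verbose=True)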
def main(arguments):
if not arguments['single_file']:
arguments['input'] = detect_sequence(arguments['input'])
if arguments.get('output'):
arguments['output'] = detect_sequence(arguments['output'])
input_path = arguments['input']
output_path = arguments.get('output')
overwrite = arguments.get('overwrite')
dryrun = arguments.get('dryrun', False)
framerange = arguments.get('framerange')
if dryrun:
print('Doing dry-run.')
files = find_files(input_path, framerange)
if not files:
print('No files to process')
return
if not output_path:
backup_dir, prev_backup = bu_dir(input_path)
i = 0
for image_file in files:
if not os.path.isfile(image_file):
print('{path} not found'.format(path=image_file))
continue
if output_path:
src = image_file
dst = dst_path(image_file, output_path, overwrite)
if not dst:
continue
else:
src = os.path.join(backup_dir, os.path.basename(image_file))
dst = image_file
if os.path.isfile(src):
prev_backup = True
print('Backup file for {filename} from previous conversion in place. Skipping...'.format(
filename=os.path.basename(src)))
continue
print('Re-wrapping {src} to {dst}'.format(src=src, dst=dst))
if arguments.get('dryrun'):
continue
if not arguments.get('output'):
if arguments.get('verbose'):
print('Moving {orig} ---> {bak} for backup'.format(bak=src, orig=dst))
shutil.move(dst, src)
try:
ok = rewrap(src, dst, **arguments)
        except Exception:
if arguments.get('verbose'):
traceback.print_exc()
ok = False
if not ok and not arguments.get('output'):
print('Operation failed for {filename}, restoring backup file.'.format(
filename=os.path.basename(dst)))
shutil.move(src, dst)
        elif arguments.get('no_backup') and not arguments.get('output'):
            os.remove(src)
i += 1
        progress = i * 100 // len(files)
        print("Progress: {}%".format(progress))
    if not output_path and arguments.get('no_backup') and not prev_backup:
        try:
            os.removedirs(backup_dir)
        except OSError:
            pass
if __name__ == "__main__":
import argparse
    p = argparse.ArgumentParser(
        description='Tool for processing OpenEXR image files to improve their usability in VFX workflows. '
                    'By default, the following operations will be performed: '
                    'Auto-Crop: shrinks the data window (= bounding box) to non-black pixels; '
                    'Create multi-part exr: splits channels into subimages based on their layer names; '
                    'Fix channel names: renames channel names which could cause conflicts in Nuke (depth.z to depth.Z).')
p.add_argument("-v", "--verbose", action="store_true",
help=u"Verbose")
p.add_argument("input", help=u"Input File")
p.add_argument("-o", "--output",
help=u"Output File, if not specified the input file(s) will be overwritten while a backup"
u"is kept in the _BAK folder")
p.add_argument("-F", "--framerange",
help=u"Framerange")
p.add_argument("-a", "--autocrop", action="store_false",
help=u"Skip auto-crop")
p.add_argument("-m", "--multipart", action="store_false",
help=u"Skip multi-part creation")
p.add_argument("-f", "--fix_channels", action="store_false",
help=u"Skip channel name fix.")
p.add_argument("-c", "--compression",
help=u"Override compression, if not specified the compression of the input image will be kept.")
p.add_argument("-r", "--ex_manifest", action="store_true",
help=u"Extract cryptomatte manifest to sidecar file.")
p.add_argument("-s", "--single_file", action="store_true",
help=u"Skip sequence detection, only work on specified input file.")
p.add_argument("-y", "--overwrite", action="store_true",
help=u"Overwrite output images that already exist")
p.add_argument("-b", "--no_backup", action="store_true",
help=u"Don't keep backup of the original files (only relevant if no output specified")
p.add_argument("-n", "--dryrun", action="store_true",
help=u"Dry run, prints out which images would be touched.")
p.add_argument("-ui", "--user_interface", action="store_true",
help=u"Run graphical user interface")
arguments = vars(p.parse_args())
if arguments['user_interface']:
import mainwindow
mainwindow.main()
else:
if arguments['input'].split('.')[-1] in ['exr', 'EXR'] and not os.path.isdir(arguments['input']):
main(arguments)
else:
print('Input must be an OpenEXR file or sequence.')
|
py | b40cbaed3e94fe5207121320dd40bd0994ec3e7a | from flask import render_template, url_for, flash, redirect
from flaskblog import app
from flaskblog.forms import RegistrationForm, LoginForm
from flaskblog.models import User, Post
posts = [
{
'author': 'Corey Schafer',
'title': 'Blog Post 1',
'content': 'First post content',
'date_posted': 'April 20, 2018'
},
{
'author': 'Jane Doe',
'title': 'Blog Post 2',
'content': 'Second post content',
'date_posted': 'April 21, 2018'
}
]
@app.route("/")
@app.route("/home")
def home():
return render_template('home.html', posts=posts)
@app.route("/about")
def about():
return render_template('about.html', title='About')
@app.route("/register", methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
flash(f'Account created for {form.username.data}!', 'success')
return redirect(url_for('home'))
return render_template('register.html', title='Register', form=form)
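# Note: the flashed message above only shows up if the base template renders it,
# e.g. via get_flashed_messages(with_categories=true) in the (assumed) layout template.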
@app.route("/login", methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
if form.email.data == '[email protected]' and form.password.data == 'password':
flash('You have been logged in!', 'success')
return redirect(url_for('home'))
else:
flash('Login Unsuccessful. Please check username and password', 'danger')
return render_template('login.html', title='Login', form=form)
|
py | b40cbb53a4d2537bed5400f5ac71776742e613de | from flask_restful import Resource
from flask_restful import reqparse
from sqlalchemy.exc import SQLAlchemyError
from config import Configuration
from models.category import CategoryModel as CM
from models.post import PostModel as PM
def get_posts(posts_id):
posts = []
for _id in posts_id:
post = PM.find_by_id(_id)
if post:
posts.append(post)
else:
return {'message': 'There is no such post: \'{}\''.format(_id)}
return posts
def add_posts(new_category, posts):
for post in set(posts):
if PM.query.filter(PM.id == post.id).first():
new_category.posts.append(post)
else:
return {'message': 'There is no such post: \'{}\''.format(post.id)}
return new_category
class Category(Resource):
parser = reqparse.RequestParser()
parser.add_argument('posts',
type=int,
action='append',
required=False)
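    # A JSON body this parser accepts looks roughly like (the ids are hypothetical):
    #   {"posts": [1, 2, 3]}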
def post(self, name):
if name:
if CM.find_by_name(name):
return {'message': 'A category with name \'{}\' already exists'.format(name)}, 400
if len(name) > Configuration.MAX_CATEGORY_NAME_SIZE:
                return {'message': 'A title\'s length is more than {}'.format(Configuration.MAX_CATEGORY_NAME_SIZE)}, 400
        data = Category.parser.parse_args()
        posts_id = data['posts']
        if posts_id:
            posts = get_posts(posts_id)
            if isinstance(posts, dict):  # get_posts signals a missing post with a message dict
                return posts, 404
        else:
            posts = []
category = CM(name=name)
        if posts:
            category = add_posts(category, posts)
            if isinstance(category, dict):  # add_posts signals a missing post with a message dict
                return category, 404
try:
category.save_to_db()
except SQLAlchemyError as e:
err = str(e.__class__.__name__)
return {'message': '{}'.format(err)}, 500
return category.get_json(), 201
def delete(self, name):
category = CM.find_by_name(name)
if category:
try:
category.delete_from_db()
except SQLAlchemyError as e:
err = str(e.__class__.__name__)
return {'message': '{}'.format(err)}, 500
return {'message': 'Category was deleted'}
        return {'message': 'Category with name: \'{}\' was not found'.format(name)}, 404
def put(self, name):
data = Category.parser.parse_args()
        if len(name) > Configuration.MAX_CATEGORY_NAME_SIZE:
            return {'message': 'A name\'s length is more than {}'.format(Configuration.MAX_CATEGORY_NAME_SIZE)}, 400
        posts_id = data['posts']
        if posts_id:
            posts = get_posts(posts_id)
            if isinstance(posts, dict):  # get_posts signals a missing post with a message dict
                return posts, 404
        else:
            posts = []
category = CM.find_by_name(name)
if not category:
category = CM(name=name)
else:
category.name = name
category.posts = []
        if posts:
            category = add_posts(category, posts)
            if isinstance(category, dict):  # add_posts signals a missing post with a message dict
                return category, 404
try:
category.save_to_db()
except SQLAlchemyError as e:
err = str(e.__class__.__name__)
return {'message': '{}'.format(err)}, 500
return category.get_json(), 201
class CategoryList(Resource):
def get(self):
return {'categories': [category.get_json() for category in CM.query.all()]}
|
py | b40cbb699ca14cb26032354243ccd2288841325f | from __future__ import absolute_import, print_function
from django.conf import settings
import requests
AUTHORIZATION_ENDPOINT = getattr(settings, 'OIDC_AUTHORIZATION_ENDPOINT', None)
TOKEN_ENDPOINT = getattr(settings, 'OIDC_TOKEN_ENDPOINT', None)
CLIENT_ID = getattr(settings, 'OIDC_CLIENT_ID', None)
CLIENT_SECRET = getattr(settings, 'OIDC_CLIENT_SECRET', None)
USERINFO_ENDPOINT = getattr(settings, 'OIDC_USERINFO_ENDPOINT', None)
SCOPE = getattr(settings, 'OIDC_SCOPE', 'openid email')
ISSUER = getattr(settings, 'OIDC_ISSUER', None)
WELL_KNOWN_SCHEME = "/.well-known/openid-configuration"
ERR_INVALID_RESPONSE = 'Unable to fetch user information from provider. Please check the log.'
DATA_VERSION = '1'
OIDC_DOMAIN = getattr(settings, 'OIDC_DOMAIN', None)
if OIDC_DOMAIN:
    WELL_KNOWN_URL = OIDC_DOMAIN.strip("/") + WELL_KNOWN_SCHEME
    try:
        well_known_values = requests.get(WELL_KNOWN_URL, timeout=2.0).json()
    except (requests.RequestException, ValueError):
        # fall back to the explicit settings above if the discovery request fails
        well_known_values = None
    if well_known_values:
        USERINFO_ENDPOINT = well_known_values['userinfo_endpoint']
        AUTHORIZATION_ENDPOINT = well_known_values['authorization_endpoint']
        TOKEN_ENDPOINT = well_known_values['token_endpoint']
        ISSUER = well_known_values['issuer']
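# For reference, the document fetched above is the standard OpenID Connect discovery
# metadata; an abridged example (the values here are illustrative, provider-specific):
#   {"issuer": "https://example.auth0.com/",
#    "authorization_endpoint": "https://example.auth0.com/authorize",
#    "token_endpoint": "https://example.auth0.com/oauth/token",
#    "userinfo_endpoint": "https://example.auth0.com/userinfo"}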
|
py | b40cbbee759818b79abc79d6b71a9aeabc8e7fde | from setuptools import setup
setup(
name='skywatch-data-sources',
version='0.1',
py_modules=['skywatch_data_sources'],
install_requires=[
'beautifulsoup4',
'click',
'lxml',
'requests',
'tabulate',
],
entry_points='''
[console_scripts]
skywatch-data-sources=skywatch_data_sources.cli:list_data_sources
''',
)
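# Usage note: after `pip install .`, setuptools generates a `skywatch-data-sources`
# console script on PATH that calls skywatch_data_sources.cli:list_data_sources.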
|
py | b40cbc175c1f1dafe67b66557197d597ff732355 |
#import
import random
import os
import numpy
from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.mplot3d.art3d import Poly3DCollection, Line3DCollection
from matplotlib.patches import Polygon
from matplotlib.collections import PatchCollection
import matplotlib.pyplot as plt
# from PIL import Image
import read_model
# retrieve the reconstructed 3D points from points3D.txt as {point_id: [x, y, z]}
def getDict():
dic = {}
with open("points3D.txt","r") as n:
for i, line in enumerate(n):
if i>2:
a = line.split(" ")
temp = []
temp.append(float(a[1]))
temp.append(float(a[2]))
temp.append(float(a[3]))
dic[int(a[0])] = temp[:]
return dic
# get the plane function from three points
def getSpaceFunction(p1,p2,p3):
a = ((p2[1] - p1[1]) * (p3[2] - p1[2]) - (p2[2] - p1[2]) * (p3[1] - p1[1]))
b = ((p2[2] - p1[2]) * (p3[0] - p1[0]) - (p2[0] - p1[0]) * (p3[2] - p1[2]))
c = ((p2[0] - p1[0]) * (p3[1] - p1[1]) - (p2[1] - p1[1]) * (p3[0] - p1[0]))
d = (0 - (a * p1[0] + b * p1[1] + c * p1[2]))
return a,b,c,d
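# Note: (a, b, c) above is the cross product (p2 - p1) x (p3 - p1), i.e. the plane
# normal, and d is chosen so that p1 satisfies a*x + b*y + c*z + d = 0.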
# get the distance between a point and a plane
def getDistance(a,b,c,d,p): # plane ax+by+cz+d = 0, where p is the point
up = abs(a*p[0]+b*p[1]+c*p[2]+d)
down = (a*a+b*b+c*c)**0.5
return up/down
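# Worked example: for the plane z = 0 (a = b = d = 0, c = 1) and p = (1, 1, 2),
# the distance is |2| / sqrt(1) = 2.0.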
# count how many points satisfy the rule that the distance is less than the threshold
def evaluatePlane(a,b,c,d,threshold,dic):
count = 0
for point in list(dic.values()):
if getDistance(a,b,c,d,point) < threshold:
count += 1
return count
# RANSAC
def randomChoose(dic, threshold, iter): #threshold is the allowable distance between points and plane
    maxCount = 0
    dlist = list(dic.values())#dict values
    bestParam = [] # a,b,c,d
    for i in range(iter):
        p1, p2, p3 = random.sample(range(len(dlist)), 3) #choose three distinct 0-based indices
        a,b,c,d = getSpaceFunction(dlist[p1], dlist[p2], dlist[p3])# get plane function
        thisCount = evaluatePlane(a,b,c,d,threshold,dic) #count the number of good points
        if thisCount > maxCount:
            maxCount = thisCount
            bestParam = [a,b,c,d]
    return maxCount,bestParam
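# A common rule of thumb for the iteration count: with inlier ratio w and desired
# success probability p, RANSAC needs about log(1 - p) / log(1 - w**3) three-point
# samples; e.g. w = 0.5, p = 0.99 gives ~35 iterations, so 500 is comfortably safe.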
# Step5
# plot the reconstruction points with different colors for inliners and outliners
def plot3D(dic, para, threshold, outfile):
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.set_xlabel('X Label')
ax.set_ylabel('Y Label')
ax.set_zlabel('Z Label')
ax.view_init(30, 30)
for k in dic:
if getDistance(para[0],para[1],para[2],para[3],dic[k]) < threshold:
ax.scatter3D(dic[k][0], dic[k][1], dic[k][2], c='r', s = 5)
else:
ax.scatter3D(dic[k][0], dic[k][1], dic[k][2], c='b', s= 5)
fig.savefig(outfile)
return fig
# Step6
def correctPlane(para,dic):
ans = {}
a = para[0]
b = para[1]
c = para[2]
d = para[3]
matrix = [[1,0,(a*c)**2/(c+1)-a*c],[0,1,(c*b)**2/(c+1)-b*c],[(a*c)**2/(c+1)-a*c,(c*b)**2/(c+1)-b*c,1-b*c]]
mat = numpy.asmatrix(matrix)
    for key in dic.keys():
        ans[key] = numpy.asarray((mat * numpy.asmatrix(dic[key]).T - c).T)[0]
return [0,0,1,0],ans
# Step7
# draw a 3D box
def drawBox(sidelen):
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.set_xlabel('X Label')
ax.set_ylabel('Y Label')
ax.set_zlabel('Z Label')
    cube_matrix = numpy.array([[-1, -1, 0], [1, -1, 0], [1, 1, 0], [-1, 1, 0], [-1, -1, 2], [1, -1, 2], [1, 1, 2], [-1, 1, 2]])
    vertexes = sidelen/2*cube_matrix
ax.scatter3D(vertexes[:, 0], vertexes[:, 1], vertexes[:, 2], c='b', s = 5)
ax.scatter3D(0, 0, 0, c='r', s = 5)
edges = [[vertexes[0],vertexes[1],vertexes[2],vertexes[3]],[vertexes[4],vertexes[5],vertexes[6],vertexes[7]], [vertexes[0],vertexes[3],vertexes[7],vertexes[4]], [vertexes[1],vertexes[2],vertexes[6],vertexes[5]], [vertexes[0],vertexes[1],vertexes[5],vertexes[4]],[vertexes[2],vertexes[3],vertexes[7],vertexes[6]]]
# faces = Poly3DCollection(edges, linewidths=1, edgecolors='k')
# faces.set_facecolor((0,0,1,0.1))
colors = [(0.4,0.4,0.5,0.5),(0.5,0.7,1,0.5),(0.5,1,0.5,0.5),(1,0.7,0,0.5),(1,0.7,0.7,0.5),(0.6,0.4,1,0.5)]
for i in range(6):
f = [edges[i]]
face = Poly3DCollection(f, linewidths=1, edgecolors='k')
face.set_facecolor(colors[i])
ax.add_collection3d(face)
fig.savefig('box.pdf')
return vertexes
# 3D to 2D with pinhole camera model
# c = (cx, cy, cz)
# vertexes = eight vertexes of the cube
def modifyImages(c, vertexes):
    cameras, images, points3D = read_model.read_model("/Users/jennydeng/Desktop/class/CSE586/CSE586/Project1", ".txt")
    width = cameras[1].width
    height = cameras[1].height
    focus = cameras[1].params[0]
    fpToPixel = numpy.array([[1, 0, width/2], [0, 1, height/2], [0, 0, 1]])
    translation = numpy.array([[1, 0, 0, -1*c[0]], [0, 1, 0, -1*c[1]], [0, 0, 1, -1*c[2]], [0, 0, 0, 1]])
    perspectiveProjection = numpy.array([[focus, 0, 0, 0], [0, focus, 0, 0], [0, 0, 1, 0]])
    for m in images:
        R = read_model.qvec2rotmat(images[m].qvec)  # qvec2rotmat lives in the read_model helper module
        rotation = numpy.array([[R[0][0], R[0][1], R[0][2], 0], [R[1][0], R[1][1], R[1][2], 0], [R[2][0], R[2][1], R[2][2], 0], [0, 0, 0, 1]])
        plist = to2D(m, vertexes, fpToPixel, perspectiveProjection, rotation, translation)
        draw2D(images[m].id, plist)
def to2D(img_id, vertexes, fpToPixel, perspectiveProjection, rotation, translation):
plist = []
for v in vertexes:
worldPoint = numpy.array([[v[0]],[v[1]],[v[2]],[1]])
        p = numpy.dot(numpy.dot(numpy.dot(fpToPixel, perspectiveProjection), numpy.dot(rotation, translation)), worldPoint)
x = p[0]/p[2]
y = p[1]/p[2]
pixelLoc = [x, y]
# print(pixelLoc)
plist.append(pixelLoc)
return plist
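# The chain above is the standard pinhole model: p ~ fpToPixel * perspectiveProjection *
# rotation * translation * X_world, and dividing by p[2] performs the perspective divide.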
def draw2D(img_id, pixels):
img = plt.imread('samples/'+str(img_id)+".jpg")
fig, ax = plt.subplots()
ax.imshow(img)
bottom = Polygon(numpy.array([pixels[0],pixels[1],pixels[2],pixels[3]]), True, linewidth=1, edgecolor='k', facecolor = [0.4,0.4,0.5,1])
ax.add_patch(bottom)
back = Polygon(numpy.array([pixels[3],pixels[2],pixels[6],pixels[7]]), True, linewidth=1, edgecolor='k', facecolor = [0.6,0.4,1,1])
ax.add_patch(back)
left = Polygon(numpy.array([pixels[0],pixels[3],pixels[7],pixels[4]]), True, linewidth=1, edgecolor='k', facecolor = [1,0.7,0,1])
ax.add_patch(left)
top = Polygon(numpy.array([pixels[4],pixels[5],pixels[6],pixels[7]]), True, linewidth=1, edgecolor='k', facecolor = [0.2,0.4,1,0.7])
ax.add_patch(top)
right = Polygon(numpy.array([pixels[1],pixels[2],pixels[6],pixels[5]]), True, linewidth=1, edgecolor='k', facecolor = [0.5,1,0.5,0.7])
ax.add_patch(right)
front = Polygon(numpy.array([pixels[0],pixels[1],pixels[5],pixels[4]]), True, linewidth=1, edgecolor='k', facecolor = [1,0.7,0.7,0.7])
ax.add_patch(front)
if not os.path.exists('2Dresults'):
os.makedirs('2Dresults')
plt.savefig('2Dresults/'+str(img_id)+'.png')
return 0
#************** main function ****************
#print getSpaceFunction([0,0,0],[1,0,0],[1,1,0])
#a,b,c,d = getSpaceFunction([0,0,0],[1,0,0],[1,1,0])
#print getDistance(a,b,c,d,[1,1,1])
# dic = getDict()
# count, para = randomChoose(dic, 0.1, 500)
# print(para)
# c = [1, 1]
# z = -1*(para[0]+para[1]+para[3])/para[2]
# c.append(z)
# plot3D(dic, para, 0.1, '3Dplots.pdf')
# para2, dic2 = correctPlane(para, dic)
# plot3D(dic2, para2, 0.1, '3Dplots_modified.pdf')
# vlist = drawBox(500)
# print(vlist)
# modifyImages(c, vlist)
draw2D(1,numpy.array([[3000,3500],[4000, 3500],[4866, 3200],[3866, 3200],[3000, 2500],[4000, 2500],[4866, 2200],[3866, 2200]]))
#end
|
py | b40cbcb76871728e35c4aa0c2fee410c5eda6090 | #simple program that prints "Hello Open Source Community!"
print("Hello Open Source Community!")
|
py | b40cbd41554ffa0fc6e7a95dee90337e14adaa17 | # coding=utf-8
import logging
import random
import string
import sys
import unittest
from time import time, sleep
import apiritif
import os
import re
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException, TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support.ui import Select
from selenium.webdriver.support import expected_conditions as econd
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.keys import Keys
from bzt.resources.selenium_extras import get_locator, dialogs_replace
class TestLocSc(unittest.TestCase):
def setUp(self):
self.vars = {'city_select_name': 'fromPort', 'input_name_id': 'inputName'}
timeout = 3.5
self.driver = None
options = webdriver.ChromeOptions()
options.add_argument('--no-sandbox')
options.add_argument('--disable-dev-shm-usage')
self.driver = webdriver.Chrome(
service_log_path='/somewhere/webdriver.log',
options=options)
self.driver.implicitly_wait(timeout)
apiritif.put_into_thread_store(scenario_name='loc_sc', timeout=timeout, func_mode=False, windows={},
driver=self.driver)
def _1_Conditions_test(self):
with apiritif.smart_transaction('Conditions test'):
self.driver.get('http://blazedemo.com')
dialogs_replace()
test = self.driver.execute_script('return document.getElementsByName("fromPort")[0].length > 0;')
if test:
var_loc_keys = get_locator([{'id': 'wrong_id'}, {'xpath': '/html/body/div[3]/form/div/input'}])
self.driver.find_element(
var_loc_keys[0],
var_loc_keys[1]).click()
sleep(1.0)
test = self.driver.execute_script('return document.getElementsByClassName("table")[0].rows.length > 5;')
if test:
var_loc_keys = get_locator([{'xpath': '/html/body/div[2]/table/tbody/tr[5]/td[1]/input'}])
self.driver.find_element(
var_loc_keys[0],
var_loc_keys[1]).click()
test = self.driver.execute_script(
'return document.getElementById("{}").value === \'\';'.format(self.vars['input_name_id']))
if test:
var_loc_keys = get_locator([{'id': self.vars['input_name_id']}])
self.driver.find_element(
var_loc_keys[0],
var_loc_keys[1]).clear()
self.driver.find_element(
var_loc_keys[0],
var_loc_keys[1]).send_keys('John Doe')
else:
var_loc_keys = get_locator([{'id': self.vars['input_name_id']}])
self.driver.find_element(
var_loc_keys[0],
var_loc_keys[1]).clear()
self.driver.find_element(
var_loc_keys[0],
var_loc_keys[1]).send_keys('Jack Green')
var_loc_keys = get_locator([{'xpath': '/html/body/div[2]/form/div[11]/div/input'}])
self.driver.find_element(
var_loc_keys[0],
var_loc_keys[1]).click()
sleep(5.0)
else:
test = self.driver.execute_script('return document.getElementsByClassName("table")[0].rows.length > 5;')
if test:
var_loc_keys = get_locator([{'id': self.vars['elem2_id']}])
self.driver.find_element(
var_loc_keys[0],
var_loc_keys[1]).clear()
self.driver.find_element(
var_loc_keys[0],
var_loc_keys[1]).send_keys('my text')
test = self.driver.execute_script('return window.screen.width > 1000;')
if test:
self.driver.save_screenshot('file_1000')
else:
self.driver.save_screenshot('file')
else:
var_loc_keys = get_locator([{'xpath': '/html/body/div[3]/input'}])
self.driver.find_element(
var_loc_keys[0],
var_loc_keys[1]).click()
def test_locsc(self):
self._1_Conditions_test()
def tearDown(self):
if self.driver:
self.driver.quit()
|
py | b40cbd6d7367527d11b3e4519e4f4f93df30054d | VERSION = "1.6.0"
HOMEPAGE = "https://github.com/sanderland/katrain"
CONFIG_MIN_VERSION = "1.6.0" # keep config files from this version
OUTPUT_ERROR = -1
OUTPUT_KATAGO_STDERR = -0.5
OUTPUT_INFO = 0
OUTPUT_DEBUG = 1
OUTPUT_EXTRA_DEBUG = 2
STATUS_ANALYSIS = 1.0 # same priority for analysis/info
STATUS_INFO = 1.1
STATUS_TEACHING = 2.0
STATUS_ERROR = 1000.0
PLAYER_HUMAN, PLAYER_AI = "player:human", "player:ai"
PLAYER_TYPES = [PLAYER_HUMAN, PLAYER_AI]
PLAYING_NORMAL, PLAYING_TEACHING = "game:normal", "game:teach"
GAME_TYPES = [PLAYING_NORMAL, PLAYING_TEACHING]
MODE_PLAY, MODE_ANALYZE = "play", "analyze"
AI_DEFAULT = "ai:default"
AI_HANDICAP = "ai:handicap"
AI_SCORELOSS = "ai:scoreloss"
AI_WEIGHTED = "ai:p:weighted"
AI_JIGO = "ai:jigo"
AI_POLICY = "ai:policy"
AI_PICK = "ai:p:pick"
AI_LOCAL = "ai:p:local"
AI_TENUKI = "ai:p:tenuki"
AI_INFLUENCE = "ai:p:influence"
AI_TERRITORY = "ai:p:territory"
AI_RANK = "ai:p:rank"
AI_SIMPLE_OWNERSHIP = "ai:simple"
AI_SETTLE_STONES = "ai:settle"
AI_CONFIG_DEFAULT = AI_RANK
AI_STRATEGIES_ENGINE = [AI_DEFAULT, AI_HANDICAP, AI_SCORELOSS, AI_SIMPLE_OWNERSHIP, AI_JIGO] # AI_SETTLE_STONES
AI_STRATEGIES_PICK = [AI_PICK, AI_LOCAL, AI_TENUKI, AI_INFLUENCE, AI_TERRITORY, AI_RANK]
AI_STRATEGIES_POLICY = [AI_WEIGHTED, AI_POLICY] + AI_STRATEGIES_PICK
AI_STRATEGIES = AI_STRATEGIES_ENGINE + AI_STRATEGIES_POLICY
AI_STRATEGIES_RECOMMENDED_ORDER = [
AI_DEFAULT,
AI_RANK,
AI_HANDICAP,
AI_SIMPLE_OWNERSHIP,
# AI_SETTLE_STONES,
AI_SCORELOSS,
AI_POLICY,
AI_WEIGHTED,
AI_PICK,
AI_LOCAL,
AI_TENUKI,
AI_TERRITORY,
AI_INFLUENCE,
AI_JIGO,
]
AI_STRENGTH = { # dan ranks, backup if model is missing. TODO: remove some?
AI_DEFAULT: 9,
AI_POLICY: 5,
AI_JIGO: float("nan"),
AI_SCORELOSS: -4,
AI_WEIGHTED: -4,
AI_PICK: -7,
AI_LOCAL: -4,
AI_TENUKI: -7,
AI_INFLUENCE: -7,
AI_TERRITORY: -7,
AI_RANK: float("nan"),
AI_SIMPLE_OWNERSHIP: 2,
AI_SETTLE_STONES: 2,
}
AI_OPTION_VALUES = {
"kyu_rank": [(k, f"{k}[strength:kyu]") for k in range(15, 0, -1)]
+ [(k, f"{1-k}[strength:dan]") for k in range(0, -3, -1)],
"strength": [0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.4, 0.5, 1],
"opening_moves": range(0, 51),
"pick_override": [0, 0.5, 0.6, 0.7, 0.8, 0.85, 0.9, 0.95, 0.99, 1],
"lower_bound": [(v, f"{v:.2%}") for v in [0, 0.0001, 0.0005, 0.001, 0.005, 0.01, 0.05]],
"weaken_fac": [x / 20 for x in range(10, 3 * 20 + 1)],
"endgame": [x / 100 for x in range(10, 80, 5)],
"pick_frac": [x / 100 for x in range(0, 101, 5)],
"pick_n": range(0, 26),
"stddev": [x / 2 for x in range(21)],
"line_weight": range(0, 11),
"threshold": [2, 2.5, 3, 3.5, 4, 4.5],
"automatic": "bool",
"pda": [(x / 10, f"{'W' if x<0 else 'B'}+{abs(x/10):.1f}") for x in range(-30, 31)],
"max_points_lost": [x / 10 for x in range(51)],
"settled_weight": [x / 4 for x in range(0, 17)],
"opponent_fac": [x / 10 for x in range(-20, 11)],
"min_visits": range(1, 10),
"attach_penalty": [x / 10 for x in range(-10, 51)],
"tenuki_penalty": [x / 10 for x in range(-10, 51)],
}
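# Each entry above maps an option to either a list of raw values or a list of
# (value, display_label) tuples; "bool" marks a simple on/off toggle (presumed UI hint).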
AI_KEY_PROPERTIES = {
"kyu_rank",
"strength",
"weaken_fac",
"pick_frac",
"pick_n",
"automatic",
"max_points_lost",
"min_visits",
}
CALIBRATED_RANK_ELO = [
(-21.679482223451032, 18),
(42.60243194422105, 17),
(106.88434611189314, 16),
(171.16626027956522, 15),
(235.44817444723742, 14),
(299.7300886149095, 13),
(364.0120027825817, 12),
(428.2939169502538, 11),
(492.5758311179259, 10),
(556.8577452855981, 9),
(621.1396594532702, 8),
(685.4215736209424, 7),
(749.7034877886144, 6),
(813.9854019562865, 5),
(878.2673161239586, 4),
(942.5492302916308, 3),
(1006.8311444593029, 2),
(1071.113058626975, 1),
(1135.3949727946472, 0),
(1199.6768869623193, -1),
(1263.9588011299913, -2),
(1700, -4),
]
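# (elo, rank) anchor pairs; intermediate ratings are presumably mapped to kyu/dan
# ranks by interpolating between the surrounding anchors.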
AI_WEIGHTED_ELO = [
(0.5, 1591.5718897531551),
(1.0, 1269.9896556526198),
(1.25, 1042.25179764667),
(1.5, 848.9410084463602),
(1.75, 630.1483212024823),
(2, 575.3637091858013),
(2.5, 410.9747543504796),
(3.0, 219.8667371799533),
]
AI_SCORELOSS_ELO = [
(0.0, 539),
(0.05, 625),
(0.1, 859),
(0.2, 1035),
(0.3, 1201),
(0.4, 1299),
(0.5, 1346),
(0.75, 1374),
(1.0, 1386),
]
AI_LOCAL_ELO_GRID = [
[0.0, 0.05, 0.1, 0.2, 0.3, 0.5, 0.75, 1.0],
[0, 5, 10, 15, 25, 50],
[
[-204.0, 791.0, 1154.0, 1372.0, 1402.0, 1473.0, 1700.0, 1700.0],
[174.0, 1094.0, 1191.0, 1384.0, 1435.0, 1522.0, 1700.0, 1700.0],
[619.0, 1155.0, 1323.0, 1390.0, 1450.0, 1558.0, 1700.0, 1700.0],
[975.0, 1289.0, 1332.0, 1401.0, 1461.0, 1575.0, 1700.0, 1700.0],
[1344.0, 1348.0, 1358.0, 1467.0, 1477.0, 1616.0, 1700.0, 1700.0],
[1425.0, 1474.0, 1489.0, 1524.0, 1571.0, 1700.0, 1700.0, 1700.0],
],
]
AI_TENUKI_ELO_GRID = [
[0.0, 0.05, 0.1, 0.2, 0.3, 0.5, 0.75, 1.0],
[0, 5, 10, 15, 25, 50],
[
[47.0, 335.0, 530.0, 678.0, 830.0, 1070.0, 1376.0, 1700.0],
[99.0, 469.0, 546.0, 707.0, 855.0, 1090.0, 1413.0, 1700.0],
[327.0, 513.0, 605.0, 745.0, 875.0, 1110.0, 1424.0, 1700.0],
[429.0, 519.0, 620.0, 754.0, 900.0, 1130.0, 1435.0, 1700.0],
[492.0, 607.0, 682.0, 797.0, 1000.0, 1208.0, 1454.0, 1700.0],
[778.0, 830.0, 909.0, 949.0, 1169.0, 1461.0, 1483.0, 1700.0],
],
]
AI_TERRITORY_ELO_GRID = [
[0.0, 0.05, 0.1, 0.2, 0.3, 0.5, 0.75, 1.0],
[0, 5, 10, 15, 25, 50],
[
[34.0, 383.0, 566.0, 748.0, 980.0, 1264.0, 1527.0, 1700.0],
[131.0, 450.0, 586.0, 826.0, 995.0, 1280.0, 1537.0, 1700.0],
[291.0, 517.0, 627.0, 850.0, 1010.0, 1310.0, 1547.0, 1700.0],
[454.0, 526.0, 696.0, 870.0, 1038.0, 1340.0, 1590.0, 1700.0],
[491.0, 603.0, 747.0, 890.0, 1050.0, 1390.0, 1635.0, 1700.0],
[718.0, 841.0, 1039.0, 1076.0, 1332.0, 1523.0, 1700.0, 1700.0],
],
]
AI_INFLUENCE_ELO_GRID = [
[0.0, 0.05, 0.1, 0.2, 0.3, 0.5, 0.75, 1.0],
[0, 5, 10, 15, 25, 50],
[
[217.0, 439.0, 572.0, 768.0, 960.0, 1227.0, 1449.0, 1521.0],
[302.0, 551.0, 580.0, 800.0, 1028.0, 1257.0, 1470.0, 1529.0],
[388.0, 572.0, 619.0, 839.0, 1077.0, 1305.0, 1490.0, 1561.0],
[467.0, 591.0, 764.0, 878.0, 1097.0, 1390.0, 1530.0, 1591.0],
[539.0, 622.0, 815.0, 953.0, 1120.0, 1420.0, 1560.0, 1601.0],
[772.0, 912.0, 958.0, 1145.0, 1318.0, 1511.0, 1577.0, 1623.0],
],
]
AI_PICK_ELO_GRID = [
[0.0, 0.05, 0.1, 0.2, 0.3, 0.5, 0.75, 1.0],
[0, 5, 10, 15, 25, 50],
[
[-533.0, -515.0, -355.0, 234.0, 650.0, 1147.0, 1546.0, 1700.0],
[-531.0, -450.0, -69.0, 347.0, 670.0, 1182.0, 1550.0, 1700.0],
[-450.0, -311.0, 140.0, 459.0, 693.0, 1252.0, 1555.0, 1700.0],
[-365.0, -82.0, 265.0, 508.0, 864.0, 1301.0, 1619.0, 1700.0],
[-113.0, 273.0, 363.0, 641.0, 983.0, 1486.0, 1700.0, 1700.0],
[514.0, 670.0, 870.0, 1128.0, 1305.0, 1550.0, 1700.0, 1700.0],
],
]
|
py | b40cbe04a6f56a4ee0197aecaed614005e0eebdc | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
"""Udacity Fullstack Developer nanodegree (Logs Analyses)."""
import psycopg2
import sys
from termcolor import colored
DBNAME = "news"
def init(action=''):
"""Call user intended method."""
if action == 'authors':
get_popular_authors()
elif action == 'articles':
get_popular_articles()
elif action == 'errors':
get_bad_devops_days()
elif action == 'all':
get_popular_authors()
get_popular_articles()
get_bad_devops_days()
else:
print colored("Pass an argument to the analysis tool:", 'red') \
+ colored("\n - python logs.py authors", 'yellow') \
+ colored("\n - python logs.py articles", 'yellow') \
+ colored("\n - python logs.py errors", 'yellow')
def execute_query(query):
""" Take a query, return the results as a list of tuples."""
try:
db = psycopg2.connect(database=DBNAME)
c = db.cursor()
c.execute(query)
data = c.fetchall()
db.close()
return data
except (Exception, psycopg2.DatabaseError) as error:
print(error)
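# Example: execute_query("SELECT 1") returns [(1,)] - each row comes back as a tuple.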
"""
Task: Which 3 articles have been accessed the most?
Output: Sorted list with the most popular article at the top.
Example: Princess Shellfish Marries Prince Handsome — 1201 views
"""
def get_popular_articles():
"""Return records for most popular articles."""
query = """SELECT articles.title, count(*) as views
FROM articles
JOIN log
ON log.path LIKE('/article/' || articles.slug)
GROUP BY articles.title
ORDER BY views DESC
LIMIT 3 """
data = execute_query(query)
print "\n\n"
print colored("Which articles have been accessed most?", 'green')
for record in data:
print colored(str(record[1]), 'red') + " views" + " - " + record[0]
print "\n"
return data
"""
Task: Who are the most popular authors of all time?
Help: Sum up the views of all articles per author; which author gets the most views?
Output: Sorted list with the most popular author at the top.
Example: Ursula La Multa — 2304 views
"""
def get_popular_authors():
"""Return records for most popular authors."""
query = """SELECT authors.name, sum(articles.views) as views
FROM authors
JOIN ( SELECT articles.author, count(*) as views
FROM articles
JOIN log
ON log.path LIKE('%' || articles.slug)
GROUP BY articles.author) as articles
ON authors.id = articles.author
GROUP BY authors.name
ORDER BY views DESC """
data = execute_query(query)
print "\n\n"
print colored("Who are the most popular authors of all time?", 'green')
for record in data:
print colored(str(record[1]), 'red') + " views" + " - " + record[0]
print "\n"
return data
"""
Task: On which days did more than 1 % of requests lead to errors?
Help: The log table includes a column status that indicates
the HTTP status code that the news site sent to the user's browser.
Example: July 29, 2016 — 2.5 % errors
"""
def get_bad_devops_days():
"""Return records for bad devops days."""
query = """SELECT stats.date,
(CAST(stats.error as FLOAT) / CAST(stats.all as FLOAT) * 100)
FROM (
SELECT DATE(time) as date,
count(*) AS all,
count(*) FILTER (WHERE status != '200 OK') AS error
FROM log
GROUP BY DATE(time)
ORDER BY date DESC
) AS stats
WHERE (CAST(stats.error as FLOAT) /
CAST(stats.all as FLOAT) * 100) > 1 """
data = execute_query(query)
print "\n\n"
print colored("Days with more then 1% error rate:", 'green')
for record in data:
print record[0].strftime("%B %d, %Y") + " - " \
+ colored(str(round(record[1], 1)) + "%", 'red') + " errors"
print "\n"
return data
if __name__ == '__main__':
init(*sys.argv[1:])
|
py | b40cbedc9819503666ab3d09027b13b6a2f16f8a | grid = [[1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],[0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1],[1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0],[0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1],[1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0],[1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1],[1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 
0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0],[1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1],[0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1],[0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1],[0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1],[1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1],[1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0],[1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 
0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0],[1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0],[1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1],[0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0],[0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1],[0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1],[1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0],[1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 
1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1],[1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1],[1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0],[0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0],[1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1],[1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0],[0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 
0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1],[0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0],[1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0],[0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1],[0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1],[1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1],[1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0],[0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 
0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1],[1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1],[1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0],[1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1],[1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1],[0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0],[0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1],[0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 
0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1],[1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1],[0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0],[1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0],[1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0],[1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0],[0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1],[0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 
... (binary matrix data: nested lists of 0/1 integers, one ~175-entry list per row; continuation of the data blob above) ...] |
py | b40cbef14edba2e052a9183c94e603392836167c | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Sumit Naiksatam, Cisco Systems, Inc.
# @author: Rohit Agarwalla, Cisco Systems, Inc.
class CiscoNEXUSFakeDriver():
"""Nexus Driver Fake Class."""
def __init__(self):
pass
def nxos_connect(self, nexus_host, nexus_ssh_port, nexus_user,
nexus_password):
"""Make the fake connection to the Nexus Switch."""
pass
    def create_xml_snippet(self, customized_config):
"""Create XML snippet.
Creates the Proper XML structure for the Nexus Switch
Configuration.
"""
pass
def enable_vlan(self, mgr, vlanid, vlanname):
"""Create a VLAN on Nexus Switch given the VLAN ID and Name."""
pass
def disable_vlan(self, mgr, vlanid):
"""Delete a VLAN on Nexus Switch given the VLAN ID."""
pass
def disable_switch_port(self, mgr, interface):
"""Disable trunk mode an interface on Nexus Switch."""
pass
def enable_vlan_on_trunk_int(self, mgr, etype, interface, vlanid):
"""Enable vlan on trunk interface.
        Enable trunk-mode VLAN access on an interface on the Nexus Switch
        given the VLAN ID.
"""
pass
def disable_vlan_on_trunk_int(self, mgr, interface, vlanid):
"""Disables vlan in trunk interface.
        Disables trunk-mode VLAN access on an interface on the Nexus Switch
        given the VLAN ID.
"""
pass
def create_vlan(self, vlan_name, vlan_id, nexus_host, nexus_user,
nexus_password, nexus_ports, nexus_ssh_port, vlan_ids):
"""Create VLAN and enable it on interface.
Creates a VLAN and Enable on trunk mode an interface on Nexus Switch
given the VLAN ID and Name and Interface Number.
"""
pass
def delete_vlan(self, vlan_id, nexus_host, nexus_user, nexus_password,
nexus_ports, nexus_ssh_port):
"""Delete VLAN.
Delete a VLAN and Disables trunk mode an interface on Nexus Switch
given the VLAN ID and Interface Number.
"""
pass
def build_vlans_cmd(self):
"""Build a string with all the VLANs on the same Switch."""
pass
def add_vlan_int(self, vlan_id, nexus_host, nexus_user, nexus_password,
nexus_ports, nexus_ssh_port, vlan_ids=None):
"""Add a vlan from interfaces on the Nexus switch given the VLAN ID."""
pass
def remove_vlan_int(self, vlan_id, nexus_host, nexus_user, nexus_password,
nexus_ports, nexus_ssh_port):
"""Remove vlan from interfaces.
Removes a vlan from interfaces on the Nexus switch given the VLAN ID.
"""
pass
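# A minimal usage sketch (illustrative only; the host, port, and credentials
# below are assumptions): the fake driver mirrors the real Nexus driver's
# interface, so tests can swap it in without a live switch.
if __name__ == '__main__':
    driver = CiscoNEXUSFakeDriver()
    driver.nxos_connect('10.0.0.1', 22, 'admin', 'password')
    driver.enable_vlan(None, 100, 'test-vlan')
    driver.disable_vlan(None, 100)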
|
py | b40cbf1a62b9e6c98709f3afb24fa416e1c6cc85 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'Hw2_ui.ui'
#
# Created by: PyQt5 UI code generator 5.15.1
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(1034, 647)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.groupBox = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox.setGeometry(QtCore.QRect(30, 30, 321, 281))
self.groupBox.setObjectName("groupBox")
self.pushButton = QtWidgets.QPushButton(self.groupBox)
self.pushButton.setGeometry(QtCore.QRect(70, 40, 171, 61))
self.pushButton.setObjectName("pushButton")
self.pushButton_2 = QtWidgets.QPushButton(self.groupBox)
self.pushButton_2.setGeometry(QtCore.QRect(70, 110, 171, 61))
self.pushButton_2.setObjectName("pushButton_2")
self.label = QtWidgets.QLabel(self.groupBox)
self.label.setGeometry(QtCore.QRect(50, 190, 221, 20))
self.label.setObjectName("label")
self.label_2 = QtWidgets.QLabel(self.groupBox)
self.label_2.setGeometry(QtCore.QRect(50, 220, 221, 20))
self.label_2.setObjectName("label_2")
self.groupBox_2 = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox_2.setGeometry(QtCore.QRect(30, 320, 691, 271))
self.groupBox_2.setObjectName("groupBox_2")
self.pushButton_3 = QtWidgets.QPushButton(self.groupBox_2)
self.pushButton_3.setGeometry(QtCore.QRect(60, 40, 171, 61))
self.pushButton_3.setObjectName("pushButton_3")
self.pushButton_4 = QtWidgets.QPushButton(self.groupBox_2)
self.pushButton_4.setGeometry(QtCore.QRect(60, 120, 171, 61))
self.pushButton_4.setObjectName("pushButton_4")
self.pushButton_5 = QtWidgets.QPushButton(self.groupBox_2)
self.pushButton_5.setGeometry(QtCore.QRect(60, 200, 171, 61))
self.pushButton_5.setObjectName("pushButton_5")
self.groupBox_3 = QtWidgets.QGroupBox(self.groupBox_2)
self.groupBox_3.setGeometry(QtCore.QRect(360, 40, 271, 211))
self.groupBox_3.setObjectName("groupBox_3")
self.label_3 = QtWidgets.QLabel(self.groupBox_3)
self.label_3.setGeometry(QtCore.QRect(30, 50, 91, 20))
self.label_3.setObjectName("label_3")
self.comboBox = QtWidgets.QComboBox(self.groupBox_3)
self.comboBox.setGeometry(QtCore.QRect(30, 80, 101, 31))
self.comboBox.setObjectName("comboBox")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.pushButton_6 = QtWidgets.QPushButton(self.groupBox_3)
self.pushButton_6.setGeometry(QtCore.QRect(30, 130, 181, 61))
self.pushButton_6.setObjectName("pushButton_6")
self.groupBox_4 = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox_4.setGeometry(QtCore.QRect(390, 30, 331, 131))
self.groupBox_4.setObjectName("groupBox_4")
self.pushButton_7 = QtWidgets.QPushButton(self.groupBox_4)
self.pushButton_7.setGeometry(QtCore.QRect(70, 50, 191, 61))
self.pushButton_7.setObjectName("pushButton_7")
self.groupBox_5 = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox_5.setGeometry(QtCore.QRect(390, 170, 331, 141))
self.groupBox_5.setObjectName("groupBox_5")
self.pushButton_8 = QtWidgets.QPushButton(self.groupBox_5)
self.pushButton_8.setGeometry(QtCore.QRect(70, 50, 191, 61))
self.pushButton_8.setObjectName("pushButton_8")
self.groupBox_6 = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox_6.setGeometry(QtCore.QRect(750, 30, 251, 561))
self.groupBox_6.setObjectName("groupBox_6")
self.pushButton_9 = QtWidgets.QPushButton(self.groupBox_6)
self.pushButton_9.setGeometry(QtCore.QRect(30, 70, 191, 61))
self.pushButton_9.setObjectName("pushButton_9")
self.pushButton_10 = QtWidgets.QPushButton(self.groupBox_6)
self.pushButton_10.setGeometry(QtCore.QRect(30, 190, 191, 61))
self.pushButton_10.setObjectName("pushButton_10")
self.pushButton_11 = QtWidgets.QPushButton(self.groupBox_6)
self.pushButton_11.setGeometry(QtCore.QRect(30, 310, 191, 61))
self.pushButton_11.setObjectName("pushButton_11")
self.pushButton_12 = QtWidgets.QPushButton(self.groupBox_6)
self.pushButton_12.setGeometry(QtCore.QRect(30, 430, 191, 61))
self.pushButton_12.setObjectName("pushButton_12")
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1034, 28))
self.menubar.setObjectName("menubar")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "鄭皓中 P76081116"))
self.groupBox.setTitle(_translate("MainWindow", "1 . Find Contour"))
self.pushButton.setText(_translate("MainWindow", "1.1 Draw Contour"))
self.pushButton_2.setText(_translate("MainWindow", "1.2 Count Coins"))
self.label.setText(_translate("MainWindow", "There are __ coins in coin01.jpg"))
self.label_2.setText(_translate("MainWindow", "There are __ coins in coin02.jpg"))
self.groupBox_2.setTitle(_translate("MainWindow", "2 . Calibration"))
self.pushButton_3.setText(_translate("MainWindow", "2.1 Find Corners"))
self.pushButton_4.setText(_translate("MainWindow", "2.2 Find Intrinsic"))
self.pushButton_5.setText(_translate("MainWindow", "2.4 Find Distortion"))
self.groupBox_3.setTitle(_translate("MainWindow", "2.3 Find Extrinsic"))
self.label_3.setText(_translate("MainWindow", "Select image"))
self.comboBox.setItemText(0, _translate("MainWindow", "1.bmp"))
self.comboBox.setItemText(1, _translate("MainWindow", "2.bmp"))
self.comboBox.setItemText(2, _translate("MainWindow", "3.bmp"))
self.comboBox.setItemText(3, _translate("MainWindow", "4.bmp"))
self.comboBox.setItemText(4, _translate("MainWindow", "5.bmp"))
self.comboBox.setItemText(5, _translate("MainWindow", "6.bmp"))
self.comboBox.setItemText(6, _translate("MainWindow", "7.bmp"))
self.comboBox.setItemText(7, _translate("MainWindow", "8.bmp"))
self.comboBox.setItemText(8, _translate("MainWindow", "9.bmp"))
self.comboBox.setItemText(9, _translate("MainWindow", "10.bmp"))
self.comboBox.setItemText(10, _translate("MainWindow", "11.bmp"))
self.comboBox.setItemText(11, _translate("MainWindow", "12.bmp"))
self.comboBox.setItemText(12, _translate("MainWindow", "13.bmp"))
self.comboBox.setItemText(13, _translate("MainWindow", "14.bmp"))
self.comboBox.setItemText(14, _translate("MainWindow", "15.bmp"))
self.pushButton_6.setText(_translate("MainWindow", "2.3 Find Extrinsic"))
self.groupBox_4.setTitle(_translate("MainWindow", "3. Augmented Reality"))
self.pushButton_7.setText(_translate("MainWindow", "3.1 Augmented Reality"))
self.groupBox_5.setTitle(_translate("MainWindow", "4. Stereo Disparity Map"))
self.pushButton_8.setText(_translate("MainWindow", "4.1 Stereo Disparity Map"))
self.groupBox_6.setTitle(_translate("MainWindow", "5. Dogs and Cats classification"))
self.pushButton_9.setText(_translate("MainWindow", "5.1 Training 5 epochs"))
self.pushButton_10.setText(_translate("MainWindow", "5.2 TensorBoard"))
self.pushButton_11.setText(_translate("MainWindow", "5.3 Randomly select"))
self.pushButton_12.setText(_translate("MainWindow", "5.4 Try to resize"))
|
py | b40cc06ce5d4bc5ffee3cb12b877c131f16fdefc | """The Winix C545 Air Purifier component."""
from __future__ import annotations
import asyncio
from datetime import timedelta
import logging
from typing import List
from homeassistant.const import CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client, discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_time_interval
import voluptuous as vol
from custom_components.winix.device_wrapper import WinixDeviceWrapper
from winix import WinixAccount, auth, cmd
from .const import DOMAIN, SERVICE_REFRESH_ACCESS
_LOGGER = logging.getLogger(__name__)
MIN_SCAN_INTERVAL = timedelta(seconds=30)
SUPPORTED_PLATFORMS = ["fan", "sensor"]
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(
CONF_SCAN_INTERVAL, default=MIN_SCAN_INTERVAL
): cv.time_period,
}
)
},
extra=vol.ALLOW_EXTRA,
)
class WinixManager:
"""Representation of the Winix device manager."""
def __init__(self, hass: HomeAssistant, domain_config, scan_interval: int) -> None:
"""Initialize the manager."""
self._device_wrappers: List[WinixDeviceWrapper] = None
self._domain_config = domain_config
self._hass = hass
self._scan_interval = scan_interval
        # Do not create a local configuration file; this ensures the configuration is always freshly fetched
self._config = cmd.Configuration("")
@property
def scan_interval(self):
"""Return the scan interval."""
return self._scan_interval
def login(self) -> None:
"""Login and setup platforms."""
config = self._config
username = self._domain_config.get(CONF_USERNAME)
password = self._domain_config.get(CONF_PASSWORD)
try:
config.cognito = auth.login(username, password)
account = WinixAccount(config.cognito.access_token)
account.register_user(username)
account.check_access_token()
config.devices = account.get_device_info_list()
self._config = config
device_stubs = self._config.devices
_LOGGER.debug("Configuration initialized")
self._hass.async_create_task(self.async_prepare_devices(device_stubs))
self.setup_services()
except Exception as err:
_LOGGER.error(err)
async def async_prepare_devices(self, device_stubs) -> None:
"""Create devices and setup platforms."""
if device_stubs:
self._device_wrappers = []
client = aiohttp_client.async_get_clientsession(self._hass)
for device_stub in device_stubs:
self._device_wrappers.append(
WinixDeviceWrapper(client, device_stub, _LOGGER)
)
_LOGGER.info("Found %d purifiers", len(self._device_wrappers))
self._hass.async_create_task(self.async_setup_platforms())
async def async_setup_platforms(self) -> None:
"""Set up platforms."""
if self.get_device_wrappers():
# Get data once
await self.async_update()
for component in SUPPORTED_PLATFORMS:
discovery.load_platform(
self._hass, component, DOMAIN, {}, self._domain_config
)
def update_devices(event_time):
asyncio.run_coroutine_threadsafe(self.async_update(), self._hass.loop)
async_track_time_interval(self._hass, update_devices, self._scan_interval)
def setup_services(self) -> None:
"""Set up services."""
self._hass.services.register(
DOMAIN,
SERVICE_REFRESH_ACCESS,
self.handle_platform_services,
)
def handle_platform_services(self, call) -> None:
"""Handle common services."""
service = call.service
if self._config:
if service == SERVICE_REFRESH_ACCESS:
self._config.cognito = auth.refresh(
user_id=self._config.cognito.user_id,
refresh_token=self._config.cognito.refresh_token,
)
account = WinixAccount(self._config.cognito.access_token)
account.check_access_token()
_LOGGER.info("Access token refreshed")
async def async_update(self, now=None) -> None:
"""Asynchronously update all the devices."""
_LOGGER.info("Updating devices")
for device_wrapper in self._device_wrappers:
await device_wrapper.update()
def get_device_wrappers(self) -> List[WinixDeviceWrapper]:
"""Return the device wrapper objects."""
return self._device_wrappers
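# A hedged sketch of the YAML that CONFIG_SCHEMA accepts (the domain key and
# the values below are illustrative assumptions, not real credentials):
#
#   winix:
#     username: user@example.com
#     password: secret
#     scan_interval: 60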
|
py | b40cc24ee88aeecbdec41a40193e88286d400faf | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pathlib import Path
from setuptools import setup
from setuptools.config import read_configuration
setup_cfg = Path(__file__).parent.joinpath("setup.cfg")
conf_dict = read_configuration(setup_cfg)
url = conf_dict["metadata"]["url"]
version = conf_dict["metadata"]["version"]
setup(download_url=f"{url}/archive/refs/tags/{version}.tar.gz")
|
py | b40cc2cc12cf45ad3181d311de16bd847f7b0552 | # coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose" file="CalendarException.py">
# Copyright (c) 2020 Aspose.Tasks Cloud
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import pprint
import re # noqa: F401
import six
class CalendarException(object):
"""Represent exceptional time periods in a calendar.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'index': 'int',
'entered_by_occurrences': 'bool',
'from_date': 'datetime',
'to_date': 'datetime',
'occurrences': 'int',
'name': 'str',
'type': 'CalendarExceptionType',
'period': 'int',
'days_of_week': 'list[DayType]',
'month_item': 'MonthItemType',
'month_position': 'MonthPosition',
'month': 'Month',
'month_day': 'int',
'day_working': 'bool',
'working_times': 'list[WorkingTime]'
}
attribute_map = {
'index': 'index',
'entered_by_occurrences': 'enteredByOccurrences',
'from_date': 'fromDate',
'to_date': 'toDate',
'occurrences': 'occurrences',
'name': 'name',
'type': 'type',
'period': 'period',
'days_of_week': 'daysOfWeek',
'month_item': 'monthItem',
'month_position': 'monthPosition',
'month': 'month',
'month_day': 'monthDay',
'day_working': 'dayWorking',
'working_times': 'workingTimes'
}
def __init__(self, index=None, entered_by_occurrences=None, from_date=None, to_date=None, occurrences=None, name=None, type=None, period=None, days_of_week=None, month_item=None, month_position=None, month=None, month_day=None, day_working=None, working_times=None): # noqa: E501
"""CalendarException - a model defined in Swagger""" # noqa: E501
self._index = None
self._entered_by_occurrences = None
self._from_date = None
self._to_date = None
self._occurrences = None
self._name = None
self._type = None
self._period = None
self._days_of_week = None
self._month_item = None
self._month_position = None
self._month = None
self._month_day = None
self._day_working = None
self._working_times = None
self.discriminator = None
if index is not None:
self.index = index
if entered_by_occurrences is not None:
self.entered_by_occurrences = entered_by_occurrences
if from_date is not None:
self.from_date = from_date
if to_date is not None:
self.to_date = to_date
if occurrences is not None:
self.occurrences = occurrences
if name is not None:
self.name = name
if type is not None:
self.type = type
if period is not None:
self.period = period
if days_of_week is not None:
self.days_of_week = days_of_week
if month_item is not None:
self.month_item = month_item
if month_position is not None:
self.month_position = month_position
if month is not None:
self.month = month
if month_day is not None:
self.month_day = month_day
if day_working is not None:
self.day_working = day_working
if working_times is not None:
self.working_times = working_times
@property
def index(self):
"""Gets the index of this CalendarException. # noqa: E501
Index of the current item in the collection of calendar's exceptions. # noqa: E501
:return: The index of this CalendarException. # noqa: E501
:rtype: int
"""
return self._index
@index.setter
def index(self, index):
"""Sets the index of this CalendarException.
Index of the current item in the collection of calendar's exceptions. # noqa: E501
:param index: The index of this CalendarException. # noqa: E501
:type: int
"""
if index is None:
raise ValueError("Invalid value for `index`, must not be `None`") # noqa: E501
self._index = index
@property
def entered_by_occurrences(self):
"""Gets the entered_by_occurrences of this CalendarException. # noqa: E501
Determines whether the range of recurrence is defined by entering a number of occurrences. False specifies that the range of recurrence is defined by entering a finish date. # noqa: E501
:return: The entered_by_occurrences of this CalendarException. # noqa: E501
:rtype: bool
"""
return self._entered_by_occurrences
@entered_by_occurrences.setter
def entered_by_occurrences(self, entered_by_occurrences):
"""Sets the entered_by_occurrences of this CalendarException.
Determines whether the range of recurrence is defined by entering a number of occurrences. False specifies that the range of recurrence is defined by entering a finish date. # noqa: E501
:param entered_by_occurrences: The entered_by_occurrences of this CalendarException. # noqa: E501
:type: bool
"""
if entered_by_occurrences is None:
raise ValueError("Invalid value for `entered_by_occurrences`, must not be `None`") # noqa: E501
self._entered_by_occurrences = entered_by_occurrences
@property
def from_date(self):
"""Gets the from_date of this CalendarException. # noqa: E501
The beginning of the exception time. # noqa: E501
:return: The from_date of this CalendarException. # noqa: E501
:rtype: datetime
"""
return self._from_date
@from_date.setter
def from_date(self, from_date):
"""Sets the from_date of this CalendarException.
The beginning of the exception time. # noqa: E501
:param from_date: The from_date of this CalendarException. # noqa: E501
:type: datetime
"""
if from_date is None:
raise ValueError("Invalid value for `from_date`, must not be `None`") # noqa: E501
self._from_date = from_date
@property
def to_date(self):
"""Gets the to_date of this CalendarException. # noqa: E501
The end of the exception time. # noqa: E501
:return: The to_date of this CalendarException. # noqa: E501
:rtype: datetime
"""
return self._to_date
@to_date.setter
def to_date(self, to_date):
"""Sets the to_date of this CalendarException.
The end of the exception time. # noqa: E501
:param to_date: The to_date of this CalendarException. # noqa: E501
:type: datetime
"""
if to_date is None:
raise ValueError("Invalid value for `to_date`, must not be `None`") # noqa: E501
self._to_date = to_date
@property
def occurrences(self):
"""Gets the occurrences of this CalendarException. # noqa: E501
The number of occurrences for which the calendar exception is valid. # noqa: E501
:return: The occurrences of this CalendarException. # noqa: E501
:rtype: int
"""
return self._occurrences
@occurrences.setter
def occurrences(self, occurrences):
"""Sets the occurrences of this CalendarException.
The number of occurrences for which the calendar exception is valid. # noqa: E501
:param occurrences: The occurrences of this CalendarException. # noqa: E501
:type: int
"""
if occurrences is None:
raise ValueError("Invalid value for `occurrences`, must not be `None`") # noqa: E501
self._occurrences = occurrences
@property
def name(self):
"""Gets the name of this CalendarException. # noqa: E501
The name of the exception. # noqa: E501
:return: The name of this CalendarException. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this CalendarException.
The name of the exception. # noqa: E501
:param name: The name of this CalendarException. # noqa: E501
:type: str
"""
self._name = name
@property
def type(self):
"""Gets the type of this CalendarException. # noqa: E501
The exception type. # noqa: E501
:return: The type of this CalendarException. # noqa: E501
:rtype: CalendarExceptionType
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this CalendarException.
The exception type. # noqa: E501
:param type: The type of this CalendarException. # noqa: E501
:type: CalendarExceptionType
"""
if type is None:
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
self._type = type
@property
def period(self):
"""Gets the period of this CalendarException. # noqa: E501
The period of recurrence for the exception. # noqa: E501
:return: The period of this CalendarException. # noqa: E501
:rtype: int
"""
return self._period
@period.setter
def period(self, period):
"""Sets the period of this CalendarException.
The period of recurrence for the exception. # noqa: E501
:param period: The period of this CalendarException. # noqa: E501
:type: int
"""
if period is None:
raise ValueError("Invalid value for `period`, must not be `None`") # noqa: E501
self._period = period
@property
def days_of_week(self):
"""Gets the days_of_week of this CalendarException. # noqa: E501
The days of the week on which the exception is valid. # noqa: E501
:return: The days_of_week of this CalendarException. # noqa: E501
:rtype: list[DayType]
"""
return self._days_of_week
@days_of_week.setter
def days_of_week(self, days_of_week):
"""Sets the days_of_week of this CalendarException.
The days of the week on which the exception is valid. # noqa: E501
:param days_of_week: The days_of_week of this CalendarException. # noqa: E501
:type: list[DayType]
"""
self._days_of_week = days_of_week
@property
def month_item(self):
"""Gets the month_item of this CalendarException. # noqa: E501
The month item for which an exception recurrence is scheduled. # noqa: E501
:return: The month_item of this CalendarException. # noqa: E501
:rtype: MonthItemType
"""
return self._month_item
@month_item.setter
def month_item(self, month_item):
"""Sets the month_item of this CalendarException.
The month item for which an exception recurrence is scheduled. # noqa: E501
:param month_item: The month_item of this CalendarException. # noqa: E501
:type: MonthItemType
"""
if month_item is None:
raise ValueError("Invalid value for `month_item`, must not be `None`") # noqa: E501
self._month_item = month_item
@property
def month_position(self):
"""Gets the month_position of this CalendarException. # noqa: E501
The position of a month item within a month. # noqa: E501
:return: The month_position of this CalendarException. # noqa: E501
:rtype: MonthPosition
"""
return self._month_position
@month_position.setter
def month_position(self, month_position):
"""Sets the month_position of this CalendarException.
The position of a month item within a month. # noqa: E501
:param month_position: The month_position of this CalendarException. # noqa: E501
:type: MonthPosition
"""
if month_position is None:
raise ValueError("Invalid value for `month_position`, must not be `None`") # noqa: E501
self._month_position = month_position
@property
def month(self):
"""Gets the month of this CalendarException. # noqa: E501
The month for which an exception recurrence is scheduled. # noqa: E501
:return: The month of this CalendarException. # noqa: E501
:rtype: Month
"""
return self._month
@month.setter
def month(self, month):
"""Sets the month of this CalendarException.
The month for which an exception recurrence is scheduled. # noqa: E501
:param month: The month of this CalendarException. # noqa: E501
:type: Month
"""
if month is None:
raise ValueError("Invalid value for `month`, must not be `None`") # noqa: E501
self._month = month
@property
def month_day(self):
"""Gets the month_day of this CalendarException. # noqa: E501
The day of a month on which an exception recurrence is scheduled. # noqa: E501
:return: The month_day of this CalendarException. # noqa: E501
:rtype: int
"""
return self._month_day
@month_day.setter
def month_day(self, month_day):
"""Sets the month_day of this CalendarException.
The day of a month on which an exception recurrence is scheduled. # noqa: E501
:param month_day: The month_day of this CalendarException. # noqa: E501
:type: int
"""
if month_day is None:
raise ValueError("Invalid value for `month_day`, must not be `None`") # noqa: E501
self._month_day = month_day
@property
def day_working(self):
"""Gets the day_working of this CalendarException. # noqa: E501
Determines whether the specified date or day type is working. # noqa: E501
:return: The day_working of this CalendarException. # noqa: E501
:rtype: bool
"""
return self._day_working
@day_working.setter
def day_working(self, day_working):
"""Sets the day_working of this CalendarException.
Determines whether the specified date or day type is working. # noqa: E501
:param day_working: The day_working of this CalendarException. # noqa: E501
:type: bool
"""
if day_working is None:
raise ValueError("Invalid value for `day_working`, must not be `None`") # noqa: E501
self._day_working = day_working
@property
def working_times(self):
"""Gets the working_times of this CalendarException. # noqa: E501
        The collection of working times that defines the time worked on the weekday. At least one working time must be present, and there can't be more than five. # noqa: E501
:return: The working_times of this CalendarException. # noqa: E501
:rtype: list[WorkingTime]
"""
return self._working_times
@working_times.setter
def working_times(self, working_times):
"""Sets the working_times of this CalendarException.
        The collection of working times that defines the time worked on the weekday. At least one working time must be present, and there can't be more than five. # noqa: E501
:param working_times: The working_times of this CalendarException. # noqa: E501
:type: list[WorkingTime]
"""
self._working_times = working_times
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CalendarException):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
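# A minimal usage sketch (field values are illustrative assumptions):
if __name__ == '__main__':
    exc = CalendarException(index=1, name='Holiday', day_working=False)
    # to_dict() returns every swagger property; fields left unset appear as None
    print(exc.to_dict())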
|
py | b40cc2d19974560404b97f76ed5526571f4577b8 | from anu import Region  # Region must be importable for pickle to load the stored objects
import pickle
regions = []
with open("regions.pickle", "rb") as f:
regions = pickle.load(f)
for r in regions:
tup = (r.ID, r.Name.replace("'", "\\'"), r.Type+1, 'true' if r.IsKelurahan else 'false', r.fkParentID)
print "INSERT INTO regions VALUES (E'%s',E'%s', %d, %s, NULL, NULL, '%s','2014-12-13 04:48:57','2014-12-13 10:56:16');" % tup
|
py | b40cc3a91579d6d3a7072796210f2ec641aa58e6 | import torch
from torch import nn
from torch.nn import functional as F
import numpy as np
class Learner(nn.Module):
"""
"""
def __init__(self, config, imgc, imgsz):
"""
:param config: network config file, type:list of (string, list)
:param imgc: 1 or 3
:param imgsz: 28 or 84
"""
super(Learner, self).__init__()
self.config = config
# this dict contains all tensors needed to be optimized
self.vars = nn.ParameterList()
# running_mean and running_var
self.vars_bn = nn.ParameterList()
for i, (name, param) in enumerate(self.config):
            if name == 'conv2d':
                # [ch_out, ch_in, kernelsz, kernelsz]
                w = nn.Parameter(torch.ones(*param[:4]))
                # gain=1 according to cbfinn's implementation
torch.nn.init.kaiming_normal_(w)
self.vars.append(w)
# [ch_out]
self.vars.append(nn.Parameter(torch.zeros(param[0])))
            elif name == 'convt2d':
                # [ch_in, ch_out, kernelsz, kernelsz, stride, padding]
                w = nn.Parameter(torch.ones(*param[:4]))
                # gain=1 according to cbfinn's implementation
torch.nn.init.kaiming_normal_(w)
self.vars.append(w)
# [ch_in, ch_out]
self.vars.append(nn.Parameter(torch.zeros(param[1])))
            elif name == 'linear':
# [ch_out, ch_in]
w = nn.Parameter(torch.ones(*param))
# gain=1 according to cbfinn's implementation
torch.nn.init.kaiming_normal_(w)
self.vars.append(w)
# [ch_out]
self.vars.append(nn.Parameter(torch.zeros(param[0])))
            elif name == 'bn':
# [ch_out]
w = nn.Parameter(torch.ones(param[0]))
self.vars.append(w)
# [ch_out]
self.vars.append(nn.Parameter(torch.zeros(param[0])))
# must set requires_grad=False
running_mean = nn.Parameter(torch.zeros(param[0]), requires_grad=False)
running_var = nn.Parameter(torch.ones(param[0]), requires_grad=False)
self.vars_bn.extend([running_mean, running_var])
elif name in ['tanh', 'relu', 'upsample', 'avg_pool2d', 'max_pool2d',
'flatten', 'reshape', 'leakyrelu', 'sigmoid']:
continue
else:
raise NotImplementedError
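    # A hedged example of the `config` this Learner expects -- a list of
    # (name, param) tuples; the parameter orderings are assumptions inferred
    # from how param is unpacked in __init__ and forward:
    #   config = [
    #       ('conv2d', [32, 3, 3, 3, 1, 0]),   # ch_out, ch_in, k, k, stride, padding
    #       ('relu', [True]),                  # inplace
    #       ('bn', [32]),                      # num_features
    #       ('max_pool2d', [2, 2, 0]),         # kernel, stride, padding
    #       ('flatten', []),
    #       ('linear', [5, 32 * 13 * 13]),     # ch_out, ch_in
    #   ]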
def extra_repr(self):
info = ''
for name, param in self.config:
            if name == 'conv2d':
tmp = 'conv2d:(ch_in:%d, ch_out:%d, k:%dx%d, stride:%d, padding:%d)'\
%(param[1], param[0], param[2], param[3], param[4], param[5],)
info += tmp + '\n'
            elif name == 'convt2d':
tmp = 'convTranspose2d:(ch_in:%d, ch_out:%d, k:%dx%d, stride:%d, padding:%d)'\
%(param[0], param[1], param[2], param[3], param[4], param[5],)
info += tmp + '\n'
            elif name == 'linear':
tmp = 'linear:(in:%d, out:%d)'%(param[1], param[0])
info += tmp + '\n'
            elif name == 'leakyrelu':
tmp = 'leakyrelu:(slope:%f)'%(param[0])
info += tmp + '\n'
            elif name == 'avg_pool2d':
tmp = 'avg_pool2d:(k:%d, stride:%d, padding:%d)'%(param[0], param[1], param[2])
info += tmp + '\n'
            elif name == 'max_pool2d':
tmp = 'max_pool2d:(k:%d, stride:%d, padding:%d)'%(param[0], param[1], param[2])
info += tmp + '\n'
elif name in ['flatten', 'tanh', 'relu', 'upsample', 'reshape', 'sigmoid', 'use_logits', 'bn']:
tmp = name + ':' + str(tuple(param))
info += tmp + '\n'
else:
raise NotImplementedError
return info
def forward(self, x, vars=None, bn_training=True):
"""
        This function can be called during fine-tuning; however, during fine-tuning we do
        not wish to update running_mean/running_var. Although the weights/bias of bn are
        updated, they are kept separate via fast_weights. To avoid updating
        running_mean/running_var, set bn_training=False; the weights/bias will still be
        updated through fast_weights without dirtying the initial theta parameters.
:param x: [b, 1, 28, 28]
:param vars:
:param bn_training: set False to not update
:return: x, loss, likelihood, kld
"""
if vars is None:
vars = self.vars
idx = 0
bn_idx = 0
for name, param in self.config:
            if name == 'conv2d':
                w, b = vars[idx], vars[idx + 1]
                # remember to keep forward_encoder and forward_decoder synchronized!
x = F.conv2d(x, w, b, stride=param[4], padding=param[5])
idx += 2
# print(name, param, '\tout:', x.shape)
            elif name == 'convt2d':
                w, b = vars[idx], vars[idx + 1]
                # remember to keep forward_encoder and forward_decoder synchronized!
x = F.conv_transpose2d(x, w, b, stride=param[4], padding=param[5])
idx += 2
# print(name, param, '\tout:', x.shape)
            elif name == 'linear':
w, b = vars[idx], vars[idx + 1]
x = F.linear(x, w, b)
idx += 2
# print('forward:', idx, x.norm().item())
            elif name == 'bn':
w, b = vars[idx], vars[idx + 1]
running_mean, running_var = self.vars_bn[bn_idx], self.vars_bn[bn_idx+1]
x = F.batch_norm(x, running_mean, running_var, weight=w, bias=b, training=bn_training)
idx += 2
bn_idx += 2
            elif name == 'flatten':
# print(x.shape)
x = x.view(x.size(0), -1)
            elif name == 'reshape':
# [b, 8] => [b, 2, 2, 2]
x = x.view(x.size(0), *param)
            elif name == 'relu':
x = F.relu(x, inplace=param[0])
            elif name == 'leakyrelu':
x = F.leaky_relu(x, negative_slope=param[0], inplace=param[1])
            elif name == 'tanh':
x = F.tanh(x)
            elif name == 'sigmoid':
x = torch.sigmoid(x)
            elif name == 'upsample':
x = F.upsample_nearest(x, scale_factor=param[0])
            elif name == 'max_pool2d':
x = F.max_pool2d(x, param[0], param[1], param[2])
            elif name == 'avg_pool2d':
x = F.avg_pool2d(x, param[0], param[1], param[2])
else:
raise NotImplementedError
# make sure variable is used properly
assert idx == len(vars)
assert bn_idx == len(self.vars_bn)
return x
def zero_grad(self, vars=None):
"""
:param vars:
:return:
"""
with torch.no_grad():
if vars is None:
for p in self.vars:
if p.grad is not None:
p.grad.zero_()
else:
for p in vars:
if p.grad is not None:
p.grad.zero_()
def parameters(self):
"""
override this function since initial parameters will return with a generator.
:return:
"""
return self.vars |
py | b40cc3e5ece18458bbedb79d864de408152adf36 | import multiprocessing
from lib.common import connectDB, getRaw, log, oidList
import sys
import time
host = 'mongodb://user:iasa2020!@localhost'
chunk = 100
maxProcessNo = 16
def crawlNews(oid, processNo, pushedNo, startTime):
while True:
try:
_, _, newsRawDB = connectDB(host)
metadataCollection = newsRawDB['metadata']
try:
startNo = metadataCollection.find_one({"oid": oid})['last']
except:
startNo = 1
tmpDB = []
cnt = 0
pushedNo.value += startNo - 1
log('Process oid=%03d started at aid=%d' % (oid, startNo), startTime, processNo,
pushedNo.value)
for i in range(startNo, 999999999):
status, newsResponseText, summary = getRaw(oid, i)
if not status:
continue
tmpDB.append({
'body': newsResponseText,
'summary': summary,
'aid': i
})
cnt += 1
if cnt >= chunk:
if len(tmpDB) > 0:
newsRawDB[str(oid)].insert_many(tmpDB)
pushedNo.value += len(tmpDB)
log('Pushed %03d objects to DB at oid=%03d for aid=%d' % (len(tmpDB), oid, i), startTime, processNo,
pushedNo.value)
tmpDB = []
cnt = 0
try:
metadataCollection.delete_one({"oid": oid})
metadataCollection.insert_one({
"oid": oid,
"last": i
})
except:
pass
except:
pass
if __name__ == '__main__':
multiprocessing.freeze_support()
print('Crawler main process started.')
thrs = []
cnt = 0
processNo = len(oidList)
pushedNo = multiprocessing.Value('i', 0)
startTime = time.time()
for i in oidList:
thr = multiprocessing.Process(target=crawlNews, args=(i, processNo, pushedNo, startTime))
thrs.append(thr)
thr.start()
cnt += 1
for i in thrs:
i.join()
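# Resumption sketch (the document shape is an assumption based on the code
# above): progress is checkpointed per oid in the 'metadata' collection, so a
# restarted process picks up from the last stored aid, e.g.:
#   metadataCollection.find_one({"oid": 1})  ->  {"oid": 1, "last": 4200}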
|
py | b40cc4627792dac79898f3e8363b1a8d6052cdf5 | #function : construct syntax tree from tokens
# input: token list
# output: syntaxtree
# by OL Jan.7, 2016
import d_ast1
headers=[]
idnum=0
# build the node corresponding to each expression type
def ifif(tokens):  # for an if expr, create an If node
#print(tokens)
for token in tokens:
if ':' in token:
cpos=tokens.index (token)
if 'else' in token:
elpos=tokens.index (token)
break
condition=tokens[1:cpos]
true_stmt=tokens[cpos+1:elpos]
false_stmt=tokens[elpos+2:len(tokens)]
#print('true',true_stmt)
#print('false',false_stmt)
nheader=d_ast1.If(condition, true_stmt,false_stmt)
return nheader
def iffor(tokens):
condition_vari=tokens[1][0]
strt=int(tokens[3][1])
stp=int(tokens[5][1])
body=tokens[6:]
#print(type(stp))
nheader=d_ast1.For(strt, stp, condition_vari, body)
return nheader
def ifwhil(tokens):
#print(tokens)
cpos=0
op=tokens[0][0]
for token in tokens:
if ':' in token:
cpos=tokens.index(token)
break
condition=tokens[1:cpos]
body=tokens[cpos+1:len(tokens)]
nheader=d_ast1.Whil(condition, body)
#print(nheader)
return nheader
def ifrel(tokens):
#print(tokens)
cpos=0
op=tokens[0][0]
for token in tokens:
if token[0] in ['==','!=','<=','>=','<','>']:
cpos=tokens.index(token)
op=token[0]
break
expr1=tokens[0:cpos]
expr2=tokens[cpos+1:len(tokens)]
nheader=d_ast1.Rel(expr1,expr2,op)
return nheader
def iflog(tokens):
#print(tokens)
cpos=0
op=tokens[0][0]
for token in tokens:
if token[0] in ['and','or','xor','not','nand','nor','xnor']:
cpos=tokens.index(token)
op=token[0]
break
expr1=tokens[0:cpos]
expr2=tokens[cpos+1:len(tokens)]
nheader=d_ast1.Log(expr1,expr2,op)
return nheader
def ifterm(tokens):
#print(tokens)
#(tokens)
cpos=0
op=tokens[0][0]
for token in tokens:
if token[0] in['+','-']:
cpos=tokens.index(token)
op=token[0]
#break
expr1=tokens[0:cpos]
expr2=tokens[cpos+1:len(tokens)]
nheader=d_ast1.Term(expr1,expr2,op)
return nheader
def iffactor(tokens):
#print(tokens)
cpos=0
op=tokens[0][0]
for token in tokens:
if token[0] in['*','/']:
cpos=tokens.index(token)
op=token[0]
break
expr1=tokens[0:cpos]
expr2=tokens[cpos+1:len(tokens)]
nheader=d_ast1.Factor(expr1,expr2,op)
#print(nheader.factor,nheader.ter)
return nheader
def ifassign(tokens):
#print(tokens)
name=tokens[0]
aexp=tokens[2:len(tokens)]
nheader=d_ast1.AssignStmt(name, aexp)
return nheader
## judge which kind of expression a token list represents
def isif(tokens):
if tokens[0][0]=='if':
return True
else :
return False
def iswhil(tokens):
if tokens[0][0]=='while':
return True
else :
return False
def isfor(tokens):
if tokens[0][0]=='for':
return True
else :
return False
def isrel(tokens):
for token in tokens:
if token[0] in ['==','!=','<=','>=','<','>']:
return True
return False
def islog(tokens):
for token in tokens:
#print(token[0])
if token[0] in ['and','or','xor','not','nand','nor','xnor']:
return True
return False
def isassign(tokens):
if tokens[1][0]=='=':
return True
else:
return False
def isterm(tokens):
for token in tokens:
if token[0] in['+','-']:
return True
return False
def isfactor(tokens):
for token in tokens:
if token[0] in['*','/']:
return True
return False
### visit the nodes
def visitif(header):
global headers
global idnum
idnum+=1
header.id=idnum
#print(type(header))
#print(header.name)
if isrel(header.condition):
nheader=ifrel(header.condition)
header.addcchild(nheader)
visitrel(nheader)
headers.append(nheader)
    # enter and visit the newly created node
elif islog (header.condition):
nheader=iflog(header.condition)
header.addcchild(nheader)
visitlog(nheader)
headers.append(nheader)
else:
        header.addcchild(d_ast1.node(header.condition))  # need to change the node class
#print(header.false_stmt)
    if isassign(header.true_stmt):  # the true branch contains only an assignment statement
nheader=ifassign(header.true_stmt)
header.addtchild(nheader)
visitassign(nheader)
headers.append(nheader)
else:
print('error ')
#print(isassign(header.false_stmt))
if isassign(header.false_stmt):
nheader=ifassign(header.false_stmt)
header.addfchild(nheader)
visitassign(nheader)
headers.append(nheader)
else:
print('error ')
#headers.append(header)
return header
def visitfor(header):
global headers
global idnum
idnum+=1
header.id=idnum
#print(type(header))
for_change=''
ini=''
tmp=[]
    if isassign(header.body):  # the body contains only a single assignment statement
bodies=[]
for i in range(header.strt,header.stp+1):
tmp=header.body[:]
print('ini',ini)
ini=tmp[2]
if i==0:
tmp[0][1]=tmp[0][1]+'1'
#print(tmp)
for_change=tmp[0]
bodies.append(tmp)
## elif i<header.stp:
## for tm in tmp[2:len(tmp)]:
## if tm==ini:
## pos=tmp.index(tm)
## tmp[pos]=for_change
## tmp[0][1]=tmp[0][1]+'_1'
## for_change=tmp[0]
## print(tmp)
## bodies.append(tmp)
elif i==header.stp:
#print(for_change,ini)
for tm in tmp[2:len(tmp)]:
if tm==ini:
pos=tmp.index(tm)
tmp[pos]=for_change
tmp[0]=ini
bodies.append(tmp)
print(bodies)
for body in bodies:
nheader=ifassign(body)
header.addchild(nheader)
visitassign(nheader)
headers.append(nheader)
else:
        print('unsupported expression')
def visitwhil(header):
#print(type(header))
global headers
global idnum
idnum+=1
header.id=idnum
#print(header.name)
if isrel(header.condition):
nheader=ifrel(header.condition)
header.addcchild(nheader)
visitrel(nheader)
headers.append(nheader)
    # enter and visit the newly created node
elif islog (header.condition):
nheader=iflog(header.condition)
header.addcchild(nheader)
visitlog(nheader)
headers.append(nheader)
else:
        header.addcchild(d_ast1.node(header.condition))  # need to change the node class
    if isassign(header.body):  # the body contains only an assignment statement
nheader=ifassign(header.body)
header.addbchild(nheader)
visitassign(nheader)
headers.append(nheader)
else:
print('error ')
#headers.append(header)
return header
def visitassign(header):
#print(type(header))
global headers
global idnum
idnum+=1
header.id=idnum
#print('header.aexp:',header.aexp)
header.addnchild(d_ast1.node([header.name]))
if len(header.aexp)==1:
header.addachild(d_ast1.node(header.aexp))
elif isrel(header.aexp):
#print(header.aexp)
nheader=ifrel(header.aexp)
header.addachild(nheader)
visitrel(nheader)
headers.append(nheader)
    # enter and visit the newly created node
elif islog (header.aexp):
nheader=iflog(header.aexp)
header.addachild(nheader)
visitlog(nheader)
headers.append(nheader)
elif isterm (header.aexp):
nheader=ifterm(header.aexp)
header.addachild(nheader)
visitterm(nheader)
headers.append(nheader)
elif isfactor(header.aexp):
nheader=iffactor(header.aexp)
header.addachild(nheader)
visitfactor(nheader)
headers.append(nheader)
else:
        print('Operation not defined!')
#headers.append(header)
return header
def visitlog(header):
#print(type(header))
global headers
global idnum
idnum+=1
header.id=idnum
#print('header.aexp:',header.log1)
if len(header.log1)==1:
header.addlchild(d_ast1.node(header.log1))
elif isrel(header.log1):
#print(header.log1)
nheader=ifrel(header.log1)
header.addlchild(nheader)
visitrel(nheader)
headers.append(nheader)
    # enter and visit the newly created node
elif islog (header.log1):
nheader=iflog(header.log1)
header.addlchild(nheader)
visitlog(nheader)
headers.append(nheader)
elif isterm (header.log1):
nheader=ifterm(header.log1)
header.addlchild(nheader)
visitterm(nheader)
headers.append(nheader)
elif isfactor(header.log1):
nheader=iffactor(header.log1)
header.addlchild(nheader)
visitfactor(nheader)
headers.append(nheader)
elif len(header.log1)==0:
header.addlchild([])
else:
        print('Operation not defined!')
    # begin to process the right branch
if len(header.log2)==1:
header.addrchild(d_ast1.node(header.log2))
elif isrel(header.log2):
#print(header.log2)
nheader=ifrel(header.log2)
header.addrchild(nheader)
visitrel(nheader)
headers.append(nheader)
    # enter and visit the newly created node
elif islog (header.log2):
nheader=iflog(header.log2)
header.addrchild(nheader)
visitlog(nheader)
headers.append(nheader)
elif isterm (header.log2):
nheader=ifterm(header.log2)
header.addrchild(nheader)
visitterm(nheader)
headers.append(nheader)
elif isfactor(header.log2):
nheader=iffactor(header.log2)
header.addrchild(nheader)
visitfactor(nheader)
headers.append(nheader)
else:
        print('Operation not defined!')
#headers.append(header)
return header
def visitrel(header):
#print(type(header))
global headers
global idnum
idnum+=1
header.id=idnum
#print('header.aexp:',header.rel1)
if len(header.rel1)==1:
header.addlchild(d_ast1.node(header.rel1))
    # enter and visit the newly created node
elif isterm (header.rel1):
nheader=ifterm(header.rel1)
header.addlchild(nheader)
visitterm(nheader)
headers.append(nheader)
elif isfactor(header.rel1):
nheader=iffactor(header.rel1)
header.addlchild(nheader)
visitfactor(nheader)
headers.append(nheader)
else:
        print('Operation not defined!')
    # process the right branch
if len(header.rel2)==1:
header.addrchild(d_ast1.node(header.rel2))
elif isterm (header.rel2):
nheader=ifterm(header.rel2)
header.addrchild(nheader)
visitterm(nheader)
headers.append(nheader)
elif isfactor(header.rel2):
nheader=iffactor(header.rel2)
header.addrchild(nheader)
visitfactor(nheader)
headers.append(nheader)
else:
        print('Operation not defined!')
#headers.append(header)
return header
def visitterm(header):
#print(type(header))
global headers
global idnum
idnum+=1
header.id=idnum
#print('header.factor',header.factor)
if isterm(header.term):
nheader=ifterm(header.term)
header.addtchild(nheader)
visitterm(nheader)
headers.append(nheader)
    # enter and visit the newly created node
elif isfactor(header.term):
nheader=iffactor(header.term)
header.addtchild(nheader)
visitfactor(nheader)
headers.append(nheader)
else:
header.addtchild(d_ast1.node(header.term))
if isfactor(header.factor):
nheader=iffactor(header.factor)
header.addfchild(nheader)
visitfactor(nheader)
headers.append(nheader)
else:
header.addfchild(d_ast1.node(header.factor))
return header
def visitfactor(header):
#print(type(header))
global headers
global idnum
idnum+=1
header.id=idnum
#print(header.name)
if isfactor(header.factor):
nheader=iffactor(header.factor)
header.addfchild(nheader)
visitfactor(nheader)
headers.append(nheader)
    # enter and visit the newly created node
else:
header.addfchild(d_ast1.node(header.factor))
header.addtchild(d_ast1.node(header.ter))
#headers.append(header)
return header
def d_par(tokens):
    # begin the main function; dispatch to the builder functions above
global headers
global idnum
root=d_ast1.tree('headers')
## import pdb
## pdb.set_trace()
if isif(tokens):
nheader=ifif(tokens)
root.add(nheader)
visitif(nheader)
headers.append(nheader)
elif isfor(tokens):
nheader=iffor(tokens)
root.add(nheader)
visitfor(nheader)
headers.append(nheader)
elif iswhil(tokens):
nheader=ifwhil(tokens)
root.add(nheader)
visitwhil(nheader)
headers.append(nheader)
    elif isassign(tokens):  # the statement is just an assignment
nheader=ifassign(tokens)
#header.addbchild(nheader)
root.add(nheader)
visitassign(nheader)
headers.append(nheader)
elif isrel(tokens):
nheader=ifrel(tokens)
root.add(nheader)
visitrel(nheader)
headers.append(nheader)
elif islog (tokens):
nheader=iflog(tokens)
root.add(nheader)
visitlog(nheader)
headers.append(nheader)
elif isterm(tokens):
nheader=ifterm(tokens)
root.add(nheader)
visitterm(nheader)
headers.append(nheader)
elif isfactor(tokens):
nheader=iffactor(tokens)
root.add(nheader)
visitfactor(nheader)
headers.append(nheader)
return headers
#for testing code
##headers=[]
##idnum=0
##root=d_ast1.node(headers)
##tokens=[['if', 'reserved'], ['id', 'b'], ['==', 'reserved'], ['num', '3'], \
## ['id', ':'], ['id', 'a'], ['=', 'reserved'], ['num', '5'], ['else', 'reserved'], ['id', ':'], \
## ['id', 'a'], ['=', 'reserved'], ['num', '3']]
###nheader=ifif(tokens)
###print(nheader.data)
###visitif(nheader)
##strt(tokens)
##print(len(headers))
##for header in headers:
## print(header.id)
|
py | b40cc4e1a8d8936a254b7e7ee1eea86821938b49 | #!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet accounts properly when there is a double-spend conflict."""
from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
disconnect_nodes,
find_output,
)
class TxnMallTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 4
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def add_options(self, parser):
parser.add_argument("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
def setup_network(self):
# Start with split network:
super().setup_network()
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
def run_test(self):
# All nodes should start with 1,250 0MFQ:
starting_balance = 1250
# All nodes should be out of IBD.
# If the nodes are not all out of IBD, that can interfere with
# blockchain sync later in the test when nodes are connected, due to
# timing issues.
for n in self.nodes:
assert n.getblockchaininfo()["initialblockdownload"] == False
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
# Assign coins to foo and bar addresses:
node0_address_foo = self.nodes[0].getnewaddress()
fund_foo_txid = self.nodes[0].sendtoaddress(node0_address_foo, 1219)
fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
node0_address_bar = self.nodes[0].getnewaddress()
fund_bar_txid = self.nodes[0].sendtoaddress(node0_address_bar, 29)
fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
assert_equal(self.nodes[0].getbalance(),
starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress()
# First: use raw transaction API to send 1240 0MFQ to node1_address,
# but don't broadcast:
doublespend_fee = Decimal('-.02')
rawtx_input_0 = {}
rawtx_input_0["txid"] = fund_foo_txid
rawtx_input_0["vout"] = find_output(self.nodes[0], fund_foo_txid, 1219)
rawtx_input_1 = {}
rawtx_input_1["txid"] = fund_bar_txid
rawtx_input_1["vout"] = find_output(self.nodes[0], fund_bar_txid, 29)
inputs = [rawtx_input_0, rawtx_input_1]
change_address = self.nodes[0].getnewaddress()
outputs = {}
outputs[node1_address] = 1240
outputs[change_address] = 1248 - 1240 + doublespend_fee
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
doublespend = self.nodes[0].signrawtransactionwithwallet(rawtx)
assert_equal(doublespend["complete"], True)
# Create two spends using 1 50 0MFQ coin each
txid1 = self.nodes[0].sendtoaddress(node1_address, 40)
txid2 = self.nodes[0].sendtoaddress(node1_address, 20)
# Have node0 mine a block:
if (self.options.mine_block):
self.nodes[0].generate(1)
self.sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
        # Node0's balance should be starting balance, plus 50 0MFQ for another
# matured block, minus 40, minus 20, and minus transaction fees:
expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
if self.options.mine_block:
expected += 50
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
# Node1's balance should be both transaction amounts:
assert_equal(self.nodes[1].getbalance(), starting_balance - tx1["amount"] - tx2["amount"])
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Now give doublespend and its parents to miner:
self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
doublespend_txid = self.nodes[2].sendrawtransaction(doublespend["hex"])
# ... mine a block...
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].generate(1) # Mine another block to make sure we sync
self.sync_blocks()
assert_equal(self.nodes[0].gettransaction(doublespend_txid)["confirmations"], 2)
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Both transactions should be conflicted
assert_equal(tx1["confirmations"], -2)
assert_equal(tx2["confirmations"], -2)
        # Node0's total balance should be starting balance, plus 100 0MFQ for
# two more matured blocks, minus 1240 for the double-spend, plus fees (which are
# negative):
expected = starting_balance + 100 - 1240 + fund_foo_tx["fee"] + fund_bar_tx["fee"] + doublespend_fee
assert_equal(self.nodes[0].getbalance(), expected)
# Node1's balance should be its initial balance (1250 for 25 block rewards) plus the doublespend:
assert_equal(self.nodes[1].getbalance(), 1250 + 1240)
if __name__ == '__main__':
TxnMallTest().main()
|
py | b40cc594a313f4555c3f62be65420f027546cfe0 | #!/usr/bin/env python
import base_filters
COPY_GOOGLE_DOC_KEY = '1AuHsCPvvW1W4fBZL0TvHXl92F7HY7ANxwHkMFc74CqI'
USE_ASSETS = False
# Use these variables to override the default cache timeouts for this graphic
# DEFAULT_MAX_AGE = 20
# ASSETS_MAX_AGE = 300
JINJA_FILTER_FUNCTIONS = base_filters.FILTERS
|
py | b40cc5bafc9f923597defc0f31d50f2affc2e0c0 | """The parallel implementation of the Archipelago
This module defines the Archipelago data structure that runs in parallel on
multiple processors.
"""
from copy import copy, deepcopy
import os
from datetime import datetime
import logging
import numpy as np
import dill
from mpi4py import MPI
from .archipelago import Archipelago
from ..util.log import INFO, DETAILED_INFO
LOGGER = logging.getLogger(__name__)
MPI.pickle.__init__(dill.dumps, dill.loads)
AGE_UPDATE = 2
EXIT_NOTIFICATION = 3
MIGRATION = 4
class ParallelArchipelago(Archipelago):
"""A collection of islands that evolves in parallel
Evolution of the Archipelago involves independent evolution of Islands
combined with periodic migration of individuals between random pairs of
islands. Each mpi process is responsible for the evolution of a single
island which has two effects:
1) scaling to more islands requires use of more processes
2) scripts written for the Parallel Archipelago should be independent of
the number of processors: i.e., scripts don't need to be changed in
order to run with more processors. Simply run the same script with more
mpi processes.
Parameters
----------
island : `Island`
The island that the processor will evolve
non_blocking : bool
Specifies whether to use blocking or non-blocking execution. Default
is non-blocking (True).
sync_frequency : int
How frequently to update the average age for each island. Default 10
Attributes
----------
island : `Island`
The island where the current processor's evolution occurs
generational_age: int
The number of generations the archipelago has been evolved
hall_of_fame: HallOfFame
An object containing the best individuals seen in the archipelago
"""
def __init__(self, island, hall_of_fame=None, non_blocking=True,
sync_frequency=10):
self.comm = MPI.COMM_WORLD
self.comm_rank = self.comm.Get_rank()
self.comm_size = self.comm.Get_size()
super().__init__(self.comm_size, hall_of_fame)
self.island = island
self._non_blocking = non_blocking
self._sync_frequency = sync_frequency
if self.island.hall_of_fame is None:
self.island.hall_of_fame = deepcopy(self.hall_of_fame)
def get_best_fitness(self):
"""Gets the fitness of most fit member
Returns
-------
:
Fitness of best individual in the archipelago
"""
best_on_proc = self.island.get_best_fitness()
best_fitness = self.comm.allreduce(best_on_proc, op=MPI.MIN)
return best_fitness
def get_best_individual(self):
"""Returns the best individual
Returns
-------
chromosomes :
The individual with lowest fitness
"""
best_on_proc = self.island.get_best_individual()
all_best_indvs = self.comm.allgather(best_on_proc)
best_indv = min(all_best_indvs, key=lambda x: x.fitness)
return best_indv
def _step_through_generations(self, num_steps):
if self._non_blocking:
self._non_blocking_execution(num_steps)
else:
self.island.evolve(num_steps,
hall_of_fame_update=False,
suppress_logging=True)
def _non_blocking_execution(self, num_steps):
if self.comm_rank == 0:
self._non_blocking_execution_master(num_steps)
else:
self._non_blocking_execution_slave()
def _non_blocking_execution_master(self, num_steps):
total_age = {}
average_age = self.generational_age
target_age = average_age + num_steps
while average_age < target_age:
self.island.evolve(self._sync_frequency,
hall_of_fame_update=False,
suppress_logging=True)
self._gather_updated_ages(total_age)
average_age = (sum(total_age.values())) / self.comm.size
self._send_exit_notifications()
self.comm.Barrier()
self._gather_updated_ages(total_age)
def _gather_updated_ages(self, total_age):
total_age.update({0: self.island.generational_age})
status = MPI.Status()
while self.comm.iprobe(source=MPI.ANY_SOURCE,
tag=AGE_UPDATE,
status=status):
data = self.comm.recv(source=status.Get_source(),
tag=AGE_UPDATE)
total_age.update(data)
def _send_exit_notifications(self):
for destination in range(1, self.comm_size):
req = self.comm.isend(True, dest=destination,
tag=EXIT_NOTIFICATION)
req.Wait()
def _non_blocking_execution_slave(self):
self._send_updated_age()
while not self._has_exit_notification():
self.island.evolve(self._sync_frequency,
hall_of_fame_update=False,
suppress_logging=True)
self._send_updated_age()
self.comm.Barrier()
def _has_exit_notification(self):
if self.comm.iprobe(source=0, tag=EXIT_NOTIFICATION):
_ = self.comm.recv(source=0, tag=EXIT_NOTIFICATION)
return True
return False
def _send_updated_age(self):
data = {self.comm_rank: self.island.generational_age}
req = self.comm.isend(data, dest=0, tag=AGE_UPDATE)
req.Wait()
def _coordinate_migration_between_islands(self):
if self.comm_rank == 0:
LOGGER.log(DETAILED_INFO, "Performing migration between Islands")
partner = self._get_migration_partner()
if partner is not None:
self._population_exchange_program(partner)
self.island.reset_fitness()
def _get_migration_partner(self):
if self.comm_rank == 0:
island_partners = self._shuffle_island_indices()
else:
island_partners = None
island_partners = self.comm.bcast(island_partners, root=0)
island_index = island_partners.index(self.comm_rank)
if island_index % 2 == 0:
partner_index = island_index + 1
if partner_index < self.comm_size:
partner = island_partners[partner_index]
else:
partner = None
LOGGER.debug(" %d <-> %s", self.comm_rank, str(partner))
else:
partner_index = island_index - 1
partner = island_partners[partner_index]
return partner
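    # Pairing sketch (illustrative): with 5 islands and a broadcast shuffle of
    # [3, 0, 4, 1, 2], even list positions pair with their right-hand neighbor,
    # giving (3, 0) and (4, 1); rank 2, left over in the final odd position,
    # gets no partner this round.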
def _shuffle_island_indices(self):
indices = list(range(self._num_islands))
np.random.shuffle(indices)
return indices
def _population_exchange_program(self, partner):
population_to_send = self.island.dump_fraction_of_population(0.5)
received_population = self.comm.sendrecv(population_to_send,
dest=partner,
sendtag=MIGRATION,
source=partner,
recvtag=MIGRATION)
self.island.population += received_population
def _log_evolution(self, start_time):
elapsed_time = datetime.now() - start_time
LOGGER.log(DETAILED_INFO, "Evolution time %s\t age %d\t fitness %.3le",
elapsed_time, self.island.generational_age,
self.get_best_fitness())
def _get_potential_hof_members(self):
self.island.update_hall_of_fame()
potential_members = list(self.island.hall_of_fame)
all_potential_members = self.comm.allgather(potential_members)
all_potential_members = [i for hof in all_potential_members
for i in hof]
return all_potential_members
def get_fitness_evaluation_count(self):
""" Gets the total number of fitness evaluations performed
Returns
-------
int :
number of fitness evaluations
"""
my_eval_count = self.island.get_fitness_evaluation_count()
total_eval_count = self.comm.allreduce(my_eval_count, op=MPI.SUM)
return total_eval_count
def get_ea_diagnostic_info(self):
""" Gets diagnostic info from the evolutionary algorithm(s)
Returns
-------
EaDiagnosticsSummary :
summary of evolutionary algorithm diagnostics
"""
my_diagnostics = self.island.get_ea_diagnostic_info()
all_diagnostics = self.comm.allgather(my_diagnostics)
return sum(all_diagnostics)
def dump_to_file(self, filename):
""" Dump the ParallelArchipelago object to a pickle file
The file will contain a pickle dump of a list of all the processors'
ParallelArchipelago objects.
Parameters
----------
filename : str
the name of the pickle file to dump
"""
if self.comm_rank == 0:
LOGGER.log(INFO, "Saving checkpoint: %s", filename)
pickleable_copy = self._copy_without_mpi()
all_par_archs = self.comm.gather(pickleable_copy, root=0)
if self.comm_rank == 0:
with open(filename, "wb") as dump_file:
dill.dump(all_par_archs, dump_file,
protocol=dill.HIGHEST_PROTOCOL)
LOGGER.log(DETAILED_INFO, "Saved successfully")
def _copy_without_mpi(self):
no_mpi_copy = copy(self)
no_mpi_copy.comm = None
no_mpi_copy.comm_size = None
no_mpi_copy.comm_rank = None
return no_mpi_copy
def _remove_stale_checkpoint(self):
if self.comm_rank == 0:
LOGGER.debug("Removing stale checkpoint file: %s",
self._previous_checkpoints[0])
os.remove(self._previous_checkpoints.pop(0))
def load_parallel_archipelago_from_file(filename):
""" Load a ParallelArchipelago objects from a pickle file
Parameters
----------
filename : str
the name of the pickle file to load
Returns
-------
    ParallelArchipelago :
        the loaded archipelago assigned to this mpi process
"""
comm = MPI.COMM_WORLD
comm_rank = comm.Get_rank()
comm_size = comm.Get_size()
if comm_rank == 0:
LOGGER.log(INFO, "Loading checkpoint file: %s", filename)
with open(filename, "rb") as load_file:
all_par_archs = dill.load(load_file)
loaded_size = len(all_par_archs)
if comm_size < loaded_size:
all_par_archs = all_par_archs[:comm_size]
elif comm_size > loaded_size:
all_par_archs = [all_par_archs[i % loaded_size]
for i in range(comm_size)]
else:
all_par_archs = None
par_arch = comm.scatter(all_par_archs, root=0)
par_arch.comm = comm
par_arch.comm_rank = comm_rank
par_arch.comm_size = comm_size
if comm_rank == 0:
LOGGER.log(DETAILED_INFO, "Loaded successfully")
return par_arch
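if __name__ == "__main__":
    # Minimal checkpoint round-trip sketch (not part of the library): assumes
    # "checkpoint.pkl" was produced by dump_to_file in an earlier run and that
    # this script is launched under mpiexec, e.g. `mpiexec -n 4 python restart.py`.
    par_arch = load_parallel_archipelago_from_file("checkpoint.pkl")
    best = par_arch.get_best_fitness()  # collective call; every rank participates
    if par_arch.comm_rank == 0:
        print("best fitness so far:", best)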
|
py | b40cc5c79c9047e9c6ffb439cdd3b68057380694 | from gitstack.models import Repository, Group
from django.test import TestCase
from django.test.client import Client
import time, json, os, shutil
from django.conf import settings
'''
For ldap test, use the following parameters :
ldaphost = 192.168.1.54
ldapbasedn = CN=Users,DC=contoso,DC=com
ldapbinddn = CN=john,CN=Users,DC=contoso,DC=com
ldapbindpassword = Abcd!123
ldapprotocol = ldap
ldapport = 389
ldapattribute = sAMAccountName
ldapscope = sub
ldapfilter = (objectClass=person)
Remove settings.ini file
Remove groupfile
Delete all the users
'''
class SimpleTest(TestCase):
###################
# Set up
###################
def setUp(self):
self.c = Client()
# put the settings.ini file
shutil.copy(settings.INSTALL_DIR + '/app/gitstack/config_template/settings.ini', settings.SETTINGS_PATH)
# create repositories
self.create_repos()
# create users
# self.create_users()
shutil.copy(settings.INSTALL_DIR + '/app/rest/tests/passwdfile', settings.INSTALL_DIR + '/data/passwdfile')
# create groups
self.create_groups()
def tearDown(self):
# delete repos
repositories = Repository.retrieve_all()
for repo in repositories:
repo.delete()
# delete users
os.remove(settings.INSTALL_DIR + '/data/passwdfile')
# delete groups
groups = Group.retrieve_all()
for group in groups:
# delete the group
group.delete()
# remove the settings.ini file from the filesystem
os.remove(settings.SETTINGS_PATH)
# create repositories
def create_repos(self):
self.assertEqual(self.c.post('/rest/repository/', { 'name' : 'repo1' }).status_code, 200)
self.assertEqual(self.c.post('/rest/repository/', { 'name' : 'repo2' }).status_code, 200)
self.assertEqual(self.c.post('/rest/repository/', { 'name' : 'repo3' }).status_code, 200)
# create users
def create_users(self):
# create empty user file
user_file = open(settings.INSTALL_DIR + '/data/passwdfile', 'w')
user_file.write('')
user_file.close()
self.assertEqual(self.c.post('/rest/user/', { 'username' : 'user1', 'password' : 'user1' }).status_code, 200)
time.sleep(0.1)
self.assertEqual(self.c.post('/rest/user/', { 'username' : 'user2', 'password' : 'user2' }).status_code, 200)
time.sleep(0.1)
self.assertEqual(self.c.post('/rest/user/', { 'username' : 'user3', 'password' : 'user3' }).status_code, 200)
time.sleep(0.1)
# create groups
def create_groups(self):
self.assertEqual(self.c.post('/rest/group/', { 'name' : 'group1' }).status_code, 200)
self.assertEqual(self.c.post('/rest/group/', { 'name' : 'group2' }).status_code, 200)
self.assertEqual(self.c.post('/rest/group/', { 'name' : 'group3' }).status_code, 200)
######################
# Repositories
#####################
# create repo
def test_repo_create(self):
self.assertEqual(self.c.post('/rest/repository/', { 'name' : 'repo4' }).status_code, 200)
response = self.c.get('/rest/repository/')
self.assertEqual(response.content, '[{"group_read_list": [], "name": "repo1", "user_write_list": [], "group_list": [], "bare": true, "user_read_list": [], "group_write_list": [], "user_list": []}, {"group_read_list": [], "name": "repo2", "user_write_list": [], "group_list": [], "bare": true, "user_read_list": [], "group_write_list": [], "user_list": []}, {"group_read_list": [], "name": "repo3", "user_write_list": [], "group_list": [], "bare": true, "user_read_list": [], "group_write_list": [], "user_list": []}, {"group_read_list": [], "name": "repo4", "user_write_list": [], "group_list": [], "bare": true, "user_read_list": [], "group_write_list": [], "user_list": []}]')
# retrieve repositories
def test_repo_retrieve(self):
response = self.c.get('/rest/repository/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, '[{"group_read_list": [], "name": "repo1", "user_write_list": [], "group_list": [], "bare": true, "user_read_list": [], "group_write_list": [], "user_list": []}, {"group_read_list": [], "name": "repo2", "user_write_list": [], "group_list": [], "bare": true, "user_read_list": [], "group_write_list": [], "user_list": []}, {"group_read_list": [], "name": "repo3", "user_write_list": [], "group_list": [], "bare": true, "user_read_list": [], "group_write_list": [], "user_list": []}]')
# delete a repository
def test_repo_delete(self):
response = self.c.delete('/rest/repository/repo3/')
self.assertEqual(response.status_code, 200)
response = self.c.get('/rest/repository/')
self.assertEqual(response.content, '[{"group_read_list": [], "name": "repo1", "user_write_list": [], "group_list": [], "bare": true, "user_read_list": [], "group_write_list": [], "user_list": []}, {"group_read_list": [], "name": "repo2", "user_write_list": [], "group_list": [], "bare": true, "user_read_list": [], "group_write_list": [], "user_list": []}]')
######################
# Users
#####################
# remove
def test_user_remove(self):
response = self.c.delete('/rest/user/user3/')
self.assertEqual(response.status_code, 200)
time.sleep(0.1)
response = self.c.get('/rest/user/')
self.assertEqual(response.content, '["user1", "user2", "everyone"]')
    # update a user password
def test_user_change_password(self):
response = self.c.put('/rest/user/', data='{"username":"user3", "password":"test"}', content_type='application/json')
self.assertEqual(response.status_code, 200)
# create
def test_user_create(self):
self.assertEqual(self.c.post('/rest/user/', { 'username' : 'user4', 'password' : 'user4' }).status_code, 200)
time.sleep(0.1)
response = self.c.get('/rest/user/')
self.assertEqual(response.content, '["user1", "user2", "user3", "user4", "everyone"]')
# retrieve
def test_user_retrieve(self):
response = self.c.get('/rest/user/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, '["user1", "user2", "user3", "everyone"]')
###############################
# Groups
###############################
# retrieve
def test_group_retrieve(self):
response = self.c.get('/rest/group/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, '[{"name": "group1", "member_list": []}, {"name": "group2", "member_list": []}, {"name": "group3", "member_list": []}]')
# remove
def test_group_remove(self):
response = self.c.delete('/rest/group/group3/')
self.assertEqual(response.status_code, 200)
response = self.c.get('/rest/group/')
self.assertEqual(response.content, '[{"name": "group1", "member_list": []}, {"name": "group2", "member_list": []}]')
# create
def test_group_create(self):
self.assertEqual(self.c.post('/rest/group/', { 'name' : 'group4' }).status_code, 200)
response = self.c.get('/rest/group/')
self.assertEqual(response.content, '[{"name": "group1", "member_list": []}, {"name": "group2", "member_list": []}, {"name": "group3", "member_list": []}, {"name": "group4", "member_list": []}]')
#
# group members
#
# retrieve all the users of a group
def test_group_user_retrieve(self):
self.assertEqual(self.c.post('/rest/group/group1/user/user1/').status_code, 200)
response = self.c.get('/rest/group/group1/user/')
self.assertEqual(response.content, '["user1"]')
# add a user to a group
def test_group_user_add(self):
self.assertEqual(self.c.post('/rest/group/group1/user/user1/').status_code, 200)
self.assertEqual(self.c.post('/rest/group/group1/user/user2/').status_code, 200)
response = self.c.get('/rest/group/group1/user/')
self.assertEqual(response.content, '["user1", "user2"]')
def test_group_user_remove(self):
self.assertEqual(self.c.post('/rest/group/group1/user/user1/').status_code, 200)
self.assertEqual(self.c.post('/rest/group/group1/user/user2/').status_code, 200)
self.assertEqual(self.c.delete('/rest/group/group1/user/user2/').status_code, 200)
response = self.c.get('/rest/group/group1/user/')
self.assertEqual(response.content, '["user1"]')
#########################
# Repository user management
########################
#
# add user to a repo
#
# add an user to a repo
def test_repo_add_user(self):
self.assertEqual(self.c.post('/rest/repository/repo1/user/user1/').status_code, 200)
response = self.c.get('/rest/repository/repo1/user/')
self.assertEqual(response.content, '["user1"]')
# remove an user from a repo
def test_repo_remove_user(self):
self.assertEqual(self.c.post('/rest/repository/repo1/user/user1/').status_code, 200)
self.assertEqual(self.c.post('/rest/repository/repo1/user/user2/').status_code, 200)
self.assertEqual(self.c.delete('/rest/repository/repo1/user/user2/').status_code, 200)
response = self.c.get('/rest/repository/repo1/user/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, '["user1"]')
# retrieve users added to a repo
def test_repo_retrieve_user(self):
self.assertEqual(self.c.post('/rest/repository/repo1/user/user1/').status_code, 200)
self.assertEqual(self.c.post('/rest/repository/repo1/user/user2/').status_code, 200)
response = self.c.get('/rest/repository/repo1/user/')
self.assertEqual(response.content, '["user1", "user2"]')
#
# read permission
#
# add read permission
def test_repo_add_read_user(self):
# the user1 has read rights by default
self.assertEqual(self.c.post('/rest/repository/repo1/user/user1/').status_code, 200)
# Check if the user1 has the read rights
response = self.c.get('/rest/repository/repo1/user/user1/')
permissions = json.loads(response.content)
self.assertEqual(permissions['read'], True)
# remove read permission
def test_repo_remove_read_user(self):
# the user1 has read rights by default
self.assertEqual(self.c.post('/rest/repository/repo1/user/user1/').status_code, 200)
# remove the read rights
self.assertEqual(self.c.put('/rest/repository/repo1/user/user1/',data='{"read":false}', content_type='application/json').status_code, 200)
# Check if the user1 has the read rights
response = self.c.get('/rest/repository/repo1/user/user1/')
permissions = json.loads(response.content)
self.assertEqual(permissions['read'], False)
#
# write permission
#
def test_repo_add_write_user(self):
        # the user1 has write rights by default
        self.assertEqual(self.c.post('/rest/repository/repo1/user/user1/').status_code, 200)
        # Check if the user1 has the write rights
response = self.c.get('/rest/repository/repo1/user/user1/')
permissions = json.loads(response.content)
self.assertEqual(permissions['write'], True)
# remove write permission
def test_repo_remove_write_user(self):
        # the user1 has write rights by default
        self.assertEqual(self.c.post('/rest/repository/repo1/user/user1/').status_code, 200)
        # remove the write rights
        self.assertEqual(self.c.put('/rest/repository/repo1/user/user1/',data='{"write":false}', content_type='application/json').status_code, 200)
        # Check if the user1 still has the write rights
response = self.c.get('/rest/repository/repo1/user/user1/')
permissions = json.loads(response.content)
self.assertEqual(permissions['write'], False)
#############################
    # Repository Group management
#############################
#
# Add/Remove group to a repo
#
# add an group to a repo
def test_repo_add_group(self):
self.assertEqual(self.c.post('/rest/repository/repo1/group/group1/').status_code, 200)
response = self.c.get('/rest/repository/repo1/group/')
self.assertEqual(response.content, '["group1"]')
# remove a group from a repo
def test_repo_remove_group(self):
self.assertEqual(self.c.post('/rest/repository/repo1/group/group1/').status_code, 200)
self.assertEqual(self.c.post('/rest/repository/repo1/group/group2/').status_code, 200)
self.assertEqual(self.c.delete('/rest/repository/repo1/group/group2/').status_code, 200)
response = self.c.get('/rest/repository/repo1/group/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, '["group1"]')
# retrieve groups added to a repo
def test_repo_retrieve_group(self):
self.assertEqual(self.c.post('/rest/repository/repo1/group/group1/').status_code, 200)
self.assertEqual(self.c.post('/rest/repository/repo1/group/group2/').status_code, 200)
response = self.c.get('/rest/repository/repo1/group/')
self.assertEqual(response.content, '["group1", "group2"]')
#
# read permission
#
# add read permission
def test_repo_add_read_group(self):
        # the group1 has read rights by default
self.assertEqual(self.c.post('/rest/repository/repo1/group/group1/').status_code, 200)
# Check if the group1 has the read rights
response = self.c.get('/rest/repository/repo1/group/group1/')
permissions = json.loads(response.content)
self.assertEqual(permissions['read'], True)
# remove read permission
def test_repo_remove_read_group(self):
# the group1 has read rights by default
self.assertEqual(self.c.post('/rest/repository/repo1/group/group1/').status_code, 200)
# remove the read rights
self.assertEqual(self.c.put('/rest/repository/repo1/group/group1/',data='{"read":false}', content_type='application/json').status_code, 200)
        # Check if the group1 still has the read rights
response = self.c.get('/rest/repository/repo1/group/group1/')
permissions = json.loads(response.content)
self.assertEqual(permissions['read'], False)
#
# write permission
#
def test_repo_add_write_group(self):
        # the group1 has write rights by default
self.assertEqual(self.c.post('/rest/repository/repo1/group/group1/').status_code, 200)
# Check if the group1 has the write rights
response = self.c.get('/rest/repository/repo1/group/group1/')
permissions = json.loads(response.content)
self.assertEqual(permissions['write'], True)
# remove write permission
def test_repo_remove_write_group(self):
# the group1 has write rights by default
self.assertEqual(self.c.post('/rest/repository/repo1/group/group1/').status_code, 200)
        # remove the write rights
self.assertEqual(self.c.put('/rest/repository/repo1/group/group1/',data='{"write":false}', content_type='application/json').status_code, 200)
# Check if the group1 has the write rights
response = self.c.get('/rest/repository/repo1/group/group1/')
permissions = json.loads(response.content)
self.assertEqual(permissions['write'], False)
################################
# Ldap
#################################
def test_ldap(self):
# get ldap settings
response = self.c.get('/rest/settings/authentication/')
# check that "file" auth is selected
self.assertEqual(response.content, '{"authMethod":"file","ldap":{"protocol": "","host": "","port": "","baseDn": "","attribute": "","scope": "","filter": "","bindDn": "","bindPassword": ""}}')
# set some ldap settings
self.assertEqual(self.c.put('/rest/settings/authentication/',data='{"authMethod":"ldap","ldap":{"protocol": "ldap","host": "192.168.1.54","port": "389","baseDn": "CN=Users,DC=contoso,DC=com","attribute": "cn","scope": "sub","filter": "(objectClass=person)","bindDn": "CN=john,CN=Users,DC=contoso,DC=com","bindPassword": "Abcd!123"}}', content_type='application/json').status_code, 200)
# retrieve the ldap settings
response = self.c.get('/rest/settings/authentication/')
self.assertEqual(response.content, '{"authMethod":"ldap","ldap":{"protocol": "ldap","host": "192.168.1.54","port": "389","baseDn": "CN=Users,DC=contoso,DC=com","attribute": "cn","scope": "sub","filter": "(objectClass=person)","bindDn": "CN=john,CN=Users,DC=contoso,DC=com","bindPassword": "Abcd!123"}}')
# test the ldap settings
self.assertEqual(self.c.get('/rest/settings/authentication/ldap/test/?protocol=ldap&host=192.168.1.54&port=389&baseDn=CN%3DUsers%2CDC%3Dcontoso%2CDC%3Dcom&attribute=cn&scope=sub&filter=(objectClass%3Dperson)&bindDn=CN%3Djohn%2CCN%3DUsers%2CDC%3Dcontoso%2CDC%3Dcom&bindPassword=Abcd!123').status_code, 200)
# add users to the repo1
# the user1 has read rights by default
self.assertEqual(self.c.post('/rest/repository/repo1/user/user1/').status_code, 200)
# Check if the user1 has the read rights
response = self.c.get('/rest/repository/repo1/user/user1/')
permissions = json.loads(response.content)
self.assertEqual(permissions['read'], True)
# remove user1 read permissions
self.assertEqual(self.c.put('/rest/repository/repo1/user/user1/',data='{"read":false}', content_type='application/json').status_code, 200)
# Check if the user1 has the read rights
response = self.c.get('/rest/repository/repo1/user/user1/')
permissions = json.loads(response.content)
self.assertEqual(permissions['read'], False)
################################
# Gitphp web access
################################
# Check if the web access is enabled
def test_web_access(self):
# The web interface should be enabled by default
response = self.c.get('/rest/settings/general/webinterface/')
permissions = json.loads(response.content)
self.assertEqual(permissions['enabled'], True)
def test_web_access_disable(self):
# Disable the web interface
self.assertEqual(self.c.put('/rest/settings/general/webinterface/',data='{"enabled":false}', content_type='application/json').status_code, 200)
# Check that the web interface is disabled
response = self.c.get('/rest/settings/general/webinterface/')
permissions = json.loads(response.content)
self.assertEqual(permissions['enabled'], False)
# make sure the web interface is enabled
self.assertEqual(self.c.put('/rest/settings/general/webinterface/',data='{"enabled":true}', content_type='application/json').status_code, 200)
##########################
# Port
#########################
# retrieve the ports numbers
def test_ports_retrieve(self):
# retrieve the ports
response = self.c.get('/rest/settings/general/port/')
self.assertEqual(response.content, '{"httpPort": 80, "httpsPort": 443}')
###############################
# Settings/security : https
#################################
# retrieve http/https settings
def test_protocol_retrieve(self):
response = self.c.get('/rest/settings/security/')
self.assertEqual(response.content, '{"http": true, "https": false}')
# set the http/https settings to https only
def test_protocol_change(self):
# change to https only
self.assertEqual(self.c.put('/rest/settings/security/',data='{"http": false, "https": true}', content_type='application/json').status_code, 200)
# check that the settings are set to https only
response = self.c.get('/rest/settings/security/')
self.assertEqual(response.content, '{"http": false, "https": true}')
|
py | b40cc743c3d8f154dfde0c37ec717a8f03a9e5a6 | # File: reversinglabs_consts.py
#
# Copyright (c) 2014-2022 Splunk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
#
#
# Status/Progress Messages
REVERSINGLABS_MSG_GOT_RESP = "Got Response from ReversingLabs"
REVERSINGLABS_SUCC_MSG_OBJECT_QUERIED = "ReversingLabs query for {object_name} '{object_value}' finished"
REVERSINGLABS_ERR_MSG_OBJECT_QUERIED = "ReversingLabs query for {object_name} '{object_value}' failed"
REVERSINGLABS_MSG_CONNECTING_WITH_URL = "Querying ReversingLabs using url: '{url}' for hash type: '{hash_type}'"
REVERSINGLABS_SUCC_CONNECTIVITY_TEST = "Test connectivity passed"
REVERSINGLABS_ERR_CONNECTIVITY_TEST = "Test connectivity failed"
REVERSINGLABS_MSG_CHECK_CREDENTIALS = "Please check your credentials or the network connectivity"
REVERSINGLABS_ERR_INVALID_HASH = "Invalid hash"
REVERSINGLABS_ERR_MALWARE_PRESENCE_QUERY_FAILED = "Query to check if hash is malware failed with HTTP return code {ret_code}"
REVERSINGLABS_GENERATED_RANDOM_HASH = "Generated random hash for testing connectivity"
# Jsons used in params, result, summary etc.
REVERSINGLABS_JSON_DETECTIONS = "detections"
REVERSINGLABS_JSON_FOUND = "found"
REVERSINGLABS_JSON_POSITIVES = "positives"
REVERSINGLABS_JSON_TOTAL_SCANS = "total_scans"
REVERSINGLABS_JSON_TOTAL_POSITIVES = "total_positives"
REVERSINGLABS_JSON_STATUS = "status"
# Other constants used in the connector
MAL_PRESENCE_API_URL = 'https://ticloud01.reversinglabs.com/api/databrowser/malware_presence/bulk_query/json'
XREF_API_URL = 'https://ticloud01.reversinglabs.com/api/xref/v2/bulk_query/json'
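# Formatting sketch (illustrative values): the message templates above are plain
# str.format strings, e.g.
#   REVERSINGLABS_SUCC_MSG_OBJECT_QUERIED.format(object_name="hash", object_value="abc123")
# yields "ReversingLabs query for hash 'abc123' finished".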
|
py | b40cc766787d2a7d5cb4875c2a2ef4b22ba01a02 | #!/usr/bin/env python
import rospy
from std_srvs.srv import Trigger, TriggerRequest, TriggerResponse
from flexbe_core import EventState, Logger
from flexbe_core.proxy import ProxyServiceCaller
class DetectPlaneState(EventState):
'''
The state that the camera detect the plane to place the object.
The detect part is done by Apriltag
    <= continue             The plane was detected successfully.
    <= failed               The service call failed or no plane was detected.
'''
def __init__(self):
# Declare outcomes, input_keys, and output_keys by calling the super constructor with the corresponding arguments.
super(DetectPlaneState, self).__init__(outcomes = ['continue', 'failed'])
# The constructor is called when building the state machine, not when actually starting the behavior.
# Thus, we cannot save the starting time now and will do so later.
self._srv_topic = "/detect_plane_srv"
self._srv = ProxyServiceCaller({self._srv_topic: Trigger})
self._srv_result = None
self._failed = False
def execute(self, userdata):
# This method is called periodically while the state is active.
# Main purpose is to check state conditions and trigger a corresponding outcome.
# If no outcome is returned, the state will stay active.
if self._failed or self._srv_result.success is False:
return 'failed'
else:
return "continue"
def on_enter(self, userdata):
# This method is called when the state becomes active, i.e. a transition from another state to this one is taken.
# It is primarily used to start actions which are associated with this state.
# The following code is just for illustrating how the behavior logger works.
# Text logged by the behavior logger is sent to the operator and displayed in the GUI.
try:
self._srv_result = self._srv.call(self._srv_topic,
TriggerRequest())
except Exception as e:
Logger.logwarn('Failed to send service call:\n%s' % str(e))
self._failed = True
def on_exit(self, userdata):
pass # Nothing to do in this service.
def on_start(self):
pass # Nothing to do in this service.
def on_stop(self):
pass # Nothing to do in this service.
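if __name__ == "__main__":
    # Standalone sketch of the Trigger pattern this state wraps (assumes a
    # running ROS master and that /detect_plane_srv is advertised; the service
    # name is the one used by the state above).
    rospy.init_node("detect_plane_state_sketch")
    rospy.wait_for_service("/detect_plane_srv")
    detect_plane = rospy.ServiceProxy("/detect_plane_srv", Trigger)
    response = detect_plane(TriggerRequest())
    print("success: %s, message: %s" % (response.success, response.message))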
|
py | b40cc7a439e9869d41dc98600f6f5024bcffe161 | from thinker.core.questionsolver.questionsolver import QuestionSolver
class GeneratorBetweenNumbers:
###########################################################
#
    # generateNumbers(values, possibleNumbers)
    #
    # values: list of lists where each has [lowLimit, bigLimit, y/n]
# possibleNumbers: list of all values that can be considered as the user number
#
# This method creates different boundaries that remove values from possibleNumbers
#
###########################################################
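    # Worked example (assumes QuestionSolver.digits == 2, so candidates span
    # 0-99): calling generateNumbers([[10, 20, "y"]], list(range(100))) on an
    # instance returns [10, 11, ..., 20], since the "y" answer removes the
    # complementary ranges 0-9 and 21-99.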
def generateNumbers(self, values, possibleNumbers):
# Determine how many digits not using QuestionSolver
# digits = len(list(str(possibleNumbers[-1])))
# List of lists which contains all boundaries to remove
boundaries: list = []
# We insert the boundaries, we do this every time we execute this method to be transparent
# for the others, that means that this method doesn't remember anything
        for boundary in values:
            # If the user says that the value is in [lowLimit, bigLimit, "y"],
            # we create 2 lists, from 0 to lowLimit - 1 and from bigLimit + 1
            # to pow(10, digits) - 1, to eliminate the unnecessary values
            if boundary[2] == "y":
                boundaries.append([0, boundary[0] - 1, boundary[2]])
                boundaries.append([boundary[1] + 1, pow(10, QuestionSolver.digits) - 1, boundary[2]])
            # If the user says that the value is not in [lowLimit, bigLimit, "n"],
            # we only append the boundary itself
            if boundary[2] == "n":
                boundaries.append(boundary)
# And then we eliminate the values that doesn't have to be in there
for b in boundaries:
for i in range(b[0], b[1] + 1):
if i in possibleNumbers:
possibleNumbers.remove(i)
return possibleNumbers |
py | b40cc7aea69e1e49306114618b6f83013e30dfb1 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Services for exploration-related statistics."""
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import stats_domain
from core.domain import stats_jobs_continuous
from core.platform import models
(stats_models,) = models.Registry.import_models([models.NAMES.statistics])
IMPROVE_TYPE_DEFAULT = 'default'
IMPROVE_TYPE_INCOMPLETE = 'incomplete'
# TODO(bhenning): Everything is handler name submit; therefore, it is
# pointless and should be removed.
_OLD_SUBMIT_HANDLER_NAME = 'submit'
def get_top_unresolved_answers_for_default_rule(exploration_id, state_name):
return {
answer: count for (answer, count) in
stats_domain.StateRuleAnswerLog.get(
exploration_id, state_name, exp_domain.DEFAULT_RULESPEC_STR
).get_top_answers(3)
}
def get_exps_unresolved_answers_count_for_default_rule(exp_ids):
"""Gets answer counts per exploration for the answer groups for default
rule across all states for explorations with ids in exp_ids.
Note that this method currently returns the counts only for the DEFAULT
rule. This should ideally handle all types of unresolved answers.
Returns:
A dict, keyed by the string '{exp_id}', whose values are the number of
unresolved answers that exploration has. Any exp_ids for explorations
that don't exist or that have been deleted will be ignored, and not
included in the return value.
"""
explorations = exp_services.get_multiple_explorations_by_id(
exp_ids, strict=False)
# The variable `exploration_states_tuples` is a list of all
# (exp_id, state_name) tuples for the given exp_ids.
# E.g. - [
# ('eid1', 'Introduction'),
# ('eid1', 'End'),
# ('eid2', 'Introduction'),
# ('eid3', 'Introduction')
# ]
# when exp_ids = ['eid1', 'eid2', 'eid3'].
explorations_states_tuples = [
(exp_domain_object.id, state_key)
for exp_domain_object in explorations.values()
for state_key in exp_domain_object.states
]
exploration_states_answers_list = get_top_state_rule_answers_multi(
explorations_states_tuples, [exp_domain.DEFAULT_RULESPEC_STR])
exps_answers_mapping = {}
for ind, statewise_answers in enumerate(exploration_states_answers_list):
for answer in statewise_answers:
exp_id = explorations_states_tuples[ind][0]
if exp_id not in exps_answers_mapping:
exps_answers_mapping[exp_id] = 0
exps_answers_mapping[exp_id] += answer['count']
return exps_answers_mapping
def get_state_rules_stats(exploration_id, state_name):
"""Gets statistics for the answer groups and rules of this state.
Returns:
A dict, keyed by the string '{HANDLER_NAME}.{RULE_STR}', whose
values are the corresponding stats_domain.StateRuleAnswerLog
instances.
"""
exploration = exp_services.get_exploration_by_id(exploration_id)
state = exploration.states[state_name]
rule_keys = []
for group in state.interaction.answer_groups:
for rule in group.rule_specs:
rule_keys.append((
_OLD_SUBMIT_HANDLER_NAME, rule.stringify_classified_rule()))
if state.interaction.default_outcome:
rule_keys.append((
_OLD_SUBMIT_HANDLER_NAME, exp_domain.DEFAULT_RULESPEC_STR))
answer_logs = stats_domain.StateRuleAnswerLog.get_multi(
exploration_id, [{
'state_name': state_name,
'rule_str': rule_key[1]
} for rule_key in rule_keys])
results = {}
for ind, answer_log in enumerate(answer_logs):
results['.'.join(rule_keys[ind])] = {
'answers': answer_log.get_top_answers(5),
'rule_hits': answer_log.total_answer_count
}
return results
def get_top_state_rule_answers(exploration_id, state_name, rule_str_list):
"""Returns a list of top answers (by submission frequency) submitted to the
given state in the given exploration which were mapped to any of the rules
listed in 'rule_str_list'. All answers submitted to the specified state and
match the rule spec strings in rule_str_list are returned.
"""
return get_top_state_rule_answers_multi(
[(exploration_id, state_name)], rule_str_list)[0]
def get_top_state_rule_answers_multi(exploration_state_list, rule_str_list):
"""Returns a list of top answers (by submission frequency) submitted to the
given explorations and states which were mapped to any of the rules listed
in 'rule_str_list' for each exploration ID and state name tuple in
exploration_state_list.
For each exploration ID and state, all answers submitted that match any of
the rule spec strings in rule_str_list are returned.
"""
answer_log_list = (
stats_domain.StateRuleAnswerLog.get_multi_by_multi_explorations(
exploration_state_list, rule_str_list))
return [[
{
'value': top_answer[0],
'count': top_answer[1]
}
for top_answer in answer_log.get_all_top_answers()
] for answer_log in answer_log_list]
def get_state_improvements(exploration_id, exploration_version):
"""Returns a list of dicts, each representing a suggestion for improvement
to a particular state.
"""
ranked_states = []
exploration = exp_services.get_exploration_by_id(exploration_id)
state_names = exploration.states.keys()
default_rule_answer_logs = stats_domain.StateRuleAnswerLog.get_multi(
exploration_id, [{
'state_name': state_name,
'rule_str': exp_domain.DEFAULT_RULESPEC_STR
} for state_name in state_names])
statistics = stats_jobs_continuous.StatisticsAggregator.get_statistics(
exploration_id, exploration_version)
state_hit_counts = statistics['state_hit_counts']
for ind, state_name in enumerate(state_names):
total_entry_count = 0
no_answer_submitted_count = 0
if state_name in state_hit_counts:
total_entry_count = (
state_hit_counts[state_name]['total_entry_count'])
no_answer_submitted_count = state_hit_counts[state_name].get(
'no_answer_count', 0)
if total_entry_count == 0:
continue
threshold = 0.2 * total_entry_count
default_rule_answer_log = default_rule_answer_logs[ind]
default_count = default_rule_answer_log.total_answer_count
eligible_flags = []
state = exploration.states[state_name]
if (default_count > threshold and
state.interaction.default_outcome is not None and
state.interaction.default_outcome.dest == state_name):
eligible_flags.append({
'rank': default_count,
'improve_type': IMPROVE_TYPE_DEFAULT})
if no_answer_submitted_count > threshold:
eligible_flags.append({
'rank': no_answer_submitted_count,
'improve_type': IMPROVE_TYPE_INCOMPLETE})
if eligible_flags:
eligible_flags = sorted(
eligible_flags, key=lambda flag: flag['rank'], reverse=True)
ranked_states.append({
'rank': eligible_flags[0]['rank'],
'state_name': state_name,
'type': eligible_flags[0]['improve_type'],
})
return sorted([
ranked_state for ranked_state in ranked_states
if ranked_state['rank'] != 0
], key=lambda x: -x['rank'])
def get_versions_for_exploration_stats(exploration_id):
"""Returns list of versions for this exploration."""
return stats_models.ExplorationAnnotationsModel.get_versions(
exploration_id)
def get_exploration_stats(exploration_id, exploration_version):
"""Returns a dict with state statistics for the given exploration id.
Note that exploration_version should be a string.
"""
exploration = exp_services.get_exploration_by_id(exploration_id)
exp_stats = stats_jobs_continuous.StatisticsAggregator.get_statistics(
exploration_id, exploration_version)
last_updated = exp_stats['last_updated']
state_hit_counts = exp_stats['state_hit_counts']
return {
'improvements': get_state_improvements(
exploration_id, exploration_version),
'last_updated': last_updated,
'num_completions': exp_stats['complete_exploration_count'],
'num_starts': exp_stats['start_exploration_count'],
'state_stats': {
state_name: {
'name': state_name,
'firstEntryCount': (
state_hit_counts[state_name]['first_entry_count']
if state_name in state_hit_counts else 0),
'totalEntryCount': (
state_hit_counts[state_name]['total_entry_count']
if state_name in state_hit_counts else 0),
} for state_name in exploration.states
},
}
|
py | b40cc86acbf89b77297415937897cd3b15df3219 | import rrPython
import os
import csv
os.chdir('C:\\RoadRunner\\bin')
function = 'getNumberOfGlobalParameters'
try:
num = rrPython.getNumberOfGlobalParameters()
    if num is not False:  # the original tested str(num), which is always truthy
result = 'True'
else:
result = 'False'
except:
result = 'False'
PythonTestResults = open('C:\\RoadRunner\\PythonTestResults.csv','a')
writer = csv.writer(PythonTestResults)
writevar = function + '=' + result
writer.writerow([writevar])
PythonTestResults.close() |
py | b40cc916e574b6c2fea544a77c6bf875af92ca90 | """
User API Views
"""
from django.contrib.auth.models import User
from rest_framework import generics, permissions
from rest_framework.exceptions import ParseError, PermissionDenied
from .permissions import IsOwnerOrReadOnly, IsUser
from .models import Profile, UserPhoto
from .serializers import (
ProfileSerializer,
PublicProfileSerializer,
UserSerializer,
UserPhotoSerializer,
)
class UserList(generics.ListAPIView):
"""View to list all users"""
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = (permissions.IsAuthenticated,)
# THIS WILL BE REPLACED. NO TESTS NEEDED
class UserCreate(generics.CreateAPIView):
"""View to create a new user"""
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = (permissions.IsAdminUser,)
class UserRetrieveUpdateDestroy(generics.RetrieveUpdateDestroyAPIView):
"""Retrieve a user or update user information"""
queryset = User.objects.all()
serializer_class = UserSerializer
permission_classes = (IsUser,)
def perform_update(self, serializer):
image = self.request.data.get("image")
if image is not None:
serializer.save(image=image)
else:
serializer.save()
class ProfileList(generics.ListAPIView):
"""View to list all user profiles"""
queryset = Profile.objects.all()
serializer_class = ProfileSerializer
permission_classes = (permissions.IsAuthenticated,)
class ProfileRetrieveUpdate(generics.RetrieveUpdateAPIView):
"""Retrieve a user profile or update its information"""
queryset = Profile.objects.all()
permission_classes = (IsOwnerOrReadOnly,)
def get_serializer_class(self):
if self.kwargs["pk"] == self.request.user.profile.pk:
return ProfileSerializer
return PublicProfileSerializer
class UserPhotoListCreate(generics.ListCreateAPIView):
"""List all user photos or create a new one"""
serializer_class = UserPhotoSerializer
permission_classes = (IsOwnerOrReadOnly,)
def get_queryset(self):
return UserPhoto.objects.filter(profile=self.kwargs["pk"])
def perform_create(self, serializer):
profile = Profile.objects.get(pk=self.kwargs["pk"])
self.check_object_permissions(self.request, profile)
if profile.user != self.request.user:
raise PermissionDenied()
image = self.request.data.get("image")
if image is None:
raise ParseError("Could not find an 'image' in the POST data")
serializer.save(image=image, profile=profile)
class UserPhotoRetrieveUpdateDestroy(generics.RetrieveUpdateDestroyAPIView):
"""Retrieve, update, or delete a user photo"""
queryset = UserPhoto.objects.all()
serializer_class = UserPhotoSerializer
permission_classes = (IsOwnerOrReadOnly,)
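# URL wiring sketch (hypothetical route names; this would normally live in a
# separate urls.py rather than at the bottom of the views module):
#
#   from django.urls import path
#   urlpatterns = [
#       path("users/", UserList.as_view()),
#       path("users/<int:pk>/", UserRetrieveUpdateDestroy.as_view()),
#       path("profiles/", ProfileList.as_view()),
#       path("profiles/<int:pk>/", ProfileRetrieveUpdate.as_view()),
#       path("profiles/<int:pk>/photos/", UserPhotoListCreate.as_view()),
#       path("photos/<int:pk>/", UserPhotoRetrieveUpdateDestroy.as_view()),
#   ]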
|
py | b40cc923b98947e17243e9ac4ebb96d279107b9d | class PsiEffectModel: pass |
py | b40cc9d98f739c5666a668a3023043370455daba | import binascii
import socket
import struct
from typing import Tuple
def get_hex(binary: bytes) -> bytes:
return binascii.hexlify(binary, "-")
def get_ip(ip: bytes) -> str:
return socket.inet_ntoa(ip)
def parse_ethernet_header(raw_data: bytes) -> Tuple:
dest, src, ptype = struct.unpack("!6s6sH", raw_data[:14])
dest_mac = get_hex(dest)
src_mac = get_hex(src)
proto = socket.htons(ptype)
data = raw_data[14:]
return dest_mac, src_mac, proto, data
def parse_arp_header(raw_data: bytes) -> Tuple:
(
htype,
ptype,
hlen,
plen,
opcode,
src_mac,
src_ip,
dest_mac,
dest_ip,
) = struct.unpack("2s2s1s1s2s6s4s6s4s", raw_data)
htype = get_hex(htype)
ptype = get_hex(ptype)
hlen = get_hex(hlen)
plen = get_hex(plen)
opcode = get_hex(opcode)
src_mac = get_hex(src_mac)
src_ip = get_ip(src_ip)
dest_mac = get_hex(dest_mac)
dest_ip = get_ip(dest_ip)
return htype, ptype, hlen, plen, opcode, src_mac, src_ip, dest_mac, dest_ip
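if __name__ == "__main__":
    # Minimal capture-loop sketch (Linux only, needs root): read one frame from
    # a raw AF_PACKET socket and, if it carries ARP (EtherType 0x0806), parse
    # the fixed 28-byte ARP header that follows the 14-byte Ethernet header.
    sock = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.ntohs(3))  # 3 == ETH_P_ALL
    raw_frame, _ = sock.recvfrom(65535)
    dest_mac, src_mac, proto, payload = parse_ethernet_header(raw_frame)
    if proto == socket.htons(0x0806):
        print(parse_arp_header(payload[:28]))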
|
py | b40cc9f4b82067cad395e9dfa5b66ae14faf129e | # This function's purpose is to encode a text, turning each vowel into a 'k' + digit (1 to 5) code.
def encoding(text):
container = ""
for letter in text:
if letter in "a":
container = container + "k5"
elif letter in "e":
container = container + "k4"
elif letter in "i":
container = container + "k3"
elif letter in "o":
container = container + "k2"
elif letter in "u":
container = container + "k1"
else:
container = container + letter
return container
# This function's purpose is to decode a text, turning each 'k' + digit (1 to 5) code back into a vowel.
# If the current value is a 'k' and the next value is a digit between 1 and 5, it appends the vowel associated with that code.
def decoding(lista):
container = []
cont = 0
    while cont < len(lista):
        # Map each 'k' + digit pair back to its vowel; other characters pass through.
        if lista[cont] == 'k' and cont + 1 < len(lista) and lista[cont + 1] == '5':
            container.append('a')
            del lista[cont + 1]  # consume the digit so it is not copied as plain text
        elif lista[cont] == 'k' and cont + 1 < len(lista) and lista[cont + 1] == '4':
            container.append('e')
            del lista[cont + 1]
        elif lista[cont] == 'k' and cont + 1 < len(lista) and lista[cont + 1] == '3':
            container.append('i')
            del lista[cont + 1]
        elif lista[cont] == 'k' and cont + 1 < len(lista) and lista[cont + 1] == '2':
            container.append('o')
            del lista[cont + 1]
        elif lista[cont] == 'k' and cont + 1 < len(lista) and lista[cont + 1] == '1':
            container.append('u')
            del lista[cont + 1]
        else:
            container.append(lista[cont])
        cont += 1
# Turn the list object into string
texto = ''.join(container)
return texto
# I wrote this script below for testing the decoding function
'''
frase = "k5mk5nhk5 e 5 de agosto"
# convert the text string into a list (vector)
lista = (list(frase))
j = decoding(lista)
print(j)
'''
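if __name__ == '__main__':
    # Round-trip sketch: decoding an encoded text returns the original, as long
    # as the input has no literal 'k' directly followed by a digit 1-5 (such a
    # pair would be mistaken for an encoded vowel).
    original = "amanha e 5 de agosto"
    assert decoding(list(encoding(original))) == original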
|
py | b40cca14e283c42b8aa0e00ba5f584e50d79d205 | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
from w3lib.html import remove_tags
from scrapy.loader.processors import MapCompose, TakeFirst, Join
# Remove extra spaces in strings
def strip_space(value):
return value.strip()
# Put only one space between strings
def normalize_space(value):
return " ".join(value.split())
# This class defines the fields that will be created
class ProductItem(scrapy.Item):
# Each item may have a input and output processor
# Each processor performs a series of transformations on the data before saving it
product_id = scrapy.Field(output_processor=TakeFirst())
url = scrapy.Field(output_processor=TakeFirst())
title = scrapy.Field(output_processor=TakeFirst())
description = scrapy.Field(input_processor=MapCompose(normalize_space, remove_tags, strip_space),
output_processor=TakeFirst())
product_options = scrapy.Field(input_processor=MapCompose(normalize_space, remove_tags, strip_space),
output_processor=Join(','))
price = scrapy.Field(input_processor=MapCompose(normalize_space, remove_tags, strip_space),
output_processor=TakeFirst())
currency = scrapy.Field(input_processor=MapCompose(normalize_space, remove_tags, strip_space),
output_processor=TakeFirst())
rating = scrapy.Field(output_processor=TakeFirst())
number_of_votes = scrapy.Field(output_processor=TakeFirst())
count_of_images = scrapy.Field(output_processor=TakeFirst())
overview = scrapy.Field(input_processor=MapCompose(normalize_space, remove_tags, strip_space),
output_processor=Join(','))
favorited_by = scrapy.Field(output_processor=TakeFirst())
store_name = scrapy.Field(input_processor=MapCompose(normalize_space, remove_tags, strip_space),
output_processor=TakeFirst())
store_location = scrapy.Field(input_processor=MapCompose(normalize_space, remove_tags, strip_space),
output_processor=TakeFirst())
return_location = scrapy.Field(input_processor=MapCompose(normalize_space, remove_tags, strip_space),
output_processor=TakeFirst())
    reviews = scrapy.Field()
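# Loader sketch (illustrative; the selectors and response object are
# hypothetical): in a spider callback, an ItemLoader applies the processors
# declared above while populating the fields:
#
#   from scrapy.loader import ItemLoader
#   loader = ItemLoader(item=ProductItem(), response=response)
#   loader.add_css("title", "h1::text")
#   loader.add_value("url", response.url)
#   item = loader.load_item()
|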
py | b40cca76962f221c3fd6d988f10f039053d8f3f6 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class BatchV2alpha1Api(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
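    # Client setup sketch (illustrative; import paths follow the python client
    # generation of this era and may differ by release):
    #   from kubernetes import client, config
    #   config.load_kube_config()
    #   api = client.BatchV2alpha1Api()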
def create_namespaced_cron_job(self, namespace, body, **kwargs):
"""
create a CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_namespaced_cron_job(namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_namespaced_cron_job_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_cron_job_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_cron_job_with_http_info(self, namespace, body, **kwargs):
"""
create a CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_namespaced_cron_job_with_http_info(namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_cron_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_cron_job`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_cron_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_namespaced_scheduled_job(self, namespace, body, **kwargs):
"""
create a ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_namespaced_scheduled_job(namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_namespaced_scheduled_job_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_scheduled_job_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_scheduled_job_with_http_info(self, namespace, body, **kwargs):
"""
create a ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_namespaced_scheduled_job_with_http_info(namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_scheduled_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_scheduled_job`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_scheduled_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_collection_namespaced_cron_job(self, namespace, **kwargs):
"""
delete collection of CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_namespaced_cron_job(namespace, callback=callback_function)
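        >>>
        >>> # Synchronous sketch, assuming `api` is a configured instance of this
        >>> # class; the namespace and label selector are illustrative values:
        >>> status = api.delete_collection_namespaced_cron_job('default', label_selector='app=nightly')
        >>> print(status.status)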
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if '0', the current cache contents are returned with no freshness guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_collection_namespaced_cron_job_with_http_info(namespace, **kwargs)
else:
(data) = self.delete_collection_namespaced_cron_job_with_http_info(namespace, **kwargs)
return data
def delete_collection_namespaced_cron_job_with_http_info(self, namespace, **kwargs):
"""
delete collection of CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_namespaced_cron_job_with_http_info(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if '0', the current cache contents are returned with no freshness guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_namespaced_cron_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_cron_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_collection_namespaced_scheduled_job(self, namespace, **kwargs):
"""
delete collection of ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_namespaced_scheduled_job(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if '0', the current cache contents are returned with no freshness guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_collection_namespaced_scheduled_job_with_http_info(namespace, **kwargs)
else:
(data) = self.delete_collection_namespaced_scheduled_job_with_http_info(namespace, **kwargs)
return data
def delete_collection_namespaced_scheduled_job_with_http_info(self, namespace, **kwargs):
"""
delete collection of ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_namespaced_scheduled_job_with_http_info(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if '0', the current cache contents are returned with no freshness guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_namespaced_scheduled_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_scheduled_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_namespaced_cron_job(self, name, namespace, body, **kwargs):
"""
delete a CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_cron_job(name, namespace, body, callback=callback_function)
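        >>>
        >>> # Synchronous sketch, assuming `api` is a configured instance of this
        >>> # class and V1DeleteOptions is the client's delete-options model; the
        >>> # name and namespace are illustrative:
        >>> opts = V1DeleteOptions(propagation_policy='Foreground')
        >>> status = api.delete_namespaced_cron_job('nightly-report', 'default', opts)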
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
        :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer; zero means delete immediately. If this value is nil, the default grace period for the specified type will be used (a per-object default if not otherwise specified).
        :param bool orphan_dependents: Deprecated: please use the PropagationPolicy; this field will be deprecated in 1.7. Determines whether the dependent objects should be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_namespaced_cron_job_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.delete_namespaced_cron_job_with_http_info(name, namespace, body, **kwargs)
return data
def delete_namespaced_cron_job_with_http_info(self, name, namespace, body, **kwargs):
"""
delete a CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_cron_job_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
        :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer; zero means delete immediately. If this value is nil, the default grace period for the specified type will be used (a per-object default if not otherwise specified).
        :param bool orphan_dependents: Deprecated: please use the PropagationPolicy; this field will be deprecated in 1.7. Determines whether the dependent objects should be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_cron_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_cron_job`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_cron_job`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_cron_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'grace_period_seconds' in params:
query_params['gracePeriodSeconds'] = params['grace_period_seconds']
if 'orphan_dependents' in params:
query_params['orphanDependents'] = params['orphan_dependents']
if 'propagation_policy' in params:
query_params['propagationPolicy'] = params['propagation_policy']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_namespaced_scheduled_job(self, name, namespace, body, **kwargs):
"""
delete a ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_scheduled_job(name, namespace, body, callback=callback_function)
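        >>>
        >>> # Synchronous sketch, assuming `api` is a configured instance of this
        >>> # class; grace_period_seconds=0 requests immediate deletion, and the
        >>> # object names are illustrative:
        >>> status = api.delete_namespaced_scheduled_job('legacy-job', 'default', V1DeleteOptions(), grace_period_seconds=0)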
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
        :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer; zero means delete immediately. If this value is nil, the default grace period for the specified type will be used (a per-object default if not otherwise specified).
        :param bool orphan_dependents: Deprecated: please use the PropagationPolicy; this field will be deprecated in 1.7. Determines whether the dependent objects should be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_namespaced_scheduled_job_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.delete_namespaced_scheduled_job_with_http_info(name, namespace, body, **kwargs)
return data
def delete_namespaced_scheduled_job_with_http_info(self, name, namespace, body, **kwargs):
"""
delete a ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_scheduled_job_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
        :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer; zero means delete immediately. If this value is nil, the default grace period for the specified type will be used (a per-object default if not otherwise specified).
        :param bool orphan_dependents: Deprecated: please use the PropagationPolicy; this field will be deprecated in 1.7. Determines whether the dependent objects should be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_scheduled_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_scheduled_job`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_scheduled_job`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_scheduled_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'grace_period_seconds' in params:
query_params['gracePeriodSeconds'] = params['grace_period_seconds']
if 'orphan_dependents' in params:
query_params['orphanDependents'] = params['orphan_dependents']
if 'propagation_policy' in params:
query_params['propagationPolicy'] = params['propagation_policy']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_api_resources(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_api_resources(callback=callback_function)
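        >>>
        >>> # Synchronous sketch, assuming `api` is a configured instance of this class:
        >>> resources = api.get_api_resources()
        >>> for r in resources.resources:
        >>>     print(r.name)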
:param callback function: The callback function
for asynchronous request. (optional)
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_api_resources_with_http_info(**kwargs)
else:
(data) = self.get_api_resources_with_http_info(**kwargs)
return data
def get_api_resources_with_http_info(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_api_resources_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_resources" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIResourceList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_cron_job_for_all_namespaces(self, **kwargs):
"""
list or watch objects of kind CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_cron_job_for_all_namespaces(callback=callback_function)
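        >>>
        >>> # Synchronous sketch, assuming `api` is a configured instance of this
        >>> # class; the label selector is an illustrative value:
        >>> cron_jobs = api.list_cron_job_for_all_namespaces(label_selector='team=data')
        >>> for cj in cron_jobs.items:
        >>>     print(cj.metadata.namespace, cj.metadata.name)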
:param callback function: The callback function
for asynchronous request. (optional)
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str pretty: If 'true', then the output is pretty printed.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if '0', the current cache contents are returned with no freshness guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V2alpha1CronJobList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_cron_job_for_all_namespaces_with_http_info(**kwargs)
else:
(data) = self.list_cron_job_for_all_namespaces_with_http_info(**kwargs)
return data
def list_cron_job_for_all_namespaces_with_http_info(self, **kwargs):
"""
list or watch objects of kind CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_cron_job_for_all_namespaces_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str pretty: If 'true', then the output is pretty printed.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if '0', the current cache contents are returned with no freshness guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V2alpha1CronJobList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['field_selector', 'include_uninitialized', 'label_selector', 'pretty', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_cron_job_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/cronjobs'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJobList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_namespaced_cron_job(self, namespace, **kwargs):
"""
list or watch objects of kind CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_namespaced_cron_job(namespace, callback=callback_function)
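        >>>
        >>> # Synchronous sketch, assuming `api` is a configured instance of this
        >>> # class; timeout_seconds bounds the call so it returns promptly:
        >>> jobs = api.list_namespaced_cron_job('default', timeout_seconds=30)
        >>> print(len(jobs.items))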
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if '0', the current cache contents are returned with no freshness guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V2alpha1CronJobList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_namespaced_cron_job_with_http_info(namespace, **kwargs)
else:
(data) = self.list_namespaced_cron_job_with_http_info(namespace, **kwargs)
return data
def list_namespaced_cron_job_with_http_info(self, namespace, **kwargs):
"""
list or watch objects of kind CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_namespaced_cron_job_with_http_info(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if '0', the current cache contents are returned with no freshness guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V2alpha1CronJobList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_namespaced_cron_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_cron_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJobList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_namespaced_scheduled_job(self, namespace, **kwargs):
"""
list or watch objects of kind ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_namespaced_scheduled_job(namespace, callback=callback_function)
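        >>>
        >>> # Synchronous sketch, assuming `api` is a configured instance of this
        >>> # class; per the resource_version semantics below, '0' may serve a
        >>> # cached (possibly stale) list:
        >>> jobs = api.list_namespaced_scheduled_job('default', resource_version='0')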
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if '0', the current cache contents are returned with no freshness guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V2alpha1CronJobList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_namespaced_scheduled_job_with_http_info(namespace, **kwargs)
else:
(data) = self.list_namespaced_scheduled_job_with_http_info(namespace, **kwargs)
return data
def list_namespaced_scheduled_job_with_http_info(self, namespace, **kwargs):
"""
list or watch objects of kind ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_namespaced_scheduled_job_with_http_info(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if '0', the current cache contents are returned with no freshness guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V2alpha1CronJobList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_namespaced_scheduled_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_scheduled_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJobList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_scheduled_job_for_all_namespaces(self, **kwargs):
"""
list or watch objects of kind ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_scheduled_job_for_all_namespaces(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str pretty: If 'true', then the output is pretty printed.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if '0', the current cache contents are returned with no freshness guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V2alpha1CronJobList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_scheduled_job_for_all_namespaces_with_http_info(**kwargs)
else:
(data) = self.list_scheduled_job_for_all_namespaces_with_http_info(**kwargs)
return data
def list_scheduled_job_for_all_namespaces_with_http_info(self, **kwargs):
"""
list or watch objects of kind ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_scheduled_job_for_all_namespaces_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str pretty: If 'true', then the output is pretty printed.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if '0', the current cache contents are returned with no freshness guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V2alpha1CronJobList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['field_selector', 'include_uninitialized', 'label_selector', 'pretty', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_scheduled_job_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/scheduledjobs'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJobList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_cron_job(self, name, namespace, body, **kwargs):
"""
partially update the specified CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_namespaced_cron_job(name, namespace, body, callback=callback_function)
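        >>>
        >>> # Synchronous sketch, assuming `api` is a configured instance of this
        >>> # class and that the selected patch Content-Type is JSON-Patch, so the
        >>> # body is a list of operations; names are illustrative:
        >>> patch = [{'op': 'replace', 'path': '/spec/suspend', 'value': True}]
        >>> patched = api.patch_namespaced_cron_job('nightly-report', 'default', patch)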
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.patch_namespaced_cron_job_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.patch_namespaced_cron_job_with_http_info(name, namespace, body, **kwargs)
return data
def patch_namespaced_cron_job_with_http_info(self, name, namespace, body, **kwargs):
"""
partially update the specified CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_namespaced_cron_job_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_cron_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_cron_job`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_cron_job`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_cron_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_cron_job_status(self, name, namespace, body, **kwargs):
"""
partially update status of the specified CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_namespaced_cron_job_status(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.patch_namespaced_cron_job_status_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.patch_namespaced_cron_job_status_with_http_info(name, namespace, body, **kwargs)
return data
def patch_namespaced_cron_job_status_with_http_info(self, name, namespace, body, **kwargs):
"""
partially update status of the specified CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_namespaced_cron_job_status_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_cron_job_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_cron_job_status`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_cron_job_status`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_cron_job_status`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}/status'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_scheduled_job(self, name, namespace, body, **kwargs):
"""
partially update the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_namespaced_scheduled_job(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.patch_namespaced_scheduled_job_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.patch_namespaced_scheduled_job_with_http_info(name, namespace, body, **kwargs)
return data
def patch_namespaced_scheduled_job_with_http_info(self, name, namespace, body, **kwargs):
"""
partially update the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_namespaced_scheduled_job_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_scheduled_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_scheduled_job`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_scheduled_job`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_scheduled_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_scheduled_job_status(self, name, namespace, body, **kwargs):
"""
partially update status of the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_namespaced_scheduled_job_status(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.patch_namespaced_scheduled_job_status_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.patch_namespaced_scheduled_job_status_with_http_info(name, namespace, body, **kwargs)
return data
def patch_namespaced_scheduled_job_status_with_http_info(self, name, namespace, body, **kwargs):
"""
partially update status of the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_namespaced_scheduled_job_status_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_scheduled_job_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_scheduled_job_status`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_scheduled_job_status`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_scheduled_job_status`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}/status'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_namespaced_cron_job(self, name, namespace, **kwargs):
"""
read the specified CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_cron_job(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
:param bool export: Should this value be exported. Export strips fields that a user can not specify.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.read_namespaced_cron_job_with_http_info(name, namespace, **kwargs)
else:
(data) = self.read_namespaced_cron_job_with_http_info(name, namespace, **kwargs)
return data
def read_namespaced_cron_job_with_http_info(self, name, namespace, **kwargs):
"""
read the specified CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_cron_job_with_http_info(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
:param bool export: Should this value be exported. Export strips fields that a user can not specify.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'pretty', 'exact', 'export']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_namespaced_cron_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_namespaced_cron_job`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_cron_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'exact' in params:
query_params['exact'] = params['exact']
if 'export' in params:
query_params['export'] = params['export']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_namespaced_cron_job_status(self, name, namespace, **kwargs):
"""
read status of the specified CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_cron_job_status(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.read_namespaced_cron_job_status_with_http_info(name, namespace, **kwargs)
else:
(data) = self.read_namespaced_cron_job_status_with_http_info(name, namespace, **kwargs)
return data
def read_namespaced_cron_job_status_with_http_info(self, name, namespace, **kwargs):
"""
read status of the specified CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_cron_job_status_with_http_info(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_namespaced_cron_job_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_namespaced_cron_job_status`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_cron_job_status`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}/status'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_namespaced_scheduled_job(self, name, namespace, **kwargs):
"""
read the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_scheduled_job(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
:param bool export: Should this value be exported. Export strips fields that a user can not specify.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.read_namespaced_scheduled_job_with_http_info(name, namespace, **kwargs)
else:
(data) = self.read_namespaced_scheduled_job_with_http_info(name, namespace, **kwargs)
return data
def read_namespaced_scheduled_job_with_http_info(self, name, namespace, **kwargs):
"""
read the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_scheduled_job_with_http_info(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
:param bool export: Should this value be exported. Export strips fields that a user can not specify.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'pretty', 'exact', 'export']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_namespaced_scheduled_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_namespaced_scheduled_job`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_scheduled_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'exact' in params:
query_params['exact'] = params['exact']
if 'export' in params:
query_params['export'] = params['export']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_namespaced_scheduled_job_status(self, name, namespace, **kwargs):
"""
read status of the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_scheduled_job_status(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.read_namespaced_scheduled_job_status_with_http_info(name, namespace, **kwargs)
else:
(data) = self.read_namespaced_scheduled_job_status_with_http_info(name, namespace, **kwargs)
return data
def read_namespaced_scheduled_job_status_with_http_info(self, name, namespace, **kwargs):
"""
read status of the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.read_namespaced_scheduled_job_status_with_http_info(name, namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_namespaced_scheduled_job_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_namespaced_scheduled_job_status`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_scheduled_job_status`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}/status'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_namespaced_cron_job(self, name, namespace, body, **kwargs):
"""
replace the specified CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_cron_job(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.replace_namespaced_cron_job_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_cron_job_with_http_info(name, namespace, body, **kwargs)
return data
def replace_namespaced_cron_job_with_http_info(self, name, namespace, body, **kwargs):
"""
replace the specified CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_cron_job_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_namespaced_cron_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_cron_job`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_cron_job`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_cron_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_namespaced_cron_job_status(self, name, namespace, body, **kwargs):
"""
replace status of the specified CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_cron_job_status(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.replace_namespaced_cron_job_status_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_cron_job_status_with_http_info(name, namespace, body, **kwargs)
return data
def replace_namespaced_cron_job_status_with_http_info(self, name, namespace, body, **kwargs):
"""
replace status of the specified CronJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_cron_job_status_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the CronJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_namespaced_cron_job_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_cron_job_status`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_cron_job_status`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_cron_job_status`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/cronjobs/{name}/status'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_namespaced_scheduled_job(self, name, namespace, body, **kwargs):
"""
replace the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_scheduled_job(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.replace_namespaced_scheduled_job_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_scheduled_job_with_http_info(name, namespace, body, **kwargs)
return data
def replace_namespaced_scheduled_job_with_http_info(self, name, namespace, body, **kwargs):
"""
replace the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_scheduled_job_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_namespaced_scheduled_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_scheduled_job`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_scheduled_job`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_scheduled_job`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_namespaced_scheduled_job_status(self, name, namespace, body, **kwargs):
"""
replace status of the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_scheduled_job_status(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.replace_namespaced_scheduled_job_status_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_scheduled_job_status_with_http_info(name, namespace, body, **kwargs)
return data
def replace_namespaced_scheduled_job_status_with_http_info(self, name, namespace, body, **kwargs):
"""
replace status of the specified ScheduledJob
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.replace_namespaced_scheduled_job_status_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ScheduledJob (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V2alpha1CronJob body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V2alpha1CronJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_namespaced_scheduled_job_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_scheduled_job_status`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_scheduled_job_status`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_scheduled_job_status`")
collection_formats = {}
resource_path = '/apis/batch/v2alpha1/namespaces/{namespace}/scheduledjobs/{name}/status'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V2alpha1CronJob',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
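# Illustrative usage sketch (editorial addition, not part of the generated
# client). It assumes the `kubernetes` package is installed and that the
# active kubeconfig grants access to the batch/v2alpha1 API group; the
# CronJob name and namespace below are placeholders.
#
#     from kubernetes import client, config
#
#     config.load_kube_config()
#     batch_v2alpha1 = client.BatchV2alpha1Api()
#
#     # Suspend a CronJob with a strategic merge patch, then read it back.
#     patch = {'spec': {'suspend': True}}
#     batch_v2alpha1.patch_namespaced_cron_job('my-cron-job', 'default', patch)
#     cron_job = batch_v2alpha1.read_namespaced_cron_job('my-cron-job', 'default')
#     print(cron_job.spec.suspend)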
|
py | b40cca975bc85568bbe0aab89d02cf021ec3e06f | for case in range(int(input())):
name = {input() for _ in range(int(input()))}
result = 0
used = name.copy()
for _ in range(int(input())):
query = input()
used -= {query}
if not used:
used = name.copy() - {query}
result += 1
print('Case #{}: {}'.format(case+1, result))
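# Editorial note (illustrative): `used` tracks the names not yet seen in the
# current block of queries. Whenever a query empties the set, one block ends
# (`result += 1`) and the candidate set is rebuilt from scratch, minus the
# query that closed the block.
#
# Hand-checked trace, assuming names = {'a', 'b'} and queries ['a', 'b', 'a']:
# 'a' leaves used = {'b'}; 'b' empties it (result = 1, used resets to {'a'});
# 'a' empties it again (result = 2). The program prints "Case #1: 2".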
|
py | b40ccaba8275990f900cfae82df4f21b81a9c0c2 | # Modified from https://github.com/facebookresearch/detectron2/tree/master/projects/PointRend # noqa
from os import path as osp
from typing import Tuple, Union
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
from torch.nn.modules.utils import _pair
from torch.onnx.operators import shape_as_tensor
def bilinear_grid_sample(im: Tensor,
grid: Tensor,
align_corners: bool = False) -> Tensor:
"""Given an input and a flow-field grid, computes the output using input
values and pixel locations from grid. Supported only bilinear interpolation
method to sample the input pixels.
Args:
im (torch.Tensor): Input feature map, shape (N, C, H, W)
grid (torch.Tensor): Point coordinates, shape (N, Hg, Wg, 2)
align_corners (bool): If set to True, the extrema (-1 and 1) are
considered as referring to the center points of the input’s
corner pixels. If set to False, they are instead considered as
referring to the corner points of the input’s corner pixels,
making the sampling more resolution agnostic.
Returns:
torch.Tensor: A tensor with sampled points, shape (N, C, Hg, Wg)
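    Example (illustrative sketch, added for clarity; not in the original):
        >>> im = torch.randn(1, 3, 8, 8)
        >>> grid = torch.rand(1, 4, 4, 2) * 2 - 1  # coordinates in [-1, 1]
        >>> bilinear_grid_sample(im, grid, align_corners=False).shape
        torch.Size([1, 3, 4, 4])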
"""
n, c, h, w = im.shape
gn, gh, gw, _ = grid.shape
assert n == gn
x = grid[:, :, :, 0]
y = grid[:, :, :, 1]
if align_corners:
x = ((x + 1) / 2) * (w - 1)
y = ((y + 1) / 2) * (h - 1)
else:
x = ((x + 1) * w - 1) / 2
y = ((y + 1) * h - 1) / 2
x = x.view(n, -1)
y = y.view(n, -1)
x0 = torch.floor(x).long()
y0 = torch.floor(y).long()
x1 = x0 + 1
y1 = y0 + 1
wa = ((x1 - x) * (y1 - y)).unsqueeze(1)
wb = ((x1 - x) * (y - y0)).unsqueeze(1)
wc = ((x - x0) * (y1 - y)).unsqueeze(1)
wd = ((x - x0) * (y - y0)).unsqueeze(1)
    # Pad the input with zeros, matching grid_sample's default padding mode
im_padded = F.pad(im, pad=[1, 1, 1, 1], mode='constant', value=0)
padded_h = h + 2
padded_w = w + 2
# save points positions after padding
x0, x1, y0, y1 = x0 + 1, x1 + 1, y0 + 1, y1 + 1
# Clip coordinates to padded image size
    # (clamp keeps the indices on the input's device; torch.where with
    # freshly created CPU scalar tensors fails for CUDA inputs)
    x0 = x0.clamp(min=0, max=padded_w - 1)
    x1 = x1.clamp(min=0, max=padded_w - 1)
    y0 = y0.clamp(min=0, max=padded_h - 1)
    y1 = y1.clamp(min=0, max=padded_h - 1)
im_padded = im_padded.view(n, c, -1)
x0_y0 = (x0 + y0 * padded_w).unsqueeze(1).expand(-1, c, -1)
x0_y1 = (x0 + y1 * padded_w).unsqueeze(1).expand(-1, c, -1)
x1_y0 = (x1 + y0 * padded_w).unsqueeze(1).expand(-1, c, -1)
x1_y1 = (x1 + y1 * padded_w).unsqueeze(1).expand(-1, c, -1)
Ia = torch.gather(im_padded, 2, x0_y0)
Ib = torch.gather(im_padded, 2, x0_y1)
Ic = torch.gather(im_padded, 2, x1_y0)
Id = torch.gather(im_padded, 2, x1_y1)
return (Ia * wa + Ib * wb + Ic * wc + Id * wd).reshape(n, c, gh, gw)
def is_in_onnx_export_without_custom_ops() -> bool:
from mmcv.ops import get_onnxruntime_op_path
ort_custom_op_path = get_onnxruntime_op_path()
return torch.onnx.is_in_onnx_export(
) and not osp.exists(ort_custom_op_path)
def normalize(grid: Tensor) -> Tensor:
"""Normalize input grid from [-1, 1] to [0, 1]
Args:
grid (torch.Tensor): The grid to be normalize, range [-1, 1].
Returns:
torch.Tensor: Normalized grid, range [0, 1].
"""
return (grid + 1.0) / 2.0
def denormalize(grid: Tensor) -> Tensor:
"""Denormalize input grid from range [0, 1] to [-1, 1]
Args:
grid (torch.Tensor): The grid to be denormalize, range [0, 1].
Returns:
torch.Tensor: Denormalized grid, range [-1, 1].
"""
return grid * 2.0 - 1.0
def generate_grid(num_grid: int, size: Tuple[int, int],
device: torch.device) -> Tensor:
"""Generate regular square grid of points in [0, 1] x [0, 1] coordinate
space.
Args:
num_grid (int): The number of grids to sample, one for each region.
size (tuple[int, int]): The side size of the regular grid.
device (torch.device): Desired device of returned tensor.
Returns:
torch.Tensor: A tensor of shape (num_grid, size[0]*size[1], 2) that
contains coordinates for the regular grids.
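    Example (illustrative sketch of the documented shape):
        >>> pts = generate_grid(2, (3, 3), device=torch.device('cpu'))
        >>> pts.shape
        torch.Size([2, 9, 2])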
"""
affine_trans = torch.tensor([[[1., 0., 0.], [0., 1., 0.]]], device=device)
grid = F.affine_grid(
affine_trans, torch.Size((1, 1, *size)), align_corners=False)
grid = normalize(grid)
return grid.view(1, -1, 2).expand(num_grid, -1, -1)
def rel_roi_point_to_abs_img_point(rois: Tensor,
rel_roi_points: Tensor) -> Tensor:
"""Convert roi based relative point coordinates to image based absolute
point coordinates.
Args:
rois (torch.Tensor): RoIs or BBoxes, shape (N, 4) or (N, 5)
        rel_roi_points (torch.Tensor): Point coordinates inside RoI, relative
            to the RoI location, range (0, 1), shape (N, P, 2)
Returns:
torch.Tensor: Image based absolute point coordinates, shape (N, P, 2)
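    Example (illustrative, hand-checkable numbers):
        >>> rois = torch.tensor([[0., 0., 4., 4.]])
        >>> rel_points = torch.tensor([[[0.5, 0.5]]])
        >>> rel_roi_point_to_abs_img_point(rois, rel_points)
        tensor([[[2., 2.]]])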
"""
with torch.no_grad():
assert rel_roi_points.size(0) == rois.size(0)
assert rois.dim() == 2
assert rel_roi_points.dim() == 3
assert rel_roi_points.size(2) == 2
# remove batch idx
if rois.size(1) == 5:
rois = rois[:, 1:]
abs_img_points = rel_roi_points.clone()
        # To avoid an error when exporting to ONNX, use independent
        # variables instead of in-place computation
xs = abs_img_points[:, :, 0] * (rois[:, None, 2] - rois[:, None, 0])
ys = abs_img_points[:, :, 1] * (rois[:, None, 3] - rois[:, None, 1])
xs += rois[:, None, 0]
ys += rois[:, None, 1]
abs_img_points = torch.stack([xs, ys], dim=2)
return abs_img_points
def get_shape_from_feature_map(x: Tensor) -> Tensor:
"""Get spatial resolution of input feature map considering exporting to
onnx mode.
Args:
x (torch.Tensor): Input tensor, shape (N, C, H, W)
Returns:
torch.Tensor: Spatial resolution (width, height), shape (1, 1, 2)
"""
if torch.onnx.is_in_onnx_export():
img_shape = shape_as_tensor(x)[2:].flip(0).view(1, 1, 2).to(
x.device).float()
else:
img_shape = torch.tensor(x.shape[2:]).flip(0).view(1, 1, 2).to(
x.device).float()
return img_shape
def abs_img_point_to_rel_img_point(abs_img_points: Tensor,
img: Union[tuple, Tensor],
spatial_scale: float = 1.) -> Tensor:
"""Convert image based absolute point coordinates to image based relative
coordinates for sampling.
Args:
abs_img_points (torch.Tensor): Image based absolute point coordinates,
shape (N, P, 2)
img (tuple or torch.Tensor): (height, width) of image or feature map.
spatial_scale (float, optional): Scale points by this factor.
Default: 1.
Returns:
Tensor: Image based relative point coordinates for sampling, shape
(N, P, 2).
"""
assert (isinstance(img, tuple) and len(img) == 2) or \
(isinstance(img, torch.Tensor) and len(img.shape) == 4)
if isinstance(img, tuple):
h, w = img
scale = torch.tensor([w, h],
dtype=torch.float,
device=abs_img_points.device)
scale = scale.view(1, 1, 2)
else:
scale = get_shape_from_feature_map(img)
return abs_img_points / scale * spatial_scale
def rel_roi_point_to_rel_img_point(rois: Tensor,
rel_roi_points: Tensor,
img: Union[tuple, Tensor],
spatial_scale: float = 1.) -> Tensor:
"""Convert roi based relative point coordinates to image based absolute
point coordinates.
Args:
rois (torch.Tensor): RoIs or BBoxes, shape (N, 4) or (N, 5)
rel_roi_points (torch.Tensor): Point coordinates inside RoI, relative
to RoI location, range (0, 1), shape (N, P, 2)
img (tuple or torch.Tensor): (height, width) of image or feature map.
spatial_scale (float, optional): Scale points by this factor.
Default: 1.
Returns:
torch.Tensor: Image based relative point coordinates for sampling,
shape (N, P, 2).
"""
abs_img_point = rel_roi_point_to_abs_img_point(rois, rel_roi_points)
rel_img_point = abs_img_point_to_rel_img_point(abs_img_point, img,
spatial_scale)
return rel_img_point
def point_sample(input: Tensor,
points: Tensor,
align_corners: bool = False,
**kwargs) -> Tensor:
"""A wrapper around :func:`grid_sample` to support 3D point_coords tensors
Unlike :func:`torch.nn.functional.grid_sample` it assumes point_coords to
lie inside ``[0, 1] x [0, 1]`` square.
Args:
input (torch.Tensor): Feature map, shape (N, C, H, W).
points (torch.Tensor): Image based absolute point coordinates
(normalized), range [0, 1] x [0, 1], shape (N, P, 2) or
(N, Hgrid, Wgrid, 2).
align_corners (bool, optional): Whether align_corners.
Default: False
Returns:
torch.Tensor: Features of `point` on `input`, shape (N, C, P) or
(N, C, Hgrid, Wgrid).
"""
add_dim = False
if points.dim() == 3:
add_dim = True
points = points.unsqueeze(2)
if is_in_onnx_export_without_custom_ops():
# If custom ops for ONNX Runtime are not compiled, fall back to the
# Python implementation of grid_sample so the exported ONNX graph
# only contains supported nodes
output = bilinear_grid_sample(
input, denormalize(points), align_corners=align_corners)
else:
output = F.grid_sample(
input, denormalize(points), align_corners=align_corners, **kwargs)
if add_dim:
output = output.squeeze(3)
return output
class SimpleRoIAlign(nn.Module):
def __init__(self,
output_size: Tuple[int],
spatial_scale: float,
aligned: bool = True) -> None:
"""Simple RoI align in PointRend, faster than standard RoIAlign.
Args:
output_size (tuple[int]): h, w
spatial_scale (float): scale the input boxes by this number
aligned (bool): if False, use the legacy implementation in
MMDetection; align_corners=True will be used in F.grid_sample.
If True, align the results more precisely.
"""
super().__init__()
self.output_size = _pair(output_size)
self.spatial_scale = float(spatial_scale)
# to be consistent with other RoI ops
self.use_torchvision = False
self.aligned = aligned
def forward(self, features: Tensor, rois: Tensor) -> Tensor:
num_imgs = features.size(0)
num_rois = rois.size(0)
rel_roi_points = generate_grid(
num_rois, self.output_size, device=rois.device)
if torch.onnx.is_in_onnx_export():
rel_img_points = rel_roi_point_to_rel_img_point(
rois, rel_roi_points, features, self.spatial_scale)
rel_img_points = rel_img_points.reshape(num_imgs, -1,
*rel_img_points.shape[1:])
point_feats = point_sample(
features, rel_img_points, align_corners=not self.aligned)
point_feats = point_feats.transpose(1, 2)
else:
point_feats = []
for batch_ind in range(num_imgs):
# unravel batch dim
feat = features[batch_ind].unsqueeze(0)
inds = (rois[:, 0].long() == batch_ind)
if inds.any():
rel_img_points = rel_roi_point_to_rel_img_point(
rois[inds], rel_roi_points[inds], feat,
self.spatial_scale).unsqueeze(0)
point_feat = point_sample(
feat, rel_img_points, align_corners=not self.aligned)
point_feat = point_feat.squeeze(0).transpose(0, 1)
point_feats.append(point_feat)
point_feats = torch.cat(point_feats, dim=0)
channels = features.size(1)
roi_feats = point_feats.reshape(num_rois, channels, *self.output_size)
return roi_feats
def __repr__(self) -> str:
format_str = self.__class__.__name__
format_str += '(output_size={}, spatial_scale={})'.format(
self.output_size, self.spatial_scale)
return format_str
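# --- Usage sketch (illustrative, not part of the original module) ---
# A quick sanity check of `point_sample`, assuming torch and mmcv (needed by
# the ONNX helper above) are installed. Points live in the normalized
# [0, 1] x [0, 1] space described in the docstring.
if __name__ == '__main__':
    feats = torch.arange(4.).view(1, 1, 2, 2)  # (N=1, C=1, H=2, W=2)
    pts = torch.tensor([[[0.25, 0.25], [0.75, 0.75]]])  # (N=1, P=2, 2)
    out = point_sample(feats, pts)
    print(out.shape, out)  # expected: torch.Size([1, 1, 2]), values ~[0., 3.]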
|
py | b40ccac52517b5aa3ca1ae6a60e38c5ab18d4ee3 | import abc
import gzip
import hashlib
import logging
import mimetypes
import pydoc
from functools import lru_cache
from io import BytesIO
from typing import ClassVar
from typing import Generic
from typing import NoReturn
from typing import Optional
from typing import Tuple
from typing import Type
from typing import TypeVar
from typing import Union
from django.core.cache import caches
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import Storage
from django.utils.encoding import force_bytes
from collectfast import settings
_RemoteStorage = TypeVar("_RemoteStorage", bound=Storage)
cache = caches[settings.cache]
logger = logging.getLogger(__name__)
class Strategy(abc.ABC, Generic[_RemoteStorage]):
# Exceptions raised by storage backend for delete calls to non-existing
# objects. The command silently catches these.
delete_not_found_exception: ClassVar[Tuple[Type[Exception], ...]] = ()
def __init__(self, remote_storage: _RemoteStorage) -> None:
self.remote_storage = remote_storage
@abc.abstractmethod
def should_copy_file(
self, path: str, prefixed_path: str, local_storage: Storage
) -> bool:
"""
Called for each file before copying happens, this method decides
whether a file should be copied or not. Return False to indicate that
the file is already up-to-date and should not be copied, or True to
indicate that it is stale and needs updating.
"""
...
def pre_should_copy_hook(self) -> None:
"""Hook called before calling should_copy_file."""
...
def post_copy_hook(
self, path: str, prefixed_path: str, local_storage: Storage
) -> None:
"""Hook called after a file is copied."""
...
def on_skip_hook(
self, path: str, prefixed_path: str, local_storage: Storage
) -> None:
"""Hook called when a file copy is skipped."""
...
class HashStrategy(Strategy[_RemoteStorage], abc.ABC):
use_gzip = False
def should_copy_file(
self, path: str, prefixed_path: str, local_storage: Storage
) -> bool:
local_hash = self.get_local_file_hash(path, local_storage)
remote_hash = self.get_remote_file_hash(prefixed_path)
return local_hash != remote_hash
def get_gzipped_local_file_hash(
self, uncompressed_file_hash: str, path: str, contents: str
) -> str:
buffer = BytesIO()
zf = gzip.GzipFile(mode="wb", fileobj=buffer, mtime=0.0)
zf.write(force_bytes(contents))
zf.close()
return hashlib.md5(buffer.getvalue()).hexdigest()
@lru_cache(maxsize=None)
def get_local_file_hash(self, path: str, local_storage: Storage) -> str:
"""Create md5 hash from file contents."""
# Read file contents and handle file closing
file = local_storage.open(path)
try:
contents = file.read()
finally:
file.close()
file_hash = hashlib.md5(contents).hexdigest()
# Check if content should be gzipped and hash gzipped content
content_type = mimetypes.guess_type(path)[0] or "application/octet-stream"
if self.use_gzip and content_type in settings.gzip_content_types:
file_hash = self.get_gzipped_local_file_hash(file_hash, path, contents)
return file_hash
@abc.abstractmethod
def get_remote_file_hash(self, prefixed_path: str) -> Optional[str]:
...
class CachingHashStrategy(HashStrategy[_RemoteStorage], abc.ABC):
@lru_cache(maxsize=None)
def get_cache_key(self, path: str) -> str:
path_hash = hashlib.md5(path.encode()).hexdigest()
return settings.cache_key_prefix + path_hash
def invalidate_cached_hash(self, path: str) -> None:
cache.delete(self.get_cache_key(path))
def should_copy_file(
self, path: str, prefixed_path: str, local_storage: Storage
) -> bool:
local_hash = self.get_local_file_hash(path, local_storage)
remote_hash = self.get_cached_remote_file_hash(path, prefixed_path)
if local_hash != remote_hash:
# invalidate cached hash, since we expect its corresponding file to
# be overwritten
self.invalidate_cached_hash(path)
return True
return False
def get_cached_remote_file_hash(self, path: str, prefixed_path: str) -> str:
"""Cache the hash of the remote storage file."""
cache_key = self.get_cache_key(path)
hash_ = cache.get(cache_key, False)
if hash_ is False:
hash_ = self.get_remote_file_hash(prefixed_path)
cache.set(cache_key, hash_)
return str(hash_)
def get_gzipped_local_file_hash(
self, uncompressed_file_hash: str, path: str, contents: str
) -> str:
"""Cache the hash of the gzipped local file."""
cache_key = self.get_cache_key("gzip_hash_%s" % uncompressed_file_hash)
file_hash = cache.get(cache_key, False)
if file_hash is False:
file_hash = super().get_gzipped_local_file_hash(
uncompressed_file_hash, path, contents
)
cache.set(cache_key, file_hash)
return str(file_hash)
def post_copy_hook(
self, path: str, prefixed_path: str, local_storage: Storage
) -> None:
"""Cache the hash of the just copied local file."""
super().post_copy_hook(path, prefixed_path, local_storage)
key = self.get_cache_key(path)
value = self.get_local_file_hash(path, local_storage)
cache.set(key, value)
class DisabledStrategy(Strategy):
def should_copy_file(
self, path: str, prefixed_path: str, local_storage: Storage
) -> NoReturn:
raise NotImplementedError
def pre_should_copy_hook(self) -> NoReturn:
raise NotImplementedError
def load_strategy(klass: Union[str, type, object]) -> Type[Strategy[Storage]]:
if isinstance(klass, str):
klass = pydoc.locate(klass)
if not isinstance(klass, type) or not issubclass(klass, Strategy):
raise ImproperlyConfigured(
"Configured strategies must be subclasses of %s.%s"
% (Strategy.__module__, Strategy.__qualname__)
)
return klass
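# --- Illustrative sketch, not part of collectfast ---
# A minimal concrete HashStrategy showing the intended extension point:
# subclasses only need to supply `get_remote_file_hash`. The
# `bucket.Object(...).e_tag` access below assumes a django-storages S3
# backend with boto3-style resources; treat it as a sketch under those
# assumptions, not as an API this module guarantees.
class ExampleS3Strategy(HashStrategy):
    def get_remote_file_hash(self, prefixed_path: str) -> Optional[str]:
        try:
            # For non-multipart uploads the S3 ETag is the MD5 hex digest,
            # which matches get_local_file_hash() above.
            obj = self.remote_storage.bucket.Object(prefixed_path)
            return obj.e_tag.strip('"')
        except Exception:
            return None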
|
py | b40ccbe491f43bdc42cd571aac82c606619434f9 | import argparse
import os
import tarfile
FLAT_DIRS_REPO_TEMPLATE='flatDir {{ dirs {dirs} }}\n'
MAVEN_REPO_TEMPLATE='maven {{ url "{repo}" }}\n'
KEYSTORE_TEMPLATE='signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n'
ENABLE_JAVADOC = 'tasks["bundle${suffix}Aar"].dependsOn packageJavadocTask'
DO_NOT_STRIP = '''\
packagingOptions {
doNotStrip "*/arm64-v8a/*.so"
doNotStrip "*/armeabi-v7a/*.so"
doNotStrip "*/x86_64/*.so"
doNotStrip "*/x86/*.so"
}
'''
AAR_TEMPLATE = """\
ext.jniLibsDirs = [
{jni_libs_dirs}
]
ext.resDirs = [
{res_dirs}
]
ext.assetsDirs = [
{assets_dirs}
]
ext.javaDirs = [
{java_dirs}
]
def aidlDirs = [
{aidl_dirs}
]
ext.bundles = [
{bundles}
]
ext.androidArs = [
{aars}
]
def minVersion = 15
def compileVersion = 28
def targetVersion = 28
def buildVersion = '28.0.3'
import com.android.build.gradle.LibraryPlugin
import java.nio.file.Files
import java.nio.file.Paths
import java.util.regex.Matcher
import java.util.regex.Pattern
import java.util.zip.ZipFile
apply plugin: 'com.github.dcendents.android-maven'
buildDir = "$projectDir/build"
if (!ext.has("packageSuffix"))
ext.packageSuffix = ""
buildscript {{
// repositories {{
// jcenter()
// mavenCentral()
// }}
repositories {{
{maven_repos}
}}
dependencies {{
classpath 'com.android.tools.build:gradle:3.5.3'
classpath 'com.github.dcendents:android-maven-gradle-plugin:1.5'
}}
}}
apply plugin: LibraryPlugin
repositories {{
// flatDir {{
// dirs System.env.PKG_ROOT + '/bundle'
// }}
// maven {{
// url "http://maven.google.com/"
// }}
// maven {{
// url "http://artifactory.yandex.net/artifactory/public/"
// }}
{flat_dirs_repo}
{maven_repos}
}}
android {{
{keystore}
compileSdkVersion compileVersion
buildToolsVersion buildVersion
defaultConfig {{
minSdkVersion minVersion
targetSdkVersion targetVersion
consumerProguardFiles '{proguard_rules}'
}}
sourceSets {{
main {{
manifest.srcFile '{manifest}'
jniLibs.srcDirs = jniLibsDirs
res.srcDirs = resDirs
assets.srcDirs = assetsDirs
java.srcDirs = javaDirs
aidl.srcDirs = aidlDirs
}}
// We don't use this feature, so we set it to a nonexistent directory
androidTest.setRoot('bundle/tests')
}}
{do_not_strip}
dependencies {{
for (bundle in bundles)
compile("$bundle") {{
transitive = true
}}
for (bundle in androidArs)
compile(bundle) {{
transitive = true
}}
}}
android.libraryVariants.all {{ variant ->
def suffix = variant.buildType.name.capitalize()
def sourcesJarTask = project.tasks.create(name: "sourcesJar${{suffix}}", type: Jar) {{
classifier = 'sources'
from android.sourceSets.main.java.srcDirs
include '**/*.java'
eachFile {{ fcd ->
def segments = fcd.relativePath.segments
if (segments[0] == 'impl') {{
fcd.relativePath = new RelativePath(true, segments.drop(1))
}}
}}
includeEmptyDirs = false
}}
def manifestFile = android.sourceSets.main.manifest.srcFile
def manifestXml = new XmlParser().parse(manifestFile)
def packageName = manifestXml['@package']
def groupName = packageName.tokenize('.')[0..-2].join('.')
def androidNs = new groovy.xml.Namespace("http://schemas.android.com/apk/res/android")
def packageVersion = manifestXml.attributes()[androidNs.versionName]
def writePomTask = project.tasks.create(name: "writePom${{suffix}}") {{
pom {{
project {{
groupId groupName
version packageVersion
packaging 'aar'
}}
}}.writeTo("$buildDir/${{rootProject.name}}$packageSuffix-pom.xml")
}}
tasks["bundle${{suffix}}Aar"].dependsOn sourcesJarTask
tasks["bundle${{suffix}}Aar"].dependsOn writePomTask
}}
android.libraryVariants.all {{ variant ->
def capitalizedVariantName = variant.name.capitalize()
def suffix = variant.buildType.name.capitalize()
def javadocTask = project.tasks.create(name: "generate${{capitalizedVariantName}}Javadoc", type: Javadoc) {{
group = "Javadoc"
description "Generates Javadoc for $capitalizedVariantName"
title = "Yandex documentation"
source = android.sourceSets.main.java.srcDirs
include "**/*/yandex/*/**"
// TODO: remove this when we support internal doc exclusion in IDL
// https://st.yandex-team.ru/MAPSMOBCORE-11364
exclude "**/internal/**"
ext.androidJar = "${{android.sdkDirectory.path}}/platforms/${{android.compileSdkVersion}}/android.jar"
classpath =
files(android.getBootClasspath().join(File.pathSeparator)) +
configurations.compile +
files(ext.androidJar) +
files(variant.javaCompile.outputs.files)
destinationDir = file("$buildDir/${{rootProject.name}}-javadoc/$capitalizedVariantName/")
options.doclet("ExcludeDoclet")
options.docletpath(
files(repositories.maven.url).getAsFileTree()
.matching{{include "**/exclude-doclet-1.0.0.jar"}}
.getSingleFile())
options.charSet = "UTF-8"
options.encoding = "UTF-8"
failOnError false
afterEvaluate {{
def dependencyTree = project.configurations.compile.getAsFileTree()
def aar_set = dependencyTree.matching{{include "**/*.aar"}}.getFiles()
def jar_tree = dependencyTree.matching{{include "**/*.jar"}}
aar_set.each{{ aar ->
def outputPath = "$buildDir/tmp/aarJar/${{aar.name.replace('.aar', '.jar')}}"
classpath += files(outputPath)
dependsOn task(name: "extract_${{aar.getAbsolutePath().replace(File.separatorChar, '_' as char)}}-${{capitalizedVariantName}}").doLast {{
extractClassesJar(aar, outputPath)
}}
}}
}}
}}
def packageJavadocTask = project.tasks.create(name: "package${{capitalizedVariantName}}Javadoc", type: Tar) {{
description "Makes an archive from Javadoc output"
from "${{buildDir}}/${{rootProject.name}}-javadoc/$capitalizedVariantName/"
archiveFileName = "${{rootProject.name}}-javadoc.tar.gz"
destinationDirectory = new File("${{buildDir}}")
dependsOn javadocTask
}}
{enable_javadoc}
}}
}}
private def extractClassesJar(aarPath, outputPath) {{
if (!aarPath.exists()) {{
throw new GradleException("AAR $aarPath not found")
}}
def zip = new ZipFile(aarPath)
zip.entries().each {{
if (it.name == "classes.jar") {{
def path = Paths.get(outputPath)
if (!Files.exists(path)) {{
Files.createDirectories(path.getParent())
Files.copy(zip.getInputStream(it), path)
}}
}}
}}
zip.close()
}}
"""
def gen_build_script(args):
def wrap(items):
return ',\n '.join('"{}"'.format(x) for x in items)
bundles = []
bundles_dirs = set(args.flat_repos)
for bundle in args.bundles:
dir_name, base_name = os.path.split(bundle)
assert(len(dir_name) > 0 and len(base_name) > 0)
name, ext = os.path.splitext(base_name)
assert(len(name) > 0 and ext == '.aar')
bundles_dirs.add(dir_name)
bundles.append('com.yandex:{}@aar'.format(name))
if len(bundles_dirs) > 0:
flat_dirs_repo = FLAT_DIRS_REPO_TEMPLATE.format(dirs=wrap(bundles_dirs))
else:
flat_dirs_repo = ''
maven_repos = ''.join(MAVEN_REPO_TEMPLATE.format(repo=repo) for repo in args.maven_repos)
if args.keystore:
keystore = KEYSTORE_TEMPLATE.format(keystore=args.keystore)
else:
keystore = ''
if args.generate_doc:
enable_javadoc = ENABLE_JAVADOC
else:
enable_javadoc = ''
if args.do_not_strip:
do_not_strip = DO_NOT_STRIP
else:
do_not_strip = ''
return AAR_TEMPLATE.format(
aars=wrap(args.aars),
aidl_dirs=wrap(args.aidl_dirs),
assets_dirs=wrap(args.assets_dirs),
bundles=wrap(bundles),
do_not_strip=do_not_strip,
enable_javadoc=enable_javadoc,
flat_dirs_repo=flat_dirs_repo,
java_dirs=wrap(args.java_dirs),
jni_libs_dirs=wrap(args.jni_libs_dirs),
keystore=keystore,
manifest=args.manifest,
maven_repos=maven_repos,
proguard_rules=args.proguard_rules,
res_dirs=wrap(args.res_dirs),
)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--aars', nargs='*', default=[])
parser.add_argument('--aidl-dirs', nargs='*', default=[])
parser.add_argument('--assets-dirs', nargs='*', default=[])
parser.add_argument('--bundle-name', nargs='?', default='default-bundle-name')
parser.add_argument('--bundles', nargs='*', default=[])
parser.add_argument('--do-not-strip', action='store_true')
parser.add_argument('--flat-repos', nargs='*', default=[])
parser.add_argument('--generate-doc', action='store_true')
parser.add_argument('--java-dirs', nargs='*', default=[])
parser.add_argument('--jni-libs-dirs', nargs='*', default=[])
parser.add_argument('--keystore', default=None)
parser.add_argument('--manifest', required=True)
parser.add_argument('--maven-repos', nargs='*', default=[])
parser.add_argument('--output-dir', required=True)
parser.add_argument('--peers', nargs='*', default=[])
parser.add_argument('--proguard-rules', nargs='?', default=None)
parser.add_argument('--res-dirs', nargs='*', default=[])
args = parser.parse_args()
if args.proguard_rules is None:
args.proguard_rules = os.path.join(args.output_dir, 'proguard-rules.txt')
with open(args.proguard_rules, 'w') as f:
pass
for index, jsrc in enumerate(filter(lambda x: x.endswith('.jsrc'), args.peers)):
jsrc_dir = os.path.join(args.output_dir, 'jsrc_{}'.format(str(index)))
os.makedirs(jsrc_dir)
with tarfile.open(jsrc, 'r') as tar:
tar.extractall(path=jsrc_dir)
args.java_dirs.append(jsrc_dir)
args.build_gradle = os.path.join(args.output_dir, 'build.gradle')
args.settings_gradle = os.path.join(args.output_dir, 'settings.gradle')
content = gen_build_script(args)
with open(args.build_gradle, 'w') as f:
f.write(content)
if args.bundle_name:
with open(args.settings_gradle, 'w') as f:
f.write('rootProject.name = "{}"'.format(args.bundle_name))
|
py | b40ccbf4a2dc875da9468f23dcffc42ffdf92720 | def keys(valid, passed, required=None, name='spec'):
passed = set(passed)
if valid:
extra = passed - set(valid)
if extra:
raise KeyError("Error validating {}, the following keys aren't "
"valid: {}. Valid keys are: {}".format(
name, extra, valid))
if required:
missing = set(required) - passed
if missing:
raise KeyError(f"Error validating {name}. Missing keys: {missing}")
|
py | b40ccd0cce611bca3dbc4552500f55cd9b5626b7 | #!/usr/bin/env python
import atexit
import contextlib
import errno
import platform
import re
import shutil
import ssl
import subprocess
import sys
import tarfile
import tempfile
import urllib2
import os
import zipfile
from config import is_verbose_mode
from env_util import get_vs_env
BOTO_DIR = os.path.abspath(os.path.join(__file__, '..', '..', '..', 'vendor',
'boto'))
def get_host_arch():
"""Returns the host architecture with a predictable string."""
host_arch = platform.machine()
# Convert machine type to format recognized by gyp.
if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
host_arch = 'ia32'
elif host_arch in ['x86_64', 'amd64']:
host_arch = 'x64'
elif host_arch.startswith('arm'):
host_arch = 'arm'
# platform.machine is based on running kernel. It's possible to use 64-bit
# kernel with 32-bit userland, e.g. to give linker slightly more memory.
# Distinguish between different userland bitness by querying
# the python binary.
if host_arch == 'x64' and platform.architecture()[0] == '32bit':
host_arch = 'ia32'
return host_arch
def tempdir(prefix=''):
directory = tempfile.mkdtemp(prefix=prefix)
atexit.register(shutil.rmtree, directory)
return directory
@contextlib.contextmanager
def scoped_cwd(path):
cwd = os.getcwd()
os.chdir(path)
try:
yield
finally:
os.chdir(cwd)
@contextlib.contextmanager
def scoped_env(key, value):
origin = ''
if key in os.environ:
origin = os.environ[key]
os.environ[key] = value
try:
yield
finally:
os.environ[key] = origin
def download(text, url, path):
safe_mkdir(os.path.dirname(path))
with open(path, 'wb') as local_file:
if hasattr(ssl, '_create_unverified_context'):
ssl._create_default_https_context = ssl._create_unverified_context
web_file = urllib2.urlopen(url)
file_size = int(web_file.info().getheaders("Content-Length")[0])
downloaded_size = 0
block_size = 128
ci = os.environ.get('CI') == '1'
while True:
buf = web_file.read(block_size)
if not buf:
break
downloaded_size += len(buf)
local_file.write(buf)
if not ci:
percent = downloaded_size * 100. / file_size
status = "\r%s %10d [%3.1f%%]" % (text, downloaded_size, percent)
print status,
if ci:
print "%s done." % (text)
else:
print
return path
def extract_tarball(tarball_path, member, destination):
with tarfile.open(tarball_path) as tarball:
tarball.extract(member, destination)
def extract_zip(zip_path, destination):
if sys.platform == 'darwin':
# Use the unzip command on Mac to keep symbolic links in the zip file working.
execute(['unzip', zip_path, '-d', destination])
else:
with zipfile.ZipFile(zip_path) as z:
z.extractall(destination)
def make_zip(zip_file_path, files, dirs):
safe_unlink(zip_file_path)
if sys.platform == 'darwin':
files += dirs
execute(['zip', '-r', '-y', zip_file_path] + files)
else:
zip_file = zipfile.ZipFile(zip_file_path, "w", zipfile.ZIP_DEFLATED)
for filename in files:
zip_file.write(filename, filename)
for dirname in dirs:
for root, _, filenames in os.walk(dirname):
for f in filenames:
zip_file.write(os.path.join(root, f))
zip_file.close()
def rm_rf(path):
try:
shutil.rmtree(path)
except OSError:
pass
def safe_unlink(path):
try:
os.unlink(path)
except OSError as e:
if e.errno != errno.ENOENT:
raise
def safe_mkdir(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def execute(argv, env=os.environ):
if is_verbose_mode():
print ' '.join(argv)
try:
output = subprocess.check_output(argv, stderr=subprocess.STDOUT, env=env)
if is_verbose_mode():
print output
return output
except subprocess.CalledProcessError as e:
print e.output
raise e
def execute_stdout(argv, env=os.environ, cwd=None):
  if is_verbose_mode():
    print ' '.join(argv)
    try:
      subprocess.check_call(argv, env=env, cwd=cwd)
    except subprocess.CalledProcessError as e:
      print e.output
      raise e
  else:
    # Non-verbose: capture output and only surface it on failure.
    execute(argv, env)
def electron_gyp():
SOURCE_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
gyp = os.path.join(SOURCE_ROOT, 'electron.gyp')
with open(gyp) as f:
obj = eval(f.read())
return obj['variables']
def get_electron_version():
return 'v' + electron_gyp()['version%']
def parse_version(version):
if version[0] == 'v':
version = version[1:]
vs = version.split('.')
if len(vs) > 4:
return vs[0:4]
else:
return vs + ['0'] * (4 - len(vs))
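# Examples (illustrative): parse_version('v1.2.3') -> ['1', '2', '3', '0'];
# parse_version('1.2.3.4.5') -> ['1', '2', '3', '4'] (extra parts dropped).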
def boto_path_dirs():
return [
os.path.join(BOTO_DIR, 'build', 'lib'),
os.path.join(BOTO_DIR, 'build', 'lib.linux-x86_64-2.7')
]
def run_boto_script(access_key, secret_key, script_name, *args):
env = os.environ.copy()
env['AWS_ACCESS_KEY_ID'] = access_key
env['AWS_SECRET_ACCESS_KEY'] = secret_key
env['PYTHONPATH'] = os.path.pathsep.join(
[env.get('PYTHONPATH', '')] + boto_path_dirs())
boto = os.path.join(BOTO_DIR, 'bin', script_name)
execute([sys.executable, boto] + list(args), env)
def s3put(bucket, access_key, secret_key, prefix, key_prefix, files):
args = [
'--bucket', bucket,
'--prefix', prefix,
'--key_prefix', key_prefix,
'--grant', 'public-read'
] + files
run_boto_script(access_key, secret_key, 's3put', *args)
def import_vs_env(target_arch):
if sys.platform != 'win32':
return
if target_arch == 'ia32':
vs_arch = 'amd64_x86'
else:
vs_arch = 'x86_amd64'
env = get_vs_env('14.0', vs_arch)
os.environ.update(env)
|
py | b40ccd672a75a7153eed4226ad9422e949340e68 | # -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pypm
import Axon
from Kamaelia.Chassis.Graphline import Graphline
from Kamaelia.UI.Pygame.Button import Button
class MidiTest(Axon.Component.component):
def __init__(self, port_number):
super(MidiTest, self).__init__()
pypm.Initialize()
self.output = pypm.Output(port_number, 0)
def main(self):
while 1:
if self.dataReady("inbox"):
# Send a MIDI note-on (note number taken from inbox) when data arrives
self.output.WriteShort(0x90, self.recv("inbox"), 127)
yield 1
if __name__ == "__main__":
Graphline(bd = Button(caption="BD", msg=36, position=(0, 0)),
sd = Button(caption="SD", msg=38, position = (50, 0)),
hh = Button(caption="HH", msg=46, position = (100, 0)),
midi_out = MidiTest(0),
linkages = {
("bd", "outbox") : ("midi_out", "inbox"),
("hh", "outbox") : ("midi_out", "inbox"),
("sd", "outbox") : ("midi_out", "inbox"),
}
).run() |
py | b40ccd947df4b44bdc46f8e50979c6209d11110b | #!/usr/bin/env python
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of platform-specific functionality.
For each function or class described in `tornado.platform.interface`,
the appropriate platform-specific implementation exists in this module.
Most code that needs access to this functionality should do e.g.::
from tornado.platform.auto import set_close_exec
"""
from __future__ import absolute_import, division, print_function, with_statement
import os
if os.name == 'nt':
from .common import Waker
from .windows import set_close_exec
else:
from .posix import set_close_exec, Waker
try:
# monotime monkey-patches the time module to have a monotonic function
# in versions of python before 3.3.
import monotime
except ImportError:
pass
try:
from time import monotonic as monotonic_time
except ImportError:
monotonic_time = None
|
py | b40cce6f6c604c368f7a2a62788f82564228b3cf | # coding: utf-8
#import sys,os
#sys.path.append(os.pardir)
from common.util import preprocess, create_co_matrix, cos_similarity
text = 'You say goodbye and I say hello.'
corpus, word_to_id, id_to_word = preprocess(text)
vocab_size = len(word_to_id)
C = create_co_matrix(corpus, vocab_size)
c0 = C[word_to_id['you']] # word vector for "you"
c1 = C[word_to_id['i']] # word vector for "i"
print(cos_similarity(c0, c1))
|
py | b40ccf3fce6268e0e163e3aff1aef783feb4a27a | INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"nested_admin",
"rest_framework",
"rest_framework.authtoken",
"social_django",
"template",
]
|
py | b40cd0a6a3082ff3aa5c193ff577cf5979a9d325 | from __future__ import absolute_import, unicode_literals
import json
import os
from common_tests import CommonTestDriver
from test_case import TestCase
class TypeErrorAtPosDriver(CommonTestDriver):
error_file_ext = ".err"
auto_namespace_map = '{"PHP": "HH\\\\Lib\\\\PHP"}'
repo_dir = "hphp/hack/test/integration/data/holes"
def write_load_config(
self, use_serverless_ide: bool = False, use_saved_state: bool = False
) -> None:
with open(os.path.join(self.repo_dir, ".hhconfig"), "w") as f:
f.write(
"""
auto_namespace_map = {}
allowed_fixme_codes_strict = 4101,4323
allowed_decl_fixme_codes = 4101,4323
disable_xhp_element_mangling = false
""".format(
self.auto_namespace_map
)
)
def expected_file_name(self, file_name: str, row_num: int, col_num: int) -> str:
return "{}+{}+{}.err.exp".format(file_name, row_num, col_num)
def expected_file_path(self, file_name: str, row_num: int, col_num: int) -> str:
return os.path.join(
self.repo_dir, self.expected_file_name(file_name, row_num, col_num)
)
def expected_type_error(self, file_name: str, row_num: int, col_num: int) -> str:
with open(
self.expected_file_path(file_name, row_num, col_num)
) as expected_file:
return expected_file.read().strip()
def extract_type_error(self, file_name: str, row_num: int, col_num: int) -> str:
arg = os.path.join(
self.repo_dir, "{}:{}:{}".format(file_name, row_num, col_num)
)
extracted_error, _, retcode = self.run_check(
options=["--type-error-at-pos", arg, "--json"]
)
self.assertEqual(
0,
retcode,
"hh --type-error-at-pos {} returned non-zero code".format(arg),
)
return json.dumps(
json.loads(extracted_error.replace(self.repo_dir, "")), indent=2
)
def assert_expected_error_matches_extracted_error(
self, file_name: str, row_num: int, col_num: int
) -> None:
self.assertMultiLineEqual(
self.expected_type_error(file_name, row_num, col_num),
self.extract_type_error(file_name, row_num, col_num),
f"The expected result of extracting type error in {file_name}, row {row_num}, column {col_num}, doesn't match the extracted ",
)
class TestTypeErrorAtPos(TestCase[TypeErrorAtPosDriver]):
@classmethod
def setUpClass(cls) -> None:
super().setUpClass()
@classmethod
def get_template_repo(cls) -> str:
return "hphp/hack/test/integration/data/holes"
@classmethod
def get_test_driver(cls) -> TypeErrorAtPosDriver:
return TypeErrorAtPosDriver()
def test_type_error(self) -> None:
self.test_driver.write_load_config()
cases = [
("call_single.php", [(7, 10), (10, 10)]),
("call_multiple.php", [(7, 5), (7, 8)]),
("call_unpack.php", [(7, 8)]),
("return_expr_only.php", [(5, 10), (5, 21)]),
("return_and_fn_arg.php", [(5, 10), (5, 21)]),
]
for (file_name, positions) in cases:
for (row_num, col_num) in positions:
with self.subTest(msg=f"{file_name}:{row_num}:{col_num}"):
self.test_driver.assert_expected_error_matches_extracted_error(
file_name, row_num, col_num
)
|
py | b40cd0f70493b21db10248663c559a618bc851ef | import os.path
import os
from hypernets.utils import common as common_util
def test_camel_keys_to_snake():
input_dict = {
'datasetConf': {
'trainData': './train.csv'
},
'name': 'with-feature-selection',
'jobs': [
{
'featureSelection': {
'leastFeatures': 10
},
'callbackSetting': [{
'className': 'hypernets.core.ConsoleCallback'
}]
}
]
}
ret_dict = common_util.camel_keys_to_snake(input_dict)
assert ret_dict['dataset_conf']['train_data'] == input_dict['datasetConf']['trainData']
assert ret_dict['name'] == input_dict['name']
input_job_conf_dict = input_dict['jobs'][0]
ret_job_conf_dict = ret_dict['jobs'][0]
assert ret_job_conf_dict['feature_selection']['least_features'] == \
input_job_conf_dict['featureSelection']['leastFeatures']
assert ret_job_conf_dict['callback_setting'][0]['class_name'] == \
input_job_conf_dict['callbackSetting'][0]['className']
def test_make_tempfile():
temp_file_path: str = common_util.get_temp_file_path(prefix='prefix', suffix='.txt')
assert not os.path.exists(temp_file_path)
assert os.path.basename(temp_file_path).startswith('prefix')
assert os.path.basename(temp_file_path).endswith('.txt')
temp_file_dir_created = common_util.get_temp_dir_path(prefix='prefix', suffix='prefix', create=True)
assert os.path.exists(temp_file_dir_created)
temp_file_dir_not_created = common_util.get_temp_dir_path(prefix='prefix', suffix='prefix', create=False)
assert not os.path.exists(temp_file_dir_not_created)
|
py | b40cd177016734ab5147f4d258b8eab3bb69465f | """
weasyprint.tests.test_draw.svg.test_images
------------------------------------------
Test how images are drawn in SVG.
"""
import pytest
from weasyprint.urls import path2url
from ...testing_utils import assert_no_logs, resource_filename
from .. import assert_pixels
@assert_no_logs
def test_image_svg():
assert_pixels('test_image_svg', 4, 4, '''
____
____
__B_
____
''', '''
<style>
@page { size: 4px 4px }
svg { display: block }
</style>
<svg width="4px" height="4px" xmlns="http://www.w3.org/2000/svg">
<svg x="1" y="1" width="2" height="2" viewBox="0 0 10 10">
<rect x="5" y="5" width="5" height="5" fill="blue" />
</svg>
</svg>
''')
@assert_no_logs
def test_image_svg_viewbox():
assert_pixels('test_image_svg_viewbox', 4, 4, '''
____
____
__B_
____
''', '''
<style>
@page { size: 4px 4px }
svg { display: block }
</style>
<svg viewBox="0 0 4 4" xmlns="http://www.w3.org/2000/svg">
<svg x="1" y="1" width="2" height="2" viewBox="0 0 10 10">
<rect x="5" y="5" width="5" height="5" fill="blue" />
</svg>
</svg>
''')
@assert_no_logs
def test_image_svg_align_default():
assert_pixels('test_image_svg_align_default', 8, 8, '''
__BRRR__
__BRRR__
__RRRG__
__RRRG__
________
________
________
________
''', '''
<style>
@page { size: 8px 8px }
svg { display: block }
</style>
<svg width="8px" height="4px" viewBox="0 0 4 4"
xmlns="http://www.w3.org/2000/svg">
<rect width="4" height="4" fill="red" />
<rect width="1" height="2" fill="blue" />
<rect x="3" y="2" width="1" height="2" fill="lime" />
</svg>
''')
@assert_no_logs
def test_image_svg_align_none():
assert_pixels('test_image_svg_align_none', 8, 8, '''
BBRRRRRR
BBRRRRRR
RRRRRRGG
RRRRRRGG
________
________
________
________
''', '''
<style>
@page { size: 8px 8px }
svg { display: block }
</style>
<svg width="8px" height="4px" viewBox="0 0 4 4"
preserveAspectRatio="none"
xmlns="http://www.w3.org/2000/svg">
<rect width="4" height="4" fill="red" />
<rect width="1" height="2" fill="blue" />
<rect x="3" y="2" width="1" height="2" fill="lime" />
</svg>
''')
@assert_no_logs
def test_image_svg_align_meet_x():
assert_pixels('test_image_svg_align_meet_x', 8, 8, '''
____BRRR
____BRRR
____RRRG
____RRRG
________
________
________
________
''', '''
<style>
@page { size: 8px 8px }
svg { display: block }
</style>
<svg width="8px" height="4px" viewBox="0 0 4 4"
preserveAspectRatio="xMaxYMax meet"
xmlns="http://www.w3.org/2000/svg">
<rect width="4" height="4" fill="red" />
<rect width="1" height="2" fill="blue" />
<rect x="3" y="2" width="1" height="2" fill="lime" />
</svg>
''')
@assert_no_logs
def test_image_svg_align_meet_y():
assert_pixels('test_image_svg_align_meet_y', 8, 8, '''
________
________
________
________
BRRR____
BRRR____
RRRG____
RRRG____
''', '''
<style>
@page { size: 8px 8px }
svg { display: block }
</style>
<svg width="4px" height="8px" viewBox="0 0 4 4"
preserveAspectRatio="xMaxYMax meet"
xmlns="http://www.w3.org/2000/svg">
<rect width="4" height="4" fill="red" />
<rect width="1" height="2" fill="blue" />
<rect x="3" y="2" width="1" height="2" fill="lime" />
</svg>
''')
@assert_no_logs
def test_image_svg_align_slice_x():
assert_pixels('test_image_svg_align_slice_x', 8, 8, '''
BBRRRRRR
BBRRRRRR
BBRRRRRR
BBRRRRRR
________
________
________
________
''', '''
<style>
@page { size: 8px 8px }
svg { display: block }
</style>
<svg width="8px" height="4px" viewBox="0 0 4 4"
preserveAspectRatio="xMinYMin slice"
xmlns="http://www.w3.org/2000/svg">
<rect width="4" height="4" fill="red" />
<rect width="1" height="2" fill="blue" />
<rect x="3" y="2" width="1" height="2" fill="lime" />
</svg>
''')
@assert_no_logs
def test_image_svg_align_slice_y():
assert_pixels('test_image_svg_align_slice_y', 8, 8, '''
BBRR____
BBRR____
BBRR____
BBRR____
RRRR____
RRRR____
RRRR____
RRRR____
''', '''
<style>
@page { size: 8px 8px }
svg { display: block }
</style>
<svg width="4px" height="8px" viewBox="0 0 4 4"
preserveAspectRatio="xMinYMin slice"
xmlns="http://www.w3.org/2000/svg">
<rect width="4" height="4" fill="red" />
<rect width="1" height="2" fill="blue" />
<rect x="3" y="2" width="1" height="2" fill="lime" />
</svg>
''')
@pytest.mark.xfail
@assert_no_logs
def test_image_svg_percentage():
assert_pixels('test_image_svg_percentage', 4, 4, '''
____
____
__B_
____
''', '''
<style>
@page { size: 4px 4px }
svg { display: block }
</style>
<svg width="100%" height="100%" xmlns="http://www.w3.org/2000/svg">
<svg x="1" y="1" width="50%" height="50%" viewBox="0 0 10 10">
<rect x="5" y="5" width="5" height="5" fill="blue" />
</svg>
</svg>
''')
def test_image_svg_wrong():
assert_pixels('test_image_svg_wrong', 4, 4, '''
____
____
____
____
''', '''
<style>
@page { size: 4px 4px }
svg { display: block }
</style>
<svg width="4px" height="4px" xmlns="http://www.w3.org/2000/svg">
<That’s bad!
</svg>
''')
@assert_no_logs
def test_image_image():
assert_pixels('test_image_image', 4, 4, '''
rBBB
BBBB
BBBB
BBBB
''', '''
<style>
@page { size: 4px 4px }
svg { display: block }
</style>
<svg width="4px" height="4px" xmlns="http://www.w3.org/2000/svg">
<image xlink:href="%s" />
</svg>
''' % path2url(resource_filename('pattern.png')))
def test_image_image_wrong():
assert_pixels('test_image_image_wrong', 4, 4, '''
____
____
____
____
''', '''
<style>
@page { size: 4px 4px }
svg { display: block }
</style>
<svg width="4px" height="4px" xmlns="http://www.w3.org/2000/svg">
<image xlink:href="it doesn’t exist, mouhahahaha" />
</svg>
''')
|
py | b40cd1e7b602e59cf2d7b319013a6a8f72b4e200 |
"""
cd /Users/brunoflaven/Documents/02_copy/_000_IA_bruno_light/facial_recognition/007_play_with_face_recognition/python_face_recognition_mysql/
python 008_identify_multiple_faces_in_picture.py
"""
import face_recognition
from PIL import Image, ImageDraw
known_face_one = face_recognition.load_image_file('./images/known/macron_cocorico_gettyimages_1048128128.jpg')
known_face_one_encoding = face_recognition.face_encodings(known_face_one)[0]
known_face_two = face_recognition.load_image_file('./images/known/michelle_obama_first_official_portrait_as_first_lady.jpg')
known_face_two_encoding = face_recognition.face_encodings(known_face_two)[0]
known_face_three = face_recognition.load_image_file('./images/known/stevo_pendarovski_north_macedonia_750.jpeg')
known_face_three_encoding = face_recognition.face_encodings(known_face_three)[0]
# Create arrays of encodings and names
known_face_encodings = [
known_face_one_encoding,
known_face_two_encoding,
known_face_three_encoding
]
known_face_names = [
"Emmanuel Macron",
"Michelle Obama",
"Stevo Pendarovski"
]
# Load test image to find faces in
test_image = face_recognition.load_image_file('./images/known/stevo_pendarovski_michelle_obama_emmanuel_macron.jpg')
# Find faces in test image
face_locations = face_recognition.face_locations(test_image)
face_encodings = face_recognition.face_encodings(test_image, face_locations)
# Convert to PIL format
pil_image = Image.fromarray(test_image)
# Create a ImageDraw instance
draw = ImageDraw.Draw(pil_image)
# Loop through faces in test image
for (top, right, bottom, left), face_encoding in zip(face_locations, face_encodings):
matches = face_recognition.compare_faces(known_face_encodings, face_encoding)
name = "Unknown Person"
# If match
if True in matches:
first_match_index = matches.index(True)
name = known_face_names[first_match_index]
# Draw box
draw.rectangle(((left, top), (right, bottom)), outline=(102,51,204))
# purple box (102, 51, 204) with white label text (255, 255, 255)
# Draw label
text_width, text_height = draw.textsize(name)
draw.rectangle(((left,bottom - text_height - 10), (right, bottom)), fill=(102,51,204), outline=(102,51,204))
draw.text((left + 6, bottom - text_height - 5), name, fill=(255,255,255))
del draw
# Display image
pil_image.show()
# Save image
pil_image.save('./images_output/identified_stevo_pendarovski_michelle_obama_emmanuel_macron.jpg')
print("DONE - check the directory /images_output/")
|
py | b40cd1fae4adfcdb73d95b74364b5f1d3dffaf2a | from PIL import Image
import tempfile
# based on:
# http://stackoverflow.com/questions/69645/take-a-screenshot-via-a-python-script-linux
class GtkPixbufWrapper(object):
name = 'pygtk'
childprocess = False
def __init__(self):
import gtk
self.gtk = gtk
try:
gtk.gdk.Pixbuf
gtk.gdk.COLORSPACE_RGB
except AttributeError:
raise ImportError(
'Incompatible with Python3 / GDK3. Use gdk3pixbuf.')
def grab(self, bbox=None):
f = tempfile.NamedTemporaryFile(
suffix='.png', prefix='pyscreenshot_gtkpixbuf_')
filename = f.name
self.grab_to_file(filename, bbox)
im = Image.open(filename)
return im
def grab_to_file(self, filename, bbox=None):
"""http://www.pygtk.org/docs/pygtk/class-gdkpixbuf.html.
only "jpeg" or "png"
"""
w = self.gtk.gdk.get_default_root_window()
# Capture the whole screen.
if bbox is None:
sz = w.get_size()
pb = self.gtk.gdk.Pixbuf(
self.gtk.gdk.COLORSPACE_RGB, False, 8, sz[0], sz[1]) # 24bit RGB
pb = pb.get_from_drawable(
w, w.get_colormap(), 0, 0, 0, 0, sz[0], sz[1])
# Only capture what we need. The smaller the capture, the faster.
else:
sz = [bbox[2] - bbox[0], bbox[3] - bbox[1]]
pb = self.gtk.gdk.Pixbuf(
self.gtk.gdk.COLORSPACE_RGB, False, 8, sz[0], sz[1])
pb = pb.get_from_drawable(
w, w.get_colormap(), bbox[0], bbox[1], 0, 0, sz[0], sz[1])
assert pb
ftype = 'png'
if filename.endswith('.jpeg'):
ftype = 'jpeg'
pb.save(filename, ftype)
def backend_version(self):
return '.'.join(map(str, self.gtk.ver))
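# --- Usage sketch (illustrative; requires PyGTK 2 and a running X display) ---
if __name__ == '__main__':
    grabber = GtkPixbufWrapper()
    im = grabber.grab(bbox=(0, 0, 100, 100))  # PIL.Image of the top-left corner
    im.save('corner.png')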
|
py | b40cd324fddc02ff0f42a71daf3ee72cbff56327 | # Copyright 2019 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Circuit specifications for general Fock simulator backends."""
from .compiler import Compiler
class Fock(Compiler):
"""Compiler for general Fock backends."""
short_name = "fock"
interactive = True
primitives = {
# meta operations
"All",
"_New_modes",
"_Delete",
# state preparations
"Vacuum",
"Coherent",
"Squeezed",
"DisplacedSqueezed",
"Thermal",
"Fock",
"Catstate",
"Ket",
"DensityMatrix",
# measurements
"MeasureFock",
"MeasureHomodyne",
# channels
"LossChannel",
# single mode gates
"Dgate",
"Sgate",
"Rgate",
"Vgate",
"Kgate",
# two mode gates
"BSgate",
"CKgate",
"S2gate",
}
decompositions = {
"Interferometer": {},
"GraphEmbed": {},
"BipartiteGraphEmbed": {},
"GaussianTransform": {},
"Gaussian": {},
"Pgate": {},
"CXgate": {},
"CZgate": {},
"MZgate": {},
"Xgate": {},
"Zgate": {},
"Fouriergate": {},
}
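# --- Usage sketch (illustrative; assumes the `strawberryfields` package) ---
#
#     import strawberryfields as sf
#     from strawberryfields import ops
#     prog = sf.Program(2)
#     with prog.context as q:
#         ops.S2gate(0.5) | (q[0], q[1])
#     prog.compile(compiler="fock")  # validates the program against this target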
|
py | b40cd437b07976ee3c0909515215aaced1393b4a | """
Read NMEA from UDP or serial and send position and orientation to Water Linked Underwater GPS
"""
import requests
import argparse
import time
import logging
import sys
import serial
import pynmea2
import socket
log = logging.getLogger()
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
class SetupException(Exception):
pass
class SerialReader(object):
def __init__(self, port, baud):
try:
self.ser = serial.Serial(port, baud, timeout=5.0)
except serial.SerialException as err:
print("Serial connection error: {}".format(err))
raise SetupException()
def iter(self):
while True:
yield self.ser.read()
class UDPReader(object):
def __init__(self, host, port):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
self.sock.bind((host, port))
except socket.error as err:
print("UDP setup: Could not bind to {}:{}. Error: {}".format(host, port, err))
raise SetupException()
def iter(self):
while True:
data, addr = self.sock.recvfrom(1024)
print("Got {} bytes from {}: {}".format(len(data), addr, data))
if not data:
break
# Add a trailing newline if not present (data arrives as bytes)
if not data.endswith(b"\n"):
    data = data + b"\n"
yield data
self.sock.close()
def set_position_master(url, latitude, longitude, orientation):
payload = dict(lat=latitude, lon=longitude, orientation=orientation)
#Keep loop running even if for some reason there is no connection.
try:
requests.put(url, json=payload, timeout=1)
except requests.exceptions.RequestException as err:
print("Serial connection error: {}".format(err))
def run(base_url, conn, compass_src):
lat = 0
lon = 0
orientation = 0
gotUpdate = False
reader = pynmea2.NMEAStreamReader()
for data in conn.iter():
#In case the format is given in bytes
try:
data = data.decode('UTF-8')
except AttributeError:
pass
try:
for msg in reader.next(data):
if type(msg) == pynmea2.types.talker.GGA:
lat = float(msg.latitude)
lon = float(msg.longitude)
gotUpdate = True
elif type(msg) == pynmea2.types.talker.HDT and compass_src == "hdt":
orientation = float(msg.heading)
gotUpdate = True
elif type(msg) == pynmea2.types.talker.HDG and compass_src == "hdg":
orientation = float(msg.heading)
gotUpdate = True
elif type(msg) == pynmea2.types.talker.HDM and compass_src == "hdm":
orientation = float(msg.heading)
gotUpdate = True
except pynmea2.ParseError as e:
log.warning("Error while parsing NMEA string: {}".format(e))
if gotUpdate:
log.info('Sending position {} {} and orientation: {}'.format(lat, lon, orientation))
set_position_master('{}/api/v1/external/master'.format(base_url), lat, lon, orientation)
gotUpdate = False
def main():
valid_compass = ["hdt", "hdg", "hdm", "any"]
valid_compass_str = ', '.join(valid_compass)
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-u', '--url', help='IP/URL of Underwater GPS kit. Typically http://192.168.2.94', type=str, default='http://demo.waterlinked.com')
parser.add_argument('-c', '--compass', help='NMEA type to use as orientation source. Valid options: {}'.format(valid_compass_str), type=str, default='hdt')
# UDP options
parser.add_argument('-p', '--port', help="Port to listen for UDP packets. Default: 10110", type=int, default=10110)
parser.add_argument('-i', '--ip', help="Enable UDP by specifying interface to listen for UDP packets. Typically 'localhost' or '0.0.0.0'. Default disabled", type=str, default='')
# Serial options
parser.add_argument('-s', '--serial', help="Enable Serial by specifying serial port to use. Example: '/dev/ttyUSB0' or 'COM1' Default disabled", type=str, default='')
parser.add_argument('-b', '--baud', help="Serial port baud rate", type=int, default=9600)
args = parser.parse_args()
if not (args.ip or args.serial):
parser.print_help()
print("")
print("ERROR: Please specify either serial port or UDP port to use")
print("")
sys.exit(1)
if (args.ip and args.serial):
parser.print_help()
print("")
print("ERROR: Please specify either serial port or UDP port to use")
print("")
sys.exit(1)
args.compass = args.compass.lower()
valid_compass = ["hdt", "hdg", "hdm"]
if args.compass not in valid_compass:
print("")
print("ERROR: Please --compass as one of {}".format(valid_compass_str))
print("")
sys.exit(1)
print("Sending data to Underwater GPS on url: {}".format(args.url))
if args.serial:
print("Source Serial {} at {} baud".format(args.serial, args.baud))
try:
ser = SerialReader(args.serial, args.baud)
except SetupException:
print("Aborting")
sys.exit(1)
run(args.url, ser, args.compass)
return
if args.ip:
print("Source UDP port {} on interface {}".format(args.port, args.ip))
try:
reader = UDPReader(args.ip, args.port)
except SetupException:
print("Aborting")
sys.exit(1)
run(args.url, reader, args.compass)
return
if __name__ == "__main__":
main()
|
py | b40cd48331aac4a52e8e8417951e1655cc7a9bc7 |
lista = [[1,2,3],[4,5,6],[7,8,9]]
print(lista[0][1])
|
py | b40cd4d546bc11b410d2197874f2befe1330859c | """
WSGI config for mysite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings.production")
application = get_wsgi_application()
|
py | b40cd54c9cd71164df0e0f916eb07bd5a7c981b1 | from .CoreComponent import CoreComponent
class FishHeterotrophicComponent(CoreComponent):
"""Registers the heterotrophic nutrient sector component of the fish model.
Model taken from [2] (see Aquaponics.py for sources).
Parameters
----------
m : GEKKO
The gekko model.
Connecting Variables
--------------------
DO : MV
Input. Receives from environment.
Dissolved oxygen.
DOC : MV (mg DO / l)
Input. Receives from environment.
Dissolved oxygen concentration.
AFM : MV (kcal / day / pond)
Input. Receives from autotrophic food pool.
Autotrophic food entering heterotrophic food pool.
FW : MV (kcal / day / pond)
Input. Receives from fish anabolism.
Fish fecal wastes.
HFC : MV (kcal / day / pond)
Input. Receives from fish anabolism.
Heterotrophic food loss from fish grazing.
HFe : SV (kcal / pond)
Output.
Quantity of food nutrients in terms of energy.
HFp : SV (kcal / pond)
Output.
Quantity of food nutrients in terms of protein.
f1DO : SV
Output.
Decomposition of heterotrophic particles.
HFS : SV
Output.
Heterotrophic food loss rate due to sedimentation.
HFD : SV
Output.
Heterotrophic food loss rate due to decomposition.
"""
def __init__(self, m, aqua):
CoreComponent.__init__(self, m, aqua)
def register_connecting(self, **kwargs):
"""Registers all connecting variables associated with this component to
the gekko model.
kwargs
------
DO_0 : float, default = 5
DOC_0 : float, default = 5 (mg DO / l)
HFe_0 : float, default = 0 (kcal / pond)
HFp_0 : float, default = 0 (kcal / pond)
AFM_0 : float, default = 0 (kcal / day / pond)
FW_0 : float, default = 0 (kcal / day / pond)
HFC_0 : float, default = 0 (kcal / day / pond)
"""
# Aliases
a = self.aqua
MV = self.m.MV
SV = self.m.SV
# Initial Conditions
DO_0 = kwargs.get('DO_0', 5)
DOC_0 = kwargs.get('DOC_0', 5)
HFe_0 = kwargs.get('HFe_0', 0)
HFp_0 = kwargs.get('HFp_0', 0)
AFM_0 = kwargs.get('AFM_0', 0)
FW_0 = kwargs.get('FW_0', 0)
HFC_0 = kwargs.get('HFC_0', 0)
# Connecting Variables
a.register_connecting('DO', MV, DO_0)
a.register_connecting('DOC', MV, DOC_0)
a.register_connecting('AFM', MV, AFM_0)
a.register_connecting('FW', MV, FW_0)
a.register_connecting('HFC', MV, HFC_0)
a.register_connecting('HFe', SV, HFe_0, lb=0)
a.register_connecting('HFp', SV, HFp_0, lb=0)
a.register_connecting('f1DO', SV, 0)
a.register_connecting('HFS', SV, 0)
a.register_connecting('HFD', SV, 0)
def register_equations(self, **kwargs):
"""Registers all equations and intermediates associated with this
component to the gekko model.
kwargs
------
kswitch : int > 0
The quality of the switch/max/min functions. Larger is better.
(coming soon, see paper for model parameters)
"""
# -------
# Aliases
# -------
m = self.m
a = self.aqua
# ----------
# Parameters
# ----------
kswitch = kwargs.get('kswitch', 100)
# Tilapia params from paper
ks = m.Param(value=kwargs.get('ks', 0.14))
kd = m.Param(value=kwargs.get('kd', 0.12))
kp2 = m.Param(value=kwargs.get('kp2', 0.12))
kDO = m.Param(value=kwargs.get('kDO', 0.14))
# --------------------
# Connecting Variables
# --------------------
DO = a.DO
DOC = a.DOC
AFM = a.AFM
FW = a.FW
HFC = a.HFC
HFe = a.HFe
HFp = a.HFp
f1DO = a.f1DO
HFS = a.HFS
HFD = a.HFD
# ---------------------------
# Equations and Intermediates
# ---------------------------
# Decomposition of heterotrophic particles
m.Equation(
f1DO == m.switch(m.exp(-kDO * (4 - DOC) ** 2), 1, DO, 4, k=kswitch)
)
# Heterotrophic food loss rate due to sedimentation
m.Equation(HFS == HFe * ks)
# Heterotrophic food loss rate due to decomposition
m.Equation(HFD == HFe * kd * f1DO)
# Heterotrophic food quantity dynamics
m.Equation(HFe.dt() == AFM + FW - HFC - HFS - HFD)
m.Equation(HFp.dt() == HFe.dt() * kp2)
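# --- Usage sketch (illustrative; `aqua` is the parent Aquaponics model that
# supplies register_connecting(), as described in the docstrings above) ---
#
#     m = GEKKO()  # assumes `from gekko import GEKKO`
#     comp = FishHeterotrophicComponent(m, aqua)
#     comp.register_connecting(HFe_0=10, HFp_0=1.2)
#     comp.register_equations(kswitch=100)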
|
py | b40cd61d314462990e650bf99b1becd9ce7c18cd | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from pyspark import SparkContext, RDD
from sedona.core.jvm.translate import JvmSedonaPythonConverter
from sedona.utils.spatial_rdd_parser import SedonaPickler
class SedonaRDD:
def __init__(self, jsrdd, sc: SparkContext):
self.jsrdd = jsrdd
self.sc = sc
def to_rdd(self) -> RDD:
jvm = self.sc._jvm
serialized = JvmSedonaPythonConverter(jvm). \
translate_spatial_rdd_to_python(self.jsrdd)
return RDD(serialized, self.sc, SedonaPickler())
class SedonaPairRDD:
def __init__(self, jsrdd, sc: SparkContext):
self.jsrdd = jsrdd
self.sc = sc
def to_rdd(self) -> RDD:
jvm = self.sc._jvm
serialized = JvmSedonaPythonConverter(jvm). \
translate_spatial_pair_rdd_to_python(self.jsrdd)
return RDD(serialized, self.sc, SedonaPickler())
class SedonaPairRDDList:
def __init__(self, jsrdd, sc: SparkContext):
self.jsrdd = jsrdd
self.sc = sc
def to_rdd(self):
jvm = self.sc._jvm
serialized = JvmSedonaPythonConverter(jvm). \
translate_spatial_pair_rdd_with_list_to_python(self.jsrdd)
return RDD(serialized, self.sc, SedonaPickler())
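# --- Usage sketch (illustrative; `jvm_handle` would come from a Sedona
# spatial RDD created elsewhere, e.g. via the adapters in sedona.core) ---
#
#     spatial = SedonaRDD(jvm_handle, sc)
#     py_rdd = spatial.to_rdd()      # regular PySpark RDD
#     first_five = py_rdd.take(5)    # elements deserialized by SedonaPickler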
|
py | b40cd6c815c784a47cd41fb796f02081f0a11499 | from datetime import datetime
from rdl.shared import Constants
class DataLoadTracker:
def __init__(
self,
execution_id,
model_name,
model_checksum,
model_config,
is_full_refresh,
full_refresh_reason,
change_tracking_info,
):
self.model_name = model_name
self.model_checksum = model_checksum
self.model_config = model_config
self.is_full_refresh = is_full_refresh
self.status = Constants.ExecutionModelStatus.STARTED
self.change_tracking_info = change_tracking_info
self.execution_id = execution_id
self.full_refresh_reason = full_refresh_reason
self.failure_reason = None
self.batches = []
self.total_row_count = 0
def start_batch(self):
batch = self.Batch()
self.batches.append(batch)
return batch
def data_load_successful(self):
self.data_load_completed(Constants.ExecutionModelStatus.SUCCESSFUL)
def data_load_failed(self, failure_reason=None):
self.data_load_completed(Constants.ExecutionModelStatus.FAILED, failure_reason)
def data_load_completed(self, execution_status, failure_reason=None):
self.status = execution_status
self.failure_reason = failure_reason
for batch in self.batches:
self.total_row_count += batch.row_count
class Batch:
row_count = 0
extract_started = None
extract_completed = None
load_completed = None
status = Constants.BatchExecutionStatus.STARTED
extract_execution_time = None
extract_rows_per_second = 0
load_execution_time = None
load_rows_per_second = 0
total_rows_per_second = 0
total_execution_time = None
def __init__(self):
self.extract_started = datetime.now()
self.status = Constants.BatchExecutionStatus.STARTED
def extract_completed_successfully(self, row_count):
self.status = Constants.BatchExecutionStatus.EXTRACT_COMPLETED_SUCCESSFULLY
self.extract_completed = datetime.now()
self.row_count = row_count
self.extract_execution_time = self.extract_completed - self.extract_started
if self.extract_execution_time.total_seconds() == 0:
self.extract_rows_per_second = self.row_count
else:
self.extract_rows_per_second = (
self.row_count / self.extract_execution_time.total_seconds()
)
def load_completed_successfully(self):
self.status = Constants.BatchExecutionStatus.LOAD_COMPLETED_SUCCESSFULLY
self.load_completed = datetime.now()
self.load_execution_time = self.load_completed - self.extract_completed
if self.load_execution_time.total_seconds() == 0:
self.load_rows_per_second = self.row_count
else:
self.load_rows_per_second = (
self.row_count / self.load_execution_time.total_seconds()
)
self.total_execution_time = self.load_completed - self.extract_started
if self.total_execution_time.total_seconds() == 0:
self.total_rows_per_second = self.row_count
else:
self.total_rows_per_second = (
self.row_count / self.total_execution_time.total_seconds()
)
def load_skipped_due_to_zero_rows(self):
self.status = Constants.BatchExecutionStatus.SKIPPED_AS_ZERO_ROWS
self.load_completed = datetime.now()
def get_statistics(self):
return (
f"Rows: {self.row_count}; "
f"Extract Execution Time: {self.extract_execution_time} "
f"@ {self.extract_rows_per_second:.2f} rows per second; "
f"Load Execution Time: {self.load_execution_time} "
f"@ {self.load_rows_per_second:.2f} rows per second; "
f"Total Execution Time: {self.total_execution_time} "
f"@ {self.total_rows_per_second:.2f} rows per second."
)
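
# Example usage (sketch; all values are illustrative, and change_tracking_info
# is whatever object the caller tracks - None here for brevity):
#
#   tracker = DataLoadTracker(
#       execution_id="exec-001", model_name="customers",
#       model_checksum="abc123", model_config={}, is_full_refresh=False,
#       full_refresh_reason=None, change_tracking_info=None)
#   batch = tracker.start_batch()
#   batch.extract_completed_successfully(row_count=5000)
#   batch.load_completed_successfully()
#   print(batch.get_statistics())
#   tracker.data_load_successful()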
|
py | b40cd6e13e7604703a51ac9eb6507d2645213d6d | try:
from openbabel import pybel
except ImportError:
import pybel
import subprocess
def inchi2smi(inchi):
'''Converts InChI string to SMILES string.'''
return pybel.readstring('inchi', inchi).write('can').strip()
def smi2inchi(smi):
'''Converts SMILES string to InChI string.'''
return pybel.readstring('smi', smi).write('inchi').strip()
def canonicalize(smi):
'''Canonicalizes SMILES string.'''
return pybel.readstring('smi', smi).write('can').strip()
def desalt(smi):
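    '''Removes salts from a SMILES string, returning the canonical form.'''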
mol = pybel.readstring('smi', smi)
mol.OBMol.StripSalts()
return mol.write('can').strip()
def neutralize(smi):
    '''Neutralizes a canonical SMILES string (alternate).'''
def neutralize_inchi(inchi):
'''Neutralizes an InChI string.'''
if 'q' in inchi:
layers = inchi.split('/')
new = layers[0]
for i in range(1, len(layers)):
if 'q' not in layers[i]:
new += '/' + layers[i]
return new
return inchi
inchi = smi2inchi(smi)
inchi = neutralize_inchi(inchi)
return inchi2smi(inchi)
def tautomerize(smi):
try:
res = subprocess.check_output('cxcalc majortautomer -f smiles "%s"' % smi,
stderr=subprocess.DEVNULL, shell=True).decode('ascii').strip()
    except Exception:
print(smi, 'failed.')
return None
if len(res) == 0:
return None
return res
def _process(smiles):
try:
return canonicalize(tautomerize(neutralize(desalt(smiles))))
    except Exception:
return None
def process(smiles):
return [_process(x) for x in smiles]
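
# Example (sketch): standardise a few SMILES strings. Note that `tautomerize`
# shells out to ChemAxon's `cxcalc`, so an entry comes back as None whenever
# that tool is missing or any step of the pipeline fails.
#
#   smiles = ['CCO', 'c1ccccc1', '[Na+].[Cl-]']
#   print(process(smiles))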
|
py | b40cd6ee118aef5f3d6f7d01e842df38f32fb427 | from ..dojo_test_case import DojoTestCase, get_unit_tests_path
from dojo.models import Test, Engagement, Product
from dojo.tools.checkmarx_osa.parser import CheckmarxOsaParser
from datetime import datetime
class TestCheckmarxOsaParser(DojoTestCase):
# comment out to get full diff with big reports
# maxDiff = None
def init(self, report_filename):
my_file_handle = open(report_filename)
product = Product()
engagement = Engagement()
test = Test()
engagement.product = product
test.engagement = engagement
return my_file_handle, product, engagement, test
def teardown(self, my_file_handle):
my_file_handle.close()
# ----------------------------------------------------------------------------
# no_finding
# ----------------------------------------------------------------------------
def test_checkmarx_osa_parse_file_with_no_vulnerabilities_has_no_findings(
self,
):
my_file_handle, product, engagement, test = self.init(
get_unit_tests_path() + "/scans/checkmarx_osa/no_finding.json"
)
parser = CheckmarxOsaParser()
findings = parser.get_findings(my_file_handle, test)
self.teardown(my_file_handle)
self.assertEqual(0, len(findings))
# ----------------------------------------------------------------------------
# single finding (to_verify); check all fields
# ----------------------------------------------------------------------------
def test_checkmarx_osa_parse_file_with_single_vulnerability_has_single_finding(
self,
):
my_file_handle, product, engagement, test = self.init(
get_unit_tests_path() + "/scans/checkmarx_osa/single_finding.json"
)
parser = CheckmarxOsaParser()
findings = parser.get_findings(my_file_handle, test)
self.teardown(my_file_handle)
self.assertEqual(1, len(findings))
# check content
item = findings[0]
self.assertEqual(str, type(item.unique_id_from_tool))
self.assertEqual("2A3E02E74053088617923D6FE19F14E8188B5271", item.unique_id_from_tool)
self.assertEqual(str, type(item.title))
self.assertEqual("com.fasterxml.jackson.core:jackson-databind 2.10.2 | CVE-2020-25649", item.title)
self.assertEqual(int, type(item.cwe))
self.assertEqual(1035, item.cwe)
self.assertEqual(str, type(item.cve))
self.assertEqual("CVE-2020-25649", item.cve)
self.assertEqual(float, type(item.cvssv3_score))
self.assertEqual(7.5, item.cvssv3_score)
self.assertEqual(datetime, type(item.publish_date))
self.assertEqual(datetime.strptime("2020-12-03T17:15:00", '%Y-%m-%dT%H:%M:%S'), item.publish_date)
self.assertEqual(str, type(item.component_name))
self.assertEqual("com.fasterxml.jackson.core:jackson-databind", item.component_name)
self.assertEqual(str, type(item.component_version))
self.assertEqual("2.10.2", item.component_version)
self.assertEqual(bool, type(item.active))
self.assertEqual(True, item.active)
self.assertEqual(bool, type(item.verified))
self.assertEqual(False, item.verified)
self.assertEqual(bool, type(item.false_p))
self.assertEqual(False, item.false_p)
self.assertEqual(str, type(item.severity))
self.assertEqual("High", item.severity)
self.assertEqual(str, type(item.references))
self.assertEqual("https://nvd.nist.gov/vuln/detail/CVE-2020-25649", item.references)
self.assertEqual(str, type(item.mitigation))
self.assertEqual("Upgrade to 2.10.5.1", item.mitigation)
self.assertEqual(bool, type(item.static_finding))
self.assertEqual(True, item.static_finding)
self.assertEqual(bool, type(item.dynamic_finding))
self.assertEqual(False, item.dynamic_finding)
self.assertEqual(str, type(item.description))
self.assertEqual("A flaw was found in FasterXML Jackson Databind before 2.6.7.4, 2.7.0 through 2.9.10.6, and 2.10.0 through 2.10.5, where it did not have entity expansion secured properly. This flaw makes it vulnerable to XML external entity (XXE) attacks. The highest threat from this vulnerability is data integrity.", item.description)
self.assertEqual(int, type(item.scanner_confidence))
self.assertEqual(1, item.scanner_confidence)
# ----------------------------------------------------------------------------
# single finding false positive
# ----------------------------------------------------------------------------
def test_checkmarx_osa_parse_file_with_false_positive_is_false_positive(
self,
):
my_file_handle, product, engagement, test = self.init(
get_unit_tests_path() + "/scans/checkmarx_osa/single_finding_false_positive.json"
)
parser = CheckmarxOsaParser()
findings = parser.get_findings(my_file_handle, test)
self.teardown(my_file_handle)
self.assertEqual(1, len(findings))
item = findings[0]
self.assertEqual(bool, type(item.active))
self.assertEqual(False, item.active)
self.assertEqual(bool, type(item.verified))
self.assertEqual(False, item.verified)
self.assertEqual(bool, type(item.false_p))
self.assertEqual(True, item.false_p)
# ----------------------------------------------------------------------------
# single finding confirmed (should be verified=True)
# ----------------------------------------------------------------------------
def test_checkmarx_osa_parse_file_with_confirmed_is_verified(
self,
):
my_file_handle, product, engagement, test = self.init(
get_unit_tests_path() + "/scans/checkmarx_osa/single_finding_confirmed.json"
)
parser = CheckmarxOsaParser()
findings = parser.get_findings(my_file_handle, test)
self.teardown(my_file_handle)
self.assertEqual(1, len(findings))
item = findings[0]
self.assertEqual(bool, type(item.active))
self.assertEqual(True, item.active)
self.assertEqual(bool, type(item.verified))
self.assertEqual(True, item.verified)
self.assertEqual(bool, type(item.false_p))
self.assertEqual(False, item.false_p)
# ----------------------------------------------------------------------------
# multiple findings
# ----------------------------------------------------------------------------
def test_checkmarx_osa_parse_file_with_multiple_findings(
self,
):
my_file_handle, product, engagement, test = self.init(
get_unit_tests_path() + "/scans/checkmarx_osa/multiple_findings.json"
)
parser = CheckmarxOsaParser()
findings = parser.get_findings(my_file_handle, test)
self.teardown(my_file_handle)
self.assertEqual(18, len(findings))
# ----------------------------------------------------------------------------
# single finding no score
# ----------------------------------------------------------------------------
def test_checkmarx_osa_parse_file_with_no_score(
self,
):
my_file_handle, product, engagement, test = self.init(
get_unit_tests_path() + "/scans/checkmarx_osa/single_finding_no_score.json"
)
parser = CheckmarxOsaParser()
findings = parser.get_findings(my_file_handle, test)
self.teardown(my_file_handle)
self.assertEqual(1, len(findings))
item = findings[0]
self.assertIsNone(item.cvssv3_score)
# ----------------------------------------------------------------------------
# single finding no url
# ----------------------------------------------------------------------------
def test_checkmarx_osa_parse_file_with_no_url(
self,
):
my_file_handle, product, engagement, test = self.init(
get_unit_tests_path() + "/scans/checkmarx_osa/single_finding_no_url.json"
)
parser = CheckmarxOsaParser()
findings = parser.get_findings(my_file_handle, test)
self.teardown(my_file_handle)
self.assertEqual(1, len(findings))
item = findings[0]
self.assertIsNone(item.references)
# ----------------------------------------------------------------------------
# single finding no libraryId (ValueError)
# ----------------------------------------------------------------------------
def test_checkmarx_osa_parse_file_with_no_libraryId_raises_ValueError(
self,
):
with self.assertRaises(ValueError) as context:
my_file_handle, product, engagement, test = self.init(
get_unit_tests_path() + "/scans/checkmarx_osa/single_finding_no_libraryId.json"
)
parser = CheckmarxOsaParser()
parser.get_findings(my_file_handle, test)
self.teardown(my_file_handle)
self.assertTrue(
"Invalid format: missing mandatory field libraryId:" in str(context.exception)
)
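
# Running these tests needs a DefectDojo checkout with the sample reports under
# <unit tests path>/scans/checkmarx_osa/. A typical invocation (the module path
# is an assumption and may differ between checkouts):
#   python manage.py test unittests.tools.test_checkmarx_osa_parser -v 2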
|
py | b40cd75dc408a47adc2fc6551ee76f40d23663d4 | # pylint: disable=missing-module-docstring, missing-function-docstring
import pytest
from custom_components.hacs.enums import HacsStage
@pytest.mark.asyncio
async def test_hacs(hacs, repository, tmpdir):
hacs.hass.config.config_dir = tmpdir
hacs.repositories = [None]
assert hacs.get_by_id(None) is None
repository.data.id = "1337"
hacs.repositories = [repository]
assert hacs.get_by_id("1337").data.full_name == "test/test"
assert hacs.get_by_id("1337").data.full_name_lower == "test/test"
hacs.repositories = [None]
assert hacs.get_by_name(None) is None
hacs.repositories = [repository]
assert hacs.get_by_name("test/test").data.id == "1337"
assert hacs.is_known("1337")
await hacs.prosess_queue()
await hacs.clear_out_removed_repositories()
@pytest.mark.asyncio
async def test_set_stage(hacs):
assert hacs.stage == HacsStage.SETUP
await hacs.async_set_stage(HacsStage.RUNNING)
assert hacs.stage == HacsStage.RUNNING
|
py | b40cd7d30a32ae3811455570fa2b7ec9eed1f930 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# (c) B.Kerler 2018-2019
import os
import sys
import logging
import json
from binascii import hexlify, unhexlify
from struct import unpack, pack
from edl.Library.firehose import firehose
from edl.Library.xmlparser import xmlparser
from edl.Library.utils import do_tcp_server
from edl.Library.utils import LogBase, getint
from edl.Config.qualcomm_config import memory_type
from edl.Config.qualcomm_config import infotbl, msmids, secureboottbl, sochw
try:
import xml.etree.cElementTree as ET
from xml.etree import cElementTree as ElementTree
except ImportError:
import xml.etree.ElementTree as ET
from xml.etree import ElementTree
try:
from edl.Library.Modules.init import modules
except ImportError:
pass
class firehose_client(metaclass=LogBase):
def __init__(self, arguments, cdc, sahara, loglevel, printer):
self.cdc = cdc
self.sahara = sahara
self.arguments = arguments
self.printer = printer
self.info = self.__logger.info
self.error = self.__logger.error
self.warning = self.__logger.warning
self.__logger.setLevel(loglevel)
if loglevel == logging.DEBUG:
logfilename = "log.txt"
fh = logging.FileHandler(logfilename)
self.__logger.addHandler(fh)
self.cfg = firehose.cfg()
if not arguments["--memory"] is None:
self.cfg.MemoryName = arguments["--memory"].lower()
else:
self.cfg.MemoryName = ""
self.cfg.ZLPAwareHost = 1
self.cfg.SkipStorageInit = arguments["--skipstorageinit"]
self.cfg.SkipWrite = arguments["--skipwrite"]
self.cfg.MaxPayloadSizeToTargetInBytes = getint(arguments["--maxpayload"])
self.cfg.SECTOR_SIZE_IN_BYTES = getint(arguments["--sectorsize"])
self.cfg.bit64 = sahara.bit64
devicemodel = ""
skipresponse = False
if "--skipresponse" in arguments:
if arguments["--skipresponse"]:
skipresponse = True
if "--devicemodel" in arguments:
if arguments["--devicemodel"] is not None:
devicemodel = arguments["--devicemodel"]
self.firehose = firehose(cdc, xmlparser(), self.cfg, self.__logger.level, devicemodel, sahara.serial,
skipresponse,
self.getluns(arguments), arguments)
self.connected = False
self.firehose.connect()
if "hwid" in dir(sahara):
if sahara.hwid is not None:
hwid = (sahara.hwid >> 32) & 0xFFFFFF
socid = ((sahara.hwid >> 32) >> 16)
if hwid in msmids:
self.target_name = msmids[hwid]
self.info(f"Target detected: {self.target_name}")
if self.cfg.MemoryName == "":
if self.target_name in memory_type.preferred_memory:
                            mtype = memory_type.preferred_memory[self.target_name]
                            if mtype == memory_type.nand:
                                self.cfg.MemoryName = "nand"
                            elif mtype == memory_type.spinor:
                                self.cfg.MemoryName = "spinor"
                            elif mtype == memory_type.emmc:
                                self.cfg.MemoryName = "eMMC"
                            elif mtype == memory_type.ufs:
                                self.cfg.MemoryName = "UFS"
self.warning("Based on the chipset, we assume " +
self.cfg.MemoryName + " as default memory type..., if it fails, try using " +
"--memory\" with \"UFS\",\"NAND\" or \"spinor\" instead !")
elif socid in sochw:
self.target_name = sochw[socid].split(",")[0]
# We assume ufs is fine (hopefully), set it as default
if self.cfg.MemoryName == "":
self.warning(
"No --memory option set, we assume \"eMMC\" as default ..., if it fails, try using \"--memory\" " +
"with \"UFS\",\"NAND\" or \"spinor\" instead !")
self.cfg.MemoryName = "eMMC"
if self.firehose.configure(0):
funcs = "Supported functions:\n-----------------\n"
for function in self.firehose.supported_functions:
funcs += function + ","
funcs = funcs[:-1]
self.info(funcs)
self.target_name = self.firehose.cfg.TargetName
self.connected = True
try:
if self.firehose.modules is None:
self.firehose.modules = modules(fh=self.firehose, serial=self.firehose.serial,
supported_functions=self.firehose.supported_functions,
loglevel=self.__logger.level,
devicemodel=self.firehose.devicemodel, args=self.arguments)
except Exception as err: # pylint: disable=broad-except
self.firehose.modules = None
def check_cmd(self, func):
if not self.firehose.supported_functions:
return True
for sfunc in self.firehose.supported_functions:
if func.lower() == sfunc.lower():
return True
return False
def find_bootable_partition(self, rawprogram):
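        # Scan the rawprogram XMLs for the bootloader image (label 'xbl',
        # 'xbl_a' or 'sbl1') and return its physical partition number;
        # -1 means none, or more than one, was found.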
part = -1
for xml in rawprogram:
with open(xml, "r") as fl:
for evt, elem in ET.iterparse(fl, events=["end"]):
if elem.tag == "program":
label = elem.get("label")
if label in ['xbl', 'xbl_a', 'sbl1']:
if part != -1:
self.error("[FIREHOSE] multiple bootloader found!")
return -1
part = elem.get("physical_partition_number")
return part
def getluns(self, argument):
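        # Honour an explicit --lun; otherwise enumerate all LUNs for UFS
        # (0 .. maxlun - 1) and default to LUN 0 for other memory types.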
if argument["--lun"] is not None:
return [int(argument["--lun"])]
luns = []
if self.cfg.MemoryName.lower() == "ufs":
for i in range(0, self.cfg.maxlun):
luns.append(i)
else:
luns = [0]
return luns
def check_param(self, parameters):
error = False
params = ""
for parameter in parameters:
params += parameter + " "
            if parameter not in self.arguments or self.arguments[parameter] is None:
error = True
if error:
if len(parameters) == 1:
self.printer("Argument " + params + "required.")
else:
self.printer("Arguments " + params + "required.")
return False
return True
def get_storage_info(self):
if "getstorageinfo" in self.firehose.supported_functions:
storageinfo = self.firehose.cmd_getstorageinfo()
for info in storageinfo:
if "storage_info" in info:
rs = info.replace("INFO: ", "")
field = json.loads(rs)
if "storage_info" in field:
info = field["storage_info"]
return info
return None
def handle_firehose(self, cmd, options):
if cmd == "gpt":
luns = self.getluns(options)
directory = options["<directory>"]
if directory is None:
directory = ""
genxml = False
if "--genxml" in options:
if options["--genxml"]:
genxml = True
for lun in luns:
sfilename = os.path.join(directory, f"gpt_main{str(lun)}.bin")
data, guid_gpt = self.firehose.get_gpt(lun, int(options["--gpt-num-part-entries"]),
int(options["--gpt-part-entry-size"]),
int(options["--gpt-part-entry-start-lba"]))
if guid_gpt is None:
break
with open(sfilename, "wb") as write_handle:
write_handle.write(data)
self.printer(f"Dumped GPT from Lun {str(lun)} to {sfilename}")
sfilename = os.path.join(directory, f"gpt_backup{str(lun)}.bin")
with open(sfilename, "wb") as write_handle:
write_handle.write(data[self.firehose.cfg.SECTOR_SIZE_IN_BYTES * 2:])
self.printer(f"Dumped Backup GPT from Lun {str(lun)} to {sfilename}")
if genxml:
guid_gpt.generate_rawprogram(lun, self.firehose.cfg.SECTOR_SIZE_IN_BYTES, directory)
return True
elif cmd == "printgpt":
luns = self.getluns(options)
for lun in luns:
data, guid_gpt = self.firehose.get_gpt(lun, int(options["--gpt-num-part-entries"]),
int(options["--gpt-part-entry-size"]),
int(options["--gpt-part-entry-start-lba"]))
if guid_gpt is None:
break
self.printer(f"\nParsing Lun {str(lun)}:")
guid_gpt.print()
return True
elif cmd == "r":
if not self.check_param(["<partitionname>", "<filename>"]):
return False
partitionname = options["<partitionname>"]
filename = options["<filename>"]
filenames = filename.split(",")
partitions = partitionname.split(",")
if len(partitions) != len(filenames):
self.error("You need to gives as many filenames as given partitions.")
return False
i = 0
for partition in partitions:
if partition=="gpt":
luns = self.getluns(options)
for lun in luns:
partfilename=filenames[i]+".lun%d" % lun
if self.firehose.cmd_read(lun, 0, 32, partfilename):
self.printer(
f"Dumped sector {str(0)} with sector count {str(32)} " +
f"as {partfilename}.")
continue
partfilename = filenames[i]
i += 1
res = self.firehose.detect_partition(options, partition)
if res[0]:
lun = res[1]
rpartition = res[2]
if self.firehose.cmd_read(lun, rpartition.sector, rpartition.sectors, partfilename):
self.printer(
f"Dumped sector {str(rpartition.sector)} with sector count {str(rpartition.sectors)} " +
f"as {partfilename}.")
else:
fpartitions = res[1]
self.error(f"Error: Couldn't detect partition: {partition}\nAvailable partitions:")
for lun in fpartitions:
for rpartition in fpartitions[lun]:
if self.cfg.MemoryName == "emmc":
self.error("\t" + rpartition)
else:
self.error(lun + ":\t" + rpartition)
return False
return True
elif cmd == "rl":
if not self.check_param(["<directory>"]):
return False
directory = options["<directory>"]
if options["--skip"]:
skip = options["--skip"].split(",")
else:
skip = []
genxml = False
if "--genxml" in options:
if options["--genxml"]:
genxml = True
if not os.path.exists(directory):
os.mkdir(directory)
luns = self.getluns(options)
for lun in luns:
data, guid_gpt = self.firehose.get_gpt(lun, int(options["--gpt-num-part-entries"]),
int(options["--gpt-part-entry-size"]),
int(options["--gpt-part-entry-start-lba"]))
if guid_gpt is None:
break
if len(luns) > 1:
storedir = os.path.join(directory, "lun" + str(lun))
else:
storedir = directory
if not os.path.exists(storedir):
os.mkdir(storedir)
sfilename = os.path.join(storedir, f"gpt_main{str(lun)}.bin")
with open(sfilename, "wb") as write_handle:
write_handle.write(data)
sfilename = os.path.join(storedir, f"gpt_backup{str(lun)}.bin")
with open(sfilename, "wb") as write_handle:
write_handle.write(data[self.firehose.cfg.SECTOR_SIZE_IN_BYTES * 2:])
if genxml:
guid_gpt.generate_rawprogram(lun, self.firehose.cfg.SECTOR_SIZE_IN_BYTES, storedir)
for partition in guid_gpt.partentries:
partitionname = partition.name
if partition.name in skip:
continue
filename = os.path.join(storedir, partitionname + ".bin")
self.info(
f"Dumping partition {str(partition.name)} with sector count {str(partition.sectors)} " +
f"as {filename}.")
if self.firehose.cmd_read(lun, partition.sector, partition.sectors, filename):
self.info(f"Dumped partition {str(partition.name)} with sector count " +
f"{str(partition.sectors)} as {filename}.")
return True
elif cmd == "rf":
if not self.check_param(["<filename>"]):
return False
filename = options["<filename>"]
storageinfo = self.get_storage_info()
if storageinfo is not None and self.cfg.MemoryName.lower() in ["spinor" , "nand"]:
totalsectors = None
if "total_blocks" in storageinfo:
totalsectors = storageinfo["total_blocks"]
if "num_physical" in storageinfo:
num_physical = storageinfo["num_physical"]
luns = [0]
if num_physical > 0:
luns=[]
for i in range(num_physical):
luns.append(i)
if totalsectors is not None:
for lun in luns:
buffer=self.firehose.cmd_read_buffer(physical_partition_number=lun,
start_sector=0, num_partition_sectors=1, display=False)
storageinfo = self.get_storage_info()
if "total_blocks" in storageinfo:
totalsectors = storageinfo["total_blocks"]
if len(luns) > 1:
sfilename = filename + f".lun{str(lun)}"
else:
sfilename = filename
self.printer(f"Dumping sector 0 with sector count {str(totalsectors)} as {filename}.")
if self.firehose.cmd_read(lun, 0, totalsectors, sfilename):
self.printer(
f"Dumped sector 0 with sector count {str(totalsectors)} as {filename}.")
else:
luns = self.getluns(options)
for lun in luns:
data, guid_gpt = self.firehose.get_gpt(lun, int(options["--gpt-num-part-entries"]),
int(options["--gpt-part-entry-size"]),
int(options["--gpt-part-entry-start-lba"]))
if guid_gpt is None:
break
if len(luns) > 1:
sfilename = filename + f".lun{str(lun)}"
else:
sfilename = filename
self.printer(f"Dumping sector 0 with sector count {str(guid_gpt.totalsectors)} as {filename}.")
if self.firehose.cmd_read(lun, 0, guid_gpt.totalsectors, sfilename):
self.printer(f"Dumped sector 0 with sector count {str(guid_gpt.totalsectors)} as {filename}.")
return True
elif cmd == "pbl":
if not self.check_param(["<filename>"]):
return False
if not self.check_cmd("peek"):
self.error("Peek command isn't supported by edl loader")
return False
else:
filename = options["<filename>"]
if self.target_name in infotbl:
target_name = infotbl[self.target_name]
if len(target_name[0]) > 0:
if self.firehose.cmd_peek(target_name[0][0], target_name[0][1], filename, True):
self.printer(f"Dumped pbl at offset {hex(target_name[0][0])} as {filename}.")
return True
else:
self.error("No known pbl offset for this chipset")
else:
self.error("Unknown target chipset")
self.error("Error on dumping pbl")
return False
elif cmd == "qfp":
if not self.check_param(["<filename>"]):
return False
if not self.check_cmd("peek"):
self.error("Peek command isn't supported by edl loader")
return False
else:
filename = options["<filename>"]
if self.target_name not in infotbl:
self.error("Unknown target chipset")
else:
target_name = infotbl[self.target_name]
if len(target_name[1]) > 0:
if self.firehose.cmd_peek(target_name[1][0], target_name[1][1], filename):
self.printer(f"Dumped qfprom at offset {hex(target_name[1][0])} as {filename}.")
return True
else:
self.error("No known qfprom offset for this chipset")
self.error("Error on dumping qfprom")
return False
elif cmd == "secureboot":
if not self.check_cmd("peek"):
self.error("Peek command isn't supported by edl loader")
return False
else:
if self.target_name in secureboottbl:
self.target_name = secureboottbl[self.target_name]
value = unpack("<I", self.firehose.cmd_peek(self.target_name, 4))[0]
is_secure = False
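                    # Each byte of the fuse word describes one Sec_Boot area:
                    # bits 0-1 = root PK hash index, bit 4 = OEM PK hash in
                    # use, bit 5 = auth enabled, bit 6 = serial number bound
                    # (mirrors the bit extraction below).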
for area in range(0, 4):
sec_boot = (value >> (area * 8)) & 0xFF
pk_hashindex = sec_boot & 3
oem_pkhash = True if ((sec_boot >> 4) & 1) == 1 else False
auth_enabled = True if ((sec_boot >> 5) & 1) == 1 else False
use_serial = True if ((sec_boot >> 6) & 1) == 1 else False
if auth_enabled:
is_secure = True
self.printer(f"Sec_Boot{str(area)} " +
f"PKHash-Index:{str(pk_hashindex)} " +
f"OEM_PKHash: {str(oem_pkhash)} " +
f"Auth_Enabled: {str(auth_enabled)}" +
f"Use_Serial: {str(use_serial)}")
if is_secure:
self.printer("Secure boot enabled.")
else:
self.printer("Secure boot disabled.")
return True
else:
self.error("Unknown target chipset")
return False
elif cmd == "memtbl":
if not self.check_param(["<filename>"]):
return False
if not self.check_cmd("peek"):
self.error("Peek command isn't supported by edl loader")
return False
else:
filename = options["<filename>"]
if self.target_name in infotbl:
self.target_name = infotbl[self.target_name]
if len(self.target_name[2]) > 0:
if self.firehose.cmd_peek(self.target_name[2][0], self.target_name[2][1], filename):
self.printer(f"Dumped memtbl at offset {hex(self.target_name[2][0])} as {filename}.")
return True
else:
self.error("No known memtbl offset for this chipset")
else:
self.error("Unknown target chipset")
self.error("Error on dumping memtbl")
return False
elif cmd == "footer":
if not self.check_param(["<filename>"]):
return False
luns = self.getluns(options)
filename = options["<filename>"]
for lun in luns:
data, guid_gpt = self.firehose.get_gpt(lun, int(options["--gpt-num-part-entries"]),
int(options["--gpt-part-entry-size"]),
int(options["--gpt-part-entry-start-lba"]))
if guid_gpt is None:
break
pnames = ["userdata2", "metadata", "userdata", "reserved1", "reserved2", "reserved3"]
for partition in guid_gpt.partentries:
if partition.name in pnames:
self.printer(f"Detected partition: {partition.name}")
data = self.firehose.cmd_read_buffer(lun,
partition.sector +
(partition.sectors -
(0x4000 // self.firehose.cfg.SECTOR_SIZE_IN_BYTES)),
(0x4000 // self.firehose.cfg.SECTOR_SIZE_IN_BYTES), False)
if data == b"":
continue
val = unpack("<I", data[:4])[0]
if (val & 0xFFFFFFF0) == 0xD0B5B1C0:
with open(filename, "wb") as write_handle:
write_handle.write(data)
self.printer(f"Dumped footer from {partition.name} as {filename}.")
return True
self.error("Error: Couldn't detect footer partition.")
return False
elif cmd == "rs":
if options["--lun"] is not None:
lun = int(options["--lun"])
else:
lun = 0
if not self.check_param(["<filename>", "<sectors>", "<start_sector>"]):
return False
start = int(options["<start_sector>"])
sectors = int(options["<sectors>"])
filename = options["<filename>"]
if self.firehose.cmd_read(lun, start, sectors, filename, True):
self.printer(f"Dumped sector {str(start)} with sector count {str(sectors)} as {filename}.")
return True
elif cmd == "peek":
if not self.check_param(["<offset>", "<length>", "<filename>"]):
return False
if not self.check_cmd("peek"):
self.error("Peek command isn't supported by edl loader")
return False
else:
offset = getint(options["<offset>"])
length = getint(options["<length>"])
filename = options["<filename>"]
self.firehose.cmd_peek(offset, length, filename, True)
self.info(
f"Peek data from offset {hex(offset)} and length {hex(length)} was written to {filename}")
return True
elif cmd == "peekhex":
if not self.check_param(["<offset>", "<length>"]):
return False
if not self.check_cmd("peek"):
self.error("Peek command isn't supported by edl loader")
return False
else:
offset = getint(options["<offset>"])
length = getint(options["<length>"])
resp = self.firehose.cmd_peek(offset, length, "", True)
self.printer("\n")
self.printer(hexlify(resp))
return True
elif cmd == "peekqword":
if not self.check_param(["<offset>"]):
return False
if not self.check_cmd("peek"):
self.error("Peek command isn't supported by edl loader")
return False
else:
offset = getint(options["<offset>"])
resp = self.firehose.cmd_peek(offset, 8, "", True)
self.printer("\n")
self.printer(hex(unpack("<Q", resp[:8])[0]))
return True
elif cmd == "peekdword":
if not self.check_param(["<offset>"]):
return False
if not self.check_cmd("peek"):
self.error("Peek command isn't supported by edl loader")
return False
else:
offset = getint(options["<offset>"])
resp = self.firehose.cmd_peek(offset, 4, "", True)
self.printer("\n")
self.printer(hex(unpack("<I", resp[:4])[0]))
return True
elif cmd == "poke":
if not self.check_param(["<offset>", "<filename>"]):
return False
if not self.check_cmd("poke"):
self.error("Poke command isn't supported by edl loader")
return False
else:
offset = getint(options["<offset>"])
filename = options["<filename>"]
return self.firehose.cmd_poke(offset, "", filename, True)
elif cmd == "pokehex":
if not self.check_param(["<offset>", "<data>"]):
return False
if not self.check_cmd("poke"):
self.error("Poke command isn't supported by edl loader")
return False
else:
offset = getint(options["<offset>"])
data = unhexlify(options["<data>"])
return self.firehose.cmd_poke(offset, data, "", True)
elif cmd == "pokeqword":
if not self.check_param(["<offset>", "<data>"]):
return False
if not self.check_cmd("poke"):
self.error("Poke command isn't supported by edl loader")
return False
else:
offset = getint(options["<offset>"])
data = pack("<Q", getint(options["<data>"]))
return self.firehose.cmd_poke(offset, data, "", True)
elif cmd == "pokedword":
if not self.check_param(["<offset>", "<data>"]):
return False
if not self.check_cmd("poke"):
self.error("Poke command isn't supported by edl loader")
return False
else:
offset = getint(options["<offset>"])
data = pack("<I", getint(options["<data>"]))
return self.firehose.cmd_poke(offset, data, "", True)
elif cmd == "memcpy":
if not self.check_param(["<offset>", "<size>"]):
return False
if not self.check_cmd("poke"):
self.printer("Poke command isn't supported by edl loader")
else:
srcoffset = getint(options["<offset>"])
size = getint(options["<size>"])
dstoffset = srcoffset + size
if self.firehose.cmd_memcpy(dstoffset, srcoffset, size):
self.printer(f"Memcpy from {hex(srcoffset)} to {hex(dstoffset)} succeeded")
return True
else:
return False
elif cmd == "reset":
return self.firehose.cmd_reset()
elif cmd == "nop":
if not self.check_cmd("nop"):
self.error("Nop command isn't supported by edl loader")
return False
else:
return self.firehose.cmd_nop()
elif cmd == "setbootablestoragedrive":
if not self.check_param(["<lun>"]):
return False
if not self.check_cmd("setbootablestoragedrive"):
self.error("setbootablestoragedrive command isn't supported by edl loader")
return False
else:
return self.firehose.cmd_setbootablestoragedrive(int(options["<lun>"]))
elif cmd == "getstorageinfo":
if not self.check_cmd("getstorageinfo"):
self.error("getstorageinfo command isn't supported by edl loader")
return False
else:
return self.firehose.cmd_getstorageinfo_string()
elif cmd == "w":
if not self.check_param(["<partitionname>", "<filename>"]):
return False
partitionname = options["<partitionname>"]
filename = options["<filename>"]
if options["--lun"] is not None:
lun = int(options["--lun"])
else:
lun = 0
startsector = 0
if not os.path.exists(filename):
self.error(f"Error: Couldn't find file: {filename}")
return False
if partitionname.lower() == "gpt":
sectors = os.stat(filename).st_size // self.firehose.cfg.SECTOR_SIZE_IN_BYTES
res = [True, lun, sectors]
else:
res = self.firehose.detect_partition(options, partitionname)
if res[0]:
lun = res[1]
sectors = os.stat(filename).st_size // self.firehose.cfg.SECTOR_SIZE_IN_BYTES
if (os.stat(filename).st_size % self.firehose.cfg.SECTOR_SIZE_IN_BYTES) > 0:
sectors += 1
if partitionname.lower() != "gpt":
partition = res[2]
if sectors > partition.sectors:
self.error(
f"Error: {filename} has {sectors} sectors but partition only has {partition.sectors}.")
return False
startsector = partition.sector
if self.firehose.modules is not None:
self.firehose.modules.writeprepare()
if self.firehose.cmd_program(lun, startsector, filename):
self.printer(f"Wrote {filename} to sector {str(startsector)}.")
return True
else:
self.printer(f"Error writing {filename} to sector {str(startsector)}.")
return False
else:
if len(res) > 0:
fpartitions = res[1]
self.error(f"Error: Couldn't detect partition: {partitionname}\nAvailable partitions:")
for lun in fpartitions:
for partition in fpartitions[lun]:
if self.cfg.MemoryName == "emmc":
self.error("\t" + partition)
else:
self.error(lun + ":\t" + partition)
return False
elif cmd == "wl":
if not self.check_param(["<directory>"]):
return False
directory = options["<directory>"]
if options["--skip"]:
skip = options["--skip"].split(",")
else:
skip = []
luns = self.getluns(options)
if not os.path.exists(directory):
self.error(f"Error: Couldn't find directory: {directory}")
sys.exit()
filenames = []
if self.firehose.modules is not None:
self.firehose.modules.writeprepare()
for dirName, subdirList, fileList in os.walk(directory):
for fname in fileList:
filenames.append(os.path.join(dirName, fname))
for lun in luns:
data, guid_gpt = self.firehose.get_gpt(lun, int(options["--gpt-num-part-entries"]),
int(options["--gpt-part-entry-size"]),
int(options["--gpt-part-entry-start-lba"]))
if guid_gpt is None:
break
if "partentries" in dir(guid_gpt):
for filename in filenames:
for partition in guid_gpt.partentries:
                        partname = os.path.basename(filename)
if ".bin" in partname[-4:] or ".img" in partname[-4:] or ".mbn" in partname[-4:]:
partname = partname[:-4]
if partition.name == partname:
if partition.name in skip:
continue
sectors = os.stat(filename).st_size // self.firehose.cfg.SECTOR_SIZE_IN_BYTES
if (os.stat(filename).st_size % self.firehose.cfg.SECTOR_SIZE_IN_BYTES) > 0:
sectors += 1
if sectors > partition.sectors:
self.error(f"Error: {filename} has {sectors} sectors but partition " +
f"only has {partition.sectors}.")
return False
self.printer(f"Writing {filename} to partition {str(partition.name)}.")
self.firehose.cmd_program(lun, partition.sector, filename)
else:
self.printer("Couldn't write partition. Either wrong memorytype given or no gpt partition.")
return False
return True
elif cmd == "ws":
if not self.check_param(["<start_sector>"]):
return False
if options["--lun"] is not None:
lun = int(options["--lun"])
else:
lun = 0
start = int(options["<start_sector>"])
filename = options["<filename>"]
if not os.path.exists(filename):
self.error(f"Error: Couldn't find file: {filename}")
return False
if self.firehose.modules is not None:
self.firehose.modules.writeprepare()
if self.firehose.cmd_program(lun, start, filename):
self.printer(f"Wrote {filename} to sector {str(start)}.")
return True
else:
self.error(f"Error on writing {filename} to sector {str(start)}")
return False
elif cmd == "wf":
if not self.check_param(["<filename>"]):
return False
if options["--lun"] is not None:
lun = int(options["--lun"])
else:
lun = 0
start = 0
filename = options["<filename>"]
if not os.path.exists(filename):
self.error(f"Error: Couldn't find file: {filename}")
return False
if self.firehose.modules is not None:
self.firehose.modules.writeprepare()
if self.firehose.cmd_program(lun, start, filename):
self.printer(f"Wrote {filename} to sector {str(start)}.")
return True
else:
self.error(f"Error on writing {filename} to sector {str(start)}")
return False
elif cmd == "e":
if not self.check_param(["<partitionname>"]):
return False
luns = self.getluns(options)
partitionname = options["<partitionname>"]
for lun in luns:
data, guid_gpt = self.firehose.get_gpt(lun, int(options["--gpt-num-part-entries"]),
int(options["--gpt-part-entry-size"]),
int(options["--gpt-part-entry-start-lba"]))
if guid_gpt is None:
break
if self.firehose.modules is not None:
self.firehose.modules.writeprepare()
if "partentries" in dir(guid_gpt):
for partition in guid_gpt.partentries:
if partition.name == partitionname:
self.firehose.cmd_erase(lun, partition.sector, partition.sectors)
self.printer(
f"Erased {partitionname} starting at sector {str(partition.sector)} " +
f"with sector count {str(partition.sectors)}.")
return True
else:
self.printer("Couldn't erase partition. Either wrong memorytype given or no gpt partition.")
return False
self.error(f"Error: Couldn't detect partition: {partitionname}")
return False
elif cmd == "ep":
if not self.check_param(["<partitionname>", "<sectors>"]):
return False
luns = self.getluns(options)
partitionname = options["<partitionname>"]
sectors = int(options["<sectors>"])
for lun in luns:
data, guid_gpt = self.firehose.get_gpt(lun, int(options["--gpt-num-part-entries"]),
int(options["--gpt-part-entry-size"]),
int(options["--gpt-part-entry-start-lba"]))
if guid_gpt is None:
break
if self.firehose.modules is not None:
self.firehose.modules.writeprepare()
if "partentries" in dir(guid_gpt):
for partition in guid_gpt.partentries:
if partition.name == partitionname:
self.firehose.cmd_erase(lun, partition.sector, sectors)
self.printer(
f"Erased {partitionname} starting at sector {str(partition.sector)} " +
f"with sector count {str(sectors)}.")
return True
else:
self.printer("Couldn't erase partition. Either wrong memorytype given or no gpt partition.")
return False
self.error(f"Error: Couldn't detect partition: {partitionname}")
return False
elif cmd == "es":
if not self.check_param(["<start_sector>", "<sectors>"]):
return False
if options["--lun"] is not None:
lun = int(options["--lun"])
else:
lun = 0
start = int(options["<start_sector>"])
sectors = int(options["<sectors>"])
if self.firehose.modules is not None:
self.firehose.modules.writeprepare()
if self.firehose.cmd_erase(lun, start, sectors):
self.printer(f"Erased sector {str(start)} with sector count {str(sectors)}.")
return True
return False
elif cmd == "xml":
if not self.check_param(["<xmlfile>"]):
return False
return self.firehose.cmd_xml(options["<xmlfile>"])
elif cmd == "rawxml":
if not self.check_param(["<xmlstring>"]):
return False
return self.firehose.cmd_rawxml(options["<xmlstring>"])
elif cmd == "send":
if not self.check_param(["<command>"]):
return False
command = options["<command>"]
resp = self.firehose.cmd_send(command, True)
self.printer("\n")
self.printer(resp)
return True
elif cmd == "server":
return do_tcp_server(self, options, self.handle_firehose)
elif cmd == "modules":
if not self.check_param(["<command>", "<options>"]):
return False
mcommand = options["<command>"]
moptions = options["<options>"]
if self.firehose.modules is None:
self.error("Feature is not supported")
return False
else:
return self.firehose.modules.run(command=mcommand, args=moptions)
elif cmd == "qfil":
self.info("[qfil] raw programming...")
rawprogram = options["<rawprogram>"].split(",")
imagedir = options["<imagedir>"]
patch = options["<patch>"].split(",")
for xml in rawprogram:
filename = os.path.join(imagedir, xml)
if os.path.exists(filename):
self.info("[qfil] programming %s" % xml)
fl = open(filename, "r")
for evt, elem in ET.iterparse(fl, events=["end"]):
if elem.tag == "program":
if elem.get("filename", ""):
filename = os.path.join(imagedir, elem.get("filename"))
if not os.path.isfile(filename):
self.error("%s doesn't exist!" % filename)
continue
partition_number = int(elem.get("physical_partition_number"))
num_disk_sectors = self.firehose.getlunsize(partition_number)
start_sector = elem.get("start_sector")
if "NUM_DISK_SECTORS" in start_sector:
start_sector = start_sector.replace("NUM_DISK_SECTORS", str(num_disk_sectors))
if "-" in start_sector or "*" in start_sector or "/" in start_sector or \
"+" in start_sector:
start_sector = start_sector.replace(".", "")
start_sector = eval(start_sector)
self.info(f"[qfil] programming {filename} to partition({partition_number})" +
f"@sector({start_sector})...")
self.firehose.cmd_program(int(partition_number), int(start_sector), filename)
else:
self.warning(f"File : {filename} not found.")
self.info("[qfil] raw programming ok.")
self.info("[qfil] patching...")
for xml in patch:
filename = os.path.join(imagedir, xml)
self.info("[qfil] patching with %s" % xml)
if os.path.exists(filename):
fl = open(filename, "r")
for evt, elem in ET.iterparse(fl, events=["end"]):
if elem.tag == "patch":
filename = elem.get("filename")
if filename != "DISK":
continue
start_sector = elem.get("start_sector")
size_in_bytes = elem.get("size_in_bytes")
                        self.info(
                            f"[qfil] patching {filename} sector({start_sector}), size={size_in_bytes}")
content = ElementTree.tostring(elem).decode("utf-8")
CMD = "<?xml version=\"1.0\" ?><data>\n {content} </data>".format(
content=content)
print(CMD)
self.firehose.xmlsend(CMD)
else:
self.warning(f"File : {filename} not found.")
self.info("[qfil] patching ok")
bootable = self.find_bootable_partition(rawprogram)
if bootable != -1:
if self.firehose.cmd_setbootablestoragedrive(bootable):
self.info("[qfil] partition({partition}) is now bootable\n".format(partition=bootable))
else:
self.info(
"[qfil] set partition({partition}) as bootable failed\n".format(partition=bootable))
else:
self.error("Unknown/Missing command, a command is required.")
return False |
py | b40cd8326b258283195e5f3ac6a4eeb019ad0341 | import subprocess
import os
import numpy as np
from astropy.table import Table, vstack
from astropy.constants import L_sun
def generate_SED(parameter_names, parameters_tmp, col_scale, path_to_cigale, path_to_ini_file,
                 filename='tmp', output_table_path='CIGALE_sed.fits'):
    # parameters_tmp: dict of parameter value grids (must include 'sfr');
    # col_scale: names of SED columns to rescale with SFR. Both were free
    # names in the original and are assumed here to be caller-supplied.
# open the template cigale file
fin = open(path_to_cigale + path_to_ini_file)
# open the standard pcigale ini file to copy edits to
fout = open(path_to_cigale + "pcigale.ini", "wt")
    # tracks whether the first fracAGN entry has already been rewritten
    # (assumed start state: no fracAGN line seen yet)
    fracagn = False
    # for each line
    for line in fin:
        # check if the line matches any parameter names
ind_line = [param + " =" in line for param in parameter_names]
if any(ind_line):
            param = parameter_names[np.array(ind_line)]
# if parameter name is fracAGN check if this is the first
if param[0] == 'fracAGN':
if fracagn:
fout.write(line)
fracagn = False
else:
# otherwise write out parameter values
fout.write(" " + param[0] + " = " + ", ".join(
['{:.13f}'.format(i) for i in parameters_tmp[param[0]]]) + ' \n')
fracagn = True
else:
fout.write(" " + param[0] + " = " + ", ".join(
['{:.13f}'.format(i) for i in parameters_tmp[param[0]]]) + ' \n')
else:
fout.write(line)
# close files
fin.close()
fout.close()
from shutil import copyfile, move, rmtree
# copy corresponding ini.spec file to standard path
copyfile(path_to_cigale + path_to_ini_file + ".spec", path_to_cigale + "pcigale.ini.spec")
# run cigale
p = subprocess.Popen(['pcigale', 'run'], cwd=path_to_cigale)
p.wait()
# check if folder already exists
try:
rmtree(path_to_cigale + '{}/'.format(filename))
    except FileNotFoundError:
print('---')
# move cigale output to folder
move(path_to_cigale + '/out/', path_to_cigale + '/{}/'.format(filename))
# read in SEDs
SEDs = Table.read(path_to_cigale + '/{}//models-block-0.fits'.format(filename))
# change units
SEDs['dust.luminosity'] = SEDs['dust.luminosity'] / L_sun.value
# repeat the SED table by the number of scale steps
dataset = vstack([SEDs for i in range(0, parameters_tmp['sfr'].size)])
# repeat the scale range by the number of entries in table (so I can easily multiply each column)
    scale_table = np.repeat(parameters_tmp['sfr'], len(SEDs)) / dataset['sfh.sfr']
# scale each column that should be scaled as SFR is scaled
for c in col_scale:
dataset[c] = dataset[c] * scale_table
# create log10 version of SFR
    dataset['log10_sfh.sfr'] = np.log10(dataset['sfh.sfr'])
# write out scaled file
    dataset.write(output_table_path, overwrite=True)
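
# Example call (sketch; paths and values are illustrative only, and
# parameter_names must be a numpy array so it can be boolean-indexed):
#
#   import numpy as np
#   generate_SED(np.array(['fracAGN', 'sfr']),
#                parameters_tmp={'fracAGN': [0.1, 0.3],
#                                'sfr': np.logspace(-1, 2, 10)},
#                col_scale=['dust.luminosity'],
#                path_to_cigale='/opt/cigale/',
#                path_to_ini_file='pcigale_template.ini')
 |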
py | b40cd87da827d57e00f1dabc75dfaffcbc0f75e4 | # Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
import re # noqa: F401
import sys # noqa: F401
from datadog_api_client.v2.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from datadog_api_client.v2.model.dashboard_list_item_response import DashboardListItemResponse
globals()["DashboardListItemResponse"] = DashboardListItemResponse
class DashboardListUpdateItemsResponse(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {}
validations = {}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
"dashboards": ([DashboardListItemResponse],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
"dashboards": "dashboards", # noqa: E501
}
_composed_schemas = {}
required_properties = set(
[
"_data_store",
"_check_type",
"_spec_property_naming",
"_path_to_item",
"_configuration",
"_visited_composed_classes",
]
)
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""DashboardListUpdateItemsResponse - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
dashboards ([DashboardListItemResponse]): List of dashboards in the dashboard list.. [optional] # noqa: E501
"""
_check_type = kwargs.pop("_check_type", True)
_spec_property_naming = kwargs.pop("_spec_property_naming", False)
_path_to_item = kwargs.pop("_path_to_item", ())
_configuration = kwargs.pop("_configuration", None)
_visited_composed_classes = kwargs.pop("_visited_composed_classes", ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
% (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if (
var_name not in self.attribute_map
and self._configuration is not None
and self._configuration.discard_unknown_keys
and self.additional_properties_type is None
):
# discard variable.
continue
setattr(self, var_name, var_value)
|
py | b40cd8a8fe61285bd8be560a643fbc4a164ef54c | import traceback
import sys
import pickle
import os
import subprocess
from Life import Life
from CommandLineInterface import CommandLineInterface
def main():
#initialize
# Assumes Linux setup
cmd="cd ~/Documents; pwd 2>/dev/null >/tmp/life_directory; cd - > /dev/null"
    subprocess.check_output(cmd, shell=True)
    documents_directory_path = subprocess.check_output(
        "cat /tmp/life_directory", shell=True).decode().split()[0]
life_directory = documents_directory_path+"/RealLifeGTA/"
life_filename="gta.data"
life_complete_path=life_directory+life_filename
try:
life = _load_from_file(life_complete_path)
#print "Loading life from datastore"
    except Exception:
        # traceback.print_exc(file=sys.stdout)
        print("Couldn't load life from datastore, creating a new file at path: " + life_directory)
subprocess.check_output("mkdir -p "+life_directory,shell=True)
life = Life()
# start main loop
CommandLineInterface(life)._process_single_command()
#persist data
try:
_save_to_file(life, life_complete_path)
    except Exception:
traceback.print_exc(file=sys.stdout)
def _load_from_file(filename):
# If files don't exist, return empty list
with open(filename, 'rb') as f:
return_value = pickle.load(f)
return return_value
def _save_to_file(domain_object, filename):
with open(filename, 'wb') as f:
pickle.dump(domain_object, f)
if __name__ == "__main__":
main()
|