text (string) | meta (dict)
---|---
class Oveja:
def __init__(self, nombre, tipo):
self.nombre = nombre
self.tipo = tipo
    def set_nombre(self, nombre):
        self.nombre = nombre
def get_nombre(self):
return self.nombre
    def set_tipo(self, tipo):
        self.tipo = tipo
    def get_tipo(self):
        return self.tipo | {
"content_hash": "f31d7af52b0c45bd04f05784660feaba",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 37,
"avg_line_length": 19.5,
"alnum_prop": 0.5673076923076923,
"repo_name": "AnhellO/DAS_Sistemas",
"id": "06e2e9b42f011b346b2605f1903d09f397bbf6e9",
"size": "312",
"binary": false,
"copies": "1",
"ref": "refs/heads/ene-jun-2022",
"path": "Ene-Jun-2022/Practicas/1er Parcial/Práctica 6/prototype.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "8515"
},
{
"name": "Go",
"bytes": "25845"
},
{
"name": "HTML",
"bytes": "36671"
},
{
"name": "Python",
"bytes": "716604"
}
],
"symlink_target": ""
} |
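The `path` in the metadata above points at a Prototype-pattern exercise (`prototype.py`), so a brief usage sketch may help; the `copy.deepcopy` clone step and the sample values are assumptions for illustration, not part of the original file:

```python
import copy

# Hypothetical usage of the Oveja class above; cloning via copy.deepcopy
# is the usual Python shortcut for a Prototype-style copy.
original = Oveja('Dolly', 'Merino')
clon = copy.deepcopy(original)
clon.set_nombre('Dolly 2')
print(original.get_nombre(), clon.get_nombre())  # Dolly Dolly 2
```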
"""
Fetch two subtrees in parallel
++++++++++++++++++++++++++++++
Send a series of SNMP GETNEXT requests with the following options:
* with SNMPv1, community 'public'
* over IPv4/UDP
* to an Agent at 195.218.195.228:161
* for two OIDs in tuple form
* stop on end-of-mib condition for both OIDs
This script performs similarly to the following Net-SNMP command:
| $ snmpwalk -v1 -c public -ObentU 195.218.195.228 1.3.6.1.2.1.1 1.3.6.1.4.1.1
"""
from pysnmp.entity import engine, config
from pysnmp.carrier.asyncore.dgram import udp
from pysnmp.entity.rfc3413 import cmdgen
# Create SNMP engine instance
snmpEngine = engine.SnmpEngine()
#
# SNMPv1/2c setup
#
# SecurityName <-> CommunityName mapping
config.addV1System(snmpEngine, 'my-area', 'public')
# Specify security settings per SecurityName (SNMPv1 - 0, SNMPv2c - 1)
config.addTargetParams(snmpEngine, 'my-creds', 'my-area', 'noAuthNoPriv', 0)
#
# Setup transport endpoint and bind it with security settings yielding
# a target name
#
# UDP/IPv4
config.addTransport(
snmpEngine,
udp.domainName,
udp.UdpSocketTransport().openClientMode()
)
config.addTargetAddr(
snmpEngine, 'my-router',
udp.domainName, ('195.218.195.228', 161),
'my-creds'
)
# Error/response receiver
def cbFun(snmpEngine, sendRequestHandle, errorIndication,
errorStatus, errorIndex, varBindTable, cbCtx):
if errorIndication:
print(errorIndication)
return
# SNMPv1 response may contain noSuchName error *and* SNMPv2c exception,
# so we ignore noSuchName error here
if errorStatus and errorStatus != 2:
print('%s at %s' % (
errorStatus.prettyPrint(),
errorIndex and varBindTable[-1][int(errorIndex)-1][0] or '?'
)
)
return # stop on error
for varBindRow in varBindTable:
for oid, val in varBindRow:
print('%s = %s' % (oid.prettyPrint(), val.prettyPrint()))
return 1 # signal dispatcher to continue
# Prepare initial request to be sent
cmdgen.NextCommandGenerator().sendVarBinds(
snmpEngine,
'my-router',
None, '', # contextEngineId, contextName
[ ((1,3,6,1,2,1,1), None),
((1,3,6,1,4,1,1), None) ],
cbFun
)
# Run I/O dispatcher which would send pending queries and process responses
snmpEngine.transportDispatcher.runDispatcher()
| {
"content_hash": "b4cc1e6385c4220cdeed878af7001ee8",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 78,
"avg_line_length": 28.228915662650603,
"alnum_prop": 0.6816047801963295,
"repo_name": "filippog/pysnmp",
"id": "0a41a416c65da4ba5e75ca9798e6ad9cedb47bb0",
"size": "2343",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/v3arch/asyncore/manager/cmdgen/getnext-multiple-oids-to-eom.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "991968"
},
{
"name": "Shell",
"bytes": "686"
}
],
"symlink_target": ""
} |
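For comparison, the same walk can be expressed with pysnmp's synchronous high-level API. This is a sketch assuming the `pysnmp.hlapi` package that ships with the same library; `lexicographicMode=True` matches the script's walk-to-end-of-MIB behavior:

```python
from pysnmp.hlapi import (
    CommunityData, ContextData, ObjectIdentity, ObjectType,
    SnmpEngine, UdpTransportTarget, nextCmd,
)

iterator = nextCmd(
    SnmpEngine(),
    CommunityData('public', mpModel=0),            # mpModel=0 selects SNMPv1
    UdpTransportTarget(('195.218.195.228', 161)),
    ContextData(),
    ObjectType(ObjectIdentity('1.3.6.1.2.1.1')),
    ObjectType(ObjectIdentity('1.3.6.1.4.1.1')),
    lexicographicMode=True,                        # keep walking to end-of-MIB
)

for errorIndication, errorStatus, errorIndex, varBinds in iterator:
    if errorIndication:
        print(errorIndication)
        break
    if errorStatus:
        print('%s at %s' % (errorStatus.prettyPrint(),
                            errorIndex and varBinds[int(errorIndex) - 1][0] or '?'))
        break
    for varBind in varBinds:
        print(' = '.join(x.prettyPrint() for x in varBind))
```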
def app(environ, start_response):
    status = '200 OK'
    headers = [('Content-Type', 'text/plain')]
    qstr = environ['QUERY_STRING']
    body = qstr.replace('&', '\n')
    start_response(status, headers)
    # WSGI requires an iterable of bytes, not a bare str
    return [body.encode('utf-8')]
| {
"content_hash": "703e1901fd00d5b70322d239d772dae3",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 44,
"avg_line_length": 30.428571428571427,
"alnum_prop": 0.647887323943662,
"repo_name": "k1r8r0wn/nginx_py",
"id": "44a7831a212f869264480df76e3c239076bd144e",
"size": "213",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hello.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Nginx",
"bytes": "534"
},
{
"name": "Python",
"bytes": "213"
},
{
"name": "Shell",
"bytes": "257"
}
],
"symlink_target": ""
} |
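A minimal way to exercise the app locally, assuming Python's standard-library `wsgiref` server (the repo name `nginx_py` suggests it normally runs behind nginx, so this harness is purely illustrative):

```python
from wsgiref.simple_server import make_server

from hello import app  # module name taken from the path in the metadata above

with make_server('', 8000, app) as httpd:
    print('Serving on http://localhost:8000/?a=1&b=2')
    httpd.serve_forever()  # each request echoes the query string, one pair per line
```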
""" NetCollector client."""
# pylint: disable=C0103
import argparse
import getpass
import net_collector
import netspot_settings
from spotmax import SpotMAX
# Arguments
parser = argparse.ArgumentParser(description='NetSPOT Network Collector')
parser.add_argument('-a', '--asset', help='Asset', required=False)
# Username/password
parser.add_argument('-u', '--username', help='Username for device login', required=False)
parser.add_argument('-p', '--password', help='Password for device login', required=False)
parser.add_argument('-k', '--sshkey', help='Path to SSH key file', required=False)
args = parser.parse_args()
def ask_user_passwd():
"""Ask user for username and password.
Returns:
(username, password): tuple
"""
if not args.username:
    username = input('Username: ')
else:
username = args.username
# Get password
password = getpass.getpass()
return (username, password)
class MACS(SpotMAX):
"""Class that interacts with the MongoDB backend."""
def __init__(self, database=netspot_settings.DATABASE, collection=netspot_settings.COLL_MACS):
SpotMAX.__init__(self, database, collection)
def add_macs(self, device_macs):
"""Add MACs to database.
    Args:
      device_macs: dict with keys:
        asset: asset name
        macs: list of MAC entries
    """
# Add asset if it's a new asset
if not self._exist(device_macs['asset'], key='asset'):
# Add asset to database
self.collection.insert_one(device_macs)
else:
# Update existing asset with the latest data
update = {
'$set': device_macs,
'$currentDate': {'lastModified': True}
}
self.collection.update_one({'asset': device_macs['asset']}, update)
def main():
"""Main."""
if args.asset:
# Get username/password
if not args.username or (not args.sshkey and not args.password):
      print('Please specify device username/password.')
username, password = ask_user_passwd()
else:
username = args.username
password = args.password
# Collect data from asset
device = net_collector.NetCollector(args.asset, username, password, args.sshkey)
# Add collected data to the database
macs = MACS()
macs.add_macs(device.device_macs)
    # Update IP usage records with the same collected data
    ip_usage = net_collector.IPUsage(device.device_macs)
    ip_usage.uppdate_ip()
else:
    print('Need more arguments. Please try -h')
if __name__ == '__main__':
main()
| {
"content_hash": "176a6bea79b2a36e5914b87147e29eb8",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 96,
"avg_line_length": 26,
"alnum_prop": 0.6612111292962357,
"repo_name": "MaxIV-KitsControls/netspot",
"id": "c4d7d401816fc3eee1fc539e39271e17a7b82ff4",
"size": "2467",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "netspot/lib/spotmax/nc_client.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "77841"
},
{
"name": "HTML",
"bytes": "43477"
},
{
"name": "JavaScript",
"bytes": "114144"
},
{
"name": "Python",
"bytes": "213676"
}
],
"symlink_target": ""
} |
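Based on the `add_macs` docstring and the `asset` upsert key, the expected payload looks roughly like the sketch below. Only the `asset` key is confirmed by the code above; the inner MAC-entry fields are hypothetical, since `net_collector` is not shown, and a reachable MongoDB configured via `netspot_settings` is assumed:

```python
# Illustrative payload for MACS.add_macs(); MAC-entry fields are assumed.
device_macs = {
    'asset': 'switch-01',
    'macs': [
        {'mac': '00:11:22:33:44:55', 'interface': 'ge-0/0/1'},
    ],
}
MACS().add_macs(device_macs)  # inserts on first sight, upserts thereafter
```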
from flask import Flask, request, render_template
import json
from watsoncloud import downloader
from watsoncloud import transcribe
from watsoncloud import compilator
import pafy
from search import audioJSON, videoJSON
from clarifai_v1 import fetch_video_tags
from flask_cors import CORS, cross_origin
app = Flask(__name__)
base_url = "https://www.youtube.com/watch?v="
project_base = "./watsoncloud/projects/"
CORS(app)
@app.route("/watch", methods=['GET'])
def home():
video_id = request.args.get('v')
if video_id is None:
return "Invalid parameters, usage: http://youtubeseek.com/watch?v=abcdefg"
# Download file and get path
# TODO: Lowest quality MP4 download
downloaded_file = downloader.get_video(base_url+video_id, video_id)
print("DL file:"+downloaded_file)
# Transcribe the downloaded file
transcribed_file = transcribe.speech_to_text(downloaded_file)
print("TC file:"+transcribed_file)
# Compile words for transcribed file
compiled_file = compilator.compile_word_transcript(transcribed_file)
print("CL file:"+compiled_file)
    # TODO: return [{keyword: timestamp}] from the compiled file; for now echo the id
    return video_id
@app.route("/audiosearch",methods=['GET'])
def audio_search():
keywords = request.args.get('q')
video_id = request.args.get('v')
# Form youtube url
url = base_url+video_id
# Fetch video transcript_filename
video = pafy.new(url)
title = video.title
filename = title + '-' + video_id + '.json'
filepath = project_base+filename
# Form saved file name
if keywords is None:
return "Invalid parameters, usage: http://youtubeseek.com/audiosearch?v=abcedfg&q=man,woman"
result = audioJSON(filepath, keywords.split(","))
# @return: dict {keyword1:[ts1,ts2,ts3],keyword2:[ts1,ts2,ts3],keyword3:[ts1,ts2,ts3]}
return json.dumps(result)
@app.route("/videosearch",methods=['GET'])
def video_search():
keywords = request.args.get('q')
video_id = request.args.get('v')
# Form youtube url
url = base_url+video_id
video_tags = fetch_video_tags(url, keywords)
# Form saved file name
if keywords is None:
return "Invalid parameters, usage: http://youtubeseek.com/audiosearch?v=abcedfg&q=man,woman"
result = videoJSON(video_tags, keywords.split(","))
# @return: dict {keyword1:[ts1,ts2,ts3],keyword2:[ts1,ts2,ts3],keyword3:[ts1,ts2,ts3]}
return json.dumps(result)
if __name__ == "__main__":
app.run(host='0.0.0.0', port=5000, debug=True)
| {
"content_hash": "faf20f512b0bb74d7d9d71a8597e7797",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 100,
"avg_line_length": 35.55714285714286,
"alnum_prop": 0.6882282040980313,
"repo_name": "audip/youtubeseek",
"id": "acc242348cb3bab72cf460c2de0c80424442dc52",
"size": "2489",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1257"
},
{
"name": "Python",
"bytes": "49829"
}
],
"symlink_target": ""
} |
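Example client calls against a local instance of the server above; `requests` is an assumed extra dependency used only for illustration, and the video id and keywords are placeholders:

```python
import requests

base = 'http://localhost:5000'
params = {'v': 'dQw4w9WgXcQ', 'q': 'man,woman'}
# Each endpoint returns a JSON dict mapping keywords to timestamp lists
print(requests.get(base + '/audiosearch', params=params).json())
print(requests.get(base + '/videosearch', params=params).json())
```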
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.api import metric_pb2 # type: ignore
from google.api import monitored_resource_pb2 # type: ignore
from google.cloud.monitoring_v3.types import metric_service
from google.protobuf import empty_pb2 # type: ignore
from .base import MetricServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import MetricServiceGrpcTransport
class MetricServiceGrpcAsyncIOTransport(MetricServiceTransport):
"""gRPC AsyncIO backend transport for MetricService.
Manages metric descriptors, monitored resource descriptors,
and time series data.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "monitoring.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
def __init__(
self,
*,
host: str = "monitoring.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
api_audience: Optional[str] = None,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
api_audience=api_audience,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def list_monitored_resource_descriptors(
self,
) -> Callable[
[metric_service.ListMonitoredResourceDescriptorsRequest],
Awaitable[metric_service.ListMonitoredResourceDescriptorsResponse],
]:
r"""Return a callable for the list monitored resource
descriptors method over gRPC.
Lists monitored resource descriptors that match a
filter. This method does not require a Workspace.
Returns:
Callable[[~.ListMonitoredResourceDescriptorsRequest],
Awaitable[~.ListMonitoredResourceDescriptorsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_monitored_resource_descriptors" not in self._stubs:
self._stubs[
"list_monitored_resource_descriptors"
] = self.grpc_channel.unary_unary(
"/google.monitoring.v3.MetricService/ListMonitoredResourceDescriptors",
request_serializer=metric_service.ListMonitoredResourceDescriptorsRequest.serialize,
response_deserializer=metric_service.ListMonitoredResourceDescriptorsResponse.deserialize,
)
return self._stubs["list_monitored_resource_descriptors"]
@property
def get_monitored_resource_descriptor(
self,
) -> Callable[
[metric_service.GetMonitoredResourceDescriptorRequest],
Awaitable[monitored_resource_pb2.MonitoredResourceDescriptor],
]:
r"""Return a callable for the get monitored resource
descriptor method over gRPC.
Gets a single monitored resource descriptor. This
method does not require a Workspace.
Returns:
Callable[[~.GetMonitoredResourceDescriptorRequest],
Awaitable[~.MonitoredResourceDescriptor]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_monitored_resource_descriptor" not in self._stubs:
self._stubs[
"get_monitored_resource_descriptor"
] = self.grpc_channel.unary_unary(
"/google.monitoring.v3.MetricService/GetMonitoredResourceDescriptor",
request_serializer=metric_service.GetMonitoredResourceDescriptorRequest.serialize,
response_deserializer=monitored_resource_pb2.MonitoredResourceDescriptor.FromString,
)
return self._stubs["get_monitored_resource_descriptor"]
@property
def list_metric_descriptors(
self,
) -> Callable[
[metric_service.ListMetricDescriptorsRequest],
Awaitable[metric_service.ListMetricDescriptorsResponse],
]:
r"""Return a callable for the list metric descriptors method over gRPC.
Lists metric descriptors that match a filter. This
method does not require a Workspace.
Returns:
Callable[[~.ListMetricDescriptorsRequest],
Awaitable[~.ListMetricDescriptorsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_metric_descriptors" not in self._stubs:
self._stubs["list_metric_descriptors"] = self.grpc_channel.unary_unary(
"/google.monitoring.v3.MetricService/ListMetricDescriptors",
request_serializer=metric_service.ListMetricDescriptorsRequest.serialize,
response_deserializer=metric_service.ListMetricDescriptorsResponse.deserialize,
)
return self._stubs["list_metric_descriptors"]
@property
def get_metric_descriptor(
self,
) -> Callable[
[metric_service.GetMetricDescriptorRequest],
Awaitable[metric_pb2.MetricDescriptor],
]:
r"""Return a callable for the get metric descriptor method over gRPC.
Gets a single metric descriptor. This method does not
require a Workspace.
Returns:
Callable[[~.GetMetricDescriptorRequest],
Awaitable[~.MetricDescriptor]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_metric_descriptor" not in self._stubs:
self._stubs["get_metric_descriptor"] = self.grpc_channel.unary_unary(
"/google.monitoring.v3.MetricService/GetMetricDescriptor",
request_serializer=metric_service.GetMetricDescriptorRequest.serialize,
response_deserializer=metric_pb2.MetricDescriptor.FromString,
)
return self._stubs["get_metric_descriptor"]
@property
def create_metric_descriptor(
self,
) -> Callable[
[metric_service.CreateMetricDescriptorRequest],
Awaitable[metric_pb2.MetricDescriptor],
]:
r"""Return a callable for the create metric descriptor method over gRPC.
Creates a new metric descriptor. The creation is executed
asynchronously and callers may check the returned operation to
track its progress. User-created metric descriptors define
`custom
metrics <https://cloud.google.com/monitoring/custom-metrics>`__.
Returns:
Callable[[~.CreateMetricDescriptorRequest],
Awaitable[~.MetricDescriptor]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_metric_descriptor" not in self._stubs:
self._stubs["create_metric_descriptor"] = self.grpc_channel.unary_unary(
"/google.monitoring.v3.MetricService/CreateMetricDescriptor",
request_serializer=metric_service.CreateMetricDescriptorRequest.serialize,
response_deserializer=metric_pb2.MetricDescriptor.FromString,
)
return self._stubs["create_metric_descriptor"]
@property
def delete_metric_descriptor(
self,
) -> Callable[
[metric_service.DeleteMetricDescriptorRequest], Awaitable[empty_pb2.Empty]
]:
r"""Return a callable for the delete metric descriptor method over gRPC.
Deletes a metric descriptor. Only user-created `custom
metrics <https://cloud.google.com/monitoring/custom-metrics>`__
can be deleted.
Returns:
Callable[[~.DeleteMetricDescriptorRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_metric_descriptor" not in self._stubs:
self._stubs["delete_metric_descriptor"] = self.grpc_channel.unary_unary(
"/google.monitoring.v3.MetricService/DeleteMetricDescriptor",
request_serializer=metric_service.DeleteMetricDescriptorRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_metric_descriptor"]
@property
def list_time_series(
self,
) -> Callable[
[metric_service.ListTimeSeriesRequest],
Awaitable[metric_service.ListTimeSeriesResponse],
]:
r"""Return a callable for the list time series method over gRPC.
Lists time series that match a filter. This method
does not require a Workspace.
Returns:
Callable[[~.ListTimeSeriesRequest],
Awaitable[~.ListTimeSeriesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_time_series" not in self._stubs:
self._stubs["list_time_series"] = self.grpc_channel.unary_unary(
"/google.monitoring.v3.MetricService/ListTimeSeries",
request_serializer=metric_service.ListTimeSeriesRequest.serialize,
response_deserializer=metric_service.ListTimeSeriesResponse.deserialize,
)
return self._stubs["list_time_series"]
@property
def create_time_series(
self,
) -> Callable[[metric_service.CreateTimeSeriesRequest], Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the create time series method over gRPC.
Creates or adds data to one or more time series.
The response is empty if all time series in the request
were written. If any time series could not be written, a
corresponding failure message is included in the error
response.
Returns:
Callable[[~.CreateTimeSeriesRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_time_series" not in self._stubs:
self._stubs["create_time_series"] = self.grpc_channel.unary_unary(
"/google.monitoring.v3.MetricService/CreateTimeSeries",
request_serializer=metric_service.CreateTimeSeriesRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["create_time_series"]
@property
def create_service_time_series(
self,
) -> Callable[[metric_service.CreateTimeSeriesRequest], Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the create service time series method over gRPC.
Creates or adds data to one or more service time series. A
service time series is a time series for a metric from a Google
Cloud service. The response is empty if all time series in the
request were written. If any time series could not be written, a
corresponding failure message is included in the error response.
This endpoint rejects writes to user-defined metrics. This
method is only for use by Google Cloud services. Use
[projects.timeSeries.create][google.monitoring.v3.MetricService.CreateTimeSeries]
instead.
Returns:
Callable[[~.CreateTimeSeriesRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_service_time_series" not in self._stubs:
self._stubs["create_service_time_series"] = self.grpc_channel.unary_unary(
"/google.monitoring.v3.MetricService/CreateServiceTimeSeries",
request_serializer=metric_service.CreateTimeSeriesRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["create_service_time_series"]
def close(self):
return self.grpc_channel.close()
__all__ = ("MetricServiceGrpcAsyncIOTransport",)
| {
"content_hash": "40e505b3b5ad21cc45df5e1ec9f1fbfc",
"timestamp": "",
"source": "github",
"line_count": 511,
"max_line_length": 106,
"avg_line_length": 45.06262230919765,
"alnum_prop": 0.6290007382637773,
"repo_name": "googleapis/python-monitoring",
"id": "3d7fda0e8e5f56e137221b3996987aed3ce96666",
"size": "23627",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google/cloud/monitoring_v3/services/metric_service/transports/grpc_asyncio.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "2375818"
},
{
"name": "Shell",
"bytes": "30672"
}
],
"symlink_target": ""
} |
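This transport is normally selected implicitly by the async client rather than constructed by hand. Below is a hedged sketch of that path using the public `google-cloud-monitoring` surface; the project id is a placeholder:

```python
import asyncio

from google.cloud import monitoring_v3


async def main(project_id: str) -> None:
    # MetricServiceAsyncClient picks MetricServiceGrpcAsyncIOTransport
    # by default, so no explicit transport wiring is needed.
    client = monitoring_v3.MetricServiceAsyncClient()
    pager = await client.list_metric_descriptors(name=f"projects/{project_id}")
    async for descriptor in pager:
        print(descriptor.type)


asyncio.run(main("my-project"))  # "my-project" is a placeholder project id
```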
import os
import httplib2
from apiclient.discovery import build
from oauth2client.file import Storage
from datetime import datetime
from jobs import AbstractJob
class Calendar(AbstractJob):
def __init__(self, conf):
self.interval = conf['interval']
self.timeout = conf.get('timeout')
def _auth(self):
credentials_file = os.path.abspath(os.path.join(
os.path.dirname(__file__), '.calendar.json'))
storage = Storage(credentials_file)
credentials = storage.get()
http = httplib2.Http(timeout=self.timeout)
http = credentials.authorize(http)
self.service = build(serviceName='calendar', version='v3', http=http)
def _parse(self, items):
events = []
for item in items:
date = item['start'].get('dateTime') or item['start'].get('date')
events.append({
'id': item['id'],
'summary': item['summary'],
'date': date
})
return events
def get(self):
if not hasattr(self, 'service'):
self._auth()
now = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
result = self.service.events().list(calendarId='primary',
orderBy='startTime',
singleEvents=True,
timeMin=now).execute()
        # 'items' may be absent when the calendar has no upcoming events
        return {'events': self._parse(result.get('items', []))}
| {
"content_hash": "6dde868c9a26134bf114306370d1730f",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 77,
"avg_line_length": 33.2,
"alnum_prop": 0.5394912985274432,
"repo_name": "martinp/jarvis2",
"id": "ef1726965309312c9fe9ed2def3f796d63578f89",
"size": "1519",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jarvis/jobs/calendar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4635"
},
{
"name": "HTML",
"bytes": "5098"
},
{
"name": "JavaScript",
"bytes": "19849"
},
{
"name": "Makefile",
"bytes": "788"
},
{
"name": "Python",
"bytes": "39444"
}
],
"symlink_target": ""
} |
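A sketch of driving the job directly. The config keys mirror what `__init__` reads, the values are illustrative, the import path is inferred from the metadata above, and a valid `.calendar.json` OAuth credentials file is assumed to exist next to the module:

```python
from jobs.calendar import Calendar  # import path inferred from the repo layout

job = Calendar({'interval': 600, 'timeout': 10})
data = job.get()  # authenticates lazily on the first call
for event in data['events']:
    print(event['date'], event['summary'])
```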
import sys
from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_create_request(
resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_update_request(
resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
def build_get_request(
resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_delete_request(
resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
def build_set_legal_hold_request(
resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/setLegalHold",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_clear_legal_hold_request(
resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/clearLegalHold",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_create_or_update_immutability_policy_request(
resource_group_name: str,
account_name: str,
container_name: str,
immutability_policy_name: Union[str, _models.Enum13],
subscription_id: str,
*,
if_match: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3),
"immutabilityPolicyName": _SERIALIZER.url("immutability_policy_name", immutability_policy_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if if_match is not None:
_headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_get_immutability_policy_request(
resource_group_name: str,
account_name: str,
container_name: str,
immutability_policy_name: Union[str, _models.Enum13],
subscription_id: str,
*,
if_match: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3),
"immutabilityPolicyName": _SERIALIZER.url("immutability_policy_name", immutability_policy_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if if_match is not None:
_headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_delete_immutability_policy_request(
resource_group_name: str,
account_name: str,
container_name: str,
immutability_policy_name: Union[str, _models.Enum13],
subscription_id: str,
*,
if_match: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3),
"immutabilityPolicyName": _SERIALIZER.url("immutability_policy_name", immutability_policy_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_lock_immutability_policy_request(
resource_group_name: str,
account_name: str,
container_name: str,
subscription_id: str,
*,
if_match: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/lock",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_extend_immutability_policy_request(
resource_group_name: str,
account_name: str,
container_name: str,
subscription_id: str,
*,
if_match: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/extend",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_lease_request(
resource_group_name: str, account_name: str, container_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/lease",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"containerName": _SERIALIZER.url("container_name", container_name, "str", max_length=63, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
class BlobContainersOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.storage.v2018_03_01_preview.StorageManagementClient`'s
:attr:`blob_containers` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.ListContainerItems:
"""Lists all containers and does not support a prefix like data plane. Also SRP today does not
return continuation token.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ListContainerItems or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.ListContainerItems
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ListContainerItems]
request = build_list_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("ListContainerItems", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers"} # type: ignore
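    # Usage sketch (illustrative only, not part of the generated client): one way to call
    # list() through the versioned client named in the class docstring. The credential type,
    # resource group, and account names below are hypothetical placeholders, and it is
    # assumed that ListContainerItems exposes its items through a ``value`` attribute.
    #
    #     from azure.identity import DefaultAzureCredential
    #     from azure.mgmt.storage.v2018_03_01_preview import StorageManagementClient
    #
    #     client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    #     result = client.blob_containers.list("my-resource-group", "mystorageaccount")
    #     for container in result.value or []:
    #         print(container.name)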
@overload
def create(
self,
resource_group_name: str,
account_name: str,
container_name: str,
blob_container: _models.BlobContainer,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.BlobContainer:
"""Creates a new container under the specified account as described by request body. The container
resource includes metadata and properties for that container. It does not include a list of the
blobs contained by the container.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param blob_container: Properties of the blob container to create. Required.
:type blob_container: ~azure.mgmt.storage.v2018_03_01_preview.models.BlobContainer
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.BlobContainer
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create(
self,
resource_group_name: str,
account_name: str,
container_name: str,
blob_container: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.BlobContainer:
"""Creates a new container under the specified account as described by request body. The container
resource includes metadata and properties for that container. It does not include a list of the
blobs contained by the container.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param blob_container: Properties of the blob container to create. Required.
:type blob_container: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.BlobContainer
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create(
self,
resource_group_name: str,
account_name: str,
container_name: str,
blob_container: Union[_models.BlobContainer, IO],
**kwargs: Any
) -> _models.BlobContainer:
"""Creates a new container under the specified account as described by request body. The container
resource includes metadata and properties for that container. It does not include a list of the
blobs contained by the container.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param blob_container: Properties of the blob container to create. Is either a model type or
an IO type. Required.
:type blob_container: ~azure.mgmt.storage.v2018_03_01_preview.models.BlobContainer or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.BlobContainer
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.BlobContainer]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(blob_container, (IO, bytes)):
_content = blob_container
else:
_json = self._serialize.body(blob_container, "BlobContainer")
request = build_create_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("BlobContainer", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}"} # type: ignore
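    # Usage sketch (illustrative only): creating a container with a model-typed body, reusing
    # the ``client`` constructed in the list() sketch above. ``_models`` stands for this
    # package's models namespace; the metadata values are hypothetical, and it is assumed
    # the BlobContainer model accepts a ``metadata`` keyword.
    #
    #     container = client.blob_containers.create(
    #         "my-resource-group", "mystorageaccount", "my-container",
    #         blob_container=_models.BlobContainer(metadata={"env": "test"}),
    #     )
    #     print(container.id)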
@overload
def update(
self,
resource_group_name: str,
account_name: str,
container_name: str,
blob_container: _models.BlobContainer,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.BlobContainer:
"""Updates container properties as specified in request body. Properties not mentioned in the
request will be unchanged. Update fails if the specified container doesn't already exist.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param blob_container: Properties to update for the blob container. Required.
:type blob_container: ~azure.mgmt.storage.v2018_03_01_preview.models.BlobContainer
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.BlobContainer
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def update(
self,
resource_group_name: str,
account_name: str,
container_name: str,
blob_container: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.BlobContainer:
"""Updates container properties as specified in request body. Properties not mentioned in the
request will be unchanged. Update fails if the specified container doesn't already exist.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param blob_container: Properties to update for the blob container. Required.
:type blob_container: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.BlobContainer
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def update(
self,
resource_group_name: str,
account_name: str,
container_name: str,
blob_container: Union[_models.BlobContainer, IO],
**kwargs: Any
) -> _models.BlobContainer:
"""Updates container properties as specified in request body. Properties not mentioned in the
request will be unchanged. Update fails if the specified container doesn't already exist.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param blob_container: Properties to update for the blob container. Is either a model type or
an IO type. Required.
:type blob_container: ~azure.mgmt.storage.v2018_03_01_preview.models.BlobContainer or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.BlobContainer
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.BlobContainer]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(blob_container, (IO, bytes)):
_content = blob_container
else:
_json = self._serialize.body(blob_container, "BlobContainer")
request = build_update_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("BlobContainer", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}"} # type: ignore
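    # Usage sketch (illustrative only): patching container properties with update(), reusing
    # the ``client`` from the list() sketch. It is assumed the BlobContainer model accepts a
    # ``public_access`` keyword taking one of the PublicAccess values.
    #
    #     updated = client.blob_containers.update(
    #         "my-resource-group", "mystorageaccount", "my-container",
    #         blob_container=_models.BlobContainer(public_access="None"),
    #     )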
@distributed_trace
def get(
self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any
) -> _models.BlobContainer:
"""Gets properties of a specified container.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobContainer or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.BlobContainer
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.BlobContainer]
request = build_get_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("BlobContainer", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}"} # type: ignore
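    # Usage sketch (illustrative only): reading back a single container's properties with
    # get(), reusing the ``client`` from the list() sketch; all resource names are
    # placeholders.
    #
    #     container = client.blob_containers.get(
    #         "my-resource-group", "mystorageaccount", "my-container"
    #     )
    #     print(container.public_access, container.metadata)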
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, account_name: str, container_name: str, **kwargs: Any
) -> None:
"""Deletes specified container under its account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_delete_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}"} # type: ignore
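    # Usage sketch (illustrative only): delete() returns None on success (HTTP 200 or 204),
    # so there is nothing to inspect unless a ``cls`` callback is supplied.
    #
    #     client.blob_containers.delete(
    #         "my-resource-group", "mystorageaccount", "my-container"
    #     )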
@overload
def set_legal_hold(
self,
resource_group_name: str,
account_name: str,
container_name: str,
legal_hold: _models.LegalHold,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.LegalHold:
"""Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold
follows an append pattern and does not clear out the existing tags that are not specified in
the request.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param legal_hold: The LegalHold property that will be set on a blob container. Required.
:type legal_hold: ~azure.mgmt.storage.v2018_03_01_preview.models.LegalHold
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LegalHold or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.LegalHold
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def set_legal_hold(
self,
resource_group_name: str,
account_name: str,
container_name: str,
legal_hold: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.LegalHold:
"""Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold
follows an append pattern and does not clear out the existing tags that are not specified in
the request.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param legal_hold: The LegalHold property that will be set on a blob container. Required.
:type legal_hold: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LegalHold or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.LegalHold
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def set_legal_hold(
self,
resource_group_name: str,
account_name: str,
container_name: str,
legal_hold: Union[_models.LegalHold, IO],
**kwargs: Any
) -> _models.LegalHold:
"""Sets legal hold tags. Setting the same tag results in an idempotent operation. SetLegalHold
follows an append pattern and does not clear out the existing tags that are not specified in
the request.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param legal_hold: The LegalHold property that will be set on a blob container. Is either a
model type or an IO type. Required.
:type legal_hold: ~azure.mgmt.storage.v2018_03_01_preview.models.LegalHold or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LegalHold or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.LegalHold
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.LegalHold]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(legal_hold, (IO, bytes)):
_content = legal_hold
else:
_json = self._serialize.body(legal_hold, "LegalHold")
request = build_set_legal_hold_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.set_legal_hold.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("LegalHold", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
set_legal_hold.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/setLegalHold"} # type: ignore
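    # Usage sketch (illustrative only): appending legal hold tags, reusing the ``client``
    # from the list() sketch. The tag names are hypothetical; it is assumed the LegalHold
    # model takes a required ``tags`` list and reports ``has_legal_hold`` on the response.
    #
    #     hold = client.blob_containers.set_legal_hold(
    #         "my-resource-group", "mystorageaccount", "my-container",
    #         legal_hold=_models.LegalHold(tags=["audit", "litigation"]),
    #     )
    #     print(hold.has_legal_hold)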
@overload
def clear_legal_hold(
self,
resource_group_name: str,
account_name: str,
container_name: str,
legal_hold: _models.LegalHold,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.LegalHold:
"""Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent
operation. ClearLegalHold clears out only the specified tags in the request.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param legal_hold: The LegalHold property that will be cleared from a blob container. Required.
:type legal_hold: ~azure.mgmt.storage.v2018_03_01_preview.models.LegalHold
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LegalHold or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.LegalHold
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def clear_legal_hold(
self,
resource_group_name: str,
account_name: str,
container_name: str,
legal_hold: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.LegalHold:
"""Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent
operation. ClearLegalHold clears out only the specified tags in the request.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param legal_hold: The LegalHold property that will be cleared from a blob container. Required.
:type legal_hold: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LegalHold or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.LegalHold
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def clear_legal_hold(
self,
resource_group_name: str,
account_name: str,
container_name: str,
legal_hold: Union[_models.LegalHold, IO],
**kwargs: Any
) -> _models.LegalHold:
"""Clears legal hold tags. Clearing the same or non-existent tag results in an idempotent
operation. ClearLegalHold clears out only the specified tags in the request.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param legal_hold: The LegalHold property that will be cleared from a blob container. Is either
a model type or an IO type. Required.
:type legal_hold: ~azure.mgmt.storage.v2018_03_01_preview.models.LegalHold or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LegalHold or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.LegalHold
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.LegalHold]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(legal_hold, (IO, bytes)):
_content = legal_hold
else:
_json = self._serialize.body(legal_hold, "LegalHold")
request = build_clear_legal_hold_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.clear_legal_hold.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("LegalHold", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
clear_legal_hold.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/clearLegalHold"} # type: ignore
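    # Usage sketch (illustrative only): clearing a previously set tag; only the tags named
    # in the request are removed. The ``client`` and tag value mirror the set_legal_hold
    # sketch above.
    #
    #     cleared = client.blob_containers.clear_legal_hold(
    #         "my-resource-group", "mystorageaccount", "my-container",
    #         legal_hold=_models.LegalHold(tags=["audit"]),
    #     )
    #     print(cleared.tags)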
@overload
def create_or_update_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
immutability_policy_name: Union[str, _models.Enum13],
if_match: Optional[str] = None,
parameters: Optional[_models.ImmutabilityPolicy] = None,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ImmutabilityPolicy:
"""Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but
not required for this operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param immutability_policy_name: The name of the blob container immutabilityPolicy within the
specified storage account. The only supported name is "default". Required.
:type immutability_policy_name: str or ~azure.mgmt.storage.v2018_03_01_preview.models.Enum13
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied. Default value is None.
:type if_match: str
:param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob
container. Default value is None.
:type parameters: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_or_update_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
immutability_policy_name: Union[str, _models.Enum13],
if_match: Optional[str] = None,
parameters: Optional[IO] = None,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ImmutabilityPolicy:
"""Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but
not required for this operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param immutability_policy_name: The name of the blob container immutabilityPolicy within the
specified storage account. The only supported name is "default". Required.
:type immutability_policy_name: str or ~azure.mgmt.storage.v2018_03_01_preview.models.Enum13
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied. Default value is None.
:type if_match: str
:param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob
container. Default value is None.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_or_update_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
immutability_policy_name: Union[str, _models.Enum13],
if_match: Optional[str] = None,
parameters: Optional[Union[_models.ImmutabilityPolicy, IO]] = None,
**kwargs: Any
) -> _models.ImmutabilityPolicy:
"""Creates or updates an unlocked immutability policy. ETag in If-Match is honored if given but
not required for this operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param immutability_policy_name: The name of the blob container immutabilityPolicy within the
specified storage account. The only supported name is "default". Required.
:type immutability_policy_name: str or ~azure.mgmt.storage.v2018_03_01_preview.models.Enum13
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied. Default value is None.
:type if_match: str
:param parameters: The ImmutabilityPolicy Properties that will be created or updated to a blob
container. Is either a model type or an IO type. Default value is None.
:type parameters: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ImmutabilityPolicy]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
if parameters is not None:
    _json = self._serialize.body(parameters, "ImmutabilityPolicy")
request = build_create_or_update_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
immutability_policy_name=immutability_policy_name,
subscription_id=self._config.subscription_id,
if_match=if_match,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update_immutability_policy.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
create_or_update_immutability_policy.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}"} # type: ignore
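    # Usage sketch (illustrative only): creating an unlocked policy, reusing the ``client``
    # from the list() sketch. The retention period is a placeholder; it is assumed the
    # ImmutabilityPolicy model takes ``immutability_period_since_creation_in_days`` and
    # that the returned policy carries an ``etag`` for later If-Match headers.
    #
    #     policy = client.blob_containers.create_or_update_immutability_policy(
    #         "my-resource-group", "mystorageaccount", "my-container",
    #         immutability_policy_name="default",
    #         parameters=_models.ImmutabilityPolicy(
    #             immutability_period_since_creation_in_days=7
    #         ),
    #     )
    #     etag = policy.etag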
@distributed_trace
def get_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
immutability_policy_name: Union[str, _models.Enum13],
if_match: Optional[str] = None,
**kwargs: Any
) -> _models.ImmutabilityPolicy:
"""Gets the existing immutability policy along with the corresponding ETag in response headers and
body.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param immutability_policy_name: The name of the blob container immutabilityPolicy within the
specified storage account. The only supported name is "default". Required.
:type immutability_policy_name: str or ~azure.mgmt.storage.v2018_03_01_preview.models.Enum13
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied. Default value is None.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ImmutabilityPolicy]
request = build_get_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
immutability_policy_name=immutability_policy_name,
subscription_id=self._config.subscription_id,
if_match=if_match,
api_version=api_version,
template_url=self.get_immutability_policy.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
get_immutability_policy.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}"} # type: ignore
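    # Usage sketch (illustrative only): fetching the current policy and its ETag, reusing
    # the ``client`` from the list() sketch.
    #
    #     policy = client.blob_containers.get_immutability_policy(
    #         "my-resource-group", "mystorageaccount", "my-container",
    #         immutability_policy_name="default",
    #     )
    #     print(policy.immutability_period_since_creation_in_days, policy.etag)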
@distributed_trace
def delete_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
immutability_policy_name: Union[str, _models.Enum13],
if_match: str,
**kwargs: Any
) -> _models.ImmutabilityPolicy:
"""Aborts an unlocked immutability policy. The response of delete has
immutabilityPeriodSinceCreationInDays set to 0. ETag in If-Match is required for this
operation. Deleting a locked immutability policy is not allowed, only way is to delete the
container after deleting all blobs inside the container.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param immutability_policy_name: The name of the blob container immutabilityPolicy within the
specified storage account. The only supported name is "default". Required.
:type immutability_policy_name: str or ~azure.mgmt.storage.v2018_03_01_preview.models.Enum13
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists.
Required.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ImmutabilityPolicy]
request = build_delete_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
immutability_policy_name=immutability_policy_name,
subscription_id=self._config.subscription_id,
if_match=if_match,
api_version=api_version,
template_url=self.delete_immutability_policy.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
delete_immutability_policy.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/{immutabilityPolicyName}"} # type: ignore
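    # Usage sketch (illustrative only): an unlocked policy can only be deleted with the
    # ETag from a prior get_immutability_policy() call, as in the sketch above.
    #
    #     client.blob_containers.delete_immutability_policy(
    #         "my-resource-group", "mystorageaccount", "my-container",
    #         immutability_policy_name="default",
    #         if_match=policy.etag,
    #     )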
@distributed_trace
def lock_immutability_policy(
self, resource_group_name: str, account_name: str, container_name: str, if_match: str, **kwargs: Any
) -> _models.ImmutabilityPolicy:
"""Sets the ImmutabilityPolicy to Locked state. The only action allowed on a Locked policy is
ExtendImmutabilityPolicy action. ETag in If-Match is required for this operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists.
Required.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ImmutabilityPolicy]
request = build_lock_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
if_match=if_match,
api_version=api_version,
template_url=self.lock_immutability_policy.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
lock_immutability_policy.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/lock"} # type: ignore
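    # Usage sketch (illustrative only): locking is irreversible, so the ETag gate is
    # mandatory; ``policy.etag`` refers to the value fetched in the
    # get_immutability_policy() sketch, and it is assumed the returned policy reports a
    # read-only ``state``.
    #
    #     locked = client.blob_containers.lock_immutability_policy(
    #         "my-resource-group", "mystorageaccount", "my-container",
    #         if_match=policy.etag,
    #     )
    #     print(locked.state)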
@overload
def extend_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: str,
parameters: Optional[_models.ImmutabilityPolicy] = None,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ImmutabilityPolicy:
"""Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only
action allowed on a Locked policy will be this action. ETag in If-Match is required for this
operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists.
Required.
:type if_match: str
:param parameters: The ImmutabilityPolicy Properties that will be extended for a blob
container. Default value is None.
:type parameters: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def extend_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: str,
parameters: Optional[IO] = None,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ImmutabilityPolicy:
"""Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only
action allowed on a Locked policy will be this action. ETag in If-Match is required for this
operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied. Required.
:type if_match: str
:param parameters: The ImmutabilityPolicy Properties that will be extended for a blob
container. Default value is None.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def extend_immutability_policy(
self,
resource_group_name: str,
account_name: str,
container_name: str,
if_match: str,
parameters: Optional[Union[_models.ImmutabilityPolicy, IO]] = None,
**kwargs: Any
) -> _models.ImmutabilityPolicy:
"""Extends the immutabilityPeriodSinceCreationInDays of a locked immutabilityPolicy. The only
action allowed on a Locked policy will be this action. ETag in If-Match is required for this
operation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param if_match: The entity state (ETag) version of the immutability policy to update. A value
of "*" can be used to apply the operation only if the immutability policy already exists. If
omitted, this operation will always be applied. Required.
:type if_match: str
:param parameters: The ImmutabilityPolicy Properties that will be extended for a blob
        container. Is either a model type or an IO type. Default value is None.
:type parameters: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ImmutabilityPolicy or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.ImmutabilityPolicy
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ImmutabilityPolicy]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
if parameters is not None:
_json = self._serialize.body(parameters, "ImmutabilityPolicy")
else:
_json = None
request = build_extend_immutability_policy_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
if_match=if_match,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.extend_immutability_policy.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
response_headers = {}
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
deserialized = self._deserialize("ImmutabilityPolicy", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
extend_immutability_policy.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/immutabilityPolicies/default/extend"} # type: ignore
@overload
def lease(
self,
resource_group_name: str,
account_name: str,
container_name: str,
parameters: Optional[_models.LeaseContainerRequest] = None,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.LeaseContainerResponse:
"""The Lease Container operation establishes and manages a lock on a container for delete
operations. The lock duration can be 15 to 60 seconds, or can be infinite.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param parameters: Lease Container request body. Default value is None.
:type parameters: ~azure.mgmt.storage.v2018_03_01_preview.models.LeaseContainerRequest
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LeaseContainerResponse or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.LeaseContainerResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def lease(
self,
resource_group_name: str,
account_name: str,
container_name: str,
parameters: Optional[IO] = None,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.LeaseContainerResponse:
"""The Lease Container operation establishes and manages a lock on a container for delete
operations. The lock duration can be 15 to 60 seconds, or can be infinite.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
:param parameters: Lease Container request body. Default value is None.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LeaseContainerResponse or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.LeaseContainerResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def lease(
self,
resource_group_name: str,
account_name: str,
container_name: str,
parameters: Optional[Union[_models.LeaseContainerRequest, IO]] = None,
**kwargs: Any
) -> _models.LeaseContainerResponse:
"""The Lease Container operation establishes and manages a lock on a container for delete
operations. The lock duration can be 15 to 60 seconds, or can be infinite.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param container_name: The name of the blob container within the specified storage account.
Blob container names must be between 3 and 63 characters in length and use numbers, lower-case
letters and dash (-) only. Every dash (-) character must be immediately preceded and followed
by a letter or number. Required.
:type container_name: str
        :param parameters: Lease Container request body. Is either a model type or an IO type. Default
value is None.
:type parameters: ~azure.mgmt.storage.v2018_03_01_preview.models.LeaseContainerRequest or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LeaseContainerResponse or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2018_03_01_preview.models.LeaseContainerResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", "2018-03-01-preview")
) # type: Literal["2018-03-01-preview"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.LeaseContainerResponse]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
if parameters is not None:
_json = self._serialize.body(parameters, "LeaseContainerRequest")
else:
_json = None
request = build_lease_request(
resource_group_name=resource_group_name,
account_name=account_name,
container_name=container_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.lease.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("LeaseContainerResponse", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
lease.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/default/containers/{containerName}/lease"} # type: ignore
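# --- Illustrative usage sketch (not part of the generated client) ---
# A minimal example of acquiring a container lease through the ``lease``
# operation above. How the operations class hangs off a service client
# (``client.blob_containers`` here) is an assumption, and the resource
# names are placeholders.
def _example_acquire_lease(client, resource_group_name, account_name, container_name):
    from azure.mgmt.storage.v2018_03_01_preview import models as _example_models
    # action="Acquire" with lease_duration=-1 requests an infinite lease,
    # matching the request body documented in the docstring above.
    body = _example_models.LeaseContainerRequest(action="Acquire", lease_duration=-1)
    result = client.blob_containers.lease(
        resource_group_name, account_name, container_name, parameters=body
    )
    return result.lease_id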
| {
"content_hash": "9a4e46525a92c085af0cd22b962eba95",
"timestamp": "",
"source": "github",
"line_count": 2294,
"max_line_length": 297,
"avg_line_length": 49.266346992153444,
"alnum_prop": 0.6628383340559385,
"repo_name": "Azure/azure-sdk-for-python",
"id": "4887e7617c61ae5e7edf04f2f3085c5719542443",
"size": "113517",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2018_03_01_preview/operations/_blob_containers_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from aldryn_apphooks_config.app_base import CMSConfigApp
from cms.apphook_pool import apphook_pool
from .menu import FaqCategoryMenu
from .models import FaqConfig
class FaqApp(CMSConfigApp):
name = _('FAQ')
urls = ['aldryn_faq.urls']
menus = [FaqCategoryMenu, ]
app_name = 'aldryn_faq'
app_config = FaqConfig
apphook_pool.register(FaqApp)
| {
"content_hash": "fc50a4f7c6f20523e429394753d356a1",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 56,
"avg_line_length": 24.263157894736842,
"alnum_prop": 0.7396963123644251,
"repo_name": "czpython/aldryn-faq",
"id": "75ce15c558f1b55abf0e85dcdd02402dc9572d42",
"size": "486",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aldryn_faq/cms_app.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "10652"
},
{
"name": "JavaScript",
"bytes": "25904"
},
{
"name": "Python",
"bytes": "253171"
},
{
"name": "Shell",
"bytes": "208"
}
],
"symlink_target": ""
} |
import socket
from oslo.config import cfg
from oslo import messaging
from oslo.utils import importutils
from ironic.common import config
from ironic.common.i18n import _LE
from ironic.common.i18n import _LI
from ironic.common import rpc
from ironic.objects import base as objects_base
from ironic.openstack.common import context
from ironic.openstack.common import log
from ironic.openstack.common import service
service_opts = [
cfg.IntOpt('periodic_interval',
default=60,
help='Seconds between running periodic tasks.'),
cfg.StrOpt('host',
default=socket.getfqdn(),
help='Name of this node. This can be an opaque identifier. '
'It is not necessarily a hostname, FQDN, or IP address. '
'However, the node name must be valid within '
'an AMQP key, and if using ZeroMQ, a valid '
'hostname, FQDN, or IP address.'),
]
cfg.CONF.register_opts(service_opts)
LOG = log.getLogger(__name__)
class RPCService(service.Service):
def __init__(self, host, manager_module, manager_class):
super(RPCService, self).__init__()
self.host = host
manager_module = importutils.try_import(manager_module)
manager_class = getattr(manager_module, manager_class)
self.manager = manager_class(host, manager_module.MANAGER_TOPIC)
self.topic = self.manager.topic
self.rpcserver = None
def start(self):
super(RPCService, self).start()
admin_context = context.RequestContext('admin', 'admin', is_admin=True)
self.tg.add_dynamic_timer(
self.manager.periodic_tasks,
periodic_interval_max=cfg.CONF.periodic_interval,
context=admin_context)
self.manager.init_host()
target = messaging.Target(topic=self.topic, server=self.host)
endpoints = [self.manager]
serializer = objects_base.IronicObjectSerializer()
self.rpcserver = rpc.get_server(target, endpoints, serializer)
self.rpcserver.start()
LOG.info(_LI('Created RPC server for service %(service)s on host '
'%(host)s.'),
{'service': self.topic, 'host': self.host})
def stop(self):
try:
self.rpcserver.stop()
self.rpcserver.wait()
except Exception as e:
LOG.exception(_LE('Service error occurred when stopping the '
'RPC server. Error: %s'), e)
try:
self.manager.del_host()
except Exception as e:
LOG.exception(_LE('Service error occurred when cleaning up '
'the RPC manager. Error: %s'), e)
super(RPCService, self).stop(graceful=True)
LOG.info(_LI('Stopped RPC server for service %(service)s on host '
'%(host)s.'),
{'service': self.topic, 'host': self.host})
def prepare_service(argv=[]):
config.parse_args(argv)
cfg.set_defaults(log.log_opts,
default_log_levels=['amqp=WARN',
'amqplib=WARN',
'qpid.messaging=INFO',
'sqlalchemy=WARN',
'keystoneclient=INFO',
'stevedore=INFO',
'eventlet.wsgi.server=WARN',
'iso8601=WARN',
'paramiko=WARN',
'requests=WARN',
'neutronclient=WARN',
'glanceclient=WARN',
'ironic.openstack.common=WARN',
])
log.setup('ironic')
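# --- Illustrative wiring (not part of this module) ---
# A minimal sketch of launching an RPC service with the helpers above. The
# manager module/class names mirror ironic's conductor, but any module that
# exposes MANAGER_TOPIC and a manager class would do.
def _example_launch(argv):
    prepare_service(argv)
    server = RPCService(cfg.CONF.host,
                        'ironic.conductor.manager',
                        'ConductorManager')
    service.launch(server).wait()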
| {
"content_hash": "b8886a05ba28e0849b43be34a927eae2",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 79,
"avg_line_length": 39.23,
"alnum_prop": 0.5347947998980372,
"repo_name": "debayanray/ironic_backup",
"id": "32c869cebd94f9fcd4baa5997b466b9009910ef6",
"size": "4600",
"binary": false,
"copies": "1",
"ref": "refs/heads/fix_for_bug_1418327_node_boot_mode",
"path": "ironic/common/service.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "2208014"
}
],
"symlink_target": ""
} |
"""SelectionContainer class.
Represents a multipage container that can be used to group other widgets into
pages.
"""
from .widget_box import Box
from .widget import register
from .widget_core import CoreWidget
from traitlets import Unicode, Dict, CInt, TraitError, validate, observe
from .trait_types import TypedTuple
class _SelectionContainer(Box, CoreWidget):
"""Base class used to display multiple child widgets."""
titles = TypedTuple(trait=Unicode(), help="Titles of the pages").tag(sync=True)
selected_index = CInt(
help="""The index of the selected page. This is either an integer selecting a particular sub-widget, or None to have no widgets selected.""",
allow_none=True,
default_value=None
).tag(sync=True)
@validate('selected_index')
def _validated_index(self, proposal):
if proposal.value is None or 0 <= proposal.value < len(self.children):
return proposal.value
else:
raise TraitError('Invalid selection: index out of bounds')
@observe('children')
def _observe_children(self, change):
        if self.selected_index is not None and len(change.new) <= self.selected_index:
self.selected_index = None
@register
class Accordion(_SelectionContainer):
"""Displays children each on a separate accordion page."""
_view_name = Unicode('AccordionView').tag(sync=True)
_model_name = Unicode('AccordionModel').tag(sync=True)
@register
class Tab(_SelectionContainer):
"""Displays children each on a separate accordion tab."""
_view_name = Unicode('TabView').tag(sync=True)
_model_name = Unicode('TabModel').tag(sync=True)
def __init__(self, **kwargs):
if 'children' in kwargs and 'selected_index' not in kwargs and len(kwargs['children']) > 0:
kwargs['selected_index'] = 0
super(Tab, self).__init__(**kwargs)
@observe('children')
def _observe_children(self, change):
# if there are no tabs, then none should be selected
if len(change.new) == 0:
self.selected_index = None
# if there are tabs, but none is selected, select the first one
        elif self.selected_index is None:
self.selected_index = 0
# if there are tabs and a selection, but the selection is no longer
# valid, select the last tab.
        elif len(change.new) <= self.selected_index:
self.selected_index = len(change.new) - 1
@register
class Stacked(_SelectionContainer):
"""Displays only the selected child."""
_view_name = Unicode('StackedView').tag(sync=True)
_model_name = Unicode('StackedModel').tag(sync=True)
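# --- Illustrative behavior check (not part of the widget definitions) ---
# A small sketch of the selection semantics above: a Tab with children
# auto-selects index 0, and dropping every child clears the selection.
# Assumes an environment where widget comms are available or stubbed
# (e.g. a Jupyter kernel or ipywidgets' own test harness).
def _example_tab_selection():
    tab = Tab(children=(Stacked(), Stacked()))
    assert tab.selected_index == 0      # first tab selected on construction
    tab.children = ()                   # removing every page...
    assert tab.selected_index is None   # ...resets the selection
    return tab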
| {
"content_hash": "f4bebcb16cc5fbf88b3528787725353c",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 149,
"avg_line_length": 36.958333333333336,
"alnum_prop": 0.6681698609545283,
"repo_name": "SylvainCorlay/ipywidgets",
"id": "73be629b6dfce4bad6ccd4be352eb4e8b40dd78e",
"size": "2763",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ipywidgets/widgets/widget_selectioncontainer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "63903"
},
{
"name": "JavaScript",
"bytes": "51864"
},
{
"name": "Jupyter Notebook",
"bytes": "2178"
},
{
"name": "Python",
"bytes": "311433"
},
{
"name": "Shell",
"bytes": "1872"
},
{
"name": "TypeScript",
"bytes": "440154"
}
],
"symlink_target": ""
} |
import errno
import optparse as op
import os
import subprocess as sp
import sys
import time
import unicodedata
__usage__ = "%prog [OPTIONS] DIRECTORY"
if sys.version_info[0] == 3:
def enc(text):
if isinstance(text, bytes):
return text
return text.encode()
def dec(text):
if isinstance(text, bytes):
return text.decode('utf-8')
return text
def write(pipe, data):
try:
pipe.stdin.write(data)
except IOError as e:
if e.errno != errno.EPIPE:
raise
else:
def enc(text):
if isinstance(text, unicode):
return text.encode('utf-8')
return text
def dec(text):
if isinstance(text, unicode):
return text
return text.decode('utf-8')
def write(pipe, data):
pipe.stdin.write(data)
def normalize_path(path):
# Fix unicode pathnames on OS X
# See: http://stackoverflow.com/a/5582439/44289
if sys.platform == "darwin":
return unicodedata.normalize("NFKC", dec(path))
return path
def check_repo(parser):
cmd = ['git', 'rev-parse']
p = sp.Popen(cmd, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE)
(ignore, error) = p.communicate()
if p.wait() != 0:
if not error:
error = "Unknown Git error"
error = error.decode("utf-8")
if error.startswith("fatal: "):
error = error[len("fatal: "):]
parser.error(error)
def try_rebase(remote, branch):
cmd = ['git', 'rev-list', '--max-count=1', '%s/%s' % (remote, branch)]
p = sp.Popen(cmd, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE)
(rev, ignore) = p.communicate()
if p.wait() != 0:
return True
cmd = ['git', 'update-ref', 'refs/heads/%s' % branch, rev.strip()]
if sp.call(cmd) != 0:
return False
return True
def get_config(key):
p = sp.Popen(['git', 'config', key], stdin=sp.PIPE, stdout=sp.PIPE)
(value, stderr) = p.communicate()
return value.strip()
def get_prev_commit(branch):
cmd = ['git', 'rev-list', '--max-count=1', branch, '--']
p = sp.Popen(cmd, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE)
(rev, ignore) = p.communicate()
if p.wait() != 0:
return None
return rev.decode('utf-8').strip()
def mk_when(timestamp=None):
if timestamp is None:
timestamp = int(time.time())
currtz = "%+05d" % (-1 * time.timezone / 36) # / 3600 * 100
return "%s %s" % (timestamp, currtz)
def start_commit(pipe, branch, message):
uname = dec(get_config("user.name"))
email = dec(get_config("user.email"))
write(pipe, enc('commit refs/heads/%s\n' % branch))
write(pipe, enc('committer %s <%s> %s\n' % (uname, email, mk_when())))
write(pipe, enc('data %d\n%s\n' % (len(message), message)))
head = get_prev_commit(branch)
if head:
write(pipe, enc('from %s\n' % head))
write(pipe, enc('deleteall\n'))
def add_file(pipe, srcpath, tgtpath):
with open(srcpath, "rb") as handle:
if os.access(srcpath, os.X_OK):
write(pipe, enc('M 100755 inline %s\n' % tgtpath))
else:
write(pipe, enc('M 100644 inline %s\n' % tgtpath))
data = handle.read()
write(pipe, enc('data %d\n' % len(data)))
write(pipe, enc(data))
write(pipe, enc('\n'))
def add_nojekyll(pipe):
write(pipe, enc('M 100644 inline .nojekyll\n'))
write(pipe, enc('data 0\n'))
write(pipe, enc('\n'))
def gitpath(fname):
norm = os.path.normpath(fname)
return "/".join(norm.split(os.path.sep))
def run_import(srcdir, branch, message, nojekyll):
cmd = ['git', 'fast-import', '--date-format=raw', '--quiet']
kwargs = {"stdin": sp.PIPE}
if sys.version_info >= (3, 2, 0):
kwargs["universal_newlines"] = False
pipe = sp.Popen(cmd, **kwargs)
start_commit(pipe, branch, message)
for path, dnames, fnames in os.walk(srcdir):
for fn in fnames:
fpath = os.path.join(path, fn)
fpath = normalize_path(fpath)
gpath = gitpath(os.path.relpath(fpath, start=srcdir))
add_file(pipe, fpath, gpath)
if nojekyll:
add_nojekyll(pipe)
write(pipe, enc('\n'))
pipe.stdin.close()
if pipe.wait() != 0:
        sys.stdout.write("Failed to process commit.\n")
def main():
sp.call("." + os.path.sep + "hugo")
run_import("public", "gh-pages", "Updating website", True)
sp.check_call(['git', 'push', "origin", "gh-pages"])
if __name__ == '__main__':
# just call the main function
main()
| {
"content_hash": "49fa1b04fb4183ee60f708c79a8dce95",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 74,
"avg_line_length": 27.79640718562874,
"alnum_prop": 0.5702283498492029,
"repo_name": "DaveBackus/Data_Bootcamp",
"id": "aa392849acee640c3b1bcc590a8bbb16be5a0d9f",
"size": "4805",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "website/publish_site.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "138178"
},
{
"name": "HTML",
"bytes": "518500"
},
{
"name": "JavaScript",
"bytes": "825"
},
{
"name": "Jupyter Notebook",
"bytes": "6123729"
},
{
"name": "Python",
"bytes": "254733"
},
{
"name": "R",
"bytes": "54037"
},
{
"name": "Shell",
"bytes": "65"
},
{
"name": "TeX",
"bytes": "135312"
}
],
"symlink_target": ""
} |
import logging
from marshmallow import fields, post_load
from azure.ai.ml._restclient.v2021_10_01.models import Goal
from azure.ai.ml._schema.core.fields import StringTransformedEnum
from azure.ai.ml._schema.core.schema import PatchedSchemaMeta
from azure.ai.ml._utils.utils import camel_to_snake
module_logger = logging.getLogger(__name__)
class SweepObjectiveSchema(metaclass=PatchedSchemaMeta):
goal = StringTransformedEnum(
required=True,
allowed_values=[Goal.MINIMIZE, Goal.MAXIMIZE],
casing_transform=camel_to_snake,
)
primary_metric = fields.Str(required=True)
@post_load
def make(self, data, **kwargs) -> "Objective":
from azure.ai.ml.entities._job.sweep.objective import Objective
return Objective(**data)
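# --- Illustrative round trip (not part of the schema module) ---
# A minimal sketch of loading a plain dict through the schema above, assuming
# the schema can be instantiated directly as other schemas in this package
# are; the field values are hypothetical. The post_load hook returns an
# Objective entity rather than a dict.
def _example_load():
    data = {"goal": "maximize", "primary_metric": "accuracy"}
    return SweepObjectiveSchema().load(data)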
| {
"content_hash": "3ca49dfa4f10006ff1197e20cb105330",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 71,
"avg_line_length": 31.28,
"alnum_prop": 0.7327365728900256,
"repo_name": "Azure/azure-sdk-for-python",
"id": "d326bd1f970bbecd9209c8f144d00c5d533a43e0",
"size": "1010",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_sweep/sweep_objective.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import decimal
class ScriptAddress2Test(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 3
self.setup_clean_chain = False
def setup_network(self):
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, []))
self.nodes.append(start_node(1, self.options.tmpdir, []))
self.nodes.append(start_node(2, self.options.tmpdir, []))
connect_nodes(self.nodes[1], 0)
connect_nodes(self.nodes[2], 0)
self.is_network_split = False
self.sync_all()
def run_test(self):
cnt = self.nodes[0].getblockcount()
# Mine some blocks
self.nodes[1].generate(100)
self.sync_all()
if (self.nodes[0].getblockcount() != cnt + 100):
raise AssertionError("Failed to mine 100 blocks")
addr = self.nodes[0].getnewaddress()
addr2 = self.nodes[0].getnewaddress()
multisig_addr = self.nodes[0].addmultisigaddress(2, [addr, addr2], "multisigaccount")
assert_equal(multisig_addr[0], 'Q')
# Send to a new multisig address
txid = self.nodes[1].sendtoaddress(multisig_addr, 1)
block = self.nodes[1].generate(3)
self.sync_all()
tx = self.nodes[2].getrawtransaction(txid, 1)
dest_addrs = [tx["vout"][0]['scriptPubKey']['addresses'][0],
tx["vout"][1]['scriptPubKey']['addresses'][0]]
assert(multisig_addr in dest_addrs)
# Spend from the new multisig address
addr3 = self.nodes[1].getnewaddress()
txid = self.nodes[0].sendfrom("multisigaccount", addr3, 0.8)
block = self.nodes[0].generate(2)
self.sync_all()
assert(self.nodes[0].getbalance("multisigaccount", 1) < 0.2)
assert(self.nodes[1].listtransactions()[-1]['address'] == addr3)
# Send to an old multisig address. The api addmultisigaddress
# can only generate a new address so we manually compute
# multisig_addr_old beforehand using an old client.
priv_keys = ["cU7eeLPKzXeKMeZvnEJhvZZ3tLqVF3XGeo1BbM8dnbmV7pP3Qg89",
"cTw7mRhSvTfzqCt6MFgBoTBqwBpYu2rWugisXcwjv4cAASh3iqPt"]
addrs = ["mj6gNGRXPXrD69R5ApjcsDerZGrYKSfb6v",
"mqET4JA3L7P7FoUjUP3F6m6YsLpCkyzzou"]
self.nodes[0].importprivkey(priv_keys[0])
self.nodes[0].importprivkey(priv_keys[1])
multisig_addr_new = self.nodes[0].addmultisigaddress(2, addrs, "multisigaccount2")
assert_equal(multisig_addr_new, "QZ974ZrPrmqMmm1PSVp4m8YEgo3bCQZBbe")
multisig_addr_old = "2N5nLwYz9qfnGdaFLpPn3gS6oYQbmLTWPjq"
## Let's send to the old address. We can then find it in the
## new address with the new client. So basically the old
## address and the new one are the same thing.
txid = self.nodes[1].sendtoaddress(multisig_addr_old, 1)
block = self.nodes[1].generate(1)
self.sync_all()
tx = self.nodes[2].getrawtransaction(txid, 1)
dest_addrs = [tx["vout"][0]['scriptPubKey']['addresses'][0],
tx["vout"][1]['scriptPubKey']['addresses'][0]]
assert(multisig_addr_new in dest_addrs)
assert(multisig_addr_old not in dest_addrs)
# Spend from the new multisig address
addr4 = self.nodes[1].getnewaddress()
txid = self.nodes[0].sendfrom("multisigaccount2", addr4, 0.8)
block = self.nodes[0].generate(2)
self.sync_all()
assert(self.nodes[0].getbalance("multisigaccount2", 1) < 0.2)
assert(self.nodes[1].listtransactions()[-1]['address'] == addr4)
if __name__ == '__main__':
ScriptAddress2Test().main()
| {
"content_hash": "afdf1c61d6635b0b9e7348e889c6787b",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 93,
"avg_line_length": 41.824175824175825,
"alnum_prop": 0.6237519705727799,
"repo_name": "Yearcoin-dev/yearcoin",
"id": "ee163cc4ff0924876afd156b3c6aba115db93c1b",
"size": "4077",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qa/rpc-tests/test_script_address2.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28456"
},
{
"name": "C",
"bytes": "696300"
},
{
"name": "C++",
"bytes": "4584719"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "185595"
},
{
"name": "Makefile",
"bytes": "104733"
},
{
"name": "Objective-C",
"bytes": "3892"
},
{
"name": "Objective-C++",
"bytes": "7232"
},
{
"name": "Protocol Buffer",
"bytes": "2329"
},
{
"name": "Python",
"bytes": "1030153"
},
{
"name": "QMake",
"bytes": "2020"
},
{
"name": "Roff",
"bytes": "30558"
},
{
"name": "Shell",
"bytes": "47200"
}
],
"symlink_target": ""
} |
import nbformat as nbf
from glob import glob
# Collect a list of all notebooks in the content folder
notebooks = glob("*.ipynb")
# Text to look for in adding tags
text_search_dict = {
"# Solution": "hide-cell", # Hide the cell with a button to show
}
# Search through each notebook and look for the text, add a tag if necessary
for ipath in notebooks:
ntbk = nbf.read(ipath, nbf.NO_CONVERT)
for cell in ntbk.cells:
        # Rebuild the tag list from scratch; any tags already on the cell are
        # intentionally discarded before re-adding from text_search_dict.
        cell_tags = []
for key, val in text_search_dict.items():
if key in cell['source']:
if val not in cell_tags:
cell_tags.append(val)
if len(cell_tags) > 0:
cell['metadata']['tags'] = cell_tags
nbf.write(ntbk, ipath)
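# After a run, a cell whose source contains "# Solution" carries metadata
# like {"tags": ["hide-cell"]} (illustrative), which Jupyter Book renders
# as a hidden cell behind a "show" toggle.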
| {
"content_hash": "14dc7dadf6529ad3a8da58211b235b9b",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 76,
"avg_line_length": 30.576923076923077,
"alnum_prop": 0.6062893081761006,
"repo_name": "AllenDowney/ModSimPy",
"id": "95067d965c3cc443b6000c1cdb98652ed54d59fc",
"size": "795",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jb/prep_notebooks.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4248502"
},
{
"name": "Jupyter Notebook",
"bytes": "9352380"
},
{
"name": "Makefile",
"bytes": "5311"
},
{
"name": "Python",
"bytes": "140055"
},
{
"name": "Shell",
"bytes": "312"
},
{
"name": "TeX",
"bytes": "315198"
}
],
"symlink_target": ""
} |
import unittest
from quickbooks import QuickBooks
from quickbooks.objects.journalentry import JournalEntry, JournalEntryLine, JournalEntryLineDetail, Entity
class JournalentryTests(unittest.TestCase):
def test_unicode(self):
journalentry = JournalEntry()
journalentry.TotalAmt = 1000
self.assertEquals(str(journalentry), '1000')
def test_valid_object_name(self):
obj = JournalEntry()
client = QuickBooks()
result = client.isvalid_object_name(obj.qbo_object_name)
self.assertTrue(result)
class JournalEntryLineTests(unittest.TestCase):
def test_init(self):
journalentry = JournalEntryLine()
self.assertEquals(journalentry.DetailType, "JournalEntryLineDetail")
self.assertEquals(journalentry.JournalEntryLineDetail, None)
class JournalEntryLineDetailTests(unittest.TestCase):
def test_init(self):
journalentry = JournalEntryLineDetail()
self.assertEquals(journalentry.PostingType, "")
self.assertEquals(journalentry.TaxApplicableOn, "Sales")
self.assertEquals(journalentry.TaxAmount, 0)
self.assertEquals(journalentry.BillableStatus, None)
self.assertEquals(journalentry.Entity, None)
self.assertEquals(journalentry.AccountRef, None)
self.assertEquals(journalentry.ClassRef, None)
self.assertEquals(journalentry.DepartmentRef, None)
self.assertEquals(journalentry.TaxCodeRef, None)
class EntityTests(unittest.TestCase):
def test_init(self):
entity = Entity()
self.assertEquals(entity.Type, "")
self.assertEquals(entity.EntityRef, None)
| {
"content_hash": "6031b71dec6f6f349d6d6dbd29d4eb2d",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 106,
"avg_line_length": 33.06,
"alnum_prop": 0.7223230490018149,
"repo_name": "sidecars/python-quickbooks",
"id": "cd1e9d47389bcced92ffcc146ee4f5ff868a54db",
"size": "1653",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/unit/objects/test_journalentry.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "164"
},
{
"name": "Python",
"bytes": "2135934"
}
],
"symlink_target": ""
} |
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from endpoints.utils.decorators import room_required, pk_required
from music.serializers import PlaylistSerializer
from music.models import PlaylistTrack
class PlaylistView(APIView):
"""
Playlist resource.
"""
@room_required
def get(self, request, room):
"""
Get current playlist
---
serializer: PlaylistSerializer
"""
return Response(PlaylistSerializer(room.playlist, many=True).data)
@pk_required
@room_required
def post(self, request, room, pk):
"""
Update playlist
---
serializer: PlaylistSerializer
"""
try:
playlistTrack = PlaylistTrack.objects.get(pk=pk, room=room)
except PlaylistTrack.DoesNotExist:
return Response("Can't find this playlistTrack.", status=status.HTTP_404_NOT_FOUND)
action = request.data.get('action')
if action not in PlaylistTrack.ACTIONS:
return Response('Action can only be: "%s"' % '" or "'.join(PlaylistTrack.ACTIONS), status=status.HTTP_400_BAD_REQUEST)
target = request.data.get('target')
if action in {'above', 'below'}:
if target is None:
return Response('"%s" action needs a target parameter' % action, status=status.HTTP_400_BAD_REQUEST)
try:
target = PlaylistTrack.objects.get(pk=int(target), room=room)
except PlaylistTrack.DoesNotExist:
return Response("Can't find this playlistTrack as target.", status=status.HTTP_404_NOT_FOUND)
if target is not None:
getattr(playlistTrack, action)(target)
else:
getattr(playlistTrack, action)()
message = {
'action': 'playlistTrack_updated',
'playlistTracks': PlaylistSerializer(room.playlist.all(), many=True).data
}
room.send_message(message)
return Response(PlaylistSerializer(room.playlist.all(), many=True).data, status=status.HTTP_200_OK)
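    # Illustrative payloads for the POST handler above (pks are hypothetical):
    # repositioning track 7 relative to track 3 sends
    #     {"action": "above", "target": 3}
    # while any targetless entry of PlaylistTrack.ACTIONS sends just
    #     {"action": "<action-name>"}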
@pk_required
@room_required
def delete(self, request, room, pk):
"""
Delete music from playlist
---
serializer: PlaylistSerializer
"""
try:
PlaylistTrack.objects.get(pk=pk, room=room).delete()
except PlaylistTrack.DoesNotExist:
return Response("Can't find this playlistTrack.", status=status.HTTP_404_NOT_FOUND)
message = {
'action': 'playlistTrack_deleted',
'playlistTracks': PlaylistSerializer(room.playlist.all(), many=True).data
}
room.send_message(message)
return Response(PlaylistSerializer(room.playlist.all(), many=True).data, status=status.HTTP_204_NO_CONTENT)
| {
"content_hash": "d8d3e9cce8d83ac99067f1c377488495",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 130,
"avg_line_length": 36.139240506329116,
"alnum_prop": 0.6301225919439579,
"repo_name": "Amoki/Amoki-Music",
"id": "c44baaf285678c22a0f6d4112cba48c2c07e0dc6",
"size": "2855",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "endpoints/playlist.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "14853"
},
{
"name": "HTML",
"bytes": "30566"
},
{
"name": "JavaScript",
"bytes": "53165"
},
{
"name": "Python",
"bytes": "127142"
}
],
"symlink_target": ""
} |
import pytest
import time
import random
import string
from selenium import webdriver
@pytest.fixture
def driver(request):
wd = webdriver.Chrome()
request.addfinalizer(wd.quit)
return wd
def test_registration(driver):
driver.get('http://localhost/litecart/')
driver.find_element_by_css_selector('div#box-account-login a').click()
email = 'jack' + ''.join(random.choice(string.digits) for i in range(3)) + '@black.com'
print(email)
password = 'blackjack'
form_fields = driver.find_elements_by_css_selector('.content table input')
form_fields[0].send_keys('666')
form_fields[1].send_keys('Tenacious D')
form_fields[2].send_keys('Jack')
form_fields[3].send_keys('Black')
form_fields[4].send_keys('Los-Angeles')
form_fields[6].send_keys('12345')
form_fields[7].send_keys('Los-Angeles')
select_country = driver.find_element_by_css_selector('select')
driver.execute_script("arguments[0].selectedIndex = 224; arguments[0].dispatchEvent(new Event('change'))", select_country)
form_fields[9].send_keys(email)
form_fields[10].send_keys('+76661234567')
form_fields[12].send_keys(password)
form_fields[13].send_keys(password)
driver.find_element_by_css_selector('button').click()
select_zone = driver.find_element_by_xpath('//select[@name="zone_code"]')
driver.execute_script("arguments[0].selectedIndex = 11; arguments[0].dispatchEvent(new Event('change'))", select_zone)
driver.find_element_by_xpath('//input[@name="password"]').send_keys(password)
driver.find_element_by_xpath('//input[@name="confirmed_password"]').send_keys(password)
driver.find_element_by_css_selector('button').click()
time.sleep(1)
logout(driver)
form_fields = driver.find_elements_by_css_selector('div#box-account-login table input')
form_fields[0].send_keys(email)
form_fields[1].send_keys(password)
driver.find_element_by_xpath('//button[@name="login"]').click()
time.sleep(1)
logout(driver)
time.sleep(1)
def logout(driver):
links = driver.find_elements_by_css_selector('div#box-account .list-vertical a')
for link in links:
if link.text == 'Logout':
link.click()
| {
"content_hash": "e1ac0e1a0e28c7a700b3cfd67d78e3e0",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 126,
"avg_line_length": 35.774193548387096,
"alnum_prop": 0.6853020739404869,
"repo_name": "madm0nkey/selenium-training",
"id": "ed74dc8cc9d36e7a35b8fbddb8afa582f2e7fb07",
"size": "2218",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "exercise_11.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "25470"
}
],
"symlink_target": ""
} |
import time
from django.test import TestCase
import mailjet
from .forms import SubscriptionForm
class SubscriptionFormTest(TestCase):
def list_name_and_label(self):
ts = int(time.time())
result = (u'Test%s' % ts, u'test%s' % ts)
self.temporary_lists.append(result)
return result
def setUp(self):
self.temporary_lists = []
self.list_name, self.list_label = self.list_name_and_label()
self.api = mailjet.Api()
def tearDown(self):
lists = self.api.lists.all()['lists']
for temporary_list in self.temporary_lists:
for l in lists:
if l['name'] == temporary_list[0]:
self.api.lists.delete(method='POST', id=l['id'])
def test_add_contact(self):
form = SubscriptionForm(
data={'email': '[email protected]'}, list_name=self.list_name,
list_label=self.list_label)
self.assertTrue(
form.is_valid(), 'Form should be valid for test to continue')
form.add_contact()
for c in self.api.lists.contacts(id=form.list_id)['result']:
if c['email'] == '[email protected]':
return
self.fail('Contact was not created')
def test_settings_override(self):
name_fixture, label_fixture = self.list_name_and_label()
with self.settings(MAILJET_LIST_NAME=name_fixture):
form = SubscriptionForm()
self.assertEqual(name_fixture, form.list_name)
form = SubscriptionForm(list_name=self.list_name)
self.assertEqual(self.list_name, form.list_name)
# self.settings context manager cannot be used to override 2 settings
with self.settings(MAILJET_LIST_LABEL=label_fixture):
form = SubscriptionForm()
self.assertEqual(label_fixture, form.list_label)
form = SubscriptionForm(list_label=self.list_label)
self.assertEqual(self.list_label, form.list_label)
def test_save(self):
def contact_in_list(email):
for c in self.api.lists.contacts(id=form.list_id)['result']:
if c['email'] == email:
return True
return False
form = SubscriptionForm(
data={'email': '[email protected]'}, list_name=self.list_name,
list_label=self.list_label)
self.assertTrue(
form.is_valid(), 'Form should be valid for test to continue')
self.assertFalse(
contact_in_list('[email protected]'),
'Contact must not be in list for test to continue')
form.save()
self.assertTrue(contact_in_list('[email protected]'))
form = SubscriptionForm(
data={'email': '[email protected]'}, list_name=self.list_name,
list_label=self.list_label)
self.assertTrue(
form.is_valid(), 'Form should be valid for test to continue')
self.assertFalse(
contact_in_list('[email protected]'),
'Contact must not be in list for test to continue')
form.save()
self.assertTrue(contact_in_list('[email protected]'))
def test_clean_email(self):
form = SubscriptionForm(
data={'email': '[email protected]'}, list_name=self.list_name,
list_label=self.list_label)
self.assertTrue(
form.is_valid(), 'Form should be valid for test to continue')
form.save()
form = SubscriptionForm(
data={'email': '[email protected]'}, list_name=self.list_name,
list_label=self.list_label)
self.assertFalse(
form.is_valid(), 'Form should detect duplicate email')
| {
"content_hash": "25005cadf3e12463a1a6f8823c0fbcf1",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 77,
"avg_line_length": 32.64912280701754,
"alnum_prop": 0.591886082751209,
"repo_name": "WoLpH/mailjet",
"id": "bab171a281d09e096aeebaeb67fd24c68bcf4be9",
"size": "3722",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "mailjet/contrib/django_mailjet/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "415"
},
{
"name": "Python",
"bytes": "25402"
}
],
"symlink_target": ""
} |
from django.db import DEFAULT_DB_ALIAS
from django_atomic_celery import (
_post_enter_atomic_block,
_post_exit_atomic_block,
)
class DjangoAtomicCeleryTestCaseMixin(object):
"""Django atomic Celery test case mixin.
Counters Django's behavior of running all test cases in atomic
transactions, which makes it impossible to test behavior based on
Django atomic Celery transactions.
"""
def setUp(self):
super(DjangoAtomicCeleryTestCaseMixin, self).setUp()
_post_exit_atomic_block(signal=None,
sender=None,
using=DEFAULT_DB_ALIAS,
outermost=True,
savepoint=True,
successful=True)
def tearDown(self):
_post_enter_atomic_block(signal=None,
sender=None,
using=DEFAULT_DB_ALIAS,
outermost=True,
savepoint=True)
super(DjangoAtomicCeleryTestCaseMixin, self).tearDown()
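# Illustrative usage (hypothetical test case): list the mixin before Django's
# TestCase so its setUp/tearDown wrap the transaction signal handling.
#
#     from django.test import TestCase
#
#     class MyTaskTests(DjangoAtomicCeleryTestCaseMixin, TestCase):
#         def test_task_fires_despite_wrapping_transaction(self):
#             ...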
| {
"content_hash": "0148779581d9e557f3236db8707fa3f6",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 69,
"avg_line_length": 36.25806451612903,
"alnum_prop": 0.5444839857651246,
"repo_name": "graingert/django_atomic_celery",
"id": "95f1a1e75d301dda9255ebd422902f0c6ea1f26c",
"size": "1124",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_atomic_celery/testing.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "595"
},
{
"name": "Python",
"bytes": "13761"
}
],
"symlink_target": ""
} |
import numpy as np
import pygrit as GRIT
import math
def write_svg_files(engine, parameters, output_path, frame_number):
filename = output_path + GRIT.generate_filename('/tutorial_vortex_in_a_box', frame_number, 'svg')
GRIT.svg_draw(filename, engine, parameters)
print('\t\twrite_svg_files() Done writing ' + filename)
def do_simulation_step(engine, settings):
object_label = int(settings.get_value('object_label', '1'))
phase = GRIT.make_phase(engine, object_label)
N = np.asanyarray(phase.get_vertices()).size
px = np.resize(np.array([]),N)
py = np.resize(np.array([]),N)
GRIT.get_sub_range_current(engine, phase, px, py)
dt = 0.01
px_new = np.array(px)
py_new = np.array(py)
for i in range(0, N):
x = px[i]
y = py[i]
u = 2.0 * math.cos(math.pi * y) * math.sin(math.pi * y) * math.sin(math.pi*x) * math.sin(math.pi*x)
v = -2.0 * math.cos(math.pi * x) * math.sin(math.pi * x) * math.sin(math.pi*y) * math.sin(math.pi*y)
px_new[i] = px[i] + u*dt
py_new[i] = py[i] + v*dt
GRIT.set_sub_range_target(engine, phase, px_new, py_new)
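# A small self-check of the velocity field used above: the vortex-in-a-box
# field is divergence-free, so du/dx + dv/dy should vanish. Central
# differences are used; the sample point and step size are arbitrary.
def check_divergence_free(x=0.3, y=0.7, h=1.0e-5):
    def u(x, y):
        return 2.0 * math.cos(math.pi * y) * math.sin(math.pi * y) * math.sin(math.pi * x) ** 2
    def v(x, y):
        return -2.0 * math.cos(math.pi * x) * math.sin(math.pi * x) * math.sin(math.pi * y) ** 2
    div = (u(x + h, y) - u(x - h, y)) / (2.0 * h) + (v(x, y + h) - v(x, y - h)) / (2.0 * h)
    assert abs(div) < 1.0e-6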
def main():
print('*** VORTEX IN A BOX ***\n')
cfg_filename = '../bin/tutorial_vortex_in_a_box.cfg'
settings = GRIT.ConfigFile()
settings.load(cfg_filename)
parameters = GRIT.make_parameters_from_config_file(settings)
txt_filename = settings.get_value('txt_filename', 'circle_enright.txt')
output_path = settings.get_value('output_path', '')
max_steps = int(settings.get_value('steps', '100'))
print('Loading mesh file: ' + txt_filename)
engine = GRIT.Engine2D()
GRIT.init_engine_with_mesh_file(
GRIT.get_data_file_path(txt_filename)
, parameters
, engine
)
write_svg_files(engine, parameters, output_path, 0)
for i in range(1, max_steps):
do_simulation_step(engine, settings)
engine.update(parameters)
write_svg_files(engine, parameters, output_path, i)
if __name__ == "__main__":
main()
| {
"content_hash": "39d9d430a74dd17a0fe1f168781dab0c",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 108,
"avg_line_length": 29.930555555555557,
"alnum_prop": 0.5870069605568445,
"repo_name": "misztal/GRIT",
"id": "7fcf463798e076178adefe01ac8158c5611b0904",
"size": "2155",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "GRIT/bin/tutorial_vortex_in_a_box.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "60469"
},
{
"name": "C++",
"bytes": "7605518"
},
{
"name": "CMake",
"bytes": "80506"
},
{
"name": "Cuda",
"bytes": "383778"
},
{
"name": "GLSL",
"bytes": "2231"
},
{
"name": "Matlab",
"bytes": "50319"
},
{
"name": "Objective-C",
"bytes": "120"
},
{
"name": "Python",
"bytes": "87400"
},
{
"name": "Shell",
"bytes": "3706"
}
],
"symlink_target": ""
} |
import copy
import six
import yaml
from st2tests import DbTestCase
from st2tests.fixturesloader import FixturesLoader
import st2common.bootstrap.runnersregistrar as runners_registrar
from st2actions.runners.mistral import utils
from st2common.models.api.action import ActionAPI, RunnerTypeAPI
from st2common.persistence.action import Action
from st2common.persistence.runner import RunnerType
WB_PRE_XFORM_FILE = 'wb_pre_xform.yaml'
WB_POST_XFORM_FILE = 'wb_post_xform.yaml'
WF_PRE_XFORM_FILE = 'wf_pre_xform.yaml'
WF_POST_XFORM_FILE = 'wf_post_xform.yaml'
WF_NO_REQ_PARAM_FILE = 'wf_missing_required_param.yaml'
WF_UNEXP_PARAM_FILE = 'wf_has_unexpected_param.yaml'
TEST_FIXTURES = {
'workflows': [
WB_PRE_XFORM_FILE,
WB_POST_XFORM_FILE,
WF_PRE_XFORM_FILE,
WF_POST_XFORM_FILE,
WF_NO_REQ_PARAM_FILE,
WF_UNEXP_PARAM_FILE
],
'actions': [
'local.yaml',
'a1.yaml',
'a2.yaml',
'action1.yaml'
],
'runners': [
'testrunner1.yaml',
'testrunner2.yaml'
]
}
PACK = 'generic'
LOADER = FixturesLoader()
FIXTURES = LOADER.load_fixtures(fixtures_pack=PACK, fixtures_dict=TEST_FIXTURES)
WB_PRE_XFORM_PATH = LOADER.get_fixture_file_path_abs(PACK, 'workflows', WB_PRE_XFORM_FILE)
WB_PRE_XFORM_DEF = FIXTURES['workflows'][WB_PRE_XFORM_FILE]
WB_POST_XFORM_PATH = LOADER.get_fixture_file_path_abs(PACK, 'workflows', WB_POST_XFORM_FILE)
WB_POST_XFORM_DEF = FIXTURES['workflows'][WB_POST_XFORM_FILE]
WF_PRE_XFORM_PATH = LOADER.get_fixture_file_path_abs(PACK, 'workflows', WF_PRE_XFORM_FILE)
WF_PRE_XFORM_DEF = FIXTURES['workflows'][WF_PRE_XFORM_FILE]
WF_POST_XFORM_PATH = LOADER.get_fixture_file_path_abs(PACK, 'workflows', WF_POST_XFORM_FILE)
WF_POST_XFORM_DEF = FIXTURES['workflows'][WF_POST_XFORM_FILE]
WF_NO_REQ_PARAM_PATH = LOADER.get_fixture_file_path_abs(PACK, 'workflows', WF_NO_REQ_PARAM_FILE)
WF_NO_REQ_PARAM_DEF = FIXTURES['workflows'][WF_NO_REQ_PARAM_FILE]
WF_UNEXP_PARAM_PATH = LOADER.get_fixture_file_path_abs(PACK, 'workflows', WF_UNEXP_PARAM_FILE)
WF_UNEXP_PARAM_DEF = FIXTURES['workflows'][WF_UNEXP_PARAM_FILE]
def _read_file_content(path):
with open(path, 'r') as f:
return f.read()
class DSLTransformTestCase(DbTestCase):
@classmethod
def setUpClass(cls):
super(DSLTransformTestCase, cls).setUpClass()
runners_registrar.register_runner_types()
for _, fixture in six.iteritems(FIXTURES['runners']):
instance = RunnerTypeAPI(**fixture)
RunnerType.add_or_update(RunnerTypeAPI.to_model(instance))
for _, fixture in six.iteritems(FIXTURES['actions']):
instance = ActionAPI(**fixture)
Action.add_or_update(ActionAPI.to_model(instance))
@staticmethod
def _read_file_content(path):
with open(path, 'r') as f:
return f.read()
def test_invalid_dsl_version(self):
def_yaml = _read_file_content(WB_PRE_XFORM_PATH)
def_dict = yaml.safe_load(def_yaml)
# Unsupported version
def_dict['version'] = '1.0'
def_yaml = yaml.safe_dump(def_dict)
self.assertRaises(Exception, utils.transform_definition, def_yaml)
# Missing version
del def_dict['version']
def_yaml = yaml.safe_dump(def_dict)
self.assertRaises(Exception, utils.transform_definition, def_yaml)
def test_transform_workbook_dsl_yaml(self):
def_yaml = _read_file_content(WB_PRE_XFORM_PATH)
new_def = utils.transform_definition(def_yaml)
actual = yaml.safe_load(new_def)
expected = copy.deepcopy(WB_POST_XFORM_DEF)
self.assertDictEqual(actual, expected)
def test_transform_workbook_dsl_dict(self):
def_yaml = _read_file_content(WB_PRE_XFORM_PATH)
def_dict = yaml.safe_load(def_yaml)
actual = utils.transform_definition(def_dict)
expected = copy.deepcopy(WB_POST_XFORM_DEF)
self.assertDictEqual(actual, expected)
def test_transform_workflow_dsl_yaml(self):
def_yaml = _read_file_content(WF_PRE_XFORM_PATH)
new_def = utils.transform_definition(def_yaml)
actual = yaml.safe_load(new_def)
expected = copy.deepcopy(WF_POST_XFORM_DEF)
self.assertDictEqual(actual, expected)
def test_transform_workflow_dsl_dict(self):
def_yaml = _read_file_content(WF_PRE_XFORM_PATH)
def_dict = yaml.safe_load(def_yaml)
actual = utils.transform_definition(def_dict)
expected = copy.deepcopy(WF_POST_XFORM_DEF)
self.assertDictEqual(actual, expected)
def test_required_action_params_failure(self):
def_yaml = _read_file_content(WF_NO_REQ_PARAM_PATH)
def_dict = yaml.safe_load(def_yaml)
with self.assertRaises(Exception) as cm:
utils.transform_definition(def_dict)
self.assertIn('Missing required parameters', cm.exception.message)
def test_unexpected_action_params_failure(self):
def_yaml = _read_file_content(WF_UNEXP_PARAM_PATH)
def_dict = yaml.safe_load(def_yaml)
with self.assertRaises(Exception) as cm:
utils.transform_definition(def_dict)
self.assertIn('Unexpected parameters', cm.exception.message)
def test_deprecated_callback_action(self):
def_yaml = _read_file_content(WB_PRE_XFORM_PATH)
def_dict = yaml.safe_load(def_yaml)
def_dict['workflows']['main']['tasks']['callback'] = {'action': 'st2.callback'}
def_yaml = yaml.safe_dump(def_dict)
self.assertRaises(Exception, utils.transform_definition, def_yaml)
| {
"content_hash": "21c4d0a2d9ffed0a93f3dc9cb54cca22",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 96,
"avg_line_length": 37.577181208053695,
"alnum_prop": 0.674048937310234,
"repo_name": "Itxaka/st2",
"id": "e13e193e48c3dc36158e6aa8c6b8fc8c66fd2612",
"size": "6379",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "st2actions/tests/unit/test_mistral_dsl_transform.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "198"
},
{
"name": "Makefile",
"bytes": "35769"
},
{
"name": "PowerShell",
"bytes": "299"
},
{
"name": "Python",
"bytes": "2673739"
},
{
"name": "Shell",
"bytes": "16008"
},
{
"name": "Slash",
"bytes": "677"
}
],
"symlink_target": ""
} |
from django import template
register = template.Library()
@register.inclusion_tag('core/paginator.html', takes_context=True)
def paginator(context, adjacent_pages=5):
current_page = context['page_obj'].number
number_of_pages = context['paginator'].num_pages
page_obj = context['page_obj']
paginator = context['paginator']
    start_page = max(current_page - adjacent_pages, 1)
    end_page = current_page + adjacent_pages + 1
    if end_page > number_of_pages:
        end_page = number_of_pages + 1
    page_numbers = [n for n in range(start_page, end_page) if 0 < n <= number_of_pages]
return {
'page_obj': page_obj,
'paginator': paginator,
'page': current_page,
'pages': number_of_pages,
'page_numbers': page_numbers,
'has_previous': page_obj.has_previous(),
'has_next': page_obj.has_next(),
'show_first': 1 != current_page,
'show_last': number_of_pages != current_page,
}
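# A quick illustration of the windowing arithmetic above: with 20 pages,
# current page 10 and adjacent_pages=5, the numbers rendered are 5..15
# (illustrative values only).
def _example_page_window(current_page=10, number_of_pages=20, adjacent_pages=5):
    start_page = max(current_page - adjacent_pages, 1)
    end_page = min(current_page + adjacent_pages + 1, number_of_pages + 1)
    return [n for n in range(start_page, end_page) if 0 < n <= number_of_pages]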
| {
"content_hash": "a5c3c687f1f715cb3d5be7efd024a4ce",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 85,
"avg_line_length": 37.11538461538461,
"alnum_prop": 0.633160621761658,
"repo_name": "valbertovc/blog_django_bootstrap_ajax",
"id": "1b44284a0aab215f0d8251334be71dcb4bd53726",
"size": "965",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/templatetags/paginator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "75882"
},
{
"name": "HTML",
"bytes": "30317"
},
{
"name": "JavaScript",
"bytes": "516222"
},
{
"name": "Python",
"bytes": "41576"
}
],
"symlink_target": ""
} |
class mapreducejob:
# Data source (mandatory). Can be any dict object; its keys/values will be
# passed to the "mapfn" function.
datasource = {0: "This is the first phrase",
1: "This is the second phrase and the end",}
# Map function (mandatory). A static method holding the map code to be executed
# on worker machines. Because this function runs alone in a separate
# environment, variables outside of it are invisible; for example, all needed
# modules must be imported inside the function.
# @k - key. The key from "datasource" sent by the server.
# @v - value. The value from "datasource" sent by the server.
# @return - yields (key, value) pairs
@staticmethod
def mapfn(k, v):
for w in v.split():
yield w, 1
import time
time.sleep(5)
# Reduce function (optional). A static method holding the reduce code to be
# executed on worker machines. Because this function runs alone in a separate
# environment, variables outside of it are invisible; for example, all needed
# modules must be imported inside the function.
# @k - key. A key returned by "mapfn".
# @vs - values. A list of values returned from "mapfn" across all worker machines.
# @return - the result of reducing "vs" for key "k"
@staticmethod
def reducefn(k, vs):
result = sum(vs)
import time
time.sleep(5)
return result
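# Illustration only: a minimal single-process driver sketch showing how a
# server might exercise this job locally. The driver below is an assumption
# for clarity, not part of the mapreducepy API (note each mapfn/reducefn call
# also sleeps 5 s to simulate work):
#
#   intermediate = {}
#   for k, v in mapreducejob.datasource.items():
#       for key, val in mapreducejob.mapfn(k, v):
#           intermediate.setdefault(key, []).append(val)
#   results = {k: mapreducejob.reducefn(k, vs) for k, vs in intermediate.items()}
#   # e.g. results['the'] == 3 for the two sample phrases above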
| {
"content_hash": "46375e54c382d0bc3855fcb673d62644",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 91,
"avg_line_length": 47.36363636363637,
"alnum_prop": 0.6621880998080614,
"repo_name": "darrenzhangcn/mapreducepy",
"id": "6bf81f63a657c667ccb7a70b9be179db97d3033b",
"size": "1686",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/example_basic.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "46072"
}
],
"symlink_target": ""
} |
from viper.core.ui import commands
from viper.core.database import Database
from viper.core.project import Project
from tests.conftest import FIXTURE_DIR
import re
import os
import sys
try:
from unittest import mock
except ImportError:
# Python2
import mock
class TestCommands:
def setup_class(cls):
commands.Open().run('-f', os.path.join(FIXTURE_DIR, "chromeinstall-8u31.exe"))
commands.Store().run()
def teardown_method(self):
commands.Close().run()
def test_init(self):
instance = commands.Commands()
assert isinstance(instance, commands.Commands)
def test_help(self, capsys):
commands.Help().run()
commands.Clear().run()
out, err = capsys.readouterr()
assert re.search(r".* Commands.*", out)
assert re.search(r".* Modules.*", out)
def test_notes(self, capsys):
commands.Notes().run('-h')
out, err = capsys.readouterr()
assert re.search("usage: notes \[-h\] .*", out)
commands.Notes().run('-l')
out, err = capsys.readouterr()
assert re.search(".*No open session.*", out)
def test_open(self, capsys):
commands.Open().run('-h')
commands.Open().run('-u', 'https://github.com/viper-framework/viper-test-files/raw/master/test_files/cmd.exe')
out, err = capsys.readouterr()
assert re.search("usage: open \[-h\] .*", out)
assert re.search(".*Session opened on /tmp/.*", out)
def test_open_tor(self, capsys):
commands.Open().run('-h')
commands.Open().run('-t', '-u', 'https://github.com/viper-framework/viper-test-files/raw/master/test_files/cmd.exe')
out, err = capsys.readouterr()
assert re.search("usage: open \[-h\] .*", out)
assert re.search(".*Session opened on /tmp/.*", out)
def test_notes_existing(self, capsys):
commands.Open().run('-f', os.path.join(FIXTURE_DIR, "chromeinstall-8u31.exe"))
Database().add_note(commands.__sessions__.current.file.sha256, 'Note test', 'This is the content')
commands.Notes().run('-l')
commands.Notes().run('-v', '1')
commands.Notes().run('-d', '1')
out, err = capsys.readouterr()
assert re.search(".*1 | Note test.*", out)
assert re.search(".*This is the content.*", out)
def test_analysis(self, capsys):
commands.Open().run('-f', os.path.join(FIXTURE_DIR, "chromeinstall-8u31.exe"))
commands.Analysis().run('-h')
commands.Analysis().run('-l')
commands.Analysis().run('-v', '1')
out, err = capsys.readouterr()
assert re.search("usage: analysis \[-h\] .*", out)
assert re.search(".*Saved On.*", out)
assert re.search(".*Cmd Line.*", out)
def test_store(self, capsys):
commands.Store().run('-h')
out, err = capsys.readouterr()
assert re.search("usage: store \[-h\] .*", out)
def test_delete(self, capsys):
commands.Delete().run('-h')
out, err = capsys.readouterr()
assert re.search("usage: delete \[-h\] .*", out)
def test_find(self, capsys):
commands.Find().run('-h')
out, err = capsys.readouterr()
assert re.search("usage: find \[-h\] .*", out)
commands.Find().run('all')
out, err = capsys.readouterr()
assert re.search(".*chromeinstall-8u31.exe.*", out)
def test_tags(self, capsys):
commands.Tags().run('-h')
out, err = capsys.readouterr()
assert re.search("usage: tags \[-h\] .*", out)
def test_tags_use(self, capsys):
commands.Open().run('-f', os.path.join(FIXTURE_DIR, "chromeinstall-8u31.exe"))
commands.Tags().run('-a', 'mytag')
commands.Tags().run('-d', 'mytag')
out, err = capsys.readouterr()
lines = out.split('\n')
assert re.search(".*Tags added to the currently opened file.*", lines[1])
assert re.search(".*Refreshing session to update attributes....*", lines[2])
def test_sessions(self, capsys):
commands.Sessions().run('-h')
out, err = capsys.readouterr()
assert re.search("usage: sessions \[-h\] .*", out)
commands.Sessions().run('-l')
out, err = capsys.readouterr()
assert re.search(".*Opened Sessions.*", out)
def test_projects(self, capsys):
commands.Projects().run('-h')
out, err = capsys.readouterr()
assert re.search("usage: projects \[-h\] .*", out)
p = Project()
p.open("project_switch_test1")
commands.Projects().run('-l')
out, err = capsys.readouterr()
assert re.search(".*Projects Available.*", out)
assert re.search(".*project_switch_test1.*", out)
assert not re.search(".*not_there.*", out)
commands.Projects().run('-s', 'project_switch_test1')
out, err = capsys.readouterr()
lines = out.split('\n')
assert re.search(".*Switched to project.*", lines[0])
# return to default
p.open("default")
def test_export(self, capsys):
commands.Export().run('-h')
out, err = capsys.readouterr()
assert re.search("usage: export \[-h\] .*", out)
def test_stats(self, capsys):
commands.Stats().run('-h')
out, err = capsys.readouterr()
assert re.search("usage: stats \[-h\] .*", out)
def test_parent(self, capsys):
commands.Parent().run('-h')
out, err = capsys.readouterr()
assert re.search("usage: parent \[-h\] .*", out)
def test_rename(self, capsys):
commands.Find().run("all")
out, err = capsys.readouterr()
assert out == ""
commands.Open().run('-f', os.path.join(FIXTURE_DIR, "chromeinstall-8u31.exe"))
commands.Store().run()
_, _ = capsys.readouterr()
if sys.version_info <= (3, 0):
in_fct = 'viper.core.ui.commands.input'
else:
in_fct = 'builtins.input'
with mock.patch(in_fct, return_value='chromeinstall-8u31.exe.new'):
commands.Rename().run()
out, err = capsys.readouterr()
lines = out.split('\n')
assert re.search(r".*Current name is.*1mchromeinstall-8u31.exe.*", lines[0])
assert re.search(r".*Refreshing session to update attributes.*", lines[1])
def test_copy(self, capsys):
commands.Projects().run('-s', 'copy_test_dst')
out, err = capsys.readouterr()
lines = out.split('\n')
assert re.search(r".*Switched to project.*", lines[0])
commands.Find().run('all')
out, err = capsys.readouterr()
assert out == ""
commands.Projects().run('-s', 'copy_test_src')
out, err = capsys.readouterr()
lines = out.split('\n')
assert re.search(r".*Switched to project.*", lines[0])
commands.Find().run('all')
out, err = capsys.readouterr()
assert out == ""
commands.Open().run('-f', os.path.join(FIXTURE_DIR, "chromeinstall-8u31.exe"))
commands.Store().run()
out, err = capsys.readouterr()
lines = out.split('\n')
assert re.search(r".*Session opened on.*", lines[0])
assert re.search(r".*Stored file.*", lines[1])
commands.Find().run('all')
out, err = capsys.readouterr()
assert re.search(r".*\| 1 \| chromeinstall-8u31.exe.*", out)
assert not re.search(r".*\| 2 \|.*", out)
commands.Copy().run('-d', 'copy_test_dst')
out, err = capsys.readouterr()
lines = out.split('\n')
assert re.search(r".*Copied:.*", lines[0])
assert re.search(r".*Deleted:.*", lines[1])
assert re.search(r".*Successfully copied sample.*", lines[2])
commands.Find().run('all')
out, err = capsys.readouterr()
assert out == ""
assert not re.search(r".*\| 1 \| chromeinstall-8u31.exe.*", out)
assert not re.search(r".*\| 2 \|.*", out)
commands.Projects().run('-s', 'copy_test_dst')
out, err = capsys.readouterr()
assert re.search(r".*Switched to project.*", out)
commands.Find().run('all')
out, err = capsys.readouterr()
assert re.search(r".*\| 1 \| chromeinstall-8u31.exe.*", out)
assert not re.search(r".*\| 2 \|.*", out)
| {
"content_hash": "14626f811df0840e00a6ad178533f2b1",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 124,
"avg_line_length": 36.29694323144105,
"alnum_prop": 0.5684552454282964,
"repo_name": "kevthehermit/viper",
"id": "fb1af67985388f7998a26c08ea634d32f15ab1a0",
"size": "8458",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/core/ui/test_commands.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1306"
},
{
"name": "JavaScript",
"bytes": "9295"
},
{
"name": "Makefile",
"bytes": "436"
},
{
"name": "Python",
"bytes": "1533848"
},
{
"name": "Smarty",
"bytes": "28213"
}
],
"symlink_target": ""
} |
"""
Connect to local server:
python3 client.py samples/longloop.py -- -n 100 -p 0.01
Connect to remote server:
python3 client.py --host 192.168.11.2 samples/longloop.py -- -n 100 -p 0.01
"""
import aiohttp
import argparse
import asyncio
import base64
import fcntl
import json
import os
import random
import signal
import sys
from functools import partial
def set_nonblocking(fileno):
fl = fcntl.fcntl(fileno, fcntl.F_GETFL)
return fcntl.fcntl(fileno, fcntl.F_SETFL, fl | os.O_NONBLOCK)
def read_file(file):
with open(file) as f:
return f.read()
def encode(data):
return base64.b64encode(data).decode('utf-8')
def decode(data):
return base64.b64decode(data.encode('utf-8'))
def make_run_msg(args, stdout, stderr, chunk_size, env, files):
return {'type': 'run',
'args': args,
'stdout': stdout,
'stderr': stderr,
'chunk_size': chunk_size,
'env': env,
'files': files}
def make_signal_msg(signum):
return {'type': 'signal',
'signum': signum}
def make_stdin_msg(data):
return {'type': 'stdin',
'data': encode(data)}
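# For illustration (values hypothetical), the JSON messages built above look like:
#   run:    {"type": "run", "args": ["/usr/bin/env", "python3", "main.py"],
#            "stdout": "pipe", "stderr": "pipe", "chunk_size": 1024,
#            "env": {"PYTHONUNBUFFERED": "1"}, "files": {"main.py": "..."}}
#   signal: {"type": "signal", "signum": 2}        # SIGINT on POSIX
#   stdin:  {"type": "stdin", "data": "aGVsbG8K"}  # base64-encoded bytes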
def sigint_handler(ws):
"""Ctrl-C handler."""
asyncio.ensure_future(ws.send_json(make_signal_msg(signal.SIGINT)))
def siginfo_handler(ws):
"""Ctrl-T handler."""
asyncio.ensure_future(ws.send_json(make_signal_msg(signal.SIGKILL)))
async def send_stdin(queue, ws):
while True:
data = await queue.get()
await ws.send_json(make_stdin_msg(data))
async def run(session, uri, run_msg, handle_sigint):
def read_stdin(queue):
data = sys.stdin.buffer.read(100 * 1024)
asyncio.ensure_future(queue.put(data))
async with session.ws_connect(uri) as ws:
queue = asyncio.Queue()
loop = asyncio.get_running_loop()
loop.add_reader(sys.stdin, partial(read_stdin, queue=queue))
asyncio.ensure_future(send_stdin(queue, ws))
await ws.send_json(run_msg)
if hasattr(signal, 'SIGINFO'):  # Ctrl-T (SIGINFO) exists only on BSD/macOS
loop.add_signal_handler(signal.SIGINFO, partial(siginfo_handler, ws=ws))
if handle_sigint:
loop.add_signal_handler(signal.SIGINT,
partial(sigint_handler, ws=ws))
async for msg in ws:
if msg.type == aiohttp.WSMsgType.TEXT:
params = json.loads(msg.data)
kind = params['type']
if kind == 'exit':
return params['code']
elif kind == 'stdout':
sys.stdout.buffer.write(decode(params['data']))
sys.stdout.buffer.flush()
elif kind == 'stderr':
sys.stderr.buffer.write(decode(params['data']))
sys.stderr.buffer.flush()
return 127 # WebSocket was closed without sending 'exit' message.
async def main():
parser = argparse.ArgumentParser()
parser.add_argument('--host', type=str, default='localhost')
parser.add_argument('--port', type=int, default=8765)
parser.add_argument('--chunk_size', type=int, default=1024)
parser.add_argument('--stdout', type=str, default='pipe',
choices=('null', 'pipe'))
parser.add_argument('--stderr', type=str, default='pipe',
choices=('null', 'pipe', 'stdout'))
parser.add_argument('--nosigint', dest='sigint', action='store_false', default=True)
parser.add_argument('file')
args, unknown_args = parser.parse_known_args()
uri = 'ws://%s:%d/spawn' % (args.host, args.port)
code = read_file(args.file)
async with aiohttp.ClientSession() as session:
env = {'PYTHONUNBUFFERED': '1'}
files = {'main.py': code}
all_args = ['/usr/bin/env', 'python3', 'main.py'] + unknown_args
run_msg = make_run_msg(all_args, args.stdout, args.stderr, args.chunk_size, env, files)
return await run(session, uri, run_msg, args.sigint)
if __name__ == '__main__':
set_nonblocking(sys.stdin.fileno())
loop = asyncio.get_event_loop()
code = loop.run_until_complete(main())
sys.exit(code)
| {
"content_hash": "871edd303f29bf72a62630d8b3b9f691",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 95,
"avg_line_length": 31.921875,
"alnum_prop": 0.6045031815956926,
"repo_name": "google/aiyprojects-raspbian-tools",
"id": "383744e60364bea17538f3b53d68fca541f4f4bb",
"size": "4689",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "packages/aiy-cwc-server/client.py",
"mode": "33261",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
from django.urls import include, path
from django.views.generic import RedirectView
from reviewboard.admin import admin_site, views
from reviewboard.admin.forms.auth_settings import AuthenticationSettingsForm
from reviewboard.admin.forms.avatar_settings import AvatarServicesForm
from reviewboard.admin.forms.diff_settings import DiffSettingsForm
from reviewboard.admin.forms.email_settings import EMailSettingsForm
from reviewboard.admin.forms.general_settings import GeneralSettingsForm
from reviewboard.admin.forms.logging_settings import LoggingSettingsForm
from reviewboard.admin.forms.privacy_settings import PrivacySettingsForm
from reviewboard.admin.forms.search_settings import SearchSettingsForm
from reviewboard.admin.forms.storage_settings import StorageSettingsForm
from reviewboard.admin.forms.support_settings import SupportSettingsForm
urlpatterns = [
path('', views.admin_dashboard_view, name='admin-dashboard'),
path('cache/', views.cache_stats, name='admin-server-cache'),
path('db/', admin_site.urls),
path('integrations/', include('reviewboard.integrations.urls')),
path('log/', include('djblets.log.urls')),
path('security/', views.security, name='admin-security-checks'),
path('settings/', include([
path('', RedirectView.as_view(url='general/', permanent=True),
name='site-settings'),
path('general/',
views.site_settings,
kwargs={
'form_class': GeneralSettingsForm,
},
name='settings-general'),
path('authentication/',
views.site_settings,
kwargs={
'form_class': AuthenticationSettingsForm,
},
name='settings-authentication'),
path('avatars/',
views.site_settings,
kwargs={
'form_class': AvatarServicesForm,
},
name='settings-avatars'),
path('email/',
views.site_settings,
kwargs={
'form_class': EMailSettingsForm,
},
name='settings-email'),
path('diffs/',
views.site_settings,
kwargs={
'form_class': DiffSettingsForm,
},
name='settings-diffs'),
path('logging/',
views.site_settings,
kwargs={
'form_class': LoggingSettingsForm,
},
name='settings-logging'),
path('privacy/',
views.site_settings,
kwargs={
'form_class': PrivacySettingsForm,
'template_name': 'admin/privacy_settings.html',
},
name='settings-privacy'),
path('ssh/',
views.ssh_settings,
name='settings-ssh'),
path('storage/',
views.site_settings,
kwargs={
'form_class': StorageSettingsForm,
},
name='settings-storage'),
path('support/',
views.site_settings,
kwargs={
'form_class': SupportSettingsForm,
},
name='settings-support'),
path('search/',
views.site_settings,
kwargs={
'form_class': SearchSettingsForm,
},
name='settings-search'),
])),
path('widget-activity/', views.widget_activity),
]
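# Most settings URLs above reuse views.site_settings, varying only the
# form_class kwarg (the privacy page also overrides template_name); SSH
# settings use the dedicated views.ssh_settings view instead.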
| {
"content_hash": "2ef6b9c692dfb3e04de02cc15cfd676f",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 76,
"avg_line_length": 31.36036036036036,
"alnum_prop": 0.5748348175811548,
"repo_name": "reviewboard/reviewboard",
"id": "15f9b9a28bc348dcdfb4a1e7cb5af04aeb303816",
"size": "4685",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reviewboard/admin/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10167"
},
{
"name": "Dockerfile",
"bytes": "7721"
},
{
"name": "HTML",
"bytes": "226489"
},
{
"name": "JavaScript",
"bytes": "3991608"
},
{
"name": "Less",
"bytes": "438017"
},
{
"name": "Python",
"bytes": "9186415"
},
{
"name": "Shell",
"bytes": "3855"
}
],
"symlink_target": ""
} |
import os, argparse, sys
sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__)), "pyUtils"))
from utils import Utils
from genFuncs import genHelper
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-prefix', type=str)
parser.add_argument('-externalLibDir', type=str)
parser.add_argument('-CC', type=str, nargs = 1)
parser.add_argument('-CXX', type=str, nargs = 1)
parser.add_argument('-libs', type=str)
parser.add_argument('-ldFlags', type=str)
parser.add_argument('-cxxFlags', type=str)
parser.add_argument('-private', action = "store_true", help="Use private repos")
parser.add_argument('-name', type=str, required = True)
return parser.parse_args()
def main():
args = parse_args()
prefix = ""
external = "external"
CC = genHelper.determineCC(args)
CXX = genHelper.determineCXX(args)
if(args.externalLibDir):
external = args.externalLibDir
cmd = os.path.join(os.path.dirname(os.path.dirname(__file__)), "setUpScripts/generateCompFile.py") + """ -installName {name}
-outFilename compfile.mk -externalLoc {external} -CC {CC} -CXX {CXX}
-neededLibs {libs} -outname {name}"""
if args.private:
cmd += " -private ";
if args.prefix and args.prefix != "":
prefix = args.prefix
cmd += " -prefix {prefix}"
if args.ldFlags and "" != args.ldFlags:
cmd += " -ldFlags " + args.ldFlags
if args.cxxFlags and "" != args.cxxFlags:
addingFlags = " -cxxFlags \""
if args.cxxFlags.startswith("-"):
addingFlags += "\\"
cmd += addingFlags + args.cxxFlags + "\""
cmd = " ".join(cmd.split())
cmd = cmd.format(name = args.name, external = external, CC=CC, CXX=CXX, libs = args.libs, prefix = prefix)
Utils.run(cmd)
main()
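# Example invocation (all values hypothetical; flags mirror parse_args above):
#   ./njhConfigure.py -name myProject -libs boost -CC gcc -CXX g++ \
#       -prefix /usr/local -externalLibDir external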
| {
"content_hash": "4d41930f71de691e9bceaff79da9ab2c",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 128,
"avg_line_length": 39.361702127659576,
"alnum_prop": 0.6291891891891892,
"repo_name": "weng-lab/TwoBit",
"id": "9b1b3dde87f792335219d19fbe5bb16e0d4f1ca3",
"size": "1874",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "scripts/setUpScripts/njhConfigure.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1515"
},
{
"name": "C++",
"bytes": "55874"
},
{
"name": "Makefile",
"bytes": "13480"
},
{
"name": "Python",
"bytes": "239884"
},
{
"name": "Shell",
"bytes": "806"
}
],
"symlink_target": ""
} |
"""
Pure functions used in transformer implementation.
"""
import numpy as np
import torch
import torch.nn as nn
from parlai.core.opt import Opt
def create_position_codes(n_pos, dim, out):
"""
Create positional codes and store them in ``out``.
"""
position_enc = np.array(
[
[pos / np.power(10000, 2 * j / dim) for j in range(dim // 2)]
for pos in range(n_pos)
]
)
out.detach_()
out.requires_grad = False
out[:, 0::2] = torch.FloatTensor(np.sin(position_enc)).type_as(out)
out[:, 1::2] = torch.FloatTensor(np.cos(position_enc)).type_as(out)
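# The table filled in above is the standard sinusoidal positional encoding:
#   out[pos, 2j]   = sin(pos / 10000**(2j / dim))
#   out[pos, 2j+1] = cos(pos / 10000**(2j / dim))
# so even columns hold sines and odd columns the matching cosines.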
def create_embeddings(dictionary, embedding_size, padding_idx):
"""
Create and initialize word embeddings.
"""
e = nn.Embedding(len(dictionary), embedding_size, padding_idx)
nn.init.normal_(e.weight, mean=0, std=embedding_size**-0.5)
nn.init.constant_(e.weight[padding_idx], 0)
return e
def get_n_positions_from_options(opt: Opt):
"""
Determine n_positions from options dict.
"""
if opt.get('n_positions'):
# if the number of positions is explicitly provided, use that
n_positions = opt['n_positions']
else:
# else, use the worst case from truncate
n_positions = max(
opt.get('truncate') or 0,
opt.get('text_truncate') or 0,
opt.get('label_truncate') or 0,
)
if n_positions == 0:
# default to 1024
n_positions = 1024
if n_positions < 0:
raise ValueError('n_positions must be positive')
return n_positions
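# Sketch of the fallback logic above (opt contents hypothetical):
#   {'n_positions': 512}                                          -> 512 (explicit wins)
#   {'truncate': 0, 'text_truncate': 360, 'label_truncate': 128}  -> max(...) = 360
#   {}                                                            -> 1024 (default)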
| {
"content_hash": "2951a70f5bfc106835495e705ba2721f",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 73,
"avg_line_length": 27.637931034482758,
"alnum_prop": 0.5995009357454772,
"repo_name": "facebookresearch/ParlAI",
"id": "f003ab931449a33ab0162bd26478b37f55379222",
"size": "1802",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "parlai/agents/transformer/modules/functions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "2000"
},
{
"name": "CSS",
"bytes": "38474"
},
{
"name": "Cuda",
"bytes": "4118"
},
{
"name": "Dockerfile",
"bytes": "1218"
},
{
"name": "HTML",
"bytes": "645771"
},
{
"name": "JavaScript",
"bytes": "405110"
},
{
"name": "Makefile",
"bytes": "289"
},
{
"name": "Python",
"bytes": "6802410"
},
{
"name": "Shell",
"bytes": "26147"
}
],
"symlink_target": ""
} |
from BattleBase import *
from DistributedBattleAI import *
from toontown.toonbase.ToontownBattleGlobals import *
import random
from toontown.suit import DistributedSuitBaseAI
import SuitBattleGlobals
import BattleExperienceAI
from toontown.toon import NPCToons
from toontown.pets import PetTricks, DistributedPetProxyAI
from toontown.hood import ZoneUtil
from direct.showbase.PythonUtil import lerp
import sys
import math
class BattleCalculatorAI:
AccuracyBonuses = [0, 20, 40, 60]
DamageBonuses = [0, 20, 20, 20]
AttackExpPerTrack = [0, 10, 20, 30, 40, 50, 60]
NumRoundsLured = [2, 2, 3, 3, 4, 4, 15]
TRAP_CONFLICT = -2
APPLY_HEALTH_ADJUSTMENTS = 1
TOONS_TAKE_NO_DAMAGE = 0
CAP_HEALS = 1
CLEAR_SUIT_ATTACKERS = 1
SUITS_UNLURED_IMMEDIATELY = 1
CLEAR_MULTIPLE_TRAPS = 0
KBBONUS_LURED_FLAG = 0
KBBONUS_TGT_LURED = 1
notify = DirectNotifyGlobal.directNotify.newCategory('BattleCalculatorAI')
toonsAlwaysHit = simbase.config.GetBool('toons-always-hit', 0)
toonsAlwaysMiss = simbase.config.GetBool('toons-always-miss', 0)
toonsAlways5050 = simbase.config.GetBool('toons-always-5050', 0)
suitsAlwaysHit = simbase.config.GetBool('suits-always-hit', 0)
suitsAlwaysMiss = simbase.config.GetBool('suits-always-miss', 0)
immortalSuits = simbase.config.GetBool('immortal-suits', 0)
propAndOrganicBonusStack = simbase.config.GetBool('prop-and-organic-bonus-stack', 0)
def __init__(self, battle, tutorialFlag = 0):
self.battle = battle
self.SuitAttackers = {}
self.currentlyLuredSuits = {}
self.successfulLures = {}
self.toonAtkOrder = []
self.toonHPAdjusts = {}
self.toonSkillPtsGained = {}
self.traps = {}
self.npcTraps = {}
self.suitAtkStats = {}
self.__clearBonuses(hp=1)
self.__clearBonuses(hp=0)
self.delayedUnlures = []
self.__skillCreditMultiplier = simbase.air.baseXpMultiplier
self.tutorialFlag = tutorialFlag
self.trainTrapTriggered = False
self.fireDifficulty = 0
def setSkillCreditMultiplier(self, mult):
self.__skillCreditMultiplier = simbase.air.baseXpMultiplier * mult
def getSkillCreditMultiplier(self):
return self.__skillCreditMultiplier
def cleanup(self):
self.battle = None
return
def __calcToonAtkHit(self, attackIndex, atkTargets):
if len(atkTargets) == 0:
return (0, 0)
if self.tutorialFlag:
return (1, 95)
if self.toonsAlways5050:
roll = random.randint(0, 99)
if roll < 50:
return (1, 95)
else:
return (0, 0)
if self.toonsAlwaysHit:
return (1, 95)
elif self.toonsAlwaysMiss:
return (0, 0)
debug = self.notify.getDebug()
attack = self.battle.toonAttacks[attackIndex]
atkTrack, atkLevel = self.__getActualTrackLevel(attack)
hasAccuracyBuff = False
toon = simbase.air.doId2do.get(attack[TOON_ID_COL])
if toon:
if toon.hasBuff(BGagAccuracy):
if not ZoneUtil.isDynamicZone(toon.zoneId):
if ZoneUtil.getWhereName(toon.zoneId, True) in ('street', 'factoryExterior', 'cogHQExterior'):
hasAccuracyBuff = True
if atkTrack == NPCSOS:
return (1, 95)
if atkTrack == FIRE:
return (1, 95)
if atkTrack == TRAP:
if debug:
self.notify.debug('Attack is a trap, so it hits regardless')
attack[TOON_ACCBONUS_COL] = 0
return (1, 100)
elif atkTrack == DROP and attack[TOON_TRACK_COL] == NPCSOS:
unluredSuits = 0
for tgt in atkTargets:
if not self.__suitIsLured(tgt.getDoId()):
unluredSuits = 1
if unluredSuits == 0:
attack[TOON_ACCBONUS_COL] = 1
return (0, 0)
elif atkTrack == DROP:
allLured = True
for i in xrange(len(atkTargets)):
if self.__suitIsLured(atkTargets[i].getDoId()):
pass
else:
allLured = False
if allLured:
attack[TOON_ACCBONUS_COL] = 1
return (0, 0)
elif atkTrack == PETSOS:
return self.__calculatePetTrickSuccess(attack)
tgtDef = 0
numLured = 0
if atkTrack != HEAL:
for currTarget in atkTargets:
thisSuitDef = self.__targetDefense(currTarget, atkTrack)
if debug:
self.notify.debug('Examining suit def for toon attack: ' + str(thisSuitDef))
tgtDef = min(thisSuitDef, tgtDef)
if self.__suitIsLured(currTarget.getDoId()):
numLured += 1
trackExp = self.__toonTrackExp(attack[TOON_ID_COL], atkTrack)
for currOtherAtk in self.toonAtkOrder:
if currOtherAtk != attack[TOON_ID_COL]:
nextAttack = self.battle.toonAttacks[currOtherAtk]
nextAtkTrack = self.__getActualTrack(nextAttack)
if atkTrack == nextAtkTrack and attack[TOON_TGT_COL] == nextAttack[TOON_TGT_COL]:
currTrackExp = self.__toonTrackExp(nextAttack[TOON_ID_COL], atkTrack)
if debug:
self.notify.debug('Examining toon track exp bonus: ' + str(currTrackExp))
trackExp = max(currTrackExp, trackExp)
if debug:
if atkTrack == HEAL:
self.notify.debug('Toon attack is a heal, no target def used')
else:
self.notify.debug('Suit defense used for toon attack: ' + str(tgtDef))
self.notify.debug('Toon track exp bonus used for toon attack: ' + str(trackExp))
if attack[TOON_TRACK_COL] == NPCSOS:
randChoice = 0
else:
randChoice = random.randint(0, 99)
propAcc = AvPropAccuracy[atkTrack][atkLevel]
if hasAccuracyBuff:
propAcc *= BGagAccuracyMultiplier
if atkTrack == LURE:
treebonus = self.__toonCheckGagBonus(attack[TOON_ID_COL], atkTrack, atkLevel)
propBonus = self.__checkPropBonus(atkTrack)
if self.propAndOrganicBonusStack:
propAcc = 0
if treebonus:
self.notify.debug('using organic bonus lure accuracy')
propAcc += AvLureBonusAccuracy[atkLevel]
if propBonus:
self.notify.debug('using prop bonus lure accuracy')
propAcc += AvLureBonusAccuracy[atkLevel]
elif treebonus or propBonus:
self.notify.debug('using organic OR prop bonus lure accuracy')
propAcc = AvLureBonusAccuracy[atkLevel]
attackAcc = propAcc + trackExp + tgtDef
currAtk = self.toonAtkOrder.index(attackIndex)
if currAtk > 0 and atkTrack != HEAL:
prevAtkId = self.toonAtkOrder[currAtk - 1]
prevAttack = self.battle.toonAttacks[prevAtkId]
prevAtkTrack = self.__getActualTrack(prevAttack)
lure = atkTrack == LURE and (not attackAffectsGroup(atkTrack, atkLevel,
attack[TOON_TRACK_COL]) and attack[TOON_TGT_COL] in self.successfulLures or attackAffectsGroup(atkTrack, atkLevel, attack[TOON_TRACK_COL]))
if atkTrack == prevAtkTrack and (attack[TOON_TGT_COL] == prevAttack[TOON_TGT_COL] or lure):
if prevAttack[TOON_ACCBONUS_COL] == 1:
if debug:
self.notify.debug('DODGE: Toon attack track dodged')
elif prevAttack[TOON_ACCBONUS_COL] == 0:
if debug:
self.notify.debug('HIT: Toon attack track hit')
attack[TOON_ACCBONUS_COL] = prevAttack[TOON_ACCBONUS_COL]
return (not attack[TOON_ACCBONUS_COL], attackAcc)
atkAccResult = attackAcc
if debug:
self.notify.debug('setting atkAccResult to %d' % atkAccResult)
acc = attackAcc + self.__calcToonAccBonus(attackIndex)
if atkTrack != LURE and atkTrack != HEAL:
if atkTrack != DROP:
if numLured == len(atkTargets):
if debug:
self.notify.debug('all targets are lured, attack hits')
attack[TOON_ACCBONUS_COL] = 0
return (1, 100)
else:
luredRatio = float(numLured) / float(len(atkTargets))
accAdjust = 100 * luredRatio
if accAdjust > 0 and debug:
self.notify.debug(str(numLured) + ' out of ' + str(len(atkTargets)) + ' targets are lured, so adding ' + str(accAdjust) + ' to attack accuracy')
acc += accAdjust
elif numLured == len(atkTargets):
if debug:
self.notify.debug('all targets are lured, attack misses')
attack[TOON_ACCBONUS_COL] = 0
return (0, 0)
if acc > MaxToonAcc:
acc = MaxToonAcc
if randChoice < acc:
if debug:
self.notify.debug('HIT: Toon attack rolled' + str(randChoice) + 'to hit with an accuracy of' + str(acc))
attack[TOON_ACCBONUS_COL] = 0
else:
if debug:
self.notify.debug('MISS: Toon attack rolled' + str(randChoice) + 'to hit with an accuracy of' + str(acc))
attack[TOON_ACCBONUS_COL] = 1
return (not attack[TOON_ACCBONUS_COL], atkAccResult)
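# Worked example of the accuracy roll above (numbers hypothetical): with
# propAcc = 75, trackExp bonus = 30 and tgtDef = -10, attackAcc comes to 95;
# one earlier same-turn hit by a different track adds AccuracyBonuses[1] = 20,
# so acc = 115 is clamped to MaxToonAcc before being compared against
# random.randint(0, 99).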
def __toonTrackExp(self, toonId, track):
toon = self.battle.getToon(toonId)
if toon != None:
toonExpLvl = toon.experience.getExpLevel(track)
exp = self.AttackExpPerTrack[toonExpLvl]
if track == HEAL:
exp = exp * 0.5
self.notify.debug('Toon track exp: ' + str(toonExpLvl) + ' and resulting acc bonus: ' + str(exp))
return exp
else:
return 0
return
def __toonCheckGagBonus(self, toonId, track, level):
toon = self.battle.getToon(toonId)
if toon != None:
return toon.checkGagBonus(track, level)
else:
return False
return
def __checkPropBonus(self, track):
result = False
if self.battle.getInteractivePropTrackBonus() == track:
result = True
return result
def __targetDefense(self, suit, atkTrack):
if atkTrack == HEAL:
return 0
suitDef = SuitBattleGlobals.SuitAttributes[suit.dna.name]['def'][suit.getLevel()]
return -suitDef
def __createToonTargetList(self, attackIndex):
attack = self.battle.toonAttacks[attackIndex]
atkTrack, atkLevel = self.__getActualTrackLevel(attack)
targetList = []
if atkTrack == NPCSOS:
return targetList
if not attackAffectsGroup(atkTrack, atkLevel, attack[TOON_TRACK_COL]):
if atkTrack == HEAL:
target = attack[TOON_TGT_COL]
else:
target = self.battle.findSuit(attack[TOON_TGT_COL])
if target != None:
targetList.append(target)
elif atkTrack == HEAL or atkTrack == PETSOS:
if attack[TOON_TRACK_COL] == NPCSOS or atkTrack == PETSOS:
targetList = self.battle.activeToons
else:
for currToon in self.battle.activeToons:
if attack[TOON_ID_COL] != currToon:
targetList.append(currToon)
else:
targetList = self.battle.activeSuits
return targetList
def __prevAtkTrack(self, attackerId, toon = 1):
if toon:
prevAtkIdx = self.toonAtkOrder.index(attackerId) - 1
if prevAtkIdx >= 0:
prevAttackerId = self.toonAtkOrder[prevAtkIdx]
attack = self.battle.toonAttacks[prevAttackerId]
return self.__getActualTrack(attack)
else:
return NO_ATTACK
def getSuitTrapType(self, suitId):
if suitId in self.traps:
if self.traps[suitId][0] == self.TRAP_CONFLICT:
return NO_TRAP
else:
return self.traps[suitId][0]
else:
return NO_TRAP
def __suitTrapDamage(self, suitId):
if suitId in self.traps:
return self.traps[suitId][2]
else:
return 0
def addTrainTrapForJoiningSuit(self, suitId):
self.notify.debug('addTrainTrapForJoiningSuit suit=%d self.traps=%s' % (suitId, self.traps))
trapInfoToUse = None
for trapInfo in self.traps.values():
if trapInfo[0] == UBER_GAG_LEVEL_INDEX:
trapInfoToUse = trapInfo
break
if trapInfoToUse:
self.traps[suitId] = trapInfoToUse
else:
self.notify.warning('huh we did not find a train trap?')
return
def __addSuitGroupTrap(self, suitId, trapLvl, attackerId, allSuits, npcDamage = 0):
if npcDamage == 0:
if suitId in self.traps:
if self.traps[suitId][0] == self.TRAP_CONFLICT:
pass
else:
self.traps[suitId][0] = self.TRAP_CONFLICT
for suit in allSuits:
id = suit.doId
if id in self.traps:
self.traps[id][0] = self.TRAP_CONFLICT
else:
self.traps[id] = [self.TRAP_CONFLICT, 0, 0]
else:
toon = self.battle.getToon(attackerId)
organicBonus = toon.checkGagBonus(TRAP, trapLvl)
propBonus = self.__checkPropBonus(TRAP)
damage = getAvPropDamage(TRAP, trapLvl, toon.experience.getExp(TRAP), organicBonus, propBonus, self.propAndOrganicBonusStack)
if self.itemIsCredit(TRAP, trapLvl):
self.traps[suitId] = [trapLvl, attackerId, damage]
else:
self.traps[suitId] = [trapLvl, 0, damage]
self.notify.debug('calling __addLuredSuitsDelayed')
self.__addLuredSuitsDelayed(attackerId, targetId=-1, ignoreDamageCheck=True)
elif suitId in self.traps:
if self.traps[suitId][0] == self.TRAP_CONFLICT:
self.traps[suitId] = [trapLvl, 0, npcDamage]
elif not self.__suitIsLured(suitId):
self.traps[suitId] = [trapLvl, 0, npcDamage]
def __addSuitTrap(self, suitId, trapLvl, attackerId, npcDamage = 0):
if npcDamage == 0:
if suitId in self.traps:
if self.traps[suitId][0] == self.TRAP_CONFLICT:
pass
else:
self.traps[suitId][0] = self.TRAP_CONFLICT
else:
toon = self.battle.getToon(attackerId)
organicBonus = toon.checkGagBonus(TRAP, trapLvl)
propBonus = self.__checkPropBonus(TRAP)
damage = getAvPropDamage(TRAP, trapLvl, toon.experience.getExp(TRAP), organicBonus, propBonus, self.propAndOrganicBonusStack)
if self.itemIsCredit(TRAP, trapLvl):
self.traps[suitId] = [trapLvl, attackerId, damage]
else:
self.traps[suitId] = [trapLvl, 0, damage]
elif suitId in self.traps:
if self.traps[suitId][0] == self.TRAP_CONFLICT:
self.traps[suitId] = [trapLvl, 0, npcDamage]
elif not self.__suitIsLured(suitId):
self.traps[suitId] = [trapLvl, 0, npcDamage]
def __removeSuitTrap(self, suitId):
if suitId in self.traps:
del self.traps[suitId]
def __clearTrapCreator(self, creatorId, suitId = None):
if suitId == None:
for currTrap in self.traps.keys():
if creatorId == self.traps[currTrap][1]:
self.traps[currTrap][1] = 0
elif suitId in self.traps:
self.traps[suitId][1] = 0
return
def __trapCreator(self, suitId):
if suitId in self.traps:
return self.traps[suitId][1]
else:
return 0
def __initTraps(self):
self.trainTrapTriggered = False
keysList = self.traps.keys()
for currTrap in keysList:
if self.traps[currTrap][0] == self.TRAP_CONFLICT:
del self.traps[currTrap]
def __calcToonAtkHp(self, toonId):
attack = self.battle.toonAttacks[toonId]
targetList = self.__createToonTargetList(toonId)
atkHit, atkAcc = self.__calcToonAtkHit(toonId, targetList)
atkTrack, atkLevel, atkHp = self.__getActualTrackLevelHp(attack)
if not atkHit and atkTrack != HEAL:
return
validTargetAvail = 0
lureDidDamage = 0
currLureId = -1
for currTarget in xrange(len(targetList)):
attackLevel = -1
attackTrack = None
attackDamage = 0
toonTarget = 0
targetLured = 0
if atkTrack == HEAL or atkTrack == PETSOS:
targetId = targetList[currTarget]
toonTarget = 1
else:
targetId = targetList[currTarget].getDoId()
if atkTrack == LURE:
if self.getSuitTrapType(targetId) == NO_TRAP:
if self.notify.getDebug():
self.notify.debug('Suit lured, but no trap exists')
if self.SUITS_UNLURED_IMMEDIATELY:
if not self.__suitIsLured(targetId, prevRound=1):
if not self.__combatantDead(targetId, toon=toonTarget):
validTargetAvail = 1
rounds = self.NumRoundsLured[atkLevel]
wakeupChance = 100 - atkAcc * 2
npcLurer = attack[TOON_TRACK_COL] == NPCSOS
currLureId = self.__addLuredSuitInfo(targetId, -1, rounds, wakeupChance, toonId, atkLevel, lureId=currLureId, npc=npcLurer)
if self.notify.getDebug():
self.notify.debug('Suit lured for ' + str(rounds) + ' rounds max with ' + str(wakeupChance) + '% chance to wake up each round')
targetLured = 1
else:
attackTrack = TRAP
if targetId in self.traps:
trapInfo = self.traps[targetId]
attackLevel = trapInfo[0]
else:
attackLevel = NO_TRAP
attackDamage = self.__suitTrapDamage(targetId)
trapCreatorId = self.__trapCreator(targetId)
if trapCreatorId > 0:
self.notify.debug('Giving trap EXP to toon ' + str(trapCreatorId))
self.__addAttackExp(attack, track=TRAP, level=attackLevel, attackerId=trapCreatorId)
self.__clearTrapCreator(trapCreatorId, targetId)
lureDidDamage = 1
if self.notify.getDebug():
self.notify.debug('Suit lured right onto a trap! (' + str(AvProps[attackTrack][attackLevel]) + ',' + str(attackLevel) + ')')
if not self.__combatantDead(targetId, toon=toonTarget):
validTargetAvail = 1
targetLured = 1
if not self.SUITS_UNLURED_IMMEDIATELY:
if not self.__suitIsLured(targetId, prevRound=1):
if not self.__combatantDead(targetId, toon=toonTarget):
validTargetAvail = 1
rounds = self.NumRoundsLured[atkLevel]
wakeupChance = 100 - atkAcc * 2
npcLurer = attack[TOON_TRACK_COL] == NPCSOS
currLureId = self.__addLuredSuitInfo(targetId, -1, rounds, wakeupChance, toonId, atkLevel, lureId=currLureId, npc=npcLurer)
if self.notify.getDebug():
self.notify.debug('Suit lured for ' + str(rounds) + ' rounds max with ' + str(wakeupChance) + '% chance to wake up each round')
targetLured = 1
if attackLevel != -1:
self.__addLuredSuitsDelayed(toonId, targetId)
if targetLured and (not targetId in self.successfulLures or targetId in self.successfulLures and self.successfulLures[targetId][1] < atkLevel):
self.notify.debug('Adding target ' + str(targetId) + ' to successfulLures list')
self.successfulLures[targetId] = [toonId, atkLevel, atkAcc, -1]
else:
if atkTrack == TRAP:
npcDamage = 0
if attack[TOON_TRACK_COL] == NPCSOS:
npcDamage = atkHp
if self.CLEAR_MULTIPLE_TRAPS:
if self.getSuitTrapType(targetId) != NO_TRAP:
self.__clearAttack(toonId)
return
if atkLevel == UBER_GAG_LEVEL_INDEX:
self.__addSuitGroupTrap(targetId, atkLevel, toonId, targetList, npcDamage)
if self.__suitIsLured(targetId):
self.notify.debug('Train Trap on lured suit %d, \n indicating with KBBONUS_COL flag' % targetId)
tgtPos = self.battle.activeSuits.index(targetList[currTarget])
attack[TOON_KBBONUS_COL][tgtPos] = self.KBBONUS_LURED_FLAG
else:
self.__addSuitTrap(targetId, atkLevel, toonId, npcDamage)
elif self.__suitIsLured(targetId) and atkTrack == SOUND:
self.notify.debug('Sound on lured suit, ' + 'indicating with KBBONUS_COL flag')
tgtPos = self.battle.activeSuits.index(targetList[currTarget])
attack[TOON_KBBONUS_COL][tgtPos] = self.KBBONUS_LURED_FLAG
attackLevel = atkLevel
attackTrack = atkTrack
toon = self.battle.getToon(toonId)
if attack[TOON_TRACK_COL] == NPCSOS and lureDidDamage != 1 or attack[TOON_TRACK_COL] == PETSOS:
attackDamage = atkHp
elif atkTrack == FIRE:
suit = self.battle.findSuit(targetId)
if suit:
costToFire = 1 + self.fireDifficulty
abilityToFire = toon.getPinkSlips()
numLeft = abilityToFire - costToFire
if numLeft < 0:
numLeft = 0
toon.b_setPinkSlips(numLeft)
self.fireDifficulty += 1
if costToFire > abilityToFire:
simbase.air.writeServerEvent('suspicious', toonId, 'Toon attempting to fire a %s cost cog with %s pinkslips' % (costToFire, abilityToFire))
self.notify.warning('Not enough PinkSlips to fire cog')
else:
suit.skeleRevives = 0
attackDamage = suit.getHP()
else:
attackDamage = 0
bonus = 0
else:
organicBonus = toon.checkGagBonus(attackTrack, attackLevel)
propBonus = self.__checkPropBonus(attackTrack)
attackDamage = getAvPropDamage(attackTrack, attackLevel, toon.experience.getExp(attackTrack), organicBonus, propBonus, self.propAndOrganicBonusStack)
if not self.__combatantDead(targetId, toon=toonTarget):
if self.__suitIsLured(targetId) and atkTrack == DROP:
self.notify.debug('not setting validTargetAvail, since drop on a lured suit')
else:
validTargetAvail = 1
if attackLevel == -1 and not atkTrack == FIRE:
result = LURE_SUCCEEDED
elif atkTrack != TRAP:
result = attackDamage
if atkTrack == HEAL:
if not self.__attackHasHit(attack, suit=0):
result = result * 0.2
if self.notify.getDebug():
self.notify.debug('toon does ' + str(result) + ' healing to toon(s)')
else:
if self.__suitIsLured(targetId) and atkTrack == DROP:
result = 0
self.notify.debug('setting damage to 0, since drop on a lured suit')
if self.notify.getDebug():
self.notify.debug('toon does ' + str(result) + ' damage to suit')
else:
result = 0
if result != 0 or atkTrack == PETSOS:
targets = self.__getToonTargets(attack)
if targetList[currTarget] not in targets:
if self.notify.getDebug():
self.notify.debug('Target of toon is not accessible!')
continue
targetIndex = targets.index(targetList[currTarget])
if atkTrack == HEAL:
result = result / len(targetList)
if self.notify.getDebug():
self.notify.debug('Splitting heal among ' + str(len(targetList)) + ' targets')
if targetId in self.successfulLures and atkTrack == LURE:
self.notify.debug('Updating lure damage to ' + str(result))
self.successfulLures[targetId][3] = result
else:
attack[TOON_HP_COL][targetIndex] = result
if result > 0 and atkTrack != HEAL and atkTrack != DROP and atkTrack != PETSOS:
attackTrack = LURE
lureInfos = self.__getLuredExpInfo(targetId)
for currInfo in lureInfos:
if currInfo[3]:
self.notify.debug('Giving lure EXP to toon ' + str(currInfo[0]))
self.__addAttackExp(attack, track=attackTrack, level=currInfo[1], attackerId=currInfo[0])
self.__clearLurer(currInfo[0], lureId=currInfo[2])
if lureDidDamage:
if self.itemIsCredit(atkTrack, atkLevel):
self.notify.debug('Giving lure EXP to toon ' + str(toonId))
self.__addAttackExp(attack)
if not validTargetAvail and self.__prevAtkTrack(toonId) != atkTrack:
self.__clearAttack(toonId)
return
def __getToonTargets(self, attack):
track = self.__getActualTrack(attack)
if track == HEAL or track == PETSOS:
return self.battle.activeToons
else:
return self.battle.activeSuits
def __attackHasHit(self, attack, suit = 0):
if suit == 1:
for dmg in attack[SUIT_HP_COL]:
if dmg > 0:
return 1
return 0
else:
track = self.__getActualTrack(attack)
return not attack[TOON_ACCBONUS_COL] and track != NO_ATTACK
def __attackDamage(self, attack, suit = 0):
if suit:
for dmg in attack[SUIT_HP_COL]:
if dmg > 0:
return dmg
return 0
else:
for dmg in attack[TOON_HP_COL]:
if dmg > 0:
return dmg
return 0
def __attackDamageForTgt(self, attack, tgtPos, suit = 0):
if suit:
return attack[SUIT_HP_COL][tgtPos]
else:
return attack[TOON_HP_COL][tgtPos]
def __calcToonAccBonus(self, attackKey):
numPrevHits = 0
attackIdx = self.toonAtkOrder.index(attackKey)
for currPrevAtk in xrange(attackIdx - 1, -1, -1):
attack = self.battle.toonAttacks[attackKey]
atkTrack, atkLevel = self.__getActualTrackLevel(attack)
prevAttackKey = self.toonAtkOrder[currPrevAtk]
prevAttack = self.battle.toonAttacks[prevAttackKey]
prvAtkTrack, prvAtkLevel = self.__getActualTrackLevel(prevAttack)
if self.__attackHasHit(prevAttack) and (attackAffectsGroup(prvAtkTrack, prvAtkLevel, prevAttack[TOON_TRACK_COL]) or attackAffectsGroup(atkTrack, atkLevel, attack[TOON_TRACK_COL]) or attack[TOON_TGT_COL] == prevAttack[TOON_TGT_COL]) and atkTrack != prvAtkTrack:
numPrevHits += 1
if numPrevHits > 0 and self.notify.getDebug():
self.notify.debug('ACC BONUS: toon attack received accuracy ' + 'bonus of ' + str(self.AccuracyBonuses[numPrevHits]) + ' from previous attack by (' + str(attack[TOON_ID_COL]) + ') which hit')
return self.AccuracyBonuses[numPrevHits]
def __applyToonAttackDamages(self, toonId, hpbonus = 0, kbbonus = 0):
totalDamages = 0
if not self.APPLY_HEALTH_ADJUSTMENTS:
return totalDamages
attack = self.battle.toonAttacks[toonId]
track = self.__getActualTrack(attack)
if track != NO_ATTACK and track != SOS and track != TRAP and track != NPCSOS:
targets = self.__getToonTargets(attack)
for position in xrange(len(targets)):
if hpbonus:
if targets[position] in self.__createToonTargetList(toonId):
damageDone = attack[TOON_HPBONUS_COL]
else:
damageDone = 0
elif kbbonus:
if targets[position] in self.__createToonTargetList(toonId):
damageDone = attack[TOON_KBBONUS_COL][position]
else:
damageDone = 0
else:
damageDone = attack[TOON_HP_COL][position]
if damageDone <= 0 or self.immortalSuits:
continue
if track == HEAL or track == PETSOS:
currTarget = targets[position]
if self.CAP_HEALS:
toonHp = self.__getToonHp(currTarget)
toonMaxHp = self.__getToonMaxHp(currTarget)
if toonHp + damageDone > toonMaxHp:
damageDone = toonMaxHp - toonHp
attack[TOON_HP_COL][position] = damageDone
self.toonHPAdjusts[currTarget] += damageDone
totalDamages = totalDamages + damageDone
continue
currTarget = targets[position]
currTarget.setHP(currTarget.getHP() - damageDone)
targetId = currTarget.getDoId()
if self.notify.getDebug():
if hpbonus:
self.notify.debug(str(targetId) + ': suit takes ' + str(damageDone) + ' damage from HP-Bonus')
elif kbbonus:
self.notify.debug(str(targetId) + ': suit takes ' + str(damageDone) + ' damage from KB-Bonus')
else:
self.notify.debug(str(targetId) + ': suit takes ' + str(damageDone) + ' damage')
totalDamages = totalDamages + damageDone
if currTarget.getHP() <= 0:
if currTarget.getSkeleRevives() >= 1:
currTarget.useSkeleRevive()
attack[SUIT_REVIVE_COL] = attack[SUIT_REVIVE_COL] | 1 << position
else:
self.suitLeftBattle(targetId)
attack[SUIT_DIED_COL] = attack[SUIT_DIED_COL] | 1 << position
if self.notify.getDebug():
self.notify.debug('Suit' + str(targetId) + 'bravely expired in combat')
return totalDamages
def __combatantDead(self, avId, toon):
if toon:
if self.__getToonHp(avId) <= 0:
return 1
else:
suit = self.battle.findSuit(avId)
if suit.getHP() <= 0:
return 1
return 0
def __combatantJustRevived(self, avId):
suit = self.battle.findSuit(avId)
if suit.reviveCheckAndClear():
return 1
else:
return 0
def __addAttackExp(self, attack, track = -1, level = -1, attackerId = -1):
trk = -1
lvl = -1
id = -1
if track != -1 and level != -1 and attackerId != -1:
trk = track
lvl = level
id = attackerId
elif self.__attackHasHit(attack):
if self.notify.getDebug():
self.notify.debug('Attack ' + repr(attack) + ' has hit')
trk = attack[TOON_TRACK_COL]
lvl = attack[TOON_LVL_COL]
id = attack[TOON_ID_COL]
if trk != -1 and trk != NPCSOS and trk != PETSOS and lvl != -1 and id != -1:
expList = self.toonSkillPtsGained.get(id, None)
if expList == None:
expList = [0, 0, 0, 0, 0, 0, 0]
self.toonSkillPtsGained[id] = expList
expList[trk] = min(ExperienceCap, expList[trk] + (lvl + 1) * self.__skillCreditMultiplier)
return
def __clearTgtDied(self, tgt, lastAtk, currAtk):
position = self.battle.activeSuits.index(tgt)
currAtkTrack = self.__getActualTrack(currAtk)
lastAtkTrack = self.__getActualTrack(lastAtk)
if currAtkTrack == lastAtkTrack and lastAtk[SUIT_DIED_COL] & 1 << position and self.__attackHasHit(currAtk, suit=0):
if self.notify.getDebug():
self.notify.debug('Clearing suit died for ' + str(tgt.getDoId()) + ' at position ' + str(position) + ' from toon attack ' + str(lastAtk[TOON_ID_COL]) + ' and setting it for ' + str(currAtk[TOON_ID_COL]))
lastAtk[SUIT_DIED_COL] = lastAtk[SUIT_DIED_COL] ^ 1 << position
self.suitLeftBattle(tgt.getDoId())
currAtk[SUIT_DIED_COL] = currAtk[SUIT_DIED_COL] | 1 << position
def __addDmgToBonuses(self, dmg, attackIndex, hp = 1):
toonId = self.toonAtkOrder[attackIndex]
attack = self.battle.toonAttacks[toonId]
atkTrack = self.__getActualTrack(attack)
if atkTrack == HEAL or atkTrack == PETSOS:
return
tgts = self.__createToonTargetList(toonId)
for currTgt in tgts:
tgtPos = self.battle.suits.index(currTgt)
attackerId = self.toonAtkOrder[attackIndex]
attack = self.battle.toonAttacks[attackerId]
track = self.__getActualTrack(attack)
if hp:
if track in self.hpBonuses[tgtPos]:
self.hpBonuses[tgtPos][track].append([attackIndex, dmg])
else:
self.hpBonuses[tgtPos][track] = [[attackIndex, dmg]]
elif self.__suitIsLured(currTgt.getDoId()):
if track in self.kbBonuses[tgtPos]:
self.kbBonuses[tgtPos][track].append([attackIndex, dmg])
else:
self.kbBonuses[tgtPos][track] = [[attackIndex, dmg]]
def __clearBonuses(self, hp = 1):
if hp:
self.hpBonuses = [{}, {}, {}, {}]
else:
self.kbBonuses = [{}, {}, {}, {}]
def __bonusExists(self, tgtSuit, hp = 1):
tgtPos = self.activeSuits.index(tgtSuit)
if hp:
bonusLen = len(self.hpBonuses[tgtPos])
else:
bonusLen = len(self.kbBonuses[tgtPos])
if bonusLen > 0:
return 1
return 0
def __processBonuses(self, hp = 1):
if hp:
bonusList = self.hpBonuses
self.notify.debug('Processing hpBonuses: ' + repr(self.hpBonuses))
else:
bonusList = self.kbBonuses
self.notify.debug('Processing kbBonuses: ' + repr(self.kbBonuses))
tgtPos = 0
for currTgt in bonusList:
for currAtkType in currTgt.keys():
if len(currTgt[currAtkType]) > 1 or (not hp and len(currTgt[currAtkType]) > 0):
totalDmgs = 0
for currDmg in currTgt[currAtkType]:
totalDmgs += currDmg[1]
numDmgs = len(currTgt[currAtkType])
attackIdx = currTgt[currAtkType][numDmgs - 1][0]
attackerId = self.toonAtkOrder[attackIdx]
attack = self.battle.toonAttacks[attackerId]
if hp:
attack[TOON_HPBONUS_COL] = math.ceil(totalDmgs * (self.DamageBonuses[numDmgs - 1] * 0.01))
if self.notify.getDebug():
self.notify.debug('Applying hp bonus to track ' + str(attack[TOON_TRACK_COL]) + ' of ' + str(attack[TOON_HPBONUS_COL]))
elif len(attack[TOON_KBBONUS_COL]) > tgtPos:
attack[TOON_KBBONUS_COL][tgtPos] = totalDmgs * 0.5
if self.notify.getDebug():
self.notify.debug('Applying kb bonus to track ' + str(attack[TOON_TRACK_COL]) + ' of ' + str(attack[TOON_KBBONUS_COL][tgtPos]) + ' to target ' + str(tgtPos))
else:
self.notify.warning('invalid tgtPos for knock back bonus: %d' % tgtPos)
tgtPos += 1
if hp:
self.__clearBonuses()
else:
self.__clearBonuses(hp=0)
def __handleBonus(self, attackIdx, hp = 1):
attackerId = self.toonAtkOrder[attackIdx]
attack = self.battle.toonAttacks[attackerId]
atkDmg = self.__attackDamage(attack, suit=0)
atkTrack = self.__getActualTrack(attack)
if atkDmg > 0:
if hp:
if atkTrack != LURE:
self.notify.debug('Adding dmg of ' + str(atkDmg) + ' to hpBonuses list')
self.__addDmgToBonuses(atkDmg, attackIdx)
elif self.__knockBackAtk(attackerId, toon=1):
self.notify.debug('Adding dmg of ' + str(atkDmg) + ' to kbBonuses list')
self.__addDmgToBonuses(atkDmg, attackIdx, hp=0)
def __clearAttack(self, attackIdx, toon = 1):
if toon:
if self.notify.getDebug():
self.notify.debug('clearing out toon attack for toon ' + str(attackIdx) + '...')
attack = self.battle.toonAttacks[attackIdx]
self.battle.toonAttacks[attackIdx] = getToonAttack(attackIdx)
longest = max(len(self.battle.activeToons), len(self.battle.activeSuits))
taList = self.battle.toonAttacks
for j in xrange(longest):
taList[attackIdx][TOON_HP_COL].append(-1)
taList[attackIdx][TOON_KBBONUS_COL].append(-1)
if self.notify.getDebug():
self.notify.debug('toon attack is now ' + repr(self.battle.toonAttacks[attackIdx]))
else:
self.notify.warning('__clearAttack not implemented for suits!')
def __rememberToonAttack(self, suitId, toonId, damage):
if suitId not in self.SuitAttackers:
self.SuitAttackers[suitId] = {toonId: damage}
elif toonId not in self.SuitAttackers[suitId]:
self.SuitAttackers[suitId][toonId] = damage
elif self.SuitAttackers[suitId][toonId] <= damage:
# record the new high damage for this toon against this suit
self.SuitAttackers[suitId][toonId] = damage
def __postProcessToonAttacks(self):
self.notify.debug('__postProcessToonAttacks()')
lastTrack = -1
lastAttacks = []
self.__clearBonuses()
for currToonAttack in self.toonAtkOrder:
if currToonAttack != -1:
attack = self.battle.toonAttacks[currToonAttack]
atkTrack, atkLevel = self.__getActualTrackLevel(attack)
if atkTrack != HEAL and atkTrack != SOS and atkTrack != NO_ATTACK and atkTrack != NPCSOS and atkTrack != PETSOS:
targets = self.__createToonTargetList(currToonAttack)
allTargetsDead = 1
for currTgt in targets:
damageDone = self.__attackDamage(attack, suit=0)
if damageDone > 0:
self.__rememberToonAttack(currTgt.getDoId(), attack[TOON_ID_COL], damageDone)
if atkTrack == TRAP:
if currTgt.doId in self.traps:
trapInfo = self.traps[currTgt.doId]
currTgt.battleTrap = trapInfo[0]
targetDead = 0
if currTgt.getHP() > 0:
allTargetsDead = 0
else:
targetDead = 1
if atkTrack != LURE:
for currLastAtk in lastAttacks:
self.__clearTgtDied(currTgt, currLastAtk, attack)
tgtId = currTgt.getDoId()
if tgtId in self.successfulLures and atkTrack == LURE:
lureInfo = self.successfulLures[tgtId]
self.notify.debug('applying lure data: ' + repr(lureInfo))
toonId = lureInfo[0]
lureAtk = self.battle.toonAttacks[toonId]
tgtPos = self.battle.activeSuits.index(currTgt)
if currTgt.doId in self.traps:
trapInfo = self.traps[currTgt.doId]
if trapInfo[0] == UBER_GAG_LEVEL_INDEX:
self.notify.debug('train trap triggered for %d' % currTgt.doId)
self.trainTrapTriggered = True
self.__removeSuitTrap(tgtId)
lureAtk[TOON_KBBONUS_COL][tgtPos] = self.KBBONUS_TGT_LURED
lureAtk[TOON_HP_COL][tgtPos] = lureInfo[3]
elif self.__suitIsLured(tgtId) and atkTrack == DROP:
self.notify.debug('Drop on lured suit, ' + 'indicating with KBBONUS_COL ' + 'flag')
tgtPos = self.battle.activeSuits.index(currTgt)
attack[TOON_KBBONUS_COL][tgtPos] = self.KBBONUS_LURED_FLAG
if targetDead and atkTrack != lastTrack:
tgtPos = self.battle.activeSuits.index(currTgt)
attack[TOON_HP_COL][tgtPos] = 0
attack[TOON_KBBONUS_COL][tgtPos] = -1
if allTargetsDead and atkTrack != lastTrack:
if self.notify.getDebug():
self.notify.debug('all targets of toon attack ' + str(currToonAttack) + ' are dead')
self.__clearAttack(currToonAttack, toon=1)
attack = self.battle.toonAttacks[currToonAttack]
atkTrack, atkLevel = self.__getActualTrackLevel(attack)
damagesDone = self.__applyToonAttackDamages(currToonAttack)
self.__applyToonAttackDamages(currToonAttack, hpbonus=1)
if atkTrack != LURE and atkTrack != DROP and atkTrack != SOUND:
self.__applyToonAttackDamages(currToonAttack, kbbonus=1)
if lastTrack != atkTrack:
lastAttacks = []
lastTrack = atkTrack
lastAttacks.append(attack)
if self.itemIsCredit(atkTrack, atkLevel):
if atkTrack == TRAP or atkTrack == LURE:
pass
elif atkTrack == HEAL:
if damagesDone != 0:
self.__addAttackExp(attack)
else:
self.__addAttackExp(attack)
if self.trainTrapTriggered:
for suit in self.battle.activeSuits:
suitId = suit.doId
self.__removeSuitTrap(suitId)
suit.battleTrap = NO_TRAP
self.notify.debug('train trap triggered, removing trap from %d' % suitId)
if self.notify.getDebug():
for currToonAttack in self.toonAtkOrder:
attack = self.battle.toonAttacks[currToonAttack]
self.notify.debug('Final Toon attack: ' + str(attack))
def __allTargetsDead(self, attackIdx, toon = 1):
allTargetsDead = 1
if toon:
targets = self.__createToonTargetList(attackIdx)
for currTgt in targets:
if currTgt.getHP() > 0:
allTargetsDead = 0
break
else:
self.notify.warning('__allTargetsDead: suit ver. not implemented!')
return allTargetsDead
def __clearLuredSuitsByAttack(self, toonId, kbBonusReq = 0, targetId = -1):
if self.notify.getDebug():
self.notify.debug('__clearLuredSuitsByAttack')
if targetId != -1 and self.__suitIsLured(targetId):
self.__removeLured(targetId)
else:
tgtList = self.__createToonTargetList(toonId)
for t in tgtList:
if self.__suitIsLured(t.getDoId()) and (not kbBonusReq or self.__bonusExists(t, hp=0)):
self.__removeLured(t.getDoId())
if self.notify.getDebug():
self.notify.debug('Suit %d stepping from lured spot' % t.getDoId())
else:
self.notify.debug('Suit ' + str(t.getDoId()) + ' not found in currently lured suits')
def __clearLuredSuitsDelayed(self):
if self.notify.getDebug():
self.notify.debug('__clearLuredSuitsDelayed')
for t in self.delayedUnlures:
if self.__suitIsLured(t):
self.__removeLured(t)
if self.notify.getDebug():
self.notify.debug('Suit %d stepping back from lured spot' % t)
else:
self.notify.debug('Suit ' + str(t) + ' not found in currently lured suits')
self.delayedUnlures = []
def __addLuredSuitsDelayed(self, toonId, targetId = -1, ignoreDamageCheck = False):
if self.notify.getDebug():
self.notify.debug('__addLuredSuitsDelayed')
if targetId != -1:
self.delayedUnlures.append(targetId)
else:
tgtList = self.__createToonTargetList(toonId)
for t in tgtList:
if self.__suitIsLured(t.getDoId()) and t.getDoId() not in self.delayedUnlures and (self.__attackDamageForTgt(self.battle.toonAttacks[toonId], self.battle.activeSuits.index(t), suit=0) > 0 or ignoreDamageCheck):
self.delayedUnlures.append(t.getDoId())
def __calculateToonAttacks(self):
self.notify.debug('__calculateToonAttacks()')
self.__clearBonuses(hp=0)
currTrack = None
self.notify.debug('Traps: ' + str(self.traps))
maxSuitLevel = 0
for cog in self.battle.activeSuits:
maxSuitLevel = max(maxSuitLevel, cog.getActualLevel())
self.creditLevel = maxSuitLevel
for toonId in self.toonAtkOrder:
if self.__combatantDead(toonId, toon=1):
if self.notify.getDebug():
self.notify.debug("Toon %d is dead and can't attack" % toonId)
continue
attack = self.battle.toonAttacks[toonId]
atkTrack = self.__getActualTrack(attack)
if atkTrack != NO_ATTACK and atkTrack != SOS and atkTrack != NPCSOS:
if self.notify.getDebug():
self.notify.debug('Calculating attack for toon: %d' % toonId)
if self.SUITS_UNLURED_IMMEDIATELY:
if currTrack and atkTrack != currTrack:
self.__clearLuredSuitsDelayed()
currTrack = atkTrack
self.__calcToonAtkHp(toonId)
attackIdx = self.toonAtkOrder.index(toonId)
self.__handleBonus(attackIdx, hp=0)
self.__handleBonus(attackIdx, hp=1)
lastAttack = self.toonAtkOrder.index(toonId) >= len(self.toonAtkOrder) - 1
unlureAttack = self.__attackHasHit(attack, suit=0) and self.__unlureAtk(toonId, toon=1)
if unlureAttack:
if lastAttack:
self.__clearLuredSuitsByAttack(toonId)
else:
self.__addLuredSuitsDelayed(toonId)
if lastAttack:
self.__clearLuredSuitsDelayed()
self.__processBonuses(hp=0)
self.__processBonuses(hp=1)
self.__postProcessToonAttacks()
return
def __knockBackAtk(self, attackIndex, toon = 1):
if toon and (self.battle.toonAttacks[attackIndex][TOON_TRACK_COL] == THROW or self.battle.toonAttacks[attackIndex][TOON_TRACK_COL] == SQUIRT):
if self.notify.getDebug():
self.notify.debug('attack is a knockback')
return 1
return 0
def __unlureAtk(self, attackIndex, toon = 1):
attack = self.battle.toonAttacks[attackIndex]
track = self.__getActualTrack(attack)
if toon and (track == THROW or track == SQUIRT or track == SOUND):
if self.notify.getDebug():
self.notify.debug('attack is an unlure')
return 1
return 0
def __calcSuitAtkType(self, attackIndex):
theSuit = self.battle.activeSuits[attackIndex]
attacks = SuitBattleGlobals.SuitAttributes[theSuit.dna.name]['attacks']
atk = SuitBattleGlobals.pickSuitAttack(attacks, theSuit.getLevel())
return atk
def __calcSuitTarget(self, attackIndex):
attack = self.battle.suitAttacks[attackIndex]
suitId = attack[SUIT_ID_COL]
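        # With 75% probability a suit retaliates against one of its recorded
        # attackers, weighted by how much damage each toon has dealt to it;
        # otherwise (or if it has no recorded attackers) it picks a random
        # living toon.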
if suitId in self.SuitAttackers and random.randint(0, 99) < 75:
totalDamage = 0
for currToon in self.SuitAttackers[suitId].keys():
totalDamage += self.SuitAttackers[suitId][currToon]
dmgs = []
for currToon in self.SuitAttackers[suitId].keys():
dmgs.append(self.SuitAttackers[suitId][currToon] / totalDamage * 100)
dmgIdx = SuitBattleGlobals.pickFromFreqList(dmgs)
if dmgIdx == None:
toonId = self.__pickRandomToon(suitId)
else:
toonId = self.SuitAttackers[suitId].keys()[dmgIdx]
if toonId == -1 or toonId not in self.battle.activeToons:
return -1
self.notify.debug('Suit attacking back at toon ' + str(toonId))
return self.battle.activeToons.index(toonId)
else:
return self.__pickRandomToon(suitId)
return
def __pickRandomToon(self, suitId):
liveToons = []
for currToon in self.battle.activeToons:
if not self.__combatantDead(currToon, toon=1):
liveToons.append(self.battle.activeToons.index(currToon))
if len(liveToons) == 0:
self.notify.debug('No tgts avail. for suit ' + str(suitId))
return -1
chosen = random.choice(liveToons)
self.notify.debug('Suit randomly attacking toon ' + str(self.battle.activeToons[chosen]))
return chosen
def __suitAtkHit(self, attackIndex):
if self.suitsAlwaysHit:
return 1
elif self.suitsAlwaysMiss:
return 0
theSuit = self.battle.activeSuits[attackIndex]
atkType = self.battle.suitAttacks[attackIndex][SUIT_ATK_COL]
atkInfo = SuitBattleGlobals.getSuitAttack(theSuit.dna.name, theSuit.getLevel(), atkType)
atkAcc = atkInfo['acc']
suitAcc = SuitBattleGlobals.SuitAttributes[theSuit.dna.name]['acc'][theSuit.getLevel()]
acc = atkAcc
randChoice = random.randint(0, 99)
if self.notify.getDebug():
self.notify.debug('Suit attack rolled ' + str(randChoice) + ' to hit with an accuracy of ' + str(acc) + ' (attackAcc: ' + str(atkAcc) + ' suitAcc: ' + str(suitAcc) + ')')
if randChoice < acc:
return 1
return 0
def __suitAtkAffectsGroup(self, attack):
atkType = attack[SUIT_ATK_COL]
theSuit = self.battle.findSuit(attack[SUIT_ID_COL])
atkInfo = SuitBattleGlobals.getSuitAttack(theSuit.dna.name, theSuit.getLevel(), atkType)
return atkInfo['group'] != SuitBattleGlobals.ATK_TGT_SINGLE
def __createSuitTargetList(self, attackIndex):
attack = self.battle.suitAttacks[attackIndex]
targetList = []
if attack[SUIT_ATK_COL] == NO_ATTACK:
self.notify.debug('No attack, no targets')
return targetList
debug = self.notify.getDebug()
if not self.__suitAtkAffectsGroup(attack):
targetList.append(self.battle.activeToons[attack[SUIT_TGT_COL]])
if debug:
self.notify.debug('Suit attack is single target')
else:
if debug:
self.notify.debug('Suit attack is group target')
for currToon in self.battle.activeToons:
if debug:
                    self.notify.debug('Suit attack will target toon ' + str(currToon))
targetList.append(currToon)
return targetList
def __calcSuitAtkHp(self, attackIndex):
targetList = self.__createSuitTargetList(attackIndex)
attack = self.battle.suitAttacks[attackIndex]
for currTarget in xrange(len(targetList)):
toonId = targetList[currTarget]
toon = self.battle.getToon(toonId)
result = 0
if toon and toon.immortalMode:
result = 1
elif self.TOONS_TAKE_NO_DAMAGE:
result = 0
elif self.__suitAtkHit(attackIndex):
atkType = attack[SUIT_ATK_COL]
theSuit = self.battle.findSuit(attack[SUIT_ID_COL])
atkInfo = SuitBattleGlobals.getSuitAttack(theSuit.dna.name, theSuit.getLevel(), atkType)
result = atkInfo['hp']
targetIndex = self.battle.activeToons.index(toonId)
attack[SUIT_HP_COL][targetIndex] = result
def __getToonHp(self, toonDoId):
handle = self.battle.getToon(toonDoId)
if handle != None and toonDoId in self.toonHPAdjusts:
return handle.hp + self.toonHPAdjusts[toonDoId]
else:
return 0
return
def __getToonMaxHp(self, toonDoId):
handle = self.battle.getToon(toonDoId)
if handle != None:
return handle.maxHp
else:
return 0
return
def __applySuitAttackDamages(self, attackIndex):
attack = self.battle.suitAttacks[attackIndex]
if self.APPLY_HEALTH_ADJUSTMENTS:
for t in self.battle.activeToons:
position = self.battle.activeToons.index(t)
if attack[SUIT_HP_COL][position] <= 0:
continue
toonHp = self.__getToonHp(t)
if toonHp - attack[SUIT_HP_COL][position] <= 0:
if self.notify.getDebug():
self.notify.debug('Toon %d has died, removing' % t)
self.toonLeftBattle(t)
attack[TOON_DIED_COL] = attack[TOON_DIED_COL] | 1 << position
if self.notify.getDebug():
self.notify.debug('Toon ' + str(t) + ' takes ' + str(attack[SUIT_HP_COL][position]) + ' damage')
self.toonHPAdjusts[t] -= attack[SUIT_HP_COL][position]
self.notify.debug('Toon ' + str(t) + ' now has ' + str(self.__getToonHp(t)) + ' health')
def __suitCanAttack(self, suitId):
if self.__combatantDead(suitId, toon=0) or self.__suitIsLured(suitId) or self.__combatantJustRevived(suitId):
return 0
return 1
def __updateSuitAtkStat(self, toonId):
if toonId in self.suitAtkStats:
self.suitAtkStats[toonId] += 1
else:
self.suitAtkStats[toonId] = 1
def __printSuitAtkStats(self):
self.notify.debug('Suit Atk Stats:')
for currTgt in self.suitAtkStats.keys():
if currTgt not in self.battle.activeToons:
continue
tgtPos = self.battle.activeToons.index(currTgt)
self.notify.debug(' toon ' + str(currTgt) + ' at position ' + str(tgtPos) + ' was attacked ' + str(self.suitAtkStats[currTgt]) + ' times')
self.notify.debug('\n')
def __calculateSuitAttacks(self):
for i in xrange(len(self.battle.suitAttacks)):
if i < len(self.battle.activeSuits):
suitId = self.battle.activeSuits[i].doId
self.battle.suitAttacks[i][SUIT_ID_COL] = suitId
if not self.__suitCanAttack(suitId):
if self.notify.getDebug():
self.notify.debug("Suit %d can't attack" % suitId)
continue
if self.battle.pendingSuits.count(self.battle.activeSuits[i]) > 0 or self.battle.joiningSuits.count(self.battle.activeSuits[i]) > 0:
continue
attack = self.battle.suitAttacks[i]
attack[SUIT_ID_COL] = self.battle.activeSuits[i].doId
attack[SUIT_ATK_COL] = self.__calcSuitAtkType(i)
attack[SUIT_TGT_COL] = self.__calcSuitTarget(i)
if attack[SUIT_TGT_COL] == -1:
self.battle.suitAttacks[i] = getDefaultSuitAttack()
attack = self.battle.suitAttacks[i]
self.notify.debug('clearing suit attack, no avail targets')
self.__calcSuitAtkHp(i)
if attack[SUIT_ATK_COL] != NO_ATTACK:
if self.__suitAtkAffectsGroup(attack):
for currTgt in self.battle.activeToons:
self.__updateSuitAtkStat(currTgt)
else:
tgtId = self.battle.activeToons[attack[SUIT_TGT_COL]]
self.__updateSuitAtkStat(tgtId)
targets = self.__createSuitTargetList(i)
allTargetsDead = 1
for currTgt in targets:
if self.__getToonHp(currTgt) > 0:
allTargetsDead = 0
break
if allTargetsDead:
self.battle.suitAttacks[i] = getDefaultSuitAttack()
if self.notify.getDebug():
self.notify.debug('clearing suit attack, targets dead')
self.notify.debug('suit attack is now ' + repr(self.battle.suitAttacks[i]))
self.notify.debug('all attacks: ' + repr(self.battle.suitAttacks))
attack = self.battle.suitAttacks[i]
if self.__attackHasHit(attack, suit=1):
self.__applySuitAttackDamages(i)
if self.notify.getDebug():
self.notify.debug('Suit attack: ' + str(self.battle.suitAttacks[i]))
attack[SUIT_BEFORE_TOONS_COL] = 0
def __updateLureTimeouts(self):
if self.notify.getDebug():
self.notify.debug('__updateLureTimeouts()')
self.notify.debug('Lured suits: ' + str(self.currentlyLuredSuits))
noLongerLured = []
for currLuredSuit in self.currentlyLuredSuits.keys():
self.__incLuredCurrRound(currLuredSuit)
if self.__luredMaxRoundsReached(currLuredSuit) or self.__luredWakeupTime(currLuredSuit):
noLongerLured.append(currLuredSuit)
for currLuredSuit in noLongerLured:
self.__removeLured(currLuredSuit)
if self.notify.getDebug():
self.notify.debug('Lured suits: ' + str(self.currentlyLuredSuits))
def __initRound(self):
if self.CLEAR_SUIT_ATTACKERS:
self.SuitAttackers = {}
self.toonAtkOrder = []
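        # Attack order: pet tricks first, then fire, then each gag track in
        # order from HEAL through DROP; within the TRAP track, regular traps
        # are resolved before NPC SOS traps.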
attacks = findToonAttack(self.battle.activeToons, self.battle.toonAttacks, PETSOS)
for atk in attacks:
self.toonAtkOrder.append(atk[TOON_ID_COL])
attacks = findToonAttack(self.battle.activeToons, self.battle.toonAttacks, FIRE)
for atk in attacks:
self.toonAtkOrder.append(atk[TOON_ID_COL])
for track in xrange(HEAL, DROP + 1):
attacks = findToonAttack(self.battle.activeToons, self.battle.toonAttacks, track)
if track == TRAP:
sortedTraps = []
for atk in attacks:
if atk[TOON_TRACK_COL] == TRAP:
sortedTraps.append(atk)
for atk in attacks:
if atk[TOON_TRACK_COL] == NPCSOS:
sortedTraps.append(atk)
attacks = sortedTraps
for atk in attacks:
self.toonAtkOrder.append(atk[TOON_ID_COL])
specials = findToonAttack(self.battle.activeToons, self.battle.toonAttacks, NPCSOS)
toonsHit = 0
cogsMiss = 0
for special in specials:
npc_track = NPCToons.getNPCTrack(special[TOON_TGT_COL])
if npc_track == NPC_TOONS_HIT:
BattleCalculatorAI.toonsAlwaysHit = 1
toonsHit = 1
elif npc_track == NPC_COGS_MISS:
BattleCalculatorAI.suitsAlwaysMiss = 1
cogsMiss = 1
if self.notify.getDebug():
self.notify.debug('Toon attack order: ' + str(self.toonAtkOrder))
self.notify.debug('Active toons: ' + str(self.battle.activeToons))
self.notify.debug('Toon attacks: ' + str(self.battle.toonAttacks))
self.notify.debug('Active suits: ' + str(self.battle.activeSuits))
self.notify.debug('Suit attacks: ' + str(self.battle.suitAttacks))
self.toonHPAdjusts = {}
for t in self.battle.activeToons:
self.toonHPAdjusts[t] = 0
self.__clearBonuses()
self.__updateActiveToons()
self.delayedUnlures = []
self.__initTraps()
self.successfulLures = {}
return (toonsHit, cogsMiss)
def calculateRound(self):
longest = max(len(self.battle.activeToons), len(self.battle.activeSuits))
for t in self.battle.activeToons:
for j in xrange(longest):
self.battle.toonAttacks[t][TOON_HP_COL].append(-1)
self.battle.toonAttacks[t][TOON_KBBONUS_COL].append(-1)
for i in xrange(4):
for j in xrange(len(self.battle.activeToons)):
self.battle.suitAttacks[i][SUIT_HP_COL].append(-1)
toonsHit, cogsMiss = self.__initRound()
for suit in self.battle.activeSuits:
if suit.isGenerated():
suit.b_setHP(suit.getHP())
for suit in self.battle.activeSuits:
if not hasattr(suit, 'dna'):
self.notify.warning('a removed suit is in this battle!')
return None
self.__calculateToonAttacks()
self.__updateLureTimeouts()
self.__calculateSuitAttacks()
if toonsHit == 1:
BattleCalculatorAI.toonsAlwaysHit = 0
if cogsMiss == 1:
BattleCalculatorAI.suitsAlwaysMiss = 0
if self.notify.getDebug():
self.notify.debug('Toon skills gained after this round: ' + repr(self.toonSkillPtsGained))
self.__printSuitAtkStats()
return None
    def __calculateFiredCogs(self):
        # Debugging stub: drops into the debugger when invoked.
        import pdb
        pdb.set_trace()
def toonLeftBattle(self, toonId):
if self.notify.getDebug():
self.notify.debug('toonLeftBattle()' + str(toonId))
if toonId in self.toonSkillPtsGained:
del self.toonSkillPtsGained[toonId]
if toonId in self.suitAtkStats:
del self.suitAtkStats[toonId]
if not self.CLEAR_SUIT_ATTACKERS:
oldSuitIds = []
for s in self.SuitAttackers.keys():
if toonId in self.SuitAttackers[s]:
del self.SuitAttackers[s][toonId]
if len(self.SuitAttackers[s]) == 0:
oldSuitIds.append(s)
for oldSuitId in oldSuitIds:
del self.SuitAttackers[oldSuitId]
self.__clearTrapCreator(toonId)
self.__clearLurer(toonId)
def suitLeftBattle(self, suitId):
if self.notify.getDebug():
self.notify.debug('suitLeftBattle(): ' + str(suitId))
self.__removeLured(suitId)
if suitId in self.SuitAttackers:
del self.SuitAttackers[suitId]
self.__removeSuitTrap(suitId)
def __updateActiveToons(self):
if self.notify.getDebug():
self.notify.debug('updateActiveToons()')
if not self.CLEAR_SUIT_ATTACKERS:
oldSuitIds = []
for s in self.SuitAttackers.keys():
for t in self.SuitAttackers[s].keys():
if t not in self.battle.activeToons:
del self.SuitAttackers[s][t]
if len(self.SuitAttackers[s]) == 0:
oldSuitIds.append(s)
for oldSuitId in oldSuitIds:
del self.SuitAttackers[oldSuitId]
for trap in self.traps.keys():
if self.traps[trap][1] not in self.battle.activeToons:
self.notify.debug('Trap for toon ' + str(self.traps[trap][1]) + ' will no longer give exp')
self.traps[trap][1] = 0
def getSkillGained(self, toonId, track):
return BattleExperienceAI.getSkillGained(self.toonSkillPtsGained, toonId, track)
def getLuredSuits(self):
luredSuits = self.currentlyLuredSuits.keys()
self.notify.debug('Lured suits reported to battle: ' + repr(luredSuits))
return luredSuits
def __suitIsLured(self, suitId, prevRound = 0):
inList = suitId in self.currentlyLuredSuits
if prevRound:
return inList and self.currentlyLuredSuits[suitId][0] != -1
return inList
def __findAvailLureId(self, lurerId):
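        # Returns the smallest positive lure id that this lurer is not
        # already using across all currently lured suits.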
luredSuits = self.currentlyLuredSuits.keys()
lureIds = []
for currLured in luredSuits:
lurerInfo = self.currentlyLuredSuits[currLured][3]
lurers = lurerInfo.keys()
for currLurer in lurers:
currId = lurerInfo[currLurer][1]
if currLurer == lurerId and currId not in lureIds:
lureIds.append(currId)
lureIds.sort()
currId = 1
for currLureId in lureIds:
if currLureId != currId:
return currId
currId += 1
return currId
def __addLuredSuitInfo(self, suitId, currRounds, maxRounds, wakeChance, lurer, lureLvl, lureId = -1, npc = 0):
if lureId == -1:
availLureId = self.__findAvailLureId(lurer)
else:
availLureId = lureId
if npc == 1:
credit = 0
else:
credit = self.itemIsCredit(LURE, lureLvl)
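        # Lure bookkeeping layout, as used throughout this class:
        #   currentlyLuredSuits[suitId] = [currRounds, maxRounds, wakeChance,
        #                                  {lurerId: [lureLvl, lureId, credit]}]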
if suitId in self.currentlyLuredSuits:
lureInfo = self.currentlyLuredSuits[suitId]
            if lurer not in lureInfo[3]:
lureInfo[1] += maxRounds
if wakeChance < lureInfo[2]:
lureInfo[2] = wakeChance
lureInfo[3][lurer] = [lureLvl, availLureId, credit]
else:
lurerInfo = {lurer: [lureLvl, availLureId, credit]}
self.currentlyLuredSuits[suitId] = [currRounds,
maxRounds,
wakeChance,
lurerInfo]
self.notify.debug('__addLuredSuitInfo: currLuredSuits -> %s' % repr(self.currentlyLuredSuits))
return availLureId
def __getLurers(self, suitId):
if self.__suitIsLured(suitId):
return self.currentlyLuredSuits[suitId][3].keys()
return []
def __getLuredExpInfo(self, suitId):
returnInfo = []
lurers = self.__getLurers(suitId)
if len(lurers) == 0:
return returnInfo
lurerInfo = self.currentlyLuredSuits[suitId][3]
for currLurer in lurers:
returnInfo.append([currLurer,
lurerInfo[currLurer][0],
lurerInfo[currLurer][1],
lurerInfo[currLurer][2]])
return returnInfo
def __clearLurer(self, lurerId, lureId = -1):
luredSuits = self.currentlyLuredSuits.keys()
for currLured in luredSuits:
lurerInfo = self.currentlyLuredSuits[currLured][3]
lurers = lurerInfo.keys()
for currLurer in lurers:
if currLurer == lurerId and (lureId == -1 or lureId == lurerInfo[currLurer][1]):
del lurerInfo[currLurer]
def __setLuredMaxRounds(self, suitId, rounds):
if self.__suitIsLured(suitId):
self.currentlyLuredSuits[suitId][1] = rounds
def __setLuredWakeChance(self, suitId, chance):
if self.__suitIsLured(suitId):
self.currentlyLuredSuits[suitId][2] = chance
def __incLuredCurrRound(self, suitId):
if self.__suitIsLured(suitId):
self.currentlyLuredSuits[suitId][0] += 1
def __removeLured(self, suitId):
if self.__suitIsLured(suitId):
del self.currentlyLuredSuits[suitId]
def __luredMaxRoundsReached(self, suitId):
return self.__suitIsLured(suitId) and self.currentlyLuredSuits[suitId][0] >= self.currentlyLuredSuits[suitId][1]
def __luredWakeupTime(self, suitId):
return self.__suitIsLured(suitId) and self.currentlyLuredSuits[suitId][0] > 0 and random.randint(0, 99) < self.currentlyLuredSuits[suitId][2]
def itemIsCredit(self, track, level):
if track == PETSOS:
return 0
return level < self.creditLevel
def __getActualTrack(self, toonAttack):
if toonAttack[TOON_TRACK_COL] == NPCSOS:
track = NPCToons.getNPCTrack(toonAttack[TOON_TGT_COL])
if track != None:
return track
else:
self.notify.warning('No NPC with id: %d' % toonAttack[TOON_TGT_COL])
return toonAttack[TOON_TRACK_COL]
def __getActualTrackLevel(self, toonAttack):
if toonAttack[TOON_TRACK_COL] == NPCSOS:
track, level, hp = NPCToons.getNPCTrackLevelHp(toonAttack[TOON_TGT_COL])
if track != None:
return (track, level)
else:
self.notify.warning('No NPC with id: %d' % toonAttack[TOON_TGT_COL])
return (toonAttack[TOON_TRACK_COL], toonAttack[TOON_LVL_COL])
def __getActualTrackLevelHp(self, toonAttack):
if toonAttack[TOON_TRACK_COL] == NPCSOS:
track, level, hp = NPCToons.getNPCTrackLevelHp(toonAttack[TOON_TGT_COL])
if track != None:
return (track, level, hp)
else:
self.notify.warning('No NPC with id: %d' % toonAttack[TOON_TGT_COL])
elif toonAttack[TOON_TRACK_COL] == PETSOS:
trick = toonAttack[TOON_LVL_COL]
petProxyId = toonAttack[TOON_TGT_COL]
trickId = toonAttack[TOON_LVL_COL]
healRange = PetTricks.TrickHeals[trickId]
hp = 0
if petProxyId in simbase.air.doId2do:
petProxy = simbase.air.doId2do[petProxyId]
if trickId < len(petProxy.trickAptitudes):
aptitude = petProxy.trickAptitudes[trickId]
hp = int(lerp(healRange[0], healRange[1], aptitude))
else:
self.notify.warning('pet proxy: %d not in doId2do!' % petProxyId)
return (toonAttack[TOON_TRACK_COL], toonAttack[TOON_LVL_COL], hp)
return (toonAttack[TOON_TRACK_COL], toonAttack[TOON_LVL_COL], 0)
def __calculatePetTrickSuccess(self, toonAttack):
petProxyId = toonAttack[TOON_TGT_COL]
if not petProxyId in simbase.air.doId2do:
self.notify.warning('pet proxy %d not in doId2do!' % petProxyId)
toonAttack[TOON_ACCBONUS_COL] = 1
return (0, 0)
petProxy = simbase.air.doId2do[petProxyId]
trickId = toonAttack[TOON_LVL_COL]
toonAttack[TOON_ACCBONUS_COL] = petProxy.attemptBattleTrick(trickId)
if toonAttack[TOON_ACCBONUS_COL] == 1:
return (0, 0)
else:
return (1, 100)
| {
"content_hash": "40659a1580943df42cfde97509627260",
"timestamp": "",
"source": "github",
"line_count": 1621,
"max_line_length": 272,
"avg_line_length": 45.18692165330043,
"alnum_prop": 0.5520150720838795,
"repo_name": "ToontownUprising/src",
"id": "81f271fa2b8c41f8407e6de11b33114abb0d4633",
"size": "73248",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "toontown/battle/BattleCalculatorAI.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Perl",
"bytes": "36"
},
{
"name": "Python",
"bytes": "16244807"
},
{
"name": "Shell",
"bytes": "11615"
}
],
"symlink_target": ""
} |
class ASMGroups(object):
"""Advanced Suppression Manager gives your recipients more control over the types of emails they want to receive
by letting them opt out of messages from a certain type of email.
Groups are specific types of email you would like your recipients to be able to unsubscribe from or subscribe to.
For example: Daily Newsletters, Invoices, System Alerts.
"""
def __init__(self, client, **opts):
"""
Constructs SendGrid ASM group object.
See https://sendgrid.com/docs/API_Reference/Web_API_v3/Advanced_Suppression_Manager/index.html and
https://sendgrid.com/docs/API_Reference/Web_API_v3/Advanced_Suppression_Manager/groups.html
"""
self._name = None
self._base_endpoint = "/v3/asm/groups"
self._endpoint = "/v3/asm/groups"
self._client = client
@property
def base_endpoint(self):
return self._base_endpoint
@property
def endpoint(self):
endpoint = self._endpoint
return endpoint
@endpoint.setter
def endpoint(self, value):
self._endpoint = value
@property
def client(self):
return self._client
# Retrieve all suppression groups associated with the user.
def get(self, id=None):
if id == None:
return self.client.get(self)
else:
self._endpoint = self._base_endpoint + "/" + str(id)
return self.client.get(self)
# Create a new unsubscribe group
def post(self, name, description, is_default):
self._endpoint = self._base_endpoint
data = {}
data["name"] = name
data["description"] = description
data["is_default"] = is_default
return self.client.post(self, data) | {
"content_hash": "60bd1ccda6b58cda446bf6515c8e0374",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 121,
"avg_line_length": 33.72222222222222,
"alnum_prop": 0.6166941241076331,
"repo_name": "Khan/sendgrid-python",
"id": "48c8a7371b9a4b5b70ff66632cad59b83d8569cb",
"size": "1821",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sendgrid/resources/asm_groups.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "51011"
},
{
"name": "Shell",
"bytes": "145"
}
],
"symlink_target": ""
} |
import os
import subprocess
import sys
from collections import defaultdict
from typing import Any, ClassVar, Dict, Type
from urllib.parse import urljoin
from .wptmanifest.parser import atoms
atom_reset = atoms["Reset"]
enabled_tests = {"testharness", "reftest", "wdspec", "crashtest", "print-reftest"}
class Result:
def __init__(self,
status,
message,
expected=None,
extra=None,
stack=None,
known_intermittent=None):
if status not in self.statuses:
raise ValueError("Unrecognised status %s" % status)
self.status = status
self.message = message
self.expected = expected
self.known_intermittent = known_intermittent if known_intermittent is not None else []
self.extra = extra if extra is not None else {}
self.stack = stack
def __repr__(self):
return f"<{self.__module__}.{self.__class__.__name__} {self.status}>"
class SubtestResult:
def __init__(self, name, status, message, stack=None, expected=None, known_intermittent=None):
self.name = name
if status not in self.statuses:
raise ValueError("Unrecognised status %s" % status)
self.status = status
self.message = message
self.stack = stack
self.expected = expected
self.known_intermittent = known_intermittent if known_intermittent is not None else []
def __repr__(self):
return f"<{self.__module__}.{self.__class__.__name__} {self.name} {self.status}>"
class TestharnessResult(Result):
default_expected = "OK"
statuses = {"OK", "ERROR", "INTERNAL-ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT", "CRASH", "PRECONDITION_FAILED"}
class TestharnessSubtestResult(SubtestResult):
default_expected = "PASS"
statuses = {"PASS", "FAIL", "TIMEOUT", "NOTRUN", "PRECONDITION_FAILED"}
class ReftestResult(Result):
default_expected = "PASS"
statuses = {"PASS", "FAIL", "ERROR", "INTERNAL-ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT",
"CRASH"}
class WdspecResult(Result):
default_expected = "OK"
statuses = {"OK", "ERROR", "INTERNAL-ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT", "CRASH"}
class WdspecSubtestResult(SubtestResult):
default_expected = "PASS"
statuses = {"PASS", "FAIL", "ERROR"}
class CrashtestResult(Result):
default_expected = "PASS"
statuses = {"PASS", "ERROR", "INTERNAL-ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT",
"CRASH"}
def get_run_info(metadata_root, product, **kwargs):
return RunInfo(metadata_root, product, **kwargs)
class RunInfo(Dict[str, Any]):
def __init__(self, metadata_root, product, debug,
browser_version=None,
browser_channel=None,
verify=None,
extras=None,
device_serials=None,
adb_binary=None):
import mozinfo
self._update_mozinfo(metadata_root)
self.update(mozinfo.info)
from .update.tree import GitTree
try:
# GitTree.__init__ throws if we are not in a git tree.
rev = GitTree(log_error=False).rev
except (OSError, subprocess.CalledProcessError):
rev = None
if rev:
self["revision"] = rev.decode("utf-8")
self["python_version"] = sys.version_info.major
self["product"] = product
if debug is not None:
self["debug"] = debug
elif "debug" not in self:
# Default to release
self["debug"] = False
if browser_version:
self["browser_version"] = browser_version
if browser_channel:
self["browser_channel"] = browser_channel
self["verify"] = verify
if "wasm" not in self:
self["wasm"] = False
if extras is not None:
self.update(extras)
if "headless" not in self:
self["headless"] = False
if adb_binary:
self["adb_binary"] = adb_binary
if device_serials:
# Assume all emulators are identical, so query an arbitrary one.
self._update_with_emulator_info(device_serials[0])
self.pop("linux_distro", None)
def _adb_run(self, device_serial, args, **kwargs):
adb_binary = self.get("adb_binary", "adb")
cmd = [adb_binary, "-s", device_serial, *args]
return subprocess.check_output(cmd, **kwargs)
def _adb_get_property(self, device_serial, prop, **kwargs):
args = ["shell", "getprop", prop]
value = self._adb_run(device_serial, args, **kwargs)
return value.strip()
def _update_with_emulator_info(self, device_serial):
"""Override system info taken from the host if using an Android
emulator."""
try:
self._adb_run(device_serial, ["wait-for-device"])
emulator_info = {
"os": "android",
"os_version": self._adb_get_property(
device_serial,
"ro.build.version.release",
encoding="utf-8",
),
}
emulator_info["version"] = emulator_info["os_version"]
# Detect CPU info (https://developer.android.com/ndk/guides/abis#sa)
abi64, *_ = self._adb_get_property(
device_serial,
"ro.product.cpu.abilist64",
encoding="utf-8",
).split(',')
if abi64:
emulator_info["processor"] = abi64
emulator_info["bits"] = 64
else:
emulator_info["processor"], *_ = self._adb_get_property(
device_serial,
"ro.product.cpu.abilist32",
encoding="utf-8",
).split(',')
emulator_info["bits"] = 32
self.update(emulator_info)
except (OSError, subprocess.CalledProcessError):
pass
def _update_mozinfo(self, metadata_root):
"""Add extra build information from a mozinfo.json file in a parent
directory"""
import mozinfo
path = metadata_root
dirs = set()
while path != os.path.expanduser('~'):
if path in dirs:
break
dirs.add(str(path))
path = os.path.dirname(path)
mozinfo.find_and_update_from_json(*dirs)
def server_protocol(manifest_item):
if hasattr(manifest_item, "h2") and manifest_item.h2:
return "h2"
if hasattr(manifest_item, "https") and manifest_item.https:
return "https"
return "http"
class Test:
result_cls = None # type: ClassVar[Type[Result]]
subtest_result_cls = None # type: ClassVar[Type[SubtestResult]]
test_type = None # type: ClassVar[str]
pac = None
default_timeout = 10 # seconds
long_timeout = 60 # seconds
def __init__(self, url_base, tests_root, url, inherit_metadata, test_metadata,
timeout=None, path=None, protocol="http", subdomain=False, pac=None):
self.url_base = url_base
self.tests_root = tests_root
self.url = url
self._inherit_metadata = inherit_metadata
self._test_metadata = test_metadata
self.timeout = timeout if timeout is not None else self.default_timeout
self.path = path
self.subdomain = subdomain
self.environment = {"url_base": url_base,
"protocol": protocol,
"prefs": self.prefs}
if pac is not None:
self.environment["pac"] = urljoin(self.url, pac)
def __eq__(self, other):
if not isinstance(other, Test):
return False
return self.id == other.id
# Python 2 does not have this delegation, while Python 3 does.
def __ne__(self, other):
return not self.__eq__(other)
def update_metadata(self, metadata=None):
if metadata is None:
metadata = {}
return metadata
@classmethod
def from_manifest(cls, manifest_file, manifest_item, inherit_metadata, test_metadata):
timeout = cls.long_timeout if manifest_item.timeout == "long" else cls.default_timeout
return cls(manifest_file.url_base,
manifest_file.tests_root,
manifest_item.url,
inherit_metadata,
test_metadata,
timeout=timeout,
path=os.path.join(manifest_file.tests_root, manifest_item.path),
protocol=server_protocol(manifest_item),
subdomain=manifest_item.subdomain)
@property
def id(self):
return self.url
@property
def keys(self):
return tuple()
@property
def abs_path(self):
return os.path.join(self.tests_root, self.path)
def _get_metadata(self, subtest=None):
if self._test_metadata is not None and subtest is not None:
return self._test_metadata.get_subtest(subtest)
else:
return self._test_metadata
def itermeta(self, subtest=None):
if self._test_metadata is not None:
if subtest is not None:
subtest_meta = self._get_metadata(subtest)
if subtest_meta is not None:
yield subtest_meta
yield self._get_metadata()
yield from reversed(self._inherit_metadata)
def disabled(self, subtest=None):
for meta in self.itermeta(subtest):
disabled = meta.disabled
if disabled is not None:
return disabled
return None
@property
def restart_after(self):
for meta in self.itermeta(None):
restart_after = meta.restart_after
if restart_after is not None:
return True
return False
@property
def leaks(self):
for meta in self.itermeta(None):
leaks = meta.leaks
if leaks is not None:
return leaks
return False
@property
def min_assertion_count(self):
for meta in self.itermeta(None):
count = meta.min_assertion_count
if count is not None:
return count
return 0
@property
def max_assertion_count(self):
for meta in self.itermeta(None):
count = meta.max_assertion_count
if count is not None:
return count
return 0
@property
def lsan_disabled(self):
for meta in self.itermeta():
if meta.lsan_disabled is not None:
return meta.lsan_disabled
return False
@property
def lsan_allowed(self):
lsan_allowed = set()
for meta in self.itermeta():
lsan_allowed |= meta.lsan_allowed
if atom_reset in lsan_allowed:
lsan_allowed.remove(atom_reset)
break
return lsan_allowed
@property
def lsan_max_stack_depth(self):
for meta in self.itermeta(None):
depth = meta.lsan_max_stack_depth
if depth is not None:
return depth
return None
@property
def mozleak_allowed(self):
mozleak_allowed = set()
for meta in self.itermeta():
mozleak_allowed |= meta.leak_allowed
if atom_reset in mozleak_allowed:
mozleak_allowed.remove(atom_reset)
break
return mozleak_allowed
@property
def mozleak_threshold(self):
rv = {}
for meta in self.itermeta(None):
threshold = meta.leak_threshold
for key, value in threshold.items():
if key not in rv:
rv[key] = value
return rv
@property
def tags(self):
tags = set()
for meta in self.itermeta():
meta_tags = meta.tags
tags |= meta_tags
if atom_reset in meta_tags:
tags.remove(atom_reset)
break
tags.add("dir:%s" % self.id.lstrip("/").split("/")[0])
return tags
@property
def prefs(self):
prefs = {}
for meta in reversed(list(self.itermeta())):
meta_prefs = meta.prefs
if atom_reset in meta_prefs:
del meta_prefs[atom_reset]
prefs = {}
prefs.update(meta_prefs)
return prefs
def expected(self, subtest=None):
if subtest is None:
default = self.result_cls.default_expected
else:
default = self.subtest_result_cls.default_expected
metadata = self._get_metadata(subtest)
if metadata is None:
return default
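        # In the metadata, "expected" is either a single status or a list
        # whose head is the primary expectation; the tail is reported
        # separately via known_intermittent() below.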
try:
expected = metadata.get("expected")
if isinstance(expected, str):
return expected
elif isinstance(expected, list):
return expected[0]
elif expected is None:
return default
except KeyError:
return default
def implementation_status(self):
implementation_status = None
for meta in self.itermeta():
implementation_status = meta.implementation_status
if implementation_status:
return implementation_status
        # Absent an explicit setting, assume the feature is being implemented.
return "implementing"
def known_intermittent(self, subtest=None):
metadata = self._get_metadata(subtest)
if metadata is None:
return []
try:
expected = metadata.get("expected")
if isinstance(expected, list):
return expected[1:]
return []
except KeyError:
return []
def __repr__(self):
return f"<{self.__module__}.{self.__class__.__name__} {self.id}>"
class TestharnessTest(Test):
result_cls = TestharnessResult
subtest_result_cls = TestharnessSubtestResult
test_type = "testharness"
def __init__(self, url_base, tests_root, url, inherit_metadata, test_metadata,
timeout=None, path=None, protocol="http", testdriver=False,
jsshell=False, scripts=None, subdomain=False, pac=None):
Test.__init__(self, url_base, tests_root, url, inherit_metadata, test_metadata, timeout,
path, protocol, subdomain, pac)
self.testdriver = testdriver
self.jsshell = jsshell
self.scripts = scripts or []
@classmethod
def from_manifest(cls, manifest_file, manifest_item, inherit_metadata, test_metadata):
timeout = cls.long_timeout if manifest_item.timeout == "long" else cls.default_timeout
pac = manifest_item.pac
testdriver = manifest_item.testdriver if hasattr(manifest_item, "testdriver") else False
jsshell = manifest_item.jsshell if hasattr(manifest_item, "jsshell") else False
script_metadata = manifest_item.script_metadata or []
scripts = [v for (k, v) in script_metadata
if k == "script"]
return cls(manifest_file.url_base,
manifest_file.tests_root,
manifest_item.url,
inherit_metadata,
test_metadata,
timeout=timeout,
pac=pac,
path=os.path.join(manifest_file.tests_root, manifest_item.path),
protocol=server_protocol(manifest_item),
testdriver=testdriver,
jsshell=jsshell,
scripts=scripts,
subdomain=manifest_item.subdomain)
@property
def id(self):
return self.url
class ManualTest(Test):
test_type = "manual"
@property
def id(self):
return self.url
class ReftestTest(Test):
"""A reftest
A reftest should be considered to pass if one of its references matches (see below) *and* the
reference passes if it has any references recursively.
Attributes:
references (List[Tuple[str, str]]): a list of alternate references, where one must match for the test to pass
viewport_size (Optional[Tuple[int, int]]): size of the viewport for this test, if not default
dpi (Optional[int]): dpi to use when rendering this test, if not default
"""
result_cls = ReftestResult
test_type = "reftest"
def __init__(self, url_base, tests_root, url, inherit_metadata, test_metadata, references,
timeout=None, path=None, viewport_size=None, dpi=None, fuzzy=None,
protocol="http", subdomain=False):
Test.__init__(self, url_base, tests_root, url, inherit_metadata, test_metadata, timeout,
path, protocol, subdomain)
for _, ref_type in references:
if ref_type not in ("==", "!="):
raise ValueError
self.references = references
self.viewport_size = self.get_viewport_size(viewport_size)
self.dpi = dpi
self._fuzzy = fuzzy or {}
@classmethod
def cls_kwargs(cls, manifest_test):
return {"viewport_size": manifest_test.viewport_size,
"dpi": manifest_test.dpi,
"protocol": server_protocol(manifest_test),
"fuzzy": manifest_test.fuzzy}
@classmethod
def from_manifest(cls,
manifest_file,
manifest_test,
inherit_metadata,
test_metadata):
timeout = cls.long_timeout if manifest_test.timeout == "long" else cls.default_timeout
url = manifest_test.url
node = cls(manifest_file.url_base,
manifest_file.tests_root,
manifest_test.url,
inherit_metadata,
test_metadata,
[],
timeout=timeout,
path=manifest_test.path,
subdomain=manifest_test.subdomain,
**cls.cls_kwargs(manifest_test))
refs_by_type = defaultdict(list)
for ref_url, ref_type in manifest_test.references:
refs_by_type[ref_type].append(ref_url)
# Construct a list of all the mismatches, where we end up with mismatch_1 != url !=
# mismatch_2 != url != mismatch_3 etc.
#
        # Per the logic documented above, this means that none of the mismatches provided
        # may match if the test is to pass.
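        # Worked example (hypothetical reference URLs): given
        #     references = [(a, "=="), (b, "=="), (m1, "!="), (m2, "!=")]
        # the loop below builds the chain  m1 != url' != m2  (url' being a
        # bare copy of this test's url), and each "==" node created further
        # down carries that chain, so the test passes iff it matches a or b
        # *and* its rendering differs from both m1 and m2.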
mismatch_walk = None
if refs_by_type["!="]:
mismatch_walk = ReftestTest(manifest_file.url_base,
manifest_file.tests_root,
refs_by_type["!="][0],
[],
None,
[])
cmp_ref = mismatch_walk
for ref_url in refs_by_type["!="][1:]:
cmp_self = ReftestTest(manifest_file.url_base,
manifest_file.tests_root,
url,
[],
None,
[])
cmp_ref.references.append((cmp_self, "!="))
cmp_ref = ReftestTest(manifest_file.url_base,
manifest_file.tests_root,
ref_url,
[],
None,
[])
cmp_self.references.append((cmp_ref, "!="))
if mismatch_walk is None:
mismatch_refs = []
else:
mismatch_refs = [(mismatch_walk, "!=")]
if refs_by_type["=="]:
# For each == ref, add a reference to this node whose tail is the mismatch list.
# Per the logic documented above, this means any one of the matches must pass plus all the mismatches.
for ref_url in refs_by_type["=="]:
ref = ReftestTest(manifest_file.url_base,
manifest_file.tests_root,
ref_url,
[],
None,
mismatch_refs)
node.references.append((ref, "=="))
else:
# Otherwise, we just add the mismatches directly as we are immediately into the
# mismatch chain with no alternates.
node.references.extend(mismatch_refs)
return node
def update_metadata(self, metadata):
if "url_count" not in metadata:
metadata["url_count"] = defaultdict(int)
for reference, _ in self.references:
# We assume a naive implementation in which a url with multiple
# possible screenshots will need to take both the lhs and rhs screenshots
# for each possible match
metadata["url_count"][(self.environment["protocol"], reference.url)] += 1
reference.update_metadata(metadata)
return metadata
def get_viewport_size(self, override):
return override
@property
def id(self):
return self.url
@property
def keys(self):
return ("reftype", "refurl")
@property
def fuzzy(self):
return self._fuzzy
@property
def fuzzy_override(self):
values = {}
for meta in reversed(list(self.itermeta(None))):
value = meta.fuzzy
if not value:
continue
if atom_reset in value:
value.remove(atom_reset)
values = {}
for key, data in value:
if isinstance(key, (tuple, list)):
key = list(key)
key[0] = urljoin(self.url, key[0])
key[1] = urljoin(self.url, key[1])
key = tuple(key)
elif key:
# Key is just a relative url to a ref
key = urljoin(self.url, key)
values[key] = data
return values
@property
def page_ranges(self):
return {}
class PrintReftestTest(ReftestTest):
test_type = "print-reftest"
def __init__(self, url_base, tests_root, url, inherit_metadata, test_metadata, references,
timeout=None, path=None, viewport_size=None, dpi=None, fuzzy=None,
page_ranges=None, protocol="http", subdomain=False):
super().__init__(url_base, tests_root, url, inherit_metadata, test_metadata,
references, timeout, path, viewport_size, dpi,
fuzzy, protocol, subdomain=subdomain)
self._page_ranges = page_ranges
@classmethod
def cls_kwargs(cls, manifest_test):
rv = super().cls_kwargs(manifest_test)
rv["page_ranges"] = manifest_test.page_ranges
return rv
def get_viewport_size(self, override):
assert override is None
return (5*2.54, 3*2.54)
@property
def page_ranges(self):
return self._page_ranges
class WdspecTest(Test):
result_cls = WdspecResult
subtest_result_cls = WdspecSubtestResult
test_type = "wdspec"
default_timeout = 25
long_timeout = 180 # 3 minutes
class CrashTest(Test):
result_cls = CrashtestResult
test_type = "crashtest"
manifest_test_cls = {"reftest": ReftestTest,
"print-reftest": PrintReftestTest,
"testharness": TestharnessTest,
"manual": ManualTest,
"wdspec": WdspecTest,
"crashtest": CrashTest}
def from_manifest(manifest_file, manifest_test, inherit_metadata, test_metadata):
test_cls = manifest_test_cls[manifest_test.item_type]
return test_cls.from_manifest(manifest_file, manifest_test, inherit_metadata, test_metadata)
| {
"content_hash": "0e887e76cf81dd219c87327d2710446b",
"timestamp": "",
"source": "github",
"line_count": 712,
"max_line_length": 117,
"avg_line_length": 33.86938202247191,
"alnum_prop": 0.5483723823346465,
"repo_name": "chromium/chromium",
"id": "65577c1eabced979341854ea0c3b642bbe84f67a",
"size": "24143",
"binary": false,
"copies": "11",
"ref": "refs/heads/main",
"path": "third_party/wpt_tools/wpt/tools/wptrunner/wptrunner/wpttest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
ref: https://github.com/swagger-api/swagger-codegen
"""
from __future__ import absolute_import
import base64
import urllib3
try:
import httplib
except ImportError:
# for python3
import http.client as httplib
import sys
import logging
from six import iteritems
def singleton(cls, *args, **kw):
instances = {}
def _singleton():
if cls not in instances:
instances[cls] = cls(*args, **kw)
return instances[cls]
return _singleton
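# Illustration (not part of the generated client): because ``singleton``
# caches the first instance created for each class, repeated construction
# returns the same object:
#
#     assert Configuration() is Configuration()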
@singleton
class Configuration(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
Do not edit the class manually.
"""
def __init__(self):
"""
Constructor
"""
# Default Base url
self.host = "https://localhost/api/1.1"
# Default api client
self.api_client = None
# Temp file folder for downloading files
self.temp_folder_path = None
# Authentication Settings
# dict to store API key(s)
self.api_key = {}
# dict to store API prefix (e.g. Bearer)
self.api_key_prefix = {}
# Username for HTTP basic authentication
self.username = ""
# Password for HTTP basic authentication
self.password = ""
# Logging Settings
self.logger = {}
self.logger["package_logger"] = logging.getLogger("python_shovel")
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
# Log format
self.logger_format = '%(asctime)s %(levelname)s %(message)s'
# Log stream handler
self.logger_stream_handler = None
# Log file handler
self.logger_file_handler = None
# Debug file location
self.logger_file = None
# Debug switch
self.debug = False
# SSL/TLS verification
# Set this to false to skip verifying SSL certificate when calling API from https server.
self.verify_ssl = True
# Set this to customize the certificate file to verify the peer.
self.ssl_ca_cert = None
# client certificate file
self.cert_file = None
# client key file
self.key_file = None
@property
def logger_file(self):
"""
Gets the logger_file.
"""
return self.__logger_file
@logger_file.setter
def logger_file(self, value):
"""
Sets the logger_file.
If the logger_file is None, then add stream handler and remove file handler.
Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
self.__logger_file = value
if self.__logger_file:
# If set logging file,
# then add file handler and remove stream handler.
self.logger_file_handler = logging.FileHandler(self.__logger_file)
self.logger_file_handler.setFormatter(self.logger_formatter)
for _, logger in iteritems(self.logger):
logger.addHandler(self.logger_file_handler)
if self.logger_stream_handler:
logger.removeHandler(self.logger_stream_handler)
else:
# If not set logging file,
# then add stream handler and remove file handler.
self.logger_stream_handler = logging.StreamHandler()
self.logger_stream_handler.setFormatter(self.logger_formatter)
for _, logger in iteritems(self.logger):
logger.addHandler(self.logger_stream_handler)
if self.logger_file_handler:
logger.removeHandler(self.logger_file_handler)
@property
def debug(self):
"""
Gets the debug status.
"""
return self.__debug
@debug.setter
def debug(self, value):
"""
Sets the debug status.
:param value: The debug status, True or False.
:type: bool
"""
self.__debug = value
if self.__debug:
# if debug status is True, turn on debug logging
for _, logger in iteritems(self.logger):
logger.setLevel(logging.DEBUG)
# turn on httplib debug
httplib.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in iteritems(self.logger):
logger.setLevel(logging.WARNING)
# turn off httplib debug
httplib.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
"""
Gets the logger_format.
"""
return self.__logger_format
@logger_format.setter
def logger_format(self, value):
"""
Sets the logger_format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
self.__logger_format = value
self.logger_formatter = logging.Formatter(self.__logger_format)
def get_api_key_with_prefix(self, identifier):
"""
Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:return: The token for api key authentication.
"""
if self.api_key.get(identifier) and self.api_key_prefix.get(identifier):
return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier]
elif self.api_key.get(identifier):
return self.api_key[identifier]
def get_basic_auth_token(self):
"""
Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
return urllib3.util.make_headers(basic_auth=self.username + ':' + self.password)\
.get('authorization')
def auth_settings(self):
"""
Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
return {
}
def to_debug_report(self):
"""
Gets the essential information for debugging.
:return: The report for debugging.
"""
return "Python SDK Debug Report:\n"\
"OS: {env}\n"\
"Python Version: {pyversion}\n"\
"Version of the API: 1.0.0\n"\
"SDK Package Version: 1.0.0".\
format(env=sys.platform, pyversion=sys.version)
| {
"content_hash": "0bd1087f8028f9170fd3304559b77617",
"timestamp": "",
"source": "github",
"line_count": 227,
"max_line_length": 97,
"avg_line_length": 31.555066079295155,
"alnum_prop": 0.5963981571967053,
"repo_name": "keedya/shovel-api-python-client",
"id": "0dda47e99c1950945eb372b86fc1b0f21a08be71",
"size": "7180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python_shovel/configuration.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "135345"
}
],
"symlink_target": ""
} |
import sys
import getopt, parser_generator, grammar_parser, interpreter
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Please give one argument, the input filename."
sys.exit(1)
cs164_grammar_file = './cs164c.grm'
cs164_input_file = sys.argv[1]
cs164_library_file = './library.164'
cs164parser = parser_generator.makeParser(grammar_parser.parse(open(cs164_grammar_file).read()))
# Load library into the cs164interpreter
library_ast = cs164parser.parse(open(cs164_library_file).read())
interpreter.ExecGlobal(library_ast)
# Load program into the cs164interpreter
input_ast = cs164parser.parse(open(cs164_input_file).read())
interpreter.ExecGlobal(input_ast)
| {
"content_hash": "1fbf22fe7976d7e4bab01af4b9b8a87d",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 100,
"avg_line_length": 36.55,
"alnum_prop": 0.6949384404924761,
"repo_name": "michelle/sink",
"id": "33a486f2d518c9ace1c9fe3eca6727b0a04ee717",
"size": "754",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "164/main_164.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "47213"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.core.urlresolvers import reverse_lazy
from django.db.models.loading import get_model
from django.test import TestCase
import json
class ViewsTests(TestCase):
fixtures = ['test_data']
def test_task_view_200(self):
response = self.client.get(reverse_lazy('TestTask'))
self.assertTrue('models' in response.context)
        self.assertEqual(len(response.context['models']), 3)
self.assertEqual(response.status_code, 200)
def test_get_model_data_200(self):
response = self.client.get(reverse_lazy('ModelData', kwargs={'model_name': 'HobbiesDynamicModel'}))
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertTrue('fields' in data)
self.assertTrue('qs' in data)
self.assertEqual(len(data['fields']), len(data['qs'][0]))
def test_get_model_data_400(self):
response = self.client.get(reverse_lazy('ModelData', kwargs={'model_name': 'SomeModel'}))
data = json.loads(response.content)
self.assertEqual(response.status_code, 400)
self.assertTrue('error' in data)
self.assertEqual(data['error'], "App 'testtask' doesn't have a 'somemodel' model.")
def test_update_model_data_200(self):
data = 'field={}&id={}&data={}'.format('title', 1, 'Test')
response = self.client.put(reverse_lazy('ModelData', kwargs={'model_name': 'HobbiesDynamicModel'}), data=data)
status = json.loads(response.content)
self.assertEqual(response.status_code, 200)
self.assertTrue('status' in status)
self.assertEqual(status['status'], 'ok')
def test_update_model_data_200_error(self):
data = 'field={}&id={}&data={}'.format('date_joined', 1, 'dummy')
response = self.client.put(reverse_lazy('ModelData', kwargs={'model_name': 'UsersDynamicModel'}), data=data)
status = json.loads(response.content)
self.assertEqual(response.status_code, 200)
self.assertTrue('status' in status)
self.assertEqual(status['status'], 'error')
self.assertTrue('message' in status)
def test_update_model_data_400(self):
data = 'field={}&id={}&data={}'.format('title', 1, 'Test')
response = self.client.put(reverse_lazy('ModelData', kwargs={'model_name': 'SomeModel'}), data=data)
status = json.loads(response.content)
self.assertEqual(response.status_code, 400)
self.assertTrue('error' in status)
self.assertEqual(status['error'], "App 'testtask' doesn't have a 'somemodel' model.")
| {
"content_hash": "129bafe7eda4bcbd596d89ae2a1c5e32",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 118,
"avg_line_length": 41.6031746031746,
"alnum_prop": 0.6547119420068676,
"repo_name": "ToxicWar/travail-de-tests",
"id": "665a5d6b531515bf938eeab3e32603b65fd29ed6",
"size": "2637",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testtask/tests/test_views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1238"
},
{
"name": "JavaScript",
"bytes": "4144"
},
{
"name": "Python",
"bytes": "21682"
},
{
"name": "Shell",
"bytes": "267"
}
],
"symlink_target": ""
} |
from tests import web
class ApiTest(web.WebsiteTest):
async def test_base(self):
request = await self.client.get('/')
data = self.json(request.response, 200)
self.assertTrue(data)
self.assertTrue('user_url' in data)
# CONTENT API
async def test_article_list(self):
request = await self.client.get('/contents/articles?priority:gt=0')
data = self.json(request.response, 200)['result']
self.assertEqual(len(data), 3)
async def test_article_list_priority_query(self):
request = await self.client.get(
'/contents/articles?priority=3&priority=2')
data = self.json(request.response, 200)['result']
self.assertEqual(len(data), 1)
self.assertEqual(data[0]['priority'], 2)
self.assertEqual(data[0]['title'], 'Just a test')
async def test_options_article_links(self):
request = await self.client.options('/contents/articles/_links')
self.assertEqual(request.response.status_code, 200)
async def test_article_links(self):
request = await self.client.get('/contents/articles/_links')
data = self.json(request.response, 200)['result']
self.assertEqual(len(data), 2)
async def test_options_article(self):
request = await self.client.options('/contents/articles/fooo')
self.assertEqual(request.response.status_code, 200)
async def test_get_article_404(self):
request = await self.client.get('/contents/articles/fooo')
self.json(request.response, 404)
async def test_head_article_404(self):
request = await self.client.head('/contents/articles/fooo')
self.assertEqual(request.response.status_code, 404)
async def test_get_article_200(self):
request = await self.client.get('/contents/articles/test')
data = self.json(request.response, 200)
self.assertEqual(data['path'], '/articles/test')
async def test_head_article_200(self):
request = await self.client.head('/contents/articles/test')
self.assertEqual(request.response.status_code, 200)
| {
"content_hash": "475503c3b01d3f6074743919a2346819",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 75,
"avg_line_length": 39.166666666666664,
"alnum_prop": 0.658628841607565,
"repo_name": "quantmind/lux",
"id": "850c8160fda50b8455b895fef5a610a462012ba2",
"size": "2115",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/web/test_api.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "906"
},
{
"name": "HTML",
"bytes": "5107"
},
{
"name": "JavaScript",
"bytes": "219127"
},
{
"name": "Makefile",
"bytes": "422"
},
{
"name": "Mako",
"bytes": "1050"
},
{
"name": "PLpgSQL",
"bytes": "140"
},
{
"name": "Python",
"bytes": "615221"
},
{
"name": "Shell",
"bytes": "196"
}
],
"symlink_target": ""
} |
import socket
import time
import threading
import pickle
from msg import Msg
class Server(threading.Thread):
def __init__(self, host, port, user):
self.host = host
self.port = port
self.user = user
self.running = True
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            self.sock.bind((self.host, self.port))
        except OSError:
            print('Port binding failed! Perhaps a service is '
                  'already running on the port')
threading.Thread.__init__(self)
def run(self):
self.sock.listen(5)
while self.running:
conn, addr = self.sock.accept()
print('\n\n' +
'----------------------------------------------------\n' +
'Connected to {}:{} \n'.format(*addr) +
'----------------------------------------------------\n\n')
while self.running:
text = input(self.user + ': ')
if text == '!quit':
self.running = False
elif text == '!close':
break
data = Msg(self.user, text)
print(data)
data = pickle.dumps(data)
conn.sendall(data)
self.sock.close()
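# Example usage (hypothetical host/port/user values):
#
#     server = Server('0.0.0.0', 9999, 'alice')
#     server.start()   # the accept/send loop runs in its own thread
#     server.join()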
| {
"content_hash": "af21e4044ca26fcc906f24e972c4cbaa",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 77,
"avg_line_length": 30.58139534883721,
"alnum_prop": 0.44258555133079847,
"repo_name": "MalteT/secure-chat-py",
"id": "6a209038315641145082a25175ccbffe8a26c154",
"size": "1315",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2780"
}
],
"symlink_target": ""
} |
from enum import IntEnum
class CalibrationMethod(IntEnum):
PLUS_EIGENSTATE = 1
NONE = 0
MINUS_EIGENSTATE = -1
| {
"content_hash": "ad39e45c7399da768f425e6c26117cac",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 33,
"avg_line_length": 17.714285714285715,
"alnum_prop": 0.6935483870967742,
"repo_name": "rigetticomputing/pyquil",
"id": "6fdb559f1b8d9009ec1bceb14c814e4737ab8252",
"size": "896",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyquil/experiment/_calibration.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "7178"
},
{
"name": "Python",
"bytes": "741472"
}
],
"symlink_target": ""
} |
"""
Module containing the FooWrapper1 class
"""
import time
import subprocess
from wopmars.utils.Logger import Logger
from wopmars.models.ToolWrapper import ToolWrapper
class FooWrapper9(ToolWrapper):
"""
This class has been done for example/testing purpose.
Modifications may lead to failure in tests.
"""
__mapper_args__ = {'polymorphic_identity': "FooWrapper9"}
def specify_input_file(self):
return ["input1"]
def specify_input_table(self):
return ["FooBase2"]
def specify_output_file(self):
return ["output1"]
def run(self):
Logger.instance().info(self.__class__.__name__ + " is running...")
p = subprocess.Popen(["touch", self.output_file("output1")])
p.wait()
time.sleep(0.1)
| {
"content_hash": "fd0932e44677c6a544b44bc7e42e1fc2",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 74,
"avg_line_length": 25.193548387096776,
"alnum_prop": 0.646606914212548,
"repo_name": "aitgon/wopmars",
"id": "f690415b83b63767b7e81b90a449d1d917907486",
"size": "781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wopmars/tests/resource/wrapper/FooWrapper9.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "338509"
},
{
"name": "Shell",
"bytes": "1526"
}
],
"symlink_target": ""
} |
from unittest import mock
import ddt
from tooz import coordination as tooz_coordination
from tooz import locking as tooz_locking
from manila import coordination
from manila import test
class Locked(Exception):
pass
class MockToozLock(tooz_locking.Lock):
active_locks = set()
def acquire(self, blocking=True):
if self.name not in self.active_locks:
self.active_locks.add(self.name)
return True
elif not blocking:
return False
else:
raise Locked
def release(self):
self.active_locks.remove(self.name)
@ddt.ddt
class CoordinatorTestCase(test.TestCase):
def setUp(self):
super(CoordinatorTestCase, self).setUp()
self.get_coordinator = self.mock_object(tooz_coordination,
'get_coordinator')
def test_coordinator_start(self):
crd = self.get_coordinator.return_value
agent = coordination.Coordinator()
agent.start()
self.assertTrue(self.get_coordinator.called)
self.assertTrue(crd.start.called)
self.assertTrue(agent.started)
def test_coordinator_stop(self):
crd = self.get_coordinator.return_value
agent = coordination.Coordinator()
agent.start()
self.assertIsNotNone(agent.coordinator)
agent.stop()
self.assertTrue(crd.stop.called)
self.assertIsNone(agent.coordinator)
self.assertFalse(agent.started)
def test_coordinator_lock(self):
crd = self.get_coordinator.return_value
crd.get_lock.side_effect = lambda n: MockToozLock(n)
agent1 = coordination.Coordinator()
agent1.start()
agent2 = coordination.Coordinator()
agent2.start()
lock_string = 'lock'
expected_lock = lock_string.encode('ascii')
self.assertNotIn(expected_lock, MockToozLock.active_locks)
with agent1.get_lock(lock_string):
self.assertIn(expected_lock, MockToozLock.active_locks)
self.assertRaises(Locked, agent1.get_lock(lock_string).acquire)
self.assertRaises(Locked, agent2.get_lock(lock_string).acquire)
self.assertNotIn(expected_lock, MockToozLock.active_locks)
def test_coordinator_offline(self):
crd = self.get_coordinator.return_value
crd.start.side_effect = tooz_coordination.ToozConnectionError('err')
agent = coordination.Coordinator()
self.assertRaises(tooz_coordination.ToozError, agent.start)
self.assertFalse(agent.started)
@mock.patch.object(coordination.LOCK_COORDINATOR, 'get_lock')
class CoordinationTestCase(test.TestCase):
def test_lock(self, get_lock):
with coordination.Lock('lock'):
self.assertTrue(get_lock.called)
def test_synchronized(self, get_lock):
@coordination.synchronized('lock-{f_name}-{foo.val}-{bar[val]}')
def func(foo, bar):
pass
foo = mock.Mock()
foo.val = 7
bar = mock.MagicMock()
bar.__getitem__.return_value = 8
func(foo, bar)
get_lock.assert_called_with('lock-func-7-8')
| {
"content_hash": "c47962f4c81972ae4e7c34b0e5b0940e",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 76,
"avg_line_length": 29.830188679245282,
"alnum_prop": 0.6461100569259962,
"repo_name": "openstack/manila",
"id": "843528711a1bf350f98e780aa46b858ef196be23",
"size": "3783",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manila/tests/test_coordination.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "953"
},
{
"name": "Python",
"bytes": "12728998"
},
{
"name": "Shell",
"bytes": "107601"
}
],
"symlink_target": ""
} |
import copy
import re
from nltk.sem.logic import Expression
from .category import Category
from .logic_parser import lexpr
from .normalization import normalize_token
class SemanticRule(object):
def __init__(self, category, semantics, attributes = {}):
if not isinstance(category, Category):
self.category = Category(category)
else:
self.category = category
if semantics and not isinstance(semantics, Expression):
self.semantics = lexpr(semantics)
else:
self.semantics = semantics
self.attributes = copy.deepcopy(attributes)
if 'surf' in self.attributes:
self.attributes['surf'] = normalize_token(self.attributes['surf'])
if 'base' in self.attributes:
self.attributes['base'] = normalize_token(self.attributes['base'])
def match(self, other):
# Check class membership and special attribute matches.
if not isinstance(other, self.__class__) \
or not isinstance(other.category, self.category.__class__) \
or not self.category.match(other.category):
return False
# If one rule is terminal but not the other, then they do not match.
if self.is_terminal_rule() != other.is_terminal_rule():
return False
# Obtain generic list of attributes from both semantic rules,
# and check whether they match each other (or are underspecified).
self_attr_names = set(self.attributes.keys())
other_attr_names = set(other.attributes.keys())
attribute_names = self_attr_names.union(other_attr_names)
attribute_names = self.remove_control_attribute_names(attribute_names)
for attribute_name in attribute_names:
self_attr_value = self.attributes.get(attribute_name)
other_attr_value = other.attributes.get(attribute_name)
if not attributes_match(attribute_name, self_attr_value, other_attr_value):
return False
if not wildcard_match(attribute_names, self.attributes, other.attributes):
return False
return True
def remove_control_attribute_names(self, attribute_names):
control_attrs = ['var_paths']
return [a for a in attribute_names if a not in control_attrs]
def is_terminal_rule(self):
if 'rule' in self.attributes:
return False
for attribute_name in self.attributes:
if attribute_name.startswith('child'):
return False
return True
def attributes_match(attribute_name, src_attr_value, trg_attr_value):
# If this attribute is an arbitrary type specification, it doesn't count
# as a rule attribute to match against the CCG tree. Thus, return True.
if 'coq_type' in attribute_name:
return True
    # If the attribute is a wildcard, we do not check for a match here; return True.
if '_any_' in attribute_name:
return True
if src_attr_value is not None and trg_attr_value is None:
return False
if src_attr_value is None and trg_attr_value is not None:
return True
if src_attr_value is None and trg_attr_value is None:
return True
# Case: src_attr_value is not None and trg_attr_value is not None
if not 'category' in attribute_name:
return src_attr_value.lower() == trg_attr_value.lower()
# Comparing categories needs feature unification:
src_category = Category(src_attr_value)
trg_category = Category(trg_attr_value)
return src_category.match(trg_category)
def any_attribute_matches(attribute_name, src_attributes, trg_attributes):
wildcard_names = re.findall(r'_any_(.*)', attribute_name)
assert wildcard_names, 'Attribute name invalid: {0}'.format(attribute_name)
wildcard_name = wildcard_names[0]
src_attr_value = src_attributes.get(attribute_name, None)
assert src_attr_value
trg_attr_values = [value for key, value in trg_attributes.items() \
if key.endswith(wildcard_name)]
for trg_attr_value in trg_attr_values:
if wildcard_name == 'category':
src_category = Category(src_attr_value)
trg_category = Category(trg_attr_value)
if src_category.match(trg_category):
return True
else:
if src_attr_value.lower() == trg_attr_value.lower():
return True
return False
def wildcard_match(attribute_names, src_attributes, trg_attributes):
wildcard_attributes = []
# Obtain attributes specified as a wildcard "any"
for attribute_name in src_attributes.keys():
if re.findall(r'_any_', attribute_name):
wildcard_attributes.append(attribute_name)
# match of attributes specified by wildcards.
for attribute_name in wildcard_attributes:
if not any_attribute_matches(attribute_name, src_attributes, trg_attributes):
return False
return True
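# Illustrative sketch (not part of the original module; names below are
# hypothetical): how '_any_' wildcard attributes behave. A source rule
# attribute whose key contains '_any_' matches when *any* target attribute
# whose key ends with the same suffix has an equal (case-insensitive) value.
#
#   src = {'_any_base': 'run'}
#   trg = {'child0_base': 'Run', 'child1_base': 'go'}
#   wildcard_match(['_any_base'], src, trg)  # -> True ('Run' matches 'run')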
| {
"content_hash": "963cfadb2273614c47bba82b11100e9e",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 87,
"avg_line_length": 43.92035398230089,
"alnum_prop": 0.6564577876284505,
"repo_name": "masashi-y/depccg",
"id": "1f1007392abd65c10ae26e87e3319b8092cbfc5d",
"size": "5586",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "depccg/semantics/ccg2lambda/semantic_rule.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "8162243"
},
{
"name": "C++",
"bytes": "15709"
},
{
"name": "Cython",
"bytes": "10244"
},
{
"name": "Jsonnet",
"bytes": "9883731"
},
{
"name": "Makefile",
"bytes": "203"
},
{
"name": "Python",
"bytes": "361518"
},
{
"name": "Shell",
"bytes": "70"
}
],
"symlink_target": ""
} |
"""
BioPandas module for working with Protein Data Bank (PDB)
files in pandas DataFrames.
"""
from .pandas_pdb import PandasPdb
__all__ = ["PandasPdb"]
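# Example usage (illustrative sketch; fetch_pdb assumes network access):
#
#   from biopandas.pdb import PandasPdb
#   ppdb = PandasPdb().fetch_pdb('3eiy')  # or PandasPdb().read_pdb('file.pdb')
#   ppdb.df['ATOM'].head()                # ATOM records as a pandas DataFrame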
| {
"content_hash": "5fd211e65b4c11a6eb754b3e646fade0",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 57,
"avg_line_length": 19.25,
"alnum_prop": 0.7142857142857143,
"repo_name": "rasbt/biopandas",
"id": "f11af5159da93a6017221c5833ef55a7b96baeab",
"size": "354",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "biopandas/pdb/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "135872"
},
{
"name": "Shell",
"bytes": "2480"
},
{
"name": "TeX",
"bytes": "1788"
}
],
"symlink_target": ""
} |
from netpyne import specs
from cfg import cfg
#------------------------------------------------------------------------------
#
# NETWORK PARAMETERS
#
#------------------------------------------------------------------------------
netParams = specs.NetParams() # object of class NetParams to store the network parameters
netParams.sizeX = cfg.sizeX # x-dimension (horizontal length) size in um
netParams.sizeY = cfg.sizeY # y-dimension (vertical height or cortical depth) size in um
netParams.sizeZ = cfg.sizeZ # z-dimension (horizontal length) size in um
netParams.propVelocity = 100.0 # propagation velocity (um/ms)
netParams.probLengthConst = 150.0 # length constant for conn probability (um)
#------------------------------------------------------------------------------
## Population parameters
netParams.popParams['E2'] = {'cellType': 'E', 'numCells': 10, 'yRange': [50,150], 'cellModel': 'HH'}
netParams.popParams['I2'] = {'cellType': 'I', 'numCells': 10, 'yRange': [50,150], 'cellModel': 'HH'}
netParams.popParams['E4'] = {'cellType': 'E', 'numCells': 10, 'yRange': [150,300], 'cellModel': 'HH'}
netParams.popParams['I4'] = {'cellType': 'I', 'numCells': 10, 'yRange': [150,300], 'cellModel': 'HH'}
netParams.popParams['E5'] = {'cellType': 'E', 'numCells': 10, 'ynormRange': [0.6,1.0], 'cellModel': 'HH'}
netParams.popParams['I5'] = {'cellType': 'I', 'numCells': 10, 'ynormRange': [0.6,1.0], 'cellModel': 'HH'}
#------------------------------------------------------------------------------
## Cell property rules
netParams.loadCellParamsRule(label='CellRule', fileName='CSTR_cellParams.json')
#------------------------------------------------------------------------------
## Synaptic mechanism parameters
netParams.synMechParams['exc'] = {'mod': 'Exp2Syn', 'tau1': 0.8, 'tau2': 5.3, 'e': 0} # NMDA synaptic mechanism
netParams.synMechParams['inh'] = {'mod': 'Exp2Syn', 'tau1': 0.6, 'tau2': 8.5, 'e': -75} # GABA synaptic mechanism
#------------------------------------------------------------------------------
# Stimulation parameters
netParams.stimSourceParams['bkg'] = {'type': 'NetStim', 'rate': 25, 'noise': 0.8}
netParams.stimTargetParams['bkg->all'] = {'source': 'bkg', 'conds': {'cellType': ['E','I']}, 'weight': 0.01, 'sec': 'soma', 'delay': 'max(1, normal(5,2))', 'synMech': 'exc'}
#------------------------------------------------------------------------------
## Cell connectivity rules
netParams.connParams['E->all'] = {
'preConds': {'cellType': 'E'}, 'postConds': {'y': [50,500]}, # E -> all (100-1000 um)
'probability': 0.1, # probability of connection
'weight': '0.04*post_ynorm', # synaptic weight
'delay': 'dist_3D/propVelocity', # transmission delay (ms)
'sec': ['Adend1', 'Adend2', 'Adend3'],
'synMech': 'exc'} # synaptic mechanism
netParams.connParams['I->E'] = {
'preConds': {'cellType': 'I'}, 'postConds': {'pop': ['E2','E4','E5']}, # I -> E
'probability': '0.3*exp(-dist_3D/probLengthConst)', # probability of connection
'weight': 0.01, # synaptic weight
'delay': 'dist_3D/propVelocity', # transmission delay (ms)
'sec': ['soma','Bdend'],
'synMech': 'inh'} # synaptic mechanism
#------------------------------------------------------------------------------
## RxD params
### constants
constants = {'ip3_init': cfg.ip3_init, # initial ip3 concentration
'caDiff': 0.08, # calcium diffusion coefficient
'ip3Diff': 1.41, # ip3 diffusion coefficient
'caci_init': 1e-5, # intracellular calcium initial concentration
'caco_init': 2.0, # extracellular calcium initial concentration
'gip3r': 12040 * 100, # ip3 receptors density
'gserca': 0.3913, # SERCA conductance
'gleak': 6.020, # ER leak channel conductance
'kserca': 0.1, # SERCA reaction constant
'kip3': 0.15, # ip3 reaction constant
'kact': 0.4, #
'ip3rtau': 2000, # ip3 receptors time constant
'fc': 0.8, # fraction of cytosol
'fe': 0.2, # fraction of ER
'margin': 20} # extracellular volume additional margin
netParams.rxdParams['constants'] = constants
### regions
regions = {}
regions['cyt'] = {'cells': 'all', 'secs': 'all', 'nrn_region': 'i', 'geometry': {'class': 'FractionalVolume', 'args': {'volume_fraction': constants['fc'], 'surface_fraction': 1}}}
regions['er'] = {'cells': 'all', 'secs': 'all', 'geometry': {'class': 'FractionalVolume', 'args': {'volume_fraction': constants['fe']}}}
regions['cyt_er_membrane'] = {'cells': 'all', 'secs': 'all', 'geometry': {'class': 'ScalableBorder', 'args': {'scale': 1, 'on_cell_surface': False}}}
margin = 20 # extracellular volume additional margin
x, y, z = [0-margin, 100+margin], [-500-margin, 0+margin], [0-margin, 100+margin]
regions['ecs'] = {'extracellular': True, 'xlo': x[0], 'ylo': y[0], 'zlo': z[0], 'xhi': x[1], 'yhi': y[1], 'zhi': z[1], 'dx': 5, 'volume_fraction': 0.2, 'tortuosity': 1.6}
netParams.rxdParams['regions'] = regions
### species
species = {}
species['ca'] = {'regions': ['cyt', 'er', 'ecs'], 'd': constants['caDiff'], 'charge': 2,
'initial': 'caco_init if isinstance(node,rxd.node.NodeExtracellular) else (0.0017 - caci_init * fc) / fe if node.region == er else caci_init'}
species['ip3'] = {'regions': ['cyt'], 'd': constants['ip3Diff'], 'initial': constants['ip3_init']}
netParams.rxdParams['species'] = species
### states
netParams.rxdParams['states'] = {'ip3r_gate_state': {'regions': ['cyt_er_membrane'], 'initial': 0.8}}
### reactions
minf = 'ip3[cyt] * 1000. * ca[cyt] / (ip3[cyt] + kip3) / (1000. * ca[cyt] + kact)'
h_gate = 'ip3r_gate_state[cyt_er_membrane]'
kip3 = 'gip3r * (%s * %s) ** 3' % (minf, h_gate)
mcReactions = {}
mcReactions['serca'] = {'reactant': 'ca[cyt]', 'product': 'ca[er]', 'rate_f': 'gserca / ((kserca / (1000. * ca[cyt])) ** 2 + 1)', 'membrane': 'cyt_er_membrane', 'custom_dynamics': True}
mcReactions['leak'] = {'reactant': 'ca[er]', 'product': 'ca[cyt]', 'rate_f': constants['gleak'], 'rate_b': constants['gleak'], 'membrane': 'cyt_er_membrane'}
mcReactions['ip3r'] = {'reactant': 'ca[er]', 'product': 'ca[cyt]', 'rate_f': kip3, 'rate_b': kip3, 'membrane': 'cyt_er_membrane'}
netParams.rxdParams['multicompartmentReactions'] = mcReactions
### rates
netParams.rxdParams['rates'] = {'ip3rg': {'species': h_gate, 'rate': '(1. / (1 + 1000. * ca[cyt] / (0.3)) - %s) / ip3rtau'%(h_gate)}}
| {
"content_hash": "147e1c57521da5b1d88058a08a647008",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 185,
"avg_line_length": 56.27350427350427,
"alnum_prop": 0.5476913730255164,
"repo_name": "Neurosim-lab/netpyne",
"id": "5f8360a52756bfb390436f7b812309b319bbe077",
"size": "6584",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "examples/rxd_net/netParams.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AMPL",
"bytes": "25324"
},
{
"name": "Jupyter Notebook",
"bytes": "2588467"
},
{
"name": "Python",
"bytes": "1802020"
},
{
"name": "Shell",
"bytes": "915"
}
],
"symlink_target": ""
} |
"""Longley dataset"""
__docformat__ = 'restructuredtext'
COPYRIGHT = """This is public domain."""
TITLE = __doc__
SOURCE = """
The classic 1967 Longley Data
http://www.itl.nist.gov/div898/strd/lls/data/Longley.shtml
::
Longley, J.W. (1967) "An Appraisal of Least Squares Programs for the
Electronic Comptuer from the Point of View of the User." Journal of
the American Statistical Association. 62.319, 819-41.
"""
DESCRSHORT = """"""
DESCRLONG = """The Longley dataset contains various US macroeconomic
variables that are known to be highly collinear. It has been used to appraise
the accuracy of least squares routines."""
NOTE = """
Number of Observations - 16
Number of Variables - 6
Variable name definitions::
TOTEMP - Total Employment
GNPDEFL - GNP deflator
GNP - GNP
UNEMP - Number of unemployed
ARMED - Size of armed forces
POP - Population
YEAR - Year (1947 - 1962)
"""
from numpy import recfromtxt
import statsmodels.tools.datautils as du
from os.path import dirname, abspath
def load():
"""
Load the Longley data and return a Dataset class.
Returns
-------
Dataset instance
See DATASET_PROPOSAL.txt for more information.
"""
data = _get_data()
return du.process_recarray(data, endog_idx=0, dtype=float)
def load_pandas():
"""
Load the Longley data and return a Dataset class.
Returns
-------
Dataset instance
See DATASET_PROPOSAL.txt for more information.
"""
data = _get_data()
return du.process_recarray_pandas(data, endog_idx=0)
def _get_data():
filepath = dirname(abspath(__file__))
data = recfromtxt(open(filepath+'/longley.csv',"rb"), delimiter=",",
names=True, dtype=float, usecols=(1,2,3,4,5,6,7))
return data
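# Example usage (illustrative sketch; assumes this module is importable as
# statsmodels.datasets.longley):
#
#   from statsmodels.datasets import longley
#   dataset = longley.load()
#   dataset.endog  # TOTEMP
#   dataset.exog   # the six macroeconomic regressors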
| {
"content_hash": "0d4b78d6ac7ed807daad0976fb4984cf",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 78,
"avg_line_length": 25.794520547945204,
"alnum_prop": 0.6436537440254912,
"repo_name": "pprett/statsmodels",
"id": "c91f9bd03a310a9c3b69e98235f29d851a5a9bda",
"size": "1883",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "statsmodels/datasets/longley/data.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "10509"
},
{
"name": "C",
"bytes": "11707"
},
{
"name": "JavaScript",
"bytes": "11143"
},
{
"name": "Python",
"bytes": "4135946"
},
{
"name": "R",
"bytes": "5412"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import django
__author__ = 'Dmitriy Sokolov'
__version__ = '1.0.0'
if django.VERSION < (3, 2):
default_app_config = 'admin_permissions.apps.AdminPermissionsConfig'
VERSION = __version__
| {
"content_hash": "e3efb68445f1ea8ab6314f4f524f4cbd",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 72,
"avg_line_length": 18.153846153846153,
"alnum_prop": 0.6864406779661016,
"repo_name": "silentsokolov/django-admin-permissions",
"id": "31eb3cd2bd9f9963b083c10230b3cf292c91a084",
"size": "261",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "admin_permissions/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "433"
},
{
"name": "Python",
"bytes": "6123"
}
],
"symlink_target": ""
} |
import requests
import json
import decimal
from requests.exceptions import ConnectionError
import logging
#Logger
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
log = logging.getLogger("EtherRpc")
class EtherRpcWrapper(object):
__id_count = 0
def __init__(self,
rpchost,
rpcport,
rpc_call=None,
):
self.__rpchost = rpchost
self.__rpcport = rpcport
self.__rpc_call = rpc_call
self.__headers = {'Host': self.__rpchost,
'User-Agent': 'EtherRpcWrapper v0.1',
'Content-type': 'application/json'
}
def __getattr__(self, name):
"""
        Return an instance of EtherRpcWrapper with an rpc_call defined.
        When an attribute (method) is not defined (e.g. instance.getinfo()),
        __getattr__ is called with the name ('getinfo') as its parameter.
        It then returns a new EtherRpcWrapper whose rpc_call is set to the
        attribute's name; calling that instance invokes __call__.
"""
if name.startswith('__') and name.endswith('__'):
raise AttributeError
if self.__rpc_call is not None:
name = "%s.%s" % (self.__rpc_call, name)
# Return an instance of the client. Will call the __call__ method.
log.debug('Making http request with method:%s' % name)
return EtherRpcWrapper(self.__rpchost, self.__rpcport, name)
def __call__(self, *args):
"""
Make a request to the rpc demon with the method name.
When the instance of the class is summoned like a function,
this method gets called.
"""
EtherRpcWrapper.__id_count += 1
log.debug("-%s-> %s %s" % (EtherRpcWrapper.__id_count,
self.__rpc_call,
json.dumps(args,
default=self.EncodeDecimal)
))
postdata = json.dumps({'version': '1.1',
'method': self.__rpc_call,
'params': args,
'id': EtherRpcWrapper.__id_count
},
default=self.EncodeDecimal
)
url = ''.join(['http://', self.__rpchost, ':', self.__rpcport])
try:
r = requests.post(url, data=postdata, headers=self.__headers)
except ConnectionError:
print 'There was a problem connecting to the RPC daemon.'
print 'Check the connection and connection parameters:'
            error = 'Host: %s, Port: %s' % (self.__rpchost, self.__rpcport)
log.error("Error connecting to rpc demon: %s" % error)
return ConnectionError
if r.status_code == 200:
log.debug("Response: %s" % r.json())
if 'error' in r.json():
return r.json()
else:
return r.json()['result']
else:
log.error("Error! Status code: %s" % r.status_code)
log.error("Text: %s" % r.text)
log.error("Json: %s" % r.json())
return r.json()
    def EncodeDecimal(self, o):
        # json.dumps 'default' hook; invoked as a bound method, so 'self' is needed.
if isinstance(o, decimal.Decimal):
return float(round(o, 8))
raise TypeError(repr(o) + " is not JSON serializable")
def batch_(self, rpc_calls):
"""
Batch RPC call.
Pass array of arrays: [ [ "method", params... ], ... ]
Returns array of results.
"""
batch_data = []
for rpc_call in rpc_calls:
EtherRpcWrapper.__id_count += 1
m = rpc_call.pop(0)
batch_data.append({"jsonrpc": "2.0",
"method": m,
"params": rpc_call,
"id": EtherRpcWrapper.__id_count
})
postdata = json.dumps(batch_data, default=self.EncodeDecimal)
log.debug("--> " + postdata)
url = ''.join(['http://', self.__rpchost, ':', self.__rpcport])
try:
r = requests.post(url, data=postdata, headers=self.__headers)
except ConnectionError:
print 'There was a problem connecting to the RPC daemon.'
print 'Check the connection and connection parameters:'
            print 'Host: %s, Port: %s' % (self.__rpchost, self.__rpcport)
return ConnectionError
if r.status_code == 200:
log.info("Response: %s" % r.json())
if 'error' in r.json():
return r.json()
else:
return r.json()['result']
else:
log.error("Error! Status code: %s" % r.status_code)
log.error("Text: %s" % r.text)
log.error("Json: %s" % r.json())
return r.json()
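# Example usage (illustrative sketch; the host/port are assumptions for a
# local JSON-RPC node, and method names follow the daemon's own RPC API):
#
#   client = EtherRpcWrapper('127.0.0.1', '8545')
#   block_number = client.eth_blockNumber()
#   results = client.batch_([['eth_blockNumber'], ['net_version']])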
| {
"content_hash": "1c6336fbdc4ae99b89d450df67b92260",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 77,
"avg_line_length": 40.83076923076923,
"alnum_prop": 0.4798417483044461,
"repo_name": "nicosandller/python-ethereumrpc",
"id": "a08cfca1266326c166de9119a0bf9ec065177c82",
"size": "5308",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ether_rpc_wrapper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5806"
}
],
"symlink_target": ""
} |
'''
Common interface for all All ChamberConnectLibrary upper level interfaces
:copyright: (C) Espec North America, INC.
:license: MIT, see LICENSE for more details.
'''
#pylint: disable=W0703,W0201,R0902,W0232,R0904,C0103
from abc import ABCMeta, abstractmethod
from threading import RLock
import traceback
import time
import inspect
class ControllerInterfaceError(Exception):
    '''Exception thrown when there is a problem communicating with a controller'''
pass
def exclusive(func):
'''Lock the physical interface for the function call'''
def wrapper(self, *args, **kwargs):
'''Lock the physical interface for the function call'''
if kwargs.pop('exclusive', True):
with self.lock:
auto_connect = self.client is None
try:
if auto_connect:
self.connect()
return func(self, *args, **kwargs)
finally:
try:
if auto_connect:
self.close()
except Exception:
pass
else:
try:
del kwargs['exclusive']
except Exception:
pass
return func(self, *args, **kwargs)
return wrapper
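# Usage note (illustrative): methods decorated with @exclusive accept an
# extra exclusive=False keyword to skip locking and auto-connect; this is how
# the interface methods below call each other without re-acquiring the lock:
#
#   self.get_loop_sp(1, exclusive=False)  # inside an already-locked call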
class ControllerInterface:
'''Abstract class for a controller implimentation of the chamberconnectlibrary'''
__metaclass__ = ABCMeta
loop_map = []
named_loop_map = {}
def init_common(self, **kwargs):
'''Setup properties of all controllers of the chamberconnectlibrary'''
self.client = None
self.host = kwargs.get('host')
self.interface = kwargs.get('interface')
self.adr = kwargs.get('adr', 1)
self.serialport = kwargs.get('serialport')
self.baudrate = kwargs.get('baudrate')
self.loops = kwargs.get('loops', 1)
self.cascades = kwargs.get('cascades', 0)
self.lock = kwargs.get('lock', RLock())
@abstractmethod
def get_datetime(self):
'''
Get the datetime from the controller.
Returns:
datetime. The current time as a datetime.datetime object
'''
pass
@abstractmethod
def set_datetime(self, value):
'''
Set the datetime of the controller.
Args:
value (datetime.datetime): The new datetime for the controller.
'''
pass
@abstractmethod
def get_refrig(self):
'''
        Get the constant settings for the refrigeration system
returns:
{"mode":string, "setpoint":float}
'''
pass
@abstractmethod
def set_refrig(self, value):
'''
Set the constant setpoints refrig mode
params:
            mode: string, "off" or "manual" or "auto"
            setpoint: int, 20 or 50 or 100
'''
pass
@exclusive
def get_loop(self, identifier, *args):
'''
Get all parameters for a loop from a given list.
        There are four different ways to call this method; all return the same data:
get_loop(str(identifier), *str(parameters))
Args:
identifier (str): The name of the loop.
parameters (list(string)): The list of parameters to get from the loop, (see below)
get_loop(str(identifier), [str(parameters)])
Args:
identifier (str): The name of the loop.
parameters (list(string)): The list of parameters to get from the loop, (see below)
get_loop(int(identifier), str(loop_type), *str(parameters))
Args:
            identifier (int): The number of the loop.
loop_type (str): The type of loop to be accessed ("loop" or "cascade")
parameters (list(string)): The list of parameters to get from the loop, (see below)
get_loop(int(identifier), str(loop_type), [str(parameters)])
Args:
            identifier (int): The number of the loop.
loop_type (str): The type of loop to be accessed ("loop" or "cascade")
parameters (list(string)): The list of parameters to get from the loop, (see below)
parameters:
The following is a list of available parameters as referenced by each call type:
"setpoint" -- The target temp/humi/altitude/etc of the control loop
"processvalue" -- The current conditions inside the chamber
"range" -- The settable range for the "setpoint"
"enable" -- Weather the loop is on or off
"units" -- The units of the "setpoint" or "processvalue" parameters
"mode" -- The current control status of the loop
"power" -- The current output power of the loop
"deviation" -- (type="cascade" only) The allowable difference between air/prod.
"enable_cascade" -- (type="cascade" only) Enable or disable cascade type control
Returns:
dict. The dictionary contains a key for each item in the list argument::
"setpoint" -- {"constant":float,"current":float}
"processvalue" -- {"air":float,"product":float} ("product" only w/ type="cascade")
"range" -- {"min":float,"max":float}
"enable" -- {"constant":bool,"current":bool}
"units" -- str
"mode" -- str('Off' or 'Auto' or 'Manual')
"deviation" -- {"positive": float, "negative": float}
"enable_cascade" -- {"constant":bool,"current":bool}
"power" -- {"constant": float, "current": float}
'''
loop_functions = {
'cascade':{
'setpoint':self.get_cascade_sp,
'setPoint':self.get_cascade_sp,
'setValue':self.get_cascade_sp,
'processvalue':self.get_cascade_pv,
'processValue':self.get_cascade_pv,
'range':self.get_cascade_range,
'enable':self.get_cascade_en,
'units':self.get_cascade_units,
'mode':self.get_cascade_mode,
'deviation':self.get_cascade_deviation,
'enable_cascade':self.get_cascade_ctl,
'power': self.get_cascade_power
},
'loop':{
'setpoint':self.get_loop_sp,
'setPoint':self.get_loop_sp,
'setValue':self.get_loop_sp,
'processvalue':self.get_loop_pv,
'processValue':self.get_loop_pv,
'range':self.get_loop_range,
'enable':self.get_loop_en,
'units':self.get_loop_units,
'mode':self.get_loop_mode,
'power':self.get_loop_power
}
}
if isinstance(identifier, basestring):
my_loop_map = self.loop_map[self.named_loop_map[identifier]]
loop_number = my_loop_map['num']
loop_type = my_loop_map['type']
param_list = args if len(args) > 0 else None
elif isinstance(identifier, int) and len(args) >= 1:
loop_number = identifier
loop_type = args[0]
param_list = args[1:] if len(args) > 1 else None
else:
raise ValueError(
'invalid argument format, call w/: '
'get_loop(int(identifier), str(loop_type), *args) or '
'get_loop(str(identifier), *args), *args are optional.'
)
if param_list is None:
param_list = loop_functions[loop_type].keys()
excludes = ['setPoint', 'setValue', 'processValue']
param_list = [x for x in param_list if x not in excludes]
elif len(param_list) >= 1 and \
(isinstance(param_list[0], list) or isinstance(param_list[0], tuple)):
param_list = param_list[0]
ret = {}
for key in param_list:
try:
ret[key] = loop_functions[loop_type][key](loop_number, exclusive=False)
except KeyError:
ret[key] = None
except NotImplementedError:
ret[key] = None
return ret
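    # Example calls (illustrative; 'Temperature' is a hypothetical loop name
    # that would have to exist in named_loop_map):
    #
    #   ctlr.get_loop(1, 'loop', 'setpoint', 'processvalue')
    #   ctlr.get_loop('Temperature', ['setpoint', 'range'])
    #
    # Both forms return a dict keyed by the requested parameters.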
@exclusive
def set_loop(self, identifier, loop_type='loop', param_list=None, **kwargs):
'''
Set all parameters for a loop from a given list.
Args:
identifier (int or str): The loop number, or the name of the loop
loop_type (str): The loop type (disregarded when identifier is a str)::
"cascade" -- A cascade control loop.
"loop" -- A standard control loop (default).
param_list (dict(dict)): The parameters to update as a dictionary::
see kwargs for possible keys/values
            kwargs (dict): The parameters to update as keyword arguments; param_list overrides::
"setpoint" -- The target temp/humi/altitude/etc of the control loop
"range" -- The settable range for the "setpoint"
"enable" -- turn the control loop on or off
"power" -- set the manual power of the control loop
"mode" -- set the control mode of the control loop
"deviation" -- (type="cascade" only) The allowable difference between air/prod.
"enable_cascade" -- (type="cascade" only) Enable or disable cascade type control
Returns:
None
'''
loop_functions = {
'cascade':{
'setpoint':self.set_cascade_sp,
'setPoint':self.set_cascade_sp,
'setValue':self.set_cascade_sp,
'range':self.set_cascade_range,
'enable':self.set_cascade_en,
'deviation':self.set_cascade_deviation,
'enable_cascade':self.set_cascade_ctl,
'mode':self.set_cascade_mode,
'power':self.set_cascade_power
},
'loop':{
'setpoint':self.set_loop_sp,
'setPoint':self.set_loop_sp,
'setValue':self.set_loop_sp,
'range':self.set_loop_range,
'enable':self.set_loop_en,
'mode':self.set_loop_mode,
'power':self.set_loop_power
}
}
if param_list is None:
param_list = kwargs
if isinstance(identifier, basestring):
my_loop_map = self.loop_map[self.named_loop_map[identifier]]
loop_number = my_loop_map['num']
loop_type = my_loop_map['type']
elif isinstance(identifier, (int, long)):
loop_number = identifier
else:
raise ValueError(
'invalid argument format, call w/: '
'set_loop(int(identifier), str(loop_type), **kwargs) or '
'get_loop(str(identifier), **kwargs)'
)
#mode must be done first
if 'mode' in param_list:
loop_functions[loop_type]['mode'](
exclusive=False,
N=loop_number,
value=param_list.pop('mode')
)
for key, val in param_list.items():
try:
loop_functions[loop_type][key](
exclusive=False,
N=loop_number,
value=val
)
except KeyError:
pass
except NotImplementedError:
pass
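    # Example call (illustrative): update several loop parameters at once.
    #
    #   ctlr.set_loop(1, 'loop', setpoint=25.0, enable=True)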
@exclusive
def get_operation(self, pgm=None):
'''
Get the complete operation status of the chamber (excludes control loops and events).
Args:
pgm (dict): A cached version of the running program, if None it will be retrieved.
Returns:
dict::
"mode" -- str('program' or 'constant' or 'standby' or 'off' or 'alarm' or 'paused')
"status" -- str(varies by controller; more detailed/format'd version of "mode" key)
"program" --
"alarm" -- [int] list of active alarms by number, empty list if no alarms
'''
status = self.get_status(exclusive=False)
ret = {'status': status}
if 'Paused' in status:
ret['mode'] = 'program_pause'
elif status.startswith('Prog'):
ret['mode'] = 'program'
elif status.startswith('Const'):
ret['mode'] = 'constant'
elif status.startswith('Stand'):
ret['mode'] = 'standby'
elif status == 'Off':
ret['mode'] = 'off'
elif status == 'Alarm':
ret['mode'] = 'alarm'
else:
ret['mode'] = 'ERROR'
if 'Program' in status:
pnum = self.get_prgm_cur(exclusive=False)
ret['program'] = {
'number':pnum,
'step':self.get_prgm_cstep(),
'time_remaining':self.get_prgm_time(pgm, exclusive=False),
'step_time_remaining':self.get_prgm_cstime(exclusive=False),
'name':self.get_prgm_name(pnum, exclusive=False),
'steps':self.get_prgm_steps(pnum, exclusive=False),
'cycles':self.get_prgm_counter(exclusive=False)
}
else:
ret['program'] = None
if status == 'Alarm':
ret['alarms'] = self.get_alarm_status(exclusive=False)['active']
else:
ret['alarms'] = None
return ret
@exclusive
def set_operation(self, mode, **kwargs):
'''
Update the controllers operation mode (ie run a program, stop, constant etc)
Args:
mode (string): The mode to run:
'standby' or 'off': Stop the chamber from running
'constant': Start the configured constant mode
'program': run a program
'program_pause': pause the running program
'program_resume': resume the paused program
'program_advance': force the program to the next step.
kwargs:
program (int or dict): the program # to run, valid dict: {'number':int, 'step': int}
step (int): the step # to start the program on. (Default=1)
        Returns:
            None
'''
if mode in ['standby', 'off']:
self.stop(exclusive=False)
elif mode == 'constant':
self.const_start(exclusive=False)
elif mode == 'program':
if isinstance(kwargs['program'], dict):
self.prgm_start(
kwargs['program']['number'],
kwargs['program'].get('step', 1),
exclusive=False
)
else:
self.prgm_start(kwargs['program'], kwargs.get('step', 1), exclusive=False)
elif mode == 'program_pause':
self.prgm_pause(exclusive=False)
elif mode == 'program_resume':
self.prgm_resume(exclusive=False)
elif mode == 'program_advance':
self.prgm_next_step(exclusive=False)
else:
raise ValueError('unsupported mode parameter')
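    # Example calls (illustrative; the program number/step are hypothetical):
    #
    #   ctlr.set_operation('constant')
    #   ctlr.set_operation('program', program={'number': 5, 'step': 2})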
@exclusive
def get_program(self, N):
'''
Get a program (alias for get_prgm)
Args:
N (int): The program number
Returns:
dict (format varies from controller to controller)
'''
return self.get_prgm(N, exclusive=False)
@exclusive
def set_program(self, N, value):
'''
Set a program (alias for set_prgm)
Args:
N (int): The program number
value (dict): The program to write to the controller, None erases the given program
'''
if value is None:
return self.prgm_delete(N, exclusive=False)
else:
return self.set_prgm(N, value, exclusive=False)
@exclusive
def get_program_list(self):
'''
Get a list of all programs on the chamber. (alias for get_prgms)
Returns:
[{"number":int, "name":str}]
'''
return self.get_prgms(exclusive=False)
@exclusive
def get_program_details(self, N):
'''
Get the name and number of steps of a program.
Args:
N (int): The program number
Returns:
{"number":int, "name":str, "steps":int}
'''
return {
'number':N,
'name':self.get_prgm_name(N, exclusive=False),
'steps':self.get_prgm_steps(N, exclusive=False)
}
@abstractmethod
def get_loop_sp(self, N):
'''
Get the setpoint of a control loop.
Args:
N (int): The number for the control loop
constant (bool): Read the constant or current setpoint, None=Both (default=None)
Returns:
{"constant":float, "current":float}
Raises:
ValueError
'''
pass
@abstractmethod
def set_loop_sp(self, N, value):
'''
Set the setpoint of the control loop.
Args:
N (int): The number for the control loop
value (float): The new setpoint
Returns:
None
Raises:
ValueError
'''
pass
@abstractmethod
def get_loop_pv(self, N):
'''
Get the process value of a loop.
Args:
N (int): The number for the control loop
product (bool): True=(not valid), False=air temp, None=both (default=None)
Returns:
dict(float). product=None
float. product=bool
Raises:
ValueError
'''
pass
@abstractmethod
def get_loop_range(self, N):
'''
Get the valid setpoint range of a loop
Args:
N (int): The number of the loop
Returns:
{"min": float, "max": float}
'''
pass
@abstractmethod
def set_loop_range(self, N, value):
'''
Set the valid setpoint range of a loop
Args:
N (int): The number of the loop
value: ({"min": float, "max": float}): The range
'''
pass
@abstractmethod
def get_loop_en(self, N):
'''
Get the enable/disable state of a loop
Args:
N (int): The number of the loop
Returns:
{"constant": bool, "current": bool}
'''
pass
@abstractmethod
def set_loop_en(self, N, value):
'''
Set the enable/disable state of a loop
Args:
N (int): The number of the loop
value (bool): True = loop is running
'''
pass
@abstractmethod
def get_loop_units(self, N):
'''
Get the units for a loop
Args:
N (int): The number of the loop
Returns:
string
'''
pass
@abstractmethod
def get_loop_mode(self, N):
'''
Get the control mode for a loop
Args:
N (int): The number of the loop
Returns:
string: The control mode (varies by controller)
'''
pass
@abstractmethod
def get_loop_modes(self, N):
'''
Get the available modes for a loop
Args:
N (int): The number of the loop
Returns:
string: list of control modes
'''
pass
@abstractmethod
def set_loop_mode(self, N, value):
'''
Get the control mode for a loop
Args:
N (int): The number of the loop
value (bool): The control mode (varies by controller)
'''
pass
@abstractmethod
def get_loop_power(self, N):
'''
Get the output power(%) for a loop
Args:
N (int): The number of the loop
Returns:
{"constant": float, "current": float}
'''
pass
@abstractmethod
def set_loop_power(self, N, value):
'''
Set the output power(%) for a loop
Args:
N (int): The number of the loop
value (float): The output power
'''
pass
@abstractmethod
def get_cascade_sp(self, N):
'''
Get the setpoint for a cascade loop
Args:
N (int): The number of the loop
Returns:
{"constant":float, "current":float, "air":float, "product":float}
'''
pass
@abstractmethod
def set_cascade_sp(self, N, value):
'''
Get the setpoint for a cascade loop
Args:
N (int): The number of the loop
value (float): The setpoint
'''
pass
@abstractmethod
def get_cascade_pv(self, N):
'''
Get the process value for a cascade loop
Args:
N (int): The number of the loop
Returns:
{"product":float, "air":float}
'''
pass
@abstractmethod
def get_cascade_range(self, N):
'''
Get the valid setpoint range for a cascade loop
Args:
N (int): The number of the loop
Returns:
{"min":float, "max":float}
'''
pass
@abstractmethod
def set_cascade_range(self, N, value):
'''
Set the valid setpoint range for a cascade loop
Args:
N (int): The number of the loop
value ({"min":float, "max":float}): The range
'''
pass
@abstractmethod
def get_cascade_en(self, N):
'''
Get the enable/disable state for a cascade loop
Args:
N (int): The number of the loop
Returns:
{"constant":bool, "current":bool}
'''
pass
@abstractmethod
def set_cascade_en(self, N, value):
'''
Set the enable/disable state for a cascade loop
Args:
N (int): The number of the loop
value (bool): True = loop running
'''
pass
@abstractmethod
def get_cascade_units(self, N):
'''
Get the units for a cascade loop
Args:
N (int): The number of the loop
Returns:
str: The loop units
'''
pass
@abstractmethod
def get_cascade_mode(self, N):
'''
Get the control mode for a cascade loop
Args:
N (int): The number of the loop
Returns:
str: The control mode
'''
pass
@abstractmethod
def get_cascade_modes(self, N):
'''
Get the available modes for a cascade loop
Args:
N (int): The number of the loop
Returns:
string: list of control modes
'''
pass
@abstractmethod
def set_cascade_mode(self, N, value):
'''
Set the control mode for a cascade loop
Args:
N (int): The number of the loop
value (str): The control mode
'''
pass
@abstractmethod
def get_cascade_ctl(self, N):
'''
Get enable/disable of cascade mode for a cascade loop
Args:
N (int): The number of the loop
Returns:
{"constant":bool, "current":bool}
'''
pass
@abstractmethod
def set_cascade_ctl(self, N, value):
'''
Set enable/disable of cascade mode for a cascade loop
Args:
N (int): The number of the loop
            value (bool): True = cascade mode enabled
'''
pass
@abstractmethod
def get_cascade_deviation(self, N):
'''
Get allowable product to air deviation for a cascade loop
Args:
N (int): The number of the loop
Returns:
{"positive":float, "negative":float}
'''
pass
@abstractmethod
def set_cascade_deviation(self, N, value):
'''
Set allowable product to air deviation for a cascade loop
Args:
N (int): The number of the loop
value ({"positive": float, "negative": float}): The deviations
'''
pass
@abstractmethod
def get_cascade_power(self, N):
'''
Get the output power for a cascade loop
Args:
N (int): The number of the loop
Returns:
{"constant":float, "current":float}
'''
pass
@abstractmethod
def set_cascade_power(self, N, value):
'''
Set the output power for a cascade loop
Args:
N (int): The number of the loop
value (float): The output power %
'''
pass
@abstractmethod
def get_event(self, N):
'''
Get the state of a programmable output
Args:
N (int): The number of the output
Returns:
{"constant":bool, "current":bool}
'''
pass
@abstractmethod
def set_event(self, N, value):
'''
Set the state of a programmable output
Args:
N (int): The number of the output
value (bool): the output state True = on
'''
pass
@abstractmethod
def get_status(self):
'''
Get the chamber status.
Returns:
str: The chamber status
'''
pass
@abstractmethod
def get_alarm_status(self):
'''
Get the chamber alarms status.
Returns:
{"active":[int], "inactive":[int]}
'''
pass
@abstractmethod
def const_start(self):
'''
Start the constant mode of the chamber
'''
pass
@abstractmethod
def stop(self):
'''
        Stop operation of the chamber.
'''
pass
@abstractmethod
def prgm_start(self, N, step):
'''
Start a program on the chamber
Args:
N (int): The number of the program to start.
step (int): The step to start the program on.
'''
pass
@abstractmethod
def prgm_pause(self):
'''
Pause the running program.
'''
pass
@abstractmethod
def prgm_resume(self):
'''
Resume the paused program.
'''
pass
@abstractmethod
def prgm_next_step(self):
'''
Skip to the next step of the running program.
'''
pass
@abstractmethod
def get_prgm_counter(self):
'''
Get the status of the jump step/counter
Returns:
[{'name':str, 'start':int, 'end':int, 'cycles':int, 'remaining':int}]
'''
pass
@abstractmethod
def get_prgm_cur(self):
'''
Get the number of the running program
Returns:
int: The running program
'''
pass
@abstractmethod
def get_prgm_cstep(self):
'''
Get the current step of the running program.
Returns:
int: The current step of the running program
'''
pass
@abstractmethod
def get_prgm_cstime(self):
'''
Get the remaining time of the current step of the running program
Returns:
str: HH:MM:SS
'''
pass
@abstractmethod
def get_prgm_time(self, pgm=None):
'''
Get the time until the running program ends.
Args:
pgm (dict): The current program cache (optional,speeds up some controllers)
Returns:
str: HH:MM
'''
pass
@abstractmethod
def get_prgm_name(self, N):
'''
Get the name of a given program.
Args:
N (int): The program to get the name of
Returns:
str: The program name
'''
pass
@abstractmethod
def set_prgm_name(self, N, value):
'''
        Set the name of a given program
Args:
N (int): The program to set the name of
value (str): The new name for the program
'''
pass
@abstractmethod
def get_prgm_steps(self, N):
'''
Get the total number of steps in a program
Args:
N (int): The program number
Returns:
int: The total number of steps
'''
pass
@abstractmethod
def get_prgms(self):
'''
Get a list of all programs on the chamber.
Returns:
[{"number":int, "name":str}]
'''
pass
@abstractmethod
def get_prgm(self, N):
'''
Get a program
Args:
N (int): The program number
Returns:
dict (format varies from controller to controller)
'''
pass
@abstractmethod
def set_prgm(self, N, value):
'''
Set a program
Args:
N (int): The program number
value (dict): The program to write to the controller
'''
pass
@abstractmethod
def prgm_delete(self, N):
'''
Delete a program
Args:
N (int): The program number
'''
pass
@exclusive
def sample(self, lookup=None, **kwargs):
'''
Take a sample for data logging, gets datetime, mode, and sp/pv on all loops
kwargs:
get_loops (bool): If true get loop status (Default=True)
get_status (bool): If true get controller operation status. (Default=True)
get_alarms (bool): If true get the controller alarms status. (Default=False)
            get_program_status (bool): If true get information about the running program. (Default=False)
get_events (list or dict): A list of events to get the current status of (Default=None)
get_program_list (bool): get a list of programs on the controller. (Default=False)
get_refrig (bool): If true get the controller refrig mode. (Default=False)
Returns:
{"datetime":datetime.datetime, "loops":[{varies}], "status":str}
'''
ret = {'datetime':self.get_datetime(exclusive=False)}
if kwargs.get('get_loops', True):
ret['loops'] = []
for tmap in self.loop_map:
items = ['setpoint', 'processvalue', 'enable', 'mode', 'power', 'units', 'range']
if tmap['type'] == 'cascade':
items += ['enable_cascade', 'deviation']
                if lookup:
                    lkps = [lkp for lkp in lookup[tmap['type']] if lkp['number'] == tmap['num']]
                else:
                    lkps = []
                lpdata = lkps[0].copy() if lkps else {}
lpdata.update(self.get_loop(tmap['num'], tmap['type'], items, exclusive=False))
ret['loops'].append(lpdata)
if kwargs.get('get_status', True) or kwargs.get('get_program_status', False):
ret['status'] = self.get_status(exclusive=False)
if kwargs.get('get_alarms', False):
ret['alarms'] = self.get_alarm_status(exclusive=False)
if kwargs.get('get_program_status', False) and self.profiles:
if ret['status'].startswith('Program') and 'Remote' not in ret['status']:
cpn = self.get_prgm_cur(exclusive=False)
ret['program_status'] = {
'number':cpn,
'name':self.get_prgm_name(cpn, exclusive=False),
'step':self.get_prgm_cstep(exclusive=False),
'time_step':self.get_prgm_cstime(exclusive=False),
'time_total':self.get_prgm_time(kwargs.get('running_program'), exclusive=False),
'counters':self.get_prgm_counter(exclusive=False)
}
else:
ret['program_status'] = {p:None for p in ['number', 'name', 'step', 'time_step', 'time_total', 'counters']}
if kwargs.get('get_program_list', False) and self.profiles:
ret['program_list'] = self.get_prgms(exclusive=False)
if kwargs.get('get_events', None):
if isinstance(kwargs['get_events'][0], dict):
events = kwargs['get_events']
else:
events = [{'N':i} for i in kwargs['get_events']]
for event in events:
event['status'] = self.get_event(event['N'], exclusive=False)
ret['events'] = events
if kwargs.get('get_refrig', False):
try:
ret['refrig'] = self.get_refrig(exclusive=False)
except NotImplementedError:
ret['refrig'] = None
return ret
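    # Example call (illustrative): take one data-logging sample that also
    # includes the alarm status.
    #
    #   row = ctlr.sample(get_alarms=True)
    #   row['datetime'], row['loops'], row['status'], row['alarms']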
@abstractmethod
def process_controller(self, update=True):
'''
Read the controllers "part number" and setup the class as best as possible using it.
Args:
update (bool): When true update the classes configuration (default=True)
Returns:
str: The "part number"
'''
pass
@abstractmethod
def get_network_settings(self):
'''
Get the network settings from the controller (if controller supports)
Returns:
{"address":str, "mask":str, "gateway":str, "message":str, "host":str}
'''
pass
@abstractmethod
def set_network_settings(self, value):
'''
Set the network settings of the controller (if controller supports)
Args:
value ({"address":str, "mask":str, "gateway":str, "message":str, "host":str}): Settings
'''
pass
def get_operation_modes(self):
'''
Get the supported operation modes for this controller.
Returns:
["standby","constant","program"] or ["constant","program"]
'''
return ['standby', 'constant', 'program']
def self_test(self, loops, cascades):
'''
preform a self test on all functions
Args:
loops (int): The number of standard control loops
cascades (int): The number of cascade control loops
'''
def print_exception(trce):
'''
Format an Exception for printing
'''
print '\n'.join(['\t' + l for l in trce.split('\n')])
print 'process_controller():'
try:
print '\t%r' % self.process_controller()
except Exception:
print_exception(traceback.format_exc())
print 'get_datetime:'
try:
print '\t%r' % self.get_datetime()
except Exception:
print_exception(traceback.format_exc())
print 'set_datetime:'
try:
self.set_datetime(self.get_datetime())
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'get_operation:'
try:
print '\t%r' % self.get_operation()
except Exception:
print_exception(traceback.format_exc())
for lpi in range(loops):
i = lpi + 1
print 'get_loop_sp(%d):' % i
try:
print '\t%r' % self.get_loop_sp(i)
except Exception:
print_exception(traceback.format_exc())
print 'set_loop_sp(%d):' %i
try:
self.set_loop_sp(i, self.get_loop_sp(i)['constant'])
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'get_loop_pv(%d):' % i
try:
print '\t%r' % self.get_loop_pv(i)
except Exception:
print_exception(traceback.format_exc())
print 'get_loop_range(%d):' % i
try:
print '\t%r' % self.get_loop_range(i)
except Exception:
print_exception(traceback.format_exc())
print 'set_loop_range(%d):' %i
try:
self.set_loop_range(i, self.get_loop_range(i))
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'get_loop_en(%d):' % i
try:
print '\t%r' % self.get_loop_en(i)
except Exception:
print_exception(traceback.format_exc())
print 'set_loop_en(%d):' %i
try:
self.set_loop_en(i, self.get_loop_en(i)['constant'])
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'get_loop_units(%d):' % i
try:
print '\t%r' % self.get_loop_units(i)
except Exception:
print_exception(traceback.format_exc())
print 'get_loop_mode(%d):' % i
try:
print '\t%r' % self.get_loop_mode(i)
except Exception:
print_exception(traceback.format_exc())
print 'get_loop_power(%d):' % i
try:
print '\t%r' % self.get_loop_power(i)
except Exception:
print_exception(traceback.format_exc())
for csi in range(cascades):
i = csi + 1
print 'get_cascade_sp[%d]:' % i
try:
print '\t%r' % self.get_cascade_sp(i)
except Exception:
print_exception(traceback.format_exc())
print 'set_cascade_sp(%d):' %i
try:
self.set_cascade_sp(i, self.get_cascade_sp(i)['constant'])
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'get_cascade_pv(%d):' % i
try:
print '\t%r' % self.get_cascade_pv(i)
except Exception:
print_exception(traceback.format_exc())
print 'get_cascade_range(%d):' % i
try:
print '\t%r' % self.get_cascade_range(i)
except Exception:
print_exception(traceback.format_exc())
print 'set_cascade_range[%d]:' %i
try:
self.set_cascade_range(i, self.get_cascade_range(i))
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'get_cascade_en[%d]:' % i
try:
print '\t%r' % self.get_cascade_en(i)
except Exception:
print_exception(traceback.format_exc())
print 'set_cascade_en(%d):' %i
try:
self.set_cascade_en(i, self.get_cascade_en(i)['constant'])
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'get_cascade_units(%d):' % i
try:
print '\t%r' % self.get_cascade_units(i)
except Exception:
print_exception(traceback.format_exc())
print 'get_cascade_mode(%d):' % i
try:
print '\t%r' % self.get_cascade_mode(i)
except Exception:
print_exception(traceback.format_exc())
print 'get_cascade_ctl(%d):' % i
try:
print '\t%r' % self.get_cascade_ctl(i)
except Exception:
print_exception(traceback.format_exc())
print 'set_cascade_ctl(%d):' %i
try:
self.set_cascade_ctl(i, self.get_cascade_ctl(i))
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'get_cascade_deviation(%d):' % i
try:
print '\t%r' % self.get_cascade_deviation(i)
except Exception:
print_exception(traceback.format_exc())
print 'set_cascade_deviation(%d):' %i
try:
self.set_cascade_deviation(i, self.get_cascade_deviation(i))
print '\tok'
except Exception:
print_exception(traceback.format_exc())
for i in range(1, 13):
print 'get_event(%d):' % i
try:
print '\t%r' % self.get_event(i)
except Exception:
print_exception(traceback.format_exc())
print 'set_event(%d):' %i
try:
self.set_event(i, self.get_event(i)['current'])
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'get_status:'
try:
print '\t%r' % self.get_status()
except Exception:
print_exception(traceback.format_exc())
print 'get_alarm_status:'
try:
print '\t%r' % self.get_alarm_status()
except Exception:
print_exception(traceback.format_exc())
print 'get_prgm_cur:'
try:
print '\t%r' % self.get_prgm_cur()
except Exception:
print_exception(traceback.format_exc())
print 'get_prgm_cstep:'
try:
print '\t%r' % self.get_prgm_cstep()
except Exception:
print_exception(traceback.format_exc())
print 'get_prgm_cstime:'
try:
print '\t%r' % self.get_prgm_cstime()
except Exception:
print_exception(traceback.format_exc())
print 'get_prgm_time:'
try:
print '\t%r' % self.get_prgm_time()
except Exception:
print_exception(traceback.format_exc())
for i in range(1, 6): #do 5 programs only
print 'get_prgm_name(%d):' % i
try:
print '\t%r' % self.get_prgm_name(i)
except Exception:
print_exception(traceback.format_exc())
print 'get_prgm_steps(%d):' % i
try:
print '\t%r' % self.get_prgm_steps(i)
except Exception:
print_exception(traceback.format_exc())
print 'get_prgms:'
try:
print '\t%r' % self.get_prgms()
except Exception:
print_exception(traceback.format_exc())
print 'get_prgm(1):'
try:
print '\t%r' % self.get_prgm(1)
except Exception:
print_exception(traceback.format_exc())
        print 'set_prgm(2):'
try:
self.set_prgm(2, self.get_prgm(1))
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'get_network_settings:'
try:
print '\t%r' % self.get_network_settings()
except Exception:
print_exception(traceback.format_exc())
print 'set_network_settings:'
try:
self.set_network_settings(self.get_network_settings())
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'call const_start():'
try:
self.const_start()
time.sleep(5)
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'call stop():'
try:
self.stop()
time.sleep(5)
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'call prgm_start(1,1):'
try:
self.prgm_start(1, 1)
time.sleep(5)
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'call prgm_pause():'
try:
self.prgm_pause()
time.sleep(5)
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'call prgm_resume():'
try:
self.prgm_resume()
time.sleep(5)
print '\tok'
except Exception:
print_exception(traceback.format_exc())
print 'call sample():'
try:
print '\t%r' % self.sample()
except Exception:
print_exception(traceback.format_exc())
print 'Testing Complete'
| {
"content_hash": "2982ed28c1a252b0dd455187470aa5cb",
"timestamp": "",
"source": "github",
"line_count": 1445,
"max_line_length": 123,
"avg_line_length": 30.91764705882353,
"alnum_prop": 0.5130047452771063,
"repo_name": "EspecNorthAmerica/ChamberConnectLibrary",
"id": "016f20fe170b1b338f595d31afd11f7aa8175963",
"size": "44678",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chamberconnectlibrary/controllerinterface.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "249295"
}
],
"symlink_target": ""
} |
import os
import time
import numpy as np
import keras_vgg_buddy
from .generators.base import output_samples
from .generators.style_xfer import output_size_from_glob, transform_glob
from .generators.callbacks import GenerateSamplesCallback
from .models.config import get_model_by_name
from .training import train
def main(args):
# ensure output path exists
output_dir = os.path.dirname(args.output_prefix)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
if args.train:
train_main(args)
else:
generate_main(args)
def train_main(args):
# should this allow training on rectangles or just squares?
args.max_height = args.max_width
assert args.style_image_path, 'Style image path required for training.'
style_img = keras_vgg_buddy.load_and_preprocess_image(args.style_image_path, width=args.max_width, square=True)
print('creating model...')
model, input_generator, eval_generator = get_model_by_name(args.model)
model = model(args, style_img=style_img)
print('loading weights...')
weights_filename = args.weights_prefix + '.weights'
model_filename = args.weights_prefix + '.json'
if not args.ignore_weights and os.path.exists(weights_filename):
model.nodes['texnet'].load_weights(weights_filename)
input_generator = input_generator(args)
eval_generator = eval_generator(args)
started_training = time.time()
train(model, args, input_generator,
callbacks=[GenerateSamplesCallback(model, args, eval_generator)]
)
print('Done training after {:.2f} seconds'.format(time.time() - started_training))
print('saving weights')
save_kwargs = {}
if args.auto_save_weights:
save_kwargs['overwrite'] = True
model.nodes['texnet'].save_weights(weights_filename, **save_kwargs)
model_json = model.nodes['texnet'].to_json()
if args.save_model:
with open(model_filename, 'w') as model_file:
model_file.write(model_json)
# output final samples
output_samples(model, args, eval_generator)
def generate_main(args):
# determine input image size
args.max_height, args.max_width = output_size_from_glob(args.convert_glob, width=args.max_width)
print('creating model...')
model, input_generator, eval_generator = get_model_by_name(args.model)
model = model(args)
print('loading weights...')
weights_filename = args.weights_prefix + '.weights'
if not args.ignore_weights and os.path.exists(weights_filename):
model.nodes['texnet'].load_weights(weights_filename)
transform_glob(model, args)
if __name__ == '__main__':
import argparser
args = argparser.get_parser()
output_dir = os.path.dirname(args.output_prefix)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
main(args)
| {
"content_hash": "5135ec87ec1a8ca4d8c86c0a6c25bef6",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 115,
"avg_line_length": 33.964285714285715,
"alnum_prop": 0.69155275148966,
"repo_name": "awentzonline/keras-rtst",
"id": "9bdb3841aaba9bf1e924a543769ad51a87c2422d",
"size": "2853",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keras_rtst/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42160"
},
{
"name": "Shell",
"bytes": "2508"
}
],
"symlink_target": ""
} |
"""Provide handling of a gateway."""
from .controlsystem import ControlSystem
class Gateway(
object
): # pylint: disable=too-few-public-methods,useless-object-inheritance
"""Provide handling of a gateway."""
def __init__(self, client, location, data=None):
"""Initialise the class."""
self.client = client
self.location = location
self._control_systems = []
self.control_systems = {}
if data is not None:
self.__dict__.update(data["gatewayInfo"])
for cs_data in data["temperatureControlSystems"]:
control_system = ControlSystem(client, location, self, cs_data)
self._control_systems.append(control_system)
self.control_systems[control_system.systemId] = control_system
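# Usage sketch (illustrative only): ``client`` and ``location`` are assumed to
# be the usual evohomeclient2 objects, and the key inside "gatewayInfo" is a
# placeholder, since __init__ simply merges that dict into the instance.
#
#     gateway = Gateway(client, location, data={
#         "gatewayInfo": {"gatewayId": "12345"},
#         "temperatureControlSystems": [cs_data],
#     })
#     for system_id, control_system in gateway.control_systems.items():
#         print(system_id, control_system)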
| {
"content_hash": "b88b00d2af105f7afcfa51c7fc1d1e40",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 79,
"avg_line_length": 35.08695652173913,
"alnum_prop": 0.6270136307311028,
"repo_name": "watchforstock/evohome-client",
"id": "781406d93b1ca9f6430c77426c94058a9abdac1e",
"size": "807",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "evohomeclient2/gateway.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "63572"
}
],
"symlink_target": ""
} |
import os, glob, re
from lxml import etree
localesPath = 'C:\\Documents and Settings\\zelle\\My Documents\\CSL\\locales\\'
localeXSLTPath = 'C:\\Documents and Settings\\zelle\\My Documents\\CSL\\utilities\\update-locales-1.0-to-1.0.1.xsl'
class FileResolver(etree.Resolver):
def resolve(self, url, pubid, context):
return self.resolve_filename(url, context)
locales = []
for localePath in glob.glob( os.path.join(localesPath, 'locales-*.xml') ):
locales.append(os.path.basename(localePath))
if not os.path.exists(os.path.join(localesPath, '1.0.1')):
os.makedirs(os.path.join(localesPath, '1.0.1'))
for locale in locales:
with open(localeXSLTPath, 'r') as localeXSLT:
localeXSLTContent = localeXSLT.read()
localeXSLTContent = localeXSLTContent.replace('locales-nl-NL.xml', locale)
## print(localeXSLTContent)
    localizedXSLT = open(os.path.join('C:\\Documents and Settings\\zelle\\My Documents\\CSL\\utilities\\', 'localizedXSLT.xsl'), 'w')
localizedXSLT.write(localeXSLTContent)
localizedXSLT.close()
## need to read modified copy!!!
    localeXSLT = etree.parse(os.path.join('C:\\Documents and Settings\\zelle\\My Documents\\CSL\\utilities\\', 'localizedXSLT.xsl'))
localeTransform = etree.XSLT(localeXSLT)
    parsedLocale = etree.parse('C:\\Documents and Settings\\zelle\\My Documents\\CSL\\utilities\\locales-en-US.xml')
## print(etree.tostring(parsedLocale, pretty_print=True, xml_declaration=True, encoding="utf-8"))
localeElement = parsedLocale.getroot()
updatedLocale = localeTransform(localeElement)
updatedLocale = etree.tostring(updatedLocale, pretty_print=True, xml_declaration=True, encoding="utf-8")
updatedLocale = updatedLocale.replace(" <!--", "\n <!--")
updatedLocale = updatedLocale.replace(" ", " ")
updatedLocale = updatedLocale.replace("'", '"', 4)
updatedLocaleFile = open(os.path.join(localesPath, '1.0.1', locale), 'w')
    updatedLocaleFile.write( updatedLocale )
updatedLocaleFile.close()
| {
"content_hash": "8d3530163a652c6549dc049783916898",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 128,
"avg_line_length": 44.666666666666664,
"alnum_prop": 0.7154228855721393,
"repo_name": "citation-style-language/utilities",
"id": "501d0af594cd477b05e9b987ba4c27d7eabd39aa",
"size": "2231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "update-locales-1.0-to-1.0.1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "19895"
},
{
"name": "JavaScript",
"bytes": "211279"
},
{
"name": "Python",
"bytes": "67201"
},
{
"name": "Ruby",
"bytes": "388"
},
{
"name": "Shell",
"bytes": "15481"
},
{
"name": "XSLT",
"bytes": "33926"
}
],
"symlink_target": ""
} |
name = 'Phennyfyxata. http://phenny.venefyxatu.be'
nick = 'Phennyfyxata'
host = 'irc.freenode.net'
channels = ['#dutchnano', '#venefyxatu']
#channels = ['#venefyxatu']
owner = 'venefyxatu'
password = 'PLACEHOLDER_PASSWORD'
# This isn't implemented yet:
# serverpass = 'yourserverpassword'
# These are people who will be able to use admin.py's functions...
admins = [owner] # strings with other nicknames
# But admin.py is disabled by default, as follows:
#exclude = ['admin']
# If you want to enumerate a list of modules rather than disabling
# some, use "enable = ['example']", which takes precedent over exclude
#
# enable = []
# Directories to load opt modules from
extra = []
# Services to load: maps channel names to white or black lists
external = {
'#dutchnano': ['!'], # allow all
'#venefyxatu': ['!'],
'#conservative': [], # allow none
'*': ['py', 'whois', 'glyph'], # default whitelist
}
# EOF
tell_filename = "/home/venefyatu/tellfile"
| {
"content_hash": "7669439531877e4f27003ed4e93ceb9f",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 70,
"avg_line_length": 26.916666666666668,
"alnum_prop": 0.6790505675954592,
"repo_name": "Venefyxatu/phennyfyxata",
"id": "dbfc896efffac8226ca7187ed8c9ec65145877b5",
"size": "969",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": ".phenny/default.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "2336"
},
{
"name": "HTML",
"bytes": "12941"
},
{
"name": "Python",
"bytes": "151082"
},
{
"name": "Shell",
"bytes": "131"
}
],
"symlink_target": ""
} |
import os
import json
from argparse import ArgumentParser
from CloudFormationGraph import GraphvizGenerator
def main():
parser = ArgumentParser()
parser.add_argument('--in', dest='stack', required=True, metavar='FILE',
help='File containing CloudFormation template',
type=lambda x: valid_file(parser, x))
parser.add_argument('--out', dest='out', default=None,
help='File path to write out to, defaults to STDOUT')
args = parser.parse_args()
gg = GraphvizGenerator()
stack = json.load(args.stack) # TODO: try/catch to deal with broken JSON
gg.add_stack(stack)
dot = gg.build()
if args.out is None:
print dot
else:
f = open(args.out, 'w')
f.write(dot)
f.close()
def valid_file(parser, filepath):
"""Open the file if it's valid, else throw a parser error"""
if os.path.isfile(filepath):
return open(filepath, 'r')
else:
        parser.error("%s doesn't appear to be a valid file!" % filepath)
if __name__ == '__main__':
main()
| {
"content_hash": "2d719a0fce77aca0892708d24ceed510",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 77,
"avg_line_length": 30.416666666666668,
"alnum_prop": 0.6027397260273972,
"repo_name": "tigertoes/CloudFormationGraph",
"id": "400aa7e0970ea4a90bdeb88e49487e656d718edb",
"size": "1710",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CloudFormationGraph/cfd.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8732"
}
],
"symlink_target": ""
} |
""" Fusion Tables Client.
Issue requests to Fusion Tables.
"""
__author__ = '[email protected] (Kathryn Brisbin)'
import urllib2, urllib
try:
import oauth2
import authorization.oauth
except: pass
import httplib2
from oauth2client.file import Storage
from oauth2client.client import AccessTokenRefreshError
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.tools import run
from urllib import urlencode
class FTClient():
def _get(self, query): pass
def _post(self, query): pass
def query(self, query, request_type=None):
""" Issue a query to the Fusion Tables API and return the result. """
#encode to UTF-8
try: query = query.encode("utf-8")
except: query = query.decode('raw_unicode_escape').encode("utf-8")
lowercase_query = query.lower()
if lowercase_query.startswith("select") or \
lowercase_query.startswith("describe") or \
lowercase_query.startswith("show") or \
request_type=="GET":
return self._get(urllib.urlencode({'sql': query}))
else:
return self._post(urllib.urlencode({'sql': query}))
class ClientLoginFTClient(FTClient):
def __init__(self, token):
self.auth_token = token
self.request_url = "https://www.google.com/fusiontables/api/query"
def _get(self, query):
headers = {
'Authorization': 'GoogleLogin auth=' + self.auth_token,
}
serv_req = urllib2.Request(url="%s?%s" % (self.request_url, query),
headers=headers)
serv_resp = urllib2.urlopen(serv_req)
return serv_resp.read()
def _post(self, query):
headers = {
'Authorization': 'GoogleLogin auth=' + self.auth_token,
'Content-Type': 'application/x-www-form-urlencoded',
}
serv_req = urllib2.Request(url=self.request_url, data=query, headers=headers)
serv_resp = urllib2.urlopen(serv_req)
return serv_resp.read()
class OAuthFTClient(FTClient):
def __init__(self, consumer_key, consumer_secret):
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.scope = "https://www.google.com/fusiontables/api/query"
self._set_flow()
def _set_flow(self):
self.FLOW = OAuth2WebServerFlow(
client_id=self.consumer_key,
client_secret=self.consumer_secret,
scope=self.scope,
user_agent="fusion-tables-client-python/1.0")
def _authorize(self):
storage = Storage("fusion_tables.dat")
credentials = storage.get()
if credentials is None or credentials.invalid:
self._set_flow()
credentials = run(self.FLOW, storage)
http = httplib2.Http()
http = credentials.authorize(http)
return http
def _get(self, query):
url = "%s?%s" % (self.scope, query)
resp, content = self._authorize().request(url, method="GET")
return content
def _post(self, query):
url = self.scope
headers = {'Content-type': 'application/x-www-form-urlencoded'}
resp, content = self._authorize().request(
url, method="POST", body=query, headers=headers)
return content
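# Usage sketch (illustrative; the token and table id below are placeholders):
#
#     client = ClientLoginFTClient('my-auth-token')
#     rows = client.query('SELECT * FROM 123456')               # routed through _get
#     client.query("INSERT INTO 123456 (name) VALUES ('x')")    # routed through _post
#
# FTClient.query() inspects the leading SQL keyword, so SELECT/DESCRIBE/SHOW
# statements (or request_type="GET") issue GET requests and everything else
# is POSTed.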
| {
"content_hash": "677f1396acff9ce20370631ea1b97349",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 110,
"avg_line_length": 29.65714285714286,
"alnum_prop": 0.655427103403982,
"repo_name": "MapofLife/MOL",
"id": "1e8adcbaa469c84a72b4b0a69f60cae2664feecb",
"size": "3168",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "earthengine/gft-mod/src/ftclient.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "83354"
},
{
"name": "CSS",
"bytes": "245523"
},
{
"name": "JavaScript",
"bytes": "1302309"
},
{
"name": "PHP",
"bytes": "613"
},
{
"name": "Perl",
"bytes": "2100"
},
{
"name": "Python",
"bytes": "1953387"
},
{
"name": "R",
"bytes": "52"
},
{
"name": "SQL",
"bytes": "21299"
},
{
"name": "Shell",
"bytes": "3146"
}
],
"symlink_target": ""
} |
from enum import Enum
class ApplicationType(Enum):
web = "web"
other = "other"
class FlowType(Enum):
bluefield = "Bluefield"
class RequestSource(Enum):
rest = "rest"
class WebTestKind(Enum):
ping = "ping"
multistep = "multistep"
| {
"content_hash": "c78a7163257712d5675a235a9164e4e6",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 28,
"avg_line_length": 11.521739130434783,
"alnum_prop": 0.6377358490566037,
"repo_name": "AutorestCI/azure-sdk-for-python",
"id": "fdb344ca6bf35e5356aeb9f4bb8c3e9042927216",
"size": "739",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/models/application_insights_management_client_enums.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "34619070"
}
],
"symlink_target": ""
} |
import sublime, sublime_plugin
import json
import os.path
try:
import urllib.request as urllib2
except ImportError:
import urllib2
import urllib.parse
import re
import threading
import os, random, string
import subprocess
from subprocess import Popen, PIPE, STDOUT
# Check to see if we're in Sublime Text 3
ST3 = int(sublime.version()) >= 3000
master_password = None
openssl_enabled = True
#
# Pages / Templates Quick Panel Load
#
class MvSublimeTemplateEditorGetSitesCommand( sublime_plugin.WindowCommand ):
def run( self, type = 'pages' ):
global openssl_enabled
self.type = type
self.settings = sublime.load_settings( 'MvSublimeTemplateEditor.sublime-settings' )
if self.settings.has( 'disable_master_password' ):
openssl_enabled = not self.settings.get( 'disable_master_password' )
self.load_sites()
if openssl_enabled and not self.settings.has( 'password_verification' ):
sublime.error_message( 'Master password not set. Close this dialog and enter a master password' )
return self.set_master_password()
def load_sites( self ):
global master_password, openssl_enabled
self.sites = []
for site in self.settings.get( 'sites', [] ):
self.sites.append( site[ 'name' ] )
self.sites.append( 'Add Store' )
if openssl_enabled and master_password == None:
return self.prompt_master_pass()
self.show_sites()
def show_sites( self ):
sublime.set_timeout( lambda: self.window.show_quick_panel( self.sites, lambda index: self.site_callback( self.sites, index ) ) )
def prompt_master_pass( self ):
PasswordInputPanel( 'Enter Master Password', self.prompt_master_pass_callback )
def prompt_master_pass_callback( self, password ):
global master_password
try:
success, data, error_message = crypto( password, self.settings.get( 'password_verification' ), '-d' )
if not success:
sublime.error_message( error_message )
return self.prompt_master_pass()
elif data.decode( encoding='UTF-8' ) != 'VERIFIED':
sublime.error_message( 'Invalid master password' )
return self.prompt_master_pass()
except KeyError:
sublime.error_message( 'Master password not set. Close this dialog and enter a master password' )
return self.set_master_password()
master_password = password
self.show_sites()
def site_callback( self, sites, index ):
if index == -1:
return
if sites[ index ] == 'Add Store':
return self.add_store()
settings = None
site = sites[ index ]
try:
for site_settings in self.settings.get( 'sites', [] ):
if site_settings[ 'name' ] == site:
settings = site_settings
break
except KeyError:
sublime.error_message( 'Site not found' )
return
except Exception:
sublime.error_message( 'Invalid configuration file' )
return
if settings == None:
sublime.error_message( 'Site not found' )
return
if self.type == 'pages':
self.window.run_command( 'mv_sublime_template_editor_get_pages', { 'settings': settings } )
elif self.type == 'templates':
self.window.run_command( 'mv_sublime_template_editor_get_templates', { 'settings': settings } )
def set_master_password( self ):
PasswordInputPanel( 'Set Master Password', self.set_master_password_callback )
def set_master_password_callback( self, master_pass ):
global master_password
success, data, error_message = crypto( master_pass, 'VERIFIED', '-e' )
if not success:
return sublime.error_message( error_message )
master_password = master_pass
self.settings.set( 'password_verification', data.decode(encoding='UTF-8') )
sites = self.settings.get( 'sites' )
for site in sites:
success, encrypted_password, error_message = crypto( master_password, site[ 'store' ][ 'password' ], '-e' )
if success:
site[ 'store' ][ 'password_encrypted' ] = True
site[ 'store' ][ 'password' ] = encrypted_password.decode( encoding='UTF-8' )
self.settings.set( 'sites', sites )
sublime.save_settings( 'MvSublimeTemplateEditor.sublime-settings' )
self.show_sites()
def add_store( self ):
site = {}
site[ 'store' ] = {}
self.show_input_panel( 'Enter Store Name', '', lambda store_name: self.add_store_callback( site, store_name ), None, None )
def add_store_callback( self, site, store_name ):
site[ 'name' ] = store_name
self.add_store_template_location( site )
def add_store_template_location( self, site ):
self.show_input_panel( 'Enter Template Export Location', '', lambda entered_text: self.add_store_template_location_callback( site, entered_text ), None, None )
def add_store_template_location_callback( self, site, template_location = '/tmp/' ):
site[ 'local_exported_templates' ] = template_location
self.add_store_code( site )
def add_store_code( self , site ):
self.show_input_panel( 'Enter Store Code', '', lambda entered_text: self.add_store_code_callback( site, entered_text ), None, None )
def add_store_code_callback( self, site, store_code ):
site[ 'store' ][ 'store_code' ] = store_code
self.add_store_jsonurl( site )
def add_store_jsonurl( self, site ):
self.show_input_panel( 'Enter JSON URL to Store', '', lambda entered_text: self.add_store_jsonurl_callback( site, entered_text ), None, None )
def add_store_jsonurl_callback( self, site, json_url ):
site[ 'store' ][ 'json_url' ] = json_url
self.add_store_username( site )
def add_store_username( self, site ):
self.show_input_panel( 'Enter Username', '', lambda entered_text: self.add_store_username_callback( site, entered_text ), None, None )
def add_store_username_callback( self, site, username ):
site[ 'store' ][ 'username' ] = username
self.add_store_password( site )
def add_store_password( self, site ):
self.show_input_panel( 'Enter Password', '', lambda entered_text: self.add_store_password_callback( site, entered_text ), None, None )
def add_store_password_callback( self, site, password ):
global master_password, openssl_enabled
if openssl_enabled:
success, data, error_message = crypto( master_password, password, '-e' )
if not success:
return sublime.error_message( error_message )
password = data.decode( encoding='UTF-8' )
site[ 'store' ][ 'password_encrypted' ] = True
site[ 'store' ][ 'password' ] = password
site[ 'store' ][ 'timeout' ] = 15
sites = self.settings.get( 'sites' )
sites.append( site )
self.settings.set( 'sites', sites )
sublime.save_settings( 'MvSublimeTemplateEditor.sublime-settings' )
self.load_sites()
def show_input_panel( self, caption, initial_text, on_done, on_change = None, on_cancel = None ):
sublime.set_timeout( lambda: self.window.show_input_panel( caption, initial_text, on_done, on_change, on_cancel ), 10 )
class MvSublimeTemplateEditorGetPagesCommand( sublime_plugin.WindowCommand ):
def run( self, settings = None ):
self.settings = settings
if self.settings is None:
return self.window.run_command( 'mv_sublime_template_editor_get_sites', { 'type': 'pages' } )
thread = TemplateList_Load_Pages_Thread( self.settings, on_complete = self.pages_quick_panel )
thread.start()
ThreadProgress( thread, 'Loading pages', error_message = 'Failed loading pages' )
def pages_quick_panel( self, pages ):
entries = []
for page in pages:
entries.extend( [ '{0} - {1}' . format( page[ 'page_code' ], page[ 'page_name' ] ) ] )
self.show_quick_panel( entries, lambda index: self.pages_callback( pages, index ) )
def pages_callback( self, pages, index ):
if index == -1:
return
# Load templates for page
page_code = pages[ index ][ 'page_code' ]
self.window.run_command( 'mv_sublime_template_editor_get_page', { 'settings': self.settings, 'page_code': page_code } )
def show_quick_panel( self, entries, on_select, on_highlight = None ):
sublime.set_timeout( lambda: self.window.show_quick_panel( entries, on_select, on_highlight = on_highlight ), 10 )
class MvSublimeTemplateEditorGetPageCommand( sublime_plugin.WindowCommand ):
def run( self, settings = None, page_code = None ):
self.settings = settings
self.page_code = page_code
self.current_view = self.window.active_view()
self.selected_index = 0
self.template_load_initiated = False
settings = sublime.load_settings( 'MvSublimeTemplateEditor.sublime-settings' )
self.file_args = sublime.TRANSIENT
if self.page_code is None:
return
if settings is None:
return
thread = TemplateList_Load_Page_Thread( page_code, self.settings, on_complete = self.templates_quick_panel )
thread.start()
ThreadProgress( thread, 'Loading templates', error_message = 'Failed loading templates' )
def templates_quick_panel( self, templates ):
entries = []
for index, template in enumerate( templates ):
entries.extend( [ '{0}' . format( template[ 'display' ] ) ] )
if not self.template_load_initiated:
self.initiate_template_download( templates, template, index )
self.template_load_initiated = True
self.show_quick_panel( entries, lambda index: self.select_entry( templates, index ), lambda index: self.on_highlight( templates, index ), self.selected_index )
def select_entry( self, templates, index ):
if index == -1:
if self.current_view:
self.window.focus_view( self.current_view )
return
self.file_args = 0
self.selected_index = index
self.goto_file( templates[ index ], self.file_args )
def on_highlight( self, templates, index ):
if index == -1:
if self.current_view:
self.window.focus_view( self.current_view )
return
self.selected_index = index
self.goto_file( templates[ index ], self.file_args )
def initiate_template_download( self, templates, template, index ):
parameters = '&Module_Code=sublime_templateeditor&Module_Function=Template_Load_ID&ManagedTemplateVersion_ID={0}&TemporarySession=1' . format( template[ 'current_id' ] )
json_threadpool.add_request( self.settings, parameters, lambda record: self.download_template( template, record, index ) )
def download_template( self, template, record, index ):
record[ 'template_name' ] = template[ 'filename' ]
template[ 'record' ] = record
thread = Template_Write_File( template, self.settings.get( 'local_exported_templates', '' ), lambda ignore: self.download_template_callback( template, index ) )
thread.start()
def download_template_callback( self, template, index ):
if index == self.selected_index:
self.goto_file( template, self.file_args )
def goto_file( self, template, file_args = 0 ):
local_directory = self.settings.get( 'local_exported_templates', '' )
try:
file_name = '{0}' . format( template[ 'record' ][ 'template_name' ] )
except KeyError:
return # File hasn't loaded yet. Don't do anything.
local_file_path = os.path.join( local_directory, file_name )
view = self.window.open_file( local_file_path, file_args )
view_settings = view.settings()
view_settings.set( 'miva_managedtemplateversion', "true" )
view_settings.set( 'miva_managedtemplateversion_template', template[ 'record' ] )
view_settings.set( 'miva_settings', self.settings )
view_settings.set( 'miva_managedtemplateversion_page_code', self.page_code )
def show_quick_panel( self, entries, on_select, on_highlight = None, selected_index = 0 ):
sublime.set_timeout( lambda: self.window.show_quick_panel( entries, on_select, 0, selected_index, on_highlight ), 10 )
class MvSublimeTemplateEditorGetTemplatesCommand( sublime_plugin.WindowCommand ):
def run( self, settings = None ):
self.settings = settings
self.selected_index = 0
self.file_args = sublime.TRANSIENT
self.current_view = self.window.active_view()
if self.settings is None:
return self.window.run_command( 'mv_sublime_template_editor_get_sites', { 'type': 'templates' } )
thread = TemplateList_Load_All_Thread( self.settings, on_complete = self.templates_quick_panel )
thread.start()
ThreadProgress( thread, 'Loading templates', error_message = 'Failed loading templates' )
def templates_quick_panel( self, templates ):
entries = []
for template in templates:
entries.extend( [ 'Template file - {0}' . format( template[ 'filename' ] ) ] )
self.show_quick_panel( entries, lambda index: self.select_entry( templates, index ), lambda index: self.on_highlight( templates, index ), self.selected_index )
def select_entry( self, templates, index ):
if index == -1:
if self.current_view:
self.window.focus_view( self.current_view )
return
self.file_args = 0
self.selected_index = index
try:
dummy = templates[ index ][ 'record' ][ 'template_name' ]
except KeyError:
self.initiate_template_download( templates, templates[ index ], index )
# File hasn't loaded yet. Don't do anything.
return
self.goto_file( templates[ index ], self.file_args )
def on_highlight( self, templates, index ):
if index == -1:
if self.current_view:
self.window.focus_view( self.current_view )
return
self.selected_index = index
try:
dummy = templates[ index ][ 'record' ][ 'template_name' ]
except KeyError:
self.initiate_template_download( templates, templates[ index ], index )
# File hasn't loaded yet. Don't do anything.
return
self.goto_file( templates[ index ], self.file_args )
def initiate_template_download( self, templates, template, index ):
parameters = '&Module_Code=sublime_templateeditor&Module_Function=Template_Load_ID&ManagedTemplateVersion_ID={0}&TemporarySession=1' . format( template[ 'current_id' ] )
json_threadpool.add_request( self.settings, parameters, lambda record: self.download_template( template, record, index ) )
def download_template( self, template, record, index ):
record[ 'template_name' ] = template[ 'filename' ]
template[ 'record' ] = record
thread = Template_Write_File( template, self.settings.get( 'local_exported_templates', '' ), lambda ignore: self.download_template_callback( template, index ) )
thread.start()
def download_template_callback( self, template, index ):
if index == self.selected_index:
self.goto_file( template, self.file_args )
def goto_file( self, template, file_args = 0 ):
local_directory = self.settings.get( 'local_exported_templates', '' )
try:
file_name = '{0}' . format( template[ 'record' ][ 'template_name' ] )
except KeyError:
return # File hasn't loaded yet. Don't do anything.
local_file_path = os.path.join( local_directory, file_name )
view = self.window.open_file( local_file_path, file_args )
view_settings = view.settings()
view_settings.set( 'miva_managedtemplateversion', "true" )
view_settings.set( 'miva_managedtemplateversion_template', template[ 'record' ] )
view_settings.set( 'miva_settings', self.settings )
		# No page code to record here: these templates were loaded outside of a page context.
def show_quick_panel( self, entries, on_select, on_highlight = None, selected_index = 0 ):
sublime.set_timeout( lambda: self.window.show_quick_panel( entries, on_select, 0, selected_index, on_highlight ), 10 )
#
# Template Menu
#
class MvSublimeTemplateEditorTemplateMenu( sublime_plugin.WindowCommand ):
def run( self ):
self.view = self.window.active_view()
self.view_settings = self.view.settings()
if not self.view_settings.has( 'miva_managedtemplateversion' ):
return
self.settings = self.view_settings.get( 'miva_settings' )
if self.settings is None:
return
commands = [ 'Commit', 'Versions' ]
if self.view_settings.has( 'miva_managedtemplateversion_page_code' ):
commands.append( 'Templates In Page "{0}"' . format( self.view_settings.get( 'miva_managedtemplateversion_page_code' ) ) )
sublime.set_timeout( lambda: self.window.show_quick_panel( commands, lambda index: self.command_callback( commands, index ) ) )
def command_callback( self, commands, index ):
if index == -1:
return
if commands[ index ] == 'Commit':
self.on_save( self.view )
elif commands[ index ] == 'Templates In Page "{0}"' . format( self.view_settings.get( 'miva_managedtemplateversion_page_code' ) ):
self.window.run_command( 'mv_sublime_template_editor_get_page', { 'settings': self.settings, 'page_code': self.view_settings.get( 'miva_managedtemplateversion_page_code' ) } )
elif commands[ index ] == 'Versions':
thread = TemplateVersionList_Load_Template_Thread( self.view_settings.get( 'miva_managedtemplateversion_template' )[ 'templ_id' ], self.settings, on_complete = self.versions_quick_panel )
thread.start()
ThreadProgress( thread, 'Loading versions', error_message = 'Failed loading versions' )
def on_save( self, view ):
file_path = view.file_name()
settings = view.settings()
if not settings.has( 'miva_managedtemplateversion' ):
return
template = settings.get( 'miva_managedtemplateversion_template' )
source = view.substr( sublime.Region( 0, view.size() ) )
thread = Template_Update_ID( template[ 'templ_id' ], source, self.settings, on_complete = None )
thread.start()
ThreadProgress( thread, 'Uploading {0}' . format( template[ 'template_name' ] ), '{0} uploaded' . format( template[ 'template_name' ] ), 'Upload of {0} failed' . format( template[ 'template_name' ] ) )
def versions_quick_panel( self, templates ):
entries = []
for template in templates:
entries.extend( [ 'Note - {0}' . format( template[ 'notes' ] ) ] )
self.show_quick_panel( entries, lambda index: self.templates_callback( templates, index ) )
def templates_callback( self, templates, index ):
if index == -1:
return
filename = templates[ index ][ 'filename' ]
current_id = templates[ index ][ 'id' ]
thread = Template_Load_ID( current_id, "{0}-{1}" . format( filename, current_id ), self.settings, on_complete = self.download_template )
thread.start()
ThreadProgress( thread, 'Exporting {0}' . format( filename ), '{0} exported' . format( filename ), 'Export of {0} failed' . format( filename ) )
def download_template( self, template ):
local_directory = self.settings.get( 'local_exported_templates', '' )
file_name = '{0}.htm' . format ( template[ 'template_name' ] )
local_file_path = os.path.join( local_directory, file_name )
with open( local_file_path, 'wb' ) as fh:
fh.write( template[ 'source' ].encode( 'utf8' ) )
view = self.window.open_file( local_file_path )
view_settings = view.settings()
view_settings.set( 'miva_managedtemplateversion', "true" )
view_settings.set( 'miva_managedtemplateversion_template', template )
view_settings.set( 'miva_settings', self.settings )
def show_quick_panel( self, entries, on_select, on_highlight = None ):
sublime.set_timeout( lambda: self.window.show_quick_panel( entries, on_select, on_highlight = on_highlight ), 10 )
#
# Thread Functionality
#
class ThreadProgress():
def __init__( self, thread, message, success_message = '', error_message = '' ):
self.thread = thread
self.message = message
self.success_message = success_message
self.error_message = error_message
self.addend = 1
self.size = 8
sublime.set_timeout( lambda: self.run( 0 ), 100 )
def run( self, i ):
if not self.thread.is_alive():
if hasattr( self.thread, 'result' ) and not self.thread.result:
return sublime.status_message('')
if hasattr( self.thread, 'error' ) and self.thread.error:
return sublime.status_message( self.error_message )
return sublime.status_message( self.success_message )
before = i % self.size
after = ( self.size - 1 ) - before
sublime.status_message( '{0} [{1}={2}]' . format( self.message, ' ' * before, ' ' * after ) )
if not after:
self.addend = -1
if not before:
self.addend = 1
i += self.addend
sublime.set_timeout( lambda: self.run( i ), 100 )
class TemplateList_Load_Pages_Thread( threading.Thread ):
def __init__( self, settings, on_complete ):
self.settings = settings
self.on_complete = on_complete
self.error = False
threading.Thread.__init__( self )
def run( self ):
store_settings = self.settings.get( 'store' )
print( 'Retrieving pages' )
result, response, error = make_json_request( store_settings, 'Module', '&Count=0&Module_Code=sublime_templateeditor&Module_Function=TemplateList_Load_Pages&TemporarySession=1' )
if not result:
self.error = True
return sublime.error_message( error )
pages = response[ 'data' ][ 'data' ]
print( 'Retrieved {0} pages' . format( len( pages ) ) )
sublime.set_timeout( lambda: self.on_complete( pages ), 10 )
class TemplateList_Load_Page_Thread( threading.Thread ):
def __init__( self, page_code, settings, on_complete ):
self.page_code = page_code
self.settings = settings
self.on_complete = on_complete
self.error = False
threading.Thread.__init__( self )
def run( self ):
store_settings = self.settings.get( 'store' )
print( 'Retrieving templates' )
result, response, error = make_json_request( store_settings, 'Module', '&Count=0&Module_Code=sublime_templateeditor&Module_Function=TemplateList_Load_Page&Page_Code={0}&TemporarySession=1' . format( urllib.parse.quote_plus( self.page_code.encode( 'utf8' ) ) ) )
if not result:
self.error = True
return sublime.error_message( error )
templates = response[ 'data' ][ 'data' ]
print( 'Retrieved {0} templates' . format( len( templates ) ) )
sublime.set_timeout( lambda: self.on_complete( templates ), 10 )
class TemplateList_Load_All_Thread( threading.Thread ):
def __init__( self, settings, on_complete ):
self.settings = settings
self.on_complete = on_complete
self.error = False
threading.Thread.__init__( self )
def run( self ):
store_settings = self.settings.get( 'store' )
print( 'Retrieving templates' )
result, response, error = make_json_request( store_settings, 'Module', '&Count=0&Module_Code=sublime_templateeditor&Module_Function=TemplateList_Load_All&TemporarySession=1' )
if not result:
self.error = True
return sublime.error_message( error )
templates = response[ 'data' ][ 'data' ]
print( 'Retrieved {0} templates' . format( len( templates ) ) )
sublime.set_timeout( lambda: self.on_complete( templates ), 10 )
class TemplateVersionList_Load_Template_Thread( threading.Thread ):
def __init__( self, template_id, settings, on_complete ):
self.template_id = template_id
self.settings = settings
self.on_complete = on_complete
self.error = False
threading.Thread.__init__( self )
def run( self ):
store_settings = self.settings.get( 'store' )
print( 'Retrieving template versions' )
result, response, error = make_json_request( store_settings, 'Module', '&Count=0&Module_Code=sublime_templateeditor&Module_Function=TemplateVersionList_Load_Template&ManagedTemplate_ID={0}&TemporarySession=1' . format( self.template_id ) )
if not result:
self.error = True
return sublime.error_message( error )
templateversions = response[ 'data' ][ 'data' ]
print( 'Retrieved {0} template versions' . format( len( templateversions ) ) )
sublime.set_timeout( lambda: self.on_complete( templateversions ), 10 )
class Template_Load_ID( threading.Thread ):
def __init__( self, templ_id, template_name, settings, on_complete ):
self.templ_id = templ_id
self.template_name = template_name
self.settings = settings
self.on_complete = on_complete
self.error = False
threading.Thread.__init__( self )
def run( self ):
store_settings = self.settings.get( 'store' )
print( "Exporting {0}" . format( self.template_name ) )
result, response, error = make_json_request( store_settings, 'Module', '&Module_Code=sublime_templateeditor&Module_Function=Template_Load_ID&ManagedTemplateVersion_ID={0}&TemporarySession=1' . format( self.templ_id ) )
if not result:
self.error = True
return sublime.error_message( error )
template = response[ 'data' ]
template[ 'template_name' ] = self.template_name
sublime.set_timeout( lambda: self.on_complete( template ), 10 )
class Template_Write_File( threading.Thread ):
def __init__( self, template, local_directory, on_complete ):
self.template = template
self.local_directory = local_directory
self.on_complete = on_complete
threading.Thread.__init__( self )
def run( self ):
file_name = '{0}' . format ( self.template[ 'record' ][ 'template_name' ] )
local_file_path = os.path.join( self.local_directory, file_name )
with open( local_file_path, 'wb' ) as fh:
fh.write( self.template[ 'record' ][ 'source' ].encode( 'utf8' ) )
sublime.set_timeout( lambda: self.on_complete( self.template ), 10 )
class Template_Update_ID( threading.Thread ):
def __init__( self, managedtemplate_id, source, settings, on_complete ):
self.managedtemplate_id = managedtemplate_id
self.source = source
self.settings = settings
self.on_complete = on_complete
self.error = False
threading.Thread.__init__( self )
def run( self ):
store_settings = self.settings.get( 'store' )
source = urllib.parse.quote_plus( self.source )
result, response, error = make_json_request( store_settings, 'Module', '&Module_Code=sublime_templateeditor&Module_Function=Template_Update_ID&ManagedTemplate_ID={0}&Source={1}&TemporarySession=1' . format( self.managedtemplate_id, source ) )
if not result:
self.error = True
return sublime.error_message( error )
print( 'Page imported' )
class PasswordInputPanel():
def __init__( self, prompt, on_complete ):
self.window = sublime.active_window()
self.prompt = prompt
self.on_complete = on_complete
self.password = ''
self.stars = ''
self.window.show_input_panel( self.prompt, '', self.on_input, self.getpwd, None )
def getpwd( self, password ):
chg = password[len(self.stars):]
if len( password ) < len( self.password ):
new_password = self.password[:len( password )]
else:
new_password = self.password + chg
if self.password == new_password:
return
self.password = new_password
self.stars = "*" * len( password )
sublime.set_timeout( lambda: self.window.show_input_panel( self.prompt, self.stars, self.on_input, self.getpwd, None ), 10 )
def on_input( self, password ):
if self.password.strip() == "":
			sublime.error_message( "No password provided" )
return
sublime.set_timeout( lambda: self.on_complete( self.password.strip() ), 10 )
class JSON_Threadpool_Thread( threading.Thread ):
def __init__( self, settings, parameters, on_complete ):
self.settings = settings
self.parameters = parameters
self.on_complete = on_complete
self.error = False
threading.Thread.__init__( self )
def run( self ):
store_settings = self.settings.get( 'store' )
result, response, error = make_json_request( store_settings, 'Module', self.parameters )
if not result:
self.error = True
return sublime.error_message( error )
data = response[ 'data' ]
sublime.set_timeout( lambda: self.on_complete( data ), 10 )
class JSON_Threadpool():
def __init__( self, thread_count = 3 ):
		self.thread_count = thread_count
self.active_count = 0
self.running = False
self.queue = []
self.running_queue = []
def add_request( self, settings, parameters, on_complete = None ):
request = JSON_Threadpool_Thread( settings, parameters, lambda data: self.run_request_callback( data, on_complete ) )
self.queue.append( request )
self.run()
def run( self ):
if self.running or len( self.queue ) == 0:
return
self.running = True
for i in range( 0, self.thread_count ):
if len( self.queue ) > i:
request = self.queue.pop( 0 )
self.run_request( request )
def run_request( self, request ):
print( 'Running Request' )
request.start()
def run_request_callback( self, data, on_complete = None ):
if on_complete != None:
sublime.set_timeout( lambda: on_complete( data ), 10 )
if len( self.queue ) == 0:
self.running = False
return
request = self.queue.pop( 0 )
self.run_request( request )
json_threadpool = JSON_Threadpool()
#
# Helper Functions
#
#
# Encrypt/Decrypt using OpenSSL -aes128 and -base64
# EG similar to running this CLI command:
# echo "data" | openssl enc -e -aes128 -base64 -pass "pass:lolcats"
#
def crypto( password, data, enc_flag = '-e' ):
settings = sublime.load_settings( 'MvSublimeTemplateEditor.sublime-settings' )
cipher = settings.get('cipher')
openssl_command = os.path.normpath( settings.get('openssl_command') )
# pass the password as an ENV variable, for better security
envVar = ''.join( random.sample( string.ascii_uppercase, 23 ) )
os.environ[ envVar ] = password
_pass = "env:%s" % envVar
try:
if isinstance(data, str):
data_handled = data.encode( 'utf-8' )
else:
data_handled = data
startupinfo = None
if sublime.platform() == 'windows':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
openssl = Popen( [openssl_command, "enc", enc_flag, cipher, "-base64", "-pass", _pass], startupinfo=startupinfo, stdin=PIPE, stdout=PIPE, stderr=PIPE )
result, error = openssl.communicate( data_handled )
del os.environ[envVar] # get rid of the temporary ENV var
except IOError as e:
return False, None, 'Error: %s' % e
except OSError as e:
error_message = """
Please verify that you have installed OpenSSL.
Attempting to execute: %s
Error: %s
""" % (openssl_command, e[1])
return False, None, error_message
# probably a wrong password was entered
if error:
_err = error.splitlines()[0]
if ST3:
_err = str(_err)
if _err.find('unknown option') != -1:
return False, None, 'Error: ' + _err
elif _err.find("WARNING:") != -1:
# skip WARNING's
return True, result, None
return False, None, 'Error: Wrong password'
return True, result, None
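# Usage sketch (illustrative; only meaningful inside Sublime Text, since
# crypto() reads the cipher and openssl path from the plugin settings):
#
#     ok, encrypted, err = crypto( 'my-master-pass', 'secret data', '-e' )
#     if ok:
#         ok, plain, err = crypto( 'my-master-pass', encrypted, '-d' )
#         assert plain.decode( 'UTF-8' ) == 'secret data'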
def join_path( dir_path, file_path, server_type ):
platform = sublime.platform()
if server_type == 'windows':
if dir_path.endswith( '\\' ):
return dir_path + file_path
else:
return dir_path + '\\' + file_path
elif platform == 'windows':
if dir_path.endswith( '/' ):
return dir_path + file_path
else:
return dir_path + '/' + file_path
return os.path.join( dir_path, file_path )
def make_json_request( store_settings, function, other_data = '' ):
global master_password, openssl_enabled
store_settings.setdefault( 'store_code', '' )
store_settings.setdefault( 'json_url', '' )
store_settings.setdefault( 'username', '' )
store_settings.setdefault( 'password', '' )
store_settings.setdefault( 'timeout', 15 )
store_settings.setdefault( 'password_decrypted', '' )
store_code = store_settings[ 'store_code' ]
json_url = store_settings[ 'json_url' ]
username = store_settings[ 'username' ]
password = store_settings[ 'password' ]
timeout = store_settings[ 'timeout' ]
if openssl_enabled:
if not 'password_encrypted' in store_settings:
settings = sublime.load_settings( 'MvSublimeTemplateEditor.sublime-settings' )
sites = settings.get( 'sites' )
success, encrypted_password, error_message = crypto( master_password, password, '-e' )
			if success:
				# sites is a list, so locate the entry that matches this store code
				for site in sites:
					if site[ 'store' ].get( 'store_code' ) == store_code:
						site[ 'store' ][ 'password_encrypted' ] = True
						site[ 'store' ][ 'password' ] = encrypted_password.decode( encoding='UTF-8' )
						break
				settings.set( 'sites', sites )
				sublime.save_settings( 'MvSublimeTemplateEditor.sublime-settings' )
elif store_settings[ 'password_decrypted' ] != '':
password = store_settings[ 'password_decrypted' ]
else:
success, decrypted_password, error_message = crypto( master_password, password, '-d' )
if success:
password = decrypted_password.decode( encoding='UTF-8')
store_settings[ 'password_decrypted' ] = password
if not json_url.endswith( '?' ):
json_url += '?'
url = json_url + 'Store_Code={store_code}&Function={function}&Session_Type=admin&Username={username}&Password={password}' \
. format( store_code = urllib.parse.quote_plus( store_code ), function = urllib.parse.quote_plus( function ), username = urllib.parse.quote_plus( username ), password = urllib.parse.quote_plus( password ) )
print( url + other_data )
try:
req = urllib2.Request( url, other_data.encode( 'utf8' ), headers={'User-Agent' : "Magic Browser"} )
request = urllib2.urlopen( req, timeout = timeout )
except Exception as e:
print( 'Failed opening URL: {0}' . format( str( e ) ) )
return False, None, 'Failed to open URL'
try:
content = request.read().decode()
except Exception as e:
print( 'Failed decoding response: {0}' . format( str( e ) ) )
return False, None, 'Failed to decode response'
try:
json_response = json.loads( content )
except Exception as e:
print( 'Failed to parse JSON: {0}' . format( str( e ) ) )
return False, None, 'Failed to parse JSON response'
if 'success' not in json_response or json_response[ 'success' ] != 1:
print( 'JSON response was not a success {0}' . format( json_response ) )
return False, None, json_response[ 'error_message' ]
return True, json_response, None
| {
"content_hash": "eb82e36cd7f46ca9651fd7b670d20c8f",
"timestamp": "",
"source": "github",
"line_count": 938,
"max_line_length": 263,
"avg_line_length": 35.26012793176972,
"alnum_prop": 0.6869141924170042,
"repo_name": "rguisewite/MvSublimeTemplateEditor",
"id": "79ff0c0372c27f85dd618d42ffcbf53a9c41fcf8",
"size": "33074",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/MvSublimeTemplateEditor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "671"
},
{
"name": "Python",
"bytes": "33074"
}
],
"symlink_target": ""
} |
from mangopi.helper.decorators import memoize
from mangopi.helper.util import Util
class HasUrl(object):
@property
@memoize
def source(self):
return Util.getSourceCode(self.url)
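# Usage sketch (illustrative): any class that provides a ``url`` attribute can
# mix in HasUrl to get a memoized ``source`` property. ``ExamplePage`` below is
# a hypothetical consumer, not part of mangopi.
#
#     class ExamplePage(HasUrl):
#         def __init__(self, url):
#             self.url = url
#
#     page = ExamplePage('http://example.com')
#     html = page.source  # fetched once, then served from the memoize cache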
| {
"content_hash": "a0037f89ae4afd26f28206be04906452",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 45,
"avg_line_length": 22.22222222222222,
"alnum_prop": 0.73,
"repo_name": "jiaweihli/mangopi",
"id": "fd6aaf7b1c83edc2ec319142afc9d203ddf02b7a",
"size": "200",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mangopi/helper/traits/hasurl.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20107"
}
],
"symlink_target": ""
} |
"""
past.translation
==================
The ``past.translation`` package provides an import hook for Python 3 which
transparently runs ``futurize`` fixers over Python 2 code on import to convert
print statements into functions, etc.
It is intended to assist users in migrating to Python 3.x even if some
dependencies still only support Python 2.x.
Usage
-----
Once your Py2 package is installed in the usual module search path, the import
hook is invoked as follows:
>>> from past import autotranslate
>>> autotranslate('mypackagename')
Or:
>>> autotranslate(['mypackage1', 'mypackage2'])
You can unregister the hook using::
>>> from past.translation import remove_hooks
>>> remove_hooks()
Author: Ed Schofield.
Inspired by and based on ``uprefix`` by Vinay M. Sajip.
"""
import imp
import logging
import marshal
import os
import sys
import copy
from lib2to3.pgen2.parse import ParseError
from lib2to3.refactor import RefactoringTool
from libfuturize import fixes
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
myfixes = (list(fixes.libfuturize_fix_names_stage1) +
list(fixes.lib2to3_fix_names_stage1) +
list(fixes.libfuturize_fix_names_stage2) +
list(fixes.lib2to3_fix_names_stage2))
# We detect whether the code is Py2 or Py3 by applying certain lib2to3 fixers
# to it. If the diff is empty, it's Python 3 code.
py2_detect_fixers = [
# From stage 1:
'lib2to3.fixes.fix_apply',
# 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. and move to stage2
'lib2to3.fixes.fix_except',
'lib2to3.fixes.fix_execfile',
'lib2to3.fixes.fix_exitfunc',
'lib2to3.fixes.fix_funcattrs',
'lib2to3.fixes.fix_filter',
'lib2to3.fixes.fix_has_key',
'lib2to3.fixes.fix_idioms',
'lib2to3.fixes.fix_import', # makes any implicit relative imports explicit. (Use with ``from __future__ import absolute_import)
'lib2to3.fixes.fix_intern',
'lib2to3.fixes.fix_isinstance',
'lib2to3.fixes.fix_methodattrs',
'lib2to3.fixes.fix_ne',
'lib2to3.fixes.fix_numliterals', # turns 1L into 1, 0755 into 0o755
'lib2to3.fixes.fix_paren',
'lib2to3.fixes.fix_print',
'lib2to3.fixes.fix_raise', # uses incompatible with_traceback() method on exceptions
'lib2to3.fixes.fix_renames',
'lib2to3.fixes.fix_reduce',
# 'lib2to3.fixes.fix_set_literal', # this is unnecessary and breaks Py2.6 support
'lib2to3.fixes.fix_repr',
'lib2to3.fixes.fix_standarderror',
'lib2to3.fixes.fix_sys_exc',
'lib2to3.fixes.fix_throw',
'lib2to3.fixes.fix_tuple_params',
'lib2to3.fixes.fix_types',
'lib2to3.fixes.fix_ws_comma',
'lib2to3.fixes.fix_xreadlines',
# From stage 2:
'lib2to3.fixes.fix_basestring',
# 'lib2to3.fixes.fix_buffer', # perhaps not safe. Test this.
# 'lib2to3.fixes.fix_callable', # not needed in Py3.2+
# 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc.
'lib2to3.fixes.fix_exec',
# 'lib2to3.fixes.fix_future', # we don't want to remove __future__ imports
'lib2to3.fixes.fix_getcwdu',
# 'lib2to3.fixes.fix_imports', # called by libfuturize.fixes.fix_future_standard_library
# 'lib2to3.fixes.fix_imports2', # we don't handle this yet (dbm)
# 'lib2to3.fixes.fix_input',
# 'lib2to3.fixes.fix_itertools',
# 'lib2to3.fixes.fix_itertools_imports',
'lib2to3.fixes.fix_long',
# 'lib2to3.fixes.fix_map',
# 'lib2to3.fixes.fix_metaclass', # causes SyntaxError in Py2! Use the one from ``six`` instead
'lib2to3.fixes.fix_next',
'lib2to3.fixes.fix_nonzero', # TODO: add a decorator for mapping __bool__ to __nonzero__
# 'lib2to3.fixes.fix_operator', # we will need support for this by e.g. extending the Py2 operator module to provide those functions in Py3
'lib2to3.fixes.fix_raw_input',
# 'lib2to3.fixes.fix_unicode', # strips off the u'' prefix, which removes a potentially helpful source of information for disambiguating unicode/byte strings
# 'lib2to3.fixes.fix_urllib',
'lib2to3.fixes.fix_xrange',
# 'lib2to3.fixes.fix_zip',
]
class RTs:
"""
A namespace for the refactoring tools. This avoids creating these at
the module level, which slows down the module import. (See issue #117).
There are two possible grammars: with or without the print statement.
Hence we have two possible refactoring tool implementations.
"""
_rt = None
_rtp = None
_rt_py2_detect = None
_rtp_py2_detect = None
@staticmethod
def setup():
"""
Call this before using the refactoring tools to create them on demand
if needed.
"""
if None in [RTs._rt, RTs._rtp]:
RTs._rt = RefactoringTool(myfixes)
RTs._rtp = RefactoringTool(myfixes, {'print_function': True})
@staticmethod
def setup_detect_python2():
"""
Call this before using the refactoring tools to create them on demand
if needed.
"""
if None in [RTs._rt_py2_detect, RTs._rtp_py2_detect]:
RTs._rt_py2_detect = RefactoringTool(py2_detect_fixers)
RTs._rtp_py2_detect = RefactoringTool(py2_detect_fixers,
{'print_function': True})
# We need to find a prefix for the standard library, as we don't want to
# process any files there (they will already be Python 3).
#
# The following method is used by Sanjay Vinip in uprefix. This fails for
# ``conda`` environments:
# # In a non-pythonv virtualenv, sys.real_prefix points to the installed Python.
# # In a pythonv venv, sys.base_prefix points to the installed Python.
# # Outside a virtual environment, sys.prefix points to the installed Python.
# if hasattr(sys, 'real_prefix'):
# _syslibprefix = sys.real_prefix
# else:
# _syslibprefix = getattr(sys, 'base_prefix', sys.prefix)
# Instead, we use the portion of the path common to both the stdlib modules
# ``math`` and ``urllib``.
def splitall(path):
"""
Split a path into all components. From Python Cookbook.
"""
allparts = []
while True:
parts = os.path.split(path)
if parts[0] == path: # sentinel for absolute paths
allparts.insert(0, parts[0])
break
elif parts[1] == path: # sentinel for relative paths
allparts.insert(0, parts[1])
break
else:
path = parts[0]
allparts.insert(0, parts[1])
return allparts
def common_substring(s1, s2):
"""
Returns the longest common substring to the two strings, starting from the
left.
"""
chunks = []
path1 = splitall(s1)
path2 = splitall(s2)
for (dir1, dir2) in zip(path1, path2):
if dir1 != dir2:
break
chunks.append(dir1)
return os.path.join(*chunks)
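# For example (assuming POSIX-style paths):
#
#     common_substring('/usr/lib/python3.4/math.py',
#                      '/usr/lib/python3.4/urllib/parse.py')
#     # -> '/usr/lib/python3.4'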
# _stdlibprefix = common_substring(math.__file__, urllib.__file__)
def detect_python2(source, pathname):
"""
Returns a bool indicating whether we think the code is Py2
"""
RTs.setup_detect_python2()
try:
tree = RTs._rt_py2_detect.refactor_string(source, pathname)
except ParseError as e:
if e.msg != 'bad input' or e.value != '=':
raise
        tree = RTs._rtp_py2_detect.refactor_string(source, pathname)
if source != str(tree)[:-1]: # remove added newline
# The above fixers made changes, so we conclude it's Python 2 code
logger.debug('Detected Python 2 code: {0}'.format(pathname))
with open('/tmp/original_code.py', 'w') as f:
f.write('### Original code (detected as py2): %s\n%s' %
(pathname, source))
with open('/tmp/py2_detection_code.py', 'w') as f:
f.write('### Code after running py3 detection (from %s)\n%s' %
(pathname, str(tree)[:-1]))
return True
else:
logger.debug('Detected Python 3 code: {0}'.format(pathname))
with open('/tmp/original_code.py', 'w') as f:
f.write('### Original code (detected as py3): %s\n%s' %
(pathname, source))
try:
os.remove('/tmp/futurize_code.py')
except OSError:
pass
return False
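# Usage sketch (illustrative): feeding detect_python2 a snippet that still
# uses the Py2 print statement triggers the fixers and returns True, while a
# snippet that is already valid Py3 typically produces no diff:
#
#     detect_python2('print "hello"\n', '<example>')   # -> True
#     detect_python2('print("hello")\n', '<example>')  # -> False (no diff)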
class Py2Fixer(object):
"""
An import hook class that uses lib2to3 for source-to-source translation of
Py2 code to Py3.
"""
# See the comments on :class:future.standard_library.RenameImport.
# We add this attribute here so remove_hooks() and install_hooks() can
# unambiguously detect whether the import hook is installed:
PY2FIXER = True
def __init__(self):
self.found = None
self.base_exclude_paths = ['future', 'past']
self.exclude_paths = copy.copy(self.base_exclude_paths)
self.include_paths = []
def include(self, paths):
"""
Pass in a sequence of module names such as 'plotrique.plotting' that,
if present at the leftmost side of the full package name, would
specify the module to be transformed from Py2 to Py3.
"""
self.include_paths += paths
def exclude(self, paths):
"""
Pass in a sequence of strings such as 'mymodule' that, if
present at the leftmost side of the full package name, would cause
the module not to undergo any source transformation.
"""
self.exclude_paths += paths
def find_module(self, fullname, path=None):
logger.debug('Running find_module: {0}...'.format(fullname))
if '.' in fullname:
parent, child = fullname.rsplit('.', 1)
if path is None:
loader = self.find_module(parent, path)
mod = loader.load_module(parent)
path = mod.__path__
fullname = child
# Perhaps we should try using the new importlib functionality in Python
# 3.3: something like this?
# thing = importlib.machinery.PathFinder.find_module(fullname, path)
try:
self.found = imp.find_module(fullname, path)
except Exception as e:
            logger.debug('Py2Fixer could not find {0}'.format(fullname))
            logger.debug('Exception was: {0}'.format(e))
return None
self.kind = self.found[-1][-1]
if self.kind == imp.PKG_DIRECTORY:
self.pathname = os.path.join(self.found[1], '__init__.py')
elif self.kind == imp.PY_SOURCE:
self.pathname = self.found[1]
return self
def transform(self, source):
# This implementation uses lib2to3,
# you can override and use something else
# if that's better for you
# lib2to3 likes a newline at the end
RTs.setup()
source += '\n'
try:
tree = RTs._rt.refactor_string(source, self.pathname)
except ParseError as e:
if e.msg != 'bad input' or e.value != '=':
raise
tree = RTs._rtp.refactor_string(source, self.pathname)
# could optimise a bit for only doing str(tree) if
# getattr(tree, 'was_changed', False) returns True
return str(tree)[:-1] # remove added newline
def load_module(self, fullname):
logger.debug('Running load_module for {0}...'.format(fullname))
if fullname in sys.modules:
mod = sys.modules[fullname]
else:
if self.kind in (imp.PY_COMPILED, imp.C_EXTENSION, imp.C_BUILTIN,
imp.PY_FROZEN):
convert = False
# elif (self.pathname.startswith(_stdlibprefix)
# and 'site-packages' not in self.pathname):
# # We assume it's a stdlib package in this case. Is this too brittle?
# # Please file a bug report at https://github.com/PythonCharmers/python-future
# # if so.
# convert = False
# in theory, other paths could be configured to be excluded here too
elif any([fullname.startswith(path) for path in self.exclude_paths]):
convert = False
elif any([fullname.startswith(path) for path in self.include_paths]):
convert = True
else:
convert = False
if not convert:
logger.debug('Excluded {0} from translation'.format(fullname))
mod = imp.load_module(fullname, *self.found)
else:
logger.debug('Autoconverting {0} ...'.format(fullname))
mod = imp.new_module(fullname)
sys.modules[fullname] = mod
# required by PEP 302
mod.__file__ = self.pathname
mod.__name__ = fullname
mod.__loader__ = self
# This:
# mod.__package__ = '.'.join(fullname.split('.')[:-1])
# seems to result in "SystemError: Parent module '' not loaded,
# cannot perform relative import" for a package's __init__.py
# file. We use the approach below. Another option to try is the
# minimal load_module pattern from the PEP 302 text instead.
# Is the test in the next line more or less robust than the
# following one? Presumably less ...
# ispkg = self.pathname.endswith('__init__.py')
if self.kind == imp.PKG_DIRECTORY:
mod.__path__ = [ os.path.dirname(self.pathname) ]
mod.__package__ = fullname
else:
#else, regular module
mod.__path__ = []
mod.__package__ = fullname.rpartition('.')[0]
try:
cachename = imp.cache_from_source(self.pathname)
if not os.path.exists(cachename):
update_cache = True
else:
sourcetime = os.stat(self.pathname).st_mtime
cachetime = os.stat(cachename).st_mtime
update_cache = cachetime < sourcetime
# # Force update_cache to work around a problem with it being treated as Py3 code???
# update_cache = True
if not update_cache:
with open(cachename, 'rb') as f:
data = f.read()
try:
code = marshal.loads(data)
except Exception:
# pyc could be corrupt. Regenerate it
update_cache = True
if update_cache:
if self.found[0]:
source = self.found[0].read()
elif self.kind == imp.PKG_DIRECTORY:
with open(self.pathname) as f:
source = f.read()
if detect_python2(source, self.pathname):
source = self.transform(source)
with open('/tmp/futurized_code.py', 'w') as f:
f.write('### Futurized code (from %s)\n%s' %
(self.pathname, source))
code = compile(source, self.pathname, 'exec')
dirname = os.path.dirname(cachename)
try:
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(cachename, 'wb') as f:
data = marshal.dumps(code)
f.write(data)
except Exception: # could be write-protected
pass
exec(code, mod.__dict__)
except Exception as e:
# must remove module from sys.modules
del sys.modules[fullname]
raise # keep it simple
if self.found[0]:
self.found[0].close()
return mod
_hook = Py2Fixer()
def install_hooks(include_paths=(), exclude_paths=()):
if isinstance(include_paths, str):
include_paths = (include_paths,)
if isinstance(exclude_paths, str):
exclude_paths = (exclude_paths,)
assert len(include_paths) + len(exclude_paths) > 0, 'Pass at least one argument'
_hook.include(include_paths)
_hook.exclude(exclude_paths)
# _hook.debug = debug
enable = sys.version_info[0] >= 3 # enabled for all 3.x
if enable and _hook not in sys.meta_path:
sys.meta_path.insert(0, _hook) # insert at beginning. This could be made a parameter
# We could return the hook when there are ways of configuring it
#return _hook
def remove_hooks():
if _hook in sys.meta_path:
sys.meta_path.remove(_hook)
def detect_hooks():
"""
Returns True if the import hooks are installed, False if not.
"""
return _hook in sys.meta_path
# present = any([hasattr(hook, 'PY2FIXER') for hook in sys.meta_path])
# return present
class hooks(object):
"""
Acts as a context manager. Use like this:
>>> from past import translation
>>> with translation.hooks():
... import mypy2module
>>> import requests # py2/3 compatible anyway
>>> # etc.
"""
def __enter__(self):
self.hooks_were_installed = detect_hooks()
install_hooks()
return self
def __exit__(self, *args):
if not self.hooks_were_installed:
remove_hooks()
class suspend_hooks(object):
"""
Acts as a context manager. Use like this:
>>> from past import translation
>>> translation.install_hooks()
>>> import http.client
>>> # ...
>>> with translation.suspend_hooks():
>>> import requests # or others that support Py2/3
If the hooks were disabled before the context, they are not installed when
the context is left.
"""
def __enter__(self):
self.hooks_were_installed = detect_hooks()
remove_hooks()
return self
def __exit__(self, *args):
if self.hooks_were_installed:
install_hooks()
| {
"content_hash": "f7ddd39d6adec3f3885169d71b2f1eb1",
"timestamp": "",
"source": "github",
"line_count": 496,
"max_line_length": 163,
"avg_line_length": 37.082661290322584,
"alnum_prop": 0.5776110476811831,
"repo_name": "ryfeus/lambda-packs",
"id": "c7ae2b7a081be3c800ef229a4b760cca3d842e6b",
"size": "18417",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "H2O/ArchiveH2O/past/translation/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9768343"
},
{
"name": "C++",
"bytes": "76566960"
},
{
"name": "CMake",
"bytes": "191097"
},
{
"name": "CSS",
"bytes": "153538"
},
{
"name": "Cuda",
"bytes": "61768"
},
{
"name": "Cython",
"bytes": "3110222"
},
{
"name": "Fortran",
"bytes": "110284"
},
{
"name": "HTML",
"bytes": "248658"
},
{
"name": "JavaScript",
"bytes": "62920"
},
{
"name": "MATLAB",
"bytes": "17384"
},
{
"name": "Makefile",
"bytes": "152150"
},
{
"name": "Python",
"bytes": "549307737"
},
{
"name": "Roff",
"bytes": "26398"
},
{
"name": "SWIG",
"bytes": "142"
},
{
"name": "Shell",
"bytes": "7790"
},
{
"name": "Smarty",
"bytes": "4090"
},
{
"name": "TeX",
"bytes": "152062"
},
{
"name": "XSLT",
"bytes": "305540"
}
],
"symlink_target": ""
} |
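# Replaces the paths of the currently selected layer with copies of the
# paths from the glyph's last layer (Glyphs.app scripting API).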
l = Glyphs.font.selectedLayers[0]
l.paths = []
for p in l.parent.layers[-1].paths:
l.paths.append(p.copy())
| {
"content_hash": "243b07e54b8954d6d671b487ac07b4bd",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 35,
"avg_line_length": 22.2,
"alnum_prop": 0.6666666666666666,
"repo_name": "jenskutilek/Glyphs-Scripts",
"id": "41a2dd33f936fc4c605738cecc94b6630a37e841",
"size": "152",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Layers/Copy Paths From Last Layer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "78578"
}
],
"symlink_target": ""
} |
import six
#import numpy as np
import pandas as pd
from io import StringIO
#from exa import Series, TypedMeta
from exa import TypedMeta
from exatomic.core import Editor, Tensor
class Meta(TypedMeta):
tensor = Tensor
class RTensor(six.with_metaclass(Meta, Editor)):
"""
This is a simple script to read a rank-2 tensor file with frame, label and atom index
labels. The format for such a file is,
0: f=** l=** a=**
1: xx xy xz
2: yx yy yz
3: zx zy zz
4:
5: Same as above for a second tensor
"""
## Must make this into a class that looks like the XYZ and Cube
# classes. Must have something like parse_tensor.
# Then on the Tensor class there should be something that can
# be activated to find the eigenvalues and eigenvectors of the
# matrix to plot the basis vectors.
# Look at untitled1.ipynb for more info.
_to_universe = Editor.to_universe
def to_universe(self):
raise NotImplementedError("Tensor file format has no atom table")
def parse_tensor(self):
df = pd.read_csv(StringIO(str(self)), delim_whitespace=True, header=None,
skip_blank_lines=False)
#print(df)
try:
i=0
data = ''
while True:
a = df.loc[[i*5],:].values[0]
labels = []
for lbl in a:
d = lbl.split('=')
labels.append(d[1])
cols = ['xx','xy','xz','yx','yy','yz','zx','zy','zz']
af = pd.DataFrame([df.loc[[i*5+1,i*5+2,i*5+3],:].unstack().values], \
columns=cols)
                af['frame'] = labels[0] if labels[0] != '' else 0
                af['label'] = labels[1] if labels[1] != '' else None
                af['atom'] = labels[2] if labels[2] != '' else 0
if i >= 1:
data = pd.concat([data,af],keys=[o for o in range(i+1)])
#data = data.append(af)
print('tens.py--------')
print(data)
print('---------------')
else:
data = af
i+=1
except:
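            # indexing past the last tensor block raises an exception,
            # which this bare except treats as end-of-file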
print('tens.py--------')
print("Reached EOF reading {} tensor".format(i))
print(data)
print('---------------')
self.tensor = data
# @classmethod
# def from_universe(cls):
# pass
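# Usage sketch (file name and contents are hypothetical; RTensor is opened
# like any other exatomic Editor):
#
#   ed = RTensor('shielding.tens')   # file in the format documented above
#   ed.parse_tensor()
#   print(ed.tensor)                 # one row of xx..zz components per tensor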
| {
"content_hash": "c03748491e37fe65ba2c3d37b6b326f3",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 89,
"avg_line_length": 33.61333333333334,
"alnum_prop": 0.49147163823879414,
"repo_name": "tjduigna/exatomic",
"id": "c3edf0dc7d0aabc5ba8df9cef827f2797f79a609",
"size": "2659",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "exatomic/interfaces/tens.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "814"
},
{
"name": "JavaScript",
"bytes": "121181"
},
{
"name": "Jupyter Notebook",
"bytes": "13176"
},
{
"name": "Python",
"bytes": "724121"
},
{
"name": "Shell",
"bytes": "711"
}
],
"symlink_target": ""
} |
import sys
sys.path.append('../../../src/')
import cgmap as cg
import mdtraj as md
import md_check as check
############################### config #####################################
xyz_tolerance = 1e-6
input_traj = "dppc.trr"
input_top = "dppc.pdb"
input_traj_wrapped = "dppc_vmd_wrap.trr"
input_traj_wrapped_join = "dppc_vmd_wrap_join.trr"
input_maps = ["mapping_bead_1_dppc",
"mapping_bead_2_dppc",
"mapping_bead_3_dppc"]
output_traj = "dppc.trr"
output_traj_wrapped = "dppc_vmd_wrapped.trr"
output_traj_wrapped_join = "dppc_vmd_wrapped_join.trr"
output_top = "dppc.pdb"
reference_traj = "dppc.trr"
reference_top = "dppc.pdb"
output_dir = './output/'
input_dir = './input/'
reference_dir = './reference/'
#collection of names of molecules.
lipid_types = ['DPPC']
############################### config proc ################################
fq_input_maps = [ input_dir + loc for loc in input_maps ]
#read maps for each bead from files.
#list of lists of strings.
mapping_atom_names_dppc = [ [ l.strip() for l in open(mp_file,'r').readlines()]
for mp_file in fq_input_maps ]
#index strings for which to atom query the trajectory when making beads.
#list of lists of strings.
name_lists = [ " or ".join( ["name %s"%mn for mn in mapping_names ])
for mapping_names in mapping_atom_names_dppc ]
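# e.g. a bead mapped from atoms N, C13 and C14 yields the selection string
# "name N or name C13 or name C14" (atom names here are illustrative).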
#names of cg beads created.
label_lists = ['DPH','DPM','DPT']
############################### run ######################################
### pull in trajectories
trj = md.load(input_dir + input_traj, top=input_dir + input_top)
trj_wrapped = md.load(input_dir + input_traj_wrapped, top=input_dir + input_top)
trj_wrapped_join = md.load(input_dir + input_traj_wrapped_join, top=input_dir + input_top)
#the types of each molecule in the trajectory.
molecule_types = [lipid_types.index(r.name) for r in trj.top.residues]
#actual map command
cg_trj = cg.map_molecules( trj = trj,
selection_list = [ name_lists ],
bead_label_list = [ label_lists ],
molecule_types = molecule_types,
mapping_function = 'center',
center_postwrap = True)
cg_trj_wrapped = cg.map_molecules( trj = trj_wrapped,
selection_list = [ name_lists ],
bead_label_list = [ label_lists ],
molecule_types = molecule_types,
mapping_function = 'center',
center_postwrap = True)
cg_trj_wrapped_join = cg.map_molecules(trj = trj_wrapped_join,
selection_list = [ name_lists ],
bead_label_list = [ label_lists ],
molecule_types = molecule_types,
mapping_function = 'center',
center_postwrap = True)
cg_trj.save( output_dir + output_traj)
cg_trj_wrapped.save( output_dir + output_traj_wrapped)
cg_trj_wrapped_join.save(output_dir + output_traj_wrapped_join)
cg_trj[0].save(output_dir + output_top)
############################### check results ###############################
print("!!! WARNING !!!: Test performed with an absolute xyz tolerance of {}; there exist "
"higher levels of errors here for unknown reasons.".format(xyz_tolerance))
print("!!! WARNING !!!: Look at the actual statistics, and local test output, "
"to understand these effects.")
cg_trj = cg_trj.load(output_dir + output_traj, top=output_dir + output_top)
cg_trj_wrapped = cg_trj.load(output_dir + output_traj_wrapped, top=output_dir + output_top)
cg_trj_wrapped_join = cg_trj.load(output_dir + output_traj_wrapped_join,top=output_dir + output_top)
ref_cg_trj = cg_trj.load(reference_dir + reference_traj,
top=reference_dir + reference_top)
print("Checking completely unwrapped trajectory...")
result_base = check.md_content_equality(cg_trj,
ref_cg_trj,xyz_abs_tol=xyz_tolerance)
print("Checking completely wrapped trajectory...")
result_wrapped = check.md_content_equality(cg_trj_wrapped,
ref_cg_trj,xyz_abs_tol=xyz_tolerance)
print("Checking sanely unwrapped trajectory...")
result_wrapped_join = check.md_content_equality(cg_trj_wrapped_join,
ref_cg_trj,xyz_abs_tol=xyz_tolerance)
sys.exit(check.check_result_to_exitval(result_base & result_wrapped & result_wrapped_join))
| {
"content_hash": "b180abf6495808ebfb611590ba7b2a5b",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 102,
"avg_line_length": 42.482142857142854,
"alnum_prop": 0.5559058427910887,
"repo_name": "uchicago-voth/cgmap",
"id": "dc074a237466bb0dbcf11d6feda997e1dca2b058",
"size": "5137",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/molecular_map_test/same_molecules_cop_periodic_test/test_same_molecules_cop.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "64237"
},
{
"name": "Shell",
"bytes": "2111"
}
],
"symlink_target": ""
} |
from django.template import Library
from google.appengine.api import users
register = Library()
@register.simple_tag
def google_login_url(redirect='/'):
return users.create_login_url(redirect)
@register.simple_tag
def google_logout_url(redirect='/'):
return users.create_logout_url(redirect)
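# Template usage sketch (tag names match the functions above; whether the
# redirect argument may be omitted depends on the Django version's
# simple_tag argument handling):
#
#   {% load googletags %}
#   <a href="{% google_login_url '/dashboard/' %}">Sign in</a>
#   <a href="{% google_logout_url '/' %}">Sign out</a>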
| {
"content_hash": "92bc854ea10df55da9673868f86a7a64",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 44,
"avg_line_length": 25.25,
"alnum_prop": 0.7623762376237624,
"repo_name": "certik/chess",
"id": "9581d746185e5a3090afdba5f67044b6525755f3",
"size": "327",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "common/appenginepatch/ragendja/templatetags/googletags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "332405"
}
],
"symlink_target": ""
} |
"""Simple HTTP server based on the asyncore / asynchat framework
Under asyncore, every time a socket is created it enters a table which is
scanned through select calls by the asyncore.loop() function
All events (a client connecting to a server socket, a client sending data,
a server receiving data) is handled by the instances of classes derived
from asyncore.dispatcher
Here the server is represented by an instance of the Server class
When a client connects to it, its handle_accept() method creates an
instance of RequestHandler, one for each HTTP request. It is derived
from asynchat.async_chat, a class where incoming data on the connection
is processed when a "terminator" is received. The terminator can be :
- a string : here we'll use the string \r\n\r\n to handle the HTTP request
line and the HTTP headers
- an integer (n) : the data is processed when n bytes have been read. This
will be used for HTTP POST requests
The data is processed by a method called found_terminator. In RequestHandler,
found_terminator is first set to handle_request_line to handle the HTTP
request line (including the decoding of the query string) and the headers.
If the method is POST, terminator is set to the number of bytes to read
(the content-length header), and found_terminator is set to handle_post_data
After that, the handle_data() method is called and the connection is closed
Subclasses of RequestHandler only have to override the handle_data() method
"""
import asynchat, asyncore, socket, SimpleHTTPServer, select, urllib
import posixpath, sys, cgi, cStringIO, os, traceback, shutil
class CI_dict(dict):
"""Dictionary with case-insensitive keys
Replacement for the deprecated mimetools.Message class
"""
def __init__(self, infile, *args):
self._ci_dict = {}
lines = infile.readlines()
for line in lines:
k,v=line.split(":",1)
self._ci_dict[k.lower()] = self[k] = v.strip()
self.headers = self.keys()
def getheader(self,key,default=""):
return self._ci_dict.get(key.lower(),default)
def get(self,key,default=""):
return self._ci_dict.get(key.lower(),default)
def __getitem__(self,key):
return self._ci_dict[key.lower()]
def __contains__(self,key):
return key.lower() in self._ci_dict
class socketStream:
def __init__(self,sock):
"""Initiate a socket (non-blocking) and a buffer"""
self.sock = sock
self.buffer = cStringIO.StringIO()
self.closed = 1 # compatibility with SocketServer
def write(self, data):
"""Buffer the input, then send as many bytes as possible"""
self.buffer.write(data)
if self.writable():
buff = self.buffer.getvalue()
# next try/except clause suggested by Robert Brown
try:
sent = self.sock.send(buff)
except:
# Catch socket exceptions and abort
# writing the buffer
sent = len(data)
# reset the buffer to the data that has not yet be sent
self.buffer=cStringIO.StringIO()
self.buffer.write(buff[sent:])
def finish(self):
"""When all data has been received, send what remains
in the buffer"""
data = self.buffer.getvalue()
# send data
while len(data):
while not self.writable():
pass
sent = self.sock.send(data)
data = data[sent:]
def writable(self):
"""Used as a flag to know if something can be sent to the socket"""
return select.select([],[self.sock],[])[1]
class RequestHandler(asynchat.async_chat,
SimpleHTTPServer.SimpleHTTPRequestHandler):
protocol_version = "HTTP/1.1"
MessageClass = CI_dict
def __init__(self,conn,addr,server):
asynchat.async_chat.__init__(self,conn)
self.client_address = addr
self.connection = conn
self.server = server
# set the terminator : when it is received, this means that the
# http request is complete ; control will be passed to
# self.found_terminator
self.set_terminator ('\r\n\r\n')
self.rfile = cStringIO.StringIO()
self.found_terminator = self.handle_request_line
self.request_version = "HTTP/1.1"
# buffer the response and headers to avoid several calls to select()
self.wfile = cStringIO.StringIO()
def collect_incoming_data(self,data):
"""Collect the data arriving on the connexion"""
self.rfile.write(data)
def prepare_POST(self):
"""Prepare to read the request body"""
bytesToRead = int(self.headers.getheader('content-length'))
# set terminator to length (will read bytesToRead bytes)
self.set_terminator(bytesToRead)
self.rfile = cStringIO.StringIO()
# control will be passed to a new found_terminator
self.found_terminator = self.handle_post_data
def handle_post_data(self):
"""Called when a POST request body has been read"""
self.rfile.seek(0)
self.do_POST()
self.finish()
def do_GET(self):
"""Begins serving a GET request"""
# nothing more to do before handle_data()
self.body = {}
self.handle_data()
def do_POST(self):
"""Begins serving a POST request. The request data must be readable
on a file-like object called self.rfile"""
ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
self.body = cgi.FieldStorage(fp=self.rfile,
headers=self.headers, environ = {'REQUEST_METHOD':'POST'},
keep_blank_values = 1)
self.handle_data()
def handle_data(self):
"""Class to override"""
f = self.send_head()
if f:
self.copyfile(f, self.wfile)
def handle_request_line(self):
"""Called when the http request line and headers have been received"""
# prepare attributes needed in parse_request()
self.rfile.seek(0)
self.raw_requestline = self.rfile.readline()
self.parse_request()
if self.command in ['GET','HEAD']:
# if method is GET or HEAD, call do_GET or do_HEAD and finish
method = "do_"+self.command
if hasattr(self,method):
getattr(self,method)()
self.finish()
elif self.command=="POST":
# if method is POST, call prepare_POST, don't finish yet
self.prepare_POST()
else:
self.send_error(501, "Unsupported method (%s)" %self.command)
def end_headers(self):
"""Send the blank line ending the MIME headers, send the buffered
response and headers on the connection, then set self.wfile to
this connection
This is faster than sending the response line and each header
separately because of the calls to select() in socketStream"""
if self.request_version != 'HTTP/0.9':
self.wfile.write("\r\n")
self.start_resp = cStringIO.StringIO(self.wfile.getvalue())
self.wfile = socketStream(self.connection)
self.copyfile(self.start_resp, self.wfile)
def handle_error(self):
traceback.print_exc(sys.stderr)
self.close()
def copyfile(self, source, outputfile):
"""Copy all data between two file objects
Set a big buffer size"""
shutil.copyfileobj(source, outputfile, length = 128*1024)
def finish(self):
"""Send data, then close"""
try:
self.wfile.finish()
except AttributeError:
# if end_headers() wasn't called, wfile is a StringIO
# this happens for error 404 in self.send_head() for instance
self.wfile.seek(0)
self.copyfile(self.wfile, socketStream(self.connection))
self.close()
class Server(asyncore.dispatcher):
"""Copied from http_server in medusa"""
def __init__ (self, ip, port,handler):
self.ip = ip
self.port = port
self.handler = handler
asyncore.dispatcher.__init__ (self)
self.create_socket (socket.AF_INET, socket.SOCK_STREAM)
self.set_reuse_addr()
self.bind ((ip, port))
# lower this to 5 if your OS complains
self.listen (1024)
def handle_accept (self):
try:
conn, addr = self.accept()
except socket.error:
self.log_info ('warning: server accept() threw an exception', 'warning')
return
except TypeError:
self.log_info ('warning: server accept() threw EWOULDBLOCK', 'warning')
return
        # creates an instance of the handler class to handle the
        # request/response on the incoming connection
self.handler(conn,addr,self)
if __name__=="__main__":
# launch the server on the specified port
port = 8081
s=Server('',port,RequestHandler)
print "SimpleAsyncHTTPServer running on port %s" %port
try:
asyncore.loop(timeout=2)
except KeyboardInterrupt:
print "Crtl+C pressed. Shutting down."
| {
"content_hash": "c4ac22f8cc5d9965a320107d36a951c5",
"timestamp": "",
"source": "github",
"line_count": 248,
"max_line_length": 84,
"avg_line_length": 37.346774193548384,
"alnum_prop": 0.6238393435543079,
"repo_name": "ActiveState/code",
"id": "3a756853f4d4051f4543f058dc6e0b1c1dc255cb",
"size": "9262",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "recipes/Python/259148_Simple_HTTP_server_based/recipe-259148.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "35894"
},
{
"name": "C",
"bytes": "56048"
},
{
"name": "C++",
"bytes": "90880"
},
{
"name": "HTML",
"bytes": "11656"
},
{
"name": "Java",
"bytes": "57468"
},
{
"name": "JavaScript",
"bytes": "181218"
},
{
"name": "PHP",
"bytes": "250144"
},
{
"name": "Perl",
"bytes": "37296"
},
{
"name": "Perl 6",
"bytes": "9914"
},
{
"name": "Python",
"bytes": "17387779"
},
{
"name": "Ruby",
"bytes": "40233"
},
{
"name": "Shell",
"bytes": "190732"
},
{
"name": "Tcl",
"bytes": "674650"
}
],
"symlink_target": ""
} |
"""empty message
Revision ID: a92d9d023624
Revises: f819f3f77f30
Create Date: 2016-10-08 01:17:30.083109
"""
# revision identifiers, used by Alembic.
revision = 'a92d9d023624'
down_revision = 'f819f3f77f30'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.rename_table('shortenedURL', 'targetURL')
def downgrade():
op.rename_table('targetURL', 'shortenedURL')
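# Applying this revision renames table shortenedURL to targetURL; the
# downgrade reverses it (e.g. via ``alembic upgrade a92d9d023624`` /
# ``alembic downgrade f819f3f77f30``).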
| {
"content_hash": "65b12eb91b211caeef19ab9cdd7bd2f7",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 48,
"avg_line_length": 18.61904761904762,
"alnum_prop": 0.7365728900255755,
"repo_name": "piotr-rusin/url-shortener",
"id": "b9a357c3ecb08fd44955c68469cba7d6f87c2304",
"size": "391",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "migrations/versions/a92d9d023624_.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2536"
},
{
"name": "HTML",
"bytes": "2411"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "76763"
}
],
"symlink_target": ""
} |
"""
minio.api
~~~~~~~~~~~~
This module implements the API.
:copyright: (c) 2015, 2016 by Minio, Inc.
:license: Apache 2.0, see LICENSE for more details.
"""
# Standard python packages
from __future__ import absolute_import
import platform
from time import mktime, strptime
from datetime import datetime, timedelta
import io
import json
import os
import itertools
# Dependencies
import urllib3
import certifi
# Internal imports
from . import __title__, __version__
from .compat import urlsplit, range, urlencode, basestring
from .error import ResponseError, InvalidArgumentError, InvalidSizeError
from .definitions import Object, UploadPart
from .parsers import (parse_list_buckets,
parse_list_objects,
parse_list_objects_v2,
parse_list_parts,
parse_copy_object,
parse_list_multipart_uploads,
parse_new_multipart_upload,
parse_location_constraint,
parse_multipart_upload_result,
parse_get_bucket_notification,
parse_multi_object_delete_response)
from .helpers import (get_target_url, is_non_empty_string,
is_valid_endpoint,
get_sha256_hexdigest, get_md5_base64digest, Hasher,
optimal_part_info,
is_valid_bucket_name, PartMetadata, parts_manager,
is_valid_bucket_notification_config,
mkdir_p, dump_http)
from .helpers import (MAX_MULTIPART_OBJECT_SIZE,
MIN_OBJECT_SIZE)
from .signer import (sign_v4, presign_v4,
generate_credential_string,
post_presign_signature, _SIGN_V4_ALGORITHM)
from .xml_marshal import (xml_marshal_bucket_constraint,
xml_marshal_complete_multipart_upload,
xml_marshal_bucket_notifications,
xml_marshal_delete_objects)
from .limited_reader import LimitedReader
from . import policy
# Comment format.
_COMMENTS = '({0}; {1})'
# App info format.
_APP_INFO = '{0}/{1}'
# Minio (OS; ARCH) LIB/VER APP/VER .
_DEFAULT_USER_AGENT = 'Minio {0} {1}'.format(
_COMMENTS.format(platform.system(),
platform.machine()),
_APP_INFO.format(__title__,
__version__))
_SEVEN_DAYS_SECONDS = 604800 # 7days
class Minio(object):
"""
Constructs a :class:`Minio <Minio>`.
Examples:
client = Minio('play.minio.io:9000')
client = Minio('s3.amazonaws.com', 'ACCESS_KEY', 'SECRET_KEY')
:param endpoint: Hostname of the cloud storage server.
:param access_key: Access key to sign self._http.request with.
:param secret_key: Secret key to sign self._http.request with.
:param secure: Set this value if wish to make secure requests.
Default is True.
:return: :class:`Minio <Minio>` object
"""
def __init__(self, endpoint, access_key=None,
secret_key=None, secure=True):
# Validate endpoint.
is_valid_endpoint(endpoint)
# Default is a secured connection.
endpoint_url = 'https://' + endpoint
if not secure:
endpoint_url = 'http://' + endpoint
url_components = urlsplit(endpoint_url)
self._region_map = dict()
self._endpoint_url = url_components.geturl()
self._access_key = access_key
self._secret_key = secret_key
self._user_agent = _DEFAULT_USER_AGENT
self._trace_output_stream = None
self._http = urllib3.PoolManager(
cert_reqs='CERT_REQUIRED',
ca_certs=certifi.where()
)
# Set application information.
def set_app_info(self, app_name, app_version):
"""
Sets your application name and version to
default user agent in the following format.
Minio (OS; ARCH) LIB/VER APP/VER
Example:
client.set_app_info('my_app', '1.0.2')
:param app_name: application name.
:param app_version: application version.
"""
if not (app_name and app_version):
raise ValueError('app_name and app_version cannot be empty.')
app_info = _APP_INFO.format(app_name,
app_version)
self._user_agent = ' '.join([_DEFAULT_USER_AGENT, app_info])
# enable HTTP trace.
def trace_on(self, stream):
"""
        Enable HTTP trace.
        :param stream: Stream where trace output is written.
"""
if not stream:
raise ValueError('Input stream for trace output is invalid.')
# Save new output stream.
self._trace_output_stream = stream
# disable HTTP trace.
def trace_off(self):
"""
Disable HTTP trace.
"""
self._trace_output_stream = None
# Bucket level
def make_bucket(self, bucket_name, location='us-east-1'):
"""
Make a new bucket on the server.
Optionally include Location.
['us-east-1', 'us-west-1', 'us-west-2', 'eu-west-1', 'eu-central-1',
'ap-southeast-1', 'ap-southeast-2', 'ap-northeast-1', 'sa-east-1',
'cn-north-1']
Examples:
minio.make_bucket('foo')
minio.make_bucket('foo', 'us-west-1')
:param bucket_name: Bucket to create on server
:param location: Location to create bucket on
"""
is_valid_bucket_name(bucket_name)
method = 'PUT'
# Set user agent once before the request.
headers = {'User-Agent': self._user_agent}
content = None
if location and location != 'us-east-1':
content = xml_marshal_bucket_constraint(location)
headers['Content-Length'] = str(len(content))
content_sha256_hex = get_sha256_hexdigest(content)
if content:
headers['Content-Md5'] = get_md5_base64digest(content)
# In case of Amazon S3. The make bucket issued on already
# existing bucket would fail with 'AuthorizationMalformed'
# error if virtual style is used. So we default to 'path
# style' as that is the preferred method here. The final
# location of the 'bucket' is provided through XML
# LocationConstraint data with the request.
# Construct target url.
url = self._endpoint_url + '/' + bucket_name + '/'
# Get signature headers if any.
headers = sign_v4(method, url, 'us-east-1',
headers, self._access_key,
self._secret_key, content_sha256_hex)
response = self._http.urlopen(method, url,
body=content,
headers=headers,
preload_content=False)
if response.status != 200:
response_error = ResponseError(response)
raise response_error.put(bucket_name)
self._set_bucket_region(bucket_name, region=location)
def list_buckets(self):
"""
List all buckets owned by the user.
Example:
bucket_list = minio.list_buckets()
for bucket in bucket_list:
print(bucket.name, bucket.created_date)
:return: An iterator of buckets owned by the current user.
"""
method = 'GET'
url = get_target_url(self._endpoint_url)
# Set user agent once before the request.
headers = {'User-Agent': self._user_agent}
# default for all requests.
region = 'us-east-1'
# Get signature headers if any.
headers = sign_v4(method, url, region,
headers, self._access_key,
self._secret_key, None)
response = self._http.urlopen(method, url,
body=None,
headers=headers,
preload_content=False)
if self._trace_output_stream:
dump_http(method, url, headers, response,
self._trace_output_stream)
if response.status != 200:
response_error = ResponseError(response)
raise response_error.get()
return parse_list_buckets(response.data)
def bucket_exists(self, bucket_name):
"""
Check if the bucket exists and if the user has access to it.
:param bucket_name: To test the existence and user access.
:return: True on success.
"""
is_valid_bucket_name(bucket_name)
try:
self._url_open('HEAD', bucket_name=bucket_name, headers={})
# If the bucket has not been created yet, Minio will return a "NoSuchBucket" error.
except ResponseError as e:
if e.code == 'NoSuchBucket':
return False
raise
return True
def remove_bucket(self, bucket_name):
"""
Remove a bucket.
:param bucket_name: Bucket to remove
"""
is_valid_bucket_name(bucket_name)
self._url_open('DELETE', bucket_name=bucket_name, headers={})
# Make sure to purge bucket_name from region cache.
self._delete_bucket_region(bucket_name)
def _get_bucket_policy(self, bucket_name):
policy_dict = {}
try:
response = self._url_open("GET",
bucket_name=bucket_name,
query={"policy": ""},
headers={})
policy_dict = json.loads(response.read().decode('utf-8'))
except ResponseError as e:
# Ignore 'NoSuchBucketPolicy' error.
if e.code != 'NoSuchBucketPolicy':
raise
return policy_dict
def get_bucket_policy(self, bucket_name, prefix=""):
"""
Get bucket policy of given bucket name.
:param bucket_name: Bucket name.
:param prefix: Object prefix.
"""
is_valid_bucket_name(bucket_name)
policy_dict = self._get_bucket_policy(bucket_name)
# Normalize statements.
statements = []
policy.append_statements(statements, policy_dict.get('Statement', []))
return policy.get_policy(statements, bucket_name, prefix)
def set_bucket_policy(self, bucket_name, prefix, policy_access):
"""
Set bucket policy of given bucket name and object prefix.
:param bucket_name: Bucket name.
        :param prefix: Object prefix.
        :param policy_access: Policy to apply, a :class:`policy.Policy` value.
        """
is_valid_bucket_name(bucket_name)
policy_dict = self._get_bucket_policy(bucket_name)
if policy_access == policy.Policy.NONE and not policy_dict:
return
if not policy_dict:
policy_dict = {'Statement': [],
"Version": "2012-10-17"}
# Normalize statements.
statements = []
policy.append_statements(statements, policy_dict['Statement'])
statements = policy.set_policy(statements, policy_access,
bucket_name, prefix)
if not statements:
self._url_open("DELETE",
bucket_name=bucket_name,
query={"policy": ""},
headers={})
else:
policy_dict['Statement'] = statements
content = json.dumps(policy_dict)
headers = {
'Content-Length': str(len(content)),
'Content-Md5': get_md5_base64digest(content)
}
content_sha256_hex = get_sha256_hexdigest(content)
self._url_open("PUT",
bucket_name=bucket_name,
query={"policy": ""},
headers=headers,
body=content,
content_sha256=content_sha256_hex)
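    # Usage sketch for the two policy calls above (bucket and prefix names
    # are illustrative; Policy values come from minio.policy as used in this
    # module):
    #
    #   from minio.policy import Policy
    #   client.set_bucket_policy('my-bucket', 'downloads/', Policy.READ_ONLY)
    #   print(client.get_bucket_policy('my-bucket', 'downloads/'))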
def get_bucket_notification(self, bucket_name):
"""
Get notifications configured for the given bucket.
:param bucket_name: Bucket name.
"""
is_valid_bucket_name(bucket_name)
response = self._url_open(
"GET",
bucket_name=bucket_name,
query={"notification": ""},
headers={}
)
data = response.read().decode('utf-8')
return parse_get_bucket_notification(data)
def set_bucket_notification(self, bucket_name, notifications):
"""
Set the given notifications on the bucket.
:param bucket_name: Bucket name.
:param notifications: Notifications structure
"""
is_valid_bucket_name(bucket_name)
is_valid_bucket_notification_config(notifications)
content = xml_marshal_bucket_notifications(notifications)
headers = {
'Content-Length': str(len(content)),
'Content-Md5': get_md5_base64digest(content)
}
content_sha256_hex = get_sha256_hexdigest(content)
self._url_open(
'PUT',
bucket_name=bucket_name,
query={"notification": ""},
headers=headers,
body=content,
content_sha256=content_sha256_hex
)
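    # Notification structure sketch (ARN, Id and bucket name are
    # illustrative; the accepted keys are those checked by
    # is_valid_bucket_notification_config):
    #
    #   client.set_bucket_notification('my-bucket', {
    #       'QueueConfigurations': [{
    #           'Id': '1',
    #           'Arn': 'arn:minio:sqs:us-east-1:1:amqp',
    #           'Events': ['s3:ObjectCreated:*'],
    #       }]
    #   })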
def remove_all_bucket_notification(self, bucket_name):
"""
        Removes all bucket notification configs configured
        previously; this call disables event notifications
        on a bucket. This operation cannot be undone; to
        set notifications again you should use
        ``set_bucket_notification``
:param bucket_name: Bucket name.
"""
is_valid_bucket_name(bucket_name)
content_bytes = xml_marshal_bucket_notifications({})
headers = {
'Content-Length': str(len(content_bytes)),
'Content-Md5': get_md5_base64digest(content_bytes)
}
content_sha256_hex = get_sha256_hexdigest(content_bytes)
self._url_open(
'PUT',
bucket_name=bucket_name,
query={"notification": ""},
headers=headers,
body=content_bytes,
content_sha256=content_sha256_hex
)
def listen_bucket_notification(self, bucket_name, prefix, suffix, events):
"""
        Yields new event notifications on a bucket; the caller should iterate
        to read new notifications.
        NOTE: Notification is retried in case of `SyntaxError`, otherwise
        the function raises an exception.
        :param bucket_name: Bucket name to listen event notifications from.
        :param prefix: Object key prefix to filter notifications for.
        :param suffix: Object key suffix to filter notifications for.
        :param events: List of event types to enable notifications for.
"""
is_valid_bucket_name(bucket_name)
url_components = urlsplit(self._endpoint_url)
if url_components.hostname == 's3.amazonaws.com':
raise InvalidArgumentError(
'Listening for event notifications on a bucket is a Minio '
'specific extension to bucket notification API. It is not '
'supported by Amazon S3')
query = {
'prefix': prefix,
'suffix': suffix,
'events': events,
}
while True:
response = self._url_open('GET', bucket_name=bucket_name,
query=query, headers={})
try:
for line in response.stream():
event = json.loads(line)
if event['Records'] is not None:
yield event
except SyntaxError:
response.close()
continue
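    # Usage sketch (bucket, prefix and suffix are illustrative; event names
    # follow the S3 notification event format):
    #
    #   events = client.listen_bucket_notification(
    #       'my-bucket', 'photos/', '.jpg',
    #       ['s3:ObjectCreated:*', 's3:ObjectRemoved:*'])
    #   for notification in events:
    #       print(notification)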
def _get_upload_id(self, bucket_name, object_name, content_type):
"""
Get previously uploaded upload id for object name or initiate a request
to fetch a new upload id.
:param bucket_name: Bucket name where the incomplete upload resides.
:param object_name: Object name for which upload id is requested.
:param content_type: Content type of the object.
"""
recursive = True
current_uploads = self._list_incomplete_uploads(bucket_name,
object_name,
recursive,
is_aggregate_size=False)
matching_uploads = [upload
for upload in current_uploads
if object_name == upload.object_name]
# If no matching uploads its a new multipart upload.
if not len(matching_uploads):
upload_id = self._new_multipart_upload(bucket_name,
object_name,
content_type)
else:
incomplete_upload = max(matching_uploads, key=lambda x: x.initiated)
upload_id = incomplete_upload.upload_id
return upload_id
def fput_object(self, bucket_name, object_name, file_path,
content_type='application/octet-stream'):
"""
Add a new object to the cloud storage server.
Examples:
minio.fput_object('foo', 'bar', 'filepath', 'text/plain')
        :param bucket_name: Bucket to upload the object to.
        :param object_name: Name of the object to upload.
:param file_path: Local file path to be uploaded.
:param content_type: Content type of the object.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
is_non_empty_string(file_path)
# save file_size.
file_size = os.stat(file_path).st_size
if file_size > MAX_MULTIPART_OBJECT_SIZE:
            raise InvalidArgumentError('Input content size is bigger '
                                       'than the allowed maximum of 5TiB.')
# Open file in 'read' mode.
file_data = io.open(file_path, mode='rb')
if file_size <= MIN_OBJECT_SIZE:
return self._do_put_object(bucket_name, object_name,
file_data.read(file_size),
content_type=content_type)
# Calculate optimal part info.
total_parts_count, part_size, last_part_size = optimal_part_info(file_size)
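        # optimal_part_info picks a part size so that the upload fits within
        # the multipart part-count limit (10,000 parts on S3-compatible APIs).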
# get upload id.
upload_id = self._get_upload_id(bucket_name, object_name, content_type)
# Initialize variables
uploaded_parts = {}
total_uploaded = 0
# Iter over the uploaded parts.
parts_iter = self._list_object_parts(bucket_name,
object_name,
upload_id)
for part in parts_iter:
# Save uploaded parts for future verification.
uploaded_parts[part.part_number] = part
# Always start with first part number.
for part_number in range(1, total_parts_count + 1):
# Save the current part size that needs to be uploaded.
current_part_size = part_size
if part_number == total_parts_count:
current_part_size = last_part_size
# Save current offset as previous offset.
prev_offset = file_data.seek(0, 1)
# Calculate md5sum and sha256.
md5hasher = Hasher.md5()
sha256hasher = Hasher.sha256()
total_read = 0
            # Save LimitedReader, read up to current_part_size for
            # md5sum and sha256 calculation.
part = LimitedReader(file_data, current_part_size)
while total_read < current_part_size:
current_data = part.read() # Read in 64k chunks.
if not current_data or len(current_data) == 0:
break
md5hasher.update(current_data)
sha256hasher.update(current_data)
total_read += len(current_data)
part_md5_hex = md5hasher.hexdigest()
# Verify if current part number has been already
# uploaded. Verify if the size is same, further verify if
# we have matching md5sum as well.
if part_number in uploaded_parts:
previous_part = uploaded_parts[part_number]
if previous_part.size == current_part_size:
if previous_part.etag == part_md5_hex:
total_uploaded += previous_part.size
continue
# Seek back to previous offset position before checksum
# calculation.
file_data.seek(prev_offset, 0)
# Create the LimitedReader again for the http reader.
part = LimitedReader(file_data, current_part_size)
part_metadata = PartMetadata(part, md5hasher, sha256hasher,
current_part_size)
# Initiate multipart put.
etag = self._do_put_multipart_object(bucket_name, object_name,
part_metadata,
content_type, upload_id,
part_number)
# Save etags.
uploaded_parts[part_number] = UploadPart(bucket_name,
object_name,
upload_id,
part_number,
etag, None,
total_read)
# Total uploaded.
total_uploaded += total_read
if total_uploaded != file_size:
            msg = 'Data uploaded {0} is not equal to input size ' \
                  '{1}'.format(total_uploaded, file_size)
raise InvalidSizeError(msg)
# Complete all multipart transactions if possible.
return self._complete_multipart_upload(bucket_name, object_name,
upload_id, uploaded_parts)
def fget_object(self, bucket_name, object_name, file_path):
"""
Retrieves an object from a bucket and writes at file_path.
Examples:
minio.fget_object('foo', 'bar', 'localfile')
:param bucket_name: Bucket to read object from.
:param object_name: Name of the object to read.
:param file_path: Local file path to save the object.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
stat = self.stat_object(bucket_name, object_name)
if os.path.isdir(file_path):
raise OSError("file is a directory.")
# Create top level directory if needed.
top_level_dir = os.path.dirname(file_path)
if top_level_dir:
mkdir_p(top_level_dir)
# Write to a temporary file "file_path.part.minio" before saving.
file_part_path = file_path + stat.etag + '.part.minio'
# Open file in 'write+append' mode.
with open(file_part_path, 'ab') as file_part_data:
# Save current file_part statinfo.
file_statinfo = os.stat(file_part_path)
# Get partial object.
response = self._get_partial_object(bucket_name, object_name,
offset=file_statinfo.st_size,
length=0)
# Save content_size to verify if we wrote more data.
content_size = int(response.headers['content-length'])
# Save total_written.
total_written = 0
for data in response.stream(amt=1024*1024):
file_part_data.write(data)
total_written += len(data)
# Verify if we wrote data properly.
        if total_written < content_size:
            msg = 'Data written {0} bytes is smaller than the ' \
                  'specified size {1} bytes'.format(total_written,
                                                    content_size)
            raise InvalidSizeError(msg)
        if total_written > content_size:
            msg = 'Data written {0} bytes is in excess of the ' \
                  'specified size {1} bytes'.format(total_written,
                                                    content_size)
            raise InvalidSizeError(msg)
# Rename with destination file.
os.rename(file_part_path, file_path)
def get_object(self, bucket_name, object_name):
"""
Retrieves an object from a bucket.
Examples:
            my_object = minio.get_object('foo', 'bar')
:param bucket_name: Bucket to read object from
:param object_name: Name of object to read
:return: :class:`urllib3.response.HTTPResponse` object.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
return self._get_partial_object(bucket_name,
object_name)
def get_partial_object(self, bucket_name, object_name, offset=0, length=0):
"""
Retrieves an object from a bucket.
Optionally takes an offset and length of data to retrieve.
Examples:
partial_object = minio.get_partial_object('foo', 'bar', 2, 4)
:param bucket_name: Bucket to retrieve object from
:param object_name: Name of object to retrieve
:param offset: Optional offset to retrieve bytes from.
Must be >= 0.
:param length: Optional number of bytes to retrieve.
Must be an integer.
:return: :class:`urllib3.response.HTTPResponse` object.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
return self._get_partial_object(bucket_name,
object_name,
offset, length)
def copy_object(self, bucket_name, object_name, object_source,
conditions=None):
"""
Copy a source object on object storage server to a new object.
NOTE: Maximum object size supported by this API is 5GB.
        Examples:
            minio.copy_object('bucket_name', 'object_name',
                              '/source_bucket_name/source_object_name')
:param bucket_name: Bucket of new object.
:param object_name: Name of new object.
:param object_source: Source object to be copied.
:param conditions: :class:`CopyConditions` object. Collection of
supported CopyObject conditions.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
is_non_empty_string(object_source)
headers = {}
if conditions:
headers = {k: v for k, v in conditions.items()}
headers['X-Amz-Copy-Source'] = urlencode(object_source)
response = self._url_open('PUT',
bucket_name=bucket_name,
object_name=object_name,
headers=headers)
return parse_copy_object(bucket_name, object_name, response.data)
def put_object(self, bucket_name, object_name, data, length,
content_type='application/octet-stream'):
"""
Add a new object to the cloud storage server.
NOTE: Maximum object size supported by this API is 5TiB.
Examples:
file_stat = os.stat('hello.txt')
with open('hello.txt', 'rb') as data:
                minio.put_object('foo', 'bar', data, file_stat.st_size, 'text/plain')
        - For length smaller than 5MB, put_object automatically
          does a single Put operation.
        - For length larger than 5MB, put_object automatically
          does a resumable multipart operation.
:param bucket_name: Bucket of new object.
:param object_name: Name of new object.
:param data: Contents to upload.
:param length: Total length of object.
:param content_type: mime type of object as a string.
:return: None
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
        if not callable(getattr(data, 'read', None)):
            raise ValueError('Invalid input data: does not implement a callable read() method')
if length > MAX_MULTIPART_OBJECT_SIZE:
            raise InvalidArgumentError('Input content size is bigger '
                                       'than the allowed maximum of 5TiB.')
if length > MIN_OBJECT_SIZE:
return self._stream_put_object(bucket_name, object_name,
data, length,
content_type=content_type)
current_data = data.read(length)
if len(current_data) != length:
raise InvalidArgumentError(
'Could not read {} bytes from data to upload'.format(length)
)
return self._do_put_object(bucket_name, object_name,
current_data,
content_type=content_type)
def list_objects(self, bucket_name, prefix=None, recursive=False):
"""
List objects in the given bucket.
Examples:
objects = minio.list_objects('foo')
for current_object in objects:
print(current_object)
# hello
# hello/
# hello/
# world/
objects = minio.list_objects('foo', prefix='hello/')
for current_object in objects:
print(current_object)
# hello/world/
objects = minio.list_objects('foo', recursive=True)
for current_object in objects:
print(current_object)
# hello/world/1
# world/world/2
# ...
objects = minio.list_objects('foo', prefix='hello/',
recursive=True)
for current_object in objects:
print(current_object)
# hello/world/1
# hello/world/2
:param bucket_name: Bucket to list objects from
:param prefix: String specifying objects returned must begin with
:param recursive: If yes, returns all objects for a specified prefix
:return: An iterator of objects in alphabetical order.
"""
is_valid_bucket_name(bucket_name)
method = 'GET'
# Initialize query parameters.
query = {
'max-keys': 1000
}
# Add if prefix present.
if prefix:
query['prefix'] = prefix
# Delimited by default.
if not recursive:
query['delimiter'] = '/'
marker = ''
is_truncated = True
while is_truncated:
if marker:
query['marker'] = marker
headers = {}
response = self._url_open(method,
bucket_name=bucket_name,
query=query,
headers=headers)
objects, is_truncated, marker = parse_list_objects(response.data,
bucket_name=bucket_name)
for obj in objects:
yield obj
def list_objects_v2(self, bucket_name, prefix=None, recursive=False):
"""
List objects in the given bucket using the List objects V2 API.
Examples:
objects = minio.list_objects_v2('foo')
for current_object in objects:
print(current_object)
# hello
# hello/
# hello/
# world/
objects = minio.list_objects_v2('foo', prefix='hello/')
for current_object in objects:
print(current_object)
# hello/world/
objects = minio.list_objects_v2('foo', recursive=True)
for current_object in objects:
print(current_object)
# hello/world/1
# world/world/2
# ...
objects = minio.list_objects_v2('foo', prefix='hello/',
recursive=True)
for current_object in objects:
print(current_object)
# hello/world/1
# hello/world/2
:param bucket_name: Bucket to list objects from
:param prefix: String specifying objects returned must begin with
:param recursive: If yes, returns all objects for a specified prefix
:return: An iterator of objects in alphabetical order.
"""
is_valid_bucket_name(bucket_name)
# Initialize query parameters.
query = {
'list-type': 2
}
# Add if prefix present.
if prefix:
query['prefix'] = prefix
# Delimited by default.
if not recursive:
query['delimiter'] = '/'
continuation_token = None
is_truncated = True
while is_truncated:
if continuation_token is not None:
query['continuation-token'] = continuation_token
response = self._url_open(method='GET',
bucket_name=bucket_name,
query=query,
headers={})
objects, is_truncated, continuation_token = parse_list_objects_v2(
response.data, bucket_name=bucket_name
)
for obj in objects:
yield obj
def stat_object(self, bucket_name, object_name):
"""
        Check if an object exists and fetch its metadata.
:param bucket_name: Bucket of object.
:param object_name: Name of object
:return: Object metadata if object exists
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
response = self._url_open('HEAD', bucket_name=bucket_name,
object_name=object_name, headers={})
etag = response.headers.get('etag', '').replace('"', '')
size = int(response.headers.get('content-length', '0'))
content_type = response.headers.get('content-type', '')
last_modified = response.headers.get('last-modified')
if last_modified:
http_time_format = "%a, %d %b %Y %H:%M:%S GMT"
last_modified = mktime(strptime(last_modified, http_time_format))
return Object(bucket_name, object_name, content_type=content_type,
last_modified=last_modified, etag=etag, size=size)
def remove_object(self, bucket_name, object_name):
"""
Remove an object from the bucket.
:param bucket_name: Bucket of object to remove
:param object_name: Name of object to remove
:return: None
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
# No reason to store successful response, for errors
# relevant exceptions are thrown.
self._url_open('DELETE', bucket_name=bucket_name,
object_name=object_name, headers={})
def _process_remove_objects_batch(self, bucket_name, objects_batch):
"""
Requester and response parser for remove_objects
"""
# assemble request content for objects_batch
content = xml_marshal_delete_objects(objects_batch)
# compute headers
headers = {
'Content-Md5': get_md5_base64digest(content),
'Content-Length': len(content)
}
query = {"delete": None}
content_sha256_hex = get_sha256_hexdigest(content)
# send multi-object delete request
response = self._url_open(
'POST', bucket_name=bucket_name,
headers=headers, body=content,
query=query, content_sha256=content_sha256_hex,
)
# parse response to find delete errors
return parse_multi_object_delete_response(response.data)
def remove_objects(self, bucket_name, objects_iter):
"""
Removes multiple objects from a bucket.
:param bucket_name: Bucket from which to remove objects
:param objects_iter: A list, tuple or iterator that provides
            object names to delete.
:return: An iterator of MultiDeleteError instances for each
object that had a delete error.
"""
is_valid_bucket_name(bucket_name)
if isinstance(objects_iter, basestring):
raise TypeError(
'objects_iter cannot be `str` or `bytes` instance. It must be '
'a list, tuple or iterator of object names'
)
# turn list like objects into an iterator.
objects_iter = itertools.chain(objects_iter)
obj_batch = []
exit_loop = False
while not exit_loop:
try:
object_name = next(objects_iter)
is_non_empty_string(object_name)
except StopIteration:
exit_loop = True
if not exit_loop:
obj_batch.append(object_name)
# if we have 1000 items in the batch, or we have to exit
# the loop, we have to make a request to delete objects.
if len(obj_batch) == 1000 or (exit_loop and len(obj_batch) > 0):
# send request and parse response
errs_result = self._process_remove_objects_batch(
bucket_name, obj_batch
)
# return the delete errors.
for err_result in errs_result:
yield err_result
# clear batch for next set of items
obj_batch = []
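    # Usage sketch (bucket and object names are illustrative): remove_objects
    # is lazy, so the returned iterator must be consumed for the deletes to
    # actually be sent.
    #
    #   for err in client.remove_objects('my-bucket', ['a.txt', 'b.txt']):
    #       print('Deletion error: {}'.format(err))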
def list_incomplete_uploads(self, bucket_name, prefix=None,
recursive=False):
"""
List all in-complete uploads for a given bucket.
Examples:
incomplete_uploads = minio.list_incomplete_uploads('foo')
for current_upload in incomplete_uploads:
print(current_upload)
# hello
# hello/
# hello/
# world/
incomplete_uploads = minio.list_incomplete_uploads('foo',
prefix='hello/')
for current_upload in incomplete_uploads:
print(current_upload)
# hello/world/
incomplete_uploads = minio.list_incomplete_uploads('foo',
recursive=True)
for current_upload in incomplete_uploads:
print(current_upload)
# hello/world/1
# world/world/2
# ...
incomplete_uploads = minio.list_incomplete_uploads('foo',
prefix='hello/',
recursive=True)
for current_upload in incomplete_uploads:
print(current_upload)
# hello/world/1
# hello/world/2
:param bucket_name: Bucket to list incomplete uploads
:param prefix: String specifying objects returned must begin with.
:param recursive: If yes, returns all incomplete uploads for
a specified prefix.
        :return: A generator of incomplete uploads in alphabetical order.
"""
is_valid_bucket_name(bucket_name)
return self._list_incomplete_uploads(bucket_name, prefix, recursive)
def _list_incomplete_uploads(self, bucket_name, prefix=None, recursive=False, is_aggregate_size=True):
"""
        List all previously uploaded incomplete multipart objects.
        :param bucket_name: Bucket name to list uploaded objects for.
        :param prefix: String specifying objects returned must begin with.
        :param recursive: If yes, returns all incomplete objects for a specified prefix.
        :return: A generator of incomplete uploads in alphabetical order.
"""
is_valid_bucket_name(bucket_name)
# Initialize query parameters.
query = {
'uploads': None,
'max-uploads': 1000
}
if prefix:
query['prefix'] = prefix
if not recursive:
query['delimiter'] = '/'
key_marker, upload_id_marker = None, None
is_truncated = True
while is_truncated:
if key_marker:
query['key-marker'] = key_marker
if upload_id_marker:
query['upload-id-marker'] = upload_id_marker
response = self._url_open('GET',
bucket_name=bucket_name,
query=query,
headers={})
(uploads, is_truncated, key_marker, upload_id_marker) = parse_list_multipart_uploads(response.data,
bucket_name=bucket_name)
for upload in uploads:
if is_aggregate_size:
upload.size = self._get_total_multipart_upload_size(upload.bucket_name,
upload.object_name,
upload.upload_id)
yield upload
def _get_total_multipart_upload_size(self, bucket_name, object_name, upload_id):
"""
Get total multipart upload size.
:param bucket_name: Bucket name to list parts for.
:param object_name: Object name to list parts for.
:param upload_id: Upload id of the previously uploaded object name.
"""
return sum(
[part.size for part in
self._list_object_parts(bucket_name, object_name, upload_id)]
)
def _list_object_parts(self, bucket_name, object_name, upload_id):
"""
List all parts.
:param bucket_name: Bucket name to list parts for.
:param object_name: Object name to list parts for.
:param upload_id: Upload id of the previously uploaded object name.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
is_non_empty_string(upload_id)
query = {
'uploadId': upload_id,
'max-parts': 1000
}
is_truncated = True
part_number_marker = None
while is_truncated:
if part_number_marker:
query['part-number-marker'] = part_number_marker
response = self._url_open('GET',
bucket_name=bucket_name,
object_name=object_name,
query=query,
headers={})
parts, is_truncated, part_number_marker = parse_list_parts(
response.data,
bucket_name=bucket_name,
object_name=object_name,
upload_id=upload_id
)
for part in parts:
yield part
def remove_incomplete_upload(self, bucket_name, object_name):
"""
        Remove all incomplete uploads for a given bucket_name and object_name.
:param bucket_name: Bucket to drop incomplete uploads
:param object_name: Name of object to remove incomplete uploads
:return: None
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
recursive = True
uploads = self._list_incomplete_uploads(bucket_name, object_name,
recursive,
is_aggregate_size=False)
for upload in uploads:
if object_name == upload.object_name:
self._remove_incomplete_upload(bucket_name, object_name,
upload.upload_id)
return
def presigned_get_object(self, bucket_name, object_name,
expires=timedelta(days=7),
response_headers=None):
"""
Presigns a get object request and provides a url
Example:
from datetime import timedelta
presignedURL = presigned_get_object('bucket_name',
'object_name',
timedelta(days=7))
print(presignedURL)
:param bucket_name: Bucket for the presigned url.
:param object_name: Object for which presigned url is generated.
        :param expires: Optional expires argument to specify timedelta.
               Defaults to 7days.
        :param response_headers: Optional response headers (e.g.
               response-content-type) to be carried as query parameters
               of the presigned url.
        :return: Presigned url.
"""
if expires.total_seconds() < 1 or \
expires.total_seconds() > _SEVEN_DAYS_SECONDS:
            raise InvalidArgumentError('Expires param valid values'
                                       ' are between 1 sec and'
                                       ' {0} secs'.format(_SEVEN_DAYS_SECONDS))
return self._presigned_get_partial_object(bucket_name,
object_name,
expires,
response_headers=response_headers)
def presigned_put_object(self, bucket_name, object_name,
expires=timedelta(days=7)):
"""
Presigns a put object request and provides a url
Example:
from datetime import timedelta
presignedURL = presigned_put_object('bucket_name',
'object_name',
timedelta(days=7))
print(presignedURL)
:param bucket_name: Bucket for the presigned url.
:param object_name: Object for which presigned url is generated.
:param expires: optional expires argument to specify timedelta.
Defaults to 7days.
:return: Presigned put object url.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
if expires.total_seconds() < 1 or \
expires.total_seconds() > _SEVEN_DAYS_SECONDS:
            raise InvalidArgumentError('Expires param valid values'
                                       ' are between 1 sec and'
                                       ' {0} secs'.format(_SEVEN_DAYS_SECONDS))
region = self._get_bucket_region(bucket_name)
url = get_target_url(self._endpoint_url,
bucket_name=bucket_name,
object_name=object_name,
bucket_region=region)
presign_url = presign_v4('PUT', url,
self._access_key,
self._secret_key,
region=region,
headers={},
expires=int(expires.total_seconds()))
return presign_url
def presigned_post_policy(self, post_policy):
"""
        Provides POST form data that can be used for object uploads.
Example:
post_policy = PostPolicy()
post_policy.set_bucket_name('bucket_name')
post_policy.set_key_startswith('objectPrefix/')
expires_date = datetime.utcnow()+timedelta(days=10)
post_policy.set_expires(expires_date)
print(presigned_post_policy(post_policy))
        :param post_policy: PostPolicy object.
:return: PostPolicy form dictionary to be used in curl or HTML forms.
"""
post_policy.is_valid()
date = datetime.utcnow()
iso8601_date = date.strftime("%Y%m%dT%H%M%SZ")
region = self._get_bucket_region(post_policy.form_data['bucket'])
credential_string = generate_credential_string(self._access_key,
date, region)
post_policy.policies.append(('eq', '$x-amz-date', iso8601_date))
post_policy.policies.append(('eq', '$x-amz-algorithm', _SIGN_V4_ALGORITHM))
post_policy.policies.append(('eq', '$x-amz-credential', credential_string))
post_policy_base64 = post_policy.base64()
signature = post_presign_signature(date, region,
self._secret_key,
post_policy_base64)
post_policy.form_data.update({
'policy': post_policy_base64,
'x-amz-algorithm': _SIGN_V4_ALGORITHM,
'x-amz-credential': credential_string,
'x-amz-date': iso8601_date,
'x-amz-signature': signature,
})
url_str = get_target_url(self._endpoint_url,
bucket_name=post_policy.form_data['bucket'],
bucket_region=region)
return (url_str, post_policy.form_data)
# All private functions below.
def _get_partial_object(self, bucket_name, object_name,
offset=0, length=0):
"""
Retrieves an object from a bucket.
Optionally takes an offset and length of data to retrieve.
Examples:
partial_object = minio.get_partial_object('foo', 'bar', 2, 4)
:param bucket_name: Bucket to retrieve object from
:param object_name: Name of object to retrieve
:param offset: Optional offset to retrieve bytes from.
Must be >= 0.
:param length: Optional number of bytes to retrieve.
Must be > 0.
:return: :class:`urllib3.response.HTTPResponse` object.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
headers = {}
if offset != 0 or length != 0:
request_range = '{}-{}'.format(
offset, "" if length == 0 else offset + length - 1
)
headers['Range'] = 'bytes=' + request_range
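            # e.g. offset=2, length=4 -> 'bytes=2-5';
            # offset=2, length=0 -> 'bytes=2-' (read to end of object)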
response = self._url_open('GET',
bucket_name=bucket_name,
object_name=object_name,
headers=headers)
return response
def _presigned_get_partial_object(self, bucket_name, object_name,
expires=timedelta(days=7),
offset=0, length=0,
response_headers=None):
"""
        Presigns a get partial object request and provides a url;
        this is an internal function, not exposed publicly.
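        Example (illustrative; 'minio' is assumed to be an initialized
        client instance):
            from datetime import timedelta
            presignedURL = minio._presigned_get_partial_object(
                'bucket_name', 'object_name', timedelta(days=7),
                offset=0, length=100)
            print(presignedURL)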
:param bucket_name: Bucket for the presigned url.
:param object_name: Object for which presigned url is generated.
:param expires: optional expires argument to specify timedelta.
           Defaults to 7 days.
        :param offset: optional start byte of the range. Defaults to 0.
        :param length: optional number of bytes to read. Defaults to 0,
           which reads from the offset to the end of the object.
:return: Presigned url.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
region = self._get_bucket_region(bucket_name)
url = get_target_url(self._endpoint_url,
bucket_name=bucket_name,
object_name=object_name,
bucket_region=region)
headers = {}
if offset != 0 or length != 0:
request_range = '{}-{}'.format(
offset, "" if length == 0 else offset + length - 1
)
headers['Range'] = 'bytes=' + request_range
presign_url = presign_v4('GET', url,
self._access_key,
self._secret_key,
region=region,
headers=headers,
response_headers=response_headers,
expires=int(expires.total_seconds()))
return presign_url
def _do_put_multipart_object(self, bucket_name, object_name, part_metadata,
content_type='application/octet-stream',
upload_id='', part_number=0):
"""
Initiate a multipart PUT operation for a part number.
:param bucket_name: Bucket name for the multipart request.
:param object_name: Object name for the multipart request.
:param part_metadata: Part-data and metadata for the multipart request.
:param content_type: Content type of multipart request.
:param upload_id: Upload id of the multipart request.
:param part_number: Part number of the data to be uploaded.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
data = part_metadata.data
        if not callable(getattr(data, 'read', None)):
            raise ValueError('Invalid input data: does not implement'
                             ' a callable read() method')
headers = {
'Content-Length': part_metadata.size,
'Content-Type': content_type,
'Content-Md5': part_metadata.md5hasher.base64digest()
}
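        # Each part is addressed by uploadId and partNumber; the etag the
        # server returns is needed later to complete the multipart upload.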
response = self._url_open(
'PUT', bucket_name=bucket_name,
object_name=object_name,
query={'uploadId': upload_id,
'partNumber': part_number},
headers=headers,
body=data,
content_sha256=part_metadata.sha256hasher.hexdigest()
)
return response.headers['etag'].replace('"', '')
def _do_put_object(self, bucket_name, object_name, data,
content_type='application/octet-stream'):
"""
Initiate a single PUT operation.
:param bucket_name: Bucket name for the put request.
:param object_name: Object name for the put request.
:param data: Input data for the put request.
:param content_type: Content type of put request.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
# Accept only bytes - otherwise we need to know how to encode
# the data to bytes before storing in the object.
if not isinstance(data, bytes):
            raise ValueError('Input data must be of type bytes')
headers = {
'Content-Length': len(data),
'Content-Type': content_type,
'Content-Md5': get_md5_base64digest(data)
}
response = self._url_open('PUT', bucket_name=bucket_name,
object_name=object_name,
headers=headers,
body=io.BytesIO(data),
content_sha256=get_sha256_hexdigest(data))
        # The etag header value is quoted; strip the quotes before returning.
        etag = response.headers.get('etag', '').replace('"', '')
        return etag
def _stream_put_object(self, bucket_name, object_name,
data, content_size,
content_type='application/octet-stream'):
"""
Streaming multipart upload operation.
:param bucket_name: Bucket name of the multipart upload.
:param object_name: Object name of the multipart upload.
:param content_size: Total size of the content to be uploaded.
        :param content_type: Content type of the multipart upload.
Defaults to 'application/octet-stream'.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
        if not callable(getattr(data, 'read', None)):
            raise ValueError('Invalid input data: does not implement'
                             ' a callable read() method')
# get upload id.
upload_id = self._get_upload_id(bucket_name, object_name, content_type)
# Initialize variables
total_uploaded = 0
# Calculate optimal part info.
total_parts_count, part_size, last_part_size = optimal_part_info(content_size)
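        # total_parts_count: number of parts to upload, part_size: size of
        # every part but the last, last_part_size: size of the final part.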
# Iter over the uploaded parts.
parts_iter = self._list_object_parts(bucket_name,
object_name,
upload_id)
# save uploaded parts for verification.
        uploaded_parts = {part.part_number: part for part in parts_iter}
        # Generate new parts and upload <= current_part_size until
        # part_number reaches total_parts_count calculated for the
        # given size. Additionally parts_manager() also provides
        # md5 and sha256 hashers for the partitioned data.
for part_number in range(1, total_parts_count + 1):
current_part_size = (part_size if part_number < total_parts_count
else last_part_size)
part_metadata = parts_manager(data, current_part_size)
md5_hex = part_metadata.md5hasher.hexdigest()
# Verify if part number has been already uploaded.
# Further verify if we have matching md5sum as well.
previous_part = uploaded_parts.get(part_number, None)
if (previous_part and previous_part.size == current_part_size and
previous_part.etag == md5_hex):
total_uploaded += previous_part.size
continue
# Seek back to starting position.
part_metadata.data.seek(0)
etag = self._do_put_multipart_object(bucket_name,
object_name,
part_metadata,
content_type,
upload_id,
part_number)
# Save etags.
uploaded_parts[part_number] = UploadPart(bucket_name,
object_name,
upload_id,
part_number,
etag,
None,
part_metadata.size)
total_uploaded += part_metadata.size
if total_uploaded != content_size:
            msg = 'Data uploaded {0} is not equal to input size ' \
                  '{1}'.format(total_uploaded, content_size)
raise InvalidSizeError(msg)
# Complete all multipart transactions if possible.
return self._complete_multipart_upload(bucket_name, object_name,
upload_id, uploaded_parts)
def _remove_incomplete_upload(self, bucket_name, object_name, upload_id):
"""
Remove incomplete multipart request.
:param bucket_name: Bucket name of the incomplete upload.
:param object_name: Object name of incomplete upload.
:param upload_id: Upload id of the incomplete upload.
"""
# No reason to store successful response, for errors
# relevant exceptions are thrown.
self._url_open('DELETE', bucket_name=bucket_name,
object_name=object_name, query={'uploadId': upload_id},
headers={})
def _new_multipart_upload(self, bucket_name, object_name,
content_type='application/octet-stream'):
"""
Initialize new multipart upload request.
:param bucket_name: Bucket name of the new multipart request.
:param object_name: Object name of the new multipart request.
:param content_type: Content type of the new object.
:return: Returns an upload id.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
response = self._url_open('POST', bucket_name=bucket_name,
object_name=object_name,
query={'uploads': None},
headers={'Content-Type': content_type})
return parse_new_multipart_upload(response.data)
def _complete_multipart_upload(self, bucket_name, object_name,
upload_id, uploaded_parts):
"""
Complete an active multipart upload request.
:param bucket_name: Bucket name of the multipart request.
:param object_name: Object name of the multipart request.
:param upload_id: Upload id of the active multipart request.
:param uploaded_parts: Key, Value dictionary of uploaded parts.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
is_non_empty_string(upload_id)
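        # Marshal the uploaded part numbers and their etags into the
        # CompleteMultipartUpload XML request body.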
data = xml_marshal_complete_multipart_upload(uploaded_parts)
sha256_hex = get_sha256_hexdigest(data)
md5_base64 = get_md5_base64digest(data)
headers = {
'Content-Length': len(data),
'Content-Type': 'application/xml',
'Content-Md5': md5_base64,
}
response = self._url_open('POST', bucket_name=bucket_name,
object_name=object_name,
query={'uploadId': upload_id},
headers=headers, body=data,
content_sha256=sha256_hex)
return parse_multipart_upload_result(response.data)
def _delete_bucket_region(self, bucket_name):
"""
Delete a bucket from bucket region cache.
:param bucket_name: Bucket name to be removed from cache.
"""
# Handles if bucket doesn't exist as well.
self._region_map.pop(bucket_name, None)
def _set_bucket_region(self, bucket_name, region='us-east-1'):
"""
Sets a bucket region into bucket region cache.
:param bucket_name: Bucket name for which region is set.
:param region: Region of the bucket name to set.
"""
self._region_map[bucket_name] = region
def _get_bucket_region(self, bucket_name):
"""
Get region based on the bucket name.
:param bucket_name: Bucket name for which region will be fetched.
        :return: Region of the bucket.
"""
# get bucket location for Amazon S3.
region = 'us-east-1' # default to US standard.
if bucket_name in self._region_map:
region = self._region_map[bucket_name]
else:
region = self._get_bucket_location(bucket_name)
self._region_map[bucket_name] = region
return region
def _get_bucket_location(self, bucket_name):
"""
Get bucket location.
        :param bucket_name: Bucket name whose location is fetched.
        :return: Location (region) of the bucket.
"""
method = 'GET'
url = self._endpoint_url + '/' + bucket_name + '?location'
headers = {}
# default for all requests.
region = 'us-east-1'
# For anonymous requests no need to get bucket location.
if self._access_key is None or self._secret_key is None:
return 'us-east-1'
# Get signature headers if any.
headers = sign_v4(method, url, region,
headers, self._access_key,
self._secret_key, None)
response = self._http.urlopen(method, url,
body=None,
headers=headers,
preload_content=False)
if response.status != 200:
response_error = ResponseError(response)
raise response_error.get(bucket_name)
location = parse_location_constraint(response.data)
# location is empty for 'US standard region'
if not location:
return 'us-east-1'
# location can be 'EU' convert it to meaningful 'eu-west-1'
if location == 'EU':
return 'eu-west-1'
return location
def _url_open(self, method, bucket_name=None, object_name=None,
query=None, body=None, headers=None, content_sha256=None):
"""
        Open a URL: a wrapper around signature version '4' signing
        and :meth:`urllib3.PoolManager.urlopen`.
"""
        # Set user agent once before the request.
        if headers is None:
            headers = {}
        headers['User-Agent'] = self._user_agent
# Get bucket region.
region = self._get_bucket_region(bucket_name)
# Construct target url.
url = get_target_url(self._endpoint_url, bucket_name=bucket_name,
object_name=object_name, bucket_region=region,
query=query)
# Get signature headers if any.
headers = sign_v4(method, url, region,
headers, self._access_key,
self._secret_key, content_sha256)
response = self._http.urlopen(method, url,
body=body,
headers=headers,
preload_content=False)
if self._trace_output_stream:
dump_http(method, url, headers, response,
self._trace_output_stream)
        if response.status not in (200, 204, 206):
# Upon any response error invalidate the region cache
# proactively for the bucket name.
self._delete_bucket_region(bucket_name)
# Populate response_error with error response.
response_error = ResponseError(response)
if method == 'HEAD':
raise response_error.head(bucket_name, object_name)
elif method == 'GET':
raise response_error.get(bucket_name, object_name)
elif method == 'POST':
raise response_error.post(bucket_name, object_name)
elif method == 'PUT':
raise response_error.put(bucket_name, object_name)
elif method == 'DELETE':
raise response_error.delete(bucket_name, object_name)
            else:
                raise ValueError('Unsupported method {0} returned'
                                 ' error: {1}'.format(method, response.status))
return response
| {
"content_hash": "d13f41c22e484de4e0751f98bb21450a",
"timestamp": "",
"source": "github",
"line_count": 1755,
"max_line_length": 121,
"avg_line_length": 38.26894586894587,
"alnum_prop": 0.5351240284684792,
"repo_name": "donatello/minio-py",
"id": "8e3b6d2fc9ec141ac6e9a73561426839234bcd74",
"size": "67826",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "minio/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "150"
},
{
"name": "Python",
"bytes": "273222"
}
],
"symlink_target": ""
} |
import pickle
import numpy
import pytest
try:
import scipy.sparse
scipy_available = True
except ImportError:
scipy_available = False
import cupy
from cupy import testing
from cupy.cuda import driver
from cupy.cuda import runtime
from cupyx.scipy import sparse
def _make(xp, sp, dtype):
data = xp.array([0, 1, 3, 2], dtype)
indices = xp.array([0, 0, 2, 1], 'i')
indptr = xp.array([0, 1, 2, 3, 4], 'i')
# 0, 1, 0, 0
# 0, 0, 0, 2
# 0, 0, 3, 0
return sp.csc_matrix((data, indices, indptr), shape=(3, 4))
def _make_complex(xp, sp, dtype):
data = xp.array([0, 1, 2, 3], dtype)
if dtype in [numpy.complex64, numpy.complex128]:
data = data - 1j
indices = xp.array([0, 1, 3, 2], 'i')
indptr = xp.array([0, 2, 3, 4], 'i')
# 0, 1 - 1j, 0, 0
# 0, 0, 0, 2 - 1j
# 0, 0, 3 - 1j, 0
return sp.csr_matrix((data, indices, indptr), shape=(3, 4))
def _make2(xp, sp, dtype):
data = xp.array([2, 1, 3, 4], dtype)
indices = xp.array([1, 0, 1, 2], 'i')
indptr = xp.array([0, 0, 1, 4, 4], 'i')
# 0, 0, 1, 0
# 0, 2, 3, 0
# 0, 0, 4, 0
return sp.csc_matrix((data, indices, indptr), shape=(3, 4))
def _make3(xp, sp, dtype):
data = xp.array([1, 4, 3, 2, 5], dtype)
indices = xp.array([0, 3, 1, 1, 3], 'i')
indptr = xp.array([0, 2, 3, 5], 'i')
# 1, 0, 0
# 0, 3, 2
# 0, 0, 0
# 4, 0, 5
return sp.csc_matrix((data, indices, indptr), shape=(4, 3))
def _make_unordered(xp, sp, dtype):
data = xp.array([1, 2, 3, 4], dtype)
indices = xp.array([1, 0, 1, 2], 'i')
indptr = xp.array([0, 0, 0, 2, 4], 'i')
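    # 0, 0, 2, 0
    # 0, 0, 1, 3
    # 0, 0, 0, 4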
return sp.csc_matrix((data, indices, indptr), shape=(3, 4))
def _make_duplicate(xp, sp, dtype):
data = xp.array([1, 4, 3, 0, 2, 5], dtype)
indices = xp.array([0, 1, 0, 2, 1, 1], 'i')
indptr = xp.array([0, 3, 4, 6, 6], 'i')
# 4, 0, 0, 0
# 4, 0, 7, 0
# 0, 0, 0, 0
return sp.csc_matrix((data, indices, indptr), shape=(3, 4))
def _make_empty(xp, sp, dtype):
data = xp.array([], dtype)
indices = xp.array([], 'i')
indptr = xp.array([0, 0, 0, 0, 0], 'i')
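    # Empty 3x4 matrix: no stored entries, every element is zero.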
return sp.csc_matrix((data, indices, indptr), shape=(3, 4))
def _make_shape(xp, sp, dtype):
return sp.csc_matrix((3, 4))
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
}))
class TestCscMatrix:
@pytest.fixture(autouse=True)
def setUp(self):
self.m = _make(cupy, sparse, self.dtype)
def test_dtype(self):
assert self.m.dtype == self.dtype
def test_data(self):
assert self.m.data.dtype == self.dtype
testing.assert_array_equal(
self.m.data, cupy.array([0, 1, 3, 2], self.dtype))
def test_indices(self):
assert self.m.indices.dtype == numpy.int32
testing.assert_array_equal(
self.m.indices, cupy.array([0, 0, 2, 1], self.dtype))
def test_indptr(self):
assert self.m.indptr.dtype == numpy.int32
testing.assert_array_equal(
self.m.indptr, cupy.array([0, 1, 2, 3, 4], self.dtype))
def test_init_copy(self):
n = sparse.csc_matrix(self.m)
assert n is not self.m
cupy.testing.assert_array_equal(n.data, self.m.data)
cupy.testing.assert_array_equal(n.indices, self.m.indices)
cupy.testing.assert_array_equal(n.indptr, self.m.indptr)
assert n.shape == self.m.shape
def test_init_copy_other_sparse(self):
n = sparse.csc_matrix(self.m.tocsr())
cupy.testing.assert_array_equal(n.data, self.m.data)
cupy.testing.assert_array_equal(n.indices, self.m.indices)
cupy.testing.assert_array_equal(n.indptr, self.m.indptr)
assert n.shape == self.m.shape
@testing.with_requires('scipy')
def test_init_copy_scipy_sparse(self):
m = _make(numpy, scipy.sparse, self.dtype)
n = sparse.csc_matrix(m)
assert isinstance(n.data, cupy.ndarray)
assert isinstance(n.indices, cupy.ndarray)
assert isinstance(n.indptr, cupy.ndarray)
cupy.testing.assert_array_equal(n.data, m.data)
cupy.testing.assert_array_equal(n.indices, m.indices)
cupy.testing.assert_array_equal(n.indptr, m.indptr)
assert n.shape == m.shape
@testing.with_requires('scipy')
def test_init_copy_other_scipy_sparse(self):
m = _make(numpy, scipy.sparse, self.dtype)
n = sparse.csc_matrix(m.tocsr())
assert isinstance(n.data, cupy.ndarray)
assert isinstance(n.indices, cupy.ndarray)
assert isinstance(n.indptr, cupy.ndarray)
cupy.testing.assert_array_equal(n.data, m.data)
cupy.testing.assert_array_equal(n.indices, m.indices)
cupy.testing.assert_array_equal(n.indptr, m.indptr)
assert n.shape == m.shape
def test_init_dense(self):
m = cupy.array([[0, 1, 0, 2],
[0, 0, 0, 0],
[0, 0, 0, 3]], dtype=self.dtype)
n = sparse.csc_matrix(m)
assert n.nnz == 3
assert n.shape == (3, 4)
cupy.testing.assert_array_equal(n.data, [1, 2, 3])
cupy.testing.assert_array_equal(n.indices, [0, 0, 2])
cupy.testing.assert_array_equal(n.indptr, [0, 0, 1, 1, 3])
@pytest.mark.xfail(runtime.is_hip, reason='hipSPARSE handles nnz=0 badly')
def test_init_dense_empty(self):
m = cupy.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]], dtype=self.dtype)
n = sparse.csc_matrix(m)
assert n.nnz == 0
assert n.shape == (3, 4)
cupy.testing.assert_array_equal(n.data, [])
cupy.testing.assert_array_equal(n.indices, [])
cupy.testing.assert_array_equal(n.indptr, [0, 0, 0, 0, 0])
def test_init_dense_one_dim(self):
m = cupy.array([0, 1, 0, 2], dtype=self.dtype)
n = sparse.csc_matrix(m)
assert n.nnz == 2
assert n.shape == (1, 4)
cupy.testing.assert_array_equal(n.data, [1, 2])
cupy.testing.assert_array_equal(n.indices, [0, 0])
cupy.testing.assert_array_equal(n.indptr, [0, 0, 1, 1, 2])
def test_init_dense_zero_dim(self):
m = cupy.array(1, dtype=self.dtype)
n = sparse.csc_matrix(m)
assert n.nnz == 1
assert n.shape == (1, 1)
cupy.testing.assert_array_equal(n.data, [1])
cupy.testing.assert_array_equal(n.indices, [0])
cupy.testing.assert_array_equal(n.indptr, [0, 1])
def test_init_data_row_col(self):
o = self.m.tocoo()
n = sparse.csc_matrix((o.data, (o.row, o.col)))
cupy.testing.assert_array_equal(n.data, self.m.data)
cupy.testing.assert_array_equal(n.indices, self.m.indices)
cupy.testing.assert_array_equal(n.indptr, self.m.indptr)
assert n.shape == self.m.shape
@testing.with_requires('scipy')
def test_init_dense_invalid_ndim(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(TypeError):
m = xp.zeros((1, 1, 1), dtype=self.dtype)
sp.csc_matrix(m)
def test_copy(self):
n = self.m.copy()
assert isinstance(n, sparse.csc_matrix)
assert n is not self.m
assert n.data is not self.m.data
assert n.indices is not self.m.indices
assert n.indptr is not self.m.indptr
cupy.testing.assert_array_equal(n.data, self.m.data)
cupy.testing.assert_array_equal(n.indices, self.m.indices)
cupy.testing.assert_array_equal(n.indptr, self.m.indptr)
assert n.shape == self.m.shape
def test_shape(self):
assert self.m.shape == (3, 4)
def test_ndim(self):
assert self.m.ndim == 2
def test_nnz(self):
assert self.m.nnz == 4
def test_conj(self):
n = _make_complex(cupy, sparse, self.dtype)
cupy.testing.assert_array_equal(n.conj().data, n.data.conj())
@testing.with_requires('scipy')
def test_get(self):
m = self.m.get()
assert isinstance(m, scipy.sparse.csc_matrix)
expect = [
[0, 1, 0, 0],
[0, 0, 0, 2],
[0, 0, 3, 0]
]
numpy.testing.assert_allclose(m.toarray(), expect)
@testing.with_requires('scipy')
def test_str(self):
if numpy.dtype(self.dtype).kind == 'f':
expect = ''' (0, 0)\t0.0
(0, 1)\t1.0
(2, 2)\t3.0
(1, 3)\t2.0'''
elif numpy.dtype(self.dtype).kind == 'c':
expect = ''' (0, 0)\t0j
(0, 1)\t(1+0j)
(2, 2)\t(3+0j)
(1, 3)\t(2+0j)'''
assert str(self.m) == expect
def test_toarray(self):
m = self.m.toarray()
expect = [
[0, 1, 0, 0],
[0, 0, 0, 2],
[0, 0, 3, 0]
]
assert m.flags.c_contiguous
cupy.testing.assert_allclose(m, expect)
def test_pickle_roundtrip(self):
s = _make(cupy, sparse, self.dtype)
s2 = pickle.loads(pickle.dumps(s))
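        # The unpickled matrix must get its own cuSPARSE matrix descriptor,
        # not share the original's.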
assert s._descr.descriptor != s2._descr.descriptor
assert s.shape == s2.shape
assert s.dtype == s2.dtype
if scipy_available:
assert (s.get() != s2.get()).count_nonzero() == 0
def test_reshape_0(self):
assert self.m.reshape((12, 1)).shape == (12, 1)
def test_reshape_1(self):
m = self.m.reshape((1, 12)).toarray()
expect = [[0, 1, 0, 0, 0, 0, 0, 2, 0, 0, 3, 0]]
cupy.testing.assert_allclose(m, expect)
def test_reshape_2(self):
m = self.m.reshape((1, 12), order='F').toarray()
expect = [[1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 3, 0]]
cupy.testing.assert_allclose(m, expect)
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
}))
@testing.with_requires('scipy')
class TestCscMatrixInit:
@pytest.fixture(autouse=True)
def setUp(self):
self.shape = (3, 4)
def data(self, xp):
return xp.array([1, 2, 3, 4], self.dtype)
def indices(self, xp):
return xp.array([0, 0, 2, 1], 'i')
def indptr(self, xp):
return xp.array([0, 1, 2, 3, 4], 'i')
@testing.numpy_cupy_equal(sp_name='sp')
def test_shape_none(self, xp, sp):
x = sp.csc_matrix(
(self.data(xp), self.indices(xp), self.indptr(xp)), shape=None)
assert x.shape == (3, 4)
@testing.numpy_cupy_equal(sp_name='sp')
def test_dtype(self, xp, sp):
data = self.data(xp).real.astype('i')
x = sp.csc_matrix(
(data, self.indices(xp), self.indptr(xp)), dtype=self.dtype)
assert x.dtype == self.dtype
@testing.numpy_cupy_equal(sp_name='sp')
def test_copy_true(self, xp, sp):
data = self.data(xp)
indices = self.indices(xp)
indptr = self.indptr(xp)
x = sp.csc_matrix((data, indices, indptr), copy=True)
assert data is not x.data
assert indices is not x.indices
assert indptr is not x.indptr
@testing.numpy_cupy_allclose(sp_name='sp')
def test_init_with_shape(self, xp, sp):
s = sp.csc_matrix(self.shape)
assert s.shape == self.shape
assert s.dtype == 'd'
assert s.size == 0
return s
@testing.numpy_cupy_allclose(sp_name='sp')
def test_init_with_shape_and_dtype(self, xp, sp):
s = sp.csc_matrix(self.shape, dtype=self.dtype)
assert s.shape == self.shape
assert s.dtype == self.dtype
assert s.size == 0
return s
@testing.numpy_cupy_allclose(sp_name='sp', atol=1e-5)
def test_intlike_shape(self, xp, sp):
s = sp.csc_matrix((self.data(xp), self.indices(xp), self.indptr(xp)),
shape=(xp.array(self.shape[0]),
xp.int32(self.shape[1])))
assert isinstance(s.shape[0], int)
assert isinstance(s.shape[1], int)
return s
def test_shape_invalid(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(ValueError):
sp.csc_matrix(
(self.data(xp), self.indices(xp), self.indptr(xp)),
shape=(2,))
def test_data_invalid(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(ValueError):
sp.csc_matrix(
('invalid', self.indices(xp), self.indptr(xp)),
shape=self.shape)
def test_data_invalid_ndim(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(ValueError):
sp.csc_matrix(
(self.data(xp)[None], self.indices(xp),
self.indptr(xp)),
shape=self.shape)
def test_indices_invalid(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(ValueError):
sp.csc_matrix(
(self.data(xp), 'invalid', self.indptr(xp)),
shape=self.shape)
def test_indices_invalid_ndim(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(ValueError):
sp.csc_matrix(
(self.data(xp), self.indices(xp)[None], self.indptr(xp)),
shape=self.shape)
def test_indptr_invalid(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(ValueError):
sp.csc_matrix(
(self.data(xp), self.indices(xp), 'invalid'),
shape=self.shape)
def test_indptr_invalid_ndim(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(ValueError):
sp.csc_matrix(
(self.data(xp), self.indices(xp), self.indptr(xp)[None]),
shape=self.shape)
def test_data_indices_different_length(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
data = xp.arange(5, dtype=self.dtype)
with pytest.raises(ValueError):
sp.csc_matrix(
(data, self.indices(xp), self.indptr(xp)),
shape=self.shape)
def test_indptr_invalid_length(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
indptr = xp.array([0, 1], 'i')
with pytest.raises(ValueError):
sp.csc_matrix(
(self.data(xp), self.indices(xp), indptr),
shape=self.shape)
def test_unsupported_dtype(self):
with pytest.raises(ValueError):
sparse.csc_matrix(
(self.data(cupy), self.indices(cupy), self.indptr(cupy)),
shape=self.shape, dtype='i')
@testing.numpy_cupy_equal(sp_name='sp')
def test_conj(self, xp, sp):
n = _make_complex(xp, sp, self.dtype)
cupy.testing.assert_array_equal(n.conj().data, n.data.conj())
@testing.parameterize(*testing.product({
'make_method': [
'_make', '_make_unordered', '_make_empty', '_make_duplicate',
'_make_shape'],
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
}))
@testing.with_requires('scipy')
class TestCscMatrixScipyComparison:
@pytest.fixture(autouse=True)
def setUp(self):
if runtime.is_hip:
if self.make_method in ('_make_empty', '_make_shape'):
# xcsr2coo, xcsrgemm2Nnz, csrmm2, nnz_compress, ... could raise
# HIPSPARSE_STATUS_INVALID_VALUE, maybe because we have a zero
# matrix (nnz=0)?
pytest.xfail('may be buggy')
@property
def make(self):
return globals()[self.make_method]
def test_len(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
with pytest.raises(TypeError):
len(m)
@testing.numpy_cupy_array_equal(sp_name='sp')
def test_asfptype(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return m.asfptype()
@testing.numpy_cupy_allclose(sp_name='sp', contiguous_check=False)
def test_toarray(self, xp, sp):
m = self.make(xp, sp, self.dtype)
a = m.toarray()
if sp is sparse:
assert a.flags.c_contiguous
return a
@testing.numpy_cupy_allclose(sp_name='sp')
def test_toarray_c_order(self, xp, sp):
m = self.make(xp, sp, self.dtype)
a = m.toarray(order='C')
assert a.flags.c_contiguous
return a
@testing.numpy_cupy_allclose(sp_name='sp')
def test_toarray_f_order(self, xp, sp):
m = self.make(xp, sp, self.dtype)
a = m.toarray(order='F')
assert a.flags.f_contiguous
return a
@testing.with_requires('numpy>=1.19')
def test_toarray_unknown_order(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
with pytest.raises(ValueError):
m.toarray(order='#')
@testing.numpy_cupy_allclose(sp_name='sp', contiguous_check=False)
def test_A(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return m.A
@testing.numpy_cupy_allclose(sp_name='sp')
def test_tocoo(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return m.tocoo()
@testing.numpy_cupy_allclose(sp_name='sp')
def test_tocoo_copy(self, xp, sp):
m = self.make(xp, sp, self.dtype)
n = m.tocoo(copy=True)
assert m.data is not n.data
return n
@testing.numpy_cupy_allclose(sp_name='sp')
def test_tocsc(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return m.tocsc()
@testing.numpy_cupy_allclose(sp_name='sp')
def test_tocsc_copy(self, xp, sp):
m = self.make(xp, sp, self.dtype)
n = m.tocsc(copy=True)
assert m.data is not n.data
assert m.indices is not n.indices
assert m.indptr is not n.indptr
return n
@testing.numpy_cupy_allclose(sp_name='sp')
def test_tocsr(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return m.tocsr()
@testing.numpy_cupy_allclose(sp_name='sp')
def test_tocsr_copy(self, xp, sp):
m = self.make(xp, sp, self.dtype)
n = m.tocsr(copy=True)
assert m.data is not n.data
assert m.indices is not n.indices
assert m.indptr is not n.indptr
return n
# dot
@testing.with_requires('scipy!=1.8.0')
@testing.numpy_cupy_allclose(sp_name='sp', _check_sparse_format=False)
def test_dot_scalar(self, xp, sp):
m = _make(xp, sp, self.dtype)
return m.dot(2.0)
@testing.with_requires('scipy!=1.8.0')
@testing.numpy_cupy_allclose(sp_name='sp', _check_sparse_format=False)
def test_dot_numpy_scalar(self, xp, sp):
m = _make(xp, sp, self.dtype)
return m.dot(numpy.dtype(self.dtype).type(2.0))
@pytest.mark.skipif(runtime.is_hip and driver.get_build_version() < 400,
reason='no working implementation')
@testing.numpy_cupy_allclose(sp_name='sp', _check_sparse_format=False)
def test_dot_csr(self, xp, sp):
m = self.make(xp, sp, self.dtype)
x = _make3(xp, sp, self.dtype)
return m.dot(x)
@pytest.mark.skipif(runtime.is_hip and driver.get_build_version() < 400,
reason='no working implementation')
def test_dot_csr_invalid_shape(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
x = sp.csr_matrix((5, 3), dtype=self.dtype)
with pytest.raises(ValueError):
m.dot(x)
@pytest.mark.skipif(runtime.is_hip and driver.get_build_version() < 400,
reason='no working implementation')
@testing.numpy_cupy_allclose(sp_name='sp', _check_sparse_format=False)
def test_dot_csc(self, xp, sp):
m = self.make(xp, sp, self.dtype)
x = _make3(xp, sp, self.dtype).tocsc()
return m.dot(x)
@pytest.mark.skipif(runtime.is_hip and driver.get_build_version() < 400,
reason='no working implementation')
@testing.numpy_cupy_allclose(sp_name='sp', _check_sparse_format=False)
def test_dot_sparse(self, xp, sp):
m = self.make(xp, sp, self.dtype)
x = _make3(xp, sp, self.dtype).tocoo()
return m.dot(x)
@testing.with_requires('scipy>=1.8.0rc1')
def test_dot_zero_dim(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
x = xp.array(2, dtype=self.dtype)
with pytest.raises(ValueError):
m.dot(x)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_dot_dense_vector(self, xp, sp):
if runtime.is_hip:
HIP_version = driver.get_build_version()
if HIP_version < 400:
pytest.skip('no working implementation')
elif HIP_version < 5_00_00000:
# I got HIPSPARSE_STATUS_INTERNAL_ERROR...
pytest.xfail('spmv is buggy (trans=True)')
m = self.make(xp, sp, self.dtype)
x = xp.arange(4).astype(self.dtype)
return m.dot(x)
def test_dot_dense_vector_invalid_shape(self):
if runtime.is_hip:
HIP_version = driver.get_build_version()
if HIP_version < 400:
pytest.skip('no working implementation')
elif HIP_version < 5_00_00000:
# I got HIPSPARSE_STATUS_INTERNAL_ERROR...
pytest.xfail('spmv is buggy (trans=True)')
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
x = xp.arange(5).astype(self.dtype)
with pytest.raises(ValueError):
m.dot(x)
@testing.numpy_cupy_allclose(sp_name='sp', contiguous_check=False)
def test_dot_dense_matrix(self, xp, sp):
if runtime.is_hip:
if driver.get_build_version() < 400:
pytest.skip('no working implementation')
# no idea what's wrong...
elif self.make_method in (
'_make', '_make_unordered', '_make_duplicate'):
pytest.xfail('spMM raises HIPSPARSE_STATUS_INVALID_VALUE')
m = self.make(xp, sp, self.dtype)
x = xp.arange(8).reshape(4, 2).astype(self.dtype)
return m.dot(x)
@pytest.mark.skipif(runtime.is_hip and driver.get_build_version() <= 400,
reason='no working implementation')
def test_dot_dense_matrix_invalid_shape(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
x = xp.arange(10).reshape(5, 2).astype(self.dtype)
with pytest.raises(ValueError):
m.dot(x)
def test_dot_dense_ndim3(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
x = xp.arange(24).reshape(4, 2, 3).astype(self.dtype)
with pytest.raises(ValueError):
m.dot(x)
def test_dot_unsupported(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
with pytest.raises(TypeError):
m.dot(None)
# __add__
@testing.numpy_cupy_allclose(sp_name='sp')
def test_add_zero(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return m + 0
def test_add_scalar(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
with pytest.raises(NotImplementedError):
m + 1
@testing.numpy_cupy_allclose(sp_name='sp')
def test_add_csr(self, xp, sp):
m = self.make(xp, sp, self.dtype)
n = _make2(xp, sp, self.dtype)
return m + n
@testing.numpy_cupy_allclose(sp_name='sp')
def test_add_coo(self, xp, sp):
m = self.make(xp, sp, self.dtype)
n = _make2(xp, sp, self.dtype).tocoo()
return m + n
@testing.numpy_cupy_allclose(sp_name='sp', contiguous_check=False)
def test_add_dense(self, xp, sp):
m = self.make(xp, sp, self.dtype)
n = xp.arange(12).reshape(3, 4)
return m + n
# __radd__
@testing.numpy_cupy_allclose(sp_name='sp')
def test_radd_zero(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return 0 + m
def test_radd_scalar(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
with pytest.raises(NotImplementedError):
1 + m
@testing.numpy_cupy_allclose(sp_name='sp', contiguous_check=False)
def test_radd_dense(self, xp, sp):
m = self.make(xp, sp, self.dtype)
n = xp.arange(12).reshape(3, 4)
return n + m
# __sub__
@testing.numpy_cupy_allclose(sp_name='sp')
def test_sub_zero(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return m - 0
def test_sub_scalar(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
with pytest.raises(NotImplementedError):
m - 1
@testing.numpy_cupy_allclose(sp_name='sp')
def test_sub_csr(self, xp, sp):
m = self.make(xp, sp, self.dtype)
n = _make2(xp, sp, self.dtype)
return m - n
@testing.numpy_cupy_allclose(sp_name='sp')
def test_sub_coo(self, xp, sp):
m = self.make(xp, sp, self.dtype)
n = _make2(xp, sp, self.dtype).tocoo()
return m - n
@testing.numpy_cupy_allclose(sp_name='sp')
def test_sub_dense(self, xp, sp):
m = self.make(xp, sp, self.dtype)
n = xp.arange(12).reshape(3, 4)
return m - n
# __rsub__
@testing.numpy_cupy_allclose(sp_name='sp')
def test_rsub_zero(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return 0 - m
def test_rsub_scalar(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
with pytest.raises(NotImplementedError):
1 - m
@testing.numpy_cupy_allclose(sp_name='sp')
def test_rsub_dense(self, xp, sp):
m = self.make(xp, sp, self.dtype)
n = xp.arange(12).reshape(3, 4)
return n - m
# __mul__
@testing.numpy_cupy_allclose(sp_name='sp')
def test_mul_scalar(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return m * 2.0
@testing.numpy_cupy_allclose(sp_name='sp')
def test_mul_numpy_scalar(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return m * numpy.dtype(self.dtype).type(2.0)
@pytest.mark.skipif(runtime.is_hip and driver.get_build_version() < 400,
reason='no working implementation')
@testing.numpy_cupy_allclose(sp_name='sp', _check_sparse_format=False)
def test_mul_csr(self, xp, sp):
m = self.make(xp, sp, self.dtype)
x = _make3(xp, sp, self.dtype)
return m * x
@pytest.mark.skipif(runtime.is_hip and driver.get_build_version() < 400,
reason='no working implementation')
@testing.numpy_cupy_allclose(sp_name='sp', _check_sparse_format=False)
def test_mul_csc(self, xp, sp):
m = self.make(xp, sp, self.dtype)
x = _make3(xp, sp, self.dtype).tocsc()
return m * x
@pytest.mark.skipif(runtime.is_hip and driver.get_build_version() < 400,
reason='no working implementation')
@testing.numpy_cupy_allclose(sp_name='sp', _check_sparse_format=False)
def test_mul_sparse(self, xp, sp):
m = self.make(xp, sp, self.dtype)
x = _make3(xp, sp, self.dtype).tocoo()
return m * x
@testing.numpy_cupy_allclose(sp_name='sp')
def test_mul_zero_dim(self, xp, sp):
m = self.make(xp, sp, self.dtype)
x = xp.array(2, dtype=self.dtype)
return m * x
@testing.numpy_cupy_allclose(sp_name='sp')
def test_mul_dense_vector(self, xp, sp):
if runtime.is_hip:
HIP_version = driver.get_build_version()
if HIP_version < 400:
pytest.skip('no working implementation')
elif HIP_version < 5_00_00000:
# I got HIPSPARSE_STATUS_INTERNAL_ERROR...
pytest.xfail('spmv is buggy (trans=True)')
m = self.make(xp, sp, self.dtype)
x = xp.arange(4).astype(self.dtype)
return m * x
@testing.numpy_cupy_allclose(sp_name='sp', contiguous_check=False)
def test_mul_dense_matrix(self, xp, sp):
if runtime.is_hip:
if driver.get_build_version() < 400:
pytest.skip('no working implementation')
# no idea what's wrong...
elif self.make_method in (
'_make', '_make_unordered', '_make_duplicate'):
pytest.xfail('spMM raises HIPSPARSE_STATUS_INVALID_VALUE')
m = self.make(xp, sp, self.dtype)
x = xp.arange(8).reshape(4, 2).astype(self.dtype)
return m * x
def test_mul_dense_ndim3(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
x = xp.arange(24).reshape(4, 2, 3).astype(self.dtype)
with pytest.raises(ValueError):
m * x
def test_mul_unsupported(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
with pytest.raises(TypeError):
m * None
# __rmul__
@testing.numpy_cupy_allclose(sp_name='sp')
def test_rmul_scalar(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return 2.0 * m
@testing.numpy_cupy_allclose(sp_name='sp')
def test_rmul_numpy_scalar(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return numpy.dtype(self.dtype).type(2.0) * m
@pytest.mark.skipif(runtime.is_hip and driver.get_build_version() < 400,
reason='no working implementation')
@testing.numpy_cupy_allclose(sp_name='sp', _check_sparse_format=False)
def test_rmul_csr(self, xp, sp):
m = self.make(xp, sp, self.dtype)
x = _make3(xp, sp, self.dtype)
return x * m
@pytest.mark.skipif(runtime.is_hip and driver.get_build_version() < 400,
reason='no working implementation')
@testing.numpy_cupy_allclose(sp_name='sp', _check_sparse_format=False)
def test_rmul_csc(self, xp, sp):
m = self.make(xp, sp, self.dtype)
x = _make3(xp, sp, self.dtype).tocsc()
return x * m
@pytest.mark.skipif(runtime.is_hip and driver.get_build_version() < 400,
reason='no working implementation')
@testing.numpy_cupy_allclose(sp_name='sp')
def test_rmul_sparse(self, xp, sp):
m = self.make(xp, sp, self.dtype)
x = _make3(xp, sp, self.dtype).tocoo()
return x * m
@testing.numpy_cupy_allclose(sp_name='sp')
def test_rmul_zero_dim(self, xp, sp):
m = self.make(xp, sp, self.dtype)
x = xp.array(2, dtype=self.dtype)
return x * m
@testing.numpy_cupy_allclose(sp_name='sp', contiguous_check=False)
def test_rmul_dense_matrix(self, xp, sp):
m = self.make(xp, sp, self.dtype)
x = xp.arange(12).reshape(4, 3).astype(self.dtype)
return x * m
def test_rmul_dense_ndim3(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
x = xp.arange(24).reshape(4, 2, 3).astype(self.dtype)
with pytest.raises(ValueError):
x * m
def test_rmul_unsupported(self):
if (
numpy.lib.NumpyVersion(scipy.__version__) >= '1.8.0rc1' and
self.make_method not in ['_make_empty', '_make_shape']
):
pytest.xfail('See scipy/15210')
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
# TODO(unno): When a sparse matrix has no element, scipy.sparse
# does not raise an error.
if m.nnz == 0:
continue
with pytest.raises(TypeError):
None * m
# Note: '@' operator is almost equivalent to '*' operator. Only test the
# cases where '@' raises an exception and '*' does not.
def test_matmul_scalar(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
x = 2.0
with pytest.raises(ValueError):
m @ x
with pytest.raises(ValueError):
x @ m
def test_matmul_numpy_scalar(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
x = numpy.dtype(self.dtype).type(2.0)
with pytest.raises(ValueError):
m @ x
with pytest.raises(ValueError):
x @ m
def test_matmul_scalar_like_array(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
x = xp.array(2.0, self.dtype)
with pytest.raises(ValueError):
m @ x
with pytest.raises(ValueError):
x @ m
@testing.numpy_cupy_equal(sp_name='sp')
def test_has_canonical_format(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return m.has_canonical_format
@testing.numpy_cupy_allclose(sp_name='sp')
def test_has_canonical_format2(self, xp, sp):
        # this test is adapted from SciPy's
M = sp.csc_matrix((xp.array([2], dtype=self.dtype),
xp.array([0]), xp.array([0, 1])))
assert M.has_canonical_format is True
return M
@testing.numpy_cupy_allclose(sp_name='sp')
def test_has_canonical_format3(self, xp, sp):
        # this test is adapted from SciPy's
indices = xp.array([0, 0]) # contains duplicate
data = xp.array([1, 1], dtype=self.dtype)
indptr = xp.array([0, 2])
M = sp.csc_matrix((data, indices, indptr))
assert M.has_canonical_format is False
# set by deduplicating
M.sum_duplicates()
assert M.has_canonical_format is True
assert 1 == len(M.indices)
return M
@testing.numpy_cupy_allclose(sp_name='sp')
def test_has_canonical_format4(self, xp, sp):
        # this test is adapted from SciPy's
indices = xp.array([0, 0]) # contains duplicate
data = xp.array([1, 1], dtype=self.dtype)
indptr = xp.array([0, 2])
M = sp.csc_matrix((data, indices, indptr))
        # set manually (even though the underlying data holds duplicates)
M.has_canonical_format = True
assert M.has_canonical_format
assert 2 == len(M.indices) # unaffected content
# ensure deduplication bypassed when has_canonical_format == True
M.sum_duplicates()
assert 2 == len(M.indices) # unaffected content
return M
@testing.with_requires('scipy>1.6.0')
@testing.numpy_cupy_equal(sp_name='sp')
def test_has_sorted_indices(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return m.has_sorted_indices
# TODO(asi1024): Remove test after the fixed version is released.
# https://github.com/scipy/scipy/pull/13426
@testing.with_requires('scipy<=1.6.0')
@testing.numpy_cupy_equal(sp_name='sp')
def test_has_sorted_indices_for_old_scipy(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return bool(m.has_sorted_indices)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_has_sorted_indices2(self, xp, sp):
        # this test is adapted from SciPy's
sorted_inds = xp.array([0, 1])
data = xp.array([1, 1], dtype=self.dtype)
indptr = xp.array([0, 2])
M = sp.csc_matrix((data, sorted_inds, indptr))
assert M.has_sorted_indices
return M
@testing.numpy_cupy_allclose(sp_name='sp')
def test_has_sorted_indices3(self, xp, sp):
        # this test is adapted from SciPy's
sorted_inds = xp.array([0, 1])
unsorted_inds = xp.array([1, 0])
data = xp.array([1, 1], dtype=self.dtype)
indptr = xp.array([0, 2])
M = sp.csc_matrix((data, unsorted_inds, indptr))
assert not M.has_sorted_indices
# set by sorting
M.sort_indices()
assert M.has_sorted_indices
assert (M.indices == sorted_inds).all()
return M
@testing.numpy_cupy_allclose(sp_name='sp')
def test_has_sorted_indices4(self, xp, sp):
        # this test is adapted from SciPy's
unsorted_inds = xp.array([1, 0])
data = xp.array([1, 1], dtype=self.dtype)
indptr = xp.array([0, 2])
M = sp.csc_matrix((data, unsorted_inds, indptr))
        # set manually (even though the underlying indices are unsorted)
M.has_sorted_indices = True
assert M.has_sorted_indices
assert (M.indices == unsorted_inds).all()
# ensure sort bypassed when has_sorted_indices == True
M.sort_indices()
assert (M.indices == unsorted_inds).all()
return M
@testing.numpy_cupy_allclose(sp_name='sp')
def test_sort_indices(self, xp, sp):
m = self.make(xp, sp, self.dtype)
m.sort_indices()
assert m.has_sorted_indices
return m
@testing.numpy_cupy_allclose(sp_name='sp', contiguous_check=False)
def test_sort_indices2(self, xp, sp):
        # 1. this test is adapted from SciPy's.
# 2. we don't check the contiguity flag because SciPy and CuPy handle
# the underlying data differently
data = xp.arange(5).astype(xp.float32)
indices = xp.array([7, 2, 1, 5, 4])
indptr = xp.array([0, 3, 5])
asp = sp.csc_matrix((data, indices, indptr), shape=(10, 2))
asp.sort_indices()
assert (asp.indices == xp.array([1, 2, 7, 4, 5])).all()
return asp.todense()
@testing.numpy_cupy_allclose(sp_name='sp')
def test_sorted_indices(self, xp, sp):
m = self.make(xp, sp, self.dtype)
m = m.sorted_indices()
assert m.has_sorted_indices
return m
def test_sum_tuple_axis(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
with pytest.raises(TypeError):
m.sum(axis=(0, 1))
def test_sum_too_large_axis(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
with pytest.raises(ValueError):
m.sum(axis=3)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_sum_duplicates(self, xp, sp):
m = self.make(xp, sp, self.dtype)
m.sum_duplicates()
assert m.has_canonical_format
return m
@testing.numpy_cupy_allclose(sp_name='sp')
def test_transpose(self, xp, sp):
m = self.make(xp, sp, self.dtype)
return m.transpose()
def test_transpose_axes_int(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
m = self.make(xp, sp, self.dtype)
with pytest.raises(ValueError):
m.transpose(axes=0)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_eliminate_zeros(self, xp, sp):
m = self.make(xp, sp, self.dtype)
m.eliminate_zeros()
return m
@testing.numpy_cupy_equal(sp_name='sp')
@pytest.mark.skipif(
not runtime.is_hip and cupy.cuda.runtime.runtimeGetVersion() < 8000,
        reason='CUDA <8 cannot keep number of non-zero entries')
def test_eliminate_zeros_nnz(self, xp, sp):
m = self.make(xp, sp, self.dtype)
m.eliminate_zeros()
return m.nnz
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64],
'ret_dtype': [None, numpy.float32, numpy.float64],
'axis': [None, 0, 1, -1, -2],
}))
@testing.with_requires('scipy')
class TestCscMatrixSum:
@pytest.fixture(autouse=True)
def setUp(self):
if runtime.is_hip and self.axis in (None, -1, 1):
HIP_version = driver.get_build_version()
if HIP_version < 400:
pytest.skip('no working implementation')
elif HIP_version < 5_00_00000:
# I got HIPSPARSE_STATUS_INTERNAL_ERROR...
pytest.xfail('spmv is buggy (trans=True)')
@testing.numpy_cupy_allclose(sp_name='sp')
def test_sum(self, xp, sp):
m = _make(xp, sp, self.dtype)
return m.sum(axis=self.axis, dtype=self.ret_dtype)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_sum_with_out(self, xp, sp):
m = _make(xp, sp, self.dtype)
if self.axis is None:
shape = ()
else:
shape = list(m.shape)
shape[self.axis] = 1
shape = tuple(shape)
out = xp.empty(shape, dtype=self.ret_dtype)
if xp is numpy:
# TODO(unno): numpy.matrix is used for scipy.sparse though
# cupy.ndarray is used for cupyx.scipy.sparse.
out = xp.asmatrix(out)
return m.sum(axis=self.axis, dtype=self.ret_dtype, out=out)
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
}))
@testing.with_requires('scipy')
class TestCscMatrixScipyCompressed:
@testing.numpy_cupy_equal(sp_name='sp')
def test_get_shape(self, xp, sp):
return _make(xp, sp, self.dtype).get_shape()
@testing.numpy_cupy_equal(sp_name='sp')
def test_getnnz(self, xp, sp):
return _make(xp, sp, self.dtype).getnnz()
@testing.parameterize(*testing.product({
# TODO(takagi): Test dtypes
'axis': [None, -2, -1, 0, 1],
    'dense': [False, True],  # a sparse matrix, but with all elements filled
}))
@testing.with_requires('scipy>=0.19.0')
class TestCscMatrixScipyCompressedMinMax:
def _make_data_min(self, xp, sp, dense=False):
dm_data = testing.shaped_random((10, 20), xp=xp, scale=1.0)
if not dense:
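            # Zero out ~95% of the entries so the matrix is actually sparse.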
dm_data[abs(dm_data) < 0.95] = 0
return sp.csc_matrix(xp.array(dm_data))
def _make_data_max(self, xp, sp, dense=False):
return -self._make_data_min(xp, sp, dense=dense)
def _make_data_min_explicit(self, xp, sp, axis):
dm_data = testing.shaped_random((10, 20), xp=xp, scale=1.0)
if xp is cupy:
dm_data[dm_data < 0.95] = 0
else:
            # As SciPy's sparse matrix does not have an `explicit` parameter,
            # we make SciPy inputs such that SciPy's spmatrix.min(axis=axis)
# returns the same value as CuPy's spmatrix.min(axis=axis,
# explicit=True).
# Put infinity instead of zeros so spmatrix.min(axis=axis) returns
# the smallest numbers except for zero.
dm_data[dm_data < 0.95] = numpy.inf
if axis is None:
# If all elements in the array are set to infinity, we make it
# have at least a zero so SciPy's spmatrix.min(axis=None)
# returns zero.
if numpy.isinf(dm_data).all():
dm_data[0, 0] = 0
else:
if axis < 0:
axis += 2
# If all elements in a row/column are set to infinity, we make
# it have at least a zero so spmatrix.min(axis=axis) returns
# zero for the row/column.
mask = numpy.zeros_like(dm_data, dtype=numpy.bool_)
if axis == 0:
rows = dm_data.argmin(axis=0)
cols = numpy.arange(20)
else:
rows = numpy.arange(10)
cols = dm_data.argmin(axis=1)
mask[rows, cols] = numpy.isinf(dm_data[rows, cols])
dm_data[mask] = 0
return sp.csc_matrix(xp.array(dm_data))
def _make_data_max_explicit(self, xp, sp, axis):
return -self._make_data_min_explicit(xp, sp, axis=axis)
@testing.numpy_cupy_array_equal(sp_name='sp')
def test_min(self, xp, sp):
data = self._make_data_min(xp, sp, dense=self.dense)
return data.min(axis=self.axis)
@testing.numpy_cupy_array_equal(sp_name='sp')
def test_min_explicit(self, xp, sp):
data = self._make_data_min_explicit(xp, sp, axis=self.axis)
if xp is cupy:
return data.min(axis=self.axis, explicit=True)
else:
return data.min(axis=self.axis)
@testing.numpy_cupy_array_equal(sp_name='sp')
def test_max(self, xp, sp):
data = self._make_data_max(xp, sp, dense=self.dense)
return data.max(axis=self.axis)
@testing.numpy_cupy_array_equal(sp_name='sp')
def test_max_explicit(self, xp, sp):
data = self._make_data_max_explicit(xp, sp, axis=self.axis)
if xp is cupy:
return data.max(axis=self.axis, explicit=True)
else:
return data.max(axis=self.axis)
@testing.numpy_cupy_array_equal(sp_name='sp', type_check=False)
def test_argmin(self, xp, sp):
data = self._make_data_min(xp, sp, dense=self.dense)
# Due to a SciPy bug, the argmin output is different from the expected
# one
if self.axis is None and self.dense:
pytest.skip()
return xp.array(data.argmin(axis=self.axis))
@testing.numpy_cupy_array_equal(sp_name='sp', type_check=False)
def test_argmax(self, xp, sp):
data = self._make_data_max(xp, sp, dense=self.dense)
        # Due to a SciPy bug, the argmax output is different from the expected
        # one
if self.axis is None and self.dense:
pytest.skip()
return xp.array(data.argmax(axis=self.axis))
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
}))
@testing.with_requires('scipy')
class TestCscMatrixData:
@testing.numpy_cupy_equal(sp_name='sp')
def test_dtype(self, xp, sp):
return _make(xp, sp, self.dtype).dtype
@testing.numpy_cupy_allclose(sp_name='sp')
def test_abs(self, xp, sp):
m = _make(xp, sp, self.dtype)
return abs(m)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_neg(self, xp, sp):
m = _make(xp, sp, self.dtype)
return (-m)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_astype(self, xp, sp):
m = _make(xp, sp, self.dtype)
if numpy.dtype(self.dtype).kind == 'c':
t = 'D'
else:
t = 'd'
return m.astype(t)
@testing.numpy_cupy_equal(sp_name='sp')
def test_count_nonzero(self, xp, sp):
m = _make(xp, sp, self.dtype)
return m.count_nonzero()
@testing.numpy_cupy_allclose(sp_name='sp')
def test_power(self, xp, sp):
m = _make(xp, sp, self.dtype)
return m.power(2)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_power_with_dtype(self, xp, sp):
m = _make(xp, sp, self.dtype)
if numpy.dtype(self.dtype).kind == 'c':
t = 'D'
else:
t = 'd'
return m.power(2, t)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_mean_axis_None(self, xp, sp):
if runtime.is_hip:
HIP_version = driver.get_build_version()
if HIP_version < 400:
pytest.skip('no working implementation')
elif HIP_version < 5_00_00000:
# I got HIPSPARSE_STATUS_INTERNAL_ERROR...
pytest.xfail('spmv is buggy (trans=True)')
m = _make(xp, sp, self.dtype)
return m.mean(axis=None)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_mean_axis_0(self, xp, sp):
m = _make(xp, sp, self.dtype)
return m.mean(axis=0)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_mean_axis_1(self, xp, sp):
if runtime.is_hip:
HIP_version = driver.get_build_version()
if HIP_version < 400:
pytest.skip('no working implementation')
elif HIP_version < 5_00_00000:
# I got HIPSPARSE_STATUS_INTERNAL_ERROR...
pytest.xfail('spmv is buggy (trans=True)')
m = _make(xp, sp, self.dtype)
return m.mean(axis=1)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_mean_axis_negative_1(self, xp, sp):
if runtime.is_hip:
HIP_version = driver.get_build_version()
if HIP_version < 400:
pytest.skip('no working implementation')
elif HIP_version < 5_00_00000:
# I got HIPSPARSE_STATUS_INTERNAL_ERROR...
pytest.xfail('spmv is buggy (trans=True)')
m = _make(xp, sp, self.dtype)
return m.mean(axis=-1)
@testing.numpy_cupy_allclose(sp_name='sp')
def test_mean_axis_negative_2(self, xp, sp):
m = _make(xp, sp, self.dtype)
return m.mean(axis=-2)
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'ufunc': [
'arcsin', 'arcsinh', 'arctan', 'arctanh', 'ceil', 'deg2rad', 'expm1',
'floor', 'log1p', 'rad2deg', 'rint', 'sign', 'sin', 'sinh', 'sqrt',
'tan', 'tanh', 'trunc',
],
}))
@testing.with_requires('scipy')
class TestUfunc:
@testing.numpy_cupy_allclose(sp_name='sp', atol=1e-5)
def test_ufun(self, xp, sp):
x = _make(xp, sp, self.dtype)
x.data *= 0.1
func = getattr(x, self.ufunc)
complex_unsupported = {'ceil', 'deg2rad', 'floor', 'rad2deg', 'trunc'}
if (numpy.dtype(self.dtype).kind == 'c' and
self.ufunc in complex_unsupported):
with pytest.raises(TypeError):
func()
return xp.array(0)
else:
return func()
class TestIsspmatrixCsc:
def test_csr(self):
x = sparse.csr_matrix(
(cupy.array([], 'f'),
cupy.array([], 'i'),
cupy.array([0], 'i')),
shape=(0, 0), dtype='f')
assert not sparse.isspmatrix_csc(x)
def test_csc(self):
x = sparse.csc_matrix(
(cupy.array([], 'f'),
cupy.array([], 'i'),
cupy.array([0], 'i')),
shape=(0, 0), dtype='f')
assert sparse.isspmatrix_csc(x)
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
}))
@testing.with_requires('scipy>=1.4.0')
class TestCscMatrixGetitem:
@testing.numpy_cupy_equal(sp_name='sp')
def test_getitem_int_int(self, xp, sp):
assert _make(xp, sp, self.dtype)[0, 1] == 1
@testing.numpy_cupy_equal(sp_name='sp')
def test_getitem_int_int_not_found(self, xp, sp):
assert _make(xp, sp, self.dtype)[1, 1] == 0
@testing.numpy_cupy_equal(sp_name='sp')
def test_getitem_int_int_negative(self, xp, sp):
assert _make(xp, sp, self.dtype)[-1, -2] == 3
def test_getitem_int_int_too_small_row(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(IndexError):
_make(xp, sp, self.dtype)[-4, 0]
def test_getitem_int_int_too_large_row(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(IndexError):
_make(xp, sp, self.dtype)[3, 0]
def test_getitem_int_int_too_small_col(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(IndexError):
_make(xp, sp, self.dtype)[0, -5]
def test_getitem_int_int_too_large_col(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(IndexError):
_make(xp, sp, self.dtype)[0, 4]
@testing.numpy_cupy_allclose(sp_name='sp')
def test_getitem_int(self, xp, sp):
return _make(xp, sp, self.dtype)[:, 1]
@testing.numpy_cupy_allclose(sp_name='sp')
def test_getitem_negative_int(self, xp, sp):
return _make(xp, sp, self.dtype)[:, -1]
def test_getitem_int_too_small(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(IndexError):
_make(xp, sp, self.dtype)[:, -5]
def test_getitem_int_too_large(self):
for xp, sp in ((numpy, scipy.sparse), (cupy, sparse)):
with pytest.raises(IndexError):
_make(xp, sp, self.dtype)[:, 4]
@testing.numpy_cupy_allclose(sp_name='sp')
def test_getitem_slice(self, xp, sp):
return _make(xp, sp, self.dtype)[:, 1:3]
@testing.numpy_cupy_allclose(sp_name='sp')
def test_getitem_slice_negative(self, xp, sp):
return _make(xp, sp, self.dtype)[:, -2:-1]
# SciPy prior to 1.4 has bugs where either an IndexError is raised or a
# segfault occurs instead of returning an empty slice.
@testing.numpy_cupy_allclose(sp_name='sp')
def test_getitem_slice_start_larger_than_stop(self, xp, sp):
return _make(xp, sp, self.dtype)[:, 3:2]
@testing.numpy_cupy_allclose(sp_name='sp')
def test_getitem_rowslice_all(self, xp, sp):
# This test is adapted from Scipy's CSC tests
return _make(xp, sp, self.dtype)[slice(None, None, None)]
@testing.numpy_cupy_allclose(sp_name='sp')
@testing.with_requires('scipy>=1.9.3')
def test_getitem_rowslice_negative_stop(self, xp, sp):
# This test is adapted from Scipy's CSC tests
return _make(xp, sp, self.dtype)[slice(1, -2, 2)]
def test_getrow(self):
# This test is adapted from Scipy's CSC tests
N = 10
X = testing.shaped_random((N, N), cupy, seed=0)
X[X > 0.7] = 0
Xcsc = sparse.csc_matrix(X)
for i in range(N):
arr_row = X[i:i + 1, :]
csc_row = Xcsc.getrow(i)
assert sparse.isspmatrix_csr(csc_row)
assert (arr_row == csc_row.toarray()).all()
def test_getcol(self):
# This test is adapted from Scipy's CSC tests
N = 10
X = testing.shaped_random((N, N), cupy, seed=0)
X[X > 0.7] = 0
Xcsc = sparse.csc_matrix(X)
for i in range(N):
arr_col = X[:, i:i + 1]
csc_col = Xcsc.getcol(i)
assert sparse.isspmatrix_csc(csc_col)
assert (arr_col == csc_col.toarray()).all()
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
}))
@testing.with_requires('scipy>=1.4.0')
class TestCscMatrixGetitem2:
@testing.numpy_cupy_allclose(sp_name='sp')
def test_getitem_slice_start_too_small(self, xp, sp):
return _make(xp, sp, self.dtype)[:, -5:None]
@testing.numpy_cupy_allclose(sp_name='sp')
def test_getitem_slice_start_too_large(self, xp, sp):
return _make(xp, sp, self.dtype)[:, 5:None]
@testing.numpy_cupy_allclose(sp_name='sp')
def test_getitem_slice_stop_too_small(self, xp, sp):
return _make(xp, sp, self.dtype)[:, None:-5]
@testing.numpy_cupy_allclose(sp_name='sp')
def test_getitem_slice_stop_too_large(self, xp, sp):
return _make(xp, sp, self.dtype)[:, None:5]
| {
"content_hash": "ee8badde346f0232e59e6ff8ca98c374",
"timestamp": "",
"source": "github",
"line_count": 1554,
"max_line_length": 79,
"avg_line_length": 35.7998712998713,
"alnum_prop": 0.566300576995668,
"repo_name": "cupy/cupy",
"id": "b2be4476ac4ac259e38cd8b14c2f2fb4a269788c",
"size": "55633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/cupyx_tests/scipy_tests/sparse_tests/test_csc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "38"
},
{
"name": "C",
"bytes": "712019"
},
{
"name": "C++",
"bytes": "895316"
},
{
"name": "Cuda",
"bytes": "151799"
},
{
"name": "Cython",
"bytes": "1996454"
},
{
"name": "Dockerfile",
"bytes": "40251"
},
{
"name": "PowerShell",
"bytes": "7361"
},
{
"name": "Python",
"bytes": "4841354"
},
{
"name": "Shell",
"bytes": "24521"
}
],
"symlink_target": ""
} |
from collections import OrderedDict
import mock
from nose.tools import *
from website.notifications.events.base import Event, register, event_registry
from website.notifications.events.files import (
FileAdded, FileRemoved, FolderCreated, FileUpdated,
AddonFileCopied, AddonFileMoved, AddonFileRenamed,
)
from website.notifications.events import utils
from addons.base import signals
from framework.auth import Auth
from osf_tests import factories
from osf.utils.permissions import WRITE
from tests.base import OsfTestCase, NotificationTestCase
email_transactional = 'email_transactional'
email_digest = 'email_digest'
class TestEventNotImplemented(OsfTestCase):
"""
Test non-implemented errors
"""
@register('not_implemented')
class NotImplementedEvent(Event):
pass
def setUp(self):
super(TestEventNotImplemented, self).setUp()
self.user = factories.UserFactory()
self.auth = Auth(user=self.user)
self.node = factories.ProjectFactory(creator=self.user)
self.event = self.NotImplementedEvent(self.user, self.node, 'not_implemented')
@raises(NotImplementedError)
def test_text(self):
text = self.event.text_message
@raises(NotImplementedError)
def test_html(self):
html = self.event.html_message
@raises(NotImplementedError)
def test_url(self):
url = self.event.url
@raises(NotImplementedError)
def test_event(self):
event = self.event.event_type
class TestListOfFiles(OsfTestCase):
"""
List files given a list
"""
def setUp(self):
super(TestListOfFiles, self).setUp()
self.tree = {
'kind': 'folder',
'path': 'a',
'children': [
{
'kind': 'folder',
'path': 'b',
'children': [
{
'kind': 'file',
'path': 'e'
},
{
'kind': 'file',
'path': 'f'
}
]
},
{
'kind': 'file',
'path': 'c'
},
{
'kind': 'file',
'path': 'd'
}
]
}
def test_list_of_files(self):
assert_equal(['e', 'f', 'c', 'd'], utils.list_of_files(self.tree))
class TestEventExists(OsfTestCase):
# Add all possible called events here to ensure that the Event class can
# call them.
def setUp(self):
super(TestEventExists, self).setUp()
self.user = factories.UserFactory()
self.consolidate_auth = Auth(user=self.user)
self.node = factories.ProjectFactory(creator=self.user)
def test_get_file_updated(self):
# Event gets FileUpdated from file_updated
event = event_registry['file_updated'](self.user, self.node, 'file_updated', payload=file_payload)
assert_is_instance(event, FileUpdated)
def test_get_file_added(self):
# Event gets FileAdded from file_added
event = event_registry['file_added'](self.user, self.node, 'file_added', payload=file_payload)
assert_is_instance(event, FileAdded)
def test_get_file_removed(self):
# Event gets FileRemoved from file_removed
event = event_registry['file_removed'](self.user, self.node, 'file_removed', payload=file_deleted_payload)
assert_is_instance(event, FileRemoved)
def test_get_folder_created(self):
# Event gets FolderCreated from folder_created
event = event_registry['folder_created'](self.user, self.node, 'folder_created', payload=folder_created_payload)
assert_is_instance(event, FolderCreated)
def test_get_file_moved(self):
# Event gets AddonFileMoved from addon_file_moved
file_moved_payload = file_move_payload(self.node, self.node)
event = event_registry['addon_file_moved'](self.user, self.node, 'addon_file_moved', payload=file_moved_payload)
assert_is_instance(event, AddonFileMoved)
def test_get_file_copied(self):
# Event gets AddonFileCopied from addon_file_copied
file_copied_payload = file_copy_payload(self.node, self.node)
event = event_registry['addon_file_copied'](self.user, self.node, 'addon_file_copied',
payload=file_copied_payload)
assert_is_instance(event, AddonFileCopied)
def test_get_file_renamed(self):
        # Event gets AddonFileRenamed from addon_file_renamed
file_rename_payload = file_renamed_payload()
event = event_registry['addon_file_renamed'](self.user, self.node, 'addon_file_renamed',
payload=file_rename_payload)
assert_is_instance(event, AddonFileRenamed)
class TestSignalEvent(OsfTestCase):
def setUp(self):
super(TestSignalEvent, self).setUp()
self.user = factories.UserFactory()
self.auth = Auth(user=self.user)
self.node = factories.ProjectFactory(creator=self.user)
@mock.patch('website.notifications.events.files.FileAdded.perform')
def test_event_signal(self, mock_perform):
signals.file_updated.send(
user=self.user, target=self.node, event_type='file_added', payload=file_payload
)
assert_true(mock_perform.called)
class TestFileUpdated(OsfTestCase):
def setUp(self):
super(TestFileUpdated, self).setUp()
self.user_1 = factories.AuthUserFactory()
self.auth = Auth(user=self.user_1)
self.user_2 = factories.AuthUserFactory()
self.project = factories.ProjectFactory(creator=self.user_1)
# subscription
self.sub = factories.NotificationSubscriptionFactory(
_id=self.project._id + 'file_updated',
owner=self.project,
event_name='file_updated',
)
self.sub.save()
self.event = event_registry['file_updated'](self.user_2, self.project, 'file_updated', payload=file_payload)
def test_info_formed_correct(self):
assert_equal('{}_file_updated'.format(wb_path), self.event.event_type)
assert_equal('updated file "<b>{}</b>".'.format(materialized.lstrip('/')), self.event.html_message)
assert_equal('updated file "{}".'.format(materialized.lstrip('/')), self.event.text_message)
@mock.patch('website.notifications.emails.notify')
def test_file_updated(self, mock_notify):
self.event.perform()
# notify('exd', 'file_updated', 'user', self.project, timezone.now())
assert_true(mock_notify.called)
class TestFileAdded(NotificationTestCase):
def setUp(self):
super(TestFileAdded, self).setUp()
self.user = factories.UserFactory()
self.consolidate_auth = Auth(user=self.user)
self.project = factories.ProjectFactory()
self.project_subscription = factories.NotificationSubscriptionFactory(
_id=self.project._id + '_file_updated',
owner=self.project,
event_name='file_updated'
)
self.project_subscription.save()
self.user2 = factories.UserFactory()
self.event = event_registry['file_added'](self.user2, self.project, 'file_added', payload=file_payload)
def test_info_formed_correct(self):
assert_equal('{}_file_updated'.format(wb_path), self.event.event_type)
assert_equal('added file "<b>{}</b>".'.format(materialized.lstrip('/')), self.event.html_message)
assert_equal('added file "{}".'.format(materialized.lstrip('/')), self.event.text_message)
@mock.patch('website.notifications.emails.notify')
def test_file_added(self, mock_notify):
self.event.perform()
# notify('exd', 'file_updated', 'user', self.project, timezone.now())
assert_true(mock_notify.called)
class TestFileRemoved(NotificationTestCase):
def setUp(self):
super(TestFileRemoved, self).setUp()
self.user = factories.UserFactory()
self.consolidate_auth = Auth(user=self.user)
self.project = factories.ProjectFactory()
self.project_subscription = factories.NotificationSubscriptionFactory(
_id=self.project._id + '_file_updated',
owner=self.project,
event_name='file_updated'
)
self.project_subscription.save()
self.user2 = factories.UserFactory()
self.event = event_registry['file_removed'](
self.user2, self.project, 'file_removed', payload=file_deleted_payload
)
def test_info_formed_correct_file(self):
assert_equal('file_updated', self.event.event_type)
assert_equal('removed file "<b>{}</b>".'.format(materialized.lstrip('/')), self.event.html_message)
assert_equal('removed file "{}".'.format(materialized.lstrip('/')), self.event.text_message)
def test_info_formed_correct_folder(self):
assert_equal('file_updated', self.event.event_type)
self.event.payload['metadata']['materialized'] += u'/'
assert_equal(u'removed folder "<b>{}/</b>".'.format(materialized.lstrip('/')), self.event.html_message)
assert_equal(u'removed folder "{}/".'.format(materialized.lstrip('/')), self.event.text_message)
@mock.patch('website.notifications.emails.notify')
def test_file_removed(self, mock_notify):
self.event.perform()
# notify('exd', 'file_updated', 'user', self.project, timezone.now())
assert_true(mock_notify.called)
class TestFolderCreated(NotificationTestCase):
def setUp(self):
super(TestFolderCreated, self).setUp()
self.user = factories.UserFactory()
self.consolidate_auth = Auth(user=self.user)
self.project = factories.ProjectFactory()
self.project_subscription = factories.NotificationSubscriptionFactory(
_id=self.project._id + '_file_updated',
owner=self.project,
event_name='file_updated'
)
self.project_subscription.save()
self.user2 = factories.UserFactory()
self.event = event_registry['folder_created'](
self.user2, self.project, 'folder_created', payload=folder_created_payload
)
def test_info_formed_correct(self):
assert_equal('file_updated', self.event.event_type)
assert_equal('created folder "<b>Three/</b>".', self.event.html_message)
assert_equal('created folder "Three/".', self.event.text_message)
@mock.patch('website.notifications.emails.notify')
def test_folder_added(self, mock_notify):
self.event.perform()
assert_true(mock_notify.called)
class TestFolderFileRenamed(OsfTestCase):
def setUp(self):
super(TestFolderFileRenamed, self).setUp()
self.user_1 = factories.AuthUserFactory()
self.auth = Auth(user=self.user_1)
self.user_2 = factories.AuthUserFactory()
self.project = factories.ProjectFactory(creator=self.user_1)
# subscription
self.sub = factories.NotificationSubscriptionFactory(
_id=self.project._id + 'file_updated',
owner=self.project,
event_name='file_updated',
)
self.sub.save()
# Payload
file_renamed_payload = file_move_payload(self.project, self.project)
self.event = event_registry['addon_file_renamed'](
self.user_1, self.project, 'addon_file_renamed',
payload=file_renamed_payload
)
self.sub.email_digest.add(self.user_2)
self.sub.save()
def test_rename_file_html(self):
self.event.payload['destination']['materialized'] = '/One/Paper14.txt'
assert_equal(self.event.html_message, 'renamed file "<b>/One/Paper13.txt</b>" to "<b>/One/Paper14.txt</b>".')
def test_rename_folder_html(self):
self.event.payload['destination']['kind'] = 'folder'
self.event.payload['destination']['materialized'] = '/One/Two/Four'
self.event.payload['source']['materialized'] = '/One/Two/Three'
assert_equal(self.event.html_message, 'renamed folder "<b>/One/Two/Three</b>" to "<b>/One/Two/Four</b>".')
def test_rename_file_text(self):
self.event.payload['destination']['materialized'] = '/One/Paper14.txt'
assert_equal(self.event.text_message, 'renamed file "/One/Paper13.txt" to "/One/Paper14.txt".')
def test_rename_folder_text(self):
self.event.payload['destination']['kind'] = 'folder'
self.event.payload['destination']['materialized'] = '/One/Two/Four'
self.event.payload['source']['materialized'] = '/One/Two/Three'
assert_equal(self.event.text_message, 'renamed folder "/One/Two/Three" to "/One/Two/Four".')
class TestFileMoved(NotificationTestCase):
def setUp(self):
super(TestFileMoved, self).setUp()
self.user_1 = factories.AuthUserFactory()
self.auth = Auth(user=self.user_1)
self.user_2 = factories.AuthUserFactory()
self.user_3 = factories.AuthUserFactory()
self.user_4 = factories.AuthUserFactory()
self.project = factories.ProjectFactory(creator=self.user_1)
self.private_node = factories.NodeFactory(parent=self.project, is_public=False, creator=self.user_1)
# Payload
file_moved_payload = file_move_payload(self.private_node, self.project)
self.event = event_registry['addon_file_moved'](
self.user_2, self.private_node, 'addon_file_moved', payload=file_moved_payload
)
# Subscriptions
# for parent node
self.sub = factories.NotificationSubscriptionFactory(
_id=self.project._id + '_file_updated',
owner=self.project,
event_name='file_updated'
)
self.sub.save()
# for private node
self.private_sub = factories.NotificationSubscriptionFactory(
_id=self.private_node._id + '_file_updated',
owner=self.private_node,
event_name='file_updated'
)
self.private_sub.save()
# for file subscription
self.file_sub = factories.NotificationSubscriptionFactory(
_id='{pid}_{wbid}_file_updated'.format(
pid=self.project._id,
wbid=self.event.waterbutler_id
),
owner=self.project,
event_name='xyz42_file_updated'
)
self.file_sub.save()
def test_info_formed_correct(self):
# Move Event: Ensures data is correctly formatted
assert_equal('{}_file_updated'.format(wb_path), self.event.event_type)
# assert_equal('moved file "<b>{}</b>".', self.event.html_message)
# assert_equal('created folder "Three/".', self.event.text_message)
@mock.patch('website.notifications.emails.store_emails')
def test_user_performing_action_no_email(self, mock_store):
# Move Event: Makes sure user who performed the action is not
# included in the notifications
self.sub.email_digest.add(self.user_2)
self.sub.save()
self.event.perform()
assert_equal(0, mock_store.call_count)
@mock.patch('website.notifications.emails.store_emails')
def test_perform_store_called_once(self, mock_store):
# Move Event: Tests that store_emails is called once from perform
self.sub.email_transactional.add(self.user_1)
self.sub.save()
self.event.perform()
assert_equal(1, mock_store.call_count)
@mock.patch('website.notifications.emails.store_emails')
def test_perform_store_one_of_each(self, mock_store):
# Move Event: Tests that store_emails is called 3 times, one in
# each category
self.sub.email_transactional.add(self.user_1)
self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth)
self.project.save()
self.private_node.add_contributor(self.user_3, permissions=WRITE, auth=self.auth)
self.private_node.save()
self.sub.email_digest.add(self.user_3)
self.sub.save()
self.project.add_contributor(self.user_4, permissions=WRITE, auth=self.auth)
self.project.save()
self.file_sub.email_digest.add(self.user_4)
self.file_sub.save()
self.event.perform()
assert_equal(3, mock_store.call_count)
@mock.patch('website.notifications.emails.store_emails')
def test_remove_user_sent_once(self, mock_store):
# Move Event: Tests removed user is removed once. Regression
self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth)
self.project.save()
self.file_sub.email_digest.add(self.user_3)
self.file_sub.save()
self.event.perform()
assert_equal(1, mock_store.call_count)
class TestFileCopied(NotificationTestCase):
# Test the copying of files
def setUp(self):
super(TestFileCopied, self).setUp()
self.user_1 = factories.AuthUserFactory()
self.auth = Auth(user=self.user_1)
self.user_2 = factories.AuthUserFactory()
self.user_3 = factories.AuthUserFactory()
self.user_4 = factories.AuthUserFactory()
self.project = factories.ProjectFactory(creator=self.user_1)
self.private_node = factories.NodeFactory(parent=self.project, is_public=False, creator=self.user_1)
# Payload
file_copied_payload = file_copy_payload(self.private_node, self.project)
self.event = event_registry['addon_file_copied'](
self.user_2, self.private_node, 'addon_file_copied',
payload=file_copied_payload
)
# Subscriptions
# for parent node
self.sub = factories.NotificationSubscriptionFactory(
_id=self.project._id + '_file_updated',
owner=self.project,
event_name='file_updated'
)
self.sub.save()
# for private node
self.private_sub = factories.NotificationSubscriptionFactory(
_id=self.private_node._id + '_file_updated',
owner=self.private_node,
event_name='file_updated'
)
self.private_sub.save()
# for file subscription
self.file_sub = factories.NotificationSubscriptionFactory(
_id='{pid}_{wbid}_file_updated'.format(
pid=self.project._id,
wbid=self.event.waterbutler_id
),
owner=self.project,
event_name='xyz42_file_updated'
)
self.file_sub.save()
def test_info_correct(self):
# Move Event: Ensures data is correctly formatted
assert_equal('{}_file_updated'.format(wb_path), self.event.event_type)
assert_equal(('copied file "<b>One/Paper13.txt</b>" from OSF Storage'
' in Consolidate to "<b>Two/Paper13.txt</b>" in OSF'
' Storage in Consolidate.'), self.event.html_message)
assert_equal(('copied file "One/Paper13.txt" from OSF Storage'
' in Consolidate to "Two/Paper13.txt" in OSF'
' Storage in Consolidate.'), self.event.text_message)
@mock.patch('website.notifications.emails.store_emails')
def test_copied_one_of_each(self, mock_store):
# Copy Event: Tests that store_emails is called 2 times, two with
# permissions, one without
self.sub.email_transactional.add(self.user_1)
self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth)
self.project.save()
self.private_node.add_contributor(self.user_3, permissions=WRITE, auth=self.auth)
self.private_node.save()
self.sub.email_digest.add(self.user_3)
self.sub.save()
self.project.add_contributor(self.user_4, permissions=WRITE, auth=self.auth)
self.project.save()
self.file_sub.email_digest.add(self.user_4)
self.file_sub.save()
self.event.perform()
assert_equal(2, mock_store.call_count)
@mock.patch('website.notifications.emails.store_emails')
def test_user_performing_action_no_email(self, mock_store):
# Move Event: Makes sure user who performed the action is not
# included in the notifications
self.sub.email_digest.add(self.user_2)
self.sub.save()
self.event.perform()
assert_equal(0, mock_store.call_count)
class TestCategorizeUsers(NotificationTestCase):
def setUp(self):
super(TestCategorizeUsers, self).setUp()
self.user_1 = factories.AuthUserFactory()
self.auth = Auth(user=self.user_1)
self.user_2 = factories.AuthUserFactory()
self.user_3 = factories.AuthUserFactory()
self.user_4 = factories.AuthUserFactory()
self.project = factories.ProjectFactory(creator=self.user_1)
self.private_node = factories.NodeFactory(
parent=self.project, is_public=False, creator=self.user_1
)
# Payload
file_moved_payload = file_move_payload(self.private_node, self.project)
self.event = event_registry['addon_file_moved'](
self.user_2, self.private_node, 'addon_file_moved',
payload=file_moved_payload
)
# Subscriptions
# for parent node
self.sub = factories.NotificationSubscriptionFactory(
_id=self.project._id + '_file_updated',
owner=self.project,
event_name='file_updated'
)
self.sub.save()
# for private node
self.private_sub = factories.NotificationSubscriptionFactory(
_id=self.private_node._id + '_file_updated',
owner=self.private_node,
event_name='file_updated'
)
self.private_sub.save()
# for file subscription
self.file_sub = factories.NotificationSubscriptionFactory(
_id='{pid}_{wbid}_file_updated'.format(
pid=self.project._id,
wbid=self.event.waterbutler_id
),
owner=self.project,
event_name='xyz42_file_updated'
)
self.file_sub.save()
def test_warn_user(self):
# Tests that a user with a sub in the origin node gets a warning that
# they are no longer tracking the file.
self.sub.email_transactional.add(self.user_1)
self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth)
self.project.save()
self.private_node.add_contributor(self.user_3, permissions=WRITE, auth=self.auth)
self.private_node.save()
self.sub.email_digest.add(self.user_3)
self.sub.save()
self.private_sub.none.add(self.user_3)
self.private_sub.save()
moved, warn, removed = utils.categorize_users(
self.event.user, self.event.event_type, self.event.source_node,
self.event.event_type, self.event.node
)
assert_equal({email_transactional: [], email_digest: [self.user_3._id], 'none': []}, warn)
assert_equal({email_transactional: [self.user_1._id], email_digest: [], 'none': []}, moved)
def test_moved_user(self):
# Doesn't warn a user with two different subs, but does send a
# moved email
self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth)
self.project.save()
self.private_node.add_contributor(self.user_3, permissions=WRITE, auth=self.auth)
self.private_node.save()
self.sub.email_digest.add(self.user_3)
self.sub.save()
self.private_sub.email_transactional.add(self.user_3)
self.private_sub.save()
moved, warn, removed = utils.categorize_users(
self.event.user, self.event.event_type, self.event.source_node,
self.event.event_type, self.event.node
)
assert_equal({email_transactional: [], email_digest: [], 'none': []}, warn)
assert_equal({email_transactional: [self.user_3._id], email_digest: [], 'none': []}, moved)
def test_remove_user(self):
self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth)
self.project.save()
self.file_sub.email_transactional.add(self.user_3)
self.file_sub.save()
moved, warn, removed = utils.categorize_users(
self.event.user, self.event.event_type, self.event.source_node,
self.event.event_type, self.event.node
)
assert_equal({email_transactional: [self.user_3._id], email_digest: [], 'none': []}, removed)
def test_node_permissions(self):
self.private_node.add_contributor(self.user_3, permissions=WRITE)
self.private_sub.email_digest.add(self.user_3, self.user_4)
remove = {email_transactional: [], email_digest: [], 'none': []}
warn = {email_transactional: [], email_digest: [self.user_3._id, self.user_4._id], 'none': []}
subbed, remove = utils.subscriptions_node_permissions(
self.private_node,
warn,
remove
)
assert_equal({email_transactional: [], email_digest: [self.user_3._id], 'none': []}, subbed)
assert_equal({email_transactional: [], email_digest: [self.user_4._id], 'none': []}, remove)
class TestSubscriptionManipulations(OsfTestCase):
def setUp(self):
super(TestSubscriptionManipulations, self).setUp()
self.emails_1 = {
email_digest: ['a1234', 'b1234', 'c1234'],
email_transactional: ['d1234', 'e1234', 'f1234'],
'none': ['g1234', 'h1234', 'i1234']
}
self.emails_2 = {
email_digest: ['j1234'],
email_transactional: ['k1234'],
'none': ['l1234']
}
self.emails_3 = {
email_digest: ['b1234', 'c1234'],
email_transactional: ['e1234', 'f1234'],
'none': ['h1234', 'i1234']
}
self.emails_4 = {
email_digest: ['d1234', 'i1234'],
email_transactional: ['b1234'],
'none': []
}
self.diff_1_3 = {email_transactional: ['d1234'], 'none': ['g1234'], email_digest: ['a1234']}
self.union_1_2 = {'email_transactional': ['e1234', 'd1234', 'k1234', 'f1234'],
'none': ['h1234', 'g1234', 'i1234', 'l1234'],
'email_digest': ['j1234', 'b1234', 'a1234', 'c1234']}
self.dup_1_3 = {email_transactional: ['e1234', 'f1234'], 'none': ['h1234', 'g1234'],
'email_digest': ['a1234', 'c1234']}
def test_subscription_user_difference(self):
result = utils.subscriptions_users_difference(self.emails_1, self.emails_3)
assert_equal(self.diff_1_3, result)
def test_subscription_user_union(self):
result = utils.subscriptions_users_union(self.emails_1, self.emails_2)
assert_equal(self.union_1_2, result)
def test_remove_duplicates(self):
result = utils.subscriptions_users_remove_duplicates(
self.emails_1, self.emails_4, remove_same=False
)
assert_equal(self.dup_1_3, result)
def test_remove_duplicates_true(self):
result = utils.subscriptions_users_remove_duplicates(
self.emails_1, self.emails_1, remove_same=True
)
assert_equal({email_digest: [], email_transactional: [], 'none': ['h1234', 'g1234', 'i1234']}, result)
wb_path = u'5581cb50a24f710b0f4623f9'
materialized = u'/One/Paper13.txt'
provider = u'osfstorage'
name = u'Paper13.txt'
file_payload = OrderedDict([
(u'action', u'update'),
(u'auth', OrderedDict([
(u'email', u'[email protected]'), (u'id', u'tgn6m'), (u'name', u'aab')])),
(u'metadata', OrderedDict([
(u'contentType', None),
(u'etag', u'10485efa4069bb94d50588df2e7466a079d49d4f5fd7bf5b35e7c0d5b12d76b7'),
(u'extra', OrderedDict([
(u'downloads', 0),
(u'version', 30)])),
(u'kind', u'file'),
(u'materialized', materialized),
(u'modified', u'Wed, 24 Jun 2015 10:45:01 '),
(u'name', name),
(u'path', wb_path),
(u'provider', provider),
(u'size', 2008)])),
(u'provider', provider),
(u'time', 1435157161.979904)])
file_deleted_payload = OrderedDict([
(u'action', u'delete'),
(u'auth', OrderedDict([
(u'email', u'[email protected]'), (u'id', u'tgn6m'), (u'name', u'aab')])),
(u'metadata', OrderedDict([
(u'materialized', materialized),
(u'path', materialized)])), # Deleted files don't get wb_paths
(u'provider', u'osfstorage'),
(u'time', 1435157876.690203)])
folder_created_payload = OrderedDict([
(u'action', u'create_folder'),
(u'auth', OrderedDict([
(u'email', u'[email protected]'), (u'id', u'tgn6m'), (u'name', u'aab')])),
(u'metadata', OrderedDict([
(u'etag', u'5caf8ab73c068565297e455ebce37fd64b6897a2284ec9d7ecba8b6093082bcd'),
(u'extra', OrderedDict()),
(u'kind', u'folder'),
(u'materialized', u'/Three/'),
(u'name', u'Three'),
(u'path', u'558ac595a24f714eff336d66/'),
(u'provider', u'osfstorage')])),
(u'provider', u'osfstorage'),
(u'time', 1435157969.475282)])
def file_move_payload(new_node, old_node):
return OrderedDict([
(u'action', u'move'),
(u'auth', OrderedDict([
(u'email', 'Bob'), (u'id', 'bob2'), (u'name', 'Bob')])),
(u'destination', OrderedDict([
(u'contentType', None),
(u'etag', u'10485efa4069bb94d50588df2e7466a079d49d4f5fd7bf5b35e7c0d5b12d76b7'),
(u'extra', OrderedDict([
(u'downloads', 0),
(u'version', 30)])),
(u'kind', u'file'),
(u'materialized', materialized),
(u'modified', None),
(u'name', name),
(u'nid', str(new_node)),
(u'path', wb_path),
(u'provider', provider),
(u'size', 2008),
('url', '/project/nhgts/files/osfstorage/5581cb50a24f710b0f4623f9/'),
('node', {'url': '/{}/'.format(new_node._id), '_id': new_node._id, 'title': u'Consolidate2'}),
('addon', 'OSF Storage')])),
(u'source', OrderedDict([
(u'materialized', materialized),
(u'name', u'Paper13.txt'),
(u'nid', str(old_node)),
(u'path', materialized), # Not wb path
(u'provider', provider),
('url', '/project/nhgts/files/osfstorage/One/Paper13.txt/'),
('node', {'url': '/{}/'.format(old_node._id), '_id': old_node._id, 'title': u'Consolidate'}),
('addon', 'OSF Storage')])),
(u'time', 1435158051.204264),
('node', u'nhgts'),
('project', None)])
def file_copy_payload(new_node, old_node):
return OrderedDict([
(u'action', u'copy'),
(u'auth', OrderedDict([
(u'email', u'[email protected]'),
(u'id', u'tgn6m'),
(u'name', u'aab')])),
(u'destination', OrderedDict([
(u'contentType', None),
(u'etag', u'16075ae3e546971003095beef8323584de40b1fcbf52ed4bb9e7f8547e322824'),
(u'extra', OrderedDict([
(u'downloads', 0),
(u'version', 30)])),
(u'kind', u'file'),
(u'materialized', u'Two/Paper13.txt'),
(u'modified', None),
(u'name', u'Paper13.txt'),
(u'nid', u'nhgts'),
(u'path', wb_path),
(u'provider', u'osfstorage'),
(u'size', 2008),
('url', '/project/nhgts/files/osfstorage/558ac45da24f714eff336d59/'),
('node', {'url': '/nhgts/', '_id': old_node._id, 'title': u'Consolidate'}),
('addon', 'OSF Storage')])),
(u'source', OrderedDict([
(u'materialized', u'One/Paper13.txt'),
(u'name', u'Paper13.txt'),
(u'nid', u'nhgts'),
(u'path', u'One/Paper13.txt'),
(u'provider', u'osfstorage'),
('url', '/project/nhgts/files/osfstorage/One/Paper13.txt/'),
('node', {'url': '/nhgts/', '_id': new_node._id, 'title': u'Consolidate'}),
('addon', 'OSF Storage')])),
(u'time', 1435157658.036183),
('node', u'nhgts'),
('project', None)])
def file_renamed_payload():
return OrderedDict([
(u'action', u'move'),
(u'auth', OrderedDict([
(u'email', u'[email protected]'),
(u'id', u'tgn6m'),
(u'name', u'aab')])),
(u'destination', OrderedDict([
(u'contentType', None),
(u'etag', u'0e9bfddcb5a59956ae60e93f32df06b174ad33b53d8a2f2cd08c780cf34a9d93'),
(u'extra', OrderedDict([
(u'downloads', 0),
(u'hashes', OrderedDict([
(u'md5', u'79a64594dd446674ce1010007ac2bde7'),
(u'sha256', u'bf710301e591f6f5ce35aa8971cfc938b39dae0fedcb9915656dded6ad025580')])),
(u'version', 1)])),
(u'kind', u'file'),
(u'materialized', u'Fibery/file2.pdf'),
(u'modified', u'2015-05-07T10:54:32'),
(u'name', u'file2.pdf'),
(u'nid', u'wp6xv'),
(u'path', u'/55f07134a24f71b2a24f4812'),
(u'provider', u'osfstorage'),
(u'size', 21209),
('url', '/project/wp6xv/files/osfstorage/55f07134a24f71b2a24f4812/'),
('node', {'url': '/wp6xv/', '_id': u'wp6xv', 'title': u'File_Notify4'}),
('addon', 'OSF Storage')])),
(u'source', OrderedDict([
(u'materialized', u'Fibery/!--i--2.pdf'),
(u'name', u'!--i--2.pdf'), (u'nid', u'wp6xv'),
(u'path', u'Fibery/!--i--2.pdf'),
(u'provider', u'osfstorage'),
('url', '/project/wp6xv/files/osfstorage/Fibery/%21--i--2.pdf/'),
('node', {'url': '/wp6xv/', '_id': u'wp6xv', 'title': u'File_Notify4'}),
('addon', 'OSF Storage')])),
(u'time', 1441905340.876648),
('node', u'wp6xv'),
('project', None)])
| {
"content_hash": "30c8d965a5cf1b7c466a1cecf7defc14",
"timestamp": "",
"source": "github",
"line_count": 815,
"max_line_length": 120,
"avg_line_length": 42.00490797546012,
"alnum_prop": 0.5959280247706958,
"repo_name": "mattclark/osf.io",
"id": "560c7e8979c665d237dec980e4adb4916ea11646",
"size": "34234",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/test_events.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "92773"
},
{
"name": "Dockerfile",
"bytes": "8456"
},
{
"name": "HTML",
"bytes": "317371"
},
{
"name": "JavaScript",
"bytes": "1792241"
},
{
"name": "Mako",
"bytes": "654772"
},
{
"name": "Python",
"bytes": "10166997"
},
{
"name": "VCL",
"bytes": "13885"
}
],
"symlink_target": ""
} |
"""
Searching for names with given scope and name. This is very central in Jedi and
Python. The name resolution is quite complicated with descripter,
``__getattribute__``, ``__getattr__``, ``global``, etc.
If you want to understand name resolution, please read the first few chapters
in http://blog.ionelmc.ro/2015/02/09/understanding-python-metaclasses/.
Flow checks
+++++++++++
Flow checks are not really mature. There's only a check for ``isinstance``. It
would check whether a flow has the form of ``if isinstance(a, type_or_tuple)``.
Unfortunately every other thing is being ignored (e.g. a == '' would be easy to
check for -> a is a string). There's big potential in these checks.
"""
from parso.python import tree
from parso.tree import search_ancestor
from jedi import debug
from jedi.common import unite
from jedi import settings
from jedi.evaluate import representation as er
from jedi.evaluate.instance import AbstractInstanceContext
from jedi.evaluate import compiled
from jedi.evaluate import pep0484
from jedi.evaluate import iterable
from jedi.evaluate import imports
from jedi.evaluate import analysis
from jedi.evaluate import flow_analysis
from jedi.evaluate import param
from jedi.evaluate import helpers
from jedi.evaluate.filters import get_global_filters, TreeNameDefinition
from jedi.evaluate.context import ContextualizedName, ContextualizedNode
from jedi.parser_utils import is_scope, get_parent_scope
class NameFinder(object):
def __init__(self, evaluator, context, name_context, name_or_str,
position=None, analysis_errors=True):
self._evaluator = evaluator
# Make sure that it's not just a syntax tree node.
self._context = context
self._name_context = name_context
self._name = name_or_str
if isinstance(name_or_str, tree.Name):
self._string_name = name_or_str.value
else:
self._string_name = name_or_str
self._position = position
self._found_predefined_types = None
self._analysis_errors = analysis_errors
@debug.increase_indent
def find(self, filters, attribute_lookup):
"""
        :param bool attribute_lookup: Tells the logic whether we're accessing
            an attribute or the contents of e.g. a function.
"""
names = self.filter_name(filters)
if self._found_predefined_types is not None and names:
check = flow_analysis.reachability_check(
self._context, self._context.tree_node, self._name)
if check is flow_analysis.UNREACHABLE:
return set()
return self._found_predefined_types
types = self._names_to_types(names, attribute_lookup)
if not names and self._analysis_errors and not types \
and not (isinstance(self._name, tree.Name) and
isinstance(self._name.parent.parent, tree.Param)):
if isinstance(self._name, tree.Name):
if attribute_lookup:
analysis.add_attribute_error(
self._name_context, self._context, self._name)
else:
message = ("NameError: name '%s' is not defined."
% self._string_name)
analysis.add(self._name_context, 'name-error', self._name, message)
return types
def _get_origin_scope(self):
if isinstance(self._name, tree.Name):
scope = self._name
while scope.parent is not None:
# TODO why if classes?
if not isinstance(scope, tree.Scope):
break
scope = scope.parent
return scope
else:
return None
def get_filters(self, search_global=False):
origin_scope = self._get_origin_scope()
if search_global:
return get_global_filters(self._evaluator, self._context, self._position, origin_scope)
else:
return self._context.get_filters(search_global, self._position, origin_scope=origin_scope)
def filter_name(self, filters):
"""
Searches names that are defined in a scope (the different
``filters``), until a name fits.
"""
names = []
if self._context.predefined_names:
# TODO is this ok? node might not always be a tree.Name
node = self._name
while node is not None and not is_scope(node):
node = node.parent
if node.type in ("if_stmt", "for_stmt", "comp_for"):
try:
name_dict = self._context.predefined_names[node]
types = name_dict[self._string_name]
except KeyError:
continue
else:
self._found_predefined_types = types
break
for filter in filters:
names = filter.get(self._string_name)
if names:
if len(names) == 1:
n, = names
if isinstance(n, TreeNameDefinition):
# Something somewhere went terribly wrong. This
# typically happens when using goto on an import in an
# __init__ file. I think we need a better solution, but
# it's kind of hard, because for Jedi it's not clear
# that that name has not been defined, yet.
if n.tree_name == self._name:
if self._name.get_definition().type == 'import_from':
continue
break
debug.dbg('finder.filter_name "%s" in (%s): %s@%s', self._string_name,
self._context, names, self._position)
return list(names)
def _check_getattr(self, inst):
"""Checks for both __getattr__ and __getattribute__ methods"""
# str is important, because it shouldn't be `Name`!
name = compiled.create(self._evaluator, self._string_name)
# This is a little bit special. `__getattribute__` is in Python
# executed before `__getattr__`. But: I know no use case, where
# this could be practical and where Jedi would return wrong types.
# If you ever find something, let me know!
        # We are inverting this, because a hand-crafted `__getattribute__`
        # could still call another hand-crafted `__getattr__`, but not the
        # other way around.
names = (inst.get_function_slot_names('__getattr__') or
inst.get_function_slot_names('__getattribute__'))
return inst.execute_function_slots(names, name)
def _names_to_types(self, names, attribute_lookup):
types = set()
types = unite(name.infer() for name in names)
debug.dbg('finder._names_to_types: %s -> %s', names, types)
if not names and isinstance(self._context, AbstractInstanceContext):
# handling __getattr__ / __getattribute__
return self._check_getattr(self._context)
# Add isinstance and other if/assert knowledge.
if not types and isinstance(self._name, tree.Name) and \
not isinstance(self._name_context, AbstractInstanceContext):
flow_scope = self._name
base_node = self._name_context.tree_node
if base_node.type == 'comp_for':
return types
while True:
flow_scope = get_parent_scope(flow_scope, include_flows=True)
n = _check_flow_information(self._name_context, flow_scope,
self._name, self._position)
if n is not None:
return n
if flow_scope == base_node:
break
return types
def _name_to_types(evaluator, context, tree_name):
types = []
node = tree_name.get_definition(import_name_always=True)
if node is None:
node = tree_name.parent
if node.type == 'global_stmt':
context = evaluator.create_context(context, tree_name)
finder = NameFinder(evaluator, context, context, tree_name.value)
filters = finder.get_filters(search_global=True)
# For global_stmt lookups, we only need the first possible scope,
# which means the function itself.
filters = [next(filters)]
return finder.find(filters, attribute_lookup=False)
elif node.type not in ('import_from', 'import_name'):
raise ValueError("Should not happen.")
typ = node.type
if typ == 'for_stmt':
types = pep0484.find_type_from_comment_hint_for(context, node, tree_name)
if types:
return types
if typ == 'with_stmt':
types = pep0484.find_type_from_comment_hint_with(context, node, tree_name)
if types:
return types
if typ in ('for_stmt', 'comp_for'):
try:
types = context.predefined_names[node][tree_name.value]
except KeyError:
cn = ContextualizedNode(context, node.children[3])
for_types = iterable.py__iter__types(evaluator, cn.infer(), cn)
c_node = ContextualizedName(context, tree_name)
types = check_tuple_assignments(evaluator, c_node, for_types)
elif typ == 'expr_stmt':
types = _remove_statements(evaluator, context, node, tree_name)
elif typ == 'with_stmt':
context_managers = context.eval_node(node.get_test_node_from_name(tree_name))
enter_methods = unite(
context_manager.py__getattribute__('__enter__')
for context_manager in context_managers
)
types = unite(method.execute_evaluated() for method in enter_methods)
elif typ in ('import_from', 'import_name'):
types = imports.infer_import(context, tree_name)
elif typ in ('funcdef', 'classdef'):
types = _apply_decorators(evaluator, context, node)
elif typ == 'try_stmt':
# TODO an exception can also be a tuple. Check for those.
# TODO check for types that are not classes and add it to
# the static analysis report.
exceptions = context.eval_node(tree_name.get_previous_sibling().get_previous_sibling())
types = unite(
evaluator.execute(t, param.ValuesArguments([]))
for t in exceptions
)
else:
raise ValueError("Should not happen.")
return types
def _apply_decorators(evaluator, context, node):
"""
    Returns the function that should be executed in the end.
    This is also the place where the decorators are processed.
"""
if node.type == 'classdef':
decoratee_context = er.ClassContext(
evaluator,
parent_context=context,
classdef=node
)
else:
decoratee_context = er.FunctionContext(
evaluator,
parent_context=context,
funcdef=node
)
initial = values = set([decoratee_context])
for dec in reversed(node.get_decorators()):
debug.dbg('decorator: %s %s', dec, values)
dec_values = context.eval_node(dec.children[1])
trailer_nodes = dec.children[2:-1]
if trailer_nodes:
# Create a trailer and evaluate it.
trailer = tree.PythonNode('trailer', trailer_nodes)
trailer.parent = dec
dec_values = evaluator.eval_trailer(context, dec_values, trailer)
if not len(dec_values):
debug.warning('decorator not found: %s on %s', dec, node)
return initial
values = unite(dec_value.execute(param.ValuesArguments([values]))
for dec_value in dec_values)
if not len(values):
debug.warning('not possible to resolve wrappers found %s', node)
return initial
debug.dbg('decorator end %s', values)
return values
def _remove_statements(evaluator, context, stmt, name):
"""
This is the part where statements are being stripped.
Due to lazy evaluation, statements like a = func; b = a; b() have to be
evaluated.
"""
types = set()
check_instance = None
pep0484types = \
pep0484.find_type_from_comment_hint_assign(context, stmt, name)
if pep0484types:
return pep0484types
types |= context.eval_stmt(stmt, seek_name=name)
if check_instance is not None:
# class renames
types = set([er.get_instance_el(evaluator, check_instance, a, True)
if isinstance(a, er.Function) else a for a in types])
return types
def _check_flow_information(context, flow, search_name, pos):
""" Try to find out the type of a variable just with the information that
is given by the flows: e.g. It is also responsible for assert checks.::
if isinstance(k, str):
k. # <- completion here
ensures that `k` is a string.
"""
if not settings.dynamic_flow_information:
return None
result = None
if is_scope(flow):
# Check for asserts.
module_node = flow.get_root_node()
try:
names = module_node.get_used_names()[search_name.value]
except KeyError:
return None
names = reversed([
n for n in names
if flow.start_pos <= n.start_pos < (pos or flow.end_pos)
])
for name in names:
ass = search_ancestor(name, 'assert_stmt')
if ass is not None:
result = _check_isinstance_type(context, ass.assertion, search_name)
if result is not None:
return result
if flow.type in ('if_stmt', 'while_stmt'):
potential_ifs = [c for c in flow.children[1::4] if c != ':']
for if_test in reversed(potential_ifs):
if search_name.start_pos > if_test.end_pos:
return _check_isinstance_type(context, if_test, search_name)
return result
def _check_isinstance_type(context, element, search_name):
try:
assert element.type in ('power', 'atom_expr')
# this might be removed if we analyze and, etc
assert len(element.children) == 2
first, trailer = element.children
assert first.type == 'name' and first.value == 'isinstance'
assert trailer.type == 'trailer' and trailer.children[0] == '('
assert len(trailer.children) == 3
# arglist stuff
arglist = trailer.children[1]
args = param.TreeArguments(context.evaluator, context, arglist, trailer)
param_list = list(args.unpack())
# Disallow keyword arguments
assert len(param_list) == 2
(key1, lazy_context_object), (key2, lazy_context_cls) = param_list
assert key1 is None and key2 is None
call = helpers.call_of_leaf(search_name)
is_instance_call = helpers.call_of_leaf(lazy_context_object.data)
# Do a simple get_code comparison. They should just have the same code,
# and everything will be all right.
normalize = context.evaluator.grammar._normalize
assert normalize(is_instance_call) == normalize(call)
except AssertionError:
return None
result = set()
for cls_or_tup in lazy_context_cls.infer():
if isinstance(cls_or_tup, iterable.AbstractSequence) and \
cls_or_tup.array_type == 'tuple':
for lazy_context in cls_or_tup.py__iter__():
for context in lazy_context.infer():
result |= context.execute_evaluated()
else:
result |= cls_or_tup.execute_evaluated()
return result
def check_tuple_assignments(evaluator, contextualized_name, types):
"""
Checks if tuples are assigned.
"""
lazy_context = None
for index, node in contextualized_name.assignment_indexes():
cn = ContextualizedNode(contextualized_name.context, node)
iterated = iterable.py__iter__(evaluator, types, cn)
for _ in range(index + 1):
try:
lazy_context = next(iterated)
except StopIteration:
# We could do this with the default param in next. But this
# would allow this loop to run for a very long time if the
# index number is high. Therefore break if the loop is
# finished.
return set()
types = lazy_context.infer()
return types
| {
"content_hash": "e88ee82bd89a47784b2aabed16cf2ba2",
"timestamp": "",
"source": "github",
"line_count": 410,
"max_line_length": 102,
"avg_line_length": 40.45609756097561,
"alnum_prop": 0.594139989148128,
"repo_name": "NixaSoftware/CVis",
"id": "aba190e19636899ae64cc389b5365d46305ea0ad",
"size": "16587",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "venv/lib/python2.7/site-packages/jedi/evaluate/finder.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "160965"
},
{
"name": "Batchfile",
"bytes": "45451"
},
{
"name": "C",
"bytes": "4818355"
},
{
"name": "C#",
"bytes": "40804"
},
{
"name": "C++",
"bytes": "145737889"
},
{
"name": "CMake",
"bytes": "53495"
},
{
"name": "CSS",
"bytes": "287550"
},
{
"name": "CWeb",
"bytes": "174166"
},
{
"name": "Cuda",
"bytes": "26749"
},
{
"name": "Fortran",
"bytes": "9668"
},
{
"name": "HTML",
"bytes": "155266453"
},
{
"name": "IDL",
"bytes": "14"
},
{
"name": "JavaScript",
"bytes": "225380"
},
{
"name": "Lex",
"bytes": "1231"
},
{
"name": "M4",
"bytes": "29689"
},
{
"name": "Makefile",
"bytes": "1560105"
},
{
"name": "Max",
"bytes": "36857"
},
{
"name": "Objective-C",
"bytes": "4303"
},
{
"name": "Objective-C++",
"bytes": "218"
},
{
"name": "PHP",
"bytes": "59030"
},
{
"name": "Perl",
"bytes": "23580"
},
{
"name": "Perl 6",
"bytes": "7975"
},
{
"name": "Python",
"bytes": "28662237"
},
{
"name": "QML",
"bytes": "593"
},
{
"name": "Rebol",
"bytes": "354"
},
{
"name": "Roff",
"bytes": "8039"
},
{
"name": "Shell",
"bytes": "376471"
},
{
"name": "Smarty",
"bytes": "2045"
},
{
"name": "Tcl",
"bytes": "1172"
},
{
"name": "TeX",
"bytes": "13404"
},
{
"name": "XSLT",
"bytes": "746813"
},
{
"name": "Yacc",
"bytes": "18910"
}
],
"symlink_target": ""
} |
from fabric import task
deploy_folder = '/opt/site_bot'
existing_packages_tmp_file = '/tmp/existing_site_bot_python_requirements.txt'
@task
def deploy(c, ref):
c.run("""
cd %(deploy_folder)s
git fetch origin
git checkout %(ref)s
# Delete existing dependencies
pip3 freeze --user > %(existing_packages_tmp_file)s
pip3 uninstall -y -r %(existing_packages_tmp_file)s
rm %(existing_packages_tmp_file)s
# Install dependencies
pip3 install --user -r requirements.txt
""" % {"deploy_folder": deploy_folder,
"ref": ref,
"existing_packages_tmp_file": existing_packages_tmp_file})
| {
"content_hash": "5cffa04dbddad118c5ab858d0fd84c9f",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 77,
"avg_line_length": 30.772727272727273,
"alnum_prop": 0.6262924667651403,
"repo_name": "OpenTwinCities/site_bot",
"id": "6d336a188ab31e14acb91af29253eacd82766086",
"size": "677",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fabfile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45284"
}
],
"symlink_target": ""
} |
import fcntl, shutil, signal, sys, os
import struct, termios
# Check if python3 is being used
if sys.version_info.major < 3:
    version = '.'.join([str(sys.version_info.major), str(sys.version_info.minor)])
    print('Python version {version} is not supported. Please use version 3'
          ' and above'.format(version=version))
    sys.exit(1)
# Try to import the hjson and pexpect modules
try:
    import hjson, pexpect
except ImportError:
    print('Please install all required modules by running '
          '`python3 -m pip install -r requirements.txt`')
    sys.exit(1)
# Special string used as the delimiter when splitting the arguments
# from the command line
DELIMITER = "<------->"
ARGS_PREFIX = "--"
APP_TIMEOUT = 3000
# Commands that will be used through the command line
CONNECT = 'connect'
LIST = 'list'
# Accepted commands
ACCEPTED_COMMANDS = {
CONNECT: {
"desc": """
Connects to a given server using the alias provided.
OPTIONS
--timeout - Specifies the time in seconds when the application
will try reaching the server before timing out.
Example ./server_automation connect saved_alias
""",
"options": ['timeout', 'test']
},
LIST: {
"desc": """
Provides a list of aliases. An alias is how you identify
the server that you have configured.
Example ./server_automation list
""",
"options": []
}
}
CONFIG_FILE = os.path.dirname(os.path.realpath(__file__)) + '/config.hjson'
def log(result, other=None):
"""Logging the results into the console"""
if other is None:
print(result)
else:
print(result, other)
def expected(child, expected_string):
"""Function to handle the expected output"""
# Check if the string passed is the expected string
try:
child.expect(expected_string, timeout=int(APP_TIMEOUT))
except pexpect.EOF:
log("EOF, Failed to match expected string: ", expected_string)
log("\t----> After: ", child.after)
sys.exit(1)
except pexpect.TIMEOUT:
log("TIMEOUT, Failed to match expected string: ", expected_string)
log("\t----> After: ", child.after)
sys.exit(1)
except:
log("Failed to match expected string: ", expected_string)
log("\t----> After: ", child.after)
sys.exit(1)
def ssh_log_in(server_ip, username, password, port=22, app_controller=None):
"""
This function logs in into a server with the arguments passed
"""
    # Build the ssh command
    command = 'ssh %s@%s -p%d' % (username, server_ip, port)
    # Log the command being used
    log("----> Logging in with the command: %s" % command)
# Run the command
if app_controller is None:
app_controller = pexpect.spawn(command)
else:
app_controller.sendline(command)
# Expect the password
expected(app_controller, 'assword:')
# Insert the password
app_controller.sendline(password)
# Expect the username and server display name
expected(app_controller, '%s@' % (username))
log("<---- Successfully logged into the server: " + server_ip + "\n")
return app_controller
# Function to run command on the server
def run_command(app_controller, command, expected_string=".*"):
log("\nRunning the command %s" % command)
app_controller.sendline(command)
# Check if the string passed is the expected string
expected(app_controller, expected_string)
return app_controller
def get_server_details(server_alias):
"""
Get the server details from the config file using the alias provided. The server details are username,
password, server, port, requiredServerLogIn
:param server_alias: String used to identify a server in the config file
:return: server details
"""
found_alias = False
saved_aliases = []
server = None
with open(CONFIG_FILE, 'r') as file:
config = hjson.load(file)
# Get the username and password
for server_item in config['servers']:
saved_aliases.extend(server_item['aliases'])
if server_alias in server_item['aliases']:
found_alias = True
server = server_item
break
if found_alias is False:
log('No alias with the name: \'{}\' does not exist. Get the available aliases using: '
' `./server_automation.py list`'.format(server_alias))
sys.exit(1)
return server
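# Illustrative config.hjson entry matching the keys read above; every value
# here is made up:
#
#     servers: [
#         {
#             aliases: ["web", "production-web"]
#             server: "203.0.113.10"
#             username: "deploy"
#             password: "hunter2"
#             port: 22
#             # optional: alias of a jump host that server_login uses first
#             requiredServerLogIn: "bastion"
#         }
#     ]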
def sigwinch_pass_through(sig, data):
    # Read the real terminal's size via the TIOCGWINSZ ioctl (rows, cols,
    # x-pixels, y-pixels) and pass it through to the spawned child so
    # full-screen programs keep rendering correctly after a resize.
    s = struct.pack("HHHH", 0, 0, 0, 0)
    a = struct.unpack('hhhh',
                      fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ, s))
    global controller
    controller.setwinsize(a[0], a[1])
def server_login(server_details, app_controller=None):
"""
Logs into the server specified and any required servers
"""
if 'requiredServerLogIn' in server_details:
# Connect to the server
app_controller = server_login(get_server_details(server_details['requiredServerLogIn']), app_controller)
# Connect to the server
app_controller = ssh_log_in(server_details['server'],
server_details['username'],
server_details['password'],
server_details['port'],
app_controller)
return app_controller
def handle_connect_options(passed_options):
"""
Performs the operations needed for the connect option
:param passed_options:
:return:
"""
for passed_option in passed_options:
if passed_option['name'] == 'timeout':
global APP_TIMEOUT
APP_TIMEOUT = passed_option['value']
if __name__ == '__main__':
# Get the arguments passed
args = sys.argv[1:]
try:
assert len(args) >= 1
except AssertionError:
log('Supported commands are: \n{}'.format("\n".join(
["{0}: {1}".format(key, value['desc']) for key, value in ACCEPTED_COMMANDS.items()])))
sys.exit(1)
# Save the first arg
first_arg = args[0]
# Get the options and arguments passed
# Options have -- prefix
options = list(map(lambda x: x.strip(ARGS_PREFIX),
list(filter(lambda x: x.startswith(ARGS_PREFIX), args[1:]))))
other_args = list(filter(lambda x: not x.startswith(ARGS_PREFIX), args[1:]))
# Check if the fist argument is a supported command
if first_arg not in ACCEPTED_COMMANDS.keys():
log('Supported commands are: \n{}'.format("\n".join(
["{0}: {1}".format(key, value['desc']) for key, value in ACCEPTED_COMMANDS.items()])))
sys.exit(1)
# Handle each command
if first_arg == CONNECT:
# Verify if the options passed exists
available_options = ACCEPTED_COMMANDS[CONNECT]['options']
# Handle the options
for option in options:
try:
option_name, option_value = option.split("=")
assert option_name in available_options
assert type(option_name) is str
if len(option_value) < 1:
raise ValueError()
except ValueError:
log('Undefined value for option: {prefix}{option},'
' Use the format: {prefix}{option}=value'
.format(prefix=ARGS_PREFIX, option=option))
sys.exit(1)
except AssertionError:
log('Unknown option: {}{}'.format(ARGS_PREFIX, option))
sys.exit(1)
# Separate the key and values
# Create a list of dictionaries with the keys: name and value
options = list(map(lambda x: dict(zip(['name', 'value'], x.split("="))), options))
# Handle all the options
handle_connect_options(options)
# Check if the alias was passed as an argument
try:
assert type(other_args[0]) is str
except:
log("No alias was passed, please pass an alias. "
"Format \"./server_automation.py connect alias_name\"")
sys.exit(1)
alias = other_args[0]
details = get_server_details(alias)
controller = server_login(details)
# Get the window size and update the app controller
column, row = shutil.get_terminal_size((80, 20))
controller.setwinsize(row, column)
        # Notify in case of a window size change
signal.signal(signal.SIGWINCH, sigwinch_pass_through)
controller.interact()
elif first_arg == LIST:
# Get the list of all aliases
all_aliases = []
with open(CONFIG_FILE, 'r') as f:
data = hjson.load(f)
try:
assert len(args) >= 1
except AssertionError:
log('Config file:{config} does not exist or is empty.'
.format(config=CONFIG_FILE))
for item in data['servers']:
all_aliases.append({
"server": item['server'],
"aliases": item['aliases']})
log("The list of aliases/servers are: \n")
for item in all_aliases:
log("Aliases: {aliases}, \tSERVER: {server}"
.format(server=item['server'], aliases=", ".join(item['aliases'])))
sys.exit(0)
else:
log('Unimplemented command {command} {accepted_commands}'.format(
command=first_arg,
accepted_commands=str(ACCEPTED_COMMANDS.keys())))
sys.exit(0)
| {
"content_hash": "987976afa30f976a60f0af7b8026dbc3",
"timestamp": "",
"source": "github",
"line_count": 306,
"max_line_length": 112,
"avg_line_length": 31.604575163398692,
"alnum_prop": 0.5913555992141454,
"repo_name": "MaxNdesh/server-automation",
"id": "ddd2f5d8d174b02ece5a298974cfccdef0d66068",
"size": "9694",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server_automation.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9694"
}
],
"symlink_target": ""
} |
from ftplib import FTP
import time, sys
ftp = FTP("ServerIP") # Declaring server IP
ftp.login('supercow','amazingpassword') # Logging in with credentials
files = []
dates = []
files = ftp.nlst()
files = list(filter(lambda k: '.zip' in k, files))
#out = [w.replace('.zip','') for w in files]
#Formatting is as follows : YYYYMMDDHHMMSS
#for i in range(0, len(files)-1):
# dates[i] = time.strptime(files[i], "%Y%m%d%H%M%S")
#print(files)
#print(dates)
ftp.quit() | {
"content_hash": "b0eeec4cbe22c80534fb6c3a5dc5795b",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 70,
"avg_line_length": 24.31578947368421,
"alnum_prop": 0.6645021645021645,
"repo_name": "B-Genc/Pi-FTP-Sync",
"id": "c62b698bf88848d9e8733654e25b7a481a7a58fc",
"size": "462",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sandbox1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8668"
}
],
"symlink_target": ""
} |
import pandas as pd
import matplotlib.pyplot as plt
from utils import get_data, plot_data
def compute_daily_returns(df):
"""Compute and return the daily return values."""
daily_returns = df.copy()
    daily_returns[1:] = (df[1:]/df[:-1].values) - 1  # use .values to avoid index alignment
    daily_returns.iloc[0, :] = 0  # day 0 has no prior day, so its return is set to 0
return daily_returns
def test_run():
dates = pd.date_range('2015-11-23', '2016-11-18')
symbols = ['SPY', 'XOM']
df = get_data(symbols, dates)
# Compute daily returns
daily_returns = compute_daily_returns(df)
# Plot a histogram
daily_returns['SPY'].hist(bins=20, label='SPY')
daily_returns['XOM'].hist(bins=20, label='XOM')
plt.legend(loc='upper right')
plt.show()
if __name__ == '__main__':
test_run() | {
"content_hash": "89032e3093cf4eb3c3bee57d6954568d",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 75,
"avg_line_length": 26.321428571428573,
"alnum_prop": 0.6580732700135685,
"repo_name": "alexey-ernest/ml-for-trading",
"id": "6dcd04f51d7bb4bd6ff0c08bd1435798a77508e8",
"size": "737",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "two_hist.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22417"
}
],
"symlink_target": ""
} |
from connector import channel
from google3.cloud.graphite.mmv2.services.google.datastore import index_pb2
from google3.cloud.graphite.mmv2.services.google.datastore import index_pb2_grpc
from typing import List
class Index(object):
def __init__(
self,
ancestor: str = None,
index_id: str = None,
kind: str = None,
project: str = None,
properties: list = None,
state: str = None,
service_account_file: str = "",
):
channel.initialize()
self.ancestor = ancestor
self.kind = kind
self.project = project
self.properties = properties
self.service_account_file = service_account_file
def apply(self):
stub = index_pb2_grpc.DatastoreIndexServiceStub(channel.Channel())
request = index_pb2.ApplyDatastoreIndexRequest()
if IndexAncestorEnum.to_proto(self.ancestor):
request.resource.ancestor = IndexAncestorEnum.to_proto(self.ancestor)
if Primitive.to_proto(self.kind):
request.resource.kind = Primitive.to_proto(self.kind)
if Primitive.to_proto(self.project):
request.resource.project = Primitive.to_proto(self.project)
if IndexPropertiesArray.to_proto(self.properties):
request.resource.properties.extend(
IndexPropertiesArray.to_proto(self.properties)
)
request.service_account_file = self.service_account_file
response = stub.ApplyDatastoreIndex(request)
self.ancestor = IndexAncestorEnum.from_proto(response.ancestor)
self.index_id = Primitive.from_proto(response.index_id)
self.kind = Primitive.from_proto(response.kind)
self.project = Primitive.from_proto(response.project)
self.properties = IndexPropertiesArray.from_proto(response.properties)
self.state = IndexStateEnum.from_proto(response.state)
def delete(self):
stub = index_pb2_grpc.DatastoreIndexServiceStub(channel.Channel())
request = index_pb2.DeleteDatastoreIndexRequest()
request.service_account_file = self.service_account_file
if IndexAncestorEnum.to_proto(self.ancestor):
request.resource.ancestor = IndexAncestorEnum.to_proto(self.ancestor)
if Primitive.to_proto(self.kind):
request.resource.kind = Primitive.to_proto(self.kind)
if Primitive.to_proto(self.project):
request.resource.project = Primitive.to_proto(self.project)
if IndexPropertiesArray.to_proto(self.properties):
request.resource.properties.extend(
IndexPropertiesArray.to_proto(self.properties)
)
response = stub.DeleteDatastoreIndex(request)
@classmethod
def list(self, project, service_account_file=""):
stub = index_pb2_grpc.DatastoreIndexServiceStub(channel.Channel())
request = index_pb2.ListDatastoreIndexRequest()
request.service_account_file = service_account_file
request.Project = project
return stub.ListDatastoreIndex(request).items
def to_proto(self):
resource = index_pb2.DatastoreIndex()
if IndexAncestorEnum.to_proto(self.ancestor):
resource.ancestor = IndexAncestorEnum.to_proto(self.ancestor)
if Primitive.to_proto(self.kind):
resource.kind = Primitive.to_proto(self.kind)
if Primitive.to_proto(self.project):
resource.project = Primitive.to_proto(self.project)
if IndexPropertiesArray.to_proto(self.properties):
resource.properties.extend(IndexPropertiesArray.to_proto(self.properties))
return resource
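    # Usage sketch (assumes a reachable service endpoint, valid credentials,
    # and hypothetical enum/field values):
    #
    #   idx = Index(kind="Task", project="my-project",
    #               properties=[IndexProperties(name="done", direction="ASCENDING")],
    #               service_account_file="creds.json")
    #   idx.apply()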
class IndexProperties(object):
def __init__(self, name: str = None, direction: str = None):
self.name = name
self.direction = direction
@classmethod
def to_proto(self, resource):
if not resource:
return None
res = index_pb2.DatastoreIndexProperties()
if Primitive.to_proto(resource.name):
res.name = Primitive.to_proto(resource.name)
if IndexPropertiesDirectionEnum.to_proto(resource.direction):
res.direction = IndexPropertiesDirectionEnum.to_proto(resource.direction)
return res
@classmethod
def from_proto(self, resource):
if not resource:
return None
return IndexProperties(
name=Primitive.from_proto(resource.name),
direction=IndexPropertiesDirectionEnum.from_proto(resource.direction),
)
class IndexPropertiesArray(object):
@classmethod
def to_proto(self, resources):
if not resources:
return resources
return [IndexProperties.to_proto(i) for i in resources]
@classmethod
def from_proto(self, resources):
return [IndexProperties.from_proto(i) for i in resources]
class IndexAncestorEnum(object):
@classmethod
def to_proto(self, resource):
if not resource:
return resource
return index_pb2.DatastoreIndexAncestorEnum.Value(
"DatastoreIndexAncestorEnum%s" % resource
)
@classmethod
def from_proto(self, resource):
if not resource:
return resource
return index_pb2.DatastoreIndexAncestorEnum.Name(resource)[
len("DatastoreIndexAncestorEnum") :
]
class IndexPropertiesDirectionEnum(object):
@classmethod
def to_proto(self, resource):
if not resource:
return resource
return index_pb2.DatastoreIndexPropertiesDirectionEnum.Value(
"DatastoreIndexPropertiesDirectionEnum%s" % resource
)
@classmethod
def from_proto(self, resource):
if not resource:
return resource
return index_pb2.DatastoreIndexPropertiesDirectionEnum.Name(resource)[
len("DatastoreIndexPropertiesDirectionEnum") :
]
class IndexStateEnum(object):
@classmethod
def to_proto(self, resource):
if not resource:
return resource
return index_pb2.DatastoreIndexStateEnum.Value(
"DatastoreIndexStateEnum%s" % resource
)
@classmethod
def from_proto(self, resource):
if not resource:
return resource
return index_pb2.DatastoreIndexStateEnum.Name(resource)[
len("DatastoreIndexStateEnum") :
]
class Primitive(object):
@classmethod
def to_proto(self, s):
if not s:
return ""
return s
@classmethod
def from_proto(self, s):
return s
| {
"content_hash": "361fa44386a28d9428fc10ae844df417",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 86,
"avg_line_length": 33.45685279187817,
"alnum_prop": 0.6555909573661053,
"repo_name": "GoogleCloudPlatform/declarative-resource-client-library",
"id": "6be8d0d5c3fb6248c0be8550e3a73969053abe4a",
"size": "7190",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "python/services/datastore/index.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2560"
},
{
"name": "C++",
"bytes": "3947"
},
{
"name": "Go",
"bytes": "116489733"
},
{
"name": "Python",
"bytes": "17240408"
},
{
"name": "Starlark",
"bytes": "319733"
}
],
"symlink_target": ""
} |
import sys
from DataMining import *
from PythonLearn import *
from PandasLearn import *
from NumpyLearn import *
ch02path = 'D:/software/Python/python code/pydata-book-master/ch02/usagov_bitly_data2012-03-16-1331923249.txt'
# dm = DataMining(ch02path);
# dm.ch02_frame();
# dm.drawPlot();
# pl = PythonLearn();
# mylist = ["1","2","3","4","5"]
# pl.testList(mylist)
# myPanda = PandasLearn();
# myPanda.funcTest();
myNumpy = NumpyLearn()
myNumpy.testNumpy6()
| {
"content_hash": "3d49d03a304fc5419645dcca0362f45d",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 111,
"avg_line_length": 22.238095238095237,
"alnum_prop": 0.702355460385439,
"repo_name": "woniukaibuick/DA-ML",
"id": "5009c011962a5a17c2a8fa87a4511cb92fde7978",
"size": "492",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/valar/basic/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "75255"
}
],
"symlink_target": ""
} |
from azure.identity import DefaultAzureCredential
from azure.mgmt.cdn import CdnManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-cdn
# USAGE
python afd_endpoints_get.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = CdnManagementClient(
credential=DefaultAzureCredential(),
subscription_id="subid",
)
response = client.afd_endpoints.get(
resource_group_name="RG",
profile_name="profile1",
endpoint_name="endpoint1",
)
print(response)
# x-ms-original-file: specification/cdn/resource-manager/Microsoft.Cdn/stable/2021-06-01/examples/AFDEndpoints_Get.json
if __name__ == "__main__":
main()
| {
"content_hash": "8a7c438be4b2191effbc8e785357ddd6",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 119,
"avg_line_length": 30.852941176470587,
"alnum_prop": 0.7121067683508103,
"repo_name": "Azure/azure-sdk-for-python",
"id": "5655f10e5e461a61765498531cd979612fd395ab",
"size": "1517",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/cdn/azure-mgmt-cdn/generated_samples/afd_endpoints_get.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
TIME_ZONE = 'Europe/Amsterdam'
USE_TZ = True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
| {
"content_hash": "1bfb3146c1826a8a55f3b611be8ddb9e",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 73,
"avg_line_length": 31.2,
"alnum_prop": 0.7606837606837606,
"repo_name": "wheelcms/obsolete-do-not-use-wheel-cms",
"id": "6d25178a7b0c43d3bbb0a3d72591844493688199",
"size": "862",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "wheel_cms/settings/base/settings_locale.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "11156"
}
],
"symlink_target": ""
} |
from django.http.response import HttpResponse, Http404
from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
import importlib
import uuid
import os
import json
import unicodecsv
from itertools import chain
@csrf_exempt
def csvimport(request, label):
if not request.method == 'POST':
raise Http404
mod_name, form_name = settings.CSV_FORMS[label].rsplit('.',1)
mod = importlib.import_module(mod_name)
form = getattr(mod, form_name)()
headers = map(
lambda x: x.upper(),
chain(
form.fields.keys(),
[unicode(f.label) for f in form.fields.values() if not f.widget.is_hidden]
)
)
path_dir = os.path.join( settings.MEDIA_ROOT, 'csvimport')
if not os.path.exists(path_dir):
os.mkdir(path_dir)
file_id = str(uuid.uuid4())
file_name = os.path.join( path_dir, file_id)
stream = open(file_name, 'w')
for chunk in request.FILES['file'].chunks():
stream.write(chunk)
stream.close()
answ = { 'headers': [], 'rows': [], 'file': file_id }
with open(file_name, 'r') as stream:
csv = unicodecsv.reader(stream, encoding='utf-8', delimiter=getattr(settings, 'CSV_DELIMITER', ';'))
for n, row in enumerate( csv ):
if n > 4: break
if n == 0:
for col, item in enumerate( row ):
if item.upper() in headers:
answ['headers'].append( (col, item.lower()) )
else: answ['rows'].append(row)
return HttpResponse( json.dumps( answ ) )
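# The view above responds with JSON of the form (hypothetical values):
#   {"headers": [[0, "name"], [2, "email"]],
#    "rows": [["Ada", "x", "[email protected]"]],
#    "file": "<uuid of the stored upload>"}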
@csrf_exempt
def csvdump(request, label):
# return HttpResponse( json.dumps({'added': 11 }) )
    if 'file' not in request.POST or 'data' not in request.POST:
        return HttpResponse( json.dumps({'critical': "Missing 'file' or 'data' parameter" }) )
file_id = request.POST['file']
data = request.POST['data'].split(',')
path_dir = os.path.join( settings.MEDIA_ROOT, 'csvimport')
file_name = os.path.join( path_dir, file_id)
    if not os.path.exists(file_name): return HttpResponse( json.dumps({'critical': "File not found" }) )
mod_name, form_name = settings.CSV_FORMS[label].rsplit('.',1)
mod = importlib.import_module(mod_name)
form = getattr(mod, form_name)
mapping = {}
for key in form().fields.keys():
try: mapping[data.index(key)] = key
except ValueError: pass
components = []
with open(file_name, 'r') as stream:
csv = unicodecsv.reader(stream, encoding='utf-8', delimiter=getattr(settings, 'CSV_DELIMITER', ';'))
for n, row in enumerate( csv ):
if n == 0: continue
obj = {}
for ind, i in enumerate(row):
try: obj[ mapping[ind] ] = i
except KeyError: pass
form_obj = form(obj)
if form_obj.is_valid():
components.append(obj)
mod_name, func_name = settings.CSV_DELEGATE[label].rsplit('.',1)
mod = importlib.import_module(mod_name)
func = getattr(mod, func_name)
added = func(components, request)
return HttpResponse( json.dumps({'added': added }) )
| {
"content_hash": "63ab101a3cb76c2cfbd8e958faa44c45",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 108,
"avg_line_length": 37.62352941176471,
"alnum_prop": 0.5888055034396498,
"repo_name": "DontPanicBaby/django-csvimport",
"id": "fc54434a4bdea6a5a8de4e160d779a84ffdb1a68",
"size": "3198",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4735"
}
],
"symlink_target": ""
} |
import logging
import traceback
import numpy as np
import pandas as pd
from ConfigSpace.hyperparameters import CategoricalHyperparameter, \
UniformFloatHyperparameter, UniformIntegerHyperparameter
from ConfigSpace.conditions import EqualsCondition, InCondition
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace import Configuration
from aslib_scenario.aslib_scenario import ASlibScenario
__author__ = "Marius Lindauer"
__license__ = "BSD"
class IndRegression(object):
@staticmethod
def add_params(cs: ConfigurationSpace):
'''
adds parameters to ConfigurationSpace
'''
selector = cs.get_hyperparameter("selector")
regressor = cs.get_hyperparameter("regressor")
if "IndRegressor" in selector.choices:
cond = InCondition(child=regressor, parent=selector, values=["IndRegressor"])
cs.add_condition(cond)
def __init__(self, regressor_class):
'''
Constructor
'''
self.regressors = []
self.logger = logging.getLogger("IndRegressor")
self.regressor_class = regressor_class
def fit(self, scenario: ASlibScenario, config: Configuration):
'''
        fit one regression model per algorithm to the ASlib scenario data
Arguments
---------
scenario: data.aslib_scenario.ASlibScenario
ASlib Scenario with all data in pandas
config: ConfigSpace.Configuration
configuration
'''
self.logger.info("Fit PairwiseRegressor with %s" %
(self.regressor_class))
self.algorithms = scenario.algorithms
n_algos = len(scenario.algorithms)
X = scenario.feature_data.values
for i in range(n_algos):
y = scenario.performance_data[scenario.algorithms[i]].values
reg = self.regressor_class()
reg.fit(X, y, config)
self.regressors.append(reg)
def predict(self, scenario: ASlibScenario):
'''
predict schedules for all instances in ASLib scenario data
Arguments
---------
scenario: data.aslib_scenario.ASlibScenario
ASlib Scenario with all data in pandas
Returns
-------
schedule: {inst -> (solver, time)}
schedule of solvers with a running time budget
'''
if scenario.algorithm_cutoff_time:
cutoff = scenario.algorithm_cutoff_time
else:
cutoff = 2**31
n_algos = len(scenario.algorithms)
X = scenario.feature_data.values
scores = np.zeros((X.shape[0], n_algos))
for i in range(n_algos):
reg = self.regressors[i]
Y = reg.predict(X)
scores[:, i] += Y
#self.logger.debug(
# sorted(list(zip(scenario.algorithms, scores)), key=lambda x: x[1], reverse=True))
algo_indx = np.argmin(scores, axis=1)
schedules = dict((str(inst),[s]) for s,inst in zip([(scenario.algorithms[i], cutoff+1) for i in algo_indx], scenario.feature_data.index))
#self.logger.debug(schedules)
return schedules
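    # The returned schedule maps instance name -> [(algorithm, budget)], e.g.
    # (hypothetical values): {'inst1': [('algoA', cutoff + 1)]}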
def get_attributes(self):
'''
returns a list of tuples of (attribute,value)
for all learned attributes
Returns
-------
list of tuples of (attribute,value)
'''
reg_attr = self.regressors[0].get_attributes()
attr = [{self.regressor_class.__name__:reg_attr}]
return attr | {
"content_hash": "77a2d652742fedf1667abe8d1bd5e895",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 145,
"avg_line_length": 31.833333333333332,
"alnum_prop": 0.5913474786442546,
"repo_name": "mlindauer/AutoFolio",
"id": "8edf37de627fdb8494cc0bc2f1ace553819d72bd",
"size": "3629",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "autofolio/selector/ind_regression.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "115641"
}
],
"symlink_target": ""
} |
from mpxapi.api_base import ApiBase
class Checksum(ApiBase):
def __init__(self, api):
ApiBase.__init__(self, api)
self.schema = "1.2.0"
self.service = "Ingest Data Service"
self.path = "/data/Checksum"
| {
"content_hash": "2098e2906d45a8a79029c797bd008444",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 44,
"avg_line_length": 24.1,
"alnum_prop": 0.5933609958506224,
"repo_name": "blockbuster/mpxapi",
"id": "782e6dc72c91a9788365beb586c49f9dc1305acf",
"size": "241",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mpxapi/adapter/checksum.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18273"
},
{
"name": "Shell",
"bytes": "103"
}
],
"symlink_target": ""
} |
from types import ModuleType
from typing import TYPE_CHECKING, Type, Union
from libcloud.storage.types import OLD_CONSTANT_TO_NEW_MAPPING, Provider
from libcloud.common.providers import get_driver as _get_provider_driver
from libcloud.common.providers import set_driver as _set_provider_driver
if TYPE_CHECKING:
from libcloud.storage.base import StorageDriver
DRIVERS = {
Provider.DUMMY: ("libcloud.storage.drivers.dummy", "DummyStorageDriver"),
Provider.CLOUDFILES: (
"libcloud.storage.drivers.cloudfiles",
"CloudFilesStorageDriver",
),
Provider.OPENSTACK_SWIFT: (
"libcloud.storage.drivers.cloudfiles",
"OpenStackSwiftStorageDriver",
),
Provider.S3: ("libcloud.storage.drivers.s3", "S3StorageDriver"),
Provider.S3_US_EAST2: ("libcloud.storage.drivers.s3", "S3USEast2StorageDriver"),
Provider.S3_US_WEST: ("libcloud.storage.drivers.s3", "S3USWestStorageDriver"),
Provider.S3_US_WEST_OREGON: (
"libcloud.storage.drivers.s3",
"S3USWestOregonStorageDriver",
),
Provider.S3_US_GOV_WEST: (
"libcloud.storage.drivers.s3",
"S3USGovWestStorageDriver",
),
Provider.S3_CN_NORTH: ("libcloud.storage.drivers.s3", "S3CNNorthStorageDriver"),
Provider.S3_CN_NORTHWEST: (
"libcloud.storage.drivers.s3",
"S3CNNorthWestStorageDriver",
),
Provider.S3_EU_WEST: ("libcloud.storage.drivers.s3", "S3EUWestStorageDriver"),
Provider.S3_EU_WEST2: ("libcloud.storage.drivers.s3", "S3EUWest2StorageDriver"),
Provider.S3_EU_CENTRAL: ("libcloud.storage.drivers.s3", "S3EUCentralStorageDriver"),
Provider.S3_EU_NORTH1: ("libcloud.storage.drivers.s3", "S3EUNorth1StorageDriver"),
Provider.S3_AP_SOUTH: ("libcloud.storage.drivers.s3", "S3APSouthStorageDriver"),
Provider.S3_AP_SOUTHEAST: ("libcloud.storage.drivers.s3", "S3APSEStorageDriver"),
Provider.S3_AP_SOUTHEAST2: ("libcloud.storage.drivers.s3", "S3APSE2StorageDriver"),
Provider.S3_AP_NORTHEAST: ("libcloud.storage.drivers.s3", "S3APNE1StorageDriver"),
Provider.S3_AP_NORTHEAST1: ("libcloud.storage.drivers.s3", "S3APNE1StorageDriver"),
Provider.S3_AP_NORTHEAST2: ("libcloud.storage.drivers.s3", "S3APNE2StorageDriver"),
Provider.S3_SA_EAST: ("libcloud.storage.drivers.s3", "S3SAEastStorageDriver"),
Provider.S3_CA_CENTRAL: ("libcloud.storage.drivers.s3", "S3CACentralStorageDriver"),
Provider.S3_RGW: ("libcloud.storage.drivers.rgw", "S3RGWStorageDriver"),
Provider.S3_RGW_OUTSCALE: (
"libcloud.storage.drivers.rgw",
"S3RGWOutscaleStorageDriver",
),
Provider.NINEFOLD: ("libcloud.storage.drivers.ninefold", "NinefoldStorageDriver"),
Provider.GOOGLE_STORAGE: (
"libcloud.storage.drivers.google_storage",
"GoogleStorageDriver",
),
Provider.NIMBUS: ("libcloud.storage.drivers.nimbus", "NimbusStorageDriver"),
Provider.LOCAL: ("libcloud.storage.drivers.local", "LocalStorageDriver"),
Provider.AZURE_BLOBS: (
"libcloud.storage.drivers.azure_blobs",
"AzureBlobsStorageDriver",
),
Provider.KTUCLOUD: ("libcloud.storage.drivers.ktucloud", "KTUCloudStorageDriver"),
Provider.AURORAOBJECTS: (
"libcloud.storage.drivers.auroraobjects",
"AuroraObjectsStorageDriver",
),
Provider.BACKBLAZE_B2: (
"libcloud.storage.drivers.backblaze_b2",
"BackblazeB2StorageDriver",
),
Provider.ALIYUN_OSS: ("libcloud.storage.drivers.oss", "OSSStorageDriver"),
Provider.DIGITALOCEAN_SPACES: (
"libcloud.storage.drivers.digitalocean_spaces",
"DigitalOceanSpacesStorageDriver",
),
Provider.MINIO: ("libcloud.storage.drivers.minio", "MinIOStorageDriver"),
Provider.SCALEWAY: ("libcloud.storage.drivers.scaleway", "ScalewayStorageDriver"),
Provider.OVH: ("libcloud.storage.drivers.ovh", "OvhStorageDriver"),
}
def get_driver(provider):
# type: (Union[Provider, str]) -> Type[StorageDriver]
deprecated_constants = OLD_CONSTANT_TO_NEW_MAPPING
return _get_provider_driver(
drivers=DRIVERS, provider=provider, deprecated_constants=deprecated_constants
)
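# Usage sketch (hypothetical credentials):
#
#   cls = get_driver(Provider.S3)
#   driver = cls('api key', 'api secret')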
def set_driver(provider, module, klass):
# type: (Union[Provider, str], ModuleType, type) -> Type[StorageDriver]
return _set_provider_driver(drivers=DRIVERS, provider=provider, module=module, klass=klass)
| {
"content_hash": "caa86093adbf83659a19cc2b5edcf280",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 95,
"avg_line_length": 45.873684210526314,
"alnum_prop": 0.7129417163836622,
"repo_name": "apache/libcloud",
"id": "2bd1db4c686b20948b74cfa1dfc3c35d2877761b",
"size": "5140",
"binary": false,
"copies": "2",
"ref": "refs/heads/trunk",
"path": "libcloud/storage/providers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2155"
},
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "PowerShell",
"bytes": "410"
},
{
"name": "Python",
"bytes": "9105547"
},
{
"name": "Shell",
"bytes": "12994"
}
],
"symlink_target": ""
} |
r"""
Backrefs for the 'regex' module.
Add the ability to use the following backrefs with re:
- `\Q` and `\Q...\E` - Escape/quote chars (search)
- `\c` and `\C...\E` - Uppercase char or chars (replace)
- `\l` and `\L...\E` - Lowercase char or chars (replace)
- `\N{Black Club Suit}` - Unicode character by name (replace)
- `\u0000` and `\U00000000` - Unicode characters (replace)
- `\R` - Generic line breaks (search)
- `\e` - Escape character (search)
Licensed under MIT
Copyright (c) 2015 - 2020 Isaac Muse <[email protected]>
"""
from __future__ import annotations
import regex as _regex # type: ignore[import]
import copyreg as _copyreg
from functools import lru_cache as _lru_cache
from . import util as _util
from . import _bregex_parse
from ._bregex_parse import ReplaceTemplate
from typing import AnyStr, Callable, Any, Optional, Generic, Mapping, Iterator, cast
from ._bregex_typing import Pattern, Match
__all__ = (
"expand", "expandf", "match", "fullmatch", "search", "sub", "subf", "subn", "subfn", "split", "splititer",
"findall", "finditer", "purge", "escape", "D", "DEBUG", "A", "ASCII", "B", "BESTMATCH",
"E", "ENHANCEMATCH", "F", "FULLCASE", "I", "IGNORECASE", "L", "LOCALE", "M", "MULTILINE", "R", "REVERSE",
"S", "DOTALL", "U", "UNICODE", "X", "VERBOSE", "V0", "VERSION0", "V1", "VERSION1", "W", "WORD",
"P", "POSIX", "DEFAULT_VERSION", "FORMAT", "compile", "compile_search", "compile_replace", "Bregex",
"ReplaceTemplate"
)
# Expose some common re flags and methods to
# save having to import re and backrefs libraries
D = _regex.D
DEBUG = _regex.DEBUG
A = _regex.A
ASCII = _regex.ASCII
B = _regex.B
BESTMATCH = _regex.BESTMATCH
E = _regex.E
ENHANCEMATCH = _regex.ENHANCEMATCH
F = _regex.F
FULLCASE = _regex.FULLCASE
I = _regex.I
IGNORECASE = _regex.IGNORECASE
L = _regex.L
LOCALE = _regex.LOCALE
M = _regex.M
MULTILINE = _regex.MULTILINE
R = _regex.R
REVERSE = _regex.REVERSE
S = _regex.S
DOTALL = _regex.DOTALL
U = _regex.U
UNICODE = _regex.UNICODE
X = _regex.X
VERBOSE = _regex.VERBOSE
V0 = _regex.V0
VERSION0 = _regex.VERSION0
V1 = _regex.V1
VERSION1 = _regex.VERSION1
W = _regex.W
WORD = _regex.WORD
P = _regex.P
POSIX = _regex.POSIX
DEFAULT_VERSION = _regex.DEFAULT_VERSION
escape = _regex.escape
# Replace flags
FORMAT = 1
# Case upper or lower
_UPPER = 1
_LOWER = 2
# Maximum size of the cache.
_MAXCACHE = 500
_REGEX_TYPE = type(_regex.compile('', 0))
@_lru_cache(maxsize=_MAXCACHE)
def _cached_search_compile(
pattern: AnyStr,
re_verbose: bool,
re_version: bool,
pattern_type: type[AnyStr]
) -> AnyStr:
"""Cached search compile."""
return _bregex_parse._SearchParser(pattern, re_verbose, re_version).parse()
@_lru_cache(maxsize=_MAXCACHE)
def _cached_replace_compile(
pattern: Pattern[AnyStr],
repl: AnyStr,
flags: int,
pattern_type: type[AnyStr]
) -> ReplaceTemplate[AnyStr]:
"""Cached replace compile."""
return _bregex_parse._ReplaceParser(pattern, repl, bool(flags & FORMAT)).parse()
def _get_cache_size(replace: bool = False) -> int:
"""Get size of cache."""
if not replace:
size = _cached_search_compile.cache_info().currsize
else:
size = _cached_replace_compile.cache_info().currsize
return size
def _purge_cache() -> None:
"""Purge the cache."""
_cached_replace_compile.cache_clear()
_cached_search_compile.cache_clear()
def _is_replace(obj: Any) -> bool:
"""Check if object is a replace object."""
return isinstance(obj, ReplaceTemplate)
def _apply_replace_backrefs(
m: Optional[Match[AnyStr]],
repl: ReplaceTemplate[AnyStr] | AnyStr,
flags: int = 0
) -> AnyStr:
"""Expand with either the `ReplaceTemplate` or compile on the fly, or return None."""
if m is None:
raise ValueError("Match is None!")
if isinstance(repl, ReplaceTemplate):
return repl.expand(m)
return _bregex_parse._ReplaceParser(m.re, repl, bool(flags & FORMAT)).parse().expand(m)
def _apply_search_backrefs(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
flags: int = 0
) -> AnyStr | Pattern[AnyStr]:
"""Apply the search backrefs to the search pattern."""
if isinstance(pattern, (str, bytes)):
re_verbose = VERBOSE & flags
if flags & V0:
re_version = V0
elif flags & V1:
re_version = V1
else:
re_version = 0
if not (flags & DEBUG):
p = _cached_search_compile(
pattern, re_verbose, re_version, type(pattern)
) # type: AnyStr | Pattern[AnyStr]
else: # pragma: no cover
p = _bregex_parse._SearchParser(cast(AnyStr, pattern), re_verbose, re_version).parse()
elif isinstance(pattern, Bregex):
if flags:
raise ValueError("Cannot process flags argument with a compiled pattern")
p = pattern._pattern
elif isinstance(pattern, _REGEX_TYPE):
if flags:
raise ValueError("Cannot process flags argument with a compiled pattern!")
p = pattern
else:
raise TypeError("Not a string or compiled pattern!")
return p
def _assert_expandable(repl: Any, use_format: bool = False) -> None:
"""Check if replace template is expandable."""
if isinstance(repl, ReplaceTemplate):
if repl.use_format != use_format:
if use_format:
raise ValueError("Replace not compiled as a format replace")
else:
raise ValueError("Replace should not be compiled as a format replace!")
elif not isinstance(repl, (str, bytes)):
raise TypeError("Expected string, buffer, or compiled replace!")
###########################
# API
##########################
class Bregex(_util.Immutable, Generic[AnyStr]):
"""Bregex object."""
_pattern: Pattern[AnyStr]
auto_compile: bool
_hash: int
__slots__ = ("_pattern", "auto_compile", "_hash")
def __init__(self, pattern: Pattern[AnyStr], auto_compile: bool = True) -> None:
"""Initialization."""
super().__init__(
_pattern=pattern,
auto_compile=auto_compile,
_hash=hash((type(self), type(pattern), pattern, auto_compile))
)
@property
def pattern(self) -> AnyStr:
"""Return pattern."""
return cast(AnyStr, self._pattern.pattern)
@property
def flags(self) -> int:
"""Return flags."""
return cast(int, self._pattern.flags)
@property
def groupindex(self) -> Mapping[str, int]:
"""Return group index."""
return cast(Mapping[str, int], self._pattern.groupindex)
@property
def groups(self) -> tuple[Optional[AnyStr], ...]:
"""Return groups."""
return cast('tuple[Optional[AnyStr], ...]', self._pattern.groups)
@property
def scanner(self) -> Any:
"""Return scanner."""
return self._pattern.scanner
def __hash__(self) -> int:
"""Hash."""
return self._hash
def __eq__(self, other: Any) -> bool:
"""Equal."""
return (
isinstance(other, Bregex) and
self._pattern == other._pattern and
self.auto_compile == other.auto_compile
)
def __ne__(self, other: Any) -> bool:
"""Equal."""
return (
not isinstance(other, Bregex) or
self._pattern != other._pattern or
self.auto_compile != other.auto_compile
)
def __repr__(self) -> str: # pragma: no cover
"""Representation."""
return '{}.{}({!r}, auto_compile={!r})'.format(
self.__module__, self.__class__.__name__, self._pattern, self.auto_compile
)
def _auto_compile(
self,
template: AnyStr | Callable[..., AnyStr],
use_format: bool = False
) -> AnyStr | Callable[..., AnyStr]:
"""Compile replacements."""
if isinstance(template, ReplaceTemplate):
if use_format != template.use_format:
raise ValueError("Compiled replace cannot be a format object!")
        elif isinstance(template, (str, bytes)) and self.auto_compile:
            return self.compile(template, (FORMAT if use_format else 0))
elif isinstance(template, (str, bytes)) and use_format:
# Reject an attempt to run format replace when auto-compiling
# of template strings has been disabled and we are using a
# template string.
raise AttributeError('Format replaces cannot be called without compiling replace template!')
return template
def compile( # noqa A001
self,
repl: AnyStr | Callable[..., AnyStr],
flags: int = 0
) -> Callable[..., AnyStr]:
"""Compile replace."""
return compile_replace(self._pattern, repl, flags)
@property
def named_lists(self) -> Mapping[str, set[str | bytes]]:
"""Returned named lists."""
return cast('Mapping[str, set[str | bytes]]', self._pattern.named_lists)
def search(
self,
string: AnyStr,
*args: Any,
**kwargs: Any
) -> Optional[Match[AnyStr]]:
"""Apply `search`."""
return self._pattern.search(string, *args, **kwargs)
def match(
self,
string: AnyStr,
*args: Any,
**kwargs: Any
) -> Optional[Match[AnyStr]]:
"""Apply `match`."""
return cast(Optional[Match[AnyStr]], self._pattern.match(string, *args, **kwargs))
def fullmatch(
self,
string: AnyStr,
*args: Any,
**kwargs: Any
) -> Optional[Match[AnyStr]]:
"""Apply `fullmatch`."""
return cast(Optional[Match[AnyStr]], self._pattern.fullmatch(string, *args, **kwargs))
def split(
self,
string: AnyStr,
*args: Any,
**kwargs: Any
) -> list[AnyStr]:
"""Apply `split`."""
return cast('list[AnyStr]', self._pattern.split(string, *args, **kwargs))
def splititer(
self,
string: AnyStr,
*args: Any,
**kwargs: Any
) -> Iterator[AnyStr]:
"""Apply `splititer`."""
return cast(Iterator[AnyStr], self._pattern.splititer(string, *args, **kwargs))
def findall(
self,
string: AnyStr,
*args: Any,
**kwargs: Any
) -> list[AnyStr] | list[tuple[AnyStr, ...]]:
"""Apply `findall`."""
return cast('list[AnyStr] | list[tuple[AnyStr, ...]]', self._pattern.findall(string, *args, **kwargs))
def finditer(
self,
string: AnyStr,
*args: Any,
**kwargs: Any
) -> Iterator[Match[AnyStr]]:
"""Apply `finditer`."""
return cast(Iterator[Match[AnyStr]], self._pattern.finditer(string, *args, **kwargs))
def sub(
self,
repl: AnyStr | Callable[..., AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> AnyStr:
"""Apply `sub`."""
return cast(AnyStr, self._pattern.sub(self._auto_compile(repl), string, *args, **kwargs))
def subf(
self,
repl: AnyStr | Callable[..., AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> AnyStr: # noqa A002
"""Apply `sub` with format style replace."""
return cast(AnyStr, self._pattern.subf(self._auto_compile(repl, True), string, *args, **kwargs))
def subn(
self,
repl: AnyStr | Callable[..., AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> tuple[AnyStr, int]:
"""Apply `subn` with format style replace."""
return cast('tuple[AnyStr, int]', self._pattern.subn(self._auto_compile(repl), string, *args, **kwargs))
def subfn(
self,
repl: AnyStr | Callable[..., AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> tuple[AnyStr, int]: # noqa A002
"""Apply `subn` after applying backrefs."""
return cast('tuple[AnyStr, int]', self._pattern.subfn(self._auto_compile(repl, True), string, *args, **kwargs))
def compile( # noqa A001
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
flags: int = 0,
auto_compile: Optional[bool] = None,
**kwargs: Any
) -> 'Bregex[AnyStr]':
"""Compile both the search or search and replace into one object."""
if isinstance(pattern, Bregex):
if auto_compile is not None:
raise ValueError("Cannot compile Bregex with a different auto_compile!")
elif flags != 0:
raise ValueError("Cannot process flags argument with a compiled pattern")
return pattern
else:
if auto_compile is None:
auto_compile = True
return Bregex(compile_search(pattern, flags, **kwargs), auto_compile)
def compile_search(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
flags: int = 0,
**kwargs: Any
) -> Pattern[AnyStr]:
"""Compile with extended search references."""
return cast(Pattern[AnyStr], _regex.compile(_apply_search_backrefs(pattern, flags), flags, **kwargs))
def compile_replace(
pattern: Pattern[AnyStr],
repl: AnyStr | Callable[..., AnyStr],
flags: int = 0
) -> Callable[..., AnyStr]:
"""Construct a method that can be used as a replace method for `sub`, `subn`, etc."""
if pattern is not None and isinstance(pattern, _REGEX_TYPE):
if isinstance(repl, (str, bytes)):
if not (pattern.flags & DEBUG):
call = _cached_replace_compile(pattern, repl, flags, type(repl))
else: # pragma: no cover
call = _bregex_parse._ReplaceParser(pattern, repl, bool(flags & FORMAT)).parse()
elif isinstance(repl, ReplaceTemplate):
if flags:
raise ValueError("Cannot process flags argument with a ReplaceTemplate!")
if repl.pattern_hash != hash(pattern):
raise ValueError("Pattern hash doesn't match hash in compiled replace!")
call = repl
else:
raise TypeError("Not a valid type!")
else:
raise TypeError("Pattern must be a compiled regular expression!")
return call
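# Usage sketch for compiled replaces (assumes the `regex` module is installed):
#
#   p = compile_search(r'(\w+)')
#   replace = compile_replace(p, r'\c\1')  # `\c` uppercases the next character
#   p.sub(replace, 'hello')                # -> 'Hello'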
def purge() -> None:
"""Purge caches."""
_purge_cache()
_regex.purge()
def expand(m: Optional[Match[AnyStr]], repl: ReplaceTemplate[AnyStr] | AnyStr) -> AnyStr:
"""Expand the string using the replace pattern or function."""
_assert_expandable(repl)
return _apply_replace_backrefs(m, repl)
def expandf(m: Optional[Match[AnyStr]], repl: ReplaceTemplate[AnyStr] | AnyStr) -> AnyStr:
"""Expand the string using the format replace pattern or function."""
_assert_expandable(repl, True)
return _apply_replace_backrefs(m, repl, flags=FORMAT)
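# e.g. (sketch): m = search(r'(\w+)', 'hello'); expand(m, r'\C\1\E') -> 'HELLO'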
def match(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> Optional[Match[AnyStr]]:
"""Wrapper for `match`."""
flags = args[2] if len(args) > 2 else kwargs.get('flags', 0)
return cast(
Optional[Match[AnyStr]],
_regex.match(_apply_search_backrefs(pattern, flags), string, *args, **kwargs)
)
def fullmatch(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> Optional[Match[AnyStr]]:
"""Wrapper for `fullmatch`."""
flags = args[2] if len(args) > 2 else kwargs.get('flags', 0)
return cast(
Optional[Match[AnyStr]],
_regex.fullmatch(_apply_search_backrefs(pattern, flags), string, *args, **kwargs)
)
def search(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> Optional[Match[AnyStr]]:
"""Wrapper for `search`."""
flags = args[2] if len(args) > 2 else kwargs.get('flags', 0)
return cast(
Optional[Match[AnyStr]],
_regex.search(_apply_search_backrefs(pattern, flags), string, *args, **kwargs)
)
def sub(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
repl: AnyStr | Callable[..., AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> AnyStr:
"""Wrapper for `sub`."""
flags = args[4] if len(args) > 4 else kwargs.get('flags', 0)
is_replace = _is_replace(repl)
is_string = isinstance(repl, (str, bytes))
if is_replace and cast(ReplaceTemplate[AnyStr], repl).use_format:
raise ValueError("Compiled replace cannot be a format object!")
pattern = compile_search(pattern, flags)
return cast(
AnyStr,
_regex.sub(
pattern, (compile_replace(pattern, repl) if is_replace or is_string else repl), string,
*args, **kwargs
)
)
def subf(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
repl: AnyStr | Callable[..., AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> AnyStr:
"""Wrapper for `subf`."""
flags = args[4] if len(args) > 4 else kwargs.get('flags', 0)
is_replace = _is_replace(repl)
is_string = isinstance(repl, (str, bytes))
if is_replace and not cast(ReplaceTemplate[AnyStr], repl).use_format:
raise ValueError("Compiled replace is not a format object!")
pattern = compile_search(pattern, flags)
rflags = FORMAT if is_string else 0
return cast(
AnyStr,
_regex.sub(
pattern, (compile_replace(pattern, repl, flags=rflags) if is_replace or is_string else repl), string,
*args, **kwargs
)
)
def subn(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
repl: AnyStr | Callable[..., AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> tuple[AnyStr, int]:
"""Wrapper for `subn`."""
flags = args[4] if len(args) > 4 else kwargs.get('flags', 0)
is_replace = _is_replace(repl)
is_string = isinstance(repl, (str, bytes))
if is_replace and cast(ReplaceTemplate[AnyStr], repl).use_format:
raise ValueError("Compiled replace cannot be a format object!")
pattern = compile_search(pattern, flags)
return cast(
'tuple[AnyStr, int]',
_regex.subn(
pattern, (compile_replace(pattern, repl) if is_replace or is_string else repl), string,
*args, **kwargs
)
)
def subfn(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
repl: AnyStr | Callable[..., AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> tuple[AnyStr, int]:
"""Wrapper for `subfn`."""
flags = args[4] if len(args) > 4 else kwargs.get('flags', 0)
is_replace = _is_replace(repl)
is_string = isinstance(repl, (str, bytes))
if is_replace and not cast(ReplaceTemplate[AnyStr], repl).use_format:
raise ValueError("Compiled replace is not a format object!")
pattern = compile_search(pattern, flags)
rflags = FORMAT if is_string else 0
return cast(
'tuple[AnyStr, int]',
_regex.subn(
pattern, (compile_replace(pattern, repl, flags=rflags) if is_replace or is_string else repl), string,
*args, **kwargs
)
)
def split(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> list[AnyStr]:
"""Wrapper for `split`."""
flags = args[3] if len(args) > 3 else kwargs.get('flags', 0)
return cast(
'list[AnyStr]',
_regex.split(_apply_search_backrefs(pattern, flags), string, *args, **kwargs)
)
def splititer(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> Iterator[AnyStr]:
"""Wrapper for `splititer`."""
flags = args[3] if len(args) > 3 else kwargs.get('flags', 0)
return cast(
Iterator[AnyStr],
_regex.splititer(_apply_search_backrefs(pattern, flags), string, *args, **kwargs)
)
def findall(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> list[AnyStr] | list[tuple[AnyStr, ...]]:
"""Wrapper for `findall`."""
flags = args[2] if len(args) > 2 else kwargs.get('flags', 0)
return cast(
'list[AnyStr] | list[tuple[AnyStr, ...]]',
_regex.findall(_apply_search_backrefs(pattern, flags), string, *args, **kwargs)
)
def finditer(
pattern: AnyStr | Pattern[AnyStr] | Bregex[AnyStr],
string: AnyStr,
*args: Any,
**kwargs: Any
) -> Iterator[Match[AnyStr]]:
"""Wrapper for `finditer`."""
flags = args[2] if len(args) > 2 else kwargs.get('flags', 0)
return cast(
Iterator[Match[AnyStr]],
_regex.finditer(_apply_search_backrefs(pattern, flags), string, *args, **kwargs)
)
def _pickle(p): # type: ignore[no-untyped-def]
return Bregex, (p._pattern, p.auto_compile)
_copyreg.pickle(Bregex, _pickle)
| {
"content_hash": "027f4212e65ced324e13b591f279af5c",
"timestamp": "",
"source": "github",
"line_count": 713,
"max_line_length": 119,
"avg_line_length": 29.782608695652176,
"alnum_prop": 0.5883682599481987,
"repo_name": "facelessuser/backrefs",
"id": "13ce1b44f78d604c85e183af9d770ef5a1dfd024",
"size": "21235",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backrefs/bregex.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "487191"
}
],
"symlink_target": ""
} |
""" AerPauliExpectation Class """
import logging
from functools import reduce
from operator import add
from typing import Union
from qiskit.exceptions import MissingOptionalLibraryError
from qiskit.opflow.expectations.expectation_base import ExpectationBase
from qiskit.opflow.list_ops.composed_op import ComposedOp
from qiskit.opflow.list_ops.list_op import ListOp
from qiskit.opflow.list_ops.summed_op import SummedOp
from qiskit.opflow.operator_base import OperatorBase
from qiskit.opflow.primitive_ops.pauli_op import PauliOp
from qiskit.opflow.primitive_ops.pauli_sum_op import PauliSumOp
from qiskit.opflow.state_fns.circuit_state_fn import CircuitStateFn
from qiskit.opflow.state_fns.operator_state_fn import OperatorStateFn
from qiskit.quantum_info import SparsePauliOp
logger = logging.getLogger(__name__)
class AerPauliExpectation(ExpectationBase):
r"""An Expectation converter for using Aer's operator snapshot to
take expectations of quantum state circuits over Pauli observables.
"""
def convert(self, operator: OperatorBase) -> OperatorBase:
"""Accept an Operator and return a new Operator with the Pauli measurements replaced by
AerSnapshot-based expectation circuits.
Args:
operator: The operator to convert. If it contains non-hermitian terms, the
operator is decomposed into hermitian and anti-hermitian parts.
Returns:
The converted operator.
"""
if isinstance(operator, OperatorStateFn) and operator.is_measurement:
if isinstance(operator.primitive, ListOp):
is_herm = all((op.is_hermitian() for op in operator.primitive.oplist))
else:
is_herm = operator.primitive.is_hermitian()
if not is_herm:
pauli_sum_re = (
self._replace_pauli_sums(
1 / 2 * (operator.primitive + operator.primitive.adjoint()).reduce()
)
* operator.coeff
)
pauli_sum_im = (
self._replace_pauli_sums(
1 / 2j * (operator.primitive - operator.primitive.adjoint()).reduce()
)
* operator.coeff
)
pauli_sum = (pauli_sum_re + 1j * pauli_sum_im).reduce()
else:
pauli_sum = self._replace_pauli_sums(operator.primitive) * operator.coeff
return pauli_sum
elif isinstance(operator, ListOp):
return operator.traverse(self.convert)
else:
return operator
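    # Illustrative conversion (sketch; assumes qiskit-aer is installed):
    #
    #   from qiskit.opflow import StateFn
    #   measurement = StateFn(pauli_sum_op, is_measurement=True)
    #   expectation = AerPauliExpectation().convert(measurement @ CircuitStateFn(circuit))
    #
    # where `pauli_sum_op` and `circuit` are hypothetical stand-ins.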
@classmethod
def _replace_pauli_sums(cls, operator):
try:
from qiskit.providers.aer.library import SaveExpectationValue
except ImportError as ex:
raise MissingOptionalLibraryError(
libname="qiskit-aer",
name="AerPauliExpectation",
pip_install="pip install qiskit-aer",
) from ex
        # The 'expval_measurement' label on the save instruction is special - the
        # CircuitSampler will look for it to know that the circuit is an Expectation
        # measurement, and not simply a circuit to replace with a DictStateFn.
if operator.__class__ == ListOp:
return operator.traverse(cls._replace_pauli_sums)
if isinstance(operator, PauliSumOp):
save_instruction = SaveExpectationValue(operator.primitive, "expval_measurement")
return CircuitStateFn(
save_instruction, coeff=operator.coeff, is_measurement=True, from_operator=True
)
# Change to Pauli representation if necessary
if {"Pauli"} != operator.primitive_strings():
logger.warning(
"Measured Observable is not composed of only Paulis, converting to "
"Pauli representation, which can be expensive."
)
# Setting massive=False because this conversion is implicit. User can perform this
# action on the Observable with massive=True explicitly if they so choose.
operator = operator.to_pauli_op(massive=False)
if isinstance(operator, SummedOp):
sparse_pauli = reduce(
add, (meas.coeff * SparsePauliOp(meas.primitive) for meas in operator.oplist)
)
save_instruction = SaveExpectationValue(sparse_pauli, "expval_measurement")
return CircuitStateFn(
save_instruction, coeff=operator.coeff, is_measurement=True, from_operator=True
)
if isinstance(operator, PauliOp):
sparse_pauli = operator.coeff * SparsePauliOp(operator.primitive)
save_instruction = SaveExpectationValue(sparse_pauli, "expval_measurement")
return CircuitStateFn(save_instruction, is_measurement=True, from_operator=True)
raise TypeError(
f"Conversion of OperatorStateFn of {operator.__class__.__name__} is not defined."
)
def compute_variance(self, exp_op: OperatorBase) -> Union[list, float]:
r"""
Compute the variance of the expectation estimator. Because Aer takes this expectation
with matrix multiplication, the estimation is exact and the variance is always 0,
but we need to return those values in a way which matches the Operator's structure.
Args:
exp_op: The full expectation value Operator after sampling.
Returns:
The variances or lists thereof (if exp_op contains ListOps) of the expectation value
estimation, equal to 0.
"""
# Need to do this to mimic Op structure
def sum_variance(operator):
if isinstance(operator, ComposedOp):
return 0.0
elif isinstance(operator, ListOp):
return operator.combo_fn([sum_variance(op) for op in operator.oplist])
raise TypeError(f"Variance cannot be computed for {operator.__class__.__name__}.")
return sum_variance(exp_op)
| {
"content_hash": "3a74b60d64732706823d3d8f684be504",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 97,
"avg_line_length": 43.03521126760563,
"alnum_prop": 0.6373752250040909,
"repo_name": "QISKit/qiskit-sdk-py",
"id": "33790531592adecabe688c474c02401d02f671b5",
"size": "6589",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "qiskit/opflow/expectations/aer_pauli_expectation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2582"
},
{
"name": "C++",
"bytes": "327518"
},
{
"name": "CMake",
"bytes": "19294"
},
{
"name": "Makefile",
"bytes": "5608"
},
{
"name": "Pascal",
"bytes": "2444"
},
{
"name": "Python",
"bytes": "1312801"
},
{
"name": "Shell",
"bytes": "8385"
}
],
"symlink_target": ""
} |
import json
from typing import Any, Dict, Tuple
from flask import current_app, g, jsonify, request
from werkzeug.datastructures import ImmutableMultiDict
from alerta.exceptions import ApiError
from alerta.models.alert import Alert
from alerta.utils.audit import write_audit_trail
from alerta.utils.response import absolute_url
from . import WebhookBase
JSON = Dict[str, Any]
def parse_slack(data: ImmutableMultiDict) -> Tuple[str, str, str]:
payload = json.loads(data['payload'])
user = payload.get('user', {}).get('name')
alert_id = payload.get('callback_id')
action = payload.get('actions', [{}])[0].get('value')
    if not alert_id:
        raise ValueError('Missing callback_id (alert id) in Slack payload')
    elif not user:
        raise ValueError('Missing user name in Slack payload')
    elif not action:
        raise ValueError('Missing action value in Slack payload')
return alert_id, user, action
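# Example payload fragment consumed above (hypothetical values):
#   {"callback_id": "1a2b3c", "user": {"name": "bob"},
#    "actions": [{"value": "ack"}]}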
def build_slack_response(alert: Alert, action: str, user: str, data: ImmutableMultiDict) -> JSON:
response = json.loads(data['payload']).get('original_message', {})
actions = ['watch', 'unwatch']
message = (
'User {user} is {action}ing alert {alert}' if action in actions else
'The status of alert {alert} is {status} now!').format(
alert=alert.get_id(short=True), status=alert.status.capitalize(),
action=action, user=user
)
attachment_response = {
'fallback': message, 'pretext': 'Action done!', 'color': '#808080',
'title': message, 'title_link': absolute_url('/alert/' + alert.id)
}
# clear interactive buttons and add new attachment as response of action
if action not in actions:
attachments = response.get('attachments', [])
for attachment in attachments:
attachment.pop('actions', None)
attachments.append(attachment_response)
response['attachments'] = attachments
return response
# update the interactive button of all actions
next_action = actions[(actions.index(action) + 1) % len(actions)]
for attachment in response.get('attachments', []):
for attached_action in attachment.get('actions', []):
if action == attached_action.get('value'):
attached_action.update({
'name': next_action, 'value': next_action,
'text': next_action.capitalize()
})
return response
class SlackWebhook(WebhookBase):
"""
Slack apps
See https://api.slack.com/slack-apps
"""
def incoming(self, path, query_string, payload):
alert_id, user, action = parse_slack(payload)
customers = g.get('customers', None)
alert = Alert.find_by_id(alert_id, customers=customers)
if not alert:
            return jsonify(status='error', message='alert not found for #slack message')
if action in ['open', 'ack', 'close']:
alert.from_action(action, text=f'status change via #slack by {user}')
elif action in ['watch', 'unwatch']:
alert.untag(tags=[f'{action}:{user}'])
else:
raise ApiError('Unsupported #slack action', 400)
text = 'alert updated via slack webhook'
write_audit_trail.send(current_app._get_current_object(), event='webhook-updated', message=text,
user=g.login, customers=g.customers, scopes=g.scopes, resource_id=alert.id,
type='alert', request=request)
response = build_slack_response(alert, action, user, payload)
return jsonify(**response), 201
| {
"content_hash": "c950ad2807e3b4693d8560d910f55030",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 106,
"avg_line_length": 35.91,
"alnum_prop": 0.6285157337788917,
"repo_name": "guardian/alerta",
"id": "aa265ce29813984769e67f7abc6f75a96c37951c",
"size": "3591",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "alerta/webhooks/slack.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5143"
},
{
"name": "JavaScript",
"bytes": "2971"
},
{
"name": "Makefile",
"bytes": "842"
},
{
"name": "Python",
"bytes": "355607"
},
{
"name": "Shell",
"bytes": "2090"
}
],
"symlink_target": ""
} |
"""
MXP - Mud eXtension Protocol.
Partial implementation of the MXP protocol.
The MXP protocol allows more advanced formatting options for telnet clients
that supports it (mudlet, zmud, mushclient are a few)
This only implements the SEND tag.
More information can be found on the following links:
http://www.zuggsoft.com/zmud/mxp.htm
http://www.mushclient.com/mushclient/mxp.htm
http://www.gammon.com.au/mushclient/addingservermxp.htm
"""
from builtins import object
import re
LINKS_SUB = re.compile(r'\|lc(.*?)\|lt(.*?)\|le', re.DOTALL)
MXP = chr(91)
MXP_TEMPSECURE = "\x1B[4z"
MXP_SEND = MXP_TEMPSECURE + \
"<SEND HREF=\"\\1\">" + \
"\\2" + \
MXP_TEMPSECURE + \
"</SEND>"
def mxp_parse(text):
"""
Replaces links to the correct format for MXP.
Args:
text (str): The text to parse.
Returns:
parsed (str): The parsed text.
"""
text = text.replace("&", "&") \
.replace("<", "<") \
.replace(">", ">")
text = LINKS_SUB.sub(MXP_SEND, text)
return text
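# Example: mxp_parse("|lclook|ltlook here|le") yields "look here" wrapped in
# <SEND HREF="look">...</SEND>, delimited by the MXP_TEMPSECURE escape.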
class Mxp(object):
"""
Implements the MXP protocol.
"""
def __init__(self, protocol):
"""
Initializes the protocol by checking if the client supports it.
Args:
protocol (Protocol): The active protocol instance.
"""
self.protocol = protocol
self.protocol.protocol_flags["MXP"] = False
self.protocol.will(MXP).addCallbacks(self.do_mxp, self.no_mxp)
def no_mxp(self, option):
"""
Called when the Client reports to not support MXP.
Args:
option (Option): Not used.
"""
self.protocol.protocol_flags["MXP"] = False
self.protocol.handshake_done()
def do_mxp(self, option):
"""
Called when the Client reports to support MXP.
Args:
option (Option): Not used.
"""
self.protocol.protocol_flags["MXP"] = True
self.protocol.requestNegotiation(MXP, '')
self.protocol.handshake_done()
| {
"content_hash": "46ff1a5fc7ede20fb1447c2d49c3a942",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 75,
"avg_line_length": 23.46590909090909,
"alnum_prop": 0.5951573849878935,
"repo_name": "feend78/evennia",
"id": "44bc9628ea305254306bc5cd039f1873fa843528",
"size": "2065",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "evennia/server/portal/mxp.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "42859"
},
{
"name": "Emacs Lisp",
"bytes": "2734"
},
{
"name": "HTML",
"bytes": "20118"
},
{
"name": "JavaScript",
"bytes": "32388"
},
{
"name": "Python",
"bytes": "2734770"
},
{
"name": "Shell",
"bytes": "4237"
}
],
"symlink_target": ""
} |
"""
Django settings for the blog-website project.
Generated by 'django-admin startproject' using Django 1.11.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
from utils import secret_key_generator
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_PATH = BASE_DIR
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = secret_key_generator.get_secret_key(os.path.join(PROJECT_PATH, 'secret.txt'))
PRODUCTION = os.environ.get('ON_PRODUCTION', False)
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = not PRODUCTION
if DEBUG:
ALLOWED_HOSTS = []
else:
ALLOWED_HOSTS = ['techalert.me', '127.0.0.1']
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True
# Application definition
INSTALLED_APPS = [
'taggit',
'imagekit',
'search.apps.SearchConfig',
'aboutme.apps.AboutmeConfig',
'post.apps.PostConfig',
'archive.apps.ArchiveConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.staticfiles',
'markdown_deux',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'blog-website.urls'
SITE_ID = 1
STATICFILES_DIRS = [
BASE_DIR + "/media/",
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.media',
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'blog-website.wsgi.application'
DB_NAME = os.environ.get("DB_NAME")
DB_USER = os.environ.get("DB_USER")
DB_PASSWORD = os.environ.get("DB_PASSWORD")
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': DB_NAME,
'USER': DB_USER,
'PASSWORD': DB_PASSWORD,
'HOST': 'localhost',
'PORT': '',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
TAGGIT_CASE_INSENSITIVE = True
TIME_ZONE = 'Asia/Kolkata'
| {
"content_hash": "2742b7c1681cac0d7652d46b279ba551",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 91,
"avg_line_length": 26.305732484076433,
"alnum_prop": 0.6791767554479419,
"repo_name": "Prakash2403/Blog",
"id": "0ad20ad0ac9abbf17b402eaf3dcba1519d5879bf",
"size": "4130",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blog-website/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "39153"
},
{
"name": "HTML",
"bytes": "26479"
},
{
"name": "JavaScript",
"bytes": "87652"
},
{
"name": "Python",
"bytes": "22970"
}
],
"symlink_target": ""
} |
print "#"*3 +" Operador corto circuito "+"#"*3
persona = True
#if persona:
# nombre = 'Juan Perez'
nombre = persona and 'Juan Perez'
print "nombre:",nombre
#nombre = False
"""
if nombre:
encargado = nombre
else:
    encargado = 'Some name'
"""
encargado = nombre or 'Some name'
print "encargado:",encargado
| {
"content_hash": "e8d52b7875016ce857b633f5bc00274b",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 46,
"avg_line_length": 18.941176470588236,
"alnum_prop": 0.6645962732919255,
"repo_name": "mario21ic/python_curso",
"id": "b60cf627be394ba192fd40603c4a59e1f8301e7a",
"size": "347",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "session1/8_corto_circuito.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "14541"
}
],
"symlink_target": ""
} |
from azure.identity import DefaultAzureCredential
from azure.mgmt.storageimportexport import StorageImportExport
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-storageimportexport
# USAGE
python list_available_operations.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = StorageImportExport(
credential=DefaultAzureCredential(),
subscription_id="SUBSCRIPTION_ID",
)
response = client.operations.list()
for item in response:
print(item)
# x-ms-original-file: specification/storageimportexport/resource-manager/Microsoft.ImportExport/preview/2021-01-01/examples/ListOperations.json
if __name__ == "__main__":
main()
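# A minimal sketch (an assumption, not part of the generated sample) of how
# the environment variables named in the docstring could be injected for a
# local dry run; the values below are placeholders, not real credentials:
#
#   import os
#   os.environ["AZURE_CLIENT_ID"] = "<app-client-id>"
#   os.environ["AZURE_TENANT_ID"] = "<tenant-id>"
#   os.environ["AZURE_CLIENT_SECRET"] = "<client-secret>"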
| {
"content_hash": "ad1b3e9628923f6da109bce9a587d8c8",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 143,
"avg_line_length": 33.516129032258064,
"alnum_prop": 0.7449470644850819,
"repo_name": "Azure/azure-sdk-for-python",
"id": "185e21ed93ea30902e2386a58502b8554ee4ac86",
"size": "1507",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/storage/azure-mgmt-storageimportexport/generated_samples/list_available_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from topaz.module import ModuleDef
class Signal(object):
moduledef = ModuleDef("Signal")
@moduledef.function("trap")
def method_trap(self, args_w):
pass
| {
"content_hash": "dc046b08f9f3351bd6335014bf072d84",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 38,
"avg_line_length": 19.636363636363637,
"alnum_prop": 0.6851851851851852,
"repo_name": "kachick/topaz",
"id": "e5d72b570140387bacd3666e2b94b927df34e1d6",
"size": "216",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "topaz/modules/signal.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1112159"
},
{
"name": "Ruby",
"bytes": "199941"
},
{
"name": "Shell",
"bytes": "7755"
}
],
"symlink_target": ""
} |
"""
Django settings for chalk project.
Generated by 'django-admin startproject' using Django 1.8.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qcs2p&pau3$)7mp6gus)h)-7*iu(u3axomtpnr@a*p5c#wlguh'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'scheduler',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'chalk.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
'scheduler/templates'
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'chalk.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
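# Editorial sketch (an assumption; the original file stops at STATIC_URL): a
# deployment that runs `collectstatic` would additionally need a STATIC_ROOT,
# for example:
#
#   STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')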
| {
"content_hash": "d2dc48fde03ad5769a1b161c42d20363",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 71,
"avg_line_length": 25.666666666666668,
"alnum_prop": 0.686456400742115,
"repo_name": "Victoria-Sardelli/hackBU16",
"id": "c04022320c7e8fcb86cce93e026131ee21d69efc",
"size": "2695",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chalk/chalk/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "27025"
},
{
"name": "HTML",
"bytes": "14012"
},
{
"name": "JavaScript",
"bytes": "457470"
},
{
"name": "Python",
"bytes": "14172"
}
],
"symlink_target": ""
} |
import copy
from glanceclient.common import utils as glanceclient_utils
from glanceclient.v2 import schemas
import mock
from osc_lib.cli import format_columns
from osc_lib import exceptions
import warlock
from openstackclient.image.v2 import image
from openstackclient.tests.unit.identity.v3 import fakes as identity_fakes
from openstackclient.tests.unit.image.v2 import fakes as image_fakes
class TestImage(image_fakes.TestImagev2):
def setUp(self):
super(TestImage, self).setUp()
# Get shortcuts to the Mocks in image client
self.images_mock = self.app.client_manager.image.images
self.images_mock.reset_mock()
self.image_members_mock = self.app.client_manager.image.image_members
self.image_members_mock.reset_mock()
self.image_tags_mock = self.app.client_manager.image.image_tags
self.image_tags_mock.reset_mock()
# Get shortcut to the Mocks in identity client
self.project_mock = self.app.client_manager.identity.projects
self.project_mock.reset_mock()
self.domain_mock = self.app.client_manager.identity.domains
self.domain_mock.reset_mock()
def setup_images_mock(self, count):
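        # Create `count` fake images and stub images_mock.get so lookups made
        # by the command under test resolve to those fakes.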
images = image_fakes.FakeImage.create_images(count=count)
self.images_mock.get = image_fakes.FakeImage.get_images(
images,
0)
return images
class TestImageCreate(TestImage):
project = identity_fakes.FakeProject.create_one_project()
domain = identity_fakes.FakeDomain.create_one_domain()
def setUp(self):
super(TestImageCreate, self).setUp()
self.new_image = image_fakes.FakeImage.create_one_image()
self.images_mock.create.return_value = self.new_image
self.project_mock.get.return_value = self.project
self.domain_mock.get.return_value = self.domain
# This is the return value for utils.find_resource()
self.images_mock.get.return_value = copy.deepcopy(
self.new_image
)
self.images_mock.update.return_value = self.new_image
# Get the command object to test
self.cmd = image.CreateImage(self.app, None)
def test_image_reserve_no_options(self):
mock_exception = {
'find.side_effect': exceptions.CommandError('x'),
}
self.images_mock.configure_mock(**mock_exception)
arglist = [
self.new_image.name
]
verifylist = [
('container_format', image.DEFAULT_CONTAINER_FORMAT),
('disk_format', image.DEFAULT_DISK_FORMAT),
('name', self.new_image.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
# ImageManager.create(name=, **)
self.images_mock.create.assert_called_with(
name=self.new_image.name,
container_format=image.DEFAULT_CONTAINER_FORMAT,
disk_format=image.DEFAULT_DISK_FORMAT,
)
# Verify update() was not called, if it was show the args
self.assertEqual(self.images_mock.update.call_args_list, [])
self.images_mock.upload.assert_called_with(
mock.ANY, mock.ANY,
)
self.assertEqual(
image_fakes.FakeImage.get_image_columns(self.new_image),
columns)
self.assertItemEqual(
image_fakes.FakeImage.get_image_data(self.new_image),
data)
@mock.patch('glanceclient.common.utils.get_data_file', name='Open')
def test_image_reserve_options(self, mock_open):
mock_file = mock.MagicMock(name='File')
mock_open.return_value = mock_file
mock_open.read.return_value = None
mock_exception = {
'find.side_effect': exceptions.CommandError('x'),
}
self.images_mock.configure_mock(**mock_exception)
arglist = [
'--container-format', 'ovf',
'--disk-format', 'ami',
'--min-disk', '10',
'--min-ram', '4',
('--protected'
if self.new_image.protected else '--unprotected'),
('--private'
if self.new_image.visibility == 'private' else '--public'),
'--project', self.new_image.owner,
'--project-domain', self.domain.id,
self.new_image.name,
]
verifylist = [
('container_format', 'ovf'),
('disk_format', 'ami'),
('min_disk', 10),
('min_ram', 4),
('protected', self.new_image.protected),
('unprotected', not self.new_image.protected),
('public', self.new_image.visibility == 'public'),
('private', self.new_image.visibility == 'private'),
('project', self.new_image.owner),
('project_domain', self.domain.id),
('name', self.new_image.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
# ImageManager.create(name=, **)
self.images_mock.create.assert_called_with(
name=self.new_image.name,
container_format='ovf',
disk_format='ami',
min_disk=10,
min_ram=4,
owner=self.project.id,
protected=self.new_image.protected,
visibility=self.new_image.visibility,
)
# Verify update() was not called, if it was show the args
self.assertEqual(self.images_mock.update.call_args_list, [])
self.images_mock.upload.assert_called_with(
mock.ANY, mock.ANY,
)
self.assertEqual(
image_fakes.FakeImage.get_image_columns(self.new_image),
columns)
self.assertItemEqual(
image_fakes.FakeImage.get_image_data(self.new_image),
data)
def test_image_create_with_unexist_project(self):
self.project_mock.get.side_effect = exceptions.NotFound(None)
self.project_mock.find.side_effect = exceptions.NotFound(None)
arglist = [
'--container-format', 'ovf',
'--disk-format', 'ami',
'--min-disk', '10',
'--min-ram', '4',
'--protected',
'--private',
'--project', 'unexist_owner',
image_fakes.image_name,
]
verifylist = [
('container_format', 'ovf'),
('disk_format', 'ami'),
('min_disk', 10),
('min_ram', 4),
('protected', True),
('unprotected', False),
('public', False),
('private', True),
('project', 'unexist_owner'),
('name', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.assertRaises(
exceptions.CommandError,
self.cmd.take_action,
parsed_args,
)
@mock.patch('glanceclient.common.utils.get_data_file', name='Open')
def test_image_create_file(self, mock_open):
mock_file = mock.MagicMock(name='File')
mock_open.return_value = mock_file
mock_open.read.return_value = (
image_fakes.FakeImage.get_image_data(self.new_image))
mock_exception = {
'find.side_effect': exceptions.CommandError('x'),
}
self.images_mock.configure_mock(**mock_exception)
arglist = [
'--file', 'filer',
('--unprotected'
if not self.new_image.protected else '--protected'),
('--public'
if self.new_image.visibility == 'public' else '--private'),
'--property', 'Alpha=1',
'--property', 'Beta=2',
'--tag', self.new_image.tags[0],
'--tag', self.new_image.tags[1],
self.new_image.name,
]
verifylist = [
('file', 'filer'),
('protected', self.new_image.protected),
('unprotected', not self.new_image.protected),
('public', self.new_image.visibility == 'public'),
('private', self.new_image.visibility == 'private'),
('properties', {'Alpha': '1', 'Beta': '2'}),
('tags', self.new_image.tags),
('name', self.new_image.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
# ImageManager.create(name=, **)
self.images_mock.create.assert_called_with(
name=self.new_image.name,
container_format=image.DEFAULT_CONTAINER_FORMAT,
disk_format=image.DEFAULT_DISK_FORMAT,
protected=self.new_image.protected,
visibility=self.new_image.visibility,
Alpha='1',
Beta='2',
tags=self.new_image.tags,
)
# Verify update() was not called, if it was show the args
self.assertEqual(self.images_mock.update.call_args_list, [])
self.images_mock.upload.assert_called_with(
mock.ANY, mock.ANY,
)
self.assertEqual(
image_fakes.FakeImage.get_image_columns(self.new_image),
columns)
self.assertItemEqual(
image_fakes.FakeImage.get_image_data(self.new_image),
data)
def test_image_create_dead_options(self):
arglist = [
'--store', 'somewhere',
self.new_image.name,
]
verifylist = [
('name', self.new_image.name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.assertRaises(
exceptions.CommandError,
self.cmd.take_action, parsed_args)
class TestAddProjectToImage(TestImage):
project = identity_fakes.FakeProject.create_one_project()
domain = identity_fakes.FakeDomain.create_one_domain()
_image = image_fakes.FakeImage.create_one_image()
new_member = image_fakes.FakeImage.create_one_image_member(
attrs={'image_id': _image.id,
'member_id': project.id}
)
columns = (
'image_id',
'member_id',
'status',
)
datalist = (
_image.id,
new_member.member_id,
new_member.status,
)
def setUp(self):
super(TestAddProjectToImage, self).setUp()
# This is the return value for utils.find_resource()
self.images_mock.get.return_value = self._image
# Update the image_id in the MEMBER dict
self.image_members_mock.create.return_value = self.new_member
self.project_mock.get.return_value = self.project
self.domain_mock.get.return_value = self.domain
# Get the command object to test
self.cmd = image.AddProjectToImage(self.app, None)
def test_add_project_to_image_no_option(self):
arglist = [
self._image.id,
self.project.id,
]
verifylist = [
('image', self._image.id),
('project', self.project.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
self.image_members_mock.create.assert_called_with(
self._image.id,
self.project.id
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
def test_add_project_to_image_with_option(self):
arglist = [
self._image.id,
self.project.id,
'--project-domain', self.domain.id,
]
verifylist = [
('image', self._image.id),
('project', self.project.id),
('project_domain', self.domain.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
self.image_members_mock.create.assert_called_with(
self._image.id,
self.project.id
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, data)
class TestImageDelete(TestImage):
def setUp(self):
super(TestImageDelete, self).setUp()
self.images_mock.delete.return_value = None
# Get the command object to test
self.cmd = image.DeleteImage(self.app, None)
def test_image_delete_no_options(self):
images = self.setup_images_mock(count=1)
arglist = [
images[0].id,
]
verifylist = [
('images', [images[0].id]),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.images_mock.delete.assert_called_with(images[0].id)
self.assertIsNone(result)
def test_image_delete_multi_images(self):
images = self.setup_images_mock(count=3)
arglist = [i.id for i in images]
verifylist = [
('images', arglist),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
calls = [mock.call(i.id) for i in images]
self.images_mock.delete.assert_has_calls(calls)
self.assertIsNone(result)
def test_image_delete_multi_images_exception(self):
images = image_fakes.FakeImage.create_images(count=2)
arglist = [
images[0].id,
images[1].id,
'x-y-x',
]
verifylist = [
('images', arglist)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# Fake exception in utils.find_resource()
# In image v2, we use utils.find_resource() to find a network.
# It calls get() several times, but find() only one time. So we
# choose to fake get() always raise exception, then pass through.
# And fake find() to find the real network or not.
ret_find = [
images[0],
images[1],
exceptions.NotFound('404'),
]
        self.images_mock.get = mock.Mock(side_effect=Exception())
self.images_mock.find.side_effect = ret_find
self.assertRaises(exceptions.CommandError, self.cmd.take_action,
parsed_args)
calls = [mock.call(i.id) for i in images]
self.images_mock.delete.assert_has_calls(calls)
class TestImageList(TestImage):
_image = image_fakes.FakeImage.create_one_image()
columns = (
'ID',
'Name',
'Status',
)
datalist = (
_image.id,
_image.name,
'',
),
def setUp(self):
super(TestImageList, self).setUp()
self.api_mock = mock.Mock()
self.api_mock.image_list.side_effect = [
[self._image], [],
]
self.app.client_manager.image.api = self.api_mock
# Get the command object to test
self.cmd = image.ListImage(self.app, None)
def test_image_list_no_options(self):
arglist = []
verifylist = [
('public', False),
('private', False),
('community', False),
('shared', False),
('long', False),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class Lister in cliff, abstract method take_action()
# returns a tuple containing the column names and an iterable
# containing the data to be listed.
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
marker=self._image.id,
)
self.assertEqual(self.columns, columns)
self.assertListItemEqual(self.datalist, tuple(data))
def test_image_list_public_option(self):
arglist = [
'--public',
]
verifylist = [
('public', True),
('private', False),
('community', False),
('shared', False),
('long', False),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class Lister in cliff, abstract method take_action()
# returns a tuple containing the column names and an iterable
# containing the data to be listed.
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
public=True,
marker=self._image.id,
)
self.assertEqual(self.columns, columns)
self.assertListItemEqual(self.datalist, tuple(data))
def test_image_list_private_option(self):
arglist = [
'--private',
]
verifylist = [
('public', False),
('private', True),
('community', False),
('shared', False),
('long', False),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class Lister in cliff, abstract method take_action()
# returns a tuple containing the column names and an iterable
# containing the data to be listed.
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
private=True,
marker=self._image.id,
)
self.assertEqual(self.columns, columns)
self.assertListItemEqual(self.datalist, tuple(data))
def test_image_list_community_option(self):
arglist = [
'--community',
]
verifylist = [
('public', False),
('private', False),
('community', True),
('shared', False),
('long', False),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class Lister in cliff, abstract method take_action()
# returns a tuple containing the column names and an iterable
# containing the data to be listed.
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
community=True,
marker=self._image.id,
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, tuple(data))
def test_image_list_shared_option(self):
arglist = [
'--shared',
]
verifylist = [
('public', False),
('private', False),
('community', False),
('shared', True),
('long', False),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class Lister in cliff, abstract method take_action()
# returns a tuple containing the column names and an iterable
# containing the data to be listed.
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
shared=True,
marker=self._image.id,
)
self.assertEqual(self.columns, columns)
self.assertListItemEqual(self.datalist, tuple(data))
def test_image_list_shared_member_status_option(self):
arglist = [
'--shared',
'--member-status', 'all'
]
verifylist = [
('public', False),
('private', False),
('community', False),
('shared', True),
('long', False),
('member_status', 'all')
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class Lister in cliff, abstract method take_action()
# returns a tuple containing the column names and an iterable
# containing the data to be listed.
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
shared=True,
member_status='all',
marker=self._image.id,
)
self.assertEqual(self.columns, columns)
self.assertEqual(self.datalist, tuple(data))
def test_image_list_shared_member_status_lower(self):
arglist = [
'--shared',
'--member-status', 'ALl'
]
verifylist = [
('public', False),
('private', False),
('community', False),
('shared', True),
('long', False),
('member_status', 'all')
]
self.check_parser(self.cmd, arglist, verifylist)
def test_image_list_long_option(self):
arglist = [
'--long',
]
verifylist = [
('long', True),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class Lister in cliff, abstract method take_action()
# returns a tuple containing the column names and an iterable
# containing the data to be listed.
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
marker=self._image.id,
)
collist = (
'ID',
'Name',
'Disk Format',
'Container Format',
'Size',
'Checksum',
'Status',
'Visibility',
'Protected',
'Project',
'Tags',
)
self.assertEqual(collist, columns)
datalist = ((
self._image.id,
self._image.name,
'',
'',
'',
'',
'',
self._image.visibility,
self._image.protected,
self._image.owner,
format_columns.ListColumn(self._image.tags),
), )
self.assertListItemEqual(datalist, tuple(data))
@mock.patch('osc_lib.api.utils.simple_filter')
def test_image_list_property_option(self, sf_mock):
sf_mock.return_value = [copy.deepcopy(self._image)]
arglist = [
'--property', 'a=1',
]
verifylist = [
('property', {'a': '1'}),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class Lister in cliff, abstract method take_action()
# returns a tuple containing the column names and an iterable
# containing the data to be listed.
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
marker=self._image.id,
)
sf_mock.assert_called_with(
[self._image],
attr='a',
value='1',
property_field='properties',
)
self.assertEqual(self.columns, columns)
self.assertListItemEqual(self.datalist, tuple(data))
@mock.patch('osc_lib.utils.sort_items')
def test_image_list_sort_option(self, si_mock):
si_mock.return_value = [copy.deepcopy(self._image)]
arglist = ['--sort', 'name:asc']
verifylist = [('sort', 'name:asc')]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class Lister in cliff, abstract method take_action()
# returns a tuple containing the column names and an iterable
# containing the data to be listed.
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
marker=self._image.id,
)
si_mock.assert_called_with(
[self._image],
'name:asc',
str,
)
self.assertEqual(self.columns, columns)
self.assertListItemEqual(self.datalist, tuple(data))
def test_image_list_limit_option(self):
ret_limit = 1
arglist = [
'--limit', str(ret_limit),
]
verifylist = [
('limit', ret_limit),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
limit=ret_limit, marker=None
)
self.assertEqual(self.columns, columns)
self.assertEqual(ret_limit, len(tuple(data)))
@mock.patch('osc_lib.utils.find_resource')
def test_image_list_marker_option(self, fr_mock):
# tangchen: Since image_fakes.IMAGE is a dict, it cannot offer a .id
# operation. Will fix this by using FakeImage class instead
# of IMAGE dict.
fr_mock.return_value = mock.Mock()
fr_mock.return_value.id = image_fakes.image_id
arglist = [
'--marker', image_fakes.image_name,
]
verifylist = [
('marker', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
marker=image_fakes.image_id,
)
def test_image_list_name_option(self):
arglist = [
'--name', 'abc',
]
verifylist = [
('name', 'abc'),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
name='abc', marker=self._image.id
)
def test_image_list_status_option(self):
arglist = [
'--status', 'active',
]
verifylist = [
('status', 'active'),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
status='active', marker=self._image.id
)
def test_image_list_tag_option(self):
arglist = [
'--tag', 'abc',
]
verifylist = [
('tag', 'abc'),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.api_mock.image_list.assert_called_with(
tag='abc', marker=self._image.id
)
class TestListImageProjects(TestImage):
project = identity_fakes.FakeProject.create_one_project()
_image = image_fakes.FakeImage.create_one_image()
member = image_fakes.FakeImage.create_one_image_member(
attrs={'image_id': _image.id,
'member_id': project.id}
)
columns = (
"Image ID",
"Member ID",
"Status"
)
datalist = ((
_image.id,
member.member_id,
member.status,
))
def setUp(self):
super(TestListImageProjects, self).setUp()
self.images_mock.get.return_value = self._image
self.image_members_mock.list.return_value = self.datalist
self.cmd = image.ListImageProjects(self.app, None)
def test_image_member_list(self):
arglist = [
self._image.id
]
verifylist = [
('image', self._image.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
columns, data = self.cmd.take_action(parsed_args)
self.image_members_mock.list.assert_called_with(self._image.id)
self.assertEqual(self.columns, columns)
self.assertEqual(len(self.datalist), len(tuple(data)))
class TestRemoveProjectImage(TestImage):
project = identity_fakes.FakeProject.create_one_project()
domain = identity_fakes.FakeDomain.create_one_domain()
def setUp(self):
super(TestRemoveProjectImage, self).setUp()
self._image = image_fakes.FakeImage.create_one_image()
# This is the return value for utils.find_resource()
self.images_mock.get.return_value = self._image
self.project_mock.get.return_value = self.project
self.domain_mock.get.return_value = self.domain
self.image_members_mock.delete.return_value = None
# Get the command object to test
self.cmd = image.RemoveProjectImage(self.app, None)
def test_remove_project_image_no_options(self):
arglist = [
self._image.id,
self.project.id,
]
verifylist = [
('image', self._image.id),
('project', self.project.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.image_members_mock.delete.assert_called_with(
self._image.id,
self.project.id,
)
self.assertIsNone(result)
def test_remove_project_image_with_options(self):
arglist = [
self._image.id,
self.project.id,
'--project-domain', self.domain.id,
]
verifylist = [
('image', self._image.id),
('project', self.project.id),
('project_domain', self.domain.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.image_members_mock.delete.assert_called_with(
self._image.id,
self.project.id,
)
self.assertIsNone(result)
class TestImageSet(TestImage):
project = identity_fakes.FakeProject.create_one_project()
domain = identity_fakes.FakeDomain.create_one_domain()
def setUp(self):
super(TestImageSet, self).setUp()
# Set up the schema
self.model = warlock.model_factory(
image_fakes.IMAGE_schema,
schemas.SchemaBasedModel,
)
self.project_mock.get.return_value = self.project
self.domain_mock.get.return_value = self.domain
self.images_mock.get.return_value = self.model(**image_fakes.IMAGE)
self.images_mock.update.return_value = self.model(**image_fakes.IMAGE)
self.app.client_manager.auth_ref = mock.Mock(
project_id=self.project.id,
)
# Get the command object to test
self.cmd = image.SetImage(self.app, None)
def test_image_set_no_options(self):
arglist = [
image_fakes.image_id,
]
verifylist = [
('image', image_fakes.image_id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.assertIsNone(result)
self.image_members_mock.update.assert_not_called()
def test_image_set_membership_option_accept(self):
membership = image_fakes.FakeImage.create_one_image_member(
attrs={'image_id': image_fakes.image_id,
'member_id': self.project.id}
)
self.image_members_mock.update.return_value = membership
arglist = [
'--accept',
image_fakes.image_id,
]
verifylist = [
('accept', True),
('reject', False),
('pending', False),
('image', image_fakes.image_id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.image_members_mock.update.assert_called_once_with(
image_fakes.image_id,
self.app.client_manager.auth_ref.project_id,
'accepted',
)
# Assert that the 'update image" route is also called, in addition to
# the 'update membership' route.
self.images_mock.update.assert_called_with(image_fakes.image_id)
def test_image_set_membership_option_reject(self):
membership = image_fakes.FakeImage.create_one_image_member(
attrs={'image_id': image_fakes.image_id,
'member_id': self.project.id}
)
self.image_members_mock.update.return_value = membership
arglist = [
'--reject',
image_fakes.image_id,
]
verifylist = [
('accept', False),
('reject', True),
('pending', False),
('image', image_fakes.image_id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.image_members_mock.update.assert_called_once_with(
image_fakes.image_id,
self.app.client_manager.auth_ref.project_id,
'rejected',
)
# Assert that the 'update image" route is also called, in addition to
# the 'update membership' route.
self.images_mock.update.assert_called_with(image_fakes.image_id)
def test_image_set_membership_option_pending(self):
membership = image_fakes.FakeImage.create_one_image_member(
attrs={'image_id': image_fakes.image_id,
'member_id': self.project.id}
)
self.image_members_mock.update.return_value = membership
arglist = [
'--pending',
image_fakes.image_id,
]
verifylist = [
('accept', False),
('reject', False),
('pending', True),
('image', image_fakes.image_id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
self.image_members_mock.update.assert_called_once_with(
image_fakes.image_id,
self.app.client_manager.auth_ref.project_id,
'pending',
)
# Assert that the 'update image" route is also called, in addition to
# the 'update membership' route.
self.images_mock.update.assert_called_with(image_fakes.image_id)
def test_image_set_options(self):
arglist = [
'--name', 'new-name',
'--min-disk', '2',
'--min-ram', '4',
'--container-format', 'ovf',
'--disk-format', 'vmdk',
'--project', self.project.name,
'--project-domain', self.domain.id,
image_fakes.image_id,
]
verifylist = [
('name', 'new-name'),
('min_disk', 2),
('min_ram', 4),
('container_format', 'ovf'),
('disk_format', 'vmdk'),
('project', self.project.name),
('project_domain', self.domain.id),
('image', image_fakes.image_id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {
'name': 'new-name',
'owner': self.project.id,
'min_disk': 2,
'min_ram': 4,
'container_format': 'ovf',
'disk_format': 'vmdk',
}
# ImageManager.update(image, **kwargs)
self.images_mock.update.assert_called_with(
image_fakes.image_id, **kwargs)
self.assertIsNone(result)
def test_image_set_with_unexist_project(self):
self.project_mock.get.side_effect = exceptions.NotFound(None)
self.project_mock.find.side_effect = exceptions.NotFound(None)
arglist = [
'--project', 'unexist_owner',
image_fakes.image_id,
]
verifylist = [
('project', 'unexist_owner'),
('image', image_fakes.image_id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.assertRaises(
exceptions.CommandError,
self.cmd.take_action, parsed_args)
def test_image_set_bools1(self):
arglist = [
'--protected',
'--private',
image_fakes.image_name,
]
verifylist = [
('protected', True),
('unprotected', False),
('public', False),
('private', True),
('image', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {
'protected': True,
'visibility': 'private',
}
# ImageManager.update(image, **kwargs)
self.images_mock.update.assert_called_with(
image_fakes.image_id,
**kwargs
)
self.assertIsNone(result)
def test_image_set_bools2(self):
arglist = [
'--unprotected',
'--public',
image_fakes.image_name,
]
verifylist = [
('protected', False),
('unprotected', True),
('public', True),
('private', False),
('image', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {
'protected': False,
'visibility': 'public',
}
# ImageManager.update(image, **kwargs)
self.images_mock.update.assert_called_with(
image_fakes.image_id,
**kwargs
)
self.assertIsNone(result)
def test_image_set_properties(self):
arglist = [
'--property', 'Alpha=1',
'--property', 'Beta=2',
image_fakes.image_name,
]
verifylist = [
('properties', {'Alpha': '1', 'Beta': '2'}),
('image', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {
'Alpha': '1',
'Beta': '2',
}
# ImageManager.update(image, **kwargs)
self.images_mock.update.assert_called_with(
image_fakes.image_id,
**kwargs
)
self.assertIsNone(result)
def test_image_set_fake_properties(self):
arglist = [
'--architecture', 'z80',
'--instance-id', '12345',
'--kernel-id', '67890',
'--os-distro', 'cpm',
'--os-version', '2.2H',
'--ramdisk-id', 'xyzpdq',
image_fakes.image_name,
]
verifylist = [
('architecture', 'z80'),
('instance_id', '12345'),
('kernel_id', '67890'),
('os_distro', 'cpm'),
('os_version', '2.2H'),
('ramdisk_id', 'xyzpdq'),
('image', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {
'architecture': 'z80',
'instance_id': '12345',
'kernel_id': '67890',
'os_distro': 'cpm',
'os_version': '2.2H',
'ramdisk_id': 'xyzpdq',
}
# ImageManager.update(image, **kwargs)
self.images_mock.update.assert_called_with(
image_fakes.image_id,
**kwargs
)
self.assertIsNone(result)
def test_image_set_tag(self):
arglist = [
'--tag', 'test-tag',
image_fakes.image_name,
]
verifylist = [
('tags', ['test-tag']),
('image', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {
'tags': ['test-tag'],
}
# ImageManager.update(image, **kwargs)
self.images_mock.update.assert_called_with(
image_fakes.image_id,
**kwargs
)
self.assertIsNone(result)
def test_image_set_activate(self):
arglist = [
'--tag', 'test-tag',
'--activate',
image_fakes.image_name,
]
verifylist = [
('tags', ['test-tag']),
('image', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {
'tags': ['test-tag'],
}
self.images_mock.reactivate.assert_called_with(
image_fakes.image_id,
)
# ImageManager.update(image, **kwargs)
self.images_mock.update.assert_called_with(
image_fakes.image_id,
**kwargs
)
self.assertIsNone(result)
def test_image_set_deactivate(self):
arglist = [
'--tag', 'test-tag',
'--deactivate',
image_fakes.image_name,
]
verifylist = [
('tags', ['test-tag']),
('image', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {
'tags': ['test-tag'],
}
self.images_mock.deactivate.assert_called_with(
image_fakes.image_id,
)
# ImageManager.update(image, **kwargs)
self.images_mock.update.assert_called_with(
image_fakes.image_id,
**kwargs
)
self.assertIsNone(result)
def test_image_set_tag_merge(self):
old_image = copy.copy(image_fakes.IMAGE)
old_image['tags'] = ['old1', 'new2']
self.images_mock.get.return_value = self.model(**old_image)
arglist = [
'--tag', 'test-tag',
image_fakes.image_name,
]
verifylist = [
('tags', ['test-tag']),
('image', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {
'tags': ['old1', 'new2', 'test-tag'],
}
# ImageManager.update(image, **kwargs)
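        # Tag order after the merge is not guaranteed, so inspect the raw
        # call args and compare the tags as sets rather than ordered lists.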
a, k = self.images_mock.update.call_args
self.assertEqual(image_fakes.image_id, a[0])
self.assertIn('tags', k)
self.assertEqual(set(kwargs['tags']), set(k['tags']))
self.assertIsNone(result)
def test_image_set_tag_merge_dupe(self):
old_image = copy.copy(image_fakes.IMAGE)
old_image['tags'] = ['old1', 'new2']
self.images_mock.get.return_value = self.model(**old_image)
arglist = [
'--tag', 'old1',
image_fakes.image_name,
]
verifylist = [
('tags', ['old1']),
('image', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {
'tags': ['new2', 'old1'],
}
# ImageManager.update(image, **kwargs)
a, k = self.images_mock.update.call_args
self.assertEqual(image_fakes.image_id, a[0])
self.assertIn('tags', k)
self.assertEqual(set(kwargs['tags']), set(k['tags']))
self.assertIsNone(result)
def test_image_set_dead_options(self):
arglist = [
'--visibility', '1-mile',
image_fakes.image_name,
]
verifylist = [
('visibility', '1-mile'),
('image', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.assertRaises(
exceptions.CommandError,
self.cmd.take_action, parsed_args)
def test_image_set_numeric_options_to_zero(self):
arglist = [
'--min-disk', '0',
'--min-ram', '0',
image_fakes.image_name,
]
verifylist = [
('min_disk', 0),
('min_ram', 0),
('image', image_fakes.image_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {
'min_disk': 0,
'min_ram': 0,
}
# ImageManager.update(image, **kwargs)
self.images_mock.update.assert_called_with(
image_fakes.image_id,
**kwargs
)
self.assertIsNone(result)
class TestImageShow(TestImage):
new_image = image_fakes.FakeImage.create_one_image(
attrs={'size': 1000})
def setUp(self):
super(TestImageShow, self).setUp()
# Set up the schema
self.model = warlock.model_factory(
image_fakes.IMAGE_schema,
schemas.SchemaBasedModel,
)
self.images_mock.get.return_value = self.model(**image_fakes.IMAGE)
# Get the command object to test
self.cmd = image.ShowImage(self.app, None)
def test_image_show(self):
arglist = [
image_fakes.image_id,
]
verifylist = [
('image', image_fakes.image_id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
self.images_mock.get.assert_called_with(
image_fakes.image_id,
)
self.assertEqual(image_fakes.IMAGE_columns, columns)
self.assertItemEqual(image_fakes.IMAGE_SHOW_data, data)
def test_image_show_human_readable(self):
self.images_mock.get.return_value = self.new_image
arglist = [
'--human-readable',
self.new_image.id,
]
verifylist = [
('human_readable', True),
('image', self.new_image.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# In base command class ShowOne in cliff, abstract method take_action()
# returns a two-part tuple with a tuple of column names and a tuple of
# data to be shown.
columns, data = self.cmd.take_action(parsed_args)
self.images_mock.get.assert_called_with(
self.new_image.id,
)
size_index = columns.index('size')
self.assertEqual(data[size_index], '1K')
class TestImageUnset(TestImage):
attrs = {}
attrs['tags'] = ['test']
attrs['prop'] = 'test'
image = image_fakes.FakeImage.create_one_image(attrs)
def setUp(self):
super(TestImageUnset, self).setUp()
# Set up the schema
self.model = warlock.model_factory(
image_fakes.IMAGE_schema,
schemas.SchemaBasedModel,
)
self.images_mock.get.return_value = self.image
self.image_tags_mock.delete.return_value = self.image
# Get the command object to test
self.cmd = image.UnsetImage(self.app, None)
def test_image_unset_no_options(self):
arglist = [
image_fakes.image_id,
]
verifylist = [
('image', image_fakes.image_id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.assertIsNone(result)
def test_image_unset_tag_option(self):
arglist = [
'--tag', 'test',
self.image.id,
]
verifylist = [
('tags', ['test']),
('image', self.image.id),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.image_tags_mock.delete.assert_called_with(
self.image.id, 'test'
)
self.assertIsNone(result)
def test_image_unset_property_option(self):
arglist = [
'--property', 'prop',
self.image.id,
]
verifylist = [
('properties', ['prop']),
('image', self.image.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {}
self.images_mock.update.assert_called_with(
self.image.id,
parsed_args.properties,
**kwargs)
self.assertIsNone(result)
def test_image_unset_mixed_option(self):
arglist = [
'--tag', 'test',
'--property', 'prop',
self.image.id,
]
verifylist = [
('tags', ['test']),
('properties', ['prop']),
('image', self.image.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
kwargs = {}
self.images_mock.update.assert_called_with(
self.image.id,
parsed_args.properties,
**kwargs)
self.image_tags_mock.delete.assert_called_with(
self.image.id, 'test'
)
self.assertIsNone(result)
class TestImageSave(TestImage):
image = image_fakes.FakeImage.create_one_image({})
def setUp(self):
super(TestImageSave, self).setUp()
# Generate a request id
self.resp = mock.MagicMock()
self.resp.headers['x-openstack-request-id'] = 'req_id'
# Get the command object to test
self.cmd = image.SaveImage(self.app, None)
def test_save_data(self):
req_id_proxy = glanceclient_utils.RequestIdProxy(
['some_data', self.resp]
)
self.images_mock.data.return_value = req_id_proxy
arglist = ['--file', '/path/to/file', self.image.id]
verifylist = [
('file', '/path/to/file'),
('image', self.image.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
with mock.patch('glanceclient.common.utils.save_image') as mocked_save:
self.cmd.take_action(parsed_args)
mocked_save.assert_called_once_with(req_id_proxy, '/path/to/file')
def test_save_no_data(self):
req_id_proxy = glanceclient_utils.RequestIdProxy(
[None, self.resp]
)
self.images_mock.data.return_value = req_id_proxy
arglist = ['--file', '/path/to/file', self.image.id]
verifylist = [
('file', '/path/to/file'),
('image', self.image.id)
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# Raise SystemExit if no data was provided.
self.assertRaises(SystemExit, self.cmd.take_action, parsed_args)
| {
"content_hash": "82c99107c84c7a726f876dc0d3063187",
"timestamp": "",
"source": "github",
"line_count": 1625,
"max_line_length": 79,
"avg_line_length": 31.743384615384617,
"alnum_prop": 0.5523719054727333,
"repo_name": "dtroyer/python-openstackclient",
"id": "748a61aaf40798806984bf015b763bea2f287e79",
"size": "52182",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstackclient/tests/unit/image/v2/test_image.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4040230"
},
{
"name": "Shell",
"bytes": "299"
}
],
"symlink_target": ""
} |
from django.test import TestCase
from music.models import MusicProfile
from search.utils import do_find  # assumed module path; the original file omits this import
from users.models import User
class SearchTestCase(TestCase):
def setUp(self):
u = User.objects.create(username='joe')
MusicProfile.objects.create(user=u, zipcode='80516')
    def test_find_one(self):
        users = do_find('80516')
        self.assertEqual(users.count(), 1)
        self.assertEqual(users[0].username, 'joe')
    def test_find_two(self):
        u = User.objects.create(username='jane')
        MusicProfile.objects.create(user=u, zipcode='80516')
        users = do_find('80516')
        self.assertEqual(users.count(), 2)
        self.assertTrue(users.filter(username='joe').exists())
        u = User.objects.create(username='bob')
        MusicProfile.objects.create(user=u, zipcode='51104')
        users = do_find('80516')
        self.assertEqual(users.count(), 2)
        self.assertFalse(users.filter(username='bob').exists())
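# A minimal sketch (an assumption -- the real helper lives elsewhere in the
# repo, and the reverse relation name is guessed from the models imported
# above) of the do_find utility these tests exercise:
#
#   def do_find(zipcode):
#       return User.objects.filter(musicprofile__zipcode=zipcode)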
| {
"content_hash": "9cb62cbb322fe16002214e3040e611c2",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 60,
"avg_line_length": 25.63888888888889,
"alnum_prop": 0.6403033586132177,
"repo_name": "jimfmunro/djangodash2013",
"id": "595a9fcb04aa23e30148a24c246f7396bef5fa58",
"size": "923",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dash/test/search/test_utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "13769"
},
{
"name": "JavaScript",
"bytes": "16495"
},
{
"name": "Python",
"bytes": "59593"
},
{
"name": "Shell",
"bytes": "5083"
}
],
"symlink_target": ""
} |
'''
testimages.py
Just a test program to test the libraries
Author: Brandon Layton
'''
import tkinter, tkinter.filedialog
from GIF import GIF
root = tkinter.Tk()
root.withdraw()
file_path = tkinter.filedialog.askopenfilename()
f = open(file_path,"rb")
myImage = GIF()
myImage.loadGIF(f)
#for p in myImage.getImage(0).pixels:
# print(p)
images = myImage.getImages()
_i = 0
# Show each image in the command line
for img in images:
print("Image #"+str(_i))
img.showImage()
_i+=1
myImage.save("C:\\test.gif") | {
"content_hash": "38d58075b6a81e6c1e09cb4eab765d0c",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 48,
"avg_line_length": 15.242424242424242,
"alnum_prop": 0.7017892644135189,
"repo_name": "proflayton/pyMediaManip",
"id": "e993c7dbae9167b0a0e56e7e4090442a491231d6",
"size": "503",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testimages.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19306"
}
],
"symlink_target": ""
} |
import unittest
import os
from test.aiml_tests.client import TestClient
from programy.config.brain import BrainFileConfiguration
class BasicTestClient(TestClient):
def __init__(self):
TestClient.__init__(self)
def load_configuration(self, arguments):
super(BasicTestClient, self).load_configuration(arguments)
self.configuration.brain_configuration._aiml_files = BrainFileConfiguration(files=os.path.dirname(__file__))
class PatternBotAIMLTests(unittest.TestCase):
    def setUp(self):
PatternBotAIMLTests.test_client = BasicTestClient()
PatternBotAIMLTests.test_client.bot.brain.properties.pairs.append(("favouritecolor", "RED"))
def test_pattern_bot_match(self):
PatternBotAIMLTests.test_client.bot.brain.dump_tree()
response = PatternBotAIMLTests.test_client.bot.ask_question("test", "MY FAVORITE COLOR IS RED")
self.assertIsNotNone(response)
self.assertEqual(response, "RED IS A NICE COLOR.")
| {
"content_hash": "ff8b35b14e2171647eb47b896cd4fe10",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 116,
"avg_line_length": 36.629629629629626,
"alnum_prop": 0.731041456016178,
"repo_name": "dkamotsky/program-y",
"id": "402ee8dec6f5fb8d48e2ee0cb0f7108049352d36",
"size": "989",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/aiml_tests/pattern_bot_tests/test_pattern_bot_aiml.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "937"
},
{
"name": "HTML",
"bytes": "1583"
},
{
"name": "Python",
"bytes": "1131157"
},
{
"name": "Shell",
"bytes": "3481"
}
],
"symlink_target": ""
} |
import mock
from django.conf import settings
from oslo_serialization import jsonutils
from openstack_dashboard.api.rest import keystone
from openstack_dashboard.test import helpers as test
class KeystoneRestTestCase(test.TestCase):
#
# Version
#
@mock.patch.object(keystone.api, 'keystone')
def test_version_get(self, kc):
request = self.mock_rest_request()
kc.get_version.return_value = '2.0'
response = keystone.Version().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.content, '{"version": "2.0"}')
kc.get_version.assert_called_once_with()
#
# Users
#
@mock.patch.object(keystone.api, 'keystone')
def test_user_get(self, kc):
request = self.mock_rest_request()
kc.user_get.return_value.to_dict.return_value = {'name': 'Ni!'}
response = keystone.User().get(request, 'the_id')
self.assertStatusCode(response, 200)
self.assertEqual(response.content, '{"name": "Ni!"}')
kc.user_get.assert_called_once_with(request, 'the_id')
@mock.patch.object(keystone.api, 'keystone')
def test_user_get_current(self, kc):
request = self.mock_rest_request(**{'user.id': 'current_id'})
kc.user_get.return_value.to_dict.return_value = {'name': 'Ni!'}
response = keystone.User().get(request, 'current')
self.assertStatusCode(response, 200)
self.assertEqual(response.content, '{"name": "Ni!"}')
kc.user_get.assert_called_once_with(request, 'current_id')
@mock.patch.object(keystone.api, 'keystone')
def test_user_get_list(self, kc):
request = self.mock_rest_request(**{
'session.get': mock.Mock(return_value='the_domain'),
'GET': {},
})
kc.user_list.return_value = [
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ptang!'}})
]
response = keystone.Users().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.content,
'{"items": [{"name": "Ni!"}, {"name": "Ptang!"}]}')
kc.user_list.assert_called_once_with(request, project=None,
domain='the_domain', group=None,
filters=None)
@mock.patch.object(keystone.api, 'keystone')
def test_user_get_list_with_filters(self, kc):
filters = {'enabled': True}
request = self.mock_rest_request(**{
'session.get': mock.Mock(return_value='the_domain'),
'GET': dict(**filters),
})
kc.user_list.return_value = [
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ptang!'}})
]
response = keystone.Users().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.content,
'{"items": [{"name": "Ni!"}, {"name": "Ptang!"}]}')
kc.user_list.assert_called_once_with(request, project=None,
domain='the_domain', group=None,
filters=filters)
def test_user_create_full(self):
self._test_user_create(
'{"name": "bob", '
'"password": "sekrit", "project_id": "project123", '
'"email": "[email protected]"}',
{
'name': 'bob',
'password': 'sekrit',
'email': '[email protected]',
'project': 'project123',
'domain': 'the_domain',
'enabled': True
}
)
def test_user_create_existing_role(self):
self._test_user_create(
'{"name": "bob", '
'"password": "sekrit", "project_id": "project123", '
'"email": "[email protected]"}',
{
'name': 'bob',
'password': 'sekrit',
'email': '[email protected]',
'project': 'project123',
'domain': 'the_domain',
'enabled': True
}
)
def test_user_create_no_project(self):
self._test_user_create(
'{"name": "bob", '
'"password": "sekrit", "project_id": "", '
'"email": "[email protected]"}',
{
'name': 'bob',
'password': 'sekrit',
'email': '[email protected]',
'project': None,
'domain': 'the_domain',
'enabled': True
}
)
def test_user_create_partial(self):
self._test_user_create(
'{"name": "bob"}',
{
'name': 'bob',
'password': None,
'email': None,
'project': None,
'domain': 'the_domain',
'enabled': True
}
)
@mock.patch.object(keystone.api, 'keystone')
def _test_user_create(self, supplied_body, add_user_call, kc):
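        # Note: the mock.patch decorator on this helper appends the patched
        # keystone module as the trailing `kc` argument, after the two
        # arguments passed in by each test_user_create_* caller.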
request = self.mock_rest_request(body=supplied_body)
kc.get_default_domain.return_value = mock.Mock(**{'id': 'the_domain'})
kc.user_create.return_value.id = 'user123'
kc.user_create.return_value = mock.Mock(**{
'id': 'user123',
'to_dict.return_value': {'id': 'user123', 'name': 'bob'}
})
response = keystone.Users().post(request)
self.assertStatusCode(response, 201)
self.assertEqual(response['location'],
'/api/keystone/users/user123')
self.assertEqual(response.content, '{"id": "user123", '
'"name": "bob"}')
kc.user_create.assert_called_once_with(request, **add_user_call)
@mock.patch.object(keystone.api, 'keystone')
def test_user_delete_many(self, kc):
request = self.mock_rest_request(body='''
["id1", "id2", "id3"]
''')
response = keystone.Users().delete(request)
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.user_delete.assert_has_calls([
mock.call(request, 'id1'),
mock.call(request, 'id2'),
mock.call(request, 'id3'),
])
@mock.patch.object(keystone.api, 'keystone')
def test_user_delete(self, kc):
request = self.mock_rest_request()
response = keystone.User().delete(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.user_delete.assert_called_once_with(request, 'the_id')
@mock.patch.object(keystone.api, 'keystone')
def test_user_patch_password(self, kc):
request = self.mock_rest_request(body='''
{"password": "sekrit"}
''')
user = keystone.User()
kc.user_get = mock.MagicMock(return_value=user)
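        # The User view instance doubles as a sentinel here: the patched
        # user_get returns it, and the handler is expected to pass the same
        # object straight through to user_update_password.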
response = user.patch(request, 'user123')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.user_update_password.assert_called_once_with(request,
user,
'sekrit')
@mock.patch.object(keystone.api, 'keystone')
def test_user_patch_enabled(self, kc):
request = self.mock_rest_request(body='''
{"enabled": false}
''')
user = keystone.User()
kc.user_get = mock.MagicMock(return_value=user)
response = user.patch(request, 'user123')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.user_get.assert_called_once_with(request, 'user123')
kc.user_update_enabled.assert_called_once_with(request,
user,
False)
@mock.patch.object(keystone.api, 'keystone')
def test_user_patch_project(self, kc):
request = self.mock_rest_request(body='''
{"project": "other123"}
''')
user = keystone.User()
kc.user_get = mock.MagicMock(return_value=user)
response = user.patch(request, 'user123')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.user_update.assert_called_once_with(request,
user,
project='other123')
@mock.patch.object(keystone.api, 'keystone')
def test_user_patch_multiple(self, kc):
request = self.mock_rest_request(body='''
{"project": "other123", "name": "something"}
''')
user = keystone.User()
kc.user_get = mock.MagicMock(return_value=user)
response = user.patch(request, 'user123')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.user_update.assert_called_once_with(request,
user,
project='other123',
name='something')
#
# Roles
#
@mock.patch.object(keystone.api, 'keystone')
def test_role_get(self, kc):
request = self.mock_rest_request()
kc.role_get.return_value.to_dict.return_value = {'name': 'Ni!'}
response = keystone.Role().get(request, 'the_id')
self.assertStatusCode(response, 200)
self.assertEqual(response.content, '{"name": "Ni!"}')
kc.role_get.assert_called_once_with(request, 'the_id')
@mock.patch.object(keystone.api, 'keystone')
def test_role_get_default(self, kc):
request = self.mock_rest_request()
kc.get_default_role.return_value.to_dict.return_value = {'name': 'Ni!'}
response = keystone.Role().get(request, 'default')
self.assertStatusCode(response, 200)
self.assertEqual(response.content, '{"name": "Ni!"}')
kc.get_default_role.assert_called_once_with(request)
kc.role_get.assert_not_called()
@mock.patch.object(keystone.api, 'keystone')
def test_role_get_list(self, kc):
request = self.mock_rest_request(**{'GET': {}})
kc.role_list.return_value = [
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ptang!'}})
]
response = keystone.Roles().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.content,
'{"items": [{"name": "Ni!"}, {"name": "Ptang!"}]}')
kc.role_list.assert_called_once_with(request)
@mock.patch.object(keystone.api, 'keystone')
def test_role_get_for_user(self, kc):
request = self.mock_rest_request(**{'GET': {'user_id': 'user123',
'project_id': 'project123'}})
kc.roles_for_user.return_value = [
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ptang!'}})
]
response = keystone.Roles().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.content,
'{"items": [{"name": "Ni!"}, {"name": "Ptang!"}]}')
kc.roles_for_user.assert_called_once_with(request, 'user123',
'project123')
@mock.patch.object(keystone.api, 'keystone')
def test_role_create(self, kc):
request = self.mock_rest_request(body='''
{"name": "bob"}
''')
kc.role_create.return_value.id = 'role123'
kc.role_create.return_value.to_dict.return_value = {
'id': 'role123', 'name': 'bob'
}
response = keystone.Roles().post(request)
self.assertStatusCode(response, 201)
self.assertEqual(response['location'],
'/api/keystone/roles/role123')
self.assertEqual(response.content, '{"id": "role123", "name": "bob"}')
kc.role_create.assert_called_once_with(request, 'bob')
@mock.patch.object(keystone.api, 'keystone')
def test_role_grant(self, kc):
request = self.mock_rest_request(body='''
{"action": "grant", "data": {"user_id": "user123",
"role_id": "role123", "project_id": "project123"}}
''')
response = keystone.ProjectRole().put(request, "project1", "role2",
"user3")
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.add_tenant_user_role.assert_called_once_with(request, 'project1',
'user3', 'role2')
@mock.patch.object(keystone.api, 'keystone')
def test_role_delete_many(self, kc):
request = self.mock_rest_request(body='''
["id1", "id2", "id3"]
''')
response = keystone.Roles().delete(request)
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.role_delete.assert_has_calls([
mock.call(request, 'id1'),
mock.call(request, 'id2'),
mock.call(request, 'id3'),
])
@mock.patch.object(keystone.api, 'keystone')
def test_role_delete(self, kc):
request = self.mock_rest_request()
response = keystone.Role().delete(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.role_delete.assert_called_once_with(request, 'the_id')
@mock.patch.object(keystone.api, 'keystone')
def test_role_patch(self, kc):
request = self.mock_rest_request(body='{"name": "spam"}')
response = keystone.Role().patch(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.role_update.assert_called_once_with(request,
'the_id',
'spam')
#
# Domains
#
@mock.patch.object(keystone.api, 'keystone')
def test_domain_get(self, kc):
request = self.mock_rest_request()
kc.domain_get.return_value.to_dict.return_value = {'name': 'Ni!'}
response = keystone.Domain().get(request, 'the_id')
self.assertStatusCode(response, 200)
self.assertEqual(response.content, '{"name": "Ni!"}')
kc.domain_get.assert_called_once_with(request, 'the_id')
@mock.patch.object(keystone.api, 'keystone')
def test_domain_get_default(self, kc):
request = self.mock_rest_request()
kc.get_default_domain.return_value.to_dict.return_value = {
'name': 'Ni!'
}
response = keystone.Domain().get(request, 'default')
self.assertStatusCode(response, 200)
self.assertEqual(response.content, '{"name": "Ni!"}')
kc.get_default_domain.assert_called_once_with(request)
kc.domain_get.assert_not_called()
@mock.patch.object(keystone.api, 'keystone')
def test_domain_get_list(self, kc):
request = self.mock_rest_request()
kc.domain_list.return_value = [
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ptang!'}})
]
response = keystone.Domains().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.content,
'{"items": [{"name": "Ni!"}, {"name": "Ptang!"}]}')
kc.domain_list.assert_called_once_with(request)
def test_domain_create_full(self):
self._test_domain_create(
'{"name": "bob", '
'"description": "sekrit", "enabled": false}',
{
'description': 'sekrit',
'enabled': False
}
)
def test_domain_create_partial(self):
self._test_domain_create(
'{"name": "bob"}',
{
'description': None,
'enabled': True
}
)
@mock.patch.object(keystone.api, 'keystone')
def _test_domain_create(self, supplied_body, expected_call, kc):
request = self.mock_rest_request(body=supplied_body)
kc.domain_create.return_value.id = 'domain123'
kc.domain_create.return_value.to_dict.return_value = {
'id': 'domain123', 'name': 'bob'
}
response = keystone.Domains().post(request)
self.assertStatusCode(response, 201)
self.assertEqual(response['location'],
'/api/keystone/domains/domain123')
self.assertEqual(response.content, '{"id": "domain123", '
'"name": "bob"}')
kc.domain_create.assert_called_once_with(request, 'bob',
**expected_call)
@mock.patch.object(keystone.api, 'keystone')
def test_domain_delete_many(self, kc):
request = self.mock_rest_request(body='''
["id1", "id2", "id3"]
''')
response = keystone.Domains().delete(request)
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.domain_delete.assert_has_calls([
mock.call(request, 'id1'),
mock.call(request, 'id2'),
mock.call(request, 'id3'),
])
@mock.patch.object(keystone.api, 'keystone')
def test_domain_delete(self, kc):
request = self.mock_rest_request()
response = keystone.Domain().delete(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.domain_delete.assert_called_once_with(request, 'the_id')
@mock.patch.object(keystone.api, 'keystone')
def test_domain_patch(self, kc):
request = self.mock_rest_request(body='{"name": "spam"}')
response = keystone.Domain().patch(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.domain_update.assert_called_once_with(request,
'the_id',
name='spam',
description=None,
enabled=None)
#
# Projects
#
@mock.patch.object(keystone.api, 'keystone')
def test_project_get(self, kc):
request = self.mock_rest_request()
kc.tenant_get.return_value.to_dict.return_value = {'name': 'Ni!'}
response = keystone.Project().get(request, 'the_id')
self.assertStatusCode(response, 200)
self.assertEqual(response.content, '{"name": "Ni!"}')
kc.tenant_get.assert_called_once_with(request, 'the_id')
def test_project_get_list(self):
self._test_project_get_list(
{},
{
'paginate': False,
'marker': None,
'domain': None,
'user': None,
'admin': True,
'filters': None
}
)
def test_project_get_list_with_params_true(self):
self._test_project_get_list(
{
'paginate': 'true',
'admin': 'true'
},
{
'paginate': True,
'marker': None,
'domain': None,
'user': None,
'admin': True,
'filters': None
}
)
def test_project_get_list_with_params_false(self):
self._test_project_get_list(
{
'paginate': 'false',
'admin': 'false'
},
{
'paginate': False,
'marker': None,
'domain': None,
'user': None,
'admin': False,
'filters': None
}
)
@mock.patch.object(keystone.api, 'keystone')
def _test_project_get_list(self, params, expected_call, kc):
request = self.mock_rest_request(**{'GET': dict(**params)})
kc.tenant_list.return_value = ([
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ptang!'}})
], False)
with mock.patch.object(settings, 'DEBUG', True):
response = keystone.Projects().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.content, '{"has_more": false, '
'"items": [{"name": "Ni!"}, {"name": "Ptang!"}]}')
kc.tenant_list.assert_called_once_with(request, **expected_call)
@mock.patch.object(keystone.api, 'keystone')
def test_project_get_list_with_filters(self, kc):
filters = {'name': 'Ni!'}
request = self.mock_rest_request(**{'GET': dict(**filters)})
kc.tenant_list.return_value = ([
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}}),
mock.Mock(**{'to_dict.return_value': {'name': 'Ni!'}})
], False)
with mock.patch.object(settings, 'DEBUG', True):
response = keystone.Projects().get(request)
self.assertStatusCode(response, 200)
self.assertEqual(response.content, '{"has_more": false, '
'"items": [{"name": "Ni!"}, {"name": "Ni!"}]}')
kc.tenant_list.assert_called_once_with(request, paginate=False,
marker=None, domain=None,
user=None, admin=True,
filters=filters)
def test_project_create_full(self):
self._test_project_create(
'{"name": "bob", '
'"domain_id": "domain123", "description": "sekrit", '
'"enabled": false}',
{
'description': 'sekrit',
'domain': 'domain123',
'enabled': False
}
)
def test_project_create_partial(self):
self._test_project_create(
'{"name": "bob"}',
{
'description': None,
'domain': None,
'enabled': True
}
)
@mock.patch.object(keystone.api, 'keystone')
def _test_project_create(self, supplied_body, expected_call, kc):
request = self.mock_rest_request(body=supplied_body)
kc.tenant_create.return_value.id = 'project123'
kc.tenant_create.return_value.to_dict.return_value = {
'id': 'project123', 'name': 'bob'
}
response = keystone.Projects().post(request)
self.assertStatusCode(response, 201)
self.assertEqual(response['location'],
'/api/keystone/projects/project123')
self.assertEqual(response.content, '{"id": "project123", '
'"name": "bob"}')
kc.tenant_create.assert_called_once_with(request, 'bob',
**expected_call)
@mock.patch.object(keystone.api, 'keystone')
def test_project_delete_many(self, kc):
request = self.mock_rest_request(body='''
["id1", "id2", "id3"]
''')
response = keystone.Projects().delete(request)
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.tenant_delete.assert_has_calls([
mock.call(request, 'id1'),
mock.call(request, 'id2'),
mock.call(request, 'id3'),
])
@mock.patch.object(keystone.api, 'keystone')
def test_project_delete(self, kc):
request = self.mock_rest_request()
response = keystone.Project().delete(request, 'the_id')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.tenant_delete.assert_called_once_with(request, 'the_id')
@mock.patch.object(keystone.api, 'keystone')
def test_project_patch(self, kc):
        # nothing in the Horizon code documents what additional parameters are
        # allowed, so we'll just assume GIGO (garbage in, garbage out)
request = self.mock_rest_request(body='''
{"name": "spam", "domain_id": "domain123", "foo": "bar"}
''')
response = keystone.Project().patch(request, 'spam123')
self.assertStatusCode(response, 204)
self.assertEqual(response.content, '')
kc.tenant_update.assert_called_once_with(request,
'spam123',
name='spam', foo='bar',
description=None,
domain='domain123',
enabled=None)
#
# Service Catalog
#
@mock.patch.object(keystone.api, 'keystone')
def test_service_catalog_get(self, kc):
request = self.mock_rest_request()
response = keystone.ServiceCatalog().get(request)
self.assertStatusCode(response, 200)
content = jsonutils.dumps(request.user.service_catalog,
sort_keys=settings.DEBUG)
self.assertEqual(content, response.content)
#
# User Session
#
@mock.patch.object(keystone.api, 'keystone')
def test_user_session_get(self, kc):
request = self.mock_rest_request()
request.user = mock.Mock(
services_region='some region',
super_secret_thing='not here',
is_authenticated=lambda: True,
spec=['services_region', 'super_secret_thing']
)
response = keystone.UserSession().get(request)
self.assertStatusCode(response, 200)
content = jsonutils.loads(response.content)
self.assertEqual(content['services_region'], 'some region')
self.assertNotIn('super_secret_thing', content)
| {
"content_hash": "139790927dc457abb3afcbe4adc588ae",
"timestamp": "",
"source": "github",
"line_count": 656,
"max_line_length": 79,
"avg_line_length": 40.07164634146341,
"alnum_prop": 0.5269905276372351,
"repo_name": "maestro-hybrid-cloud/horizon",
"id": "8aed1ac66a787656dddef9eda4756043c0f0a9af",
"size": "26870",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstack_dashboard/test/api_tests/keystone_rest_tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "106162"
},
{
"name": "HTML",
"bytes": "518913"
},
{
"name": "JavaScript",
"bytes": "983970"
},
{
"name": "Makefile",
"bytes": "588"
},
{
"name": "Python",
"bytes": "4928965"
},
{
"name": "Shell",
"bytes": "18658"
}
],
"symlink_target": ""
} |
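The tests above all build their requests through a `mock_rest_request` helper defined in Horizon's test base class, which is not part of this file. A minimal sketch of what such a helper could look like, inferred purely from the call sites above; the attribute names and defaults here are assumptions, not Horizon's actual implementation:

import mock

def mock_rest_request(**kwargs):
    # Hypothetical stand-in for Horizon's helper: every default below is
    # inferred from the test calls above (body=..., **{'GET': {...}}).
    defaults = {'body': '', 'GET': {}}
    defaults.update(kwargs)
    return mock.Mock(**defaults)

request = mock_rest_request(body='["id1", "id2"]')
print(request.body)  # '["id1", "id2"]'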
"""
If a task raises an unhandled exception, it is saved to the Future for the
task and made available through the result() or exception() methods.
"""
from concurrent import futures
def task(n):
print('{}: starting'.format(n))
raise ValueError('the value {} is no good'.format(n))
ex = futures.ThreadPoolExecutor(max_workers=2)
print('main: starting')
f = ex.submit(task, 5)
error = f.exception()
print('main: error: {}'.format(error))
try:
result = f.result()
except ValueError as e:
print('main: saw error "{}" when accessing result'.format(e))
| {
"content_hash": "4d336881d2cc763bdbb9c242b749cba1",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 75,
"avg_line_length": 22.92,
"alnum_prop": 0.6858638743455497,
"repo_name": "scotthuang1989/Python-3-Module-of-the-Week",
"id": "acee3d76f8cffdb61e027794d20c8c08261497da",
"size": "573",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "concurrency/futures/futures_future_exception.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "913525"
},
{
"name": "Python",
"bytes": "53855"
},
{
"name": "Shell",
"bytes": "91"
}
],
"symlink_target": ""
} |
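The sample above blocks on `exception()` and `result()` to retrieve the stored error. The same failure can also be observed without blocking by attaching a done-callback; a minimal companion sketch, added here for illustration:

from concurrent import futures

def task(n):
    raise ValueError('the value {} is no good'.format(n))

def on_done(future):
    # Runs once the future has completed, so exception() returns immediately.
    print('callback saw error: {}'.format(future.exception()))

ex = futures.ThreadPoolExecutor(max_workers=2)
f = ex.submit(task, 5)
f.add_done_callback(on_done)
ex.shutdown(wait=True)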
import numpy as np
from sklearn import metrics, svm
from sklearn.linear_model import LinearRegression
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
trainingData = np.array([[1,2,3.8],[3,2,1],[1,1,1],[1,3,2]])#np.array([ [2.3, 4.3, 2], [1.3, 5.2, 5.2], [3.3, 2.9, 0.8], [3.1, 4.3, 4.0] ])
trainingScores = np.array( [3, 1.1, 4.1, 1] )
predictionData = np.array([ [2, 2.9, 2], [1, 2, 2] ])
clf = LinearRegression()
clf.fit(trainingData, trainingScores)
print("LinearRegression")
print(clf.predict(predictionData))
clf = svm.SVR()
clf.fit(trainingData, trainingScores)
print("SVR")
print(clf.predict(predictionData))
#clf = LogisticRegression()
# could not produce an answer with int values; could not with float values either
#clf.fit(trainingData, trainingScores)  # note: plain numpy arrays have no .values attribute
#print("LogisticRegression")
#print(clf.predict(predictionData))
#clf = DecisionTreeClassifier()  # answered with int values - could not answer with float values such as 2.1
#clf.fit(trainingData, trainingScores)
#print("DecisionTreeClassifier")
#print(clf.predict(predictionData))
#clf = KNeighborsClassifier()  # did not work with int
#clf.fit(trainingData, trainingScores)
#print("KNeighborsClassifier")
#print(clf.predict(predictionData))
#clf = LinearDiscriminantAnalysis()  # did not work
#clf.fit(trainingData, trainingScores)
#print("LinearDiscriminantAnalysis")
#print(clf.predict(predictionData))
#clf = GaussianNB()  # did not work with float
#clf.fit(trainingData, trainingScores)
#print("GaussianNB")
#print(clf.predict(predictionData))
#clf = SVC()
#clf.fit(trainingData, trainingScores)
#print("SVC")
#print(clf.predict(predictionData)) | {
"content_hash": "6d9c392bc7fc4f63c7b526a078486a42",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 146,
"avg_line_length": 33.41379310344828,
"alnum_prop": 0.7223942208462333,
"repo_name": "OO-E/MoviePrediction",
"id": "e3d19e0824549dcd0286df8fd8b858d1da3a731b",
"size": "1938",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python/example.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "111"
},
{
"name": "HTML",
"bytes": "275"
},
{
"name": "JavaScript",
"bytes": "14423"
},
{
"name": "Python",
"bytes": "10830"
}
],
"symlink_target": ""
} |
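The commented-out failures above line up with scikit-learn's contract: estimators like LogisticRegression, DecisionTreeClassifier and GaussianNB are classifiers and require discrete class labels, while continuous targets such as 1.1 or 4.1 belong to regressors like LinearRegression and SVR. A minimal sketch with integer labels (the data is made up for illustration):

import numpy as np
from sklearn.linear_model import LogisticRegression

X = np.array([[1, 2, 3.8], [3, 2, 1], [1, 1, 1], [1, 3, 2]])
y = np.array([3, 1, 4, 1])  # discrete class labels instead of continuous scores

clf = LogisticRegression()
clf.fit(X, y)
print(clf.predict(np.array([[2, 2.9, 2], [1, 2, 2]])))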
'''
Author: Prakhar Mishra
Date: 13/01/2016
'''
# Importing Packages
import pygmaps
import webbrowser
import csv
import geocoder
import random
import re
import sys
# Opening and getting marker for the input file
data = open(str(sys.argv[1]), 'r')
data_reader = csv.reader(data)
# Reads only the geo information and appends it into
# places list
places = []
for line in data_reader:
places.append(line[3])
# Cleans the text
regex = "(\")"
s = [re.sub(regex,'',i) for i in places]
# Start from 1 to skip the header (s[0] is the csv header)
# Gets the latitude and longitude from place names
count = 0
data_latlng = []
for i in s[1:]:
if i != 'Location data not found !!':
g = geocoder.google(str(i))
dta = g.latlng
        if dta:  # geocoder may return None or an empty result
            data_latlng.append(dta)
else:
pass
else:
pass
points = data_latlng
# Defining center of the map (Portugal)
mymap = pygmaps.maps(37.933426, -7.496694, 3)
# For points in points, plots the data in maps
for i in points:
mymap.addpoint(float(i[0]),float(i[1]),'#FF00FF')
mymap.draw('mymap.html')
print('Drawn')
# Opens the browser when plotting is complete
webbrowser.open_new_tab('mymap.html')
| {
"content_hash": "1c68e863914a07bcdceaac16938ed652",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 53,
"avg_line_length": 19.278688524590162,
"alnum_prop": 0.6760204081632653,
"repo_name": "prakhar21/SocialCops",
"id": "0454eb69c042ae506de47b3aa0ba64e7001e4415",
"size": "1176",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "generate_Map.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "258822"
},
{
"name": "Python",
"bytes": "17025"
}
],
"symlink_target": ""
} |
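The geocoding loop above makes one network call per row, including repeated place names. A small memoizing wrapper around the same `geocoder.google` call would avoid the duplicate lookups; a sketch, with error handling kept as minimal as the original:

import geocoder

_latlng_cache = {}

def geocode_cached(name):
    # Look each distinct place name up only once.
    if name not in _latlng_cache:
        _latlng_cache[name] = geocoder.google(name).latlng
    return _latlng_cache[name]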
import time
import json
from bson import ObjectId # To install it: `$ pip3 install pymongo`
users = dict() # uId (input) -> user number (output)
songs = dict() # eId (input), just to count unique youtube tracks
counter = 0
# == Print anonymised playlog to stdout
print('timestamp,user,song') # header of the csv file
with open("./fullplaylog.json.log", "r") as f:
for line in f:
counter = counter + 1
# if counter == 10:
# break
row = json.loads(line)
        # identify songs: strip the '/yt/' prefix from YouTube track ids;
        # set eId for every row so the print below always has a value
        eId = row['eId']
        if eId.startswith('/yt/'):
            eId = eId[4:]
            songs[eId] = 1
# identify users
if row['uId'] not in users:
users[row['uId']] = len(users)
if '_t' in row:
timestamp = row['_t'] // 1000
elif '_id' in row:
timestamp = int(time.mktime(ObjectId(row['_id']['$oid']).generation_time.timetuple()))
print (','.join([ str(timestamp), str(users[row['uId']]), eId ]))
# == Print anonymised user mapping to stdout
# print (','.join([ 'openwhyd_user_id', 'anonymous_user_id' ]))
# for user in users:
# print (','.join([ user, str(users[user]) ]))
# print 'found', len(songs), 'youtube tracks'
| {
"content_hash": "e3f8fa0a8663c0d88dbd2d942087d8b8",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 92,
"avg_line_length": 29.842105263157894,
"alnum_prop": 0.6022927689594356,
"repo_name": "openwhyd/openwhyd",
"id": "69bfafb3833124a8e89b3ea18edc26a643fe01ae",
"size": "1372",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/data-analysis-scripts/src/anonymise-playlog.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "191099"
},
{
"name": "Dockerfile",
"bytes": "1555"
},
{
"name": "Gnuplot",
"bytes": "1289"
},
{
"name": "HTML",
"bytes": "461922"
},
{
"name": "JavaScript",
"bytes": "1421314"
},
{
"name": "Makefile",
"bytes": "5332"
},
{
"name": "Python",
"bytes": "1372"
},
{
"name": "Shell",
"bytes": "26827"
},
{
"name": "TypeScript",
"bytes": "54909"
}
],
"symlink_target": ""
} |
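The `_id` fallback above works because a BSON ObjectId embeds its creation time in its first four bytes, and pymongo exposes that as `generation_time`. A small standalone sketch of the same trick (written for Python 3, unlike the Python 2 script above):

from bson import ObjectId

oid = ObjectId()
print(oid.generation_time)                   # timezone-aware datetime in UTC
print(int(oid.generation_time.timestamp()))  # epoch seconds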
from django.db import models
class CalloutUserfield(models.Model):
name = models.CharField(max_length=255)
| {
"content_hash": "666e7b2f1a336ab49eefecf6b6a9923a",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 43,
"avg_line_length": 28,
"alnum_prop": 0.7767857142857143,
"repo_name": "350dotorg/aktivator",
"id": "67e0e178606b251694b13c4714eeb2f03f248ebb",
"size": "112",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "actionkit_userdetail/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "363353"
},
{
"name": "JavaScript",
"bytes": "1128479"
},
{
"name": "Python",
"bytes": "77716"
}
],
"symlink_target": ""
} |
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from Selenium2Library import utils
from Selenium2Library.locators import ElementFinder
from keywordgroup import KeywordGroup
class _ElementKeywords(KeywordGroup):
def __init__(self):
self._element_finder = ElementFinder()
# Public, element lookups
def current_frame_contains(self, text, loglevel='INFO'):
"""Verifies that current frame contains `text`.
        See `Page Should Contain` for explanation about the `loglevel` argument.
"""
        if not self._is_text_present(text):
            self.log_source(loglevel)
            raise AssertionError("Current frame should have contained text "
                                 "'%s' but did not" % text)
        self._info("Current frame contains text '%s'." % text)
def element_should_contain(self, locator, expected, message=''):
"""Verifies element identified by `locator` contains text `expected`.
If you wish to assert an exact (not a substring) match on the text
of the element, use `Element Text Should Be`.
`message` can be used to override the default error message.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
self._info("Verifying element '%s' contains text '%s'."
% (locator, expected))
actual = self._get_text(locator)
if not expected in actual:
if not message:
message = "Element '%s' should have contained text '%s' but "\
"its text was '%s'." % (locator, expected, actual)
raise AssertionError(message)
def frame_should_contain(self, locator, text, loglevel='INFO'):
"""Verifies frame identified by `locator` contains `text`.
        See `Page Should Contain` for explanation about the `loglevel` argument.
Key attributes for frames are `id` and `name.` See `introduction` for
details about locating elements.
"""
        if not self._frame_contains(locator, text):
            self.log_source(loglevel)
            raise AssertionError("Frame '%s' should have contained text '%s' "
                                 "but did not" % (locator, text))
        self._info("Frame '%s' contains text '%s'." % (locator, text))
def page_should_contain(self, text, loglevel='INFO'):
"""Verifies that current page contains `text`.
If this keyword fails, it automatically logs the page source
using the log level specified with the optional `loglevel` argument.
Giving `NONE` as level disables logging.
"""
if not self._page_contains(text):
self.log_source(loglevel)
raise AssertionError("Page should have contained text '%s' "
"but did not" % text)
self._info("Current page contains text '%s'." % text)
def page_should_contain_element(self, locator, message='', loglevel='INFO'):
"""Verifies element identified by `locator` is found on the current page.
`message` can be used to override default error message.
See `Page Should Contain` for explanation about `loglevel` argument.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
self._page_should_contain_element(locator, None, message, loglevel)
def page_should_not_contain(self, text, loglevel='INFO'):
"""Verifies the current page does not contain `text`.
        See `Page Should Contain` for explanation about the `loglevel` argument.
"""
if self._page_contains(text):
self.log_source(loglevel)
raise AssertionError("Page should not have contained text '%s'" % text)
self._info("Current page does not contain text '%s'." % text)
def page_should_not_contain_element(self, locator, message='', loglevel='INFO'):
"""Verifies element identified by `locator` is not found on the current page.
`message` can be used to override the default error message.
        See `Page Should Contain` for explanation about the `loglevel` argument.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
self._page_should_not_contain_element(locator, None, message, loglevel)
# Public, attributes
def assign_id_to_element(self, locator, id):
"""Assigns a temporary identifier to element specified by `locator`.
This is mainly useful if the locator is complicated/slow XPath expression.
Identifier expires when the page is reloaded.
Example:
| Assign ID to Element | xpath=//div[@id="first_div"] | my id |
| Page Should Contain Element | my id |
"""
self._info("Assigning temporary id '%s' to element '%s'" % (id, locator))
element = self._element_find(locator, True, True)
self._current_browser().execute_script("arguments[0].id = '%s';" % id, element)
def element_should_be_disabled(self, locator):
"""Verifies that element identified with `locator` is disabled.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
if self._is_enabled(locator):
raise AssertionError("Element '%s' is enabled." % (locator))
def element_should_be_enabled(self, locator):
"""Verifies that element identified with `locator` is enabled.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
if not self._is_enabled(locator):
raise AssertionError("Element '%s' is disabled." % (locator))
def element_should_be_visible(self, locator, message=''):
"""Verifies that the element identified by `locator` is visible.
Herein, visible means that the element is logically visible, not optically
visible in the current browser viewport. For example, an element that carries
display:none is not logically visible, so using this keyword on that element
would fail.
`message` can be used to override the default error message.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
self._info("Verifying element '%s' is visible." % locator)
visible = self._is_visible(locator)
if not visible:
if not message:
message = "The element '%s' should be visible, but it "\
"is not." % locator
raise AssertionError(message)
def element_should_not_be_visible(self, locator, message=''):
"""Verifies that the element identified by `locator` is NOT visible.
This is the opposite of `Element Should Be Visible`.
`message` can be used to override the default error message.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
self._info("Verifying element '%s' is not visible." % locator)
visible = self._is_visible(locator)
if visible:
if not message:
message = "The element '%s' should not be visible, "\
"but it is." % locator
raise AssertionError(message)
def element_text_should_be(self, locator, expected, message=''):
"""Verifies element identified by `locator` exactly contains text `expected`.
In contrast to `Element Should Contain`, this keyword does not try
a substring match but an exact match on the element identified by `locator`.
`message` can be used to override the default error message.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
self._info("Verifying element '%s' contains exactly text '%s'."
% (locator, expected))
element = self._element_find(locator, True, True)
actual = element.text
if expected != actual:
if not message:
message = "The text of element '%s' should have been '%s' but "\
"in fact it was '%s'." % (locator, expected, actual)
raise AssertionError(message)
def get_element_attribute(self, attribute_locator):
"""Return value of element attribute.
`attribute_locator` consists of element locator followed by an @ sign
and attribute name, for example "element_id@class".
"""
locator, attribute_name = self._parse_attribute_locator(attribute_locator)
element = self._element_find(locator, True, False)
if element is None:
raise ValueError("Element '%s' not found." % (locator))
return element.get_attribute(attribute_name)
def get_horizontal_position(self, locator):
"""Returns horizontal position of element identified by `locator`.
The position is returned in pixels off the left side of the page,
as an integer. Fails if a matching element is not found.
See also `Get Vertical Position`.
"""
element = self._element_find(locator, True, False)
if element is None:
raise AssertionError("Could not determine position for '%s'" % (locator))
return element.location['x']
def get_value(self, locator):
"""Returns the value attribute of element identified by `locator`.
See `introduction` for details about locating elements.
"""
return self._get_value(locator)
def get_text(self, locator):
"""Returns the text value of element identified by `locator`.
See `introduction` for details about locating elements.
"""
return self._get_text(locator)
def get_vertical_position(self, locator):
"""Returns vertical position of element identified by `locator`.
The position is returned in pixels off the top of the page,
as an integer. Fails if a matching element is not found.
See also `Get Horizontal Position`.
"""
element = self._element_find(locator, True, False)
if element is None:
raise AssertionError("Could not determine position for '%s'" % (locator))
return element.location['y']
# Public, mouse input/events
def click_element(self, locator):
"""Click element identified by `locator`.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
self._info("Clicking element '%s'." % locator)
self._element_find(locator, True, True).click()
def double_click_element(self, locator):
"""Double click element identified by `locator`.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
self._info("Double clicking element '%s'." % locator)
element = self._element_find(locator, True, True)
ActionChains(self._current_browser()).double_click(element).perform()
def focus(self, locator):
"""Sets focus to element identified by `locator`."""
element = self._element_find(locator, True, True)
self._current_browser().execute_script("arguments[0].focus();", element)
def drag_and_drop(self, source, target):
"""Drags element identified with `source` which is a locator.
Element can be moved on top of another element with `target`
argument.
`target` is a locator of the element where the dragged object is
dropped.
Examples:
| Drag And Drop | elem1 | elem2 | # Move elem1 over elem2. |
"""
        src_elem = self._element_find(source, True, True)
        trg_elem = self._element_find(target, True, True)
ActionChains(self._current_browser()).drag_and_drop(src_elem, trg_elem).perform()
def drag_and_drop_by_offset(self, source, xoffset, yoffset):
"""Drags element identified with `source` which is a locator.
        Element will be moved by `xoffset` and `yoffset`, each of which is a
        negative or positive number specifying the offset.
Examples:
| Drag And Drop | myElem | 50 | -35 | # Move myElem 50px right and 35px down. |
"""
src_elem = self._element_find(source, True, True)
ActionChains(self._current_browser()).drag_and_drop_by_offset(src_elem, xoffset, yoffset).perform()
def mouse_down(self, locator):
"""Simulates pressing the left mouse button on the element specified by `locator`.
The element is pressed without releasing the mouse button.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
See also the more specific keywords `Mouse Down On Image` and
`Mouse Down On Link`.
"""
self._info("Simulating Mouse Down on element '%s'" % locator)
element = self._element_find(locator, True, False)
if element is None:
raise AssertionError("ERROR: Element %s not found." % (locator))
ActionChains(self._current_browser()).click_and_hold(element).perform()
def mouse_out(self, locator):
"""Simulates moving mouse away from the element specified by `locator`.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
self._info("Simulating Mouse Out on element '%s'" % locator)
element = self._element_find(locator, True, False)
if element is None:
raise AssertionError("ERROR: Element %s not found." % (locator))
size = element.size
offsetx = (size['width'] / 2) + 1
offsety = (size['height'] / 2) + 1
ActionChains(self._current_browser()).move_to_element(element).move_by_offset(offsetx, offsety).perform()
def mouse_over(self, locator):
"""Simulates hovering mouse over the element specified by `locator`.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
self._info("Simulating Mouse Over on element '%s'" % locator)
element = self._element_find(locator, True, False)
if element is None:
raise AssertionError("ERROR: Element %s not found." % (locator))
ActionChains(self._current_browser()).move_to_element(element).perform()
def mouse_up(self, locator):
"""Simulates releasing the left mouse button on the element specified by `locator`.
Key attributes for arbitrary elements are `id` and `name`. See
`introduction` for details about locating elements.
"""
self._info("Simulating Mouse Up on element '%s'" % locator)
element = self._element_find(locator, True, False)
if element is None:
raise AssertionError("ERROR: Element %s not found." % (locator))
ActionChains(self._current_browser()).click_and_hold(element).release(element).perform()
def open_context_menu(self, locator):
"""Opens context menu on element identified by `locator`."""
element = self._element_find(locator, True, True)
ActionChains(self._current_browser()).context_click(element).perform()
def simulate(self, locator, event):
"""Simulates `event` on element identified by `locator`.
This keyword is useful if element has OnEvent handler that needs to be
explicitly invoked.
See `introduction` for details about locating elements.
"""
element = self._element_find(locator, True, True)
script = """
element = arguments[0];
eventName = arguments[1];
if (document.createEventObject) { // IE
return element.fireEvent('on' + eventName, document.createEventObject());
}
var evt = document.createEvent("HTMLEvents");
evt.initEvent(eventName, true, true);
return !element.dispatchEvent(evt);
"""
self._current_browser().execute_script(script, element, event)
def press_key(self, locator, key):
"""Simulates user pressing key on element identified by `locator`.
        `key` is either a single character, or a numerical ASCII code of the key
        preceded by '\\'.
Examples:
| Press Key | text_field | q |
| Press Key | login_button | \\13 | # ASCII code for enter key |
"""
if key.startswith('\\') and len(key) > 1:
key = self._map_ascii_key_code_to_key(int(key[1:]))
#if len(key) > 1:
# raise ValueError("Key value '%s' is invalid.", key)
element = self._element_find(locator, True, True)
        # send the key press to the element
element.send_keys(key)
# Public, links
def click_link(self, locator):
"""Clicks a link identified by locator.
Key attributes for links are `id`, `name`, `href` and link text. See
`introduction` for details about locating elements.
"""
self._info("Clicking link '%s'." % locator)
link = self._element_find(locator, True, True, tag='a')
link.click()
def get_all_links(self):
"""Returns a list containing ids of all links found in current page.
If a link has no id, an empty string will be in the list instead.
"""
links = []
for anchor in self._element_find("tag=a", False, False, 'a'):
links.append(anchor.get_attribute('id'))
return links
def mouse_down_on_link(self, locator):
"""Simulates a mouse down event on a link.
Key attributes for links are `id`, `name`, `href` and link text. See
`introduction` for details about locating elements.
"""
element = self._element_find(locator, True, True, 'link')
ActionChains(self._current_browser()).click_and_hold(element).perform()
def page_should_contain_link(self, locator, message='', loglevel='INFO'):
"""Verifies link identified by `locator` is found from current page.
See `Page Should Contain Element` for explanation about `message` and
`loglevel` arguments.
Key attributes for links are `id`, `name`, `href` and link text. See
`introduction` for details about locating elements.
"""
self._page_should_contain_element(locator, 'link', message, loglevel)
def page_should_not_contain_link(self, locator, message='', loglevel='INFO'):
"""Verifies image identified by `locator` is not found from current page.
See `Page Should Contain Element` for explanation about `message` and
`loglevel` arguments.
Key attributes for images are `id`, `src` and `alt`. See
`introduction` for details about locating elements.
"""
self._page_should_not_contain_element(locator, 'link', message, loglevel)
# Public, images
def click_image(self, locator):
"""Clicks an image found by `locator`.
Key attributes for images are `id`, `src` and `alt`. See
`introduction` for details about locating elements.
"""
self._info("Clicking image '%s'." % locator)
element = self._element_find(locator, True, False, 'image')
if element is None:
# A form may have an image as it's submit trigger.
element = self._element_find(locator, True, True, 'input')
element.click()
def mouse_down_on_image(self, locator):
"""Simulates a mouse down event on an image.
Key attributes for images are `id`, `src` and `alt`. See
`introduction` for details about locating elements.
"""
element = self._element_find(locator, True, True, 'image')
ActionChains(self._current_browser()).click_and_hold(element).perform()
def page_should_contain_image(self, locator, message='', loglevel='INFO'):
"""Verifies image identified by `locator` is found from current page.
See `Page Should Contain Element` for explanation about `message` and
`loglevel` arguments.
Key attributes for images are `id`, `src` and `alt`. See
`introduction` for details about locating elements.
"""
self._page_should_contain_element(locator, 'image', message, loglevel)
def page_should_not_contain_image(self, locator, message='', loglevel='INFO'):
"""Verifies image identified by `locator` is found from current page.
See `Page Should Contain Element` for explanation about `message` and
`loglevel` arguments.
Key attributes for images are `id`, `src` and `alt`. See
`introduction` for details about locating elements.
"""
self._page_should_not_contain_element(locator, 'image', message, loglevel)
# Public, xpath
def get_matching_xpath_count(self, xpath):
"""Returns number of elements matching `xpath`
If you wish to assert the number of matching elements, use
`Xpath Should Match X Times`.
"""
count = len(self._element_find("xpath=" + xpath, False, False))
return str(count)
def xpath_should_match_x_times(self, xpath, expected_xpath_count, message='', loglevel='INFO'):
"""Verifies that the page contains the given number of elements located by the given `xpath`.
See `Page Should Contain Element` for explanation about `message` and
`loglevel` arguments.
"""
actual_xpath_count = len(self._element_find("xpath=" + xpath, False, False))
if int(actual_xpath_count) != int(expected_xpath_count):
if not message:
message = "Xpath %s should have matched %s times but matched %s times"\
%(xpath, expected_xpath_count, actual_xpath_count)
self.log_source(loglevel)
raise AssertionError(message)
self._info("Current page contains %s elements matching '%s'."
% (actual_xpath_count, xpath))
def get_elements_nonempty_text_using_xpath(self, xpath):
"""Find all elements' non-empty text that match an xpath."""
self._info("Searching for elements using xpath='%s'" % xpath)
browser = self._current_browser()
return [n.text for n in browser.find_elements_by_xpath(xpath) if n.text]
def click_on_tab_with_text_using_xpath(self, xpath, text):
"""Using Xpath selector click on Tab control with the specified text."""
self._info("Using xpath='%s' clicking on Tab with text:'%s'" % (xpath,text))
browser = self._current_browser()
l = browser.find_elements_by_xpath(xpath)
for n in l:
if n.text==text:
n.click()
break
# Private
def _element_find(self, locator, first_only, required, tag=None):
browser = self._current_browser()
elements = self._element_finder.find(browser, locator, tag)
if required and len(elements) == 0:
raise ValueError("Element locator '" + locator + "' did not match any elements.")
if first_only:
if len(elements) == 0: return None
return elements[0]
return elements
def _frame_contains(self, locator, text):
browser = self._current_browser()
element = self._element_find(locator, True, True)
browser.switch_to_frame(element)
self._info("Searching for text from frame '%s'." % locator)
found = self._is_text_present(text)
browser.switch_to_default_content()
return found
def _get_text(self, locator):
element = self._element_find(locator, True, True)
if element is not None:
return element.text
return None
def _get_value(self, locator, tag=None):
element = self._element_find(locator, True, False, tag=tag)
return element.get_attribute('value') if element is not None else None
def _is_enabled(self, locator):
element = self._element_find(locator, True, True)
if not self._is_form_element(element):
raise AssertionError("ERROR: Element %s is not an input." % (locator))
if not element.is_enabled():
return False
read_only = element.get_attribute('readonly')
if read_only == 'readonly' or read_only == 'true':
return False
return True
def _is_text_present(self, text):
locator = "xpath=//*[contains(., %s)]" % utils.escape_xpath_value(text);
return self._is_element_present(locator)
def _is_visible(self, locator):
element = self._element_find(locator, True, False)
if element is not None:
return element.is_displayed()
return None
def _map_ascii_key_code_to_key(self, key_code):
map = {
0: Keys.NULL,
8: Keys.BACK_SPACE,
9: Keys.TAB,
10: Keys.RETURN,
13: Keys.ENTER,
24: Keys.CANCEL,
27: Keys.ESCAPE,
32: Keys.SPACE,
42: Keys.MULTIPLY,
43: Keys.ADD,
44: Keys.SEPARATOR,
45: Keys.SUBTRACT,
56: Keys.DECIMAL,
57: Keys.DIVIDE,
59: Keys.SEMICOLON,
61: Keys.EQUALS,
127: Keys.DELETE
}
key = map.get(key_code)
if key is None:
key = chr(key_code)
return key
def _parse_attribute_locator(self, attribute_locator):
parts = attribute_locator.rpartition('@')
if len(parts[0]) == 0:
raise ValueError("Attribute locator '%s' does not contain an element locator." % (attribute_locator))
if len(parts[2]) == 0:
raise ValueError("Attribute locator '%s' does not contain an attribute name." % (attribute_locator))
return (parts[0], parts[2])
def _is_element_present(self, locator, tag=None):
return (self._element_find(locator, True, False, tag=tag) != None)
def _page_contains(self, text):
browser = self._current_browser()
browser.switch_to_default_content()
if self._is_text_present(text):
return True
subframes = self._element_find("xpath=//frame|//iframe", False, False)
self._debug('Current frame has %d subframes' % len(subframes))
for frame in subframes:
browser.switch_to_frame(frame)
found_text = self._is_text_present(text)
browser.switch_to_default_content()
if found_text:
return True
return False
def _page_should_contain_element(self, locator, tag, message, loglevel):
element_name = tag if tag is not None else 'element'
if not self._is_element_present(locator, tag):
if not message:
message = "Page should have contained %s '%s' but did not"\
% (element_name, locator)
self.log_source(loglevel)
raise AssertionError(message)
self._info("Current page contains %s '%s'." % (element_name, locator))
def _page_should_not_contain_element(self, locator, tag, message, loglevel):
element_name = tag if tag is not None else 'element'
if self._is_element_present(locator, tag):
if not message:
message = "Page should not have contained %s '%s'"\
% (element_name, locator)
self.log_source(loglevel)
raise AssertionError(message)
self._info("Current page does not contain %s '%s'."
% (element_name, locator))
| {
"content_hash": "53ef277a8ba041d9cd1410dda9dbbfc6",
"timestamp": "",
"source": "github",
"line_count": 675,
"max_line_length": 113,
"avg_line_length": 41.534814814814816,
"alnum_prop": 0.6196675702667999,
"repo_name": "ktan2020/legacy-automation",
"id": "fbdec75e65709865881a1c6b08177d0daa8feeff",
"size": "28036",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "win/Lib/site-packages/robotframework_selenium2library-1.1.0-py2.7.egg/Selenium2Library/keywords/_element.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "913"
},
{
"name": "Ada",
"bytes": "289"
},
{
"name": "Assembly",
"bytes": "687"
},
{
"name": "Boo",
"bytes": "540"
},
{
"name": "C",
"bytes": "40116"
},
{
"name": "C#",
"bytes": "474"
},
{
"name": "C++",
"bytes": "393"
},
{
"name": "CSS",
"bytes": "70883"
},
{
"name": "ColdFusion",
"bytes": "1012"
},
{
"name": "Common Lisp",
"bytes": "1034"
},
{
"name": "D",
"bytes": "1858"
},
{
"name": "Eiffel",
"bytes": "426"
},
{
"name": "Erlang",
"bytes": "9243"
},
{
"name": "FORTRAN",
"bytes": "1810"
},
{
"name": "Forth",
"bytes": "182"
},
{
"name": "Groovy",
"bytes": "2366"
},
{
"name": "Haskell",
"bytes": "816"
},
{
"name": "Haxe",
"bytes": "455"
},
{
"name": "Java",
"bytes": "1155"
},
{
"name": "JavaScript",
"bytes": "69444"
},
{
"name": "Lua",
"bytes": "795"
},
{
"name": "Matlab",
"bytes": "1278"
},
{
"name": "OCaml",
"bytes": "350"
},
{
"name": "Objective-C++",
"bytes": "885"
},
{
"name": "PHP",
"bytes": "1411"
},
{
"name": "Pascal",
"bytes": "388"
},
{
"name": "Perl",
"bytes": "252651"
},
{
"name": "Pike",
"bytes": "589"
},
{
"name": "Python",
"bytes": "42085780"
},
{
"name": "R",
"bytes": "1156"
},
{
"name": "Ruby",
"bytes": "480"
},
{
"name": "Scheme",
"bytes": "282"
},
{
"name": "Shell",
"bytes": "30518"
},
{
"name": "Smalltalk",
"bytes": "926"
},
{
"name": "Squirrel",
"bytes": "697"
},
{
"name": "Stata",
"bytes": "302"
},
{
"name": "SystemVerilog",
"bytes": "3145"
},
{
"name": "Tcl",
"bytes": "1039"
},
{
"name": "TeX",
"bytes": "1746"
},
{
"name": "VHDL",
"bytes": "985"
},
{
"name": "Vala",
"bytes": "664"
},
{
"name": "Verilog",
"bytes": "439"
},
{
"name": "Visual Basic",
"bytes": "2142"
},
{
"name": "XSLT",
"bytes": "152770"
},
{
"name": "ooc",
"bytes": "890"
},
{
"name": "xBase",
"bytes": "769"
}
],
"symlink_target": ""
} |
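The `simulate` keyword above fires a synthetic DOM event through `execute_script`. The same technique works in plain Selenium outside Robot Framework; a minimal sketch using the selenium 2.x-era API this library targets (the URL and element here are placeholders):

from selenium import webdriver

driver = webdriver.Firefox()
driver.get('http://example.com/')
element = driver.find_element_by_tag_name('h1')
# Create and dispatch an HTMLEvents event, mirroring the keyword's script.
driver.execute_script(
    "var evt = document.createEvent('HTMLEvents');"
    "evt.initEvent(arguments[1], true, true);"
    "arguments[0].dispatchEvent(evt);",
    element, 'mouseover')
driver.quit()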
"""This file contains some utility functions"""
import time
import os
import six
import codecs
import glob
import numpy as np
import tensorflow as tf
from tensorflow.python.client import device_lib
FLAGS = tf.app.flags.FLAGS
def get_dir_or_file_path(dir_or_path, max_deep=1):
  if not os.path.exists(dir_or_path):
    raise ValueError("{} does not exist".format(dir_or_path))
if os.path.isdir(dir_or_path):
all_paths = [os.path.join(dir_or_path, name) for name in os.listdir(dir_or_path)]
else:
all_paths = glob.glob(dir_or_path)
return all_paths
def get_config():
"""Returns config for tf.session"""
config = tf.ConfigProto(allow_soft_placement=True)
config.gpu_options.allow_growth=True
return config
def load_ckpt(saver, sess, ckpt_dir="train"):
"""Load checkpoint from the ckpt_dir (if unspecified, this is train dir) and restore it to saver and sess, waiting 10 secs in the case of failure. Also returns checkpoint name."""
while True:
try:
latest_filename = "checkpoint_best" if ckpt_dir=="eval" else None
ckpt_dir = os.path.join(FLAGS.log_root, ckpt_dir)
ckpt_state = tf.train.get_checkpoint_state(ckpt_dir, latest_filename=latest_filename)
tf.logging.info('Loading checkpoint %s', ckpt_state.model_checkpoint_path)
saver.restore(sess, ckpt_state.model_checkpoint_path)
return ckpt_state.model_checkpoint_path
except:
tf.logging.info("Failed to load checkpoint from %s. Sleeping for %i secs...", ckpt_dir, 10)
time.sleep(10)
def cut_sentence(words):
start = 0
  i = 0  # track the position of the current character
sents = []
  punt_list = u'.!?;~。!?~'  # strings must be decoded to unicode before matching
for word in words:
if six.PY2 and type(word) == str:
word = word.decode("utf-8")
# print(type(word))
if word in punt_list:
sents.append(words[start:i + 1])
      start = i + 1  # move start to the beginning of the next sentence
i += 1
else:
      i += 1  # not punctuation; keep advancing the character position
if start < len(words):
    sents.append(words[start:])  # handle text that does not end with punctuation
return sents
def read_sum_sents(file_path,sent_token=False):
f = codecs.open(file_path, "r", "utf-8")
sum_sents = []
while True:
line = f.readline()
if line == "":
break
sents = line.strip()
if sent_token:
sents = sents.split(" ")
sum_sents.append(sents)
return sum_sents
def get_available_gpus():
local_device_protos = device_lib.list_local_devices()
return [x.name for x in local_device_protos if x.device_type == 'GPU']
if __name__ == "__main__":
print( cut_sentence(["我爱","中国","。","我爱","中国"]) )
| {
"content_hash": "485cfc4daba60546664ed9b3e0b641bd",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 181,
"avg_line_length": 31.9625,
"alnum_prop": 0.6624951114587407,
"repo_name": "liyi193328/pointer-generator",
"id": "344507c88721cd0817ce12c1fad2304ba30da24d",
"size": "3465",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "9581"
},
{
"name": "HTML",
"bytes": "4989"
},
{
"name": "JavaScript",
"bytes": "803833"
},
{
"name": "Python",
"bytes": "159009"
}
],
"symlink_target": ""
} |