id (string, 1–7 chars) | text (string, 6–1.03M chars) | dataset_id (1 class)
---|---|---
67136
|
<reponame>jbronikowski/genielibs
""" File utils base class for SCP on JunOS devices. """
from ..fileutils import FileUtils as FileUtilsJunOSBase
class FileUtils(FileUtilsJunOSBase):
pass
|
StarcoderdataPython
|
3394448
|
<filename>linear-networks/image-classification-dataset.py
import torch
import torchvision
from torch.utils import data
from torchvision import transforms
from d2l import torch as d2l
d2l.use_svg_display()
trans = transforms.ToTensor()
mnist_train = torchvision.datasets.FashionMNIST(root="../data", train=True,
transform=trans,
download=True)
mnist_test = torchvision.datasets.FashionMNIST(root="../data", train=False,
transform=trans, download=True)
len(mnist_train), len(mnist_test)
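# Fashion-MNIST provides 60,000 training and 10,000 test images.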
mnist_train[0][0].shape
def get_fashion_mnist_labels(labels):
"""返回Fashion-MNIST数据集的文本标签。"""
text_labels = [
't-shirt', 'trouser', 'pullover', 'dress', 'coat', 'sandal', 'shirt',
'sneaker', 'bag', 'ankle boot']
return [text_labels[int(i)] for i in labels]
def show_images(imgs, num_rows, num_cols, titles=None, scale=1.5):
"""Plot a list of images."""
figsize = (num_cols * scale, num_rows * scale)
_, axes = d2l.plt.subplots(num_rows, num_cols, figsize=figsize)
axes = axes.flatten()
for i, (ax, img) in enumerate(zip(axes, imgs)):
if torch.is_tensor(img):
ax.imshow(img.numpy())
else:
ax.imshow(img)
ax.axes.get_xaxis().set_visible(False)
ax.axes.get_yaxis().set_visible(False)
if titles:
ax.set_title(titles[i])
return axes
X, y = next(iter(data.DataLoader(mnist_train, batch_size=18)))
show_images(X.reshape(18, 28, 28), 2, 9, titles=get_fashion_mnist_labels(y))
batch_size = 256
def get_dataloader_workers():
"""使用4个进程来读取数据。"""
return 4
train_iter = data.DataLoader(mnist_train, batch_size, shuffle=True,
num_workers=get_dataloader_workers())
timer = d2l.Timer()
for X, y in train_iter:
continue
f'{timer.stop():.2f} sec'
def load_data_fashion_mnist(batch_size, resize=None):
"""下载Fashion-MNIST数据集,然后将其加载到内存中。"""
trans = [transforms.ToTensor()]
if resize:
trans.insert(0, transforms.Resize(resize))
trans = transforms.Compose(trans)
mnist_train = torchvision.datasets.FashionMNIST(root="../data",
train=True,
transform=trans,
download=True)
mnist_test = torchvision.datasets.FashionMNIST(root="../data",
train=False,
transform=trans,
download=True)
return (data.DataLoader(mnist_train, batch_size, shuffle=True,
num_workers=get_dataloader_workers()),
data.DataLoader(mnist_test, batch_size, shuffle=False,
num_workers=get_dataloader_workers()))
train_iter, test_iter = load_data_fashion_mnist(32, resize=64)
for X, y in train_iter:
print(X.shape, X.dtype, y.shape, y.dtype)
break
|
StarcoderdataPython
|
37247
|
<gh_stars>0
#!/usr/bin/env python3
import cgi, os
import shutil
from userlogger import UserLogger
import templates
import mimetypes
from stat import S_IEXEC
def getUnusedName(file):
if not os.path.exists(file):
return file
basepath, basename = os.path.split(file)
p = basename.rfind('.')
extension = basename[p:] if p > 0 else ""
name = basename[:len(basename)-len(extension)]
counter = 0
outFile = file
while os.path.exists(outFile):
counter += 1
outFile = os.path.join(basepath, name + str(counter) + extension)
return outFile
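# e.g. if "report.txt" already exists, getUnusedName returns "report1.txt",
# then "report2.txt", and so on, until an unused name is found.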
def getRecbin():
if not os.path.isdir("recbin") and not os.path.isdir("../recbin"):
os.mkdir("recbin")
return "recbin" if os.path.isdir("recbin") else "../recbin"
##################################################
# main
# create instance of field storage
form = cgi.FieldStorage()
if "path" in form:
filepath = form.getvalue("path")
filepath = filepath.rstrip(os.sep)
else:
filepath = os.sep
if "cmd" in form:
cmd = form.getvalue("cmd")
else:
cmd = "nocommand"
# receive file for upload
try:
uploadfiles = form["uploadfiles"]
cmd = "uploadfiles"
except:
pass
# receive page (optional)
currentPage = 0 if "page" not in form else int(form.getvalue("page"))
##################################################
# permission guard
userLogger = UserLogger()
userPermission = userLogger.getPermission(filepath)
userLogger.setTargetUrl('pybrowser.py?path='+filepath)
# make sure user is allowed to read
if (userPermission < UserLogger.PERMISSION_READ):
if "redirect" not in form:
args = '&'.join([key + '=' + str(form[key].value) for key in form.keys()])
if args:
url = os.path.basename(os.environ['SCRIPT_NAME']) + '?redirect=True&' + args
else:
url = os.path.basename(os.environ['SCRIPT_NAME']) + '?redirect=True'
templates.redirect(url)
else:
userLogger.showLogin('Identification required')
elif userPermission == UserLogger.PERMISSION_READ:
if (cmd == "nocommand"):
templates.directory(filepath, currentPage)
else:
if "redirect" not in form:
args = '&'.join([key + '=' + str(form[key].value) for key in form.keys()])
if args:
url = os.path.basename(os.environ['SCRIPT_NAME']) + '?redirect=True&' + args
else:
url = os.path.basename(os.environ['SCRIPT_NAME']) + '?redirect=True'
templates.redirect(url)
else:
userLogger.showLogin('Identification required')
##################################################
# check commands (all read permission)
# upload file
if cmd == "uploadfiles":
# upload file to server
try:
        # if a single file was received, wrap it in a list so it is list-accessible
if uploadfiles.filename:
uploadfiles = list([uploadfiles])
except:
pass
try:
for file in uploadfiles:
FILEPATH = os.path.join(filepath, file.filename)
# create file
            with open(FILEPATH, 'wb') as fhand:
                contentRaw = file.file.read()
                fhand.write(contentRaw)
# convert text file to unix format
mime = mimetypes.guess_type(FILEPATH)
if 'text' in str(mime):
                with open(FILEPATH, 'wb') as fhand:
                    contentRaw = contentRaw.replace(b'\r\n', b'\n')  # DOS
                    contentRaw = contentRaw.replace(b'\r', b'\n')  # classic Mac OS
                    fhand.write(contentRaw)
# make file executable
if ".py" in FILEPATH:
mode = os.stat(FILEPATH).st_mode
os.chmod(FILEPATH, mode|S_IEXEC )
except Exception as e:
templates.message("UploadError", str(e))
# new
elif cmd == "new":
# new folder
if not os.path.exists(filepath):
os.mkdir(filepath)
filepath = os.path.dirname(filepath)
# save file (from editor)
elif os.path.isfile(filepath):
try:
        contentRaw = form.getvalue("textcontent")
        contentRaw = contentRaw.encode('utf-8')
        # in case of DOS/macOS formatting, change to unix line endings
        contentUnix = contentRaw.replace(b'\r\n', b'\n')  # DOS
        contentUnix = contentUnix.replace(b'\r', b'\n')  # classic Mac OS
        with open(filepath, 'wb') as fhand:
            fhand.write(contentUnix)
if ".py" in filepath:
mode = os.stat(filepath).st_mode
os.chmod(filepath, mode|S_IEXEC )
except Exception as e:
templates.error(str(e))
# remove folder/file
elif cmd == "remove":
recbin = getRecbin()
userRecbin = os.path.join(recbin, userLogger.isLoggedIn())
if not os.path.isdir(userRecbin):
os.mkdir(userRecbin)
if os.path.isdir(filepath) or os.path.isfile(filepath):
try:
destination = getUnusedName(os.path.join(userRecbin, os.path.basename(filepath)))
os.rename(filepath, destination)
except:
pass
# rename
elif cmd == "rename":
try:
newname = form.getvalue("newname")
if os.path.exists(filepath):
os.rename(filepath, os.path.join(os.path.dirname(filepath), newname))
except:
pass
# copy
elif cmd == "copy":
if os.path.isfile(filepath) or os.path.isdir(filepath):
userLogger.setCopyUrl(filepath)
if os.path.isdir(filepath):
filepath = os.path.split(filepath)[0]
# paste
elif cmd == "paste":
sourceFile = userLogger.getCopyUrl()
userLogger.resetCopyUrl()
destFileName = getUnusedName(os.path.join(filepath, os.path.basename(sourceFile)))
if os.path.isfile(sourceFile):
shutil.copy(sourceFile, destFileName)
elif os.path.isdir(sourceFile):
shutil.copytree(sourceFile, destFileName)
else:
templates.error("No copy file found")
# unzip
elif cmd == "unzip":
import zipfile
dirpath = os.path.dirname(filepath)
newFolder = getUnusedName(os.path.join(dirpath, os.path.basename(filepath).replace('.zip', '')))
os.mkdir(newFolder)
try:
zipf = zipfile.ZipFile(filepath, 'r')
zipf.extractall(newFolder)
zipf.close()
except Exception as e:
templates.message("Unzip", str(e))
filepath = dirpath
#templates.message("Unzip", filepath)
# validate filepath
if not os.path.isdir(filepath):
filepath = os.path.dirname(filepath)
if not os.path.isdir(filepath):
filepath = os.sep
# show directory
if (userLogger.getPermission(filepath) >= userLogger.PERMISSION_READ):
templates.directory(filepath, currentPage)
else:
if "redirect" not in form:
args = '&'.join([key + '=' + str(form[key].value) for key in form.keys()])
if args:
url = os.path.basename(os.environ['SCRIPT_NAME']) + '?redirect=True&' + args
else:
url = os.path.basename(os.environ['SCRIPT_NAME']) + '?redirect=True'
templates.redirect(url)
else:
userLogger.showLogin('Identification required')
|
StarcoderdataPython
|
3315648
|
<gh_stars>1-10
import requests
class DeviceTracker:
    def __init__(self, ip_url):
self.ip_url = ip_url
def get_json_data(self):
device_req = requests.get(url = self.ip_url)
if device_req.status_code == 200:
device_json = device_req.json()
else:
device_json = {}
return device_json
    def get_device_details(self):
        device_data = self.get_json_data()
        # ip-api.com returns a flat JSON object, so read the fields
        # individually rather than via chained lookups.
        keys = ('status', 'country', 'countryCode', 'region', 'regionName',
                'city', 'zip', 'lat', 'lon', 'timezone', 'isp', 'org', 'as',
                'query')
        device_details = {key: device_data.get(key) for key in keys}
        return device_details
ip_url = 'http://ip-api.com/json'
my_loc = DeviceTracker(ip_url=ip_url)
print(my_loc.get_device_details())
print(my_loc.get_json_data())
|
StarcoderdataPython
|
120219
|
import nibabel as nib
import numpy as np
import pdb
# library: http://nipy.org/nibabel/gettingstarted.html
data_folder = '/ihome/azhu/cs189/data/liverScans/Training Batch 2/'
num_examples_min = 28
num_examples_max = 130
def nifti_to_nparray(filename):
img = nib.load(filename)
data = img.get_data()
return data
def convert_and_save_batch(folder, data_type):
for i in range(num_examples_min, num_examples_max+1):
        print('converting ' + data_type + ' ' + str(i))
file_name = folder + data_type + '-' + str(i)
data = nifti_to_nparray(file_name + '.nii')
# Concatenate an end-token at the end of the volume/segmentation
# x = data.shape[1]
# y = data.shape[0]
# end_token = np.ones((y, x, 1)) * -1
# data = np.concatenate((data, end_token), 2)
# Save the np array
npy_name = folder + 'npy_data_notoken/' + data_type + '-' + str(i)
np.save(npy_name, data)
        print(npy_name + ' saved')
if __name__ == '__main__':
vol = convert_and_save_batch(data_folder, 'volume')
seg = convert_and_save_batch(data_folder, 'segmentation')
|
StarcoderdataPython
|
1621455
|
'''
## Problem 🤔
You are given an array of k linked-lists lists, each linked-list is sorted in ascending order.
Merge all the linked-lists into one sorted linked-list and return it.
**Example 1**
`Input: lists = [[1,4,5],[1,3,4],[2,6]]`
`Output: [1,1,2,3,4,4,5,6]`
_Explanation_
The linked-lists are:
```
[
1->4->5,
1->3->4,
2->6
]
```
merging them into one sorted list:
```
1->1->2->3->4->4->5->6
```
**Example 2**
`Input: lists = []`
`Output: []`
**Example 3**
`Input: lists = [[]]`
`Output: []`
**Note**
- k == lists.length
- 0 <= k <= 10^4
- 0 <= lists[i].length <= 500
- -10^4 <= lists[i][j] <= 10^4
- lists[i] is sorted in ascending order.
- The sum of lists[i].length won't exceed 10^4
'''
import heapq

class ListNode:
def __init__(self, val=0, next=None):
self.val = val
self.next = next
class NaiveSolution(object):
def mergeKLists(self, lists):
vals = []
cur = ListNode(None)
dummy_node = cur
for l in lists:
while l:
vals.append(l.val)
l = l.next
for n in sorted(vals):
cur.next = ListNode(n)
cur = cur.next
return dummy_node.next
class Solution:
def mergeKLists(self, lists):
heap = [(l.val, idx) for idx, l in enumerate(lists) if l]
heapq.heapify(heap)
cur = ListNode(None)
dummy_node = cur
while heap:
val, idx = heapq.heappop(heap)
cur.next = ListNode(val)
cur = cur.next
lists[idx] = lists[idx].next
if lists[idx]:
heapq.heappush(heap, (lists[idx].val, idx))
return dummy_node.next
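# Hypothetical usage sketch (not part of the original solution): build the
# linked lists from Example 1 and merge them with the heap-based Solution.
if __name__ == '__main__':
    def build(values):
        dummy = ListNode(None)
        cur = dummy
        for v in values:
            cur.next = ListNode(v)
            cur = cur.next
        return dummy.next
    lists = [build(vs) for vs in ([1, 4, 5], [1, 3, 4], [2, 6])]
    merged = Solution().mergeKLists(lists)
    out = []
    while merged:
        out.append(merged.val)
        merged = merged.next
    print(out)  # expected: [1, 1, 2, 3, 4, 4, 5, 6]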
|
StarcoderdataPython
|
4839594
|
<reponame>purcell-lab/HomeAssistant-OctopusEnergy
from datetime import timedelta
import logging
from homeassistant.util.dt import (utcnow, now, as_utc, parse_datetime)
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity
)
from homeassistant.components.sensor import (
DEVICE_CLASS_MONETARY,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_GAS,
STATE_CLASS_TOTAL_INCREASING,
SensorEntity,
)
from homeassistant.const import (
ENERGY_KILO_WATT_HOUR,
VOLUME_CUBIC_METERS
)
from .utils import (async_get_active_tariff_code, convert_kwh_to_m3)
from .const import (
DOMAIN,
CONFIG_MAIN_API_KEY,
CONFIG_MAIN_ACCOUNT_ID,
CONFIG_SMETS1,
DATA_COORDINATOR,
DATA_CLIENT
)
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=1)
async def async_setup_entry(hass, entry, async_add_entities):
"""Setup sensors based on our entry"""
if CONFIG_MAIN_API_KEY in entry.data:
await async_setup_default_sensors(hass, entry, async_add_entities)
async def async_setup_default_sensors(hass, entry, async_add_entities):
config = dict(entry.data)
if entry.options:
config.update(entry.options)
is_smets1 = False
if CONFIG_SMETS1 in config:
is_smets1 = config[CONFIG_SMETS1]
client = hass.data[DOMAIN][DATA_CLIENT]
coordinator = hass.data[DOMAIN][DATA_COORDINATOR]
await coordinator.async_config_entry_first_refresh()
entities = [OctopusEnergyElectricityCurrentRate(coordinator), OctopusEnergyElectricityPreviousRate(coordinator)]
account_info = await client.async_get_account(config[CONFIG_MAIN_ACCOUNT_ID])
if len(account_info["electricity_meter_points"]) > 0:
for point in account_info["electricity_meter_points"]:
# We only care about points that have active agreements
if async_get_active_tariff_code(point["agreements"], client) != None:
for meter in point["meters"]:
entities.append(OctopusEnergyLatestElectricityReading(client, point["mpan"], meter["serial_number"]))
entities.append(OctopusEnergyPreviousAccumulativeElectricityReading(client, point["mpan"], meter["serial_number"]))
if len(account_info["gas_meter_points"]) > 0:
for point in account_info["gas_meter_points"]:
# We only care about points that have active agreements
if async_get_active_tariff_code(point["agreements"], client) != None:
for meter in point["meters"]:
entities.append(OctopusEnergyLatestGasReading(client, point["mprn"], meter["serial_number"], is_smets1))
entities.append(OctopusEnergyPreviousAccumulativeGasReading(client, point["mprn"], meter["serial_number"], is_smets1))
async_add_entities(entities, True)
class OctopusEnergyElectricityCurrentRate(CoordinatorEntity, SensorEntity):
"""Sensor for displaying the current rate."""
def __init__(self, coordinator):
"""Init sensor."""
# Pass coordinator to base class
super().__init__(coordinator)
self._attributes = {}
self._state = None
@property
def unique_id(self):
"""The id of the sensor."""
return "octopus_energy_electricity_current_rate"
@property
def name(self):
"""Name of the sensor."""
return "Octopus Energy Electricity Current Rate"
@property
def device_class(self):
"""The type of sensor"""
return DEVICE_CLASS_MONETARY
@property
def icon(self):
"""Icon of the sensor."""
return "mdi:currency-usd"
@property
def unit_of_measurement(self):
"""Unit of measurement of the sensor."""
return "GBP/kWh"
@property
def extra_state_attributes(self):
"""Attributes of the sensor."""
return self._attributes
@property
def state(self):
"""The state of the sensor."""
# Find the current rate. We only need to do this every half an hour
now = utcnow()
if (now.minute % 30) == 0 or self._state == None:
_LOGGER.info('Updating OctopusEnergyElectricityCurrentRate')
current_rate = None
if self.coordinator.data != None:
for period in self.coordinator.data:
if now >= period["valid_from"] and now <= period["valid_to"]:
current_rate = period
break
if current_rate != None:
self._attributes = current_rate
self._state = current_rate["value_inc_vat"] / 100
else:
self._state = 0
return self._state
class OctopusEnergyElectricityPreviousRate(CoordinatorEntity, SensorEntity):
"""Sensor for displaying the previous rate."""
def __init__(self, coordinator):
"""Init sensor."""
# Pass coordinator to base class
super().__init__(coordinator)
self._attributes = {}
self._state = None
@property
def unique_id(self):
"""The id of the sensor."""
return "octopus_energy_electricity_previous_rate"
@property
def name(self):
"""Name of the sensor."""
return "Octopus Energy Electricity Previous Rate"
@property
def device_class(self):
"""The type of sensor"""
return DEVICE_CLASS_MONETARY
@property
def icon(self):
"""Icon of the sensor."""
return "mdi:currency-usd"
@property
def unit_of_measurement(self):
"""Unit of measurement of the sensor."""
return "GBP/kWh"
@property
def extra_state_attributes(self):
"""Attributes of the sensor."""
return self._attributes
@property
def state(self):
"""The state of the sensor."""
# Find the previous rate. We only need to do this every half an hour
now = utcnow()
if (now.minute % 30) == 0 or self._state == None:
_LOGGER.info('Updating OctopusEnergyElectricityPreviousRate')
target = now - timedelta(minutes=30)
previous_rate = None
if self.coordinator.data != None:
for period in self.coordinator.data:
if target >= period["valid_from"] and target <= period["valid_to"]:
previous_rate = period
break
if previous_rate != None:
self._attributes = previous_rate
self._state = previous_rate["value_inc_vat"] / 100
else:
self._state = 0
return self._state
class OctopusEnergyLatestElectricityReading(SensorEntity):
"""Sensor for displaying the current electricity rate."""
def __init__(self, client, mpan, serial_number):
"""Init sensor."""
self._mpan = mpan
self._serial_number = serial_number
self._client = client
self._attributes = {
"MPAN": mpan,
"Serial Number": serial_number
}
self._state = None
@property
def unique_id(self):
"""The id of the sensor."""
return f"octopus_energy_electricity_{self._serial_number}_latest_consumption"
@property
def name(self):
"""Name of the sensor."""
return f"Octopus Energy Electricity {self._serial_number} Latest Consumption"
@property
def device_class(self):
"""The type of sensor"""
return DEVICE_CLASS_ENERGY
@property
def state_class(self):
"""The state class of sensor"""
return STATE_CLASS_TOTAL_INCREASING
@property
def unit_of_measurement(self):
"""The unit of measurement of sensor"""
return ENERGY_KILO_WATT_HOUR
@property
def icon(self):
"""Icon of the sensor."""
return "mdi:lightning-bolt"
@property
def extra_state_attributes(self):
"""Attributes of the sensor."""
return self._attributes
@property
def state(self):
"""Native value of the sensor."""
return self._state
async def async_update(self):
"""Retrieve the latest consumption"""
# We only need to do this every half an hour
current_datetime = now()
if (current_datetime.minute % 30) == 0 or self._state == None:
_LOGGER.info('Updating OctopusEnergyLatestElectricityReading')
period_from = as_utc(current_datetime - timedelta(hours=1))
period_to = as_utc(current_datetime)
data = await self._client.async_electricity_consumption(self._mpan, self._serial_number, period_from, period_to)
if data != None and len(data) > 0:
self._state = data[0]["consumption"]
else:
self._state = 0
class OctopusEnergyPreviousAccumulativeElectricityReading(SensorEntity):
"""Sensor for displaying the previous days accumulative electricity reading."""
  def __init__(self, client, mpan, serial_number):
    """Init sensor."""
    self._mpan = mpan
    self._serial_number = serial_number
    self._client = client
    self._attributes = {
      "MPAN": mpan,
      "Serial Number": serial_number
    }
self._state = None
self._data = []
@property
def unique_id(self):
"""The id of the sensor."""
return f"octopus_energy_electricity_{self._serial_number}_previous_accumulative_consumption"
@property
def name(self):
"""Name of the sensor."""
return f"Octopus Energy Electricity {self._serial_number} Previous Accumulative Consumption"
@property
def device_class(self):
"""The type of sensor"""
return DEVICE_CLASS_ENERGY
@property
def state_class(self):
"""The state class of sensor"""
return STATE_CLASS_TOTAL_INCREASING
@property
def unit_of_measurement(self):
"""The unit of measurement of sensor"""
return ENERGY_KILO_WATT_HOUR
@property
def icon(self):
"""Icon of the sensor."""
return "mdi:lightning-bolt"
@property
def extra_state_attributes(self):
"""Attributes of the sensor."""
return self._attributes
@property
def state(self):
"""Native value of the sensor."""
return self._state
async def async_update(self):
"""Retrieve the previous days accumulative consumption"""
current_datetime = now()
    # We only need to do this once a day; if we don't yet have a full day's
    # data, retry every hour until we do.
if (current_datetime.hour == 0 and current_datetime.minute == 0) or self._state == None or (current_datetime.minute % 60 == 0 and len(self._data) != 48):
_LOGGER.info('Updating OctopusEnergyPreviousAccumulativeElectricityReading')
period_from = as_utc((current_datetime - timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0))
period_to = as_utc(current_datetime.replace(hour=0, minute=0, second=0, microsecond=0))
      data = await self._client.async_electricity_consumption(self._mpan, self._serial_number, period_from, period_to)
if data != None:
total = 0
for item in data:
total = total + item["consumption"]
self._state = total
self._data = data
else:
self._state = 0
self._data = []
class OctopusEnergyLatestGasReading(SensorEntity):
"""Sensor for displaying the current gas rate."""
def __init__(self, client, mprn, serial_number, is_smets1_meter):
"""Init sensor."""
self._mprn = mprn
self._serial_number = serial_number
self._is_smets1_meter = is_smets1_meter
self._client = client
self._attributes = {
"MPRN": mprn,
"Serial Number": serial_number,
"Is SMETS1 Meter": is_smets1_meter
}
self._state = None
@property
def unique_id(self):
"""The id of the sensor."""
return f"octopus_energy_gas_{self._serial_number}_latest_consumption"
@property
def name(self):
"""Name of the sensor."""
return f"Octopus Energy Gas {self._serial_number} Latest Consumption"
@property
def device_class(self):
"""The type of sensor"""
return DEVICE_CLASS_GAS
@property
def state_class(self):
"""The state class of sensor"""
return STATE_CLASS_TOTAL_INCREASING
@property
def unit_of_measurement(self):
"""The unit of measurement of sensor"""
return VOLUME_CUBIC_METERS
@property
def icon(self):
"""Icon of the sensor."""
return "mdi:fire"
@property
def extra_state_attributes(self):
"""Attributes of the sensor."""
return self._attributes
@property
def state(self):
"""Native value of the sensor."""
return self._state
async def async_update(self):
"""Retrieve the latest consumption"""
# We only need to do this every half an hour
current_datetime = now()
if (current_datetime.minute % 30) == 0 or self._state == None:
_LOGGER.info('Updating OctopusEnergyLatestGasReading')
period_from = as_utc(current_datetime - timedelta(hours=1))
period_to = as_utc(current_datetime)
data = await self._client.async_gas_consumption(self._mprn, self._serial_number, period_from, period_to)
if data != None and len(data) > 0:
self._state = data[0]["consumption"]
else:
self._state = 0
if self._is_smets1_meter:
self._state = convert_kwh_to_m3(self._state)
class OctopusEnergyPreviousAccumulativeGasReading(SensorEntity):
"""Sensor for displaying the previous days accumulative gas reading."""
def __init__(self, client, mprn, serial_number, is_smets1_meter):
"""Init sensor."""
self._mprn = mprn
self._serial_number = serial_number
self._is_smets1_meter = is_smets1_meter
self._client = client
self._attributes = {
"MPRN": mprn,
"Serial Number": serial_number,
"Is SMETS1 Meter": is_smets1_meter
}
self._state = None
self._data = []
@property
def unique_id(self):
"""The id of the sensor."""
return f"octopus_energy_gas_{self._serial_number}_previous_accumulative_consumption"
@property
def name(self):
"""Name of the sensor."""
return f"Octopus Energy Gas {self._serial_number} Previous Accumulative Consumption"
@property
def device_class(self):
"""The type of sensor"""
return DEVICE_CLASS_GAS
@property
def state_class(self):
"""The state class of sensor"""
return STATE_CLASS_TOTAL_INCREASING
@property
def unit_of_measurement(self):
"""The unit of measurement of sensor"""
return VOLUME_CUBIC_METERS
@property
def icon(self):
"""Icon of the sensor."""
return "mdi:fire"
@property
def extra_state_attributes(self):
"""Attributes of the sensor."""
return self._attributes
@property
def state(self):
"""Native value of the sensor."""
return self._state
async def async_update(self):
"""Retrieve the previous days accumulative consumption"""
current_datetime = now()
    # We only need to do this once a day; if we don't yet have a full day's
    # data, retry every hour until we do.
if (current_datetime.hour == 0 and current_datetime.minute == 0) or self._state == None or (current_datetime.minute % 60 == 0 and len(self._data) != 48):
_LOGGER.info('Updating OctopusEnergyPreviousAccumulativeGasReading')
period_from = as_utc((current_datetime - timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0))
period_to = as_utc(current_datetime.replace(hour=0, minute=0, second=0, microsecond=0))
data = await self._client.async_gas_consumption(self._mprn, self._serial_number, period_from, period_to)
if data != None:
total = 0
for item in data:
total = total + item["consumption"]
self._state = total
self._data = data
else:
self._state = 0
self._data = []
if self._is_smets1_meter:
self._state = convert_kwh_to_m3(self._state)
|
StarcoderdataPython
|
101160
|
import json
import tweepy
with open("json/bots.json", "r") as bots:
default_bot = json.load(bots)["Default"]
auth = tweepy.OAuthHandler(default_bot["consumer_key"], default_bot["consumer_secret"])
auth.set_access_token(default_bot["access_token"], default_bot["access_token_secret"])
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
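# Note: this assumes tweepy 3.x; wait_on_rate_limit_notify was removed in tweepy v4.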
try:
api.verify_credentials()
print("OK")
except tweepy.TweepError:
print("Authentication Error")
for tweet in tweepy.Cursor(api.user_timeline).items(100):
api.destroy_status(tweet.id)
print("DONE")
|
StarcoderdataPython
|
3305873
|
# coding=utf-8
from setuptools import setup, find_packages
setup(
name='PrefectDecorator',
version='0.0.3',
description='Some convenient decorators.',
author='RankoHata',
author_email='<EMAIL>',
maintainer='RankoHata',
maintainer_email='<EMAIL>',
license='MIT License',
    packages=find_packages(),
platforms=['all'],
url='https://github.com/RankoHata/PrefectDecorator',
classifiers=[
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation',
'Topic :: Software Development :: Libraries'
],
)
|
StarcoderdataPython
|
31886
|
<reponame>troybots/omnibot
from omnibot import logging
from omnibot.services import stats
from omnibot.services import slack
from omnibot.services.slack import parser
logger = logging.getLogger(__name__)
class Message(object):
"""
Class for representing a parsed slack message.
"""
def __init__(self, bot, event, event_trace):
self._event_trace = event_trace
self.event = event
self._match = None
self._payload = {}
self._payload['omnibot_payload_type'] = 'message'
self._bot = bot
# The bot object has data we don't want to pass to downstreams, so
# in the payload, we just store specific bot data.
self._payload['bot'] = {
'name': bot.name,
'bot_id': bot.bot_id
}
# For future safety sake, we'll do the same for the team.
self._payload['team'] = {
'name': bot.team.name,
'team_id': bot.team.team_id
}
self._payload['ts'] = event['ts']
self._payload['thread_ts'] = event.get('thread_ts')
self._check_unsupported()
self._payload['user'] = event.get('user')
if self.user:
self._payload['parsed_user'] = slack.get_user(self.bot, self.user)
elif self.bot_id:
# TODO: call get_bot
self._payload['parsed_user'] = None
else:
self._payload['parsed_user'] = None
try:
self._payload['text'] = event['text']
except Exception:
logger.error(
'Message event is missing text attribute.',
extra=self.event_trace
)
raise
self._payload['parsed_text'] = self.text
self._payload['channel_id'] = event['channel']
self._event_trace['channel_id'] = self.channel_id
self._payload['channel'] = slack.get_channel(
self.bot,
self.channel_id
)
if not self.channel:
logger.error(
'Failed to fetch channel from channel_id.',
extra=self.event_trace
)
self._parse_payload()
def _check_unsupported(self):
# TODO: make the ignores configurable, but have a default list
# Ignore self
# Ignore bots
unsupported = False
if self.bot_id:
logger.debug('ignoring message from bot', extra=self.event_trace)
unsupported = True
# Ignore threads
elif self.thread_ts:
logger.debug('ignoring thread message', extra=self.event_trace)
unsupported = True
# For now, ignore all event subtypes
elif self.subtype:
extra = {'subtype': self.subtype}
extra.update(self.event_trace)
logger.debug(
'ignoring message with unsupported subtype',
extra=extra,
)
unsupported = True
if unsupported:
statsd = stats.get_statsd_client()
statsd.incr('event.unsupported')
raise MessageUnsupportedError()
def _parse_payload(self):
try:
self._payload['users'] = parser.extract_users(self.text, self.bot)
self._payload['parsed_text'] = parser.replace_users(
self.parsed_text,
self.users
)
except Exception:
logger.exception(
'Failed to extract user info from text.',
exc_info=True,
extra=self.event_trace
)
try:
self._payload['channels'] = parser.extract_channels(
self.text,
self.bot
)
self._payload['parsed_text'] = parser.replace_channels(
self.parsed_text,
self.channels
)
except Exception:
logger.exception(
'Failed to extract channel info from text.',
exc_info=True,
extra=self.event_trace
)
try:
self._payload['subteams'] = parser.extract_subteams(
self.text,
self.bot
)
except Exception:
logger.exception(
'Failed to extract subteam info from text.',
exc_info=True,
extra=self.event_trace
)
try:
self._payload['specials'] = parser.extract_specials(self.text)
self._payload['parsed_text'] = parser.replace_specials(
self.parsed_text,
self.specials
)
except Exception:
logger.exception(
'Failed to extract special info from text.',
exc_info=True,
extra=self.event_trace
)
try:
self._payload['emojis'] = parser.extract_emojis(self.text)
except Exception:
logger.exception(
'Failed to extract emoji info from text.',
exc_info=True,
extra=self.event_trace
)
try:
self._payload['emails'] = parser.extract_emails(self.text)
self._payload['parsed_text'] = parser.replace_emails(
self.parsed_text,
self.emails
)
except Exception:
logger.exception(
'Failed to extract email info from text.',
exc_info=True,
extra=self.event_trace
)
try:
self._payload['urls'] = parser.extract_urls(self.text)
self._payload['parsed_text'] = parser.replace_urls(
self.parsed_text,
self.urls
)
except Exception:
logger.exception(
'Failed to extract url info from text.',
exc_info=True,
extra=self.event_trace
)
try:
self._payload['directed'] = parser.extract_mentions(
# We match mentioned and directed against parsed users, not
# against raw users.
self.parsed_text,
self.bot,
self.channel
)
except Exception:
logger.exception(
'Failed to extract mentions from text.',
exc_info=True,
extra=self.event_trace
)
self._payload['mentioned'] = False
for user_id, user_name in self.users.items():
if self.bot.name == user_name:
self._payload['mentioned'] = True
try:
self._payload['command_text'] = parser.extract_command(
# Similar to mentions above, we find the command text
# from pre-parsed text for users, not against raw users.
self.parsed_text,
self.bot
)
except Exception:
logger.exception(
'Failed to extract command_text from text.',
exc_info=True,
extra=self.event_trace
)
@property
def subtype(self):
return self.event.get('subtype')
@property
def text(self):
return self._payload['text']
@property
def parsed_text(self):
return self._payload['parsed_text']
@property
def command_text(self):
return self._payload.get('command_text')
@property
def directed(self):
return self._payload.get('directed', False)
@property
def mentioned(self):
return self._payload.get('mentioned', False)
@property
def channel_id(self):
return self._payload.get('channel_id')
@property
def channel(self):
return self._payload.get('channel', {})
@property
def user(self):
return self._payload['user']
@property
def ts(self):
return self._payload['ts']
@property
def thread_ts(self):
return self._payload['thread_ts']
@property
def team(self):
return self._payload['team']
@property
def bot(self):
"""
The bot associated with the app that received this message from the
event subscription api. To get info about a bot that may have sent
this message, see bot_id.
"""
return self._bot
@property
def bot_id(self):
"""
        The bot_id associated with the message, if the message is from a bot.
If this message isn't from a bot, this will return None.
"""
return self.event.get('bot_id')
@property
def channels(self):
return self._payload.get('channels', {})
@property
def users(self):
return self._payload.get('users', {})
@property
def specials(self):
return self._payload.get('specials', {})
@property
def emails(self):
return self._payload.get('emails', {})
@property
def urls(self):
return self._payload.get('urls', {})
@property
def match_type(self):
return self._payload.get('match_type')
@property
def match(self):
return self._match
@property
def payload(self):
return self._payload
@property
def event_trace(self):
return self._event_trace
def set_match(self, match_type, match):
self._payload['match_type'] = match_type
self._match = match
if match_type == 'command':
self._payload['command'] = match
self._payload['args'] = self.command_text[len(match):].strip()
elif match_type == 'regex':
self._payload['regex'] = match
class MessageUnsupportedError(Exception):
pass
|
StarcoderdataPython
|
140863
|
import FWCore.ParameterSet.Config as cms
#Tracks without extra and hits
#AOD content
RecoTrackerAOD = cms.PSet(
outputCommands = cms.untracked.vstring(
'keep recoTracks_ctfWithMaterialTracksP5_*_*',
'keep recoTracks_ctfWithMaterialTracksP5LHCNavigation_*_*',
'keep recoTracks_rsWithMaterialTracksP5_*_*',
'keep recoTracks_cosmictrackfinderP5_*_*',
'keep recoTracks_beamhaloTracks_*_*',
'keep recoTracks_splittedTracksP5_*_*',
'keep recoTracks_ctfWithMaterialTracksP5Top_*_*',
'keep recoTracks_rsWithMaterialTracksP5Top_*_*',
'keep recoTracks_cosmictrackfinderP5Top_*_*',
'keep recoTracks_ctfWithMaterialTracksP5Bottom_*_*',
'keep recoTracks_rsWithMaterialTracksP5Bottom_*_*',
'keep recoTracks_cosmictrackfinderP5Bottom_*_*',
'keep recoTracks_regionalCosmicTracks_*_*',
'keep *_dedxHitInfo_*_*',
'keep *_dedxHarmonic2_*_*',
'keep *_dedxHitInfoCTF_*_*',
'keep *_dedxHarmonic2CTF_*_*',
'keep *_dedxHitInfoCosmicTF_*_*',
'keep *_dedxHarmonic2CosmicTF_*_*')
)
#RECO content
RecoTrackerRECO = cms.PSet(
outputCommands = cms.untracked.vstring(
'keep recoTrackExtras_ctfWithMaterialTracksP5_*_*',
'keep TrackingRecHitsOwned_ctfWithMaterialTracksP5_*_*',
'keep recoTrackExtras_ctfWithMaterialTracksP5LHCNavigation_*_*',
'keep TrackingRecHitsOwned_ctfWithMaterialTracksP5LHCNavigation_*_*',
'keep recoTrackExtras_rsWithMaterialTracksP5_*_*',
'keep TrackingRecHitsOwned_rsWithMaterialTracksP5_*_*',
'keep recoTrackExtras_cosmictrackfinderP5_*_*',
'keep TrackingRecHitsOwned_cosmictrackfinderP5_*_*',
'keep recoTrackExtras_beamhaloTracks_*_*',
'keep TrackingRecHitsOwned_beamhaloTracks_*_*',
'keep recoTrackExtras_splittedTracksP5_*_*',
'keep TrackingRecHitsOwned_splittedTracksP5_*_*',
'keep recoTrackExtras_ctfWithMaterialTracksP5Top_*_*',
'keep TrackingRecHitsOwned_ctfWithMaterialTracksP5Top_*_*',
'keep recoTrackExtras_rsWithMaterialTracksP5Top_*_*',
'keep TrackingRecHitsOwned_rsWithMaterialTracksP5Top_*_*',
'keep recoTrackExtras_cosmictrackfinderP5Top_*_*',
'keep TrackingRecHitsOwned_cosmictrackfinderP5Top_*_*',
'keep recoTrackExtras_ctfWithMaterialTracksP5Bottom_*_*',
'keep TrackingRecHitsOwned_ctfWithMaterialTracksP5Bottom_*_*',
'keep recoTrackExtras_rsWithMaterialTracksP5Bottom_*_*',
'keep TrackingRecHitsOwned_rsWithMaterialTracksP5Bottom_*_*',
'keep recoTrackExtras_cosmictrackfinderP5Bottom_*_*',
'keep TrackingRecHitsOwned_cosmictrackfinderP5Bottom_*_*',
'keep recoTrackExtras_regionalCosmicTracks_*_*',
'keep TrackingRecHitsOwned_regionalCosmicTracks_*_*',
'keep *_dedxTruncated40_*_*',
'keep *_dedxTruncated40CTF_*_*',
'keep *_dedxTruncated40CosmicTF_*_*',
'keep recoTracks_cosmicDCTracks_*_*',
'keep recoTrackExtras_cosmicDCTracks_*_*',
'keep TrackingRecHitsOwned_cosmicDCTracks_*_*')
)
RecoTrackerRECO.outputCommands.extend(RecoTrackerAOD.outputCommands)
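# RECO keeps everything in AOD plus the track extras/hits listed above.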
#Full Event content
RecoTrackerFEVT = cms.PSet(
outputCommands = cms.untracked.vstring()
)
RecoTrackerFEVT.outputCommands.extend(RecoTrackerRECO.outputCommands)
|
StarcoderdataPython
|
108144
|
from __future__ import print_function
import FWCore.ParameterSet.Config as cms
import copy
process = cms.Process("zpdfsys")
process.maxEvents = cms.untracked.PSet(
#input = cms.untracked.int32(-1)
input = cms.untracked.int32(-1)
)
## process.source = cms.Source("PoolSource",
## debugVerbosity = cms.untracked.uint32(0),
## debugFlag = cms.untracked.bool(False),
## fileNames = cms.untracked.vstring()
## )
## import os
## dirname = "/scratch1/cms/data/summer08/Zmumu_M20/"
## dirlist = os.listdir(dirname)
## basenamelist = os.listdir(dirname + "/")
## for basename in basenamelist:
## process.source.fileNames.append("file:" + dirname + "/" + basename)
## print "Number of files to process is %s" % (len(process.source.fileNames))
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring(
'file:genParticlePlusCteq65AndMRST06NNLOAndMSTW2007LOmodWeigths.root',
)
)
process.evtInfo = cms.OutputModule("AsciiOutputModule")
process.TFileService = cms.Service("TFileService",
fileName = cms.string('cteq65AndMST06NLOABDMSTW2007lomod.root')
)
#for i in range(41):
# proc = "process.zpdf" + str(i)
# print "proc", proc
process.zpf = cms.EDAnalyzer("zPdfUnc",
genParticles = cms.InputTag("genParticles"),
pdfweights = cms.InputTag("pdfWeights:xxxxx"),
pdfmember = cms.untracked.uint32(0),
nbinsMass=cms.untracked.uint32(200),
nbinsPt=cms.untracked.uint32(200),
nbinsAng=cms.untracked.uint32(200),
massMax = cms.untracked.double(200.),
ptMax= cms.untracked.double(200.),
angMax = cms.untracked.double(6.),
#parameter for the geometric acceptance (numerator)
accPtMin = cms.untracked.double(20.0),
accMassMin = cms.untracked.double(60.0),
accMassMax = cms.untracked.double(120.0),
accEtaMin = cms.untracked.double(0.0),
accEtaMax = cms.untracked.double(2.1),
# for denominator
accMassMinDenominator=cms.untracked.double(40.0),
isMCatNLO= cms.untracked.bool(False),
outfilename= cms.untracked.string("xxxxx.txt")
)
pdf_1 = "cteq65"
pdf_2 = "MRST2006nnlo"
pdf_3= "MRST2007lomod"
### cteq65 has 1 + 2*20 members ###
for i in range(41):
module = copy.deepcopy(process.zpf)
setattr(module, "pdfweights", "pdfWeights:cteq65")
setattr(module, "pdfmember", i)
setattr(module, "outfilename", "cteq65.txt")
    moduleLabel = module.label() + pdf_1 + "_" + str(i)
setattr(process, moduleLabel, module)
if i == 0:
seq = module
else:
seq = seq + module
### MRST2006nnlo has 1 + 2*30 members ###
for j in range(61):
module = copy.deepcopy(process.zpf)
setattr(module, "pdfweights", "pdfWeights:MRST2006nnlo")
setattr(module, "pdfmember", j)
setattr(module, "outfilename", "MRST2006nnlo.txt")
    moduleLabel = module.label() + pdf_2 + "_" + str(j)
setattr(process, moduleLabel, module)
# needed only if the sequence is filled for the first time
# if j == 0:
# seq_2 = module
# else:
seq = seq + module
### MRST2007lomod has 1 member ###
for k in range(1):
module = copy.deepcopy(process.zpf)
setattr(module, "pdfweights", "pdfWeights:MRST2007lomod")
setattr(module, "pdfmember", k)
setattr(module, "outfilename", "MRST2007lomod.txt")
    moduleLabel = module.label() + pdf_3 + "_" + str(k)
setattr(process, moduleLabel, module)
# needed only if the sequence is filled for the first time
# if k == 0:
# seq_3 = module
# else:
seq = seq + module
print("sequence", seq)
process.options = cms.untracked.PSet(
wantSummary = cms.untracked.bool(True)
)
process.path=cms.Path(seq)
process.end = cms.EndPath(process.evtInfo )
|
StarcoderdataPython
|
82938
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from mock import MagicMock
from pika.adapters import SelectConnection
from pika.channel import Channel
from pika.exceptions import AMQPChannelError, AMQPConnectionError
from pika.spec import Basic, BasicProperties
from rabbitleap.consumer import Consumer
from rabbitleap.envelope import Envelope
from rabbitleap.exceptions import AbortHandling, HandlingError, SkipHandling
from rabbitleap.handling import Handler, MessageHandler
from rabbitleap.retry_policies import (FixedDelayLimitedRetriesPolicy,
FixedDelayUnlimitedRetriesPolicy,
LimitedRetriesPolicy, RetryPolicy,
UnlimitedRetriesPolicy)
from rabbitleap.routing import (AnyMatches, Matcher, MessageTypeMatches,
NoneMatches, Router, Rule, RuleRouter)
def create_envelope(payload='some payload'):
prop = BasicProperties(
content_type='application/json',
content_encoding='utf-8',
headers=None,
delivery_mode=2,
priority=7,
correlation_id='some-correlation-id',
reply_to='reply_to_name',
expiration='some-expiration',
message_id='some-message-id',
timestamp='123456789',
type='message-type-a',
user_id='user-id',
app_id='app-id',
cluster_id='cluster-id')
payload = payload or 'some data'
delivery_info = Basic.Deliver(
consumer_tag=34,
delivery_tag=132,
redelivered=True,
exchange='exchange1',
routing_key='routing_key1')
envelope = Envelope(prop, payload, delivery_info)
return envelope
def create_consumer():
amqp_url = r'amqp://guest:guest@localhost:5672/%2f'
queue_name = 'queue1'
durable = True
exclusive = True
dlx_name = 'dead_exchange'
auto_reconnect = True
auto_reconnect_delay = 10
consumer = Consumer(
amqp_url=amqp_url,
queue_name=queue_name,
durable=durable,
exclusive=exclusive,
dlx_name=dlx_name,
auto_reconnect=auto_reconnect,
auto_reconnect_delay=auto_reconnect_delay)
return consumer
class TestRabbitleap(TestCase):
def test_envelope(self):
prop = BasicProperties(
content_type='application/json',
content_encoding='utf-8',
headers=None,
delivery_mode=2,
priority=7,
correlation_id='some-correlation-id',
reply_to='reply_to_name',
expiration='some-expiration',
message_id='some-message-id',
timestamp='123456789',
type='message-type-a',
user_id='user-id',
app_id='app-id',
cluster_id='cluster-id')
payload = 'some data'
delivery_info = Basic.Deliver(
consumer_tag=34,
delivery_tag=132,
redelivered=True,
exchange='exchange1',
routing_key='routing_key1')
envelope = Envelope(prop, payload, delivery_info)
self.assertEqual(envelope.payload, payload)
self.assertEqual(envelope.properties, prop)
self.assertEqual(envelope.delivery_info, delivery_info)
self.assertEqual(envelope.consumer_tag, delivery_info.consumer_tag)
self.assertEqual(envelope.delivery_tag, delivery_info.delivery_tag)
self.assertEqual(envelope.redelivered, delivery_info.redelivered)
self.assertEqual(envelope.routing_key, delivery_info.routing_key)
self.assertEqual(envelope.exchange, delivery_info.exchange)
self.assertEqual(envelope.content_type, prop.content_type)
self.assertEqual(envelope.content_encoding, prop.content_encoding)
self.assertEqual(envelope.headers, prop.headers)
self.assertEqual(envelope.delivery_mode, prop.delivery_mode)
self.assertEqual(envelope.priority, prop.priority)
self.assertEqual(envelope.correlation_id, prop.correlation_id)
self.assertEqual(envelope.reply_to, prop.reply_to)
self.assertEqual(envelope.expiration, prop.expiration)
self.assertEqual(envelope.message_id, prop.message_id)
self.assertEqual(envelope.timestamp, prop.timestamp)
self.assertEqual(envelope.type, prop.type)
self.assertEqual(envelope.user_id, prop.user_id)
self.assertEqual(envelope.app_id, prop.app_id)
self.assertEqual(envelope.cluster_id, prop.cluster_id)
self.assertEqual(envelope.headers, prop.headers)
self.assertIsNone(envelope.get_header('some_header'))
envelope.set_header('some_header', 50)
envelope.set_header('another_header', 'hello')
self.assertEqual(envelope.headers,
dict(some_header=50, another_header='hello'))
self.assertEqual(envelope.get_header('some_header'), 50)
self.assertEqual(envelope.get_header('another_header'), 'hello')
self.assertIsNone(envelope.get_header('non_exist_header'))
def test_exceptions(self):
def raise_exception(Exception, **kwargs):
raise Exception(**kwargs)
msg = 'some message'
with self.assertRaises(AbortHandling) as cm:
raise_exception(AbortHandling, reason=msg)
self.assertEqual(cm.exception.reason, msg)
self.assertEqual(str(cm.exception), cm.exception.reason)
with self.assertRaises(SkipHandling) as cm:
raise_exception(SkipHandling, reason=msg)
self.assertEqual(cm.exception.reason, msg)
self.assertEqual(str(cm.exception), cm.exception.reason)
with self.assertRaises(HandlingError) as cm:
raise_exception(HandlingError, error_msg=msg)
self.assertEqual(cm.exception.error_msg, msg)
self.assertEqual(str(cm.exception), cm.exception.error_msg)
def test_handling(self):
envelope = create_envelope()
handler = Handler(envelope)
self.assertEqual(handler.envelope, envelope)
handler.initialize(key='value', key2='value2')
handler.pre_handle()
self.assertRaises(NotImplementedError, handler.handle)
handler.post_handle()
envelope = create_envelope()
consumer = Consumer('localhost', 'test')
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
consumer.connection = mock_connection
consumer.channel = mock_channel
handler = MessageHandler(consumer, envelope)
self.assertEqual(handler.envelope, envelope)
self.assertEqual(handler.consumer, consumer)
self.assertEqual(handler.channel, mock_channel)
handler.initialize(key='value', key2='value2')
handler.pre_handle()
self.assertRaises(NotImplementedError, handler.handle)
handler.post_handle()
msg = 'some message'
with self.assertRaises(AbortHandling) as cm:
handler.abort(reason=msg)
self.assertEqual(cm.exception.reason, msg)
with self.assertRaises(SkipHandling) as cm:
handler.skip(reason=msg)
self.assertEqual(cm.exception.reason, msg)
with self.assertRaises(HandlingError) as cm:
handler.error(error_msg=msg)
self.assertEqual(cm.exception.error_msg, msg)
def test_retry_policy(self):
envelope = create_envelope()
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
consumer.connection = mock_connection
consumer.channel = mock_channel
retry_policy = RetryPolicy()
self.assertRaises(
NotImplementedError, retry_policy.retry, envelope=envelope)
def test_unlimited_retries_policy(self):
envelope = create_envelope()
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
consumer.connection = mock_connection
consumer.channel = mock_channel
retry_policy = UnlimitedRetriesPolicy(
consumer=consumer,
initial_delay=0,
delay_incremented_by=5,
max_delay=30,
retry_queue_suffix='r')
mock_channel.queue_declare = MagicMock()
mock_channel.basic_publish = MagicMock()
mock_channel.basic_ack = MagicMock()
retry_policy.retry(envelope)
mock_channel.queue_declare.assert_called_with(
callback=None,
queue='{}.{}.{}'.format(consumer.queue_name,
retry_policy.retry_queue_suffix, 0),
durable=consumer.durable,
nowait=True,
arguments={
'x-dead-letter-exchange': '',
'x-dead-letter-routing-key': consumer.queue_name,
'x-message-ttl': 0,
'x-expires': 0 + retry_policy.min_retry_queue_ttl
})
mock_channel.basic_publish.assert_called_with(
exchange='',
routing_key='{}.{}.{}'.format(consumer.queue_name,
retry_policy.retry_queue_suffix, 0),
properties=envelope.properties,
body=envelope.payload)
mock_channel.basic_ack.assert_called_with(envelope.delivery_tag)
self.assertEqual(
envelope.get_header('x-original-delivery-info'), {
'consumer_tag': envelope.consumer_tag,
'delivery_tag': envelope.delivery_tag,
'redelivered': envelope.redelivered,
'exchange': envelope.exchange,
'routing_key': envelope.routing_key
})
envelope.set_header(
'x-death',
[{
'queue': consumer.queue_name,
'count': 1
},
{
'queue':
'{}.{}.{}'.format(consumer.queue_name,
retry_policy.retry_queue_suffix, 10000),
'count':
120
}, {
'queue': 'some-queue',
'count': 1
}])
retry_policy.retry(envelope)
delay = retry_policy.max_delay
retry_queue_name = '{}.{}.{}'.format(
consumer.queue_name, retry_policy.retry_queue_suffix, delay * 1000)
mock_channel.queue_declare.assert_called_with(
callback=None,
queue=retry_queue_name,
durable=consumer.durable,
nowait=True,
arguments={
'x-dead-letter-exchange': '',
'x-dead-letter-routing-key': consumer.queue_name,
'x-message-ttl': delay * 1000,
'x-expires': delay * 2 * 1000
})
mock_channel.basic_publish.assert_called_with(
exchange='',
routing_key=retry_queue_name,
properties=envelope.properties,
body=envelope.payload)
mock_channel.basic_ack.assert_called_with(envelope.delivery_tag)
self.assertEqual(
envelope.get_header('x-original-delivery-info'), {
'consumer_tag': envelope.consumer_tag,
'delivery_tag': envelope.delivery_tag,
'redelivered': envelope.redelivered,
'exchange': envelope.exchange,
'routing_key': envelope.routing_key
})
def test_limited_retries_policy(self):
envelope = create_envelope()
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
consumer.connection = mock_connection
consumer.channel = mock_channel
retry_policy = LimitedRetriesPolicy(
consumer=consumer,
retry_delays=(1, 5, 10, 50, 5 * 60),
retry_queue_suffix='r')
mock_channel.queue_declare = MagicMock()
mock_channel.basic_publish = MagicMock()
mock_channel.basic_ack = MagicMock()
mock_channel.basic_reject = MagicMock()
retry_policy.retry(envelope)
mock_channel.queue_declare.assert_called_with(
callback=None,
queue='{}.{}.{}'.format(consumer.queue_name,
retry_policy.retry_queue_suffix, 1000),
durable=consumer.durable,
nowait=True,
arguments={
'x-dead-letter-exchange': '',
'x-dead-letter-routing-key': consumer.queue_name,
'x-message-ttl': 1000,
'x-expires': retry_policy.min_retry_queue_ttl
})
mock_channel.basic_publish.assert_called_with(
exchange='',
routing_key='{}.{}.{}'.format(
consumer.queue_name, retry_policy.retry_queue_suffix, 1000),
properties=envelope.properties,
body=envelope.payload)
mock_channel.basic_ack.assert_called_with(envelope.delivery_tag)
self.assertEqual(
envelope.get_header('x-original-delivery-info'), {
'consumer_tag': envelope.consumer_tag,
'delivery_tag': envelope.delivery_tag,
'redelivered': envelope.redelivered,
'exchange': envelope.exchange,
'routing_key': envelope.routing_key
})
envelope.set_header(
'x-death',
[{
'queue': consumer.queue_name,
'count': 1
},
{
'queue':
'{}.{}.{}'.format(consumer.queue_name,
retry_policy.retry_queue_suffix, 10000),
'count':
3
}, {
'queue': 'some-queue',
'count': 1
}])
retry_policy.retry(envelope)
delay = 5 * 60
retry_queue_name = '{}.{}.{}'.format(
consumer.queue_name, retry_policy.retry_queue_suffix, delay * 1000)
mock_channel.queue_declare.assert_called_with(
callback=None,
queue=retry_queue_name,
durable=consumer.durable,
nowait=True,
arguments={
'x-dead-letter-exchange': '',
'x-dead-letter-routing-key': consumer.queue_name,
'x-message-ttl': delay * 1000,
'x-expires': delay * 2 * 1000
})
mock_channel.basic_publish.assert_called_with(
exchange='',
routing_key=retry_queue_name,
properties=envelope.properties,
body=envelope.payload)
mock_channel.basic_ack.assert_called_with(envelope.delivery_tag)
self.assertEqual(
envelope.get_header('x-original-delivery-info'), {
'consumer_tag': envelope.consumer_tag,
'delivery_tag': envelope.delivery_tag,
'redelivered': envelope.redelivered,
'exchange': envelope.exchange,
'routing_key': envelope.routing_key
})
envelope.set_header(
'x-death',
[{
'queue': consumer.queue_name,
'count': 1
},
{
'queue':
'{}.{}.{}'.format(consumer.queue_name,
retry_policy.retry_queue_suffix, 10000),
'count':
4
}, {
'queue': 'some-queue',
'count': 1
}])
retry_policy.retry(envelope)
mock_channel.basic_reject.assert_called_with(
envelope.delivery_tag, requeue=False)
envelope.set_header(
'x-death',
[{
'queue': consumer.queue_name,
'count': 1
},
{
'queue':
'{}.{}.{}'.format(consumer.queue_name,
retry_policy.retry_queue_suffix, 10000),
'count':
46
}, {
'queue': 'some-queue',
'count': 1
}])
retry_policy.retry(envelope)
mock_channel.basic_reject.assert_called_with(
envelope.delivery_tag, requeue=False)
def test_fixed_delay_unlimited_retries_policy(self):
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
consumer.connection = mock_connection
consumer.channel = mock_channel
retry_policy = FixedDelayUnlimitedRetriesPolicy(
consumer, 10, retry_queue_suffix='h')
self.assertEqual(isinstance(retry_policy, UnlimitedRetriesPolicy), True)
self.assertEqual(retry_policy.initial_delay, 10)
self.assertEqual(retry_policy.max_delay, 10)
self.assertEqual(retry_policy.delay_incremented_by, 0)
self.assertEqual(retry_policy.retry_queue_suffix, 'h')
def test_fixed_delay_limited_retries_policy(self):
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
consumer.connection = mock_connection
consumer.channel = mock_channel
retry_policy = FixedDelayLimitedRetriesPolicy(
consumer, delay=10, retries_limit=7, retry_queue_suffix='s')
self.assertEqual(isinstance(retry_policy, LimitedRetriesPolicy), True)
self.assertEqual(retry_policy.retry_delays, tuple([10] * 7))
self.assertEqual(retry_policy.retry_queue_suffix, 's')
def test_router(self):
envelope = create_envelope()
router = Router()
self.assertRaises(
NotImplementedError, router.find_handler, envelope=envelope)
def test_matcher(self):
envelope = create_envelope()
matcher = Matcher()
self.assertRaises(NotImplementedError, matcher.match, envelope=envelope)
def test_any_matches(self):
envelope = create_envelope()
matcher = AnyMatches()
self.assertEqual(matcher.match(None), True)
self.assertEqual(matcher.match(envelope), True)
def test_none_matches(self):
envelope = create_envelope()
matcher = NoneMatches()
self.assertEqual(matcher.match(None), False)
self.assertEqual(matcher.match(envelope), False)
def test_message_type_matches(self):
envelope = create_envelope()
# str pattern
matcher = MessageTypeMatches(r'cat\.jump')
self.assertEqual(matcher.match(envelope), False)
envelope.properties.type = 'cat.jump'
self.assertEqual(matcher.match(envelope), True)
matcher = MessageTypeMatches(r'(cat|rabbit)\.jump')
self.assertEqual(matcher.match(envelope), True)
envelope.properties.type = 'rabbit.jump'
self.assertEqual(matcher.match(envelope), True)
envelope.properties.type = 'dog.jump'
self.assertEqual(matcher.match(envelope), False)
envelope.properties.type = None
self.assertEqual(matcher.match(envelope), False)
        # re.Pattern
import re
envelope = create_envelope()
matcher = MessageTypeMatches(re.compile(r'cat\.jump'))
self.assertEqual(matcher.match(envelope), False)
envelope.properties.type = 'cat.jump'
self.assertEqual(matcher.match(envelope), True)
matcher = MessageTypeMatches(re.compile(r'(cat|rabbit)\.jump'))
self.assertEqual(matcher.match(envelope), True)
envelope.properties.type = 'rabbit.jump'
self.assertEqual(matcher.match(envelope), True)
envelope.properties.type = 'dog.jump'
self.assertEqual(matcher.match(envelope), False)
envelope.properties.type = None
self.assertEqual(matcher.match(envelope), False)
def test_rule(self):
with self.assertRaises(AssertionError):
Rule('test', None, None)
with self.assertRaises(AssertionError):
Rule(AnyMatches(), 'test', None)
with self.assertRaises(AssertionError):
Rule(AnyMatches(), Router, None)
router = Router()
Rule(AnyMatches(), router, None)
Rule(AnyMatches(), Handler, {'key': 10})
def test_rule_router(self):
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
consumer.connection = mock_connection
consumer.channel = mock_channel
with self.assertRaises(AssertionError):
RuleRouter(consumer, 'None rule object')
router = RuleRouter(consumer)
self.assertEqual(router.consumer, consumer)
envelope = create_envelope()
self.assertIsNone(router.find_handler(envelope))
default_rule = Rule(AnyMatches(), MessageHandler, None)
router.set_default_rule(default_rule)
self.assertIsNotNone(router.find_handler(envelope))
handler = router.find_handler(envelope)
self.assertEqual(isinstance(handler, Handler), True)
router.set_default_rule(None)
self.assertIsNone(router.find_handler(envelope))
router = RuleRouter(consumer, default_rule)
self.assertIsNotNone(router.find_handler(envelope))
class CustomHandler(MessageHandler):
def initialize(self, arg1, arg2, **kwargs):
super(CustomHandler, self).initialize(**kwargs)
self.arg1 = arg1
self.arg2 = arg2
message_type_matcher = MessageTypeMatches(r'message_a')
rule = Rule(message_type_matcher, CustomHandler, {'arg1': 1, 'arg2': 2})
router.add_rule(rule)
envelope.properties.type = 'message_a'
handler = router.find_handler(envelope)
self.assertEqual(isinstance(handler, CustomHandler), True)
self.assertEqual(handler.arg1, 1)
self.assertEqual(handler.arg2, 2)
self.assertEqual(handler.consumer, consumer)
router.set_default_rule(default_rule)
handler = router.find_handler(envelope)
self.assertEqual(isinstance(handler, CustomHandler), True)
envelope = create_envelope()
handler = router.find_handler(envelope)
self.assertEqual(isinstance(handler, Handler), True)
# test subrouting
default_subrouter_rule = Rule(message_type_matcher, CustomHandler, {
'arg1': 1,
'arg2': 2
})
subrouter = RuleRouter(consumer, default_subrouter_rule)
# default rule for main router is the subrouter
main_default_rule = Rule(AnyMatches(), subrouter, None)
main_router = RuleRouter(consumer, main_default_rule)
handler = main_router.find_handler(envelope)
self.assertIsNone(handler)
envelope.properties.type = 'message_a'
handler = main_router.find_handler(envelope)
self.assertEqual(isinstance(handler, CustomHandler), True)
self.assertEqual(handler.arg1, 1)
self.assertEqual(handler.arg2, 2)
self.assertEqual(handler.consumer, consumer)
def test_consumer(self):
amqp_url = r'amqp://guest:guest@localhost:5672/%2f'
queue_name = 'queue1'
durable = True
exclusive = True
dlx_name = 'dead_exchange'
auto_reconnect = True
auto_reconnect_delay = 10
consumer = Consumer(
amqp_url=amqp_url,
queue_name=queue_name,
durable=durable,
exclusive=exclusive,
dlx_name=dlx_name,
auto_reconnect=auto_reconnect,
auto_reconnect_delay=auto_reconnect_delay)
self.assertEqual(consumer.amqp_url, amqp_url)
self.assertEqual(consumer.queue_name, queue_name)
self.assertEqual(consumer.durable, durable)
self.assertEqual(consumer.exclusive, exclusive)
self.assertEqual(consumer.dlx_name, dlx_name)
self.assertEqual(consumer.auto_reconnect, auto_reconnect)
self.assertEqual(consumer.auto_reconnect_delay, auto_reconnect_delay)
# Test abort
with self.assertRaises(AbortHandling) as cm:
consumer.abort('some reason')
self.assertEqual(cm.exception.reason, 'some reason')
# Test skip
with self.assertRaises(SkipHandling) as cm:
consumer.skip('some reason')
self.assertEqual(cm.exception.reason, 'some reason')
# Test error
with self.assertRaises(HandlingError) as cm:
consumer.error('some error')
self.assertEqual(cm.exception.error_msg, 'some error')
# Test bind to exchange
exchange_name = 'exchange1'
routing_key = 'some_routing_key'
declare_exchange = True
declare_kwargs = {'type': 'topic'}
consumer.add_exchange_bind(
exchange_name=exchange_name,
routing_key=routing_key,
declare_exchange=declare_exchange,
declare_kwargs=declare_kwargs)
self.assertEqual(
consumer._exchange_binds[0],
((exchange_name, routing_key, declare_exchange, declare_kwargs)))
with self.assertRaises(AssertionError) as cm:
consumer.add_exchange_bind(
exchange_name=exchange_name,
routing_key=routing_key,
declare_exchange=declare_exchange,
declare_kwargs=None)
with self.assertRaises(AssertionError) as cm:
consumer.add_exchange_bind(
exchange_name=exchange_name,
routing_key=routing_key,
declare_exchange=declare_exchange,
declare_kwargs={})
# Test add handler
consumer.add_handler('some_pattern', Handler, None)
self.assertEqual(len(consumer.rules), 2)
self.assertEqual(
isinstance(consumer.rules[-2].matcher, MessageTypeMatches), True)
self.assertEqual(consumer.rules[-2].matcher.message_type_pattern,
'some_pattern')
consumer.add_handler(AnyMatches(), Handler, None)
self.assertEqual(len(consumer.rules), 3)
self.assertEqual(
isinstance(consumer.rules[-2].matcher, AnyMatches), True)
# Test set default handler
consumer.set_default_handler(Handler)
self.assertEqual(
isinstance(consumer.rules[-1].matcher, AnyMatches), True)
consumer.set_default_handler(None)
self.assertEqual(
isinstance(consumer.rules[-1].matcher, NoneMatches), True)
# Test set/unset policy
policy = FixedDelayLimitedRetriesPolicy(
consumer=consumer, delay=5, retries_limit=15)
consumer.set_retry_policy(policy)
self.assertEqual(consumer._retry_policy, policy)
self.assertRaises(AssertionError, consumer.set_retry_policy,
'non policy object')
consumer.unset_retry_policy()
self.assertIsNone(consumer._retry_policy)
# Test handling message
envelope = create_envelope()
# Create fresh consumer
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
mock_channel.basic_nack = MagicMock()
mock_channel.basic_reject = MagicMock()
consumer.channel = mock_channel
# Test no handler, should reject message
consumer._on_message(mock_channel, envelope.delivery_info,
envelope.properties, envelope.payload)
consumer.channel.basic_reject.assert_called_with(
envelope.delivery_tag, requeue=False)
# Test positive message handling
consumer.channel.basic_ack = MagicMock()
pre_handle_mock = MagicMock()
handle_mock = MagicMock()
post_handle_mock = MagicMock()
class SuccessHandler(Handler):
def __init__(self, envelope, **kwargs):
super(SuccessHandler, self).__init__(envelope, **kwargs)
self.pre_handle = pre_handle_mock
self.handle = handle_mock
self.post_handle = post_handle_mock
consumer.add_handler(AnyMatches(), SuccessHandler)
consumer._on_message(mock_channel, envelope.delivery_info,
envelope.properties, envelope.payload)
pre_handle_mock.assert_called_once()
handle_mock.assert_called_once()
post_handle_mock.assert_called_once()
consumer.channel.basic_ack.assert_called_with(envelope.delivery_tag)
# Test skip message handling
# Create fresh consumer
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
mock_channel.basic_nack = MagicMock()
mock_channel.basic_reject = MagicMock()
consumer.channel = mock_channel
consumer.channel.basic_ack = MagicMock()
pre_handle_mock = MagicMock()
handle_mock = MagicMock()
post_handle_mock = MagicMock()
class SkipHandler(MessageHandler):
def __init__(self, consumer, envelope, **kwargs):
super(SkipHandler, self).__init__(consumer, envelope, **kwargs)
self.handle = handle_mock
self.post_handle = post_handle_mock
def pre_handle(self):
self.skip(reason='some reason')
consumer.add_handler(AnyMatches(), SkipHandler)
consumer._on_message(mock_channel, envelope.delivery_info,
envelope.properties, envelope.payload)
handle_mock.assert_not_called()
post_handle_mock.assert_not_called()
consumer.channel.basic_ack.assert_called_with(envelope.delivery_tag)
# Test abort message handling
# Create fresh consumer
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
mock_channel.basic_nack = MagicMock()
mock_channel.basic_reject = MagicMock()
consumer.channel = mock_channel
consumer.channel.basic_ack = MagicMock()
pre_handle_mock = MagicMock()
handle_mock = MagicMock()
post_handle_mock = MagicMock()
class AbortHandler(MessageHandler):
def __init__(self, consumer, envelope, **kwargs):
super(AbortHandler, self).__init__(consumer, envelope, **kwargs)
self.handle = handle_mock
self.post_handle = post_handle_mock
def pre_handle(self):
self.abort(reason='some reason')
consumer.add_handler(AnyMatches(), AbortHandler)
consumer._on_message(mock_channel, envelope.delivery_info,
envelope.properties, envelope.payload)
handle_mock.assert_not_called()
post_handle_mock.assert_not_called()
consumer.channel.basic_reject.assert_called_with(
envelope.delivery_tag, requeue=False)
consumer.channel.basic_ack.assert_not_called()
# Test error message handling
# Create fresh consumer
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
mock_channel.basic_nack = MagicMock()
mock_channel.basic_reject = MagicMock()
consumer.channel = mock_channel
consumer.channel.basic_ack = MagicMock()
pre_handle_mock = MagicMock()
handle_mock = MagicMock()
post_handle_mock = MagicMock()
class ErrorHandler(MessageHandler):
def __init__(self, consumer, envelope, **kwargs):
super(ErrorHandler, self).__init__(consumer, envelope, **kwargs)
self.handle = handle_mock
self.post_handle = post_handle_mock
def pre_handle(self):
self.error(error_msg='some reason')
consumer.add_handler(AnyMatches(), ErrorHandler)
consumer._on_message(mock_channel, envelope.delivery_info,
envelope.properties, envelope.payload)
handle_mock.assert_not_called()
post_handle_mock.assert_not_called()
consumer.channel.basic_reject.assert_called_with(
envelope.delivery_tag, requeue=False)
consumer.channel.basic_ack.assert_not_called()
# Test error message handling with retry policy
# Create fresh consumer
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
mock_channel.basic_nack = MagicMock()
mock_channel.basic_reject = MagicMock()
consumer.channel = mock_channel
consumer.channel.basic_ack = MagicMock()
pre_handle_mock = MagicMock()
handle_mock = MagicMock()
post_handle_mock = MagicMock()
retry_policy = RetryPolicy()
retry_policy.retry = MagicMock()
consumer.set_retry_policy(retry_policy)
consumer.add_handler(AnyMatches(), ErrorHandler)
consumer._on_message(mock_channel, envelope.delivery_info,
envelope.properties, envelope.payload)
handle_mock.assert_not_called()
post_handle_mock.assert_not_called()
consumer.channel.basic_reject.assert_not_called()
consumer.channel.basic_ack.assert_not_called()
retry_policy.retry.assert_called_once()
# Test error message handling; general exception
# Create fresh consumer
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
mock_channel.basic_nack = MagicMock()
mock_channel.basic_reject = MagicMock()
consumer.channel = mock_channel
consumer.channel.basic_ack = MagicMock()
pre_handle_mock = MagicMock()
handle_mock = MagicMock()
post_handle_mock = MagicMock()
class ExceHandler(MessageHandler):
def __init__(self, consumer, envelope, **kwargs):
super(ExceHandler, self).__init__(consumer, envelope, **kwargs)
self.handle = handle_mock
self.post_handle = post_handle_mock
def pre_handle(self):
raise Exception()
consumer.add_handler(AnyMatches(), ExceHandler)
consumer._on_message(mock_channel, envelope.delivery_info,
envelope.properties, envelope.payload)
handle_mock.assert_not_called()
post_handle_mock.assert_not_called()
consumer.channel.basic_reject.assert_called_with(
envelope.delivery_tag, requeue=False)
consumer.channel.basic_ack.assert_not_called()
# Test error message handling; general exception with retry policy
# Create fresh consumer
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
mock_channel.basic_nack = MagicMock()
mock_channel.basic_reject = MagicMock()
consumer.channel = mock_channel
consumer.channel.basic_ack = MagicMock()
pre_handle_mock = MagicMock()
handle_mock = MagicMock()
post_handle_mock = MagicMock()
retry_policy = RetryPolicy()
retry_policy.retry = MagicMock()
consumer.set_retry_policy(retry_policy)
consumer.add_handler(AnyMatches(), ExceHandler)
consumer._on_message(mock_channel, envelope.delivery_info,
envelope.properties, envelope.payload)
handle_mock.assert_not_called()
post_handle_mock.assert_not_called()
consumer.channel.basic_reject.assert_not_called()
consumer.channel.basic_ack.assert_not_called()
retry_policy.retry.assert_called_once()
# Should handle AMQPConnectionError
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
mock_channel.basic_nack = MagicMock()
mock_channel.basic_reject = MagicMock()
consumer.channel = mock_channel
consumer.channel.basic_ack = MagicMock()
pre_handle_mock = MagicMock()
handle_mock = MagicMock()
post_handle_mock = MagicMock()
class AMQPConnectionErrorHandler(MessageHandler):
def __init__(self, consumer, envelope, **kwargs):
super(AMQPConnectionErrorHandler, self).__init__(
consumer, envelope, **kwargs)
self.handle = handle_mock
self.post_handle = post_handle_mock
def pre_handle(self):
raise AMQPConnectionError()
consumer.add_handler(AnyMatches(), AMQPConnectionErrorHandler)
consumer._on_message(mock_channel, envelope.delivery_info,
envelope.properties, envelope.payload)
handle_mock.assert_not_called()
post_handle_mock.assert_not_called()
consumer.channel.basic_reject.assert_not_called()
consumer.channel.basic_ack.assert_not_called()
# Should handle AMQPChannelError
consumer = create_consumer()
mock_connection = SelectConnection()
mock_channel = Channel(mock_connection, 10, None)
mock_channel.basic_nack = MagicMock()
mock_channel.basic_reject = MagicMock()
consumer.channel = mock_channel
consumer.channel.basic_ack = MagicMock()
pre_handle_mock = MagicMock()
handle_mock = MagicMock()
post_handle_mock = MagicMock()
class AMQPChannelErrorHandler(MessageHandler):
def __init__(self, consumer, envelope, **kwargs):
super(AMQPChannelErrorHandler, self).__init__(
consumer, envelope, **kwargs)
self.handle = handle_mock
self.post_handle = post_handle_mock
def pre_handle(self):
raise AMQPChannelError()
consumer.add_handler(AnyMatches(), AMQPChannelErrorHandler)
consumer._on_message(mock_channel, envelope.delivery_info,
envelope.properties, envelope.payload)
handle_mock.assert_not_called()
post_handle_mock.assert_not_called()
consumer.channel.basic_reject.assert_not_called()
consumer.channel.basic_ack.assert_not_called()
def test_consumer_start_up_process(self):
consumer = create_consumer()
envelope = create_envelope()
consumer.connection = SelectConnection()
mock_channel = Channel(consumer.connection, 10, None)
# Mock exchange_declare
mock_channel.exchange_declare = MagicMock()
def on_exchange_declareok(callback, exchange, exchange_type, durable,
auto_delete, internal, arguments):
self.assertEqual(exchange, 'exchange1')
self.assertEqual(exchange_type, 'topic')
self.assertEqual(durable, False)
self.assertEqual(auto_delete, False)
self.assertEqual(internal, False)
self.assertEqual(arguments, None)
callback(None)
mock_channel.exchange_declare.side_effect = on_exchange_declareok
# Mock queue_declare
mock_channel.queue_declare = MagicMock()
def on_consumer_queue_declareok(callback, queue, durable, exclusive,
arguments):
self.assertEqual(queue, consumer.queue_name)
self.assertEqual(durable, consumer.durable)
self.assertEqual(exclusive, consumer.exclusive)
self.assertEqual(arguments,
{'x-dead-letter-exchange': consumer.dlx_name})
callback(None)
mock_channel.queue_declare.side_effect = on_consumer_queue_declareok
# Mock queue_bind
mock_channel.queue_bind = MagicMock()
def on_bindok(callback, queue, exchange, routing_key):
self.assertEqual(queue, consumer.queue_name)
self.assertEqual(routing_key, 'key1')
callback(None)
mock_channel.queue_bind.side_effect = on_bindok
mock_channel.add_on_close_callback = MagicMock()
# Mock basic_consume
mock_channel.basic_consume = MagicMock()
def on_message(callback, queue):
self.assertEqual(queue, consumer.queue_name)
mock_channel.basic_reject = MagicMock()
callback(mock_channel, envelope.delivery_info, envelope.properties,
envelope.payload)
mock_channel.basic_consume.side_effect = on_message
# connection.channel method used to open a new channel
# mock method is used to return the created mock_channel
consumer.connection.channel = MagicMock()
def on_channel_open(on_open_callback):
on_open_callback(mock_channel)
consumer.connection.channel.side_effect = on_channel_open
consumer.connection.ioloop.start = MagicMock()
consumer.connection.ioloop.stop = MagicMock()
consumer.add_exchange_bind(
exchange_name='exchange1',
routing_key='key1',
declare_exchange=True,
declare_kwargs={'type': 'topic'})
consumer.add_exchange_bind(
exchange_name='exchange1', routing_key='key1')
# Initiate open connection manually
consumer._on_connection_open(None)
mock_channel.add_on_close_callback.assert_called_once()
mock_channel.queue_declare.assert_called_once()
mock_channel.basic_reject.assert_called_once()
queue_bind_count = len(mock_channel.queue_bind.call_args_list)
self.assertEqual(queue_bind_count, 2)
mock_channel.basic_consume.assert_called_once()
# Stopping consumer
# Mock add_timeout
consumer.connection.add_timeout = MagicMock()
def add_timeout_side_effect(delay, callback):
callback()
consumer.connection.add_timeout.side_effect = add_timeout_side_effect
# Mock basic_cancel
consumer.channel.basic_cancel = MagicMock()
def on_cancelok(callback, consumer_tag):
callback(None)
consumer.channel.basic_cancel.side_effect = on_cancelok
# Mock channel close
consumer.channel.close = MagicMock()
def channel_close_side_effect():
consumer._on_channel_closed(consumer.channel, '', '')
consumer.channel.close.side_effect = channel_close_side_effect
# Mock connection close
consumer.connection.close = MagicMock()
def on_connection_closed():
consumer._on_connection_closed(consumer.connection, '', '')
consumer.connection.close.side_effect = on_connection_closed
# Simulate stop
consumer.stop()
consumer.connection.add_timeout.assert_called_once()
consumer.connection.close.assert_called_once()
self.assertIsNone(consumer.channel)
consumer.connection.ioloop.stop.assert_called_once()
def test_consumer_auto_reconnect(self):
consumer = create_consumer()
consumer.connection = SelectConnection()
consumer.connection.ioloop.stop = MagicMock()
consumer.connection.ioloop.start = MagicMock()
# Mock add_timeout
consumer.connection.add_timeout = MagicMock()
def add_timeout_side_effect(delay, callback):
self.assertEqual(delay, consumer.auto_reconnect_delay)
self.assertEqual(callback, consumer._reconnect)
consumer.connection.add_timeout.side_effect = add_timeout_side_effect
# Simulate connection failure
consumer._on_connection_closed(consumer.connection, '', '')
consumer.connection.add_timeout.assert_called_once()
# Test shutdown on connection failure
consumer.auto_reconnect = False
consumer._on_connection_closed(consumer.connection, '', '')
consumer.connection.ioloop.stop.assert_called_once()
def test_consumer_auto_reconnect_error(self):
consumer = create_consumer()
consumer.connection = SelectConnection()
consumer.connection.ioloop.stop = MagicMock()
consumer.connection.ioloop.start = MagicMock()
# Mock add_timeout
consumer.connection.add_timeout = MagicMock()
def add_timeout_side_effect(delay, callback):
self.assertEqual(delay, consumer.auto_reconnect_delay)
self.assertEqual(callback, consumer._reconnect)
consumer.connection.add_timeout.side_effect = add_timeout_side_effect
# Simulate reconnection failure
consumer._on_open_connection_error(consumer.connection, Exception())
consumer.connection.add_timeout.assert_called_once()
# Test shutdown on reconnection failure
consumer.auto_reconnect = False
consumer._on_open_connection_error(consumer.connection, Exception())
consumer.connection.ioloop.stop.assert_called_once()
def test_consumer_reconnect(self):
consumer = create_consumer()
consumer.connection = SelectConnection()
consumer._connect = MagicMock()
def connect_side_effect():
return consumer.connection
consumer._connect.side_effect = connect_side_effect
consumer.connection.ioloop.stop = MagicMock()
consumer.connection.ioloop.start = MagicMock()
consumer._reconnect()
consumer._connect.assert_called_once()
consumer.connection.ioloop.stop.assert_called_once()
consumer.connection.ioloop.start.assert_called_once()
def test_consumer_restart(self):
consumer = create_consumer()
consumer.connection = SelectConnection()
consumer.connection.ioloop.stop = MagicMock()
consumer.connection.ioloop.start = MagicMock()
# Mock add_timeout
consumer.connection.add_timeout = MagicMock()
def add_timeout_side_effect(delay, callback):
self.assertEqual(delay, 0)
self.assertEqual(callback, consumer._stop_consuming)
consumer.connection.add_timeout.side_effect = add_timeout_side_effect
consumer.restart()
consumer.connection.add_timeout.assert_called_once()
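# A conventional entry point (an assumed addition, not in the original file)
# so the suite can be run directly; it presumes `unittest` is imported at the
# top of this module, as is standard for TestCase-based files.
if __name__ == '__main__':
unittest.main()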
|
StarcoderdataPython
|
3272930
|
import win32com.client # required for win32 users
import Downloaders.MTS, Downloaders.URL
import Installer.Sims2Pack, Installer.Inteen, Installer.Sims3Pack
SM.Temp = "out" # usually not used
SM.Handlers["ModTheSims"] = Downloaders.MTS.Downloader
SM.Handlers["url"] = Downloaders.URL.Downloader
SM.Fogs = ["samples/inteen.fog", "samples/wideloading.fog"]
GameDocs = {"Sims2UltimateCollection":"EA Games\\The Sims™ 2 Ultimate Collection",
"Sims3": "Electronic Arts\\The Sims 3"}
Documents = win32com.client.Dispatch("WScript.Shell").SpecialFolders("MyDocuments") + "\\" + GameDocs['Sims2UltimateCollection'] + "\\"
SM.Scope['Directories'] = {
"Downloads": Documents+"Downloads",
"Cameras": Documents+"Cameras",
"LatestEPResUI": "C:\\Program Files (x86)\\Origin Games\\The Sims 2 Ultimate Collection\\Apartment Life\\TSData\\Res\\UI",
"Cache": "__cache",
"PetBreeds": Documents+"PetBreeds",
"ModsPackages": Documents+"Mods\\Packages\\",
"ModsOverrides": Documents+"Mods\\Packages\\",
"ALOverrides": "C:\\Program Files (x86)\\Origin Games\\The Sims 2 Ultimate Collection\\Apartment Life\\TSData\\Res\\Overrides"
}
SM.Scope['FileTypes'] = {
'Txt': '*.txt',
'Package': '*.package',
'Sims2Pack': '*.Sims2Pack',
'Sims3Pack': '*.Sims3Pack',
'InTeenPackage': "*.package"
}
SM.Scope['Hooks'] = {
'InTeenPackage':
{
"Extracted": Installer.Inteen._
}
}
# The user agent may be required for some downloaders to work.
# Some servers decline user agents which look suspicious.
# By default, this is Chrome 51.0.2704.103.
# User agent does not specify what will download the file(s). It is used to represent the user's "browser".
SM.Scope['UserAgent'] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36"
# You can comment out (add # at the beginning of the line) the following line
# if you do not want version information to be appended to the user agent.
insmod("versionstring")
# Sleep after request - set 0 if you want to disable
# 0.5 may not be enough to keep servers from thinking that your network/computer is a bot. Even if it is.
SM.Pause = 0.5
|
StarcoderdataPython
|
3239657
|
# communication with the DeLight wallet
# to read atm devault wallet and send bought amount to client
# for readability I define commands for os as a string
# before calling
import os
import json
import config as c
def start_daemon():
print('Starting daemon.')
cmd = 'DeLight/delight daemon start'
os.system(cmd)
def stop_daemon():
print('Stopping daemon')
cmd = 'DeLight/delight daemon stop'
os.system(cmd)
def load_wallet(path_to_wallet):
print('Loading wallet...')
cmd = f'DeLight/delight -v daemon load_wallet -w {path_to_wallet}'
output = os.popen(cmd).read()
def get_balance(path_to_wallet):
load_wallet(path_to_wallet)
print('getting balance')
cmd = f'DeLight/delight -v getbalance -w {path_to_wallet}'
balance_data = os.popen(cmd).read()
# use json to convert str to dict and get balance
# and make sure atm balance is updated
confirmed_balance = float(json.loads(balance_data)['confirmed'])
print(confirmed_balance)
if len(json.loads(balance_data)) > 1:
unconfirmed_balance = float(json.loads(balance_data)['unconfirmed'])
print(unconfirmed_balance)
if unconfirmed_balance < 0:
return int(confirmed_balance + unconfirmed_balance)
return int(confirmed_balance)  # was: round(confirmed_balance, 0)
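# Illustrative note on the assumed wallet output: `getbalance` is expected to
# print JSON along the lines of '{"confirmed": "12.345", "unconfirmed": "-0.5"}',
# which is why get_balance() above indexes json.loads(balance_data)['confirmed'].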
def deposit(address, amount, path_to_wallet):
print('Beginning deposit')
# the wallet can only handle 3 decimals or it breaks
try:
amount = int(amount)  # was: float(amount)
print('amount: ', amount)
# Set fee_per_kb and confirmed_only to make sure tx goes to mempool
feecmd = './DeLight/delight -v setconfig fee_per_kb ' + str(c.TX_FEE_PER_KB)
print(feecmd,os.popen(feecmd).read())
confirmed_cmd = './DeLight/delight -v setconfig confirmed_only true'
print(confirmed_cmd,os.popen(confirmed_cmd).read())
load_wallet(path_to_wallet)
## make tx - Removed fees and set fee above instead
txcmd = f'./DeLight/delight payto -v -w {path_to_wallet} {address} {amount}'
# set true to avoid an error when converting data to a dict; can be anything
true = True
tx_data = os.popen(txcmd).read()
# use json to convert str to dict
hex = json.loads(tx_data)['hex']
print(hex)
broadcast_cmd = f'DeLight/delight -v broadcast {hex}'
tx = os.popen(broadcast_cmd).read()
tx_id = json.loads(tx)[1]
print(tx_id)
except ValueError as e:
tx_id = e
return tx_id
|
StarcoderdataPython
|
3338703
|
from __future__ import print_function
import sys
import os
import copy
# import time
import unittest
import logging
# import numpy as np
import torch
# import torch.nn as nn
# from torch.nn import Parameter
# import torch.nn.functional as F
# import torch_mlu
import torch_mlu.core.mlu_model as ct
cur_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(cur_dir + "/../../")
from common_utils import testinfo, TestCase # pylint: disable=C0413,C0411
logging.basicConfig(level=logging.DEBUG)
class TestTanhOp(TestCase):
# @unittest.skip("not test")
@testinfo()
def test_tanh_tensor_scalar_contiguous(self):
in_shape = [(10), (15, 19), (25, 19, 13), (13, 31, 16, 19), (14, 19, 21, 23, 21),
(16, 17, 18, 19, 20, 21)]
for shape in in_shape:
input_data = torch.randn(shape, dtype=torch.float)
input_data_mlu = input_data.to(ct.mlu_device())
output_cpu = torch.tanh(input_data)
output_mlu = torch.tanh(input_data_mlu)
# test scalar
scalar_cpu = input_data.sum()
scalar_mlu = scalar_cpu.to(ct.mlu_device())
out_scalar_cpu = torch.tanh(scalar_cpu)
out_scalar_mlu = torch.tanh(scalar_mlu)
# test inplace operation
input_mlu_ptr = input_data_mlu.data_ptr()
input_data_mlu.tanh_()
self.assertEqual(input_mlu_ptr, input_data_mlu.data_ptr())
self.assertTensorsEqual(
output_cpu, output_mlu.cpu(), 0.003, use_MSE=True)
self.assertTensorsEqual(
out_scalar_cpu, out_scalar_mlu.cpu(), 0.003, use_MSE=True)
self.assertTensorsEqual(
output_cpu, input_data_mlu.cpu().float(), 0.003, use_MSE=True)
# @unittest.skip("not test")
@testinfo()
def test_tanh_tensor_scalar_channel_last(self):
in_shape = [(13, 31, 16, 19), (14, 19, 21, 23, 21)]
for shape in in_shape:
input_data = torch.randn(shape, dtype=torch.float)
input_data = self.convert_to_channel_last(input_data)
input_data_mlu = input_data.to(ct.mlu_device())
output_cpu = torch.tanh(input_data)
output_mlu = torch.tanh(input_data_mlu)
# test inplace operation
input_mlu_ptr = input_data_mlu.data_ptr()
input_data_mlu.tanh_()
self.assertEqual(input_mlu_ptr, input_data_mlu.data_ptr())
self.assertTensorsEqual(
output_cpu, output_mlu.cpu(), 0.003, use_MSE=True)
self.assertTensorsEqual(
output_cpu, input_data_mlu.cpu().float(), 0.003, use_MSE=True)
# @unittest.skip("not test")
@testinfo()
def test_tanh_tensor_scalar_not_dense(self):
in_shape = [(15, 19 * 2), (25, 19, 13 * 2), (13, 31, 16, 19 * 2), (14, 19, 21, 23, 21 * 2),
(16, 17, 18, 19, 20, 21 * 2)]
for shape in in_shape:
input_data = torch.empty(0)
if len(shape) == 2:
input_data = torch.randn(shape, dtype=torch.float)[:, :int(shape[-1] / 2)]
elif len(shape) == 3:
input_data = torch.randn(shape, dtype=torch.float)[:, :, :int(shape[-1] / 2)]
elif len(shape) == 4:
input_data = torch.randn(shape, dtype=torch.float)[:, :, :, :int(shape[-1] / 2)]
elif len(shape) == 5:
input_data = torch.randn(shape, dtype=torch.float)[:, :, :, :, :int(shape[-1] / 2)]
elif len(shape) == 6:
input_data = torch.randn(shape,\
dtype=torch.float)[:, :, :, :, :, :int(shape[-1] / 2)]
input_data = self.convert_to_channel_last(input_data)
input_data_mlu = input_data.to(ct.mlu_device())
output_cpu = torch.tanh(input_data)
output_mlu = torch.tanh(input_data_mlu)
# test inplace operation
input_mlu_ptr = input_data_mlu.data_ptr()
input_data_mlu.tanh_()
self.assertEqual(input_mlu_ptr, input_data_mlu.data_ptr())
self.assertTensorsEqual(
output_cpu, output_mlu.cpu(), 0.003, use_MSE=True)
self.assertTensorsEqual(
output_cpu, input_data_mlu.cpu().float(), 0.003, use_MSE=True)
# @unittest.skip("not test")
@testinfo()
def test_tanh_dtype(self):
in_shape = [(10), (15, 19), (25, 19, 13), (13, 31, 16, 19), (14, 19, 21, 23, 21),
(16, 17, 18, 19, 20, 21)]
# now cnnlTanh only supports float and half
type_list = [torch.float, torch.half]
for shape in in_shape:
for typeId in type_list:
input_data = torch.randn(shape, dtype=torch.float)
input_data_cpu = input_data.to(typeId)
input_data_mlu = input_data_cpu.to(ct.mlu_device())
output_cpu = torch.tanh(input_data)
output_mlu = torch.tanh(input_data_mlu)
self.assertTensorsEqual(
output_cpu, output_mlu.cpu().float(), 0.003, use_MSE=True)
# @unittest.skip("not test")
@testinfo()
def test_tanh_backward(self):
in_shape = [(50), (35, 46), (16, 27, 38), (128, 4, 128, 124), (14, 19, 11, 13, 21),
(6, 7, 8, 9, 10, 11), (16, 17, 18, 19, 20, 21)]
type_list = [torch.float, torch.half]
for shape in in_shape:
for typeId in type_list:
x_0 = torch.randn(shape, dtype=torch.float, requires_grad=True)
x = x_0.to(typeId)
x_mlu = x.to(ct.mlu_device())
# use float on cpu kernel
out_cpu = x_0.tanh()
out_mlu = x_mlu.tanh()
grad = torch.randn(out_cpu.shape)
grad_mlu = grad.to(ct.mlu_device())
out_cpu.backward(grad)
out_grad_cpu = copy.deepcopy(x_0.grad)
x_0.grad.zero_()
out_mlu.backward(grad_mlu)
out_grad_mlu = copy.deepcopy(x_0.grad)
self.assertTensorsEqual(
out_grad_cpu,
out_grad_mlu.cpu().float() if typeId == torch.half else out_grad_mlu.cpu(),
0.003,
use_MSE=True)
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
1792107
|
from owl_model.modelobject import ModelObject
class Game(ModelObject):
"""
An OWL game
A match in OWL is a best-of-X competition where two teams play X games and
the winner of the match is considered to be the team winning the majority of
the games.
"""
cls_attr_types = {
'id': 'str',
'map': 'owl_model.map.Map',
'vod': 'owl_model.url.Vod',
'players': 'list[owl_model.player.Player]',
'state': 'str',
'status': 'str',
# stats: TODO: watch this field closely, this seems new.
'matchid': 'dict(str, str)'
}
cls_attr_map = {
'id': 'id',
'map': 'attributes',
'vod': 'vodLink',
'players': 'players',
'state': 'state',
'status': 'status',
#'stats': 'stats',
'matchid': 'match'
}
def __init__ (self, id=None, map=None, vod=None, players=None,
state=None, status=None, matchid=None):
"""
"""
self.id = id
self.map = map
self.vod = vod
self.players = players
self.state = state
self.status = status
self.matchid = matchid
def finalize_init (self):
"""
"""
self.matchid = self.matchid['id']
|
StarcoderdataPython
|
3295597
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
# Copyright (c) 2020-2021 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import json
import urllib.parse
from .util import (
toBool,
)
from .basicswap_util import (
strBidState,
SwapTypes,
)
from .chainparams import (
Coins,
)
from .ui import (
PAGE_LIMIT,
inputAmount,
describeBid,
setCoinFilter,
get_data_entry,
get_data_entry_or,
have_data_entry,
tickerToCoinId,
)
def js_error(self, error_str):
error_str_json = json.dumps({'error': error_str})
return bytes(error_str_json, 'UTF-8')
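# For example, js_error(handler, 'Offer not found.') returns
# b'{"error": "Offer not found."}'.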
def withdraw_coin(swap_client, coin_type, post_string, is_json):
if is_json:
post_data = json.loads(post_string)
post_data['is_json'] = True
else:
post_data = urllib.parse.parse_qs(post_string)
value = get_data_entry(post_data, 'value')
address = get_data_entry(post_data, 'address')
subfee = get_data_entry(post_data, 'subfee')
if not isinstance(subfee, bool):
subfee = toBool(subfee)
if coin_type == Coins.PART:
type_from = get_data_entry_or(post_data, 'type_from', 'plain')
type_to = get_data_entry_or(post_data, 'type_to', 'plain')
txid_hex = swap_client.withdrawParticl(type_from, type_to, value, address, subfee)
else:
txid_hex = swap_client.withdrawCoin(coin_type, value, address, subfee)
return {'txid': txid_hex}
def js_wallets(self, url_split, post_string, is_json):
if len(url_split) > 3:
ticker_str = url_split[3]
coin_type = tickerToCoinId(ticker_str)
if len(url_split) > 4:
cmd = url_split[4]
if cmd == 'withdraw':
return bytes(json.dumps(withdraw_coin(self.server.swap_client, coin_type, post_string, is_json)), 'UTF-8')
raise ValueError('Unknown command')
return bytes(json.dumps(self.server.swap_client.getWalletInfo(coin_type)), 'UTF-8')
return bytes(json.dumps(self.server.swap_client.getWalletsInfo()), 'UTF-8')
def js_offers(self, url_split, post_string, is_json, sent=False):
offer_id = None
if len(url_split) > 3:
if url_split[3] == 'new':
if post_string == '':
raise ValueError('No post data')
if is_json:
form_data = json.loads(post_string)
form_data['is_json'] = True
else:
form_data = urllib.parse.parse_qs(post_string)
offer_id = self.postNewOffer(form_data)
rv = {'offer_id': offer_id.hex()}
return bytes(json.dumps(rv), 'UTF-8')
offer_id = bytes.fromhex(url_split[3])
filters = {
'coin_from': -1,
'coin_to': -1,
'page_no': 1,
'limit': PAGE_LIMIT,
'sort_by': 'created_at',
'sort_dir': 'desc',
}
if offer_id:
filters['offer_id'] = offer_id
if post_string != '':
if is_json:
post_data = json.loads(post_string)
post_data['is_json'] = True
else:
post_data = urllib.parse.parse_qs(post_string)
filters['coin_from'] = setCoinFilter(post_data, 'coin_from')
filters['coin_to'] = setCoinFilter(post_data, 'coin_to')
if have_data_entry(post_data, 'sort_by'):
sort_by = get_data_entry(post_data, 'sort_by')
assert(sort_by in ['created_at', 'rate']), 'Invalid sort by'
filters['sort_by'] = sort_by
if have_data_entry(post_data, 'sort_dir'):
sort_dir = get_data_entry(post_data, 'sort_dir')
assert(sort_dir in ['asc', 'desc']), 'Invalid sort dir'
filters['sort_dir'] = sort_dir
if b'offset' in post_data:
filters['offset'] = int(get_data_entry(post_data, 'offset'))
if b'limit' in post_data:
filters['limit'] = int(get_data_entry(post_data, 'limit'))
assert(filters['limit'] > 0 and filters['limit'] <= PAGE_LIMIT), 'Invalid limit'
offers = self.server.swap_client.listOffers(sent, filters)
rv = []
for o in offers:
ci_from = self.server.swap_client.ci(o.coin_from)
ci_to = self.server.swap_client.ci(o.coin_to)
rv.append({
'addr_from': o.addr_from,
'addr_to': o.addr_to,
'offer_id': o.offer_id.hex(),
'created_at': o.created_at,
'expire_at': o.expire_at,
'coin_from': ci_from.coin_name(),
'coin_to': ci_to.coin_name(),
'amount_from': ci_from.format_amount(o.amount_from),
'amount_to': ci_to.format_amount((o.amount_from * o.rate) // ci_from.COIN()),
'rate': ci_to.format_amount(o.rate),
})
return bytes(json.dumps(rv), 'UTF-8')
def js_sentoffers(self, url_split, post_string, is_json):
return self.js_offers(url_split, post_string, is_json, True)
def js_bids(self, url_split, post_string, is_json):
swap_client = self.server.swap_client
if len(url_split) > 3:
if url_split[3] == 'new':
if post_string == '':
raise ValueError('No post data')
if is_json:
post_data = json.loads(post_string)
post_data['is_json'] = True
else:
post_data = urllib.parse.parse_qs(post_string)
offer_id = bytes.fromhex(get_data_entry(post_data, 'offer_id'))
assert(len(offer_id) == 28)
offer = swap_client.getOffer(offer_id)
assert(offer), 'Offer not found.'
ci_from = swap_client.ci(offer.coin_from)
amount_from = inputAmount(get_data_entry(post_data, 'amount_from'), ci_from)
addr_from = None
if have_data_entry(post_data, 'addr_from'):
addr_from = get_data_entry(post_data, 'addr_from')
if addr_from == '-1':
addr_from = None
if have_data_entry(post_data, 'validmins'):
valid_for_seconds = int(get_data_entry(post_data, 'validmins')) * 60
elif have_data_entry(post_data, 'valid_for_seconds'):
valid_for_seconds = int(get_data_entry(post_data, 'valid_for_seconds'))
else:
valid_for_seconds = 10 * 60
extra_options = {
'valid_for_seconds': valid_for_seconds,
}
if offer.swap_type == SwapTypes.XMR_SWAP:
bid_id = swap_client.postXmrBid(offer_id, amount_from, addr_send_from=addr_from, extra_options=extra_options)
else:
bid_id = swap_client.postBid(offer_id, amount_from, addr_send_from=addr_from, extra_options=extra_options)
if have_data_entry(post_data, 'debugind'):
swap_client.setBidDebugInd(bid_id, int(get_data_entry(post_data, 'debugind')))
rv = {'bid_id': bid_id.hex()}
return bytes(json.dumps(rv), 'UTF-8')
bid_id = bytes.fromhex(url_split[3])
assert(len(bid_id) == 28)
if post_string != '':
if is_json:
post_data = json.loads(post_string)
post_data['is_json'] = True
else:
post_data = urllib.parse.parse_qs(post_string)
if have_data_entry(post_data, 'accept'):
swap_client.acceptBid(bid_id)
elif have_data_entry(post_data, 'debugind'):
swap_client.setBidDebugInd(bid_id, int(get_data_entry(post_data, 'debugind')))
bid, xmr_swap, offer, xmr_offer, events = swap_client.getXmrBidAndOffer(bid_id)
assert(bid), 'Unknown bid ID'
edit_bid = False
show_txns = False
data = describeBid(swap_client, bid, xmr_swap, offer, xmr_offer, events, edit_bid, show_txns, for_api=True)
return bytes(json.dumps(data), 'UTF-8')
bids = swap_client.listBids()
return bytes(json.dumps([{
'bid_id': b[2].hex(),
'offer_id': b[3].hex(),
'created_at': b[0],
'expire_at': b[1],
'coin_from': b[9],
'amount_from': swap_client.ci(b[9]).format_amount(b[4]),
'bid_state': strBidState(b[5])
} for b in bids]), 'UTF-8')
def js_sentbids(self, url_split, post_string, is_json):
return bytes(json.dumps(self.server.swap_client.listBids(sent=True)), 'UTF-8')
def js_network(self, url_split, post_string, is_json):
return bytes(json.dumps(self.server.swap_client.get_network_info()), 'UTF-8')
def js_revokeoffer(self, url_split, post_string, is_json):
offer_id = bytes.fromhex(url_split[3])
assert(len(offer_id) == 28)
self.server.swap_client.revokeOffer(offer_id)
return bytes(json.dumps({'revoked_offer': offer_id.hex()}), 'UTF-8')
def js_index(self, url_split, post_string, is_json):
return bytes(json.dumps(self.server.swap_client.getSummary()), 'UTF-8')
def js_smsgaddresses(self, url_split, post_string, is_json):
swap_client = self.server.swap_client
if len(url_split) > 3:
if post_string == '':
raise ValueError('No post data')
if is_json:
post_data = json.loads(post_string)
post_data['is_json'] = True
else:
post_data = urllib.parse.parse_qs(post_string)
if url_split[3] == 'new':
addressnote = get_data_entry_or(post_data, 'addressnote', '')
new_addr, pubkey = swap_client.newSMSGAddress(addressnote)
return bytes(json.dumps({'new_address': new_addr, 'pubkey': pubkey}), 'UTF-8')
if url_split[3] == 'add':
addressnote = get_data_entry_or(post_data, 'addressnote', '')
pubkey_hex = get_data_entry(post_data, 'addresspubkey')
added_address = swap_client.addSMSGAddress(pubkey_hex, addressnote)
return bytes(json.dumps({'added_address': added_address, 'pubkey': pubkey_hex}), 'UTF-8')
elif url_split[3] == 'edit':
address = get_data_entry(post_data, 'address')
activeind = int(get_data_entry(post_data, 'active_ind'))
addressnote = get_data_entry_or(post_data, 'addressnote', '')
new_addr = swap_client.editSMSGAddress(address, activeind, addressnote)
return bytes(json.dumps({'edited_address': address}), 'UTF-8')
return bytes(json.dumps(swap_client.listAllSMSGAddresses()), 'UTF-8')
|
StarcoderdataPython
|
1792968
|
<reponame>alanshenpku/LeetCode<gh_stars>1-10
# Time: O(n^0.25 * logn)
# Space: O(logn)
# Let's say a positive integer is a superpalindrome
# if it is a palindrome, and it is also the square of a palindrome.
#
# Now, given two positive integers L and R (represented as strings),
# return the number of superpalindromes in the inclusive range [L, R].
#
# Example 1:
#
# Input: L = "4", R = "1000"
# Output: 4
# Explanation: 4, 9, 121, and 484 are superpalindromes.
# Note that 676 is not a superpalindrome: 26 * 26 = 676,
# but 26 is not a palindrome.
#
# Note:
# - 1 <= len(L) <= 18
# - 1 <= len(R) <= 18
# - L and R are strings representing integers in the range [1, 10^18).
# - int(L) <= int(R)
class Solution(object):
def superpalindromesInRange(self, L, R):
"""
:type L: str
:type R: str
:rtype: int
"""
def is_palindrome(k):
return str(k) == str(k)[::-1]
K = int((10**((len(R)+1)*0.25)))
l, r = int(L), int(R)
result = 0
# count odd length
for k in range(K):
s = str(k)
t = s + s[-2::-1]
v = int(t)**2
if v > r:
break
if v >= l and is_palindrome(v):
result += 1
# count even length
for k in range(K):
s = str(k)
t = s + s[::-1]
v = int(t)**2
if v > r:
break
if v >= l and is_palindrome(v):
result += 1
return result
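# Quick check against the example in the header comment: 4, 9, 121 and 484
# are the only superpalindromes in [4, 1000].
if __name__ == '__main__':
print(Solution().superpalindromesInRange("4", "1000"))  # expected: 4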
|
StarcoderdataPython
|
52342
|
#!/usr/bin/env python
# Author: <NAME>
# Date: March 15, 2020
from matplotlib import pyplot
from pydoc import pager
from time import sleep
import argparse
import datetime as dt
import json
import matplotlib
import pandas as pd
import requests
import seaborn
import sys
import yaml
def main():
''' Simple web scraping script to get covid-19 data using:
https://thevirustracker.com free API.
'''
if len(sys.argv) == 2:
if sys.argv[1] == '-h' or sys.argv[1] == '--help':
parser = argparse.ArgumentParser(
description='''COVID Scrapper v0.0.2''',
epilog='''Thanks for using our service.''')
parser.add_argument('-w', help='Print Worldwide COVID-19 data')
parser.add_argument('-g', help='Plot COVID-19 data')
parser.add_argument('-list',
help='Print a list of available countries and codes')
parser.add_argument('-s', metavar='[country]',
help='Print specific country COVID-19 data')
args = parser.parse_args()
if sys.argv[1] == '-w':
get_worldwide_stats(WORLDWIDE_URL)
sys.exit(0)
elif sys.argv[1] == '-list':
print_list_to_user()
sys.exit(0)
elif sys.argv[1] == '-g':
prep_data()
sys.exit(0)
elif len(sys.argv) > 2:
# Account for countries with spaces (e.g. United States)
string = ""
index = 2
while index < len(sys.argv):
if sys.argv[index] != " ":
string += sys.argv[index] + " "
index += 1
string = string.strip()
# This acts as if the user chose option #3 in the menu.
country = 'https://thevirustracker.com/free-api?countryTotal={}'\
.format(get_country_code(string))
get_country_stats(country)
else:
# No cli-arguments given.
menu_driver()
def menu_driver():
"""Program main driver.
The user can choose between menu options 1-5.
1. Worldwide stats
2. List of countries
3. Specific country stats (full country name or two-letter code)
4. Visualize total cases in the most infected countries
5. Exit the program
"""
done = False
while not done:
print_menu()
user_input = input("Please, enter option: ")
print('------------------------------------------------')
option_info = check_validity(user_input)
if option_info != -1:
if option_info == 5:
done = True
print("\n")
print("Thank you for using COVID-19 Scrapper. Stay safe!")
else:
evaluate_option(option_info)
else:
print("Please, enter a valid number option from 1 to 4....")
sleep(2)
print('------------------------------------------------')
def print_menu():
"""Prints the menu to the user."""
# TODO: think about plotting option in menu.
print()
print("COVID-19 Stats Scrapper. Please, select a number." + "\n")
print("1. To see worldwide stats.")
print("2. To see a list of the available countries and their"\
+ " respective abbreviations.")
print("3. To type a country or abrreviation and see their stats.")
print("4. To visualize Total Cases in the most infected countries.")
print("5. Exit")
def check_validity(option):
"""Check if the input received is a valid digit 1 to 4 inclusive."""
if option.isdigit():
numeric_option = int(option)
if numeric_option >= 1 and numeric_option <= 5:
return numeric_option
else:
return -1
else:
return -1
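# For instance: check_validity("3") -> 3, while check_validity("9") and
# check_validity("abc") both -> -1.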
def evaluate_option(user_option):
"""Evaluate the valid input from the user."""
if user_option == 1:
get_worldwide_stats(WORLDWIDE_URL)
elif user_option == 2:
print_list_to_user()
elif user_option == 3:
# Check if there are command line arguments
country_input = input("Please enter a country name or two-letter"\
+ " code of country to see COVID-19 stats.\n")
print("\n")
country = 'https://thevirustracker.com/free-api?countryTotal={}'\
.format(get_country_code(country_input))
get_country_stats(country)
elif user_option == 4:
prep_data()
else:
pass
def print_list_to_user():
with open('countries-json/country-by-abbreviation.json') as json_file:
number = 0
string = ""
for line in yaml.safe_load(json_file):
string += "{}. {}:{}".format(number, line['COUNTRY'],\
line['ABBREVIATION'] + '\n')
number += 1
number = 0
pager(string)
def check_country_is_valid(country):
"""Given the country full name or two-letter code; check if it's a valid
country by searching the countries.txt file for a match.
@param Country full name or country two-letter code.
@return True if country is valid False otherwise.
"""
l = []
fhandler = open('countries.txt', 'r')
for line in fhandler:
temp = line.strip('\n').split(":")
for e in temp:
l.append(e)
fhandler.close()
if country.upper() in l:
return True
else:
return False
def get_worldwide_stats(url):
"""Pull the world wide data from:
https://thevirustracker.com/free-api?global=stats
@param url of the worldwide stats
"""
response = requests.get(url, headers={"User-Agent": "XY"})
content = json.loads(response.content.decode())
#TODO: format to f strings for cleaner look
print()
print("Total cases: {val:,}".format(val=content['results'][0]['total_cases']))
print("Total New cases: {val:,}".format(val=content['results'][0]['total_new_cases_today']))
print("Total Recovered cases: {val:,}".format(val=content['results'][0]['total_recovered']))
print("Total Unresolved cases: {val:,}".format(val=content['results'][0]['total_unresolved']))
print("Total Deaths: {val:,}".format(val=content['results'][0]['total_deaths']))
print("Total Active Cases: {val:,}".format(val=content['results'][0]['total_active_cases']))
print("Total Serious Cases: {val:,}".format(val=content['results'][0]['total_serious_cases']))
death_rate = ((int(content['results'][0]['total_deaths'])) /\
(int(content['results'][0]['total_cases']))) * 100
print("Death Rate: {0:.2f}%".format(death_rate), '\n')
if len(sys.argv) == 1:
ask_user_if_continue()
# We are on script mode. Exit.
else:
sys.exit()
def get_country_stats(data):
"""Pull the world wide data from:
https://thevirustracker.com/free-api?global=stats
https://thevirustracker.com/free-api?countryTotal={@param}
@param url of the specific country stats
"""
response = requests.get(data, headers={"User-Agent": "XY"})
content = json.loads(response.content.decode())
#TODO: format to f strings for cleaner look
print('Country:', content['countrydata'][0]['info']['title'])
print("Total Cases: {val:,}".format(val=content['countrydata'][0]['total_cases']))
print('Total Active Cases: {val:,}'.format(val=content['countrydata'][0]['total_active_cases']))
print('Total Cases Recovered: {val:,}'.format(val=content['countrydata'][0]['total_recovered']))
print('Total Unresolved Cases: {val:,}'.format(val=content['countrydata'][0]['total_unresolved']))
print('Total Deaths Reported: {val:,}'.format(val=content['countrydata'][0]['total_deaths']), '\n')
death_rate = ((int(content['countrydata'][0]['total_deaths'])) /\
(int(content['countrydata'][0]['total_cases']))) * 100
print("Death Rate: {0:.2f}%".format(death_rate), '\n')
if len(sys.argv) == 1:
ask_user_if_continue()
# We are on script mode. Exit.
else:
sys.exit(0)
def prep_data():
'''Format the data for better visualization.
Format: Date Location New Cases New Deaths Total Cases Total Deaths
'''
amount_of_countries = int(input('How many countries would you like to'\
+ ' compare data? (15 countries max.) '))
if amount_of_countries <= 1 or amount_of_countries > 15:
# Default will be 10 if the number given as parameter is too high or
# too low
amount_of_countries = 10
data = pd.read_csv('https://covid.ourworldindata.org/data/ecdc/full_data.csv')
# Format the dates
data['date'] = [dt.datetime.strptime(x, '%Y-%m-%d') for x in data['date']]
# Format colum titles
data.columns = ['Date', 'Country', 'New Cases', 'New Deaths', 'Total Cases',\
'Total Deaths']
# Exclude countries from the data
countries_to_exclude = ['World']
data = data.loc[~(data['Country'].isin(countries_to_exclude))]
# Group the data by Country and Date. Look only at Total Cases and Total Deaths
data = pd.DataFrame(data.groupby(['Country', 'Date'])[['Total Cases',
'Total Deaths']].sum()).reset_index()
data = data.sort_values(by=['Country', 'Date'], ascending=False)
filtered_data = data.drop_duplicates(subset=['Country'], keep='first')
plot_data('Country', 'Total Cases', 'Total cases in the World',\
filtered_data, size=amount_of_countries)
def plot_data(parameter, value, title, data, size):
'''Plot cases and deaths as bar plot for X countries.
Function to plot bar plots using Seaborn.
'''
pyplot.style.use('dark_background')
f, ax = pyplot.subplots(1,1, figsize=(size*2, 5))
data = data.sort_values([value], ascending=False).reset_index(drop=True)
g = seaborn.barplot(data[parameter][0:size], data[value][0:size], palette='Set3')
g.set_title('Number of {} - highest {} values'.format(title, size))
pyplot.show()
if len(sys.argv) <= 1:
print('\n')
ask_user_if_continue()
else:
sys.exit()
def ask_user_if_continue():
decision = input("Would you like to continue using COVID-19 Scrapper? (y/n)")
if decision == 'y':
print_menu()
elif decision == 'n':
print("Thank you for using COVID-19 Scrapper. Stay safe!")
sys.exit()
def get_country_code(country):
"""Retrieve the two-letter code from the .json file
and return the code.
"""
country_code = ""
if not check_country_is_valid(country):
print("Please enter a valid country name or two-letter code.")
print("Consult the available country list with -list")
print('----------------------------------------------------------------------')
sys.exit(1)
with open('countries-json/country-by-abbreviation.json') as json_file:
country = country.upper()
if len(country) > 2:
for line in yaml.safe_load(json_file):
if line['COUNTRY'] == country:
country_code = line['ABBREVIATION']
return country_code
else:
return country
if __name__ == "__main__":
WORLDWIDE_URL = 'https://thevirustracker.com/free-api?global=stats'
main()
|
StarcoderdataPython
|
1788569
|
<reponame>yuchen352416/leetcode-example
#!/usr/bin/python3
from lib.ListLibraries import ListNode, ListNodeInitialize
class Solution:
def deleteNode(self, node: ListNode):
"""
:type node: ListNode
:rtype: void Do not return anything, modify node in-place instead.
"""
node.val = node.next.val
node.next = node.next.next
if __name__ == '__main__':
node = ListNodeInitialize([4, 5, 1, 9]).getNode()
deleteNode = node
while deleteNode.val != 1:
deleteNode = deleteNode.next
Solution().deleteNode(deleteNode)
print(deleteNode.val)
|
StarcoderdataPython
|
1658680
|
# Generated by Django 3.2.5 on 2021-07-25 21:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0002_product_image'),
('categories', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='category',
name='products',
field=models.ManyToManyField(blank=True, to='products.Product'),
),
]
|
StarcoderdataPython
|
3205901
|
import unittest
from stability_label_algorithm.modules.argumentation.argumentation_theory.argumentation_theory import ArgumentationTheory
from stability_label_algorithm.modules.dataset_generator.argumentation_system_generator.layered.\
layered_argumentation_system_generator import LayeredArgumentationSystemGenerator
from stability_label_algorithm.modules.dataset_generator.argumentation_system_generator.layered.\
layered_argumentation_system_generator_parameters import LayeredArgumentationSystemGeneratorParameters
from stability_label_algorithm.modules.dataset_generator.argumentation_system_property_computer.\
argumentation_system_property_computer import \
compute_argumentation_system_properties
from stability_label_algorithm.modules.dataset_generator.argumentation_theory_property_computer.\
incomplete_argumentation_framework import \
IncompleteArgumentationFramework
class TestLayeredDatasetGenerator(unittest.TestCase):
def test_layered_argumentation_system_generation(self):
# LayeredArgumentationSystemGeneratorParameters
literal_layer_distribution = {0: 3, 1: 2, 2: 1}
nr_of_literals = 6
nr_of_rules = 3
rule_antecedent_distribution = {1: 2, 2: 1}
argumentation_system_generation_parameters = \
LayeredArgumentationSystemGeneratorParameters(nr_of_literals, nr_of_rules,
rule_antecedent_distribution, literal_layer_distribution)
# Generate argumentation system
argumentation_system_generator = LayeredArgumentationSystemGenerator(argumentation_system_generation_parameters)
argumentation_system = argumentation_system_generator.generate()
# Check number of literals and rules
argumentation_system_properties = compute_argumentation_system_properties(argumentation_system)
self.assertEqual(nr_of_literals, argumentation_system_properties.nr_of_literals)
self.assertEqual(nr_of_rules, argumentation_system_properties.nr_of_rules)
self.assertEqual(rule_antecedent_distribution, argumentation_system_properties.rule_antecedent_distribution)
# Check layers
empty_argumentation_theory = ArgumentationTheory(argumentation_system, [])
inc_arg_fw = IncompleteArgumentationFramework.from_argumentation_theory(empty_argumentation_theory)
actual_literal_layers = [max([pot_arg.height for pot_arg in pot_arg_list])
for pot_arg_list in inc_arg_fw.potential_arguments_by_literal.values()]
actual_literal_layer_distribution = {layer_nr: actual_literal_layers.count(layer_nr)
for layer_nr in sorted(list(set(actual_literal_layers)))}
self.assertEqual(literal_layer_distribution, actual_literal_layer_distribution)
def test_impossible_rule_antecedent_distribution(self):
literal_layer_distribution = {0: 4}
nr_of_literals = 4
nr_of_rules = 1
rule_antecedent_distribution = {2: 1}
argumentation_system_generation_parameters = \
LayeredArgumentationSystemGeneratorParameters(nr_of_literals, nr_of_rules,
rule_antecedent_distribution, literal_layer_distribution)
argumentation_system_generator = LayeredArgumentationSystemGenerator(argumentation_system_generation_parameters)
with self.assertRaises(ValueError):
argumentation_system_generator.generate()
def test_two_layer_argumentation_system_generation(self):
literal_layer_distribution = {0: 19, 1: 1}
nr_of_literals = 20
nr_of_rules = 25
rule_antecedent_distribution = {3: 15, 2: 10}
argumentation_system_generation_parameters = \
LayeredArgumentationSystemGeneratorParameters(nr_of_literals, nr_of_rules,
rule_antecedent_distribution, literal_layer_distribution)
# Generate argumentation system
argumentation_system_generator = LayeredArgumentationSystemGenerator(argumentation_system_generation_parameters)
argumentation_system = argumentation_system_generator.generate()
# Check number of literals and rules
argumentation_system_properties = compute_argumentation_system_properties(argumentation_system)
self.assertEqual(nr_of_literals, argumentation_system_properties.nr_of_literals)
self.assertEqual(nr_of_rules, argumentation_system_properties.nr_of_rules)
self.assertEqual(rule_antecedent_distribution, argumentation_system_properties.rule_antecedent_distribution)
# Check layers
empty_argumentation_theory = ArgumentationTheory(argumentation_system, [])
inc_arg_fw = IncompleteArgumentationFramework.from_argumentation_theory(empty_argumentation_theory)
actual_literal_layers = [max([pot_arg.height for pot_arg in pot_arg_list])
for pot_arg_list in inc_arg_fw.potential_arguments_by_literal.values()]
actual_literal_layer_distribution = {layer_nr: actual_literal_layers.count(layer_nr)
for layer_nr in sorted(list(set(actual_literal_layers)))}
self.assertEqual(literal_layer_distribution, actual_literal_layer_distribution)
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
4813408
|
<filename>SuperFlow.py
from Packet import *
import socket
import Flow
# get the weighted median of interarrival times; vallist holds (value, frequency) pairs
def getMedian(vallist):
vallist.sort(key = lambda val:val[0])
tot = 0
cfreq = []
for val in vallist:
tot += val[1]
cfreq.append(tot)
medianindex = tot / 2
i = 0
while medianindex > cfreq[i]:
i += 1
return vallist[i][0]
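# Illustrative example: entries are (value, frequency) pairs, so
# getMedian([(1.0, 2), (5.0, 1), (2.0, 3)]) sorts by value and returns 2.0,
# the value at which the cumulative frequency first passes half the total.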
#defines a superflow
class SuperFlow(Flow.Flow):
def __init__(self, fields):
if fields == None:
self.ip1 = None
self.ip2 = None
self.key = None
self.prot = 0
self.n_packet1 = 0
self.n_byte1 = 0
self.t_start1 = 0
self.t_end1 = 0
self.t_interarrival1 = []
self.n_packet2 = 0
self.n_byte2 = 0
self.t_start2 = 0
self.t_end2 = 0
self.t_interarrival2 = []
else:
self.ip1 = socket.inet_aton(fields[0])
self.ip2 = socket.inet_aton(fields[1])
self.key = self.ip1 + self.ip2
self.prot = int(fields[2])
self.n_packet1 = int(fields[3])
self.n_byte1 = int(fields[4])
self.t_start1 = float(fields[5])
self.t_end1 = float(fields[6])
self.t_interarrival1 = [(float(fields[7]),self.n_packet1)]
self.n_packet2 = int(fields[8])
self.n_byte2 = int(fields[9])
self.t_start2 = float(fields[10])
self.t_end2 = float(fields[11])
self.t_interarrival2 = [(float(fields[12]),self.n_packet2)]
#get median of interarrival time irrespective of direction
def getInterArrivaltime(self):
combined = self.t_interarrival1 + self.t_interarrival2
if len(combined) > 0:
return getMedian(combined)
return 0
#interarrival time for direction1(arbitrary)
def getInterArrivaltime1(self):
if len(self.t_interarrival1) > 0:
return getMedian(self.t_interarrival1)
return 0
#interarrival time for direction2(arbitrary)
def getInterArrivaltime2(self):
if len(self.t_interarrival2) > 0:
return getMedian(self.t_interarrival2)
return 0
|
StarcoderdataPython
|
3295931
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import datetime
from lib.base import BaseGithubAction
class GetDeploymentStatusesAction(BaseGithubAction):
def run(self, api_user, repository, deployment_id, github_type):
enterprise = self._is_enterprise(github_type)
if api_user:
self.token = self._get_user_token(api_user, enterprise)
payload = {"id": deployment_id}
responses = self._request("GET",
"/repos/{}/deployments/{}/statuses".format(
repository, deployment_id),
payload,
self.token,
enterprise)
results = []
for response in responses:
ts_created_at = time.mktime(
datetime.datetime.strptime(
response['created_at'],
"%Y-%m-%dT%H:%M:%SZ").timetuple())
results.append({'creator': response['creator']['login'],
'id': response['id'],
'description': response['description'],
'state': response['state'],
'target_url': response['target_url'],
'created_at': response['created_at'],
'updated_at': response['updated_at'],
'ts_created_at': ts_created_at})
return results
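# A standalone sketch of the created_at -> epoch conversion used in run()
# above (the timestamp value is hypothetical; the format string is the one
# GitHub's API returns):
#
#   import datetime, time
#   time.mktime(datetime.datetime.strptime(
#       "2020-01-02T03:04:05Z", "%Y-%m-%dT%H:%M:%SZ").timetuple())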
|
StarcoderdataPython
|
1700131
|
from fastapi import APIRouter
from app.models.ready import ReadyResponse
router = APIRouter()
@router.get("/ready", response_model=ReadyResponse)
async def readiness_check():
    """Readiness probe: reports that the service is up and able to serve."""
    return ReadyResponse(status="ok")
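# A minimal usage sketch, assuming a standard FastAPI application object
# (the `app` below is hypothetical; TestClient needs fastapi's test extras):
#
#   from fastapi import FastAPI
#   from fastapi.testclient import TestClient
#
#   app = FastAPI()
#   app.include_router(router)
#   assert TestClient(app).get("/ready").json() == {"status": "ok"}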
|
StarcoderdataPython
|
3232568
|
<filename>source/bot_commands.py<gh_stars>1-10
import discord
import requests
import json
import os
from dotenv import load_dotenv
from source.utils import putTableAll, putTableLong, putFixtures, fetchJSON, putMatches
from source.league_code import LEAGUE_CODE
from source.team_id import TEAM_ID
from source.exceptions import *
def getStandings(code, mode='long'):
"""
Function that delivers standings in text format.
    Queries the cache for the requested data; if not found,
    loads the data from the API and caches it
Parameters:
-----------
code: str
The ID of the league for which standings are required
mode: 'long' or 'all', optional
* defaults to 'long'
* 'long' -> SNO, Team name, Matches Played, Points Obtained
* 'all' -> SNO, Team Code, Matches Played, Won, Drawn, Lost, Pts, Goal Difference
Returns:
--------
str
standings if code is valid, or an error message
"""
try:
if code not in LEAGUE_CODE:
raise InvalidLeagueCodeException
obj = fetchJSON(code, 'standings')
if mode == 'all':
return putTableAll(obj)
return putTableLong(obj)
except InvalidLeagueCodeException:
return None
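# Usage sketch (assumes 'PL' is a valid key in LEAGUE_CODE, as in the
# __main__ block at the bottom of this module):
#
#   getStandings('PL')               # compact standings table
#   getStandings('PL', mode='all')   # detailed table with team codes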
def getFixtures(code, limit: int):
"""
Displays the fixtures in the requested league / team as an embed
Fetches fixtures from JSON file and renders embed for it,
Displays 'limit' matches
Parameters:
-----------
code: str
The ID of the league or team for which fixtures are required
limit: int, optional
Number of fixtures to display (default value of 5)
Returns:
--------
discord.Embed
        Shows as many fixtures as requested.
        In case of an invalid code, a relevant help embed is returned
"""
try:
if limit < 0:
raise InvalidLimitException
mode = 'league'
if code not in LEAGUE_CODE:
if code in TEAM_ID:
mode = 'team'
else:
return discord.Embed(title='Please enter a valid code!',
description='Please Refer to **.team-codes** for team codes\
\nAnd **.league-codes** for league-codes',
color=0xf58300)
obj = fetchJSON(code, 'fixtures')
return putFixtures(obj, code, limit, mode)
except InvalidLimitException:
return discord.Embed(title='Limit must be greater than :zero:',
description="Enter a valid limit :smile:",
color=0xf58300)
def getMatches(code, limit: int):
    """
    Displays the live matches in the requested league / team as an embed

    Parameters:
    -----------
    code: str
        The ID of the league or team for which live matches are required
    limit: int
        Number of matches to display

    Returns:
    --------
    discord.Embed
        Shows as many live matches as requested.
        In case of an invalid code, a relevant help embed is returned
    """
try:
if limit < 0:
raise InvalidLimitException
mode = 'league'
if code not in LEAGUE_CODE:
if code in TEAM_ID:
mode = 'team'
else:
return discord.Embed(title='Please enter a valid code!',
description='Please Refer to **.team-codes** for team codes\
\nAnd **.league-codes** for league-codes',
color=0xf58300)
obj = fetchJSON(code, 'live')
return putMatches(obj, code, limit, mode)
except InvalidLimitException:
return discord.Embed(title='Limit must be greater than :zero:',
description="Enter a valid limit :smile:",
color=0xf58300)
def getLeagueCodes(title="League Codes"):
"""
Returns Leagues and their codes as an Embed
Parameters:
-----------
title: str, optional
Title of the embed (by default: "League Codes")
Returns:
--------
discord.Embed
Embed displaying league codes
"""
embed = discord.Embed(
title=title,
description="Refer codes for Top :five: Leagues here:",
color=0xf58300)
embed.add_field(name=':one: Premier League',
value='PL' + "\n\u200b", inline=False)
embed.add_field(name=':two: La Liga', value='SPA' +
"\n\u200b", inline=True)
embed.add_field(name=':three: Serie A', value='SA' +
"\n\u200b", inline=False)
embed.add_field(name=':four: Bundesliga',
value='BA' + "\n\u200b", inline=True)
embed.add_field(name=':five:Ligue 1', value='FL1', inline=False)
embed.add_field(name='For more leagues',
value='click [Here](https://github.com/MaheshBharadwaj/paneka/blob/master/README.md/#league-codes)')
return embed
def getTeamCodes(title="Team Codes"):
"""
Returns Teams and their codes as an Embed
Parameters:
-----------
title: str, optional
Title of the embed (by default: "Team Codes")
Returns:
--------
discord.Embed
Embed displaying team codes
"""
embed = discord.Embed(
title=title,
description="Refer codes for Top :one: :zero: Teams here:",
color=0xf58300)
embed.add_field(name='Real Madrid', value='MAD' + "\n\u200b", inline=True)
embed.add_field(name='FC Barcelona', value='FCB' + "\n\u200b", inline=True)
embed.add_field(name='Manchester United',
value='MUFC' + "\n\u200b", inline=True)
embed.add_field(name='Arsenal', value='AFC' + "\n\u200b", inline=True)
embed.add_field(name='Bayern Munich', value='BAY' +
"\n\u200b", inline=True)
embed.add_field(name='Chelsea', value='CFC' + "\n\u200b", inline=True)
embed.add_field(name='Juventus', value='JUVE' + "\n\u200b", inline=True)
embed.add_field(name='Atletico Madrid',
value='ATM' + "\n\u200b", inline=True)
embed.add_field(name='Liverpool', value='LFC' + "\n\u200b", inline=True)
    embed.add_field(name='Manchester City', value='MCFC', inline=True)
return embed
def getInviteEmbed(ctx):
"""
Generates Invite embed to invite bot
Parameters:
-----------
ctx: discord.Context
Context data passed by discord when a command is invoked
Returns:
--------
discord.Embed
Showing invite URL for the bot
"""
inviteEmbed = discord.Embed(
title='Invite link!',
description='URL for inviting bot to your servers'
)
inviteEmbed.add_field(
name=":warning: You need to be an admin to add bots :slight_smile:",
value="https://discord.com/api/oauth2/authorize?client_id=731544990446256198&permissions=60416&scope=bot"
)
return inviteEmbed
def getHelpEmbed(ctx=None):
"""
Generates the 'Help' embed when requested
Parameters:
-----------
ctx: discord.Context
Context data passed by discord when a command is invoked
Returns:
--------
discord.Embed
Showing help data for the commands available
"""
embed = discord.Embed(
title="Paneka-Help!",
description="Shows available commands and their functions\
\nNOTE: The command prefix is '.'",
color=0xf58300)
embed.set_thumbnail(
url="https://img.icons8.com/fluent/144/000000/get-help.png")
embed.add_field(name=":one: .standings-all [league code]", inline=False,\
value="Detailed Standings, with team codes")
embed.add_field(name=":two: .standings [league code]", inline=False,\
value="Display Standings")
embed.add_field(name=":three: .fixtures [league code or team code] [limit (default: :five: )]", inline=False,\
value="Displays fixtures of matches of the league or team",)
embed.add_field(name=":four: .live [league code or team code] [limit (default: :five: )]", inline=False,\
value='Display Live Matches of the league or team')
embed.add_field(name=":five: .league-codes", inline=False,\
value="Displays Leagues and their Respective Codes")
embed.add_field(name=":six: .team-codes", inline=False,\
value="Displayes Teams and their Respective Codes")
embed.add_field(name=":seven: .invite", inline=False,\
value="Invite bot to your servers!")
embed.add_field(
name="\u200b", value=":computer: Link to GitHub Repository: [Click Here](https://github.com/MaheshBharadwaj/paneka)", inline=False)
if ctx is not None:
embed.set_footer(text='Requested By: ' + str(ctx.author))
return embed
if __name__ == "__main__":
print(getStandings('PL'))
|
StarcoderdataPython
|
165198
|
import random
from collections import deque
from typing import Any
from srl.base.rl.base import RLRemoteMemory
class ExperienceReplayBuffer(RLRemoteMemory):
def __init__(self, *args):
super().__init__(*args)
def init(self, capacity: int):
self.memory = deque(maxlen=capacity)
def length(self) -> int:
return len(self.memory)
def restore(self, data: Any) -> None:
self.memory = data
def backup(self):
return self.memory
# ---------------------------
def add(self, batch: Any):
self.memory.append(batch)
def sample(self, batch_size: int):
return random.sample(self.memory, batch_size)
def clear(self):
self.memory.clear()
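# A minimal usage sketch; construction goes through the srl framework's
# RLRemoteMemory machinery, so the lines below assume an already-constructed
# instance `memory` and a hypothetical transition tuple:
#
#   memory.init(capacity=10_000)
#   memory.add((state, action, reward, next_state, done))
#   batch = memory.sample(batch_size=32)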
|
StarcoderdataPython
|
11518
|
# -*- coding: utf-8 -*-
"""Implicitly reference attributes of an object."""
from ast import Name, Assign, Load, Call, Lambda, With, Str, arg, \
Attribute, Subscript, Store, Del
from macropy.core.quotes import macros, q, u, name, ast_literal
from macropy.core.hquotes import macros, hq
from macropy.core.walkers import Walker
from .util import wrapwith, AutorefMarker
from .letdoutil import isdo, islet, ExpandedDoView, ExpandedLetView
from ..dynassign import dyn
from ..lazyutil import force1, mark_lazy
# with autoref(o):
# with autoref(scipy.loadmat("mydata.mat")): # evaluate once, assign to a gensym
# with autoref(scipy.loadmat("mydata.mat")) as o: # evaluate once, assign to given name
#
# We need something like::
#
# with autoref(o):
# x # --> (o.x if hasattr(o, "x") else x)
# x.a # --> (o.x.a if hasattr(o, "x") else x.a)
# x[s] # --> (o.x[s] if hasattr(o, "x") else x[s])
# o # --> o
# with autoref(p):
# x # --> (p.x if hasattr(p, "x") else (o.x if hasattr(o, "x") else x))
# x.a # --> (p.x.a if hasattr(p, "x") else (o.x.a if hasattr(o, "x") else x.a))
# x[s] # --> (p.x[s] if hasattr(p, "x") else (o.x[s] if hasattr(o, "x") else x[s]))
# o # --> (p.o if hasattr(p, "o") else o)
# o.x # --> (p.o.x if hasattr(p, "o") else o.x)
# o[s] # --> (p.o[s] if hasattr(p, "o") else o[s])
#
# One possible clean-ish implementation is::
#
# with AutorefMarker("o"): # no-op at runtime
# x # --> (lambda _ar271: _ar271[1] if _ar271[0] else x)(_autoref_resolve((o, "x")))
# x.a # --> ((lambda _ar271: _ar271[1] if _ar271[0] else x)(_autoref_resolve((o, "x")))).a
# x[s] # --> ((lambda _ar271: _ar271[1] if _ar271[0] else x)(_autoref_resolve((o, "x"))))[s]
# o # --> o (can only occur if an asname is supplied)
# with AutorefMarker("p"):
# x # --> (lambda _ar314: _ar314[1] if _ar314[0] else x)(_autoref_resolve((p, o, "x")))
# x.a # --> ((lambda _ar314: _ar314[1] if _ar314[0] else x)(_autoref_resolve((p, o, "x"))).a
# x[s] # --> ((lambda _ar314: _ar314[1] if _ar314[0] else x)(_autoref_resolve((p, o, "x")))[s]
# # when the inner autoref expands, it doesn't know about the outer one, so we will get this:
# o # --> (lambda _ar314: _ar314[1] if _ar314[0] else o)(_autoref_resolve((p, "o")))
# o.x # --> ((lambda _ar314: _ar314[1] if _ar314[0] else o)(_autoref_resolve((p, "o")))).x
# o[s] # --> ((lambda _ar314: _ar314[1] if _ar314[0] else o)(_autoref_resolve((p, "o"))))[s]
# # the outer autoref needs the marker to know to skip this (instead of looking up o.p):
# p # --> p
#
# The lambda is needed, because the lexical-variable lookup for ``x`` must occur at the use site,
# and it can only be performed by Python itself. We could modify ``_autoref_resolve`` to take
# ``locals()`` and ``globals()`` as arguments and look also in the ``builtins`` module,
# but that way we get no access to the enclosing scopes (the "E" in LEGB).
#
# Recall the blocks expand from inside out.
#
# We must leave an AST marker in place of the each autoref block, so that any outer autoref block (when it expands)
# understands that within that block, any read access to the name "p" is to be left alone.
#
# In ``_autoref_resolve``, we use a single args parameter to avoid dealing with ``*args``
# when analyzing the Call node, thus avoiding much special-case code for the AST differences
# between Python 3.4 and 3.5+.
#
# In reality, we also capture-and-assign the autoref'd expr into a gensym'd variable (instead of referring
# to ``o`` and ``p`` directly), so that arbitrary expressions can be autoref'd without giving them
# a name in user code.
@mark_lazy
def _autoref_resolve(args):
*objs, s = [force1(x) for x in args]
for o in objs:
if hasattr(o, s):
return True, force1(getattr(o, s))
return False, None
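# A toy illustration of the resolver above (ignoring laziness; `NS` is a
# hypothetical namespace object):
#
#   class NS:
#       x = 42
#   _autoref_resolve((NS, "x"))  # -> (True, 42)
#   _autoref_resolve((NS, "y"))  # -> (False, None)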
def autoref(block_body, args, asname):
assert len(args) == 1, "expected exactly one argument, the expr to implicitly reference"
assert block_body, "expected at least one statement inside the 'with autoref' block"
gen_sym = dyn.gen_sym
o = asname.id if asname else gen_sym("_o") # Python itself guarantees asname to be a bare Name.
# with AutorefMarker("_o42"):
def isexpandedautorefblock(tree):
if not (type(tree) is With and len(tree.items) == 1):
return False
ctxmanager = tree.items[0].context_expr
return type(ctxmanager) is Call and \
type(ctxmanager.func) is Name and ctxmanager.func.id == "AutorefMarker" and \
len(ctxmanager.args) == 1 and type(ctxmanager.args[0]) is Str
def getreferent(tree):
return tree.items[0].context_expr.args[0].s
# (lambda _ar314: _ar314[1] if _ar314[0] else x)(_autoref_resolve((p, o, "x")))
def isautoreference(tree):
return type(tree) is Call and \
len(tree.args) == 1 and type(tree.args[0]) is Call and \
type(tree.args[0].func) is Name and tree.args[0].func.id == "_autoref_resolve" and \
type(tree.func) is Lambda and len(tree.func.args.args) == 1 and \
tree.func.args.args[0].arg.startswith("_ar")
def get_resolver_list(tree): # (p, o, "x")
return tree.args[0].args[0].elts
def add_to_resolver_list(tree, objnode):
lst = get_resolver_list(tree)
lst.insert(-1, objnode)
# x --> the autoref code above.
def makeautoreference(tree):
assert type(tree) is Name and (type(tree.ctx) is Load or not tree.ctx)
newtree = hq[(lambda __ar_: __ar_[1] if __ar_[0] else ast_literal[tree])(_autoref_resolve((name[o], u[tree.id])))]
our_lambda_argname = gen_sym("_ar")
@Walker
def renametmp(tree, **kw):
if type(tree) is Name and tree.id == "__ar_":
tree.id = our_lambda_argname
elif type(tree) is arg and tree.arg == "__ar_":
tree.arg = our_lambda_argname
return tree
return renametmp.recurse(newtree)
@Walker
def transform(tree, *, referents, set_ctx, stop, **kw):
if type(tree) in (Attribute, Subscript, Name) and type(tree.ctx) in (Store, Del):
stop()
# skip autoref lookup for let/do envs
elif islet(tree):
view = ExpandedLetView(tree)
set_ctx(referents=referents + [view.body.args.args[0].arg]) # lambda e14: ...
elif isdo(tree):
view = ExpandedDoView(tree)
set_ctx(referents=referents + [view.body[0].args.args[0].arg]) # lambda e14: ...
elif isexpandedautorefblock(tree):
set_ctx(referents=referents + [getreferent(tree)])
elif isautoreference(tree): # generated by an inner already expanded autoref block
stop()
thename = get_resolver_list(tree)[-1].s
if thename in referents:
# remove autoref lookup for an outer referent, inserted early by an inner autoref block
# (that doesn't know that any outer block exists)
tree = q[name[thename]] # (lambda ...)(_autoref_resolve((p, "o"))) --> o
else:
add_to_resolver_list(tree, q[name[o]]) # _autoref_resolve((p, "x")) --> _autoref_resolve((p, o, "x"))
elif type(tree) is Call and type(tree.func) is Name and tree.func.id == "AutorefMarker": # nested autorefs
stop()
elif type(tree) is Name and (type(tree.ctx) is Load or not tree.ctx) and tree.id not in referents:
stop()
tree = makeautoreference(tree)
# Attribute works as-is, because a.b.c --> Attribute(Attribute(a, "b"), "c"), so Name "a" gets transformed.
# Subscript similarly, a[1][2] --> Subscript(Subscript(a, 1), 2), so Name "a" gets transformed.
return tree
# skip (by name) some common references inserted by other macros
always_skip = ['letter', 'dof', 'namelambda', 'curry', 'currycall', 'lazy', 'lazyrec', 'lazycall']
newbody = [Assign(targets=[q[name[o]]], value=args[0])]
for stmt in block_body:
newbody.append(transform.recurse(stmt, referents=always_skip + [o]))
return wrapwith(item=hq[AutorefMarker(u[o])],
body=newbody,
locref=block_body[0])
|
StarcoderdataPython
|
1726786
|
class VehicleUpdate:
def __init__(self, id, x, y, durability, remaining_attack_cooldown_ticks, selected, groups):
self.id = id
self.x = x
self.y = y
self.durability = durability
self.remaining_attack_cooldown_ticks = remaining_attack_cooldown_ticks
self.selected = selected
self.groups = groups
|
StarcoderdataPython
|
1614136
|
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2016-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
import edgedb
from edgedb.datatypes import datatypes as private
from edgedb import introspect
class TestRecordDesc(unittest.TestCase):
def test_recorddesc_1(self):
with self.assertRaisesRegex(TypeError, 'one to three positional'):
private._RecordDescriptor()
with self.assertRaisesRegex(TypeError, 'one to three positional'):
private._RecordDescriptor(t=1)
with self.assertRaisesRegex(TypeError, 'requires a tuple'):
private._RecordDescriptor(1)
with self.assertRaisesRegex(TypeError, 'requires a tuple'):
private._RecordDescriptor(('a',), 1)
with self.assertRaisesRegex(TypeError,
'the same length as the names tuple'):
private._RecordDescriptor(('a',), ())
private._RecordDescriptor(('a', 'b'))
with self.assertRaisesRegex(ValueError, f'more than {0x4000-1}'):
private._RecordDescriptor(('a',) * 20000)
def test_recorddesc_2(self):
rd = private._RecordDescriptor(
('a', 'b', 'c'),
(private._EDGE_POINTER_IS_LINKPROP,
0,
private._EDGE_POINTER_IS_LINK))
self.assertEqual(rd.get_pos('a'), 0)
self.assertEqual(rd.get_pos('b'), 1)
self.assertEqual(rd.get_pos('c'), 2)
self.assertTrue(rd.is_linkprop('a'))
self.assertFalse(rd.is_linkprop('b'))
self.assertFalse(rd.is_linkprop('c'))
self.assertFalse(rd.is_link('a'))
self.assertFalse(rd.is_link('b'))
self.assertTrue(rd.is_link('c'))
with self.assertRaises(LookupError):
rd.get_pos('z')
with self.assertRaises(LookupError):
rd.is_linkprop('z')
def test_recorddesc_3(self):
f = private.create_object_factory(
id={'property', 'implicit'},
lb='link-property',
c='property',
d='link',
)
o = f(1, 2, 3, 4)
desc = private.get_object_descriptor(o)
self.assertEqual(set(dir(desc)), set(('id', 'lb', 'c', 'd')))
self.assertTrue(desc.is_linkprop('lb'))
self.assertFalse(desc.is_linkprop('id'))
self.assertFalse(desc.is_linkprop('c'))
self.assertFalse(desc.is_linkprop('d'))
self.assertFalse(desc.is_link('lb'))
self.assertFalse(desc.is_link('id'))
self.assertFalse(desc.is_link('c'))
self.assertTrue(desc.is_link('d'))
self.assertFalse(desc.is_implicit('lb'))
self.assertTrue(desc.is_implicit('id'))
self.assertFalse(desc.is_implicit('c'))
self.assertFalse(desc.is_implicit('d'))
self.assertEqual(desc.get_pos('lb'), 1)
self.assertEqual(desc.get_pos('id'), 0)
self.assertEqual(desc.get_pos('c'), 2)
self.assertEqual(desc.get_pos('d'), 3)
def test_recorddesc_4(self):
f = private.create_object_factory(
id={'property', 'implicit'},
lb='link-property',
c='property',
d='link',
)
o = f(1, 2, 3, 4)
intro = introspect.introspect_object(o)
self.assertEqual(
intro.pointers,
(
('id', introspect.PointerKind.PROPERTY, True),
('c', introspect.PointerKind.PROPERTY, False),
('d', introspect.PointerKind.LINK, False),
)
)
# clear cache so that tests in refcount mode don't freak out.
introspect._introspect_object_desc.cache_clear()
class TestTuple(unittest.TestCase):
def test_tuple_empty_1(self):
t = edgedb.Tuple()
self.assertEqual(len(t), 0)
self.assertEqual(hash(t), hash(()))
self.assertEqual(repr(t), '()')
with self.assertRaisesRegex(IndexError, 'out of range'):
t[0]
def test_tuple_2(self):
t = edgedb.Tuple((1, 'a'))
self.assertEqual(len(t), 2)
self.assertEqual(hash(t), hash((1, 'a')))
self.assertEqual(repr(t), "(1, 'a')")
self.assertEqual(t[0], 1)
self.assertEqual(t[1], 'a')
with self.assertRaisesRegex(IndexError, 'out of range'):
t[2]
def test_tuple_3(self):
t = edgedb.Tuple((1, []))
t[1].append(t)
self.assertEqual(t[1], [t])
self.assertEqual(repr(t), '(1, [(...)])')
self.assertEqual(str(t), '(1, [(...)])')
def test_tuple_4(self):
with self.assertRaisesRegex(ValueError, f'more than {0x4000 - 1}'):
edgedb.Tuple([1] * 20000)
def test_tuple_freelist_1(self):
lst = []
for _ in range(5000):
lst.append(edgedb.Tuple((1,)))
for t in lst:
self.assertEqual(t[0], 1)
def test_tuple_5(self):
self.assertEqual(
edgedb.Tuple([1, 2, 3]),
edgedb.Tuple([1, 2, 3]))
self.assertNotEqual(
edgedb.Tuple([1, 2, 3]),
edgedb.Tuple([1, 3, 2]))
self.assertLess(
edgedb.Tuple([1, 2, 3]),
edgedb.Tuple([1, 3, 2]))
self.assertEqual(
edgedb.Tuple([]),
edgedb.Tuple([]))
self.assertEqual(
edgedb.Tuple([1]),
edgedb.Tuple([1]))
self.assertGreaterEqual(
edgedb.Tuple([1]),
edgedb.Tuple([1]))
self.assertNotEqual(
edgedb.Tuple([1]),
edgedb.Tuple([]))
self.assertGreater(
edgedb.Tuple([1]),
edgedb.Tuple([]))
self.assertNotEqual(
edgedb.Tuple([1]),
edgedb.Tuple([2]))
self.assertLess(
edgedb.Tuple([1]),
edgedb.Tuple([2]))
self.assertNotEqual(
edgedb.Tuple([1, 2]),
edgedb.Tuple([2, 2]))
self.assertNotEqual(
edgedb.Tuple([1, 1]),
edgedb.Tuple([2, 2, 1]))
def test_tuple_6(self):
self.assertEqual(
edgedb.Tuple([1, 2, 3]),
(1, 2, 3))
self.assertEqual(
(1, 2, 3),
edgedb.Tuple([1, 2, 3]))
self.assertNotEqual(
edgedb.Tuple([1, 2, 3]),
(1, 3, 2))
self.assertLess(
edgedb.Tuple([1, 2, 3]),
(1, 3, 2))
self.assertEqual(
edgedb.Tuple([]),
())
self.assertEqual(
edgedb.Tuple([1]),
(1,))
self.assertGreaterEqual(
edgedb.Tuple([1]),
(1,))
self.assertNotEqual(
edgedb.Tuple([1]),
())
self.assertGreater(
edgedb.Tuple([1]),
())
self.assertNotEqual(
edgedb.Tuple([1]),
(2,))
self.assertLess(
edgedb.Tuple([1]),
(2,))
self.assertNotEqual(
edgedb.Tuple([1, 2]),
(2, 2))
self.assertNotEqual(
edgedb.Tuple([1, 1]),
(2, 2, 1))
def test_tuple_7(self):
self.assertNotEqual(
edgedb.Tuple([1, 2, 3]),
123)
class TestNamedTuple(unittest.TestCase):
def test_namedtuple_empty_1(self):
with self.assertRaisesRegex(ValueError, 'at least one field'):
edgedb.NamedTuple()
def test_namedtuple_2(self):
t = edgedb.NamedTuple(a=1)
self.assertEqual(repr(t), "(a := 1)")
t = edgedb.NamedTuple(a=1, b='a')
self.assertEqual(set(dir(t)), {'a', 'b'})
self.assertEqual(repr(t), "(a := 1, b := 'a')")
self.assertEqual(t[0], 1)
self.assertEqual(t[1], 'a')
with self.assertRaisesRegex(IndexError, 'out of range'):
t[2]
self.assertEqual(len(t), 2)
self.assertEqual(hash(t), hash((1, 'a')))
self.assertEqual(t.a, 1)
self.assertEqual(t.b, 'a')
with self.assertRaises(AttributeError):
t.z
def test_namedtuple_3(self):
t = edgedb.NamedTuple(a=1, b=[])
t.b.append(t)
self.assertEqual(t.b, [t])
self.assertEqual(repr(t), '(a := 1, b := [(...)])')
self.assertEqual(str(t), '(a := 1, b := [(...)])')
def test_namedtuple_4(self):
t1 = edgedb.NamedTuple(a=1, b='aaaa')
t2 = edgedb.Tuple((1, 'aaaa'))
t3 = (1, 'aaaa')
self.assertEqual(hash(t1), hash(t2))
self.assertEqual(hash(t1), hash(t3))
def test_namedtuple_5(self):
self.assertEqual(
edgedb.NamedTuple(a=1, b=2, c=3),
edgedb.NamedTuple(x=1, y=2, z=3))
self.assertNotEqual(
edgedb.NamedTuple(a=1, b=2, c=3),
edgedb.NamedTuple(a=1, c=3, b=2))
self.assertLess(
edgedb.NamedTuple(a=1, b=2, c=3),
edgedb.NamedTuple(a=1, b=3, c=2))
self.assertEqual(
edgedb.NamedTuple(a=1),
edgedb.NamedTuple(b=1))
self.assertEqual(
edgedb.NamedTuple(a=1),
edgedb.NamedTuple(a=1))
def test_namedtuple_6(self):
self.assertEqual(
edgedb.NamedTuple(a=1, b=2, c=3),
(1, 2, 3))
self.assertEqual(
(1, 2, 3),
edgedb.NamedTuple(a=1, b=2, c=3))
self.assertNotEqual(
edgedb.NamedTuple(a=1, b=2, c=3),
(1, 3, 2))
self.assertLess(
edgedb.NamedTuple(a=1, b=2, c=3),
(1, 3, 2))
self.assertEqual(
edgedb.NamedTuple(a=1),
(1,))
self.assertEqual(
edgedb.NamedTuple(a=1),
(1,))
def test_namedtuple_7(self):
self.assertNotEqual(
edgedb.NamedTuple(a=1, b=2, c=3),
1)
class TestObject(unittest.TestCase):
def test_object_1(self):
f = private.create_object_factory(
id='property',
lb='link-property',
c='property'
)
o = f(1, 2, 3)
self.assertEqual(repr(o), 'Object{id := 1, @lb := 2, c := 3}')
self.assertEqual(o.id, 1)
self.assertEqual(o.c, 3)
with self.assertRaises(AttributeError):
o.lb
with self.assertRaises(AttributeError):
o.z
with self.assertRaises(TypeError):
len(o)
with self.assertRaises(KeyError):
o[0]
with self.assertRaises(TypeError):
o['id']
self.assertEqual(set(dir(o)), {'id', 'c'})
def test_object_2(self):
f = private.create_object_factory(
id={'property', 'implicit'},
lb='link-property',
c='property'
)
o = f(1, 2, 3)
self.assertEqual(repr(o), 'Object{@lb := 2, c := 3}')
self.assertEqual(hash(o), hash(f(1, 2, 3)))
self.assertNotEqual(hash(o), hash(f(1, 2, 'aaaa')))
self.assertNotEqual(hash(o), hash((1, 2, 3)))
self.assertEqual(set(dir(o)), {'id', 'c'})
def test_object_3(self):
f = private.create_object_factory(id='property', c='link')
o = f(1, [])
o.c.append(o)
self.assertEqual(repr(o), 'Object{id := 1, c := [Object{...}]}')
with self.assertRaisesRegex(TypeError, 'unhashable'):
hash(o)
def test_object_4(self):
f = private.create_object_factory(
id={'property', 'implicit'},
lb='link-property',
c='property'
)
o1 = f(1, 'aa', 'ba')
o2 = f(1, 'ab', 'bb')
o3 = f(3, 'ac', 'bc')
self.assertEqual(o1, o2)
self.assertNotEqual(o1, o3)
self.assertLess(o1, o3)
self.assertGreater(o3, o2)
def test_object_5(self):
f = private.create_object_factory(
a='property',
lb='link-property',
c='property'
)
with self.assertRaisesRegex(ValueError, "without 'id' field"):
f(1, 2, 3)
def test_object_6(self):
User = private.create_object_factory(
id='property',
name='property',
)
u = User(1, 'user1')
with self.assertRaisesRegex(TypeError,
"property 'name' should be "
"accessed via dot notation"):
u['name']
def test_object_links_1(self):
O2 = private.create_object_factory(
id='property',
lb='link-property',
c='property'
)
O1 = private.create_object_factory(
id='property',
o2s='link'
)
o2_1 = O2(1, 'linkprop o2 1', 3)
o2_2 = O2(4, 'linkprop o2 2', 6)
o1 = O1(2, edgedb.Set((o2_1, o2_2)))
linkset = o1['o2s']
self.assertEqual(len(linkset), 2)
self.assertEqual(linkset, o1['o2s'])
self.assertEqual(hash(linkset), hash(o1['o2s']))
self.assertEqual(
repr(linkset),
"LinkSet(name='o2s', source_id=2, target_ids={1, 4})")
link1 = linkset[0]
self.assertIs(link1.source, o1)
self.assertIs(link1.target, o2_1)
self.assertEqual(
repr(link1),
"Link(name='o2s', source_id=2, target_id=1)")
self.assertEqual(set(dir(link1)), {'target', 'source', 'lb'})
link2 = linkset[1]
self.assertIs(link2.source, o1)
self.assertIs(link2.target, o2_2)
self.assertNotEqual(link1, link2)
self.assertEqual(list(linkset), [link1, link2])
self.assertEqual([link for link in linkset], [link1, link2])
self.assertNotEqual(link1, link2)
self.assertEqual(link1.lb, 'linkprop o2 1')
self.assertEqual(link2.lb, 'linkprop o2 2')
with self.assertRaises(AttributeError):
link2.aaaa
def test_object_links_2(self):
User = private.create_object_factory(
id='property',
friends='link',
enemies='link',
)
u1 = User(1, edgedb.Set([]), edgedb.Set([]))
u2 = User(2, edgedb.Set([]), edgedb.Set([]))
u3 = User(3, edgedb.Set([]), edgedb.Set([]))
u4 = User(4, edgedb.Set([u1, u2]), edgedb.Set([u1, u2]))
u5 = User(5, edgedb.Set([u1, u3]), edgedb.Set([u1, u2]))
self.assertNotEqual(u4['friends'], u4['enemies'])
self.assertNotEqual(u4['enemies'], u5['enemies'])
self.assertEqual(set(dir(u1)), {'id', 'friends', 'enemies'})
def test_object_links_3(self):
User = private.create_object_factory(
id='property',
friend='link',
)
u1 = User(1, None)
u2 = User(2, u1)
u3 = User(3, edgedb.Set([]))
self.assertEqual(set(dir(u2['friend'])), {'source', 'target'})
self.assertIs(u2['friend'].target, u1)
self.assertIsNone(u1['friend'])
self.assertEqual(len(u3['friend']), 0)
self.assertEqual(
repr(u3['friend']),
"LinkSet(name='friend', source_id=3, target_ids={})")
self.assertEqual(
repr(u2['friend']),
"Link(name='friend', source_id=2, target_id=1)")
def test_object_links_4(self):
User = private.create_object_factory(
id='property',
friend='link',
)
u = User(1, None)
with self.assertRaisesRegex(KeyError,
"link 'error_key' does not exist"):
u['error_key']
class TestSet(unittest.TestCase):
def test_set_1(self):
s = edgedb.Set(())
self.assertEqual(repr(s), 'Set{}')
s = edgedb.Set((1, 2, [], 'a'))
self.assertEqual(s[1], 2)
self.assertEqual(s[2], [])
self.assertEqual(len(s), 4)
with self.assertRaises(IndexError):
s[10]
with self.assertRaises(TypeError):
s[0] = 1
def test_set_2(self):
s = edgedb.Set((1, 2, 3000, 'a'))
self.assertEqual(repr(s), "Set{1, 2, 3000, 'a'}")
self.assertEqual(
hash(s),
hash(edgedb.Set((1, 2, sum([1000, 2000]), 'a'))))
self.assertNotEqual(
hash(s),
hash((1, 2, 3000, 'a')))
def test_set_3(self):
s = edgedb.Set(())
self.assertEqual(len(s), 0)
self.assertEqual(hash(s), hash(edgedb.Set(())))
self.assertNotEqual(hash(s), hash(()))
def test_set_4(self):
s = edgedb.Set(([],))
s[0].append(s)
self.assertEqual(repr(s), "Set{[Set{...}]}")
def test_set_5(self):
self.assertEqual(
edgedb.Set([1, 2, 3]),
edgedb.Set([3, 2, 1]))
self.assertEqual(
edgedb.Set([]),
edgedb.Set([]))
self.assertEqual(
edgedb.Set([1]),
edgedb.Set([1]))
self.assertNotEqual(
edgedb.Set([1]),
edgedb.Set([]))
self.assertNotEqual(
edgedb.Set([1]),
edgedb.Set([2]))
self.assertNotEqual(
edgedb.Set([1, 2]),
edgedb.Set([2, 2]))
self.assertNotEqual(
edgedb.Set([1, 1, 2]),
edgedb.Set([2, 2, 1]))
def test_set_6(self):
f = private.create_object_factory(
id={'property', 'implicit'},
lb='link-property',
c='property'
)
o1 = f(1, 'aa', edgedb.Set([1, 2, 3]))
o2 = f(1, 'ab', edgedb.Set([1, 2, 4]))
o3 = f(3, 'ac', edgedb.Set([5, 5, 5, 5]))
self.assertEqual(
edgedb.Set([o1, o2, o3]),
edgedb.Set([o2, o3, o1]))
self.assertEqual(
edgedb.Set([o1, o3]),
edgedb.Set([o2, o3]))
self.assertNotEqual(
edgedb.Set([o1, o1]),
edgedb.Set([o2, o3]))
def test_set_7(self):
self.assertEqual(
edgedb.Set([1, 2, 3]),
[1, 2, 3])
self.assertNotEqual(
edgedb.Set([1, 2, 3]),
[3, 2, 1])
self.assertNotEqual(
edgedb.Set([1, 2, 3]),
1)
def test_set_8(self):
s = edgedb.Set([1, 2, 3])
si = iter(s)
self.assertEqual(list(si), [1, 2, 3])
class TestArray(unittest.TestCase):
def test_array_empty_1(self):
t = edgedb.Array()
self.assertEqual(len(t), 0)
self.assertNotEqual(hash(t), hash(()))
with self.assertRaisesRegex(IndexError, 'out of range'):
t[0]
self.assertEqual(repr(t), "[]")
def test_array_2(self):
t = edgedb.Array((1, 'a'))
self.assertEqual(repr(t), "[1, 'a']")
self.assertEqual(str(t), "[1, 'a']")
self.assertEqual(len(t), 2)
self.assertEqual(hash(t), hash(edgedb.Array([1, 'a'])))
self.assertNotEqual(hash(t), hash(edgedb.Array([10, 'ab'])))
self.assertEqual(t[0], 1)
self.assertEqual(t[1], 'a')
with self.assertRaisesRegex(IndexError, 'out of range'):
t[2]
def test_array_3(self):
t = edgedb.Array((1, []))
t[1].append(t)
self.assertEqual(t[1], [t])
self.assertEqual(repr(t), '[1, [[...]]]')
def test_array_4(self):
self.assertEqual(
edgedb.Array([1, 2, 3]),
edgedb.Array([1, 2, 3]))
self.assertNotEqual(
edgedb.Array([1, 2, 3]),
edgedb.Array([1, 3, 2]))
self.assertLess(
edgedb.Array([1, 2, 3]),
edgedb.Array([1, 3, 2]))
self.assertEqual(
edgedb.Array([]),
edgedb.Array([]))
self.assertEqual(
edgedb.Array([1]),
edgedb.Array([1]))
self.assertGreaterEqual(
edgedb.Array([1]),
edgedb.Array([1]))
self.assertNotEqual(
edgedb.Array([1]),
edgedb.Array([]))
self.assertGreater(
edgedb.Array([1]),
edgedb.Array([]))
self.assertNotEqual(
edgedb.Array([1]),
edgedb.Array([2]))
self.assertLess(
edgedb.Array([1]),
edgedb.Array([2]))
self.assertNotEqual(
edgedb.Array([1, 2]),
edgedb.Array([2, 2]))
self.assertNotEqual(
edgedb.Array([1, 1]),
edgedb.Array([2, 2, 1]))
def test_array_5(self):
self.assertEqual(
edgedb.Array([1, 2, 3]),
[1, 2, 3])
self.assertEqual(
[1, 2, 3],
edgedb.Array([1, 2, 3]))
self.assertNotEqual(
[1, 2, 4],
edgedb.Array([1, 2, 3]))
self.assertNotEqual(
edgedb.Array([1, 2, 3]),
[1, 3, 2])
self.assertLess(
edgedb.Array([1, 2, 3]),
[1, 3, 2])
self.assertEqual(
edgedb.Array([]),
[])
self.assertEqual(
edgedb.Array([1]),
[1])
self.assertGreaterEqual(
edgedb.Array([1]),
[1])
self.assertNotEqual(
edgedb.Array([1]),
[])
self.assertGreater(
edgedb.Array([1]),
[])
self.assertNotEqual(
edgedb.Array([1]),
[2])
self.assertLess(
edgedb.Array([1]),
[2])
self.assertNotEqual(
edgedb.Array([1, 2]),
[2, 2])
self.assertNotEqual(
edgedb.Array([1, 1]),
[2, 2, 1])
def test_array_6(self):
self.assertNotEqual(
edgedb.Array([1, 2, 3]),
False)
|
StarcoderdataPython
|
119206
|
<filename>DigiroadPreDataAnalysis/digiroad/util/__init__.py
import configparser
import csv
import datetime
import json
import logging
import logging.config
import numpy
import os
import shutil
import time
import zipfile
from digiroad import carRoutingExceptions as exc
from digiroad.entities import Point
from pandas.io.json import json_normalize
def enum(**enums):
return type('Enum', (), enums)
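# Usage sketch of the enum() helper above (hypothetical members):
#
#   Color = enum(RED='r', BLUE='b')
#   Color.RED  # -> 'r'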
carRountingDictionary = {
"pituus": "distance",
"digiroa_aa": "speed_limit_time",
"kokopva_aa": "day_average_delay_time",
"keskpva_aa": "midday_delay_time",
"ruuhka_aa": "rush_hour_delay_time"
}
CostAttributes = enum(DISTANCE='pituus',
SPEED_LIMIT_TIME='digiroa_aa',
DAY_AVG_DELAY_TIME='kokopva_aa',
MIDDAY_DELAY_TIME='keskpva_aa',
RUSH_HOUR_DELAY='ruuhka_aa',
BICYCLE_FAST_TIME='fast_time',
BICYCLE_SLOW_TIME='slow_time')
TransportModes = enum(PRIVATE_CAR='PRIVATE_CAR', BICYCLE='BICYCLE')
GeometryType = enum(POINT="Point", MULTI_POINT='MultiPoint', LINE_STRING='LineString')
PostfixAttribute = enum(EUCLIDEAN_DISTANCE="EuclideanDistance", AVG_WALKING_DISTANCE="AVGWalkingDistance",
WALKING_TIME="WalkingTime", PARKING_TIME="ParkingTime")
GPD_CRS = enum(WGS_84={'init': 'EPSG:4326'}, PSEUDO_MERCATOR={'init': 'EPSG:3857'})
def getEnglishMeaning(cost_attribute=None):
if cost_attribute in carRountingDictionary:
return carRountingDictionary[cost_attribute]
else:
return cost_attribute
def getFormattedDatetime(timemilis=time.time(), format='%Y-%m-%d %H:%M:%S'):
formattedDatetime = datetime.datetime.fromtimestamp(timemilis).strftime(format)
return formattedDatetime
def timeDifference(startTime, endTime):
totalTime = (endTime - startTime) / 60 # min
return totalTime
def getConfigurationProperties(section="WFS_CONFIG"):
config = configparser.ConfigParser()
configurationPath = os.getcwd() + "%resources%configuration.properties".replace("%", os.sep)
config.read(configurationPath)
return config[section]
def extractCRS(geojson):
epsgCode = geojson["crs"]["properties"]["name"].split(":")[-3] + ":" + \
geojson["crs"]["properties"]["name"].split(":")[-1]
return epsgCode
def createPointFromPointFeature(newFeaturePoint, epsgCode):
if newFeaturePoint["geometry"]["type"] == GeometryType.MULTI_POINT:
startNearestVertexCoordinates = newFeaturePoint["geometry"]["coordinates"][0]
elif newFeaturePoint["geometry"]["type"] == GeometryType.POINT:
startNearestVertexCoordinates = newFeaturePoint["geometry"]["coordinates"]
nearestStartPoint = Point(latitute=startNearestVertexCoordinates[1],
longitude=startNearestVertexCoordinates[0],
epsgCode=epsgCode)
return nearestStartPoint
def dgl_timer(func):
def func_wrapper(*args, **kwargs):
timerEnabled = "True".__eq__(getConfigurationProperties(section="WFS_CONFIG")["timerEnabled"])
if timerEnabled:
functionName = func.__name__
startTime = time.time()
Logger.getInstance().info("%s Start Time: %s" % (functionName, getFormattedDatetime(timemilis=startTime)))
###############################
returns = func(*args, **kwargs)
###############################
endTime = time.time()
Logger.getInstance().info("%s End Time: %s" % (functionName, getFormattedDatetime(timemilis=endTime)))
totalTime = timeDifference(startTime, endTime)
Logger.getInstance().info("%s Total Time: %s m" % (functionName, totalTime))
return returns
else:
return func(*args, **kwargs)
return func_wrapper
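# Usage sketch: decorating any function with @dgl_timer logs its start, end
# and total time whenever timerEnabled=True in configuration.properties
# (the decorated function below is hypothetical):
#
#   @dgl_timer
#   def heavy_computation(data):
#       ...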
def dgl_timer_enabled(func):
def func_wrapper(*args, **kwargs):
functionName = func.__name__
startTime = time.time()
Logger.getInstance().info("%s Start Time: %s" % (functionName, getFormattedDatetime(timemilis=startTime)))
###############################
returns = func(*args, **kwargs)
###############################
endTime = time.time()
Logger.getInstance().info("%s End Time: %s" % (functionName, getFormattedDatetime(timemilis=endTime)))
totalTime = timeDifference(startTime, endTime)
Logger.getInstance().info("%s Total Time: %s m" % (functionName, totalTime))
return returns
return func_wrapper
class AbstractLinkedList(object):
def __init__(self):
self._next = None
def hasNext(self):
return self._next is not None
    def next(self):
        return self._next
def setNext(self, next):
self._next = next
class Node:
def __init__(self, item):
"""
A node contains an item and a possible next node.
:param item: The referenced item.
"""
self._item = item
self._next = None
def getItem(self):
return self._item
def setItem(self, item):
self._item = item
def getNext(self):
return self._next
def setNext(self, next):
self._next = next
class LinkedList(AbstractLinkedList):
def __init__(self):
"""
Linked List implementation.
The _head is the first node in the linked list.
        _next refers to the next node to be visited during iteration.
        And the _tail is the last node added to the linked list.
"""
self._head = None
self._next = None
self._tail = None
    def hasNext(self):
        """
        Verify whether there is a next node in the queue of the linked list.
        :return: True if there is a next node.
        """
        return self._next is not None
def next(self):
"""
:return: The next available item in the queue of the linked list.
"""
item = self._next.getItem()
self._next = self._next.getNext()
return item
def add(self, newItem):
"""
        Add a new item to the linked list. The _tail moves forward and a new
        node is created each time an item is added.
:param newItem: Item to be added.
"""
if self._head is None:
self._head = Node(newItem)
self._next = self._head
self._tail = self._head
else:
node = Node(newItem)
self._tail.setNext(node)
self._tail = node
    def restart(self):
        """
        Move the iteration cursor back to the first node, so the queue can be
        traversed again. The _tail is left untouched so add() keeps appending
        at the end.
        """
        self._next = self._head
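# A minimal usage sketch of LinkedList:
#
#   ll = LinkedList()
#   ll.add(1)
#   ll.add(2)
#   while ll.hasNext():
#       print(ll.next())  # prints 1, then 2
#   ll.restart()          # rewind the cursor to the head for another pass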
class FileActions:
def readJson(self, url):
"""
Read a json file
:param url: URL for the Json file
:return: json dictionary data
"""
with open(url) as f:
data = json.load(f)
return data
def readMultiPointJson(self, url):
"""
        Read a MultiPoint geometry geojson file; if the file is not a MultiPoint
        geometry, an IncorrectGeometryTypeException is raised.
:param url: URL for the Json file
:return: json dictionary data
"""
data = None
with open(url) as f:
data = json.load(f)
self.checkGeometry(data, GeometryType.MULTI_POINT)
return data
def readPointJson(self, url):
"""
        Read a Point geometry geojson file; if the file is not a Point
        geometry, an IncorrectGeometryTypeException is raised.
:param url: URL for the Json file
:return: json dictionary data
"""
data = None
with open(url) as f:
data = json.load(f)
self.checkGeometry(data, GeometryType.POINT)
return data
def checkGeometry(self, data, geometryType=GeometryType.MULTI_POINT):
"""
        Check the content of the Json to verify that it is a specific geometry
        type (MultiPoint by default). If the geojson is not of the given
        geometry type, an IncorrectGeometryTypeException is raised.
:param data: json dictionary
:param geometryType: Geometry type (i.e. MultiPoint, LineString)
:return: None
"""
for feature in data["features"]:
if feature["geometry"]["type"] != geometryType:
raise exc.IncorrectGeometryTypeException("Expected %s" % geometryType)
def convertToGeojson(self, dataframe):
jsonResult = dataframe.to_json()
newJson = json.loads(jsonResult)
newJson["crs"] = {
"properties": {
"name": "urn:ogc:def:crs:%s" % (GPD_CRS.PSEUDO_MERCATOR["init"].replace(":", "::"))
},
"type": "name"
}
return newJson
def writeFile(self, folderPath, filename, data):
if not os.path.exists(folderPath):
os.makedirs(folderPath)
fileURL = folderPath + "%s%s" % (os.sep, filename)
with open(fileURL, 'w+') as outfile:
json.dump(data, outfile, sort_keys=True)
return fileURL
def createFile(self, folderPath, filename):
if not os.path.exists(folderPath):
os.makedirs(folderPath)
with open(folderPath + os.sep + filename, 'w+') as outfile:
outfile.close()
def deleteFile(self, folderPath, filename):
Logger.getInstance().info("Deleting FILE %s" % os.path.join(folderPath, filename))
if os.path.exists(folderPath + os.sep + filename):
os.remove(folderPath + os.sep + filename)
Logger.getInstance().info("The FILE %s was deleted" % os.path.join(folderPath, filename))
def deleteFolder(self, path):
Logger.getInstance().info("Deleting FOLDER %s" % path)
if os.path.exists(path):
shutil.rmtree(path)
Logger.getInstance().info("The FOLDER %s was deleted" % path)
@dgl_timer
def compressOutputFile(self, folderPath, zip_filename, filepath):
zipf = zipfile.ZipFile(folderPath + os.sep + zip_filename, "a", zipfile.ZIP_DEFLATED, allowZip64=True)
zipf.write(filepath, os.path.basename(filepath))
@dgl_timer
def transformGeojsonInDataFrame(self, geojson):
if "features" in geojson:
df = json_normalize(geojson["features"])
columns = numpy.asarray([column.replace("properties.", "") for column in df.columns.values])
df.columns = columns
return df
return None
def writeInCSV(self, folderPath, filename, valueList):
if not os.path.exists(folderPath):
os.makedirs(folderPath)
file = folderPath + os.sep + filename
if not os.path.isfile(file):
fieldList = []
attributes = getConfigurationProperties(section="ATTRIBUTES_MAPPING")
for attribute_key in attributes:
attribute_splitted = attributes[attribute_key].split(",")
key = attribute_splitted[0]
value = attribute_splitted[1]
fieldList.append(value)
with open(file, 'w', newline='') as outputFile:
writer = csv.writer(outputFile, delimiter=';')
writer.writerow(fieldList)
writer.writerow(valueList)
else:
with open(file, 'a', newline='') as outputFile:
writer = csv.writer(outputFile, delimiter=';')
writer.writerow(valueList)
def parallel_job_print(msg, msg_args):
""" Display the message on stout or stderr depending on verbosity
"""
# XXX: Not using the logger framework: need to
# learn to use logger better.
# if not self.verbose:
# return
# if self.verbose < 50:
# writer = sys.stderr.write
# else:
# writer = sys.stdout.write
msg = msg % msg_args
self = "Parallel(n_jobs=%s)" % getConfigurationProperties(section="PARALLELIZATION")["jobs"]
# writer('[%s]: %s\n' % (self, msg))
Logger.getInstance().info('[%s]: %s' % (self, msg))
class Logger:
__instance = None
__handler = None
def __init__(self):
raise Exception("Instances must be constructed with Logger.getInstance()")
@staticmethod
def configureLogger(outputFolder, prefix):
# Logger.__instance = None
log_filename = prefix + "_log - %s.log" % getFormattedDatetime(timemilis=time.time(),
format='%Y-%m-%d %H_%M_%S')
logs_folder = outputFolder + os.sep + "logs"
FileActions().createFile(logs_folder, log_filename)
if Logger.__handler is not None:
Logger.getInstance().removeHandler(Logger.__handler)
fileHandler = logging.FileHandler(logs_folder + os.sep + log_filename, 'w')
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
fileHandler.setFormatter(formatter)
Logger.__handler = fileHandler
Logger.getInstance().addHandler(fileHandler)
@staticmethod
def getInstance():
if not Logger.__instance:
# configurationPath = os.getcwd() + "%resources%logging.properties".replace("%", os.sep)
# logging.config.fileConfig(configurationPath)
# create logger
Logger.__instance = logging.getLogger("CARDAT")
# "application" code
# Logger.instance.debug("debug message")
# Logger.instance.info("info message")
# Logger.instance.warn("warn message")
# Logger.instance.error("error message")
# Logger.instance.critical("critical message")
return Logger.__instance
class GeneralLogger:
def __init__(self, loggerName, outputFolder, prefix=""):
self.logger = self._createLogger(loggerName=loggerName)
self.handler = self._createLogFileHandler(outputFolder=outputFolder, prefix=prefix)
self.logger.addHandler(self.handler)
def _createLogger(self, loggerName):
configurationPath = os.getcwd() + "%resources%logging.properties".replace("%", os.sep)
logging.config.fileConfig(configurationPath)
# create logger
logger = logging.getLogger(loggerName)
return logger
def _createLogFileHandler(self, outputFolder, prefix):
log_filename = prefix + "_log - %s.log" % getFormattedDatetime(
timemilis=time.time(),
format='%Y-%m-%d %H_%M_%S'
)
logs_folder = outputFolder + os.sep + "logs"
FileActions().createFile(logs_folder, log_filename)
fileHandler = logging.FileHandler(logs_folder + os.sep + log_filename, 'w')
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
fileHandler.setFormatter(formatter)
return fileHandler
def getLogger(self):
return self.logger
|
StarcoderdataPython
|
3370212
|
from itertools import chain
from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.forms import widgets
from django.forms.models import ModelChoiceIterator
from django.template import Template, Context
from django.utils.encoding import force_unicode
from django.utils.html import escape, conditional_escape
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_variables
from evap.evaluation.models import UserProfile
class QuestionnaireChoiceIterator(ModelChoiceIterator):
def choice(self, obj):
return (self.field.prepare_value(obj), self.field.label_from_instance(obj), obj.description)
class QuestionnaireSelectMultiple(forms.CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
has_id = attrs and 'id' in attrs
final_attrs = self.build_attrs(attrs, name=name)
output = [u'<ul class="inputs-list">']
# Normalize to strings
str_values = set([force_unicode(v) for v in value])
for i, (option_value, option_label, option_text) in enumerate(chain(self.choices, choices)):
# If an ID attribute was given, add a numeric index as a suffix,
# so that the checkboxes don't all have the same ID attribute.
if has_id:
final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i))
label_for = u' for="%s"' % final_attrs['id']
else:
label_for = ''
cb = widgets.CheckboxInput(final_attrs, check_test=lambda value: value in str_values)
option_value = force_unicode(option_value)
rendered_cb = cb.render(name, option_value)
option_label = conditional_escape(force_unicode(option_label))
output.append(u'<li class="twipsify" title="%s"><div class="checkbox"><label%s>%s %s</label></div></li>' % (escape(option_text), label_for, rendered_cb.replace('class="form-control"',''), option_label))
output.append(u'</ul>')
return mark_safe(u'\n'.join(output))
class QuestionnaireMultipleChoiceField(forms.ModelMultipleChoiceField):
widget = QuestionnaireSelectMultiple
def __init__(self, *args, **kwargs):
super(QuestionnaireMultipleChoiceField, self).__init__(*args, **kwargs)
self.help_text = ""
def _get_choices(self):
# If self._choices is set, then somebody must have manually set
# the property self.choices. In this case, just return self._choices.
if hasattr(self, '_choices'):
return self._choices
# Otherwise, execute the QuerySet in self.queryset to determine the
# choices dynamically. Return a fresh ModelChoiceIterator that has not been
# consumed. Note that we're instantiating a new ModelChoiceIterator *each*
# time _get_choices() is called (and, thus, each time self.choices is
# accessed) so that we can ensure the QuerySet has not been consumed. This
# construct might look complicated but it allows for lazy evaluation of
# the queryset.
return QuestionnaireChoiceIterator(self)
choices = property(_get_choices, forms.ChoiceField._set_choices)
class LoginUsernameForm(forms.Form):
"""Form encapsulating the login with username and password, for example from an Active Directory.
"""
username = forms.CharField(label=_(u"Username"), max_length=254)
password = forms.CharField(label=_(u"Password"), widget=forms.PasswordInput)
def __init__(self, request=None, *args, **kwargs):
"""
If request is passed in, the form will validate that cookies are
enabled. Note that the request (a HttpRequest object) must have set a
cookie with the key TEST_COOKIE_NAME and value TEST_COOKIE_VALUE before
running this validation.
"""
self.request = request
self.user_cache = None
super(LoginUsernameForm, self).__init__(*args, **kwargs)
@sensitive_variables('password')
def clean_password(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
if username and password:
self.user_cache = authenticate(username=username, password=password)
if self.user_cache is None:
raise forms.ValidationError(_("Please enter a correct username and password."))
elif not self.user_cache.is_active:
raise forms.ValidationError(_("This account is inactive."))
self.check_for_test_cookie()
return password
def check_for_test_cookie(self):
if self.request and not self.request.session.test_cookie_worked():
raise forms.ValidationError(_("Your Web browser doesn't appear to have cookies enabled. Cookies are required for logging in."))
def get_user_id(self):
if self.user_cache:
return self.user_cache.id
return None
def get_user(self):
return self.user_cache
class LoginKeyForm(forms.Form):
"""Form encapsulating the login with a login key. It works together with the
evaluation.auth.RequestAuthUserBackend.
"""
INVALID_CODE_MESSAGE = _("Please enter a correct login key. Be aware that login keys are automatically invalidated after seven months.")
login_key = forms.IntegerField(label=_(u"Login key"), error_messages={'invalid': INVALID_CODE_MESSAGE}, widget=forms.TextInput)
def __init__(self, *args, **kwargs):
self.user_cache = None
super(LoginKeyForm, self).__init__(*args, **kwargs)
def clean_login_key(self):
login_key = self.cleaned_data.get('login_key')
if login_key:
self.user_cache = authenticate(key=login_key)
if self.user_cache is None:
raise forms.ValidationError(LoginKeyForm.INVALID_CODE_MESSAGE)
elif not self.user_cache.is_active:
raise forms.ValidationError(_("This account is inactive."))
return login_key
def get_user_id(self):
if self.user_cache:
return self.user_cache.id
return None
def get_user(self):
return self.user_cache
class NewKeyForm(forms.Form):
email = forms.EmailField(label=_(u"Email address"))
def __init__(self, *args, **kwargs):
self.user_cache = None
self.profile_cache = None
super(NewKeyForm, self).__init__(*args, **kwargs)
def clean_email(self):
email = self.cleaned_data.get('email')
if not UserProfile.email_needs_login_key(email):
raise forms.ValidationError(_(u"HPI users cannot request login keys. Please login using your domain credentials."))
try:
user = User.objects.get(email__iexact=email)
self.user_cache = user
self.profile_cache = UserProfile.get_for_user(user)
except User.DoesNotExist:
raise forms.ValidationError(_(u"No user with this email address was found. Please make sure to enter the email address already known to the university office."))
return email
def get_user(self):
return self.user_cache
def get_profile(self):
return self.profile_cache
class BootstrapFieldset(object):
""" Fieldset container. Renders to a <fieldset>. """
def __init__(self, legend, *fields):
self.legend_html = legend and ('<legend>%s</legend>' % legend) or ''
self.fields = fields
def as_html(self, form):
return u'<fieldset>%s%s</fieldset>' % (self.legend_html, form.render_fields(self.fields), )
class BootstrapMixin(object):
""""""
__TEMPLATE = """<div class="form-group{% if errors %} has-error{% endif %}">""" \
"""<label class="col-sm-2 control-label" for="{{ field.auto_id }}">{{ label }}</label>""" \
"""<div class="col-sm-6">""" \
"""{{ bf }}""" \
"""{% if errors %}<span class="help-block">{{ errors }}</span>{% endif %}""" \
"""{% if help_text %}<span class="help-block">{{ help_text }}</span>{% endif %}""" \
"""</div></div>"""
def as_div(self):
""" Render the form as a set of <div>s. """
top_errors = []
output = self.__render_fields(self.__layout, top_errors)
top_errors.extend(self.non_field_errors())
if top_errors:
errors = u"""<ul class="errorlist"><li>%s</li></ul>""" % u"</li><li>".join(top_errors)
else:
errors = u""
return mark_safe(errors + output)
@property
def __layout(self):
try:
return self.__layout_store
except AttributeError:
self.__layout_store = self.fields.keys()
return self.__layout_store
@property
def __custom_fields(self):
try:
return self.__custom_fields_store
except AttributeError:
self.__custom_fields_store = {}
return self.__custom_fields_store
def __render_fields(self, fields, top_errors, separator=u""):
""" Render a list of fields and join the fields by the value in separator. """
output = []
for field in fields:
if isinstance(field, BootstrapFieldset):
output.append(field.as_html(self))
else:
output.append(self.__render_field(field, top_errors))
return separator.join(output)
def __render_field(self, field, top_errors):
""" Render a named field to HTML. """
try:
field_instance = self.fields[field]
except KeyError:
raise Exception("Could not resolve form field '%s'." % field)
bf = forms.forms.BoundField(self, field_instance, field)
output = ''
if bf.errors:
# If the field contains errors, render the errors to a <ul>
# using the error_list helper function.
# bf_errors = error_list([escape(error) for error in bf.errors])
bf_errors = ', '.join([e for e in bf.errors])
else:
bf_errors = ''
if bf.is_hidden:
# If the field is hidden, add it at the top of the form
# self.prefix.append(unicode(bf))
# If the hidden field has errors, append them to the top_errors
# list which will be printed out at the top of form
if bf_errors:
top_errors.extend(bf.errors)
else:
# Find field + widget type css classes
css_class = type(field_instance).__name__ + " " + type(field_instance.widget).__name__
# Add an extra class, Required, if applicable
if field_instance.required:
css_class += " required"
if field_instance.help_text:
# The field has a help_text, construct <span> tag
help_text = escape(unicode(field_instance.help_text))
else:
help_text = u''
attrs = {}
if isinstance(field_instance.widget, (widgets.DateInput, widgets.Textarea, widgets.TextInput, widgets.SelectMultiple)):
attrs['class'] = 'form-control'
if isinstance(field_instance.widget, widgets.DateInput) and not field_instance.widget.attrs.get("readonly", False):
attrs['data-datepicker'] = "datepicker"
field_hash = {
'class' : mark_safe(css_class),
'label' : mark_safe(bf.label or ''),
'help_text' :mark_safe(unicode(help_text)),
'field' : field_instance,
'bf' : mark_safe(unicode(bf.as_widget(attrs=attrs))),
'bf_raw' : bf,
'errors' : mark_safe(bf_errors),
'field_type' : mark_safe(field.__class__.__name__),
}
output = Template(self.__TEMPLATE).render(Context(field_hash))
return mark_safe(output)
|
StarcoderdataPython
|
3268780
|
# Module: objects
# Date: 20th December 2014
# Author: <NAME>, prologic at shortcircuit dot net dot au
"""Objects Module
Implements core objects used to store data
"""
from os import geteuid
from pwd import getpwuid
from textwrap import fill
from .utils import normalize
from . import __name__, __version__
VERSION = "{} {}".format(__name__, __version__)
STATUS_CODES = {
200: "OK",
301: "Moved Permanently",
303: "See Other",
400: "Bad Request",
401: "Unauthorized",
403: "Forbidden",
404: "Not Found",
500: "Internal Server Error",
}
class Request(object):
def __init__(self, sock, server, selector, query):
self.sock = sock
self.server = server
self.selector = normalize(selector)
self.query = query
try:
self.remote_addr = self.sock.getpeername()
        except Exception:
self.remote_addr = "169.254.0.1", 0
self.environ = {
"USER": getpwuid(geteuid()).pw_name,
"PEER": self.remote_addr[0],
"SELECTOR": self.selector,
"QUERY": self.query,
"SCRIPT_NAME": self.selector,
"SERVER_HOST": self.server.host,
"SERVER_PORT": str(self.server.port),
"SERVER_VERSION": VERSION,
"ENCODING": self.server.encoding,
"WIDTH": str(self.server.width),
"DOCUMENT_ROOT": str(self.server.rootdir),
}
def __repr__(self):
return "<Request(host={}, port={}, selector={}, query={})>".format(
self.remote_addr[0], self.remote_addr[1], self.selector, self.query
)
class Response(object):
def __init__(self, req):
self.req = req
self._size = 0
self._body = []
self._status = 200
self._stream = False
def __len__(self):
return self._size or sum(map(len, self._body))
def __repr__(self):
return "<Response(bytes={}, status={}, stream={})>".format(
len(self), self.status, self.stream
)
def __unicode__(self):
return u"{}\r\n.".format(u"\r\n".join(self._body))
def __str__(self):
return unicode(self).encode(self.req.server.encoding)
@property
def error(self):
return self._error
@error.setter
def error(self, (status, error)):
self._status = status
self._body = []
self.add_error("{} {}: {}".format(status, STATUS_CODES[status], error))
@property
def status(self):
return self._status
@status.setter
def status(self, status):
self._status = status
@property
def size(self):
return self._size
@size.setter
def size(self, size):
self._size = size
@property
def stream(self):
return self._stream
@stream.setter
def stream(self, stream):
self._stream = stream
def add_text(self, text, width=67):
"""
Adds a line of text by formatting it as a gopher selector and
word-wrapping the text to the ``width`` variable.
"""
string = fill(text, width)
for line in string.split("\n"):
self._body.append(u"i{}\t\tnull.host\t0".format(line))
def add_para(self, text, width=67):
"""
Adds a paragraph(s) of text by word-wrapping each paragraph, while
preserving any newline characters. Word-wraps to the ``width`` variable.
"""
for para in text.split("\n"):
self.add_text(para, width)
def add_error(self, text, width=67):
"""
Adds a string as an itemtype-3 gopher selector (error), while
wrapping the text to the ``width`` variable.
"""
string = fill(text, width)
for line in string.split("\n"):
self._body.append(u"3{}\t\terror.host\t0".format(line))
def add_link(self, type, text, path, host=None, port=None):
"""
Adds a gopher selector link using the arguments provided.
"""
host = host or self.req.server.host
port = port or self.req.server.port
self._body.append(u"{}{}\t{}\t{}\t{}".format(type, text, path, host, port))
def add_telnet(self, text, host, port=23):
"""
Adds a telnet link, using the arguments provided.
"""
self._body.append(u"8{}\t\t{}\t{}" % (text, host, port))
def add_url(self, text, url):
"""
Adds an external link to any url, not just gopher.
"""
self._body.append(u"h{}\tURL:{}\tnull.host\t0".format(text, url))
def add_title(self, text):
"""
Adds a title.
"""
self._body.append(u"i{}\tTITLE\tnull.host\t0".format(text))
def add_line(self):
"""
Adds a blank line.
"""
self._body.append(u"i\t\tnull.host\t0")
|
StarcoderdataPython
|
3378405
|
<filename>setup.py
#!/usr/bin/python
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaig',
version='1.0.13',
license='MIT',
author=u'<NAME>',
author_email='<EMAIL>',
url='http://github.com/sterin/pyaig',
description='A simple Python AIG package',
long_description=read('README.md'),
long_description_content_type='text/markdown',
platforms='any',
data_files=[
('.',['requirements.txt'])
],
install_requires=read('requirements.txt'),
packages=['pyaig'],
)
|
StarcoderdataPython
|
167969
|
from django.conf import settings
from django.db.models.loading import get_model
def get_profile_model():
"""
Returns configured user profile model or None if not found
"""
user_profile_module = getattr(settings, 'USER_PROFILE_MODULE', None)
if user_profile_module:
app_label, model_name = user_profile_module.split('.')
return get_model(app_label, model_name)
else:
return None
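# Usage sketch (hypothetical setting value; requires a configured Django project):
#   # settings.py
#   USER_PROFILE_MODULE = 'accounts.UserProfile'
#   # after Django setup:
#   Profile = get_profile_model()  # -> accounts.models.UserProfile, or None if unset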
|
StarcoderdataPython
|
4812508
|
<filename>flute/protocol.py
import asyncio
from logging import Logger, DEBUG
import datetime
from httptools import HttpRequestParser, parse_url
from werkzeug.routing import NotFound, RequestRedirect, MethodNotAllowed
from statics import HTTP_STATUS_CODES
__all__ = ['FluteHttpProtocol']
class Response(object):
__slots__ = ['headers', 'body', 'cookies']
def __init__(self, headers, body, **options):
self.headers = headers
self.body = body
# cookies is declared in __slots__ and used by add_cookie, so initialize it here
self.cookies = {}
def add_cookie(self, key, value):
self.cookies[key] = value
def get_text(self):
header = "\r\n".join("{0}: {1}".format(key, value) for key, value in self.headers) + "\r\n\r\n"
text = header + body
return text.encode()
class FluteHttpProtocol(asyncio.Protocol):
def __init__(self, app):
self.app = app
# per-connection state; class-level dicts would be shared across connections
self.headers = {}
self.resp_headers = {}
def connection_made(self, transport):
self.transport = transport
def data_received(self, data):
hrp = HttpRequestParser(self)
hrp.feed_data(data)
self.http_version = hrp.get_http_version()
self.method = hrp.get_method()
before_requests = asyncio.ensure_future(self.call_before_requests())  # asyncio.async is a syntax error since Python 3.7
async def call_before_requests(self):
await self.call_handler()
async def call_handler(self):
# adapter = self.app.create_url_adapter()
try:
match = self.app.adapter.match(self.url_requested.path.decode('utf-8'))
# print(match)
func = self.app.view_functions.get(match[0])
# print(func)
resp = await func(self, **match[1])
# if not route:
# self.status_code = 404
# self.create_response_header()
# resp = await self.app.get_error_response(404, self)
# self.transport.write(self.header + resp)
# self.transport.close()
# else:
self.status_code = 200
self.create_response_header()
self.transport.write(self.header + resp)
self.transport.close()
# print('[{datetime}] : {status_code} {method} {url}'.format(datetime=datetime.datetime.now(),
# status_code=self.status_code,
# method=self.method.decode('utf-8'),
# url=self.url_requested.path.decode('utf-8')))
except NotFound:
pass
except MethodNotAllowed:
pass
except RequestRedirect as e:
print(e)
finally:
await self.call_after_requests()
async def call_after_requests(self):
pass
def create_response_header(self):
self.header = "HTTP/%s %d %s\r\n" % (self.http_version, self.status_code, HTTP_STATUS_CODES[self.status_code])
self.header += "Server: Flute Server 0.1.0\r\n"
# self.header += "Content-Length: 41\r\n"
self.header += "Connection: close\r\n"
self.header += "Content-Type: text/html\r\n"
self.header += "\r\n"
self.header = self.header.encode()
# def on_message_begin(self):
# pass
def on_header(self, name, value):
self.headers[name] = value
# def on_headers_complete(self):
# pass
def on_body(self, body):
self.req_body = body
# def on_message_complete(self):
# pass
def on_url(self, url):
self.url_requested = parse_url(url)
# def on_chunk_header(self):
# pass
# def on_chunk_complete(self):
# pass
def test():
loop = asyncio.get_event_loop()
coro = loop.create_server(lambda: FluteHttpProtocol(), '127.0.0.1', 5000)
loop.run_until_complete(coro)
loop.run_forever()
loop.close()
if __name__ == "__main__":
test()
|
StarcoderdataPython
|
147441
|
import abc
from lbann import optimizers_pb2
import lbann.core.util
class Optimizer(abc.ABC):
"""Optimization algorithm for a neural network's parameters."""
def export_proto(self):
"""Construct and return a protobuf message."""
return optimizers_pb2.Optimizer()
# Generate Optimizer sub-classes from lbann.proto
# Note: The list of skip fields must be updated if any new fields are
# added to the Optimizer message in lbann.proto
if optimizers_pb2:
classes = lbann.core.util.generate_classes_from_protobuf_message(
optimizers_pb2.Optimizer,
base_class = Optimizer,
base_has_export_proto = True)
for c in classes:
globals()[c.__name__] = c
|
StarcoderdataPython
|
1719897
|
<reponame>jbyu/HorizonNet
#!/usr/bin/env python
from .naive import grid_sample as naive
from .faster import grid_sample as faster
__all__ = [
"faster",
"naive",
]
|
StarcoderdataPython
|
4806829
|
from pathlib import Path
import multiprocessing
import contextlib
from tqdm.autonotebook import tqdm
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from scipy.spatial.distance import hamming
from Bio import pairwise2, SeqIO
data_path = Path("../data/Protera")
def compute_diversity(seq1, seq2, method="hamming"):
if method == "alignment":
method = pairwise2.align.globalxx
diversity = method(seq1, seq2)
diversity = diversity[0].score / max(len(seq1), len(seq2))
elif method == "hamming":
if len(seq1) != len(seq2):
size = min(len(seq1), len(seq2))
seq1 = seq1[:size]
seq2 = seq2[:size]
method = hamming
# print(list(seq1), "\n" ,list(seq2))
diversity = method(list(seq1), list(seq2))
return diversity
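# Quick sanity check (hypothetical toy sequences; illustration only):
#   compute_diversity("ACDEF", "ACDFF")                      # hamming -> 0.2 (1 of 5 positions differs)
#   compute_diversity("ACDEF", "ACDFF", method="alignment")  # globalxx score / max length -> 0.8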
def div_vs_all(sequence, other_sequences, reducer=np.nanmean):
v_diversity = np.vectorize(lambda x: compute_diversity(sequence, x))
if len(other_sequences) > 1:
div_vs_all = v_diversity(other_sequences)
else:
print(f"Skipping sequence {sequence}")
return np.nan
reduced_div_vs_all = reducer(div_vs_all) if len(div_vs_all) >= 1 else np.nan
return reduced_div_vs_all
def dataset_diversity(sequences, method="hamming", reduce="mean", verbose=True):
if reduce != "mean":
raise NotImplementedError
else:
# nan results are due to different lengths
reducer = np.nanmean
reduced_divs = []
if verbose:
pbar = tqdm(total=len(sequences), miniters=1, smoothing=1)
all_other_sequences = [
np.concatenate((sequences[:idx], sequences[idx + 1 :]))
for idx in range(len(sequences))
]
with contextlib.closing(multiprocessing.Pool(4)) as pool:
for sequence_idx, sequence in enumerate(sequences):
other_sequences = all_other_sequences[sequence_idx]
reduced_divs.append(
pool.apply_async(div_vs_all, args=(sequence, other_sequences))
)
for idx, result in enumerate(reduced_divs):
reduced_divs[idx] = result.get()
if verbose:
pbar.update(1)
pbar.refresh()
# the pool must be closed before join(), otherwise join() raises ValueError
pool.close()
pool.join()
if verbose:
pbar.close()
return reduced_divs
def get_cluster_diversity():
rep_seqs = dict()
rep_seq_glob = list((data_path / "clustering").glob("cluster_*_rep_seq.fasta"))
print(rep_seq_glob)
for rep_seqs_pth in rep_seq_glob:
df_name = rep_seqs_pth.stem.split("_")[1]
df = pd.DataFrame()
sequences = [str(record.seq) for record in SeqIO.parse(rep_seqs_pth, "fasta")]
sequences = np.random.permutation(sequences)[:3000]
# alignment = dataset_diversity(sequences, "alignment")
hamming = dataset_diversity(sequences, "hamming")
df["sequences"] = sequences
# df["alignment"] = alignment
df["hamming"] = hamming
rep_seqs[df_name] = df
return rep_seqs
if __name__ == "__main__":
print("Running...")
rep_seqs = get_cluster_diversity()
for key, df in rep_seqs.items():
df.to_csv(f"{key}.csv", index=False)
|
StarcoderdataPython
|
3388783
|
import requests
import pytest
from neptune.neptune_api import NeptuneService
from neptune.tests.conftest import get_server_addr
@pytest.mark.fpgas(1)
def test_coco(request):
"""
Check the coco service from Neptune with a known image
Args:
request (fixture): get the cmdline options
"""
server_addr = get_server_addr(request.config)
# create Neptune service and start it
service = NeptuneService(server_addr, 'coco')
service.start()
# submit job to service
post_data = {
'url': 'http://farm1.staticflickr.com/26/50531313_4422f0787e_z.jpg',
'dtype': 'uint8'
}
r = requests.post('%s/serve/coco' % server_addr, post_data)
assert r.status_code == 200, r.text
response = r.json()
assert type(response) is dict
# for this known image, validate the expected response
for i, j in zip(response['resized_shape'], [149, 224, 3]):
assert i == j
assert 'img' in response
assert response['url'] == post_data['url']
assert len(response['boxes']) == 2
tolerance = 5
for i, j in zip(response['boxes'][0], [85, 18, 149, 118, "giraffe"]):
if isinstance(j, int):
assert j - tolerance <= i <= j + tolerance
else:
assert i == j
for i, j in zip(response['boxes'][1], [21, 90, 65, 148, "zebra"]):
if isinstance(j, int):
assert j - tolerance <= i <= j + tolerance
else:
assert i == j
service.stop()
|
StarcoderdataPython
|
1657878
|
import heapq
global list_of_popularity
global list_of_filesID
list_of_popularity = [0] * 100
list_of_cost = [0] * 100
list_of_filesID = []
list_of_size = []
list_of_k_value = []
k_parameter = 0.002
counter = 0
total_cost=0
demand_counter=0
k_value=7
previous_total_cost = 1
positive_negative = True
cache_not_used_enough = 0
g_k_value =7
verbose = False # Set it to True to print which files are in the cache.
verbose_time = False
def cache_decision_sample(my_cache, file, file_size):
if file not in my_cache.stored_files:
# If the cache capacity is full,
# remove the last accessed files from the cache until there is enough space for the new file.
if my_cache.cache_size + file_size < my_cache.cache_capacity:
my_cache.store_in_cache(file)
else:
while my_cache.cache_size + file_size > my_cache.cache_capacity:
my_cache.remove_from_cache(min(my_cache.timestamp, key=my_cache.timestamp.get))
my_cache.store_in_cache(file)
# You will fill inside this function for part 1. You can only use the information given in the arguments.
def cache_decision_part1(my_cache, file, file_popularities, file_sizes):
pass
def guess_k_value():
global previous_total_cost
global g_k_value
cost_calc=(total_cost/demand_counter)/previous_total_cost
if(cost_calc>1):
g_k_value = k_value + 5
if(g_k_value>100):
g_k_value=100
else:
g_k_value=k_value
previous_total_cost=total_cost/demand_counter
return g_k_value
# You will fill inside this function for part 2. You can only use the information given in the arguments.
def cache_decision_part2(my_cache, file, file_size):
global list_of_popularity
global total_cost
global demand_counter
global k_value
global cache_not_used_enough
demand_counter=demand_counter+1
list_current_files_popularity = []
list_of_popularity[file] = list_of_popularity[file] + 1
if (len(my_cache.stored_files) == 0):
for i in range(100):
list_of_popularity[i] = 0
#try to guess k_value
if(demand_counter%200==0 and demand_counter!=0):
k_value=guess_k_value()
if file not in my_cache.stored_files:
total_cost = total_cost + file_size
list_current_files = my_cache.stored_files
if (len(list_current_files) != 0):
for i in range(len(list_current_files)):
list_current_files_popularity.append(list_of_popularity[list_current_files[i]])
if(max(list_of_popularity)>k_value):
#clear cache if pop < k_value
if(min(list_current_files_popularity)<k_value):
index_min = min(range(len(list_current_files_popularity)),
key=list_current_files_popularity.__getitem__)
my_cache.remove_from_cache(my_cache.stored_files[index_min])
list_current_files_popularity.pop(index_min)
if(my_cache.cache_size + file_size < my_cache.cache_capacity):
my_cache.store_in_cache(file)
idx=heapq.nlargest(20, list_of_popularity)
res = sorted(range(len(list_of_popularity)), key=lambda sub: list_of_popularity[sub])[-20:]
# assumed intent: iterate over the 20 most popular candidates; the original
# range(len(list_current_files), 0) was an empty range and never executed
for i in range(len(res)):
if(idx[i] not in list_current_files_popularity):
if(my_cache.cache_size+ file_size < my_cache.cache_capacity):
my_cache.store_in_cache(res[i])
else:
if my_cache.cache_size + file_size < my_cache.cache_capacity:
my_cache.store_in_cache(file)
else:
if my_cache.cache_size + file_size < my_cache.cache_capacity:
my_cache.store_in_cache(file)
|
StarcoderdataPython
|
61226
|
import pandas as pd
def lookup_dates(s):
"""
This is an extremely fast approach to datetime parsing.
For large data, the same dates are often repeated. Rather than
re-parse these, we store all unique dates, parse them, and
use a lookup to convert all dates.
"""
dates_dict = {date: pd.to_datetime(date, errors='coerce') for date in s.unique()}
return s.map(dates_dict)
def end_quarter(series):
return (series - pd.tseries.offsets.DateOffset(days=1) + pd.tseries.offsets.QuarterEnd())
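# Minimal usage sketch (hypothetical data; illustration only):
#   s = pd.Series(["2020-01-01", "2020-01-02", "2020-01-01"] * 1000)
#   parsed = lookup_dates(s)            # parses 2 unique dates instead of 3000 strings
#   quarter_ends = end_quarter(parsed)  # -> 2020-03-31 for every entry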
|
StarcoderdataPython
|
9609
|
from abc import (
abstractmethod,
)
from typing import (
Any,
Callable,
cast,
FrozenSet,
Generic,
Type,
TypeVar,
)
from cancel_token import (
CancelToken,
)
from p2p.exceptions import (
PeerConnectionLost,
)
from p2p.kademlia import Node
from p2p.peer import (
BasePeer,
PeerSubscriber,
)
from p2p.peer_pool import (
BasePeerPool,
)
from p2p.protocol import (
Command,
PayloadType,
)
from p2p.service import (
BaseService,
)
from trinity.endpoint import (
TrinityEventBusEndpoint,
)
from .events import (
ConnectToNodeCommand,
DisconnectPeerEvent,
HasRemoteEvent,
PeerCountRequest,
PeerCountResponse,
)
TPeer = TypeVar('TPeer', bound=BasePeer)
TStreamEvent = TypeVar('TStreamEvent', bound=HasRemoteEvent)
class PeerPoolEventServer(BaseService, PeerSubscriber, Generic[TPeer]):
"""
Base class to create a bridge between the ``PeerPool`` and the event bus so that peer
messages become available to external processes (e.g. isolated plugins). In the opposite
direction, other processes can also retrieve information or execute actions on the peer pool by
sending specific events through the event bus that the ``PeerPoolEventServer`` answers.
This class bridges all common APIs but protocol specific communication can be enabled through
subclasses that add more handlers.
"""
msg_queue_maxsize: int = 2000
subscription_msg_types: FrozenSet[Type[Command]] = frozenset({})
def __init__(self,
event_bus: TrinityEventBusEndpoint,
peer_pool: BasePeerPool,
token: CancelToken = None) -> None:
super().__init__(token)
self.peer_pool = peer_pool
self.event_bus = event_bus
async def _run(self) -> None:
self.logger.debug("Running %s", self.__class__.__name__)
self.run_daemon_event(
DisconnectPeerEvent,
lambda peer, event: peer.disconnect_nowait(event.reason)
)
self.run_daemon_task(self.handle_peer_count_requests())
self.run_daemon_task(self.handle_connect_to_node_requests())
self.run_daemon_task(self.handle_native_peer_messages())
await self.cancellation()
def run_daemon_event(self,
event_type: Type[TStreamEvent],
event_handler_fn: Callable[[TPeer, TStreamEvent], Any]) -> None:
"""
Register a handler to be run every time that an event of type ``event_type`` appears.
"""
self.run_daemon_task(self.handle_stream(event_type, event_handler_fn))
@abstractmethod
async def handle_native_peer_message(self,
remote: Node,
cmd: Command,
msg: PayloadType) -> None:
"""
Process every native peer message. Subclasses should overwrite this to forward specific
peer messages on the event bus. The handler is called for every message that is defined in
``self.subscription_msg_types``.
"""
pass
def get_peer(self, remote: Node) -> TPeer:
"""
Look up and return a peer from the ``PeerPool`` that matches the given node.
Raise ``PeerConnectionLost`` if the peer is no longer in the pool or is winding down.
"""
try:
peer = self.peer_pool.connected_nodes[remote]
except KeyError:
self.logger.debug("Peer with remote %s does not exist in the pool anymore", remote)
raise PeerConnectionLost()
else:
if not peer.is_operational:
self.logger.debug("Peer %s is not operational when selecting from pool", peer)
raise PeerConnectionLost()
else:
return cast(TPeer, peer)
async def handle_connect_to_node_requests(self) -> None:
async for command in self.wait_iter(self.event_bus.stream(ConnectToNodeCommand)):
self.logger.debug('Received request to connect to %s', command.remote)
self.run_task(self.peer_pool.connect_to_node(command.remote))
async def handle_peer_count_requests(self) -> None:
async for req in self.wait_iter(self.event_bus.stream(PeerCountRequest)):
await self.event_bus.broadcast(
PeerCountResponse(len(self.peer_pool)),
req.broadcast_config()
)
async def handle_stream(self,
event_type: Type[TStreamEvent],
event_handler_fn: Callable[[TPeer, TStreamEvent], Any]) -> None:
async for event in self.wait_iter(self.event_bus.stream(event_type)):
try:
peer = self.get_peer(event.remote)
except PeerConnectionLost:
pass
else:
event_handler_fn(peer, event)
async def handle_native_peer_messages(self) -> None:
with self.subscribe(self.peer_pool):
while self.is_operational:
peer, cmd, msg = await self.wait(self.msg_queue.get())
await self.handle_native_peer_message(peer.remote, cmd, msg)
class DefaultPeerPoolEventServer(PeerPoolEventServer[BasePeer]):
async def handle_native_peer_message(self,
remote: Node,
cmd: Command,
msg: PayloadType) -> None:
pass
|
StarcoderdataPython
|
179510
|
<reponame>renauddahou/appointment_bot
from bs4 import BeautifulSoup
import re
import time
import telegram_message
def open_day(driver):
open_days_list = []
soup_level = BeautifulSoup(driver.page_source, 'html.parser')
month = soup_level.find_all('tbody') #, attrs = {'class':'fc-week fc-first'})
days = re.findall(r'<td class=\"fc-day fc-(.*?)>', str(month[0]))
#print(len(days))
for i in range(len(days)):
#print(days[i])
date = re.search(r'\"(\d.+?)\"', days[i])
try:
style = re.search(r'\((\d.+)\)', days[i])
if style.group()== '(255, 106, 106)':
print("Close Day")
print("Date: %s" %(date.group()))
elif style.group()== '(188, 237, 145)':
print("Open Day")
open_days_list.append(date.group())
print("Date: %s" %(date.group()))
#print("--------")
#print("Date: %s and Color-day: %s" %(date.group(), style.group()))
except:
pass
#print("Date: %s" %(date.group()))
return open_days_list
def select_day(driver, open_days, desired_date):
time.sleep(5)
status = False
for date in open_days:
if date == desired_date:
driver.find_element_by_xpath("//*[contains(text(), '14')]").click()
time.sleep(3)
soup_level = BeautifulSoup(driver.page_source, 'html.parser')
hours = soup_level.find_all('input', attrs = {'name':'selectedTimeBand'})
print(hours)
print("Date selected")
status = True
return status
def calendar_update(driver, location_name):
try:
open_days = open_day(driver)
telegram_message.message_sender(open_days, location_name)
time.sleep(30)
driver.find_element_by_xpath("//*[contains(text(), '›')]").click()
open_days = open_day(driver)
telegram_message.message_sender(open_days, location_name)
time.sleep(30)
driver.find_element_by_xpath("//*[contains(text(), '‹')]").click()
status = True
return status
except:
status = False
return status
|
StarcoderdataPython
|
1669913
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import multiprocessing
import queue  # for queue.Queue and queue.Empty used below
import sys
import threading
from edl.utils import reader as edl_reader
from edl.utils import env as edl_env
from edl.utils import state as edl_state
from edl.utils import data_server
from edl.utils import data_server_pb2
from edl.utils import edl_process
from edl.utils import data_server_client
from edl.utils import etcd_db
from edl.utils.log_utils import logger
from edl.utils import unique_name
from edl.utils import exceptions
class DataGenerator(edl_process.ProcessWrapper):
"""
1. get file_list from data_server_leader
2. parse files of file_list and put BatchData to out_queue;
if the data end is reached, put None to out_queue.
3. the program will exit if it meets any error
"""
def __init__(
self,
reader_leader_endpoint,
reader_name,
pod_id,
all_files_list,
splitter_cls,
out_queue,
batch_size=1,
):
super(DataGenerator, self).__init__()
self._batch_data_id = 0
self._leader_endpoint = reader_leader_endpoint
self._pod_id = pod_id
self._reader_name = reader_name
self._file_list = all_files_list
self._splitter_cls = splitter_cls
self._data_queue = out_queue
# assumed: batch size forwarded by the caller; the original code read
# self._batch_size without ever assigning it
self._batch_size = batch_size
def _get_file_list(self, timeout=60):
client = data_server_client.DataServerClient()
return client.get_file_list(
leader_endpoint=self._leader_endpoint,
reader_name=self._reader_name,
pod_id=self._pod_id,
file_list=self._file_list,
)
def _generate_batch_data(self):
self._batch_data_id += 1
b = data_server_pb2.BatchData()
b.batch_data_id = self._batch_data_id
b.data = None
return b
def _read_batch_data(self):
b = self._generate_batch_data()
for m in self._get_file_list():
if self._stop.is_set():
break
assert self._file_list[m.idx] == m.path
for record in self._splitter_cls(m.path):
fields = record
assert fields[0] == m.idx
rec = data_server_pb2.Record()
rec.record_no = fields[0]
for field in fields[1:]:
rec.field_data.append(field)
if len(b.records) >= self._batch_size:
self._data_queue.put(b)
b = self._generate_batch_data()
if len(b.records) > 0:
self._data_queue.put(b)
self._data_queue.put(None)
def _worker_func(self):
try:
self._read_batch_data()
except Exception as e:
print(e, file=sys.stderr)
sys.exit(1)
class DataAccesser(object):
def __init__(
self,
reader_leader_endpoint,
reader_name,
trainer_env,
input_queue,
out_queue,
queue_size,
):
self._reader_leader_endpoint = reader_leader_endpoint
self._reader_name = reader_name
self._trainer_env = trainer_env
self._etcd = etcd_db.get_global_etcd(
self._trainer_env.etcd_endpoint, job_id=self._trainer_env.job_id
)
# BatchData
self._input_queue = input_queue
self._out_queue = out_queue
# batch_data_id => BatchData
self._cache = {}
# pb.BatchDataRequest queue
self._req_queue = queue.Queue(queue_size)  # the threading module has no Queue class
self._data_server = data_server.DataServer(self)
self._data_server.start()
edl_reader.save_to_etcd(
self._etcd,
reader_name=self._reader_name,
pod_id=self._trainer_env.pod_id,
data_server_endpoint=self._data_server.endpoint,
)
self._stop = threading.Event()
self._lock = threading.Lock()  # guards self._cache (used in _report and get_local_batch_data)
self._t_reporter = threading.Thread(target=self.report)
self._t_generater = threading.Thread(target=self.generate)
self._t_accesser = threading.Thread(target=self.access)
self._client = data_server_client.DataServerClient()
def start(self):
self._client.connect(self._reader_leader_endpoint)
self._t_reporter.start()
self._t_generater.start()
self._t_accesser.start()
def _report(self, report_size=10):
"""
1. Report BatchData index to Leader
2. Get the BatchData index need to be processed
if there is no data, set None to req_queue
"""
batch_data_ids = []
while not self._stop.is_set():
while len(batch_data_ids) < report_size:
b = self._input_queue.get()
if b is None:
logger.info("data read to end!")
break
batch_data_ids.append(b.batch_data_id)
with self._lock:
self._cache[b.batch_data_id] = b
self._client.report_batch_data_meta(
reader_leader_endpoint=self._reader_leader_endpoint,
reader_name=self._reader_name,
pod_id=self._trainer_env.pod_id,
dataserver_endpoint=self._data_server.endpoint,
batch_data_ids=batch_data_ids,
)
batch_data_ids = []
while not self._stop.is_set() and len(batch_data_ids) > 0:
self._client.report_batch_data_meta(
reader_leader_endpoint=self._reader_leader_endpoint,
reader_name=self._reader_name,
pod_id=self._trainer_env.pod_id,
dataserver_endpoint=self._data_server.endpoint,
batch_data_ids=batch_data_ids,
)
self._client.reach_data_end(
reader_leader_endpoint=self._reader_leader_endpoint,
reader_name=self._reader_name,
pod_id=self._trainer_env.pod_id,
)
def _access(self):
while not self._stop.is_set():
res = self._client.get_balanced_batch_data(
reader_leader_endpoint=self._reader_leader_endpoint,
reader_name=self._reader_name,
pod_id=self._trainer_env.pod_id,
)
self._req_queue.put(res)
# data end
if res is None:
break
def _get_batch_data(self, req):
"""
Read BatchData from local or remote by BatchDataRequest
"""
if self._trainer_env.pod_id != req.producer_pod_id:
return (req, self._client.get_batch_data(req))
return (req, self.get_local_batch_data(req))
def get_local_batch_data(self, req):
ret = []
for batch_data_id in req.data.batch_data_ids:
with self._lock:
ret.append(self._cache.pop(batch_data_id))
return ret
def _generate(self):
while not self._stop.is_set():
req = self._req_queue.get()
if req is None:
break
ret = self._get_batch_data(req)
# put the (request, batches) pair as one item; the consumer in
# Reader.__iter__ unpacks it as b[0] (meta) and b[1] (data)
self._out_queue.put(ret)
self._out_queue.put(None)
def report(self):
try:
self._report()
except Exception as e:
print(e, file=sys.stderr)
sys.exit(1)
def access(self):
try:
self._access()
except Exception as e:
print(e, file=sys.stderr)
sys.exit(1)
def generate(self):
try:
self._generate()
except Exception as e:
print(e, file=sys.stderr)
sys.exit(1)
def access_batch_data(
reader_leader,
reader_name,
trainer_env,
input_queue,
out_queue,
cache_capcity,
error_queue,
):
"""
Run DataAccesser in a separate process
"""
try:
a = DataAccesser(
reader_leader,
reader_name,
trainer_env,
input_queue,
out_queue,
cache_capcity,
)
a.start()
except KeyboardInterrupt:
pass
except Exception:
import traceback
error_queue.put(traceback.format_exc())
sys.exit(1)
class Reader(object):
def __init__(self, file_list, file_splitter_cls, batch_size, cache_capcity=100):
self._file_list = file_list
assert isinstance(self._file_list, list), "file_list must be a list"
self._name = unique_name.generator("_dist_reader_")
self._cls = file_splitter_cls
self._batch_size = batch_size
assert self._batch_size > 0, "batch size must > 0"
self._cache_capcity = cache_capcity
# connections to data servers
self._trainer_env = edl_env.TrainerEnv()
self._state = edl_state.load_from_etcd(
etcd_endpoints=self._trainer_env.etcd_endpoints,
job_id=self._trainer_env.job_id,
state_name=self._name,
timeout=60,
)
self._etcd = etcd_db.get_global_etcd(
self._trainer_env.endpoints, self._trainer_env.job_id
)
# reader meta
self._reader_leader = edl_reader.load_from_ectd(
self._etcd, self._trainer_env.pod_leader_id, timeout=60
)
self._generater_out_queue = multiprocessing.Queue(self._cache_capcity)
self._accesser_out_queue = multiprocessing.Queue(self._cache_capcity)
self._generater = None
self._accesser = None
def stop(self):
if self._generater:
self._generater.stop()
self._generater = None
if self._accesser:
self._accesser.terminate()
self._accesser.join()
self._accesser = None
def __exit__(self):
self.stop()
def _check_accesser(self):
if self._accesser.is_alive():
return True
self._accesser.join()
exitcode = self._accesser.exitcode
if exitcode == 0:
return False
if not self._error_queue.empty():
raise exceptions.EdlAccessDataError(self._error_queue.get())
else:
raise exceptions.EdlAccessDataError(
"access process exit:{}".format(exitcode)
)
def __iter__(self):
# assumed wiring: the original created DataGenerator() with no arguments,
# which cannot satisfy DataGenerator.__init__; pass the reader state down
self._error_queue = multiprocessing.Queue()
self._generater = DataGenerator(
self._reader_leader,
self._name,
self._trainer_env.pod_id,
self._file_list,
self._cls,
self._generater_out_queue,
batch_size=self._batch_size,
)
self._generater.start()
self._accesser = multiprocessing.Process(
target=access_batch_data,
args=(
self._reader_leader,
self._name,
self._trainer_env,
self._generater_out_queue,
self._accesser_out_queue,
self._cache_capcity,
self._error_queue,
),
)
self._accesser.start()
while True:
if not self._check_accesser():
break
try:
b = self._accesser_out_queue.get(timeout=60)
except queue.Empty:  # multiprocessing queues raise queue.Empty on timeout
continue
if b is None:
logger.debug("{} reach data end".format(self._name))
break
yield {"meta": b[0], "data": b[1]}
|
StarcoderdataPython
|
134350
|
__all__ = [ 'XDict' ]
class XDict(dict):
__slots__ = ()
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
__getitem__ = dict.get
__getattr__ = dict.get
__getnewargs__ = lambda self: ()  # the original getattr(dict, self) raised TypeError when pickling
__repr__ = lambda self: '<XDict %s>' % dict.__repr__(self)
__getstate__ = lambda self: None
__copy__ = lambda self: XDict(self)
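# Minimal usage sketch:
#   d = XDict(a=1)
#   d.b = 2                    # attribute-style assignment
#   d.a, d['b'], d.missing     # -> (1, 2, None); missing keys yield None, not KeyError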
|
StarcoderdataPython
|
1799501
|
<reponame>kishima/RaspiMusicServer
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import time
import subprocess
import re
from pykakasi import kakasi
from arduino_timer import Yukkuri
import grove_gesture_sensor
import ap_music
import ap_music_server_conf
MENU_IDLE = 0
MENU_PLAYING = 1
MENU_ONMENU = 2
class ApMenu:
MENU_TIMEOUT_SET = 20*5
def __init__(self, ledobj, volumeobj, timerobj):
self.conf = ap_music_server_conf.MusicServerConfig().get_conf()
self.loop1 = 0
self.led = ledobj
self.volume = volumeobj
self.timer = timerobj
self.music = ap_music.ApMusic()
self.mpdstat = "" #mpd status
self.menu_stat = MENU_ONMENU
self.stat_chage = True
self.menu_item = ["PLAY","STOP","CANCEL","RELOAD","WEATHER","NEWS"]
self.station_list = []
self.menu_cursor = 0
self.current_station = 0
playlist = self.music.get_playlist()
for station in playlist:
if station != "":
self.station_list.append(station)
print("station",station)
self.menu_timeout = 0
self.kakasi = kakasi()
self.kakasi.setMode('H', 'a')
self.kakasi.setMode('K', 'a')
self.kakasi.setMode('J', 'a')
self.conv = self.kakasi.getConverter()
self.yukkuri = Yukkuri()
return
def update(self,cnt,x,y,button,ges):
if self.menu_stat == MENU_IDLE:
self.mode_idle(cnt,x,y,button,ges)
elif self.menu_stat == MENU_PLAYING:
self.mode_playing(cnt,x,y,button,ges)
elif self.menu_stat == MENU_ONMENU:
self.mode_onmenu(cnt,x,y,button,ges)
return
def mode_idle(self,cnt,x,y,button,ges):
return
def mode_playing(self,cnt,x,y,button,ges):
if x!=0 or y!=0 or button != 0 or ges == grove_gesture_sensor.gesture.FORWARD:
self.menu_stat = MENU_ONMENU
self.stat_chage = True
self.menu_timeout = 20*5
return
if 0 == cnt % 40 :
self.mpdstat = self.music.check_mpd_status()
self.mpdstat = self.conv.do(self.mpdstat)
self.mpdstat = self.music.pickup_first_line(self.mpdstat)
self.mpdstat = self.mpdstat.replace('\n','')
if 0 == cnt % 10:
if self.loop1 > len(self.mpdstat) :
self.loop1=0
if len(self.mpdstat)-self.loop1 < 32:
t=self.mpdstat[self.loop1:]+" / "+self.mpdstat
self.led.put_text(t)
else:
self.led.put_text(self.mpdstat[self.loop1:])
self.loop1+=1
if ges == grove_gesture_sensor.gesture.CLOCKWISE:
play_stat = self.music.check_mpc_status()
if play_stat:
self.music.stop()
self.yukkuri.wether_speech()
if play_stat:
self.music.play()
return
def reload_schedule(self):
self.timer.reload_schedule()
def mode_onmenu(self,cnt,x,y,button,ges):
if y != 0:
x = 0
condition_update = False
if x!=0 or y!=0 or button != 0 or ges != 0:
self.menu_timeout = self.MENU_TIMEOUT_SET
self.menu_timeout -= 1
if self.menu_timeout <= 0:
self.menu_stat = MENU_PLAYING
if ges == grove_gesture_sensor.gesture.FORWARD:
button = 1
if ges == grove_gesture_sensor.gesture.UP:
y = -1
if ges == grove_gesture_sensor.gesture.DOWN:
y = 1
if ges == grove_gesture_sensor.gesture.RIGHT:
x = 1
if ges == grove_gesture_sensor.gesture.LEFT:
x = -1
if self.stat_chage:
#menu will be updated when state is changed
condition_update = True
self.stat_chage=False
if y == 1:
self.menu_cursor+=1
condition_update = True
if self.menu_cursor>=len(self.menu_item):
self.menu_cursor=0
if y == -1:
self.menu_cursor-=1
condition_update = True
if self.menu_cursor<0:
self.menu_cursor=len(self.menu_item)-1
if x == 1:
self.current_station+=1
condition_update = True
if self.current_station>=len(self.station_list):
self.current_station=0
if x == -1:
self.current_station-=1
condition_update = True
if self.current_station<0:
self.current_station=len(self.station_list)-1
if condition_update:
self.led.clear_display()
volume = self.volume.get_current_volset()
output = "["+self.menu_item[self.menu_cursor]+"]"+" V:"+str(volume)+"\n"
output += self.station_list[self.current_station]
self.led.put_text(output)
if button==1:
logging.debug("button press: cursor="+str(self.menu_cursor))
if self.menu_item[self.menu_cursor] == "PLAY":
r = self.music.play_item(str(self.current_station+1))
logging.debug(r)
elif self.menu_item[self.menu_cursor] == "STOP":
r = self.music.stop()
logging.debug(r)
elif self.menu_item[self.menu_cursor] == "NEXT":
r = self.music.next()
logging.debug(r)
elif self.menu_item[self.menu_cursor] == "CANCEL":
self.menu_stat = MENU_PLAYING
self.stat_chage = True
elif self.menu_item[self.menu_cursor] == "RELOAD":
self.reload_schedule()
elif self.menu_item[self.menu_cursor] == "WEATHER":
def local_yukkuri():
self.yukkuri.dayofweek_info_speech()
self.yukkuri.wether_speech()
self.music.mute_play_action(local_yukkuri)
elif self.menu_item[self.menu_cursor] == "NEWS":
def local_yukkuri():
self.yukkuri.rss_speech(self.conf['rss01'])
self.yukkuri.rss_speech(self.conf['rss02'])
self.yukkuri.rss_speech(self.conf['rss03'])
self.music.mute_play_action(local_yukkuri)
return
|
StarcoderdataPython
|
3203348
|
<reponame>KamilPiechowiak/iatransfer<filename>iatransfer/utils/file_utils.py
import json
def read_contents(filename: str, encoding: str = 'utf-8') -> str:
with open(filename, 'r', encoding=encoding) as file:
return file.read()
def read_json(filename: str) -> dict:
with open(filename) as json_file:
return json.load(json_file)
|
StarcoderdataPython
|
3354360
|
# Copyright 2018 The OpenEBS Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
#
# Info:
# This Python script can be used to check the cluster readiness over multiple Clouds
# providers like GCE, GKE and AWS with any number of nodes. Script does a series of
# api calls to the kubernetes cluster, by utilising the Python client library for kubernetes.
# Script terminates successfully once cluster is up or else fails after a T/O period of 1200 seconds
# To run the script: `python cluster_health_check.py --nodes 4`
from kubernetes import client, config
import multiprocessing
import time
import argparse
import sys
def create_api():
while True:
try:
v1 = client.CoreV1Api()
break
except Exception:
time.sleep(30)
return v1
def get_nodes(node_count):
v1 = create_api()
while True:
try:
getNodes = v1.list_node()
if len(getNodes.items) == int(node_count):
return getNodes.items
except Exception as e:
print e
time.sleep(30)
def get_node_status(node_count):
count = 0
nodes = get_nodes(node_count)
for node in nodes:
obj = node.status.conditions
for i in obj:
if i.type == 'Ready':
count = count + 1
return count
def checkCluster(node_count):
while True:
try:
count = get_node_status(node_count)
if count == int(node_count):
break
except Exception:
time.sleep(30)
print('Cluster is Up and Running')
def get_kube_config():
while True:
try:
config.load_kube_config()
break
except Exception:
time.sleep(30)
def get_args():
parser = argparse.ArgumentParser()
# Pass total node count including master node in the cluster iusing flag -n or --nodes
parser.add_argument(
'-n', '--nodes', help='Node or Size of cluster', required=True)
args = parser.parse_args()
return args.nodes
def init():
nodes = get_args()
get_kube_config()
while True:
try:
checkCluster(nodes)
return exit
except Exception:
time.sleep(30)
if __name__ == '__main__':
p = multiprocessing.Process(target=init, name="main")
p.start()
timeElapsed = 0
timeOut = 1200
while(True):
if p.is_alive() is False:
p.terminate()
sys.exit(0)
# Terminate once the timeOut period (1200 seconds) elapses
elif timeElapsed == timeOut:
print "Error: time out! Program terminated after", timeOut, "seconds"
p.terminate()
sys.exit(1)
time.sleep(1)
timeElapsed += 1
|
StarcoderdataPython
|
3395738
|
"""
Prototype Design pattern
- Needed when creating several similar objects.
- Useful when the cost of creating an object from scratch is large
- Create a Prototype instance, then clone it whenever needed
"""
import copy
class Prototype:
"""Protoype classs to register and clone objects"""
def __init__(self):
self._objects = {}
def register_object(self, name, obj):
"""register an object"""
self._objects[name] = obj
def unregister_object(self, name):
"""unregister an object"""
del self._objects[name]
def clone(self, name, **attr):
"""Clone a registered object and update its attributes"""
obj = copy.deepcopy(self._objects.get(name))
obj.__dict__.update(attr) # possibly update some attr's
return obj
class Car:
def __init__(self):
self.name = 'Skylark'
self.color = 'Red'
self.options = 'Ex'
def __str__(self):
return '{} | {} | {}'.format(self.name, self.color, self.options)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
car = Car()
prototype = Prototype()
prototype.register_object('Skylark', car)
c1 = prototype.clone('Skylark')
print(c1)
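# Clone again, this time overriding attributes on the copy; the registered
# prototype object itself is untouched (hypothetical attribute values):
c2 = prototype.clone('Skylark', color='Blue', options='Lx')
print(c2)   # Skylark | Blue | Lx
print(car)  # Skylark | Red | Ex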
|
StarcoderdataPython
|
1610439
|
<gh_stars>0
class BeansException(Exception):
pass
class EncodingException(BeansException):
pass
class DecodingException(BeansException):
pass
|
StarcoderdataPython
|
3351669
|
<reponame>dmonn/socialize
import os
def create_token_file(token):
try:
home_directory = os.path.expanduser('~')
file = open(os.path.join(home_directory, ".AUTHTOKEN"), 'w')
file.write(token['token'])
file.close()
return 200
except:
print('Something went wrong!')
def remove_token_file():
try:
home_directory = os.path.expanduser('~')
os.remove(os.path.join(home_directory, ".AUTHTOKEN"))
except:
print('Something went wrong!')
|
StarcoderdataPython
|
45031
|
''' An example of playing randomly in RLCard
'''
import argparse
import pprint
import rlcard
from rlcard.agents import RandomAgent
from rlcard.utils import set_seed
def run(args):
# Make environment
env = rlcard.make(args.env, config={'seed': 42})
# Seed numpy, torch, random
set_seed(42)
# Set agents
agent = RandomAgent(num_actions=env.num_actions)
env.set_agents([agent for _ in range(env.num_players)])
# Generate data from the environment
trajectories, player_wins = env.run(is_training=False)
# Print out the trajectories
print('\nTrajectories:')
print(trajectories)
print('\nSample raw observation:')
pprint.pprint(trajectories[0][0]['raw_obs'])
print('\nSample raw legal_actions:')
pprint.pprint(trajectories[0][0]['raw_legal_actions'])
if __name__ == '__main__':
parser = argparse.ArgumentParser("Random example in RLCard")
parser.add_argument('--env', type=str, default='leduc-holdem',
choices=['blackjack', 'leduc-holdem', 'limit-holdem', 'doudizhu', 'mahjong', 'no-limit-holdem', 'uno', 'gin-rummy', 'bridge', 'dummy'])
args = parser.parse_args()
run(args)
|
StarcoderdataPython
|
1766630
|
<reponame>alexander-sidorov/qap-05
from typing import Any
from hw.alexander_sidorov.common import validate
from .task12 import task_12
def test_task_12() -> None:
args: Any
args = (1, 2)
validate(
task_12,
*args,
expected_data={1: 2},
)
args = "ab"
validate(
task_12,
*args,
expected_data={"a": "b"},
)
args = (None, [], ..., {})
validate(
task_12,
*args,
expected_data={None: [], ...: {}},
)
args = "a" * 20
validate(
task_12,
*args,
expected_data={"a": "a"},
)
args = "a" * 19
validate(
task_12,
*args,
expected_errors=["odd number of elements (19)"],
)
args = [[], None]
validate(
task_12,
*args,
expected_errors={
"args[0]=[] is not hashable",
},
)
|
StarcoderdataPython
|
4819553
|
<reponame>ericmjl/mbtools
"""
Author: <NAME>
Purpose: A module that defines functions for doing PCR.
"""
from Levenshtein import distance
from difflib import SequenceMatcher
from Bio.Seq import Seq
import math
import numpy as np
def alignment_indices(template, primer):
"""
Finds the optimal alignment between template and primer.
Inputs:
=======
- str1, str2: (str)
Returns (int1, int2), (int3, int4), where:
- int1, int2 = start, stop on str1
- int3, int4 = start, stop on str2
For the DNA case, we are assuming that the 5'->3' directionality of the
two strings are identical.
"""
s = SequenceMatcher(None, template, primer)
m = s.find_longest_match(0, len(template), 0, len(primer))
return (m.a, m.a + m.size), (m.b, m.b + m.size)
def compute_amplicon(fw_primer, re_primer, template):
"""
Given a pair of fw and re primers and a template, return the amplicon.
Inputs:
=======
- fw_primer, re_primer: BioPython Seq objects
- template: BioPython Seq objects
It is assumed that the reverse primer is not in the same orientation as the
template sequence.
Returns:
========
- amplicon: BioPython SeqRecord object
Text-based illustration:
A B G H
5-------3 5------------3
C D E F
5-----------------------3
Or:
A B G H
5-------3 5------------3
C D E F
5---------------------------------------------------3
The basic idea here is that we need to compute the locations of D and E on
the template.
"""
# Identify where the fw primer anneals to template region.
# fp : forward primer
# rp : reverse primer
# tpl : template
# idxs : indices
fp_tpl_idxs, fp_idxs = alignment_indices(template,
fw_primer)
rp_tpl_idxs, rp_idxs = alignment_indices(template,
re_primer.reverse_complement())
final_sequence = fw_primer + template[fp_tpl_idxs[1]:rp_tpl_idxs[0]] +\
re_primer.reverse_complement()
return final_sequence
def num_cycles(fold_amp):
"""
Computes the necessary number of cycles. Rounds up to the next integer.
"""
return math.ceil(np.log10(fold_amp) / np.log10(2))
def input_plasmid_mass(target_len, plasmid_len, target_mass):
"""
Computes the mass of total input plasmid mass required to get a given mass
of target DNA.
Silently assumes that the units are:
- target_len: bp (base pairs)
- plasmid_len: bp (base pairs)
- target_mass: ng (nanograms)
"""
return target_mass * plasmid_len / target_len
def input_volume(input_mass, input_conc):
"""
Computes the required amount of volume for a given mass and concentration.
Silently assumes that the units are:
- input_mass: ng (nanograms)
- input_conc: ng/µL (nanograms per microlitre)
"""
return input_mass / input_conc
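# Minimal usage sketch (hypothetical toy sequences; illustration only):
#   template = Seq("ATGCGTACGTTAGCCGGATATCCGGCTTAAGCGT")
#   fw = Seq("ATGCGTACGT")               # anneals at the template's 5' end
#   re_ = Seq("ACGCTTAAGCC")             # reverse complement of the template's 3' end
#   compute_amplicon(fw, re_, template)  # -> reconstructs the full template
#   num_cycles(1000)                     # -> 10 cycles for a 1000-fold amplification
#   input_plasmid_mass(1500, 5000, 200)  # -> ~666.7 ng plasmid for 200 ng of target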
|
StarcoderdataPython
|
3332711
|
"""
PCR clan battle management commands v2
Clan battle management so simple even a monkey can use it
The command design follows these principles:
- Chinese: lowers the learning cost
- Unique: There should be one-- and preferably only one --obvious way to do it.
- Robust: execute as best as possible even when arguments are malformed
"""
import os
from datetime import datetime, timedelta
from typing import List
from matplotlib import pyplot as plt
try:
import ujson as json
except:
import json
from aiocqhttp.exceptions import ActionFailed
from nonebot import NoneBot
from nonebot import MessageSegment as ms
from nonebot.typing import Context_T
from hoshino import util, priv
from . import sv, cb_cmd
from .argparse import ArgParser, ArgHolder, ParseResult
from .argparse.argtype import *
from .battlemaster import BattleMaster
from .exception import *
plt.style.use('seaborn-pastel')
plt.rcParams['font.family'] = ['DejaVuSans', 'Microsoft YaHei', 'SimSun', ]
USAGE_ADD_CLAN = '!建会 N公会名 S服务器代号'
USAGE_ADD_MEMBER = '!入会 昵称 (@qq)'
USAGE_LIST_MEMBER = '!查看成员'
USAGE_TIP = '\n\n※无需输入尖括号,圆括号内为可选参数'
ERROR_CLAN_NOTFOUND = f'公会未初始化:请*群管理*使用【{USAGE_ADD_CLAN}】进行初始化{USAGE_TIP}'
ERROR_ZERO_MEMBER = f'公会内无成员:使用【{USAGE_ADD_MEMBER}】以添加{USAGE_TIP}'
ERROR_MEMBER_NOTFOUND = f'未找到成员:请使用【{USAGE_ADD_MEMBER}】加入公会{USAGE_TIP}'
ERROR_PERMISSION_DENIED = '权限不足:需*群管理*以上权限'
def _check_clan(bm:BattleMaster):
clan = bm.get_clan(1)
if not clan:
raise NotFoundError(ERROR_CLAN_NOTFOUND)
return clan
def _check_member(bm:BattleMaster, uid:int, alt:int, tip=None):
mem = bm.get_member(uid, alt) or bm.get_member(uid, 0) # 兼容cmdv1
if not mem:
raise NotFoundError(tip or ERROR_MEMBER_NOTFOUND)
return mem
def _check_admin(ctx:Context_T, tip:str=''):
if not priv.check_priv(ctx, priv.ADMIN):
raise PermissionDeniedError(ERROR_PERMISSION_DENIED + tip)
@cb_cmd('建会', ArgParser(usage=USAGE_ADD_CLAN, arg_dict={
'N': ArgHolder(tip='公会名'),
'S': ArgHolder(tip='服务器地区', type=server_code)}))
async def add_clan(bot:NoneBot, ctx:Context_T, args:ParseResult):
_check_admin(ctx)
bm = BattleMaster(ctx['group_id'])
if bm.has_clan(1):
bm.mod_clan(1, args.N, args.S)
await bot.send(ctx, f'公会信息已修改!\n{args.N} {server_name(args.S)}', at_sender=True)
else:
bm.add_clan(1, args.N, args.S)
await bot.send(ctx, f'公会建立成功!{args.N} {server_name(args.S)}', at_sender=True)
@cb_cmd('查看公会', ArgParser('!查看公会'))
async def list_clan(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
clans = bm.list_clan()
if len(clans):
clans = map(lambda x: f"{x['cid']}会:{x['name']} {server_name(x['server'])}", clans)
msg = ['本群公会:', *clans]
await bot.send(ctx, '\n'.join(msg), at_sender=True)
else:
raise NotFoundError(ERROR_CLAN_NOTFOUND)
@cb_cmd('入会', ArgParser(usage=USAGE_ADD_MEMBER, arg_dict={
'': ArgHolder(tip='昵称', default=''),
'@': ArgHolder(tip='qq号', type=int, default=0)}))
async def add_member(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
clan = _check_clan(bm)
uid = args['@'] or args.at or ctx['user_id']
name = args['']
if uid != ctx['user_id']:
_check_admin(ctx, '才能添加其他人')
try: # try to fetch the group member info, to check whether the member is in this group
await bot.get_group_member_info(self_id=ctx['self_id'], group_id=bm.group, user_id=uid)
except:
raise NotFoundError(f'Error: 无法获取群员信息,请检查{uid}是否属于本群')
if not name:
m = await bot.get_group_member_info(self_id=ctx['self_id'], group_id=bm.group, user_id=uid)
name = m['card'] or m['nickname'] or str(m['user_id'])
mem = bm.get_member(uid, bm.group) or bm.get_member(uid, 0) # 兼容cmdv1
if mem:
bm.mod_member(uid, mem['alt'], name, 1)
await bot.send(ctx, f'成员{ms.at(uid)}昵称已修改为{name}')
else:
bm.add_member(uid, bm.group, name, 1)
await bot.send(ctx, f"成员{ms.at(uid)}添加成功!欢迎{name}加入{clan['name']}")
@cb_cmd(('查看成员', '成员查看', '查询成员', '成员查询'), ArgParser(USAGE_LIST_MEMBER))
async def list_member(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
clan = _check_clan(bm)
mems = bm.list_member(1)
if l := len(mems):
# too many numbers in one message may get it banned by Tencent
mems = map(lambda x: '{uid: <11,d} | {name}'.format_map(x), mems)
msg = [ f"\n{clan['name']} {l}/30 人\n____ QQ ____ | 昵称", *mems]
await bot.send(ctx, '\n'.join(msg), at_sender=True)
else:
raise NotFoundError(ERROR_ZERO_MEMBER)
@cb_cmd('退会', ArgParser(usage='!退会 (@qq)', arg_dict={
'@': ArgHolder(tip='qq号', type=int, default=0)}))
async def del_member(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
uid = args['@'] or args.at or ctx['user_id']
mem = _check_member(bm, uid, bm.group, '公会内无此成员')
if uid != ctx['user_id']:
_check_admin(ctx, '才能踢人')
bm.del_member(uid, mem['alt'])
await bot.send(ctx, f"成员{mem['name']}已从公会删除", at_sender=True)
@cb_cmd('清空成员', ArgParser('!清空成员'))
async def clear_member(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
clan = _check_clan(bm)
_check_admin(ctx)
msg = f"{clan['name']}已清空!" if bm.clear_member(1) else f"{clan['name']}已无成员"
await bot.send(ctx, msg, at_sender=True)
@cb_cmd('一键入会', ArgParser('!一键入会'))
async def batch_add_member(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
clan = _check_clan(bm)
_check_admin(ctx)
try:
mlist = await bot.get_group_member_list(self_id=ctx['self_id'], group_id=bm.group)
except ActionFailed:
raise ClanBattleError('Bot缓存未更新,暂时无法使用一键入会。请尝试【!入会】命令逐个添加')
if len(mlist) > 50:
raise ClanBattleError('群员过多!一键入会仅限50人以内群使用')
self_id = ctx['self_id']
succ, fail = 0, 0
for m in mlist:
if m['user_id'] != self_id:
try:
bm.add_member(m['user_id'], bm.group, m['card'] or m['nickname'] or str(m['user_id']), 1)
succ += 1
except DatabaseError:
fail += 1
msg = f'批量注册完成!成功{succ}/失败{fail}\n使用【{USAGE_LIST_MEMBER}】查看当前成员列表'
await bot.send(ctx, msg, at_sender=True)
def _gen_progress_text(clan_name, round_, boss, hp, max_hp, score_rate):
return f"{clan_name} 当前进度:\n{round_}周目 {BattleMaster.int2kanji(boss)}王 SCORE x{score_rate:.1f}\nHP={hp:,d}/{max_hp:,d}"
async def process_challenge(bot:NoneBot, ctx:Context_T, ch:ParseResult):
"""
Process one damage report. The caller must guarantee that challenge['flag'] is correct.
"""
bm = BattleMaster(ctx['group_id'])
now = datetime.now() - timedelta(days=ch.get('dayoffset', 0))
clan = _check_clan(bm)
mem = _check_member(bm, ch.uid, ch.alt)
cur_round, cur_boss, cur_hp = bm.get_challenge_progress(1, now)
round_ = ch.round or cur_round
boss = ch.boss or cur_boss
damage = ch.damage if ch.flag != BattleMaster.LAST else (ch.damage or cur_hp)
flag = ch.flag
if (ch.flag == BattleMaster.LAST) and (ch.round or ch.boss) and (not damage):
raise NotFoundError('补报尾刀请给出伤害值') # a retroactive last-hit report must include a damage value
msg = ['']
# if the previous hit was a last hit, this one is a carry-over (compensation) hit
challenges = bm.list_challenge_of_user_of_day(mem['uid'], mem['alt'], now)
if len(challenges) > 0 and challenges[-1]['flag'] == BattleMaster.LAST:
flag = BattleMaster.EXT
msg.append('⚠️已自动标记为补时刀')
if round_ != cur_round or boss != cur_boss:
msg.append('⚠️上报与当前进度不一致')
else: # damage sanity check
eps = 30000
if damage > cur_hp + eps:
damage = cur_hp
msg.append(f'⚠️过度虐杀 伤害数值已自动修正为{damage}')
if flag == BattleMaster.NORM:
flag = BattleMaster.LAST
msg.append('⚠️已自动标记为尾刀')
elif flag == BattleMaster.LAST:
if damage < cur_hp - eps:
msg.append('⚠️尾刀伤害不足 请未报刀成员及时上报')
elif damage < cur_hp:
if damage % 1000 == 0:
damage = cur_hp
msg.append(f'⚠️尾刀伤害已自动修正为{damage}')
else:
msg.append('⚠️Boss仍有少量残留血量')
eid = bm.add_challenge(mem['uid'], mem['alt'], round_, boss, damage, flag, now)
aft_round, aft_boss, aft_hp = bm.get_challenge_progress(1, now)
max_hp, score_rate = bm.get_boss_info(aft_round, aft_boss, clan['server'])
msg.append(f"记录编号E{eid}:\n{mem['name']}给予{round_}周目{bm.int2kanji(boss)}王{damage:,d}点伤害\n")
msg.append(_gen_progress_text(clan['name'], aft_round, aft_boss, aft_hp, max_hp, score_rate))
await bot.send(ctx, '\n'.join(msg), at_sender=True)
# check whether the boss changed; if so, call the subscribers
if aft_round != cur_round or aft_boss != cur_boss:
await call_subscribe(bot, ctx, aft_round, aft_boss)
await auto_unlock_boss(bot, ctx, bm)
await auto_unsubscribe(bot, ctx, bm.group, mem['uid'], boss)
@cb_cmd(('出刀', '报刀'), ArgParser(usage='!出刀 <伤害值> (@qq)', arg_dict={
'': ArgHolder(tip='伤害值', type=damage_int),
'@': ArgHolder(tip='qq号', type=int, default=0),
'R': ArgHolder(tip='周目数', type=round_code, default=0),
'B': ArgHolder(tip='Boss编号', type=boss_code, default=0),
'D': ArgHolder(tip='日期差', type=int, default=0)}))
async def add_challenge(bot:NoneBot, ctx:Context_T, args:ParseResult):
challenge = ParseResult({
'round': args.R,
'boss': args.B,
'damage': args.get(''),
'uid': args['@'] or args.at or ctx['user_id'],
'alt': ctx['group_id'],
'flag': BattleMaster.NORM,
'dayoffset': args.get('D', 0)
})
await process_challenge(bot, ctx, challenge)
@cb_cmd(('出尾刀', '收尾', '尾刀'), ArgParser(usage='!出尾刀 (<伤害值>) (@<qq号>)', arg_dict={
'': ArgHolder(tip='伤害值', type=damage_int, default=0),
'@': ArgHolder(tip='qq号', type=int, default=0),
'R': ArgHolder(tip='周目数', type=round_code, default=0),
'B': ArgHolder(tip='Boss编号', type=boss_code, default=0)}))
async def add_challenge_last(bot:NoneBot, ctx:Context_T, args:ParseResult):
challenge = ParseResult({
'round': args.R,
'boss': args.B,
'damage': args.get(''),
'uid': args['@'] or args.at or ctx['user_id'],
'alt': ctx['group_id'],
'flag': BattleMaster.LAST
})
await process_challenge(bot, ctx, challenge)
@cb_cmd(('出补时刀', '补时刀', '补时'), ArgParser(usage='!出补时刀 <伤害值> (@qq)', arg_dict={
'': ArgHolder(tip='伤害值', type=damage_int),
'@': ArgHolder(tip='qq号', type=int, default=0),
'R': ArgHolder(tip='周目数', type=round_code, default=0),
'B': ArgHolder(tip='Boss编号', type=boss_code, default=0)}))
async def add_challenge_ext(bot:NoneBot, ctx:Context_T, args:ParseResult):
challenge = ParseResult({
'round': args.R,
'boss': args.B,
'damage': args.get(''),
'uid': args['@'] or args.at or ctx['user_id'],
'alt': ctx['group_id'],
'flag': BattleMaster.EXT
})
await process_challenge(bot, ctx, challenge)
@cb_cmd('掉刀', ArgParser(usage='!掉刀 (@qq)', arg_dict={
'@': ArgHolder(tip='qq号', type=int, default=0),
'R': ArgHolder(tip='周目数', type=round_code, default=0),
'B': ArgHolder(tip='Boss编号', type=boss_code, default=0)}))
async def add_challenge_timeout(bot:NoneBot, ctx:Context_T, args:ParseResult):
challenge = ParseResult({
'round': args.R,
'boss': args.B,
'damage': 0,
'uid': args['@'] or args.at or ctx['user_id'],
'alt': ctx['group_id'],
'flag': BattleMaster.TIMEOUT
})
await process_challenge(bot, ctx, challenge)
@cb_cmd('删刀', ArgParser(usage='!删刀 E记录编号', arg_dict={
'E': ArgHolder(tip='记录编号', type=int)}))
async def del_challenge(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
now = datetime.now()
clan = _check_clan(bm)
ch = bm.get_challenge(args.E, 1, now)
if not ch:
raise NotFoundError(f'未找到出刀记录E{args.E}')
if ch['uid'] != ctx['user_id']:
_check_admin(ctx, '才能删除其他人的记录')
bm.del_challenge(args.E, 1, now)
await bot.send(ctx, f"{clan['name']}已删除{ms.at(ch['uid'])}的出刀记录E{args.E}", at_sender=True)
# TODO move the subscription data into the database
SUBSCRIBE_PATH = os.path.expanduser('~/.hoshino/clanbattle_sub/')
SUBSCRIBE_MAX = [99, 6, 6, 6, 6, 6]
os.makedirs(SUBSCRIBE_PATH, exist_ok=True)
class SubscribeData:
def __init__(self, data:dict):
for i in '12345':
data.setdefault(i, [])
data.setdefault('m' + i, [])
l = len(data[i])
if len(data['m' + i]) != l:
data['m' + i] = [None] * l
data.setdefault('tree', [])
data.setdefault('lock', [])
if 'max' not in data or len(data['max']) != 6:
data['max'] = [99, 6, 6, 6, 6, 6]
self._data = data
@staticmethod
def default():
return SubscribeData({
'1':[], '2':[], '3':[], '4':[], '5':[],
'm1':[], 'm2':[], 'm3':[], 'm4':[], 'm5':[],
'tree':[], 'lock':[],
'max': [99, 6, 6, 6, 6, 6]
})
def get_sub_list(self, boss:int):
return self._data[str(boss)]
def get_memo_list(self, boss:int):
return self._data[f'm{boss}']
def get_tree_list(self):
return self._data['tree']
def get_sub_limit(self, boss:int):
return self._data['max'][boss]
def set_sub_limit(self, boss:int, limit:int):
self._data['max'][boss] = limit
def add_sub(self, boss:int, uid:int, memo:str):
self._data[str(boss)].append(uid)
self._data[f'm{boss}'].append(memo)
def remove_sub(self, boss:int, uid:int):
s = self._data[str(boss)]
m = self._data[f'm{boss}']
i = s.index(uid)
s.pop(i)
m.pop(i)
def add_tree(self, uid:int):
self._data['tree'].append(uid)
def clear_tree(self):
self._data['tree'].clear()
def get_lock_info(self):
return self._data['lock']
def set_lock(self, uid:int, ts):
self._data['lock'] = [ (uid, ts) ]
def clear_lock(self):
self._data['lock'].clear()
def dump(self, filename):
with open(filename, 'w', encoding='utf8') as f:
json.dump(self._data, f, ensure_ascii=False)
def _load_sub(gid) -> SubscribeData:
filename = os.path.join(SUBSCRIBE_PATH, f"{gid}.json")
if os.path.exists(filename):
with open(filename, 'r', encoding='utf8') as f:
return SubscribeData(json.load(f))
else:
return SubscribeData.default()
def _save_sub(sub:SubscribeData, gid):
filename = os.path.join(SUBSCRIBE_PATH, f"{gid}.json")
sub.dump(filename)
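# Illustrative sketch (not part of the original plugin and never called by it):
# how the persistence helpers above round-trip subscription state through JSON
# on disk. The group id and uid below are hypothetical.
def _demo_subscribe_roundtrip(gid=123456789):
    sub = _load_sub(gid)                    # loads <gid>.json, or a fresh default
    sub.add_sub(1, 10001, 'practice run')   # subscribe uid 10001 to boss 1
    _save_sub(sub, gid)                     # writes SUBSCRIBE_PATH/<gid>.json
    return _load_sub(gid).get_sub_list(1)   # -> [10001]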
def _gen_namelist_text(bm:BattleMaster, uidlist:List[int], memolist:List[str]=None, do_at=False):
if do_at:
mems = map(lambda x: str(ms.at(x)), uidlist)
else:
mems = map(lambda x: bm.get_member(x, bm.group) or bm.get_member(x, 0) or {'name': str(x)}, uidlist)
mems = map(lambda x: x['name'], mems)
if memolist:
mems = list(mems)
for i in range(len(mems)):
if i < len(memolist) and memolist[i]:
mems[i] = f"{mems[i]}:{memolist[i]}"
return mems
SUBSCRIBE_TIP = ''
@cb_cmd('预约', ArgParser(usage='!预约 <Boss号> M留言', arg_dict={
'': ArgHolder(tip='Boss编号', type=boss_code),
'M': ArgHolder(tip='留言', default='')}))
async def subscribe(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
uid = ctx['user_id']
_check_clan(bm)
_check_member(bm, uid, bm.group)
sub = _load_sub(bm.group)
boss = args['']
memo = args.M
boss_name = bm.int2kanji(boss)
slist = sub.get_sub_list(boss)
mlist = sub.get_memo_list(boss)
limit = sub.get_sub_limit(boss)
if uid in slist:
raise AlreadyExistError(f'您已经预约过{boss_name}王了')
msg = ['']
if len(slist) < limit:
sub.add_sub(boss, uid, memo)
_save_sub(sub, bm.group)
msg.append(f'已为您预约{boss_name}王!')
else:
msg.append(f'预约失败:{boss_name}王预约人数已达上限')
msg.append(f'=== 当前队列 {len(slist)}/{limit} ===')
msg.extend(_gen_namelist_text(bm, slist, mlist))
msg.append(SUBSCRIBE_TIP)
await bot.send(ctx, '\n'.join(msg), at_sender=True)
@cb_cmd(('取消预约', '预约取消'), ArgParser(usage='!取消预约 <Boss号>', arg_dict={
'': ArgHolder(tip='Boss编号', type=boss_code)}))
async def unsubscribe(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
uid = ctx['user_id']
_check_clan(bm)
_check_member(bm, uid, bm.group)
sub = _load_sub(bm.group)
boss = args['']
boss_name = bm.int2kanji(boss)
slist = sub.get_sub_list(boss)
mlist = sub.get_memo_list(boss)
limit = sub.get_sub_limit(boss)
if uid not in slist:
raise NotFoundError(f'您没有预约{boss_name}王')
sub.remove_sub(boss, uid)
_save_sub(sub, bm.group)
msg = [ f'\n已为您取消预约{boss_name}王!' ]
msg.append(f'=== 当前队列 {len(slist)}/{limit} ===')
msg.extend(_gen_namelist_text(bm, slist, mlist))
await bot.send(ctx, '\n'.join(msg), at_sender=True)
async def auto_unsubscribe(bot:NoneBot, ctx:Context_T, gid, uid, boss):
sub = _load_sub(gid)
slist = sub.get_sub_list(boss)
if uid not in slist:
return
sub.remove_sub(boss, uid)
_save_sub(sub, gid)
await bot.send(ctx, f'已为{ms.at(uid)}自动取消{BattleMaster.int2kanji(boss)}王的订阅')
async def call_subscribe(bot:NoneBot, ctx:Context_T, round_:int, boss:int):
bm = BattleMaster(ctx['group_id'])
msg = []
sub = _load_sub(bm.group)
slist = sub.get_sub_list(boss)
mlist = sub.get_memo_list(boss)
tlist = sub.get_tree_list()
if slist:
msg.append(f"您们预约的老{BattleMaster.int2kanji(boss)}出现啦!")
msg.extend(_gen_namelist_text(bm, slist, mlist, do_at=True))
if slist and tlist:
msg.append("==========")
if tlist:
msg.append(f"以下成员可以下树了")
msg.extend(map(lambda x: str(ms.at(x)), tlist))
sub.clear_tree()
_save_sub(sub, bm.group)
if msg:
await bot.send(ctx, '\n'.join(msg), at_sender=False) # do not at the sender
@cb_cmd(('查询预约', '预约查询', '查看预约', '预约查看'), ArgParser('!查询预约'))
async def list_subscribe(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
clan = _check_clan(bm)
msg = [ f"\n{clan['name']}当前预约情况:" ]
sub = _load_sub(bm.group)
for boss in range(1, 6):
slist = sub.get_sub_list(boss)
mlist = sub.get_memo_list(boss)
limit = sub.get_sub_limit(boss)
msg.append(f"========\n{bm.int2kanji(boss)}王: {len(slist)}/{limit}")
msg.extend(_gen_namelist_text(bm, slist, mlist))
await bot.send(ctx, '\n'.join(msg), at_sender=True)
@cb_cmd(('清空预约', '预约清空', '清理预约', '预约清理'), ArgParser('!清空预约', arg_dict={
'': ArgHolder(tip='Boss编号', type=boss_code)}))
async def clear_subscribe(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
clan = _check_clan(bm)
_check_admin(ctx, '才能清理预约队列')
sub = _load_sub(bm.group)
boss = args['']
slist = sub.get_sub_list(boss)
mlist = sub.get_memo_list(boss)
if slist:
slist.clear()
mlist.clear()
_save_sub(sub, bm.group)
await bot.send(ctx, f"{bm.int2kanji(boss)}王预约队列已清空", at_sender=True)
else:
raise NotFoundError(f"无人预约{bm.int2kanji(boss)}王")
@cb_cmd(('预约上限', ), ArgParser(usage='!预约上限 B<Boss号> <上限值>', arg_dict={
'B': ArgHolder(tip='Boss编号', type=boss_code),
'': ArgHolder(tip='上限值', type=int)
}))
async def set_subscribe_limit(bot:NoneBot, ctx, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
clan = _check_clan(bm)
_check_admin(ctx, '才能设置预约上限')
limit = args['']
if not (0 < limit <= 30):
raise ClanBattleError('预约上限只能为1~30内的整数')
sub = _load_sub(bm.group)
sub.set_sub_limit(args.B, limit)
_save_sub(sub, bm.group)
await bot.send(ctx, f'{bm.int2kanji(args.B)}王预约上限已设置为:{limit}')
@cb_cmd(('挂树', '上树'), ArgParser('!挂树'))
async def add_sos(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
uid = ctx['user_id']
clan = _check_clan(bm)
_check_member(bm, uid, bm.group)
sub = _load_sub(bm.group)
tree = sub.get_tree_list()
if uid in tree:
raise AlreadyExistError("您已在树上")
sub.add_tree(uid)
_save_sub(sub, bm.group)
msg = [ "\n您已上树,本Boss被击败时将会通知您",
f"目前{clan['name']}挂树人数为{len(tree)}人:" ]
msg.extend(_gen_namelist_text(bm, tree))
await bot.send(ctx, '\n'.join(msg), at_sender=True)
@cb_cmd(('查树', ), ArgParser('!查树'))
async def list_sos(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
clan = _check_clan(bm)
sub = _load_sub(bm.group)
tree = sub.get_tree_list()
msg = [ f"\n目前{clan['name']}挂树人数为{len(tree)}人:" ]
msg.extend(_gen_namelist_text(bm, tree))
await bot.send(ctx, '\n'.join(msg), at_sender=True)
@cb_cmd(('锁定', '申请出刀'), ArgParser('!锁定'))
async def lock_boss(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
_check_clan(bm)
_check_member(bm, ctx['user_id'], bm.group)
sub = _load_sub(bm.group)
lock = sub.get_lock_info()
if lock:
uid, ts = lock[0]
time = datetime.fromtimestamp(ts)
mem = bm.get_member(uid, bm.group) or bm.get_member(uid, 0) or {'name': str(uid)}
delta = datetime.now() - time
        delta = timedelta(seconds=round(delta.total_seconds()))  # ignore milliseconds
msg = f"\n锁定失败:{mem['name']}已于{delta}前锁定了Boss"
await bot.send(ctx, msg, at_sender=True)
else:
        uid = ctx['user_id']
        time = datetime.now()
        sub.set_lock(uid, time.timestamp())
_save_sub(sub, bm.group)
msg = f"已锁定Boss"
await bot.send(ctx, msg, at_sender=True)
@cb_cmd(('解锁', ), ArgParser('!解锁'))
async def unlock_boss(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
_check_clan(bm)
sub = _load_sub(bm.group)
lock = sub.get_lock_info()
if lock:
uid, ts = lock[0]
time = datetime.fromtimestamp(ts)
if uid != ctx['user_id']:
mem = bm.get_member(uid, bm.group) or bm.get_member(uid, 0) or {'name': str(uid)}
delta = datetime.now() - time
            delta = timedelta(seconds=round(delta.total_seconds()))  # ignore milliseconds
_check_admin(ctx, f"才能解锁其他人\n解锁失败:{mem['name']}于{delta}前锁定了Boss")
sub.clear_lock()
_save_sub(sub, bm.group)
msg = f"\nBoss已解锁"
await bot.send(ctx, msg, at_sender=True)
else:
msg = "\n无人锁定Boss"
await bot.send(ctx, msg, at_sender=True)
async def auto_unlock_boss(bot:NoneBot, ctx:Context_T, bm:BattleMaster):
sub = _load_sub(bm.group)
lock = sub.get_lock_info()
if lock:
uid, ts = lock[0]
time = datetime.fromtimestamp(ts)
if uid != ctx['user_id']:
mem = bm.get_member(uid, bm.group) or bm.get_member(uid, 0) or {'name': str(uid)}
delta = datetime.now() - time
            delta = timedelta(seconds=round(delta.total_seconds()))  # ignore milliseconds
msg = f"⚠️{mem['name']}于{delta}前锁定了Boss,您出刀前未申请锁定!"
await bot.send(ctx, msg, at_sender=True)
else:
sub.clear_lock()
_save_sub(sub, bm.group)
msg = f"\nBoss已自动解锁"
await bot.send(ctx, msg, at_sender=True)
@cb_cmd(('进度', '进度查询', '查询进度', '进度查看', '查看进度', '状态'), ArgParser(usage='!进度'))
async def show_progress(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
clan = _check_clan(bm)
r, b, hp = bm.get_challenge_progress(1, datetime.now())
max_hp, score_rate = bm.get_boss_info(r, b, clan['server'])
msg = _gen_progress_text(clan['name'], r, b, hp, max_hp, score_rate)
await bot.send(ctx, '\n' + msg, at_sender=True)
@cb_cmd(('统计', '伤害统计'), ArgParser(usage='!伤害统计'))
async def stat_damage(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
now = datetime.now()
clan = _check_clan(bm)
yyyy, mm, _ = bm.get_yyyymmdd(now)
stat = bm.stat_damage(1, now)
yn = len(stat)
if not yn:
await bot.send(ctx, f"{clan['name']}{yyyy}年{mm}月会战统计数据为空", at_sender=True)
return
stat.sort(key=lambda x: x[3][0], reverse=True)
name = [ s[2] for s in stat ]
y_pos = list(range(yn))
y_size = 0.3 * yn + 1.0
unit = 1e4
unit_str = 'w'
    # convert per-boss damages to prefix (cumulative) sums
for s in stat:
d = s[3]
d[0] = 0
for i in range(2, 6):
d[i] += d[i - 1]
pre_sum_dmg = [
[ s[3][b] for s in stat ] for b in range(6)
]
    # generate the statistics figure
fig, ax = plt.subplots()
fig.set_size_inches(10, y_size)
ax.set_title(f"{clan['name']}{yyyy}年{mm}月会战伤害统计")
ax.set_yticks(y_pos)
ax.set_yticklabels(name)
ax.set_ylim((-0.6, yn - 0.4))
ax.invert_yaxis()
ax.set_xlabel('伤害')
colors = ['#00a2e8', '#22b14c', '#b5e61d', '#fff200', '#ff7f27', '#ed1c24']
bars = [ ax.barh(y_pos, pre_sum_dmg[b], align='center', color=colors[b]) for b in range(5, -1, -1) ]
bars.reverse()
ax.ticklabel_format(axis='x', style='plain')
for b in range(1, 6):
for i, rect in enumerate(bars[b]):
x = (rect.get_width() + bars[b - 1][i].get_width()) / 2
y = rect.get_y() + rect.get_height() / 2
d = pre_sum_dmg[b][i] - pre_sum_dmg[b - 1][i]
if d > unit:
ax.text(x, y, f'{d/unit:.0f}{unit_str}', ha='center', va='center')
plt.subplots_adjust(left=0.12, right=0.96, top=1 - 0.35 / y_size, bottom=0.55 / y_size)
pic = util.fig2b64(plt)
plt.close()
msg = f"{ms.image(pic)}\n※分数统计请发送“!分数统计”"
await bot.send(ctx, msg, at_sender=True)
@cb_cmd('分数统计', ArgParser(usage='!分数统计'))
async def stat_score(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
now = datetime.now()
clan = _check_clan(bm)
yyyy, mm, _ = bm.get_yyyymmdd(now)
stat = bm.stat_score(1, now)
stat.sort(key=lambda x: x[3], reverse=True)
if not len(stat):
await bot.send(ctx, f"{clan['name']}{yyyy}年{mm}月会战统计数据为空", at_sender=True)
return
# msg = [ f"\n{yyyy}年{mm}月会战{clan['name']}分数统计:" ]
# for _, _, name, score in stat:
# score = f'{score:,d}' # 数字太多会被腾讯ban,用逗号分隔
# blank = ' ' * (11-len(score)) # QQ字体非等宽,width(空格*2) == width(数字*1)
# msg.append(f"{blank}{score}分 | {name}")
# generate statistic figure
fig, ax = plt.subplots()
score = list(map(lambda i: i[3], stat))
yn = len(stat)
name = list(map(lambda i: i[2], stat))
y_pos = list(range(yn))
if score[0] >= 1e8:
unit = 1e8
unit_str = 'e'
else:
unit = 1e4
unit_str = 'w'
y_size = 0.3 * yn + 1.0
fig.set_size_inches(10, y_size)
bars = ax.barh(y_pos, score, align='center')
ax.set_title(f"{clan['name']}{yyyy}年{mm}月会战分数统计")
ax.set_yticks(y_pos)
ax.set_yticklabels(name)
ax.set_ylim((-0.6, yn - 0.4))
ax.invert_yaxis()
ax.set_xlabel('分数')
ax.ticklabel_format(axis='x', style='plain')
for rect in bars:
w = rect.get_width()
ax.text(w, rect.get_y() + rect.get_height() / 2, f'{w/unit:.2f}{unit_str}', ha='left', va='center')
plt.subplots_adjust(left=0.12, right=0.96, top=1 - 0.35 / y_size, bottom=0.55 / y_size)
pic = util.fig2b64(plt)
plt.close()
msg = f"{ms.image(pic)}\n※伤害统计请发送“!伤害统计”"
await bot.send(ctx, msg, at_sender=True)
async def _do_show_remain(bot:NoneBot, ctx:Context_T, args:ParseResult, at_user:bool):
bm = BattleMaster(ctx['group_id'])
clan = _check_clan(bm)
if at_user:
_check_admin(ctx, '才能催刀。您可以用【!查刀】查询余刀')
rlist = bm.list_challenge_remain(1, datetime.now() - timedelta(days=args.get('D', 0)))
rlist.sort(key=lambda x: x[3] + x[4], reverse=True)
msg = [ f"\n{clan['name']}今日余刀:" ]
n = len(rlist)
for i in range(0, n, 10):
for uid, _, name, r_n, r_e in rlist[i:min(i+10, n)]:
if r_n or r_e:
msg.append(f"剩{r_n}刀 补时{r_e}刀 | {ms.at(uid) if at_user else name}")
await bot.send(ctx, '\n'.join(msg))
msg.clear()
if not n:
await bot.send(ctx, f"今日{clan['name']}所有成员均已下班!各位辛苦了!", at_sender=True)
else:
msg.append('若有负数说明报刀有误 请注意核对\n使用“!出刀记录 @qq”可查看详细记录')
if at_user:
msg.append("=========\n在?阿sir喊你出刀啦!")
await bot.send(ctx, '\n'.join(msg), at_sender=True)
@cb_cmd('查刀', ArgParser(usage='!查刀', arg_dict={
'D': ArgHolder(tip='日期差', type=int, default=0)}))
async def list_remain(bot:NoneBot, ctx:Context_T, args:ParseResult):
await _do_show_remain(bot, ctx, args, at_user=False)
@cb_cmd('催刀', ArgParser(usage='!催刀'))
async def urge_remain(bot:NoneBot, ctx:Context_T, args:ParseResult):
await _do_show_remain(bot, ctx, args, at_user=True)
@cb_cmd('出刀记录', ArgParser(usage='!出刀记录 (@qq)', arg_dict={
'@': ArgHolder(tip='qq号', type=int, default=0),
'D': ArgHolder(tip='日期差', type=int, default=0)}))
async def list_challenge(bot:NoneBot, ctx:Context_T, args:ParseResult):
bm = BattleMaster(ctx['group_id'])
clan = _check_clan(bm)
now = datetime.now() - timedelta(days=args.D)
zone = bm.get_timezone_num(clan['server'])
uid = args['@'] or args.at
if uid:
mem = _check_member(bm, uid, bm.group, '公会内无此成员')
challen = bm.list_challenge_of_user_of_day(mem['uid'], mem['alt'], now, zone)
else:
challen = bm.list_challenge_of_day(clan['cid'], now, zone)
n = len(challen)
if not n:
await bot.send(ctx, "未检索到出刀记录")
return
msg = [ f'{clan["name"]}出刀记录:\n编号|出刀者|周目|Boss|伤害|标记' ]
for i in range(0, n, 10):
challenstr = 'E{eid:0>3d}|{name}|r{round}|b{boss}|{dmg: >7,d}{flag_str}'
for c in challen[i:min(n, i+10)]:
mem = bm.get_member(c['uid'], c['alt'])
c['name'] = mem['name'] if mem else c['uid']
flag = c['flag']
c['flag_str'] = '|补时' if flag & bm.EXT else '|尾刀' if flag & bm.LAST else '|掉线' if flag & bm.TIMEOUT else '|通常'
msg.append(challenstr.format_map(c))
await bot.send(ctx, '\n'.join(msg))
msg.clear()
|
StarcoderdataPython
|
3300177
|
"""
FIT1008 Prac 3 Task 3
@purpose Summing items until a negative number is reached, for Task 3
@author <NAME> 25461257
@modified 20140810
@created 20140807
"""
import time, random
def sum_until_negative(a_list):
"""
@purpose Summing items in a list, stops as soon as a negative number is reached.
If first item is negative, or list is empty, returns 0
@parameter
a_list: A list passed to be summed until negative number reached
@Complexity: Worst Case - O(N): goes through the whole list when none is negative
Best Case - O(1): empty list or first is negative
@pre-condition An integer list is passed
@post-condition Returns the sum of numbers up until negative number reached
"""
    try:
        total = 0
        if len(a_list) > 0:
            for i in range(len(a_list)):
                if a_list[i] >= 0:
                    total += a_list[i]
                else:  # stop as soon as a negative number is encountered
                    return total
            return total
        else:
            return total
    except TypeError:
        return "Please only insert numerical type lists."
def time_sum_until_negative(a_list):
"""
@purpose Returns the time taken in seconds, for summing a list using sum_until_negative()
@param
a_list: a list of items passed to this function to be passed to sum_until_negative()
@complexity: O(1)
@precondition Passing a list
@postcondition Return the time taken in seconds
"""
start = time.time()
total = sum_until_negative(a_list)
taken = (time.time() - start)
return taken
def table_time_sum_until_negative_1():
"""
    @purpose        Generates a list of 1.5 million real values using random.seed() and random.uniform(),
                    ranging from -0.1 to 1, then prints out: for each n, doubling each step, up to 1.5 million,
                    the time taken for summing from list[0] to list[n]
    @complexity:
                    Worst Case: O(n log n): when no item in the list is negative.
                    Best Case: O(log n): when the first item is negative. The algorithm always generates N items,
                    then loops through log n prefix sizes, stopping as soon as it encounters the negative item.
    @postcondition  Prints a table of times
"""
random_list = [] # initialising the list
random.seed(1)
while len(random_list) < 1500000:
        # Generates 1.5 million real values between -0.1 and 1
random_list.append(random.uniform(-0.1,1))
print("\n")
n=1
while n < len(random_list):
print(str(n) + ", " + str(time_sum_until_negative(random_list[:n])))
n = n*2
def table_time_sum_until_negative_2():
"""
    @purpose        Generates a list of 1.5 million real values using random.seed() and random.uniform(),
                    ranging from -1 to 1, then prints out: for each n, doubling each step, up to 1.5 million,
                    the time taken for summing from list[0] to list[n]
                    This one also makes sure the first item is negative.
    @complexity:
                    Best & Worst: O(log N): The algorithm always generates N items, then goes through log n iterations,
                    but stops at the first item every time.
    @precondition   none
    @postcondition  Prints a table of times
"""
random_list = [-1]
#random_list.append(random.uniform(-1,-0.001)) # Making sure first item is a negative
random.seed(1)
while len(random_list) < 1500000:
random_list.append(random.uniform(-1,1))
print("\n")
n=1
while n < len(random_list):
print(str(n) + ", " + str(time_sum_until_negative(random_list[:n])))
n = n*2
def test_sum_until_negative():
"""
    @purpose        A function that calls sum_until_negative with various test cases
@param none
@complexity O(1)
"""
    # some test cases
    list1 = [1, 2, 3, 4]  # all valid non-negative values
    print(sum_until_negative(list1))
    list2 = []  # empty list
    print(sum_until_negative(list2))
    list3 = [-1, 2, 3]  # first item negative
    print(sum_until_negative(list3))
    list4 = [1, 3, -2, 4]  # negative value in the middle
    print(sum_until_negative(list4))
    list5 = ["a"]  # non-numerical value
    print(sum_until_negative(list5))
#MAIN
if __name__ == "__main__":
try:
table_time_sum_until_negative_1()
table_time_sum_until_negative_2()
#test_sum_until_negative()
except KeyboardInterrupt:
print("Stopped by user.")
|
StarcoderdataPython
|
1765448
|
from nose.tools import * # flake8: noqa
from tests.base import AdminTestCase
from admin.base.forms import GuidForm
class TestGuidForm(AdminTestCase):
def setUp(self):
super(TestGuidForm, self).setUp()
def test_valid_data(self):
guid = '12345'
form = GuidForm({
'guid': guid,
})
assert_true(form.is_valid())
assert_equal(form.cleaned_data.get('guid'), guid)
def test_blank_data(self):
form = GuidForm({})
assert_false(form.is_valid())
assert_equal(form.errors, {
'guid': [u'This field is required.'],
})
|
StarcoderdataPython
|
178477
|
<filename>omnilingual/features/__init__.py<gh_stars>0
from .universal import *
|
StarcoderdataPython
|
1680538
|
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import yaml
from django.db import models, migrations
from django.contrib.auth.hashers import make_password
FIXTURES = os.path.abspath(
os.path.dirname(os.path.realpath(__file__)) + "/initial_data.yaml"
)
def load_data_from_yaml():
    with open(FIXTURES, "r") as f:
        contents = f.read()
    try:
        data = yaml.safe_load(contents)
        return data
    except Exception as e:
        raise Exception("Cannot load initial data file: %s" % e)
def persist_data(apps, schema_editor):
data = load_data_from_yaml()
# iterate over the data
for entry in data:
# retrieve the class for that model
[app, model_name] = entry["model"].split(".")
model_class = apps.get_model(app, model_name)
# create a new instance for that model
i = model_class(**entry["fields"])
# if model is user hash the password
if model_name == "User":
i.password = make_password(entry["fields"]["password"])
# check relations
if "relations" in entry:
for (r_name, r) in entry["relations"].items():
# retrieve the related model
[r_app, r_model_name] = r["model"].split(".")
related_model_class = apps.get_model(r_app, r_model_name)
r_model = related_model_class.objects.get(**r["fields"])
# assign relation
setattr(i, r_name, r_model)
# save the instance
i.save()
print("Created %s: %s" % (model_name, entry["fields"]))
class Migration(migrations.Migration):
dependencies = [("core", "0001_initial")]
operations = [migrations.RunPython(persist_data)]
|
StarcoderdataPython
|
165050
|
<filename>radical/utils/logger/filehandler.py
__author__ = "Radical.Utils Development Team (<NAME>, <NAME>)"
__copyright__ = "Copyright 2013, RADICAL@Rutgers"
__license__ = "MIT"
''' Provides a file handler for the logging system.
'''
from logging import DEBUG, getLogger, Filter, FileHandler as LFileHandler
class FileHandler(LFileHandler):
""" A output FileHandler. """
pass
|
StarcoderdataPython
|
1620345
|
<reponame>WangXin93/mmpose
import torch
import torch.nn as nn
from ..registry import LOSSES
@LOSSES.register_module()
class CELoss(nn.Module):
""" Cross Entorpy Loss Wrapper
Args:
loss_weight (float): Weight of the loss. Default: 1.0.
"""
def __init__(self, loss_weight=1.):
super().__init__()
self.loss_weight = loss_weight
self.criterion = nn.CrossEntropyLoss()
def forward(self, output, target):
"""Forward function."""
return self.loss_weight * self.criterion(output, target)
@LOSSES.register_module()
class AttrCELoss(nn.Module):
""" Cross Entorpy Loss Wrapper
Args:
loss_weight (float): Weight of the loss. Default: 1.0.
"""
def __init__(self, loss_weight=1., weight_attr_neg=0.1, weight_attr_pos=1.):
super().__init__()
self.loss_weight = loss_weight
self.weight_attr_pos = weight_attr_pos
self.weight_attr_neg = weight_attr_neg
self.criterion = nn.CrossEntropyLoss(torch.tensor([weight_attr_neg, weight_attr_pos]))
def forward(self, output, target):
"""Forward function."""
return self.loss_weight * self.criterion(output, target)
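# Minimal usage sketch (an assumption: a batch of 4 samples over 2 classes;
# not part of the original module and never invoked by it).
def _demo_losses():
    logits = torch.randn(4, 2)            # raw scores from some hypothetical head
    labels = torch.randint(0, 2, (4,))    # ground-truth class indices
    return (CELoss(loss_weight=0.5)(logits, labels),
            AttrCELoss()(logits, labels))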
|
StarcoderdataPython
|
3336827
|
<filename>greenlet_stackless/greenstackless.py
#! /usr/bin/python2.5
assert 0, 'see ../syncless/greenstackless.py instead'
|
StarcoderdataPython
|
14621
|
<filename>mayan/apps/rest_api/classes.py
from collections import namedtuple
import io
import json
from furl import furl
from django.core.handlers.wsgi import WSGIRequest
from django.http.request import QueryDict
from django.template import Variable, VariableDoesNotExist
from django.test.client import MULTIPART_CONTENT
from django.urls import resolve
from django.urls.exceptions import Resolver404
from mayan.apps.organizations.settings import setting_organization_url_base_path
from mayan.apps.templating.classes import Template
from .literals import API_VERSION
class BatchResponse:
def __init__(self, name, status_code, data, headers):
self.name = name
self.status_code = status_code
self.data = data
self.headers = headers
class NestableLazyIterator:
def __init__(
self, iterable_string, context, context_list_index, parent_iterator=None
):
self.iterable_string = iterable_string
self.context = context
self.context_list_index = context_list_index
self.parent_iterator = parent_iterator
self.items = None
self.index = 0
def __iter__(self):
return self
def __next__(self):
# Setup the initial values on the initial access.
if not self.items:
if self.parent_iterator:
next(self.parent_iterator)
self.update_iterable_object()
if self.index == len(self.items):
self.index = 0
if self.parent_iterator:
next(self.parent_iterator)
else:
raise StopIteration
self.update_iterable_object()
value = self.items[self.index]
self.context['iterables'][self.context_list_index] = value
self.index += 1
return value
def update_iterable_object(self):
self.items = Variable(var=self.iterable_string).resolve(context=self.context)
RenderedContent = namedtuple(
typename='RenderedContent', field_names=(
'body', 'include', 'method', 'name', 'url'
)
)
class BatchRequest:
def __init__(
self, collection, name, url, body=None, group_name=None,
include='true', iterables=None, method='GET'
):
self.collection = collection
self.body = body or {}
self.include = include
self.group_name = group_name
self.iterables = iterables
self.method = method
self.name = name
self.url = url
def execute(self):
if self.iterables:
# Initialize the iterables list to allow using any index.
self.collection.context['iterables'] = [None] * len(self.iterables)
iterator = None
for iterable_index, iterable in enumerate(self.iterables):
iterator = NestableLazyIterator(
context=self.collection.context,
context_list_index=iterable_index,
iterable_string=iterable, parent_iterator=iterator
)
while True:
try:
next(iterator)
except StopIteration:
break
except VariableDoesNotExist as exception:
self.collection.responses[self.name] = {
'data': {'error': str(exception)},
'include': 'true',
'is_response': True
}
return
else:
rendered_content = self.render_content()
BatchRequest(
collection=self.collection,
body=rendered_content.body,
group_name=self.group_name,
include=rendered_content.include,
method=rendered_content.method,
name=rendered_content.name,
url=rendered_content.url
).execute()
else:
rendered_content = self.render_content()
url_parts = furl(rendered_content.url)
try:
resolver_match = resolve(path=url_parts.pathstr)
except Resolver404 as exception:
self.collection.responses[rendered_content.name] = {
'data': {
'error': '"{}" not found'.format(exception.args[0]['path'])
},
'include': 'true',
'is_response': True,
'status_code': 404
}
return
else:
environ = getattr(
self.collection.view_request, 'environ', {}
).copy()
environ['REQUEST_METHOD'] = rendered_content.method
environ['PATH_INFO'] = self.url
environ['QUERY_STRING'] = url_parts.querystr
post_query_dict = QueryDict(mutable=True)
post_query_dict.update(rendered_content.body)
json_body = json.dumps(post_query_dict)
request_data = json_body.encode('utf-8')
environ['wsgi.input'] = io.BytesIO(request_data)
environ['CONTENT_LENGTH'] = str(len(request_data))
if rendered_content.method == 'POST':
environ['CONTENT_TYPE'] = MULTIPART_CONTENT
else:
environ['CONTENT_TYPE'] = 'application/octet-stream'
request = WSGIRequest(environ=environ)
request.LANGUAGE_CODE = getattr(
self.collection.view_request, 'LANGUAGE_CODE', None
)
request.POST = post_query_dict
request._read_started = True
request.auth = getattr(
self.collection.view_request, 'auth', None
)
request.csrf_processing_done = True
request.session = getattr(
self.collection.view_request, 'session', None
)
request.user = getattr(
self.collection.view_request, 'user', None
)
response = resolver_match.func(
request=request, **resolver_match.kwargs
)
result = {
'data': response.data,
'headers': {key: value for key, value in response.items()},
'include': rendered_content.include,
'is_response': True,
'status_code': response.status_code
}
self.collection.context[rendered_content.name] = result
self.collection.responses[rendered_content.name] = result
if self.group_name:
self.collection.context.setdefault('groups', {})
self.collection.context['groups'].setdefault(
self.group_name, []
)
self.collection.context['groups'][self.group_name].append(
result
)
def render_content(self):
rendered_body = {}
for key, value in self.body.items():
rendered_key = Template(template_string=key).render(
context=self.collection.context
)
rendered_value = Template(template_string=value).render(
context=self.collection.context
)
rendered_body[rendered_key] = rendered_value
rendered_include = Template(template_string=self.include).render(
context=self.collection.context
)
rendered_method = Template(template_string=self.method).render(
context=self.collection.context
)
rendered_name = Template(template_string=self.name).render(
context=self.collection.context
)
rendered_url = Template(template_string=self.url).render(
context=self.collection.context
)
return RenderedContent(
body=rendered_body, include=rendered_include,
method=rendered_method, name=rendered_name, url=rendered_url
)
class BatchRequestCollection:
def __init__(self, request_list=None):
self.requests = []
        for request_index, request_dict in enumerate(request_list or ()):
request_dict.update(
{'collection': self}
)
try:
self.requests.append(BatchRequest(**request_dict))
except Exception as exception:
raise ValueError(
'Error instantiating request #{}; {}'.format(
request_index, exception
)
) from exception
def execute(self, view_request):
self.context = {'view_request': view_request}
self.responses = {}
self.view_request = view_request
for request in self.requests:
request.execute()
        # Convert the accumulated responses into response class instances.
result = []
for key, value in self.responses.items():
if json.loads(s=value.get('include', 'true')):
result.append(
BatchResponse(
name=key,
status_code=value.get('status_code', 0),
data=value.get('data', {}),
headers=value.get('headers', {}),
)
)
return result
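# Illustrative sketch (an assumption, not from the original module): the shape
# of the request_list consumed by BatchRequestCollection above. The names,
# URLs and template variables are hypothetical; the second request iterates
# over the results of the first via the `iterables` template strings.
EXAMPLE_BATCH_REQUEST_LIST = [
    {
        'name': 'document_list',
        'url': '/api/v4/documents/',
        'method': 'GET'
    },
    {
        'name': 'document_{{ iterables.0.id }}',
        'url': '/api/v4/documents/{{ iterables.0.id }}/',
        'method': 'GET',
        'iterables': ['document_list.data.results']
    }
]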
class Endpoint:
def __init__(self, label, viewname=None, kwargs=None):
self.label = label
self.kwargs = kwargs
if viewname:
self.viewname = viewname
else:
installation_base_url = setting_organization_url_base_path.value
if installation_base_url:
installation_base_url = '/{}'.format(installation_base_url)
else:
installation_base_url = ''
self.url = '{}/api/v{}/{}/'.format(
installation_base_url, API_VERSION, self.label
)
try:
self.viewname = resolve(path=self.url).view_name
except Resolver404:
self.viewname = None
|
StarcoderdataPython
|
3317337
|
<filename>script.py
class script(object):
START_MSG = """ Hey {}
നീ ഏതാ മോനൂസെ എന്നെ [𝗖𝗜𝗡𝗘𝗠𝗔 𝗖𝗢𝗠𝗣𝗔𝗡𝗬](https://t.me/cinimacompany123)ഗ്രൂപ്പിലേക്ക് മാത്രമേ ഉപയോഗിക്കാൻ പറ്റൂ...
വെറുതെ സമയം കളയാൻ നിൽക്കണ്ട...വേഗം ഗ്രൂപ്പിലേക്ക് വിട്ടോ സിനിമ അവിടെ കിട്ടും...🤭"""
HELP_MSG = "ഇപ്പോഴും ഇപ്പോഴും പറയാൻ വയ്യ എന്നെ കൊണ്ട് നിനക്ക് ഒരു help ഉം കിട്ടില്ല...😂"
ABOUT_MSG = """⭕️<b>My Name : Auto Filter Bot V2</b>
⭕️<b>Creator :</b> @Cinema_company_owner
⭕️<b>Language :</b> <code>Python3</code>
⭕️<b>Library :</b> <a href='https://docs.pyrogram.org/'>Pyrogram 1.0.7</a>
⭕️<b>Tutorial Video :</b> <a href='https://youtu.be/KQVYQAOsFYY'>Video Link</a>
"""
|
StarcoderdataPython
|
3370676
|
# Improve ex093 so that it works with several players, including a system for viewing
# each player's performance details
from time import sleep
jogadores = list()
jogador = dict()
gols = list()
total = 0
while True:
jogador['jogador'] = str(input('Nome do jogador: ')).title().strip()
partidas = int(input(f'Quantos partidas {jogador["jogador"]} jogou: '))
total = 0
gols.clear()
for c in range(0, partidas):
gols.append(int(input(f'Quantos gols na partida {c}: ')))
total += gols[c]
jogador['gols'] = gols[:]
jogador['total'] = total
jogadores.append(jogador.copy())
while True:
resp = str(input('Deseja continuar: [S/N] '))
if resp in 'SsNn':
break
print('Erro! Digite apenas S ou N')
if resp in 'Nn':
break
print('+ ' * 25)
print(f'{"Cod."} {"Nome":10} {"Gols":20} {"Total"}')
cod = 0
for j in jogadores:
nome = j['jogador']
tot = j['total']
print(f'{cod:<5}{nome}{" " * (12-len(nome))}{j["gols"]}{" " * (21-(len(j["gols"])*3))}{tot}')
cod += 1
print('+ ' * 25)
print()
while True:
esc = int(input('Deseja ver o aproveitamento de qual jogador: (999 para sair) '))
if esc == 999:
print(' <<ENCERRANDO', end='')
sleep(1)
print('>', end='')
sleep(1)
print('>', end='')
break
else:
print('+ ' * 25)
if esc >= cod:
print('{}Desculpe! Cod. não encontrado. Tente Novamente!{}'.format('\033[31m', '\033[m'))
print('+ ' * 25)
else:
print(f'{"<<-- LEVANTAMENTO DO JOGADOR"} {jogadores[esc]["jogador"]} -->>')
for p, g in enumerate(jogadores[esc]['gols']):
sleep(1)
print(f' ==> Na partida {p} fez {g} gols!')
print('+ ' * 25)
|
StarcoderdataPython
|
3383110
|
<reponame>dana/python-message-match
import pytest
import sys
sys.path.append('..')
sys.path.append('.')
from message_match import mmatch # noqa: E402
# not nested
def test_simplest_possible():
assert mmatch({'a': 'b'}, {'a': 'b'})
def test_extra_stuff():
assert mmatch({'a': 'b', 'c': 'd'}, {'a': 'b'})
def test_simplest_miss():
assert not mmatch({'a': 'b'}, {'c': 'd'})
def test_simplest_multi_match():
assert mmatch({'a': 'b', 'c': 'd'}, {'a': 'b', 'c': 'd'})
# nested
def test_simplest_nested():
assert mmatch({'x': {'y': 'z'}}, {'x': {'y': 'z'}})
def test_simplest_nested_extra_stuff():
assert mmatch({'a': 'b', 'x': {'y': 'z'}}, {'x': {'y': 'z'}})
def test_multiple_matches_nested():
assert mmatch({'a': 'b', 'x': {'y': 'z'}},
{'a': 'b', 'x': {'y': 'z'}})
# array in message, scalar in match: checks membership
def test_array_contains():
assert mmatch({'a': [1, 2, 3]}, {'a': 2})
def test_array_does_not_contain():
assert not mmatch({'a': [1, 2, 3]}, {'a': 5})
# array on both sides: full recursion
def test_array_full_match():
assert mmatch({'a': [1, 2, 3]}, {'a': [1, 2, 3]})
@pytest.mark.xfail(strict=True)
def test_definitely_fails():
assert 1 == 2
def test_nested_array_full_match():
assert mmatch({'a': [{'a': 'b'}, 2, 3]}, {'a': [{'a': 'b'}, 2, 3]})
# regex
def test_simplest_regex():
assert mmatch({'a': 'forefoot'}, {'a': ' special/foo/'})
def test_simplest_regex_failure():
assert not mmatch({'a': 'forefoot'}, {'a': ' special/smurf/'})
def test_regex_failure_for_case_sensitivity():
assert not mmatch({'a': 'forefoot'}, {'a': ' special/FOO/'})
def test_regex_pass_for_case_sensitivity():
assert mmatch({'a': 'forefoot'}, {'a': ' special/FOO/i'})
# special forms
def test_always_pass_with_match_as_an_empty_dict():
assert mmatch({'a': 'b'}, {})
def test_always_pass_with_both_as_empty_dict():
assert mmatch({}, {})
def test_validate_the_always_pass_works_nested():
assert mmatch({'a': {'b': 'c'}}, {'a': {}})
def test_validate_always_pass_if_nested_does_not_override_failure_elsewhere():
assert not mmatch({'a': {'b': 'c'}}, {'a': {}, 'x': 'y'})
def test_always_pass_should_pass_even_against_a_deeper_structure():
assert mmatch({'a': {'b': 'c'}}, {})
# strangeness
def test_pass_empty_strings():
assert mmatch({'a': ''}, {'a': ''})
def test_fail_two_things_that_are_both_false_but_different():
assert not mmatch({'a': ''}, {'a': 0})
def test_fail_invalid_special():
assert not mmatch({'a': 'forefoot'}, {'a': ' specialhuhFOO/i'})
|
StarcoderdataPython
|
3251653
|
# PC: Pre-commit
## PC0xx: pre-commit-hooks
from __future__ import annotations
import functools
from importlib.abc import Traversable
from typing import Any
import yaml
@functools.cache
def precommit(package: Traversable) -> dict[str, Any]:
precommit_path = package.joinpath(".pre-commit-config.yaml")
if precommit_path.is_file():
with precommit_path.open("rb") as f:
return yaml.safe_load(f) # type: ignore[no-any-return]
return {}
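# Usage sketch (an assumption, not part of the original module): any object
# implementing the Traversable protocol (joinpath / is_file / open) works
# here, and a pathlib.Path does, so a repo checkout can be passed directly.
def _example_precommit_config() -> dict[str, Any]:
    from pathlib import Path
    return precommit(Path("."))  # parsed .pre-commit-config.yaml, or {}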
class PreCommit:
family = "pre-commit"
requires = {"PY006"}
@classmethod
def check(cls, precommit: dict[str, Any]) -> bool:
"Must have `{cls.repo}` repo in `.pre-commit-config.yaml`"
for repo in precommit.get("repos", {}):
# pylint: disable-next=no-member
if "repo" in repo and repo["repo"].lower() == cls.repo: # type: ignore[attr-defined]
return True
return False
class PC100(PreCommit):
"Has pre-commit-hooks"
repo = "https://github.com/pre-commit/pre-commit-hooks"
class PC110(PreCommit):
"Uses black"
repo = "https://github.com/psf/black"
class PC111(PreCommit):
"Uses blacken-docs"
requires = {"PY006", "PC110"}
repo = "https://github.com/asottile/blacken-docs"
class PC120(PreCommit):
"Uses isort"
repo = "https://github.com/pycqa/isort"
class PC130(PreCommit):
"Uses flake8"
repo = "https://github.com/pycqa/flake8"
class PC131(PreCommit):
"Adds flake8-bugbear"
requires = {"PC130"}
@staticmethod
def check(precommit: dict[str, Any]) -> bool:
"""
Must have `"flake8-bugbear"` in `additional_dependencies`. This can
catch lots of commonly buggy code patterns.
"""
for repo in precommit.get("repos", {}):
if (
"repo" in repo
and repo["repo"].lower() == "https://github.com/pycqa/flake8"
):
for hook in repo["hooks"]:
match hook:
case {"additional_dependencies": list(x)}:
for item in x:
if "flake8-bugbear" in item:
return True
return False
class PC140(PreCommit):
"Uses mypy"
repo = "https://github.com/pre-commit/mirrors-mypy"
class PC150(PreCommit):
"Uses PyUpgrade"
repo = "https://github.com/asottile/pyupgrade"
class PC160(PreCommit):
"Uses codespell"
repo = "https://github.com/codespell-project/codespell"
class PC170(PreCommit):
"Uses PyGrep hooks"
repo = "https://github.com/pre-commit/pygrep-hooks"
class PC180(PreCommit):
"Uses prettier"
repo = "https://github.com/pre-commit/mirrors-prettier"
class PC901(PreCommit):
"Custom pre-commit CI message"
@staticmethod
def check(precommit: dict[str, Any]) -> bool:
"""
Should have something like this in `.pre-commit-config.yaml`:
```yaml
ci:
autoupdate_commit_msg: 'chore: update pre-commit hooks'
```
"""
return "autoupdate_commit_msg" in precommit.get("ci", {})
repo_review_fixtures = {"precommit"}
repo_review_checks = {p.__name__ for p in PreCommit.__subclasses__()}
|
StarcoderdataPython
|
3305437
|
<filename>src/python/phyre/eval_task_complexity.py
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A scrict compute evaluation stats for a task template.
Evaluation stats contains number of attempts required to find a solution by
random search as well as the list of a few solutions.
To compute the stats run:
python eval_task_complexity.py --template-id 00100
"""
import collections
import enum
import functools
import itertools
import json
import logging
import multiprocessing
import os
import pickle
import signal
import sys
import joblib
import scipy.stats
import phyre.action_mappers
import phyre.action_simulator
import phyre.compute_solution_power
import phyre.loader
import phyre.settings
import phyre.util
CREATOR_HASH = phyre.util.compute_creator_hash()
INVALID_INPUT = phyre.action_simulator.SimulationStatus.INVALID_INPUT
NOT_SOLVED = phyre.action_simulator.SimulationStatus.NOT_SOLVED
UNSTABLY_SOLVED = phyre.action_simulator.SimulationStatus.UNSTABLY_SOLVED
STABLY_SOLVED = phyre.action_simulator.SimulationStatus.STABLY_SOLVED
SOLVED = phyre.action_simulator.SimulationStatus.SOLVED
MIN_VALID_ATTEMPTS = 10000
# Tasks that have a probability to be solved that is likely to be higher than
# this threshold are considered GOOD. Tasks that have a probability to be
# solved that is likely to be lower than twice this threshold are considered
# BAD.
SOLVABILITY_THRESHOLD_PROBS = {
'ball': 1e-5,
'two_balls': 1e-6,
'ramp': 1e-5,
}
P_VALUE = 0.05
# For solvable tasks collect at least this many solutions.
MIN_SOLUTIONS = 3
# Action number i is computed as (i % ACTION_POOL_SIZE) action in
# ACTION_POOL_SIZE actions generated with seed (1000 + i // ACTION_POOL_SIZE).
ACTION_POOL_SIZE = 10000
MAX_SOLUTIONS_TO_KEEP = 5
VERSION = '8'
STATS = frozenset([
'status_counts', 'flags', 'solutions', 'unstable_solutions',
'solution_power'
])
class Flags(enum.Enum):
"""Flags defining solvability of task in tier."""
GOOD_STABLE = enum.auto()
GOOD = enum.auto()
BAD_STABLE = enum.auto()
BAD = enum.auto()
IMPOSSIBLE = enum.auto()
# Less than 10 attempts on average.
TRIVIAL = enum.auto()
class LoadingMode(enum.Enum):
"""Loading mode for eval stats."""
FIRST_SOLUTION_ONLY = enum.auto()
FULL = enum.auto()
def _worker(args):
return _eval_single_task(*args)
def _get_actions(action_simulator, start, num_actions):
action_pool = start // ACTION_POOL_SIZE
assert (start + num_actions - 1) // ACTION_POOL_SIZE == action_pool, (
ACTION_POOL_SIZE, start, num_actions)
actions = action_simulator.build_discrete_action_space(ACTION_POOL_SIZE,
seed=1000 +
action_pool)
actions = actions[start % ACTION_POOL_SIZE:][:num_actions]
return actions
def _eval_single_task(task, action_tier_name, start, num_attempts):
"""Evalute the task on attmepts random action from tier."""
task_id = task.taskId
action_simulator = phyre.ActionSimulator([task], action_tier_name)
actions = _get_actions(action_simulator, start, num_attempts)
statuses = collections.defaultdict(int)
stable_solutions, unstable_solutions = [], []
for action in actions:
status = action_simulator.simulate_action(0,
action,
need_images=False,
stable=True).status
statuses[status] += 1
if status == STABLY_SOLVED:
stable_solutions.append(action.tolist())
if status == UNSTABLY_SOLVED:
unstable_solutions.append(action.tolist())
return dict(task_id=task_id,
tier=action_tier_name,
stable_solutions=stable_solutions[:MAX_SOLUTIONS_TO_KEEP],
unstable_solutions=unstable_solutions[:MAX_SOLUTIONS_TO_KEEP],
statuses=statuses)
def compute_flags(tier, status_counts):
"""Given status counts run statisical tests and return a list of labels."""
total_attempts = sum(status_counts.values())
valid_attempts = total_attempts - status_counts[INVALID_INPUT]
stable_solution_attempts = status_counts[STABLY_SOLVED]
solution_attempts = status_counts[UNSTABLY_SOLVED] + stable_solution_attempts
flags = {}
threshold = SOLVABILITY_THRESHOLD_PROBS[tier]
for suffix, count in [('', solution_attempts),
('_stable', stable_solution_attempts)]:
flags[f'good{suffix}'] = scipy.stats.binom_test(
count, n=valid_attempts, p=threshold,
alternative='greater') < P_VALUE
flags[f'bad{suffix}'] = scipy.stats.binom_test(
count, n=valid_attempts, p=2 * threshold,
alternative='less') < P_VALUE
if not solution_attempts:
        flags['impossible'] = True
if stable_solution_attempts / max(total_attempts, 1) >= 0.1:
flags['trivial'] = True
return frozenset(getattr(Flags, k.upper()) for k, v in flags.items() if v)
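# Illustrative sketch (hypothetical counts; never called by the script):
# with 60 stable and 40 unstable solutions out of ~10k valid ball-tier
# attempts, the binomial tests above should label the task GOOD and
# GOOD_STABLE, and it is far from TRIVIAL.
def _demo_compute_flags():
    counts = {
        INVALID_INPUT: 100,
        NOT_SOLVED: 9800,
        UNSTABLY_SOLVED: 40,
        STABLY_SOLVED: 60,
    }
    return compute_flags('ball', counts)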
class TaskEvaller():
"""Supervisor that runs evals in chunks until everything is computed."""
def __init__(self,
tasks,
min_valid_attempts,
num_workers,
simulate_worker_size,
reject_ball_solvable=False,
warp_size=240):
self.min_valid_attempts = min_valid_attempts
self.simulate_worker_size = simulate_worker_size
self.reject_ball_solvable = reject_ball_solvable
self.warp_size = warp_size
assert ACTION_POOL_SIZE % simulate_worker_size == 0
stats_per_task_tier = {}
for tier in phyre.action_mappers.ACTION_MAPPERS:
for task in tasks:
stats_per_task_tier[task.taskId, tier] = dict(
status_counts={
status: 0 for status in phyre.SimulationStatus
},
solutions=[],
unstable_solutions=[],
)
done_task_tier = set()
self._task_id_to_tasks = {task.taskId: task for task in tasks}
self._state = {
'stats_per_task_tier': stats_per_task_tier,
'done_task_tier': done_task_tier
}
self._pool = multiprocessing.Pool(
num_workers if num_workers > 0 else None)
def __del__(self):
self._pool.close()
def step(self):
"""Schedule a chunk of evaluation jobs."""
done_simulations_per_task_tier = {}
for key, stats in self._state['stats_per_task_tier'].items():
if key in self._state['done_task_tier']:
continue
counts = sum(stats['status_counts'].values())
done_simulations_per_task_tier[key] = counts
num_unresolved_task_tier_pairs = len(done_simulations_per_task_tier)
if self.reject_ball_solvable:
# First compute stats for ball tier.
ball_only = {
k: v
for k, v in done_simulations_per_task_tier.items()
if k[1] == 'ball'
}
if ball_only:
done_simulations_per_task_tier = ball_only
        simulation_tasks = []
        for key in itertools.cycle(list(done_simulations_per_task_tier)):
            start = done_simulations_per_task_tier[key]
            done_simulations_per_task_tier[key] += self.simulate_worker_size
            task_id, tier = key
            simulation_tasks.append((self._task_id_to_tasks[task_id], tier,
                                     start, self.simulate_worker_size))
            if len(simulation_tasks) >= self.warp_size:
                break
        logging.info(
            'Starting simulation chunk with %d items. Total unresolved tasks:'
            ' %s. Simulations done: %d', len(simulation_tasks),
            num_unresolved_task_tier_pairs,
            sum(
                sum(x['status_counts'].values())
                for x in self._state['stats_per_task_tier'].values()))
        for result in self._pool.imap(_worker, simulation_tasks):
key = (result['task_id'], result['tier'])
if key in self._state['done_task_tier']:
# We scheduled a simulation task, but already got enough data.
# So just ignoring this bit to be agnostic of warp_size.
continue
# Note, we may "overshoot" here: update stats that are already complete.
stats = self._state['stats_per_task_tier'][key]
for status, count in result['statuses'].items():
stats['status_counts'][status] += count
stats['solutions'].extend(result['stable_solutions'])
del stats['solutions'][MAX_SOLUTIONS_TO_KEEP:]
stats['unstable_solutions'].extend(result['unstable_solutions'])
del stats['unstable_solutions'][MAX_SOLUTIONS_TO_KEEP:]
self._update_done_stats(*key)
return self.done()
def _update_done_stats(self, task_id, action_tier):
"""Update a set of "done" tasks after new data for task_id and action_tier."""
key = (task_id, action_tier)
status_counts = self._state['stats_per_task_tier'][key]['status_counts']
valid_attempts = sum(
status_counts.values()) - status_counts[INVALID_INPUT]
if valid_attempts < self.min_valid_attempts:
return
flags = compute_flags(action_tier, status_counts)
if not ({Flags.GOOD, Flags.BAD} & flags):
return
if not ({Flags.GOOD_STABLE, Flags.BAD_STABLE} & flags):
return
num_solved = status_counts[UNSTABLY_SOLVED] + status_counts[
STABLY_SOLVED]
if Flags.GOOD in flags and num_solved < MIN_SOLUTIONS:
return
if (Flags.GOOD_STABLE in flags and
status_counts[STABLY_SOLVED] < MIN_SOLUTIONS):
return
self._state['done_task_tier'].add(key)
logging.info('Done simulation for %s. Stats: %s. Flags: %s', key,
status_counts, flags)
# If reject_ball_solvable, add task ids for ball solved task to
# done_task_tiers_reasons.
solved_by_ball = (action_tier == 'ball' and Flags.GOOD_STABLE in flags)
if self.reject_ball_solvable and solved_by_ball:
for tier in phyre.action_mappers.ACTION_MAPPERS:
tier_key = (task_id, tier)
if tier_key in self._state['done_task_tier']:
continue
logging.info(
'Removing %s. Solved by ball and reject_ball_solvable is'
' True', tier_key)
self._state['done_task_tier'].add(tier_key)
def done(self):
"""Checks whether evaluation for all jobs is done."""
return len(self._state['done_task_tier']) == len(
self._state['stats_per_task_tier'])
def result(self):
"""Returns evaluation results."""
assert self.done()
return self._state['stats_per_task_tier']
def maybe_load(self, checkpoint_path):
"""If checkpoint is provided will load evaluation state."""
if checkpoint_path is not None and os.path.exists(checkpoint_path):
logging.info('Loading %s', checkpoint_path)
with open(checkpoint_path, 'rb') as stream:
self._state = pickle.load(stream)
# Re-compute done_task_tier.
self._state['done_task_tier'] = set()
for key in self._state['stats_per_task_tier']:
self._update_done_stats(*key)
def maybe_save(self, checkpoint_path):
"""If checkpoint is provided will save evaluation state."""
if checkpoint_path is not None:
tmp_path = checkpoint_path + '.tmp'
with open(tmp_path, 'wb') as stream:
pickle.dump(self._state, stream)
os.rename(tmp_path, checkpoint_path)
def load_all_eval_stats(num_workers=None, mode=LoadingMode.FULL):
"""Load all computed up-to-date eval stats.
Args:
num_workers: None or int, num workers to use for loading. If None
will load in the main thread.
mode: LoadingMode, defines a subset of fields to load.
Returns:
dict of dicts:
            template_id -> task_id -> eval_stats
"""
known_template_ids = [
x.split('.')[0]
for x in os.listdir(str(phyre.settings.TASK_EVAL_DIR))
if x.endswith('.json')
]
local_maybe_load_evaluation = functools.partial(maybe_load_evaluation,
mode=mode)
if num_workers is None:
eval_stats = {}
for template_id in known_template_ids:
eval_stats[template_id] = local_maybe_load_evaluation(template_id)
else:
num_workers = num_workers if num_workers > 0 else None
with multiprocessing.Pool(num_workers) as pool:
eval_stats = pool.map(local_maybe_load_evaluation,
known_template_ids)
eval_stats = dict(zip(known_template_ids, eval_stats))
eval_stats = {k: v for k, v in eval_stats.items() if v is not None}
return eval_stats
def _clean_stats(per_tier_stats, tier):
stats = {}
counts = {
phyre.SimulationStatus(int(k)): v
for k, v in per_tier_stats['status_counts'].items()
}
counts[SOLVED] = counts[UNSTABLY_SOLVED] + counts[STABLY_SOLVED]
stats['status_counts'] = counts
stats['flags'] = compute_flags(tier, counts)
stats['solutions'] = per_tier_stats['solutions']
stats['unstable_solutions'] = per_tier_stats['unstable_solutions']
return stats
def maybe_load_evaluation(template_id, mode=LoadingMode.FULL):
"""Loads evaluation file if up-to-date."""
task_path = str(phyre.settings.TASK_SCRIPTS_DIR / f'task{template_id}.py')
if not os.path.exists(task_path):
logging.warning('Rogue eval file for %s', template_id)
return None
if does_evaluation_need_update(task_path):
return None
with open(get_evaluation_meta_path(task_path)) as stream:
eval_data = json.load(stream)
eval_data.update(joblib.load(get_evaluation_path(task_path)))
if mode == LoadingMode.FULL:
solution_power = joblib.load(
phyre.compute_solution_power.get_solution_power_path(task_path))
else:
solution_power = None
if mode == LoadingMode.FULL:
needed_stats = STATS
elif mode == LoadingMode.FIRST_SOLUTION_ONLY:
needed_stats = ('solutions',)
else:
raise ValueError('Unknown loading mode: %s' % mode)
final_eval_data = {
stat: {tier: {} for tier in phyre.action_mappers.ACTION_MAPPERS
} for stat in STATS
}
for task, per_task_stats in eval_data['eval_stats'].items():
for tier, per_tier_stats in per_task_stats.items():
for stat_name, value in _clean_stats(per_tier_stats, tier).items():
final_eval_data[stat_name][tier][task] = value
if solution_power is not None:
for tier in phyre.action_mappers.ACTION_MAPPERS:
final_eval_data['solution_power'][tier][
'task_ids'] = solution_power['task_ids']
final_eval_data['solution_power'][tier][
'actions_on_tasks'] = solution_power[f'{tier}_actions_on_tasks']
final_eval_data = {k: final_eval_data[k] for k in needed_stats}
if mode == LoadingMode.FIRST_SOLUTION_ONLY:
for per_task_solution_list in final_eval_data['solutions'].values():
for solution_list in per_task_solution_list.values():
solution_list[:] = solution_list[:1]
return final_eval_data
def maybe_load_status_counts(template_id):
eval_stats = maybe_load_evaluation(template_id)
if eval_stats is None:
return None
status_counts = {}
for tier, tier_stats in eval_stats['status_counts'].items():
for task_id, task_stats in tier_stats.items():
if task_id not in status_counts:
status_counts[task_id] = {}
status_counts[task_id][tier] = {
phyre.SimulationStatus(int(k)): v
for k, v in task_stats.items()
}
return status_counts
def load_instance_status_counts(task_instance_id):
template_id = task_instance_id.split(':')[0]
counts = maybe_load_status_counts(template_id)
if counts is None:
return None
else:
return counts.get(task_instance_id)
def get_task_id_slurm(log_dir):
assert 'SLURM_ARRAY_TASK_ID' in os.environ
task_list_fpath = os.path.join(log_dir, 'task_list')
with open(task_list_fpath) as stream:
task_list = stream.read().split()
return task_list[int(os.environ['SLURM_ARRAY_TASK_ID'])]
def get_evaluation_path(task_path):
task_id = os.path.basename(task_path).split('.')[0][4:]
return str(phyre.settings.TASK_EVAL_DIR / task_id) + '.lzma'
def get_evaluation_meta_path(task_path):
task_id = os.path.basename(task_path).split('.')[0][4:]
return str(phyre.settings.TASK_EVAL_DIR / task_id) + '.meta.json'
def does_evaluation_need_update(task_path):
return does_eval_stats_need_update(
task_path
) or phyre.compute_solution_power.does_solution_power_need_update(task_path)
def does_eval_stats_need_update(task_path):
_, _, task_script = phyre.loader.load_task_script(task_path)
task_script_version = task_script.build_task.get_version()
logging.debug('Task script version: %s', task_script_version)
creator_version = CREATOR_HASH
logging.debug('Creator lib version: %s', creator_version)
eval_meta_fpath = get_evaluation_meta_path(task_path)
eval_fpath = get_evaluation_path(task_path)
logging.debug(eval_meta_fpath)
if os.path.exists(eval_meta_fpath) and os.path.exists(eval_fpath):
logging.debug('Found existing eval file')
with open(eval_meta_fpath) as stream:
eval_data = json.load(stream)
if eval_data.get('evaluator_version', '1') != VERSION:
logging.debug('Computed with old version of eval_task_complexity')
return True
if task_script_version != eval_data.get('task_script_version', '1'):
logging.debug('Computed for old task (%s)',
eval_data.get('task_script_version', '1'))
return True
logging.debug('The eval results up to date')
return False
else:
return True
def sig_handler(signum, frame):
"""USR1 signal handler that requeues the job."""
del frame # Unused.
logging.warning('Signal handler called with signal %s', signum)
prod_id = int(os.environ['SLURM_PROCID'])
if 'SLURM_ARRAY_JOB_ID' in os.environ:
job_id = '%s_%s' % (os.environ['SLURM_ARRAY_JOB_ID'],
os.environ['SLURM_ARRAY_TASK_ID'])
else:
job_id = os.environ['SLURM_JOB_ID']
if prod_id == 0:
logging.warning('Requeuing job %s', job_id)
os.system('scontrol requeue %s' % job_id)
else:
logging.warning('Not the master process, no need to requeue.')
sys.exit(-1)
def term_handler(signum, frame):
"""Dummy TERM signal handler that does nothing."""
del frame # Unused.
logging.warning('Signal handler called with signal %s', signum)
logging.warning('Bypassing SIGTERM.')
def init_signal_handler():
"""Handle signals sent by SLURM for time limit / pre-emption."""
signal.signal(signal.SIGUSR1, sig_handler)
signal.signal(signal.SIGTERM, term_handler)
logging.warning('Signal handler installed.')
def maybe_recompute_solution_power(template_id, task_path, num_workers):
if not phyre.compute_solution_power.does_solution_power_need_update(
task_path):
return
logging.info('Stale solution power. Recomputing for: %s', template_id)
# Reading eval meta.
eval_meta_fpath = get_evaluation_meta_path(task_path)
assert os.path.exists(eval_meta_fpath), (
f'Eval-meta path does not exist for {task_path}')
with open(eval_meta_fpath) as stream:
eval_meta = json.load(stream)
# Reading main eval data.
eval_fpath = get_evaluation_path(task_path)
assert os.path.exists(eval_fpath), (
f'Eval-stats path does not exist for {task_path}')
eval_data = joblib.load(eval_fpath)
phyre.compute_solution_power.save_solution_power(template_id, eval_meta,
eval_data, task_path,
num_workers)
def main(template_id, log_dir, force, interactive, **simulate_kwargs):
if template_id is None:
assert log_dir is not None, 'Provide --template-id or --log-dir'
init_signal_handler()
template_id = get_task_id_slurm(log_dir)
# Compute the hash before starting the eval.
logging.info('Task template id: %s', template_id)
phyre.settings.TASK_EVAL_DIR.mkdir(parents=True, exist_ok=True)
_, task_path, task_script = phyre.loader.load_task_script(template_id)
if not does_eval_stats_need_update(task_path) and not interactive:
if force:
logging.warning('Oh, wait a sec, force mode, will rewrite')
else:
return maybe_recompute_solution_power(
template_id, task_path, simulate_kwargs['num_workers'])
tasks = task_script.build_task.build_tasks_for_search(template_id)
logging.info('Built %d task instances.', len(tasks))
search_params = task_script.build_task.search_params
logging.info('Search params: %s', search_params)
task_script_hash = phyre.util.compute_file_hash(task_path)
if log_dir:
checkpoint_path = os.path.join(log_dir, f'{template_id}.cpkt')
else:
checkpoint_path = None
evaller = TaskEvaller(
tasks,
reject_ball_solvable='BALL:GOOD_STABLE' in search_params.excluded_flags,
**simulate_kwargs)
evaller.maybe_load(checkpoint_path)
while not evaller.done():
evaller.step()
evaller.maybe_save(checkpoint_path)
eval_stats_task_tier = evaller.result()
eval_stats = collections.defaultdict(dict)
for (task_id, tier), stats in eval_stats_task_tier.items():
stats['status_counts'] = {
int(k): v for k, v in stats['status_counts'].items()
}
eval_stats[task_id][tier] = stats
eval_fpath = get_evaluation_path(task_path)
eval_meta_fpath = get_evaluation_meta_path(task_path)
    # Strip non-essential flags from simulate_kwargs.
clean_simulate_kwargs = simulate_kwargs.copy()
del clean_simulate_kwargs['num_workers']
meta = dict(evaluator_version=VERSION,
task_script_hash=task_script_hash,
task_script_version=task_script.build_task.get_version(),
creator_hash=CREATOR_HASH,
simulate_kwargs=clean_simulate_kwargs)
eval_data = dict(eval_stats=eval_stats)
if interactive:
# Remove solutions.
for ball_solvable_filter in True, False:
if ball_solvable_filter:
print('BALL-solvable')
else:
print('BALL-NOT-solvable')
for task_id, task_stats in eval_stats.items():
ball_solvable = (
task_stats['ball']['status_counts'][STABLY_SOLVED] +
task_stats['ball']['status_counts'][UNSTABLY_SOLVED]) > 0
if ball_solvable_filter != ball_solvable:
continue
print('===', task_id, end=' ')
for tier, stats in task_stats.items():
stats = stats['status_counts']
print(tier,
stats[STABLY_SOLVED],
stats[UNSTABLY_SOLVED],
stats[INVALID_INPUT],
stats[NOT_SOLVED],
end='\t')
print()
else:
# Serialize to string first to type-check.
json.dumps(eval_data, indent=2)
logging.info('Saving %s', eval_fpath)
joblib.dump(eval_data, eval_fpath, compress=('lzma', 6))
# Meta is written at the end.
with open(eval_meta_fpath, 'w') as stream:
json.dump(meta, stream)
# Since we updated eval stats, we need to recompute solution power
phyre.compute_solution_power.save_solution_power(
template_id,
meta,
eval_data,
task_path,
num_workers=simulate_kwargs['num_workers'])
if __name__ == '__main__':
logging.basicConfig(format=('%(asctime)s %(levelname)-8s'
' {%(module)s:%(lineno)d} %(message)s'),
level=logging.DEBUG,
datefmt='%Y-%m-%d %H:%M:%S')
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--force', action='store_true')
parser.add_argument('--template-id')
parser.add_argument('--log-dir')
parser.add_argument('--num-workers', type=int, default=-1)
parser.add_argument('--min-valid-attempts',
type=int,
default=MIN_VALID_ATTEMPTS)
parser.add_argument('--simulate-worker-size',
type=int,
default=MIN_VALID_ATTEMPTS)
parser.add_argument('--interactive', action='store_true')
main(**vars(parser.parse_args()))
|
StarcoderdataPython
|
1719746
|
"""
Script to convert Qualtrics results to more manageable format.
Author: XXXXX
Usage: python convert_data.py
Required files:
- writtendescriptions.csv
- list_mapping.csv
Generates:
- written_annotations.json
- written_plain.txt
- duration_stats.json
"""
# Stdlib
import csv
import json
import os
from collections import defaultdict
# Installed:
from numpy import std, median, mean
def get_entries(filename):
"Load the entries from the file, representing each participant as a dictionary."
with open(filename, encoding='cp1252') as f:
reader = csv.reader(f)
keys = next(reader)
# Skip next two lines.
for i in range(2):
next(reader)
entries = [dict(zip(keys,row)) for row in reader]
return entries
def duration_statistics(entries):
"Generate duration statistics."
durations = [int(entry['Duration (in seconds)']) for entry in entries]
return dict(mean_seconds=mean(durations),
median_seconds=median(durations),
std_seconds=std(durations),
min_seconds=min(durations),
max_seconds=max(durations),
mean_minutes=mean(durations)/60,
median_minutes=median(durations)/60,
std_minutes=std(durations)/60,
min_minutes=min(durations)/60,
max_minutes=max(durations)/60,
durations=durations)
def load_mappings(filename):
"Load mappings from file."
with open(filename) as f:
reader = csv.reader(f,delimiter=';')
header = next(reader)
question_to_image = dict()
image_to_partition = dict()
for image, question, partition in reader:
question_to_image[question] = image
image_to_partition[image] = partition
return question_to_image, image_to_partition
def write_json(object,filename):
"Write JSON to a file."
with open(filename,'w') as f:
json.dump(object, f, indent=4)
def get_items(entries):
"Get basic items (to be enriched later)."
items = []
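    # Participants are numbered from 1000 upwards, one number per row of the export.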
for i, entry in enumerate(entries, start=1000):
response_id = entry['ResponseId']
for key, value in entry.items():
if key == 'Q319':
# Practice question: Ignore.
continue
elif key.startswith('Q') and value:
item = dict(participant=i,
description=value.strip(),
question=key,
response_id=response_id)
items.append(item)
return items
def enrich_items(items, question2img, img2part):
"Enrich items with information about their partition and the image."
for item in items:
question = item['question']
image = question2img[question]
partition = img2part[image]
item['image'] = image
item['partition'] = partition
# This function modifies the list in place, so it returns nothing.
return None
def participant_index(items):
"Build an index: identifier -> descriptions"
description_index = defaultdict(list)
image_index = defaultdict(list)
for item in items:
participant = item['participant']
description = item['description'] + '\n'
image = item['image']
description_index[participant].append(description)
image_index[participant].append(image)
return description_index, image_index
def extract_lines(items):
"Extract lines to create a plaintext corpus."
lines = [item['description'] + '\n' for item in items]
return lines
def ensure_folder(folder_name):
"Make sure a folder exists. If not, create it."
if not os.path.exists(folder_name):
os.mkdir(folder_name)
def create_participant_files(items, folder='Participants/Plain/'):
"Create files with descriptions per participant."
description_index, image_index = participant_index(items)
ensure_folder(folder)
for participant, lines in description_index.items():
with open(folder + str(participant) + '.txt', 'w') as f:
f.writelines(lines)
with open(folder + 'participant_lines_image_mapping.json', 'w') as f:
json.dump(image_index, f, indent=2)
if __name__ == "__main__":
entries = get_entries('./writtendescriptions.csv')
items = get_items(entries)
question_to_image, image_to_partition = load_mappings('./list_mapping.csv')
enrich_items(items, question_to_image, image_to_partition)
write_json(items, 'written_annotations.json')
lines = extract_lines(items)
with open('written_plain.txt','w') as f:
f.writelines(lines)
create_participant_files(items)
duration_stats = duration_statistics(entries)
write_json(duration_stats,"duration_stats.json")
|
StarcoderdataPython
|
1687932
|
<gh_stars>0
from gevent import monkey; monkey.patch_all()
import gc
import os
import sys
import json
import time
import code
import socket
import inspect
import logging
import logging.handlers
import msgpack
import cStringIO
import urlparse
import argparse
import resource
import traceback
import threading
from ast import literal_eval
import gevent
import requests
import statsd
import tornado.ioloop
import tornado.web
import tornado.websocket
import tornado.iostream
from tornado.template import BaseLoader, Template
from tornado.web import StaticFileHandler, HTTPError
MSG_TYPE_CONSOLE = 0
MSG_TYPE_LOG = 1
MAX_LOG_FILE_SIZE = 100 * 1024 * 1024 # 100MB
# set the logging level of requests module to warning
# otherwise it swamps with too many logs
logging.getLogger('requests').setLevel(logging.WARNING)
def tag(*tags):
'''
Constructs a decorator that tags a function with specified
strings (@tags). The tags on the decorated function are
available via fn.tags
'''
def dfn(fn):
_tags = getattr(fn, 'tags', set())
_tags.update(tags)
fn.tags = _tags
return fn
return dfn
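# Hypothetical usage sketch of @tag (names below are illustrative only):
#
#     @tag('raw', 'internal')
#     def dump(): ...
#
#     get_fn_tags(dump)  # -> set(['raw', 'internal'])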
def get_fn_tags(fn):
return getattr(fn, 'tags', set())
def mime(mime):
'''
Constructs a decorator that sets the preferred mime type
to be written in the http response when returning the
function result.
'''
def dfn(fn):
fn.mime = mime
return fn
return dfn
def raw(mime='application/octet-stream'):
'''
Constructs a decorator that marks the fn
as raw response format
'''
def dfn(fn):
tags = getattr(fn, 'tags', set())
tags.add('raw')
fn.tags = tags
fn.mime = getattr(fn, 'mime', mime)
return fn
return dfn
class RPCCallException(Exception):
pass
class BaseHandler(tornado.web.RequestHandler):
def get_template_namespace(self):
ns = super(BaseHandler, self).get_template_namespace()
ns.update(sys.funcserver.define_template_namespace())
return ns
class PyInterpreter(code.InteractiveInterpreter):
def __init__(self, *args, **kwargs):
code.InteractiveInterpreter.__init__(self, *args, **kwargs)
self.output = []
def write(self, data):
self.output.append(data)
class WSConnection(tornado.websocket.WebSocketHandler):
'''
Websocket based communication channel between a
client and the server.
'''
WRITE_BUFFER_THRESHOLD = 1 * 1024 * 1024 # 1MB
def open(self):
'''
Called when client opens connection. Initialization
is done here.
'''
self.id = id(self)
self.funcserver = self.application.funcserver
# register this connection with node
self.state = self.funcserver.websocks[self.id] = {'id': self.id, 'sock': self}
def on_message(self, msg):
'''
Called when client sends a message.
Supports a python debugging console. This forms
the "eval" part of a standard read-eval-print loop.
Currently the only implementation of the python
console is in the WebUI but the implementation
of a terminal based console is planned.
'''
msg = json.loads(msg)
interpreter = self.state.get('interpreter', None)
if interpreter is None:
interpreter = PyInterpreter(self.funcserver.define_python_namespace())
self.state['interpreter'] = interpreter
code = msg['code']
msg_id = msg['id']
stdout = sys.stdout
try:
sys.stdout = cStringIO.StringIO()
interpreter.runsource(code)
output = sys.stdout.getvalue() or interpreter.output
if isinstance(output, list): output = ''.join(output)
interpreter.output = []
finally:
sys.stdout = stdout
msg = {'type': MSG_TYPE_CONSOLE, 'id': msg_id, 'data': output}
self.send_message(msg)
def on_close(self):
'''
Called when client closes this connection. Cleanup
is done here.
'''
if self.id in self.funcserver.websocks:
self.funcserver.websocks[self.id] = None
ioloop = tornado.ioloop.IOLoop.instance()
ioloop.add_callback(lambda: self.funcserver.websocks.pop(self.id, None))
def send_message(self, msg, binary=False):
# TODO: check if following two lines are required
# tornado documentation seems to indicate that
# this might be handled internally.
if not isinstance(msg, str):
msg = json.dumps(msg)
try:
if self.ws_connection:
self.write_message(msg, binary=binary)
except tornado.iostream.StreamClosedError:
self.on_close()
@property
def is_buffer_full(self):
bsize = sum([len(x) for x in self.stream._write_buffer])
return bsize >= self.WRITE_BUFFER_THRESHOLD
def _msg_from(self, msg):
return {'type': msg.get('type', ''), 'id': msg['id']}
def call(fn):
ioloop = tornado.ioloop.IOLoop.instance()
ioloop.add_callback(fn)
def make_handler(template, handler):
class SimpleHandler(handler):
def get(self):
return self.render(template)
return SimpleHandler
def resolve_path(path):
return path if os.path.isabs(path) else os.path.join(os.path.dirname(__file__), path)
class WebLogHandler(logging.Handler):
def __init__(self, funcserver):
super(WebLogHandler, self).__init__()
self.funcserver = funcserver
def emit(self, record):
msg = self.format(record)
self.funcserver._send_log(msg)
class TemplateLoader(BaseLoader):
def __init__(self, dirs=None, **kwargs):
super(TemplateLoader, self).__init__(**kwargs)
self.dirs = dirs or []
def add_dir(self, d):
self.dirs.append(d)
def del_dir(self, d):
self.dirs.remove(d)
def resolve_path(self, name, parent_path=None):
for d in reversed(self.dirs):
p = os.path.join(d, name)
if not os.path.exists(p): continue
return os.path.abspath(p)
return name
def _create_template(self, name):
f = open(name, 'rb')
template = Template(f.read(), name=name, loader=self)
f.close()
return template
class CustomStaticFileHandler(StaticFileHandler):
PATHS = []
@classmethod
def get_absolute_path(cls, root, path):
for p in reversed(cls.PATHS):
ap = os.path.join(p, path)
if not os.path.exists(ap):
continue
return ap
return path
def validate_absolute_path(self, root, absolute_path):
if (os.path.isdir(absolute_path) and
self.default_filename is not None):
# need to look at the request.path here for when path is empty
# but there is some prefix to the path that was already
# trimmed by the routing
if not self.request.path.endswith("/"):
self.redirect(self.request.path + "/", permanent=True)
return
absolute_path = os.path.join(absolute_path, self.default_filename)
if not os.path.exists(absolute_path):
raise HTTPError(404)
if not os.path.isfile(absolute_path):
raise HTTPError(403, "%s is not a file", self.path)
return absolute_path
class StatsCollector(object):
STATS_FLUSH_INTERVAL = 1
def __init__(self, prefix, stats_loc):
self.cache = {}
self.gauge_cache = {}
self.stats = None
if not stats_loc: return
port = None
if ':' in stats_loc:
ip, port = stats_loc.split(':')
port = int(port)
else:
ip = stats_loc
S = statsd.StatsClient
self.stats = S(ip, port, prefix) if port is not None else S(ip, prefix=prefix)
def fn():
while 1:
time.sleep(self.STATS_FLUSH_INTERVAL)
self._collect_ramusage()
self.send()
self.stats_thread = gevent.spawn(fn)
def incr(self, key, n=1):
if self.stats is None: return
self.cache[key] = self.cache.get(key, 0) + n
def decr(self, key, n=1):
if self.stats is None: return
self.cache[key] = self.cache.get(key, 0) - n
def timing(self, key, ms):
if self.stats is None: return
return self.stats.timing(key, ms)
def gauge(self, key, n, delta=False):
if delta:
v, _ = self.gauge_cache.get(key, (0, True))
n += v
self.gauge_cache[key] = (n, delta)
def _collect_ramusage(self):
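        # getrusage reports ru_maxrss in kilobytes on Linux (bytes on macOS);
        # the raw value is forwarded to the gauge as-is.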
self.gauge('resource.maxrss',
resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
def send(self):
if self.stats is None: return
p = self.stats.pipeline()
for k, v in self.cache.iteritems():
p.incr(k, v)
for k, (v, d) in self.gauge_cache.iteritems():
p.gauge(k, v, delta=d)
p.send()
self.cache = {}
self.gauge_cache = {}
class BaseScript(object):
LOG_FORMATTER = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
DESC = 'Base script abstraction'
def __init__(self):
# argparse parser obj
self.parser = argparse.ArgumentParser(description=self.DESC)
self.define_baseargs(self.parser)
self.define_args(self.parser)
self.args = self.parser.parse_args()
self.hostname = socket.gethostname()
self.log = self.init_logger(self.args.log, self.args.log_level,\
quiet=self.args.quiet)
self.stats = self.create_stats()
self.log.debug('init: args=%s' % repr(self.args))
@property
def name(self):
return '.'.join([x for x in (sys.argv[0].split('.')[0], self.args.name) if x])
def create_stats(self):
stats_prefix = '.'.join([x for x in (self.hostname, self.name) if x])
return StatsCollector(stats_prefix, self.args.statsd_server)
def init_logger(self, fname, log_level, quiet=False):
if not fname:
fname = '%s.log' % self.name
log = logging.getLogger('')
stderr_hdlr = logging.StreamHandler(sys.stderr)
rofile_hdlr = logging.handlers.RotatingFileHandler(fname,
maxBytes=MAX_LOG_FILE_SIZE, backupCount=10)
hdlrs = (stderr_hdlr, rofile_hdlr)
        for hdlr in hdlrs:
            hdlr.setFormatter(self.LOG_FORMATTER)
        # Always attach the rotating file handler; mirror to stderr unless quiet.
        log.addHandler(rofile_hdlr)
        if not quiet: log.addHandler(stderr_hdlr)
log.setLevel(getattr(logging, log_level.upper()))
return log
def define_baseargs(self, parser):
parser.add_argument('--name', default=None,
help='Name to identify this instance')
parser.add_argument('--statsd-server', default=None,
help='Location of StatsD server to send statistics. '
'Format is ip[:port]. Eg: localhost, localhost:8125')
parser.add_argument('--log', default=None,
help='Name of log file')
parser.add_argument('--log-level', default='WARNING',
help='Logging level as picked from the logging module')
parser.add_argument('--quiet', action='store_true')
def define_args(self, parser):
pass
def dump_stacks(self):
'''
Dumps the stack of all threads and greenlets. This function
is meant for debugging. Useful when a deadlock happens.
borrowed from: http://blog.ziade.org/2012/05/25/zmq-and-gevent-debugging-nightmares/
'''
dump = []
# threads
threads = dict([(th.ident, th.name)
for th in threading.enumerate()])
for thread, frame in sys._current_frames().items():
if thread not in threads: continue
dump.append('Thread 0x%x (%s)\n' % (thread, threads[thread]))
dump.append(''.join(traceback.format_stack(frame)))
dump.append('\n')
# greenlets
try:
from greenlet import greenlet
except ImportError:
return ''.join(dump)
# if greenlet is present, let's dump each greenlet stack
for ob in gc.get_objects():
if not isinstance(ob, greenlet):
continue
if not ob:
continue # not running anymore or not started
dump.append('Greenlet\n')
dump.append(''.join(traceback.format_stack(ob.gr_frame)))
dump.append('\n')
return ''.join(dump)
class FuncServer(BaseScript):
NAME = 'FuncServer'
DESC = 'Default Functionality Server'
DEFAULT_PORT = 9345
VIRTUAL_HOST = r'.*'
STATIC_PATH = 'static'
TEMPLATE_PATH = 'templates'
APP_CLASS = tornado.web.Application
def __init__(self):
super(FuncServer, self).__init__()
self.log_id = 0
# add weblog handler to logger
weblog_hdlr = WebLogHandler(self)
weblog_hdlr.setFormatter(self.LOG_FORMATTER)
self.log.addHandler(weblog_hdlr)
# tornado app object
base_handlers = self.prepare_base_handlers()
handlers = self.prepare_handlers()
self.template_loader = TemplateLoader([resolve_path(self.TEMPLATE_PATH)])
_ = self.prepare_template_loader(self.template_loader)
if _ is not None: self.template_loader = _
shclass = CustomStaticFileHandler
shclass.PATHS.append(resolve_path(self.STATIC_PATH))
_ = self.prepare_static_paths(shclass.PATHS)
if _ is not None: shclass.PATHS = _
self.static_handler_class = shclass
self.nav_tabs = [('Console', '/console'), ('Logs', '/logs')]
self.nav_tabs = self.prepare_nav_tabs(self.nav_tabs)
settings = {
'static_path': '<DUMMY-INEXISTENT-PATH>',
'static_handler_class': self.static_handler_class,
'template_loader': self.template_loader,
'compress_response': True,
}
all_handlers = handlers + base_handlers
self.app = self.APP_CLASS(**settings)
self.app.add_handlers(self.VIRTUAL_HOST, all_handlers)
sys.funcserver = self.app.funcserver = self
# all active websockets and their state
self.websocks = {}
@property
def name(self):
return '.'.join([x for x in (self.NAME, self.args.name) if x])
def define_baseargs(self, parser):
super(FuncServer, self).define_baseargs(parser)
parser.add_argument('--port', default=self.DEFAULT_PORT,
type=int, help='port to listen on for server')
def _send_log(self, msg):
msg = {'type': MSG_TYPE_LOG, 'id': self.log_id, 'data': msg}
bad_ws = []
for _id, ws in self.websocks.iteritems():
if ws is None: bad_ws.append(_id); continue
ws['sock'].send_message(msg)
for _id in bad_ws: del self.websocks[_id]
self.log_id += 1
def prepare_base_handlers(self):
# Tornado URL handlers for core functionality
return [
(r'/ws', WSConnection),
(r'/logs', make_handler('logs.html', BaseHandler)),
(r'/console', make_handler('console.html', BaseHandler)),
(r'/', make_handler('console.html', BaseHandler))
]
def prepare_handlers(self):
# Tornado URL handlers for additional functionality
return []
def prepare_template_loader(self, loader):
# add additional template dirs by using
# loader.add_dir(path)
return loader
def prepare_static_paths(self, paths):
# add static paths that can contain
        # additional or override files
# eg: paths.append(PATH)
return paths
def prepare_nav_tabs(self, nav_tabs):
# Add additional tab buttons in the UI toolbar
# eg: nav_tabs.append(('MyTab', '/mytab'))
return nav_tabs
def define_python_namespace(self):
return {'server': self, 'logging': logging, 'call': call}
def define_template_namespace(self):
return self.define_python_namespace()
def pre_start(self):
'''
Override to perform any operations
before the server loop is started
'''
pass
def start(self):
self.pre_start()
if self.args.port != 0:
self.app.listen(self.args.port)
tornado.ioloop.IOLoop.instance().start()
class RPCHandler(BaseHandler):
WRITE_CHUNK_SIZE = 4096
def initialize(self, server):
self.server = server
self.stats = server.stats
self.log = server.log
self.api = server.api
def _get_apifn(self, fn_name):
obj = self.api
for part in fn_name.split('.'):
obj = getattr(obj, part)
return obj
def _clean_kwargs(self, kwargs, fn):
'''
Remove unexpected keyword arguments from the
set of received keyword arguments.
'''
        # Skip the cleaning unless the server config
        # asks to ignore unexpected kwargs
if not self.server.IGNORE_UNEXPECTED_KWARGS:
return kwargs
expected_kwargs = set(inspect.getargspec(fn).args)
got_kwargs = set(kwargs.keys())
unexpected_kwargs = got_kwargs - expected_kwargs
for k in unexpected_kwargs:
del kwargs[k]
return kwargs
def _handle_single_call(self, m):
fn_name = m.get('fn', None)
sname = 'api.%s' % fn_name
t = time.time()
try:
fn = self._get_apifn(fn_name)
self.stats.incr(sname)
r = fn(*m['args'], **self._clean_kwargs(m['kwargs'], fn))
if 'raw' not in get_fn_tags(fn):
r = {'success': True, 'result': r}
except Exception, e:
self.log.exception('Exception during RPC call. '
'fn=%s, args=%s, kwargs=%s' % \
(m.get('fn', ''), repr(m.get('args', '[]')),
repr(m.get('kwargs', '{}'))))
r = {'success': False, 'result': repr(e)}
finally:
tdiff = (time.time() - t) * 1000
self.stats.timing(sname, tdiff)
return r
def _handle_call(self, fn, m, protocol):
if fn != '__batch__':
r = self._handle_single_call(m)
else:
r = []
for call in m['calls']:
_r = self._handle_single_call(call)
if isinstance(_r, dict) and 'success' in _r:
_r = _r['result'] if _r['success'] else None
r.append(_r)
fnobj = self._get_apifn(fn)
if 'raw' not in get_fn_tags(fnobj):
r = self.get_serializer(protocol)(r)
mime = getattr(fnobj, 'mime', self.get_mime(protocol))
self.set_header('Content-Type', mime)
self.set_header('Content-Length', len(r))
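        # Stream the serialized payload in fixed-size chunks, flushing after
        # each write so a large response never piles up in the output buffer.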
chunk_size = self.WRITE_CHUNK_SIZE
for i in xrange(0, len(r), chunk_size):
self.write(r[i:i+chunk_size])
self.flush()
self.finish()
def get_serializer(self, name):
return {'msgpack': msgpack.packb,
'json': json.dumps,
'python': repr}.get(name, self.server.SERIALIZER)
def get_deserializer(self, name):
        return {'msgpack': msgpack.unpackb,
                'json': json.loads,
                'python': literal_eval}.get(name, self.server.DESERIALIZER)
def get_mime(self, name):
return {'msgpack': 'application/x-msgpack',
'json': 'application/json',
'python': 'application/x-python'}\
.get(name, self.server.MIME)
@tornado.web.asynchronous
def post(self, protocol='default'):
m = self.get_deserializer(protocol)(self.request.body)
fn = m['fn']
gevent.spawn(lambda: self._handle_call(fn, m, protocol))
def failsafe_json_decode(self, v):
try: v = json.loads(v)
except ValueError: pass
return v
@tornado.web.asynchronous
def get(self, protocol='default'):
D = self.failsafe_json_decode
args = dict([(k, D(v[0])) for k, v in self.request.arguments.iteritems()])
fn = args.pop('fn')
m = dict(kwargs=args, fn=fn, args=[])
gevent.spawn(lambda: self._handle_call(fn, m, protocol))
class RPCServer(FuncServer):
NAME = 'RPCServer'
DESC = 'Default RPC Server'
SERIALIZER = staticmethod(msgpack.packb)
DESERIALIZER = staticmethod(msgpack.unpackb)
MIME = 'application/x-msgpack'
IGNORE_UNEXPECTED_KWARGS = False
def __init__(self, *args, **kwargs):
super(RPCServer, self).__init__(*args, **kwargs)
self.api = None
def pre_start(self):
self.api = self.prepare_api()
if not hasattr(self.api, 'log'): self.api.log = self.log
super(RPCServer, self).pre_start()
def prepare_api(self):
'''
Prepare the API object that is exposed as
functionality by the RPCServer
'''
return None
def prepare_base_handlers(self):
hdlrs = super(RPCServer, self).prepare_base_handlers()
hdlrs.append((r'/rpc(?:/([^/]*)/?)?', RPCHandler, dict(server=self)))
return hdlrs
def define_python_namespace(self):
ns = super(RPCServer, self).define_python_namespace()
ns['api'] = self.api
return ns
def _passthrough(name):
def fn(self, *args, **kwargs):
p = self.prefix + '.' + name
if self.bound or self.parent is None:
return self._call(p, args, kwargs)
else:
return self.parent._call(p, args, kwargs)
return fn
class RPCClient(object):
SERIALIZER = staticmethod(msgpack.packb)
DESERIALIZER = staticmethod(msgpack.unpackb)
def __init__(self, server_url, prefix=None, parent=None):
self.server_url = server_url
self.rpc_url = urlparse.urljoin(server_url, 'rpc')
self.is_batch = False
self.prefix = prefix
self.parent = parent
self.bound = False
self._calls = []
def __getattr__(self, attr):
prefix = self.prefix + '.' + attr if self.prefix else attr
return self.__class__(self.server_url, prefix=prefix,
parent=self if self.bound else self.parent)
def get_handle(self):
self.bound = True
return self
def __call__(self, *args, **kwargs):
if self.bound or self.parent is None:
return self._call(self.prefix, args, kwargs)
else:
return self.parent._call(self.prefix, args, kwargs)
def _call(self, fn, args, kwargs):
if not self.is_batch:
return self._do_single_call(fn, args, kwargs)
else:
self._calls.append(dict(fn=fn, args=args, kwargs=kwargs))
__getitem__ = _passthrough('__getitem__')
__setitem__ = _passthrough('__setitem__')
__delitem__ = _passthrough('__delitem__')
__contains__ = _passthrough('__contains__')
__len__ = _passthrough('__len__')
def __nonzero__(self): return True
def set_batch(self):
self.is_batch = True
def unset_batch(self):
self.is_batch = False
def _do_single_call(self, fn, args, kwargs):
m = self.SERIALIZER(dict(fn=fn, args=args, kwargs=kwargs))
req = requests.post(self.rpc_url, data=m)
res = self.DESERIALIZER(req.content)
if not res['success']:
raise RPCCallException(res['result'])
else:
return res['result']
def execute(self):
if not self._calls: return
m = dict(fn='__batch__', calls=self._calls)
m = self.SERIALIZER(m)
req = requests.post(self.rpc_url, data=m)
res = self.DESERIALIZER(req.content)
self._calls = []
return res
if __name__ == '__main__':
funcserver = FuncServer()
funcserver.start()
|
StarcoderdataPython
|
1751847
|
# python2 and python3 compatible
class EfuseClass:
""" A class for parsing efuses """
normal = ('lot','wafer','assembly','part','comment','flowcell','y' ,'x' ,'bin','noise','softbin','chiptype','barcode')
codes = ('L:' ,'W:' ,'J:' ,'P:' ,'C:' ,'F:' ,'Y:','X:','B:' ,'N:' ,'SB:', 'CT:', 'BC:' )
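    # normal[i] is the friendly attribute name for the field tagged with the
    # prefix codes[i] in the fuse string (e.g. 'L:' carries the lot id).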
def __init__( self , fuse ):
self.text = fuse
fields = fuse.split(',')
consumed = []
# First generically set attributes:
for f in fields:
try:
setattr( self , f.split(':')[0] , f.split(':')[1] )
            except IndexError:
                # Field without a ':' separator; skip it.
                pass
# Set familiar name handles
        for i in range(len(self.normal)):
            # Set the default once, then overwrite it if a field carrying
            # the matching code prefix is found.
            setattr( self , self.normal[i] , 'NA' )
            for j, f in enumerate( fields ):
                if self.codes[i] in f:
                    setattr( self , self.normal[i] , f.replace(self.codes[i],'') )
                    consumed.append( j )
                    break
# Record anything that we don't already have a field for
misc = []
for j, f in enumerate( fields ):
if j not in consumed:
misc.append( f )
self.misc = ','.join( misc )
# Dummy proof
self.X = self.x
self.Y = self.y
self.hardbin = self.bin
self.fuse = fuse
Efuse = EfuseClass
|
StarcoderdataPython
|
125297
|
#
# python_grabber
#
# Authors:
# <NAME> <<EMAIL>>
#
# Copyright (C) 2019 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from ctypes import POINTER, HRESULT
from ctypes import windll
from ctypes.wintypes import (DWORD, ULONG, HWND,
UINT, LPCOLESTR, LCID, LPVOID)
from comtypes import IUnknown, GUID
LPUNKNOWN = POINTER(IUnknown)
CLSID = GUID
LPCLSID = POINTER(CLSID)
WS_CHILD = 0x40000000
WS_CLIPSIBLINGS = 0x04000000
OleCreatePropertyFrame = windll.oleaut32.OleCreatePropertyFrame
OleCreatePropertyFrame.restype = HRESULT
OleCreatePropertyFrame.argtypes = (
HWND, # [in] hwndOwner
UINT, # [in] x
UINT, # [in] y
LPCOLESTR, # [in] lpszCaption
ULONG, # [in] cObjects
POINTER(LPUNKNOWN), # [in] ppUnk
ULONG, # [in] cPages
LPCLSID, # [in] pPageClsID
LCID, # [in] lcid
DWORD, # [in] dwReserved
LPVOID, # [in] pvReserved
)
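# A call sketch (hypothetical handles, not defined in this module):
#
#     OleCreatePropertyFrame(hwnd, 0, 0, u'Filter properties', 1, byref(unk),
#                            n_pages, page_clsids, 0, 0, None)
#
# where `unk` is the object's IUnknown and `page_clsids` an array of property
# page CLSIDs, typically obtained through ISpecifyPropertyPages.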
|
StarcoderdataPython
|
3282780
|
import time
import hiro # type: ignore
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import PlainTextResponse
from starlette.routing import Route
from starlette.testclient import TestClient
from slowapi.util import get_ipaddr, get_remote_address
from tests import TestSlowapi
class TestDecorators(TestSlowapi):
def test_single_decorator_async(self):
app, limiter = self.build_starlette_app(key_func=get_ipaddr)
@limiter.limit("5/minute")
async def t1(request: Request):
return PlainTextResponse("test")
app.add_route("/t1", t1)
client = TestClient(app)
for i in range(0, 10):
response = client.get("/t1")
            # Parenthesize the conditional so it selects the expected status;
            # without parentheses the assert is vacuous for i >= 5.
            assert response.status_code == (200 if i < 5 else 429)
if i < 5:
assert response.text == "test"
def test_single_decorator_sync(self):
app, limiter = self.build_starlette_app(key_func=get_ipaddr)
@limiter.limit("5/minute")
def t1(request: Request):
return PlainTextResponse("test")
app.add_route("/t1", t1)
client = TestClient(app)
for i in range(0, 10):
response = client.get("/t1")
            assert response.status_code == (200 if i < 5 else 429)
if i < 5:
assert response.text == "test"
def test_shared_decorator(self):
app, limiter = self.build_starlette_app(key_func=get_ipaddr)
shared_lim = limiter.shared_limit("5/minute", "somescope")
@shared_lim
def t1(request: Request):
return PlainTextResponse("test")
@shared_lim
def t2(request: Request):
return PlainTextResponse("test")
app.add_route("/t1", t1)
app.add_route("/t2", t2)
client = TestClient(app)
for i in range(0, 10):
response = client.get("/t1")
            assert response.status_code == (200 if i < 5 else 429)
# the shared limit has already been hit via t1
assert client.get("/t2").status_code == 429
def test_multiple_decorators(self):
app, limiter = self.build_starlette_app(key_func=get_ipaddr)
@limiter.limit("10 per minute", lambda: "test")
@limiter.limit("5/minute") # per ip as per default key_func
async def t1(request: Request):
return PlainTextResponse("test")
app.add_route("/t1", t1)
with hiro.Timeline().freeze() as timeline:
cli = TestClient(app)
for i in range(0, 10):
response = cli.get("/t1", headers={"X_FORWARDED_FOR": "127.0.0.2"})
                assert response.status_code == (200 if i < 5 else 429)
for i in range(5):
assert cli.get("/t1").status_code == 200
assert cli.get("/t1").status_code == 429
assert (
cli.get("/t1", headers={"X_FORWARDED_FOR": "127.0.0.3"}).status_code
== 429
)
def test_multiple_decorators_with_headers(self):
app, limiter = self.build_starlette_app(
key_func=get_ipaddr, headers_enabled=True
)
@limiter.limit("10 per minute", lambda: "test")
@limiter.limit("5/minute") # per ip as per default key_func
async def t1(request: Request):
return PlainTextResponse("test")
app.add_route("/t1", t1)
with hiro.Timeline().freeze() as timeline:
cli = TestClient(app)
for i in range(0, 10):
response = cli.get("/t1", headers={"X_FORWARDED_FOR": "127.0.0.2"})
                assert response.status_code == (200 if i < 5 else 429)
assert response.headers.get("Retry-After") if i < 5 else True
for i in range(5):
assert cli.get("/t1").status_code == 200
assert cli.get("/t1").status_code == 429
assert (
cli.get("/t1", headers={"X_FORWARDED_FOR": "127.0.0.3"}).status_code
== 429
)
def test_headers_no_breach(self):
app, limiter = self.build_starlette_app(
headers_enabled=True, key_func=get_remote_address
)
@app.route("/t1")
@limiter.limit("10/minute")
def t1(request: Request):
return PlainTextResponse("test")
@app.route("/t2")
@limiter.limit("2/second; 5 per minute; 10/hour")
def t2(request: Request):
return PlainTextResponse("test")
with hiro.Timeline().freeze():
with TestClient(app) as cli:
resp = cli.get("/t1")
assert resp.headers.get("X-RateLimit-Limit") == "10"
assert resp.headers.get("X-RateLimit-Remaining") == "9"
assert resp.headers.get("X-RateLimit-Reset") == str(
int(time.time() + 61)
)
assert resp.headers.get("Retry-After") == str(60)
resp = cli.get("/t2")
assert resp.headers.get("X-RateLimit-Limit") == "2"
assert resp.headers.get("X-RateLimit-Remaining") == "1"
assert resp.headers.get("X-RateLimit-Reset") == str(
int(time.time() + 2)
)
assert resp.headers.get("Retry-After") == str(1)
def test_headers_breach(self):
app, limiter = self.build_starlette_app(
headers_enabled=True, key_func=get_remote_address
)
@app.route("/t1")
@limiter.limit("2/second; 10 per minute; 20/hour")
def t(request: Request):
return PlainTextResponse("test")
with hiro.Timeline().freeze() as timeline:
with TestClient(app) as cli:
for i in range(11):
resp = cli.get("/t1")
timeline.forward(1)
assert resp.headers.get("X-RateLimit-Limit") == "10"
assert resp.headers.get("X-RateLimit-Remaining") == "0"
assert resp.headers.get("X-RateLimit-Reset") == str(
int(time.time() + 50)
)
assert resp.headers.get("Retry-After") == str(int(50))
def test_retry_after(self):
# FIXME: this test is not actually running!
app, limiter = self.build_starlette_app(
headers_enabled=True, key_func=get_remote_address
)
@app.route("/t1")
@limiter.limit("1/minute")
def t(request: Request):
return PlainTextResponse("test")
with hiro.Timeline().freeze() as timeline:
with TestClient(app) as cli:
resp = cli.get("/t1")
retry_after = int(resp.headers.get("Retry-After"))
assert retry_after > 0
timeline.forward(retry_after)
resp = cli.get("/t1")
assert resp.status_code == 200
def test_exempt_decorator(self):
app, limiter = self.build_starlette_app(
headers_enabled=True,
key_func=get_remote_address,
default_limits=["1/minute"],
)
@app.route("/t1")
def t1(request: Request):
return PlainTextResponse("test")
with TestClient(app) as cli:
resp = cli.get("/t1", headers={"X_FORWARDED_FOR": "127.0.0.10"})
assert resp.status_code == 200
resp2 = cli.get("/t1", headers={"X_FORWARDED_FOR": "127.0.0.10"})
assert resp2.status_code == 429
@app.route("/t2")
@limiter.exempt
def t2(request: Request):
"""Exempt a sync route"""
return PlainTextResponse("test")
with TestClient(app) as cli:
resp = cli.get("/t2", headers={"X_FORWARDED_FOR": "127.0.0.10"})
assert resp.status_code == 200
resp2 = cli.get("/t2", headers={"X_FORWARDED_FOR": "127.0.0.10"})
assert resp2.status_code == 200
@app.route("/t3")
@limiter.exempt
async def t3(request: Request):
"""Exempt an async route"""
return PlainTextResponse("test")
with TestClient(app) as cli:
resp = cli.get("/t3", headers={"X_FORWARDED_FOR": "127.0.0.10"})
assert resp.status_code == 200
resp2 = cli.get("/t3", headers={"X_FORWARDED_FOR": "127.0.0.10"})
assert resp2.status_code == 200
# todo: more tests - see https://github.com/alisaifee/flask-limiter/blob/55df08f14143a7e918fc033067a494248ab6b0c5/tests/test_decorators.py#L187
def test_default_and_decorator_limit_merging(self):
app, limiter = self.build_starlette_app(
key_func=lambda: "test", default_limits=["10/minute"]
)
@limiter.limit("5 per minute", key_func=get_ipaddr, override_defaults=False)
async def t1(request: Request):
return PlainTextResponse("test")
app.add_route("/t1", t1)
with hiro.Timeline().freeze() as timeline:
cli = TestClient(app)
for i in range(0, 10):
response = cli.get("/t1", headers={"X_FORWARDED_FOR": "127.0.0.2"})
                assert response.status_code == (200 if i < 5 else 429)
for i in range(5):
assert cli.get("/t1").status_code == 200
assert cli.get("/t1").status_code == 429
assert (
cli.get("/t1", headers={"X_FORWARDED_FOR": "127.0.0.3"}).status_code
== 429
)
|
StarcoderdataPython
|
4801083
|
<reponame>maxest/MaxestFramework
import torch
import numpy as np
from torch.autograd import Variable
import torch_layers_dumper
torch.backends.cudnn.deterministic = True
torch.manual_seed(42)
torch.cuda.manual_seed(42)
np.random.seed(42)
layer1 = torch.nn.Linear(1, 24, True)
layer2 = torch.nn.Linear(24, 24, False)
layer3 = torch.nn.Linear(24, 1, True)
model = torch.nn.Sequential(
layer1,
torch.nn.Sigmoid(),
layer2,
torch.nn.Sigmoid(),
layer3,
torch.nn.Sigmoid(),
)
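# A 1-24-24-1 fully connected net with a sigmoid after every layer; the final
# sigmoid keeps predictions in (0, 1), matching the targets defined below.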
x = [ [0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9] ]
x = Variable(torch.FloatTensor(x))
y = [ [0.0], [0.1], [0.2], [0.3], [0.9], [0.9], [0.3], [0.2], [0.1], [0.0] ]
y = Variable(torch.FloatTensor(y))
learning_rate = 0.01
optim = torch.optim.Adam(model.parameters(), lr=learning_rate)
for i in range(10000):
p = np.random.permutation(10)
x2 = x[ p[0:10].tolist() ]
y2 = y[ p[0:10].tolist() ]
y2_hat = model.forward(x2)
err = y2 - y2_hat
err = err * err
err = torch.sum(err)
err /= 10.0
optim.zero_grad()
err.backward()
optim.step()
torch_layers_dumper.DumpSequential("model.txt", model)
y_hat = model(x)
print("Real\n", y)
print("Predicted\n", y_hat)
|
StarcoderdataPython
|
124860
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Time : 2022/1/8 8:01 PM
# @Author: zhoumengjie
# @File : BondBuilder.py
class BondPage:
def __init__(self):
        # Under subscription evaluation
        self.apply_bonds = []
        # Approved / registration consented by the CSRC
        self.next_bonds = []
        # Subscription closed, about to list
        self.ipo_bonds = []
        # Listing the next day
        self.prepare_bonds = []
        # Subscription opening soon
        self.applying_bonds = []
        # Listed today
        self.today_bonds = []
        # Lottery (allotment) results
        self.draw_bonds = []
        # Passed the Issuance Examination Committee
        self.pass_bonds = []
class CompanyInfo:
def __init__(self, data):
        # Company name
        self.gsmc = data['jbzl']['gsmc']
        # English name
        self.ywmc = data['jbzl']['ywmc']
        # Former name
        self.cym = data['jbzl']['cym']
        # A-share code
        self.agdm = data['jbzl']['agdm']
        # A-share short name
        self.agjc = data['jbzl']['agjc']
        # B-share code
        self.bgdm = data['jbzl']['bgdm']
        # B-share short name
        self.bgjc = data['jbzl']['bgjc']
        # H-share code
        self.hgdm = data['jbzl']['hgdm']
        # H-share short name
        self.hgjc = data['jbzl']['hgjc']
        # Listing exchange
        self.ssjys = data['jbzl']['ssjys']
        # Industry (CSRC classification)
        self.sszjhhy = data['jbzl']['sszjhhy']
        # Legal representative
        self.frdb = data['jbzl']['frdb']
        # Registered capital
        self.zczb = data['jbzl']['zczb']
        # Date founded
        self.clrq = data['fxxg']['clrq']
        # Listing date
        self.ssrq = data['fxxg']['ssrq']
        # Registered address
        self.zcdz = data['jbzl']['zcdz']
        # Business scope
        self.jyfw = data['jbzl']['jyfw']
        # Company profile
        self.gsjj = data['jbzl']['gsjj']
class BondInfo:
def __init__(self, row):
self.stock_code = row.get('stock_id', '-')
self.bond_code = row.get('bond_id', '-')
self.bond_name = row.get('bond_nm', '-')
self.stock_name = row.get('stock_nm', '-')
        # Total issue amount, number
        self.amount = row.get('amount', '-')
        # Credit rating
        self.grade = row.get('rating_cd', '-')
        # Underlying stock price, number
        self.price = row.get('price', '-')
        # Conversion price, number
        self.convert_price = row.get('convert_price', '-')
        # Shareholder placement ratio, string, e.g. 62.100
        self.ration_rt = row.get('ration_rt', '-')
        # Current stock price over conversion price, string, e.g. 97.11
        self.pma_rt = row.get('pma_rt', '-')
        # Underlying stock P/B, number
        self.pb = row.get('pb', '-')
        # Convertible entitlement per 100 yuan of stock, number
        self.cb_amount = row.get('cb_amount', '-')
        # Placement per share (yuan), number
        self.ration = row.get('ration', '-')
        # Shares needed to be allotted 10 notes, number
        self.apply10 = row.get('apply10', '-')
        # Record date, string
        self.record_dt = row.get('record_dt', '-')
        # Online issue size, string
        self.online_amount = row.get('online_amount', '-')
        # Lottery winning rate, string, e.g. "0.0238"
        self.lucky_draw_rt = row.get('lucky_draw_rt', '-')
        # Expected allotment per account at maximum subscription, string, e.g. 0.2377
        self.single_draw = row.get('single_draw', '-')
        # Number of subscribing accounts, number
        self.valid_apply = row.get('valid_apply', '-')
        # Subscription date, string
        self.apply_date = row.get('apply_date', '-')
        # Plan progress, string. Issuance process: board proposal -> shareholders' meeting approval -> CSRC acceptance -> Issuance Examination Committee approval -> CSRC approval/registration consent -> issuance announcement
        self.progress_nm = row.get('progress_nm', '-')
        # Progress date, yyyy-MM-dd
        self.progress_dt = row.get('progress_dt', '-')
        # Listing date
        self.list_date = row.get('list_date', '-')
        # Subscription flag: E: subscribed, listing scheduled; D: listed; B: pending subscription; C: subscribed, listing not yet scheduled; N: CSRC approved/registration consented
        self.ap_flag = row.get('ap_flag', '-')
class BondData:
def __init__(self, row):
self.stock_code = row.get('stock_id', '-')
self.bond_code = row.get('bond_id', '-')
self.bond_name = row.get('bond_nm', '-')
self.stock_name = row.get('stock_nm', '-')
        # Price change percentage, number, e.g. -1.98
        self.increase_rt = row.get('increase_rt', '-')
        # Underlying stock price, number
        self.sprice = row.get('sprice', '-')
        # Current bond price, number
        self.price = row.get('price', '-')
        # Underlying stock change, number, e.g. -3.03
        self.sincrease_rt = row.get('sincrease_rt', '-')
        # Underlying stock P/B, number
        self.pb = row.get('pb', '-')
        # Conversion price, number
        self.convert_price = row.get('convert_price', '-')
        # Conversion value, number
        self.convert_value = row.get('convert_value', '-')
        # Premium rate, number, e.g. 18.41
        self.premium_rt = row.get('premium_rt', '-')
        # Credit rating
        self.grade = row.get('rating_cd', '-')
        # Put trigger price, number
        self.put_convert_price = row.get('put_convert_price', '-')
        # Forced-redemption trigger price, number
        self.force_redeem_price = row.get('force_redeem_price', '-')
        # Convertible-to-equity ratio
        self.convert_amt_ratio = row.get('convert_amt_ratio', '-')
        # Maturity date
        self.short_maturity_dt = row.get('short_maturity_dt', '-')
        # Years remaining, number
        self.year_left = row.get('year_left', '-')
        # Remaining issue size, number
        self.curr_iss_amt = row.get('curr_iss_amt', '-')
        # Trading volume, number
        self.volume = row.get('volume', '-')
        # Turnover rate, number
        self.turnover_rt = row.get('turnover_rt', '-')
        # Pre-tax yield to maturity, number
        self.ytm_rt = row.get('ytm_rt', '-')
        # Double-low score (price plus premium), number
        self.dblow = row.get('dblow', '-')
class ForceBondInfo:
def __init__(self, row):
self.stock_code = row.get('stock_id', '-')
self.bond_code = row.get('bond_id', '-')
self.bond_name = row.get('bond_nm', '-')
self.stock_name = row.get('stock_nm', '-')
        # Bond price, string, e.g. "355.000"
        self.price = row.get('price', '-')
        # Last trading day, string
        self.redeem_dt = row.get('redeem_dt', '-')
        # Summary, string, e.g. "last trading day: 2022-01-27\r\nlast conversion day: 2022-01-27\r\nredemption price: 100.21 yuan per note"
        self.force_redeem = row.get('force_redeem', '-')
        # Conversion start date, string
        self.convert_dt = row.get('convert_dt', '-')
        # Remaining issue size, string, e.g. "355.00"
        self.curr_iss_amt = row.get('curr_iss_amt', '-')
        # Forced-redemption trigger price, string, e.g. "355.00"
        self.force_redeem_price = row.get('force_redeem_price', '-')
        # Original issue size, string, e.g. "355.000"
        self.orig_iss_amt = row.get('orig_iss_amt', '-')
        # Actual forced-redemption price, string, e.g. "355.000"
        self.real_force_redeem_price = row.get('real_force_redeem_price', '-')
        # Forced-redemption flag, string, "Y" or "N"
        self.redeem_flag = row.get('redeem_flag', '-')
        # Unclear field (original note: "not understood"), string, e.g. "355.00"
        self.redeem_price = row.get('redeem_price', '-')
        # Forced-redemption trigger ratio, string, e.g. "355%"
        self.redeem_price_ratio = row.get('redeem_price_ratio', '-')
        # Forced-redemption clause, string
        self.redeem_tc = row.get('redeem_tc', '-')
        # Underlying stock price, string, e.g. "34.45"
        self.sprice = row.get('sprice', '-')
        # Days remaining, number
        self.redeem_count_days = row.get('redeem_count_days', '-')
        # Total days, number
        self.redeem_real_days = row.get('redeem_real_days', '-')
        # R = forced redemption triggered, O = redemption announced, G = announced no redemption
        self.redeem_icon = row.get('redeem_icon', '-')
|
StarcoderdataPython
|
3304713
|
#!/usr/bin/python3
import os
import json
import sys
import pandas as pd
import plotly.express as px
# Need this line so Atom can run it :grrrff
os.chdir('/home/andres/Programs/python/covid/app/visualizations')
from urllib.request import urlopen
# with urlopen('https://github.com/datasets/geo-countries/blob/master/data/countries.geojson') as response:
# countries = json.load(response)
def main():
with open('data/world-countries.json') as file:
countries = json.load(file)
df = pd.read_csv('data/full_data_enrd.csv',dtype={'name': str})
df.head()
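    # featureidkey joins the dataframe's 'name' column to the matching
    # 'properties.name' field of each GeoJSON country feature.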
fig = px.choropleth(df, geojson=countries, locations='name', color='total_cases',
featureidkey="properties.name",
color_continuous_scale="Reds",
range_color=(0, 10000),
scope="world",
labels={'total_cases':'Total cases'}
)
fig.update_layout(margin={"r":0,"t":0,"l":0,"b":0})
fig.show()
if __name__ == '__main__':
sys.exit(main())
|
StarcoderdataPython
|
186827
|
from zipfile import ZipFile
# from skimage.io import imread
import os
import numpy as np
import pandas as pd
from PIL import Image
from pathlib import Path
from data_layer.util import image_path
# from data_layer.dataset import CovidMetadata
# DEFAULT_BASE_PATH = 'C:/Covid-Screening/data_layer/raw_data'
DEFAULT_BASE_PATH = Path(__file__).parent
DEFAULT_METADATA_BASE_PATH = os.path.join(DEFAULT_BASE_PATH, 'metadata.csv')
# DEFAULT_IMAGES_BASE_PATH = os.path.join(DEFAULT_BASE_PATH, 'images')
# DEFAULT_CHANNELS = (1, 2, 3, 4, 5)
ROOT_DIR = Path(__file__).parent
CHANNELS = (1,2,3,4,5)
save_path = f"{ROOT_DIR}\\data_layer\\raw_data\\mocks\\"
# config = Config('trasfer_data')
# metadata = CovidMetadata(config.DEFAULT_METADATA_BASE_PATH)
# metadata = load_csv(DEFAULT_METADATA_BASE_PATH)
metadata2 = pd.read_csv(DEFAULT_METADATA_BASE_PATH)
img_filenames = []
channels = []
i = 0
for rec in metadata.to_dict('records'):
for c in CHANNELS:
i+=1
img_filename = image_path(rec['experiment'], rec['plate'], rec['well'], rec['site'],c)
img_filenames.append(img_filename)
channels.append(c)
reps = [5] * metadata.shape[0]  # each metadata row expands to one row per channel
image_frame = metadata.loc[np.repeat(metadata.index.values,reps)]
image_frame['channel'] = channels
image_frame['img_filename'] = img_filenames
image_frame.to_csv(os.path.join(DEFAULT_BASE_PATH, 'image_frame.csv'), index=False)
filename = "D:\\RxRx19a-images.zip"
with ZipFile(filename) as archive:
for entry in archive.infolist():
with archive.open(entry) as file:
            if 'HRCE' in file.name:
# last_sep = file.name.rindex('/')
img = Image.open(file)
print(img.size, img.mode, len(img.getdata()))
|
StarcoderdataPython
|
97975
|
<reponame>kishankj/python
def is_isogram(string):
    # An isogram has no repeating letters (case-insensitive; non-letters ignored).
    letters = [ch.lower() for ch in string if ch.isalpha()]
    return len(letters) == len(set(letters))
|
StarcoderdataPython
|
3370369
|
<gh_stars>0
import sublime, sublime_plugin
import time
import datetime
def log_error(ex, command):
error_msg = 'Error in ' + command + ': ' + str(ex)
print error_msg
sublime.status_message(error_msg)
class SubliMapCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.edit = edit
self.view.window().show_input_panel('Map lambda t, i:', '', self.map_text, None, None)
def map_text(self, inp):
try:
edit = self.view.begin_edit()
replacer = eval('lambda t, i: str(' + inp + ')')
for idx, region in enumerate(self.view.sel()):
txt = self.view.substr(region)
replacement = replacer(txt, idx)
                # Use the edit from begin_edit(); the run()-time edit object is
                # no longer valid inside this input-panel callback.
                self.view.replace(edit, region, replacement)
except Exception as e:
log_error(e, 'SubliMap')
finally:
self.view.end_edit(edit)
class SubliReduceCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.edit = edit
self.view.window().show_input_panel('Reduce lambda x, y:', '', self.reduce_text, None, None)
def reduce_text(self, inp):
try:
edit = self.view.begin_edit()
reducer = eval('lambda x, y: ' + inp)
result = reduce(reducer, map(lambda x: self.view.substr(x), self.view.sel()))
sublime.status_message("Result: " + str(result))
map(lambda x: self.view.erase(edit, x), self.view.sel())
self.view.replace(edit, self.view.sel()[0], str(result))
except Exception as e:
log_error(e, 'SubliReduce')
finally:
self.view.end_edit(edit)
|
StarcoderdataPython
|
1789506
|
<reponame>JingweiZuo/SMATE<gh_stars>10-100
import logging
import sys
from datetime import datetime
import os
#import tensorflow as tf
#os.environ['TF_CPP_MIN_LOG_LEVEL'] = '0'
#logging.getLogger("tensorflow").setLevel(logging.INFO)
##
# Try not to initialize many loggers at the same run
# def init_logging(log_file=''):
# if log_file != '':
# log_file = datetime.now().strftime(log_file + '_%Y_%m_%d_%H_%M.log')
# log_format = "%(levelname)s %(asctime)-15s [%(lineno)d] %(funcName)s: %(message)s"
# if log_file == '':
# logging.basicConfig(format=log_format, level=logging.INFO, stream=sys.stdout)
# else:
# logging.basicConfig(filename=log_file, filemode='w', format=log_format, level=logging.INFO)
# logger = logging.getLogger()
# #logger = tf.get_logger()
# return logger
def init_logging(log_file='', name='log_name', level=logging.DEBUG):
    """Set up as many loggers as you want."""
    if len(log_file) == 0:
        # No log file given: fall back to a console logger on stdout
        # (recursing here, as before, would never terminate).
        log_format = '%(levelname)s %(asctime)-15s [%(lineno)d] %(funcName)s: %(message)s'
        logging.basicConfig(format=log_format, level=level, stream=sys.stdout)
        return logging.getLogger(name)
    log_file = datetime.now().strftime(log_file + '_%Y_%m_%d_%H_%M.log')
formatter = logging.Formatter('%(levelname)s %(asctime)-15s [%(lineno)d] %(funcName)s: %(message)s')
handler = logging.FileHandler(log_file, 'w')
handler.setFormatter(formatter)
logger = logging.getLogger(name)
#logger = tf.get_logger(name)
logger.setLevel(level)
logger.addHandler(handler)
return logger
if __name__ == '__main__':
#log_file = "/home/ivan/Research/projects/yhao_cnn_varying/src/python/test"
#init_logging(log_file)
#setup_logger(log_file)
#logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
    logger = init_logging('')
#logging.debug('This message should appear on the console')
logger.info('So should this')
#logging.warning('And this, too')
|
StarcoderdataPython
|
3316941
|
from django.db import models
from apps.Habitacion.models import Habitacion
from apps.Cliente.models import Cliente
from apps.Registrador.models import Registrador
class Estado (models.Model):
idEstado=models.CharField(primary_key=True,max_length=100)
nombre=models.CharField(max_length=100)
class Alquiler (models.Model):
idAlquiler=models.CharField(max_length=200,primary_key=True)
fechaHoraEntrada=models.DateTimeField(auto_now_add=True)
fechaHoraSalida=models.DateTimeField()
costoTotal=models.IntegerField(default=0)#change for real cost
observacion=models.TextField()
    fkHabitacion=models.ForeignKey(Habitacion,on_delete=models.CASCADE,null=False)
    fkCliente=models.ForeignKey(Cliente,on_delete=models.CASCADE,null=False)
    fkRegistrador=models.ForeignKey(Registrador,on_delete=models.CASCADE,null=False)
    fkEstado=models.ForeignKey(Estado,on_delete=models.SET_NULL,null=True)
|
StarcoderdataPython
|
3351717
|
<reponame>changsuchoi/cspy<filename>swarp-register.py
from astropy.io import ascii
from astropy.io import fits
import astropy.units as u
import astropy.coordinates as coord
from astropy.table import Table, Column
from astropy.time import Time
from pyraf import iraf
import os, sys
import numpy as np
import matplotlib.pyplot as plt
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord
from astropy.coordinates import ICRS, Galactic, FK4, FK5
from astropy.coordinates import Angle, Latitude, Longitude
os.system('swarp -d > default.swarp')
def swarpcom(imlist):
newdt, newname=centertimeheader(imlist)
param_dict={
'IMAGEOUT_NAME' : newname,
'COMBINE' : 'Y',
'COMBINE_TYPE' : 'MEDIAN',
'SUBTRACT_BACK' : 'N',
'WRITE_FILEINFO' : 'Y',
'BACK_TYPE' : 'AUTO',
'BACK_DEFAULT' : '0.0',
'BACK_SIZE' : '64',
'BACK_FILTERSIZE' : '3',
'CELESTIAL_TYPE' : 'NATIVE',
'PROJECTION_TYPE' : 'TPV',
'RESAMPLE' : 'Y',
'FSCALE_KEYWORD' : 'FLXSCALE',
'COPY_KEYWORDS' : 'OBJECT,DATE-OBS,FILTER',
'WRITE_XML' : 'N',
}
#
#output=os.path.splitext(inlist[0])[0]+'_'+str(len(inlist))+'_com'+os.path.splitext(inlist[0])[1]
optstr=''
for i in param_dict:
#print(' -{} {}'.format(i,param_dict[i]))
optstr += ' -{} {}'.format(i,param_dict[i])
swarpcom='swarp -c default.swarp ' + ','.join(imlist) + optstr
#print(swarpcom)
os.system(swarpcom)
def pixelscale(i):
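    # The CD matrix maps pixel offsets to sky offsets in degrees; the scale
    # along axis 1 is sqrt(CD1_1**2 + CD2_1**2) deg/pixel, converted to arcsec.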
cd11 = fits.getheader(i)['CD1_1']
cd12 = fits.getheader(i)['CD1_2']
cd21 = fits.getheader(i)['CD2_1']
cd22 = fits.getheader(i)['CD2_2']
pixscale=round(np.sqrt(cd11**2 + cd21**2) *3600 ,4)
puthdr(i,'PSCALE',round(pixscale,3))
#print('Pixel scale =', pixscale,'\"')
return pixscale
def radec_center(im):
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord
from astropy.coordinates import ICRS, Galactic, FK4, FK5
from astropy.coordinates import Angle, Latitude, Longitude
from astropy.io import fits
import astropy.units as u
import astropy.coordinates as coord
import numpy as np
hdr = fits.getheader(im)
# RA, Dec center for reference catalog query
xcent, ycent= hdr['NAXIS1']/2., hdr['NAXIS2']/2.
w = WCS(im)
racent, deccent = w.all_pix2world(xcent, ycent, 1)
c=SkyCoord(racent,deccent,unit="deg")
rastr=c.ra.to_string(unit=u.hourangle,sep=':')
decstr=c.dec.to_string(unit=u.deg,sep=':')
racent, deccent = racent.item(), deccent.item()
return rastr,decstr,racent,deccent
def swarpregister(im,refim='ref.fits'):
rahms,decdms,rac,dec=radec_center(im)
PSCALE=fits.getheader(im)['PSCALE']
inputs=' '+refim+' '
outname='regs_'+im
swarpcom0='swarp -c default.swarp '
opt1=' -IMAGEOUT_NAME '+ outname+' '
opt2=' -COMBINE N '
opt3=' -CELESTIAL_TYPE NATIVE ' # NATIVE, PIXEL, EQUATORIAL,
# GALACTIC,ECLIPTIC, or SUPERGALACTIC
opt4=' -PROJECTION_TYPE TAN ' # Any WCS projection code or NONE
opt5=' -PROJECTION_ERR 0.001 ' # Maximum projection error (in output
# pixels), or 0 for no approximation
opt6=' -CENTER_TYPE MANUAL ' # MANUAL, ALL or MOST
opt7=' -CENTER '+rahms+','+decdms+' ' # Coordinates of the image center
opt8=' -PIXELSCALE_TYPE MANUAL ' # MANUAL,FIT,MIN,MAX or MEDIAN
opt9=' -PIXEL_SCALE '+str(PSCALE)+' ' # Pixel scale
opt10=' -IMAGE_SIZE 0 ' # Image size (0 = AUTOMATIC)
opt11=' -RESAMPLE Y '
opt12=' -SUBTRACT_BACK N '
opt13=' -DELETE_TMPFILES N '
opt14=' -COPY_KEYWORDS OBJECT '
opt15=' -WRITE_FILEINFO Y '
swarpcom=swarpcom0+inputs+opt1+opt2+opt3+opt4+opt5+\
opt6+opt7+opt8+opt9+opt10+\
opt11+opt12+opt13+opt14+opt15
print(swarpcom)
os.system(swarpcom)
def swarp_single(im):
rahms,decdms,rac,dec=radec_center(im)
PSCALE=fits.getheader(im)['PSCALE']
    inputs=' '+im+' '   # single-image resampling: the input is the image itself
outname='regs_'+im
swarpcom0='swarp -c default.swarp '
opt1=' -IMAGEOUT_NAME '+ outname+' '
opt2=' -COMBINE N '
opt3=' -CELESTIAL_TYPE NATIVE ' # NATIVE, PIXEL, EQUATORIAL,
# GALACTIC,ECLIPTIC, or SUPERGALACTIC
opt4=' -PROJECTION_TYPE TAN ' # Any WCS projection code or NONE
opt5=' -PROJECTION_ERR 0.001 ' # Maximum projection error (in output
# pixels), or 0 for no approximation
opt6=' -CENTER_TYPE MANUAL ' # MANUAL, ALL or MOST
opt7=' -CENTER '+rahms+','+decdms+' ' # Coordinates of the image center
opt8=' -PIXELSCALE_TYPE MANUAL ' # MANUAL,FIT,MIN,MAX or MEDIAN
opt9=' -PIXEL_SCALE '+str(PSCALE)+' ' # Pixel scale
opt10=' -IMAGE_SIZE 0 ' # Image size (0 = AUTOMATIC)
opt11=' -RESAMPLE Y '
opt12=' -SUBTRACT_BACK N '
opt13=' -DELETE_TMPFILES N '
opt14=' -COPY_KEYWORDS OBJECT '
opt15=' -WRITE_FILEINFO Y '
swarpcom=swarpcom0+inputs+opt1+opt2+opt3+opt4+opt5+\
opt6+opt7+opt8+opt9+opt10+\
opt11+opt12+opt13+opt14+opt15
print(swarpcom)
os.system(swarpcom)
# !swarp t.fits -c default.swarp -PROJECTION_TYPE TAN -IMAGEOUT_NAME=satan.fits
# swarp -d
'''
# Default configuration file for SWarp 2.41.4
# EB 2021-02-04
#
#----------------------------------- Output -----------------------------------
IMAGEOUT_NAME coadd.fits # Output filename
WEIGHTOUT_NAME coadd.weight.fits # Output weight-map filename
HEADER_ONLY N # Only a header as an output file (Y/N)?
HEADER_SUFFIX .head # Filename extension for additional headers
#------------------------------- Input Weights --------------------------------
WEIGHT_TYPE NONE # BACKGROUND,MAP_RMS,MAP_VARIANCE
# or MAP_WEIGHT
WEIGHT_SUFFIX .weight.fits # Suffix to use for weight-maps
WEIGHT_IMAGE # Weightmap filename if suffix not used
# (all or for each weight-map)
#------------------------------- Co-addition ----------------------------------
COMBINE Y # Combine resampled images (Y/N)?
COMBINE_TYPE MEDIAN # MEDIAN,AVERAGE,MIN,MAX,WEIGHTED,CLIPPED
# CHI-OLD,CHI-MODE,CHI-MEAN,SUM,
# WEIGHTED_WEIGHT,MEDIAN_WEIGHT,
# AND,NAND,OR or NOR
#-------------------------------- Astrometry ----------------------------------
CELESTIAL_TYPE NATIVE # NATIVE, PIXEL, EQUATORIAL,
# GALACTIC,ECLIPTIC, or SUPERGALACTIC
PROJECTION_TYPE TAN # Any WCS projection code or NONE
PROJECTION_ERR 0.001 # Maximum projection error (in output
# pixels), or 0 for no approximation
CENTER_TYPE ALL # MANUAL, ALL or MOST
CENTER 00:00:00.0, +00:00:00.0 # Coordinates of the image center
PIXELSCALE_TYPE MEDIAN # MANUAL,FIT,MIN,MAX or MEDIAN
PIXEL_SCALE 0.0 # Pixel scale
IMAGE_SIZE 0 # Image size (0 = AUTOMATIC)
#-------------------------------- Resampling ----------------------------------
RESAMPLE Y # Resample input images (Y/N)?
RESAMPLE_DIR . # Directory path for resampled images
RESAMPLE_SUFFIX .resamp.fits # filename extension for resampled images
RESAMPLING_TYPE LANCZOS3 # NEAREST,BILINEAR,LANCZOS2,LANCZOS3
# LANCZOS4 (1 per axis) or FLAGS
OVERSAMPLING 0 # Oversampling in each dimension
# (0 = automatic)
INTERPOLATE N # Interpolate bad input pixels (Y/N)?
# (all or for each image)
FSCALASTRO_TYPE FIXED # NONE,FIXED, or VARIABLE
FSCALE_KEYWORD FLXSCALE # FITS keyword for the multiplicative
# factor applied to each input image
FSCALE_DEFAULT 1.0 # Default FSCALE value if not in header
GAIN_KEYWORD GAIN # FITS keyword for effect. gain (e-/ADU)
GAIN_DEFAULT 0.0 # Default gain if no FITS keyword found
#--------------------------- Background subtraction ---------------------------
SUBTRACT_BACK Y # Subtraction sky background (Y/N)?
# (all or for each image)
BACK_TYPE AUTO # AUTO or MANUAL
# (all or for each image)
BACK_DEFAULT 0.0 # Default background value in MANUAL
# (all or for each image)
BACK_SIZE 128 # Background mesh size (pixels)
# (all or for each image)
BACK_FILTERSIZE 3 # Background map filter range (meshes)
# (all or for each image)
#------------------------------ Memory management -----------------------------
VMEM_DIR . # Directory path for swap files
VMEM_MAX 2047 # Maximum amount of virtual memory (MB)
MEM_MAX 256 # Maximum amount of usable RAM (MB)
COMBINE_BUFSIZE 256 # RAM dedicated to co-addition(MB)
#------------------------------ Miscellaneous ---------------------------------
DELETE_TMPFILES Y # Delete temporary resampled FITS files
# (Y/N)?
COPY_KEYWORDS OBJECT # List of FITS keywords to propagate
# from the input to the output headers
WRITE_FILEINFO N # Write information about each input
# file in the output image header?
WRITE_XML Y # Write XML file (Y/N)?
XML_NAME swarp.xml # Filename for XML output
VERBOSE_TYPE NORMAL # QUIET,LOG,NORMAL, or FULL
NTHREADS 0 # Number of simultaneous threads for
# the SMP version of SWarp
# 0 = automatic
'''
# swarp -dd
'''
# Default configuration file for SWarp 2.41.4
# EB 2021-02-04
#
#----------------------------------- Output -----------------------------------
IMAGEOUT_NAME coadd.fits # Output filename
WEIGHTOUT_NAME coadd.weight.fits # Output weight-map filename
HEADEROUT_NAME # Out. header filename (overrides suffix)
HEADER_NAME # Header filename if suffix not used
HEADER_ONLY N # Only a header as an output file (Y/N)?
HEADER_SUFFIX .head # Filename extension for additional headers
TILE_COMPRESS N # Write tile compressed output image (Y/N)?
#------------------------------- Input Weights --------------------------------
WEIGHT_TYPE NONE # BACKGROUND,MAP_RMS,MAP_VARIANCE
# or MAP_WEIGHT
RESCALE_WEIGHTS Y # Rescale input weights/variances (Y/N)?
WEIGHT_SUFFIX .weight.fits # Suffix to use for weight-maps
WEIGHT_IMAGE # Weightmap filename if suffix not used
# (all or for each weight-map)
WEIGHT_THRESH # Bad pixel weight-threshold
#------------------------------- Co-addition ----------------------------------
COMBINE Y # Combine resampled images (Y/N)?
COMBINE_TYPE MEDIAN # MEDIAN,AVERAGE,MIN,MAX,WEIGHTED,CLIPPED
# CHI-OLD,CHI-MODE,CHI-MEAN,SUM,
# WEIGHTED_WEIGHT,MEDIAN_WEIGHT,
# AND,NAND,OR or NOR
CLIP_AMPFRAC 0.3 # Fraction of flux variation allowed
# with clipping
CLIP_SIGMA 4.0 # RMS error multiple variation allowed
# with clipping
CLIP_WRITELOG N # Write output file with coordinates of
# clipped pixels (Y/N)
CLIP_LOGNAME clipped.log # Name of output file with coordinates
# of clipped pixels
BLANK_BADPIXELS N # Set to 0 pixels having a weight of 0
#-------------------------------- Astrometry ----------------------------------
CELESTIAL_TYPE NATIVE # NATIVE, PIXEL, EQUATORIAL,
# GALACTIC,ECLIPTIC, or SUPERGALACTIC
PROJECTION_TYPE TAN # Any WCS projection code or NONE
PROJECTION_ERR 0.001 # Maximum projection error (in output
# pixels), or 0 for no approximation
CENTER_TYPE ALL # MANUAL, ALL or MOST
CENTER 00:00:00.0, +00:00:00.0 # Coordinates of the image center
PIXELSCALE_TYPE MEDIAN # MANUAL,FIT,MIN,MAX or MEDIAN
PIXEL_SCALE 0.0 # Pixel scale
IMAGE_SIZE 0 # Image size (0 = AUTOMATIC)
#-------------------------------- Resampling ----------------------------------
RESAMPLE Y # Resample input images (Y/N)?
RESAMPLE_DIR . # Directory path for resampled images
RESAMPLE_SUFFIX .resamp.fits # filename extension for resampled images
RESAMPLING_TYPE LANCZOS3 # NEAREST,BILINEAR,LANCZOS2,LANCZOS3
# LANCZOS4 (1 per axis) or FLAGS
OVERSAMPLING 0 # Oversampling in each dimension
# (0 = automatic)
INTERPOLATE N # Interpolate bad input pixels (Y/N)?
# (all or for each image)
FSCALASTRO_TYPE FIXED # NONE,FIXED, or VARIABLE
FSCALE_KEYWORD FLXSCALE # FITS keyword for the multiplicative
# factor applied to each input image
FSCALE_DEFAULT 1.0 # Default FSCALE value if not in header
GAIN_KEYWORD GAIN # FITS keyword for effect. gain (e-/ADU)
GAIN_DEFAULT 0.0 # Default gain if no FITS keyword found
# 0 = infinity (all or for each image)
SATLEV_KEYWORD SATURATE # FITS keyword for saturation level (ADU)
SATLEV_DEFAULT 50000.0 # Default saturation if no FITS keyword
#--------------------------- Background subtraction ---------------------------
SUBTRACT_BACK Y # Subtraction sky background (Y/N)?
# (all or for each image)
BACK_TYPE AUTO # AUTO or MANUAL
# (all or for each image)
BACK_DEFAULT 0.0 # Default background value in MANUAL
# (all or for each image)
BACK_SIZE 128 # Background mesh size (pixels)
# (all or for each image)
BACK_FILTERSIZE 3 # Background map filter range (meshes)
# (all or for each image)
BACK_FILTTHRESH 0.0 # Threshold above which the background-
# map filter operates
#------------------------------ Memory management -----------------------------
VMEM_DIR . # Directory path for swap files
VMEM_MAX 2047 # Maximum amount of virtual memory (MB)
MEM_MAX 256 # Maximum amount of usable RAM (MB)
COMBINE_BUFSIZE 256 # RAM dedicated to co-addition(MB)
#------------------------------ Miscellaneous ---------------------------------
DELETE_TMPFILES Y # Delete temporary resampled FITS files
# (Y/N)?
COPY_KEYWORDS OBJECT # List of FITS keywords to propagate
# from the input to the output headers
WRITE_FILEINFO N # Write information about each input
# file in the output image header?
WRITE_XML Y # Write XML file (Y/N)?
XML_NAME swarp.xml # Filename for XML output
XSL_URL file:///data7/cschoi/local/share/swarp/swarp.xsl
# Filename for XSL style-sheet
VERBOSE_TYPE NORMAL # QUIET,LOG,NORMAL, or FULL
NNODES 1 # Number of nodes (for clusters)
NODE_INDEX 0 # Node index (for clusters)
NTHREADS 0 # Number of simultaneous threads for
# the SMP version of SWarp
# 0 = automatic
NOPENFILES_MAX 512 # Maximum number of files opened by SWarp
'''
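# Sketch: the two config blocks above appear to be verbatim dumps of
# `swarp -d` and `swarp -dd`. Assuming the swarp binary is on PATH, the same
# defaults can be regenerated to a file like this:
#
#     import subprocess
#     with open('default.swarp', 'w') as f:
#         subprocess.run(['swarp', '-d'], stdout=f, check=True)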
|
StarcoderdataPython
|
3260826
|
<filename>config/settings/local.py<gh_stars>0
from .base import * # noqa
from .base import env
# GENERAL
DEBUG = True
SECRET_KEY = env(
"DJANGO_SECRET_KEY",
default="<KEY>",
)
ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1"]
# CACHES
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "",
}
}
# TEMPLATES
TEMPLATES[0]["OPTIONS"]["debug"] = DEBUG # noqa F405
# EMAIL
EMAIL_BACKEND = env(
"DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.console.EmailBackend" # noqa
)
# https://docs.djangoproject.com/en/dev/ref/settings/#email-host
EMAIL_HOST = "localhost"
# https://docs.djangoproject.com/en/dev/ref/settings/#email-port
EMAIL_PORT = 1025
if env("USE_DOCKER") == "yes":
import socket
hostname, _, ips = socket.gethostbyname_ex(socket.gethostname())
INTERNAL_IPS += [ip[:-1] + "1" for ip in ips] # noqa
# Celery
CELERY_TASK_EAGER_PROPAGATES = True
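# Usage note (illustrative; standard Django convention): activate this module
# by pointing DJANGO_SETTINGS_MODULE at it before running management commands:
#
#     DJANGO_SETTINGS_MODULE=config.settings.local python manage.py runserver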
|
StarcoderdataPython
|
114512
|
import asyncio
def main():
loop = asyncio.get_event_loop()
# Initialize components
hisayer = HiSayer()
splitter = StringSplitter()
lowercaser = LowerCaser()
uppercaser = UpperCaser()
stringjoiner = StringJoiner()
printer = Printer()
# Connect network (before scheduling, so every stage reads the right queue)
splitter.in_lines = hisayer.out_lines
lowercaser.in_lines = splitter.out_leftpart
uppercaser.in_lines = splitter.out_rightpart
stringjoiner.in_leftpart = lowercaser.out_lines
stringjoiner.in_rightpart = uppercaser.out_lines
printer.in_lines = stringjoiner.out_lines
# Schedule the upstream stages in pipeline order, then drive the loop
# until the final stage (the printer) has drained its input
for component in (hisayer, splitter, lowercaser, uppercaser, stringjoiner):
    loop.create_task(component.run())
loop.run_until_complete(printer.run())
class HiSayer:
out_lines = asyncio.Queue()
async def run(self):
for i in range(20):
await self.out_lines.put(f"Hi hi for the {i+1}:th time...")
class StringSplitter:
in_lines = asyncio.Queue()
out_leftpart = asyncio.Queue()
out_rightpart = asyncio.Queue()
async def run(self):
while not self.in_lines.empty():
s = await self.in_lines.get()
await self.out_leftpart.put(s[: int(len(s) / 2)])
await self.out_rightpart.put(s[int(len(s) / 2) :])
class LowerCaser:
in_lines = asyncio.Queue()
out_lines = asyncio.Queue()
async def run(self):
while not self.in_lines.empty():
s = await self.in_lines.get()
await self.out_lines.put(s.lower())
class UpperCaser:
in_lines = asyncio.Queue()
out_lines = asyncio.Queue()
async def run(self):
while not self.in_lines.empty():
s = await self.in_lines.get()
await self.out_lines.put(s.upper())
class StringJoiner:
in_leftpart = asyncio.Queue()
in_rightpart = asyncio.Queue()
out_lines = asyncio.Queue()
async def run(self):
while not self.in_leftpart.empty() or not self.in_rightpart.empty():
leftpart = await self.in_leftpart.get()
rightpart = await self.in_rightpart.get()
await self.out_lines.put(f"{leftpart}{rightpart}")
class Printer:
in_lines = asyncio.Queue()
async def run(self):
while not self.in_lines.empty():
s = await self.in_lines.get()
print(f"Printer got line: {s}")
if __name__ == "__main__":
main()
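# Caveat: each stage above exits as soon as its input queue is momentarily
# empty, so the pipeline only works because the stages are scheduled in
# pipeline order and never yield control mid-fill. A more robust variant
# (a sketch using the same asyncio.Queue API) terminates on an explicit
# sentinel instead of polling empty(), e.g. for LowerCaser:
#
#     _DONE = object()
#     async def run(self):
#         while True:
#             s = await self.in_lines.get()
#             if s is _DONE:
#                 await self.out_lines.put(_DONE)
#                 break
#             await self.out_lines.put(s.lower())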
|
StarcoderdataPython
|
131249
|
<gh_stars>1-10
from django.apps import AppConfig
class SignalNotificationConfig(AppConfig):
name = 'signal_notification'
def ready(self):
from signal_notification.notify_manager import get_registered_notify_manager, get_registered_handlers
get_registered_handlers()
get_registered_notify_manager()
|
StarcoderdataPython
|
4833164
|
from django.conf.urls import url
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.utils.translation import ugettext_lazy as _
from cms.app_base import CMSApp
from cms.test_utils.project.sampleapp.cms_menus import SampleAppMenu, StaticMenu3, StaticMenu4
from cms.apphook_pool import apphook_pool
from .models import SampleAppConfig
class AppWithNoMenu(CMSApp):
app_name = 'app_with_no_menu'
def get_urls(self, page=None, language=None, **kwargs):
return ["cms.test_utils.project.sampleapp.urls"]
class SampleApp(CMSApp):
name = _("Sample App")
permissions = True
def get_menus(self, page=None, language=None, **kwargs):
return [SampleAppMenu]
def get_urls(self, page=None, language=None, **kwargs):
return ["cms.test_utils.project.sampleapp.urls"]
class SampleAppWithConfig(CMSApp):
name = _("Sample App with config")
app_config = SampleAppConfig
def get_urls(self, page=None, language=None, **kwargs):
return ["cms.test_utils.project.sampleapp.urls_sample_config"]
def get_configs(self):
return self.app_config.objects.all()
def get_config(self, namespace):
try:
return self.app_config.objects.get(namespace=namespace)
except ObjectDoesNotExist:
return None
def get_config_add_url(self):
return reverse('admin:%s_%s_add' % (self.app_config._meta.app_label, self.app_config._meta.model_name))
class SampleAppWithExcludedPermissions(CMSApp):
name = _("Sample App with excluded permissions")
permissions = True
exclude_permissions = ['excluded']
def get_urls(self, page=None, language=None, **kwargs):
return ["cms.test_utils.project.sampleapp.urls_excluded"]
class SampleApp2(CMSApp):
name = _("Sample App 2")
def get_menus(self, page=None, language=None, **kwargs):
return [StaticMenu3]
def get_urls(self, page=None, language=None, **kwargs):
return ["cms.test_utils.project.sampleapp.urls2"]
class SampleApp3(CMSApp):
# CMSApp which returns the url directly rather than through another Python module
name = _("Sample App 3")
def get_urls(self, page=None, language=None, **kwargs):
def my_view(request):
return HttpResponse("Sample App 3 Response")
return [
url(r'^$', my_view, name='sample3-root'),
]
class NamespacedApp(CMSApp):
name = _("Namespaced App")
app_name = 'namespaced_app_ns'
def get_menus(self, page=None, language=None, **kwargs):
return [SampleAppMenu, StaticMenu3]
def get_urls(self, page=None, language=None, **kwargs):
return [
"cms.test_utils.project.sampleapp.ns_urls",
"cms.test_utils.project.sampleapp.urls"
]
class ParentApp(CMSApp):
name = _("Parent app")
def get_urls(self, page=None, language=None, **kwargs):
return ["cms.test_utils.project.sampleapp.urls_parentapp"]
class ChildApp(CMSApp):
name = _("Child app")
def get_urls(self, page=None, language=None, **kwargs):
return ["cms.test_utils.project.sampleapp.urls_childapp"]
class VariableUrlsApp(CMSApp):
name = _("Variable urls-menus App")
def get_menus(self, page=None, language=None, **kwargs):
if page and page.reverse_id == 'page1':
return [SampleAppMenu]
elif page and page.reverse_id == 'page2':
return [StaticMenu4]
else:
return [StaticMenu4, SampleAppMenu]
def get_urls(self, page=None, language=None, **kwargs):
if page and page.reverse_id == 'page1':
return ["cms.test_utils.project.sampleapp.urls"]
else:
return ["cms.test_utils.project.sampleapp.urls2"]
apphook_pool.register(ChildApp)
apphook_pool.register(SampleApp)
apphook_pool.register(SampleAppWithExcludedPermissions)
apphook_pool.register(SampleApp2)
apphook_pool.register(SampleApp3)
apphook_pool.register(NamespacedApp)
apphook_pool.register(ParentApp)
apphook_pool.register(VariableUrlsApp)
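# Note: AppWithNoMenu and SampleAppWithConfig are declared above but left
# unregistered; a test that needs them could register them the same way,
# e.g. apphook_pool.register(AppWithNoMenu).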
|
StarcoderdataPython
|
1791178
|
<gh_stars>0
def test_main():
assert "main" == "main"
|
StarcoderdataPython
|
1743015
|
<filename>spaceship.py
WIDTH = 800
HEIGHT = 600
import math
spaceship = Actor("falcon")
print(spaceship.size)
spaceship.center = WIDTH/2, HEIGHT/2
spaceship.speed = 4
spaceship.angle = 0
spaceship.direction = 0, -1
#def on_key_down(key):
# if key == keys.left:
# pass
# elif key == keys.right:
# pass
# elif key == keys.space:
# pass
def on_key_up(key):
pass
def update():
dx, dy = spaceship.direction
spaceship.move_ip(spaceship.speed * dx, spaceship.speed * dy)
if keyboard.left:
spaceship.angle += 8
if spaceship.angle >= 360:
spaceship.angle = 0
if keyboard.right:
spaceship.angle -= 8
if spaceship.angle < 0:
spaceship.angle = 360 + spaceship.angle
spacex = math.sin(math.radians(spaceship.angle))
spacey = math.cos(math.radians(spaceship.angle))
spaceship.direction = -spacex, -spacey
def draw():
screen.clear()
spaceship.draw()
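# Run note (assuming a standard Pygame Zero install): scripts like this rely
# on pgzero injecting Actor, keyboard and screen, so they are launched with
# the pgzrun helper rather than plain python:
#
#     pgzrun spaceship.py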
|
StarcoderdataPython
|
101818
|
import discord
import asyncio
import youtube_dl
import string
class vidPlayer:
def __init__(self, bot):
self.bot = bot
self.list = []
self.voice = None
self.player = None
@asyncio.coroutine
def playAll(self, channel: discord.Channel=None):
if len(self.list) == 0:
yield from self.bot.say("No videos in the playlist")
return
if self.voice is None:
self.voice = yield from self.bot.join_voice_channel(channel)
self.player = yield from self.voice.create_ytdl_player(self.list.pop(0), after=self.threadsafePlayNext, use_avconv=True)
self.player.start()
@asyncio.coroutine
def playlist(self, url: str=None):
self.list.append(url)
yield from self.bot.say("Added your video!")
@asyncio.coroutine
def play(self, url: str=None, channel: discord.Channel=None):
if self.voice is None:
self.voice = yield from self.bot.join_voice_channel(channel)
self.player = yield from self.voice.create_ytdl_player(url, after=self.threadsafeDisconnect, use_avconv=True)
self.player.start()
@asyncio.coroutine
def disconnect(self):
if self.voice is None:
pass
else:
yield from self.voice.disconnect()
self.player.stop()
self.voice = None
self.player = None
def threadsafeDisconnect(self):
coro = self.voice.disconnect()
fut = discord.compat.run_coroutine_threadsafe(coro, self.voice.loop)
try:
fut.result()
self.voice = None
self.player.stop()
self.player = None
except:
pass
def threadsafePlayNext(self):
self.player.stop()
if self.list != []:
coro = self.playAll()
fut = discord.compat.run_coroutine_threadsafe(coro, self.bot.loop)
try:
fut.result()
except:
pass
else:
coro = self.voice.disconnect()
fut = discord.compat.run_coroutine_threadsafe(coro, self.voice.loop)
try:
fut.result()
self.voice = None
self.player = None
except:
pass
@asyncio.coroutine
def changeVolume(self, vol: int=None):
if self.player is None:
yield from self.bot.say("Can't change volume, no audio to adjust")
elif vol is None:
yield from self.bot.say("What do you want the volume at? Please include a number")
elif vol > 200:
self.player.volume = 2
yield from self.bot.say("Volume is at 200")
elif vol < 0:
self.player.volume = 0
yield from self.bot.say("Volume is at 0")
else:
self.player.volume = vol/100
yield from self.bot.say("Volume is at " + str(vol))
@asyncio.coroutine
def getVolume(self):
if self.player is None:
yield from self.bot.say("Not currently playing anything")
else:
yield from self.bot.say("Volume is " + str(self.player.volume))
@asyncio.coroutine
def now(self):
if self.player is None:
yield from self.bot.say("Not currently playing anything, use command !add or !youtube to start")
else:
yield from self.bot.say("Currently Playing: " + self.player.title)
@asyncio.coroutine
def pauseAndResume(self):
if self.player is None:
pass
elif self.player.is_playing():
self.player.pause()
yield from self.bot.say(self.player.title + " paused")
elif self.player.is_playing() is False:
self.player.resume()
yield from self.bot.say(self.player.title + " resumed")
@asyncio.coroutine
def skip(self):
if self.player is None:
pass
else:
# stopping the player fires the after-callback (threadsafePlayNext),
# which starts the next queued video automatically
self.player.stop()
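# Hypothetical wiring sketch (assumes the legacy discord.py API used above;
# the command name and voice-channel attribute are illustrative):
#
#     from discord.ext import commands
#     bot = commands.Bot(command_prefix='!')
#     vp = vidPlayer(bot)
#
#     @bot.command(pass_context=True)
#     @asyncio.coroutine
#     def youtube(ctx, url):
#         yield from vp.play(url, ctx.message.author.voice_channel)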
|
StarcoderdataPython
|
1704684
|
<reponame>Zarchan/mqtt_chat
import argparse
import shutil
import paho.mqtt.client as mqtt # type: ignore
import paho.mqtt.subscribe as subscribe # type: ignore
parser = argparse.ArgumentParser(description="Start a command line chat room over MQTT")
parser.add_argument('--display-name', '-d', required=True, help="Chatroom display name" )
parser.add_argument('--user-name', '-u', help="Username for authentication, if required")
parser.add_argument('--password', '-p', help="Password for authentication, if required")
parser.add_argument('--broker-address', '-b', required=True, help="Address of the MQTT broker")
parser.add_argument('--topic', '-t', required=True, help="Topic to subscribe to, looks like /mygroup/ourchat. For more information look up MQTT topics")
parser.add_argument('--obfuscate', '-o', help="Obfuscate text so that any other subscribers to the topic not in the know only see garbled text. Note this is not encryption, it is the cryptographic version of Dracula disguising himself as Alucard")
parser.add_argument('--max-width', '-m', type=int, default=float('inf'), help='If set to a number less than the terminal width, will use that number as the terminal width')
args = parser.parse_args()
def on_message(client, userdata, message):
incoming = str(message.payload.decode("utf-8"))
lines = [incoming[i*max_width:(i+1)*max_width] for i in range((len(incoming) // max_width) + 1)]
for line in lines:
print("\033[s\033[1S\033[1A\033[1L\033[999D" + line + "\033[u", end="", flush=True)
max_width = min(shutil.get_terminal_size()[0] - 1, args.max_width)
client = mqtt.Client() #create new instance
client.will_set(args.topic, payload= f"{args.display_name} has lost the connection")
client.on_message=on_message
client.connect(args.broker_address)
client.loop_start()
client.subscribe(args.topic)
print("\033[999B", end="") #pushes cursor to the bottom of the window
try:
while True:
message = str(input(">> "))
message_height = len(message) // max_width + 1 #number of times to do next line
print("\033[1T\033[K" * message_height, end="", flush=True) #ANSI escape sequence to delete previous line
client.publish(args.topic, f"{args.display_name}: {message}")
except KeyboardInterrupt:
client.loop_stop()
print("")
|
StarcoderdataPython
|
1685687
|
import sqlite3
import crypt_handler
# database entry object containing all the information nessecary
class DbEntry():
def __init__(self, app, url, email, username, password):
self.app = app
self.url = url
self.email = email
self.username = username
self.password = password
def __str__(self):
return f'Application: {self.app} | Url: {self.url} | Email: {self.email} | Username: {self.username} | Password: {self.password}'
# create a connection to the database
def create_connection(db):
con = None
try:
con = sqlite3.connect(f'{db}.db')
except Exception as e:
print(e)
return con
# create a table for the master password
def create_master_table(con):
with con:
c = con.cursor()
c.execute('CREATE TABLE IF NOT EXISTS master (Password text)')
# create a table for the accounts
def create_accounts_table(con):
with con:
c = con.cursor()
c.execute('''CREATE TABLE IF NOT EXISTS accounts (
Application text,
Url text,
Email text,
Username text,
Password text,
primary key (Application, Email)
)'''
)
# insert master password to the master table
def insert_to_master(con, password):
hashed = crypt_handler.hash_password(password)
with con:
c = con.cursor()
c.execute('INSERT INTO master VALUES (:password)', {"password": hashed})
# insert account to the accounts table
def insert_to_accounts(con, entry):
with con:
c = con.cursor()
c.execute('INSERT INTO accounts VALUES(?, ?, ?, ?, ?)', (entry.app, entry.url, entry.email, entry.username, entry.password))
# check if table is empty
def is_empty(con, table):
with con:
c = con.cursor()
c.execute(f'SELECT count(*) FROM {table}')
res = c.fetchall()[0][0]
return res == 0
# return the master password
def get_master_hashed(con):
with con:
c = con.cursor()
c.execute('SELECT Password FROM master')
res = c.fetchall()[0][0]
return res
# find accounts by application name
def find_by_app(con, app):
with con:
c = con.cursor()
c.execute('Select * FROM accounts WHERE Application = :app', {'app': app})
return c.fetchall()
#find accounts by email
def find_by_email(con, email):
with con:
c = con.cursor()
c.execute('Select * FROM accounts WHERE Email=:email', {'email': email})
return c.fetchall()
# get all the accounts in the database
def show_all(con):
with con:
c = con.cursor()
c.execute('SELECT * FROM accounts')
return c.fetchall()
# delete an account from the database
def delete_account(con, entry):
with con:
c = con.cursor()
c.execute('DELETE FROM accounts WHERE (Application = :app AND Email = :email)', {'app': entry.app, 'email': entry.email})
# change a password for a chosen account
def change_password(con, entry, new_pass):
with con:
c = con.cursor()
c.execute('UPDATE accounts SET Password = :password WHERE (Application = :app AND Email = :email)', {'password': new_pass, 'app': entry.app, 'email': entry.email})
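# A minimal usage sketch (database and account values are illustrative; note
# the (Application, Email) primary key rejects duplicate inserts):
#
#     con = create_connection('vault')
#     create_master_table(con)
#     create_accounts_table(con)
#     entry = DbEntry('example-app', 'https://example.com',
#                     '[email protected]', 'user', 'hunter2')
#     insert_to_accounts(con, entry)
#     for row in show_all(con):
#         print(row)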
|
StarcoderdataPython
|
4825245
|
# Copyright (C) 2015-2019 SignalFx, Inc. All rights reserved.
# Copyright (C) 2020 Splunk, Inc. All rights reserved.
import collections
import json
import logging
import pprint
import requests
import six
from six.moves import queue
import threading
import zlib
from .constants import DEFAULT_INGEST_ENDPOINT, DEFAULT_TIMEOUT, \
DEFAULT_BATCH_SIZE, SUPPORTED_EVENT_CATEGORIES, INTEGER_MAX, \
INTEGER_MIN
from . import version
try:
from .generated_protocol_buffers \
import signal_fx_protocol_buffers_pb2 as sf_pbuf
except ImportError:
sf_pbuf = None
_COMPRESSION_LEVEL = 8
_logger = logging.getLogger(__name__)
class _BaseSignalFxIngestClient(object):
"""Base SignalFx ingest client.
This class is private and is not meant to be used directly. Instead, use
one of its subclasses, which implement specific data encodings for
interacting with the SignalFx Ingest API.
This class manages the datapoint sending thread and the common features.
"""
_THREAD_NAME = 'SignalFxDatapointSendThread'
_HEADER_API_TOKEN_KEY = 'X-SF-Token'
_HEADER_USER_AGENT_KEY = 'User-Agent'
_INGEST_ENDPOINT_DATAPOINT_SUFFIX = 'v2/datapoint'
_INGEST_ENDPOINT_EVENT_SUFFIX = 'v2/event'
_QUEUE_STOP = object()
def __init__(self, token, endpoint=DEFAULT_INGEST_ENDPOINT,
timeout=DEFAULT_TIMEOUT, batch_size=DEFAULT_BATCH_SIZE,
user_agents=None, compress=True):
self._token = token
self._endpoint = endpoint.rstrip('/')
self._timeout = timeout
self._batch_size = max(1, batch_size)
self._compress = compress
self._extra_dimensions = {}
self._queue = queue.Queue()
self._thread_running = False
self._lock = threading.Lock()
self._error_counters = collections.defaultdict(lambda: 0)
user_agent = ['{0}/{1}'.format(version.name, version.version)]
if isinstance(user_agents, list):
user_agent.extend(user_agents)
self._session = requests.Session()
self._session.headers.update({
self._HEADER_API_TOKEN_KEY: self._token,
self._HEADER_USER_AGENT_KEY: ' '.join(user_agent),
})
if compress:
self._session.headers.update({
'Content-Encoding': 'gzip'
})
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.stop()
def _add_to_queue(self, metric_type, datapoint):
raise NotImplementedError('Subclasses should implement this!')
def _add_extra_dimensions(self, datapoint):
with self._lock:
if not self._extra_dimensions:
return
if datapoint.get('dimensions') is not None:
datapoint['dimensions'].update(self._extra_dimensions)
else:
datapoint['dimensions'] = self._extra_dimensions
def add_dimensions(self, dimensions):
"""Add one or more dimensions that will be included with every
datapoint and event sent to SignalFx.
Args:
dimensions (dict): A mapping of {dimension: value, ...} pairs.
"""
with self._lock:
self._extra_dimensions.update(dimensions)
def remove_dimensions(self, dimension_names):
"""Removes extra dimensions added by the add_dimensions() function.
Ignores dimension names that don't exist.
Args:
dimension_names (list): List of dimension names to remove.
"""
with self._lock:
for dimension in dimension_names:
if dimension in self._extra_dimensions:
del self._extra_dimensions[dimension]
def send(self, cumulative_counters=None, gauges=None, counters=None):
"""Send the given metrics to SignalFx.
Args:
cumulative_counters (list): a list of dictionaries representing the
cumulative counters to report.
gauges (list): a list of dictionaries representing the gauges to
report.
counters (list): a list of dictionaries representing the counters
to report.
"""
if not gauges and not cumulative_counters and not counters:
return
data = {
'cumulative_counter': cumulative_counters,
'gauge': gauges,
'counter': counters,
}
_logger.debug('Sending datapoints to SignalFx: %s', data)
for metric_type, datapoints in data.items():
if not datapoints:
continue
if not isinstance(datapoints, list):
raise TypeError('Datapoints not of type list: %s' % (datapoints,))
for datapoint in datapoints:
self._add_extra_dimensions(datapoint)
self._add_to_queue(metric_type, datapoint)
# Ensure the sending thread is running.
self._start_thread()
def send_event(self, event_type, category=None, dimensions=None,
properties=None, timestamp=None):
"""Send an event to SignalFx.
Args:
event_type (string): the event type (name of the event time
series).
category (string): the category of the event.
dimensions (dict): a map of event dimensions.
properties (dict): a map of extra properties on that event.
timestamp (float): timestamp when the event occurred
"""
if category and category not in SUPPORTED_EVENT_CATEGORIES:
raise ValueError('Event category is not one of the supported '
'types: {' +
', '.join(SUPPORTED_EVENT_CATEGORIES) + '}')
data = {
'eventType': event_type,
'category': category,
'dimensions': dimensions or {},
'properties': properties or {},
'timestamp': int(timestamp) if timestamp else None,
}
_logger.debug('Sending event to SignalFx: %s', data)
self._add_extra_dimensions(data)
return self._send_event(event_data=data, url='{0}/{1}'.format(
self._endpoint, self._INGEST_ENDPOINT_EVENT_SUFFIX),
session=self._session)
def _send_event(self, event_data=None, url=None, session=None):
raise NotImplementedError('Subclasses should implement this!')
def _start_thread(self):
# Locking the variable that tracks the thread status
# 'self._thread_running' to make it an atomic operation.
with self._lock:
if self._thread_running:
return
self._thread_running = True
self._send_thread = threading.Thread(target=self._send,
name=self._THREAD_NAME)
self._send_thread.daemon = True
self._send_thread.start()
_logger.debug('Thread %s started', self._THREAD_NAME)
def stop(self, msg='Thread stopped'):
"""Stop send thread and flush points for a safe exit."""
with self._lock:
if not self._thread_running:
return
self._thread_running = False
self._queue.put(_BaseSignalFxIngestClient._QUEUE_STOP)
self._send_thread.join()
_logger.debug(msg)
def _inc_error(self, error_type):
"""Increment internal counter of errors encountered.
Args:
error_type (string): the exception class name or other error
descriptor.
"""
with self._lock:
self._error_counters[error_type] += 1
def reset_error_counters(self):
"""Reset dict of error counters to 0 and return the previous values."""
with self._lock:
previous = self._error_counters
self._error_counters = collections.defaultdict(lambda: 0)
return previous
def _send(self):
try:
while self._thread_running or not self._queue.empty():
tmp_dp = self._queue.get(True)
if tmp_dp == _BaseSignalFxIngestClient._QUEUE_STOP:
break
datapoints_list = [tmp_dp]
while (not self._queue.empty() and
len(datapoints_list) < self._batch_size):
tmp_dp = self._queue.get()
if tmp_dp != _BaseSignalFxIngestClient._QUEUE_STOP:
datapoints_list.append(tmp_dp)
try:
self._post(self._batch_data(datapoints_list),
'{0}/{1}'.format(
self._endpoint,
self._INGEST_ENDPOINT_DATAPOINT_SUFFIX))
except Exception as err:
self._inc_error(err.__class__.__name__)
_logger.exception('Posting data to SignalFx failed.')
except KeyboardInterrupt:
self.stop(msg='Thread stopped by keyboard interrupt.')
def _batch_data(self, datapoints_list):
"""Convert the given list of datapoints into a serialized string that
can be send to the ingest endpoint. Subclasses must implement this to
provide the serialization relevant to their implementation."""
raise NotImplementedError('Subclasses should implement this!')
def _post(self, data, url, session=None, timeout=None):
session = session or self._session
timeout = timeout or self._timeout
_logger.debug('Raw datastream being sent: %s', pprint.pformat(data))
if self._compress:
uncompressed_bytes = len(data)
c = zlib.compressobj(_COMPRESSION_LEVEL, zlib.DEFLATED,
zlib.MAX_WBITS | 16)
data = c.compress(data) + c.flush()
_logger.debug('Compressed payload from %d to %d bytes',
uncompressed_bytes, len(data))
response = session.post(url, data=data, timeout=timeout)
_logger.debug('Sending to SignalFx %s (%d %s)',
'succeeded' if response.ok else 'failed',
response.status_code, response.text)
class ProtoBufSignalFxIngestClient(_BaseSignalFxIngestClient):
"""SignalFx Ingest API client that uses Protocol Buffers.
This class presents the interfaces that handle the serialization of data
using Protocol Buffers.
"""
def __init__(self, token, **kwargs):
if not sf_pbuf:
raise AssertionError('Protocol Buffers are not installed')
super(ProtoBufSignalFxIngestClient, self).__init__(token, **kwargs)
self._session.headers.update({
'Content-Type': 'application/x-protobuf'
})
def _add_to_queue(self, metric_type, datapoint):
pbuf_dp = sf_pbuf.DataPoint()
self._assign_value(pbuf_dp, datapoint['value'])
pbuf_dp.metricType = getattr(sf_pbuf, metric_type.upper())
pbuf_dp.metric = datapoint['metric']
if datapoint.get('timestamp'):
pbuf_dp.timestamp = int(datapoint['timestamp'])
self._set_dimensions(
pbuf_dp, datapoint.get('dimensions', {}))
self._queue.put(pbuf_dp)
def _set_dimensions(self, pbuf_obj, dimensions):
if not isinstance(dimensions, dict):
raise ValueError('Invalid dimensions {0}; must be a dict!'
.format(dimensions))
for key, value in dimensions.items():
dim = pbuf_obj.dimensions.add()
dim.key = key
dim.value = value
def _set_event_properties(self, pbuf_obj, properties):
if not isinstance(properties, dict):
raise ValueError('Invalid properties {0}; must be a dict!'
.format(properties))
for key, value in properties.items():
prop = pbuf_obj.properties.add()
prop.key = key
self._assign_property_value(prop, value)
def _assign_value_by_type(self, pbuf_obj, value, _bool=True, _float=True,
_integer=True, _string=True, error_prefix=''):
"""Assigns the supplied value to the appropriate protobuf value type"""
# bool inherits int, so bool instance check must be executed prior to
# checking for integer types
if isinstance(value, bool) and _bool is True:
pbuf_obj.value.boolValue = value
elif isinstance(value, six.integer_types) and \
not isinstance(value, bool) and _integer is True:
if value < INTEGER_MIN or value > INTEGER_MAX:
raise ValueError(
('{}: {} exceeds signed 64 bit integer range '
'as defined by ProtocolBuffers ({} to {})')
.format(error_prefix, str(value),
str(INTEGER_MIN), str(INTEGER_MAX)))
pbuf_obj.value.intValue = value
elif isinstance(value, float) and _float is True:
pbuf_obj.value.doubleValue = value
elif isinstance(value, six.string_types) and _string is True:
pbuf_obj.value.strValue = value
else:
raise ValueError(
'{}: {} is of invalid type {}'
.format(error_prefix, str(value), str(type(value))))
def _assign_property_value(self, prop, value):
"""Assigns a property value to the protobuf obj property"""
self._assign_value_by_type(prop, value,
error_prefix='Invalid property value')
def _assign_value(self, pbuf_dp, value):
"""Assigns a value to the protobuf obj"""
self._assign_value_by_type(pbuf_dp, value, _bool=False,
error_prefix='Invalid value')
def _batch_data(self, datapoints_list):
dpum = sf_pbuf.DataPointUploadMessage()
dpum.datapoints.extend(datapoints_list)
return dpum.SerializeToString()
def _send_event(self, event_data=None, url=None, session=None):
pbuf_event = self._create_event_protobuf_message(event_data)
pbuf_eventum = sf_pbuf.EventUploadMessage()
pbuf_eventum.events.extend([pbuf_event])
return self._post(pbuf_eventum.SerializeToString(), url, session)
def _create_event_protobuf_message(self, event_data=None):
pbuf_event = sf_pbuf.Event()
pbuf_event.eventType = event_data['eventType']
self._set_dimensions(
pbuf_event, event_data.get('dimensions', {}))
self._set_event_properties(
pbuf_event, event_data.get('properties', {}))
if event_data.get('category'):
pbuf_event.category = getattr(sf_pbuf,
event_data['category'].upper())
if event_data.get('timestamp'):
pbuf_event.timestamp = event_data['timestamp']
return pbuf_event
class JsonSignalFxIngestClient(_BaseSignalFxIngestClient):
"""SignalFx Ingest API client that uses JSON.
This class presents the interfaces that handle the serialization of data
using JSON.
"""
def __init__(self, token, **kwargs):
super(JsonSignalFxIngestClient, self).__init__(token, **kwargs)
self._session.headers.update({
'Content-Type': 'application/json',
})
def _add_to_queue(self, metric_type, datapoint):
self._queue.put({metric_type: datapoint})
def _batch_data(self, datapoints_list):
datapoints = collections.defaultdict(list)
for item in datapoints_list:
item_keys = list(item.keys())
datapoints[item_keys[0]].append(item[item_keys[0]])
return json.dumps(datapoints).encode('utf-8')
def _send_event(self, event_data=None, url=None, session=None):
data = json.dumps([event_data]).encode('utf-8')
return self._post(data, url, session)
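# Minimal usage sketch (token and metric name are placeholders; sends one
# gauge datapoint through the JSON client defined above):
#
#     client = JsonSignalFxIngestClient('MY-ORG-TOKEN')
#     client.send(gauges=[{'metric': 'cpu.utilization', 'value': 42.0}])
#     client.stop()  # flush the background send thread before exiting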
|
StarcoderdataPython
|
80988
|
<gh_stars>100-1000
# Copyright (c) 2020 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from typing import Any, List, Optional
import torch
try:
import horovod.torch
_HVD = horovod.torch
except (ModuleNotFoundError, ImportError):
_HVD = None
def initialize_horovod():
if not _HVD:
raise ValueError(
"Horovod backend specified, "
"but cannot import `horovod.torch`. "
"Install Horovod following the instructions at: "
"https://github.com/horovod/horovod"
)
_HVD.init()
return _HVD
def has_horovodrun():
"""Returns True if running with `horovodrun` using Gloo or OpenMPI."""
return "OMPI_COMM_WORLD_RANK" in os.environ or "HOROVOD_RANK" in os.environ
def return_first(fn):
"""Wraps function so results are only returned by the first (coordinator) rank.
The purpose of this function is to reduce network overhead.
"""
def wrapped(*args, **kwargs):
res = fn(*args, **kwargs)
return res if _HVD.rank() == 0 else None
return wrapped
def gather_all_tensors(result: torch.Tensor, group: Optional[Any] = None) -> List[torch.Tensor]:
"""Function to gather all tensors from several processes onto a list that is broadcast to all processes.
Works on tensors that have the same number of dimensions, but where each dimension may differ. In this case
tensors are padded, gathered and then trimmed to secure equal workload for all processes.
:param result: the value to sync
:param group: the process group to gather results from (not supported: always uses world)
:return: list with size equal to the process group where gathered_result[i]
corresponds to result tensor from process i
"""
if group is not None:
raise ValueError("Horovod does not support allgather using a subcommunicator at this time. " "Unset `group`.")
if _HVD is None or not _HVD.is_initialized():
return [result]
if len(result.shape) == 0:
# Convert scalars to single dimension tensors
result = result.reshape(1)
is_bool = False
if result.dtype == torch.bool:
# need to convert to int due to Horovod limitation
result = result.int()
is_bool = True
# sync and gather all
gathered = _HVD.allgather(result)
gathered_result = list(gathered.split(1, dim=0))
if is_bool:
# convert back if needed
gathered_result = [t.bool() for t in gathered_result]
return gathered_result
def is_distributed_available() -> bool:
return _HVD is not None and _HVD.is_initialized()
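# Usage sketch (assumes horovod.torch is installed and the script is launched
# with something like `horovodrun -np 2 python script.py`):
#
#     hvd = initialize_horovod()
#     local = torch.tensor([float(hvd.rank())])
#     gathered = gather_all_tensors(local)  # one tensor per rank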
|
StarcoderdataPython
|
1715807
|
import collections.abc
import logging
import pprint
class PprintArgsFilter(logging.Filter):
'''Use pprint/pformat to pretty-print the log message args.
e.g., log.debug("foo: %s", foo)
will pformat the value of the foo object.
'''
def __init__(self, name="", defaults=None):
super(PprintArgsFilter, self).__init__(name=name)
self.defaults = defaults or {}
def filter(self, record):
if not record.args:
return True
# Modify the log record in place, replacing each arg
# with a pprint'ed version of its values.
# TODO: could wrap with a callable to defer evaluating
# the arg till the last minute
# args can be a tuple or a dict/map, this bit is from logging.LogRecord.__init__
args_map = {}
if isinstance(record.args, collections.abc.Mapping):
args_map = record.args
if args_map:
for arg, value in args_map.items():
args_map[arg] = pprint.pformat(value)
record.args = args_map
else:
record.args = tuple(pprint.pformat(x) for x in record.args)
return True
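# Example: attaching the filter to a logger (logger name is illustrative):
#
#     logging.basicConfig(level=logging.DEBUG)
#     log = logging.getLogger('pprint-demo')
#     log.addFilter(PprintArgsFilter())
#     log.debug('nested config: %s', {'hosts': ['a', 'b'], 'retries': 3})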
|
StarcoderdataPython
|
3238533
|
<filename>KernTool3.roboFontExt/lib/tdKernListView.py
# -*- coding: utf-8 -*-
from vanilla import *
from AppKit import *
from fontTools.pens.cocoaPen import CocoaPen
from mojo.canvas import Canvas
from mojo import drawingTools  # .drawingTools import *
from fontParts.world import CurrentFont
from lib.eventTools.eventManager import postEvent, publishEvent
from mojo.events import addObserver, removeObserver
from defconAppKit.windows.baseWindow import BaseWindowController
from vanilla.dialogs import getFile, putFile
import codecs, sys, os
import operator
import importlib
import platform
import tdCanvasKeysDecoder
importlib.reload(tdCanvasKeysDecoder)
from tdCanvasKeysDecoder import decodeCanvasKeys, decodeModifiers
import tdKernToolEssentials
importlib.reload(tdKernToolEssentials)
from tdKernToolEssentials import *
import tdControlPanel
importlib.reload(tdControlPanel)
from tdControlPanel import TDControlPanel
import tdGlyphparser
importlib.reload(tdGlyphparser)
import tdMenuAdvanced
importlib.reload(tdMenuAdvanced)
from tdMenuAdvanced import MenuDialogWindow
idY0 = 0
idDispL = 1
idDispR = 2
idKern = 3
idNote = 4
idNameL = 5
idNameR = 6
idGlyphL = 7
idGroupL = 9
idGlyphR = 8
idGroupR = 10
class TDKernDB(object):
def __init__(self, font, hashKernDic):
self.font = font
self.hashKernDic = hashKernDic
self.db = {}
self.sortedList = []
# self.indexList = {}
self._mask1id = ID_KERNING_GROUP.replace('.kern', '') + ID_GROUP_DIRECTION_POSITION_LEFT
self._mask2id = ID_KERNING_GROUP.replace('.kern', '') + ID_GROUP_DIRECTION_POSITION_RIGHT
self.buildDB()
def buildDB(self):
self.db = {}
for pair in self.font.kerning:
self.refreshPairInDB(pair)
def makeSortedList(self, pairslist = None, order = 'left', reverse = False):
if not pairslist:
pairslist = self.db
# self.indexList = {}
if order == 'left':
self.sortedList = sorted(pairslist.items(), key = lambda p: (p[1][0], p[1][1]), reverse = reverse )
elif order == 'right':
self.sortedList = sorted(pairslist.items(), key = lambda p: (p[1][1], p[1][0]), reverse = reverse)
elif order == 'values':
self.sortedList = sorted(pairslist.items(), key = lambda p: (p[1][4], p[1][0], p[1][1]), reverse = reverse)
elif order == 'notes':
reverse = not reverse
self.sortedList = sorted(pairslist.items(), key = lambda p: (p[1][5], p[1][0], p[1][1]), reverse = reverse)
# for idx, item in enumerate(self.sortedList):
# self.indexList[item[0]] = idx
def refreshPairInDB(self, pair):
(l,r) = pair
if pair not in self.font.kerning: return
v = self.font.kerning[pair]
keyGlyphL = self.hashKernDic.getKeyGlyphByGroupname(l) # idGlyphL
keyGlyphR = self.hashKernDic.getKeyGlyphByGroupname(r)
note, _l, _r = getKernPairInfo_v2(self.font, self.hashKernDic, (l, r))
grouppedR = False
sortR = r
# print('ref', l,r, _l,_r, note)
if r.startswith(ID_KERNING_GROUP):
grouppedR = True
sortR = r.replace(self._mask2id, '')
if l.startswith(ID_KERNING_GROUP):
sortL = l.replace(self._mask1id, '')
self.db[(l, r)] = (sortL, sortR, True, grouppedR, v, note, keyGlyphL, keyGlyphR)
else:
self.db[(l, r)] = (l, sortR, False, grouppedR, v, note, keyGlyphL, keyGlyphR)
if l != _l and r != _r :
# print('I think this pair is an orphan')
if (_l, _r) in self.db:
(_sl, _sr, _gl, _gr, _v, note, _kgl, _kgr) = self.db[(_l,_r)]
self.db[(_l, _r)] = (_sl, _sr, _gl, _gr, _v, PAIR_INFO_ATTENTION, _kgl, _kgr)
def updateKernPair(self, pair):
if pair in self.db:
# print('pair in DB', self.db[pair])
if pair in self.font.kerning:
# print ('just new value')
self.refreshPairInDB(pair)
# note, _l, _r = getKernPairInfo_v2(self.font, self.hashKernDic, pair)
# # if note == PAIR_INFO_ORPHAN:
# print ('but lets check the parents',_l, _r, note)
# self.refreshPairInDB((_l, _r))
else:
# print('but it not in kernig - pair was deleted')
self.db.pop(pair)
note, _l, _r = getKernPairInfo_v2(self.font, self.hashKernDic, pair)
""" теперь, если пара - исключение, надо найти инфу о родительских группах и обновить инфу о них"""
# if note == PAIR_INFO_ORPHAN or note == PAIR_INFO_EXCEPTION or note == PAIR_INFO_EXCEPTION_DELETED:
# print('pair was exception, refreshing info about parents', pair, _l, _r)
if (_l, _r) in self.db:
# print('founded in db')
self.refreshPairInDB((_l,_r))
return pair
else:
# print('pair not in DB', pair)
if pair in self.font.kerning:
# print ('but it has in kerning, adding new pair in DB')
self.refreshPairInDB(pair)
note, _l, _r = getKernPairInfo_v2(self.font, self.hashKernDic, pair)
""" теперь, если пара - исключение, надо найти инфу о родительских группах и обновить инфу о них"""
if note == PAIR_INFO_EXCEPTION:# or note == PAIR_INFO_ORPHAN:
# print('pair new is exception, refreshing info about parents', _l,_r)
if (_l, _r) in self.db:
self.refreshPairInDB((_l, _r))
else:
print('error! pair not found in DB and kerning', pair)
return None
def getKernPair(self, pair):
if pair in self.db:
return self.db[pair]
else:
print('DB: pair not found', pair)
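# Hypothetical usage sketch inside RoboFont (CurrentFont and TDHashKernDic
# are the same names imported earlier in this module):
#
#     font = CurrentFont()
#     db = TDKernDB(font, TDHashKernDic(font))
#     db.makeSortedList(order='values', reverse=True)
#     for pair, info in db.sortedList:
#         print(pair, info[4])  # info[4] is the kerning value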
class TDKernListControl(VanillaBaseObject):
nsViewClass = NSView
def __init__ (self, posSize, selectionCallback=None, window=None):
xw, yw, tx, ty = posSize
# self._window = window
# self._font = None
self._viewArray = []
self._lastSelectedIdx = 0
self._positionYselected = 0
self._lineSize = ty # 1800 # 1800=1000upm; 2400=2000upm
self._scalefactorUI = 1
self._lineCount = 1
self.darkmode = KERNTOOL_UI_DARKMODE
self.darkmodeWarm = KERNTOOL_UI_DARKMODE_WARMBACKGROUND
self._xL = 0
self._xR = 0
self._xV = 0
self._xN = 0
self.maxX = 0
self._selectionCallback = selectionCallback
self.showselection = False
self._setupView(self.nsViewClass, (xw, yw, tx, ty)) # (0, 0, -0, 106)
self.canvas = Canvas((0, 0, -0, -0),
delegate = self, # canvasSize = (100, 101),
hasHorizontalScroller = False,
hasVerticalScroller = True,
autohidesScrollers = False,
backgroundColor = NSColor.whiteColor(),
drawsBackground = True,
# acceptsMouseMoved = True
)
self.canvas.scrollView.getNSScrollView().setBorderType_(NSNoBorder)
def setupControl(self, leftTitle, rightTitle, kernTitle, noteTitle):
self.leftTitle = leftTitle['title']
self.leftName = leftTitle['name']
self.rightTitle = rightTitle['title']
self.rightName = rightTitle['name']
self.kernTitle = kernTitle['title']
self.kernName = kernTitle['name']
self.noteTitle = noteTitle['title']
self.noteName = noteTitle['name']
self.leftSelected = False
self.rightSelected = False
self.kernSelected = False
self.noteSelected = False
self.leftReversed = False
self.rightReversed = False
self.kernReversed = False
self.noteReversed = False
def selectMenuItem(self, menuname, reversed = False):
if menuname == self.leftName:
self.leftSelected = True
self.rightSelected = False
self.kernSelected = False
self.noteSelected = False
self.leftReversed = reversed
self.rightReversed = False
self.kernReversed = False
self.noteReversed = False
elif menuname == self.rightName:
self.leftSelected = False
self.rightSelected = True
self.kernSelected = False
self.noteSelected = False
self.leftReversed = False
self.rightReversed = reversed
self.kernReversed = False
self.noteReversed = False
elif menuname == self.kernName:
self.leftSelected = False
self.rightSelected = False
self.kernSelected = True
self.noteSelected = False
self.leftReversed = False
self.rightReversed = False
self.kernReversed = reversed
self.noteReversed = False
elif menuname == self.noteName:
self.leftSelected = False
self.rightSelected = False
self.kernSelected = False
self.noteSelected = True
self.leftReversed = False
self.rightReversed = False
self.kernReversed = False
self.noteReversed = reversed
def mouseDown (self, event):
visibleWidth = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.width
visibleHeight = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.height
Y_local_pos = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.y
X_window_pos = event.locationInWindow().x
Y_window_pos = event.locationInWindow().y
X_local_pos = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.x
Y_local_pos = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.y
xW, yW, x2W, y2W = self.getPosSize()
x = X_window_pos + X_local_pos # - self._letterStep
y = Y_window_pos + y2W + Y_local_pos
result = None
if self._xL < x and x < self._xR:
result = self.leftName
elif self._xR < x and x < self._xV:
result = self.rightName
elif self._xV < x and x < self._xN:
result = self.kernName
elif self._xN < x and x < visibleWidth:
result = self.noteName
else:
return
if result and self._selectionCallback:
self.selectMenuItem(result, reversed = False)
self.canvas.update()
self._selectionCallback(result)
def updatePanel(self):
self.canvas.update()
def recalculateFrame (self, canvaswidth=None):
# scalefactor = self._scaleUI
if canvaswidth:
visibleWidth = canvaswidth
else:
visibleWidth = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.width
self.visibleWidth = visibleWidth
visibleHeight = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.height
self.canvas._view.setFrame_(NSMakeRect(0, 0, visibleWidth + 20, visibleHeight))
self.maxX = visibleWidth + 20
def draw (self):
# self.recalculateFrame()
self._viewFontName = 'Menlo'
self._viewFontSize = 11
font(self._viewFontName, fontSize = self._viewFontSize)
self.recalculateFrame()
visibleWidth = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.width
visibleHeight = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.height
Y_local_pos = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.y
X_local_pos = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.x
Y_min_window = Y_local_pos
Y_max_window = Y_local_pos + visibleHeight
X_min_window = X_local_pos
X_max_window = X_local_pos + visibleWidth
stroke(0, 0, 0, 0)
strokeWidth(0)
colorBKG = (.9, .9, .9, .8)
if self.darkmode:
colorBKG = (.1,.1,.1,1)
fillRGB(colorBKG)
rect(0, 0, visibleWidth, Y_max_window)
save()
Xpos = 0
Ypos = 0 #item['y0']
Ycontrol = -1 * Ypos
wValue = 40 # width of Value row
wNote = 40 # width of Note row
w = visibleWidth - wValue - wNote
xR = w / 2 # width of Left/Right row and start position of Right row
xV = xR + (w / 2) # start position Values row
xN = xV + wValue # start position Note row
self._xR = xR
self._xV = xV
self._xN = xN
colorBKG = (.9, .9, .9, .8)
colorSRT = (.85, .85, .85, .8)
colorTXT = COLOR_BLACK
selected = False
colorGRP = COLOR_GREY_30
colorStroke = COLOR_GREY_50
if self.darkmode:
colorBKG = (.2, .2, .2, 1)
colorSRT = (.85, .85, .85, .8)
colorTXT = (1,.6,.2,1)
selected = False
colorGRP = (.1, .1, .1, 1)
colorStroke = COLOR_BLACK
fillRGB(colorStroke)
rect(0, 0, visibleWidth, Y_max_window)
txtup = 2.5
titleY = -2
txtsm = txtup + 1
ltxt = self.leftTitle
rtxt = self.rightTitle
if self.leftSelected:
fillRGB(colorGRP)
rect(Xpos, Ycontrol+1, xR-1, self._lineSize)
font(self._viewFontName, fontSize = self._viewFontSize)
fillRGB(colorTXT)
if self.leftReversed:
text(chr(int('25B2', 16)), (xR - 17, Ycontrol + txtup))
else:
text(chr(int('25BC',16)), (xR - 17, Ycontrol + txtup))
if not ltxt:
font(self._viewFontName, fontSize = self._viewFontSize+3)
text(chr(int('25E7', 16)), (Xpos + 5, Ycontrol + txtup + titleY))
else:
fillRGB(colorBKG)
rect(Xpos, Ycontrol+1, xR-1, self._lineSize)
font(self._viewFontName, fontSize = self._viewFontSize)
fillRGB(COLOR_GREY_50)
if self.leftReversed:
text(chr(int('25B2', 16)), (xR - 17, Ycontrol + txtup))
else:
text(chr(int('25BC', 16)), (xR - 17, Ycontrol + txtup))
if not ltxt:
font(self._viewFontName, fontSize = self._viewFontSize+3)
text(chr(int('25E7', 16)), (Xpos + 5, Ycontrol + txtup + titleY))
fillRGB(colorTXT)
if ltxt:
_w, _h = textSize(ltxt)
if _w + 5 + 17 > xR:
font(self._viewFontName, fontSize = self._viewFontSize - 2)
text(ltxt, (Xpos + 5, Ycontrol + txtsm + titleY))
else:
font(self._viewFontName, fontSize = self._viewFontSize )
text(ltxt, (Xpos + 5, Ycontrol + txtsm + titleY))
# DRAW Right row
if self.rightSelected:
fillRGB(colorGRP)
rect(xR, Ycontrol+1, xR-1, self._lineSize)
font(self._viewFontName, fontSize = self._viewFontSize)
fillRGB(colorTXT)
if self.rightReversed:
text(chr(int('25B2', 16)), (xV - 17, Ycontrol + txtup))
else:
text(chr(int('25BC', 16)), (xV - 17, Ycontrol + txtup))
if not rtxt:
font(self._viewFontName, fontSize = self._viewFontSize + 3)
text(chr(int('25E8', 16)), (xR + 5, Ycontrol + txtup + titleY))
else:
fillRGB(colorBKG)
rect(xR, Ycontrol+1, xR-1, self._lineSize)
font(self._viewFontName, fontSize = self._viewFontSize)
fillRGB(COLOR_GREY_50)
if self.rightReversed:
text(chr(int('25B2', 16)), (xV - 17, Ycontrol + txtup))
else:
text(chr(int('25BC', 16)), (xV - 17, Ycontrol + txtup))
if not rtxt:
font(self._viewFontName, fontSize = self._viewFontSize + 3)
text(chr(int('25E8', 16)), (xR + 5, Ycontrol + txtup + titleY))
fillRGB(colorTXT)
if rtxt:
_w, _h = textSize(rtxt)
if _w + 17 > xR:
font(self._viewFontName, fontSize = self._viewFontSize - 2)
text(rtxt, (xR + 5, Ycontrol + txtsm + titleY))
else:
font(self._viewFontName, fontSize = self._viewFontSize)
text(rtxt, (xR + 5, Ycontrol + txtup + titleY))
if self.kernSelected:
fillRGB(colorGRP)
rect(xV, Ycontrol+1, wValue-1, self._lineSize)
font(self._viewFontName, fontSize = self._viewFontSize)
fillRGB(colorTXT)
if self.kernReversed:
text(chr(int('25B2', 16)), (xN - 17, Ycontrol + txtup))
else:
text(chr(int('25BC', 16)), (xN - 17, Ycontrol + txtup))
else:
fillRGB(colorBKG)
rect(xV, Ycontrol+1, wValue-1, self._lineSize)
font(self._viewFontName, fontSize = self._viewFontSize)
fillRGB(COLOR_GREY_50)
if self.kernReversed:
text(chr(int('25B2', 16)), (xN - 17, Ycontrol + txtup))
else:
text(chr(int('25BC', 16)), (xN - 17, Ycontrol + txtup))
fillRGB(colorTXT)
rtxt = self.kernTitle
_w, _h = textSize(rtxt)
if _w + 17 > wValue:
font(self._viewFontName, fontSize = self._viewFontSize - 2)
text(rtxt, (xV + 5, Ycontrol + txtsm+ titleY))
else:
font(self._viewFontName, fontSize = self._viewFontSize)
text(rtxt, (xV + 5, Ycontrol + txtup+ titleY))
if self.noteSelected:
fillRGB(colorGRP)
rect(xN, Ycontrol+1, wNote, self._lineSize)
fillRGB(colorTXT)
font(self._viewFontName, fontSize = self._viewFontSize)
if self.noteReversed:
text(chr(int('25B2', 16)), (xN + 8, Ycontrol + txtup))
else:
text(chr(int('25BC', 16)), (xN + 8, Ycontrol + txtup))
else:
fillRGB(colorBKG)
rect(xN, Ycontrol+1, wNote, self._lineSize)
fillRGB(COLOR_GREY_50)
font(self._viewFontName, fontSize = self._viewFontSize)
if self.noteReversed:
text(chr(int('25B2', 16)), (xN + 8, Ycontrol + txtup))
else:
text(chr(int('25BC', 16)), (xN + 8, Ycontrol + txtup))
fillRGB(colorTXT)
rtxt = self.noteTitle
_w, _h = textSize(rtxt)
if _w + 17 > wNote:
font(self._viewFontName, fontSize = self._viewFontSize - 2)
text(rtxt, (xN + 5, Ycontrol + txtsm+ titleY))
else:
font(self._viewFontName, fontSize = self._viewFontSize)
text(rtxt, (xN + 5, Ycontrol + txtup+ titleY))
restore()
idModeSelected = False
idModeShowAll = True
idFilterSide1 = 'side1'
idFilterBoth = 'both'
idFilterSide2 = 'side2'
class TDKernListView(VanillaBaseObject):
nsViewClass = NSView
def __init__ (self, posSize, selectionCallback=None, window=None, commandCallback = None, previewGlyph = False):
xw, yw, tx, ty = posSize
self._window = window
self._linesToDisplay = []
# self._font = None
self.font = None
self.hashKernDic = None
self.kernDB = None
self._viewArray = []
self._selectedLines = []
self._pairsList2idx = {}
self._currentKernListState = {}
self._setToView = []
self._grouppedList = []
self._idxListGroupped = {}
self._ungrouppedList = []
self._idxListUngroupped = {}
self._listKeyGlyphsLeft = {}
self._listKeyGlyphsRight = {}
self._errorpairslist = []
self._lastSelectedIdx = 0
self._positionYselected = 0
self._lineSize = 20 # 1800 # 1800=1000upm; 2400=2000upm
self._previewGlyph = previewGlyph
self._previewWidthHalf = 40
if previewGlyph:
self._lineSize = 45
self._scalefactorUI = 1
self._lineCount = 0
self._sortName = None
self._sortReverse = None
self.groupsSortedTop = False
self._viewingMode = idModeSelected
self._filterMode = idFilterBoth
self.darkmode = KERNTOOL_UI_DARKMODE
self.darkmodeWarm = KERNTOOL_UI_DARKMODE_WARMBACKGROUND
self.maxX = 0
self._selectionCallback = selectionCallback
self._commandCallback = commandCallback
self.showselection = False
self._setupView(self.nsViewClass, (xw, yw, tx, ty)) # (0, 0, -0, 106)
self.macos = MACOS_VERSION
self.canvas = Canvas((0, 0, -0, -0),
delegate = self, # canvasSize = (100, 101),
hasHorizontalScroller = False,
hasVerticalScroller = True,
autohidesScrollers = True,
# backgroundColor = NSColor.whiteColor(),
drawsBackground = False,
# acceptsMouseMoved = True
)
self.canvas.scrollView.getNSScrollView().setBorderType_(NSNoBorder)
def updatePanel(self):
self.canvas.update()
def getCorrectPreviewWidth(self):
# g = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
# list(filter(lambda n: n.name in g, self.font))
s = sorted(self.font, key = lambda w: w.width)
wp = s[-1].width * .030
self._previewWidthHalf = wp + 10
if self._previewWidthHalf > 75:
self._previewWidthHalf = 75
def setFont(self, font):
self.font = font
self.hashKernDic = TDHashKernDic(font)
self.kernDB = TDKernDB(self.font, self.hashKernDic)
# self.kernDB.makeSortedList()
self.getCorrectPreviewWidth()
self.refreshView()
def updateStatusbar(self):
self._window.menuStatusBar.setLabelValue(self._window.labelTotalPairsID, str(len(self.font.kerning)))
self._window.menuStatusBar.setLabelValue(self._window.labelShowedPairsID, str(len(self._viewArray)))
self._window.menuStatusBar.setLabelValue(self._window.labelSelectedPairsID, str(len(self._selectedLines)))
def setPreviewMode(self, previewMode = False):
self._previewGlyph = previewMode
self._lineSize = 20 # 1800 # 1800=1000upm; 2400=2000upm
if self._previewGlyph:
self._lineSize = 45
self.refreshView()
self.scrollToLine(0)
def resetView(self):
self.compileLines(self.kernDB.db, sorting = self._sortName, reverse = self._sortReverse)
self.scrollToLine(0)
def setViewingMode(self, mode = idModeShowAll, sorting = None, reverse = False, filterMode = idFilterBoth):
self._viewingMode = mode
self._filterMode = filterMode
if sorting:
self._sortName = sorting
if reverse:
self._sortReverse = reverse
else:
if reverse != self._sortReverse:
self._sortReverse = reverse
if self._viewingMode == idModeSelected:
self.setGlyphsToView(self.font.selection, filterMode = filterMode)
else:
self.compileLines(self.kernDB.db, sorting = self._sortName, reverse = self._sortReverse)
self.scrollToLine(0)
def refreshView(self, fullrefresh = True):
if self._viewingMode == idModeSelected:
self.setGlyphsToView(self.font.selection, filterMode = self._filterMode)
else:
if fullrefresh:
self.compileLines(self.kernDB.db, sorting = self._sortName, reverse = self._sortReverse)
def scrollToLine (self, linenumber):
if not self._viewArray: return
visibleWidth = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.width
visibleHeight = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.height
posXscroller = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.x
posYscroller = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.y
scale = self._scalefactorUI
xpos = 0
ypos = 0
if linenumber < 0 or linenumber > len(self._viewArray): return
self._selectedLine = linenumber
self._positionYselected = self._viewArray[linenumber][idY0]
firstItemInLine = linenumber
maxY = self._viewArray[-1][idY0]
y0 = (maxY + (-1 * self._positionYselected)) * scale
y1 = y0 + (self._lineSize * scale)
if y0 < posYscroller:
ypos = y0
elif y1 - posYscroller > visibleHeight:
offset = visibleHeight - self._lineSize * scale
ypos = y0 - offset # + posYscroller
else:
return firstItemInLine
point = NSPoint(xpos, ypos)
self.canvas.scrollView.getNSScrollView().contentView().scrollToPoint_(point)
self.canvas.scrollView.getNSScrollView().reflectScrolledClipView_(
self.canvas.scrollView.getNSScrollView().contentView())
return firstItemInLine
def mouseDown (self, event):
visibleHeight = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.height
X_window_pos = event.locationInWindow().x
Y_window_pos = event.locationInWindow().y
X_local_pos = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.x
Y_local_pos = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.y
xW, yW, x2W, y2W = self.getPosSize()
x = X_window_pos + X_local_pos # - self._letterStep
y = Y_window_pos + y2W + Y_local_pos
self.showselection = True
maxY = 0
if self._viewArray:
maxY = self._viewArray[-1][idY0]
yoff = ((self._lineCount - 1) * self._lineSize) # + Ygap
if yoff < visibleHeight:
yoff = visibleHeight - self._lineSize * self._lineCount
else:
yoff = 0
for idx, item in enumerate(self._viewArray):
y0 = maxY + (-1 * item[idY0]) + yoff
y1 = y0 + self._lineSize
if (y0 < y and y < y1):
if decodeModifiers(event.modifierFlags()) == 'Cmd':
if idx in self._selectedLines:
self._selectedLines.remove( idx )
else:
self._selectedLines.append( idx )
self._lastSelectedIdx = idx
elif decodeModifiers(event.modifierFlags()) == 'Shift':
if idx > self._lastSelectedIdx:
for i in range(self._lastSelectedIdx, idx+1):
if i not in self._selectedLines:
self._selectedLines.append( i )
self._lastSelectedIdx = i
elif idx < self._lastSelectedIdx:
# print ('revers')
for i in range(idx, self._lastSelectedIdx):
if i not in self._selectedLines:
self._selectedLines.append( i )
self._lastSelectedIdx = i
# print ('shift last', self._lastSelectedIdx)
else:
self._selectedLines = []
self._selectedLines.append( idx )
self._lastSelectedIdx = idx
self.canvas.update()
self.updateStatusbar()
if self._selectionCallback:
self._selectionCallback(self._selectedLines)
break
def keyDown (self, event):
keypress = decodeCanvasKeys(event.keyCode(), event.modifierFlags())
commands = translateKeyCodesToKernToolCommands(keypress)
# if commands['command'] == COMMAND_ENTER:
# self.showselection = True
# self.menuSelectedCallback()
# self.canvas.update()
if commands['command'] == COMMAND_ESCAPE:
self._selectedLines = []
self.canvas.update()
if commands['command'] == COMMAND_SELECT_ALL:
self._selectedLines = []
for i, item in enumerate(self._viewArray):
self._selectedLines.append(i)
self.updateStatusbar()
self.canvas.update()
if commands['command'] == COMMAND_DELETE_PAIR:
# self.showselection = True
if self._commandCallback:
self._commandCallback({'command': COMMAND_DELETE_PAIR})
if commands['command'] == COMMAND_SPACEKEY:
self.prepare4sendSelectedPairsToKernTool()
if commands['command'] == COMMAND_NEXT_LINE_SHORT:
n = self._lastSelectedIdx + 1
if n > len(self._viewArray)-1:
n = 0
self.scrollToLine(n)
item = self._viewArray[n]
# if decodeModifiers(event.modifierFlags()) == 'Shift':
# if item['idx'] in self._selectedLines:
# self._selectedLines.remove(item['idx'])
# else:
# self._selectedLines.append(item['idx'])
# self._lastSelectedIdx = item['idx']
# else:
self._selectedLines = []
self._selectedLines.append(n)
self._lastSelectedIdx = n
self.canvas.update()
if commands['command'] == COMMAND_PREV_LINE_SHORT:
n = self._lastSelectedIdx - 1
if n == -1:
n = len(self._viewArray)-1
self.scrollToLine(n)
item = self._viewArray[n]
# if decodeModifiers(event.modifierFlags()) == 'Shift':
# if item['idx'] in self._selectedLines:
# self._selectedLines.remove(item['idx'])
# else:
# self._selectedLines.append(item['idx'])
# self._lastSelectedIdx = item['idx']
# else:
self._selectedLines = []
self._selectedLines.append(n)
self._lastSelectedIdx = n
self.canvas.update()
self.updateStatusbar()
def refreshKernPair(self, pair):
self.kernDB.updateKernPair(pair)
# save selected pairs
pairsselected = []
for i in self._selectedLines:
item = self._viewArray[i]
pairsselected.append((item[idNameL], item[idNameR]))
self._selectedLines = []
self.refreshView()
# restore selected pairs, except deleted
for pair in pairsselected:
if pair in self._pairsList2idx:
self._selectedLines.append(self._pairsList2idx[pair])
self.canvas.update()
def setGlyphsToView(self, glyphlist, filterMode = idFilterBoth):
self._currentKernListState = {}
listL = []
listR = []
if not glyphlist: return
self._setToView = []
# self._setToView = list(glyphlist)
for glyphname in glyphlist:
if filterMode == idFilterSide1 or filterMode == idFilterBoth:
self._setToView.append(glyphname)
self._setToView.append(self.hashKernDic.getGroupNameByGlyph(glyphname, side = 'L'))
if filterMode == idFilterSide2 or filterMode == idFilterBoth:
self._setToView.append(glyphname)
self._setToView.append(self.hashKernDic.getGroupNameByGlyph(glyphname, side = 'R'))
for pair, item in self.kernDB.db.items():
(l,r) = pair
# if l in self._setToView or r in self._setToView:
if l in self._setToView and (filterMode == idFilterSide1 or filterMode == idFilterBoth):
self._currentKernListState[(l, r)] = item
if r in self._setToView and (filterMode == idFilterSide2 or filterMode == idFilterBoth):
self._currentKernListState[(l, r)] = item
if self._currentKernListState:
self.compileLines(self._currentKernListState, sorting = self._sortName, reverse = self._sortReverse)
else:
self._viewArray = []
# self.resetView()
self.canvas.update()
self.updateStatusbar()
def getPairByIndex(self, idx):
try:
item = self._viewArray[idx]
except IndexError:
print('wrong pair index:', idx)
return None
l, r, n, kl, kr = item[idNameL], item[idNameR], item[idNote], self.hashKernDic.getKeyGlyphByGroupname(item[idNameL]), self.hashKernDic.getKeyGlyphByGroupname(item[idNameR])
if n == PAIR_INFO_EMPTY:
return None
if (l,r) in self.font.kerning:
return (l,r, kl,kr)
else:
print('pair not found:', l, r)
def getListofSelectedPairs_KeyGlyphs(self):
pairs = []
leftlist = []
rightlist = []
pairsbyglyphkey = []
if self._selectedLines:
for idx in sorted(self._selectedLines):
p = self.getPairByIndex(idx)
if p:
pairs.append(p)
# print(self.getPairByIndex(idx))
for pair in pairs:
l,r, kl, kr = pair
if l.startswith(ID_KERNING_GROUP):
if l in self.font.groups and len(self.font.groups[l])>0:
l = self.font.groups[l][0]
# else:
# leftlist.append(l)
if r.startswith(ID_KERNING_GROUP):
if r in self.font.groups and len(self.font.groups[r])>0:
r = self.font.groups[r][0]
# else:
# rightlist.append(r)
if l and r:
pairsbyglyphkey.append((l,r))
return pairsbyglyphkey
def getListOfSelectedPairs(self):
pairs = []
if self._selectedLines:
for idx in sorted(self._selectedLines):
p = self.getPairByIndex(idx)
if p:
l, r, kl, kr = p
pairs.append((l, r))
return pairs
def prepare4sendSelectedPairsToKernTool(self):
if self._commandCallback:
pairs = self.getListofSelectedPairs_KeyGlyphs()
# print (pairs)
self._commandCallback({'command':COMMAND_SPACEKEY, 'pairs': pairs})
def compileLines(self, listkern = None, sorting = 'left', reverse = False):
lineStep = self._lineSize
# if not listkern: return
self._viewArray = []
self._pairsList2idx = {}
self._sortName = sorting
self._sortReverse = reverse
Ypos = 0
idx = 0
self._currentKernListState = listkern
self.kernDB.makeSortedList(self._currentKernListState, order = sorting, reverse = reverse)
# print(self.kernDB.sortedList)
for item in self.kernDB.sortedList:
l , r = item[0]
sl, sr, gl, gr, v, n, kgl, kgr = item[1]
self._viewArray.append([
Ypos, # idY0
sl, #getDisplayNameGroup(l), # idDispL
sr, #getDisplayNameGroup(r), # idDispR
v, # idKern
n, # idNote
l, # idNameL
r, # idNameR
kgl, #self.hashKernDic.getKeyGlyphByGroupname(l), # idGlyphL
kgr, #self.hashKernDic.getKeyGlyphByGroupname(r), # idGlyphR
gl, # idGroupL
gr # idGroupR
])
self._pairsList2idx[(l, r)] = idx
Ypos += lineStep
idx += 1
self._lineCount = idx
self.recalculateFrame()
self.canvas.update()
self.updateStatusbar()
def recalculateFrame (self, canvaswidth=None):
# scalefactor = 1#self._scalefactorUI
if canvaswidth:
visibleWidth = canvaswidth
else:
visibleWidth = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.width
self.visibleWidth = visibleWidth
visibleHeight = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.height
Y_local_pos = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.y
X_local_pos = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.x
yoff = self._lineCount * self._lineSize #* scalefactor # + self.Ygap*2
Y_min_window = Y_local_pos
Y_max_window = Y_local_pos + visibleHeight
if yoff < visibleHeight:
yoff = visibleHeight
self.canvas._view.setFrame_(NSMakeRect(0, 0, visibleWidth, yoff))
self.maxX = visibleWidth + 60
def scrollwheel (self, event):
# print (event)
#
scaleUI = self._scalefactorUI
# deltaX = event.deltaX()
deltaY = event.deltaY()
if deltaY == 0 : return
scaleScroll = 5  # abs(deltaY)/10
# if abs(deltaY) < 3:
# scaleScroll = .2
# if abs(deltaY) > 3 and abs(deltaY) < 8:
# scaleScroll = .6
# if abs(deltaY) > 8 and abs(deltaY) < 15:
# scaleScroll = 1.1
# if abs(deltaY) > 30:
# scaleScroll = 10
visibleWidth = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.width
visibleHeight = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.height
posXscroller = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.x
posYscroller = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.y
xW, yW, wW, hW = self.getPosSize()
# xpoint = posXscroller - (deltaX * scaleScroll)
ypoint = posYscroller + (deltaY * scaleScroll)
# if xpoint > self.maxXX - visibleWidth: # - visibleWidth:
# xpoint = self.maxXX - visibleWidth # - self.visibleWidth #- visibleWidth
# if xpoint < xW:
# xpoint = 0
if ypoint < 0:
ypoint = 0
# return
maxY = 0
if self._viewArray:
maxY = (self._lineCount -1) * self._lineSize # self._viewArray[-1]['y0']
if posYscroller + visibleHeight - self._lineSize * scaleUI > maxY * scaleUI:
ypoint = maxY * scaleUI - visibleHeight + self._lineSize * scaleUI
elif posYscroller + visibleHeight - self._lineSize * scaleUI == maxY * scaleUI and deltaY > 0:
ypoint = maxY * scaleUI - visibleHeight + self._lineSize * scaleUI
point = NSPoint(0, ypoint)
self.canvas.scrollView.getNSScrollView().contentView().scrollToPoint_(point)
self.canvas.scrollView.getNSScrollView().reflectScrolledClipView_(
self.canvas.scrollView.getNSScrollView().contentView())
# time.sleep(0.09)
if self.macos in ('15', '16'):
self.canvas.update()
# self.canvas.update()
def draw (self):
def drawException (x, y):
s = 1.6
newPath()
moveTo((x + s * 4, y + s * 8))
lineTo((x + s * 1, y + s * 3))
lineTo((x + s * 4, y + s * 3))
lineTo((x + s * 4, y + s * 0))
lineTo((x + s * 7, y + s * 5))
lineTo((x + s * 4, y + s * 5))
closePath()
drawPath()
self.recalculateFrame()
self._viewFontName = 'Menlo'
self._viewFontSize = 12
font(self._viewFontName, fontSize = self._viewFontSize)
visibleWidth = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.width
visibleHeight = self.canvas.scrollView.getNSScrollView().documentVisibleRect().size.height
Y_local_pos = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.y
X_local_pos = self.canvas.scrollView.getNSScrollView().documentVisibleRect().origin.x
Y_min_window = Y_local_pos
Y_max_window = Y_local_pos + visibleHeight
# print (Y_min_window, Y_max_window, visibleHeight)
X_min_window = X_local_pos
X_max_window = X_local_pos + visibleWidth
stroke(0, 0, 0, 0)
strokeWidth(0)
yoff = ((self._lineCount - 1) * self._lineSize) #+ Ygap
if yoff < visibleHeight:
yoff = visibleHeight - self._lineSize #+ 500
# if self._viewArray:
# maxY = self._viewArray[-1]['y0']
maxY = (self._lineCount -1) * self._lineSize
colorBKG = (.9, .9, .9, .8)
# self.darkmode = True
if self.darkmode:
colorBKG = ((.07,.07,.07,1))
fillRGB(colorBKG)
rect(0, 0, visibleWidth, Y_max_window)
save()
translate(0, yoff )
Xpos = 0
Ypos = 0
XtextShift = 17
for idx, item in enumerate(self._viewArray):
# Xpos = item['x0']
# Ypos = item['y0']
item[idY0] = Ypos
# (_d0, _d1,_d2,_d3,_d4,_d5)
# DRAW if it visible in window frame
if (Y_min_window - self._lineSize < ((maxY + (-1 * Ypos)))
and Y_max_window + self._lineSize > ((maxY + (-1 * Ypos)))) :
Ycontrol = -1 * Ypos
wValue = 40 # width of Value row
wNote = 40 # width of Note row
w = visibleWidth-wValue-wNote # width of left+right row
xR = w / 2 # width of Left/Right row and start position of Right row
xRp = xR
wtxt = xR
xV = xR + (w / 2) # start position Values row
xN = xV + wValue # start position Note row
textYshift = 0
phalf = 0
if self._previewGlyph:
phalf = self._previewWidthHalf
xR += phalf
wtxt -= phalf
textYshift = self._lineSize / 2 - 10
colorBKG = (.9, .9, .9, .8)
colorSRT = (.85,.85,.85,.8)
colorTXT = COLOR_BLACK
selected = False
colorGRP = COLOR_GREY_50
# self.darkmode = True
if self.darkmode:
colorBKG = ((.18,.18,.18,.8))
colorSRT = ((.12,.12,.12,.8))
colorTXT = (COLOR_GREY_20)
colorGRP = (COLOR_GREY_50)
if idx in self._selectedLines:
colorBKG = (0,0,.5,.6)
if self.darkmode:
colorBKG = (.18,.18,.9,.6)
colorTXT = COLOR_WHITE
selected = True
colorGRP = COLOR_GREY_10
fillRGB(colorBKG)
if not self._previewGlyph:
rect(Xpos, Ycontrol, visibleWidth, self._lineSize)
else:
rect(Xpos, Ycontrol, xRp - phalf, self._lineSize)
rect(xR, Ycontrol, wtxt + wValue + wNote, self._lineSize)
# rect(xV, Ycontrol, wValue, self._lineSize)
if item[idKern] < 0:
colorKRN = COLOR_KERN_VALUE_NEGATIVE
if selected:
colorKRN = (1,.2,.2,1)
elif item[idKern] > 0:
colorKRN = COLOR_KERN_VALUE_POSITIVE
if selected:
colorKRN = (0.4,1,0.4,1)
else:
colorKRN = colorTXT
# DRAW Left row
if self._sortName == 'left' and not selected:
fillRGB(colorSRT)
# if self._previewGlyph:
# fillRGB(COLOR_GREY_30)
else:
fillRGB(colorBKG)
rect(Xpos, Ycontrol, xRp-phalf, self._lineSize)
if item[idGroupL]:#.startswith('@.'):
fillRGB(colorGRP)
text('@', (Xpos + 5, Ycontrol + 4 + textYshift))
if self._previewGlyph:
if selected:
fillRGB((.3, .3, .3, .5))
rect(xRp - phalf, Ycontrol, phalf * 2, self._lineSize)
# else:
# fillRGB((.9, .9, .9, .8))
# rect(xRp - phalf, Ycontrol , phalf * 2, self._lineSize )
fillRGB(colorTXT)
ltxt = item[idDispL]
_w,_h = textSize(ltxt)
if _w + 5 + XtextShift > wtxt :
font(self._viewFontName, fontSize = self._viewFontSize-2)
text(ltxt, (Xpos + XtextShift, Ycontrol + 4 + textYshift))
else:
font(self._viewFontName, fontSize = self._viewFontSize )
text(ltxt, (Xpos + XtextShift, Ycontrol + 3 + textYshift))
if self._previewGlyph:
if selected:
fillRGB((.9, .9, .9, .8))
rect(xRp - phalf, Ycontrol, phalf * 2, self._lineSize)
else:
fillRGB((.9, .9, .9, .8))
if self.darkmode:
fillRGB((.8, .8, .8, .8))
rect(xRp - phalf, Ycontrol , phalf * 2, self._lineSize )
# DRAW Right row
if self._sortName == 'right' and not selected:
fillRGB(colorSRT)
# if self._previewGlyph:
# fillRGB(COLOR_GREY_30)
else:
fillRGB(colorBKG)
rect(xR, Ycontrol, wtxt, self._lineSize)
font(self._viewFontName, fontSize = self._viewFontSize)
if item[idGroupR]:#.startswith('@.'):
fillRGB(colorGRP)
text('@', (xR + 5, Ycontrol + 4 + textYshift))
fillRGB(colorTXT)
rtxt = item[idDispR]
_w, _h = textSize(rtxt)
if _w + XtextShift +5 > wtxt :
font(self._viewFontName, fontSize = self._viewFontSize - 2)
text(rtxt, (xR + XtextShift, Ycontrol + 4 + textYshift))
else:
font(self._viewFontName, fontSize = self._viewFontSize)
text(rtxt, (xR + XtextShift, Ycontrol + 3 + textYshift))
if self._sortName == 'values' and not selected:
fillRGB(colorSRT)
# if self._previewGlyph:
# fillRGB(COLOR_GREY_30)
else:
fillRGB(colorBKG)
font('Menlo', fontSize = self._viewFontSize-1)
# if selected:
# fillRGB((.3,.3,.3,.3))
rect(xV, Ycontrol, wValue, self._lineSize)
# openTypeFeatures(tnum = True)
alignX, _y = textSize(str(item[idKern]))
alignX = xV + wValue - alignX
fillRGB(colorKRN)
# roundedRect(x, y, width, height, radius)
text(str(item[idKern]), (alignX - 5, Ycontrol + 3))
font(self._viewFontName, fontSize = self._viewFontSize)
if self._sortName == 'notes' and not selected:
fillRGB(colorSRT)
# if self._previewGlyph:
# fillRGB(COLOR_GREY_30)
else:
fillRGB(colorBKG)
rect(xN, Ycontrol, wNote, self._lineSize)
fillRGB(colorTXT)
if PAIR_INFO_EXCEPTION == item[idNote]:
fillRGB(colorKRN)
drawException(xN +5 , Ycontrol + 4)
elif PAIR_INFO_ORPHAN == item[idNote]:
fillRGB(colorKRN)
drawException(xN +5, Ycontrol + 4)
drawException(xN + 10 +5, Ycontrol + 4)
elif PAIR_INFO_ATTENTION == item[idNote]:
text(chr(int('25CB',16)), (xN + 6, Ycontrol + 3))
elif PAIR_INFO_EMPTY == item[idNote]:
fillRGB(COLOR_KERN_VALUE_NEGATIVE)
if selected:
# colorKRN = (1,.2,.2,1)
fillRGB((1,.2,.2,1))
text(chr(int('2716', 16)), (xN + 3 +5, Ycontrol + 3))
# else:
# text(item['note'], (xN , Ycontrol + 3)) # item['idx']+
# fillRGB(COLOR_GREY_20)
if self._previewGlyph:
scalefactor = .030 # .05
if PAIR_INFO_EMPTY == item[idNote]:
fillRGB(COLOR_KERN_VALUE_NEGATIVE)
font(self._viewFontName, fontSize = self._viewFontSize + 20)
_xe, _ye = textSize(chr(int('2716', 16)))
text(chr(int('2716', 16)), (xRp - _xe/2 , Ycontrol + 4))
font(self._viewFontName, fontSize = self._viewFontSize)
else:
glyphL = self.font[item[idGlyphL]]
glyphR = self.font[item[idGlyphR]]
fillRGB(COLOR_BLACK)
if checkOverlapingGlyphs(self.font, glyphL, glyphR, item[idKern]):
fillRGB(COLOR_KERN_VALUE_NEGATIVE)
save()
translate(xRp - glyphL.width * scalefactor - (item[idKern] / 2) * scalefactor, Ycontrol + 12)
scale(scalefactor)
penL = CocoaPen(self.font)
glyphL.draw(penL)
drawPath(penL.path)
restore()
save()
translate(xRp + (item[idKern] / 2) * scalefactor, Ycontrol + 12)
scale(scalefactor)
penR = CocoaPen(self.font)
glyphR.draw(penR)
drawPath(penR.path)
restore()
# TODO: in Selection mode, try splitting each column by glyph
if idx+1<len(self._viewArray):
if True: #self._viewingMode == idModeShowAll:
colorStroke = COLOR_GREY_20
colorSeparate = COLOR_GREY_30
if self.darkmode == True:
colorStroke = ((.07,.07,.07,1))
colorSeparate = ((.3,.3,.3,1))
fillRGB(colorStroke)
if self._sortName == 'left':
if item[idDispL] != self._viewArray[idx+1][idDispL]:
fillRGB(colorSeparate)
rect(Xpos, Ycontrol, visibleWidth, 1)
if self._sortName == 'right':
if item[idDispR] != self._viewArray[idx+1][idDispR]:
fillRGB(colorSeparate)
rect(Xpos, Ycontrol, visibleWidth, 1)
if self._sortName == 'values':
if item[idKern] != self._viewArray[idx+1][idKern]:
fillRGB(colorSeparate)
rect(Xpos, Ycontrol, visibleWidth, 1)
if self._sortName == 'notes':
if item[idNote] != self._viewArray[idx+1][idNote]:
fillRGB(colorSeparate)
rect(Xpos, Ycontrol, visibleWidth, 1)
rect(Xpos, Ycontrol, visibleWidth, 1)
# else:
# fillRGB(COLOR_GREY_20)
# if item[idDispL] != self._viewArray[idx + 1][idDispL]:
# fillRGB(COLOR_GREY_30)
# rect(Xpos, Ycontrol, xR, 1)
# else:
# fillRGB(COLOR_GREY_20)
# rect(Xpos, Ycontrol, xR, 1)
#
# if item[idDispR] != self._viewArray[idx + 1][idDispR]:
# fillRGB(COLOR_GREY_30)
# rect(xR, Ycontrol, xR, 1)
# else:
# fillRGB(COLOR_GREY_20)
# rect(xR, Ycontrol, xR, 1)
# if item[idKern] != self._viewArray[idx + 1][idKern]:
# fillRGB(COLOR_GREY_30)
# rect(xV, Ycontrol, wValue, 1)
# else:
# fillRGB(COLOR_GREY_20)
# rect(xV, Ycontrol, wValue, 1)
# if item[idNote] != self._viewArray[idx + 1][idNote]:
# fillRGB(COLOR_GREY_30)
# rect(xN, Ycontrol, wNote, 1)
# else:
# fillRGB(COLOR_GREY_20)
# rect(xN, Ycontrol, wNote, 1)
# if self._sortName != 'right' and item[idDispL] != self._viewArray[idx+1][idDispL]:
# fillRGB(COLOR_GREY_30)
# rect(Xpos, Ycontrol, visibleWidth, 1)
#
#
# elif self._sortName != 'left' and item[idDispR] != self._viewArray[idx+1][idDispR]:
# fillRGB(COLOR_GREY_30)
# rect(Xpos, Ycontrol, visibleWidth, 1)
#
# if item[idDispR] != self._viewArray[idx + 1][idDispR]:
# fillRGB(COLOR_GREY_20)
# rect(xR, Ycontrol, xR, 1)
# if item[idDispL] != self._viewArray[idx + 1][idDispL]:
# fillRGB(COLOR_GREY_20)
# rect(Xpos, Ycontrol, xR, 1)
Ypos += self._lineSize
restore()
# self.updateStatusbar()
# TEST Section
# if __name__ == "__main__":
import os
import sys
def saveKerning (font, selectedkern, filename):
print('=' * 60)
print(font.info.familyName, font.info.styleName)
print('Saving kerning to file:')
fn = filename
print(fn)
txt = ''
for (l, r) in sorted(selectedkern):
txt += '%s %s %s\n' % (l, r, str(font.kerning[(l, r)]))
with open(fn, mode = 'w') as groupsfile:
groupsfile.write(txt)
print(len(selectedkern), 'pairs saved..')
print('Done.')
def loadKernFile (font, filename, mode='replace'): # replace / add
fn = filename
if os.path.exists(fn):
print('=' * 60)
print(font.info.familyName, font.info.styleName)
print('Loading kerning from file:')
print(fn)
pairsbefore = len(font.kerning)
pairsimported = 0
with open(fn, mode = 'r') as f:
for line in f:
line = line.strip()
if not line.startswith('#') and line != '':
parts = line.split(' ')
left = parts[0]
right = parts[1]
value = int(round(float(parts[2])))
fl = left in font.groups or left in font
fr = right in font.groups or right in font
if fl and fr:
font.kerning[(left, right)] = value
pairsimported += 1
else:
print('Group or Glyph not found:', left, right, value)
print('Kerning loaded..')
print('pairs before:\t', pairsbefore)
print('pairs imported:\t', pairsimported)
print('total pairs:\t', len(font.kerning))
else:
print('ERROR! kerning file not found')
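# A minimal usage sketch for the two helpers above. It assumes a RoboFont
# scripting environment (CurrentFont() comes from RoboFont); the file path
# and the example pairs are hypothetical and only illustrate the round trip.
def _kernfileRoundTripExample():
font = CurrentFont()
pairs = [('A', 'V'), ('T', 'o')] # hypothetical pairs assumed to exist in font.kerning
saveKerning(font, pairs, '/tmp/kerning.txt') # writes one 'left right value' line per pair
loadKernFile(font, '/tmp/kerning.txt') # re-imports the same pairs into font.kerning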
class TDKernFinger(BaseWindowController):
def __init__ (self):
self.w = Window((350, 600), "KernFinger", minSize = (200, 100))
# self.w.getNSWindow().setBackgroundColor_(NSColor.controlAccentColor())
menuH = 17
sepH = 5
self.modeShowAll = True
self.previewMode = False
self.filterMode = idFilterBoth
self.pairsPerLine = 4
self.sendMode = 'groupped'
self.observerID = getUniqName()
self.w.menuMain = TDControlPanel((5, sepH, -5, 17), # -160
parentWindow = self)
self.w.menuMain.addControlItem(title = 'select font', callback = self.fontMenuCall)
self.w.menuMain.addMenuSeparator()
self.w.menuMain.addControlItem(title = 'append from file', callback = self.appendKernFromFile)
self.w.menuMain.addMenuSeparator()
self.w.menuMain.addControlItem(title = 'save to file', callback = self.saveSelectedPairs2file)
self.w.menuFilters = TDControlPanel((5, sepH + menuH + sepH, -5, 17), # -160
parentWindow = self)
self.swShowMode = self.w.menuFilters.addSwitchControlItem(switchers = (('all pairs','all'),
('from selection','selection')),
callback = self.modeShowSelectionCallback)
self.w.menuFilters.setSwitchItem(self.swShowMode, switch = 'all pairs')
self.w.menuFilters.addMenuSeparator()
self.filterSide = self.w.menuFilters.addSwitchControlItem(switchers = (('side1',idFilterSide1),
('both',idFilterBoth),
('side2',idFilterSide2)),
callback = self.modeFiltersSelectionCallback)
self.w.menuFilters.setSwitchItem(self.filterSide,'both')
self.w.menuFilters.enableControlItem(self.filterSide, False)
self.w.menuFilters.addMenuSeparator()
self.lblpreview = self.w.menuFilters.addControlItem(title = chr(int('1F170', 16)) + chr(int('1F185', 16)), # 1F170 1F185
callback = self.setPreviewMode)
self.w.kernlistControl = TDKernListControl((5, sepH + menuH + sepH + menuH + 2 ,-5,17),
selectionCallback = self.kernListControlCallback)
self.w.kernlist = TDKernListView((5,sepH + menuH + sepH + menuH + menuH +1,-5,-130 + 41), # 130
selectionCallback = self.KLselectionCallback,
commandCallback = self.commandsKernListCallback,
window = self.w)
self.w.g = Group((0,-128 + 41,-0,100)) # 128
self.w.g.menuOperation = TDControlPanel((5, 0, -5, 17), # -160
parentWindow = self)
self.w.g.menuOperation.addControlItem(title = 'delete',
hotkey = chr(int('232B', 16)),
command = COMMAND_DELETE_PAIR,
callback = self.deleteSelectedPairsCallback)
self.w.g.menuOperation.addControlItem(title = 'send to KernTool',
hotkey = 'space',
command = COMMAND_SPACEKEY,
callback = self.w.kernlist.prepare4sendSelectedPairsToKernTool)
# width 380 -5 +5 margins
# reserved 30
# 380 - 10 -30 /2 = 175 width of EditText
# xlbl = 5+ 150 = 155
# x2edit = 155 + 30
Yctrl = 19 # 60
ww = 350
margins = 5
wlbl = 30
wedit = (ww - margins*2 - wlbl) /2
xlbl = margins + wedit
x2edit = wedit + wlbl + margins
self.w.g.preEdit = EditText((5, Yctrl, wedit, 17),text = '/H/H', sizeStyle = 'small')
self.w.g.preEdit.getNSTextField().setBordered_(False)
# self.w.g.preEdit.getNSTextField().setFont_(NSFont.fontWithName_size_('.SFCompactText-Regular', 12))
self.w.g.lbl = TextBox((xlbl,Yctrl+4,wlbl,17),'_::_',sizeStyle = 'small', alignment = 'center')
self.w.g.postEdit = EditText((x2edit, Yctrl, wedit, 17), text = '/n/n/o/o',sizeStyle = 'small')
self.w.g.postEdit.getNSTextField().setBordered_(False)
# self.w.g.postEdit.getNSTextField().setFont_(NSFont.fontWithName_size_('.SFCompactText-Regular', 12))
self.w.g.menuPairsCount = TDControlPanel((5, Yctrl + 19, -5, 17), # -160
parentWindow = self )
self.swgrp = self.w.g.menuPairsCount.addSwitchControlItem(switchers = (('groupped','groupped'),
('expanded','expanded')),
callback = self.switchSend2KTCallback)
self.w.g.menuPairsCount.setSwitchItem(self.swgrp, switch = 'groupped')
self.w.g.menuPairsCount.addMenuSeparator()
self.swpairs = self.w.g.menuPairsCount.addSwitchControlItem(switchers = (('1',1),
('2',2),
('3',3),
('4',4),
('pairs/line',5)),
callback = self.switchPairsPerLineCallback)
self.w.g.menuPairsCount.setSwitchItem(self.swpairs,'4')
self.w.menuStatusBar = TDControlPanel((5, -27, -5, 17), # -160
parentWindow = self,
selectionCallback = None,
keyPressedCallback = None,
active = False )
self.w.labelTotalPairsID = self.w.menuStatusBar.addLabelItem(title = 'pairs', value = 0)
self.w.menuStatusBar.addMenuSeparator(type = ' ')
self.w.labelShowedPairsID = self.w.menuStatusBar.addLabelItem(title = 'shown', value = 0)
self.w.menuStatusBar.addMenuSeparator(type = ' ')
self.w.labelSelectedPairsID = self.w.menuStatusBar.addLabelItem(title = 'selected', value = 0)
self.w.kernlistControl.setupControl({'title':' ','name':'left'},
{'title':' ','name':'right'},
{'title':'','name':'value'},
{'title':'','name':'note'}) # chr(int('21B9',16))
self.font = CurrentFont()
self.fontID = getFontID(self.font)
self.w.kernlist.setFont(self.font)
self.hashKernDic = TDHashKernDic(self.font)
self.w.setTitle(title = 'KernFinger: %s %s' % (self.font.info.familyName, self.font.info.styleName))
self.w.kernlist.setViewingMode(mode = self.modeShowAll,
sorting = 'left', reverse = False, filterMode = self.filterMode)
self.w.kernlistControl.selectMenuItem('left', reversed = False)
self.leftReverse = False
self.rightReverse = False
self.valuesReverse = False
self.notesReverse = False
self.sortName = None
addObserver(self, "glyphChanged", "currentGlyphChanged")
addObserver(self, 'refreshKernView', EVENT_REFRESH_ALL_OBSERVERS)
# self.w.bind('close', self.windowCloseCallback)
self.w.bind('resize', self.windowResize)
self.setUpBaseWindowBehavior()
self.w.open()
self.w.kernlist.scrollToLine(0)
def windowCloseCallback (self, sender):
removeObserver(self, "currentGlyphChanged")
removeObserver(self, EVENT_REFRESH_ALL_OBSERVERS)
# super(MyW, self).windowCloseCallback(sender)
# print('KernFinger: DONE.')
def windowResize (self, sender):
x,y, w, h = sender.getPosSize()
# print (posSize)
Yctrl = 19 # 60
# ww = 320
ww = w
margins = 5
wlbl = 30
wedit = (ww - margins * 2 - wlbl) / 2
xlbl = margins + wedit
x2edit = wedit + wlbl + margins
self.w.g.preEdit.setPosSize((5,Yctrl, wedit, 17))
self.w.g.lbl.setPosSize((xlbl, Yctrl+4, wlbl, 17))
self.w.g.postEdit.setPosSize((x2edit, Yctrl, wedit, 17))
self.w.menuMain.updatePanel()
self.w.menuStatusBar.updatePanel()
self.w.g.menuPairsCount.updatePanel()
self.w.menuFilters.updatePanel()
self.w.kernlist.updatePanel()
self.w.kernlistControl.updatePanel()
self.w.g.menuOperation.updatePanel()
def refreshKernView(self, info):
if info['fontID'] == self.fontID and info['observerID'] != self.observerID:
self.w.kernlist.refreshKernPair(info['pair'])
def glyphChanged(self, info):
self.w.kernlist.refreshView(fullrefresh = False)
def deleteSelectedPairsCallback(self):
pairs = self.w.kernlist.getListOfSelectedPairs()
# print('deleting pairs:')
if pairs:
for pair in pairs:
# print (pair)
if pair in self.font.kerning:
self.font.kerning.remove(pair)
self.w.kernlist.refreshKernPair(pair)
postEvent(EVENT_REFRESH_ALL_OBSERVERS, fontID = self.fontID, observerID = self.observerID)
def modeShowSelectionCallback(self, command):
if command == 'all':
self.modeShowAll = True
self.w.kernlist.setViewingMode(idModeShowAll)
self.w.menuFilters.enableControlItem(self.filterSide, False)
elif command == 'selection':
self.modeShowAll = False
self.w.kernlist.setViewingMode(idModeSelected, filterMode = self.filterMode)
self.w.menuFilters.enableControlItem(self.filterSide, True)
def modeFiltersSelectionCallback(self, command):
self.filterMode = command
self.w.kernlist.setViewingMode(idModeSelected, filterMode = self.filterMode)
def switchSend2KTCallback(self, mode):
self.sendMode = mode
def switchPairsPerLineCallback(self, mode):
self.pairsPerLine = mode
def commandsKernListCallback(self, info):
if info['command'] == COMMAND_SPACEKEY:
self.sendPairsToKernTool(info)
elif info['command'] == COMMAND_DELETE_PAIR:
self.deleteSelectedPairsCallback()
def sendPairsToKernTool(self, info):
patternLeft = tdGlyphparser.translateText(font = self.font,
text = self.w.g.preEdit.get())
patternRight = tdGlyphparser.translateText(font = self.font,
text = self.w.g.postEdit.get())
LPattern = ''
RPattern = ''
if patternLeft:
for l in patternLeft:
if l != '':
LPattern += '/%s' % l
if patternRight:
for r in patternRight:
if r != '':
RPattern += '/%s' % r
cl = None
cr = None
if self.sendMode == 'groupped':
line =''
count = 0
ppl = self.pairsPerLine
for idx, (l,r) in enumerate(info['pairs']):
if idx == 0:
cl = l
cr = r
line += '%s/%s/%s%s' % (LPattern, l ,r ,RPattern)
count +=1
if ppl !=5 and count == ppl:
line += '\\n'
count = 0
else:
line = ''
count = 0
ppl = self.pairsPerLine
listL = []
listR = []
for idx, (l,r) in enumerate(info['pairs']):
if idx == 0:
cl = l
cr = r
gl = self.hashKernDic.getGroupNameByGlyph(l,'L')
gr = self.hashKernDic.getGroupNameByGlyph(r,'R')
if gl.startswith(ID_KERNING_GROUP):
for kl in self.font.groups[gl]:
if gr.startswith(ID_KERNING_GROUP):
for kr in self.font.groups[gr]:
line += '%s/%s/%s%s' % (LPattern, kl, kr, RPattern)
count += 1
if ppl != 5 and count == ppl:
line += '\\n'
count = 0
else:
line += '%s/%s/%s%s' % (LPattern, kl, r, RPattern)
count += 1
if ppl != 5 and count == ppl:
line += '\\n'
count = 0
else:
if gr.startswith(ID_KERNING_GROUP):
for kr in self.font.groups[gr]:
line += '%s/%s/%s%s' % (LPattern, l, kr, RPattern)
count += 1
if ppl != 5 and count == ppl:
line += '\\n'
count = 0
else:
line += '%s/%s/%s%s' % (LPattern, l, r, RPattern)
count += 1
if ppl != 5 and count == ppl:
line += '\\n'
count = 0
# s = ''
# for i, l in enumerate(listL):
# line += '%s/%s/%s%s' % (LPattern, l, listR[i], RPattern)
# count += 1
# if ppl != 5 and count == ppl:
# line += '\\n'
# count = 0
# print (line)
postEvent('typedev.KernTool.observerSetText',
glyphsLine = line,
glyphsready = True,
targetpair = (cl,cr),
fontID = getFontID(self.font),
# observerID = self.observerID)
)
def saveSelectedPairs2file(self):
pairsfile = putFile(messageText = 'Save selected pairs to text file', title = 'title')
if pairsfile:
pairs = self.w.kernlist.getListOfSelectedPairs()
saveKerning(self.font, pairs, pairsfile)
def appendKernFromFile(self):
pairsfile = getFile(messageText = 'Append pairs from file', title = 'title')
if pairsfile:
loadKernFile(self.font, pairsfile[0])
self.w.kernlist.setFont(self.font)
self.hashKernDic = TDHashKernDic(self.font)
self.w.kernlist.setViewingMode(mode = self.modeShowAll, sorting = 'left', reverse = False)
self.w.kernlistControl.selectMenuItem('left', reversed = False)
# for (l,r), v in sorted(self.font.kerning.items()):
# self.w.kernlist.addItem({'left':l,'right':r,'value':v,'note':'what the fuck are you doing?!'})
self.leftReverse = False
self.rightReverse = False
self.valuesReverse = False
self.notesReverse = False
self.sortName = None
def kernListControlCallback(self, info):
if info == 'left':
self.sortName = info
self.w.kernlistControl.selectMenuItem(info, reversed = self.leftReverse)
self.w.kernlist.setViewingMode(mode = self.modeShowAll,
sorting = 'left', reverse = self.leftReverse, filterMode = self.filterMode)
self.w.kernlist.scrollToLine(0)
self.leftReverse = not self.leftReverse
self.rightReverse = False
self.valuesReverse = False
self.notesReverse = False
elif info == 'right':
self.sortName = info
self.w.kernlistControl.selectMenuItem(info, reversed = self.rightReverse)
self.w.kernlist.setViewingMode(mode = self.modeShowAll,
sorting = 'right', reverse = self.rightReverse, filterMode = self.filterMode)
self.w.kernlist.scrollToLine(0)
self.leftReverse = False
self.rightReverse = not self.rightReverse
self.valuesReverse = False
self.notesReverse = False
elif info == 'value':
self.sortName = info
self.w.kernlistControl.selectMenuItem(info, reversed = self.valuesReverse)
self.w.kernlist.setViewingMode(mode = self.modeShowAll,
sorting = 'values', reverse = self.valuesReverse, filterMode = self.filterMode)
self.w.kernlist.scrollToLine(0)
self.leftReverse = False
self.rightReverse = False
self.valuesReverse = not self.valuesReverse
self.notesReverse = False
elif info == 'note':
self.sortName = info
self.w.kernlistControl.selectMenuItem(info, reversed = self.notesReverse)
self.w.kernlist.setViewingMode(mode = self.modeShowAll,
sorting = 'notes', reverse = self.notesReverse, filterMode = self.filterMode)
self.w.kernlist.scrollToLine(0)
self.leftReverse = False
self.rightReverse = False
self.valuesReverse = False
self.notesReverse = not self.notesReverse
def setPreviewMode(self):
self.previewMode = not self.previewMode
self.w.menuFilters.setStateItem(self.lblpreview, self.previewMode)
self.w.kernlist.setPreviewMode(self.previewMode)
def KLselectionCallback(self, info):
pass
def fontMenuCall (self):
from mojo.UI import SelectFont
font = SelectFont(title='KernFinger')
self.changeFont(font)
# MenuDialogWindow(parentWindow = self.w, callback = self.changeFont)
def changeFont(self, font):
if not font:
self.font = CurrentFont()
else:
self.font = font
self.fontID = getFontID(self.font)
self.w.kernlist.setFont(self.font)
self.hashKernDic = TDHashKernDic(self.font)
self.w.setTitle(title = 'KernFinger: %s %s' % (self.font.info.familyName, self.font.info.styleName))
self.w.kernlist.setViewingMode(mode = self.modeShowAll, sorting = 'left', reverse = False)
self.w.kernlistControl.selectMenuItem('left', reversed = False)
self.leftReverse = False
self.rightReverse = False
self.valuesReverse = False
self.notesReverse = False
self.sortName = None
self.w.kernlist.scrollToLine(0)
# TDKernFinger()
|
StarcoderdataPython
|
3232760
|
<reponame>isabella232/nnabla<filename>python/src/nnabla/experimental/graph_converters/batch_norm_batchstat.py<gh_stars>0
# Copyright 2020,2021 Sony Corporation.
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import nnabla.functions as F
from .graph_converter import FunctionModifier
class BatchNormBatchStatModifier(FunctionModifier):
"""
Change `batch_stat` to `False`.
Supported functions: `BatchNormalization`, `FusedBatchNormalization`, `SyncBatchNormalization`.
Examples:
.. code-block:: python
pred = Model(...)
import nnabla.experimental.graph_converters as GC
modifiers = [GC.BatchNormBatchStatModifier()]
gc = GC.GraphConverter(modifiers)
pred = gc.convert(pred)
"""
def __init__(self):
super(BatchNormBatchStatModifier, self).__init__()
self._fct_set = {
'BatchNormalization': F.batch_normalization,
'FusedBatchNormalization': F.fused_batch_normalization,
'SyncBatchNormalization': F.sync_batch_normalization
}
def connect(self, fname, inputs, args):
fct = self._fct_set[fname]
args['batch_stat'] = False
if 'no_scale' in args:
del args['no_scale']
if 'no_bias' in args:
del args['no_bias']
h = fct(*inputs, **args)
return h
def modify(self, f, inputs):
if f.info.type_name not in self._fct_set:
return
h = self.connect(f.info.type_name, inputs, f.info.args)
return h
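# A minimal end-to-end sketch of the modifier above, following the usage
# pattern from the class docstring. The toy network (a convolution followed
# by batch normalization) and its shapes are illustrative assumptions only.
if __name__ == '__main__':
import nnabla as nn
import nnabla.parametric_functions as PF
import nnabla.experimental.graph_converters as GC
x = nn.Variable((1, 3, 16, 16))
h = PF.convolution(x, 8, (3, 3), name='conv')
pred = PF.batch_normalization(h, batch_stat=True, name='bn')
gc = GC.GraphConverter([GC.BatchNormBatchStatModifier()])
pred = gc.convert(pred) # same topology, but batch_stat is now False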
|
StarcoderdataPython
|
4830083
|
# Copyright 2015 Twitter, Inc and other contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = ['DefaultTestLoader',
'out', 'BufferedTestResult', 'BufferedTestRunner',
'HtmlReporter', 'XUnitReporter',
'main', 'DefaultTestProgram',
'data_driven', 'multi_threading_data_driven',
'ContextManager', 'contexts']
from unishark.result import (out, BufferedTestResult)
from unishark.reporter import (Reporter, HtmlReporter, XUnitReporter)
from unishark.runner import BufferedTestRunner
from unishark.loader import DefaultTestLoader
from unishark.decorator import data_driven, multi_threading_data_driven
from unishark.util import ContextManager, contexts
from unishark.main import (TestProgram, DefaultTestProgram, main)
PACKAGE = __name__
VERSION = '0.3.2'
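# A minimal usage sketch for the public API exported above, hedged as an
# assumption about the 0.3.x interfaces: BufferedTestRunner is assumed to
# accept a reporters list and a plain unittest suite.
#
# import unittest
# import unishark
#
# class _ExampleTest(unittest.TestCase):
#     def test_ok(self):
#         self.assertTrue(True)
#
# suite = unittest.TestLoader().loadTestsFromTestCase(_ExampleTest)
# result = unishark.BufferedTestRunner(reporters=[unishark.HtmlReporter()]).run(suite)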
|
StarcoderdataPython
|