id (stringlengths, 1-7) | text (stringlengths, 6-1.03M) | dataset_id (stringclasses, 1 value)
---|---|---|
1643240
|
<reponame>angry-tony/cmdb-ralph<filename>src/ralph/attachments/forms.py
from django import forms
from ralph.admin.helpers import get_content_type_for_model
from ralph.attachments.models import Attachment, AttachmentItem
from ralph.lib.mixins.forms import RequestModelForm
class ChangeAttachmentWidget(forms.ClearableFileInput):
template_with_initial = (
'<div>%(input_text)s: %(input)s</div>'
)
class AttachmentForm(RequestModelForm):
class Meta:
fields = ['file', 'description']
model = Attachment
widgets = {
'file': ChangeAttachmentWidget(),
'description': forms.Textarea(attrs={'rows': 2, 'cols': 30}),
}
def save(self, commit=True):
"""
        Overrides the standard save method. Saves the object with attributes:
        * uploaded_by - user from the request,
        * mime_type - uploaded file's content type.
"""
obj = super().save(commit=False)
md5 = Attachment.get_md5_sum(obj.file)
attachment = Attachment.objects.filter(md5=md5)
if obj.pk:
attachment = attachment.exclude(pk=obj.pk)
attachment = attachment.first()
        # _parent_object is the object to which the Attachment is assigned.
        # _parent_object is set in the attachment.views.AttachmentsView
        # get_formset method.
if attachment:
content_type = get_content_type_for_model(
self._parent_object._meta.model
)
if not AttachmentItem.objects.filter(
content_type=content_type,
object_id=self._parent_object.pk,
attachment__md5=md5
).exists():
AttachmentItem.objects.attach(
self._parent_object.pk, content_type, [attachment]
)
return None
return
obj.md5 = md5
obj.uploaded_by = self._request.user
file = self.cleaned_data.get('file', None)
if file and hasattr(file, 'content_type'):
obj.mime_type = file.content_type
obj.save()
return obj
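# Hedged illustration, not part of ralph: save() above dedupes uploads by MD5
# and attaches the already-existing Attachment instead of creating a duplicate.
# A minimal standalone sketch of such an MD5 digest over a file-like upload
# (the helper name and chunk size are assumptions, not ralph's implementation):
def _md5_of_uploaded_file(uploaded_file, chunk_size=64 * 1024):
    import hashlib
    digest = hashlib.md5()
    uploaded_file.seek(0)
    # read in chunks so large uploads are not pulled into memory at once
    for chunk in iter(lambda: uploaded_file.read(chunk_size), b''):
        digest.update(chunk)
    uploaded_file.seek(0)
    return digest.hexdigest()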
|
StarcoderdataPython
|
1769485
|
<reponame>JohnyTheCarrot/GearBot
import discord
from discord.ext import commands
class AntiRaid:
def __init__(self, bot):
self.bot: commands.Bot = bot
async def sound_the_alarm(self, guild):
print("alarm triggered!")
pass
async def on_member_join(self, member: discord.Member):
# someone joined, track in redis, query
pass
def setup(bot):
bot.add_cog(AntiRaid(bot))
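# Hedged sketch, not part of GearBot: the on_member_join stub above notes
# "track in redis, query" - i.e. count very recent joins and sound the alarm
# when the rate spikes. A minimal in-memory stand-in for that counter
# (window and threshold values are assumptions):
import time
from collections import defaultdict, deque

_recent_joins = defaultdict(deque)  # guild id -> timestamps of recent joins

def record_join_and_check(guild_id, window_seconds=10, threshold=5):
    now = time.time()
    joins = _recent_joins[guild_id]
    joins.append(now)
    # drop joins that fell out of the sliding window
    while joins and now - joins[0] > window_seconds:
        joins.popleft()
    return len(joins) >= threshold  # True -> caller should sound the alarm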
|
StarcoderdataPython
|
1624599
|
<filename>restartservice.py
from contextlib import closing
import json
import logging
import time
from datehelper import DateHelper
from dockermon import DockerMon, DockermonError
from sys import version_info
from notifyable import Notifyable
# Docker's restart endpoint replies "204 No Content" on success, so the same
# HTTP_NO_CONTENT name must be importable on both Python 2 and Python 3.
if version_info[:2] < (3, 0):
    from httplib import NO_CONTENT as HTTP_NO_CONTENT
else:
    from http.client import NO_CONTENT as HTTP_NO_CONTENT
logger = logging.getLogger(__name__)
class RestartParameters:
def __init__(self, args):
self.restart_threshold = args.restart_threshold
self.restart_limit = args.restart_limit
self.restart_reset_period = args.restart_reset_period
self.containers_to_watch = args.containers_to_watch
self.do_restart = args.restart_containers_on_die
class RestartData:
def __init__(self, container_name, timestamp=None):
self.container_name = container_name
self.mail_sent = False
if timestamp:
self.occasions = [timestamp]
self.formatted_occasions = [DateHelper.format_timestamp(timestamp)]
else:
self.occasions = []
self.formatted_occasions = []
def add_restart_occasion(self, timestamp):
self.occasions.append(timestamp)
self.formatted_occasions.append(DateHelper.format_timestamp(timestamp))
def __str__(self):
return "container_name: %s, occasions: %s, formatted_occasions: %s" \
% (self.container_name, self.occasions, self.formatted_occasions)
class RestartService(Notifyable):
def __init__(self, socket_url, restart_params, notification_service):
Notifyable.__init__(self)
self.socket_url = socket_url
self.params = restart_params
self.notification_service = notification_service
self.cached_container_names = {'restart': [], 'do_not_restart': []}
self.restarts = {}
def container_started(self, event):
self.maintain_container_restart_counter(event.container_name)
def container_became_healthy(self, event):
self.maintain_container_restart_counter(event.container_name)
def container_stopped_by_hand(self, event):
logger.debug("Container %s is stopped/killed, but WILL NOT BE restarted "
"as it was stopped/killed by hand", event.container_name)
def container_dead(self, event):
container_name = event.container_name
if self.is_restart_allowed(container_name) and \
self.check_container_is_restartable(container_name):
if self.params.do_restart:
logger.info("Container %s dead unexpectedly, restarting...", container_name)
self.do_restart(event)
else:
logger.info("Container %s dead unexpectedly, skipping restart but sending mail, as per configuration!",
container_name)
mail_subject = "Container %s dead unexpectedly" % container_name
mail_body = RestartService.create_mail_body_from_docker_event(event)
self.notification_service.send_mail(mail_subject, mail_body)
else:
            logger.warning("Container %s is stopped/killed, but WILL NOT BE restarted again, "
                           "as maximum restart count is reached: %s", container_name, self.params.restart_limit)
if not self.is_mail_sent(container_name):
mail_subject = "Maximum restart count is reached for container %s" % container_name
mail_body = RestartService.create_mail_body_from_docker_event(event)
self.notification_service.send_mail(mail_subject, mail_body)
self.set_mail_sent(container_name)
pass
def container_became_unhealthy(self, event):
container_name = event.container_name
if self.is_restart_allowed(container_name) and self.check_container_is_restartable(container_name):
if self.params.do_restart:
logger.info("Container %s became unhealthy, restarting...", container_name)
self.do_restart(event)
else:
logger.info("Container %s became unhealthy, skipping restart but sending mail, as per configuration!",
container_name)
mail_subject = "Container %s became unhealthy" % container_name
mail_body = RestartService.create_mail_body_from_docker_event(event)
self.notification_service.send_mail(mail_subject, mail_body)
def log_restart_container(self, container_name):
count_of_restarts = self.get_performed_restart_count(container_name)
log_record = "Restarting container: %s (%s / %s)..." % (
container_name, count_of_restarts, self.params.restart_limit)
logger.info(log_record)
return log_record
def is_mail_sent(self, container_name):
return self.restarts[container_name].mail_sent
def set_mail_sent(self, container_name):
self.restarts[container_name].mail_sent = True
def save_restart_event_happened(self, container_name):
now = time.time()
if container_name not in self.restarts:
self.restarts[container_name] = RestartData(container_name, now)
else:
self.restarts[container_name].add_restart_occasion(now)
def get_performed_restart_count(self, container_name):
if container_name not in self.restarts:
self.restarts[container_name] = RestartData(container_name)
return len(self.restarts[container_name].occasions)
def reset_restart_data(self, container_name):
self.restarts[container_name] = RestartData(container_name)
def check_container_is_restartable(self, container_name):
if container_name in self.cached_container_names['restart']:
return True
elif container_name in self.cached_container_names['do_not_restart']:
logger.debug("Container %s is stopped/killed, "
"but WILL NOT BE restarted as it does not match any names from configuration "
"'containers-to-watch'.", container_name)
return False
else:
for pattern in self.params.containers_to_watch:
if pattern.match(container_name):
logger.debug("Container %s is matched for container name pattern %s", container_name,
pattern.pattern)
self.cached_container_names['restart'].append(container_name)
return True
logger.debug("Container %s is stopped/killed, "
"but WILL NOT BE restarted as it does not match any names from configuration "
"'containers-to-watch'.", container_name)
self.cached_container_names['do_not_restart'].append(container_name)
return False
def is_restart_allowed(self, container_name):
restart_count = self.get_performed_restart_count(container_name)
last_restarts = self.restarts[container_name].occasions[-self.params.restart_limit:]
now = time.time()
restart_range_start = now - self.params.restart_threshold * 60
for r in last_restarts:
if r < restart_range_start:
return False
return restart_count < self.params.restart_limit
def maintain_container_restart_counter(self, container_name):
if container_name not in self.restarts:
return
last_restart = self.restarts[container_name].occasions[-1]
now = time.time()
restart_duration = now - last_restart
needs_counter_reset = restart_duration < self.params.restart_reset_period * 60
if needs_counter_reset:
logger.info("Start/healthy event received for container %s, clearing restart counter...",
container_name)
logger.info("Last restart time was %s", DateHelper.format_timestamp(last_restart))
self.reset_restart_data(container_name)
def do_restart(self, event):
sock, hostname = DockerMon.connect(self.socket_url)
logger.info("Sending restart request to Docker API for container: %s (%s), compose service name: %s",
event.container_name, event.container_id, event.service_name)
request = self.create_docker_restart_request(event.container_id, hostname)
self.handle_restart_request(request, sock, event)
def handle_restart_request(self, request, sock, event):
with closing(sock):
sock.sendall(request)
header, payload = DockerMon.read_http_header(sock)
status, reason = DockerMon.header_status(header)
# checking the HTTP status, no payload should be received!
if status == HTTP_NO_CONTENT:
mail_body = RestartService.create_mail_body_from_docker_event(event)
self.save_restart_event_happened(event.container_name)
log_record = self.log_restart_container(event.container_name)
self.notification_service.send_mail(log_record, mail_body)
else:
raise DockermonError('bad HTTP status: %s %s' % (status, reason))
@staticmethod
def create_mail_body_from_docker_event(event):
return 'Parsed event: %s\n\n\nOriginal docker event: %s\n' % (event, event.details)
@staticmethod
def create_docker_restart_request(container_id, hostname):
request = 'POST /containers/{0}/restart?t=5 HTTP/1.1\nHost: {1}\n\n'.format(container_id, hostname)
request = request.encode('utf-8')
return request
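# Hedged standalone restatement, not part of the original module: it mirrors
# the logic of is_restart_allowed() above - a restart is permitted only while
# fewer than restart_limit restarts have happened, and only if all of the last
# restart_limit occasions fall inside the restart_threshold window (minutes).
def _restart_allowed(occasions, restart_limit, restart_threshold_minutes, now=None):
    import time
    now = time.time() if now is None else now
    window_start = now - restart_threshold_minutes * 60
    recent = occasions[-restart_limit:]
    if any(ts < window_start for ts in recent):
        return False
    return len(occasions) < restart_limit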
|
StarcoderdataPython
|
110869
|
<gh_stars>0
print('=ˆ= ' * 8)
print(' TAULA DE MULTIPLICACIÓ')  # Catalan: "MULTIPLICATION TABLE"
print('=ˆ= ' * 8)
num = int(input('introduïu un número per trobar\nla taula de multiplicació: '))  # Catalan prompt: "enter a number to find its multiplication table: "
print(' ' * 7, '-' * 13)
x = 1
for c in range(1, 11):
print(' ' * 7, '{} x {:2} = {:3}'.format(num, x, num*x))
x += 1
print(' ' * 7, '-' * 13)
|
StarcoderdataPython
|
3267990
|
# (C) Copyright 2018-2021 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
""" Mixin tests for testing implementations of IEventLoop. """
from traits_futures.i_event_loop import IEventLoop
from traits_futures.i_event_loop_helper import IEventLoopHelper
from traits_futures.i_pingee import IPingee
class IEventLoopTests:
"""Mixin providing tests for implementations of IEventLoop."""
#: Override this in subclasses.
event_loop_factory = IEventLoop
def setUp(self):
self.event_loop = self.event_loop_factory()
def tearDown(self):
del self.event_loop
def test_implements_i_event_loop(self):
self.assertIsInstance(self.event_loop, IEventLoop)
def test_pingee(self):
pingee = self.event_loop.pingee(on_ping=lambda: None)
self.assertIsInstance(pingee, IPingee)
def test_event_loop_helper(self):
event_loop_helper = self.event_loop.helper()
self.assertIsInstance(event_loop_helper, IEventLoopHelper)
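# Hedged usage sketch, not part of this module: a concrete backend test
# combines the mixin with unittest.TestCase and overrides event_loop_factory.
# The AsyncioEventLoop import path below is an assumption about the package
# layout, so the example is left commented out.
#
# import unittest
# from traits_futures.asyncio.event_loop import AsyncioEventLoop
#
# class TestAsyncioEventLoop(IEventLoopTests, unittest.TestCase):
#     event_loop_factory = AsyncioEventLoop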
|
StarcoderdataPython
|
3336401
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division
import ndef
import pytest
import _test_record_base
def pytest_generate_tests(metafunc):
_test_record_base.generate_tests(metafunc)
class TestUriRecord(_test_record_base._TestRecordBase):
RECORD = ndef.uri.UriRecord
ATTRIB = "iri, uri"
test_init_args_data = [
((), ('', '')),
((None,), ('', '')),
(('http://nfcpy.org',), ('http://nfcpy.org', 'http://nfcpy.org')),
((u'http://nfcpy.org',), ('http://nfcpy.org', 'http://nfcpy.org')),
((b'http://nfcpy.org',), ('http://nfcpy.org', 'http://nfcpy.org')),
(('nfcpy',), ('nfcpy', 'nfcpy')),
(("http://www.nfcpy",), ("http://www.nfcpy", "http://www.nfcpy")),
(("https://www.nfcpy",), ("https://www.nfcpy", "https://www.nfcpy")),
(("http://nfcpy",), ("http://nfcpy", "http://nfcpy")),
(("https://nfcpy",), ("https://nfcpy", "https://nfcpy")),
(("tel:01234",), ("tel:01234", "tel:01234")),
(("mailto:nfcpy",), ("mailto:nfcpy", "mailto:nfcpy")),
(("ftp://anonymous:anonymous@nfcpy",),
("ftp://anonymous:anonymous@nfcpy",
"ftp://anonymous:anonymous@nfcpy")),
(("ftp://ftp.nfcpy",), ("ftp://ftp.nfcpy", "ftp://ftp.nfcpy")),
(("ftps://nfcpy",), ("ftps://nfcpy", "ftps://nfcpy")),
(("sftp://nfcpy",), ("sftp://nfcpy", "sftp://nfcpy")),
(("smb://nfcpy",), ("smb://nfcpy", "smb://nfcpy")),
(("nfs://nfcpy",), ("nfs://nfcpy", "nfs://nfcpy")),
(("ftp://nfcpy",), ("ftp://nfcpy", "ftp://nfcpy")),
(("dav://nfcpy",), ("dav://nfcpy", "dav://nfcpy")),
(("news:nfcpy",), ("news:nfcpy", "news:nfcpy")),
(("telnet://nfcpy",), ("telnet://nfcpy", "telnet://nfcpy")),
(("imap://nfcpy",), ("imap://nfcpy", "imap://nfcpy")),
(("rtsp://nfcpy",), ("rtsp://nfcpy", "rtsp://nfcpy")),
(("urn:nfcpy",), ("urn:nfcpy", "urn:nfcpy")),
(("pop:nfcpy",), ("pop:nfcpy", "pop:nfcpy")),
(("sip:<EMAIL>",),
("sip:<EMAIL>", "sip:<EMAIL>")),
(("sips:nfcpy",), ("sips:nfcpy", "sips:nfcpy")),
(("tftp:nfcpy",), ("tftp:nfcpy", "tftp:nfcpy")),
(("btspp://nfcpy",), ("btspp://nfcpy", "btspp://nfcpy")),
(("btl2cap://nfcpy",), ("btl2cap://nfcpy", "btl2cap://nfcpy")),
(("btgoep://nfcpy",), ("btgoep://nfcpy", "btgoep://nfcpy")),
(("tcpobex://nfcpy",), ("tcpobex://nfcpy", "tcpobex://nfcpy")),
(("irdaobex://nfcpy",), ("irdaobex://nfcpy", "irdaobex://nfcpy")),
(("file://nfcpy",), ("file://nfcpy", "file://nfcpy")),
(("urn:epc:id:12345",), ("urn:epc:id:12345", "urn:epc:id:12345")),
(("urn:epc:tag:12345",), ("urn:epc:tag:12345", "urn:epc:tag:12345")),
(("urn:epc:pat:12345",), ("urn:epc:pat:12345", "urn:epc:pat:12345")),
(("urn:epc:raw:12345",), ("urn:epc:raw:12345", "urn:epc:raw:12345")),
(("urn:epc:12345",), ("urn:epc:12345", "urn:epc:12345")),
(("urn:nfc:12345",), ("urn:nfc:12345", "urn:nfc:12345")),
((u"http://www.hääyö.com/~user/index.html",),
(u"http://www.hääyö.com/~user/index.html",
u"http://www.xn--hy-viaa5g.com/~user/index.html")),
]
test_init_kwargs_data = [
(('URI',), "iri='URI'"),
]
test_init_fail_data = [
((1,), ".iri accepts str or bytes, but not int"),
]
test_decode_valid_data = [
('006e66637079', ("nfcpy",)),
('016e66637079', ("http://www.nfcpy",)),
('026e66637079', ("https://www.nfcpy",)),
('036e66637079', ("http://nfcpy",)),
('046e66637079', ("https://nfcpy",)),
('053031323334', ("tel:01234",)),
("066e66637079", ("mailto:nfcpy",)),
("076e66637079", ("ftp://anonymous:anonymous@nfcpy",)),
("086e66637079", ("ftp://ftp.nfcpy",)),
("096e66637079", ("ftps://nfcpy",)),
("0a6e66637079", ("sftp://nfcpy",)),
("0b6e66637079", ("smb://nfcpy",)),
("0c6e66637079", ("nfs://nfcpy",)),
("0d6e66637079", ("ftp://nfcpy",)),
("0e6e66637079", ("dav://nfcpy",)),
("0f6e66637079", ("news:nfcpy",)),
("106e66637079", ("telnet://nfcpy",)),
("116e66637079", ("imap:nfcpy",)),
("126e66637079", ("rtsp://nfcpy",)),
("136e66637079", ("urn:nfcpy",)),
("146e66637079", ("pop:nfcpy",)),
("156e66637079", ("sip:nfcpy",)),
("166e66637079", ("sips:nfcpy",)),
("176e66637079", ("tftp:nfcpy",)),
("186e66637079", ("btspp://nfcpy",)),
("196e66637079", ("btl2cap://nfcpy",)),
("1a6e66637079", ("btgoep://nfcpy",)),
("1b6e66637079", ("tcpobex://nfcpy",)),
("1c6e66637079", ("irdaobex://nfcpy",)),
("1d6e66637079", ("file://nfcpy",)),
("1e3132333435", ("urn:epc:id:12345",)),
("1f3132333435", ("urn:epc:tag:12345",)),
("203132333435", ("urn:epc:pat:12345",)),
("213132333435", ("urn:epc:raw:12345",)),
("223132333435", ("urn:epc:12345",)),
("233132333435", ("urn:nfc:12345",)),
]
test_decode_error_data = [
('246e66637079', "decoding of URI identifier 36 is not defined"),
('ff6e66637079', "decoding of URI identifier 255 is not defined"),
('0380', "URI field is not valid UTF-8 data"),
('0300', "URI field contains invalid characters"),
]
test_decode_relax_data = [
('246e66637079', ("nfcpy",)),
('ff6e66637079', ("nfcpy",)),
]
test_encode_error = None
test_format_args_data = [
((), "''"),
(('http://example.com',), "'http://example.com'"),
]
test_format_str_data = [
((), "NDEF Uri Record ID '' Resource ''"),
(('tel:1234',), "NDEF Uri Record ID '' Resource 'tel:1234'"),
]
def test_uri_to_iri_conversion():
record = ndef.UriRecord()
# no netloc -> no conversion
record.uri = u"tel:1234"
assert record.iri == u"tel:1234"
# with netloc -> conversion
record.uri = u"http://www.xn--hy-viaa5g.com/%7Euser/index.html"
assert record.iri == u"http://www.hääyö.com/~user/index.html"
uri_messages = [
('D1010a55036e666370792e6f7267',
[ndef.UriRecord('http://nfcpy.org')]),
('91010a55036e666370792e6f7267 51010a55046e666370792e6f7267',
[ndef.UriRecord('http://nfcpy.org'),
ndef.UriRecord('https://nfcpy.org')]),
('D101115500736d74703a2f2f6e666370792e6f7267',
[ndef.UriRecord('smtp://nfcpy.org')]),
]
@pytest.mark.parametrize("encoded, message", uri_messages)
def test_message_decode(encoded, message):
octets = bytes(bytearray.fromhex(encoded))
print(list(ndef.message_decoder(octets)))
assert list(ndef.message_decoder(octets)) == message
@pytest.mark.parametrize("encoded, message", uri_messages)
def test_message_encode(encoded, message):
octets = bytes(bytearray.fromhex(encoded))
print(list(ndef.message_encoder(message)))
assert b''.join(list(ndef.message_encoder(message))) == octets
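# Hedged round-trip illustration using the same ndef calls exercised above:
# encode one UriRecord to octets and decode it back to an equal record.
def test_uri_record_roundtrip_example():
    record = ndef.UriRecord('http://nfcpy.org')
    octets = b''.join(ndef.message_encoder([record]))
    assert list(ndef.message_decoder(octets)) == [record]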
|
StarcoderdataPython
|
3351147
|
import datetime
import enum
import logging
import chardet
import tqdm
import subprocess
import re
import sys
import dataclasses
from typing import List, Set, Tuple, Optional, Any
from modification import Modification
from javadoc_analyzer import has_java_javadoc_changed
_commit_line = re.compile(r'^commit ([0-9a-f]{40})$')
_date_line = re.compile(r'^Date:\s*([0-9\-]+T[0-9\:]+)')
_src_line = re.compile(r'^M\t((.+)\.java)$')
@enum.unique
class CommitType(enum.Enum):
UNKNOWN = None
JAVA_AND_JAVADOC_TAGS_EVERYWHERE = "Arbitrary Java / JavaDoc changes"
ONLY_JAVADOC_TAGS_IN_SOME_FILES = "Some files have only JavaDoc tag changes"
ONLY_JAVADOC_TAGS_EVERYWHERE = "Whole commit has only JavaDoc tag changes"
WITHOUT_JAVADOC_TAGS = "Commit doesn't have JavaDoc tag changes"
_mixed_commits: int = 0
_only_javadoc_in_some_files_commits: int = 0
_pure_javadoc_commits: int = 0
_total_commits: int = 0
_java_files_commits: int = 0
@dataclasses.dataclass()
class Commit:
sha1: str
files: List[Optional[str]] = None
date: datetime = None
commit_type: CommitType = CommitType.UNKNOWN
file_statuses: List[Tuple[bool, bool, bool]] = None
modifications: List[Modification] = None
@staticmethod
def read_file_in_any_encoding(patch_filename: str, filename: str, comment: str = "") -> str:
with open(patch_filename, 'rb') as bf:
bts = bf.read()
try:
return bts.decode('utf-8')
except Exception as ude1:
logging.warning(f"File: {filename} of {comment} is not in UTF-8: {ude1}")
try:
return bts.decode(sys.getdefaultencoding())
except Exception as ude2:
logging.warning(f"File: {filename} of {comment} is not in sys.getdefaultencoding() = {sys.getdefaultencoding()}: {ude2}")
# Can't handle more here...
enc = chardet.detect(bts)['encoding']
logging.warning(f"File: {filename} of {comment} is likely in {enc} encoding")
return bts.decode(enc)
def classify(self, tmpdir):
global _mixed_commits, _only_javadoc_in_some_files_commits, _pure_javadoc_commits
file_statuses: List[Tuple[bool, bool, bool]] = []
modifications: List[Modification] = []
for f in self.files:
patchname = subprocess.check_output([
'git', 'format-patch', '-1', '--numbered-files', '--unified=100000',
'-o', tmpdir, self.sha1,
'--', f
]).decode(sys.getdefaultencoding()).strip()
try:
patch = self.read_file_in_any_encoding(patchname, f, f"Commit: {self.sha1}")
tuple_ = has_java_javadoc_changed(f, patch, self.date, self.sha1)
file_statuses.append((tuple_[0], tuple_[1], tuple_[2]))
if tuple_[2] and not tuple_[0] and not tuple_[1]:
modifications.extend(tuple_[3])
except Exception as e:
logging.error("Skipping bad patch of commit %s in file %s due to %s" % (self.sha1, f, e))
file_statuses.append((False, False, False))
pure_javadoc_tag_files_count = sum(
1 for (j, d, t) in file_statuses if t and not j and not d
)
javadoc_tag_files_count = sum(
1 for (j, d, t) in file_statuses if t
)
if pure_javadoc_tag_files_count == len(file_statuses):
self.commit_type = CommitType.ONLY_JAVADOC_TAGS_EVERYWHERE
_pure_javadoc_commits += 1
elif pure_javadoc_tag_files_count > 0:
self.commit_type = CommitType.ONLY_JAVADOC_TAGS_IN_SOME_FILES
_only_javadoc_in_some_files_commits += 1
elif javadoc_tag_files_count == 0:
self.commit_type = CommitType.WITHOUT_JAVADOC_TAGS
else:
self.commit_type = CommitType.JAVA_AND_JAVADOC_TAGS_EVERYWHERE
_mixed_commits += 1
self.file_statuses = file_statuses
self.modifications = modifications
# def get_file_statuses_str(self) -> str:
# res = []
# for f, (j, d, t, s) in zip(self.files, self.file_statuses):
# if len(s):
# res.append("%s:\n%s\n" % (f, s))
# return "\n".join(res)
def get_csv_lines(self, url_prefix: str) -> List[List[str]]:
if not self.modifications:
return [[self.commit_type.value, url_prefix + self.sha1, self.date, '', '']]
csv_lines = []
for i in range(0, len(self.modifications)):
csv_lines.append(self.csv_line(i, url_prefix))
return csv_lines
def csv_line(self, i: int, url_prefix: str) -> List[str]:
if i < 1:
if self.modifications[0].time_offset is None:
return [
self.commit_type.value,
url_prefix + self.sha1,
self.date,
self.modifications[0].file_name,
self.modifications[0].javadoc_modification,
self.modifications[0].functionheader_modification,
self.modifications[0].functionheader_date,
''
]
return [
self.commit_type.value,
url_prefix + self.sha1,
self.date,
self.modifications[0].file_name,
self.modifications[0].javadoc_modification,
self.modifications[0].functionheader_modification,
self.modifications[0].functionheader_date,
self.modifications[0].time_offset.days
]
else:
if self.modifications[i].time_offset is None:
return [
'',
'',
'',
self.modifications[i].file_name,
self.modifications[i].javadoc_modification,
self.modifications[i].functionheader_modification,
self.modifications[i].functionheader_date,
''
]
return [
'',
'',
'',
self.modifications[i].file_name,
self.modifications[i].javadoc_modification,
self.modifications[i].functionheader_modification,
self.modifications[i].functionheader_date,
self.modifications[i].time_offset.days
]
def get_commits(single_commit: Optional[str] = None) -> List[Commit]:
global _total_commits
git_cmd = [
'git', 'show', '--name-status', '--date=iso-strict', single_commit
] if single_commit else [
'git', 'log', '--name-status', '--date=iso-strict', '--all'
]
log = subprocess.check_output(git_cmd).decode(sys.getdefaultencoding())
log = log.replace('\r', '')
loglines = log.split('\n')
commits = []
cur_commit = None
cur_date = None
cur_files = []
def release():
global _java_files_commits
if cur_commit and len(cur_files):
_java_files_commits += 1
cur_realdatetime = datetime.datetime.strptime(cur_date, "%Y-%m-%dT%H:%M:%S")
commits.append(Commit(cur_commit, cur_files.copy(), cur_realdatetime))
print("Analyzing log...")
for l in tqdm.tqdm(loglines):
clm = _commit_line.match(l)
        clf = _src_line.match(l)
cld = _date_line.match(l)
if clm:
_total_commits += 1
release()
cur_commit = clm.group(1)
cur_files = []
elif cld:
cur_date = cld.group(1)
elif clf:
cur_files.append(clf.group(1))
release()
return commits
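# Hedged driver sketch, not part of the original script: classify every commit
# into a temporary patch directory and dump the resulting CSV lines. The output
# filename and url_prefix are hypothetical.
def _example_main(url_prefix="https://example.org/commit/"):
    import csv
    import tempfile
    commits = get_commits()
    with tempfile.TemporaryDirectory() as tmpdir, \
            open('javadoc_commits.csv', 'w', newline='') as out:
        writer = csv.writer(out)
        for commit in tqdm.tqdm(commits):
            commit.classify(tmpdir)
            writer.writerows(commit.get_csv_lines(url_prefix))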
|
StarcoderdataPython
|
155786
|
#!/usr/bin/env python3
# Copyright 2019 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import simplediskimage
from common import generate_bb_testdata
logging.basicConfig(level=logging.DEBUG)
def main():
# Generate test data
generate_bb_testdata()
# Create image
image = simplediskimage.DiskImage("bar.img", partition_table='msdos',
partitioner=simplediskimage.Sfdisk)
part_fat = image.new_partition("fat16", partition_flags=["BOOT"])
# Copy the files to the root, could also be written:
# part_fat.copy("file1", "file2", destination="/"), or without destination
part_fat.copy("generated/u-boot.img")
part_fat.copy("generated/MLO")
# Make sure that the partition is always 48 MiB
part_fat.set_fixed_size_bytes(48 * simplediskimage.SI.Mi)
image.commit()
print("sudo kpartx -av bar.img")
print("...")
print("sudo kpartx -dv bar.img")
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
1723447
|
# -*- coding: utf-8 -*-
from rest_framework.serializers import (
ModelSerializer, Serializer, IntegerField, DurationField)
from button.models import Clear
class ClearSerializer(ModelSerializer):
class Meta:
model = Clear
read_only_fields = ('id', 'user', 'date',)
class MyStatsSerializer(Serializer):
my_score = IntegerField()
my_best_conquest = DurationField()
my_clicks = IntegerField()
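# Hedged usage sketch, not part of the app: MyStatsSerializer is a plain
# Serializer, so it can render any mapping exposing the three fields
# (the values below are hypothetical).
def _example_stats_payload():
    import datetime
    stats = {
        'my_score': 42,
        'my_best_conquest': datetime.timedelta(minutes=5),
        'my_clicks': 100,
    }
    return MyStatsSerializer(stats).data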
|
StarcoderdataPython
|
71977
|
<reponame>adabutch/account_tracker
# Generated by Django 3.0.3 on 2020-02-13 19:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('account_request', '0013_auto_20190603_1712'),
]
operations = [
migrations.AlterField(
model_name='accountrequest',
name='supervisor_phone',
field=models.CharField(default=8675309, max_length=12),
preserve_default=False,
),
]
|
StarcoderdataPython
|
137457
|
"""
Classes for visualizing echo data
"""
import numpy as np
import matplotlib.colors as colors
# import datetime as dt
from matplotlib.dates import date2num
from collections import defaultdict
import echopype_model
# Colormap: multi-frequency availability from Jech & Michaels 2006
MF_COLORS = np.array([[0,0,0],\
[86,25,148],\
[28,33,179],\
[0,207,239],\
[41,171,71],\
[51,204,51],\
[255,239,0],\
[255,51,0]])/255.
MF_CMAP_TMP = colors.ListedColormap(MF_COLORS)
MF_CMAP = colors.BoundaryNorm(range(MF_COLORS.shape[0]+1),MF_CMAP_TMP.N)
# Colormap: standard EK60
EK60_COLORS = np.array([[255, 255, 255],\
[159, 159, 159],\
[ 95, 95, 95],\
[ 0, 0, 255],\
[ 0, 0, 127],\
[ 0, 191, 0],\
[ 0, 127, 0],\
[255, 255, 0],\
[255, 127, 0],\
[255, 0, 191],\
[255, 0, 0],\
[166, 83, 60],\
[120, 60, 40]])/255.
EK60_CMAP_TH = [-80,-30]
EK60_CMAP_TMP = colors.ListedColormap(EK60_COLORS)
EK60_CMAP_BOUNDS = np.linspace(EK60_CMAP_TH[0],EK60_CMAP_TH[1],EK60_CMAP_TMP.N+1)
EK60_CMAP = colors.BoundaryNorm(EK60_CMAP_BOUNDS,EK60_CMAP_TMP.N)
# Default plot echogram params
ECHOGRAM_PARAMS = defaultdict(list)
ECHOGRAM_PARAMS['hour_spacing'] = 1 # spacing: in [hour]
ECHOGRAM_PARAMS['depth_ticks_num'] = 5 # number of tick marks on y-axis
ECHOGRAM_PARAMS['depth_min'] = 0 # min depth to plot [m]
ECHOGRAM_PARAMS['depth_max'] = 200 # max depth to plot [m]
ECHOGRAM_PARAMS['cmap_min'] = -80 # min of color scale
ECHOGRAM_PARAMS['cmap_max'] = -30 # max of color scale
ECHOGRAM_PARAMS['c_ticks_spacing'] # number of ticks in colorbar
class EchoDataViewer(object):
'''
Class to view echo data.
    Input can be an EchoDataRaw (now) or an EchoDataMVBS (future)
'''
def __init__(self,echo_object='',fig_size=[15,8],cmap='viridis'):
self.fig_size = fig_size
self.cmap = cmap
if echo_object=='':
self.echo_data_type = ''
self.echo_vals = []
self.frequency = []
self.depth_bin = []
self.ping_bin = []
self.ping_time = []
else:
self.echo_object = echo_object
self.load_echo_info()
        # Empty attributes that will only be evaluated when the associated methods are called by the user
self.date_range = []
# Methods to set/get critical attributes
def load_echo_info(self):
if isinstance(self.echo_object,(echopype_model.EchoDataRaw)):
self.echo_data_type = 'EchoDataRaw'
self.echo_vals = self.echo_object.Sv_corrected
self.depth_bin = self.echo_object.bin_size # this is for Sv data
else:
self.echo_data_type = 'others' # add other types later
self.echo_vals = []
self.depth_bin = self.echo_object.depth_bin # this is for MVBS
self.frequency = [float(x) for x in self.echo_object.Sv_corrected.keys()] # get frequency info
self.ping_bin = self.echo_object.ping_bin
self.ping_time = self.echo_object.ping_time
def set_date_range(self,date_range):
self.date_range = date_range
def set_fig_size(self,fig_size):
self.fig_size = fig_size
def set_cmap(self,cmap):
self.cmap = cmap
# Methods for visualization
def echogram(self,ax,echogram_params,freq_select):
'''
Plot echogram for selected frequencies
INPUT:
        ax                  axis the echogram is to be plotted on
echogram_params plotting parameters
freq_select selected frequency (dtype=float)
'''
freq_idx = self.find_freq_seq(freq_select)
sz = self.echo_vals[str(self.frequency[freq_idx])].shape
# Getting start and end ping indices
        if not self.date_range:
            print('No date range set. Use set_date_range to select the date range to plot')
            return
else:
date_num_range = date2num(self.date_range)
ping_idx_start = np.searchsorted(self.ping_time, date_num_range[0], side="left")
ping_idx_end = np.searchsorted(self.ping_time, date_num_range[1], side="right")
if ping_idx_end>=self.ping_time.shape[0]:
ping_idx_end = self.ping_time.shape[0]-1
# Getting start and end depth indices
depth_vec = np.arange(sz[0])*self.depth_bin # list of depth bins [m]
depth_idx_start = np.searchsorted(depth_vec, echogram_params['depth_min'], side="left")
depth_idx_end = np.searchsorted(depth_vec, echogram_params['depth_max'], side="right")
if depth_idx_end>=depth_vec.shape[0]:
depth_idx_end = depth_vec.shape[0]-1
# Set up xticks -- day
del_time = (self.date_range[1]-self.date_range[0])
x_ticks_num = (del_time.days*24+del_time.seconds/60/60)/echogram_params['hour_spacing']
x_ticks_spacing = sz[1]/(x_ticks_num)
x_ticks = np.arange(0,sz[1],x_ticks_spacing)
x_ticks_label = np.arange(x_ticks.shape[0])*echogram_params['hour_spacing'] # this probably should be outside of the function
# Set up yticks -- depth
y_ticks_spacing = sz[0]/(echogram_params['depth_ticks_num']-1)
y_ticks = np.arange(echogram_params['depth_ticks_num'])*y_ticks_spacing
depth_spacing = np.around((echogram_params['depth_max']-\
echogram_params['depth_min'])/(echogram_params['depth_ticks_num']-1),decimals=1)
depth_label = np.around(np.arange(echogram_params['depth_ticks_num'])*depth_spacing,decimals=1)
# Plot
# -- plot echo_vals upside-down
axim = ax.imshow(self.echo_vals[str(self.frequency[freq_idx])][::-1,:],aspect='auto',\
vmax=echogram_params['cmap_max'],vmin=echogram_params['cmap_min'],cmap=self.cmap)
ax.set_yticks(y_ticks)
ax.set_yticklabels(depth_label)
ax.set_xticks(x_ticks)
ax.set_xticklabels(x_ticks_label)
return axim
def find_freq_seq(self,freq_select):
'''Find the sequence of transducer of a particular freq'''
return int(np.where(np.array(self.frequency)==freq_select)[0])
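# Hedged usage sketch, not part of the original module: plot one echogram for
# an already-loaded echopype_model.EchoDataRaw object. The 38 kHz frequency
# and the caller-supplied date range are placeholders.
def _example_echogram(echo_data, start_datetime, end_datetime):
    import matplotlib.pyplot as plt
    viewer = EchoDataViewer(echo_object=echo_data)
    viewer.set_date_range([start_datetime, end_datetime])
    fig, ax = plt.subplots(figsize=viewer.fig_size)
    axim = viewer.echogram(ax, ECHOGRAM_PARAMS, freq_select=38000.0)
    fig.colorbar(axim, ax=ax)
    return fig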
|
StarcoderdataPython
|
161203
|
<reponame>mavabene/ROAR
from pydantic import BaseModel, Field
from ROAR.control_module.controller import Controller
from ROAR.utilities_module.vehicle_models import VehicleControl, Vehicle
from ROAR.utilities_module.data_structures_models import Transform, Location
from collections import deque
import numpy as np
import math
import logging
from ROAR.agent_module.agent import Agent
from typing import Tuple
import json
from pathlib import Path
class BStanley_controller(Controller):
def __init__(self, agent, steering_boundary: Tuple[float, float],
throttle_boundary: Tuple[float, float], **kwargs):
super().__init__(agent, **kwargs)
#self.max_speed = self.agent.agent_settings.max_speed
self.max_speed = 130 # ************************* MAX SPEED *********************************
self.throttle_boundary = throttle_boundary
self.steering_boundary = steering_boundary
self.config = json.load(Path(agent.agent_settings.pid_config_file_path).open(mode='r'))
self.long_pid_controller = LongPIDController(agent=agent,
throttle_boundary=throttle_boundary,
max_speed=self.max_speed,
config=self.config["longitudinal_controller"])
self.blat_stanley_controller = BLatStanley_controller(
agent=agent,
config=self.config["latitudinal_controller"],
steering_boundary=steering_boundary
)
self.logger = logging.getLogger(__name__)
def run_in_series(self, next_waypoint: Transform, **kwargs) -> VehicleControl:
throttle = self.long_pid_controller.run_in_series(next_waypoint=next_waypoint,
target_speed=kwargs.get("target_speed", self.max_speed))
steering = self.blat_stanley_controller.run_in_series(next_waypoint=next_waypoint)
return VehicleControl(throttle=throttle, steering=steering)
@staticmethod
def find_k_values(vehicle: Vehicle, config: dict) -> np.array:
current_speed = Vehicle.get_speed(vehicle=vehicle)
k_p, k_d, k_i = 1, 0, 0
for speed_upper_bound, kvalues in config.items():
speed_upper_bound = float(speed_upper_bound)
if current_speed < speed_upper_bound:
k_p, k_d, k_i = kvalues["Kp"], kvalues["Kd"], kvalues["Ki"]
break
return np.clip([k_p, k_d, k_i], a_min=0, a_max=1)
# class LongPIDController(Controller):
# def __init__(self, agent, config: dict, throttle_boundary: Tuple[float, float], max_speed: float,
# dt: float = 0.03, **kwargs):
# super().__init__(agent, **kwargs)
# self.config = config
# self.max_speed = max_speed
# self.throttle_boundary = throttle_boundary
# self._error_buffer = deque(maxlen=10)
#
# self._dt = dt
#
# def run_in_series(self, next_waypoint: Transform, **kwargs) -> float:
# target_speed = min(self.max_speed, kwargs.get("target_speed", self.max_speed))
# current_speed = Vehicle.get_speed(self.agent.vehicle)
#
# k_p, k_d, k_i = BStanley_controller.find_k_values(vehicle=self.agent.vehicle, config=self.config)
# error = target_speed - current_speed
#
# self._error_buffer.append(error)
#
# if len(self._error_buffer) >= 2:
# # print(self._error_buffer[-1], self._error_buffer[-2])
# _de = (self._error_buffer[-2] - self._error_buffer[-1]) / self._dt
# _ie = sum(self._error_buffer) * self._dt
# else:
# _de = 0.0
# _ie = 0.0
# output = float(np.clip((k_p * error) + (k_d * _de) + (k_i * _ie), self.throttle_boundary[0],
# self.throttle_boundary[1]))
# # self.logger.debug(f"curr_speed: {round(current_speed, 2)} | kp: {round(k_p, 2)} | kd: {k_d} | ki = {k_i} | "
# # f"err = {round(error, 2)} | de = {round(_de, 2)} | ie = {round(_ie, 2)}")
# # f"self._error_buffer[-1] {self._error_buffer[-1]} | self._error_buffer[-2] = {self._error_buffer[-2]}")
# return output
# *** Roll ContRoller v3 ***
class LongPIDController(Controller):
def __init__(self, agent, config: dict, throttle_boundary: Tuple[float, float], max_speed: float,
dt: float = 0.03, **kwargs):
super().__init__(agent, **kwargs)
self.config = config
self.max_speed = max_speed
self.throttle_boundary = throttle_boundary
self._error_buffer = deque(maxlen=10)
self._dt = dt
def run_in_series(self, next_waypoint: Transform, **kwargs) -> float:
target_speed = min(self.max_speed, kwargs.get("target_speed", self.max_speed))
# self.logger.debug(f"Target_Speed: {target_speed} | max_speed = {self.max_speed}")
current_speed = Vehicle.get_speed(self.agent.vehicle)
print('max speed: ',self.max_speed)
k_p, k_d, k_i = BStanley_controller.find_k_values(vehicle=self.agent.vehicle, config=self.config)
error = target_speed - current_speed
self._error_buffer.append(error)
#****************** implement look ahead *******************
la_err = self.la_calcs(next_waypoint)
kla = .03
if len(self._error_buffer) >= 2:
# print(self._error_buffer[-1], self._error_buffer[-2])
_de = (self._error_buffer[-2] - self._error_buffer[-1]) / self._dt
_ie = sum(self._error_buffer) * self._dt
else:
_de = 0.0
_ie = 0.0
# output = float(np.clip((k_p * error) + (k_d * _de) + (k_i * _ie), self.throttle_boundary[0],
# self.throttle_boundary[1]))
print(self.agent.vehicle.transform.rotation.roll)
vehroll = self.agent.vehicle.transform.rotation.roll
if current_speed >= (target_speed + 2):
out = 1 - .1 * (current_speed - target_speed)
else:
if abs(self.agent.vehicle.transform.rotation.roll) <= .35:
out = 6 * np.exp(-0.05 * np.abs(vehroll))-(la_err/180)*current_speed*kla
else:
out = 2 * np.exp(-0.05 * np.abs(vehroll))-(la_err/180)*current_speed*kla # *****ALGORITHM*****
output = np.clip(out, a_min=0, a_max=1)
print('*************')
print('throttle = ', output)
print('*************')
# if abs(self.agent.vehicle.transform.rotation.roll) <= .35:
# output = 1
# if abs(self.agent.vehicle.transform.rotation.roll) > .35:
# # output = 1.2*np.exp(-0.07 * np.abs(vehroll))
# # output = 4 * np.exp(-0.06 * np.abs(vehroll))
#
# output = 0
# if abs(self.agent.vehicle.transform.rotation.roll) > .6:
# output = .8
# if abs(self.agent.vehicle.transform.rotation.roll) > 1.2:
# output = .7
# if abs(self.agent.vehicle.transform.rotation.roll) > 1.5:
# output = 1/(3.1**(self.agent.vehicle.transform.rotation.roll))
# if abs(self.agent.vehicle.transform.rotation.roll) > 7:
# output = 0
# if abs(self.agent.vehicle.transform.rotation.roll) > 1:
# output = .7
# if abs(self.agent.vehicle.transform.rotation.roll) > 3:
# output = .4
# if abs(self.agent.vehicle.transform.rotation.roll) > 4:
# output = .2
# if abs(self.agent.vehicle.transform.rotation.roll) > 6:
# output = 0
# self.logger.debug(f"curr_speed: {round(current_speed, 2)} | kp: {round(k_p, 2)} | kd: {k_d} | ki = {k_i} | "
# f"err = {round(error, 2)} | de = {round(_de, 2)} | ie = {round(_ie, 2)}")
# f"self._error_buffer[-1] {self._error_buffer[-1]} | self._error_buffer[-2] = {self._error_buffer[-2]}")
return output
def la_calcs(self, next_waypoint: Transform, **kwargs):
current_speed = int(Vehicle.get_speed(self.agent.vehicle))
cs = np.clip(current_speed, 80, 200)
# *** next points on path
# *** averaging path points for smooth path vector ***
next_pathpoint1 = (self.agent.local_planner.way_points_queue[cs+51])
next_pathpoint2 = (self.agent.local_planner.way_points_queue[cs+52])
next_pathpoint3 = (self.agent.local_planner.way_points_queue[cs+53])
next_pathpoint4 = (self.agent.local_planner.way_points_queue[2*cs+51])
next_pathpoint5 = (self.agent.local_planner.way_points_queue[2*cs+52])
next_pathpoint6 = (self.agent.local_planner.way_points_queue[2*cs+53])
# next_pathpoint4 = (self.agent.local_planner.way_points_queue[cs+43])
# next_pathpoint5 = (self.agent.local_planner.way_points_queue[cs+42])
# next_pathpoint6 = (self.agent.local_planner.way_points_queue[cs+41])
# next_pathpoint1 = (self.agent.local_planner.way_points_queue[31])
# next_pathpoint2 = (self.agent.local_planner.way_points_queue[32])
# next_pathpoint3 = (self.agent.local_planner.way_points_queue[33])
# next_pathpoint4 = (self.agent.local_planner.way_points_queue[52])
# next_pathpoint5 = (self.agent.local_planner.way_points_queue[53])
# next_pathpoint6 = (self.agent.local_planner.way_points_queue[54])
nx0 = next_pathpoint1.location.x
nz0 = next_pathpoint1.location.z
nx = (
next_pathpoint1.location.x + next_pathpoint2.location.x + next_pathpoint3.location.x + next_pathpoint4.location.x + next_pathpoint5.location.x + next_pathpoint6.location.x) / 6
nz = (
next_pathpoint1.location.z + next_pathpoint2.location.z + next_pathpoint3.location.z + next_pathpoint4.location.z + next_pathpoint5.location.z + next_pathpoint6.location.z) / 6
nx1 = (next_pathpoint1.location.x + next_pathpoint2.location.x + next_pathpoint3.location.x) / 3
nz1 = (next_pathpoint1.location.z + next_pathpoint2.location.z + next_pathpoint3.location.z) / 3
nx2 = (next_pathpoint4.location.x + next_pathpoint5.location.x + next_pathpoint6.location.x) / 3
nz2 = (next_pathpoint4.location.z + next_pathpoint5.location.z + next_pathpoint6.location.z) / 3
npath0 = np.transpose(np.array([nx0, nz0, 1]))
npath = np.transpose(np.array([nx, nz, 1]))
npath1 = np.transpose(np.array([nx1, nz1, 1]))
npath2 = np.transpose(np.array([nx2, nz2, 1]))
path_yaw_rad = -(math.atan2((nx2 - nx1), -(nz2 - nz1)))
path_yaw = path_yaw_rad * 180 / np.pi
veh_yaw = self.agent.vehicle.transform.rotation.yaw
ahead_err = abs(abs(path_yaw)-abs(veh_yaw))
if ahead_err < 60:
la_err = 0
elif ahead_err > 80:
la_err = 2 * ahead_err
else:
la_err = ahead_err
# if la_err > 180:
# ahead_err = la_err - 360
# elif la_err < -180:
# ahead_err = la_err + 360
# else:
# ahead_err = la_err
print('--------------------------------------')
# print(f"{veh_x},{veh_y},{veh_z},{veh_roll},{veh_pitch},{veh_yaw}")
# datarow = f"{veh_x},{veh_y},{veh_z},{veh_roll},{veh_pitch},{veh_yaw}"
# self.waypointrecord.append(datarow.split(","))
print('** la err **', la_err)
print('--------------------------------------')
#
# print('** look ahead error **', ahead_err)
return la_err
#***********************************************************
# ***** end version Roll ContRoller v3*****
# # *** original Roll ContRoller + v2 ***
# class LongPIDController(Controller):
# def __init__(self, agent, config: dict, throttle_boundary: Tuple[float, float], max_speed: float,
# dt: float = 0.03, **kwargs):
# super().__init__(agent, **kwargs)
# self.config = config
# self.max_speed = max_speed
# self.throttle_boundary = throttle_boundary
# self._error_buffer = deque(maxlen=10)
#
# self._dt = dt
#
# def run_in_series(self, next_waypoint: Transform, **kwargs) -> float:
# target_speed = min(self.max_speed, kwargs.get("target_speed", self.max_speed))
# #target_speed = 120
# # self.logger.debug(f"Target_Speed: {target_speed} | max_speed = {self.max_speed}")
# current_speed = Vehicle.get_speed(self.agent.vehicle)
#
# k_p, k_d, k_i = BStanley_controller.find_k_values(vehicle=self.agent.vehicle, config=self.config)
# error = target_speed - current_speed
#
# self._error_buffer.append(error)
#
# if len(self._error_buffer) >= 2:
# # print(self._error_buffer[-1], self._error_buffer[-2])
# _de = (self._error_buffer[-2] - self._error_buffer[-1]) / self._dt
# _ie = sum(self._error_buffer) * self._dt
# else:
# _de = 0.0
# _ie = 0.0
# #output = float(np.clip((k_p * error) + (k_d * _de) + (k_i * _ie), self.throttle_boundary[0],
# # self.throttle_boundary[1]))
# #print(self.agent.vehicle.transform.rotation.roll)
# vehroll=self.agent.vehicle.transform.rotation.roll
# if current_speed >= (target_speed+2):
# out = 1-.1*(current_speed-target_speed)
# else:
# out = 2 * np.exp(-0.4 * np.abs(vehroll))
#
# output = np.clip(out, a_min=0, a_max=1)
# #print('throttle = ',output)
# # if abs(self.agent.vehicle.transform.rotation.roll) <= .35:
# # output = 1
# # if abs(self.agent.vehicle.transform.rotation.roll) > .35:
# # output = 1.2*np.exp(-0.07 * np.abs(vehroll))
# #output = 4 * np.exp(-0.06 * np.abs(vehroll))
#
# # output = 0
# # if abs(self.agent.vehicle.transform.rotation.roll) > .6:
# # output = .8
# # if abs(self.agent.vehicle.transform.rotation.roll) > 1.2:
# # output = .7
# # if abs(self.agent.vehicle.transform.rotation.roll) > 1.5:
# # output = 1/(3.1**(self.agent.vehicle.transform.rotation.roll))
# # if abs(self.agent.vehicle.transform.rotation.roll) > 7:
# # output = 0
# # if abs(self.agent.vehicle.transform.rotation.roll) > 1:
# # output = .7
# # if abs(self.agent.vehicle.transform.rotation.roll) > 3:
# # output = .4
# # if abs(self.agent.vehicle.transform.rotation.roll) > 4:
# # output = .2
# # if abs(self.agent.vehicle.transform.rotation.roll) > 6:
# # output = 0
#
# # self.logger.debug(f"curr_speed: {round(current_speed, 2)} | kp: {round(k_p, 2)} | kd: {k_d} | ki = {k_i} | "
# # f"err = {round(error, 2)} | de = {round(_de, 2)} | ie = {round(_ie, 2)}")
# #f"self._error_buffer[-1] {self._error_buffer[-1]} | self._error_buffer[-2] = {self._error_buffer[-2]}")
# return output
# # ***** end original version Roll ContRoller *****
class BLatStanley_controller(Controller):
def __init__(self, agent, config: dict, steering_boundary: Tuple[float, float],
dt: float = 0.03, **kwargs):
super().__init__(agent, **kwargs)
self.config = config
self.steering_boundary = steering_boundary
self._error_buffer = deque(maxlen=10)
self._dt = dt
self.waypointrecord=[]
def run_in_series(self, next_waypoint: Transform, **kwargs) -> float: #*********** aka stanley_control(state, cx, cy, cyaw, last_target_idx)
'''
TODO: tune
*** inputs needed: vehicle yaw, x, y; nearest path yaw, x, y
*** implement target calculations: heading error (vehicle yaw - path yaw at nearest path point);
cross track error (front axle x,y - nearest path x,y)
self.way_points_queue[0]
*** output lat_control: steering angle delta = heading error + inv tan (gain * cross track error/veh speed)
'''
vel = self.agent.vehicle.velocity
veh_spd = math.sqrt(vel.x ** 2 + vel.y ** 2 + vel.z ** 2) #*** m/s
k = 10 #control gain
kh = .1
kt = .02
ks = .6
# veh_loc = self.agent.vehicle.transform.location
pos_err, head_err = self.stan_calcs(next_waypoint)
#lat_control = head_err + k * pos_err #if angle > 30 then 1, otherwise angle/180 ************ what does 1 equate to? 30 degrees?
print('cross error return: ',np.arctan((k * pos_err)/((ks*veh_spd)+.001)))
print('head err return',kh*head_err)
lat_control = float(
np.clip(kt*(kh*head_err + (np.arctan(k * pos_err/(ks*veh_spd+.001)))), self.steering_boundary[0], self.steering_boundary[1]) #**** guessing steering of '1' equates to 30 degrees
)
print('-----------------------------------------')
print('bStanley lat_control = ', lat_control)
print('-----------------------------------------')
return lat_control
def stan_calcs(self, next_waypoint: Transform, **kwargs):
'''
calculate target
front axle position (corrected from veh location + lv cos and sin heading
nearest point
front axle error (distance from front axle to desired path position)
front_axle_vec = [-np.cos(state.yaw + np.pi / 2),
-np.sin(state.yaw + np.pi / 2)]
error_front_axle = np.dot([dx[target_idx], dy[target_idx]], front_axle_vec)
'''
# *** vehicle data ***
wb = 2.96 # assumed vehicle wheelbase (tesla)
veh_x = self.agent.vehicle.transform.location.x
veh_y = self.agent.vehicle.transform.location.y
veh_z = self.agent.vehicle.transform.location.z
veh_yaw = self.agent.vehicle.transform.rotation.yaw
veh_roll = self.agent.vehicle.transform.rotation.roll
veh_pitch = self.agent.vehicle.transform.rotation.pitch
print('pos x: ', veh_x)
print('pos y: ', veh_y)
print('pos z: ', veh_z)
print('yaw: ', veh_yaw)
print('pitch: ', veh_pitch)
print('roll: ', veh_roll)
# ************* convert points to vehicle reference *****************
theta_deg = veh_yaw
theta_rad = np.radians(theta_deg)
# gvw3d=np.array([[np.cos (theta_rad), 0, np.sin (theta_rad)],
# [0, 1, 0 ],
# [-np.sin (theta_rad), 0, np.cos (theta_rad)]])
# gwv = np.array([[np.cos(theta_rad), -np.sin(theta_rad), veh_x],
# [np.sin(theta_rad), np.cos(theta_rad), veh_z],
# [0, 0, 1]])
gwv = np.array([[np.cos(theta_rad), np.sin(theta_rad), veh_x],
[-np.sin(theta_rad), np.cos(theta_rad), veh_z],
[0, 0, 1]])
gvw = np.linalg.inv(gwv)
# *** define points in vehicle reference frame ***
# *** vehicle front axle ***
fx = 0
fz = -.5 * wb
# *** next waypoint ***
nextwp = np.transpose(np.array([next_waypoint.location.x, next_waypoint.location.z, 1]))
vf_nextwp = np.matmul(gvw, nextwp)
# nextwp = np.transpose(np.array([next_waypoint.location.x, next_waypoint.location.z, 1]))
# vf_nextwp = np.matmul(gvw, nextwp)
# *** next points on path
#*** averaging path points for smooth path vector ***
next_pathpoint1 = (self.agent.local_planner.way_points_queue[1])
next_pathpoint2 = (self.agent.local_planner.way_points_queue[2])
next_pathpoint3 = (self.agent.local_planner.way_points_queue[3])
next_pathpoint4 = (self.agent.local_planner.way_points_queue[17])
next_pathpoint5 = (self.agent.local_planner.way_points_queue[18])
next_pathpoint6 = (self.agent.local_planner.way_points_queue[19])
nx0 = next_pathpoint1.location.x
nz0 = next_pathpoint1.location.z
nx = (next_pathpoint1.location.x + next_pathpoint2.location.x + next_pathpoint3.location.x + next_pathpoint4.location.x + next_pathpoint5.location.x + next_pathpoint6.location.x)/6
nz = (next_pathpoint1.location.z + next_pathpoint2.location.z + next_pathpoint3.location.z + next_pathpoint4.location.z + next_pathpoint5.location.z + next_pathpoint6.location.z) / 6
nx1 = (next_pathpoint1.location.x + next_pathpoint2.location.x + next_pathpoint3.location.x) /3
nz1 = (next_pathpoint1.location.z + next_pathpoint2.location.z + next_pathpoint3.location.z) /3
nx2 = (next_pathpoint4.location.x + next_pathpoint5.location.x + next_pathpoint6.location.x)/3
nz2 = (next_pathpoint4.location.z + next_pathpoint5.location.z + next_pathpoint6.location.z) /3
npath0 = np.transpose(np.array([nx0, nz0, 1]))
npath = np.transpose(np.array([nx, nz, 1]))
npath1 = np.transpose(np.array([nx1, nz1, 1]))
npath2 = np.transpose(np.array([nx2, nz2, 1]))
vf_npath0 = np.matmul(gvw, npath0)
vf_npath = np.matmul(gvw, npath)
vf_npath1 = np.matmul(gvw, npath1)
vf_npath2 = np.matmul(gvw, npath2)
# ***
# print ('theta_deg = ',theta_deg)
# print ('vf_npath = ',vf_npath)
# print('vf_npath1 = ', vf_npath1)
# print('vf_npath2 = ', vf_npath2)
# print ('Gvw = ', gvw)
# # print ('Gvw inv = ', gwv)
print('vehicle frame next waypoint = ', vf_nextwp)
print('next waypoint = ', nextwp)
# print('next waypoint object',next_waypoint)
# print ('next waypoint = ', next_waypoint.location)
'''
# *** get in vehicle reference ***
path coordinates
next_wp
vehicle coordinates
'''
# # *** getting front axle coordinates ***
# frontx = veh_x + wb*np.cos(veh_pitch*180/np.pi)/2
# frontz = veh_z + wb*np.sin(veh_pitch*180/np.pi)/2
# # *** referencing next waypoint coordinates ***
# path_x = next_waypoint.location.x #*** next waypoint: self.way_points_queue[0]
# path_z = next_waypoint.location.z #** how get
# *** calculate crosstrack error ***
# *** calculate front axle position error from path with positive error = turn to right, negative = turn to left
# vf_cterad = -math.atan2(vf_npath1[0], -vf_npath1[1])
# #vf_cterad = -1*math.atan2(npath1[1], npath1[0])
# vf_ctedeg = vf_cterad*180/math.pi-90
# vf_cte = vf_ctedeg
#vf_cte = (vf_nextwp[0]+vf_npath0[0])/vf_nextwp[1]/4
vf_cte = vf_nextwp[0]
#vf_cte = vf_npath0[0]
# dx = nx1-frontx
# dz = nz1-frontz
# dx = veh_x - path_x
# dz = veh_z - path_z
# dpathhead_rad = (math.atan2(dz, dx)) # *** need some lead to get sign correct (with sin)?
# #dpathhead_rad = (math.atan2((path_z - frontz), (path_x - frontx)))
# #dpathhead_rad = (math.atan2((path_z - veh_z), (path_x - veh_x)))
# dpathhead_ang = dpathhead_rad * 180 / np.pi
# pitch_to_path = dpathhead_ang - veh_pitch
# #dpath = np.cos(pitch_to_path*np.pi/180)*np.hypot(dx, dz) # *** pitch goes from + to - as crosses x axis
#
# #dpath = np.hypot(dx, dz)-8 # really should take this value * sign of pitch_to_path
# #*************************
#
# #dpath = (((nx2-path_x)*(path_z-veh_z)-(path_x-veh_x)*(nz2-path_z)))/(((nx2-path_x)**2+(nz2-path_z)**2)**.5)
# dpath = (((nx2-nx1)*(nz1-veh_z)-(nx1-veh_x)*(nz2-nz1)))/(((nx2-nx1)**2+(nz2-nz1)**2)**.5)
#*************************
# front_axle_vec = [-np.cos(veh_yaw + np.pi / 2), -np.sin(veh_yaw + np.pi / 2)] # RMS error?
# e_front_axle_pos = np.dot([nx, ny], front_axle_vec)
#***get heading if vehicle was at the correct spot on path**
# vf_path_yaw = np.degrees(math.atan2((vf_npath2[1] - vf_nextwp[1]), (vf_npath2[0] - vf_nextwp[0])))
path_yaw_rad = -(math.atan2((nx - nx1), -(nz - nz1)))
path_yaw = path_yaw_rad*180/np.pi
#***difference between correct heading and actual heading - pos error gives right steering, neg gives left ***
hd_err = veh_yaw - path_yaw
#head_err = 0
if hd_err > 180:
head_err = hd_err-360
elif hd_err < -180:
head_err = hd_err +360
else:
head_err = hd_err
# head_err = hd_err
# head_err = vf_path_pitch - theta_deg
print('--------------------------------------')
print('veh yaw = ', veh_yaw)
# # print('veh roll = ', veh_roll)
#print('veh pitch = ', veh_pitch)
# print('**pitch to path** = ', pitch_to_path)
# print('**dpathhead_ang**',dpathhead_ang)
# print('veh x = ', veh_x)
# print('veh z = ', veh_z)
print(f"{veh_x},{veh_y},{veh_z},{veh_roll},{veh_pitch},{veh_yaw}")
datarow=f"{veh_x},{veh_y},{veh_z},{veh_roll},{veh_pitch},{veh_yaw}"
self.waypointrecord.append(datarow.split(","))
# # print('front x = ', frontx)
# # print('front z = ', frontz)
# print('path x = ', path_x)
# print('path z = ', path_z)
# print('next path x1 = ', nx1)
# print('next path z1 = ', nz1)
# print('next path x = ', nx)
# print('next path z = ', nz)
# print('next path x2 = ', nx2)
# print('next path z2 = ', nz2)
#
# print('**distance to path = ', dpath)
print('path yaw = ', path_yaw)
# print('path_pitch_rad = ', path_pitch_rad)
# # print('path queue 0 = ', self.agent.local_planner.way_points_queue[0])
# # print('path queue 4 = ', self.agent.local_planner.way_points_queue[9])
# # print('path queue 20 = ', self.agent.local_planner.way_points_queue[17])
print('** hd err **', hd_err)
print('** heading error **', head_err)
print('vf cross track error',vf_cte)
# # print('_dot err', _dot)
# vf_cte=0 # check if goes straight
#head_err=0
return vf_cte, head_err
'''
*** end my code ***
'''
# def calc_curr_heading_err(self, next_waypoint: Transform) -> float:
# return next_waypoint.rotation.pitch - self.agent.vehicle.transform.rotation.pitch
#
# def calc_cross_track_error(self, next_waypoint: Transform) -> float:
# # calculate a vector that represent where you are going
# v_begin = self.agent.vehicle.transform.location
# v_end = v_begin + Location(
# x=math.cos(math.radians(self.agent.vehicle.transform.rotation.pitch)),
# y=v_begin.y,
# z=math.sin(math.radians(self.agent.vehicle.transform.rotation.pitch)),
# )
# v_vec = np.array([v_end.x - v_begin.x, v_end.y - v_begin.y, v_end.z - v_begin.z])
#
# # calculate error projection
# w_vec = np.array(
# [
# next_waypoint.location.x - v_begin.x,
# next_waypoint.location.y - v_begin.y,
# next_waypoint.location.z - v_begin.z,
# ]
# )
# _dot = math.acos(
# np.clip(
# np.dot(w_vec, v_vec) / (np.linalg.norm(w_vec) * np.linalg.norm(v_vec)),
# -1.0,
# 1.0,
# )
# )
# _cross = np.cross(v_vec, w_vec)
# if _cross[1] > 0:
# _dot *= -1.0
# return _dot
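# Hedged reference sketch, not part of ROAR: the textbook Stanley steering law
# that run_in_series() above approximates (delta = heading error +
# arctan(gain * cross-track error / speed)); the gain and epsilon below are
# hypothetical tuning values.
def stanley_steering_sketch(heading_error_rad, cross_track_error_m, speed_mps,
                            k=10.0, eps=1e-3):
    import numpy as np
    return heading_error_rad + np.arctan2(k * cross_track_error_m, speed_mps + eps)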
|
StarcoderdataPython
|
3288919
|
<gh_stars>10-100
"""Define Snow Category Item."""
import logging
class SnowCatalogItem(object):
"""ServiceNow Category Item."""
def __init__(self, name, description, conf):
"""Initialize."""
self.name = name
# terraform catalog sys_id
self.catalog = conf.get("SERVICENOW", "TF_CATALOG")
# terraform catalog's watchmaker category
self.category = conf.get("SERVICENOW", "CATEGORY")
self.description = description
# terraform deployment workflow
self.workflow = conf.get("SERVICENOW", "TFE_WORKFLOW")
# terraform-snow (SN application) sys_id
# search for terraform-snow in the sys_package table on your SN inst.
self.sys_package = conf.get("SERVICENOW", "SYS_PACKAGE")
self.isactive = "true"
def data(self):
"""Create category item data payload."""
logging.info('')
return {
"sys_package": self.sys_package,
"name": self.name,
"category": self.category,
"sc_catalogs": self.catalog,
"short_description": self.description,
"workflow": self.workflow,
"active": self.isactive
}
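# Hedged usage sketch, not part of the module: build the catalog item payload
# from a configparser config; the INI filename and item values are hypothetical.
def _example_payload():
    import configparser
    conf = configparser.ConfigParser()
    conf.read("servicenow.ini")  # assumed to contain a [SERVICENOW] section
    item = SnowCatalogItem("my-terraform-module",
                           "Deploys my module through Terraform Enterprise",
                           conf)
    return item.data()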
|
StarcoderdataPython
|
1742212
|
import FWCore.ParameterSet.Config as cms
EcalTrivialConditionRetriever = cms.ESSource("EcalTrivialConditionRetriever",
TotLumi = cms.untracked.double(0.0),
InstLumi = cms.untracked.double(0.0),
producedEcalChannelStatus = cms.untracked.bool(True),
producedEcalDQMTowerStatus = cms.untracked.bool(True),
producedEcalDQMChannelStatus = cms.untracked.bool(True),
producedEcalDCSTowerStatus = cms.untracked.bool(True),
producedEcalDAQTowerStatus = cms.untracked.bool(True),
producedEcalTrgChannelStatus = cms.untracked.bool(True),
# Values to get correct noise on RecHit amplitude using 3+5 weights
EBpedRMSX12 = cms.untracked.double(1.089),
weightsForTB = cms.untracked.bool(False),
# channel status
channelStatusFile = cms.untracked.string(''),
producedEcalPedestals = cms.untracked.bool(True),
# If set true reading optimized weights (3+5 weights) from file
getWeightsFromFile = cms.untracked.bool(True),
intercalibErrorsFile = cms.untracked.string(''),
laserAPDPNMean = cms.untracked.double(1.0),
laserAPDPNRefMean = cms.untracked.double(1.0),
# untracked string amplWeightsFile = "CalibCalorimetry/EcalTrivialCondModules/data/ampWeights_TB.txt"
# file with intercalib constants - same format used for online and offline DB
# by default set all inter calib const to 1.0 if no file provided
intercalibConstantsFile = cms.untracked.string(''),
linearCorrectionsFile = cms.untracked.string(''),
producedEcalWeights = cms.untracked.bool(True),
EEpedRMSX12 = cms.untracked.double(2.018),
producedEcalLinearCorrections = cms.untracked.bool(True),
producedEcalIntercalibConstants = cms.untracked.bool(True),
producedEcalIntercalibConstantsMC = cms.untracked.bool(True),
producedEcalIntercalibErrors = cms.untracked.bool(True),
producedEcalTimeCalibConstants = cms.untracked.bool(True),
producedEcalTimeCalibErrors = cms.untracked.bool(True),
producedEcalTimeOffsetConstant = cms.untracked.bool(True),
producedEcalLaserCorrection = cms.untracked.bool(True),
producedEcalGainRatios = cms.untracked.bool(True),
producedEcalADCToGeVConstant = cms.untracked.bool(True),
adcToGeVEBConstant = cms.untracked.double(0.035),
adcToGeVEEConstant = cms.untracked.double(0.06),
### phase I Pulse Shapes
sim_pulse_shape_TI = cms.untracked.double( 1.0),
sim_pulse_shape_EB_thresh = cms.double(0.00013),
sim_pulse_shape_EE_thresh = cms.double(0.00025),
sim_pulse_shape_APD_thresh = cms.double(0),
EBSimPulseShapeFile = cms.untracked.string("EB_SimPulseShape.txt"),
EESimPulseShapeFile = cms.untracked.string("EE_SimPulseShape.txt"),
APDSimPulseShapeFile = cms.untracked.string("APD_SimPulseShape.txt"),
# cluster functions/corrections -- by default no parameters are passed
producedEcalClusterLocalContCorrParameters = cms.untracked.bool(True),
localContCorrParameters = cms.untracked.vdouble(
# 1.00365, 0.0007179, -0.008303, 0.01116, -0.1057, 1.00362, 0.0006617, -0.005505, -0.01044, -0.1770, 1.0035),
# Monte Carlo (Apr 2012)
# 1.00385, 0.000847402, 0.0419403, 1.0033, 0.00100782, 0.0362918,
# 1.00322, 0.000902587, 0.0335483, 1.00478, 0.000112104, 0.05377,
# 1.00363, -0.00168853, 0.0392934, 1.00374, -0.00197705, 0.0402998,
# 1.00258, -0.00121254, 0.0278283, 1.00266, 0.00165111, 0.0245362),
# data (Apr 2012)
1.00603, 0.00300789, 0.0667232, 1.00655, 0.00386189, 0.073931,
1.00634, 0.00631341, 0.0764134, 1.00957, 0.0113306, 0.123808,
1.00403, -0.0012733, 0.042925, 1.00394, -0.00137567, 0.0416698,
1.00298, -0.00111589, 0.0320377, 1.00269, -0.00153347, 0.0296769),
producedEcalClusterCrackCorrParameters = cms.untracked.bool(True),
crackCorrParameters = cms.untracked.vdouble(
0.9933, -0.01813, -0.03359, -0.09972, -0.2889, 0.9909, 0.04019,
-0.1095, 0.2401, -0.3412, 0.9942, -0.01245, -0.03002, -0.1098,
-0.2777, 0.9981, 0.01087, -0.01359, 0.06212, -0.354),
mappingFile = cms.untracked.string('Geometry/EcalMapping/data/EEMap.txt'),
producedEcalMappingElectronics = cms.untracked.bool(True),
energyUncertaintyParameters = cms.untracked.vdouble(
0.002793, 0.000908, 0.23592, 0.04446,
0.02463, -0.001782, -0.343492, -0.017968,
-0.013338, 0.0013819, 0.398369, 0.025488,
0.002264, 0.000674, 0.281829, 0.043100,
0.02047, -0.001914, -0.297824, -0.020220,
-0.010669, 0.001648, 0.464209, -0.01112,
0.000530, 0.001274, 0.21071, 0.04679,
0.031323, -0.001997, -0.40509, -0.05102,
-0.016961, 0.0014051, 0.313083, 0.059649,
-0.03947, 0.03364, 3.6768, 0.243637,
0.05167, -0.02335, -2.77506, -0.162785,
-0.011482, 0.004621, 0.511206, 0.032607,
-0.05062, 0.057102, 5.48885, -0.5305,
0.06604, -0.04686, -4.34245, 0.500381,
-0.01487, 0.010382, 0.823244, -0.09392,
-0.04195, 0.028296, 1.66651, 0.87535,
0.048104, -0.01493, -0.98163, -0.72297,
-0.010256, 0.001827, 0.149991, 0.144294),
producedEcalClusterEnergyUncertaintyParameters = cms.untracked.bool(True),
energyCorrectionParameters = cms.untracked.vdouble(
# 40.2198, -3.03103e-6,
# 1.1, 8.0, -0.05185, 0.1354, 0.9165, -0.0005626, 1.385,
# 1.002, -0.7424, 0, 0,
# 0, 0.5558, 2.375, 0.1869,
# 7.6, 1.081, -0.00181,
# 0, 0,
# 0.9, 6.5, -0.1214, 0.2362, 0.8847, -0.00193, 1.057,
# 2.213, -17.29,
# -0.599, 8.874,
# 0.09632, -1.457,
# -0.7584, 10.29,
# 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
# 1, 0, 0, 0, 0, 0, 0, 0),
# New dat from <NAME> (2011/03/02)
40.2198, -3.03103e-6,
1.1, 8.0, -0.05289, 0.1374, 0.9141, -0.000669, 1.38,
1.000, -0.698, 0, 0,
0, 0.6605, 8.825, 0.841,
7.6, 1.081, -0.00181,
0, 0,
0.9, 6.5, -0.07945, 0.1298, 0.9147, -0.001565, 0.9,
-3.516, -2.362,
2.151, 1.572,
-0.336, -0.2807,
3.2, 0,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, 0, 0, 0, 0, 0, 0, 0),
producedEcalClusterEnergyCorrectionParameters = cms.untracked.bool(True),
energyCorrectionObjectSpecificParameters = cms.untracked.vdouble(
# 2011 Nov 17
# fEta : p0, p1
# 40.2198, -3.03103e-6,
## fBremEta : xcorr,par0, par1, par2, par3, par4 (x 14 x 2 (electron/photon))
# Electrons
#xcorr
# 1.00227, 1.00252, 1.00225, 1.00159, 0.999475, 0.997203, 0.993886,
# 0.971262, 0.975922, 0.979087, 0.98495, 0.98781, 0.989546, 0.989638,
#par
# 1.00718, -0.00187886, 0, 0, 0,
# 1.00713, -0.00227574, 0, 0, 0,
# 1.00641, -0.00259935, 0, 0, 0,
# 1.00761, -0.00433692, 0, 0, 0,
# 1.00682, -0.00551324, 0, 0, 0,
# 1.0073, -0.00799669, 0, 0, 0,
# 1.00462, -0.00870057, 0, 0, 0,
# 0.972798, -0.000771577, -0.00276696, 0, 0,
# 0.981672, -0.00202028, -0.00471028, 0, 0,
# 0.98251, 0.00441308, -0.00809139, 0, 0,
# 0.986123, 0.00832913, -0.00944584, 0, 0,
# 0.990124, 0.00742879, -0.00960462, 0, 0,
# 0.990187, 0.0094608, -0.010172, 0, 0,
# 0.99372, 0.00560406, -0.00943169, 0, 0,
# Photons
#xcorr
# 1.00506, 1.00697, 1.00595, 1.00595, 1.00595, 1.00595, 1.00595,
# 0.966651, 0.97381, 0.976516, 0.983254, 0.98502, 0.98502, 0.978472,
#par
# 0.00132382, 2.17664, -0.00467206, 0.988994, 17.5858,
# -0.00590257, 1.90733, 0.000684327, 0.986431, 16.6698,
# 0.00265109, 1.73272, -0.00107022, 0.989322, 15.4911,
# 0.00231631, 1.3463, -0.00369555, 0.987133, 10.9233,
# 0.00984253, 1.33889, -0.00392593, 0.979191, 9.35276,
# 0.023683, 1.31198, -0.00947317, 0.963352, 7.5597,
# 0.0851133, 1.38097, -0.0340201, 0.969502, 4.17983,
# 6.71705, 5034.26, -2.68669, 0.970174, 1.00288,
# 1306.82, 472004, -1.86145, 0.981714, -0.25644,
# 0.317121, 3.22717, -0.126848, 0.957792, 2.01028,
# 0.275225, 2.20686, -0.11009, 0.93922, 2.69958,
# 0.0639875, 1.40045, -0.0255853, 0.821566, 7.3297,
# 0.030488, 1.37842, -0.0121879, 0.8173, 9.29944,
# 0.213906, 1.67471, -0.0860589, 0.893636, 3.78218,
## fEt : 7 x 4 (photon/electron, EB/EE)
# Electrons EB
# 0.97213, 0.999528, 5.61192e-06, 0.0143269, -17.1776, 0, 0,
# Electrons EE
# 0.930081, 0.996683, 3.54079e-05, 0.0460187, -23.2461, 0, 0,
# Photons EB
# 1, 1.00348, 1.001, -9.17302e-06, 0.999688, 0, 0,
# Photons EE
# 1, 0.996931, 0.999497, 0.992617, 7.52128e-05, -1.2845e-07, 1.00231,
## fEnergy : 5 x 2 (photon/electron, EE only)
# Electrons EE
# 400, 0.982475, 4.95413e-05, 0.16886, -30.1517,
# Photons EE
# 850, 0.994169, 1.28629e-05, 0, 0),
# 2012 May 16
# fEta : p0, p1
40.2198, -3.03103e-6,
# Electron f(Brem,eta):
1.00355, 1.00377, 1.00307, 1.00235, 1.0008, 0.999123, 0.995808,
0.974023, 0.983046, 0.986587, 0.989959, 0.992291, 0.994088, 0.994841,
1.00824, -0.00195358, 0, 0, 0,
1.00848, -0.00249326, 0, 0, 0,
1.00762, -0.00267961, 0, 0, 0,
1.00817, -0.00405541, 0, 0, 0,
1.00648, -0.00472328, 0, 0, 0,
1.00823, -0.00789251, 0, 0, 0,
1.00671, -0.00889114, 0, 0, 0,
0.977122, -0.00079133, -0.00213429, 0, 0,
0.988986, -0.00383962, -0.00256931, 0, 0,
0.990514, 0.00110704, -0.00538053, 0, 0,
0.989242, 0.00822155, -0.00760498, 0, 0,
0.99109, 0.0100383, -0.00889766, 0, 0,
0.984981, 0.0207496, -0.011706, 0, 0,
0.996159, 0.00762923, -0.00876377, 0, 0,
# Photon f(Brem,eta):
1.00942, 1.01462, 1.00984, 1.00984, 1.00984, 1.00984, 1.00984,
0.976343, 0.984129, 0.985861, 0.987185, 0.986922, 0.984653, 0.984653,
0.0631272, 2.07465, -0.0006589, 0.989607, 12.9334,
-0.00810258, 1.87803, 0.00312754, 0.989272, 12.777,
-0.000777875, 1.6271, 0.0409175, 0.992587, 10.4214,
0.00499402, 1.27952, -0.0171224, 0.990867, 7.31709,
0.0206879, 1.3566, -0.00869229, 0.983379, 7.12404,
0.117245, 1.67142, -0.0468981, 0.986991, 2.89181,
0.0855469, 1.42217, -0.0342187, 0.971139, 4.21491,
2.32816, 556.179, -0.93126, 0.972245, 1.83274,
0.462982, 4.21266, -0.0638084, 0.973512, 1.96724,
0.267879, 2.82353, -0.107158, 0.955956, 2.67778,
0.2808, 3.11316, -0.11232, 0.956383, 2.8149,
0.012426, 1.80645, -1.10844, 0.907241, 4.27577,
0.266712, 2.74984, -0.106685, 0.958985, 2.72102,
0.253367, 2.53726, -0.101347, 0.925249, 3.76083,
## f(ET) Electron EB
0.976603, 0.999277, 6.91141e-06, 0.0493142, -8.21903, 0, 0,
# f(ET) Electron EE
0.949713, 1.00196, 3.84843e-06, 0.0329028, -34.6927, 0, 0,
# f(ET) Photon EB
1, 1.00213, 1.00069, -5.27777e-06, 0.99992, 0, 0,
# f(ET) Photon EE
1, 1.00206, 0.998431, 0.995999, 3.22962e-05, -1.8556e-08, 1.00205,
# f(E) Electron EE
400, 0.986762, 3.65522e-05, 0.178521, -24.8851,
# f(E) Photon EE
600, 0.995234, 1.14198e-05, 0, 0),
producedEcalClusterEnergyCorrectionObjectSpecificParameters = cms.untracked.bool(True),
producedEcalSampleMask = cms.untracked.bool(True),
sampleMaskEB = cms.untracked.uint32(1023),
sampleMaskEE = cms.untracked.uint32(1023),
producedEcalTimeBiasCorrections = cms.untracked.bool(True),
# trivial
# EBtimeCorrAmplitudeBins = cms.untracked.vdouble(0),
# EBtimeCorrShiftBins = cms.untracked.vdouble(0),
# EEtimeCorrAmplitudeBins = cms.untracked.vdouble(0),
# EEtimeCorrShiftBins = cms.untracked.vdouble(0)
# data 2011
EBtimeCorrAmplitudeBins = cms.untracked.vdouble(
7.9, 8.9, 10, 11.2, 12.5, 14.1, 15.8, 17.7, 19.9, 22.3, 25, 28.1, 31.5, 35.3, 39.7,
44.5, 49.9, 56, 62.8, 70.5, 79.1, 88.8, 99.6, 111.7, 125.4, 140.7, 157.9, 177.1, 198.7, 223,
250.2, 280.7, 315, 353.4, 396.5, 444.9, 499.2, 560.1, 628.4, 705.1, 791.1, 887.7, 996, 1117.5, 1253.9,
1406.8, 1578.5, 1771.1, 1987.2, 2229.7, 2501.8, 2807, 3149.5, 3533.8, 3895.9, 3896, 4311.8, 4837.9, 5428.2, 6090.6,
6833.7, 7667.5, 8603.1, 9652.9, 10830, 12152, 13635, 15298, 17165, 19260, 21610),
EBtimeCorrShiftBins = cms.untracked.vdouble(
-1.770, -1.770, -1.770, -1.770, -1.666, -1.430, -1.233, -1.012, -0.866, -0.736, -0.640, -0.561, -0.505, -0.452, -0.405,
-0.363, -0.335, -0.305, -0.279, -0.260, -0.239, -0.220, -0.204, -0.191, -0.186, -0.177, -0.158, -0.137, -0.126, -0.115,
-0.104, -0.096, -0.085, -0.064, -0.056, -0.036, -0.020, -0.006, -0.020, -0.009, -0.020, 0.005, 0.053, 0.076, 0.093,
0.137, 0.143, 0.171, 0.222, 0.229, 0.271, 0.298, 0.312, 0.307, 0.254 , -0.997 ,-0.859 , -0.819, -0.775, -0.589,
-0.428, -0.288, -0.434, -0.277, -0.210, -0.179, -0.134, 0.362, 0.152, -0.282, -0.382),
EEtimeCorrAmplitudeBins = cms.untracked.vdouble(
15.7, 17.6, 19.7, 22.1, 24.8, 27.9, 31.3, 35.1, 39.4, 44.2, 49.6, 55.6, 62.4, 70, 78.6,
88.1, 98.9, 111, 124.5, 139.7, 156.7, 175.9, 197.3, 221.4, 248.4, 278.7, 312.7, 350.9, 393.7, 441.7,
495.6, 556.1, 624, 700.1, 785.5, 881.4, 988.9, 1109.6, 1245, 1396.9, 1567.3, 1758.6, 1973.1, 2213.9, 2484,
2787.1, 3127.2, 3508.8, 3936.9, 4417.3, 4956.3, 5561.1, 6239.6, 7001, 7522.8, 8440.7, 9470.6, 10626),
EEtimeCorrShiftBins = cms.untracked.vdouble(
-0.896, -0.896, -0.896, -0.896, -0.563, -0.392, -0.287, -0.203, -0.135, -0.100, -0.068, -0.050, -0.060, -0.052, -0.055,
-0.050, -0.052, -0.056, -0.055, -0.056, -0.048, -0.037, -0.038, -0.037, -0.025, -0.026, -0.024, -0.013, -0.003, 0.005,
0.020, 0.026, 0.008, 0.007, -0.006, 0.024, 0.045, 0.062, 0.085, 0.088 , 0.111 , 0.139, 0.156, 0.176, 0.210,
0.242, 0.267, 0.301, 0.318, 0.278, 0.287, 0.218, 0.305, 0.245, 0.184, -0.159, -0.095, 0.037),
producedEcalSamplesCorrelation = cms.untracked.bool(True),
EBG12samplesCorrelation = cms.untracked.vdouble(
1.00000, 0.71073, 0.55721, 0.46089, 0.40449, 0.35931, 0.33924, 0.32439, 0.31581, 0.30481),
EBG6samplesCorrelation = cms.untracked.vdouble(
1.00000, 0.70946, 0.58021, 0.49846, 0.45006, 0.41366, 0.39699, 0.38478, 0.37847, 0.37055),
EBG1samplesCorrelation = cms.untracked.vdouble(
1.00000, 0.73354, 0.64442, 0.58851, 0.55425, 0.53082, 0.51916, 0.51097, 0.50732, 0.50409),
EEG12samplesCorrelation = cms.untracked.vdouble(
1.00000, 0.71373, 0.44825, 0.30152, 0.21609, 0.14786, 0.11772, 0.10165, 0.09465, 0.08098),
EEG6samplesCorrelation = cms.untracked.vdouble(
1.00000, 0.71217, 0.47464, 0.34056, 0.26282, 0.20287, 0.17734, 0.16256, 0.15618, 0.14443),
EEG1samplesCorrelation = cms.untracked.vdouble(
1.00000, 0.72698, 0.62048, 0.55691, 0.51848, 0.49147, 0.47813, 0.47007, 0.46621, 0.46265)
)
|
StarcoderdataPython
|
1778055
|
<gh_stars>0
"""
Sage Intacct charge card accounts
"""
from typing import Dict
from .api_base import ApiBase
class ChargeCardAccounts(ApiBase):
"""Class for Charge Card Accounts APIs."""
def __init__(self):
super().__init__(dimension='CREDITCARD')
|
StarcoderdataPython
|
6838
|
from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.mail import EmailMessage
from django.conf import settings
from django.template.loader import render_to_string
from django.contrib.auth.models import User
from django.contrib import messages
from django.contrib.auth import logout, login, authenticate
from django.contrib.auth.forms import UserCreationForm
from .decorators import *
from .forms import PostForm, CustomUserCreationForm, ProfileForm, UserForm
from .filters import PostFilter
from .models import *
# Create your views here.
def home(request):
posts = Post.objects.filter(active=True, featured=True)[0:3]
context = {'posts':posts}
return render(request, 'base/index.html', context)
def posts(request):
posts = Post.objects.filter(active=True)
myFilter = PostFilter(request.GET, queryset=posts)
posts = myFilter.qs
page = request.GET.get('page')
paginator = Paginator(posts, 5)
try:
posts = paginator.page(page)
except PageNotAnInteger:
posts = paginator.page(1)
except EmptyPage:
posts = paginator.page(paginator.num_pages)
context = {'posts':posts, 'myFilter':myFilter}
return render(request, 'base/posts.html', context)
def post(request, slug):
post = Post.objects.get(slug=slug)
if request.method == 'POST':
PostComment.objects.create(
author=request.user.profile,
post=post,
body=request.POST['comment']
)
messages.success(request, "Your comment has been posted successfully!")
return redirect('post', slug=post.slug)
context = {'post':post}
return render(request, 'base/post.html', context)
def profile(request):
return render(request, 'base/profile.html')
#CRUD VIEWS
@admin_only
@login_required(login_url="home")
def createPost(request):
form = PostForm()
if request.method == 'POST':
form = PostForm(request.POST, request.FILES)
if form.is_valid():
form.save()
return redirect('posts')
context = {'form':form}
return render(request, 'base/post_form.html', context)
@admin_only
@login_required(login_url="home")
def updatePost(request, slug):
post = Post.objects.get(slug=slug)
form = PostForm(instance=post)
if request.method == 'POST':
form = PostForm(request.POST, request.FILES, instance=post)
if form.is_valid():
form.save()
return redirect('posts')
context = {'form':form}
return render(request, 'base/post_form.html', context)
@admin_only
@login_required(login_url="home")
def deletePost(request, slug):
post = Post.objects.get(slug=slug)
if request.method == 'POST':
post.delete()
return redirect('posts')
context = {'item':post}
return render(request, 'base/delete.html', context)
def sendEmail(request):
if request.method == 'POST':
template = render_to_string('base/email_template.html', {
'name':request.POST['name'],
'email':request.POST['email'],
'message':request.POST['message'],
})
email = EmailMessage(
request.POST['subject'],
template,
settings.EMAIL_HOST_USER,
['<EMAIL>']
)
        email.send(fail_silently=False)
return render(request, 'base/email_sent.html')
def loginPage(request):
if request.user.is_authenticated:
return redirect('home')
if request.method == 'POST':
email = request.POST.get('email')
password =request.POST.get('password')
#Little Hack to work around re-building the usermodel
try:
user = User.objects.get(email=email)
user = authenticate(request, username=user.username, password=password)
except:
            messages.error(request, 'User with this email does not exist')
return redirect('login')
if user is not None:
login(request, user)
return redirect('home')
else:
messages.error(request, 'Email OR password is incorrect')
context = {}
return render(request, 'base/login.html', context)
def registerPage(request):
form = CustomUserCreationForm()
if request.method == 'POST':
form = CustomUserCreationForm(request.POST)
if form.is_valid():
user = form.save(commit=False)
user.save()
            messages.success(request, 'Account successfully created!')
user = authenticate(request, username=user.username, password=request.POST['<PASSWORD>'])
if user is not None:
login(request, user)
next_url = request.GET.get('next')
if next_url == '' or next_url == None:
next_url = 'home'
return redirect(next_url)
else:
                messages.error(request, 'An error has occurred with registration')
context = {'form':form}
return render(request, 'base/register.html', context)
def logoutUser(request):
logout(request)
return redirect('home')
@admin_only
@login_required(login_url="home")
def userAccount(request):
profile = request.user.profile
context = {'profile':profile}
return render(request, 'base/account.html', context)
@login_required(login_url="home")
def updateProfile(request):
user = request.user
profile = user.profile
form = ProfileForm(instance=profile)
if request.method == 'POST':
user_form = UserForm(request.POST, instance=user)
if user_form.is_valid():
user_form.save()
form = ProfileForm(request.POST, request.FILES, instance=profile)
if form.is_valid():
form.save()
return redirect('account')
context = {'form':form}
return render(request, 'base/profile_form.html', context)
def myEducation(request):
return render(request, 'base/education.html')
def myExperience(request):
return render(request, 'base/experience.html')
def myAchievements(request):
return render(request, 'base/achievements.html')
def myAbout(request):
return render(request, 'base/about.html')
def myContact(request):
return render(request, 'base/contact.html')
def mySkills(request):
return render(request, 'base/skills.html')
|
StarcoderdataPython
|
1765280
|
<reponame>sepidehpouyan/SCF-MSP430
from scfmsp.controlflowanalysis.AbstractInstruction import AbstractInstruction
from scfmsp.controlflowanalysis.ExecutionPoint import ExecutionPoint
class AbstractInstructionControlFlow(AbstractInstruction):
def __init__(self, function):
super(AbstractInstructionControlFlow, self).__init__(function)
    def get_branch_target(self):
        # MSP430 jump instructions carry a signed 10-bit word offset;
        # the branch target is PC + 2 + 2 * offset. The else branch below
        # rebuilds a negative offset from its two's-complement encoding.
opList = self.oplist.split()
temp = '{0:04b}'.format(int(opList[0][3],16))
if(temp[2]=='0'):
part3 = int(temp[2])*512 + int(temp[3])*256
part1 = int(opList[0][1],16)
part2 = (int(opList[0][0],16))*16
offset = part1 + part2 + part3
else:
temp = '{0:04b}'.format(int(opList[0][3],16))
part3 = int(self.rev(temp[2]))*512 + int(self.rev(temp[3]))*256
temp1 = '{0:04b}'.format(int(opList[0][1],16))
part1 = (int(self.rev(temp1[0])) * 8) + (int(self.rev(temp1[1])) * 4) + (int(self.rev(temp1[2])) * 2) + (int(self.rev(temp1[3])) * 1)
temp2 = '{0:04b}'.format(int(opList[0][0],16))
part2 = ((int(self.rev(temp2[0])) * 8) + (int(self.rev(temp2[1])) * 4) + (int(self.rev(temp2[2])) * 2) + (int(self.rev(temp2[3])) * 1)) * 16
offset = (part1 + part2 + part3 + 1) * (-1)
addr = self.address + 2*(offset)+2
target = ExecutionPoint(self.function.name, addr, self.function.caller)
return target
def rev(self, arr):
if(arr == '0'):
arr = '1'
else:
arr = '0'
return arr
|
StarcoderdataPython
|
1793825
|
import json
import os
import sys
from colorama import Fore, Back, Style
class ConfigFieldMissing(Exception):
pass
class Config(dict):
def checkField( self, name, default=None, hasDefault=False, valuesList=None):
if default is not None:
hasDefault = True
if name in self:
if (valuesList is not None) and (self[name] not in valuesList):
raise ConfigFieldMissing(Fore.RED + f'ERROR: Value for "{name}" should be one of: ' + (','.join(valuesList)) + Style.RESET_ALL)
else:
if hasDefault:
self[name] = default
else:
raise ConfigFieldMissing(
Fore.RED +
f'ERROR: missing key "{name}" in config' +
Style.RESET_ALL)
def parse_config(robot_folder_path):
config_path = robot_folder_path + '/config.json'
if not os.path.exists(config_path):
raise Exception( Fore.RED + "ERROR: The file " + config_path + " can't be found" + Style.RESET_ALL)
config = Config(json.load(open(config_path)))
config['configPath'] = config_path
config.checkField('documentId')
config.checkField('versionId', '')
config.checkField('workspaceId', '')
config.checkField('drawFrames', False)
config.checkField('drawCollisions', False)
config.checkField('assemblyName', False)
config.checkField('outputFormat', 'urdf', valuesList=['urdf', 'sdf'])
config.checkField('useFixedLinks', False)
config.checkField('ignoreLimits', False)
# Using OpenSCAD for simplified geometry
config.checkField('useScads', True)
config.checkField('pureShapeDilatation', 0.0)
# Dynamics
config.checkField('jointMaxEffort', 1)
config.checkField('jointMaxVelocity', 20)
config.checkField('noDynamics', False)
# Ignore list
config.checkField('ignore', [])
config.checkField('whitelist', None, hasDefault=True)
# Color override
config.checkField('color', None, hasDefault=True)
# STLs merge and simplification
config.checkField('mergeSTLs', 'no', valuesList=[
'no', 'visual', 'collision', 'all'])
config.checkField('maxSTLSize', 3)
config.checkField('simplifySTLs', 'no', valuesList=[
'no', 'visual', 'collision', 'all'])
# Post-import commands to execute
config.checkField('postImportCommands', [])
config['outputDirectory'] = robot_folder_path
config['dynamicsOverride'] = {}
# Add collisions=true configuration on parts
config.checkField('useCollisionsConfigurations', True)
# ROS support
config.checkField('packageName', '')
config.checkField('addDummyBaseLink', False)
config.checkField('robotName', 'onshape')
# additional XML code to insert
if config['outputFormat'] == 'urdf':
config.checkField('additionalUrdfFile', '')
additionalFileName = config['additionalUrdfFile']
else: # outputFormat can only be 'urdf' or 'sdf'
config.checkField('additionalSdfFile', '')
        additionalFileName = config['additionalSdfFile']
if additionalFileName == '':
config['additionalXML'] = ''
else:
with open(robot_folder_path + additionalFileName, 'r') as additionalXMLFile:
config['additionalXML'] = additionalXMLFile.read()
# Creating dynamics override array
config.checkField('dynamics', {})
tmp = config['dynamics']
for key in tmp:
if tmp[key] == 'fixed':
config['dynamicsOverride'][key.lower()] = {"com": [0, 0, 0], "mass": 0, "inertia": [
0, 0, 0, 0, 0, 0, 0, 0, 0]}
else:
config['dynamicsOverride'][key.lower()] = tmp[key]
# Deal with output directory creation/permission verification
if not (os.path.isdir(config['outputDirectory']) and os.access(config['outputDirectory'], os.W_OK)):
try:
os.makedirs(config['outputDirectory'])
except FileExistsError:
if os.path.isdir(config['outputDirectory']):
raise Exception(f'The output directory {config["outputDirectory"]} cannot be used, it seems the directory exists but is not writeable.')
else:
raise Exception(f'The output directory {config["outputDirectory"]} cannot be used, it seems there is a file with the same name.')
except PermissionError:
raise Exception(f'The output directory {config["outputDirectory"]} cannot be used, it seems there aren\'t sufficient permissions.')
# Checking that OpenSCAD is present
if config['useScads']:
print( Style.BRIGHT + '* Checking OpenSCAD presence...' + Style.RESET_ALL)
if os.system('openscad -v 2> /dev/null') != 0:
print(Fore.RED + "Can't run openscad -v, disabling OpenSCAD support" + Style.RESET_ALL)
# print(Fore.BLUE + "TIP: consider installing openscad" + Style.RESET_ALL)
# print(Fore.BLUE + "sudo add-apt-repository ppa:openscad/releases" + Style.RESET_ALL)
# print(Fore.BLUE + "sudo apt-get update" + Style.RESET_ALL)
# print(Fore.BLUE + "sudo apt-get install openscad" + Style.RESET_ALL)
config['useScads'] = False
# Checking that MeshLab is present
if config['simplifySTLs']:
print(
Style.BRIGHT +
'* Checking MeshLab presence...' +
Style.RESET_ALL)
        if not os.path.exists('/usr/bin/meshlabserver'):
print(Fore.RED + "No /usr/bin/meshlabserver, disabling STL simplification support" + Style.RESET_ALL)
# print(Fore.BLUE + "TIP: consider installing meshlab:" + Style.RESET_ALL)
# print(Fore.BLUE + "sudo apt-get install meshlab" + Style.RESET_ALL)
config['simplifySTLs'] = False
# Checking that versionId and workspaceId are not set on same time
if config['versionId'] != '' and config['workspaceId'] != '':
print(Fore.RED + "You can't specify workspaceId AND versionId")
return config
|
StarcoderdataPython
|
114438
|
@(lambda: [lambda x: x][0])()
def foo():
<caret>
|
StarcoderdataPython
|
142083
|
<gh_stars>0
from .asset import Asset, AssetCurrency
from .assetPricing import AssetPricing, AssetPricingQuotes, AssetPricingParametrized
from .assetOperation import AssetOperation, AssetOperationType
from .quote import Quote, QuoteCurrencyPair, QuoteHistoryItem
from .types import PyObjectId
|
StarcoderdataPython
|
3223284
|
<reponame>and3rson/pyrant<gh_stars>0
#!/usr/bin/env python2
from pyrant import Client
import json
pyrant = Client()
def test_get_rants():
# Retrieve rants from feed
for rant in pyrant.get_rants():
print '*** @{}:'.format(rant.user_username)
print rant.text, rant.id
def test_get_rant():
# Get rant by ID
rant = pyrant.get_rant(id=231448)
# Get rant image
assert rant.attached_image.url
# Update rant from server
rant.update()
def test_dump_rant():
# Dump rant data, useful for checking out the available attributes
rant = pyrant.get_rant(id=231448)
assert json.dumps(rant.serialize(), indent=4)
def test_login():
# Log in
assert pyrant.log_in('tester', 'tester')
def test_vote():
# Find a rant
rant = pyrant.get_rant(id=231448)
# Vote for a rant
rant.vote(up=1)
# Unvote for a rant
rant.vote(up=0)
|
StarcoderdataPython
|
1651444
|
arr=[]
for i in range(10):
arr.append(int(input()))
for j in range(10):
arr[j]=arr[j]%42
arr=set(arr)
print(len(arr))
|
StarcoderdataPython
|
1625336
|
<gh_stars>0
import numpy as np
import random
from operator import attrgetter
from math import exp
class Coin:
def __init__(self, p, flip_count):
self.p_head = p
self.p_head_estimate = self.flip_repeatedly(flip_count)
def flip(self):
if random.uniform(0,1) < self.p_head: return 1
else: return 0
def flip_repeatedly(self, count):
head_count = 0
for x in range(count):
head_count += self.flip()
return head_count / count
class TestCase:
def __init__(self, p, coin_count):
coins = [Coin(p, 10) for x in range(coin_count)]
self.c1 = coins[0]
self.c_rand = random.choice(coins)
self.c_min = min(coins, key=attrgetter('p_head_estimate'))
self.nu1 = self.c1.p_head_estimate
self.nu_rand = self.c_rand.p_head_estimate
self.nu_min = self.c_min.p_head_estimate
def run_test(run_count, coin_count, threshold):
p_head = 0.5
test_cases = [TestCase(p_head, coin_count) for x in range(run_count)]
nu1s = []
nu_rands = []
nu_mins = []
hoeffding_upper_bound = 2*exp(-2*coin_count*threshold**2)
print(hoeffding_upper_bound)
bad_c1_count = 0
bad_c_rand_count = 0
bad_c_min_count = 0
for x in range(run_count):
nu1s.append(test_cases[x].nu1)
if abs(nu1s[x]-p_head)>threshold: bad_c1_count += 1
nu_rands.append(test_cases[x].nu_rand)
if abs(nu_rands[x]-p_head)>threshold: bad_c_rand_count += 1
nu_mins.append(test_cases[x].nu_min)
if abs(nu_mins[x]-p_head)>threshold: bad_c_min_count += 1
avg_nu1 = np.mean(nu1s)
avg_nu_rand = np.mean(nu_rands)
avg_nu_min = np.mean(nu_mins)
print(avg_nu1)
print(avg_nu_rand)
print(avg_nu_min)
p_bad_c1 = bad_c1_count / run_count
print('c1: ' + str(p_bad_c1) + ',' + str(p_bad_c1 <= hoeffding_upper_bound))
p_bad_c_rand = bad_c_rand_count / run_count
print('c_rand: ' + str(p_bad_c_rand) + ',' + str(p_bad_c_rand <= hoeffding_upper_bound))
p_bad_c_min = bad_c_min_count / run_count
print('c_min: ' + str(p_bad_c_min) + ',' + str(p_bad_c_min <= hoeffding_upper_bound))
run_test(1000, 1000, 0.02)
|
StarcoderdataPython
|
1725085
|
'''
let s_k be the number of 1's when writing the numbers from 0 to k in binary.
For example, writing 0 to 5 in binary, we have 0, 1, 10, 11, 100, 101. There are seven 1's, so s_5 = 7
The sequence S = {s_k : k >= 0} starts {0, 1, 2, 4, 5, 7, 9, 12, ...}.
A game is played by two players. Before the game starts, a number n is chosen. A counter c starts at 0. At each turn, the player chooses a number from 1 to n (inclusive) and increases c by that number. The resulting value of c must be a member of S. If there are no more valid moves, the player loses.
For example:
Let n = 5. c starts at 0.
Player 1 chooses 4, so c becomes 0 + 4 = 4.
Player 2 chooses 5, so c becomes 4 + 5 = 9.
Player 1 chooses 3, so c becomes 9 + 3 = 12.
etc.
Note that c must always belong to S, and each player can increase c by at most n.
Let M(n) be the highest number the first player can choose at her first turn to force a win, and M(n) = 0 if there is no such move. For example, M(2) = 2, M(7) = 1 and M(20) = 4.
Given \sum(M(n))^3 = 8150 for 1 \leq n \leq 20.
Find \sum(M(n))^3 for 1 \leq n \leq 1000.
First I would want to generate s_k?
I don't fully understand the game.
s_k = 0, 1, 2, 4, 5, 7, 9, 12, 13, 15
difference between consecutive terms:
1, 1, 2, 1, 2, 2, 3, 1, 2...
wait no... it's not in binary so it doesn't pertain, right?
'''
def countOnes(binaryString):
count = 0
for b in binaryString:
if b == '1':
count += 1
return count
count = 0
for i in xrange(10):
count += countOnes(str(bin(i)))
print str(i)+' '+str(bin(i))+' \tsk='+str(count)
|
StarcoderdataPython
|
1617596
|
<reponame>nfitzen/advent-of-code-2020<filename>13/star2.py
#!/usr/bin/env python3
# SPDX-FileCopyrightText: 2020 <NAME> <https://github.com/nfitzen>
#
# SPDX-License-Identifier: CC0-1.0
from math import lcm
import itertools
with open('input.txt') as f:
data = f.readlines()
firstId = int(data[1].split(',')[0])
ids = {(i, int(v)) for i, v in enumerate(data[1].split(',')) if v != 'x'}
t, m = (firstId,) * 2
for i, v in ids:
while (t+i) % v:
t += m
m = lcm(m, v)
print(t)
|
StarcoderdataPython
|
3322095
|
# -*- coding: utf-8 -*-
import sys, os
# -- General configuration -----------------------------------------------------
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Galah'
copyright = u'2012, <NAME>'
# The version info for the project you're documenting.
version = release = '0.2'
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
def get_man_pages(root_dir):
from os import listdir
from os.path import isfile, join, splitext
pages = []
for i in listdir(root_dir):
file_path = join(root_dir, i)
file_name = splitext(i)[0]
if not isfile(file_path):
continue
pages.append((
join(root_dir, file_name),
file_name,
u"Galah API: %s documentation" % file_name,
[u"<NAME>"],
1
))
return pages
man_pages = get_man_pages("commands")
|
StarcoderdataPython
|
1629008
|
<filename>yelp/obj/business_response.py
# -*- coding: UTF-8 -*-
from yelp.obj.business import Business
from yelp.obj.response_object import ResponseObject
class BusinessResponse(ResponseObject):
def __init__(self, response):
super(BusinessResponse, self).__init__(response)
self._parse_main_response_body('business', Business, response)
|
StarcoderdataPython
|
1698486
|
<filename>cogs/kindness.py
import io
import json
import random
import aiohttp
import discord
import giphypop
from discord.ext import commands
from .utils.dataIO import dataIO
import os, os.path
import re
class Kindness(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command()
async def kiss(self, ctx, victim: discord.Member = None):
"""
Kisses a user with a random gif from Giphy
:param ctx:
:param victim: Who are you kissing? (optional)
:return: The kiss you sent off :)
"""
g = giphypop.Giphy("KZciiXBwyJ9RabyZyUHjQ8e4ZutZQ1Go")
results = [x for x in g.search('kiss')]
kisser = ctx.author.name
        if victim is None:
await ctx.send(str(ctx.author.name) + " puckers their lips, but no one is there... sad.")
elif victim.name == kisser:
await ctx.send(
f"{kisser} starts making out with their image in a mirror... strange one this {kisser} is...")
else:
msg = random.choice(dataIO.load_json("data/lewd/kiss.json")['kiss']).format(kisser=str(kisser),
victim=str(victim.name))
embed = discord.Embed(title=msg, color=0xFF69B4)
embed.set_image(url=random.choice(results).raw_data['images']['fixed_height_downsampled']['url'])
await ctx.send(embed=embed)
# await ctx.send(msg)
@commands.command("hug")
async def hug(self, ctx, victim: discord.Member, number=None):
"""
Hug a user with a cute gif
:param victim: the user you are hugging
:param number: The specific gif you want to return. If None picks a random gif.
:return: The gif of your hug
"""
if victim == ctx.author:
return await ctx.channel.send(
'https://tenor.com/view/steven-universe-su-stevenuniverse-diamonddays-gif-13326567')
if number is None:
DIR = 'data/lewd/hugs'
number = len([name for name in os.listdir(DIR) if os.path.isfile(os.path.join(DIR, name))])
file = str(random.randint(1, number)) + '.gif'
else:
file = str(number) + '.gif'
await ctx.send(file=discord.File('data/lewd/hugs/' + file))
@commands.command()
async def addhug(self, ctx):
dir = 'data/lewd/hugs'
if ("https://" in ctx.message.content.lower() or "http://" in ctx.message.content.lower()):
url = ctx.message.content[7:].lstrip(" ")
await self.linkSubmit(ctx, url, dir)
else:
try:
await self.normalSubmit(ctx, dir)
except Exception as e:
print(str(e))
@commands.command()
async def pat(self, ctx, target=None, number=None):
if number is None:
DIR = 'data/lewd/headpats'
number = len([name for name in os.listdir(DIR) if os.path.isfile(os.path.join(DIR, name))])
file = str(random.randint(1, number)) + '.gif'
await ctx.send(file=discord.File('data/lewd/headpats/' + file))
@commands.command()
async def addpat(self, ctx):
dir = 'data/lewd/headpats'
if ("https://" in ctx.message.content.lower() or "http://" in ctx.message.content.lower()):
url = ctx.message.content[7:].lstrip(" ")
await self.linkSubmit(ctx, url, dir)
else:
try:
await self.normalSubmit(ctx, dir)
except Exception as e:
print(str(e))
@commands.command()
async def cuddle(self, ctx, target: discord.Member):
if target == ctx.author:
await ctx.channel.send('https://tenor.com/view/steven-universe-su-stevenuniverse-diamonddays-gif-13326567')
return
cuddles = ['https://i.imgur.com/d7gjIVu.gif',
'https://media.giphy.com/media/xR9FIxmoAPCMw/giphy.gif',
'https://i.imgur.com/fgPMy3v.gif',
'https://proxy.duckduckgo.com/iu/?u=http%3A%2F%2Fmedia.giphy.com%2Fmedia%2F3bqtLDeiDtwhq%2Fgiphy.gif',
'https://i.imgur.com/TVT4K9d.gif',
'https://i.imgur.com/65ZrxPf.gif',
'https://proxy.duckduckgo.com/iu/?u=https%3A%2F%2Fi.pinimg.com%2Foriginals%2Faf%2F6a%2Ff9%2Faf6af9f078d34217d49287514b2d24d5.gif',
'https://proxy.duckduckgo.com/iu/?u=http%3A%2F%2Fmedia.giphy.com%2Fmedia%2Flrr9rHuoJOE0w%2Fgiphy.gif'
]
messages = dataIO.load_json('data/lewd/cuddles.json')
message = random.choice(messages).format(cuddler=ctx.author.name, victim=target.name)
embed = discord.Embed(title=message, color=discord.Color.purple())
embed.set_image(url=random.choice(cuddles))
await ctx.channel.send(embed=embed)
@commands.command()
async def compliment(self, ctx, target):
"""
Compliment a user!
        :param target: The user you are complimenting
:return:
"""
msg = random.choice(dataIO.load_json("data/compliment/compliments.json")['compliments'])
await ctx.send(str(target) + ' ' + msg)
async def linkSubmit(self, ctx, url, dir):
if '.gif' not in url:
return await ctx.send("Please provide a Gif not an image!")
async with aiohttp.ClientSession() as session:
async with session.get(url) as resp:
if resp.status != 200:
return await ctx.send('Could not download file...')
data = io.BytesIO(await resp.read())
number = len([name for name in os.listdir(dir) if os.path.isfile(os.path.join(dir, name))]) + 1
with open(f"{dir}/{number}.gif", 'wb') as f:
f.write(data.read())
await ctx.send(f"File Successfully saved as number {number}")
async def normalSubmit(self, ctx, dir):
jsonstr = ctx.message.attachments[0]
url = jsonstr.url
await self.linkSubmit(ctx, url, dir)
def setup(bot):
n = Kindness(bot)
bot.add_cog(n)
|
StarcoderdataPython
|
82504
|
<gh_stars>0
"""
Classes for working with the project settings.
Settings are read from a file.
"""
import json
import os
import urllib.parse as urlparse
from abc import ABC
# Abstract class for working with settings
class Settings(ABC):
__FILE_NAME = 'settings.json'
def __read_file(self) -> json:
f = open(self.__FILE_NAME)
str = f.read()
f.close()
return json.loads(str)
def __read_env(self) -> json:
dc = {}
dc['telegram_token'] = os.environ['telegram_token']
if os.environ.get('db_name') is not None:
dc['db_name'] = os.environ['db_name']
dc['db_user'] = os.environ['db_user']
dc['db_password'] = <PASSWORD>['db_password']
dc['port'] = os.environ['db_port']
dc['host'] = os.environ['host']
else:
url_db = urlparse.urlparse(os.environ['DATABASE_URL'])
dc['db_name'] = url_db.path[1:]
dc['db_user'] =url_db.username
dc['db_password'] = <PASSWORD>
dc['host'] = url_db.hostname
dc['port'] = url_db.port
dc['client_id'] = os.environ['client_id']
dc['client_secret'] = os.environ['client_secret']
dc['client_name'] = os.environ['client_name']
return dc
def __init__(self):
json_settings = self.__read_file() if os.path.exists(self.__FILE_NAME) else self.__read_env()
self._token = json_settings['telegram_token']
self._db_name = json_settings['db_name']
self._db_user = json_settings['db_user']
self._db_password = json_settings['db_password']
self._host = json_settings['host']
if 'port' in json_settings:
self._port = json_settings['port']
else:
self._port = '5432'
self._client_id = json_settings['client_id']
self._client_secret = json_settings['client_secret']
self._client_name = json_settings['client_name']
# Class for retrieving Telegram settings. Inherits from the base class
class SettingsTelegram(Settings):
def get_settings_tg(self) -> dict:
list = {'token': self._token}
return list
# Class for retrieving DB settings. Inherits from the base class
class SettingsDb(Settings):
def get_settings_db(self) -> dict:
list = {'db_name': self._db_name, 'db_user': self._db_user, 'db_password': <PASSWORD>,
'host': self._host, 'port': self._port}
return list
# Class for retrieving Shiki settings. Inherits from the base class
class SettingsShiki(Settings):
def get_settings_shiki(self) -> dict:
ls = {'client_id': self._client_id, 'client_secret': self._client_secret, 'client_name': self._client_name}
return ls
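# Hedged usage sketch (not part of the original module): shows how the concrete
# settings classes might be consumed. It assumes either settings.json or the
# expected environment variables are available at runtime.
if __name__ == '__main__':
    print(SettingsTelegram().get_settings_tg())
    print(SettingsDb().get_settings_db())
    print(SettingsShiki().get_settings_shiki())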
|
StarcoderdataPython
|
1755357
|
<filename>pydatastructures/list.py
print("list")
|
StarcoderdataPython
|
108442
|
import os
import json
from numbers import Number
from collections.abc import Iterable, Mapping
from operator import itemgetter
from .config import set_class_path, JavaSettingsConstructorParams
set_class_path()
from jnius import autoclass, MetaJavaClass
# Java DataTypes
jMap = autoclass('java.util.HashMap')
jArrayList = autoclass('java.util.ArrayList')
jList = autoclass('java.util.List')
jInt = autoclass('java.lang.Integer')
jLong = autoclass('java.lang.Long')
jFloat = autoclass('java.lang.Float')
jDouble = autoclass('java.lang.Double')
jString = autoclass('java.lang.String')
jBoolean = autoclass('java.lang.Boolean')
# Custom Java Classes
Settings = autoclass('ai.digamma.entities.Settings')
Service = autoclass('ai.digamma.service.DateTimeExtractor')
SettingsBuilder = autoclass('ai.digamma.utils.SettingsBuilder')
class JavaPrimitive(object):
'''
Convert primitives to their corresponding Java-types based on size
'''
def __return_value(self, javaObj, isValue, attr):
if isValue:
return getattr(javaObj, attr)()
else:
return javaObj
def __call__(self, obj, isValue = False):
if isinstance(obj, int):
if isinstance(obj, bool):
return self.__return_value(jBoolean(obj), isValue, 'booleanValue')
if obj <= jInt.MAX_VALUE:
return self.__return_value(jInt(obj), isValue, 'intValue')
else:
return self.__return_value(jLong(obj), isValue, 'longValue')
elif isinstance(obj, float):
if obj < jFloat.MAX_VALUE:
return self.__return_value(jFloat(obj), isValue, 'floatValue')
else:
return self.__return_value(jDouble(obj), isValue, 'doubleValue')
elif isinstance(obj, str):
return jString(obj)
class JavaComposite(object):
def __init__(self):
self.primitives = (Number, str)
self.primitiveConverter = JavaPrimitive()
def __call__(self, obj, isValue = False):
'''
        Recursively convert Python objects to composite Java objects (e.g. Java Map<String, Object>)
:param obj: Python object
'''
try:
if isinstance(obj, self.primitives):
return self.primitiveConverter(obj, isValue)
elif isinstance(obj.__class__, MetaJavaClass):
return obj
elif isinstance(obj, Mapping):
HashMap = jMap()
for key, value in obj.items():
hashMapKey = self(key, isValue)
hashMapValue = self(value, isValue = False)
HashMap.put(hashMapKey, hashMapValue)
return HashMap
elif isinstance(obj, Iterable):
JavaArrayList = jArrayList()
for element in obj:
temp = self(element)
JavaArrayList.add(temp)
return JavaArrayList
else:
return jString(str(obj))
except Exception as e:
print(repr(e))
raise e
class PySettings(object):
JavaSettings = Settings
Converter = JavaComposite()
def __init__(self, **kwargs):
self.kwargs = kwargs
self.build_java_settings_obj()
def build_java_settings_obj(self):
if self.kwargs:
temp = dict()
for param, value in self.kwargs.items():
temp[param] = self.Converter(value, isValue = True)
JavaParams = itemgetter(*JavaSettingsConstructorParams)(temp)
self.javaSettingsObj = self.JavaSettings(*JavaParams)
else:
self.javaSettingsObj = self.JavaSettings()
def __call__(self):
return self.javaSettingsObj
class PySettingsBuilder(object):
JavaSettingsBuilder = SettingsBuilder
Converter = JavaComposite()
def __init__(self, javaBuilderObj=None):
self.javaBuilderObj = javaBuilderObj if javaBuilderObj else self.JavaSettingsBuilder()
def __set_java_builder(self, newJavaBuilderObj):
self.javaBuilderObj = newJavaBuilderObj
return self
def build(self):
pySettings = PySettings()
pySettings.javaSettingsObj = self.javaBuilderObj.build()
return pySettings
def __getattr__(self, attr):
if hasattr(self.javaBuilderObj, attr):
def wrapper(*args, **kwargs):
args = [self.Converter(arg, isValue=True) for arg in args]
for key, value in kwargs.items():
kwargs[key] = self.Converter(value, isValue=True)
rez = getattr(self.javaBuilderObj, attr)(*args, **kwargs)
return self.__set_java_builder(rez)
return wrapper
raise AttributeError(attr)
class ExtractionService(object):
JavaService = Service
Converter = JavaComposite()
@classmethod
def extract(cls, text, settings = None):
if not isinstance(text, (str, jString)):
raise TypeError('Text argument should be of type str or java.lang.String. Got {0} instead'.format(type(text)))
if settings:
if not isinstance(settings, (PySettings, Settings)):
raise TypeError('Settings argument should be of type PySettings or ai.digamma.entities.Settings. Got {0} instead'.format(type(settings)))
elif isinstance(settings, PySettings):
settings = settings()
ServiceParams = (cls.Converter(text), cls.Converter(settings))
else:
ServiceParams = (cls.Converter(text),)
rez = cls.JavaService.extractJSON(*ServiceParams)
return json.loads(rez)
@classmethod
def extractFromCsv(cls, csvPath, outputPath, settings, separator = ','):
if not isinstance(settings, (PySettings, Settings)):
raise TypeError('Settings argument should be of type PySettings or ai.digamma.entities.Settings. Got {0} instead'.format(type(settings)))
elif isinstance(settings, PySettings):
settings = settings()
rez = cls.JavaService.extractJSONFromCsv(csvPath, separator, outputPath, settings)
return json.loads(rez)
if __name__=='__main__':
settings = (PySettingsBuilder()
.addRulesGroup('DurationGroup')
.excludeRules("holidaysRule")
.addUserDate("2017-10-23T18:40:40.931Z")
.addTimeZoneOffset("100")
.includeOnlyLatestDates(True)
.build()
)
text = "10-15 month"
rez = ExtractionService.extract(text, settings)
print(rez)
|
StarcoderdataPython
|
1707766
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the tar path specification implementation."""
import unittest
from dfvfs.path import tar_path_spec
from tests.path import test_lib
class TarPathSpecTest(test_lib.PathSpecTestCase):
"""Tests for the tar path specification implementation."""
def testInitialize(self):
"""Tests the path specification initialization."""
path_spec = tar_path_spec.TarPathSpec(
location=u'/test', parent=self._path_spec)
self.assertNotEqual(path_spec, None)
with self.assertRaises(ValueError):
_ = tar_path_spec.TarPathSpec(location=u'/test', parent=None)
with self.assertRaises(ValueError):
_ = tar_path_spec.TarPathSpec(location=None, parent=self._path_spec)
with self.assertRaises(ValueError):
_ = tar_path_spec.TarPathSpec(
location=u'/test', parent=self._path_spec, bogus=u'BOGUS')
def testComparable(self):
"""Tests the path specification comparable property."""
path_spec = tar_path_spec.TarPathSpec(
location=u'/test', parent=self._path_spec)
self.assertNotEqual(path_spec, None)
expected_comparable = u'\n'.join([
u'type: TEST',
u'type: TAR, location: /test',
u''])
self.assertEqual(path_spec.comparable, expected_comparable)
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
1673820
|
from .AnimeInterp import AnimeInterp
__all__ = [ 'AnimeInterp' ]
|
StarcoderdataPython
|
119655
|
#!/usr/bin/env python
import itertools
import os
import signal
import socket
import subprocess
import sys
import uuid
from contextlib import contextmanager
from functools import partial
from threading import Thread
import click
from dask.distributed import Client, as_completed
def timed_wait_proc(proc, timeout):
try:
return proc.wait(timeout)
except subprocess.TimeoutExpired:
return None
@contextmanager
def exec_proc(args, on_stdout=None, on_stderr=None, stderr_to_stdout=False,
buffer_size=16*1024, ctrl_c_timeout=3, kill_timeout=60, **kwargs):
"""
Execute an external program within a context.
Args:
args: Arguments of the program.
on_stdout ((bytes) -> None): Callback for capturing stdout.
on_stderr ((bytes) -> None): Callback for capturing stderr.
stderr_to_stdout (bool): Whether or not to redirect stderr to
stdout? If specified, `on_stderr` will be ignored.
(default :obj:`False`)
buffer_size (int): Size of buffers for reading from stdout and stderr.
ctrl_c_timeout (int): Seconds to wait for the program to
respond to CTRL+C signal. (default 3)
kill_timeout (int): Seconds to wait for the program to terminate after
being killed. (default 60)
**kwargs: Other named arguments passed to :func:`subprocess.Popen`.
Yields:
subprocess.Popen: The process object.
"""
# check the arguments
if stderr_to_stdout:
kwargs['stderr'] = subprocess.STDOUT
on_stderr = None
if on_stdout is not None:
kwargs['stdout'] = subprocess.PIPE
if on_stderr is not None:
kwargs['stderr'] = subprocess.PIPE
# output reader
def reader_func(fd, action):
while not giveup_waiting[0]:
buf = os.read(fd, buffer_size)
if not buf:
break
action(buf)
def make_reader_thread(fd, action):
th = Thread(target=reader_func, args=(fd, action))
th.daemon = True
th.start()
return th
# internal flags
giveup_waiting = [False]
# launch the process
stdout_thread = None # type: Thread
stderr_thread = None # type: Thread
proc = subprocess.Popen(args, **kwargs)
try:
if on_stdout is not None:
stdout_thread = make_reader_thread(proc.stdout.fileno(), on_stdout)
if on_stderr is not None:
stderr_thread = make_reader_thread(proc.stderr.fileno(), on_stderr)
try:
yield proc
except KeyboardInterrupt: # pragma: no cover
if proc.poll() is None:
# Wait for a while to ensure the program has properly dealt
# with the interruption signal. This will help to capture
# the final output of the program.
# TODO: use signal.signal instead for better treatment
_ = timed_wait_proc(proc, 1)
finally:
if proc.poll() is None:
# First, try to interrupt the process with Ctrl+C signal
ctrl_c_signal = (signal.SIGINT if sys.platform != 'win32'
else signal.CTRL_C_EVENT)
os.kill(proc.pid, ctrl_c_signal)
if timed_wait_proc(proc, ctrl_c_timeout) is None:
# If the Ctrl+C signal does not work, terminate it.
proc.kill()
# Finally, wait for at most 60 seconds
if timed_wait_proc(proc, kill_timeout) is None: # pragma: no cover
giveup_waiting[0] = True
# Close the pipes such that the reader threads will ensure to exit,
# if we decide to give up waiting.
def close_pipes():
for f in (proc.stdout, proc.stderr, proc.stdin):
if f is not None:
f.close()
if giveup_waiting[0]: # pragma: no cover
close_pipes()
# Wait for the reader threads to exit
for th in (stdout_thread, stderr_thread):
if th is not None:
th.join()
# Ensure all the pipes are closed.
if not giveup_waiting[0]:
close_pipes()
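# Hedged usage sketch (not part of the original script): one way exec_proc
# might be driven with a stdout callback. The command below is an illustrative
# assumption, not a value used elsewhere in this file.
def _example_exec_proc():
    captured = []
    with exec_proc([sys.executable, '-c', 'print("hello")'],
                   on_stdout=captured.append,
                   stderr_to_stdout=True) as proc:
        exit_code = proc.wait()
    return exit_code, b''.join(captured)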
def run_proc(args, work_dir=None, log_file=None):
env = os.environ.copy()
env.setdefault('PYTHONUNBUFFERED', '1')
python_path = env.get('PYTHONPATH', '')
python_path = (os.path.split(os.path.abspath(__file__))[0] +
os.pathsep + python_path)
env['PYTHONPATH'] = python_path
header = '>>> Start process: {}\n at: {}'.format(args, work_dir)
print(header)
@contextmanager
def log_writer():
if log_file is not None:
with open(log_file, 'ab') as f:
fd = f.fileno()
os.write(fd, header.encode('utf-8') + b'\n\n')
yield partial(os.write, fd)
else:
yield (lambda s: None)
with log_writer() as writer, \
exec_proc(args, on_stdout=writer, stderr_to_stdout=True,
cwd=work_dir, env=env) as proc:
exit_code = proc.wait()
footer = '>>> Exit code is {}'.format(exit_code)
print(footer)
if writer is not None:
writer(b'\n\n' + footer.format(exit_code).encode('utf-8') + b'\n')
if exit_code != 0:
raise RuntimeError('Exit code != 0: {}'.format(exit_code))
return exit_code
@click.command()
@click.option('-s', '--scheduler', default=socket.gethostname() + ':7891',
help='Specify the dask scheduler.')
@click.option('-l', '--log-file', default=None, required=False,
help='Save console log to this file.')
@click.option('-r', '--retries', type=int, default=0, required=False,
help='Maximum number of retries on error.')
@click.option('-N', '--instance-num', type=int, default=1, required=False,
help='Start this number of instances.')
@click.option('-w', '--work-dir', type=str, required=False, default='.',
help='Work directory for the task. Defaults to current dir.')
@click.option('--no-wait', is_flag=True, default=False, required=False,
help='If specified, submit the task to scheduler and exit '
'without waiting for the program to finish.')
@click.option('template_args', '--template-arg', multiple=True, default=None,
required=False)
@click.option('template_values', '--template-val', multiple=True, default=None,
required=False)
@click.argument('args', nargs=-1)
def main(scheduler, log_file, retries, instance_num, work_dir, no_wait,
template_args, template_values, args):
work_dir = os.path.abspath(work_dir)
if log_file is not None:
log_file = os.path.abspath(log_file)
# parse with template args
template_keys = []
template_value_list = []
for s in template_values or ():
key, val = s.split('=')
val_list = [i for i in val.split(',')]
template_keys.append(key)
template_value_list.append(val_list)
def iter_template_dict():
for values in itertools.product(*template_value_list):
yield {k: v for k, v in zip(template_keys, values)}
# do submit the tasks
client = Client(scheduler)
task_map = {}
def submit(args):
for i in range(instance_num):
next_task_id = len(task_map)
key = 'run_proc:{}'.format(uuid.uuid4())
local_log_file = log_file
if local_log_file is not None:
local_log_file = list(os.path.splitext(log_file))
local_log_file[0] = '{}.{}'.format(local_log_file[0], next_task_id)
local_log_file = ''.join(local_log_file)
f = client.submit(
run_proc, args=args, work_dir=work_dir, log_file=local_log_file,
key=key, retries=retries
)
task_map[f] = next_task_id
print('> submitted task #{}: {}: {}'.format(next_task_id, key, args))
tasks.append(f)
tasks = []
if template_args:
for template_dict in iter_template_dict():
the_args = list(args)
for a in template_args:
the_args.append(a.format(**template_dict))
submit(the_args)
else:
submit(args)
# wait for the tasks to finish
if not no_wait:
for f in as_completed(tasks, with_results=False):
try:
_ = f.result()
except RuntimeError as ex:
print('$ task #{}: error: {}'.format(task_map[f], ex))
else:
print('$ task #{}: done'.format(task_map[f]))
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
45047
|
<reponame>JCab09/StickyDJ-Bot<gh_stars>0
#!/usr/bin/env python3
"""
This class uses the yaml-parser in order to create the
apropriate config-dictionary for the client who requested it
Author: <NAME>
"""
from src.util.parser.yaml_parser import yaml_parser
import string
def getConfig(filepath, type = '.yaml', context = None):
"""PLACEHOLDER
"""
    if not type.startswith('.'):
        type = '.' + type
file = filepath + type
config = parseConfig(file, type)
if context is not None:
return config.get(context)
else:
return config
def parseConfig(file, type):
"""PLACEHOLDER
"""
if type == '.yaml':
return yaml_parser.get_config(file)
else:
print("ERROR: Config filetype (%s) not supported! ", type)
|
StarcoderdataPython
|
108963
|
<reponame>Scobber/yeasterAPI
from setuptools import setup, find_packages
import pathlib
here = pathlib.Path(__file__).parent.resolve()
long_description = (here / 'README.md').read_text(encoding='utf-8')
setup(
name='yeastarAPI',
version='0.1.5', # Required
description='yeastar wireless terminal api client', # Optional
long_description=long_description, # Optional
long_description_content_type='text/markdown', # Optional (see note above)
url='https://github.com/Scobber/yeastarAPI', # Optional
author='The Scobber', # Optional
author_email='<EMAIL>', # Optional
classifiers=[ # Optional
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3 :: Only',
],
keywords='api, networking, sms terminal', # Optional
#package_dir={'': 'yeastarapi'}, # Optional
packages=find_packages(), # Required
python_requires='>=3.6, <4',
#install_requires=['peppercorn'], # Optional
#extras_require={ # Optional
# 'dev': ['check-manifest'],
# 'test': ['coverage'],
#},
#entry_points={ # Optional
# 'console_scripts': [
# 'sample=sample:main',
# ],
#},
project_urls={ # Optional
'Bug Reports': 'https://github.com/Scobber/yeastarAPI/issues',
'Say Thanks!': 'https://github.com/Scobber/yeastarAPI',
'Source': 'https://github.com/Scobber/yeastarAPI/',
},
)
|
StarcoderdataPython
|
3380520
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""The page cycler measurement.
This measurement registers a window load handler in which it forces a layout and
then records the value of performance.now(). This call to now() measures the
time from navigationStart (immediately after the previous page's beforeunload
event) until after the layout in the page's load event. In addition, two garbage
collections are performed in between the page loads (in the beforeunload event).
This extra garbage collection time is not included in the measurement times.
Finally, various memory and IO statistics are gathered at the very end of
cycling all pages.
"""
import os
import sys
from perf_tools import histogram_metric
from telemetry.core import util
from telemetry.page import page_measurement
MEMORY_HISTOGRAMS = [
{'name': 'V8.MemoryExternalFragmentationTotal', 'units': 'percent'},
{'name': 'V8.MemoryHeapSampleTotalCommitted', 'units': 'kb'},
{'name': 'V8.MemoryHeapSampleTotalUsed', 'units': 'kb'}]
class PageCycler(page_measurement.PageMeasurement):
def AddCommandLineOptions(self, parser):
# The page cyclers should default to 10 iterations. In order to change the
# default of an option, we must remove and re-add it.
pageset_repeat_option = parser.get_option('--pageset-repeat')
pageset_repeat_option.default = 10
parser.remove_option('--pageset-repeat')
parser.add_option(pageset_repeat_option)
def WillRunPageSet(self, tab, results):
# Avoid paying for a cross-renderer navigation on the first page on legacy
# page cyclers which use the filesystem.
if tab.browser.http_server:
tab.Navigate(tab.browser.http_server.UrlOf('nonexistent.html'))
with open(os.path.join(os.path.dirname(__file__),
'page_cycler.js'), 'r') as f:
self.page_cycler_js = f.read() # pylint: disable=W0201
# pylint: disable=W0201
self.start_commit_charge = tab.browser.memory_stats['SystemCommitCharge']
# pylint: disable=W0201
self.histograms = [histogram_metric.HistogramMetric(
h, histogram_metric.RENDERER_HISTOGRAM)
for h in MEMORY_HISTOGRAMS]
def WillNavigateToPage(self, page, tab):
page.script_to_evaluate_on_commit = self.page_cycler_js
def DidNavigateToPage(self, page, tab):
for h in self.histograms:
h.Start(page, tab)
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArg('--dom-automation')
options.AppendExtraBrowserArg('--js-flags=--expose_gc')
options.AppendExtraBrowserArg('--no-sandbox')
# Temporarily enable threaded compositing on Mac on only some page sets.
# This malignancy is to diagnose an issue where the bots are experiencing
# a regression that isn't reproducing locally.
# TODO(ccameron): delete this
# http://crbug.com/180025
if sys.platform == 'darwin':
composited_page_sets = ('/bloat.json', '/moz.json', '/intl2.json')
if sys.argv[-1].endswith(composited_page_sets):
options.AppendExtraBrowserArg('--force-compositing-mode')
options.AppendExtraBrowserArg('--enable-threaded-compositing')
else:
options.AppendExtraBrowserArg('--disable-force-compositing-mode')
def MeasureMemory(self, tab, results):
memory = tab.browser.memory_stats
if not memory['Browser']:
return
metric = 'resident_set_size'
if sys.platform == 'win32':
metric = 'working_set'
def AddSummariesForProcessTypes(process_types_memory, process_type_trace):
def AddSummary(value_name_memory, value_name_trace):
if len(process_types_memory) > 1 and value_name_memory.endswith('Peak'):
return
values = []
for process_type_memory in process_types_memory:
if value_name_memory in memory[process_type_memory]:
values.append(memory[process_type_memory][value_name_memory])
if values:
results.AddSummary(value_name_trace + process_type_trace,
'bytes', sum(values), data_type='unimportant')
AddSummary('VM', 'vm_final_size_')
AddSummary('WorkingSetSize', 'vm_%s_final_size_' % metric)
AddSummary('PrivateDirty', 'vm_private_dirty_final_')
AddSummary('ProportionalSetSize', 'vm_proportional_set_size_final_')
AddSummary('VMPeak', 'vm_peak_size_')
AddSummary('WorkingSetSizePeak', '%s_peak_size_' % metric)
AddSummariesForProcessTypes(['Browser'], 'browser')
AddSummariesForProcessTypes(['Renderer'], 'renderer')
AddSummariesForProcessTypes(['Gpu'], 'gpu')
AddSummariesForProcessTypes(['Browser', 'Renderer', 'Gpu'], 'total')
results.AddSummary('commit_charge', 'kb',
memory['SystemCommitCharge'] - self.start_commit_charge,
data_type='unimportant')
results.AddSummary('processes', 'count', memory['ProcessCount'],
data_type='unimportant')
def MeasureIO(self, tab, results):
io_stats = tab.browser.io_stats
if not io_stats['Browser']:
return
def AddSummariesForProcessType(process_type_io, process_type_trace):
if 'ReadOperationCount' in io_stats[process_type_io]:
results.AddSummary('read_operations_' + process_type_trace, '',
io_stats[process_type_io]
['ReadOperationCount'],
data_type='unimportant')
if 'WriteOperationCount' in io_stats[process_type_io]:
results.AddSummary('write_operations_' + process_type_trace, '',
io_stats[process_type_io]
['WriteOperationCount'],
data_type='unimportant')
if 'ReadTransferCount' in io_stats[process_type_io]:
results.AddSummary('read_bytes_' + process_type_trace, 'kb',
io_stats[process_type_io]
['ReadTransferCount'] / 1024,
data_type='unimportant')
if 'WriteTransferCount' in io_stats[process_type_io]:
results.AddSummary('write_bytes_' + process_type_trace, 'kb',
io_stats[process_type_io]
['WriteTransferCount'] / 1024,
data_type='unimportant')
AddSummariesForProcessType('Browser', 'browser')
AddSummariesForProcessType('Renderer', 'renderer')
AddSummariesForProcessType('Gpu', 'gpu')
def MeasurePage(self, page, tab, results):
def _IsDone():
return bool(tab.EvaluateJavaScript('__pc_load_time'))
util.WaitFor(_IsDone, 60)
for h in self.histograms:
h.GetValue(page, tab, results)
results.Add('page_load_time', 'ms',
int(float(tab.EvaluateJavaScript('__pc_load_time'))),
chart_name='times')
def DidRunPageSet(self, tab, results):
self.MeasureMemory(tab, results)
self.MeasureIO(tab, results)
|
StarcoderdataPython
|
4816897
|
# Remove the temp directory and then create a fresh one
from __future__ import print_function
import os
import sys
import shutil
from subprocess import Popen, PIPE
# exclude files that take time on a local machine
exclude = ["flopy_swi2_ex2.py", "flopy_swi2_ex5.py"]
if "CI" in os.environ:
exclude = []
else:
for arg in sys.argv:
if arg.lower() == "--all":
exclude = []
sdir = os.path.join("..", "examples", "scripts")
tdir = os.path.join("..", "examples", "Tutorials")
# make working directories
tempdir = os.path.join(".", "temp")
if os.path.isdir(tempdir):
shutil.rmtree(tempdir)
os.mkdir(tempdir)
testdirs = (
os.path.join(".", "temp", "scripts"),
os.path.join(".", "temp", "tutorials"),
)
for testdir in testdirs:
if os.path.isdir(testdir):
shutil.rmtree(testdir)
os.mkdir(testdir)
# add testdir to python path
sys.path.append(testdir)
def copy_scripts(src_dir, dst_dir, include_subdir=False):
if include_subdir:
files = []
for dirpath, _, filenames in os.walk(src_dir):
files += [
os.path.join(dirpath, filename)
for filename in sorted(filenames)
if filename.endswith(".py") and filename not in exclude
]
else:
files = [
os.path.join(src_dir, f)
for f in sorted(os.listdir(src_dir))
if f.endswith(".py") and f not in exclude
]
# copy files
for src in files:
filename = os.path.basename(src)
filedir = os.path.dirname(src)
dst = os.path.join(dst_dir, os.path.basename(src))
# copy script
print("copying {} from {} to {}".format(filename, filedir, dst_dir))
shutil.copyfile(src, dst)
return [os.path.basename(filepath) for filepath in files]
def import_from(mod, name):
mod = __import__(mod)
main = getattr(mod, name)
return main
def run_scripts(fn, testdir):
# import run function from scripts
s = os.path.splitext(fn)[0]
run = import_from(s, "run")
# change to working directory
opth = os.getcwd()
print('changing to working directory "{}"'.format(testdir))
os.chdir(testdir)
# run the script
ival = run()
# change back to starting directory
print('changing back to starting directory "{}"'.format(opth))
os.chdir(opth)
# make sure script ran successfully
assert ival == 0, "could not run {}".format(fn)
def run_tutorial_scripts(fn, testdir):
args = ("python", fn)
print("running...'{}'".format(" ".join(args)))
proc = Popen(args, stdout=PIPE, stderr=PIPE, cwd=testdir)
stdout, stderr = proc.communicate()
if stdout:
print(stdout.decode("utf-8"))
if stderr:
print("Errors:\n{}".format(stderr.decode("utf-8")))
return
def test_scripts():
# get list of scripts to run
files = copy_scripts(sdir, testdirs[0])
for fn in files:
yield run_scripts, fn, testdirs[0]
def test_tutorial_scripts():
# get list of scripts to run
files = copy_scripts(tdir, testdirs[1], include_subdir=True)
for fn in files:
yield run_tutorial_scripts, fn, testdirs[1]
if __name__ == "__main__":
# get list of scripts to run
files = copy_scripts(sdir, testdirs[0])
for fn in files:
run_scripts(fn, testdirs[0])
# get list of tutorial scripts to run
files = copy_scripts(tdir, testdirs[1], include_subdir=True)
for fn in files:
run_tutorial_scripts(fn, testdirs[1])
|
StarcoderdataPython
|
1797050
|
<gh_stars>1-10
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import itertools
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declared_attr
from anthill.framework.db import db
"""
models.py features a basic, non-hierarchical, non-constrained RBAC data model,
also known as a flat model
-- Ref: http://csrc.nist.gov/rbac/sandhu-ferraiolo-kuhn-00.pdf
+-----------------+ +-------------------+ +---------------+
| | | | | |
| | | R o l e | | |
| R o l e +----------+ Permission +----------+ Permission |
| | | | | |
+-----------------+ +-------------------+ +---------------+
+-----------------+ +-------------------+ +---------------+
| | | | | |
| | | R o l e | | |
| U s e r +----------+ Membership +----------+ R o l e |
| | | | | |
+-----------------+ +-------------------+ +---------------+
"""
role_permission = db.Table(
'role_permission', db.metadata,
db.Column('role_id', db.ForeignKey('role.id'), primary_key=True),
db.Column('permission_id', db.ForeignKey('permission.id'), primary_key=True)
)
role_membership = db.Table(
'role_membership', db.metadata,
db.Column('role_id', db.ForeignKey('role.id'), primary_key=True),
db.Column('user_id', db.ForeignKey('user.id'), primary_key=True)
)
class UserMixin(db.Model):
__abstract__ = True
@declared_attr
def roles(self):
return db.relationship('Role', secondary=role_membership, backref='users')
@declared_attr
def perms(self):
return association_proxy('roles', 'permissions')
@property
def permissions(self):
return list(itertools.chain(*self.perms))
class Credential(db.Model):
__tablename__ = 'credential'
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.ForeignKey('user.id'), nullable=False, unique=False)
credential = db.Column(db.String, nullable=False)
credential_type_id = db.Column(db.ForeignKey('credential_type.id'), nullable=False)
expiration_dt = db.Column(db.DateTime(timezone=True), nullable=False)
user = db.relationship('User',
backref='credential',
cascade="all, delete-orphan",
single_parent=True)
def __repr__(self):
return ("Credential(credential_type_id={0}, user_id={1})".
format(self.credential_type_id, self.user_id))
class CredentialType(db.Model):
__tablename__ = 'credential_type'
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String, nullable=False)
def __repr__(self):
return "CredentialType(title={0})".format(self.title)
class Domain(db.Model):
__tablename__ = 'domain'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255), nullable=False)
def __repr__(self):
return "Domain(id={0}, name={1})".format(self.id, self.name)
class Action(db.Model):
__tablename__ = 'action'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255), nullable=False)
def __repr__(self):
return "Action(id={0}, name={1})".format(self.id, self.name)
class Resource(db.Model):
__tablename__ = 'resource'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255), nullable=False)
def __repr__(self):
return "Resource(id={0}, name={1})".format(self.id, self.name)
class Scope(db.Model):
__tablename__ = 'scope'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255), nullable=False)
def __repr__(self):
return "Scope(id={0}, name={1})".format(self.id, self.name)
class Permission(db.Model):
__tablename__ = 'permission'
id = db.Column(db.Integer, primary_key=True)
domain_id = db.Column(db.ForeignKey('domain.id'), nullable=True)
action_id = db.Column(db.ForeignKey('action.id'), nullable=True)
resource_id = db.Column(db.ForeignKey('resource.id'), nullable=True)
domain = db.relationship('Domain', backref='permission')
action = db.relationship('Action', backref='permission')
resource = db.relationship('Resource', backref='permission')
roles = db.relationship('Role', secondary=role_permission,
backref='permissions')
users = association_proxy('roles', 'users')
def __repr__(self):
return ("Permission(domain_id={0},action_id={1},resource_id={2})".
format(self.domain_id, self.action_id, self.resource_id))
class Role(db.Model):
__tablename__ = 'role'
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(100))
def __repr__(self):
return "Role(title={0})".format(self.title)
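# Illustrative usage sketch (not part of the original module). It shows how the
# flat RBAC tables above can be wired together; the concrete names ("article",
# "write", "editor") are hypothetical examples, and session handling/commits are
# left to the application.
#
#   article = Resource(name='article')
#   write = Action(name='write')
#   perm = Permission(action=write, resource=article)
#   editor = Role(title='editor')
#   editor.permissions.append(perm)
#   # any model subclassing UserMixin then inherits:
#   #   user.roles.append(editor)
#   #   user.permissions  # -> [perm], via the 'roles' -> 'permissions' proxy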
|
StarcoderdataPython
|
139217
|
<reponame>Paul11100/LeetCode
from typing import List
class Solution:
# Accumulator List (Accepted), O(n) time and space
def waysToMakeFair(self, nums: List[int]) -> int:
acc = []
is_even = True
n = len(nums)
for i in range(n):
even, odd = acc[i-1] if i > 0 else (0, 0)
if is_even:
even += nums[i]
else:
odd += nums[i]
is_even = not is_even
acc.append((even, odd))
res = 0
for i in range(n):
even = odd = 0
if i > 0:
even += acc[i-1][0]
odd += acc[i-1][1]
if i < n-1:
even += acc[n-1][1] - acc[i][1]
odd += acc[n-1][0] - acc[i][0]
if even == odd:
res += 1
return res
# Two Sum Pairs for even and odd (Top Voted), O(n) time, O(1) space
def waysToMakeFair(self, A: List[int]) -> int:
s1, s2 = [0, 0], [sum(A[0::2]), sum(A[1::2])]
res = 0
for i, a in enumerate(A):
s2[i % 2] -= a
res += s1[0] + s2[1] == s1[1] + s2[0]
s1[i % 2] += a
return res
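# Quick illustrative check (hypothetical driver code, not part of the original
# solution file). For nums = [2, 1, 6, 4], only removing the element at index 1
# leaves an array whose even-indexed sum equals its odd-indexed sum, so the
# expected answer is 1.
if __name__ == '__main__':
    assert Solution().waysToMakeFair([2, 1, 6, 4]) == 1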
|
StarcoderdataPython
|
34545
|
<filename>abc/abc107/abc107b.py
from sys import stdout
H, W = map(int, input().split())
a = [input() for _ in range(H)]
h = [all(c == '.' for c in a[i]) for i in range(H)]
w = [True] * W
for i in range(H):
for j in range(W):
w[j] = w[j] and a[i][j] == '.'
for i in range(H):
if h[i]:
continue
for j in range(W):
if w[j]:
continue
stdout.write(a[i][j])
stdout.write('\n')
|
StarcoderdataPython
|
3212672
|
<reponame>civicboom/civicboom
from civicboom.tests import *
from civicboom.model.meta import Session
from civicboom.model import Message, Member
#import json
class TestMessagesController(TestController):
def test_conversation_with(self):
# Create a mini conversation
self.log_in_as('unittest')
self.send_member_message('unitfriend', 'best friend', 'you are my bestest friend ever. (msg1)')
self.log_in_as('unitfriend')
self.send_member_message('unittest', 're', 'No, im not, you are creaping me out, never message me again (msg2)')
self.log_in_as('unittest')
self.send_member_message('unitfriend', 're', 'But what of the sweet moments we have shared ... DOES THAT MEAN NOTHING TO YOU!!! (msg3)')
# Check the HTML render of the conversation contains the 3 messages
# if they are present in the HTML they are present in the API return
response = self.app.get(url('member_action', id='unitfriend', action="conversation"))
self.assertIn('msg1', response.body)
self.assertIn('msg2', response.body)
self.assertIn('msg3', response.body)
def test_mark_all_read(self):
# Create new message to unittest
self.log_in_as('unitfriend')
message_id = self.send_member_message('unittest', 'mark_all_test', 'mark_all_test')
# Check message is in message list as unittest
self.log_in_as('unittest')
response = self.app.get(url('messages', id='me', list='to', format='json'))
response_json = json.loads(response.body)
self.assertEquals('mark_all_test', response_json['data']['list']['items'][0]['subject'])
self.assertEquals(False , response_json['data']['list']['items'][0]['read'] )
# Call mark_all
response = self.app.post(
url(controller='profile', action='mark_messages_as_read', list='to', format='json'),
params={
'_authentication_token': self.auth_token,
}
)
response_json = json.loads(response.body)
# Check all messages are read
response = self.app.get(url('messages', id='me', list='to', format='json'))
response_json = json.loads(response.body)
for message in response_json['data']['list']['items']:
self.assertEquals(True, message['read'])
# Cleanup
self.delete_message(message_id)
#---------------------------------------------------------------------------
def test_all(self):
self.part_setup()
self.part_new()
self.part_new_frag()
self.part_create()
self.part_create_bad_target()
self.part_create_no_content()
self.part_index()
self.part_index_lists()
self.part_index_as_json()
self.part_show()
self.part_show_as_json()
self.part_show_someone_elses()
self.part_show_non_exist()
self.part_edit()
self.part_edit_as_json()
self.part_update()
self.part_delete_message()
self.part_delete_notification()
self.part_delete_someone_elses()
self.part_delete_non_exist()
#TODO
# view sent messages
# try delete sent
# view notifications
# view recived
# set read
def part_setup(self):
# notifications can't be created manually
n1 = Message()
n1.target = Session.query(Member).get("unittest")
n1.subject = u"Notification! A test"
n1.content = u"A test is happening now :O"
n2 = Message()
n2.target = Session.query(Member).get("unitfriend")
n2.subject = u"Another notification! A test"
n2.content = u"A test part 2 is happening now :O"
n3 = Message()
n3.target = Session.query(Member).get("unittest")
n3.subject = u"deleteme"
n3.content = u"This is a notification to test deletion with"
Session.add_all([n1, n2, n3])
Session.commit()
self.n1_id = n1.id
self.n2_id = n2.id
self.n3_id = n3.id
## new -> create #########################################################
def part_new(self):
response = self.app.get(url('new_message', format='json'))
def part_new_frag(self):
response = self.app.get(url('new_message', format='frag'))
def part_create(self):
self.send_member_message('unittest', 'arrr, a subject', 'I am content')
self.log_in_as("unittest")
self.m1_id = self.send_member_message('unitfriend', 'Re: singing' , 'My singing is fine!')
self.log_in_as("unitfriend")
self.m2_id = self.send_member_message('unittest' , 'Re: Re: singing', 'It is totally not! And to explain, I will use a sentence that is over 50 characters long, to test the Message.__unicode__ truncation feature')
self.m5_id = self.send_member_message('unittest' , 'deleteme' , 'this is a message to test deletion with')
self.log_in_as("unittest")
def part_create_bad_target(self):
response = self.app.post(
url('messages'),
params={
'_authentication_token': self.auth_token,
'target': 'MrNotExists',
'subject': 'arrr, a subject',
'content': 'I am content',
},
status=400
)
def part_create_no_content(self):
response = self.app.post(
url('messages'),
params={
'_authentication_token': self.auth_token,
'target': 'unittest',
'subject': 'arrr, a subject',
},
status=400
)
## index -> show #########################################################
def part_index(self):
response = self.app.get(url('messages', format="frag"))
self.assertIn("Re: Re: singing", response)
def part_index_lists(self):
response = self.app.get(url('messages', format="json", list="notification"))
response = self.app.get(url('messages', format="json", list="to"))
response = self.app.get(url('messages', format="json", list="sent"))
# TODO: need asserts here to check is actual messages are included
response = self.app.get(url('messages', format="json", list="whgarbl"), status=400)
def part_index_as_json(self):
response = self.app.get(url('messages', format='json'))
self.assertIn("Re: Re: singing", response)
def part_show(self):
response = self.app.get(url('message', id=self.m2_id, format="frag"))
# TODO - check read status after viewing
self.assertIn("truncation", response)
def part_show_as_json(self):
response = self.app.get(url('message', id=self.m2_id, format='json'))
self.assertIn("truncation", response)
def part_show_someone_elses(self):
self.log_in_as('kitten')
response = self.app.get(url('message', id=self.m1_id), status=403)
self.log_in()
def part_show_non_exist(self):
response = self.app.get(url('message', id=0), status=404)
## edit -> update ########################################################
# messages are un-updatable, so these are stubs
def part_edit(self):
response = self.app.get(url('edit_message', id=1), status=501)
def part_edit_as_json(self):
response = self.app.get(url('edit_message', id=1, format='json'), status=501)
def part_update(self):
response = self.app.put(url('message', id=1), status=501)
## delete ################################################################
def part_delete_message(self):
response = self.app.post(
url('message', id=self.m5_id, format="json"),
params={
'_method': 'delete',
'_authentication_token': self.auth_token
}
)
def part_delete_notification(self):
response = self.app.post(
url('message', id=self.n3_id, format="json"),
params={
'_method': 'delete',
'_authentication_token': self.auth_token
}
)
def part_delete_someone_elses(self):
response = self.app.post(
url('message', id=self.m1_id, format="json"),
params={
'_method': 'delete',
'_authentication_token': self.auth_token
},
status=403
)
def part_delete_non_exist(self):
response = self.app.post(
url('message', id=0, format="json"),
params={
'_method': 'delete',
'_authentication_token': self.auth_token
},
status=404
)
|
StarcoderdataPython
|
3228791
|
import os
import PySimpleGUI as sg
from utils.gui import inference_window, training_window
layout = [
[sg.Button('Inference', key='inference'), sg.Button('Training', key='training')],
[sg.Cancel('Quit', key='cancel')],
]
window = sg.Window('Bi-Vulma', layout, resizable=True)
while True:
event, values = window.read()
if event == sg.WINDOW_CLOSED or 'cancel' in event:
break
if event == 'inference':
inference_window()
elif event == 'training':
training_window()
window.close()
|
StarcoderdataPython
|
3353725
|
<reponame>k2bd/firebased
from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import List, Optional
from dateparser import parse as parse_datetime
@dataclass
class _Base:
def __post_init__(self):
pass
@dataclass
class _WithUserBasic(_Base):
#: ID token
id_token: str
#: Refresh token
refresh_token: str
#: Seconds until the token expires, as of time of issue
expires_in: Optional[timedelta]
def __post_init__(self):
super().__post_init__()
if self.expires_in is not None:
self.expires_in = timedelta(seconds=float(self.expires_in))
@dataclass
class _WithResponseKind(_Base):
#: Response kind, sent by gcloud
kind: str
@dataclass
class _WithIsNewUser(_Base):
#: Whether the token is issued to a new user
is_new_user: bool
@dataclass
class _WithAccessToken(_Base):
#: Access token
access_token: str
@dataclass
class _WithTokenType(_Base):
#: Type of the token
token_type: str
@dataclass
class _WithUserId(_Base):
#: ID of the user
user_id: str
@dataclass
class _WithProjectId(_Base):
#: Firebase project ID
project_id: str
@dataclass
class _WithEmail(_Base):
#: Email of the new user
email: str
@dataclass
class _WithPasswordHash(_Base):
#: Hash version of the password
password_hash: str
@dataclass
class _WithLocalId(_Base):
#: UID of the new user
local_id: str
@dataclass
class _WithRegistered(_Base):
#: Whether the email is for an existing account
registered: bool
@dataclass
class _WithFederatedId(_Base):
#: The unique ID identifies the IdP account.
federated_id: str
@dataclass
class _WithProviderId(_Base):
#: The linked provider ID (e.g. "google.com" for the Google provider).
provider_id: str
@dataclass
class _WithEmailVerified(_Base):
#: Whether the sign-in email is verified.
email_verified: bool
@dataclass
class _WithOauthIdToken(_Base):
#: The OIDC id token if available.
oauth_id_token: str
@dataclass
class _WithOauthAccessToken(_Base):
#: The OAuth access token if available.
oauth_access_token: str
@dataclass
class _WithOauthTokenSecret(_Base):
#: The OAuth 1.0 token secret if available.
oauth_token_secret: str
@dataclass
class _WithRawUserInfo(_Base):
#: The stringified JSON response containing all the IdP data corresponding
#: to the provided OAuth credential.
raw_user_info: str
@dataclass
class _WithFirstName(_Base):
#: The first name for the account.
first_name: str
@dataclass
class _WithLastName(_Base):
#: The last name for the account.
last_name: str
@dataclass
class _WithFullName(_Base):
#: The full name for the account.
full_name: str
@dataclass
class _WithDisplayName(_Base):
#: The display name for the account.
display_name: Optional[str] = None
@dataclass
class _WithPhotoUrl(_Base):
#: The photo Url for the account.
photo_url: Optional[str] = None
@dataclass
class _WithProfilePicture(_Base):
#: Account's profile picture.
profile_picture: Optional[str] = None
@dataclass
class _WithNeedConfirmation(_Base):
#: Whether another account with the same credential already exists.
#: The user will need to sign in to the original account and then link the
#: current credential to it.
need_confirmation: bool
@dataclass
class _WithRequestType(_Base):
#: Type of the request
request_type: str
@dataclass
class ProviderUserInfoItem(_WithProviderId, _WithFederatedId):
pass
@dataclass
class _WithProviderUserInfo(_Base):
#: List of all linked provider objects
provider_user_info: List[ProviderUserInfoItem]
@dataclass
class _WithPasswordUpdatedAt(_Base):
#: The timestamp, in milliseconds, that the account password was last
#: changed.
password_updated_at: float
@dataclass
class _WithValidSince(_Base):
#: The timestamp, in seconds, which marks a boundary, before which
#: Firebase ID token are considered revoked.
valid_since: str
@dataclass
class _WithDisabled(_Base):
#: Whether the account is disabled or not.
disabled: Optional[bool] = None
@dataclass
class _WithLastLoginAt(_Base):
#: The time that the account last logged in at.
last_login_at: datetime
def __post_init__(self):
super().__post_init__()
self.last_login_at = datetime.fromtimestamp(float(self.last_login_at) / 1000.0)
@dataclass
class _WithCreatedAt(_Base):
#: The time that the account was created at.
created_at: datetime
def __post_init__(self):
super().__post_init__()
self.created_at = datetime.fromtimestamp(float(self.created_at) / 1000.0)
@dataclass
class _WithCustomAuth(_Base):
#: Whether the account is authenticated by the developer.
custom_auth: Optional[bool] = None
@dataclass
class _WithSalt(_Base):
#: Salt
salt: str
@dataclass
class _WithLastRefreshAt(_Base):
#: Last Refresh Time
last_refresh_at: Optional[datetime]
def __post_init__(self):
super().__post_init__()
self.last_refresh_at = (
parse_datetime(self.last_refresh_at) if self.last_refresh_at else None
)
# --- Returned types
@dataclass
class SignInWithTokenUser(
_WithUserBasic,
_WithIsNewUser,
_WithResponseKind,
):
pass
@dataclass
class RefreshUser(
_WithUserBasic,
_WithTokenType,
_WithUserId,
_WithProjectId,
_WithAccessToken,
):
pass
@dataclass
class SignUpUser(
_WithUserBasic,
_WithEmail,
_WithLocalId,
_WithResponseKind,
):
pass
@dataclass
class SignInWithPasswordUser(
_WithDisplayName,
_WithPhotoUrl,
_WithProfilePicture,
_WithUserBasic,
_WithResponseKind,
_WithEmail,
_WithLocalId,
_WithRegistered,
):
pass
@dataclass
class AnonymousUser(
_WithUserBasic,
_WithResponseKind,
_WithLocalId,
):
pass
@dataclass
class SignInWithOauthUser(
_WithDisplayName,
_WithPhotoUrl,
_WithUserBasic,
_WithResponseKind,
_WithFederatedId,
_WithProviderId,
_WithLocalId,
_WithEmailVerified,
_WithEmail,
_WithOauthIdToken,
_WithOauthAccessToken,
_WithOauthTokenSecret,
_WithRawUserInfo,
_WithFirstName,
_WithLastName,
_WithFullName,
_WithNeedConfirmation,
):
pass
@dataclass
class EmailProviders(
_WithRegistered,
):
#: The list of providers that the user has previously signed in with.
all_providers: List[str]
@dataclass
class ResetResponse(
_WithEmail,
_WithResponseKind,
):
pass
@dataclass
class VerifyResetResponse(
_WithEmail,
_WithRequestType,
_WithResponseKind,
):
pass
@dataclass
class ChangeEmailResponse(
_WithUserBasic,
_WithResponseKind,
_WithLocalId,
_WithEmail,
_WithPasswordHash,
_WithProviderUserInfo,
_WithEmailVerified,
):
pass
@dataclass
class ChangePasswordResponse(
_WithUserBasic,
_WithResponseKind,
_WithLocalId,
_WithEmail,
_WithPasswordHash,
_WithProviderUserInfo,
_WithEmailVerified,
):
pass
@dataclass
class UpdateProfileResponse(
_WithDisplayName,
_WithPhotoUrl,
_WithResponseKind,
_WithLocalId,
_WithEmail,
_WithPasswordHash,
_WithProviderUserInfo,
_WithEmailVerified,
):
pass
@dataclass
class UserInfoItem(
_WithDisplayName,
_WithPhotoUrl,
_WithCustomAuth,
_WithDisabled,
_WithLocalId,
_WithEmail,
_WithEmailVerified,
_WithProviderUserInfo,
_WithPasswordHash,
_WithPasswordUpdatedAt,
_WithValidSince,
_WithLastLoginAt,
_WithCreatedAt,
_WithSalt,
_WithLastRefreshAt,
):
pass
@dataclass
class AccountInfo(_WithResponseKind):
#: The account associated with the given Firebase ID token.
users: List[UserInfoItem]
def __post_init__(self):
super().__post_init__()
self.users = [UserInfoItem(**user) for user in self.users]
@dataclass
class LinkAccountEmailResponse(
_WithDisplayName,
_WithPhotoUrl,
_WithUserBasic,
_WithResponseKind,
_WithLocalId,
_WithEmail,
_WithPasswordHash,
_WithProviderUserInfo,
_WithEmailVerified,
):
pass
@dataclass
class LinkAccountOauthResponse(
_WithDisplayName,
_WithPhotoUrl,
_WithUserBasic,
_WithResponseKind,
_WithFederatedId,
_WithProviderId,
_WithLocalId,
_WithEmailVerified,
_WithEmail,
_WithOauthIdToken,
_WithOauthAccessToken,
_WithOauthTokenSecret,
_WithRawUserInfo,
_WithFirstName,
_WithLastName,
_WithFullName,
):
pass
@dataclass
class UnlinkProviderResponse(
_WithDisplayName,
_WithPhotoUrl,
_WithLocalId,
_WithEmail,
_WithPasswordHash,
_WithProviderUserInfo,
_WithEmailVerified,
):
pass
@dataclass
class SendEmailVerificationResponse(
_WithEmail,
_WithResponseKind,
):
pass
@dataclass
class ConfirmEmailVerificationResponse(
_WithDisplayName,
_WithPhotoUrl,
_WithEmail,
_WithPasswordHash,
_WithProviderUserInfo,
_WithEmailVerified,
_WithResponseKind,
_WithLocalId,
):
pass
# --- Emulator-only types
@dataclass
class EmulatorSignInObject(_Base):
allow_duplicate_emails: bool
@dataclass
class EmulatorConfigurtion(_Base):
sign_in: EmulatorSignInObject
usage_mode: str
def __post_init__(self):
super().__post_init__()
self.sign_in = EmulatorSignInObject(**self.sign_in)
@dataclass
class EmulatorOobCode(_Base):
email: str
oob_code: str
oob_link: str
request_type: str
@dataclass
class EmulatorOobCodes(_Base):
oob_codes: List[EmulatorOobCode]
def __post_init__(self):
super().__post_init__()
self.oob_codes = [EmulatorOobCode(**oob_code) for oob_code in self.oob_codes]
@dataclass
class EmulatorSmsCode(_Base):
phone_number: str
session_code: str
@dataclass
class EmulatorSmsCodes(_Base):
verification_codes: List[EmulatorSmsCode]
def __post_init__(self):
super().__post_init__()
self.verification_codes = [EmulatorSmsCode(**sms_code) for sms_code in self.verification_codes]
|
StarcoderdataPython
|
3213568
|
<reponame>mahimadubey/leetcode-python
class Solution:
# @return a list of lists of string
def solveNQueens(self, n):
self.n = n
res = []
columns = [-1 for i in range(n)]
self.solve(columns, 0, res)
return res
def make_string_list(self, columns):
sol = [] # One solution (list of strings)
row = ['.' for i in columns]
for c in columns:
new_row = row[:]
new_row[c] = 'Q'
sol.append(''.join(new_row))
return sol
def is_valid(self, columns, row, col):
for r in range(row):
c = columns[r]
if c == col:
return False
if abs(c - col) == row - r:
return False
return True
def solve(self, columns, row, res):
if row == self.n:
res.append(self.make_string_list(columns))
else:
for col in range(self.n):
if self.is_valid(columns, row, col):
columns[row] = col
self.solve(columns, row + 1, res)
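# Illustrative expected output (comment only, not part of the original file):
#   Solution().solveNQueens(4)
#   -> [['.Q..', '...Q', 'Q...', '..Q.'], ['..Q.', 'Q...', '...Q', '.Q..']]
# (the order of the two boards may vary with the search order).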
|
StarcoderdataPython
|
125385
|
#!/usr/bin/env python
# example gtkcombobox.py
import pygtk
pygtk.require('2.0')
import gtk
import gobject
class ComboBox:
def delete_event(self, widget, event, data=None):
gtk.main_quit()
return False
def __init__(self):
self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
self.window.set_title("Combo Box")
self.window.connect("delete_event", self.delete_event)
self.window.set_border_width(12)
# create a vertical box (VBox) to organize widgets
# we will pack a button and a combo box in this box.
self.vbox = gtk.VBox(False, 0)
store = gtk.ListStore(gobject.TYPE_STRING)
combo = gtk.ComboBox(store)
cell = gtk.CellRendererText()
combo.pack_start(cell, True)
combo.add_attribute(cell, 'text', 0)
combo.insert_text(0, "Apple")
combo.insert_text(1, "Banana")
combo.insert_text(2, "Cherry")
combo.insert_text(3, "Durian")
combo.insert_text(4, "Fig")
combo.insert_text(5, "Grapefruit")
combo.insert_text(6, "Jakfruit")
combo.insert_text(7, "Kiwi")
combo.insert_text(8, "Lemon")
combo.insert_text(9, "Mango")
combo.insert_text(10, "Orange")
combo.insert_text(11, "Papaya")
self.button = gtk.Button("ButtoN")
self.vbox.pack_start(self.button, True, True, 0)
self.vbox.pack_start(combo, True, True, 0)
self.window.add(self.vbox)
self.button.show()
self.vbox.show()
combo.show()
self.window.show()
def main():
gtk.main()
return 0
if __name__ == "__main__":
ComboBox()
main()
|
StarcoderdataPython
|
1782573
|
<gh_stars>0
#! /usr/bin/env python
# vim: set fileencoding=utf-8: set encoding=utf-8:
"""
Proposed solution for calculating root mean square of a set of values.
"""
#We need to import the `math` package in order to use the `sqrt` function it
#provides for doing square roots.
import math
def rms(values):
"""
Returns the _root mean square_ of the given sequence of numerical values.
:param values: An iterable of numbers (ints, longs, or floats) for which to
calculate and return the root mean square.
:returns float: The root mean square of the given values.
"""
#Get the squares of the values using a **list comprehension**.
#
#This says: for each element in `values`, call that element `v` and
# multiply it by itself to get the square of the value.
# Collect these squares as the elements of a new list, in order corresponding
# to the order of `values`.
squares = [v*v for v in values]
#Calculate the sum of the squares.
#This is easily done using the builtin `sum` function.
sum_of_squares = sum(squares)
#Calculate the average ("mean") of the squares.
#This is simply the sum of the values divided by the number of values.
#We need to make sure we don't divide by 0!
if len(squares) == 0:
raise ValueError('The root mean square is undefined for an empty set.')
else:
mean_square = sum_of_squares / float(len(squares))
#Lastly, take the square root of the mean.
# Square roots are provided by the `sqrt` function in the `math` package.
root_mean_square = math.sqrt(mean_square)
return root_mean_square
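# Worked example (illustrative, not part of the original exercise):
#   rms([3.0, 4.0]) == math.sqrt((3.0**2 + 4.0**2) / 2) == math.sqrt(12.5) ≈ 3.5355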
if __name__ == '__main__':
import sys
#Read values from stdin, splitting up the string by whitespace.
data = sys.stdin.read().split()
#Convert the values to numbers.
values = [float(d) for d in data]
print(rms(values))
|
StarcoderdataPython
|
49914
|
class TrieNode:
def __init__(self, c=None, end=False):
self.c = c
self.children = {}
self.end = end
class Trie:
def __init__(self):
"""
Initialize your data structure here.
"""
self.root = TrieNode('')
def insert(self, word: str) -> None:
"""
Inserts a word into the trie.
"""
root = self.root
for ch in word:
if ch not in root.children:
node = TrieNode(ch)
root.children[ch] = node
root = root.children[ch]
root.end = True
def search(self, word: str) -> bool:
"""
Returns if the word is in the trie.
"""
root = self.root
for ch in word:
if ch not in root.children:
return False
root = root.children[ch]
return root.end
def startsWith(self, prefix: str) -> bool:
"""
Returns if there is any word in the trie that starts with the given prefix.
"""
root = self.root
for ch in prefix:
if ch not in root.children:
return False
root = root.children[ch]
return True
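# Minimal usage sketch (illustrative addition, not part of the original snippet):
if __name__ == '__main__':
    trie = Trie()
    trie.insert('apple')
    assert trie.search('apple')       # the exact word was inserted
    assert not trie.search('app')     # 'app' is only a prefix, not a stored word
    assert trie.startsWith('app')     # ...but it is a valid prefix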
|
StarcoderdataPython
|
152042
|
import json
import logging
import os
import sys
from typing import Any, Iterator, Optional
import boto3
from botocore.exceptions import ClientError
from chalice import Chalice
app = Chalice(app_name="swarm-lifecycle-event-handler")
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(os.getenv("GRAPL_LOG_LEVEL", "ERROR"))
if bool(os.environ.get("IS_LOCAL", False)):
LOGGER.addHandler(logging.StreamHandler(stream=sys.stdout))
INSTANCE_LAUNCHING = "autoscaling:EC2_INSTANCE_LAUNCHING"
INSTANCE_TERMINATING = "autoscaling:EC2_INSTANCE_TERMINATING"
def _instance_ip_address(instance_id: str) -> str:
ec2 = boto3.resource("ec2")
instance = ec2.Instance(instance_id)
return instance.private_ip_address
def _dns_ip_addresses(
route53: Any, dns_name: str, ip_address: Optional[str], hosted_zone_id: str
) -> Iterator[str]:
for rrset in route53.list_resource_record_sets(
HostedZoneId=hosted_zone_id,
StartRecordName=dns_name,
)["ResourceRecordSets"]:
if rrset["Type"] == "A":
for rrecord in rrset["ResourceRecords"]:
yield rrecord["Value"]
if ip_address is not None:
yield ip_address
def _complete_lifecycle_action(
lifecycle_hook_name: str, autoscaling_group_name: str, instance_id: str
) -> None:
autoscaling = boto3.client("autoscaling")
autoscaling.complete_lifecycle_action(
LifecycleHookName=lifecycle_hook_name,
AutoScalingGroupName=autoscaling_group_name,
InstanceId=instance_id,
LifecycleActionResult="CONTINUE",
)
LOGGER.info(
f"Completed {lifecycle_hook_name} lifecycle action for instance {instance_id} in ASG {autoscaling_group_name}"
)
def _remove_dns_ip(dns_name: str, ip_address: str, hosted_zone_id: str) -> None:
route53 = boto3.client("route53")
ip_addresses = [
ip
for ip in _dns_ip_addresses(route53, dns_name, None, hosted_zone_id)
if ip != ip_address
]
change = {
"Action": "DELETE", # delete the A record if this is the last address
"ResourceRecordSet": {
"Name": dns_name,
"Type": "A",
"TTL": 300,
"ResourceRecords": [{"Value": ip_address}],
},
}
if len(ip_addresses) > 0:
change["Action"] = "UPSERT"
change["ResourceRecordSet"]["ResourceRecords"] = [
{"Value": ip} for ip in ip_addresses
]
try:
comment = f"Removed {ip_address} from {dns_name} DNS A Record"
route53.change_resource_record_sets(
HostedZoneId=hosted_zone_id,
ChangeBatch={
"Changes": [change],
"Comment": comment,
},
)
LOGGER.info(comment)
except ClientError as e:
if e.response["Error"]["Code"] == "InvalidChangeBatch":
LOGGER.warn(f"DNS record does not exist for {ip_address}")
else:
raise e
def _insert_dns_ip(dns_name: str, ip_address: str, hosted_zone_id: str) -> None:
route53 = boto3.client("route53")
comment = f"Inserted {ip_address} into {dns_name} DNS A Record"
route53.change_resource_record_sets(
HostedZoneId=hosted_zone_id,
ChangeBatch={
"Changes": [
{
"Action": "UPSERT",
"ResourceRecordSet": {
"Name": dns_name,
"Type": "A",
"TTL": 300,
"ResourceRecords": [
{"Value": ip}
for ip in _dns_ip_addresses(
route53, dns_name, ip_address, hosted_zone_id
)
],
},
},
],
"Comment": comment,
},
)
LOGGER.info(comment)
@app.lambda_function()
def main(event, context) -> None:
LOGGER.debug(f"Processing event: {json.dumps(event)}")
for record in event["Records"]:
LOGGER.debug(f"Processing record: {json.dumps(record)}")
if "Sns" in record:
message = json.loads(record["Sns"]["Message"])
transition = message["LifecycleTransition"]
if "NotificationMetadata" in message:
notification_metadata = json.loads(message["NotificationMetadata"])
hosted_zone_id = notification_metadata["HostedZoneId"]
dns_name = notification_metadata["DnsName"]
prefix = notification_metadata["Prefix"]
autoscaling_group_name = notification_metadata["AsgName"]
else:
LOGGER.warn(
f"NotificationMetadata absent from message: {json.dumps(message)}"
)
if "EC2InstanceId" in message:
instance_id = message["EC2InstanceId"]
ip_address = _instance_ip_address(instance_id)
else:
LOGGER.warn(f"EC2InstanceId absent from message: {json.dumps(message)}")
if transition == INSTANCE_LAUNCHING:
try:
_insert_dns_ip(dns_name, ip_address, hosted_zone_id)
finally:
_complete_lifecycle_action(
lifecycle_hook_name=f"{prefix}-SwarmLaunchHook",
autoscaling_group_name=autoscaling_group_name,
instance_id=instance_id,
)
elif transition == INSTANCE_TERMINATING:
try:
_remove_dns_ip(dns_name, ip_address, hosted_zone_id)
finally:
_complete_lifecycle_action(
lifecycle_hook_name=f"{prefix}-SwarmTerminateHook",
autoscaling_group_name=autoscaling_group_name,
instance_id=instance_id,
)
else:
LOGGER.warn(
f'Encountered unknown lifecycle transition "{transition}" in message: {json.dumps(message)}'
)
else:
LOGGER.warn(f"Encountered unknown record: {json.dumps(record)}")
|
StarcoderdataPython
|
3289468
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetChangeResult',
'AwaitableGetChangeResult',
'get_change',
'get_change_output',
]
@pulumi.output_type
class GetChangeResult:
def __init__(__self__, additions=None, deletions=None, is_serving=None, kind=None, start_time=None, status=None):
if additions and not isinstance(additions, list):
raise TypeError("Expected argument 'additions' to be a list")
pulumi.set(__self__, "additions", additions)
if deletions and not isinstance(deletions, list):
raise TypeError("Expected argument 'deletions' to be a list")
pulumi.set(__self__, "deletions", deletions)
if is_serving and not isinstance(is_serving, bool):
raise TypeError("Expected argument 'is_serving' to be a bool")
pulumi.set(__self__, "is_serving", is_serving)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if start_time and not isinstance(start_time, str):
raise TypeError("Expected argument 'start_time' to be a str")
pulumi.set(__self__, "start_time", start_time)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
@property
@pulumi.getter
def additions(self) -> Sequence['outputs.ResourceRecordSetResponse']:
"""
Which ResourceRecordSets to add?
"""
return pulumi.get(self, "additions")
@property
@pulumi.getter
def deletions(self) -> Sequence['outputs.ResourceRecordSetResponse']:
"""
Which ResourceRecordSets to remove? Must match existing data exactly.
"""
return pulumi.get(self, "deletions")
@property
@pulumi.getter(name="isServing")
def is_serving(self) -> bool:
"""
If the DNS queries for the zone will be served.
"""
return pulumi.get(self, "is_serving")
@property
@pulumi.getter
def kind(self) -> str:
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> str:
"""
The time that this operation was started by the server (output only). This is in RFC3339 text format.
"""
return pulumi.get(self, "start_time")
@property
@pulumi.getter
def status(self) -> str:
"""
Status of the operation (output only). A status of "done" means that the request to update the authoritative servers has been sent, but the servers might not be updated yet.
"""
return pulumi.get(self, "status")
class AwaitableGetChangeResult(GetChangeResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetChangeResult(
additions=self.additions,
deletions=self.deletions,
is_serving=self.is_serving,
kind=self.kind,
start_time=self.start_time,
status=self.status)
def get_change(change_id: Optional[str] = None,
client_operation_id: Optional[str] = None,
managed_zone: Optional[str] = None,
project: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetChangeResult:
"""
Fetches the representation of an existing Change.
"""
__args__ = dict()
__args__['changeId'] = change_id
__args__['clientOperationId'] = client_operation_id
__args__['managedZone'] = managed_zone
__args__['project'] = project
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('google-native:dns/v1:getChange', __args__, opts=opts, typ=GetChangeResult).value
return AwaitableGetChangeResult(
additions=__ret__.additions,
deletions=__ret__.deletions,
is_serving=__ret__.is_serving,
kind=__ret__.kind,
start_time=__ret__.start_time,
status=__ret__.status)
@_utilities.lift_output_func(get_change)
def get_change_output(change_id: Optional[pulumi.Input[str]] = None,
client_operation_id: Optional[pulumi.Input[Optional[str]]] = None,
managed_zone: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[Optional[str]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetChangeResult]:
"""
Fetches the representation of an existing Change.
"""
...
|
StarcoderdataPython
|
1747947
|
<reponame>Kayuii/trezor-crypto<filename>python/trezorlib/btc.py<gh_stars>0
# This file is part of the Trezor project.
#
# Copyright (C) 2012-2018 SatoshiLabs and contributors
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the License along with this library.
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.html>.
from . import coins, messages
from .tools import CallException, expect, normalize_nfc, session
@expect(messages.PublicKey)
def get_public_node(
client,
n,
ecdsa_curve_name=None,
show_display=False,
coin_name=None,
script_type=messages.InputScriptType.SPENDADDRESS,
):
return client.call(
messages.GetPublicKey(
address_n=n,
ecdsa_curve_name=ecdsa_curve_name,
show_display=show_display,
coin_name=coin_name,
script_type=script_type,
)
)
@expect(messages.Address, field="address")
def get_address(
client,
coin_name,
n,
show_display=False,
multisig=None,
script_type=messages.InputScriptType.SPENDADDRESS,
):
return client.call(
messages.GetAddress(
address_n=n,
coin_name=coin_name,
show_display=show_display,
multisig=multisig,
script_type=script_type,
)
)
@expect(messages.MessageSignature)
def sign_message(
client, coin_name, n, message, script_type=messages.InputScriptType.SPENDADDRESS
):
message = normalize_nfc(message)
return client.call(
messages.SignMessage(
coin_name=coin_name, address_n=n, message=message, script_type=script_type
)
)
def verify_message(client, coin_name, address, signature, message):
message = normalize_nfc(message)
try:
resp = client.call(
messages.VerifyMessage(
address=address,
signature=signature,
message=message,
coin_name=coin_name,
)
)
except CallException as e:
resp = e
return isinstance(resp, messages.Success)
@session
def sign_tx(client, coin_name, inputs, outputs, details=None, prev_txes=None):
# set up a transactions dict
txes = {None: messages.TransactionType(inputs=inputs, outputs=outputs)}
# preload all relevant transactions ahead of time
if coin_name in coins.by_name:
load_prevtxes = not coins.by_name[coin_name]["force_bip143"]
else:
load_prevtxes = True
if load_prevtxes:
for inp in inputs:
if inp.script_type not in (
messages.InputScriptType.SPENDP2SHWITNESS,
messages.InputScriptType.SPENDWITNESS,
messages.InputScriptType.EXTERNAL,
):
try:
prev_tx = prev_txes[inp.prev_hash]
except Exception as e:
raise ValueError("Could not retrieve prev_tx") from e
if not isinstance(prev_tx, messages.TransactionType):
raise ValueError("Invalid value for prev_tx") from None
txes[inp.prev_hash] = prev_tx
if details is None:
signtx = messages.SignTx()
else:
signtx = details
signtx.coin_name = coin_name
signtx.inputs_count = len(inputs)
signtx.outputs_count = len(outputs)
res = client.call(signtx)
# Prepare structure for signatures
signatures = [None] * len(inputs)
serialized_tx = b""
def copy_tx_meta(tx):
tx_copy = messages.TransactionType(**tx)
# clear fields
tx_copy.inputs_cnt = len(tx.inputs)
tx_copy.inputs = []
tx_copy.outputs_cnt = len(tx.bin_outputs or tx.outputs)
tx_copy.outputs = []
tx_copy.bin_outputs = []
tx_copy.extra_data_len = len(tx.extra_data or b"")
tx_copy.extra_data = None
return tx_copy
R = messages.RequestType
while isinstance(res, messages.TxRequest):
# If there's some part of signed transaction, let's add it
if res.serialized:
if res.serialized.serialized_tx:
serialized_tx += res.serialized.serialized_tx
if res.serialized.signature_index is not None:
idx = res.serialized.signature_index
sig = res.serialized.signature
if signatures[idx] is not None:
raise ValueError("Signature for index %d already filled" % idx)
signatures[idx] = sig
if res.request_type == R.TXFINISHED:
break
# Device asked for one more information, let's process it.
current_tx = txes[res.details.tx_hash]
if res.request_type == R.TXMETA:
msg = copy_tx_meta(current_tx)
res = client.call(messages.TxAck(tx=msg))
elif res.request_type == R.TXINPUT:
msg = messages.TransactionType()
msg.inputs = [current_tx.inputs[res.details.request_index]]
res = client.call(messages.TxAck(tx=msg))
elif res.request_type == R.TXOUTPUT:
msg = messages.TransactionType()
if res.details.tx_hash:
msg.bin_outputs = [current_tx.bin_outputs[res.details.request_index]]
else:
msg.outputs = [current_tx.outputs[res.details.request_index]]
res = client.call(messages.TxAck(tx=msg))
elif res.request_type == R.TXEXTRADATA:
o, l = res.details.extra_data_offset, res.details.extra_data_len
msg = messages.TransactionType()
msg.extra_data = current_tx.extra_data[o : o + l]
res = client.call(messages.TxAck(tx=msg))
if isinstance(res, messages.Failure):
raise CallException("Signing failed")
if not isinstance(res, messages.TxRequest):
raise CallException("Unexpected message")
if None in signatures:
raise RuntimeError("Some signatures are missing!")
return signatures, serialized_tx
|
StarcoderdataPython
|
117432
|
<filename>tsne_precompute/2_convert_to_bin_for_r.py
from os import path
import struct
import numpy
from config import Config
from sensorimotor_norms.config.config import Config as SMConfig; SMConfig(use_config_overrides_from_file=Config.path)
from tsne import valid_distance_names, SensorimotorTSNE
def convert_file(npy_path, bin_path):
# Thanks to https://www.r-bloggers.com/2012/06/getting-numpy-data-into-r/
# load from the file
mat = numpy.load(npy_path)
# create a binary file
with open(bin_path, mode="wb") as bin_file:
header = struct.pack('2I', mat.shape[0], mat.shape[1])
bin_file.write(header)
# then loop over columns and write each
for i in range(mat.shape[1]):
data = struct.pack('%id' % mat.shape[0], *mat[:, i])
bin_file.write(data)
if __name__ == '__main__':
for distance in valid_distance_names:
for dim in [2, 3]:
npy_path = SensorimotorTSNE.save_path_for(dims=dim, distance_name=distance)
bin_path = path.splitext(npy_path)[0] + '.bin'
convert_file(npy_path, bin_path)
|
StarcoderdataPython
|
1647287
|
from __future__ import division
total_count = len(orgs_id)
current_count = 0
corrected_count = 0
print("{0:.0f}%".format(current_count/total_count * 100))
|
StarcoderdataPython
|
3390701
|
#!/usr/bin/env python3
# Write a program that computes the GC fraction of a DNA sequence in a window
# Window size is 11 nt
# Step size is 5 nt
# Output with 4 significant figures using whichever method you prefer
# Use nested loops
seq = 'ACGACGCAGGAGGAGAGTTTCAGAGATCACGAATACATCCATATTACCCAGAGAGAG'
w = 11
s = 5
for nt in range(0, len(seq)-w+1, s):
gc = 0
for c in seq[nt:nt+w]:
if c == 'G' or c == 'C': gc += 1
print('%d %s %.4f' % (nt, seq[nt:nt+w], gc/w))
"""
0 ACGACGCAGGA 0.6364
5 GCAGGAGGAGA 0.6364
10 AGGAGAGTTTC 0.4545
15 AGTTTCAGAGA 0.3636
20 CAGAGATCACG 0.5455
25 ATCACGAATAC 0.3636
30 GAATACATCCA 0.3636
35 CATCCATATTA 0.2727
40 ATATTACCCAG 0.3636
45 ACCCAGAGAGA 0.5455
"""
|
StarcoderdataPython
|
3384528
|
from .waba_service import WabaService
|
StarcoderdataPython
|
3264192
|
import FWCore.ParameterSet.Config as cms
#
# Hcal fake calibrations
#
#
# please note: in the future, it should load Hcal_FakeConditions.cfi from this same directory
# for 130 is was decided (by DPG) to stick to the old config, hence I load
#
#include "CalibCalorimetry/HcalPlugins/data/Hcal_FakeConditions.cfi"
from CalibCalorimetry.HcalPlugins.Hcal_FakeConditions_cff import *
|
StarcoderdataPython
|
1615813
|
import dgl
from . import register_model, BaseModel
import torch.nn as nn
import numpy as np
import dgl.nn.pytorch as dglnn
import torch
import torch.nn.functional as F
@register_model('DMGI')
class DMGI(BaseModel):
r"""
Description
-----------
**Title:** Unsupervised Attributed Multiplex Network Embedding
**Authors:** <NAME>, <NAME>, <NAME>, <NAME>
DMGI was introduced in `[paper] <https://ojs.aaai.org//index.php/AAAI/article/view/5985>`_
and parameters are defined as follows:
Parameters
----------
meta_paths : dict
Extract metapaths from graph
sc : int
Introducing a weight to self-connections
category : string
The category of the nodes to be classified
in_size : int
Input feature size
hid_unit : int
Hidden units size
dropout : float
Dropout rate on feature. Defaults: ``0.5``.
num_nodes : int
The number of all nodes of the category in the graph
num_classes : int
The number of the category's classes
isBias : bool
If True, adds a learnable bias to the output. Defaults: ``False``.
isAttn : bool
If True, adopt the attention mechanism when combining the relation-specific embeddings. Defaults: ``False``.
isSemi : bool
If True, add the semi-supervised loss term to the total loss
Attributes
----------
H : torch.FloatTensor
The learnable weight tensor.
"""
@classmethod
def build_model_from_args(cls, args, hg):
etypes = hg.canonical_etypes
mps = []
for etype in etypes:
if etype[0] == args.category:
for dst_e in etypes:
if etype[0] == dst_e[2] and etype[2] == dst_e[0]:
if etype[0] != etype[2]:
mps.append([etype, dst_e])
num_nodes = hg.num_nodes(args.category)
return cls(meta_paths=mps, sc=args.sc,
category=args.category, in_size=args.in_dim,
hid_unit=args.hid_unit, nheads=args.num_heads,dropout=args.dropout,
num_nodes=num_nodes, num_classes=args.num_classes,
isSemi=args.isSemi,isAttn=args.isAttn, isBias=args.isBias)
def __init__(self, meta_paths, sc, category, in_size, hid_unit,nheads,
dropout, num_nodes, num_classes, isBias, isAttn, isSemi):
super(DMGI, self).__init__()
self.category = category
# self.layers = nn.ModuleList()
self.hid = hid_unit
self.meta_paths = meta_paths
self.nheads = nheads
self.isAttn = isAttn
self.isSemi = isSemi
self.sc = sc
r"""
The encoder is a single-layer GCN:
.. math::
\begin{equation}
\mathbf{H}^{(r)}=g_{r}\left(\mathbf{X}, \mathbf{A}^{(r)} \mid \mathbf{W}^{(r)}\right)=\sigma\left(\hat{\mathbf{D}}_{r}^{-\frac{1}{2}} \hat{\mathbf{A}}^{(r)} \hat{\mathbf{D}}_{r}^{-\frac{1}{2}} \mathbf{X} \mathbf{W}^{(r)}\right)
\end{equation}
where :math:`\hat{\mathbf{A}}^{(r)}=\mathbf{A}^{(r)}+w \mathbf{I}_{n}` ,
:math:`\hat{D}_{i i}=\sum_{j} \hat{A}_{i j}`
"""
self.gcn = nn.ModuleList([dglnn.GraphConv(in_feats=in_size,
out_feats=hid_unit,
activation=nn.ReLU(),
bias=isBias,
allow_zero_in_degree=True) for _ in range(len(meta_paths))])
self.disc = Discriminator(hid_unit)
self.readout = AvgReadout()
self.readout_act_func = nn.Sigmoid()
self.dropout = dropout
self.num_nodes = num_nodes
# num_head = 1
self.H = nn.Parameter(torch.FloatTensor(1, num_nodes, hid_unit))
self.logistic = LogReg(hid_unit, num_classes)
if self.isAttn:
self.attn = nn.ModuleList(Attention(hid_units=hid_unit,
num_mps=len(meta_paths),
num_ndoes=num_nodes) for _ in range(nheads))
# self.attn = Attention(hid_units=hid_unit, num_mps=len(meta_paths), num_ndoes=num_nodes)
self.init_weight()
print("category:{}, category's classes:{}, isBias:{},"
" isAttn:{}, isSemi:{}".format(category, num_classes,isBias,isAttn,isSemi))
def init_weight(self):
nn.init.xavier_normal_(self.H)
# samp_bias1, samp_bias2 default None
def forward(self, hg, samp_bias1=None, samp_bias2=None):
r"""
The formula to compute the relation-type specific cross entropy :math:`\mathcal{L}^{(r)}`
.. math::
\begin{equation}
\mathcal{L}^{(r)}=\sum_{v_{i} \in \mathcal{V}}^{n} \log \mathcal{D}\left(\mathbf{h}_{i}^{(r)}, \mathbf{s}^{(r)}\right)+\sum_{j=1}^{n} \log \left(1-\mathcal{D}\left(\tilde{\mathbf{h}}_{j}^{(r)}, \mathbf{s}^{(r)}\right)\right)
\end{equation}
where :math:`h_{i}^{(r)}` is calculated by :math:`\mathbf{h}_{i}=\sigma\left(\sum_{j \in N(i)} \frac{1}{c_{i j}} \mathbf{x}_{j} \mathbf{W}\right)`,
:math:`s^{(r)}` is :math:`\mathbf{s}^{(r)}=\operatorname{Readout}\left(\mathbf{H}^{(r)}\right)=\sigma\left(\frac{1}{n} \sum_{i=1}^{n} \mathbf{h}_{i}^{(r)}\right)` .
:math:`\mathcal{D}` is a discriminator that scores patch-summary representation pairs.
:math:`\tilde{\mathbf{h}}_{j}^{(r)}` is obtained by corrupting the original attribute matrix by shuffling it.
"""
h_1_all = [];h_2_all = [];c_all = [];logits = []
result = {}
# process features
features = hg.srcdata['h']
feats = self.normal_feat(features, self.meta_paths)
# shuffled features
shuf_feats = self.shuf_feats(feats)
for idx, meta_path in enumerate(self.meta_paths):
new_g = dgl.metapath_reachable_graph(hg, meta_path)
for i in range(self.sc):
new_g = dgl.add_self_loop(new_g)
feats[idx] = F.dropout(feats[idx], self.dropout, training=self.training)
shuf_feats[idx] = F.dropout(shuf_feats[idx], self.dropout, training=self.training)
h_1 = self.gcn[idx](new_g, feats[idx])
c = self.readout(h_1)
c = self.readout_act_func(c)
h_2 = self.gcn[idx](new_g, shuf_feats[idx])
logit = self.disc(c, h_1, h_2, samp_bias1, samp_bias2)
h_1_all.append(h_1.unsqueeze(0))
h_2_all.append(h_2.unsqueeze(0))
c_all.append(c)
logits.append(logit)
result['logits'] = logits
# Attention or not
if self.isAttn:
r"""
.. math::
\begin{equation}
\mathbf{h}_{i}=\mathcal{Q}\left(\left\{\mathbf{h}^{(r)} \mid r \in \mathcal{R}\right\}\right)=\sum_{r \in \mathcal{R}} a_{i}^{(r)} \mathbf{h}^{(r)}
\end{equation}
where :math:`a_{i}^{(r)}` denotes the importance of relation :math:`r` in generating the final embedding of node :math:`v_i`, defined as:
.. math::
\begin{equation}
a_{i}^{(r)}=\frac{\exp \left(\mathbf{q}^{(r)} \cdot \mathbf{h}_{i}^{(r)}\right)}{\sum_{r^{\prime} \in \mathcal{R}} \exp \left(\mathbf{q}^{\left(r^{\prime}\right)} \cdot \mathbf{h}_{i}^{r^{\prime}}\right)}
\end{equation}
"""
h_1_all_lst = [];h_2_all_lst = [];c_all_lst = []
for h_idx in range(self.nheads):
h_1_all_, h_2_all_, c_all_ = self.attn[h_idx](h_1_all, h_2_all, c_all)
h_1_all_lst.append(h_1_all_);h_2_all_lst.append(h_2_all_); c_all_lst.append(c_all_)
h_1_all = torch.mean(torch.cat(h_1_all_lst, 0), 0).unsqueeze(0)
h_2_all = torch.mean(torch.cat(h_2_all_lst, 0), 0).unsqueeze(0)
else:
h_1_all = torch.mean(torch.cat(h_1_all, 0), 0).unsqueeze(0)
h_2_all = torch.mean(torch.cat(h_2_all, 0), 0).unsqueeze(0)
# Lcs = [Z − AVG { H(r)|r∈ R }]^2 - [Z − AVG { ~H(r)|r∈ R }]^2
pos_reg_loss = ((self.H - h_1_all) ** 2).sum()
neg_reg_loss = ((self.H - h_2_all) ** 2).sum()
reg_loss = pos_reg_loss - neg_reg_loss
result['reg_loss'] = reg_loss
# semi-supervised module
if self.isSemi:
r"""
Extension to Semi-Supervised Learning
.. math::
\begin{equation}
\ell_{\text {sup }}=-\frac{1}{\left|\mathcal{Y}_{L}\right|} \sum_{l \in \mathcal{Y}_{L}} \sum_{i=1}^{c} Y_{l i} \ln \hat{Y}_{l i}
\end{equation}
            where :math:`\mathcal{Y}_{L}` is the set of node indices with labels.
"""
semi = self.logistic(self.H).squeeze(0)
result['semi'] = semi
# result: ['logits','reg_loss','semi']
return result
'''feature_normalize'''
def normal_feat(self, feats, meta_paths):
feat = []
feats = feats[self.category].data
for mp in meta_paths:
rowsum = feats.sum(1)
r_inv = torch.pow(rowsum, -1).flatten()
r_inv[torch.isinf(r_inv)] = 0.
r_mat_inv = torch.diag(r_inv)
feats = torch.spmm(r_mat_inv, feats)
feat.append(feats)
return feat
'''corrupt the original attribute matrix by shuffling it'''
def shuf_feats(self, feats):
shuf_feats = []
for feat in feats:
idx = np.random.permutation(feat.shape[0])
shuf = feat[idx]
shuf_feats.append(shuf)
return shuf_feats
'''In the experiments, some relation types are more beneficial for a
certain downstream task than others. Therefore, we can adopt the
attention mechanism'''
class Attention(nn.Module):
def __init__(self, hid_units, num_mps, num_ndoes):
super(Attention, self).__init__()
self.num_mps = num_mps
self.hid_units = hid_units
self.num_nodes = num_ndoes
self.A = nn.ModuleList([nn.Linear(hid_units, 1) for _ in range(num_mps)])
self.weight_init()
def weight_init(self):
for i in range(self.num_mps):
nn.init.xavier_normal_(self.A[i].weight)
self.A[i].bias.data.fill_(0.0)
def forward(self, feat_pos, feat_neg, summary):
feat_pos, feat_pos_attn = self.attn_feature(feat_pos)
feat_neg, feat_neg_attn = self.attn_feature(feat_neg)
summary, summary_attn = self.attn_summary(summary)
return feat_pos, feat_neg, summary
def attn_feature(self, features):
features_attn = []
for i in range(self.num_mps):
features_attn.append((self.A[i](features[i].squeeze())))
features_attn = F.softmax(torch.cat(features_attn, 1), -1)
features = torch.cat(features,1).squeeze(0)
features_attn_reshaped = features_attn.transpose(1, 0).contiguous().view(-1, 1)
features = features * features_attn_reshaped.expand_as(features)
features = features.view(self.num_mps, self.num_nodes, self.hid_units).sum(0).unsqueeze(0)
return features, features_attn
def attn_summary(self, features):
features_attn = []
for i in range(self.num_mps):
features_attn.append((self.A[i](features[i].squeeze())))
features_attn = F.softmax(torch.cat(features_attn), dim=-1).unsqueeze(1)
features = torch.stack(features, 0)
features_attn_expanded = features_attn.expand_as(features)
features = (features * features_attn_expanded).sum(0).unsqueeze(0)
return features, features_attn
'''
D is a discriminator that scores patch-summary representation pairs.
In this paper, we apply a simple bilinear scoring function as it
empirically performs the best in our experiments:'''
class Discriminator(nn.Module):
r"""
The discriminator
.. math::
\begin{equation}
\mathcal{D}\left(\mathbf{h}_{i}^{(r)}, \mathbf{s}^{(r)}\right)=\sigma\left(\mathbf{h}_{i}^{(r) T} \mathbf{M}^{(r)} \mathbf{s}^{(r)}\right)
\end{equation}
where :math:`M^{(r)}` is a trainable scoring matrix.
"""
def __init__(self, n_h):
super(Discriminator, self).__init__()
self.f_k_bilinear = nn.Bilinear(n_h, n_h, 1)
for m in self.modules():
self.weights_init(m)
def weights_init(self, m):
if isinstance(m, nn.Bilinear):
torch.nn.init.xavier_uniform_(m.weight.data)
if m.bias is not None:
m.bias.data.fill_(0.0)
def forward(self, c, h_pl, h_mi, s_bias1=None, s_bias2=None):
c_x = c.expand_as(h_pl)
sc_1 = torch.squeeze(self.f_k_bilinear(h_pl, c_x), 1) # sc_1 = 1 x nb_nodes
sc_2 = torch.squeeze(self.f_k_bilinear(h_mi, c_x), 1) # sc_2 = 1 x nb_nodes
if s_bias1 is not None:
sc_1 += s_bias1
if s_bias2 is not None:
sc_2 += s_bias2
logits = torch.cat((sc_1, sc_2), 0)
return logits
'''considering the efficiency of the method, we simply employ average pooling'''
class AvgReadout(nn.Module):
r"""
Considering the efficiency of the method, we simply employ average pooling, computing the average of the set of embedding matrices
.. math::
\begin{equation}
\mathbf{H}=\mathcal{Q}\left(\left\{\mathbf{H}^{(r)} \mid r \in \mathcal{R}\right\}\right)=\frac{1}{|\mathcal{R}|} \sum_{r \in \mathcal{R}} \mathbf{H}^{(r)}
\end{equation}
"""
def __init__(self):
super(AvgReadout, self).__init__()
def forward(self, seq):
return torch.mean(seq, 0)
'''logreg'''
class LogReg(nn.Module):
r"""
Parameters
----------
ft_in : int
Size of hid_units
nb_class : int
The number of category's types
"""
def __init__(self, ft_in, nb_classes):
super(LogReg, self).__init__()
self.fc = nn.Linear(ft_in, nb_classes)
for m in self.modules():
self.weights_init(m)
def weights_init(self, m):
if isinstance(m, nn.Linear):
torch.nn.init.xavier_uniform_(m.weight.data)
if m.bias is not None:
m.bias.data.fill_(0.0)
def forward(self, seq):
ret = self.fc(seq)
return ret
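# Illustrative usage sketch (added for demonstration; not part of the original
# module). It scores random patch/summary embedding pairs with the Discriminator
# defined above; the tensor sizes below are arbitrary assumptions.
if __name__ == '__main__':
    num_nodes, hid_units = 4, 8
    disc = Discriminator(hid_units)
    h_pos = torch.rand(num_nodes, hid_units)        # positive patch embeddings
    h_neg = torch.rand(num_nodes, hid_units)        # corrupted (shuffled) embeddings
    summary = torch.sigmoid(AvgReadout()(h_pos))    # graph-level summary s
    logits = disc(summary, h_pos, h_neg)            # bilinear scores for pos/neg pairs
    print(logits.shape)                             # torch.Size([2 * num_nodes])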
|
StarcoderdataPython
|
76923
|
<reponame>what-digital/aldryn-people
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('aldryn_people', '0015_m2m_remove_null'),
]
operations = [
# Replace FK, 'unique=True' with OneToOneField.
migrations.AlterField(
model_name='person',
name='user',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='persons', null=True, blank=True, to=settings.AUTH_USER_MODEL),
),
]
|
StarcoderdataPython
|
3309087
|
class Inventory(object):
def __init__(self, items):
self.items = items
    def add(self, item):
        if item in self.items:
            self.items[item] += 1
        else:
            self.items[item] = 1
    def remove(self, item):
        if item in self.items:
            if self.items[item] < 1:
                print("Can't go negative. Sorry")
            else:
                self.items[item] -= 1
    def check(self, item):
        return item in self.items
    def print_inventory(self):
        for item in self.items:
            print(item, "-", self.items[item])
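# Example usage sketch (added for illustration; the starting stock is made up):
if __name__ == "__main__":
    inv = Inventory({"sword": 1, "potion": 3})
    inv.add("potion")
    inv.remove("sword")
    print(inv.check("shield"))   # False, never stocked
    inv.print_inventory()        # sword - 0, potion - 4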
|
StarcoderdataPython
|
1622075
|
from queue import LifoQueue
if __name__ == '__main__':
stack = LifoQueue()
stack.put('one')
stack.put('two')
stack.put('three')
stack.put('four')
stack.put('five')
stack.put('six')
while not stack.empty():
print(stack.get())
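    # For comparison (illustrative addition): the same LIFO behaviour with a
    # plain list, the usual choice when thread-safety is not required.
    stack_as_list = ['one', 'two', 'three']
    while stack_as_list:
        print(stack_as_list.pop())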
|
StarcoderdataPython
|
6299
|
<reponame>hansthienpondt/ansible-networking-collections
# (c) 2020 Nokia
#
# Licensed under the BSD 3 Clause license
# SPDX-License-Identifier: BSD-3-Clause
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
---
author:
- "<NAME> (@HansThienpondt)"
- "<NAME> (@wisotzky)"
connection: gnmi
short_description: Provides a persistent gRPC connection for gNMI API service
description:
- This gRPC plugin provides methods to interact with the gNMI service.
- OpenConfig gNMI specification
https://github.com/openconfig/reference/blob/master/rpc/gnmi/gnmi-specification.md
- gNMI API
https://raw.githubusercontent.com/openconfig/gnmi/master/proto/gnmi/gnmi.proto
- This connection plugin provides a persistent communication channel to
remote devices using gRPC including the underlying transport (TLS).
    - The plugin binds to the gNMI gRPC service. It provides wrappers for gNMI
requests (Capabilities, Get, Set, Subscribe)
requirements:
- grpcio
- protobuf
options:
host:
description:
- Target host FQDN or IP address to establish gRPC connection.
default: inventory_hostname
vars:
- name: ansible_host
port:
type: int
description:
- Specifies the port on the remote device that listens for connections
when establishing the gRPC connection. If None only the C(host) part
will be used.
ini:
- section: defaults
key: remote_port
env:
- name: ANSIBLE_REMOTE_PORT
vars:
- name: ansible_port
remote_user:
description:
- The username used to authenticate to the remote device when the gRPC
connection is first established. If the remote_user is not specified,
the connection will use the username of the logged in user.
- Can be configured from the CLI via the C(--user) or C(-u) options.
ini:
- section: defaults
key: remote_user
env:
- name: ANSIBLE_REMOTE_USER
vars:
- name: ansible_user
password:
description:
- Configures the user password used to authenticate to the remote device
when first establishing the gRPC connection.
vars:
- name: ansible_password
- name: ansible_ssh_pass
private_key_file:
description:
- The PEM encoded private key file used to authenticate to the
remote device when first establishing the grpc connection.
ini:
- section: grpc_connection
key: private_key_file
env:
- name: ANSIBLE_PRIVATE_KEY_FILE
vars:
- name: ansible_private_key_file
root_certificates_file:
description:
- The PEM encoded root certificate file used to create a SSL-enabled
channel, if the value is None it reads the root certificates from
a default location chosen by gRPC at runtime.
ini:
- section: grpc_connection
key: root_certificates_file
env:
- name: ANSIBLE_ROOT_CERTIFICATES_FILE
vars:
- name: ansible_root_certificates_file
certificate_chain_file:
description:
- The PEM encoded certificate chain file used to create a SSL-enabled
channel. If the value is None, no certificate chain is used.
ini:
- section: grpc_connection
key: certificate_chain_file
env:
- name: ANSIBLE_CERTIFICATE_CHAIN_FILE
vars:
- name: ansible_certificate_chain_file
certificate_path:
description:
- Folder to search for certificate and key files
ini:
- section: grpc_connection
key: certificate_path
env:
- name: ANSIBLE_CERTIFICATE_PATH
vars:
- name: ansible_certificate_path
gnmi_encoding:
description:
- Encoding used for gNMI communication
- Must be either JSON or JSON_IETF
- If not provided, will run CapabilityRequest for auto-detection
ini:
- section: grpc_connection
key: gnmi_encoding
env:
- name: ANSIBLE_GNMI_ENCODING
vars:
- name: ansible_gnmi_encoding
grpc_channel_options:
description:
- Key/Value pairs (dict) to define gRPC channel options to be used
- gRPC reference
U(https://grpc.github.io/grpc/core/group__grpc__arg__keys.html)
- Provide the I(ssl_target_name_override) option to override the TLS
subject or subjectAltName (only in the case secure connections are
used). The option must be provided in cases, when the FQDN or IPv4
address that is used to connect to the device is different from the
subject name that is provided in the host certificate. This is
needed, because the TLS validates hostname or IP address to avoid
man-in-the-middle attacks.
vars:
- name: ansible_grpc_channel_options
grpc_environment:
description:
- Key/Value pairs (dict) to define environment settings specific to gRPC
- The standard mechanism to provide/set the environment in Ansible
cannot be used, because those environment settings are not passed to
the client process that establishes the gRPC connection.
- Set C(GRPC_VERBOSITY) and C(GRPC_TRACE) to setup gRPC logging. Need to
add code for log forwarding of gRPC related log messages to the
persistent messages log (see below).
- Set C(HTTPS_PROXY) to specify your proxy settings (if needed).
- Set C(GRPC_SSL_CIPHER_SUITES) in case the default TLS ciphers do not match
what is offered by the gRPC server.
vars:
- name: ansible_grpc_environment
persistent_connect_timeout:
type: int
description:
- Configures, in seconds, the amount of time to wait when trying to
initially establish a persistent connection. If this value expires
before the connection to the remote device is completed, the connection
will fail.
default: 5
ini:
- section: persistent_connection
key: connect_timeout
env:
- name: ANSIBLE_PERSISTENT_CONNECT_TIMEOUT
vars:
- name: ansible_connect_timeout
persistent_command_timeout:
type: int
description:
- Configures the default timeout value (in seconds) when awaiting a
response after issuing a call to a RPC. If the RPC does not return
before the timeout exceed, an error is generated and the connection
is closed.
default: 300
ini:
- section: persistent_connection
key: command_timeout
env:
- name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT
vars:
- name: ansible_command_timeout
persistent_log_messages:
type: boolean
description:
- This flag will enable logging the command executed and response received
from target device in the ansible log file. For this option to work the
'log_path' ansible configuration option is required to be set to a file
path with write access.
- Be sure to fully understand the security implications of enabling this
option as it could create a security vulnerability by logging sensitive
information in log file.
default: False
ini:
- section: persistent_connection
key: log_messages
env:
- name: ANSIBLE_PERSISTENT_LOG_MESSAGES
vars:
- name: ansible_persistent_log_messages
"""
import os
import re
import json
import base64
import datetime
try:
import grpc
HAS_GRPC = True
except ImportError:
HAS_GRPC = False
try:
from google import protobuf
HAS_PROTOBUF = True
except ImportError:
HAS_PROTOBUF = False
from ansible.errors import AnsibleConnectionFailure, AnsibleError
from ansible.plugins.connection import NetworkConnectionBase
from ansible.plugins.connection import ensure_connect
from google.protobuf import json_format
from ansible_collections.nokia.grpc.plugins.connection.pb import gnmi_pb2
from ansible.module_utils._text import to_text
class Connection(NetworkConnectionBase):
"""
Connection plugin for gRPC
To use gRPC connections in Ansible one (or more) sub-plugin(s) for the
required gRPC service(s) must be loaded. To load gRPC sub-plugins use the
method `register_service()` with the name of the sub-plugin to be
registered.
After loading the sub-plugin, Ansible modules can call methods provided by
that sub-plugin. There is a wrapper available that consumes the attribute
name {sub-plugin name}__{method name} to call a specific method of that
sub-plugin.
"""
transport = "nokia.grpc.gnmi"
has_pipelining = True
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(
play_context, new_stdin, *args, **kwargs
)
self._task_uuid = to_text(kwargs.get("task_uuid", ""))
if not HAS_PROTOBUF:
raise AnsibleError(
"protobuf is required to use gRPC connection type. " +
"Please run 'pip install protobuf'"
)
if not HAS_GRPC:
raise AnsibleError(
"grpcio is required to use gRPC connection type. " +
"Please run 'pip install grpcio'"
)
self._connected = False
def readFile(self, optionName):
"""
Reads a binary certificate/key file
Parameters:
optionName(str): used to read filename from options
Returns:
File content
Raises:
            AnsibleConnectionFailure: file does not exist or cannot be read
"""
path = self.get_option('certificate_path')
if not path:
path = '/etc/ssl:/etc/ssl/certs:/etc/ca-certificates'
filename = self.get_option(optionName)
if filename:
if filename.startswith('~'):
filename = os.path.expanduser(filename)
if not filename.startswith('/'):
for entry in path.split(':'):
if os.path.isfile(os.path.join(entry, filename)):
filename = os.path.join(entry, filename)
break
if os.path.isfile(filename):
try:
with open(filename, 'rb') as f:
return f.read()
except Exception as exc:
raise AnsibleConnectionFailure(
'Failed to read cert/keys file %s: %s' % (filename, exc)
)
else:
raise AnsibleConnectionFailure(
'Cert/keys file %s does not exist' % filename
)
return None
def _connect(self):
"""
Establish gRPC connection to remote node and create gNMI stub.
This method will establish the persistent gRPC connection, if not
already done. After this, the gNMI stub will be created. To get
        visibility about gNMI capabilities of the remote device, a gNMI
        CapabilityRequest will be sent and the result will be persisted.
Parameters:
None
Returns:
None
"""
if self.connected:
self.queue_message('v', 'gRPC connection to host %s already exist' % self._target)
return
grpcEnv = self.get_option('grpc_environment') or {}
if not isinstance(grpcEnv, dict):
raise AnsibleConnectionFailure("grpc_environment must be a dict")
for key in grpcEnv:
if grpcEnv[key]:
os.environ[key] = str(grpcEnv[key])
else:
try:
del os.environ[key]
except KeyError:
                    # no such setting in the current environment, but that's ok
pass
self._login_credentials = [
('username', self.get_option('remote_user')),
('password', self.get_option('password'))
]
host = self.get_option('host')
port = self.get_option('port')
self._target = host if port is None else '%s:%d' % (host, port)
self._timeout = self.get_option('persistent_command_timeout')
certs = {}
certs['root_certificates'] = self.readFile('root_certificates_file')
certs['certificate_chain'] = self.readFile('certificate_chain_file')
certs['private_key'] = self.readFile('private_key_file')
options = self.get_option('grpc_channel_options')
if options:
if not isinstance(options, dict):
raise AnsibleConnectionFailure("grpc_channel_options must be a dict")
options = options.items()
if certs['root_certificates'] or certs['private_key'] or certs['certificate_chain']:
self.queue_message('v', 'Starting secure gRPC connection')
creds = grpc.ssl_channel_credentials(**certs)
self._channel = grpc.secure_channel(self._target, creds, options=options)
else:
self.queue_message('v', 'Starting insecure gRPC connection')
self._channel = grpc.insecure_channel(self._target, options=options)
self.queue_message('v', "gRPC connection established for user %s to %s" %
(self.get_option('remote_user'), self._target))
self.queue_message('v', 'Creating gNMI stub')
self._stub = gnmi_pb2.gNMIStub(self._channel)
self._encoding = self.get_option('gnmi_encoding')
if not self._encoding:
self.queue_message('v', 'Run CapabilityRequest()')
request = gnmi_pb2.CapabilityRequest()
response = self._stub.Capabilities(request, metadata=self._login_credentials)
self.queue_message('v', 'CapabilityRequest() succeeded')
self._gnmiVersion = response.gNMI_version
self._yangModels = response.supported_models
if gnmi_pb2.Encoding.Value('JSON_IETF') in response.supported_encodings:
self._encoding = 'JSON_IETF'
elif gnmi_pb2.Encoding.Value('JSON') in response.supported_encodings:
self._encoding = 'JSON'
else:
raise AnsibleConnectionFailure("No compatible supported encoding found (JSON or JSON_IETF)")
else:
if self._encoding not in ['JSON_IETF', 'JSON']:
raise AnsibleConnectionFailure("Incompatible encoding '%s' requested (JSON or JSON_IETF)" % self._encoding)
self._encoding_value = gnmi_pb2.Encoding.Value(self._encoding)
self._connected = True
        self.queue_message('v', 'gRPC/gNMI connection has been established successfully')
def close(self):
"""
Closes the active gRPC connection to the target host
Parameters:
None
Returns:
None
"""
if self._connected:
self.queue_message('v', "Closing gRPC connection to target host")
self._channel.close()
super(Connection, self).close()
# -----------------------------------------------------------------------
def _encodeXpath(self, xpath='/'):
"""
Encodes XPATH to dict representation that allows conversion to gnmi_pb.Path object
Parameters:
xpath (str): path string using XPATH syntax
Returns:
(dict): path dict using gnmi_pb2.Path structure for easy conversion
"""
mypath = []
xpath = xpath.strip('\t\n\r /')
if xpath:
            path_elements = re.split(r'''/(?=(?:[^\[\]]|\[[^\[\]]+\])*$)''', xpath)
for e in path_elements:
entry = {'name': e.split("[", 1)[0]}
                eKeys = re.findall(r'\[(.*?)\]', e)
dKeys = dict(x.split('=', 1) for x in eKeys)
if dKeys:
entry['key'] = dKeys
mypath.append(entry)
return {'elem': mypath}
return {}
def _decodeXpath(self, path):
"""
Decodes XPATH from dict representation converted from gnmi_pb.Path object
Parameters:
path (dict): decoded gnmi_pb2.Path object
Returns:
(str): path string using XPATH syntax
"""
result = []
if 'elem' not in path:
return ""
for elem in path['elem']:
tmp = elem['name']
if 'key' in elem:
for k, v in elem['key'].items():
tmp += "[%s=%s]" % (k, v)
result.append(tmp)
return '/'.join(result)
def _encodeVal(self, data):
"""
Encodes value to dict representation that allows conversion to gnmi_pb.TypedValue object
Parameters:
data (ANY): data to be encoded as gnmi_pb.TypedValue object
Returns:
(dict): dict using gnmi_pb.TypedValue structure for easy conversion
"""
value = base64.b64encode(json.dumps(data).encode())
if self._encoding == 'JSON_IETF':
return {'jsonIetfVal': value}
else:
return {'jsonVal': value}
def _decodeVal(self, val):
"""
Decodes value from dict representation converted from gnmi_pb.TypedValue object
Parameters:
val (dict): decoded gnmi_pb.TypedValue object
Returns:
(ANY): extracted data
"""
if 'jsonIetfVal' in val:
return json.loads(base64.b64decode(val['jsonIetfVal']))
elif 'jsonVal' in val:
return json.loads(base64.b64decode(val['jsonVal']))
else:
raise AnsibleConnectionFailure("Ansible gNMI plugin does not support encoding for value: %s" % json.dumps(val))
def _dictToList(self, aDict):
for key in aDict.keys():
if key.startswith('___'):
aDict[key[3:]] = [self._dictToList(val) if isinstance(val, dict) else val for val in aDict[key].values()]
del aDict[key]
else:
if isinstance(aDict[key], dict):
aDict[key] = self._dictToList(aDict[key])
return aDict
def _mergeToSingleDict(self, rawData):
result = {}
for entry in rawData:
if 'syncResponse' in entry and entry['syncResponse']:
# Ignore: SyncResponse is sent after initial update
break
elif 'update' not in entry:
# Ignore: entry without updates
break
elif 'timestamp' not in entry:
# Subscribe response, enter update context
entry = entry['update']
else:
# Get response, keep context
pass
prfx = result
if ('prefix' in entry) and ('elem' in entry['prefix']):
prfx_elements = entry['prefix']['elem']
else:
prfx_elements = []
for elem in prfx_elements:
eleName = elem['name']
if 'key' in elem:
eleKey = json.dumps(elem['key'])
eleName = '___'+eleName
# Path Element has key => must be list()
if eleName in prfx:
# Path Element exists => Change Context
prfx = prfx[eleName]
if eleKey not in prfx:
# List entry does not exist => Create
prfx[eleKey] = elem['key']
prfx = prfx[eleKey]
else:
# Path Element does not exist => Create
prfx[eleName] = {}
prfx = prfx[eleName]
prfx[eleKey] = elem['key']
prfx = prfx[eleKey]
else:
                    # Path Element has no key => must be dict()
if eleName in prfx:
# Path Element exists => Change Context
prfx = prfx[eleName]
else:
# Path Element does not exist => Create
prfx[eleName] = {}
prfx = prfx[eleName]
for _upd in entry['update']:
if 'val' not in _upd:
# requested path without content (no value) => skip
continue
elif ('path' in _upd) and ('elem' in _upd['path']):
path_elements = _upd['path']['elem']
cPath = prfx
elif prfx_elements:
path_elements = prfx_elements
cPath = result
else:
                    # No path at all, replace the object tree with value
result = self._decodeVal(_upd['val'])
prfx = result
continue
# If path_elements has more than just a single entry,
# we need to create/navigate to the specified subcontext
for elem in path_elements[:-1]:
eleName = elem['name']
if 'key' in elem:
eleKey = json.dumps(elem['key'])
eleName = '___'+eleName
# Path Element has key => must be list()
if eleName in cPath:
# Path Element exists => Change Context
cPath = cPath[eleName]
if eleKey not in cPath:
# List entry does not exist => Create
cPath[eleKey] = elem['key']
cPath = cPath[eleKey]
else:
# Path Element does not exist => Create
cPath[eleName] = {}
cPath = cPath[eleName]
cPath[eleKey] = elem['key']
cPath = cPath[eleKey]
else:
                        # Path Element has no key => must be dict()
if eleName in cPath:
# Path Element exists => Change Context
cPath = cPath[eleName]
else:
# Path Element does not exist => Create
cPath[eleName] = {}
cPath = cPath[eleName]
# The last entry of path_elements is the leaf element
# that needs to be created/updated
leaf_elem = path_elements[-1]
if 'key' in leaf_elem:
eleKey = json.dumps(leaf_elem['key'])
eleName = '___'+leaf_elem['name']
if eleName not in cPath:
cPath[eleName] = {}
cPath = cPath[eleName]
cPath[eleKey] = self._decodeVal(_upd['val'])
else:
cPath[leaf_elem['name']] = self._decodeVal(_upd['val'])
return self._dictToList(result)
def _simplifyUpdates(self, rawData):
for msg in rawData:
entry = json_format.MessageToDict(msg)
if 'syncResponse' in entry:
# Ignore: SyncResponse is sent after initial update
pass
elif 'update' in entry:
result = {}
update = entry['update']
if 'prefix' in update:
result['prefix'] = '/'+self._decodeXpath(update['prefix'])
if 'timestamp' in update:
result['timestamp'] = datetime.datetime.fromtimestamp(float(update['timestamp'])/1000000000).isoformat()
if 'update' in update:
result['values'] = {self._decodeXpath(u['path']): self._decodeVal(u['val']) for u in update['update']}
yield result
else:
# Ignore: Invalid message format
pass
# -----------------------------------------------------------------------
@ensure_connect
def gnmiCapabilities(self):
"""
Executes a gNMI Capabilities request
Parameters:
None
Returns:
str: gNMI capabilities converted into JSON format
"""
request = gnmi_pb2.CapabilityRequest()
auth = self._login_credentials
try:
response = self._stub.Capabilities(request, metadata=auth)
except grpc.RpcError as e:
raise AnsibleConnectionFailure("%s" % e)
return json_format.MessageToJson(response)
@ensure_connect
def gnmiGet(self, *args, **kwargs):
"""
Executes a gNMI Get request
Encoding that is used for data serialization is automatically determined
based on the remote device capabilities. This gNMI plugin has implemented
        support for JSON_IETF (preferred) and JSON (fallback).
Parameters:
type (str): Type of data that is requested: ALL, CONFIG, STATE
prefix (str): Path prefix that is added to all paths (XPATH syntax)
paths (list): List of paths (str) to be captured
Returns:
str: GetResponse message converted into JSON format
"""
# Remove all input parameters from kwargs that are not set
input = dict(filter(lambda x: x[1], kwargs.items()))
# Adjust input parameters to match specification for gNMI SetRequest
if 'prefix' in input:
input['prefix'] = self._encodeXpath(input['prefix'])
if 'path' in input:
input['path'] = [self._encodeXpath(path) for path in input['path']]
if 'type' in input:
input['type'] = input['type'].upper()
input['encoding'] = self._encoding_value
request = json_format.ParseDict(input, gnmi_pb2.GetRequest())
auth = self._login_credentials
try:
response = self._stub.Get(request, metadata=auth)
except grpc.RpcError as e:
raise AnsibleConnectionFailure("%s" % e)
output = self._mergeToSingleDict(json_format.MessageToDict(response)['notification'])
return json.dumps(output, indent=4).encode()
@ensure_connect
def gnmiSet(self, *args, **kwargs):
"""
Executes a gNMI Set request
Encoding that is used for data serialization is automatically determined
based on the remote device capabilities. This gNMI plugin has implemented
        support for JSON_IETF (preferred) and JSON (fallback).
Parameters:
prefix (str): Path prefix that is added to all paths (XPATH syntax)
update (list): Path/Value pairs to be updated
replace (list): Path/Value pairs to be replaced
delete (list): Paths (str) to be deleted
Returns:
str: SetResponse message converted into JSON format
"""
# Remove all input parameters from kwargs that are not set
input = dict(filter(lambda x: x[1], kwargs.items()))
# Backup options are not to be used in gNMI SetRequest
if 'backup' in input:
del input['backup']
if 'backup_options' in input:
del input['backup_options']
# Adjust input parameters to match specification for gNMI SetRequest
if 'prefix' in input:
input['prefix'] = self._encodeXpath(input['prefix'])
if 'delete' in input:
input['delete'] = [self._encodeXpath(entry) for entry in input['delete']]
if 'update' in input:
for entry in input['update']:
entry['path'] = self._encodeXpath(entry['path'])
entry['val'] = self._encodeVal(entry['val'])
if 'replace' in input:
for entry in input['replace']:
entry['path'] = self._encodeXpath(entry['path'])
entry['val'] = self._encodeVal(entry['val'])
request = json_format.ParseDict(input, gnmi_pb2.SetRequest())
auth = self._login_credentials
try:
response = self._stub.Set(request, metadata=auth)
except grpc.RpcError as e:
raise AnsibleConnectionFailure("%s" % e)
output = json_format.MessageToDict(response)
output['timestamp'] = datetime.datetime.fromtimestamp(float(output['timestamp'])/1000000000).isoformat()
if 'prefix' in output:
output['prefix'] = self._decodeXpath(output['prefix'])
for item in output['response']:
item['path'] = self._decodeXpath(item['path'])
return json.dumps(output, indent=4).encode()
@ensure_connect
def gnmiSubscribe(self, *args, **kwargs):
"""
Executes a gNMI Subscribe request
Encoding that is used for data serialization is automatically determined
based on the remote device capabilities. This gNMI plugin has implemented
        support for JSON_IETF (preferred) and JSON (fallback).
Parameters:
prefix (str): Path prefix that is added to all paths (XPATH syntax)
mode (str): Mode of subscription (STREAM, ONCE)
subscription (list of dict): Subscription specification (path, interval, submode)
duration (int): timeout, to stop receiving
qos (int): DSCP marking that is used
updates_only (bool): Send only updates to initial state
allow_aggregation (bool): Aggregate elements marked as eligible for aggregation
Returns:
str: Updates received converted into JSON format
"""
# Remove all input parameters from kwargs that are not set
input = dict(filter(lambda x: x[1], kwargs.items()))
# Adjust input parameters to match specification for gNMI SubscribeRequest
if 'mode' in input:
input['mode'] = input['mode'].upper()
input['encoding'] = self._encoding_value
if 'prefix' in input:
input['prefix'] = self._encodeXpath(input['prefix'])
if 'subscription' in input:
for item in input['subscription']:
item['path'] = self._encodeXpath(item['path'])
# Extract duration from input attributes
if 'duration' in input:
duration = input['duration']
del input['duration']
else:
duration = 20
request = json_format.ParseDict({'subscribe': input}, gnmi_pb2.SubscribeRequest())
auth = self._login_credentials
try:
output = []
responses = self._stub.Subscribe(iter([request]), duration, metadata=auth)
if input['mode'] == 'ONCE':
responses = [json_format.MessageToDict(response) for response in responses]
output = self._mergeToSingleDict(responses)
else:
for update in self._simplifyUpdates(responses):
output.append(update)
except grpc.RpcError as e:
if e.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
if input['mode'] == 'ONCE':
raise AnsibleConnectionFailure("gNMI ONCE Subscription timed out")
else:
# RPC timed out, which is okay
pass
else:
raise AnsibleConnectionFailure("%s" % e)
return json.dumps(output, indent=4).encode()
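# Illustrative sketch (added; not part of the plugin): how the XPATH splitting
# performed by Connection._encodeXpath() above behaves on a made-up sample path.
def _demo_split_xpath(xpath):
    xpath = xpath.strip('\t\n\r /')
    parsed = []
    for e in re.split(r'''/(?=(?:[^\[\]]|\[[^\[\]]+\])*$)''', xpath):
        entry = {'name': e.split('[', 1)[0]}
        keys = dict(x.split('=', 1) for x in re.findall(r'\[(.*?)\]', e))
        if keys:
            entry['key'] = keys
        parsed.append(entry)
    return parsed
# _demo_split_xpath('/interfaces/interface[name=eth0]/state') returns
# [{'name': 'interfaces'}, {'name': 'interface', 'key': {'name': 'eth0'}}, {'name': 'state'}]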
|
StarcoderdataPython
|
86836
|
"""CoinGecko view"""
__docformat__ = "numpy"
import logging
import os
from pandas.plotting import register_matplotlib_converters
from gamestonk_terminal.cryptocurrency.dataframe_helpers import (
lambda_very_long_number_formatter,
)
from gamestonk_terminal.cryptocurrency.discovery import pycoingecko_model
from gamestonk_terminal.decorators import log_start_end
from gamestonk_terminal.helper_funcs import export_data, print_rich_table
from gamestonk_terminal.rich_config import console
logger = logging.getLogger(__name__)
register_matplotlib_converters()
# pylint: disable=inconsistent-return-statements
# pylint: disable=R0904, C0302
COINS_COLUMNS = [
"Symbol",
"Name",
"Volume [$]",
"Market Cap [$]",
"Market Cap Rank",
"7D Change [%]",
"24H Change [%]",
]
@log_start_end(log=logger)
def display_coins(
category: str, top: int = 250, sortby: str = "Symbol", export: str = ""
) -> None:
"""Display top coins [Source: CoinGecko]
Parameters
----------
category: str
Coingecko category. If no category is passed it will search for all coins. (E.g., smart-contract-platform)
top: int
Number of records to display
sortby: str
Key to sort data
export : str
Export dataframe data to csv,json,xlsx file
"""
df = pycoingecko_model.get_coins(top=top, category=category)
if not df.empty:
df = df[
[
"symbol",
"name",
"total_volume",
"market_cap",
"market_cap_rank",
"price_change_percentage_7d_in_currency",
"price_change_percentage_24h_in_currency",
]
]
df = df.set_axis(
COINS_COLUMNS,
axis=1,
inplace=False,
)
if sortby in COINS_COLUMNS:
df = df[
(df["Volume [$]"].notna()) & (df["Market Cap [$]"].notna())
].sort_values(by=sortby, ascending=False)
for col in ["Volume [$]", "Market Cap [$]"]:
if col in df.columns:
df[col] = df[col].apply(lambda x: lambda_very_long_number_formatter(x))
print_rich_table(
df.head(top),
headers=list(df.columns),
show_index=False,
)
console.print("")
export_data(
export,
os.path.dirname(os.path.abspath(__file__)),
"cgtop",
df,
)
else:
console.print("\nUnable to retrieve data from CoinGecko.\n")
@log_start_end(log=logger)
def display_gainers(
period: str = "1h", top: int = 20, sortby: str = "Symbol", export: str = ""
) -> None:
"""Shows Largest Gainers - coins which gain the most in given period. [Source: CoinGecko]
Parameters
----------
period: str
Time period by which data is displayed. One from [1h, 24h, 7d, 14d, 30d, 60d, 1y]
top: int
Number of records to display
sortby: str
Key to sort data
export : str
Export dataframe data to csv,json,xlsx file
"""
df = pycoingecko_model.get_gainers_or_losers(top=top, period=period, typ="gainers")
if not df.empty:
if sortby in COINS_COLUMNS:
df = df[
(df["Volume [$]"].notna()) & (df["Market Cap [$]"].notna())
].sort_values(by=sortby, ascending=False)
for col in ["Volume [$]", "Market Cap [$]"]:
if col in df.columns:
df[col] = df[col].apply(lambda x: lambda_very_long_number_formatter(x))
print_rich_table(
df.head(top),
headers=list(df.columns),
show_index=False,
)
console.print("")
export_data(
export,
os.path.dirname(os.path.abspath(__file__)),
"gainers",
df,
)
else:
console.print("\nUnable to retrieve data from CoinGecko.\n")
@log_start_end(log=logger)
def display_losers(
period: str = "1h", top: int = 20, export: str = "", sortby: str = "Symbol"
) -> None:
"""Shows Largest Losers - coins which lost the most in given period of time. [Source: CoinGecko]
Parameters
----------
period: str
Time period by which data is displayed. One from [1h, 24h, 7d, 14d, 30d, 60d, 1y]
top: int
Number of records to display
sortby: str
Key to sort data
export : str
Export dataframe data to csv,json,xlsx file
"""
df = pycoingecko_model.get_gainers_or_losers(top=top, period=period, typ="losers")
if not df.empty:
if sortby in COINS_COLUMNS:
df = df[
(df["Volume [$]"].notna()) & (df["Market Cap [$]"].notna())
].sort_values(by=sortby, ascending=False)
for col in ["Volume [$]", "Market Cap [$]"]:
if col in df.columns:
df[col] = df[col].apply(lambda x: lambda_very_long_number_formatter(x))
print_rich_table(
df.head(top),
headers=list(df.columns),
show_index=False,
)
console.print()
export_data(
export,
os.path.dirname(os.path.abspath(__file__)),
"cglosers",
df,
)
else:
console.print("\nUnable to retrieve data from CoinGecko.\n")
@log_start_end(log=logger)
def display_trending(export: str = "") -> None:
"""Display trending coins [Source: CoinGecko]
Parameters
----------
export : str
Export dataframe data to csv,json,xlsx file
"""
df = pycoingecko_model.get_trending_coins()
if not df.empty:
print_rich_table(
df,
headers=list(df.columns),
floatfmt=".4f",
show_index=False,
title="Trending coins on CoinGecko",
)
console.print("")
export_data(
export,
os.path.dirname(os.path.abspath(__file__)),
"cgtrending",
df,
)
else:
console.print("\nUnable to retrieve data from CoinGecko.\n")
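# Example invocation sketch (added for illustration; the argument values are
# arbitrary and the calls hit the live CoinGecko API):
if __name__ == "__main__":
    display_coins(category="smart-contract-platform", top=10, sortby="Market Cap [$]")
    display_gainers(period="24h", top=5)
    display_trending()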
|
StarcoderdataPython
|
4841296
|
import os
import sys
import argparse
import glob
import pandas as pd
def get_arguments():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description="",
epilog="""
Convert behavioural data from cimaq to bids format
Input: Folder
""")
parser.add_argument(
"-d", "--idir",
required=True, nargs="+",
help="Folder with input files",
)
parser.add_argument(
"-o", "--odir",
required=True, nargs="+",
help="Folder where output file is saved",
)
parser.add_argument(
"-v", "--verbose",
required=False, nargs="+",
help="Verbose to get more information about what's going on",
)
args = parser.parse_args()
if len(sys.argv) == 1:
parser.print_help()
sys.exit()
else:
return args
def get_ids(fileDir):
if not os.path.exists(fileDir):
        sys.exit("This folder doesn't exist: {}".format(fileDir))
files = glob.glob(os.path.join(fileDir,'sub*.tsv'))
ids = []
for file in files:
filename = os.path.basename(file)
id = filename.split('-')[1].split('_')[0]
ids.append(id)
return ids
def main():
args = get_arguments()
output_dir = args.odir[0]
ids = get_ids(args.idir[0])
data_ids = pd.DataFrame({'sub_ids' : ids})
print(data_ids['sub_ids'])
data_ids.sort_values(by = ['sub_ids'], axis = 0, ascending = True, inplace= True)
print(data_ids.iloc[:, 0])
data_ids.to_csv(output_dir+'/sub_list.tsv', sep='\t',
header=True, index=False)
if __name__ == '__main__':
sys.exit(main())
|
StarcoderdataPython
|
3209061
|
# ==============================================================================
# Copyright 2019 - <NAME>
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# ==============================================================================
""" Dynamic Decoders
- Contains an implementation of dynamic_decode that uses shape invariants
Source: https://github.com/tensorflow/tensorflow/blob/r1.13/tensorflow/contrib/seq2seq/python/ops/decoder.py
"""
import sys
assert 'tensorflow' in sys.modules, 'You need to import TF before importing this module.'
from diplomacy_research.utils.tensorflow import _create_zero_outputs, _transpose_batch_time
from diplomacy_research.utils.tensorflow import seq2seq
from diplomacy_research.utils.tensorflow import ops
from diplomacy_research.utils.tensorflow import tensor_shape
from diplomacy_research.utils.tensorflow import array_ops, gen_array_ops
from diplomacy_research.utils.tensorflow import constant_op
from diplomacy_research.utils.tensorflow import context
from diplomacy_research.utils.tensorflow import control_flow_ops
from diplomacy_research.utils.tensorflow import control_flow_util
from diplomacy_research.utils.tensorflow import dtypes
from diplomacy_research.utils.tensorflow import math_ops, gen_math_ops
from diplomacy_research.utils.tensorflow import nest
from diplomacy_research.utils.tensorflow import tensor_array_ops
from diplomacy_research.utils.tensorflow import tensor_util
from diplomacy_research.utils.tensorflow import variable_scope
def dynamic_decode(decoder, output_time_major=False, impute_finished=False, maximum_iterations=None,
parallel_iterations=32, invariants_map=None, swap_memory=False, scope=None):
""" Performs dynamic decoding with `decoder`.
:param decoder: A `Decoder` instance.
:param output_time_major: If True, outputs [time, batch, ...], otherwise outputs [batch, time, ...]
:param impute_finished: If true, finished states are copied through the end of the game
:param maximum_iterations: Int or None. The maximum number of steps (otherwise decode until it's done)
:param parallel_iterations: Argument passed to tf.while_loop
:param invariants_map: Optional. Dictionary of tensor path (in initial_state) to its shape invariant.
:param swap_memory: Argument passed to `tf.while_loop`.
:param scope: Optional variable scope to use.
:return: A tuple of 1) final_outputs, 2) final_state, 3) final_sequence_length
"""
if not isinstance(decoder, seq2seq.Decoder):
raise TypeError('Expected decoder to be type Decoder, but saw: %s' % type(decoder))
with variable_scope.variable_scope(scope, 'decoder') as varscope:
# Determine context types.
ctxt = ops.get_default_graph()._get_control_flow_context() # pylint: disable=protected-access
is_xla = control_flow_util.GetContainingXLAContext(ctxt) is not None
in_while_loop = control_flow_util.GetContainingWhileContext(ctxt) is not None
# Properly cache variable values inside the while_loop.
# Don't set a caching device when running in a loop, since it is possible that train steps could be wrapped
# in a tf.while_loop. In that scenario caching prevents forward computations in loop iterations from re-reading
# the updated weights.
if not context.executing_eagerly() and not in_while_loop:
if varscope.caching_device is None:
varscope.set_caching_device(lambda op: op.device)
# Setting maximum iterations
if maximum_iterations is not None:
maximum_iterations = ops.convert_to_tensor(maximum_iterations,
dtype=dtypes.int32,
name="maximum_iterations")
if maximum_iterations.get_shape().ndims != 0:
raise ValueError('maximum_iterations must be a scalar')
def _inv_shape(maybe_ta):
""" Returns the invariatns shape """
if isinstance(maybe_ta, tensor_array_ops.TensorArray):
return maybe_ta.flow.shape
return maybe_ta.shape
def _invariants(structure):
""" Returns the invariants of a structure """
return nest.map_structure(_inv_shape, structure)
def _map_invariants(structure):
""" Returns the invariants of a structure, but replaces the invariant using the value in invariants_map """
return nest.map_structure_with_paths(lambda path, tensor: (invariants_map or {}).get(path,
_inv_shape(tensor)),
structure)
# Initializing decoder
initial_finished, initial_inputs, initial_state = decoder.initialize()
zero_outputs = _create_zero_outputs(decoder.output_size, decoder.output_dtype, decoder.batch_size)
if is_xla and maximum_iterations is None:
raise ValueError('maximum_iterations is required for XLA compilation.')
if maximum_iterations is not None:
initial_finished = gen_math_ops.logical_or(initial_finished, maximum_iterations <= 0)
initial_sequence_lengths = array_ops.zeros_like(initial_finished, dtype=dtypes.int32)
initial_time = constant_op.constant(0, dtype=dtypes.int32)
# Creating initial output TA
def _shape(batch_size, from_shape):
""" Returns the batch_size concatenated with the from_shape """
if (not isinstance(from_shape, tensor_shape.TensorShape) or from_shape.ndims == 0):
return tensor_shape.TensorShape(None)
batch_size = tensor_util.constant_value(ops.convert_to_tensor(batch_size, name='batch_size'))
return tensor_shape.TensorShape([batch_size]).concatenate(from_shape)
dynamic_size = maximum_iterations is None or not is_xla
def _create_ta(shape, dtype):
""" Creates a tensor array"""
return tensor_array_ops.TensorArray(dtype=dtype,
size=0 if dynamic_size else maximum_iterations,
dynamic_size=dynamic_size,
element_shape=_shape(decoder.batch_size, shape))
initial_outputs_ta = nest.map_structure(_create_ta, decoder.output_size, decoder.output_dtype)
def condition(unused_time, unused_outputs_ta, unused_state, unused_inputs, finished, unused_sequence_lengths):
""" While loop condition"""
return gen_math_ops.logical_not(math_ops.reduce_all(finished))
def body(time, outputs_ta, state, inputs, finished, sequence_lengths):
""" Internal while_loop body. """
(next_outputs, decoder_state, next_inputs, decoder_finished) = decoder.step(time, inputs, state)
if decoder.tracks_own_finished:
next_finished = decoder_finished
else:
next_finished = gen_math_ops.logical_or(decoder_finished, finished)
next_sequence_lengths = array_ops.where(gen_math_ops.logical_not(finished),
gen_array_ops.fill(array_ops.shape(sequence_lengths), time + 1),
sequence_lengths)
nest.assert_same_structure(state, decoder_state)
nest.assert_same_structure(outputs_ta, next_outputs)
nest.assert_same_structure(inputs, next_inputs)
# Zero out output values past finish
if impute_finished:
emit = nest.map_structure(lambda out, zero: array_ops.where(finished, zero, out),
next_outputs,
zero_outputs)
else:
emit = next_outputs
# Copy through states past finish
def _maybe_copy_state(new, cur):
# TensorArrays, multiple dynamic dims, and scalar states get passed through.
if isinstance(cur, tensor_array_ops.TensorArray):
pass_through = True
elif None in new.shape.as_list()[1:]:
pass_through = True
else:
new.set_shape(cur.shape)
pass_through = (new.shape.ndims == 0)
return new if pass_through else array_ops.where(finished, cur, new)
if impute_finished:
next_state = nest.map_structure(_maybe_copy_state, decoder_state, state)
else:
next_state = decoder_state
outputs_ta = nest.map_structure(lambda ta, out: ta.write(time, out), outputs_ta, emit)
return (time + 1, outputs_ta, next_state, next_inputs, next_finished, next_sequence_lengths)
res = control_flow_ops.while_loop(condition,
body,
loop_vars=(initial_time,
initial_outputs_ta,
initial_state,
initial_inputs,
initial_finished,
initial_sequence_lengths),
shape_invariants=(_invariants(initial_time),
_invariants(initial_outputs_ta),
_map_invariants(initial_state),
_invariants(initial_inputs),
_invariants(initial_finished),
_invariants(initial_sequence_lengths)),
parallel_iterations=parallel_iterations,
maximum_iterations=maximum_iterations,
swap_memory=swap_memory)
final_outputs_ta = res[1]
final_state = res[2]
final_sequence_lengths = res[5]
final_outputs = nest.map_structure(lambda ta: ta.stack(), final_outputs_ta)
try:
final_outputs, final_state = decoder.finalize(final_outputs, final_state, final_sequence_lengths)
except NotImplementedError:
pass
if not output_time_major:
final_outputs = nest.map_structure(_transpose_batch_time, final_outputs)
return final_outputs, final_state, final_sequence_lengths
|
StarcoderdataPython
|
12877
|
import os
import time
import argparse
import torchvision
import torch
import torch.nn as nn
from util import AverageMeter, TwoAugUnsupervisedDataset
from encoder import SmallAlexNet
from align_uniform import align_loss, uniform_loss
import json
def parse_option():
parser = argparse.ArgumentParser('STL-10 Representation Learning with Alignment and Uniformity Losses')
parser.add_argument('--align_w', type=float, default=1, help='Alignment loss weight')
parser.add_argument('--unif_w', type=float, default=1, help='Uniformity loss weight')
parser.add_argument('--align_alpha', type=float, default=2, help='alpha in alignment loss')
parser.add_argument('--unif_t', type=float, default=2, help='t in uniformity loss')
parser.add_argument('--batch_size', type=int, default=768, help='Batch size')
parser.add_argument('--epochs', type=int, default=200, help='Number of training epochs')
parser.add_argument('--lr', type=float, default=None,
help='Learning rate. Default is linear scaling 0.12 per 256 batch size')
parser.add_argument('--lr_decay_rate', type=float, default=0.1, help='Learning rate decay rate')
parser.add_argument('--lr_decay_epochs', default=[155, 170, 185], nargs='*', type=int,
help='When to decay learning rate')
parser.add_argument('--momentum', type=float, default=0.9, help='SGD momentum')
parser.add_argument('--weight_decay', type=float, default=1e-4, help='L2 weight decay')
parser.add_argument('--feat_dim', type=int, default=128, help='Feature dimensionality')
parser.add_argument('--num_workers', type=int, default=20, help='Number of data loader workers to use')
parser.add_argument('--log_interval', type=int, default=40, help='Number of iterations between logs')
parser.add_argument('--gpus', default=[0], nargs='*', type=int,
help='List of GPU indices to use, e.g., --gpus 0 1 2 3')
parser.add_argument('--data_folder', type=str, default='./data', help='Path to data')
parser.add_argument('--result_folder', type=str, default='./results', help='Base directory to save model')
parser.add_argument('--suffix', type=str, default='info', help='Name Suffix')
opt = parser.parse_args()
opt.data_folder = '/afs/csail.mit.edu/u/h/hehaodele/radar/Hao/datasets'
opt.result_folder = '/afs/csail.mit.edu/u/h/hehaodele/radar/Hao/projects/align_uniform/results'
if opt.lr is None:
opt.lr = 0.12 * (opt.batch_size / 256)
print(json.dumps(vars(opt), indent=2, default=lambda o: o.__dict__))
opt.gpus = list(map(lambda x: torch.device('cuda', x), opt.gpus))
exp_name = f"align{opt.align_w:g}alpha{opt.align_alpha:g}_unif{opt.unif_w:g}t{opt.unif_t:g}"
if len(opt.suffix) > 0:
exp_name += f'_{opt.suffix}'
opt.save_folder = os.path.join(
opt.result_folder,
exp_name,
)
os.makedirs(opt.save_folder, exist_ok=True)
return opt
def get_data_loader(opt):
from util import RandomResizedCropWithBox, TwoAugUnsupervisedDatasetWithBox
transform_crop = RandomResizedCropWithBox(64, scale=(0.08, 1))
transform_others = torchvision.transforms.Compose([
torchvision.transforms.RandomHorizontalFlip(),
torchvision.transforms.ColorJitter(0.4, 0.4, 0.4, 0.4),
torchvision.transforms.RandomGrayscale(p=0.2),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize(
(0.44087801806139126, 0.42790631331699347, 0.3867879370752931),
(0.26826768628079806, 0.2610450402318512, 0.26866836876860795),
),
])
dataset = TwoAugUnsupervisedDatasetWithBox(
torchvision.datasets.STL10(opt.data_folder, 'train+unlabeled', download=True), transform_crop, transform_others)
return torch.utils.data.DataLoader(dataset, batch_size=opt.batch_size, num_workers=opt.num_workers,
shuffle=True, pin_memory=True)
def get_rate(x):
return sum(x) / len(x) * 100
def main():
opt = parse_option()
print(f'Optimize: {opt.align_w:g} * loss_align(alpha={opt.align_alpha:g}) + {opt.unif_w:g} * loss_uniform(t={opt.unif_t:g})')
torch.cuda.set_device(opt.gpus[0])
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = True
encoder = nn.DataParallel(SmallAlexNet(feat_dim=opt.feat_dim).to(opt.gpus[0]), opt.gpus)
optim = torch.optim.SGD(encoder.parameters(), lr=opt.lr,
momentum=opt.momentum, weight_decay=opt.weight_decay)
scheduler = torch.optim.lr_scheduler.MultiStepLR(optim, gamma=opt.lr_decay_rate,
milestones=opt.lr_decay_epochs)
loader = get_data_loader(opt)
align_meter = AverageMeter('align_loss')
unif_meter = AverageMeter('uniform_loss')
loss_meter = AverageMeter('total_loss')
it_time_meter = AverageMeter('iter_time')
info_rate_meter = AverageMeter('info_rate')
noni_rate_meter = AverageMeter('noni_rate')
for epoch in range(opt.epochs):
align_meter.reset()
unif_meter.reset()
loss_meter.reset()
it_time_meter.reset()
t0 = time.time()
for ii, (im_x, info_x, im_y, info_y) in enumerate(loader):
optim.zero_grad()
x, y = encoder(torch.cat([im_x.to(opt.gpus[0]), im_y.to(opt.gpus[0])])).chunk(2)
align_loss_val = align_loss(x, y, alpha=opt.align_alpha)
unif_loss_val = (uniform_loss(x, t=opt.unif_t) + uniform_loss(y, t=opt.unif_t)) / 2
loss = align_loss_val * opt.align_w + unif_loss_val * opt.unif_w
info_x, info_y = info_x.to(opt.gpus[0]), info_y.to(opt.gpus[0])
info_x_idx, noni_x_idx = info_x > 0.5, info_x < 0.2
info_y_idx, noni_y_idx = info_y > 0.5, info_y < 0.2
info_pair_idx = info_x_idx & info_y_idx
if info_pair_idx.any():
align_loss_info = align_loss(x[info_pair_idx], y[info_pair_idx], alpha=opt.align_alpha)
else:
align_loss_info = 0
uniform_loss_noninfo = 0
if noni_x_idx.any():
uniform_loss_noninfo += uniform_loss(x[noni_x_idx], t=opt.unif_t)
if noni_y_idx.any():
uniform_loss_noninfo += uniform_loss(y[noni_y_idx], t=opt.unif_t)
uniform_loss_noninfo /= 2
loss_info = align_loss_info * opt.align_w + uniform_loss_noninfo * opt.unif_w
loss = loss + loss_info
align_meter.update(align_loss_val, x.shape[0])
unif_meter.update(unif_loss_val)
loss_meter.update(loss, x.shape[0])
info_rate_meter.update((get_rate(info_x_idx)+get_rate(info_y_idx))/2)
noni_rate_meter.update((get_rate(noni_x_idx)+get_rate(noni_y_idx))/2)
loss.backward()
optim.step()
it_time_meter.update(time.time() - t0)
if ii % opt.log_interval == 0:
print(f"Epoch {epoch}/{opt.epochs}\tIt {ii}/{len(loader)}\t" +
f"{align_meter}\t{unif_meter}\t{loss_meter}\t{it_time_meter}\t{info_rate_meter}\t{noni_rate_meter}")
t0 = time.time()
scheduler.step()
if epoch % 40 == 0:
ckpt_file = os.path.join(opt.save_folder, f'encoder-ep{epoch}.pth')
torch.save(encoder.module.state_dict(), ckpt_file)
ckpt_file = os.path.join(opt.save_folder, 'encoder.pth')
torch.save(encoder.module.state_dict(), ckpt_file)
print(f'Saved to {ckpt_file}')
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
20355
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class MybankCreditSceneprodCommonQueryModel(object):
def __init__(self):
self._app_seq_no = None
self._ext_param = None
self._operation_type = None
self._org_code = None
self._product_code = None
self._seq_no = None
@property
def app_seq_no(self):
return self._app_seq_no
@app_seq_no.setter
def app_seq_no(self, value):
self._app_seq_no = value
@property
def ext_param(self):
return self._ext_param
@ext_param.setter
def ext_param(self, value):
self._ext_param = value
@property
def operation_type(self):
return self._operation_type
@operation_type.setter
def operation_type(self, value):
self._operation_type = value
@property
def org_code(self):
return self._org_code
@org_code.setter
def org_code(self, value):
self._org_code = value
@property
def product_code(self):
return self._product_code
@product_code.setter
def product_code(self, value):
self._product_code = value
@property
def seq_no(self):
return self._seq_no
@seq_no.setter
def seq_no(self, value):
self._seq_no = value
def to_alipay_dict(self):
params = dict()
if self.app_seq_no:
if hasattr(self.app_seq_no, 'to_alipay_dict'):
params['app_seq_no'] = self.app_seq_no.to_alipay_dict()
else:
params['app_seq_no'] = self.app_seq_no
if self.ext_param:
if hasattr(self.ext_param, 'to_alipay_dict'):
params['ext_param'] = self.ext_param.to_alipay_dict()
else:
params['ext_param'] = self.ext_param
if self.operation_type:
if hasattr(self.operation_type, 'to_alipay_dict'):
params['operation_type'] = self.operation_type.to_alipay_dict()
else:
params['operation_type'] = self.operation_type
if self.org_code:
if hasattr(self.org_code, 'to_alipay_dict'):
params['org_code'] = self.org_code.to_alipay_dict()
else:
params['org_code'] = self.org_code
if self.product_code:
if hasattr(self.product_code, 'to_alipay_dict'):
params['product_code'] = self.product_code.to_alipay_dict()
else:
params['product_code'] = self.product_code
if self.seq_no:
if hasattr(self.seq_no, 'to_alipay_dict'):
params['seq_no'] = self.seq_no.to_alipay_dict()
else:
params['seq_no'] = self.seq_no
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = MybankCreditSceneprodCommonQueryModel()
if 'app_seq_no' in d:
o.app_seq_no = d['app_seq_no']
if 'ext_param' in d:
o.ext_param = d['ext_param']
if 'operation_type' in d:
o.operation_type = d['operation_type']
if 'org_code' in d:
o.org_code = d['org_code']
if 'product_code' in d:
o.product_code = d['product_code']
if 'seq_no' in d:
o.seq_no = d['seq_no']
return o
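# Illustrative round-trip sketch (added; the field values below are made up):
if __name__ == '__main__':
    model = MybankCreditSceneprodCommonQueryModel()
    model.org_code = 'ORG001'
    model.product_code = 'PROD001'
    model.seq_no = '20200101000001'
    params = model.to_alipay_dict()
    print(params)  # {'org_code': 'ORG001', 'product_code': 'PROD001', 'seq_no': '20200101000001'}
    restored = MybankCreditSceneprodCommonQueryModel.from_alipay_dict(params)
    print(restored.seq_no)  # 20200101000001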
|
StarcoderdataPython
|
1738993
|
# -*- coding: utf-8 -*-
"""
idfy_rest_client.models.identification_response
This file was automatically generated for Idfy by APIMATIC v2.0 ( https://apimatic.io )
"""
import idfy_rest_client.models.error
import idfy_rest_client.models.environment_info
class IdentificationResponse(object):
"""Implementation of the 'IdentificationResponse' model.
    The response for the identity process. Contains the user's name, ID number, etc.
Attributes:
name (string): The fullname of the user as reported back from the
IdentityProvider
first_name (string): The first name of the user
middle_name (string): The middle name of the user (not always
available)
last_name (string): The last name of the user
date_of_birth (string): The users date of birth (not always
available)
status (Status): The status of the identification process. If not
success the identification process is not completed.
social_security_number (string): The social security number for the
user (if allowed and if the GetSocialSecurityNumber is set to true
in the request)
identity_provider_unique_id (string): The uniqueID from the e-ID, this
ID is unique for the user and is the same every time the user logs
            on. This is not a sensitive ID and you could store this to identify
            the user in your database. Remark: The Swedish BankID does not have
            a unique ID.
identity_provider (IdentityProvider): The identityprovider type
(Norwegian BanKID, SwedishBankID, Nemid, etc)
error (Error): Information about error if the identification process
failed. (Only set if an error occured, if not is null)
environment_info (EnvironmentInfo): Information about the users
environment as seen by IdentiSign, can be used to compare with own
data.
        meta_data (dict<object, string>): A dictionary with extra information
from each identityprovider, and extra services. See developer
documentation for more information
request_id (string): The RequestId
"""
# Create a mapping from Model property names to API property names
_names = {
"name":'Name',
"first_name":'FirstName',
"middle_name":'MiddleName',
"last_name":'LastName',
"date_of_birth":'DateOfBirth',
"status":'Status',
"social_security_number":'SocialSecurityNumber',
"identity_provider_unique_id":'IdentityProviderUniqueId',
"identity_provider":'IdentityProvider',
"error":'Error',
"environment_info":'EnvironmentInfo',
"meta_data":'MetaData',
"request_id":'RequestId'
}
def __init__(self,
name=None,
first_name=None,
middle_name=None,
last_name=None,
date_of_birth=None,
status=None,
social_security_number=None,
identity_provider_unique_id=None,
identity_provider=None,
error=None,
environment_info=None,
meta_data=None,
request_id=None,
additional_properties = {}):
"""Constructor for the IdentificationResponse class"""
# Initialize members of the class
self.name = name
self.first_name = first_name
self.middle_name = middle_name
self.last_name = last_name
self.date_of_birth = date_of_birth
self.status = status
self.social_security_number = social_security_number
self.identity_provider_unique_id = identity_provider_unique_id
self.identity_provider = identity_provider
self.error = error
self.environment_info = environment_info
self.meta_data = meta_data
self.request_id = request_id
# Add additional model properties to the instance
self.additional_properties = additional_properties
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
name = dictionary.get('Name')
first_name = dictionary.get('FirstName')
middle_name = dictionary.get('MiddleName')
last_name = dictionary.get('LastName')
date_of_birth = dictionary.get('DateOfBirth')
status = dictionary.get('Status')
social_security_number = dictionary.get('SocialSecurityNumber')
identity_provider_unique_id = dictionary.get('IdentityProviderUniqueId')
identity_provider = dictionary.get('IdentityProvider')
error = idfy_rest_client.models.error.Error.from_dictionary(dictionary.get('Error')) if dictionary.get('Error') else None
environment_info = idfy_rest_client.models.environment_info.EnvironmentInfo.from_dictionary(dictionary.get('EnvironmentInfo')) if dictionary.get('EnvironmentInfo') else None
meta_data = dictionary.get('MetaData')
request_id = dictionary.get('RequestId')
# Clean out expected properties from dictionary
for key in cls._names.values():
if key in dictionary:
del dictionary[key]
# Return an object of this model
return cls(name,
first_name,
middle_name,
last_name,
date_of_birth,
status,
social_security_number,
identity_provider_unique_id,
identity_provider,
error,
environment_info,
meta_data,
request_id,
dictionary)
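# Usage sketch (illustrative addition; the payload values below are made up):
# payload = {"Name": "Kari Nordmann", "Status": "Success", "RequestId": "abc123"}
# response = IdentificationResponse.from_dictionary(payload)
# print(response.name, response.status, response.request_id)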
|
StarcoderdataPython
|
37476
|
#!/usr/bin/env python3
"""
Easy to use Websocket Server.
Source: https://github.com/rharder/handy
June 2018 - Updated for aiohttp v3.3
August 2018 - Updated for Python 3.7, made WebServer support multiple routes on one port
"""
import asyncio
import logging
import weakref
from functools import partial
from typing import Dict, Set, List
import aiohttp # pip install aiohttp
from aiohttp import web
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__license__ = "Public Domain"
class WebServer:
"""Hosts a web/websocket server on a given port and responds to multiple routes
(relative urls) at that address.
Source: https://github.com/rharder/handy
Author: <NAME>
License: Public Domain
"""
def __init__(self, host: str = None, port: int = None, ssl_context=None):
"""
Create a new WebServer that will listen on the given port.
:param port: The port on which to listen
"""
super().__init__()
self.log = logging.getLogger(__name__ + '.' + self.__class__.__name__)
# Passed parameters
self.host: str = host
self.port: int = port
self.ssl_context = ssl_context
# Internal use
self.app: web.Application = None
self.site: web.TCPSite = None
self.runner: web.AppRunner = None
self.route_handlers: Dict[str, WebHandler] = {}
self._running: bool = False
self._shutting_down: bool = False
self._starting_up: bool = False
def __str__(self):
routes = ", ".join(self.route_handlers.keys())
return "{}({}:({})".format(self.__class__.__name__, self.port, routes)
@property
def running(self):
return self._running
@property
def starting_up(self):
return self._starting_up
@property
def shutting_down(self):
return self._shutting_down
async def start(self):
"""
Starts the websocket server and begins listening. This function returns
with the server continuing to listen (non-blocking).
:return: None
"""
if self.starting_up or self.running:
raise Exception("Cannot start server when it is already running.")
self._starting_up = True
self.app = web.Application()
self.app['requests'] = [] # type: List[web.BaseRequest]
self.app.on_shutdown.append(self._on_shutdown)
# Connect routes
for route in self.route_handlers.keys():
self.app.router.add_get(route, partial(self.incoming_http_handler, route))
self.runner = web.AppRunner(self.app)
await self.runner.setup()
self.site = web.TCPSite(self.runner, port=self.port, host=self.host, ssl_context=self.ssl_context)
await self.site.start()
self._running = True
self._starting_up = False
async def shutdown(self):
if not self.running:
raise Exception("Cannot close server that is not running.")
if self.shutting_down:
pass
else:
self._shutting_down = True
await self.runner.cleanup()
async def _on_shutdown(self, app: web.Application):
self.close_current_connections()
self._running = False
self._shutting_down = False
def close_current_connections(self):
for x in self.app["requests"]:
if x is not None and x.transport is not None:
x.transport.close()
def add_route(self, route: str, handler):
if self.running:
raise RuntimeError("Cannot add a route after server is already running.")
self.route_handlers[route] = handler
async def incoming_http_handler(self, route: str, request: web.BaseRequest):
self.app['requests'].append(request)
try:
resp = await self.route_handlers[route].on_incoming_http(route, request)
finally:
self.app['requests'].remove(request)
return resp
class WebHandler:
async def on_incoming_http(self, route: str, request: web.BaseRequest):
return web.Response(body=str(self.__class__.__name__))
class WebsocketHandler(WebHandler):
def __init__(self, *kargs, **kwargs):
super().__init__(*kargs, **kwargs)
self.websockets: Set[web.WebSocketResponse] = weakref.WeakSet()
async def broadcast_json(self, msg):
""" Converts msg to json and broadcasts the json data to all connected clients. """
await self._broadcast(msg, web.WebSocketResponse.send_json)
async def broadcast_text(self, msg: str):
""" Broadcasts a string to all connected clients. """
await self._broadcast(msg, web.WebSocketResponse.send_str)
async def broadcast_bytes(self, msg: bytes):
""" Broadcasts bytes to all connected clients. """
await self._broadcast(msg, web.WebSocketResponse.send_bytes)
async def _broadcast(self, msg, func: callable):
for ws in set(self.websockets): # type: web.WebSocketResponse
await func(ws, msg)
async def close_websockets(self):
"""Closes all active websockets for this handler."""
ws_closers = [ws.close() for ws in set(self.websockets) if not ws.closed]
ws_closers and await asyncio.gather(*ws_closers)
async def on_incoming_http(self, route: str, request: web.BaseRequest):
"""Handles the incoming http(s) request and converts it to a WebSocketResponse.
This method is not meant to be overridden when subclassed.
"""
ws = web.WebSocketResponse()
self.websockets.add(ws)
try:
await ws.prepare(request)
await self.on_websocket(route, ws)
finally:
self.websockets.discard(ws)
return ws
async def on_websocket(self, route: str, ws: web.WebSocketResponse):
"""
Override this function if you want to handle new incoming websocket clients.
The default behavior is to listen indefinitely for incoming messages from clients
and call on_message() with each one.
If you override on_websocket and have your own loop to receive and process messages,
you may also need an await asyncio.sleep(0) line to avoid an infinite loop with the
websocket close message.
Example:
while not ws.closed:
ws_msg = await ws.receive()
await asyncio.sleep(0)
...
"""
try:
while not ws.closed:
ws_msg = await ws.receive() # type: aiohttp.WSMessage
await self.on_message(route=route, ws=ws, ws_msg_from_client=ws_msg)
# If you override on_websocket and have your own loop
# to receive and process messages, you may also need
# this await asyncio.sleep(0) line to avoid an infinite
# loop with the websocket close message.
await asyncio.sleep(0) # Need to yield control back to event loop
except RuntimeError as e: # Socket closing throws RuntimeError
print("RuntimeError - did socket close?", e, flush=True)
pass
finally:
await self.on_close(route, ws)
async def on_message(self, route: str, ws: web.WebSocketResponse, ws_msg_from_client: aiohttp.WSMessage):
""" Override this function to handle incoming messages from websocket clients. """
pass
async def on_close(self, route: str, ws: web.WebSocketResponse):
""" Override this function to handle a websocket having closed. """
pass
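# --- Usage sketch (illustrative addition, not part of the original module). ---
# Shows one way to wire a WebsocketHandler subclass into a WebServer; the route
# "/echo" and port 8080 are arbitrary example values.
class EchoHandler(WebsocketHandler):
    async def on_message(self, route: str, ws: web.WebSocketResponse, ws_msg_from_client: aiohttp.WSMessage):
        # Echo text frames straight back to the sending client.
        if ws_msg_from_client.type == aiohttp.WSMsgType.TEXT:
            await ws.send_str(ws_msg_from_client.data)
async def _echo_demo():
    server = WebServer(host="localhost", port=8080)
    server.add_route("/echo", EchoHandler())
    await server.start()
    try:
        await asyncio.sleep(3600)  # keep serving for an hour
    finally:
        await server.shutdown()
# Uncomment to try it out (kept commented so importing this module stays side-effect free):
# if __name__ == "__main__":
#     asyncio.run(_echo_demo())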
|
StarcoderdataPython
|
39639
|
<reponame>idiotic/idiotic
import logging
from urllib.parse import urlparse, urlunparse
from idiotic import block
from idiotic.util.resources import http
import aiohttp
import asyncio
import json
import types
log = logging.getLogger(__name__)
class HTTP(block.Block):
def __init__(self, name, url, method="GET", parameters=None, defaults=None, skip_repeats=False, format_data=True,
output=True, data=None, json=False, **options):
super().__init__(name, **options)
self.url = url
self.parameters = parameters or []
self.method = method
self.data = data or {}
self.headers = {}
self.json = json
self.defaults = defaults or {}
self.skip_repeats = skip_repeats
self.format_data = format_data
if output:
if output is True:
self.outputter = lambda d: d
elif output == "int":
self.outputter = int
elif output == "float":
self.outputter = float
elif output == "bool":
self.outputter = bool
elif output == "str":
self.outputter = str
elif output == "json":
self.outputter = json.loads
else:
raise ValueError("Invalid output type: {}".format(output))
else:
self.outputter = None
parsed_url = urlparse(url, scheme='http')
url_root = urlunparse((parsed_url[0], parsed_url[1], '', '', '', ''))
#: Options
self.options = options
self._param_dict = {n: self.defaults.get(n, None) for n in self.parameters}
        for name in self.parameters:
            # Bind the loop variable as a default argument so each generated
            # setter keeps its own parameter name (avoids the late-binding
            # closure bug where every setter would use the last name).
            async def setparam(self, val, _name=name):
                await self._setparam(_name, val)
            setattr(self, name, types.MethodType(setparam, self))
self.inputs = {}
self.resources = [http.URLReachable(url_root)]
async def _setparam(self, name, value):
if not self.skip_repeats or value != self._param_dict.get(name):
self._param_dict[name] = value
await self.perform()
def formatted_data(self):
if self.format_data:
return {
k: v.format(**self.data) for k, v in self._param_dict.items()
}
else:
return self.data
async def perform(self, *_):
while True:
try:
async with aiohttp.ClientSession() as client:
headers = dict(self.headers)
data = self.formatted_data()
if self.json:
data = json.dumps(data)
if 'content-type' not in headers:
headers['content-type'] = 'application/json'
async with client.request(
self.method,
self.url.format(**self._param_dict),
data=data,
headers=headers,
) as request:
res = await request.text()
if self.outputter:
output_val = self.outputter(res)
await self.output(output_val)
break
except IOError:
log.error("%s: Unable to retrieve %s", self.name, self.url)
await asyncio.sleep(5)
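# Usage sketch (illustrative only; the block name, URL and parameter are made up,
# and in practice this block is wired up by the idiotic runtime, not by hand):
# blk = HTTP("thermostat",
#            url="http://example.com/set?temp={target}",
#            parameters=["target"], defaults={"target": "20"},
#            output="json")
# await blk.perform()   # issues the GET and passes the parsed JSON to self.output()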
|
StarcoderdataPython
|
1696022
|
# This will import the SWIG bindings created and installed by the bindings directory, to create
# a seamless protocols package integrating C++ and python code
from protocols import *
from MultipleTryProtocol import MultipleTryProtocol
|
StarcoderdataPython
|
154738
|
#<NAME>
# October 6, 2020
import pysam
import pandas as pd
import numpy as np
def get_sample_counts(bam_file, reference):
bamfile_obj = pysam.AlignmentFile(bam_file,'rb')
ref = open(reference + "/chrName.txt",'r')
curr_file_counts = []
ref_list = []
for seq in ref:
seq = seq.strip()
curr_seq_reads = bamfile_obj.fetch(seq)
read_counter = 0
for curr_read in curr_seq_reads:
read_counter = read_counter + 1
curr_file_counts.extend([read_counter])
ref_list.extend([seq])
counts = pd.DataFrame(curr_file_counts)
counts = counts.transpose()
counts.columns=ref_list
counts = counts.transpose()
return counts
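# Illustrative call (paths are placeholders; expects a STAR-style reference
# directory containing chrName.txt next to the indexed BAM):
# counts = get_sample_counts("sample1.sorted.bam", "/path/to/star_index")
# print(counts)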
############################################################
bam_files = snakemake.input.bams
counts = [get_sample_counts(f, snakemake.params.ref)
for f in bam_files]
script_log = snakemake.log
log_file = open(str(script_log),"w")
samples = snakemake.params.samples
if(hasattr(snakemake.params, 'units')):
units = snakemake.params.units
tmp_samples = []
for sample, unit in zip(samples, units):
tmp_samples.append(sample + "_" + unit)
log_file.write("Made this sample-unit pair")
log_file.write(sample + "_" + unit)
samples = tmp_samples
log_file.write("checking sample order in the python script")
log_file.write(str(samples))
for t, sample in zip(counts, samples):
t.columns = [sample]
log_file.write("checking sample order in the python script after the zip")
for t in counts:
print(t.columns)
matrix = pd.concat(counts, axis=1)
matrix.index.name = "gene"
log_file.write("Checking columns in matrix before groupby")
log_file.write(str(matrix.columns))
matrix = matrix.groupby(matrix.columns, axis=1).sum()
#matrix = matrix.groupby(matrix.columns, axis=1)
log_file.write("Checking columns in matrix")
log_file.write(str(matrix.columns))
matrix.to_csv(snakemake.output[0], sep="\t")
|
StarcoderdataPython
|
73262
|
<reponame>PavelSheremetev/libelium_sensor_collector
import select
import socket
SERVER_ADDRESS = ('172.16.58.3', 8888)
# Maximum number of descriptors that may be open at the same time
MAX_CONNECTIONS = 10
# Which sockets to read information from and which to write to
INPUTS = list()
OUTPUTS = list()
def get_non_blocking_server_socket():
    # Create a socket that works without blocking the main thread
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setblocking(0)
    # Bind the server to the required address and port
    server.bind(SERVER_ADDRESS)
    # Set the maximum number of connections
server.listen(MAX_CONNECTIONS)
return server
def handle_readables(readables, server):
"""
    Handle events that appear on the input sockets
"""
for resource in readables:
        # If the event comes from the server socket, we accept a new connection
if resource is server:
connection, client_address = resource.accept()
connection.setblocking(0)
INPUTS.append(connection)
print("new connection from {address}".format(address=client_address))
        # If the event does not come from the server socket, a client's input buffer has data to read
else:
data = ""
try:
data = resource.recv(1024)
            # If the socket was closed on the other side
except ConnectionResetError:
pass
if data:
                # Print the received data to the console
                print("getting data: {data}".format(data=str(data)))
                # Mark that we will also want to write to this socket
                if resource not in OUTPUTS:
                    OUTPUTS.append(resource)
            # If there is no data but the event fired, the OS is signalling that the resource was fully read and closed
else:
                # Clean up the resource bookkeeping and close the descriptor
clear_resource(resource)
def clear_resource(resource):
"""
    Clean up the bookkeeping for a socket and close it
"""
if resource in OUTPUTS:
OUTPUTS.remove(resource)
if resource in INPUTS:
INPUTS.remove(resource)
resource.close()
print('closing connection ' + str(resource))
def handle_writables(writables):
    # This event fires when space becomes available in the write buffer
for resource in writables:
try:
resource.send(bytes('Hello from server!', encoding='UTF-8'))
except OSError:
clear_resource(resource)
if __name__ == '__main__':
    # Create a server socket that does not block the main thread while waiting for connections
server_socket = get_non_blocking_server_socket()
INPUTS.append(server_socket)
print("server is running, please, press ctrl+c to stop")
try:
while INPUTS:
readables, writables, exceptional = select.select(INPUTS, OUTPUTS, INPUTS)
handle_readables(readables, server_socket)
handle_writables(writables)
except KeyboardInterrupt:
clear_resource(server_socket)
print("Server stopped! Thank you for using!")
|
StarcoderdataPython
|
1645786
|
<reponame>MiM0ulay/code-katas
"""Function to return list from string."""
def string_to_array(string):
"""Return a list from input string."""
if string == "":
return [""]
return string.split()
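# Illustrative behaviour (follows directly from str.split() and the empty-string guard):
# string_to_array("Robin Singh")  ->  ["Robin", "Singh"]
# string_to_array("")             ->  [""]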
|
StarcoderdataPython
|
3240929
|
<filename>JianshuResearchTools/objects.py
from datetime import datetime
from typing import Dict, List
from . import article, collection, island, notebook, user
from .assert_funcs import (AssertArticleUrl, AssertCollectionUrl,
AssertIslandUrl, AssertNotebookUrl, AssertUserUrl)
from .convert import (ArticleSlugToArticleUrl, CollectionSlugToCollectionUrl,
IslandSlugToIslandUrl, IslandUrlToIslandSlug,
NotebookSlugToNotebookUrl, UserSlugToUserUrl,
UserUrlToUserSlug)
from .exceptions import InputError
DISABLE_CACHE = False # 禁用缓存
def cache_result(func):
"""缓存函数的返回值"""
cache_Dict = {}
def wrapper(*args, **kwargs):
args_hash = hash(tuple(args[1:]) + tuple(kwargs.items()))
cache_result = cache_Dict.get(args_hash)
if cache_result and not DISABLE_CACHE:
return cache_result
else:
result = func(*args, **kwargs)
if not DISABLE_CACHE:
cache_Dict[args_hash] = result
return result
return wrapper
class User():
"""用户类
"""
def __init__(self, user_url: str = None, *, user_slug: str = None):
"""构建新的用户对象
Args:
user_url (str, optional): 用户个人主页 Url. Defaults to None.
user_slug (str, optional): 用户 Slug. Defaults to None.
"""
# TODO: 支持使用用户 Id 初始化用户对象
if user_slug and user_url:
raise InputError("只能使用一个参数进行用户类的初始化")
elif user_url:
AssertUserUrl(user_url)
self._url = user_url
elif user_slug:
user_url = UserSlugToUserUrl(user_slug)
AssertUserUrl(user_url)
self._url = user_url
else:
raise InputError("请至少传入一个参数")
@property
def url(self) -> str:
"""获取用户主页 Url
Returns:
str: 用户主页 Url
"""
return self._url
@property
@cache_result
def slug(self) -> str:
"""获取用户 Slug
Returns:
str: 用户 Slug
"""
return UserUrlToUserSlug(self._url)
@property
@cache_result
def name(self) -> str:
"""获取用户昵称
Returns:
str: 用户昵称
"""
return user.GetUserName(self._url)
@property
@cache_result
def gender(self) -> int:
"""获取用户性别
Returns:
int: 用户性别,0 为未知,1 为男,2 为女
"""
return user.GetUserGender(self._url)
@property
@cache_result
def followers_count(self) -> int:
"""获取用户关注数
Returns:
int: 关注数
"""
return user.GetUserFollowersCount(self._url)
@property
@cache_result
def fans_count(self) -> int:
"""获取用户粉丝数
Returns:
int: 粉丝数
"""
return user.GetUserFansCount(self._url)
@property
@cache_result
def articles_count(self) -> int:
"""获取用户文章数
Returns:
int: 文章数
"""
return user.GetUserArticlesCount(self._url)
@property
@cache_result
def wordage(self) -> int:
"""获取用户总字数
Returns:
int: 总字数
"""
return user.GetUserWordage(self._url)
@property
@cache_result
def likes_count(self) -> int:
"""获取用户被点赞数
Returns:
int: 被点赞数
"""
return user.GetUserLikesCount(self._url)
@property
@cache_result
def assets_count(self) -> float:
"""获取用户资产量
Returns:
int: 资产量
"""
return user.GetUserAssetsCount(self._url)
@property
@cache_result
def FP_count(self) -> float:
"""获取用户简书钻数量
Returns:
int: 简书钻数量
"""
return user.GetUserFPCount(self._url)
@property
@cache_result
def FTN_count(self) -> float:
"""获取用户简书贝数量
Returns:
int: 简书贝数量
"""
return user.GetUserFTNCount(self._url)
@property
@cache_result
def badges(self) -> List:
"""获取徽章列表
Returns:
List: 徽章列表
"""
return user.GetUserBadgesList(self._url)
@property
@cache_result
def last_update_time(self) -> datetime:
"""获取最近更新时间
Returns:
datetime: 最近更新时间
"""
return user.GetUserLastUpdateTime(self._url)
@property
@cache_result
def VIP_info(self) -> Dict:
"""获取用户会员信息
Returns:
Dict: 会员信息
"""
return user.GetUserVIPInfo(self._url)
@property
@cache_result
def introduction_text(self) -> str:
"""获取纯文本格式的用户简介
Returns:
str: 纯文本格式的用户简介
"""
return user.GetUserIntroductionText(self._url)
@property
@cache_result
def introduction_html(self) -> str:
"""获取 Html 格式的用户简介
Returns:
str: Html 格式的用户简介
"""
return user.GetUserIntroductionHtml(self._url)
@property
@cache_result
def notebooks(self) -> List:
"""获取用户文集信息
Returns:
List: 文集信息
"""
return user.GetUserNotebooksInfo(self._url)
@property
@cache_result
def own_collections(self) -> List:
"""获取自己创建的专题信息
Returns:
List: 自己创建的专题信息
"""
return user.GetUserOwnCollectionsInfo(self._url)
@property
@cache_result
def manageable_collections(self) -> List:
"""获取用户有管理权的专题信息
Returns:
List: 有管理权的专题信息
"""
return user.GetUserManageableCollectionsInfo(self._url)
@cache_result
def articles_info(self, page: int = 1, count: int = 10) -> List[Dict]:
"""获取文章信息
Args:
page (int, optional): 页码. Defaults to 1.
count (int, optional): 每次获取的文章信息数量. Defaults to 1.
Returns:
List[Dict]: 文章信息
"""
return user.GetUserArticlesInfo(self._url, page, count)
@cache_result
def followers_info(self, page: int = 1) -> List[Dict]:
"""获取关注者信息
Args:
page (int, optional): 页码. Defaults to 1.
Returns:
List[Dict]: 关注者信息
"""
return user.GetUserFollowersInfo(self._url, page)
@cache_result
def fans_info(self, page: int = 1) -> List[Dict]:
"""获取粉丝信息
Args:
page (int, optional): 页码. Defaults to 1.
Returns:
List[Dict]: 粉丝信息
"""
return user.GetUserFansInfo(self._url, page)
def __eq__(self, other: object) -> bool:
"""判断是否是同一个用户
Args:
other (object): 另一个对象
Returns:
bool: 判断结果
"""
if not isinstance(other, User):
return False # 不是由用户类构建的必定不相等
if self._url == other._url:
return True
else:
return False
def __str__(self) -> str:
"""输出用户信息摘要
Returns:
str: 用户信息摘要
"""
result = f"""用户信息摘要:
昵称:{self.name}
Url:{self.url}
性别:{self.gender}
关注者数:{self.followers_count}
粉丝数:{self.fans_count}
文章数:{self.articles_count}
总字数:{self.wordage}
简书钻:{self.FP_count}
简书贝:{self.FTN_count}
总资产:{self.assets_count}
徽章:{' '.join(self.badges)}
最后更新时间:{self.last_update_time}
会员等级:{self.VIP_info['vip_type']}
会员过期时间:{self.VIP_info['expire_date']}
个人简介:\n{self.introduction_text}"""
return result
class Article():
"""文章类
"""
def __init__(self, article_url: str = None, article_slug: str = None):
"""构建新的文章对象
Args:
article_url (str, optional): 文章 Url. Defaults to None.
article_slug (str, optional): 文章 Slug. Defaults to None.
"""
# TODO: 支持使用文章 Id 初始化文章对象
if article_slug and article_url:
raise InputError("只能使用一个参数进行文章类的初始化")
elif article_url:
AssertArticleUrl(article_url)
self._url = article_url
elif article_slug:
article_url = ArticleSlugToArticleUrl(article_slug)
AssertArticleUrl(article_url)
self._url = article_url
else:
raise InputError("请至少传入一个参数")
@property
def url(self) -> str:
"""获取文章 Url
Returns:
str: 文章 Url
"""
return self._url
@property
@cache_result
def slug(self) -> str:
"""获取文章 Slug
Returns:
str: 文章 Slug
"""
return article.GetArticleSlug(self._url)
@property
@cache_result
def title(self) -> str:
"""获取文章标题
Returns:
str: 标题
"""
return article.GetArticleTitle(self._url)
@property
@cache_result
def author_name(self) -> str:
"""获取文章作者名
Returns:
str: 作者名
"""
return article.GetArticleAuthorName(self._url)
@property
@cache_result
def wordage(self) -> int:
"""获取文章总字数
Returns:
int: 总字数
"""
return article.GetArticleWordage(self._url)
@property
@cache_result
def reads_count(self) -> int:
"""获取文章阅读量
Returns:
int: 阅读量
"""
return article.GetArticleReadsCount(self._url)
@property
@cache_result
def likes_count(self) -> int:
"""获取文章点赞量
Returns:
int: 文章点赞量
"""
return article.GetArticleLikesCount(self._url)
@property
@cache_result
def comments_count(self) -> int:
"""获取文章评论量
Returns:
int: 文章评论量
"""
return article.GetArticleCommentsCount(self._url)
@property
@cache_result
def most_valuable_comments_count(self) -> int:
"""获取文章精选评论量
Returns:
int: 文章精选评论量
"""
return article.GetArticleMostValuableCommentsCount(self._url)
@property
@cache_result
def total_FP_count(self) -> float:
"""获取文章总获钻量
Returns:
int: 文章总获钻量
"""
return article.GetArticleTotalFPCount(self._url)
@property
@cache_result
def description(self) -> str:
"""获取文章摘要
Returns:
str: 文章摘要
"""
return article.GetArticleDescription(self._url)
@property
@cache_result
def publish_time(self) -> datetime:
"""获取文章发布时间
Returns:
datetime: 文章发布时间
"""
return article.GetArticlePublishTime(self._url)
@property
@cache_result
def update_time(self) -> datetime:
"""获取文章更新时间
Returns:
datetime: 文章更新时间
"""
return article.GetArticleUpdateTime(self._url)
@property
@cache_result
def paid_status(self) -> bool:
"""获取文章付费状态
Returns:
bool: 文章付费状态
"""
return article.GetArticlePaidStatus(self._url)
@property
@cache_result
def reprint_status(self) -> bool:
"""获取文章转载状态
Returns:
bool: 文章转载状态
"""
return article.GetArticleReprintStatus(self._url)
@property
@cache_result
def comment_status(self) -> bool:
"""获取文章评论状态
Returns:
bool: 文章评论状态
"""
return article.GetArticleCommentStatus(self._url)
@property
@cache_result
def html(self) -> str:
"""获取 Html 格式的文章内容
Returns:
str: Html 格式的文章内容
"""
return article.GetArticleHtml(self._url)
@property
@cache_result
def text(self) -> str:
"""获取纯文本格式的文章内容
Returns:
str: 纯文本格式的文章内容
"""
return article.GetArticleText(self._url)
def __eq__(self, other: object) -> bool:
"""判断是否是同一篇文章
Args:
other (object): 另一个对象
Returns:
bool: 判断结果
"""
if not isinstance(other, Article):
return False # 不是由文章类构建的必定不相等
if self._url == other._url:
return True
else:
return False
def __str__(self) -> str:
result = f"""文章信息摘要:
标题:{self.title}
Url: {self.url}
作者名:{self.author_name}
字数:{self.wordage}
阅读量:{self.reads_count}
点赞数:{self.likes_count}
评论数:{self.comments_count}
精选评论数:{self.most_valuable_comments_count}
总获钻量:{self.total_FP_count}
发布时间:{self.publish_time}
更新时间:{self.update_time}
需付费:{self.paid_status}
可转载:{self.reprint_status}
可评论:{self.comment_status}
摘要:\n{self.description}"""
return result
class Notebook():
"""文集类
"""
def __init__(self, notebook_url: str = None, notebook_slug: str = None):
"""构建新的文集对象
Args:
notebook_url (str, optional): 文集 Url. Defaults to None.
notebook_slug (str, optional): 文集 Slug. Defaults to None.
"""
# TODO: 支持使用用户 Id 初始化用户对象
if notebook_slug and notebook_url:
raise InputError("只能使用一个参数进行文集类的初始化")
elif notebook_url:
AssertNotebookUrl(notebook_url)
self._url = notebook_url
elif notebook_slug:
notebook_url = NotebookSlugToNotebookUrl(notebook_slug)
AssertNotebookUrl(notebook_url)
self._url = notebook_url
else:
raise InputError("请至少传入一个参数")
@property
def url(self) -> str:
"""获取文集 Url
Returns:
str: 文集 Url
"""
return self._url
@property
@cache_result
def id(self) -> int:
"""获取文集 ID
Returns:
int: 文集 ID
"""
return notebook.GetNotebookId(self._url)
@property
@cache_result
def slug(self) -> str:
"""获取文集 Slug
Returns:
str: 文集 Slug
"""
return notebook.GetNotebookSlug(self._url)
@property
@cache_result
def name(self) -> str:
"""获取文集名称
Returns:
str: 文集名称
"""
return notebook.GetNotebookName(self._url)
@property
@cache_result
def articles_count(self) -> int:
"""获取文集中的文章总数
Returns:
int: 文章总数
"""
return notebook.GetNotebookArticlesCount(self._url)
@property
@cache_result
def author_name(self) -> str:
"""获取文集的作者名
Returns:
str: 作者名
"""
return notebook.GetNotebookAuthorInfo(self._url)["name"]
@property
@cache_result
def author_info(self) -> Dict:
"""获取文集的作者信息
Returns:
Dict: 作者信息
"""
return notebook.GetNotebookAuthorInfo(self._url)
@property
@cache_result
def wordage(self) -> int:
"""获取文集中所有文章的总字数
Returns:
int: 文集总字数
"""
return notebook.GetNotebookWordage(self._url)
@property
@cache_result
def subscribers_count(self) -> int:
"""获取文集的关注者数量
Returns:
int: 关注者数量
"""
return notebook.GetNotebookSubscribersCount(self._url)
@property
@cache_result
def update_time(self) -> datetime:
"""获取文集的更新时间
Returns:
datetime: 更新时间
"""
return notebook.GetNotebookUpdateTime(self._url)
@cache_result
def articles_info(self, page: int = 1, count: int = 10, sorting_method: str = "time") -> List[Dict]:
"""获取文集中的文章信息
Args:
page (int, optional): 页码. Defaults to 1.
count (int, optional): 每次返回的数据数量. Defaults to 10.
sorting_method (str, optional): 排序方法,time 为按照发布时间排序,
comment_time 为按照最近评论时间排序,hot 为按照热度排序. Defaults to "time".
Returns:
List[Dict]: 文章信息
"""
return notebook.GetNotebookArticlesInfo(self._url, page, count, sorting_method)
def __eq__(self, other: object) -> bool:
"""判断是否是同一个文集
Args:
other (object): 另一个对象
Returns:
bool: 判断结果
"""
if not isinstance(other, Notebook):
return False # 不是由文集类构建的必定不相等
if self._url == other._url:
return True
else:
return False
def __str__(self) -> str:
"""输出文集信息摘要
Returns:
str: 文集信息摘要
"""
result = f"""文集信息摘要:
名称:{self.name}
Url:{self.url}
作者名:{self.author_name}
文章数:{self.articles_count}
总字数:{self.wordage}
关注者数:{self.subscribers_count}
更新时间:{self.update_time}"""
return result
class Collection():
"""专题类
"""
def __init__(self, collection_url: str = None, collection_slug: str = None,
collection_id: int = None):
"""初始化专题类
Args:
collection_url (str, optional): 专题 Url. Defaults to None.
collection_slug (str, optional): 专题 Slug. Defaults to None.
collection_id (int, optional): 专题 Id,如不传入部分数据将无法获取. Defaults to None.
"""
# TODO: 支持通过 collection_url 获取 collection_id
if collection_slug and collection_url:
raise InputError("只能使用一个参数进行专题类的初始化")
elif collection_url:
AssertCollectionUrl(collection_url)
self._url = collection_url
elif collection_slug:
collection_url = CollectionSlugToCollectionUrl(collection_slug)
AssertCollectionUrl(collection_url)
self._url = collection_url
else:
raise InputError("请至少传入一个参数")
if collection_id:
self._id = collection_id
else:
self._id = None
@property
def url(self) -> str:
"""获取专题 Url
Returns:
str: 专题 Url
"""
return self._url
@property
@cache_result
def slug(self) -> str:
"""获取专题 Slug
Returns:
str: 专题 Slug
"""
return collection.GetCollectionSlug(self._url)
@property
@cache_result
def name(self) -> str:
"""获取专题名称
Returns:
str: 专题名称
"""
return collection.GetCollectionName(self._url)
@property
@cache_result
def avatar_url(self) -> str:
"""获取专题头像链接
Returns:
str: 专题头像链接
"""
return collection.GetCollectionAvatarUrl(self._url)
@property
@cache_result
def introduction_text(self) -> str:
"""获取纯文本格式的专题简介
Returns:
str: 纯文本格式的专题简介
"""
return collection.GetCollectionIntroductionText(self._url)
@property
@cache_result
def introduction_html(self) -> str:
"""获取 Html 格式的专题简介
Returns:
str: Html 格式的专题简介
"""
return collection.GetCollectionIntroductionHtml(self._url)
@property
@cache_result
def articles_update_time(self) -> datetime:
"""获取专题文章更新时间
Returns:
datetime: 专题文章更新时间
"""
return collection.GetCollectionArticlesUpdateTime(self._url)
@property
@cache_result
def info_update_time(self) -> datetime:
"""获取专题信息更新时间
Returns:
datetime: 专题信息更新时间
"""
return collection.GetCollectionInfoUpdateTime(self._url)
@property
@cache_result
def owner_info(self) -> Dict:
"""获取专题的所有者信息
Returns:
Dict: 用户信息
"""
return collection.GetCollectionOwnerInfo(self._url)
@property
@cache_result
def articles_count(self) -> int:
"""获取专题文章数
Returns:
int: 专题文章数
"""
return collection.GetCollectionArticlesCount(self._url)
@property
@cache_result
def subscribers_count(self) -> int:
"""获取专题关注者数
Returns:
int: 专题关注者数
"""
return collection.GetCollectionSubscribersCount(self._url)
@cache_result
def editors_info(self, page: int = 1) -> List[Dict]:
"""获取专题编辑信息
Args:
            page (int, optional): 页码. Defaults to 1.
Raises:
InputError: 因缺少 ID 参数而无法获取结果时抛出此异常
Returns:
List[Dict]: 编辑信息
"""
if not self._id:
raise InputError("实例化该专题对象时未传入 ID 参数,无法获取编辑信息")
return collection.GetCollectionEditorsInfo(self._id, page)
@cache_result
def recommended_writers_info(self, page: int = False) -> List[Dict]:
"""获取专题推荐作者信息
Args:
page (int, optional): 页码. Defaults to False.
Raises:
InputError: 因缺少 ID 参数而无法获取结果时抛出此异常
Returns:
List[Dict]: 推荐作者信息
"""
if not self._id:
raise InputError("实例化该专题对象时未传入 ID 参数,无法获取推荐作者信息")
return collection.GetCollectionRecommendedWritersInfo(self._id, page)
@cache_result
def subscribers_info(self, start_sort_id: int) -> List:
"""获取专题关注者信息
Args:
start_sort_id (int): 起始序号,等于上一条数据的序号
Raises:
InputError: 因缺少 ID 参数而无法获取结果时抛出此异常
Returns:
List: 关注者信息
"""
if not self._id:
raise InputError("实例化该专题对象时未传入 ID 参数,无法获取关注者信息")
return collection.GetCollectionSubscribersInfo(self._id, start_sort_id)
@cache_result
def articles_info(self, page: int = 1, count: int = 10,
sorting_method: str = "time") -> List[Dict]:
"""获取专题文章信息
Args:
page (int, optional): 页码. Defaults to 1.
count (int, optional): 每次返回的数据数量. Defaults to 10.
sorting_method (str, optional): 排序方法,time 为按照发布时间排序,
comment_time 为按照最近评论时间排序,hot 为按照热度排序. Defaults to "time".
Returns:
List[Dict]: 专题中的文章信息
"""
return collection.GetCollectionArticlesInfo(self._url, page, count, sorting_method)
def __eq__(self, other: object) -> bool:
"""判断是否是同一个专题
Args:
other (object): 另一个对象
Returns:
bool: 判断结果
"""
if not isinstance(other, Collection):
return False # 不是由专题类构建的必定不相等
if self._url == other._url:
return True
else:
return False
def __str__(self) -> str:
"""输出专题信息摘要
Returns:
str: 专题信息摘要
"""
result = f"""
专题信息摘要:
专题名:{self.name}
Url:{self.url}
主编名:{self.owner_info["name"]}
图片链接:{self.avatar_url}
文章数:{self.articles_count}
关注者数:{self.subscribers_count}
文章更新时间:{self.articles_update_time}
信息更新时间:{self.info_update_time}
简介:\n{self.introduction_text}"""
return result
class Island():
"""小岛类
"""
def __init__(self, island_url: str = None, island_slug: str = None):
if island_slug and island_url:
raise InputError("只能使用一个参数进行小岛类的初始化")
elif island_url:
AssertIslandUrl(island_url)
self._url = island_url
elif island_slug:
island_url = IslandSlugToIslandUrl(island_slug)
AssertIslandUrl(island_url)
self._url = island_url
else:
raise InputError("请至少传入一个参数")
@property
def url(self) -> str:
"""获取小岛 Url
Returns:
str: 小岛 Url
"""
return self._url
@property
@cache_result
def slug(self) -> str:
"""获取小岛 Slug
Returns:
str: 小岛 Slug
"""
return IslandUrlToIslandSlug(self._url)
@property
@cache_result
def name(self) -> str:
"""获取小岛名称
Returns:
str: 小岛名称
"""
return island.GetIslandName(self._url)
@property
@cache_result
def avatar_url(self) -> str:
"""获取小岛头像链接
Returns:
str: 小岛头像链接
"""
return island.GetIslandAvatarUrl(self._url)
@property
@cache_result
def introduction(self) -> str:
"""获取小岛简介
Returns:
str: 小岛简介
"""
return island.GetIslandIntroduction(self._url)
@property
@cache_result
def members_count(self) -> int:
"""获取小岛成员数量
Returns:
int: 成员数量
"""
return island.GetIslandMembersCount(self._url)
@property
@cache_result
def posts_count(self) -> int:
"""获取小岛帖子数量
Returns:
int: 帖子数量
"""
return island.GetIslandPostsCount(self._url)
@property
@cache_result
def category(self) -> str:
"""获取小岛分类
Returns:
str: 分类
"""
return island.GetIslandCategory(self._url)
@cache_result
def posts(self, start_sort_id: int = None, count: int = 10,
topic_id: int = None, sorting_method: str = "time") -> List[Dict]:
"""获取小岛帖子信息
Args:
start_sort_id (int, optional): 起始序号,等于上一条数据的序号. Defaults to None.
count (int, optional): 每次返回的数据数量. Defaults to 10.
topic_id (int, optional): 话题 ID. Defaults to None.
sorting_method (str, optional): 排序方法,time 为按照发布时间排序,
comment_time 为按照最近评论时间排序,hot 为按照热度排序. Defaults to "time".
Returns:
List[Dict]: 帖子信息
"""
return island.GetIslandPosts(self._url, start_sort_id, count, topic_id, sorting_method)
def __eq__(self, other: object) -> bool:
"""判断是否是同一个小岛
Args:
other (object): 另一个对象
Returns:
bool: 判断结果
"""
        if not isinstance(other, Island):
return False # 不是由小岛类构建的必定不相等
if self._url == other._url:
return True
else:
return False
def __str__(self) -> str:
"""输出小岛信息摘要
Returns:
str: 小岛信息摘要
"""
result = f"""小岛信息摘要:
小岛名:{self.name}
Url:{self.url}
分类:{self.category}
成员数:{self.members_count}
帖子数:{self.posts_count}
简介:\n{self.introduction}"""
return result
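# Usage sketch (illustrative; the slugs below are made-up placeholders):
# user = User(user_slug="xxxxxxxx")
# print(user)                        # summary built by User.__str__
# art = Article(article_slug="yyyyyyyy")
# print(art.title, art.reads_count)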
|
StarcoderdataPython
|
4801290
|
from musicscore.dtd.dtd import Sequence, Choice, GroupReference, Element
from musicscore.musicxml.attributes.attribute_abstract import AttributeAbstract
from musicscore.musicxml.attributes.optional_unique_id import OptionalUniqueId
from musicscore.musicxml.attributes.printobject import PrintObject
from musicscore.musicxml.attributes.printstyle import PrintStyle
from musicscore.musicxml.elements.xml_element import XMLElement
from musicscore.musicxml.types.complextypes.complextype import ComplexType
from musicscore.musicxml.types.simple_type import TypeFifths, TypeMode, TypeStep, TypeSemitones, TypeAccidentalValue, \
TypeOctave
class KeyNumberAttribute(AttributeAbstract):
def __init__(self, cancel=None, *args, **kwargs):
super().__init__(*args, **kwargs)
self.generate_attribute('number', cancel, 'StaffNumber')
class Location(AttributeAbstract):
""""""
def __init__(self, location=None, *args, **kwargs):
super().__init__(*args, **kwargs)
self.generate_attribute('location', location, 'TypeCancelLocation')
class ComplexTypeCancel(ComplexType, TypeFifths, Location):
"""
A cancel element indicates that the old key signature should be cancelled before the new one appears. This will
always happen when changing to C major or A minor and need not be specified then. The cancel value matches the
fifths value of the cancelled key signature (e.g., a cancel of -2 will provide an explicit cancellation for changing
from B flat major to F major). The optional location attribute indicates where the cancellation appears relative to
the new key signature.
"""
def __init__(self, value, *args, **kwargs):
super().__init__(value=value, *args, **kwargs)
class Cancel(ComplexTypeCancel):
""""""
def __init__(self, value, *args, **kwargs):
super().__init__(value=value, *args, **kwargs)
class Fifths(XMLElement, TypeFifths):
""""""
def __init__(self, value, *args, **kwargs):
super().__init__(value=value, *args, **kwargs)
class Mode(XMLElement, TypeMode):
""""""
def __init__(self, value, *args, **kwargs):
super().__init__(value=value, *args, **kwargs)
class KeyStep(XMLElement, TypeStep):
"""
Non-traditional key signatures can be represented using the Humdrum/Scot concept of a list of altered tones.
The key-step element indicates the pitch step to be altered, represented using the same names as in the step
element.
"""
def __init__(self, value, *args, **kwargs):
super().__init__(value=value, *args, **kwargs)
class KeyAlter(XMLElement, TypeSemitones):
"""
Non-traditional key signatures can be represented using the Humdrum/Scot concept of a list of altered tones.
The key-step element indicates the pitch step to be altered, represented using the same names as in the step
element.
"""
def __init__(self, value, *args, **kwargs):
super().__init__(value=value, *args, **kwargs)
class ComplexKeyAccidental(ComplexType, TypeAccidentalValue):
"""
Non-traditional key signatures can be represented using the Humdrum/Scot concept of a list of altered tones.
The key-accidental element indicates the accidental to be displayed in the key signature, represented in the same
manner as the accidental element. It is used for disambiguating microtonal accidentals.
<xs:simpleContent>
<xs:extension base="accidental-value">
<xs:attribute name="smufl" type="smufl-accidental-glyph-name"/>
</xs:extension>
</xs:simpleContent>
"""
def __init__(self, value, *args, **kwargs):
super().__init__(value=value, *args, **kwargs)
raise NotImplementedError()
class Number(AttributeAbstract):
def __init__(self, number, *args, **kwargs):
super().__init__(*args, **kwargs)
self.generate_attribute('number', number, 'PositiveInteger')
class CancelAttribute(AttributeAbstract):
def __init__(self, cancel=None, *args, **kwargs):
super().__init__(*args, **kwargs)
self.generate_attribute('cancel', cancel, 'TypeYesNo')
class ComplexTypeKeyOctave(ComplexType, TypeOctave, Number, CancelAttribute):
"""
The key-octave element specifies in which octave an element of a key signature appears. The content specifies the
octave value using the same values as the display-octave element. The number attribute is a positive integer that
refers to the key signature element in left-to-right order. If the cancel attribute is set to yes, then this number
refers to the canceling key signature specified by the cancel element in the parent key element. The cancel
    attribute cannot be set to yes if there is no corresponding cancel element within the parent key element. It is "no"
    by default.
"""
class KeyOctave(ComplexTypeKeyOctave):
def __init__(self, value, *args, **kwargs):
super().__init__(value=value, *args, **kwargs)
"""
The traditional-key group represents a traditional key signature using the cycle of fifths.
"""
TraditionalKey = Sequence(
Element(Cancel, min_occurrence=0),
Element(Fifths),
Element(Mode, min_occurrence=0)
)
"""
The non-traditional-key group represents a single alteration within a non-traditional key signature. A sequence of
these groups makes up a non-traditional key signature
"""
NonTraditionalKey = Sequence(
Element(KeyStep),
Element(KeyAlter),
Element(ComplexKeyAccidental, min_occurrence=0)
)
class TypeKey(ComplexType, KeyNumberAttribute, PrintStyle, PrintObject,
OptionalUniqueId):
"""
The key type represents a key signature. Both traditional and non-traditional key signatures are supported.
The optional number attribute refers to staff numbers. If absent, the key signature applies to all staves in the
part. Key signatures appear at the start of each system unless the print-object attribute has been set to "no".
"""
_DTD = (
Sequence(
Choice(
GroupReference(TraditionalKey),
GroupReference(NonTraditionalKey, min_occurrence=0, max_occurrence=None)
),
Element(KeyOctave, min_occurrence=0, max_occurrence=None)
)
)
def __init__(self, tag, *args, **kwargs):
super().__init__(tag=tag, *args, **kwargs)
|
StarcoderdataPython
|
1787260
|
from __future__ import division
import parent
from parent import *
class HelixComplex(ParentComplex):
"""Helix association or disocciation reaction"""
def __init__(self , myPickles, dangleleft, dangleright, theta, zip, strand1, strand2,T, concentration, sodium, magnesium, dataset_name, docID, name ):
ParentComplex.__init__(self,myPickles, theta , T, concentration, sodium, magnesium, dataset_name, docID )
self.name = name
assert strand1.complement == strand2
self.strand1 = strand1
self.strand2 = strand2
self.L = len(strand1)
assert self.L == len(strand2)
self.zip = zip
self.dangleleft = dangleleft
self.dangleright = dangleright
def dot_paren(self, state):
"""Returns the structure of the complex in dot-paren notation"""
L = self.L
i, j = state
strand2 = '.' * len(self.dangleleft)+'.' * (L - j) + '(' * (j - i) + '.' * i+ '.' * len(self.dangleright)
strand1 = '.' * i + ')' * (j - i) + '.' * (L-j)
if i == j:
return strand1
else:
return strand2 + '+' + strand1
def dot_paren_modify(self, state):
"""Insert `*' at the start and end of the dot-paren notation, before and after all `+' signs, and also before and after every space"""
L = self.L
i, j = state
strand2 = '.' * len(self.dangleleft)+'.' * (L - j) + '(' * (j - i) + '.' * i+ '.' * len(self.dangleright)
strand1 ='.' * i + ')' * (j - i) + '.' * (L-j)
return '*' + strand2 + '*' +'+' +'*' + strand1 +'*'
def sequence(self, state):
"""Returns the sequence of the complex as NUPACK expects. The
first line is the number of independent strands, and the last
line determines how many times each strand appears."""
i, j = state
if i == j:
return ('1\n' + self.strand1.sequence + '\n1\n')
else:
return ('2\n' + self.dangleleft +self.strand2.sequence +self.dangleright+ '\n' + self.strand1.sequence + '\n1 2 \n')
def num_complex(self) :
"""counts the number of complexes in each state """
self.n_complex = dict()
for state in self.statespace:
i, j = state
self.n_complex[state] = 2 if i == j else 1
def calculate_energy(self):
ParentComplex.calculate_energy( self, AUXILIARY_NUPACK_FILE + "/helix"+str(self.name))
def allowed_state(self, state):
"""Check that a state is allowed."""
i, j= state
return 0 <= i <= j <= self.L
def possible_states(self, state):
"""Returns the neighbors of state"""
i, j= state
if (i == j):
states = [(n, n + 1) for n in range(0, self.L)]
else:
states = [(i - 1, j),
(i + 1, j),
(i, j - 1),
(i, j + 1)]
removed = False
removeList = []
for s in states :
if s[0] == s[1] and 0 < s[0] <= self.L :
removeList.append((s[0],s[1]))
removed= True
for s in removeList:
states.remove(s )
if removed == True :
states.append((0,0))
return filter(self.allowed_state, states)
def initial_final_state_config(self ):
"""sets the initial and final state for helix association (zip == True) and helix dissociation (zip == False ) """
if self.zip == True :
initialStateConfig = (0, 0 )
finalStateConfig = (0, self.L)
if self.zip == False:
initialStateConfig = (0, self.L )
finalStateConfig = (0, 0)
return [initialStateConfig, finalStateConfig]
def main(myPickles, real_rate, theta, beta, zip, T, concentration, sodium, magnesium, dangle, dataset_name, docID ,name ):
if name ==myenums.DatasetName.REYNALDODISSOCIATE.value :
dangleleft = "GAA"
dangleright =dangle [len(dangleleft) + len(beta): ]
else :
dangleleft = ""
dangleright=""
strand = MyStrand(beta)
strandComplement = strand.complement
helix_complex = HelixComplex( myPickles, dangleleft, dangleright, theta, zip, strand, strandComplement, T, concentration, sodium, magnesium , dataset_name, docID, name)
return helix_complex.find_answers(concentration,real_rate, zip )
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
95665
|
<reponame>lilloraffa/covid19-model
import math
import numpy as np
from .model import *
class Param:
def __init__(self, par_name, par_min = -1*math.inf, par_max = math.inf):
self.par_name = par_name
self.par_min = par_min
self.par_max = par_max
class GridParam:
def __init__(self):
self.param = {}
self.paramGrid = {}
def setGrid(self, param, grid_avg, grid_min, grid_max, steps):
if (param.par_max is not None) and (param.par_min is not None):
val_max = grid_max if grid_max <= param.par_max else param.par_max
val_min = grid_min if grid_min >= param.par_min else param.par_min
else:
val_max = grid_max
val_min = grid_min
self.paramGrid[param.par_name] = self.getList(val_min, val_max, grid_avg, steps)
self.param[param.par_name] = param
def setGridList(self, param, grid_list):
self.paramGrid[param.par_name] = grid_list
self.param[param.par_name] = param
def getList(self, val_min, val_max, val_avg, steps):
mod = (steps - 1) % 2
steps_half = (steps-1)/2
gridList = []
steps_min = math.floor(steps_half)
steps_max = math.floor(steps_half)
if(mod > 0):
if((val_max-val_avg) > (val_avg - val_min)):
steps_min = math.floor(steps_half) + 1
else:
steps_max = math.floor(steps_half) + 1
if steps > 2:
if (steps_min>0 and steps_max>0 and (val_avg - val_min)>0 and (val_max - val_avg)>0):
gridList = np.arange(val_min, val_avg, (val_avg - val_min)/(steps_min)).tolist()
gridList = gridList + np.arange(val_avg, val_max, (val_max - val_avg)/(steps_max)).tolist()
gridList.append(val_max)
else:
gridList = [val_min, val_avg, val_max]
elif steps == 2:
gridList = [val_min, val_max]
else:
gridList = [val_avg]
return gridList
def getGrid(self, par_name, constr_min = None, constr_max = None, delta_min=0):
if par_name in self.paramGrid.keys():
grid = self.paramGrid[par_name]
if grid[0] is not None:
if isinstance(grid[0], list):
#for i in range(0,len(grid)):
# grid[i] = [ x for x in grid[i] if (x >= self.param[par_name].par_min and x <= self.param[par_name].par_max)]
#res =
res = []
for elem in grid:
if constr_min is not None:
if(elem[0]< constr_min):
delta = constr_min - elem[0] + delta_min
elem = [x + delta for x in elem]
if constr_max is not None:
if(elem[len(elem)-1] > constr_max):
elem = [x for x in elem if x<= constr_max]
res.append([ x for x in elem if (x >= self.param[par_name].par_min and x <= self.param[par_name].par_max)])
return res
else:
if constr_min is not None:
if(grid[0]< constr_min):
delta = constr_min - grid[0] + delta_min
grid = [x + delta for x in grid]
if constr_max is not None:
if(grid[len(grid)-1] > constr_max):
grid = [x for x in grid if x<= constr_max]
return [ x for x in grid if (x >= self.param[par_name].par_min and x <= self.param[par_name].par_max)]
else:
return [None]
else:
return [None]
def getParamList(param_list_init = None, exclude = None):
mod_generic = Model()
exclude_param = ['Pop_tot', 'Igci_t0', 'Igcn_t0', 'M_t0', 'Ggci_t0', 'Ggcn_t0', 'Gas_t0']
param_list = mod_generic.params.keys()
if param_list_init is not None and param_list_init != []:
param_list = param_list_init
if exclude is not None:
exclude_param = exclude_param + exclude
return [x for x in param_list if x not in exclude_param], exclude_param
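# Usage sketch (illustrative parameter name and bounds):
# gp = GridParam()
# beta = Param('beta', par_min=0.0, par_max=1.0)
# gp.setGrid(beta, grid_avg=0.3, grid_min=0.1, grid_max=0.6, steps=5)
# print(gp.getGrid('beta'))   # five values spread around 0.3, clipped to [0.0, 1.0]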
|
StarcoderdataPython
|
1741549
|
class RemoteOperationExecutionException(Exception):
pass
|
StarcoderdataPython
|
3327773
|
<reponame>abourget/formalchemy-abourget<filename>pylonsapp/pylonsapp/model/__init__.py
"""The application's model objects"""
import sqlalchemy as sa
from sqlalchemy import orm
from pylonsapp.model import meta
def init_model(engine):
"""Call me before using any of the tables or classes in the model"""
## Reflected tables must be defined and mapped here
#global reflected_table
#reflected_table = sa.Table("Reflected", meta.metadata, autoload=True,
# autoload_with=engine)
#orm.mapper(Reflected, reflected_table)
sm = orm.sessionmaker(autoflush=True, autocommit=False, bind=engine)
meta.engine = engine
meta.Session = orm.scoped_session(sm)
foo_table = sa.Table("Foo", meta.metadata,
sa.Column("id", sa.types.Integer, primary_key=True),
sa.Column("bar", sa.types.String(255), nullable=False),
)
class Foo(object):
pass
orm.mapper(Foo, foo_table)
files_table = sa.Table("Files", meta.metadata,
sa.Column("id", sa.types.Integer, primary_key=True),
sa.Column("path", sa.types.String(255), nullable=False),
)
class Files(object):
pass
orm.mapper(Files, files_table)
animals_table = sa.Table("Animals", meta.metadata,
sa.Column("id", sa.types.Integer, primary_key=True),
sa.Column("name", sa.types.String(255), nullable=False),
sa.Column("owner_id", sa.ForeignKey('Owners.id'), nullable=False),
)
class Animal(object):
def __unicode__(self):
return self.name
orm.mapper(Animal, animals_table)
owners_table = sa.Table("Owners", meta.metadata,
sa.Column("id", sa.types.Integer, primary_key=True),
sa.Column("name", sa.types.String(255), nullable=False),
)
class Owner(object):
def __unicode__(self):
return self.name
orm.mapper(Owner, owners_table, properties=dict(
animals=orm.relation(Animal,
backref=orm.backref('owner', uselist=False))
))
## Non-reflected tables may be defined and mapped at module level
#foo_table = sa.Table("Foo", meta.metadata,
# sa.Column("id", sa.types.Integer, primary_key=True),
# sa.Column("bar", sa.types.String(255), nullable=False),
# )
#
#class Foo(object):
# pass
#
#orm.mapper(Foo, foo_table)
## Classes for reflected tables may be defined here, but the table and
## mapping itself must be done in the init_model function
#reflected_table = None
#
#class Reflected(object):
# pass
|
StarcoderdataPython
|
1674860
|
import bpy
class MESH_UL_mylist(bpy.types.UIList):
# Constants (flags)
# Be careful not to shadow FILTER_ITEM (i.e. UIList().bitflag_filter_item)!
# E.g. VGROUP_EMPTY = 1 << 0
# Custom properties, saved with .blend file. E.g.
# use_filter_empty = bpy.props.BoolProperty(name="Filter Empty", default=False, options=set(),
# description="Whether to filter empty vertex groups")
# Called for each drawn item.
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag):
# 'DEFAULT' and 'COMPACT' layout types should usually use the same draw code.
if self.layout_type in {'DEFAULT', 'COMPACT'}:
pass
# 'GRID' layout type should be as compact as possible (typically a single icon!).
elif self.layout_type in {'GRID'}:
pass
# Called once to draw filtering/reordering options.
def draw_filter(self, context, layout):
# Nothing much to say here, it's usual UI code...
pass
# Called once to filter/reorder items.
def filter_items(self, context, data, propname):
# This function gets the collection property (as the usual tuple (data, propname)), and must return two lists:
        # * The first one is for filtering, it must contain 32bit integers where self.bitflag_filter_item marks the
# matching item as filtered (i.e. to be shown), and 31 other bits are free for custom needs. Here we use the
# first one to mark VGROUP_EMPTY.
# * The second one is for reordering, it must return a list containing the new indices of the items (which
# gives us a mapping org_idx -> new_idx).
# Please note that the default UI_UL_list defines helper functions for common tasks (see its doc for more info).
# If you do not make filtering and/or ordering, return empty list(s) (this will be more efficient than
# returning full lists doing nothing!).
# Default return values.
flt_flags = []
flt_neworder = []
# Do filtering/reordering here...
return flt_flags, flt_neworder
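# Illustrative sketch of a name-based filter for filter_items, using the stock
# helper mentioned in the comments above (assumes the listed items expose a "name" property):
# flt_flags = bpy.types.UI_UL_list.filter_items_by_name(
#     self.filter_name, self.bitflag_filter_item, getattr(data, propname), "name")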
|
StarcoderdataPython
|
3302383
|
#!/usr/bin/python3
#Code written by
# _ _ __
# ___ | |_ _ __ / | / _| ___
# / __|| __|| '__|| || |_ / _ \
# \__ \| |_ | | | || _|| __/
# |___/ \__||_| |_||_| \___|
#
# A simple object-oriented implementation of elementary cellular automata.
# Usage: create an instance of the WolframCA class with the arguments
# rule - decimal representation of the Wolfram code (int from 0 to 255)
# height - number of automaton iterations (int from 1 to inf)
# width - number of cells in one row of the automaton (int from 1 to inf)
# Then call the run() method
#
# Example:
#
# rule101 = WolframCA(101, 500, 500)
# rule101.run()
#
# Note: the rules are run from a random initial configuration
import numpy as np
import matplotlib.pyplot as plt
class WolframCA:
def __init__(self, rule: int, height: int, width: int) -> None:
self.rule = rule
self.height = height
self.width = width
self.rule_2 = None
self.prev_state = None
self.initial_state = None
def set_rule(self) -> str:
num = str(bin(self.rule))
if (len(num) - 2) < 8:
missing = 8 - (len(num) - 2)
num = '0' * missing + num[2:]
return num
else:
num = num[2:]
return num
def get_rule(self) -> None:
self.rule_2 = self.set_rule()
def set_initial_state(self) -> np.ndarray:
return np.random.randint(2, size=self.width)
def get_initial_state(self) -> None:
self.initial_state = self.set_initial_state()
def read_state(self, prev: int, nxt: int, curr: int) -> int:
if prev == 1 and curr == 1 and nxt == 1:
return int(self.rule_2[0])
elif prev == 1 and curr == 1 and nxt == 0:
return int(self.rule_2[1])
elif prev == 1 and curr == 0 and nxt == 1:
return int(self.rule_2[2])
elif prev == 1 and curr == 0 and nxt == 0:
return int(self.rule_2[3])
elif prev == 0 and curr == 1 and nxt == 1:
return int(self.rule_2[4])
elif prev == 0 and curr == 1 and nxt == 0:
return int(self.rule_2[5])
elif prev == 0 and curr == 0 and nxt == 1:
return int(self.rule_2[6])
else:
return int(self.rule_2[7])
def get_new_state(self, i) -> np.ndarray:
new_state = np.zeros((1, self.width))[0]
if i == 0:
self.prev_state = self.initial_state
for j in range(self.width):
if j == 0:
new_state[j] = self.read_state(0, self.prev_state[j+1], self.prev_state[j])
elif j == self.width - 1:
new_state[j] = self.read_state(self.prev_state[j-1], 0, self.prev_state[j])
else:
new_state[j] = self.read_state(self.prev_state[j-1], self.prev_state[j+1], self.prev_state[j])
self.prev_state = new_state
return new_state
def draw_config(self, matr) -> None:
plt.imshow(matr, cmap="Greys", interpolation="nearest")
plt.show()
def run(self) -> None:
self.get_rule()
self.get_initial_state()
config = self.initial_state
for i in range(self.height):
new_state = self.get_new_state(i)
config = np.vstack((config, new_state))
self.draw_config(config)
if __name__ == "__main__":
    rule30 = WolframCA(30, 300, 300)
    rule30.run()
|
StarcoderdataPython
|
3287171
|
# -*- coding: utf-8 -*-
#
# Copyright 2019 <NAME>. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import unicode_literals
from prompt_toolkit import PromptSession
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from prompt_toolkit.history import FileHistory, InMemoryHistory, ThreadedHistory
from kafkashell.bindings import get_bindings
from kafkashell.completer import KafkaCompleter
from kafkashell.config import get_user_history_path
from kafkashell.executor import Executor
from kafkashell.settings import Settings
from kafkashell.style import style
from kafkashell.toolbar import Toolbar
def main():
settings = Settings()
bindings = get_bindings(settings)
executor = Executor(settings)
toolbar = Toolbar(settings)
completer = KafkaCompleter(settings) if settings.enable_auto_complete else None
suggester = AutoSuggestFromHistory() if settings.enable_auto_suggest else None
history = ThreadedHistory(FileHistory(get_user_history_path())) if settings.enable_history else InMemoryHistory()
session = PromptSession(completer=completer, style=style, bottom_toolbar=toolbar.handler,
key_bindings=bindings, history=history, include_default_pygments_style=False)
while True:
try:
command = session.prompt([("class:operator", "> ")], auto_suggest=suggester)
except KeyboardInterrupt:
continue
except EOFError:
break
else:
executor.execute(command)
settings.save_settings()
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
1697967
|
#!/usr/bin/python3
# <NAME> @2013
# steinkirch at gmail
''' using sets '''
def intersection_two_arrays_sets(seq1, seq2):
    ''' find the intersection of two arrays using set properties '''
    set1 = set(seq1)
    set2 = set(seq2)
    return set1.intersection(set2)  # same as set1 & set2
''' using merge sort '''
def intersection_two_arrays_ms(seq1, seq2):
''' find the intersection of two arrays using merge sort '''
res = []
while seq1 and seq2:
if seq1[-1] == seq2[-1]:
res.append(seq1.pop())
seq2.pop()
elif seq1[-1] > seq2[-1]:
seq1.pop()
else:
seq2.pop()
res.reverse()
return res
''' using binary search '''
def binary_search(seq, key, lo=0, hi=None):
''' binary search iterative algorithm '''
hi = hi or len(seq)
while lo < hi:
mid = (hi+lo) // 2
if seq[mid] == key:
return True
elif key > seq[mid]:
lo = mid + 1
else:
hi = mid
return None
def intersection_two_arrays_bs(seq1, seq2):
    ''' if A is small and B is large, we can do a binary search on each entry in B '''
    ''' only works if sorted and the small sequence has no larger numbers! '''
if len(seq1) > len(seq2): seq, key = seq1, seq2
else: seq, key = seq2, seq1
intersec = []
for item in key:
if binary_search(seq, item):
intersec.append(item)
return intersec
def test_intersection_two_arrays(module_name='this module'):
seq1 = [1,2,3,5,7,8]
seq2 = [3,5,6]
assert(set(intersection_two_arrays_sets(seq1,seq2)) == set([3,5]))
assert(intersection_two_arrays_bs(seq1,seq2) == [3,5])
assert(intersection_two_arrays_ms(seq1,seq2) == [3,5])
s = 'Tests in {name} have {con}!'
print(s.format(name=module_name, con='passed'))
if __name__ == '__main__':
test_intersection_two_arrays()
|
StarcoderdataPython
|
3321820
|
from sys import stderr
class Best:
def __init__(self, phase, metric, file=stderr):
super().__init__()
self.phase = phase
self.metric = metric
self.file = file
self.best = None
self.state_dict = None
def step(self, epoch, model):
loss = epoch[self.phase][self.metric][-1]
if self.best is None or loss < self.best:
self.best = loss
self.state_dict = model.state_dict()
self.file.write('Checkpoint.\n')
def load_state_dict(self, model):
model.load_state_dict(self.state_dict)
def reset(self):
self.best = None
self.state_dict = None
def __repr__(self):
return f'Best {self.phase} {self.metric}: {self.best:.4f}'
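# A minimal usage sketch (not part of the original module). It assumes a model
# object exposing state_dict()/load_state_dict(), e.g. a torch.nn.Module, and an
# `epoch` dict of per-phase metric histories as consumed by step(); the dummy
# model below is a stand-in purely for illustration.
if __name__ == '__main__':
    class _DummyModel:
        def __init__(self):
            self._weights = {'w': 0.0}
        def state_dict(self):
            return dict(self._weights)
        def load_state_dict(self, state):
            self._weights = dict(state)
    model = _DummyModel()
    best = Best(phase='val', metric='loss')
    epoch = {'val': {'loss': []}}
    for i, val_loss in enumerate((0.9, 0.7, 0.8)):
        model._weights['w'] = float(i)
        epoch['val']['loss'].append(val_loss)
        best.step(epoch, model)   # checkpoints whenever the tracked metric improves
    best.load_state_dict(model)   # restore the weights saved at the best epoch
    print(best)                   # Best val loss: 0.7000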
|
StarcoderdataPython
|
153099
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class TuitionQueryOrder(object):
def __init__(self):
self._alipay_payment_id = None
self._isv_payment_id = None
@property
def alipay_payment_id(self):
return self._alipay_payment_id
@alipay_payment_id.setter
def alipay_payment_id(self, value):
self._alipay_payment_id = value
@property
def isv_payment_id(self):
return self._isv_payment_id
@isv_payment_id.setter
def isv_payment_id(self, value):
self._isv_payment_id = value
def to_alipay_dict(self):
params = dict()
if self.alipay_payment_id:
if hasattr(self.alipay_payment_id, 'to_alipay_dict'):
params['alipay_payment_id'] = self.alipay_payment_id.to_alipay_dict()
else:
params['alipay_payment_id'] = self.alipay_payment_id
if self.isv_payment_id:
if hasattr(self.isv_payment_id, 'to_alipay_dict'):
params['isv_payment_id'] = self.isv_payment_id.to_alipay_dict()
else:
params['isv_payment_id'] = self.isv_payment_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = TuitionQueryOrder()
if 'alipay_payment_id' in d:
o.alipay_payment_id = d['alipay_payment_id']
if 'isv_payment_id' in d:
o.isv_payment_id = d['isv_payment_id']
return o
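# A minimal usage sketch (not part of the original file): round-trip an order
# through the plain-dict representation used by the SDK. The payment ids below
# are made-up illustrative values.
if __name__ == '__main__':
    order = TuitionQueryOrder()
    order.alipay_payment_id = '2021071322001'
    order.isv_payment_id = 'isv-0001'
    as_dict = order.to_alipay_dict()
    print(json.dumps(as_dict))
    restored = TuitionQueryOrder.from_alipay_dict(as_dict)
    print(restored.alipay_payment_id, restored.isv_payment_id)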
|
StarcoderdataPython
|
3266660
|
<gh_stars>1-10
from decimal import Decimal as D
import datetime
from django.conf import settings
from django.utils import unittest
from django.core.exceptions import ValidationError
from oscar.apps.product.models import Item, ItemClass
from oscar.apps.partner.models import Partner, StockRecord
from oscar.test.helpers import create_product
class DummyWrapper(object):
def availability(self, stockrecord):
return 'Dummy response'
def dispatch_date(self, stockrecord):
return "Another dummy response"
class StockRecordTests(unittest.TestCase):
def setUp(self):
self.product = create_product(price=D('10.00'))
def test_get_price_incl_tax_defaults_to_no_tax(self):
self.assertEquals(D('10.00'), self.product.stockrecord.price_incl_tax)
def test_get_price_excl_tax_returns_correct_value(self):
self.assertEquals(D('10.00'), self.product.stockrecord.price_excl_tax)
class DefaultWrapperTests(unittest.TestCase):
def test_default_wrapper_for_in_stock(self):
product = create_product(price=D('10.00'), partner="Acme", num_in_stock=10)
self.assertEquals("In stock (10 available)", product.stockrecord.availability)
def test_default_wrapper_for_out_of_stock(self):
product = create_product(price=D('10.00'), partner="Acme", num_in_stock=0)
self.assertEquals("Out of stock", product.stockrecord.availability)
def test_dispatch_date_for_in_stock(self):
product = create_product(price=D('10.00'), partner="Acme", num_in_stock=1)
tomorrow = datetime.date.today() + datetime.timedelta(days=1)
self.assertEquals(tomorrow, product.stockrecord.dispatch_date)
def test_dispatch_date_for_out_of_stock(self):
product = create_product(price=D('10.00'), partner="Acme", num_in_stock=0)
date = datetime.date.today() + datetime.timedelta(days=7)
self.assertEquals(date, product.stockrecord.dispatch_date)
class CustomWrapperTests(unittest.TestCase):
def setUp(self):
self._old_setting = settings.OSCAR_PARTNER_WRAPPERS
settings.OSCAR_PARTNER_WRAPPERS = {
'Acme': 'oscar.apps.partner.tests.DummyWrapper'
}
def tearDown(self):
settings.OSCAR_PARTNER_WRAPPERS = self._old_setting
def test_wrapper_availability_gets_called(self):
product = create_product(price=D('10.00'), partner="Acme", num_in_stock=10)
self.assertEquals("Dummy response", product.stockrecord.availability)
def test_wrapper_dispatch_date_gets_called(self):
product = create_product(price=D('10.00'), partner="Acme", num_in_stock=10)
self.assertEquals("Another dummy response", product.stockrecord.dispatch_date)
|
StarcoderdataPython
|
1746604
|
<reponame>EnjoyLifeFund/py36pkgs
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
from .container_service_custom_profile import ContainerServiceCustomProfile
from .container_service_service_principal_profile import ContainerServiceServicePrincipalProfile
from .container_service_orchestrator_profile import ContainerServiceOrchestratorProfile
from .container_service_master_profile import ContainerServiceMasterProfile
from .container_service_agent_pool_profile import ContainerServiceAgentPoolProfile
from .container_service_windows_profile import ContainerServiceWindowsProfile
from .container_service_ssh_public_key import ContainerServiceSshPublicKey
from .container_service_ssh_configuration import ContainerServiceSshConfiguration
from .container_service_linux_profile import ContainerServiceLinuxProfile
from .container_service_vm_diagnostics import ContainerServiceVMDiagnostics
from .container_service_diagnostics_profile import ContainerServiceDiagnosticsProfile
from .container_service import ContainerService
from .container_service_paged import ContainerServicePaged
from .container_service_client_enums import (
ContainerServiceOrchestratorTypes,
ContainerServiceVMSizeTypes,
)
__all__ = [
'Resource',
'ContainerServiceCustomProfile',
'ContainerServiceServicePrincipalProfile',
'ContainerServiceOrchestratorProfile',
'ContainerServiceMasterProfile',
'ContainerServiceAgentPoolProfile',
'ContainerServiceWindowsProfile',
'ContainerServiceSshPublicKey',
'ContainerServiceSshConfiguration',
'ContainerServiceLinuxProfile',
'ContainerServiceVMDiagnostics',
'ContainerServiceDiagnosticsProfile',
'ContainerService',
'ContainerServicePaged',
'ContainerServiceOrchestratorTypes',
'ContainerServiceVMSizeTypes',
]
|
StarcoderdataPython
|
1770268
|
"""
cl_sii "extras" / Django REST Framework (DRF) fields.
(for serializers)
"""
try:
import rest_framework
except ImportError as exc: # pragma: no cover
raise ImportError("Package 'djangorestframework' is required to use this module.") from exc
import rest_framework.fields
from cl_sii.rut import Rut
class RutField(rest_framework.fields.CharField):
"""
DRF field for RUT.
Data types:
* native/primitive/internal/deserialized: :class:`cl_sii.rut.Rut`
* representation/serialized: str, same as for DRF field
:class:`rest_framework.fields.CharField`
It verifies only that the input is syntactically valid; it does NOT check
that the value is within boundaries deemed acceptable by the SII.
The field performs some input value cleaning when it is an str;
for example ``' 1.111.111-k \t '`` is allowed and the resulting value
is ``Rut('1111111-K')``.
.. seealso::
:class:`.dj_model_fields.RutField` and :class:`.mm_fields.RutField`
Implementation partially inspired in
:class:`rest_framework.fields.UUIDField`.
"""
default_error_messages = {
'invalid': "'{value}' is not a syntactically valid RUT.",
}
def to_internal_value(self, data: object) -> Rut:
"""
Deserialize.
> Restore a primitive datatype into its internal python representation.
:raises rest_framework.exceptions.ValidationError:
if the data can't be converted
"""
if isinstance(data, Rut):
converted_data = data
else:
try:
if isinstance(data, str):
converted_data = Rut(data, validate_dv=False)
else:
self.fail('invalid', value=data)
except (AttributeError, TypeError, ValueError):
self.fail('invalid', value=data)
return converted_data
def to_representation(self, value: Rut) -> str:
"""
Serialize.
> Convert the initial datatype into a primitive, serializable datatype.
"""
return value.canonical
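# A minimal usage sketch (not part of the original module), exercising the field
# directly; in a real project the field would sit inside a DRF serializer. The
# input value is the cleaning example from the class docstring.
if __name__ == '__main__':
    field = RutField()
    rut = field.to_internal_value(' 1.111.111-k \t ')
    print(rut.canonical)                  # '1111111-K'
    print(field.to_representation(rut))   # same canonical string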
|
StarcoderdataPython
|
146495
|
<filename>app.py<gh_stars>0
import Services
import Repositories
import config
from flask_restx import Resource, Api
from flask import Flask
app = Flask(__name__)
api = Api(app)
@api.route('/get_word_count/<int:threshold>')
class MainClass(Resource):
def get(self, threshold):
scanner = Services.DirectoryScanner(config.DIRECTORIES_TO_SCAN)
interesting_service = Services.InterestingService(config.DEFAULT_INTERESTING_WEIGHT)
document_parser = Services.DocumentParser()
repo = Repositories.TxtRepository()
counting_service = Services.WordCountingService(document_parser, interesting_service, threshold)
for file in scanner.scan_files():
for line in repo.read_file(file):
for sentence in document_parser.split_to_sentences(line):
counting_service.populate(sentence, file)
return counting_service.get_word_count()
if __name__ == '__main__':
app.config.from_object('config')
app.run(port=config.PORT_NUMBER)
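# A hedged usage sketch (not part of the original file): with the server running,
# the endpoint can be queried over HTTP; the localhost host and the threshold
# value below are assumptions for illustration.
#   import requests
#   resp = requests.get('http://localhost:%d/get_word_count/3' % config.PORT_NUMBER)
#   print(resp.json())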
|
StarcoderdataPython
|
156511
|
<filename>tests/optimise.py
import sys
import os
import numpy as np
def run_cci(year, stock, window, up, down):
import test_cci
return test_cci.test(year, stock, window, up, down, get_plots=False, verbose=False)
def run_sma(year, stock, window, up, down):
import test_sma
return test_sma.test(year, stock, window, up, down, get_plots=False, verbose=False)
def run_ema(year, stock, window, up, down):
import test_ema
return test_ema.test(year, stock, window, up, down, get_plots=False, verbose=False)
def run_dema(year, stock, window, up, down):
import test_dema
return test_dema.test(year, stock, window, up, down, get_plots=False, verbose=False)
def run_tema(year, stock, window, up, down):
import test_tema
return test_tema.test(year, stock, window, up, down, get_plots=False, verbose=False)
test_function = {
"cci" : run_cci,
"sma" : run_sma,
"ema" : run_ema,
"dema" : run_dema,
"tema" : run_tema,
}
def run_optimisation(algo, year, stock, window, up, down):
year_list = []
stock_list = []
sharpes = []
sortinos = []
rois = []
if year == "all":
year_list = map(str, list(range(2000, 2018)))
else:
year_list = [year]
for yr in year_list:
stocks_avail = os.listdir("../Historical Data/%s/" %(yr))
stocks_avail = [x.split("-")[0] for x in stocks_avail]
if stock == "all":
stock_list = stocks_avail
else:
stock_list = [stock]
for stck in stock_list:
if stck not in stocks_avail:
print("Data for stock %s not available for year %s" %(stck, yr))
continue
sharpe, sortino, roi = test_function[algo](yr, stck, window, up, down)
sharpes.append(sharpe)
sortinos.append(sortino)
rois.append(roi)
sharpes = np.array(sharpes)
sortinos = np.array(sortinos)
mean_sharpe = np.mean(sharpes)
mean_sortino = np.mean(sortinos)
std_sharpe = np.std(sharpes)
std_sortino = np.std(sortinos)
return mean_sharpe, std_sharpe, mean_sortino, std_sortino
def optimise(algo, year, stock, window, up, down):
algo_list = []
if algo == "all":
algo_list = test_function.keys()
else:
if algo not in test_function.keys():
print("Algo %s is not available" %(algo))
return
return run_optimisation(algo, year, stock, window, up, down)
for alg in algo_list:
mean_sharpe, std_sharpe, mean_sortino, std_sortino = run_optimisation(alg, year, stock, window, up, down)
print(" Values in order ", mean_sharpe, stddev_sharpe ,mean_sortino, std_sortino)
return
if __name__ == "__main__":
if(len(sys.argv) != 7):
print("Invalid input")
sys.exit(1)
optimise(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5], sys.argv[6])
|
StarcoderdataPython
|
3242472
|
# Generated by Django 3.1.12 on 2021-07-13 13:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('action_plans', '0008_actionplan_status'),
]
operations = [
migrations.AddField(
model_name='actionplan',
name='strategic_context',
field=models.TextField(blank=True, default=''),
),
migrations.AddField(
model_name='actionplantask',
name='outcome',
field=models.TextField(blank=True, default=''),
),
]
|
StarcoderdataPython
|
1770004
|
<reponame>patelgaurank/SocialSponsorDjangoAPI
from django.shortcuts import render
from django.http import HttpResponse
from django.views.generic import ListView
from django.views import View
# from django.contrib.auth.models import User, Group
from django.views.decorators.csrf import csrf_exempt
from django.http import JsonResponse
from django.shortcuts import get_object_or_404
from django.http import Http404
from rest_framework import viewsets, permissions, status
from rest_framework import response
from rest_framework import views
from rest_framework.decorators import api_view, permission_classes, action
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework.permissions import DjangoModelPermissionsOrAnonReadOnly, DjangoObjectPermissions
from rest_framework.response import Response
from rest_framework.renderers import TemplateHTMLRenderer
from rest_framework.views import APIView
from rest_framework import authentication, permissions
from rest_framework import mixins
from rest_framework import generics
from .serializers import UserSerializer, GroupSerializer, UserDataSerializer, AllUsersSerializer, AllUrlSerializer
from .models import UserData, url
from users.models import NewUser
from datetime import datetime
""" Concrete View Classes
# CreateAPIView
Used for create-only endpoints.
# ListAPIView
Used for read-only endpoints to represent a collection of model instances.
# RetrieveAPIView
Used for read-only endpoints to represent a single model instance.
# DestroyAPIView
Used for delete-only endpoints for a single model instance.
# UpdateAPIView
Used for update-only endpoints for a single model instance.
# ListCreateAPIView
Used for read-write endpoints to represent a collection of model instances.
# RetrieveUpdateAPIView
Used for read or update endpoints to represent a single model instance.
# RetrieveDestroyAPIView
Used for read or delete endpoints to represent a single model instance.
# RetrieveUpdateDestroyAPIView
Used for read-write-delete endpoints to represent a single model instance.
"""
# @permission_classes((permissions.AllowAny,))
# @api_view(['GET', 'POST'])
class BackendAppOverView(viewsets.ModelViewSet):
permission_classes = [DjangoModelPermissionsOrAnonReadOnly]
queryset = url.objects.all()
# serializer_class = AllUrlSerializer
# def get_object(self):
# return get_object_or_404(url, **filter)
def get_serializer_class(self):
print(self.request.user.is_admin)
if self.request.user.is_admin:
return AllUrlSerializer
# def list(self, request, format=None, *args, **kwargs):
# queryset = self.filter_queryset(self.get_queryset())
# page = self.paginate_queryset(queryset)
# if page is not None:
# serializer = self.get_serializer(page, many=True)
# return self.get_paginated_response(serializer.data)
# serializer = self.get_serializer(queryset, many=True)
# # if request.user:
# # data = {'isLoggedIn': 'No', 'currentUser': '',
# # 'result': serializer.data}
# # else:
# # data = {'isLoggedIn': 'Yes', 'currentUser': '',
# # 'result': serializer.data}
# return Response(serializer.data)
# def retrieve(self, request, format=None, *args, **kwargs):
# instance = self.get_object()
# serializer = self.get_serializer(instance)
# return Response(serializer.data)
# def list(self, request):
# pass
# def create(self, request):
# pass
# def retrieve(self, request, pk=None):
# pass
# def update(self, request, pk=None):
# pass
# def partial_update(self, request, pk=None):
# pass
# def destroy(self, request, pk=None):
# pass
# @action(detail=False, methods=['get'])
# def retrieve(self, request, *args, **kwargs):
# data = {
# 'List': '/user-list/',
# 'Detail': '/user-detail/<str:pk>/',
# 'Create': '/user-create/',
# 'Update': '/user-update/<str:pk>/',
# 'Delete': '/user-delete/<str:pk>/',
# }
# return Response(data)
# @action(detail=False, methods=['get'])
# def list(self, request, *args, **kwargs):
# data = {
# 'List': '/user-list/',
# 'Detail': '/user-detail/<str:pk>/',
# 'Create': '/user-create/',
# 'Update': '/user-update/<str:pk>/',
# 'Delete': '/user-delete/<str:pk>/',
# }
# return Response(data)
class PostUserWritePermission(permissions.BasePermission):
message = 'Editing posts is restricted to the author only.'
def has_object_permission(self, request, view, obj):
        if request.method in permissions.SAFE_METHODS:
return True
return obj.author == request.user
class UserData(viewsets.ModelViewSet):
permission_classes = [DjangoModelPermissionsOrAnonReadOnly]
queryset = UserData.objects.all()
def get_object(self):
        return get_object_or_404(self.get_queryset(), pk=self.kwargs['pk'])
def get_serializer_class(self):
return UserDataSerializer
def list(self, request, format=None, *args, **kwargs):
print(request)
pageVisitor(request)
queryset = self.filter_queryset(self.get_queryset())
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True)
print(request.user)
if request.user:
data = {'isLoggedIn': 'No', 'currentUser': '',
'result': serializer.data}
else:
data = {'isLoggedIn': 'Yes', 'currentUser': '',
'result': serializer.data}
        return Response(data)
def retrieve(self, request, format=None, *args, **kwargs):
instance = self.get_object()
serializer = self.get_serializer(instance)
        return Response(serializer.data)
class CurrentUserViewSet(viewsets.ModelViewSet):
permission_classes = [DjangoModelPermissionsOrAnonReadOnly]
queryset = NewUser.objects.all()
serializer_class = AllUsersSerializer
def pageVisitor(*args, **kwargs):
request = args[0]
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[0]
else:
ip = request.META.get('REMOTE_ADDR')
# print('META KEY - %s' % request.META)
print('Ip - %s' % ip)
print('QueryDict - %s' % request.GET)
print('User - %s' % request.user)
print('Authenticated - %s' % request.auth)
print('Authenticator - %s' % request.authenticators)
print('data - %s' % request.data)
print('Method - %s' % request.method)
print('content_type - %s' % request.content_type)
print('Host - %s' % request.META['HTTP_REFERER']
if 'HTTP_REFERER' in request.META else 'None')
print('Page - %s' % request.META['PATH_INFO']
if 'PATH_INFO' in request.META else 'None')
print('Agent - %s' % request.META['HTTP_USER_AGENT']
if 'HTTP_USER_AGENT' in request.META else 'None')
print('session - %s' % request.session)
print('body - %s' % request.stream)
print(request.GET.items())
for k, v in request.GET.items():
print(k, v)
# class UserData(viewsets.ViewSet):
# permission_classes = [IsAuthenticated]
# userdata = UserData.objects.all()
# def list(self, request):
# serializer = UserDataSerializer(self.userdata, many=True)
# return Response(serializer.data)
# def create(self, request):
# serializer = UserDataSerializer(data=request.data)
# if serializer.is_valid():
# serializer.save()
# return Response(serializer.data, status=status.HTTP_201_CREATED)
# return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# def retrieve(self, request, pk=None):
# qs = get_object_or_404(self.userdata, pk=pk)
# serializer = UserDataSerializer(qs)
# return Response(serializer.data)
# def update(self, request, pk=None):
# qs = get_object_or_404(self.userdata, pk=pk)
# serializer = UserDataSerializer(qs, data=request.data)
# if serializer.is_valid():
# serializer.save()
# return Response(serializer.data)
# return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# def partial_update(self, request, pk=None):
# pass
# def destroy(self, request, pk=None):
# userdata = UserData.objects.get(pk=pk)
# userdata.delete()
# return Response(status=status.HTTP_204_NO_CONTENT)
# class ProductsListView(generics.ListCreateAPIView):
# @permission_classes((permissions.AllowAny,))
# class UserDataListView(APIView):
# """
# Retrieve, update or delete a product instance.
# """
# def get(self, request, format=None):
# print('get -----------------------')
# qs = UserData.objects.all()
# serializer = UserDataSerializer(qs, many=True)
# return Response(serializer.data)
# def post(self, request, format=None):
# print('update -----------------------')
# serializer = UserDataSerializer(data=request.data)
# if serializer.is_valid():
# serializer.save()
# return Response(serializer.data, status=status.HTTP_201_CREATED)
# return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# @permission_classes((permissions.AllowAny,))
# class UserDataDetailsView(APIView):
# """
# Retrieve, update or delete a snippet instance.
# """
# def get_object(self, pk):
# try:
# return UserData.objects.get(Product_Id=pk)
# except UserData.DoesNotExist:
# raise Http404
# def get(self, request, pk, format=None):
# print('Get -----------------------')
# qs = self.get_object(pk)
# serializer = UserDataSerializer(qs)
# return Response(serializer.data)
# def put(self, request, pk, format=None):
# print('put ----------------------- %s' % (pk))
# qs = self.get_object(pk)
# serializer = UserDataSerializer(qs, data=request.data)
# if serializer.is_valid():
# serializer.save()
# return Response(serializer.data)
# return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# def delete(self, request, pk, format=None):
# print('delete -----------------------')
# qs = self.get_object(pk)
# qs.delete()
# return Response(status=status.HTTP_204_NO_CONTENT)
# @permission_classes([IsAuthenticated])
# @api_view(['GET', 'POST'])
# def userdata_list(request):
# """
# List all code snippets, or create a new snippet.
# """
# if request.method == 'GET':
# userdata = UserData.objects.all()
# serializer = UserDataSerializer(userdata, many=True)
# return Response(serializer.data)
# elif request.method == 'POST':
# serializer = UserDataSerializer(data=request.data)
# print(request.data)
# if serializer.is_valid():
# serializer.save()
# return Response(serializer.data, status=status.HTTP_201_CREATED)
# return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# # @permission_classes((permissions.AllowAny,))
# @permission_classes([IsAuthenticated])
# @ api_view(['GET', 'PUT', 'DELETE'])
# def userdata_detail(request, pk):
# """
# Retrieve, update or delete a code snippet.
# """
# try:
# userdata = UserData.objects.get(pk=pk)
# except UserData.DoesNotExist:
# return Response(status=status.HTTP_404_NOT_FOUND)
# if request.method == 'GET':
# serializer = UserDataSerializer(userdata)
# return Response(serializer.data)
# elif request.method == 'PUT':
# serializer = UserDataSerializer(userdata, data=request.data)
# if serializer.is_valid():
# serializer.save()
# return Response(serializer.data)
# return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# elif request.method == 'DELETE':
# userdata.delete()
# return Response(status=status.HTTP_204_NO_CONTENT)
|
StarcoderdataPython
|
4808648
|
<reponame>3amon/twitter-robofact
import json, requests, html2text, re, nltk.data, time
sent_detector = nltk.data.load('tokenizers/punkt/english.pickle')
def MakeTweet(text):
sents = sent_detector.tokenize(text.strip().replace('\n', ' '))
result = ''
for sent in sents:
newres = result + sent
if len(newres) > 140:
return result
result = newres
return result
def FilterTweet(text):
if 'may refer to' in text.lower():
return ''
if '##' in text:
return ''
if not text.endswith('.'):
return ''
if text.endswith('a.k.a.'):
return ''
if text.lower().startswith('this is a list'):
return ''
if len(text) < 75:
return ''
return text
with open('wik-scrape.txt', 'w') as fhandle:
while(True):
#'https://en.wikipedia.org/w/api.php?action=query&generator=random&grnnamespace=0&prop=extracts&exchars=500&format=json'
get_random_pages_query = 'https://en.wikipedia.org/w/api.php?action=query&generator=random&grnnamespace=0&prop=extracts&exchars=500&format=json'
r = requests.get(get_random_pages_query)
j = r.json()
pages = j["query"]["pages"]
for page in pages:
extract = pages[page]["extract"]
text = html2text.html2text(extract)
try:
res = FilterTweet(MakeTweet(text))
if len(res) > 0:
fhandle.write(res)
fhandle.write('\n')
                    print(res)
                    print('')
except UnicodeEncodeError:
pass
time.sleep(0)
|
StarcoderdataPython
|
3319224
|
import numpy as np
import pandas as pd
FEATURES = [
'x',
'x_diff_1', 'x_diff_2','x_diff_3','x_diff_4',#'x_diff_5','x_diff_6',#'time_diff',
'norm_diff_1', 'norm_diff_2','norm_diff_3','norm_diff_4',
'mean_2','mean_4','mean_6',# 'mean_20', 'mean_50',
'std_2','std_4','std_6', #'std_20', 'std_50',
'norm_2','norm_4','norm_6', #'norm_20', 'norm_50',
'diff_with_mean_2','diff_with_mean_4','diff_with_mean_6',
'add_std_2', 'minus_std_2', 'add_2std_2', 'minus_2std_2', 'add_15std_2', 'minus_15std_2',
'add_std_4', 'minus_std_4', 'add_2std_4', 'minus_2std_4', 'add_15std_4', 'minus_15std_4',
'add_std_6', 'minus_std_6', 'add_2std_6', 'minus_2std_6', 'add_15std_6', 'minus_15std_6',
'x_log_relative', 'rolling_mean', 'rolling_mean_rel'
]
def preprocess_df(df,
_id,
chunk_len=32,
is_one_hot_y=False,
x_column='x',
y_column='y',
N_CLASS=2):
X = []
Y = []
# id2process_dct = {}
seq_df = compute_seq_features(df, _id, chunk_len=chunk_len)
# id2process_dct[_id] = seq_df
# seq_df = id2process_dct[_id]
seq_df = seq_df.fillna(0)
seq_len = len(seq_df)
for i in range(len(seq_df) - chunk_len):
slice_df = seq_df.iloc[i:i+chunk_len]
X.append(slice_df[FEATURES].values)
y = slice_df['y'].tolist()[len(slice_df) // 2]
if is_one_hot_y:
# y = tf.keras.utils.to_categorical(y, num_classes=N_CLASS, dtype='float32')
y = np.eye(N_CLASS, dtype='int')[y]
Y.append(y)
X = np.array(X, dtype='float32')
Y = np.array(Y, dtype='int')
return seq_df.time.values, X, seq_df.x.values, seq_df.y.values
def compute_seq_features(df, _id, chunk_len=32, aug=False):
seq_df = df[df.id==_id].reset_index(drop=True)
if aug:
seq_df.x = add_noize(seq_df.x.values)
x1 = np.mean(seq_df.x.values[:20])
x2 = np.mean(seq_df.x.values[-20:])
t0 = seq_df.time.values[0]
t1 = seq_df.time.values[-1]
start_df = []
for i in range(chunk_len // 2):
start_df.insert(0, [_id, t0 - (i + 1) * 600, x1, 0])
end_df = []
for i in range(chunk_len // 2):
end_df.append([_id, t1 + (i + 1) * 600, x2, 0])
start_df = pd.DataFrame(start_df, columns=['id', 'time', 'x', 'y'])
end_df = pd.DataFrame(end_df, columns=['id', 'time', 'x', 'y'])
seq_df = pd.concat([start_df, seq_df, end_df])
seq_df['x_relative'] = seq_df.x / seq_df.x.shift(1)
seq_df['x_log_relative'] = np.log(seq_df['x_relative'])
seq_df = seq_df.fillna(method='ffill')
seq_df['rolling_mean'] = seq_df['x'].rolling(window=5).max()
seq_df['rolling_mean_rel'] = seq_df['x_log_relative'].rolling(window=5).max()
seq_df['time_diff'] = seq_df.time.diff()
for i in range(12):
seq_df[f'x_diff_{i + 1}'] = seq_df.x.diff(i + 1).fillna(0)
for i in range(12):
seq_df[f'x_diff_front_{i + 1}'] = seq_df.x.diff(-(i + 1)).fillna(0)
#################################### скользящие средние и дисперсии ###########################
sizes = [2, 4, 6, 20, 50]
for i in sizes:
m, s = sliding(seq_df.x.values, i)
seq_df[f'mean_{i}'] = m
seq_df[f'std_{i}'] = s
seq_df[f'add_std_{i}'] = (np.array(m) + np.array(s)) - np.array(seq_df.x.values)
seq_df[f'minus_std_{i}'] = np.array(seq_df.x.values) - (np.array(m) - np.array(s))
seq_df[f'add_2std_{i}'] = (np.array(m) + np.array(s) / 2) - np.array(seq_df.x.values)
seq_df[f'minus_2std_{i}'] = np.array(seq_df.x.values) - (np.array(m) - np.array(s) / 2)
seq_df[f'add_15std_{i}'] = (np.array(m) + 1.5 * np.array(s)) - np.array(seq_df.x.values)
seq_df[f'minus_15std_{i}'] = np.array(seq_df.x.values) - (np.array(m) - 1.5 * np.array(s))
seq_df[f'norm_{i}'] = (seq_df.x.values - np.array(m)) / (np.array(s) + 1e-3)
seq_df[f'diff_with_mean_{i}'] = seq_df.x.values - np.array(m)
for i in range(12):
seq_df[f'norm_diff_{i + 1}'] = seq_df['norm_6'].diff(i + 1).fillna(0)
for i in range(12):
seq_df[f'norm_diff_front_{i + 1}'] = seq_df['norm_6'].diff(-(i + 1)).fillna(0)
return seq_df
def sliding(x, len_):
x = [x[0]] * (len_ // 2) + list(x) + [ x[-1] ] * (len_ // 2)
mean, std = [], []
for i in range(0, len(x) - len_, 1):
mean.append(np.mean(x[i : i + len_]))
std.append(np.std(x[i : i + len_]))
return mean, std
def add_noize(a):
return a + np.random.normal(0, 10, len(a))
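# A minimal usage sketch (not part of the original module): run the feature
# pipeline on a small synthetic series. The id/time/x/y column names follow what
# the functions above expect; the values themselves are made up.
if __name__ == '__main__':
    n = 200
    demo_df = pd.DataFrame({
        'id': ['sensor-1'] * n,
        'time': np.arange(n) * 600,
        'x': np.random.uniform(10, 20, n),
        'y': np.zeros(n, dtype=int),
    })
    times, X, x_raw, y_raw = preprocess_df(demo_df, 'sensor-1', chunk_len=32)
    print(X.shape, x_raw.shape, y_raw.shape)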
|
StarcoderdataPython
|
1646340
|
# The MIT License (MIT)
# Copyright (c) 2021 by Brockmann Consult GmbH and contributors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import glob
import os.path
from typing import List, Optional, Iterator, Callable, Union, Dict, Hashable
import xarray as xr
from .error import ConverterError
from .log import LOGGER
from .log import log_duration
class DatasetOpener:
def __init__(self,
input_paths: Union[str, List[str]],
*,
input_multi_file: bool = False,
input_sort_by: str = None,
input_decode_cf: bool = False,
input_concat_dim: str = None,
input_engine: str = None,
input_prefetch_chunks: bool = False):
self._input_paths = input_paths
self._input_multi_file = input_multi_file
self._input_sort_by = input_sort_by
self._input_decode_cf = input_decode_cf
self._input_concat_dim = input_concat_dim
self._input_engine = input_engine
self._input_prefetch_chunks = input_prefetch_chunks
def open_datasets(self, preprocess: Callable[[xr.Dataset], xr.Dataset] = None) \
-> Iterator[xr.Dataset]:
input_paths = self._resolve_input_paths()
chunks = self._prefetch_chunk_sizes(input_paths[0])
if self._input_multi_file:
return self._open_mfdataset(input_paths, chunks, preprocess)
else:
return self._open_datasets(input_paths, chunks, preprocess)
def _open_mfdataset(self,
input_paths: List[str],
chunks: Optional[Dict[Hashable, int]],
preprocess: Callable[[xr.Dataset], xr.Dataset] = None) \
-> xr.Dataset:
with log_duration(f'Opening {len(input_paths)} file(s)'):
ds = xr.open_mfdataset(input_paths,
engine=self._input_engine,
preprocess=preprocess,
concat_dim=self._input_concat_dim,
decode_cf=self._input_decode_cf,
chunks=chunks)
yield ds
def _open_datasets(self,
input_paths: List[str],
chunks: Optional[Dict[Hashable, int]],
preprocess: Callable[[xr.Dataset], xr.Dataset] = None) \
-> Iterator[xr.Dataset]:
n = len(input_paths)
for i in range(n):
input_file = input_paths[i]
LOGGER.info(f'Processing input {i + 1} of {n}: {input_file}')
with log_duration('Opening'):
ds = xr.open_dataset(input_file,
engine=self._get_engine(input_file),
decode_cf=self._input_decode_cf,
chunks=chunks)
if preprocess:
ds = preprocess(ds)
yield ds
def _prefetch_chunk_sizes(self, input_file: str) -> Optional[Dict[Hashable, int]]:
if not self._input_prefetch_chunks:
return None
with log_duration('Pre-fetching chunks'):
with xr.open_dataset(input_file,
engine=self._get_engine(input_file),
decode_cf=self._input_decode_cf) as ds:
chunk_sizes = dict()
for var in ds.data_vars.values():
sizes = var.encoding.get('chunksizes')
if sizes and len(sizes) == len(var.dims):
for dim, size in zip(var.dims, sizes):
chunk_sizes[dim] = max(size, chunk_sizes.get(dim, 0))
return chunk_sizes
def _get_engine(self, input_file: str) -> Optional[str]:
engine = self._input_engine
if not engine and input_file.endswith('.zarr') and os.path.isdir(input_file):
engine = 'zarr'
return engine
def _resolve_input_paths(self) -> List[str]:
input_files = self.resolve_input_paths(self._input_paths, self._input_sort_by)
if not input_files:
raise ConverterError('No inputs given.')
LOGGER.info(f'{len(input_files)} input(s) found:\n'
+ ('\n'.join(map(lambda f: f' {f[0]}: ' + f[1],
zip(range(len(input_files)), input_files)))))
return input_files
@classmethod
def resolve_input_paths(cls,
input_paths: Union[str, List[str]],
sort_by: str = None) -> List[str]:
if not input_paths:
return []
if isinstance(input_paths, str):
input_paths = [input_paths]
resolved_input_files = []
for input_path in input_paths:
input_path = os.path.expanduser(input_path)
if '*' in input_path or '?' in input_path:
glob_result = glob.glob(input_path, recursive=True)
if not glob_result:
raise ConverterError(f'No inputs found for wildcard: "{input_path}"')
resolved_input_files.extend(glob_result)
else:
if not os.path.exists(input_path):
raise ConverterError(f'Input not found: "{input_path}"')
resolved_input_files.append(input_path)
if sort_by:
# Get rid of doubles and sort
resolved_input_files = set(resolved_input_files)
if sort_by == 'path' or sort_by is True:
return sorted(resolved_input_files)
if sort_by == 'name':
return sorted(resolved_input_files, key=_sort_by_name_key)
raise ConverterError(f'Can sort by "path" or "name" only, got "{sort_by}".')
else:
# Get rid of doubles, but preserve order
seen_input_files = set()
unique_input_files = []
for input_file in resolved_input_files:
if input_file not in seen_input_files:
unique_input_files.append(input_file)
seen_input_files.add(input_file)
return unique_input_files
def _sort_by_name_key(path: str) -> str:
while path.endswith('/') or path.endswith(os.path.sep):
path = path[0:-1]
return os.path.basename(path)
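# A hedged usage sketch (not part of the original module): iterate over a set of
# NetCDF inputs one by one. The glob pattern and the preprocessing callable are
# assumptions for illustration, not defaults of the converter.
#   opener = DatasetOpener('inputs/**/*.nc', input_sort_by='name',
#                          input_prefetch_chunks=True)
#   for ds in opener.open_datasets(preprocess=lambda d: d.chunk()):
#       print(dict(ds.sizes))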
|
StarcoderdataPython
|
30305
|
"""This module contains exceptions defined for Rhasspy Desktop Satellite."""
class RDSatelliteServerError(Exception):
"""Base class for exceptions raised by Rhasspy Desktop Satellite code.
By catching this exception type, you catch all exceptions that are
    defined by the Rhasspy Desktop Satellite code."""
class ConfigurationFileNotFoundError(RDSatelliteServerError):
"""Raised when the configuration file is not found."""
def __init__(self, filename):
"""Initialize the exception with a string representing the filename."""
self.filename = filename
class NoDefaultAudioDeviceError(RDSatelliteServerError):
"""Raised when there's no default audio device available."""
def __init__(self, inout):
"""Initialize the exception with a string representing input or output.
"""
self.inout = inout
class UnsupportedPlatformError(RDSatelliteServerError):
"""Raised when the platform Rhasspy Desktop Satellite is running on is not
supported."""
def __init__(self, platform):
"""Initialize the exception with a string representing the platform."""
self.platform = platform
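# A minimal usage sketch (not part of the original module): any error raised by
# the package can be caught via the common base class. The configuration path
# below is a made-up illustrative value.
if __name__ == '__main__':
    try:
        raise ConfigurationFileNotFoundError('/etc/rdsatellite/config.json')
    except RDSatelliteServerError as error:
        print('Satellite error: %s (%s)' % (type(error).__name__, error.filename))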
|
StarcoderdataPython
|
147237
|
<reponame>Maxsparrow/cirrus<gh_stars>10-100
#!/usr/bin/env python
"""
_deploy_plugins_
Plugin helpers to talk to various deployment platforms,
Plugins should subclass the Deployer class, override the
build_parser to handle whatever CLI args they need and
also deploy to do the actual implementation.
Drop plugins in the cirrus.plugins.deployers dir to get them
picked up by the plugin factory
"""
import argparse
from cirrus.configuration import load_configuration
from pluggage.factory_plugin import PluggagePlugin
class Deployer(PluggagePlugin):
PLUGGAGE_FACTORY_NAME = 'deploy'
def __init__(self):
super(Deployer, self).__init__()
self.package_conf = load_configuration()
self.parser = argparse.ArgumentParser()
self.parser.add_argument('--plugin', '-p', dest='plugin', default=None)
def deploy(self, options):
"""
_deploy_
:param options: Instance of argparse namespace containing
the command line options values
"""
raise NotImplementedError(
"{0}.deploy not implemented".format(type(self).__name__)
)
def build_parser(self):
"""
_build_parser_
Hook for the plugin to build out its commandline tool
suite on self.parser.
The master CLI only uses the --plugin option to select the
plugin and then passes everything onto that plugin.
The plugin parser must allow --plugin, hence the addition
of that arg in the base class
"""
raise NotImplementedError(
"{0}.build_parser not implemented".format(type(self).__name__)
)
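# A hedged sketch of a concrete plugin (not part of this module): it only shows
# the two hooks the base class requires; the option name and the print-based
# "deploy" are illustrative stand-ins, not a real cirrus deployer, and
# instantiation still requires a valid cirrus configuration to be present.
#   class ExampleDeployer(Deployer):
#       def build_parser(self):
#           self.parser.add_argument('--target', dest='target', default='staging')
#           return self.parser
#
#       def deploy(self, options):
#           print("deploying package to {0}".format(options.target))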
|
StarcoderdataPython
|
175107
|
<gh_stars>100-1000
import numpy as np
from torch.utils.data import Dataset
from .seg import MaskSemSeg
from .filter import TargetFilter
from .sparse import SparseSeg
from .util import Wrapper
class ConditionalInstSeg(Wrapper, Dataset):
"""
Construct inputs (support image sparse annotations, query image)
and targets (query dense annotations) for a conditional segmentation model.
Args:
supp_ds: the `Dataset` to load support labels
qry_ds: the `Dataset` to load dense query labels as targets
Note that this class assumes the two input datasets contain
the same data in the same order.
"""
def __init__(self, qry_ds, supp_ds, shot=1):
super().__init__(qry_ds)
self.qry_ds = qry_ds
self.supp_ds = supp_ds
self.shot = shot
def __getitem__(self, idx):
# n.b. one epoch loads each image in the query dataset once
# load query image + target
qry_im, qry_tgt, aux = self.qry_ds[idx]
# load sparse input annotations
supp = []
supp_aux = []
for i in range(self.shot):
shot_im, shot_anno, shot_aux = self.supp_ds[idx]
stacked_anno = np.zeros((self.num_classes, *shot_anno.shape), dtype=np.float32)
for k in range(self.num_classes):
stacked_anno[k].flat[shot_anno.flat == k] = 1
supp.append((shot_im, stacked_anno))
supp_aux.append(shot_aux)
aux.update({'support': supp_aux})
return qry_im, supp, qry_tgt, aux
def __len__(self):
return len(self.qry_ds)
class ConditionalSemSeg(Wrapper, Dataset):
"""
Load inputs for conditional class segmentation network.
Args:
qry_ds: the `Dataset` to load query images and labels
supp_ds: dict of `Dataset`s indexed by the semantic class
from which they load data
"""
def __init__(self, qry_ds, supp_ds, shot=1):
super().__init__(qry_ds)
self.qry_ds = qry_ds
self.supp_datasets = supp_ds
self.cls2idx = {list(ds.keep)[0]: i for i, ds in enumerate(self.supp_datasets)}
self.shot = shot
@property
def num_classes(self):
return 2 # 0 == negative, 1 == positive
def __getitem__(self, idx):
qry_im, qry_tgt, aux = self.qry_ds[idx]
supp_ds = self.supp_datasets[self.cls2idx[aux['cls']]]
supp = []
supp_aux = []
for i in range(self.shot):
shot_im, shot_anno, shot_aux = supp_ds[np.random.randint(0, len(supp_ds))]
stacked_anno = np.zeros((self.num_classes, *shot_anno.shape), dtype=np.float32)
for k in range(self.num_classes):
stacked_anno[k].flat[shot_anno.flat == k] = 1
supp.append((shot_im, stacked_anno))
supp_aux.append(shot_aux)
aux.update({'support': supp_aux})
return qry_im, supp, qry_tgt, aux
def __len__(self):
return len(self.qry_ds)
|
StarcoderdataPython
|
3237128
|
<gh_stars>0
# Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Lint as: python3
"""Wraps TFP bijectors for use with Jax."""
from jax import tree_util
from jax import util as jax_util
import jax.numpy as np
from six.moves import zip
from oryx.core import primitive
from oryx.core.interpreters import inverse
from oryx.core.interpreters.inverse import slice as slc
from tensorflow_probability.substrates import jax as tfp
__all__ = [
'make_type',
]
safe_map = jax_util.safe_map
tf = tfp.tf2jax
tfb = tfp.bijectors
_registry = {}
InverseAndILDJ = inverse.core.InverseAndILDJ
NDSlice = slc.NDSlice
class _JaxBijectorTypeSpec(object):
"""TypeSpec for flattening/unflattening bijectors."""
__slots__ = ('_clsid', '_kwargs', '_param_specs')
def __init__(self, clsid, param_specs, kwargs):
self._clsid = clsid
self._kwargs = kwargs
self._param_specs = param_specs
@property
def value_type(self):
return _registry[self._clsid]
def _to_components(self, obj):
components = {
'args': obj._args # pylint: disable=protected-access
}
for k, v in sorted(obj._kwargs.items()): # pylint: disable=protected-access
if k in self._param_specs: # pylint: disable=protected-access
components[k] = v
return components
def _from_components(self, components):
kwargs = dict(self._kwargs) # pylint: disable=protected-access
kwargs.update(components)
args = kwargs.pop('args')
return self.value_type(*args, **kwargs) # pylint: disable=not-callable
@property
def _component_specs(self):
return self._param_specs
def _serialize(self):
# Include default version 1 for now
return 1, self._clsid, self._param_specs, self._kwargs
@classmethod
def _deserialize(cls, encoded):
version, clsid, param_specs, kwargs = encoded
if version != 1: raise ValueError
if clsid not in _registry: raise ValueError(clsid)
return cls(clsid, param_specs, kwargs)
bijector_p = primitive.HigherOrderPrimitive('bijector')
class _CellProxy:
"""Used for avoid recursing into cells when doing Pytree flattening/unflattening."""
def __init__(self, cell):
self.cell = cell
def bijector_ildj_rule(incells, outcells, **params):
"""Inverse/ILDJ rule for bijectors."""
incells = incells[1:]
num_consts = len(incells) - params['num_args']
const_incells, flat_incells = jax_util.split_list(incells, [num_consts])
flat_inproxies = safe_map(_CellProxy, flat_incells)
in_tree = params['in_tree']
bijector_proxies, inproxy = tree_util.tree_unflatten(in_tree,
flat_inproxies)
flat_bijector_cells = [proxy.cell for proxy
in tree_util.tree_leaves(bijector_proxies)]
if any(not cell.top() for cell in flat_bijector_cells):
return const_incells + flat_incells, outcells, False, None
bijector = tree_util.tree_multimap(lambda x: x.cell.val, bijector_proxies)
direction = params['direction']
if direction == 'forward':
forward_func = bijector.forward
inv_func = bijector.inverse
ildj_func = bijector.inverse_log_det_jacobian
elif direction == 'inverse':
forward_func = bijector.inverse
inv_func = bijector.forward
ildj_func = bijector.forward_log_det_jacobian
else:
raise ValueError('Bijector direction must be '
'"forward" or "inverse".')
outcell, = outcells
incell = inproxy.cell
if incell.bottom() and not outcell.bottom():
val, ildj = outcell.val, outcell.ildj
inildj = ildj + ildj_func(val, np.ndim(val))
ndslice = NDSlice.new(inv_func(val), inildj)
flat_incells = [
InverseAndILDJ(incell.aval, [ndslice])
]
new_outcells = outcells
elif outcell.is_unknown() and not incell.is_unknown():
new_outcells = [InverseAndILDJ.new(forward_func(incell.val))]
new_incells = flat_bijector_cells + flat_incells
return const_incells + new_incells, new_outcells, None
inverse.core.ildj_registry[bijector_p] = bijector_ildj_rule
def make_wrapper_type(cls):
"""Creates new Bijector type that can be flattened/unflattened and is lazy."""
clsid = (cls.__module__, cls.__name__)
def bijector_bind(bijector, x, **kwargs):
return primitive.call_bind(
bijector_p, direction=kwargs['direction'])(_bijector)(
bijector, x, **kwargs)
def _bijector(bij, x, **kwargs):
direction = kwargs.pop('direction', 'forward')
if direction == 'forward':
return cls.forward(bij, x, **kwargs)
elif direction == 'inverse':
return cls.inverse(bij, x, **kwargs)
else:
raise ValueError('Bijector direction must be "forward" or "inverse".')
if clsid not in _registry:
class _WrapperType(cls):
"""Oryx bijector wrapper type."""
def __init__(self, *args, **kwargs):
self.use_primitive = kwargs.pop('use_primitive', True)
self._args = args
self._kwargs = kwargs
def forward(self, x, **kwargs):
if self.use_primitive:
return bijector_bind(self, x, direction='forward',
**kwargs)
return cls.forward(self, x, **kwargs)
def inverse(self, x, **kwargs):
if self.use_primitive:
return bijector_bind(self, x, direction='inverse',
**kwargs)
return cls.inverse(self, x, **kwargs)
def _get_instance(self):
obj = object.__new__(cls)
cls.__init__(obj, *self._args, **self._kwargs)
return obj
def __getattr__(self, key):
if key not in ('_args', '_kwargs', 'parameters', '_type_spec'):
return getattr(self._get_instance(), key)
return object.__getattribute__(self, key)
@property
def parameters(self):
return self._get_instance().parameters
@property
def _type_spec(self):
kwargs = dict(self._kwargs)
param_specs = {}
event_ndims = {}
for k in event_ndims:
if k in kwargs and kwargs[k] is not None:
elem = kwargs.pop(k)
if type(elem) == object: # pylint: disable=unidiomatic-typecheck
param_specs[k] = object
elif tf.is_tensor(elem):
param_specs[k] = (elem.shape, elem.dtype)
else:
param_specs[k] = type(elem)
for k, v in list(kwargs.items()):
if isinstance(v, tfb.Bijector):
param_specs[k] = kwargs.pop(k)
return _JaxBijectorTypeSpec(
clsid, param_specs, kwargs)
def __str__(self):
return repr(self)
def __repr__(self):
return '{}()'.format(self.__class__.__name__)
_WrapperType.__name__ = cls.__name__ + 'Wrapper'
def to_tree(obj):
type_spec = obj._type_spec # pylint: disable=protected-access
components = type_spec._to_components(obj) # pylint: disable=protected-access
keys, values = list(zip(*sorted(components.items())))
return values, (keys, type_spec)
def from_tree(info, xs):
keys, type_spec = info
components = dict(list(zip(keys, xs)))
return type_spec._from_components(components) # pylint: disable=protected-access
tree_util.register_pytree_node(
_WrapperType,
to_tree,
from_tree
)
_registry[clsid] = _WrapperType
return _registry[clsid]
def make_type(dist):
"""Entry point for wrapping distributions."""
class _JaxBijectorType(dist):
def __new__(cls, *args, **kwargs):
type_ = make_wrapper_type(dist)
obj = object.__new__(type_)
obj.__init__(*args, **kwargs)
return obj
_JaxBijectorType.__name__ = dist.__name__
return _JaxBijectorType
|
StarcoderdataPython
|
19888
|
<reponame>yzjba/FATE<gh_stars>10-100
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from arch.api.utils import log_utils
from federatedml.feature.feature_selection.filter_base import BaseFilterMethod
from federatedml.statistic.statics import MultivariateStatisticalSummary
from federatedml.param.feature_selection_param import VarianceOfCoeSelectionParam
from federatedml.protobuf.generated import feature_selection_meta_pb2
from federatedml.util import consts
import math
LOGGER = log_utils.getLogger()
class VarianceCoeFilter(BaseFilterMethod):
"""
Filter the columns if coefficient of variance is less than a threshold.
"""
def __init__(self, filter_param: VarianceOfCoeSelectionParam):
super().__init__(filter_param)
self.statics_obj = None
def _parse_filter_param(self, filter_param):
self.value_threshold = filter_param.value_threshold
def set_statics_obj(self, statics_obj):
self.statics_obj = statics_obj
def fit(self, data_instances, suffix):
if self.statics_obj is None:
self.statics_obj = MultivariateStatisticalSummary(data_instances)
std_var = self.statics_obj.get_std_variance()
mean_value = self.statics_obj.get_mean()
for col_name in self.selection_properties.select_col_names:
s_v = std_var.get(col_name)
m_v = mean_value.get(col_name)
if math.fabs(m_v) < consts.FLOAT_ZERO:
m_v = consts.FLOAT_ZERO
coeff_of_var = math.fabs(s_v / m_v)
if coeff_of_var >= self.value_threshold:
self.selection_properties.add_left_col_name(col_name)
self.selection_properties.add_feature_value(col_name, coeff_of_var)
self._keep_one_feature(pick_high=True)
return self
def get_meta_obj(self, meta_dicts):
result = feature_selection_meta_pb2.VarianceOfCoeSelectionMeta(value_threshold=self.value_threshold)
meta_dicts['variance_coe_meta'] = result
return meta_dicts
|
StarcoderdataPython
|