code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
---|---|---|
async def notify_about_cancel_request(request, username, user_id):
from emoji import emojize
from loader import bot, db
from data.config import super_admins
emo_issuing_office = emojize(':office:', use_aliases=True)
emo_cash_recive = emojize(':chart_with_upwards_trend:', use_aliases=True)
emo_delivery = emojize(':steam_locomotive:', use_aliases=True)
emo_exchange = emojize(':recycle:', use_aliases=True)
emo_cash_in = emojize(':atm:', use_aliases=True)
emo_cash_atm = emojize(':credit_card:', use_aliases=True)
emo_request = {
'выдача в офисе': emo_issuing_office,
'прием кэша': emo_cash_recive,
'доставка': emo_delivery,
'обмен': emo_exchange,
'кэшин': emo_cash_in,
'снятие с карт': emo_cash_atm,
}
type_operation = emo_request[request[3]]
number_request = request[2]
date_request = request[0]
warning = f'Заявка {type_operation} #N{number_request} от {date_request} была отменена. Отменил - {username}'
admins = db.select_id_users(status='admin')
change = db.select_id_users(status='changer')
if not len(super_admins) == 0:
for user in super_admins:
if user == user_id:
pass
else:
await bot.send_message(user, warning)
if not len(admins) == 0:
list_admins_id = []
for item in admins:
list_admins_id.append(item[0])
for user in list_admins_id:
if user == user_id:
pass
else:
await bot.send_message(user, warning)
if not len(change) == 0:
list_changers_id = []
for item in change:
list_changers_id.append(item[0])
for user in list_changers_id:
if user == user_id:
await bot.send_message(user, warning)
return
|
[
"emoji.emojize",
"loader.bot.send_message",
"loader.db.select_id_users"
] |
[((201, 238), 'emoji.emojize', 'emojize', (['""":office:"""'], {'use_aliases': '(True)'}), "(':office:', use_aliases=True)\n", (208, 238), False, 'from emoji import emojize\n'), ((265, 320), 'emoji.emojize', 'emojize', (['""":chart_with_upwards_trend:"""'], {'use_aliases': '(True)'}), "(':chart_with_upwards_trend:', use_aliases=True)\n", (272, 320), False, 'from emoji import emojize\n'), ((340, 387), 'emoji.emojize', 'emojize', (['""":steam_locomotive:"""'], {'use_aliases': '(True)'}), "(':steam_locomotive:', use_aliases=True)\n", (347, 387), False, 'from emoji import emojize\n'), ((407, 445), 'emoji.emojize', 'emojize', (['""":recycle:"""'], {'use_aliases': '(True)'}), "(':recycle:', use_aliases=True)\n", (414, 445), False, 'from emoji import emojize\n'), ((464, 498), 'emoji.emojize', 'emojize', (['""":atm:"""'], {'use_aliases': '(True)'}), "(':atm:', use_aliases=True)\n", (471, 498), False, 'from emoji import emojize\n'), ((518, 560), 'emoji.emojize', 'emojize', (['""":credit_card:"""'], {'use_aliases': '(True)'}), "(':credit_card:', use_aliases=True)\n", (525, 560), False, 'from emoji import emojize\n'), ((1044, 1078), 'loader.db.select_id_users', 'db.select_id_users', ([], {'status': '"""admin"""'}), "(status='admin')\n", (1062, 1078), False, 'from loader import bot, db\n'), ((1092, 1128), 'loader.db.select_id_users', 'db.select_id_users', ([], {'status': '"""changer"""'}), "(status='changer')\n", (1110, 1128), False, 'from loader import bot, db\n'), ((1292, 1323), 'loader.bot.send_message', 'bot.send_message', (['user', 'warning'], {}), '(user, warning)\n', (1308, 1323), False, 'from loader import bot, db\n'), ((1584, 1615), 'loader.bot.send_message', 'bot.send_message', (['user', 'warning'], {}), '(user, warning)\n', (1600, 1615), False, 'from loader import bot, db\n'), ((1843, 1874), 'loader.bot.send_message', 'bot.send_message', (['user', 'warning'], {}), '(user, warning)\n', (1859, 1874), False, 'from loader import bot, db\n')]
|
import os, shutil
import settings
class BaseVCS(object):
def __init__(self, name, anonymous_access, template=None):
"""
A base class to handle Version Control System functions
name = name of the repository
anonymous_access = Is it public?
template = The name of the template to use
"""
self.public = anonymous_access
self.name = name
self.template = template
self.config = self.get_config()
self._update_path() # Sets self.path and url
def _update_path(self):
"""
Determine where the repository is. It is called in __init__ and sets
self.path and self.url
"""
if self.public:
self.path = os.path.abspath(os.path.join(self.config['public_path'], self.name))
self.url = "%s%s/" % (self.config['public_url'], self.name)
else:
self.path = os.path.abspath(os.path.join(self.config['private_path'], self.name))
self.url = "%s%s/" % (self.config['private_url'], self.name)
def get_config(self):
"""
Search the configuration for the correct record
"""
name = self.__class__.__name__.replace('Repository','')
for value in settings.VCS_CONFIG.values():
if value['name'] == name:
return value
raise Exception("The configuration for %s is missing." % name)
def exists(self):
"""
Does the repository exist on the file system?
"""
return os.path.exists(self.path)
def create(self):
"""
Create a new repository
"""
NotImplementedError
def make_public(self):
"""
Move a repository from private to public
"""
dest = os.path.abspath(os.path.join(self.config['public_path'], self.name))
source = self.path
shutil.move(source, dest)
self.public = True
self._update_path()
def make_private(self):
"""
Move a repository from public to private
"""
source = self.path
dest = os.path.abspath(os.path.join(self.config['private_path'], self.name))
shutil.move(source, dest)
self.public = False
self._update_path()
def delete(self):
"""
Delete the source repository here
"""
if self.exists():
shutil.rmtree(self.path)
def create_remote(self, name, description='', homepage=''):
"""
Create a remote repository on a separate service
"""
raise NotImplementedError
def add_remote(self, name, url, branch=None):
"""
Add a remote repository
"""
raise NotImplementedError
def update_remote(self, name, branch=None):
"""
Update a remote repository.
"""
raise NotImplementedError
def list_directory(self, path, revision=None, branch=None):
"""
List the files directory in the repository
Optionally can specify a revision or branch from which to show the directory.
"""
raise NotImplementedError
def get_file(self, path, revision=None, branch=None):
"""
Get the contents from a file
Optionally can specify a revision or branch from which to retrieve the contents
"""
raise NotImplementedError
def get_absolute_url(self):
"""
Return the absolute url
"""
return self.url
def get_current_revision(self):
"""
Get the current revision of he repository
"""
raise NotImplementedError
def get_archive(self, revision=None, tag=None):
"""
Get an archive of the current revision, or specific revision or tag
"""
raise NotImplementedError
|
[
"os.path.exists",
"shutil.move",
"settings.VCS_CONFIG.values",
"shutil.rmtree",
"os.path.join"
] |
[((1275, 1303), 'settings.VCS_CONFIG.values', 'settings.VCS_CONFIG.values', ([], {}), '()\n', (1301, 1303), False, 'import settings\n'), ((1563, 1588), 'os.path.exists', 'os.path.exists', (['self.path'], {}), '(self.path)\n', (1577, 1588), False, 'import os, shutil\n'), ((1924, 1949), 'shutil.move', 'shutil.move', (['source', 'dest'], {}), '(source, dest)\n', (1935, 1949), False, 'import os, shutil\n'), ((2231, 2256), 'shutil.move', 'shutil.move', (['source', 'dest'], {}), '(source, dest)\n', (2242, 2256), False, 'import os, shutil\n'), ((1836, 1887), 'os.path.join', 'os.path.join', (["self.config['public_path']", 'self.name'], {}), "(self.config['public_path'], self.name)\n", (1848, 1887), False, 'import os, shutil\n'), ((2169, 2221), 'os.path.join', 'os.path.join', (["self.config['private_path']", 'self.name'], {}), "(self.config['private_path'], self.name)\n", (2181, 2221), False, 'import os, shutil\n'), ((2444, 2468), 'shutil.rmtree', 'shutil.rmtree', (['self.path'], {}), '(self.path)\n', (2457, 2468), False, 'import os, shutil\n'), ((768, 819), 'os.path.join', 'os.path.join', (["self.config['public_path']", 'self.name'], {}), "(self.config['public_path'], self.name)\n", (780, 819), False, 'import os, shutil\n'), ((947, 999), 'os.path.join', 'os.path.join', (["self.config['private_path']", 'self.name'], {}), "(self.config['private_path'], self.name)\n", (959, 999), False, 'import os, shutil\n')]
|
import random
health = 50
difficulty = 3
potion_health = int(random.randint(25,50)/ difficulty)
health = health + potion_health
print (health)
import math
|
[
"random.randint"
] |
[((64, 86), 'random.randint', 'random.randint', (['(25)', '(50)'], {}), '(25, 50)\n', (78, 86), False, 'import random\n')]
|
#-------------------------------------------------------------------------------
#
# FBI (Frame Based Inspector) Plugin.
#
# Written by: <NAME>
#
# Date: 1/4/2006
#
# (c) Copyright 2006 by Enthought, Inc.
#
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports:
#-------------------------------------------------------------------------------
from envisage.core.core_plugin_definition \
import PluginDefinition
#-------------------------------------------------------------------------------
# The plugin definition:
#-------------------------------------------------------------------------------
PluginDefinition(
# The plugin's globally unique identifier:
id="envisage.plugins.debug.fbi",
# The name of the class that implements the plugin:
class_name="envisage.plugins.debug.fbi_plugin.FBIPlugin",
# General information about the plugin:
name="FBI Plugin",
version="1.0.0",
provider_name="Enthought Inc",
provider_url="www.enthought.com",
enabled=True,
autostart=True,
# The Id's of the plugins that this plugin requires:
requires=["envisage.core", ])
|
[
"envisage.core.core_plugin_definition.PluginDefinition"
] |
[((729, 1014), 'envisage.core.core_plugin_definition.PluginDefinition', 'PluginDefinition', ([], {'id': '"""envisage.plugins.debug.fbi"""', 'class_name': '"""envisage.plugins.debug.fbi_plugin.FBIPlugin"""', 'name': '"""FBI Plugin"""', 'version': '"""1.0.0"""', 'provider_name': '"""Enthought Inc"""', 'provider_url': '"""www.enthought.com"""', 'enabled': '(True)', 'autostart': '(True)', 'requires': "['envisage.core']"}), "(id='envisage.plugins.debug.fbi', class_name=\n 'envisage.plugins.debug.fbi_plugin.FBIPlugin', name='FBI Plugin',\n version='1.0.0', provider_name='Enthought Inc', provider_url=\n 'www.enthought.com', enabled=True, autostart=True, requires=[\n 'envisage.core'])\n", (745, 1014), False, 'from envisage.core.core_plugin_definition import PluginDefinition\n')]
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: delete.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='delete.proto',
package='template',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x0c\x64\x65lete.proto\x12\x08template\x1a\x1bgoogle/protobuf/empty.proto\"=\n\x15\x44\x65leteTemplateRequest\x12\x10\n\x08pluginId\x18\x01 \x01(\t\x12\x12\n\ntemplateId\x18\x02 \x01(\t\"w\n\x1d\x44\x65leteTemplateResponseWrapper\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x13\n\x0b\x63odeExplain\x18\x02 \x01(\t\x12\r\n\x05\x65rror\x18\x03 \x01(\t\x12$\n\x04\x64\x61ta\x18\x04 \x01(\x0b\x32\x16.google.protobuf.Emptyb\x06proto3')
,
dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,])
_DELETETEMPLATEREQUEST = _descriptor.Descriptor(
name='DeleteTemplateRequest',
full_name='template.DeleteTemplateRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pluginId', full_name='template.DeleteTemplateRequest.pluginId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='templateId', full_name='template.DeleteTemplateRequest.templateId', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=55,
serialized_end=116,
)
_DELETETEMPLATERESPONSEWRAPPER = _descriptor.Descriptor(
name='DeleteTemplateResponseWrapper',
full_name='template.DeleteTemplateResponseWrapper',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='template.DeleteTemplateResponseWrapper.code', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='codeExplain', full_name='template.DeleteTemplateResponseWrapper.codeExplain', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='template.DeleteTemplateResponseWrapper.error', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='template.DeleteTemplateResponseWrapper.data', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=118,
serialized_end=237,
)
_DELETETEMPLATERESPONSEWRAPPER.fields_by_name['data'].message_type = google_dot_protobuf_dot_empty__pb2._EMPTY
DESCRIPTOR.message_types_by_name['DeleteTemplateRequest'] = _DELETETEMPLATEREQUEST
DESCRIPTOR.message_types_by_name['DeleteTemplateResponseWrapper'] = _DELETETEMPLATERESPONSEWRAPPER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DeleteTemplateRequest = _reflection.GeneratedProtocolMessageType('DeleteTemplateRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETETEMPLATEREQUEST,
'__module__' : 'delete_pb2'
# @@protoc_insertion_point(class_scope:template.DeleteTemplateRequest)
})
_sym_db.RegisterMessage(DeleteTemplateRequest)
DeleteTemplateResponseWrapper = _reflection.GeneratedProtocolMessageType('DeleteTemplateResponseWrapper', (_message.Message,), {
'DESCRIPTOR' : _DELETETEMPLATERESPONSEWRAPPER,
'__module__' : 'delete_pb2'
# @@protoc_insertion_point(class_scope:template.DeleteTemplateResponseWrapper)
})
_sym_db.RegisterMessage(DeleteTemplateResponseWrapper)
# @@protoc_insertion_point(module_scope)
|
[
"google.protobuf.symbol_database.Default",
"google.protobuf.descriptor.FieldDescriptor",
"google.protobuf.reflection.GeneratedProtocolMessageType"
] |
[((460, 486), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ([], {}), '()\n', (484, 486), True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((4783, 4946), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""DeleteTemplateRequest"""', '(_message.Message,)', "{'DESCRIPTOR': _DELETETEMPLATEREQUEST, '__module__': 'delete_pb2'}"], {}), "('DeleteTemplateRequest', (_message\n .Message,), {'DESCRIPTOR': _DELETETEMPLATEREQUEST, '__module__':\n 'delete_pb2'})\n", (4823, 4946), True, 'from google.protobuf import reflection as _reflection\n'), ((5101, 5280), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""DeleteTemplateResponseWrapper"""', '(_message.Message,)', "{'DESCRIPTOR': _DELETETEMPLATERESPONSEWRAPPER, '__module__': 'delete_pb2'}"], {}), "('DeleteTemplateResponseWrapper', (\n _message.Message,), {'DESCRIPTOR': _DELETETEMPLATERESPONSEWRAPPER,\n '__module__': 'delete_pb2'})\n", (5141, 5280), True, 'from google.protobuf import reflection as _reflection\n'), ((2658, 3005), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""code"""', 'full_name': '"""template.DeleteTemplateResponseWrapper.code"""', 'index': '(0)', 'number': '(1)', 'type': '(5)', 'cpp_type': '(1)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='code', full_name=\n 'template.DeleteTemplateResponseWrapper.code', index=0, number=1, type=\n 5, cpp_type=1, label=1, has_default_value=False, default_value=0,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, serialized_options=None, file=DESCRIPTOR)\n", (2685, 3005), True, 'from google.protobuf import descriptor as _descriptor\n'), ((3829, 4181), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""data"""', 'full_name': '"""template.DeleteTemplateResponseWrapper.data"""', 'index': '(3)', 'number': '(4)', 'type': '(11)', 'cpp_type': '(10)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': 'None', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='data', full_name=\n 'template.DeleteTemplateResponseWrapper.data', index=3, number=4, type=\n 11, cpp_type=10, label=1, has_default_value=False, default_value=None,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, serialized_options=None, file=DESCRIPTOR)\n", (3856, 4181), True, 'from google.protobuf import descriptor as _descriptor\n')]
|
#****************************************************#
# This file is part of OPTALG. #
# #
# Copyright (c) 2019, <NAME>. #
# #
# OPTALG is released under the BSD 2-clause license. #
#****************************************************#
from __future__ import print_function
import os
import numpy as np
import tempfile
import subprocess
from . import utils
from .opt_solver_error import *
from .opt_solver import OptSolver
from .problem import OptProblem
from multiprocessing import cpu_count
class OptSolverCplexCMD(OptSolver):
parameters = {'quiet' : False,
'mipgap': None,
'feasibility': None,
'debug': False}
def __init__(self):
"""
CPLEX solver interface (via command-line interface).
"""
# Check
if not utils.cmd_exists('cplex'):
raise ImportError('cplex cmd not available')
OptSolver.__init__(self)
self.parameters = OptSolverCplexCMD.parameters.copy()
def supports_properties(self, properties):
for p in properties:
if p not in [OptProblem.PROP_CURV_LINEAR,
OptProblem.PROP_VAR_CONTINUOUS,
OptProblem.PROP_VAR_INTEGER,
OptProblem.PROP_TYPE_FEASIBILITY,
OptProblem.PROP_TYPE_OPTIMIZATION]:
return False
return True
def read_solution(self, filename, problem):
import xml.etree.ElementTree as ET
x = np.zeros(problem.c.size)
lam = np.zeros(problem.A.shape[0])
nu = np.zeros(0)
mu = np.zeros(x.size)
pi = np.zeros(x.size)
tree = ET.parse(filename)
root = tree.getroot()
header = root.find('header')
status = header.get('solutionStatusString')
for var in root.find('variables'):
name = var.get('name')
value = float(var.get('value'))
index = int(name.split('_')[1])
x[index] = value
rcost = var.get('reducedCost')
if rcost is not None:
if float(rcost) > 0.:
pi[index] = float(rcost)
else:
mu[index] = -float(rcost)
for c in root.find('linearConstraints'):
name = c.get('name')
index = int(name.split('_')[1])
dual = c.get('dual')
if dual is not None:
lam[index] = float(dual)
return status, x, lam, nu, mu, pi
def solve(self, problem):
# Local vars
params = self.parameters
# Parameters
quiet = params['quiet']
mipgap = params['mipgap']
feasibility = params['feasibility']
debug = params['debug']
# Problem
try:
self.problem = problem.to_mixintlin()
except:
raise OptSolverError_BadProblemType(self)
# Solve
status = ''
try:
base_name = next(tempfile._get_candidate_names())
input_filename = base_name+'.lp'
output_filename = base_name+'.sol'
self.problem.write_to_lp_file(input_filename)
cmd = ['cplex']
cmd += ['-c', 'read', input_filename]
if mipgap is not None:
cmd += ['set mip tolerances mipgap %.2e' %mipgap]
if feasibility is not None:
cmd += ['set simplex tolerances feasibility %.2e' %feasibility]
cmd += ['optimize']
cmd += ['write', output_filename]
cmd += ['quit']
if not quiet:
code = subprocess.call(cmd)
else:
code = subprocess.call(cmd,
stdout=open(os.devnull, 'w'),
stderr=subprocess.STDOUT)
assert(code == 0)
status, self.x, self.lam, self.nu, self.mu, self.pi = self.read_solution(output_filename, self.problem)
except Exception as e:
raise OptSolverError_CplexCMDCall(self)
finally:
if os.path.isfile(input_filename) and not debug:
os.remove(input_filename)
if os.path.isfile(output_filename) and not debug:
os.remove(output_filename)
if os.path.isfile('cplex.log') and not debug:
os.remove('cplex.log')
for i in range(cpu_count()):
if os.path.isfile('clone%d.log' %i) and not debug:
os.remove('clone%d.log' %i)
if 'optimal' in status.lower():
self.set_status(self.STATUS_SOLVED)
self.set_error_msg('')
else:
raise OptSolverError_CplexCMD(self)
|
[
"xml.etree.ElementTree.parse",
"tempfile._get_candidate_names",
"os.remove",
"numpy.zeros",
"os.path.isfile",
"subprocess.call",
"multiprocessing.cpu_count"
] |
[((1648, 1672), 'numpy.zeros', 'np.zeros', (['problem.c.size'], {}), '(problem.c.size)\n', (1656, 1672), True, 'import numpy as np\n'), ((1687, 1715), 'numpy.zeros', 'np.zeros', (['problem.A.shape[0]'], {}), '(problem.A.shape[0])\n', (1695, 1715), True, 'import numpy as np\n'), ((1729, 1740), 'numpy.zeros', 'np.zeros', (['(0)'], {}), '(0)\n', (1737, 1740), True, 'import numpy as np\n'), ((1754, 1770), 'numpy.zeros', 'np.zeros', (['x.size'], {}), '(x.size)\n', (1762, 1770), True, 'import numpy as np\n'), ((1784, 1800), 'numpy.zeros', 'np.zeros', (['x.size'], {}), '(x.size)\n', (1792, 1800), True, 'import numpy as np\n'), ((1817, 1835), 'xml.etree.ElementTree.parse', 'ET.parse', (['filename'], {}), '(filename)\n', (1825, 1835), True, 'import xml.etree.ElementTree as ET\n'), ((3138, 3169), 'tempfile._get_candidate_names', 'tempfile._get_candidate_names', ([], {}), '()\n', (3167, 3169), False, 'import tempfile\n'), ((3775, 3795), 'subprocess.call', 'subprocess.call', (['cmd'], {}), '(cmd)\n', (3790, 3795), False, 'import subprocess\n'), ((4253, 4283), 'os.path.isfile', 'os.path.isfile', (['input_filename'], {}), '(input_filename)\n', (4267, 4283), False, 'import os\n'), ((4315, 4340), 'os.remove', 'os.remove', (['input_filename'], {}), '(input_filename)\n', (4324, 4340), False, 'import os\n'), ((4356, 4387), 'os.path.isfile', 'os.path.isfile', (['output_filename'], {}), '(output_filename)\n', (4370, 4387), False, 'import os\n'), ((4419, 4445), 'os.remove', 'os.remove', (['output_filename'], {}), '(output_filename)\n', (4428, 4445), False, 'import os\n'), ((4461, 4488), 'os.path.isfile', 'os.path.isfile', (['"""cplex.log"""'], {}), "('cplex.log')\n", (4475, 4488), False, 'import os\n'), ((4520, 4542), 'os.remove', 'os.remove', (['"""cplex.log"""'], {}), "('cplex.log')\n", (4529, 4542), False, 'import os\n'), ((4570, 4581), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (4579, 4581), False, 'from multiprocessing import cpu_count\n'), ((4603, 4636), 'os.path.isfile', 'os.path.isfile', (["('clone%d.log' % i)"], {}), "('clone%d.log' % i)\n", (4617, 4636), False, 'import os\n'), ((4671, 4699), 'os.remove', 'os.remove', (["('clone%d.log' % i)"], {}), "('clone%d.log' % i)\n", (4680, 4699), False, 'import os\n')]
|
# encoding: utf-8
import os
import unittest
from configurationutil.cfg_providers import base_provider
from fdutil.path_tools import pop_path
class TestConfigurationObject(unittest.TestCase):
def setUp(self):
self.cfg_file = os.path.join(pop_path(__file__), u'test_config_object.json')
self.template = os.path.join(pop_path(__file__), u'..', u'resources', u'upgrade_template.json')
self.missing_template = os.path.join(pop_path(__file__), u'test_config_object_template.json')
def tearDown(self):
pass
def test_instantiation(self):
base_provider.ConfigObject.DEFAULT_TEMPLATE = self.template
with self.assertRaises(NotImplementedError):
self.cfg = base_provider.ConfigObject(config_file=self.cfg_file,
create=True)
del base_provider.ConfigObject.DEFAULT_TEMPLATE
def test_instantiation_missing_default_template(self):
with self.assertRaises(NotImplementedError):
self.cfg = base_provider.ConfigObject(config_file=self.cfg_file,
create=True)
def test_instantiation_missing_default_file(self):
base_provider.ConfigObject.DEFAULT_TEMPLATE = self.missing_template
with self.assertRaises(IOError):
self.cfg = base_provider.ConfigObject(config_file=self.cfg_file,
create=True)
del base_provider.ConfigObject.DEFAULT_TEMPLATE
if __name__ == u'__main__':
unittest.main()
|
[
"unittest.main",
"configurationutil.cfg_providers.base_provider.ConfigObject",
"fdutil.path_tools.pop_path"
] |
[((1559, 1574), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1572, 1574), False, 'import unittest\n'), ((253, 271), 'fdutil.path_tools.pop_path', 'pop_path', (['__file__'], {}), '(__file__)\n', (261, 271), False, 'from fdutil.path_tools import pop_path\n'), ((338, 356), 'fdutil.path_tools.pop_path', 'pop_path', (['__file__'], {}), '(__file__)\n', (346, 356), False, 'from fdutil.path_tools import pop_path\n'), ((450, 468), 'fdutil.path_tools.pop_path', 'pop_path', (['__file__'], {}), '(__file__)\n', (458, 468), False, 'from fdutil.path_tools import pop_path\n'), ((726, 792), 'configurationutil.cfg_providers.base_provider.ConfigObject', 'base_provider.ConfigObject', ([], {'config_file': 'self.cfg_file', 'create': '(True)'}), '(config_file=self.cfg_file, create=True)\n', (752, 792), False, 'from configurationutil.cfg_providers import base_provider\n'), ((1036, 1102), 'configurationutil.cfg_providers.base_provider.ConfigObject', 'base_provider.ConfigObject', ([], {'config_file': 'self.cfg_file', 'create': '(True)'}), '(config_file=self.cfg_file, create=True)\n', (1062, 1102), False, 'from configurationutil.cfg_providers import base_provider\n'), ((1351, 1417), 'configurationutil.cfg_providers.base_provider.ConfigObject', 'base_provider.ConfigObject', ([], {'config_file': 'self.cfg_file', 'create': '(True)'}), '(config_file=self.cfg_file, create=True)\n', (1377, 1417), False, 'from configurationutil.cfg_providers import base_provider\n')]
|
"""
some class that used for monitor the stats information of
negmas during the time of simulation
monitor methode:
1. detect the information of changing file
2. detect the information of shared memory
"""
from abc import ABCMeta, abstractmethod
import os, time
from typing import Optional
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
from hachiko.hachiko import AIOWatchdog, AIOEventHandler
import asyncio
class NegmasMonitorFile(AIOEventHandler):
"""
Use this class to monitor the stats file
>>> nm = NegmasMonitorFile()
please see how to initial NegmasMonitorFile need set log_folder!
>>> nm2 = NegmasMonitorFile(log_folder='./log_folder')
{'log': ['log.txt', 'log_test'], 'stats': ['m_product', 'm_balance', 'm_breach', 'm_kkk']}
>>> nm2.run()
"""
def __init__(self, log_folder:Optional[str]=None):
super(NegmasMonitorFile, self).__init__()
self.mode = "debug"
self._watch_path = log_folder
NegmasMonitorFile._watch_path = log_folder
if log_folder is not None:
self._file_detect()
else:
print('please see how to initial NegmasMonitorFile need set log_folder!')
# Use this function to detect log and stats files in a directory, also set up the cursor to zero(after use seek to get the appended content)
def _file_detect(self) -> dict:
try:
self.worlds_stats = {}
def _detect_files(world):
stats_files = {}
logs_files = {}
all_files = {}
for f in os.listdir(self._watch_path+'/'+world):
if f.startswith("log"):
logs_files[f] = 0
elif f.startswith('m_'):
stats_files[f] = 0
all_files["log"] = logs_files
all_files["stats"] = stats_files
return all_files
worlds = next(os.walk(self._watch_path))[1]
for w in worlds:
self.worlds_stats[w] = _detect_files(w)
if self.mode == "debug":
print(self.worlds_stats)
except Exception as e:
print(f'can not find {self._watch_path}')
async def on_deleted(self, event):
print(event)
if not event.is_directory:
if self.mode == "debug":
print(f"delete {event.src_path}")
async def on_created(self, event):
print(event)
world_monitor = []
if not event.is_directory:
new_file = event.src_path.split("/")[-1]
world_name = event.src_path.split("/")[-2]
if world_name in self.worlds_stats:
if new_file.startswith("log"):
self.worlds_stats[world_name]["log"][new_file] = 0
elif new_file.startswith("m_"):
self.worlds_stats[world_name]["stats"][new_file] = 0
if self.mode == "debug":
print(f"create {event.src_path} files {self.worlds_stats}")
else:
self.worlds_stats[event.src_path.split("/")[-1]] = {"log":{}, "stats":{}}
print(self.worlds_stats)
async def on_moved(self, event):
print(event)
if not event.is_directory:
if mode == "debug":
print(f"moved {event.src_path}")
async def on_modified(self, event):
print(event)
if not event.is_directory:
file_path = event.src_path
filename = file_path.split('/')[-1]
world_name = file_path.split('/')[-2]
new_content = ''
if world_name in self.worlds_stats:
if filename.startswith('m_'):
last_seek = self.worlds_stats[world_name]['stats'][filename]
f = open(file_path)
f.seek(last_seek,0)
new_content = f.read().strip().replace("\n", "")
self.worlds_stats[world_name]['stats'][filename] = f.tell()
print(self.worlds_stats[world_name]['stats'][filename])
f.close()
if self.mode == "debug":
print(f"changed {file_path} content {new_content}")
async def watch_fs(path):
watch = AIOWatchdog(path, event_handler=NegmasMonitorFile(log_folder=path))
watch.start()
import threading
print('monitor threading is {}'. format(threading.current_thread()))
import os
print("monitor process id is {}".format(os.getpid()))
for _ in range(100):
await asyncio.sleep(1)
watch.stop()
print("Finish monitoring task")
class NegmasMonitorMemory():
pass
if __name__ == "__main__":
start = time.time()
paths = ['./log_folder']
tasks = [watch_fs(path) for path in paths]
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(asyncio.wait(tasks))
finally:
loop.close()
print("finished all monitoring tasks! time %.5f" % float(time.time()-start))
|
[
"asyncio.get_event_loop",
"os.getpid",
"asyncio.sleep",
"os.walk",
"time.time",
"asyncio.wait",
"threading.current_thread",
"os.listdir"
] |
[((4833, 4844), 'time.time', 'time.time', ([], {}), '()\n', (4842, 4844), False, 'import os, time\n'), ((4932, 4956), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (4954, 4956), False, 'import asyncio\n'), ((4543, 4569), 'threading.current_thread', 'threading.current_thread', ([], {}), '()\n', (4567, 4569), False, 'import threading\n'), ((4631, 4642), 'os.getpid', 'os.getpid', ([], {}), '()\n', (4640, 4642), False, 'import os\n'), ((4684, 4700), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (4697, 4700), False, 'import asyncio\n'), ((4998, 5017), 'asyncio.wait', 'asyncio.wait', (['tasks'], {}), '(tasks)\n', (5010, 5017), False, 'import asyncio\n'), ((1673, 1715), 'os.listdir', 'os.listdir', (["(self._watch_path + '/' + world)"], {}), "(self._watch_path + '/' + world)\n", (1683, 1715), False, 'import os\n'), ((2041, 2066), 'os.walk', 'os.walk', (['self._watch_path'], {}), '(self._watch_path)\n', (2048, 2066), False, 'import os\n'), ((5114, 5125), 'time.time', 'time.time', ([], {}), '()\n', (5123, 5125), False, 'import os, time\n')]
|
import json
def save_json(data, fpath):
"Stores data as a JSON in the provided filepath."
with open(fpath, 'w') as f:
json.dump(data, f)
|
[
"json.dump"
] |
[((135, 153), 'json.dump', 'json.dump', (['data', 'f'], {}), '(data, f)\n', (144, 153), False, 'import json\n')]
|
import os
import mcvine, mcvine.components
instrument = mcvine.instrument()
# add source
source = mcvine.components.sources.Source_simple('source')
instrument.append(source, position=(0,0,0))
# add sample
sample = mcvine.components.samples.V_sample('sample')
instrument.append(sample, position=(0,0,1))
# add detector system
from mcvine import resources
arcsxml = os.path.join(
resources.instrument('ARCS'), 'detsys', 'ARCS.xml.fornxs')
ds = mcvine.components.detectors.DetectorSystemFromXml('ds', instrumentxml=arcsxml, outfilename='events.dat')
instrument.append(ds, position=(0,0,1))
|
[
"mcvine.components.detectors.DetectorSystemFromXml",
"mcvine.components.samples.V_sample",
"mcvine.resources.instrument",
"mcvine.components.sources.Source_simple",
"mcvine.instrument"
] |
[((56, 75), 'mcvine.instrument', 'mcvine.instrument', ([], {}), '()\n', (73, 75), False, 'import mcvine, mcvine.components\n'), ((98, 147), 'mcvine.components.sources.Source_simple', 'mcvine.components.sources.Source_simple', (['"""source"""'], {}), "('source')\n", (137, 147), False, 'import mcvine, mcvine.components\n'), ((214, 258), 'mcvine.components.samples.V_sample', 'mcvine.components.samples.V_sample', (['"""sample"""'], {}), "('sample')\n", (248, 258), False, 'import mcvine, mcvine.components\n'), ((446, 555), 'mcvine.components.detectors.DetectorSystemFromXml', 'mcvine.components.detectors.DetectorSystemFromXml', (['"""ds"""'], {'instrumentxml': 'arcsxml', 'outfilename': '"""events.dat"""'}), "('ds', instrumentxml=\n arcsxml, outfilename='events.dat')\n", (495, 555), False, 'import mcvine, mcvine.components\n'), ((382, 410), 'mcvine.resources.instrument', 'resources.instrument', (['"""ARCS"""'], {}), "('ARCS')\n", (402, 410), False, 'from mcvine import resources\n')]
|
"""Test bench for the Verilog module 'nt_recv_capture_top'."""
# The MIT License
#
# Copyright (c) 2017-2019 by the author(s)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author(s):
# - <NAME> <<EMAIL>>
#
# Description:
#
# Test bench for the Verilog module 'nt_recv_capture_top'.
import cocotb
from lib.tb import clk_gen, rstn, wait_n_cycles, swp_byte_order
from lib.mem import Mem
from lib.axilite import AXI_Lite_Reader, AXI_Lite_Writer
from lib.axis import AXIS_Writer
from lib.net import gen_packet, packet_to_axis_data, axis_data_to_packet
import random
from nt_recv_capture_cpuregs_defines import *
# clock frquency in MHz
CLK_FREQ_MHZ = 200
# AXI data width
AXI_BIT_WIDTH = 512
# AXI lite data width
AXI_CTRL_BIT_WIDTH = 32
# AXI stream data width
AXIS_BIT_WIDTH = 64
# maximum byte size of a memory read
RD_TRANSFER_SIZE_MAX = 16384
# ring buffer size in bytes
RING_BUFF_SIZES = [32768, 65536, 131072, 262144]
# offset in memory where ring buffer shall be located
RING_BUFF_ADDRS = [0, 2**32-10*(AXI_BIT_WIDTH/8)]
# different capture lengths that shall be tested
MAX_CAPTURE_LENS = [0, 1514, random.randint(1, 1513)]
# number of packets, latency timestamps and inter-packet times to generate
N_PACKETS = 1331
@cocotb.coroutine
def packets_write(dut, axis_writer, axilite_writer, axilite_reader, pkts,
latencies, inter_packet_times):
"""Apply packets on DuT input."""
# start the module
yield axilite_writer.write(CPUREG_OFFSET_CTRL_ACTIVE, 0x1)
# wait a little bit
yield wait_n_cycles(dut.clk, 10)
# iterate over all packets
for i, pkt in enumerate(pkts):
# convert packet to AXI4-Stream data
(tdata, tkeep) = packet_to_axis_data(pkt, AXIS_BIT_WIDTH)
# include latency and inter-packet time in last TUSER word
tuser = len(tdata) * [0]
tuser[-1] = latencies[i] | (1 << 24) | (inter_packet_times[i] << 25)
# write data
yield axis_writer.write(tdata, tkeep, tuser)
# wait random number of cycles before applying the next packet
yield wait_n_cycles(dut.clk, random.randint(0, 10))
# stop the module
yield axilite_writer.write(CPUREG_OFFSET_CTRL_ACTIVE, 0x0)
def check_data(pkts_ref, latencies_ref, inter_packet_times_ref, data,
max_len_capture):
"""Check the received data for correctness.
The function ensures that the data read from the ring buffer (a list of
512 bit data words) matches the expected meta data (timestamps, wire +
capture length) and packet data.
"""
# data word index
i_data = 0
# iterate over all packets
for i_pkt, pkt_ref in enumerate(pkts_ref):
# determinal actual capture length
len_capture = min(len(pkt_ref), max_len_capture)
# data is captured at the granularity of 8 byte words. how many 8 byte
# words do we have?
if len_capture % 8 == 0:
len_capture_words = len_capture / 8
else:
len_capture_words = len_capture / 8 + 1
# initialize empty packet data list
packet_data = []
# iterate over captured data words (8 byte each)
for i in range(len_capture_words+1):
# get data word and increment data word index
d = data[i_data]
i_data += 1
# swap byte order
d = swp_byte_order(d, AXIS_BIT_WIDTH/8)
if i == 0:
# this is meta data
meta_latency = d & 0xFFFFFF
meta_latency_valid = (d >> 24) & 0x1
meta_interpackettime = (d >> 25) & 0xFFFFFFF
meta_len_wire = (d >> 53) & 0x7FF
# make sure the latency is marked valid
if meta_latency_valid != 0x1:
raise cocotb.result.TestFailure(("Packet #%d: " +
"Latency value not " +
"valid") % i)
# make sure latency matches reference value
if latencies_ref[i_pkt] != meta_latency:
raise cocotb.result.TestFailure(("Packet #%d: " +
"incorrect latency") %
i_pkt)
# make sure inter-packet time matches reference value
if inter_packet_times_ref[i_pkt] != meta_interpackettime:
raise cocotb.result.TestFailure(("Packet #%d: " +
"incorrect inter-" +
"packet time") % i_pkt)
# make sure wire length matches packet length
if len(pkt_ref) != meta_len_wire:
raise cocotb.result.TestFailure(("Packet #%d: " +
"invalid wire " +
"length") % i_pkt)
else:
# this is packet data
packet_data.append(d)
# create packet from captured data
if len_capture % 8 == 0:
pkt = axis_data_to_packet(packet_data, 2**8-1, 64)
else:
pkt = axis_data_to_packet(packet_data,
2**(len_capture % 8)-1, 64)
# make sure packet data matches the exepcted packet data
if str(pkt)[0:len_capture] != \
str(pkt_ref)[0:len_capture]:
raise cocotb.result.TestFailure(("Packet #%d: " +
"invalid data") % i_pkt)
@cocotb.coroutine
def ring_buff_read(dut, axilite_writer, axilite_reader, ring_buff,
ring_buff_addr, max_len_capture, pkts_ref, latencies_ref,
inter_packet_times_ref):
"""Read data from the ring buffer and check it for correctness.
The coroutines monitors the ring buffer write pointer and reads data from
the buffer if sufficient data is available. It ensures that the read data
matches the expected one.
"""
# get ring buffer size
ring_buff_size = ring_buff.size()
# ring buffer must be larger than 16384 bytes
if ring_buff_size <= 16384:
raise cocotb.result.TestFailure("ring buffer size too small")
# ring buffer size must be a multiple of 16384 bytes
if ring_buff_size % 16384 != 0:
raise cocotb.result.TestFailure("ring buffer size invalid")
# transfer size must be smaller than ring buffer
if RD_TRANSFER_SIZE_MAX >= ring_buff_size:
raise cocotb.result.TestFailure("transfer size too large")
# determine the number of bytes that we are expecting to read in total
size_outstanding = 0
# iterate over packets
for pkt in pkts_ref:
# for each packet we need to read 8 byte of meta information
size_outstanding += 8
# determine data capture length
len_capture = min(len(pkt), max_len_capture)
# data is captured at the granularity of 8 byte words
if len_capture % 8 == 0:
size_outstanding += len_capture
else:
size_outstanding += 8 * (len_capture/8 + 1)
# total capture data is 64 byte aligned
if size_outstanding % 64 != 0:
size_outstanding = 64 * (size_outstanding/64 + 1)
# read pointer has been reset and currently is zero
rd = 0
data = []
while True:
# number of outstanding bytes that still need to be read must never be
# negative
assert size_outstanding >= 0
# abort if there is no more data to be read
if size_outstanding == 0:
break
# read error register
errs = yield axilite_reader.read(CPUREG_OFFSET_STATUS_ERRS)
# make sure there was no error
assert errs == 0x0
# get the write pointer
wr = yield axilite_reader.read(CPUREG_OFFSET_CTRL_ADDR_WR)
# get memory size from current read pointer position until the end of
# the ring buffer memory location
ring_buff_size_end = ring_buff_size - rd
# calculate the desired memory transfer size
transfer_size = min(ring_buff_size_end,
min(size_outstanding, RD_TRANSFER_SIZE_MAX))
# calculated memory transfer size must always be positive
assert transfer_size > 0
# ... and it must always be a multiple of 64 bytes
assert transfer_size % 64 == 0
if rd == wr:
# ring buffer is empty -> nothing to transfer
do_transfer = False
elif rd < wr:
# we can read if the difference between both pointers is at least
# the desired transfer size
do_transfer = (wr - rd) >= transfer_size
elif wr < rd:
# we can read until the end of the ring buffer
do_transfer = True
if not do_transfer:
# no data transfer shall take place now, do nothing
continue
# read data from the ring buffer
data_ring_buff = ring_buff.read(ring_buff_addr + rd, transfer_size)
# write data to list in 8 byte words
for i in range(transfer_size/8):
d = data_ring_buff >> ((transfer_size/8 - i - 1)*64) & 2**64-1
data.append(d)
# update read pointer
if (rd + transfer_size) == ring_buff_size:
# end of memory reached, wrap around
rd = 0
else:
assert (rd + transfer_size) < ring_buff_size
rd = rd + transfer_size
# write read pointer to DuT
yield axilite_writer.write(CPUREG_OFFSET_CTRL_ADDR_RD, rd)
# decrement number of bytes that still remain to be written to memory
size_outstanding -= transfer_size
# wait a little bit
yield wait_n_cycles(dut.clk, 100)
# check data for correctness
check_data(pkts_ref, latencies_ref, inter_packet_times_ref, data,
max_len_capture)
@cocotb.test()
def nt_recv_capture_top_test(dut):
"""Test bench main function."""
# start the clock
cocotb.fork(clk_gen(dut.clk, CLK_FREQ_MHZ))
# no software reset
dut.rst_sw <= 0
# reset DuT
yield rstn(dut.clk, dut.rstn)
# create AXI4-Lite writer, connect and reset it
axilite_writer = AXI_Lite_Writer()
axilite_writer.connect(dut, dut.clk, AXI_CTRL_BIT_WIDTH, "ctrl")
yield axilite_writer.rst()
# create AXI4-Lite reader, connect and reset it
axilite_reader = AXI_Lite_Reader()
axilite_reader.connect(dut, dut.clk, AXI_CTRL_BIT_WIDTH, "ctrl")
yield axilite_reader.rst()
# create AXI4-Stream writer, connect and reset it
axis_writer = AXIS_Writer()
axis_writer.connect(dut, dut.clk, AXIS_BIT_WIDTH)
yield axis_writer.rst()
# create a ring buffer memory (initially of size 0) and connect it to the
# DuT
ring_buff = Mem(0)
ring_buff.connect(dut, "ddr3")
# generate a couple of random Ethernet packets. For each packet, generate
# a 16 bit latency value and a 26 bit inter-packet time value
pkts = []
latencies = []
inter_packet_times = []
for _ in range(N_PACKETS):
pkts.append(gen_packet())
latencies.append(random.randint(0, 2**24-1))
inter_packet_times.append(random.randint(0, 2**28-1))
# start the ring buffer memory main routine
cocotb.fork(ring_buff.main())
# wait some more clock cycles
yield wait_n_cycles(dut.clk, 5)
# iterate over all ring buffer sizes
for i, ring_buff_size in enumerate(RING_BUFF_SIZES):
# set ring buffer size
ring_buff.set_size(ring_buff_size)
# iterate over all adderesses where ring buffer shall be located in
# memory
for j, ring_buff_addr in enumerate(RING_BUFF_ADDRS):
# print status
print("Test %d/%d (this will take a while)" %
(i*len(RING_BUFF_ADDRS) + j + 1,
len(RING_BUFF_ADDRS) * len(RING_BUFF_SIZES)))
# we have a total of 8 GByte of memory. Make sure the ring buffer
# fits at the desired address
if ring_buff_addr + ring_buff_size > 0x1FFFFFFFF:
raise cocotb.result.TestFailure("ring buffer is too large")
# to reduce the simulation memory footprint, provide the memory
# module the first memory address that we actually care about
ring_buff.set_offset(ring_buff_addr)
# write ring buffer memory location and address range
yield axilite_writer.write(CPUREG_OFFSET_CTRL_MEM_ADDR_HI,
ring_buff_addr >> 32)
yield axilite_writer.write(CPUREG_OFFSET_CTRL_MEM_ADDR_LO,
ring_buff_addr & 0xFFFFFFFF)
yield axilite_writer.write(CPUREG_OFFSET_CTRL_MEM_RANGE,
ring_buff_size - 1)
# itererate over all capture lengths
for max_len_capture in MAX_CAPTURE_LENS:
# reset read address pointer
yield axilite_writer.write(CPUREG_OFFSET_CTRL_ADDR_RD, 0x0)
# set max capture length
yield axilite_writer.write(CPUREG_OFFSET_CTRL_MAX_LEN_CAPTURE,
max_len_capture)
# start couroutine that applies packets at input
cocotb.fork(packets_write(dut, axis_writer, axilite_writer,
axilite_reader, pkts, latencies,
inter_packet_times))
# wait a bit
yield wait_n_cycles(dut.clk, 50)
# start the ring buffer read coroutine and wait until it
# completes
yield ring_buff_read(dut, axilite_writer, axilite_reader,
ring_buff, ring_buff_addr,
max_len_capture, pkts, latencies,
inter_packet_times)
# make sure no error occured
errs = yield axilite_reader.read(CPUREG_OFFSET_STATUS_ERRS)
assert errs == 0x0
# make sure packet count is correct
pkt_cnt = \
yield axilite_reader.read(CPUREG_OFFSET_STATUS_PKT_CNT)
assert pkt_cnt == len(pkts)
# make sure module is deactivated now
active = yield axilite_reader.read(CPUREG_OFFSET_STATUS_ACTIVE)
assert active == 0
# clear the ring buffer contents
ring_buff.clear()
|
[
"lib.axilite.AXI_Lite_Writer",
"lib.net.packet_to_axis_data",
"cocotb.result.TestFailure",
"random.randint",
"lib.axis.AXIS_Writer",
"lib.axilite.AXI_Lite_Reader",
"lib.mem.Mem",
"lib.tb.clk_gen",
"lib.net.gen_packet",
"cocotb.test",
"lib.tb.swp_byte_order",
"lib.tb.wait_n_cycles",
"lib.tb.rstn",
"lib.net.axis_data_to_packet"
] |
[((11054, 11067), 'cocotb.test', 'cocotb.test', ([], {}), '()\n', (11065, 11067), False, 'import cocotb\n'), ((2128, 2151), 'random.randint', 'random.randint', (['(1)', '(1513)'], {}), '(1, 1513)\n', (2142, 2151), False, 'import random\n'), ((11379, 11396), 'lib.axilite.AXI_Lite_Writer', 'AXI_Lite_Writer', ([], {}), '()\n', (11394, 11396), False, 'from lib.axilite import AXI_Lite_Reader, AXI_Lite_Writer\n'), ((11571, 11588), 'lib.axilite.AXI_Lite_Reader', 'AXI_Lite_Reader', ([], {}), '()\n', (11586, 11588), False, 'from lib.axilite import AXI_Lite_Reader, AXI_Lite_Writer\n'), ((11762, 11775), 'lib.axis.AXIS_Writer', 'AXIS_Writer', ([], {}), '()\n', (11773, 11775), False, 'from lib.axis import AXIS_Writer\n'), ((11963, 11969), 'lib.mem.Mem', 'Mem', (['(0)'], {}), '(0)\n', (11966, 11969), False, 'from lib.mem import Mem\n'), ((2549, 2575), 'lib.tb.wait_n_cycles', 'wait_n_cycles', (['dut.clk', '(10)'], {}), '(dut.clk, 10)\n', (2562, 2575), False, 'from lib.tb import clk_gen, rstn, wait_n_cycles, swp_byte_order\n'), ((2713, 2753), 'lib.net.packet_to_axis_data', 'packet_to_axis_data', (['pkt', 'AXIS_BIT_WIDTH'], {}), '(pkt, AXIS_BIT_WIDTH)\n', (2732, 2753), False, 'from lib.net import gen_packet, packet_to_axis_data, axis_data_to_packet\n'), ((7293, 7348), 'cocotb.result.TestFailure', 'cocotb.result.TestFailure', (['"""ring buffer size too small"""'], {}), "('ring buffer size too small')\n", (7318, 7348), False, 'import cocotb\n'), ((7457, 7510), 'cocotb.result.TestFailure', 'cocotb.result.TestFailure', (['"""ring buffer size invalid"""'], {}), "('ring buffer size invalid')\n", (7482, 7510), False, 'import cocotb\n'), ((7626, 7678), 'cocotb.result.TestFailure', 'cocotb.result.TestFailure', (['"""transfer size too large"""'], {}), "('transfer size too large')\n", (7651, 7678), False, 'import cocotb\n'), ((11177, 11207), 'lib.tb.clk_gen', 'clk_gen', (['dut.clk', 'CLK_FREQ_MHZ'], {}), '(dut.clk, CLK_FREQ_MHZ)\n', (11184, 11207), False, 'from lib.tb import clk_gen, rstn, wait_n_cycles, swp_byte_order\n'), ((11281, 11304), 'lib.tb.rstn', 'rstn', (['dut.clk', 'dut.rstn'], {}), '(dut.clk, dut.rstn)\n', (11285, 11304), False, 'from lib.tb import clk_gen, rstn, wait_n_cycles, swp_byte_order\n'), ((12519, 12544), 'lib.tb.wait_n_cycles', 'wait_n_cycles', (['dut.clk', '(5)'], {}), '(dut.clk, 5)\n', (12532, 12544), False, 'from lib.tb import clk_gen, rstn, wait_n_cycles, swp_byte_order\n'), ((4377, 4414), 'lib.tb.swp_byte_order', 'swp_byte_order', (['d', '(AXIS_BIT_WIDTH / 8)'], {}), '(d, AXIS_BIT_WIDTH / 8)\n', (4391, 4414), False, 'from lib.tb import clk_gen, rstn, wait_n_cycles, swp_byte_order\n'), ((6201, 6249), 'lib.net.axis_data_to_packet', 'axis_data_to_packet', (['packet_data', '(2 ** 8 - 1)', '(64)'], {}), '(packet_data, 2 ** 8 - 1, 64)\n', (6220, 6249), False, 'from lib.net import gen_packet, packet_to_axis_data, axis_data_to_packet\n'), ((6278, 6342), 'lib.net.axis_data_to_packet', 'axis_data_to_packet', (['packet_data', '(2 ** (len_capture % 8) - 1)', '(64)'], {}), '(packet_data, 2 ** (len_capture % 8) - 1, 64)\n', (6297, 6342), False, 'from lib.net import gen_packet, packet_to_axis_data, axis_data_to_packet\n'), ((6546, 6614), 'cocotb.result.TestFailure', 'cocotb.result.TestFailure', (["(('Packet #%d: ' + 'invalid data') % i_pkt)"], {}), "(('Packet #%d: ' + 'invalid data') % i_pkt)\n", (6571, 6614), False, 'import cocotb\n'), ((10887, 10914), 'lib.tb.wait_n_cycles', 'wait_n_cycles', (['dut.clk', '(100)'], {}), '(dut.clk, 100)\n', (10900, 10914), False, 'from lib.tb import clk_gen, rstn, wait_n_cycles, swp_byte_order\n'), ((12262, 12274), 'lib.net.gen_packet', 'gen_packet', ([], {}), '()\n', (12272, 12274), False, 'from lib.net import gen_packet, packet_to_axis_data, axis_data_to_packet\n'), ((12301, 12331), 'random.randint', 'random.randint', (['(0)', '(2 ** 24 - 1)'], {}), '(0, 2 ** 24 - 1)\n', (12315, 12331), False, 'import random\n'), ((12363, 12393), 'random.randint', 'random.randint', (['(0)', '(2 ** 28 - 1)'], {}), '(0, 2 ** 28 - 1)\n', (12377, 12393), False, 'import random\n'), ((3116, 3137), 'random.randint', 'random.randint', (['(0)', '(10)'], {}), '(0, 10)\n', (3130, 3137), False, 'import random\n'), ((13281, 13334), 'cocotb.result.TestFailure', 'cocotb.result.TestFailure', (['"""ring buffer is too large"""'], {}), "('ring buffer is too large')\n", (13306, 13334), False, 'import cocotb\n'), ((4810, 4895), 'cocotb.result.TestFailure', 'cocotb.result.TestFailure', (["(('Packet #%d: ' + 'Latency value not ' + 'valid') % i)"], {}), "(('Packet #%d: ' + 'Latency value not ' + 'valid') % i\n )\n", (4835, 4895), False, 'import cocotb\n'), ((5141, 5214), 'cocotb.result.TestFailure', 'cocotb.result.TestFailure', (["(('Packet #%d: ' + 'incorrect latency') % i_pkt)"], {}), "(('Packet #%d: ' + 'incorrect latency') % i_pkt)\n", (5166, 5214), False, 'import cocotb\n'), ((5491, 5583), 'cocotb.result.TestFailure', 'cocotb.result.TestFailure', (["(('Packet #%d: ' + 'incorrect inter-' + 'packet time') % i_pkt)"], {}), "(('Packet #%d: ' + 'incorrect inter-' +\n 'packet time') % i_pkt)\n", (5516, 5583), False, 'import cocotb\n'), ((5824, 5909), 'cocotb.result.TestFailure', 'cocotb.result.TestFailure', (["(('Packet #%d: ' + 'invalid wire ' + 'length') % i_pkt)"], {}), "(('Packet #%d: ' + 'invalid wire ' + 'length') % i_pkt\n )\n", (5849, 5909), False, 'import cocotb\n'), ((14738, 14764), 'lib.tb.wait_n_cycles', 'wait_n_cycles', (['dut.clk', '(50)'], {}), '(dut.clk, 50)\n', (14751, 14764), False, 'from lib.tb import clk_gen, rstn, wait_n_cycles, swp_byte_order\n')]
|
import errno
import os
import locale
from datetime import datetime
try:
import pytz
HAS_PYTZ = True
except ImportError:
HAS_PYTZ = False
from i3pystatus import IntervalModule
class Clock(IntervalModule):
"""
This class shows a clock.
.. note:: Optionally requires `pytz` for time zone data when using time
zones other than local time.
Format can be passed in four different ways:
- single string, no timezone, just the strftime-format
- one two-tuple, first is the format, second the timezone
- list of strings - no timezones
- list of two tuples, first is the format, second is timezone
Use mousewheel to cycle between formats.
For complete time format specification see:
::
man strftime
All available timezones are located in directory:
::
/usr/share/zoneinfo/
.. rubric:: Format examples
::
# one format, local timezone
format = '%a %b %-d %b %X'
# multiple formats, local timezone
format = [ '%a %b %-d %b %X', '%X' ]
# one format, specified timezone
format = ('%a %b %-d %b %X', 'Europe/Bratislava')
# multiple formats, specified timezones
format = [ ('%a %b %-d %b %X', 'America/New_York'), ('%X', 'Etc/GMT+9') ]
"""
settings = (
("format", "`None` means to use the default, locale-dependent format."),
("color", "RGB hexadecimal code color specifier, default to #ffffff"),
)
format = None
color = "#ffffff"
interval = 1
on_upscroll = ["scroll_format", 1]
on_downscroll = ["scroll_format", -1]
def init(self):
env_lang = os.environ.get('LC_TIME', None)
if env_lang is None:
env_lang = os.environ.get('LANG', None)
if env_lang is not None:
if env_lang.find('.') != -1:
lang = tuple(env_lang.split('.', 1))
else:
lang = (env_lang, None)
else:
lang = (None, None)
if lang != locale.getlocale(locale.LC_TIME):
# affects language of *.strftime() in whole program
locale.setlocale(locale.LC_TIME, lang)
if self.format is None:
if lang[0] == 'en_US':
# MDY format - United States of America
self.format = ["%a %b %-d %X"]
else:
# DMY format - almost all other countries
self.format = ["%a %-d %b %X"]
elif isinstance(self.format, str) or isinstance(self.format, tuple):
self.format = [self.format]
self.system_tz = self._get_system_tz()
self.format = [self._expand_format(fmt) for fmt in self.format]
self.current_format_id = 0
def _expand_format(self, fmt):
if isinstance(fmt, tuple):
if len(fmt) == 1:
return (fmt[0], None)
else:
if not HAS_PYTZ:
raise RuntimeError("Need `pytz` for timezone data")
return (fmt[0], pytz.timezone(fmt[1]))
return (fmt, self.system_tz)
def _get_system_tz(self):
'''
Get the system timezone for use when no timezone is explicitly provided
Requires pytz, if not available then no timezone will be set when not
explicitly provided.
'''
if not HAS_PYTZ:
return None
def _etc_localtime():
try:
with open('/etc/localtime', 'rb') as fp:
return pytz.tzfile.build_tzinfo('system', fp)
except OSError as exc:
if exc.errno != errno.ENOENT:
self.logger.error(
'Unable to read from /etc/localtime: %s', exc.strerror
)
except pytz.UnknownTimeZoneError:
self.logger.error(
'/etc/localtime contains unrecognized tzinfo'
)
return None
def _etc_timezone():
try:
with open('/etc/timezone', 'r') as fp:
tzname = fp.read().strip()
return pytz.timezone(tzname)
except OSError as exc:
if exc.errno != errno.ENOENT:
self.logger.error(
'Unable to read from /etc/localtime: %s', exc.strerror
)
except pytz.UnknownTimeZoneError:
self.logger.error(
'/etc/timezone contains unrecognized timezone \'%s\'',
tzname
)
return None
return _etc_localtime() or _etc_timezone()
def run(self):
time = datetime.now(self.format[self.current_format_id][1])
self.output = {
"full_text": time.strftime(self.format[self.current_format_id][0]),
"color": self.color,
"urgent": False,
}
def scroll_format(self, step=1):
self.current_format_id = (self.current_format_id + step) % len(self.format)
|
[
"os.environ.get",
"pytz.tzfile.build_tzinfo",
"pytz.timezone",
"locale.setlocale",
"datetime.datetime.now",
"locale.getlocale"
] |
[((1667, 1698), 'os.environ.get', 'os.environ.get', (['"""LC_TIME"""', 'None'], {}), "('LC_TIME', None)\n", (1681, 1698), False, 'import os\n'), ((4702, 4754), 'datetime.datetime.now', 'datetime.now', (['self.format[self.current_format_id][1]'], {}), '(self.format[self.current_format_id][1])\n', (4714, 4754), False, 'from datetime import datetime\n'), ((1751, 1779), 'os.environ.get', 'os.environ.get', (['"""LANG"""', 'None'], {}), "('LANG', None)\n", (1765, 1779), False, 'import os\n'), ((2032, 2064), 'locale.getlocale', 'locale.getlocale', (['locale.LC_TIME'], {}), '(locale.LC_TIME)\n', (2048, 2064), False, 'import locale\n'), ((2142, 2180), 'locale.setlocale', 'locale.setlocale', (['locale.LC_TIME', 'lang'], {}), '(locale.LC_TIME, lang)\n', (2158, 2180), False, 'import locale\n'), ((4147, 4168), 'pytz.timezone', 'pytz.timezone', (['tzname'], {}), '(tzname)\n', (4160, 4168), False, 'import pytz\n'), ((3042, 3063), 'pytz.timezone', 'pytz.timezone', (['fmt[1]'], {}), '(fmt[1])\n', (3055, 3063), False, 'import pytz\n'), ((3526, 3564), 'pytz.tzfile.build_tzinfo', 'pytz.tzfile.build_tzinfo', (['"""system"""', 'fp'], {}), "('system', fp)\n", (3550, 3564), False, 'import pytz\n')]
|
from selenium.webdriver.common.keys import Keys
from selenium import webdriver
from django.test import LiveServerTestCase
from django.test import TestCase
from django.urls import reverse
from accounts.models import UserProfileInfo, User
from admin_app.models import Magazine, Truck
from accounts.forms import UserForm, UserProfileInfoForm
from django.test import Client
from django.contrib.auth.hashers import check_password
from random_word import RandomWords
class RegistrationTestCase(TestCase):
def setUp(self):
self.user1 =User.objects.create(username="user1",first_name="Name1",last_name="Last1",
email="<EMAIL>",password='<PASSWORD>')
self.user1_info= UserProfileInfo.objects.create(user=self.user1,company_name="company 1",phone_number="123456789",longitude=50.064824,
latitude=19.923944,is_client=True)
self.magazine=Magazine.objects.create(longitude=20.262038, latitude=49.819856, radius=50)
self.truck1=Truck.objects.create(id_truck=1,capacity=100, return_date='2018-12-25',start_longitude=20.031655 , start_latitude=49.47704,
end_longitude=19.964476, end_latitude=50.088287)
class RegisterViewTest(RegistrationTestCase):
def test_unique_username(self):
response=self.client.post(reverse('accounts:register_user'),data={'username':'user1','first_name':'Test1',
'last_name':'Test1','email':'<EMAIL>','password':'<PASSWORD>',
'company_name':'TestFirma','city':'Kraków','street':'Floriańska','house_number':27})
self.assertEqual(response.status_code, 200)
self.failUnless(response.context['user_form'])
self.assertFormError(response, 'user_form', field='username',
errors='A user with that username already exists.')
def test_too_long_distance(self):
response = self.client.post(reverse('accounts:register_user'), data={'username': 'test1', 'first_name': 'Test1',
'last_name': 'Test1', 'email': '<EMAIL>',
'password': '<PASSWORD>', 'company_name':'TestFirma',
'city': 'Krzeszowice', 'street': 'Krakowska',
'house_number': 30})
self.assertEqual(response.status_code,200)
self.failUnless(response.context['profile_form'])
self.failUnless(response.context['profile_form'].errors)
def test_success(self):
response = self.client.post(reverse('accounts:register_user'), data={'username': 'test1', 'first_name': 'Test1',
'last_name': 'Test1', 'email': '<EMAIL>',
'password': '<PASSWORD>', 'company_name':'TestFirma',
'city': 'Kraków', 'street': 'Adama Mickiewicza',
'house_number': 30})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'accounts/register_user.html')
self.assertEqual(User.objects.count(), 2)
self.assertEqual(UserProfileInfo.objects.count(),2)
def test_get_success(self):
response=self.client.get(reverse('accounts:register_user'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response,
'accounts/register_user.html')
self.failUnless(isinstance(response.context['user_form'],
UserForm))
self.failUnless(isinstance(response.context['profile_form'],
UserProfileInfoForm))
def test_coordinates_calcualtion(self):
response=self.client.post(reverse('accounts:register_user'),data={'username': 'test1', 'first_name': 'Test1',
'last_name': 'Test1', 'email': '<EMAIL>',
'password': '<PASSWORD>', 'company_name':'TestFirma',
'city': 'Kraków', 'street': 'Adama Mickiewicza',
'house_number': 30})
created_user=User.objects.get(username='test1')
created_profile=UserProfileInfo.objects.get(user=created_user)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'accounts/register_user.html')
self.assertEqual(User.objects.count(), 2)
self.assertEqual(UserProfileInfo.objects.count(), 2)
self.assertEqual(19.92385,created_profile.longitude)
self.assertEqual(50.06445,created_profile.latitude)
def test_cluster_calculation(self):
response = self.client.post(reverse('accounts:register_user'), data={'username': 'test1', 'first_name': 'Test1',
'last_name': 'Test1', 'email': '<EMAIL>',
'password': '<PASSWORD>',
'company_name': 'TestFirma',
'city': 'Myślenice',
'street': '3 Maja',
'house_number': 20})
created_user = User.objects.get(username='test1')
created_profile = UserProfileInfo.objects.get(user=created_user)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'accounts/register_user.html')
self.assertEqual(self.truck1.id_truck, created_profile.id_cluster.id_truck)
class ChangePasswordViewTest(TestCase):
def setUp(self):
self.user1 = User.objects.create_user(username="user1", first_name="Name1", last_name="Last1",
email="<EMAIL>", password='<PASSWORD>')
self.user1_info = UserProfileInfo.objects.create(user=self.user1, company_name="company 1", phone_number="123456789",
longitude=50.064824,
latitude=19.923944, is_client=True)
self.c = Client()
def test_password_change(self):
login = self.c.login(username='user1', password='<PASSWORD>')
response = self.c.post(reverse('accounts:change_password'), data={'old_password':'<PASSWORD>',
'new_password1':'<PASSWORD>',
'new_password2':'<PASSWORD>'})
self.assertEqual(login,True)
self.assertRedirects(response, reverse('accounts:edit_my_profile'), status_code=302)
self.user1.refresh_from_db()
self.assertTrue(check_password('<PASSWORD>', self.user1.password))
class MyProfileViewTest(TestCase):
def test_get(self):
user1 = User.objects.create_user(username="user1", first_name="Name1", last_name="Last1",
email="<EMAIL>", password='<PASSWORD>')
user1_info = UserProfileInfo.objects.create(user=user1, company_name="company 1", phone_number="123456789",
longitude=50.064824,
latitude=19.923944, is_client=True)
c = Client()
login = c.login(username='user1', password='<PASSWORD>')
response=c.get(reverse('accounts:my_profile'))
self.assertEqual(response.status_code, 200)
self.assertTrue(login)
self.assertEqual(response.context['user'],user1)
self.assertEqual(response.context['user_profile'],user1_info)
self.assertEqual(response.context['user'].first_name,"Name1")
self.assertEqual(response.context['user_profile'].company_name,"company 1")
class AuthViewTest(TestCase):
def setUp(self):
self.user1 = User.objects.create_user(username="user1", first_name="Name1", last_name="Last1",
email="<EMAIL>", password='<PASSWORD>')
self.user1_info = UserProfileInfo.objects.create(user=self.user1, company_name="company 1", phone_number="123456789",
longitude=50.064824, latitude=19.923944, is_client=True)
self.c = Client()
def test_login_success(self):
response = self.c.get(reverse('accounts:user_login'))
self.assertEquals(response.status_code, 200)
response=self.c.post(reverse('accounts:user_login'), data={'username':'user1','password':'<PASSWORD>'})
self.assertIn('_auth_user_id', self.c.session)
self.assertRedirects(response,reverse('index'))
def test_login_fail(self):
response = self.c.get(reverse('accounts:user_login'))
self.assertEquals(response.status_code, 200)
response = self.c.post(reverse('accounts:user_login'), data={'username': 'user1', 'password': '<PASSWORD>'})
self.assertFormError(response, 'form',field=None,
errors='Błąd logowania! Spróbuj ponownie')
def test_logout(self):
login=self.c.login(username='user1',password='<PASSWORD>')
self.assertTrue(login)
response = self.c.get(reverse('logout'))
self.assertEquals(response.status_code, 302)
self.assertRedirects(response,reverse('index'))
self.assertNotIn('_auth_user_id', self.c.session)
class ShowProfileTestView(TestCase):
def setUp(self):
self.user1 = User.objects.create_user(username="user1", first_name="Name1", last_name="Last1",
email="<EMAIL>", password='<PASSWORD>')
self.user1_info = UserProfileInfo.objects.create(user=self.user1, company_name="company 1", phone_number="123456789",
longitude=50.064824, latitude=19.923944, is_client=True)
self.c = Client()
def test_show_profile(self):
response=self.c.get(reverse("accounts:show_profile", kwargs={'username': 'user1'}))
self.assertEqual(response.status_code,200)
self.assertEqual(response.context['user'], self.user1)
self.assertEqual(response.context['user_profile'], self.user1_info)
# views (uses selenium)
class TestRegister(LiveServerTestCase):
def setUp(self):
self.selenium = webdriver.Firefox()
super(TestRegister, self).setUp()
self.randomUsernameClient = RandomWords().get_random_word()
self.randomUsernameDriver = RandomWords().get_random_word()
def tearDown(self):
self.selenium.quit()
super(TestRegister, self).tearDown()
def test_register_deliever_success(self):
selenium = self.selenium
selenium.get('http://127.0.0.1:8000/accounts/register_user/')
selenium.find_element_by_id('id_username').send_keys(self.randomUsernameDriver)
selenium.find_element_by_id('id_first_name').send_keys('testtest')
selenium.find_element_by_id('id_last_name').send_keys('test')
selenium.find_element_by_id('id_email').send_keys('<EMAIL>')
selenium.find_element_by_id('id_password').send_keys('<PASSWORD>')
selenium.find_element_by_id('id_company_name').send_keys('tmp')
selenium.find_element_by_id('id_phone_number').send_keys('123456789')
selenium.find_element_by_id('city').send_keys('Krakow')
selenium.find_element_by_id('street').send_keys('al.Mickiewicza')
selenium.find_element_by_id('house_number').send_keys('1')
selenium.find_element_by_id('id_is_client')
selenium.find_element_by_name('register').click()
selenium.implicitly_wait(40)
assert 'You have registered successfully' in selenium.page_source
def test_register_client_success(self):
selenium = self.selenium
selenium.get('http://127.0.0.1:8000/accounts/register_user/')
selenium.find_element_by_id('id_username').send_keys(
self.randomUsernameClient)
selenium.find_element_by_id('id_first_name').send_keys('test<PASSWORD>')
selenium.find_element_by_id('id_last_name').send_keys('test')
selenium.find_element_by_id('id_email').send_keys('<EMAIL>')
selenium.find_element_by_id('id_password').send_keys('<PASSWORD>')
selenium.find_element_by_id('id_company_name').send_keys('tmp')
selenium.find_element_by_id('id_phone_number').send_keys('123456789')
selenium.find_element_by_id('city').send_keys('Krakow')
selenium.find_element_by_id('street').send_keys('al.Mickiewicza')
selenium.find_element_by_id('house_number').send_keys('1')
selenium.find_element_by_id('id_is_client').click()
selenium.find_element_by_name('register').click()
selenium.implicitly_wait(20)
assert 'You have registered successfully' in selenium.page_source
class TestLogin(LiveServerTestCase):
def setUp(self):
self.selenium = webdriver.Firefox()
super(TestLogin, self).setUp()
def tearDown(self):
self.selenium.quit()
super(TestLogin, self).tearDown()
def test_login_success(self):
selenium = self.selenium
selenium.get('http://127.0.0.1:8000/accounts/user_login/')
selenium.find_element_by_name('username').send_keys('testClient')
selenium.find_element_by_name('password').send_keys('<PASSWORD>')
selenium.find_element_by_name('login').click()
selenium.implicitly_wait(20)
assert 'LOGOUT' in selenium.page_source
def test_login_wrong_password_error(self):
selenium = self.selenium
selenium.get('http://127.0.0.1:8000/accounts/user_login/')
selenium.find_element_by_name('username').send_keys('testtest')
selenium.find_element_by_name('password').send_keys('<PASSWORD>')
selenium.find_element_by_name('login').click()
selenium.implicitly_wait(20)
assert 'LOGIN' in selenium.page_source
def test_login_user_not_exists_error(self):
selenium = self.selenium
selenium.get('http://127.0.0.1:8000/accounts/user_login/')
selenium.find_element_by_name('username').send_keys(
RandomWords().get_random_word())
selenium.find_element_by_name('password').send_keys('<PASSWORD>')
selenium.find_element_by_name('login').click()
assert 'LOGIN' in selenium.page_source
class TestLogout(LiveServerTestCase):
def setUp(self):
self.selenium = webdriver.Firefox()
super(TestLogout, self).setUp()
self.selenium.get('http://127.0.0.1:8000/accounts/user_login/')
self.selenium.find_element_by_name('username').send_keys('testClient')
self.selenium.find_element_by_name('password').send_keys('<PASSWORD>')
self.selenium.find_element_by_name('login').click()
def tearDown(self):
self.selenium.quit()
super(TestLogout, self).tearDown()
def test_logout(self):
selenium = self.selenium
selenium.get('http://127.0.0.1:8000')
self.selenium.find_element_by_name('logout_nav').click()
assert 'LOGIN' in selenium.page_source
class TestEditProfile(LiveServerTestCase):
def setUp(self):
self.selenium = webdriver.Firefox()
super(TestEditProfile, self).setUp()
self.selenium.get('http://127.0.0.1:8000/accounts/user_login/')
self.selenium.find_element_by_name('username').send_keys('Deliever')
self.selenium.find_element_by_name('password').send_keys('<PASSWORD>')
self.selenium.find_element_by_name('login').click()
def tearDown(self):
self.selenium.quit()
super(TestEditProfile, self).tearDown()
def test_edit_profile_info_success(self):
selenium = self.selenium
selenium.get('http://127.0.0.1:8000/accounts/profile/edit/')
selenium.find_element_by_id('id_first_name').send_keys('test<PASSWORD>')
selenium.find_element_by_id('id_last_name').send_keys('test')
selenium.find_element_by_id('id_company_name').send_keys('test')
selenium.find_element_by_id('id_phone_number').send_keys('123456789')
selenium.find_element_by_name('zapisz').click()
assert 'My profile' in selenium.page_source
class TestButtons(LiveServerTestCase):
def setUp(self):
self.selenium = webdriver.Firefox()
super(TestButtons, self).setUp()
def tearDown(self):
self.selenium.quit()
super(TestButtons, self).tearDown()
def test_index_button(self):
selenium = self.selenium
selenium.get('http://127.0.0.1:8000/')
selenium.find_element_by_name('index').click()
assert 'INDEX' in selenium.page_source
def test_admin_button(self):
selenium = self.selenium
selenium.get('http://127.0.0.1:8000/')
selenium.find_element_by_name('admin').click()
assert 'Django administration' in selenium.page_source
def test_login_button(self):
selenium = self.selenium
selenium.get('http://127.0.0.1:8000/')
selenium.find_element_by_name('login_nav').click()
assert 'Username:' in selenium.page_source
|
[
"django.test.Client",
"selenium.webdriver.Firefox",
"accounts.models.User.objects.count",
"accounts.models.User.objects.create",
"django.urls.reverse",
"admin_app.models.Magazine.objects.create",
"accounts.models.User.objects.get",
"accounts.models.UserProfileInfo.objects.create",
"accounts.models.UserProfileInfo.objects.get",
"admin_app.models.Truck.objects.create",
"random_word.RandomWords",
"accounts.models.UserProfileInfo.objects.count",
"django.contrib.auth.hashers.check_password",
"accounts.models.User.objects.create_user"
] |
[((542, 662), 'accounts.models.User.objects.create', 'User.objects.create', ([], {'username': '"""user1"""', 'first_name': '"""Name1"""', 'last_name': '"""Last1"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(username='user1', first_name='Name1', last_name='Last1',\n email='<EMAIL>', password='<PASSWORD>')\n", (561, 662), False, 'from accounts.models import UserProfileInfo, User\n'), ((709, 873), 'accounts.models.UserProfileInfo.objects.create', 'UserProfileInfo.objects.create', ([], {'user': 'self.user1', 'company_name': '"""company 1"""', 'phone_number': '"""123456789"""', 'longitude': '(50.064824)', 'latitude': '(19.923944)', 'is_client': '(True)'}), "(user=self.user1, company_name='company 1',\n phone_number='123456789', longitude=50.064824, latitude=19.923944,\n is_client=True)\n", (739, 873), False, 'from accounts.models import UserProfileInfo, User\n'), ((942, 1017), 'admin_app.models.Magazine.objects.create', 'Magazine.objects.create', ([], {'longitude': '(20.262038)', 'latitude': '(49.819856)', 'radius': '(50)'}), '(longitude=20.262038, latitude=49.819856, radius=50)\n', (965, 1017), False, 'from admin_app.models import Magazine, Truck\n'), ((1040, 1222), 'admin_app.models.Truck.objects.create', 'Truck.objects.create', ([], {'id_truck': '(1)', 'capacity': '(100)', 'return_date': '"""2018-12-25"""', 'start_longitude': '(20.031655)', 'start_latitude': '(49.47704)', 'end_longitude': '(19.964476)', 'end_latitude': '(50.088287)'}), "(id_truck=1, capacity=100, return_date='2018-12-25',\n start_longitude=20.031655, start_latitude=49.47704, end_longitude=\n 19.964476, end_latitude=50.088287)\n", (1060, 1222), False, 'from admin_app.models import Magazine, Truck\n'), ((4806, 4840), 'accounts.models.User.objects.get', 'User.objects.get', ([], {'username': '"""test1"""'}), "(username='test1')\n", (4822, 4840), False, 'from accounts.models import UserProfileInfo, User\n'), ((4865, 4911), 'accounts.models.UserProfileInfo.objects.get', 'UserProfileInfo.objects.get', ([], {'user': 'created_user'}), '(user=created_user)\n', (4892, 4911), False, 'from accounts.models import UserProfileInfo, User\n'), ((6076, 6110), 'accounts.models.User.objects.get', 'User.objects.get', ([], {'username': '"""test1"""'}), "(username='test1')\n", (6092, 6110), False, 'from accounts.models import UserProfileInfo, User\n'), ((6137, 6183), 'accounts.models.UserProfileInfo.objects.get', 'UserProfileInfo.objects.get', ([], {'user': 'created_user'}), '(user=created_user)\n', (6164, 6183), False, 'from accounts.models import UserProfileInfo, User\n'), ((6478, 6604), 'accounts.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""user1"""', 'first_name': '"""Name1"""', 'last_name': '"""Last1"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(username='user1', first_name='Name1', last_name=\n 'Last1', email='<EMAIL>', password='<PASSWORD>')\n", (6502, 6604), False, 'from accounts.models import UserProfileInfo, User\n'), ((6668, 6832), 'accounts.models.UserProfileInfo.objects.create', 'UserProfileInfo.objects.create', ([], {'user': 'self.user1', 'company_name': '"""company 1"""', 'phone_number': '"""123456789"""', 'longitude': '(50.064824)', 'latitude': '(19.923944)', 'is_client': '(True)'}), "(user=self.user1, company_name='company 1',\n phone_number='123456789', longitude=50.064824, latitude=19.923944,\n is_client=True)\n", (6698, 6832), False, 'from accounts.models import UserProfileInfo, User\n'), ((6946, 6954), 'django.test.Client', 'Client', ([], {}), '()\n', (6952, 6954), False, 'from django.test import Client\n'), ((7699, 7825), 'accounts.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""user1"""', 'first_name': '"""Name1"""', 'last_name': '"""Last1"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(username='user1', first_name='Name1', last_name=\n 'Last1', email='<EMAIL>', password='<PASSWORD>')\n", (7723, 7825), False, 'from accounts.models import UserProfileInfo, User\n'), ((7884, 8043), 'accounts.models.UserProfileInfo.objects.create', 'UserProfileInfo.objects.create', ([], {'user': 'user1', 'company_name': '"""company 1"""', 'phone_number': '"""123456789"""', 'longitude': '(50.064824)', 'latitude': '(19.923944)', 'is_client': '(True)'}), "(user=user1, company_name='company 1',\n phone_number='123456789', longitude=50.064824, latitude=19.923944,\n is_client=True)\n", (7914, 8043), False, 'from accounts.models import UserProfileInfo, User\n'), ((8153, 8161), 'django.test.Client', 'Client', ([], {}), '()\n', (8159, 8161), False, 'from django.test import Client\n'), ((8722, 8848), 'accounts.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""user1"""', 'first_name': '"""Name1"""', 'last_name': '"""Last1"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(username='user1', first_name='Name1', last_name=\n 'Last1', email='<EMAIL>', password='<PASSWORD>')\n", (8746, 8848), False, 'from accounts.models import UserProfileInfo, User\n'), ((8912, 9076), 'accounts.models.UserProfileInfo.objects.create', 'UserProfileInfo.objects.create', ([], {'user': 'self.user1', 'company_name': '"""company 1"""', 'phone_number': '"""123456789"""', 'longitude': '(50.064824)', 'latitude': '(19.923944)', 'is_client': '(True)'}), "(user=self.user1, company_name='company 1',\n phone_number='123456789', longitude=50.064824, latitude=19.923944,\n is_client=True)\n", (8942, 9076), False, 'from accounts.models import UserProfileInfo, User\n'), ((9139, 9147), 'django.test.Client', 'Client', ([], {}), '()\n', (9145, 9147), False, 'from django.test import Client\n'), ((10347, 10473), 'accounts.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""user1"""', 'first_name': '"""Name1"""', 'last_name': '"""Last1"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""'}), "(username='user1', first_name='Name1', last_name=\n 'Last1', email='<EMAIL>', password='<PASSWORD>')\n", (10371, 10473), False, 'from accounts.models import UserProfileInfo, User\n'), ((10537, 10701), 'accounts.models.UserProfileInfo.objects.create', 'UserProfileInfo.objects.create', ([], {'user': 'self.user1', 'company_name': '"""company 1"""', 'phone_number': '"""123456789"""', 'longitude': '(50.064824)', 'latitude': '(19.923944)', 'is_client': '(True)'}), "(user=self.user1, company_name='company 1',\n phone_number='123456789', longitude=50.064824, latitude=19.923944,\n is_client=True)\n", (10567, 10701), False, 'from accounts.models import UserProfileInfo, User\n'), ((10764, 10772), 'django.test.Client', 'Client', ([], {}), '()\n', (10770, 10772), False, 'from django.test import Client\n'), ((11202, 11221), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (11219, 11221), False, 'from selenium import webdriver\n'), ((13814, 13833), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (13831, 13833), False, 'from selenium import webdriver\n'), ((15340, 15359), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (15357, 15359), False, 'from selenium import webdriver\n'), ((16095, 16114), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (16112, 16114), False, 'from selenium import webdriver\n'), ((17194, 17213), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (17211, 17213), False, 'from selenium import webdriver\n'), ((1368, 1401), 'django.urls.reverse', 'reverse', (['"""accounts:register_user"""'], {}), "('accounts:register_user')\n", (1375, 1401), False, 'from django.urls import reverse\n'), ((2059, 2092), 'django.urls.reverse', 'reverse', (['"""accounts:register_user"""'], {}), "('accounts:register_user')\n", (2066, 2092), False, 'from django.urls import reverse\n'), ((2856, 2889), 'django.urls.reverse', 'reverse', (['"""accounts:register_user"""'], {}), "('accounts:register_user')\n", (2863, 2889), False, 'from django.urls import reverse\n'), ((3566, 3586), 'accounts.models.User.objects.count', 'User.objects.count', ([], {}), '()\n', (3584, 3586), False, 'from accounts.models import UserProfileInfo, User\n'), ((3616, 3647), 'accounts.models.UserProfileInfo.objects.count', 'UserProfileInfo.objects.count', ([], {}), '()\n', (3645, 3647), False, 'from accounts.models import UserProfileInfo, User\n'), ((3717, 3750), 'django.urls.reverse', 'reverse', (['"""accounts:register_user"""'], {}), "('accounts:register_user')\n", (3724, 3750), False, 'from django.urls import reverse\n'), ((4226, 4259), 'django.urls.reverse', 'reverse', (['"""accounts:register_user"""'], {}), "('accounts:register_user')\n", (4233, 4259), False, 'from django.urls import reverse\n'), ((5062, 5082), 'accounts.models.User.objects.count', 'User.objects.count', ([], {}), '()\n', (5080, 5082), False, 'from accounts.models import UserProfileInfo, User\n'), ((5112, 5143), 'accounts.models.UserProfileInfo.objects.count', 'UserProfileInfo.objects.count', ([], {}), '()\n', (5141, 5143), False, 'from accounts.models import UserProfileInfo, User\n'), ((5347, 5380), 'django.urls.reverse', 'reverse', (['"""accounts:register_user"""'], {}), "('accounts:register_user')\n", (5354, 5380), False, 'from django.urls import reverse\n'), ((7095, 7130), 'django.urls.reverse', 'reverse', (['"""accounts:change_password"""'], {}), "('accounts:change_password')\n", (7102, 7130), False, 'from django.urls import reverse\n'), ((7455, 7490), 'django.urls.reverse', 'reverse', (['"""accounts:edit_my_profile"""'], {}), "('accounts:edit_my_profile')\n", (7462, 7490), False, 'from django.urls import reverse\n'), ((7570, 7619), 'django.contrib.auth.hashers.check_password', 'check_password', (['"""<PASSWORD>"""', 'self.user1.password'], {}), "('<PASSWORD>', self.user1.password)\n", (7584, 7619), False, 'from django.contrib.auth.hashers import check_password\n'), ((8251, 8281), 'django.urls.reverse', 'reverse', (['"""accounts:my_profile"""'], {}), "('accounts:my_profile')\n", (8258, 8281), False, 'from django.urls import reverse\n'), ((9214, 9244), 'django.urls.reverse', 'reverse', (['"""accounts:user_login"""'], {}), "('accounts:user_login')\n", (9221, 9244), False, 'from django.urls import reverse\n'), ((9330, 9360), 'django.urls.reverse', 'reverse', (['"""accounts:user_login"""'], {}), "('accounts:user_login')\n", (9337, 9360), False, 'from django.urls import reverse\n'), ((9507, 9523), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (9514, 9523), False, 'from django.urls import reverse\n'), ((9587, 9617), 'django.urls.reverse', 'reverse', (['"""accounts:user_login"""'], {}), "('accounts:user_login')\n", (9594, 9617), False, 'from django.urls import reverse\n'), ((9705, 9735), 'django.urls.reverse', 'reverse', (['"""accounts:user_login"""'], {}), "('accounts:user_login')\n", (9712, 9735), False, 'from django.urls import reverse\n'), ((10079, 10096), 'django.urls.reverse', 'reverse', (['"""logout"""'], {}), "('logout')\n", (10086, 10096), False, 'from django.urls import reverse\n'), ((10190, 10206), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (10197, 10206), False, 'from django.urls import reverse\n'), ((10835, 10897), 'django.urls.reverse', 'reverse', (['"""accounts:show_profile"""'], {'kwargs': "{'username': 'user1'}"}), "('accounts:show_profile', kwargs={'username': 'user1'})\n", (10842, 10897), False, 'from django.urls import reverse\n'), ((11300, 11313), 'random_word.RandomWords', 'RandomWords', ([], {}), '()\n', (11311, 11313), False, 'from random_word import RandomWords\n'), ((11368, 11381), 'random_word.RandomWords', 'RandomWords', ([], {}), '()\n', (11379, 11381), False, 'from random_word import RandomWords\n'), ((15047, 15060), 'random_word.RandomWords', 'RandomWords', ([], {}), '()\n', (15058, 15060), False, 'from random_word import RandomWords\n')]
|
from flask_restx import fields
from application.api.mongodb import *
freshman_model = mongodb.model('freshman', {
'matricola': fields.String
})
id_conversation_model = mongodb.model('id_conversation_model', {
'id_conversation': fields.String
})
send_message_model = mongodb.model('insert_message', {
'id_conversation': fields.String,
'matricola_mittente': fields.String,
'matricola_destinatario': fields.String,
'messaggio': fields.String
})
get_message_model = mongodb.model('get_messages', {
'matricola_mittente': fields.String,
'matricola_destinatario': fields.String,
'messaggio': fields.String,
'data_invio': fields.String
})
conversation_model = mongodb.model('conversation_model', {
'matricola1': fields.String,
'matricola2': fields.String
})
full_conversation_model = mongodb.model('full_conversation_model', {
'id_conversation': fields.String,
'matricola1': fields.String,
'matricola2': fields.String,
'messages': fields.List(fields.Nested(get_message_model))
})
discipline_color_model = mongodb.model('discipline_color_model', {
'codice_corso': fields.String,
'codice_disciplina': fields.String,
'colore_esadecimale': fields.String
})
|
[
"flask_restx.fields.Nested"
] |
[((1005, 1037), 'flask_restx.fields.Nested', 'fields.Nested', (['get_message_model'], {}), '(get_message_model)\n', (1018, 1037), False, 'from flask_restx import fields\n')]
|
import re
import sys
meetup_svg = '.github/images/meetup.svg'
readme_md = 'README.md'
conf_py = 'docs/conf.py'
def rm_announce():
# remove all announcement
with open(readme_md) as fp:
_old = fp.read()
_new = re.sub(
r'(<!--startmsg-->\s*?\n).*(\n\s*?<!--endmsg-->)',
rf'\g<1>\g<2>',
_old,
flags=re.DOTALL,
)
with open(readme_md, 'w') as fp:
fp.write(_new)
with open(conf_py) as fp:
_old = fp.read()
_new = re.sub(
r'(# start-announce\s*?\n).*(\n\s*?# end-announce)',
rf'\g<1>\g<2>',
_old,
flags=re.DOTALL,
)
with open(conf_py, 'w') as fp:
fp.write(_new)
if len(sys.argv) < 3:
rm_announce()
else:
text = sys.argv[1]
url = sys.argv[2]
if not text or not url:
rm_announce()
else:
announce_url = f'''
"announcement": \'\'\'
<a href="{url}">{text}</a>
\'\'\',
'''
meetup_svg_url = f'<a href="{url}"><img src="https://github.com/jina-ai/jina/blob/master/{meetup_svg}?raw=true"></a>'
# update meetup_svg
with open(meetup_svg) as fp:
_old = fp.read()
_new = re.sub(r'(<a href=").*(")', rf'\g<1>{url}\g<2>', _old)
_new = re.sub(
r'(<!--startmsg-->\s*?\n).*(\n\s*?<!--endmsg-->)',
rf'\g<1>{text}\g<2>',
_new,
flags=re.DOTALL,
)
with open(meetup_svg, 'w') as fp:
fp.write(_new)
# update readme_md
with open(readme_md) as fp:
_old = fp.read()
_new = re.sub(
r'(<!--startmsg-->\s*?\n).*(\n\s*?<!--endmsg-->)',
rf'\g<1>{meetup_svg_url}\g<2>',
_old,
flags=re.DOTALL,
)
with open(readme_md, 'w') as fp:
fp.write(_new)
# update conf
with open(conf_py) as fp:
_old = fp.read()
_new = re.sub(
r'(# start-announce\s*?\n).*(\n\s*?# end-announce)',
rf'\g<1>{announce_url}\g<2>',
_old,
flags=re.DOTALL,
)
with open(conf_py, 'w') as fp:
fp.write(_new)
|
[
"re.sub"
] |
[((235, 339), 're.sub', 're.sub', (['"""(<!--startmsg-->\\\\s*?\\\\n).*(\\\\n\\\\s*?<!--endmsg-->)"""', 'f"""\\\\g<1>\\\\g<2>"""', '_old'], {'flags': 're.DOTALL'}), "('(<!--startmsg-->\\\\s*?\\\\n).*(\\\\n\\\\s*?<!--endmsg-->)',\n f'\\\\g<1>\\\\g<2>', _old, flags=re.DOTALL)\n", (241, 339), False, 'import re\n'), ((523, 629), 're.sub', 're.sub', (['"""(# start-announce\\\\s*?\\\\n).*(\\\\n\\\\s*?# end-announce)"""', 'f"""\\\\g<1>\\\\g<2>"""', '_old'], {'flags': 're.DOTALL'}), "('(# start-announce\\\\s*?\\\\n).*(\\\\n\\\\s*?# end-announce)',\n f'\\\\g<1>\\\\g<2>', _old, flags=re.DOTALL)\n", (529, 629), False, 'import re\n'), ((1243, 1297), 're.sub', 're.sub', (['"""(<a href=").*(")"""', 'f"""\\\\g<1>{url}\\\\g<2>"""', '_old'], {}), '(\'(<a href=").*(")\', f\'\\\\g<1>{url}\\\\g<2>\', _old)\n', (1249, 1297), False, 'import re\n'), ((1317, 1427), 're.sub', 're.sub', (['"""(<!--startmsg-->\\\\s*?\\\\n).*(\\\\n\\\\s*?<!--endmsg-->)"""', 'f"""\\\\g<1>{text}\\\\g<2>"""', '_new'], {'flags': 're.DOTALL'}), "('(<!--startmsg-->\\\\s*?\\\\n).*(\\\\n\\\\s*?<!--endmsg-->)',\n f'\\\\g<1>{text}\\\\g<2>', _new, flags=re.DOTALL)\n", (1323, 1427), False, 'import re\n'), ((1681, 1801), 're.sub', 're.sub', (['"""(<!--startmsg-->\\\\s*?\\\\n).*(\\\\n\\\\s*?<!--endmsg-->)"""', 'f"""\\\\g<1>{meetup_svg_url}\\\\g<2>"""', '_old'], {'flags': 're.DOTALL'}), "('(<!--startmsg-->\\\\s*?\\\\n).*(\\\\n\\\\s*?<!--endmsg-->)',\n f'\\\\g<1>{meetup_svg_url}\\\\g<2>', _old, flags=re.DOTALL)\n", (1687, 1801), False, 'import re\n'), ((2047, 2167), 're.sub', 're.sub', (['"""(# start-announce\\\\s*?\\\\n).*(\\\\n\\\\s*?# end-announce)"""', 'f"""\\\\g<1>{announce_url}\\\\g<2>"""', '_old'], {'flags': 're.DOTALL'}), "('(# start-announce\\\\s*?\\\\n).*(\\\\n\\\\s*?# end-announce)',\n f'\\\\g<1>{announce_url}\\\\g<2>', _old, flags=re.DOTALL)\n", (2053, 2167), False, 'import re\n')]
|
import copy
import os
from functools import reduce
from pathlib import Path
import chainer
import chainer.functions as F
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import six
from chainer import configuration, cuda, function
from chainer import reporter as reporter_module
from chainer.dataset import convert
from chainer.training.extensions import Evaluator
from chainermn import CommunicatorBase
from sklearn import metrics
from tqdm import tqdm
def _to_list(a):
"""convert value `a` to list
Args:
a: value to be convert to `list`
Returns (list):
"""
if isinstance(a, (int, float)):
return [a, ]
else:
# expected to be list or some iterable class
return a
def plot_roc(y_true, y_score, out_name):
fpr, tpr, thresholds = metrics.roc_curve(y_true=y_true, y_score=y_score)
auc = metrics.auc(fpr, tpr)
plt.clf()
plt.plot(fpr, tpr, label='ROC curve (area = %.3f)' % auc)
plt.legend()
plt.title('ROC curve', fontsize=16)
plt.xlabel('False Positive Rate', fontsize=16)
plt.ylabel('True Positive Rate', fontsize=16)
plt.grid(True)
plt.savefig(out_name)
class Classification_Evaluator(Evaluator):
"""Evaluator which calculates auc and correlation
Note that this Evaluator is only applicable to binary classification task.
Args:
iterator: Dataset iterator for the dataset to calculate pearson.
It can also be a dictionary of iterators. If this is just an
iterator, the iterator is registered by the name ``'main'``.
target: Link object or a dictionary of links to evaluate. If this is
just a link object, the link is registered by the name ``'main'``.
converter: Converter function to build input arrays and true label.
:func:`~chainer.dataset.concat_examples` is used by default.
It is expected to return input arrays of the form
`[x_0, ..., x_n, t]`, where `x_0, ..., x_n` are the inputs to
the evaluation function and `t` is the true label.
device: Device to which the training data is sent. Negative value
indicates the host memory (CPU).
eval_hook: Function to prepare for each evaluation process. It is
called at the beginning of the evaluation. The evaluator extension
object is passed at each call.
eval_func: Evaluation function called at each iteration. The target
link to evaluate as a callable is used by default.
name (str): name of this extension. When `name` is None,
`default_name='validation'` which is defined in super class
`Evaluator` is used as extension name. This name affects to the
reported key name.
pos_labels (int or list): labels of the positive class, other classes
are considered as negative.
ignore_labels (int or list or None): labels to be ignored.
`None` is used to not ignore all labels.
Attributes:
converter: Converter function.
device: Device to which the training data is sent.
eval_hook: Function to prepare for each evaluation process.
eval_func: Evaluation function called at each iteration.
pos_labels (list): labels of the positive class
ignore_labels (list): labels to be ignored.
"""
def __init__(self, iterator, target, comm, label_name, converter=convert.concat_examples,
device=None, eval_hook=None, eval_func=None, name=None,
pos_labels=1, ignore_labels=None, path_data=None):
super(Classification_Evaluator, self).__init__(
iterator, target, converter=converter, device=device,
eval_hook=eval_hook, eval_func=eval_func)
self.rank = comm.rank
self.name = name
self.pos_labels = _to_list(pos_labels)
self.ignore_labels = _to_list(ignore_labels)
self.comm = comm
self.label_name = label_name
self.path_data = path_data
def __call__(self, trainer=None):
"""Executes the evaluator extension.
Unlike usual extensions, this extension can be executed without passing
a trainer object. This extension reports the performance on validation
dataset using the :func:`~chainer.report` function. Thus, users can use
this extension independently from any trainer by manually configuring
a :class:`~chainer.Reporter` object.
Args:
trainer (~chainer.training.Trainer): Trainer object that invokes
this extension. It can be omitted in case of calling this
extension manually.
Returns:
dict: Result dictionary that contains mean statistics of values
reported by the evaluation function.
"""
# set up a reporter
reporter = reporter_module.Reporter()
if self.name is not None:
prefix = self.name + '/'
else:
prefix = ''
for name, target in six.iteritems(self._targets):
reporter.add_observer(prefix + name, target)
reporter.add_observers(prefix + name, target.namedlinks(skipself=True))
with reporter:
with configuration.using_config('train', False):
result = self.evaluate_roc_corr(trainer=trainer)
reporter_module.report(result)
return result
def evaluate_roc_corr(self, trainer):
iterator = self._iterators['main']
eval_func = self.eval_func or self._targets['main']
if self.eval_hook:
self.eval_hook(self)
if hasattr(iterator, 'reset'):
iterator.reset()
it = iterator
else:
it = copy.copy(iterator)
y_total = np.array([]).reshape([0, len(self.label_name)])
t_total = np.array([]).reshape([0, len(self.label_name)])
protein_id_total = np.array([]).reshape([0, len(self.label_name)])
for batch in it:
in_arrays = self.converter(batch, self.device)
with chainer.no_backprop_mode(), chainer.using_config('train', False):
y = eval_func(*in_arrays[:-2])
t = in_arrays[-2]
protein_id = in_arrays[-1]
# y = F.sigmoid(y)
y_data = cuda.to_cpu(y.data)
t_data = cuda.to_cpu(t)
protein_id = cuda.to_cpu(protein_id)
y_total = np.vstack([y_total, y_data])
t_total = np.vstack([t_total, t_data])
protein_id_total = np.vstack([protein_id_total, protein_id])
updater = trainer.updater
epoch = str(updater.epoch)
out_dir = Path(trainer.out)
observation = {}
for label_index, label in enumerate(self.label_name):
y = y_total[:, label_index]
t = t_total[:, label_index]
protein_id = protein_id_total[:, label_index]
index = np.where(t != -1)[0]
y = y[index]
t = t[index]
protein_id = protein_id[index]
gather_data = self.comm.gather(np.vstack([t, y, protein_id]))
if self.rank == 0:
gather_data = np.concatenate(gather_data, axis=1)
gather_t = np.array(gather_data[0], dtype=np.int)
gather_y = np.array(gather_data[1], dtype=np.float32)
gather_protein_id = np.array(gather_data[2], dtype=np.int)
global_score = []
global_label = []
target_name = []
model_path = []
for row, item in self.path_data.iterrows():
model_index = np.where(gather_protein_id==row)[0]
if len(model_index) > 0:
global_score.append(np.mean(F.sigmoid(gather_y[model_index]).data))
global_label.append(item['gdtts'])
target_name.append(item['dir_name'])
model_path.append(item['path'])
df = pd.DataFrame({'global_score':global_score, 'global_label':global_label, 'target_name':target_name, 'model_path': model_path})
pearson = df.groupby('target_name').corr(method='pearson')['global_score'].mean(level=1)['global_label']
spearman = df.groupby('target_name').corr(method='spearman')['global_score'].mean(level=1)['global_label']
csv_out_name = out_dir/(epoch+label+'_df.csv')
df.to_csv(csv_out_name)
roc_out_name = out_dir/(epoch+'iteration_'+label+'_roc.png')
y_score = F.sigmoid(gather_y).data
plot_roc(y_true=gather_t, y_score=y_score, out_name=roc_out_name)
roc_auc = metrics.roc_auc_score(gather_t, y_score)
np.savez((out_dir/epoch).with_suffix('.npz'), local_label=gather_t, local_score=y_score, protein_id=gather_protein_id)
with reporter.report_scope(observation):
reporter.report({'roc_auc_'+label: roc_auc}, self._targets['main'])
reporter.report({'loss': F.sigmoid_cross_entropy(gather_y, gather_t).data},
self._targets['main'])
reporter.report({'accuracy': F.binary_accuracy(gather_y, gather_t).data}, self._targets['main'])
reporter.report({'pearson': pearson}, self._targets['main'])
reporter.report({'spearman': spearman}, self._targets['main'])
return observation
class MultiClassification_Evaluator(Evaluator):
"""Evaluator which calculates auc and correlation
Note that this Evaluator is only applicable to binary classification task.
Args:
iterator: Dataset iterator for the dataset to calculate pearson.
It can also be a dictionary of iterators. If this is just an
iterator, the iterator is registered by the name ``'main'``.
target: Link object or a dictionary of links to evaluate. If this is
just a link object, the link is registered by the name ``'main'``.
converter: Converter function to build input arrays and true label.
:func:`~chainer.dataset.concat_examples` is used by default.
It is expected to return input arrays of the form
`[x_0, ..., x_n, t]`, where `x_0, ..., x_n` are the inputs to
the evaluation function and `t` is the true label.
device: Device to which the training data is sent. Negative value
indicates the host memory (CPU).
eval_hook: Function to prepare for each evaluation process. It is
called at the beginning of the evaluation. The evaluator extension
object is passed at each call.
eval_func: Evaluation function called at each iteration. The target
link to evaluate as a callable is used by default.
name (str): name of this extension. When `name` is None,
`default_name='validation'` which is defined in super class
`Evaluator` is used as extension name. This name affects to the
reported key name.
pos_labels (int or list): labels of the positive class, other classes
are considered as negative.
ignore_labels (int or list or None): labels to be ignored.
`None` is used to not ignore all labels.
Attributes:
converter: Converter function.
device: Device to which the training data is sent.
eval_hook: Function to prepare for each evaluation process.
eval_func: Evaluation function called at each iteration.
pos_labels (list): labels of the positive class
ignore_labels (list): labels to be ignored.
"""
def __init__(self, iterator, target, comm, label_name, class_num,
converter=convert.concat_examples,
device=None, eval_hook=None, eval_func=None, name=None,
pos_labels=1, ignore_labels=None, path_data=None):
super(MultiClassification_Evaluator, self).__init__(
iterator, target, converter=converter, device=device,
eval_hook=eval_hook, eval_func=eval_func)
self.rank = comm.rank
self.class_num = class_num
self.name = name
self.pos_labels = _to_list(pos_labels)
self.ignore_labels = _to_list(ignore_labels)
self.comm = comm
self.label_name = label_name
self.path_data = path_data
def __call__(self, trainer=None):
"""Executes the evaluator extension.
Unlike usual extensions, this extension can be executed without passing
a trainer object. This extension reports the performance on validation
dataset using the :func:`~chainer.report` function. Thus, users can use
this extension independently from any trainer by manually configuring
a :class:`~chainer.Reporter` object.
Args:
trainer (~chainer.training.Trainer): Trainer object that invokes
this extension. It can be omitted in case of calling this
extension manually.
Returns:
dict: Result dictionary that contains mean statistics of values
reported by the evaluation function.
"""
# set up a reporter
reporter = reporter_module.Reporter()
if self.name is not None:
prefix = self.name + '/'
else:
prefix = ''
for name, target in six.iteritems(self._targets):
reporter.add_observer(prefix + name, target)
reporter.add_observers(prefix + name, target.namedlinks(skipself=True))
with reporter:
with configuration.using_config('train', False):
result = self.evaluate_corr(trainer=trainer)
reporter_module.report(result)
return result
def evaluate_corr(self, trainer):
iterator = self._iterators['main']
eval_func = self.eval_func or self._targets['main']
if self.eval_hook:
self.eval_hook(self)
if hasattr(iterator, 'reset'):
iterator.reset()
it = iterator
else:
it = copy.copy(iterator)
y_total = np.array([]).reshape([0, self.class_num])
t_total = np.array([], dtype=np.int)
protein_id_total = np.array([], dtype=np.int)
for batch in it:
in_arrays = self.converter(batch, self.device)
with chainer.no_backprop_mode(), chainer.using_config('train', False):
y = eval_func(*in_arrays[:-2])
t = in_arrays[-2]
protein_id = in_arrays[-1]
# y = F.sigmoid(y)
y_data = cuda.to_cpu(y.data)
t_data = cuda.to_cpu(t)
protein_id = cuda.to_cpu(protein_id)
y_total = np.vstack([y_total, y_data])
t_total = np.concatenate([t_total, t_data])
protein_id_total = np.concatenate([protein_id_total, protein_id])
updater = trainer.updater
epoch = str(updater.epoch)
out_dir = Path(trainer.out)
observation = {}
gather_data = self.comm.gather(np.hstack([t_total.reshape(-1,1), y_total, protein_id_total.reshape(-1,1)]))
if self.rank == 0:
gather_data = np.concatenate(gather_data)
gather_t = gather_data[:, 0].astype(np.int)
gather_y = gather_data[:, 1:-1].astype(np.float32)
gather_protein_id = gather_data[:, -1].astype(np.int)
global_score = []
global_label = []
target_name = []
model_path = []
for row, item in self.path_data.iterrows():
model_index = np.where(gather_protein_id==row)[0]
if len(model_index) > 0:
local_score = np.argmax(gather_y[model_index], axis=1)/self.class_num
global_score.append(np.mean(local_score))
global_label.append(item['gdtts'])
target_name.append(item['dir_name'])
model_path.append(item['path'])
df = pd.DataFrame({'global_score':global_score, 'global_label':global_label, 'target_name':target_name, 'model_path': model_path})
pearson = df.groupby('target_name').corr(method='pearson')['global_score'].mean(level=1)['global_label']
spearman = df.groupby('target_name').corr(method='spearman')['global_score'].mean(level=1)['global_label']
csv_out_name = out_dir/(epoch+'_df.csv')
df.to_csv(csv_out_name)
np.savez((out_dir/epoch).with_suffix('.npz'), local_label=gather_t, local_score=y_score, protein_id=gather_protein_id)
with reporter.report_scope(observation):
reporter.report({'loss': F.softmax_cross_entropy(gather_y, gather_t).data},
self._targets['main'])
reporter.report({'accuracy': F.accuracy(gather_y, gather_t).data}, self._targets['main'])
reporter.report({'pearson': pearson}, self._targets['main'])
reporter.report({'spearman': spearman}, self._targets['main'])
return observation
class Regression_Evaluator(Evaluator):
"""Evaluator which calculates correlation
Args:
iterator: Dataset iterator for the dataset to calculate pearson.
It can also be a dictionary of iterators. If this is just an
iterator, the iterator is registered by the name ``'main'``.
target: Link object or a dictionary of links to evaluate. If this is
just a link object, the link is registered by the name ``'main'``.
converter: Converter function to build input arrays and true label.
:func:`~chainer.dataset.concat_examples` is used by default.
It is expected to return input arrays of the form
`[x_0, ..., x_n, t]`, where `x_0, ..., x_n` are the inputs to
the evaluation function and `t` is the true label.
device: Device to which the training data is sent. Negative value
indicates the host memory (CPU).
eval_hook: Function to prepare for each evaluation process. It is
called at the beginning of the evaluation. The evaluator extension
object is passed at each call.
eval_func: Evaluation function called at each iteration. The target
link to evaluate as a callable is used by default.
name (str): name of this extension. When `name` is None,
`default_name='validation'` which is defined in super class
`Evaluator` is used as extension name. This name affects to the
reported key name.
pos_labels (int or list): labels of the positive class, other classes
are considered as negative.
ignore_labels (int or list or None): labels to be ignored.
`None` is used to not ignore all labels.
Attributes:
converter: Converter function.
device: Device to which the training data is sent.
eval_hook: Function to prepare for each evaluation process.
eval_func: Evaluation function called at each iteration.
pos_labels (list): labels of the positive class
ignore_labels (list): labels to be ignored.
"""
def __init__(self, iterator, target, comm, label_name, converter=convert.concat_examples,
device=None, eval_hook=None, eval_func=None, name=None,
pos_labels=1, ignore_labels=None, path_data=None):
super(Regression_Evaluator, self).__init__(
iterator, target, converter=converter, device=device,
eval_hook=eval_hook, eval_func=eval_func)
self.rank = comm.rank
self.name = name
self.pos_labels = _to_list(pos_labels)
self.ignore_labels = _to_list(ignore_labels)
self.comm = comm
self.label_name = label_name
self.path_data = path_data
def __call__(self, trainer=None):
"""Executes the evaluator extension.
Unlike usual extensions, this extension can be executed without passing
a trainer object. This extension reports the performance on validation
dataset using the :func:`~chainer.report` function. Thus, users can use
this extension independently from any trainer by manually configuring
a :class:`~chainer.Reporter` object.
Args:
trainer (~chainer.training.Trainer): Trainer object that invokes
this extension. It can be omitted in case of calling this
extension manually.
Returns:
dict: Result dictionary that contains mean statistics of values
reported by the evaluation function.
"""
# set up a reporter
reporter = reporter_module.Reporter()
if self.name is not None:
prefix = self.name + '/'
else:
prefix = ''
for name, target in six.iteritems(self._targets):
reporter.add_observer(prefix + name, target)
reporter.add_observers(prefix + name, target.namedlinks(skipself=True))
with reporter:
with configuration.using_config('train', False):
result = self.evaluate_corr(trainer=trainer)
reporter_module.report(result)
return result
def evaluate_corr(self, trainer):
iterator = self._iterators['main']
eval_func = self.eval_func or self._targets['main']
if self.eval_hook:
self.eval_hook(self)
if hasattr(iterator, 'reset'):
iterator.reset()
it = iterator
else:
it = copy.copy(iterator)
y_total = np.array([]).reshape([0, len(self.label_name)])
t_total = np.array([]).reshape([0, len(self.label_name)])
protein_id_total = np.array([]).reshape([0, len(self.label_name)])
for batch in it:
in_arrays = self.converter(batch, self.device)
with chainer.no_backprop_mode(), chainer.using_config('train', False):
y = eval_func(*in_arrays[:-2])
t = in_arrays[-2]
protein_id = in_arrays[-1]
# y = F.sigmoid(y)
y_data = cuda.to_cpu(y.data)
t_data = cuda.to_cpu(t)
protein_id = cuda.to_cpu(protein_id)
y_total = np.vstack([y_total, y_data])
t_total = np.vstack([t_total, t_data])
protein_id_total = np.vstack([protein_id_total, protein_id])
updater = trainer.updater
epoch = str(updater.epoch)
out_dir = Path(trainer.out)
observation = {}
for label_index, label in enumerate(self.label_name):
y = y_total[:, label_index]
t = t_total[:, label_index]
protein_id = protein_id_total[:, label_index]
index = np.where(t != -1)[0]
y = y[index]
t = t[index]
protein_id = protein_id[index]
gather_data = self.comm.gather(np.vstack([t, y, protein_id]))
if self.rank == 0:
gather_data = np.concatenate(gather_data, axis=1)
gather_t = np.array(gather_data[0], dtype=np.float32)
gather_y = np.array(gather_data[1], dtype=np.float32)
gather_protein_id = np.array(gather_data[2], dtype=np.int)
global_score = []
global_label = []
target_name = []
model_path = []
for row, item in self.path_data.iterrows():
model_index = np.where(gather_protein_id==row)[0]
if len(model_index) > 0:
global_score.append(np.mean(gather_y[model_index]))
global_label.append(item['gdtts'])
target_name.append(item['dir_name'])
model_path.append(item['path'])
df = pd.DataFrame({'global_score':global_score, 'global_label':global_label, 'target_name':target_name, 'model_path': model_path})
pearson = df.groupby('target_name').corr(method='pearson')['global_score'].mean(level=1)['global_label']
spearman = df.groupby('target_name').corr(method='spearman')['global_score'].mean(level=1)['global_label']
csv_out_name = out_dir/(epoch+label+'_df.csv')
df.to_csv(csv_out_name)
np.savez((out_dir/epoch).with_suffix('.npz'), local_label=gather_t, local_score=y_score, protein_id=gather_protein_id)
with reporter.report_scope(observation):
reporter.report({'loss': F.mean_squared_error(gather_y, gather_t).data},
self._targets['main'])
reporter.report({'accuracy': F.r2_score(gather_y, gather_t).data}, self._targets['main'])
reporter.report({'pearson': pearson}, self._targets['main'])
reporter.report({'spearman': spearman}, self._targets['main'])
return observation
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.clf",
"numpy.argmax",
"chainer.reporter.Reporter",
"pathlib.Path",
"chainer.no_backprop_mode",
"numpy.mean",
"six.iteritems",
"pandas.DataFrame",
"chainer.functions.softmax_cross_entropy",
"chainer.functions.sigmoid_cross_entropy",
"chainer.cuda.to_cpu",
"chainer.functions.mean_squared_error",
"chainer.functions.r2_score",
"matplotlib.pyplot.legend",
"sklearn.metrics.roc_auc_score",
"matplotlib.use",
"chainer.functions.sigmoid",
"chainer.configuration.using_config",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.grid",
"chainer.functions.accuracy",
"numpy.vstack",
"numpy.concatenate",
"chainer.functions.binary_accuracy",
"matplotlib.pyplot.plot",
"sklearn.metrics.roc_curve",
"chainer.reporter.report",
"copy.copy",
"sklearn.metrics.auc",
"numpy.where",
"numpy.array",
"chainer.using_config",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig"
] |
[((140, 161), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (154, 161), False, 'import matplotlib\n'), ((855, 904), 'sklearn.metrics.roc_curve', 'metrics.roc_curve', ([], {'y_true': 'y_true', 'y_score': 'y_score'}), '(y_true=y_true, y_score=y_score)\n', (872, 904), False, 'from sklearn import metrics\n'), ((915, 936), 'sklearn.metrics.auc', 'metrics.auc', (['fpr', 'tpr'], {}), '(fpr, tpr)\n', (926, 936), False, 'from sklearn import metrics\n'), ((941, 950), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (948, 950), True, 'import matplotlib.pyplot as plt\n'), ((955, 1012), 'matplotlib.pyplot.plot', 'plt.plot', (['fpr', 'tpr'], {'label': "('ROC curve (area = %.3f)' % auc)"}), "(fpr, tpr, label='ROC curve (area = %.3f)' % auc)\n", (963, 1012), True, 'import matplotlib.pyplot as plt\n'), ((1017, 1029), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1027, 1029), True, 'import matplotlib.pyplot as plt\n'), ((1034, 1069), 'matplotlib.pyplot.title', 'plt.title', (['"""ROC curve"""'], {'fontsize': '(16)'}), "('ROC curve', fontsize=16)\n", (1043, 1069), True, 'import matplotlib.pyplot as plt\n'), ((1074, 1120), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""False Positive Rate"""'], {'fontsize': '(16)'}), "('False Positive Rate', fontsize=16)\n", (1084, 1120), True, 'import matplotlib.pyplot as plt\n'), ((1125, 1170), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""True Positive Rate"""'], {'fontsize': '(16)'}), "('True Positive Rate', fontsize=16)\n", (1135, 1170), True, 'import matplotlib.pyplot as plt\n'), ((1175, 1189), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (1183, 1189), True, 'import matplotlib.pyplot as plt\n'), ((1194, 1215), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_name'], {}), '(out_name)\n', (1205, 1215), True, 'import matplotlib.pyplot as plt\n'), ((4944, 4970), 'chainer.reporter.Reporter', 'reporter_module.Reporter', ([], {}), '()\n', (4968, 4970), True, 'from chainer import reporter as reporter_module\n'), ((5108, 5136), 'six.iteritems', 'six.iteritems', (['self._targets'], {}), '(self._targets)\n', (5121, 5136), False, 'import six\n'), ((5438, 5468), 'chainer.reporter.report', 'reporter_module.report', (['result'], {}), '(result)\n', (5460, 5468), True, 'from chainer import reporter as reporter_module\n'), ((6780, 6797), 'pathlib.Path', 'Path', (['trainer.out'], {}), '(trainer.out)\n', (6784, 6797), False, 'from pathlib import Path\n'), ((13436, 13462), 'chainer.reporter.Reporter', 'reporter_module.Reporter', ([], {}), '()\n', (13460, 13462), True, 'from chainer import reporter as reporter_module\n'), ((13600, 13628), 'six.iteritems', 'six.iteritems', (['self._targets'], {}), '(self._targets)\n', (13613, 13628), False, 'import six\n'), ((13926, 13956), 'chainer.reporter.report', 'reporter_module.report', (['result'], {}), '(result)\n', (13948, 13956), True, 'from chainer import reporter as reporter_module\n'), ((14415, 14441), 'numpy.array', 'np.array', (['[]'], {'dtype': 'np.int'}), '([], dtype=np.int)\n', (14423, 14441), True, 'import numpy as np\n'), ((14469, 14495), 'numpy.array', 'np.array', (['[]'], {'dtype': 'np.int'}), '([], dtype=np.int)\n', (14477, 14495), True, 'import numpy as np\n'), ((15226, 15243), 'pathlib.Path', 'Path', (['trainer.out'], {}), '(trainer.out)\n', (15230, 15243), False, 'from pathlib import Path\n'), ((20979, 21005), 'chainer.reporter.Reporter', 'reporter_module.Reporter', ([], {}), '()\n', (21003, 21005), True, 'from chainer import reporter as reporter_module\n'), ((21143, 21171), 'six.iteritems', 'six.iteritems', (['self._targets'], {}), '(self._targets)\n', (21156, 21171), False, 'import six\n'), ((21469, 21499), 'chainer.reporter.report', 'reporter_module.report', (['result'], {}), '(result)\n', (21491, 21499), True, 'from chainer import reporter as reporter_module\n'), ((22807, 22824), 'pathlib.Path', 'Path', (['trainer.out'], {}), '(trainer.out)\n', (22811, 22824), False, 'from pathlib import Path\n'), ((5832, 5851), 'copy.copy', 'copy.copy', (['iterator'], {}), '(iterator)\n', (5841, 5851), False, 'import copy\n'), ((6404, 6423), 'chainer.cuda.to_cpu', 'cuda.to_cpu', (['y.data'], {}), '(y.data)\n', (6415, 6423), False, 'from chainer import configuration, cuda, function\n'), ((6445, 6459), 'chainer.cuda.to_cpu', 'cuda.to_cpu', (['t'], {}), '(t)\n', (6456, 6459), False, 'from chainer import configuration, cuda, function\n'), ((6485, 6508), 'chainer.cuda.to_cpu', 'cuda.to_cpu', (['protein_id'], {}), '(protein_id)\n', (6496, 6508), False, 'from chainer import configuration, cuda, function\n'), ((6531, 6559), 'numpy.vstack', 'np.vstack', (['[y_total, y_data]'], {}), '([y_total, y_data])\n', (6540, 6559), True, 'import numpy as np\n'), ((6582, 6610), 'numpy.vstack', 'np.vstack', (['[t_total, t_data]'], {}), '([t_total, t_data])\n', (6591, 6610), True, 'import numpy as np\n'), ((6642, 6683), 'numpy.vstack', 'np.vstack', (['[protein_id_total, protein_id]'], {}), '([protein_id_total, protein_id])\n', (6651, 6683), True, 'import numpy as np\n'), ((14316, 14335), 'copy.copy', 'copy.copy', (['iterator'], {}), '(iterator)\n', (14325, 14335), False, 'import copy\n'), ((14840, 14859), 'chainer.cuda.to_cpu', 'cuda.to_cpu', (['y.data'], {}), '(y.data)\n', (14851, 14859), False, 'from chainer import configuration, cuda, function\n'), ((14881, 14895), 'chainer.cuda.to_cpu', 'cuda.to_cpu', (['t'], {}), '(t)\n', (14892, 14895), False, 'from chainer import configuration, cuda, function\n'), ((14921, 14944), 'chainer.cuda.to_cpu', 'cuda.to_cpu', (['protein_id'], {}), '(protein_id)\n', (14932, 14944), False, 'from chainer import configuration, cuda, function\n'), ((14967, 14995), 'numpy.vstack', 'np.vstack', (['[y_total, y_data]'], {}), '([y_total, y_data])\n', (14976, 14995), True, 'import numpy as np\n'), ((15018, 15051), 'numpy.concatenate', 'np.concatenate', (['[t_total, t_data]'], {}), '([t_total, t_data])\n', (15032, 15051), True, 'import numpy as np\n'), ((15083, 15129), 'numpy.concatenate', 'np.concatenate', (['[protein_id_total, protein_id]'], {}), '([protein_id_total, protein_id])\n', (15097, 15129), True, 'import numpy as np\n'), ((15439, 15466), 'numpy.concatenate', 'np.concatenate', (['gather_data'], {}), '(gather_data)\n', (15453, 15466), True, 'import numpy as np\n'), ((16266, 16398), 'pandas.DataFrame', 'pd.DataFrame', (["{'global_score': global_score, 'global_label': global_label, 'target_name':\n target_name, 'model_path': model_path}"], {}), "({'global_score': global_score, 'global_label': global_label,\n 'target_name': target_name, 'model_path': model_path})\n", (16278, 16398), True, 'import pandas as pd\n'), ((21859, 21878), 'copy.copy', 'copy.copy', (['iterator'], {}), '(iterator)\n', (21868, 21878), False, 'import copy\n'), ((22431, 22450), 'chainer.cuda.to_cpu', 'cuda.to_cpu', (['y.data'], {}), '(y.data)\n', (22442, 22450), False, 'from chainer import configuration, cuda, function\n'), ((22472, 22486), 'chainer.cuda.to_cpu', 'cuda.to_cpu', (['t'], {}), '(t)\n', (22483, 22486), False, 'from chainer import configuration, cuda, function\n'), ((22512, 22535), 'chainer.cuda.to_cpu', 'cuda.to_cpu', (['protein_id'], {}), '(protein_id)\n', (22523, 22535), False, 'from chainer import configuration, cuda, function\n'), ((22558, 22586), 'numpy.vstack', 'np.vstack', (['[y_total, y_data]'], {}), '([y_total, y_data])\n', (22567, 22586), True, 'import numpy as np\n'), ((22609, 22637), 'numpy.vstack', 'np.vstack', (['[t_total, t_data]'], {}), '([t_total, t_data])\n', (22618, 22637), True, 'import numpy as np\n'), ((22669, 22710), 'numpy.vstack', 'np.vstack', (['[protein_id_total, protein_id]'], {}), '([protein_id_total, protein_id])\n', (22678, 22710), True, 'import numpy as np\n'), ((5320, 5362), 'chainer.configuration.using_config', 'configuration.using_config', (['"""train"""', '(False)'], {}), "('train', False)\n", (5346, 5362), False, 'from chainer import configuration, cuda, function\n'), ((5871, 5883), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5879, 5883), True, 'import numpy as np\n'), ((5937, 5949), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5945, 5949), True, 'import numpy as np\n'), ((6012, 6024), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (6020, 6024), True, 'import numpy as np\n'), ((6162, 6188), 'chainer.no_backprop_mode', 'chainer.no_backprop_mode', ([], {}), '()\n', (6186, 6188), False, 'import chainer\n'), ((6190, 6226), 'chainer.using_config', 'chainer.using_config', (['"""train"""', '(False)'], {}), "('train', False)\n", (6210, 6226), False, 'import chainer\n'), ((7044, 7061), 'numpy.where', 'np.where', (['(t != -1)'], {}), '(t != -1)\n', (7052, 7061), True, 'import numpy as np\n'), ((7201, 7230), 'numpy.vstack', 'np.vstack', (['[t, y, protein_id]'], {}), '([t, y, protein_id])\n', (7210, 7230), True, 'import numpy as np\n'), ((7293, 7328), 'numpy.concatenate', 'np.concatenate', (['gather_data'], {'axis': '(1)'}), '(gather_data, axis=1)\n', (7307, 7328), True, 'import numpy as np\n'), ((7356, 7394), 'numpy.array', 'np.array', (['gather_data[0]'], {'dtype': 'np.int'}), '(gather_data[0], dtype=np.int)\n', (7364, 7394), True, 'import numpy as np\n'), ((7422, 7464), 'numpy.array', 'np.array', (['gather_data[1]'], {'dtype': 'np.float32'}), '(gather_data[1], dtype=np.float32)\n', (7430, 7464), True, 'import numpy as np\n'), ((7501, 7539), 'numpy.array', 'np.array', (['gather_data[2]'], {'dtype': 'np.int'}), '(gather_data[2], dtype=np.int)\n', (7509, 7539), True, 'import numpy as np\n'), ((8138, 8270), 'pandas.DataFrame', 'pd.DataFrame', (["{'global_score': global_score, 'global_label': global_label, 'target_name':\n target_name, 'model_path': model_path}"], {}), "({'global_score': global_score, 'global_label': global_label,\n 'target_name': target_name, 'model_path': model_path})\n", (8150, 8270), True, 'import pandas as pd\n'), ((8848, 8888), 'sklearn.metrics.roc_auc_score', 'metrics.roc_auc_score', (['gather_t', 'y_score'], {}), '(gather_t, y_score)\n', (8869, 8888), False, 'from sklearn import metrics\n'), ((13812, 13854), 'chainer.configuration.using_config', 'configuration.using_config', (['"""train"""', '(False)'], {}), "('train', False)\n", (13838, 13854), False, 'from chainer import configuration, cuda, function\n'), ((14355, 14367), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (14363, 14367), True, 'import numpy as np\n'), ((14598, 14624), 'chainer.no_backprop_mode', 'chainer.no_backprop_mode', ([], {}), '()\n', (14622, 14624), False, 'import chainer\n'), ((14626, 14662), 'chainer.using_config', 'chainer.using_config', (['"""train"""', '(False)'], {}), "('train', False)\n", (14646, 14662), False, 'import chainer\n'), ((21355, 21397), 'chainer.configuration.using_config', 'configuration.using_config', (['"""train"""', '(False)'], {}), "('train', False)\n", (21381, 21397), False, 'from chainer import configuration, cuda, function\n'), ((21898, 21910), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (21906, 21910), True, 'import numpy as np\n'), ((21964, 21976), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (21972, 21976), True, 'import numpy as np\n'), ((22039, 22051), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (22047, 22051), True, 'import numpy as np\n'), ((22189, 22215), 'chainer.no_backprop_mode', 'chainer.no_backprop_mode', ([], {}), '()\n', (22213, 22215), False, 'import chainer\n'), ((22217, 22253), 'chainer.using_config', 'chainer.using_config', (['"""train"""', '(False)'], {}), "('train', False)\n", (22237, 22253), False, 'import chainer\n'), ((23071, 23088), 'numpy.where', 'np.where', (['(t != -1)'], {}), '(t != -1)\n', (23079, 23088), True, 'import numpy as np\n'), ((23228, 23257), 'numpy.vstack', 'np.vstack', (['[t, y, protein_id]'], {}), '([t, y, protein_id])\n', (23237, 23257), True, 'import numpy as np\n'), ((23320, 23355), 'numpy.concatenate', 'np.concatenate', (['gather_data'], {'axis': '(1)'}), '(gather_data, axis=1)\n', (23334, 23355), True, 'import numpy as np\n'), ((23383, 23425), 'numpy.array', 'np.array', (['gather_data[0]'], {'dtype': 'np.float32'}), '(gather_data[0], dtype=np.float32)\n', (23391, 23425), True, 'import numpy as np\n'), ((23453, 23495), 'numpy.array', 'np.array', (['gather_data[1]'], {'dtype': 'np.float32'}), '(gather_data[1], dtype=np.float32)\n', (23461, 23495), True, 'import numpy as np\n'), ((23532, 23570), 'numpy.array', 'np.array', (['gather_data[2]'], {'dtype': 'np.int'}), '(gather_data[2], dtype=np.int)\n', (23540, 23570), True, 'import numpy as np\n'), ((24153, 24285), 'pandas.DataFrame', 'pd.DataFrame', (["{'global_score': global_score, 'global_label': global_label, 'target_name':\n target_name, 'model_path': model_path}"], {}), "({'global_score': global_score, 'global_label': global_label,\n 'target_name': target_name, 'model_path': model_path})\n", (24165, 24285), True, 'import pandas as pd\n'), ((8715, 8734), 'chainer.functions.sigmoid', 'F.sigmoid', (['gather_y'], {}), '(gather_y)\n', (8724, 8734), True, 'import chainer.functions as F\n'), ((15856, 15890), 'numpy.where', 'np.where', (['(gather_protein_id == row)'], {}), '(gather_protein_id == row)\n', (15864, 15890), True, 'import numpy as np\n'), ((7768, 7802), 'numpy.where', 'np.where', (['(gather_protein_id == row)'], {}), '(gather_protein_id == row)\n', (7776, 7802), True, 'import numpy as np\n'), ((15967, 16007), 'numpy.argmax', 'np.argmax', (['gather_y[model_index]'], {'axis': '(1)'}), '(gather_y[model_index], axis=1)\n', (15976, 16007), True, 'import numpy as np\n'), ((16063, 16083), 'numpy.mean', 'np.mean', (['local_score'], {}), '(local_score)\n', (16070, 16083), True, 'import numpy as np\n'), ((23799, 23833), 'numpy.where', 'np.where', (['(gather_protein_id == row)'], {}), '(gather_protein_id == row)\n', (23807, 23833), True, 'import numpy as np\n'), ((16942, 16985), 'chainer.functions.softmax_cross_entropy', 'F.softmax_cross_entropy', (['gather_y', 'gather_t'], {}), '(gather_y, gather_t)\n', (16965, 16985), True, 'import chainer.functions as F\n'), ((17097, 17127), 'chainer.functions.accuracy', 'F.accuracy', (['gather_y', 'gather_t'], {}), '(gather_y, gather_t)\n', (17107, 17127), True, 'import chainer.functions as F\n'), ((23924, 23954), 'numpy.mean', 'np.mean', (['gather_y[model_index]'], {}), '(gather_y[model_index])\n', (23931, 23954), True, 'import numpy as np\n'), ((9214, 9257), 'chainer.functions.sigmoid_cross_entropy', 'F.sigmoid_cross_entropy', (['gather_y', 'gather_t'], {}), '(gather_y, gather_t)\n', (9237, 9257), True, 'import chainer.functions as F\n'), ((9373, 9410), 'chainer.functions.binary_accuracy', 'F.binary_accuracy', (['gather_y', 'gather_t'], {}), '(gather_y, gather_t)\n', (9390, 9410), True, 'import chainer.functions as F\n'), ((24863, 24903), 'chainer.functions.mean_squared_error', 'F.mean_squared_error', (['gather_y', 'gather_t'], {}), '(gather_y, gather_t)\n', (24883, 24903), True, 'import chainer.functions as F\n'), ((25019, 25049), 'chainer.functions.r2_score', 'F.r2_score', (['gather_y', 'gather_t'], {}), '(gather_y, gather_t)\n', (25029, 25049), True, 'import chainer.functions as F\n'), ((7901, 7933), 'chainer.functions.sigmoid', 'F.sigmoid', (['gather_y[model_index]'], {}), '(gather_y[model_index])\n', (7910, 7933), True, 'import chainer.functions as F\n')]
|
# -*- coding: utf-8 -*-
"""
@author: Manuel
"""
import socket
import time
import sys
import json
import sensor_version.config as config
from IoT_Client_functions import read_data_from_sensor
from sensor_version.data_message import message
udp_timeout = 2
udp_delay = 1
if len(sys.argv) != 2:
print('Error: need client number')
sys.exit(1)
client_number = int(sys.argv[1])
client_ip = config.arp_table[client_number]
server_address = (config.gateway_UDP_ip, config.gateway_UDP_port)
while True:
print('Reading data from sensor...')
hour, temperature, humidity = read_data_from_sensor()
data = message(client_number, hour, temperature, humidity)
data.set_ip_address(client_ip)
OK = False
while not OK:
try:
# socket create
udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
print(f'Sending data to {server_address}...')
data.sending_time(time.time_ns())
data_bytes = json.dumps(data.__dict__)
t0 = time.time_ns()
udp_socket.sendto(data_bytes.encode('utf8'), server_address)
t = time.time_ns()
dt = t - t0
print('Socket time:', dt, 'ns')
print('Waiting for response...')
udp_socket.settimeout(udp_timeout)
server_response, server = udp_socket.recvfrom(1024)
if server_response.decode() == 'OK':
OK = True
else:
raise Exception('Wrong Response')
except Exception as error:
print(f'Error: {error}')
print('Try sending again...')
time.sleep(udp_delay)
finally:
udp_socket.close()
print('Data are correctly sent\n')
time.sleep(5)
|
[
"sensor_version.data_message.message",
"socket.socket",
"json.dumps",
"time.sleep",
"time.time_ns",
"IoT_Client_functions.read_data_from_sensor",
"sys.exit"
] |
[((338, 349), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (346, 349), False, 'import sys\n'), ((589, 612), 'IoT_Client_functions.read_data_from_sensor', 'read_data_from_sensor', ([], {}), '()\n', (610, 612), False, 'from IoT_Client_functions import read_data_from_sensor\n'), ((624, 675), 'sensor_version.data_message.message', 'message', (['client_number', 'hour', 'temperature', 'humidity'], {}), '(client_number, hour, temperature, humidity)\n', (631, 675), False, 'from sensor_version.data_message import message\n'), ((1830, 1843), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1840, 1843), False, 'import time\n'), ((820, 868), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (833, 868), False, 'import socket\n'), ((998, 1023), 'json.dumps', 'json.dumps', (['data.__dict__'], {}), '(data.__dict__)\n', (1008, 1023), False, 'import json\n'), ((1041, 1055), 'time.time_ns', 'time.time_ns', ([], {}), '()\n', (1053, 1055), False, 'import time\n'), ((1158, 1172), 'time.time_ns', 'time.time_ns', ([], {}), '()\n', (1170, 1172), False, 'import time\n'), ((957, 971), 'time.time_ns', 'time.time_ns', ([], {}), '()\n', (969, 971), False, 'import time\n'), ((1698, 1719), 'time.sleep', 'time.sleep', (['udp_delay'], {}), '(udp_delay)\n', (1708, 1719), False, 'import time\n')]
|
import time
import random
value = random.randint(0, 1000)
while True:
if random.randint(0, 100) > 50:
print("lose everything")
|
[
"random.randint"
] |
[((35, 58), 'random.randint', 'random.randint', (['(0)', '(1000)'], {}), '(0, 1000)\n', (49, 58), False, 'import random\n'), ((79, 101), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (93, 101), False, 'import random\n')]
|
from django.db import models
# Create your models here.
class ProjectCat(models.Model):
ProjectId = models.IntegerField()
CarId = models.IntegerField()
CartName = models.CharField(max_length=225)
class ApiName(models.Model):
ProjectId = models.IntegerField()
CarId = models.IntegerField()
ApiId = models.IntegerField()
ApiName = models.CharField(max_length=225)
|
[
"django.db.models.CharField",
"django.db.models.IntegerField"
] |
[((107, 128), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (126, 128), False, 'from django.db import models\n'), ((141, 162), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (160, 162), False, 'from django.db import models\n'), ((178, 210), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(225)'}), '(max_length=225)\n', (194, 210), False, 'from django.db import models\n'), ((257, 278), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (276, 278), False, 'from django.db import models\n'), ((291, 312), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (310, 312), False, 'from django.db import models\n'), ((325, 346), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (344, 346), False, 'from django.db import models\n'), ((361, 393), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(225)'}), '(max_length=225)\n', (377, 393), False, 'from django.db import models\n')]
|
# -*- coding: utf-8 -*-
from django import forms
from .models import PoliticalClasification
class PoliticalClasificationForm(forms.ModelForm):
class Meta:
model = PoliticalClasification
fields = [
'political',
'content',
'numbers'
]
class RawPoliticalClasificationForm(forms.Form):
political = forms.CharField()
content = forms.CharField()
numbers = forms.DecimalField()
|
[
"django.forms.CharField",
"django.forms.DecimalField"
] |
[((387, 404), 'django.forms.CharField', 'forms.CharField', ([], {}), '()\n', (402, 404), False, 'from django import forms\n'), ((419, 436), 'django.forms.CharField', 'forms.CharField', ([], {}), '()\n', (434, 436), False, 'from django import forms\n'), ((451, 471), 'django.forms.DecimalField', 'forms.DecimalField', ([], {}), '()\n', (469, 471), False, 'from django import forms\n')]
|
import datetime
import django
from django.db import IntegrityError
from django.test import TestCase
import ippon.models
import ippon.models.club as cl
import ippon.models.cup_fight
import ippon.models.cup_fight as cfm
import ippon.models.fight
import ippon.models.player as plm
import ippon.models.team_fight as tfm
import ippon.models.tournament as tm
class TestCupFights(TestCase):
def setUp(self):
self.tournament = tm.Tournament.objects.create(
name='T1',
webpage='http://w1.co',
description='d1',
city='c1',
date=datetime.date(year=2021, month=1, day=1),
address='a1',
team_size=1,
group_match_length=3,
ko_match_length=3,
final_match_length=3,
finals_depth=0,
age_constraint=5,
age_constraint_value=20,
rank_constraint=5,
rank_constraint_value=7,
sex_constraint=1)
self.tournament.save()
self.cup_phase = self.tournament.cup_phases.create(name="CP",
fight_length=3,
final_fight_length=5)
self.t1 = self.tournament.teams.create(name='t1')
self.t2 = self.tournament.teams.create(name='t2')
self.team_fight1 = self.tournament.team_fights.create(aka_team=self.t1,
shiro_team=self.t2)
self.cup_fight = self.cup_phase.cup_fights.create(team_fight=self.team_fight1)
class CupFightFollowingFightTests(TestCupFights):
def setUp(self):
super(CupFightFollowingFightTests, self).setUp()
def test_fight_throws_no_such_fight_when_het_following_called_on_final(self):
with self.assertRaises(cfm.NoSuchFightException):
self.cup_fight.get_following_fight()
def test_cup_fight_which_is_previous_on_aka_side_returns_following_fight(self):
following_aka = self.cup_phase.cup_fights.create(team_fight=self.team_fight1, previous_aka_fight=self.cup_fight)
self.assertEqual(self.cup_fight.get_following_fight(), following_aka)
class CupFightSiblingTests(TestCupFights):
def setUp(self):
super(CupFightSiblingTests, self).setUp()
self.t3 = self.tournament.teams.create(name='t3')
self.t4 = self.tournament.teams.create(name='t4')
self.tf_aka = self.tournament.team_fights.create(aka_team=self.t3,
shiro_team=self.t4)
self.cf_aka = self.cup_phase.cup_fights.create(team_fight=self.tf_aka)
self.cf_parent = self.cup_phase.cup_fights.create(previous_aka_fight=self.cf_aka,
previous_shiro_fight=self.cup_fight)
def test_cup_fight_when_winner_is_set_and_sibling_has_winner_already_set_creates_team_fight_in_parent(self):
self.cf_aka.team_fight.winner = 1
self.cf_aka.team_fight.save()
self.cup_fight.team_fight.winner = 2
self.cup_fight.team_fight.save()
self.cf_parent.refresh_from_db()
self.assertIsNotNone(self.cf_parent.team_fight)
self.assertEqual(self.cf_parent.team_fight.aka_team, self.t3)
self.assertEqual(self.cf_parent.team_fight.shiro_team, self.t2)
def test_when_aka_fight_winner_is_set_and_shiro_sibling_doesnt_have_winner_yet_doesnt_change_parent(self):
self.cf_aka.team_fight.winner = 1
self.cf_aka.team_fight.save()
self.cf_parent.refresh_from_db()
self.assertIsNone(self.cf_parent.team_fight)
def test_when_shiro_fight_winner_is_set_and_aka_sibling_doesnt_have_winner_yet_doesnt_change_parent(self):
self.cup_fight.team_fight.winner = 1
self.cup_fight.team_fight.save()
self.cf_parent.refresh_from_db()
self.assertIsNone(self.cf_parent.team_fight)
def test_when_shiro_fight_winner_is_changed_and_parent_was_laready_created_but_still_in_prep_change_parent(self):
self.cf_aka.team_fight.winner = 1
self.cf_aka.team_fight.save()
self.cup_fight.team_fight.winner = 2
self.cup_fight.team_fight.save()
self.cf_parent.refresh_from_db()
old_parent_tf_id = self.cf_parent.team_fight.id
self.cf_aka.team_fight.winner = 2
self.cf_aka.team_fight.save()
self.cf_parent.refresh_from_db()
current_parent_tf = self.cf_parent.team_fight
self.assertEqual(old_parent_tf_id, current_parent_tf.id)
self.assertEqual(current_parent_tf.aka_team, self.t4)
self.assertEqual(current_parent_tf.shiro_team, self.t2)
class CupPhaseTests(TestCase):
def setUp(self) -> None:
self.tournament = tm.Tournament.objects.create(
name='T1',
webpage='http://w1.co',
description='d1',
city='c1',
date=datetime.date(year=2021, month=1, day=1),
address='a1',
team_size=1,
group_match_length=3,
ko_match_length=3,
final_match_length=3,
finals_depth=0,
age_constraint=5,
age_constraint_value=20,
rank_constraint=5,
rank_constraint_value=7,
sex_constraint=1)
self.tournament.save()
c = cl.Club.objects.create(
name='cn1',
webpage='http://cw1.co',
description='cd1',
city='cc1')
self.cup_phase = self.tournament.cup_phases.create(name="CP",
fight_length=3,
final_fight_length=5)
self.t1 = self.tournament.teams.create(name='t1')
self.t2 = self.tournament.teams.create(name='t2')
self.team_fight1 = self.tournament.team_fights.create(aka_team=self.t1,
shiro_team=self.t2)
self.cf1 = self.cup_phase.cup_fights.create(team_fight=self.team_fight1)
self.p1 = plm.Player.objects.create(name='pn1', surname='ps1', rank=7,
birthday=datetime.date(year=2001, month=1, day=1), sex=1, club_id=c)
self.p2 = plm.Player.objects.create(name='pn2', surname='ps2', rank=7,
birthday=datetime.date(year=2001, month=1, day=1), sex=1, club_id=c)
self.p3 = plm.Player.objects.create(name='pn3', surname='ps3', rank=7,
birthday=datetime.date(year=2001, month=1, day=1), sex=1, club_id=c)
self.p4 = plm.Player.objects.create(name='pn4', surname='ps4', rank=7,
birthday=datetime.date(year=2001, month=1, day=1), sex=1, club_id=c)
self.p5 = plm.Player.objects.create(name='pn5', surname='ps5', rank=7,
birthday=datetime.date(year=2001, month=1, day=1), sex=1, club_id=c)
self.p6 = plm.Player.objects.create(name='pn6', surname='ps6', rank=7,
birthday=datetime.date(year=2001, month=1, day=1), sex=1, club_id=c)
self.p7 = plm.Player.objects.create(name='pn7', surname='ps6', rank=7,
birthday=datetime.date(year=2001, month=1, day=1), sex=1, club_id=c)
self.p8 = plm.Player.objects.create(name='pn8', surname='ps6', rank=7,
birthday=datetime.date(year=2001, month=1, day=1), sex=1, club_id=c)
self.t1.team_members.create(player=self.p1)
self.t1.team_members.create(player=self.p2)
self.t1.team_members.create(player=self.p3)
self.t1.team_members.create(player=self.p7)
self.t2.team_members.create(player=self.p4)
self.t2.team_members.create(player=self.p5)
self.t2.team_members.create(player=self.p6)
self.t2.team_members.create(player=self.p8)
self.f1 = self.team_fight1.fights.create(aka=self.p1, shiro=self.p4)
self.f2 = self.team_fight1.fights.create(aka=self.p2, shiro=self.p5)
self.f3 = self.team_fight1.fights.create(aka=self.p3, shiro=self.p6)
self.f4 = self.team_fight1.fights.create(aka=self.p7, shiro=self.p8)
def test_destruction_of_cup_phase_is_impossible_when_there_are_some_fights_in_it(self):
with self.assertRaises(django.db.models.ProtectedError) as pe:
self.cup_phase.delete()
self.assertTrue(tfm.TeamFight.objects.filter(cup_fight=self.cf1).count())
self.assertTrue(ippon.models.cup_fight.CupFight.objects.filter(cup_phase=self.cup_phase).count())
self.assertTrue(ippon.models.fight.Fight.objects.filter(team_fight=self.team_fight1).count())
|
[
"datetime.date",
"ippon.models.team_fight.TeamFight.objects.filter",
"ippon.models.club.Club.objects.create"
] |
[((5374, 5469), 'ippon.models.club.Club.objects.create', 'cl.Club.objects.create', ([], {'name': '"""cn1"""', 'webpage': '"""http://cw1.co"""', 'description': '"""cd1"""', 'city': '"""cc1"""'}), "(name='cn1', webpage='http://cw1.co', description=\n 'cd1', city='cc1')\n", (5396, 5469), True, 'import ippon.models.club as cl\n'), ((594, 634), 'datetime.date', 'datetime.date', ([], {'year': '(2021)', 'month': '(1)', 'day': '(1)'}), '(year=2021, month=1, day=1)\n', (607, 634), False, 'import datetime\n'), ((4946, 4986), 'datetime.date', 'datetime.date', ([], {'year': '(2021)', 'month': '(1)', 'day': '(1)'}), '(year=2021, month=1, day=1)\n', (4959, 4986), False, 'import datetime\n'), ((6232, 6272), 'datetime.date', 'datetime.date', ([], {'year': '(2001)', 'month': '(1)', 'day': '(1)'}), '(year=2001, month=1, day=1)\n', (6245, 6272), False, 'import datetime\n'), ((6424, 6464), 'datetime.date', 'datetime.date', ([], {'year': '(2001)', 'month': '(1)', 'day': '(1)'}), '(year=2001, month=1, day=1)\n', (6437, 6464), False, 'import datetime\n'), ((6616, 6656), 'datetime.date', 'datetime.date', ([], {'year': '(2001)', 'month': '(1)', 'day': '(1)'}), '(year=2001, month=1, day=1)\n', (6629, 6656), False, 'import datetime\n'), ((6808, 6848), 'datetime.date', 'datetime.date', ([], {'year': '(2001)', 'month': '(1)', 'day': '(1)'}), '(year=2001, month=1, day=1)\n', (6821, 6848), False, 'import datetime\n'), ((7000, 7040), 'datetime.date', 'datetime.date', ([], {'year': '(2001)', 'month': '(1)', 'day': '(1)'}), '(year=2001, month=1, day=1)\n', (7013, 7040), False, 'import datetime\n'), ((7192, 7232), 'datetime.date', 'datetime.date', ([], {'year': '(2001)', 'month': '(1)', 'day': '(1)'}), '(year=2001, month=1, day=1)\n', (7205, 7232), False, 'import datetime\n'), ((7384, 7424), 'datetime.date', 'datetime.date', ([], {'year': '(2001)', 'month': '(1)', 'day': '(1)'}), '(year=2001, month=1, day=1)\n', (7397, 7424), False, 'import datetime\n'), ((7576, 7616), 'datetime.date', 'datetime.date', ([], {'year': '(2001)', 'month': '(1)', 'day': '(1)'}), '(year=2001, month=1, day=1)\n', (7589, 7616), False, 'import datetime\n'), ((8587, 8635), 'ippon.models.team_fight.TeamFight.objects.filter', 'tfm.TeamFight.objects.filter', ([], {'cup_fight': 'self.cf1'}), '(cup_fight=self.cf1)\n', (8615, 8635), True, 'import ippon.models.team_fight as tfm\n')]
|
import io
import pickle
import sys
import unittest
from satella.instrumentation import Traceback
class TestTraceback(unittest.TestCase):
def test_no_exc(self):
tb = Traceback()
byte = io.BytesIO()
byte2 = tb.pickle()
tb.pickle_to(byte)
byte.seek(0)
tb2 = Traceback.from_pickle(byte)
tb3 = Traceback.from_pickle(byte2)
self.assertEqual(tb, tb2)
self.assertEqual(tb2, tb3)
def test_json(self):
try:
raise ValueError(u'hello')
except ValueError:
tb = Traceback()
js = tb.to_json()
self.assertEqual(tb, Traceback.from_json(js))
def test_unserialize_error(self):
a = 'test'
b = pickle.dumps(a)
self.assertRaises(ValueError, lambda: Traceback.from_pickle(b))
def test_tb(self):
try:
loc = u'hello world'
raise ValueError(u'hello')
except ValueError:
tb = Traceback()
p_fmt = tb.pretty_format()
else:
self.fail('exception not raised')
self.assertTrue(p_fmt)
def test_issue_21(self):
try:
loc = u'hello world'
raise ValueError(u'hello')
except ValueError:
tb = Traceback()
a = tb.pickle()
self.assertIsInstance(pickle.loads(a), Traceback)
def test_normal_stack_frames(self):
tb = Traceback(list(sys._current_frames().values())[0])
tb.pretty_format()
def test_compression_happens(self):
try:
loc = ' ' * (10 * 1024 * 1024)
raise ValueError('hello')
except ValueError:
tb = Traceback()
self.assertLess(len(pickle.dumps(tb, -1)), 9 * 1024 * 1024)
|
[
"pickle.loads",
"satella.instrumentation.Traceback.from_json",
"io.BytesIO",
"satella.instrumentation.Traceback.from_pickle",
"sys._current_frames",
"satella.instrumentation.Traceback",
"pickle.dumps"
] |
[((180, 191), 'satella.instrumentation.Traceback', 'Traceback', ([], {}), '()\n', (189, 191), False, 'from satella.instrumentation import Traceback\n'), ((207, 219), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (217, 219), False, 'import io\n'), ((310, 337), 'satella.instrumentation.Traceback.from_pickle', 'Traceback.from_pickle', (['byte'], {}), '(byte)\n', (331, 337), False, 'from satella.instrumentation import Traceback\n'), ((352, 380), 'satella.instrumentation.Traceback.from_pickle', 'Traceback.from_pickle', (['byte2'], {}), '(byte2)\n', (373, 380), False, 'from satella.instrumentation import Traceback\n'), ((735, 750), 'pickle.dumps', 'pickle.dumps', (['a'], {}), '(a)\n', (747, 750), False, 'import pickle\n'), ((640, 663), 'satella.instrumentation.Traceback.from_json', 'Traceback.from_json', (['js'], {}), '(js)\n', (659, 663), False, 'from satella.instrumentation import Traceback\n'), ((572, 583), 'satella.instrumentation.Traceback', 'Traceback', ([], {}), '()\n', (581, 583), False, 'from satella.instrumentation import Traceback\n'), ((797, 821), 'satella.instrumentation.Traceback.from_pickle', 'Traceback.from_pickle', (['b'], {}), '(b)\n', (818, 821), False, 'from satella.instrumentation import Traceback\n'), ((977, 988), 'satella.instrumentation.Traceback', 'Traceback', ([], {}), '()\n', (986, 988), False, 'from satella.instrumentation import Traceback\n'), ((1280, 1291), 'satella.instrumentation.Traceback', 'Traceback', ([], {}), '()\n', (1289, 1291), False, 'from satella.instrumentation import Traceback\n'), ((1694, 1705), 'satella.instrumentation.Traceback', 'Traceback', ([], {}), '()\n', (1703, 1705), False, 'from satella.instrumentation import Traceback\n'), ((1735, 1755), 'pickle.dumps', 'pickle.dumps', (['tb', '(-1)'], {}), '(tb, -1)\n', (1747, 1755), False, 'import pickle\n'), ((1354, 1369), 'pickle.loads', 'pickle.loads', (['a'], {}), '(a)\n', (1366, 1369), False, 'import pickle\n'), ((1451, 1472), 'sys._current_frames', 'sys._current_frames', ([], {}), '()\n', (1470, 1472), False, 'import sys\n')]
|
# Generated by Django 2.1.1 on 2019-01-16 09:49
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('data_ingestion_service', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='storedfiles',
name='category',
field=models.CharField(blank=True, choices=[('offline_survey_data', 'Offline Survey Data')], max_length=255, null=True),
),
]
|
[
"django.db.models.CharField"
] |
[((345, 462), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('offline_survey_data', 'Offline Survey Data')]", 'max_length': '(255)', 'null': '(True)'}), "(blank=True, choices=[('offline_survey_data',\n 'Offline Survey Data')], max_length=255, null=True)\n", (361, 462), False, 'from django.db import migrations, models\n')]
|
from __future__ import print_function
import numpy
OPENOPT = SCIPY = True
try:
from openopt import NLP
except ImportError:
OPENOPT = False
try:
from scipy.optimize import minimize
except ImportError:
SCIPY = False
SCIPY_LOCAL_SOLVERS = ['Nelder-Mead', 'Powell', 'L-BFGS-B', 'TNC', 'SLSQP']
OPENOPT_LOCAL_SOLVERS = ['bobyqa', 'ptn', 'slmvm2', 'ralg', 'mma', 'auglag', 'sqlcp']
def AMPGO(objfun, x0, args=(), local='L-BFGS-B', local_opts=None, bounds=None, maxfunevals=None,
totaliter=20, maxiter=5, glbtol=1e-5, eps1=0.02, eps2=0.1, tabulistsize=5,
tabustrategy='farthest', fmin=-numpy.inf, disp=None):
"""
Finds the global minimum of a function using the AMPGO (Adaptive Memory Programming for
Global Optimization) algorithm.
:param `objfun`: Function to be optimized, in the form ``f(x, *args)``.
:type `objfun`: callable
:param `args`: Additional arguments passed to `objfun`.
:type `args`: tuple
:param `local`: The local minimization method (e.g. ``"L-BFGS-B"``). It can be one of the available
`scipy` local solvers or `OpenOpt` solvers.
:type `local`: string
:param `bounds`: A list of tuples specifying the lower and upper bound for each independent variable
[(`xl0`, `xu0`), (`xl1`, `xu1`), ...]
:type `bounds`: list
:param `maxfunevals`: The maximum number of function evaluations allowed.
:type `maxfunevals`: integer
:param `totaliter`: The maximum number of global iterations allowed.
:type `totaliter`: integer
:param `maxiter`: The maximum number of `Tabu Tunnelling` iterations allowed during each global iteration.
:type `maxiter`: integer
:param `glbtol`: The optimization will stop if the absolute difference between the current minimum objective
function value and the provided global optimum (`fmin`) is less than `glbtol`.
:type `glbtol`: float
:param `eps1`: A constant used to define an aspiration value for the objective function during the Tunnelling phase.
:type `eps1`: float
:param `eps2`: Perturbation factor used to move away from the latest local minimum at the start of a Tunnelling phase.
:type `eps2`: float
:param `tabulistsize`: The size of the tabu search list (a circular list).
:type `tabulistsize`: integer
:param `tabustrategy`: The strategy to use when the size of the tabu list exceeds `tabulistsize`. It can be
'oldest' to drop the oldest point from the tabu list or 'farthest' to drop the element farthest from
the last local minimum found.
:type `tabustrategy`: string
:param `fmin`: If known, the objective function global optimum value.
:type `fmin`: float
:param `disp`: If zero or defaulted, then no output is printed on screen. If a positive number, then status
messages are printed.
:type `disp`: integer
:returns: A tuple of 5 elements, in the following order:
1. **best_x** (`array_like`): the estimated position of the global minimum.
2. **best_f** (`float`): the value of `objfun` at the minimum.
3. **evaluations** (`integer`): the number of function evaluations.
4. **msg** (`string`): a message describes the cause of the termination.
5. **tunnel_info** (`tuple`): a tuple containing the total number of Tunnelling phases performed and the
successful ones.
:rtype: `tuple`
The detailed implementation of AMPGO is described in the paper
"Adaptive Memory Programming for Constrained Global Optimization" located here:
http://leeds-faculty.colorado.edu/glover/fred%20pubs/416%20-%20AMP%20(TS)%20for%20Constrained%20Global%20Opt%20w%20Lasdon%20et%20al%20.pdf
Copyright 2014 <NAME>
"""
if local not in SCIPY_LOCAL_SOLVERS + OPENOPT_LOCAL_SOLVERS:
raise Exception('Invalid local solver selected: %s'%local)
if local in SCIPY_LOCAL_SOLVERS and not SCIPY:
raise Exception('The selected solver %s is not available as there is no scipy installation'%local)
if local in OPENOPT_LOCAL_SOLVERS and not OPENOPT:
raise Exception('The selected solver %s is not available as there is no OpenOpt installation'%local)
x0 = numpy.atleast_1d(x0)
n = len(x0)
if bounds is None:
bounds = [(None, None)] * n
if len(bounds) != n:
raise ValueError('length of x0 != length of bounds')
low = [0]*n
up = [0]*n
for i in range(n):
if bounds[i] is None:
l, u = -numpy.inf, numpy.inf
else:
l, u = bounds[i]
if l is None:
low[i] = -numpy.inf
else:
low[i] = l
if u is None:
up[i] = numpy.inf
else:
up[i] = u
if maxfunevals is None:
maxfunevals = max(100, 10*len(x0))
if tabulistsize < 1:
raise Exception('Invalid tabulistsize specified: %s. It should be an integer greater than zero.'%tabulistsize)
if tabustrategy not in ['oldest', 'farthest']:
raise Exception('Invalid tabustrategy specified: %s. It must be one of "oldest" or "farthest"'%tabustrategy)
iprint = 50
if disp is None or disp <= 0:
disp = 0
iprint = -1
low = numpy.asarray(low)
up = numpy.asarray(up)
tabulist = []
best_f = numpy.inf
best_x = x0
global_iter = 0
all_tunnel = success_tunnel = 0
evaluations = 0
if glbtol < 1e-8:
local_tol = glbtol
else:
local_tol = 1e-8
while 1:
if disp > 0:
print('\n')
print('='*72)
print('Starting MINIMIZATION Phase %-3d'%(global_iter+1))
print('='*72)
if local in OPENOPT_LOCAL_SOLVERS:
problem = NLP(objfun, x0, lb=low, ub=up, maxFunEvals=max(1, maxfunevals), ftol=local_tol, iprint=iprint)
problem.args = args
results = problem.solve(local)
xf, yf, num_fun = results.xf, results.ff, results.evals['f']
else:
options = {'maxiter': max(1, maxfunevals), 'disp': disp}
if local_opts is not None:
options.update(local_opts)
res = minimize(objfun, x0, args=args, method=local, bounds=bounds, tol=local_tol, options=options)
xf, yf, num_fun = res['x'], res['fun'], res['nfev']
maxfunevals -= num_fun
evaluations += num_fun
if yf < best_f:
best_f = yf
best_x = xf
if disp > 0:
print('\n\n ==> Reached local minimum: %s\n'%yf)
if best_f < fmin + glbtol:
if disp > 0:
print('='*72)
return best_x, best_f, evaluations, 'Optimization terminated successfully', (all_tunnel, success_tunnel)
if maxfunevals <= 0:
if disp > 0:
print('='*72)
return best_x, best_f, evaluations, 'Maximum number of function evaluations exceeded', (all_tunnel, success_tunnel)
tabulist = drop_tabu_points(xf, tabulist, tabulistsize, tabustrategy)
tabulist.append(xf)
i = improve = 0
while i < maxiter and improve == 0:
if disp > 0:
print('-'*72)
print('Starting TUNNELLING Phase (%3d-%3d)'%(global_iter+1, i+1))
print('-'*72)
all_tunnel += 1
r = numpy.random.uniform(-1.0, 1.0, size=(n, ))
beta = eps2*numpy.linalg.norm(xf)/numpy.linalg.norm(r)
if numpy.abs(beta) < 1e-8:
beta = eps2
x0 = xf + beta*r
x0 = numpy.where(x0 < low, low, x0)
x0 = numpy.where(x0 > up , up , x0)
aspiration = best_f - eps1*(1.0 + numpy.abs(best_f))
tunnel_args = tuple([objfun, aspiration, tabulist] + list(args))
if local in OPENOPT_LOCAL_SOLVERS:
problem = NLP(tunnel, x0, lb=low, ub=up, maxFunEvals=max(1, maxfunevals), ftol=local_tol, iprint=iprint)
problem.args = tunnel_args
results = problem.solve(local)
xf, yf, num_fun = results.xf, results.ff, results.evals['f']
else:
options = {'maxiter': max(1, maxfunevals), 'disp': disp}
if local_opts is not None:
options.update(local_opts)
res = minimize(tunnel, x0, args=tunnel_args, method=local, bounds=bounds, tol=local_tol, options=options)
xf, yf, num_fun = res['x'], res['fun'], res['nfev']
maxfunevals -= num_fun
evaluations += num_fun
yf = inverse_tunnel(xf, yf, aspiration, tabulist)
if yf <= best_f + glbtol:
oldf = best_f
best_f = yf
best_x = xf
improve = 1
success_tunnel += 1
if disp > 0:
print('\n\n ==> Successful tunnelling phase. Reached local minimum: %s < %s\n'%(yf, oldf))
if best_f < fmin + glbtol:
return best_x, best_f, evaluations, 'Optimization terminated successfully', (all_tunnel, success_tunnel)
i += 1
if maxfunevals <= 0:
return best_x, best_f, evaluations, 'Maximum number of function evaluations exceeded', (all_tunnel, success_tunnel)
tabulist = drop_tabu_points(xf, tabulist, tabulistsize, tabustrategy)
tabulist.append(xf)
if disp > 0:
print('='*72)
global_iter += 1
x0 = xf.copy()
if global_iter >= totaliter:
return best_x, best_f, evaluations, 'Maximum number of global iterations exceeded', (all_tunnel, success_tunnel)
if best_f < fmin + glbtol:
return best_x, best_f, evaluations, 'Optimization terminated successfully', (all_tunnel, success_tunnel)
def drop_tabu_points(xf, tabulist, tabulistsize, tabustrategy):
if len(tabulist) < tabulistsize:
return tabulist
if tabustrategy == 'oldest':
tabulist.pop(0)
else:
distance = numpy.sqrt(numpy.sum((tabulist-xf)**2, axis=1))
index = numpy.argmax(distance)
tabulist.pop(index)
return tabulist
def tunnel(x0, *args):
objfun, aspiration, tabulist = args[0:3]
fun_args = ()
if len(args) > 3:
fun_args = tuple(args[3:])
numerator = (objfun(x0, *fun_args) - aspiration)**2
denominator = 1.0
for tabu in tabulist:
denominator = denominator*numpy.sqrt(numpy.sum((x0 - tabu)**2))
ytf = numerator/denominator
return ytf
def inverse_tunnel(xtf, ytf, aspiration, tabulist):
denominator = 1.0
for tabu in tabulist:
denominator = denominator*numpy.sqrt(numpy.sum((xtf - tabu)**2))
numerator = ytf*denominator
yf = aspiration + numpy.sqrt(ytf*denominator)
return yf
if __name__ == '__main__':
import os
import go_benchmark
os.system('cls')
for tests in ['Bird']:
klass = getattr(go_benchmark, tests)()
x0 = klass.generator()
fmin = klass.fglob
bounds = klass.bounds
tolfun = 1e-6
xf, yf, fun_evals, msg, tt = AMPGO(klass.evaluator, x0, args=(), local='L-BFGS-B', bounds=bounds,
maxfunevals=20000, totaliter=2000, maxiter=5, eps1=0.02, eps2=0.1,
tabulistsize=5, tabustrategy='farthest', fmin=fmin, disp=1, glbtol=tolfun)
xb = numpy.asarray(klass.global_optimum)
if xb.ndim == 2:
xb = xb[0, :]
print('\n\n')
print('F_glob :', klass.evaluator(xb))
print('F_best :', yf)
print('X_best :', xf)
print('F_evals:', fun_evals)
print('Message:', msg)
print('Tunnels:', tt)
|
[
"numpy.random.uniform",
"scipy.optimize.minimize",
"numpy.sum",
"numpy.abs",
"numpy.argmax",
"numpy.asarray",
"os.system",
"numpy.where",
"numpy.linalg.norm",
"numpy.atleast_1d",
"numpy.sqrt"
] |
[((4282, 4302), 'numpy.atleast_1d', 'numpy.atleast_1d', (['x0'], {}), '(x0)\n', (4298, 4302), False, 'import numpy\n'), ((5366, 5384), 'numpy.asarray', 'numpy.asarray', (['low'], {}), '(low)\n', (5379, 5384), False, 'import numpy\n'), ((5395, 5412), 'numpy.asarray', 'numpy.asarray', (['up'], {}), '(up)\n', (5408, 5412), False, 'import numpy\n'), ((11382, 11398), 'os.system', 'os.system', (['"""cls"""'], {}), "('cls')\n", (11391, 11398), False, 'import os\n'), ((10537, 10559), 'numpy.argmax', 'numpy.argmax', (['distance'], {}), '(distance)\n', (10549, 10559), False, 'import numpy\n'), ((11258, 11287), 'numpy.sqrt', 'numpy.sqrt', (['(ytf * denominator)'], {}), '(ytf * denominator)\n', (11268, 11287), False, 'import numpy\n'), ((11950, 11985), 'numpy.asarray', 'numpy.asarray', (['klass.global_optimum'], {}), '(klass.global_optimum)\n', (11963, 11985), False, 'import numpy\n'), ((6357, 6453), 'scipy.optimize.minimize', 'minimize', (['objfun', 'x0'], {'args': 'args', 'method': 'local', 'bounds': 'bounds', 'tol': 'local_tol', 'options': 'options'}), '(objfun, x0, args=args, method=local, bounds=bounds, tol=local_tol,\n options=options)\n', (6365, 6453), False, 'from scipy.optimize import minimize\n'), ((7610, 7652), 'numpy.random.uniform', 'numpy.random.uniform', (['(-1.0)', '(1.0)'], {'size': '(n,)'}), '(-1.0, 1.0, size=(n,))\n', (7630, 7652), False, 'import numpy\n'), ((7874, 7904), 'numpy.where', 'numpy.where', (['(x0 < low)', 'low', 'x0'], {}), '(x0 < low, low, x0)\n', (7885, 7904), False, 'import numpy\n'), ((7923, 7951), 'numpy.where', 'numpy.where', (['(x0 > up)', 'up', 'x0'], {}), '(x0 > up, up, x0)\n', (7934, 7951), False, 'import numpy\n'), ((10483, 10522), 'numpy.sum', 'numpy.sum', (['((tabulist - xf) ** 2)'], {'axis': '(1)'}), '((tabulist - xf) ** 2, axis=1)\n', (10492, 10522), False, 'import numpy\n'), ((7701, 7721), 'numpy.linalg.norm', 'numpy.linalg.norm', (['r'], {}), '(r)\n', (7718, 7721), False, 'import numpy\n'), ((7752, 7767), 'numpy.abs', 'numpy.abs', (['beta'], {}), '(beta)\n', (7761, 7767), False, 'import numpy\n'), ((8672, 8776), 'scipy.optimize.minimize', 'minimize', (['tunnel', 'x0'], {'args': 'tunnel_args', 'method': 'local', 'bounds': 'bounds', 'tol': 'local_tol', 'options': 'options'}), '(tunnel, x0, args=tunnel_args, method=local, bounds=bounds, tol=\n local_tol, options=options)\n', (8680, 8776), False, 'from scipy.optimize import minimize\n'), ((10929, 10956), 'numpy.sum', 'numpy.sum', (['((x0 - tabu) ** 2)'], {}), '((x0 - tabu) ** 2)\n', (10938, 10956), False, 'import numpy\n'), ((11166, 11194), 'numpy.sum', 'numpy.sum', (['((xtf - tabu) ** 2)'], {}), '((xtf - tabu) ** 2)\n', (11175, 11194), False, 'import numpy\n'), ((7679, 7700), 'numpy.linalg.norm', 'numpy.linalg.norm', (['xf'], {}), '(xf)\n', (7696, 7700), False, 'import numpy\n'), ((8003, 8020), 'numpy.abs', 'numpy.abs', (['best_f'], {}), '(best_f)\n', (8012, 8020), False, 'import numpy\n')]
|
# @PascalPuchtler
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import math
import sys
import time
import numpy as np
from planar import Polygon
from Controller.AreaMap.MapPoint import MapPoint
from Controller.MoveController.CarModel import CarModel
from .NearestNeighbor import NearestNeighbor
from .SupportPointChain import SupportPointChain
class TrajectoryPlanning:
def __init__(self, areaMap, emergencyStopQueue):
self.areaMap = areaMap
self.emergencyStopQueue = emergencyStopQueue
self.carModel = CarModel()
self.nearestNeighbor = NearestNeighbor()
self.supportPointChain = SupportPointChain()
self.reset()
self.maxDriveableSlope = 3
self.normalMode = True
self.minImageCount = 6
self.imageCount = 0
def reset(self):
self.newestSupportChain = []
self.callculatedNextMove = None
def nextMove(self):
self.imageCount +=1
if self.minImageCount > self.imageCount:
self.callculatedNextMove = {'x': 0, 'y': 0, 'm': 0}
return self.callculatedNextMove
nextMove = self.handleNextMove()
if not self.emergencyStopQueue.empty():
print('emergeny mode')
self.emergencyStopQueue.get()
self.normalMode = False
elif self.normalMode is False and nextMove is not None:
self.normalMode = True
print('reset Mode')
return {'command': 'resetSavety'}
if self.normalMode:
if nextMove is not None:
self.callculatedNextMove = nextMove
return nextMove
self.callculatedNextMove = {'x': 0, 'y': 0, 'm': 0}
return {'x': 0, 'y': 0, 'm': 0}
else:
self.callculatedNextMove = {'x': 0, 'y': 0, 'm': 0}
self.areaMap.reset()
self.imageCount=0
return {'x': 0, 'y': 0, 'm': 0}
def handleNextMove(self):
if not self.areaMap.isBorderAvailable():
# print('no border available')
return None
supportPoints = self.nearestNeighbor.getNearestNeighbor(self.areaMap.left, self.areaMap.right)
supportPointChain = self.supportPointChain.getSupportPointChain(supportPoints, self.areaMap.robotPosition)
self.newestSupportChain = supportPointChain
if len(supportPointChain)<=1:
print('no possible target in drive direction')
return None
nextMove = self.callculateNextTarget(self.areaMap.robotPosition, supportPointChain)
return nextMove
def callculateNextTarget(self,robotPosition, supportPointChain):
nextPoint = supportPointChain[1]
offsetNextPoint = robotPosition.getRelativeOffsetsToPoint(nextPoint[0],nextPoint[1])
if len(supportPointChain) >= 3:
secondPoint = supportPointChain[2]
offsetSecondPoint = robotPosition.getRelativeOffsetsToPoint(secondPoint[0],secondPoint[1])
slope = self.slope(offsetNextPoint, offsetSecondPoint)
if offsetNextPoint[1] < offsetSecondPoint[1]:
slope = -slope
else:
slope = 0
return {'x': offsetNextPoint[1], 'y': -offsetNextPoint[0], 'm': slope/2}
def slope(self, point1, point2):
m = (point2[0]-point1[0])/(point2[1]-point1[1])
m= np.clip(m, -self.maxDriveableSlope,self.maxDriveableSlope)
return m
|
[
"Controller.MoveController.CarModel.CarModel",
"numpy.clip"
] |
[((1120, 1130), 'Controller.MoveController.CarModel.CarModel', 'CarModel', ([], {}), '()\n', (1128, 1130), False, 'from Controller.MoveController.CarModel import CarModel\n'), ((3953, 4012), 'numpy.clip', 'np.clip', (['m', '(-self.maxDriveableSlope)', 'self.maxDriveableSlope'], {}), '(m, -self.maxDriveableSlope, self.maxDriveableSlope)\n', (3960, 4012), True, 'import numpy as np\n')]
|
"""
This module downloads a lot of songs from anime music quiz
Dependencies:
ffmpeg
selenium
Firefox
geckodriver
"""
import os
import re
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
import time
import json
from pathlib import Path
import subprocess
import sqlite3
class Database:
def __init__(self, database_file):
self.database_file = database_file
conn = self.conn = sqlite3.connect(database_file)
conn.execute("""
CREATE TABLE IF NOT EXISTS downloaded(
source TEXT,
annSongId INTEGER
);""")
conn.commit()
def is_downloaded(self, song, source):
c = self.conn.cursor()
c.execute("""
SELECT source
FROM downloaded
WHERE source=(?) AND annSongId = (?)
""", (source, song["annSongId"],))
return c.fetchone() is not None
def add_downloaded(self, song, source):
self.conn.execute("""
INSERT INTO downloaded VALUES(?,?)
""", (source, song["annSongId"]))
self.conn.commit()
def update_anime_lists(driver, anilist="", kitsu=""):
driver.execute_script('document.getElementById("mpNewsContainer").innerHTML = "Updating AniList...";')
status = driver.find_element_by_id("mpNewsContainer")
driver.execute_script("""new Listener("anime list update result", function (result) {
if (result.success) {
document.getElementById("mpNewsContainer").innerHTML = "Updated Successful: " + result.message;
} else {
document.getElementById("mpNewsContainer").innerHTML = "Update Unsuccessful: " + result.message;
}
}).bindListener()""")
driver.execute_script("""
socket.sendCommand({
type: "library",
command: "update anime list",
data: {
newUsername: arguments[0],
listType: 'ANILIST'
}
});""", anilist)
while True:
if status.text != "Updating AniList...":
break
time.sleep(0.5)
driver.execute_script('document.getElementById("mpNewsContainer").innerHTML = "Updating Kitsu...";')
driver.execute_script("""
socket.sendCommand({
type: "library",
command: "update anime list",
data: {
newUsername: arguments[0],
listType: 'KITSU'
}
});""", kitsu)
while True:
if status.text != "Updating Kitsu...":
break
time.sleep(0.5)
def get_question_list(driver):
driver.execute_script('document.getElementById("mpNewsContainer").innerHTML = "Loading Expand...";')
script ="""new Listener("expandLibrary questions", function (payload) {
expandLibrary.tackyVariable = (JSON.stringify(payload.questions));
document.getElementById("mpNewsContainer").innerHTML = "Expand Loaded!"
}).bindListener();
socket.sendCommand({
type: "library",
command: "expandLibrary questions"
});"""
driver.execute_script(script)
status = driver.find_element_by_id("mpNewsContainer")
while True:
if status.text != "Loading Expand...":
break
time.sleep(0.5)
time.sleep(3)
pure_string = driver.execute_script('return expandLibrary.tackyVariable')
driver.execute_script('expandLibrary.tackyVariable = ""')
ret = json.loads(pure_string)
driver.execute_script('document.getElementById("mpNewsContainer").innerHTML = "";')
return ret
ffmpeg = "ffmpeg"
def main():
"""
the main function, where the magic happens
"""
with open("automator.config") as file:
data = file.readlines()
username = data[0][:-1]
password = data[1][:-1]
anilist = data[2][:-1]
kitsu = data[3][:-1]
global ffmpeg
ffmpeg = data[4][:-1]
outpath = data[5][:-1]
path = Path(__file__).parent.absolute()
if not outpath:
outpath = path.joinpath(Path('out'))
else:
outpath = Path(outpath)
driver = webdriver.Firefox(executable_path='geckodriver/geckodriver')
driver.get('https://animemusicquiz.com')
driver.find_element_by_id("loginUsername").send_keys(username)
driver.find_element_by_id("loginPassword").send_keys(password)
driver.find_element_by_id("loginButton").click()
time.sleep(10)
update_anime_lists(driver, anilist, kitsu)
questions = get_question_list(driver)
driver.execute_script("options.logout();")
driver.close()
database = Database("downloaded.db")
for question in questions:
annId = question["annId"]
name = question["name"]
songs = question["songs"]
for song in songs:
save(annId, name, song, outpath, database)
def save(annId, anime, song, outpath, database):
source_mp3 = song["examples"].get("mp3", None)
if not source_mp3:
return
if database.is_downloaded(song, source_mp3):
return
title = song["name"]
artist = song["artist"]
type = ["Unknown", "Opening", "Ending", "Insert"][song["type"]]
number = song["number"]
annSongId = song["annSongId"]
command = [
'"%s"' % ffmpeg,
"-y",
"-i", source_mp3,
"-vn",
"-c:a", "copy",
"-map_metadata", "-1",
"-metadata", 'title="%s"' % title,
"-metadata", 'artist="%s"' % artist,
"-metadata", 'track="%d"' % number,
"-metadata", 'disc="%d"' % song["type"],
"-metadata", 'genre="%s"' % type,
"-metadata", 'album="%s"' % anime,
'"%s"' % create_file_name(anime, type, number, title, artist, annId, annSongId, outpath)
]
execute_command(" ".join(command))
database.add_downloaded(song, source_mp3)
return True
def execute_command(command):
subprocess.call(command)
def create_file_name_Windows(animeTitle, songType, songNumber, songTitle, songArtist, annId, annSongId, path, allowance=255):
"""
Creates a windows-compliant filename by removing all bad characters
and maintaining the windows path length limit (which by default is 255)
"""
allowance -= len(str(path)) + 1 # by default, windows is sensitive to long total paths.
bad_characters = re.compile(r"\\|/|<|>|:|\"|\||\?|\*|&|\^|\$|" + '\0')
return create_file_name_common(animeTitle, songType, songNumber, songTitle, songArtist, annId, annSongId, path, bad_characters, allowance)
def create_file_name_POSIX(animeTitle, songType, songNumber, songTitle, songArtist, annId, annSongId, path, allowance=32767):
"""
Creates a POSIX-compliant filename by removing all bad characters
and maintaining the NTFS path length limit
"""
bad_characters = re.compile(r"/" + '\0')
return create_file_name_common(animeTitle, songType, songNumber, songTitle, songArtist, annId, annSongId, path, bad_characters, allowance)
def create_file_name_common(animeTitle, songType, songNumber, songTitle, songArtist, annId, annSongId, path, bad_characters, allowance=255):
if allowance > 255:
allowance = 255 # on most common filesystems, including NTFS a filename can not exceed 255 characters
# assign allowance for things that must be in the file name
allowance -= len(str(annId))
allowance -= len(str(annSongId))
allowance -= len("_-.mp3") # accounting for separators (-_) for annId annSongId, and .mp3
if allowance < 0:
raise ValueError("""It is not possible to give a reasonable file name, due to length limitations.
Consider changing location to somewhere with a shorter path.""")
# make sure that user input doesn't contain bad characters
animeTitle = bad_characters.sub("", animeTitle)
songType = bad_characters.sub('', songType)
songTitle = bad_characters.sub('', songTitle)
songArtist = bad_characters.sub('', songArtist)
song_number_string = ""
if songNumber:
song_number_string = "_" + str(songNumber)
ret = ""
for string in [animeTitle, songType + song_number_string, songTitle, songArtist]:
length = len(string)
if allowance - length < 0:
string = string[:allowance]
length = len(string)
ret += string
allowance -= length
if allowance - 1 > 1:
ret += "-"
else:
break
else:
ret = ret[:-1] # removes last "-"
ret = path.joinpath(Path(ret + "_" + str(annId) + "-" + str(annSongId) + ".mp3"))
return str(ret)
if os.name == "nt":
create_file_name = create_file_name_Windows
elif os.name == "posix":
create_file_name = create_file_name_POSIX
if __name__ == "__main__":
main()
|
[
"json.loads",
"selenium.webdriver.Firefox",
"time.sleep",
"pathlib.Path",
"subprocess.call",
"sqlite3.connect",
"re.compile"
] |
[((3107, 3120), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (3117, 3120), False, 'import time\n'), ((3271, 3294), 'json.loads', 'json.loads', (['pure_string'], {}), '(pure_string)\n', (3281, 3294), False, 'import json\n'), ((3940, 4000), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {'executable_path': '"""geckodriver/geckodriver"""'}), "(executable_path='geckodriver/geckodriver')\n", (3957, 4000), False, 'from selenium import webdriver\n'), ((4237, 4251), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (4247, 4251), False, 'import time\n'), ((5708, 5732), 'subprocess.call', 'subprocess.call', (['command'], {}), '(command)\n', (5723, 5732), False, 'import subprocess\n'), ((6139, 6201), 're.compile', 're.compile', (['(\'\\\\\\\\|/|<|>|:|\\\\"|\\\\||\\\\?|\\\\*|&|\\\\^|\\\\$|\' + \'\\x00\')'], {}), '(\'\\\\\\\\|/|<|>|:|\\\\"|\\\\||\\\\?|\\\\*|&|\\\\^|\\\\$|\' + \'\\x00\')\n', (6149, 6201), False, 'import re\n'), ((6619, 6643), 're.compile', 're.compile', (["('/' + '\\x00')"], {}), "('/' + '\\x00')\n", (6629, 6643), False, 'import re\n'), ((491, 521), 'sqlite3.connect', 'sqlite3.connect', (['database_file'], {}), '(database_file)\n', (506, 521), False, 'import sqlite3\n'), ((2025, 2040), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (2035, 2040), False, 'import time\n'), ((2422, 2437), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (2432, 2437), False, 'import time\n'), ((3087, 3102), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (3097, 3102), False, 'import time\n'), ((3913, 3926), 'pathlib.Path', 'Path', (['outpath'], {}), '(outpath)\n', (3917, 3926), False, 'from pathlib import Path\n'), ((3872, 3883), 'pathlib.Path', 'Path', (['"""out"""'], {}), "('out')\n", (3876, 3883), False, 'from pathlib import Path\n'), ((3787, 3801), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (3791, 3801), False, 'from pathlib import Path\n')]
|
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
from lib.sender import send
from lib.core import utils
def login(options):
url = options.get('remote_api') + "/auth/api/token/"
body = {
"username": options.get('credentials')[0],
"password": options.get('credentials')[1]
}
r = send.send_post(url, body, is_json=True)
if r.json().get('access'):
utils.print_good("Authentication success")
jwt = '<PASSWORD> ' + r.json().get('access')
options['JWT'] = jwt
return options
utils.print_bad("Authentication failed")
return False
|
[
"os.path.realpath",
"lib.core.utils.print_good",
"lib.sender.send.send_post",
"lib.core.utils.print_bad"
] |
[((362, 401), 'lib.sender.send.send_post', 'send.send_post', (['url', 'body'], {'is_json': '(True)'}), '(url, body, is_json=True)\n', (376, 401), False, 'from lib.sender import send\n'), ((594, 634), 'lib.core.utils.print_bad', 'utils.print_bad', (['"""Authentication failed"""'], {}), "('Authentication failed')\n", (609, 634), False, 'from lib.core import utils\n'), ((441, 483), 'lib.core.utils.print_good', 'utils.print_good', (['"""Authentication success"""'], {}), "('Authentication success')\n", (457, 483), False, 'from lib.core import utils\n'), ((70, 96), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (86, 96), False, 'import os\n')]
|
""" Platform views for editorial app.
editorial/views/platformviews.py
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from braces.views import LoginRequiredMixin
from django.conf import settings
from django.core.urlresolvers import reverse
from django.views.generic import FormView
from editorial.forms import (
PlatformAccountFormSet,
)
# ----------------------------------------------------------------------#
# Platform Views
# ----------------------------------------------------------------------#
# ACCESS: Any user can edit their own platforms.
class UserPlatformAccountCreateView(LoginRequiredMixin, FormView):
"""Display formset to add social accounts to a user, organization or project."""
form_class = PlatformAccountFormSet
template_name = "editorial/platformaccounts_form.html"
def get_initial(self):
"""Pass user/organization to subform."""
# Older versions of Django don't have a nice way to pass things to forms within
# formsets except using initial data, so we shoe-horn it into here
return [{"user": self.request.user, 'organization': self.request.user.organization}]
def form_valid(self, form):
"""Save data."""
# One day, this may want to grow to handle deleting platform accounts, using a
# tick-to-delete. Or, with a newer Django version, this could move to extra_views,
# which has a nice built-in for formset editing.
for subform in form:
if subform.cleaned_data:
subform.save()
return super(UserPlatformAccountCreateView, self).form_valid(form)
def get_success_url(self):
"""Return to user profile."""
return reverse("user_edit", kwargs={"pk": self.request.user.id})
|
[
"django.core.urlresolvers.reverse"
] |
[((1782, 1839), 'django.core.urlresolvers.reverse', 'reverse', (['"""user_edit"""'], {'kwargs': "{'pk': self.request.user.id}"}), "('user_edit', kwargs={'pk': self.request.user.id})\n", (1789, 1839), False, 'from django.core.urlresolvers import reverse\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 27 00:52:19 2018
@author: xavier.qiu
"""
from common.load import *
from common.pd_util import *
from common.preprocess import *
from common.util import *
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
import numpy as np
import gc
import pickle
from tqdm import tqdm
class DataSet:
def __init__(self, embedding='glove', voc_len=105000, max_ques_len=72, cache=True):
"""
:param embedding:
"""
self.config = load_config()
self.embedding_type = embedding
self.voc_len = voc_len
self.max_ques_len = max_ques_len
if cache and os.path.exists(os.path.join(self.config["data_dir"], "y_train.pickle")):
with open(os.path.join(self.config["data_dir"], "x_train.pickle"), 'rb') as handle:
self.x_train = pickle.load(handle)
with open(os.path.join(self.config["data_dir"], "x_test.pickle"), 'rb') as handle:
self.x_test = pickle.load(handle)
with open(os.path.join(self.config["data_dir"], "y_train.pickle"), 'rb') as handle:
self.y_train = pickle.load(handle)
with open(os.path.join(self.config["data_dir"], "embedding_matrix.pickle"), 'rb') as handle:
self.embedding_matrix = pickle.load(handle)
return
print("Loading Train df")
self.train_df = pd.read_csv(os.path.join(self.config["data_dir"], "train.csv"))
print("Loading Test df")
self.test_df = pd.read_csv(os.path.join(self.config["data_dir"], "test.csv"))
self.preprocess("train")
self.preprocess("test")
self.word_index = None
# convert question_text to question_ids_list
self.word2indices()
print("Loading Embedding - {}".format(embedding))
self.embedding_index = load_embedding(self.embedding_type, word_index=self.word_index, voc_len = self.voc_len)
if self.embedding_type != "mix":
self.embedding_matrix = self.make_embed_matrix(self.embedding_index, self.word_index, self.voc_len)
else:
self.embedding_matrix = self.embedding_index
del self.word_index
del self.embedding_index
send_msg("Load Done")
gc.collect()
with open(os.path.join(self.config["data_dir"], "x_train.pickle"), 'wb') as handle:
pickle.dump(self.x_train, handle, protocol=pickle.HIGHEST_PROTOCOL)
with open(os.path.join(self.config["data_dir"], "x_test.pickle"), 'wb') as handle:
pickle.dump(self.x_test, handle, protocol=pickle.HIGHEST_PROTOCOL)
with open(os.path.join(self.config["data_dir"], "y_train.pickle"), 'wb') as handle:
pickle.dump(self.y_train, handle, protocol=pickle.HIGHEST_PROTOCOL)
with open(os.path.join(self.config["data_dir"], "embedding_matrix.pickle"), 'wb') as handle:
pickle.dump(self.embedding_matrix, handle, protocol=pickle.HIGHEST_PROTOCOL)
def make_embed_matrix(self, embeddings_index, word_index, len_voc):
all_embs = np.stack(embeddings_index.values())
emb_mean, emb_std = all_embs.mean(), all_embs.std()
embed_size = all_embs.shape[1]
word_index = word_index
embedding_matrix = np.random.normal(emb_mean, emb_std, (len_voc, embed_size))
for word, i in tqdm(word_index.items()):
if i >= len_voc:
continue
embedding_vector = embeddings_index.get(word)
if embedding_vector is not None:
embedding_matrix[i] = embedding_vector
return embedding_matrix
def word2indices(self):
t = Tokenizer(num_words=self.voc_len, filters='')
x_train = self.train_df["treated_question"].fillna("_na_").values
x_test = self.test_df["treated_question"].fillna("_na_").values
t.fit_on_texts(list(x_train))
self.word_index = t.word_index
# Tokenize the sentences
x_train = t.texts_to_sequences(x_train)
x_test = t.texts_to_sequences(x_test)
# Pad the sentences
x_train = pad_sequences(x_train, maxlen=self.max_ques_len)
x_test = pad_sequences(x_test, maxlen=self.max_ques_len)
# Get the target values
y_train = self.train_df['target'].values
self.x_train = x_train
self.x_test = x_test
self.y_train = y_train
def preprocess(self, data_set, filters=["punct", "contraction", "special characters", "misspell"]):
"""
:param filters:
:param data_set:
:return:
"""
if data_set == "train":
df = self.train_df
else:
df = self.test_df
print("Pre-processing {}".format(data_set))
df["treated_question"] = df["question_text"]
if "numbers" in filters:
print("Clean number ing ... ")
df["treated_question"] = df["treated_question"].apply(lambda x: deal_with_numbers(x))
if "punct" in filters:
print("Clean punct ing ... ")
df['treated_question'] = df['treated_question'].apply(lambda x: deal_with_punct(x))
if "lower" in filters:
print("Lowering ... ")
df['treated_question'] = df['treated_question'].apply(lambda x: x.lower())
if "special characters" in filters:
print("Clean special chars ing ... ")
df['treated_question'] = df['treated_question'].apply(lambda x: deal_with_special_characters(x))
if "misspell" in filters:
print("Clean misspell ing ...")
df['treated_question'] = df['treated_question'].apply(lambda x: deal_with_misspell(x))
|
[
"pickle.dump",
"keras.preprocessing.sequence.pad_sequences",
"gc.collect",
"keras.preprocessing.text.Tokenizer",
"pickle.load",
"numpy.random.normal"
] |
[((2340, 2352), 'gc.collect', 'gc.collect', ([], {}), '()\n', (2350, 2352), False, 'import gc\n'), ((3345, 3403), 'numpy.random.normal', 'np.random.normal', (['emb_mean', 'emb_std', '(len_voc, embed_size)'], {}), '(emb_mean, emb_std, (len_voc, embed_size))\n', (3361, 3403), True, 'import numpy as np\n'), ((3741, 3786), 'keras.preprocessing.text.Tokenizer', 'Tokenizer', ([], {'num_words': 'self.voc_len', 'filters': '""""""'}), "(num_words=self.voc_len, filters='')\n", (3750, 3786), False, 'from keras.preprocessing.text import Tokenizer\n'), ((4188, 4236), 'keras.preprocessing.sequence.pad_sequences', 'pad_sequences', (['x_train'], {'maxlen': 'self.max_ques_len'}), '(x_train, maxlen=self.max_ques_len)\n', (4201, 4236), False, 'from keras.preprocessing.sequence import pad_sequences\n'), ((4254, 4301), 'keras.preprocessing.sequence.pad_sequences', 'pad_sequences', (['x_test'], {'maxlen': 'self.max_ques_len'}), '(x_test, maxlen=self.max_ques_len)\n', (4267, 4301), False, 'from keras.preprocessing.sequence import pad_sequences\n'), ((2459, 2526), 'pickle.dump', 'pickle.dump', (['self.x_train', 'handle'], {'protocol': 'pickle.HIGHEST_PROTOCOL'}), '(self.x_train, handle, protocol=pickle.HIGHEST_PROTOCOL)\n', (2470, 2526), False, 'import pickle\n'), ((2630, 2696), 'pickle.dump', 'pickle.dump', (['self.x_test', 'handle'], {'protocol': 'pickle.HIGHEST_PROTOCOL'}), '(self.x_test, handle, protocol=pickle.HIGHEST_PROTOCOL)\n', (2641, 2696), False, 'import pickle\n'), ((2801, 2868), 'pickle.dump', 'pickle.dump', (['self.y_train', 'handle'], {'protocol': 'pickle.HIGHEST_PROTOCOL'}), '(self.y_train, handle, protocol=pickle.HIGHEST_PROTOCOL)\n', (2812, 2868), False, 'import pickle\n'), ((2982, 3058), 'pickle.dump', 'pickle.dump', (['self.embedding_matrix', 'handle'], {'protocol': 'pickle.HIGHEST_PROTOCOL'}), '(self.embedding_matrix, handle, protocol=pickle.HIGHEST_PROTOCOL)\n', (2993, 3058), False, 'import pickle\n'), ((920, 939), 'pickle.load', 'pickle.load', (['handle'], {}), '(handle)\n', (931, 939), False, 'import pickle\n'), ((1065, 1084), 'pickle.load', 'pickle.load', (['handle'], {}), '(handle)\n', (1076, 1084), False, 'import pickle\n'), ((1212, 1231), 'pickle.load', 'pickle.load', (['handle'], {}), '(handle)\n', (1223, 1231), False, 'import pickle\n'), ((1377, 1396), 'pickle.load', 'pickle.load', (['handle'], {}), '(handle)\n', (1388, 1396), False, 'import pickle\n')]
|
"""
Copyright (c) 2015 <NAME> and Bamboo HR LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from unittest.case import TestCase
from nose.tools.trivial import eq_
from rapid.lib.framework.ioc import IOC
from rapid.lib.framework.injectable import Injectable
class TrialClass(object):
def has_trial(self):
return False
class OtherClass(object):
def has_something(self):
return True
class TestClass(Injectable):
__injectables__ = {"trial": TrialClass, "other": OtherClass}
def __init__(self, trial, other, oneMore):
self.trial = trial
self.other = other
self.oneMore = oneMore
class TestIOC(TestCase):
def test_multi_dependency(self):
testclass = IOC.get_class_instance(TestClass, "Something")
eq_(False, testclass.trial.has_trial())
|
[
"rapid.lib.framework.ioc.IOC.get_class_instance"
] |
[((1216, 1262), 'rapid.lib.framework.ioc.IOC.get_class_instance', 'IOC.get_class_instance', (['TestClass', '"""Something"""'], {}), "(TestClass, 'Something')\n", (1238, 1262), False, 'from rapid.lib.framework.ioc import IOC\n')]
|
import numpy as np
# Sizes relevant to default camera frame
ASPECT_RATIO = 16.0 / 9.0
FRAME_HEIGHT = 8.0
FRAME_WIDTH = FRAME_HEIGHT * ASPECT_RATIO
FRAME_Y_RADIUS = FRAME_HEIGHT / 2
FRAME_X_RADIUS = FRAME_WIDTH / 2
DEFAULT_PIXEL_HEIGHT = 1080
DEFAULT_PIXEL_WIDTH = 1920
DEFAULT_FRAME_RATE = 30
SMALL_BUFF = 0.1
MED_SMALL_BUFF = 0.25
MED_LARGE_BUFF = 0.5
LARGE_BUFF = 1
DEFAULT_MOBJECT_TO_EDGE_BUFFER = MED_LARGE_BUFF
DEFAULT_MOBJECT_TO_MOBJECT_BUFFER = MED_SMALL_BUFF
# All in seconds
DEFAULT_POINTWISE_FUNCTION_RUN_TIME = 3.0
DEFAULT_WAIT_TIME = 1.0
ORIGIN = np.array((0., 0., 0.))
UP = np.array((0., 1., 0.))
DOWN = np.array((0., -1., 0.))
RIGHT = np.array((1., 0., 0.))
LEFT = np.array((-1., 0., 0.))
IN = np.array((0., 0., -1.))
OUT = np.array((0., 0., 1.))
X_AXIS = np.array((1., 0., 0.))
Y_AXIS = np.array((0., 1., 0.))
Z_AXIS = np.array((0., 0., 1.))
# Useful abbreviations for diagonals
UL = UP + LEFT
UR = UP + RIGHT
DL = DOWN + LEFT
DR = DOWN + RIGHT
TOP = FRAME_Y_RADIUS * UP
BOTTOM = FRAME_Y_RADIUS * DOWN
LEFT_SIDE = FRAME_X_RADIUS * LEFT
RIGHT_SIDE = FRAME_X_RADIUS * RIGHT
PI = np.pi
TAU = 2 * PI
DEGREES = TAU / 360
FFMPEG_BIN = "ffmpeg"
JOINT_TYPE_MAP = {
"auto": 0,
"round": 1,
"bevel": 2,
"miter": 3,
}
# Related to Text
START_X = 30
START_Y = 20
NORMAL = "NORMAL"
ITALIC = "ITALIC"
OBLIQUE = "OBLIQUE"
BOLD = "BOLD"
DEFAULT_STROKE_WIDTH = 4
# Colors
COLOR_MAP = {
"BLUE_E": "#1C758A",
"BLUE_D": "#29ABCA",
"BLUE_C": "#58C4DD",
"BLUE_B": "#9CDCEB",
"BLUE_A": "#C7E9F1",
"TEAL_E": "#49A88F",
"TEAL_D": "#55C1A7",
"TEAL_C": "#5CD0B3",
"TEAL_B": "#76DDC0",
"TEAL_A": "#ACEAD7",
"GREEN_E": "#699C52",
"GREEN_D": "#77B05D",
"GREEN_C": "#83C167",
"GREEN_B": "#A6CF8C",
"GREEN_A": "#C9E2AE",
"YELLOW_E": "#E8C11C",
"YELLOW_D": "#F4D345",
"YELLOW_C": "#FFFF00",
"YELLOW_B": "#FFEA94",
"YELLOW_A": "#FFF1B6",
"GOLD_E": "#C78D46",
"GOLD_D": "#E1A158",
"GOLD_C": "#F0AC5F",
"GOLD_B": "#F9B775",
"GOLD_A": "#F7C797",
"RED_E": "#CF5044",
"RED_D": "#E65A4C",
"RED_C": "#FC6255",
"RED_B": "#FF8080",
"RED_A": "#F7A1A3",
"MAROON_E": "#94424F",
"MAROON_D": "#A24D61",
"MAROON_C": "#C55F73",
"MAROON_B": "#EC92AB",
"MAROON_A": "#ECABC1",
"PURPLE_E": "#644172",
"PURPLE_D": "#715582",
"PURPLE_C": "#9A72AC",
"PURPLE_B": "#B189C6",
"PURPLE_A": "#CAA3E8",
"GREY_E": "#222222",
"GREY_D": "#444444",
"GREY_C": "#888888",
"GREY_B": "#BBBBBB",
"GREY_A": "#DDDDDD",
"WHITE": "#FFFFFF",
"BLACK": "#000000",
"GREY_BROWN": "#736357",
"DARK_BROWN": "#8B4513",
"LIGHT_BROWN": "#CD853F",
"PINK": "#D147BD",
"LIGHT_PINK": "#DC75CD",
"GREEN_SCREEN": "#00FF00",
"ORANGE": "#FF862F",
}
PALETTE = list(COLOR_MAP.values())
locals().update(COLOR_MAP)
for name in [s for s in list(COLOR_MAP.keys()) if s.endswith("_C")]:
locals()[name.replace("_C", "")] = locals()[name]
|
[
"numpy.array"
] |
[((567, 592), 'numpy.array', 'np.array', (['(0.0, 0.0, 0.0)'], {}), '((0.0, 0.0, 0.0))\n', (575, 592), True, 'import numpy as np\n'), ((595, 620), 'numpy.array', 'np.array', (['(0.0, 1.0, 0.0)'], {}), '((0.0, 1.0, 0.0))\n', (603, 620), True, 'import numpy as np\n'), ((625, 651), 'numpy.array', 'np.array', (['(0.0, -1.0, 0.0)'], {}), '((0.0, -1.0, 0.0))\n', (633, 651), True, 'import numpy as np\n'), ((657, 682), 'numpy.array', 'np.array', (['(1.0, 0.0, 0.0)'], {}), '((1.0, 0.0, 0.0))\n', (665, 682), True, 'import numpy as np\n'), ((687, 713), 'numpy.array', 'np.array', (['(-1.0, 0.0, 0.0)'], {}), '((-1.0, 0.0, 0.0))\n', (695, 713), True, 'import numpy as np\n'), ((716, 742), 'numpy.array', 'np.array', (['(0.0, 0.0, -1.0)'], {}), '((0.0, 0.0, -1.0))\n', (724, 742), True, 'import numpy as np\n'), ((746, 771), 'numpy.array', 'np.array', (['(0.0, 0.0, 1.0)'], {}), '((0.0, 0.0, 1.0))\n', (754, 771), True, 'import numpy as np\n'), ((778, 803), 'numpy.array', 'np.array', (['(1.0, 0.0, 0.0)'], {}), '((1.0, 0.0, 0.0))\n', (786, 803), True, 'import numpy as np\n'), ((810, 835), 'numpy.array', 'np.array', (['(0.0, 1.0, 0.0)'], {}), '((0.0, 1.0, 0.0))\n', (818, 835), True, 'import numpy as np\n'), ((842, 867), 'numpy.array', 'np.array', (['(0.0, 0.0, 1.0)'], {}), '((0.0, 0.0, 1.0))\n', (850, 867), True, 'import numpy as np\n')]
|
from flask import Flask, render_template
from datetime import date
app = Flask(__name__)
@app.route("/")
def check():
today = date.today()
newyearcheck = today.month == 1 and today.day == 1
return render_template("index.html", newyearcheck=newyearcheck)
|
[
"flask.Flask",
"datetime.date.today",
"flask.render_template"
] |
[((74, 89), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (79, 89), False, 'from flask import Flask, render_template\n'), ((133, 145), 'datetime.date.today', 'date.today', ([], {}), '()\n', (143, 145), False, 'from datetime import date\n'), ((212, 268), 'flask.render_template', 'render_template', (['"""index.html"""'], {'newyearcheck': 'newyearcheck'}), "('index.html', newyearcheck=newyearcheck)\n", (227, 268), False, 'from flask import Flask, render_template\n')]
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# This code snippet is lightly modified from that provided by AWS Secrets Manager during secrets creation.
import boto3
import base64
from botocore.exceptions import ClientError
import json
import matplotlib.pyplot as plt
import graphviz
import sagemaker
from sagemaker.feature_store.feature_group import FeatureGroup
from typing import Dict
def get_secret(secret_name, region_name):
# Create a Secrets Manager client
session = boto3.session.Session()
client = session.client(
service_name='secretsmanager',
region_name=region_name
)
# In this sample we only handle the specific exceptions for the 'GetSecretValue' API.
# See https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_GetSecretValue.html
# We rethrow the exception by default.
try:
get_secret_value_response = client.get_secret_value(
SecretId=secret_name
)
return get_secret_value_response
except ClientError as e:
print(e)
if e.response['Error']['Code'] == 'DecryptionFailureException':
# Secrets Manager can't decrypt the protected secret text using the provided KMS key.
# Deal with the exception here, and/or rethrow at your discretion.
raise e
elif e.response['Error']['Code'] == 'InternalServiceErrorException':
# An error occurred on the server side.
# Deal with the exception here, and/or rethrow at your discretion.
raise e
elif e.response['Error']['Code'] == 'InvalidParameterException':
# You provided an invalid value for a parameter.
# Deal with the exception here, and/or rethrow at your discretion.
raise e
elif e.response['Error']['Code'] == 'InvalidRequestException':
# You provided a parameter value that is not valid for the current state of the resource.
# Deal with the exception here, and/or rethrow at your discretion.
raise e
elif e.response['Error']['Code'] == 'ResourceNotFoundException':
# We can't find the resource that you asked for.
# Deal with the exception here, and/or rethrow at your discretion.
raise e
else:
raise e
else:
# Decrypts secret using the associated KMS CMK.
# Depending on whether the secret is a string or binary, one of these fields will be populated.
print('now in else')
if 'SecretString' in get_secret_value_response:
secret = get_secret_value_response['SecretString']
print(secret)
else:
decoded_binary_secret = base64.b64decode(get_secret_value_response['SecretBinary'])
# Extract training and validation AUC values from the results returned by
# method describe_training_job()
def get_auc_from_metrics(response, metric_type):
for x in range(len(response['FinalMetricDataList'])):
if metric_type in response['FinalMetricDataList'][x].values():
return x
# Functions for model feature exploration
def plot_feature_importance(booster, f, maxfeats = 15):
from xgboost import plot_importance
res = {k:round(v, 2) for k, v in booster.get_score(importance_type = f).items()}
gain_plot = plot_importance(res,
max_num_features = maxfeats,
importance_type = f,
title = 'Feature Importance: ' + f,
color = "#4daf4a")
plt.show()
# Calculate tree depth. Adapted the code from here
# https://stackoverflow.com/questions/29005959/depth-of-a-json-tree to Python 3.
def calculate_tree_depth(tree_dict):
# input: single tree as a dictionary
# output: depth of the tree
if 'children' in tree_dict:
return 1 + max([0] + list(map(calculate_tree_depth, tree_dict['children'])))
else:
return 1
def get_depths_as_list(all_trees):
# input: list of all trees, generated by xgboost's get_dump in json format
# output: list of the same length as all_trees where each element contains
# the depth of a tree
# list to store the depth of each tree
tree_depth = []
for i in range(len(all_trees)):
tree = json.loads(all_trees[i])
tree_depth.append(calculate_tree_depth(tree))
return tree_depth
def calculate_list_unique_elements(input_list):
# calculate number of unique elements in a list
# input: list
# output: dictionary. Keys: unique elements, values: their count
res = dict()
for i in input_list:
if i in res:
res[i] += 1
else:
res[i] = 1
return res
def find_feature(tree_dict, feature):
# input:
# tree_dict: single tree as a dictionary
# feature: feature name, str
# output: 0 if a feature is not a split, 1 if the feature is a split at any node
if "split" in tree_dict:
if tree_dict["split"] == feature:
return 1
else:
for child in tree_dict["children"]:
res = find_feature(child, feature)
if res != 0:
return res
return 0
else:
return 0
# find all trees that have a feature
def find_all_trees_with_feature(all_trees, feature):
# input:
# all_trees: list of all trees, generated by xgboost's get_dump in json format
# feature: feature name, str
# output: indices of trees where a feature has been found at any node
trees_with_features = []
for i in range(len(all_trees)):
tree = json.loads(all_trees[i])
if find_feature(tree, feature) == 1:
trees_with_features.append(i)
return trees_with_features
# given a list of features find how many trees have it
def count_trees_with_features(all_trees, feature_list):
# input:
# all_trees: list of all trees, generated by xgboost's get_dump in json format
# feature_list: list of features
# output: dictionary, keys = feature_list, values = number of trees where a feature has been found
tree_count = dict()
for i in feature_list:
tree_count[i] = 0
for i in feature_list:
for j in range(len(all_trees)):
tree = json.loads(all_trees[j])
if find_feature(tree, i) == 1:
tree_count[i] += 1
return tree_count
def get_fg_info(fg_name: str, sagemaker_session: sagemaker.Session):
boto_session = sagemaker_session.boto_session
featurestore_runtime = sagemaker_session.sagemaker_featurestore_runtime_client
feature_store_session = sagemaker.Session(
boto_session=boto_session,
sagemaker_client=sagemaker_session.sagemaker_client,
sagemaker_featurestore_runtime_client=featurestore_runtime,
)
fg = FeatureGroup(name=fg_name, sagemaker_session=feature_store_session)
return fg.athena_query()
def generate_query(dataset_dict: Dict, sagemaker_session: sagemaker.Session):
customers_fg_info = get_fg_info(
dataset_dict["customers_fg_name"],
sagemaker_session=sagemaker_session,
)
label_name = dataset_dict["label_name"]
features_names = dataset_dict["features_names"]
training_columns = [label_name] + features_names
training_columns_string = ", ".join(f'"{c}"' for c in training_columns)
query_string = f"""SELECT DISTINCT {training_columns_string}
FROM "{customers_fg_info.table_name}"
"""
return dict(
catalog=claims_fg_info.catalog,
database=claims_fg_info.database,
query_string=query_string,
)
|
[
"sagemaker.feature_store.feature_group.FeatureGroup",
"xgboost.plot_importance",
"matplotlib.pyplot.show",
"json.loads",
"base64.b64decode",
"sagemaker.Session",
"boto3.session.Session"
] |
[((1442, 1465), 'boto3.session.Session', 'boto3.session.Session', ([], {}), '()\n', (1463, 1465), False, 'import boto3\n'), ((4278, 4400), 'xgboost.plot_importance', 'plot_importance', (['res'], {'max_num_features': 'maxfeats', 'importance_type': 'f', 'title': "('Feature Importance: ' + f)", 'color': '"""#4daf4a"""'}), "(res, max_num_features=maxfeats, importance_type=f, title=\n 'Feature Importance: ' + f, color='#4daf4a')\n", (4293, 4400), False, 'from xgboost import plot_importance\n'), ((4536, 4546), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4544, 4546), True, 'import matplotlib.pyplot as plt\n'), ((7630, 7796), 'sagemaker.Session', 'sagemaker.Session', ([], {'boto_session': 'boto_session', 'sagemaker_client': 'sagemaker_session.sagemaker_client', 'sagemaker_featurestore_runtime_client': 'featurestore_runtime'}), '(boto_session=boto_session, sagemaker_client=\n sagemaker_session.sagemaker_client,\n sagemaker_featurestore_runtime_client=featurestore_runtime)\n', (7647, 7796), False, 'import sagemaker\n'), ((7828, 7895), 'sagemaker.feature_store.feature_group.FeatureGroup', 'FeatureGroup', ([], {'name': 'fg_name', 'sagemaker_session': 'feature_store_session'}), '(name=fg_name, sagemaker_session=feature_store_session)\n', (7840, 7895), False, 'from sagemaker.feature_store.feature_group import FeatureGroup\n'), ((5271, 5295), 'json.loads', 'json.loads', (['all_trees[i]'], {}), '(all_trees[i])\n', (5281, 5295), False, 'import json\n'), ((6614, 6638), 'json.loads', 'json.loads', (['all_trees[i]'], {}), '(all_trees[i])\n', (6624, 6638), False, 'import json\n'), ((3667, 3726), 'base64.b64decode', 'base64.b64decode', (["get_secret_value_response['SecretBinary']"], {}), "(get_secret_value_response['SecretBinary'])\n", (3683, 3726), False, 'import base64\n'), ((7273, 7297), 'json.loads', 'json.loads', (['all_trees[j]'], {}), '(all_trees[j])\n', (7283, 7297), False, 'import json\n')]
|
import numpy as np
from net import Net
from functional import *
from os import remove
temp_path = "./model/param"
def save_model(net: Net, name: str):
'''
将网络信息保存
parameters
----------
net : 神经网络类
name : 文件名,文件将被保存到model文件夹中的指定名称文件中
return
------
1 : 表示保存成功
'''
path = "./model/{}".format(name)
args = net.args
layer_info = "layer info:\n"
for layer in args:
layer_info += "{} {}\n".format(*layer)
criterion = "criterion : {}\n".format("ce" if net.criterion ==
ce_loss else "mse")
regualarize = "regularize : " + ("{} with alpha={}\n".format(
net.regularize, net.alpha) if net.regularize else "None\n")
with open(path, "w") as f:
f.write(layer_info)
f.write(criterion)
f.write(regualarize)
for param in net.parameters():
np.savetxt(temp_path, param)
with open(temp_path, "r") as fa:
f.write(fa.read())
remove(temp_path)
return 1
def load_model(name: str):
'''
指定文件名,函数将读取文件,生成文件中描述的神经网络模型
return
------
net : 模型文件所描述的网络
'''
path = "./model/{}".format(name)
parameters = []
with open(path, "r") as f:
f.readline() # 读掉第一行
layer_info = []
while True:
s = f.readline()[:-1]
if "criterion" in s:
break
n, act = s.split()
layer_info.append((eval(n), act))
criterion = s.split(" : ")[-1]
s = f.readline()
if "alpha" in s: # 有正则化设置
regualarize = s[:2]
alpha = eval(s.split("=")[-1])
else:
regualarize = None
alpha = 0.01
net = Net(
*layer_info,
criterion=criterion,
regularize=regualarize,
alpha=alpha,
)
for l in range(len(layer_info) - 1):
i, o = layer_info[l][0], layer_info[l + 1][0]
str_W = "".join([f.readline() for l in range(i)])
str_b = f.readline()
with open(temp_path, "w") as fw:
fw.writelines(str_W)
W = np.loadtxt(temp_path).reshape(i, o)
with open(temp_path, "w") as fb:
fb.writelines(str_b)
b = np.loadtxt(temp_path).reshape(1, o)
parameters.extend((W, b))
net.reset_net(parameters)
remove(temp_path)
return net
def random_init(net: Net, path="./data/random.npy"):
'''用指定数组来初始化参数'''
n_layer = net.ct_layer
n_weight_list = [
n_layer[i] * n_layer[i + 1] for i in range(len(n_layer) - 1)
]
parameters = []
x = np.load(path)[:sum(n_weight_list)]
ptr = 0
for i in range(len(n_layer) - 1):
W = x[ptr:ptr + n_weight_list[i]].reshape((n_layer[i], n_layer[i + 1]))
b = np.zeros((1, n_layer[i + 1]))
parameters.extend((W, b))
ptr += n_weight_list[i]
net.reset_net(parameters, net.xavier, net.he)
return net
|
[
"os.remove",
"numpy.load",
"numpy.savetxt",
"numpy.zeros",
"net.Net",
"numpy.loadtxt"
] |
[((1013, 1030), 'os.remove', 'remove', (['temp_path'], {}), '(temp_path)\n', (1019, 1030), False, 'from os import remove\n'), ((2430, 2447), 'os.remove', 'remove', (['temp_path'], {}), '(temp_path)\n', (2436, 2447), False, 'from os import remove\n'), ((1754, 1828), 'net.Net', 'Net', (['*layer_info'], {'criterion': 'criterion', 'regularize': 'regualarize', 'alpha': 'alpha'}), '(*layer_info, criterion=criterion, regularize=regualarize, alpha=alpha)\n', (1757, 1828), False, 'from net import Net\n'), ((2692, 2705), 'numpy.load', 'np.load', (['path'], {}), '(path)\n', (2699, 2705), True, 'import numpy as np\n'), ((2869, 2898), 'numpy.zeros', 'np.zeros', (['(1, n_layer[i + 1])'], {}), '((1, n_layer[i + 1]))\n', (2877, 2898), True, 'import numpy as np\n'), ((900, 928), 'numpy.savetxt', 'np.savetxt', (['temp_path', 'param'], {}), '(temp_path, param)\n', (910, 928), True, 'import numpy as np\n'), ((2186, 2207), 'numpy.loadtxt', 'np.loadtxt', (['temp_path'], {}), '(temp_path)\n', (2196, 2207), True, 'import numpy as np\n'), ((2320, 2341), 'numpy.loadtxt', 'np.loadtxt', (['temp_path'], {}), '(temp_path)\n', (2330, 2341), True, 'import numpy as np\n')]
|
# Basic truss example in Openseespy:
import openseespy.opensees as ops
import openseespy.postprocessing.Get_Rendering as opsplt
##############################################################################
##### Define units in SI #####
##############################################################################
# Basic units:
m = 1
kg = 1
s = 1
N = kg * m / s**2
Pa = N / m
inches = 0.0254 * m
ft = 12 * inches
kip = 4.45 * 10**3 * N
ksi = 6.89 * 10**6 * Pa
##############################################################################
##### Input Variables #####
##############################################################################
x = [0.0, 12.0, 14.0, 6.0]
# x = list(map(lambda a: a * ft, x))
y = [0.0, 0.0, 0.0, 8.0]
# y = list(map(lambda a: a * ft, y))
A = [10.0, 5.0]
E = 3 * 10**3 * ksi
F = [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [100.0, -50.0, 0.0]]
def aux_function(lst, scalar):
'''
Description
-----------
This function is used multiply each value in a list with a scalar number.
It will be very usefull when converting the variables from the Imperial
system to S.I.
Parameters
----------
lst : LIST OF FLOATS
Initial list with the values in the Imperial System.
scalar : FLOAT
The value used to convert from Imperial to International System.
Returns
-------
result : LIST OF FLOATS
The list with the values converted to S.I..
'''
result = [t * scalar for t in lst]
return result
(x, y, A) = ([t * ft for t in x], [t * ft for t in y],
[t * inches**2 for t in A])
F = [[t * kip for t in f] for f in F]
##############################################################################
##### Main Analysis' functions #####
##############################################################################
def Model_Build(x, y, A, E):
'''
Description
-----------
This function is used to determine the basic parameters of the structural
problem at hand.
Parameters
----------
x : LIST OF FLOATS
The list of the coordinates of the nodes along the x-axis.
y : LIST OF FLOATS
The list of the coordinates of the nodes along the y-axis.
A : LIST OF FLOATS
The list with the materials used for the different elements.
E : FLOAT
The modulus of elesticity of the elements.
Returns
-------
None.
'''
# Delete existing model.
ops.wipe()
# Define the model.
ops.model('basic', '-ndm', 2, '-ndf', 3)
# Define materials.
ops.uniaxialMaterial('Elastic', 1, E)
# Define the nodes.
m = len(x)
[ops.node(i + 1, *[x[i], y[i]]) for i in range(m)]
# Fix the nodes.
fixxity = [[0, 0, 1], [0, 1, 1], [1, 0, 1], [1, 1, 1]]
[ops.fix(i + 1, *fixxity[3]) if i + 1 != 4 else ops.fix(i + 1, *fixxity[0])
for i in range(m)]
# Define elements.
conn = [[1, 4], [2, 4], [3, 4]]
[ops.element('Truss', i + 1, *conn[i], A[1], 1) if i != 0
else ops.element('Truss', i + 1, *conn[i], A[0], 1)
for i in range(len(conn))]
# Plot model.
opsplt.plot_model()
def Rec_Setup(analysis):
'''
Description
-----------
This function is used to set up the recorders. It stores the output of the
recorders to a folder, whose name is the value of the variable: analysis.
Parameters
----------
analysis : STRING
The name of the analysis, currently performed.
Returns
-------
None.
'''
analysis += '/'
# Set recorders.
ops.recorder('Node', '-file', analysis + 'NodeDisp.txt', '-time', '-node', *[4], '-dof', *[1, 2, 3], 'disp')
ops.recorder('Node', '-file', analysis + 'ReactDisp.txt', '-time', '-node', *[4], '-dof', *[1, 2, 3], 'reaction')
ops.recorder('Element', '-file', analysis + 'ElementsForces.txt', '-time', '-ele', *[1, 2, 3], 'forces')
def Analysis_setup(analysis, F, N = 1):
'''
Description
-----------
This functions is used to setup and then run the analysis.
Parameters
----------
analysis : STRING
The name of the analysis, currently performed.
F : LIST OF LISTS OF FLOATS
The list containig a list of loads along the x and y axises and around
the z axis for every node.
N : INTEGER
The number of the analysises to run. Default value: 1
Returns
-------
None.
'''
# Auxiliary variable.
m = len(F)
# Create timeseries.
ops.timeSeries('Linear', 1)
# Create load pattern.
ops.pattern('Plain', 1 , 1)
# Define loads.
[ops.load(i + 1, *F[i]) for i in range(m)]
# Define system.
ops.system('BandSPD')
# Define numberer.
ops.numberer('RCM')
# Define constraint handler
ops.constraints('Plain')
# Define integrator.
ops.integrator('LoadControl', 1.0)
# Define algorithm
ops.algorithm('Linear')
# Create analysis object
ops.analysis('Static')
# Execute the analysis
ops.initialize() # Set recorders to start recording at 0 time.
ok = ops.analyze(N)
if ok == 0:
status = 'Analysis complete, everything went smoothly.'
else:
status = 'ERROR: ANALYSIS FAILED TO CONVERGE' + '\n' + 'Tip: Change algorithm'
print(analysis + '\n' + status + '\n')
# Close recorders and scrap model.
ops.wipe()
##############################################################################
##### Main Analysis #####
##############################################################################
# Step 1: Initilize model parameters.
Model_Build(x, y, A, E)
# Step 2: Name the type of the analysis to be performed.
analysis = 'Static'
# Step 3: Set up the recorders.
Rec_Setup(analysis)
# Step 4: Perform the analysis.
N = 10 # Number of analysises to be performed.
Analysis_setup(analysis, F, N)
# Step 5: Initilize model parameters.
Model_Build(x, y, A, E)
# Step 6: Name the type of the analysis to be performed.
analysis = 'Pushover'
# Step 7: Set up the recorders.
Rec_Setup(analysis)
# Step 8: Perform the analysis.
N = 10 # Number of analysises to be performed.
F[3][1] = 0.0 # Perform only horizontal forces.
Analysis_setup(analysis, F, N)
|
[
"openseespy.opensees.analysis",
"openseespy.opensees.integrator",
"openseespy.opensees.initialize",
"openseespy.opensees.load",
"openseespy.opensees.node",
"openseespy.opensees.numberer",
"openseespy.opensees.wipe",
"openseespy.opensees.model",
"openseespy.opensees.pattern",
"openseespy.postprocessing.Get_Rendering.plot_model",
"openseespy.opensees.system",
"openseespy.opensees.element",
"openseespy.opensees.uniaxialMaterial",
"openseespy.opensees.recorder",
"openseespy.opensees.analyze",
"openseespy.opensees.timeSeries",
"openseespy.opensees.algorithm",
"openseespy.opensees.constraints",
"openseespy.opensees.fix"
] |
[((2757, 2767), 'openseespy.opensees.wipe', 'ops.wipe', ([], {}), '()\n', (2765, 2767), True, 'import openseespy.opensees as ops\n'), ((2804, 2844), 'openseespy.opensees.model', 'ops.model', (['"""basic"""', '"""-ndm"""', '(2)', '"""-ndf"""', '(3)'], {}), "('basic', '-ndm', 2, '-ndf', 3)\n", (2813, 2844), True, 'import openseespy.opensees as ops\n'), ((2881, 2918), 'openseespy.opensees.uniaxialMaterial', 'ops.uniaxialMaterial', (['"""Elastic"""', '(1)', 'E'], {}), "('Elastic', 1, E)\n", (2901, 2918), True, 'import openseespy.opensees as ops\n'), ((3520, 3539), 'openseespy.postprocessing.Get_Rendering.plot_model', 'opsplt.plot_model', ([], {}), '()\n', (3537, 3539), True, 'import openseespy.postprocessing.Get_Rendering as opsplt\n'), ((3998, 4110), 'openseespy.opensees.recorder', 'ops.recorder', (['"""Node"""', '"""-file"""', "(analysis + 'NodeDisp.txt')", '"""-time"""', '"""-node"""', '*[4]', '"""-dof"""', '*[1, 2, 3]', '"""disp"""'], {}), "('Node', '-file', analysis + 'NodeDisp.txt', '-time', '-node',\n *[4], '-dof', *[1, 2, 3], 'disp')\n", (4010, 4110), True, 'import openseespy.opensees as ops\n'), ((4112, 4229), 'openseespy.opensees.recorder', 'ops.recorder', (['"""Node"""', '"""-file"""', "(analysis + 'ReactDisp.txt')", '"""-time"""', '"""-node"""', '*[4]', '"""-dof"""', '*[1, 2, 3]', '"""reaction"""'], {}), "('Node', '-file', analysis + 'ReactDisp.txt', '-time', '-node',\n *[4], '-dof', *[1, 2, 3], 'reaction')\n", (4124, 4229), True, 'import openseespy.opensees as ops\n'), ((4231, 4339), 'openseespy.opensees.recorder', 'ops.recorder', (['"""Element"""', '"""-file"""', "(analysis + 'ElementsForces.txt')", '"""-time"""', '"""-ele"""', '*[1, 2, 3]', '"""forces"""'], {}), "('Element', '-file', analysis + 'ElementsForces.txt', '-time',\n '-ele', *[1, 2, 3], 'forces')\n", (4243, 4339), True, 'import openseespy.opensees as ops\n'), ((4984, 5011), 'openseespy.opensees.timeSeries', 'ops.timeSeries', (['"""Linear"""', '(1)'], {}), "('Linear', 1)\n", (4998, 5011), True, 'import openseespy.opensees as ops\n'), ((5051, 5077), 'openseespy.opensees.pattern', 'ops.pattern', (['"""Plain"""', '(1)', '(1)'], {}), "('Plain', 1, 1)\n", (5062, 5077), True, 'import openseespy.opensees as ops\n'), ((5180, 5201), 'openseespy.opensees.system', 'ops.system', (['"""BandSPD"""'], {}), "('BandSPD')\n", (5190, 5201), True, 'import openseespy.opensees as ops\n'), ((5237, 5256), 'openseespy.opensees.numberer', 'ops.numberer', (['"""RCM"""'], {}), "('RCM')\n", (5249, 5256), True, 'import openseespy.opensees as ops\n'), ((5301, 5325), 'openseespy.opensees.constraints', 'ops.constraints', (['"""Plain"""'], {}), "('Plain')\n", (5316, 5325), True, 'import openseespy.opensees as ops\n'), ((5363, 5397), 'openseespy.opensees.integrator', 'ops.integrator', (['"""LoadControl"""', '(1.0)'], {}), "('LoadControl', 1.0)\n", (5377, 5397), True, 'import openseespy.opensees as ops\n'), ((5433, 5456), 'openseespy.opensees.algorithm', 'ops.algorithm', (['"""Linear"""'], {}), "('Linear')\n", (5446, 5456), True, 'import openseespy.opensees as ops\n'), ((5498, 5520), 'openseespy.opensees.analysis', 'ops.analysis', (['"""Static"""'], {}), "('Static')\n", (5510, 5520), True, 'import openseespy.opensees as ops\n'), ((5560, 5576), 'openseespy.opensees.initialize', 'ops.initialize', ([], {}), '()\n', (5574, 5576), True, 'import openseespy.opensees as ops\n'), ((5642, 5656), 'openseespy.opensees.analyze', 'ops.analyze', (['N'], {}), '(N)\n', (5653, 5656), True, 'import openseespy.opensees as ops\n'), ((5945, 5955), 'openseespy.opensees.wipe', 'ops.wipe', ([], {}), '()\n', (5953, 5955), True, 'import openseespy.opensees as ops\n'), ((2978, 3008), 'openseespy.opensees.node', 'ops.node', (['(i + 1)', '*[x[i], y[i]]'], {}), '(i + 1, *[x[i], y[i]])\n', (2986, 3008), True, 'import openseespy.opensees as ops\n'), ((5108, 5130), 'openseespy.opensees.load', 'ops.load', (['(i + 1)', '*F[i]'], {}), '(i + 1, *F[i])\n', (5116, 5130), True, 'import openseespy.opensees as ops\n'), ((3129, 3156), 'openseespy.opensees.fix', 'ops.fix', (['(i + 1)', '*fixxity[3]'], {}), '(i + 1, *fixxity[3])\n', (3136, 3156), True, 'import openseespy.opensees as ops\n'), ((3178, 3205), 'openseespy.opensees.fix', 'ops.fix', (['(i + 1)', '*fixxity[0]'], {}), '(i + 1, *fixxity[0])\n', (3185, 3205), True, 'import openseespy.opensees as ops\n'), ((3316, 3362), 'openseespy.opensees.element', 'ops.element', (['"""Truss"""', '(i + 1)', '*conn[i]', 'A[1]', '(1)'], {}), "('Truss', i + 1, *conn[i], A[1], 1)\n", (3327, 3362), True, 'import openseespy.opensees as ops\n'), ((3398, 3444), 'openseespy.opensees.element', 'ops.element', (['"""Truss"""', '(i + 1)', '*conn[i]', 'A[0]', '(1)'], {}), "('Truss', i + 1, *conn[i], A[0], 1)\n", (3409, 3444), True, 'import openseespy.opensees as ops\n')]
|
from __future__ import print_function
import pandas as pd
import json
import sys
import requests
_api_version = str('0.0.1')
class MagpieServer:
"""Object to store how to connect to a server running Magpie"""
_url = None
""" URL of server """
_models = None
""" Cached information about models held by this server. """
def __init__(self, url="http://josquin.northwestern.edu:4581/"):
"""Create a connection to a Magpie server. Defaults to
connecting with a server running at Northwestern University
hosted by the Wolverton group.
:param url: URL of server
:param port: Port number"""
self._url = url
# Test whether API versions agree
self.api_version()
def _make_request(self, path, data=None, method='get'):
"""Perform a request. Handles making error messages
:param path: str, path of request
:param data: Any data to be passed as JSON
:return: requests.Request"""
r = requests.request(method=method, url=self._url + path,
data=data)
# Check error status
if r.ok:
return r
else:
raise Exception('Request failed. Status = %d. Reason = %s'%(r.status_code, r.reason))
def api_version(self):
"""Get the API version of the server.
Prints error message of that version is different than what is supported
by this wrapper.
:return: API version"""
# Make the requested
r = self._make_request("server/version")
v = r.content
# If Python 3, convert to string
if isinstance(v, bytes):
v = v.decode()
# Check whether it agrees with version of this wrapper
if _api_version != v:
print("WARNING: API version of Magpie server different than wrapper: %s!=%s"%(_api_version, v), file=sys.stderr)
return v
def status(self):
"""Get the status of the Magpie server
:return: Status of server as dict"""
return self._make_request("server/status").json()
def models(self):
"""Get information about models held by this server
:return: dict, Information about all the models"""
if self._models is None:
self._models = self._make_request("models").json()
return self._models
def get_model_info(self, name):
"""Get information about a specific model
:param name: str, name of model
:return: dict, information about a model"""
if self._models is None or name not in self._models:
r = self._make_request("model/%s/info"%name)
return r.json()
else:
return self._models[name]
def generate_attributes(self, name, entries):
"""Generate attributes that serve as input to a certain model
:param name: str, name of model
:param entries: list, list of entries to be run (as strings)
:return: Pandas array, where [i,j] is attribute j of entries[i]"""
# Package the request
data = dict(entries=json.dumps(dict(entries=[dict(name=e) for e in entries])))
r = self._make_request("model/%s/attributes"%name, data=data, method='POST')
# Compile entries into numpy array
results = r.json()
attrs = pd.DataFrame([x['attributes'] for x in results['entries']],
columns=results['attributes'])
return attrs
def run_model(self, name, entries):
"""Run a particular model.
:param name: str, Name of model to be run
:param entries: list, list of entries to be run (as strings)
:return: Predicted values. Also generates the probabilities
for membership in each class for classifier models
Second column is always the predicted value as a number."""
# Get the information about this model
model_info = self.get_model_info(name)
# Check whether it is a regression model
reg = model_info['modelType'] == "regression"
# Run the model
data = dict(entries=json.dumps(dict(entries=[dict(name=e) for e in entries])))
r = self._make_request("model/%s/run"%name, data=data, method='POST')
# Generate the output dataframe
results = r.json()
if reg:
return pd.DataFrame(list(zip(entries,[x['predictedValue'] for x in results['entries']])),
columns=['Entry']+['%s (%s)'%(model_info['property'], model_info['units'])])
else:
# Get probabilities
classes = model_info['units']
probs = []
for c in classes:
probs.append([e['classProbabilities'][c] for e in results['entries']])
return pd.DataFrame(list(zip(entries,[x['predictedValue'] for x in results['entries']],
[x['predictedClass'] for x in results['entries']], *probs)),
columns=['Entry']+['Class','ClassName']+['P(%s)'%c for c in classes])
|
[
"pandas.DataFrame",
"requests.request"
] |
[((1069, 1133), 'requests.request', 'requests.request', ([], {'method': 'method', 'url': '(self._url + path)', 'data': 'data'}), '(method=method, url=self._url + path, data=data)\n', (1085, 1133), False, 'import requests\n'), ((3582, 3677), 'pandas.DataFrame', 'pd.DataFrame', (["[x['attributes'] for x in results['entries']]"], {'columns': "results['attributes']"}), "([x['attributes'] for x in results['entries']], columns=results\n ['attributes'])\n", (3594, 3677), True, 'import pandas as pd\n')]
|
#!python
# coding=utf-8
import os
import unittest
import tempfile
from os.path import join as jn
from os.path import dirname as dn
import pytest
from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes
from pocean.tests.dsg.test_new import test_is_mine
import logging
from pocean import logger
logger.level = logging.INFO
logger.handlers = [logging.StreamHandler()]
@pytest.mark.parametrize("fp", [
#jn(dn(__file__), 'resources', 'cr-single.nc'),
jn(dn(__file__), 'resources', 'cr-multiple.nc'),
jn(dn(__file__), 'resources', 'cr-oot-A.nc'),
jn(dn(__file__), 'resources', 'cr-oot-B.nc'),
])
def test_crt_load(fp):
test_is_mine(ContiguousRaggedTrajectory, fp)
class TestContiguousRaggedTrajectory(unittest.TestCase):
def setUp(self):
self.multi = jn(dn(__file__), 'resources', 'cr-multiple.nc')
self.oot_A = jn(dn(__file__), 'resources', 'cr-oot-A.nc')
self.oot_B = jn(dn(__file__), 'resources', 'cr-oot-B.nc')
def test_crt_dataframe_multiple(self):
axes = {
't': 'time',
'x': 'lon',
'y': 'lat',
'z': 'z',
}
fid, tmpnc = tempfile.mkstemp(suffix='.nc')
with ContiguousRaggedTrajectory(self.multi) as ncd:
df = ncd.to_dataframe(axes=axes)
with ContiguousRaggedTrajectory.from_dataframe(df, tmpnc, axes=axes) as result_ncd:
assert 'trajectory' in result_ncd.dimensions
test_is_mine(ContiguousRaggedTrajectory, tmpnc) # Try to load it again
os.close(fid)
os.remove(tmpnc)
def test_crt_dataframe_multiple_unique_dims(self):
axes = {
't': 'time',
'x': 'lon',
'y': 'lat',
'z': 'z',
}
fid, tmpnc = tempfile.mkstemp(suffix='.nc')
with ContiguousRaggedTrajectory(self.multi) as ncd:
df = ncd.to_dataframe(axes=axes)
with ContiguousRaggedTrajectory.from_dataframe(df, tmpnc, axes=axes, unique_dims=True) as result_ncd:
assert 'trajectory_dim' in result_ncd.dimensions
test_is_mine(ContiguousRaggedTrajectory, tmpnc) # Try to load it again
os.close(fid)
os.remove(tmpnc)
def test_crt_dataframe_unlimited_dim(self):
axes = {
't': 'time',
'x': 'lon',
'y': 'lat',
'z': 'z',
}
fid, tmpnc = tempfile.mkstemp(suffix='.nc')
with ContiguousRaggedTrajectory(self.multi) as ncd:
df = ncd.to_dataframe(axes=axes)
with ContiguousRaggedTrajectory.from_dataframe(df, tmpnc, axes=axes, unlimited=True, unique_dims=True) as result_ncd:
assert 'trajectory_dim' in result_ncd.dimensions
assert 'obs_dim' in result_ncd.dimensions
assert result_ncd.dimensions['obs_dim'].isunlimited() is True
test_is_mine(ContiguousRaggedTrajectory, tmpnc) # Try to load it again
os.close(fid)
os.remove(tmpnc)
def test_crt_dataframe_oot_A(self):
axes = {
't': 'time',
'x': 'lon',
'y': 'lat',
'z': 'depth',
'sample': 'sample'
}
fid, tmpnc = tempfile.mkstemp(suffix='.nc')
with ContiguousRaggedTrajectory(self.oot_A) as ncd:
df = ncd.to_dataframe(axes=axes)
df = df.sort_values(['trajectory', 'time'])
attrs = get_calculated_attributes(df, axes=axes)
with ContiguousRaggedTrajectory.from_dataframe(df, tmpnc, axes=axes, mode='a') as result_ncd:
assert 'sample' in result_ncd.dimensions
assert result_ncd.dimensions['sample'].size == 6610
assert 'trajectory' in result_ncd.dimensions
# This is removing null trajectories that have no data. Not much to do about this
# because there is no way to store this empty trajectory in a dataframe.
assert result_ncd.dimensions['trajectory'].size == 507
result_ncd.apply_meta(attrs)
test_is_mine(ContiguousRaggedTrajectory, tmpnc) # Try to load it again
os.close(fid)
os.remove(tmpnc)
def test_crt_dataframe_oot_B(self):
axes = {
't': 'time',
'x': 'lon',
'y': 'lat',
'z': 'depth',
}
fid, tmpnc = tempfile.mkstemp(suffix='.nc')
with ContiguousRaggedTrajectory(self.oot_B) as ncd:
df = ncd.to_dataframe(axes=axes)
df = df.sort_values(['trajectory', 'time'])
attrs = get_calculated_attributes(df, axes=axes)
with ContiguousRaggedTrajectory.from_dataframe(df, tmpnc, axes=axes, mode='a') as result_ncd:
assert 'obs' in result_ncd.dimensions
assert result_ncd.dimensions['obs'].size == 64116
assert 'trajectory' in result_ncd.dimensions
# This is removing null trajectories that have no data. Not much to do about this
# because there is no way to store this empty trajectory in a dataframe.
assert result_ncd.dimensions['trajectory'].size == 1000
result_ncd.apply_meta(attrs)
test_is_mine(ContiguousRaggedTrajectory, tmpnc) # Try to load it again
os.close(fid)
os.remove(tmpnc)
|
[
"os.remove",
"pocean.tests.dsg.test_new.test_is_mine",
"tempfile.mkstemp",
"pocean.dsg.get_calculated_attributes",
"pocean.dsg.ContiguousRaggedTrajectory.from_dataframe",
"os.path.dirname",
"logging.StreamHandler",
"pocean.dsg.ContiguousRaggedTrajectory",
"os.close"
] |
[((363, 386), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (384, 386), False, 'import logging\n'), ((658, 702), 'pocean.tests.dsg.test_new.test_is_mine', 'test_is_mine', (['ContiguousRaggedTrajectory', 'fp'], {}), '(ContiguousRaggedTrajectory, fp)\n', (670, 702), False, 'from pocean.tests.dsg.test_new import test_is_mine\n'), ((1172, 1202), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'suffix': '""".nc"""'}), "(suffix='.nc')\n", (1188, 1202), False, 'import tempfile\n'), ((1557, 1570), 'os.close', 'os.close', (['fid'], {}), '(fid)\n', (1565, 1570), False, 'import os\n'), ((1579, 1595), 'os.remove', 'os.remove', (['tmpnc'], {}), '(tmpnc)\n', (1588, 1595), False, 'import os\n'), ((1795, 1825), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'suffix': '""".nc"""'}), "(suffix='.nc')\n", (1811, 1825), False, 'import tempfile\n'), ((2202, 2215), 'os.close', 'os.close', (['fid'], {}), '(fid)\n', (2210, 2215), False, 'import os\n'), ((2224, 2240), 'os.remove', 'os.remove', (['tmpnc'], {}), '(tmpnc)\n', (2233, 2240), False, 'import os\n'), ((2433, 2463), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'suffix': '""".nc"""'}), "(suffix='.nc')\n", (2449, 2463), False, 'import tempfile\n'), ((2992, 3005), 'os.close', 'os.close', (['fid'], {}), '(fid)\n', (3000, 3005), False, 'import os\n'), ((3014, 3030), 'os.remove', 'os.remove', (['tmpnc'], {}), '(tmpnc)\n', (3023, 3030), False, 'import os\n'), ((3270, 3300), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'suffix': '""".nc"""'}), "(suffix='.nc')\n", (3286, 3300), False, 'import tempfile\n'), ((4213, 4226), 'os.close', 'os.close', (['fid'], {}), '(fid)\n', (4221, 4226), False, 'import os\n'), ((4235, 4251), 'os.remove', 'os.remove', (['tmpnc'], {}), '(tmpnc)\n', (4244, 4251), False, 'import os\n'), ((4440, 4470), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'suffix': '""".nc"""'}), "(suffix='.nc')\n", (4456, 4470), False, 'import tempfile\n'), ((5379, 5392), 'os.close', 'os.close', (['fid'], {}), '(fid)\n', (5387, 5392), False, 'import os\n'), ((5401, 5417), 'os.remove', 'os.remove', (['tmpnc'], {}), '(tmpnc)\n', (5410, 5417), False, 'import os\n'), ((482, 494), 'os.path.dirname', 'dn', (['__file__'], {}), '(__file__)\n', (484, 494), True, 'from os.path import dirname as dn\n'), ((535, 547), 'os.path.dirname', 'dn', (['__file__'], {}), '(__file__)\n', (537, 547), True, 'from os.path import dirname as dn\n'), ((585, 597), 'os.path.dirname', 'dn', (['__file__'], {}), '(__file__)\n', (587, 597), True, 'from os.path import dirname as dn\n'), ((808, 820), 'os.path.dirname', 'dn', (['__file__'], {}), '(__file__)\n', (810, 820), True, 'from os.path import dirname as dn\n'), ((877, 889), 'os.path.dirname', 'dn', (['__file__'], {}), '(__file__)\n', (879, 889), True, 'from os.path import dirname as dn\n'), ((943, 955), 'os.path.dirname', 'dn', (['__file__'], {}), '(__file__)\n', (945, 955), True, 'from os.path import dirname as dn\n'), ((1216, 1254), 'pocean.dsg.ContiguousRaggedTrajectory', 'ContiguousRaggedTrajectory', (['self.multi'], {}), '(self.multi)\n', (1242, 1254), False, 'from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes\n'), ((1477, 1524), 'pocean.tests.dsg.test_new.test_is_mine', 'test_is_mine', (['ContiguousRaggedTrajectory', 'tmpnc'], {}), '(ContiguousRaggedTrajectory, tmpnc)\n', (1489, 1524), False, 'from pocean.tests.dsg.test_new import test_is_mine\n'), ((1839, 1877), 'pocean.dsg.ContiguousRaggedTrajectory', 'ContiguousRaggedTrajectory', (['self.multi'], {}), '(self.multi)\n', (1865, 1877), False, 'from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes\n'), ((2122, 2169), 'pocean.tests.dsg.test_new.test_is_mine', 'test_is_mine', (['ContiguousRaggedTrajectory', 'tmpnc'], {}), '(ContiguousRaggedTrajectory, tmpnc)\n', (2134, 2169), False, 'from pocean.tests.dsg.test_new import test_is_mine\n'), ((2477, 2515), 'pocean.dsg.ContiguousRaggedTrajectory', 'ContiguousRaggedTrajectory', (['self.multi'], {}), '(self.multi)\n', (2503, 2515), False, 'from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes\n'), ((2912, 2959), 'pocean.tests.dsg.test_new.test_is_mine', 'test_is_mine', (['ContiguousRaggedTrajectory', 'tmpnc'], {}), '(ContiguousRaggedTrajectory, tmpnc)\n', (2924, 2959), False, 'from pocean.tests.dsg.test_new import test_is_mine\n'), ((3314, 3352), 'pocean.dsg.ContiguousRaggedTrajectory', 'ContiguousRaggedTrajectory', (['self.oot_A'], {}), '(self.oot_A)\n', (3340, 3352), False, 'from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes\n'), ((3482, 3522), 'pocean.dsg.get_calculated_attributes', 'get_calculated_attributes', (['df'], {'axes': 'axes'}), '(df, axes=axes)\n', (3507, 3522), False, 'from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes\n'), ((4132, 4179), 'pocean.tests.dsg.test_new.test_is_mine', 'test_is_mine', (['ContiguousRaggedTrajectory', 'tmpnc'], {}), '(ContiguousRaggedTrajectory, tmpnc)\n', (4144, 4179), False, 'from pocean.tests.dsg.test_new import test_is_mine\n'), ((4484, 4522), 'pocean.dsg.ContiguousRaggedTrajectory', 'ContiguousRaggedTrajectory', (['self.oot_B'], {}), '(self.oot_B)\n', (4510, 4522), False, 'from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes\n'), ((4652, 4692), 'pocean.dsg.get_calculated_attributes', 'get_calculated_attributes', (['df'], {'axes': 'axes'}), '(df, axes=axes)\n', (4677, 4692), False, 'from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes\n'), ((5298, 5345), 'pocean.tests.dsg.test_new.test_is_mine', 'test_is_mine', (['ContiguousRaggedTrajectory', 'tmpnc'], {}), '(ContiguousRaggedTrajectory, tmpnc)\n', (5310, 5345), False, 'from pocean.tests.dsg.test_new import test_is_mine\n'), ((1325, 1388), 'pocean.dsg.ContiguousRaggedTrajectory.from_dataframe', 'ContiguousRaggedTrajectory.from_dataframe', (['df', 'tmpnc'], {'axes': 'axes'}), '(df, tmpnc, axes=axes)\n', (1366, 1388), False, 'from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes\n'), ((1948, 2034), 'pocean.dsg.ContiguousRaggedTrajectory.from_dataframe', 'ContiguousRaggedTrajectory.from_dataframe', (['df', 'tmpnc'], {'axes': 'axes', 'unique_dims': '(True)'}), '(df, tmpnc, axes=axes, unique_dims\n =True)\n', (1989, 2034), False, 'from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes\n'), ((2586, 2688), 'pocean.dsg.ContiguousRaggedTrajectory.from_dataframe', 'ContiguousRaggedTrajectory.from_dataframe', (['df', 'tmpnc'], {'axes': 'axes', 'unlimited': '(True)', 'unique_dims': '(True)'}), '(df, tmpnc, axes=axes, unlimited=\n True, unique_dims=True)\n', (2627, 2688), False, 'from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes\n'), ((3541, 3614), 'pocean.dsg.ContiguousRaggedTrajectory.from_dataframe', 'ContiguousRaggedTrajectory.from_dataframe', (['df', 'tmpnc'], {'axes': 'axes', 'mode': '"""a"""'}), "(df, tmpnc, axes=axes, mode='a')\n", (3582, 3614), False, 'from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes\n'), ((4711, 4784), 'pocean.dsg.ContiguousRaggedTrajectory.from_dataframe', 'ContiguousRaggedTrajectory.from_dataframe', (['df', 'tmpnc'], {'axes': 'axes', 'mode': '"""a"""'}), "(df, tmpnc, axes=axes, mode='a')\n", (4752, 4784), False, 'from pocean.dsg import ContiguousRaggedTrajectory, get_calculated_attributes\n')]
|
import re
from satosa.micro_services.processors.base_processor import BaseProcessor
class LegacyProcessor:
@staticmethod
def codice_fiscale_rs(schacpersonaluniqueids=[], nationprefix=False, nationprefix_sep=':'):
if isinstance(schacpersonaluniqueids, str):
schacpersonaluniqueids = [schacpersonaluniqueids]
# R&S format
rs_regexp = (r'(?P<urn_prefix>urn:schac:personalUniqueID:)?'
r'(?P<nation>[a-zA-Z]{2}):'
r'(?P<doc_type>[a-zA-Z]{2,3}):(?P<uniqueid>[\w]+)')
for uniqueid in schacpersonaluniqueids:
result = re.match(rs_regexp, uniqueid, re.I)
if result:
data = result.groupdict()
#if data.get('nation') == 'it' and data.get('doc_type') in ['CF', 'TIN']:
if nationprefix:
# returns IT:CODICEFISCALE
return nationprefix_sep.join((data['nation'].lower(),
data['uniqueid']))
# returns CODICEFISCALE
return data['uniqueid']
@staticmethod
def codice_fiscale_spid(fiscalNumbers, nationprefix=False, nationprefix_sep=':'):
if isinstance(fiscalNumbers, str):
fiscalNumbers = [fiscalNumbers]
# SPID/eIDAS FORMAT
spid_regexp = r'(?P<prefix>TIN)(?P<nation>[a-zA-Z]{2})-(?P<uniqueid>[\w]+)'
for fiscalNumber in fiscalNumbers:
result = re.match(spid_regexp, fiscalNumber, re.I)
if result:
data = result.groupdict()
if nationprefix:
# returns IT:CODICEFISCALE
return nationprefix_sep.join((data['nation'].lower(),
data['uniqueid']))
# returns CODICEFISCALE
return data['uniqueid']
@staticmethod
def matricola(personalUniqueCodes=[], id_string='dipendente', orgname='unical.it'):
if isinstance(personalUniqueCodes, str):
personalUniqueCodes = [personalUniqueCodes]
_regexp = (r'(?P<urn_prefix>urn:schac:personalUniqueCode:)?'
r'(?P<nation>[a-zA-Z]{2}):'
#r'(?P<organization>[a-zA-Z\.\-]+):'
'ORGNAME:'
'IDSTRING:'
r'(?P<uniqueid>[\w]+)').replace('IDSTRING', id_string).replace('ORGNAME', orgname)
for uniqueid in personalUniqueCodes:
result = re.match(_regexp, uniqueid, re.I)
if result:
return result.groupdict()['uniqueid']
class LegacyAttributeGenerator(BaseProcessor):
def matricola_dipendente(self, attributes):
v = None
if attributes.get('schacpersonaluniquecode'):
v = 'schacpersonaluniquecode'
elif attributes.get('schacPersonalUniqueCode'):
v = 'schacPersonalUniqueCode'
if v:
return LegacyProcessor.matricola(attributes[v],
id_string='dipendente')
def matricola_studente(self, attributes):
v = None
if attributes.get('schacpersonaluniquecode'):
v = 'schacpersonaluniquecode'
elif attributes.get('schacPersonalUniqueCode'):
v = 'schacPersonalUniqueCode'
if v:
return LegacyProcessor.matricola(attributes[v],
id_string='studente')
def codice_fiscale(self, attributes):
v = None
if attributes.get('schacpersonaluniqueid'):
return UniAttributeProcessor.codice_fiscale_rs(attributes['schacpersonaluniqueid'])
elif attributes.get('schacPersonalUniqueID'):
return UniAttributeProcessor.codice_fiscale_rs(attributes['schacPersonalUniqueID'])
elif attributes.get('fiscalNumber'):
v = 'fiscalNumber'
elif attributes.get('fiscalnumber'):
v = 'fiscalnumber'
if v:
fiscalNumber = LegacyProcessor.codice_fiscale_spid(attributes[v])
# put a fake 'schacpersonaluniqueid' to do ldap account linking with the next microservice
attributes['schacpersonaluniqueid'] = 'urn:schac:personalUniqueID:it:CF:{}'.format(fiscalNumber)
return fiscalNumber
def process(self, internal_data, attribute, **kwargs):
if hasattr(self, attribute) and callable(getattr(self, attribute)):
internal_data.attributes[attribute] = getattr(self, attribute)(internal_data.attributes)
|
[
"re.match"
] |
[((618, 653), 're.match', 're.match', (['rs_regexp', 'uniqueid', 're.I'], {}), '(rs_regexp, uniqueid, re.I)\n', (626, 653), False, 'import re\n'), ((1481, 1522), 're.match', 're.match', (['spid_regexp', 'fiscalNumber', 're.I'], {}), '(spid_regexp, fiscalNumber, re.I)\n', (1489, 1522), False, 'import re\n'), ((2504, 2537), 're.match', 're.match', (['_regexp', 'uniqueid', 're.I'], {}), '(_regexp, uniqueid, re.I)\n', (2512, 2537), False, 'import re\n')]
|
#!/usr/bin/env python
from __future__ import print_function
import subprocess
# stack two Inkscape generated files
subprocess.check_call(
'../svg_stack.py --direction=h --margin=100 red_ball.svg blue_triangle.svg > shapes_test.svg',
shell=True)
# Inkscape files don't pass xmllint -- don't test
print('You should manually verify that shapes_test.svg looks exactly the same as shapes.svg')
# subprocess.check_call(
# 'rasterizer shapes_test.svg',
# shell=True)
|
[
"subprocess.check_call"
] |
[((117, 255), 'subprocess.check_call', 'subprocess.check_call', (['"""../svg_stack.py --direction=h --margin=100 red_ball.svg blue_triangle.svg > shapes_test.svg"""'], {'shell': '(True)'}), "(\n '../svg_stack.py --direction=h --margin=100 red_ball.svg blue_triangle.svg > shapes_test.svg'\n , shell=True)\n", (138, 255), False, 'import subprocess\n')]
|
#!/usr/bin/env python3
import os
import concurrent.futures
from time import time
from utils import load_csv, write_results, check_open_port
# https://docs.python.org/3/library/concurrent.futures.html#threadpoolexecutor-example
start = time()
results = {}
top_sites = f'{os.path.dirname(os.path.realpath(__file__))}/top-1m.csv'
endpoints = load_csv(top_sites)[0:100]
max_workers = 10
# We can use a with statement to ensure threads are cleaned up promptly
with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
# Start the load operations and mark each future with its endpoint
futures = {executor.submit(check_open_port, endpoint): endpoint for endpoint in endpoints}
for future in concurrent.futures.as_completed(futures):
endpoint = futures[future]
try:
data = future.result()
results[data[0]] = data[1]
except Exception as exc:
print('%r generated an exception: %s' % (endpoint, exc))
write_results(results, 'futures')
end = time()
print(f"Endpoints took {end-start} seconds")
|
[
"os.path.realpath",
"utils.write_results",
"utils.load_csv",
"time.time"
] |
[((240, 246), 'time.time', 'time', ([], {}), '()\n', (244, 246), False, 'from time import time\n'), ((993, 1026), 'utils.write_results', 'write_results', (['results', '"""futures"""'], {}), "(results, 'futures')\n", (1006, 1026), False, 'from utils import load_csv, write_results, check_open_port\n'), ((1033, 1039), 'time.time', 'time', ([], {}), '()\n', (1037, 1039), False, 'from time import time\n'), ((344, 363), 'utils.load_csv', 'load_csv', (['top_sites'], {}), '(top_sites)\n', (352, 363), False, 'from utils import load_csv, write_results, check_open_port\n'), ((291, 317), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (307, 317), False, 'import os\n')]
|
from typing import List, Optional
from fastapi import APIRouter, Body, Depends
from watchmen_auth import PrincipalService
from watchmen_meta.common import ask_meta_storage, ask_snowflake_generator
from watchmen_meta.system import ExternalWriterService
from watchmen_model.admin import UserRole
from watchmen_model.common import DataPage, ExternalWriterId, Pageable
from watchmen_model.system import ExternalWriter
from watchmen_rest import get_any_admin_principal, get_super_admin_principal
from watchmen_rest.util import raise_400, raise_403, raise_404
from watchmen_rest_doll.doll import ask_tuple_delete_enabled
from watchmen_rest_doll.util import trans, trans_readonly
from watchmen_utilities import is_blank
router = APIRouter()
def get_external_writer_service(principal_service: PrincipalService) -> ExternalWriterService:
return ExternalWriterService(ask_meta_storage(), ask_snowflake_generator(), principal_service)
@router.get('/external_writer', tags=[UserRole.ADMIN, UserRole.SUPER_ADMIN], response_model=ExternalWriter)
async def load_external_writer_by_id(
writer_id: Optional[ExternalWriterId] = None,
principal_service: PrincipalService = Depends(get_any_admin_principal)
) -> ExternalWriter:
if is_blank(writer_id):
raise_400('External writer id is required.')
if not principal_service.is_super_admin():
if writer_id != principal_service.get_tenant_id():
raise_403()
external_writer_service = get_external_writer_service(principal_service)
def action() -> ExternalWriter:
# noinspection PyTypeChecker
external_writer: ExternalWriter = external_writer_service.find_by_id(writer_id)
if external_writer is None:
raise_404()
return external_writer
return trans_readonly(external_writer_service, action)
@router.post('/external_writer', tags=[UserRole.SUPER_ADMIN], response_model=ExternalWriter)
async def save_external_writer(
external_writer: ExternalWriter, principal_service: PrincipalService = Depends(get_super_admin_principal)
) -> ExternalWriter:
external_writer_service = get_external_writer_service(principal_service)
# noinspection DuplicatedCode
def action(writer: ExternalWriter) -> ExternalWriter:
if external_writer_service.is_storable_id_faked(writer.writerId):
external_writer_service.redress_storable_id(writer)
# noinspection PyTypeChecker
writer: ExternalWriter = external_writer_service.create(writer)
else:
# noinspection PyTypeChecker
writer: ExternalWriter = external_writer_service.update(writer)
return writer
return trans(external_writer_service, lambda: action(external_writer))
class QueryExternalWriterDataPage(DataPage):
data: List[ExternalWriter]
@router.post(
'/external_writer/name', tags=[UserRole.ADMIN, UserRole.SUPER_ADMIN], response_model=QueryExternalWriterDataPage)
async def find_external_writers_by_name(
query_name: Optional[str] = None, pageable: Pageable = Body(...),
principal_service: PrincipalService = Depends(get_any_admin_principal)
) -> QueryExternalWriterDataPage:
external_writer_service = get_external_writer_service(principal_service)
# noinspection DuplicatedCode
def action() -> QueryExternalWriterDataPage:
tenant_id = None
if principal_service.is_tenant_admin():
tenant_id = principal_service.get_tenant_id()
if is_blank(query_name):
# noinspection PyTypeChecker
return external_writer_service.find_by_text(None, tenant_id, pageable)
else:
# noinspection PyTypeChecker
return external_writer_service.find_by_text(query_name, tenant_id, pageable)
return trans_readonly(external_writer_service, action)
@router.get(
"/external_writer/all", tags=[UserRole.ADMIN], response_model=List[ExternalWriter])
async def find_all_external_writers(
principal_service: PrincipalService = Depends(get_any_admin_principal)) -> List[ExternalWriter]:
tenant_id = None
if principal_service.is_tenant_admin():
tenant_id = principal_service.get_tenant_id()
external_writer_service = get_external_writer_service(principal_service)
def action() -> List[ExternalWriter]:
return external_writer_service.find_all(tenant_id)
return trans_readonly(external_writer_service, action)
@router.delete('/external_writer', tags=[UserRole.SUPER_ADMIN], response_model=ExternalWriter)
async def delete_external_writer_by_id(
writer_id: Optional[ExternalWriterId] = None,
principal_service: PrincipalService = Depends(get_super_admin_principal)
) -> ExternalWriter:
if not ask_tuple_delete_enabled():
raise_404('Not Found')
if is_blank(writer_id):
raise_400('External writer id is required.')
external_writer_service = get_external_writer_service(principal_service)
def action() -> ExternalWriter:
# noinspection PyTypeChecker
external_writer: ExternalWriter = external_writer_service.delete(writer_id)
if external_writer is None:
raise_404()
return external_writer
return trans(external_writer_service, action)
|
[
"watchmen_utilities.is_blank",
"watchmen_rest.util.raise_404",
"watchmen_rest_doll.util.trans_readonly",
"fastapi.Body",
"watchmen_rest.util.raise_400",
"watchmen_meta.common.ask_meta_storage",
"watchmen_meta.common.ask_snowflake_generator",
"watchmen_rest_doll.doll.ask_tuple_delete_enabled",
"watchmen_rest.util.raise_403",
"fastapi.Depends",
"watchmen_rest_doll.util.trans",
"fastapi.APIRouter"
] |
[((725, 736), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (734, 736), False, 'from fastapi import APIRouter, Body, Depends\n'), ((1166, 1198), 'fastapi.Depends', 'Depends', (['get_any_admin_principal'], {}), '(get_any_admin_principal)\n', (1173, 1198), False, 'from fastapi import APIRouter, Body, Depends\n'), ((1224, 1243), 'watchmen_utilities.is_blank', 'is_blank', (['writer_id'], {}), '(writer_id)\n', (1232, 1243), False, 'from watchmen_utilities import is_blank\n'), ((1705, 1752), 'watchmen_rest_doll.util.trans_readonly', 'trans_readonly', (['external_writer_service', 'action'], {}), '(external_writer_service, action)\n', (1719, 1752), False, 'from watchmen_rest_doll.util import trans, trans_readonly\n'), ((1953, 1987), 'fastapi.Depends', 'Depends', (['get_super_admin_principal'], {}), '(get_super_admin_principal)\n', (1960, 1987), False, 'from fastapi import APIRouter, Body, Depends\n'), ((2893, 2902), 'fastapi.Body', 'Body', (['...'], {}), '(...)\n', (2897, 2902), False, 'from fastapi import APIRouter, Body, Depends\n'), ((2944, 2976), 'fastapi.Depends', 'Depends', (['get_any_admin_principal'], {}), '(get_any_admin_principal)\n', (2951, 2976), False, 'from fastapi import APIRouter, Body, Depends\n'), ((3535, 3582), 'watchmen_rest_doll.util.trans_readonly', 'trans_readonly', (['external_writer_service', 'action'], {}), '(external_writer_service, action)\n', (3549, 3582), False, 'from watchmen_rest_doll.util import trans, trans_readonly\n'), ((3760, 3792), 'fastapi.Depends', 'Depends', (['get_any_admin_principal'], {}), '(get_any_admin_principal)\n', (3767, 3792), False, 'from fastapi import APIRouter, Body, Depends\n'), ((4103, 4150), 'watchmen_rest_doll.util.trans_readonly', 'trans_readonly', (['external_writer_service', 'action'], {}), '(external_writer_service, action)\n', (4117, 4150), False, 'from watchmen_rest_doll.util import trans, trans_readonly\n'), ((4376, 4410), 'fastapi.Depends', 'Depends', (['get_super_admin_principal'], {}), '(get_super_admin_principal)\n', (4383, 4410), False, 'from fastapi import APIRouter, Body, Depends\n'), ((4498, 4517), 'watchmen_utilities.is_blank', 'is_blank', (['writer_id'], {}), '(writer_id)\n', (4506, 4517), False, 'from watchmen_utilities import is_blank\n'), ((4863, 4901), 'watchmen_rest_doll.util.trans', 'trans', (['external_writer_service', 'action'], {}), '(external_writer_service, action)\n', (4868, 4901), False, 'from watchmen_rest_doll.util import trans, trans_readonly\n'), ((864, 882), 'watchmen_meta.common.ask_meta_storage', 'ask_meta_storage', ([], {}), '()\n', (880, 882), False, 'from watchmen_meta.common import ask_meta_storage, ask_snowflake_generator\n'), ((884, 909), 'watchmen_meta.common.ask_snowflake_generator', 'ask_snowflake_generator', ([], {}), '()\n', (907, 909), False, 'from watchmen_meta.common import ask_meta_storage, ask_snowflake_generator\n'), ((1247, 1291), 'watchmen_rest.util.raise_400', 'raise_400', (['"""External writer id is required."""'], {}), "('External writer id is required.')\n", (1256, 1291), False, 'from watchmen_rest.util import raise_400, raise_403, raise_404\n'), ((3278, 3298), 'watchmen_utilities.is_blank', 'is_blank', (['query_name'], {}), '(query_name)\n', (3286, 3298), False, 'from watchmen_utilities import is_blank\n'), ((4440, 4466), 'watchmen_rest_doll.doll.ask_tuple_delete_enabled', 'ask_tuple_delete_enabled', ([], {}), '()\n', (4464, 4466), False, 'from watchmen_rest_doll.doll import ask_tuple_delete_enabled\n'), ((4470, 4492), 'watchmen_rest.util.raise_404', 'raise_404', (['"""Not Found"""'], {}), "('Not Found')\n", (4479, 4492), False, 'from watchmen_rest.util import raise_400, raise_403, raise_404\n'), ((4521, 4565), 'watchmen_rest.util.raise_400', 'raise_400', (['"""External writer id is required."""'], {}), "('External writer id is required.')\n", (4530, 4565), False, 'from watchmen_rest.util import raise_400, raise_403, raise_404\n'), ((1392, 1403), 'watchmen_rest.util.raise_403', 'raise_403', ([], {}), '()\n', (1401, 1403), False, 'from watchmen_rest.util import raise_400, raise_403, raise_404\n'), ((1659, 1670), 'watchmen_rest.util.raise_404', 'raise_404', ([], {}), '()\n', (1668, 1670), False, 'from watchmen_rest.util import raise_400, raise_403, raise_404\n'), ((4817, 4828), 'watchmen_rest.util.raise_404', 'raise_404', ([], {}), '()\n', (4826, 4828), False, 'from watchmen_rest.util import raise_400, raise_403, raise_404\n')]
|
import asyncio
import json
import websockets
from .exceptions import InvalidServer, ConnectionClosed
from .utils import maybe_await
# noinspection SpellCheckingInspection
class RconConnector:
# noinspection PyTypeChecker
def __init__(self, host, port, password, message_callback=None, console_callback=None):
self.uri = f'ws://{host}:{port}/{password}'
self.ws: websockets.WebSocketClientProtocol = None
self._loop = None
self._ws_kwargs = {}
self._counter = 1
self._process_task: asyncio.Future = None
self._bucket = {}
self._closed = True
if message_callback and not callable(message_callback):
raise TypeError('Expected type `function` for `message_callback`, got type `{0}`'.format(
type(message_callback)))
elif message_callback:
self._bucket[-1] = message_callback
if console_callback and not callable(console_callback):
raise TypeError('Expected type `function` for `console_callback`, got type `{0}`'.format(
type(console_callback)))
elif console_callback:
self._bucket[0] = console_callback
async def start(self, loop, **kwargs):
self._loop = loop
try:
self.ws = await websockets.connect(self.uri, **kwargs)
self._ws_kwargs = kwargs
self._closed = False
if self._process_task:
self._process_task.cancel()
self._process_task = self._loop.create_task(self.receive_data())
except websockets.WebSocketProtocolError:
raise InvalidServer
async def close(self):
self._closed = True
await self.ws.close(reason='Client requested shutdown of WS connection.')
async def command(self, command, callback):
if not callable(callback):
raise TypeError('Expected type `function` for `message_callback`, got type `{0}`'.format(
type(callback)))
if self._closed:
raise ConnectionClosed
self._bucket[self._counter] = callback
data = json.dumps(dict(Message=command, Identifier=self._counter, Name="WebRcon"))
self._counter += 1
retry_counter = 0
sent = False
while not sent:
try:
await self.ws.send(data)
sent = True
except websockets.ConnectionClosed:
await asyncio.sleep((retry_counter + 1) * 5)
retry_counter += 1
await self.start(self._loop, **self._ws_kwargs)
except (websockets.WebSocketProtocolError, websockets.InvalidHandshake):
await asyncio.sleep((retry_counter + 1) * 5)
retry_counter += 1
if retry_counter >= 5:
# Could not reconnect / send the data
return False
return True
async def receive_data(self):
# noinspection DuplicatedCode
closed_counter = 0
while not self._closed:
data = {}
try:
resp = await self.ws.recv()
data = json.loads(resp)
except websockets.ConnectionClosed:
closed_counter += 1
if closed_counter >= 3:
await self.start(self._loop, **self._ws_kwargs)
except json.JSONDecodeError:
# Invalid response, ignore
pass
identifier = data.get('Identifier')
if identifier == -1 and self._bucket.get(-1):
self._loop.create_task(maybe_await(self._bucket[-1], data))
elif identifier == 0 and self._bucket.get(0):
self._loop.create_task(maybe_await(self._bucket[0], data))
elif identifier in self._bucket:
self._loop.create_task(maybe_await(self._bucket[identifier], data))
del self._bucket[identifier]
|
[
"websockets.connect",
"json.loads",
"asyncio.sleep"
] |
[((1298, 1336), 'websockets.connect', 'websockets.connect', (['self.uri'], {}), '(self.uri, **kwargs)\n', (1316, 1336), False, 'import websockets\n'), ((3149, 3165), 'json.loads', 'json.loads', (['resp'], {}), '(resp)\n', (3159, 3165), False, 'import json\n'), ((2454, 2492), 'asyncio.sleep', 'asyncio.sleep', (['((retry_counter + 1) * 5)'], {}), '((retry_counter + 1) * 5)\n', (2467, 2492), False, 'import asyncio\n'), ((2699, 2737), 'asyncio.sleep', 'asyncio.sleep', (['((retry_counter + 1) * 5)'], {}), '((retry_counter + 1) * 5)\n', (2712, 2737), False, 'import asyncio\n')]
|
"""State Generator"""
from lesson12_projects.house3.data.const import (
MY_ROOM,
OUT,
CLOSE_DOOR,
OPEN_DOOR,
STAIRS,
)
from lesson13n2.states.myroom import MyroomState
from lesson13n2.states.out import OutState
from lesson13n2.states.out_closedoor import OutClosedoorState
from lesson13n2.states.out_opendoor import OutOpendoorState
from lesson13n2.states.stairs import StairsState
# ステートを使い回すのではなく、アクセスするたびに ステートの生成を実行しなおせるよう、ラムダ関数を返します
house3n2_state_gen = {
OUT: {
"": lambda: OutState(),
CLOSE_DOOR: lambda: OutClosedoorState(),
OPEN_DOOR: lambda: OutOpendoorState(),
},
STAIRS: lambda: StairsState(),
MY_ROOM: lambda: MyroomState(),
}
|
[
"lesson13n2.states.out.OutState",
"lesson13n2.states.myroom.MyroomState",
"lesson13n2.states.out_closedoor.OutClosedoorState",
"lesson13n2.states.stairs.StairsState",
"lesson13n2.states.out_opendoor.OutOpendoorState"
] |
[((649, 662), 'lesson13n2.states.stairs.StairsState', 'StairsState', ([], {}), '()\n', (660, 662), False, 'from lesson13n2.states.stairs import StairsState\n'), ((685, 698), 'lesson13n2.states.myroom.MyroomState', 'MyroomState', ([], {}), '()\n', (696, 698), False, 'from lesson13n2.states.myroom import MyroomState\n'), ((514, 524), 'lesson13n2.states.out.OutState', 'OutState', ([], {}), '()\n', (522, 524), False, 'from lesson13n2.states.out import OutState\n'), ((554, 573), 'lesson13n2.states.out_closedoor.OutClosedoorState', 'OutClosedoorState', ([], {}), '()\n', (571, 573), False, 'from lesson13n2.states.out_closedoor import OutClosedoorState\n'), ((602, 620), 'lesson13n2.states.out_opendoor.OutOpendoorState', 'OutOpendoorState', ([], {}), '()\n', (618, 620), False, 'from lesson13n2.states.out_opendoor import OutOpendoorState\n')]
|
from __future__ import unicode_literals
from django.contrib.auth.decorators import login_required
from django.db.models import F
from django.utils.decorators import method_decorator
from django.views.generic import ListView
from wiki import models
from wiki.core.paginator import WikiPaginator
class GlobalHistory(ListView):
template_name = 'wiki/plugins/globalhistory/globalhistory.html'
paginator_class = WikiPaginator
paginate_by = 30
model = models.ArticleRevision
context_object_name = 'revisions'
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
self.only_last = kwargs.get('only_last', 0)
return super(GlobalHistory, self).dispatch(
request, *args, **kwargs)
def get_queryset(self):
if self.only_last == '1':
return self.model.objects.can_read(self.request.user) \
.filter(article__current_revision=F('id')).order_by('-modified')
else:
return self.model.objects.can_read(self.request.user).order_by('-modified')
def get_context_data(self, **kwargs):
kwargs['only_last'] = self.only_last
return super(GlobalHistory, self).get_context_data(**kwargs)
|
[
"django.db.models.F",
"django.utils.decorators.method_decorator"
] |
[((533, 565), 'django.utils.decorators.method_decorator', 'method_decorator', (['login_required'], {}), '(login_required)\n', (549, 565), False, 'from django.utils.decorators import method_decorator\n'), ((939, 946), 'django.db.models.F', 'F', (['"""id"""'], {}), "('id')\n", (940, 946), False, 'from django.db.models import F\n')]
|
import unittest
from torch import rand
from tensorneko.layer import PatchEmbedding2d
class TestPatching(unittest.TestCase):
# TODO
pass
class TestPatchEmbedding2d(unittest.TestCase):
def test_simple_patching(self):
# test input for 64x64 RGB image batches
b, c, h, w = (8, 3, 64, 64)
x = rand(b, c, h, w)
# patch size
p = 16
# embedding output
e = 512
# build layer
patch_layer = PatchEmbedding2d((c, h, w), p, e)
# patch grid size
seq_length = (h // p) * (w // p)
self.assertTrue(patch_layer(x).shape == (b, seq_length, e))
def test_overlap_patching(self):
# test input for 64x64 RGB image batches
b, c, h, w = (8, 3, 64, 64)
x = rand(b, c, h, w)
# patch size
p = 16
# embedding output
e = 512
# strides
s = 8
# build layer
patch_layer = PatchEmbedding2d((c, h, w), p, e, strides=(s, s))
# patch grid size
seq_length = ((h - p) // s + 1) * ((w - p) // s + 1)
self.assertTrue(patch_layer(x).shape == (b, seq_length, e))
|
[
"tensorneko.layer.PatchEmbedding2d",
"torch.rand"
] |
[((331, 347), 'torch.rand', 'rand', (['b', 'c', 'h', 'w'], {}), '(b, c, h, w)\n', (335, 347), False, 'from torch import rand\n'), ((471, 504), 'tensorneko.layer.PatchEmbedding2d', 'PatchEmbedding2d', (['(c, h, w)', 'p', 'e'], {}), '((c, h, w), p, e)\n', (487, 504), False, 'from tensorneko.layer import PatchEmbedding2d\n'), ((775, 791), 'torch.rand', 'rand', (['b', 'c', 'h', 'w'], {}), '(b, c, h, w)\n', (779, 791), False, 'from torch import rand\n'), ((947, 996), 'tensorneko.layer.PatchEmbedding2d', 'PatchEmbedding2d', (['(c, h, w)', 'p', 'e'], {'strides': '(s, s)'}), '((c, h, w), p, e, strides=(s, s))\n', (963, 996), False, 'from tensorneko.layer import PatchEmbedding2d\n')]
|
from flask_sqlalchemy import SQLAlchemy
from onto_app import db
from onto_app.helper import add_new_ontologies
class users(db.Model):
__tabelname__ = 'users'
id = db.Column(db.String, primary_key=True)
username = db.Column(db.String(200), unique=True, nullable=False)
# password = db.Column(db.String(200), nullable=False)
privilege = db.Column(db.Integer, nullable=False)
ontology = db.relationship('ontologies', backref='users')
decisions = db.relationship('class_decisions', cascade="all,delete", backref='users')
class ontologies(db.Model):
__tablename__ = 'ontologies'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(200), nullable=False)
# filepath = db.Column(db.String(200), unique=True, nullable=False)
admin_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)
relations = db.relationship('class_relations', cascade="all,delete", backref='ontologies')
class class_relations(db.Model):
__tablename__ = 'class_relations'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
property = db.Column(db.String(200))
domain = db.Column(db.String(200), nullable=False)
range = db.Column(db.String(200), nullable=False)
onto_id = db.Column(db.Integer, db.ForeignKey('ontologies.id'), nullable=False)
decisions = db.relationship('class_decisions', cascade="all,delete", backref='class_relations')
final_class_decisions = db.relationship('final_class_decisions', cascade="all,delete", backref='class_relations')
class nodes(db.Model):
__tablename__ = 'nodes'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
onto_id = db.Column(db.Integer, db.ForeignKey('ontologies.id'), nullable=False)
name = db.Column(db.String(200), nullable=False)
decisions = db.relationship('node_decisions', cascade="all,delete", backref='nodes')
final_node_decisions = db.relationship('final_node_decisions', cascade="all,delete", backref='nodes')
class class_decisions(db.Model):
__tablename__ = 'class_decisions'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
relation_id = db.Column(db.Integer, db.ForeignKey('class_relations.id'), nullable=False)
approved = db.Column(db.Integer, nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)
class node_decisions(db.Model):
__tablename__ = 'node_decisions'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
node_id = db.Column(db.Integer, db.ForeignKey('nodes.id'), nullable=False)
approved = db.Column(db.Integer, nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)
class final_class_decisions(db.Model):
__tablename__ = 'final_class_decisions'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
relation_id = db.Column(db.Integer, db.ForeignKey('class_relations.id'), nullable=False)
approved = db.Column(db.Integer, nullable=False)
class final_node_decisions(db.Model):
__tablename__ = 'final_node_decisions'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
node_id = db.Column(db.Integer, db.ForeignKey('nodes.id'), nullable=False)
approved = db.Column(db.Integer, nullable=False)
# db.drop_all()
try:
db.create_all()
except:
pass
add_new_ontologies()
|
[
"onto_app.db.create_all",
"onto_app.db.relationship",
"onto_app.db.Column",
"onto_app.helper.add_new_ontologies",
"onto_app.db.String",
"onto_app.db.ForeignKey"
] |
[((3381, 3401), 'onto_app.helper.add_new_ontologies', 'add_new_ontologies', ([], {}), '()\n', (3399, 3401), False, 'from onto_app.helper import add_new_ontologies\n'), ((172, 210), 'onto_app.db.Column', 'db.Column', (['db.String'], {'primary_key': '(True)'}), '(db.String, primary_key=True)\n', (181, 210), False, 'from onto_app import db\n'), ((356, 393), 'onto_app.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)'}), '(db.Integer, nullable=False)\n', (365, 393), False, 'from onto_app import db\n'), ((409, 455), 'onto_app.db.relationship', 'db.relationship', (['"""ontologies"""'], {'backref': '"""users"""'}), "('ontologies', backref='users')\n", (424, 455), False, 'from onto_app import db\n'), ((472, 545), 'onto_app.db.relationship', 'db.relationship', (['"""class_decisions"""'], {'cascade': '"""all,delete"""', 'backref': '"""users"""'}), "('class_decisions', cascade='all,delete', backref='users')\n", (487, 545), False, 'from onto_app import db\n'), ((617, 676), 'onto_app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(db.Integer, primary_key=True, autoincrement=True)\n', (626, 676), False, 'from onto_app import db\n'), ((898, 976), 'onto_app.db.relationship', 'db.relationship', (['"""class_relations"""'], {'cascade': '"""all,delete"""', 'backref': '"""ontologies"""'}), "('class_relations', cascade='all,delete', backref='ontologies')\n", (913, 976), False, 'from onto_app import db\n'), ((1058, 1117), 'onto_app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(db.Integer, primary_key=True, autoincrement=True)\n', (1067, 1117), False, 'from onto_app import db\n'), ((1368, 1456), 'onto_app.db.relationship', 'db.relationship', (['"""class_decisions"""'], {'cascade': '"""all,delete"""', 'backref': '"""class_relations"""'}), "('class_decisions', cascade='all,delete', backref=\n 'class_relations')\n", (1383, 1456), False, 'from onto_app import db\n'), ((1480, 1574), 'onto_app.db.relationship', 'db.relationship', (['"""final_class_decisions"""'], {'cascade': '"""all,delete"""', 'backref': '"""class_relations"""'}), "('final_class_decisions', cascade='all,delete', backref=\n 'class_relations')\n", (1495, 1574), False, 'from onto_app import db\n'), ((1631, 1690), 'onto_app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(db.Integer, primary_key=True, autoincrement=True)\n', (1640, 1690), False, 'from onto_app import db\n'), ((1844, 1916), 'onto_app.db.relationship', 'db.relationship', (['"""node_decisions"""'], {'cascade': '"""all,delete"""', 'backref': '"""nodes"""'}), "('node_decisions', cascade='all,delete', backref='nodes')\n", (1859, 1916), False, 'from onto_app import db\n'), ((1944, 2022), 'onto_app.db.relationship', 'db.relationship', (['"""final_node_decisions"""'], {'cascade': '"""all,delete"""', 'backref': '"""nodes"""'}), "('final_node_decisions', cascade='all,delete', backref='nodes')\n", (1959, 2022), False, 'from onto_app import db\n'), ((2104, 2163), 'onto_app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(db.Integer, primary_key=True, autoincrement=True)\n', (2113, 2163), False, 'from onto_app import db\n'), ((2272, 2309), 'onto_app.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)'}), '(db.Integer, nullable=False)\n', (2281, 2309), False, 'from onto_app import db\n'), ((2468, 2527), 'onto_app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(db.Integer, primary_key=True, autoincrement=True)\n', (2477, 2527), False, 'from onto_app import db\n'), ((2622, 2659), 'onto_app.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)'}), '(db.Integer, nullable=False)\n', (2631, 2659), False, 'from onto_app import db\n'), ((2832, 2891), 'onto_app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(db.Integer, primary_key=True, autoincrement=True)\n', (2841, 2891), False, 'from onto_app import db\n'), ((3000, 3037), 'onto_app.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)'}), '(db.Integer, nullable=False)\n', (3009, 3037), False, 'from onto_app import db\n'), ((3129, 3188), 'onto_app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(db.Integer, primary_key=True, autoincrement=True)\n', (3138, 3188), False, 'from onto_app import db\n'), ((3283, 3320), 'onto_app.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)'}), '(db.Integer, nullable=False)\n', (3292, 3320), False, 'from onto_app import db\n'), ((3347, 3362), 'onto_app.db.create_all', 'db.create_all', ([], {}), '()\n', (3360, 3362), False, 'from onto_app import db\n'), ((236, 250), 'onto_app.db.String', 'db.String', (['(200)'], {}), '(200)\n', (245, 250), False, 'from onto_app import db\n'), ((698, 712), 'onto_app.db.String', 'db.String', (['(200)'], {}), '(200)\n', (707, 712), False, 'from onto_app import db\n'), ((839, 864), 'onto_app.db.ForeignKey', 'db.ForeignKey', (['"""users.id"""'], {}), "('users.id')\n", (852, 864), False, 'from onto_app import db\n'), ((1143, 1157), 'onto_app.db.String', 'db.String', (['(200)'], {}), '(200)\n', (1152, 1157), False, 'from onto_app import db\n'), ((1182, 1196), 'onto_app.db.String', 'db.String', (['(200)'], {}), '(200)\n', (1191, 1196), False, 'from onto_app import db\n'), ((1236, 1250), 'onto_app.db.String', 'db.String', (['(200)'], {}), '(200)\n', (1245, 1250), False, 'from onto_app import db\n'), ((1304, 1334), 'onto_app.db.ForeignKey', 'db.ForeignKey', (['"""ontologies.id"""'], {}), "('ontologies.id')\n", (1317, 1334), False, 'from onto_app import db\n'), ((1727, 1757), 'onto_app.db.ForeignKey', 'db.ForeignKey', (['"""ontologies.id"""'], {}), "('ontologies.id')\n", (1740, 1757), False, 'from onto_app import db\n'), ((1796, 1810), 'onto_app.db.String', 'db.String', (['(200)'], {}), '(200)\n', (1805, 1810), False, 'from onto_app import db\n'), ((2204, 2239), 'onto_app.db.ForeignKey', 'db.ForeignKey', (['"""class_relations.id"""'], {}), "('class_relations.id')\n", (2217, 2239), False, 'from onto_app import db\n'), ((2346, 2371), 'onto_app.db.ForeignKey', 'db.ForeignKey', (['"""users.id"""'], {}), "('users.id')\n", (2359, 2371), False, 'from onto_app import db\n'), ((2564, 2589), 'onto_app.db.ForeignKey', 'db.ForeignKey', (['"""nodes.id"""'], {}), "('nodes.id')\n", (2577, 2589), False, 'from onto_app import db\n'), ((2696, 2721), 'onto_app.db.ForeignKey', 'db.ForeignKey', (['"""users.id"""'], {}), "('users.id')\n", (2709, 2721), False, 'from onto_app import db\n'), ((2932, 2967), 'onto_app.db.ForeignKey', 'db.ForeignKey', (['"""class_relations.id"""'], {}), "('class_relations.id')\n", (2945, 2967), False, 'from onto_app import db\n'), ((3225, 3250), 'onto_app.db.ForeignKey', 'db.ForeignKey', (['"""nodes.id"""'], {}), "('nodes.id')\n", (3238, 3250), False, 'from onto_app import db\n')]
|
from django.db import models
'''
SuperUser information:
User: Sandra
Email: <EMAIL>
Password: <PASSWORD>
'''
# Create your models here.
class Topic(models.Model):
top_name = models.CharField(max_length=264, unique=True)
def __str__(self) -> str:
return self.top_name
class Webpage(models.Model):
topic = models.ForeignKey(Topic, on_delete=models.CASCADE)
name = models.CharField(max_length=264, unique=True)
url = models.URLField(unique=True)
def __str__(self) -> str:
return self.name
class AccessRecord(models.Model):
name = models.ForeignKey(Webpage, on_delete=models.CASCADE)
date = models.DateField()
def __str__(self) -> str:
return str(self.date)
|
[
"django.db.models.CharField",
"django.db.models.URLField",
"django.db.models.ForeignKey",
"django.db.models.DateField"
] |
[((177, 222), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(264)', 'unique': '(True)'}), '(max_length=264, unique=True)\n', (193, 222), False, 'from django.db import models\n'), ((323, 373), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Topic'], {'on_delete': 'models.CASCADE'}), '(Topic, on_delete=models.CASCADE)\n', (340, 373), False, 'from django.db import models\n'), ((383, 428), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(264)', 'unique': '(True)'}), '(max_length=264, unique=True)\n', (399, 428), False, 'from django.db import models\n'), ((437, 465), 'django.db.models.URLField', 'models.URLField', ([], {'unique': '(True)'}), '(unique=True)\n', (452, 465), False, 'from django.db import models\n'), ((562, 614), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Webpage'], {'on_delete': 'models.CASCADE'}), '(Webpage, on_delete=models.CASCADE)\n', (579, 614), False, 'from django.db import models\n'), ((624, 642), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (640, 642), False, 'from django.db import models\n')]
|
import os
from flask.ext.dotenv import DotEnv
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
class Config(object):
"""Main configuration class"""
DEBUG = False
TESTING = False
CSRF_ENABLED = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
SQLALCHEMY_TRACK_MODIFICATIONS = True
SECRET_KEY = os.environ.get('SECRET')
@classmethod
def init_app(self, app):
env = DotEnv()
env.init_app(app, os.path.join(BASE_DIR, '.env'), verbose_mode=True)
# configuration for when in production
class ProductionConfig(Config):
"""configuration for when in production"""
DEBUG = False
# configuration for when in development
class DevelopmentConfig(Config):
"""configuration for when in development"""
DEVELOPMENT = True
DEBUG = True
# configuration for when testing
class TestingConfig(Config):
"""configuration for when testing"""
TESTING = True
if os.getenv('TRAVIS_BUILD', None):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
else:
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DB_URL')
config = {
'production': ProductionConfig,
'development': DevelopmentConfig,
'testing': TestingConfig,
'default': ProductionConfig,
}
|
[
"os.path.abspath",
"flask.ext.dotenv.DotEnv",
"os.environ.get",
"os.path.join",
"os.getenv"
] |
[((269, 299), 'os.environ.get', 'os.environ.get', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (283, 299), False, 'import os\n'), ((359, 383), 'os.environ.get', 'os.environ.get', (['"""SECRET"""'], {}), "('SECRET')\n", (373, 383), False, 'import os\n'), ((963, 994), 'os.getenv', 'os.getenv', (['"""TRAVIS_BUILD"""', 'None'], {}), "('TRAVIS_BUILD', None)\n", (972, 994), False, 'import os\n'), ((90, 115), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (105, 115), False, 'import os\n'), ((445, 453), 'flask.ext.dotenv.DotEnv', 'DotEnv', ([], {}), '()\n', (451, 453), False, 'from flask.ext.dotenv import DotEnv\n'), ((1030, 1060), 'os.environ.get', 'os.environ.get', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (1044, 1060), False, 'import os\n'), ((1105, 1134), 'os.environ.get', 'os.environ.get', (['"""TEST_DB_URL"""'], {}), "('TEST_DB_URL')\n", (1119, 1134), False, 'import os\n'), ((480, 510), 'os.path.join', 'os.path.join', (['BASE_DIR', '""".env"""'], {}), "(BASE_DIR, '.env')\n", (492, 510), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
"""
ZenMaxBuilder Copyright © 2021 darkmaster@grm34 https://github.com/grm34
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from modules.main.banner import app_banner
from modules.main.helper import app_helper
from modules.main.logger import app_logger
from modules.main.translator import app_translator
from modules.manager.error import (prevent_android_folder,
prevent_defconfig_folder,
prevent_img_folder, prevent_kernel_folder,
prevent_out_folder, prevent_wrong_usage,
prevent_zip_folder)
from modules.manager.exit import app_exit
from modules.manager.json import load_json_file
from modules.manager.options import options_manager
from modules.session.debug import session_debug
from modules.session.prompt import ask_for_mode, ask_questions
from modules.session.requirements import session_requirements
from modules.session.run import run_session
from modules.session.settings import (global_settings, mode_settings,
session_settings)
class ZenMaxBuilder:
"""Application `main` object
Project structure
-----------------
" ZenMaxBuilder.py
" |
" |---- modules/
" | |
" | |---- cloner/
" | | |---- anykernel.py
" | | |---- toolchains.py
" | | |---- zipsigner.py
" | |
" | |---- compiler/
" | | |---- build.py
" | | |---- clean.py
" | | |---- defconfig.py
" | | |---- menuconfig.py
" | |
" | |---- inquirer/
" | | |---- mode.py
" | | |---- save.py
" | | |---- session.py
" | | |---- validator.py
" | |
" | |---- main/
" | | |---- banner.py
" | | |---- helper.py
" | | |---- logger.py
" | | |---- translator.py
" | |
" | |---- manager/
" | | |---- cmd.py
" | | |---- error.py
" | | |---- exit.py
" | | |---- json.py
" | | |---- options.py
" | |
" | |---- obtainer/
" | | |---- compiler.py
" | | |---- defconfigs.py
" | | |---- devices.py
" | | |---- images.py
" | | |---- processor.py
" | | |---- version.py
" | | |---- zips.py
" | |
" | |---- session/
" | | |---- debug.py
" | | |---- prompt.py
" | | |---- requirements.py
" | | |---- run.py
" | | |---- settings.py
" | |
" | |---- zipper/
" | | |---- config.py
" | | |---- makezip.py
" | | |---- signer.py
" | |
" |
"""
def __init__(self):
"""Set main `class` instance
Initialize
----------
self.app: "Dictionary containing application informations"
self.language: "String containing desired language code"
self.themes: "Dictionary containing application themes"
self.theme: "Dictionary containing selected theme settings"
self.options: "Tuple containing cmd line options from sys.argv"
self.session: "Dictionary to store session parameters"
self.devices: "Array of dict of availables devices and data"
self.trad: "Gettext function to translate strings"
"""
self.app = load_json_file('app.json')
self.language = self.app['language']
self.themes = load_json_file('themes.json')
self.theme = self.themes['default']
self.options = app_helper(self)
self.session = load_json_file('settings.json')
self.devices = {}
self.trad = ''
def __str__(self):
"""Add extra method to the class.
Returns
-------
Current class name
"""
return self.__class__.__name__
def run(self):
"""Start the `application`
Actions
-------
1) "Set global settings"
2) "Set user options"
3) "Prevent bad settings"
3) "Ask for mode to use"
4) "Set mode settings"
5) "Ask required questions"
6) "Set session settings"
7) "Check for requirements"
8) "Run selected action"
"""
# Options
global_settings(self)
options_manager(self)
self.trad = app_translator(self.language)
# Prevent wrong settings
prevent_kernel_folder(self)
prevent_defconfig_folder(self)
prevent_out_folder(self)
prevent_img_folder(self)
prevent_zip_folder(self)
prevent_android_folder()
# Session
app_banner(self)
ask_for_mode(self)
mode_settings(self)
ask_questions(self)
session_debug(self)
session_settings(self)
session_requirements(self)
run_session(self)
if __name__ == '__main__':
try:
app_logger()
prevent_wrong_usage()
ZenMaxBuilder().run()
except KeyboardInterrupt:
app_exit()
|
[
"modules.manager.options.options_manager",
"modules.manager.json.load_json_file",
"modules.manager.error.prevent_defconfig_folder",
"modules.manager.error.prevent_out_folder",
"modules.main.logger.app_logger",
"modules.main.helper.app_helper",
"modules.session.requirements.session_requirements",
"modules.manager.error.prevent_android_folder",
"modules.manager.error.prevent_img_folder",
"modules.session.settings.global_settings",
"modules.main.translator.app_translator",
"modules.session.prompt.ask_for_mode",
"modules.session.run.run_session",
"modules.session.debug.session_debug",
"modules.session.prompt.ask_questions",
"modules.manager.error.prevent_wrong_usage",
"modules.manager.exit.app_exit",
"modules.manager.error.prevent_kernel_folder",
"modules.session.settings.session_settings",
"modules.manager.error.prevent_zip_folder",
"modules.main.banner.app_banner",
"modules.session.settings.mode_settings"
] |
[((4305, 4331), 'modules.manager.json.load_json_file', 'load_json_file', (['"""app.json"""'], {}), "('app.json')\n", (4319, 4331), False, 'from modules.manager.json import load_json_file\n'), ((4399, 4428), 'modules.manager.json.load_json_file', 'load_json_file', (['"""themes.json"""'], {}), "('themes.json')\n", (4413, 4428), False, 'from modules.manager.json import load_json_file\n'), ((4496, 4512), 'modules.main.helper.app_helper', 'app_helper', (['self'], {}), '(self)\n', (4506, 4512), False, 'from modules.main.helper import app_helper\n'), ((4536, 4567), 'modules.manager.json.load_json_file', 'load_json_file', (['"""settings.json"""'], {}), "('settings.json')\n", (4550, 4567), False, 'from modules.manager.json import load_json_file\n'), ((5261, 5282), 'modules.session.settings.global_settings', 'global_settings', (['self'], {}), '(self)\n', (5276, 5282), False, 'from modules.session.settings import global_settings, mode_settings, session_settings\n'), ((5291, 5312), 'modules.manager.options.options_manager', 'options_manager', (['self'], {}), '(self)\n', (5306, 5312), False, 'from modules.manager.options import options_manager\n'), ((5333, 5362), 'modules.main.translator.app_translator', 'app_translator', (['self.language'], {}), '(self.language)\n', (5347, 5362), False, 'from modules.main.translator import app_translator\n'), ((5405, 5432), 'modules.manager.error.prevent_kernel_folder', 'prevent_kernel_folder', (['self'], {}), '(self)\n', (5426, 5432), False, 'from modules.manager.error import prevent_android_folder, prevent_defconfig_folder, prevent_img_folder, prevent_kernel_folder, prevent_out_folder, prevent_wrong_usage, prevent_zip_folder\n'), ((5441, 5471), 'modules.manager.error.prevent_defconfig_folder', 'prevent_defconfig_folder', (['self'], {}), '(self)\n', (5465, 5471), False, 'from modules.manager.error import prevent_android_folder, prevent_defconfig_folder, prevent_img_folder, prevent_kernel_folder, prevent_out_folder, prevent_wrong_usage, prevent_zip_folder\n'), ((5480, 5504), 'modules.manager.error.prevent_out_folder', 'prevent_out_folder', (['self'], {}), '(self)\n', (5498, 5504), False, 'from modules.manager.error import prevent_android_folder, prevent_defconfig_folder, prevent_img_folder, prevent_kernel_folder, prevent_out_folder, prevent_wrong_usage, prevent_zip_folder\n'), ((5513, 5537), 'modules.manager.error.prevent_img_folder', 'prevent_img_folder', (['self'], {}), '(self)\n', (5531, 5537), False, 'from modules.manager.error import prevent_android_folder, prevent_defconfig_folder, prevent_img_folder, prevent_kernel_folder, prevent_out_folder, prevent_wrong_usage, prevent_zip_folder\n'), ((5546, 5570), 'modules.manager.error.prevent_zip_folder', 'prevent_zip_folder', (['self'], {}), '(self)\n', (5564, 5570), False, 'from modules.manager.error import prevent_android_folder, prevent_defconfig_folder, prevent_img_folder, prevent_kernel_folder, prevent_out_folder, prevent_wrong_usage, prevent_zip_folder\n'), ((5579, 5603), 'modules.manager.error.prevent_android_folder', 'prevent_android_folder', ([], {}), '()\n', (5601, 5603), False, 'from modules.manager.error import prevent_android_folder, prevent_defconfig_folder, prevent_img_folder, prevent_kernel_folder, prevent_out_folder, prevent_wrong_usage, prevent_zip_folder\n'), ((5631, 5647), 'modules.main.banner.app_banner', 'app_banner', (['self'], {}), '(self)\n', (5641, 5647), False, 'from modules.main.banner import app_banner\n'), ((5656, 5674), 'modules.session.prompt.ask_for_mode', 'ask_for_mode', (['self'], {}), '(self)\n', (5668, 5674), False, 'from modules.session.prompt import ask_for_mode, ask_questions\n'), ((5683, 5702), 'modules.session.settings.mode_settings', 'mode_settings', (['self'], {}), '(self)\n', (5696, 5702), False, 'from modules.session.settings import global_settings, mode_settings, session_settings\n'), ((5711, 5730), 'modules.session.prompt.ask_questions', 'ask_questions', (['self'], {}), '(self)\n', (5724, 5730), False, 'from modules.session.prompt import ask_for_mode, ask_questions\n'), ((5739, 5758), 'modules.session.debug.session_debug', 'session_debug', (['self'], {}), '(self)\n', (5752, 5758), False, 'from modules.session.debug import session_debug\n'), ((5767, 5789), 'modules.session.settings.session_settings', 'session_settings', (['self'], {}), '(self)\n', (5783, 5789), False, 'from modules.session.settings import global_settings, mode_settings, session_settings\n'), ((5798, 5824), 'modules.session.requirements.session_requirements', 'session_requirements', (['self'], {}), '(self)\n', (5818, 5824), False, 'from modules.session.requirements import session_requirements\n'), ((5833, 5850), 'modules.session.run.run_session', 'run_session', (['self'], {}), '(self)\n', (5844, 5850), False, 'from modules.session.run import run_session\n'), ((5898, 5910), 'modules.main.logger.app_logger', 'app_logger', ([], {}), '()\n', (5908, 5910), False, 'from modules.main.logger import app_logger\n'), ((5919, 5940), 'modules.manager.error.prevent_wrong_usage', 'prevent_wrong_usage', ([], {}), '()\n', (5938, 5940), False, 'from modules.manager.error import prevent_android_folder, prevent_defconfig_folder, prevent_img_folder, prevent_kernel_folder, prevent_out_folder, prevent_wrong_usage, prevent_zip_folder\n'), ((6010, 6020), 'modules.manager.exit.app_exit', 'app_exit', ([], {}), '()\n', (6018, 6020), False, 'from modules.manager.exit import app_exit\n')]
|
"""Common options and utils that can me used in commandline utils"""
import logging
import argparse
import sys
RESOLVED_TOOL_CONTRACT_OPTION = "--resolved-tool-contract"
EMIT_TOOL_CONTRACT_OPTION = "--emit-tool-contract"
def add_debug_option(p):
p.add_argument("--pdb", action="store_true", default=False,
help="Enable Python debugger")
return p
def add_log_debug_option(p):
"""This requires the log-level option"""
p.add_argument('--debug', action="store_true", default=False, help="Alias for setting log level to DEBUG")
return p
def add_log_quiet_option(p):
"""This requires the log-level option"""
p.add_argument('--quiet', action="store_true", default=False, help="Alias for setting log level to CRITICAL to suppress output.")
return p
def add_log_verbose_option(p):
p.add_argument(
"-v",
"--verbose",
dest="verbosity",
action="count",
help="Set the verbosity level.")
return p
def add_log_level_option(p, default_level='INFO'):
"""Add logging level with a default value"""
if isinstance(default_level, int):
default_level = logging.getLevelName(default_level)
p.add_argument('--log-level', choices=('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'),
default=default_level, help="Set log level")
return p
def add_log_file_option(p):
p.add_argument('--log-file', default=None, type=str,
help="Write the log to file. Default(None) will write to stdout.")
return p
def add_resolved_tool_contract_option(p):
p.add_argument(RESOLVED_TOOL_CONTRACT_OPTION, type=str,
help="Run Tool directly from a PacBio Resolved tool contract")
return p
def add_emit_tool_contract_option(p):
p.add_argument(EMIT_TOOL_CONTRACT_OPTION, action="store_true",
default=False,
help="Emit Tool Contract to stdout")
return p
def add_nproc_option(p, default=1):
p.add_argument("-j", "--nproc", type=int, default=default,
help="Number of processors to use")
return p
def add_base_options(p, default_level='INFO'):
"""Add the core logging options to the parser and set the default log level
If you don't want the default log behavior to go to stdout, then set
the default log level to be "ERROR". This will essentially suppress all
output to stdout.
Default behavior will only emit to stderr. This is essentially a '--quiet'
default mode.
my-tool --my-opt=1234 file_in.txt
To override the default behavior:
my-tool --my-opt=1234 --log-level=INFO file_in.txt
Or write the file to an explict log file
my-tool --my-opt=1234 --log-level=DEBUG --log-file=file.log file_in.txt
"""
# This should automatically/required be added to be added from get_default_argparser
add_log_file_option(p)
p_log = p.add_mutually_exclusive_group()
add_log_verbose_option(add_log_quiet_option(add_log_debug_option(
add_log_level_option(p_log, default_level=default_level))))
return p
def add_common_options(p, default_level='INFO'):
"""
New model for 3.1 release. This should replace add_base_options
"""
return add_log_quiet_option(add_log_debug_option(add_log_level_option(add_log_file_option(p), default_level=default_level)))
def add_base_options_with_emit_tool_contract(p, default_level='INFO'):
# can't use compose here because of circular imports via parser
return add_base_options(add_resolved_tool_contract_option(add_emit_tool_contract_option(p)), default_level=default_level)
def _to_print_message_action(msg):
class PrintMessageAction(argparse.Action):
"""Print message and exit"""
def __call__(self, parser, namespace, values, option_string=None):
sys.stdout.write(msg + "\n")
sys.exit(0)
return PrintMessageAction
def add_subcomponent_versions_option(p, subcomponents):
"""Add subcomponents to a subparser to provide more information
about the tools dependencies.
Subcomponents must be provided as a list of tuples (component, version)
"""
max_length = max(len(x) for x, _ in subcomponents)
pad = 2
msg = "\n" .join([" : ".join([x.rjust(max_length + pad), y]) for x, y in subcomponents])
action = _to_print_message_action(msg)
p.add_argument("--versions",
nargs=0,
help="Show versions of individual components",
action=action)
return p
|
[
"sys.stdout.write",
"logging.getLevelName",
"sys.exit"
] |
[((1158, 1193), 'logging.getLevelName', 'logging.getLevelName', (['default_level'], {}), '(default_level)\n', (1178, 1193), False, 'import logging\n'), ((3847, 3875), 'sys.stdout.write', 'sys.stdout.write', (["(msg + '\\n')"], {}), "(msg + '\\n')\n", (3863, 3875), False, 'import sys\n'), ((3888, 3899), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3896, 3899), False, 'import sys\n')]
|
from contentbase.upgrader import upgrade_step
@upgrade_step('annotation', '1', '2')
def annotation_1_2(value, system):
# https://github.com/ClinGen/clincoded/issues/453
value['status'] = 'in progress'
@upgrade_step('annotation', '2', '3')
def annotation_2_3(value, system):
# https://github.com/ClinGen/clincoded/issues/1507
# Add affiliation property and update schema version
return
@upgrade_step('annotation', '3', '4')
def annotation_3_4(value, system):
# https://github.com/ClinGen/clincoded/issues/1486
# Add articleNotes property and update schema version
return
|
[
"contentbase.upgrader.upgrade_step"
] |
[((49, 85), 'contentbase.upgrader.upgrade_step', 'upgrade_step', (['"""annotation"""', '"""1"""', '"""2"""'], {}), "('annotation', '1', '2')\n", (61, 85), False, 'from contentbase.upgrader import upgrade_step\n'), ((214, 250), 'contentbase.upgrader.upgrade_step', 'upgrade_step', (['"""annotation"""', '"""2"""', '"""3"""'], {}), "('annotation', '2', '3')\n", (226, 250), False, 'from contentbase.upgrader import upgrade_step\n'), ((411, 447), 'contentbase.upgrader.upgrade_step', 'upgrade_step', (['"""annotation"""', '"""3"""', '"""4"""'], {}), "('annotation', '3', '4')\n", (423, 447), False, 'from contentbase.upgrader import upgrade_step\n')]
|
import multiprocessing as mp
import pandas as pd
from selenium import webdriver
from time import sleep
import config
import json
from utils import access_url
import argparse
def crawler(cate_name,item_links):
driver = webdriver.Chrome()
fb_li = []
counter = 0
for item_link in item_links:
access_url(driver,item_link.replace('now','foody'))
fb_n = 1
while True:
while True:
try:
more_fb_button = driver.find_element_by_xpath(config.more_fb_bt.format('/'.join(item_link.split('/')[3:])))
more_fb_button.click()
except:
break
try:
dic = {}
dic['category'] = cate_name
dic['text'] = driver.find_element_by_xpath(config.text_element.format('/'.join(item_link.split('/')[3:]),fb_n)).text
dic['star'] = driver.find_element_by_xpath(config.star_element.format('/'.join(item_link.split('/')[3:]),fb_n)).text
fb_li.append(dic)
df = pd.DataFrame(fb_li)
df.to_csv('./_data/{}.csv'.format(cate_name))
counter += 1
except:
break
fb_n += 1
print(counter)
def multiprocess(data):
parser = argparse.ArgumentParser(description='Multiprocessing!!!')
parser.add_argument("-p","--processes", help="Number of processes for Multiprocessing.", type=int)
args = parser.parse_args()
pool = mp.Pool(args.processes)
pool.starmap(crawler,data.items())
if __name__ == '__main__':
with open('item_links.json','r') as f:
data = json.load(f)
multiprocess(data)
|
[
"pandas.DataFrame",
"json.load",
"argparse.ArgumentParser",
"selenium.webdriver.Chrome",
"multiprocessing.Pool"
] |
[((221, 239), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (237, 239), False, 'from selenium import webdriver\n'), ((1061, 1118), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Multiprocessing!!!"""'}), "(description='Multiprocessing!!!')\n", (1084, 1118), False, 'import argparse\n'), ((1255, 1278), 'multiprocessing.Pool', 'mp.Pool', (['args.processes'], {}), '(args.processes)\n', (1262, 1278), True, 'import multiprocessing as mp\n'), ((1393, 1405), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1402, 1405), False, 'import json\n'), ((888, 907), 'pandas.DataFrame', 'pd.DataFrame', (['fb_li'], {}), '(fb_li)\n', (900, 907), True, 'import pandas as pd\n')]
|
from functools import partial
from typing import List, Sequence, Tuple
import einops
import torch
import torch.nn.functional as F
from pytorch_wavelets import DWTForward, DWTInverse
from torch import nn
from mmderain.models.common import get_rcp, make_layer, sizeof
from mmderain.models.layers import SELayer
from mmderain.models.registry import BACKBONES
class ConvAct(nn.Module):
"""2D Convolution + Activation"""
def __init__(self, in_planes: int, out_planes: int) -> None:
super().__init__()
self.model = nn.Sequential(
nn.Conv2d(in_planes, out_planes, kernel_size=3, padding=1, padding_mode='reflect'),
nn.ReLU(inplace=True)
)
def forward(self, x: torch.Tensor) -> torch.Tensor:
return self.model(x)
class SEResBlock(nn.Module):
"""SE-ResBlock"""
def __init__(self, planes: int) -> None:
super().__init__()
self.model = nn.Sequential(
nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1),
nn.ReLU(inplace=True),
SELayer(planes, reduction=1)
)
def forward(self, x: torch.Tensor) -> torch.Tensor:
return x + self.model(x)
class SRiR(nn.Module):
"""SE-ResBlock in Residual Block"""
def __init__(self, planes: int, n_resblock: int) -> None:
super().__init__()
self.model = nn.Sequential(
*[SEResBlock(planes) for _ in range(n_resblock)],
nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1)
)
self.act = nn.ReLU(inplace=True)
def forward(self, x: torch.Tensor) -> torch.Tensor:
out = x + self.model(x)
out = self.act(out)
return out
class RCPEM(nn.Module):
"""RCP Extration Module"""
def __init__(self, in_planes: int, out_planes: int, n_resblock: int) -> None:
super().__init__()
self.model = nn.Sequential(
ConvAct(in_planes, out_planes//2),
ConvAct(out_planes//2, out_planes),
SRiR(out_planes, n_resblock)
)
def forward(self, x: torch.Tensor) -> torch.Tensor:
feat = get_rcp(x).repeat(1, x.size(1), 1, 1)
return self.model(feat)
class IFM(nn.Module):
"""Interactive Fusion Module"""
def __init__(self, planes: int) -> None:
super().__init__()
self.conv0 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1)
self.conv1 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1)
self.conv2 = nn.Conv2d(planes*2, 2, kernel_size=3, stride=1, padding=1)
self.conv3 = nn.Conv2d(planes*2, 2, kernel_size=3, stride=1, padding=1)
def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
feat_x0 = self.conv0(x)
feat_y0 = self.conv1(y)
w0 = torch.sigmoid(feat_x0 * feat_y0)
x_prime = x * w0
y_prime = y * w0
wx1, wx2 = torch.chunk(self.conv2(torch.cat([x, x_prime], dim=1)), chunks=2, dim=1)
wy1, wy2 = torch.chunk(self.conv3(torch.cat([x, x_prime], dim=1)), chunks=2, dim=1)
out_x = x*wx1 + x_prime*wx2
out_y = y*wy1 + y_prime*wy2
out = torch.cat([out_x, out_y], dim=1)
return out
class WMLMDecomposition(nn.Module):
def __init__(self, planes: int, is_first_level: bool) -> None:
super().__init__()
self.is_first_level = is_first_level
self.dwt = DWTForward(J=1, wave='haar')
self.conv = ConvAct(planes*2, planes) if is_first_level else ConvAct(planes*4, planes)
def forward(self, x: torch.Tensor) -> torch.Tensor:
if self.is_first_level:
return self.conv(x)
else:
return self.conv(self._decomp(x))
def _decomp(self, x: torch.Tensor) -> torch.Tensor:
xl, xh = self.dwt(x)
xl = xl.unsqueeze(2)
feat = torch.cat([xh[0], xl], dim=2)
out = einops.rearrange(feat, 'b c n h w -> b (n c) h w')
return out
class WMLMFusion(nn.Module):
def __init__(self, planes: int) -> None:
super().__init__()
self.idwt = DWTInverse(wave='haar')
self.conv = ConvAct(planes, planes*4)
self.upsample = nn.Sequential(
nn.ReflectionPad2d(1),
nn.ConvTranspose2d(planes, planes, kernel_size=3, stride=2),
nn.ReLU(inplace=True)
)
self.last = nn.Sequential(
SEResBlock(planes),
nn.ReLU(inplace=True)
)
def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
x = self.conv(x)
x = self._reconstruct(x)
x = self.upsample(x)
x = F.interpolate(x, size=sizeof(y))
y = x + y
return self.last(y)
def _reconstruct(self, x: torch.Tensor) -> torch.Tensor:
feat = einops.rearrange(x, 'b (c1 c2) h w -> b c1 c2 h w', c2=4)
xl, xh = torch.split(feat, [1, 3], dim=2)
xl = xl.squeeze(dim=2)
out = self.idwt((xl, [xh]))
return out
class WMLM(nn.Module):
"""Wavelet-based Multi-level Module"""
def __init__(self, planes: int, n_level: int, n_srir: int, n_resblock: int) -> None:
super().__init__()
self.decomposition = nn.ModuleList([
WMLMDecomposition(planes, is_first_level=(i == 0))
for i in range(n_level)
])
self.trunks = nn.ModuleList([
make_layer(SRiR, n_srir, planes=planes, n_resblock=n_resblock)
for _ in range(n_level)
])
self.fusions = nn.ModuleList([
WMLMFusion(planes)
for _ in range(n_level-1)
])
def forward(self, x: torch.Tensor) -> torch.Tensor:
init_features = []
prev = x
for model in self.decomposition: # level 0, level 1, ... (top-down)
prev = model(prev)
init_features.append(prev)
out_features = []
for init_feat, model in zip(init_features, self.trunks):
feat = model(init_feat)
out_features.append(feat)
out = out_features.pop() # feature from bottom level
for model in self.fusions:
out = model(out, out_features.pop()) # bottom-up fusion
return out
class Subnet(nn.Module):
def __init__(
self,
in_channels: int,
out_channels: int,
mid_channels: int,
n_level: int,
n_srir: int,
n_resblock: int,
index: int
) -> None:
super().__init__()
if index > 0:
conv3x3 = partial(nn.Conv2d, kernel_size=3, stride=1, padding=1)
self.fusion1 = conv3x3(mid_channels*(index+1), mid_channels)
self.fusion2 = conv3x3(mid_channels*(index+1), mid_channels)
else:
self.fusion1 = nn.Identity()
self.fusion2 = nn.Identity()
self.rcpem = RCPEM(in_channels, mid_channels, n_resblock)
self.ifm = IFM(mid_channels)
self.wmlm = WMLM(mid_channels, n_level, n_srir, n_resblock)
self.last = nn.Conv2d(mid_channels, out_channels, kernel_size=3, stride=1, padding=1)
def forward(self, x: torch.Tensor, feats: Sequence[torch.Tensor]) -> Tuple[torch.Tensor]:
rcp_feat = self.rcpem(x)
feat0 = self.fusion1(torch.cat(feats, dim=1))
feat1 = self.ifm(feat0, rcp_feat)
feat2 = self.wmlm(feat1)
feat3 = self.fusion2(torch.cat([feat2] + feats[:-1], dim=1))
out = self.last(feat3)
return out, feat2
@BACKBONES.register_module()
class SPDNet(nn.Module):
"""SPDNet Network Structure
Paper: Structure-Preserving Deraining with Residue Channel Prior Guidance
Official Code: https://github.com/Joyies/SPDNet
Args:
in_channels (int): Channel number of inputs.
out_channels (int): Channel number of outputs.
mid_channels (int): Channel number of intermediate features. Default: 32.
n_stage (int): Number of stages. Default: 3.
n_level (int): Number of levels in WMLM. Default: 3.
n_srir (int): Number of SRiR blocks of each level in WMLM. Default: 3.
n_resblock (int): Number of Resblocks in SRiR Module. Default: 3.
"""
def __init__(
self,
in_channels: int,
out_channels: int,
mid_channels: int = 32,
n_stage: int = 3,
n_level: int = 3,
n_srir: int = 3,
n_resblock: int = 3
) -> None:
super().__init__()
self.head = nn.Sequential(
ConvAct(in_channels, mid_channels//2),
ConvAct(mid_channels//2, mid_channels),
)
self.subnets = nn.ModuleList([
Subnet(in_channels, out_channels, mid_channels, n_level, n_srir, n_resblock, i)
for i in range(n_stage)
])
def forward(self, x: torch.Tensor) -> List[torch.Tensor]:
outputs = []
prev_out = x
init_feat = self.head(x)
features = [init_feat]
for net in self.subnets:
out, feat = net(prev_out, features)
prev_out = out
outputs.append(out)
features.insert(0, feat)
return outputs
|
[
"mmderain.models.registry.BACKBONES.register_module",
"functools.partial",
"torch.nn.ReLU",
"torch.nn.ConvTranspose2d",
"mmderain.models.common.get_rcp",
"torch.nn.ReflectionPad2d",
"mmderain.models.common.make_layer",
"torch.nn.Conv2d",
"torch.split",
"torch.cat",
"torch.sigmoid",
"pytorch_wavelets.DWTInverse",
"einops.rearrange",
"mmderain.models.common.sizeof",
"mmderain.models.layers.SELayer",
"torch.nn.Identity",
"pytorch_wavelets.DWTForward"
] |
[((7589, 7616), 'mmderain.models.registry.BACKBONES.register_module', 'BACKBONES.register_module', ([], {}), '()\n', (7614, 7616), False, 'from mmderain.models.registry import BACKBONES\n'), ((1661, 1682), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1668, 1682), False, 'from torch import nn\n'), ((2466, 2527), 'torch.nn.Conv2d', 'nn.Conv2d', (['planes', 'planes'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(planes, planes, kernel_size=3, stride=1, padding=1)\n', (2475, 2527), False, 'from torch import nn\n'), ((2549, 2610), 'torch.nn.Conv2d', 'nn.Conv2d', (['planes', 'planes'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(planes, planes, kernel_size=3, stride=1, padding=1)\n', (2558, 2610), False, 'from torch import nn\n'), ((2633, 2693), 'torch.nn.Conv2d', 'nn.Conv2d', (['(planes * 2)', '(2)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(planes * 2, 2, kernel_size=3, stride=1, padding=1)\n', (2642, 2693), False, 'from torch import nn\n'), ((2713, 2773), 'torch.nn.Conv2d', 'nn.Conv2d', (['(planes * 2)', '(2)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(planes * 2, 2, kernel_size=3, stride=1, padding=1)\n', (2722, 2773), False, 'from torch import nn\n'), ((2923, 2955), 'torch.sigmoid', 'torch.sigmoid', (['(feat_x0 * feat_y0)'], {}), '(feat_x0 * feat_y0)\n', (2936, 2955), False, 'import torch\n'), ((3280, 3312), 'torch.cat', 'torch.cat', (['[out_x, out_y]'], {'dim': '(1)'}), '([out_x, out_y], dim=1)\n', (3289, 3312), False, 'import torch\n'), ((3531, 3559), 'pytorch_wavelets.DWTForward', 'DWTForward', ([], {'J': '(1)', 'wave': '"""haar"""'}), "(J=1, wave='haar')\n", (3541, 3559), False, 'from pytorch_wavelets import DWTForward, DWTInverse\n'), ((3966, 3995), 'torch.cat', 'torch.cat', (['[xh[0], xl]'], {'dim': '(2)'}), '([xh[0], xl], dim=2)\n', (3975, 3995), False, 'import torch\n'), ((4010, 4060), 'einops.rearrange', 'einops.rearrange', (['feat', '"""b c n h w -> b (n c) h w"""'], {}), "(feat, 'b c n h w -> b (n c) h w')\n", (4026, 4060), False, 'import einops\n'), ((4205, 4228), 'pytorch_wavelets.DWTInverse', 'DWTInverse', ([], {'wave': '"""haar"""'}), "(wave='haar')\n", (4215, 4228), False, 'from pytorch_wavelets import DWTForward, DWTInverse\n'), ((4907, 4964), 'einops.rearrange', 'einops.rearrange', (['x', '"""b (c1 c2) h w -> b c1 c2 h w"""'], {'c2': '(4)'}), "(x, 'b (c1 c2) h w -> b c1 c2 h w', c2=4)\n", (4923, 4964), False, 'import einops\n'), ((4982, 5014), 'torch.split', 'torch.split', (['feat', '[1, 3]'], {'dim': '(2)'}), '(feat, [1, 3], dim=2)\n', (4993, 5014), False, 'import torch\n'), ((7129, 7202), 'torch.nn.Conv2d', 'nn.Conv2d', (['mid_channels', 'out_channels'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(mid_channels, out_channels, kernel_size=3, stride=1, padding=1)\n', (7138, 7202), False, 'from torch import nn\n'), ((566, 653), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_planes', 'out_planes'], {'kernel_size': '(3)', 'padding': '(1)', 'padding_mode': '"""reflect"""'}), "(in_planes, out_planes, kernel_size=3, padding=1, padding_mode=\n 'reflect')\n", (575, 653), False, 'from torch import nn\n'), ((662, 683), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (669, 683), False, 'from torch import nn\n'), ((955, 1016), 'torch.nn.Conv2d', 'nn.Conv2d', (['planes', 'planes'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(planes, planes, kernel_size=3, stride=1, padding=1)\n', (964, 1016), False, 'from torch import nn\n'), ((1030, 1051), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1037, 1051), False, 'from torch import nn\n'), ((1065, 1126), 'torch.nn.Conv2d', 'nn.Conv2d', (['planes', 'planes'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(planes, planes, kernel_size=3, stride=1, padding=1)\n', (1074, 1126), False, 'from torch import nn\n'), ((1140, 1161), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1147, 1161), False, 'from torch import nn\n'), ((1175, 1203), 'mmderain.models.layers.SELayer', 'SELayer', (['planes'], {'reduction': '(1)'}), '(planes, reduction=1)\n', (1182, 1203), False, 'from mmderain.models.layers import SELayer\n'), ((1570, 1631), 'torch.nn.Conv2d', 'nn.Conv2d', (['planes', 'planes'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(planes, planes, kernel_size=3, stride=1, padding=1)\n', (1579, 1631), False, 'from torch import nn\n'), ((4327, 4348), 'torch.nn.ReflectionPad2d', 'nn.ReflectionPad2d', (['(1)'], {}), '(1)\n', (4345, 4348), False, 'from torch import nn\n'), ((4362, 4421), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['planes', 'planes'], {'kernel_size': '(3)', 'stride': '(2)'}), '(planes, planes, kernel_size=3, stride=2)\n', (4380, 4421), False, 'from torch import nn\n'), ((4435, 4456), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (4442, 4456), False, 'from torch import nn\n'), ((4546, 4567), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (4553, 4567), False, 'from torch import nn\n'), ((6640, 6694), 'functools.partial', 'partial', (['nn.Conv2d'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(nn.Conv2d, kernel_size=3, stride=1, padding=1)\n', (6647, 6694), False, 'from functools import partial\n'), ((6882, 6895), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (6893, 6895), False, 'from torch import nn\n'), ((6923, 6936), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (6934, 6936), False, 'from torch import nn\n'), ((7360, 7383), 'torch.cat', 'torch.cat', (['feats'], {'dim': '(1)'}), '(feats, dim=1)\n', (7369, 7383), False, 'import torch\n'), ((7489, 7527), 'torch.cat', 'torch.cat', (['([feat2] + feats[:-1])'], {'dim': '(1)'}), '([feat2] + feats[:-1], dim=1)\n', (7498, 7527), False, 'import torch\n'), ((2241, 2251), 'mmderain.models.common.get_rcp', 'get_rcp', (['x'], {}), '(x)\n', (2248, 2251), False, 'from mmderain.models.common import get_rcp, make_layer, sizeof\n'), ((3050, 3080), 'torch.cat', 'torch.cat', (['[x, x_prime]'], {'dim': '(1)'}), '([x, x_prime], dim=1)\n', (3059, 3080), False, 'import torch\n'), ((3142, 3172), 'torch.cat', 'torch.cat', (['[x, x_prime]'], {'dim': '(1)'}), '([x, x_prime], dim=1)\n', (3151, 3172), False, 'import torch\n'), ((4773, 4782), 'mmderain.models.common.sizeof', 'sizeof', (['y'], {}), '(y)\n', (4779, 4782), False, 'from mmderain.models.common import get_rcp, make_layer, sizeof\n'), ((5493, 5555), 'mmderain.models.common.make_layer', 'make_layer', (['SRiR', 'n_srir'], {'planes': 'planes', 'n_resblock': 'n_resblock'}), '(SRiR, n_srir, planes=planes, n_resblock=n_resblock)\n', (5503, 5555), False, 'from mmderain.models.common import get_rcp, make_layer, sizeof\n')]
|
import ipywidgets as ipw
def get_start_widget(appbase, jupbase):
#http://fontawesome.io/icons/
template = """
<table>
<tr>
<th style="text-align:center"></th>
<th style="width:70px" rowspan=2></th>
<th style="text-align:center"></th>
<th style="width:70px" rowspan=2></th>
<th style="text-align:center"></th>
<tr>
<td valign="top"><ul>
<li><a href="{appbase}/examples.ipynb" target="_blank">Look at the examples</a>
</ul></td>
<td valign="top"><ul>
<li><a href="{appbase}/import_from_cod.ipynb" target="_blank">Upload from the CoD</a>
</ul></td>
<td valign="top"><ul>
<li><a href="{appbase}/upload_structure.ipynb" target="_blank">Upload from computer</a>
<li><a href="{appbase}/upload_structures.ipynb" target="_blank">Upload from computer (multi) </a>
</ul></td>
</tr></table>
"""
html = template.format(appbase=appbase, jupbase=jupbase)
return ipw.HTML(html)
#EOF
|
[
"ipywidgets.HTML"
] |
[((965, 979), 'ipywidgets.HTML', 'ipw.HTML', (['html'], {}), '(html)\n', (973, 979), True, 'import ipywidgets as ipw\n')]
|
###############################
#
# Created by <NAME>
# 3/16/2021
#
###############################
import unittest
import torch as t
import ffeat
from ffeat.strategies import mutation
class ReplaceWithNormalTest(unittest.TestCase):
def test_norm(self):
m = mutation.Replace(t.distributions.Normal(0.0, 5.0), 0.02)
pop = t.randn((1000,400))
(newpop,), kargs = m(pop)
self.assertEqual(newpop.shape, (1000,400))
self.assertIs(pop, newpop)
def test_not_inplace(self):
m = mutation.Replace(t.distributions.Normal(0.0, 5.0), 0.02, in_place=False)
pop = t.randn((1000,400))
(newpop,), kargs = m(pop)
self.assertEqual(newpop.shape, (1000,400))
self.assertIsNot(pop, newpop)
def test_rate_callable(self):
m = mutation.Replace(t.distributions.Normal(0.0, 5.0), ffeat.utils.decay.Linear(0.1, 0.01))
pop = t.randn((1000,400))
(newpop,), kargs = m(pop, iteration=13, max_iteration=23)
self.assertEqual(newpop.shape, (1000,400))
self.assertIs(pop, newpop)
def test_rate_high(self):
with self.assertRaises(ValueError):
mutation.Replace(t.distributions.Normal(0.0, 5.0), 1.6)
def test_rate_low(self):
with self.assertRaises(ValueError):
mutation.Replace(t.distributions.Normal(0.0, 5.0), 1.6)
def test_rate_high_callable(self):
m = mutation.Replace(t.distributions.Normal(0.0, 5.0), ffeat.utils.decay.Linear(1.2, 1.4))
pop = t.randn((1000,400))
with self.assertRaises(ValueError):
m(pop, iteration=13, max_iteration=23)
def test_rate_low_callable(self):
m = mutation.Replace(t.distributions.Normal(0.0, 5.0), ffeat.utils.decay.Linear(-1.6, -0.2))
pop = t.randn((1000,400))
with self.assertRaises(ValueError):
m(pop, iteration=13, max_iteration=23)
def test_invalid_distribution_shape(self):
m = mutation.Replace(t.distributions.Normal(0.0, t.ones((413,))), 0.02)
pop = t.randn((1000,400))
with self.assertRaises(ValueError):
m(pop, iteration=13, max_iteration=23)
@unittest.skipIf(not t.cuda.is_available(), 'CUDA not available')
def test_cuda(self):
m = mutation.Replace(t.distributions.Normal(0.0, 5.0), 0.02)
pop = t.randn((1000,400))
(newpop,), kargs = m(pop)
self.assertEqual(newpop.shape, (1000,400))
self.assertIs(pop, newpop)
@unittest.skipIf(not t.cuda.is_available(), 'CUDA not available')
def test_not_inplace_cuda(self):
m = mutation.Replace(t.distributions.Normal(0.0, 5.0), 0.02, in_place=False)
pop = t.randn((1000,400))
(newpop,), kargs = m(pop)
self.assertEqual(newpop.shape, (1000,400))
self.assertIsNot(pop, newpop)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"torch.ones",
"torch.randn",
"torch.cuda.is_available",
"torch.distributions.Normal",
"ffeat.utils.decay.Linear"
] |
[((2861, 2876), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2874, 2876), False, 'import unittest\n'), ((343, 363), 'torch.randn', 't.randn', (['(1000, 400)'], {}), '((1000, 400))\n', (350, 363), True, 'import torch as t\n'), ((615, 635), 'torch.randn', 't.randn', (['(1000, 400)'], {}), '((1000, 400))\n', (622, 635), True, 'import torch as t\n'), ((907, 927), 'torch.randn', 't.randn', (['(1000, 400)'], {}), '((1000, 400))\n', (914, 927), True, 'import torch as t\n'), ((1517, 1537), 'torch.randn', 't.randn', (['(1000, 400)'], {}), '((1000, 400))\n', (1524, 1537), True, 'import torch as t\n'), ((1786, 1806), 'torch.randn', 't.randn', (['(1000, 400)'], {}), '((1000, 400))\n', (1793, 1806), True, 'import torch as t\n'), ((2043, 2063), 'torch.randn', 't.randn', (['(1000, 400)'], {}), '((1000, 400))\n', (2050, 2063), True, 'import torch as t\n'), ((2338, 2358), 'torch.randn', 't.randn', (['(1000, 400)'], {}), '((1000, 400))\n', (2345, 2358), True, 'import torch as t\n'), ((2685, 2705), 'torch.randn', 't.randn', (['(1000, 400)'], {}), '((1000, 400))\n', (2692, 2705), True, 'import torch as t\n'), ((289, 321), 'torch.distributions.Normal', 't.distributions.Normal', (['(0.0)', '(5.0)'], {}), '(0.0, 5.0)\n', (311, 321), True, 'import torch as t\n'), ((545, 577), 'torch.distributions.Normal', 't.distributions.Normal', (['(0.0)', '(5.0)'], {}), '(0.0, 5.0)\n', (567, 577), True, 'import torch as t\n'), ((822, 854), 'torch.distributions.Normal', 't.distributions.Normal', (['(0.0)', '(5.0)'], {}), '(0.0, 5.0)\n', (844, 854), True, 'import torch as t\n'), ((856, 891), 'ffeat.utils.decay.Linear', 'ffeat.utils.decay.Linear', (['(0.1)', '(0.01)'], {}), '(0.1, 0.01)\n', (880, 891), False, 'import ffeat\n'), ((1433, 1465), 'torch.distributions.Normal', 't.distributions.Normal', (['(0.0)', '(5.0)'], {}), '(0.0, 5.0)\n', (1455, 1465), True, 'import torch as t\n'), ((1467, 1501), 'ffeat.utils.decay.Linear', 'ffeat.utils.decay.Linear', (['(1.2)', '(1.4)'], {}), '(1.2, 1.4)\n', (1491, 1501), False, 'import ffeat\n'), ((1700, 1732), 'torch.distributions.Normal', 't.distributions.Normal', (['(0.0)', '(5.0)'], {}), '(0.0, 5.0)\n', (1722, 1732), True, 'import torch as t\n'), ((1734, 1770), 'ffeat.utils.decay.Linear', 'ffeat.utils.decay.Linear', (['(-1.6)', '(-0.2)'], {}), '(-1.6, -0.2)\n', (1758, 1770), False, 'import ffeat\n'), ((2284, 2316), 'torch.distributions.Normal', 't.distributions.Normal', (['(0.0)', '(5.0)'], {}), '(0.0, 5.0)\n', (2306, 2316), True, 'import torch as t\n'), ((2185, 2206), 'torch.cuda.is_available', 't.cuda.is_available', ([], {}), '()\n', (2204, 2206), True, 'import torch as t\n'), ((2615, 2647), 'torch.distributions.Normal', 't.distributions.Normal', (['(0.0)', '(5.0)'], {}), '(0.0, 5.0)\n', (2637, 2647), True, 'import torch as t\n'), ((2504, 2525), 'torch.cuda.is_available', 't.cuda.is_available', ([], {}), '()\n', (2523, 2525), True, 'import torch as t\n'), ((1183, 1215), 'torch.distributions.Normal', 't.distributions.Normal', (['(0.0)', '(5.0)'], {}), '(0.0, 5.0)\n', (1205, 1215), True, 'import torch as t\n'), ((1325, 1357), 'torch.distributions.Normal', 't.distributions.Normal', (['(0.0)', '(5.0)'], {}), '(0.0, 5.0)\n', (1347, 1357), True, 'import torch as t\n'), ((2006, 2020), 'torch.ones', 't.ones', (['(413,)'], {}), '((413,))\n', (2012, 2020), True, 'import torch as t\n')]
|
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
import eikon as ek
from pandas import DataFrame
import json
from datetime import datetime
import os
import dateutil
#### SET EIKON APP KEY ####
ek.set_app_key('SET_APP_KEY_HERE')
##### NEWS #####
class NewsHeadlineView(APIView):
"""
GET /news/headlines
"""
def get(self, request):
queryString = request.query_params.get('queryString', None)
if queryString == None : return Response("queryString not provided. Please add queryString in query parameters.", status=400) # 필수 query parameter인 queryString 가 누락된 경우에 대한 응답
count = request.query_params.get('count', None)
if count != None : count = int(count)
else : count = 10
dateFrom = request.query_params.get('dateFrom', None)
dateTo = request.query_params.get('dateTo', None)
# EikonError 처리
try:
result = ek.get_news_headlines(queryString, count, dateFrom, dateTo)
except dateutil.parser._parser.ParserError:
return Response('Invalid Date', status=400)
except ek.eikonError.EikonError as err:
return Response(str(err), status=400)
return Response(json.loads(result.to_json(orient='index', date_format='iso', date_unit='s')), status=200) #pandas dataframe 객체에 to_json 사용시 redundant quote 가 추가되므로 json.loads를 적용한다.
class NewsStoryView(APIView):
"""
GET /news/stories
"""
def get(self, request):
storyId = request.query_params.get('storyId', None)
if storyId == None : return Response("storyId not provided. Please add storyId in query parameters.", status=400) # 필수 query parameter인 storyId 가 누락된 경우에 대한 응답
# EikonError 처리
try:
result = ek.get_news_story(storyId)
except ek.eikonError.EikonError as err:
return Response(str(err), status=400)
return Response(result, status=200)
##### DATA #####
class DataView(APIView):
"""
GET /data
"""
def get(self, request):
instruments = request.query_params.get('instruments', None).replace(" ", "").split(",")
if instruments == None : return Response("instruments not provided. Please add instruments in query parameters.", status=400) # 필수 query parameter인 instruments 가 누락된 경우에 대한 응답
fields = request.query_params.get('fields', None).replace(" ", "").split(",")
if fields == None : return Response("fields not provided. Please add fields in query parameters.", status=400) # 필수 query parameter인 fields 가 누락된 경우에 대한 응답
# EikonError 처리
try:
result = ek.get_data(instruments, fields)[0]
except ek.eikonError.EikonError as err:
return Response(str(err), status=400)
return Response(json.loads(result.to_json(orient='index')), status=200) # get_data API 반환 값이 json이 아닌 tuple 형태여서 to_json을 사용하기 위해 [0] 인덱싱.. redundant quote 제거를 위해 json.loads 적용
class TimeSeriesDataView(APIView):
"""
GET /data/timeseries
"""
def get(self, request):
instruments = request.query_params.get('instruments', None).replace(" ", "").split(",")
if instruments == None : return Response("instruments not provided. Please add instruments in query parameters.", status=400) # 필수 query parameter인 instruments 가 누락된 경우에 대한 응답
fields = request.query_params.get('fields', '*').replace(" ", "").split(",")
dateFrom = request.query_params.get('dateFrom', None)
dateTo = request.query_params.get('dateTo', None)
interval = request.query_params.get('interval', None)
# Invalid RIC에 대한 응답
try:
result = ek.get_timeseries(instruments, fields, start_date=dateFrom, end_date=dateTo, interval=interval)
except ek.eikonError.EikonError as err:
return Response(str(err), status=400)
# 엑셀 파일 저장
directory = f'{os.path.dirname(__file__)}\\data\\timeseries\\'
try:
if not os.path.exists(directory):
os.makedirs(directory)
except OSError as err:
print(str(err))
result.to_excel(f'{directory}\\{datetime.today().strftime("%Y%m%d%H%M%S")}_{instruments}.xlsx')
#interval이 minute, hour, daily, weekly, monthly, quarterly, yearly인 경우 (tick이 아닌 경우)
if interval != 'tick' :
return Response(json.loads(result.to_json(orient='index', date_format='iso', date_unit='ms')), status=200) #pandas dataframe 객체에 to_json 사용시 redundant quote 가 추가되므로 json.loads를 적용한다.
#interval이 tick인 경우 (이 경우 index인 시각이 중복되는 경우에 대한 처리가 필요함)
elif interval == 'tick' :
dictByColumns = result.apply(dict, axis=1) #column(VALUE, VOLUME)에 dictionary 적용
result = dictByColumns.groupby(dictByColumns.index).apply(list) #index(시각) 기준으로 list 적용
return Response(json.loads(result.to_json(orient='index', date_format='iso', date_unit='ms')), status=200) #pandas dataframe 객체에 to_json 사용시 redundant quote 가 추가되므로 json.loads를 적용한다.
|
[
"os.makedirs",
"datetime.datetime.today",
"os.path.dirname",
"eikon.get_news_headlines",
"os.path.exists",
"eikon.get_news_story",
"rest_framework.response.Response",
"eikon.get_timeseries",
"eikon.set_app_key",
"eikon.get_data"
] |
[((265, 299), 'eikon.set_app_key', 'ek.set_app_key', (['"""SET_APP_KEY_HERE"""'], {}), "('SET_APP_KEY_HERE')\n", (279, 299), True, 'import eikon as ek\n'), ((1963, 1991), 'rest_framework.response.Response', 'Response', (['result'], {'status': '(200)'}), '(result, status=200)\n', (1971, 1991), False, 'from rest_framework.response import Response\n'), ((527, 629), 'rest_framework.response.Response', 'Response', (['"""queryString not provided. Please add queryString in query parameters."""'], {'status': '(400)'}), "(\n 'queryString not provided. Please add queryString in query parameters.',\n status=400)\n", (535, 629), False, 'from rest_framework.response import Response\n'), ((978, 1037), 'eikon.get_news_headlines', 'ek.get_news_headlines', (['queryString', 'count', 'dateFrom', 'dateTo'], {}), '(queryString, count, dateFrom, dateTo)\n', (999, 1037), True, 'import eikon as ek\n'), ((1633, 1722), 'rest_framework.response.Response', 'Response', (['"""storyId not provided. Please add storyId in query parameters."""'], {'status': '(400)'}), "('storyId not provided. Please add storyId in query parameters.',\n status=400)\n", (1641, 1722), False, 'from rest_framework.response import Response\n'), ((1823, 1849), 'eikon.get_news_story', 'ek.get_news_story', (['storyId'], {}), '(storyId)\n', (1840, 1849), True, 'import eikon as ek\n'), ((2230, 2332), 'rest_framework.response.Response', 'Response', (['"""instruments not provided. Please add instruments in query parameters."""'], {'status': '(400)'}), "(\n 'instruments not provided. Please add instruments in query parameters.',\n status=400)\n", (2238, 2332), False, 'from rest_framework.response import Response\n'), ((2495, 2582), 'rest_framework.response.Response', 'Response', (['"""fields not provided. Please add fields in query parameters."""'], {'status': '(400)'}), "('fields not provided. Please add fields in query parameters.',\n status=400)\n", (2503, 2582), False, 'from rest_framework.response import Response\n'), ((3252, 3354), 'rest_framework.response.Response', 'Response', (['"""instruments not provided. Please add instruments in query parameters."""'], {'status': '(400)'}), "(\n 'instruments not provided. Please add instruments in query parameters.',\n status=400)\n", (3260, 3354), False, 'from rest_framework.response import Response\n'), ((3736, 3835), 'eikon.get_timeseries', 'ek.get_timeseries', (['instruments', 'fields'], {'start_date': 'dateFrom', 'end_date': 'dateTo', 'interval': 'interval'}), '(instruments, fields, start_date=dateFrom, end_date=dateTo,\n interval=interval)\n', (3753, 3835), True, 'import eikon as ek\n'), ((1109, 1145), 'rest_framework.response.Response', 'Response', (['"""Invalid Date"""'], {'status': '(400)'}), "('Invalid Date', status=400)\n", (1117, 1145), False, 'from rest_framework.response import Response\n'), ((2691, 2723), 'eikon.get_data', 'ek.get_data', (['instruments', 'fields'], {}), '(instruments, fields)\n', (2702, 2723), True, 'import eikon as ek\n'), ((3981, 4006), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3996, 4006), False, 'import os\n'), ((4061, 4086), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (4075, 4086), False, 'import os\n'), ((4104, 4126), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (4115, 4126), False, 'import os\n'), ((4226, 4242), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (4240, 4242), False, 'from datetime import datetime\n')]
|
from flask import Flask
from dataclasses import dataclass, field
from typing import List, Dict
app = Flask(__name__)
@dataclass
class Human:
name: str
pets: List[str] = field(default_factory=list)
def get_humans() -> Dict[str, Human]:
humans = [
Human('<NAME>', pets=['Bork the Dog',
'Henrietta the Chicken',
'Davis the Duck']),
Human('<NAME>', pets=['127 the Cellular Automata']),
Human('<NAME>', pets=[])
]
return {human.name.split()[0].lower(): human for human in humans}
humans = get_humans()
@app.route('/human/<name>', methods=['GET'])
def get_human(name):
human = humans.get(name.lower())
if human:
html_part1 = f"""
<html>
<body>
<h1>{human.name}</h1>
"""
pets_html = ""
if human.pets:
pets_html += "<h2>Pets</h2><ul>"
for pet in human.pets:
pets_html += f"<li>{pet}</li>"
pets_html += "</ul>"
else:
pets_html += "<h2>No pets! :(</h2>"
html_part2 = "</body></html>"
return html_part1 + pets_html + html_part2
else:
return f"Couldn't find human {name}", 404
|
[
"dataclasses.field",
"flask.Flask"
] |
[((102, 117), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (107, 117), False, 'from flask import Flask\n'), ((180, 207), 'dataclasses.field', 'field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (185, 207), False, 'from dataclasses import dataclass, field\n')]
|
import os
from Crypto.Cipher import AES
from Crypto.Hash import SHA256
from Crypto import Random
from tkinter import *
from tkinter.messagebox import *
from tkinter.ttk import Label
import sys
import time
from threading import Thread
import webbrowser
import tkinter as tk
Kinter = tk()
Kinter.title("Kinter Cryptor")
def encrypt(key, filename):
chunksize = 64*1024
outputFile = "(encrypted)"+filename
filesize = str(os.path.getsize(filename)).zfill(16)
IV = Random.new().read(16)
encryptor = AES.new(key, AES.MODE_CBC, IV)
with open(filename, 'rb') as infile:
with open(outputFile, 'wb') as outfile:
outfile.write(filesize.encode('utf-8'))
outfile.write(IV)
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
elif len(chunk) % 16 != 0:
chunk += b' ' * (16 - (len(chunk) % 16))
outfile.write(encryptor.encrypt(chunk))
def decrypt(key, filename):
chunksize = 64*1024
outputFile = filename[11:]
with open(filename, 'rb') as infile:
filesize = int(infile.read(16))
IV = infile.read(16)
decryptor = AES.new(key, AES.MODE_CBC, IV)
with open(outputFile, 'wb') as outfile:
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
outfile.write(decryptor.decrypt(chunk))
outfile.truncate(filesize)
def getKey(password):
hasher = SHA256.new(password.encode('utf-8'))
return hasher.digest()
def Main():
string = StringVar()
string.set("Voulez-vous (E)ncrypter ou (D)crypter ?: ")
entree = Entry(Kinter, textvariable=string, width=30, bg="black", fg="white")
entree.pack(side=TOP)
if string == "E":
filename = input("Fichier à Encrypter: ")
password = input("<PASSWORD> de crypt<PASSWORD>: ")
encrypt(getKey(password), filename)
print("Fait.")
elif string == 'D':
filename = input("Fichier à Décrypter: ")
password = input("<PASSWORD> dé<PASSWORD>: ")
decrypt(getKey(password), filename)
print("Fait.")
else:
print("Aucune option séléctionée, fermeture...")
if __name__ == '__main__':
Main()
Kinter.mainloop()
|
[
"os.path.getsize",
"tkinter",
"Crypto.Random.new",
"Crypto.Cipher.AES.new"
] |
[((283, 287), 'tkinter', 'tk', ([], {}), '()\n', (285, 287), True, 'import tkinter as tk\n'), ((516, 546), 'Crypto.Cipher.AES.new', 'AES.new', (['key', 'AES.MODE_CBC', 'IV'], {}), '(key, AES.MODE_CBC, IV)\n', (523, 546), False, 'from Crypto.Cipher import AES\n'), ((1231, 1261), 'Crypto.Cipher.AES.new', 'AES.new', (['key', 'AES.MODE_CBC', 'IV'], {}), '(key, AES.MODE_CBC, IV)\n', (1238, 1261), False, 'from Crypto.Cipher import AES\n'), ((477, 489), 'Crypto.Random.new', 'Random.new', ([], {}), '()\n', (487, 489), False, 'from Crypto import Random\n'), ((431, 456), 'os.path.getsize', 'os.path.getsize', (['filename'], {}), '(filename)\n', (446, 456), False, 'import os\n')]
|
"""
Module for vega-admin test models
"""
from django.db import models
from django.utils.translation import ugettext as _
class Artist(models.Model):
"""
Artist Model class
"""
name = models.CharField(_("Name"), max_length=100)
class Meta:
ordering = ["name"]
verbose_name = "Artist"
verbose_name_plural = "Artists"
def __str__(self):
"""Unicode representation of Song."""
return self.name
class Song(models.Model):
"""Model definition for Song."""
SINGLE = "1"
COLLABO = "2"
SKIT = "3"
SONG_TYPES = ((SINGLE, "Single"), (COLLABO, "Collaboration"), (SKIT,
"Skit"))
artist = models.ForeignKey(
Artist, verbose_name=_("Artist"), on_delete=models.PROTECT)
name = models.CharField(_("Name"), max_length=100)
song_type = models.CharField(
_("Type"), max_length=1, choices=SONG_TYPES, default=SINGLE)
release_date = models.DateField(_("Release Date"))
release_time = models.TimeField(_("Release Time"))
recording_time = models.DateTimeField(
_("Recording Time"), auto_now_add=True)
class Meta:
"""Meta definition for Song."""
verbose_name = "Song"
verbose_name_plural = "Songs"
ordering = ["name"]
def __str__(self):
"""Unicode representation of Song."""
return self.name
|
[
"django.utils.translation.ugettext"
] |
[((220, 229), 'django.utils.translation.ugettext', '_', (['"""Name"""'], {}), "('Name')\n", (221, 229), True, 'from django.utils.translation import ugettext as _\n'), ((854, 863), 'django.utils.translation.ugettext', '_', (['"""Name"""'], {}), "('Name')\n", (855, 863), True, 'from django.utils.translation import ugettext as _\n'), ((923, 932), 'django.utils.translation.ugettext', '_', (['"""Type"""'], {}), "('Type')\n", (924, 932), True, 'from django.utils.translation import ugettext as _\n'), ((1020, 1037), 'django.utils.translation.ugettext', '_', (['"""Release Date"""'], {}), "('Release Date')\n", (1021, 1037), True, 'from django.utils.translation import ugettext as _\n'), ((1075, 1092), 'django.utils.translation.ugettext', '_', (['"""Release Time"""'], {}), "('Release Time')\n", (1076, 1092), True, 'from django.utils.translation import ugettext as _\n'), ((1145, 1164), 'django.utils.translation.ugettext', '_', (['"""Recording Time"""'], {}), "('Recording Time')\n", (1146, 1164), True, 'from django.utils.translation import ugettext as _\n'), ((787, 798), 'django.utils.translation.ugettext', '_', (['"""Artist"""'], {}), "('Artist')\n", (788, 798), True, 'from django.utils.translation import ugettext as _\n')]
|
from skmob.utils import gislib
import math
class TestClustering:
def setup_method(self):
self.point_1 = (43.8430139, 10.5079940)
self.point_2 = (43.5442700, 10.3261500)
self.decimal = 43.8430139
self.DMS = (43, 50, 34.85)
def test_get_distance(self):
output = gislib.getDistance(self.point_1, self.point_2)
assert (math.isclose(output, 36.293701213))
support = gislib.getDistanceByHaversine(self.point_1, self.point_2)
assert (math.isclose(support, output))
output = gislib.getDistance(self.point_1, self.point_1)
assert (math.isclose(output, 0))
def test_get_distance_by_haversine(self):
output = gislib.getDistanceByHaversine(self.point_1, self.point_2)
assert (math.isclose(output, 36.293701213))
output = gislib.getDistanceByHaversine(self.point_1, self.point_1)
assert (math.isclose(output, 0))
# def test_decimal_to_DMS(self):
# output = gislib.DecimalToDMS(self.decimal)
# assert (output[0] == 43)
# assert (output[1] == 50)
# assert (math.isclose(output[2], 34.85))
def test_DMS_to_decimal(self):
output = gislib.DMSToDecimal(self.DMS[0], self.DMS[1], self.DMS[2])
assert (math.isclose(output, 43.84301388888))
def test_get_coordinates_for_distance(self):
output = gislib.getCoordinatesForDistance(self.point_1[0], self.point_1[1], 15)
assert (math.isclose(output[0], 0.134989200863))
assert (math.isclose(output[1], 0.187162559305))
# def test_is_within_distance(self):
# assert (gislib.isWithinDistance(self.point_1, self.point_2, 20))
# assert (gislib.isWithinDistance(self.point_1, self.point_2, 40) is False)
|
[
"skmob.utils.gislib.DMSToDecimal",
"skmob.utils.gislib.getDistance",
"skmob.utils.gislib.getDistanceByHaversine",
"math.isclose",
"skmob.utils.gislib.getCoordinatesForDistance"
] |
[((313, 359), 'skmob.utils.gislib.getDistance', 'gislib.getDistance', (['self.point_1', 'self.point_2'], {}), '(self.point_1, self.point_2)\n', (331, 359), False, 'from skmob.utils import gislib\n'), ((376, 410), 'math.isclose', 'math.isclose', (['output', '(36.293701213)'], {}), '(output, 36.293701213)\n', (388, 410), False, 'import math\n'), ((431, 488), 'skmob.utils.gislib.getDistanceByHaversine', 'gislib.getDistanceByHaversine', (['self.point_1', 'self.point_2'], {}), '(self.point_1, self.point_2)\n', (460, 488), False, 'from skmob.utils import gislib\n'), ((505, 534), 'math.isclose', 'math.isclose', (['support', 'output'], {}), '(support, output)\n', (517, 534), False, 'import math\n'), ((554, 600), 'skmob.utils.gislib.getDistance', 'gislib.getDistance', (['self.point_1', 'self.point_1'], {}), '(self.point_1, self.point_1)\n', (572, 600), False, 'from skmob.utils import gislib\n'), ((617, 640), 'math.isclose', 'math.isclose', (['output', '(0)'], {}), '(output, 0)\n', (629, 640), False, 'import math\n'), ((706, 763), 'skmob.utils.gislib.getDistanceByHaversine', 'gislib.getDistanceByHaversine', (['self.point_1', 'self.point_2'], {}), '(self.point_1, self.point_2)\n', (735, 763), False, 'from skmob.utils import gislib\n'), ((780, 814), 'math.isclose', 'math.isclose', (['output', '(36.293701213)'], {}), '(output, 36.293701213)\n', (792, 814), False, 'import math\n'), ((834, 891), 'skmob.utils.gislib.getDistanceByHaversine', 'gislib.getDistanceByHaversine', (['self.point_1', 'self.point_1'], {}), '(self.point_1, self.point_1)\n', (863, 891), False, 'from skmob.utils import gislib\n'), ((908, 931), 'math.isclose', 'math.isclose', (['output', '(0)'], {}), '(output, 0)\n', (920, 931), False, 'import math\n'), ((1197, 1255), 'skmob.utils.gislib.DMSToDecimal', 'gislib.DMSToDecimal', (['self.DMS[0]', 'self.DMS[1]', 'self.DMS[2]'], {}), '(self.DMS[0], self.DMS[1], self.DMS[2])\n', (1216, 1255), False, 'from skmob.utils import gislib\n'), ((1272, 1308), 'math.isclose', 'math.isclose', (['output', '(43.84301388888)'], {}), '(output, 43.84301388888)\n', (1284, 1308), False, 'import math\n'), ((1377, 1447), 'skmob.utils.gislib.getCoordinatesForDistance', 'gislib.getCoordinatesForDistance', (['self.point_1[0]', 'self.point_1[1]', '(15)'], {}), '(self.point_1[0], self.point_1[1], 15)\n', (1409, 1447), False, 'from skmob.utils import gislib\n'), ((1464, 1503), 'math.isclose', 'math.isclose', (['output[0]', '(0.134989200863)'], {}), '(output[0], 0.134989200863)\n', (1476, 1503), False, 'import math\n'), ((1521, 1560), 'math.isclose', 'math.isclose', (['output[1]', '(0.187162559305)'], {}), '(output[1], 0.187162559305)\n', (1533, 1560), False, 'import math\n')]
|
# -*- coding: utf-8 -*-
import re
import json
def getTitle(storyContent):
pattern = re.compile("[^:\ ][A-Za-zäöüßÄÖÜ\d\ .\[\|\]\"\']*")
result = pattern.search(storyContent)
return result.group(0)
def getContent(storyContent):
pattern = re.compile("^[A-Za-z]{2}[A-Za-zäüößÄÖÜ\w\s\.\:]*", re.MULTILINE)
result = pattern.search(storyContent)
return result.group(0)
def getLinks(storyContent):
pattern = re.compile("\[{2}[A-Za-zäöüß\s\d]*\|[A-Za-zäöüßÄÖÜ\s\d]*\]{2}", re.MULTILINE)
result = pattern.findall(storyContent)
return result
def getLinkDesc(link):
pattern = re.compile("[^\[][A-Za-zäüößÄÖÜ\d\ ]*[^\|]")
result = pattern.search(link)
return result.group(0)
def getLinkTarget(link):
pattern = re.compile("\|[A-Za-zäöüßÄÖÜ\s\d]*")
result = pattern.search(link)
result = result.group(0)[1:]
return result
def readFile(fileName):
f = open(fileName, 'rb')
fileContent = f.read().decode('utf-8')
f.close()
return fileContent
def writeFile(fileName, fileContent):
f = open(fileName, 'wb')
f.write(fileContent.encode('utf-8'))
f.flush()
f.close()
# Datei lesen
storyContent = readFile('story.txt')
pattern = re.compile("::[\ A-Za-zäöüß\d\s.\[\|\]\"\']*")
storyParts = pattern.findall(storyContent)
resultDict = dict()
for i in range(len(storyParts)):
currentItem = storyParts[i]
title = getTitle(currentItem)
content = getContent(currentItem)
links = getLinks(currentItem)
linksArray = []
# Links extrahieren
for i in range(len(links)):
currentLink = links[i]
linkDesc = getLinkDesc(links[i])
linkTarget = getLinkTarget(links[i])
linksArray.append({'desc':linkDesc, 'target': linkTarget})
resultDict[title] = {'content': content, 'links': linksArray}
jsonData = json.dumps(resultDict, sort_keys=True, indent=4, ensure_ascii=False)
writeFile('story.json', jsonData)
|
[
"json.dumps",
"re.compile"
] |
[((1258, 1309), 're.compile', 're.compile', (['"""::[\\\\ A-Za-zäöüß\\\\d\\\\s.\\\\[\\\\|\\\\]"\']*"""'], {}), '(\'::[\\\\ A-Za-zäöüß\\\\d\\\\s.\\\\[\\\\|\\\\]"\\\']*\')\n', (1268, 1309), False, 'import re\n'), ((1892, 1960), 'json.dumps', 'json.dumps', (['resultDict'], {'sort_keys': '(True)', 'indent': '(4)', 'ensure_ascii': '(False)'}), '(resultDict, sort_keys=True, indent=4, ensure_ascii=False)\n', (1902, 1960), False, 'import json\n'), ((94, 150), 're.compile', 're.compile', (['"""[^:\\\\ ][A-Za-zäöüßÄÖÜ\\\\d\\\\ .\\\\[\\\\|\\\\]"\']*"""'], {}), '(\'[^:\\\\ ][A-Za-zäöüßÄÖÜ\\\\d\\\\ .\\\\[\\\\|\\\\]"\\\']*\')\n', (104, 150), False, 'import re\n'), ((265, 333), 're.compile', 're.compile', (['"""^[A-Za-z]{2}[A-Za-zäüößÄÖÜ\\\\w\\\\s\\\\.\\\\:]*"""', 're.MULTILINE'], {}), "('^[A-Za-z]{2}[A-Za-zäüößÄÖÜ\\\\w\\\\s\\\\.\\\\:]*', re.MULTILINE)\n", (275, 333), False, 'import re\n'), ((447, 536), 're.compile', 're.compile', (['"""\\\\[{2}[A-Za-zäöüß\\\\s\\\\d]*\\\\|[A-Za-zäöüßÄÖÜ\\\\s\\\\d]*\\\\]{2}"""', 're.MULTILINE'], {}), "('\\\\[{2}[A-Za-zäöüß\\\\s\\\\d]*\\\\|[A-Za-zäöüßÄÖÜ\\\\s\\\\d]*\\\\]{2}', re.\n MULTILINE)\n", (457, 536), False, 'import re\n'), ((629, 677), 're.compile', 're.compile', (['"""[^\\\\[][A-Za-zäüößÄÖÜ\\\\d\\\\ ]*[^\\\\|]"""'], {}), "('[^\\\\[][A-Za-zäüößÄÖÜ\\\\d\\\\ ]*[^\\\\|]')\n", (639, 677), False, 'import re\n'), ((780, 819), 're.compile', 're.compile', (['"""\\\\|[A-Za-zäöüßÄÖÜ\\\\s\\\\d]*"""'], {}), "('\\\\|[A-Za-zäöüßÄÖÜ\\\\s\\\\d]*')\n", (790, 819), False, 'import re\n')]
|
import unittest
import numpy as np
from gradient_checker import GradientChecker, create_op
from op_test_util import OpTestMeta
class MinusOpTest(unittest.TestCase):
__metaclass__ = OpTestMeta
def setUp(self):
self.type = "minus"
self.inputs = {
'X': np.random.random((32, 84)).astype("float32"),
'Y': np.random.random((32, 84)).astype("float32")
}
self.outputs = {'Out': (self.inputs['X'] - self.inputs['Y'])}
class MinusGradTest(GradientChecker):
def test_left(self):
op = create_op("minus")
inputs = {
"X": np.random.random((10, 10)).astype("float32"),
"Y": np.random.random((10, 10)).astype("float32")
}
self.check_grad(op, inputs, ["X", 'Y'], "Out")
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"numpy.random.random",
"gradient_checker.create_op"
] |
[((816, 831), 'unittest.main', 'unittest.main', ([], {}), '()\n', (829, 831), False, 'import unittest\n'), ((555, 573), 'gradient_checker.create_op', 'create_op', (['"""minus"""'], {}), "('minus')\n", (564, 573), False, 'from gradient_checker import GradientChecker, create_op\n'), ((289, 315), 'numpy.random.random', 'np.random.random', (['(32, 84)'], {}), '((32, 84))\n', (305, 315), True, 'import numpy as np\n'), ((352, 378), 'numpy.random.random', 'np.random.random', (['(32, 84)'], {}), '((32, 84))\n', (368, 378), True, 'import numpy as np\n'), ((610, 636), 'numpy.random.random', 'np.random.random', (['(10, 10)'], {}), '((10, 10))\n', (626, 636), True, 'import numpy as np\n'), ((673, 699), 'numpy.random.random', 'np.random.random', (['(10, 10)'], {}), '((10, 10))\n', (689, 699), True, 'import numpy as np\n')]
|
import logging
import os.path
import sys
from .modes import TrimmingMode
logger = logging.getLogger(__name__)
def process_args(args) -> dict:
"""
Process args from argparser and set defaults
"""
input_file = args.input
output_file = args.output or f"{input_file}.clipkit"
if not os.path.isfile(input_file):
logger.warning("Input file does not exist")
sys.exit()
if input_file == output_file:
logger.warning("Input and output files can't have the same name.")
sys.exit()
# assign optional arguments
complement = args.complementary or False
mode = TrimmingMode(args.mode) if args.mode else TrimmingMode.smart_gap
gaps = float(args.gaps) if args.gaps is not None else 0.9
use_log = args.log or False
return dict(
input_file=input_file,
output_file=output_file,
input_file_format=args.input_file_format,
output_file_format=args.output_file_format,
complement=complement,
gaps=gaps,
mode=mode,
use_log=use_log,
)
|
[
"sys.exit",
"logging.getLogger"
] |
[((84, 111), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (101, 111), False, 'import logging\n'), ((396, 406), 'sys.exit', 'sys.exit', ([], {}), '()\n', (404, 406), False, 'import sys\n'), ((525, 535), 'sys.exit', 'sys.exit', ([], {}), '()\n', (533, 535), False, 'import sys\n')]
|
from django.contrib import admin
from django.urls import include, path
from django.conf.urls import url
from votes import urls
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('comments.urls')),
path('api-auth/', include('rest_framework.urls')),
url(r'^', include(urls)),
]
|
[
"django.urls.path",
"django.urls.include"
] |
[((148, 179), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (152, 179), False, 'from django.urls import include, path\n'), ((194, 218), 'django.urls.include', 'include', (['"""comments.urls"""'], {}), "('comments.urls')\n", (201, 218), False, 'from django.urls import include, path\n'), ((243, 273), 'django.urls.include', 'include', (['"""rest_framework.urls"""'], {}), "('rest_framework.urls')\n", (250, 273), False, 'from django.urls import include, path\n'), ((290, 303), 'django.urls.include', 'include', (['urls'], {}), '(urls)\n', (297, 303), False, 'from django.urls import include, path\n')]
|
"""Seek behaviour in Pygame"""
import pygame
import numpy as np
import math
WIDTH,HEIGHT = 700,400
screen = pygame.display.set_mode((WIDTH,HEIGHT))
class Seeker():
def __init__(self,x,y):
super().__init__()
self.pos=np.array([x,y])
self.vel=np.array([0,0])
self.acc=np.array([0,0])
self.max_speed=0.1
def Draw(self):
#pygame.draw.polygon(screen, (0,255,255), ((self.pos),(self.pos+(8,-20)),(self.pos+(18,0))))
pygame.draw.circle(screen, (0,255,255), self.pos, 10)
def Update(self):
self.vel = np.add(self.vel, self.acc)
self.pos = np.subtract(self.pos, self.vel)
self.acc = np.multiply(self.acc,[0,0])
def Apply(self,force):
self.acc = np.add(self.acc,force)
def Seek(self,target):
desired_vel = self.pos - target
desired_vel = desired_vel/math.sqrt(desired_vel[0]*desired_vel[0]+desired_vel[1]*desired_vel[1])
desired_vel = desired_vel * self.max_speed
steering_vel = desired_vel - self.vel
self.Apply(steering_vel)
def Snitch(pos):
pygame.draw.circle(screen, (255,215,0), pos,10)
pygame.init()
agents=[]
for i in range(20):
agents.append(Seeker(i*100,i*100))
running = True
while running:
screen.fill((0,0,0))
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
#Get target position
target_pos= np.array(pygame.mouse.get_pos())
Snitch(target_pos)
for agent in agents:
agent.Seek(target_pos)
agent.Update()
agent.Draw()
pygame.display.update()
#pygame.time.Clock().tick(30)
|
[
"numpy.multiply",
"pygame.draw.circle",
"numpy.subtract",
"math.sqrt",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.init",
"pygame.mouse.get_pos",
"pygame.display.update",
"numpy.array",
"numpy.add"
] |
[((117, 157), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(WIDTH, HEIGHT)'], {}), '((WIDTH, HEIGHT))\n', (140, 157), False, 'import pygame\n'), ((1196, 1209), 'pygame.init', 'pygame.init', ([], {}), '()\n', (1207, 1209), False, 'import pygame\n'), ((1145, 1195), 'pygame.draw.circle', 'pygame.draw.circle', (['screen', '(255, 215, 0)', 'pos', '(10)'], {}), '(screen, (255, 215, 0), pos, 10)\n', (1163, 1195), False, 'import pygame\n'), ((1362, 1380), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (1378, 1380), False, 'import pygame\n'), ((1673, 1696), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (1694, 1696), False, 'import pygame\n'), ((251, 267), 'numpy.array', 'np.array', (['[x, y]'], {}), '([x, y])\n', (259, 267), True, 'import numpy as np\n'), ((285, 301), 'numpy.array', 'np.array', (['[0, 0]'], {}), '([0, 0])\n', (293, 301), True, 'import numpy as np\n'), ((319, 335), 'numpy.array', 'np.array', (['[0, 0]'], {}), '([0, 0])\n', (327, 335), True, 'import numpy as np\n'), ((497, 552), 'pygame.draw.circle', 'pygame.draw.circle', (['screen', '(0, 255, 255)', 'self.pos', '(10)'], {}), '(screen, (0, 255, 255), self.pos, 10)\n', (515, 552), False, 'import pygame\n'), ((600, 626), 'numpy.add', 'np.add', (['self.vel', 'self.acc'], {}), '(self.vel, self.acc)\n', (606, 626), True, 'import numpy as np\n'), ((647, 678), 'numpy.subtract', 'np.subtract', (['self.pos', 'self.vel'], {}), '(self.pos, self.vel)\n', (658, 678), True, 'import numpy as np\n'), ((699, 728), 'numpy.multiply', 'np.multiply', (['self.acc', '[0, 0]'], {}), '(self.acc, [0, 0])\n', (710, 728), True, 'import numpy as np\n'), ((777, 800), 'numpy.add', 'np.add', (['self.acc', 'force'], {}), '(self.acc, force)\n', (783, 800), True, 'import numpy as np\n'), ((1504, 1526), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (1524, 1526), False, 'import pygame\n'), ((906, 982), 'math.sqrt', 'math.sqrt', (['(desired_vel[0] * desired_vel[0] + desired_vel[1] * desired_vel[1])'], {}), '(desired_vel[0] * desired_vel[0] + desired_vel[1] * desired_vel[1])\n', (915, 982), False, 'import math\n')]
|
# Copyright 2016-2022 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
# ReFrame Project Developers. See the top-level LICENSE file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
import abc
import reframe.core.fields as fields
import reframe.utility.typecheck as typ
from reframe.core.exceptions import ContainerError
_STAGEDIR_MOUNT = '/rfm_workdir'
class ContainerPlatform(abc.ABC):
'''The abstract base class of any container platform.'''
#: The default mount location of the test case stage directory inside the
#: container
#: The container image to be used for running the test.
#:
#: :type: :class:`str` or :class:`None`
#: :default: :class:`None`
image = fields.TypedField(str, type(None))
#: The command to be executed within the container.
#:
#: If no command is given, then the default command of the corresponding
#: container image is going to be executed.
#:
#: .. versionadded:: 3.5.0
#: Changed the attribute name from `commands` to `command` and its type
#: to a string.
#:
#: :type: :class:`str` or :class:`None`
#: :default: :class:`None`
command = fields.TypedField(str, type(None))
_commands = fields.TypedField(typ.List[str])
#: The commands to be executed within the container.
#:
#: .. deprecated:: 3.5.0
#: Please use the `command` field instead.
#:
#: :type: :class:`list[str]`
#: :default: ``[]``
commands = fields.DeprecatedField(
_commands,
'The `commands` field is deprecated, please use the `command` field '
'to set the command to be executed by the container.',
fields.DeprecatedField.OP_SET, from_version='3.5.0'
)
#: Pull the container image before running.
#:
#: This does not have any effect for the `Singularity` container platform.
#:
#: .. versionadded:: 3.5
#:
#: :type: :class:`bool`
#: :default: ``True``
pull_image = fields.TypedField(bool)
#: List of mount point pairs for directories to mount inside the container.
#:
#: Each mount point is specified as a tuple of
#: ``(/path/in/host, /path/in/container)``. The stage directory of the
#: ReFrame test is always mounted under ``/rfm_workdir`` inside the
#: container, independelty of this field.
#:
#: :type: :class:`list[tuple[str, str]]`
#: :default: ``[]``
mount_points = fields.TypedField(typ.List[typ.Tuple[str, str]])
#: Additional options to be passed to the container runtime when executed.
#:
#: :type: :class:`list[str]`
#: :default: ``[]``
options = fields.TypedField(typ.List[str])
_workdir = fields.TypedField(str, type(None))
#: The working directory of ReFrame inside the container.
#:
#: This is the directory where the test's stage directory is mounted inside
#: the container. This directory is always mounted regardless if
#: :attr:`mount_points` is set or not.
#:
#: .. deprecated:: 3.5
#: Please use the `options` field to set the working directory.
#:
#: :type: :class:`str`
#: :default: ``/rfm_workdir``
workdir = fields.DeprecatedField(
_workdir,
'The `workdir` field is deprecated, please use the `options` field to '
'set the container working directory',
fields.DeprecatedField.OP_SET, from_version='3.5.0'
)
def __init__(self):
self.image = None
self.command = None
# NOTE: Here we set the target fields directly to avoid the deprecation
# warnings
self._commands = []
self._workdir = _STAGEDIR_MOUNT
self.mount_points = []
self.options = []
self.pull_image = True
@abc.abstractmethod
def emit_prepare_commands(self, stagedir):
'''Returns commands for preparing this container for running.
Such a command could be for pulling the container image from a
repository.
.. note:
This method is relevant only to developers of new container
platform backends.
:meta private:
:arg stagedir: The stage directory of the test.
'''
@abc.abstractmethod
def launch_command(self, stagedir):
'''Returns the command for running :attr:`commands` with this container
platform.
.. note:
This method is relevant only to developers of new container
platforms.
:meta private:
:arg stagedir: The stage directory of the test.
'''
def validate(self):
if self.image is None:
raise ContainerError('no image specified')
def __str__(self):
return type(self).__name__
def __rfm_json_encode__(self):
return str(self)
class Docker(ContainerPlatform):
'''Container platform backend for running containers with `Docker
<https://www.docker.com/>`__.'''
def emit_prepare_commands(self, stagedir):
return [f'docker pull {self.image}'] if self.pull_image else []
def launch_command(self, stagedir):
super().launch_command(stagedir)
mount_points = self.mount_points + [(stagedir, _STAGEDIR_MOUNT)]
run_opts = [f'-v "{mp[0]}":"{mp[1]}"' for mp in mount_points]
run_opts += self.options
if self.command:
return (f'docker run --rm {" ".join(run_opts)} '
f'{self.image} {self.command}')
if self.commands:
return (f"docker run --rm {' '.join(run_opts)} {self.image} "
f"bash -c 'cd {self.workdir}; {'; '.join(self.commands)}'")
return f'docker run --rm {" ".join(run_opts)} {self.image}'
class Sarus(ContainerPlatform):
'''Container platform backend for running containers with `Sarus
<https://sarus.readthedocs.io>`__.'''
#: Enable MPI support when launching the container.
#:
#: :type: boolean
#: :default: :class:`False`
with_mpi = fields.TypedField(bool)
def __init__(self):
super().__init__()
self.with_mpi = False
self._command = 'sarus'
def emit_prepare_commands(self, stagedir):
# The format that Sarus uses to call the images is
# <reposerver>/<user>/<image>:<tag>. If an image was loaded
# locally from a tar file, the <reposerver> is 'load'.
if not self.pull_image or self.image.startswith('load/'):
return []
else:
return [f'{self._command} pull {self.image}']
def launch_command(self, stagedir):
super().launch_command(stagedir)
mount_points = self.mount_points + [(stagedir, _STAGEDIR_MOUNT)]
run_opts = [f'--mount=type=bind,source="{mp[0]}",destination="{mp[1]}"'
for mp in mount_points]
if self.with_mpi:
run_opts.append('--mpi')
run_opts += self.options
if self.command:
return (f'{self._command} run {" ".join(run_opts)} {self.image} '
f'{self.command}')
if self.commands:
return (f"{self._command} run {' '.join(run_opts)} {self.image} "
f"bash -c 'cd {self.workdir}; {'; '.join(self.commands)}'")
return f'{self._command} run {" ".join(run_opts)} {self.image}'
class Shifter(Sarus):
'''Container platform backend for running containers with `Shifter
<https://www.nersc.gov/research-and-development/user-defined-images/>`__.
'''
def __init__(self):
super().__init__()
self._command = 'shifter'
class Singularity(ContainerPlatform):
'''Container platform backend for running containers with `Singularity
<https://sylabs.io/>`__.'''
#: Enable CUDA support when launching the container.
#:
#: :type: boolean
#: :default: :class:`False`
with_cuda = fields.TypedField(bool)
def __init__(self):
super().__init__()
self.with_cuda = False
def emit_prepare_commands(self, stagedir):
return []
def launch_command(self, stagedir):
super().launch_command(stagedir)
mount_points = self.mount_points + [(stagedir, _STAGEDIR_MOUNT)]
run_opts = [f'-B"{mp[0]}:{mp[1]}"' for mp in mount_points]
if self.with_cuda:
run_opts.append('--nv')
run_opts += self.options
if self.command:
return (f'singularity exec {" ".join(run_opts)} '
f'{self.image} {self.command}')
if self.commands:
return (f"singularity exec {' '.join(run_opts)} {self.image} "
f"bash -c 'cd {self.workdir}; {'; '.join(self.commands)}'")
return f'singularity run {" ".join(run_opts)} {self.image}'
class ContainerPlatformField(fields.TypedField):
def __init__(self, *other_types):
super().__init__(ContainerPlatform, *other_types)
def __set__(self, obj, value):
if isinstance(value, str):
try:
value = globals()[value]()
except KeyError:
raise ValueError(
f'unknown container platform: {value}') from None
super().__set__(obj, value)
|
[
"reframe.core.fields.DeprecatedField",
"reframe.core.fields.TypedField",
"reframe.core.exceptions.ContainerError"
] |
[((1229, 1261), 'reframe.core.fields.TypedField', 'fields.TypedField', (['typ.List[str]'], {}), '(typ.List[str])\n', (1246, 1261), True, 'import reframe.core.fields as fields\n'), ((1484, 1701), 'reframe.core.fields.DeprecatedField', 'fields.DeprecatedField', (['_commands', '"""The `commands` field is deprecated, please use the `command` field to set the command to be executed by the container."""', 'fields.DeprecatedField.OP_SET'], {'from_version': '"""3.5.0"""'}), "(_commands,\n 'The `commands` field is deprecated, please use the `command` field to set the command to be executed by the container.'\n , fields.DeprecatedField.OP_SET, from_version='3.5.0')\n", (1506, 1701), True, 'import reframe.core.fields as fields\n'), ((1983, 2006), 'reframe.core.fields.TypedField', 'fields.TypedField', (['bool'], {}), '(bool)\n', (2000, 2006), True, 'import reframe.core.fields as fields\n'), ((2434, 2482), 'reframe.core.fields.TypedField', 'fields.TypedField', (['typ.List[typ.Tuple[str, str]]'], {}), '(typ.List[typ.Tuple[str, str]])\n', (2451, 2482), True, 'import reframe.core.fields as fields\n'), ((2641, 2673), 'reframe.core.fields.TypedField', 'fields.TypedField', (['typ.List[str]'], {}), '(typ.List[str])\n', (2658, 2673), True, 'import reframe.core.fields as fields\n'), ((3173, 3375), 'reframe.core.fields.DeprecatedField', 'fields.DeprecatedField', (['_workdir', '"""The `workdir` field is deprecated, please use the `options` field to set the container working directory"""', 'fields.DeprecatedField.OP_SET'], {'from_version': '"""3.5.0"""'}), "(_workdir,\n 'The `workdir` field is deprecated, please use the `options` field to set the container working directory'\n , fields.DeprecatedField.OP_SET, from_version='3.5.0')\n", (3195, 3375), True, 'import reframe.core.fields as fields\n'), ((5981, 6004), 'reframe.core.fields.TypedField', 'fields.TypedField', (['bool'], {}), '(bool)\n', (5998, 6004), True, 'import reframe.core.fields as fields\n'), ((7843, 7866), 'reframe.core.fields.TypedField', 'fields.TypedField', (['bool'], {}), '(bool)\n', (7860, 7866), True, 'import reframe.core.fields as fields\n'), ((4637, 4673), 'reframe.core.exceptions.ContainerError', 'ContainerError', (['"""no image specified"""'], {}), "('no image specified')\n", (4651, 4673), False, 'from reframe.core.exceptions import ContainerError\n')]
|
from django.urls import path, re_path, include
from . import views
app_name = 'osiris'
url_patterns_v2 = [
path('courses/all/', views.get_all_courses, name='getallcourses'),
]
urlpatterns = [
path('', views.index, name='index'),
path('api/unicodes/', views.unicodes, name='unicodes'),
path('api/<slug:uni>/<int:year>/course/<slug:code>/header/', views.get_course_header, name='getcourseheader'),
# path('api/<slug:uni>/<int:year>/course/<slug:code>/info/', views.getCourseInfo, name='getcourseinfo'),
path('api/<slug:uni>/faculties/', views.get_departments, name='faculties'),
path('api/<slug:uni>/types/', views.get_type_names, name='types'),
path('api/<slug:uni>/studies/', views.get_studies, name='studies'),
# path('api/<slug:uni>/faculty/courses/<slug:faculty>/<slug:type>/', views.getCoursesFromFaculty, name='getcoursesfromfaculty'),
re_path(r'^api/(?P<uni>[\w|\W]+)/(?P<year>[\d]+)/faculty/courses/(?P<department>[\w|\W]+)/(?P<type_shortname>[\w|\W]+)/$',
views.get_courses_from_faculty, name='getcoursesfromfaculty'),
path('api/v2/<slug:uni>/<int:year>/', include(url_patterns_v2)),
]
|
[
"django.urls.re_path",
"django.urls.path",
"django.urls.include"
] |
[((113, 178), 'django.urls.path', 'path', (['"""courses/all/"""', 'views.get_all_courses'], {'name': '"""getallcourses"""'}), "('courses/all/', views.get_all_courses, name='getallcourses')\n", (117, 178), False, 'from django.urls import path, re_path, include\n'), ((203, 238), 'django.urls.path', 'path', (['""""""', 'views.index'], {'name': '"""index"""'}), "('', views.index, name='index')\n", (207, 238), False, 'from django.urls import path, re_path, include\n'), ((244, 298), 'django.urls.path', 'path', (['"""api/unicodes/"""', 'views.unicodes'], {'name': '"""unicodes"""'}), "('api/unicodes/', views.unicodes, name='unicodes')\n", (248, 298), False, 'from django.urls import path, re_path, include\n'), ((304, 418), 'django.urls.path', 'path', (['"""api/<slug:uni>/<int:year>/course/<slug:code>/header/"""', 'views.get_course_header'], {'name': '"""getcourseheader"""'}), "('api/<slug:uni>/<int:year>/course/<slug:code>/header/', views.\n get_course_header, name='getcourseheader')\n", (308, 418), False, 'from django.urls import path, re_path, include\n'), ((528, 602), 'django.urls.path', 'path', (['"""api/<slug:uni>/faculties/"""', 'views.get_departments'], {'name': '"""faculties"""'}), "('api/<slug:uni>/faculties/', views.get_departments, name='faculties')\n", (532, 602), False, 'from django.urls import path, re_path, include\n'), ((608, 673), 'django.urls.path', 'path', (['"""api/<slug:uni>/types/"""', 'views.get_type_names'], {'name': '"""types"""'}), "('api/<slug:uni>/types/', views.get_type_names, name='types')\n", (612, 673), False, 'from django.urls import path, re_path, include\n'), ((679, 745), 'django.urls.path', 'path', (['"""api/<slug:uni>/studies/"""', 'views.get_studies'], {'name': '"""studies"""'}), "('api/<slug:uni>/studies/', views.get_studies, name='studies')\n", (683, 745), False, 'from django.urls import path, re_path, include\n'), ((884, 1084), 'django.urls.re_path', 're_path', (['"""^api/(?P<uni>[\\\\w|\\\\W]+)/(?P<year>[\\\\d]+)/faculty/courses/(?P<department>[\\\\w|\\\\W]+)/(?P<type_shortname>[\\\\w|\\\\W]+)/$"""', 'views.get_courses_from_faculty'], {'name': '"""getcoursesfromfaculty"""'}), "(\n '^api/(?P<uni>[\\\\w|\\\\W]+)/(?P<year>[\\\\d]+)/faculty/courses/(?P<department>[\\\\w|\\\\W]+)/(?P<type_shortname>[\\\\w|\\\\W]+)/$'\n , views.get_courses_from_faculty, name='getcoursesfromfaculty')\n", (891, 1084), False, 'from django.urls import path, re_path, include\n'), ((1124, 1148), 'django.urls.include', 'include', (['url_patterns_v2'], {}), '(url_patterns_v2)\n', (1131, 1148), False, 'from django.urls import path, re_path, include\n')]
|
import numpy as np
import elevation.model_comparison
import os
import pandas
import multiprocessing
cur_dir = os.path.dirname(os.path.abspath(__file__))
class CFDModel(object):
def __init__(self, cfd_table=None, cfd_table_file=None):
if cfd_table is None:
#print "Loading CFD table from file"
self.cfd_table = elevation.model_comparison.get_NBT_cfd(cfd_table_file)
else:
self.cfd_table = cfd_table
self.cfd_table.index = self.cfd_table['Mismatch Type']
def fit(self):
pass
def predict(self, annots_list, num_proc=20):
if len(annots_list) == 0:
preds = 1.0
preds = np.ones(len(annots_list))
if num_proc > 1:
pool = multiprocessing.Pool(processes=num_proc)
jobs = []
for i, annots in enumerate(annots_list):
jobs.append(pool.apply_async(predict_annot, (annots, self.cfd_table)))
pool.close()
pool.join()
for i, j in enumerate(jobs):
pred = j.get()
preds[i] = pred
pool.terminate()
else:
for i, annots in enumerate(annots_list):
preds[i] = predict_annot(annots, self.cfd_table)
return preds
def predict_annot(annots, cfd_table):
pred_i = 1.0
for a in annots:
letters, pos = elevation.model_comparison.parse_mismatch_annot(a)
if pos=='':
annot_new = letters # a PAM mutation
else:
letters = str(letters)
annot_new = letters[0] + ":" + letters[1] + "," + str(pos)
if a == 'GG':
tmp_pred = 1.0
else:
tmp_pred = cfd_table["Percent-Active"].loc[annot_new]
# preds[i] = tmp_pred*preds[i]
pred_i = pred_i * tmp_pred
return pred_i
|
[
"os.path.abspath",
"multiprocessing.Pool"
] |
[((126, 151), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (141, 151), False, 'import os\n'), ((756, 796), 'multiprocessing.Pool', 'multiprocessing.Pool', ([], {'processes': 'num_proc'}), '(processes=num_proc)\n', (776, 796), False, 'import multiprocessing\n')]
|
import os
import collections
import pdb
import gym
import gym.envs.mujoco
import time
import csv
import json
import shutil
import numpy as np
import random
from . import ant_env
from . import proprioceptive_humanoid_env
from . import maze_ant
from . import maze_humanoid
# Wrapper that records everything we might care about in our environment
# All rewards (clipped and raw), states, actions, time and steps
# Copied originally from https://github.com/openai/baselines/blob/master/baselines/bench/monitor.py
class SmartMonitor(gym.Wrapper):
def __init__(self, env, log_dir, rank, opt, verbose=True, allow_early_resets=False):
super(SmartMonitor, self).__init__(env)
self.tstart = time.time()
self.episode_count = -1
# Get the rewards we want to log
# Got to be a better way to get the names of the subpart rewards, but it seems to be hardcoded in the mujoco envs
self.reward_list = ['reward_env']
if opt['model']['mode'] in ['baseline', 'baseline_reverse', 'baselinewtheta', 'baseline_lowlevel']:
self.baseline = True
elif opt['model']['mode'] in ['phasesimple', 'phasewstate', 'phasewtheta', 'phase_lowlevel']:
self.baseline = False
self.reward_list.append('reward_exp')
if opt['model']['mode'] != 'phase_lowlevel':
self.reward_list.append('reward_move')
if opt['env']['state_cycle_weight'] > 0 or opt['env']['action_cycle_weight'] > 0:
self.reward_list.append('reward_cycle')
self.reward_list.append('reward_cycle_s')
self.reward_list.append('reward_cycle_a')
elif opt['model']['mode'] == 'interpolate':
self.baseline = False
self.reward_list.append('reward_interpolate')
elif opt['model']['mode'] == 'cyclic':
self.baseline = False
self.reward_list.append('reward_cycle')
self.reward_list.append('reward_thresh')
elif opt['model']['mode'] in ['hierarchical', 'hierarchical_many']:
self.baseline = True
self.reward_list.append('reward_velocity')
self.reward_list.append('reward_goal')
elif opt['model']['mode'] in [ 'maze_baseline', 'maze_baseline_wphase']:
self.baseline = True
self.reward_list.append('reward_velocity')
self.reward_list.append('reward_goal')
else:
raise NotImplementedError
# This is currently hardcoded to Mujoco envs
if isinstance(env.unwrapped, ant_env.BaseAntEnv) or isinstance(env.unwrapped, ant_env.BaseAntLowGearEnv) or isinstance(env.unwrapped, proprioceptive_humanoid_env.BaseProprioceptiveHumanoidEnv):
self.reward_list += ['reward_forward', 'reward_ctrl', 'reward_contact', 'reward_survive']
elif isinstance(env.unwrapped, gym.envs.mujoco.AntEnv):
self.reward_list += ['reward_forward', 'reward_ctrl', 'reward_contact', 'reward_survive']
else:
raise NotImplementedError
# Data structure that holds all the values we want to log
self.episode_struct = collections.OrderedDict()
all_keys = self.reward_list + ['obs', 'action', 'env_count', 'episode_count']
if isinstance(env.unwrapped, ant_env.BaseAntEnv) or isinstance(env.unwrapped, ant_env.BaseAntLowGearEnv) or isinstance(env.unwrapped, proprioceptive_humanoid_env.BaseProprioceptiveHumanoidEnv) or isinstance(env.unwrapped, gym.envs.mujoco.MujocoEnv):
all_keys += ['state']
# Log the distances
if opt['model']['mode'] in ['hierarchical', 'hierarchical_many', 'maze_baseline', 'maze_baseline_wphase']:
if isinstance(env.unwrapped, maze_humanoid.ProprioceptiveHumanoidMazeEnv) or isinstance(env.unwrapped, maze_ant.AntMazeEnv):
all_keys += ['goal_distance', 'goal_distance_radius']
for key in all_keys:
self.episode_struct[key] = []
# Create and initialize our csv files
# File to store entire episode information (rather than every single step)
# Prints total reward (for all rewards), overall obs and state displacements, episode length, and episode time
episode_filename = os.path.join(log_dir, str(rank) + '.Episode.Monitor.csv')
self.ep_f = open(episode_filename, "wt")
self.ep_f.write('# Episode Logging %s\n'%json.dumps({"t_start": self.tstart, 'env_id' : env.spec and env.spec.id, 'mode': opt['model']['mode'], 'name': opt['logs']['exp_name']}))
ep_fields = self.reward_list + ['delta_obs', 'mean_action', 'episode_len', 'episode_dt', 'episode_count']
if isinstance(env.unwrapped, ant_env.BaseAntEnv) or isinstance(env.unwrapped, ant_env.BaseAntLowGearEnv) or isinstance(env.unwrapped, proprioceptive_humanoid_env.BaseProprioceptiveHumanoidEnv) or isinstance(env.unwrapped, gym.envs.mujoco.MujocoEnv):
ep_fields += ['delta_state']
if opt['model']['mode'] in ['hierarchical', 'hierarchical_many', 'maze_baseline', 'maze_baseline_wphase']:
if isinstance(env.unwrapped, maze_humanoid.ProprioceptiveHumanoidMazeEnv) or isinstance(env.unwrapped, maze_ant.AntMazeEnv):
ep_fields += ['goal_distance', 'goal_distance_radius']
self.ep_logger = csv.DictWriter(self.ep_f, fieldnames=ep_fields)
self.ep_logger.writeheader()
self.ep_f.flush()
# If in super verbose mode
if verbose:
# File to store every step
# Prints everything in episode_struct plus episode count
step_filename = os.path.join(log_dir, str(rank) + '.Step.Monitor.csv')
self.st_f = open(step_filename, "wt")
self.st_f.write('# Episode Logging %s\n'%json.dumps({"t_start": self.tstart, 'env_id' : env.spec and env.spec.id, 'mode': opt['model']['mode'], 'name': opt['logs']['exp_name']}))
st_fields = list(self.episode_struct.keys())
self.st_logger = csv.DictWriter(self.st_f, fieldnames=st_fields)
self.st_logger.writeheader()
self.st_f.flush()
else:
self.st_f = None
self.verbose = verbose
self.rank = rank
self.opt = opt
self.log_dir = log_dir
# Other bookkeeping
self.allow_early_resets = allow_early_resets
self.needs_reset = True
self.total_steps = 0
self.current_reset_info = {} # extra info about the current episode, that was passed in during reset()
# Reset environment, record initial values
def reset(self, **kwargs):
if not self.allow_early_resets and not self.needs_reset:
raise RuntimeError("Tried to reset an environment before done. If you want to allow early resets, wrap your env with Monitor(env, path, allow_early_resets=True)")
# Reset all the values in self.episode_struct
for key in self.episode_struct:
self.episode_struct[key] = []
# Update episode count
self.episode_count += 1
# Update values and return
obs = self.env.reset(**kwargs)
self.record_info(obs, 0)
self.needs_reset = False
return obs
# Take a step, update all the values
def step(self, action):
if self.needs_reset:
raise RuntimeError("Tried to step environment that needs reset")
# Do step
obs, rew, done, info = self.env.step(action)
# Record new info
self.record_info(obs, rew, action, info)
# If done with episode, get summary info for episode and dump values to episode and step files
if done:
self.needs_reset = True
# For rewards, get sums
epinfo = {}
for key in self.reward_list:
reward_val = sum(self.episode_struct[key])
epinfo[key] = reward_val
# For obs and state, get delta change
epinfo['delta_obs'] = self.episode_struct['obs'][-1] - self.episode_struct['obs'][0]
if 'state' in self.episode_struct:
epinfo['delta_state'] = self.episode_struct['state'][-1] - self.episode_struct['state'][0]
# For action, get average value
epinfo['mean_action'] = np.mean(self.episode_struct['action'], axis=0)
# Update episode_len, episode_dt and episode_count
epinfo['episode_len'] = len(self.episode_struct['env_count'])
epinfo['episode_dt'] = round(time.time() - self.tstart, 6)
epinfo['episode_count'] = self.episode_count
# Update goal distances
if 'goal_distance' in self.episode_struct:
epinfo['goal_distance'] = self.episode_struct['goal_distance'][-1]
epinfo['goal_distance_radius'] = self.episode_struct['goal_distance_radius'][-1]
elif 'key_distance' in self.episode_struct:
epinfo['key_distance'] = self.episode_struct['key_distance'][-1]
epinfo['key_distance_radius'] = self.episode_struct['key_distance_radius'][-1]
epinfo['lock_distance'] = self.episode_struct['lock_distance'][-1]
epinfo['lock_distance_radius'] = self.episode_struct['lock_distance_radius'][-1]
# Do string conversion
for k in epinfo:
epinfo[k] = str(epinfo[k]).replace('\n', '')
# Update episode file
if self.ep_logger:
self.ep_logger.writerow(epinfo)
self.ep_f.flush()
# If in super verbose mode
if self.verbose:
# Make and update a temp step file with just the last episode (and only rank 0, and only every 100)
if self.rank == 0: #and self.episode_count % 100 == 0:
# Setup temp file
tmp_step_filename = os.path.join(self.log_dir, 'Tmp.Last.Step.Monitor.csv')
tmp_f = open(tmp_step_filename, "wt")
tmp_f.write('# Episode Logging %s\n'%json.dumps({"t_start": self.tstart, 'env_id' : self.env.spec and self.env.spec.id, 'mode': self.opt['model']['mode'], 'name': self.opt['logs']['exp_name']}))
st_fields = list(self.episode_struct.keys())
tmp_logger = csv.DictWriter(tmp_f, fieldnames=st_fields)
tmp_logger.writeheader()
tmp_f.flush()
else:
tmp_f = None
# Update step file
assert(self.episode_struct['env_count'][-1]+1 == len(self.episode_struct['env_count']))
for step in range(len(self.episode_struct['env_count'])):
stepinfo = {}
for key in self.episode_struct:
stepinfo[key] = self.episode_struct[key][step]
# Do string conversion
for k in stepinfo:
stepinfo[k] = str(stepinfo[k]).replace('\n', '')
# Update loggers
self.st_logger.writerow(stepinfo)
if tmp_f is not None:
tmp_logger.writerow(stepinfo)
self.st_f.flush()
# Write tmp file and close, copy tmp to last
if tmp_f is not None:
tmp_f.flush()
tmp_f.close()
# Copy tmp to last
last_step_filename = os.path.join(self.log_dir, 'Last.Step.Monitor.csv')
shutil.copyfile(tmp_step_filename, last_step_filename)
# Update info
info['episode'] = epinfo
self.total_steps += 1
return (obs, rew, done, info)
# Record step info
def record_info(self, obs, rew, action=None, info=None):
# Update all of our values
# Reward values
for key in self.reward_list:
# If reset, all 0
if info is None:
self.episode_struct[key].append(0)
else:
# For baseline, reward_env is reward
if key == 'reward_env' and self.baseline:
self.episode_struct[key].append(rew)
else:
self.episode_struct[key].append(info[key])
# Observation values
self.episode_struct['obs'].append(obs)
# State values, right now just Mujoco
if isinstance(self.env.unwrapped, ant_env.BaseAntEnv) or isinstance(self.env.unwrapped, ant_env.BaseAntLowGearEnv) or isinstance(self.env.unwrapped, proprioceptive_humanoid_env.BaseProprioceptiveHumanoidEnv) or isinstance(self.env.unwrapped, gym.envs.mujoco.MujocoEnv):
state = self.env.unwrapped.state_vector()
self.episode_struct['state'].append(state)
# Update actions
if action is None:
action = np.zeros(self.env.action_space.shape)
self.episode_struct['action'].append(action)
# Update step and episode counts
env_count = self.env._elapsed_steps
self.episode_struct['env_count'].append(env_count)
self.episode_struct['episode_count'].append(self.episode_count)
# Update distances
if 'goal_distance' in self.episode_struct:
if info is None:
self.episode_struct['goal_distance'].append(0)
self.episode_struct['goal_distance_radius'].append(0)
else:
self.episode_struct['goal_distance'].append(info['goal_distance'])
self.episode_struct['goal_distance_radius'].append(info['goal_distance_radius'])
# Close file handles
def close(self):
if self.ep_f is not None:
self.ep_f.close()
if self.st_f is not None:
self.st_f.close()
# Get total number of steps
def get_total_steps(self):
return self.total_steps
|
[
"numpy.zeros",
"json.dumps",
"time.time",
"numpy.mean",
"collections.OrderedDict",
"shutil.copyfile",
"os.path.join",
"csv.DictWriter"
] |
[((702, 713), 'time.time', 'time.time', ([], {}), '()\n', (711, 713), False, 'import time\n'), ((3162, 3187), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (3185, 3187), False, 'import collections\n'), ((5319, 5366), 'csv.DictWriter', 'csv.DictWriter', (['self.ep_f'], {'fieldnames': 'ep_fields'}), '(self.ep_f, fieldnames=ep_fields)\n', (5333, 5366), False, 'import csv\n'), ((6016, 6063), 'csv.DictWriter', 'csv.DictWriter', (['self.st_f'], {'fieldnames': 'st_fields'}), '(self.st_f, fieldnames=st_fields)\n', (6030, 6063), False, 'import csv\n'), ((8452, 8498), 'numpy.mean', 'np.mean', (["self.episode_struct['action']"], {'axis': '(0)'}), "(self.episode_struct['action'], axis=0)\n", (8459, 8498), True, 'import numpy as np\n'), ((13095, 13132), 'numpy.zeros', 'np.zeros', (['self.env.action_space.shape'], {}), '(self.env.action_space.shape)\n', (13103, 13132), True, 'import numpy as np\n'), ((4419, 4558), 'json.dumps', 'json.dumps', (["{'t_start': self.tstart, 'env_id': env.spec and env.spec.id, 'mode': opt[\n 'model']['mode'], 'name': opt['logs']['exp_name']}"], {}), "({'t_start': self.tstart, 'env_id': env.spec and env.spec.id,\n 'mode': opt['model']['mode'], 'name': opt['logs']['exp_name']})\n", (4429, 4558), False, 'import json\n'), ((5792, 5931), 'json.dumps', 'json.dumps', (["{'t_start': self.tstart, 'env_id': env.spec and env.spec.id, 'mode': opt[\n 'model']['mode'], 'name': opt['logs']['exp_name']}"], {}), "({'t_start': self.tstart, 'env_id': env.spec and env.spec.id,\n 'mode': opt['model']['mode'], 'name': opt['logs']['exp_name']})\n", (5802, 5931), False, 'import json\n'), ((8678, 8689), 'time.time', 'time.time', ([], {}), '()\n', (8687, 8689), False, 'import time\n'), ((10058, 10113), 'os.path.join', 'os.path.join', (['self.log_dir', '"""Tmp.Last.Step.Monitor.csv"""'], {}), "(self.log_dir, 'Tmp.Last.Step.Monitor.csv')\n", (10070, 10113), False, 'import os\n'), ((10485, 10528), 'csv.DictWriter', 'csv.DictWriter', (['tmp_f'], {'fieldnames': 'st_fields'}), '(tmp_f, fieldnames=st_fields)\n', (10499, 10528), False, 'import csv\n'), ((11677, 11728), 'os.path.join', 'os.path.join', (['self.log_dir', '"""Last.Step.Monitor.csv"""'], {}), "(self.log_dir, 'Last.Step.Monitor.csv')\n", (11689, 11728), False, 'import os\n'), ((11749, 11803), 'shutil.copyfile', 'shutil.copyfile', (['tmp_step_filename', 'last_step_filename'], {}), '(tmp_step_filename, last_step_filename)\n', (11764, 11803), False, 'import shutil\n'), ((10229, 10394), 'json.dumps', 'json.dumps', (["{'t_start': self.tstart, 'env_id': self.env.spec and self.env.spec.id,\n 'mode': self.opt['model']['mode'], 'name': self.opt['logs']['exp_name']}"], {}), "({'t_start': self.tstart, 'env_id': self.env.spec and self.env.\n spec.id, 'mode': self.opt['model']['mode'], 'name': self.opt['logs'][\n 'exp_name']})\n", (10239, 10394), False, 'import json\n')]
|
#!/usr/bin/env python3
import argparse
import json
import os
from pyquery import PyQuery
import re
import requests
# given a URL such as http://www.ustream.tv/recorded/102894434
# fetch the details of the presentation
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--url', help='URL of the video: http://www.ustream.tv/recorded/102894434', required=True)
parser.add_argument('-d', '--date', help='date in YYYY-MM-DD format', required=True)
parser.add_argument('-e', '--event', help='date in YYYY-MM-DD format', required=True)
args = parser.parse_args()
#print(args.url)
#print(args.date)
#print(args.event)
response = requests.get(args.url)
if response.status_code != 200:
print("Failed to fetch {}".format(args.url))
return
m = re.search(r'\d+$', args.url)
video_code = m.group(0)
#print(video_code)
event_dir = 'data/videos/{}'.format(args.event)
#print(event_dir)
if not os.path.exists(event_dir):
os.mkdir(event_dir)
html = PyQuery(response.content)
# speaker - title
# <meta property="og:title" content="<NAME> - Tech Lead Skills for Developers" />
speaker_title = html('meta[@property="og:title"]')[0].attrib['content']
speaker, title = speaker_title.split(' - ', 2)
# print(speaker)
# print(title)
#re.sub(r'', '-', title.lower())
speaker_nickname = re.sub(r' +', '-', speaker.lower())
print(speaker_nickname)
speaker_file = "data/people/{}.txt".format(speaker_nickname)
if not os.path.exists(speaker_file):
with open(speaker_file, 'w') as fh:
fh.write("name: {}\n".format(speaker))
event_file = "{}/{}.json".format(event_dir, video_code)
print(event_file)
data = {
"description" : html('meta[<EMAIL>"]')[0].attrib['content'],
"favorite": "0",
"length": "",
"likes": "0",
"recorded": args.date,
"speakers": [
speaker_nickname
],
"tags": [],
# <meta property="og:image" content="http://static-cdn1.ustream.tv/i/video/picture/0/1/102/102894/102894434/1_17590738_102894434,640x360,b,1:2.jpg" />
"thumbnail_url": html('meta[<EMAIL>="og:<EMAIL>"]')[0].attrib['content'],
"title": title,
"videos": [
{
"code": video_code,
"type": "ustream"
}
],
"views": "0"
}
#import code
#video_code.interact(local=locals())
#m = html('meta["property="og:description"]')
#print(m.html)
if os.path.exists(event_file):
print("File {} already exists.".format(event_file))
return
with open(event_file, 'w') as fh:
json.dump(data, fh, sort_keys=True, indent=4, separators=(',', ': '))
print("length is missing! Add it manually!")
main()
# vim: expandtab
|
[
"os.mkdir",
"json.dump",
"pyquery.PyQuery",
"argparse.ArgumentParser",
"os.path.exists",
"requests.get",
"re.search"
] |
[((245, 270), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (268, 270), False, 'import argparse\n'), ((686, 708), 'requests.get', 'requests.get', (['args.url'], {}), '(args.url)\n', (698, 708), False, 'import requests\n'), ((822, 850), 're.search', 're.search', (['"""\\\\d+$"""', 'args.url'], {}), "('\\\\d+$', args.url)\n", (831, 850), False, 'import re\n'), ((1055, 1080), 'pyquery.PyQuery', 'PyQuery', (['response.content'], {}), '(response.content)\n', (1062, 1080), False, 'from pyquery import PyQuery\n'), ((2586, 2612), 'os.path.exists', 'os.path.exists', (['event_file'], {}), '(event_file)\n', (2600, 2612), False, 'import os\n'), ((988, 1013), 'os.path.exists', 'os.path.exists', (['event_dir'], {}), '(event_dir)\n', (1002, 1013), False, 'import os\n'), ((1023, 1042), 'os.mkdir', 'os.mkdir', (['event_dir'], {}), '(event_dir)\n', (1031, 1042), False, 'import os\n'), ((1556, 1584), 'os.path.exists', 'os.path.exists', (['speaker_file'], {}), '(speaker_file)\n', (1570, 1584), False, 'import os\n'), ((2736, 2805), 'json.dump', 'json.dump', (['data', 'fh'], {'sort_keys': '(True)', 'indent': '(4)', 'separators': "(',', ': ')"}), "(data, fh, sort_keys=True, indent=4, separators=(',', ': '))\n", (2745, 2805), False, 'import json\n')]
|
#!/usr/bin/env python
# coding=utf-8
import kaldiio
x=kaldiio.load_mat("/home/zlj/dxf/espnet/egs/vwm/tts1/decode/tts/xvectors/xvector.1.ark:4")
spemb = torch.FloatTensor(x).to(device)
print(str(x))
|
[
"kaldiio.load_mat"
] |
[((55, 149), 'kaldiio.load_mat', 'kaldiio.load_mat', (['"""/home/zlj/dxf/espnet/egs/vwm/tts1/decode/tts/xvectors/xvector.1.ark:4"""'], {}), "(\n '/home/zlj/dxf/espnet/egs/vwm/tts1/decode/tts/xvectors/xvector.1.ark:4')\n", (71, 149), False, 'import kaldiio\n')]
|
import MapReduce
import sys
"""
Word Count Example in the Simple Python MapReduce Framework
"""
mr = MapReduce.MapReduce()
# =============================
# Do not modify above this line
def mapper(record):
# key: document identifier
# value: document contents
key = record[0]
friend = record[1]
#emit main relationship
mr.emit_intermediate(key, record);
#emit friend relationship to check non-sym
mr.emit_intermediate(friend, record);
def reducer(key, list_of_values):
# key: word
# value: book
for v in list_of_values:
nonRel=[v[1],v[0]]
if nonRel not in list_of_values:
if v[0] == key:
mr.emit((v[0],v[1]))
else:
mr.emit((v[1],v[0]))
# Do not modify below this line
# =============================
if __name__ == '__main__':
inputdata = open(sys.argv[1])
mr.execute(inputdata, mapper, reducer)
|
[
"MapReduce.MapReduce"
] |
[((103, 124), 'MapReduce.MapReduce', 'MapReduce.MapReduce', ([], {}), '()\n', (122, 124), False, 'import MapReduce\n')]
|
import praw, os, discord, requests
from steamstorefront import SteamStoreFront
from datetime import datetime
from collections import deque
from bs4 import BeautifulSoup
from common import common, database
from ..helpers import steam, gamedeals, guild
from ..helpers.gamedeals import isFromAcceptableStore
class GameDeals:
r = None
steam = None
# constructor to initialize varialbles
def __init__(self):
config = common.getConfig()
self.masterLogger = common.getMasterLog()
self.r = praw.Reddit(client_id=config['REDDIT']['client.id'], client_secret=config['REDDIT']['client.secret'], user_agent=config['REDDIT']['user.agent'])
self.steam = steam.Steam()
self.ssf = SteamStoreFront()
# get new results
def getSubreddit(self, subreddit, limit):
rsub = self.r.subreddit(subreddit)
res = rsub.new(limit=limit)
return res
def keyDoesNotExists(self, deq, dict):
for el in deq:
if el['url'] == dict['url']:
return False
return True
async def run(self, bot):
masterLogger = common.getMasterLog()
db = database.Database()
# subreddits to fetch
subreddits = ['gamedeals', 'steamdeals', 'freegamefindings']
# final post container of non existing and distinct deals
enriched_post = deque()
# for each subreddit
for subreddit in subreddits:
# get the service record
service = db.getService(subreddit)
if 'latest' not in service:
service['latest'] = None
# get the latest submissions
posts = []
try:
posts = self.getSubreddit(subreddit, 30)
except Exception:
await bot.get_channel(masterLogger).send(f"**Error** : unable to fetch r/{subreddit}")
# id container
id = None
if common.getEnvironment() == 'dev':
# post log in masterlogger
await bot.get_channel(masterLogger).send(f"scraped {subreddit}.")
# iterate through posts
for post in posts:
# this is done for getting the first id
if not id:
id = post.id
# if there are no new post, break
if post.id == service['latest']:
break
if isFromAcceptableStore(post):
deal = {}
deal['title'] = post.title
deal['id'] = post.id
if "reddit.com" in post.url:
deal['url'] = gamedeals.getStoreLink(post)
else:
deal['url'] = gamedeals.removeURI(post.url)
deal['created'] = common.getTimeFromTimestamp(post.created)
if 'url' in deal and deal['url']:
# check if its steam store link
if 'steampowered.com' in deal['url']:
price = None
try:
price = self.ssf.getPrice(url=deal['url'])
except InvalidArgument as e:
if common.getEnvironment() == 'prod' or common.getEnvironment() == 'dev':
await bot.get_channel(masterLogger).send(f"error getting price for {deal['url']} of reddit id {deal['id']}. Arguments passed {e.error}, error type {e.type}.")
pass
if price:
deal['price'] = price['final']
if self.keyDoesNotExists(enriched_post, deal):
enriched_post.appendleft(deal)
# update database
data = {}
data["name"] = subreddit
if len(enriched_post) > 0:
data["lastposted"] = common.getDatetimeIST()
if id:
data["latest"] = id
status = db.upsertService(data)
if status == common.STATUS.SUCCESS.INSERTED:
await bot.get_channel(masterLogger).send(f"**Created Service**: {data['name']}.")
elif status == common.STATUS.FAIL.INSERT:
await bot.get_channel(masterLogger).send(f"**DB Insert Error - Service**: {data['name']}.")
elif status == common.STATUS.FAIL.UPDATE:
await bot.get_channel(masterLogger).send(f"**DB Update Error - Service**: {data['name']}.")
else:
pass
# send the final deque for posting
await self.send(enriched_post, bot)
# steam deals
async def send(self, posts, bot):
db = database.Database()
# go through new submissions
for post in posts:
status = db.upsertGameDeal(post)
# 1 = updated, 2 = created, -1 = error in update/inserting
channels = guild.getChannels('gamedeals')
for channel in channels:
# the deal already exists
if status == common.STATUS.SUCCESS.UPDATED:
# price check for steam games
if 'steampowered.com' in post['url']:
try:
existingDeal = db.getGameDeal(post)
new_price = self.ssf.getPrice(url=post['url'])
new_price = new_price['final'] if new_price else 9223372036854775806
if 'price' in existingDeal:
old_price = existingDeal['price']
# if new price is less than older price post the deal
if int(new_price) < int(old_price):
await self.steam.post(bot, channel, post)
# can't compare price, so leave the deal
except InvalidArgument as e:
if common.getEnvironment() == 'prod' or common.getEnvironment() == 'dev':
await bot.get_channel(common.getMasterLog()).send(f"error getting price for {post['url']} of reddit id {post['id']}. Arguments passed {e.error}, error type {e.type}.")
pass
# else:
# await self.steam.post(bot, channel, post)
# the deal is a new one
elif status == common.STATUS.SUCCESS.INSERTED:
# special handler for steam
if 'steampowered.com' in post['url']:
await self.steam.post(bot, channel, post)
else:
await bot.get_channel(channel['channel_id']).send(post['url'])
# if logging is enabled post log
if 'logging' in channel:
await bot.get_channel(channel['logging']).send(f"sent {post['title']} in {channel['channel_name']}")
# there has been error updating or inserting deal
else:
# log it in master log
bot.get_channel(self.masterLogger).send(f"**DB Error**: Failed Updating/Inserting {post['id']}.")
|
[
"common.common.getTimeFromTimestamp",
"common.common.getMasterLog",
"common.common.getConfig",
"steamstorefront.SteamStoreFront",
"common.database.Database",
"common.common.getDatetimeIST",
"common.common.getEnvironment",
"praw.Reddit",
"collections.deque"
] |
[((439, 457), 'common.common.getConfig', 'common.getConfig', ([], {}), '()\n', (455, 457), False, 'from common import common, database\n'), ((486, 507), 'common.common.getMasterLog', 'common.getMasterLog', ([], {}), '()\n', (505, 507), False, 'from common import common, database\n'), ((525, 674), 'praw.Reddit', 'praw.Reddit', ([], {'client_id': "config['REDDIT']['client.id']", 'client_secret': "config['REDDIT']['client.secret']", 'user_agent': "config['REDDIT']['user.agent']"}), "(client_id=config['REDDIT']['client.id'], client_secret=config[\n 'REDDIT']['client.secret'], user_agent=config['REDDIT']['user.agent'])\n", (536, 674), False, 'import praw, os, discord, requests\n'), ((724, 741), 'steamstorefront.SteamStoreFront', 'SteamStoreFront', ([], {}), '()\n', (739, 741), False, 'from steamstorefront import SteamStoreFront\n'), ((1120, 1141), 'common.common.getMasterLog', 'common.getMasterLog', ([], {}), '()\n', (1139, 1141), False, 'from common import common, database\n'), ((1155, 1174), 'common.database.Database', 'database.Database', ([], {}), '()\n', (1172, 1174), False, 'from common import common, database\n'), ((1366, 1373), 'collections.deque', 'deque', ([], {}), '()\n', (1371, 1373), False, 'from collections import deque\n'), ((4791, 4810), 'common.database.Database', 'database.Database', ([], {}), '()\n', (4808, 4810), False, 'from common import common, database\n'), ((1944, 1967), 'common.common.getEnvironment', 'common.getEnvironment', ([], {}), '()\n', (1965, 1967), False, 'from common import common, database\n'), ((3983, 4006), 'common.common.getDatetimeIST', 'common.getDatetimeIST', ([], {}), '()\n', (4004, 4006), False, 'from common import common, database\n'), ((2828, 2869), 'common.common.getTimeFromTimestamp', 'common.getTimeFromTimestamp', (['post.created'], {}), '(post.created)\n', (2855, 2869), False, 'from common import common, database\n'), ((6120, 6143), 'common.common.getEnvironment', 'common.getEnvironment', ([], {}), '()\n', (6141, 6143), False, 'from common import common, database\n'), ((6157, 6180), 'common.common.getEnvironment', 'common.getEnvironment', ([], {}), '()\n', (6178, 6180), False, 'from common import common, database\n'), ((3283, 3306), 'common.common.getEnvironment', 'common.getEnvironment', ([], {}), '()\n', (3304, 3306), False, 'from common import common, database\n'), ((3320, 3343), 'common.common.getEnvironment', 'common.getEnvironment', ([], {}), '()\n', (3341, 3343), False, 'from common import common, database\n'), ((6245, 6266), 'common.common.getMasterLog', 'common.getMasterLog', ([], {}), '()\n', (6264, 6266), False, 'from common import common, database\n')]
|
import pprint
from googlevoice import Voice
def run():
voice = Voice()
voice.login()
pprint.pprint(voice.settings)
__name__ == '__main__' and run()
|
[
"pprint.pprint",
"googlevoice.Voice"
] |
[((70, 77), 'googlevoice.Voice', 'Voice', ([], {}), '()\n', (75, 77), False, 'from googlevoice import Voice\n'), ((101, 130), 'pprint.pprint', 'pprint.pprint', (['voice.settings'], {}), '(voice.settings)\n', (114, 130), False, 'import pprint\n')]
|
#!/bin/env python3
from PIL import Image
from numpy import asarray
import json
regionmap = asarray(Image.open('RegionMap.png'))
region1 = 42 * 4
regions = [
None,
"Galactic Centre",
"Empyrean Straits",
"Ryker's Hope",
"Odin's Hold",
"Norma Arm",
"Arcadian Stream",
"Izanami",
"Inner Orion-Perseus Conflux",
"Inner Scutum-Centaurus Arm",
"Norma Expanse",
"Trojan Belt",
"The Veils",
"Newton's Vault",
"The Conduit",
"Outer Orion-Perseus Conflux",
"Orion-Cygnus Arm",
"Temple",
"Inner Orion Spur",
"Hawking's Gap",
"Dryman's Point",
"Sagittarius-Carina Arm",
"<NAME>",
"Acheron",
"Formorian Frontier",
"Hieronymus Delta",
"Outer Scutum-Centaurus Arm",
"Outer Arm",
"Aquila's Halo",
"Errant Marches",
"Perseus Arm",
"Formidine Rift",
"Vulcan Gate",
"Elysian Shore",
"Sanguineous Rim",
"Outer Orion Spur",
"Achilles's Altar",
"Xibalba",
"Lyra's Song",
"Tenebrae",
"The Abyss",
"Kepler's Crest",
"The Void"
]
lines = []
for l in regionmap[::-1]:
rle = []
p = 0
n = 0
for px in l:
px = 0 if px == 0 else (region1 - px) // 4 + 1
if px != p:
rle.append((n, p))
p = px
n = 1
else:
n += 1
rle.append((n, p))
lines.append(rle)
with open('RegionMapData.py', 'wt') as f:
f.write('#!/bin/env python3\n')
f.write('\n')
f.write('regions = [\n')
for r in regions:
f.write(' {0},\n'.format(repr(r)))
f.write(']\n')
f.write('\n')
f.write('regionmap = [\n')
for l in lines:
f.write(' {0},\n'.format(repr(l)))
f.write(']\n')
f.write('\n')
with open('RegionMapData.json', 'wt') as f:
f.write('{\n')
f.write(' "regions": [\n')
f.write(' {0}\n'.format(',\n '.join(json.dumps(r) for r in regions)))
f.write(' ],\n')
f.write(' "regionmap": [\n');
f.write(' {0}\n'.format(',\n '.join(json.dumps([[int(rl), int(rv)] for rl, rv in l]) for l in lines)))
f.write(' ]\n')
f.write('}\n')
with open('RegionMapData.cs', 'wt') as f:
f.write('namespace EliteDangerousRegionMap\n')
f.write('{\n')
f.write(' public static partial class RegionMap\n')
f.write(' {\n')
f.write(' private static string[] RegionNames = new[]\n')
f.write(' {\n')
for r in regions:
f.write(' {0},\n'.format(json.dumps(r)))
f.write(' };\n')
f.write('\n')
f.write(' private static (int, int)[][] RegionMapLines = new[]\n')
f.write(' {\n')
for row in lines:
f.write(' new[]{' + ','.join(repr((l, v)) for l, v in row) + '},\n')
f.write(' };\n')
f.write(' }\n')
f.write('}\n')
|
[
"json.dumps",
"PIL.Image.open"
] |
[((100, 127), 'PIL.Image.open', 'Image.open', (['"""RegionMap.png"""'], {}), "('RegionMap.png')\n", (110, 127), False, 'from PIL import Image\n'), ((2519, 2532), 'json.dumps', 'json.dumps', (['r'], {}), '(r)\n', (2529, 2532), False, 'import json\n'), ((1904, 1917), 'json.dumps', 'json.dumps', (['r'], {}), '(r)\n', (1914, 1917), False, 'import json\n')]
|
import click
import json
from pathlib import Path
from .query import query
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
def _format_line(obj, _format):
if _format == 'refstr':
return obj.to_string()
if _format == 'str':
return '`{}` ({})'.format(obj.text,obj.version)
def _formatter(obj_list, _format):
# Handle json
if _format == 'json':
return json.dumps([v.to_dict() for v in obj_list],indent=4)
# Handle everything else
return "\n".join([_format_line(obj,_format) for obj in obj_list])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--version', '-v', default='ESV', help='Bible version to query, default is `ESV`')
@click.option('--format', '-f', '_format', default='refstr', type=click.Choice(['refstr','str','json']),
help='Specify output format, default is `refstr`')
@click.option('--cache/--no-cache', default=False,
help='Look up verses saved in a local cache first, and save new queries locally')
@click.argument('reference_string')
def scrap(version, reference_string, _format, cache):
"""Scrap bible verses
REFERENCE_STRING a (comma delimited) list of references, e.g. `John3.16` or `1Peter3.1-5` or `Gen1,2`
"""
verses = query( reference_string, version, cache )
click.echo(_formatter(verses, _format))
|
[
"click.Choice",
"click.option",
"click.argument",
"click.command"
] |
[((561, 609), 'click.command', 'click.command', ([], {'context_settings': 'CONTEXT_SETTINGS'}), '(context_settings=CONTEXT_SETTINGS)\n', (574, 609), False, 'import click\n'), ((611, 711), 'click.option', 'click.option', (['"""--version"""', '"""-v"""'], {'default': '"""ESV"""', 'help': '"""Bible version to query, default is `ESV`"""'}), "('--version', '-v', default='ESV', help=\n 'Bible version to query, default is `ESV`')\n", (623, 711), False, 'import click\n'), ((872, 1013), 'click.option', 'click.option', (['"""--cache/--no-cache"""'], {'default': '(False)', 'help': '"""Look up verses saved in a local cache first, and save new queries locally"""'}), "('--cache/--no-cache', default=False, help=\n 'Look up verses saved in a local cache first, and save new queries locally'\n )\n", (884, 1013), False, 'import click\n'), ((1015, 1049), 'click.argument', 'click.argument', (['"""reference_string"""'], {}), "('reference_string')\n", (1029, 1049), False, 'import click\n'), ((773, 812), 'click.Choice', 'click.Choice', (["['refstr', 'str', 'json']"], {}), "(['refstr', 'str', 'json'])\n", (785, 812), False, 'import click\n')]
|
import os
import pygame
import random
from level_3.module import background_module
from level_3.module import foreground_module
from level_3.module import player_module
class Ghost():
"""
Describes ghost obstacles.
"""
# Loading ghost images
num_of_imgs = 6
list_of_lists = []
path = r'level_3/Utils/Pics/Ghost/'
colour_list = os.listdir(path)
num_of_colours = len(colour_list)
for colour in colour_list:
imgs = []
for x in range(num_of_imgs):
imgs.append(pygame.image.load(os.path.join(path, colour+"/"+ str(x) + '.png')))
list_of_lists.append(imgs)
ghosts_list = []
collision_ghosts = [] # ghosts for which we have to check collision
def __init__(self,x,y,colour_num):
self.x = x
self.y = y
self.run_count = 0
self.colour_num = colour_num
random_num = random.uniform(6, 10)
self.ghosts_list = [pygame.transform.scale(img, (int(img.get_width()/random_num), int(img.get_height()/random_num))) for img in self.list_of_lists[colour_num]]
# Variables for sine wave trajectory calculation
self.org_y = y # initial y value where the ghost is spawned
self.time = 0 # Taken for a reference
self.frequency = random.uniform(0.005, 0.013) # frequency of sine wave
self.amplitude = random.randrange(30, 70) # Amplitude of sine wave - defines range of ghost movement in y axis
def draw(self, win):
# Determining index of ghost image to be drawn
self.frames_per_image = 7 # each ghost image is drawn for 7 consecutive frames
if self.run_count >= self.frames_per_image*self.num_of_imgs:
self.run_count = 0
self.index = self.run_count//self.frames_per_image
self.run_count += 1
# Drawing ghost image
self.img = self.ghosts_list[self.index]
self.randomize_movement()
win.blit(self.img, (self.x,self.y))
def randomize_movement(self):
# Sine wave trajectory for ghost
self.y= self.org_y
self.time += 1
def create_ghost():
"""
Creates a dragon in the free space.
"""
x = random.randint(50,400) # choose random y value in upper half of window (WIP)
colour_num = random.randrange(Ghost.num_of_colours)
new_ghost = Ghost(background_module.bg.get_width(), x, colour_num)
Ghost.ghosts_list.append(new_ghost)
Ghost.collision_ghosts.append(new_ghost) # To check collision
def draw_ghost(win):
for ghost in Ghost.ghosts_list:
ghost.draw(win)
update_ghosts_position()
def update_ghosts_position():
"""
Updates the x coordinates of ghost. If ghost goes offscreen, remove it from the list.
"""
for ghost in Ghost.ghosts_list:
ghost_width = ghost.imgs[0].get_width()
if ghost.x < -1*ghost_width: # If ghost goes offscreen, removing it from ghost list
try:
ghost.ghosts_list.remove(ghost)
except: pass
else:
ghost.x -= (foreground_module.foreground_speed + 4)
def collision_with_ghost():
"""
Collision with ghost is checked using Pixel perfect collision method. If collision occurs returns True, else False.
Collision is checked only if ghost is near the player to save computation.
"""
player = player_module.player
propeller = player_module.propeller
if len(Ghost.collision_ghosts)!=0:
for ghost in Ghost.collision_ghosts:
if ghost.x < (player.x + player.img.get_width()) and (ghost.x + ghost.img.get_width()) > player.x:
if ghost.y < (player.y + player.img.get_height()) and (ghost.y + ghost.img.get_height()) > player.y: # Checking for collision if near player
player_mask = pygame.mask.from_surface(player.img)
propeller_mask = pygame.mask.from_surface(propeller.propeller_img)
ghost_mask = pygame.mask.from_surface(ghost.img)
offset = int(ghost.x - player.x), int(ghost.y - player.y)
collision_point_with_player = player_mask.overlap(ghost_mask, offset)
collision_point_with_propeller = propeller_mask.overlap(ghost_mask, offset) # Checking collision with player
if collision_point_with_player or collision_point_with_propeller:
Ghost.collision_ghosts.remove(ghost)
return True
return False
|
[
"random.randint",
"random.uniform",
"pygame.mask.from_surface",
"random.randrange",
"level_3.module.background_module.bg.get_width",
"os.listdir"
] |
[((338, 354), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (348, 354), False, 'import os\n'), ((1971, 1994), 'random.randint', 'random.randint', (['(50)', '(400)'], {}), '(50, 400)\n', (1985, 1994), False, 'import random\n'), ((2062, 2100), 'random.randrange', 'random.randrange', (['Ghost.num_of_colours'], {}), '(Ghost.num_of_colours)\n', (2078, 2100), False, 'import random\n'), ((795, 816), 'random.uniform', 'random.uniform', (['(6)', '(10)'], {}), '(6, 10)\n', (809, 816), False, 'import random\n'), ((1168, 1196), 'random.uniform', 'random.uniform', (['(0.005)', '(0.013)'], {}), '(0.005, 0.013)\n', (1182, 1196), False, 'import random\n'), ((1241, 1265), 'random.randrange', 'random.randrange', (['(30)', '(70)'], {}), '(30, 70)\n', (1257, 1265), False, 'import random\n'), ((2120, 2152), 'level_3.module.background_module.bg.get_width', 'background_module.bg.get_width', ([], {}), '()\n', (2150, 2152), False, 'from level_3.module import background_module\n'), ((3424, 3460), 'pygame.mask.from_surface', 'pygame.mask.from_surface', (['player.img'], {}), '(player.img)\n', (3448, 3460), False, 'import pygame\n'), ((3483, 3532), 'pygame.mask.from_surface', 'pygame.mask.from_surface', (['propeller.propeller_img'], {}), '(propeller.propeller_img)\n', (3507, 3532), False, 'import pygame\n'), ((3551, 3586), 'pygame.mask.from_surface', 'pygame.mask.from_surface', (['ghost.img'], {}), '(ghost.img)\n', (3575, 3586), False, 'import pygame\n')]
|
import json
from sklearn.metrics import mean_squared_error, mean_absolute_error
import numpy as np
from model import Dfembeding
from sklearn.kernel_ridge import KernelRidge
import torch
from PIL import Image
from utils import *
import csv
import torch.utils.data as data
import pandas as pd
def mean_absolute_percentage_error(y_true, y_pred):
y_true, y_pred = np.array(y_true), np.array(y_pred)
return np.mean(np.abs((y_true - y_pred) / y_true)) * 100
# class Dataset(data.Dataset):
# def __init__(self, file, transfrom):
# self.Pic_Names = os.listdir(file)
# self.file = file
# self.transfrom = transfrom
#
# def __len__(self):
# return len(self.Pic_Names)
#
# def __getitem__(self, idx):
# img_name = self.Pic_Names[idx]
# Pic = Image.open(os.path.join(self.file, self.Pic_Names[idx]))
# Pic = self.transfrom(Pic)
# try:
# ret = re.match(r"\d+?_([FMfm])_(\d+?)_(\d+?)_(\d+).+", img_name)
# BMI = (int(ret.group(4)) / 100000) / (int(ret.group(3)) / 100000) ** 2
# Pic_name = os.path.join(self.file, self.Pic_Names[idx])
# return (Pic, Pic_name), BMI
# except:
# return (Pic, ''), 10000
class Dataset(data.Dataset):
def __init__(self, file, transfrom):
self.Pic_Names = os.listdir(file)
self.file = file
self.transfrom = transfrom
def __len__(self):
return len(self.Pic_Names)
def __getitem__(self, idx):
img_name = self.Pic_Names[idx]
Pic = Image.open(os.path.join(self.file, self.Pic_Names[idx]))
Pic = self.transfrom(Pic)
ret = re.match(r"\d+?_([FMfm])_(\d+?)_(\d+?)_(\d+).+", img_name)
sex = 0 if (ret.group(1) == 'F' or ret.group(1) == 'f') else 1
age = int(ret.group(2))
height = int(ret.group(3)) / 100000
weight = int(ret.group(4)) / 100000
BMI = weight / (height ** 2)
# BMI = (int(ret.group(4))/100000) / (int(ret.group(3))/100000)**2
Pic_name = os.path.join(self.file, self.Pic_Names[idx])
return (Pic, Pic_name, img_name, sex, age, height, weight), BMI
def CombineDFBF(model, BodyFeatures, df, loader_test, loader_train):
# test(model, DEVICE, loader_test)
loaders = [ loader_test, loader_train,]
files = [ 'test', 'train',]
for loader, file in zip(loaders, files):
with open('/home/benkesheng/BMI_DETECT/Deep_Learning_Method/DF_BF_csv/20-1_{}.csv'.format(file), 'w',
newline='') as fp:
writer = csv.writer(fp)
model.eval()
pred = []
targ = []
for (img, name, img_name, sex, age, height, weight), target in loader:
values = []
img, target = img.to(DEVICE), target.to(DEVICE)
img_name = img_name[0]
# print('Processing IMage :', img_name)
values.append(img_name)
values.append(target.cpu().numpy()[0])
values.append(sex.numpy()[0])
values.append(BodyFeatures[img_name]['WSR'])
values.append(BodyFeatures[img_name]['WTR'])
values.append(BodyFeatures[img_name]['WHpR'])
values.append(BodyFeatures[img_name]['WHdR'])
values.append(BodyFeatures[img_name]['HpHdR'])
values.append(BodyFeatures[img_name]['Area'])
values.append(BodyFeatures[img_name]['H2W'])
conv_out = LayerActivations(model.fc, 1)
out = model(img)
pred.append(out.item())
targ.append(target.item())
conv_out.remove()
xs = torch.squeeze(conv_out.features.detach()).numpy()
# print(xs.shape)
for x in xs:
values.append(float(x))
values.append(age.numpy()[0])
values.append(height.numpy()[0])
values.append(weight.numpy()[0])
writer.writerow(values)
MAE = mean_absolute_error(targ, pred)
print(file,' ',MAE)
def Pre(raw_data, name):
if (name != 'vgg16'):
raw_data = raw_data.iloc[:, 1:]
raw_data = raw_data.replace([np.inf, -np.inf], np.nan)
# raw_data = raw_data.fillna(raw_data.mean())
raw_data = raw_data.replace(np.nan, 0)
raw_data = raw_data.values.astype(np.float64)
return raw_data
def Feature(data, df, name):
if (name == 'author'):
x_5f = data[:, 0:5]
y = data[:, 9]
return x_5f, y
elif (name == 'vgg16'):
x_df = data[:, 2:]
y = data[:, 0]
return x_df, y
elif (name == 'ours'):
x_5f = data[:, 3:8]
x_7f = data[:, 2:9]
x_df = data[:, 9:9 + df]
y = data[:, 0]
return x_5f, x_7f, x_df, y
def Stdm(x):
Mean = np.mean(x, axis=0)
Std = np.std(x, axis=0)
return Mean, Std
def Regression(df=20, file='test'):
# raw_data_train = pd.read_csv('/home/benkesheng/BMI_DETECT/ReDone_CSV/Ours/Image_train.csv')
# raw_data_test = pd.read_csv('/home/benkesheng/BMI_DETECT/ReDone_CSV/Ours/Image_test.csv')
raw_data_train = pd.read_csv('/home/benkesheng/BMI_DETECT/Deep_Learning_Method/DF_BF_csv/20-1_train.csv')
raw_data_test = pd.read_csv('/home/benkesheng/BMI_DETECT/Deep_Learning_Method/DF_BF_csv/20-1_test.csv')
raw_data_name = raw_data_test.values
raw_data_train = Pre(raw_data_train, 'ours')
raw_data_test = Pre(raw_data_test, 'ours')
x_5f_train, x_7f_train, x_df_train, y_train = Feature(raw_data_train, df, 'ours')
x_5f_test, x_7f_test, x_df_test, y_test = Feature(raw_data_test, df, 'ours')
x_body_train = x_7f_train
Mean, Std = Stdm(x_body_train)
x_body_train = (x_body_train - Mean) / Std
x_train = np.append(x_body_train, x_df_train, axis=1)
y_train = y_train
x_body_test = x_7f_test
x_body_test = (x_body_test - Mean) / Std
x_test = np.append(x_body_test, x_df_test, axis=1)
y_test = y_test
print(x_test.shape)
print(x_train.shape)
krr = KernelRidge()
krr.fit(x_train, y_train)
y_krr = krr.predict(x_test)
print('KRR: MAE: ', mean_absolute_error(y_test, y_krr), ' MAPE: ', mean_absolute_percentage_error(y_test, y_krr))
if file == 'demo':
for i, data in enumerate(x_test):
y_pred = krr.predict(data[None,:])
print('Name: ', raw_data_name[i][0], ' y_pred:', y_pred[0], ' y_ture:', y_test[i])
if __name__ == '__main__':
IMG_SIZE = 224
IMG_MEAN = [0.485, 0.456, 0.406]
IMG_STD = [0.229, 0.224, 0.225]
transform = transforms.Compose([
Resize(IMG_SIZE),
transforms.Pad(IMG_SIZE),
transforms.CenterCrop(IMG_SIZE),
transforms.ToTensor(),
transforms.Normalize(IMG_MEAN, IMG_STD)
])
DEVICE = torch.device("cuda:0")
dataset_train = Dataset('/home/benkesheng/BMI_DETECT/datasets/Image_train', transform)
dataset_test = Dataset('/home/benkesheng/BMI_DETECT/datasets/Image_test', transform)
loader_train = torch.utils.data.DataLoader(dataset_train, batch_size=1, shuffle=True)
loader_test = torch.utils.data.DataLoader(dataset_test, batch_size=1, shuffle=True)
df = 20
model = Dfembeding()
# model.load_state_dict(torch.load('/home/benkesheng/BMI_DETECT/ReDone_CSV/model/Ours.pkl'.format(df)))
model.load_state_dict(torch.load('/home/benkesheng/BMI_DETECT/MODEL/9-1reexperiment/MIN_RESNET101_BMI_20-1fc.pkl'))
model.to(DEVICE)
Path = '/home/benkesheng/BMI_DETECT/Deep_Learning_Method/datasets_bodyfeature/BodyFeature.json'
with open(Path, 'r') as f:
BodyFeatures = json.load(f)
# CombineDFBF(model, BodyFeatures, df, loader_test, loader_train)
Regression(df)
|
[
"json.load",
"numpy.abs",
"csv.writer",
"torch.utils.data.DataLoader",
"numpy.std",
"sklearn.kernel_ridge.KernelRidge",
"pandas.read_csv",
"torch.load",
"model.Dfembeding",
"sklearn.metrics.mean_absolute_error",
"numpy.append",
"numpy.mean",
"numpy.array",
"torch.device"
] |
[((5043, 5061), 'numpy.mean', 'np.mean', (['x'], {'axis': '(0)'}), '(x, axis=0)\n', (5050, 5061), True, 'import numpy as np\n'), ((5073, 5090), 'numpy.std', 'np.std', (['x'], {'axis': '(0)'}), '(x, axis=0)\n', (5079, 5090), True, 'import numpy as np\n'), ((5374, 5472), 'pandas.read_csv', 'pd.read_csv', (['"""/home/benkesheng/BMI_DETECT/Deep_Learning_Method/DF_BF_csv/20-1_train.csv"""'], {}), "(\n '/home/benkesheng/BMI_DETECT/Deep_Learning_Method/DF_BF_csv/20-1_train.csv'\n )\n", (5385, 5472), True, 'import pandas as pd\n'), ((5484, 5576), 'pandas.read_csv', 'pd.read_csv', (['"""/home/benkesheng/BMI_DETECT/Deep_Learning_Method/DF_BF_csv/20-1_test.csv"""'], {}), "(\n '/home/benkesheng/BMI_DETECT/Deep_Learning_Method/DF_BF_csv/20-1_test.csv')\n", (5495, 5576), True, 'import pandas as pd\n'), ((6015, 6058), 'numpy.append', 'np.append', (['x_body_train', 'x_df_train'], {'axis': '(1)'}), '(x_body_train, x_df_train, axis=1)\n', (6024, 6058), True, 'import numpy as np\n'), ((6173, 6214), 'numpy.append', 'np.append', (['x_body_test', 'x_df_test'], {'axis': '(1)'}), '(x_body_test, x_df_test, axis=1)\n', (6182, 6214), True, 'import numpy as np\n'), ((6302, 6315), 'sklearn.kernel_ridge.KernelRidge', 'KernelRidge', ([], {}), '()\n', (6313, 6315), False, 'from sklearn.kernel_ridge import KernelRidge\n'), ((7083, 7105), 'torch.device', 'torch.device', (['"""cuda:0"""'], {}), "('cuda:0')\n", (7095, 7105), False, 'import torch\n'), ((7308, 7378), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['dataset_train'], {'batch_size': '(1)', 'shuffle': '(True)'}), '(dataset_train, batch_size=1, shuffle=True)\n', (7335, 7378), False, 'import torch\n'), ((7398, 7467), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['dataset_test'], {'batch_size': '(1)', 'shuffle': '(True)'}), '(dataset_test, batch_size=1, shuffle=True)\n', (7425, 7467), False, 'import torch\n'), ((7496, 7508), 'model.Dfembeding', 'Dfembeding', ([], {}), '()\n', (7506, 7508), False, 'from model import Dfembeding\n'), ((380, 396), 'numpy.array', 'np.array', (['y_true'], {}), '(y_true)\n', (388, 396), True, 'import numpy as np\n'), ((398, 414), 'numpy.array', 'np.array', (['y_pred'], {}), '(y_pred)\n', (406, 414), True, 'import numpy as np\n'), ((6405, 6439), 'sklearn.metrics.mean_absolute_error', 'mean_absolute_error', (['y_test', 'y_krr'], {}), '(y_test, y_krr)\n', (6424, 6439), False, 'from sklearn.metrics import mean_squared_error, mean_absolute_error\n'), ((7647, 7749), 'torch.load', 'torch.load', (['"""/home/benkesheng/BMI_DETECT/MODEL/9-1reexperiment/MIN_RESNET101_BMI_20-1fc.pkl"""'], {}), "(\n '/home/benkesheng/BMI_DETECT/MODEL/9-1reexperiment/MIN_RESNET101_BMI_20-1fc.pkl'\n )\n", (7657, 7749), False, 'import torch\n'), ((7924, 7936), 'json.load', 'json.load', (['f'], {}), '(f)\n', (7933, 7936), False, 'import json\n'), ((435, 469), 'numpy.abs', 'np.abs', (['((y_true - y_pred) / y_true)'], {}), '((y_true - y_pred) / y_true)\n', (441, 469), True, 'import numpy as np\n'), ((2645, 2659), 'csv.writer', 'csv.writer', (['fp'], {}), '(fp)\n', (2655, 2659), False, 'import csv\n'), ((4197, 4228), 'sklearn.metrics.mean_absolute_error', 'mean_absolute_error', (['targ', 'pred'], {}), '(targ, pred)\n', (4216, 4228), False, 'from sklearn.metrics import mean_squared_error, mean_absolute_error\n')]
|
import pickle
class Cache:
def __init__(self):
self._filename = "alliancepy.txt"
self._cache = {}
def __enter__(self):
try:
with open(self._filename, "rb") as file:
d = pickle.load(file)
for key, value in d.items():
self._cache[key] = value
except (FileNotFoundError, EOFError):
self._cache = {}
finally:
return self
def __exit__(self, exc_type, exc_val, exc_tb):
with open(self._filename, "wb+") as file:
d = {}
for key, value in self._cache.items():
d[key] = value
pickle.dump(d, file)
def keys(self):
return self._cache.keys()
def add(self, key, value):
self._cache[key] = value
def get(self, key):
return self._cache[key]
def remove(self, key):
self._cache.pop(key, None)
def clear(self):
map(self.remove, self.keys())
|
[
"pickle.dump",
"pickle.load"
] |
[((671, 691), 'pickle.dump', 'pickle.dump', (['d', 'file'], {}), '(d, file)\n', (682, 691), False, 'import pickle\n'), ((232, 249), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (243, 249), False, 'import pickle\n')]
|
"""Test for examples.multiply.configs"""
import logging
import deepr as dpr
import deepr.examples.multiply
logging.basicConfig(level=logging.INFO)
PATH_CONFIG = dpr.io.Path(deepr.examples.multiply.__file__).parent / "configs"
def test_example_multiply_configs(tmpdir):
"""Test for examples.multiply.configs"""
path_model = str(tmpdir.join("model"))
path_dataset = str(tmpdir.join("dataset"))
config = dpr.io.read_json(PATH_CONFIG / "config.json")
macros = dpr.io.read_json(PATH_CONFIG / "macros.json")
macros["paths"]["path_model"] = path_model
macros["paths"]["path_dataset"] = path_dataset
parsed = dpr.parse_config(config, macros)
job = dpr.from_config(parsed)
job.run()
|
[
"logging.basicConfig",
"deepr.io.Path",
"deepr.parse_config",
"deepr.io.read_json",
"deepr.from_config"
] |
[((111, 150), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (130, 150), False, 'import logging\n'), ((425, 470), 'deepr.io.read_json', 'dpr.io.read_json', (["(PATH_CONFIG / 'config.json')"], {}), "(PATH_CONFIG / 'config.json')\n", (441, 470), True, 'import deepr as dpr\n'), ((484, 529), 'deepr.io.read_json', 'dpr.io.read_json', (["(PATH_CONFIG / 'macros.json')"], {}), "(PATH_CONFIG / 'macros.json')\n", (500, 529), True, 'import deepr as dpr\n'), ((641, 673), 'deepr.parse_config', 'dpr.parse_config', (['config', 'macros'], {}), '(config, macros)\n', (657, 673), True, 'import deepr as dpr\n'), ((684, 707), 'deepr.from_config', 'dpr.from_config', (['parsed'], {}), '(parsed)\n', (699, 707), True, 'import deepr as dpr\n'), ((167, 212), 'deepr.io.Path', 'dpr.io.Path', (['deepr.examples.multiply.__file__'], {}), '(deepr.examples.multiply.__file__)\n', (178, 212), True, 'import deepr as dpr\n')]
|
import numpy as np
from scipy import sparse
def fit_glove_bias(A, emb):
N = A.shape[0]
row_sum = np.array(A.sum(axis=1)).reshape(-1).astype(float)
col_sum = np.array(A.sum(axis=0)).reshape(-1).astype(float)
emb_sum = np.array(emb @ np.array(np.sum(emb, axis=0)).reshape((-1, 1))).reshape(-1)
row_sum -= emb_sum
col_sum -= emb_sum
a = np.zeros(N)
b = np.zeros(N)
adam_a = ADAM()
adam_b = ADAM()
for it in range(1000):
grad_a = row_sum - np.sum(b) * a
grad_b = col_sum - np.sum(a) * b
anew = adam_a.update(a, grad_a, 0)
bnew = adam_b.update(b, grad_b, 0)
if it % 20 == 0:
dif = np.mean(np.abs(a - anew) + np.abs(b - bnew)) / 2
dif /= np.maximum(np.mean(np.abs(a) + np.abs(b)) / 2, 1e-8)
if dif < 1e-2:
break
a = anew.copy()
b = bnew.copy()
return a, b
class ADAM:
def __init__(self):
self.beta1 = 0.9
self.beta2 = 0.999
self.eta = 0.001
self.t = 0
self.mt = None
self.vt = None
self.eps = 1e-8
def update(self, theta, grad, lasso_penalty, positiveConstraint=False):
"""Ascending."""
if self.mt is None:
self.mt = np.zeros(grad.shape)
self.vt = np.zeros(grad.shape)
self.t = self.t + 1
self.mt = self.beta1 * self.mt + (1 - self.beta1) * grad
self.vt = self.beta2 * self.vt + (1 - self.beta2) * np.multiply(grad, grad)
mthat = self.mt / (1 - np.power(self.beta1, self.t))
vthat = self.vt / (1 - np.power(self.beta2, self.t))
new_grad = mthat / (np.sqrt(vthat) + self.eps)
return self._prox(
theta + self.eta * new_grad, lasso_penalty * self.eta, positiveConstraint
)
def _prox(self, x, lam, positiveConstraint):
"""Soft thresholding operator.
Parameters
----------
x : float
Variable.
lam : float
Lasso penalty.
Returns
-------
y : float
Thresholded value of x.
"""
if positiveConstraint:
b = ((lam) > 0).astype(int)
return np.multiply(b, np.maximum(x - lam, np.zeros(x.shape))) + np.multiply(
1 - b,
np.multiply(np.sign(x), np.maximum(np.abs(x) - lam, np.zeros(x.shape))),
)
else:
return np.multiply(
np.sign(x), np.maximum(np.abs(x) - lam, np.zeros(x.shape))
)
|
[
"numpy.multiply",
"numpy.sum",
"numpy.abs",
"numpy.power",
"numpy.zeros",
"numpy.sign",
"numpy.sqrt"
] |
[((366, 377), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (374, 377), True, 'import numpy as np\n'), ((386, 397), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (394, 397), True, 'import numpy as np\n'), ((1272, 1292), 'numpy.zeros', 'np.zeros', (['grad.shape'], {}), '(grad.shape)\n', (1280, 1292), True, 'import numpy as np\n'), ((1315, 1335), 'numpy.zeros', 'np.zeros', (['grad.shape'], {}), '(grad.shape)\n', (1323, 1335), True, 'import numpy as np\n'), ((494, 503), 'numpy.sum', 'np.sum', (['b'], {}), '(b)\n', (500, 503), True, 'import numpy as np\n'), ((535, 544), 'numpy.sum', 'np.sum', (['a'], {}), '(a)\n', (541, 544), True, 'import numpy as np\n'), ((1491, 1514), 'numpy.multiply', 'np.multiply', (['grad', 'grad'], {}), '(grad, grad)\n', (1502, 1514), True, 'import numpy as np\n'), ((1547, 1575), 'numpy.power', 'np.power', (['self.beta1', 'self.t'], {}), '(self.beta1, self.t)\n', (1555, 1575), True, 'import numpy as np\n'), ((1608, 1636), 'numpy.power', 'np.power', (['self.beta2', 'self.t'], {}), '(self.beta2, self.t)\n', (1616, 1636), True, 'import numpy as np\n'), ((1667, 1681), 'numpy.sqrt', 'np.sqrt', (['vthat'], {}), '(vthat)\n', (1674, 1681), True, 'import numpy as np\n'), ((2480, 2490), 'numpy.sign', 'np.sign', (['x'], {}), '(x)\n', (2487, 2490), True, 'import numpy as np\n'), ((2520, 2537), 'numpy.zeros', 'np.zeros', (['x.shape'], {}), '(x.shape)\n', (2528, 2537), True, 'import numpy as np\n'), ((688, 704), 'numpy.abs', 'np.abs', (['(a - anew)'], {}), '(a - anew)\n', (694, 704), True, 'import numpy as np\n'), ((707, 723), 'numpy.abs', 'np.abs', (['(b - bnew)'], {}), '(b - bnew)\n', (713, 723), True, 'import numpy as np\n'), ((2257, 2274), 'numpy.zeros', 'np.zeros', (['x.shape'], {}), '(x.shape)\n', (2265, 2274), True, 'import numpy as np\n'), ((2343, 2353), 'numpy.sign', 'np.sign', (['x'], {}), '(x)\n', (2350, 2353), True, 'import numpy as np\n'), ((2503, 2512), 'numpy.abs', 'np.abs', (['x'], {}), '(x)\n', (2509, 2512), True, 'import numpy as np\n'), ((767, 776), 'numpy.abs', 'np.abs', (['a'], {}), '(a)\n', (773, 776), True, 'import numpy as np\n'), ((779, 788), 'numpy.abs', 'np.abs', (['b'], {}), '(b)\n', (785, 788), True, 'import numpy as np\n'), ((2383, 2400), 'numpy.zeros', 'np.zeros', (['x.shape'], {}), '(x.shape)\n', (2391, 2400), True, 'import numpy as np\n'), ((260, 279), 'numpy.sum', 'np.sum', (['emb'], {'axis': '(0)'}), '(emb, axis=0)\n', (266, 279), True, 'import numpy as np\n'), ((2366, 2375), 'numpy.abs', 'np.abs', (['x'], {}), '(x)\n', (2372, 2375), True, 'import numpy as np\n')]
|
# Generated by Django 2.2.13 on 2020-10-04 06:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('socials', '0004_auto_20201004_0347'),
('shows', '0009_auto_20201004_0137'),
]
operations = [
migrations.AddField(
model_name='show',
name='social',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='socials.UserSocialNetwork', verbose_name='Social Network'),
),
]
|
[
"django.db.models.ForeignKey"
] |
[((412, 552), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""socials.UserSocialNetwork"""', 'verbose_name': '"""Social Network"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='socials.UserSocialNetwork', verbose_name='Social Network')\n", (429, 552), False, 'from django.db import migrations, models\n')]
|
import os
from pepper_snp.modules.python.ImageGenerationUI import UserInterfaceSupport
def make_images(bam_file, draft_file, region, output_path, total_threads, downsample_rate):
output_dir = UserInterfaceSupport.handle_output_directory(os.path.abspath(output_path))
chr_list, bed_list = UserInterfaceSupport.get_chromosome_list(region, draft_file, bam_file, region_bed=None)
UserInterfaceSupport.chromosome_level_parallelization(chr_list=chr_list,
bam_file=bam_file,
draft_file=draft_file,
truth_bam_h1=None,
truth_bam_h2=None,
output_path=output_dir,
total_threads=total_threads,
realignment_flag=False,
train_mode=False,
downsample_rate=downsample_rate,
bed_list=None)
def make_train_images(bam_file, draft_file, truth_bam_h1, truth_bam_h2, region, region_bed, output_path, total_threads, downsample_rate):
output_dir = UserInterfaceSupport.handle_output_directory(os.path.abspath(output_path))
chr_list, bed_list_dictionary = UserInterfaceSupport.get_chromosome_list(region, draft_file, bam_file, region_bed=region_bed)
UserInterfaceSupport.chromosome_level_parallelization(chr_list=chr_list,
bam_file=bam_file,
draft_file=draft_file,
truth_bam_h1=truth_bam_h1,
truth_bam_h2=truth_bam_h2,
output_path=output_dir,
total_threads=total_threads,
realignment_flag=False,
train_mode=True,
downsample_rate=downsample_rate,
bed_list=bed_list_dictionary)
|
[
"pepper_snp.modules.python.ImageGenerationUI.UserInterfaceSupport.chromosome_level_parallelization",
"os.path.abspath",
"pepper_snp.modules.python.ImageGenerationUI.UserInterfaceSupport.get_chromosome_list"
] |
[((299, 390), 'pepper_snp.modules.python.ImageGenerationUI.UserInterfaceSupport.get_chromosome_list', 'UserInterfaceSupport.get_chromosome_list', (['region', 'draft_file', 'bam_file'], {'region_bed': 'None'}), '(region, draft_file, bam_file,\n region_bed=None)\n', (339, 390), False, 'from pepper_snp.modules.python.ImageGenerationUI import UserInterfaceSupport\n'), ((392, 704), 'pepper_snp.modules.python.ImageGenerationUI.UserInterfaceSupport.chromosome_level_parallelization', 'UserInterfaceSupport.chromosome_level_parallelization', ([], {'chr_list': 'chr_list', 'bam_file': 'bam_file', 'draft_file': 'draft_file', 'truth_bam_h1': 'None', 'truth_bam_h2': 'None', 'output_path': 'output_dir', 'total_threads': 'total_threads', 'realignment_flag': '(False)', 'train_mode': '(False)', 'downsample_rate': 'downsample_rate', 'bed_list': 'None'}), '(chr_list=chr_list,\n bam_file=bam_file, draft_file=draft_file, truth_bam_h1=None,\n truth_bam_h2=None, output_path=output_dir, total_threads=total_threads,\n realignment_flag=False, train_mode=False, downsample_rate=\n downsample_rate, bed_list=None)\n', (445, 704), False, 'from pepper_snp.modules.python.ImageGenerationUI import UserInterfaceSupport\n'), ((1537, 1634), 'pepper_snp.modules.python.ImageGenerationUI.UserInterfaceSupport.get_chromosome_list', 'UserInterfaceSupport.get_chromosome_list', (['region', 'draft_file', 'bam_file'], {'region_bed': 'region_bed'}), '(region, draft_file, bam_file,\n region_bed=region_bed)\n', (1577, 1634), False, 'from pepper_snp.modules.python.ImageGenerationUI import UserInterfaceSupport\n'), ((1636, 1979), 'pepper_snp.modules.python.ImageGenerationUI.UserInterfaceSupport.chromosome_level_parallelization', 'UserInterfaceSupport.chromosome_level_parallelization', ([], {'chr_list': 'chr_list', 'bam_file': 'bam_file', 'draft_file': 'draft_file', 'truth_bam_h1': 'truth_bam_h1', 'truth_bam_h2': 'truth_bam_h2', 'output_path': 'output_dir', 'total_threads': 'total_threads', 'realignment_flag': '(False)', 'train_mode': '(True)', 'downsample_rate': 'downsample_rate', 'bed_list': 'bed_list_dictionary'}), '(chr_list=chr_list,\n bam_file=bam_file, draft_file=draft_file, truth_bam_h1=truth_bam_h1,\n truth_bam_h2=truth_bam_h2, output_path=output_dir, total_threads=\n total_threads, realignment_flag=False, train_mode=True, downsample_rate\n =downsample_rate, bed_list=bed_list_dictionary)\n', (1689, 1979), False, 'from pepper_snp.modules.python.ImageGenerationUI import UserInterfaceSupport\n'), ((243, 271), 'os.path.abspath', 'os.path.abspath', (['output_path'], {}), '(output_path)\n', (258, 271), False, 'import os\n'), ((1470, 1498), 'os.path.abspath', 'os.path.abspath', (['output_path'], {}), '(output_path)\n', (1485, 1498), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-02-26 07:38
from __future__ import unicode_literals
from datetime import datetime
from timezonefinder import TimezoneFinder
import pytz
from django.db import migrations
from django.utils import timezone
tf = TimezoneFinder()
def set_timezone(apps, schema_editor):
Event = apps.get_model('events', 'Event')
for event in Event.objects.filter(start__isnull=False, location__isnull=False):
tz_name = tf.timezone_at(
lng=event.location.position.x,
lat=event.location.position.y
)
tz = pytz.timezone(tz_name)
start = event.start.astimezone(timezone.get_current_timezone())
event.start = tz.localize(
datetime(
start.year,
start.month,
start.day,
start.hour,
start.minute,
)
)
event.save()
class Migration(migrations.Migration):
dependencies = [
('events', '0014_auto_20200217_1107'),
]
operations = [
migrations.RunPython(set_timezone)
]
|
[
"django.db.migrations.RunPython",
"django.utils.timezone.get_current_timezone",
"timezonefinder.TimezoneFinder",
"datetime.datetime",
"pytz.timezone"
] |
[((273, 289), 'timezonefinder.TimezoneFinder', 'TimezoneFinder', ([], {}), '()\n', (287, 289), False, 'from timezonefinder import TimezoneFinder\n'), ((604, 626), 'pytz.timezone', 'pytz.timezone', (['tz_name'], {}), '(tz_name)\n', (617, 626), False, 'import pytz\n'), ((1088, 1122), 'django.db.migrations.RunPython', 'migrations.RunPython', (['set_timezone'], {}), '(set_timezone)\n', (1108, 1122), False, 'from django.db import migrations\n'), ((666, 697), 'django.utils.timezone.get_current_timezone', 'timezone.get_current_timezone', ([], {}), '()\n', (695, 697), False, 'from django.utils import timezone\n'), ((747, 817), 'datetime.datetime', 'datetime', (['start.year', 'start.month', 'start.day', 'start.hour', 'start.minute'], {}), '(start.year, start.month, start.day, start.hour, start.minute)\n', (755, 817), False, 'from datetime import datetime\n')]
|
import numpy as np
from scipy.linalg import cho_solve, inv
from scipy.stats import norm
from scipy.interpolate import InterpolatedUnivariateSpline
from sklearn.mixture import GaussianMixture as GMM
from .utils import custom_KDE
import time
class Acq(object):
'''
The base acq class.
'''
def __init__(self, inputs):
self.inputs = inputs
def compute_value(self, x):
raise NotImplementedError
def update_prior_search(self, model):
raise NotImplementedError
class AcqLW(Acq):
''' Select the next sample for estimating extreme event statistics.
This acquisition can be used in both single and multi-fidelity contexts.
parameters:
---------
inputs: instance of Input class
Input of the problem including pdf information and a sampling method.
ll_type: string
the type of the weights, must be one of
(1) rare: w(x)=p(x)/p(y(x))
(2) extreme: w(x)=p(x)|y(x)-z|^n
(3) plain: no weights
(4) input: w(x)=p(x)
load_pts: bool
whether load the input samples from a txt file
ll_kwargs: key words for extreme ll_type
attributes:
----------
model: instance of gpr.GaussianProcessRegressor
The surrogate model based on current dataset
DX: array
The inputs of current samples
gmm: instance of sklearn.GMM
The gmm to approximate likelihood, including gmm.means_,
gmm.covariances_, and gmm.scores_.
'''
def __init__(self, inputs, ll_type='rare', load_pts=False, **ll_kwargs):
self.inputs = inputs
self.ll_type = ll_type
self.load_pts = load_pts
self.ll_kwargs = ll_kwargs
if load_pts:
smpl = np.loadtxt('map_samples.txt')
self.pts = smpl[:,0:-1] # mc points
self.fx = smpl[:,-1] # pdf of mc points
def compute_value_tf_cost(self, pos, fidelity, cost):
''' Compute the benefit per cost of adding a sample (pos, fidelity)
'''
x = np.append(pos, fidelity)
value, gradient = self.compute_value(x)
return value/cost, gradient/cost
def compute_value(self, x):
''' Compute the benefit of adding a sample x
For single fidelity, x = pos, while for multi-fidelity,
x = {pos, fidelity}.
'''
x = np.atleast_2d(x)
integral, integral_derivative = self.compute_integral(x)
cov, cov_deriv = self.model.post_cov(x)
value = (integral / cov).item()
gradient = 1/cov**2 * (cov*integral_derivative - integral*cov_deriv)
gradient = gradient.reshape(-1)
return -value, -gradient
def compute_integral(self, x):
''' \int cov^2(f_i(pos), f_h(x'))*w(x')dx', x = {pos, i=fidelity}
Eq.(15) in paper.
and
d \int cov^2(f_i(pos), f_h(x'))*w(x')dx' d pos,
x = {pos, i=fidelity} Eq.(49) in paper.
'''
# compute value
kernel = self.model.kernel_
integral = self.compute_mixed_kappa(x,x)
alpha = cho_solve((self.model.L_, True), kernel(self.X, x))
integral += alpha.T.dot(np.dot(self.kappaXX, alpha)
- 2*self.compute_mixed_kappa(self.X, x))
# compute derivative
term1 = 2*self.compute_mixed_dkappa_dx(x,x)
dalpha_dx = cho_solve((self.model.L_, True),
kernel.gradient_x(x, self.X))
term2 = 2 * alpha.T.dot(np.dot(self.kappaXX, dalpha_dx))
term3 = 2 * alpha.T.dot(self.compute_mixed_dkappa_dx(x,self.X))
term3 += 2 * self.compute_mixed_kappa(x, self.X).dot(dalpha_dx)
return integral, term1 + term2 - term3
def update_prior_search(self, model):
''' Update the model(gpr), data(X), compute the gmm of weights and
kappa(X,X).
'''
self.model = model
self.X = self.model.X_train_
# generate GMM approximation of the likelihood
self._prepare_likelihood(self.ll_type, **self.ll_kwargs)
# constant for all hypothetical point
self.kappaXX = self.compute_mixed_kappa(self.X, self.X)
def compute_mixed_kappa(self, X1, X2):
''' compute averaged kappa w.r.t gmm components.
Eq. (18) in paper. The 'G' function relies on kernel properties.
'''
kernel = self.model.kernel_
mixed_kappa = 0
for i in range(self.gmm.n_components): # the number of gmm component
mixed_kappa += self.gmm.weights_[i] * kernel.intKKNorm(X1, X2,
self.gmm.means_[i],
self.gmm.covariances_[i])
return mixed_kappa
def compute_mixed_dkappa_dx(self, x, X):
''' Compute the averaged kappa derivatives.
Eq.(53) in paper.
'''
kernel = self.model.kernel_
mixed_kappa = 0
for i in range(self.gmm.n_components):
mixed_kappa += self.gmm.weights_[i] * kernel.dintKKNorm_dx(x, X,
self.gmm.means_[i],
self.gmm.covariances_[i])
return mixed_kappa
def _prepare_likelihood(self, ll_type, n_components=2, power=6,
center=0, depressed_side=None):
'''Compute gmm components of w(x').
'''
if self.load_pts:
pts = self.pts
fx = self.fx
n_samples = pts.shape[0]
else:
if self.inputs.dim <= 2:
n_samples = int(1e5)
else:
n_samples = int(1e6)
pts = self.inputs.sampling(n_samples) # input-samples
fx = self.inputs.pdf(pts) # weights
if ll_type =='input':
w_raw = fx
elif ll_type == 'plain':
w_raw = 1
else:
# compute the mean prediction for input-samples
if self.X.shape[1] != self.inputs.dim:
aug_pts = np.concatenate((pts, [[1]] * n_samples), axis = 1)
else:
aug_pts = pts
if ll_type == 'rare':
if n_samples > 4*1e5:
aug_pts_list = np.array_split(aug_pts, 10)
mu = np.empty(0)
for iii in range(10):
mu = np.concatenate((mu,
self.model.predict(aug_pts_list[iii]).flatten()))
else:
mu = self.model.predict(aug_pts).flatten()
x, y = custom_KDE(mu, weights=fx).evaluate()
self.fy_interp = InterpolatedUnivariateSpline(x, y, k=1)
w_raw = fx/self.fy_interp(mu)
elif ll_type == 'extreme':
mu = self.model.predict(aug_pts).flatten()
if center == 'mean':
center = np.average(mu, fx)
if depressed_side == 'negative':
w_raw = fx*abs(mu - center) ** (power*np.sign(mu - center))
elif depressed_side == 'positive':
w_raw = fx*abs(mu - center) ** (-power*np.sign(mu - center))
else:
w_raw = fx*abs(mu - center)**power
elif ll_type == 'failure':
# P(X)(1-P(X)) * p(X) / var(X)
mu, std = self.model.predict(aug_pts, return_std=True)
# failure probability as a Bernoulli RV
p = norm.cdf(mu.flatten()/std.flatten())
vb = p*(1-p) # var of the Bernoulli
vf = std**2 # var of the predictions
w_raw = vb * fx / vf
self.gmm = self._fit_gmm(pts, w_raw, n_components)
return self
@staticmethod
def _fit_gmm(pts, w_raw, n_components):
'''Fit gmm with weighted samples
'''
sca = np.sum(w_raw)
rng = np.random.default_rng()
aa = rng.choice(pts, size=50000, p=w_raw/sca)
gmm = GMM(n_components=n_components, covariance_type="full")
gmm = gmm.fit(X=aa)
return gmm
|
[
"numpy.sum",
"scipy.interpolate.InterpolatedUnivariateSpline",
"numpy.average",
"numpy.empty",
"sklearn.mixture.GaussianMixture",
"numpy.random.default_rng",
"numpy.append",
"numpy.loadtxt",
"numpy.sign",
"numpy.array_split",
"numpy.dot",
"numpy.concatenate",
"numpy.atleast_2d"
] |
[((2119, 2143), 'numpy.append', 'np.append', (['pos', 'fidelity'], {}), '(pos, fidelity)\n', (2128, 2143), True, 'import numpy as np\n'), ((2464, 2480), 'numpy.atleast_2d', 'np.atleast_2d', (['x'], {}), '(x)\n', (2477, 2480), True, 'import numpy as np\n'), ((8341, 8354), 'numpy.sum', 'np.sum', (['w_raw'], {}), '(w_raw)\n', (8347, 8354), True, 'import numpy as np\n'), ((8370, 8393), 'numpy.random.default_rng', 'np.random.default_rng', ([], {}), '()\n', (8391, 8393), True, 'import numpy as np\n'), ((8464, 8518), 'sklearn.mixture.GaussianMixture', 'GMM', ([], {'n_components': 'n_components', 'covariance_type': '"""full"""'}), "(n_components=n_components, covariance_type='full')\n", (8467, 8518), True, 'from sklearn.mixture import GaussianMixture as GMM\n'), ((1818, 1847), 'numpy.loadtxt', 'np.loadtxt', (['"""map_samples.txt"""'], {}), "('map_samples.txt')\n", (1828, 1847), True, 'import numpy as np\n'), ((3321, 3348), 'numpy.dot', 'np.dot', (['self.kappaXX', 'alpha'], {}), '(self.kappaXX, alpha)\n', (3327, 3348), True, 'import numpy as np\n'), ((3657, 3688), 'numpy.dot', 'np.dot', (['self.kappaXX', 'dalpha_dx'], {}), '(self.kappaXX, dalpha_dx)\n', (3663, 3688), True, 'import numpy as np\n'), ((6399, 6447), 'numpy.concatenate', 'np.concatenate', (['(pts, [[1]] * n_samples)'], {'axis': '(1)'}), '((pts, [[1]] * n_samples), axis=1)\n', (6413, 6447), True, 'import numpy as np\n'), ((7060, 7099), 'scipy.interpolate.InterpolatedUnivariateSpline', 'InterpolatedUnivariateSpline', (['x', 'y'], {'k': '(1)'}), '(x, y, k=1)\n', (7088, 7099), False, 'from scipy.interpolate import InterpolatedUnivariateSpline\n'), ((6613, 6640), 'numpy.array_split', 'np.array_split', (['aug_pts', '(10)'], {}), '(aug_pts, 10)\n', (6627, 6640), True, 'import numpy as np\n'), ((6667, 6678), 'numpy.empty', 'np.empty', (['(0)'], {}), '(0)\n', (6675, 6678), True, 'import numpy as np\n'), ((7318, 7336), 'numpy.average', 'np.average', (['mu', 'fx'], {}), '(mu, fx)\n', (7328, 7336), True, 'import numpy as np\n'), ((7446, 7466), 'numpy.sign', 'np.sign', (['(mu - center)'], {}), '(mu - center)\n', (7453, 7466), True, 'import numpy as np\n'), ((7580, 7600), 'numpy.sign', 'np.sign', (['(mu - center)'], {}), '(mu - center)\n', (7587, 7600), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""Concentrate the heavy business logic of the operations of an application.
It knows all Models that should be part of the flow and knows
the API/services of those models. It also orchestrate all the side-effects
and therefore can make the use of other use cases/services.
"""
from django.utils.translation import gettext as _
from payments_comissions.models import PaymentComission
from posts.models import Job
from posts_areas.models import PostArea
from tektank.internal_services.use_case_interface import UseCaseInterface
from tektank.libs_project.helpers import slug_generator
from .errors import InvalidCategories, InvalidDateOrder
from .interfaces import JobRepositoryInterface
class CreateJob(UseCaseInterface):
"""Create Job service.
Service layer for the creation of a Job. Here we are going to do all
validations and side effects. We are going to always use this service
instead of calling the Models create method directly.
We combine validations here, and validators in the model itself.
Input:
Parameters of Job model, i.e. its fields.
repository : A class that will operate against the DB,
or any other source to get/put information.
Raises:
InvalidCategories
InvalidDateOrder
Returns:
Instance of Job created.
"""
def __init__(
self,
repository: JobRepositoryInterface,
title,
email,
date_start,
date_end,
amount_to_pay,
avatar=None,
company=None,
city=None,
state=None,
country=None,
postal_code=None,
post_category=None,
post_subcategory=None,
address=None,
phone=None,
cellphone=None,
description=None,
terms=None,
deleted=False,
slug=None,
):
"""
We can instantiate like
CreateJob('title','<EMAIL>',date1,date2,
** { "address" : " 123 street ","description ":"This is a descr"}
So we are providing mandatory fields, and rest that we want to set.
Fields: slug and payment_comission does not appear, because they are
set by us. It is not an user input.
"""
# -- Set the internal state of the model for the operation
# The fields listed here, should match with the ones defined in the
# model definition. And also with only one _ before the field name.
self._title = title
self._email = email
self._date_start = date_start
self._date_end = date_end
self._amount_to_pay = amount_to_pay
self._avatar = avatar
self._company = company
self._city = city
self._state = state
self._country = country
self._postal_code = postal_code
self._post_category = post_category
self._post_subcategory = post_subcategory
self._address = address
self._phone = phone
self._cellphone = cellphone
self._description = description
self._terms = terms
# Forces None, as we set them
self._slug = None
self._payment_comission = None
self._deleted = deleted
# ----- Other objects ----- #
self.__obj = None
self.__repository = repository
# A list of keys defined in the model. If model is modified, we should
# also modify this.
self.__model_keys = [
'title',
'email',
'date_start',
'date_end',
'amount_to_pay',
'avatar',
'company',
'city',
'state',
'country',
'postal_code',
'post_category',
'post_subcategory',
'address',
'phone',
'cellphone',
'description',
'terms',
'slug',
'deleted',
'payment_comission',
]
@property
def repository(self) -> JobRepositoryInterface:
"""Return the respository (adapter) used."""
return self.__repository
def execute(self) -> Job:
"""Main operation, the one that be executed by external code. This
operation will condense the rest. Will execute side effects, and
all required operations in order.
"""
self._strip_data()
# Create an instance of Job, and save it into self.__obj
self._factory()
self.is_valid()
self.__obj.slug = self._generate_slug( # noqa: T484
self.__obj.id, self.__obj.title, # noqa: T484
)
self.__obj.payment_comission = self._generate_payment_comission( # noqa: T484
self.__obj.amount_to_pay, # noqa: T484
)
self.__repository.save(self.__obj)
return self.__obj
def _strip_data(self):
"""Clean fields. For example, delete trailing spaces."""
fields = [
"title",
"address_1",
"address_2",
"email",
"website",
"phone",
"cellphone",
]
for field in fields:
value = getattr(self, field, None)
if value:
setattr(self, field, value.strip())
def is_valid(self):
"""Public method to allow clients of this object to validate the data even before to execute the use case.
To use it, create an instance of the class with the values desired.
And execute it.
Returns:
True or False
Raises:
ValidationError, InvalidDateOrder, InvalidCategories
"""
# ## Check date order
if self._date_end and self._date_start and self._date_end <= self._date_start:
raise InvalidDateOrder(_("Start date should be before end date"))
# ## Check categories match.
# TODO: This should not be necessary, but in admin
# dropdown menu for selecting categories are not well filtered when selecting parent
# categorie, so we need to do it.
# TODO: Send ID instead of name for better lookup
# TODO: This logic would go inside posts_category services
if self._post_category:
assert isinstance(
self._post_category, str
), "Category name should be a string"
if self._post_subcategory:
assert isinstance(
self._post_subcategory, str
), "Subcategory name should be a string"
# If user selected both categories, check that the parent is the correct
# If only subcategory selected, fill the right parent.
# If only category, do nothing.
cat = (
PostArea.objects.find_by_name(self._post_category)
if self._post_category
else None
)
subcat = (
PostArea.objects.find_by_name(self._post_subcategory)
if self._post_subcategory
else None
)
if subcat:
if cat and subcat.parent != cat:
raise InvalidCategories(cat.name, subcat.name)
else:
self._post_category = subcat.parent.name
# Here at the end, as before this, we were cleaning and validating all
# fields, so it has sense that at this point, the model will be in the
# final state.
# If object is not stored locally, do it.
if not self.__obj:
self._factory()
# ## Execute programatically model validations. Raises validation error.
self.__obj.full_clean()
return True
def _generate_slug(self, uuid, title):
"""Generate slug for the instance."""
return slug_generator(uuid, title)
def _generate_payment_comission(self, amount_to_pay):
"""Assign an instance to PaymentComission related to this model.
This assignment will later dictated how we are going to charge this
job.
The rules of how we are going to calculate this, are done by us.
"""
return PaymentComission.assign_payment_comission(amount_to_pay)
def _factory(self):
"""Create an instance of a Job, and save it into self.__obj."""
# Check if it is a field in the model # TODO do it better?
# Remove _ from keys, so we pass correct arguments to create,
# and leave only values that are not None.
def process(s):
if s[0] == '_' and s[1] != '_' and s[1:] in self.__model_keys:
return s[1:]
params = {
process(k): v
for k, v in self.__dict__.items()
if v is not None and process(k)
}
self.__obj = self.__repository.factory(**params)
|
[
"posts_areas.models.PostArea.objects.find_by_name",
"tektank.libs_project.helpers.slug_generator",
"django.utils.translation.gettext",
"payments_comissions.models.PaymentComission.assign_payment_comission"
] |
[((7769, 7796), 'tektank.libs_project.helpers.slug_generator', 'slug_generator', (['uuid', 'title'], {}), '(uuid, title)\n', (7783, 7796), False, 'from tektank.libs_project.helpers import slug_generator\n'), ((8119, 8175), 'payments_comissions.models.PaymentComission.assign_payment_comission', 'PaymentComission.assign_payment_comission', (['amount_to_pay'], {}), '(amount_to_pay)\n', (8160, 8175), False, 'from payments_comissions.models import PaymentComission\n'), ((6768, 6818), 'posts_areas.models.PostArea.objects.find_by_name', 'PostArea.objects.find_by_name', (['self._post_category'], {}), '(self._post_category)\n', (6797, 6818), False, 'from posts_areas.models import PostArea\n'), ((6917, 6970), 'posts_areas.models.PostArea.objects.find_by_name', 'PostArea.objects.find_by_name', (['self._post_subcategory'], {}), '(self._post_subcategory)\n', (6946, 6970), False, 'from posts_areas.models import PostArea\n'), ((5838, 5879), 'django.utils.translation.gettext', '_', (['"""Start date should be before end date"""'], {}), "('Start date should be before end date')\n", (5839, 5879), True, 'from django.utils.translation import gettext as _\n')]
|
#coding=utf-8
from flask.ext.wtf import Form
from wtforms import StringField, SubmitField, PasswordField
from wtforms.validators import Required, Email, Length, EqualTo
from app.models import User
class RegisterForm(Form):
username = StringField('用户名', validators=[Required('请输入用户名')])
email = StringField('邮箱',
validators=[Required('请输入邮箱地址'), Email('邮箱格式不正确')])
password = PasswordField('密码',
validators=[Required('请输入密码'), Length(6, 20, '密码长度为6~20'),
EqualTo('password2', '两次输入不一致')])
password2 = PasswordField('重复密码',
validators=[Required('请重复密码'), Length(6, 20, '密码长度为6~20')])
submit = SubmitField('注册')
def validate_username(self, field):
if User.query.filter_by(username=field.data).count():
raise ValueError('用户名已存在')
def validate_email(self, field):
if User.query.filter_by(email=field.data).count():
raise ValueError('邮箱已注册')
class LoginFrom(Form):
email = StringField('邮箱',
validators=[Required('请输入邮箱地址'), Email('邮箱格式不正确')])
password = PasswordField('密码',
validators=[Required('请输入密码'), Length(6, 20, '密码长度为6~20')])
submit = SubmitField('登陆')
|
[
"wtforms.validators.Length",
"wtforms.validators.Email",
"wtforms.SubmitField",
"app.models.User.query.filter_by",
"wtforms.validators.Required",
"wtforms.validators.EqualTo"
] |
[((657, 674), 'wtforms.SubmitField', 'SubmitField', (['"""注册"""'], {}), "('注册')\n", (668, 674), False, 'from wtforms import StringField, SubmitField, PasswordField\n'), ((1182, 1199), 'wtforms.SubmitField', 'SubmitField', (['"""登陆"""'], {}), "('登陆')\n", (1193, 1199), False, 'from wtforms import StringField, SubmitField, PasswordField\n'), ((277, 295), 'wtforms.validators.Required', 'Required', (['"""请输入用户名"""'], {}), "('请输入用户名')\n", (285, 295), False, 'from wtforms.validators import Required, Email, Length, EqualTo\n'), ((342, 361), 'wtforms.validators.Required', 'Required', (['"""请输入邮箱地址"""'], {}), "('请输入邮箱地址')\n", (350, 361), False, 'from wtforms.validators import Required, Email, Length, EqualTo\n'), ((377, 393), 'wtforms.validators.Email', 'Email', (['"""邮箱格式不正确"""'], {}), "('邮箱格式不正确')\n", (382, 393), False, 'from wtforms.validators import Required, Email, Length, EqualTo\n'), ((437, 454), 'wtforms.validators.Required', 'Required', (['"""请输入密码"""'], {}), "('请输入密码')\n", (445, 454), False, 'from wtforms.validators import Required, Email, Length, EqualTo\n'), ((466, 492), 'wtforms.validators.Length', 'Length', (['(6)', '(20)', '"""密码长度为6~20"""'], {}), "(6, 20, '密码长度为6~20')\n", (472, 492), False, 'from wtforms.validators import Required, Email, Length, EqualTo\n'), ((504, 535), 'wtforms.validators.EqualTo', 'EqualTo', (['"""password2"""', '"""两次输入不一致"""'], {}), "('password2', '两次输入不一致')\n", (511, 535), False, 'from wtforms.validators import Required, Email, Length, EqualTo\n'), ((596, 613), 'wtforms.validators.Required', 'Required', (['"""请重复密码"""'], {}), "('请重复密码')\n", (604, 613), False, 'from wtforms.validators import Required, Email, Length, EqualTo\n'), ((625, 651), 'wtforms.validators.Length', 'Length', (['(6)', '(20)', '"""密码长度为6~20"""'], {}), "(6, 20, '密码长度为6~20')\n", (631, 651), False, 'from wtforms.validators import Required, Email, Length, EqualTo\n'), ((727, 768), 'app.models.User.query.filter_by', 'User.query.filter_by', ([], {'username': 'field.data'}), '(username=field.data)\n', (747, 768), False, 'from app.models import User\n'), ((866, 904), 'app.models.User.query.filter_by', 'User.query.filter_by', ([], {'email': 'field.data'}), '(email=field.data)\n', (886, 904), False, 'from app.models import User\n'), ((1026, 1045), 'wtforms.validators.Required', 'Required', (['"""请输入邮箱地址"""'], {}), "('请输入邮箱地址')\n", (1034, 1045), False, 'from wtforms.validators import Required, Email, Length, EqualTo\n'), ((1061, 1077), 'wtforms.validators.Email', 'Email', (['"""邮箱格式不正确"""'], {}), "('邮箱格式不正确')\n", (1066, 1077), False, 'from wtforms.validators import Required, Email, Length, EqualTo\n'), ((1121, 1138), 'wtforms.validators.Required', 'Required', (['"""请输入密码"""'], {}), "('请输入密码')\n", (1129, 1138), False, 'from wtforms.validators import Required, Email, Length, EqualTo\n'), ((1150, 1176), 'wtforms.validators.Length', 'Length', (['(6)', '(20)', '"""密码长度为6~20"""'], {}), "(6, 20, '密码长度为6~20')\n", (1156, 1176), False, 'from wtforms.validators import Required, Email, Length, EqualTo\n')]
|
from functools import wraps
def cached(func):
cache = {}
@wraps(func)
def cached_wrapper(*args):
if args not in cache:
cache[args] = func(*args)
return cache[args]
return cached_wrapper
|
[
"functools.wraps"
] |
[((62, 73), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (67, 73), False, 'from functools import wraps\n')]
|
#!/usr/bin/env python
import socket
import threading
import config
import websocketclient
class WebSocketServer:
"""
Handle the Server, bind and accept new connections, open and close
clients connections.
"""
def __init__(self):
self.clients = []
def start(self):
"""
Start the server.
"""
s = socket.socket()
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(('', config.socketport))
s.listen(1)
try:
while 1:
conn, addr = s.accept()
print('Connected by', addr)
newClient = websocketclient.WebSocketClient(conn, addr, self)
self.clients.append(newClient)
newClient.start()
except KeyboardInterrupt:
[client.close() for client in self.clients]
s.close()
def send_all(self, data):
"""
Send a message to all the currenly connected clients.
"""
[client.send(data) for client in self.clients]
def remove(self, client):
"""
Remove a client from the connected list.
"""
l = threading.Lock()
l.acquire()
self.clients.remove(client)
l.release()
|
[
"threading.Lock",
"websocketclient.WebSocketClient",
"socket.socket"
] |
[((361, 376), 'socket.socket', 'socket.socket', ([], {}), '()\n', (374, 376), False, 'import socket\n'), ((1179, 1195), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (1193, 1195), False, 'import threading\n'), ((648, 697), 'websocketclient.WebSocketClient', 'websocketclient.WebSocketClient', (['conn', 'addr', 'self'], {}), '(conn, addr, self)\n', (679, 697), False, 'import websocketclient\n')]
|
import numpy as np
# class for 3D points in an image frame
class Point(object):
# class constructor
def __init__(self, img_map, location, color):
self.point = location
self.frames = []
self.idx = []
self.color = np.copy(color)
self.id = img_map.max_point
img_map.max_point += 1
img_map.points.append(self)
def orb(self):
des = []
for f in self.frames:
des.append(f.des[f.pts.index(self)])
return des
# class method to add a frame and index from video
# feed to the Point object
def add_observation(self, frame, index):
frame.pts[index] = self
self.frames.append(frame)
self.idx.append(index)
# class method to delete a point from a frame
def delete_point(self):
for f in self.frames:
f.pts[f.pts.index(self)] = None
del self
def homogenous(self):
return np.array([self.point[0], self.point[1], self.point[2], 1.0])
|
[
"numpy.array",
"numpy.copy"
] |
[((253, 267), 'numpy.copy', 'np.copy', (['color'], {}), '(color)\n', (260, 267), True, 'import numpy as np\n'), ((948, 1008), 'numpy.array', 'np.array', (['[self.point[0], self.point[1], self.point[2], 1.0]'], {}), '([self.point[0], self.point[1], self.point[2], 1.0])\n', (956, 1008), True, 'import numpy as np\n')]
|
import copy
import json
from enum import Enum, IntFlag, Flag
from json import JSONEncoder
from typing import Any, List, Dict
from vgm.command import VgmCommand, VgmCommandType
class Waveform(Enum):
SAW = 0
SQUARE = 1
TRIANGLE = 2
NOISE = 3
def repr_json(self):
return self.name
class Operators(Flag):
MOD1 = 1
CAR1 = 2
MOD2 = 4
CAR2 = 8
def repr_json(self):
o = []
if self & Operators.MOD1:
o.append(Operators.MOD1.name)
if self & Operators.CAR1:
o.append(Operators.CAR1.name)
if self & Operators.MOD2:
o.append(Operators.MOD2.name)
if self & Operators.CAR2:
o.append(Operators.CAR2.name)
return "|".join(o)
class BaseConfig:
def __init__(self, other=None) -> None:
self.waveform: Waveform = Waveform.SAW if not other else other.waveform
self.lfo: int = 0 if not other else other.lfo
self.amp_md: int = 0 if not other else other.amp_md
self.phs_md: int = 0 if not other else other.phs_md
def __eq__(self, other: Any) -> bool:
if isinstance(other, BaseConfig):
return (
self.lfo == other.lfo
and self.phs_md == other.phs_md
and self.amp_md == other.amp_md
and self.waveform == other.waveform
)
return NotImplemented
# noinspection PyArgumentList
def __deepcopy__(self, _) -> object:
return BaseConfig(self)
def repr_json(self):
return self.__dict__
class OperatorConfig:
def __init__(self, other=None) -> None:
self.total_level: int = 0 if not other else other.total_level
self.attack_rate: int = 0 if not other else other.attack_rate
self.first_decay_rate: int = 0 if not other else other.first_decay_rate
self.first_decay_level: int = 0 if not other else other.first_decay_level
self.second_decay_rate: int = 0 if not other else other.second_decay_rate
self.release_rate: int = 0 if not other else other.release_rate
self.key_scale: int = 0 if not other else other.key_scale
self.multiply: int = 0 if not other else other.multiply
self.first_detune: int = 0 if not other else other.first_detune
self.second_detune: int = 0 if not other else other.second_detune
self.ase: bool = False if not other else other.ase
def __eq__(self, other: Any) -> bool:
if isinstance(other, OperatorConfig):
return (
self.first_detune == other.first_detune
and self.second_detune == other.second_detune
and self.multiply == other.multiply
and self.total_level == other.total_level
and self.key_scale == other.key_scale
and self.attack_rate == other.attack_rate
and self.ase == other.ase
and self.first_decay_rate == other.first_decay_rate
and self.second_decay_rate == other.second_decay_rate
and self.first_decay_level == other.first_decay_level
and self.release_rate == other.release_rate
)
return NotImplemented
# noinspection PyArgumentList
def __deepcopy__(self, _) -> object:
return OperatorConfig(self)
def repr_json(self):
return {
"tl": self.total_level,
"ar": self.attack_rate,
"d1r": self.first_decay_rate,
"d1l": self.first_decay_level,
"d2r": self.second_decay_rate,
"rr": self.release_rate,
"ks": self.key_scale,
"mul": self.multiply,
"dt1": self.first_detune,
"dt2": self.second_detune,
"ase": self.ase,
}
class NoteConfig:
def __init__(self, other=None) -> None:
self.right: bool = False if not other else other.right
self.left: bool = False if not other else other.left
self.octave: int = 0 if not other else other.octave
self.note: int = 0 if not other else other.note
self.key_fraction: int = 0 if not other else other.key_fraction
def __eq__(self, other: Any) -> bool:
if isinstance(other, NoteConfig):
return (
self.right == other.right
and self.left == other.left
and self.octave == other.octave
and self.note == other.note
and self.key_fraction == other.key_fraction
)
return NotImplemented
# noinspection PyArgumentList
def __deepcopy__(self, _) -> object:
return NoteConfig(self)
def repr_json(self):
return {
"right": self.right,
"left": self.left,
"octave": self.octave,
"note": self.note,
"key_fraction": self.key_fraction,
}
class ChannelConfig:
def __init__(self, other=None) -> None:
self.operators: List[OperatorConfig] = (
[] if not other else copy.deepcopy(other.operators)
)
self.fb: int = 0 if not other else other.fb
self.ams: int = 0 if not other else other.ams
self.pms: int = 0 if not other else other.pms
self.connection: int = 0 if not other else other.connection
self.noise: bool = False if not other else other.noise
self.noise_freq: int = 0 if not other else other.noise_freq
if not other:
for dev in range(4):
self.operators.append(OperatorConfig())
def __eq__(self, other: Any) -> bool:
if isinstance(other, ChannelConfig):
return (
self.fb == other.fb
and self.connection == other.connection
and self.ams == other.ams
and self.pms == other.pms
and self.operators == other.operators
and self.noise == other.noise
and self.noise_freq == other.noise_freq
)
return NotImplemented
# noinspection PyArgumentList
def __deepcopy__(self, _) -> object:
return ChannelConfig(self)
def repr_json(self):
return {
"feedback": self.fb,
"connection": self.connection,
"ams": self.ams,
"pms": self.pms,
"noise": self.noise,
"noise_freq": self.noise_freq,
"m1": self.operators[0],
"c1": self.operators[2],
"m2": self.operators[1],
"c2": self.operators[3],
}
class Config:
def __init__(
self,
id: int,
base: BaseConfig = BaseConfig(),
channel: ChannelConfig = ChannelConfig(),
operators: Operators = 0,
):
self._id = id
self._base = base
self._operators: Operators = operators
self._channel = channel
def __getattr__(self, item):
if item == "lfo":
return self._base.lfo
if item == "phs_md":
return self._base.phs_md
if item == "amp_md":
return self._base.amp_md
if item == "waveform":
return self._base.waveform
if item == "enabled_operators":
return self._operators
return getattr(self._channel, item)
def compare(self, base: BaseConfig, channel: ChannelConfig, operators: Operators):
return (
self._base == base
and self._channel == channel
and self._operators == operators
)
def repr_json(self) -> Dict:
return {
"id": self._id,
"base": self._base,
"operators": self._operators,
"channel": self._channel,
}
class ConfigEncoder(json.JSONEncoder):
def default(self, o: Any) -> Any:
if hasattr(o, "repr_json"):
return o.repr_json()
else:
return json.JSONEncoder.default(self, o)
class YM2151Command(VgmCommand):
command_type = VgmCommandType.YM2151
def __init__(self, cmd_id, reg: int, value: int) -> None:
super().__init__(cmd_id)
self.reg = reg
self.value = value
def __str__(self) -> str:
return f"YM2151Command(Reg: {hex(self.reg)}, Data: {hex(self.value)})"
def create(reg: int, value: int) -> YM2151Command:
return YM2151Command(0x54, reg, value)
|
[
"copy.deepcopy",
"json.JSONEncoder.default"
] |
[((5066, 5096), 'copy.deepcopy', 'copy.deepcopy', (['other.operators'], {}), '(other.operators)\n', (5079, 5096), False, 'import copy\n'), ((7940, 7973), 'json.JSONEncoder.default', 'json.JSONEncoder.default', (['self', 'o'], {}), '(self, o)\n', (7964, 7973), False, 'import json\n')]
|
"""Tests out the code for generating randomised test trades/orders.
"""
from __future__ import print_function
__author__ = 'saeedamen' # <NAME> / <EMAIL>
#
# Copyright 2017 Cuemacro Ltd. - http//www.cuemacro.com / @cuemacro
#
# See the License for the specific language governing permissions and limitations under the License.
#
import os
from tcapy.conf.constants import Constants
from tcapy.data.datatestcreator import DataTestCreator
from tcapy.data.databasesource import DatabaseSourceCSVBinary as DatabaseSourceCSV
from tcapy.data.databasesource import DatabaseSourceArctic
from tcapy.util.loggermanager import LoggerManager
logger = LoggerManager().getLogger(__name__)
constants = Constants()
postfix = 'dukascopy'
ticker = ['EURUSD']
start_date = '01 May 2017'
finish_date = '31 May 2017'
use_test_csv = True
# mainly just to speed up tests - note: you will need to generate the HDF5 files using convert_csv_to_h5.py from the CSVs
use_hdf5_market_files = False
logger.info('Make sure you have created folder ' + constants.csv_folder + ' & ' + constants.temp_data_folder +
' otherwise tests will fail')
########################################################################################################################
# you can change the test_data_harness_folder to one on your own machine with real data
folder = constants.test_data_harness_folder
eps = 10 ** -5
if use_test_csv:
# only contains limited amount of EURUSD and USDJPY in Apr/Jun 2017
if use_hdf5_market_files:
market_data_store = os.path.join(folder, 'small_test_market_df.h5')
else:
market_data_store = os.path.join(folder, 'small_test_market_df.csv.gz')
def test_randomized_trade_data_generation():
"""Tests randomized trade generation data (and writing to database)
"""
data_test_creator = DataTestCreator(write_to_db=False)
# use database source as Arctic for market data (assume we are using market data as a source)
if use_test_csv:
data_test_creator._database_source_market = DatabaseSourceCSV(market_data_database_csv=market_data_store)
else:
data_test_creator._database_source_market = DatabaseSourceArctic(postfix=postfix)
# create randomised trade/order data
trade_order = data_test_creator.create_test_trade_order(ticker, start_date=start_date, finish_date=finish_date)
# trade_order has dictionary of trade_df and order_df
# make sure the number of trades > number of orders
assert (len(trade_order['trade_df'].index) > len(trade_order['order_df'].index))
if __name__ == '__main__':
test_randomized_trade_data_generation()
# import pytest; pytest.main()
|
[
"tcapy.util.loggermanager.LoggerManager",
"tcapy.data.datatestcreator.DataTestCreator",
"tcapy.conf.constants.Constants",
"tcapy.data.databasesource.DatabaseSourceCSVBinary",
"tcapy.data.databasesource.DatabaseSourceArctic",
"os.path.join"
] |
[((696, 707), 'tcapy.conf.constants.Constants', 'Constants', ([], {}), '()\n', (705, 707), False, 'from tcapy.conf.constants import Constants\n'), ((1843, 1877), 'tcapy.data.datatestcreator.DataTestCreator', 'DataTestCreator', ([], {'write_to_db': '(False)'}), '(write_to_db=False)\n', (1858, 1877), False, 'from tcapy.data.datatestcreator import DataTestCreator\n'), ((647, 662), 'tcapy.util.loggermanager.LoggerManager', 'LoggerManager', ([], {}), '()\n', (660, 662), False, 'from tcapy.util.loggermanager import LoggerManager\n'), ((1555, 1602), 'os.path.join', 'os.path.join', (['folder', '"""small_test_market_df.h5"""'], {}), "(folder, 'small_test_market_df.h5')\n", (1567, 1602), False, 'import os\n'), ((1641, 1692), 'os.path.join', 'os.path.join', (['folder', '"""small_test_market_df.csv.gz"""'], {}), "(folder, 'small_test_market_df.csv.gz')\n", (1653, 1692), False, 'import os\n'), ((2050, 2111), 'tcapy.data.databasesource.DatabaseSourceCSVBinary', 'DatabaseSourceCSV', ([], {'market_data_database_csv': 'market_data_store'}), '(market_data_database_csv=market_data_store)\n', (2067, 2111), True, 'from tcapy.data.databasesource import DatabaseSourceCSVBinary as DatabaseSourceCSV\n'), ((2174, 2211), 'tcapy.data.databasesource.DatabaseSourceArctic', 'DatabaseSourceArctic', ([], {'postfix': 'postfix'}), '(postfix=postfix)\n', (2194, 2211), False, 'from tcapy.data.databasesource import DatabaseSourceArctic\n')]
|
from PySide2.QtCore import QCoreApplication
from PySide2.QtWidgets import QMenuBar, QMenu
def _quit():
QCoreApplication.quit()
class MenuBar(QMenuBar):
def __init__(self):
super().__init__()
file_action = self.addMenu(QMenu('File'))
file_action.menu().addAction('New Project')
file_action.menu().addAction('Open')
file_action.menu().addSeparator()
file_action.menu().addAction('Save')
file_action.menu().addAction('Save As')
file_action.menu().addSeparator()
file_action.menu().addAction('Quit').triggered.connect(_quit)
edit_action = self.addMenu(QMenu('Edit'))
edit_action.menu().addAction('Undo')
edit_action.menu().addAction('Redo')
edit_action.menu().addSeparator()
edit_action.menu().addAction('Preferences')
view_action = self.addMenu(QMenu('View'))
view_action.menu().addAction('Show Logs')
|
[
"PySide2.QtCore.QCoreApplication.quit",
"PySide2.QtWidgets.QMenu"
] |
[((109, 132), 'PySide2.QtCore.QCoreApplication.quit', 'QCoreApplication.quit', ([], {}), '()\n', (130, 132), False, 'from PySide2.QtCore import QCoreApplication\n'), ((247, 260), 'PySide2.QtWidgets.QMenu', 'QMenu', (['"""File"""'], {}), "('File')\n", (252, 260), False, 'from PySide2.QtWidgets import QMenuBar, QMenu\n'), ((642, 655), 'PySide2.QtWidgets.QMenu', 'QMenu', (['"""Edit"""'], {}), "('Edit')\n", (647, 655), False, 'from PySide2.QtWidgets import QMenuBar, QMenu\n'), ((877, 890), 'PySide2.QtWidgets.QMenu', 'QMenu', (['"""View"""'], {}), "('View')\n", (882, 890), False, 'from PySide2.QtWidgets import QMenuBar, QMenu\n')]
|
# -*- coding:utf-8 -*-
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # Remove unnecessary information
import numpy as np
# cpu_count = 4
# 因为服务器没有图形界面,所以必须这样弄
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
# 好看的打印格式
def fancy_print(n = None, c = None, s = '#'):
print(s * 40)
print(n)
print(c)
print(s * 40)
print() # 空一行避免混淆
# 拿到所有模型
from model import *
# 图片读取生成器
from keras.preprocessing.image import ImageDataGenerator
import tensorflow as tf
import keras
from keras.optimizers import Adam
from keras.callbacks import EarlyStopping
from keras.callbacks import ModelCheckpoint
from keras.callbacks import Callback
from sklearn.metrics import roc_auc_score, average_precision_score
from sklearn.model_selection import train_test_split
from sklearn import metrics
most_epoches = 500 # 最大训练次数 500 测试时 2-10
def train_cnn_dense_resnet(gen_name, model_name, gene_length):
# 打印说明,方便检查
fancy_print('gen_name', gen_name)
fancy_print('model_name', model_name)
##############################
#
# png reader in iterator
#
##############################
# 训练集:验证集:测试集 = 8:1:1
train_datagen = ImageDataGenerator(rescale = 1./255, validation_split = 0.11) # set validation split
BATCH_SIZE = 32 # 一次大小
train_generator = train_datagen.flow_from_directory(directory = 'data/'+gen_name+'/png_train/',
target_size = (gene_length*2, 5),
color_mode = 'grayscale',
class_mode = 'categorical',
batch_size = BATCH_SIZE,
subset = 'training', # set as training data
shuffle = True, # must shuffle
seed = 42,
)
val_generator = train_datagen.flow_from_directory(directory = 'data/'+gen_name+'/png_train/', # same directory as training data
target_size = (gene_length*2, 5),
color_mode = 'grayscale',
class_mode = 'categorical',
batch_size = BATCH_SIZE,
subset = 'validation', # set as validation data
shuffle = True, # must shuffle
seed = 42,
)
##############################
#
# loss数据可视化
#
##############################
class PlotProgress(keras.callbacks.Callback):
def __init__(self, entity = ['loss', 'accuracy']):
self.entity = entity
def on_train_begin(self, logs={}):
self.i = 0
self.x = []
self.losses = []
self.val_losses = []
self.accs = []
self.val_accs = []
self.fig = plt.figure()
self.logs = []
def on_epoch_end(self, epoch, logs={}):
self.logs.append(logs)
self.x.append(self.i)
# 损失函数
self.losses.append(logs.get('{}'.format(self.entity[0])))
self.val_losses.append(logs.get('val_{}'.format(self.entity[0])))
# 准确率
self.accs.append(logs.get('{}'.format(self.entity[1])))
self.val_accs.append(logs.get('val_{}'.format(self.entity[1])))
self.i += 1
# clear_output(wait=True)
plt.figure(0)
plt.clf() # 清理历史遗迹
plt.plot(self.x, self.losses, label="{}".format(self.entity[0]))
plt.plot(self.x, self.val_losses, label="val_{}".format(self.entity[0]))
plt.legend()
plt.savefig('result/'+gen_name+'/'+model_name+'/loss.png')
# plt.pause(0.01)
# plt.show()
plt.figure(1)
plt.clf() # 清理历史遗迹
plt.plot(self.x, self.accs, label="{}".format(self.entity[1]))
plt.plot(self.x, self.val_accs, label="val_{}".format(self.entity[1]))
plt.legend()
plt.savefig('result/'+gen_name+'/'+model_name+'/acc.png')
# plt.pause(0.01)
# plt.show()
##############################
#
# Model building
#
##############################
if model_name == 'onehot_cnn_one_branch':
clf = model_onehot_cnn_one_branch(gene_length)
if model_name == 'onehot_embedding_dense':
clf = model_onehot_embedding_dense(gene_length)
if model_name == 'onehot_dense':
clf = model_onehot_dense(gene_length)
if model_name == 'onehot_resnet18':
clf = model_onehot_resnet18(gene_length)
if model_name == 'onehot_resnet34':
clf = model_onehot_resnet34(gene_length)
clf.summary() # Print model structure
early_stopping = EarlyStopping(monitor = 'val_accuracy', patience = 10, restore_best_weights = True)
# 绘图函数
plot_progress = PlotProgress(entity = ['loss', 'accuracy'])
##############################
#
# Model training
#
##############################
# No need to count how many epochs, keras can count
history = clf.fit_generator(generator = train_generator,
epochs = most_epoches,
validation_data = val_generator,
steps_per_epoch = train_generator.samples // BATCH_SIZE,
validation_steps = val_generator.samples // BATCH_SIZE,
callbacks = [plot_progress, early_stopping],
# max_queue_size = 64,
# workers = cpu_count,
# use_multiprocessing = True,
verbose = 2 # 一次训练就显示一行
)
clf.save_weights('h5_weights/'+gen_name+'/'+model_name+'.h5')
# 打印一下,方便检查
fancy_print('save_weights', 'h5_weights/'+gen_name+'/'+model_name+'.h5', '=')
def train_cnn_separate(gen_name, model_name, gene_length):
##############################
#
# 构建迭代器
#
##############################
from keras.preprocessing.image import ImageDataGenerator
# train_datagen = ImageDataGenerator(horizontal_flip = True, vertical_flip = True, rescale = 1. / 255) # 上下翻转 左右翻转
train_datagen = ImageDataGenerator(rescale = 1. / 255, validation_split = 0.11)
BATCH_SIZE = 32 # 每次大小
def generator_two_train():
train_generator1 = train_datagen.flow_from_directory(directory = 'data/'+gen_name+'/train_en/', target_size = (gene_length, 5),
color_mode = 'grayscale',
class_mode = 'categorical', # 'categorical'会返回2D的one-hot编码标签, 'binary'返回1D的二值标签, 'sparse'返回1D的整数标签
batch_size = BATCH_SIZE,
subset = 'training', # set as training data
shuffle = True,
seed = 42) # 相同方式打散
train_generator2 = train_datagen.flow_from_directory(directory = 'data/'+gen_name+'/train_pr/', target_size = (gene_length, 5),
color_mode = 'grayscale',
class_mode = 'categorical', # 'categorical'会返回2D的one-hot编码标签, 'binary'返回1D的二值标签, 'sparse'返回1D的整数标签
batch_size = BATCH_SIZE,
subset = 'training', # set as training data
shuffle = True,
seed = 42) # 相同方式打散
while True:
out1 = train_generator1.next()
out2 = train_generator2.next()
yield [out1[0], out2[0]], out1[1] # 返回两个的组合和结果
def generator_two_val():
val_generator1 = train_datagen.flow_from_directory(directory = 'data/'+gen_name+'/train_en/', target_size = (gene_length, 5),
color_mode = 'grayscale',
class_mode = 'categorical', # 'categorical'会返回2D的one-hot编码标签, 'binary'返回1D的二值标签, 'sparse'返回1D的整数标签
batch_size = BATCH_SIZE,
subset = 'validation', # set as validation data
shuffle =True,
seed = 42) # 相同方式打散
val_generator2 = train_datagen.flow_from_directory(directory = 'data/'+gen_name+'/train_pr/', target_size = (gene_length, 5),
color_mode = 'grayscale',
class_mode = 'categorical', # 'categorical'会返回2D的one-hot编码标签, 'binary'返回1D的二值标签, 'sparse'返回1D的整数标签
batch_size = BATCH_SIZE,
subset = 'validation', # set as validation data
shuffle = True,
seed = 42) # 相同方式打散
while True:
out1 = val_generator1.next()
out2 = val_generator2.next()
yield [out1[0], out2[0]], out1[1] # 返回两个的组合和结果
##############################
#
# 模型搭建
#
##############################
# 如果出现版本不兼容,那么就用这两句代码,否则会报警告
# import tensorflow.compat.v1 as tf
# tf.disable_v2_behavior()
from sklearn import metrics
from keras.callbacks import ModelCheckpoint
##############################
#
# Model building
#
##############################
if model_name == 'onehot_cnn_two_branch':
clf = model_onehot_cnn_two_branch(gene_length)
clf.summary() # 打印模型结构
'''
filename = 'best_model.h5'
modelCheckpoint = ModelCheckpoint(filename, monitor = 'val_accuracy', save_best_only = True, mode = 'max')
'''
from keras.callbacks import EarlyStopping
early_stopping = EarlyStopping(monitor = 'val_accuracy', patience = 10, restore_best_weights = True)
'''
fancy_print('train_generator.next()[0]', train_generator.next()[0], '+')
fancy_print('train_generator.next()[1]', train_generator.next()[1], '+')
fancy_print('train_generator.next()[0].shape', train_generator.next()[0].shape, '+')
fancy_print('train_generator.next()[1].shape', train_generator.next()[1].shape, '+')
fancy_print('val_generator.next()[0]', val_generator.next()[0], '-')
fancy_print('val_generator.next()[1]', val_generator.next()[1], '-')
fancy_print('val_generator.next()[0].shape', val_generator.next()[0].shape, '-')
fancy_print('val_generator.next()[1].shape', val_generator.next()[1].shape, '-')
'''
##############################
#
# 模型训练
#
##############################
# 不需要再算多少个epoch了,自己会算
history = clf.fit_generator(generator = generator_two_train(),
epochs = most_epoches,
validation_data = generator_two_val(),
steps_per_epoch = 24568 * 2 // BATCH_SIZE, # 全部训练
validation_steps = 3071 * 2 // BATCH_SIZE, # 全部验证
callbacks = [early_stopping],
shuffle = True, # 再次 shuffle
# max_queue_size = 64,
# workers = cpu_count,
# use_multiprocessing = True,
verbose = 2) # 一次训练就显示一行
clf.save_weights('h5_weights/'+gen_name+'/'+model_name+'.h5')
# 打印一下,方便检查
fancy_print('save_weights', 'h5_weights/'+gen_name+'/'+model_name+'.h5', '=')
def train_embedding(gen_name, model_name):
# 打印说明,方便检查
fancy_print('gen_name', gen_name)
fancy_print('model_name', model_name)
'''
2021-04-11 16:53:06.007063: E tensorflow/stream_executor/dnn.cc:616] CUDNN_STATUS_INTERNAL_ERROR
in tensorflow/stream_executor/cuda/cuda_dnn.cc(2011): 'cudnnRNNBackwardData( cudnn.handle(), rnn_desc.handle(),
model_dims.max_seq_length, output_desc.handles(), output_data.opaque(), output_desc.handles(), output_backprop_data.opaque(),
output_h_desc.handle(), output_h_backprop_data.opaque(), output_c_desc.handle(), output_c_backprop_data.opaque(),
rnn_desc.params_handle(), params.opaque(), input_h_desc.handle(), input_h_data.opaque(), input_c_desc.handle(),
input_c_data.opaque(), input_desc.handles(), input_backprop_data->opaque(), input_h_desc.handle(), input_h_backprop_data->opaque(),
input_c_desc.handle(), input_c_backprop_data->opaque(), workspace.opaque(), workspace.size(), reserve_space_data->opaque(), reserve_space_data->size())'
2021-04-11 16:53:06.007530: W tensorflow/core/framework/op_kernel.cc:1767] OP_REQUIRES failed at cudnn_rnn_ops.cc:1922:
Internal: Failed to call ThenRnnBackward with model config: [rnn_mode, rnn_input_mode, rnn_direction_mode]: 3, 0, 0 ,
[num_layers, input_size, num_units, dir_count, max_seq_length, batch_size, cell_num_units]: [1, 64, 50, 1, 100, 32, 0]
2021-04-11 16:53:06.007077: F tensorflow/stream_executor/cuda/cuda_dnn.cc:190] Check failed: status == CUDNN_STATUS_SUCCESS (7 vs. 0)Failed to set cuDNN stream.
解决方案
'''
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
physical_devices = tf.config.experimental.list_physical_devices('GPU')
tf.config.experimental.set_memory_growth(physical_devices[0], True)
##############################
#
# loss数据可视化
#
##############################
class PlotProgress(keras.callbacks.Callback):
def __init__(self, entity = ['loss', 'accuracy']):
self.entity = entity
def on_train_begin(self, logs={}):
self.i = 0
self.x = []
self.losses = []
self.val_losses = []
self.accs = []
self.val_accs = []
self.fig = plt.figure()
self.logs = []
def on_epoch_end(self, epoch, logs={}):
self.logs.append(logs)
self.x.append(self.i)
# 损失函数
self.losses.append(logs.get('{}'.format(self.entity[0])))
self.val_losses.append(logs.get('val_{}'.format(self.entity[0])))
# 准确率
self.accs.append(logs.get('{}'.format(self.entity[1])))
self.val_accs.append(logs.get('val_{}'.format(self.entity[1])))
self.i += 1
plt.figure(0)
plt.clf() # 清理历史遗迹
plt.plot(self.x, self.losses, label="{}".format(self.entity[0]))
plt.plot(self.x, self.val_losses, label="val_{}".format(self.entity[0]))
plt.legend()
plt.savefig('result/'+gen_name+'/'+model_name+'/loss.png')
# plt.pause(0.01)
# plt.show()
plt.figure(1)
plt.clf() # 清理历史遗迹
plt.plot(self.x, self.accs, label="{}".format(self.entity[1]))
plt.plot(self.x, self.val_accs, label="val_{}".format(self.entity[1]))
plt.legend()
plt.savefig('result/'+gen_name+'/'+model_name+'/acc.png')
# plt.pause(0.01)
# plt.show()
train = np.load('data/'+gen_name+'/embedding_train.npz')
X_en_tra, X_pr_tra, y_tra = train['X_en_tra'], train['X_pr_tra'], train['y_tra']
##############################
#
# Model building
#
##############################
if model_name == 'embedding_cnn_one_branch':
model = model_embedding_cnn_one_branch()
if model_name == 'embedding_cnn_two_branch':
model = model_embedding_cnn_two_branch()
if model_name == 'embedding_dense':
model = model_embedding_dense()
if model_name == 'onehot_embedding_cnn_one_branch':
model = model_onehot_embedding_cnn_one_branch()
if model_name == 'onehot_embedding_cnn_two_branch':
model = model_onehot_embedding_cnn_two_branch()
model.summary()
early_stopping = EarlyStopping(monitor = 'val_accuracy', patience = 20, restore_best_weights = True)
# 绘图函数
plot_progress = PlotProgress(entity = ['loss', 'accuracy'])
history = model.fit([X_en_tra, X_pr_tra], y_tra, epochs=most_epoches, batch_size=32, validation_split=0.11,
callbacks=[early_stopping, plot_progress],
# max_queue_size = 64,
# workers = cpu_count,
# use_multiprocessing = True,
verbose = 2 # 一次训练就显示一行
)
model.save_weights('h5_weights/'+gen_name+'/'+model_name+'.h5')
# 打印一下,方便检查
fancy_print('save_weights', 'h5_weights/'+gen_name+'/'+model_name+'.h5', '=')
########################################
#
# 本模块没有代码运行
#
########################################
if __name__ == '__main__':
pass
|
[
"keras.preprocessing.image.ImageDataGenerator",
"numpy.load",
"matplotlib.pyplot.clf",
"matplotlib.pyplot.legend",
"tensorflow.config.experimental.set_memory_growth",
"matplotlib.pyplot.figure",
"matplotlib.use",
"keras.callbacks.EarlyStopping",
"tensorflow.config.experimental.list_physical_devices",
"matplotlib.pyplot.savefig"
] |
[((205, 226), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (219, 226), False, 'import matplotlib\n'), ((1274, 1334), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)', 'validation_split': '(0.11)'}), '(rescale=1.0 / 255, validation_split=0.11)\n', (1292, 1334), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((5417, 5494), 'keras.callbacks.EarlyStopping', 'EarlyStopping', ([], {'monitor': '"""val_accuracy"""', 'patience': '(10)', 'restore_best_weights': '(True)'}), "(monitor='val_accuracy', patience=10, restore_best_weights=True)\n", (5430, 5494), False, 'from keras.callbacks import EarlyStopping\n'), ((7054, 7114), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)', 'validation_split': '(0.11)'}), '(rescale=1.0 / 255, validation_split=0.11)\n', (7072, 7114), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((11269, 11346), 'keras.callbacks.EarlyStopping', 'EarlyStopping', ([], {'monitor': '"""val_accuracy"""', 'patience': '(10)', 'restore_best_weights': '(True)'}), "(monitor='val_accuracy', patience=10, restore_best_weights=True)\n", (11282, 11346), False, 'from keras.callbacks import EarlyStopping\n'), ((14817, 14868), 'tensorflow.config.experimental.list_physical_devices', 'tf.config.experimental.list_physical_devices', (['"""GPU"""'], {}), "('GPU')\n", (14861, 14868), True, 'import tensorflow as tf\n'), ((14874, 14941), 'tensorflow.config.experimental.set_memory_growth', 'tf.config.experimental.set_memory_growth', (['physical_devices[0]', '(True)'], {}), '(physical_devices[0], True)\n', (14914, 14941), True, 'import tensorflow as tf\n'), ((16747, 16799), 'numpy.load', 'np.load', (["('data/' + gen_name + '/embedding_train.npz')"], {}), "('data/' + gen_name + '/embedding_train.npz')\n", (16754, 16799), True, 'import numpy as np\n'), ((17569, 17646), 'keras.callbacks.EarlyStopping', 'EarlyStopping', ([], {'monitor': '"""val_accuracy"""', 'patience': '(20)', 'restore_best_weights': '(True)'}), "(monitor='val_accuracy', patience=20, restore_best_weights=True)\n", (17582, 17646), False, 'from keras.callbacks import EarlyStopping\n'), ((3429, 3441), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3439, 3441), True, 'from matplotlib import pyplot as plt\n'), ((4010, 4023), 'matplotlib.pyplot.figure', 'plt.figure', (['(0)'], {}), '(0)\n', (4020, 4023), True, 'from matplotlib import pyplot as plt\n'), ((4037, 4046), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (4044, 4046), True, 'from matplotlib import pyplot as plt\n'), ((4233, 4245), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4243, 4245), True, 'from matplotlib import pyplot as plt\n'), ((4259, 4325), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('result/' + gen_name + '/' + model_name + '/loss.png')"], {}), "('result/' + gen_name + '/' + model_name + '/loss.png')\n", (4270, 4325), True, 'from matplotlib import pyplot as plt\n'), ((4390, 4403), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (4400, 4403), True, 'from matplotlib import pyplot as plt\n'), ((4417, 4426), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (4424, 4426), True, 'from matplotlib import pyplot as plt\n'), ((4610, 4622), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4620, 4622), True, 'from matplotlib import pyplot as plt\n'), ((4636, 4701), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('result/' + gen_name + '/' + model_name + '/acc.png')"], {}), "('result/' + gen_name + '/' + model_name + '/acc.png')\n", (4647, 4701), True, 'from matplotlib import pyplot as plt\n'), ((15443, 15455), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (15453, 15455), True, 'from matplotlib import pyplot as plt\n'), ((15987, 16000), 'matplotlib.pyplot.figure', 'plt.figure', (['(0)'], {}), '(0)\n', (15997, 16000), True, 'from matplotlib import pyplot as plt\n'), ((16014, 16023), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (16021, 16023), True, 'from matplotlib import pyplot as plt\n'), ((16210, 16222), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (16220, 16222), True, 'from matplotlib import pyplot as plt\n'), ((16236, 16302), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('result/' + gen_name + '/' + model_name + '/loss.png')"], {}), "('result/' + gen_name + '/' + model_name + '/loss.png')\n", (16247, 16302), True, 'from matplotlib import pyplot as plt\n'), ((16367, 16380), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (16377, 16380), True, 'from matplotlib import pyplot as plt\n'), ((16394, 16403), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (16401, 16403), True, 'from matplotlib import pyplot as plt\n'), ((16587, 16599), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (16597, 16599), True, 'from matplotlib import pyplot as plt\n'), ((16613, 16678), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('result/' + gen_name + '/' + model_name + '/acc.png')"], {}), "('result/' + gen_name + '/' + model_name + '/acc.png')\n", (16624, 16678), True, 'from matplotlib import pyplot as plt\n')]
|
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand, CommandError
from yepes.contrib.slugs import SlugHistory
class Command(BaseCommand):
help = 'Populates the slug history.'
requires_system_checks = True
def add_arguments(self, parser):
parser.add_argument('-f', '--force',
action='store_true',
default=False,
dest='force',
help='Collects slugs even if the history is not empty.')
parser.add_argument('-a', '--app-label',
action='store',
dest='app_label',
help='Limits the slug collection to the models of the given application.')
parser.add_argument('-m', '--model-names',
action='store',
dest='model_names',
help='Limits the slug collection to the given models.')
def handle(self, **options):
force = options.get('force')
app_label = options.get('app_label')
if not app_label:
app_label = None
model_names = options.get('model_names')
if not model_names:
model_names = None
else:
model_names = model_names.split(',')
SlugHistory.objects.populate(
force=force,
app_label=app_label,
model_names=model_names)
verbosity = int(options.get('verbosity', '1'))
if verbosity > 0:
self.stdout.write('Slugs were successfully collected.')
|
[
"yepes.contrib.slugs.SlugHistory.objects.populate"
] |
[((1246, 1338), 'yepes.contrib.slugs.SlugHistory.objects.populate', 'SlugHistory.objects.populate', ([], {'force': 'force', 'app_label': 'app_label', 'model_names': 'model_names'}), '(force=force, app_label=app_label, model_names=\n model_names)\n', (1274, 1338), False, 'from yepes.contrib.slugs import SlugHistory\n')]
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------------------
# Librerías
from bs4 import BeautifulSoup
import requests
import time
import json
import os
from json import dumps
import anyjson
from datastore import SubSecction
from log_helper import LogHelper
from parsedata import Tokenizer
from collections import namedtuple
import nltk
# --------------------------------------------------------------------------------
# Descargamos nltk punkt
nltk.download('punkt')
# --------------------------------------------------------------------------------
# Archivos de registro
loghelper = LogHelper()
logger = loghelper.getLogger("default")
logger.info("Start App")
# --------------------------------------------------------------------------------
# FAQ urls en diferentes lenguajes, actualmente admitidos: ES, EN
urls = {#'es': 'https://help.twitter.com/es',
'en': 'https://help.twitter.com/'
}
languages = { 'es':'spanish', 'en': 'english'}
# --------------------------------------------------------------------------------
# Tiempo limite para las solicitudes
CONST_TIMEOUT = 10
# --------------------------------------------------------------------------------
# Tiempo entre solicitudes
CONST_REQUEST_TIME_DELAY = 0
# --------------------------------------------------------------------------------
# Lista principal de las subsecciones
main_subsection_list = []
# --------------------------------------------------------------------------------
# Diccionario URL
url_dictionary = {}
# --------------------------------------------------------------------------------
for language, url in urls.items():
# Creación del archivo de registro
logger.info("Create Language Subsection {0!r} with url {1!r}".format(language,url))
sec = SubSecction('FAQ language {0}'.format(language), url, '', -1)
# Cogemos las ayudas principales en el correspondiente lenguaje
response = requests.get(url, timeout=CONST_TIMEOUT)
# Creamos el tokenizador para el lenguaje seleccionado
tokenizer = Tokenizer(logger,languages[language])
# Contenido HTML para analizar
content = BeautifulSoup(response.content, "html.parser")
# En esta función trataremos de almacenar en diferentes secciones el contenido
# de ayuda de la página, para ello tendremos que explorar todas las posibilidades
# que nos proporciona la página HTML en donde se puede encontrar dicho contenido
# cómo puede ser: hp01__content, hp01__topic-list-item, ap04, twtr-component-space--md
# Así pues el JSON generado, tendrá un título, un ID y contenido para quedar mejor
# estructurado a la hora poder trabajar con él
id = 0
for tweet in content.findAll('div', attrs={"class": "hp01__content"}):
title = tweet.p.text.strip()
logger.info("Create Subsection {0!r}".format(title))
mainSecction_item = SubSecction(title, url, tweet.p.text.strip(), id)
id = id + 1
pid = id
for text in tweet.findAll('li', attrs={"class", "hp01__topic-list-item"}):
sub_content_secction_title = text.a.text.strip()
logger.info("Create Subsection {0!r}".format(sub_content_secction_title))
if text.a.get('href') in url_dictionary:
pid = url_dictionary[text.a.get('href')]
continue
else:
url_dictionary[text.a.get('href')] = id
sub_content_secction = SubSecction(sub_content_secction_title,text.a.get('href'), '', pid)
sub_response = requests.get(text.a.get('href'), timeout=CONST_TIMEOUT)
sub_content = BeautifulSoup(sub_response.content, "html.parser")
for sub_text in sub_content.findAll('script', attrs={"type": "application/ld+json"}):
y = anyjson.deserialize(sub_text.text.strip().replace('@', ''))
if (y['type'] == 'CollectionPage'):
item_list = y['mainEntity']['itemListElement']
for item_text in item_list:
id = id +1
pid = id
if item_text['url'] in url_dictionary:
pid = url_dictionary[text.a.get('href')]
continue
else:
url_dictionary[item_text['url']] = id
time.sleep(CONST_REQUEST_TIME_DELAY)
page_response = requests.get(item_text['url'], timeout=CONST_TIMEOUT)
page_content = BeautifulSoup(page_response.content,"html.parser")
separator = ' '
buffer = ' '
data_html = page_content.findAll('div', attrs={"class": "ap04"})
data_html2 = page_content.findAll('div', attrs={"class": "twtr-component-space--md"})
if(len(data_html) >0):
for help_text in page_content.findAll('div', attrs={"class": "ap04"}):
data = separator.join(tokenizer.tokenize(help_text.text.strip().replace('@', '')))
if data not in buffer:
buffer = '{0} {1}'.format(buffer, data)
elif len(data_html2) > 0:
for help_text in data_html2:
data_text_2 = help_text.text.strip().replace('@', '')
if 'BreadcrumbList' not in data_text_2:
data = separator.join(tokenizer.tokenize(data_text_2))
if data not in buffer:
buffer = '{0} {1}'.format(buffer, data)
logger.info("Create Subsection {0!r} -> {1!r}".format(item_text['name'],item_text['url']))
item_subSection = SubSecction(item_text['name'],item_text['url'],buffer,pid)
sub_content_secction.addSubSecction(subSecction=item_subSection)
mainSecction_item.addSubSecction(subSecction = sub_content_secction)
sec.addSubSecction(subSecction=mainSecction_item)
main_subsection_list.append(sec)
# --------------------------------------------------------------------------------
# Guardamos los datos en español en un JSON
with open('es_data.json', 'a') as the_file:
str_data = str(main_subsection_list[0]).replace("\\","")
the_file.write(str_data)
# --------------------------------------------------------------------------------
# Guardamos los datos en inglés en un JSON
with open('en_data.json', 'a') as the_file:
str_data = str(main_subsection_list[0]).replace("\\","")
the_file.write(str_data)
|
[
"time.sleep",
"requests.get",
"datastore.SubSecction",
"bs4.BeautifulSoup",
"nltk.download",
"log_helper.LogHelper",
"parsedata.Tokenizer"
] |
[((512, 534), 'nltk.download', 'nltk.download', (['"""punkt"""'], {}), "('punkt')\n", (525, 534), False, 'import nltk\n'), ((654, 665), 'log_helper.LogHelper', 'LogHelper', ([], {}), '()\n', (663, 665), False, 'from log_helper import LogHelper\n'), ((1980, 2020), 'requests.get', 'requests.get', (['url'], {'timeout': 'CONST_TIMEOUT'}), '(url, timeout=CONST_TIMEOUT)\n', (1992, 2020), False, 'import requests\n'), ((2097, 2135), 'parsedata.Tokenizer', 'Tokenizer', (['logger', 'languages[language]'], {}), '(logger, languages[language])\n', (2106, 2135), False, 'from parsedata import Tokenizer\n'), ((2189, 2235), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content', '"""html.parser"""'], {}), "(response.content, 'html.parser')\n", (2202, 2235), False, 'from bs4 import BeautifulSoup\n'), ((3678, 3728), 'bs4.BeautifulSoup', 'BeautifulSoup', (['sub_response.content', '"""html.parser"""'], {}), "(sub_response.content, 'html.parser')\n", (3691, 3728), False, 'from bs4 import BeautifulSoup\n'), ((4437, 4473), 'time.sleep', 'time.sleep', (['CONST_REQUEST_TIME_DELAY'], {}), '(CONST_REQUEST_TIME_DELAY)\n', (4447, 4473), False, 'import time\n'), ((4514, 4567), 'requests.get', 'requests.get', (["item_text['url']"], {'timeout': 'CONST_TIMEOUT'}), "(item_text['url'], timeout=CONST_TIMEOUT)\n", (4526, 4567), False, 'import requests\n'), ((4607, 4658), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page_response.content', '"""html.parser"""'], {}), "(page_response.content, 'html.parser')\n", (4620, 4658), False, 'from bs4 import BeautifulSoup\n'), ((6006, 6067), 'datastore.SubSecction', 'SubSecction', (["item_text['name']", "item_text['url']", 'buffer', 'pid'], {}), "(item_text['name'], item_text['url'], buffer, pid)\n", (6017, 6067), False, 'from datastore import SubSecction\n')]
|
# Version 1
#TODO clean up these imports now that we are doing more modular processing
import sys, signal,platform,os,time,subprocess,configparser,multiprocessing,easygui,requests
from Communicate import *
from functions import LinuxIdleTime,WindowsIdleTime
#Script Version
ScriptVersion = '1.0'
#SHA256 of your downtime programs
SHA256ProgramMiner = '7db002483369077051d179a80105a816c45951c24fe65023d58bc05609c49f65'
SHA256ProgramSheepit = 'e4674e9e1be5bfd843c10dd9e4c42767608e3777760c83f9ccdfad5d9cffe59c'
#Github Repo link
GithubLink = 'https://api.github.com/repos/Luke-Larsen/DarkMiner'
#Development Mode ( Stops it from hiding in the background)
DevMode = 0 #0 off. Anything else means on
#functions
def errorOccurred(errorCode):
easygui.msgbox(errorCode,"ERROR OCCURRED")
sys.exit("ERROR")
def UpdateTotalMiningTime(value):
config.read('config.ini')
TotalTimeMining = config['value']['TotalTimeMining']
NewTotalTimeMining = int(TotalTimeMining) + int(value)
config['value'] = {
'TotalTimeMining' : NewTotalTimeMining
}
with open(os.path.expanduser('~') +'/.darkminer/config.ini', 'w+') as configfile:
config.write(configfile)
def UpdateScript():
print("Ran Update")
def Is64Windows():
return 'PROGRAMFILES(X86)' in os.environ
def GetProgramFiles32():
if Is64Windows():
return False
else:
return True
from functions import DownloadData
def Miner():
#TODO: Check if the last idle time is less then 1 minute and if it is increase the idle time required in the config.
#TODO: Start logging away time so that we can build a simple computer model of downtime to prevent false positives
if Communication == 2:
downTimeSignal(BaseSite,1)
if osSystem == 'win32':
if not os32Bit:
if os.path.exists(WinPathDownloads + 'xmrig.exe'):
print('exists no need to download')
else:
DownloadData(BaseSite + 'xmrig.exe', WinPathDownloads + 'xmrig.exe')
if os.path.exists(WinPathDownloads + 'WinRing0x64.sys'):
print('exists no need to download')
else:
DownloadData(BaseSite + 'WinRing64.sys', WinPathDownloads + 'WinRing0x64.sys')
if os.path.exists(WinPathDownloads + 'config.json'):
print('exists no need to download')
else:
DownloadData(BaseSite + 'config.json', WinPathDownloads + 'config.json')
import win32gui
import win32api
proc = subprocess.Popen([WinPathDownloads + "xmrig.exe"])
time.sleep(3)
def enumWindowFunc(hwnd, windowList):
""" win32gui.EnumWindows() callback """
text = win32gui.GetWindowText(hwnd)
className = win32gui.GetClassName(hwnd)
if text.find("xmrig") >= 0:
windowList.append((hwnd, text, className))
myWindows = []
win32gui.EnumWindows(enumWindowFunc, myWindows)
for hwnd, text, className in myWindows:
win32gui.ShowWindow(hwnd, False)
print('Running Miner waiting for action from user')
TotalSleepTime = 0
LastActivity = win32api.GetLastInputInfo()
while True:
if LastActivity != win32api.GetLastInputInfo():
proc.terminate() # Terminates Child Process
UpdateTotalMiningTime(TotalSleepTime)
if Communication == 2:
downTimeSignal(BaseSite,0)
break
elif LastActivity == win32api.GetLastInputInfo():
time.sleep(3)
TotalSleepTime += 3
main()
elif osSystem == 'Linux':
if is_64bits:
if(DownTimeActivity == "Miner"):
from Miner import LinuxMine64
LinuxMine64(LinuxPathDownloads,SHA256ProgramMiner,SHA256Program,waitTime,Communication,BaseSite)
elif(DownTimeActivity == "Sheepit"):
from sheepit import LinuxRender64
LinuxRender64(LinuxPathDownloads,waitTime,Communication,BaseSite)
main()
def Install():
if easygui.ynbox('Proceed with the install of DarkMiner. If you do not know what this is press NO', 'Title', ('Yes', 'No')):
if easygui.ynbox('Would you like this to reboot on each startup of the computer', 'Title', ('Yes', 'No')):
rebootStart = 1
else:
rebootStart = 0
#Grab data for config
msg = "Enter your configuration values"
title = "Enter Config data"
#0 least communication. 2 is the most communication
fieldNames = ["Webdomain", "Communication mode(0-2)"]
fieldValues = easygui.multenterbox(msg, title, fieldNames)
if fieldValues is None:
sys.exit(0)
# make sure that none of the fields were left blank
while 1:
errmsg = ""
for i, name in enumerate(fieldNames):
if fieldValues[i].strip() == "":
errmsg += "{} is a required field.\n\n".format(name)
if errmsg == "":
break # no problems found
fieldValues = easygui.multenterbox(errmsg, title, fieldNames, fieldValues)
if fieldValues is None:
break
#TODO check to make sure the website url is valid and will work
#writting to config
config['settings'] = {
"Agree" : 1,
"Communication" : fieldValues[1], #0 no communication; 1 basic comunication; 2 verbose communication
"DownTimeActivity" : "Miner",
"rebootStart" : rebootStart,
"waitTime" : '120',
"WinPathDownloads" : 'C:/Users/' + os.getlogin() + '/Downloads/',
"LinuxPathDownloads" : os.path.expanduser('~') +'/.darkminer/',
"UpdateFrom": 0 #0 github, 1 CNC
}
config['server'] = {
"Version" : ScriptVersion,
'BaseSite' : fieldValues[0]
}
config['value'] = {
'TotalTimeMining' : 0,
'SHA256Program': SHA256ProgramMiner #Checking the sha256 of the downloaded program to make sure that its good for now you will need to change it manually
}
with open('config.ini', 'w') as configfile:
config.write(configfile)
TotalTimeMining = 0
if(rebootStart):
#Set path to bin and create a folder in it
UserPath = os.path.expanduser('~') +'/.darkminer/'
FileName = sys.argv[0]
if not os.path.isdir(UserPath):
if osSystem == 'win32':
os.makedirs(UserPath)
elif osSystem == 'Linux':
os.mkdir(UserPath,0o755)
#code for setting up the boot
if osSystem == 'Linux':
#switching to using systemd
#check if systemd user path is set up
if not os.path.isdir(os.path.expanduser('~')+'/.config/systemd/user'):
os.mkdir(os.path.expanduser('~')+'/.config/systemd',0o755)
os.mkdir(os.path.expanduser('~')+'/.config/systemd/user',0o755)
#Add our service
filehandle = open(os.path.expanduser('~')+'/.config/systemd/user/darkminer.service', 'w')
if DevMode == 0:
filehandle.write('[Unit]\
\nDescription=Dark Miner Service\
\nPartOf=graphical-session.target\
\n[Service]\
\nExecStart=/usr/bin/python3.8 '+os.path.expanduser('~')+'/.darkminer/main.py --display=:0.0\
\nRestart=always\
\n[Install]\
\nWantedBy=xsession.target\
')
else:
filehandle.write('[Unit]\
\nDescription=Dark Miner Service\
\nPartOf=graphical-session.target\
\n[Service]\
\nExecStart=/usr/bin/python3.8 '+os.path.expanduser('~')+'/.darkminer/main.py\
\nRestart=always\
\n[Install]\
\nWantedBy=xsession.target\
')
filehandle.close()
#Setting up startup on user login; check graphical environment is ready
filehandle = open(os.path.expanduser('~')+'/.config/systemd/user/xsession.target', 'w')
filehandle.write('[Unit]\
\nDescription=Users Xsession running\
\nBindsTo=graphical-session.target\
')
filehandle.close()
#Start xsession.service on user login
filehandle = open(os.path.expanduser('~')+'/.xsessionrc', 'w')
filehandle.write('systemctl --user import-environment PATH DBUS_SESSION_BUS_ADDRESS\
\nsystemctl --no-block --user start xsession.target\
')
filehandle.close()
result = subprocess.run(['systemctl', '--user', 'enable','darkminer'], stdout=subprocess.PIPE)
print(result)
elif osSystem == 'win32':
#I may come back to this later so that I can use the task schedular for updating and running some on crash. Also might make it
#easier to install because windows probably picks up this method as a virus.
#Keep everything clean and in folders
os.makedirs(os.path.expanduser('~')+"/AppData/Roaming/DarkMiner/")
bat = open(os.path.expanduser('~')+"/AppData/Roaming/DarkMiner/"+"DarkMiner.bat", "a")
bat.write("py "+UserPath+"main.py")
bat.close()
#now create a vbs script so you don't have to see the damn terminal all the time
vbs = open(os.path.expanduser('~')+"/AppData/Roaming/Microsoft/Windows/Start Menu/Programs/Startup/"+"DarkMiner.vbs", "a")
vbs.write('Set WinScriptHost = CreateObject("WScript.Shell") \n WinScriptHost.Run Chr(34) & "'+os.path.expanduser('~')+"/AppData/Roaming/DarkMiner/DarkMiner.bat"+'" & Chr(34), 0 \n Set WinScriptHost = Nothing')
vbs.close()
#Copy files to working directory
from shutil import copyfile
copyfile("main.py", UserPath+"main.py")
copyfile("functions.py", UserPath+"functions.py")
copyfile("Communicate.py", UserPath+"Communicate.py")
copyfile("Miner.py", UserPath+"Miner.py")
copyfile("sheepit.py", UserPath+"sheepit.py")
copyfile("config.ini", UserPath+"config.ini")
#os.remove("config.ini")
#Start file from working directory
easygui.msgbox('Installed DarkMiner in '+UserPath+ " starting program", 'All done')
if osSystem == 'Linux':
if DevMode == 0:
os.system("nohup python3 "+UserPath+"main.py"+" &")
else:
os.system("python3 "+UserPath+"main.py")
elif osSystem == 'win32':
os.system("py "+UserPath+"main.py")
def main():
if osSystem == 'win32':
WindowsIdleTime()
elif osSystem == 'Linux':
LinuxIdleTime(waitTime)
Miner()
#Handle a program shutdown
def handler(signum = None, frame = None):
print('\n')
if DownTimeActivity == "Miner":
from Miner import Kill
elif DownTimeActivity == "Sheepit":
from sheepit import Kill
Kill()
print('Program Closed')
sys.exit(0)
for sig in [signal.SIGTERM, signal.SIGINT, signal.SIGHUP, signal.SIGQUIT]:
signal.signal(sig, handler)
#Dependency check
try:
result = subprocess.run(['xprintidle'], stdout=subprocess.PIPE)
except:
print("xprintidle is not installed")
exit()
#Read from Config file if exists
config = configparser.ConfigParser()
if os.path.isfile(os.path.expanduser('~') +'/.darkminer/'+"config.ini"):
config.read(os.path.expanduser('~') +'/.darkminer/'+"config.ini")
#Settings
Agree = int(config['settings']['Agree'])
Communication = int(config['settings']['communication'])
DownTimeActivity = config['settings']['DownTimeActivity'] #What you want to run on downtime
rebootStart = int(config['settings']['rebootStart'])
waitTime = int(config['settings']['waitTime'])
WinPathDownloads = config['settings']['WinPathDownloads']
LinuxPathDownloads = config['settings']['LinuxPathDownloads']
try:
UpdateFrom = config['settings']['UpdateFrom']
except KeyError as e:
#No value set because this could be an update to a running system
UpdateFrom = 0
#Server
BaseSite = config['server']['BaseSite']
Version = config['server']['Version']
#check if updated script
if float(Version) < float(ScriptVersion):
print('Script has been updated')
Version = ScriptVersion
config['server']= {
'BaseSite': BaseSite,
'Version' : ScriptVersion
}
with open('config.ini', 'w') as configfile:
config.write(configfile)
#Values
TotalTimeMining = config['value']['totaltimemining']
try:
SHA256Program = config['value']['SHA256Program']
except KeyError as e:
SHA256Program = SHA256ProgramMiner
else:
Agree = 0
#Start of program determines what operating system to go with
if sys.platform.startswith('win32'):
osSystem = 'win32'
os32Bit = GetProgramFiles32()
#Check if User has agreed to mine
if(Agree):
#Check version of the program to make sure we are running the latest and greatest
if Communication >= 1:
checkVersion(ScriptVersion,BaseSite,osSystem,GithubLink)
main()
else:
Install()
elif sys.platform.startswith('linux'):
osSystem = 'Linux'
is_64bits = sys.maxsize > 2 ** 32
if(Agree):
if Communication >= 1:
checkVersion(ScriptVersion,BaseSite,osSystem,GithubLink)
main()
else:
Install()
|
[
"sys.platform.startswith",
"os.mkdir",
"functions.DownloadData",
"sheepit.LinuxRender64",
"win32gui.ShowWindow",
"Miner.LinuxMine64",
"os.path.exists",
"win32gui.GetWindowText",
"functions.LinuxIdleTime",
"win32gui.EnumWindows",
"easygui.ynbox",
"shutil.copyfile",
"win32gui.GetClassName",
"configparser.ConfigParser",
"subprocess.Popen",
"win32api.GetLastInputInfo",
"os.system",
"time.sleep",
"signal.signal",
"sys.exit",
"subprocess.run",
"os.getlogin",
"os.makedirs",
"easygui.multenterbox",
"functions.WindowsIdleTime",
"os.path.isdir",
"sheepit.Kill",
"easygui.msgbox",
"os.path.expanduser"
] |
[((11897, 11924), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (11922, 11924), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((13451, 13483), 'sys.platform.startswith', 'sys.platform.startswith', (['"""win32"""'], {}), "('win32')\n", (13474, 13483), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((743, 786), 'easygui.msgbox', 'easygui.msgbox', (['errorCode', '"""ERROR OCCURRED"""'], {}), "(errorCode, 'ERROR OCCURRED')\n", (757, 786), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((790, 807), 'sys.exit', 'sys.exit', (['"""ERROR"""'], {}), "('ERROR')\n", (798, 807), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((4258, 4388), 'easygui.ynbox', 'easygui.ynbox', (['"""Proceed with the install of DarkMiner. If you do not know what this is press NO"""', '"""Title"""', "('Yes', 'No')"], {}), "(\n 'Proceed with the install of DarkMiner. If you do not know what this is press NO'\n , 'Title', ('Yes', 'No'))\n", (4271, 4388), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((10450, 10491), 'shutil.copyfile', 'copyfile', (['"""main.py"""', "(UserPath + 'main.py')"], {}), "('main.py', UserPath + 'main.py')\n", (10458, 10491), False, 'from shutil import copyfile\n'), ((10494, 10545), 'shutil.copyfile', 'copyfile', (['"""functions.py"""', "(UserPath + 'functions.py')"], {}), "('functions.py', UserPath + 'functions.py')\n", (10502, 10545), False, 'from shutil import copyfile\n'), ((10548, 10603), 'shutil.copyfile', 'copyfile', (['"""Communicate.py"""', "(UserPath + 'Communicate.py')"], {}), "('Communicate.py', UserPath + 'Communicate.py')\n", (10556, 10603), False, 'from shutil import copyfile\n'), ((10606, 10649), 'shutil.copyfile', 'copyfile', (['"""Miner.py"""', "(UserPath + 'Miner.py')"], {}), "('Miner.py', UserPath + 'Miner.py')\n", (10614, 10649), False, 'from shutil import copyfile\n'), ((10652, 10699), 'shutil.copyfile', 'copyfile', (['"""sheepit.py"""', "(UserPath + 'sheepit.py')"], {}), "('sheepit.py', UserPath + 'sheepit.py')\n", (10660, 10699), False, 'from shutil import copyfile\n'), ((10702, 10749), 'shutil.copyfile', 'copyfile', (['"""config.ini"""', "(UserPath + 'config.ini')"], {}), "('config.ini', UserPath + 'config.ini')\n", (10710, 10749), False, 'from shutil import copyfile\n'), ((10820, 10910), 'easygui.msgbox', 'easygui.msgbox', (["('Installed DarkMiner in ' + UserPath + ' starting program')", '"""All done"""'], {}), "('Installed DarkMiner in ' + UserPath + ' starting program',\n 'All done')\n", (10834, 10910), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((11543, 11549), 'sheepit.Kill', 'Kill', ([], {}), '()\n', (11547, 11549), False, 'from sheepit import Kill\n'), ((11582, 11593), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (11590, 11593), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((11674, 11701), 'signal.signal', 'signal.signal', (['sig', 'handler'], {}), '(sig, handler)\n', (11687, 11701), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((11739, 11793), 'subprocess.run', 'subprocess.run', (["['xprintidle']"], {'stdout': 'subprocess.PIPE'}), "(['xprintidle'], stdout=subprocess.PIPE)\n", (11753, 11793), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((13833, 13865), 'sys.platform.startswith', 'sys.platform.startswith', (['"""linux"""'], {}), "('linux')\n", (13856, 13865), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((4391, 4497), 'easygui.ynbox', 'easygui.ynbox', (['"""Would you like this to reboot on each startup of the computer"""', '"""Title"""', "('Yes', 'No')"], {}), "('Would you like this to reboot on each startup of the computer',\n 'Title', ('Yes', 'No'))\n", (4404, 4497), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((4823, 4867), 'easygui.multenterbox', 'easygui.multenterbox', (['msg', 'title', 'fieldNames'], {}), '(msg, title, fieldNames)\n', (4843, 4867), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((11221, 11238), 'functions.WindowsIdleTime', 'WindowsIdleTime', ([], {}), '()\n', (11236, 11238), False, 'from functions import LinuxIdleTime, WindowsIdleTime\n'), ((1814, 1860), 'os.path.exists', 'os.path.exists', (["(WinPathDownloads + 'xmrig.exe')"], {}), "(WinPathDownloads + 'xmrig.exe')\n", (1828, 1860), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((2032, 2084), 'os.path.exists', 'os.path.exists', (["(WinPathDownloads + 'WinRing0x64.sys')"], {}), "(WinPathDownloads + 'WinRing0x64.sys')\n", (2046, 2084), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((2266, 2314), 'os.path.exists', 'os.path.exists', (["(WinPathDownloads + 'config.json')"], {}), "(WinPathDownloads + 'config.json')\n", (2280, 2314), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((2551, 2601), 'subprocess.Popen', 'subprocess.Popen', (["[WinPathDownloads + 'xmrig.exe']"], {}), "([WinPathDownloads + 'xmrig.exe'])\n", (2567, 2601), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((2614, 2627), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2624, 2627), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((2990, 3037), 'win32gui.EnumWindows', 'win32gui.EnumWindows', (['enumWindowFunc', 'myWindows'], {}), '(enumWindowFunc, myWindows)\n', (3010, 3037), False, 'import win32gui\n'), ((3261, 3288), 'win32api.GetLastInputInfo', 'win32api.GetLastInputInfo', ([], {}), '()\n', (3286, 3288), False, 'import win32api\n'), ((4912, 4923), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4920, 4923), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((5294, 5354), 'easygui.multenterbox', 'easygui.multenterbox', (['errmsg', 'title', 'fieldNames', 'fieldValues'], {}), '(errmsg, title, fieldNames, fieldValues)\n', (5314, 5354), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((6587, 6610), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (6605, 6610), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((6673, 6696), 'os.path.isdir', 'os.path.isdir', (['UserPath'], {}), '(UserPath)\n', (6686, 6696), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((9225, 9316), 'subprocess.run', 'subprocess.run', (["['systemctl', '--user', 'enable', 'darkminer']"], {'stdout': 'subprocess.PIPE'}), "(['systemctl', '--user', 'enable', 'darkminer'], stdout=\n subprocess.PIPE)\n", (9239, 9316), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((10969, 11026), 'os.system', 'os.system', (["('nohup python3 ' + UserPath + 'main.py' + ' &')"], {}), "('nohup python3 ' + UserPath + 'main.py' + ' &')\n", (10978, 11026), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((11047, 11091), 'os.system', 'os.system', (["('python3 ' + UserPath + 'main.py')"], {}), "('python3 ' + UserPath + 'main.py')\n", (11056, 11091), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((11135, 11174), 'os.system', 'os.system', (["('py ' + UserPath + 'main.py')"], {}), "('py ' + UserPath + 'main.py')\n", (11144, 11174), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((11277, 11300), 'functions.LinuxIdleTime', 'LinuxIdleTime', (['waitTime'], {}), '(waitTime)\n', (11290, 11300), False, 'from functions import LinuxIdleTime, WindowsIdleTime\n'), ((11943, 11966), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (11961, 11966), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((1080, 1103), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (1098, 1103), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((1948, 2016), 'functions.DownloadData', 'DownloadData', (["(BaseSite + 'xmrig.exe')", "(WinPathDownloads + 'xmrig.exe')"], {}), "(BaseSite + 'xmrig.exe', WinPathDownloads + 'xmrig.exe')\n", (1960, 2016), False, 'from functions import DownloadData\n'), ((2172, 2250), 'functions.DownloadData', 'DownloadData', (["(BaseSite + 'WinRing64.sys')", "(WinPathDownloads + 'WinRing0x64.sys')"], {}), "(BaseSite + 'WinRing64.sys', WinPathDownloads + 'WinRing0x64.sys')\n", (2184, 2250), False, 'from functions import DownloadData\n'), ((2402, 2474), 'functions.DownloadData', 'DownloadData', (["(BaseSite + 'config.json')", "(WinPathDownloads + 'config.json')"], {}), "(BaseSite + 'config.json', WinPathDownloads + 'config.json')\n", (2414, 2474), False, 'from functions import DownloadData\n'), ((2758, 2786), 'win32gui.GetWindowText', 'win32gui.GetWindowText', (['hwnd'], {}), '(hwnd)\n', (2780, 2786), False, 'import win32gui\n'), ((2815, 2842), 'win32gui.GetClassName', 'win32gui.GetClassName', (['hwnd'], {}), '(hwnd)\n', (2836, 2842), False, 'import win32gui\n'), ((3106, 3138), 'win32gui.ShowWindow', 'win32gui.ShowWindow', (['hwnd', '(False)'], {}), '(hwnd, False)\n', (3125, 3138), False, 'import win32gui\n'), ((5912, 5935), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (5930, 5935), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((6749, 6770), 'os.makedirs', 'os.makedirs', (['UserPath'], {}), '(UserPath)\n', (6760, 6770), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((12014, 12037), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (12032, 12037), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((3348, 3375), 'win32api.GetLastInputInfo', 'win32api.GetLastInputInfo', ([], {}), '()\n', (3373, 3375), False, 'import win32api\n'), ((3938, 4043), 'Miner.LinuxMine64', 'LinuxMine64', (['LinuxPathDownloads', 'SHA256ProgramMiner', 'SHA256Program', 'waitTime', 'Communication', 'BaseSite'], {}), '(LinuxPathDownloads, SHA256ProgramMiner, SHA256Program, waitTime,\n Communication, BaseSite)\n', (3949, 4043), False, 'from Miner import LinuxMine64\n'), ((5846, 5859), 'os.getlogin', 'os.getlogin', ([], {}), '()\n', (5857, 5859), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((6825, 6848), 'os.mkdir', 'os.mkdir', (['UserPath', '(493)'], {}), '(UserPath, 493)\n', (6833, 6848), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((7314, 7337), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (7332, 7337), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((8539, 8562), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (8557, 8562), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((8919, 8942), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (8937, 8942), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((3657, 3684), 'win32api.GetLastInputInfo', 'win32api.GetLastInputInfo', ([], {}), '()\n', (3682, 3684), False, 'import win32api\n'), ((3706, 3719), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (3716, 3719), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((4150, 4218), 'sheepit.LinuxRender64', 'LinuxRender64', (['LinuxPathDownloads', 'waitTime', 'Communication', 'BaseSite'], {}), '(LinuxPathDownloads, waitTime, Communication, BaseSite)\n', (4163, 4218), False, 'from sheepit import LinuxRender64\n'), ((7049, 7072), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (7067, 7072), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((7124, 7147), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (7142, 7147), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((7199, 7222), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (7217, 7222), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((9675, 9698), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (9693, 9698), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((7700, 7723), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (7718, 7723), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((8206, 8229), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (8224, 8229), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((9753, 9776), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (9771, 9776), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((10017, 10040), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (10035, 10040), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n'), ((10236, 10259), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (10254, 10259), False, 'import sys, signal, platform, os, time, subprocess, configparser, multiprocessing, easygui, requests\n')]
|
import pygame
import sys
#define bird class
class Bird(object):
def __init__(self):
self.birdRect = pygame.Rect(65,50,50,50)
self.birdStatus = [pygame.image.load("flappybirdassets/assets/1.png"),
pygame.image.load("flappybirdassets/assets/2.png"),
pygame.image.load("flappybirdassets/assets/dead.png")]
self.status = 0
self.birdX = 120
self.birdY = 350
self.jump = False
self.jumpSpeed = 10
self.gravity = 5
self.dead = False
def birdUpdate(self):
#movement
if self.jump:
self.jumpSpeed -= 1
self.birdY -= self.jumpSpeed
else:
self.gravity += 0.2
self.birdY += self.gravity
self.birdRect[1] = self.birdY
def createMap():
screen.blit(background,(0,0))
#display pine
screen.blit(Pipeline.pineUp,(Pipeline.wallx, -300))
screen.blit(Pipeline.pineUp,(Pipeline.wallx, 500))
Pipeline.PipelineUpdate()
#display bird
if Bird.dead :
Bird.status = 2
elif Bird.jump :
Bird.status = 1
screen.blit(Bird.birdStatus[Bird.status], (Bird.birdX,Bird.birdY))
Bird.birdUpdate()
screen.blit(font.render('Score:'+ str(score),1,(255,255,255)),(100,50))
pygame.display.update()
#define pipeline class
class Pipeline(object):
def __init__(self):
self.wallx = 400
self.pineUp = pygame.image.load("flappybirdassets/assets/top.png")
self.pineDown = pygame.image.load("flappybirdassets/assets/bottom.png")
def PipelineUpdate(self):
#movement
self.wallx -= 5
if self.wallx < -80:
global score
score += 1
self.wallx = 400
def checkDead():
upRect = pygame.Rect(Pipeline.wallx,-300,Pipeline.pineUp.get_width(),Pipeline.pineUp.get_height())
downRect = pygame.Rect(Pipeline.wallx,500,Pipeline.pineDown.get_width(),Pipeline.pineDown.get_height())
if upRect.colliderect(Bird.birdRect) or downRect.colliderect(Bird.birdRect):
Bird.dead = True
if not Bird.birdRect[1] < height:
Bird.dead = True
return True
else:
return False
def getResult():
final_text1 = "GAME OVER"
final_text2 = "Your final score is :" + str(score)
ft1_font = fit1_font = pygame.font.SysFont("Arial",70)
ft1_surf = font.render(final_text1,1,(242,3,36))
ft2_font = fit2_font = pygame.font.SysFont("Arial",50)
ft2_surf = font.render(final_text2,1,(253,177,6))
screen.blit(ft1_surf,[screen.get_width()/2-ft1_surf.get_width()/2,100])
screen.blit(ft2_surf,[screen.get_width()/2-ft2_surf.get_width()/2,200])
pygame.display.update()
if __name__ == '__main__':
pygame.init()
font = pygame.font.SysFont(None,50)
size = width, height = 400,650
screen = pygame.display.set_mode(size) #setting windows sieze
clock = pygame.time.Clock()# setting delay time
color = (255,255,255)
Bird = Bird()
Pipeline = Pipeline()
score = 0
while True:
clock.tick(60)
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
if (event.type == pygame.MOUSEBUTTONDOWN or event.type == pygame.KEYDOWN) and not Bird.dead :
Bird.jump = True
Bird.gravity = 5
Bird.jumpSpeed = 10
# screen.fill(color)
background = pygame.image.load("flappybirdassets/assets/background.png")
if checkDead():
getResult()
else:
createMap()
pygame.quit()
|
[
"pygame.quit",
"pygame.font.SysFont",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.Rect",
"pygame.init",
"pygame.display.update",
"pygame.image.load",
"pygame.time.Clock",
"sys.exit"
] |
[((3678, 3691), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (3689, 3691), False, 'import pygame\n'), ((1382, 1405), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (1403, 1405), False, 'import pygame\n'), ((2417, 2449), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""Arial"""', '(70)'], {}), "('Arial', 70)\n", (2436, 2449), False, 'import pygame\n'), ((2529, 2561), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""Arial"""', '(50)'], {}), "('Arial', 50)\n", (2548, 2561), False, 'import pygame\n'), ((2772, 2795), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (2793, 2795), False, 'import pygame\n'), ((2828, 2841), 'pygame.init', 'pygame.init', ([], {}), '()\n', (2839, 2841), False, 'import pygame\n'), ((2853, 2882), 'pygame.font.SysFont', 'pygame.font.SysFont', (['None', '(50)'], {}), '(None, 50)\n', (2872, 2882), False, 'import pygame\n'), ((2931, 2960), 'pygame.display.set_mode', 'pygame.display.set_mode', (['size'], {}), '(size)\n', (2954, 2960), False, 'import pygame\n'), ((2996, 3015), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (3013, 3015), False, 'import pygame\n'), ((114, 141), 'pygame.Rect', 'pygame.Rect', (['(65)', '(50)', '(50)', '(50)'], {}), '(65, 50, 50, 50)\n', (125, 141), False, 'import pygame\n'), ((1525, 1577), 'pygame.image.load', 'pygame.image.load', (['"""flappybirdassets/assets/top.png"""'], {}), "('flappybirdassets/assets/top.png')\n", (1542, 1577), False, 'import pygame\n'), ((1602, 1657), 'pygame.image.load', 'pygame.image.load', (['"""flappybirdassets/assets/bottom.png"""'], {}), "('flappybirdassets/assets/bottom.png')\n", (1619, 1657), False, 'import pygame\n'), ((3183, 3201), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (3199, 3201), False, 'import pygame\n'), ((3531, 3590), 'pygame.image.load', 'pygame.image.load', (['"""flappybirdassets/assets/background.png"""'], {}), "('flappybirdassets/assets/background.png')\n", (3548, 3590), False, 'import pygame\n'), ((166, 216), 'pygame.image.load', 'pygame.image.load', (['"""flappybirdassets/assets/1.png"""'], {}), "('flappybirdassets/assets/1.png')\n", (183, 216), False, 'import pygame\n'), ((245, 295), 'pygame.image.load', 'pygame.image.load', (['"""flappybirdassets/assets/2.png"""'], {}), "('flappybirdassets/assets/2.png')\n", (262, 295), False, 'import pygame\n'), ((324, 377), 'pygame.image.load', 'pygame.image.load', (['"""flappybirdassets/assets/dead.png"""'], {}), "('flappybirdassets/assets/dead.png')\n", (341, 377), False, 'import pygame\n'), ((3260, 3270), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3268, 3270), False, 'import sys\n')]
|
from mock import patch
from specter import Spec, expect
from requests_cloud_auth import rackspace
from spec import get_keystone_v2_auth_resp
class AuthenticationToRackspace(Spec):
class PasswordAuthentication(Spec):
def before_all(self):
self.auth = rackspace.RackspacePasswordAuth(
username='tester',
password='password'
)
@patch("requests.post")
def can_authenticate(self, post_func):
post_func.return_value = get_keystone_v2_auth_resp()
creds = self.auth.authenticate()
expect(creds.get('token', None)).to.equal('some_token')
expect(creds.get('project_id', None)).to.equal('some_tenant')
@patch("requests.post")
def can_get_token(self, post_func):
post_func.return_value = get_keystone_v2_auth_resp()
token, tenant = self.auth.get_token()
expect(token).to.equal('some_token')
class ApiKeyAuthentication(Spec):
def before_all(self):
self.auth = rackspace.RackspaceApiKeyAuth(
username='tester',
api_key='api_key'
)
@patch("requests.post")
def can_authenticate(self, post_func):
post_func.return_value = get_keystone_v2_auth_resp()
creds = self.auth.authenticate()
expect(creds.get('token', None)).to.equal('some_token')
expect(creds.get('project_id', None)).to.equal('some_tenant')
@patch("requests.post")
def can_get_token(self, post_func):
post_func.return_value = get_keystone_v2_auth_resp()
token, tenant = self.auth.get_token()
expect(token).to.equal('some_token')
class SupportedRackspaceRegions(Spec):
def can_use_uk_region(self):
self.auth = rackspace.RackspacePasswordAuth(
username='tester',
password='<PASSWORD>',
region='UK'
)
expect(rackspace.UK_ENDPOINT).to.be_in(self.auth.endpoint)
self.auth = rackspace.RackspaceApiKeyAuth(
username='tester',
api_key='some_pass',
region='UK'
)
expect(rackspace.UK_ENDPOINT).to.be_in(self.auth.endpoint)
def can_use_us_region(self):
self.auth = rackspace.RackspacePasswordAuth(
username='tester',
password='<PASSWORD>',
region='US'
)
expect(rackspace.US_ENDPOINT).to.be_in(self.auth.endpoint)
self.auth = rackspace.RackspaceApiKeyAuth(
username='tester',
api_key='some_pass',
region='US'
)
expect(rackspace.US_ENDPOINT).to.be_in(self.auth.endpoint)
|
[
"requests_cloud_auth.rackspace.RackspacePasswordAuth",
"spec.get_keystone_v2_auth_resp",
"specter.expect",
"mock.patch",
"requests_cloud_auth.rackspace.RackspaceApiKeyAuth"
] |
[((405, 427), 'mock.patch', 'patch', (['"""requests.post"""'], {}), "('requests.post')\n", (410, 427), False, 'from mock import patch\n'), ((739, 761), 'mock.patch', 'patch', (['"""requests.post"""'], {}), "('requests.post')\n", (744, 761), False, 'from mock import patch\n'), ((1188, 1210), 'mock.patch', 'patch', (['"""requests.post"""'], {}), "('requests.post')\n", (1193, 1210), False, 'from mock import patch\n'), ((1522, 1544), 'mock.patch', 'patch', (['"""requests.post"""'], {}), "('requests.post')\n", (1527, 1544), False, 'from mock import patch\n'), ((1849, 1939), 'requests_cloud_auth.rackspace.RackspacePasswordAuth', 'rackspace.RackspacePasswordAuth', ([], {'username': '"""tester"""', 'password': '"""<PASSWORD>"""', 'region': '"""UK"""'}), "(username='tester', password='<PASSWORD>',\n region='UK')\n", (1880, 1939), False, 'from requests_cloud_auth import rackspace\n'), ((2071, 2157), 'requests_cloud_auth.rackspace.RackspaceApiKeyAuth', 'rackspace.RackspaceApiKeyAuth', ([], {'username': '"""tester"""', 'api_key': '"""some_pass"""', 'region': '"""UK"""'}), "(username='tester', api_key='some_pass',\n region='UK')\n", (2100, 2157), False, 'from requests_cloud_auth import rackspace\n'), ((2322, 2412), 'requests_cloud_auth.rackspace.RackspacePasswordAuth', 'rackspace.RackspacePasswordAuth', ([], {'username': '"""tester"""', 'password': '"""<PASSWORD>"""', 'region': '"""US"""'}), "(username='tester', password='<PASSWORD>',\n region='US')\n", (2353, 2412), False, 'from requests_cloud_auth import rackspace\n'), ((2544, 2630), 'requests_cloud_auth.rackspace.RackspaceApiKeyAuth', 'rackspace.RackspaceApiKeyAuth', ([], {'username': '"""tester"""', 'api_key': '"""some_pass"""', 'region': '"""US"""'}), "(username='tester', api_key='some_pass',\n region='US')\n", (2573, 2630), False, 'from requests_cloud_auth import rackspace\n'), ((277, 348), 'requests_cloud_auth.rackspace.RackspacePasswordAuth', 'rackspace.RackspacePasswordAuth', ([], {'username': '"""tester"""', 'password': '"""password"""'}), "(username='tester', password='password')\n", (308, 348), False, 'from requests_cloud_auth import rackspace\n'), ((512, 539), 'spec.get_keystone_v2_auth_resp', 'get_keystone_v2_auth_resp', ([], {}), '()\n', (537, 539), False, 'from spec import get_keystone_v2_auth_resp\n'), ((843, 870), 'spec.get_keystone_v2_auth_resp', 'get_keystone_v2_auth_resp', ([], {}), '()\n', (868, 870), False, 'from spec import get_keystone_v2_auth_resp\n'), ((1064, 1131), 'requests_cloud_auth.rackspace.RackspaceApiKeyAuth', 'rackspace.RackspaceApiKeyAuth', ([], {'username': '"""tester"""', 'api_key': '"""api_key"""'}), "(username='tester', api_key='api_key')\n", (1093, 1131), False, 'from requests_cloud_auth import rackspace\n'), ((1295, 1322), 'spec.get_keystone_v2_auth_resp', 'get_keystone_v2_auth_resp', ([], {}), '()\n', (1320, 1322), False, 'from spec import get_keystone_v2_auth_resp\n'), ((1626, 1653), 'spec.get_keystone_v2_auth_resp', 'get_keystone_v2_auth_resp', ([], {}), '()\n', (1651, 1653), False, 'from spec import get_keystone_v2_auth_resp\n'), ((1991, 2020), 'specter.expect', 'expect', (['rackspace.UK_ENDPOINT'], {}), '(rackspace.UK_ENDPOINT)\n', (1997, 2020), False, 'from specter import Spec, expect\n'), ((2209, 2238), 'specter.expect', 'expect', (['rackspace.UK_ENDPOINT'], {}), '(rackspace.UK_ENDPOINT)\n', (2215, 2238), False, 'from specter import Spec, expect\n'), ((2464, 2493), 'specter.expect', 'expect', (['rackspace.US_ENDPOINT'], {}), '(rackspace.US_ENDPOINT)\n', (2470, 2493), False, 'from specter import Spec, expect\n'), ((2682, 2711), 'specter.expect', 'expect', (['rackspace.US_ENDPOINT'], {}), '(rackspace.US_ENDPOINT)\n', (2688, 2711), False, 'from specter import Spec, expect\n'), ((934, 947), 'specter.expect', 'expect', (['token'], {}), '(token)\n', (940, 947), False, 'from specter import Spec, expect\n'), ((1717, 1730), 'specter.expect', 'expect', (['token'], {}), '(token)\n', (1723, 1730), False, 'from specter import Spec, expect\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time : 17/3/21 下午5:32
import os
from app import create_app, db
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
[
"flask_script.Manager",
"flask_migrate.Migrate",
"os.getenv"
] |
[((267, 279), 'flask_script.Manager', 'Manager', (['app'], {}), '(app)\n', (274, 279), False, 'from flask_script import Manager\n'), ((290, 306), 'flask_migrate.Migrate', 'Migrate', (['app', 'db'], {}), '(app, db)\n', (297, 306), False, 'from flask_migrate import Migrate, MigrateCommand\n'), ((217, 242), 'os.getenv', 'os.getenv', (['"""FLASK_CONFIG"""'], {}), "('FLASK_CONFIG')\n", (226, 242), False, 'import os\n')]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.