from newrelic.agent import wrap_in_function, WSGIApplicationWrapper
def instrument_gevent_wsgi(module):
def wrapper_WSGIServer___init__(*args, **kwargs):
def _bind_params(self, listener, application, *args, **kwargs):
return self, listener, application, args, kwargs
self, listener, application, _args, _kwargs = _bind_params(
*args, **kwargs)
application = WSGIApplicationWrapper(application)
_args = (self, listener, application) + _args
return _args, _kwargs
wrap_in_function(module, 'WSGIServer.__init__',
wrapper_WSGIServer___init__)
def instrument_gevent_pywsgi(module):
def wrapper_WSGIServer___init__(*args, **kwargs):
def _bind_params(self, listener, application, *args, **kwargs):
return self, listener, application, args, kwargs
self, listener, application, _args, _kwargs = _bind_params(
*args, **kwargs)
application = WSGIApplicationWrapper(application)
_args = (self, listener, application) + _args
return _args, _kwargs
wrap_in_function(module, 'WSGIServer.__init__',
wrapper_WSGIServer___init__)
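# A minimal sketch of what the hooks above accomplish, assuming the newrelic agent is
# installed and initialised elsewhere: wrapping the WSGI callable by hand has the same
# effect as rewriting WSGIServer.__init__ through wrap_in_function.
#
#     from gevent.pywsgi import WSGIServer
#     from newrelic.agent import WSGIApplicationWrapper
#
#     def hello(environ, start_response):
#         start_response('200 OK', [('Content-Type', 'text/plain')])
#         return [b'hello']
#
#     WSGIServer(('127.0.0.1', 8000), WSGIApplicationWrapper(hello)).serve_forever()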
from flask import Flask, request, render_template, redirect, url_for
from flask_restful import Api
from flask_jwt import JWT
from security import authenticate, identity
from resources.user import UserRegister
from resources.item import Item, ItemList
from resources.inventory import Inventory, InventoryList
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///data.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False # turns off Flask's SQLAlchemy mod tracker
app.secret_key = 'secret'
""" The Api works with Resources and every resource has to be a Class """
api = Api(app) # instantiate an instance of the Api Class
@app.before_first_request
def create_tables():
db.create_all()
"""
Create an instance of the JWT Class using the app object and the
authenticate and identity methods from security.py
"""
jwt = JWT(app, authenticate, identity) # /auth endpoint
"""
The api.add_resource calls below register each Resource class
under its endpoint URL
"""
api.add_resource(Inventory, '/inventory/<string:name>')
api.add_resource(Item, '/item/<string:name>')
api.add_resource(ItemList, '/items')
api.add_resource(InventoryList, '/inventories')
api.add_resource(UserRegister, '/register')
"""
The @app.route-decorated functions below define the endpoints for
the front end of the app
"""
@app.route('/')
@app.route('/home')
def home():
return render_template('home.html')
# route for handling the login page logic
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
if request.form['username'] != 'admin' or request.form['password'] != 'admin':
error = 'Invalid Credentials. Please try again.'
else:
return redirect(url_for('home'))
return render_template('login.html', error=error)
@app.route('/registration')
def register_template():
return render_template('register.html')
""" The app.run method below starts the Flask app and binds it to port 5000 """
if __name__ == '__main__':
from db import db
db.init_app(app)
app.run(host='0.0.0.0', port=8080, debug=True)
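# Example requests once the server is running (hypothetical credentials and item name;
# the /auth endpoint is provided by Flask-JWT as noted above):
#   curl -X POST http://localhost:8080/auth -H "Content-Type: application/json" \
#        -d '{"username": "bob", "password": "1234"}'
#   curl http://localhost:8080/items
#   # if a resource is protected with @jwt_required, pass the token returned by /auth:
#   curl http://localhost:8080/item/chair -H "Authorization: JWT <access_token>"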
# Made by Kerberos v1.0 on 2008/02/03
# this script is part of the Official L2J Datapack Project.
# Visit http://www.l2jdp.com/forum/ for more details.
import sys
from com.l2jserver.gameserver.ai import CtrlIntention
from com.l2jserver.gameserver.model.quest import State
from com.l2jserver.gameserver.model.quest import QuestState
from com.l2jserver.gameserver.model.quest.jython import QuestJython as JQuest
from com.l2jserver.gameserver.network.serverpackets import NpcSay
from com.l2jserver.gameserver.network.serverpackets import SocialAction
from com.l2jserver.gameserver.network.serverpackets import ActionFailed
qn = "65_CertifiedSoulBreaker"
#NPCs
Lucas = 30071
Jacob = 30073
Harlan = 30074
Xaber = 30075
Liam = 30076
Vesa = 30123
Zerome = 30124
Felton = 30879
Kekropus = 32138
Casca = 32139
Holst = 32199
Vitus = 32213
Meldina = 32214
Katenar = 32242
CargoBox = 32243
#Mobs
Wyrm = 20176
Angel = 27332
#Items
Diamond = 7562
Document = 9803
Heart = 9804
Recommend = 9805
certificate = 9806
class Quest (JQuest) :
def __init__(self,id,name,descr):
JQuest.__init__(self,id,name,descr)
self.isAngelSpawned = 0
self.isKatenarSpawned = 0
self.questItemIds = [Document,Heart,Recommend]
def onEvent (self,event,st) :
htmltext = event
player = st.getPlayer()
if event == "32213-03.htm" :
st.playSound("ItemSound.quest_accept")
st.set("cond","1")
st.setState(State.STARTED)
#st.giveItems(Diamond,47)
elif event == "32138-03.htm" :
st.set("cond","2")
st.playSound("ItemSound.quest_middle")
elif event == "32139-02.htm" :
st.set("cond","3")
st.playSound("ItemSound.quest_middle")
elif event == "32139-04.htm" :
st.set("cond","4")
st.playSound("ItemSound.quest_middle")
elif event == "32199-02.htm" :
st.set("cond","5")
st.playSound("ItemSound.quest_middle")
elif event == "30071-02.htm" :
st.set("cond","8")
st.playSound("ItemSound.quest_middle")
elif event == "32214-02.htm" :
st.set("cond","11")
st.playSound("ItemSound.quest_middle")
elif event == "30879-03.htm" :
st.set("cond","12")
st.set("angel","0")
st.playSound("ItemSound.quest_middle")
elif event == "angel_cleanup" :
self.isAngelSpawned = 0
return
elif event == "katenar_cleanup" :
self.isKatenarSpawned = 0
return
elif event == "32139-08.htm" :
st.set("cond","14")
st.takeItems(Document,1)
st.playSound("ItemSound.quest_middle")
elif event == "32138-06.htm" :
st.set("cond","15")
st.playSound("ItemSound.quest_middle")
elif event == "32138-11.htm" :
st.set("cond","17")
st.takeItems(Heart,-1)
st.giveItems(Recommend,1)
st.playSound("ItemSound.quest_middle")
return htmltext
def onFirstTalk (self,npc,player):
st = player.getQuestState(qn)
if st :
if npc.getNpcId() == Katenar and st.getInt("cond") == 12:
st.unset("angel")
st.playSound("ItemSound.quest_itemget")
st.set("cond","13")
self.isAngelSpawned = 0
self.isKatenarSpawned = 0
st.giveItems(Document,1)
return "32242-01.htm"
player.sendPacket(ActionFailed.STATIC_PACKET)
return None
def onTalk (self,npc,player):
htmltext = "<html><body>You are either not on a quest that involves this NPC, or you don't meet this NPC's minimum quest requirements.</body></html>"
st = player.getQuestState(qn)
if not st : return htmltext
npcId = npc.getNpcId()
id = st.getState()
cond = st.getInt("cond")
if id == State.COMPLETED :
htmltext = "<html><body>This quest has already been completed.</body></html>"
elif npcId == Vitus :
if player.getClassId().getId() not in [125,126] or player.getLevel() < 39:
htmltext = "<html><body>Only Troopers or Warders are allowed to take this quest! Go away before I get angry!<br>You must be level 39 or higher to undertake this quest.</body></html>"
st.exitQuest(1)
elif id == State.CREATED :
htmltext = "32213-01.htm"
elif cond >= 1 and cond <= 3 :
htmltext = "32213-04.htm"
elif cond >= 4 and cond <17 :
htmltext = "32213-05.htm"
elif cond == 17 and st.getQuestItemsCount(Recommend) == 1 :
htmltext = "32213-06.htm"
player.sendPacket(SocialAction(player.getObjectId(),3))
st.takeItems(Recommend,-1)
st.giveItems(certificate,1)
st.exitQuest(False)
st.playSound("ItemSound.quest_finish")
st.addExpAndSp(189831,21526)
elif npcId == Kekropus :
if cond == 1 :
htmltext = "32138-00.htm"
elif cond == 2 :
htmltext = "32138-04.htm"
elif cond == 14 :
htmltext = "32138-05.htm"
elif cond == 15 :
htmltext = "32138-07.htm"
elif cond == 16 :
htmltext = "32138-08.htm"
elif cond == 17 :
htmltext = "32138-12.htm"
elif npcId == Casca :
if cond == 2 :
htmltext = "32139-01.htm"
elif cond == 3 :
htmltext = "32139-03.htm"
elif cond == 4 :
htmltext = "32139-05.htm"
elif cond == 13 :
htmltext = "32139-06.htm"
elif cond == 14 :
htmltext = "32139-09.htm"
elif npcId == Holst :
if cond == 4 :
htmltext = "32199-01.htm"
elif cond == 5 :
htmltext = "32199-03.htm"
st.set("cond","6")
st.playSound("ItemSound.quest_middle")
elif cond == 6 :
htmltext = "32199-04.htm"
elif npcId == Harlan :
if cond == 6 :
htmltext = "30074-01.htm"
elif cond == 7 :
htmltext = "30074-02.htm"
elif npcId == Jacob :
if cond == 6 :
htmltext = "30073-01.htm"
st.set("cond","7")
st.playSound("ItemSound.quest_middle")
elif cond == 7 :
htmltext = "30073-02.htm"
elif npcId == Lucas :
if cond == 7 :
htmltext = "30071-01.htm"
elif cond == 8 :
htmltext = "30071-03.htm"
elif npcId == Xaber :
if cond == 8 :
htmltext = "30075-01.htm"
elif cond == 9 :
htmltext = "30075-02.htm"
elif npcId == Liam :
if cond == 8 :
htmltext = "30076-01.htm"
st.set("cond","9")
st.playSound("ItemSound.quest_middle")
elif cond == 9 :
htmltext = "30076-02.htm"
elif npcId == Zerome :
if cond == 9 :
htmltext = "30124-01.htm"
elif cond == 10 :
htmltext = "30124-02.htm"
elif npcId == Vesa :
if cond == 9 :
htmltext = "30123-01.htm"
st.set("cond","10")
st.playSound("ItemSound.quest_middle")
elif cond == 10 :
htmltext = "30123-02.htm"
elif npcId == Meldina :
if cond == 10 :
htmltext = "32214-01.htm"
elif cond == 11 :
htmltext = "32214-03.htm"
elif npcId == Felton :
if cond == 11 :
htmltext = "30879-01.htm"
elif cond == 12 :
htmltext = "30879-04.htm"
elif npcId == CargoBox :
if cond == 12 :
htmltext = "32243-01.htm"
if st.getInt("angel") == 0 and self.isAngelSpawned == 0 :
angel = st.addSpawn(27332,36198,191949,-3728,180000)
angel.broadcastPacket(NpcSay(angel.getObjectId(),0,angel.getNpcId(),player.getName()+"! Step back from the confounded box! I will take it myself!"))
angel.setRunning()
angel.addDamageHate(player,0,999)
angel.getAI().setIntention(CtrlIntention.AI_INTENTION_ATTACK, player)
self.isAngelSpawned = 1
self.startQuestTimer("angel_cleanup",180000,angel,player)
elif self.isKatenarSpawned == 0 and st.getInt("angel") == 1:
katenar = st.addSpawn(32242,36110,191921,-3712,60000)
katenar.broadcastPacket(NpcSay(katenar.getObjectId(),0,katenar.getNpcId(),"I am late!"))
self.isKatenarSpawned = 1
self.startQuestTimer("katenar_cleanup",60000,katenar,player)
htmltext = "32243-02.htm"
elif cond == 13 :
htmltext = "32243-03.htm"
return htmltext
def onKill(self,npc,player,isPet):
st = player.getQuestState(qn)
if not st : return
if st.getState() != State.STARTED : return
npcId = npc.getNpcId()
cond = st.getInt("cond")
if npcId == Angel and cond == 12:
st.set("angel","1")
self.isAngelSpawned = 0
npc.broadcastPacket(NpcSay(npc.getObjectId(),0,npc.getNpcId(),"Grr. I've been hit..."))
if self.isKatenarSpawned == 0 :
katenar = st.addSpawn(32242,36110,191921,-3712,60000)
katenar.broadcastPacket(NpcSay(katenar.getObjectId(),0,katenar.getNpcId(),"I am late!"))
self.isKatenarSpawned = 1
self.startQuestTimer("katenar_cleanup",60000,katenar,player)
if npcId == Wyrm and st.getQuestItemsCount(Heart) < 10 and cond == 15 and st.getRandom(100) <= 25:
if st.getQuestItemsCount(Heart) == 9 :
st.giveItems(Heart,1)
st.set("cond","16")
st.playSound("ItemSound.quest_middle")
else :
st.giveItems(Heart,1)
st.playSound("ItemSound.quest_itemget")
return
QUEST = Quest(65,qn,"Certified Soul Breaker")
QUEST.addStartNpc(Vitus)
QUEST.addTalkId(Vitus)
QUEST.addTalkId(Kekropus)
QUEST.addTalkId(Casca)
QUEST.addTalkId(Holst)
QUEST.addTalkId(Harlan)
QUEST.addTalkId(Jacob)
QUEST.addTalkId(Lucas)
QUEST.addTalkId(Xaber)
QUEST.addTalkId(Liam)
QUEST.addTalkId(Vesa)
QUEST.addTalkId(Zerome)
QUEST.addTalkId(Meldina)
QUEST.addTalkId(Felton)
QUEST.addTalkId(CargoBox)
QUEST.addFirstTalkId(Katenar)
QUEST.addKillId(Angel)
QUEST.addKillId(Wyrm)
# Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# python3
"""Tests for dm_robotics.agentflow.logging.utils."""
from absl.testing import absltest
from absl.testing import parameterized
from dm_robotics.agentflow.loggers import utils
import numpy as np
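# For reference, the discounted return exercised by the test below can be written as a
# backwards recursion (a sketch of the quantity under test; utils.compute_return is
# assumed to produce an equivalent value):
#   G = r[0] + d[0] * (r[1] + d[1] * (r[2] + ...))
def _reference_return(rewards, discounts):
  total = 0.0
  for r, d in zip(reversed(list(rewards)), reversed(list(discounts))):
    total = r + d * total
  return total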
class UtilsTest(parameterized.TestCase):
@parameterized.named_parameters(
('simple', [0., 1., 2.], [1., 1., 0.], 0.9),
('random', np.random.rand(3), np.random.rand(3), np.random.rand(1)),
)
def test_compute_return(self, rewards, discounts, additional_discount):
actual_return = utils.compute_return(
rewards,
np.asarray(discounts) * additional_discount)
expected_return = (
rewards[0] + rewards[1] * discounts[0] * additional_discount +
rewards[2] * discounts[0] * discounts[1] * additional_discount**2)
np.testing.assert_almost_equal(actual_return, expected_return)
if __name__ == '__main__':
absltest.main()
#!/usr/bin/env python3
import sys
from random import randint
def generate_array(amount: int, outfile: str) -> None:
with open(outfile, 'w') as file:
print("Writing random numbers to file.")
file.write(str(amount))
file.write(" ")
for _ in range(amount):
file.write(str(randint(0, 10**6)))
file.write(" ")
print("Done writing", amount, "random numbers to", outfile)
if __name__ == "__main__":
if len(sys.argv) >= 2:
amount = int(sys.argv[1])
if len(sys.argv) >= 3:
outfile = sys.argv[2]
else:
outfile = str(amount) +".txt"
else:
amount = int(input("How many elements are required in the array? "))
outfile = input("Specify name of output file: ")
generate_array(amount, outfile)
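# Example invocation (hypothetical script and file names):
#   python3 generate_array.py 1000 numbers.txt
# The output file contains the count followed by 1000 space-separated random
# integers drawn from [0, 10**6].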
#!/usr/bin/env python
"""
Pegasus utility functions for parsing a kickstart output file and returning wanted information
"""
from __future__ import print_function
##
# Copyright 2007-2010 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# Revision : $Revision: 2012 $
# Import Python modules
from xml.parsers import expat
from Pegasus.monitoring.metadata import FileMetadata
import re
import sys
import logging
import traceback
import os
# Regular expressions used in the kickstart parser
re_parse_props = re.compile(r'(\S+)\s*=\s*([^",]+)')
re_parse_quoted_props = re.compile(r'(\S+)\s*=\s*"([^"]+)"')
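# Illustrative matches (hypothetical fragments): re_parse_props picks up unquoted pairs
# such as 'failed=0', while re_parse_quoted_props picks up quoted pairs such as
# 'duration="1.245"' from [cluster-summary ...] and [cluster-task ...] lines.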
logger = logging.getLogger(__name__)
class Parser:
"""
This class is used to parse a kickstart output file, and return
requested information.
"""
def __init__(self, filename):
"""
This function initializes the Parser class with the kickstart
output file that should be parsed.
"""
self._kickstart_output_file = filename
self._parsing_job_element = False
self._parsing_arguments = False
self._parsing_main_job = False
self._parsing_machine = False
self._parsing_stdout = False
self._parsing_stderr = False
self._parsing_data = False
self._parsing_cwd = False
self._parsing_final_statcall = False
self._record_number = 0
self._arguments = []
self._stdout = ""
self._stderr = ""
self._cwd = ""
self._lfn = "" # filename parsed from statcall record
self._keys = {}
self._ks_elements = {}
self._fh = None
self._open_error = False
def open(self):
"""
This function opens a kickstart output file.
"""
try:
self._fh = open(self._kickstart_output_file)
except:
# Error opening file
self._fh = None
self._open_error = True
return False
# Open succeeded
self._open_error = False
return True
def close(self):
"""
This function closes the kickstart output file.
"""
try:
self._fh.close()
except:
return False
return True
def read_record(self):
"""
This function reads an invocation record from the kickstart
output file. We also look for the struct at the end of a file
containing multiple records. It returns a string containing
the record, or None if it is not found.
"""
buffer = ""
#valid token that is parsed
token = ""
self._record_number += 1
logger.trace("Started reading record number %d from kickstart file %s" %( self._record_number, self._kickstart_output_file))
# First, we find the beginning <invocation xmlns....
while True:
line = self._fh.readline()
if line == '':
# End of file, record not found
return None
if line.find("<invocation") != -1:
token = "<invocation"
break
if ( line.find("[cluster-task") != -1 ):
token = "[cluster-task"
break
if ( line.find("[cluster-summary") != -1 ):
token = "[cluster-summary"
break
if ( line.find("[seqexec-task") != -1 ):
#deprecated token
token = "[seqexec-task"
break
if ( line.find("[seqexec-summary") != -1 ):
#deprecated token
token = "[seqexec-summary"
break
# Found something!
#if line.find("<invocation") >= 0:
if token == "<invocation" :
# Found invocation record
start = line.find("<invocation")
buffer = line[start:]
end = buffer.find("</invocation>")
# Check if we have everything in a single line
if end >= 0:
end = end + len("</invocation>")
return buffer[:end]
#elif line.find("[seqexec-summary") >= 0:
elif ( token == "[cluster-summary" or token == "[seqexec-summary" ):
# Found line with cluster jobs summary
start = line.find(token)
buffer = line[start:]
end = buffer.find("]")
if end >= 0:
end = end + len("]")
return buffer[:end]
# clustered record should be in a single line!
logger.warning("%s: %s line is malformed... ignoring it..." % (self._kickstart_output_file, token ))
return ""
#elif line.find("[seqexec-task") >= 0:
elif ( token == "[cluster-task" or token == "[seqexec-task" ):
# Found line with task information
start = line.find( token )
buffer = line[start:]
end = buffer.find("]")
if end >= 0:
end = end + len("]")
return buffer[:end]
# task record should be in a single line!
logger.warning("%s: %s line is malformed... ignoring it..." % (self._kickstart_output_file, token))
return ""
else:
return ""
# Ok, now continue reading the file until we get a full record
buffer = [buffer]
while True:
line = self._fh.readline()
if line == '':
# End of file, record not found
return None
#buffer = buffer + line
buffer.append( line )
if line.find("</invocation>") >= 0:
break
# Now, we got it, let's make sure
end = line.find("</invocation>")
if end == -1:
return ""
#end = end + len("</invocation>")
invocation = "".join(buffer)
logger.trace("Finished reading record number %d from kickstart file %s" %( self._record_number, self._kickstart_output_file))
return invocation
#return buffer[:end]
def is_invocation_record(self, buffer=''):
"""
Returns True if buffer contains an invocation record.
"""
if buffer.find("<invocation") == -1:
return False
return True
def is_task_record(self, buffer=''):
"""
Returns True if buffer contains a task record.
"""
if ( buffer.find("[seqexec-task") != -1 or buffer.find( "[cluster-task" ) != -1 ):
return True
return False
def is_clustered_record(self, buffer=''):
"""
Returns True if buffer contains a clustered record.
"""
if ( buffer.find("[seqexec-summary") != -1 or buffer.find( "[cluster-summary" ) != -1):
return True
return False
def start_element(self, name, attrs):
"""
Function called by the parser every time a new element starts
"""
# Keep track if we are parsing the main job element
if name == "mainjob":
self._parsing_main_job = True
if name == "machine":
self._parsing_machine = True
# Keep track if we are inside one of the job elements
if (name == "setup" or name == "prejob" or
name == "mainjob" or name == "postjob" or name == "cleanup"):
self._parsing_job_element = True
if name == "argument-vector" and name in self._ks_elements:
# Start parsing arguments
self._parsing_arguments = True
elif name == "cwd" and name in self._ks_elements:
# Start parsing cwd
self._parsing_cwd = True
elif name == "checksum" and name in self._ks_elements:
# PM-1180 <checksum type="sha256" value="f2307670158c64c4407971f8fad67772724b0bad92bfb48f386b0814ba24e3af"/>
self._keys[name] = {}
for attr_name in self._ks_elements[name]:
if attr_name in attrs:
self._keys[ name ] [attr_name] = attrs[attr_name]
elif name == "data":
# Start parsing data for stdout and stderr output
self._parsing_data = True
elif name == "file" and name in self._ks_elements:
if self._parsing_main_job == True :
# Special case for name inside the mainjob element (will change this later)
for my_element in self._ks_elements[name]:
if my_element in attrs:
self._keys[my_element] = attrs[my_element]
elif name == "ram" and name in self._ks_elements:
if self._parsing_machine == True:
# Special case for ram inside the machine element (will change this later)
for my_element in self._ks_elements[name]:
if my_element in attrs:
self._keys[my_element] = attrs[my_element]
elif name == "uname" and name in self._ks_elements:
if self._parsing_machine == True:
# Special case for uname inside the machine element (will change this later)
for my_element in self._ks_elements[name]:
if my_element in attrs:
self._keys[my_element] = attrs[my_element]
elif name == "signalled":
# PM-1109 grab the attributes we are interested in
self._keys[ name ] = {} #a dictionary indexed by attributes
self._parsing_signalled = True
self._keys[ name ]["action"] = "" #grabbed later in char data
for attr in attrs:
if attr in self._ks_elements[name]:
self._keys[name][attr] = attrs[attr]
elif name == "statcall":
if "id" in attrs:
if attrs["id"] == "stdout" and "stdout" in self._ks_elements:
self._parsing_stdout = True
elif attrs["id"] == "stderr" and "stderr" in self._ks_elements:
self._parsing_stderr = True
elif attrs["id"] == "final" :
self._parsing_final_statcall = True
self._lfn = attrs["lfn"]
elif name == "statinfo":
if self._parsing_final_statcall is True:
statinfo = FileMetadata()
for my_element in self._ks_elements[name]:
if my_element in attrs:
statinfo.add_attribute( my_element, attrs[my_element])
if "outputs" not in self._keys:
self._keys[ "outputs" ] = {} #a dictionary indexed by lfn
lfn = self._lfn
statinfo.set_id( lfn )
if lfn is None or not statinfo:
logger.warning( "Malformed/Empty stat record for output lfn %s %s" %(lfn, statinfo))
self._keys["outputs"][lfn] = statinfo
elif name == "usage" and name in self._ks_elements:
if self._parsing_job_element:
# Special case for handling utime and stime, which need to be added
for my_element in self._ks_elements[name]:
if my_element in attrs:
if my_element in self._keys:
try:
self._keys[my_element] = self._keys[my_element] + float(attrs[my_element])
except ValueError:
logger.warning("cannot convert element %s to float!" % (my_element))
else:
try:
self._keys[my_element] = float(attrs[my_element])
except ValueError:
logger.warning("cannot convert element %s to float!" % (my_element))
else:
# For all other elements, check if we want them
if name in self._ks_elements:
for my_element in self._ks_elements[name]:
if my_element in attrs:
self._keys[my_element] = attrs[my_element]
def end_element(self, name):
"""
Function called by the parser whenever we reach the end of an element
"""
# Stop parsing argument-vector and cwd if we reached the end of those elements
if name == "argument-vector":
self._parsing_arguments = False
elif name == "cwd":
self._parsing_cwd = False
elif name == "mainjob":
self._parsing_main_job = False
elif name == "machine":
self._parsing_machine = False
elif name == "signalled":
self._parsing_signalled = False
elif name == "statcall":
if self._parsing_stdout == True:
self._parsing_stdout = False
if self._parsing_stderr == True:
self._parsing_stderr = False
if self._parsing_final_statcall == True:
self._parsing_final_statcall = False
if "outputs" in self._keys:
if self._lfn in self._keys["outputs"]:
# PM-1180 get the statinfo and update with checksum
statinfo = self._keys["outputs"][self._lfn]
if "checksum" in self._keys:
for key in self._keys["checksum"]:
statinfo.add_attribute( "checksum." + key, self._keys["checksum"][key])
self._keys[ "checksum" ] = {}
elif name == "data":
self._parsing_data = False
# Now, see if we left one of the job elements
if (name == "setup" or name == "prejob" or
name == "mainjob" or name == "postjob" or name == "cleanup"):
self._parsing_job_element = False
def char_data(self, data=''):
"""
Function called by the parser whenever there's character data in an element
"""
if self._parsing_cwd == True:
self._cwd += data
elif self._parsing_arguments == True:
self._arguments.append(data.strip())
elif self._parsing_stdout == True and self._parsing_data == True:
self._stdout += data
elif self._parsing_stderr == True and self._parsing_data == True:
self._stderr += data
elif self._parsing_signalled == True:
self._keys["signalled"]["action"] += data
def parse_invocation_record(self, buffer=''):
"""
Parses the xml record in buffer, returning the desired keys.
"""
# Initialize variables
self._parsing_arguments = False
self._parsing_main_job = False
self._parsing_machine = False
self._parsing_stdout = False
self._parsing_stderr = False
self._parsing_data = False
self._parsing_cwd = False
self._parsing_signalled = False
self._arguments = []
self._stdout = ""
self._stderr = ""
self._cwd = ""
self._keys = {}
# Check if we have an invocation record
if self.is_invocation_record(buffer) == False:
return self._keys
# Add invocation key to our response
self._keys["invocation"] = True
# Prepend XML header
buffer = '<?xml version="1.0" encoding="ISO-8859-1"?>\n' + buffer
# Create parser
self._my_parser = expat.ParserCreate()
self._my_parser.StartElementHandler = self.start_element
self._my_parser.EndElementHandler = self.end_element
self._my_parser.CharacterDataHandler = self.char_data
# Parse everything!
output = self._my_parser.Parse(buffer)
# Add cwd, arguments, stdout, and stderr to keys
if "cwd" in self._ks_elements:
self._keys["cwd"] = self._cwd
if "argument-vector" in self._ks_elements:
self._keys["argument-vector"] = " ".join(self._arguments)
if "stdout" in self._ks_elements:
self._keys["stdout"] = self._stdout
if "stderr" in self._ks_elements:
self._keys["stderr"] = self._stderr
return self._keys
def parse_clustered_record(self, buffer=''):
"""
Parses the clustered record in buffer, returning all found keys
"""
self._keys = {}
# Check if we have an invocation record
if self.is_clustered_record(buffer) == False:
return self._keys
# Add clustered key to our response
self._keys["clustered"] = True
# Parse all quoted properties
for my_key, my_val in re_parse_quoted_props.findall(buffer):
self._keys[my_key] = my_val
# And add unquoted properties as well
for my_key, my_val in re_parse_props.findall(buffer):
self._keys[my_key] = my_val
return self._keys
def parse_task_record(self, buffer=''):
"""
Parses the task record in buffer, returning all found keys
"""
self._keys = {}
# Check if we have an invocation record
if self.is_task_record(buffer) == False:
return self._keys
# Add task key to our response
self._keys["task"] = True
# Parse all quoted properties
for my_key, my_val in re_parse_quoted_props.findall(buffer):
self._keys[my_key] = my_val
# And add unquoted properties as well
for my_key, my_val in re_parse_props.findall(buffer):
self._keys[my_key] = my_val
return self._keys
def parse(self, keys_dict, tasks=True, clustered=True):
"""
This function parses the kickstart output file, looking for
the keys specified in the keys_dict variable. It returns a
list of dictionaries containing the found keys. Look at the
parse_stampede function for details about how to pass keys
using the keys_dict structure. The function will return an
empty list if no records are found or if an error happens.
"""
my_reply = []
# Place keys_dict in the _ks_elements
self._ks_elements = keys_dict
# Try to open the file
if self.open() == False:
return my_reply
logger.debug( "Started reading records from kickstart file %s" %(self._kickstart_output_file))
self._record_number = 0
# Read first record
my_buffer = self.read_record()
# Loop while we still have record to read
while my_buffer is not None:
if self.is_invocation_record(my_buffer) == True:
# We have an invocation record, parse it!
try:
my_record = self.parse_invocation_record(my_buffer)
except:
logger.warning("KICKSTART-PARSE-ERROR --> error parsing invocation record in file %s"
% (self._kickstart_output_file))
logger.warning(traceback.format_exc())
# Found error parsing this file, return empty reply
my_reply = []
# Finish the loop
break
my_reply.append(my_record)
elif self.is_clustered_record(my_buffer) == True:
# Check if we want clustered records too
if clustered:
# Clustered records are seqexec summary records for clustered jobs
# We have a clustered record, parse it!
my_reply.append(self.parse_clustered_record(my_buffer))
elif self.is_task_record(my_buffer) == True:
# Check if we want task records too
if tasks:
# We have a task record, parse it!
my_reply.append(self.parse_task_record(my_buffer))
else:
# We have something else, this shouldn't happen!
# Just skip it
pass
# Read next record
my_buffer = self.read_record()
# Lastly, close the file
self.close()
return my_reply
def parse_stampede(self):
"""
This function works similarly to the parse function above,
but does not require a keys_dict parameter as it uses a
built-in list of keys specifically used in the Stampede
schema.
"""
stampede_elements = {"invocation": ["hostname", "resource", "user", "hostaddr", "transformation", "derivation"],
"mainjob": ["duration", "start"],
"usage": ["utime", "stime"],
"ram": ["total"],
"uname": ["system", "release", "machine"],
"file": ["name"],
"status": ["raw"],
"signalled": ["signal", "corefile", "action"], #action is the char data in signalled element
"regular": ["exitcode"],
"argument-vector": [],
"cwd": [],
"stdout": [],
"stderr": [],
"statinfo": ["lfn", "size", "ctime", "user" ],
"checksum": ["type", "value", "timing"],
"type": ["type", "value"]}
return self.parse(stampede_elements, tasks=True, clustered=True)
def parse_stdout_stderr(self):
"""
This function extracts the stdout and stderr from a kickstart output file.
It returns an array containing the output for each task in a job.
"""
stdout_stderr_elements = {"invocation": ["hostname", "resource", "derivation", "transformation"],
"file": ["name"],
"regular": ["exitcode"],
"failure": ["error"],
"argument-vector": [],
"cwd": [],
"stdout": [],
"stderr": []}
return self.parse(stdout_stderr_elements, tasks=False, clustered=False)
if __name__ == "__main__":
# Let's run a test!
print("Testing kickstart output file parsing...")
# Make sure we have an argument
if len(sys.argv) < 2:
print("For testing, please give a kickstart output filename!")
sys.exit(1)
# Create parser class
p = Parser(sys.argv[1])
# Parse file according to the Stampede schema
output = p.parse_stampede()
# Print output
for record in output:
print(record)
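# A minimal sketch of driving Parser with a custom keys_dict (element names and
# attribute lists here follow the Stampede mapping used in parse_stampede above):
#   p = Parser("kickstart.out")
#   for rec in p.parse({"invocation": ["hostname", "user"], "cwd": []}):
#       print(rec.get("hostname"), rec.get("cwd"))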
import framework, secret
from framework import discord
############################################################################################
# GUILD MESSAGES DEFINITION #
############################################################################################
# File object representing file that will be sent
l_file = framework.FILE("./Examples/main_send_file.py")
guilds = [
framework.USER(
user_id=123456789, # ID of the user to send direct messages to
messages_to_send=[ # List MESSAGE objects
framework.DirectMESSAGE(
start_period=None, # If None, messages are sent at a fixed period (end_period)
end_period=15, # If start_period is None, this is the fixed sending period;
# if start_period is defined, this is the upper bound of the randomized period
data=l_file, # Data to send (can be str, embed, file, a list of those types,
# or a function that returns any of the above types, or None if there is no data to send yet;
# such a function must be decorated with framework.FUNCTION).
mode="send", # "send" will send a new message every time, "edit" will edit the previous message, "clear-send" will delete
# the previous message and then send a new one
start_now=True # Start sending now (True) or wait until period
),
],
generate_log=True ## Generate file log of sent messages (and failed attempts) for this user
)
]
############################################################################################
if __name__ == "__main__":
framework.run( token=secret.C_TOKEN, # MANDATORY,
server_list=guilds, # MANDATORY
is_user=False, # OPTIONAL
user_callback=None, # OPTIONAL
server_log_output="History", # OPTIONAL
debug=True) # OPTIONAL
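# `secret` is assumed to be a small local module defining C_TOKEN (the bot token passed
# above), kept separate so the token does not appear in the example itself.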
from pathlib import Path
from typing import BinaryIO
import pytest
from quetz import hookimpl
from quetz.authorization import MAINTAINER, MEMBER, OWNER
from quetz.condainfo import CondaInfo
from quetz.config import Config
from quetz.db_models import ChannelMember, Package, PackageMember, PackageVersion
from quetz.errors import ValidationError
from quetz.pkgstores import PackageStore
@pytest.mark.parametrize("package_role", [OWNER, MAINTAINER, MEMBER])
@pytest.mark.parametrize("channel_role", [MEMBER])
def test_delete_package(
auth_client, public_package, public_channel, dao, db, package_role, user
):
response = auth_client.delete(
f"/api/channels/{public_channel.name}/packages/{public_package.name}"
)
package = (
db.query(Package).filter(Package.name == public_package.name).one_or_none()
)
if package_role == MEMBER:
assert response.status_code == 403
assert package is not None
else:
assert response.status_code == 200
assert package is None
def test_delete_package_non_member(
client, public_package, public_channel, dao, db, other_user
):
response = client.get(f"/api/dummylogin/{other_user.username}")
assert response.status_code == 200
response = client.delete(
f"/api/channels/{public_channel.name}/packages/{public_package.name}"
)
assert response.status_code == 403
package = (
db.query(Package).filter(Package.name == public_package.name).one_or_none()
)
assert package is not None
def test_delete_package_versions_with_package(
auth_client, public_channel, public_package, package_version, dao, db, pkgstore
):
assert public_channel.size > 0
assert public_channel.size == package_version.size
assert package_version.package_name == public_package.name
response = auth_client.delete(
f"/api/channels/{public_channel.name}/packages/{public_package.name}"
)
assert response.status_code == 200
db.refresh(public_channel)
assert public_channel.size == 0
versions = (
db.query(PackageVersion)
.filter(PackageVersion.package_name == public_package.name)
.all()
)
assert len(versions) == 0
files = pkgstore.list_files(public_channel.name)
assert len(files) == 0
def test_get_package_version(auth_client, public_channel, package_version, dao):
filename = "test-package-0.1-0.tar.bz2"
platform = "linux-64"
response = auth_client.get(
f"/api/channels/{public_channel.name}/"
f"packages/{package_version.package_name}/versions/{platform}/{filename}"
)
assert response.status_code == 200
assert response.json()["filename"] == filename
assert response.json()["platform"] == platform
assert response.json()["download_count"] == 0
@pytest.mark.parametrize("user_server_role", [OWNER, MAINTAINER])
@pytest.mark.parametrize("user_package_role", [OWNER, MAINTAINER, MEMBER, None])
@pytest.mark.parametrize("user_channel_role", [OWNER, MAINTAINER, MEMBER, None])
@pytest.mark.parametrize("private", [True, False])
def test_get_package_version_permissions(
auth_client,
user,
private_package_version,
user_package_role,
user_channel_role,
private_channel,
db,
private_package,
private,
user_server_role,
):
private_channel.private = private
user.role = user_server_role
if user_channel_role:
channel_member = ChannelMember(
channel=private_channel, user=user, role=user_channel_role
)
db.add(channel_member)
if user_package_role:
package_member = PackageMember(
channel=private_channel,
user=user,
package=private_package,
role=user_package_role,
)
db.add(package_member)
db.commit()
filename = private_package_version.filename
platform = private_package_version.platform
channel_name = private_package_version.channel_name
package_name = private_package_version.package_name
response = auth_client.get(
f"/api/channels/{channel_name}/"
f"packages/{package_name}/versions/{platform}/{filename}"
)
if not private:
assert response.status_code == 200
elif user_server_role in [OWNER, MAINTAINER]:
assert response.status_code == 200
elif user_channel_role in [OWNER, MAINTAINER, MEMBER]:
assert response.status_code == 200
elif user_package_role in [OWNER, MAINTAINER, MEMBER]:
assert response.status_code == 200
else:
assert response.status_code == 403
@pytest.mark.parametrize("user_server_role", [OWNER, MAINTAINER])
@pytest.mark.parametrize("user_package_role", [OWNER, MAINTAINER, MEMBER, None])
@pytest.mark.parametrize("user_channel_role", [OWNER, MAINTAINER, MEMBER, None])
@pytest.mark.parametrize("private", [True, False])
def test_delete_package_version_permissions(
auth_client,
user,
private_package_version,
user_package_role,
user_channel_role,
private_channel,
db,
private_package,
pkgstore,
private,
user_server_role,
):
private_channel.private = private
user.role = user_server_role
if user_channel_role:
channel_member = ChannelMember(
channel=private_channel, user=user, role=user_channel_role
)
db.add(channel_member)
if user_package_role:
package_member = PackageMember(
channel=private_channel,
user=user,
package=private_package,
role=user_package_role,
)
db.add(package_member)
db.commit()
filename = private_package_version.filename
platform = private_package_version.platform
channel_name = private_package_version.channel_name
package_name = private_package_version.package_name
response = auth_client.delete(
f"/api/channels/{channel_name}/"
f"packages/{package_name}/versions/{platform}/{filename}"
)
if user_server_role in [OWNER, MAINTAINER]:
assert response.status_code == 200
elif user_channel_role in [OWNER, MAINTAINER]:
assert response.status_code == 200
elif user_package_role in [OWNER, MAINTAINER]:
assert response.status_code == 200
else:
assert response.status_code == 403
def test_get_non_existing_package_version(
auth_client, public_channel, package_version, dao
):
filename = "test-package-0.2-0.tar.bz2"
platform = "linux-64"
response = auth_client.get(
f"/api/channels/{public_channel.name}/"
f"packages/test-package/versions/{platform}/{filename}"
)
assert response.status_code == 404
@pytest.fixture
def remove_package_versions(db):
yield
db.query(PackageVersion).delete()
@pytest.mark.parametrize("package_name", ["test-package", "my-package"])
def test_upload_package_version(
auth_client,
public_channel,
public_package,
package_name,
db,
config,
remove_package_versions,
):
pkgstore = config.get_package_store()
package_filename = "test-package-0.1-0.tar.bz2"
with open(package_filename, "rb") as fid:
files = {"files": (package_filename, fid)}
response = auth_client.post(
f"/api/channels/{public_channel.name}/packages/"
f"{public_package.name}/files/",
files=files,
)
with open(package_filename, "rb") as fid:
condainfo = CondaInfo(fid, package_filename)
condainfo._parse_conda()
if package_name == "my-package":
assert response.status_code == 400
detail = response.json()['detail']
assert "does not match" in detail
assert "test-package" in detail
assert "my-package" in detail
else:
assert response.status_code == 201
db.refresh(public_channel)
assert public_channel.size == condainfo.info['size']
assert pkgstore.serve_path(
public_channel.name, str(Path(condainfo.info['subdir']) / package_filename)
)
@pytest.mark.parametrize("package_name", ["test-package"])
def test_check_channel_size_limits(
auth_client, public_channel, public_package, db, config
):
public_channel.size_limit = 0
db.commit()
pkgstore = config.get_package_store()
package_filename = "test-package-0.1-0.tar.bz2"
with open(package_filename, "rb") as fid:
files = {"files": (package_filename, fid)}
response = auth_client.post(
f"/api/channels/{public_channel.name}/packages/"
f"{public_package.name}/files/",
files=files,
)
assert response.status_code == 422
detail = response.json()['detail']
assert "quota" in detail
with pytest.raises(FileNotFoundError):
pkgstore.serve_path(
public_channel.name, str(Path("linux-64") / package_filename)
)
def test_delete_package_version(
auth_client, public_channel, package_version, dao, pkgstore: PackageStore, db
):
assert public_channel.size > 0
assert public_channel.size == package_version.size
filename = "test-package-0.1-0.tar.bz2"
platform = "linux-64"
response = auth_client.delete(
f"/api/channels/{public_channel.name}/"
f"packages/{package_version.package_name}/versions/{platform}/{filename}"
)
assert response.status_code == 200
versions = (
db.query(PackageVersion)
.filter(PackageVersion.package_name == package_version.package_name)
.all()
)
assert len(versions) == 0
with pytest.raises(Exception):
pkgstore.serve_path(public_channel.name, str(Path(platform) / filename))
db.refresh(public_channel)
assert public_channel.size == 0
def test_package_name_length_limit(auth_client, public_channel, db):
package_name = "package_" * 100
response = auth_client.post(
f"/api/channels/{public_channel.name}/packages", json={"name": package_name}
)
assert response.status_code == 201
pkg = db.query(Package).filter(Package.name == package_name).one_or_none()
assert pkg is not None
def test_validate_package_names(auth_client, public_channel, remove_package_versions):
valid_package_names = [
"interesting-package",
"valid.package.name",
"valid-package-name",
"valid_package_name",
"validpackage1234",
]
for package_name in valid_package_names:
response = auth_client.post(
f"/api/channels/{public_channel.name}/packages", json={"name": package_name}
)
assert response.status_code == 201
invalid_package_names = [
"InvalidPackage", # no uppercase
"invalid%20package", # no spaces
"invalid package", # no spaces
"invalid%package", # no special characters
"**invalidpackage**",
"błędnypakiet", # no unicode
]
for package_name in invalid_package_names:
response = auth_client.post(
f"/api/channels/{public_channel.name}/packages", json={"name": package_name}
)
assert response.status_code == 422
@pytest.mark.parametrize(
"package_name,msg",
[
("TestPackage", "string does not match"),
("test-package", None),
],
)
def test_validate_package_names_files_endpoint(
auth_client,
public_channel,
mocker,
package_name,
msg,
config: Config,
remove_package_versions,
):
pkgstore = config.get_package_store()
package_filename = "test-package-0.1-0.tar.bz2"
with open(package_filename, "rb") as fid:
condainfo = CondaInfo(fid, package_filename)
condainfo._parse_conda()
# patch conda info
condainfo.info['name'] = package_name
condainfo.channeldata['packagename'] = package_name
mocked_cls = mocker.patch("quetz.main.CondaInfo")
mocked_cls.return_value = condainfo
with open(package_filename, "rb") as fid:
files = {"files": (f"{package_name}-0.1-0.tar.bz2", fid)}
response = auth_client.post(
f"/api/channels/{public_channel.name}/files/", files=files
)
if msg:
assert response.status_code == 422
assert msg in response.json()["detail"]
with pytest.raises(FileNotFoundError):
pkgstore.serve_path(
public_channel.name, f'linux-64/{package_name}-0.1-0.tar.bz2'
)
else:
assert response.status_code == 201
assert pkgstore.serve_path(
public_channel.name, f'linux-64/{package_name}-0.1-0.tar.bz2'
)
@pytest.fixture
def plugin(app):
from quetz.main import pm
class Plugin:
@hookimpl
def validate_new_package(
self,
channel_name: str,
package_name: str,
file_handler: BinaryIO,
condainfo: CondaInfo,
):
raise ValidationError(f"name {package_name} not allowed")
plugin = Plugin()
pm.register(plugin)
yield plugin
pm.unregister(plugin)
def test_validation_hook(auth_client, public_channel, plugin, config):
pkgstore = config.get_package_store()
response = auth_client.post(
f"/api/channels/{public_channel.name}/packages", json={"name": "package-name"}
)
assert response.status_code == 422
assert "package-name not allowed" in response.json()["detail"]
package_filename = "test-package-0.1-0.tar.bz2"
with open(package_filename, "rb") as fid:
files = {"files": (package_filename, fid)}
response = auth_client.post(
f"/api/channels/{public_channel.name}/files/", files=files
)
assert response.status_code == 422
assert "test-package not allowed" in response.json()["detail"]
with pytest.raises(FileNotFoundError):
pkgstore.serve_path(public_channel.name, 'linux-64/test-package-0.1-0.tar.bz2')
@pytest.mark.parametrize(
"endpoint",
[
"/api/channels/{channel_name}/packages/{package_name}",
"/api/channels/{channel_name}/packages",
"/api/packages/search/?q='channel:{channel_name}'",
],
)
@pytest.mark.parametrize("package_name", ["test-package"])
def test_package_current_version(
auth_client, make_package_version, channel_name, endpoint
):
# test platforms, current_version and url
make_package_version("test-package-0.1-0.tar.bz2", "0.1", platform="linux-64")
make_package_version("test-package-0.1-0.tar.bz2", "0.1", platform="noarch")
make_package_version("test-package-0.2-0.tar.bz2", "0.2", platform="linux-64")
v = make_package_version("test-package-0.2-0.tar.bz2", "0.2", platform="os-x")
response = auth_client.get(
endpoint.format(channel_name=channel_name, package_name=v.package_name)
)
assert response.status_code == 200
package_data = response.json()
if isinstance(package_data, list):
assert len(package_data) == 1
package_data = package_data[0]
assert package_data['current_version'] == "0.2"
@pytest.mark.parametrize("package_name", ["test-package"])
def test_get_package_with_versions(
make_package_version, channel_name, dao, package_name
):
# test loading of latest (current) and all other versions
make_package_version("test-package-0.1-0.tar.bz2", "0.1", platform="linux-64")
make_package_version("test-package-0.1-0.tar.bz2", "0.1", platform="noarch")
v = make_package_version("test-package-0.2-0.tar.bz2", "0.2", platform="linux-64")
package = dao.get_package(channel_name, package_name)
assert package.current_package_version == v
assert len(package.package_versions) == 3
@pytest.mark.parametrize("package_name", ["test-package"])
def test_package_channel_data_attributes(
auth_client,
make_package_version,
channel_name,
remove_package_versions,
):
# test attributes derived from channel data
for package_filename in Path(".").glob("*.tar.bz2"):
with open(package_filename, "rb") as fid:
files = {"files": (str(package_filename), fid)}
response = auth_client.post(
f"/api/channels/{channel_name}/files/",
files=files,
)
response = auth_client.get(f"/api/channels/{channel_name}/packages/test-package")
assert response.status_code == 200
content = response.json()
assert content['platforms'] == ['linux-64']
assert content['url'].startswith("https://")
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-05-29 20:02
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('chat', '0014_auto_20181116_0657'),
]
operations = [
migrations.AddField(
model_name='message',
name='sub_kind',
field=models.CharField(choices=[('add_faq', 'add_faq')], max_length=32, null=True),
),
migrations.AlterField(
model_name='message',
name='kind',
field=models.CharField(choices=[(b'base', b'base'), (b'orct', b'orct'), (b'answer', b'answer'), (b'errmod', b'errmod'), ('ask_faq_understanding', 'ask_faq_understanding'), ('chatdivider', 'chatdivider'), ('uniterror', 'uniterror'), ('response', 'response'), ('add_faq', 'add_faq'), ('message', 'message'), ('button', 'button'), ('abort', 'abort'), ('faqs', 'faqs'), ('faq', 'faq')], max_length=32, null=True),
),
]
#!/usr/bin/env python3
# pip3 install translate
#https://www.udemy.com/course/complete-ethical-hacking-bootcamp-zero-to-mastery/
from translate import Translator
LANG = input("Input a compatible language to translate to: ")
translator = Translator(to_lang=LANG)
try:
with open('filename.txt', mode='r') as file:
    text = file.read()
translation = translator.translate(text)
with open('./translationOutput.txt', mode='w') as fileTwo:
fileTwo.write(translation)
except FileNotFoundError as e:
print('Restate file and file path')
exit()
# Enter your code here. Read input from STDIN. Print output to STDOUT
N=int(input())
x=input().split()
#N,n = int(raw_input()),raw_input().split()
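# Prints True only when every entered number is positive (> 0) and at least one of them
# is a palindrome, i.e. its digit string reads the same forwards and backwards.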
print (all([int(i)>0 for i in x]) and any([j == j[::-1] for j in x]))
#!/usr/bin/env python
#------------------------------------------------------------------------------
# Copyright 2015 Esri
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#------------------------------------------------------------------------------
# ==================================================
# raster2gpkg.py
# --------------------------------------------------
# author: Esri
# company: Esri
# ==================================================
# description: Simple script to add a dataset into an existing or new GeoPackage.
# Based on other python utilities but made PEP compliant.
# ==================================================
# ==========================
# $Id$
import arcpy
import os
import sys
import re
import math
import sqlite3
import numpy
import tempfile
import collections
from osgeo import gdal
from osgeo.gdalconst import *
import cache2gpkg
import shutil
TILES_TABLE_SUFFIX = '_tiles' # Added to basename to create table_name
TILES_TABLE_PREFIX = 'table_' # Used if basename starts with a non-alphabetic character
class GeoPackage:
"""
Simple class to add tiles to an existing or new GeoPackage using GDAL.
"""
def __init__(self, fmt='image/jpeg'):
self.connection = None
self.filename = None
self.tile_width = 256
self.tile_height = 256
self.full_width = 0
self.full_height = 0
self.format = fmt
self.format_options = list()
self.sr_organization = "NONE"
self.sr_organization_coordsys_id = 0
self.sr_description = None
self.data_type = GDT_Unknown
self.mem_driver = gdal.GetDriverByName("MEM")
if self.format == 'image/jpeg':
self.driver = gdal.GetDriverByName('JPEG')
elif self.format == 'image/png':
self.driver = gdal.GetDriverByName("PNG")
if self.driver is None or self.mem_driver is None:
raise RuntimeError("Can't find appropriate GDAL driver for MEM and/or format ", self.format)
self.jpeg_options = []
self.ResolutionLayerInfo = collections.namedtuple('ResolutionLayerInfo', ['factor_x', 'factor_y',
'pixel_x_size', 'pixel_y_size',
'width', 'height',
'matrix_width', 'matrix_height',
'expected_pixel_x_size',
'expected_pixel_y_size'])
self.overviews = []
self.tile_lod_info = [
[156543.033928, 591657527.591555],
[78271.5169639999, 295828763.795777],
[39135.7584820001, 147914381.897889],
[19567.8792409999, 73957190.948944],
[9783.93962049996, 36978595.474472],
[4891.96981024998, 18489297.737236],
[2445.98490512499, 9244648.868618],
[1222.99245256249, 4622324.434309],
[611.49622628138, 2311162.217155],
[305.748113140558, 1155581.108577],
[152.874056570411, 577790.554289],
[76.4370282850732, 288895.277144],
[38.2185141425366, 144447.638572],
[19.1092570712683, 72223.819286],
[9.55462853563415, 36111.909643],
[4.77731426794937, 18055.954822],
[2.38865713397468, 9027.977411],
[1.19432856685505, 4513.988705],
[0.597164283559817, 2256.994353],
[0.298582141647617, 1128.497176],
]
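# Each entry above is [pixel size in metres per pixel at the equator, map scale denominator]
# for one level of the standard 256x256 WGS84 Web Mercator (EPSG:3857) tiling scheme,
# starting at level 0 (the whole world in a single tile) and halving the pixel size per level.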
def __del__(self):
if self.connection is not None:
self.connection.close()
def add_dataset(self, filename):
raster_desc = arcpy.Describe(filename)
if raster_desc is None:
arcpy.AddError("Failed to describe input")
return False
arcpy.AddMessage("Raster described {0}".format(filename))
srs = raster_desc.spatialReference
extent = raster_desc.Extent
#compute the new projected extent
new_srs = arcpy.SpatialReference(3857)
new_extent = extent.projectAs(new_srs)
#compute the new projected source cellsize using the extent of one cell from the input
#extent of one cell in the input is:
#xmin = input_xmin
#xmax = input_xmin + input_x_cell_size
#ymin = input_ymin
#ymax = input_ymin + input_y_cell_size
input_cs_x = float(str(arcpy.GetRasterProperties_management(filename, 'CELLSIZEX')))
input_cs_y = float(str(arcpy.GetRasterProperties_management(filename, 'CELLSIZEY')))
arcpy.AddMessage("Input CS X: {0}".format(input_cs_x))
arcpy.AddMessage("Input CS Y: {0}".format(input_cs_y))
#update the 'extent' with cell extent
extent.XMax = extent.XMin + input_cs_x
extent.YMax = extent.YMin + input_cs_y
new_cell_extent = extent.projectAs(new_srs)
# Get the cell size of the projected_raster
pixel_x_size = new_cell_extent.width
pixel_y_size = new_cell_extent.height
# Get the extent of the projected_raster
max_y = new_extent.YMax
min_y = new_extent.YMin
min_x = new_extent.XMin
max_x = new_extent.XMax
if pixel_x_size == 0 or pixel_y_size == 0:
print("Invalid pixel sizes")
return False
if min_x == 0.0 or min_y == 0.0 or max_x == 0.0 or max_y == 0.0:
print("Invalid extent")
return False
# Use the projected cell size as the source pixel size to compute the max scale
source_pixel_size = pixel_x_size
# Set the max cell size to twice the cell size required for a super tile size of 512
max_pixel_size = (max_x - min_x) / 256
min_scale = 0.0
max_scale = 0.0
for lod_info in self.tile_lod_info:
print(lod_info[0], lod_info[1])
if source_pixel_size > lod_info[0]:
break
max_scale = lod_info[1]
for lod_info in self.tile_lod_info:
print(lod_info[0], lod_info[1])
if max_pixel_size > lod_info[0]:
break
min_scale = lod_info[1]
tempFolder = tempfile.mkdtemp(suffix='_gpkg_cache')
cacheName = os.path.basename(filename)
arcmap_bin_dir = os.path.dirname(sys.executable)
arcmap_dir = os.path.dirname(arcmap_bin_dir)
arcmap_tilingscheme_dir = os.path.join(arcmap_dir, 'TilingSchemes', 'gpkg_scheme.xml')
if os.path.isfile(arcmap_tilingscheme_dir) == False:
raise arcpy.ExecuteError("Tiling Scheme File is Missing.")
arcpy.AddMessage("Generating tiles in {0}".format(tempFolder))
arcpy.ManageTileCache_management(in_cache_location=tempFolder,
manage_mode='RECREATE_ALL_TILES',
in_cache_name=cacheName,
in_datasource=filename,
tiling_scheme='IMPORT_SCHEME',
import_tiling_scheme=arcmap_tilingscheme_dir,
max_cached_scale=max_scale,
min_cached_scale=min_scale)
arcpy.AddMessage("Creating GeoPackage {0}".format(self.filename))
cachePath = tempFolder + "/" + cacheName
cache2gpkg.cache2gpkg(cachePath, self.filename, True)
arcpy.AddMessage("GeoPackage {0} created".format(self.filename))
### Cleanup
new_srs = None
new_extent = None
raster_desc = None
shutil.rmtree(tempFolder)
return True
def open(self, filename):
"""
Create or open a GeoPackage and create necessary tables and triggers.
@param filename: Name of sqlite3 database.
@return: True on success, False on failure.
"""
self.filename = filename
try:
self.connection = sqlite3.connect(filename)
except sqlite3.Error as e:
print("Error opening ", filename, ": ", e.args[0])
return False
self.connection.row_factory = sqlite3.Row
try:
self.connection.execute(
"""
PRAGMA application_id = 1196437808;
"""
)
self.connection.execute(
"""
CREATE TABLE IF NOT EXISTS gpkg_spatial_ref_sys ( \
srs_name TEXT NOT NULL, \
srs_id INTEGER NOT NULL PRIMARY KEY, \
organization TEXT NOT NULL, \
organization_coordsys_id INTEGER NOT NULL, \
definition TEXT NOT NULL, \
description TEXT );
"""
)
self.connection.execute(
"""
INSERT INTO gpkg_spatial_ref_sys(srs_name,srs_id,organization,organization_coordsys_id,definition)
SELECT 'Undefined Cartesian', -1, 'NONE', -1, 'undefined'
WHERE NOT EXISTS(SELECT 1 FROM gpkg_spatial_ref_sys WHERE srs_id=-1);
"""
)
self.connection.execute(
"""
INSERT INTO gpkg_spatial_ref_sys(srs_name,srs_id,organization,organization_coordsys_id,definition)
SELECT 'Undefined Geographic', 0, 'NONE', 0, 'undefined'
WHERE NOT EXISTS(SELECT 1 FROM gpkg_spatial_ref_sys WHERE srs_id=0);
"""
)
self.connection.execute(
"""
INSERT INTO gpkg_spatial_ref_sys(srs_name,srs_id,organization,organization_coordsys_id,definition)
SELECT 'WGS84', 4326, 'EPSG', 4326, 'GEOGCS["WGS 84",
DATUM["WGS_1984",
SPHEROID["WGS 84",6378137,298.257223563,
AUTHORITY["EPSG","7030"]],
AUTHORITY["EPSG","6326"]],
PRIMEM["Greenwich",0,
AUTHORITY["EPSG","8901"]],
UNIT["degree",0.01745329251994328,
AUTHORITY["EPSG","9122"]],
AUTHORITY["EPSG","4326"]]'
WHERE NOT EXISTS(SELECT 1 FROM gpkg_spatial_ref_sys WHERE srs_id=4326);
"""
)
self.connection.execute(
"""
CREATE TABLE IF NOT EXISTS gpkg_contents (
table_name TEXT NOT NULL PRIMARY KEY, \
data_type TEXT NOT NULL, \
identifier TEXT UNIQUE, \
description TEXT DEFAULT '', \
last_change DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ',CURRENT_TIMESTAMP)), \
min_x DOUBLE, \
min_y DOUBLE, \
max_x DOUBLE, \
max_y DOUBLE, \
srs_id INTEGER, \
CONSTRAINT fk_gc_r_srs_id FOREIGN KEY (srs_id) REFERENCES gpkg_spatial_ref_sys(srs_id) );
"""
)
self.connection.execute(
"""
CREATE TABLE IF NOT EXISTS gpkg_tile_matrix (
table_name TEXT NOT NULL,
zoom_level INTEGER NOT NULL,
matrix_width INTEGER NOT NULL,
matrix_height INTEGER NOT NULL,
tile_width INTEGER NOT NULL,
tile_height INTEGER NOT NULL,
pixel_x_size DOUBLE NOT NULL,
pixel_y_size DOUBLE NOT NULL,
CONSTRAINT pk_ttm PRIMARY KEY (table_name, zoom_level),
CONSTRAINT fk_tmm_table_name FOREIGN KEY (table_name) REFERENCES gpkg_contents(table_name) );
"""
)
self.connection.execute(
"""
CREATE TRIGGER IF NOT EXISTS 'gpkg_tile_matrix_zoom_level_insert'
BEFORE INSERT ON 'gpkg_tile_matrix'
FOR EACH ROW BEGIN
SELECT RAISE(ABORT, 'insert on table ''gpkg_tile_matrix'' violates constraint: zoom_level cannot be less than 0')
WHERE (NEW.zoom_level < 0);
END
"""
)
self.connection.execute(
"""
CREATE TRIGGER IF NOT EXISTS 'gpkg_tile_matrix_zoom_level_update'
BEFORE UPDATE OF zoom_level ON 'gpkg_tile_matrix'
FOR EACH ROW BEGIN
SELECT RAISE(ABORT, 'update on table ''gpkg_tile_matrix'' violates constraint: zoom_level cannot be less than 0')
WHERE (NEW.zoom_level < 0);
END
"""
)
self.connection.execute(
"""
CREATE TRIGGER IF NOT EXISTS 'gpkg_tile_matrix_matrix_width_insert'
BEFORE INSERT ON 'gpkg_tile_matrix'
FOR EACH ROW BEGIN
SELECT RAISE(ABORT, 'insert on table ''gpkg_tile_matrix'' violates constraint: matrix_width cannot be less than 1')
WHERE (NEW.matrix_width < 1);
END
"""
)
self.connection.execute(
"""
CREATE TRIGGER IF NOT EXISTS 'gpkg_tile_matrix_matrix_width_update'
BEFORE UPDATE OF matrix_width ON 'gpkg_tile_matrix'
FOR EACH ROW BEGIN
SELECT RAISE(ABORT, 'update on table ''gpkg_tile_matrix'' violates constraint: matrix_width cannot be less than 1')
WHERE (NEW.matrix_width < 1);
END
"""
)
self.connection.execute(
"""
CREATE TRIGGER IF NOT EXISTS 'gpkg_tile_matrix_matrix_height_insert'
BEFORE INSERT ON 'gpkg_tile_matrix'
FOR EACH ROW BEGIN
SELECT RAISE(ABORT, 'insert on table ''gpkg_tile_matrix'' violates constraint: matrix_height cannot be less than 1')
WHERE (NEW.matrix_height < 1);
END
"""
)
self.connection.execute(
"""
CREATE TRIGGER IF NOT EXISTS 'gpkg_tile_matrix_matrix_height_update'
BEFORE UPDATE OF matrix_height ON 'gpkg_tile_matrix'
FOR EACH ROW BEGIN
SELECT RAISE(ABORT, 'update on table ''gpkg_tile_matrix'' violates constraint: matrix_height cannot be less than 1')
WHERE (NEW.matrix_height < 1);
END
"""
)
self.connection.execute(
"""
CREATE TRIGGER IF NOT EXISTS 'gpkg_tile_matrix_pixel_x_size_insert'
BEFORE INSERT ON 'gpkg_tile_matrix'
FOR EACH ROW BEGIN
SELECT RAISE(ABORT, 'insert on table ''gpkg_tile_matrix'' violates constraint: pixel_x_size must be greater than 0')
WHERE NOT (NEW.pixel_x_size > 0);
END
"""
)
self.connection.execute(
"""
CREATE TRIGGER IF NOT EXISTS 'gpkg_tile_matrix_pixel_x_size_update'
BEFORE UPDATE OF pixel_x_size ON 'gpkg_tile_matrix'
FOR EACH ROW BEGIN
SELECT RAISE(ABORT, 'update on table ''gpkg_tile_matrix'' violates constraint: pixel_x_size must be greater than 0')
WHERE NOT (NEW.pixel_x_size > 0);
END
"""
)
self.connection.execute(
"""
CREATE TRIGGER IF NOT EXISTS 'gpkg_tile_matrix_pixel_y_size_insert'
BEFORE INSERT ON 'gpkg_tile_matrix'
FOR EACH ROW BEGIN
SELECT RAISE(ABORT, 'insert on table ''gpkg_tile_matrix'' violates constraint: pixel_y_size must be greater than 0')
WHERE NOT (NEW.pixel_y_size > 0);
END
"""
)
self.connection.execute(
"""
CREATE TRIGGER IF NOT EXISTS 'gpkg_tile_matrix_pixel_y_size_update'
BEFORE UPDATE OF pixel_y_size ON 'gpkg_tile_matrix'
FOR EACH ROW BEGIN
SELECT RAISE(ABORT, 'update on table ''gpkg_tile_matrix'' violates constraint: pixel_y_size must be greater than 0')
WHERE NOT (NEW.pixel_y_size > 0);
END
"""
)
self.connection.execute(
"""
CREATE TABLE IF NOT EXISTS gpkg_tile_matrix_set (
table_name TEXT NOT NULL PRIMARY KEY,
srs_id INTEGER NOT NULL,
min_x DOUBLE NOT NULL,
min_y DOUBLE NOT NULL,
max_x DOUBLE NOT NULL,
max_y DOUBLE NOT NULL,
CONSTRAINT fk_gtms_table_name FOREIGN KEY (table_name) REFERENCES gpkg_contents(table_name),
CONSTRAINT fk_gtms_srs FOREIGN KEY (srs_id) REFERENCES gpkg_spatial_ref_sys (srs_id) );
"""
)
self.connection.commit()
except sqlite3.Error as e:
print("ERROR: SQLite error while creating core tables and triggers: ", e.args[0])
return False
return True
def usage():
print("Usage: gdal2gpkg3 datasetname gpkgname")
return 2
def equal(a, b):
"""
Case insensitive string compare.
@param a: String to compare.
@param b: String to compare.
@return: True if equal, False if not.
"""
return a.lower() == b.lower()
def main(argv=None):
dataset_filename = None
gpkg_filename = None
gpkg = GeoPackage()
dataset_filename = arcpy.GetParameterAsText(0)
gpkg_filename = arcpy.GetParameterAsText(1)
image_extension = os.path.splitext(dataset_filename)[1][1:].strip()
if image_extension.lower() in ('jpg', 'jpeg'):
gpkg.format = "image/jpeg"
elif image_extension.lower() == 'png':
gpkg.format = "image/png"
else:
extension = ''
if not gpkg.open(gpkg_filename):
print("ERROR: Failed to open or create ", gpkg_filename)
return 1
if not gpkg.add_dataset(dataset_filename):
print("ERROR: Adding ", dataset_filename, " to ", gpkg_filename, " failed")
return 1
gpkg = None
arcpy.AddMessage('\nDone')
return 0
if __name__ == '__main__':
version_num = int(gdal.VersionInfo('VERSION_NUM'))
if version_num < 1800: # because of GetGeoTransform(can_return_null)
print('ERROR: Python bindings of GDAL 1.8.0 or later required')
sys.exit(1)
sys.exit(main(sys.argv))
|
# Copyright 2021 Canonical
# See LICENSE file for licensing details.
"""Module defining Legend DB consumer class and helpers."""
import json
import logging
from ops import framework
# The unique Charmhub library identifier, never change it
LIBID = "02ed64badd5941c5acfdae546b0f79a2"
# Increment this major API version when introducing breaking changes
LIBAPI = 0
# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 3
LEGEND_DB_RELATION_DATA_KEY = "legend-db-connection"
REQUIRED_LEGEND_DATABASE_CREDENTIALS = [
"username", "password", "database", "uri"]
logger = logging.getLogger(__name__)
def get_database_connection_from_mongo_data(
mongodb_consumer_data, mongodb_databases):
"""Returns a dict with Mongo connection info for Legend components
just like `LegendDatabaseConsumer.get_legend_database_creds()`.
Args:
mongodb_consumer_data: connection data as returned by
charms.mongodb_k8s.v0.mongodb.MongoConsumer.connection()
Should be a dict of the following structure: {
"username": "user",
"password": "pass",
"replica_set_uri": "Replica set URI with user/pass/login DB"
}
mongodb_databases: List of database names as returned by
charms.mongodb_k8s.v0.mongodb.MongoConsumer.databases()
Returns:
Dictionary with the following structure:
{
"uri": "<replica set URI (with user/pass, no DB name)>",
"username": "<username>",
"password": "<password>",
"database": "<database name>"
}
"""
if not isinstance(mongodb_consumer_data, dict):
logger.warning("MongoDB consumer data not a dict.")
return {}
missing_keys = [
k for k in ["username", "password", "replica_set_uri"]
if not mongodb_consumer_data.get(k)]
if missing_keys:
logger.warning(
"Following keys were missing from the MongoDB connection "
"data provided: %s. Data was: %s",
missing_keys, mongodb_consumer_data)
return {}
if any([not isinstance(v, str) for v in mongodb_consumer_data.values()]):
logger.warning(
"Not all mongoDB database values are strings: %s", mongodb_consumer_data)
return {}
if not isinstance(mongodb_databases, list) or not (
all([isinstance(v, str) for v in mongodb_databases])):
logger.warning(
"MongoDB databases must be a list of strings, not: %s",
mongodb_databases)
return {}
if not mongodb_databases:
logger.info("No Mongo databases provided by the MongoConsumer.")
return {}
uri = mongodb_consumer_data['replica_set_uri']
# NOTE: we remove the trailing database from the URI:
split_uri = [
elem for elem in uri.split('/')[:-1]
# NOTE: filter any empty strings resulting from double-slashes:
if elem]
if not len(split_uri) > 1:
logger.warning("Failed to process DB URI: %s", uri)
return {}
# NOTE: schema prefix needs two slashes added back:
uri = "%s//%s" % (
split_uri[0], "/".join(split_uri[1:]))
res = {
"uri": uri,
"username": mongodb_consumer_data['username'],
"password": mongodb_consumer_data['password'],
"database": mongodb_databases[0]}
if not _validate_legend_database_credentials(res):
logger.warning("Failed to validate legend creds.")
return {}
return res
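# Illustrative usage of the helper above (not part of the original module;
# the connection values are hypothetical):
#
#   creds = get_database_connection_from_mongo_data(
#       {"username": "user",
#        "password": "pass",
#        "replica_set_uri": "mongodb://user:pass@host1:27017,host2:27017/admin"},
#       ["legend-db"])
#   # creds == {"uri": "mongodb://user:pass@host1:27017,host2:27017",
#   #           "username": "user",
#   #           "password": "pass",
#   #           "database": "legend-db"}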
def set_legend_database_creds_in_relation_data(relation_data, creds):
"""Set connection data for MongoDB from the provided relation data.
Args:
relation_data: Data of the relation to set the info into.
Returns:
True if the provided creds are of a valid structure, else False.
"""
if not _validate_legend_database_credentials(creds):
return False
relation_data[LEGEND_DB_RELATION_DATA_KEY] = json.dumps(creds)
return True
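# Sketch of how a providing charm might use the setter above (the relation
# object is hypothetical; any dict-like relation data bag works):
#
#   set_legend_database_creds_in_relation_data(
#       relation.data[charm.app],
#       {"uri": "mongodb://user:pass@host:27017",
#        "username": "user", "password": "pass", "database": "legend-db"})
#   # The creds are stored JSON-encoded under the
#   # LEGEND_DB_RELATION_DATA_KEY ("legend-db-connection") key.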
def _validate_legend_database_credentials(creds):
"""Returns True/False depending on whether the provided Legend
database credentials dict contains all the required fields.
"""
if not isinstance(creds, dict) or any([
not isinstance(creds.get(k), str) for k in REQUIRED_LEGEND_DATABASE_CREDENTIALS]):
return False
return True
class LegendDatabaseConsumer(framework.Object):
"""Class which facilitates reading Legend DB creds from relation data."""
def __init__(self, charm, relation_name="legend-db"):
super().__init__(charm, relation_name)
self.charm = charm
self.relation_name = relation_name
def get_legend_database_creds(self, relation_id):
"""Get connection data for MongoDB from the provided relation.
Args:
relation_id: ID of the relation to fetch data from.
Returns:
Dictionary with the following structure:
{
"uri": "<replica set URI (with user/pass, no DB name)>",
"username": "<username>",
"password": "<password>",
"database": "<database name>"
}
Raises:
TooManyRelatedAppsError if relation id is not provided and
multiple relation of the same name are present.
"""
relation = self.framework.model.get_relation(
self.relation_name, relation_id)
if not relation:
logger.warning(
"No relation of name '%s' and ID '%s' was found.",
self.relation_name, relation_id)
return {}
relation_data = relation.data[relation.app]
creds_data = relation_data.get(LEGEND_DB_RELATION_DATA_KEY, "{}")
try:
creds = json.loads(creds_data)
except Exception as ex:
logger.warning(
"Could not deserialize Legend DB creds JSON: %s. Error "
"was: %s", creds_data, str(ex))
return {}
if not _validate_legend_database_credentials(creds):
logger.warning("Invalid DB creds in relation: %s", creds)
return {}
return creds
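# Minimal consumption sketch (assumes a charm with a "legend-db" relation;
# the charm class and handler names are illustrative only):
#
#   class MyCharm(CharmBase):
#       def __init__(self, *args):
#           super().__init__(*args)
#           self._legend_db = LegendDatabaseConsumer(self)
#           self.framework.observe(
#               self.on["legend-db"].relation_changed, self._on_db_changed)
#
#       def _on_db_changed(self, event):
#           creds = self._legend_db.get_legend_database_creds(
#               event.relation.id)
#           if not creds:
#               return  # incomplete relation data; wait for the next event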
|
# -*- coding: utf-8 -*-
import copy
import json
import os
import re
import shutil
import subprocess
import time
from requests_toolbelt import MultipartEncoder
from . import config
def download_video(
self,
media_id,
filename=None,
media=False,
folder="videos"
):
video_urls = []
if not media:
self.media_info(media_id)
media = self.last_json["items"][0]
filename = (
"{}_{}.mp4".format(media["user"]["username"], media_id)
if not filename
else "{}.mp4".format(filename)
)
try:
clips = media["video_versions"]
video_urls.append(clips[0]["url"])
except KeyError:
carousels = media.get("carousel_media", [])
for carousel in carousels:
video_urls.append(carousel["video_versions"][0]["url"])
except Exception:
return False
for counter, video_url in enumerate(video_urls):
fname = os.path.join(folder, "{}_{}".format(counter, filename))
if os.path.exists(fname):
print('File %s already exists, returning it' % fname)
return os.path.abspath(fname)
response = self.session.get(video_url, stream=True)
if response.status_code == 200:
with open(fname, "wb") as f:
response.raw.decode_content = True
shutil.copyfileobj(response.raw, f)
return os.path.abspath(fname)
# Keeping this function from the old upload_video; it is no longer used.
def get_video_info(filename):
res = {}
try:
terminalResult = subprocess.Popen(
["ffprobe", filename],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
)
for x in terminalResult.stdout.readlines():
# Duration: 00:00:59.51, start: 0.000000, bitrate: 435 kb/s
m = re.search(
r"duration: (\d\d:\d\d:\d\d\.\d\d),",
str(x),
flags=re.IGNORECASE
)
if m is not None:
res["duration"] = m.group(1)
# Video: h264 (Constrained Baseline)
# (avc1 / 0x31637661), yuv420p, 480x268
m = re.search(
r"video:\s.*\s(\d+)x(\d+)\s",
str(x),
flags=re.IGNORECASE
)
if m is not None:
res["width"] = m.group(1)
res["height"] = m.group(2)
finally:
if "width" not in res:
print(
"ERROR: 'ffprobe' not found, please install "
"'ffprobe' with one of following methods:"
)
print(" sudo apt-get install ffmpeg")
print("or sudo apt-get install -y libav-tools")
return res
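# Example of the dict returned by the helper above when ffprobe is available
# (values taken from the sample ffprobe lines quoted in the comments):
#   {'duration': '00:00:59.51', 'width': '480', 'height': '268'}
# Note that all values are strings captured from the regex groups.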
def upload_video(
self,
video,
caption=None,
upload_id=None,
thumbnail=None,
options={}
):
"""Upload video to Instagram
@param video Path to video file (String)
@param caption Media description (String)
@param upload_id Unique upload_id (String). When None, then generate
automatically
@param thumbnail Path to thumbnail for video (String). When None, then
thumbnail is generate automatically
@param options Object with difference options, e.g. configure_timeout,
rename_thumbnail, rename (Dict)
Designed to reduce the number of function arguments!
This is the simplest request object.
@return Object with state of uploading to Instagram (or False)
"""
options = dict(
{"configure_timeout": 15, "rename_thumbnail": True, "rename": True},
**(options or {})
)
if upload_id is None:
upload_id = str(int(time.time() * 1000))
video, thumbnail, width, height, duration = resize_video(video, thumbnail)
data = {
"upload_id": upload_id,
"_csrftoken": self.token,
"media_type": "2",
"_uuid": self.uuid,
}
m = MultipartEncoder(data, boundary=self.uuid)
self.session.headers.update(
{
"X-IG-Capabilities": "3Q4=",
"X-IG-Connection-Type": "WIFI",
"Host": "i.instagram.com",
"Cookie2": "$Version=1",
"Accept-Language": "en-US",
"Accept-Encoding": "gzip, deflate",
"Content-type": m.content_type,
"Connection": "keep-alive",
"User-Agent": self.user_agent,
}
)
response = self.session.post(
config.API_URL + "upload/video/", data=m.to_string()
)
if response.status_code == 200:
body = json.loads(response.text)
upload_url = body["video_upload_urls"][3]["url"]
upload_job = body["video_upload_urls"][3]["job"]
with open(video, "rb") as video_bytes:
video_data = video_bytes.read()
# solve issue #85 TypeError:
# slice indices must be integers or None or have an __index__ method
request_size = len(video_data) // 4
last_request_extra = len(video_data) - 3 * request_size
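# Worked example of the chunking above (hypothetical size): for a
# 1,000,003-byte video, request_size == 250000 and last_request_extra ==
# 250003, so the four uploads carry 250000, 250000, 250000 and 250003 bytes.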
headers = copy.deepcopy(self.session.headers)
self.session.headers.update(
{
"X-IG-Capabilities": "3Q4=",
"X-IG-Connection-Type": "WIFI",
"Cookie2": "$Version=1",
"Accept-Language": "en-US",
"Accept-Encoding": "gzip, deflate",
"Content-type": "application/octet-stream",
"Session-ID": upload_id,
"Connection": "keep-alive",
"Content-Disposition": 'attachment; filename="video.mov"',
"job": upload_job,
"Host": "upload.instagram.com",
"User-Agent": self.user_agent,
}
)
for i in range(4):
start = i * request_size
if i == 3:
end = i * request_size + last_request_extra
else:
end = (i + 1) * request_size
length = last_request_extra if i == 3 else request_size
content_range = "bytes {start}-{end}/{len_video}".format(
start=start, end=end - 1, len_video=len(video_data)
).encode("utf-8")
self.session.headers.update(
{
"Content-Length": str(end - start),
"Content-Range": content_range
}
)
response = self.session.post(
upload_url, data=video_data[start: start + length]
)
self.session.headers = headers
configure_timeout = options.get("configure_timeout")
if response.status_code == 200:
for attempt in range(4):
if configure_timeout:
time.sleep(configure_timeout)
if self.configure_video(
upload_id,
video,
thumbnail,
width,
height,
duration,
caption,
options=options,
):
media = self.last_json.get("media")
self.expose()
if options.get("rename"):
from os import rename
rename(video, "{}.REMOVE_ME".format(video))
return media
return False
def configure_video(
self,
upload_id,
video,
thumbnail,
width,
height,
duration,
caption="",
options={}
):
"""Post Configure Video (send caption, thumbnail and more to Instagram)
@param upload_id Unique upload_id (String). Received from "upload_video"
@param video Path to video file (String)
@param thumbnail Path to thumbnail for video (String). When None,
then thumbnail is generate automatically
@param width Width in px (Integer)
@param height Height in px (Integer)
@param duration Duration in seconds (Integer)
@param caption Media description (String)
@param options Object with difference options, e.g. configure_timeout,
rename_thumbnail, rename (Dict)
Designed to reduce the number of function arguments!
This is the simplest request object.
"""
# clipInfo = get_video_info(video)
options = {"rename": options.get("rename_thumbnail", True)}
self.upload_photo(
photo=thumbnail,
caption=caption,
upload_id=upload_id,
from_video=True,
options=options,
)
data = self.json_data(
{
"upload_id": upload_id,
"source_type": 3,
"poster_frame_index": 0,
"length": 0.00,
"audio_muted": False,
"filter_type": 0,
"video_result": "deprecated",
"clips": {
"length": duration,
"source_type": "3",
"camera_position": "back",
},
"extra": {"source_width": width, "source_height": height},
"device": self.device_settings,
"caption": caption,
}
)
return self.send_request("media/configure/?video=1", data)
def resize_video(fname, thumbnail=None):
from math import ceil
try:
import moviepy.editor as mp
except ImportError as e:
print("ERROR: {}".format(e))
print(
"Required module `moviepy` not installed\n"
"Install with `pip install moviepy` and retry.\n\n"
"You may need also:\n"
"pip install --upgrade setuptools\n"
"pip install numpy --upgrade --ignore-installed"
)
return False
print("Analizing `{}`".format(fname))
h_lim = {"w": 90.0, "h": 47.0}
v_lim = {"w": 4.0, "h": 5.0}
d_lim = 60
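# Assumed Instagram feed-video constraints encoded above: landscape videos
# are capped at a 90:47 (~1.91:1) aspect ratio, portrait videos at 4:5,
# and clips are trimmed to at most 60 seconds.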
vid = mp.VideoFileClip(fname)
(w, h) = vid.size
deg = vid.rotation
ratio = w * 1.0 / h * 1.0
print(
"FOUND w:{w}, h:{h}, rotation={d}, ratio={r}".format(
w=w,
h=h,
r=ratio,
d=deg
)
)
if w > h:
print("Horizontal video")
if ratio > (h_lim["w"] / h_lim["h"]):
print("Cropping video")
cut = int(ceil((w - h * h_lim["w"] / h_lim["h"]) / 2))
left = cut
right = w - cut
top = 0
bottom = h
vid = vid.crop(x1=left, y1=top, x2=right, y2=bottom)
(w, h) = vid.size
if w > 1081:
print("Resizing video")
vid = vid.resize(width=1080)
elif w < h:
print("Vertical video")
if ratio < (v_lim["w"] / v_lim["h"]):
print("Cropping video")
cut = int(ceil((h - w * v_lim["h"] / v_lim["w"]) / 2))
left = 0
right = w
top = cut
bottom = h - cut
vid = vid.crop(x1=left, y1=top, x2=right, y2=bottom)
(w, h) = vid.size
if h > 1081:
print("Resizing video")
vid = vid.resize(height=1080)
else:
print("Square video")
if w > 1081:
print("Resizing video")
vid = vid.resize(width=1080)
(w, h) = vid.size
if vid.duration > d_lim:
print("Cutting video to {} sec from start".format(d_lim))
vid = vid.subclip(0, d_lim)
new_fname = "{}.CONVERTED.mp4".format(fname)
print(
"Saving new video w:{w} h:{h} to `{f}`".format(
w=w,
h=h,
f=new_fname
)
)
vid.write_videofile(new_fname, codec="libx264", audio_codec="aac")
if not thumbnail:
print("Generating thumbnail...")
thumbnail = "{}.jpg".format(fname)
vid.save_frame(thumbnail, t=(vid.duration / 2))
return new_fname, thumbnail, w, h, vid.duration
|
# Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities to calculate batch sizes."""
from lingvo.core import cluster_factory
from lingvo.core import py_utils
def scale_infeed_to_global(infeed_batch_size, use_per_host_infeed):
"""Obtains a global batch size from an infeed batch size and cluster configs.
Args:
infeed_batch_size: int: Per-infeed batch size.
use_per_host_infeed: bool: Whether to use an individual infeed for each
host.
Returns:
int: Global batch size.
"""
cluster = cluster_factory.Current()
if use_per_host_infeed and cluster.num_tpu_hosts > 0:
if not py_utils.use_tpu():
raise ValueError('Scaling to TPU hosts without TPUs. {}'.format(
cluster.num_tpu_hosts))
return infeed_batch_size * cluster.num_tpu_hosts
else:
return infeed_batch_size
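# Example (hypothetical cluster): with infeed_batch_size=8, 4 TPU hosts and
# use_per_host_infeed=True, the global batch size is 8 * 4 == 32.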
def scale_global_to_infeed(global_batch_size, use_per_host_infeed):
"""Obtains infeed batch size from global batch size and cluster configs.
Args:
global_batch_size: int: Global batch size.
use_per_host_infeed: bool: Whether to use an individual infeed for each
host.
Returns:
int: infeed batch size.
"""
cluster = cluster_factory.Current()
if use_per_host_infeed and cluster.num_tpu_hosts > 0:
if not py_utils.use_tpu():
raise ValueError('Scaling to TPU hosts without TPUs. {}'.format(
cluster.num_tpu_hosts))
return global_batch_size // cluster.num_tpu_hosts
else:
return global_batch_size
def scale_split_to_infeed(split_batch_size, use_per_host_infeed):
"""Obtains an infeed batch size from a split batch size and cluster configs.
Args:
split_batch_size: int: Per-split batch size.
use_per_host_infeed: bool: Whether to use an individual infeed for each
host.
Returns:
int: Per-infeed batch size.
"""
cluster = cluster_factory.Current()
global_batch_size = split_batch_size * cluster.num_splits_per_client
# If use_per_host_infeed, each input op is only responsible
# for generating a subset of the whole batch.
if use_per_host_infeed and cluster.num_tpu_hosts > 0:
return global_batch_size // cluster.num_tpu_hosts
else:
return global_batch_size
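# Example (hypothetical cluster): split_batch_size=16 with
# num_splits_per_client=2 gives a global batch size of 32; with
# use_per_host_infeed=True and 4 TPU hosts, each infeed then provides
# 32 // 4 == 8 examples.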
|
# -*- coding: utf-8 -*-
import scrapy
from scrapy.spiders import Rule
class SinaSpider(scrapy.Spider):
name = 'sina'
allowed_domains = ['sina.com']
start_urls = ['https://news.sina.cn']
def parse(self, response):
li_list = response.xpath('//ul[@class="swiper-wrapper"]/li')
for li in li_list:
item = dict()
item['title'] = li.xpath('./a/h2/em/text()').extract_first()
item['url'] = li.xpath('./a/@href').extract_first()
yield item
|
# coding: utf-8
import re
import os
import sys
import json
import types
import errno
import random
import string
import functools
import hashlib
import traceback
import requests
import urlparse
from urllib import urlencode
from datetime import datetime, date
from StringIO import StringIO
from jinja2 import Markup
from flask import jsonify, current_app, request, redirect, render_template
from flask.ext.login import current_user
from werkzeug.local import LocalProxy
import urllib3
urllib3.disable_warnings()
__all__ = [
'Puppet', 'strip', 'json_success', 'json_error', 'datetime2best',
'time2best', 'today', 'err_logger', 'parse_spm', 'get_spm', 'get_version',
'get_os', 'get_platform', 'get_channel', 'get_ip', 'is_ajax',
'str2datetime', 'is_json', 'is_empty', 'randchar', 'randstr', 'AttrDict',
'url2image', 'retry', 'tpl_data', 'get_module', 'rmb3', 'check_encode',
'url_with_user', 'get_url_arg', 'create_short_url', 'ip_limit',
'random_index', 'is_debug', 'sign', 'add_args', 'import_file',
'unicode2utf8', 'json2utf8', 'url_external', 'is_wechat',
'success', 'error', 'message', 'markup', 'current_db',
]
def _get_db():
return current_app.db
current_db = LocalProxy(lambda: _get_db())
class Puppet(object):
@classmethod
def init(cls, app, key=None):
key = key or cls.__name__.upper()
if key in app.config:
return cls(app, key)
def __init__(self, app=None, key=None, config=None, holder=None,
name=None):
self.name = name or self.__class__.__name__.lower()
self.key = key
self.config = config
self.holder = holder
if app:
self.init_app(app)
def init_app(self, app):
self.app = app
name = self.__class__.__name__
if self.config is None:
self.config = app.config.get(self.key or name.upper())
self.puppets = dict()
for key, config in self.config.get('puppets', dict()).iteritems():
self.puppets[key] = self.__class__(
app=app, key=key, config=config, holder=self)
if not hasattr(app, name.lower()) and not self.holder:
setattr(app, name.lower(), self)
def get_key(self):
return self.key if self.holder else 'default'
def get_config(self, key, default=None, replace=True, config=None):
if config and key in config:
return config.get(key)
if self.holder:
value = self.config.get(
key, self.holder.get_config(key, default, False))
else:
value = self.config.get(key, default)
if replace and isinstance(value, (str, unicode)):
return value.replace('[key]', self.get_key())
return value
def get_puppet(self, key):
if self.holder:
return self.holder.get_puppet(key)
return self if key in ['default', ''] else self.puppets.get(key)
def down(url, source=None):
try:
if source:
return StringIO(requests.get(
url, headers=dict(Referer=source), verify=False).content)
return StringIO(requests.get(url, verify=False).content)
except:
current_app.logger.error(traceback.format_exc())
def get_format(image):
format = image.split('.')[-1]
if format in ['jpg', 'jpeg']:
return 'jpg'
if format in ['gif', 'bmp', 'png', 'ico']:
return format
return ''
def url2image(url, source=None, format=''):
return dict(stream=down(url, source=source),
format=get_format(url) or format) if url else None
class AttrDict(dict):
__getattr__ = dict.__getitem__
__setattr__ = dict.__setitem__
def today():
return datetime.strptime(str(date.today()), '%Y-%m-%d')
def strip(val, *args):
if not val:
return val
if isinstance(val, dict):
return dict((x, strip(
y) if x not in args else y) for x, y in val.iteritems())
if isinstance(val, list):
return list(strip(x) for x in val)
if hasattr(val, 'strip'):
return val.strip()
return val
def json_success(**kwargs):
kwargs['code'] = 0
return jsonify(kwargs)
def json_error(**kwargs):
kwargs['code'] = -1
return jsonify(kwargs)
def datetime2best(input):
tmp = datetime.now() - input
if tmp.days in [0, -1]:
seconds = tmp.days * 86400 + tmp.seconds
if seconds < -3600:
return '%d hours from now' % (-seconds // 3600)
elif seconds < -60:
return '%d minutes from now' % (-seconds // 60)
elif seconds < 0:
return '%d seconds from now' % -seconds
elif seconds < 60:
return '%d seconds ago' % seconds
elif seconds < 3600:
return '%d minutes ago' % (seconds // 60)
else:
return '%d hours ago' % (seconds // 3600)
elif tmp.days < -365:
return '%d years from now' % (-tmp.days // 365)
elif tmp.days < -30:
return '%d months from now' % (-tmp.days // 30)
elif tmp.days < -1:
return '%d days from now' % -(tmp.days + 1)
elif tmp.days < 30:
return '%d days ago' % tmp.days
elif tmp.days < 365:
return '%d months ago' % (tmp.days // 30)
else:
return '%d years ago' % (tmp.days // 365)
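# Example: an input 90 seconds in the past falls into the "minutes ago"
# branch (90 // 60 == 1); an input 3 days in the past falls into the
# "days ago" branch.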
def time2best(input):
if type(input) != datetime:
input = datetime.fromtimestamp(input)
return datetime2best(input)
def err_logger(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except:
current_app.logger.error(traceback.format_exc())
return wrapper
def parse_spm(spm):
if spm:
spm = spm.replace('unknown', '0')
if spm and re.match(r'^(\d+\.)+\d+$', spm):
res = map(lambda x: int(x), spm.split('.'))
while len(res) < 5:
res.append(0)
return res[:5]
return 0, 0, 0, 0, 0
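# Example: parse_spm('1.2.3') pads to five fields and returns [1, 2, 3, 0, 0];
# parse_spm(None) and malformed values fall through to (0, 0, 0, 0, 0).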
def get_spm():
spm = request.args.get('spm')
if spm:
return spm
spm = []
oslist = ['ios', 'android', 'windows', 'linux', 'mac']
plist = ['micromessenger', 'weibo', 'qq']
ua = request.args.get('User-Agent', '').lower()
for index, _os in enumerate(oslist):
if _os in ua:
spm.append(index + 1)
break
else:
spm.append(index + 1)
for index, p in enumerate(plist):
if p in ua:
spm.append(index + 1)
break
else:
spm.append(index + 1)
spm.append(0)
spm.append(0)
return '.'.join([str(x) for x in spm])
def get_version():
return parse_spm(get_spm())[3]
def get_channel():
return parse_spm(get_spm())[2]
def get_os():
return parse_spm(get_spm())[0]
def get_platform():
return parse_spm(get_spm())[1]
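# The dot-separated spm fields decoded by the helpers above are, in order:
# os . platform . channel . version . <reserved>, so e.g. get_os() reads
# field 0 and get_version() reads field 3.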
def get_ip():
if 'Cdn-Real-Ip' in request.headers:
return request.headers['Cdn-Real-Ip']
if 'X-Real-Forwarded-For' in request.headers:
return request.headers['X-Real-Forwarded-For'].split(',')[0]
if 'X-FORWARDED-FOR' in request.headers:
return request.headers['X-FORWARDED-FOR'].split(',')[0]
return request.headers.get('X-Real-Ip') or request.remote_addr
def is_ajax():
return request.headers.get('X-Requested-With') == 'XMLHttpRequest' \
or request.args.get('is_ajax', 'false') == 'true' \
or request.headers.get('Accept', '').startswith('application/json') \
or 'application/json' in request.headers.get('Content-Type', '')
def is_wechat():
ua = request.headers['User-Agent'].lower()
return 'micromessenger' in ua
def is_api():
return 'API' in current_app.config.get('ENVVAR', '')
def is_json():
return is_api() or is_ajax()
def is_empty(fd):
fd.seek(0)
first_char = fd.read(1)
fd.seek(0)
return not bool(first_char)
def str2datetime(datestr):
try:
return datetime.strptime(datestr, '%Y-%m-%d %H:%M:%S')
except ValueError:
return datetime.min
def randchar():
return random.choice(string.ascii_letters + string.digits)
def randstr(x=32):
return ''.join(randchar() for _ in range(x))
def retry(times=3):
def wrapper(func):
res = None
for i in range(times):
try:
res = func()
break
except:
current_app.logger.error(traceback.format_exc())
return res
return wrapper
def tpl_data(color="#333333", **kwargs):
res = dict()
for key, value in kwargs.iteritems():
res[key] = dict(value=value, color=color)
return res
def get_module():
def main_module_name():
mod = sys.modules['__main__']
file = getattr(mod, '__file__', None)
return file and os.path.splitext(os.path.basename(file))[0]
def modname(fvars):
file, name = fvars.get('__file__'), fvars.get('__name__')
if file is None or name is None:
return None
if name == '__main__':
name = main_module_name()
return name
return modname(globals())
def rmb3(num):
d = float('%.2f' % (num / 100.0))
return str([str(d), int(d)][int(d) == d])
def check_encode(text, code='gb18030'):
try:
text.encode(code)
return True
except:
pass
return False
def url_with_user(url):
if current_user.is_authenticated() and current_user.is_user():
res = urlparse.urlparse(url)
if 'uid=' not in res.query:
if res.query:
query = '%s&uid=%d' % (res.query, current_user.id)
else:
query = 'uid=%d' % current_user.id
url = '%s://%s%s?%s' % (res.scheme, res.netloc, res.path, query)
if res.fragment:
url += '#' + res.fragment
return url
def get_url_arg(url, key):
res = urlparse.parse_qs(urlparse.urlparse(url).query).get(key)
return res[0] if res else None
def create_short_url(key, url, **kwargs):
tpl = 'http://api.t.sina.com.cn/short_url/shorten.json?%s'
res = requests.get(
tpl % urlencode(dict(source=key, url_long=url)), **kwargs).json()
return res[0]['url_short'] if res[0]['type'] == 0 else url
def ip_limit(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
from chiki.base import db
ip = get_ip()
regx = db.Item.data('ip_regx', '^116\.23\.|^59\.41\.', name='IP regex')
limit = db.Item.get('ip_limit', 100, name='IP limit')
url = db.Item.data('ip_redirect', 'http://www.qq.com/', name='IP redirect')
if not re.match(regx, ip) and db.User.objects(ip=ip).count() >= limit:
return redirect(url)
return func(*args, **kwargs)
return wrapper
def random_index(rate):
start, index = 0, 0
num = random.randint(0, sum(rate))
for index, scope in enumerate(rate):
start += scope
if num < start:
break
return index
def is_debug():
return current_app.debug or \
request.args.get('debug') == 'true' or \
request.host.startswith('0.0.0.0') or \
request.host.startswith('127.0.') or \
request.host.startswith('192.168.')
def sign(key, **kwargs):
keys = sorted(filter(
lambda x: x[1] is not None, kwargs.iteritems()), key=lambda x: x[0])
text = '&'.join(['%s=%s' % x for x in keys])
text += '&key=%s' % key
return hashlib.md5(text.encode('utf-8')).hexdigest().upper()
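# Worked example (hypothetical secret): sign('SECRET', b='2', a='1', c=None)
# drops the None value, sorts the rest by key and hashes
# 'a=1&b=2&key=SECRET', returning the upper-cased MD5 hex digest.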
def add_args(url, **kwargs):
if '?' in url:
return url + '&' + urlencode(kwargs)
return url + '?' + urlencode(kwargs)
def import_file(filename):
d = types.ModuleType('module')
d.__file__ = filename
try:
with open(filename, mode='rb') as fd:
exec(compile(fd.read(), filename, 'exec'), d.__dict__)
except IOError as e:
if e.errno in (errno.ENOENT, errno.EISDIR):
return False
e.strerror = 'Unable to load file (%s)' % e.strerror
raise
return d
def unicode2utf8(obj):
u = unicode2utf8
if type(obj) == str:
return obj
if isinstance(obj, unicode):
return obj.encode('utf-8')
if isinstance(obj, dict):
return dict((u(k), u(v)) for k, v in obj.iteritems())
if isinstance(obj, list):
return [u(x) for x in obj]
return obj
def json2utf8(ensure_ascii=False, indent=None, **kwargs):
return json.dumps(
unicode2utf8(kwargs), ensure_ascii=ensure_ascii, indent=indent)
def url_external(url):
if url.startswith('/'):
return 'http://' + request.host + url
return url
def message(msg, style='info', url='', timeout=0, **kwargs):
return render_template('msg.html', msg=msg, style=style,
url=url, timeout=timeout, **kwargs)
def success(msg, url='', timeout=0, **kwargs):
return message(msg, style='success', url=url, timeout=timeout, **kwargs)
def error(msg, url='', timeout=0, **kwargs):
return message(msg, style='warn', url=url, timeout=timeout, **kwargs)
def markup(html):
return Markup(html) if current_app.jinja_env.autoescape else html
|
import codecs
import configparser
class Parser:
def __init__(self):
self.configfile = './config/config.ini'
config = configparser.ConfigParser()
config.read_file(codecs.open(self.configfile, "r", "utf-8-sig"))
self.api_key = config['Binance']['API_Key']
self.secret = config['Binance']['Secret']
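# Expected ./config/config.ini layout (illustrative values):
#
#   [Binance]
#   API_Key = your-api-key
#   Secret = your-api-secret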
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
url(r'^$', views.BlogHome.as_view(), name='blog'),
)
|
from datetime import datetime
from flask import url_for
from sopy import db
from sopy.auth.models import User
from sopy.ext.models import IDModel
class WikiPage(IDModel):
title = db.Column('title', db.String, nullable=False, unique=True)
body = db.Column(db.String, nullable=False)
updated = db.Column(db.DateTime, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow)
draft = db.Column(db.Boolean, nullable=False, default=False)
community = db.Column(db.Boolean, nullable=False, default=False)
author_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
redirect_id = db.Column(db.Integer, db.ForeignKey('wiki_page.id'))
author = db.relationship(User)
redirect = db.relationship(lambda: WikiPage, remote_side=lambda: (WikiPage.id,), backref='redirects')
def __str__(self):
return self.title
@property
def detail_url(self):
return url_for('wiki.detail', title=self.title)
@property
def update_url(self):
return url_for('wiki.update', title=self.title)
@property
def delete_url(self):
return url_for('wiki.delete', title=self.title)
|
import collections
import charmhelpers.contrib.openstack.context as context
import yaml
import json
import unittest
from copy import copy, deepcopy
from mock import (
patch,
Mock,
MagicMock,
call
)
from tests.helpers import patch_open
import six
if not six.PY3:
open_builtin = '__builtin__.open'
else:
open_builtin = 'builtins.open'
class FakeRelation(object):
'''
A fake relation class. Lets tests specify simple relation data
for a default relation + unit (foo:0, foo/0, set in setUp()), eg:
rel = {
'private-address': 'foo',
'password': 'passwd',
}
relation = FakeRelation(rel)
self.relation_get.side_effect = relation.get
passwd = self.relation_get('password')
or more complex relations meant to be addressed by explicit relation id
+ unit id combos:
rel = {
'mysql:0': {
'mysql/0': {
'private-address': 'foo',
'password': 'passwd',
}
}
}
relation = FakeRelation(rel)
self.relation_get.side_effect = relation.get
passwd = self.relation_get('password', rid='mysql:0', unit='mysql/0')
'''
def __init__(self, relation_data):
self.relation_data = relation_data
def get(self, attribute=None, unit=None, rid=None):
if not rid or rid == 'foo:0':
if attribute is None:
return self.relation_data
elif attribute in self.relation_data:
return self.relation_data[attribute]
return None
else:
if rid not in self.relation_data:
return None
try:
relation = self.relation_data[rid][unit]
except KeyError:
return None
if attribute is None:
return relation
if attribute in relation:
return relation[attribute]
return None
def relation_ids(self, relation):
rids = []
for rid in sorted(self.relation_data.keys()):
if relation + ':' in rid:
rids.append(rid)
return rids
def relation_units(self, relation_id):
if relation_id not in self.relation_data:
return None
return sorted(self.relation_data[relation_id].keys())
SHARED_DB_RELATION = {
'db_host': 'dbserver.local',
'password': 'foo'
}
SHARED_DB_RELATION_ALT_RID = {
'mysql-alt:0': {
'mysql-alt/0': {
'db_host': 'dbserver-alt.local',
'password': 'flump'}}}
SHARED_DB_RELATION_SSL = {
'db_host': 'dbserver.local',
'password': 'foo',
'ssl_ca': 'Zm9vCg==',
'ssl_cert': 'YmFyCg==',
'ssl_key': 'Zm9vYmFyCg==',
}
SHARED_DB_CONFIG = {
'database-user': 'adam',
'database': 'foodb',
}
SHARED_DB_RELATION_NAMESPACED = {
'db_host': 'bar',
'quantum_password': 'bar2'
}
SHARED_DB_RELATION_ACCESS_NETWORK = {
'db_host': 'dbserver.local',
'password': 'foo',
'access-network': '10.5.5.0/24',
'hostname': 'bar',
}
IDENTITY_SERVICE_RELATION_HTTP = {
'service_port': '5000',
'service_host': 'keystonehost.local',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'service_domain': 'admin_domain',
'service_tenant': 'admin',
'service_tenant_id': '123456',
'service_password': 'foo',
'service_username': 'adam',
'service_protocol': 'http',
'auth_protocol': 'http',
}
IDENTITY_SERVICE_RELATION_UNSET = {
'service_port': '5000',
'service_host': 'keystonehost.local',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'service_domain': 'admin_domain',
'service_tenant': 'admin',
'service_password': 'foo',
'service_username': 'adam',
}
IDENTITY_CREDENTIALS_RELATION_UNSET = {
'credentials_port': '5000',
'credentials_host': 'keystonehost.local',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'auth_protocol': 'https',
'domain': 'admin_domain',
'credentials_project': 'admin',
'credentials_project_id': '123456',
'credentials_password': 'foo',
'credentials_username': 'adam',
'credentials_protocol': 'https',
}
APIIDENTITY_SERVICE_RELATION_UNSET = {
'neutron-plugin-api:0': {
'neutron-api/0': {
'service_port': '5000',
'service_host': 'keystonehost.local',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'service_domain': 'admin_domain',
'service_tenant': 'admin',
'service_password': 'foo',
'service_username': 'adam',
}
}
}
IDENTITY_SERVICE_RELATION_HTTPS = {
'service_port': '5000',
'service_host': 'keystonehost.local',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'service_domain': 'admin_domain',
'service_tenant': 'admin',
'service_password': 'foo',
'service_username': 'adam',
'service_protocol': 'https',
'auth_protocol': 'https',
}
IDENTITY_SERVICE_RELATION_VERSIONED = {
'api_version': '3',
}
IDENTITY_SERVICE_RELATION_VERSIONED.update(IDENTITY_SERVICE_RELATION_HTTPS)
IDENTITY_CREDENTIALS_RELATION_VERSIONED = {
'api_version': '3',
'service_domain_id': '567890',
}
IDENTITY_CREDENTIALS_RELATION_VERSIONED.update(IDENTITY_CREDENTIALS_RELATION_UNSET)
POSTGRESQL_DB_RELATION = {
'host': 'dbserver.local',
'user': 'adam',
'password': 'foo',
}
POSTGRESQL_DB_CONFIG = {
'database': 'foodb',
}
IDENTITY_SERVICE_RELATION = {
'service_port': '5000',
'service_host': 'keystonehost.local',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'service_domain': 'admin_domain',
'service_tenant': 'admin',
'service_password': 'foo',
'service_username': 'adam',
}
AMQP_RELATION = {
'private-address': 'rabbithost',
'password': 'foobar',
'vip': '10.0.0.1',
}
AMQP_RELATION_ALT_RID = {
'amqp-alt:0': {
'rabbitmq-alt/0': {
'private-address': 'rabbitalthost1',
'password': 'flump',
},
}
}
AMQP_RELATION_WITH_SSL = {
'private-address': 'rabbithost',
'password': 'foobar',
'vip': '10.0.0.1',
'ssl_port': 5671,
'ssl_ca': 'cert',
'ha_queues': 'queues',
}
AMQP_AA_RELATION = {
'amqp:0': {
'rabbitmq/0': {
'private-address': 'rabbithost1',
'password': 'foobar',
},
'rabbitmq/1': {
'private-address': 'rabbithost2',
}
}
}
AMQP_CONFIG = {
'rabbit-user': 'adam',
'rabbit-vhost': 'foo',
}
AMQP_OSLO_CONFIG = {
'oslo-messaging-flags': ("rabbit_max_retries=1"
",rabbit_retry_backoff=1"
",rabbit_retry_interval=1")
}
AMQP_NOVA_CONFIG = {
'nova-rabbit-user': 'adam',
'nova-rabbit-vhost': 'foo',
}
HAPROXY_CONFIG = {
'haproxy-server-timeout': 50000,
'haproxy-client-timeout': 50000,
}
CEPH_RELATION = {
'ceph:0': {
'ceph/0': {
'private-address': 'ceph_node1',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'true'
},
'ceph/1': {
'private-address': 'ceph_node2',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'false'
},
}
}
CEPH_RELATION_WITH_PUBLIC_ADDR = {
'ceph:0': {
'ceph/0': {
'ceph-public-address': '192.168.1.10',
'private-address': 'ceph_node1',
'auth': 'foo',
'key': 'bar'
},
'ceph/1': {
'ceph-public-address': '192.168.1.11',
'private-address': 'ceph_node2',
'auth': 'foo',
'key': 'bar'
},
}
}
CEPH_REL_WITH_PUBLIC_ADDR_PORT = {
'ceph:0': {
'ceph/0': {
'ceph-public-address': '192.168.1.10:1234',
'private-address': 'ceph_node1',
'auth': 'foo',
'key': 'bar'
},
'ceph/1': {
'ceph-public-address': '192.168.1.11:4321',
'private-address': 'ceph_node2',
'auth': 'foo',
'key': 'bar'
},
}
}
CEPH_REL_WITH_PUBLIC_IPv6_ADDR = {
'ceph:0': {
'ceph/0': {
'ceph-public-address': '2001:5c0:9168::1',
'private-address': 'ceph_node1',
'auth': 'foo',
'key': 'bar'
},
'ceph/1': {
'ceph-public-address': '2001:5c0:9168::2',
'private-address': 'ceph_node2',
'auth': 'foo',
'key': 'bar'
},
}
}
CEPH_REL_WITH_PUBLIC_IPv6_ADDR_PORT = {
'ceph:0': {
'ceph/0': {
'ceph-public-address': '[2001:5c0:9168::1]:1234',
'private-address': 'ceph_node1',
'auth': 'foo',
'key': 'bar'
},
'ceph/1': {
'ceph-public-address': '[2001:5c0:9168::2]:4321',
'private-address': 'ceph_node2',
'auth': 'foo',
'key': 'bar'
},
}
}
CEPH_REL_WITH_MULTI_PUBLIC_ADDR = {
'ceph:0': {
'ceph/0': {
'ceph-public-address': '192.168.1.10 192.168.1.20',
'private-address': 'ceph_node1',
'auth': 'foo',
'key': 'bar'
},
'ceph/1': {
'ceph-public-address': '192.168.1.11 192.168.1.21',
'private-address': 'ceph_node2',
'auth': 'foo',
'key': 'bar'
},
}
}
CEPH_REL_WITH_DEFAULT_FEATURES = {
'ceph:0': {
'ceph/0': {
'private-address': 'ceph_node1',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'true',
'rbd-features': '1'
},
'ceph/1': {
'private-address': 'ceph_node2',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'false',
'rbd-features': '1'
},
}
}
IDENTITY_RELATION_NO_CERT = {
'identity-service:0': {
'keystone/0': {
'private-address': 'keystone1',
},
}
}
IDENTITY_RELATION_SINGLE_CERT = {
'identity-service:0': {
'keystone/0': {
'private-address': 'keystone1',
'ssl_cert_cinderhost1': 'certa',
'ssl_key_cinderhost1': 'keya',
},
}
}
IDENTITY_RELATION_MULTIPLE_CERT = {
'identity-service:0': {
'keystone/0': {
'private-address': 'keystone1',
'ssl_cert_cinderhost1-int-network': 'certa',
'ssl_key_cinderhost1-int-network': 'keya',
'ssl_cert_cinderhost1-pub-network': 'certa',
'ssl_key_cinderhost1-pub-network': 'keya',
'ssl_cert_cinderhost1-adm-network': 'certa',
'ssl_key_cinderhost1-adm-network': 'keya',
},
}
}
QUANTUM_NETWORK_SERVICE_RELATION = {
'quantum-network-service:0': {
'unit/0': {
'keystone_host': '10.5.0.1',
'service_port': '5000',
'auth_port': '20000',
'service_tenant': 'tenant',
'service_username': 'username',
'service_password': 'password',
'quantum_host': '10.5.0.2',
'quantum_port': '9696',
'quantum_url': 'http://10.5.0.2:9696/v2',
'region': 'aregion'
},
}
}
QUANTUM_NETWORK_SERVICE_RELATION_VERSIONED = {
'quantum-network-service:0': {
'unit/0': {
'keystone_host': '10.5.0.1',
'service_port': '5000',
'auth_port': '20000',
'service_tenant': 'tenant',
'service_username': 'username',
'service_password': 'password',
'quantum_host': '10.5.0.2',
'quantum_port': '9696',
'quantum_url': 'http://10.5.0.2:9696/v2',
'region': 'aregion',
'api_version': '3',
},
}
}
SUB_CONFIG = """
nova:
/etc/nova/nova.conf:
sections:
DEFAULT:
- [nova-key1, value1]
- [nova-key2, value2]
glance:
/etc/glance/glance.conf:
sections:
DEFAULT:
- [glance-key1, value1]
- [glance-key2, value2]
"""
NOVA_SUB_CONFIG1 = """
nova:
/etc/nova/nova.conf:
sections:
DEFAULT:
- [nova-key1, value1]
- [nova-key2, value2]
"""
NOVA_SUB_CONFIG2 = """
nova-compute:
/etc/nova/nova.conf:
sections:
DEFAULT:
- [nova-key3, value3]
- [nova-key4, value4]
"""
NOVA_SUB_CONFIG3 = """
nova-compute:
/etc/nova/nova.conf:
sections:
DEFAULT:
- [nova-key5, value5]
- [nova-key6, value6]
"""
CINDER_SUB_CONFIG1 = """
cinder:
/etc/cinder/cinder.conf:
sections:
cinder-1-section:
- [key1, value1]
"""
CINDER_SUB_CONFIG2 = """
cinder:
/etc/cinder/cinder.conf:
sections:
cinder-2-section:
- [key2, value2]
not-a-section:
1234
"""
SUB_CONFIG_RELATION = {
'nova-subordinate:0': {
'nova-subordinate/0': {
'private-address': 'nova_node1',
'subordinate_configuration': json.dumps(yaml.load(SUB_CONFIG)),
},
},
'glance-subordinate:0': {
'glance-subordinate/0': {
'private-address': 'glance_node1',
'subordinate_configuration': json.dumps(yaml.load(SUB_CONFIG)),
},
},
'foo-subordinate:0': {
'foo-subordinate/0': {
'private-address': 'foo_node1',
'subordinate_configuration': 'ea8e09324jkadsfh',
},
},
'cinder-subordinate:0': {
'cinder-subordinate/0': {
'private-address': 'cinder_node1',
'subordinate_configuration': json.dumps(
yaml.load(CINDER_SUB_CONFIG1)),
},
},
'cinder-subordinate:1': {
'cinder-subordinate/1': {
'private-address': 'cinder_node1',
'subordinate_configuration': json.dumps(
yaml.load(CINDER_SUB_CONFIG2)),
},
},
}
SUB_CONFIG_RELATION2 = {
'nova-ceilometer:6': {
'ceilometer-agent/0': {
'private-address': 'nova_node1',
'subordinate_configuration': json.dumps(
yaml.load(NOVA_SUB_CONFIG1)),
},
},
'neutron-plugin:3': {
'neutron-ovs-plugin/0': {
'private-address': 'nova_node1',
'subordinate_configuration': json.dumps(
yaml.load(NOVA_SUB_CONFIG2)),
},
},
'neutron-plugin:4': {
'neutron-other-plugin/0': {
'private-address': 'nova_node1',
'subordinate_configuration': json.dumps(
yaml.load(NOVA_SUB_CONFIG3)),
},
}
}
NONET_CONFIG = {
'vip': 'cinderhost1vip',
'os-internal-network': None,
'os-admin-network': None,
'os-public-network': None
}
FULLNET_CONFIG = {
'vip': '10.5.1.1 10.5.2.1 10.5.3.1',
'os-internal-network': "10.5.1.0/24",
'os-admin-network': "10.5.2.0/24",
'os-public-network': "10.5.3.0/24"
}
MACHINE_MACS = {
'eth0': 'fe:c5:ce:8e:2b:00',
'eth1': 'fe:c5:ce:8e:2b:01',
'eth2': 'fe:c5:ce:8e:2b:02',
'eth3': 'fe:c5:ce:8e:2b:03',
}
MACHINE_NICS = {
'eth0': ['192.168.0.1'],
'eth1': ['192.168.0.2'],
'eth2': [],
'eth3': [],
}
ABSENT_MACS = "aa:a5:ae:ae:ab:a4 "
# Imported in contexts.py and needs patching in setUp()
TO_PATCH = [
'b64decode',
'check_call',
'get_cert',
'get_ca_cert',
'install_ca_cert',
'log',
'config',
'relation_get',
'relation_ids',
'related_units',
'is_relation_made',
'relation_set',
'unit_get',
'https',
'determine_api_port',
'determine_apache_port',
'is_clustered',
'time',
'https',
'get_address_in_network',
'get_netmask_for_address',
'local_unit',
'get_ipv6_addr',
'mkdir',
'write_file',
'get_relation_ip',
'charm_name',
'sysctl_create',
'kv',
'pwgen',
'lsb_release',
'is_container',
'network_get_primary_address',
'resolve_address',
'is_ipv6_disabled',
]
class fake_config(object):
def __init__(self, data):
self.data = data
def __call__(self, attr):
if attr in self.data:
return self.data[attr]
return None
class fake_is_relation_made():
def __init__(self, relations):
self.relations = relations
def rel_made(self, relation):
return self.relations[relation]
class TestDB(object):
'''Test KV store for unitdata testing'''
def __init__(self):
self.data = {}
self.flushed = False
def get(self, key, default=None):
return self.data.get(key, default)
def set(self, key, value):
self.data[key] = value
return value
def flush(self):
self.flushed = True
class ContextTests(unittest.TestCase):
def setUp(self):
for m in TO_PATCH:
setattr(self, m, self._patch(m))
# mock at least a single relation + unit
self.relation_ids.return_value = ['foo:0']
self.related_units.return_value = ['foo/0']
self.local_unit.return_value = 'localunit'
self.kv.side_effect = TestDB
self.pwgen.return_value = 'testpassword'
self.lsb_release.return_value = {'DISTRIB_RELEASE': '16.04'}
self.is_container.return_value = False
self.network_get_primary_address.side_effect = NotImplementedError()
self.resolve_address.return_value = '10.5.1.50'
self.maxDiff = None
def _patch(self, method):
_m = patch('charmhelpers.contrib.openstack.context.' + method)
mock = _m.start()
self.addCleanup(_m.stop)
return mock
def test_base_class_not_implemented(self):
base = context.OSContextGenerator()
self.assertRaises(NotImplementedError, base)
@patch.object(context, 'get_os_codename_install_source')
def test_shared_db_context_with_data(self, os_codename):
'''Test shared-db context with all required data'''
os_codename.return_value = 'stein'
relation = FakeRelation(relation_data=SHARED_DB_RELATION)
self.relation_get.side_effect = relation.get
self.get_address_in_network.return_value = ''
self.config.side_effect = fake_config(SHARED_DB_CONFIG)
shared_db = context.SharedDBContext()
result = shared_db()
expected = {
'database_host': 'dbserver.local',
'database': 'foodb',
'database_user': 'adam',
'database_password': 'foo',
'database_type': 'mysql+pymysql',
}
self.assertEquals(result, expected)
def test_shared_db_context_with_data_and_access_net_mismatch(self):
"""Mismatch between hostname and hostname for access net - defers
execution"""
relation = FakeRelation(
relation_data=SHARED_DB_RELATION_ACCESS_NETWORK)
self.relation_get.side_effect = relation.get
self.get_address_in_network.return_value = '10.5.5.1'
self.config.side_effect = fake_config(SHARED_DB_CONFIG)
shared_db = context.SharedDBContext()
result = shared_db()
self.assertEquals(result, None)
self.relation_set.assert_called_with(
relation_settings={
'hostname': '10.5.5.1'})
@patch.object(context, 'get_os_codename_install_source')
def test_shared_db_context_with_data_and_access_net_match(self,
os_codename):
"""Correctly set hostname for access net returns complete context"""
os_codename.return_value = 'stein'
relation = FakeRelation(
relation_data=SHARED_DB_RELATION_ACCESS_NETWORK)
self.relation_get.side_effect = relation.get
self.get_address_in_network.return_value = 'bar'
self.config.side_effect = fake_config(SHARED_DB_CONFIG)
shared_db = context.SharedDBContext()
result = shared_db()
expected = {
'database_host': 'dbserver.local',
'database': 'foodb',
'database_user': 'adam',
'database_password': 'foo',
'database_type': 'mysql+pymysql',
}
self.assertEquals(result, expected)
@patch.object(context, 'get_os_codename_install_source')
def test_shared_db_context_explicit_relation_id(self, os_codename):
'''Test shared-db context setting the relation_id'''
os_codename.return_value = 'stein'
relation = FakeRelation(relation_data=SHARED_DB_RELATION_ALT_RID)
self.related_units.return_value = ['mysql-alt/0']
self.relation_get.side_effect = relation.get
self.get_address_in_network.return_value = ''
self.config.side_effect = fake_config(SHARED_DB_CONFIG)
shared_db = context.SharedDBContext(relation_id='mysql-alt:0')
result = shared_db()
expected = {
'database_host': 'dbserver-alt.local',
'database': 'foodb',
'database_user': 'adam',
'database_password': 'flump',
'database_type': 'mysql+pymysql',
}
self.assertEquals(result, expected)
@patch('os.path.exists')
@patch(open_builtin)
def test_db_ssl(self, _open, osexists):
osexists.return_value = False
ssl_dir = '/etc/dbssl'
db_ssl_ctxt = context.db_ssl(SHARED_DB_RELATION_SSL, {}, ssl_dir)
expected = {
'database_ssl_ca': ssl_dir + '/db-client.ca',
'database_ssl_cert': ssl_dir + '/db-client.cert',
'database_ssl_key': ssl_dir + '/db-client.key',
}
files = [
call(expected['database_ssl_ca'], 'wb'),
call(expected['database_ssl_cert'], 'wb'),
call(expected['database_ssl_key'], 'wb')
]
for f in files:
self.assertIn(f, _open.call_args_list)
self.assertEquals(db_ssl_ctxt, expected)
decode = [
call(SHARED_DB_RELATION_SSL['ssl_ca']),
call(SHARED_DB_RELATION_SSL['ssl_cert']),
call(SHARED_DB_RELATION_SSL['ssl_key'])
]
self.assertEquals(decode, self.b64decode.call_args_list)
def test_db_ssl_nossldir(self):
db_ssl_ctxt = context.db_ssl(SHARED_DB_RELATION_SSL, {}, None)
self.assertEquals(db_ssl_ctxt, {})
@patch.object(context, 'get_os_codename_install_source')
def test_shared_db_context_with_missing_relation(self, os_codename):
'''Test shared-db context missing relation data'''
os_codename.return_value = 'stein'
incomplete_relation = copy(SHARED_DB_RELATION)
incomplete_relation['password'] = None
relation = FakeRelation(relation_data=incomplete_relation)
self.relation_get.side_effect = relation.get
self.config.return_value = SHARED_DB_CONFIG
shared_db = context.SharedDBContext()
result = shared_db()
self.assertEquals(result, {})
def test_shared_db_context_with_missing_config(self):
'''Test shared-db context missing relation data'''
incomplete_config = copy(SHARED_DB_CONFIG)
del incomplete_config['database-user']
self.config.side_effect = fake_config(incomplete_config)
relation = FakeRelation(relation_data=SHARED_DB_RELATION)
self.relation_get.side_effect = relation.get
self.config.return_value = incomplete_config
shared_db = context.SharedDBContext()
self.assertRaises(context.OSContextError, shared_db)
@patch.object(context, 'get_os_codename_install_source')
def test_shared_db_context_with_params(self, os_codename):
'''Test shared-db context with object parameters'''
os_codename.return_value = 'stein'
shared_db = context.SharedDBContext(
database='quantum', user='quantum', relation_prefix='quantum')
relation = FakeRelation(relation_data=SHARED_DB_RELATION_NAMESPACED)
self.relation_get.side_effect = relation.get
result = shared_db()
self.assertIn(
call(rid='foo:0', unit='foo/0'),
self.relation_get.call_args_list)
self.assertEquals(
result, {'database': 'quantum',
'database_user': 'quantum',
'database_password': 'bar2',
'database_host': 'bar',
'database_type': 'mysql+pymysql'})
@patch.object(context, 'get_os_codename_install_source')
def test_shared_db_context_with_params_rocky(self, os_codename):
'''Test shared-db context with object parameters'''
os_codename.return_value = 'rocky'
shared_db = context.SharedDBContext(
database='quantum', user='quantum', relation_prefix='quantum')
relation = FakeRelation(relation_data=SHARED_DB_RELATION_NAMESPACED)
self.relation_get.side_effect = relation.get
result = shared_db()
self.assertIn(
call(rid='foo:0', unit='foo/0'),
self.relation_get.call_args_list)
self.assertEquals(
result, {'database': 'quantum',
'database_user': 'quantum',
'database_password': 'bar2',
'database_host': 'bar',
'database_type': 'mysql'})
@patch.object(context, 'get_os_codename_install_source')
@patch('charmhelpers.contrib.openstack.context.format_ipv6_addr')
def test_shared_db_context_with_ipv6(self, format_ipv6_addr, os_codename):
'''Test shared-db context with ipv6'''
shared_db = context.SharedDBContext(
database='quantum', user='quantum', relation_prefix='quantum')
os_codename.return_value = 'stein'
relation = FakeRelation(relation_data=SHARED_DB_RELATION_NAMESPACED)
self.relation_get.side_effect = relation.get
format_ipv6_addr.return_value = '[2001:db8:1::1]'
result = shared_db()
self.assertIn(
call(rid='foo:0', unit='foo/0'),
self.relation_get.call_args_list)
self.assertEquals(
result, {'database': 'quantum',
'database_user': 'quantum',
'database_password': 'bar2',
'database_host': '[2001:db8:1::1]',
'database_type': 'mysql+pymysql'})
def test_postgresql_db_context_with_data(self):
'''Test postgresql-db context with all required data'''
relation = FakeRelation(relation_data=POSTGRESQL_DB_RELATION)
self.relation_get.side_effect = relation.get
self.config.side_effect = fake_config(POSTGRESQL_DB_CONFIG)
postgresql_db = context.PostgresqlDBContext()
result = postgresql_db()
expected = {
'database_host': 'dbserver.local',
'database': 'foodb',
'database_user': 'adam',
'database_password': 'foo',
'database_type': 'postgresql',
}
self.assertEquals(result, expected)
def test_postgresql_db_context_with_missing_relation(self):
'''Test postgresql-db context missing relation data'''
incomplete_relation = copy(POSTGRESQL_DB_RELATION)
incomplete_relation['password'] = None
relation = FakeRelation(relation_data=incomplete_relation)
self.relation_get.side_effect = relation.get
self.config.return_value = POSTGRESQL_DB_CONFIG
postgresql_db = context.PostgresqlDBContext()
result = postgresql_db()
self.assertEquals(result, {})
def test_postgresql_db_context_with_missing_config(self):
'''Test postgresql-db context missing relation data'''
incomplete_config = copy(POSTGRESQL_DB_CONFIG)
del incomplete_config['database']
self.config.side_effect = fake_config(incomplete_config)
relation = FakeRelation(relation_data=POSTGRESQL_DB_RELATION)
self.relation_get.side_effect = relation.get
self.config.return_value = incomplete_config
postgresql_db = context.PostgresqlDBContext()
self.assertRaises(context.OSContextError, postgresql_db)
def test_postgresql_db_context_with_params(self):
'''Test postgresql-db context with object parameters'''
postgresql_db = context.PostgresqlDBContext(database='quantum')
result = postgresql_db()
self.assertEquals(result['database'], 'quantum')
def test_identity_service_context_with_data(self):
        '''Test identity-service context with all required data'''
relation = FakeRelation(relation_data=IDENTITY_SERVICE_RELATION_UNSET)
self.relation_get.side_effect = relation.get
identity_service = context.IdentityServiceContext()
result = identity_service()
expected = {
'admin_password': 'foo',
'admin_tenant_name': 'admin',
'admin_tenant_id': None,
'admin_domain_id': None,
'admin_user': 'adam',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'auth_protocol': 'http',
'service_host': 'keystonehost.local',
'service_port': '5000',
'service_protocol': 'http',
'api_version': '2.0',
}
self.assertEquals(result, expected)
def test_identity_credentials_context_with_data(self):
'''Test identity-credentials context with all required data'''
relation = FakeRelation(relation_data=IDENTITY_CREDENTIALS_RELATION_UNSET)
self.relation_get.side_effect = relation.get
identity_credentials = context.IdentityCredentialsContext()
result = identity_credentials()
expected = {
'admin_password': 'foo',
'admin_tenant_name': 'admin',
'admin_tenant_id': '123456',
'admin_user': 'adam',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'auth_protocol': 'https',
'service_host': 'keystonehost.local',
'service_port': '5000',
'service_protocol': 'https',
'api_version': '2.0',
}
self.assertEquals(result, expected)
def test_identity_service_context_with_altname(self):
'''Test identity context when using an explicit relation name'''
relation = FakeRelation(
relation_data=APIIDENTITY_SERVICE_RELATION_UNSET
)
self.relation_get.side_effect = relation.get
self.relation_ids.return_value = ['neutron-plugin-api:0']
self.related_units.return_value = ['neutron-api/0']
identity_service = context.IdentityServiceContext(
rel_name='neutron-plugin-api'
)
result = identity_service()
expected = {
'admin_password': 'foo',
'admin_tenant_name': 'admin',
'admin_tenant_id': None,
'admin_domain_id': None,
'admin_user': 'adam',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'auth_protocol': 'http',
'service_host': 'keystonehost.local',
'service_port': '5000',
'service_protocol': 'http',
'api_version': '2.0',
}
self.assertEquals(result, expected)
def test_identity_service_context_with_cache(self):
        '''Test identity-service context with signing cache info'''
relation = FakeRelation(relation_data=IDENTITY_SERVICE_RELATION_UNSET)
self.relation_get.side_effect = relation.get
svc = 'cinder'
identity_service = context.IdentityServiceContext(service=svc,
service_user=svc)
result = identity_service()
expected = {
'admin_password': 'foo',
'admin_tenant_name': 'admin',
'admin_tenant_id': None,
'admin_domain_id': None,
'admin_user': 'adam',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'auth_protocol': 'http',
'service_host': 'keystonehost.local',
'service_port': '5000',
'service_protocol': 'http',
'signing_dir': '/var/cache/cinder',
'api_version': '2.0',
}
self.assertTrue(self.mkdir.called)
self.assertEquals(result, expected)
def test_identity_service_context_with_data_http(self):
        '''Test identity-service context with all required data'''
relation = FakeRelation(relation_data=IDENTITY_SERVICE_RELATION_HTTP)
self.relation_get.side_effect = relation.get
identity_service = context.IdentityServiceContext()
result = identity_service()
expected = {
'admin_password': 'foo',
'admin_tenant_name': 'admin',
'admin_tenant_id': '123456',
'admin_domain_id': None,
'admin_user': 'adam',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'auth_protocol': 'http',
'service_host': 'keystonehost.local',
'service_port': '5000',
'service_protocol': 'http',
'api_version': '2.0',
}
self.assertEquals(result, expected)
def test_identity_service_context_with_data_https(self):
        '''Test identity-service context with all required data'''
relation = FakeRelation(relation_data=IDENTITY_SERVICE_RELATION_HTTPS)
self.relation_get.side_effect = relation.get
identity_service = context.IdentityServiceContext()
result = identity_service()
expected = {
'admin_password': 'foo',
'admin_tenant_name': 'admin',
'admin_tenant_id': None,
'admin_domain_id': None,
'admin_user': 'adam',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'auth_protocol': 'https',
'service_host': 'keystonehost.local',
'service_port': '5000',
'service_protocol': 'https',
'api_version': '2.0',
}
self.assertEquals(result, expected)
def test_identity_service_context_with_data_versioned(self):
        '''Test identity-service context with api version supplied from
        keystone'''
relation = FakeRelation(
relation_data=IDENTITY_SERVICE_RELATION_VERSIONED)
self.relation_get.side_effect = relation.get
identity_service = context.IdentityServiceContext()
result = identity_service()
expected = {
'admin_password': 'foo',
'admin_domain_name': 'admin_domain',
'admin_tenant_name': 'admin',
'admin_tenant_id': None,
'admin_domain_id': None,
'admin_user': 'adam',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'auth_protocol': 'https',
'service_host': 'keystonehost.local',
'service_port': '5000',
'service_protocol': 'https',
'api_version': '3',
}
self.assertEquals(result, expected)
def test_identity_credentials_context_with_data_versioned(self):
'''Test identity-credentials context with api version supplied from keystone'''
relation = FakeRelation(
relation_data=IDENTITY_CREDENTIALS_RELATION_VERSIONED)
self.relation_get.side_effect = relation.get
identity_credentials = context.IdentityCredentialsContext()
result = identity_credentials()
expected = {
'admin_password': 'foo',
'admin_domain_name': 'admin_domain',
'admin_tenant_name': 'admin',
'admin_tenant_id': '123456',
'admin_user': 'adam',
'auth_host': 'keystone-host.local',
'auth_port': '35357',
'auth_protocol': 'https',
'service_host': 'keystonehost.local',
'service_port': '5000',
'service_protocol': 'https',
'api_version': '3',
}
self.assertEquals(result, expected)
@patch('charmhelpers.contrib.openstack.context.format_ipv6_addr')
def test_identity_service_context_with_ipv6(self, format_ipv6_addr):
'''Test identity-service context with ipv6'''
relation = FakeRelation(relation_data=IDENTITY_SERVICE_RELATION_HTTP)
self.relation_get.side_effect = relation.get
format_ipv6_addr.return_value = '[2001:db8:1::1]'
identity_service = context.IdentityServiceContext()
result = identity_service()
expected = {
'admin_password': 'foo',
'admin_tenant_name': 'admin',
'admin_tenant_id': '123456',
'admin_domain_id': None,
'admin_user': 'adam',
'auth_host': '[2001:db8:1::1]',
'auth_port': '35357',
'auth_protocol': 'http',
'service_host': '[2001:db8:1::1]',
'service_port': '5000',
'service_protocol': 'http',
'api_version': '2.0',
}
self.assertEquals(result, expected)
def test_identity_service_context_with_missing_relation(self):
        '''Test identity-service context missing relation data'''
incomplete_relation = copy(IDENTITY_SERVICE_RELATION_UNSET)
incomplete_relation['service_password'] = None
relation = FakeRelation(relation_data=incomplete_relation)
self.relation_get.side_effect = relation.get
identity_service = context.IdentityServiceContext()
result = identity_service()
self.assertEquals(result, {})
def test_amqp_context_with_data(self):
'''Test amqp context with all required data'''
relation = FakeRelation(relation_data=AMQP_RELATION)
self.relation_get.side_effect = relation.get
self.config.return_value = AMQP_CONFIG
amqp = context.AMQPContext()
result = amqp()
expected = {
'rabbitmq_host': 'rabbithost',
'rabbitmq_password': 'foobar',
'rabbitmq_user': 'adam',
'rabbitmq_virtual_host': 'foo',
'transport_url': 'rabbit://adam:foobar@rabbithost:5672/foo'
}
self.assertEquals(result, expected)
def test_amqp_context_explicit_relation_id(self):
'''Test amqp context setting the relation_id'''
relation = FakeRelation(relation_data=AMQP_RELATION_ALT_RID)
self.relation_get.side_effect = relation.get
self.related_units.return_value = ['rabbitmq-alt/0']
self.config.return_value = AMQP_CONFIG
amqp = context.AMQPContext(relation_id='amqp-alt:0')
result = amqp()
expected = {
'rabbitmq_host': 'rabbitalthost1',
'rabbitmq_password': 'flump',
'rabbitmq_user': 'adam',
'rabbitmq_virtual_host': 'foo',
'transport_url': 'rabbit://adam:flump@rabbitalthost1:5672/foo'
}
self.assertEquals(result, expected)
def test_amqp_context_with_data_altname(self):
'''Test amqp context with alternative relation name'''
relation = FakeRelation(relation_data=AMQP_RELATION)
self.relation_get.side_effect = relation.get
self.config.return_value = AMQP_NOVA_CONFIG
amqp = context.AMQPContext(
rel_name='amqp-nova',
relation_prefix='nova')
result = amqp()
expected = {
'rabbitmq_host': 'rabbithost',
'rabbitmq_password': 'foobar',
'rabbitmq_user': 'adam',
'rabbitmq_virtual_host': 'foo',
'transport_url': 'rabbit://adam:foobar@rabbithost:5672/foo'
}
self.assertEquals(result, expected)
@patch(open_builtin)
def test_amqp_context_with_data_ssl(self, _open):
'''Test amqp context with all required data and ssl'''
relation = FakeRelation(relation_data=AMQP_RELATION_WITH_SSL)
self.relation_get.side_effect = relation.get
self.config.return_value = AMQP_CONFIG
ssl_dir = '/etc/sslamqp'
amqp = context.AMQPContext(ssl_dir=ssl_dir)
result = amqp()
expected = {
'rabbitmq_host': 'rabbithost',
'rabbitmq_password': 'foobar',
'rabbitmq_user': 'adam',
'rabbit_ssl_port': 5671,
'rabbitmq_virtual_host': 'foo',
'rabbit_ssl_ca': ssl_dir + '/rabbit-client-ca.pem',
'rabbitmq_ha_queues': True,
'transport_url': 'rabbit://adam:foobar@rabbithost:5671/foo'
}
_open.assert_called_once_with(ssl_dir + '/rabbit-client-ca.pem', 'wb')
self.assertEquals(result, expected)
self.assertEquals([call(AMQP_RELATION_WITH_SSL['ssl_ca'])],
self.b64decode.call_args_list)
def test_amqp_context_with_data_ssl_noca(self):
'''Test amqp context with all required data with ssl but missing ca'''
relation = FakeRelation(relation_data=AMQP_RELATION_WITH_SSL)
self.relation_get.side_effect = relation.get
self.config.return_value = AMQP_CONFIG
amqp = context.AMQPContext()
result = amqp()
expected = {
'rabbitmq_host': 'rabbithost',
'rabbitmq_password': 'foobar',
'rabbitmq_user': 'adam',
'rabbit_ssl_port': 5671,
'rabbitmq_virtual_host': 'foo',
'rabbit_ssl_ca': 'cert',
'rabbitmq_ha_queues': True,
'transport_url': 'rabbit://adam:foobar@rabbithost:5671/foo'
}
self.assertEquals(result, expected)
def test_amqp_context_with_data_clustered(self):
'''Test amqp context with all required data with clustered rabbit'''
relation_data = copy(AMQP_RELATION)
relation_data['clustered'] = 'yes'
relation = FakeRelation(relation_data=relation_data)
self.relation_get.side_effect = relation.get
self.config.return_value = AMQP_CONFIG
amqp = context.AMQPContext()
result = amqp()
expected = {
'clustered': True,
'rabbitmq_host': relation_data['vip'],
'rabbitmq_password': 'foobar',
'rabbitmq_user': 'adam',
'rabbitmq_virtual_host': 'foo',
'transport_url': 'rabbit://adam:[email protected]:5672/foo'
}
self.assertEquals(result, expected)
def test_amqp_context_with_data_active_active(self):
'''Test amqp context with required data with active/active rabbit'''
relation_data = copy(AMQP_AA_RELATION)
relation = FakeRelation(relation_data=relation_data)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
self.config.return_value = AMQP_CONFIG
amqp = context.AMQPContext()
result = amqp()
expected = {
'rabbitmq_host': 'rabbithost1',
'rabbitmq_password': 'foobar',
'rabbitmq_user': 'adam',
'rabbitmq_virtual_host': 'foo',
'rabbitmq_hosts': 'rabbithost1,rabbithost2',
'transport_url': ('rabbit://adam:foobar@rabbithost1:5672'
',adam:foobar@rabbithost2:5672/foo')
}
self.assertEquals(result, expected)
def test_amqp_context_with_missing_relation(self):
'''Test amqp context missing relation data'''
incomplete_relation = copy(AMQP_RELATION)
incomplete_relation['password'] = ''
relation = FakeRelation(relation_data=incomplete_relation)
self.relation_get.side_effect = relation.get
self.config.return_value = AMQP_CONFIG
amqp = context.AMQPContext()
result = amqp()
self.assertEquals({}, result)
def test_amqp_context_with_missing_config(self):
        '''Test amqp context missing config data'''
incomplete_config = copy(AMQP_CONFIG)
del incomplete_config['rabbit-user']
relation = FakeRelation(relation_data=AMQP_RELATION)
self.relation_get.side_effect = relation.get
self.config.return_value = incomplete_config
amqp = context.AMQPContext()
self.assertRaises(context.OSContextError, amqp)
@patch('charmhelpers.contrib.openstack.context.format_ipv6_addr')
def test_amqp_context_with_ipv6(self, format_ipv6_addr):
'''Test amqp context with ipv6'''
relation_data = copy(AMQP_AA_RELATION)
relation = FakeRelation(relation_data=relation_data)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
format_ipv6_addr.return_value = '[2001:db8:1::1]'
self.config.return_value = AMQP_CONFIG
amqp = context.AMQPContext()
result = amqp()
expected = {
'rabbitmq_host': '[2001:db8:1::1]',
'rabbitmq_password': 'foobar',
'rabbitmq_user': 'adam',
'rabbitmq_virtual_host': 'foo',
'rabbitmq_hosts': '[2001:db8:1::1],[2001:db8:1::1]',
'transport_url': ('rabbit://adam:foobar@[2001:db8:1::1]:5672'
',adam:foobar@[2001:db8:1::1]:5672/foo')
}
self.assertEquals(result, expected)
def test_amqp_context_with_oslo_messaging(self):
"""Test amqp context with oslo-messaging-flags option"""
relation = FakeRelation(relation_data=AMQP_RELATION)
self.relation_get.side_effect = relation.get
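        # Fold the base AMQP settings into the oslo-messaging flavoured
        # config used by this test.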
AMQP_OSLO_CONFIG.update(AMQP_CONFIG)
self.config.return_value = AMQP_OSLO_CONFIG
amqp = context.AMQPContext()
result = amqp()
expected = {
'rabbitmq_host': 'rabbithost',
'rabbitmq_password': 'foobar',
'rabbitmq_user': 'adam',
'rabbitmq_virtual_host': 'foo',
'oslo_messaging_flags': {
'rabbit_max_retries': '1',
'rabbit_retry_backoff': '1',
'rabbit_retry_interval': '1'
},
'transport_url': 'rabbit://adam:foobar@rabbithost:5672/foo'
}
self.assertEquals(result, expected)
def test_libvirt_config_flags(self):
self.config.side_effect = fake_config({
'libvirt-flags': 'iscsi_use_multipath=True,chap_auth=False',
})
results = context.LibvirtConfigFlagsContext()()
self.assertEquals(results, {
'libvirt_flags': {
'chap_auth': 'False',
'iscsi_use_multipath': 'True'
}
})
def test_ceph_no_relids(self):
        '''Test empty ceph relation'''
relation = FakeRelation(relation_data={})
self.relation_ids.side_effect = relation.get
ceph = context.CephContext()
result = ceph()
self.assertEquals(result, {})
@patch.object(context, 'config')
@patch('os.path.isdir')
@patch('os.mkdir')
@patch.object(context, 'ensure_packages')
def test_ceph_context_with_data(self, ensure_packages, mkdir, isdir,
mock_config):
'''Test ceph context with all relation data'''
config_dict = {'use-syslog': True}
def fake_config(key):
return config_dict.get(key)
mock_config.side_effect = fake_config
isdir.return_value = False
relation = FakeRelation(relation_data=CEPH_RELATION)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
ceph = context.CephContext()
result = ceph()
expected = {
'mon_hosts': 'ceph_node1 ceph_node2',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'true',
}
self.assertEquals(result, expected)
ensure_packages.assert_called_with(['ceph-common'])
mkdir.assert_called_with('/etc/ceph')
@patch('os.mkdir')
@patch.object(context, 'ensure_packages')
def test_ceph_context_with_missing_data(self, ensure_packages, mkdir):
'''Test ceph context with missing relation data'''
relation = deepcopy(CEPH_RELATION)
for k, v in six.iteritems(relation):
for u in six.iterkeys(v):
del relation[k][u]['auth']
relation = FakeRelation(relation_data=relation)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
ceph = context.CephContext()
result = ceph()
self.assertEquals(result, {})
self.assertFalse(ensure_packages.called)
@patch.object(context, 'config')
@patch('os.path.isdir')
@patch('os.mkdir')
@patch.object(context, 'ensure_packages')
def test_ceph_context_partial_missing_data(self, ensure_packages, mkdir,
isdir, config):
'''Test ceph context last unit missing data
        Tests a fix for a previous bug where only the config from the
        last unit was returned, so a valid value supplied by an earlier
        unit was ignored.'''
config.side_effect = fake_config({'use-syslog': 'True'})
relation = deepcopy(CEPH_RELATION)
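        # Drop 'auth' from the last unit only; earlier units still supply
        # it, so the context should fall back to their values.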
for k, v in six.iteritems(relation):
last_unit = sorted(six.iterkeys(v))[-1]
unit_data = relation[k][last_unit]
del unit_data['auth']
relation[k][last_unit] = unit_data
relation = FakeRelation(relation_data=relation)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
ceph = context.CephContext()
result = ceph()
expected = {
'mon_hosts': 'ceph_node1 ceph_node2',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'true',
}
self.assertEquals(result, expected)
@patch.object(context, 'config')
@patch('os.path.isdir')
@patch('os.mkdir')
@patch.object(context, 'ensure_packages')
def test_ceph_context_with_public_addr(
self, ensure_packages, mkdir, isdir, mock_config):
        '''Test ceph context on a host with multiple networks with all
        relation data'''
isdir.return_value = False
config_dict = {'use-syslog': True}
def fake_config(key):
return config_dict.get(key)
mock_config.side_effect = fake_config
relation = FakeRelation(relation_data=CEPH_RELATION_WITH_PUBLIC_ADDR)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
ceph = context.CephContext()
result = ceph()
expected = {
'mon_hosts': '192.168.1.10 192.168.1.11',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'true',
}
self.assertEquals(result, expected)
ensure_packages.assert_called_with(['ceph-common'])
mkdir.assert_called_with('/etc/ceph')
@patch.object(context, 'config')
@patch('os.path.isdir')
@patch('os.mkdir')
@patch.object(context, 'ensure_packages')
def test_ceph_context_with_public_addr_and_port(
self, ensure_packages, mkdir, isdir, mock_config):
        '''Test ceph context on a host with multiple networks with all
        relation data'''
isdir.return_value = False
config_dict = {'use-syslog': True}
def fake_config(key):
return config_dict.get(key)
mock_config.side_effect = fake_config
relation = FakeRelation(relation_data=CEPH_REL_WITH_PUBLIC_ADDR_PORT)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
ceph = context.CephContext()
result = ceph()
expected = {
'mon_hosts': '192.168.1.10:1234 192.168.1.11:4321',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'true',
}
self.assertEquals(result, expected)
ensure_packages.assert_called_with(['ceph-common'])
mkdir.assert_called_with('/etc/ceph')
@patch.object(context, 'config')
@patch('os.path.isdir')
@patch('os.mkdir')
@patch.object(context, 'ensure_packages')
def test_ceph_context_with_public_ipv6_addr(self, ensure_packages, mkdir,
isdir, mock_config):
        '''Test ceph context on a host with multiple networks with all
        relation data'''
isdir.return_value = False
config_dict = {'use-syslog': True}
def fake_config(key):
return config_dict.get(key)
mock_config.side_effect = fake_config
relation = FakeRelation(relation_data=CEPH_REL_WITH_PUBLIC_IPv6_ADDR)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
ceph = context.CephContext()
result = ceph()
expected = {
'mon_hosts': '[2001:5c0:9168::1] [2001:5c0:9168::2]',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'true',
}
self.assertEquals(result, expected)
ensure_packages.assert_called_with(['ceph-common'])
mkdir.assert_called_with('/etc/ceph')
@patch.object(context, 'config')
@patch('os.path.isdir')
@patch('os.mkdir')
@patch.object(context, 'ensure_packages')
def test_ceph_context_with_public_ipv6_addr_port(
self, ensure_packages, mkdir, isdir, mock_config):
        '''Test ceph context on a host with multiple networks with all
        relation data'''
isdir.return_value = False
config_dict = {'use-syslog': True}
def fake_config(key):
return config_dict.get(key)
mock_config.side_effect = fake_config
relation = FakeRelation(
relation_data=CEPH_REL_WITH_PUBLIC_IPv6_ADDR_PORT)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
ceph = context.CephContext()
result = ceph()
expected = {
'mon_hosts': '[2001:5c0:9168::1]:1234 [2001:5c0:9168::2]:4321',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'true',
}
self.assertEquals(result, expected)
ensure_packages.assert_called_with(['ceph-common'])
mkdir.assert_called_with('/etc/ceph')
@patch.object(context, 'config')
@patch('os.path.isdir')
@patch('os.mkdir')
@patch.object(context, 'ensure_packages')
def test_ceph_context_with_multi_public_addr(
self, ensure_packages, mkdir, isdir, mock_config):
        '''Test ceph context on a host with multiple networks with all
        relation data'''
isdir.return_value = False
config_dict = {'use-syslog': True}
def fake_config(key):
return config_dict.get(key)
mock_config.side_effect = fake_config
relation = FakeRelation(relation_data=CEPH_REL_WITH_MULTI_PUBLIC_ADDR)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
ceph = context.CephContext()
result = ceph()
expected = {
'mon_hosts': '192.168.1.10 192.168.1.11 192.168.1.20 192.168.1.21',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'true',
}
self.assertEquals(result, expected)
ensure_packages.assert_called_with(['ceph-common'])
mkdir.assert_called_with('/etc/ceph')
@patch.object(context, 'config')
@patch('os.path.isdir')
@patch('os.mkdir')
@patch.object(context, 'ensure_packages')
def test_ceph_context_with_default_features(
self, ensure_packages, mkdir, isdir, mock_config):
        '''Test ceph context with rbd default features in relation data'''
isdir.return_value = False
config_dict = {'use-syslog': True}
def fake_config(key):
return config_dict.get(key)
mock_config.side_effect = fake_config
relation = FakeRelation(relation_data=CEPH_REL_WITH_DEFAULT_FEATURES)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
ceph = context.CephContext()
result = ceph()
expected = {
'mon_hosts': 'ceph_node1 ceph_node2',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'true',
'rbd_features': '1',
}
self.assertEquals(result, expected)
ensure_packages.assert_called_with(['ceph-common'])
mkdir.assert_called_with('/etc/ceph')
@patch.object(context, 'config')
@patch('os.path.isdir')
@patch('os.mkdir')
@patch.object(context, 'ensure_packages')
def test_ceph_context_with_rbd_cache(self, ensure_packages, mkdir, isdir,
mock_config):
isdir.return_value = False
config_dict = {'rbd-client-cache': 'enabled',
'use-syslog': False}
def fake_config(key):
return config_dict.get(key)
mock_config.side_effect = fake_config
relation = FakeRelation(relation_data=CEPH_RELATION_WITH_PUBLIC_ADDR)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
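        # Local subclass illustrating how a charm can layer rbd client cache
        # settings on top of the stock CephContext output.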
class CephContextWithRBDCache(context.CephContext):
def __call__(self):
ctxt = super(CephContextWithRBDCache, self).__call__()
rbd_cache = fake_config('rbd-client-cache') or ""
if rbd_cache.lower() == "enabled":
ctxt['rbd_client_cache_settings'] = \
{'rbd cache': 'true',
'rbd cache writethrough until flush': 'true'}
elif rbd_cache.lower() == "disabled":
ctxt['rbd_client_cache_settings'] = \
{'rbd cache': 'false'}
return ctxt
ceph = CephContextWithRBDCache()
result = ceph()
expected = {
'mon_hosts': '192.168.1.10 192.168.1.11',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'false',
}
expected['rbd_client_cache_settings'] = \
{'rbd cache': 'true',
'rbd cache writethrough until flush': 'true'}
self.assertDictEqual(result, expected)
ensure_packages.assert_called_with(['ceph-common'])
mkdir.assert_called_with('/etc/ceph')
@patch.object(context, 'config')
def test_sysctl_context_with_config(self, config):
self.charm_name.return_value = 'test-charm'
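        # The sysctl option is a YAML-style mapping string which should be
        # handed to sysctl_create unchanged.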
config.return_value = '{ kernel.max_pid: "1337"}'
self.sysctl_create.return_value = True
ctxt = context.SysctlContext()
result = ctxt()
self.sysctl_create.assert_called_with(
config.return_value,
"/etc/sysctl.d/50-test-charm.conf")
        self.assertEqual(result, {'sysctl': config.return_value})
@patch.object(context, 'config')
def test_sysctl_context_without_config(self, config):
self.charm_name.return_value = 'test-charm'
config.return_value = None
self.sysctl_create.return_value = True
ctxt = context.SysctlContext()
result = ctxt()
        self.assertFalse(self.sysctl_create.called)
        self.assertEqual(result, {'sysctl': config.return_value})
@patch.object(context, 'config')
@patch('os.path.isdir')
@patch('os.mkdir')
@patch.object(context, 'ensure_packages')
def test_ceph_context_missing_public_addr(
self, ensure_packages, mkdir, isdir, mock_config):
        '''Test ceph context on a host with multiple networks with no
        ceph-public-addr in relation data'''
isdir.return_value = False
config_dict = {'use-syslog': True}
def fake_config(key):
return config_dict.get(key)
mock_config.side_effect = fake_config
relation = deepcopy(CEPH_RELATION_WITH_PUBLIC_ADDR)
del relation['ceph:0']['ceph/0']['ceph-public-address']
relation = FakeRelation(relation_data=relation)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
ceph = context.CephContext()
result = ceph()
expected = {
'mon_hosts': '192.168.1.11 ceph_node1',
'auth': 'foo',
'key': 'bar',
'use_syslog': 'true',
}
self.assertEquals(result, expected)
ensure_packages.assert_called_with(['ceph-common'])
mkdir.assert_called_with('/etc/ceph')
@patch('charmhelpers.contrib.openstack.context.unit_get')
@patch('charmhelpers.contrib.openstack.context.local_unit')
def test_haproxy_context_with_data(self, local_unit, unit_get):
'''Test haproxy context with all relation data'''
cluster_relation = {
'cluster:0': {
'peer/1': {
'private-address': 'cluster-peer1.localnet',
},
'peer/2': {
'private-address': 'cluster-peer2.localnet',
},
},
}
local_unit.return_value = 'peer/0'
# We are only using get_relation_ip.
# Setup the values it returns on each subsequent call.
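        # The four values map to the admin, internal, public and cluster
        # bindings, matching the get_relation_ip assertions below.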
self.get_relation_ip.side_effect = [None, None, None,
'cluster-peer0.localnet']
relation = FakeRelation(cluster_relation)
self.relation_ids.side_effect = relation.relation_ids
self.relation_get.side_effect = relation.get
self.related_units.side_effect = relation.relation_units
self.get_netmask_for_address.return_value = '255.255.0.0'
self.config.return_value = False
self.maxDiff = None
self.is_ipv6_disabled.return_value = True
haproxy = context.HAProxyContext()
with patch_open() as (_open, _file):
result = haproxy()
ex = {
'frontends': {
'cluster-peer0.localnet': {
'network': 'cluster-peer0.localnet/255.255.0.0',
'backends': collections.OrderedDict([
('peer-0', 'cluster-peer0.localnet'),
('peer-1', 'cluster-peer1.localnet'),
('peer-2', 'cluster-peer2.localnet'),
]),
},
},
'default_backend': 'cluster-peer0.localnet',
'local_host': '127.0.0.1',
'haproxy_host': '0.0.0.0',
'ipv6_enabled': False,
'stat_password': 'testpassword',
'stat_port': '8888',
}
# the context gets generated.
self.assertEquals(ex, result)
# and /etc/default/haproxy is updated.
self.assertEquals(_file.write.call_args_list,
[call('ENABLED=1\n')])
self.get_relation_ip.assert_has_calls([call('admin', False),
call('internal', False),
call('public', False),
call('cluster')])
@patch('charmhelpers.contrib.openstack.context.unit_get')
@patch('charmhelpers.contrib.openstack.context.local_unit')
def test_haproxy_context_with_data_timeout(self, local_unit, unit_get):
'''Test haproxy context with all relation data and timeout'''
cluster_relation = {
'cluster:0': {
'peer/1': {
'private-address': 'cluster-peer1.localnet',
},
'peer/2': {
'private-address': 'cluster-peer2.localnet',
},
},
}
local_unit.return_value = 'peer/0'
# We are only using get_relation_ip.
# Setup the values it returns on each subsequent call.
self.get_relation_ip.side_effect = [None, None, None,
'cluster-peer0.localnet']
relation = FakeRelation(cluster_relation)
self.relation_ids.side_effect = relation.relation_ids
self.relation_get.side_effect = relation.get
self.related_units.side_effect = relation.relation_units
self.get_netmask_for_address.return_value = '255.255.0.0'
self.config.return_value = False
self.maxDiff = None
c = fake_config(HAPROXY_CONFIG)
c.data['prefer-ipv6'] = False
self.config.side_effect = c
self.is_ipv6_disabled.return_value = True
haproxy = context.HAProxyContext()
with patch_open() as (_open, _file):
result = haproxy()
ex = {
'frontends': {
'cluster-peer0.localnet': {
'network': 'cluster-peer0.localnet/255.255.0.0',
'backends': collections.OrderedDict([
('peer-0', 'cluster-peer0.localnet'),
('peer-1', 'cluster-peer1.localnet'),
('peer-2', 'cluster-peer2.localnet'),
]),
}
},
'default_backend': 'cluster-peer0.localnet',
'local_host': '127.0.0.1',
'haproxy_host': '0.0.0.0',
'ipv6_enabled': False,
'stat_password': 'testpassword',
'stat_port': '8888',
'haproxy_client_timeout': 50000,
'haproxy_server_timeout': 50000,
}
# the context gets generated.
self.assertEquals(ex, result)
# and /etc/default/haproxy is updated.
self.assertEquals(_file.write.call_args_list,
[call('ENABLED=1\n')])
self.get_relation_ip.assert_has_calls([call('admin', None),
call('internal', None),
call('public', None),
call('cluster')])
@patch('charmhelpers.contrib.openstack.context.unit_get')
@patch('charmhelpers.contrib.openstack.context.local_unit')
def test_haproxy_context_with_data_multinet(self, local_unit, unit_get):
'''Test haproxy context with all relation data for network splits'''
cluster_relation = {
'cluster:0': {
'peer/1': {
'private-address': 'cluster-peer1.localnet',
'admin-address': 'cluster-peer1.admin',
'internal-address': 'cluster-peer1.internal',
'public-address': 'cluster-peer1.public',
},
'peer/2': {
'private-address': 'cluster-peer2.localnet',
'admin-address': 'cluster-peer2.admin',
'internal-address': 'cluster-peer2.internal',
'public-address': 'cluster-peer2.public',
},
},
}
local_unit.return_value = 'peer/0'
relation = FakeRelation(cluster_relation)
self.relation_ids.side_effect = relation.relation_ids
self.relation_get.side_effect = relation.get
self.related_units.side_effect = relation.relation_units
# We are only using get_relation_ip.
# Setup the values it returns on each subsequent call.
self.get_relation_ip.side_effect = ['cluster-peer0.admin',
'cluster-peer0.internal',
'cluster-peer0.public',
'cluster-peer0.localnet']
self.get_netmask_for_address.return_value = '255.255.0.0'
self.config.return_value = False
self.maxDiff = None
self.is_ipv6_disabled.return_value = True
haproxy = context.HAProxyContext()
with patch_open() as (_open, _file):
result = haproxy()
ex = {
'frontends': {
'cluster-peer0.admin': {
'network': 'cluster-peer0.admin/255.255.0.0',
'backends': collections.OrderedDict([
('peer-0', 'cluster-peer0.admin'),
('peer-1', 'cluster-peer1.admin'),
('peer-2', 'cluster-peer2.admin'),
]),
},
'cluster-peer0.internal': {
'network': 'cluster-peer0.internal/255.255.0.0',
'backends': collections.OrderedDict([
('peer-0', 'cluster-peer0.internal'),
('peer-1', 'cluster-peer1.internal'),
('peer-2', 'cluster-peer2.internal'),
]),
},
'cluster-peer0.public': {
'network': 'cluster-peer0.public/255.255.0.0',
'backends': collections.OrderedDict([
('peer-0', 'cluster-peer0.public'),
('peer-1', 'cluster-peer1.public'),
('peer-2', 'cluster-peer2.public'),
]),
},
'cluster-peer0.localnet': {
'network': 'cluster-peer0.localnet/255.255.0.0',
'backends': collections.OrderedDict([
('peer-0', 'cluster-peer0.localnet'),
('peer-1', 'cluster-peer1.localnet'),
('peer-2', 'cluster-peer2.localnet'),
]),
}
},
'default_backend': 'cluster-peer0.localnet',
'local_host': '127.0.0.1',
'haproxy_host': '0.0.0.0',
'ipv6_enabled': False,
'stat_password': 'testpassword',
'stat_port': '8888',
}
# the context gets generated.
self.assertEquals(ex, result)
# and /etc/default/haproxy is updated.
self.assertEquals(_file.write.call_args_list,
[call('ENABLED=1\n')])
self.get_relation_ip.assert_has_calls([call('admin', False),
call('internal', False),
call('public', False),
call('cluster')])
@patch('charmhelpers.contrib.openstack.context.unit_get')
@patch('charmhelpers.contrib.openstack.context.local_unit')
def test_haproxy_context_with_data_public_only(self, local_unit, unit_get):
        '''Test haproxy context with openstack-dashboard public-only binding'''
cluster_relation = {
'cluster:0': {
'peer/1': {
'private-address': 'cluster-peer1.localnet',
'public-address': 'cluster-peer1.public',
},
'peer/2': {
'private-address': 'cluster-peer2.localnet',
'public-address': 'cluster-peer2.public',
},
},
}
local_unit.return_value = 'peer/0'
relation = FakeRelation(cluster_relation)
self.relation_ids.side_effect = relation.relation_ids
self.relation_get.side_effect = relation.get
self.related_units.side_effect = relation.relation_units
# We are only using get_relation_ip.
# Setup the values it returns on each subsequent call.
_network_get_map = {
'public': 'cluster-peer0.public',
'cluster': 'cluster-peer0.localnet',
}
self.get_relation_ip.side_effect = (
lambda binding, config_opt=None:
_network_get_map[binding]
)
self.get_netmask_for_address.return_value = '255.255.0.0'
self.config.return_value = None
self.maxDiff = None
self.is_ipv6_disabled.return_value = True
haproxy = context.HAProxyContext(address_types=['public'])
with patch_open() as (_open, _file):
result = haproxy()
ex = {
'frontends': {
'cluster-peer0.public': {
'network': 'cluster-peer0.public/255.255.0.0',
'backends': collections.OrderedDict([
('peer-0', 'cluster-peer0.public'),
('peer-1', 'cluster-peer1.public'),
('peer-2', 'cluster-peer2.public'),
]),
},
'cluster-peer0.localnet': {
'network': 'cluster-peer0.localnet/255.255.0.0',
'backends': collections.OrderedDict([
('peer-0', 'cluster-peer0.localnet'),
('peer-1', 'cluster-peer1.localnet'),
('peer-2', 'cluster-peer2.localnet'),
]),
}
},
'default_backend': 'cluster-peer0.localnet',
'local_host': '127.0.0.1',
'haproxy_host': '0.0.0.0',
'ipv6_enabled': False,
'stat_password': 'testpassword',
'stat_port': '8888',
}
# the context gets generated.
self.assertEquals(ex, result)
# and /etc/default/haproxy is updated.
self.assertEquals(_file.write.call_args_list,
[call('ENABLED=1\n')])
self.get_relation_ip.assert_has_calls([call('public', None),
call('cluster')])
@patch('charmhelpers.contrib.openstack.context.unit_get')
@patch('charmhelpers.contrib.openstack.context.local_unit')
def test_haproxy_context_with_data_ipv6(self, local_unit, unit_get):
'''Test haproxy context with all relation data ipv6'''
cluster_relation = {
'cluster:0': {
'peer/1': {
'private-address': 'cluster-peer1.localnet',
},
'peer/2': {
'private-address': 'cluster-peer2.localnet',
},
},
}
local_unit.return_value = 'peer/0'
# We are only using get_relation_ip.
# Setup the values it returns on each subsequent call.
self.get_relation_ip.side_effect = [None, None, None,
'cluster-peer0.localnet']
relation = FakeRelation(cluster_relation)
self.relation_ids.side_effect = relation.relation_ids
self.relation_get.side_effect = relation.get
self.related_units.side_effect = relation.relation_units
self.get_address_in_network.return_value = None
self.get_netmask_for_address.return_value = \
'FFFF:FFFF:FFFF:FFFF:0000:0000:0000:0000'
self.get_ipv6_addr.return_value = ['cluster-peer0.localnet']
c = fake_config(HAPROXY_CONFIG)
c.data['prefer-ipv6'] = True
self.config.side_effect = c
self.maxDiff = None
self.is_ipv6_disabled.return_value = False
haproxy = context.HAProxyContext()
with patch_open() as (_open, _file):
result = haproxy()
ex = {
'frontends': {
'cluster-peer0.localnet': {
'network': 'cluster-peer0.localnet/'
'FFFF:FFFF:FFFF:FFFF:0000:0000:0000:0000',
'backends': collections.OrderedDict([
('peer-0', 'cluster-peer0.localnet'),
('peer-1', 'cluster-peer1.localnet'),
('peer-2', 'cluster-peer2.localnet'),
]),
}
},
'default_backend': 'cluster-peer0.localnet',
'local_host': 'ip6-localhost',
'haproxy_server_timeout': 50000,
'haproxy_client_timeout': 50000,
'haproxy_host': '::',
'ipv6_enabled': True,
'stat_password': 'testpassword',
'stat_port': '8888',
}
# the context gets generated.
self.assertEquals(ex, result)
# and /etc/default/haproxy is updated.
self.assertEquals(_file.write.call_args_list,
[call('ENABLED=1\n')])
self.get_relation_ip.assert_has_calls([call('admin', None),
call('internal', None),
call('public', None),
call('cluster')])
def test_haproxy_context_with_missing_data(self):
'''Test haproxy context with missing relation data'''
self.relation_ids.return_value = []
haproxy = context.HAProxyContext()
self.assertEquals({}, haproxy())
@patch('charmhelpers.contrib.openstack.context.unit_get')
@patch('charmhelpers.contrib.openstack.context.local_unit')
def test_haproxy_context_with_no_peers(self, local_unit, unit_get):
'''Test haproxy context with single unit'''
        # A peer relation is always present, even when the unit is alone;
        # with no real peers this should be an incomplete context.
cluster_relation = {
'cluster:0': {
'peer/0': {
'private-address': 'lonely.clusterpeer.howsad',
},
},
}
local_unit.return_value = 'peer/0'
# We are only using get_relation_ip.
# Setup the values it returns on each subsequent call.
self.get_relation_ip.side_effect = [None, None, None, None]
relation = FakeRelation(cluster_relation)
self.relation_ids.side_effect = relation.relation_ids
self.relation_get.side_effect = relation.get
self.related_units.side_effect = relation.relation_units
self.config.return_value = False
haproxy = context.HAProxyContext()
self.assertEquals({}, haproxy())
self.get_relation_ip.assert_has_calls([call('admin', False),
call('internal', False),
call('public', False),
call('cluster')])
@patch('charmhelpers.contrib.openstack.context.unit_get')
@patch('charmhelpers.contrib.openstack.context.local_unit')
def test_haproxy_context_with_net_override(self, local_unit, unit_get):
        '''Test haproxy context with network override config options'''
        # A peer relation is always present, even when the unit is alone;
        # with no real peers this should be an incomplete context.
cluster_relation = {
'cluster:0': {
'peer/0': {
'private-address': 'lonely.clusterpeer.howsad',
},
},
}
local_unit.return_value = 'peer/0'
# We are only using get_relation_ip.
# Setup the values it returns on each subsequent call.
self.get_relation_ip.side_effect = [None, None, None, None]
relation = FakeRelation(cluster_relation)
self.relation_ids.side_effect = relation.relation_ids
self.relation_get.side_effect = relation.get
self.related_units.side_effect = relation.relation_units
self.config.return_value = False
c = fake_config(HAPROXY_CONFIG)
c.data['os-admin-network'] = '192.168.10.0/24'
c.data['os-internal-network'] = '192.168.20.0/24'
c.data['os-public-network'] = '192.168.30.0/24'
self.config.side_effect = c
haproxy = context.HAProxyContext()
self.assertEquals({}, haproxy())
self.get_relation_ip.assert_has_calls([call('admin', '192.168.10.0/24'),
call('internal', '192.168.20.0/24'),
call('public', '192.168.30.0/24'),
call('cluster')])
@patch('charmhelpers.contrib.openstack.context.unit_get')
@patch('charmhelpers.contrib.openstack.context.local_unit')
def test_haproxy_context_with_no_peers_singlemode(self, local_unit, unit_get):
        '''Test haproxy context with a single unit in singlenode mode'''
        # A peer relation is always present, even when the unit is alone;
        # singlenode_mode still builds a context from the local unit.
cluster_relation = {
'cluster:0': {
'peer/0': {
'private-address': 'lonely.clusterpeer.howsad',
},
},
}
local_unit.return_value = 'peer/0'
# We are only using get_relation_ip.
# Setup the values it returns on each subsequent call.
self.get_relation_ip.side_effect = [None, None, None,
'lonely.clusterpeer.howsad']
relation = FakeRelation(cluster_relation)
self.relation_ids.side_effect = relation.relation_ids
self.relation_get.side_effect = relation.get
self.related_units.side_effect = relation.relation_units
self.config.return_value = False
self.get_address_in_network.return_value = None
self.get_netmask_for_address.return_value = '255.255.0.0'
self.is_ipv6_disabled.return_value = True
with patch_open() as (_open, _file):
result = context.HAProxyContext(singlenode_mode=True)()
ex = {
'frontends': {
'lonely.clusterpeer.howsad': {
'backends': collections.OrderedDict([
('peer-0', 'lonely.clusterpeer.howsad')]),
'network': 'lonely.clusterpeer.howsad/255.255.0.0'
},
},
'default_backend': 'lonely.clusterpeer.howsad',
'haproxy_host': '0.0.0.0',
'local_host': '127.0.0.1',
'ipv6_enabled': False,
'stat_port': '8888',
'stat_password': 'testpassword',
}
self.assertEquals(ex, result)
# and /etc/default/haproxy is updated.
self.assertEquals(_file.write.call_args_list,
[call('ENABLED=1\n')])
self.get_relation_ip.assert_has_calls([call('admin', False),
call('internal', False),
call('public', False),
call('cluster')])
def test_https_context_with_no_https(self):
'''Test apache2 https when no https data available'''
apache = context.ApacheSSLContext()
self.https.return_value = False
self.assertEquals({}, apache())
def _https_context_setup(self):
'''
Helper for test_https_context* tests.
'''
self.https.return_value = True
self.determine_api_port.return_value = 8756
self.determine_apache_port.return_value = 8766
apache = context.ApacheSSLContext()
apache.configure_cert = MagicMock()
apache.enable_modules = MagicMock()
apache.configure_ca = MagicMock()
apache.canonical_names = MagicMock()
apache.canonical_names.return_value = [
'10.5.1.1',
'10.5.2.1',
'10.5.3.1',
]
apache.get_network_addresses = MagicMock()
apache.get_network_addresses.return_value = [
('10.5.1.100', '10.5.1.1'),
('10.5.2.100', '10.5.2.1'),
('10.5.3.100', '10.5.3.1'),
]
apache.external_ports = '8776'
apache.service_namespace = 'cinder'
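        # Expected endpoints pair each (address, endpoint) tuple from
        # get_network_addresses with the mocked apache (8766) and api (8756)
        # ports.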
ex = {
'namespace': 'cinder',
'endpoints': [('10.5.1.100', '10.5.1.1', 8766, 8756),
('10.5.2.100', '10.5.2.1', 8766, 8756),
('10.5.3.100', '10.5.3.1', 8766, 8756)],
'ext_ports': [8766]
}
return apache, ex
def test_https_context(self):
self.relation_ids.return_value = []
apache, ex = self._https_context_setup()
self.assertEquals(ex, apache())
apache.configure_cert.assert_has_calls([
call('10.5.1.1'),
call('10.5.2.1'),
call('10.5.3.1')
])
self.assertTrue(apache.configure_ca.called)
self.assertTrue(apache.enable_modules.called)
self.assertTrue(apache.configure_cert.called)
def test_https_context_vault_relation(self):
self.relation_ids.return_value = ['certificates:2']
self.related_units.return_value = 'vault/0'
apache, ex = self._https_context_setup()
self.assertEquals(ex, apache())
self.assertFalse(apache.configure_cert.called)
self.assertFalse(apache.configure_ca.called)
def test_https_context_no_canonical_names(self):
self.relation_ids.return_value = []
apache, ex = self._https_context_setup()
apache.canonical_names.return_value = []
self.resolve_address.side_effect = (
'10.5.1.4', '10.5.2.5', '10.5.3.6')
self.assertEquals(ex, apache())
apache.configure_cert.assert_has_calls([
call('10.5.1.4'),
call('10.5.2.5'),
call('10.5.3.6')
])
self.resolve_address.assert_has_calls([
call(endpoint_type=context.INTERNAL),
call(endpoint_type=context.ADMIN),
call(endpoint_type=context.PUBLIC),
])
self.assertTrue(apache.configure_ca.called)
self.assertTrue(apache.enable_modules.called)
self.assertTrue(apache.configure_cert.called)
def test_https_context_loads_correct_apache_mods(self):
# Test apache2 context also loads required apache modules
apache = context.ApacheSSLContext()
apache.enable_modules()
ex_cmd = ['a2enmod', 'ssl', 'proxy', 'proxy_http', 'headers']
self.check_call.assert_called_with(ex_cmd)
def test_https_configure_cert(self):
# Test apache2 properly installs certs and keys to disk
self.get_cert.return_value = ('SSL_CERT', 'SSL_KEY')
self.b64decode.side_effect = [b'SSL_CERT', b'SSL_KEY']
apache = context.ApacheSSLContext()
apache.service_namespace = 'cinder'
apache.configure_cert('test-cn')
# appropriate directories are created.
self.mkdir.assert_called_with(path='/etc/apache2/ssl/cinder')
# appropriate files are written.
files = [call(path='/etc/apache2/ssl/cinder/cert_test-cn',
content=b'SSL_CERT', perms=0o640),
call(path='/etc/apache2/ssl/cinder/key_test-cn',
content=b'SSL_KEY', perms=0o640)]
self.write_file.assert_has_calls(files)
# appropriate bits are b64decoded.
decode = [call('SSL_CERT'), call('SSL_KEY')]
self.assertEquals(decode, self.b64decode.call_args_list)
def test_https_configure_cert_deprecated(self):
# Test apache2 properly installs certs and keys to disk
self.get_cert.return_value = ('SSL_CERT', 'SSL_KEY')
self.b64decode.side_effect = ['SSL_CERT', 'SSL_KEY']
apache = context.ApacheSSLContext()
apache.service_namespace = 'cinder'
apache.configure_cert()
# appropriate directories are created.
self.mkdir.assert_called_with(path='/etc/apache2/ssl/cinder')
# appropriate files are written.
files = [call(path='/etc/apache2/ssl/cinder/cert',
content='SSL_CERT', perms=0o640),
call(path='/etc/apache2/ssl/cinder/key',
content='SSL_KEY', perms=0o640)]
self.write_file.assert_has_calls(files)
# appropriate bits are b64decoded.
decode = [call('SSL_CERT'), call('SSL_KEY')]
self.assertEquals(decode, self.b64decode.call_args_list)
def test_https_canonical_names(self):
rel = FakeRelation(IDENTITY_RELATION_SINGLE_CERT)
self.relation_ids.side_effect = rel.relation_ids
self.related_units.side_effect = rel.relation_units
self.relation_get.side_effect = rel.get
apache = context.ApacheSSLContext()
self.assertEquals(apache.canonical_names(), ['cinderhost1'])
rel.relation_data = IDENTITY_RELATION_MULTIPLE_CERT
self.assertEquals(apache.canonical_names(),
sorted(['cinderhost1-adm-network',
'cinderhost1-int-network',
'cinderhost1-pub-network']))
rel.relation_data = IDENTITY_RELATION_NO_CERT
self.assertEquals(apache.canonical_names(), [])
def test_image_service_context_missing_data(self):
'''Test image-service with missing relation and missing data'''
image_service = context.ImageServiceContext()
self.relation_ids.return_value = []
self.assertEquals({}, image_service())
self.relation_ids.return_value = ['image-service:0']
self.related_units.return_value = ['glance/0']
self.relation_get.return_value = None
self.assertEquals({}, image_service())
def test_image_service_context_with_data(self):
'''Test image-service with required data'''
image_service = context.ImageServiceContext()
self.relation_ids.return_value = ['image-service:0']
self.related_units.return_value = ['glance/0']
self.relation_get.return_value = 'http://glancehost:9292'
self.assertEquals({'glance_api_servers': 'http://glancehost:9292'},
image_service())
@patch.object(context, 'neutron_plugin_attribute')
def test_neutron_context_base_properties(self, attr):
'''Test neutron context base properties'''
neutron = context.NeutronContext()
attr.return_value = 'quantum-plugin-package'
self.assertEquals(None, neutron.plugin)
self.assertEquals(None, neutron.network_manager)
self.assertEquals(None, neutron.neutron_security_groups)
self.assertEquals('quantum-plugin-package', neutron.packages)
@patch.object(context, 'neutron_plugin_attribute')
@patch.object(context, 'apt_install')
@patch.object(context, 'filter_installed_packages')
def test_neutron_ensure_package(self, _filter, _install, _packages):
        '''Test neutron context installs required packages'''
_filter.return_value = ['quantum-plugin-package']
_packages.return_value = [['quantum-plugin-package']]
neutron = context.NeutronContext()
neutron._ensure_packages()
_install.assert_called_with(['quantum-plugin-package'], fatal=True)
@patch.object(context.NeutronContext, 'neutron_security_groups')
@patch.object(context, 'unit_private_ip')
@patch.object(context, 'neutron_plugin_attribute')
def test_neutron_ovs_plugin_context(self, attr, ip, sec_groups):
ip.return_value = '10.0.0.1'
sec_groups.__get__ = MagicMock(return_value=True)
attr.return_value = 'some.quantum.driver.class'
neutron = context.NeutronContext()
self.assertEquals({
'config': 'some.quantum.driver.class',
'core_plugin': 'some.quantum.driver.class',
'neutron_plugin': 'ovs',
'neutron_security_groups': True,
'local_ip': '10.0.0.1'}, neutron.ovs_ctxt())
@patch.object(context.NeutronContext, 'neutron_security_groups')
@patch.object(context, 'unit_private_ip')
@patch.object(context, 'neutron_plugin_attribute')
def test_neutron_nvp_plugin_context(self, attr, ip, sec_groups):
ip.return_value = '10.0.0.1'
sec_groups.__get__ = MagicMock(return_value=True)
attr.return_value = 'some.quantum.driver.class'
neutron = context.NeutronContext()
self.assertEquals({
'config': 'some.quantum.driver.class',
'core_plugin': 'some.quantum.driver.class',
'neutron_plugin': 'nvp',
'neutron_security_groups': True,
'local_ip': '10.0.0.1'}, neutron.nvp_ctxt())
@patch.object(context, 'config')
@patch.object(context.NeutronContext, 'neutron_security_groups')
@patch.object(context, 'unit_private_ip')
@patch.object(context, 'neutron_plugin_attribute')
def test_neutron_n1kv_plugin_context(self, attr, ip, sec_groups, config):
ip.return_value = '10.0.0.1'
sec_groups.__get__ = MagicMock(return_value=True)
attr.return_value = 'some.quantum.driver.class'
config.return_value = 'n1kv'
neutron = context.NeutronContext()
self.assertEquals({
'core_plugin': 'some.quantum.driver.class',
'neutron_plugin': 'n1kv',
'neutron_security_groups': True,
'local_ip': '10.0.0.1',
'config': 'some.quantum.driver.class',
'vsm_ip': 'n1kv',
'vsm_username': 'n1kv',
'vsm_password': 'n1kv',
'user_config_flags': {},
'restrict_policy_profiles': 'n1kv',
}, neutron.n1kv_ctxt())
@patch.object(context.NeutronContext, 'neutron_security_groups')
@patch.object(context, 'unit_private_ip')
@patch.object(context, 'neutron_plugin_attribute')
def test_neutron_calico_plugin_context(self, attr, ip, sec_groups):
ip.return_value = '10.0.0.1'
sec_groups.__get__ = MagicMock(return_value=True)
attr.return_value = 'some.quantum.driver.class'
neutron = context.NeutronContext()
self.assertEquals({
'config': 'some.quantum.driver.class',
'core_plugin': 'some.quantum.driver.class',
'neutron_plugin': 'Calico',
'neutron_security_groups': True,
'local_ip': '10.0.0.1'}, neutron.calico_ctxt())
@patch.object(context.NeutronContext, 'neutron_security_groups')
@patch.object(context, 'unit_private_ip')
@patch.object(context, 'neutron_plugin_attribute')
def test_neutron_plumgrid_plugin_context(self, attr, ip, sec_groups):
ip.return_value = '10.0.0.1'
sec_groups.__get__ = MagicMock(return_value=True)
attr.return_value = 'some.quantum.driver.class'
neutron = context.NeutronContext()
self.assertEquals({
'config': 'some.quantum.driver.class',
'core_plugin': 'some.quantum.driver.class',
'neutron_plugin': 'plumgrid',
'neutron_security_groups': True,
'local_ip': '10.0.0.1'}, neutron.pg_ctxt())
@patch.object(context.NeutronContext, 'neutron_security_groups')
@patch.object(context, 'unit_private_ip')
@patch.object(context, 'neutron_plugin_attribute')
def test_neutron_nuage_plugin_context(self, attr, ip, sec_groups):
ip.return_value = '10.0.0.1'
sec_groups.__get__ = MagicMock(return_value=True)
attr.return_value = 'some.quantum.driver.class'
neutron = context.NeutronContext()
self.assertEquals({
'config': 'some.quantum.driver.class',
'core_plugin': 'some.quantum.driver.class',
'neutron_plugin': 'vsp',
'neutron_security_groups': True,
'local_ip': '10.0.0.1'}, neutron.nuage_ctxt())
@patch.object(context.NeutronContext, 'neutron_security_groups')
@patch.object(context, 'unit_private_ip')
@patch.object(context, 'neutron_plugin_attribute')
def test_neutron_midonet_plugin_context(self, attr, ip, sec_groups):
ip.return_value = '10.0.0.1'
sec_groups.__get__ = MagicMock(return_value=True)
attr.return_value = 'some.quantum.driver.class'
neutron = context.NeutronContext()
self.assertEquals({
'config': 'some.quantum.driver.class',
'core_plugin': 'some.quantum.driver.class',
'neutron_plugin': 'midonet',
'neutron_security_groups': True,
'local_ip': '10.0.0.1'}, neutron.midonet_ctxt())
@patch('charmhelpers.contrib.openstack.context.unit_get')
@patch.object(context.NeutronContext, 'network_manager')
def test_neutron_neutron_ctxt(self, mock_network_manager,
mock_unit_get):
vip = '88.11.22.33'
priv_addr = '10.0.0.1'
mock_unit_get.return_value = priv_addr
neutron = context.NeutronContext()
config = {'vip': vip}
self.config.side_effect = lambda key: config[key]
mock_network_manager.__get__ = Mock(return_value='neutron')
self.is_clustered.return_value = False
self.assertEquals(
{'network_manager': 'neutron',
'neutron_url': 'https://%s:9696' % (priv_addr)},
neutron.neutron_ctxt()
)
self.is_clustered.return_value = True
self.assertEquals(
{'network_manager': 'neutron',
'neutron_url': 'https://%s:9696' % (vip)},
neutron.neutron_ctxt()
)
@patch('charmhelpers.contrib.openstack.context.unit_get')
@patch.object(context.NeutronContext, 'network_manager')
def test_neutron_neutron_ctxt_http(self, mock_network_manager,
mock_unit_get):
vip = '88.11.22.33'
priv_addr = '10.0.0.1'
mock_unit_get.return_value = priv_addr
neutron = context.NeutronContext()
config = {'vip': vip}
self.config.side_effect = lambda key: config[key]
self.https.return_value = False
mock_network_manager.__get__ = Mock(return_value='neutron')
self.is_clustered.return_value = False
self.assertEquals(
{'network_manager': 'neutron',
'neutron_url': 'http://%s:9696' % (priv_addr)},
neutron.neutron_ctxt()
)
self.is_clustered.return_value = True
self.assertEquals(
{'network_manager': 'neutron',
'neutron_url': 'http://%s:9696' % (vip)},
neutron.neutron_ctxt()
)
@patch.object(context.NeutronContext, 'neutron_ctxt')
@patch.object(context.NeutronContext, 'ovs_ctxt')
@patch.object(context.NeutronContext, 'plugin')
@patch.object(context.NeutronContext, '_ensure_packages')
@patch.object(context.NeutronContext, 'network_manager')
def test_neutron_main_context_generation(self, mock_network_manager,
mock_ensure_packages,
mock_plugin, mock_ovs_ctxt,
mock_neutron_ctxt):
mock_neutron_ctxt.return_value = {'network_manager': 'neutron',
'neutron_url': 'https://foo:9696'}
config = {'neutron-alchemy-flags': None}
self.config.side_effect = lambda key: config[key]
neutron = context.NeutronContext()
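        # network_manager and plugin are patched as descriptors, so the
        # tests drive them through __get__.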
mock_network_manager.__get__ = Mock(return_value='flatdhcpmanager')
mock_plugin.__get__ = Mock()
self.assertEquals({}, neutron())
self.assertTrue(mock_network_manager.__get__.called)
self.assertFalse(mock_plugin.__get__.called)
mock_network_manager.__get__.return_value = 'neutron'
mock_plugin.__get__ = Mock(return_value=None)
self.assertEquals({}, neutron())
self.assertTrue(mock_plugin.__get__.called)
mock_ovs_ctxt.return_value = {'ovs': 'ovs_context'}
mock_plugin.__get__.return_value = 'ovs'
self.assertEquals(
{'network_manager': 'neutron',
'ovs': 'ovs_context',
'neutron_url': 'https://foo:9696'},
neutron()
)
@patch.object(context.NeutronContext, 'neutron_ctxt')
@patch.object(context.NeutronContext, 'nvp_ctxt')
@patch.object(context.NeutronContext, 'plugin')
@patch.object(context.NeutronContext, '_ensure_packages')
@patch.object(context.NeutronContext, 'network_manager')
def test_neutron_main_context_gen_nvp_and_alchemy(self,
mock_network_manager,
mock_ensure_packages,
mock_plugin,
mock_nvp_ctxt,
mock_neutron_ctxt):
mock_neutron_ctxt.return_value = {'network_manager': 'neutron',
'neutron_url': 'https://foo:9696'}
config = {'neutron-alchemy-flags': 'pool_size=20'}
self.config.side_effect = lambda key: config[key]
neutron = context.NeutronContext()
mock_network_manager.__get__ = Mock(return_value='flatdhcpmanager')
mock_plugin.__get__ = Mock()
self.assertEquals({}, neutron())
self.assertTrue(mock_network_manager.__get__.called)
self.assertFalse(mock_plugin.__get__.called)
mock_network_manager.__get__.return_value = 'neutron'
mock_plugin.__get__ = Mock(return_value=None)
self.assertEquals({}, neutron())
self.assertTrue(mock_plugin.__get__.called)
mock_nvp_ctxt.return_value = {'nvp': 'nvp_context'}
mock_plugin.__get__.return_value = 'nvp'
self.assertEquals(
{'network_manager': 'neutron',
'nvp': 'nvp_context',
'neutron_alchemy_flags': {'pool_size': '20'},
'neutron_url': 'https://foo:9696'},
neutron()
)
@patch.object(context.NeutronContext, 'neutron_ctxt')
@patch.object(context.NeutronContext, 'calico_ctxt')
@patch.object(context.NeutronContext, 'plugin')
@patch.object(context.NeutronContext, '_ensure_packages')
@patch.object(context.NeutronContext, 'network_manager')
def test_neutron_main_context_gen_calico(self, mock_network_manager,
mock_ensure_packages,
mock_plugin, mock_ovs_ctxt,
mock_neutron_ctxt):
mock_neutron_ctxt.return_value = {'network_manager': 'neutron',
'neutron_url': 'https://foo:9696'}
config = {'neutron-alchemy-flags': None}
self.config.side_effect = lambda key: config[key]
neutron = context.NeutronContext()
mock_network_manager.__get__ = Mock(return_value='flatdhcpmanager')
mock_plugin.__get__ = Mock()
self.assertEquals({}, neutron())
self.assertTrue(mock_network_manager.__get__.called)
self.assertFalse(mock_plugin.__get__.called)
mock_network_manager.__get__.return_value = 'neutron'
mock_plugin.__get__ = Mock(return_value=None)
self.assertEquals({}, neutron())
self.assertTrue(mock_plugin.__get__.called)
mock_ovs_ctxt.return_value = {'Calico': 'calico_context'}
mock_plugin.__get__.return_value = 'Calico'
self.assertEquals(
{'network_manager': 'neutron',
'Calico': 'calico_context',
'neutron_url': 'https://foo:9696'},
neutron()
)
@patch('charmhelpers.contrib.openstack.utils.juju_log',
lambda *args, **kwargs: None)
@patch.object(context, 'config')
def test_os_configflag_context(self, config):
flags = context.OSConfigFlagContext()
# single
config.return_value = 'deadbeef=True'
self.assertEquals({
'user_config_flags': {
'deadbeef': 'True',
}
}, flags())
# multi
config.return_value = 'floating_ip=True,use_virtio=False,max=5'
self.assertEquals({
'user_config_flags': {
'floating_ip': 'True',
'use_virtio': 'False',
'max': '5',
}
}, flags())
for empty in [None, '']:
config.return_value = empty
self.assertEquals({}, flags())
# multi with commas
config.return_value = 'good_flag=woot,badflag,great_flag=w00t'
self.assertEquals({
'user_config_flags': {
'good_flag': 'woot,badflag',
'great_flag': 'w00t',
}
}, flags())
# missing key
config.return_value = 'good_flag=woot=toow'
self.assertRaises(context.OSContextError, flags)
# bad value
config.return_value = 'good_flag=woot=='
self.assertRaises(context.OSContextError, flags)
@patch.object(context, 'config')
def test_os_configflag_context_custom(self, config):
flags = context.OSConfigFlagContext(
charm_flag='api-config-flags',
template_flag='api_config_flags')
# single
config.return_value = 'deadbeef=True'
self.assertEquals({
'api_config_flags': {
'deadbeef': 'True',
}
}, flags())
def test_os_subordinate_config_context(self):
relation = FakeRelation(relation_data=SUB_CONFIG_RELATION)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
nova_sub_ctxt = context.SubordinateConfigContext(
service='nova',
config_file='/etc/nova/nova.conf',
interface='nova-subordinate',
)
glance_sub_ctxt = context.SubordinateConfigContext(
service='glance',
config_file='/etc/glance/glance.conf',
interface='glance-subordinate',
)
cinder_sub_ctxt = context.SubordinateConfigContext(
service='cinder',
config_file='/etc/cinder/cinder.conf',
interface='cinder-subordinate',
)
foo_sub_ctxt = context.SubordinateConfigContext(
service='foo',
config_file='/etc/foo/foo.conf',
interface='foo-subordinate',
)
self.assertEquals(
nova_sub_ctxt(),
{'sections': {
'DEFAULT': [
['nova-key1', 'value1'],
['nova-key2', 'value2']]
}}
)
self.assertEquals(
glance_sub_ctxt(),
{'sections': {
'DEFAULT': [
['glance-key1', 'value1'],
['glance-key2', 'value2']]
}}
)
self.assertEquals(
cinder_sub_ctxt(),
{'sections': {
'cinder-1-section': [
['key1', 'value1']],
'cinder-2-section': [
['key2', 'value2']]
}, 'not-a-section': 1234}
)
        # subordinate supplies nothing for given config
glance_sub_ctxt.config_file = '/etc/glance/glance-api-paste.ini'
self.assertEquals(glance_sub_ctxt(), {'sections': {}})
# subordinate supplies bad input
self.assertEquals(foo_sub_ctxt(), {'sections': {}})
def test_os_subordinate_config_context_multiple(self):
relation = FakeRelation(relation_data=SUB_CONFIG_RELATION2)
self.relation_get.side_effect = relation.get
self.relation_ids.side_effect = relation.relation_ids
self.related_units.side_effect = relation.relation_units
nova_sub_ctxt = context.SubordinateConfigContext(
service=['nova', 'nova-compute'],
config_file='/etc/nova/nova.conf',
interface=['nova-ceilometer', 'neutron-plugin'],
)
self.assertEquals(
nova_sub_ctxt(),
{'sections': {
'DEFAULT': [
['nova-key1', 'value1'],
['nova-key2', 'value2'],
['nova-key3', 'value3'],
['nova-key4', 'value4'],
['nova-key5', 'value5'],
['nova-key6', 'value6']]
}}
)
def test_syslog_context(self):
self.config.side_effect = fake_config({'use-syslog': 'foo'})
syslog = context.SyslogContext()
result = syslog()
expected = {
'use_syslog': 'foo',
}
self.assertEquals(result, expected)
def test_loglevel_context_set(self):
self.config.side_effect = fake_config({
'debug': True,
'verbose': True,
})
syslog = context.LogLevelContext()
result = syslog()
expected = {
'debug': True,
'verbose': True,
}
self.assertEquals(result, expected)
def test_loglevel_context_unset(self):
self.config.side_effect = fake_config({
'debug': None,
'verbose': None,
})
syslog = context.LogLevelContext()
result = syslog()
expected = {
'debug': False,
'verbose': False,
}
self.assertEquals(result, expected)
@patch.object(context, '_calculate_workers')
def test_wsgi_worker_config_context(self,
_calculate_workers):
self.config.return_value = 2 # worker-multiplier=2
_calculate_workers.return_value = 8
service_name = 'service-name'
script = '/usr/bin/script'
ctxt = context.WSGIWorkerConfigContext(name=service_name,
script=script)
expect = {
"service_name": service_name,
"user": service_name,
"group": service_name,
"script": script,
"admin_script": None,
"public_script": None,
"processes": 8,
"admin_processes": 2,
"public_processes": 6,
"threads": 1,
}
self.assertEqual(expect, ctxt())
@patch.object(context, '_calculate_workers')
def test_wsgi_worker_config_context_user_and_group(self,
_calculate_workers):
self.config.return_value = 1
_calculate_workers.return_value = 1
service_name = 'service-name'
script = '/usr/bin/script'
user = 'nova'
group = 'nobody'
ctxt = context.WSGIWorkerConfigContext(name=service_name,
user=user,
group=group,
script=script)
expect = {
"service_name": service_name,
"user": user,
"group": group,
"script": script,
"admin_script": None,
"public_script": None,
"processes": 1,
"admin_processes": 1,
"public_processes": 1,
"threads": 1,
}
self.assertEqual(expect, ctxt())
def test_zeromq_context_unrelated(self):
self.is_relation_made.return_value = False
self.assertEquals(context.ZeroMQContext()(), {})
def test_zeromq_context_related(self):
self.is_relation_made.return_value = True
self.relation_ids.return_value = ['zeromq-configuration:1']
self.related_units.return_value = ['openstack-zeromq/0']
self.relation_get.side_effect = ['nonce-data', 'hostname', 'redis']
self.assertEquals(context.ZeroMQContext()(),
{'zmq_host': 'hostname',
'zmq_nonce': 'nonce-data',
'zmq_redis_address': 'redis'})
def test_notificationdriver_context_nomsg(self):
relations = {
'zeromq-configuration': False,
'amqp': False,
}
rels = fake_is_relation_made(relations=relations)
self.is_relation_made.side_effect = rels.rel_made
self.assertEquals(context.NotificationDriverContext()(),
{'notifications': 'False'})
def test_notificationdriver_context_zmq_nometer(self):
relations = {
'zeromq-configuration': True,
'amqp': False,
}
rels = fake_is_relation_made(relations=relations)
self.is_relation_made.side_effect = rels.rel_made
self.assertEquals(context.NotificationDriverContext()(),
{'notifications': 'False'})
def test_notificationdriver_context_zmq_meter(self):
relations = {
'zeromq-configuration': True,
'amqp': False,
}
rels = fake_is_relation_made(relations=relations)
self.is_relation_made.side_effect = rels.rel_made
self.assertEquals(context.NotificationDriverContext()(),
{'notifications': 'False'})
def test_notificationdriver_context_amq(self):
relations = {
'zeromq-configuration': False,
'amqp': True,
}
rels = fake_is_relation_made(relations=relations)
self.is_relation_made.side_effect = rels.rel_made
self.assertEquals(context.NotificationDriverContext()(),
{'notifications': 'True'})
@patch.object(context, 'psutil')
def test_num_cpus_xenial(self, _psutil):
_psutil.cpu_count.return_value = 4
self.assertTrue(context._num_cpus(), 4)
@patch.object(context, 'psutil')
def test_num_cpus_trusty(self, _psutil):
_psutil.NUM_CPUS = 4
self.assertTrue(context._num_cpus(), 4)
@patch.object(context, '_num_cpus')
def test_calculate_workers_float(self, _num_cpus):
self.config.side_effect = fake_config({
'worker-multiplier': 0.3
})
_num_cpus.return_value = 4
self.assertTrue(context._calculate_workers(), 4)
@patch.object(context, '_num_cpus')
def test_calculate_workers_not_quite_0(self, _num_cpus):
# Make sure that the multiplier evaluating to somewhere between
# 0 and 1 in the floating point range still has at least one
# worker.
self.config.side_effect = fake_config({
'worker-multiplier': 0.001
})
_num_cpus.return_value = 100
self.assertTrue(context._calculate_workers(), 1)
@patch.object(context, 'psutil')
def test_calculate_workers_0(self, _psutil):
self.config.side_effect = fake_config({
'worker-multiplier': 0
})
_psutil.cpu_count.return_value = 2
self.assertTrue(context._calculate_workers(), 0)
@patch.object(context, '_num_cpus')
def test_calculate_workers_noconfig(self, _num_cpus):
self.config.return_value = None
_num_cpus.return_value = 1
self.assertTrue(context._calculate_workers(), 2)
@patch.object(context, '_num_cpus')
def test_calculate_workers_noconfig_container(self, _num_cpus):
self.config.return_value = None
self.is_container.return_value = True
_num_cpus.return_value = 1
self.assertTrue(context._calculate_workers(), 2)
@patch.object(context, '_num_cpus')
def test_calculate_workers_noconfig_lotsa_cpus_container(self,
_num_cpus):
self.config.return_value = None
self.is_container.return_value = True
_num_cpus.return_value = 32
self.assertTrue(context._calculate_workers(), 4)
@patch.object(context, '_num_cpus')
def test_calculate_workers_noconfig_lotsa_cpus_not_container(self,
_num_cpus):
self.config.return_value = None
_num_cpus.return_value = 32
self.assertTrue(context._calculate_workers(), 64)
@patch.object(context, '_calculate_workers', return_value=256)
def test_worker_context(self, calculate_workers):
self.assertEqual(context.WorkerConfigContext()(),
{'workers': 256})
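    # Descriptive note on the _calculate_workers tests above (a sketch of the assumed
    # behaviour, not the library's code): the worker count is the CPU count scaled by
    # the worker-multiplier config option, with a default multiplier when the option is
    # unset, a floor of one worker whenever a non-zero multiplier rounds down to zero,
    # and a cap applied when running inside a container.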
def test_apache_get_addresses_no_network_config(self):
self.config.side_effect = fake_config({
'os-internal-network': None,
'os-admin-network': None,
'os-public-network': None
})
self.resolve_address.return_value = '10.5.1.50'
self.unit_get.return_value = '10.5.1.50'
apache = context.ApacheSSLContext()
apache.external_ports = '8776'
addresses = apache.get_network_addresses()
expected = [('10.5.1.50', '10.5.1.50')]
self.assertEqual(addresses, expected)
self.get_address_in_network.assert_not_called()
self.resolve_address.assert_has_calls([
call(context.INTERNAL),
call(context.ADMIN),
call(context.PUBLIC)
])
def test_apache_get_addresses_with_network_config(self):
self.config.side_effect = fake_config({
'os-internal-network': '10.5.1.0/24',
'os-admin-network': '10.5.2.0/24',
'os-public-network': '10.5.3.0/24',
})
_base_addresses = ['10.5.1.100',
'10.5.2.100',
'10.5.3.100']
self.get_address_in_network.side_effect = _base_addresses
self.resolve_address.side_effect = _base_addresses
self.unit_get.return_value = '10.5.1.50'
apache = context.ApacheSSLContext()
addresses = apache.get_network_addresses()
expected = [('10.5.1.100', '10.5.1.100'),
('10.5.2.100', '10.5.2.100'),
('10.5.3.100', '10.5.3.100')]
self.assertEqual(addresses, expected)
calls = [call('10.5.1.0/24', '10.5.1.50'),
call('10.5.2.0/24', '10.5.1.50'),
call('10.5.3.0/24', '10.5.1.50')]
self.get_address_in_network.assert_has_calls(calls)
self.resolve_address.assert_has_calls([
call(context.INTERNAL),
call(context.ADMIN),
call(context.PUBLIC)
])
def test_apache_get_addresses_network_spaces(self):
self.config.side_effect = fake_config({
'os-internal-network': None,
'os-admin-network': None,
'os-public-network': None
})
self.network_get_primary_address.side_effect = None
self.network_get_primary_address.return_value = '10.5.2.50'
self.resolve_address.return_value = '10.5.2.100'
self.unit_get.return_value = '10.5.1.50'
apache = context.ApacheSSLContext()
apache.external_ports = '8776'
addresses = apache.get_network_addresses()
expected = [('10.5.2.50', '10.5.2.100')]
self.assertEqual(addresses, expected)
self.get_address_in_network.assert_not_called()
self.resolve_address.assert_has_calls([
call(context.INTERNAL),
call(context.ADMIN),
call(context.PUBLIC)
])
def test_config_flag_parsing_simple(self):
# Standard key=value checks...
flags = context.config_flags_parser('key1=value1, key2=value2')
self.assertEqual(flags, {'key1': 'value1', 'key2': 'value2'})
# Check for multiple values to a single key
flags = context.config_flags_parser('key1=value1, '
'key2=value2,value3,value4')
self.assertEqual(flags, {'key1': 'value1',
'key2': 'value2,value3,value4'})
# Check for yaml formatted key value pairings for more complex
# assignment options.
flags = context.config_flags_parser('key1: subkey1=value1,'
'subkey2=value2')
self.assertEqual(flags, {'key1': 'subkey1=value1,subkey2=value2'})
# Check for good measure the ldap formats
test_string = ('user_tree_dn: ou=ABC General,'
'ou=User Accounts,dc=example,dc=com')
flags = context.config_flags_parser(test_string)
self.assertEqual(flags, {'user_tree_dn': ('ou=ABC General,'
'ou=User Accounts,'
'dc=example,dc=com')})
def _fake_get_hwaddr(self, arg):
return MACHINE_MACS[arg]
def _fake_get_ipv4(self, arg, fatal=False):
return MACHINE_NICS[arg]
@patch('charmhelpers.contrib.openstack.context.config')
def test_no_ext_port(self, mock_config):
self.config.side_effect = config = fake_config({})
mock_config.side_effect = config
self.assertEquals(context.ExternalPortContext()(), {})
@patch('charmhelpers.contrib.openstack.context.config')
def test_ext_port_eth(self, mock_config):
config = fake_config({'ext-port': 'eth1010'})
self.config.side_effect = config
mock_config.side_effect = config
self.assertEquals(context.ExternalPortContext()(),
{'ext_port': 'eth1010'})
@patch('charmhelpers.contrib.openstack.context.is_phy_iface',
lambda arg: True)
@patch('charmhelpers.contrib.openstack.context.get_nic_hwaddr')
@patch('charmhelpers.contrib.openstack.context.list_nics')
@patch('charmhelpers.contrib.openstack.context.get_ipv6_addr')
@patch('charmhelpers.contrib.openstack.context.get_ipv4_addr')
@patch('charmhelpers.contrib.openstack.context.config')
def test_ext_port_mac(self, mock_config, mock_get_ipv4_addr,
mock_get_ipv6_addr, mock_list_nics,
mock_get_nic_hwaddr):
config_macs = ABSENT_MACS + " " + MACHINE_MACS['eth2']
config = fake_config({'ext-port': config_macs})
self.config.side_effect = config
mock_config.side_effect = config
mock_get_ipv4_addr.side_effect = self._fake_get_ipv4
mock_get_ipv6_addr.return_value = []
mock_list_nics.return_value = MACHINE_MACS.keys()
mock_get_nic_hwaddr.side_effect = self._fake_get_hwaddr
self.assertEquals(context.ExternalPortContext()(),
{'ext_port': 'eth2'})
config = fake_config({'ext-port': ABSENT_MACS})
self.config.side_effect = config
mock_config.side_effect = config
self.assertEquals(context.ExternalPortContext()(), {})
@patch('charmhelpers.contrib.openstack.context.is_phy_iface',
lambda arg: True)
@patch('charmhelpers.contrib.openstack.context.get_nic_hwaddr')
@patch('charmhelpers.contrib.openstack.context.list_nics')
@patch('charmhelpers.contrib.openstack.context.get_ipv6_addr')
@patch('charmhelpers.contrib.openstack.context.get_ipv4_addr')
@patch('charmhelpers.contrib.openstack.context.config')
def test_ext_port_mac_one_used_nic(self, mock_config,
mock_get_ipv4_addr,
mock_get_ipv6_addr, mock_list_nics,
mock_get_nic_hwaddr):
self.relation_ids.return_value = ['neutron-plugin-api:1']
self.related_units.return_value = ['neutron-api/0']
self.relation_get.return_value = {'network-device-mtu': 1234,
'l2-population': 'False'}
config_macs = "%s %s" % (MACHINE_MACS['eth1'],
MACHINE_MACS['eth2'])
mock_get_ipv4_addr.side_effect = self._fake_get_ipv4
mock_get_ipv6_addr.return_value = []
mock_list_nics.return_value = MACHINE_MACS.keys()
mock_get_nic_hwaddr.side_effect = self._fake_get_hwaddr
config = fake_config({'ext-port': config_macs})
self.config.side_effect = config
mock_config.side_effect = config
self.assertEquals(context.ExternalPortContext()(),
{'ext_port': 'eth2', 'ext_port_mtu': 1234})
@patch('charmhelpers.contrib.openstack.context.NeutronPortContext.'
'resolve_ports')
def test_data_port_eth(self, mock_resolve):
self.config.side_effect = fake_config({'data-port':
'phybr1:eth1010 '
'phybr1:eth1011'})
mock_resolve.side_effect = lambda ports: ['eth1010']
self.assertEquals(context.DataPortContext()(),
{'eth1010': 'phybr1'})
@patch.object(context, 'get_nic_hwaddr')
@patch.object(context.NeutronPortContext, 'resolve_ports')
def test_data_port_mac(self, mock_resolve, mock_get_nic_hwaddr):
extant_mac = 'cb:23:ae:72:f2:33'
non_extant_mac = 'fa:16:3e:12:97:8e'
self.config.side_effect = fake_config({'data-port':
'phybr1:%s phybr1:%s' %
(non_extant_mac, extant_mac)})
def fake_resolve(ports):
resolved = []
for port in ports:
if port == extant_mac:
resolved.append('eth1010')
return resolved
mock_get_nic_hwaddr.side_effect = lambda nic: extant_mac
mock_resolve.side_effect = fake_resolve
self.assertEquals(context.DataPortContext()(),
{'eth1010': 'phybr1'})
@patch.object(context.NeutronAPIContext, '__call__', lambda *args:
{'network_device_mtu': 5000})
@patch.object(context, 'get_nic_hwaddr', lambda inst, port: port)
@patch.object(context.NeutronPortContext, 'resolve_ports',
lambda inst, ports: ports)
def test_phy_nic_mtu_context(self):
self.config.side_effect = fake_config({'data-port':
'phybr1:eth0'})
ctxt = context.PhyNICMTUContext()()
self.assertEqual(ctxt, {'devs': 'eth0', 'mtu': 5000})
@patch.object(context.glob, 'glob')
@patch.object(context.NeutronAPIContext, '__call__', lambda *args:
{'network_device_mtu': 5000})
@patch.object(context, 'get_nic_hwaddr', lambda inst, port: port)
@patch.object(context.NeutronPortContext, 'resolve_ports',
lambda inst, ports: ports)
def test_phy_nic_mtu_context_vlan(self, mock_glob):
self.config.side_effect = fake_config({'data-port':
'phybr1:eth0.100'})
mock_glob.return_value = ['/sys/class/net/eth0.100/lower_eth0']
ctxt = context.PhyNICMTUContext()()
self.assertEqual(ctxt, {'devs': 'eth0\\neth0.100', 'mtu': 5000})
@patch.object(context.glob, 'glob')
@patch.object(context.NeutronAPIContext, '__call__', lambda *args:
{'network_device_mtu': 5000})
@patch.object(context, 'get_nic_hwaddr', lambda inst, port: port)
@patch.object(context.NeutronPortContext, 'resolve_ports',
lambda inst, ports: ports)
def test_phy_nic_mtu_context_vlan_w_duplicate_raw(self, mock_glob):
self.config.side_effect = fake_config({'data-port':
'phybr1:eth0.100 '
'phybr1:eth0.200'})
def fake_glob(wcard):
if 'eth0.100' in wcard:
return ['/sys/class/net/eth0.100/lower_eth0']
elif 'eth0.200' in wcard:
return ['/sys/class/net/eth0.200/lower_eth0']
            raise Exception("Unexpected key '%s'" % (wcard))
mock_glob.side_effect = fake_glob
ctxt = context.PhyNICMTUContext()()
self.assertEqual(ctxt, {'devs': 'eth0\\neth0.100\\neth0.200',
'mtu': 5000})
def test_neutronapicontext_defaults(self):
self.relation_ids.return_value = []
expected_keys = [
'l2_population', 'enable_dvr', 'enable_l3ha',
'overlay_network_type', 'network_device_mtu',
'enable_qos', 'enable_nsg_logging'
]
api_ctxt = context.NeutronAPIContext()()
for key in expected_keys:
self.assertTrue(key in api_ctxt)
self.assertEquals(api_ctxt['polling_interval'], 2)
self.assertEquals(api_ctxt['rpc_response_timeout'], 60)
self.assertEquals(api_ctxt['report_interval'], 30)
self.assertEquals(api_ctxt['enable_nsg_logging'], False)
def setup_neutron_api_context_relation(self, cfg):
self.relation_ids.return_value = ['neutron-plugin-api:1']
self.related_units.return_value = ['neutron-api/0']
# The l2-population key is used by the context as a way of checking if
# the api service on the other end is sending data in a recent format.
self.relation_get.return_value = cfg
def test_neutronapicontext_extension_drivers_qos_on(self):
self.setup_neutron_api_context_relation({
'enable-qos': 'True',
'l2-population': 'True'})
api_ctxt = context.NeutronAPIContext()()
self.assertTrue(api_ctxt['enable_qos'])
self.assertEquals(api_ctxt['extension_drivers'], 'qos')
def test_neutronapicontext_extension_drivers_qos_off(self):
self.setup_neutron_api_context_relation({
'enable-qos': 'False',
'l2-population': 'True'})
api_ctxt = context.NeutronAPIContext()()
self.assertFalse(api_ctxt['enable_qos'])
self.assertEquals(api_ctxt['extension_drivers'], '')
def test_neutronapicontext_extension_drivers_qos_absent(self):
self.setup_neutron_api_context_relation({
'l2-population': 'True'})
api_ctxt = context.NeutronAPIContext()()
self.assertFalse(api_ctxt['enable_qos'])
self.assertEquals(api_ctxt['extension_drivers'], '')
def test_neutronapicontext_extension_drivers_log_off(self):
self.setup_neutron_api_context_relation({
'enable-nsg-logging': 'False',
'l2-population': 'True'})
api_ctxt = context.NeutronAPIContext()()
self.assertEquals(api_ctxt['extension_drivers'], '')
def test_neutronapicontext_extension_drivers_log_on(self):
self.setup_neutron_api_context_relation({
'enable-nsg-logging': 'True',
'l2-population': 'True'})
api_ctxt = context.NeutronAPIContext()()
self.assertEquals(api_ctxt['extension_drivers'], 'log')
def test_neutronapicontext_extension_drivers_log_qos_on(self):
self.setup_neutron_api_context_relation({
'enable-qos': 'True',
'enable-nsg-logging': 'True',
'l2-population': 'True'})
api_ctxt = context.NeutronAPIContext()()
self.assertEquals(api_ctxt['extension_drivers'], 'qos,log')
def test_neutronapicontext_string_converted(self):
self.setup_neutron_api_context_relation({
'l2-population': 'True'})
api_ctxt = context.NeutronAPIContext()()
self.assertEquals(api_ctxt['l2_population'], True)
def test_neutronapicontext_none(self):
self.relation_ids.return_value = ['neutron-plugin-api:1']
self.related_units.return_value = ['neutron-api/0']
self.relation_get.return_value = {'l2-population': 'True'}
api_ctxt = context.NeutronAPIContext()()
self.assertEquals(api_ctxt['network_device_mtu'], None)
def test_network_service_ctxt_no_units(self):
self.relation_ids.return_value = []
self.relation_ids.return_value = ['foo']
self.related_units.return_value = []
self.assertEquals(context.NetworkServiceContext()(), {})
@patch.object(context.OSContextGenerator, 'context_complete')
def test_network_service_ctxt_no_data(self, mock_context_complete):
rel = FakeRelation(QUANTUM_NETWORK_SERVICE_RELATION)
self.relation_ids.side_effect = rel.relation_ids
self.related_units.side_effect = rel.relation_units
relation = FakeRelation(relation_data=QUANTUM_NETWORK_SERVICE_RELATION)
self.relation_get.side_effect = relation.get
mock_context_complete.return_value = False
self.assertEquals(context.NetworkServiceContext()(), {})
def test_network_service_ctxt_data(self):
data_result = {
'keystone_host': '10.5.0.1',
'service_port': '5000',
'auth_port': '20000',
'service_tenant': 'tenant',
'service_username': 'username',
'service_password': 'password',
'quantum_host': '10.5.0.2',
'quantum_port': '9696',
'quantum_url': 'http://10.5.0.2:9696/v2',
'region': 'aregion',
'service_protocol': 'http',
'auth_protocol': 'http',
'api_version': '2.0',
}
rel = FakeRelation(QUANTUM_NETWORK_SERVICE_RELATION)
self.relation_ids.side_effect = rel.relation_ids
self.related_units.side_effect = rel.relation_units
relation = FakeRelation(relation_data=QUANTUM_NETWORK_SERVICE_RELATION)
self.relation_get.side_effect = relation.get
self.assertEquals(context.NetworkServiceContext()(), data_result)
def test_network_service_ctxt_data_api_version(self):
data_result = {
'keystone_host': '10.5.0.1',
'service_port': '5000',
'auth_port': '20000',
'service_tenant': 'tenant',
'service_username': 'username',
'service_password': 'password',
'quantum_host': '10.5.0.2',
'quantum_port': '9696',
'quantum_url': 'http://10.5.0.2:9696/v2',
'region': 'aregion',
'service_protocol': 'http',
'auth_protocol': 'http',
'api_version': '3',
}
rel = FakeRelation(QUANTUM_NETWORK_SERVICE_RELATION_VERSIONED)
self.relation_ids.side_effect = rel.relation_ids
self.related_units.side_effect = rel.relation_units
relation = FakeRelation(
relation_data=QUANTUM_NETWORK_SERVICE_RELATION_VERSIONED)
self.relation_get.side_effect = relation.get
self.assertEquals(context.NetworkServiceContext()(), data_result)
def test_internal_endpoint_context(self):
config = {'use-internal-endpoints': False}
self.config.side_effect = fake_config(config)
ctxt = context.InternalEndpointContext()
self.assertFalse(ctxt()['use_internal_endpoints'])
config = {'use-internal-endpoints': True}
self.config.side_effect = fake_config(config)
self.assertTrue(ctxt()['use_internal_endpoints'])
@patch.object(context, 'os_release')
def test_volume_api_context(self, mock_os_release):
mock_os_release.return_value = 'ocata'
config = {'use-internal-endpoints': False}
self.config.side_effect = fake_config(config)
ctxt = context.VolumeAPIContext('cinder-common')
c = ctxt()
self.assertEqual(c['volume_api_version'], '2')
self.assertEqual(c['volume_catalog_info'],
'volumev2:cinderv2:publicURL')
mock_os_release.return_value = 'pike'
config['use-internal-endpoints'] = True
self.config.side_effect = fake_config(config)
ctxt = context.VolumeAPIContext('cinder-common')
c = ctxt()
self.assertEqual(c['volume_api_version'], '3')
self.assertEqual(c['volume_catalog_info'],
'volumev3:cinderv3:internalURL')
def test_volume_api_context_no_pkg(self):
self.assertRaises(ValueError, context.VolumeAPIContext, "")
self.assertRaises(ValueError, context.VolumeAPIContext, None)
def test_apparmor_context_call_not_valid(self):
''' Tests for the apparmor context'''
mock_aa_object = context.AppArmorContext()
# Test with invalid config
self.config.return_value = 'NOTVALID'
self.assertEquals(mock_aa_object.__call__(), None)
def test_apparmor_context_call_complain(self):
''' Tests for the apparmor context'''
mock_aa_object = context.AppArmorContext()
# Test complain mode
self.config.return_value = 'complain'
self.assertEquals(mock_aa_object.__call__(),
{'aa_profile_mode': 'complain',
'ubuntu_release': '16.04'})
def test_apparmor_context_call_enforce(self):
''' Tests for the apparmor context'''
mock_aa_object = context.AppArmorContext()
# Test enforce mode
self.config.return_value = 'enforce'
self.assertEquals(mock_aa_object.__call__(),
{'aa_profile_mode': 'enforce',
'ubuntu_release': '16.04'})
def test_apparmor_context_call_disable(self):
''' Tests for the apparmor context'''
mock_aa_object = context.AppArmorContext()
# Test complain mode
self.config.return_value = 'disable'
self.assertEquals(mock_aa_object.__call__(),
{'aa_profile_mode': 'disable',
'ubuntu_release': '16.04'})
def test_apparmor_setup_complain(self):
''' Tests for the apparmor setup'''
AA = context.AppArmorContext(profile_name='fake-aa-profile')
AA.install_aa_utils = MagicMock()
AA.manually_disable_aa_profile = MagicMock()
# Test complain mode
self.config.return_value = 'complain'
AA.setup_aa_profile()
AA.install_aa_utils.assert_called_with()
self.check_call.assert_called_with(['aa-complain', 'fake-aa-profile'])
self.assertFalse(AA.manually_disable_aa_profile.called)
def test_apparmor_setup_enforce(self):
''' Tests for the apparmor setup'''
AA = context.AppArmorContext(profile_name='fake-aa-profile')
AA.install_aa_utils = MagicMock()
AA.manually_disable_aa_profile = MagicMock()
# Test enforce mode
self.config.return_value = 'enforce'
AA.setup_aa_profile()
self.check_call.assert_called_with(['aa-enforce', 'fake-aa-profile'])
self.assertFalse(AA.manually_disable_aa_profile.called)
def test_apparmor_setup_disable(self):
''' Tests for the apparmor setup'''
AA = context.AppArmorContext(profile_name='fake-aa-profile')
AA.install_aa_utils = MagicMock()
AA.manually_disable_aa_profile = MagicMock()
# Test disable mode
self.config.return_value = 'disable'
AA.setup_aa_profile()
self.check_call.assert_called_with(['aa-disable', 'fake-aa-profile'])
self.assertFalse(AA.manually_disable_aa_profile.called)
# Test failed to disable
from subprocess import CalledProcessError
self.check_call.side_effect = CalledProcessError(0, 0, 0)
AA.setup_aa_profile()
self.check_call.assert_called_with(['aa-disable', 'fake-aa-profile'])
AA.manually_disable_aa_profile.assert_called_with()
@patch.object(context, 'enable_memcache')
@patch.object(context, 'is_ipv6_disabled')
def test_memcache_context_ipv6(self, _is_ipv6_disabled, _enable_memcache):
self.lsb_release.return_value = {'DISTRIB_CODENAME': 'xenial'}
_enable_memcache.return_value = True
_is_ipv6_disabled.return_value = False
config = {
'openstack-origin': 'distro',
}
self.config.side_effect = fake_config(config)
ctxt = context.MemcacheContext()
self.assertTrue(ctxt()['use_memcache'])
expect = {
'memcache_port': '11211',
'memcache_server': '::1',
'memcache_server_formatted': '[::1]',
'memcache_url': 'inet6:[::1]:11211',
'use_memcache': True}
self.assertEqual(ctxt(), expect)
self.lsb_release.return_value = {'DISTRIB_CODENAME': 'trusty'}
expect['memcache_server'] = 'ip6-localhost'
ctxt = context.MemcacheContext()
self.assertEqual(ctxt(), expect)
@patch.object(context, 'enable_memcache')
@patch.object(context, 'is_ipv6_disabled')
def test_memcache_context_ipv4(self, _is_ipv6_disabled, _enable_memcache):
self.lsb_release.return_value = {'DISTRIB_CODENAME': 'xenial'}
_enable_memcache.return_value = True
_is_ipv6_disabled.return_value = True
config = {
'openstack-origin': 'distro',
}
self.config.side_effect = fake_config(config)
ctxt = context.MemcacheContext()
self.assertTrue(ctxt()['use_memcache'])
expect = {
'memcache_port': '11211',
'memcache_server': '127.0.0.1',
'memcache_server_formatted': '127.0.0.1',
'memcache_url': '127.0.0.1:11211',
'use_memcache': True}
self.assertEqual(ctxt(), expect)
self.lsb_release.return_value = {'DISTRIB_CODENAME': 'trusty'}
expect['memcache_server'] = 'localhost'
ctxt = context.MemcacheContext()
self.assertEqual(ctxt(), expect)
@patch.object(context, 'enable_memcache')
def test_memcache_off_context(self, _enable_memcache):
_enable_memcache.return_value = False
config = {'openstack-origin': 'distro'}
self.config.side_effect = fake_config(config)
ctxt = context.MemcacheContext()
self.assertFalse(ctxt()['use_memcache'])
self.assertEqual(ctxt(), {'use_memcache': False})
@patch('charmhelpers.contrib.openstack.context.mkdir')
def test_ensure_dir_ctx(self, mkdir):
dirname = '/etc/keystone/policy.d'
owner = 'someuser'
group = 'somegroup'
perms = 0o555
force = False
ctxt = context.EnsureDirContext(dirname, owner=owner,
group=group, perms=perms,
force=force)
ctxt()
mkdir.assert_called_with(dirname, owner=owner, group=group,
perms=perms, force=force)
@patch.object(context, 'os_release')
def test_VersionsContext(self, os_release):
self.lsb_release.return_value = {'DISTRIB_CODENAME': 'xenial'}
os_release.return_value = 'essex'
self.assertEqual(
context.VersionsContext()(),
{
'openstack_release': 'essex',
'operating_system_release': 'xenial'})
os_release.assert_called_once_with('python-keystone', base='icehouse')
self.lsb_release.assert_called_once_with()
|
import os, sys, json
import subprocess
## A script to generate Fig.13
## Fig.13 includes two subplots
## (a) preemption latency with increased kernel execution time
## (b) preemption latency with increased kernel number
quick_eval = False
if os.getenv("REEF_QUICK_EVAL") != None:
quick_eval = True
def run_shell_cmd(cmd):
lines = []
p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
while p.poll() is None:
line = p.stdout.readline().strip().decode('utf-8')
lines.append(line)
print(line)
if p.returncode != 0:
print(f'Subprogram {cmd} failed')
return lines
def SubplotA():
kernel_size = [
"1", "100", "500", "1000", "2000", "3000", "4000", "5000", "6000", "7000", "8000"]
# "1600", "2000", "2400", "2800", "3200",
# "3600", "4000", "5000"]
methods = ["reset", "wait"]
def run_evaluation():
res = []
for ks in kernel_size:
subres = []
for method in methods:
cmd = ["kernel_size", ks, method]
if quick_eval:
cmd.append("10")
else:
cmd.append("30")
subres.append(run_shell_cmd(cmd))
res.append(subres)
return res
## 2. format evaluation results
def format_results(res):
preempt_lat = []
execution_lat = []
for i in range(len(kernel_size)):
preempt_lat_row = []
execution_lat_row = []
for j in range(len(methods)):
log = res[i][j]
lat_list = []
for l in log:
if l.find("preempt latency: ") != -1:
lat = l.strip().split(" ")[-2]
lat_list.append(int(lat))
if l.find("Execute avg latency: ") != -1:
lat = l.strip().split(" ")[-2]
execution_lat_row.append(lat)
lat_list = sorted(lat_list)
preempt_lat_row.append(lat_list[int(len(lat_list)*0.8)])
execution_lat.append(execution_lat_row)
preempt_lat.append(preempt_lat_row)
return execution_lat, preempt_lat
## 3. generate plot
def generate_plot(exe, preempt):
header = "exec-lat reset wait\n"
col_header = exe
print(col_header)
lat_file = open("fig13a.dat", "w")
lat_file.write(header)
for i in range(len(preempt)):
lat_file.write(col_header[i][0] + " ")
for l in preempt[i]:
lat_file.write(str(l) + " ")
lat_file.write("\n")
lat_file.close()
subprocess.getoutput("make -f ../Makefile fig13a.pdf")
res = run_evaluation()
exe, lat = format_results(res)
print(exe)
print(lat)
generate_plot(exe, lat)
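# A minimal sketch (illustrative, not part of the original script) of the percentile
# selection used by the format_results helpers: take the element at index
# int(len(samples) * fraction) of the sorted samples (0.8 for Fig.13a, 0.5 for Fig.13b).
def _percentile_sketch(samples, fraction):
    ordered = sorted(samples)
    return ordered[int(len(ordered) * fraction)]

assert _percentile_sketch([5, 1, 9, 3, 7], 0.5) == 5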
def SubplotB():
resource_dir = os.getenv("REEF_RESOURCE_DIR")
queue_size = [10, 100, 200, 300, 400, 500]
methods = ["reset", "wait"]
temp_file_path = "reef_fig13_temp.json.tmp"
jfile = open(resource_dir+"/bench_kernel/bench_kernel.json", "r")
base_json = json.loads(jfile.read())
jfile.close()
def prepare_config_file(qs):
new_conf = {}
new_conf['storage'] = base_json['storage']
new_conf['args'] = base_json['args']
new_conf['shared_memory'] = base_json['shared_memory']
new_conf['kernels'] = []
for i in range(qs):
new_conf['kernels'].append(base_json['kernels'][0])
temp_file = open(temp_file_path, "w")
temp_file.write(json.dumps(new_conf))
temp_file.close()
def run_evaluation():
res = []
for qs in queue_size:
subres = []
prepare_config_file(qs)
for method in methods:
cmd = ["queue_size", temp_file_path, method]
if quick_eval:
cmd.append("10")
else:
cmd.append("30")
subres.append(run_shell_cmd(cmd))
res.append(subres)
return res
## 2. format evaluation results
def format_results(res):
preempt_lat = []
for i in range(len(queue_size)):
preempt_lat_row = []
for j in range(len(methods)):
log = res[i][j]
lat_list = []
for l in log:
if l.find("preempt latency: ") != -1:
lat = l.strip().split(" ")[-2]
lat_list.append(int(lat))
lat_list = sorted(lat_list)
preempt_lat_row.append(lat_list[int(len(lat_list)*0.5)])
preempt_lat.append(preempt_lat_row)
return preempt_lat
## 3. generate plot
def generate_plot(preempt):
header = "queue_size reset wait\n"
col_header = queue_size
lat_file = open("fig13b.dat", "w")
lat_file.write(header)
for i in range(len(preempt)):
lat_file.write(str(col_header[i]) + " ")
for l in preempt[i]:
lat_file.write(str(l) + " ")
lat_file.write("\n")
lat_file.close()
subprocess.getoutput("make -f ../Makefile fig13b.pdf")
res = run_evaluation()
lat = format_results(res)
generate_plot(lat)
SubplotA()
SubplotB()
|
# Copyright 2014 Max Sharples
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" S3/storage utility methods """
import os
import re
import logging
from boto.s3.connection import S3Connection
from boto.s3.key import Key
logger = logging.getLogger(__name__)
class MissingDataException(Exception):
pass
def get_conn(aws_access_key_id=None, aws_secret_access_key=None):
k = aws_access_key_id or os.environ['AWS_ACCESS_KEY_ID']
s = aws_secret_access_key or os.environ['AWS_SECRET_ACCESS_KEY']
return S3Connection(k, s)
def copy_s3_file(source, destination,
aws_access_key_id=None, aws_secret_access_key=None):
""" Copy an S3 object from one location to another
"""
dest_bucket, dest_key = parse_s3_uri(destination)
source_bucket, source_key = parse_s3_uri(source)
conn = get_conn(aws_access_key_id, aws_secret_access_key)
logger.info("Copying S3 source files. This may take some time.")
if is_dir(source):
s_bkt = conn.get_bucket(source_bucket)
d_bkt = conn.get_bucket(dest_bucket)
objs = get_bucket_list(s_bkt, source_key)
if len(objs) == 0:
raise MissingDataException("supplied path is empty")
for i, k in enumerate(get_bucket_list(s_bkt, source_key)):
new_key = dest_key + str(i)
logger.debug("copying {0}/{1} to {2}/{3}".format(source_bucket,
k.key,
dest_bucket,
new_key))
d_bkt.copy_key(new_key, source_bucket, k.key)
return destination + '/'
else:
bkt = conn.get_bucket(dest_bucket)
logger.debug("copying {0}/{1} to {2}/{3}".format(source_bucket,
source_key,
dest_bucket,
dest_key))
return bkt.copy_key(dest_key, source_bucket, source_key)
def upload_file_to_s3(file_path, s3_path,
aws_access_key_id=None, aws_secret_access_key=None):
"""Create an S3 object from the contents of a local file
"""
s3_bucket, s3_key = parse_s3_uri(s3_path)
conn = get_conn(aws_access_key_id, aws_secret_access_key)
bkt = conn.get_bucket(s3_bucket)
k = Key(bkt)
k.key = s3_key
return k.set_contents_from_filename(file_path)
def parse_s3_uri(uri):
"""Parse an S3 uri from: s3://bucketname/some/other/path/info/
to:
bucket = bucketname
key = some/other/path/info
"""
m = re.search(r'(s3://)([A-Za-z0-9_-]+)/(\S*)', uri)
if m:
return (m.group(2), m.group(3))
else:
return None
def obj_type(key):
"""If it is a 'dir' it will end with a slash
otherwise it is a 'file'
"""
if key[-1:] == '/':
return 'directory'
else:
return 'file'
def is_dir(key):
return obj_type(key) == 'directory'
def concatenate_keys(source_dir, destination_key,
aws_access_key_id=None, aws_secret_access_key=None):
"""Concatenate all the files in a bucket
using multipart upload feature of S3 API.
NOTE: this only works when all files are above 5MB
"""
s_bucket, s_key = parse_s3_uri(source_dir)
d_bucket, d_key = parse_s3_uri(destination_key)
conn = get_conn(aws_access_key_id, aws_secret_access_key)
s_bk = conn.get_bucket(s_bucket)
d_bk = conn.get_bucket(d_bucket)
mp = d_bk.initiate_multipart_upload(d_key)
for i, k in enumerate(get_bucket_list(s_bk, s_key)):
mp.copy_part_from_key(s_bucket, k.key, i+1)
mp.complete_upload()
def get_bucket_list(bucket, key):
""" list items in a bucket that match given key """
# ignore key if zero bytes
return [k for k in bucket.list(key) if k.size > 0]
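# Illustrative usage (not part of the original module); the bucket and key names are
# made up, and only the helpers that need no AWS credentials are exercised.
if __name__ == '__main__':
    assert parse_s3_uri('s3://my-bucket/some/prefix/part-0') == ('my-bucket', 'some/prefix/part-0')
    assert parse_s3_uri('not-an-s3-uri') is None
    assert obj_type('some/prefix/') == 'directory'
    assert is_dir('some/prefix/part-0') is False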
|
# -*- coding: utf-8 -*-
"""This file contains a plist plugin for the iPod/iPhone storage plist."""
from __future__ import unicode_literals
from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import definitions
from plaso.parsers import plist
from plaso.parsers.plist_plugins import interface
# TODO: add more attributes.
class IPodPlistEventData(events.EventData):
"""iPod plist event data.
Attributes:
device_id (str): unique identifier of the iPod device.
"""
DATA_TYPE = 'ipod:device:entry'
def __init__(self):
"""Initializes event data."""
super(IPodPlistEventData, self).__init__(data_type=self.DATA_TYPE)
self.device_id = None
class IPodPlugin(interface.PlistPlugin):
"""Plugin to extract iPod/iPad/iPhone device information."""
NAME = 'ipod_device'
DESCRIPTION = 'Parser for iPod, iPad and iPhone plist files.'
PLIST_PATH = 'com.apple.iPod.plist'
PLIST_KEYS = frozenset(['Devices'])
# pylint: disable=arguments-differ
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
"""Extract device information from the iPod plist.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
"""
devices = match.get('Devices', {})
for device_identifier, device_information in iter(devices.items()):
datetime_value = device_information.get('Connected', None)
if not datetime_value:
continue
event_data = IPodPlistEventData()
event_data.device_id = device_identifier
# TODO: refactor.
for key, value in iter(device_information.items()):
if key == 'Connected':
continue
attribute_name = key.lower().replace(' ', '_')
setattr(event_data, attribute_name, value)
event = time_events.PythonDatetimeEvent(
datetime_value, definitions.TIME_DESCRIPTION_LAST_CONNECTED)
parser_mediator.ProduceEventWithEventData(event, event_data)
plist.PlistParser.RegisterPlugin(IPodPlugin)
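# Note on GetEntries above: the 'Devices' match maps a device identifier to a device
# information dict; 'Connected' supplies the event timestamp, and every other key is
# lower-cased with spaces replaced by underscores before being attached to the event
# data (e.g. 'Serial Number' becomes event_data.serial_number).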
|
'''
CS 115, Inheritance Activity
Author: <your name here>
Pledge: <write pledge>
'''
'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
' RULES: You can use Canvas to download this file and upload your solution.
' You can use Eclipse to edit and run your program. You should NOT look at
' other programs in Eclipse, you should NOT use any other programs, and you
' should NOT use any notes or books.
' According to the Honor Code, you should report any student who appears
' to be violating these rules.
'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
' Question 7 (15 points)
' Implement missing sections of the Employee class.
'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
class Employee(object):
'''Write the constructor below. It should take in five arguments:
- first_name (a string)
- last_name (a string)
- title (a string)
- hours_per_week (an int)
- hourly_rate (a float)
All fields must be private. No error checking or type conversions
are required.
5 points'''
def __init__(self, first_name, last_name, title, hours_per_week, hourly_rate):
self.__first_name = first_name
self.__last_name = last_name
self.__title = title
self.__hours_per_week = hours_per_week
self.__hourly_rate = hourly_rate
'''Write a property for hourly_rate. 3 points'''
@property
def hourly_rate(self):
return self.__hourly_rate
'''Write a setter for hourly rate. 3 points'''
@hourly_rate.setter
def hourly_rate(self, hourly_rate):
self.__hourly_rate = hourly_rate
'''Write a method called get_total_compensation.
It returns the total amount of money an employee earns in a year.
Assume that the employee works 50 weeks each year, with the remaining
2 set aside for vacation.
4 points'''
def get_total_compensation(self):
return 50 * self.__hours_per_week * self.__hourly_rate
def __str__(self):
return 'Employee: %s %s\n Title: %s\n Hours per week: %d\n' \
' Hourly rate: $%.2f\n Yearly compensation: $%.2f' % \
(self.__first_name, self.__last_name, self.__title, \
self.__hours_per_week, self.__hourly_rate, \
self.get_total_compensation())
'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
' Question 8 (15 points)
' Implement missing sections of the Manager class. Manager should be a
' subclass of Employee.
'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
class Manager(Employee): # 2 points
'''Write the constructor below. It should take in six arguments:
- the first five are the same as in the Employee constructor
- bonus_percent, a float >= 0. This attribute represents the percentage
of the employee's yearly compensation that will be used to
create the manager's annual bonus. MAKE SURE the argument is a float
>= 0. Otherwise, if it's not a float raise a TypeError stating,
"Bonus percent must be a float." If it's a float but < 0, raise a
ValueError stating, "Bonus percent cannot be negative."
bonus_percent must be private.
8 points'''
def __init__(self, first_name, last_name, title, hours_per_week, hourly_rate, bonus_percent):
super().__init__(first_name, last_name, title, hours_per_week, hourly_rate)
        if not isinstance(bonus_percent, float):
            raise TypeError("Bonus percent must be a float.")
        if bonus_percent < 0:
            raise ValueError("Bonus percent cannot be negative.")
        self.__bonus_percent = bonus_percent
'''Override the method get_total_compensation.
It returns the total amount of money the manager earns in a year, i.e.
basic employee compensation + bonus.
To get full credit, you must call get_total_compensation in the superclass.
Note: If a manager's yearly compensation is $100,000 and the bonus_percent
is 10 (ten), the total compensation will be 110,000.
5 points'''
def get_total_compensation(self):
return super().get_total_compensation() * (100 + self.__bonus_percent) / 100
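# Quick illustrative check (not part of the assignment): 40 hours/week at $30/hour over
# the assumed 50 working weeks is $60,000 a year; a 10% manager bonus raises it to $66,000.
if __name__ == '__main__':
    employee = Employee('Ada', 'Lovelace', 'Engineer', 40, 30.0)
    assert employee.get_total_compensation() == 60000.0
    manager = Manager('Grace', 'Hopper', 'Director', 40, 30.0, 10.0)
    assert manager.get_total_compensation() == 66000.0
    print(manager)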
|
import os
import sys
import argparse
from map_functions import read_content
parser = argparse.ArgumentParser(description='Count objects in the custom dataset')
parser.add_argument('-a', '--anno_dir', default=None, help='annotation dir')
args = parser.parse_args()
anno_list = sorted(os.listdir(args.anno_dir))
paths = []
for j in anno_list:
paths.append(os.path.join(args.anno_dir, j))
total_imgs = len(paths)
total_objs = 0
for j in paths:
img_name, boxes, labels = read_content(j)
total_objs += len(labels)
print("Total images: ", total_imgs, "Total objects: ", total_objs)
|
import PySimpleGUI as sg
class App:
def __init__(self):
layout = [
[sg.Text('Primeiro número:' ), sg.Input(key='num1')],
[sg.Text('Segundo número: '), sg.Input(key='num2')],
[sg.Button('OK'), sg.Button('Cancel')]
]
self.win = sg.Window('EXE-003', layout)
    def gerador(self):
while True:
events, values = self.win.read()
if events in (sg.WINDOW_CLOSED, 'Cancel'):
break
elif events == 'OK':
num1 = int(values['num1'])
                num2 = int(values['num2'])
sg.popup(f'A Soma dos valores {num1} + {num2} = {num1 + num2}')
self.win.close()
aplic = App()
aplic.gerador()
|
from test_utils import create_and_save_env
from flatland.envs.rail_env import RailEnv
from flatland.envs.rail_generators import sparse_rail_generator, random_rail_generator, complex_rail_generator, \
rail_from_file
from flatland.envs.schedule_generators import sparse_schedule_generator, random_schedule_generator, \
complex_schedule_generator, schedule_from_file
def test_schedule_from_file_sparse():
"""
Test to see that all parameters are loaded as expected
Returns
-------
"""
# Different agent types (trains) with different speeds.
speed_ration_map = {1.: 0.25, # Fast passenger train
1. / 2.: 0.25, # Fast freight train
1. / 3.: 0.25, # Slow commuter train
1. / 4.: 0.25} # Slow freight train
# Generate Sparse test env
rail_generator = sparse_rail_generator(max_num_cities=5,
seed=1,
grid_mode=False,
max_rails_between_cities=3,
max_rails_in_city=6,
)
schedule_generator = sparse_schedule_generator(speed_ration_map)
create_and_save_env(file_name="./sparse_env_test.pkl", rail_generator=rail_generator,
schedule_generator=schedule_generator)
# Sparse generator
rail_generator = rail_from_file("./sparse_env_test.pkl")
schedule_generator = schedule_from_file("./sparse_env_test.pkl")
sparse_env_from_file = RailEnv(width=1, height=1, rail_generator=rail_generator,
schedule_generator=schedule_generator)
sparse_env_from_file.reset(True, True)
# Assert loaded agent number is correct
assert sparse_env_from_file.get_num_agents() == 10
# Assert max steps is correct
assert sparse_env_from_file._max_episode_steps == 500
def test_schedule_from_file_random():
"""
Test to see that all parameters are loaded as expected
Returns
-------
"""
# Different agent types (trains) with different speeds.
speed_ration_map = {1.: 0.25, # Fast passenger train
1. / 2.: 0.25, # Fast freight train
1. / 3.: 0.25, # Slow commuter train
1. / 4.: 0.25} # Slow freight train
# Generate random test env
rail_generator = random_rail_generator()
schedule_generator = random_schedule_generator(speed_ration_map)
create_and_save_env(file_name="./random_env_test.pkl", rail_generator=rail_generator,
schedule_generator=schedule_generator)
# Random generator
rail_generator = rail_from_file("./random_env_test.pkl")
schedule_generator = schedule_from_file("./random_env_test.pkl")
random_env_from_file = RailEnv(width=1, height=1, rail_generator=rail_generator,
schedule_generator=schedule_generator)
random_env_from_file.reset(True, True)
# Assert loaded agent number is correct
assert random_env_from_file.get_num_agents() == 10
# Assert max steps is correct
assert random_env_from_file._max_episode_steps == 1350
def test_schedule_from_file_complex():
"""
Test to see that all parameters are loaded as expected
Returns
-------
"""
# Different agent types (trains) with different speeds.
speed_ration_map = {1.: 0.25, # Fast passenger train
1. / 2.: 0.25, # Fast freight train
1. / 3.: 0.25, # Slow commuter train
1. / 4.: 0.25} # Slow freight train
# Generate complex test env
rail_generator = complex_rail_generator(nr_start_goal=10,
nr_extra=1,
min_dist=8,
max_dist=99999)
schedule_generator = complex_schedule_generator(speed_ration_map)
create_and_save_env(file_name="./complex_env_test.pkl", rail_generator=rail_generator,
schedule_generator=schedule_generator)
# Load the different envs and check the parameters
# Complex generator
rail_generator = rail_from_file("./complex_env_test.pkl")
schedule_generator = schedule_from_file("./complex_env_test.pkl")
complex_env_from_file = RailEnv(width=1, height=1, rail_generator=rail_generator,
schedule_generator=schedule_generator)
complex_env_from_file.reset(True, True)
# Assert loaded agent number is correct
assert complex_env_from_file.get_num_agents() == 10
# Assert max steps is correct
assert complex_env_from_file._max_episode_steps == 1350
|
"""Base classes for Jenkins test report unit tests."""
from ..source_collector_test_case import SourceCollectorTestCase
class JenkinsTestReportTestCase(SourceCollectorTestCase): # skipcq: PTC-W0046
"""Base class for Jenkins test report unit tests."""
SOURCE_TYPE = "jenkins_test_report"
|
# -*- coding: utf-8 -*-
"""
Original file from Telefónica I+D:
https://github.com/telefonicaid/lettuce-tools/blob/master/lettuce_tools/dataset_utils/dataset_utils.py
dataset_utils module contains:
- A dataset manager to prepare test data:
* generate_fixed_length_params: Transforms the '[LENGTH]' param value to a valid length.
* remove_missing_params: Remove parameters with value '[MISSING]'
    * infere_datatypes: Infers the type of parameters to convert them to the suitable var type
"""
__author__ = "Telefónica I+D, @jframos"
__project__ = "python-qautils [https://github.com/qaenablers/python-qautils]"
__copyright__ = "Copyright 2015"
__license__ = " Apache License, Version 2.0"
__version__ = "1.2.1"
import json
class DatasetUtils(object):
def prepare_data(self, data):
"""
        Generate fixed length data for elements tagged with the text [LENGTH]
        Removes all the data elements tagged with the text [MISSING_PARAM]
        Transforms data from string to primitive type
:param data: hash entry
:return cleaned data
"""
try:
data = self.generate_fixed_length_params(data)
data = self.remove_missing_params(data)
data = self.infere_datatypes(data)
return data
except:
return None
def prepare_param(self, param):
"""
Generate a fixed length data for elements tagged with the text [LENGTH]
Removes all data elements tagged with the text [MISSING_PARAM]
:param param: Test parameter
:return data without not desired params
"""
if "[MISSING_PARAM]" in param:
new_param = None
else:
new_param = self.generate_fixed_length_param(param)
return new_param
def remove_missing_params(self, data):
"""
Removes all the data elements tagged with the text [MISSING_PARAM]
:param data: Lettuce step hash entry
:return data without not desired params
"""
try:
for item in data.keys():
if "[MISSING_PARAM]" in data[item]:
del(data[item])
finally:
return data
def generate_fixed_length_param(self, param):
"""
Generate a fixed length param if the elements matches the expression
[<type>_WITH_LENGTH_<length>]. E.g.: [STRING_WITH_LENGTH_15]
:param param: Lettuce param
:return param with the desired length
"""
try:
if "_WITH_LENGTH_" in param:
if "_ARRAY_WITH_LENGTH_" in param:
seeds = {'STRING': 'a', 'INTEGER': 1}
seed, length = param[1:-1].split("_ARRAY_WITH_LENGTH_")
param = list(seeds[seed] for x in xrange(int(length)))
elif "JSON_WITH_LENGTH_" in param:
length = int(param[1:-1].split("JSON_WITH_LENGTH_")[1])
param = dict((str(x), str(x)) for x in xrange(length))
else:
seeds = {'STRING': 'a', 'INTEGER': "1"}
# The chain to be generated can be just a part of param
start = param.find("[")
end = param.find("]")
seed, length = param[start + 1:end].split("_WITH_LENGTH_")
generated_part = seeds[seed] * int(length)
placeholder = "[" + seed + "_WITH_LENGTH_" + length + "]"
param = param.replace(placeholder, generated_part)
                    if seed == "INTEGER":
param = int(param)
finally:
return param
def generate_fixed_length_params(self, data):
"""
        Generate fixed length data for the elements that match the expression
        [<type>_WITH_LENGTH_<length>]. E.g.: [STRING_WITH_LENGTH_15]
:param data: hash entry
:return data with the desired params with the desired length
"""
try:
for item in data.keys():
data[item] = self.generate_fixed_length_param(data[item])
finally:
return data
def infere_datatypes(self, data):
"""
        Process the input data and replace the values in string format with
the appropriate primitive type, based on its content
:param data: list of items, dict of items or single item
:return processed list of items, dict of items or single item
"""
""" Separate the process of lists, dicts and plain items"""
try:
if isinstance(data, dict): # dict of items
for key in data:
data[key] = self._get_item_with_type(data[key])
elif isinstance(data, list): # list of items
for index in range(len(data)):
data[index] = self._get_item_with_type(data[index])
else: # single item
data = self._get_item_with_type(data)
finally:
return data
def _get_item_with_type(self, data):
"""
Transform data from string to primitive type
:param data: Data to be transformed
:return data with the correct type
"""
if "[TRUE]" in data: # boolean
data = True
elif "[FALSE]" in data: # boolean
data = False
elif data.startswith("{") and data.endswith("}"): # json
data = json.loads(data)
else:
try: # maybe an int
data = int(data)
except:
try: # maybe a float
data = float(data)
except:
pass # if no condition matches, leave the data unchanged
        return data
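# Illustrative usage (not part of the original module): tagged values are expanded and
# type-converted by prepare_data, e.g. '[STRING_WITH_LENGTH_5]' -> 'aaaaa', '10' -> 10,
# '[TRUE]' -> True.
if __name__ == '__main__':
    du = DatasetUtils()
    print(du.prepare_data({'name': '[STRING_WITH_LENGTH_5]', 'count': '10', 'flag': '[TRUE]'}))
|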
from os import getcwd
from os.path import join
PATH = getcwd()
LOG_FILE_DIR = join(PATH, "log")
LOGGER_CONFIG = join(PATH, "utils", "logging.json")
|
from globals import Globals
import os
import subprocess
import datetime as dt
from urllib import \
request as request
# urlopen
from io import \
StringIO, BytesIO
import string
import requests
import re
import csv
import threading
import utils as utils
import time
import datetime as datetime
import multiprocessing
from report import PDFItem
from PyPDF2 import PdfFileReader
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdfpage import PDFPage
from pdfminer.pdfinterp import resolve1
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.pdfpage import PDFTextExtractionNotAllowed
from pdfminer.layout import LAParams # , LTTextBox, LTTextLine
from threading import Thread, Event
stop_event = Event()
global document
class PDFAudit:
def __init__(self):
self.report_folder = ''
self.document_folder = ''
self.pdf_path = ''
self.report_name = ''
self.csv_header = []
self.gbl_report_folder = Globals.gbl_report_folder + self.report_folder
self.log = self.gbl_report_folder + 'logs\\'
self.document_t = PDFDocument
self.parser = PDFParser
self.url = ''
self.line_count = 1
def load_pdf(self, PDFDocument, password):
i = 0
while threading.currentThread().is_alive():
i += 1
report_path = self.report_folder + self.report_name
print('LOADING: ' + i.__str__())
time.sleep(1)
# try:
self.document_t = PDFDocument(self.parser)
# except Exception as e:
# print('PDFDocument(self.parser) FAILED ::::: ' + e.__str__())
if stop_event.is_set():
if i >= 120:
# print(self.parser.fp.name + ' FAILED (SEC): ' + i.__str__())
print(' >>> FAIL : PDF LOAD STOP EVENT : 120 SECONDS')
row = [self.line_count, 'PDFDocument FAILED TO LOAD - 90 SEC TIMEOUT REACHED FOR: ' + self.url,
'', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '',
'', ]
# self.line_count += 1
                    # PDF load timed out or the document failed to parse
with open(report_path, 'a', encoding='utf8', newline='') as csv_file:
writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
writer.dialect.lineterminator.replace('\n', '')
writer.writerow(row)
break
def thread_monitor(self, process_name, thread):
i = 0
while thread.is_alive():
time.sleep(2)
i += 2
print(process_name + ' WORKING FOR ' + i.__str__() + ' seconds for: ' + thread.getName())
print('ACTIVE COUNT: ' + str(threading.active_count()))
if i == 180:
print(thread.getName() + ' KILLED AT 180 SECONDS')
report_path = self.report_folder + self.report_name
row = [self.line_count, 'PDF THREAD FAILED TO PROCESS - 180 SEC TIMEOUT REACHED FOR: ' + self.url,
'', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ]
# self.line_count += 1
                # 180 SECOND TIMEOUT
with open(report_path, 'a', encoding='utf8', newline='') as csv_file:
writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
writer.writerow(row)
break
print(process_name + ':[COMPLETED IN ' + i.__str__() + ' seconds for: ' + thread.getName() + ']')
def pdf_csv(self, csv_to_audit, source_folder, scope):
# Define CSV
self.csv_header = (['csvline', 'url', 'filename', 'local_path',
'encrypted', 'decrypt_pass', 'istagged', 'pages', 'toc', 'form', 'fields', 'tables',
'word_count', 'char_count', 'words_per_page', 'chars_per_word', 'image_count',
'%_img_per_page', 'ocr_risk', 'author', 'creator', 'producer', 'subject', 'title', 'text'])
# root_path = os.path.split(source_folder)[0]
self.report_folder = os.path.split(source_folder)[0].replace('SPIDER', '')
# Set logs
self.log = os.path.join(self.report_folder, 'logs')
if not os.path.exists(self.log):
os.makedirs(self.log)
self.report_folder = os.path.join( self.report_folder, 'PDF')
if not os.path.exists(self.report_folder):
os.makedirs(self.report_folder)
# os.chdir(self.report_folder)
if csv_to_audit.find('internal') >= 0 or scope == 'internal':
self.log = os.path.join(self.log, '_pdf_internal_log.txt')
self.report_name = csv_to_audit[:-4] + '_a.csv'
if csv_to_audit.find('external') >= 0 or scope == 'external':
self.log = os.path.join(self.log, '_pdf_external_log.txt')
self.report_name = csv_to_audit[:-4] + '_a.csv'
self.document_folder = self.report_folder
if not os.path.exists(self.document_folder):
os.makedirs(self.document_folder)
try:
write_header = False
report_path = self.report_folder + self.report_name
if not os.path.exists(report_path):
write_header = True
os.chdir(self.report_folder)
with open(report_path, 'a', encoding='utf8', newline='') as csv_file:
writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
if write_header:
writer.writerow(self.csv_header)
except Exception as e:
print('PDF I/O error: ' + e.__str__())
csv_source = os.path.join(source_folder, csv_to_audit)
row_count = sum(1 for row in csv.reader(open(csv_source, 'r',
encoding='utf8'), delimiter=','))
row_count_i = row_count - 2
with open(csv_source, encoding='utf8') as csv_file:
csv_reader = csv.reader(csv_file, delimiter=',')
# set number of threads
thread_count = 1
destination_folder = self.report_name
# Get URL for PDF from row[1]
# FOR EACH PDF
first_line = True
for row in csv_reader:
pdf_url = row[0]
skip = False
if first_line:
first_line = False
print(' ::: START ALL PDF :::')
continue
elif os.path.exists(destination_folder):
with open(destination_folder, encoding='utf8') as completed_urls:
completed_urls_reader = csv.reader(completed_urls, delimiter=',')
jump = True
fl = True
skip = False
for completed_url in completed_urls_reader:
if fl:
jump = True
fl = False
continue
if pdf_url in completed_url[1]:
msg = (' >>> Remaining PDFs: ' + row_count_i.__str__() + ' out of ' +
row_count.__str__() + ' ' + (datetime.datetime.now().__str__()[:-7]))
row_count_i -= 1
# self.line_count += 1
utils.logline(self.log, msg)
print(msg)
fl = False
skip = True
break
# completed_urls.close()
try:
if skip:
skip = False
continue
self.line_count = csv_reader.line_num
self.url = pdf_url
thread = Thread(target=self.pdf_thread,
args=(pdf_url,))
thread.setDaemon(True)
while threading.active_count() > 35:
print(' !! TAKE 5 !!')
time.sleep(5)
print('RUN AUDIT FOR :: ' + pdf_url + ' ' + thread.getName())
thread.start()
i = 0
thread_monitor = Thread(target=self.thread_monitor,
args=('PDF', thread))
thread_monitor.setDaemon(True)
thread_monitor.start()
time.sleep(5)
msg = (' >>> Remaining PDFs: ' + row_count_i.__str__() + ' out of ' +
row_count.__str__() + ' ' + (datetime.datetime.now().__str__()[:-7]))
row_count_i -= 1
utils.logline(self.log, msg)
print(msg)
except Exception as e:
msg = e.__str__() + ' PDF:01' + '\n'
print(msg)
utils.logline(self.log, msg)
def pdf_thread(self, url):
pdf_name = ''
exit_call = ''
csv_row = []
# save PDF to disk
try:
pdf_name = BytesIO(url.split("/")[-1].encode('UTF-8')).read().__str__()[2:-1]
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
regex = re.compile(valid_chars)
pdf_name = regex.sub('', pdf_name.__str__())
self.pdf_path = self.document_folder + regex.sub('', pdf_name)
r = requests.get(url, headers={'User-Agent': 'Mozilla/5.0'})
with open(self.pdf_path, 'wb') as code:
code.write(r.content)
code.close()
csv_row.insert(0, [self.csv_header[0], self.line_count.__str__()])
csv_row.insert(1, [self.csv_header[1], url if url.__len__() > 0 else 'NULL'])
csv_row.insert(2, [self.csv_header[2], pdf_name if pdf_name.__len__() > 0 else 'NULL'])
csv_row.insert(3, [self.csv_header[3], self.pdf_path if self.pdf_path.__len__() > 0 else 'NULL'])
print(' >>>> PDF START:[' + url + '] ' + self.line_count.__str__() + ' ' + (
datetime.datetime.now().__str__()[:-7]))
except Exception as e:
csv_row.insert(0, [self.csv_header[0], self.line_count.__str__()])
csv_row.insert(1, [self.csv_header[1], url if url.__len__() > 0 else 'NULL'])
csv_row.insert(2, [self.csv_header[2], e.__str__()])
csv_row.insert(3, [self.csv_header[3], self.pdf_path if self.pdf_path.__len__() > 0 else 'NULL'])
print(e)
pass
my_file = os.path.join(self.document_folder + pdf_name)
try:
fp = open(my_file, 'rb')
# self.pdf(fp, csv_row)
except Exception as e:
print(' PDF LOAD FAILED !!! ' + self.line_count.__str__() + ' : ' + self.pdf_path)
csv_row.pop(3)
csv_row.insert(3, [self.csv_header[3], 'PDF FAILED TO OPEN:' + self.pdf_path if self.pdf_path.__len__() > 0 else 'NULL'])
# Write results
row = []
for i in range(csv_row.__len__()):
row.append(csv_row[i][1])
report_path = self.report_folder + self.report_name
row_append = ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '']
index = 4
for ii in row_append:
row.insert(index, ii)
index += 1
# OPEN FAILED
with open(report_path, 'a', encoding='utf8', newline='') as csv_file:
writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
writer.writerow(row)
return
try:
self.pdf(fp, csv_row)
except Exception as e:
print('PDF FAIL')
def pdf(self, fp, csv_row):
password = ''
extracted_text = ''
self.parser = PDFParser(fp)
self.document_t = PDFDocument
pf = PdfFileReader
# isEncrypted
try:
i = 0
try:
thread = Thread(target=self.load_pdf,
args=(PDFDocument, password))
thread.start()
thread.join(timeout=90)
except Exception as e:
print('PDF I/O error: ' + e.__str__())
row = [self.line_count, 'PDF DOCUMENT OBJECT FAILED TO LOAD - ' + e.__str__() + ': ' +
self.url, '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '',
'', ]
# self.line_count += 1
report_path = self.report_folder + self.report_name
# 90 SECONDS or LOAD FAIL
with open(report_path, 'a', encoding='utf8', newline='') as csv_file:
writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
writer.writerow(row)
stop_event.set()
            document = self.document_t
pf = PdfFileReader(BytesIO(open(self.pdf_path, 'rb').read()))
# ENCRYPTION
if self.parser.doc.encryption is not None:
csv_row.insert(4, [self.csv_header[4], 'ENCRYPTED'])
csv_row.insert(5, [self.csv_header[5], 'ENCRYPTED'])
else:
csv_row.insert(4, [self.csv_header[4], 'FALSE'])
csv_row.insert(5, [self.csv_header[5], 'NA'])
except Exception as e:
csv_row.insert(4, [self.csv_header[4], 'FAILED: ' + e.__str__()])
csv_row.insert(5, [self.csv_header[5], 'NA'])
exit_call = e.__str__() + ' document failed!!'
print(exit_call)
pass
page_count = 0
# istagged
try:
pages = PDFPage.get_pages(document)
if not document.is_extractable:
raise PDFTextExtractionNotAllowed
rsrcmgr = PDFResourceManager()
laparams = LAParams()
page_no = 0
istagged = 'FALSE'
try:
# document.catalog
if document.catalog['MarkInfo']:
istagged = 'TRUE'
except Exception as e:
exit_call = e.__str__() + ' tagged info failed!!'
print(exit_call)
page_count = resolve1(document.catalog['Pages'])['Count']
csv_row.insert(6, [self.csv_header[6], istagged])
csv_row.insert(7, [self.csv_header[7], page_count])
except Exception as e:
csv_row.insert(6, [self.csv_header[6], 'IsTagged: ' + e.__str__()])
csv_row.insert(7, [self.csv_header[7], 'Page Count: ' + e.__str__()])
exit_call = e.__str__() + ' tagged info failed!!'
print(exit_call)
# TOC
try:
if pf.outlines:
csv_row.insert(8, [self.csv_header[8], 'TRUE'])
'''pdf_path_toc = self.document_folder + pdf_name + '_toc.txt'
places_list = pf.outlines
with open(pdf_path_toc, 'w') as filehandle:
filehandle.writelines("%s\n" % place for place in places_list)
filehandle.close()'''
else:
csv_row.insert(8, [self.csv_header[8], 'FALSE'])
except Exception as e:
csv_row.insert(8, [self.csv_header[8], 'TOC FAILED: ' + e.__str__()])
exit_call = e.__str__() + ' toc info failed!!'
print(exit_call)
# isForm, fields,
try:
if pf.getFields():
csv_row.insert(9, [self.csv_header[9], 'TRUE'])
csv_row.insert(10, [self.csv_header[10], pf.getFields().__len__()])
else:
csv_row.insert(9, [self.csv_header[9], 'FALSE'])
csv_row.insert(10, [self.csv_header[10], 0])
except Exception as e:
csv_row.insert(9, [self.csv_header[9], 'FORMS: ' + e.__str__()])
csv_row.insert(10, [self.csv_header[10], 'FIELDS: ' + e.__str__()])
exit_call = e.__str__() + ' forms failed!!'
print(exit_call)
# tables
csv_row.insert(11, [self.csv_header[11], 'NOT RUN'])
write_clip = ''
word_count = 0
words_per_page = 0
char_count = 0
chars_per_word = 0
image_count = 0
# TODO: write 3 page sample and word count
try:
if pf.getNumPages() < 50:
for page in range(pf.getNumPages()):
p = pf.getPage(page)
text_clip = p.extractText().encode('UTF-8')
text_clip = BytesIO(text_clip).read().__str__()[2:]
count_clip = re.findall(r"[^\W_]+", text_clip, re.MULTILINE)
word_count += len(count_clip)
char_count += len(text_clip)
if page <= 3:
write_clip += '[ PAGE ' + (page + 1).__str__() + ' START ] '
write_clip += text_clip.replace('\n', '').replace(',', ' ').replace('"', '')
write_clip += '[ PAGE ' + (page + 1).__str__() + ' END ]'
else:
write_clip = 'OVER 50 PAGES - SAMPLE SKIPPED'
except Exception as e:
exit_call = e.__str__() + ' :: TEXT sample failed!!'
write_clip = exit_call
word_count = exit_call
char_count = exit_call
print(exit_call)
# TODO: Words/chars per page
try:
if not word_count == 0:
chars_per_word = char_count / word_count
else:
chars_per_word = 0
if not page_count == 0:
words_per_page = word_count / page_count
else:
words_per_page = 0
except Exception as e:
exit_call = e.__str__() + ' :: WORD METRICS failed!!'
chars_per_word = exit_call
words_per_page = exit_call
print(exit_call)
# TODO: Add to row
i = 12
try:
csv_row.insert(i, [self.csv_header[i], word_count.__str__()])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], 'WORD_COUNT: ' + e.__str__()])
i = 13
try:
csv_row.insert(i, [self.csv_header[i], char_count.__str__()])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], 'CHAR_COUNT: ' + e.__str__()])
i = 14
try:
csv_row.insert(i, [self.csv_header[i], words_per_page.__str__()])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], 'WPP: ' + e.__str__()])
i = 15
try:
csv_row.insert(i, [self.csv_header[i], chars_per_word.__str__()])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], 'CPP: ' + e.__str__()])
# TODO: IMAGES
i = 16
'''try:
pdfImages = Globals.base_folder + 'cli-tools\\pdfimages.exe'
img_folder = self.document_folder + 'images\\' # + pdf_name[:-4] + '\\'
if not os.path.exists(img_folder):
os.makedirs(img_folder)
# cmd = pdfImages + ' -list ' + '\"' + pdf_path + '\"'
# output = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0].split(b'\n')
# save images to disk
cmd = pdfImages + ' -list \"' + self.pdf_path + '\" \"' + ' ' + '\"'
# subprocess.Popen(cmd, stdout=subprocess.PIPE)
os.chdir(img_folder)
image_list = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0].split(b'\r\n')
# os.remove(img_folder)
# image_count = output.count('\n')
image_count = image_list.__len__()
if image_count > 2:
# target = open(pdf_path_image, 'w')
# target.write(image_list)
# target.close()
csv_row.insert(i, [self.csv_header[i], (image_count - 2).__str__()])
elif image_count == 0:
csv_row.insert(i, [self.csv_header[i], 0])
else:
csv_row.insert(i, [self.csv_header[i], 0])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], e.__str__() + ' image info failed!!'])
exit_call = e.__str__() + ' image info failed!!'
print(exit_call)'''
# TODO: IMAGES per page
i = 17
        percent_img_per_page = 0.0
try:
            if image_count != 0 and page_count != 0:
percent_img_per_page = (float(image_count) / float(page_count)) * 100
else:
percent_img_per_page = 0
csv_row.insert(i, [self.csv_header[i], percent_img_per_page])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], 'IMG: ' + e.__str__()])
# TODO: OCR risk
i = 18
try:
if words_per_page == 0 or percent_img_per_page > 3000:
ocr_risk = 5
elif words_per_page < 15 or percent_img_per_page > 2000:
ocr_risk = 4
elif words_per_page < 40 or percent_img_per_page > 1000:
ocr_risk = 3
elif words_per_page < 70 or percent_img_per_page > 425:
ocr_risk = 2
elif words_per_page < 80 or percent_img_per_page > 200:
ocr_risk = 1
else:
ocr_risk = 0
csv_row.insert(i, [self.csv_header[i], ocr_risk])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], 'OCR: ' + e.__str__()])
# author, creator, producer, subject, title,
di = pf
try:
di = pf.documentInfo
except Exception as e:
exit_call = e.__str__() + ' :: DOCUMENT INFO LOAD failed!!'
print(exit_call)
# Document info
if di:
# Author
try:
i = 19
if di.author:
csv_row.insert(i, [self.csv_header[i], di.author.encode('UTF-8')])
else:
csv_row.insert(i, [self.csv_header[i], 'NULL'])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], 'AUTHOR: ' + e.__str__()])
exit_call = e.__str__() + ' doc info failed!!'
print(exit_call)
# Creator
try:
i = 20
if di.creator:
csv_row.insert(i, [self.csv_header[i], di.creator.encode('UTF-8')])
else:
csv_row.insert(i, [self.csv_header[i], 'NULL'])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], 'CREATOR: ' + e.__str__()])
print(exit_call)
print('#5.1')
# Producer
try:
i = 21
if di.producer:
csv_row.insert(i, [self.csv_header[i], di.producer.encode('UTF-8')])
else:
csv_row.insert(i, [self.csv_header[i], 'NULL'])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], 'PRODUCER: ' + e.__str__()])
print(exit_call)
# Subject
try:
i = 22
if di.subject:
csv_row.insert(i, [self.csv_header[i], di.subject.encode('UTF-8')])
else:
csv_row.insert(i, [self.csv_header[i], 'NULL'])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], 'SUBJECT: ' + e.__str__()])
print(exit_call)
# Title
try:
i = 23
if di.title:
csv_row.insert(i, [self.csv_header[i], di.title.encode('UTF-8')])
else:
csv_row.insert(i, [self.csv_header[i], 'NULL'])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], 'TITLE: ' + e.__str__()])
print(exit_call)
# Document clip
i = 24
try:
csv_row.insert(i, [self.csv_header[i], write_clip])
except Exception as e:
csv_row.insert(i, [self.csv_header[i], e.__str__()])
# Write results
row = []
for i in range(csv_row.__len__()):
row.append(csv_row[i][1])
report_path = self.report_folder + self.report_name
        # COMPLETE WRITE
with open(report_path, 'a', encoding='utf8', newline='') as csv_file:
writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
writer.writerow(row)
# csv_file.close()
fp.close()
os.remove(self.pdf_path)
# Log close
msg = (' >>>> PDF complete:[' + self.url + '] ' + self.line_count.__str__() + ' ' +
(datetime.datetime.now().__str__()[:-7]))
print(msg)
utils.logline(self.log, msg)
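# Illustrative usage sketch (not part of the original run flow): feeding a crawl CSV
# into PDFAudit. The CSV file name and SPIDER folder below are assumptions for
# demonstration only; pdf_csv() expects the crawl output produced elsewhere.
if __name__ == '__main__':
    auditor = PDFAudit()
    auditor.pdf_csv('pdf_urls_internal.csv', 'C:\\reports\\site\\SPIDER', 'internal')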
|
"""
This file is derived from the original XNAS source code,
and has not been rigorously tested subsequently.
If you find it useful, please open an issue to tell us.
recoded file path: XNAS/tools/sng_nasbench1shot1.py
"""
import argparse
import os
import ConfigSpace
import numpy as np
from nasbench import api
from xnas.algorithms.SNG.ASNG import ASNG, Dynamic_ASNG
from xnas.algorithms.SNG.DDPNAS import CategoricalDDPNAS
from xnas.algorithms.SNG.GridSearch import GridSearch
from xnas.algorithms.SNG.MDENAS import CategoricalMDENAS
from xnas.algorithms.SNG.MIGO import MIGO
from xnas.algorithms.SNG.SNG import SNG, Dynamic_SNG
from xnas.spaces.NASBench1Shot1.ops import *
from xnas.logger.timer import Timer
from xnas.core.utils import one_hot_to_index
def get_optimizer(name, category, step=4, gamma=0.9, sample_with_prob=True, utility_function='log', utility_function_hyper=0.4):
if name == 'SNG':
return SNG(categories=category)
elif name == 'ASNG':
return ASNG(categories=category)
elif name == 'dynamic_SNG':
return Dynamic_SNG(categories=category, step=step,
pruning=True, sample_with_prob=sample_with_prob)
elif name == 'dynamic_ASNG':
return Dynamic_ASNG(categories=category, step=step, pruning=True, sample_with_prob=sample_with_prob)
elif name == 'DDPNAS':
return CategoricalDDPNAS(category, 3)
elif name == 'MDENAS':
return CategoricalMDENAS(category, 0.01)
elif name == 'MIGO':
return MIGO(categories=category, step=step,
pruning=False, sample_with_prob=sample_with_prob,
utility_function='log', utility_function_hyper=utility_function_hyper,
momentum=True, gamma=gamma, dynamic_sampling=False)
elif name == 'GridSearch':
return GridSearch(category)
else:
raise NotImplementedError
class Reward(object):
"""Computes the fitness of a sampled model by querying NASBench."""
def __init__(self, space, nasbench, budget):
self.space = space
self.nasbench = nasbench
self.budget = budget
def compute_reward(self, sample):
config = ConfigSpace.Configuration(
self.space.get_configuration_space(), vector=sample)
y, c = self.space.objective_function(
self.nasbench, config, budget=self.budget)
fitness = float(y)
return fitness
def get_accuracy(self, sample):
# return test_accuracy of a sample
config = ConfigSpace.Configuration(
self.space.get_configuration_space(), vector=sample)
adjacency_matrix, node_list = self.space.convert_config_to_nasbench_format(
config)
node_list = [INPUT, *node_list, OUTPUT] if self.space.search_space_number == 3 else [INPUT, *node_list, CONV1X1, OUTPUT]
        adjacency_list = adjacency_matrix.astype(int).tolist()
model_spec = api.ModelSpec(matrix=adjacency_list, ops=node_list)
nasbench_data = self.nasbench.query(model_spec, epochs=self.budget)
return nasbench_data['test_accuracy']
def run(space=1, optimizer_name='SNG', budget=108, runing_times=500, runing_epochs=200,
step=4, gamma=0.9, save_dir=None, nasbench=None, noise=0.0, sample_with_prob=True, utility_function='log',
utility_function_hyper=0.4):
print('##### Search Space {} #####'.format(space))
search_space = eval('SearchSpace{}()'.format(space))
cat_variables = []
cs = search_space.get_configuration_space()
for h in cs.get_hyperparameters():
if type(h) == ConfigSpace.hyperparameters.CategoricalHyperparameter:
cat_variables.append(len(h.choices))
# get category using cat_variables
category = cat_variables
distribution_optimizer = get_optimizer(optimizer_name, category, step=step, gamma=gamma,
sample_with_prob=sample_with_prob, utility_function=utility_function,
utility_function_hyper=utility_function_hyper)
# path to save the test_accuracy
    file_name = '{}_{}_{}_{}_{}_{}_{}_{}_{}.npz'.format(
        optimizer_name, str(space), str(runing_epochs), str(step), str(gamma),
        str(noise), str(sample_with_prob), utility_function, str(utility_function_hyper))
file_name = os.path.join(save_dir, file_name)
nb_reward = Reward(search_space, nasbench, budget)
record = {
'validation_accuracy': np.zeros([runing_times, runing_epochs]) - 1,
'test_accuracy': np.zeros([runing_times, runing_epochs]) - 1,
}
last_test_accuracy = np.zeros([runing_times])
running_time_interval = np.zeros([runing_times, runing_epochs])
test_accuracy = 0
run_timer = Timer()
for i in range(runing_times):
for j in range(runing_epochs):
run_timer.tic()
if hasattr(distribution_optimizer, 'training_finish') or j == (runing_epochs - 1):
last_test_accuracy[i] = test_accuracy
if hasattr(distribution_optimizer, 'training_finish'):
if distribution_optimizer.training_finish:
break
sample = distribution_optimizer.sampling()
sample_index = one_hot_to_index(np.array(sample))
validation_accuracy = nb_reward.compute_reward(sample_index)
distribution_optimizer.record_information(
sample, validation_accuracy)
distribution_optimizer.update()
current_best = np.argmax(
distribution_optimizer.p_model.theta, axis=1)
test_accuracy = nb_reward.get_accuracy(current_best)
record['validation_accuracy'][i, j] = validation_accuracy
record['test_accuracy'][i, j] = test_accuracy
run_timer.toc()
running_time_interval[i, j] = run_timer.diff
del distribution_optimizer
distribution_optimizer = get_optimizer(optimizer_name, category, step=step, gamma=gamma,
sample_with_prob=sample_with_prob, utility_function=utility_function,
utility_function_hyper=utility_function_hyper)
np.savez(file_name, record['test_accuracy'], running_time_interval)
return distribution_optimizer
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--space", help="search space name in [1,2,3]", type=int, default=1)
parser.add_argument("--optimizer", help="dicrete level", type=str, default='MIGO')
parser.add_argument("--step", help="pruning step", type=int, default=4)
parser.add_argument("--gamma", help="gamma value", type=float, default=0.2)
parser.add_argument("--noise", help="noise std", type=float, default=0.0)
parser.add_argument("-uh", "--utility_function_hyper",
help="the factor of utility_function", type=float, default=0.4)
parser.add_argument("-ut", "--utility_function_type", help="the type of utility_function", type=str, default='log')
parser.add_argument("-sp", "--sample_with_prob", action='store_true', default=True)
parser.add_argument("--save_dir", help="save directory", type=str, default='experiment/sng_nasbench1shot1')
args = parser.parse_args()
# get nasbench
nasbench_path = 'benchmark/nasbench_full.tfrecord'
nasbench = api.NASBench(nasbench_path)
# get args
space = args.space
step = args.step
gamma = args.gamma
save_dir = args.save_dir
optimizer_name = args.optimizer
print("space = {}, step = {}, gamma = {}, optimizer = {}, noise_std = {}, utility_function_hyper = {}, utility_function_type = {}, sample_with_prob = {}".format(
str(space), str(step), str(gamma), optimizer_name, str(args.noise), str(args.utility_function_hyper), args.utility_function_type, str(args.sample_with_prob)))
run(space, optimizer_name=optimizer_name, runing_times=1, runing_epochs=100,
step=step, gamma=gamma, save_dir=save_dir, nasbench=nasbench, noise=args.noise, sample_with_prob=args.sample_with_prob,
utility_function=args.utility_function_type, utility_function_hyper=args.utility_function_hyper)
|
import asyncio
import os
import requests
import pytest
import starlette.responses
import ray
from ray import serve
from ray._private.test_utils import SignalActor, wait_for_condition
from ray.serve.application import Application
@serve.deployment()
def sync_d():
return "sync!"
@serve.deployment()
async def async_d():
return "async!"
@serve.deployment
class Counter:
def __init__(self):
self.count = 0
def __call__(self):
self.count += 1
return {"count": self.count}
@serve.deployment
class AsyncCounter:
async def __init__(self):
await asyncio.sleep(0.01)
self.count = 0
async def __call__(self):
self.count += 1
await asyncio.sleep(0.01)
return {"count": self.count}
def test_e2e(serve_instance):
@serve.deployment(name="api")
def function(starlette_request):
return {"method": starlette_request.method}
function.deploy()
resp = requests.get("http://127.0.0.1:8000/api").json()["method"]
assert resp == "GET"
resp = requests.post("http://127.0.0.1:8000/api").json()["method"]
assert resp == "POST"
def test_starlette_response(serve_instance):
@serve.deployment(name="basic")
def basic(_):
return starlette.responses.Response("Hello, world!", media_type="text/plain")
basic.deploy()
assert requests.get("http://127.0.0.1:8000/basic").text == "Hello, world!"
@serve.deployment(name="html")
def html(_):
return starlette.responses.HTMLResponse(
"<html><body><h1>Hello, world!</h1></body></html>"
)
html.deploy()
assert (
requests.get("http://127.0.0.1:8000/html").text
== "<html><body><h1>Hello, world!</h1></body></html>"
)
@serve.deployment(name="plain_text")
def plain_text(_):
return starlette.responses.PlainTextResponse("Hello, world!")
plain_text.deploy()
assert requests.get("http://127.0.0.1:8000/plain_text").text == "Hello, world!"
@serve.deployment(name="json")
def json(_):
return starlette.responses.JSONResponse({"hello": "world"})
json.deploy()
assert requests.get("http://127.0.0.1:8000/json").json()["hello"] == "world"
@serve.deployment(name="redirect")
def redirect(_):
return starlette.responses.RedirectResponse(url="http://127.0.0.1:8000/basic")
redirect.deploy()
assert requests.get("http://127.0.0.1:8000/redirect").text == "Hello, world!"
@serve.deployment(name="streaming")
def streaming(_):
async def slow_numbers():
for number in range(1, 4):
yield str(number)
await asyncio.sleep(0.01)
return starlette.responses.StreamingResponse(
slow_numbers(), media_type="text/plain", status_code=418
)
streaming.deploy()
resp = requests.get("http://127.0.0.1:8000/streaming")
assert resp.text == "123"
assert resp.status_code == 418
@pytest.mark.parametrize("use_async", [False, True])
def test_deploy_function_no_params(serve_instance, use_async):
serve.start()
if use_async:
expected_output = "async!"
deployment_cls = async_d
else:
expected_output = "sync!"
deployment_cls = sync_d
deployment_cls.deploy()
assert (
requests.get(f"http://localhost:8000/{deployment_cls.name}").text
== expected_output
)
assert ray.get(deployment_cls.get_handle().remote()) == expected_output
@pytest.mark.parametrize("use_async", [False, True])
def test_deploy_function_no_params_call_with_param(serve_instance, use_async):
serve.start()
if use_async:
expected_output = "async!"
deployment_cls = async_d
else:
expected_output = "sync!"
deployment_cls = sync_d
deployment_cls.deploy()
assert (
requests.get(f"http://localhost:8000/{deployment_cls.name}").text
== expected_output
)
with pytest.raises(
TypeError, match=r"\(\) takes 0 positional arguments but 1 was given"
):
assert ray.get(deployment_cls.get_handle().remote(1)) == expected_output
with pytest.raises(TypeError, match=r"\(\) got an unexpected keyword argument"):
assert ray.get(deployment_cls.get_handle().remote(key=1)) == expected_output
@pytest.mark.parametrize("use_async", [False, True])
def test_deploy_class_no_params(serve_instance, use_async):
serve.start()
if use_async:
deployment_cls = AsyncCounter
else:
deployment_cls = Counter
deployment_cls.deploy()
assert requests.get(f"http://127.0.0.1:8000/{deployment_cls.name}").json() == {
"count": 1
}
assert requests.get(f"http://127.0.0.1:8000/{deployment_cls.name}").json() == {
"count": 2
}
assert ray.get(deployment_cls.get_handle().remote()) == {"count": 3}
def test_user_config(serve_instance):
@serve.deployment("counter", num_replicas=2, user_config={"count": 123, "b": 2})
class Counter:
def __init__(self):
self.count = 10
def __call__(self, *args):
return self.count, os.getpid()
def reconfigure(self, config):
self.count = config["count"]
Counter.deploy()
handle = Counter.get_handle()
def check(val, num_replicas):
pids_seen = set()
for i in range(100):
result = ray.get(handle.remote())
if str(result[0]) != val:
return False
pids_seen.add(result[1])
return len(pids_seen) == num_replicas
wait_for_condition(lambda: check("123", 2))
Counter = Counter.options(num_replicas=3)
Counter.deploy()
wait_for_condition(lambda: check("123", 3))
Counter = Counter.options(user_config={"count": 456})
Counter.deploy()
wait_for_condition(lambda: check("456", 3))
def test_reject_duplicate_route(serve_instance):
@serve.deployment(name="A", route_prefix="/api")
class A:
pass
A.deploy()
with pytest.raises(ValueError):
A.options(name="B").deploy()
def test_scaling_replicas(serve_instance):
@serve.deployment(name="counter", num_replicas=2)
class Counter:
def __init__(self):
self.count = 0
def __call__(self, _):
self.count += 1
return self.count
Counter.deploy()
counter_result = []
for _ in range(10):
resp = requests.get("http://127.0.0.1:8000/counter").json()
counter_result.append(resp)
    # If the load is shared among two replicas, the max result cannot be 10.
assert max(counter_result) < 10
Counter.options(num_replicas=1).deploy()
counter_result = []
for _ in range(10):
resp = requests.get("http://127.0.0.1:8000/counter").json()
counter_result.append(resp)
    # Give some time for a replica to spin down. But the majority of the requests
    # should be served by the only remaining replica.
assert max(counter_result) - min(counter_result) > 6
def test_delete_deployment(serve_instance):
@serve.deployment(name="delete")
def function(_):
return "hello"
function.deploy()
assert requests.get("http://127.0.0.1:8000/delete").text == "hello"
function.delete()
@serve.deployment(name="delete")
def function2(_):
return "olleh"
function2.deploy()
wait_for_condition(
lambda: requests.get("http://127.0.0.1:8000/delete").text == "olleh", timeout=6
)
@pytest.mark.parametrize("blocking", [False, True])
def test_delete_deployment_group(serve_instance, blocking):
@serve.deployment(num_replicas=1)
def f(*args):
return "got f"
@serve.deployment(num_replicas=2)
def g(*args):
return "got g"
# Check redeploying after deletion
for _ in range(2):
f.deploy()
g.deploy()
wait_for_condition(
lambda: requests.get("http://127.0.0.1:8000/f").text == "got f", timeout=5
)
wait_for_condition(
lambda: requests.get("http://127.0.0.1:8000/g").text == "got g", timeout=5
)
# Check idempotence
for _ in range(2):
serve_instance.delete_deployments(["f", "g"], blocking=blocking)
wait_for_condition(
lambda: requests.get("http://127.0.0.1:8000/f").status_code == 404,
timeout=5,
)
wait_for_condition(
lambda: requests.get("http://127.0.0.1:8000/g").status_code == 404,
timeout=5,
)
def test_starlette_request(serve_instance):
@serve.deployment(name="api")
async def echo_body(starlette_request):
data = await starlette_request.body()
return data
echo_body.deploy()
# Long string to test serialization of multiple messages.
UVICORN_HIGH_WATER_MARK = 65536 # max bytes in one message
long_string = "x" * 10 * UVICORN_HIGH_WATER_MARK
resp = requests.post("http://127.0.0.1:8000/api", data=long_string).text
assert resp == long_string
def test_start_idempotent(serve_instance):
@serve.deployment(name="start")
def func(*args):
pass
func.deploy()
assert "start" in serve.list_deployments()
serve.start(detached=True)
serve.start()
serve.start(detached=True)
serve.start()
assert "start" in serve.list_deployments()
def test_shutdown_destructor(serve_instance):
signal = SignalActor.remote()
@serve.deployment
class A:
def __del__(self):
signal.send.remote()
A.deploy()
A.delete()
ray.get(signal.wait.remote(), timeout=10)
# If the destructor errored, it should be logged but also cleaned up.
@serve.deployment
class B:
def __del__(self):
raise RuntimeError("Opps")
B.deploy()
B.delete()
def test_run_get_ingress_app(serve_instance):
"""Check that serve.run() with an app returns the ingress."""
@serve.deployment(route_prefix=None)
def f():
return "got f"
@serve.deployment(route_prefix="/g")
def g():
return "got g"
app = Application([f, g])
ingress_handle = serve.run(app)
assert ray.get(ingress_handle.remote()) == "got g"
serve_instance.delete_deployments(["f", "g"])
no_ingress_app = Application([f.options(route_prefix="/f"), g])
ingress_handle = serve.run(no_ingress_app)
assert ingress_handle is None
def test_run_get_ingress_node(serve_instance):
"""Check that serve.run() with a node returns the ingress."""
@serve.deployment
class Driver:
def __init__(self, dag):
self.dag = dag
async def __call__(self, *args):
return await self.dag.remote()
@serve.deployment
class f:
def __call__(self, *args):
return "got f"
dag = Driver.bind(f.bind())
ingress_handle = serve.run(dag)
assert ray.get(ingress_handle.remote()) == "got f"
class TestSetOptions:
def test_set_options_basic(self):
@serve.deployment(
num_replicas=4,
max_concurrent_queries=3,
prev_version="abcd",
ray_actor_options={"num_cpus": 2},
_health_check_timeout_s=17,
)
def f():
pass
f.set_options(
num_replicas=9,
prev_version="abcd",
version="efgh",
ray_actor_options={"num_gpus": 3},
)
assert f.num_replicas == 9
assert f.max_concurrent_queries == 3
assert f.prev_version == "abcd"
assert f.version == "efgh"
assert f.ray_actor_options == {"num_gpus": 3}
assert f._config.health_check_timeout_s == 17
def test_set_options_validation(self):
@serve.deployment
def f():
pass
with pytest.raises(TypeError):
f.set_options(init_args=-4)
with pytest.raises(ValueError):
f.set_options(max_concurrent_queries=-4)
if __name__ == "__main__":
import sys
sys.exit(pytest.main(["-v", "-s", __file__]))
|
import importlib
import threading
import requests
from discord_interactions import verify_key_decorator
from flask import Blueprint, abort, send_file, request, jsonify
from globals import tokens, running_interactions, client
from statics import config
views = Blueprint("misc", __name__)
@views.route("/pictures/<guild_id>/<dl_token>/")
def pictures(guild_id, dl_token):
try:
if tokens[int(guild_id)]["pictures"] != dl_token:
abort(401)
except KeyError:
abort(401)
return send_file(f"web/picture_downloads/{guild_id}.zip")
@views.route("/interaction/", methods=["POST"])
@verify_key_decorator(config.DISCORD_PUBLIC_KEY)
def interaction():
req = request.json
if req["type"] == 1:
return {"type": 1}
if "options" not in req["data"].keys():
req["data"]["options"] = []
location = [req["data"]["name"]]
option_level = req["data"]["options"]
if len(option_level) > 0:
while "options" in option_level[0].keys():
location.append(option_level[0]["name"])
option_level = option_level[0]["options"]
location = "interactions.commands." + ".".join(location)
try:
module = importlib.import_module(location) # "slash-commands."+req["data"]["name"])
except ModuleNotFoundError:
return jsonify({"type": 4,
"data": {
"tts": False,
"content": "Huch! Das sollte nicht passieren, aber das Feature gibts irgendwie nicht...",
"embeds": [],
"allowed_mentions": []
}
})
# res = module.run(req, client=client, options=option_level)
name = req["guild_id"] + f"_{location}"
if name in [thread.name for thread in threading.enumerate()]:
r = requests.get(f"https://discord.com/api/v8/webhooks/{config.DISCORD_CLIENT_ID}/{running_interactions[req['guild_id']][location]['token']}/messages/@original")
message_id = r.json()["id"]
m_url = f"https://discord.com/channels/{req['guild_id']}/{req['channel_id']}/{message_id}"
return jsonify({"type": 4,
"data": {
"tts": False,
"content": "Eine solche Interaktion läuft bereits! Bitte warte, bis diese abgeschlossen ist.\nZu finden unter: " + m_url,
"embeds": [],
"allowed_mentions": []
}
})
res_url = f"https://discord.com/api/v8/webhooks/{config.DISCORD_CLIENT_ID}/{req['token']}/messages/@original"
t = threading.Thread(name=name, target=module.run, args=[req], kwargs={"client": client, "options": option_level, "res_url": res_url})
t.start()
if req["guild_id"] not in running_interactions.keys():
running_interactions[req["guild_id"]] = {}
running_interactions[req["guild_id"]][location] = {"token": req["token"]}
return jsonify(
{
"type": 5
}
)
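# Illustrative sketch (assumed wiring, not part of this module): the blueprint above is
# normally registered on the Flask application elsewhere in the project, roughly like
# this. The port number is a placeholder.
if __name__ == "__main__":
    from flask import Flask
    demo_app = Flask(__name__)
    demo_app.register_blueprint(views)
    demo_app.run(port=8080)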
|
# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
# SPDX-License-Identifier: MIT
"""
This example uses adafruit_display_text.label to display text using a custom font
loaded by adafruit_bitmap_font.
Adapted for use on MagTag
"""
import time
import board
from adafruit_display_text import label
from adafruit_bitmap_font import bitmap_font
# use built in display (MagTag, PyPortal, PyGamer, PyBadge, CLUE, etc.)
# see guide for setting up external displays (TFT / OLED breakouts, RGB matrices, etc.)
# https://learn.adafruit.com/circuitpython-display-support-using-displayio/display-and-display-bus
display = board.DISPLAY
# wait until we can refresh the display
time.sleep(display.time_to_refresh)
# try uncommenting different font files if you like
font_file = "fonts/LeagueSpartan-Bold-16.bdf"
# font_file = "fonts/Junction-regular-24.pcf"
# Set text, font, and color
text = "HELLO WORLD\nbitmap_font example"
font = bitmap_font.load_font(font_file)
color = 0xFFFFFF
background_color = 0x999999
# Create the text label
text_area = label.Label(
font,
text=text,
color=color,
background_color=background_color,
padding_top=3,
padding_bottom=3,
padding_right=4,
padding_left=4,
)
text_area.line_spacing = 1.0
# Set the location
text_area.x = 20
text_area.y = 20
# Show it and refresh
display.show(text_area)
display.refresh()
while True:
pass
|
import requests
from bs4 import BeautifulSoup
from settings import DATAFILE, OUTFILE
def main():
html_doc = open(DATAFILE).read()
soup = BeautifulSoup(html_doc, 'html.parser')
with open(OUTFILE, 'w') as open_file:
for _ in soup('dt'):
if _.h3:
print(_.h3.string)
open_file.write('{}\n'.format(_.h3.string))
elif _.a:
print('|-{}'.format(_.a.string))
try:
r = requests.get(_.a['href'])
if r.status_code == 404:
open_file.write('|-{}\n'.format(_.a.string))
except requests.exceptions.ConnectionError:
open_file.write('|-{}\n'.format(_.a.string))
if __name__ == '__main__':
main()
|
import pandas as pd
visits = pd.read_csv('./analysis/data/pagevisitsfunnel/visits.csv', parse_dates=[1])
cart = pd.read_csv('./analysis/data/pagevisitsfunnel/cart.csv', parse_dates=[1])
checkout = pd.read_csv('./analysis/data/pagevisitsfunnel/checkouts.csv', parse_dates=[1])
purchase = pd.read_csv('./analysis/data/pagevisitsfunnel/purchase.csv', parse_dates=[1])
#print(visits.head())
#print(cart.head())
#print(checkout.head())
#print(purchase.head())
#Left Join visits and cart (include visits rows without a corresponding entry in cart, but not the opposite)
v_to_c = pd.merge(visits, cart, how="left")
# Check which rows don't have any cart_time value
not_cart = v_to_c[v_to_c['cart_time'].isnull()]
print('### Visits/Cart Merge ###')
print(v_to_c)
print("Visit/cart #rows: %d"%(len(v_to_c)))
print("Null cart time #rows: %d"%(len(not_cart)))
empty_percentage = float(len(not_cart))/len(v_to_c)
print("Percent of empty cart #rows: %.2f%%"%( empty_percentage *100))
print('##########################')
#Left join cart and checkout entries
cart_to_checkout = pd.merge(cart, checkout, how="left")
#print(cart_to_checkout)
#Calculate entries without checkout time
not_checkout = cart_to_checkout[cart_to_checkout['checkout_time'].isnull()]
#Compute percentage of empty checkouts
empty_percentage_checkout = float(len(not_checkout))/len(cart_to_checkout)
print("Percent of empty checkout time rows: %.2f"%( empty_percentage_checkout ))
#Left join all the datasets
all_data = visits.merge(cart,how="left").merge(checkout,how="left").merge(purchase,how="left")
print("### All datasets merge head ###")
print(all_data.head())
print('###############################')
all_data_len = float(len(all_data))
checkout_not_purchase = all_data[ (all_data['checkout_time'].notnull()) & (all_data['purchase_time'].isnull())]
print("Percentage of checkouts without purchase %f."% ( float(len(checkout_not_purchase)) / all_data_len))
null_cart = all_data[ all_data['cart_time'].isnull()]
print("Percentage of null cart step %f."% ( float(len(null_cart)) / all_data_len))
null_checkouts = all_data[ (all_data['checkout_time'].isnull()) & (all_data['cart_time'].notnull())]
print("Percentage of null checkout step %f."% ( float(len(null_checkouts)) / all_data_len))
null_purchase = all_data[ (all_data['purchase_time'].isnull()) & (all_data['checkout_time'].notnull())]
print("Percentage of null purchase step %f."% ( float(len(null_purchase)) / all_data_len))
all_data['time_to_purchase'] = all_data.purchase_time - all_data.visit_time
print("### Time to purchase ###")
#print(all_data.time_to_purchase)
print(all_data.time_to_purchase.mean())
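# Illustrative follow-up sketch (same dataframes as above): the overall visit-to-purchase
# conversion, computed with the same null-check approach as the per-step percentages.
overall_conversion = float(len(all_data[all_data['purchase_time'].notnull()])) / all_data_len
print("Overall visit-to-purchase conversion %f." % overall_conversion)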
|
#!/usr/bin/env python3
import sqlite3
from datetime import datetime
import config
# set pathname
database_file = config.database_file
class SqlConnect:
def __init__(self, filename):
self.filename = filename
def __enter__(self):
self.connection = sqlite3.connect(self.filename)
self.cursor = self.connection.cursor()
return self.cursor
def __exit__(self, exc_type, exc_value, exc_traceback):
self.connection.commit()
self.connection.close()
def create_table(table):
with SqlConnect(database_file) as cursor:
cursor.execute(f'''
CREATE TABLE {table} (
item TEXT,
price INTEGER DEFAULT 0,
paid TEXT DEFAULT 'No',
payment_date TEXT);
''')
default_value = [
('Iriz' ,),
('CC Maybank' ,),
('CC Shopee' ,),
('CC CIMB' ,),
('Taska Batrisyia' ,),
('PAIP Chatin Hill' ,),
('PAIP Seri Chatin' ,),
('TNB Chatin Hill' ,),
('TNB Seri Chatin' ,),
('Mak' ,),
('Wife' ,),
('Saga abah' ,),]
cursor.executemany(f'''
INSERT INTO {table} (item) VALUES (?);
''', default_value)
def check_payment(table):
with SqlConnect(database_file) as cursor:
db_data = cursor.execute(f'''
SELECT rowid, paid, item, price FROM {table};
''').fetchall()
total = cursor.execute(f'''
SELECT SUM(price) FROM {table}
''').fetchone()
item_str = f'Total price: RM{total[0]}\n\nID - Status - Item\n'
for i in db_data:
item_str += f'{i[0]}. {i[1]} - {i[2]} (RM{i[3]})\n'
return item_str
def update_data(table, rowid, price):
with SqlConnect(database_file) as cursor:
cursor.execute(f'''
UPDATE {table}
SET price = ?, paid = ?, payment_date = ?
WHERE rowid = {rowid};
''', [price, 'Yes', datetime.now().strftime('%d-%b-%y')])
def get_rowid_name(table, rowid):
with SqlConnect(database_file) as cursor:
name = cursor.execute(f'''
SELECT item FROM {table} WHERE rowid = {rowid};
''').fetchone()
return name[0]
def sql_command(command):
with SqlConnect(database_file) as cursor:
data = cursor.execute(command).fetchall()
output = f'# Sql command:\n{command}\n# Sql output:'
for i in data:
output += f'\n {i}'
return output
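# Illustrative usage sketch (hypothetical table name): the typical monthly flow with the
# helpers above. Assumes config.database_file points at a writable SQLite file.
if __name__ == '__main__':
    table = 'bills_2022_03'            # hypothetical month table
    create_table(table)                # create the table and seed the default items
    update_data(table, 1, 250)         # mark rowid 1 as paid (RM250, dated today)
    print(get_rowid_name(table, 1))    # -> 'Iriz'
    print(check_payment(table))        # summary of all items and the total price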
|
r"""
Mappings from `Ontology`\ s to particular languages.
"""
from attr import attrib, attrs
from attr.validators import instance_of
from immutablecollections import ImmutableSet, ImmutableSetMultiDict
from immutablecollections.converter_utils import _to_immutablesetmultidict
from vistautils.preconditions import check_arg
from adam.language.lexicon import LexiconEntry
from adam.ontology import OntologyNode
from adam.ontology.ontology import Ontology
@attrs(frozen=True, slots=True)
class OntologyLexicon:
r"""
A mapping from `OntologyNode`\ s to words.
This will become more sophisticated in the future.
"""
ontology: Ontology = attrib(validator=instance_of(Ontology))
"""
The ontology this lexicon assigns words to.
"""
_ontology_node_to_word: ImmutableSetMultiDict[OntologyNode, LexiconEntry] = attrib(
converter=_to_immutablesetmultidict
)
r"""
Maps `OntologyNode`\ s to `LexiconEntry`\ s which describe them in some particular language.
"""
def words_for_node(self, node: OntologyNode) -> ImmutableSet[LexiconEntry]:
"""
Get the translation for an `OntologyNode`, if available.
Args:
node: The `OntologyNode` whose translation you want.
Returns:
The translation, if available.
"""
return self._ontology_node_to_word[node]
def __attrs_post_init__(self) -> None:
for node in self._ontology_node_to_word:
check_arg(
node in self.ontology,
f"Ontology lexicon refers to non-ontology node {node}",
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Sword York
# GitHub: https://github.com/SwordYork/sequencing
# No rights reserved.
#
import random
from collections import deque, namedtuple
import subprocess
import numpy
import tensorflow as tf
import sequencing as sq
from sequencing import MODE, TIME_MAJOR
def build_parallel_char_inputs(src_vocab, trg_vocab, src_data_file,
trg_data_file, batch_size, buffer_size=16,
rand_append=True, mode=MODE.TRAIN):
# data file should be preprocessed. For example, tokenize and remove long
# lines.
input_tuple = namedtuple('inputs',
['src', 'src_len',
'src_sample_matrix','src_word_len',
'trg', 'trg_len'])
def _parallel_generator():
read_buffer = deque(maxlen=buffer_size)
should_stop_read = False
if mode == MODE.TRAIN:
tf.logging.info('Shuffling ......')
subprocess.call(['./shuffle_data.sh', src_data_file, trg_data_file])
src_data = open(src_data_file + '.shuf', 'r')
trg_data = open(trg_data_file + '.shuf', 'r')
tf.logging.info('Shuffle done ......')
else:
src_data = open(src_data_file, 'r')
trg_data = open(trg_data_file, 'r')
while True:
if not read_buffer and should_stop_read:
# should_stop_read will never be True when TRAIN
break
if not read_buffer:
# read_buffer is empty
# we read a lot of sentences to read_buffer for sorting and
# caching
buffer_batch = []
for _ in range(buffer_size * batch_size):
s = src_data.readline()
t = trg_data.readline()
if not s or not t:
if s:
tf.logging.warning(
'The source data file contains '
'more sentences!')
if t:
tf.logging.warning(
'The target data file contains '
'more sentences!')
if mode == MODE.TRAIN:
# one of the files is reaching end of file
tf.logging.info('Read from head ......')
src_data.close()
trg_data.close()
# shuf and reopen
tf.logging.info('Shuffling ......')
subprocess.call(['./shuffle_data.sh', src_data_file, trg_data_file])
src_data = open(src_data_file + '.shuf', 'r')
trg_data = open(trg_data_file + '.shuf', 'r')
tf.logging.info('Shuffle done ......')
s = src_data.readline()
t = trg_data.readline()
else:
src_data.close()
trg_data.close()
should_stop_read = True
break
# impossible for s, t to be None
src_char_ids = src_vocab.string_to_ids(s)[:-1] \
+ [src_vocab.space_id, src_vocab.eos_id, src_vocab.space_id]
buffer_batch.append((src_char_ids,
trg_vocab.string_to_ids(t)))
# sort by length if train
if mode == MODE.TRAIN:
buffer_batch = sorted(buffer_batch,
key=lambda l: len(l[1]))
total_batches = len(buffer_batch) // batch_size
# smaller batch
if len(buffer_batch) % batch_size > 0:
total_batches += 1
for i in range(total_batches):
if i == (total_batches - 1):
# take all in last
lines = buffer_batch[i * batch_size:]
else:
lines = buffer_batch[
i * batch_size:(i + 1) * batch_size]
num_lines = len(lines)
src_word_len_np = numpy.asarray([l[0].count(src_vocab.space_id) for l in lines],
dtype=numpy.int32)
max_word_length = src_word_len_np.max()
src_len_np = numpy.asarray([len(l[0]) + max_word_length - src_word_len_np[li]
for li, l in enumerate(lines)],
dtype=numpy.int32)
trg_len_np = numpy.asarray([len(l[1]) for l in lines],
dtype=numpy.int32)
src_sample_matrix_np = numpy.zeros((num_lines, max_word_length,
src_len_np.max()),
dtype=numpy.float32)
if TIME_MAJOR:
# fill with eos
src_np = numpy.full((src_len_np.max(), num_lines),
src_vocab.eos_id,
dtype=numpy.int32)
trg_np = numpy.full((trg_len_np.max(), num_lines),
trg_vocab.eos_id,
dtype=numpy.int32)
for idx, l in enumerate(lines):
src_np[:len(l[0]), idx] = l[0]
src_np[len(l[0]):src_len_np[idx], idx] = src_vocab.space_id
src_sample_matrix_np[idx, range(max_word_length),
numpy.where(src_np[:, idx] == src_vocab.space_id)[0]] = 1.
trg_np[:len(l[1]), idx] = l[1]
else:
# fill with eos
src_np = numpy.full((num_lines, src_len_np.max()),
src_vocab.eos_id,
dtype=numpy.int32)
trg_np = numpy.full((num_lines, trg_len_np.max()),
trg_vocab.eos_id,
dtype=numpy.int32)
for idx, l in enumerate(lines):
src_np[idx, :len(l[0])] = l[0]
src_np[idx, len(l[0]):src_len_np[idx]] = src_vocab.space_id
                            src_sample_matrix_np[idx, range(max_word_length),
                                                 numpy.where(src_np[idx, :] == src_vocab.space_id)[0]] = 1.
trg_np[idx, :len(l[1])] = l[1]
current_input_np = input_tuple(
src=src_np,
src_len=src_len_np,
src_sample_matrix=src_sample_matrix_np,
src_word_len=src_word_len_np,
trg=trg_np,
trg_len=trg_len_np)
read_buffer.appendleft(current_input_np)
# shuffle batches
if (mode == MODE.TRAIN or mode == MODE.RL) and rand_append:
random.shuffle(read_buffer)
yield read_buffer.pop()
return _parallel_generator()
|
# coding: utf-8
"""
Prisma Cloud Compute API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 21.04.439
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from openapi_client.configuration import Configuration
class SharedTrustAudit(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'id': 'str',
'account_id': 'str',
'cluster': 'str',
'count': 'int',
'effect': 'VulnEffect',
'image_id': 'str',
'image_name': 'str',
'msg': 'str',
'rule_name': 'str',
'time': 'datetime'
}
attribute_map = {
'id': '_id',
'account_id': 'accountID',
'cluster': 'cluster',
'count': 'count',
'effect': 'effect',
'image_id': 'imageID',
'image_name': 'imageName',
'msg': 'msg',
'rule_name': 'ruleName',
'time': 'time'
}
def __init__(self, id=None, account_id=None, cluster=None, count=None, effect=None, image_id=None, image_name=None, msg=None, rule_name=None, time=None, local_vars_configuration=None): # noqa: E501
"""SharedTrustAudit - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration.get_default_copy()
self.local_vars_configuration = local_vars_configuration
self._id = None
self._account_id = None
self._cluster = None
self._count = None
self._effect = None
self._image_id = None
self._image_name = None
self._msg = None
self._rule_name = None
self._time = None
self.discriminator = None
if id is not None:
self.id = id
if account_id is not None:
self.account_id = account_id
if cluster is not None:
self.cluster = cluster
if count is not None:
self.count = count
if effect is not None:
self.effect = effect
if image_id is not None:
self.image_id = image_id
if image_name is not None:
self.image_name = image_name
if msg is not None:
self.msg = msg
if rule_name is not None:
self.rule_name = rule_name
if time is not None:
self.time = time
@property
def id(self):
"""Gets the id of this SharedTrustAudit. # noqa: E501
ID is the registry-repo of the created container. # noqa: E501
:return: The id of this SharedTrustAudit. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this SharedTrustAudit.
ID is the registry-repo of the created container. # noqa: E501
:param id: The id of this SharedTrustAudit. # noqa: E501
:type id: str
"""
self._id = id
@property
def account_id(self):
"""Gets the account_id of this SharedTrustAudit. # noqa: E501
AccountID is the cloud account ID where the audit was generated. # noqa: E501
:return: The account_id of this SharedTrustAudit. # noqa: E501
:rtype: str
"""
return self._account_id
@account_id.setter
def account_id(self, account_id):
"""Sets the account_id of this SharedTrustAudit.
AccountID is the cloud account ID where the audit was generated. # noqa: E501
:param account_id: The account_id of this SharedTrustAudit. # noqa: E501
:type account_id: str
"""
self._account_id = account_id
@property
def cluster(self):
"""Gets the cluster of this SharedTrustAudit. # noqa: E501
Cluster is the cluster where the audit was generated. # noqa: E501
:return: The cluster of this SharedTrustAudit. # noqa: E501
:rtype: str
"""
return self._cluster
@cluster.setter
def cluster(self, cluster):
"""Sets the cluster of this SharedTrustAudit.
Cluster is the cluster where the audit was generated. # noqa: E501
:param cluster: The cluster of this SharedTrustAudit. # noqa: E501
:type cluster: str
"""
self._cluster = cluster
@property
def count(self):
"""Gets the count of this SharedTrustAudit. # noqa: E501
Count is the number of times this audit occurred. # noqa: E501
:return: The count of this SharedTrustAudit. # noqa: E501
:rtype: int
"""
return self._count
@count.setter
def count(self, count):
"""Sets the count of this SharedTrustAudit.
Count is the number of times this audit occurred. # noqa: E501
:param count: The count of this SharedTrustAudit. # noqa: E501
:type count: int
"""
self._count = count
@property
def effect(self):
"""Gets the effect of this SharedTrustAudit. # noqa: E501
:return: The effect of this SharedTrustAudit. # noqa: E501
:rtype: VulnEffect
"""
return self._effect
@effect.setter
def effect(self, effect):
"""Sets the effect of this SharedTrustAudit.
:param effect: The effect of this SharedTrustAudit. # noqa: E501
:type effect: VulnEffect
"""
self._effect = effect
@property
def image_id(self):
"""Gets the image_id of this SharedTrustAudit. # noqa: E501
ImageID is the container image id. # noqa: E501
:return: The image_id of this SharedTrustAudit. # noqa: E501
:rtype: str
"""
return self._image_id
@image_id.setter
def image_id(self, image_id):
"""Sets the image_id of this SharedTrustAudit.
ImageID is the container image id. # noqa: E501
:param image_id: The image_id of this SharedTrustAudit. # noqa: E501
:type image_id: str
"""
self._image_id = image_id
@property
def image_name(self):
"""Gets the image_name of this SharedTrustAudit. # noqa: E501
ImageName is the container image name. # noqa: E501
:return: The image_name of this SharedTrustAudit. # noqa: E501
:rtype: str
"""
return self._image_name
@image_name.setter
def image_name(self, image_name):
"""Sets the image_name of this SharedTrustAudit.
ImageName is the container image name. # noqa: E501
:param image_name: The image_name of this SharedTrustAudit. # noqa: E501
:type image_name: str
"""
self._image_name = image_name
@property
def msg(self):
"""Gets the msg of this SharedTrustAudit. # noqa: E501
Message is the blocking message text. # noqa: E501
:return: The msg of this SharedTrustAudit. # noqa: E501
:rtype: str
"""
return self._msg
@msg.setter
def msg(self, msg):
"""Sets the msg of this SharedTrustAudit.
Message is the blocking message text. # noqa: E501
:param msg: The msg of this SharedTrustAudit. # noqa: E501
:type msg: str
"""
self._msg = msg
@property
def rule_name(self):
"""Gets the rule_name of this SharedTrustAudit. # noqa: E501
If blocked, contains the name of the rule that was applied. # noqa: E501
:return: The rule_name of this SharedTrustAudit. # noqa: E501
:rtype: str
"""
return self._rule_name
@rule_name.setter
def rule_name(self, rule_name):
"""Sets the rule_name of this SharedTrustAudit.
If blocked, contains the name of the rule that was applied. # noqa: E501
:param rule_name: The rule_name of this SharedTrustAudit. # noqa: E501
:type rule_name: str
"""
self._rule_name = rule_name
@property
def time(self):
"""Gets the time of this SharedTrustAudit. # noqa: E501
Time is the UTC time of the audit event. # noqa: E501
:return: The time of this SharedTrustAudit. # noqa: E501
:rtype: datetime
"""
return self._time
@time.setter
def time(self, time):
"""Sets the time of this SharedTrustAudit.
Time is the UTC time of the audit event. # noqa: E501
:param time: The time of this SharedTrustAudit. # noqa: E501
:type time: datetime
"""
self._time = time
def to_dict(self, serialize=False):
"""Returns the model properties as a dict"""
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = getfullargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, SharedTrustAudit):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, SharedTrustAudit):
return True
return self.to_dict() != other.to_dict()
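# Illustrative sketch (example values only): constructing the generated model and
# serializing it, as API consumers of this class typically do.
if __name__ == "__main__":
    audit = SharedTrustAudit(id="registry/repo", count=2, msg="blocked by trust rule")
    print(audit.to_dict())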
|
from numpy import array
from UQSetting import UQSetting
from UQSettingFormatter import UQSettingFormatter
class Samplingresult:
def __init__(self, uqsetting):
"""
Create a Samplingresult using data of the given UQSetting. If
a filename or a list of filenames is given, load the UQSetting
from those files.
"""
self.uq = None
self.qoi = None
self.time = None
self.chunk_ref = {}
self.onlyActive = False
self.unitcube = False
self.onlyValues = False
self.raw = False
if isinstance(uqsetting, str):
self.loadfile(uqsetting)
elif isinstance(uqsetting, list):
for f in uqsetting:
self.loadfile(f)
else:
self.uq = uqsetting
self.tag_stats = self.uq.getTagsStats()
self.sim_stats = self.uq.getSimulationStats()
self.post_stats = self.uq.getPostprocessorStats()
def loadfile(self, filename):
m = UQSettingFormatter().deserializeFromFile(filename)
uq = UQSetting.fromJson(m)
if not self.uq:
self.uq = uq
else:
self.uq.mergeStats(uq)
def tagMatch(self, tag, chunk_ref, not_match=None):
"""
        Check if a tag matches a description.
        tag may be given as a list of items or as a dict.
        @return: True if all keys in chunk_ref have the same value in tag,
                 False otherwise or if the tag matches not_match.
"""
ok = True
if isinstance(tag, dict):
tk = tag.items()
else:
tk = tag
for keyval in chunk_ref.iteritems():
if keyval not in tk:
ok = False
not_matched = False
if not_match is not None:
not_matched = self.tagMatch(tag, not_match)
return ok and not not_matched
def getSamplesSorted(self, chunktag=None, first=0, number=None,
not_match=None, **kwargs):
"""
Returns the samples which match the preset parameters and are assigned
to the chunk given by the chunktag. The samples will be ordered as
they are in the chunk.
@param chunktag: denotes the chunk
@param first: index of the first sample to return
@param number: number of samples to return, None for all
@param not_match: tags matching not_match will be excluded from the result
@param kwargs: same as a dict for chunktag
"""
chunk_ref = self.chunk_ref.copy()
if isinstance(chunktag, dict):
chunk_ref.update(chunktag)
elif isinstance(chunktag, str):
chunk_ref['type'] = chunktag
elif chunktag is None:
chunk_ref.update(kwargs)
else:
print "Warning: illegal chunktag", chunktag
return []
items = []
if hasattr(self, 'index'):
for tk, v in self.index.iteritems():
if self.tagMatch(tk, chunk_ref, not_match):
for k in v:
t = self.tag_stats[k]
idx = t[0]['__index']
items.append((idx, k, self.sim_stats[k]))
else:
for k, v in self.tag_stats.iteritems():
for t in v:
if self.tagMatch(t, chunk_ref, not_match):
idx = t['__index']
items.append((idx, k, self.sim_stats[k]))
items.sort()
out = []
param = self.uq.getParameters()
for i, k, v in items:
s = [None] * 2
if self.onlyActive:
s[0] = param.extractActiveTuple(k)
elif self.unitcube:
s[0] = v['__unitcube_value'] # TODO
else:
s[0] = k
r = self.post_stats[k]
if self.raw or self.qoi is None:
s[1] = r
else:
qoi = r[self.qoi]
if self.time is not None:
qoi = qoi[self.time]
else:
qoi = array(qoi)
s[1] = qoi
out.append(s)
if self.onlyValues:
out = [i[1] for i in out]
else:
out = [tuple(i) for i in out]
if number:
return out[first:first + number]
return out[first:]
def makeIndex(self):
"""
        Creates an index over the samples by tag. This speeds up searching for
        a short chunk in a huge dataset (possibly by a factor of 100), although
        the gain is not guaranteed.
        The index stored in the index attribute is a dict of lists of sample
        points indexed by "atomic" tags (those reported by listChunks()) which
        are flattened into tuples of (key, value) tuples.
"""
index = {}
self.index = index
for k, ts in self.tag_stats.iteritems():
for t in ts:
tk = self.__tagToTuple(t)
if tk not in index:
index[tk] = [k]
else:
index[tk].append(k)
def setSampleFormat(self, onlyActive=False, unitcube=False, onlyValues=False, raw=False):
"""
Specifies the format that getSamplesSorted returns.
@param onlyActive: only add active parameters to the sample parameters TODO: Not implemented
@param unitcube: use unitcube values (implies active subset only) TODO: not implemented
@param onlyValues: do not return sample parameters
@param raw: do not extract QoI values
"""
self.onlyActive = onlyActive
self.unitcube = unitcube
self.onlyValues = onlyValues
self.raw = raw
def setQoI(self, qoi):
"""
Set the selected QoI
"""
self.qoi = qoi
def setTag(self, **kwargs):
"""
Pre-Select samples with the given tag properties.
"""
self.chunk_ref = kwargs
def listQoI(self):
"""
Return the QoIs found in the dataset as a list
"""
return self.uq.getAvailableQoI()
def setTime(self, time):
"""
        Set a specific timestep value to return in getSamplesSorted. Default is None;
        in that case all timesteps will be returned as a vector.
"""
self.time = time
def listTime(self):
"""
Return the timesteps in the dataset as a list.
"""
# TODO: This is inefficient and kind of useless.
return range(0, len(self.post_stats.values()[0][self.qoi]))
def __tagToTuple(self, t):
"""
convert a tag given as a dict to something suitable as a dict key.
"""
return tuple(i for i in sorted(t.iteritems())
if not i[0].startswith('__'))
def listChunks(self, chunk_ref=None, **kwargs):
"""
Return the available chunks in the dataset as a list of chunk tags.
Slow and should not be used.
        @param chunk_ref: return only those matching the given prototype
                          (values are the same for all keys in the prototype)
        @param kwargs: alternate syntax for chunk_ref
"""
if chunk_ref is None:
chunk_ref = kwargs
tags = {}
for ts in self.tag_stats.itervalues():
for t in ts:
# all key-value pairs except __index
ok = True
for key, val in chunk_ref.iteritems():
if key not in t or t[key] != val:
ok = False
if ok:
k = self.__tagToTuple(t)
tags[k] = t
return [{k: v for k, v in t.iteritems() if not k.startswith('__')}
for t in tags.itervalues()]
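if __name__ == '__main__':
    # Minimal usage sketch (not part of the original module). The file name
    # 'uqsetting.gz' is purely hypothetical; any file written by
    # UQSettingFormatter for a UQSetting should work here.
    result = Samplingresult('uqsetting.gz')
    result.setQoI(result.listQoI()[0])
    result.setSampleFormat(onlyValues=True)
    for value in result.getSamplesSorted(number=5):
        print(value)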
|
import discord
class Utils:
def __init__(self, client: object):
self.client: object = client
@staticmethod
    def is_DMChannel(message: discord.Message) -> bool:
return isinstance(message.channel, discord.channel.DMChannel)
    def is_Command(self, message: discord.Message) -> tuple:
prefix: str = self.client.config.get("Prefix")
content: str = message.content
command, arguments = None, None
if content.startswith(prefix):
arguments: list = content[len(prefix):].lower().split()
if arguments:
command = arguments.pop(0)
return command, arguments
|
#!/usr/bin/env python3
import gvm
from gvm.protocols.latest import Gmp
from gvm.transforms import EtreeCheckCommandTransform
from gvm.errors import GvmError
connection = gvm.connections.TLSConnection(hostname='localhost')
username = 'admin'
password = 'admin'
transform = EtreeCheckCommandTransform()
try:
with Gmp(connection=connection, transform=transform) as gmp:
gmp.authenticate(username, password)
users = gmp.get_users()
tasks = gmp.get_tasks()
targets = gmp.get_targets()
scanners = gmp.get_scanners()
configs = gmp.get_configs()
feeds = gmp.get_feeds()
nvts = gmp.get_nvts()
print("Users\n------------")
for user in users.xpath('user'):
print(user.find('name').text)
print("\nTasks\n------------")
for task in tasks.xpath('task'):
print(task.find('name').text)
print("\nTargets\n-------------")
for target in targets.xpath('target'):
print(target.find('name').text)
print(target.find('hosts').text)
print("\nScanners\n-------------")
for scanner in scanners.xpath('scanner'):
print(scanner.find('name').text)
print("\nConfigs\n-------------")
for config in configs.xpath('config'):
print(config.find('name').text)
print("\nFeeds\n-------------")
for feed in feeds.xpath('feed'):
print(feed.find('name').text)
print("\nNVTs\n-------------")
for nvt in nvts.xpath('nvt'):
print(nvt.attrib.get('oid'),"-->",nvt.find('name').text)
except GvmError as error:
    print('Error connecting to the server:', error)
|
def average(li):
s = sum(li)
n = len(li)
return s / n
li = [1, 2, 3]
print("Averagr:", average(li))
li = [10, 20, 30, 40, 50, 60, 70, 80];
print("Averagr:", average(li))
li = [-1, 0, 1]
print("Averagr:", average(li)) |
from .base import db
def init_app(app):
db.init_app(app)
|
import pytest
from ...core import ProxyTypeError
from ...primitives import Int, Str
from ...geospatial import ImageCollection, Image, FeatureCollection, GeometryCollection
from .. import Tuple, List, zip as wf_zip
examples = [
List[Int]([1, 2, 3]),
List[Str](["a", "b", "c"]),
List[Tuple[Int, Str]]([(1, "foo"), (3, "bar")]),
ImageCollection.from_id("foo"),
Image.from_id("foo"),
FeatureCollection.from_vector_id("bar"),
GeometryCollection.from_geojson({"type": "GeometryCollection", "geometries": []}),
]
@pytest.mark.parametrize("args", [examples, ()] + [(ex,) for ex in examples])
def test_zip(args):
zipped = wf_zip(*args)
assert isinstance(
zipped,
List[Tuple[tuple(getattr(arg, "_element_type", type(arg)) for arg in args)]],
)
def test_zip_str():
zipped = wf_zip(Str("abcd"), List[Int]([1, 2, 3]))
assert isinstance(zipped, List[Tuple[Str, Int]])
@pytest.mark.parametrize(
"seqs",
[
[List[Int]([1, 2, 3]), [1, 2, 3]],
[List[Int]([1, 2, 3]), Tuple[Int, Int, Int]([1, 2, 3])],
[List[Int]([1, 2, 3]), "asdf"],
],
)
def test_zip_wrong_args(seqs):
with pytest.raises(
ProxyTypeError, match="All arguments to 'zip' must be Proxytype sequences"
):
wf_zip(*seqs)
|
import requests
from json import JSONDecodeError
import pythoncom
pythoncom.CoInitialize()
class LocationRecord:
"""Holds and records the location of the client device
Keyword Arguments:
ip -- Current client device IP address (Default: None)
city -- Approximate city of the device (Default: None)
region -- Approximate region of device, typically state in the USA (Default: None)
loc -- Approximate gps location of device (Default: None)
org -- Registered owner of IP address, usually the ISP (Default: None)
timezone -- Timezone of client machine (Default: None)
"""
def __init__(self, ip=None, city=None, region=None, loc=None, org=None, timezone=None):
self.ip = ip
self.city = city
self.region = region
self.loc = loc
self.org = org
self.timezone = timezone
def __repr__(self):
return f"<LocationRecord city:{self.city} region:{self.region}>"
def __str__(self):
return f"""
Location Information:
Ip: {self.ip}
City: {self.city}
Region: {self.region}
Loc: {self.loc}
Org Size: {self.org}
Timezone: {self.timezone}"""
def test(self):
"""Performs the location test and records record to self
Returns: <LocationRecord>
"""
response = requests.get("https://ipinfo.io/")
response_json = {}
try:
response_json = response.json()
        except JSONDecodeError:
            # The remote service did not return JSON; report the problem for
            # every field instead of crashing on the lookups below.
            error_msg = ("Error with remote website. "
                         "This is not an error with the client.")
            response_json = {key: error_msg for key in
                             ("ip", "city", "region", "loc", "org", "timezone")}
self.ip = str(response_json['ip'])
self.city = str(response_json['city'])
self.region = str(response_json['region'])
self.loc = str(response_json['loc'])
self.org = str(response_json['org'])
self.timezone = str(response_json['timezone'])
return self
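if __name__ == "__main__":
    # Minimal usage sketch (not part of the original module). test() performs a
    # live request to https://ipinfo.io/, so network access is required.
    record = LocationRecord().test()
    print(record)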
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from flask import Blueprint, jsonify, request
from app.classes.network import Network
network = Network()
network_bp = Blueprint("network", __name__)
@network_bp.route("/status", methods=["GET"])
def api_network_status():
""" Get the network status of eth0, wlan0 """
return jsonify(network.check_status())
@network_bp.route("/wifi/list", methods=["GET"])
def api_get_wifi_list():
""" List available WIFI networks """
return jsonify(network.wifi_list_networks())
@network_bp.route("/wifi/setup", methods=["POST", "OPTIONS"])
def api_set_wifi():
""" Set an access point and a password """
if request.method == "POST":
data = request.get_json()
res = network.wifi_setup(data["ssid"], data["password"])
return jsonify(res)
else:
return ""
@network_bp.route("/wifi/connect", methods=["GET"])
def api_connect_wifi():
""" Connect to the specified wifi network """
res = network.wifi_connect()
return jsonify(res)
@network_bp.route("/ap/start", methods=["GET"])
def api_start_ap():
""" Start an access point """
return jsonify(network.start_ap())
@network_bp.route("/ap/stop", methods=["GET"])
def api_stop_ap():
""" Generate an access point """
return jsonify(network.stop_hostapd())
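# Example (illustrative only; the URL prefix depends on how network_bp is
# registered on the Flask app):
#
#   curl -X POST <prefix>/wifi/setup \
#        -H 'Content-Type: application/json' \
#        -d '{"ssid": "MyNetwork", "password": "secret"}'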
|
# -*- coding: utf-8 -*-
# unzip practice
# http://www.pythonchallenge.com/pc/def/channel.html
import os
import os.path
import zipfile
import re
fileName = 'channel'
# If the folder does not exist, create it and unzip
if not os.path.isdir(fileName) :
os.mkdir(fileName)
# unzip
fZip = zipfile.ZipFile(fileName+'.zip', 'r')
fZip.extractall(fileName)
    fZip.close()
# Inside the extracted zip there is a readme.txt,
# and reading it shows:
#welcome to my zipped list.
#
#hint1: start from 90052
#hint2: answer is inside the zip
# That is what it says.
# It is a bit of a cheat, but use this number directly
nothing = 90052
listFileName = []
foundSomething = False
while not foundSomething:
print(nothing)
listFileName.append(nothing)
fText = open(fileName + '/' + str(nothing) + '.txt')
textLine = fText.read()
fText.close()
objMatch = re.search('Next nothing is ([0-9]+)', textLine)
if objMatch:
tupleMatch = objMatch.groups(1)
nothing = tupleMatch[0]
else:
print('FOUND*' + textLine)
foundSomething = True
#46145.txt
#Collect the comments.
print('--------------')
fZip = zipfile.ZipFile(fileName+'.zip', 'r')
strComments = ''
for l in listFileName:
bComment = fZip.getinfo('%s.txt' % str(l)).comment
print(bComment.decode('ascii'))
strComments += bComment.decode('ascii')
fZip.close()
print(strComments)
# ****************************************************************
# ****************************************************************
# ** **
# ** OO OO XX YYYY GG GG EEEEEE NN NN **
# ** OO OO XXXXXX YYYYYY GG GG EEEEEE NN NN **
# ** OO OO XXX XXX YYY YY GG GG EE NN NN **
# ** OOOOOOOO XX XX YY GGG EEEEE NNNN **
# ** OOOOOOOO XX XX YY GGG EEEEE NN **
# ** OO OO XXX XXX YYY YY GG GG EE NN **
# ** OO OO XXXXXX YYYYYY GG GG EEEEEE NN **
# ** OO OO XX YYYY GG GG EEEEEE NN **
# ** **
# ****************************************************************
# **************************************************************
# Looking at hockey.html:
# it's in the air. look at the letters.
# → oxygen.html
|
class Solution:
def isRobotBounded(self, instructions: str) -> bool:
d = 1j
p = 0
for ins in instructions:
if ins == "L":
d = d * 1j
elif ins == 'R':
d = d * (-1j)
else:
p += d
        # Bounded iff, after one pass, the robot is back at the origin or is no
        # longer facing its initial direction (north, represented as 1j).
        return not (d == 1j and p != 0)
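# Quick illustrative checks (not part of the original solution):
if __name__ == "__main__":
    s = Solution()
    print(s.isRobotBounded("GGLLGG"))  # True: the robot returns to the origin
    print(s.isRobotBounded("GG"))      # False: the robot drifts north forever
    print(s.isRobotBounded("GL"))      # True: the robot traces a bounded cycle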
|
#Challenge 10
#Create a program that reads how much money a person has in their wallet
#and shows how many dollars they can buy
#Assume US$1.00 = R$3.27
rs=float(input('Quanto dinheiro você tem na carteira?: R$ '))
dolar = rs/3.27
print('Com R${:.2f} você pode comprar US${:.2f}'.format(rs,dolar))
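#Worked example (added for illustration): with R$100.00 in the wallet,
#100 / 3.27 = 30.58..., so the program prints "Com R$100.00 você pode comprar US$30.58".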
|
from pytransform import pyarmor_runtime
pyarmor_runtime()
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x09\x00\x61\x0d\x0d\x0a\x08\x2d\xa0\x01\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\xba\x1b\x00\x00\x00\x00\x00\x18\x5b\x96\xc4\xd6\x8b\x57\xf3\x80\xa1\x68\x9e\x3b\xfc\xc3\x8d\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x39\x81\x53\x36\x30\xf3\xff\xff\x54\x4a\x8d\xd8\x5b\xf0\xb8\x01\x69\x9b\x84\x57\xc7\xff\x09\xf7\xda\xf7\xe8\xc7\xcd\x5f\xac\x90\x02\xa4\x64\x74\x35\x82\xf3\x8c\xe8\xbf\x69\xa8\xbc\x22\x37\x16\x85\x66\x22\x5c\xf6\x60\x16\xbe\x1e\x05\x25\x14\x4c\x19\x51\xa6\x74\xc6\xfc\xbb\xf7\x84\xdb\xae\xd4\x88\x3b\x12\x0a\xa4\x2b\xb3\xfe\x45\x56\x82\x37\x41\x70\xdf\x3a\x83\x81\x49\x9d\x9b\x1d\xb6\x22\xbb\x19\x6b\x5b\xae\x82\x1d\x76\x7f\xd1\x71\x4f\x2a\x85\x0d\xb8\xc2\xcb\x95\x56\xb0\x56\xf6\x32\x44\xd4\xb0\x0e\x63\x78\xc0\x85\x69\xc0\x37\xc0\x62\x8b\xd8\x59\xf9\x1b\xbb\x4d\x00\x7c\x91\x4a\x22\xb0\x69\x5e\x84\x96\x15\x8a\x64\xb1\xef\xa9\x1f\xf8\x68\x14\x18\xc6\xf8\x53\xb1\xf1\x7a\x76\x04\x6c\x93\xc6\x6e\x71\x00\x5d\x65\x28\x7a\xbe\x25\x66\x3a\x01\xe5\x15\x08\x35\x16\x71\x7c\x73\x1d\xf1\xc7\x23\xab\x29\x7d\xfe\x4c\x22\x31\x2c\xef\x3d\x9a\x15\x25\xdd\x31\xc9\x34\x69\xde\x94\x40\xef\xba\x55\xb9\xea\xa9\x79\x18\xd7\x2e\xf2\x27\xaa\x49\xd0\xa7\x8c\x13\x64\x0b\x84\x76\xdc\x02\xdc\xe8\xa4\xd6\x14\xb9\x32\x4c\x94\x6e\x8e\xab\x79\xb8\xc8\x97\xc6\xc4\xc9\x7b\x3a\xd7\x77\x0e\x10\x10\xd4\x43\xde\xb2\x2a\x3f\xdf\x7e\x74\x49\x0a\x64\x61\x05\xa7\xab\xe5\x39\x16\x81\xa6\x60\xb6\x7a\x41\x79\xc2\x3a\x0c\x4d\xbc\xf2\xbf\xdb\xf1\xd0\x38\x53\x0e\x13\xbe\x83\x8e\xf4\xf7\x5e\x8c\x30\xf3\x25\x9a\x96\x38\x51\x99\xbb\x60\xf9\xa1\xac\x94\x02\x17\xb8\x07\x13\x88\x8a\xf8\x4e\x1d\xc9\x93\x3b\x65\xf6\x3e\x49\x2a\xdb\xf3\x66\xe4\xc0\xfe\x60\x77\x7b\xe6\x81\xf4\xc5\x4a\xc1\x29\x5a\xfc\x5f\x4a\x24\x23\x5e\x3a\xcf\x43\x63\x28\x2f\xc6\x5f\xdb\xeb\x85\xca\x20\xba\x44\xfe\xd0\x7b\xb0\xda\x35\xe3\xf8\x29\xfb\x66\x3a\x58\xbf\xc7\xaf\x37\xec\xc7\x30\xf1\xb3\x58\xb8\xb2\x82\x0f\xfa\x09\xb1\xe3\x3d\xa2\xe6\xc2\xbf\x58\x98\xc3\x64\x58\xc0\x33\xb9\x6a\x93\x58\x82\x0a\x92\xb8\xa8\x66\xe7\xaa\xa4\x9e\x4e\x4d\x35\x30\x9e\xea\x8a\xd2\xe0\x74\x9d\x53\x8e\x75\x0f\x05\x4b\x16\x55\xe5\x29\x9a\xa9\x81\x31\xaf\xf6\x36\x57\x77\xa7\xe3\xa6\x3b\x25\x2b\xc6\xc0\xc0\xaf\x85\xb8\x5e\xea\xdf\xee\x60\xae\xad\x88\xaa\x24\x1a\x9a\xef\xf2\x2d\x2e\xe0\x74\x0d\x9b\x75\x0b\xbc\x05\x6c\xc2\x5e\xe6\xd8\xb5\x53\xb1\x9a\x18\x97\xee\x70\x7c\xc3\xc1\x00\x67\x04\x23\x15\xe2\xdc\xf9\x65\x45\xc2\x7e\x9e\x61\x7e\x4c\xda\x85\x44\xa6\x00\xef\x9c\xbd\xfd\x0d\xda\x0f\x8f\x9a\xa8\xd3\xdb\x0b\x53\x50\xf2\x6b\x50\xd4\xd9\x49\x5a\x70\x66\x3f\x35\x35\xd0\xd8\x3c\x23\xf9\x87\xf8\x4e\x1e\x84\x00\x04\x89\x01\x55\x9a\xe2\x68\x71\xfd\x0f\xc6\x46\x14\xf3\x75\xf4\x32\x0f\x5e\x8a\x2e\x69\x54\x36\x38\x47\xb4\x67\xe9\x38\xd4\xe3\xa5\xb6\xb8\x76\x69\x83\x39\x65\xfb\xc2\x32\x1b\xba\x16\x26\xfa\xdd\xb9\x09\x7b\x8e\xac\x76\xb9\x6c\x05\xf3\xab\x76\xc8\x79\x53\xd1\x6f\x34\x11\x71\xa4\xfd\xa0\xa0\x2f\x2f\x06\xc5\x58\xac\x16\xec\x04\x80\xa7\xbf\x2f\x68\x32\xf2\xd1\xaa\x58\xdc\x0d\x7c\xf4\xc6\xa1\xce\xc6\x41\x8e\x9d\xb5\x8e\x3f\x89\xbb\xf3\x31\x23\x17\xa6\x04\xec\x4b\xde\x70\x52\x47\xa3\x3d\x93\x5c\x02\x8f\xc1\x29\x72\xf0\x9b\x5d\x8a\xc2\xe9\x6f\x09\x57\x38\xd0\x64\x8c\xf8\x8a\x11\x24\xec\x9d\x5e\xa9\x47\xe8\x6d\x89\xe9\x95\xfe\xc8\xc4\x33\x3d\xd3\x57\x91\xeb\x05\x62\x97\x12\x4a\x34\xca\x25\xac\xdc\x3c\x9d\xae\xa9\x86\x3e\x22\x9c\x8b\xf4\x08\x80\x6f\xe0\xa9\x46\x1e\x16\x35\xda\xac\x85\x24\xb2\xa9\x7c\x23\xf5\xaf\xa7\xa8\x56\xdb\x98\x17\x71\x08\xd3\xee\xe4\xca\x06\x16\xde\x7a\x08\x49\x9c\x51\xc9\x7d\xa9\x85\x37\xbc\x73\x23\x8a\
xb9\x63\x99\x5f\x9c\xd9\x1f\x5f\x32\xc0\xac\x92\x8e\xd3\x84\x2f\x13\x9b\x66\xf5\x0d\x31\x4c\x3e\x61\x52\xf1\x75\x64\x2f\x84\x65\x7a\xfe\x9c\x72\xe8\xb3\x47\x41\x67\x11\xd0\xac\x5a\x08\x89\x5f\x8f\x27\x7a\x34\xbc\x3f\x21\x87\xb9\x7b\xb3\x98\x16\x94\x7d\xd2\x56\x8d\x2a\x03\x7a\x4f\x1d\x4c\x9b\xe4\x4c\x9d\xb2\x1c\x1d\x51\xc2\x9b\xdd\x31\xbc\xe3\x67\x6f\x89\x02\x7d\x44\x90\x67\x0e\x95\xcb\x44\xfd\xca\xf1\xa2\x05\xdb\x4b\x3b\x08\x34\xc8\xfd\xb1\x14\x18\xa8\x48\xc3\x83\x38\xef\x84\xee\x7b\xab\x53\x8d\x6b\xde\xbd\x0e\x57\x1d\xa3\xcc\x75\x12\xe0\x10\xc1\x98\xee\x3d\xbc\xd5\xd0\xd3\xdb\x99\x03\x3f\xce\xa6\xc9\x27\xc8\x65\x09\x60\x47\xcb\xf6\x41\xf0\xc1\x5a\x3c\xe4\x4a\xa5\xb9\xd3\x56\x67\x9f\x68\x00\x67\xdd\x32\x72\xb1\xeb\xe9\x78\xf7\xfb\x9e\x4d\x69\xa7\x4d\x27\x06\x3e\xb3\xaf\x04\xe2\xf4\xad\xf5\x74\x00\x67\x23\x7b\x56\x64\xac\x59\x10\xe7\x67\xce\x97\xfd\xf5\xdf\x91\xf2\x0a\x0a\xe4\xa4\xd3\xcd\x0f\xf9\x0d\xd4\xf2\xe6\x48\x73\x6d\x92\x66\x38\xb4\xeb\xfa\x4f\xa5\x98\x79\x56\x26\x4e\x23\x26\xf3\xda\xf0\x5a\x93\xac\xf2\x63\xef\xb7\xd3\x85\xc5\xce\xcb\x63\x94\xa8\x66\x19\x29\xde\x4d\x39\x25\xc1\x97\x2e\x6a\x50\xe2\xdb\x1f\x44\x32\x70\x16\x87\x00\xb6\xf6\x1e\x0b\x17\xb3\x4e\xc5\xeb\x89\x85\x32\x58\xf0\x1f\xbc\x18\xa0\x25\xb6\xdf\x07\x7e\x10\x3d\x69\xd6\xe4\x3e\x4a\x16\x5e\xe9\xa5\x2c\x06\x6f\x2d\x72\xae\x30\x49\x75\x9f\x34\x19\xb9\x08\xf5\xcc\xe3\x11\x3b\x0e\x68\xbb\xd4\x79\x5b\x45\xf0\x4c\x56\xe5\x04\x0a\xeb\xc7\x1f\xa1\x96\x43\x20\x66\x42\xf8\x28\x58\x4d\xae\x14\x8d\xcf\x5f\x85\x98\x39\x72\xef\x40\xc6\x57\xc7\xcf\x3b\xdd\x31\x37\x60\x3c\xc4\xbc\x57\xd6\x9b\xbd\x3c\x25\x5d\x6a\x75\x79\x86\xc3\xe2\x60\x01\xf1\x72\x17\x12\x71\xf0\xf2\xe4\x2f\x10\xd4\xc5\x81\x77\x19\xd3\xe4\x3e\xc1\xe0\x9a\x16\xbb\x77\x61\xc3\x51\x87\x79\xa1\xf5\x72\x3f\x99\x69\x42\x70\xe2\xba\x7d\x7a\x28\x11\x62\x1a\x7d\x60\xbd\x89\x7a\x92\x0c\xbb\xdf\xdc\xd1\xf8\x66\x67\xb2\x8c\x5b\x48\x6e\x8d\x53\x18\x15\xb0\x2b\x5a\x7d\xe7\x26\x36\x6e\x6a\xe8\x87\x4a\xf9\xd3\xdf\xb4\x91\xb9\x7e\x12\xb0\x78\xcb\xe1\xc1\xef\x4c\x01\x4f\x29\xdb\x59\xa6\x85\x5b\xe7\x27\xd8\xb1\xf5\x6f\x0d\x63\x0b\x7e\x11\x49\x76\x8d\x39\xb2\x0b\x41\xe0\x71\x28\x22\x29\x53\x02\x67\xf7\x32\x8a\x80\xba\xf6\xbc\x86\x41\xae\x75\x43\xa5\x64\xc4\xac\x78\x2a\x11\x63\x0f\x18\x36\xb9\x5e\x8a\xf6\xa5\x86\x44\x87\x33\xb5\xec\x4e\xf2\xc0\x9e\x80\x28\x5b\xc7\x3b\x88\xfe\x95\xe5\x22\x8f\x17\xb9\x81\xb2\x10\x45\xfa\x94\x30\x6d\xbc\x3c\xf5\x9a\xa4\x00\xf9\xc1\xda\xfc\xd0\x58\x0b\x0e\x73\x91\x6c\x05\x4a\xb2\x7c\x1e\xd5\xd5\x3a\xc7\x50\xa9\x25\x38\xe1\x94\x76\x47\x4f\x9f\x0f\x08\xd7\x77\xa8\xd4\xa3\x93\x34\x97\x71\x83\x56\xf9\x98\xfb\x3c\x61\xa9\x6d\x4c\xb4\x59\x52\x8e\x34\xf6\x40\xc9\x79\x7d\xab\x80\x69\x54\x28\x04\x37\x01\xa4\x89\xbd\x6f\x8a\xc7\x7c\x3a\x23\x55\xbe\x40\x66\xb8\xe0\x19\xd7\x50\x35\xca\xbf\x0f\x09\x6b\xf3\x78\xb2\xa6\x6e\x2d\x8a\x65\x24\xfe\x98\xa4\x32\x40\x01\x0f\x3d\x81\xe0\x54\xa5\xdb\xc4\xb8\xe3\xa6\xb6\x6b\x89\x62\x91\x96\xe3\xb1\xd0\x11\x3f\xa2\xf0\x7c\xbb\x6c\x78\xf9\xf6\xbf\xc7\xbb\xea\x53\xd4\x05\x99\x0f\x5e\x31\x7b\x11\xc9\x03\xe0\xbf\x03\x22\x87\x43\xc1\x96\x23\xab\xb0\xc0\x61\xe9\x94\xe4\x4b\x46\x05\x7b\xbc\xac\x2c\x7d\x8e\x6c\x99\xb7\xbd\x6d\x22\xec\x87\x65\x71\x7f\x8e\x01\xdc\x8e\xdc\xdb\x45\xf8\x54\xc6\x3a\x02\xa9\xcb\xcc\x56\x73\xc3\xe7\x4a\x6c\xa3\x07\x9a\x48\xbb\x1c\x57\x40\xdb\x4b\x71\xaa\xc6\x05\x23\x36\x6a\xf9\x48\x78\xe4\x06\x04\x26\x2f\x8d\x00\x2a\x63\x07\x3f\x93\x49\x1a\xb3\xdd\xea\x2b\xda\xcd\xde\x29\x72\xa9\x1a\x4c\x25\xfb\x64\x1d\x71\xe8\xce\x7f\xee\x60\x64\x3e\x1f\x80\xb9\xad\x60\xa7\x9b\xa5\xd2\x25\xf8\x37\xe4\x52\x50\x9a\xd1\x51\x38\xab\x21\x2e
\x62\xcb\xd6\x0f\xa2\xeb\x23\xd1\x1d\xe3\x5d\x83\xac\x1b\xd9\x78\xc6\xbb\xc9\xc0\x53\x96\xc4\xb4\x6c\x62\xa5\x2f\x8d\xef\x3f\x73\x7d\x29\xa2\x3e\x58\x0b\x08\xfa\xed\x9f\x72\x35\xc4\x3b\xd6\xcb\x64\xef\xe0\x6e\x9d\x84\xf8\x31\x6c\x9b\x8f\x23\xce\x27\x50\xba\x51\xc2\x42\x14\xd3\xc3\x42\x9a\x83\xf1\xa3\x51\xef\x9e\x3a\xbd\x83\x99\x24\x9f\xc2\xdb\x06\x33\xd8\x8c\x6b\x3f\xde\x0b\x7c\xb3\xb5\xd4\xc5\xd3\xf9\x2b\xc1\x49\x9d\x01\xe8\xd2\x9c\x80\x13\x67\xe6\x15\xac\x82\x74\xe5\x9a\x8c\x68\x36\x4c\x18\xc7\x20\x88\xb1\x16\x53\xdd\x95\x3b\x5d\x04\x53\xfc\x19\x1d\xac\xcc\x49\xbc\xb3\x07\xae\x93\xbf\xea\x7f\xe6\xec\x21\xc4\x19\xd1\x73\xef\x32\xe5\xb0\x7c\x35\x69\xb4\x78\x68\xb2\x92\x40\xd3\x33\x01\xd1\x70\xe1\x38\xde\x2c\x8c\x16\x78\x2a\xea\x35\x5b\x75\x3c\xc0\xa8\x45\x84\x60\xb4\x32\x90\xd3\x71\x60\x06\xd2\xcf\x96\x9e\xdf\xc2\xd6\x28\x8d\x35\x49\x0b\xc1\xbb\xce\x4f\x23\x29\x37\xb9\xcc\xd0\x22\xc6\xbe\xbc\x3d\x12\xb4\x2e\xba\xdb\x3a\x10\x20\x65\x98\x8d\xff\x42\x3c\xdf\x24\xc1\x38\xeb\x9c\xa2\xad\xf5\x24\x83\x8e\x94\xb0\x8c\x12\xf7\xe9\x29\xea\x6d\xd9\xb6\xc8\xbe\x67\xd8\xa6\xe0\xcd\xe0\xe1\xce\xf8\xaa\x66\x98\x4a\x3e\xda\x03\x96\xff\x23\x12\x65\x49\x9a\x82\x90\x15\xfb\xd7\x3a\x25\x6b\xef\xb6\xf8\x69\xfd\x1c\xcb\xda\x79\x1c\x04\xd8\x71\xa4\x11\xa7\xf8\x39\xb8\xb7\x0b\xd7\xbb\xe6\xf1\x7e\xbc\x6c\x47\x54\x12\x26\x90\xe1\x3a\x90\x73\x08\xac\x96\xe6\xd5\x79\x32\x6b\x57\x1d\xbc\x2f\xfd\x8e\xd1\x22\xa1\x0d\x74\x17\x5c\xc5\x38\x8a\x9a\xd2\x0f\x99\x99\x67\xcb\x0a\xbe\xac\x2f\x45\xfa\x73\x8d\x82\xc7\x86\xbb\x90\x41\x05\x45\x42\x5e\x51\x2c\xcf\x10\xa4\x52\xbd\x78\x87\x2a\x48\xea\x8a\x91\x6c\x67\x9c\xc2\x32\xb1\x2f\x29\xe5\xd4\xbe\xd7\xc1\xfe\x5b\x31\x57\x94\x05\x78\xe0\x28\x78\xab\xb4\xd7\x78\x26\x7b\x05\x3d\xc1\x47\x0d\x1c\x2d\xa8\x14\x9d\xc7\x3e\x2e\xc0\x2a\x6c\xad\xb8\x33\xbb\xfb\xe4\xca\xfd\xa8\x11\x20\xab\xd8\x7d\x6e\x3a\x47\x6c\xb4\xb0\xe2\x91\x85\x16\x02\x44\xe1\x36\x27\x35\x6d\xf9\x40\x3c\x02\x40\xec\xe7\x31\xeb\x89\xbf\x90\x0d\x8b\x4e\x8c\xf8\x29\x2f\x8a\x72\x10\x8b\x33\xda\x0a\xee\x4e\x76\x89\xc8\xee\x7a\x07\xe7\xfb\x18\x33\xa2\x9c\xde\xf6\x78\x29\x9f\x5e\x53\xc7\x5d\xfc\xb6\xc2\x52\xf6\x2d\x0a\xe3\xee\xd6\x44\x76\x7e\xca\xdd\x15\x7a\xf6\xbb\xa5\x22\xca\x00\x42\xcd\x7c\x19\x20\xce\x2c\xfa\x2b\xdf\xa0\xe8\x27\xef\x67\x4e\xb4\x7b\xc3\xc2\xe4\xe4\x46\x72\x54\xd8\x2b\x2f\xff\xd7\xe9\x5f\xe2\x91\xe8\x46\x72\xf9\xc9\x05\x58\xbe\x71\x92\x55\xee\xaa\xf6\x2a\x2b\xd9\x3c\x3d\x9d\x2d\x43\xeb\xea\x95\x35\x5b\xd2\x26\x13\x31\xc4\xb1\x07\xd7\x02\xf8\x75\x90\x5b\xb0\xd5\x7e\x53\x2a\x50\x31\x13\xe3\x68\x78\x31\xec\x04\x9f\xde\xac\x65\x8a\xd3\xe4\x81\xa6\xe9\x89\xb8\x02\xba\x23\x15\x10\x38\x30\x24\x37\xcd\x37\x2d\x42\xf6\x05\xb9\xcd\xba\xf8\xbc\xee\x16\xb0\xb5\x0e\x4d\xfd\x25\xf7\x30\x98\xfd\x19\x49\xca\x80\xfd\x8d\x4f\x55\x18\x95\xe9\x1f\xd2\x4b\x1f\x5f\xf2\xcb\x05\xd2\x9b\x65\xff\x5c\x33\xa0\x41\xd6\xc6\x79\xdc\x85\x89\x94\x3b\xcd\x33\xb5\x0e\xdf\x21\x2f\x0d\x34\x13\x10\xe2\x3b\x28\x2b\xe1\x7f\x96\xca\x2e\xa5\x6c\x1e\x47\x89\xea\x98\x10\x93\x6f\xaa\x63\x8a\x05\xca\xb0\xbf\xbd\x53\x08\xe4\x49\x97\xde\x39\x61\x2a\x85\x01\x64\xf9\xfb\x20\xc1\x52\xb3\xe9\x7c\xd8\xcf\x0b\xa6\x8f\xe0\xf8\x12\xf3\x13\x9d\x05\x6d\xe0\x6e\x20\xff\x96\xef\x22\x3b\x15\x13\x3e\x3d\x3c\x20\x02\x46\x8c\x9f\x9a\x58\xac\x81\x98\x7b\x8e\xec\x79\xf8\x79\xd6\x31\x9d\x03\x9a\x83\xc1\x7a\xcf\x22\xf6\x02\x2d\x20\x95\xf2\x58\xc6\xc5\x52\x24\xcf\x4c\x18\xd1\x26\x0f\xf5\xd7\xf0\xcb\x59\xae\x12\xa1\x29\x63\x96\x1d\x11\x3c\x4e\xa4\xfa\x1a\x0d\xe3\x1f\x30\x9c\xfc\xca\x60\x9e\xcb\xb0\x6f\x85\x99\xc7\x16\xfc\x50\x54\x9b\x01\x44\x6c\xbe\x33\x32\x12\xab\x03\xfe\x6d\x5
b\x0b\x25\x94\xc9\xdd\x42\xb4\xc8\x3e\x24\xd2\xce\x98\xd8\x33\x1e\x02\x12\xd3\x0b\x93\x52\x7b\xd1\x3d\x57\x9e\x6f\x52\x1f\xee\xd9\xdf\xc3\xb9\x79\xee\x52\x46\x7d\x84\x7a\x4f\x21\x1e\x91\xa5\x51\xb6\xf7\xbf\xb9\x80\x99\xf3\xcb\x91\xfe\x03\xb7\x83\x3f\xcc\xd7\xbe\xe8\xac\x56\x3a\x7d\xfc\x80\xff\x7a\xaa\xdb\x14\xc1\xf6\x4c\xd0\x15\x48\x98\x61\x2d\x1f\xbe\x30\x4a\xd8\x22\x1e\xfa\xe7\x86\x97\x8e\x51\x7f\x2e\x01\x31\xf6\xd0\xe6\x1e\xbf\x38\xf0\xf1\x10\x74\xc4\x48\x82\x4d\xb1\xd0\xdf\x3b\x26\x3e\x4e\x3f\x44\xfb\xe4\xb1\x6c\xab\x00\x72\x2d\x85\x8c\xe0\x4c\xce\xab\x8b\x44\xf1\x8f\x2f\x10\x1b\xc1\x34\x3a\x34\x66\x26\xad\x6c\x55\xff\x41\x86\x59\xa9\xf4\x21\x9a\x53\xb4\xef\xd7\x51\x06\x8c\x6d\x27\x3a\x06\xc6\x6d\x1b\xe6\x3f\x64\x84\x34\x82\x57\xc1\xe9\xc3\xe7\x2e\x18\xec\x09\x9f\x3b\x93\xeb\x57\x50\x60\xd5\x4a\x60\x37\x8d\x29\x55\x42\xc7\x4e\x23\x91\x9c\xc5\x9f\xb7\x0b\x25\xd5\x68\xf4\xda\x97\x9d\xe3\xe1\x55\xeb\x8c\xbb\x8d\xdc\xc8\x97\x56\x93\xff\xb2\x37\x7a\x3a\x5e\x7b\xb3\xe1\xf1\xa8\xad\x31\x0e\xb4\xb4\xb9\x7c\xff\x9b\xfb\xaa\x82\xc2\x0b\x0e\xcb\xed\x0f\x6b\x95\x84\x1b\x88\x37\x12\x00\x32\x8e\x18\xa5\x41\x39\x3e\xcd\x3d\x08\x0f\x8b\xfc\x76\x52\x85\xdb\xea\xfe\xbc\x4f\x97\xce\x2d\x02\xfa\x22\x9c\xe9\x73\xa1\x7b\x22\xc6\xf6\xa5\x8d\x43\x82\xff\x76\x4a\x16\x1b\x21\xd3\x79\xc4\xae\x82\x4f\x9b\xe5\x8a\x64\xb6\x43\xfc\x94\x6a\xe3\x8e\xf7\x28\xb4\x53\xf1\x76\x15\x61\xd6\xc1\x57\x7b\x86\x42\x5c\x99\x13\x7d\x5a\x62\x46\xd4\x09\x75\x9f\x13\x94\xc1\x0a\xdb\xa8\x25\xc8\x32\x46\xe1\x66\x9e\xb1\x2b\xd4\xcf\x1f\xc0\x86\x38\x9c\xe6\xd8\xf9\x4b\x5b\x57\x11\x80\x6a\xf5\xf2\x98\x1e\xa6\x4d\xc7\xf6\x45\x2b\xf7\x6b\x6c\x45\xbd\x37\x27\x3c\x08\x1c\x22\x8e\xd3\x14\x24\x2d\x49\xd5\x73\x58\x75\xe3\x5a\xda\xc8\xa1\x93\x01\xa4\xdd\xfd\xf8\x91\x6d\x57\xab\x0c\x49\xd6\xbe\x81\xc7\x1a\x93\xe8\x6b\xc2\x27\x0d\x5c\x3e\x2a\x2b\x0b\xa5\xd1\x19\x2f\x44\x64\xf3\x74\xe7\x33\x9a\x17\x4e\xf3\x49\x91\x98\x67\xeb\xc2\x4d\x71\x85\xaa\xf1\x2f\xa2\xe7\x53\x37\x5e\x16\x00\xbd\x30\x7b\x84\x1c\x59\x61\xd9\x5e\xc7\x94\xac\x4e\xaa\xb5\xd9\x47\xe4\x8d\x86\x68\xb4\x75\xbb\xa9\x2e\xce\xf7\xfd\x2b\x07\x6a\x59\x50\xc8\x8c\x99\xbc\xda\x2f\xbf\x96\xbf\x25\x00\x3d\xdf\x29\x61\xe7\x7a\xaa\xa3\x6d\x12\xe6\xf9\xfe\x1e\xbb\x20\xa4\xd8\x96\x41\xcd\xd2\x7f\x9f\xa6\xd8\x6d\xd8\x2d\xdc\xf8\xf3\xe1\x18\x50\xc9\x5e\x48\x20\xbe\x61\x91\xb7\xee\x67\xc9\xe8\xf6\x91\x83\xf4\xd6\xf5\xe4\x96\xc1\x45\x87\xe5\x70\x3e\x08\xa8\xfc\x83\x5e\x66\x3c\x69\x7b\xe3\x49\x50\x7d\x81\xec\xf8\x12\x6c\x49\xad\x3f\x32\xc3\x3b\x45\x55\x27\xe8\x0b\x64\x47\xef\x73\x27\xce\x23\x24\xd8\xf7\xb6\x0c\x2a\xad\xc0\x3a\xb1\xe0\xf1\x33\xe7\xb3\x04\xff\x03\x39\xd3\x14\x95\x6b\xe3\x72\x33\x93\xc2\x42\x95\x2d\xd5\x98\x2a\xd0\x7d\xdd\xee\xc3\x7c\x97\x62\x96\x1a\xe5\x31\x69\xda\x5b\x1c\xf5\xd7\x61\xfd\x5c\xe3\xa2\x63\x11\x02\xcd\xe9\x43\x57\xaf\xa3\xe3\x45\xc6\x9b\x0d\xc6\xb8\x63\xdb\x52\x2e\x06\xc4\x55\xad\x23\xb6\x32\x89\x74\x8a\x86\xd7\x6a\x4e\x11\x6c\x9b\xd8\xc1\xfd\x82\x62\xec\x55\x99\x2b\xf9\x29\xed\x80\x23\x9c\xab\x1e\x77\x76\x9b\x93\x63\xf4\x53\xaf\x3d\xd2\x39\xd1\x9c\xd3\xbf\xb4\x9f\x0a\xee\xab\xb8\x2c\x3b\xdf\x23\xad\x20\xf9\xdf\x40\x02\xb1\xf7\x9c\xf8\x15\x9d\x0e\x7a\xc3\xdc\x8a\x6b\x2e\x35\xfb\x5f\xd7\x18\x84\xb3\xa1\x86\xc7\xa4\xa9\xa4\x75\x4c\xf0\x69\x64\x0f\xb3\xbc\x5d\x39\x26\x41\xac\x6e\xf6\xab\xa9\xb4\x2c\xdb\x22\xb6\xb6\x20\xcd\xe7\x86\x54\x52\xe8\x19\xa8\x22\x6f\x61\x93\xef\xad\x2c\x1b\xe9\x7b\x72\x60\x64\x98\xc9\x7e\x15\x37\x10\x17\xfa\x64\x09\x27\xbe\x89\x97\x18\x57\x95\x21\x88\x34\x48\x84\x4f\x63\xb6\xe3\x59\x0d\xa3\x2e\x45\x9b\xf1\x8f\xeb\xbb\xc2\x94\xa1\xfe\xcf\x79\x95\xa0\xb8\x
df\x22\x75\xfb\x3a\xaa\x89\x9c\x83\xf3\x93\x37\xb8\x57\xde\x7a\x73\x7b\x8b\xa8\x71\xfa\xf2\x70\x6c\xc4\xa3\x5e\x4c\x95\x48\x51\xcf\xa7\xbe\x33\x35\x66\x67\x05\xca\x31\xa8\xcc\x38\xcc\x2d\x29\xd2\x16\xf7\xd7\x8f\xee\xa9\xb3\x14\x3b\xaa\xbe\xfe\x76\x9a\x06\x38\xb7\xa6\xf2\x2d\xb4\x72\x83\xef\xe9\x0e\x00\x35\xa0\x21\xd2\x52\x7b\x3e\xa4\x2e\x79\xd0\xb4\x81\xe4\xc2\xc0\x3a\x47\xd0\x01\x24\xc1\x6f\xf0\x78\xb7\x8b\x08\xf3\x1f\xa8\xbe\x32\xcb\xe3\xb8\x72\x24\x1d\xf6\x3c\x9b\x8e\xde\xa3\xb6\xa3\x75\xd4\x11\xba\xa4\xa0\x1a\xdf\x2f\x6d\x6d\x88\x83\x81\x9b\xc9\x7a\xdf\x44\xc4\x43\xff\x57\x55\x8a\x22\x34\xe6\x5b\x04\xe1\xd5\x6c\x81\x48\xf7\x72\x64\x67\xdc\xd4\x3b\x7c\xaa\xdf\x7e\x75\xdc\xaf\x66\xde\xe4\x8c\x5d\x69\x68\x41\x82\xcd\x6b\x7c\x65\xd9\x07\x4f\x6e\x93\xed\x22\xc5\xcc\x6a\xa9\xcf\x88\x7d\xaa\xb8\x26\xa5\xc4\x16\x39\xd8\x37\x67\xe1\x6a\xb8\xc5\x4a\xf3\x96\x15\x0e\x84\x67\xda\x99\x91\x30\x56\xbe\xf2\x0f\xfd\x8f\xb3\x4d\x31\x51\xdc\xde\x48\xe4\x81\x04\x9b\xdf\xd5\x44\x11\x61\xe2\xc9\xe5\xb9\x5b\x55\x41\xf7\x9d\x6d\xa6\xcf\xe8\xb5\x37\xa9\x9b\xd9\xc9\xf5\x53\x8e\xc5\x66\x73\x09\xc4\x18\xad\x1c\x59\xd9\xe7\xbe\x63\xc7\x5e\x68\x61\x5c\xf7\x74\x9d\xa5\x62\xfe\xe7\xf6\xb4\xd8\xb7\x2c\x52\xb4\xc9\xda\xb7\xa3\xe4\x95\xa2\x0b\x71\x8f\xd1\x3b\x14\xd2\x3a\x4d\xfa\xf0\xfc\x55\x68\x41\xf9\x00\xb3\xbe\xbc\x9b\x65\xb5\x2c\xbb\x04\x1e\x31\x8f\xdc\xa7\xb9\x88\x9b\x59\x27\xfa\xc1\x90\x48\xce\xc4\x4c\x92\xfb\xde\x57\x40\x49\x8e\xe9\x20\x5c\x24\x9d\x8c\x86\xbf\xce\x8f\x72\xb0\x1a\x7a\x3c\xb4\xbc\x44\x46\x24\xe7\x32\x2a\xff\x12\x37\x48\x4f\xa9\xdd\xc3\x29\x6d\x0a\x0f\x6b\x99\x2f\x86\x5b\xac\xda\xc1\x19\xf3\xb7\xc0\x60\xd4\x40\x4f\xcd\xe8\xd9\x0c\x09\xf8\x72\xcf\xb9\xec\xa7\x5e\xa0\x07\x26\xb1\x89\x61\x03\x75\x9e\x47\xae\xad\x38\x7b\xc6\x83\x0b\x85\x0c\x39\xfe\x4c\x89\x12\xb1\x40\xcf\x6c\x53\x44\x34\x2e\x04\xcc\x9d\x60\x9d\xe4\x33\x83\x13\xb4\x79\xdc\x9a\xad\xec\x5a\xb7\xca\xde\xd7\xc6\xa8\xbb\xd3\xf5\x49\x4a\xc8\xc9\x67\x86\xe6\xe6\x92\x06\x3e\xa4\x3e\x48\x5d\x2d\x55\x89\xf0\x78\x7e\x2d\x5d\xb0\xf2\x3a\xb4\xf7\x73\x72\xad\x6b\x8f\x37\x0c\x9a\xa3\xd9\xf5\xcc\x3f\x2c\x57\x29\x86\xd0\xcc\xda\xe5\xcf\xac\x07\xb9\x27\xed\x9f\x50\x26\x3a\x7e\x76\xfe\xc6\x7f\xca\xfe\x70\x13\xc1\xd9\xa8\x6e\xcc\x95\x44\xc7\x9b\x5a\x92\x1e\x15\xf4\xff\x29\x36\x39\x0a\x7d\x1f\xc8\xd9\x7e\x1f\x7b\x5e\x89\x1f\x50\x72\x3e\xe6\xab\x62\x6d\x2b\xea\x7f\x0b\x38\xf4\x80\x64\x3b\x62\x03\xa4\x3c\xdf\x4d\xf5\x4c\x2b\x18\xd1\x14\xff\xd0\x7a\x52\xeb\xb6\xb4\x7e\xdc\xad\xdd\x62\xe0\x18\xfd\x10\xfa\x88\xa4\x69\xf7\x95\xd8\x9b\x1e\x1e\xee\x68\xd4\x7c\xce\x9e\xcf\xcb\xb5\x5f\xe1\x9e\x86\x81\x42\x24\x62\x91\x31\x0a\x06\xa3\x16\x42\x6f\xf3\x63\x71\xd5\x93\x5b\x6d\x1b\x58\x44\x79\xb1\x26\x41\x1e\x29\xa8\xf6\x7f\x1c\x42\x3c\x93\x05\xfd\xc4\x39\x36\xab\xbb\xb1\xfb\xb7\x60\x52\xa6\x0a\xe0\x17\xe5\x54\xa9\x02\x05\x2e\x59\xea\xfd\xce\x9f\xa1\xf6\x8c\x37\x6b\x4a\x09\x47\x7e\x26\x75\x81\xaf\x92\x60\x8d\xf2\x2b\x11\x05\x6e\x81\x1b\x97\x74\x9d\x6d\x02\xc5\x4b\x70\x9c\x1e\x10\x43\x00\x9f\xac\x5e\x2a\x8e\xb3\x8c\x83\xb1\x3a\xbb\xae\x3a\x97\x3f\x50\xfd\xf7\xb6\x3b\xaf\xa5\x15\x6f\xbc\x6c\xca\xf5\xab\x0d\xe8\x37\x27\x3e\x31\xb2\x56\x1b\xfc\x3a\xd0\x46\x36\x0f\xe7\xaf\x0f\x2d\x78\xad\xc7\x81\xcb\x9d\x91\x21\x18\x29\xc1\x4b\xd3\xd1\x1b\xd3\x2f\x72\x6a\x98\x45\xe7\x79\xb5\x4a\xdb\xcd\x3f\x20\x11\xeb\x9c\x76\x4b\x9f\xf3\xf8\x77\xbe\xdc\x87\x54\x83\x67\xd5\x81\x0f\x05\x86\x5b\x22\x38\x94\xa3\x2e\xec\x04\xa3\xc4\x94\xc7\x45\x06\x9b\xa1\x42\x28\xb0\x1a\x4b\xbd\xdf\xa1\x59\xe1\x9b\xeb\x70\x8b\x7c\x09\x84\x1d\x03\xfd\xef\xf9\x9c\x62\xa5\xb1\x84\xb3\x2b\xea\xda\xea\x1f\x7e\xf2\x9f\x78\
x13\x87\x2d\x0b\xd9\x9c\x85\xba\xaa\xe9\xdd\x26\x63\xda\xa9\xe9\x96\xa8\x52\x29\xf2\xf7\x46\x8a\x6e\x7c\x88\xe4\xf3\xb3\x1f\xf6\xb5\x9d\x51\xb5\x87\x4f\x6b\x8c\x36\x19\x2a\xc7\xb2\x60\xdf\x02\x55\x7e\xeb\x88\x5b\xab\xa7\xb0\xe1\xc3\x07\x7b\xa1\x68\xcb\x17\xf2\xa2\x0c\x4b\x6c\x68\x72\x02\xf2\x90\xba\x38\xd1\x08\x71\xde\xf0\x58\xad\x65\xe6\xb4\x60\x05\x65\xd0\x2b\xc3\x25\xd8\x7e\x2e\x93\x02\xa4\x25\x90\x49\x0c\x2d\x2a\xa5\x2e\xe5\x03\xf4\xa1\xdf\xc4\x47\x71\x7e\xa8\x79\x20\x2b\x61\x81\xca\xd3\xf3\x3d\xc4\xd4\xf4\xd4\x15\x57\x44\x91\x69\xf5\xd9\x21\xe1\x6d\xec\xb6\xff\x29\x41\xe6\x5a\x8f\xca\xcc\x4a\x33\xef\x17\x00\x77\x95\xb9\x68\x47\x66\x55\x43\x91\x16\xdc\x8a\x8b\x09\x1e\xc2\x04\x70\xb7\x1b\x8a\xd4\x86\x5d\xe3\x98\x25\x47\x27\xe8\x35\x77\x71\x44\x23\xeb\xd5\x34\x09\xce\x6e\xad\x43\x79\x23\x21\x27\x4f\xdd\xf8\x60\xaf\xd6\xf8\x4b\x9a\x78\xc0\x8c\x47\x3a\x9e\x26\x33\x13\x29\xbd\x1b\xeb\x6b\x4e\x83\xaf\xac\x64\xc0\x45\xb4\x11\x8c\xaf\x9b\x2d\xe1\x44\xf2\xf0\x5e\x66\x9c\xe9\x71\x6f\x65\x47\x54\x72\xad\xd4\x0c\xc1\x01\x1f\x4c\x26\xab\x5e\xee\x04\x69\x25\x5b\x23\x6f\xe9\xe8\xa3\xcf\xd3\x70\x99\x9a\x29\xa9\xa8\x63\x80\xb5\x98\x27\x88\x19\x11\x1c\x4f\x61\x09\x38\x61\x14\xdf\xd9\x39\xa4\xa8\x29\x51\x64\x6f\xd3\x87\x1f\xa6\x8f\x72\xe4\x9c\x30\x83\xff\xf6\x7d\x0f\xfd\x33\x64\x8d\xdf\x6a\xc6\xb8\x0c\x33\x8e\x74\x5e\x11\x86\xfe\x08\x0d\x0a\xa7\xde\x38\x1d\x48\xc8\x0e\xd4\x44\x3b\x72\x58\x97\xc8\x4d\x53\x1f\xae\x0e\xc6\x3e\x4a\x3b\x73\x62\xcc\x8c\xfb\x58\xe8\xe4\xac\x0e\x8b\xb3\x62\x75\x23\x61\x5e\x12\x90\x35\x66\xe1\x7f\x87\xf4\x57\x97\x18\x5a\x35\x29\xbb\xd4\xb1\xf4\xf0\xda\x52\x1d\x5a\x78\x2f\x24\xeb\x6f\x69\xf0\xcc\xd0\x65\x99\x5e\x19\x9b\x83\xbe\xc1\x3b\xae\xa8\x8d\x7a\x1d\xd2\xe3\x3e\xf2\x46\x5a\xf8\x26\xd1\x66\x2f\x6b\xfc\x99\x49\x19\x48\xc1\x57\xfe\x6c\x73\x16\x7c\x29\xd6\xdd\xfb\x45\xf1\x81\xfe\x52\xf9\xff\xc5\x46\x4b\x56\x0e\xc5\x97\x42\x48\xb7\x0a\x4b\x7d\x41\xe5\xac\x20\xcd\x23\xbb\x6f\xcb\xb1\x40\xd1\xa6\x05\x5c\x45\x2f\x78\x58\x31\x2b\xcf\x95\x15\xe6\x7e\xd3\x23\xa8\x91\x0a\x9c\xd3\x1e\x74\x48\x95\x8b\x03\x6f\x7c\xf5\xec\xf8\x80\x28\x4f\x99\xfa\x37\x32\x3b\x56\x9c\x04\x52\xbd\xff\x68\xa7\x13\xf4\xa1\x63\x7b\x69\xd8\xe3\x40\xa7\x6a\xef\x70\x00\x76\x85\x36\x6f\x8d\x12\x18\x14\x7b\xa8\xd0\x96\x3c\x6f\x4d\x7f\x2d\x2a\xaf\x89\x7c\x28\xd8\xa4\x8d\xb6\xfa\x95\x80\xc9\xef\xdf\x4b\xd6\x28\x48\xfb\x02\x8e\x72\x1c\xa7\x9e\x11\xf8\xbb\xca\x5f\xa2\x19\xd0\xac\xdf\x14\xcc\xf1\x27\xd8\x17\x28\xb2\x15\xaf\x19\x0e\x59\x4b\xc6\x39\x13\x79\xe9\x3f\x03\xe3\xe3\x2f\xce\x17\x4e\x33\xa5\xb5\x60\xb7\xe3\xd6\xf8\xb7\x8c\xdc\x8f\xf6\xaf\x2d\xb5\xfe\xe9\x99\x5d\xae\x86\x5a\x5e\x52\xc2\x30\xfd\x6c\x5a\x37\xe0\xbc\x2c\xbe\x57\xe9\xe1\xf4\x77\x16\x5d\xa9\x41\x89\x60\x94\xf6\x34\x6c\xca\x5f\x41\x3b\x3c\x52\x2e\x1e\x26\xaa\xf7\x56\x2a\x72\x64\x8c\x46\x59\xf3\x1b\x5a\x9e\x3c\x3c\xa8\x5f\xcc\xf3\xc6\x59\x33\x33\x38\xb7\x6d\xcc\x74\x86\xc7\xa6\x0f\x54\x55\xf2\x1d\x24\x3b\xa2\x95\x56\x95\x12\x9e\xa2\x64\x66\xe2\x3d\x77\x44\x40\xf1\x08\xe1\xf5\xad\xf7\x27\x2d\x21\xb3\x65\x9b\x8f\xa1\x1c\x2d\x27\x00\xcf\x87\x35\xdf\xf8\xb4\x8f\x2b\x4a\x59\x6c\x64\x6b\x3b\x0f\x12\x3b\xdc\x99\x3f\xc9\xb3\xaf\x83\x8f\x2b\x10\xe1\x74\xa7\x73\xb1\x58\xb2\x2f\x93\x6e\x42\x46\xdf\x6a\x1a\x98\x12\xcc\xdf\xda\xd7\xc9\x8a\x77\x4b\x9c\x4d\x0e\x61\xf2\xb2\x62\xf0\x59\x02\xd6\x8c\x46\xf5\x67\xa2\x72\x80\x97\xfb\xce\xc6\x7a\xe6\xaa\x64\xe3\x8d\x1f\x8f\x02\xd6\x95\xfc\xec\x26\x11\xc8\x25\x36\x7f\x47\xbc\x91\xf9\xdb\x9a\x0c\x78\x9a\x7f\x18\x05\x19\x1b\x48\x34\x1b\xa3\x85\x40\x74\xbb\xb3\x3e\x04\x2c\x20\xc5\x2c\xfd\x71\x35\xae\x6b\x70\x5a\x69\xdf\xf0\x2d\xf4\x65
\x1c\x56\x96\x92\xc0\x2e\x4f\xee\x1d\x56\x1c\xf4\x86\x66\x08\x2d\x0d\x58\x38\x3f\x69\xec\x0a\x56\x90\x0e\x04\xbe\xa9\x8c\xe4\xd2\x33\x91\x74\x03\xad\x43\x56\xd1\x24\x3d\x86\x9c\x78\x9d\x62\xbd\x75\x80\xf4\xe8\x3f\x45\xcf\x14\x2a\x35\x1b\x6f\xdc\xeb\xd8\xd0\x51\x87\x34\x98\xcc\x9f\xf9\xf0\xfd\x2b\xc3\xd9\x6a\xe9\x29\x1c\xff\xa2\x01\x3c\x0d\xe3\x1a\x5d\x80\x59\xed\xdd\x81\x45\x5e\x1b\xb2\xe5\x0c\x23\x7f\xa3\x01\x58\xec\x3b\x07\xb9\x24\x49\xb5\x38\x1d\x18\xae\xf5\xcf\x9d\x67\x13\xb7\xe1\x7b\xb1\x44\x23\xb6\x9a\xd7\x00\x00\x8f\x1a\x8e\x62\x5a\xea\x83\x29\xba\xe9\x71\xaf\x57\x22\x1c\xbc\xd5\xb1\x94\x3e\x19\xaa\x5f\x56\x8e\xed\x19\xaa\x0a\x28\xdd\x4e\xee\x43\xa2\xae\x3b\x73\xaf\x2e\x24\x78\xf5\xb9\xb1\xb2\xb6\x95\x9a\x63\xfc\x6c\x9b\xc4\xa5\x96\x52\x08\xfd\x51\x18\xa1\xfd\x13\xc5\xe8\x2d\x3b\x66\x58\x4f\xeb\x90\xc1\x7e\x4a\xec\x63\x30\x9e\xee\x8b\xd2\x6c\x2d\x7c\x39\xdc\x6b\x5e\xf7\x6e\xe9\xb9\x35\x9e\xc1\x61\x08\xfc\x18\x20\x81\x16\xe8\x57\x67\xb8\xbe\x29\x26\xc3\xf1\x6a\x14\xb5\x61\xd9\x85\xcd\x9f\xcb\x6c\x85\x12\xa1\xb5\x99\xce\x22\x86\x87\x1e\x5b\x0f\x4c\x23\xe2\xdc\x9c\xcb\xca\x27\xf1\xc5\x17\x57\x87\x4a\xf8\xe0\xd3\x6d\xdd\x74\xa5\x64\x3d\x97\xac\x34\x3a\x6c\xc5\x94\xd1\x43\x38\xcf\x97\x16\xa3\x4f\x83\x82\x54\x64\x7e\xe3\x6d\xa4\xde\xc4\xea\x90\x05\x81\xe7\xeb\xa7\x57\x97\xcc\xaf\x8f\xbf\xb6\xf9\x3c\x86\x7b\x7f\x8a\x60\x86\x26\xca\x8c\x79\x0c\x60\xea\x48\xce\x91\x42\x93\x47\x6c\x93\x2f\x72\x4f\x96\x12\x3a\xb4\xc9\x86\x90\xbe\x3f\x0b\x25\xc6\x83\x1c\x58\xd6\xff\xf8\x30\x89\xc5\xf8\x2d\xf2\x87\x32\x8a\xc7\x5d\x23\x30\x5f\x25\x74\x22\x9b\xb0\xf0\xac\x91\xbd\x8f\xf0\x8b\x4c\x20\x7d\x6e\x98\x89\xea\x11\xfe\xa0\xcd\x59\x25\x34\x0b\x7f\x3d\xe5\x3d\x46\x71\x98\x51\xe1\x0b\x1b\xfb\xc3\x3a\x22\x89\x65\xce\x5b\xd9\xdd\xfa\xe8\x94\x84\x8b\xe5\x29\x4b\x39\xc6\x52\x5d\x8c\xfd\xd5\x8f\xc9\x93\xd1\x27\xc8\x34\xba\x81\xad\xae\x8c\x35\xc8\x42\xd1\x84\x8b\x88\xd1\x32\x28\x0f\x14\x22\x3b\x74\xc4\x48\xd9\x2d\x60\x18\xd3\x44\x5e\xac\xb0\x1a\xab\x59\x50\x44\x94\xdd\x85\xac\x60\x06\x48\xaf\xa2\xb7\xd0\xa4\xe5\x24\x7b\xb7\xae\x03\x37\xd3\xe4\x41\xc9\x92\xbf\x51\xf9\x90\x17\x77\xa7\x29\x67\x03\x0a\xc6\xf1\x3f\xca\x02\x31\xcf\x2c\xa8\xc4\xa7\xb8\xa9\x7c\x83\x2e\x04\xbd\x31\x81\xf2\x6e\xd9\xa2\xbc\x8d\xb0\x76\xbf\xc4\x79\xc7\xc7\x85\xa2\x77\x54\x2f\x39\x15\xaa\x79\x5f\xd0\xde\xe0\x6a\xe4\xa4\x00\xea\xac\x90\x7f\xfe\xcf\x4d\x3d\x7b\x01\xd8\x12\x58\x54\x06\x44\x92\xbc\xaf\x0e\xb5\x79\xec\x7b\x34\x09\x62\xac\x22\x05\x01\x81\x87\x5e\xab\xa3\xd7\xb3\x33\xcf\xfd\x59\x34\x99\xa6\x02\x2d\x67\xcb\x26\xe8\x40\x4d\x27\x35\x36\x1d\x97\xfd\x3f\xfd\x52\x91\x88\xa0\x98\x6f\x89\x52\x47\x3d\xaa\x9d\x05\x93\xdf\x3a\x44\x09\x0e\x7c\x59\xa6\x3a\xe5\x3c\xa2\x63\x4b\x62\xd2\x8e\x7e\x03\x35\xa8\x0f\x79\xd2\x71\xe8\xec\x1c\x31\xcf\xeb\x56\xad\xd4\xbc\x7c\x4d\x35\x0d\x1b\x3b\x09\xe8\x7a\x4e\x21\x5f\xf6\xf7\x9d\xf8\x3d\xda\xe7\xf9\x10\xf1\x28\x1c\xc3\x35\x1b\xe6\x08\x93\x7f\x6c\xd4\x87\xba\x48\xcb\x3c\xfb\xf2\x88\xd9\x1b\x4b\xda\x42\xc0\xb2\x72\x8b\xd1\x74\x2d\xed\x93\x14\x61\x2f\xb3\x37\x91\x34\xd6\x3e\xf3\xb1\xce\xf0\xfc\x96\x4e\x24\x37\x70\x8e\x54\x63\xd8\xf7\xfa\xdb\x02\x22\x49\x47\xd2\xb5\xcc\x78\x10\x22\x8a\x5d\xd1\x40\x38\xd3\x1e\x5d\xef\xbe\x23\xba\x0f\xda\x71\x3c\x78\xf4\x4b\x95\x99\xeb\xca\x4d\xfb\xb4\xaa\xab\xaa\x1c\xf8\x5d\xcc\x34\x0a\x54\xd2\x59\x53\x4c\xb1\x0d\x90\x4d\x6e\xbf\xb4\xbc\x4f\x83\x38\x95\xc1\xd1\xe6\xcd\xe5\xe2\x3e\x67\x2a\x36\x0d\xa2\x5b\x55\x7c\x35\x2c\xb1\x9a\xb0\x08\xf6\x52\xb0\x1c\xf4\x27\x98\x3e\xf7\x14\x59\xd1\xdf\xe1\x43\x3c\xc3\xd5\xff\xc5\x6d\x44\x67\xa9\x91\x5d\x59\xd2\x90\xbe\xfa\xc9\x88\x42\x84\xb0\x39\xd
1\x54\x12\xc7\xb5\xdb\x15\xe1\x05\xb0\x8a\x7e\xa3\xb1\x95\x5c\x17\x37\x5d\x9d\xd0\x9b\xdf\xd6\xcc\x0b\x75\xff\x34\x9d\x1d\x33\x51\x01\x42\x22\x22\x0d\x5d\xa9\x56\x29\x3a\x6b\x62\xd2\xa1\x7d\x31\xb7\xbb\x2a\xa4\x6b\x9e\xb0\xed\x40\x57\xd9\xe5\x93\xdf\x71\x26\xad\x2f\xb6\xb1\x96\x36\x10\xbc\x47\x44\x72\x4c\xe5\xcc\x56\x97\x75\xa9\xd7\x1a\xd2\x79\xdb\x1a\x14\x65\x8c\xb4\xf6\xe6\x6b\x61\x88\xf1\xd4\xa2\x60\x48\xd3\xad\x60\xa5\xef\x96\x23\x02\x76\x7c\x72\xff\x36\x5b\x48\x07\x06\x80\x86\x2d\x3e\x83\x56\x55\x0d\x93\x44\x2b\xdd\x1c\x58\xe5\x02\x1a\x91\xcb\x09\x9a\xa8\x6f\xc8\x84\xfa\xbf\xc4\xed\x0c\x11\x5f\x84\x15\x29\xcf\x8d\xaa\xaa\xb4\x31\xa8\x0f\x17\x72\xae\x43\xa4\x02\x60\x8b\x41\xac\x8a\xce\xf5\x55\xda\xb3\x3a\x93\xdb\x35\x2a\x7d\x2c\x07\x2d\x6e\x82\x64\x31\xf6\x14\x48\xba\x4a\xa3\x44\xc1\x47\x61\x99\x97\x84\x37\x9e\x03\x83\xd9\x3b\x02\xe3\x63\xea\x21\xa9\x92\x2e\xb1\xd4\x9d\xc2\xad\xba\x2d\x77\x42\x03\x39\x0f\x86\xec\xd3\x98\x4d\x24\x30\x4d\xa4\xfe\x9c\xac\x22\x37\xe1\x50\xef\x88\x5c\xaf\xda\x95\x56\x63\xe9\xac\x09\xa2\x76\x09\xd6\x14\x00\x25\x30\x1a\x93\x94\xae\x11\xb6\x2a\xf7\x4b\x81\x5b\x3b\xa4\x42\x12\x70\x75\xb7\x06\xd9\x28\xba\xff\xe5\xd1\x77\x77\xf0\xde\x10\x76\xfd\xc4\x7a\x17\x34\x8a\x4d\xdb\x66\xa9\x45\x8e\x09\xa6\xbd\x0e\xec\x9e\x0c\x91\x66\xc5\xa1\xb4\x3f\xc5\x61\x41\x71\xa3\xe6\xe0\xcd\x53\x86\xc2\x41\xb8\x81\x0c\x18\x21\x77\x6a\x12\x5c\x9f\x67\xf7\xd7\x00\x89\x15\x7f\x77\xf4\x1a\x3c\x18\x9a\x1e\x52\xe1\x4a\x5c\x5d\x56\x30\x40\xa0\xa2\x1f\x3d\xa8\x14\xd1\xe7\x36\xa9\xcb\x3f\x8b\xc4\xf2\xbd\x9a\x34\xf0\x36\xec\xec\xe1\x44\xae\x3c\xb3\xf0\x96\x16\xec\xb2\xd2\xea\x0f\xbb\xd3\xd3\xef\xef\x58\xe5\x7b\x5c\xa9\xea\xac\xdd\x44\x95\x3e\x1d\xd8\xaf\x2a\xe0\xeb\xb4\x3a\x44\x06\xce\x7d\x0d\xb8\x19\x89\xf1\x95\x22\x03\x76\xcd\x43\x94\x29\x72\x15\xc8\xd0\xa8\x3c\x6b\xea\x4f\xd9\x25\x7b\x3e\x1b\xe9\x47\xb0\x45\x4e\x58\x6c\xdb\xfd\x73\x00\xbc\xb2\xe4\x82\xf6\x7f\x3d\x13\x58\xcc\x16\x69\x97\x50\xd1\xe3\x86\x75\x3e\x98\x3e\x09\xad\xd6\x2a\x1d\x13\x01\x80\xb2\xd0\x64\x54\xdc\x0e\x05\xd4\xdb\x95\xde\x16\x80\x1f\xb8\xa5\x6d\xc6\x80\x5e\x9c\x79\x01\x20\x6f\x4a\x07\x74\x8b\xc2\xb9\x2c\xfd\x1a\x4c\xdd\x7e\xfc\x7a\xff\xde\x41\x18\xd9\x7d\xff\x4f\x08\xc8\x8d\x10\x2e\x9e\x52\x2b\x8e\xbb\xbd\xaf\xda\x81\x4b\xdd\x4d\x4f\x24\x60\x1e\x0f\x4a\x84\xc9\x99\x48\xd1\x4b\x4e\x3f\x5d\x44\x88\x12\x6f\xbe\xdb\xce\xe4\x64\x82\x9f\xcf\x09\xea\x7f\x9b\x52\x29\x78\x94\x34\x72\x11\xd3\xf8\x21\x52\xde\x49\x9f\xe8\xdd\xe4\x30\x6d\xb6\xf8\x61\x36\xc6\x13\xb6\x03\xa6\xee\x2f\x58\x28\x93\x63\xe5\x4c\x57\x59\x5d\xd2\x7a\xef\xc0\xa6\x8d\x4f\x22\xeb\x93\x0f\xe7\xa3\x5e\xee\xd1\x56\x99\x86\x2e\xa1\xea\x32\x8f\x23\x9c\x76\x1a\xe2\x15\x5f\x5b\x13\x77\x22\x5e\xbd\xf6\xbf\xa8\xd3\x7c\xae\xaa\xf8\xe0\xe8\xb2\x2b\x32\x84\xad\x0a\x85\x71\x9b\x71\x9e\xb4\xc2\x31\xe3\x55\x06\x25\xb4\x9b\x78\x50\x16\xeb\x41\xc6\x56\xbb\x4a\xf7\xa8\x37\x70\x5f\xd6\x5a\xfe\x9d\xb2\x38\x1c\xfd\xc0\x5f\xe2\x51\x8f\x7c\x21\x0c\xef\x4c\x94\x02\xc4\x21\x41\x75\xa1\xef\x40\x79\xb8\x28\x9f\x57\xa4\x96\x39\x24\x5e\xc3\x95\xda\x22\x67\x1a\x30\x48\x10\xfc\x42\x12\x87\x69\x91\xa6\xd2\xf3\xfd\x31\x83\x27\xb2\x8d\xf5\xb4\x8e\xa9\x1f\x75\x1b\x8b\xfd\x7b\x6c\x88\x6c\xb8\x1e\x34\x46\x5f\x36\xef\x65\xbe\x77\x1f\x72\xf3\x45\xf6\xb1\xea\x32\x5b\x46\x7e\xa5\xb7\x03\xd4\xe1\x4e\x7b\x16\x97\xa5\xf4\xd2\xff\x12\x70\x39\x1a\x6e\x66\x34\x1c\x73\xab\xd4\xc4\x0b\xd7\xfe\x14\x5e\x13\x68\xe4\xfa\xf1\x3b\xbc\x02\xc4\xe2\x89\xce\x8b\x98\x8a\x4b\x89\xf2\xb8\x57\xf9\xc9\x33\xf1\xcc\xdc\x29\xc4\x58\xc9\xc1\xc9\xd5\x48\x10\xef\x70\x15\x76\xc8\xa2\x7d\x71\xcf\x11\x78\x7e\x98\x6d\xcd\x0a\xa3\xd2\x
c2\x0e\x01\x03\xac\x2c\xa8\xfe\xb6\x02\x20\xb4\xb9\x1b\x6b\x45\x1f\x71\xf2\xb7\x4c\xfc\xcc\x11\x0c\x6a\x7c\xb3\x34\x78\x4f\xb6\x39\x41\x54\xe6\x4e\xb8\x7a\x53\x96\xd0\xe7\xb4\x8c\x01\xfc\x20\x42\x2d\x04\x6e\x5e\x11\xe2\x96\x8a\x6e\x41\x18\x09', 2) |
from django.shortcuts import render
from django.http import HttpResponse
from api.models import *
import requests
import json
import dateutil.parser
#Method:GET - Method to query by book name.
def external_books(request):
if request.method == "GET":
pass
else:
return HttpResponse("Error: INVALID METHOD", content_type='text/json')
contents = {}
try:
response = requests.get("https://www.anapioficeandfire.com/api/books?name=%s"%(request.GET.get('name')))
except Exception as e:
contents["status"] = "failure"
contents["Error"] = "%s"%e
return HttpResponse(json.dumps(contents, indent = 4), content_type='text/json')
    #Formatting the response.
if response.status_code == 200:
contents["status_code"] = response.status_code
contents["status"] = "success"
contents["data"] = []
for item in response.json():
data = {}
data["name"] = item["name"]
data["isbn"] = item["isbn"]
data["authors"] = list(item["authors"])
data["number_of_pages"] = item["numberOfPages"]
data["publisher"] = item["publisher"]
data["country"] = item["country"]
data["release_date"] = item["released"]
contents["data"].append(data)
return HttpResponse(json.dumps(contents, indent = 4), content_type='text/json')
#End of external_books API.
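#Example (illustrative only; the local route depends on the project's urls.py):
#  GET <external_books endpoint>?name=A Game of Thrones
#forwards the name to https://www.anapioficeandfire.com/api/books?name=... and
#returns the matching books with their name, isbn, authors, number_of_pages, etc.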
#Helper function to check for a key in the POST request.
def _isKeyPresent(contents, key):
if (key not in contents) or (contents["%s"%key] == ""):
return False
else:
return True
#End of _isKeyPresent function.
#Helper function to return the response.
def _return_response(book):
details = {}
details["id"] = book.id
details["name"] = book.name
details["isbn"] = book.isbn
details["number_of_pages"] = book.numberOfPages
#TODO: Publisher
details["publisher"] = book.publisher
details["country"] = book.country
#TODO: Dateformat
details["release_date"] = "%s"%book.released
authors = Author.objects.filter(book_id = book.id)
details["authors"] = []
for author in authors:
author_name = author.first_name+" "+author.last_name
details["authors"].append(author_name)
return details
#End of _return_response function.
#Method:GET - Method to display all the records.
#Method:POST - Method to create an object.
def create_read_method(request):
if request.method == 'GET':
#The Books API should be searchable by name (string), country (string), publisher (string) and release date (year, integer).
if request.GET.get('name'):
books = Book.objects.filter(name = request.GET.get('name'))
elif request.GET.get('country'):
books = Book.objects.filter(country = request.GET.get('country'))
elif request.GET.get('publisher'):
books = Book.objects.filter(publisher = request.GET.get('publisher'))
elif request.GET.get('released'):
books = Book.objects.filter(released = request.GET.get('released'))
else:
            #Display all the books present in the local DB.
books = Book.objects.all()
contents = {}
contents["data"] = []
for book in books:
details = _return_response(book)
contents["data"].append(details)
contents["status_code"] = 200
contents["stauts"] = "success"
return HttpResponse(json.dumps(contents, indent = 4), content_type='text/json')
else: #Create a new book object.
try:
contents = json.loads(request.body.decode("utf-8"))
except Exception:
return HttpResponse("Error parsing the response.", content_type='text/json')
#Check for the missing key - name.
if _isKeyPresent(contents, "name") == False:
return HttpResponse("Error: Missing Book name tag-> name!", content_type='application/json')
#Check for the missing key - isbn.
elif _isKeyPresent(contents, "isbn") == False:
return HttpResponse("Error: Missing Book ISBN tag -> isbn!", content_type='application/json')
elif Book.objects.filter(isbn = contents["isbn"], name = contents["name"]).count()>0:
return HttpResponse("The book %s is already exist!"%contents["name"], content_type='application/json')
else:
book = Book()
book.name = contents["name"].strip()
book.isbn = contents["isbn"]
if _isKeyPresent(contents, "number_of_pages") == False:
return HttpResponse("Error: Missing number of book pages tag -> number_of_pages!", content_type='application/json')
else:
book.numberOfPages = contents["number_of_pages"]
if _isKeyPresent(contents, "publisher") == False:
return HttpResponse("Error: Missing publisher tag -> publisher!", content_type='application/json')
else:
book.publisher = contents["publisher"].strip()
if _isKeyPresent(contents, "country") == False:
return HttpResponse("Error: Missing country tag -> country!", content_type='application/json')
else:
book.country = contents["country"].strip()
if _isKeyPresent(contents, "release_date") == False:
return HttpResponse("Error: Missing book release date tag -> release_date!", content_type='application/json')
else:
book.released = dateutil.parser.parse(contents["release_date"]).date()
book.save()
if _isKeyPresent(contents, "authors") == False:
return HttpResponse("Error: Missing authors tag -> authors!", content_type='application/json')
else:
for val in contents["authors"]:
author = Author()
author.book = book
first_name, last_name = val.split(" ", 1)
author.first_name = first_name
author.last_name = last_name
author.save()
contents["release_date"] = "%s"%(dateutil.parser.parse(contents["release_date"]).date())
contents["status_code"] = 201
contents["status"] = "success"
return HttpResponse(json.dumps(contents, indent = 4), content_type='application/json')
#End of create_read_method.
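#Example (illustrative only; the actual route comes from the project's urls.py):
#  GET  <books endpoint>?publisher=Bantam Books   -> filter the local DB by publisher
#  POST <books endpoint> with a JSON body holding name, isbn, number_of_pages,
#       publisher, country, release_date and authors -> create a new Book record.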
#Method:POST - Method to update an object.
def update(request, book_id):
if request.method == "GET": #Create a new book.
if Book.objects.filter(id = book_id).count() == 0:
return HttpResponse("No book with the ID %s exist!"%book_id, content_type='application/json')
else:
book = Book.objects.get(id = book_id)
contents = {}
contents["data"] = []
details = _return_response(book)
contents["data"].append(details)
contents["status_code"] = 200
contents["stauts"] = "success"
return HttpResponse(json.dumps(contents, indent = 4), content_type='text/json')
else:
try:
contents = json.loads(request.body.decode("utf-8"))
except Exception:
return HttpResponse("Error parsing the response.", content_type='text/json')
if Book.objects.filter(id=book_id).count()==1:
book = Book.objects.get(id=book_id)
#Check for new updates.
if book.name != contents["name"].strip():
book.name = contents["name"].strip()
if book.isbn != contents["isbn"]:
book.isbn = contents["isbn"]
if book.numberOfPages != contents["number_of_pages"]:
book.numberOfPages = contents["number_of_pages"]
if book.publisher != contents["publisher"]:
book.publisher = contents["publisher"]
if book.country != contents["country"]:
book.country = contents["country"]
if book.released != contents["release_date"]:
book.released = contents["release_date"]
book.save()
for val in contents["authors"]: #Update author details.
first_name, last_name = val.split(" ", 1)
if Author.objects.filter(book = book, first_name = first_name, last_name = last_name).count() == 1:
continue
else:
author = Author()
author.book = book
author.first_name = first_name
author.last_name = last_name
author.save()
#Delete an author if not present in the new updates.
authors = Author.objects.filter(book = book)
for author in authors:
if (author.first_name+" "+author.last_name) not in contents["authors"]:
author.delete()
contents["status_code"] = 201
contents["status"] = "success"
return HttpResponse(json.dumps(contents, indent = 4), content_type='application/json')
else:
return HttpResponse("Error: INVALID ID!", content_type='application/json')
#End of update method.
#Method:POST - Method to delete an object.
def delete(request, book_id):
if request.method == "GET": #Create a new book.
return HttpResponse("INVALID METHOD", content_type='application/json')
else:
try:
contents = json.loads(request.body.decode("utf-8"))
except Exception:
return HttpResponse("Error parsing the response.", content_type='text/json')
else:
if Book.objects.filter(id=book_id).count()==1:
book = Book.objects.get(id=book_id)
book_name = book.name
book.delete()
contents = {}
contents["status_code"] = 201
contents["status"] = "success"
contents["message"] = "The book %s was deleted successfully"%book_name
contents["data"] = []
return HttpResponse(json.dumps(contents, indent = 4), content_type='application/json')
else:
return HttpResponse("No book with the ID %s exist!"%book_id, content_type='application/json')
#End of delete method.
|
import sys
# First sequence via its recurrence (values begin 1, 1, 1, 3, 11, 45, ...;
# this appears to be the super-Catalan / little Schroeder numbers).
a=[0]*1002
a[0]=1
a[1]=1
a[2]=1
for i in range(3,1002):
a[i]=(3*(2*i-3)*a[i-1]-(i-3)*a[i-2])//i
# Second sequence via its recurrence (the Catalan numbers: 1, 1, 2, 5, 14, ...).
b=[0]*1002
b[0]=1
b[1]=1
b[2]=2
for i in range(3,1002):
b[i]=(2*i*(2*i-1)*(b[i-1]))//((i+1)*i)
for line in sys.stdin:
n=int(line)
if(n<3):
print(0)
else:
print(a[n]-b[n-1]) |
# %%
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.model_selection import train_test_split
import pandas as pd
import numpy as np
import lightgbm as lgb
from mail import mail
# %%
user = pd.read_csv(
'data/train_preliminary/user.csv').sort_values(['user_id'], ascending=(True,))
Y_train_gender = user.gender
Y_train_age = user.age
corpus = []
f = open('word2vec/userid_creativeids.txt', 'r')
# train_examples = 100
# test_examples = 200
# train_test = 300
train_test = 1900000
train_examples = 900000
test_examples = 1000000
flag = 0
for row in f:
# row = [[int(e) for e in seq] for seq in row.strip().split(' ')]
row = row.strip()
corpus.append(row)
flag += 1
if flag == train_test:
break
# %%
Y_train_gender = Y_train_gender.iloc[:train_examples]-1
Y_train_age = Y_train_age.iloc[:train_examples]-1
# %%
min_df = 30
max_df = 0.001
vectorizer = TfidfVectorizer(
token_pattern=r"(?u)\b\w+\b",
min_df=min_df,
# max_df=max_df,
# max_features=128,
dtype=np.float32,
)
all_data = vectorizer.fit_transform(corpus)
print('(examples, features)', all_data.shape)
print('train tfidf done! min_df={}, max_df={} shape is {}'.format(
min_df, max_df, all_data.shape[1]))
mail('train tfidf done! min_df={}, max_df={} shape is {}'.format(
min_df, max_df, all_data.shape[1]))
# %%
train_val = all_data[:train_examples, :]
# %%
X_test = all_data[train_examples:(train_examples+test_examples), :]
# %%
test_user_id = pd.read_csv(
'data/test/click_log.csv').sort_values(['user_id'], ascending=(True)).user_id.unique()
# %%
test_user_id = test_user_id[:test_examples]
|
from abc import ABCMeta
import math
from django.test import override_settings
from django.urls import reverse
from spacer.config import MIN_TRAINIMAGES
from api_core.tests.utils import BaseAPITest
from images.model_utils import PointGen
from images.models import Source
from lib.tests.utils import create_sample_image
from vision_backend.tasks import collect_all_jobs, submit_classifier
@override_settings(MIN_NBR_ANNOTATED_IMAGES=1)
class DeployBaseTest(BaseAPITest, metaclass=ABCMeta):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.user = cls.create_user(
username='testuser', password='SamplePassword')
cls.source = cls.create_source(
cls.user,
visibility=Source.VisibilityTypes.PUBLIC,
point_generation_type=PointGen.Types.SIMPLE,
simple_number_of_points=2,
)
label_names = ['A', 'B']
labels = cls.create_labels(cls.user, label_names, 'GroupA')
labelset = cls.create_labelset(cls.user, cls.source, labels)
cls.labels_by_name = dict(
zip(label_names, labelset.get_globals_ordered_by_name()))
# Set custom label codes, so we can confirm we're returning the
# source's custom codes, not the default codes.
for label_name in label_names:
local_label = labelset.locallabel_set.get(
global_label__name=label_name)
# A_mycode, B_mycode, etc.
local_label.code = label_name + '_mycode'
local_label.save()
# Add enough annotated images to train a classifier.
#
# Must have at least 2 unique labels in training data in order to
# be accepted by spacer.
annotations = {1: 'A_mycode', 2: 'B_mycode'}
num_validation_images = math.ceil(MIN_TRAINIMAGES / 8)
for i in range(MIN_TRAINIMAGES):
img = cls.upload_image(
cls.user, cls.source, dict(filename=f'train{i}.png'))
cls.add_annotations(cls.user, img, annotations)
for i in range(num_validation_images):
# Unit tests use the image filename to designate what goes into
# the validation set.
img = cls.upload_image(
cls.user, cls.source, dict(filename=f'val{i}.png'))
cls.add_annotations(cls.user, img, annotations)
# Extract features.
collect_all_jobs()
# Train a classifier.
submit_classifier(cls.source.id)
collect_all_jobs()
cls.classifier = cls.source.get_latest_robot()
cls.deploy_url = reverse('api:deploy', args=[cls.classifier.pk])
# Get a token
response = cls.client.post(
reverse('api:token_auth'),
data='{"username": "testuser", "password": "SamplePassword"}',
content_type='application/vnd.api+json',
)
token = response.json()['token']
# Kwargs for test client post() and get().
cls.request_kwargs = dict(
# Authorization header.
HTTP_AUTHORIZATION='Token {token}'.format(token=token),
# Content type. Particularly needed for POST requests,
# but doesn't hurt for other requests either.
content_type='application/vnd.api+json',
)
# During tests, we use CELERY_ALWAYS_EAGER = True to run tasks synchronously,
# so that we don't have to wait for tasks to finish before checking their
# results. To test state before all tasks finish, we'll mock the task
# functions to disable or change their behavior.
#
# Note: We have to patch the run() method of the task rather than patching
# the task itself. Otherwise, the patched task may end up being
# patched / not patched in tests where it's not supposed to be.
# https://stackoverflow.com/a/29269211/
#
# Note: Yes, patching views.deploy.run (views, not tasks) is
# correct if we want to affect usages of deploy in the views module.
# https://docs.python.org/3/library/unittest.mock.html#where-to-patch
def noop_task(*args):
pass
def mocked_load_image(*args):
"""
Return a Pillow image. This can be used to mock spacer.storage.load_image()
to bypass image downloading from URL, for example.
"""
return create_sample_image()
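# Hypothetical usage sketch (not part of the original suite): per the docstring
# above, mocked_load_image can stand in for spacer.storage.load_image in a test,
# e.g.
#
#     from unittest import mock
#
#     with mock.patch('spacer.storage.load_image', mocked_load_image):
#         ...  # exercise deploy without downloading real images
#
# The patch target for disabling deploy itself (with noop_task) depends on where
# the task is imported in the views module, as the comment block above explains.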
|
'''Matcher class'''
from easy_tokenizer.tokenizer import Tokenizer
from . import LOGGER
from .token_trie import TokenTrie
from .match_patterns import PatternsGZ
from .match_patterns import PatternsCT
from .match_patterns import PatternsNT
from .matched_phrase import MatchedPhrase
from . import data_utils
class Matcher():
'''
Matcher finds all matched phrases from the input text. It contains
- an internal tokenizer applied on both patterns and input text
    - a token trie structure created either from gazetteer or codetable
- a dictionary to map codeID to code_description and code_category
Params:
- normtable: normalized table file in json format
- gazetteer: gazetteer file
- codetable: taxonomy codetable format file
- blacklist: blacklist file
- with_context: also output the context if set to True
'''
# the magic number is related to the average length of the context in the
# training data
CONTEXT_LENGTH = 14
def __init__(
self,
normtable=None,
gazetteer=None,
codetable=None,
blacklist=None,
regexp=None,
with_context=False
):
self.regexp = regexp
self.tokenizer = Tokenizer(self.regexp)
self.blacklist = dict()
self.with_context = with_context
if normtable:
match_patterns = PatternsNT(self.tokenizer, normtable)
elif gazetteer:
match_patterns = PatternsGZ(self.tokenizer, gazetteer)
elif codetable:
match_patterns = PatternsCT(self.tokenizer, codetable)
else:
            raise Exception(
                'source file is required to build a Matcher object')
self.code_property_mapping = match_patterns.codeid_description
self.meta_info = match_patterns.meta_info
if blacklist:
self.blacklist = data_utils.from_file_to_list(blacklist)
self.trie_matcher = TokenTrie(
patterns=match_patterns.tokenized_pattern
)
def matching(self, text):
'''
find all matching phrases from the input text
params:
- text: string
output:
- all matching phrases as MatchedPhrase object
'''
tokens = self.tokenizer.tokenize_with_pos_info(text)
for token in tokens:
token.text = data_utils.normalize(token.text)
idx = 0
nr_tokens = len(tokens)
while idx < nr_tokens:
local_match = self.trie_matcher.longest_match_at_position(
self.trie_matcher.token_trie, tokens[idx:])
if local_match:
start_pos, end_pos = local_match.text_range()
left_context, right_context = self.prepare_context(tokens,
local_match,
idx,
text)
surface_form = local_match.surface_form
matched_text = text[start_pos:end_pos]
yield MatchedPhrase(
surface_form,
matched_text,
start_pos,
                end_pos - 1,  # prepare for the entity framework (in perl)
local_match.code_id,
self.code_id_property_lookup(local_match.code_id, 'desc'),
self.code_id_property_lookup(local_match.code_id, 'type'),
left_context,
right_context,
self.code_id_property_lookup(
local_match.code_id,
'skill_likelihoods',
dict()
).get(data_utils.normalize(surface_form), None)
)
idx += len(local_match.tokens)
else:
idx += 1
def prepare_context(self, tokens, local_match, idx, text):
l_context = ''
r_context = ''
if self.with_context:
nr_matched_tokens = len(local_match.tokens)
l_context_begin = max(0, idx - self.CONTEXT_LENGTH)
l_context_end = idx
r_context_begin = idx + nr_matched_tokens
r_context_end = min(
len(tokens),
r_context_begin + self.CONTEXT_LENGTH
)
if l_context_begin < l_context_end:
l_context = text[tokens[l_context_begin].start_pos:
tokens[l_context_end - 1].end_pos]
if r_context_begin < r_context_end:
r_context = text[tokens[r_context_begin].start_pos:
tokens[r_context_end - 1].end_pos]
return l_context, r_context
def code_id_property_lookup(self, code_id, property_name, default=None):
code_property = default
if code_id is not None:
if code_id in self.code_property_mapping:
code_property = self.code_property_mapping[code_id].get(
property_name, default)
else:
LOGGER.warning(
'WARNING: no property {} for codeid: {}'.
format(property_name, code_id)
)
return code_property
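# Minimal usage sketch (illustrative only; the file path and the attribute
# names on MatchedPhrase are assumptions, not confirmed by this module):
#
#     matcher = Matcher(gazetteer='skills_gazetteer.tsv', with_context=True)
#     for phrase in matcher.matching('Looking for Python and SQL experience'):
#         print(phrase.matched_text, phrase.code_id)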
|
from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Projects, Profile, Rating, User
from .forms import NewProjectsForm, NewProfileForm,NewRatingForm
from .serializer import ProjectsSerializer, ProfileSerializer
from rest_framework import status
# Create your views here.
def welcome(request):
projects = Projects.objects.all().order_by("post_date")
profile = Profile.objects.all()
    return render(request, 'index.html', {'projects': projects, 'profile': profile})
# search function
def search_project(request):
if 'project' in request.GET and request.GET["project"]:
search_term = request.GET.get("project")
searched_projects = Projects.search_by_title(search_term)
message = f"{search_term}"
return render(request, 'search.html',{"message":message,"projects": searched_projects})
else:
message = "You haven't searched for any term"
return render(request, 'search.html',{"message":message})
# function to add site
@login_required(login_url='/accounts/login/')
def add_site(request):
current_user = request.user
if request.method == 'POST':
form = NewProjectsForm(request.POST, request.FILES)
if form.is_valid():
project = form.save(commit=False)
project.poster = current_user
project.save()
return redirect('welcome')
else:
form = NewProjectsForm()
return render(request, 'create_site.html', {"form": form})
# profile function
@login_required(login_url='/accounts/login/')
def profile(request, profile_id):
current_user = request.user
if request.method == 'POST':
form = NewProfileForm(request.POST, request.FILES)
if form.is_valid():
profile = form.save(commit=False)
profile.username = current_user
profile.save()
return redirect('welcome')
else:
form = NewProfileForm()
username=User.objects.all()
myProfile = Profile.objects.filter(username = current_user)
projects = Projects.objects.filter(poster = current_user)
return render(request, 'profile.html', {"form": form, "username": username,"myProfile": myProfile, "projects":projects})
# edit profile function
@login_required(login_url='/accounts/login/')
def edit_profile(request):
current_user=request.user
if request.method =='POST':
if Profile.objects.filter(username_id=current_user).exists():
form = NewProfileForm(request.POST,request.FILES,instance=Profile.objects.get(username_id = current_user))
else:
form=NewProfileForm(request.POST,request.FILES)
if form.is_valid():
profile=form.save(commit=False)
profile.username=current_user
profile.save()
return redirect('profile', current_user.id)
else:
if Profile.objects.filter(username_id = current_user).exists():
form=NewProfileForm(instance =Profile.objects.get(username_id=current_user))
else:
form=NewProfileForm()
return render(request,'editProfile.html',{"form":form})
# rating function
@login_required(login_url='/accounts/login/')
def grade_rating(request,id):
current_user=request.user
project=Projects.objects.get(id=id)
if request.method == 'POST':
form = NewRatingForm(request.POST, request.FILES)
if form.is_valid():
grade = form.save(commit=False)
grade.user = current_user
grade.project=project
grade.total=int(form.cleaned_data['design'])+int(form.cleaned_data['content'])+int(form.cleaned_data['usability'])
grade.avg= int(grade.total)/3
grade.save()
return redirect('welcome')
else:
form = NewRatingForm()
return render(request, 'rating.html', {"form": form, 'project':project})
# serialize projects model objects
class ProjectsList(APIView):
def get(self, request, format=None):
all_merch = Projects.objects.all()
serializers = ProjectsSerializer(all_merch, many=True)
return Response(serializers.data)
def post(self, request, format=None):
serializers = ProjectsSerializer(data=request.data)
if serializers.is_valid():
serializers.save()
return Response(serializers.data, status=status.HTTP_201_CREATED)
return Response(serializers.errors, status=status.HTTP_400_BAD_REQUEST)
# serialize profile model
class ProfileList(APIView):
def get(self, request, format=None):
all_merch = Profile.objects.all()
serializers = ProfileSerializer(all_merch, many=True)
return Response(serializers.data)
def post(self, request, format=None):
serializers = ProfileSerializer(data=request.data)
if serializers.is_valid():
serializers.save()
return Response(serializers.data, status=status.HTTP_201_CREATED)
return Response(serializers.errors, status=status.HTTP_400_BAD_REQUEST) |
#!/usr/bin/env python
import matplotlib as mpl
mpl.use('Agg')
import numpy as np
import healpy as hp
import matplotlib.pyplot as plt
from scipy.special import gammaln
import numpy.lib.recfunctions as recfunctions
def chooseBins(catalog = None, tag=None, binsize = None, upperLimit = None, lowerLimit = None):
if binsize is None:
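        # Default bin width via the Freedman-Diaconis rule: 2 * IQR / n**(1/3).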
binsize = 2*( np.percentile(catalog[tag], 75) - np.percentile( catalog[tag], 25 ) ) / (catalog.size ) **(1./3.)
if upperLimit is None:
upperLimit = np.max(catalog[tag])
if lowerLimit is None:
lowerLimit = np.min(catalog[tag])
nbins = int( np.ceil( (upperLimit - lowerLimit) / binsize) )
nEdge = nbins+1
bins = lowerLimit + binsize * np.arange(nEdge)
bins[0] = bins[0] - 0.001*binsize
bins[-1] = bins[-1] + 0.001*binsize
return bins
def assignToBins(catalog= None, tag = None, bins = None):
nbins = np.product( [len(x)-1 for x in bins] )
bin_index = np.digitize(catalog[tag[0]], bins[0]) - 1
keep = (bin_index >= 0) & ( bin_index < (len(bins[0]) -1) )
for i in xrange(len(tag) -1 ):
this_bin_index = np.digitize( catalog[tag[i+1]], bins[i+1]) -1
keep = keep & ( (this_bin_index >= 0) & (this_bin_index < (len(bins[i+1]) - 1)) )
bin_index = bin_index + (len(bins[i])-1) * this_bin_index
bin_index = bin_index[keep]
return bin_index
def histogramND(catalog = None, tag = None, bins = None, flat = False):
    bin_index = assignToBins(catalog=catalog, tag=tag, bins=bins)
histogram = np.bincount(bin_index)
if flat is False:
nbins_truth = [len(x)-1 for x in bins]
histogram = np.reshape(histogram, nbins_truth)
return histogram
def makeLikelihoodMatrix( sim=None, truth=None, truthMatched = None, Lcut = 0., ncut = 0.,
obs_bins = None, truth_bins = None, simTag = None, truthTag = None):
if ( ( len(simTag) == 1) or ( type(simTag) is type('')) ) and ( (len(truthTag) == 1 ) or (type(truthTag) is type('') ) ) :
obs_bin_index = np.digitize(sim[simTag], obs_bins) - 1
truth_bin_index = np.digitize(truthMatched[truthTag], truth_bins) - 1
# Limit loop to objects in the given bin ranges.
nbins_truth = truth_bins.size -1
nbins_obs = obs_bins.size - 1
good = ((truth_bin_index > 0) & (truth_bin_index < nbins_truth) &
(obs_bin_index > 0) & (obs_bin_index < nbins_obs) )
obs_bin_index = obs_bin_index[good]
truth_bin_index = truth_bin_index[good]
N_truth, _ = np.histogram( truth[truthTag], bins=truth_bins )
L = np.zeros( (nbins_obs, nbins_truth) )
for i in xrange(obs_bin_index.size):
if N_truth[truth_bin_index[i]] > ncut:
L[obs_bin_index[i], truth_bin_index[i]] = ( L[obs_bin_index[i], truth_bin_index[i]] +
1./N_truth[truth_bin_index[i]] )
L[L < Lcut] = 0.
else:
# In this case, the user has asked us to make a likelihood
# matrix that maps an n>1 -dimensional space onto another
# n>1 -dimensional space.
nbins_truth = np.product( [len(x)-1 for x in truth_bins] )
nbins_obs = np.product( [len(x)-1 for x in obs_bins] )
# --------------------------------------------------
#Assume that truth_bins and obs_bins are indexable.
truth_bin_index = np.digitize(truthMatched[truthTag[0]], truth_bins[0]) - 1
obs_bin_index = np.digitize(sim[simTag[0]], obs_bins[0]) - 1
all_bin_index = np.digitize(truth[truthTag[0]], truth_bins[0]) -1
good = ((truth_bin_index >= 0) & (truth_bin_index < (len(truth_bins[0]) - 1)) &
(obs_bin_index >= 0) & (obs_bin_index < (len(obs_bins[0]) -1)) )
keep = (all_bin_index >= 0) & ( all_bin_index < (len(truth_bins[0]) -1) )
# --------------------------------------------------
# Fancy multi-dimensional indexing.
for i in xrange(len(truthTag) -1 ):
this_truth_bin_index = np.digitize( truthMatched[truthTag[i+1]], truth_bins[i+1]) -1
this_obs_bin_index = np.digitize( sim[simTag[i+1]], obs_bins[i+1]) -1
this_all_bin_index = np.digitize( truth[truthTag[i+1]], truth_bins[i+1]) - 1
good = good & ( (this_truth_bin_index >= 0) & (this_truth_bin_index < (len(truth_bins[i+1]) - 1)) &
(this_obs_bin_index >= 0) & (this_obs_bin_index < (len(obs_bins[i+1]) -1) ) )
            keep = keep & ( (this_all_bin_index >= 0) & ( this_all_bin_index < (len(truth_bins[i+1]) -1) ) )
truth_bin_index = truth_bin_index + (len(truth_bins[i])-1) * this_truth_bin_index
obs_bin_index = obs_bin_index + (len(obs_bins[i])-1) * this_obs_bin_index
all_bin_index = all_bin_index + (len(truth_bins[i])-1) * this_all_bin_index
# --------------------------------------------------
truth_bin_index = truth_bin_index[good]
obs_bin_index = obs_bin_index[good]
all_bin_index = all_bin_index[keep]
N_truth = np.bincount(all_bin_index)
L = np.zeros( (nbins_obs, nbins_truth) )
for i in xrange(obs_bin_index.size):
try:
if N_truth[truth_bin_index[i]] > ncut:
L[obs_bin_index[i], truth_bin_index[i]] = ( L[obs_bin_index[i], truth_bin_index[i]] +
1./N_truth[truth_bin_index[i]] )
except:
pass
L[L < Lcut] = 0.
return L
def getAllLikelihoods( truth=None, sim=None, truthMatched = None, healConfig=None , doplot = False, getBins = False, ncut = 0.,
ratag= 'ra', dectag = 'dec', obs_bins = None, truth_bins = None, obsTag = 'mag_auto', truthTag = 'mag', Lcut= 0.):
# Check the dimensionality of the binning schemes.
if ( (len(truthTag) > 1) and type(truthTag) is not type('')) or ( ( len(obsTag) > 1 ) and (type(obsTag) is not type('') )) :
if obs_bins is None:
obs_bins = [chooseBins(catalog=sim, tag = tag, binsize=0.1,upperLimit=24.5,lowerLimit=15.) for tag in obsTag]
if truth_bins is None:
            truth_bins = [chooseBins(catalog=truthMatched, tag=tag, binsize=0.1, upperLimit=26., lowerLimit=15) for tag in truthTag]
nbins_truth = np.product( [len(x)-1 for x in truth_bins] )
nbins_obs = np.product( [len(x)-1 for x in obs_bins] )
multiDim = True
else:
if obs_bins is None:
obs_bins = chooseBins(catalog=sim, tag = obsTag, binsize=0.1,upperLimit=24.5,lowerLimit=15.)
if truth_bins is None:
truth_bins = chooseBins(catalog = truthMatched, tag = truthTag, binsize = 0.1,upperLimit=26.,lowerLimit=15)
nbins_truth = len(truth_bins) -1
nbins_obs = len(obs_bins) -1
multiDim = False
try:
useInds = np.unique(sim['HEALIndex'])
except:
import cfunc
truth = cfunc.HealPixifyCatalogs(catalog=truth, healConfig=healConfig)
sim = cfunc.HealPixifyCatalogs(catalog=sim, healConfig=healConfig)
truthMatched = cfunc.HealPixifyCatalogs(catalog=truthMatched, healConfig = healConfig)
truthMatched['HEALIndex'] = sim['HEALIndex']
useInds = np.unique(sim['HEALIndex'])
Lensemble = np.empty( (nbins_obs , nbins_truth, useInds.size) )
if doplot is True:
if not multiDim:
truth_bin_centers = (truth_bins[0:-1] + truth_bins[1:])/2.
obs_bin_centers = (obs_bins[0:-1] + obs_bins[1:])/2.
from matplotlib.backends.backend_pdf import PdfPages
from matplotlib.colors import LogNorm
pp = PdfPages('likelihoods.pdf')
fig,ax = plt.subplots(figsize=(6.,6.))
# Make a plot of the likelihood of the whole region.
masterLikelihood = makeLikelihoodMatrix( sim=sim, truth=truth, truthMatched = truthMatched, Lcut = Lcut,
obs_bins = obs_bins, truth_bins = truth_bins,
simTag = obsTag, truthTag = truthTag, ncut = ncut)
if not multiDim:
im = ax.imshow(np.arcsinh(masterLikelihood/1e-3), origin='lower',cmap=plt.cm.Greys,
extent = [truth_bin_centers[0],truth_bin_centers[-1],obs_bin_centers[0],obs_bin_centers[-1]])
else:
im = ax.imshow(np.arcsinh(masterLikelihood/1e-3), origin='lower',cmap=plt.cm.Greys)
ax.set_xlabel('truth ')
ax.set_ylabel('measured ')
ax.set_title('full area likelihood')
fig.colorbar(im,ax=ax)
pp.savefig(fig)
else:
masterLikelihood = makeLikelihoodMatrix( sim=sim, truth=truth, truthMatched = truthMatched, Lcut = 0.,
obs_bins = obs_bins, truth_bins = truth_bins,
simTag = obsTag, truthTag = truthTag, ncut = ncut)
for hpIndex,i in zip(useInds,xrange(useInds.size)):
print "Processing likelihood "+str(i)+" of "+str(useInds.size-1)
thisSim = sim[sim['HEALIndex'] == hpIndex]
thisTruth = truth[truth['HEALIndex'] == hpIndex]
thisTruthMatched = truthMatched[sim['HEALIndex'] == hpIndex]
if thisTruth.size > 100:
thisLikelihood = makeLikelihoodMatrix( sim=thisSim, truth=thisTruth, truthMatched = thisTruthMatched,Lcut = 0.,
obs_bins = obs_bins, truth_bins = truth_bins, simTag = obsTag, truthTag = truthTag)
Lensemble[:,:,i] = thisLikelihood
if doplot is True:
fig,ax = plt.subplots(figsize = (6.,6.))
if not multiDim:
im = ax.imshow(np.arcsinh(thisLikelihood/1e-3), origin='lower',cmap=plt.cm.Greys,
extent = [truth_bin_centers[0],truth_bin_centers[-1],obs_bin_centers[0],obs_bin_centers[-1]])
else:
im = ax.imshow(np.arcsinh(thisLikelihood/1e-3), origin='lower',cmap=plt.cm.Greys)
ax.set_xlabel('truth mag.')
ax.set_ylabel('measured mag.')
ax.set_title('nside= '+str(healConfig['map_nside'])+', HEALPixel= '+str(hpIndex) )
fig.colorbar(im,ax=ax)
pp.savefig(fig)
if doplot is True:
pp.close()
if getBins is False:
return Lensemble, useInds, masterLikelihood, truth_bin_centers, obs_bin_centers
if getBins is True:
return Lensemble, useInds, masterLikelihood, truth_bins, obs_bins
def likelihoodPCA(likelihood= None, doplot = False, band = None,
extent = None):
# This does a simple PCA on the array of likelihood matrices to find
# a compact basis with which to represent the likelihood.
print "computing likelihood pca..."
origShape = np.shape(likelihood)
likelihood_1d = np.reshape(likelihood, (origShape[0]*origShape[1], origShape[2]))
L1d = likelihood_1d.T.copy()
U,s,Vt = np.linalg.svd(L1d,full_matrices=False)
V = Vt.T
    ind = np.argsort(s)[::-1]
U = U[:, ind]
s = s[ind]
V = V[:, ind]
likelihood_pcomp = V.reshape(origShape)
if doplot is True:
from matplotlib.backends.backend_pdf import PdfPages
from matplotlib.colors import LogNorm, Normalize
if band is None:
print "Must supply band (g,r,i,z,Y) in order to save PCA plots."
stop
pp = PdfPages('likelihood_pca_components-'+band+'.pdf')
for i,thing in zip(xrange(s.size),s):
print "plotting pca component "+str(i)+" of "+str(s.size-1)
fig,ax = plt.subplots(nrows=1,ncols=1,figsize = (6.,6.))
im = ax.imshow( np.arcsinh(likelihood_pcomp[:,:,i]/1e-3),origin='lower',cmap=plt.cm.Greys, extent = extent)
ax.set_xlabel(band+' mag (true)')
ax.set_ylabel(band+' mag (meas)')
fig.colorbar(im,ax=ax)
pp.savefig(fig)
fig,ax = plt.subplots(1,1,figsize = (6.,6.) )
ax.plot(np.abs(s))
ax.set_yscale('log')
ax.set_xscale('log')
ax.set_ylim([np.min(s[s > 1e-6]), 2*np.max(s)])
ax.set_xlabel('rank')
ax.set_ylabel('eigenvalue')
pp.savefig(fig)
pp.close()
return likelihood_pcomp, s
def doLikelihoodPCAfit(pcaComp = None, likelihood =None, n_component = 5, Lcut = 0., Ntot = 1e5):
# Perform least-squares: Find the best combination of pcaComps[:,:,0:n_component] that fits likelihood
origShape = likelihood.shape
L1d = likelihood.reshape(likelihood.size)
pca1d = pcaComp.reshape( ( likelihood.size, pcaComp.shape[-1]) )
pcafit = pca1d[:,0:(n_component)]
# Full least-squares, taking the covariances of the likelihood into account.
# covar(L1d) = Ntot * np.outer( L1d, L1d)
# in the solution, the scaling Ntot falls out. However, we do need it for getting the errors later.
#L1dCovar = Ntot * np.outer(L1d, L1d)
#aa= np.linalg.pinv( np.dot( pcafit.T, np.dot(L1dCovar, pcafit)) )
#bb = np.dot(pcafit.T, L1dCovar)
#coeff = np.dot( np.dot(aa,bb), L1d)
#coeffCovar = aa
coeff, resid, _, _ = np.linalg.lstsq(pcafit, L1d)
bestFit = np.dot(pcafit,coeff)
bestFit2d = bestFit.reshape(likelihood.shape)
bestFit2d[bestFit2d < Lcut] = 0.
return bestFit2d, coeff#, coeffCovar
def mcmcLogL(N_truth, N_data, likelihood, lninf=-1000):
if np.sum(N_truth < 0.) > 0:
return -np.inf
pObs = np.dot(likelihood, N_truth) / np.sum(N_truth)
pMiss = 1. - np.sum(pObs)
Nmiss = np.sum(N_truth) - np.sum( np.dot( likelihood, N_truth) )
Nobs = np.sum( N_data )
if pMiss == 0.:
logPmiss = -np.inf
else:
logPmiss = np.log(pMiss)
lpObs = np.zeros(pObs.size)
valid = ( pObs > 0. )
lpObs[valid] = np.log(pObs[valid])
lpObs[~valid] = lninf
t4 = np.dot(np.transpose(N_data), lpObs)
t5 = Nmiss * logPmiss
t1 = gammaln(1 + Nmiss + Nobs)
t2 = gammaln(1 + Nmiss)
t3 = np.sum(gammaln(1 + likelihood))
logL = t1 - t2 - t3 + t4 + t5
return logL
def initializeMCMC(N_data, likelihood, multiplier = 1.):
print "Initializing MCMC..."
A = likelihood.copy()
Ainv = np.linalg.pinv(A,rcond = 0.001)
N_initial = np.abs(np.dot(Ainv, N_data))
covar_truth = np.diag(N_initial)
Areg = np.dot(Ainv, A)
covar_recon = np.dot( np.dot(Areg, covar_truth), Areg.T)
leakage = np.abs(np.dot( Areg, N_initial) - N_initial)
errors = np.sqrt( np.diag(covar_recon) ) + leakage
nParams = likelihood.shape[1]
    nWalkers = int(np.min([100 * nParams, 2000]))
N_initial = N_initial*0. + np.mean(N_data)
start= np.sqrt( ( N_initial + (multiplier*errors*N_initial) * np.random.randn( nWalkers, nParams ) )**2 )
return start, nWalkers
def getBinCenters(bins = None):
if ( type(bins) is not list) and (type(bins) is not tuple):
bin_centers = (bins[0:-1] + bins[1:])/2.
else:
bin_centers = [( thisBins[0:-1] + thisBins[1:] ) / 2. for thisBins in bins]
return bin_centers
def doInference(catalog = None, likelihood = None, obs_bins=None, truth_bins = None, tag = 'mag_auto',
invType = 'tikhonov', lambda_reg = 1e-6, prior = None, priorNumber = None):
# Some conditions to help us deal with multi-dimensional reconstructions.
if type(tag) is not type(''):
pts = [catalog[thisTag] for thisTag in tag]
n_tags = len(tag)
N_real_obs, _ = np.histogramdd( pts, bins = obs_bins )
else:
pts = catalog[tag]
n_tags = 1
N_real_obs, _ = np.histogramdd( pts, bins = obs_bins )
N_real_obs = N_real_obs*1.0
if n_tags > 1:
shape_orig = ([len(bins)-1 for bins in truth_bins])
N_real_obs = np.ravel( N_real_obs, order='F' )
A = likelihood.copy()
    if invType == 'basic':
if prior is None:
if n_tags == 1:
nbins_truth = truth_bins.size-1
prior = np.zeros(truth_bins.size-1)
else:
nbins_truth = np.product( [len(x)-1 for x in truth_bins] )
prior = np.zeros( nbins_truth )
else:
prior = np.ravel(prior.copy(), order = 'F' )
nbins_truth = len(prior)
Ainv = np.linalg.pinv( A,rcond = lambda_reg )
N_real_truth = np.dot( Ainv, N_real_obs - np.dot(A, prior) ) + prior
covar_truth = np.diag( N_real_truth )
Areg = np.dot(Ainv, A)
covar_recon = np.dot( np.dot(Areg, covar_truth), Areg.T)
leakage = np.abs(np.dot( Areg, N_real_truth) - N_real_truth)
errors = np.sqrt( np.diag(covar_recon) ) + leakage
if n_tags > 1:
N_real_truth = np.ravel(N_real_truth, order='F')
errors = np.ravel(errors, order='F')
    if invType == 'tikhonov':
if prior is None:
if n_tags == 1:
nbins_truth = truth_bins.size-1
prior = np.zeros(truth_bins.size-1)
else:
nbins_truth = np.product( [len(x)-1 for x in truth_bins] )
prior = np.zeros( nbins_truth )
else:
prior = np.ravel(prior.copy(), order = 'F' )
nbins_truth = len(prior)
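        # Tikhonov-regularized pseudo-inverse: (A^T A + lambda * I)^-1 A^T.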
Ainv = np.dot( np.linalg.pinv(np.dot(A.T, A) + lambda_reg * np.identity( nbins_truth ) ), A.T)
N_real_truth = np.dot(Ainv, N_real_obs - np.dot(A, prior)) + prior
covar_truth = np.diag(N_real_truth)
Areg = np.dot(Ainv, A)
covar_recon = np.dot( np.dot(Areg, covar_truth), Areg.T)
leakage = np.abs(np.dot( Areg, N_real_truth) - N_real_truth)
aa = np.dot(A.T, A)
aainv = np.linalg.pinv(aa)
g = np.trace( lambda_reg * aainv)
reg_err = lambda_reg / (1 + g) * np.dot( np.dot( np.dot( aainv, aainv), A.T) , N_real_obs)
errors = np.sqrt( np.diag(covar_recon) ) + leakage + np.abs(reg_err)
if n_tags > 1:
N_real_truth = np.reshape(N_real_truth, shape_orig, order='F')
errors = np.reshape(errors, shape_orig, order='F')
    if invType == 'mcmc':
import emcee
start, nWalkers = initializeMCMC(N_real_obs, A)
nParams = likelihood.shape[1]
nSteps = 1000
sampler = emcee.EnsembleSampler(nWalkers, nParams, mcmcLogL, args = [N_real_obs, A], threads = 8)
print "burninating mcmc"
pos, prob, state = sampler.run_mcmc(start, nSteps)
mean_accept = np.mean(sampler.acceptance_fraction)
sampler.reset()
delta_mean_accept = 1.
print "Acceptance fraction: ",mean_accept
print "running mcmc"
while np.abs(delta_mean_accept) > 0.001:
pos, prob, state = sampler.run_mcmc( pos, nSteps, rstate0 = state )
delta_mean_accept = np.mean(sampler.acceptance_fraction) - mean_accept
mean_accept = np.mean(sampler.acceptance_fraction)
print "Acceptance fraction: ",mean_accept
#print "autocorr_time", sampler.acor
N_real_truth = np.mean( sampler.flatchain, axis=0 )
errors = np.std( sampler.flatchain, axis=0 )
sampler.reset()
if n_tags > 1:
N_real_truth = np.reshape( N_real_truth, shape_orig, order='F')
errors = np.reshape( errors, shape_orig, order='F')
truth_bins_centers = getBinCenters( bins = truth_bins)
return N_real_truth, errors, truth_bins_centers
|
"""
Created on April 4, 2020
Tensorflow 2.1.0 implementation of APR.
@author Anonymized
"""
import numpy as np
from elliot.recommender.base_recommender_model import BaseRecommenderModel
from elliot.recommender.recommender_utils_mixin import RecMixin
from elliot.recommender.test_item_strategy import test_item_only_filter
from elliot.utils.write import store_recommendation
from elliot.recommender.base_recommender_model import init_charger
class Random(RecMixin, BaseRecommenderModel):
@init_charger
def __init__(self, data, config, params, *args, **kwargs):
"""
Create a Random recommender.
:param data: data loader object
:param path_output_rec_result: path to the directory rec. results
:param path_output_rec_weight: path to the directory rec. model parameters
:param args: parameters
"""
self._random = np.random
self._params_list = [
("_seed", "random_seed", "seed", 42, None, None)
]
self.autoset_params()
np.random.seed(self._seed)
@property
def name(self):
return f"Random_{self.get_params_shortcut()}"
def train(self):
recs = self.get_recommendations(self.evaluator.get_needed_recommendations())
result_dict = self.evaluator.eval(recs)
self._results.append(result_dict)
if self._save_recs:
store_recommendation(recs, self._config.path_output_rec_result + f"{self.name}.tsv")
def get_recommendations(self, top_k):
r_int = np.random.randint
n_items = self._num_items
items = self._data.items
ratings = self._data.train_dict
r = {}
for u, i_s in ratings.items():
l = []
ui = set(i_s.keys())
lui = len(ui)
local_k = min(top_k, n_items - lui)
for index in range(local_k):
j = items[r_int(n_items)]
while j in ui:
j = items[r_int(n_items)]
l.append((j, 1))
r[u] = l
return test_item_only_filter(r, self._data.test_dict)
|
import logging
from django.conf import settings
from django.core.urlresolvers import reverse, NoReverseMatch
from debug_toolbar.toolbar.loader import DebugToolbar
from debug_toolbar.middleware import DebugToolbarMiddleware
from debug_logging.settings import LOGGING_CONFIG
logger = logging.getLogger('debug.logger')
for HandlerClass in LOGGING_CONFIG["LOGGING_HANDLERS"]:
logger.addHandler(HandlerClass)
class DebugLoggingMiddleware(DebugToolbarMiddleware):
"""
Extends the Debug Toolbar middleware with some extras for logging stats.
"""
def _logging_enabled(self, request):
return request.META.get('DJANGO_DEBUG_LOGGING', False)
def _show_toolbar(self, request):
if self._logging_enabled(request):
# If logging is enabled, don't show the toolbar
return False
return super(DebugLoggingMiddleware, self)._show_toolbar(request)
def process_request(self, request):
if self._logging_enabled(request):
request.debug_logging = LOGGING_CONFIG
request.debug_logging['ENABLED'] = True
response = super(DebugLoggingMiddleware, self).process_request(request)
if self._logging_enabled(request):
# If the debug-logging frontend is in use, add it to the blacklist
blacklist = request.debug_logging['BLACKLIST']
try:
debug_logging_prefix = reverse('debug_logging_index')
blacklist.append(debug_logging_prefix)
except NoReverseMatch:
pass
# Don't log requests to urls in the blacklist
for blacklist_url in blacklist:
if request.path.startswith(blacklist_url):
return response
# Add an attribute to the request to track stats, and log the
# request path
request.debug_logging_stats = {'request_path': request.path}
self.debug_toolbars[request] = DebugToolbar(request)
for panel in self.debug_toolbars[request].panels:
panel.process_request(request)
return response
def process_response(self, request, response):
response = super(DebugLoggingMiddleware, self).process_response(
request, response)
if response.status_code == 200:
if self._logging_enabled(request) and \
hasattr(request, 'debug_logging_stats'):
# If logging is enabled, log the stats to the selected handler
logger.debug(request.debug_logging_stats)
return response
|
import json
import os
import datetime
import tornado.web
import tornado.auth
import psycopg2.pool
from tornado import gen
from redshift_console import settings
from redshift_console import redshift
connection_pool = psycopg2.pool.ThreadedConnectionPool(settings.REDSHIFT['connection_pool_min_size'], settings.REDSHIFT['connection_pool_max_size'], settings.REDSHIFT['connection_string'])
queries = redshift.Queries(connection_pool, datetime.timedelta(seconds=int(settings.DATA['inflight_refresh_interval'])))
tables = redshift.Tables(connection_pool, datetime.timedelta(seconds=int(settings.DATA['tables_refresh_interval'])))
def handle_default(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
raise TypeError('{} is not JSON serializeable'.format(obj))
class BaseHandler(tornado.web.RequestHandler):
def prepare(self):
if self.request.headers.get("Content-Type", "").startswith("application/json"):
self.json_args = json.loads(self.request.body)
else:
self.json_args = None
def on_finish(self):
pass
def write_json(self, response):
response = json.dumps(response, default=handle_default)
self.set_header("Content-Type", "application/json; charset=UTF-8")
self.write(response)
class TableDefinitionHandler(BaseHandler):
@gen.coroutine
def get(self, schema_name, table_name):
definition = tables.get(schema_name, table_name)
self.write_json({'definition': definition, 'updated_at': tables.updated_at})
class SchemasHandler(BaseHandler):
def get(self):
results = tables.get_schemas()
self.write_json({'results': results})
class QueriesInflightHandler(BaseHandler):
def get(self):
self.write_json({'inflight_queries': queries.inflight_queries.values(),
'updated_at': queries.inflight_queries_updated_at})
class QueriesQueueHandler(BaseHandler):
def get(self):
self.write_json({'queries': queries.queries_queue,
'updated_at': queries.queries_queue_updated_at})
class QueriesCancelHandler(BaseHandler):
@gen.coroutine
def post(self, pid):
result = yield queries.cancel_query(pid)
self.write({'success': result})
class LoadErrorsHandler(BaseHandler):
def get(self):
self.write_json({'errors': tables.load_errors, 'updated_at': tables.load_errors_updated_at})
class StatusHandler(BaseHandler):
def get(self):
status = [
tables.get_status(),
queries.get_status()
]
self.write_json({'fetchers': status})
class MainHandler(BaseHandler):
def get(self, *args):
self.render("index.html")
def create_app(debug):
static_assets_path = os.path.join(os.path.dirname(__file__), settings.API['static_assets_path'])
queries.start()
tables.start()
return tornado.web.Application([(r"/", MainHandler),
(r"/api/queries/inflight$", QueriesInflightHandler),
(r"/api/queries/queue$", QueriesQueueHandler),
(r"/api/queries/cancel/(.*)$", QueriesCancelHandler),
(r"/api/schemas$", SchemasHandler),
(r"/api/schemas/(.*)/(.*)$", TableDefinitionHandler),
(r"/api/copy/errors$", LoadErrorsHandler),
(r"/api/status$", StatusHandler),
(r"/(.*)", tornado.web.StaticFileHandler, {"path": static_assets_path})],
template_path=static_assets_path,
static_path=static_assets_path,
debug=debug,
cookie_secret=settings.API['cookie_secret'],
compress_response=True
)
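# Usage sketch (the port number is an assumption, not part of this module):
#
#     import tornado.ioloop
#     app = create_app(debug=True)
#     app.listen(8888)
#     tornado.ioloop.IOLoop.current().start()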
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-04-30 15:40
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import simple_history.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('FrontEnd', '0005_auto_20190428_1551'),
]
operations = [
migrations.CreateModel(
name='HistoricalResultadoBusqInteligenteTokens',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('destacado', models.BooleanField()),
('aparicion', models.CharField(max_length=50)),
('tipo', models.CharField(max_length=10)),
('frase', models.CharField(max_length=150)),
('lema', models.CharField(max_length=50)),
('categoria', models.CharField(max_length=50)),
('parrafo_nro', models.IntegerField()),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_date', models.DateTimeField()),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('doc', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='FrontEnd.Documento')),
('header', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='FrontEnd.ResultadoHeader')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('parrafo', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='FrontEnd.Parrafo')),
],
options={
'verbose_name': 'historical resultado busq inteligente tokens',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalTokensDoc',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('aparicion', models.CharField(max_length=50)),
('tipo', models.CharField(max_length=10)),
('frase', models.CharField(max_length=150)),
('lema', models.CharField(max_length=50)),
('categoria', models.CharField(max_length=50)),
('eliminado', models.BooleanField()),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_date', models.DateTimeField()),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('doc', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='FrontEnd.Documento')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('parrafo', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='FrontEnd.Parrafo')),
],
options={
'verbose_name': 'historical tokens doc',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='ResultadoBusqInteligenteTokens',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('destacado', models.BooleanField()),
('aparicion', models.CharField(max_length=50)),
('tipo', models.CharField(max_length=10)),
('frase', models.CharField(max_length=150)),
('lema', models.CharField(max_length=50)),
('categoria', models.CharField(max_length=50)),
('parrafo_nro', models.IntegerField()),
('doc', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='FrontEnd.Documento')),
('header', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='FrontEnd.ResultadoHeader')),
('parrafo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='FrontEnd.Parrafo')),
],
),
migrations.CreateModel(
name='TokensDoc',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('aparicion', models.CharField(max_length=50)),
('tipo', models.CharField(max_length=10)),
('frase', models.CharField(max_length=150)),
('lema', models.CharField(max_length=50)),
('categoria', models.CharField(max_length=50)),
('eliminado', models.BooleanField()),
('doc', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='FrontEnd.Documento')),
('parrafo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='FrontEnd.Parrafo')),
],
),
migrations.AlterField(
model_name='caso',
name='modelo',
field=models.CharField(choices=[('ECON', 'Económico'), ('DRUG', 'Drogas')], default='ECON', max_length=10),
),
migrations.AlterField(
model_name='entidadesdoc',
name='string',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='entidadesdoc',
name='string_original',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='historicalcaso',
name='modelo',
field=models.CharField(choices=[('ECON', 'Económico'), ('DRUG', 'Drogas')], default='ECON', max_length=10),
),
migrations.AlterField(
model_name='historicalentidadesdoc',
name='string',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='historicalentidadesdoc',
name='string_original',
field=models.CharField(max_length=100),
),
]
|
#!/usr/bin/env python
#
# Copyright (c) 2010 Jiri Svoboda
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# - The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
Create legacy uImage (U-Boot image)
"""
from collections import deque
import os
import sys
import xstruct
import zlib
UIMAGE_HEADER = """big:
uint32_t magic
uint32_t header_crc
uint32_t c_tstamp
uint32_t data_size
uint32_t load_addr
uint32_t start_addr
uint32_t data_crc
uint8_t os
uint8_t arch
uint8_t img_type
uint8_t compression
char img_name[32]
"""
def main():
args = deque(sys.argv)
cmd_name = args.popleft()
base_name = os.path.basename(cmd_name)
image_name = 'Noname'
load_addr = 0
start_addr = 0
os_type = 5 #Linux is the default
while len(args) >= 2 and args[0][0] == '-':
opt = args.popleft()[1:]
optarg = args.popleft()
if opt == 'name':
image_name = optarg
elif opt == 'laddr':
load_addr = (int)(optarg, 0)
elif opt == 'saddr':
start_addr = (int)(optarg, 0)
elif opt == 'ostype':
os_type = (int)(optarg, 0)
else:
print(base_name + ": Unrecognized option.")
print_syntax(cmd_name)
return
if len(args) < 2:
print(base_name + ": Argument missing.")
print_syntax(cmd_name)
return
inf_name = args[0]
outf_name = args[1]
try:
mkuimage(inf_name, outf_name, image_name, load_addr, start_addr, os_type)
except:
os.remove(outf_name)
raise
def mkuimage(inf_name, outf_name, image_name, load_addr, start_addr, os_type):
inf = open(inf_name, 'rb')
outf = open(outf_name, 'wb')
header = xstruct.create(UIMAGE_HEADER)
header_size = header.size()
#
# Write data
#
outf.seek(header_size, os.SEEK_SET)
data = inf.read()
data_size = inf.tell()
data_crc = calc_crc32(data)
data_tstamp = (int)(os.path.getmtime(inf_name))
outf.write(data)
data = ''
#
# Write header
#
outf.seek(0, os.SEEK_SET)
header.magic = 0x27051956 # uImage magic
header.header_crc = 0
header.c_tstamp = data_tstamp
header.data_size = data_size
header.load_addr = load_addr # Address where to load image
header.start_addr = start_addr # Address of entry point
header.data_crc = data_crc
header.os = os_type
header.arch = 2 # ARM
header.img_type = 2 # Kernel
header.compression = 0 # None
header.img_name = image_name.encode('ascii')
header_crc = calc_crc32(header.pack())
header.header_crc = header_crc
outf.write(header.pack())
outf.close()
## Compute CRC32 of binary string.
#
# Works around bug in zlib.crc32() which returns signed int32 result
# in Python < 3.0.
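#
# Example: calc_crc32(b'abc') == 0x352441c2 (standard CRC-32 test vector)
# on both Python 2 and 3.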
#
def calc_crc32(byteseq):
signed_crc = zlib.crc32(byteseq, 0)
if signed_crc < 0:
return signed_crc + (1 << 32)
else:
return signed_crc
## Print command-line syntax.
#
def print_syntax(cmd):
print("syntax: " + cmd + " [<options>] <raw_image> <uImage>")
print()
print("\traw_image\tInput image name (raw binary data)")
print("\tuImage\t\tOutput uImage name (U-Boot image)")
print()
print("options:")
print("\t-name <name>\tImage name (default: 'Noname')")
print("\t-laddr <name>\tLoad address (default: 0x00000000)")
print("\t-saddr <name>\tStart address (default: 0x00000000)")
if __name__ == '__main__':
main()
|
"""Advent of Code 2015 Day 5."""
def main(file_input='input.txt'):
strings = [line.strip() for line in get_file_contents(file_input)]
nice_strings = get_nice_strings(strings, is_nice_string)
print(f'Nice strings: {len(nice_strings)}')
nice_strings_part_two = get_nice_strings(strings, is_nice_string_part_two)
print(f'Nice strings part two: {len(nice_strings_part_two)}')
def get_nice_strings(strings, check_function):
"""Validate strings with check_function."""
return [
string for string in strings
if check_function(string)
]
def is_nice_string(string):
"""Validate niceness of string for part one."""
return (
has_three_or_more_vowels(string)
and has_letters_in_row(string)
and not has_sub_strings(string, ('ab', 'cd', 'pq', 'xy'))
)
def is_nice_string_part_two(string):
"""Validate niceness of string for part two."""
return (has_repeating_pair(string)
and has_repeating_separated_letter(string))
def has_repeating_pair(string):
"""Check if string has repeating pair of letters."""
return any(
string.count(string[index:index+2]) > 1
for index, _ in enumerate(string[:-1])
)
def has_repeating_separated_letter(string):
"""Check if string has repeating letter separated by one other letter."""
return any(
char == string[index + 2]
for index, char in enumerate(string[:-2])
)
def has_three_or_more_vowels(string):
"""Check if string has three or more vowels."""
return sum(string.count(vowel) for vowel in 'aeiou') >= 3
def has_letters_in_row(string):
"""Check if string has any letter repeating in row."""
return any(
char == string[index + 1]
for index, char in enumerate(string[:-1])
)
def has_sub_strings(string, sub_strings):
"""Check if string contains any of the sub_strings."""
return any(
sub_string in string
for sub_string in sub_strings
)
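# Examples from the Advent of Code 2015 Day 5 puzzle statement, usable as a
# quick sanity check of the rules above:
#     is_nice_string('ugknbfddgicrmopn')           -> True
#     is_nice_string('haegwjzuvuyypxyu')           -> False  (contains 'xy')
#     is_nice_string_part_two('qjhvhtzxzqqjkmpb')  -> True
#     is_nice_string_part_two('ieodomkazucvgmuy')  -> False  (no repeated pair)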
def get_file_contents(file):
"""Read all lines from file."""
with open(file) as f:
return f.readlines()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
Re-write config file and optionally convert to python
"""
__revision__ = "$Id: wCfg.py,v 1.1 2012/03/30 17:46:35 paus Exp $"
__version__ = "$Revision: 1.1 $"
import getopt
import imp
import os
import pickle
import sys
import xml.dom.minidom
from random import SystemRandom
from ProdCommon.CMSConfigTools.ConfigAPI.CfgInterface import CfgInterface
import FWCore.ParameterSet.Types as CfgTypes
MyRandom = SystemRandom()
class ConfigException(Exception):
"""
Exceptions raised by writeCfg
"""
def __init__(self, msg):
Exception.__init__(self, msg)
self._msg = msg
return
def __str__(self):
return self._msg
def main(argv) :
"""
writeCfg
- Read in existing, user supplied pycfg or pickled pycfg file
- Modify job specific parameters based on environment variables and arguments.xml
- Write out pickled pycfg file
required parameters: none
optional parameters:
--help : help
--debug : debug statements
"""
# defaults
inputFileNames = None
parentFileNames = None
debug = False
_MAXINT = 900000000
try:
opts, args = getopt.getopt(argv, "", ["debug", "help"])
except getopt.GetoptError:
print main.__doc__
sys.exit(2)
try:
CMSSW = os.environ['CMSSW_VERSION']
parts = CMSSW.split('_')
CMSSW_major = int(parts[1])
CMSSW_minor = int(parts[2])
CMSSW_patch = int(parts[3])
except (KeyError, ValueError):
msg = "Your environment doesn't specify the CMSSW version or specifies it incorrectly"
raise ConfigException(msg)
# Parse command line options
for opt, arg in opts :
if opt == "--help" :
print main.__doc__
sys.exit()
elif opt == "--debug" :
debug = True
# Parse remaining parameters
try:
fileName = args[0]
outFileName = args[1]
except IndexError:
print main.__doc__
sys.exit()
# Read in Environment, XML and get optional Parameters
nJob = int(os.environ.get('NJob', '0'))
preserveSeeds = os.environ.get('PreserveSeeds','')
incrementSeeds = os.environ.get('IncrementSeeds','')
# Defaults
maxEvents = 0
skipEvents = 0
firstEvent = -1
compHEPFirstEvent = 0
firstRun = 0
# FUTURE: Remove firstRun
firstLumi = 0
dom = xml.dom.minidom.parse(os.environ['RUNTIME_AREA']+'/arguments.xml')
for elem in dom.getElementsByTagName("Job"):
if nJob == int(elem.getAttribute("JobID")):
if elem.getAttribute("MaxEvents"):
maxEvents = int(elem.getAttribute("MaxEvents"))
if elem.getAttribute("SkipEvents"):
skipEvents = int(elem.getAttribute("SkipEvents"))
if elem.getAttribute("FirstEvent"):
firstEvent = int(elem.getAttribute("FirstEvent"))
if elem.getAttribute("FirstRun"):
firstRun = int(elem.getAttribute("FirstRun"))
if elem.getAttribute("FirstLumi"):
firstLumi = int(elem.getAttribute("FirstLumi"))
generator = str(elem.getAttribute('Generator'))
inputFiles = str(elem.getAttribute('InputFiles'))
parentFiles = str(elem.getAttribute('ParentFiles'))
lumis = str(elem.getAttribute('Lumis'))
# Read Input python config file
handle = open(fileName, 'r')
try: # Nested form for Python < 2.5
try:
print "Importing .py file"
cfo = imp.load_source("pycfg", fileName, handle)
cmsProcess = cfo.process
except Exception, ex:
msg = "Your pycfg file is not valid python: %s" % str(ex)
raise ConfigException(msg)
finally:
handle.close()
cfg = CfgInterface(cmsProcess)
# Set parameters for job
print "Setting parameters"
inModule = cfg.inputSource
if maxEvents:
cfg.maxEvents.setMaxEventsInput(maxEvents)
if skipEvents:
inModule.setSkipEvents(skipEvents)
# Set "skip events" for various generators
if generator == 'comphep':
cmsProcess.source.CompHEPFirstEvent = CfgTypes.int32(firstEvent)
elif generator == 'lhe':
cmsProcess.source.skipEvents = CfgTypes.untracked(CfgTypes.uint32(firstEvent))
cmsProcess.source.firstEvent = CfgTypes.untracked(CfgTypes.uint32(firstEvent+1))
elif firstEvent != -1: # (Old? Madgraph)
cmsProcess.source.firstEvent = CfgTypes.untracked(CfgTypes.uint32(firstEvent))
if inputFiles:
inputFileNames = inputFiles.split(',')
inModule.setFileNames(*inputFileNames)
# handle parent files if needed
if parentFiles:
parentFileNames = parentFiles.split(',')
inModule.setSecondaryFileNames(*parentFileNames)
if lumis:
if CMSSW_major < 3: # FUTURE: Can remove this check
print "Cannot skip lumis for CMSSW 2_x"
else:
lumiRanges = lumis.split(',')
inModule.setLumisToProcess(*lumiRanges)
# Pythia parameters
if (firstRun):
inModule.setFirstRun(firstRun)
if (firstLumi):
inModule.setFirstLumi(firstLumi)
# Check if there are random #'s to deal with
if cfg.data.services.has_key('RandomNumberGeneratorService'):
print "RandomNumberGeneratorService found, will attempt to change seeds"
from IOMC.RandomEngine.RandomServiceHelper import RandomNumberServiceHelper
ranGenerator = cfg.data.services['RandomNumberGeneratorService']
randSvc = RandomNumberServiceHelper(ranGenerator)
incrementSeedList = []
preserveSeedList = []
if incrementSeeds:
incrementSeedList = incrementSeeds.split(',')
if preserveSeeds:
preserveSeedList = preserveSeeds.split(',')
# Increment requested seed sets
for seedName in incrementSeedList:
curSeeds = randSvc.getNamedSeed(seedName)
newSeeds = [x+nJob for x in curSeeds]
randSvc.setNamedSeed(seedName, *newSeeds)
preserveSeedList.append(seedName)
# Randomize remaining seeds
randSvc.populate(*preserveSeedList)
# Write out new config file
outFile = open(outFileName,"w")
outFile.write("import FWCore.ParameterSet.Config as cms\n")
outFile.write("import pickle\n")
outFile.write("pickledCfg=\"\"\"%s\"\"\"\n" % pickle.dumps(cmsProcess))
outFile.write("process = pickle.loads(pickledCfg)\n")
outFile.close()
if (debug):
print "writeCfg output (May not be exact):"
print "import FWCore.ParameterSet.Config as cms"
print cmsProcess.dumpPython()
if __name__ == '__main__' :
exit_status = main(sys.argv[1:])
sys.exit(exit_status)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-21 06:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtailcore', '0023_alter_page_revision_on_delete_behaviour'),
]
operations = [
migrations.CreateModel(
name='NavMenu',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255)),
('footer', models.BooleanField(default=False, help_text='Select to display this menu in the footer rather than in the nav bar.')),
('order', models.PositiveSmallIntegerField(default=1, help_text='The order that this menu appears. Lower numbers appear first.')),
('url', models.ForeignKey(blank=True, help_text='Internal path to specific page', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Page')),
],
options={
'ordering': ('footer', 'order'),
},
),
]
|
"""Tests out the code for generating randomised test trades/orders.
"""
from __future__ import print_function
__author__ = 'saeedamen' # Saeed Amen / [email protected]
#
# Copyright 2017 Cuemacro Ltd. - http//www.cuemacro.com / @cuemacro
#
# See the License for the specific language governing permissions and limitations under the License.
#
import os
from tcapy.conf.constants import Constants
from tcapy.data.datatestcreator import DataTestCreator
from tcapy.data.databasesource import DatabaseSourceCSVBinary as DatabaseSourceCSV
from tcapy.data.databasesource import DatabaseSourceArctic
from tcapy.util.loggermanager import LoggerManager
logger = LoggerManager().getLogger(__name__)
constants = Constants()
postfix = 'dukascopy'
ticker = ['EURUSD']
start_date = '01 May 2017'
finish_date = '31 May 2017'
use_test_csv = True
# mainly just to speed up tests - note: you will need to generate the HDF5 files using convert_csv_to_h5.py from the CSVs
use_hdf5_market_files = False
logger.info('Make sure you have created folder ' + constants.csv_folder + ' & ' + constants.temp_data_folder +
' otherwise tests will fail')
########################################################################################################################
# you can change the test_data_harness_folder to one on your own machine with real data
folder = constants.test_data_harness_folder
eps = 10 ** -5
if use_test_csv:
# only contains limited amount of EURUSD and USDJPY in Apr/Jun 2017
if use_hdf5_market_files:
market_data_store = os.path.join(folder, 'small_test_market_df.h5')
else:
market_data_store = os.path.join(folder, 'small_test_market_df.csv.gz')
def test_randomized_trade_data_generation():
"""Tests randomized trade generation data (and writing to database)
"""
data_test_creator = DataTestCreator(write_to_db=False)
# use database source as Arctic for market data (assume we are using market data as a source)
if use_test_csv:
data_test_creator._database_source_market = DatabaseSourceCSV(market_data_database_csv=market_data_store)
else:
data_test_creator._database_source_market = DatabaseSourceArctic(postfix=postfix)
# create randomised trade/order data
trade_order = data_test_creator.create_test_trade_order(ticker, start_date=start_date, finish_date=finish_date)
# trade_order has dictionary of trade_df and order_df
# make sure the number of trades > number of orders
assert (len(trade_order['trade_df'].index) > len(trade_order['order_df'].index))
if __name__ == '__main__':
test_randomized_trade_data_generation()
# import pytest; pytest.main() |
"""
hudai.resources.company_profile
"""
from ..helpers.resource import Resource
class CompanyProfileResource(Resource):
def __init__(self, client):
Resource.__init__(
self, client, base_path='/companies/{company_id}/profiles')
self.resource_name = 'CompanyProfile'
def fetch(self, company_id):
return self.http_get(
'/',
params={
'company_id': company_id
}
)
|
import face_recognition
import cv2
import numpy as np
import os
import time
import pymysql
from datetime import datetime
import requests
import json
import csv
def save_csv_encodingVector():
train_path = 'C:/Users/dongyoung/Desktop/Git/face_recognition_project/examples/knn_examples/train/'
file_list = os.listdir(train_path)
# image_list = []
csv_path = 'C:/Users/dongyoung/Desktop/Git/face_recognition_project/examples/encoding_csv.csv'
f = open(csv_path,'w',newline='')
wr = csv.writer(f)
for filename in file_list:
current_filename_list = os.listdir(train_path + filename)
for current_filename in current_filename_list:
now_filename = train_path + f'{filename}/' + current_filename
print('filename : ', now_filename)
vector = face_recognition.face_encodings(face_recognition.load_image_file(now_filename))[0]
wr.writerow([filename,vector])
f.close()
def read_csv_encodingVector():
csv_path = 'C:/Users/dongyoung/Desktop/Git/face_recognition_project/examples/encoding_csv.csv'
f = open(csv_path, 'r')
lines = csv.reader(f)
known_face_encodings = []
known_face_names = []
known_face_names_check = dict()
for line in lines:
name = line[0]
vector = line[1]
# print(f'name = {name} // vector = {np.array(vector[1:-1].split()).astype(np.float).shape}')
        known_face_encodings.append(np.array(vector[1:-1].split()).astype(float))
known_face_names.append(name)
known_face_names_check[name] = 0
return known_face_encodings, known_face_names,known_face_names_check
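# Usage sketch (illustrative; the image path is an assumption): compare an
# unknown face against the encodings loaded back from the CSV.
#
#     known_face_encodings, known_face_names, _ = read_csv_encodingVector()
#     unknown = face_recognition.face_encodings(
#         face_recognition.load_image_file('unknown.jpg'))[0]
#     distances = face_recognition.face_distance(known_face_encodings, unknown)
#     best = int(np.argmin(distances))
#     print(known_face_names[best], distances[best])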
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
MODEL_PARAMS = {
"model": "HTMPrediction",
"version": 1,
"predictAheadTime": None,
"modelParams": {
"sensorParams": {
"verbosity": 0,
"encoders": {
"timestamp_timeOfDay": {
"fieldname": "timestamp",
"timeOfDay": [
21,
1
],
"type": "DateEncoder",
"name": "timestamp_timeOfDay"
},
"value": {
"fieldname": "value",
"seed": 1,
"resolution": 0.88,
"name": "value",
"type": "RandomDistributedScalarEncoder"
},
"timestamp_weekend": {
"fieldname": "timestamp",
"type": "DateEncoder",
"name": "timestamp_weekend",
"weekend": 21
}
},
"sensorAutoReset": None
},
"spParams": {
"columnCount": 2048,
"spVerbosity": 0,
"localAreaDensity": -1.0,
"spatialImp": "cpp",
"inputWidth": 946,
"synPermInactiveDec": 0.005,
"synPermConnected": 0.1,
"synPermActiveInc": 0.04,
"seed": 1956,
"numActiveColumnsPerInhArea": 40,
"boostStrength": 3.0,
"globalInhibition": 1,
"potentialPct": 0.85
},
"trainSPNetOnlyIfRequested": False,
"clParams": {
"steps": "1,5",
"maxCategoryCount": 1000,
"implementation": "cpp",
"alpha": 0.1,
"verbosity": 0,
"regionName": "SDRClassifierRegion"
},
"tmParams": {
"columnCount": 2048,
"pamLength": 1,
"permanenceInc": 0.1,
"outputType": "normal",
"initialPerm": 0.21,
"seed": 1960,
"maxSegmentsPerCell": 128,
"temporalImp": "cpp",
"activationThreshold": 16,
"cellsPerColumn": 32,
"permanenceDec": 0.1,
"minThreshold": 12,
"verbosity": 0,
"maxSynapsesPerSegment": 32,
"globalDecay": 0.0,
"newSynapseCount": 20,
"maxAge": 0,
"inputWidth": 2048
},
"tmEnable": True,
"spEnable": True,
"inferenceType": "TemporalMultiStep"
}
}
|
import random
from project.server import app, db, bcrypt
from sqlalchemy import Column, Date, Integer, Text, create_engine, inspect
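# Maps a recipient's blood group to the donor blood groups they can safely
# receive from.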
DONOR_MATCH = {
'O-': ['O-'],
'O+': ['O-', 'O+'],
'B-': ['O-', 'B-'],
'B+': ['O-', 'O+', 'B-', 'B+'],
'A-': ['O-', 'A-'],
'A+': ['O-', 'O+', 'A-', 'A+'],
'AB-': ['O-', 'B-', 'A-', 'AB-'],
'AB+': ['O-', 'O+', 'B-', 'B+', 'A-', 'A+', 'AB-', 'AB+']
}
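# Example lookup (sketch, not in the original): DONOR_MATCH[recipient_blood_group] lists every
# group that recipient can safely receive from, e.g. DONOR_MATCH['A+'] == ['O-', 'O+', 'A-', 'A+'].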
class Subscriber(db.Model):
__tablename__ = "subscribers"
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
sn = db.Column(db.String, unique=True, nullable=False)
email = db.Column(db.String(100), unique=True, nullable=False)
transfusions = db.relationship('Transfusion', backref='subscriber', lazy=True)
hospital_id = db.Column(db.Integer, db.ForeignKey('hospitals.id'), nullable=False)
medical_conditions = db.Column(db.String(100))
current_medications = db.Column(db.String(100))
first_name = db.Column(db.String(100))
middle_name = db.Column(db.String(100))
last_name = db.Column(db.String(100))
home_address = db.Column(db.String(100))
city = db.Column(db.String(100))
region = db.Column(db.String(100))
phone1 = db.Column(db.String(100))
phone2 = db.Column(db.String(100))
cni = db.Column(db.String(100))
cni_doi = db.Column(db.DateTime())
cni_poi = db.Column(db.String())
dob = db.Column(db.DateTime())
pob = db.Column(db.String(100))
gender = db.Column(db.String(100))
blood_group = db.Column(db.String(100))
rhesus_factor = db.Column(db.String(100))
allergies = db.Column(db.String(100))
active = db.Column(db.Boolean(), default=True)
created_at = db.Column(db.DateTime, default=db.func.now())
updated_at = db.Column(db.DateTime, default=db.func.now())
def __init__(self):
self.active = True
# def __init__(self, email, first_name, middle_name, last_name, home_address,
# city, phone1, phone2, cni, cni_doi, cni_poi, dob,
# pob, gender, blood_group, active=True):
# self.email = email
# self.first_name = first_name
# self.middle_name = middle_name
# self.last_name = last_name
# self.home_address = home_address
# self.city = city
# self.phone1 = phone1
# self.phone2 = phone2
# self.cni = cni
# self.cni_doi = cni_doi
# self.cni_poi = cni_poi
# self.dob = dob
# self.pob = pob
# self.gender = gender
# self.blood_group = blood_group
# self.active = active
def generate_sn(self):
self.sn = "DN"+self.first_name[0]+self.middle_name[0]+str(random.randint(10000, 99999))
def _asdict(self):
return {c.key: getattr(self, c.key)
for c in inspect(self).mapper.column_attrs}
def _return_data(self):
from project.server.models.Donor import Donor
sub_arr = self._asdict()
sub_arr['transfusions'] = [trans._asdict() for trans in self.transfusions]
for j in range(len(self.transfusions)):
sub_arr['transfusions'][j]['hospital'] = self.transfusions[j].hospital._asdict()
match = Donor().query.filter(Donor.blood_group.in_(DONOR_MATCH[sub_arr['blood_group']])).all()
sub_arr['match'] = [bg._asdict() for bg in match]
sub_arr['done_at'] = self.done_at._asdict()
return sub_arr |
#!/usr/bin/env python3
# Author: Katie Sexton
# Tested on: UniFi Cloud Key Gen2 Plus firmware version 1.1.10
# This script parses UniFi Management Portal's ump.js and outputs a list of API
# endpoints and associated HTTP methods
import argparse
import json
import sys
import os.path
import time
import re
VALID_METHODS = ["GET", "HEAD", "OPTIONS", "TRACE", "CONNECT", "POST", "PUT", "DELETE", "PATCH"]
def cli_params():
parser = argparse.ArgumentParser(
description="Enumerate UniFi Management Portal API endpoints.")
parser.add_argument("-f", "--file",
metavar="file",
required=False,
default="/usr/share/unifi-management-portal/app/be/ump.js",
help="Path to ump.js")
return parser
def find_endpoints(filepath):
    apps_pattern = r'const apps=\{([^}]+)\}'
    endpoint_pattern = r'app\.([a-z]+)\("(/api[^"]+)"'
endpoints = []
appname_endpoints = []
appnames = []
with open(filepath) as fp:
for line in fp:
if "const apps={" in line:
match = re.search(apps_pattern, line)
if match:
apps = match.group(1).split(',')
for app in apps:
app = app.split(':')
appname = app[0].replace('"','')
appnames.append(appname)
matches = re.findall(endpoint_pattern, line)
if not matches:
continue
for match in matches:
method = match[0].upper()
if method not in VALID_METHODS:
continue
endpoint = match[1]
if not endpoint.startswith("/api"):
endpoint = "/api/ump{}".format(endpoint)
if ":appName" in endpoint:
appname_endpoints.append((method, endpoint))
else:
endpoints.append((method, endpoint))
if len(appname_endpoints):
if not len(appnames):
endpoints.extend(appname_endpoints)
else:
for appname in appnames:
for entry in appname_endpoints:
method, endpoint = entry
endpoints.append((method, endpoint.replace(":appName", appname)))
return list(set(endpoints))
def print_endpoints(endpoints):
for entry in sorted(endpoints):
method, endpoint = entry
print("{} {}".format(method, endpoint))
def main():
"""
Enumerate and print API endpoints and associated methods
"""
parser = cli_params()
args = parser.parse_args()
if not os.path.isfile(args.file):
sys.exit("File {} does not exist".format(args.file))
endpoints = find_endpoints(args.file)
if not len(endpoints):
sys.exit("No endpoints found in file {}".format(args.file))
print_endpoints(endpoints)
print()
main()
|
from datetime import datetime, timedelta
from os import getenv
from google.cloud import datastore
from telegram_send.telegram_send import send
import requests
YC_REQUEST_URL = getenv("YC_REQUEST_URL")
YC_AUTH_COOKIE = getenv("YC_AUTH_COOKIE")
YC_GOOD_ID = getenv("YC_GOOD_ID")
TG_TOKEN = getenv("TG_TOKEN")
TG_CHAT_ID = getenv("TG_CHAT_ID")
TG_NOTIFICATION_MESSAGE = getenv("TG_NOTIFICATION_MESSAGE")
def get_last_count_from_fb(client, *, day):
"""Get last count value from firebase for given day."""
key = client.key('cert-checker', day)
entity = client.get(key=key)
if not entity:
print(f"No entities for {day} creating new one.")
entity = datastore.Entity(key=key)
entity.update({"count": 0})
client.put(entity)
return entity["count"]
def set_last_count_to_fb(client, *, day, count):
"""Set last count for given day to firebase."""
key = client.key('cert-checker', day)
entity = client.get(key=key)
if not entity:
notify("Error on update data in Firebase")
raise ValueError(f"No entities for {day}, expecting one. Skip update.")
entity["count"] = count
client.put(entity)
print(f"Set last_count={count} for {day}")
def get_last_count_from_yc(*, dt_from, dt_to):
"""Get last count from YClients."""
url = f"{YC_REQUEST_URL}?date_start={dt_from}&date_end={dt_to}&good_id={YC_GOOD_ID}"
headers = {
"accept": "application/json",
"cookie": f"auth={YC_AUTH_COOKIE};"
}
response = requests.request("GET", url, headers=headers)
response.raise_for_status()
data = response.json()
if not data.get("success", False):
notify("Error on check certs from YClients")
raise ValueError(f"Non-successfull response. Response.content={response.content.decode()}")
return data["count"]
def notify(message):
"""Send message to telegram."""
config = {
"token": TG_TOKEN,
"chat_id": TG_CHAT_ID,
}
send(messages=[message], conf=config)
def checkcert_pubsub(event, context):
"""
Entrypoint. Executed by Cloud Scheduler.
    It takes the count of items from the list view YC_REQUEST_URL, compares it with the last checked amount and,
    if a new item appears, sends a notification to the telegram chat through forked telegram_send (check it in my repos list).
"""
client = datastore.Client()
today = (datetime.now().strftime("%d.%m.%Y"))
tomorrow = ((datetime.now() + timedelta(days=1)).strftime("%d.%m.%Y"))
last_count_yc = get_last_count_from_yc(dt_from=today, dt_to=tomorrow)
last_count_fb = get_last_count_from_fb(client, day=today)
if last_count_yc > last_count_fb:
print("New cert found")
        notify(message=TG_NOTIFICATION_MESSAGE.format(dt_from=today, dt_to=tomorrow))  # format string hardcoded in the env value
set_last_count_to_fb(client, day=today, count=last_count_yc)
|
# Copyright 2020 trueto
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.nn as nn
import torch.nn.functional as F
# refer to: https://github.com/Shawn1993/cnn-text-classification-pytorch/blob/master/model.py
class TextCNN(nn.Module):
def __init__(self, hidden_size, kernel_num, kernel_sizes):
super().__init__()
self.convs = nn.ModuleList([nn.Conv2d(1, kernel_num,
(K, hidden_size)) for K in kernel_sizes])
def forward(self, hidden_states):
# (N,Ci,W,D)
hidden_states = hidden_states.unsqueeze(1)
# [(N, Co, W), ...]*len(Ks)
hidden_states = [F.relu(conv(hidden_states)).squeeze(3) for conv in self.convs]
# [(N, Co), ...]*len(Ks)
hidden_states = [F.max_pool1d(i, i.size(2)).squeeze(2) for i in hidden_states]
hidden_states = torch.cat(hidden_states, 1)
return hidden_states
# refer to: https://github.com/keishinkickback/Pytorch-RNN-text-classification/blob/master/model.py
class TextRNN(nn.Module):
def __init__(self, input_size, num_layers, dropout,rnn_model="LSTM", use_first=True):
super().__init__()
if rnn_model == "LSTM":
self.rnn = nn.LSTM(input_size, input_size//2, num_layers=num_layers,
dropout=dropout, batch_first=True, bidirectional=True)
if rnn_model == "GRU":
self.rnn = nn.GRU(input_size, input_size//2, num_layers=num_layers,
dropout=dropout, batch_first=True, bidirectional=True)
self.bn = nn.BatchNorm1d(input_size)
self.use_first = use_first
def forward(self, hidden_states):
rnn_output, _ = self.rnn(hidden_states, None)
if self.use_first:
return self.bn(rnn_output[:, 0, :])
else:
return self.bn(torch.mean(rnn_output, dim=1))
class LSTM(nn.Module):
def __init__(self, input_size, hidden_size, num_layers, dropout):
super().__init__()
self.lstm = nn.LSTM(input_size, hidden_size, num_layers=num_layers,
dropout=dropout, batch_first=True, bidirectional=True)
def forward(self, hidden_states):
if not hasattr(self, '_flattened'):
self.lstm.flatten_parameters()
setattr(self, '_flattened', True)
lstm_out, _ = self.lstm(hidden_states, None)
return lstm_out
class GRU(nn.Module):
def __init__(self, input_size, hidden_size, num_layers, dropout):
super().__init__()
self.gru = nn.GRU(input_size, hidden_size, num_layers=num_layers,
dropout=dropout, batch_first=True, bidirectional=True)
def forward(self, hidden_states):
if not hasattr(self, '_flattened'):
self.gru.flatten_parameters()
setattr(self, '_flattened', True)
gru_out, _ = self.gru(hidden_states, None)
return gru_out |
"""
Basic Path Resolver that looks for the executable by runtime first, before proceeding to 'language' in PATH.
"""
from aws_lambda_builders.utils import which
class PathResolver(object):
def __init__(self, binary, runtime, executable_search_paths=None):
self.binary = binary
self.runtime = runtime
self.executables = [self.runtime, self.binary]
self.executable_search_paths = executable_search_paths
def _which(self):
exec_paths = []
for executable in [executable for executable in self.executables if executable is not None]:
paths = which(executable, executable_search_paths=self.executable_search_paths)
exec_paths.extend(paths)
if not exec_paths:
raise ValueError("Path resolution for runtime: {} of binary: "
"{} was not successful".format(self.runtime, self.binary))
return exec_paths
@property
def exec_paths(self):
return self._which()
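# Usage sketch (assumption: not part of the original module; the runtime/binary names are placeholders):
#
#     resolver = PathResolver(binary="python", runtime="python3.8")
#     print(resolver.exec_paths)  # raises ValueError if neither name resolves on the search paths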
|
# Lint as: python3
# Copyright 2020 The AdaNet Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AdaNet Keras models."""
from adanet.experimental.keras.ensemble_model import EnsembleModel
from adanet.experimental.keras.ensemble_model import MeanEnsemble
from adanet.experimental.keras.ensemble_model import WeightedEnsemble
from adanet.experimental.keras.model_search import ModelSearch
__all__ = [
"EnsembleModel",
"MeanEnsemble",
"WeightedEnsemble",
"ModelSearch",
]
|
# -*- coding: utf-8; -*-
#
# Copyright (c) 2016 Álan Crístoffer
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import time
from enum import Enum
import ahio.abstract_driver
# snap7 may not be installed; remember availability here and apply it to the driver
# info class below (the class does not exist yet at this point in the module).
try:
    import snap7
    _SNAP7_AVAILABLE = True
except Exception:
    _SNAP7_AVAILABLE = False
def retry_on_job_pending(func):
def f(*args, **kargs):
exception = None
for _ in range(10):
try:
return func(*args, **kargs)
except snap7.snap7exceptions.Snap7Exception as e:
exception = e
print(f'Retrying, Exception: {e}')
time.sleep(100 / 1000)
if 'Job pending' not in str(exception):
raise exception
else:
if exception:
print('retry failed')
raise exception
return f
class ahioDriverInfo(ahio.abstract_driver.AbstractahioDriverInfo):
    NAME = 'snap7'
    AVAILABLE = _SNAP7_AVAILABLE
class Driver(ahio.abstract_driver.AbstractDriver):
_client = None
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if self._client:
self._client.disconnect()
self._client = None
def setup(self, address, rack=0, slot=1, port=102):
"""Connects to a Siemens S7 PLC.
Connects to a Siemens S7 using the Snap7 library.
See [the snap7 documentation](http://snap7.sourceforge.net/) for
supported models and more details.
It's not currently possible to query the device for available pins,
so `available_pins()` returns an empty list. Instead, you should use
`map_pin()` to map to a Merker, Input or Output in the PLC. The
internal id you should use is a string following this format:
'[DMQI][XBWD][0-9]+.?[0-9]*' where:
* [DMQI]: D for DB, M for Merker, Q for Output, I for Input
* [XBWD]: X for bit, B for byte, W for word, D for dword
* [0-9]+: Address of the resource
* [0-9]*: Bit of the address (type X only, ignored in others)
For example: 'IB100' will read a byte from an input at address 100 and
'MX50.2' will read/write bit 2 of the Merker at address 50. It's not
        allowed to write to inputs (I), but you can read/write Outputs, DBs and
Merkers. If it's disallowed by the PLC, an exception will be thrown by
python-snap7 library.
For this library to work, it might be needed to change some settings
in the PLC itself. See
[the snap7 documentation](http://snap7.sourceforge.net/) for more
information. You also need to put the PLC in RUN mode. Not however that
having a Ladder program downloaded, running and modifying variables
will probably interfere with inputs and outputs, so put it in RUN mode,
but preferably without a downloaded program.
@arg address IP address of the module.
@arg rack rack where the module is installed.
@arg slot slot in the rack where the module is installed.
        @arg port port the PLC is listening on.
@throw RuntimeError if something went wrong
@throw any exception thrown by `snap7`'s methods.
"""
rack = int(rack)
slot = int(slot)
port = int(port)
address = str(address)
self._client = snap7.client.Client()
self._client.connect(address, rack, slot, port)
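    # Usage sketch (assumption: not part of the original driver; the generic map_pin/read/write
    # calls come from ahio's AbstractDriver, and the PLC address is a placeholder):
    #
    #     with Driver() as plc:
    #         plc.setup('192.168.0.10', rack=0, slot=1)
    #         plc.map_pin(1, 'MX50.2')            # bit 2 of Merker byte 50
    #         plc.write(1, ahio.LogicValue.High)
    #         print(plc.read(1))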
def available_pins(self):
return []
def _set_pin_direction(self, pin, direction):
d = self._pin_direction(pin)
if direction != d and not (type(d) is list and direction in d):
raise RuntimeError('Port %s does not support this Direction' % pin)
def _pin_direction(self, pin):
return {
'D': [ahio.Direction.Input, ahio.Direction.Output],
'M': [ahio.Direction.Input, ahio.Direction.Output],
'Q': ahio.Direction.Output,
'I': ahio.Direction.Input
}[pin[0].upper()]
def _set_pin_type(self, pin, ptype):
raise RuntimeError('Hardware does not support changing the pin type')
def _pin_type(self, pin):
raise RuntimeError('Hardware does not support querying the pin type')
@retry_on_job_pending
def _write(self, pin, value, pwm):
if pwm:
raise RuntimeError('Pin does not support PWM')
if self._pin_direction(pin) == ahio.Direction.Input:
raise RuntimeError('Can not write to Input')
mem = self._parse_port_name(pin)
value = {
ahio.LogicValue.High: 1,
ahio.LogicValue.Low: 0
}.get(value, value)
self._set_memory(mem, value)
@retry_on_job_pending
def _read(self, pin):
mem = self._parse_port_name(pin)
value = self._get_memory(mem)
if mem[1] == 'X':
return {
0: ahio.LogicValue.Low,
1: ahio.LogicValue.High
}.get(value, value)
else:
return value
def analog_references(self):
return []
def _set_analog_reference(self, reference, pin):
raise RuntimeError('Hardware does not support setting analog ref')
def _analog_reference(self, pin):
pass
def _set_pwm_frequency(self, frequency, pin):
raise RuntimeError(
'Setting PWM frequency is not supported by hardware')
def _parse_port_name(self, s):
s = s.upper()
area = {
'D': snap7.snap7types.S7AreaDB,
'M': snap7.snap7types.S7AreaMK,
'Q': snap7.snap7types.S7AreaPA,
'I': snap7.snap7types.S7AreaPE
}[s[0]]
length = {'X': 1, 'B': 1, 'W': 2, 'D': 4}[s[1]]
start = int(s.split('.')[0][2:])
bit = int(s.split('.')[1]) if s[1] == 'X' else None
dtype = {
'X': {
'get': lambda m: snap7.util.get_bool(m, 0, bit),
'set': lambda m, v: snap7.util.set_bool(m, 0, bit, v)
},
'B': {
'get': lambda m: snap7.util.get_int(m, 0),
'set': lambda m, v: snap7.util.set_int(m, 0, v)
},
'W': {
'get': lambda m: snap7.util.get_int(m, 0),
'set': lambda m, v: snap7.util.set_int(m, 0, v)
},
'D': {
'get': lambda m: snap7.util.get_dword(m, 0),
'set': lambda m, v: snap7.util.set_dword(m, 0, v)
}
}[s[1]]
return (area, dtype, start, length)
@retry_on_job_pending
def _get_memory(self, mem):
m = self._client.read_area(mem[0], 0, mem[2], mem[3])
return mem[1]['get'](m)
@retry_on_job_pending
def _set_memory(self, mem, value):
m = self._client.read_area(mem[0], 0, mem[2], mem[3])
mem[1]['set'](m, value)
self._client.write_area(mem[0], 0, mem[2], m)
|
# Special vocabulary symbols. Artifact from the vocab system. I don't know a good way to replace this in a linear system
PAD_ID = 0.0
GO_ID = -5.0
EOS_ID = 2.0
UNK_ID = 3.0
data_linspace_tuple = (0, 100, 10000)
import numpy as np
import pandas as pd
import logging
logging.basicConfig(level=logging.INFO)
def x_sin(x):
return x * np.sin(x)
def sin_cos(x):
return pd.DataFrame(dict(a=np.sin(x), b=np.cos(x)), index=x)
def rnn_data(data, encoder_steps, decoder_steps):
"""
creates new data frame based on previous observation
* example:
l = [1, 2, 3, 4, 5, 6,7]
encoder_steps = 2
decoder_steps = 3
-> encoder [[1, 2], [2, 3], [3, 4]]
-> decoder [[3,4,5], [4,5,6], [5,6,7]]
"""
rnn_df_encoder = []
rnn_df_decoder = []
for i in range(len(data) - (encoder_steps+decoder_steps)):
        try:
            # .values replaces DataFrame.as_matrix(), which was removed from pandas
            rnn_df_decoder.append(data.iloc[i + encoder_steps:i + (encoder_steps + decoder_steps)].values)
        except AttributeError:
            rnn_df_decoder.append(data.iloc[i + encoder_steps:i + (encoder_steps + decoder_steps)])
        data_ = data.iloc[i: i + encoder_steps].values
rnn_df_encoder.append(data_ if len(data_.shape) > 1 else [[i] for i in data_])
return np.array(rnn_df_encoder), np.array(rnn_df_decoder)
def split_data(data, val_size=0.1, test_size=0.1):
"""
splits data to training, validation and testing parts
"""
ntest = int(round(len(data) * (1 - test_size)))
nval = int(round(len(data.iloc[:ntest]) * (1 - val_size)))
df_train, df_val, df_test = data.iloc[:nval], data.iloc[nval:ntest], data.iloc[ntest:]
return df_train, df_val, df_test
def prepare_data(data, encoder_steps, decoder_steps, labels=False, val_size=0.1, test_size=0.1):
"""
Given the number of `time_steps` and some data,
prepares training, validation and test data for an lstm cell.
"""
df_train, df_val, df_test = split_data(data, val_size, test_size)
return (rnn_data(df_train, encoder_steps, decoder_steps),
rnn_data(df_val, encoder_steps, decoder_steps),
rnn_data(df_test, encoder_steps, decoder_steps))
def generate_data(fct, x, fct_mod, encoder_steps, decoder_steps, seperate=False):
"""generates data with based on a function fct
input:
fct: The function to be used to generate data (eg sin)
x: the linspace to pass to the function
fct mod: A list of elements of 4 tuples that represent function modifiers: a+b*fct(c+d*x)
"""
train_x, val_x, test_x = [],[],[]
train_y, val_y, test_y = [],[],[]
for wave in fct_mod:
a = wave[0]
b = wave[1]
c = wave[2]
d = wave[3]
data = a+b*fct(c+d*x)
#If there is only 1 function, do the regular split for training /test /val
if(len(fct_mod)==1):
if not isinstance(data, pd.DataFrame):
data = pd.DataFrame(data)
w_train, w_val, w_test = prepare_data(data['a'] if seperate else data, encoder_steps, decoder_steps)
train_x.extend(w_train[0])
val_x.extend(w_val[0])
test_x.extend(w_test[0])
train_y.extend(w_train[1])
val_y.extend(w_val[1])
test_y.extend(w_test[1])
else:
#training / val are most of data. Test is the last function.
if(wave is not fct_mod[-1]):
if not isinstance(data, pd.DataFrame):
data = pd.DataFrame(data)
w_train, w_val, w_test = prepare_data(data['a'] if seperate else data, encoder_steps, decoder_steps, test_size = 0)
train_x.extend(w_train[0])
val_x.extend(w_val[0])
test_x.extend(w_test[0])
train_y.extend(w_train[1])
val_y.extend(w_val[1])
test_y.extend(w_test[1])
else:
#last function track, use for testing
if not isinstance(data, pd.DataFrame):
data = pd.DataFrame(data)
test_x, test_y = rnn_data(data, encoder_steps, decoder_steps)
return dict(train=np.array(train_x),
val=np.array(val_x),
test=np.array(test_x)), \
dict(train=np.array(train_y),
val=np.array(val_y),
test=np.array(test_y))
def generate_sequence(regressor, test_sequence, seed_timesteps, prediction_length=None):
    # Handle the None default before comparing, otherwise the comparison raises a TypeError
    if prediction_length is None:
        prediction_length = len(test_sequence) - seed_timesteps
    elif prediction_length > len(test_sequence) - seed_timesteps:
        raise AssertionError("Prediction length must be less than len(test_sequence)-seed_timesteps")
track = test_sequence[0:seed_timesteps]
for i in range(prediction_length):
packed =np.array([track])
temp = regressor.predict(packed,axis=2)
track = np.insert(track,track.shape[0],temp,axis=0) #Insert used (not append) to prevent array of shape (T,1)
# collapsing to a 1D array of (T,)
return track
#
# #self test code
# if __name__ == "__main__":
#
#
# LOG_DIR = './ops_logs'
# TIMESTEPS = 20
# #BUG This is wrong. The number of RNN layers is not the length of data fed into the RNN
# #100 is ... ok, try fewer later
# RNN_LAYERS = [{'steps': 100}] #[{'steps': TIMESTEPS}]
# DENSE_LAYERS = None
# TRAINING_STEPS = 10000
# BATCH_SIZE = 100
# PRINT_STEPS = TRAINING_STEPS / 100
#
# X, y = generate_data(np.sin, np.linspace(data_linspace_tuple[0],data_linspace_tuple[1],data_linspace_tuple[2])
# , [(0, 1, 0, 16),
# (0, 1, 0, 16),
# (0, 1, 0, 16),
# (0, 1, 0, 16),
# ],TIMESTEPS, TIMESTEPS, seperate=False)
# #New y format breaks this
# test_sequence = np.concatenate([X['test'][0],y['test']])
# #The below is false. It still has a strange disjoint when it starts predicting though
# #BUG there is a chance the sequence generator is predicting backwards, which would explain the step at the beginning.
# #I find this strange, but there is an easy way to find out, stop feeding at a peak
# #This section here needs to be modified with a sequence generation function
# # plot_predicted, = plt.plot(predicted, label='predicted')
# # plot_test, = plt.plot(test_sequence[0:len(predicted)], label='test')
# # plt.legend(handles=[plot_predicted, plot_test])
# # plt.show()
# quit()
# X, y = generate_data(x_sin, np.linspace(0, 100, 10000), [(0,1,0,1)],TIMESTEPS, seperate=False) |
from connect_ssh_class import ConnectSSH
import time
class CiscoSSH(ConnectSSH):
def __init__(self, ip, username, password, enable_password, disable_paging=True):
super().__init__(ip, username, password)
self._ssh.send("enable\n")
self._ssh.send(enable_password + "\n")
if disable_paging:
self._ssh.send("terminal length 0\n")
time.sleep(1)
self._ssh.recv(self._MAX_READ)
def config_mode(self):
self._ssh.send("conf t\n")
time.sleep(0.2)
result = self._ssh.recv(self._MAX_READ).decode("ascii")
return result
def exit_config_mode(self):
self._ssh.send("end\n")
time.sleep(0.2)
result = self._ssh.recv(self._MAX_READ).decode("ascii")
return result
def send_config_commands(self, commands):
output = self.config_mode()
output += super().send_config_commands(commands)
output += self.exit_config_mode()
return output
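# Usage sketch (assumption: not part of the original module; device address and credentials are placeholders):
#
#     cisco = CiscoSSH('192.168.100.1', 'admin', 'password', 'enable_password')
#     output = cisco.send_config_commands(['interface loopback 0',
#                                          'ip address 10.255.255.1 255.255.255.255'])
#     print(output)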
|
import torch
import torch.nn as nn
class Conv2d(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, padding, stride=1, bias=True):
super(Conv2d, self).__init__()
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride=stride, padding=padding, bias=bias)
self.bn = nn.BatchNorm2d(out_channels, eps=0.001, momentum=0.1)
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
x = self.conv(x)
x = self.bn(x)
x = self.relu(x)
return x
class SeparableConv2d(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, bias=False):
super(SeparableConv2d, self).__init__()
self.depthwise = nn.Conv2d(in_channels, in_channels, kernel_size=kernel_size,
groups=in_channels, bias=bias, padding=1)
self.pointwise = nn.Conv2d(in_channels, out_channels,
kernel_size=1, bias=bias)
def forward(self, x):
out = self.depthwise(x)
out = self.pointwise(out)
return out
class Identity(nn.Module):
def __init__(self, channel):
super(Identity, self).__init__()
def forward(self, x):
return x
class SepconvResidual(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride):
super(SepconvResidual, self).__init__()
assert stride in [1, 2]
assert kernel_size in [3, 5]
self.use_res_connect = stride == 1 and in_channels == out_channels
self.conv = nn.Sequential(
Conv2d(in_channels, out_channels*4, kernel_size=1, padding=0, stride=1),
SeparableConv2d(out_channels*4, out_channels, kernel_size=kernel_size)
)
def forward(self, x):
if self.use_res_connect:
return x + self.conv(x)
else:
return self.conv(x)
class Inceptionv4_stem(nn.Module):
def __init__(self):
super(Inceptionv4_stem, self).__init__()
self.conv2d_1a_3x3 = Conv2d(3, 32, 3, stride=2, padding=0, bias=False)
self.conv2d_2a_3x3 = Conv2d(32, 32, 3, stride=1, padding=0, bias=False)
self.conv2d_2b_3x3 = Conv2d(32, 64, 3, stride=1, padding=1, bias=False)
self.mixed_3a_branch_0 = nn.MaxPool2d(3, stride=2, padding=0)
self.mixed_3a_branch_1 = Conv2d(64, 96, 3, stride=2, padding=0, bias=False)
self.mixed_4a_branch_0 = nn.Sequential(
Conv2d(160, 64, 1, stride=1, padding=0, bias=False),
Conv2d(64, 96, 3, stride=1, padding=0, bias=False),
)
self.mixed_4a_branch_1 = nn.Sequential(
Conv2d(160, 64, 1, stride=1, padding=0, bias=False),
Conv2d(64, 64, (1, 7), stride=1, padding=(0, 3), bias=False),
Conv2d(64, 64, (7, 1), stride=1, padding=(3, 0), bias=False),
Conv2d(64, 96, 3, stride=1, padding=0, bias=False)
)
self.mixed_5a_branch_0 = Conv2d(192, 192, 3, stride=2, padding=0, bias=False)
self.mixed_5a_branch_1 = nn.MaxPool2d(3, stride=2, padding=0)
# self.sepconv_residual = SepconvResidual(384, 512, 3, 1)
self.conv2d_last = Conv2d(384, 512, 3, 2, 2)
def forward(self, x):
x = self.conv2d_1a_3x3(x) # 126 x 126 x 32
x = self.conv2d_2a_3x3(x) # 124 x 124 x 32
x = self.conv2d_2b_3x3(x) # 124 x 124 x 64
x0 = self.mixed_3a_branch_0(x)
x1 = self.mixed_3a_branch_1(x)
x = torch.cat((x0, x1), dim=1) # 62 x 62 x 160
x0 = self.mixed_4a_branch_0(x)
x1 = self.mixed_4a_branch_1(x)
x = torch.cat((x0, x1), dim=1) # 60 x 60 x 192
x0 = self.mixed_5a_branch_0(x)
x1 = self.mixed_5a_branch_1(x)
x = torch.cat((x0, x1), dim=1) # 29 x 29 x 384
x = self.conv2d_last(x) # 16 x 16 x 512
return x |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module that contains implementation to create Artella launchers
"""
from __future__ import print_function, division, absolute_import
__author__ = "Tomas Poveda"
__license__ = "MIT"
__maintainer__ = "Tomas Poveda"
__email__ = "[email protected]"
import os
import sys
import time
import random
import logging
import argparse
import importlib
from distutils import util
from Qt.QtCore import *
from Qt.QtWidgets import *
from tpDcc.libs.python import path as path_utils
import tpDcc
from tpDcc.libs.qt.core import base, qtutils
from tpDcc.libs.qt.widgets import grid
from artellapipe.utils import exceptions
from artellapipe.launcher.core import defines, plugin
LOGGER = logging.getLogger('artellapipe-launcher-plugins-dccselector')
class DccData(object):
def __init__(self, name, icon, enabled, default_version, supported_versions,
installation_paths, departments, plugins, launch_fn=None):
super(DccData, self).__init__()
self.name = name
self.icon = icon
self.enabled = enabled
self.default_version = default_version
self.supported_versions = supported_versions
self.installation_paths = installation_paths
self.departments = departments
self.plugins = plugins
self.launch_fn = launch_fn
def __str__(self):
msg = super(DccData, self).__str__()
msg += '\tName: {}\n'.format(self.name)
msg += '\tIcon: {}\n'.format(self.icon)
msg += '\tEnabled: {}\n'.format(self.enabled)
msg += '\tDefault Version: {}\n'.format(self.default_version)
msg += '\tSupported Versions: {}\n'.format(self.supported_versions)
msg += '\tInstallation Paths: {}\n'.format(self.installation_paths)
msg += '\tDepartments: {}\n'.format(self.departments)
msg += '\tPlugins: {}\n'.format(self.plugins)
msg += '\tLaunch Function: {}\n'.format(self.launch_fn)
return msg
class DCCButton(base.BaseWidget, object):
clicked = Signal(str, str)
def __init__(self, dcc, parent=None):
self._dcc = dcc
super(DCCButton, self).__init__(parent=parent)
@property
def name(self):
"""
Returns the name of the DCC
:return: str
"""
        return self._dcc.name
def ui(self):
super(DCCButton, self).ui()
dcc_name = self._dcc.name.lower().replace(' ', '_')
dcc_icon = self._dcc.icon
icon_split = dcc_icon.split('/')
if len(icon_split) == 1:
theme = ''
icon_name = icon_split[0]
elif len(icon_split) > 1:
theme = icon_split[0]
icon_name = icon_split[1]
else:
theme = 'color'
icon_name = dcc_name
icon_path = tpDcc.ResourcesMgr().get('icons', theme, '{}.png'.format(icon_name))
if not os.path.isfile(icon_path):
icon_path = tpDcc.ResourcesMgr().get('icons', theme, '{}.png'.format(icon_name))
if not os.path.isfile(icon_path):
dcc_icon = tpDcc.ResourcesMgr().icon('artella')
else:
dcc_icon = tpDcc.ResourcesMgr().icon(icon_name, theme=theme)
else:
dcc_icon = tpDcc.ResourcesMgr().icon(icon_name, theme=theme)
self._title = QPushButton(self._dcc.name.title())
self._title.setStyleSheet(
"""
border-top-left-radius: 10px;
border-top-right-radius: 10px;
"""
)
self._title.setFixedHeight(20)
self.main_layout.addWidget(self._title)
self._dcc_btn = QPushButton()
self._dcc_btn.setFixedSize(QSize(100, 100))
self._dcc_btn.setIconSize(QSize(110, 110))
self._dcc_btn.setIcon(dcc_icon)
self._dcc_btn.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
self.main_layout.addWidget(self._dcc_btn)
self._version_combo = QComboBox()
self.main_layout.addWidget(self._version_combo)
for version in self._dcc.supported_versions:
self._version_combo.addItem(str(version))
default_version = self._dcc.default_version
index = self._version_combo.findText(default_version, Qt.MatchFixedString)
if index > -1:
self._version_combo.setCurrentIndex(index)
self.setMaximumWidth(105)
def setup_signals(self):
self._dcc_btn.clicked.connect(self._on_button_clicked)
self._title.clicked.connect(self._on_button_clicked)
def _on_button_clicked(self):
dcc_name = self._dcc.name
dcc_version = self._version_combo.currentText()
if not dcc_version:
dcc_version = self._dcc.default_version
self.clicked.emit(dcc_name, dcc_version)
class DCCSelector(plugin.ArtellaLauncherPlugin, object):
LABEL = 'DCC Launcher'
ICON = 'launcher'
dccSelected = Signal(str, str)
COLUMNS_COUNT = 4
def __init__(self, project, launcher, parent=None):
self._dccs = dict()
self._splash = None
self._departments = dict()
self._selected_dcc = None
self._selected_version = None
self._config = tpDcc.ConfigsMgr().get_config(
config_name='artellapipe-launcher-plugins-dccselector',
package_name=project.get_clean_name(),
root_package_name='artellapipe',
environment=project.get_environment()
)
super(DCCSelector, self).__init__(project=project, launcher=launcher, parent=parent)
def get_main_layout(self):
"""
Overrides base get_main_layout function
:return: QLayout
"""
main_layout = QVBoxLayout()
main_layout.setContentsMargins(0, 0, 0, 0)
main_layout.setSpacing(0)
main_layout.setAlignment(Qt.AlignTop)
return main_layout
@property
def dccs(self):
"""
Returns dict of current DCCs data
:return: dict
"""
return self._dccs
@property
def selected_dcc(self):
"""
Returns the selected DCC
:return: str
"""
return self._selected_dcc
@property
def selected_version(self):
"""
Returns the selected DCC version
:return: str
"""
return self._selected_version
def ui(self):
super(DCCSelector, self).ui()
self._departments_tab = QTabWidget()
self.main_layout.addWidget(self._departments_tab)
self.add_department('All')
LOGGER.debug('DCCs found: {}'.format(self._dccs))
if self._dccs:
for dcc_name, dcc_data in self._dccs.items():
LOGGER.debug('DCC: {} | {}'.format(dcc_name, dcc_data))
if not dcc_data.enabled:
continue
if not dcc_data.installation_paths:
LOGGER.warning('No installed versions found for DCC: {}'.format(dcc_name))
continue
dcc_departments = ['All']
dcc_departments.extend(dcc_data.departments)
for department in dcc_departments:
self.add_department(department)
dcc_btn = DCCButton(dcc=dcc_data)
dcc_btn.clicked.connect(self._on_dcc_selected)
self.add_dcc_to_department(department, dcc_btn)
def init_config(self):
config_data = self._config.data
self.load_dccs(config_data)
def get_enabled_dccs(self):
"""
Returns a list with all enabled DCCs
:return: list(str)
"""
return [dcc_name for dcc_name, dcc_data in self._dccs.items() if dcc_data.enabled]
def add_department(self, department_name):
if department_name not in self._departments:
department_widget = grid.GridWidget()
department_widget.setColumnCount(self.COLUMNS_COUNT)
department_widget.setShowGrid(False)
department_widget.horizontalHeader().hide()
department_widget.verticalHeader().hide()
department_widget.resizeRowsToContents()
department_widget.resizeColumnsToContents()
department_widget.setEditTriggers(QAbstractItemView.NoEditTriggers)
department_widget.setFocusPolicy(Qt.NoFocus)
department_widget.setSelectionMode(QAbstractItemView.NoSelection)
department_widget.setStyleSheet('QTableWidget::item:hover{background-color: transparent;}')
self._departments[department_name] = department_widget
self._departments_tab.addTab(department_widget, department_name.title())
return department_widget
return None
def load_dccs(self, dccs_dict):
"""
        Loads DCCs from the given configuration dictionary
        :param dccs_dict: dict
"""
if not dccs_dict:
return
for dcc_name, dcc_data in dccs_dict.items():
dcc_icon = dcc_data.get(defines.LAUNCHER_DCC_ICON_ATTRIBUTE_NAME, None)
dcc_enabled = dcc_data.get(defines.LAUNCHER_DCC_ENABLED_ATTRIBUTE_NAME, False)
if type(dcc_enabled) in [str, unicode]:
dcc_enabled = bool(util.strtobool(dcc_enabled))
default_version = dcc_data.get(defines.LAUNCHER_DCC_DEFAULT_VERSION_ATTRIBUTE_NAME, None)
if default_version:
default_version = str(default_version)
supported_versions = dcc_data.get(defines.LAUNCHER_DCC_SUPPORTED_VERSIONS_ATTRIBUTE_NAME, list())
if supported_versions:
supported_versions = [str(v) for v in supported_versions]
departments = dcc_data.get(defines.LAUNCHER_DCC_DEPARTMENTS_ATTRIBUTE_NAME, list())
plugins = dcc_data.get(defines.LAUNCHER_DCC_PLUGINS_ATTRIBUTE_NAME, list())
self._dccs[dcc_name] = DccData(
name=dcc_name,
icon=dcc_icon,
enabled=dcc_enabled,
default_version=default_version,
supported_versions=supported_versions,
installation_paths=list(),
departments=departments,
plugins=plugins
)
if not self._dccs:
LOGGER.warning('No DCCs enabled!')
return
for dcc_name, dcc_data in self._dccs.items():
if dcc_data.enabled and not dcc_data.supported_versions:
LOGGER.warning('{0} DCC enabled but no supported versions found in launcher settings. '
'{0} DCC has been disabled!'.format(dcc_name.title()))
try:
dcc_module = importlib.import_module(
'artellapipe.launcher.plugins.dccselector.dccs.{}dcc'.format(dcc_name.lower().replace(' ', '')))
except ImportError:
LOGGER.warning('DCC Python module {}dcc not found!'.format(dcc_name.lower().replace(' ', '')))
continue
if not dcc_data.enabled:
continue
fn_name = 'get_installation_paths'
fn_launch = 'launch'
if not hasattr(dcc_module, fn_name):
continue
dcc_installation_paths = getattr(dcc_module, fn_name)(dcc_data.supported_versions)
dcc_data.installation_paths = dcc_installation_paths
if hasattr(dcc_module, fn_launch):
dcc_data.launch_fn = getattr(dcc_module, fn_launch)
else:
                LOGGER.warning('DCC {} has no launch function implemented. Disabling it ...'.format(dcc_data.name))
dcc_data.enabled = False
def add_dcc_to_department(self, department_name, dcc_button):
if department_name not in self._departments:
department_widget = self.add_department(department_name)
else:
department_widget = self._departments[department_name]
row, col = department_widget.first_empty_cell()
department_widget.addWidget(row, col, dcc_button)
department_widget.resizeRowsToContents()
def _get_splash_pixmap(self):
"""
Returns pixmap to be used as splash background
:return: Pixmap
"""
splash_path = tpDcc.ResourcesMgr().get('images', 'splash.png', key='project')
splash_dir = os.path.dirname(splash_path)
splash_files = [f for f in os.listdir(splash_dir) if
f.startswith('splash') and os.path.isfile(os.path.join(splash_dir, f))]
if splash_files or not os.path.isfile(splash_path):
splash_index = random.randint(0, len(splash_files) - 1)
splash_name, splash_extension = os.path.splitext(splash_files[splash_index])
splash_pixmap = tpDcc.ResourcesMgr().pixmap(
splash_name, extension=splash_extension[1:], key='project')
else:
splash_pixmap = tpDcc.ResourcesMgr().pixmap('splash')
return splash_pixmap.scaled(QSize(800, 270))
def _setup_splash(self, dcc_name):
"""
Internal function that is used to setup launch splash depending on the selected DCC
:param dcc_name: str
"""
splash_pixmap = self._get_splash_pixmap()
self._splash = QSplashScreen(splash_pixmap)
# self._splash.setFixedSize(QSize(800, 270))
self._splash.setWindowFlags(Qt.FramelessWindowHint)
self._splash.setEnabled(True)
self.main_layout = QVBoxLayout()
self.main_layout.setContentsMargins(5, 2, 5, 2)
self.main_layout.setSpacing(2)
self.main_layout.setAlignment(Qt.AlignBottom)
self._splash.setLayout(self.main_layout)
self.progress_bar = self.project.get_progress_bar()
self.main_layout.addWidget(self.progress_bar)
self.progress_bar.setMaximum(6)
self.progress_bar.setTextVisible(False)
self._progress_text = QLabel('Loading {} Tools ...'.format(self.project.name.title()))
self._progress_text.setAlignment(Qt.AlignCenter)
self._progress_text.setStyleSheet("QLabel { background-color : rgba(0, 0, 0, 180); color : white; }")
font = self._progress_text.font()
font.setPointSize(10)
self._progress_text.setFont(font)
self.main_layout.addWidget(self._progress_text)
self.main_layout.addItem(QSpacerItem(0, 20))
artella_icon = tpDcc.ResourcesMgr().icon('artella')
artella_lbl = QLabel()
artella_lbl.setFixedSize(QSize(52, 52))
artella_lbl.setParent(self._splash)
artella_lbl.move(self._splash.width() - artella_lbl.width(), 0)
artella_lbl.setPixmap(artella_icon.pixmap(artella_icon.actualSize(QSize(48, 48))))
dcc_icon = tpDcc.ResourcesMgr().icon(dcc_name.lower())
dcc_lbl = QLabel()
dcc_lbl.setFixedSize(QSize(52, 52))
dcc_lbl.setParent(self._splash)
dcc_lbl.move(self._splash.width() - dcc_lbl.width(), 52)
dcc_lbl.setPixmap(dcc_icon.pixmap(dcc_icon.actualSize(QSize(48, 48))))
self._splash.show()
self._splash.raise_()
def _set_text(self, msg):
"""
Internal function that sets given text
:param msg: str
"""
self._progress_text.setText(msg)
LOGGER.info('> {}'.format(msg))
QApplication.instance().processEvents()
def _on_dcc_selected(self, selected_dcc, selected_version):
"""
Internal callback function that is called when the user selects a DCC to launch in DCCSelector window
        :param selected_dcc: str
        :param selected_version: str
"""
self._selected_dcc = selected_dcc
self._selected_version = selected_version
self.dccSelected.emit(self._selected_dcc, self._selected_version)
try:
if not selected_dcc:
qtutils.show_warning(
None, 'DCC installations not found',
'{} Launcher cannot found any DCC installed in your computer.'.format(self.name))
sys.exit()
if selected_dcc not in self._dccs:
qtutils.show_warning(
None, '{} not found in your computer'.format(selected_dcc.title()),
'{} Launcher cannot launch {} because no version is installed in your computer.'.format(
self.name, selected_dcc.title()))
sys.exit()
installation_paths = self._dccs[selected_dcc].installation_paths
if not installation_paths:
return
if selected_version not in installation_paths:
qtutils.show_warning(
None, '{} {} installation path not found'.format(selected_dcc.title(), selected_version),
'{} Launcher cannot launch {} {} because it is not installed in your computer.'.format(
self.name, selected_dcc.title(), selected_version))
return
installation_path = installation_paths[selected_version]
self._setup_splash(selected_dcc)
self._progress_text.setText('Creating {} Launcher Configuration ...'.format(self.project.name.title()))
LOGGER.info('> Creating {} Launcher Configuration ...'.format(self.project.name.title()))
QApplication.instance().processEvents()
parser = argparse.ArgumentParser(
description='{} Launcher allows to setup a custom initialization for DCCs. '
'This allows to setup specific paths in an easy way.'.format(self.project.name.title())
)
parser.add_argument(
'-e', '--edit',
action='store_true',
help='Edit configuration file'
)
exec_ = os.path.abspath(installation_path)
self.progress_bar.setValue(1)
QApplication.instance().processEvents()
time.sleep(1)
install_path = self.launcher.install_path
if not install_path or not os.path.isdir(install_path):
                msg = 'Current installation path does not exist: {}. Aborting DCC launch ...'.format(install_path)
self._set_text(msg)
LOGGER.error(msg)
sys.exit()
install_path = path_utils.clean_path(os.path.abspath(install_path))
id_path = path_utils.clean_path(self.project.id_path)
if id_path in install_path:
qtutils.show_warning(
None, 'Installation folder is not valid!',
'Folder {} is not a valid installation folder. '
'Install tools in a folder that is not inside Artella Project folder please!'.format(install_path))
sys.exit()
self.progress_bar.setValue(4)
self._set_text('Setting {} environment variables ...'.format(selected_dcc.title()))
bootstrap_path = None
# We force the addition of bootstrap and external module
folders_to_register = list()
mods_to_register = self.project.modules_to_register
for mod_name in mods_to_register:
try:
imported_mod = importlib.import_module(
'{}.{}'.format(self.project.get_clean_name(), mod_name))
if imported_mod:
mod_path = os.path.dirname(imported_mod.__file__)
if mod_name == 'bootstrap':
mod_path = os.path.join(mod_path, self._selected_dcc.lower())
if os.path.isdir(mod_path):
bootstrap_path = mod_path
if os.path.isdir(mod_path):
if mod_path not in folders_to_register:
folders_to_register.append(mod_path)
else:
LOGGER.warning(
'Impossible to register Bootstrap Path for Project "{}" and DCC "{}"'.format(
self.project.get_clean_name(), self._selected_dcc))
except ImportError:
continue
project_folders_to_register = self.project.get_folders_to_register(full_path=False)
if project_folders_to_register:
for p in project_folders_to_register:
if p not in folders_to_register:
folders_to_register.append(p)
for p in self.launcher.paths_to_register:
if p not in folders_to_register:
folders_to_register.append(p)
if self.launcher.dev:
for f_name in os.listdir(p):
f_path = path_utils.clean_path(os.path.join(p, f_name))
if f_path.endswith('-link') and os.path.isfile(f_path):
with open(f_path, 'r') as f:
mod_path = str(path_utils.clean_path(f.readline()))
if mod_path and os.path.isdir(mod_path):
folders_to_register.append(mod_path)
if folders_to_register:
LOGGER.info("Registering following paths: \n")
for f in folders_to_register:
LOGGER.info(f)
if os.environ.get('PYTHONPATH'):
os.environ['PYTHONPATH'] = os.environ['PYTHONPATH'] + ';' + self.launcher.install_path
for p in folders_to_register:
p = path_utils.clean_path(os.path.join(install_path, p))
LOGGER.debug('Adding path to PYTHONPATH: {}'.format(p))
os.environ['PYTHONPATH'] = os.environ['PYTHONPATH'] + ';' + p
else:
os.environ['PYTHONPATH'] = self.launcher.install_path
for p in folders_to_register:
p = path_utils.clean_path(os.path.join(install_path, p))
LOGGER.debug('Adding path to PYTHONPATH: {}'.format(p))
os.environ['PYTHONPATH'] = os.environ['PYTHONPATH'] + ';' + p
self.progress_bar.setValue(5)
self._set_text('Launching DCC: {} ...'.format(selected_dcc))
# os.environ[self.project.get_clean_name()+'_show'] = 'show'
time.sleep(1)
# # We need to import this here because this path maybe is not available until we update Artella paths
# try:
# import spigot
# except ImportError:
# LOGGER.error('Impossible to import Artella Python modules! Maybe Artella is not installed properly.')
launch_fn = self._dccs[selected_dcc].launch_fn
if not launch_fn:
                LOGGER.error('Selected DCC: {} has no launch function!'.format(selected_dcc))
return
except Exception as e:
self._splash.close()
raise exceptions.ArtellaPipeException(self.project, msg=e)
self._splash.close()
time.sleep(1)
if not bootstrap_path or not os.path.isdir(bootstrap_path):
QMessageBox.warning(
None, 'Bootstrap Directory not found!',
'Bootstrap folder for Project "{}" and DCC "{}" not found. Tools will not load. '
                'Please contact TD!'.format(self.project.get_clean_name(), self._selected_dcc))
launch_fn(exec_=exec_, setup_path=bootstrap_path)
# self.launcher.close()
# QApplication.instance().quit()
|
#!/usr/bin/python
import sys
d = {}
def myfun1(x):
return d[x][0]
def myfun2(x):
return d[x][1]
def myfun3(x):
return x[0]
for o in sys.stdin:
o = o.strip()
line_val = o.split(',')
bowler,batsman,runs,balls = line_val
runs = int(runs)
balls = int(balls)
key = (bowler,batsman)
if key in d:
d[key][0].append(runs)
d[key][1].append(balls)
else:
d[key] = [[],[]]
d[key][0].append(runs)
d[key][1].append(balls)
for key in d.keys():
d[key][0] = sum(d[key][0])
d[key][1] = sum(d[key][1])
y = sorted(d,key = myfun3)
y = sorted(y,key = myfun2)
y = sorted(y,key = myfun1,reverse = True)
for k in y:
if d[k][1] > 5:
print('%s,%s,%d,%d' % (k[0],k[1],d[k][0],d[k][1]))
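# Usage sketch (assumption: not in the original; file names are placeholders). The script reads
# comma-separated "bowler,batsman,runs,balls" records from stdin:
#
#     cat deliveries.csv | python reducer.py
#
# and prints one aggregated line per (bowler, batsman) pair with more than 5 balls, sorted by
# total runs (descending), then total balls, then bowler name.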
|
N = int(input())
c = 1
for i in range(1, 10):
for j in range(1, 10):
if c != N:
c += 1
continue
print(str(j) * i)
exit()
|
from django.contrib.auth import logout
from django.shortcuts import render, redirect
from website.views.decorators.auth import require_auth_or_redirect_with_return
@require_auth_or_redirect_with_return
def profile(request):
return render(request, "user/profile/index.html", {
'page_title': 'User Profile'
})
def sign_out(request):
request.session.clear()
if request.user.is_authenticated:
logout(request)
return redirect("home")
|
from arm.logicnode.arm_nodes import *
class TranslateObjectNode(ArmLogicTreeNode):
"""Translates (moves) the given object using the given vector in world coordinates."""
bl_idname = 'LNTranslateObjectNode'
bl_label = 'Translate Object'
arm_section = 'location'
arm_version = 1
def arm_init(self, context):
self.add_input('ArmNodeSocketAction', 'In')
self.add_input('ArmNodeSocketObject', 'Object')
self.add_input('ArmVectorSocket', 'Vector')
self.add_input('ArmBoolSocket', 'On Local Axis')
self.add_output('ArmNodeSocketAction', 'Out')
|
# standard imports
import glob
import pandas as pd
import os
import lightkurve as lk
# useful functions
def locate_files(tic,path=None):
'''
~ Locates TESS lightcurve files with filenames formatted from a mast bulk download.~
REQUIRES: glob
Args:
tic -(int or str)TESS TIC ID
path -(str) path on computer to file(s) location
Returns:
list of path strings for all files found with specified tic
'''
if path == None: #if only need filename
fullpath = glob.glob('*{}-*-s_lc.fits'.format(tic)) #to use wildcard*
else: #user defined path to datafile on their computer
pathstart = path
        pathstart = str(pathstart)  # make it a string in case the user forgets to (though that likely errors anyway)
        pathend = pathstart + '*{}-*-s_lc.fits'.format(tic)  # stitches path & filename
        fullpath = glob.glob(pathend)  # to use wildcard *
return fullpath
def open_rawdata(fullpath,sector):
'''
~ Opens raw data light curve file objects downloaded to our shared google drive folder~
REQUIRES: lightkurve as lk
Args:
fullpath -(str) list of path strings for all files found with specified tic
sector -(int) sector number for desired data
Returns:
lcfs -(list) list of lightkurve 'lightcurvefile' class objects
'''
lcfs = []
for file in fullpath:
if len(file)==0:
print('no files')
else:
try:
lcfile = lk.open(file)
mystring = str(type(lcfile))
if mystring[34:-2] == 'TessLightCurveFile':
hdr = lcfile.header()
mysector = hdr['SECTOR']
if sector == mysector:
lcfs.append(lcfile)
else:
pass
else:
pass
except FileNotFoundError:
pass
return lcfs
def Dataclean(TICIDlist,sectornumber,path):
'''
~PURPOSE~ cleans data: detrending, removing outliers, and removing NaN Flux
REQUIRES: lightkurve as lk
TICIDlist -(list) a list of TIC ids
sectornumber -(int) the number of the sector we are working with
path -(str) the path to our raw data
RETURNS: cleaned_data
'''
subsample_tics = TICIDlist
sector = sectornumber
raw_data_path = path
list1 = []
list2 = []
for target in subsample_tics:
#open data
paths = locate_files(target,raw_data_path)
original_data = open_rawdata(paths,sector)
        #safeguard against missing files
if len(original_data)==0: #if no files exist go to next target in for loop
pass
else: #if files exist proceed
#format data (class object type)
new_data = original_data[0].PDCSAP_FLUX #detrends
outliers = new_data.remove_outliers(sigma = 3) #removes outliers
cleaned_data = outliers.remove_nans() #removes NaN Flux
list1.append(cleaned_data)
list2.append(target)
return list1, list2
#one way
#variable = Dataclean(1,2,2)
#print(variable)
#([data1,data2,data3], [tic1,tic2,tic3])
#another way
#variable1, variable2 = Dataclean(1,2,3)
#print(variable1)
#[data1,data2,data3]
#print(variable2)
#[tic1,tic2,tic3]
|
from flask import Flask, request, Response
from flask import render_template
app = Flask(__name__)
@app.route('/')
def hello_world():
resp = Response("FLASK 2 DOCKERIZED")
return(resp)
@app.route('/vulnerabilities/mime-sniffing/')
def mimesniffing_home():
return render_template('mime_sniffing_demo.html')
@app.route('/vulnerabilities/mime-sniffing/textsniff')
def mimesniffing_home1():
resp = Response("alert('Hi from nosniff');")
resp.headers['X-Content-Type-Options'] = 'nosniff'
resp.headers['Content-Security-Policy'] = 'default-src \'self\''
resp.headers['Content-Type'] = 'text/plain'
return resp
@app.route('/vulnerabilities/mime-sniffing/textnosniff')
def mimesniffing_home2():
resp = Response("alert('No nosniff');")
resp.headers['Content-Type'] = 'text/plain'
return(resp)
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
#!/usr/bin/env python3
while True:
n = int(input("Please enter an Integer: "))
if n < 0:
        continue  # negative input: go back to the top of the while loop
elif n == 0:
break
print("Square is ", n ** 2)
print("Goodbye")
|
import falcon
from expects import expect, be, have_key
# NOTE(agileronin): These keys are returned from the XKCD JSON service.
VALID_KEYS = [
'month',
'num',
'link',
'year',
'news',
'safe_title',
'transcript',
'alt',
'img',
'title'
]
class TestAPI:
"""API test class.
For convenience, all unit tests for the XKCD API are encapsulated into this class.
"""
def test_fetch_latest(self, client):
"""Fetch the latest XKCD comic.
Args:
client: Falcon API context injected as a fixture by pytest.
"""
resp = client.simulate_get('/')
expect(resp.status).to(be(falcon.HTTP_200))
for key in VALID_KEYS:
expect(resp.json).to(have_key(key))
def test_fetch_specific_comic(self, client):
"""Test the ability to fetch a specific XKCD comic by it's identifier.
Args:
client: Falcon API context injected as a fixture by pytest.
"""
resp = client.simulate_get('/1')
expect(resp.status).to(be(falcon.HTTP_200))
for key in VALID_KEYS:
expect(resp.json).to(have_key(key))
# NOTE(agileronin): At some point in time this test will fail; however it will be an
# epic day when XKCD releases their 10 millionth comic!
resp = client.simulate_get('/10000000')
expect(resp.status).to(be(falcon.HTTP_404))
|
####################################
# --- Day 19: Go With The Flow --- #
####################################
import AOCUtils
####################################
program = AOCUtils.loadInput(19)
pc, program = int(program[0].split()[1]), program[1:]
registers = [0 for _ in range(6)]
while registers[pc] < len(program):
instr = program[registers[pc]].split()
op = instr[0]
a, b, c = [int(i) for i in instr[1:]]
if op == "addr":
registers[c] = registers[a] + registers[b]
elif op == "addi":
registers[c] = registers[a] + b
elif op == "mulr":
registers[c] = registers[a] * registers[b]
elif op == "muli":
registers[c] = registers[a] * b
elif op == "borr":
registers[c] = registers[a] | registers[b]
elif op == "bori":
registers[c] = registers[a] | b
elif op == "banr":
registers[c] = registers[a] & registers[b]
elif op == "bani":
registers[c] = registers[a] & b
elif op == "setr":
registers[c] = registers[a]
elif op == "seti":
registers[c] = a
elif op == "gtir":
registers[c] = int(a > registers[b])
elif op == "gtri":
registers[c] = int(registers[a] > b)
elif op == "gtrr":
registers[c] = int(registers[a] > registers[b])
elif op == "eqir":
registers[c] = int(a == registers[b])
elif op == "eqri":
registers[c] = int(registers[a] == b)
elif op == "eqrr":
registers[c] = int(registers[a] == registers[b])
registers[pc] += 1
print("Part 1: {}".format(registers[0]))
args = [[int(i) for i in instr.split()[1:]] for instr in program]
x = (args[17][1] * args[17][1] * 19 * args[20][1]) + (args[21][1] * 22 + args[23][1])
y = (27 * 28 + 29) * 30 * args[31][1] * 32
magicValue = x + y
sumFactors = sum(i for i in range(1, magicValue+1) if magicValue % i == 0)
print("Part 2: {}".format(sumFactors))
AOCUtils.printTimeTaken()
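# A faster divisor-sum sketch (assumption: not part of the original solution) pairs each divisor
# i <= sqrt(magicValue) with its cofactor magicValue // i, turning the O(n) scan into O(sqrt(n)):
#
#     total, i = 0, 1
#     while i * i <= magicValue:
#         if magicValue % i == 0:
#             total += i
#             if i != magicValue // i:
#                 total += magicValue // i
#         i += 1
#     assert total == sumFactors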
'''
#ip 5
PC = F
PC = 0
A, B, C, D, E = 1, 0, 0, 0, 0
==================================
0 addi 5 16 5 PC = 16 GOTO 17
1 seti 1 9 1 B = 1 B = 1
2 seti 1 5 4 E = 1 E = 1
3 mulr 1 4 3 D = A * E
4 eqrr 3 2 3 D = (D == C) if A == C:
5 addr 3 5 5 PC += D GOTO 7
6 addi 5 1 5 PC = 7 else: GOTO 8
7 addr 1 0 0 A += 1
8 addi 4 1 4 E += 1
9 gtrr 4 2 3 D = (E > C)
10 addr 5 3 5 PC = 10 * D
11 seti 2 4 5 PC = 2
12 addi 1 1 1 B = B + 1
13 gtrr 1 2 3 D = (B > C)
14 addr 3 5 5 PC += D
15 seti 1 9 5 PC = 1
16 mulr 5 5 5 PC = 256
17 addi 2 2 2 C += 2 C = 2
18 mulr 2 2 2 C *= C C = 4
19 mulr 5 2 2 C *= 19 C = 76
20 muli 2 11 2 C *= 11 C = 836
21 addi 3 8 3 D += 8 D = 8
22 mulr 3 5 3 D *= 22 D = 176
23 addi 3 16 3 D += 16 D = 192
24 addr 2 3 2 C += D C = 1028 x = ((2*2)*19*11 + (8*22)+16)
25 addr 5 0 5 PC += A if part2: GOTO 27
26 seti 0 7 5 PC = 0 else: GOTO 1
27 setr 5 3 3 D = 27 D = 27
28 mulr 3 5 3 D *= 28 D = 756
29 addr 5 3 3 D += 29 D = 785
30 mulr 5 3 3 D *= 30 D = 23550
31 muli 3 14 3 D *= 14 D = 329700
32 mulr 3 5 3 D *= 32 D = 10550400 y = ((27*28+29)*30*14*32)
33 addr 2 3 2 C += D C = 10551428 will get sum of factors of (x+y)
34 seti 0 1 0 A = 0 A = 0
35 seti 0 6 5 PC = 0 GOTO 1
''' |
"""
Tests
"""
def test_phat_persistence_diagram():
import numpy as np
from numpy.testing import assert_allclose
from dmt.complexes import MorseComplex
from dmt.phat_wrap import persistence_diagram
# Filtered circle
morse_complex = MorseComplex([[0, 0, 0, 1, 0, 1],
[0, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]],
cell_dimensions=[0, 0, 0, 1, 1, 1],
filtration=[0, 0, 0, 1, 2, 3])
dgms = persistence_diagram(morse_complex.boundary_matrix_csc, morse_complex.cell_dimensions,
morse_complex.filtration)
assert_allclose(dgms[0], [[0, 1], [0, 2], [0, np.inf]])
assert_allclose(dgms[1], [[3, np.inf]])
def test_phat():
import numpy as np
import phat
columns = [[], [], [], [], [], [], [], [], [], [], [0, 7], [5, 9], [0, 2], [4, 8], [7, 8],
[2, 9], [0, 9], [16, 12, 15], [6, 8], [6, 7], [14, 18, 19], [1, 6], [1, 4],
[4, 6], [23, 18, 13], [7, 9], [25, 16, 10], [0, 8], [27, 14, 10],
               [23, 21, 22], [6, 9], [30, 25, 19], [5, 6], [30, 32, 11], [3, 5], [3, 6],
               [35, 32, 34], [2, 8], [37, 27, 12], [1, 3], [39, 21, 35], [2, 4], [41, 37, 13]]
dimensions = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 2, 1,
1, 1, 2, 1, 2, 1, 2, 2, 1, 2, 1, 2, 1, 1, 2, 1, 2, 1, 2, 1, 2]
bdry = phat.boundary_matrix(representation=phat.representations.vector_heap,
columns=list(zip(dimensions, columns)))
pairs = np.array(bdry.compute_persistence_pairs(reduction=phat.reductions.twist_reduction))
    assert np.all(pairs[:, 0] != 10), "Edge 10 = [0, 7] is the first edge added, so it should be a death (killing vertex 7's component), never a birth"
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""significant bugfix testcase."""
import paddle.fluid as fluid
import numpy
import tools
def test_scalar():
"""
    Test for the scalar bug fix: the learning rate is passed in as a float but was
    being set as a double in the backend; this case verifies the fix.
Returns:
None
"""
train_program = fluid.Program()
startup_program = fluid.Program()
train_program.random_seed = 33
startup_program.random_seed = 33
numpy.random.seed(33)
with fluid.unique_name.guard():
with fluid.program_guard(train_program, startup_program):
train_data = numpy.array(
[[1.0], [2.0], [3.0], [4.0]]).astype('float32')
y_true = numpy.array([[2.0], [4.0], [6.0], [8.0]]).astype('float32')
lr = fluid.layers.data(
name="lr", shape=[1], dtype='float32', append_batch_size=False)
x = fluid.data(name="x", shape=[None, 1], dtype='float32')
y = fluid.data(name="y", shape=[None, 1], dtype='float32')
y_predict = fluid.layers.fc(input=x, size=1, act=None)
cost = fluid.layers.square_error_cost(input=y_predict, label=y)
avg_cost = fluid.layers.mean(cost)
sgd_optimizer = fluid.optimizer.Adam(learning_rate=lr)
sgd_optimizer.minimize(avg_cost)
cpu = fluid.CPUPlace()
exe = fluid.Executor(cpu)
exe.run(startup_program)
res = exe.run(feed={
'x': train_data,
'y': y_true,
'lr': numpy.asarray(
[1], dtype=numpy.float32)
},
fetch_list=[y_predict, avg_cost])
expect = [104.31773]
tools.compare(res[1], expect)
|
import sys, os
"专门用于调用处理coco数据相关的脚本"
file_path = os.path.abspath(__file__)
sys.path.append(os.path.abspath(os.path.join(file_path, "..", "..")))
from code_aculat.data_analyse.data_analyse_coco import analyse_obs_size_after_resized, analyse_obs_ratio, check_annos, \
analyse_image_hw, analyse_obs_size, analyse_num_each_class, stastic_ann_per_image, checkout_iterstrat_split, \
check_empty_coco
json_path = r"/home/data1/yw/copy_paste_empty/500_aug/hrsc_104_tv_raw_trans/train_data/aug_fold_v1/train_13_18_14_17_16_bg57.json"
# json_path = r"/home/data1/yw/copy_paste_empty/500_aug/hrsc_104_tv_raw_trans/train_data/aug_fold_v1/train_13_18_14_17_16.json"
# analyse_obs_size_after_resized(json_path, [(640, 480)])
# analyse_obs_ratio(json_path)
# check_annos(r"G:\hrsc\out_coco\train.json")
# analyse_image_hw(json_path)
# analyse_obs_size(json_path)
analyse_num_each_class(json_path)
# stastic_ann_per_image(json_path)
# check_empty_coco(json_path)
|
import bcrypt
import pytz
from flask_sqlalchemy import SQLAlchemy
import datetime
from tzlocal import get_localzone
db = SQLAlchemy()
def insert_timestamp():
    # Localize "now" to the machine's timezone, then store it as a UTC epoch timestamp.
    user_timezone = pytz.timezone(get_localzone().zone)
    new_post_date = user_timezone.localize(datetime.datetime.now())
    return new_post_date.astimezone(pytz.utc).timestamp()
def convert_timestamp(timestamp):
    # Convert a stored UTC epoch timestamp back into a local-time string.
    utc_date = pytz.utc.localize(datetime.datetime.utcfromtimestamp(timestamp))
    return str(utc_date.astimezone(pytz.timezone(get_localzone().zone)))
def encrypt(content):
    # Hash with bcrypt using a work factor of 14; returns the salted hash as bytes.
    return bcrypt.hashpw(content.encode("UTF-8"), bcrypt.gensalt(14))
def decrypt(x, y):
    # bcrypt hashes are not reversible; this checks plaintext x against the stored hash y.
    return bcrypt.checkpw(x.encode("UTF-8"), y)
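# A minimal usage sketch of the helpers above (hypothetical, for illustration only;
# note that bcrypt hashes rather than encrypts, so "decrypt" really means "verify"):
if __name__ == "__main__":
    hashed = encrypt("hunter2")           # salted bcrypt hash, cost factor 14
    assert decrypt("hunter2", hashed)     # checkpw returns True on a match
    assert not decrypt("wrong", hashed)   # and False otherwise
    print(convert_timestamp(insert_timestamp()))  # round-trip the timestamp helpers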
|
#!/usr/bin/env python
import yaml
import docker
import tempfile
import pdb
import sys
with open(sys.argv[1], 'r') as stream:
    pkg = yaml.safe_load(stream)  # safe_load avoids constructing arbitrary Python objects
requires = " \\\n    ".join(pkg['build_requires'])  # backslash-newline keeps each package on its own continued Dockerfile line
pkg['requires'] = requires
container_tag = '/'.join([pkg['namespace'], pkg['name']])
skip_build = False
client = docker.from_env()
if pkg['container_id']:
images = client.images()
ids = [x['Id'] for x in images]
if pkg['container_id'] in ids:
skip_build = True
if not skip_build:
dockerfile = u"""
# Shared Volume
FROM %(imagename)s
VOLUME %(volume)s
RUN groupadd -r builder && useradd -r -g builder -d /build builder
ENV LC_ALL C.UTF-8
RUN set -x;apt-get update -y && \
apt-get install -y \
%(requires)s \
&& rm -rf /var/lib/apt/lists/*
""" % pkg
# from io import BytesIO
# f = BytesIO(dockerfile.encode('utf8'))
f = tempfile.NamedTemporaryFile()
f.write(dockerfile.encode('utf8'))
f.seek(0)
response = [line for line in client.build(
fileobj=f, rm=True, tag=container_tag)]
print('\n'.join(response))
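# Example of the package YAML this script consumes, inferred from the keys
# referenced above (all values are hypothetical placeholders):
#
#   namespace: example
#   name: mypkg
#   imagename: ubuntu:18.04
#   volume: /build
#   container_id: ""          # the build is skipped if this image ID already exists locally
#   build_requires:
#     - build-essential
#     - python3-dev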
|
''' File IO and miscellaneous utilities'''
__author__ = "Adam Hughes"
__copyright__ = "Copyright 2012, GWU Physics"
__license__ = "Free BSD"
__maintainer__ = "Adam Hughes"
__email__ = "[email protected]"
__status__ = "Development"
import os
from pandas import DataFrame, read_csv, concat
from skspec.core.imk_utils import get_files_in_dir, get_shortname
def df_from_directory(directory, csvargs, sortnames=False, concat_axis=1, shortname=True, cut_extension=False):
''' Takes files from a directory, presuming they are identically formatted, and reads them into
a dataframe by iterating over read_csv(). All arguments that read_csv() would take are passed
    in. These kwargs pertain to the files themselves, for example skiprows, skipfooter and header.
    For now, there is no glob support for reading only files of a certain extension.
    For now, concatenation occurs along the axis given by concat_axis (columns by default).
Args:
directory- Path to directory where files are stored.
csvargs- Dictionary of arguments that are passed directly to the read_csv() function. For example
skiprows, header, na_values etc... see pandas API for read_csv()
Kwds:
        sortnames- Will attempt to autosort the filelist. Otherwise, files are ordered as returned
                   by os.listdir().
concat_axis- How to merge datafiles into a dataframe. Default is axis=1, which means all files
should share the same index values. I use this for spectral data where the wavelength
column is my dataframe index.
shortname- If false, full file path is used as the column name. If true, only the filename is used.
cut_extension- If kwd shortname is True, this will determine if the file extension is saved or cut from the data.
'''
filelist=get_files_in_dir(directory)
if shortname:
fget=lambda x:get_shortname(x, cut_extension=cut_extension)
else:
fget=lambda x: x
### Either full names or short names of filelist
working_names=[fget(afile) for afile in filelist]
### This parameter can't be passed in, so if user does so, pull it out.
try:
csvargs.pop('names')
except KeyError:
pass
else:
raise Warning('Please do not enter a names keyword for csvargs, it gets inferred from the filenames in\
the directory.')
dflist=[read_csv(afile, names=[fget(afile)], **csvargs) for afile in filelist]
    ### THIS IS BUSTED, PUTTING NANS EVERYWHERE EXCEPT ONE FILE, but dflist itself was nice.
dataframe=concat(dflist, axis=1)
### concat tries to sort these, so this will preserve the sort order
if sortnames:
dataframe=dataframe.reindex(columns=sorted(working_names))
return dataframe
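### The docstring above notes that glob-based filtering by file extension is not yet
### supported. A minimal sketch of that idea (a hypothetical helper, not part of this
### module), reusing the same csvargs conventions:
import glob
def df_from_directory_glob(directory, csvargs, pattern='*.csv', concat_axis=1):
    ''' Like df_from_directory, but reads only files matching the glob pattern. '''
    filelist = sorted(glob.glob(os.path.join(directory, pattern)))
    dflist = [read_csv(afile, names=[get_shortname(afile)], **csvargs) for afile in filelist]
    return concat(dflist, axis=concat_axis)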
if __name__=='__main__':
datadirectory='./NPConcentration'
read_csv_args={'sep':',', 'header':2, 'index_col':0, 'skiprows':2, 'na_values':' \r'}
df=df_from_directory(datadirectory, read_csv_args, sortnames=True)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Hammer logging code.
#
# See LICENSE for licence details.
__all__ = ['HammerVLSIFileLogger', 'HammerVLSILogging', 'HammerVLSILoggingContext', 'Level']
from .logging import HammerVLSIFileLogger, HammerVLSILogging, HammerVLSILoggingContext, Level
|