repo_name | repo_path | repo_head_hexsha | content | apis
---|---|---|---|---|
Ibrahem3amer/bala7 | cms/tests/test_views.py | 70638c121ea85ff0e6a650c5f2641b0b3b04d6d0 | from django.core.urlresolvers import resolve
from django.urls import reverse
from django.test import TestCase, RequestFactory
from django.http import HttpRequest, Http404
from django.contrib.auth.models import User
from unittest import skip
from users.models import University, Faculty, Department, UserProfile
from cms.models import Topic
from cms.views import get_topic
class AccessRestriction(TestCase):
def setUp(self):
self.user = User.objects.create(username='test_username', email='[email protected]', password='secrettt23455')
self.uni = University.objects.create(name='test_university')
self.fac = Faculty.objects.create(name='Test faculty')
self.dep = Department.objects.create(name='Test dep')
self.profile = UserProfile.objects.create(university=self.uni, faculty=self.fac, department=self.dep)
self.topic = Topic.objects.create(name='cs', desc="test test test", faculty=self.fac, term=1)
self.topic.department.add(self.dep)
self.user.profile = self.profile
self.profile.topics.add(self.topic)
def test_return_topic_that_match_user(self):
# Setup test
request = RequestFactory()
request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id}))
request.user = self.user
# Exercise test
response = get_topic(request, self.dep.id, self.topic.id)
# Assert test
self.assertEqual(200, response.status_code)
def test_return_topic_that_has_different_department(self):
# Setup test
request = RequestFactory()
request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id}))
request.user = self.user
# Exercise test
another_dep = Department.objects.create()
try:
response = get_topic(request, another_dep.id, self.topic.id)
flag = False
except Http404:
flag = True
# Assert test
self.assertTrue(flag)
def test_return_topic_that_does_not_exist(self):
# Setup test
request = RequestFactory()
request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id}))
request.user = self.user
# Exercise test
try:
response = get_topic(request, self.dep.id, 990)
flag = False
except Http404:
flag = True
# Assert test
self.assertTrue(flag)
def test_return_topic_that_outside_user_topics(self):
# Setup test
another_topic = Topic.objects.create(name='is', desc="test test test", faculty=self.fac, term=1)
another_topic.department.add(self.dep)
self.user.profile.topics.add(another_topic)
request = RequestFactory()
request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id}))
request.user = self.user
# Exercise test
outsider_topic = Topic.objects.create(name='ms', desc="test test test", faculty=self.fac, term=1)
outsider_topic.department.add(self.dep)
try:
response = get_topic(request, self.dep.id, outsider_topic.id)
flag = False
except Http404:
flag = True
# Assert test
self.assertTrue(flag)
def test_get_topic_with_no_parameters(self):
# Setup test
another_topic = Topic.objects.create(name='is', desc="test test test", faculty=self.fac, term=1)
another_topic.department.add(self.dep)
self.user.profile.topics.add(another_topic)
request = RequestFactory()
request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id}))
request.user = self.user
# Exercise test
outsider_topic = Topic.objects.create(name='ms', desc="test test test", faculty=self.fac, term=1)
outsider_topic.department.add(self.dep)
try:
response = get_topic(request)
flag = False
except Http404:
flag = True
# Assert test
self.assertTrue(flag)
class TableViews(TestCase):
def setUp(self):
self.user = User.objects.create_user(username='ssss', email='[email protected]', password='secrettt23455')
self.fac = Faculty.objects.create()
self.dep = Department.objects.create(faculty=self.fac)
self.profile = UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac)
def test_page_load_on_get(self):
# Setup test
url = reverse('web_dep_table')
request = self.client.login(username="ssss", password="secrettt23455")
# Exercise test
request = self.client.get(url)
# Assert test
self.assertEqual(200, request.status_code)
self.assertTemplateUsed(request, 'tables/table_main.html')
def test_page_redirect_on_post(self):
# Setup test
url = reverse('web_dep_table')
request = self.client.login(username="ssss", password="secrettt23455")
# Exercise test
request = self.client.post(url)
# Assert test
self.assertEqual(302, request.status_code)
def test_page_redirect_on_no_profile(self):
# Setup test
user = User.objects.create_user(
username='test_username',
email='[email protected]',
password='secrettt23455'
)
url = reverse('web_dep_table')
request = self.client.login(username="test_username", password="secrettt23455")
# Exercise test
request = self.client.get(url)
# Assert test
self.assertEqual(302, request.status_code)
class UserTableViews(TestCase):
def setUp(self):
self.user = User.objects.create_user(username='ssss', email='[email protected]', password='secrettt23455')
self.fac = Faculty.objects.create()
self.dep = Department.objects.create(faculty=self.fac)
UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac)
self.topic = Topic.objects.create(name='topic name', desc='ddddd', term=1)
self.topic.department.add(self.dep)
def test_page_load_on_get(self):
# Setup test
url = reverse('web_user_table')
request = self.client.login(username="ssss", password="secrettt23455")
# Exercise test
request = self.client.get(url)
# Assert test
self.assertEqual(200, request.status_code)
self.assertTemplateUsed(request, 'tables/user_table.html')
def test_page_load_if_no_profile(self):
# Setup test
url = reverse('web_user_table')
another_user = User.objects.create_user(username='xxxss', email='[email protected]', password='secrettt23455')
request = self.client.login(username="xxxss", password="secrettt23455")
# Exercise test
request = self.client.get(url)
# Assert test
self.assertEqual(200, request.status_code)
self.assertTemplateUsed(request, 'tables/user_table.html')
def test_post_when_no_choices(self):
# Setup test
url = reverse('web_user_table')
data = {}
request = self.client.login(username="xxxss", password="secrettt23455")
# Exercise test
request = self.client.post(url, data=data)
# Assert test
self.assertEqual(302, request.status_code)
| [((450, 551), 'django.contrib.auth.models.User.objects.create', 'User.objects.create', ([], {'username': '"""test_username"""', 'email': '"""[email protected]"""', 'password': '"""secrettt23455"""'}), "(username='test_username', email='[email protected]',\n password='secrettt23455')\n", (469, 551), False, 'from django.contrib.auth.models import User\n'), ((567, 616), 'users.models.University.objects.create', 'University.objects.create', ([], {'name': '"""test_university"""'}), "(name='test_university')\n", (592, 616), False, 'from users.models import University, Faculty, Department, UserProfile\n'), ((636, 679), 'users.models.Faculty.objects.create', 'Faculty.objects.create', ([], {'name': '"""Test faculty"""'}), "(name='Test faculty')\n", (658, 679), False, 'from users.models import University, Faculty, Department, UserProfile\n'), ((699, 741), 'users.models.Department.objects.create', 'Department.objects.create', ([], {'name': '"""Test dep"""'}), "(name='Test dep')\n", (724, 741), False, 'from users.models import University, Faculty, Department, UserProfile\n'), ((765, 855), 'users.models.UserProfile.objects.create', 'UserProfile.objects.create', ([], {'university': 'self.uni', 'faculty': 'self.fac', 'department': 'self.dep'}), '(university=self.uni, faculty=self.fac,\n department=self.dep)\n', (791, 855), False, 'from users.models import University, Faculty, Department, UserProfile\n'), ((873, 958), 'cms.models.Topic.objects.create', 'Topic.objects.create', ([], {'name': '"""cs"""', 'desc': '"""test test test"""', 'faculty': 'self.fac', 'term': '(1)'}), "(name='cs', desc='test test test', faculty=self.fac, term=1\n )\n", (893, 958), False, 'from cms.models import Topic\n'), ((1172, 1188), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (1186, 1188), False, 'from django.test import TestCase, RequestFactory\n'), ((1377, 1423), 'cms.views.get_topic', 'get_topic', (['request', 'self.dep.id', 'self.topic.id'], {}), '(request, self.dep.id, self.topic.id)\n', (1386, 1423), False, 'from cms.views import get_topic\n'), ((1602, 1618), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (1616, 1618), False, 'from django.test import TestCase, RequestFactory\n'), ((1810, 1837), 'users.models.Department.objects.create', 'Department.objects.create', ([], {}), '()\n', (1835, 1837), False, 'from users.models import University, Faculty, Department, UserProfile\n'), ((2143, 2159), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (2157, 2159), False, 'from django.test import TestCase, RequestFactory\n'), ((2632, 2717), 'cms.models.Topic.objects.create', 'Topic.objects.create', ([], {'name': '"""is"""', 'desc': '"""test test test"""', 'faculty': 'self.fac', 'term': '(1)'}), "(name='is', desc='test test test', faculty=self.fac, term=1\n )\n", (2652, 2717), False, 'from cms.models import Topic\n'), ((2830, 2846), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (2844, 2846), False, 'from django.test import TestCase, RequestFactory\n'), ((3041, 3126), 'cms.models.Topic.objects.create', 'Topic.objects.create', ([], {'name': '"""ms"""', 'desc': '"""test test test"""', 'faculty': 'self.fac', 'term': '(1)'}), "(name='ms', desc='test test test', faculty=self.fac, term=1\n )\n", (3061, 3126), False, 'from cms.models import Topic\n'), ((3478, 3563), 'cms.models.Topic.objects.create', 'Topic.objects.create', ([], {'name': '"""is"""', 'desc': '"""test test test"""', 'faculty': 'self.fac', 'term': '(1)'}), "(name='is', desc='test test test', faculty=self.fac, term=1\n )\n", (3498, 3563), False, 'from cms.models import Topic\n'), ((3676, 3692), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (3690, 3692), False, 'from django.test import TestCase, RequestFactory\n'), ((3887, 3972), 'cms.models.Topic.objects.create', 'Topic.objects.create', ([], {'name': '"""ms"""', 'desc': '"""test test test"""', 'faculty': 'self.fac', 'term': '(1)'}), "(name='ms', desc='test test test', faculty=self.fac, term=1\n )\n", (3907, 3972), False, 'from cms.models import Topic\n'), ((4268, 4365), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""ssss"""', 'email': '"""[email protected]"""', 'password': '"""secrettt23455"""'}), "(username='ssss', email='[email protected]',\n password='secrettt23455')\n", (4292, 4365), False, 'from django.contrib.auth.models import User\n'), ((4381, 4405), 'users.models.Faculty.objects.create', 'Faculty.objects.create', ([], {}), '()\n', (4403, 4405), False, 'from users.models import University, Faculty, Department, UserProfile\n'), ((4425, 4468), 'users.models.Department.objects.create', 'Department.objects.create', ([], {'faculty': 'self.fac'}), '(faculty=self.fac)\n', (4450, 4468), False, 'from users.models import University, Faculty, Department, UserProfile\n'), ((4492, 4578), 'users.models.UserProfile.objects.create', 'UserProfile.objects.create', ([], {'user': 'self.user', 'department': 'self.dep', 'faculty': 'self.fac'}), '(user=self.user, department=self.dep, faculty=\n self.fac)\n', (4518, 4578), False, 'from users.models import University, Faculty, Department, UserProfile\n'), ((4647, 4671), 'django.urls.reverse', 'reverse', (['"""web_dep_table"""'], {}), "('web_dep_table')\n", (4654, 4671), False, 'from django.urls import reverse\n'), ((5034, 5058), 'django.urls.reverse', 'reverse', (['"""web_dep_table"""'], {}), "('web_dep_table')\n", (5041, 5058), False, 'from django.urls import reverse\n'), ((5362, 5468), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""test_username"""', 'email': '"""[email protected]"""', 'password': '"""secrettt23455"""'}), "(username='test_username', email='[email protected]',\n password='secrettt23455')\n", (5386, 5468), False, 'from django.contrib.auth.models import User\n'), ((5525, 5549), 'django.urls.reverse', 'reverse', (['"""web_dep_table"""'], {}), "('web_dep_table')\n", (5532, 5549), False, 'from django.urls import reverse\n'), ((5851, 5948), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""ssss"""', 'email': '"""[email protected]"""', 'password': '"""secrettt23455"""'}), "(username='ssss', email='[email protected]',\n password='secrettt23455')\n", (5875, 5948), False, 'from django.contrib.auth.models import User\n'), ((5964, 5988), 'users.models.Faculty.objects.create', 'Faculty.objects.create', ([], {}), '()\n', (5986, 5988), False, 'from users.models import University, Faculty, Department, UserProfile\n'), ((6008, 6051), 'users.models.Department.objects.create', 'Department.objects.create', ([], {'faculty': 'self.fac'}), '(faculty=self.fac)\n', (6033, 6051), False, 'from users.models import University, Faculty, Department, UserProfile\n'), ((6060, 6146), 'users.models.UserProfile.objects.create', 'UserProfile.objects.create', ([], {'user': 'self.user', 'department': 'self.dep', 'faculty': 'self.fac'}), '(user=self.user, department=self.dep, faculty=\n self.fac)\n', (6086, 6146), False, 'from users.models import University, Faculty, Department, UserProfile\n'), ((6163, 6224), 'cms.models.Topic.objects.create', 'Topic.objects.create', ([], {'name': '"""topic name"""', 'desc': '"""ddddd"""', 'term': '(1)'}), "(name='topic name', desc='ddddd', term=1)\n", (6183, 6224), False, 'from cms.models import Topic\n'), ((6342, 6367), 'django.urls.reverse', 'reverse', (['"""web_user_table"""'], {}), "('web_user_table')\n", (6349, 6367), False, 'from django.urls import reverse\n'), ((6732, 6757), 'django.urls.reverse', 'reverse', (['"""web_user_table"""'], {}), "('web_user_table')\n", (6739, 6757), False, 'from django.urls import reverse\n'), ((6781, 6879), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""xxxss"""', 'email': '"""[email protected]"""', 'password': '"""secrettt23455"""'}), "(username='xxxss', email='[email protected]',\n password='secrettt23455')\n", (6805, 6879), False, 'from django.contrib.auth.models import User\n'), ((7238, 7263), 'django.urls.reverse', 'reverse', (['"""web_user_table"""'], {}), "('web_user_table')\n", (7245, 7263), False, 'from django.urls import reverse\n'), ((1219, 1298), 'django.urls.reverse', 'reverse', (['"""get_topic"""'], {'kwargs': "{'dep_id': self.dep.id, 'topic_id': self.topic.id}"}), "('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id})\n", (1226, 1298), False, 'from django.urls import reverse\n'), ((1649, 1728), 'django.urls.reverse', 'reverse', (['"""get_topic"""'], {'kwargs': "{'dep_id': self.dep.id, 'topic_id': self.topic.id}"}), "('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id})\n", (1656, 1728), False, 'from django.urls import reverse\n'), ((1874, 1923), 'cms.views.get_topic', 'get_topic', (['request', 'another_dep.id', 'self.topic.id'], {}), '(request, another_dep.id, self.topic.id)\n', (1883, 1923), False, 'from cms.views import get_topic\n'), ((2190, 2269), 'django.urls.reverse', 'reverse', (['"""get_topic"""'], {'kwargs': "{'dep_id': self.dep.id, 'topic_id': self.topic.id}"}), "('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id})\n", (2197, 2269), False, 'from django.urls import reverse\n'), ((2365, 2401), 'cms.views.get_topic', 'get_topic', (['request', 'self.dep.id', '(990)'], {}), '(request, self.dep.id, 990)\n', (2374, 2401), False, 'from cms.views import get_topic\n'), ((2877, 2956), 'django.urls.reverse', 'reverse', (['"""get_topic"""'], {'kwargs': "{'dep_id': self.dep.id, 'topic_id': self.topic.id}"}), "('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id})\n", (2884, 2956), False, 'from django.urls import reverse\n'), ((3206, 3256), 'cms.views.get_topic', 'get_topic', (['request', 'self.dep.id', 'outsider_topic.id'], {}), '(request, self.dep.id, outsider_topic.id)\n', (3215, 3256), False, 'from cms.views import get_topic\n'), ((3723, 3802), 'django.urls.reverse', 'reverse', (['"""get_topic"""'], {'kwargs': "{'dep_id': self.dep.id, 'topic_id': self.topic.id}"}), "('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id})\n", (3730, 3802), False, 'from django.urls import reverse\n'), ((4052, 4070), 'cms.views.get_topic', 'get_topic', (['request'], {}), '(request)\n', (4061, 4070), False, 'from cms.views import get_topic\n')] |
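The tests above detect Http404 with a try/except flag pattern. unittest's assertRaises context manager expresses the same check more directly; a minimal sketch, reusing the fixtures from AccessRestriction.setUp above:

# Equivalent of test_return_topic_that_has_different_department, using assertRaises.
def test_return_topic_that_has_different_department(self):
    request = RequestFactory().get(
        reverse('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id})
    )
    request.user = self.user
    another_dep = Department.objects.create()
    with self.assertRaises(Http404):
        get_topic(request, another_dep.id, self.topic.id)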
geometatqueens/RCNN | 3D/Train_Module_3D.py | 2e1e67264969f05a2f554595577dfb1025938245 | """The present code is the Version 1.0 of the RCNN approach to perform MPS
in 3D for categorical variables. It has been developed by S. Avalos and J. Ortiz in the
Geometallurgical Group at Queen's University as part of a PhD program.
The code is not free of bugs but runs end-to-end.
Any comments and further improvements are welcome at: [email protected]
April 16, 2019.
Geomet Group - Queen's University - Canada"""
# Do not display the AVX message about using GPU
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
#from tensorflow.python.client import device_lib
#print(device_lib.list_local_devices())
#os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID" # see issue #152
#os.environ["CUDA_VISIBLE_DEVICES"]="0"
## #########################
import numpy as np
import tensorflow as tf
import time
import External_Functions_3D as fns_nested
import gc
for ind0 in range(1):
start_time_AllTrain = time.time()
HyperPar = []
HyperPar.append(50) # SGsizex - Num 0
HyperPar.append(50) # SGsizey - Num 1
HyperPar.append(50) # SGsizez - Num 2
HyperPar.append(int(7)) # Search_x - Num 3
HyperPar.append(int(7)) # Search_y - Num 4
HyperPar.append(int(7)) # Search_z - Num 5
HyperPar.append(int(7)) # IPsizex - Num 6
HyperPar.append(int(7)) # IPsizey - Num 7
HyperPar.append(int(7)) # IPsizez - Num 8
HyperPar.append(50) # Percentage of Data Conditioning - Num 9 (divided by 3, so a value of 10 represents 1%)
HyperPar.append(1) # MinDC - Num 10
HyperPar.append(1500) # Num Fully Connected - Num 11
HyperPar.append(3) # wdnh - Num 12
HyperPar.append(16) # convdepth - Num 13
HyperPar.append(2) # num of categories - Num 14
print("SG: ", int(HyperPar[3]),"x",int(HyperPar[4]),"x",int(HyperPar[5]), "IP: ", int(HyperPar[6]),"x",int(HyperPar[7]),"x",int(HyperPar[8]))
Ncicles = 500
Nepoch = 1
#Nbatch = 250
Nsamples = 512
TrainingImage = "TI_Collaboration_1of4_50x50x50_newRepresentation.dat"
LocModel = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13]))
#LocModel = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13]))
LocFile = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13]))
#LocFile = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13]))
print("[Graph]")
#fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D_NoBN(HyperPar=HyperPar, LocModel=LocModel)
fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D(HyperPar=HyperPar, LocModel=LocModel)
# To save the TI
TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=3,Training=False, Padding=True)
TempSimGrid.SavePlot(name=LocModel+'_TI.png', Level=1)
MaxLR, MinLR = 0.01, 0.001
StepLR = 10
PointStart = 1
for indTrain in range(Ncicles):
#HyperPar[9] = np.random.randint(41)+10
cuos = indTrain%(2*StepLR)
if cuos < StepLR:
LearningRate = np.around(((MaxLR - MinLR)/StepLR)*cuos + MinLR, decimals=7)
else:
LearningRate = np.around(((MaxLR - MinLR)/StepLR)*(StepLR - cuos) + MaxLR, decimals=7)
start_time_1 = time.time()
print ("Cicle: {}".format(indTrain+PointStart), "Learning Rate: ", LearningRate)
TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=5, Training=True, Padding=True)
print("[Sim]")
TempSimGrid.Simulate_4ConvNets_BN_3D(LocModel=LocModel, Cicle=(indTrain+PointStart), Plot=True)
print("[Saving Grid]")
TempSimGrid.SaveGrid(file="{}/TrainReas_{}.txt".format(LocFile, indTrain+PointStart))
print("[Train]")
TempSimGrid.Train_4ConvNets_BN_3D(Epochs=Nepoch, Num_samples=Nsamples, LocModel=LocModel, LR=LearningRate)
print("--%s seconds of whole training process-" % (np.around((time.time() - start_time_1), decimals=2)))
gc.collect()
print(" ")
print("--%s minutes of ALL training-" % ((time.time() - start_time_AllTrain)/60)) | [((941, 952), 'time.time', 'time.time', ([], {}), '()\n', (950, 952), False, 'import time\n'), ((3421, 3525), 'External_Functions_3D.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D', 'fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D', ([], {'HyperPar': 'HyperPar', 'LocModel': 'LocModel'}), '(HyperPar=\n HyperPar, LocModel=LocModel)\n', (3482, 3525), True, 'import External_Functions_3D as fns_nested\n'), ((3562, 3660), 'External_Functions_3D.Grid', 'fns_nested.Grid', ([], {'HyperPar': 'HyperPar', 'DBname': 'TrainingImage', 'Lvl': '(3)', 'Training': '(False)', 'Padding': '(True)'}), '(HyperPar=HyperPar, DBname=TrainingImage, Lvl=3, Training=\n False, Padding=True)\n', (3577, 3660), True, 'import External_Functions_3D as fns_nested\n'), ((4101, 4112), 'time.time', 'time.time', ([], {}), '()\n', (4110, 4112), False, 'import time\n'), ((4214, 4311), 'External_Functions_3D.Grid', 'fns_nested.Grid', ([], {'HyperPar': 'HyperPar', 'DBname': 'TrainingImage', 'Lvl': '(5)', 'Training': '(True)', 'Padding': '(True)'}), '(HyperPar=HyperPar, DBname=TrainingImage, Lvl=5, Training=\n True, Padding=True)\n', (4229, 4311), True, 'import External_Functions_3D as fns_nested\n'), ((4786, 4798), 'gc.collect', 'gc.collect', ([], {}), '()\n', (4796, 4798), False, 'import gc\n'), ((3920, 3982), 'numpy.around', 'np.around', (['((MaxLR - MinLR) / StepLR * cuos + MinLR)'], {'decimals': '(7)'}), '((MaxLR - MinLR) / StepLR * cuos + MinLR, decimals=7)\n', (3929, 3982), True, 'import numpy as np\n'), ((4010, 4083), 'numpy.around', 'np.around', (['((MaxLR - MinLR) / StepLR * (StepLR - cuos) + MaxLR)'], {'decimals': '(7)'}), '((MaxLR - MinLR) / StepLR * (StepLR - cuos) + MaxLR, decimals=7)\n', (4019, 4083), True, 'import numpy as np\n'), ((4863, 4874), 'time.time', 'time.time', ([], {}), '()\n', (4872, 4874), False, 'import time\n'), ((4737, 4748), 'time.time', 'time.time', ([], {}), '()\n', (4746, 4748), False, 'import time\n')] |
steuke/django_feature_flags_example | feature_flags_project/feature_flags/providers.py | 00e33378999d6d567c37593c17289405fc7b5847 | import logging
from typing import Dict
from django.http import HttpRequest
logger = logging.getLogger(__name__)
class FeatureFlagProvider:
def is_feature_enabled(self, feature_name: str, user_id: str = None, attributes: Dict = None):
raise NotImplementedError("You must override FeatureFlagProvider.is_feature_enabled()")
def _attributes_from_request(request: HttpRequest) -> Dict:
if not request:
return dict()
attributes = dict()
try:
attributes["is_staff"] = request.user.is_staff
return attributes
except Exception:
logger.exception(
"Unexpected exception while trying to parse http-request for feature-attributes."
)
return dict()
def is_feature_enabled(feature_name: str, request: HttpRequest) -> bool:
from django.conf import settings
is_enabled = False
attributes = _attributes_from_request(request)
try:
is_enabled = settings.FEATURE_FLAG_PROVIDER.is_feature_enabled(
feature_name=feature_name, user_id="dontcare", attributes=attributes
)
logger.info(f"Feature '{feature_name}' is enabled={is_enabled}")
except Exception:
logger.exception(f"Exception while trying to check feature-flag state for '{feature_name}'")
return is_enabled
| [((86, 113), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (103, 113), False, 'import logging\n'), ((946, 1069), 'django.conf.settings.FEATURE_FLAG_PROVIDER.is_feature_enabled', 'settings.FEATURE_FLAG_PROVIDER.is_feature_enabled', ([], {'feature_name': 'feature_name', 'user_id': '"""dontcare"""', 'attributes': 'attributes'}), "(feature_name=feature_name,\n user_id='dontcare', attributes=attributes)\n", (995, 1069), False, 'from django.conf import settings\n')] |
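is_feature_enabled() looks up FEATURE_FLAG_PROVIDER on Django settings, so any object implementing the FeatureFlagProvider interface above can be plugged in. A minimal in-memory provider as an illustration; the class and its flag dict are hypothetical, not part of this repo:

from typing import Dict

class DictFeatureFlagProvider(FeatureFlagProvider):
    """Hypothetical provider backed by a plain dict of flag names."""

    def __init__(self, flags: Dict[str, bool]):
        self.flags = flags

    def is_feature_enabled(self, feature_name: str, user_id: str = None, attributes: Dict = None):
        # Staff users (see _attributes_from_request above) always get the feature.
        if attributes and attributes.get("is_staff"):
            return True
        return self.flags.get(feature_name, False)

# settings.py would then carry, e.g.:
# FEATURE_FLAG_PROVIDER = DictFeatureFlagProvider({"new_banner": True})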
roch1990/aiohttp-blog | src/app/database/__init__.py | 32e7b76b5b293d4517631ea82dfa2b268a1662eb | from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base() | [((64, 82), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (80, 82), False, 'from sqlalchemy.ext.declarative import declarative_base\n')] |
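Base is the declarative base that the app's ORM models would subclass. A hypothetical model for illustration (the Post class is not part of the repo):

from sqlalchemy import Column, Integer, String

from app.database import Base  # the module above, assuming src/ is on the path

class Post(Base):
    __tablename__ = 'posts'

    id = Column(Integer, primary_key=True)
    title = Column(String(200), nullable=False)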
timcera/plottoolbox | src/plottoolbox/functions/kde.py | b5f4b634d366eb5ba244e2f1fd33a7ef0eba7298 | # -*- coding: utf-8 -*-
"""Collection of functions for the manipulation of time series."""
from __future__ import absolute_import, division, print_function
import itertools
import os
import warnings
import mando
import numpy as np
import pandas as pd
from mando.rst_text_formatter import RSTHelpFormatter
from tstoolbox import tsutils
from .. import plotutils
warnings.filterwarnings("ignore")
@mando.command("kde", formatter_class=RSTHelpFormatter, doctype="numpy")
@tsutils.doc(plotutils.ldocstrings)
def kde_cli(
input_ts="-",
columns=None,
start_date=None,
end_date=None,
clean=False,
skiprows=None,
index_type="datetime",
names=None,
ofilename="plot.png",
xtitle="",
ytitle="",
title="",
figsize="10,6.0",
legend=None,
legend_names=None,
subplots=False,
sharex=True,
sharey=False,
colors="auto",
linestyles="auto",
markerstyles=" ",
bar_hatchstyles="auto",
style="auto",
logx=False,
logy=False,
xaxis="arithmetic",
yaxis="arithmetic",
xlim=None,
ylim=None,
secondary_y=False,
mark_right=True,
scatter_matrix_diagonal="kde",
bootstrap_size=50,
bootstrap_samples=500,
norm_xaxis=False,
norm_yaxis=False,
lognorm_xaxis=False,
lognorm_yaxis=False,
xy_match_line="",
grid=False,
label_rotation=None,
label_skip=1,
force_freq=None,
drawstyle="default",
por=False,
invert_xaxis=False,
invert_yaxis=False,
round_index=None,
plotting_position="weibull",
prob_plot_sort_values="descending",
source_units=None,
target_units=None,
lag_plot_lag=1,
plot_styles="bright",
hlines_y=None,
hlines_xmin=None,
hlines_xmax=None,
hlines_colors=None,
hlines_linestyles="-",
vlines_x=None,
vlines_ymin=None,
vlines_ymax=None,
vlines_colors=None,
vlines_linestyles="-",
):
r"""Kernel density estimation of probability density function.
"kde" will create a plot of estimation of the probability density function
based on the data called kernel density estimation (KDE).
{ydata}
Parameters
----------
{input_ts}
ofilename : str
[optional, defaults to 'plot.png']
Output filename for the plot. Extension defines
the type, for example 'filename.png' will create a PNG file.
If used within Python, and `ofilename` is None will return the
Matplotlib figure that can then be changed or added to as
needed.
lag_plot_lag
[optional, default to 1]
The lag used if ``type`` "lag_plot" is chosen.
xtitle : str
[optional, default depends on ``type``]
Title of x-axis.
ytitle : str
[optional, default depends on ``type``]
Title of y-axis.
title : str
[optional, defaults to '']
Title of chart.
figsize : str
[optional, defaults to '10,6.0']
The 'width,height' of plot in inches.
legend
[optional, defaults to True]
Whether to display the legend.
legend_names : str
[optional, defaults to None]
Legend would normally use the time-series names associated with
the input data. The 'legend_names' option allows you to
override the names in the data set. You must supply a comma
separated list of strings for each time-series in the data set.
subplots
[optional, defaults to False]
Make separate subplots for each time series.
sharex
[optional, default to True]
In case subplots=True, share x axis.
sharey
[optional, default to False]
In case subplots=True, share y axis.
colors
[optional, default is 'auto']
The default 'auto' will cycle through matplotlib colors in the chosen
style.
At the command line supply a comma separated matplotlib
color codes, or within Python a list of color code strings.
Can identify colors in four different ways.
1. Use 'CN' where N is a number from 0 to 9 that gets the Nth color
from the current style.
2. Single character code from the table below.
+------+---------+
| Code | Color |
+======+=========+
| b | blue |
+------+---------+
| g | green |
+------+---------+
| r | red |
+------+---------+
| c | cyan |
+------+---------+
| m | magenta |
+------+---------+
| y | yellow |
+------+---------+
| k | black |
+------+---------+
3. Number between 0 and 1 that represents the level of gray, where 0 is
white and 1 is black.
4. Any of the HTML color names.
+------------------+
| HTML Color Names |
+==================+
| red |
+------------------+
| burlywood |
+------------------+
| chartreuse |
+------------------+
| ...etc. |
+------------------+
Color reference:
http://matplotlib.org/api/colors_api.html
linestyles
[optional, default to 'auto']
If 'auto' will iterate through the available matplotlib line types.
Otherwise on the command line a comma separated list, or a list of
strings if using the Python API.
To not display lines use a space (' ') as the linestyle code.
Separated 'colors', 'linestyles', and 'markerstyles' instead of using
the 'style' keyword.
+---------+--------------+
| Code | Lines |
+=========+==============+
| ``-`` | solid |
+---------+--------------+
| -- | dashed |
+---------+--------------+
| -. | dash_dot |
+---------+--------------+
| : | dotted |
+---------+--------------+
| None | draw nothing |
+---------+--------------+
| ' ' | draw nothing |
+---------+--------------+
| '' | draw nothing |
+---------+--------------+
Line reference:
http://matplotlib.org/api/artist_api.html
markerstyles
[optional, default to ' ']
The default ' ' will not plot a marker. If 'auto' will iterate through
the available matplotlib marker types. Otherwise on the command line
a comma separated list, or a list of strings if using the Python API.
Separated 'colors', 'linestyles', and 'markerstyles' instead of using
the 'style' keyword.
+-------+----------------+
| Code | Markers |
+=======+================+
| . | point |
+-------+----------------+
| o | circle |
+-------+----------------+
| v | triangle down |
+-------+----------------+
| ^ | triangle up |
+-------+----------------+
| < | triangle left |
+-------+----------------+
| > | triangle right |
+-------+----------------+
| 1 | tri_down |
+-------+----------------+
| 2 | tri_up |
+-------+----------------+
| 3 | tri_left |
+-------+----------------+
| 4 | tri_right |
+-------+----------------+
| 8 | octagon |
+-------+----------------+
| s | square |
+-------+----------------+
| p | pentagon |
+-------+----------------+
| ``*`` | star |
+-------+----------------+
| h | hexagon1 |
+-------+----------------+
| H | hexagon2 |
+-------+----------------+
| ``+`` | plus |
+-------+----------------+
| x | x |
+-------+----------------+
| D | diamond |
+-------+----------------+
| d | thin diamond |
+-------+----------------+
| _ | hline |
+-------+----------------+
| None | nothing |
+-------+----------------+
| ' ' | nothing |
+-------+----------------+
| '' | nothing |
+-------+----------------+
Marker reference:
http://matplotlib.org/api/markers_api.html
style
[optional, default is None]
Still available, but if None is replaced by 'colors', 'linestyles', and
'markerstyles' options. Currently the 'style' option will override the
others.
Comma separated matplotlib style strings per time-series. Just
combine codes in 'ColorMarkerLine' order, for example 'r*--' is
a red dashed line with star marker.
bar_hatchstyles
[optional, default to "auto", only used if type equal to "bar", "barh",
"bar_stacked", and "barh_stacked"]
If 'auto' will iterate through the available matplotlib hatch types.
Otherwise on the command line a comma separated list, or a list of
strings if using the Python API.
+-----------------+-------------------+
| bar_hatchstyles | Description |
+=================+===================+
| / | diagonal hatching |
+-----------------+-------------------+
| ``\`` | back diagonal |
+-----------------+-------------------+
| ``|`` | vertical |
+-----------------+-------------------+
| - | horizontal |
+-----------------+-------------------+
| + | crossed |
+-----------------+-------------------+
| x | crossed diagonal |
+-----------------+-------------------+
| o | small circle |
+-----------------+-------------------+
| O | large circle |
+-----------------+-------------------+
| . | dots |
+-----------------+-------------------+
| * | stars |
+-----------------+-------------------+
logx
DEPRECATED: use '--xaxis="log"' instead.
logy
DEPRECATED: use '--yaxis="log"' instead.
xlim
[optional, default is based on range of x values]
Comma separated lower and upper limits for the x-axis of the
plot. For example, '--xlim 1,1000' would limit the plot from
1 to 1000, where '--xlim ,1000' would base the lower limit on
the data and set the upper limit to 1000.
ylim
[optional, default is based on range of y values]
Comma separated lower and upper limits for the y-axis of the
plot. See `xlim` for examples.
xaxis : str
[optional, default is 'arithmetic']
Defines the type of the xaxis. One of 'arithmetic', 'log'.
yaxis : str
[optional, default is 'arithmetic']
Defines the type of the yaxis. One of 'arithmetic', 'log'.
secondary_y
[optional, default is False]
Whether to plot on the secondary y-axis. If a list/tuple, which
time-series to plot on secondary y-axis.
mark_right
[optional, default is True]
When using a secondary_y axis, should the legend label the axis of the
various time-series automatically.
scatter_matrix_diagonal : str
[optional, defaults to 'kde']
If plot type is 'scatter_matrix', this specifies the plot along the
diagonal. One of 'kde' for Kernel Density Estimation or 'hist'
for a histogram.
bootstrap_size : int
[optional, defaults to 50]
The size of the random subset for 'bootstrap' plot.
bootstrap_samples
[optional, defaults to 500]
The number of random subsets of 'bootstrap_size'.
norm_xaxis
DEPRECATED: use '--type="norm_xaxis"' instead.
norm_yaxis
DEPRECATED: use '--type="norm_yaxis"' instead.
lognorm_xaxis
DEPRECATED: use '--type="lognorm_xaxis"' instead.
lognorm_yaxis
DEPRECATED: use '--type="lognorm_yaxis"' instead.
xy_match_line : str
[optional, defaults is '']
Will add a match line where x == y. Set to a line style code.
grid
[optional, default is False]
Whether to plot grid lines on the major ticks.
label_rotation : int
[optional]
Rotation for major labels for bar plots.
label_skip : int
[optional]
Skip for major labels for bar plots.
drawstyle : str
[optional, default is 'default']
'default' connects the points with lines. The
steps variants produce step-plots. 'steps' is equivalent to 'steps-pre'
and is maintained for backward-compatibility.
ACCEPTS::
['default' | 'steps' | 'steps-pre' | 'steps-mid' | 'steps-post']
por
[optional]
Plot from first good value to last good value. Strips NANs
from beginning and end.
{force_freq}
invert_xaxis
[optional, default is False]
Invert the x-axis.
invert_yaxis
[optional, default is False]
Invert the y-axis.
plotting_position : str
[optional, default is 'weibull']
{plotting_position_table}
Only used for norm_xaxis, norm_yaxis, lognorm_xaxis,
lognorm_yaxis, weibull_xaxis, and weibull_yaxis.
prob_plot_sort_values : str
[optional, default is 'descending']
How to sort the values for the probability plots.
Only used for norm_xaxis, norm_yaxis, lognorm_xaxis,
lognorm_yaxis, weibull_xaxis, and weibull_yaxis.
{columns}
{start_date}
{end_date}
{clean}
{skiprows}
{index_type}
{names}
{source_units}
{target_units}
{round_index}
plot_styles: str
[optional, default is "default"]
Set the style of the plot. One or more of Matplotlib styles "classic",
"Solarize_Light2", "bmh", "dark_background", "fast", "fivethirtyeight",
"ggplot", "grayscale", "seaborn", "seaborn-bright",
"seaborn-colorblind", "seaborn-dark", "seaborn-dark-palette",
"seaborn-darkgrid", "seaborn-deep", "seaborn-muted",
"seaborn-notebook", "seaborn-paper", "seaborn-pastel",
"seaborn-poster", "seaborn-talk", "seaborn-ticks", "seaborn-white",
"seaborn-whitegrid", "tableau-colorblind10", and
SciencePlots styles "science", "grid", "ieee", "scatter", "notebook",
"high-vis", "bright", "vibrant", "muted", and "retro".
If multiple styles then each over rides some or all of the
characteristics of the previous.
Color Blind Appropriate Styles
The styles "seaborn-colorblind", "tableau-colorblind10", "bright",
"vibrant", and "muted" are all styles that are setup to be able to be
distinguished by someone with color blindness.
Black, White, and Gray Styles
The "ieee" style is appropriate for black, white, and gray, however the
"ieee" also will change the chart size to fit in a column of the "IEEE"
journal.
The "grayscale" is another style useful for photo-copyable black,
white, and gray.
Matplotlib styles:
https://matplotlib.org/3.3.1/gallery/style_sheets/style_sheets_reference.html
SciencePlots styles:
https://github.com/garrettj403/SciencePlots
hlines_y:
[optional, defaults to None]
Number or list of y values where to place a horizontal line.
hlines_xmin:
[optional, defaults to None]
List of minimum x values to start the horizontal line. If a list must
be same length as `hlines_y`. If a single number will be used as the
minimum x values for all horizontal lines. A missing value or None
will start at the minimum x value for the entire plot.
hlines_xmax:
[optional, defaults to None]
List of maximum x values to end each horizontal line. If a list must
be same length as `hlines_y`. If a single number will be the maximum
x value for all horizontal lines. A missing value or None will end at
the maximum x value for the entire plot.
hlines_colors:
[optional, defaults to None]
List of colors for the horizontal lines. If a single color then will
be used as the color for all horizontal lines. If a list must be same
length as `hlines_y`. If None will take from the color palette in the
current plot style.
hlines_linestyles:
[optional, defaults to None]
List of linestyles for the horizontal lines. If a single linestyle
then will be used as the linestyle for all horizontal lines. If a list
must be same length as `hlines_y`. If None will take from the standard
linestyles list.
vlines_x:
[optional, defaults to None]
List of x values where to place a vertical line.
vlines_ymin:
[optional, defaults to None]
List of minimum y values to start the vertical line. If a list must be
same length as `vlines_x`. If a single number will be used as the
minimum y value for all vertical lines. A missing value or None will
start at the minimum y value for the entire plot.
vlines_ymax:
[optional, defaults to None]
List of maximum y values to end each vertical line. If a list must be
same length as `vlines_x`. If a single number will be the maximum
y value for all vertical lines. A missing value or None will end at
the maximum y value for the entire plot.
vlines_colors:
[optional, defaults to None]
List of colors for the vertical lines. If a single color then will be
used as the color for all vertical lines. If a list must be same
length as `vlines_x`. If None will take from the color palette in the
current plot style.
vlines_linestyles:
[optional, defaults to None]
List of linestyles for the vertical lines. If a single linestyle then
will be used as the linestyle for all vertical lines. If a list must
be same length as `vlines_x`. If None will take from the standard
linestyles list.
"""
plt = kde(
input_ts=input_ts,
columns=columns,
start_date=start_date,
end_date=end_date,
clean=clean,
skiprows=skiprows,
index_type=index_type,
names=names,
ofilename=ofilename,
xtitle=xtitle,
ytitle=ytitle,
title=title,
figsize=figsize,
legend=legend,
legend_names=legend_names,
subplots=subplots,
sharex=sharex,
sharey=sharey,
colors=colors,
linestyles=linestyles,
markerstyles=markerstyles,
bar_hatchstyles=bar_hatchstyles,
style=style,
logx=logx,
logy=logy,
xaxis=xaxis,
yaxis=yaxis,
xlim=xlim,
ylim=ylim,
secondary_y=secondary_y,
mark_right=mark_right,
scatter_matrix_diagonal=scatter_matrix_diagonal,
bootstrap_size=bootstrap_size,
bootstrap_samples=bootstrap_samples,
norm_xaxis=norm_xaxis,
norm_yaxis=norm_yaxis,
lognorm_xaxis=lognorm_xaxis,
lognorm_yaxis=lognorm_yaxis,
xy_match_line=xy_match_line,
grid=grid,
label_rotation=label_rotation,
label_skip=label_skip,
force_freq=force_freq,
drawstyle=drawstyle,
por=por,
invert_xaxis=invert_xaxis,
invert_yaxis=invert_yaxis,
round_index=round_index,
plotting_position=plotting_position,
prob_plot_sort_values=prob_plot_sort_values,
source_units=source_units,
target_units=target_units,
lag_plot_lag=lag_plot_lag,
plot_styles=plot_styles,
hlines_y=hlines_y,
hlines_xmin=hlines_xmin,
hlines_xmax=hlines_xmax,
hlines_colors=hlines_colors,
hlines_linestyles=hlines_linestyles,
vlines_x=vlines_x,
vlines_ymin=vlines_ymin,
vlines_ymax=vlines_ymax,
vlines_colors=vlines_colors,
vlines_linestyles=vlines_linestyles,
)
# @tsutils.validator(
# ofilename=[str, ["pass", []], 1],
# type=[str, ["domain", ["kde",],], 1,],
# lag_plot_lag=[int, ["range", [1, None]], 1],
# xtitle=[str, ["pass", []], 1],
# ytitle=[str, ["pass", []], 1],
# title=[str, ["pass", []], 1],
# figsize=[float, ["range", [0, None]], 2],
# legend=[bool, ["domain", [True, False]], 1],
# legend_names=[str, ["pass", []], 1],
# subplots=[bool, ["domain", [True, False]], 1],
# sharex=[bool, ["domain", [True, False]], 1],
# sharey=[bool, ["domain", [True, False]], 1],
# colors=[str, ["pass", []], None],
# linestyles=[str, ["domain", ["auto", None, "", " ", " "] + plotutils.LINE_LIST], None],
# markerstyles=[str, ["domain", ["auto", None, "", " ", " "] + plotutils.MARKER_LIST], None],
# bar_hatchstyles=[str, ["domain", ["auto", None, "", " ", " "] + plotutils.HATCH_LIST], None],
# style=[str, ["pass", []], None],
# xlim=[float, ["pass", []], 2],
# ylim=[float, ["pass", []], 2],
# xaxis=[str, ["domain", ["arithmetic", "log"]], 1],
# yaxis=[str, ["domain", ["arithmetic", "log"]], 1],
# secondary_y=[bool, ["domain", [True, False]], 1],
# mark_right=[bool, ["domain", [True, False]], 1],
# scatter_matrix_diagonal=[str, ["domain", ["kde", "hist"]], 1],
# bootstrap_size=[int, ["range", [0, None]], 1],
# xy_match_line=[str, ["pass", []], 1],
# grid=[bool, ["domain", [True, False]], 1],
# label_rotation=[float, ["pass", []], 1],
# label_skip=[int, ["range", [1, None]], 1],
# drawstyle=[str, ["pass", []], 1],
# por=[bool, ["domain", [True, False]], 1],
# invert_xaxis=[bool, ["domain", [True, False]], 1],
# invert_yaxis=[bool, ["domain", [True, False]], 1],
# plotting_position=[
# str,
# [
# "domain",
# ["weibull", "benard", "tukey", "gumbel", "hazen", "cunnane", "california"],
# ],
# 1,
# ],
# prob_plot_sort_values=[str, ["domain", ["ascending", "descending"]], 1],
# plot_styles=[
# str,
# [
# "domain",
# [
# "classic",
# "Solarize_Light2",
# "bmh",
# "dark_background",
# "fast",
# "fivethirtyeight",
# "ggplot",
# "grayscale",
# "seaborn",
# "seaborn-bright",
# "seaborn-colorblind",
# "seaborn-dark",
# "seaborn-dark-palette",
# "seaborn-darkgrid",
# "seaborn-deep",
# "seaborn-muted",
# "seaborn-notebook",
# "seaborn-paper",
# "seaborn-pastel",
# "seaborn-poster",
# "seaborn-talk",
# "seaborn-ticks",
# "seaborn-white",
# "seaborn-whitegrid",
# "tableau-colorblind10",
# "science",
# "grid",
# "ieee",
# "scatter",
# "notebook",
# "high-vis",
# "bright",
# "vibrant",
# "muted",
# "retro",
# ],
# ],
# None,
# ],
# hlines_y=[float, ["pass", []], None],
# hlines_xmin=[float, ["pass", []], None],
# hlines_xmax=[float, ["pass", []], None],
# hlines_colors=[str, ["pass", []], None],
# hlines_linestyles=[
# str,
# ["domain", ["auto", None, "", " ", " "] + plotutils.LINE_LIST],
# None,
# ],
# vlines_x=[float, ["pass", []], None],
# vlines_ymin=[float, ["pass", []], None],
# vlines_ymax=[float, ["pass", []], None],
# vlines_colors=[str, ["pass", []], None],
# vlines_linestyles=[
# str,
# ["domain", ["auto", None, "", " ", " "] + plotutils.LINE_LIST],
# None,
# ],
# )
def kde(
input_ts="-",
columns=None,
start_date=None,
end_date=None,
clean=False,
skiprows=None,
index_type="datetime",
names=None,
ofilename="plot.png",
xtitle="",
ytitle="",
title="",
figsize="10,6.0",
legend=None,
legend_names=None,
subplots=False,
sharex=True,
sharey=False,
colors="auto",
linestyles="auto",
markerstyles=" ",
bar_hatchstyles="auto",
style="auto",
logx=False,
logy=False,
xaxis="arithmetic",
yaxis="arithmetic",
xlim=None,
ylim=None,
secondary_y=False,
mark_right=True,
scatter_matrix_diagonal="kde",
bootstrap_size=50,
bootstrap_samples=500,
norm_xaxis=False,
norm_yaxis=False,
lognorm_xaxis=False,
lognorm_yaxis=False,
xy_match_line="",
grid=False,
label_rotation=None,
label_skip=1,
force_freq=None,
drawstyle="default",
por=False,
invert_xaxis=False,
invert_yaxis=False,
round_index=None,
plotting_position="weibull",
prob_plot_sort_values="descending",
source_units=None,
target_units=None,
lag_plot_lag=1,
plot_styles="bright",
hlines_y=None,
hlines_xmin=None,
hlines_xmax=None,
hlines_colors=None,
hlines_linestyles="-",
vlines_x=None,
vlines_ymin=None,
vlines_ymax=None,
vlines_colors=None,
vlines_linestyles="-",
**kwds,
):
r"""Plot data."""
# Need to work around some old option defaults with the implementation of
# mando
legend = bool(legend == "" or legend == "True" or legend is None)
type = "kde"
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
from matplotlib.ticker import FixedLocator
tsd = tsutils.common_kwds(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna="all",
source_units=source_units,
target_units=target_units,
clean=clean,
por=por,
)
tsd, lnames = plotutils.check(type, tsd, legend_names)
# This is to help pretty print the frequency
try:
try:
pltfreq = str(tsd.index.freq, "utf-8").lower()
except TypeError:
pltfreq = str(tsd.index.freq).lower()
if pltfreq.split(" ")[0][1:] == "1":
beginstr = 3
else:
beginstr = 1
if pltfreq == "none":
short_freq = ""
else:
# short freq string (day) OR (2 day)
short_freq = "({})".format(pltfreq[beginstr:-1])
except AttributeError:
short_freq = ""
if colors == "auto":
colors = None
else:
colors = tsutils.make_list(colors)
if linestyles == "auto":
linestyles = plotutils.LINE_LIST
else:
linestyles = tsutils.make_list(linestyles)
if bar_hatchstyles == "auto":
bar_hatchstyles = plotutils.HATCH_LIST
else:
bar_hatchstyles = tsutils.make_list(bar_hatchstyles)
if markerstyles == "auto":
markerstyles = plotutils.MARKER_LIST
else:
markerstyles = tsutils.make_list(markerstyles)
if markerstyles is None:
markerstyles = " "
if style != "auto":
nstyle = tsutils.make_list(style)
if len(nstyle) != len(tsd.columns):
raise ValueError(
tsutils.error_wrapper(
"""
You have to have the same number of style strings as time-series to plot.
You supplied '{}' for style which has {} style strings,
but you have {} time-series.
""".format(
style, len(nstyle), len(tsd.columns)
)
)
)
colors = []
markerstyles = []
linestyles = []
for st in nstyle:
colors.append(st[0])
if len(st) == 1:
markerstyles.append(" ")
linestyles.append("-")
continue
if st[1] in plotutils.MARKER_LIST:
markerstyles.append(st[1])
try:
linestyles.append(st[2:])
except IndexError:
linestyles.append(" ")
else:
markerstyles.append(" ")
linestyles.append(st[1:])
if linestyles is None:
linestyles = [" "]
else:
linestyles = [" " if i in [" ", None] else i for i in linestyles]
markerstyles = [" " if i is None else i for i in markerstyles]
if colors is not None:
icolors = itertools.cycle(colors)
else:
icolors = None
imarkerstyles = itertools.cycle(markerstyles)
ilinestyles = itertools.cycle(linestyles)
# Only for bar, barh, bar_stacked, and barh_stacked.
ibar_hatchstyles = itertools.cycle(bar_hatchstyles)
if (
logx is True
or logy is True
or norm_xaxis is True
or norm_yaxis is True
or lognorm_xaxis is True
or lognorm_yaxis is True
):
warnings.warn(
"""
*
* The --logx, --logy, --norm_xaxis, --norm_yaxis, --lognorm_xaxis, and
* --lognorm_yaxis options are deprecated.
*
* For --logx use --xaxis="log"
* For --logy use --yaxis="log"
* For --norm_xaxis use --type="norm_xaxis"
* For --norm_yaxis use --type="norm_yaxis"
* For --lognorm_xaxis use --type="lognorm_xaxis"
* For --lognorm_yaxis use --type="lognorm_yaxis"
*
"""
)
if xaxis == "log":
logx = True
if yaxis == "log":
logy = True
xlim = plotutils.know_your_limits(xlim, axis=xaxis)
ylim = plotutils.know_your_limits(ylim, axis=yaxis)
plot_styles = tsutils.make_list(plot_styles) + ["no-latex"]
style_loc = os.path.join(
os.path.dirname(__file__), os.pardir, "SciencePlots_styles"
)
plot_styles = [
os.path.join(style_loc, i + ".mplstyle")
if os.path.exists(os.path.join(style_loc, i + ".mplstyle"))
else i
for i in plot_styles
]
plt.style.use(plot_styles)
figsize = tsutils.make_list(figsize, n=2)
_, ax = plt.subplots(figsize=figsize)
if type in ["kde", "probability_density"]:
ax = tsd.plot.kde(
legend=legend,
subplots=subplots,
sharex=sharex,
sharey=sharey,
style=None,
logx=logx,
logy=logy,
xlim=xlim,
ylim=ylim,
secondary_y=secondary_y,
figsize=figsize,
)
for index, line in enumerate(ax.lines):
if icolors is not None:
c = next(icolors)
else:
c = None
if imarkerstyles is not None:
m = next(imarkerstyles)
else:
m = None
if ilinestyles is not None:
l = next(ilinestyles)
else:
l = None
if c is not None:
plt.setp(line, color=c)
plt.setp(line, marker=m)
plt.setp(line, linestyle=l)
ytitle = ytitle or "Density"
if legend is True:
plt.legend(loc="best")
if hlines_y is not None:
hlines_y = tsutils.make_list(hlines_y)
hlines_xmin = tsutils.make_list(hlines_xmin)
hlines_xmax = tsutils.make_list(hlines_xmax)
hlines_colors = tsutils.make_list(hlines_colors)
hlines_linestyles = tsutils.make_list(hlines_linestyles)
nxlim = ax.get_xlim()
if hlines_xmin is None:
hlines_xmin = nxlim[0]
if hlines_xmax is None:
hlines_xmax = nxlim[1]
if vlines_x is not None:
vlines_x = tsutils.make_list(vlines_x)
vlines_ymin = tsutils.make_list(vlines_ymin)
vlines_ymax = tsutils.make_list(vlines_ymax)
vlines_colors = tsutils.make_list(vlines_colors)
vlines_linestyles = tsutils.make_list(vlines_linestyles)
nylim = ax.get_ylim()
if vlines_ymin is None:
vlines_ymin = nylim[0]
if vlines_ymax is None:
vlines_ymax = nylim[1]
if type in [
"time",
"xy",
"bar",
"bar_stacked",
"histogram",
"norm_xaxis",
"lognorm_xaxis",
"weibull_xaxis",
"norm_yaxis",
"lognorm_yaxis",
"weibull_yaxis",
]:
if hlines_y is not None:
if type in ["norm_yaxis", "lognorm_yaxis", "weibull_yaxis"]:
hlines_y = ppf(tsutils.make_list(hlines_y))
plt.hlines(
hlines_y,
hlines_xmin,
hlines_xmax,
colors=hlines_colors,
linestyles=hlines_linestyles,
)
if vlines_x is not None:
if type in ["norm_xaxis", "lognorm_xaxis", "weibull_xaxis"]:
vlines_x = ppf(tsutils.make_list(vlines_x))
plt.vlines(
vlines_x,
vlines_ymin,
vlines_ymax,
colors=vlines_colors,
linestyles=vlines_linestyles,
)
plt.xlabel(xtitle)
plt.ylabel(ytitle)
if invert_xaxis is True:
plt.gca().invert_xaxis()
if invert_yaxis is True:
plt.gca().invert_yaxis()
plt.grid(grid)
plt.title(title)
plt.tight_layout()
if ofilename is not None:
plt.savefig(ofilename)
return plt
kde.__doc__ = kde_cli.__doc__
| [((365, 398), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (388, 398), False, 'import warnings\n'), ((402, 473), 'mando.command', 'mando.command', (['"""kde"""'], {'formatter_class': 'RSTHelpFormatter', 'doctype': '"""numpy"""'}), "('kde', formatter_class=RSTHelpFormatter, doctype='numpy')\n", (415, 473), False, 'import mando\n'), ((475, 509), 'tstoolbox.tsutils.doc', 'tsutils.doc', (['plotutils.ldocstrings'], {}), '(plotutils.ldocstrings)\n', (486, 509), False, 'from tstoolbox import tsutils\n'), ((26321, 26342), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (26335, 26342), False, 'import matplotlib\n'), ((26437, 26705), 'tstoolbox.tsutils.common_kwds', 'tsutils.common_kwds', (['input_ts'], {'skiprows': 'skiprows', 'names': 'names', 'index_type': 'index_type', 'start_date': 'start_date', 'end_date': 'end_date', 'pick': 'columns', 'round_index': 'round_index', 'dropna': '"""all"""', 'source_units': 'source_units', 'target_units': 'target_units', 'clean': 'clean', 'por': 'por'}), "(input_ts, skiprows=skiprows, names=names, index_type=\n index_type, start_date=start_date, end_date=end_date, pick=columns,\n round_index=round_index, dropna='all', source_units=source_units,\n target_units=target_units, clean=clean, por=por)\n", (26456, 26705), False, 'from tstoolbox import tsutils\n'), ((29424, 29453), 'itertools.cycle', 'itertools.cycle', (['markerstyles'], {}), '(markerstyles)\n', (29439, 29453), False, 'import itertools\n'), ((29472, 29499), 'itertools.cycle', 'itertools.cycle', (['linestyles'], {}), '(linestyles)\n', (29487, 29499), False, 'import itertools\n'), ((29581, 29613), 'itertools.cycle', 'itertools.cycle', (['bar_hatchstyles'], {}), '(bar_hatchstyles)\n', (29596, 29613), False, 'import itertools\n'), ((30796, 30822), 'matplotlib.pyplot.style.use', 'plt.style.use', (['plot_styles'], {}), '(plot_styles)\n', (30809, 30822), True, 'import matplotlib.pyplot as plt\n'), ((30838, 30869), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['figsize'], {'n': '(2)'}), '(figsize, n=2)\n', (30855, 30869), False, 'from tstoolbox import tsutils\n'), ((30882, 30911), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (30894, 30911), True, 'import matplotlib.pyplot as plt\n'), ((33887, 33905), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['xtitle'], {}), '(xtitle)\n', (33897, 33905), True, 'import matplotlib.pyplot as plt\n'), ((33910, 33928), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['ytitle'], {}), '(ytitle)\n', (33920, 33928), True, 'import matplotlib.pyplot as plt\n'), ((34059, 34073), 'matplotlib.pyplot.grid', 'plt.grid', (['grid'], {}), '(grid)\n', (34067, 34073), True, 'import matplotlib.pyplot as plt\n'), ((34079, 34095), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (34088, 34095), True, 'import matplotlib.pyplot as plt\n'), ((34100, 34118), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (34116, 34118), True, 'import matplotlib.pyplot as plt\n'), ((27488, 27513), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['colors'], {}), '(colors)\n', (27505, 27513), False, 'from tstoolbox import tsutils\n'), ((27616, 27645), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['linestyles'], {}), '(linestyles)\n', (27633, 27645), False, 'from tstoolbox import tsutils\n'), ((27764, 27798), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['bar_hatchstyles'], {}), '(bar_hatchstyles)\n', (27781, 27798), False, 'from tstoolbox import tsutils\n'), ((27909, 27940), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['markerstyles'], {}), '(markerstyles)\n', (27926, 27940), False, 'from tstoolbox import tsutils\n'), ((28048, 28072), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['style'], {}), '(style)\n', (28065, 28072), False, 'from tstoolbox import tsutils\n'), ((29347, 29370), 'itertools.cycle', 'itertools.cycle', (['colors'], {}), '(colors)\n', (29362, 29370), False, 'import itertools\n'), ((29810, 30223), 'warnings.warn', 'warnings.warn', (['"""\n*\n* The --logx, --logy, --norm_xaxis, --norm_yaxis, --lognorm_xaxis, and\n* --lognorm_yaxis options are deprecated.\n*\n* For --logx use --xaxis="log"\n* For --logy use --yaxis="log"\n* For --norm_xaxis use --type="norm_xaxis"\n* For --norm_yaxis use --type="norm_yaxis"\n* For --lognorm_xaxis use --type="lognorm_xaxis"\n* For --lognorm_yaxis use --type="lognorm_yaxis"\n*\n"""'], {}), '(\n"""\n*\n* The --logx, --logy, --norm_xaxis, --norm_yaxis, --lognorm_xaxis, and\n* --lognorm_yaxis options are deprecated.\n*\n* For --logx use --xaxis="log"\n* For --logy use --yaxis="log"\n* For --norm_xaxis use --type="norm_xaxis"\n* For --norm_yaxis use --type="norm_yaxis"\n* For --lognorm_xaxis use --type="lognorm_xaxis"\n* For --lognorm_yaxis use --type="lognorm_yaxis"\n*\n"""\n    )\n', (29823, 30223), False, 'import warnings\n'), ((30455, 30485), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['plot_styles'], {}), '(plot_styles)\n', (30472, 30485), False, 'from tstoolbox import tsutils\n'), ((30539, 30564), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (30554, 30564), False, 'import os\n'), ((31993, 32020), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['hlines_y'], {}), '(hlines_y)\n', (32010, 32020), False, 'from tstoolbox import tsutils\n'), ((32043, 32073), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['hlines_xmin'], {}), '(hlines_xmin)\n', (32060, 32073), False, 'from tstoolbox import tsutils\n'), ((32096, 32126), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['hlines_xmax'], {}), '(hlines_xmax)\n', (32113, 32126), False, 'from tstoolbox import tsutils\n'), ((32151, 32183), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['hlines_colors'], {}), '(hlines_colors)\n', (32168, 32183), False, 'from tstoolbox import tsutils\n'), ((32212, 32248), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['hlines_linestyles'], {}), '(hlines_linestyles)\n', (32229, 32248), False, 'from tstoolbox import tsutils\n'), ((32461, 32488), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['vlines_x'], {}), '(vlines_x)\n', (32478, 32488), False, 'from tstoolbox import tsutils\n'), ((32511, 32541), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['vlines_ymin'], {}), '(vlines_ymin)\n', (32528, 32541), False, 'from tstoolbox import tsutils\n'), ((32564, 32594), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['vlines_ymax'], {}), '(vlines_ymax)\n', (32581, 32594), False, 'from tstoolbox import tsutils\n'), ((32619, 32651), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['vlines_colors'], {}), '(vlines_colors)\n', (32636, 32651), False, 'from tstoolbox import tsutils\n'), ((32680, 32716), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['vlines_linestyles'], {}), '(vlines_linestyles)\n', (32697, 32716), False, 'from tstoolbox import tsutils\n'), ((34157, 34179), 'matplotlib.pyplot.savefig', 'plt.savefig', (['ofilename'], {}), '(ofilename)\n', (34168, 34179), True, 'import matplotlib.pyplot as plt\n'), ((30633, 30673), 'os.path.join', 'os.path.join', (['style_loc', "(i + '.mplstyle')"], {}), "(style_loc, i + '.mplstyle')\n", (30645, 30673), False, 'import os\n'), ((31780, 31804), 'matplotlib.pyplot.setp', 'plt.setp', (['line'], {'marker': 'm'}), '(line, marker=m)\n', (31788, 31804), True, 'import matplotlib.pyplot as plt\n'), ((31817, 31844), 'matplotlib.pyplot.setp', 'plt.setp', (['line'], {'linestyle': 'l'}), '(line, linestyle=l)\n', (31825, 31844), True, 'import matplotlib.pyplot as plt\n'), ((31921, 31943), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""'}), "(loc='best')\n", (31931, 31943), True, 'import matplotlib.pyplot as plt\n'), ((33316, 33418), 'matplotlib.pyplot.hlines', 'plt.hlines', (['hlines_y', 'hlines_xmin', 'hlines_xmax'], {'colors': 'hlines_colors', 'linestyles': 'hlines_linestyles'}), '(hlines_y, hlines_xmin, hlines_xmax, colors=hlines_colors,\n linestyles=hlines_linestyles)\n', (33326, 33418), True, 'import matplotlib.pyplot as plt\n'), ((33688, 33790), 'matplotlib.pyplot.vlines', 'plt.vlines', (['vlines_x', 'vlines_ymin', 'vlines_ymax'], {'colors': 'vlines_colors', 'linestyles': 'vlines_linestyles'}), '(vlines_x, vlines_ymin, vlines_ymax, colors=vlines_colors,\n linestyles=vlines_linestyles)\n', (33698, 33790), True, 'import matplotlib.pyplot as plt\n'), ((30700, 30740), 'os.path.join', 'os.path.join', (['style_loc', "(i + '.mplstyle')"], {}), "(style_loc, i + '.mplstyle')\n", (30712, 30740), False, 'import os\n'), ((31744, 31767), 'matplotlib.pyplot.setp', 'plt.setp', (['line'], {'color': 'c'}), '(line, color=c)\n', (31752, 31767), True, 'import matplotlib.pyplot as plt\n'), ((33967, 33976), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (33974, 33976), True, 'import matplotlib.pyplot as plt\n'), ((34029, 34038), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (34036, 34038), True, 'import matplotlib.pyplot as plt\n'), ((33275, 33302), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['hlines_y'], {}), '(hlines_y)\n', (33292, 33302), False, 'from tstoolbox import tsutils\n'), ((33647, 33674), 'tstoolbox.tsutils.make_list', 'tsutils.make_list', (['vlines_x'], {}), '(vlines_x)\n', (33664, 33674), False, 'from tstoolbox import tsutils\n')] |
3verlyn/DL-abstract-argumentation | src/models/GNN.py | 885e442077f5f8e576092c6648077e00ceb79dff | from collections import OrderedDict
import torch
import torch.nn as nn
from torch_geometric.data.batch import Batch
class GNN(nn.Module):
def __init__(self, mp_steps, **config):
super().__init__()
self.mp_steps = mp_steps
self.update_fns = self.assign_update_fns()
self.readout_fns = self.assign_readout_fns()
def assign_update_fns(self) -> OrderedDict:
raise NotImplementedError
def assign_readout_fns(self) -> dict:
raise NotImplementedError
def forward(self, batch: Batch, output_all_steps=True):
edge_index = batch.edge_index
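        # per-graph node counts from the batch assignment vector (if batched);
        # forwarded to the update functions at every message-passing step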
sections = (
torch.bincount(batch.batch).tolist() if hasattr(batch, "batch") else None
)
hiddens = self.initialize(batch)
del batch
# update attributes with update and aggregation step
outputs = {element: [] for element in self.readout_fns.keys()}
for step in range(self.mp_steps):
hiddens = self.step(edge_index=edge_index, sections=sections, **hiddens)
if not output_all_steps and (step + 1) != self.mp_steps:
continue
for element, readout_fn in self.readout_fns.items():
outputs[element].append(readout_fn(**hiddens))
return outputs
def initialize(self, batch):
hiddens = {}
        # initialize attributes through embeddings and initialize LSTM states to None
for element in self.embeddings.keys():
embedding = self.embeddings[element](batch[f"{element}_input"])
hiddens.update(
{
f"{element}_input": embedding,
f"{element}_embedding": embedding.clone(),
f"{element}_lstm": None,
}
)
return hiddens
def step(self, edge_index, sections, **hiddens):
"""
Perform a message passing step by propagating information and updating each element
"""
for element, update_fn in self.update_fns.items():
hiddens[f"{element}_embedding"], hiddens[f"{element}_lstm"] = update_fn(
edge_index=edge_index, sections=sections, element=element, **hiddens
)
return hiddens
| [((642, 669), 'torch.bincount', 'torch.bincount', (['batch.batch'], {}), '(batch.batch)\n', (656, 669), False, 'import torch\n')] |
vivek-r-2000/BoundaryNet | configs/baselines/DACN/GNN/GCN_res_layer.py | fce8d51a516646c1001116d03872dbba9e4c5082 | import math
import torch
import torch.nn as nn
from torch.nn.modules.module import Module
from GNN.GCN_layer import GraphConvolution
class GraphResConvolution(Module):
"""
Simple GCN layer, similar to https://arxiv.org/abs/1609.02907
"""
def __init__(self, state_dim, name=''):
super(GraphResConvolution, self).__init__()
self.state_dim = state_dim
self.gcn_1 = GraphConvolution(state_dim, '%s_1' % name)
self.gcn_2 = GraphConvolution(state_dim, '%s_2' % name)
self.relu1 = nn.ReLU()
self.relu2 = nn.ReLU()
self.name = name
def forward(self, input, adj):
output_1 = self.gcn_1(input, adj)
output_1_relu = self.relu1(output_1)
output_2 = self.gcn_2(output_1_relu, adj)
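        # residual connection: add the block input before the final activation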
output_2_res = output_2 + input
output = self.relu2(output_2_res)
return output
def __repr__(self):
return self.__class__.__name__ + ' (' + self.name + ')' | [((407, 449), 'GNN.GCN_layer.GraphConvolution', 'GraphConvolution', (['state_dim', "('%s_1' % name)"], {}), "(state_dim, '%s_1' % name)\n", (423, 449), False, 'from GNN.GCN_layer import GraphConvolution\n'), ((471, 513), 'GNN.GCN_layer.GraphConvolution', 'GraphConvolution', (['state_dim', "('%s_2' % name)"], {}), "(state_dim, '%s_2' % name)\n", (487, 513), False, 'from GNN.GCN_layer import GraphConvolution\n'), ((536, 545), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (543, 545), True, 'import torch.nn as nn\n'), ((567, 576), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (574, 576), True, 'import torch.nn as nn\n')] |
lukasvosyka/mtools | mtools/util/logfile.py | b94620cef48a9eb71b6a7fa93ad88f70cd36982f | #!/usr/bin/env python3
from __future__ import print_function
import os
import re
import sys
from datetime import datetime
from math import ceil
from mtools.util.input_source import InputSource
from mtools.util.logevent import LogEvent
class LogFile(InputSource):
"""Log file wrapper class. Handles open file streams or stdin."""
def __init__(self, filehandle):
"""Provide logfile as open file stream or stdin."""
self.filehandle = filehandle
self.name = filehandle.name
self.from_stdin = filehandle.name == "<stdin>"
self._bounds_calculated = False
self._start = None
self._end = None
self._filesize = None
self._num_lines = None
self._restarts = None
self._binary = None
self._timezone = None
self._hostname = None
self._port = None
self._rs_state = None
self._repl_set = None
self._repl_set_members = None
self._repl_set_version = None
self._repl_set_protocol = None
self._storage_engine = None
self._datetime_format = None
self._year_rollover = None
self._shards = None
self._csrs = None
self._chunks_moved_from = None
self._chunks_moved_to = None
self._chunk_splits = None
# Track previous file position for loop detection in _find_curr_line()
self.prev_pos = None
self._has_level = None
# make sure bounds are calculated before starting to iterate,
# including potential year rollovers
self._calculate_bounds()
@property
def start(self):
"""
Lazy evaluation of start and end of logfile.
Returns None for stdin input currently.
"""
if not self._start:
self._calculate_bounds()
return self._start
@property
def end(self):
"""
Lazy evaluation of start and end of logfile.
Returns None for stdin input currently.
"""
if not self._end:
self._calculate_bounds()
return self._end
@property
def timezone(self):
"""Lazy evaluation of timezone of logfile."""
if not self._timezone:
self._calculate_bounds()
return self._timezone
@property
def filesize(self):
"""
        Lazy evaluation of the logfile size.
Returns None for stdin input currently.
"""
if self.from_stdin:
return None
if not self._filesize:
self._calculate_bounds()
return self._filesize
@property
def datetime_format(self):
"""Lazy evaluation of the datetime format."""
if not self._datetime_format:
self._calculate_bounds()
return self._datetime_format
@property
def has_level(self):
"""Lazy evaluation of the whether the logfile has any level lines."""
if self._has_level is None:
self._iterate_lines()
return self._has_level
@property
def year_rollover(self):
"""Lazy evaluation of the datetime format."""
if self._year_rollover is None:
self._calculate_bounds()
return self._year_rollover
@property
def num_lines(self):
"""
Lazy evaluation of the number of lines.
Returns None for stdin input currently.
"""
if self.from_stdin:
return None
if not self._num_lines:
self._iterate_lines()
return self._num_lines
@property
def restarts(self):
"""Lazy evaluation of all restarts."""
if not self._num_lines:
self._iterate_lines()
return self._restarts
@property
def rs_state(self):
"""Lazy evaluation of all restarts."""
if not self._num_lines:
self._iterate_lines()
return self._rs_state
@property
def binary(self):
"""Lazy evaluation of the binary name."""
if not self._num_lines:
self._iterate_lines()
return self._binary
@property
def hostname(self):
"""Lazy evaluation of the binary name."""
if not self._num_lines:
self._iterate_lines()
return self._hostname
@property
def port(self):
"""Lazy evaluation of the binary name."""
if not self._num_lines:
self._iterate_lines()
return self._port
@property
def versions(self):
"""Return all version changes."""
versions = []
for v, _ in self.restarts:
if len(versions) == 0 or v != versions[-1]:
versions.append(v)
return versions
@property
def repl_set(self):
"""Return the replSet (if available)."""
if not self._num_lines:
self._iterate_lines()
return self._repl_set
@property
def repl_set_members(self):
"""Return the replSet (if available)."""
if not self._num_lines:
self._iterate_lines()
return self._repl_set_members
@property
def repl_set_version(self):
"""Return the replSet (if available)."""
if not self._num_lines:
self._iterate_lines()
return self._repl_set_version
@property
def repl_set_protocol(self):
"""Return the replSet protocolVersion (if available)."""
if not self._num_lines:
self._iterate_lines()
return self._repl_set_protocol
@property
def storage_engine(self):
"""Return storage engine if available."""
if not self._num_lines:
self._iterate_lines()
return self._storage_engine
@property
def shards(self):
"""Lazily return the shards (if available)"""
if not self._shards:
self._find_sharding_info()
return self._shards
@property
def csrs(self):
"""Lazily return the CSRS (if available)"""
if not self._csrs:
self._find_sharding_info()
return self._csrs
@property
def chunks_moved_to(self):
"""Lazily return the chunks moved to this shard (if available)"""
if not self._chunks_moved_to:
self._find_sharding_info()
return self._chunks_moved_to
@property
def chunks_moved_from(self):
"""Lazily return the chunks moved from this shard (if available)"""
if not self._chunks_moved_from:
self._find_sharding_info()
return self._chunks_moved_from
@property
def chunk_splits(self):
"""Lazily return the chunks split in this shard (if available)"""
if not self._chunk_splits:
self._find_sharding_info()
return self._chunk_splits
def next(self):
"""Get next line, adjust for year rollover and hint datetime format."""
# use readline here because next() iterator uses internal readahead
# buffer so seek position is wrong
line = self.filehandle.readline()
if isinstance(line, bytes):
line = line.decode('utf-8', 'replace')
if line == '':
raise StopIteration
line = line.rstrip('\n')
le = LogEvent(line)
# hint format and nextpos from previous line
if self._datetime_format and self._datetime_nextpos is not None:
ret = le.set_datetime_hint(self._datetime_format,
self._datetime_nextpos,
self.year_rollover)
if not ret:
# logevent indicates timestamp format has changed,
# invalidate hint info
self._datetime_format = None
self._datetime_nextpos = None
elif le.datetime:
# gather new hint info from another logevent
self._datetime_format = le.datetime_format
self._datetime_nextpos = le._datetime_nextpos
return le
def __iter__(self):
"""
Iterate over LogFile object.
Return a LogEvent object for each line (generator).
"""
le = None
while True:
try:
le = self.next()
except StopIteration as e:
# end of log file, get end date
if not self.end and self.from_stdin:
if le and le.datetime:
self._end = le.datetime
# future iterations start from the beginning
if not self.from_stdin:
self.filehandle.seek(0)
# return (instead of raising StopIteration exception) per PEP 479
return
# get start date for stdin input
if not self.start and self.from_stdin:
if le and le.datetime:
self._start = le.datetime
try:
yield le
except StopIteration:
return
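    # replica set member states used when parsing "[rsMgr] replSet" lines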
states = (['PRIMARY', 'SECONDARY', 'DOWN', 'STARTUP', 'STARTUP2',
'RECOVERING', 'ROLLBACK', 'ARBITER', 'UNKNOWN'])
def __len__(self):
"""Return the number of lines in a log file."""
return self.num_lines
def _iterate_lines(self):
"""Count number of lines (can be expensive)."""
self._num_lines = 0
self._restarts = []
self._rs_state = []
ln = 0
for ln, line in enumerate(self.filehandle):
if isinstance(line, bytes):
line = line.decode("utf-8", "replace")
if (self._has_level is None and
line[28:31].strip() in LogEvent.log_levels and
line[31:39].strip() in LogEvent.log_components):
self._has_level = True
# find version string (fast check to eliminate most lines)
if "version" in line[:100]:
logevent = LogEvent(line)
restart = self._check_for_restart(logevent)
if restart:
self._restarts.append((restart, logevent))
if "starting :" in line or "starting:" in line:
# look for hostname, port
match = re.search('port=(?P<port>\d+).*host=(?P<host>\S+)',
line)
if match:
self._hostname = match.group('host')
self._port = match.group('port')
""" For 3.0 the "[initandlisten] options:" long entry contained the
"engine" field if WiredTiger was the storage engine. There were
only two engines, MMAPv1 and WiredTiger
"""
if "[initandlisten] options:" in line:
match = re.search('replSet: "(?P<replSet>\S+)"', line)
if match:
self._repl_set = match.group('replSet')
match = re.search('engine: "(?P<engine>\S+)"', line)
if match:
self._storage_engine = match.group('engine')
else:
self._storage_engine = 'mmapv1'
""" For 3.2 the "[initandlisten] options:" no longer contains the
"engine" field So now we have to look for the "[initandlisten]
wiredtiger_open config:" which was present in 3.0, but would
now tell us definitively that wiredTiger is being used
"""
if "[initandlisten] wiredtiger_open config:" in line:
self._storage_engine = 'wiredTiger'
if "command admin.$cmd command: { replSetInitiate:" in line:
match = re.search('{ _id: "(?P<replSet>\S+)", '
'members: (?P<replSetMembers>[^]]+ ])', line)
if match:
self._repl_set = match.group('replSet')
self._repl_set_members = match.group('replSetMembers')
# Replica set config logging in MongoDB 3.0+
new_config = ("New replica set config in use: ")
if new_config in line:
match = re.search('{ _id: "(?P<replSet>\S+)", '
'version: (?P<replSetVersion>\d+), ', line)
if match:
self._repl_set = match.group('replSet')
self._repl_set_version = match.group('replSetVersion')
match = re.search(', protocolVersion: (?P<replSetProtocol>\d+), ', line)
if match:
self._repl_set_protocol = match.group('replSetProtocol')
match = re.search('members: (?P<replSetMembers>[^]]+ ])', line)
if match:
self._repl_set_members = match.group('replSetMembers')
# if ("is now in state" in line and
# next(state for state in states if line.endswith(state))):
if "is now in state" in line:
tokens = line.split()
# 2.6
if tokens[1].endswith(']'):
pos = 4
else:
pos = 5
host = tokens[pos]
rs_state = tokens[-1]
state = (host, rs_state, LogEvent(line))
self._rs_state.append(state)
continue
if "[rsMgr] replSet" in line:
tokens = line.split()
if self._hostname:
host = self._hostname + ':' + self._port
else:
host = os.path.basename(self.name)
host += ' (self)'
if tokens[-1] in self.states:
rs_state = tokens[-1]
else:
# 2.6
if tokens[1].endswith(']'):
pos = 2
else:
pos = 6
rs_state = ' '.join(tokens[pos:])
state = (host, rs_state, LogEvent(line))
self._rs_state.append(state)
continue
self._num_lines = ln + 1
# reset logfile
self.filehandle.seek(0)
def _check_for_restart(self, logevent):
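        """Return the server version string if logevent marks a restart, else False."""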
if (logevent.thread == 'initandlisten' and
"db version v" in logevent.line_str):
self._binary = 'mongod'
elif logevent.thread == 'mongosMain' and ('MongoS' in logevent.line_str or
'mongos' in logevent.line_str):
self._binary = 'mongos'
else:
return False
version = re.search(r'(\d\.\d\.\d+)', logevent.line_str)
if version:
version = version.group(1)
return version
else:
return False
def _calculate_bounds(self):
"""Calculate beginning and end of logfile."""
if self._bounds_calculated:
# Assume no need to recalc bounds for lifetime of a Logfile object
return
if self.from_stdin:
return False
# we should be able to find a valid log line within max_start_lines
max_start_lines = 10
lines_checked = 0
# get start datetime
for line in self.filehandle:
logevent = LogEvent(line)
lines_checked += 1
if logevent.datetime:
self._start = logevent.datetime
self._timezone = logevent.datetime.tzinfo
self._datetime_format = logevent.datetime_format
self._datetime_nextpos = logevent._datetime_nextpos
break
if lines_checked > max_start_lines:
break
# sanity check before attempting to find end date
if (self._start is None):
raise SystemExit("Error: <%s> does not appear to be a supported "
"MongoDB log file format" % self.filehandle.name)
# get end datetime (lines are at most 10k,
# go back 30k at most to make sure we catch one)
self.filehandle.seek(0, 2)
self._filesize = self.filehandle.tell()
self.filehandle.seek(-min(self._filesize, 30000), 2)
for line in reversed(self.filehandle.readlines()):
logevent = LogEvent(line)
if logevent.datetime:
self._end = logevent.datetime
break
# if there was a roll-over, subtract 1 year from start time
if self._end < self._start:
self._start = self._start.replace(year=self._start.year - 1)
self._year_rollover = self._end
else:
self._year_rollover = False
# reset logfile
self.filehandle.seek(0)
self._bounds_calculated = True
return True
def _find_curr_line(self, prev=False):
"""
Internal helper function.
Find the current (or previous if prev=True) line in a log file based on
the current seek position.
"""
curr_pos = self.filehandle.tell()
# jump back 15k characters (at most) and find last newline char
jump_back = min(self.filehandle.tell(), 15000)
self.filehandle.seek(-jump_back, 1)
buff = self.filehandle.read(jump_back)
self.filehandle.seek(curr_pos, 0)
if prev and self.prev_pos is not None and self.prev_pos == curr_pos:
# Number of characters to show before/after the log offset
error_context = 300
self.filehandle.seek(-error_context, 1)
buff = self.filehandle.read(curr_pos)
hr = "-" * 60
print("Fatal log parsing loop detected trying to find previous "
"log line near offset %s in %s:\n\n%s\n%s\n"
"<--- (current log parsing offset) \n%s\n%s\n"
% (curr_pos, self.name, hr, buff[:error_context],
buff[error_context:error_context + 1], hr),
file=sys.stderr)
raise SystemExit("Cannot parse %s with requested options"
% self.filehandle.name)
else:
self.prev_pos = curr_pos
if isinstance(buff, bytes):
buff = buff.decode("utf-8", "replace")
newline_pos = buff.rfind('\n')
if prev:
newline_pos = buff[:newline_pos].rfind('\n')
# move back to last newline char
if newline_pos == -1:
self.filehandle.seek(0)
return self.next()
self.filehandle.seek(newline_pos - jump_back + 1, 1)
# roll forward until we found a line with a datetime
try:
logevent = self.next()
while not logevent.datetime:
logevent = self.next()
return logevent
except StopIteration:
# reached end of file
return None
def _find_sharding_info(self):
"""
Iterate over file and find any sharding related information
"""
self._shards = []
self._chunks_moved_from = []
self._chunks_moved_to = []
self._chunk_splits = []
prev_line = ""
for line in self.filehandle:
if isinstance(line, bytes):
line = line.decode("utf-8", "replace")
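            # mongos and mongod log sharding details differently: mongos names
            # shards/CSRS directly, mongod exposes them via replica set monitors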
if self.binary == "mongos":
if "Starting new replica set monitor for" in line:
if "[mongosMain]" in line:
match = re.search("for (?P<csrsName>\w+)/"
"(?P<replSetMembers>\S+)", line)
if match:
csrs_info = (match.group('csrsName'),
match.group('replSetMembers'))
self._csrs = csrs_info
else:
match = re.search("for (?P<shardName>\w+)/"
"(?P<replSetMembers>\S+)", line)
if match:
shard_info = (match.group('shardName'),
match.group('replSetMembers'))
self._shards.append(shard_info)
elif self.binary == "mongod":
logevent = LogEvent(line)
if "New replica set config in use" in line:
if "configsvr: true" in line:
match = re.search(' _id: "(?P<replSet>\S+)".*'
'members: (?P<replSetMembers>[^]]+ ])', line)
if match:
self._csrs = (
match.group('replSet'),
match.group('replSetMembers')
)
if "Starting new replica set monitor for" in line:
match = re.search("for (?P<replSet>\w+)/"
"(?P<replSetMembers>\S+)", line)
if match:
if self._csrs and match.group('replSet') != self._csrs[0]:
self._shards.append((
match.group('replSet'),
match.group('replSetMembers')
))
elif not self._csrs:
self._csrs = (
match.group('replSet'),
match.group('replSetMembers')
)
if "moveChunk.from" in line:
logevent = LogEvent(line)
match = re.search('ns: "(?P<namespace>\S+)".*'
'details: { (?P<range>.*\}).*'
'to: "(?P<movedTo>\S+)".*note: "(?P<note>\S+)"', line)
if match:
time = logevent.datetime
chunk_range = match.group('range')
namespace = match.group('namespace')
moved_to = match.group('movedTo')
note = match.group('note')
if note == "success":
errmsg = None
steps = re.findall('(?P<steps>step \d of \d): (?P<stepTimes>\d+)', line)
else:
match = re.search(':: caused by :: (?P<errmsg>\S+):', prev_line)
steps = None
if match:
errmsg = match.group('errmsg')
else:
errmsg = "Unknown"
chunk_migration = (time, chunk_range, moved_to, namespace, steps, note, errmsg)
self._chunks_moved_from.append(chunk_migration)
if "moveChunk.to" in line:
logevent = LogEvent(line)
match = re.search('ns: "(?P<namespace>\S+)".*'
'details: { (?P<range>.*\}).*.*note: "(?P<note>\S+)"', line)
if match:
time = logevent.datetime
chunk_range = match.group('range')
namespace = match.group('namespace')
# TODO: alter this to find moved from shard name when SERVER-45770 TICKET is added
moved_from = "Unknown"
note = match.group('note')
if note == "success":
errmsg = None
steps = re.findall('(?P<steps>step \d of \d): (?P<stepTimes>\d+)', line)
else:
steps = None
match = re.search('errmsg: "(?P<errmsg>.*)"', line)
if match:
errmsg = match.group('errmsg')
chunk_migration = (time, chunk_range, moved_from, namespace, steps, note, errmsg)
self._chunks_moved_to.append(chunk_migration)
if "Finding the split vector for" in line:
logevent = LogEvent(line)
match = re.search('for (?P<namespace>\S+).*'
'numSplits: (?P<numSplits>\d+)', line)
if match:
time = logevent.datetime
split_range = None
namespace = match.group("namespace")
numSplits = match.group('numSplits')
success = None
time_taken = 0
error = None
self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken, error))
elif "splitVector" in line:
logevent = LogEvent(line)
match = re.search('splitVector: "(?P<namespace>\S+)".*,'
' (?P<range>min:.*), max.*op_msg (?P<time_taken>\d+)', line)
if match:
time = logevent.datetime
split_range = match.group("range")
namespace = match.group("namespace")
time_taken = match.group("time_taken")
numSplits = 0
success = True
error = None
self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken, error))
elif "Unable to auto-split chunk" in line:
logevent = LogEvent(line)
match = re.search("chunk \[(?P<range>.*)\) "
'in namespace (?P<namespace>\S+)'
' :: caused by :: (?P<error>\S+): ', line)
if match:
time = logevent.datetime
split_range = match.group("range")
namespace = match.group("namespace")
numSplits = 0
success = False
time_taken = 0
error = match.group("error")
self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken, error))
elif "jumbo" in line:
logevent = LogEvent(line)
match = re.search('migration (?P<namespace>\S+): \[(?P<range>.*)\)', prev_line)
if match:
time = logevent.datetime
split_range = match.group("range")
namespace = match.group("namespace")
numSplits = 0
success = False
time_taken = 0
error = "Jumbo"
self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken, error))
prev_line = line
# reset logfile
self.filehandle.seek(0)
def fast_forward(self, start_dt):
"""
Fast-forward file to given start_dt datetime obj using binary search.
Only fast for files. Streams need to be forwarded manually, and it will
miss the first line that would otherwise match (as it consumes the log
line).
"""
if self.from_stdin:
# skip lines until start_dt is reached
return
else:
# fast bisection path
max_mark = self.filesize
step_size = max_mark
# check if start_dt is already smaller than first datetime
self.filehandle.seek(0)
le = self.next()
if le.datetime and le.datetime >= start_dt:
self.filehandle.seek(0)
return
le = None
self.filehandle.seek(0)
# search for lower bound
while abs(step_size) > 100:
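                # classic bisection: halve the step each pass and flip its sign
                # depending on whether the found line is past start_dt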
step_size = ceil(step_size / 2.)
self.filehandle.seek(step_size, 1)
le = self._find_curr_line()
if not le:
break
if le.datetime >= start_dt:
step_size = -abs(step_size)
else:
step_size = abs(step_size)
if not le:
return
# now walk backwards until we found a truly smaller line
while self.filehandle.tell() >= 2 and (le.datetime is None or
le.datetime >= start_dt):
self.filehandle.seek(-2, 1)
le = self._find_curr_line(prev=True)
| [((7241, 7255), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (7249, 7255), False, 'from mtools.util.logevent import LogEvent\n'), ((14595, 14645), 're.search', 're.search', (['"""(\\\\d\\\\.\\\\d\\\\.\\\\d+)"""', 'logevent.line_str'], {}), "('(\\\\d\\\\.\\\\d\\\\.\\\\d+)', logevent.line_str)\n", (14604, 14645), False, 'import re\n'), ((15266, 15280), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (15274, 15280), False, 'from mtools.util.logevent import LogEvent\n'), ((16263, 16277), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (16271, 16277), False, 'from mtools.util.logevent import LogEvent\n'), ((9950, 9964), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (9958, 9964), False, 'from mtools.util.logevent import LogEvent\n'), ((10243, 10302), 're.search', 're.search', (['"""port=(?P<port>\\\\d+).*host=(?P<host>\\\\S+)"""', 'line'], {}), "('port=(?P<port>\\\\d+).*host=(?P<host>\\\\S+)', line)\n", (10252, 10302), False, 'import re\n'), ((10779, 10826), 're.search', 're.search', (['"""replSet: "(?P<replSet>\\\\S+)\\""""', 'line'], {}), '(\'replSet: "(?P<replSet>\\\\S+)"\', line)\n', (10788, 10826), False, 'import re\n'), ((10937, 10982), 're.search', 're.search', (['"""engine: "(?P<engine>\\\\S+)\\""""', 'line'], {}), '(\'engine: "(?P<engine>\\\\S+)"\', line)\n', (10946, 10982), False, 'import re\n'), ((11685, 11772), 're.search', 're.search', (['"""{ _id: "(?P<replSet>\\\\S+)", members: (?P<replSetMembers>[^]]+ ])"""', 'line'], {}), '(\'{ _id: "(?P<replSet>\\\\S+)", members: (?P<replSetMembers>[^]]+ ])\',\n line)\n', (11694, 11772), False, 'import re\n'), ((12144, 12230), 're.search', 're.search', (['"""{ _id: "(?P<replSet>\\\\S+)", version: (?P<replSetVersion>\\\\d+), """', 'line'], {}), '(\'{ _id: "(?P<replSet>\\\\S+)", version: (?P<replSetVersion>\\\\d+), \',\n line)\n', (12153, 12230), False, 'import re\n'), ((12447, 12512), 're.search', 're.search', (['""", protocolVersion: (?P<replSetProtocol>\\\\d+), """', 'line'], {}), "(', protocolVersion: (?P<replSetProtocol>\\\\d+), ', line)\n", (12456, 12512), False, 'import re\n'), ((12639, 12694), 're.search', 're.search', (['"""members: (?P<replSetMembers>[^]]+ ])"""', 'line'], {}), "('members: (?P<replSetMembers>[^]]+ ])', line)\n", (12648, 12694), False, 'import re\n'), ((21638, 21652), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (21646, 21652), False, 'from mtools.util.logevent import LogEvent\n'), ((21677, 21809), 're.search', 're.search', (['"""ns: "(?P<namespace>\\\\S+)".*details: { (?P<range>.*\\\\}).*to: "(?P<movedTo>\\\\S+)".*note: "(?P<note>\\\\S+)\\""""', 'line'], {}), '(\n \'ns: "(?P<namespace>\\\\S+)".*details: { (?P<range>.*\\\\}).*to: "(?P<movedTo>\\\\S+)".*note: "(?P<note>\\\\S+)"\'\n , line)\n', (21686, 21809), False, 'import re\n'), ((22911, 22925), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (22919, 22925), False, 'from mtools.util.logevent import LogEvent\n'), ((22950, 23059), 're.search', 're.search', (['"""ns: "(?P<namespace>\\\\S+)".*details: { (?P<range>.*\\\\}).*.*note: "(?P<note>\\\\S+)\\""""', 'line'], {}), '(\n \'ns: "(?P<namespace>\\\\S+)".*details: { (?P<range>.*\\\\}).*.*note: "(?P<note>\\\\S+)"\'\n , line)\n', (22959, 23059), False, 'import re\n'), ((24123, 24137), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (24131, 24137), False, 'from mtools.util.logevent import LogEvent\n'), ((24162, 24236), 're.search', 're.search', 
(['"""for (?P<namespace>\\\\S+).*numSplits: (?P<numSplits>\\\\d+)"""', 'line'], {}), "('for (?P<namespace>\\\\S+).*numSplits: (?P<numSplits>\\\\d+)', line)\n", (24171, 24236), False, 'import re\n'), ((27871, 27892), 'math.ceil', 'ceil', (['(step_size / 2.0)'], {}), '(step_size / 2.0)\n', (27875, 27892), False, 'from math import ceil\n'), ((13263, 13277), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (13271, 13277), False, 'from mtools.util.logevent import LogEvent\n'), ((13575, 13602), 'os.path.basename', 'os.path.basename', (['self.name'], {}), '(self.name)\n', (13591, 13602), False, 'import os\n'), ((14007, 14021), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (14015, 14021), False, 'from mtools.util.logevent import LogEvent\n'), ((20284, 20298), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (20292, 20298), False, 'from mtools.util.logevent import LogEvent\n'), ((24783, 24797), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (24791, 24797), False, 'from mtools.util.logevent import LogEvent\n'), ((24822, 24940), 're.search', 're.search', (['"""splitVector: "(?P<namespace>\\\\S+)".*, (?P<range>min:.*), max.*op_msg (?P<time_taken>\\\\d+)"""', 'line'], {}), '(\n \'splitVector: "(?P<namespace>\\\\S+)".*, (?P<range>min:.*), max.*op_msg (?P<time_taken>\\\\d+)\'\n , line)\n', (24831, 24940), False, 'import re\n'), ((19479, 19545), 're.search', 're.search', (['"""for (?P<csrsName>\\\\w+)/(?P<replSetMembers>\\\\S+)"""', 'line'], {}), "('for (?P<csrsName>\\\\w+)/(?P<replSetMembers>\\\\S+)', line)\n", (19488, 19545), False, 'import re\n'), ((19870, 19937), 're.search', 're.search', (['"""for (?P<shardName>\\\\w+)/(?P<replSetMembers>\\\\S+)"""', 'line'], {}), "('for (?P<shardName>\\\\w+)/(?P<replSetMembers>\\\\S+)', line)\n", (19879, 19937), False, 'import re\n'), ((20910, 20975), 're.search', 're.search', (['"""for (?P<replSet>\\\\w+)/(?P<replSetMembers>\\\\S+)"""', 'line'], {}), "('for (?P<replSet>\\\\w+)/(?P<replSetMembers>\\\\S+)', line)\n", (20919, 20975), False, 'import re\n'), ((22287, 22354), 're.findall', 're.findall', (['"""(?P<steps>step \\\\d of \\\\d): (?P<stepTimes>\\\\d+)"""', 'line'], {}), "('(?P<steps>step \\\\d of \\\\d): (?P<stepTimes>\\\\d+)', line)\n", (22297, 22354), False, 'import re\n'), ((22410, 22467), 're.search', 're.search', (['""":: caused by :: (?P<errmsg>\\\\S+):"""', 'prev_line'], {}), "(':: caused by :: (?P<errmsg>\\\\S+):', prev_line)\n", (22419, 22467), False, 'import re\n'), ((23573, 23640), 're.findall', 're.findall', (['"""(?P<steps>step \\\\d of \\\\d): (?P<stepTimes>\\\\d+)"""', 'line'], {}), "('(?P<steps>step \\\\d of \\\\d): (?P<stepTimes>\\\\d+)', line)\n", (23583, 23640), False, 'import re\n'), ((23733, 23776), 're.search', 're.search', (['"""errmsg: "(?P<errmsg>.*)\\""""', 'line'], {}), '(\'errmsg: "(?P<errmsg>.*)"\', line)\n', (23742, 23776), False, 'import re\n'), ((25509, 25523), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (25517, 25523), False, 'from mtools.util.logevent import LogEvent\n'), ((25548, 25669), 're.search', 're.search', (['"""chunk \\\\[(?P<range>.*)\\\\) in namespace (?P<namespace>\\\\S+) :: caused by :: (?P<error>\\\\S+): """', 'line'], {}), "(\n 'chunk \\\\[(?P<range>.*)\\\\) in namespace (?P<namespace>\\\\S+) :: caused by :: (?P<error>\\\\S+): '\n , line)\n", (25557, 25669), False, 'import re\n'), ((20462, 20548), 're.search', 're.search', (['""" _id: "(?P<replSet>\\\\S+)".*members: 
(?P<replSetMembers>[^]]+ ])"""', 'line'], {}), '(\' _id: "(?P<replSet>\\\\S+)".*members: (?P<replSetMembers>[^]]+ ])\',\n line)\n', (20471, 20548), False, 'import re\n'), ((26265, 26279), 'mtools.util.logevent.LogEvent', 'LogEvent', (['line'], {}), '(line)\n', (26273, 26279), False, 'from mtools.util.logevent import LogEvent\n'), ((26304, 26378), 're.search', 're.search', (['"""migration (?P<namespace>\\\\S+): \\\\[(?P<range>.*)\\\\)"""', 'prev_line'], {}), "('migration (?P<namespace>\\\\S+): \\\\[(?P<range>.*)\\\\)', prev_line)\n", (26313, 26378), False, 'import re\n')] |
Tillsten/pyqtgraph | tests/svg.py | 0045863165fe526988c58cf4f8232ae2d261a5ee | """
SVG export test
"""
import test
import pyqtgraph as pg
app = pg.mkQApp()
class SVGTest(test.TestCase):
#def test_plotscene(self):
#pg.setConfigOption('foreground', (0,0,0))
#w = pg.GraphicsWindow()
#w.show()
#p1 = w.addPlot()
#p2 = w.addPlot()
#p1.plot([1,3,2,3,1,6,9,8,4,2,3,5,3], pen={'color':'k'})
#p1.setXRange(0,5)
#p2.plot([1,5,2,3,4,6,1,2,4,2,3,5,3], pen={'color':'k', 'cosmetic':False, 'width': 0.3})
#app.processEvents()
#app.processEvents()
#ex = pg.exporters.SVGExporter.SVGExporter(w.scene())
#ex.export(fileName='test.svg')
def test_simple(self):
scene = pg.QtGui.QGraphicsScene()
#rect = pg.QtGui.QGraphicsRectItem(0, 0, 100, 100)
#scene.addItem(rect)
#rect.setPos(20,20)
#rect.translate(50, 50)
#rect.rotate(30)
#rect.scale(0.5, 0.5)
#rect1 = pg.QtGui.QGraphicsRectItem(0, 0, 100, 100)
#rect1.setParentItem(rect)
#rect1.setFlag(rect1.ItemIgnoresTransformations)
#rect1.setPos(20, 20)
#rect1.scale(2,2)
#el1 = pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 100)
#el1.setParentItem(rect1)
##grp = pg.ItemGroup()
#grp.setParentItem(rect)
#grp.translate(200,0)
##grp.rotate(30)
#rect2 = pg.QtGui.QGraphicsRectItem(0, 0, 100, 25)
#rect2.setFlag(rect2.ItemClipsChildrenToShape)
#rect2.setParentItem(grp)
#rect2.setPos(0,25)
#rect2.rotate(30)
#el = pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 50)
#el.translate(10,-5)
#el.scale(0.5,2)
#el.setParentItem(rect2)
grp2 = pg.ItemGroup()
scene.addItem(grp2)
grp2.scale(100,100)
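        # width=1 with cosmetic=False: the pen width should scale with the
        # 100x group transform in the exported SVG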
rect3 = pg.QtGui.QGraphicsRectItem(0,0,2,2)
rect3.setPen(pg.mkPen(width=1, cosmetic=False))
grp2.addItem(rect3)
ex = pg.exporters.SVGExporter.SVGExporter(scene)
ex.export(fileName='test.svg')
if __name__ == '__main__':
test.unittest.main() | [((65, 76), 'pyqtgraph.mkQApp', 'pg.mkQApp', ([], {}), '()\n', (74, 76), True, 'import pyqtgraph as pg\n'), ((2116, 2136), 'test.unittest.main', 'test.unittest.main', ([], {}), '()\n', (2134, 2136), False, 'import test\n'), ((704, 729), 'pyqtgraph.QtGui.QGraphicsScene', 'pg.QtGui.QGraphicsScene', ([], {}), '()\n', (727, 729), True, 'import pyqtgraph as pg\n'), ((1754, 1768), 'pyqtgraph.ItemGroup', 'pg.ItemGroup', ([], {}), '()\n', (1766, 1768), True, 'import pyqtgraph as pg\n'), ((1850, 1888), 'pyqtgraph.QtGui.QGraphicsRectItem', 'pg.QtGui.QGraphicsRectItem', (['(0)', '(0)', '(2)', '(2)'], {}), '(0, 0, 2, 2)\n', (1876, 1888), True, 'import pyqtgraph as pg\n'), ((1992, 2035), 'pyqtgraph.exporters.SVGExporter.SVGExporter', 'pg.exporters.SVGExporter.SVGExporter', (['scene'], {}), '(scene)\n', (2028, 2035), True, 'import pyqtgraph as pg\n'), ((1907, 1940), 'pyqtgraph.mkPen', 'pg.mkPen', ([], {'width': '(1)', 'cosmetic': '(False)'}), '(width=1, cosmetic=False)\n', (1915, 1940), True, 'import pyqtgraph as pg\n')] |
jedicontributors/pythondataintegrator | src/api/models/enums/apschedulerevents.py | 3e877b367ab9b20185476128ec053db41087879f | EVENT_SCHEDULER_STARTED = EVENT_SCHEDULER_START = 2 ** 0
EVENT_SCHEDULER_SHUTDOWN = 2 ** 1
EVENT_SCHEDULER_PAUSED = 2 ** 2
EVENT_SCHEDULER_RESUMED = 2 ** 3
EVENT_EXECUTOR_ADDED = 2 ** 4
EVENT_EXECUTOR_REMOVED = 2 ** 5
EVENT_JOBSTORE_ADDED = 2 ** 6
EVENT_JOBSTORE_REMOVED = 2 ** 7
EVENT_ALL_JOBS_REMOVED = 2 ** 8
EVENT_JOB_ADDED = 2 ** 9
EVENT_JOB_REMOVED = 2 ** 10
EVENT_JOB_MODIFIED = 2 ** 11
EVENT_JOB_EXECUTED = 2 ** 12
EVENT_JOB_ERROR = 2 ** 13
EVENT_JOB_MISSED = 2 ** 14
EVENT_JOB_SUBMITTED = 2 ** 15
EVENT_JOB_MAX_INSTANCES = 2 ** 16
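# Power-of-two bit flags: combine with | and test with &. These appear to
# mirror the event constants defined in apscheduler.events.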
EVENT_ALL = (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED |
EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED |
EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED |
EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED |
EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_SUBMITTED | EVENT_JOB_MAX_INSTANCES) | [] |
mrninhvn/matter | scripts/build/build/targets.py | c577b233db9d2f3a6f87108a062b1699a40c5169 | # Copyright (c) 2021 Project CHIP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from itertools import combinations
from typing import List
from builders.ameba import AmebaApp, AmebaBoard, AmebaBuilder
from builders.android import AndroidApp, AndroidBoard, AndroidBuilder
from builders.cc13x2x7_26x2x7 import cc13x2x7_26x2x7App, cc13x2x7_26x2x7Builder
from builders.cyw30739 import Cyw30739App, Cyw30739Board, Cyw30739Builder
from builders.efr32 import Efr32App, Efr32Board, Efr32Builder
from builders.esp32 import Esp32App, Esp32Board, Esp32Builder
from builders.host import HostApp, HostBoard, HostBuilder
from builders.infineon import InfineonApp, InfineonBoard, InfineonBuilder
from builders.k32w import K32WApp, K32WBuilder
from builders.mbed import MbedApp, MbedBoard, MbedBuilder, MbedProfile
from builders.nrf import NrfApp, NrfBoard, NrfConnectBuilder
from builders.qpg import QpgApp, QpgBoard, QpgBuilder
from builders.telink import TelinkApp, TelinkBoard, TelinkBuilder
from builders.tizen import TizenApp, TizenBoard, TizenBuilder
from builders.bl602 import Bl602App, Bl602Board, Bl602Builder
from builders.imx import IMXApp, IMXBuilder
class Target:
"""Represents a build target:
Has a name identifier plus parameters on how to build it (what
builder class to use and what arguments are required to produce
the specified build)
"""
def __init__(self, name, builder_class, **kwargs):
self.name = name
self.builder_class = builder_class
self.glob_blacklist_reason = None
self.create_kw_args = kwargs
def Clone(self):
"""Creates a clone of self."""
clone = Target(self.name, self.builder_class,
**self.create_kw_args.copy())
clone.glob_blacklist_reason = self.glob_blacklist_reason
return clone
def Extend(self, suffix, **kargs):
"""Creates a clone of the current object extending its build parameters.
Arguments:
suffix: appended with a "-" as separator to the clone name
**kargs: arguments needed to produce the new build variant
"""
clone = self.Clone()
clone.name += "-" + suffix
clone.create_kw_args.update(kargs)
return clone
def Create(self, runner, repository_path: str, output_prefix: str,
enable_flashbundle: bool):
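        """Instantiate this target's builder with the stored arguments."""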
builder = self.builder_class(
repository_path, runner=runner, **self.create_kw_args)
builder.target = self
builder.identifier = self.name
builder.output_dir = os.path.join(output_prefix, self.name)
builder.enable_flashbundle(enable_flashbundle)
return builder
def GlobBlacklist(self, reason):
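        """Clone this target and mark it excluded from glob-based selection."""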
clone = self.Clone()
if clone.glob_blacklist_reason:
clone.glob_blacklist_reason += ", "
clone.glob_blacklist_reason += reason
else:
clone.glob_blacklist_reason = reason
return clone
@property
def IsGlobBlacklisted(self):
return self.glob_blacklist_reason is not None
@property
def GlobBlacklistReason(self):
return self.glob_blacklist_reason
class AcceptAnyName:
def Accept(self, name: str):
return True
class AcceptNameWithSubstrings:
def __init__(self, substr: List[str]):
self.substr = substr
def Accept(self, name: str):
for s in self.substr:
if s in name:
return True
return False
class BuildVariant:
def __init__(self, name: str, validator=AcceptAnyName(),
conflicts: List[str] = [], requires: List[str] = [],
**buildargs):
self.name = name
self.validator = validator
self.conflicts = conflicts
self.buildargs = buildargs
self.requires = requires
def HasConflicts(items: List[BuildVariant]) -> bool:
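    """Return True if any two variants in the list declare a mutual conflict."""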
for a, b in combinations(items, 2):
if (a.name in b.conflicts) or (b.name in a.conflicts):
return True
return False
def AllRequirementsMet(items: List[BuildVariant]) -> bool:
"""
Check that item.requires is satisfied for all items in the given list
"""
available = set([item.name for item in items])
for item in items:
for requirement in item.requires:
if requirement not in available:
return False
return True
class VariantBuilder:
"""Handles creating multiple build variants based on a starting target.
"""
def __init__(self, targets: List[Target] = []):
# note the clone in case the default arg is used
self.targets = targets[:]
self.variants = []
self.glob_whitelist = []
def WhitelistVariantNameForGlob(self, name):
"""
Whitelist the specified variant to be allowed for globbing.
By default we do not want a 'build all' to select all variants, so
variants are generally glob-blacklisted.
"""
self.glob_whitelist.append(name)
def AppendVariant(self, **args):
"""
        Add another variant to the accepted variants. Arguments are passed
        through to the BuildVariant constructor.
Example usage:
builder.AppendVariant(name="ipv6only", enable_ipv4=False)
"""
self.variants.append(BuildVariant(**args))
def AllVariants(self):
"""
        Yield each base target plus every non-conflicting combination of its
        variants, honoring the globbing whitelist.
"""
for target in self.targets:
yield target
# skip variants that do not work for this target
ok_variants = [
v for v in self.variants if v.validator.Accept(target.name)]
# Build every possible variant
for variant_count in range(1, len(ok_variants) + 1):
for subgroup in combinations(ok_variants, variant_count):
if HasConflicts(subgroup):
continue
if not AllRequirementsMet(subgroup):
continue
# Target ready to be created - no conflicts
variant_target = target.Clone()
for option in subgroup:
variant_target = variant_target.Extend(
option.name, **option.buildargs)
# Only a few are whitelisted for globs
name = '-'.join([o.name for o in subgroup])
if name not in self.glob_whitelist:
if not variant_target.IsGlobBlacklisted:
variant_target = variant_target.GlobBlacklist(
'Reduce default build variants')
yield variant_target
def HostTargets():
target = Target(HostBoard.NATIVE.PlatformName(), HostBuilder)
target_native = target.Extend(HostBoard.NATIVE.BoardName(), board=HostBoard.NATIVE)
targets = [target_native]
# x64 linux supports cross compile
cross_compile = (HostBoard.NATIVE.PlatformName() == 'linux') and (HostBoard.NATIVE.BoardName() != HostBoard.ARM64.BoardName())
if cross_compile:
targets.append(target.Extend('arm64', board=HostBoard.ARM64))
app_targets = []
# Don't cross compile some builds
app_targets.append(
target_native.Extend('rpc-console', app=HostApp.RPC_CONSOLE))
app_targets.append(
target_native.Extend('tv-app', app=HostApp.TV_APP))
app_targets.append(
target_native.Extend('tv-casting-app', app=HostApp.TV_CASTING_APP))
app_targets.append(
target_native.Extend('nl-test-runner', app=HostApp.NL_TEST_RUNNER))
for target in targets:
app_targets.append(target.Extend(
'all-clusters', app=HostApp.ALL_CLUSTERS))
if (HostBoard.NATIVE.PlatformName() == 'darwin'):
app_targets.append(target.Extend(
'chip-tool-darwin', app=HostApp.CHIP_TOOL_DARWIN))
app_targets.append(target.Extend('chip-tool', app=HostApp.CHIP_TOOL))
app_targets.append(target.Extend('thermostat', app=HostApp.THERMOSTAT))
app_targets.append(target.Extend('minmdns', app=HostApp.MIN_MDNS))
app_targets.append(target.Extend('light', app=HostApp.LIGHT))
app_targets.append(target.Extend('lock', app=HostApp.LOCK))
app_targets.append(target.Extend('shell', app=HostApp.SHELL))
app_targets.append(target.Extend(
'ota-provider', app=HostApp.OTA_PROVIDER, enable_ble=False))
app_targets.append(target.Extend(
'ota-requestor', app=HostApp.OTA_REQUESTOR, enable_ble=False))
app_targets.append(target.Extend('python-bindings', app=HostApp.PYTHON_BINDINGS))
builder = VariantBuilder()
# Possible build variants. Note that number of potential
# builds is exponential here
builder.AppendVariant(name="same-event-loop", validator=AcceptNameWithSubstrings(
['-chip-tool', '-chip-tool-darwin']), separate_event_loop=False),
builder.AppendVariant(name="no-interactive", validator=AcceptNameWithSubstrings(
['-chip-tool']), interactive_mode=False),
builder.AppendVariant(name="ipv6only", enable_ipv4=False),
builder.AppendVariant(name="no-ble", enable_ble=False),
builder.AppendVariant(name="no-wifi", enable_wifi=False),
builder.AppendVariant(name="tsan", conflicts=['asan'], use_tsan=True),
builder.AppendVariant(name="asan", conflicts=['tsan'], use_asan=True),
builder.AppendVariant(name="libfuzzer", requires=[
"clang"], use_libfuzzer=True),
builder.AppendVariant(name="clang", use_clang=True),
builder.AppendVariant(name="test", extra_tests=True),
builder.WhitelistVariantNameForGlob('no-interactive-ipv6only')
builder.WhitelistVariantNameForGlob('ipv6only')
for target in app_targets:
if ('-rpc-console' in target.name) or ('-python-bindings' in target.name) or ('nl-test-runner' in target.name):
# Single-variant builds
yield target
else:
builder.targets.append(target)
for target in builder.AllVariants():
if cross_compile and 'chip-tool' in target.name and 'arm64' in target.name and '-no-interactive' not in target.name:
# Interactive builds will not compile by default on arm cross compiles
# because libreadline is not part of the default sysroot
yield target.GlobBlacklist('Arm crosscompile does not support libreadline-dev')
else:
yield target
# Without extra build variants
yield target_native.Extend('chip-cert', app=HostApp.CERT_TOOL)
yield target_native.Extend('address-resolve-tool', app=HostApp.ADDRESS_RESOLVE)
yield target_native.Extend('address-resolve-tool-clang', app=HostApp.ADDRESS_RESOLVE,
use_clang=True).GlobBlacklist("Reduce default build variants")
yield target_native.Extend('address-resolve-tool-platform-mdns', app=HostApp.ADDRESS_RESOLVE,
use_platform_mdns=True).GlobBlacklist("Reduce default build variants")
yield target_native.Extend('address-resolve-tool-platform-mdns-ipv6only', app=HostApp.ADDRESS_RESOLVE,
use_platform_mdns=True, enable_ipv4=False).GlobBlacklist("Reduce default build variants")
test_target = Target(HostBoard.NATIVE.PlatformName(), HostBuilder)
for board in [HostBoard.NATIVE, HostBoard.FAKE]:
yield test_target.Extend(board.BoardName() + '-tests', board=board, app=HostApp.TESTS)
def Esp32Targets():
esp32_target = Target('esp32', Esp32Builder)
yield esp32_target.Extend('m5stack-all-clusters', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS)
yield esp32_target.Extend('m5stack-all-clusters-ipv6only', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS,
enable_ipv4=False)
yield esp32_target.Extend('m5stack-all-clusters-rpc', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS,
enable_rpcs=True)
yield esp32_target.Extend('m5stack-all-clusters-rpc-ipv6only', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS,
enable_rpcs=True, enable_ipv4=False)
yield esp32_target.Extend('c3devkit-all-clusters', board=Esp32Board.C3DevKit, app=Esp32App.ALL_CLUSTERS)
devkitc = esp32_target.Extend('devkitc', board=Esp32Board.DevKitC)
yield devkitc.Extend('all-clusters', app=Esp32App.ALL_CLUSTERS)
yield devkitc.Extend('all-clusters-ipv6only', app=Esp32App.ALL_CLUSTERS, enable_ipv4=False)
yield devkitc.Extend('shell', app=Esp32App.SHELL)
yield devkitc.Extend('light', app=Esp32App.LIGHT)
yield devkitc.Extend('lock', app=Esp32App.LOCK)
yield devkitc.Extend('bridge', app=Esp32App.BRIDGE)
yield devkitc.Extend('temperature-measurement', app=Esp32App.TEMPERATURE_MEASUREMENT)
yield devkitc.Extend('temperature-measurement-rpc', app=Esp32App.TEMPERATURE_MEASUREMENT, enable_rpcs=True)
yield esp32_target.Extend('qemu-tests', board=Esp32Board.QEMU, app=Esp32App.TESTS)
def Efr32Targets():
efr_target = Target('efr32', Efr32Builder)
board_targets = [
efr_target.Extend('brd4161a', board=Efr32Board.BRD4161A),
efr_target.Extend('brd4163a', board=Efr32Board.BRD4163A).GlobBlacklist(
'only user requested'),
efr_target.Extend('brd4164a', board=Efr32Board.BRD4164A).GlobBlacklist(
'only user requested'),
efr_target.Extend('brd4166a', board=Efr32Board.BRD4166A).GlobBlacklist(
'only user requested'),
efr_target.Extend('brd4170a', board=Efr32Board.BRD4170A).GlobBlacklist(
'only user requested'),
efr_target.Extend('brd4186a', board=Efr32Board.BRD4186A).GlobBlacklist(
'only user requested'),
efr_target.Extend('brd4187a', board=Efr32Board.BRD4187A).GlobBlacklist(
'only user requested'),
efr_target.Extend('brd4304a', board=Efr32Board.BRD4304A).GlobBlacklist(
'only user requested')
]
builder = VariantBuilder()
for board_target in board_targets:
builder.targets.append(board_target.Extend(
'window-covering', app=Efr32App.WINDOW_COVERING))
builder.targets.append(board_target.Extend(
'switch', app=Efr32App.SWITCH))
builder.targets.append(board_target.Extend(
'unit-test', app=Efr32App.UNIT_TEST))
builder.targets.append(
board_target.Extend('light', app=Efr32App.LIGHT))
builder.targets.append(board_target.Extend('lock', app=Efr32App.LOCK))
# Possible build variants. Note that number of potential
# builds is exponential here
builder.AppendVariant(name="rpc", validator=AcceptNameWithSubstrings(
['-light', '-lock']), enable_rpcs=True)
builder.AppendVariant(name="with-ota-requestor", enable_ota_requestor=True)
builder.WhitelistVariantNameForGlob('rpc')
for target in builder.AllVariants():
yield target
def NrfTargets():
target = Target('nrf', NrfConnectBuilder)
yield target.Extend('native-posix-64-tests', board=NrfBoard.NATIVE_POSIX_64, app=NrfApp.UNIT_TESTS)
targets = [
target.Extend('nrf5340dk', board=NrfBoard.NRF5340DK),
target.Extend('nrf52840dk', board=NrfBoard.NRF52840DK),
]
# Enable nrf52840dongle for all-clusters and lighting app only
yield target.Extend('nrf52840dongle-all-clusters', board=NrfBoard.NRF52840DONGLE, app=NrfApp.ALL_CLUSTERS)
yield target.Extend('nrf52840dongle-light', board=NrfBoard.NRF52840DONGLE, app=NrfApp.LIGHT)
for target in targets:
yield target.Extend('all-clusters', app=NrfApp.ALL_CLUSTERS)
yield target.Extend('lock', app=NrfApp.LOCK)
yield target.Extend('light', app=NrfApp.LIGHT)
yield target.Extend('shell', app=NrfApp.SHELL)
yield target.Extend('pump', app=NrfApp.PUMP)
yield target.Extend('pump-controller', app=NrfApp.PUMP_CONTROLLER)
rpc = target.Extend('light-rpc', app=NrfApp.LIGHT, enable_rpcs=True)
if '-nrf5340dk-' in rpc.name:
rpc = rpc.GlobBlacklist(
'Compile failure due to pw_build args not forwarded to proto compiler. '
'https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/66760')
yield rpc
def AndroidTargets():
target = Target('android', AndroidBuilder)
yield target.Extend('arm-chip-tool', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TOOL)
yield target.Extend('arm64-chip-tool', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TOOL)
yield target.Extend('x64-chip-tool', board=AndroidBoard.X64, app=AndroidApp.CHIP_TOOL)
yield target.Extend('x86-chip-tool', board=AndroidBoard.X86, app=AndroidApp.CHIP_TOOL)
yield target.Extend('arm64-chip-test', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TEST)
yield target.Extend('androidstudio-arm-chip-tool', board=AndroidBoard.AndroidStudio_ARM, app=AndroidApp.CHIP_TOOL)
yield target.Extend('androidstudio-arm64-chip-tool', board=AndroidBoard.AndroidStudio_ARM64, app=AndroidApp.CHIP_TOOL)
yield target.Extend('androidstudio-x86-chip-tool', board=AndroidBoard.AndroidStudio_X86, app=AndroidApp.CHIP_TOOL)
yield target.Extend('androidstudio-x64-chip-tool', board=AndroidBoard.AndroidStudio_X64, app=AndroidApp.CHIP_TOOL)
yield target.Extend('arm64-chip-tvserver', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TVServer)
yield target.Extend('arm-chip-tvserver', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TVServer)
yield target.Extend('x86-chip-tvserver', board=AndroidBoard.X86, app=AndroidApp.CHIP_TVServer)
yield target.Extend('x64-chip-tvserver', board=AndroidBoard.X64, app=AndroidApp.CHIP_TVServer)
yield target.Extend('arm64-chip-tv-casting-app', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TV_CASTING_APP)
yield target.Extend('arm-chip-tv-casting-app', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TV_CASTING_APP)
def MbedTargets():
target = Target('mbed', MbedBuilder)
targets = [
target.Extend('CY8CPROTO_062_4343W',
board=MbedBoard.CY8CPROTO_062_4343W),
]
app_targets = []
for target in targets:
app_targets.append(target.Extend('lock', app=MbedApp.LOCK))
app_targets.append(target.Extend('light', app=MbedApp.LIGHT))
app_targets.append(target.Extend(
'all-clusters', app=MbedApp.ALL_CLUSTERS))
app_targets.append(target.Extend('pigweed', app=MbedApp.PIGWEED))
app_targets.append(target.Extend('shell', app=MbedApp.SHELL))
for target in app_targets:
yield target.Extend('release', profile=MbedProfile.RELEASE)
yield target.Extend('develop', profile=MbedProfile.DEVELOP).GlobBlacklist(
'Compile only for debugging purpose - '
'https://os.mbed.com/docs/mbed-os/latest/program-setup/build-profiles-and-rules.html')
yield target.Extend('debug', profile=MbedProfile.DEBUG).GlobBlacklist(
'Compile only for debugging purpose - '
'https://os.mbed.com/docs/mbed-os/latest/program-setup/build-profiles-and-rules.html')
def InfineonTargets():
target = Target('infineon', InfineonBuilder)
yield target.Extend('p6-lock', board=InfineonBoard.P6BOARD, app=InfineonApp.LOCK)
yield target.Extend('p6-all-clusters', board=InfineonBoard.P6BOARD, app=InfineonApp.ALL_CLUSTERS)
yield target.Extend('p6-light', board=InfineonBoard.P6BOARD, app=InfineonApp.LIGHT)
def AmebaTargets():
ameba_target = Target('ameba', AmebaBuilder)
yield ameba_target.Extend('amebad-all-clusters', board=AmebaBoard.AMEBAD, app=AmebaApp.ALL_CLUSTERS)
yield ameba_target.Extend('amebad-light', board=AmebaBoard.AMEBAD, app=AmebaApp.LIGHT)
yield ameba_target.Extend('amebad-pigweed', board=AmebaBoard.AMEBAD, app=AmebaApp.PIGWEED)
def K32WTargets():
target = Target('k32w', K32WBuilder)
yield target.Extend('light-ota-se', app=K32WApp.LIGHT, release=True, disable_ble=True, se05x=True).GlobBlacklist("Only on demand build")
yield target.Extend('light-release-no-ota', app=K32WApp.LIGHT, tokenizer=True, disable_ota=True, release=True)
yield target.Extend('shell-release', app=K32WApp.SHELL, release=True)
yield target.Extend('lock-release', app=K32WApp.LOCK, release=True)
yield target.Extend('lock-low-power-release', app=K32WApp.LOCK,
low_power=True, release=True).GlobBlacklist("Only on demand build")
def cc13x2x7_26x2x7Targets():
target = Target('cc13x2x7_26x2x7', cc13x2x7_26x2x7Builder)
yield target.Extend('lock-ftd', app=cc13x2x7_26x2x7App.LOCK, openthread_ftd=True)
yield target.Extend('lock-mtd', app=cc13x2x7_26x2x7App.LOCK, openthread_ftd=False)
yield target.Extend('pump', app=cc13x2x7_26x2x7App.PUMP)
yield target.Extend('pump-controller', app=cc13x2x7_26x2x7App.PUMP_CONTROLLER)
yield target.Extend('all-clusters', app=cc13x2x7_26x2x7App.ALL_CLUSTERS)
yield target.Extend('shell', app=cc13x2x7_26x2x7App.SHELL)
def Cyw30739Targets():
yield Target('cyw30739-cyw930739m2evb_01-light', Cyw30739Builder,
board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LIGHT)
yield Target('cyw30739-cyw930739m2evb_01-lock', Cyw30739Builder,
board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LOCK)
yield Target('cyw30739-cyw930739m2evb_01-ota-requestor', Cyw30739Builder,
board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.OTA_REQUESTOR).GlobBlacklist(
"Running out of XIP flash space")
yield Target('cyw30739-cyw930739m2evb_01-ota-requestor-no-progress-logging', Cyw30739Builder,
board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.OTA_REQUESTOR, progress_logging=False)
def QorvoTargets():
target = Target('qpg', QpgBuilder)
yield target.Extend('lock', board=QpgBoard.QPG6105, app=QpgApp.LOCK)
yield target.Extend('light', board=QpgBoard.QPG6105, app=QpgApp.LIGHT)
yield target.Extend('shell', board=QpgBoard.QPG6105, app=QpgApp.SHELL)
yield target.Extend('persistent-storage', board=QpgBoard.QPG6105, app=QpgApp.PERSISTENT_STORAGE)
def TizenTargets():
# Possible build variants.
# NOTE: The number of potential builds is exponential here.
builder = VariantBuilder()
builder.AppendVariant(name="no-ble", enable_ble=False)
builder.AppendVariant(name="no-wifi", enable_wifi=False)
builder.AppendVariant(name="asan", use_asan=True)
target = Target('tizen-arm', TizenBuilder, board=TizenBoard.ARM)
builder.targets.append(target.Extend('light', app=TizenApp.LIGHT))
for target in builder.AllVariants():
yield target
def Bl602Targets():
target = Target('bl602', Bl602Builder)
yield target.Extend('light', board=Bl602Board.BL602BOARD, app=Bl602App.LIGHT)
def IMXTargets():
target = Target('imx', IMXBuilder)
yield target.Extend('chip-tool', app=IMXApp.CHIP_TOOL)
yield target.Extend('lighting-app', app=IMXApp.LIGHT)
yield target.Extend('thermostat', app=IMXApp.THERMOSTAT)
yield target.Extend('all-clusters-app', app=IMXApp.ALL_CLUSTERS)
yield target.Extend('ota-provider-app', app=IMXApp.OTA_PROVIDER)
yield target.Extend('chip-tool-release', app=IMXApp.CHIP_TOOL, release=True)
yield target.Extend('lighting-app-release', app=IMXApp.LIGHT, release=True)
yield target.Extend('thermostat-release', app=IMXApp.THERMOSTAT, release=True)
yield target.Extend('all-clusters-app-release', app=IMXApp.ALL_CLUSTERS, release=True)
yield target.Extend('ota-provider-app-release', app=IMXApp.OTA_PROVIDER, release=True)
ALL = []
target_generators = [
HostTargets(),
Esp32Targets(),
Efr32Targets(),
NrfTargets(),
AndroidTargets(),
MbedTargets(),
InfineonTargets(),
AmebaTargets(),
K32WTargets(),
cc13x2x7_26x2x7Targets(),
Cyw30739Targets(),
QorvoTargets(),
TizenTargets(),
Bl602Targets(),
IMXTargets(),
]
for generator in target_generators:
for target in generator:
ALL.append(target)
# Simple targets added one by one
ALL.append(Target('telink-tlsr9518adk80d-light', TelinkBuilder,
board=TelinkBoard.TLSR9518ADK80D, app=TelinkApp.LIGHT))
ALL.append(Target('telink-tlsr9518adk80d-light-switch', TelinkBuilder,
board=TelinkBoard.TLSR9518ADK80D, app=TelinkApp.SWITCH))
# have a consistent order overall
ALL.sort(key=lambda t: t.name)
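# Usage sketch (illustrative, not part of the original file): the sorted ALL
# list can be filtered by target name to select builds, e.g.:
#
#     release_targets = [t for t in ALL if t.name.endswith('-release')]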
| [((4433, 4455), 'itertools.combinations', 'combinations', (['items', '(2)'], {}), '(items, 2)\n', (4445, 4455), False, 'from itertools import combinations\n'), ((3092, 3130), 'os.path.join', 'os.path.join', (['output_prefix', 'self.name'], {}), '(output_prefix, self.name)\n', (3104, 3130), False, 'import os\n'), ((7412, 7443), 'builders.host.HostBoard.NATIVE.PlatformName', 'HostBoard.NATIVE.PlatformName', ([], {}), '()\n', (7441, 7443), False, 'from builders.host import HostApp, HostBoard, HostBuilder\n'), ((7492, 7520), 'builders.host.HostBoard.NATIVE.BoardName', 'HostBoard.NATIVE.BoardName', ([], {}), '()\n', (7518, 7520), False, 'from builders.host import HostApp, HostBoard, HostBuilder\n'), ((11989, 12020), 'builders.host.HostBoard.NATIVE.PlatformName', 'HostBoard.NATIVE.PlatformName', ([], {}), '()\n', (12018, 12020), False, 'from builders.host import HostApp, HostBoard, HostBuilder\n'), ((7639, 7670), 'builders.host.HostBoard.NATIVE.PlatformName', 'HostBoard.NATIVE.PlatformName', ([], {}), '()\n', (7668, 7670), False, 'from builders.host import HostApp, HostBoard, HostBuilder\n'), ((7688, 7716), 'builders.host.HostBoard.NATIVE.BoardName', 'HostBoard.NATIVE.BoardName', ([], {}), '()\n', (7714, 7716), False, 'from builders.host import HostApp, HostBoard, HostBuilder\n'), ((7720, 7747), 'builders.host.HostBoard.ARM64.BoardName', 'HostBoard.ARM64.BoardName', ([], {}), '()\n', (7745, 7747), False, 'from builders.host import HostApp, HostBoard, HostBuilder\n'), ((8418, 8449), 'builders.host.HostBoard.NATIVE.PlatformName', 'HostBoard.NATIVE.PlatformName', ([], {}), '()\n', (8447, 8449), False, 'from builders.host import HostApp, HostBoard, HostBuilder\n'), ((6445, 6485), 'itertools.combinations', 'combinations', (['ok_variants', 'variant_count'], {}), '(ok_variants, variant_count)\n', (6457, 6485), False, 'from itertools import combinations\n')] |
TRINITRONIC/musegan | src/musegan/data.py | 0a62e0303a8ff357d7f385dcc6edba76afb132b2 | """This file contains functions for loading and preprocessing pianoroll data.
"""
import logging
import numpy as np
import tensorflow.compat.v1 as tf
from musegan.config import SHUFFLE_BUFFER_SIZE, PREFETCH_SIZE
LOGGER = logging.getLogger(__name__)
# --- Data loader --------------------------------------------------------------
def load_data_from_npy(filename):
"""Load and return the training data from a npy file."""
return np.load(filename)
def load_data_from_npz(filename):
"""Load and return the training data from a npz file (sparse format)."""
with np.load(filename) as f:
data = np.zeros(f['shape'], np.bool_)
        data[tuple(f['nonzero'])] = True  # index with a tuple of arrays (a list of arrays as an index is deprecated)
return data
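# Illustrative sketch (not part of the original module): a .npz file in the
# sparse format expected above could be produced from a boolean pianoroll
# array roughly like this (file name and array shape are assumptions):
#
#     rolls = np.zeros((4, 48, 84, 5), np.bool_)
#     np.savez_compressed('train.npz', shape=rolls.shape,
#                         nonzero=np.array(rolls.nonzero()))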
def load_data(data_source, data_filename):
"""Load and return the training data."""
if data_source == 'sa':
import SharedArray as sa
return sa.attach(data_filename)
if data_source == 'npy':
return load_data_from_npy(data_filename)
if data_source == 'npz':
return load_data_from_npz(data_filename)
raise ValueError("Expect `data_source` to be one of 'sa', 'npy', 'npz'. "
"But get " + str(data_source))
# --- Dataset Utilities -------------------------------------------------------
def random_transpose(pianoroll):
"""Randomly transpose a pianoroll with [-5, 6] semitones."""
semitone = np.random.randint(-5, 6)
if semitone > 0:
pianoroll[:, semitone:, 1:] = pianoroll[:, :-semitone, 1:]
pianoroll[:, :semitone, 1:] = 0
elif semitone < 0:
pianoroll[:, :semitone, 1:] = pianoroll[:, -semitone:, 1:]
pianoroll[:, semitone:, 1:] = 0
return pianoroll
def set_pianoroll_shape(pianoroll, data_shape):
"""Set the pianoroll shape and return the pianoroll."""
pianoroll.set_shape(data_shape)
return pianoroll
def set_label_shape(label):
"""Set the label shape and return the label."""
label.set_shape([1])
return label
# --- Sampler ------------------------------------------------------------------
def get_samples(n_samples, data, labels=None, use_random_transpose=False):
"""Return some random samples of the training data."""
indices = np.random.choice(len(data), n_samples, False)
if np.issubdtype(data.dtype, np.bool_):
sample_data = data[indices] * 2. - 1.
else:
sample_data = data[indices]
if use_random_transpose:
sample_data = np.array([random_transpose(x) for x in sample_data])
if labels is None:
return sample_data
return sample_data, labels[indices]
# --- Tensorflow Dataset -------------------------------------------------------
def _gen_data(data, labels=None):
"""Data Generator."""
if labels is None:
for item in data:
if np.issubdtype(data.dtype, np.bool_):
yield item * 2. - 1.
else:
yield item
else:
for i, item in enumerate(data):
if np.issubdtype(data.dtype, np.bool_):
yield (item * 2. - 1., labels[i])
else:
yield (item, labels[i])
def get_dataset(data, labels=None, batch_size=None, data_shape=None,
use_random_transpose=False, num_threads=1):
"""Create and return a tensorflow dataset from an array."""
if labels is None:
dataset = tf.data.Dataset.from_generator(
lambda: _gen_data(data), tf.float32)
if use_random_transpose:
dataset = dataset.map(
lambda pianoroll: tf.py_func(
random_transpose, [pianoroll], tf.float32),
num_parallel_calls=num_threads)
dataset = dataset.map(lambda pianoroll: set_pianoroll_shape(
pianoroll, data_shape), num_parallel_calls=num_threads)
else:
assert len(data) == len(labels), (
"Lengths of `data` and `lables` do not match.")
dataset = tf.data.Dataset.from_generator(
lambda: _gen_data(data, labels), [tf.float32, tf.int32])
if use_random_transpose:
dataset = dataset.map(
lambda pianoroll, label: (
tf.py_func(random_transpose, [pianoroll], tf.float32),
label),
num_parallel_calls=num_threads)
dataset = dataset.map(
lambda pianoroll, label: (set_pianoroll_shape(
pianoroll, data_shape), set_label_shape(label)),
num_parallel_calls=num_threads)
dataset = dataset.shuffle(SHUFFLE_BUFFER_SIZE).repeat().batch(batch_size)
return dataset.prefetch(PREFETCH_SIZE)
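# Minimal usage sketch (illustrative; file name, batch size and thread count
# are assumptions, not values from the original project):
#
#     data = load_data('npz', 'train.npz')
#     dataset = get_dataset(data, batch_size=64, data_shape=data.shape[1:],
#                           use_random_transpose=True, num_threads=4)
#     pianoroll_batch = tf.data.make_one_shot_iterator(dataset).get_next()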
| [((221, 248), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (238, 248), False, 'import logging\n'), ((437, 454), 'numpy.load', 'np.load', (['filename'], {}), '(filename)\n', (444, 454), True, 'import numpy as np\n'), ((1379, 1403), 'numpy.random.randint', 'np.random.randint', (['(-5)', '(6)'], {}), '(-5, 6)\n', (1396, 1403), True, 'import numpy as np\n'), ((2255, 2290), 'numpy.issubdtype', 'np.issubdtype', (['data.dtype', 'np.bool_'], {}), '(data.dtype, np.bool_)\n', (2268, 2290), True, 'import numpy as np\n'), ((576, 593), 'numpy.load', 'np.load', (['filename'], {}), '(filename)\n', (583, 593), True, 'import numpy as np\n'), ((615, 645), 'numpy.zeros', 'np.zeros', (["f['shape']", 'np.bool_'], {}), "(f['shape'], np.bool_)\n", (623, 645), True, 'import numpy as np\n'), ((874, 898), 'SharedArray.attach', 'sa.attach', (['data_filename'], {}), '(data_filename)\n', (883, 898), True, 'import SharedArray as sa\n'), ((2784, 2819), 'numpy.issubdtype', 'np.issubdtype', (['data.dtype', 'np.bool_'], {}), '(data.dtype, np.bool_)\n', (2797, 2819), True, 'import numpy as np\n'), ((2968, 3003), 'numpy.issubdtype', 'np.issubdtype', (['data.dtype', 'np.bool_'], {}), '(data.dtype, np.bool_)\n', (2981, 3003), True, 'import numpy as np\n'), ((3532, 3585), 'tensorflow.compat.v1.py_func', 'tf.py_func', (['random_transpose', '[pianoroll]', 'tf.float32'], {}), '(random_transpose, [pianoroll], tf.float32)\n', (3542, 3585), True, 'import tensorflow.compat.v1 as tf\n'), ((4156, 4209), 'tensorflow.compat.v1.py_func', 'tf.py_func', (['random_transpose', '[pianoroll]', 'tf.float32'], {}), '(random_transpose, [pianoroll], tf.float32)\n', (4166, 4209), True, 'import tensorflow.compat.v1 as tf\n')] |
PushpneetSingh/Hello-world | Python/hello-world-pt-BR.py | def0f44737e02fb40063cd347e93e456658e2532 | print(u"Olá mundo!") | [] |
saidulislam/flask-bootcamp-2 | 02-static-templates-files/02_html_template.py | 4ba8f5e012aa0159275ab264f0247815dcf635e6 | from flask import Flask
app = Flask(__name__)
@app.route("/")
def homepage():
return "Paws Rescue Center 🐾"
@app.route("/about")
def about():
return """We are a non-profit organization working as an animal rescue center.
We aim to help you connect with purrfect furbaby for you!
The animals you find at our website are rescue animals which have been rehabilitated.
Our mission is to promote the ideology of "Adopt, don't Shop"! """
if __name__ == "__main__":
app.run(debug=True) | [] |
arpastrana/compas | src/compas/datastructures/mesh/bbox.py | ed677a162c14dbe562c82d72f370279259faf7da | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compas.geometry import bounding_box
from compas.geometry import bounding_box_xy
__all__ = [
'mesh_bounding_box',
'mesh_bounding_box_xy',
]
def mesh_bounding_box(mesh):
"""Compute the (axis aligned) bounding box of a mesh.
Parameters
----------
mesh : compas.datastructures.Mesh
The mesh data structure.
Returns
-------
list of point
The 8 corners of the bounding box of the mesh.
Examples
--------
>>> mesh_bounding_box(mesh)
[[0.0, 0.0, 0.0], [10.0, 0.0, 0.0], [10.0, 10.0, 0.0], [0.0, 10.0, 0.0], [0.0, 0.0, 0.0], [10.0, 0.0, 0.0], [10.0, 10.0, 0.0], [0.0, 10.0, 0.0]]
"""
xyz = mesh.vertices_attributes('xyz', keys=list(mesh.vertices()))
return bounding_box(xyz)
def mesh_bounding_box_xy(mesh):
"""Compute the (axis aligned) bounding box of a projection of the mesh in the XY plane.
Parameters
----------
mesh : compas.datastructures.Mesh
The mesh data structure.
Returns
-------
list of point
The 4 corners of the bounding polygon in the XY plane.
Examples
--------
>>> mesh_bounding_box_xy(mesh)
[[0.0, 0.0, 0.0], [10.0, 0.0, 0.0], [10.0, 10.0, 0.0], [0.0, 10.0, 0.0]]
"""
xyz = mesh.vertices_attributes('xyz')
return bounding_box_xy(xyz)
# ==============================================================================
# Main
# ==============================================================================
if __name__ == '__main__':
import doctest
import compas
from compas.datastructures import Mesh
mesh = Mesh.from_obj(compas.get('faces.obj'))
doctest.testmod()
| [((852, 869), 'compas.geometry.bounding_box', 'bounding_box', (['xyz'], {}), '(xyz)\n', (864, 869), False, 'from compas.geometry import bounding_box\n'), ((1405, 1425), 'compas.geometry.bounding_box_xy', 'bounding_box_xy', (['xyz'], {}), '(xyz)\n', (1420, 1425), False, 'from compas.geometry import bounding_box_xy\n'), ((1762, 1779), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (1777, 1779), False, 'import doctest\n'), ((1732, 1755), 'compas.get', 'compas.get', (['"""faces.obj"""'], {}), "('faces.obj')\n", (1742, 1755), False, 'import compas\n')] |
Lars-H/federated_crop | crop/source_selection/__init__.py | 8e936926462aa5df5a9b8e6b42b061a3623fddf4 | from naive import NaiveSourceSelection
from star_based import StarBasedSourceSelection
from utils import AskSourceSelector, HybridSourceSelector, StatSourceSelector
from charset_selector import CharSet_Selector | [] |
Mhaiyang/iccv | base3_plus.py | 04a8ee52c2323d7ff5cdf03c0be1466e8180d2eb | """
@Time : 201/21/19 10:47
@Author : TaylorMei
@Email : [email protected]
@Project : iccv
@File : base3_plus.py
@Function:
""" | [] |
oknuutti/hw_visnav | visnav/algo/orig/tools.py | 5254b8bdd146548413554c00e6e76264a2540e8b | import math
import time
import numpy as np
import numba as nb
import quaternion # adds to numpy # noqa # pylint: disable=unused-import
import sys
import scipy
from astropy.coordinates import SkyCoord
from scipy.interpolate import RectBivariateSpline
from scipy.interpolate import NearestNDInterpolator
# from scipy.spatial.ckdtree import cKDTree
from visnav.settings import *
class PositioningException(Exception):
pass
class Stopwatch:
# from https://www.safaribooksonline.com/library/view/python-cookbook-3rd/9781449357337/ch13s13.html
def __init__(self, elapsed=0.0, func=time.perf_counter):
self._elapsed = elapsed
self._func = func
self._start = None
@property
def elapsed(self):
return self._elapsed + ((self._func() - self._start) if self.running else 0)
def start(self):
if self._start is not None:
raise RuntimeError('Already started')
self._start = self._func()
def stop(self):
if self._start is None:
raise RuntimeError('Not started')
end = self._func()
self._elapsed += end - self._start
self._start = None
def reset(self):
self._elapsed = 0.0
@property
def running(self):
return self._start is not None
def __enter__(self):
self.start()
return self
def __exit__(self, *args):
self.stop()
def sphere_angle_radius(loc, r):
return np.arcsin(r / np.linalg.norm(loc, axis=1))
def dist_across_and_along_vect(A, b):
""" A: array of vectors, b: axis vector """
lat, lon, r = cartesian2spherical(*b)
q = ypr_to_q(lat, lon, 0).conj()
R = quaternion.as_rotation_matrix(q)
Ab = R.dot(A.T).T
d = Ab[:, 0:1]
r = np.linalg.norm(Ab[:, 1:3], axis=1).reshape((-1, 1))
return r, d
def point_vector_dist(A, B, dist_along_v=False):
""" A: point, B: vector """
# (length of b)**2
normB2 = (B ** 2).sum(-1).reshape((-1, 1))
# a dot b vector product (project a on b but also times length of b)
diagAB = (A * B).sum(-1).reshape((-1, 1))
# A projected along B (projection = a dot b/||b|| * b/||b||)
A_B = (diagAB / normB2) * B
# vector from projected A to A, it is perpendicular to B
AB2A = A - A_B
# diff vector lengths
normD = np.sqrt((AB2A ** 2).sum(-1)).reshape((-1, 1))
return (normD, diagAB / np.sqrt(normB2)) if dist_along_v else normD
def sc_asteroid_max_shift_error(A, B):
"""
Calculate max error between two set of vertices when projected to camera,
A = estimated vertex positions
B = true vertex positions
Error is a vector perpendicular to B, i.e. A - A||
"""
# diff vector lengths
normD = point_vector_dist(A, B)
# max length of diff vectors
return np.max(normD)
@nb.njit(nb.f8[:](nb.f8[:], nb.f8[:]))
def cross3d(left, right):
# for short vectors cross product is faster in pure python than with numpy.cross
x = ((left[1] * right[2]) - (left[2] * right[1]))
y = ((left[2] * right[0]) - (left[0] * right[2]))
z = ((left[0] * right[1]) - (left[1] * right[0]))
return np.array((x, y, z))
def normalize_v(v):
norm = np.linalg.norm(v)
return v / norm if norm != 0 else v
@nb.njit(nb.types.f8[:](nb.types.f8[:]))
def normalize_v_f8(v):
norm = np.linalg.norm(v)
return v / norm if norm != 0 else v
def generate_field_fft(shape, sd=(0.33, 0.33, 0.34), len_sc=(0.5, 0.5 / 4, 0.5 / 16)):
from visnav.algo.image import ImageProc
sds = sd if getattr(sd, '__len__', False) else [sd]
len_scs = len_sc if getattr(len_sc, '__len__', False) else [len_sc]
assert len(shape) == 2, 'only 2d shapes are valid'
assert len(sds) == len(len_scs), 'len(sd) differs from len(len_sc)'
n = np.prod(shape)
kernel = np.sum(
np.stack([1 / len_sc * sd * n * ImageProc.gkern2d(shape, 1 / len_sc) for sd, len_sc in zip(sds, len_scs)],
axis=2), axis=2)
    f_img = np.random.normal(0, 1, shape) + 1j * np.random.normal(0, 1, shape)
f_img = np.real(np.fft.ifft2(np.fft.fftshift(kernel * f_img)))
return f_img
@nb.njit(nb.types.f8[:](nb.types.f8[:], nb.types.f8[:], nb.types.f8[:]))
def _surf_normal(x1, x2, x3):
# a, b, c = np.array(x1, dtype=np.float64), np.array(x2, dtype=np.float64), np.array(x3, dtype=np.float64)
return normalize_v_f8(cross3d(x2-x1, x3-x1))
def surf_normal(x1, x2, x3):
a, b, c = np.array(x1, dtype=np.float64), np.array(x2, dtype=np.float64), np.array(x3, dtype=np.float64)
return _surf_normal(a, b, c)
# return normalize_v_f8(cross3d(b-a, c-a))
def vector_projection(a, b):
return a.dot(b) / b.dot(b) * b
def vector_rejection(a, b):
return a - vector_projection(a, b)
def angle_between_v(v1, v2):
# Notice: only returns angles between 0 and 180 deg
try:
v1 = np.reshape(v1, (1, -1))
v2 = np.reshape(v2, (-1, 1))
n1 = v1 / np.linalg.norm(v1)
n2 = v2 / np.linalg.norm(v2)
cos_angle = n1.dot(n2)
except TypeError as e:
raise Exception('Bad vectors:\n\tv1: %s\n\tv2: %s' % (v1, v2)) from e
return math.acos(np.clip(cos_angle, -1, 1))
def angle_between_v_mx(a, B, normalize=True):
Bn = B / np.linalg.norm(B, axis=1).reshape((-1, 1)) if normalize else B
an = normalize_v(a).reshape((-1, 1)) if normalize else a
return np.arccos(np.clip(Bn.dot(an), -1.0, 1.0))
def angle_between_mx(A, B):
return angle_between_rows(A, B)
def angle_between_rows(A, B, normalize=True):
assert A.shape[1] == 3 and B.shape[1] == 3, 'matrices need to be of shape (n, 3) and (m, 3)'
if A.shape[0] == B.shape[0]:
# from https://stackoverflow.com/questions/50772176/calculate-the-angle-between-the-rows-of-two-matrices-in-numpy/50772253
cos_angles = np.einsum('ij,ij->i', A, B)
if normalize:
p2 = np.einsum('ij,ij->i', A, A)
p3 = np.einsum('ij,ij->i', B, B)
cos_angles /= np.sqrt(p2 * p3)
else:
if normalize:
A = A / np.linalg.norm(A, axis=1).reshape((-1, 1))
B = B / np.linalg.norm(B, axis=1).reshape((-1, 1))
cos_angles = B.dot(A.T)
return np.arccos(np.clip(cos_angles, -1.0, 1.0))
def rand_q(angle):
r = normalize_v(np.random.normal(size=3))
return angleaxis_to_q(np.hstack((angle, r)))
def angle_between_q(q1, q2):
# from https://chrischoy.github.io/research/measuring-rotation/
qd = q1.conj() * q2
return abs(wrap_rads(2 * math.acos(qd.normalized().w)))
def angle_between_q_arr(q1, q2):
qd = quaternion.as_float_array(q1.conj() * q2)
qd = qd / np.linalg.norm(qd, axis=1).reshape((-1, 1))
return np.abs(wrap_rads(2 * np.arccos(qd[:, 0])))
def angle_between_ypr(ypr1, ypr2):
q1 = ypr_to_q(*ypr1)
q2 = ypr_to_q(*ypr2)
return angle_between_q(q1, q2)
def distance_mx(A, B):
assert A.shape[1] == B.shape[1], 'matrices must have same amount of columns'
k = A.shape[1]
O = np.repeat(A.reshape((-1, 1, k)), B.shape[0], axis=1) - np.repeat(B.reshape((1, -1, k)), A.shape[0], axis=0)
D = np.linalg.norm(O, axis=2)
return D
def q_to_unitbase(q):
U0 = quaternion.as_quat_array([[0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1.]])
Uq = q * U0 * q.conj()
return quaternion.as_float_array(Uq)[:, 1:]
def equatorial_to_ecliptic(ra, dec):
""" translate from equatorial ra & dec to ecliptic ones """
sc = SkyCoord(ra, dec, unit='deg', frame='icrs', obstime='J2000') \
.transform_to('barycentrictrueecliptic')
return sc.lat.value, sc.lon.value
def q_to_angleaxis(q, compact=False):
theta = math.acos(np.clip(q.w, -1, 1)) * 2.0
v = normalize_v(np.array([q.x, q.y, q.z]))
if compact:
return theta * v
else:
return np.array((theta,) + tuple(v))
def angleaxis_to_q(rv):
""" first angle, then axis """
if len(rv) == 4:
theta = rv[0]
v = normalize_v(np.array(rv[1:]))
elif len(rv) == 3:
theta = math.sqrt(sum(x ** 2 for x in rv))
v = np.array(rv) / (1 if theta == 0 else theta)
else:
raise Exception('Invalid angle-axis vector: %s' % (rv,))
w = math.cos(theta / 2)
v = v * math.sin(theta / 2)
return np.quaternion(w, *v).normalized()
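# Illustrative round-trip check (not part of the original module):
# q_to_angleaxis should invert angleaxis_to_q for angles in (0, pi).
def _demo_angleaxis_roundtrip():
    q = angleaxis_to_q([0.8, 0, 2, 0])  # 0.8 rad about the (unnormalized) y-axis
    theta, x, y, z = q_to_angleaxis(q)
    assert np.isclose(theta, 0.8) and np.allclose((x, y, z), (0, 1, 0))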
def ypr_to_q(lat, lon, roll):
# Tait-Bryan angles, aka yaw-pitch-roll, nautical angles, cardan angles
# intrinsic euler rotations z-y'-x'', pitch=-lat, yaw=lon
return (
np.quaternion(math.cos(lon / 2), 0, 0, math.sin(lon / 2))
* np.quaternion(math.cos(-lat / 2), 0, math.sin(-lat / 2), 0)
* np.quaternion(math.cos(roll / 2), math.sin(roll / 2), 0, 0)
)
def eul_to_q(angles, order='xyz', reverse=False):
assert len(angles) == len(order), 'len(angles) != len(order)'
q = quaternion.one
idx = {'x': 0, 'y': 1, 'z': 2}
for angle, axis in zip(angles, order):
w = math.cos(angle / 2)
v = [0, 0, 0]
v[idx[axis]] = math.sin(angle / 2)
dq = np.quaternion(w, *v)
q = (dq * q) if reverse else (q * dq)
return q
def q_to_ypr(q):
# from https://math.stackexchange.com/questions/687964/getting-euler-tait-bryan-angles-from-quaternion-representation
q0, q1, q2, q3 = quaternion.as_float_array(q)
roll = np.arctan2(q2 * q3 + q0 * q1, .5 - q1 ** 2 - q2 ** 2)
lat = -np.arcsin(np.clip(-2 * (q1 * q3 - q0 * q2), -1, 1))
lon = np.arctan2(q1 * q2 + q0 * q3, .5 - q2 ** 2 - q3 ** 2)
return lat, lon, roll
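# Illustrative round-trip check (not part of the original module): q_to_ypr
# should invert ypr_to_q for lat in (-pi/2, pi/2) and lon, roll in (-pi, pi).
def _demo_ypr_roundtrip():
    lat, lon, roll = 0.3, -1.2, 0.7
    assert np.allclose(q_to_ypr(ypr_to_q(lat, lon, roll)), (lat, lon, roll))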
def mean_q(qs, ws=None):
"""
    Returns a (weighted) mean of a set of quaternions.
    The idea is to rotate a bit in the direction of each new quaternion, away
    from the combined rotation accumulated so far.
    NOTE: not tested properly; might not return the same mean quaternion if
    the order of the inputs is changed.
"""
wtot = 0
qtot = quaternion.one
for q, w in zip(qs, np.ones((len(qs),)) if ws is None else ws):
ddaa = q_to_angleaxis(qtot.conj() * q)
ddaa[0] = wrap_rads(ddaa[0]) * w / (w + wtot)
qtot = angleaxis_to_q(ddaa) * qtot
wtot += w
return qtot
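# Illustrative sanity check (the docstring above notes mean_q is not tested
# properly): for two rotations about a common axis, the unweighted mean
# should be the halfway rotation.
def _demo_mean_q():
    qm = mean_q([angleaxis_to_q([0.2, 0, 0, 1]), angleaxis_to_q([0.6, 0, 0, 1])])
    assert np.allclose(quaternion.as_float_array(qm),
                quaternion.as_float_array(angleaxis_to_q([0.4, 0, 0, 1])))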
def q_times_v(q, v):
qv = np.quaternion(0, *v)
qv2 = q * qv * q.conj()
return np.array([qv2.x, qv2.y, qv2.z])
def q_times_mx(q, mx):
qqmx = q * mx2qmx(mx) * q.conj()
aqqmx = quaternion.as_float_array(qqmx)
return aqqmx[:, 1:]
def mx2qmx(mx):
qmx = np.zeros((mx.shape[0], 4))
qmx[:, 1:] = mx
return quaternion.as_quat_array(qmx)
def wrap_rads(a):
return (a + math.pi) % (2 * math.pi) - math.pi
def wrap_degs(a):
return (a + 180) % 360 - 180
def eccentric_anomaly(eccentricity, mean_anomaly, tol=1e-6):
# from http://www.jgiesen.de/kepler/kepler.html
E = mean_anomaly if eccentricity < 0.8 else math.pi
    F = E - eccentricity * math.sin(mean_anomaly) - mean_anomaly
for i in range(30):
if abs(F) < tol:
break
E = E - F / (1.0 - eccentricity * math.cos(E))
F = E - eccentricity * math.sin(E) - mean_anomaly
return round(E / tol) * tol
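# Illustrative check (not part of the original module): the returned E should
# satisfy Kepler's equation E - e*sin(E) = M to within roughly the tolerance.
def _demo_eccentric_anomaly(e=0.3, M=1.0, tol=1e-6):
    E = eccentric_anomaly(e, M, tol)
    assert abs(E - e * math.sin(E) - M) < 10 * tol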
def solar_elongation(ast_v, sc_q):
sco_x, sco_y, sco_z = q_to_unitbase(sc_q)
if USE_ICRS:
sc = SkyCoord(x=ast_v[0], y=ast_v[1], z=ast_v[2], frame='icrs',
unit='m', representation_type='cartesian', obstime='J2000') \
.transform_to('hcrs') \
.represent_as('cartesian')
ast_v = np.array([sc.x.value, sc.y.value, sc.z.value])
# angle between camera axis and the sun, 0: right ahead, pi: behind
elong = angle_between_v(-ast_v, sco_x)
# direction the sun is at when looking along camera axis
nvec = np.cross(sco_x, ast_v)
direc = angle_between_v(nvec, sco_z)
# decide if direction needs to be negative or not
if np.cross(nvec, sco_z).dot(sco_x) < 0:
direc = -direc
return elong, direc
def find_nearest_lesser(array, value):
I = np.where(array < value)
idx = (np.abs(array - value)).argmin()
return array[I[idx]], I[idx]
def find_nearest_greater(array, value):
I = np.where(array > value)
idx = (np.abs(array - value)).argmin()
return array[I[idx]], I[idx]
def find_nearest(array, value):
idx = (np.abs(array - value)).argmin()
return array[idx], idx
def find_nearest_arr(array, value, ord=None, fun=None):
diff = array - value
idx = np.linalg.norm(diff if fun is None else list(map(fun, diff)), ord=ord, axis=1).argmin()
return array[idx], idx
def find_nearest_n(array, value, r, ord=None, fun=None):
diff = array - value
d = np.linalg.norm(diff if fun is None else list(map(fun, diff)), ord=ord, axis=1)
idxs = np.where(d < r)
return idxs[0]
def find_nearest_each(haystack, needles, ord=None):
assert len(haystack.shape) == 2 and len(needles.shape) == 2 and haystack.shape[1] == needles.shape[1], \
'wrong shapes for haystack and needles, %s and %s, respectively' % (haystack.shape, needles.shape)
c = haystack.shape[1]
diff_mx = np.repeat(needles.reshape((-1, 1, c)), haystack.shape[0], axis=1) - np.repeat(
haystack.reshape((1, -1, c)), needles.shape[0], axis=0)
norm_mx = np.linalg.norm(diff_mx, axis=2, ord=ord)
idxs = norm_mx.argmin(axis=1)
return haystack[idxs], idxs
def cartesian2spherical(x, y, z):
r = math.sqrt(x ** 2 + y ** 2 + z ** 2)
theta = math.acos(z / r)
phi = math.atan2(y, x)
lat = math.pi / 2 - theta
lon = phi
return np.array([lat, lon, r])
def spherical2cartesian(lat, lon, r):
theta = math.pi / 2 - lat
phi = lon
x = r * math.sin(theta) * math.cos(phi)
y = r * math.sin(theta) * math.sin(phi)
z = r * math.cos(theta)
return np.array([x, y, z])
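# Illustrative round-trip check (not part of the original module):
def _demo_spherical_roundtrip():
    v = np.array([1.0, -2.0, 0.5])
    assert np.allclose(spherical2cartesian(*cartesian2spherical(*v)), v)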
def spherical2cartesian_arr(A, r=None):
theta = math.pi / 2 - A[:, 0]
phi = A[:, 1]
r = (r or A[:, 2])
x = r * np.sin(theta)
y = x * np.sin(phi)
x *= np.cos(phi)
# x = r * np.sin(theta) * np.cos(phi)
# y = r * np.sin(theta) * np.sin(phi)
z = r * np.cos(theta)
return np.vstack([x, y, z]).T
def discretize_v(v, tol=None, lat_range=(-math.pi / 2, math.pi / 2), points=None):
"""
    Simulate a feature database by returning the closest light direction
    within the given tolerance.
"""
if tol is not None and points is not None or tol is None and points is None:
assert False, 'Give either tol or points'
elif tol is not None:
points = bf2_lat_lon(tol, lat_range=lat_range)
lat, lon, r = cartesian2spherical(*v)
(nlat, nlon), idx = find_nearest_arr(
points,
np.array((lat, lon)),
ord=2,
fun=wrap_rads,
)
ret = spherical2cartesian(nlat, nlon, r)
return ret, idx
def discretize_q(q, tol=None, lat_range=(-math.pi / 2, math.pi / 2), points=None):
"""
    Simulate a feature database by returning the closest lat & roll within the
    given tolerance, with lon set to zero as the feature detectors are
    rotation invariant (in opengl coords).
"""
if tol is not None and points is not None or tol is None and points is None:
assert False, 'Give either tol or points'
elif tol is not None:
points = bf2_lat_lon(tol, lat_range=lat_range)
lat, lon, roll = q_to_ypr(q)
(nlat, nroll), idx = find_nearest_arr(
points,
np.array((lat, roll)),
ord=2,
fun=wrap_rads,
)
nq0 = ypr_to_q(nlat, 0, nroll)
return nq0, idx
def bf_lat_lon(tol, lat_range=(-math.pi / 2, math.pi / 2)):
# tol**2 == (step/2)**2 + (step/2)**2 -- 7deg is quite nice in terms of len(lon)*len(lat) == 1260
step = math.sqrt(2) * tol
lat_steps = np.linspace(*lat_range, num=math.ceil((lat_range[1] - lat_range[0]) / step), endpoint=False)[1:]
lon_steps = np.linspace(-math.pi, math.pi, num=math.ceil(2 * math.pi / step), endpoint=False)
return lat_steps, lon_steps
def bf2_lat_lon(tol, lat_range=(-math.pi / 2, math.pi / 2)):
# tol**2 == (step/2)**2 + (step/2)**2 -- 7deg is quite nice in terms of len(lon)*len(lat) == 1260
step = math.sqrt(2) * tol
lat_steps = np.linspace(*lat_range, num=math.ceil((lat_range[1] - lat_range[0]) / step), endpoint=False)[1:]
# similar to https://www.cmu.edu/biolphys/deserno/pdf/sphere_equi.pdf
points = []
for lat in lat_steps:
Mphi = math.ceil(2 * math.pi * math.cos(lat) / step)
lon_steps = np.linspace(-math.pi, math.pi, num=Mphi, endpoint=False)
points.extend(zip([lat] * len(lon_steps), lon_steps))
return points
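# Usage sketch (illustrative): per the comment above, a tolerance of ~7 deg
# gives a reasonably sized, roughly uniform discretization of the sphere:
#
#     points = bf2_lat_lon(math.radians(7))  # list of (lat, lon) pairs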
def robust_mean(arr, discard_percentile=0.2, ret_n=False, axis=None):
J = np.logical_not(np.isnan(arr))
if axis is not None:
J = np.all(J, axis=1 if axis == 0 else 0)
if axis == 0:
arr = arr[J, :]
elif axis == 1:
arr = arr[:, J]
else:
arr = arr[J]
low = np.percentile(arr, discard_percentile, axis=axis)
high = np.percentile(arr, 100 - discard_percentile, axis=axis)
I = np.logical_and(low < arr, arr < high)
if axis is not None:
I = np.all(I, axis=1 if axis == 0 else 0)
m = np.mean(arr[:, I] if axis == 1 else arr[I], axis=axis)
return (m, np.sum(I, axis=axis)) if ret_n else m
def robust_std(arr, discard_percentile=0.2, mean=None, axis=None):
corr = 1
if mean is None:
mean, n = robust_mean(arr, discard_percentile=discard_percentile, ret_n=True, axis=axis)
corr = n / (n - 1)
return np.sqrt(robust_mean((arr - mean) ** 2, discard_percentile=discard_percentile, axis=axis) * corr)
def mv_normal(mean, cov=None, L=None, size=None):
if size is None:
final_shape = []
elif isinstance(size, (int, np.integer)):
final_shape = [size]
else:
final_shape = size
final_shape = list(final_shape[:])
final_shape.append(mean.shape[0])
if L is None and cov is None \
or L is not None and cov is not None:
raise ValueError("you must provide either cov or L (cholesky decomp result)")
if len(mean.shape) != 1:
raise ValueError("mean must be 1 dimensional")
if L is not None:
if (len(L.shape) != 2) or (L.shape[0] != L.shape[1]):
raise ValueError("L must be 2 dimensional and square")
if mean.shape[0] != L.shape[0]:
raise ValueError("mean and L must have same length")
if cov is not None:
if (len(cov.shape) != 2) or (cov.shape[0] != cov.shape[1]):
raise ValueError("cov must be 2 dimensional and square")
if mean.shape[0] != cov.shape[0]:
raise ValueError("mean and cov must have same length")
L = np.linalg.cholesky(cov)
from numpy.random import standard_normal
z = standard_normal(final_shape).reshape(mean.shape[0], -1)
x = L.dot(z).T
x += mean
x.shape = tuple(final_shape)
return x, L
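# Usage sketch (illustrative): the returned Cholesky factor can be reused to
# draw further samples from the same distribution without re-factorizing:
#
#     x1, L = mv_normal(mean, cov=cov)
#     x2, _ = mv_normal(mean, L=L)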
def point_cloud_vs_model_err(points: np.ndarray, model) -> np.ndarray:
faces = np.array([f[0] for f in model.faces], dtype='uint')
vertices = np.array(model.vertices)
errs = get_model_errors(points, vertices, faces)
return errs
# @nb.njit(nb.f8[:](nb.f8[:, :], nb.f8[:, :]), nogil=True)
@nb.njit(nb.f8(nb.f8[:, :], nb.f8[:, :]), nogil=True, cache=True)
def poly_line_intersect(poly, line):
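    # Möller-Trumbore ray/triangle intersection; returns the signed distance
    # from the far end of `line` to the intersection point along the line
    # direction (np.inf when there is no intersection)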
# extend_line = True
eps = 1e-6
none = np.inf # np.zeros(1)
v0v1 = poly[1, :] - poly[0, :]
v0v2 = poly[2, :] - poly[0, :]
dir = line[1, :] - line[0, :]
line_len = math.sqrt(np.sum(dir ** 2))
if line_len < eps:
return none
dir = dir / line_len
pvec = cross3d(dir, v0v2).ravel()
det = np.dot(v0v1, pvec)
if abs(det) < eps:
return none
# backface culling
if False and det < 0:
return none
# frontface culling
if False and det > 0:
return none
inv_det = 1.0 / det
tvec = line[0, :] - poly[0, :]
u = tvec.dot(pvec) * inv_det
if u + eps < 0 or u - eps > 1:
return none
qvec = cross3d(tvec, v0v1).ravel()
v = dir.dot(qvec) * inv_det
if v + eps < 0 or u + v - eps > 1:
return none
t = v0v2.dot(qvec) * inv_det
if True:
# return error directly
return t - line_len
else:
# return actual 3d intersect point
if not extend_line and t - eps > line_len:
return none
return line[0, :] + t * dir
# INVESTIGATE: parallel = True does not speed up at all (or marginally) for some reason even though all cores are in use
@nb.njit(nb.f8(nb.u4[:, :], nb.f8[:, :], nb.f8[:, :]), nogil=True, parallel=False, cache=True)
def intersections(faces, vertices, line):
# pts = np.zeros((10, 3))
# i = 0
min_err = np.ones(faces.shape[0]) * np.inf
for k in nb.prange(1, faces.shape[0]):
err = poly_line_intersect(vertices[faces[k, :], :], line)
min_err[k] = err
# if abs(err) < min_err:
# min_err = err
# if len(pt) == 3:
# pts[i, :] = pt
# i += 1
# if i >= pts.shape[0]:
# print('too many intersects')
# i -= 1
i = np.argmin(np.abs(min_err))
return min_err[i] # pts[0:i, :]
# @nb.jit(nb.f8[:](nb.f8[:, :], nb.f8[:, :], nb.i4[:, :]), nogil=True, parallel=False)
def get_model_errors(points, vertices, faces):
count = len(points)
show_progress(count // 10, 0)
j = 0
devs = np.empty(points.shape[0])
for i in nb.prange(count):
vx = points[i, :]
err = intersections(faces, vertices, np.array(((0, 0, 0), vx)))
if math.isinf(err): # len(pts) == 0:
print('no intersections!')
continue
if False:
idx = np.argmin([np.linalg.norm(pt - vx) for pt in pts])
err = np.linalg.norm(pts[idx]) - np.linalg.norm(vx)
devs[i] = err
if j < i // 10:
show_progress(count // 10, i // 10)
j = i // 10
return devs
def crop_model(model, cam_v, cam_q, x_fov, y_fov):
assert False, 'not implemented'
def augment_model(model, multiplier=3, length_scales=(0, 0.1, 1), sds=(1e-5, 1.6e-4, 2.4e-4)):
assert multiplier > 1 and multiplier % 1 == 0, 'multiplier must be integer and >1'
from scipy.interpolate import LinearNDInterpolator
try:
from sklearn.gaussian_process.kernels import Matern, WhiteKernel
    except ImportError:
print('Requires scikit-learn, install using "conda install scikit-learn"')
sys.exit()
points = np.array(model.vertices)
max_rng = np.max(np.ptp(points, axis=0))
# white noise to ensure positive definite covariance matrix
ls = dict(zip(length_scales, sds))
sd0 = ls.pop(0, 1e-5)
kernel = WhiteKernel(noise_level=sd0 * max_rng)
for l, s in ls.items():
kernel += s ** 2 * Matern(length_scale=l * max_rng, nu=1.5)
assert False, 'not implemented'
# TODO: how is the covariance mx constructed again?
y_cov = kernel(points)
# TODO: sample gp ??? how to tie existing points and generate the new points in between?
aug_points, L = mv_normal(points, cov=y_cov)
# TODO: how to interpolate faces?
pass
# interpolate texture
# TODO: augment texture
interp = LinearNDInterpolator(points, model.texcoords)
aug_texcoords = interp(aug_points)
data = model.as_dict()
data['faces'] = aug_faces
data['vertices'] = aug_points
data['texcoords'] = aug_texcoords
from visnav.iotools import objloader
aug_model = objloader.ShapeModel(data=data)
aug_model.recalc_norms()
return aug_model, L
def apply_noise(model, support=(None, None), L=(None, None), len_sc=SHAPE_MODEL_NOISE_LEN_SC,
noise_lv=SHAPE_MODEL_NOISE_LV['lo'], only_z=False,
tx_noise=0, tx_noise_len_sc=SHAPE_MODEL_NOISE_LEN_SC, tx_hf_noise=True):
Sv, St = support
Lv, Lt = L
inplace = noise_lv == 0 and model.texfile is None
if noise_lv > 0:
noisy_points, avg_dev, Lv = points_with_noise(points=model.vertices, support=Sv, L=Lv,
noise_lv=noise_lv, len_sc=len_sc, only_z=only_z)
else:
noisy_points, avg_dev, Lv = model.vertices, 0, None
tex = model.tex
if tx_noise > 0:
if inplace:
model.tex = np.ones(model.tex.shape)
Lt = Lv if Lt is None and tx_noise == noise_lv and tx_noise_len_sc == len_sc else Lt
tex, tx_avg_dev, Lt = texture_noise(model, support=St, L=Lt, noise_sd=tx_noise,
len_sc=tx_noise_len_sc, hf_noise=tx_hf_noise)
if inplace:
model.tex = tex
noisy_model = model
else:
data = model.as_dict()
data['vertices'] = noisy_points
if tx_noise > 0:
data['tex'] = tex
data['texfile'] = None
from visnav.iotools import objloader
noisy_model = objloader.ShapeModel(data=data)
if noise_lv > 0:
noisy_model.recalc_norms()
else:
noisy_model.normals = model.normals
return noisy_model, avg_dev, (Lv, Lt)
def texture_noise(model, support=None, L=None, noise_sd=SHAPE_MODEL_NOISE_LV['lo'],
len_sc=SHAPE_MODEL_NOISE_LEN_SC, max_rng=None, max_n=1e4, hf_noise=True):
tex = model.load_texture()
if tex is None:
print('tools.texture_noise: no texture loaded')
return [None] * 3
r = np.sqrt(max_n / np.prod(tex.shape[:2]))
ny, nx = (np.array(tex.shape[:2]) * r).astype(np.int)
n = nx * ny
tx_grid_xx, tx_grid_yy = np.meshgrid(np.linspace(0, 1, nx), np.linspace(0, 1, ny))
tx_grid = np.hstack((tx_grid_xx.reshape((-1, 1)), tx_grid_yy.reshape((-1, 1))))
support = support if support else model
points = np.array(support.vertices)
max_rng = np.max(np.ptp(points, axis=0)) if max_rng is None else max_rng
# use vertices for distances, find corresponding vertex for each pixel
y_cov = None
if L is None:
try:
from sklearn.gaussian_process.kernels import Matern, WhiteKernel
        except ImportError:
print('Requires scikit-learn, install using "conda install scikit-learn"')
sys.exit()
kernel = 1.0 * noise_sd * Matern(length_scale=len_sc * max_rng, nu=1.5) \
+ 0.5 * noise_sd * Matern(length_scale=0.1 * len_sc * max_rng, nu=1.5) \
+ WhiteKernel(
noise_level=1e-5 * noise_sd * max_rng) # white noise for positive definite covariance matrix only
# texture coordinates given so that x points left and *Y POINTS UP*
tex_img_coords = np.array(support.texcoords)
tex_img_coords[:, 1] = 1 - tex_img_coords[:, 1]
_, idxs = find_nearest_each(haystack=tex_img_coords, needles=tx_grid)
tx2vx = support.texture_to_vertex_map()
y_cov = kernel(points[tx2vx[idxs], :] - np.mean(points, axis=0))
if 0:
# for debugging distances
import matplotlib.pyplot as plt
import cv2
from visnav.algo.image import ImageProc
orig_tx = cv2.imread(os.path.join(DATA_DIR, '67p+tex.png'), cv2.IMREAD_GRAYSCALE)
gx, gy = np.gradient(points[tx2vx[idxs], :].reshape((ny, nx, 3)), axis=(1, 0))
gxy = np.linalg.norm(gx, axis=2) + np.linalg.norm(gy, axis=2)
gxy = (gxy - np.min(gxy)) / (np.max(gxy) - np.min(gxy))
grad_img = cv2.resize((gxy * 255).astype('uint8'), orig_tx.shape)
overlaid = ImageProc.merge((orig_tx, grad_img))
plt.figure(1)
plt.imshow(overlaid)
plt.show()
# sample gp
e0, L = mv_normal(np.zeros(n), cov=y_cov, L=L)
e0 = e0.reshape((ny, nx))
# interpolate for final texture
x = np.linspace(np.min(tx_grid_xx), np.max(tx_grid_xx), tex.shape[1])
y = np.linspace(np.min(tx_grid_yy), np.max(tx_grid_yy), tex.shape[0])
interp0 = RectBivariateSpline(tx_grid_xx[0, :], tx_grid_yy[:, 0], e0, kx=1, ky=1)
err0 = interp0(x, y)
if 0:
import matplotlib.pyplot as plt
import cv2
from visnav.algo.image import ImageProc
orig_tx = cv2.imread(os.path.join(DATA_DIR, '67p+tex.png'), cv2.IMREAD_GRAYSCALE)
err_ = err0 if 1 else e0
eimg = (err_ - np.min(err_)) / (np.max(err_) - np.min(err_))
eimg = cv2.resize((eimg * 255).astype('uint8'), orig_tx.shape)
overlaid = ImageProc.merge((orig_tx, eimg))
plt.figure(1)
plt.imshow(overlaid)
plt.show()
err1 = 0
if hf_noise:
e1, L = mv_normal(np.zeros(n), L=L)
e1 = e1.reshape((ny, nx))
interp1 = RectBivariateSpline(tx_grid_xx[0, :], tx_grid_yy[:, 0], e1, kx=1, ky=1)
err_coef = interp1(x, y)
lo, hi = np.min(err_coef), np.max(err_coef)
err_coef = (err_coef - lo) / (hi - lo)
len_sc = 10
err1 = generate_field_fft(tex.shape, (6 * noise_sd, 4 * noise_sd),
(len_sc / 1000, len_sc / 4500)) if hf_noise else 0
err1 *= err_coef
noisy_tex = tex + err0 + err1
noisy_tex /= np.max(noisy_tex)
if 0:
import matplotlib.pyplot as plt
plt.figure(1)
plt.imshow(noisy_tex)
plt.figure(2)
plt.imshow(err0)
plt.figure(3)
plt.imshow(err1)
plt.show()
return noisy_tex, np.std(err0 + err1), L
class NearestKernelNDInterpolator(NearestNDInterpolator):
def __init__(self, *args, k_nearest=None, kernel='gaussian', kernel_sc=None,
kernel_eps=1e-12, query_eps=0.05, max_distance=None, **kwargs):
"""
Parameters
----------
kernel : one of the following functions of distance that give weight to neighbours:
'linear': (kernel_sc/(r + kernel_eps))
'quadratic': (kernel_sc/(r + kernel_eps))**2
'cubic': (kernel_sc/(r + kernel_eps))**3
'gaussian': exp(-(r/kernel_sc)**2)
k_nearest : if given, uses k_nearest neighbours for interpolation regardless of their distances
"""
choices = ('linear', 'quadratic', 'cubic', 'gaussian')
assert kernel in choices, 'kernel must be one of %s' % (choices,)
self._tree_options = kwargs.get('tree_options', {})
super(NearestKernelNDInterpolator, self).__init__(*args, **kwargs)
if max_distance is None:
if kernel_sc is None:
d, _ = self.tree.query(self.points, k=k_nearest)
kernel_sc = np.mean(d) * k_nearest / (k_nearest - 1)
max_distance = kernel_sc * 3
assert kernel_sc is not None, 'kernel_sc need to be set'
self.kernel = kernel
self.kernel_sc = kernel_sc
self.kernel_eps = kernel_eps
self.k_nearest = k_nearest
self.max_distance = max_distance
self.query_eps = query_eps
def _linear(self, r):
if scipy.sparse.issparse(r):
return self.kernel_sc / (r + self.kernel_eps)
else:
return self.kernel_sc / (r + self.kernel_eps)
def _quadratic(self, r):
if scipy.sparse.issparse(r):
return np.power(self.kernel_sc / (r.data + self.kernel_eps), 2, out=r.data)
else:
return (self.kernel_sc / (r + self.kernel_eps)) ** 2
def _cubic(self, r):
if scipy.sparse.issparse(r):
return self.kernel_sc / (r + self.kernel_eps).power(3)
else:
return (self.kernel_sc / (r + self.kernel_eps)) ** 3
def _gaussian(self, r):
if scipy.sparse.issparse(r):
return np.exp((-r.data / self.kernel_sc) ** 2, out=r.data)
else:
return np.exp(-(r / self.kernel_sc) ** 2)
def __call__(self, *args):
"""
Evaluate interpolator at given points.
Parameters
----------
xi : ndarray of float, shape (..., ndim)
Points where to interpolate data at.
"""
from scipy.interpolate.interpnd import _ndim_coords_from_arrays
xi = _ndim_coords_from_arrays(args, ndim=self.points.shape[1])
xi = self._check_call_shape(xi)
xi = self._scale_x(xi)
r, idxs = self.tree.query(xi, self.k_nearest, eps=self.query_eps,
distance_upper_bound=self.max_distance or np.inf)
w = getattr(self, '_' + self.kernel)(r).reshape((-1, self.k_nearest, 1)) + self.kernel_eps
w /= np.sum(w, axis=1).reshape((-1, 1, 1))
        yt = np.vstack((self.values, [0]))  # if idxs[i, j] == len(values), then i:th point doesn't have j:th match
yi = np.sum(yt[idxs, :] * w, axis=1)
return yi
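# Usage sketch (illustrative; the scale value is an assumption). Note that
# k_nearest must be given, as __call__ passes it directly to the tree query:
#
#     interp = NearestKernelNDInterpolator(xyz, values, k_nearest=12,
#                                          kernel='gaussian', kernel_sc=0.05)
#     vi = interp(query_xyz)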
def points_with_noise(points, support=None, L=None, noise_lv=SHAPE_MODEL_NOISE_LV['lo'],
len_sc=SHAPE_MODEL_NOISE_LEN_SC, max_rng=None, only_z=False):
try:
from sklearn.gaussian_process.kernels import Matern, WhiteKernel
    except ImportError:
print('Requires scikit-learn, install using "conda install scikit-learn"')
sys.exit()
if support is None:
support = points # [random.sample(list(range(len(points))), min(3000,len(points)))]
n = len(support)
mean = np.mean(points, axis=0)
max_rng = np.max(np.ptp(points, axis=0)) if max_rng is None else max_rng
y_cov = None
if L is None:
kernel = 0.6 * noise_lv * Matern(length_scale=len_sc * max_rng, nu=1.5) \
+ 0.4 * noise_lv * Matern(length_scale=0.1 * len_sc * max_rng, nu=1.5) \
+ WhiteKernel(
noise_level=1e-5 * noise_lv * max_rng) # white noise for positive definite covariance matrix only
y_cov = kernel(support - mean)
# sample gp
e0, L = mv_normal(np.zeros(n), cov=y_cov, L=L)
err = np.exp(e0.astype(points.dtype)).reshape((-1, 1))
if len(err) == len(points):
full_err = err
if DEBUG:
print('using orig gp sampled err')
else:
# interpolate
sc = 0.05 * len_sc * max_rng
interp = NearestKernelNDInterpolator(support - mean, err, k_nearest=12, kernel='gaussian',
kernel_sc=sc, max_distance=sc * 6)
full_err = interp(points - mean).astype(points.dtype)
# maybe extrapolate
nanidx = tuple(np.isnan(full_err).flat)
if np.any(nanidx):
assert False, 'shouldnt happen'
# if DEBUG or not BATCH_MODE:
# print('%sx nans'%np.sum(nanidx))
# naninterp = NearestNDInterpolator(support, err)
# try:
# full_err[nanidx,] = naninterp(points[nanidx, :]).astype(points.dtype)
# except IndexError as e:
# raise IndexError('%s,%s,%s'%(err.shape, full_err.shape, points.shape)) from e
# extra high frequency noise
# white_noise = 1 if True else np.exp(np.random.normal(scale=0.2*noise_lv*max_rng, size=(len(full_err),1)))
if only_z:
add_err_z = (max_rng / 2) * (full_err - 1)
add_err = np.concatenate((np.zeros((len(full_err), 2)), add_err_z), axis=1)
noisy_points = points + add_err
devs = np.abs(noisy_points[:, 2] - points[:, 2]) / (max_rng / 2)
assert np.isclose(devs.flatten(), np.abs(full_err - 1).flatten()).all(), 'something wrong'
else:
# noisy_points = (points-mean)*full_err*white_noise +mean
# r = np.sqrt(np.sum((points - mean)**2, axis=-1)).reshape(-1, 1)
# noisy_points = (points - mean) * (1 + np.log(full_err)/r) + mean
noisy_points = (points - mean) * full_err + mean
devs = np.sqrt(np.sum((noisy_points - points) ** 2, axis=-1) / np.sum((points - mean) ** 2, axis=-1))
if DEBUG or not BATCH_MODE:
print('noise (lv=%.3f): %.3f, %.3f; avg=%.3f' % (
(noise_lv,) + tuple(np.percentile(devs, (68, 95))) + (np.mean(devs),)))
if False:
import matplotlib.pyplot as plt
plt.figure(1, figsize=(8, 8))
# plt.plot(np.concatenate((points[:,0], err0[:,0], err[:,0], points[:,0]*err[:,0])))
plt.subplot(2, 2, 1)
plt.plot(points[:, 0])
plt.title('original', fontsize=12)
plt.subplot(2, 2, 2)
plt.plot(err0[:, 0])
plt.title('norm-err', fontsize=12)
plt.subplot(2, 2, 3)
plt.plot(err[:, 0])
plt.title('exp-err', fontsize=12)
plt.subplot(2, 2, 4)
plt.plot(noisy_points[:, 0])
plt.title('noisy', fontsize=12)
plt.tight_layout()
plt.show()
assert False, 'exiting'
return noisy_points, np.mean(devs), L
def foreground_idxs(array, max_val=None):
iy, ix = np.where(array < max_val)
idxs = np.concatenate(((iy,), (ix,)), axis=0).T
return idxs
def interp2(array, x, y, max_val=None, max_dist=30, idxs=None, discard_bg=False):
assert y < array.shape[0] and x < array.shape[1], 'out of bounds %s: %s' % (array.shape, (y, x))
v = array[int(y):int(y) + 2, int(x):int(x) + 2]
xf = x - int(x)
yf = y - int(y)
w = np.array((
((1 - yf) * (1 - xf), (1 - yf) * xf),
(yf * (1 - xf), yf * xf),
))
# ignore background depths
if max_val is not None:
idx = v.reshape(1, -1) < max_val * 0.999
else:
idx = ~np.isnan(v.reshape(1, -1))
w_sum = np.sum(w.reshape(1, -1)[idx])
if w_sum > 0:
# ignore background values
val = np.sum(w.reshape(1, -1)[idx] * v.reshape(1, -1)[idx]) / w_sum
elif discard_bg:
return float('nan')
else:
# no foreground values in 2x2 matrix, find nearest foreground value
if idxs is None:
idxs = foreground_idxs(array, max_val)
fallback = len(idxs) == 0
if not fallback:
dist = np.linalg.norm(idxs - np.array((y, x)), axis=1)
i = np.argmin(dist)
val = array[idxs[i, 0], idxs[i, 1]]
# print('\n%s, %s, %s, %s, %s, %s, %s'%(v, x,y,dist[i],idxs[i,1],idxs[i,0],val))
fallback = dist[i] > max_dist
if fallback:
val = np.sum(w * v) / np.sum(w)
return val
def solve_rotation(src_q, dst_q):
""" q*src_q*q.conj() == dst_q, solve for q """
# based on http://web.cs.iastate.edu/~cs577/handouts/quaternion.pdf
# and https://en.wikipedia.org/wiki/Quaternions_and_spatial_rotation#Pairs_of_unit_quaternions_as_rotations_in_4D_space
# NOTE: not certain if works..
M = np.zeros((4, 4))
for i in range(len(src_q)):
si = src_q[i]
Pi = np.array((
(si.w, -si.x, -si.y, -si.z),
(si.x, si.w, si.z, -si.y),
(si.y, -si.z, si.w, si.x),
(si.z, si.y, -si.x, si.w),
))
qi = dst_q[i]
Qi = np.array((
(qi.w, -qi.x, -qi.y, -qi.z),
(qi.x, qi.w, -qi.z, qi.y),
(qi.y, qi.z, qi.w, -qi.x),
(qi.z, -qi.y, qi.x, qi.w),
))
        M += Pi.T.dot(Qi)  # accumulate R(si)^T L(qi) as a matrix product
    w, v = np.linalg.eigh((M + M.T) / 2)  # q^T M q == q^T sym(M) q, so the symmetric part suffices
i = np.argmax(w)
res_q = np.quaternion(*v[:, i])
# alt = v.dot(w)
# print('%s,%s'%(res_q, alt))
# res_q = np.quaternion(*alt).normalized()
return res_q
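# Illustrative self-check (the docstring above notes the solver is
# unverified): build dst_q by conjugating src_q with a known rotation.
def _demo_solve_rotation():
    q_true = ypr_to_q(0.1, 0.5, -0.3)
    src_q = [rand_q(0.5) for _ in range(10)]
    dst_q = [q_true * q * q_true.conj() for q in src_q]
    q_est = solve_rotation(src_q, dst_q)
    print('q_est * q_true.conj() should be ~ +-identity:', q_est * q_true.conj())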
def solve_q_bf(src_q, dst_q):
qs = []
d = []
for res_q in (
np.quaternion(0, 0, 0, 1).normalized(),
np.quaternion(0, 0, 1, 0).normalized(),
np.quaternion(0, 0, 1, 1).normalized(),
np.quaternion(0, 0, -1, 1).normalized(),
np.quaternion(0, 1, 0, 0).normalized(),
np.quaternion(0, 1, 0, 1).normalized(),
np.quaternion(0, 1, 0, -1).normalized(),
np.quaternion(0, 1, 1, 0).normalized(),
np.quaternion(0, 1, -1, 0).normalized(),
np.quaternion(0, 1, 1, 1).normalized(),
np.quaternion(0, 1, 1, -1).normalized(),
np.quaternion(0, 1, -1, 1).normalized(),
np.quaternion(0, 1, -1, -1).normalized(),
np.quaternion(1, 0, 0, 1).normalized(),
np.quaternion(1, 0, 0, -1).normalized(),
np.quaternion(1, 0, 1, 0).normalized(),
np.quaternion(1, 0, -1, 0).normalized(),
np.quaternion(1, 0, 1, 1).normalized(),
np.quaternion(1, 0, 1, -1).normalized(),
np.quaternion(1, 0, -1, 1).normalized(),
np.quaternion(1, 0, -1, -1).normalized(),
np.quaternion(1, 1, 0, 0).normalized(),
np.quaternion(1, -1, 0, 0).normalized(),
np.quaternion(1, 1, 0, 1).normalized(),
np.quaternion(1, 1, 0, -1).normalized(),
np.quaternion(1, -1, 0, 1).normalized(),
np.quaternion(1, -1, 0, -1).normalized(),
np.quaternion(1, 1, 1, 0).normalized(),
np.quaternion(1, 1, -1, 0).normalized(),
np.quaternion(1, -1, 1, 0).normalized(),
np.quaternion(1, -1, -1, 0).normalized(),
np.quaternion(1, 1, 1, -1).normalized(),
np.quaternion(1, 1, -1, 1).normalized(),
np.quaternion(1, 1, -1, -1).normalized(),
np.quaternion(1, -1, 1, 1).normalized(),
np.quaternion(1, -1, 1, -1).normalized(),
np.quaternion(1, -1, -1, 1).normalized(),
np.quaternion(1, -1, -1, -1).normalized(),
):
tq = res_q * src_q * res_q.conj()
qs.append(res_q)
# d.append(1-np.array((tq.w, tq.x, tq.y, tq.z)).dot(np.array((dst_q.w, dst_q.x, dst_q.y, dst_q.z)))**2)
d.append(angle_between_q(tq, dst_q))
i = np.argmin(d)
return qs[i]
def hover_annotate(fig, ax, line, annotations):
annot = ax.annotate("", xy=(0, 0), xytext=(-20, 20), textcoords="offset points",
bbox=dict(boxstyle="round", fc="w"),
arrowprops=dict(arrowstyle="->"))
annot.set_visible(False)
def update_annot(ind):
idx = ind["ind"][0]
try:
# for regular plots
x, y = line.get_data()
annot.xy = (x[idx], y[idx])
except AttributeError:
# for scatter plots
annot.xy = tuple(line.get_offsets()[idx])
text = ", ".join([annotations[n] for n in ind["ind"]])
annot.set_text(text)
annot.get_bbox_patch().set_alpha(0.4)
def hover(event):
vis = annot.get_visible()
if event.inaxes == ax:
cont, ind = line.contains(event)
if cont:
update_annot(ind)
annot.set_visible(True)
fig.canvas.draw_idle()
else:
if vis:
annot.set_visible(False)
fig.canvas.draw_idle()
fig.canvas.mpl_connect("motion_notify_event", hover)
def plot_vectors(pts3d, scatter=True, conseq=True, neg_z=True):
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure()
ax = Axes3D(fig)
if scatter:
ax.scatter(pts3d[:, 0], pts3d[:, 1], pts3d[:, 2])
else:
if conseq:
ax.set_prop_cycle('color', map(lambda c: '%f' % c, np.linspace(1, 0, len(pts3d))))
for i, v1 in enumerate(pts3d):
if v1 is not None:
ax.plot((0, v1[0]), (0, v1[1]), (0, v1[2]))
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_zlabel('Z')
if neg_z:
ax.view_init(90, -90)
else:
ax.view_init(-90, -90)
plt.show()
def numeric(s):
try:
float(s)
except ValueError:
return False
return True
def pseudo_huber_loss(a, delta):
# from https://en.wikipedia.org/wiki/Huber_loss
# first +1e-15 is to avoid divide by zero, second to avoid loss becoming zero if delta > 1e7 due to float precision
return delta ** 2 * (np.sqrt(1 + a ** 2 / (delta ** 2 + 1e-15)) - 1 + 1e-15)
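# Limiting behaviour (illustrative): for |a| << delta the loss approaches
# a**2 / 2 (quadratic near zero), while for |a| >> delta it grows like
# delta * |a| (linear), which is what makes it robust to outliers.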
def fixed_precision(val, precision, as_str=False):
if val == 0:
return ('%%.%df' % precision) % val if as_str else val
d = math.ceil(math.log10(abs(val))) - precision
c = 10 ** d
fp_val = round(val / c) * c
return ('%%.%df' % max(0, -d)) % fp_val if as_str else fp_val
def plot_quats(quats, conseq=True, wait=True):
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure()
ax = Axes3D(fig)
ax.set_xlim(-1, 1)
ax.set_ylim(-1, 1)
ax.set_zlim(-1, 1)
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
if conseq:
ax.set_prop_cycle('color', map(lambda c: '%f' % c, np.linspace(1, 0, len(quats))))
for i, q in enumerate(quats):
if q is not None:
lat, lon, _ = q_to_ypr(q)
v1 = spherical2cartesian(lat, lon, 1)
v2 = (v1 + normalize_v(np.cross(np.cross(v1, np.array([0, 0, 1])), v1)) * 0.1) * 0.85
v2 = q_times_v(q, v2)
ax.plot((0, v1[0], v2[0]), (0, v1[1], v2[1]), (0, v1[2], v2[2]))
while (wait and not plt.waitforbuttonpress()):
pass
def plot_poses(poses, conseq=True, wait=True, arrow_len=1):
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure()
ax = Axes3D(fig)
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
if conseq:
plt.hsv()
# ax.set_prop_cycle('color', map(lambda c: '%f' % c, np.linspace(.7, 0, len(poses))))
for i, pose in enumerate(poses):
if pose is not None:
q = np.quaternion(*pose[3:])
lat, lon, _ = q_to_ypr(q)
v1 = spherical2cartesian(lat, lon, 1) * arrow_len
v2 = (v1 + normalize_v(np.cross(np.cross(v1, np.array([0, 0, 1])), v1)) * 0.1 * arrow_len) * 0.85
v2 = q_times_v(q, v2)
ax.plot((pose[0], v1[0], v2[0]), (pose[1], v1[1], v2[1]), (pose[2], v1[2], v2[2]))
while (wait and not plt.waitforbuttonpress()):
pass
#
# Not sure if unitbase_to_q works, haven't deleted just in case still need:
#
# def unitbase_to_q(b_dst, b_src = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]):
# # based on http://stackoverflow.com/questions/16648452/calculating-\
# # quaternion-for-transformation-between-2-3d-cartesian-coordinate-syst
# # , which is based on http://dx.doi.org/10.1117/12.57955
#
# M = np.zeros((3, 3))
#
# for i, v in enumerate(b_src):
# x = np.matrix(np.outer(v, b_dst[i]))
# M = M + x
#
# N11 = M[0, 0] + M[1, 1] + M[2, 2]
# N22 = M[0, 0] - M[1, 1] - M[2, 2]
# N33 = -M[0, 0] + M[1, 1] - M[2, 2]
# N44 = -M[0, 0] - M[1, 1] + M[2, 2]
# N12 = M[1, 2] - M[2, 1]
# N13 = M[2, 0] - M[0, 2]
# N14 = M[0, 1] - M[1, 0]
# N21 = N12
# N23 = M[0, 1] + M[1, 0]
# N24 = M[2, 0] + M[0, 2]
# N31 = N13
# N32 = N23
# N34 = M[1, 2] + M[2, 1]
# N41 = N14
# N42 = N24
# N43 = N34
#
# N=np.matrix([[N11, N12, N13, N14],\
# [N21, N22, N23, N24],\
# [N31, N32, N33, N34],\
# [N41, N42, N43, N44]])
#
# values, vectors = np.linalg.eig(N)
# quat = vectors[:, np.argmax(values)]
# #quat = np.array(quat).reshape(-1,).tolist()
#
# return np.quaternion(*quat)
import tracemalloc
import os
import linecache
def display_top(top_stats, key_type='lineno', limit=10):
# snapshot = snapshot.filter_traces((
# tracemalloc.Filter(False, "<frozen importlib._bootstrap>"),
# tracemalloc.Filter(False, "<unknown>"),
# ))
# top_stats = snapshot.statistics(key_type, cumulative=True)
print("Top %s lines" % limit)
for index, stat in enumerate(top_stats[:limit], 1):
frame = stat.traceback[0]
# replace "/path/to/module/file.py" with "module/file.py"
filename = os.sep.join(frame.filename.split(os.sep)[-2:])
print("#%s: %s:%s: %.1f MB (x%.0f)"
% (index, filename, frame.lineno, stat.size / 1024 / 1024, stat.count))
line = linecache.getline(frame.filename, frame.lineno).strip()
if line:
print(' %s' % line)
other = top_stats[limit:]
if other:
size = sum(stat.size for stat in other)
print("%s other: %.1f MB" % (len(other), size / 1024 / 1024))
total = sum(stat.size for stat in top_stats)
print("Total allocated size: %.1f MB" % (total / 1024 / 1024))
def show_progress(tot, i):
digits = int(math.ceil(math.log10(tot + 1)))
if i == 0:
print('%s/%d' % ('0' * digits, tot), end='', flush=True)
else:
print(('%s%0' + str(digits) + 'd/%d') % ('\b' * (digits * 2 + 1), i + 1, tot), end='', flush=True)
def smooth1d(xt, x, Y, weight_fun=lambda d: 0.9 ** abs(d)):
if xt.ndim != 1 or x.ndim != 1:
raise ValueError("smooth1d only accepts 1 dimension arrays for location")
if x.shape[0] != Y.shape[0]:
raise ValueError("different lenght x and Y")
D = np.repeat(np.expand_dims(xt, 1), len(x), axis=1) - np.repeat(np.expand_dims(x, 0), len(xt), axis=0)
weights = np.array(list(map(weight_fun, D.flatten()))).reshape(D.shape)
Yt = np.sum(Y * weights, axis=1) / np.sum(weights, axis=1)
return Yt
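# Usage sketch (illustrative): smooth noisy samples Y given at locations x
# onto a regular grid xt using the default exponential distance weighting.
def _demo_smooth1d():
    x = np.sort(np.random.uniform(0, 10, 50))
    Y = np.sin(x) + np.random.normal(0, 0.1, 50)
    xt = np.linspace(0, 10, 101)
    return smooth1d(xt, x, Y)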
| [((1674, 1706), 'quaternion.as_rotation_matrix', 'quaternion.as_rotation_matrix', (['q'], {}), '(q)\n', (1703, 1706), False, 'import quaternion\n'), ((2797, 2810), 'numpy.max', 'np.max', (['normD'], {}), '(normD)\n', (2803, 2810), True, 'import numpy as np\n'), ((3136, 3155), 'numpy.array', 'np.array', (['(x, y, z)'], {}), '((x, y, z))\n', (3144, 3155), True, 'import numpy as np\n'), ((3189, 3206), 'numpy.linalg.norm', 'np.linalg.norm', (['v'], {}), '(v)\n', (3203, 3206), True, 'import numpy as np\n'), ((3324, 3341), 'numpy.linalg.norm', 'np.linalg.norm', (['v'], {}), '(v)\n', (3338, 3341), True, 'import numpy as np\n'), ((3778, 3792), 'numpy.prod', 'np.prod', (['shape'], {}), '(shape)\n', (3785, 3792), True, 'import numpy as np\n'), ((7122, 7147), 'numpy.linalg.norm', 'np.linalg.norm', (['O'], {'axis': '(2)'}), '(O, axis=2)\n', (7136, 7147), True, 'import numpy as np\n'), ((7194, 7264), 'quaternion.as_quat_array', 'quaternion.as_quat_array', (['[[0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1.0]]'], {}), '([[0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1.0]])\n', (7218, 7264), False, 'import quaternion\n'), ((8193, 8212), 'math.cos', 'math.cos', (['(theta / 2)'], {}), '(theta / 2)\n', (8201, 8212), False, 'import math\n'), ((9268, 9296), 'quaternion.as_float_array', 'quaternion.as_float_array', (['q'], {}), '(q)\n', (9293, 9296), False, 'import quaternion\n'), ((9308, 9362), 'numpy.arctan2', 'np.arctan2', (['(q2 * q3 + q0 * q1)', '(0.5 - q1 ** 2 - q2 ** 2)'], {}), '(q2 * q3 + q0 * q1, 0.5 - q1 ** 2 - q2 ** 2)\n', (9318, 9362), True, 'import numpy as np\n'), ((9435, 9489), 'numpy.arctan2', 'np.arctan2', (['(q1 * q2 + q0 * q3)', '(0.5 - q2 ** 2 - q3 ** 2)'], {}), '(q1 * q2 + q0 * q3, 0.5 - q2 ** 2 - q3 ** 2)\n', (9445, 9489), True, 'import numpy as np\n'), ((10122, 10142), 'numpy.quaternion', 'np.quaternion', (['(0)', '*v'], {}), '(0, *v)\n', (10135, 10142), True, 'import numpy as np\n'), ((10182, 10213), 'numpy.array', 'np.array', (['[qv2.x, qv2.y, qv2.z]'], {}), '([qv2.x, qv2.y, qv2.z])\n', (10190, 10213), True, 'import numpy as np\n'), ((10288, 10319), 'quaternion.as_float_array', 'quaternion.as_float_array', (['qqmx'], {}), '(qqmx)\n', (10313, 10319), False, 'import quaternion\n'), ((10372, 10398), 'numpy.zeros', 'np.zeros', (['(mx.shape[0], 4)'], {}), '((mx.shape[0], 4))\n', (10380, 10398), True, 'import numpy as np\n'), ((10430, 10459), 'quaternion.as_quat_array', 'quaternion.as_quat_array', (['qmx'], {}), '(qmx)\n', (10454, 10459), False, 'import quaternion\n'), ((11619, 11641), 'numpy.cross', 'np.cross', (['sco_x', 'ast_v'], {}), '(sco_x, ast_v)\n', (11627, 11641), True, 'import numpy as np\n'), ((11880, 11903), 'numpy.where', 'np.where', (['(array < value)'], {}), '(array < value)\n', (11888, 11903), True, 'import numpy as np\n'), ((12030, 12053), 'numpy.where', 'np.where', (['(array > value)'], {}), '(array > value)\n', (12038, 12053), True, 'import numpy as np\n'), ((12624, 12639), 'numpy.where', 'np.where', (['(d < r)'], {}), '(d < r)\n', (12632, 12639), True, 'import numpy as np\n'), ((13126, 13166), 'numpy.linalg.norm', 'np.linalg.norm', (['diff_mx'], {'axis': '(2)', 'ord': 'ord'}), '(diff_mx, axis=2, ord=ord)\n', (13140, 13166), True, 'import numpy as np\n'), ((13277, 13312), 'math.sqrt', 'math.sqrt', (['(x ** 2 + y ** 2 + z ** 2)'], {}), '(x ** 2 + y ** 2 + z ** 2)\n', (13286, 13312), False, 'import math\n'), ((13325, 13341), 'math.acos', 'math.acos', (['(z / r)'], {}), '(z / r)\n', (13334, 13341), False, 'import math\n'), ((13352, 13368), 'math.atan2', 'math.atan2', (['y', 'x'], {}), '(y, x)\n', (13362, 13368), False, 'import math\n'), ((13424, 13447), 'numpy.array', 'np.array', (['[lat, lon, r]'], {}), '([lat, lon, r])\n', (13432, 13447), True, 'import numpy as np\n'), ((13659, 13678), 'numpy.array', 'np.array', (['[x, y, z]'], {}), '([x, y, z])\n', (13667, 13678), True, 'import numpy as np\n'), ((13855, 13866), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (13861, 13866), True, 'import numpy as np\n'), ((16754, 16803), 'numpy.percentile', 'np.percentile', (['arr', 'discard_percentile'], {'axis': 'axis'}), '(arr, discard_percentile, axis=axis)\n', (16767, 16803), True, 'import numpy as np\n'), ((16815, 16870), 'numpy.percentile', 'np.percentile', (['arr', '(100 - discard_percentile)'], {'axis': 'axis'}), '(arr, 100 - discard_percentile, axis=axis)\n', (16828, 16870), True, 'import numpy as np\n'), ((16879, 16916), 'numpy.logical_and', 'np.logical_and', (['(low < arr)', '(arr < high)'], {}), '(low < arr, arr < high)\n', (16893, 16916), True, 'import numpy as np\n'), ((17000, 17056), 'numpy.mean', 'np.mean', (['(arr[:, (I)] if axis == 1 else arr[I])'], {'axis': 'axis'}), '(arr[:, (I)] if axis == 1 else arr[I], axis=axis)\n', (17007, 17056), True, 'import numpy as np\n'), ((18829, 18880), 'numpy.array', 'np.array', (['[f[0] for f in model.faces]'], {'dtype': '"""uint"""'}), "([f[0] for f in model.faces], dtype='uint')\n", (18837, 18880), True, 'import numpy as np\n'), ((18896, 18920), 'numpy.array', 'np.array', (['model.vertices'], {}), '(model.vertices)\n', (18904, 18920), True, 'import numpy as np\n'), ((19496, 19514), 'numpy.dot', 'np.dot', (['v0v1', 'pvec'], {}), '(v0v1, pvec)\n', (19502, 19514), True, 'import numpy as np\n'), ((19060, 19091), 'numba.f8', 'nb.f8', (['nb.f8[:, :]', 'nb.f8[:, :]'], {}), '(nb.f8[:, :], nb.f8[:, :])\n', (19065, 19091), True, 'import numba as nb\n'), ((20613, 20641), 'numba.prange', 'nb.prange', (['(1)', 'faces.shape[0]'], {}), '(1, faces.shape[0])\n', (20622, 20641), True, 'import numba as nb\n'), ((20383, 20427), 'numba.f8', 'nb.f8', (['nb.u4[:, :]', 'nb.f8[:, :]', 'nb.f8[:, :]'], {}), '(nb.u4[:, :], nb.f8[:, :], nb.f8[:, :])\n', (20388, 20427), True, 'import numba as nb\n'), ((21267, 21292), 'numpy.empty', 'np.empty', (['points.shape[0]'], {}), '(points.shape[0])\n', (21275, 21292), True, 'import numpy as np\n'), ((21306, 21322), 'numba.prange', 'nb.prange', (['count'], {}), '(count)\n', (21315, 21322), True, 'import numba as nb\n'), ((22354, 22378), 'numpy.array', 'np.array', (['model.vertices'], {}), '(model.vertices)\n', (22362, 22378), True, 'import numpy as np\n'), ((22567, 22605), 'sklearn.gaussian_process.kernels.WhiteKernel', 'WhiteKernel', ([], {'noise_level': '(sd0 * max_rng)'}), '(noise_level=sd0 * max_rng)\n', (22578, 22605), False, 'from sklearn.gaussian_process.kernels import Matern, WhiteKernel\n'), ((23083, 23128), 'scipy.interpolate.LinearNDInterpolator', 'LinearNDInterpolator', (['points', 'model.texcoords'], {}), '(points, model.texcoords)\n', (23103, 23128), False, 'from scipy.interpolate import LinearNDInterpolator\n'), ((23355, 23386), 'visnav.iotools.objloader.ShapeModel', 'objloader.ShapeModel', ([], {'data': 'data'}), '(data=data)\n', (23375, 23386), False, 'from visnav.iotools import objloader\n'), ((25627, 25653), 'numpy.array', 'np.array', (['support.vertices'], {}), '(support.vertices)\n', (25635, 25653), True, 'import numpy as np\n'), ((27777, 27852), 'scipy.interpolate.RectBivariateSpline', 'RectBivariateSpline', (['tx_grid_xx[(0), :]', 'tx_grid_yy[:, (0)]', 'e0'], {'kx': '(1)', 'ky': '(1)'}), '(tx_grid_xx[(0), :], tx_grid_yy[:, (0)], e0, kx=1, ky=1)\n', (27796, 27852), False, 'from scipy.interpolate import RectBivariateSpline\n'), ((28966, 28983), 'numpy.max', 'np.max', (['noisy_tex'], {}), '(noisy_tex)\n', (28972, 28983), True, 'import numpy as np\n'), ((33036, 33059), 'numpy.mean', 'np.mean', (['points'], {'axis': '(0)'}), '(points, axis=0)\n', (33043, 33059), True, 'import numpy as np\n'), ((36490, 36515), 'numpy.where', 'np.where', (['(array < max_val)'], {}), '(array < max_val)\n', (36498, 36515), True, 'import numpy as np\n'), ((36870, 36944), 'numpy.array', 'np.array', (['(((1 - yf) * (1 - xf), (1 - yf) * xf), (yf * (1 - xf), yf * xf))'], {}), '((((1 - yf) * (1 - xf), (1 - yf) * xf), (yf * (1 - xf), yf * xf)))\n', (36878, 36944), True, 'import numpy as np\n'), ((38266, 38282), 'numpy.zeros', 'np.zeros', (['(4, 4)'], {}), '((4, 4))\n', (38274, 38282), True, 'import numpy as np\n'), ((38782, 38798), 'numpy.linalg.eig', 'np.linalg.eig', (['M'], {}), '(M)\n', (38795, 38798), True, 'import numpy as np\n'), ((38807, 38819), 'numpy.argmax', 'np.argmax', (['w'], {}), '(w)\n', (38816, 38819), True, 'import numpy as np\n'), ((38832, 38857), 'numpy.quaternion', 'np.quaternion', (['*v[:, (i)]'], {}), '(*v[:, (i)])\n', (38845, 38857), True, 'import numpy as np\n'), ((41307, 41319), 'numpy.argmin', 'np.argmin', (['d'], {}), '(d)\n', (41316, 41319), True, 'import numpy as np\n'), ((42663, 42675), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (42673, 42675), True, 'import matplotlib.pyplot as plt\n'), ((42685, 42696), 'mpl_toolkits.mplot3d.Axes3D', 'Axes3D', (['fig'], {}), '(fig)\n', (42691, 42696), False, 'from mpl_toolkits.mplot3d import Axes3D\n'), ((43185, 43195), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (43193, 43195), True, 'import matplotlib.pyplot as plt\n'), ((44027, 44039), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (44037, 44039), True, 'import matplotlib.pyplot as plt\n'), ((44049, 44060), 'mpl_toolkits.mplot3d.Axes3D', 'Axes3D', (['fig'], {}), '(fig)\n', (44055, 44060), False, 'from mpl_toolkits.mplot3d import Axes3D\n'), ((44880, 44892), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (44890, 44892), True, 'import matplotlib.pyplot as plt\n'), ((44902, 44913), 'mpl_toolkits.mplot3d.Axes3D', 'Axes3D', (['fig'], {}), '(fig)\n', (44908, 44913), False, 'from mpl_toolkits.mplot3d import Axes3D\n'), ((3976, 4005), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)', 'shape'], {}), '(0, 1, shape)\n', (3992, 4005), True, 'import numpy as np\n'), ((4450, 4480), 'numpy.array', 'np.array', (['x1'], {'dtype': 'np.float64'}), '(x1, dtype=np.float64)\n', (4458, 4480), True, 'import numpy as np\n'), ((4482, 4512), 'numpy.array', 'np.array', (['x2'], {'dtype': 'np.float64'}), '(x2, dtype=np.float64)\n', (4490, 4512), True, 'import numpy as np\n'), ((4514, 4544), 'numpy.array', 'np.array', (['x3'], {'dtype': 'np.float64'}), '(x3, dtype=np.float64)\n', (4522, 4544), True, 'import numpy as np\n'), ((4870, 4893), 'numpy.reshape', 'np.reshape', (['v1', '(1, -1)'], {}), '(v1, (1, -1))\n', (4880, 4893), True, 'import numpy as np\n'), ((4907, 4930), 'numpy.reshape', 'np.reshape', (['v2', '(-1, 1)'], {}), '(v2, (-1, 1))\n', (4917, 4930), True, 'import numpy as np\n'), ((5165, 5190), 'numpy.clip', 'np.clip', (['cos_angle', '(-1)', '(1)'], {}), '(cos_angle, -1, 1)\n', (5172, 5190), True, 'import numpy as np\n'), ((5826, 5853), 'numpy.einsum', 'np.einsum', (['"""ij,ij->i"""', 'A', 'B'], {}), "('ij,ij->i', A, B)\n", (5835, 5853), True,
'import numpy as np\n'), ((6221, 6251), 'numpy.clip', 'np.clip', (['cos_angles', '(-1.0)', '(1.0)'], {}), '(cos_angles, -1.0, 1.0)\n', (6228, 6251), True, 'import numpy as np\n'), ((6294, 6318), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(3)'}), '(size=3)\n', (6310, 6318), True, 'import numpy as np\n'), ((6346, 6367), 'numpy.hstack', 'np.hstack', (['(angle, r)'], {}), '((angle, r))\n', (6355, 6367), True, 'import numpy as np\n'), ((7302, 7331), 'quaternion.as_float_array', 'quaternion.as_float_array', (['Uq'], {}), '(Uq)\n', (7327, 7331), False, 'import quaternion\n'), ((7710, 7735), 'numpy.array', 'np.array', (['[q.x, q.y, q.z]'], {}), '([q.x, q.y, q.z])\n', (7718, 7735), True, 'import numpy as np\n'), ((8225, 8244), 'math.sin', 'math.sin', (['(theta / 2)'], {}), '(theta / 2)\n', (8233, 8244), False, 'import math\n'), ((8928, 8947), 'math.cos', 'math.cos', (['(angle / 2)'], {}), '(angle / 2)\n', (8936, 8947), False, 'import math\n'), ((8993, 9012), 'math.sin', 'math.sin', (['(angle / 2)'], {}), '(angle / 2)\n', (9001, 9012), False, 'import math\n'), ((9026, 9046), 'numpy.quaternion', 'np.quaternion', (['w', '*v'], {}), '(w, *v)\n', (9039, 9046), True, 'import numpy as np\n'), ((11383, 11429), 'numpy.array', 'np.array', (['[sc.x.value, sc.y.value, sc.z.value]'], {}), '([sc.x.value, sc.y.value, sc.z.value])\n', (11391, 11429), True, 'import numpy as np\n'), ((13562, 13575), 'math.cos', 'math.cos', (['phi'], {}), '(phi)\n', (13570, 13575), False, 'import math\n'), ((13606, 13619), 'math.sin', 'math.sin', (['phi'], {}), '(phi)\n', (13614, 13619), False, 'import math\n'), ((13632, 13647), 'math.cos', 'math.cos', (['theta'], {}), '(theta)\n', (13640, 13647), False, 'import math\n'), ((13808, 13821), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (13814, 13821), True, 'import numpy as np\n'), ((13834, 13845), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (13840, 13845), True, 'import numpy as np\n'), ((13963, 13976), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (13969, 13976), True, 'import numpy as np\n'), ((13988, 14008), 'numpy.vstack', 'np.vstack', (['[x, y, z]'], {}), '([x, y, z])\n', (13997, 14008), True, 'import numpy as np\n'), ((14520, 14540), 'numpy.array', 'np.array', (['(lat, lon)'], {}), '((lat, lon))\n', (14528, 14540), True, 'import numpy as np\n'), ((15234, 15255), 'numpy.array', 'np.array', (['(lat, roll)'], {}), '((lat, roll))\n', (15242, 15255), True, 'import numpy as np\n'), ((15533, 15545), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (15542, 15545), False, 'import math\n'), ((15973, 15985), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (15982, 15985), False, 'import math\n'), ((16303, 16359), 'numpy.linspace', 'np.linspace', (['(-math.pi)', 'math.pi'], {'num': 'Mphi', 'endpoint': '(False)'}), '(-math.pi, math.pi, num=Mphi, endpoint=False)\n', (16314, 16359), True, 'import numpy as np\n'), ((16536, 16549), 'numpy.isnan', 'np.isnan', (['arr'], {}), '(arr)\n', (16544, 16549), True, 'import numpy as np\n'), ((16588, 16625), 'numpy.all', 'np.all', (['J'], {'axis': '(1 if axis == 0 else 0)'}), '(J, axis=1 if axis == 0 else 0)\n', (16594, 16625), True, 'import numpy as np\n'), ((16954, 16991), 'numpy.all', 'np.all', (['I'], {'axis': '(1 if axis == 0 else 0)'}), '(I, axis=1 if axis == 0 else 0)\n', (16960, 16991), True, 'import numpy as np\n'), ((18526, 18549), 'numpy.linalg.cholesky', 'np.linalg.cholesky', (['cov'], {}), '(cov)\n', (18544, 18549), True, 'import numpy as np\n'), ((19361, 19377), 'numpy.sum', 'np.sum', (['(dir ** 2)'], {}), 
'(dir ** 2)\n', (19367, 19377), True, 'import numpy as np\n'), ((20567, 20590), 'numpy.ones', 'np.ones', (['faces.shape[0]'], {}), '(faces.shape[0])\n', (20574, 20590), True, 'import numpy as np\n'), ((20997, 21012), 'numpy.abs', 'np.abs', (['min_err'], {}), '(min_err)\n', (21003, 21012), True, 'import numpy as np\n'), ((21433, 21448), 'math.isinf', 'math.isinf', (['err'], {}), '(err)\n', (21443, 21448), False, 'import math\n'), ((22400, 22422), 'numpy.ptp', 'np.ptp', (['points'], {'axis': '(0)'}), '(points, axis=0)\n', (22406, 22422), True, 'import numpy as np\n'), ((24763, 24794), 'visnav.iotools.objloader.ShapeModel', 'objloader.ShapeModel', ([], {'data': 'data'}), '(data=data)\n', (24783, 24794), False, 'from visnav.iotools import objloader\n'), ((25439, 25460), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'nx'], {}), '(0, 1, nx)\n', (25450, 25460), True, 'import numpy as np\n'), ((25462, 25483), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'ny'], {}), '(0, 1, ny)\n', (25473, 25483), True, 'import numpy as np\n'), ((26476, 26503), 'numpy.array', 'np.array', (['support.texcoords'], {}), '(support.texcoords)\n', (26484, 26503), True, 'import numpy as np\n'), ((27519, 27530), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (27527, 27530), True, 'import numpy as np\n'), ((27635, 27653), 'numpy.min', 'np.min', (['tx_grid_xx'], {}), '(tx_grid_xx)\n', (27641, 27653), True, 'import numpy as np\n'), ((27655, 27673), 'numpy.max', 'np.max', (['tx_grid_xx'], {}), '(tx_grid_xx)\n', (27661, 27673), True, 'import numpy as np\n'), ((27709, 27727), 'numpy.min', 'np.min', (['tx_grid_yy'], {}), '(tx_grid_yy)\n', (27715, 27727), True, 'import numpy as np\n'), ((27729, 27747), 'numpy.max', 'np.max', (['tx_grid_yy'], {}), '(tx_grid_yy)\n', (27735, 27747), True, 'import numpy as np\n'), ((28274, 28306), 'visnav.algo.image.ImageProc.merge', 'ImageProc.merge', (['(orig_tx, eimg)'], {}), '((orig_tx, eimg))\n', (28289, 28306), False, 'from visnav.algo.image import ImageProc\n'), ((28315, 28328), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (28325, 28328), True, 'import matplotlib.pyplot as plt\n'), ((28337, 28357), 'matplotlib.pyplot.imshow', 'plt.imshow', (['overlaid'], {}), '(overlaid)\n', (28347, 28357), True, 'import matplotlib.pyplot as plt\n'), ((28366, 28376), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (28374, 28376), True, 'import matplotlib.pyplot as plt\n'), ((28504, 28579), 'scipy.interpolate.RectBivariateSpline', 'RectBivariateSpline', (['tx_grid_xx[(0), :]', 'tx_grid_yy[:, (0)]', 'e1'], {'kx': '(1)', 'ky': '(1)'}), '(tx_grid_xx[(0), :], tx_grid_yy[:, (0)], e1, kx=1, ky=1)\n', (28523, 28579), False, 'from scipy.interpolate import RectBivariateSpline\n'), ((29043, 29056), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (29053, 29056), True, 'import matplotlib.pyplot as plt\n'), ((29065, 29086), 'matplotlib.pyplot.imshow', 'plt.imshow', (['noisy_tex'], {}), '(noisy_tex)\n', (29075, 29086), True, 'import matplotlib.pyplot as plt\n'), ((29095, 29108), 'matplotlib.pyplot.figure', 'plt.figure', (['(2)'], {}), '(2)\n', (29105, 29108), True, 'import matplotlib.pyplot as plt\n'), ((29117, 29133), 'matplotlib.pyplot.imshow', 'plt.imshow', (['err0'], {}), '(err0)\n', (29127, 29133), True, 'import matplotlib.pyplot as plt\n'), ((29142, 29155), 'matplotlib.pyplot.figure', 'plt.figure', (['(3)'], {}), '(3)\n', (29152, 29155), True, 'import matplotlib.pyplot as plt\n'), ((29164, 29180), 'matplotlib.pyplot.imshow', 'plt.imshow', (['err1'], {}), '(err1)\n', 
(29174, 29180), True, 'import matplotlib.pyplot as plt\n'), ((29189, 29199), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (29197, 29199), True, 'import matplotlib.pyplot as plt\n'), ((29223, 29242), 'numpy.std', 'np.std', (['(err0 + err1)'], {}), '(err0 + err1)\n', (29229, 29242), True, 'import numpy as np\n'), ((30765, 30789), 'scipy.sparse.issparse', 'scipy.sparse.issparse', (['r'], {}), '(r)\n', (30786, 30789), False, 'import scipy\n'), ((30962, 30986), 'scipy.sparse.issparse', 'scipy.sparse.issparse', (['r'], {}), '(r)\n', (30983, 30986), False, 'import scipy\n'), ((31192, 31216), 'scipy.sparse.issparse', 'scipy.sparse.issparse', (['r'], {}), '(r)\n', (31213, 31216), False, 'import scipy\n'), ((31404, 31428), 'scipy.sparse.issparse', 'scipy.sparse.issparse', (['r'], {}), '(r)\n', (31425, 31428), False, 'import scipy\n'), ((31896, 31953), 'scipy.interpolate.interpnd._ndim_coords_from_arrays', '_ndim_coords_from_arrays', (['args'], {'ndim': 'self.points.shape[1]'}), '(args, ndim=self.points.shape[1])\n', (31920, 31953), False, 'from scipy.interpolate.interpnd import _ndim_coords_from_arrays\n'), ((32349, 32378), 'numpy.vstack', 'np.vstack', (['(self.values, [0])'], {}), '((self.values, [0]))\n', (32358, 32378), True, 'import numpy as np\n'), ((32464, 32497), 'numpy.sum', 'np.sum', (['(yt[(idxs), :] * w)'], {'axis': '(1)'}), '(yt[(idxs), :] * w, axis=1)\n', (32470, 32497), True, 'import numpy as np\n'), ((33566, 33577), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (33574, 33577), True, 'import numpy as np\n'), ((34173, 34187), 'numpy.any', 'np.any', (['nanidx'], {}), '(nanidx)\n', (34179, 34187), True, 'import numpy as np\n'), ((35776, 35805), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {'figsize': '(8, 8)'}), '(1, figsize=(8, 8))\n', (35786, 35805), True, 'import matplotlib.pyplot as plt\n'), ((35907, 35927), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(1)'], {}), '(2, 2, 1)\n', (35918, 35927), True, 'import matplotlib.pyplot as plt\n'), ((35936, 35960), 'matplotlib.pyplot.plot', 'plt.plot', (['points[:, (0)]'], {}), '(points[:, (0)])\n', (35944, 35960), True, 'import matplotlib.pyplot as plt\n'), ((35967, 36001), 'matplotlib.pyplot.title', 'plt.title', (['"""original"""'], {'fontsize': '(12)'}), "('original', fontsize=12)\n", (35976, 36001), True, 'import matplotlib.pyplot as plt\n'), ((36011, 36031), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(2)'], {}), '(2, 2, 2)\n', (36022, 36031), True, 'import matplotlib.pyplot as plt\n'), ((36040, 36062), 'matplotlib.pyplot.plot', 'plt.plot', (['err0[:, (0)]'], {}), '(err0[:, (0)])\n', (36048, 36062), True, 'import matplotlib.pyplot as plt\n'), ((36069, 36103), 'matplotlib.pyplot.title', 'plt.title', (['"""norm-err"""'], {'fontsize': '(12)'}), "('norm-err', fontsize=12)\n", (36078, 36103), True, 'import matplotlib.pyplot as plt\n'), ((36113, 36133), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(3)'], {}), '(2, 2, 3)\n', (36124, 36133), True, 'import matplotlib.pyplot as plt\n'), ((36142, 36163), 'matplotlib.pyplot.plot', 'plt.plot', (['err[:, (0)]'], {}), '(err[:, (0)])\n', (36150, 36163), True, 'import matplotlib.pyplot as plt\n'), ((36170, 36203), 'matplotlib.pyplot.title', 'plt.title', (['"""exp-err"""'], {'fontsize': '(12)'}), "('exp-err', fontsize=12)\n", (36179, 36203), True, 'import matplotlib.pyplot as plt\n'), ((36213, 36233), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(4)'], {}), '(2, 2, 4)\n', (36224, 36233), True, 'import matplotlib.pyplot as 
plt\n'), ((36242, 36272), 'matplotlib.pyplot.plot', 'plt.plot', (['noisy_points[:, (0)]'], {}), '(noisy_points[:, (0)])\n', (36250, 36272), True, 'import matplotlib.pyplot as plt\n'), ((36279, 36310), 'matplotlib.pyplot.title', 'plt.title', (['"""noisy"""'], {'fontsize': '(12)'}), "('noisy', fontsize=12)\n", (36288, 36310), True, 'import matplotlib.pyplot as plt\n'), ((36320, 36338), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (36336, 36338), True, 'import matplotlib.pyplot as plt\n'), ((36347, 36357), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (36355, 36357), True, 'import matplotlib.pyplot as plt\n'), ((36416, 36429), 'numpy.mean', 'np.mean', (['devs'], {}), '(devs)\n', (36423, 36429), True, 'import numpy as np\n'), ((36527, 36565), 'numpy.concatenate', 'np.concatenate', (['((iy,), (ix,))'], {'axis': '(0)'}), '(((iy,), (ix,)), axis=0)\n', (36541, 36565), True, 'import numpy as np\n'), ((38350, 38475), 'numpy.array', 'np.array', (['((si.w, -si.x, -si.y, -si.z), (si.x, si.w, si.z, -si.y), (si.y, -si.z, si.w,\n si.x), (si.z, si.y, -si.x, si.w))'], {}), '(((si.w, -si.x, -si.y, -si.z), (si.x, si.w, si.z, -si.y), (si.y, -\n si.z, si.w, si.x), (si.z, si.y, -si.x, si.w)))\n', (38358, 38475), True, 'import numpy as np\n'), ((38566, 38691), 'numpy.array', 'np.array', (['((qi.w, -qi.x, -qi.y, -qi.z), (qi.x, qi.w, -qi.z, qi.y), (qi.y, qi.z, qi.w,\n -qi.x), (qi.z, -qi.y, qi.x, qi.w))'], {}), '(((qi.w, -qi.x, -qi.y, -qi.z), (qi.x, qi.w, -qi.z, qi.y), (qi.y, qi\n .z, qi.w, -qi.x), (qi.z, -qi.y, qi.x, qi.w)))\n', (38574, 38691), True, 'import numpy as np\n'), ((45006, 45015), 'matplotlib.pyplot.hsv', 'plt.hsv', ([], {}), '()\n', (45013, 45015), True, 'import matplotlib.pyplot as plt\n'), ((48718, 48745), 'numpy.sum', 'np.sum', (['(Y * weights)'], {'axis': '(1)'}), '(Y * weights, axis=1)\n', (48724, 48745), True, 'import numpy as np\n'), ((48748, 48771), 'numpy.sum', 'np.sum', (['weights'], {'axis': '(1)'}), '(weights, axis=1)\n', (48754, 48771), True, 'import numpy as np\n'), ((1470, 1497), 'numpy.linalg.norm', 'np.linalg.norm', (['loc'], {'axis': '(1)'}), '(loc, axis=1)\n', (1484, 1497), True, 'import numpy as np\n'), ((1756, 1790), 'numpy.linalg.norm', 'np.linalg.norm', (['Ab[:, 1:3]'], {'axis': '(1)'}), '(Ab[:, 1:3], axis=1)\n', (1770, 1790), True, 'import numpy as np\n'), ((4008, 4024), 'numpy.complex', 'np.complex', (['(0)', '(1)'], {}), '(0, 1)\n', (4018, 4024), True, 'import numpy as np\n'), ((4027, 4056), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)', 'shape'], {}), '(0, 1, shape)\n', (4043, 4056), True, 'import numpy as np\n'), ((4090, 4121), 'numpy.fft.fftshift', 'np.fft.fftshift', (['(kernel * f_img)'], {}), '(kernel * f_img)\n', (4105, 4121), True, 'import numpy as np\n'), ((4950, 4968), 'numpy.linalg.norm', 'np.linalg.norm', (['v1'], {}), '(v1)\n', (4964, 4968), True, 'import numpy as np\n'), ((4987, 5005), 'numpy.linalg.norm', 'np.linalg.norm', (['v2'], {}), '(v2)\n', (5001, 5005), True, 'import numpy as np\n'), ((5893, 5920), 'numpy.einsum', 'np.einsum', (['"""ij,ij->i"""', 'A', 'A'], {}), "('ij,ij->i', A, A)\n", (5902, 5920), True, 'import numpy as np\n'), ((5938, 5965), 'numpy.einsum', 'np.einsum', (['"""ij,ij->i"""', 'B', 'B'], {}), "('ij,ij->i', B, B)\n", (5947, 5965), True, 'import numpy as np\n'), ((5992, 6008), 'numpy.sqrt', 'np.sqrt', (['(p2 * p3)'], {}), '(p2 * p3)\n', (5999, 6008), True, 'import numpy as np\n'), ((7451, 7511), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['ra', 'dec'], {'unit': '"""deg"""', 'frame': 
'"""icrs"""', 'obstime': '"""J2000"""'}), "(ra, dec, unit='deg', frame='icrs', obstime='J2000')\n", (7459, 7511), False, 'from astropy.coordinates import SkyCoord\n'), ((7663, 7682), 'numpy.clip', 'np.clip', (['q.w', '(-1)', '(1)'], {}), '(q.w, -1, 1)\n', (7670, 7682), True, 'import numpy as np\n'), ((7961, 7977), 'numpy.array', 'np.array', (['rv[1:]'], {}), '(rv[1:])\n', (7969, 7977), True, 'import numpy as np\n'), ((8256, 8276), 'numpy.quaternion', 'np.quaternion', (['w', '*v'], {}), '(w, *v)\n', (8269, 8276), True, 'import numpy as np\n'), ((8645, 8663), 'math.cos', 'math.cos', (['(roll / 2)'], {}), '(roll / 2)\n', (8653, 8663), False, 'import math\n'), ((8665, 8683), 'math.sin', 'math.sin', (['(roll / 2)'], {}), '(roll / 2)\n', (8673, 8683), False, 'import math\n'), ((9383, 9423), 'numpy.clip', 'np.clip', (['(-2 * (q1 * q3 - q0 * q2))', '(-1)', '(1)'], {}), '(-2 * (q1 * q3 - q0 * q2), -1, 1)\n', (9390, 9423), True, 'import numpy as np\n'), ((11915, 11936), 'numpy.abs', 'np.abs', (['(array - value)'], {}), '(array - value)\n', (11921, 11936), True, 'import numpy as np\n'), ((12065, 12086), 'numpy.abs', 'np.abs', (['(array - value)'], {}), '(array - value)\n', (12071, 12086), True, 'import numpy as np\n'), ((12175, 12196), 'numpy.abs', 'np.abs', (['(array - value)'], {}), '(array - value)\n', (12181, 12196), True, 'import numpy as np\n'), ((13544, 13559), 'math.sin', 'math.sin', (['theta'], {}), '(theta)\n', (13552, 13559), False, 'import math\n'), ((13588, 13603), 'math.sin', 'math.sin', (['theta'], {}), '(theta)\n', (13596, 13603), False, 'import math\n'), ((15716, 15745), 'math.ceil', 'math.ceil', (['(2 * math.pi / step)'], {}), '(2 * math.pi / step)\n', (15725, 15745), False, 'import math\n'), ((17070, 17090), 'numpy.sum', 'np.sum', (['I'], {'axis': 'axis'}), '(I, axis=axis)\n', (17076, 17090), True, 'import numpy as np\n'), ((18604, 18632), 'numpy.random.standard_normal', 'standard_normal', (['final_shape'], {}), '(final_shape)\n', (18619, 18632), False, 'from numpy.random import standard_normal\n'), ((21395, 21420), 'numpy.array', 'np.array', (['((0, 0, 0), vx)'], {}), '(((0, 0, 0), vx))\n', (21403, 21420), True, 'import numpy as np\n'), ((22329, 22339), 'sys.exit', 'sys.exit', ([], {}), '()\n', (22337, 22339), False, 'import sys\n'), ((22662, 22702), 'sklearn.gaussian_process.kernels.Matern', 'Matern', ([], {'length_scale': '(l * max_rng)', 'nu': '(1.5)'}), '(length_scale=l * max_rng, nu=1.5)\n', (22668, 22702), False, 'from sklearn.gaussian_process.kernels import Matern, WhiteKernel\n'), ((24159, 24183), 'numpy.ones', 'np.ones', (['model.tex.shape'], {}), '(model.tex.shape)\n', (24166, 24183), True, 'import numpy as np\n'), ((25300, 25322), 'numpy.prod', 'np.prod', (['tex.shape[:2]'], {}), '(tex.shape[:2])\n', (25307, 25322), True, 'import numpy as np\n'), ((25675, 25697), 'numpy.ptp', 'np.ptp', (['points'], {'axis': '(0)'}), '(points, axis=0)\n', (25681, 25697), True, 'import numpy as np\n'), ((26250, 26301), 'sklearn.gaussian_process.kernels.WhiteKernel', 'WhiteKernel', ([], {'noise_level': '(1e-05 * noise_sd * max_rng)'}), '(noise_level=1e-05 * noise_sd * max_rng)\n', (26261, 26301), False, 'from sklearn.gaussian_process.kernels import Matern, WhiteKernel\n'), ((27360, 27396), 'visnav.algo.image.ImageProc.merge', 'ImageProc.merge', (['(orig_tx, grad_img)'], {}), '((orig_tx, grad_img))\n', (27375, 27396), False, 'from visnav.algo.image import ImageProc\n'), ((27410, 27423), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (27420, 27423), True, 'import 
matplotlib.pyplot as plt\n'), ((27436, 27456), 'matplotlib.pyplot.imshow', 'plt.imshow', (['overlaid'], {}), '(overlaid)\n', (27446, 27456), True, 'import matplotlib.pyplot as plt\n'), ((27469, 27479), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (27477, 27479), True, 'import matplotlib.pyplot as plt\n'), ((28021, 28058), 'os.path.join', 'os.path.join', (['DATA_DIR', '"""67p+tex.png"""'], {}), "(DATA_DIR, '67p+tex.png')\n", (28033, 28058), False, 'import os\n'), ((28434, 28445), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (28442, 28445), True, 'import numpy as np\n'), ((28626, 28642), 'numpy.min', 'np.min', (['err_coef'], {}), '(err_coef)\n', (28632, 28642), True, 'import numpy as np\n'), ((28644, 28660), 'numpy.max', 'np.max', (['err_coef'], {}), '(err_coef)\n', (28650, 28660), True, 'import numpy as np\n'), ((31007, 31075), 'numpy.power', 'np.power', (['(self.kernel_sc / (r.data + self.kernel_eps))', '(2)'], {'out': 'r.data'}), '(self.kernel_sc / (r.data + self.kernel_eps), 2, out=r.data)\n', (31015, 31075), True, 'import numpy as np\n'), ((31449, 31500), 'numpy.exp', 'np.exp', (['((-r.data / self.kernel_sc) ** 2)'], {'out': 'r.data'}), '((-r.data / self.kernel_sc) ** 2, out=r.data)\n', (31455, 31500), True, 'import numpy as np\n'), ((31534, 31568), 'numpy.exp', 'np.exp', (['(-(r / self.kernel_sc) ** 2)'], {}), '(-(r / self.kernel_sc) ** 2)\n', (31540, 31568), True, 'import numpy as np\n'), ((32874, 32884), 'sys.exit', 'sys.exit', ([], {}), '()\n', (32882, 32884), False, 'import sys\n'), ((33081, 33103), 'numpy.ptp', 'np.ptp', (['points'], {'axis': '(0)'}), '(points, axis=0)\n', (33087, 33103), True, 'import numpy as np\n'), ((33364, 33415), 'sklearn.gaussian_process.kernels.WhiteKernel', 'WhiteKernel', ([], {'noise_level': '(1e-05 * noise_lv * max_rng)'}), '(noise_level=1e-05 * noise_lv * max_rng)\n', (33375, 33415), False, 'from sklearn.gaussian_process.kernels import Matern, WhiteKernel\n'), ((34981, 35026), 'numpy.abs', 'np.abs', (['(noisy_points[:, (2)] - points[:, (2)])'], {}), '(noisy_points[:, (2)] - points[:, (2)])\n', (34987, 35026), True, 'import numpy as np\n'), ((44687, 44711), 'matplotlib.pyplot.waitforbuttonpress', 'plt.waitforbuttonpress', ([], {}), '()\n', (44709, 44711), True, 'import matplotlib.pyplot as plt\n'), ((45192, 45216), 'numpy.quaternion', 'np.quaternion', (['*pose[3:]'], {}), '(*pose[3:])\n', (45205, 45216), True, 'import numpy as np\n'), ((45581, 45605), 'matplotlib.pyplot.waitforbuttonpress', 'plt.waitforbuttonpress', ([], {}), '()\n', (45603, 45605), True, 'import matplotlib.pyplot as plt\n'), ((48039, 48058), 'math.log10', 'math.log10', (['(tot + 1)'], {}), '(tot + 1)\n', (48049, 48058), False, 'import math\n'), ((48543, 48564), 'numpy.expand_dims', 'np.expand_dims', (['xt', '(1)'], {}), '(xt, 1)\n', (48557, 48564), True, 'import numpy as np\n'), ((48594, 48614), 'numpy.expand_dims', 'np.expand_dims', (['x', '(0)'], {}), '(x, 0)\n', (48608, 48614), True, 'import numpy as np\n'), ((2390, 2405), 'numpy.sqrt', 'np.sqrt', (['normB2'], {}), '(normB2)\n', (2397, 2405), True, 'import numpy as np\n'), ((6653, 6679), 'numpy.linalg.norm', 'np.linalg.norm', (['qd'], {'axis': '(1)'}), '(qd, axis=1)\n', (6667, 6679), True, 'import numpy as np\n'), ((6729, 6750), 'numpy.arccos', 'np.arccos', (['qd[:, (0)]'], {}), '(qd[:, (0)])\n', (6738, 6750), True, 'import numpy as np\n'), ((8065, 8077), 'numpy.array', 'np.array', (['rv'], {}), '(rv)\n', (8073, 8077), True, 'import numpy as np\n'), ((8499, 8516), 'math.cos', 'math.cos', (['(lon / 2)'], {}), '(lon / 
2)\n', (8507, 8516), False, 'import math\n'), ((8524, 8541), 'math.sin', 'math.sin', (['(lon / 2)'], {}), '(lon / 2)\n', (8532, 8541), False, 'import math\n'), ((8571, 8589), 'math.cos', 'math.cos', (['(-lat / 2)'], {}), '(-lat / 2)\n', (8579, 8589), False, 'import math\n'), ((8594, 8612), 'math.sin', 'math.sin', (['(-lat / 2)'], {}), '(-lat / 2)\n', (8602, 8612), False, 'import math\n'), ((10783, 10805), 'math.sin', 'math.sin', (['mean_anomaly'], {}), '(mean_anomaly)\n', (10791, 10805), False, 'import math\n'), ((11745, 11766), 'numpy.cross', 'np.cross', (['nvec', 'sco_z'], {}), '(nvec, sco_z)\n', (11753, 11766), True, 'import numpy as np\n'), ((15596, 15643), 'math.ceil', 'math.ceil', (['((lat_range[1] - lat_range[0]) / step)'], {}), '((lat_range[1] - lat_range[0]) / step)\n', (15605, 15643), False, 'import math\n'), ((16036, 16083), 'math.ceil', 'math.ceil', (['((lat_range[1] - lat_range[0]) / step)'], {}), '((lat_range[1] - lat_range[0]) / step)\n', (16045, 16083), False, 'import math\n'), ((21634, 21658), 'numpy.linalg.norm', 'np.linalg.norm', (['pts[idx]'], {}), '(pts[idx])\n', (21648, 21658), True, 'import numpy as np\n'), ((21661, 21679), 'numpy.linalg.norm', 'np.linalg.norm', (['vx'], {}), '(vx)\n', (21675, 21679), True, 'import numpy as np\n'), ((25338, 25361), 'numpy.array', 'np.array', (['tex.shape[:2]'], {}), '(tex.shape[:2])\n', (25346, 25361), True, 'import numpy as np\n'), ((26047, 26057), 'sys.exit', 'sys.exit', ([], {}), '()\n', (26055, 26057), False, 'import sys\n'), ((26734, 26757), 'numpy.mean', 'np.mean', (['points'], {'axis': '(0)'}), '(points, axis=0)\n', (26741, 26757), True, 'import numpy as np\n'), ((26965, 27002), 'os.path.join', 'os.path.join', (['DATA_DIR', '"""67p+tex.png"""'], {}), "(DATA_DIR, '67p+tex.png')\n", (26977, 27002), False, 'import os\n'), ((27135, 27161), 'numpy.linalg.norm', 'np.linalg.norm', (['gx'], {'axis': '(2)'}), '(gx, axis=2)\n', (27149, 27161), True, 'import numpy as np\n'), ((27164, 27190), 'numpy.linalg.norm', 'np.linalg.norm', (['gy'], {'axis': '(2)'}), '(gy, axis=2)\n', (27178, 27190), True, 'import numpy as np\n'), ((28138, 28150), 'numpy.min', 'np.min', (['err_'], {}), '(err_)\n', (28144, 28150), True, 'import numpy as np\n'), ((28155, 28167), 'numpy.max', 'np.max', (['err_'], {}), '(err_)\n', (28161, 28167), True, 'import numpy as np\n'), ((28170, 28182), 'numpy.min', 'np.min', (['err_'], {}), '(err_)\n', (28176, 28182), True, 'import numpy as np\n'), ((32297, 32314), 'numpy.sum', 'np.sum', (['w'], {'axis': '(1)'}), '(w, axis=1)\n', (32303, 32314), True, 'import numpy as np\n'), ((34137, 34155), 'numpy.isnan', 'np.isnan', (['full_err'], {}), '(full_err)\n', (34145, 34155), True, 'import numpy as np\n'), ((35443, 35488), 'numpy.sum', 'np.sum', (['((noisy_points - points) ** 2)'], {'axis': '(-1)'}), '((noisy_points - points) ** 2, axis=-1)\n', (35449, 35488), True, 'import numpy as np\n'), ((35491, 35528), 'numpy.sum', 'np.sum', (['((points - mean) ** 2)'], {'axis': '(-1)'}), '((points - mean) ** 2, axis=-1)\n', (35497, 35528), True, 'import numpy as np\n'), ((37657, 37672), 'numpy.argmin', 'np.argmin', (['dist'], {}), '(dist)\n', (37666, 37672), True, 'import numpy as np\n'), ((39070, 39095), 'numpy.quaternion', 'np.quaternion', (['(0)', '(0)', '(0)', '(1)'], {}), '(0, 0, 0, 1)\n', (39083, 39095), True, 'import numpy as np\n'), ((39122, 39147), 'numpy.quaternion', 'np.quaternion', (['(0)', '(0)', '(1)', '(0)'], {}), '(0, 0, 1, 0)\n', (39135, 39147), True, 'import numpy as np\n'), ((39174, 39199), 'numpy.quaternion', 
'np.quaternion', (['(0)', '(0)', '(1)', '(1)'], {}), '(0, 0, 1, 1)\n', (39187, 39199), True, 'import numpy as np\n'), ((39226, 39252), 'numpy.quaternion', 'np.quaternion', (['(0)', '(0)', '(-1)', '(1)'], {}), '(0, 0, -1, 1)\n', (39239, 39252), True, 'import numpy as np\n'), ((39279, 39304), 'numpy.quaternion', 'np.quaternion', (['(0)', '(1)', '(0)', '(0)'], {}), '(0, 1, 0, 0)\n', (39292, 39304), True, 'import numpy as np\n'), ((39331, 39356), 'numpy.quaternion', 'np.quaternion', (['(0)', '(1)', '(0)', '(1)'], {}), '(0, 1, 0, 1)\n', (39344, 39356), True, 'import numpy as np\n'), ((39383, 39409), 'numpy.quaternion', 'np.quaternion', (['(0)', '(1)', '(0)', '(-1)'], {}), '(0, 1, 0, -1)\n', (39396, 39409), True, 'import numpy as np\n'), ((39436, 39461), 'numpy.quaternion', 'np.quaternion', (['(0)', '(1)', '(1)', '(0)'], {}), '(0, 1, 1, 0)\n', (39449, 39461), True, 'import numpy as np\n'), ((39488, 39514), 'numpy.quaternion', 'np.quaternion', (['(0)', '(1)', '(-1)', '(0)'], {}), '(0, 1, -1, 0)\n', (39501, 39514), True, 'import numpy as np\n'), ((39541, 39566), 'numpy.quaternion', 'np.quaternion', (['(0)', '(1)', '(1)', '(1)'], {}), '(0, 1, 1, 1)\n', (39554, 39566), True, 'import numpy as np\n'), ((39593, 39619), 'numpy.quaternion', 'np.quaternion', (['(0)', '(1)', '(1)', '(-1)'], {}), '(0, 1, 1, -1)\n', (39606, 39619), True, 'import numpy as np\n'), ((39646, 39672), 'numpy.quaternion', 'np.quaternion', (['(0)', '(1)', '(-1)', '(1)'], {}), '(0, 1, -1, 1)\n', (39659, 39672), True, 'import numpy as np\n'), ((39699, 39726), 'numpy.quaternion', 'np.quaternion', (['(0)', '(1)', '(-1)', '(-1)'], {}), '(0, 1, -1, -1)\n', (39712, 39726), True, 'import numpy as np\n'), ((39753, 39778), 'numpy.quaternion', 'np.quaternion', (['(1)', '(0)', '(0)', '(1)'], {}), '(1, 0, 0, 1)\n', (39766, 39778), True, 'import numpy as np\n'), ((39805, 39831), 'numpy.quaternion', 'np.quaternion', (['(1)', '(0)', '(0)', '(-1)'], {}), '(1, 0, 0, -1)\n', (39818, 39831), True, 'import numpy as np\n'), ((39858, 39883), 'numpy.quaternion', 'np.quaternion', (['(1)', '(0)', '(1)', '(0)'], {}), '(1, 0, 1, 0)\n', (39871, 39883), True, 'import numpy as np\n'), ((39910, 39936), 'numpy.quaternion', 'np.quaternion', (['(1)', '(0)', '(-1)', '(0)'], {}), '(1, 0, -1, 0)\n', (39923, 39936), True, 'import numpy as np\n'), ((39963, 39988), 'numpy.quaternion', 'np.quaternion', (['(1)', '(0)', '(1)', '(1)'], {}), '(1, 0, 1, 1)\n', (39976, 39988), True, 'import numpy as np\n'), ((40015, 40041), 'numpy.quaternion', 'np.quaternion', (['(1)', '(0)', '(1)', '(-1)'], {}), '(1, 0, 1, -1)\n', (40028, 40041), True, 'import numpy as np\n'), ((40068, 40094), 'numpy.quaternion', 'np.quaternion', (['(1)', '(0)', '(-1)', '(1)'], {}), '(1, 0, -1, 1)\n', (40081, 40094), True, 'import numpy as np\n'), ((40121, 40148), 'numpy.quaternion', 'np.quaternion', (['(1)', '(0)', '(-1)', '(-1)'], {}), '(1, 0, -1, -1)\n', (40134, 40148), True, 'import numpy as np\n'), ((40175, 40200), 'numpy.quaternion', 'np.quaternion', (['(1)', '(1)', '(0)', '(0)'], {}), '(1, 1, 0, 0)\n', (40188, 40200), True, 'import numpy as np\n'), ((40227, 40253), 'numpy.quaternion', 'np.quaternion', (['(1)', '(-1)', '(0)', '(0)'], {}), '(1, -1, 0, 0)\n', (40240, 40253), True, 'import numpy as np\n'), ((40280, 40305), 'numpy.quaternion', 'np.quaternion', (['(1)', '(1)', '(0)', '(1)'], {}), '(1, 1, 0, 1)\n', (40293, 40305), True, 'import numpy as np\n'), ((40332, 40358), 'numpy.quaternion', 'np.quaternion', (['(1)', '(1)', '(0)', '(-1)'], {}), '(1, 1, 0, -1)\n', (40345, 40358), True, 'import numpy as 
np\n'), ((40385, 40411), 'numpy.quaternion', 'np.quaternion', (['(1)', '(-1)', '(0)', '(1)'], {}), '(1, -1, 0, 1)\n', (40398, 40411), True, 'import numpy as np\n'), ((40438, 40465), 'numpy.quaternion', 'np.quaternion', (['(1)', '(-1)', '(0)', '(-1)'], {}), '(1, -1, 0, -1)\n', (40451, 40465), True, 'import numpy as np\n'), ((40492, 40517), 'numpy.quaternion', 'np.quaternion', (['(1)', '(1)', '(1)', '(0)'], {}), '(1, 1, 1, 0)\n', (40505, 40517), True, 'import numpy as np\n'), ((40544, 40570), 'numpy.quaternion', 'np.quaternion', (['(1)', '(1)', '(-1)', '(0)'], {}), '(1, 1, -1, 0)\n', (40557, 40570), True, 'import numpy as np\n'), ((40597, 40623), 'numpy.quaternion', 'np.quaternion', (['(1)', '(-1)', '(1)', '(0)'], {}), '(1, -1, 1, 0)\n', (40610, 40623), True, 'import numpy as np\n'), ((40650, 40677), 'numpy.quaternion', 'np.quaternion', (['(1)', '(-1)', '(-1)', '(0)'], {}), '(1, -1, -1, 0)\n', (40663, 40677), True, 'import numpy as np\n'), ((40704, 40730), 'numpy.quaternion', 'np.quaternion', (['(1)', '(1)', '(1)', '(-1)'], {}), '(1, 1, 1, -1)\n', (40717, 40730), True, 'import numpy as np\n'), ((40757, 40783), 'numpy.quaternion', 'np.quaternion', (['(1)', '(1)', '(-1)', '(1)'], {}), '(1, 1, -1, 1)\n', (40770, 40783), True, 'import numpy as np\n'), ((40810, 40837), 'numpy.quaternion', 'np.quaternion', (['(1)', '(1)', '(-1)', '(-1)'], {}), '(1, 1, -1, -1)\n', (40823, 40837), True, 'import numpy as np\n'), ((40864, 40890), 'numpy.quaternion', 'np.quaternion', (['(1)', '(-1)', '(1)', '(1)'], {}), '(1, -1, 1, 1)\n', (40877, 40890), True, 'import numpy as np\n'), ((40917, 40944), 'numpy.quaternion', 'np.quaternion', (['(1)', '(-1)', '(1)', '(-1)'], {}), '(1, -1, 1, -1)\n', (40930, 40944), True, 'import numpy as np\n'), ((40971, 40998), 'numpy.quaternion', 'np.quaternion', (['(1)', '(-1)', '(-1)', '(1)'], {}), '(1, -1, -1, 1)\n', (40984, 40998), True, 'import numpy as np\n'), ((41025, 41053), 'numpy.quaternion', 'np.quaternion', (['(1)', '(-1)', '(-1)', '(-1)'], {}), '(1, -1, -1, -1)\n', (41038, 41053), True, 'import numpy as np\n'), ((43532, 43574), 'numpy.sqrt', 'np.sqrt', (['(1 + a ** 2 / (delta ** 2 + 1e-15))'], {}), '(1 + a ** 2 / (delta ** 2 + 1e-15))\n', (43539, 43574), True, 'import numpy as np\n'), ((47596, 47643), 'linecache.getline', 'linecache.getline', (['frame.filename', 'frame.lineno'], {}), '(frame.filename, frame.lineno)\n', (47613, 47643), False, 'import linecache\n'), ((3855, 3891), 'visnav.algo.image.ImageProc.gkern2d', 'ImageProc.gkern2d', (['shape', '(1 / len_sc)'], {}), '(shape, 1 / len_sc)\n', (3872, 3891), False, 'from visnav.algo.image import ImageProc\n'), ((5253, 5278), 'numpy.linalg.norm', 'np.linalg.norm', (['B'], {'axis': '(1)'}), '(B, axis=1)\n', (5267, 5278), True, 'import numpy as np\n'), ((10975, 10986), 'math.sin', 'math.sin', (['E'], {}), '(E)\n', (10983, 10986), False, 'import math\n'), ((16261, 16274), 'math.cos', 'math.cos', (['lat'], {}), '(lat)\n', (16269, 16274), False, 'import math\n'), ((21576, 21599), 'numpy.linalg.norm', 'np.linalg.norm', (['(pt - vx)'], {}), '(pt - vx)\n', (21590, 21599), True, 'import numpy as np\n'), ((26093, 26138), 'sklearn.gaussian_process.kernels.Matern', 'Matern', ([], {'length_scale': '(len_sc * max_rng)', 'nu': '(1.5)'}), '(length_scale=len_sc * max_rng, nu=1.5)\n', (26099, 26138), False, 'from sklearn.gaussian_process.kernels import Matern, WhiteKernel\n'), ((26177, 26228), 'sklearn.gaussian_process.kernels.Matern', 'Matern', ([], {'length_scale': '(0.1 * len_sc * max_rng)', 'nu': '(1.5)'}), '(length_scale=0.1 * len_sc * 
max_rng, nu=1.5)\n', (26183, 26228), False, 'from sklearn.gaussian_process.kernels import Matern, WhiteKernel\n'), ((27216, 27227), 'numpy.min', 'np.min', (['gxy'], {}), '(gxy)\n', (27222, 27227), True, 'import numpy as np\n'), ((27232, 27243), 'numpy.max', 'np.max', (['gxy'], {}), '(gxy)\n', (27238, 27243), True, 'import numpy as np\n'), ((27246, 27257), 'numpy.min', 'np.min', (['gxy'], {}), '(gxy)\n', (27252, 27257), True, 'import numpy as np\n'), ((33207, 33252), 'sklearn.gaussian_process.kernels.Matern', 'Matern', ([], {'length_scale': '(len_sc * max_rng)', 'nu': '(1.5)'}), '(length_scale=len_sc * max_rng, nu=1.5)\n', (33213, 33252), False, 'from sklearn.gaussian_process.kernels import Matern, WhiteKernel\n'), ((33291, 33342), 'sklearn.gaussian_process.kernels.Matern', 'Matern', ([], {'length_scale': '(0.1 * len_sc * max_rng)', 'nu': '(1.5)'}), '(length_scale=0.1 * len_sc * max_rng, nu=1.5)\n', (33297, 33342), False, 'from sklearn.gaussian_process.kernels import Matern, WhiteKernel\n'), ((37896, 37909), 'numpy.sum', 'np.sum', (['(w * v)'], {}), '(w * v)\n', (37902, 37909), True, 'import numpy as np\n'), ((37912, 37921), 'numpy.sum', 'np.sum', (['w'], {}), '(w)\n', (37918, 37921), True, 'import numpy as np\n'), ((6061, 6086), 'numpy.linalg.norm', 'np.linalg.norm', (['A'], {'axis': '(1)'}), '(A, axis=1)\n', (6075, 6086), True, 'import numpy as np\n'), ((6124, 6149), 'numpy.linalg.norm', 'np.linalg.norm', (['B'], {'axis': '(1)'}), '(B, axis=1)\n', (6138, 6149), True, 'import numpy as np\n'), ((10931, 10942), 'math.cos', 'math.cos', (['E'], {}), '(E)\n', (10939, 10942), False, 'import math\n'), ((11149, 11271), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'x': 'ast_v[0]', 'y': 'ast_v[1]', 'z': 'ast_v[2]', 'frame': '"""icrs"""', 'unit': '"""m"""', 'representation_type': '"""cartesian"""', 'obstime': '"""J2000"""'}), "(x=ast_v[0], y=ast_v[1], z=ast_v[2], frame='icrs', unit='m',\n representation_type='cartesian', obstime='J2000')\n", (11157, 11271), False, 'from astropy.coordinates import SkyCoord\n'), ((30367, 30377), 'numpy.mean', 'np.mean', (['d'], {}), '(d)\n', (30374, 30377), True, 'import numpy as np\n'), ((35695, 35708), 'numpy.mean', 'np.mean', (['devs'], {}), '(devs)\n', (35702, 35708), True, 'import numpy as np\n'), ((37615, 37631), 'numpy.array', 'np.array', (['(y, x)'], {}), '((y, x))\n', (37623, 37631), True, 'import numpy as np\n'), ((35081, 35101), 'numpy.abs', 'np.abs', (['(full_err - 1)'], {}), '(full_err - 1)\n', (35087, 35101), True, 'import numpy as np\n'), ((35661, 35690), 'numpy.percentile', 'np.percentile', (['devs', '(68, 95)'], {}), '(devs, (68, 95))\n', (35674, 35690), True, 'import numpy as np\n'), ((44510, 44529), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (44518, 44529), True, 'import numpy as np\n'), ((45374, 45393), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (45382, 45393), True, 'import numpy as np\n')] |
Tarekbouamer/Image-Retrieval-for-Image-Based-Localization | cirtorch/filters/sobel.py | fcad9af4f558bebb3cbec1d08e49603a452f439d | import torch
import torch.nn as nn
import torch.nn.functional as F
from .kernels import (
get_spatial_gradient_kernel2d,
get_spatial_gradient_kernel3d,
normalize_kernel2d
)
def spatial_gradient(input, mode='sobel', order=1, normalized=True):
"""
Computes the first order image derivative in both x and y using a Sobel operator.
"""
if not len(input.shape) == 4:
raise ValueError("Invalid input shape, we expect BxCxHxW. Got: {}"
.format(input.shape))
# allocate kernel
kernel = get_spatial_gradient_kernel2d(mode, order)
if normalized:
kernel = normalize_kernel2d(kernel)
# prepare kernel
b, c, h, w = input.shape
tmp_kernel = kernel.to(input).detach()
tmp_kernel = tmp_kernel.unsqueeze(1).unsqueeze(1)
# convolve input tensor with sobel kernel
kernel_flip = tmp_kernel.flip(-3)
# Pad with "replicate for spatial dims, but with zeros for channel
spatial_pad = [
kernel.size(1) // 2,
kernel.size(1) // 2,
kernel.size(2) // 2,
kernel.size(2) // 2
]
out_channels = 3 if order == 2 else 2
padded_inp = F.pad(input.reshape(b * c, 1, h, w), spatial_pad, 'replicate')[:, :, None]
return F.conv3d(padded_inp, kernel_flip, padding=0).view(b, c, out_channels, h, w)
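# A minimal usage sketch (assumed input, not part of the original module): for a
# batch of grayscale images the result inserts a derivative axis of size 2
# (d/dx, d/dy) between the channel and spatial dimensions:
#
#   img = torch.rand(1, 1, 5, 5)             # BxCxHxW
#   grads = spatial_gradient(img)            # -> shape (1, 1, 2, 5, 5)
#   gx, gy = grads[:, :, 0], grads[:, :, 1]  # first-order x/y derivatives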
def spatial_gradient3d(input, mode='diff', order=1):
"""
Computes the first and second order volume derivative in x, y and d using a diff operator.
"""
if not len(input.shape) == 5:
raise ValueError("Invalid input shape, we expect BxCxDxHxW. Got: {}"
.format(input.shape))
# allocate kernel
kernel = get_spatial_gradient_kernel3d(mode, order)
# prepare kernel
b, c, d, h, w = input.shape
tmp_kernel = kernel.to(input).detach()
tmp_kernel = tmp_kernel.repeat(c, 1, 1, 1, 1)
# convolve input tensor with grad kernel
kernel_flip = tmp_kernel.flip(-3)
# Pad with "replicate for spatial dims, but with zeros for channel
spatial_pad = [
kernel.size(2) // 2,
kernel.size(2) // 2,
kernel.size(3) // 2,
kernel.size(3) // 2,
kernel.size(4) // 2,
kernel.size(4) // 2
]
out_ch = 6 if order == 2 else 3
return F.conv3d(F.pad(
input, spatial_pad, 'replicate'), kernel_flip, padding=0, groups=c).view(b, c, out_ch, d, h, w)
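# Usage sketch (assumed input): volumes are BxCxDxHxW and, for order=1, the
# output carries a derivative axis of size 3 (6 for order=2):
#
#   vol = torch.rand(1, 1, 4, 5, 5)     # BxCxDxHxW
#   grads = spatial_gradient3d(vol)     # -> shape (1, 1, 3, 4, 5, 5)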
def sobel(input, normalized=True, eps=1e-6):
"""
Computes the Sobel operator and returns the magnitude per channel.
"""
if not len(input.shape) == 4:
raise ValueError("Invalid input shape, we expect BxCxHxW. Got: {}"
.format(input.shape))
    # compute the x/y gradients
edges = spatial_gradient(input, normalized=normalized)
# unpack the edges
gx = edges[:, :, 0]
gy = edges[:, :, 1]
    # compute gradient magnitude
magnitude = torch.sqrt(gx * gx + gy * gy + eps)
return magnitude
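# Usage sketch (assumed input): the magnitude keeps the BxCxHxW shape of the
# input, one edge-strength value per pixel and channel:
#
#   img = torch.rand(1, 3, 4, 4)
#   edges = sobel(img)   # -> shape (1, 3, 4, 4)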
class SpatialGradient(nn.Module):
"""
Computes the first order image derivative in both x and y using a Sobel operator.
"""
def __init__(self, mode='sobel', order=1, normalized=True):
super(SpatialGradient, self).__init__()
self.normalized = normalized
self.order = order
self.mode = mode
def forward(self, input):
return spatial_gradient(input, self.mode, self.order, self.normalized)
class SpatialGradient3d(nn.Module):
"""
Computes the first and second order volume derivative in x, y and d using a diff operator.
"""
def __init__(self, mode='diff', order=1):
super(SpatialGradient3d, self).__init__()
self.order = order
self.mode = mode
self.kernel = get_spatial_gradient_kernel3d(mode, order)
def forward(self, input):
return spatial_gradient3d(input, self.mode, self.order)
class Sobel(nn.Module):
"""
Computes the Sobel operator and returns the magnitude per channel.
"""
def __init__(self, normalized=True, eps=1e-6):
super(Sobel, self).__init__()
self.normalized = normalized
self.eps = eps
def forward(self, input):
return sobel(input, self.normalized, self.eps)
| [((2916, 2951), 'torch.sqrt', 'torch.sqrt', (['(gx * gx + gy * gy + eps)'], {}), '(gx * gx + gy * gy + eps)\n', (2926, 2951), False, 'import torch\n'), ((1255, 1299), 'torch.nn.functional.conv3d', 'F.conv3d', (['padded_inp', 'kernel_flip'], {'padding': '(0)'}), '(padded_inp, kernel_flip, padding=0)\n', (1263, 1299), True, 'import torch.nn.functional as F\n'), ((2296, 2334), 'torch.nn.functional.pad', 'F.pad', (['input', 'spatial_pad', '"""replicate"""'], {}), "(input, spatial_pad, 'replicate')\n", (2301, 2334), True, 'import torch.nn.functional as F\n')] |
xu6148152/Binea_Python_Project | PythonCookbook/concurrent_test/findrobots.py | d943eb5f4685d08f080b372dcf1a7cbd5d63efed | # -*- encoding: utf-8 -*-
import gzip
import io
import glob
from concurrent import futures
def find_robots(filename):
'''
Find all of the hosts that access robots.txt in a single log file
'''
robots = set()
with gzip.open(filename) as f:
for line in io.TextIOWrapper(f, encoding='ascii'):
fields = line.split()
if fields[6] == '/robots.txt':
robots.add(fields[0])
return robots
def find_all_robots(logdir):
'''
    Find all hosts across an entire sequence of files
'''
files = glob.glob(logdir + '/*.log.gz')
all_robots = set()
with futures.ProcessPoolExecutor() as pool:
for robots in pool.map(find_robots, files):
all_robots.update(robots)
return all_robots
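# For comparison, the same scan without process-level parallelism is a plain
# `map` over the files (a sketch, not used by the script itself):
#
#   def find_all_robots_sequential(logdir):
#       all_robots = set()
#       for robots in map(find_robots, glob.glob(logdir + '/*.log.gz')):
#           all_robots.update(robots)
#       return all_robots
#
# The `__main__` guard below also matters: ProcessPoolExecutor re-imports this
# module in worker processes on platforms that spawn rather than fork.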
if __name__ == '__main__':
robots = find_all_robots('logs')
for ipaddr in robots:
print(ipaddr)
| [((567, 598), 'glob.glob', 'glob.glob', (["(logdir + '/*.log.gz')"], {}), "(logdir + '/*.log.gz')\n", (576, 598), False, 'import glob\n'), ((235, 254), 'gzip.open', 'gzip.open', (['filename'], {}), '(filename)\n', (244, 254), False, 'import gzip\n'), ((281, 318), 'io.TextIOWrapper', 'io.TextIOWrapper', (['f'], {'encoding': '"""ascii"""'}), "(f, encoding='ascii')\n", (297, 318), False, 'import io\n'), ((631, 660), 'concurrent.futures.ProcessPoolExecutor', 'futures.ProcessPoolExecutor', ([], {}), '()\n', (658, 660), False, 'from concurrent import futures\n')] |
sreynit02/RunestoneServer | docker/setup.py | 2d72fd1c26264a8d7d88e2bccfe9bfbb4d8b9a98 | # ******************************************************************
# |docname| - Provide `docker_tools.py` as the script `docker-tools`
# ******************************************************************
from setuptools import setup
setup(
name="runestone-docker-tools",
version="0.1",
install_requires=["click"],
entry_points={
"console_scripts": ["docker-tools = docker_tools:cli"]
},
)
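# After `pip install .`, setuptools generates a `docker-tools` executable on the
# PATH that invokes the `cli` object in docker_tools.py (presumably a Click
# command group, given the `click` dependency above).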
| [((237, 397), 'setuptools.setup', 'setup', ([], {'name': '"""runestone-docker-tools"""', 'version': '"""0.1"""', 'install_requires': "['click']", 'entry_points': "{'console_scripts': ['docker-tools = docker_tools:cli']}"}), "(name='runestone-docker-tools', version='0.1', install_requires=[\n 'click'], entry_points={'console_scripts': [\n 'docker-tools = docker_tools:cli']})\n", (242, 397), False, 'from setuptools import setup\n')] |
AbhinavSingh-21f1002369/AFKZenCoders | PS12/api2.py | 344475e7d5d60c09637b0bec28c5dab1befe2b65 | from flask import Flask, render_template, request, jsonify,send_file, redirect,session, url_for
from werkzeug.utils import secure_filename
import os
import utilities, queries
import logger
from flask_cors import CORS, cross_origin
from datetime import timedelta
app = Flask(__name__)
cors = CORS(app, resources={r"/*": {"origins": "*"}})
UPLOAD_FOLDER = '/home/pi/Desktop/AFKZenCoders/PS12/uploads/'
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.config['CORS_HEADERS'] = 'Content-Type'
app.secret_key = "AFKZenCodersAAS"
app.permanent_session_lifetime = timedelta(minutes=60)
@app.route('/')
def hello():
if "username" in session:
logger.logit("Rendered upload.html - test wali")
return render_template('upload.html')
else:
logger.logit("Session does not exist")
logger.logit("Rendered root '/'")
return render_template('index.html')
@app.route('/restart')
def restart():
logger.logit(f"---GOOGLE RESTART---")
os.system("sudo reboot -h now")
@app.route('/userauth', methods = ['POST','GET'])
def userauth():
username = request.form.get('username')
password = request.form.get('password')
if username=="root" and password=="toor":
logger.logit(f"Success LOGIN Request Username:{username} Password:{password}")
session["username"] = username
session.permanent = True
return redirect(url_for("page_upload"))
else:
logger.logit(f"Failure LOGIN Request Username:{username} Password:{password}")
return redirect("http://www.themedallionschool.com/abhinav/PS12/incorrect.html", code=302)
@app.route('/page_upload')
def page_upload():
if "username" in session:
logger.logit("Rendered upload.html")
return render_template('upload.html')
else:
logger.logit("Session does not exist")
return redirect("/")
@app.route('/page_cdr')
def page_cdr():
if "username" in session:
logger.logit("Rendered cdr.html")
return render_template('cdr.html')
else:
logger.logit("Session does not exist")
return redirect("/")
@app.route('/page_fir')
def page_fir():
if "username" in session:
logger.logit("Rendered fir.html")
return render_template('fir.html')
else:
logger.logit("Session does not exist")
return redirect("/")
@app.route('/logout')
def logout():
if "username" in session:
session.pop("username", None)
logger.logit("Successfull logout")
return redirect("/")
else:
logger.logit("Session does not exist")
return redirect("/")
@app.route('/upload')
def upload_file():
logger.logit("Rendered upload.html - test wali")
return render_template('upload.html')
@app.route('/uploader',methods=['GET','POST'])
def uploader():
uploaded_files = request.files.getlist("file")
#number = request.args.get('number')
#number = "7982345234"
#print(uploaded_files)
logger.logit(f"/° Multiple Files Upload Start")
for file in uploaded_files:
filename = secure_filename(file.filename)
if filename=="917982345234.csv":
path = os.path.join(app.config['UPLOAD_FOLDER'],filename)
file.save(path)
number = filename[2:11]
logger.logit(f"| CDRData Saved {number}")
utilities.addCDRData(path,number)
elif filename=="918367448476.csv":
path = os.path.join(app.config['UPLOAD_FOLDER'],filename)
file.save(path)
number = filename[2:11]
logger.logit(f"| CDRData Saved {number}")
utilities.addCDRData(path,number)
elif filename=="916100080762.csv":
path = os.path.join(app.config['UPLOAD_FOLDER'],filename)
file.save(path)
number = filename[2:11]
logger.logit(f"| CDRData Saved {number}")
utilities.addCDRData(path,number)
elif filename=="CGI_Dataset.csv":
path = os.path.join(app.config['UPLOAD_FOLDER'],filename)
file.save(path)
logger.logit("| CGIData Saved")
utilities.addCGIData(path)
elif filename=="Bank_Details.csv":
path = os.path.join(app.config['UPLOAD_FOLDER'],filename)
file.save(path)
logger.logit("| Bank_Details Saved")
utilities.addBankData(path)
elif filename=="FIR_Dataset.csv":
path = os.path.join(app.config['UPLOAD_FOLDER'],filename)
file.save(path)
logger.logit("| FIR_Dataset Saved")
utilities.addFIRData(path)
elif filename=="Thana.csv":
path = os.path.join(app.config['UPLOAD_FOLDER'],filename)
file.save(path)
logger.logit("| Thana Saved")
utilities.addThanaData(path)
elif filename=="Thana_list_UP.csv":
path = os.path.join(app.config['UPLOAD_FOLDER'],filename)
# print(path,file,filename)
# /home/pi/Desktop/AFKZenCoders/PS12/uploads/Thana_list_UP.csv <FileStorage: 'Thana_list_UP.csv' ('application/vnd.ms-excel')> Thana_list_UP.csv
file.save(path)
logger.logit("| Thana_list_UP Saved")
utilities.addthanaListData(path)
else:
logger.logit(f"File Upload error - {filename}")
logger.logit(f"\. Multiple Files Uploaded - {len(uploaded_files)}")
return render_template('cdr.html')
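# Client-side sketch (host/port are assumptions): the endpoint collects every
# multipart part named "file", so several datasets can go up in one request, e.g.
#
#   curl -X POST -F "file=@CGI_Dataset.csv" -F "file=@FIR_Dataset.csv" \
#        http://<host>:5000/uploader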
@app.route('/uploader/cdr', methods = ['GET', 'POST'])
def upload_cdr_fxn():
if request.method == 'POST':
# Getting the File
file = request.files['file']
        # the subscriber number arrives as a form field, not as an uploaded file
        number = request.form['number']
filename = secure_filename(file.filename)
# Path for file
path_of_csv = os.path.join(app.config['UPLOAD_FOLDER'], filename)
# Saving File
file.save(path_of_csv)
logger.logit("CDRData Saved")
print("CDR File Saved successfully")
# Loading File To Database
utilities.addCDRData(path_of_csv,number)
return "CDR File Saved and Loaded to Database Successfully"
@app.route('/uploader/thana', methods = ['GET', 'POST'])
def upload_thana_fxn():
if request.method == 'POST':
# Getting the File
file = request.files['file']
filename = secure_filename(file.filename)
# Path for file
path_of_csv = os.path.join(app.config['UPLOAD_FOLDER'], filename)
# Saving File
file.save(path_of_csv)
logger.logit("ThanaData Saved")
print("Thana File Saved successfully")
# Loading File To Database
utilities.addThanaData(path_of_csv)
return "Thana File Saved and Loaded to Database Successfully"
@app.route('/uploader/bankacc', methods = ['GET', 'POST'])
def upload_bankacc_fxn():
if request.method == 'POST':
# Getting the File
file = request.files['file']
filename = secure_filename(file.filename)
# Path for file
path_of_csv = os.path.join(app.config['UPLOAD_FOLDER'], filename)
# Saving File
file.save(path_of_csv)
print("BankAcc File Saved successfully")
logger.logit("BankData Saved")
# Loading File To Database
utilities.addBankData(path_of_csv)
return "BankAcc File Saved and Loaded to Database Successfully"
@app.route('/uploader/cgi', methods = ['GET', 'POST'])
def upload_cgi_fxn():
if request.method == 'POST':
# Getting the File
file = request.files['file']
filename = secure_filename(file.filename)
# Path for file
path_of_csv = os.path.join(app.config['UPLOAD_FOLDER'], filename)
# Saving File
file.save(path_of_csv)
print("CGI File Saved successfully")
logger.logit("CGIData Saved")
# Loading File To Database
utilities.addCGIData(path_of_csv)
return "CGI File Saved and Loaded to Database Successfully"
@app.route('/uploader/fir', methods = ['GET', 'POST'])
def upload_fir_fxn():
if request.method == 'POST':
# Getting the File
file = request.files['file']
filename = secure_filename(file.filename)
# Path for file
path_of_csv = os.path.join(app.config['UPLOAD_FOLDER'], filename)
# Saving File
file.save(path_of_csv)
print("FIR File Saved successfully")
logger.logit("FIRData Saved")
# Loading File To Database
utilities.addFIRData(path_of_csv)
return "FIR File Saved and Loaded to Database Successfully"
@app.route('/uploader/thanalist', methods = ['GET', 'POST'])
def upload_thanalist_fxn():
if request.method == 'POST':
# Getting the File
file = request.files['file']
filename = secure_filename(file.filename)
# Path for file
path_of_csv = os.path.join(app.config['UPLOAD_FOLDER'], filename)
# Saving File
file.save(path_of_csv)
print("Thana List File Saved successfully")
logger.logit("ThanaListDATA Saved")
# Loading File To Database
utilities.addthanaListData(path_of_csv)
return "Thana File Saved and Loaded to Database Successfully"
# ############################### Queries ##################################
@app.route('/query/1/', methods = ['GET'])
def query_1():
headers = ["Calling Number","Called Number","Start Time","Duration(sec)","Call Type"]
query = "SELECT calling_number, called_number, start_time, duration, cell_type FROM CallData ORDER BY duration DESC"
result = queries.runQuery(query)
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
logger.logit(">>> Query 1 Call")
return jsonify(response)
@app.route('/query/2/', methods = ['GET'])
def query_2():
# Parsing the Headers
since = str(request.args.get('since')) + " 00:00:00"
till = str(request.args.get('till')) + " 23:59:59"
headers = ["Calling Number","Called Number","Start Time","End Time","Duration(sec)","Start Tower","End Tower","Call Type","IMEI","IMSI","SMSC","Service Provider"]
query = f'SELECT * FROM CallData WHERE start_time < "{till}" AND start_time > "{since}";'
result = queries.runQuery(query)
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
fString = f">>> Query 2 Call since:{since}, till:{till}"
logger.logit(fString)
return jsonify(response)
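# NOTE: the query endpoints in this section interpolate request arguments
# straight into SQL via f-strings, which is open to SQL injection. A safer
# sketch with bound parameters (an assumption: the backing store is the SQLite
# file CDRdata.db referenced further down; `queries.runQuery` itself is left
# as-is):
import sqlite3

def run_query_safe(sql, params=()):
    # The driver escapes the bound values, so user input never reaches the SQL text.
    with sqlite3.connect("/home/pi/Desktop/AFKZenCoders/PS12/CDRdata.db") as conn:
        return conn.execute(sql, params).fetchall()

# Example: the date-range filter of query_2 with placeholders instead of f-strings.
#   rows = run_query_safe(
#       "SELECT * FROM CallData WHERE start_time < ? AND start_time > ?",
#       (till, since),
#   )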
@app.route('/query/3/', methods = ['GET'])
def query_3():
headers = ["Calling Number","Called Number","Start Time","End Time","Duration(sec)","Start Tower","End Tower","Call Type","IMEI","IMSI","SMSC","Service Provider"]
query = f"SELECT * FROM CallData ORDER BY duration DESC LIMIT 10"
result = queries.runQuery(query)
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
logger.logit(">>> Query 3 Call")
return jsonify(response)
@app.route('/query/4/', methods = ['GET'])
def query_4():
headers = ["Dialled Number","Total Dialled Calls","Total Duration"]
query = f'''SELECT called_number, count(*) as 'Frequency', sum(duration) as 'Total Duration' from CallData where cell_type="OUT" GROUP by called_number ORDER by Frequency DESC'''
result = queries.runQuery(query)
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
logger.logit(">>> Query 4 Call")
return jsonify(response)
@app.route('/query/5/', methods = ['GET'])
def query_5():
headers = ["Caller","Total Recieved Calls","Total Duration"]
query = f'''SELECT calling_number, count(*) as 'Frequency', sum(duration) as 'Total Duration' from CallData where cell_type="IN" GROUP by calling_number ORDER by Frequency DESC'''
result = queries.runQuery(query)
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
logger.logit(">>> Query 5 Call")
return jsonify(response)
@app.route('/query/6/', methods = ['GET'])
def query_6():
headers = ["Called Number","Total Duration(sec)"]
query = f"SELECT DISTINCT called_number, sum(duration) as totalDuration FROM CallData WHERE called_number NOT in (7982345234) GROUP BY called_number ORDER BY totalDuration DESC "
result = queries.runQuery(query)
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
logger.logit(">>> Query 6 Call")
return jsonify(response)
@app.route('/query/7/', methods = ['GET'])
def query_7():
headers = ["Called Number","Duration","Call Type"]
query = f'SELECT called_number, duration, cell_type FROM CallData WHERE cell_type="OUT" ORDER by duration DESC'
result = queries.runQuery(query)
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
logger.logit(">>> Query 7 Call")
return jsonify(response)
@app.route('/query/8/', methods = ['GET'])
def query_8():
headers = ["Calling Number","Duration","Call Type"]
query = f'SELECT calling_number, duration, cell_type FROM CallData WHERE cell_type="IN" ORDER by duration DESC'
result = queries.runQuery(query)
headers = ["Phone NO","Duration","Call Type"]
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
logger.logit(">>> Query 8 Call")
return jsonify(response)
@app.route('/query/9/', methods = ['GET'])
def query_9():
headers = ["Calling Number","Called Number","Start Time","End Time","Duration(sec)","Start Tower","End Tower","Call Type","IMEI","IMSI","SMSC","Service Provider"]
# Parsing the Headers
date = request.args.get('date')
query = f'SELECT * from CallData where start_time like "{date}%" or end_time like "{date}%"'
result = queries.runQuery(query)
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
>> Query 10 Call date:">
    fString = f">>> Query 9 Call date:{date}"
logger.logit(fString)
return jsonify(response)
@app.route('/query/10/', methods = ['GET'])
def query_10():
headers = ["Start Time","End Time","Tower 1","Tower 2"]
# Parsing the Headers
date = request.args.get('date')
    # Use the requested date instead of the hard-coded "2021-01-04" the
    # original interpolated by mistake.
    query = f'''SELECT start_time, end_time, cell1, cell2 from CallData where (start_time like "{date}%" or end_time like "{date}%")'''
result = queries.runQuery(query)
#print(result)
fString = f">>> Query 10 Call date:{date}"
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
logger.logit(fString)
return jsonify(response)
@app.route('/query/11/', methods = ['GET'])
def query_11():
query = f'''SELECT DISTINCT called_number FROM CallData WHERE cell_type="OUT" UNION SELECT DISTINCT calling_number FROM CallData WHERE cell_type="IN"'''
result = queries.runQuery(query)
#print(result)
#res = []
#for item in result:
# res.append(item[0])
headers = ["Mobile Number"]
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
logger.logit(">>> Query 11 Call")
return jsonify(response)
@app.route('/query/12/', methods = ['GET'])
def query_12():
# Parsing the Headers
number = request.args.get('number')
query = f'''SELECT * FROM CallData WHERE called_number="{number}" or calling_number="{number}"'''
result = queries.runQuery(query)
headers = ["Calling Number","Called Number","Start Time","End Time","Duration(sec)","Start Tower","End Tower","Call Type","IMEI","IMSI","SMSC","Service Provider"]
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
fString = f">>> Query 12 Call number:{number}"
logger.logit(fString)
return jsonify(response)
@app.route('/query/20/', methods = ['GET'])
def query_20():
# Parsing the Headers
fir = request.args.get('fir')
query = f'SELECT * from FIR WHERE FIR_No={int(fir)}'
result = queries.runQuery(query)
#print(result)
headers = ["FIR No","District","PS ID","Time of FIR","Complainant","Act","Section","Complainant Mobile Number"]
if len(result) != 0:
response = {'headers':headers,'rows':result}
else:
response = {'headers':["No Data Available"],'rows':[]}
fString = f">>> Query 20 Call for:{fir}"
logger.logit(fString)
return jsonify(response)
@app.route('/query/100/', methods = ['GET'])
def query_100():
# Parsing the Headers
    imei = request.args.get('imei')
    # NOTE: the original body was copied from query_20 and referenced the
    # undefined names `fir` and lowercase `imei`; the assumed intent is to look
    # up call records by IMEI, so the query below filters CallData instead.
    query = f'SELECT * from CallData WHERE imei="{imei}"'
    result = queries.runQuery(query)
    #print(result)
    headers = ["Calling Number","Called Number","Start Time","End Time","Duration(sec)","Start Tower","End Tower","Call Type","IMEI","IMSI","SMSC","Service Provider"]
    if len(result) != 0:
        response = {'headers':headers,'rows':result}
    else:
        response = {'headers':["No Data Available"],'rows':[]}
    fString = f">>> Query 100 Call IMEI:{imei}"
logger.logit(fString)
return jsonify(response)
@app.route('/query/101/', methods = ['GET'])
def query_101():
#unique IMEIs
    unique_imeis_query = 'SELECT DISTINCT imei FROM CallData'
    resultset = queries.runQuery(unique_imeis_query)
    for results in resultset:
        print(results)
        #unique_imsi_query = f'SELECT * from CallData where imei={results}'
    # Flask accepts a (body, status) tuple; `return ("OK", code=200)` was a
    # syntax error, and the unused `IMEI = []` accumulator has been dropped.
    return "OK", 200
#unique_imsi =
@app.route('/loadedfiles', methods = ['GET'])
def loadedfiles():
csv_files = []
for filename in os.listdir("/home/pi/Desktop/AFKZenCoders/PS12/uploads/"):
if filename.endswith(".csv"):
csv_files.append(filename)
logger.logit("Rendered uploaded files")
return jsonify({'CSV files':csv_files})
@app.route('/deleteloaded', methods = ['GET'])
def deleteloaded():
    csv_files = []
    for filename in os.listdir("/home/pi/Desktop/AFKZenCoders/PS12/uploads/"):
        if filename.endswith(".csv"):
            fstring = f"/home/pi/Desktop/AFKZenCoders/PS12/uploads/{filename}"
            os.remove(fstring)
            # Report what was actually deleted instead of always returning [].
            csv_files.append(filename)
    os.remove("/home/pi/Desktop/AFKZenCoders/PS12/CDRdata.db")
    logger.logit("### Files Deleted ###")
    return jsonify({'CSV files':csv_files})
# Download API
@app.route("/downloadfile/<filename>", methods = ['GET'])
def download_file(filename):
logger.logit("Rendered download.html")
return render_template('download.html',value=filename)
@app.route('/return-files/<filename>')
def return_files_tut(filename):
file_path = "/home/pi/Desktop/AFKZenCoders/PS12/CDRdata.db"
logger.logit("Database Downloaded")
    return send_file(file_path, as_attachment=True, attachment_filename='CDRdata.db')
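# Example download, same hypothetical host/port as the upload examples above
# (-OJ keeps the attachment filename sent by the server):
#   curl -OJ http://localhost:1313/return-files/CDRdata.db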
@app.route('/logs')
def logs():
    with open("/home/pi/Desktop/AFKZenCoders/PS12/Logs.txt","r") as f:
        lines = f.readlines()
    # Newest entries first. reversed() also keeps the first log line, which the
    # original range(len(lines)-1, 0, -1) loop dropped; the explicit close was
    # redundant inside the with-block.
    formatted_lines = list(reversed(lines))
    return jsonify({'logs':formatted_lines})
@app.route('/graph')
def graph():
    query = 'SELECT date,in_count,out_count,sms_count,total from "798234523"'
    result = queries.runQuery(query)
    #print(result)
    headers = ["Date","Incoming Calls","Outgoing Calls","SMS","Total Interactions"]
    if len(result) != 0:
        response = {'headers':headers,'rows':result}
    else:
        response = {'headers':["No Data Available"],'rows':[]}
    fString = ">>> GRAPH Call"
logger.logit(fString)
return jsonify(response)
if __name__ == "__main__":
app.run(host='0.0.0.0',port = 1313,debug = True) | [] |
Fassial/Air-Writing-with-TL | cnnblstm_with_adabn/cnnblstm_with_adabn.py | 9b9047c5bd5aef3a869e2d5166be1c0cf0c5ccf0 | import os
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import matplotlib.pyplot as plt
# local model
import sys
sys.path.append("../network")
import Coral
from lstm import LSTMHardSigmoid
from AdaBN import AdaBN
sys.path.append("../network/AutoEncoder")
import AutoEncoder
class cnnblstm_with_adabn(nn.Module):
PARAMS_FILE = "params.pkl"
PARAMS_AE = "params_ae.pkl"
NET1_ADABN = "net1_adabn"
NET2_ADABN = "net2_adabn"
NET3_ADABN = "net3_adabn"
def __init__(self, time_steps = 800, n_features = 3, n_outputs = 10, use_cuda = False, params_dir = "./params", enable_CORAL = False):
super(cnnblstm_with_adabn, self).__init__()
self.time_steps = time_steps
self.n_features = n_features
self.n_outputs = n_outputs
self.use_cuda = use_cuda
self.params_dir = params_dir
if not os.path.exists(self.params_dir):
os.mkdir(self.params_dir)
self.enable_CORAL = enable_CORAL
self.n_filters = 128
self.kernel_size = 15
self.n_hidden = 150 # 150
self.n_layers = 1
self.bidirectional = True
# self.ae = AutoEncoder.load_AE(type = "ConvAE", time_steps = self.time_steps, n_features = self.n_features, use_cuda = self.use_cuda, params_pkl = os.path.join(self.params_dir, cnnblstm_with_adabn.PARAMS_AE))
# build net1 cnn
self.net1 = nn.Sequential(
nn.Conv1d(in_channels = self.n_features, out_channels = self.n_filters, kernel_size = self.kernel_size),
# nn.Conv1d(in_channels = self.ae.n_filters3, out_channels = self.n_filters, kernel_size = self.kernel_size),
nn.ReLU(),
# nn.Sigmoid(),
nn.Dropout(p = 0.5),
nn.MaxPool1d(kernel_size = 2)
)
# build net1_adabn
self.net1_adabn = AdaBN(self.n_filters, variables_dir = os.path.join(self.params_dir, cnnblstm_with_adabn.NET1_ADABN), use_cuda = self.use_cuda)
# build net2 blstm
# self.net2 = nn.LSTM(input_size = self.n_filters, hidden_size = self.n_hidden, num_layers = self.n_layers, dropout = 0.2, batch_first = True, bidirectional = self.bidirectional, bias = True)
self.net2 = LSTMHardSigmoid(input_size = self.n_filters, hidden_size = self.n_hidden, num_layers = self.n_layers, dropout = 0.2, batch_first = True, bidirectional = self.bidirectional, bias = True)
# build net2_adabn
if self.bidirectional:
n_blstm_output = self.n_hidden * 2
else:
n_blstm_output = self.n_hidden
self.net2_adabn = AdaBN(n_blstm_output, variables_dir = os.path.join(self.params_dir, cnnblstm_with_adabn.NET2_ADABN), use_cuda = self.use_cuda)
# build net3 fc
self.net3 = nn.Sequential(
nn.Linear(n_blstm_output, 50, bias = True),
nn.ReLU(),
# nn.Sigmoid(),
)
# build net3_adabn
self.net3_adabn = AdaBN(50, variables_dir = os.path.join(self.params_dir, cnnblstm_with_adabn.NET3_ADABN), use_cuda = self.use_cuda)
# build net4 fc
self.net4 = nn.Sequential(
nn.Dropout(p = 0.2),
nn.Linear(50, self.n_outputs, bias = True),
nn.Softmax(dim = 1)
)
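		# Shape flow for the defaults above (batch size B): input (B, 3, 800)
		# -> net1 Conv1d(k=15)+MaxPool1d(2) -> (B, 128, 393) -> AdaBN -> permute
		# -> (B, 393, 128) -> BiLSTM (150 units, bidirectional) -> (B, 393, 300)
		# -> max over time -> (B, 300) -> net3 -> (B, 50) -> net4 softmax -> (B, 10)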
def init_hidden(self, batch_size):
"""
init blstm's hidden states
"""
if self.bidirectional:
n_layers = self.n_layers * 2
else:
n_layers = self.n_layers
if self.use_cuda:
hidden_state = torch.zeros(n_layers, batch_size, self.n_hidden).cuda()
cell_state = torch.zeros(n_layers, batch_size, self.n_hidden).cuda()
else:
hidden_state = torch.zeros(n_layers, batch_size, self.n_hidden)
cell_state = torch.zeros(n_layers, batch_size, self.n_hidden)
self.hidden = (hidden_state, cell_state)
def reset_parameters(self):
"""
temp useless
Here we reproduce Keras default initialization weights for consistency with Keras version
"""
# get weights & bias set
net1_weights = ((name, param.data) for name, param in self.named_parameters() if (("weight" in name) and (("net1" in name) and ("net1_adabn" not in name))))
net1_biases = ((name, param.data) for name, param in self.named_parameters() if (("bias" in name) and (("net1" in name) and ("net1_adabn" not in name))))
# net2_weights = ((name, param.data) for name, param in self.named_parameters() if (("weight" in name) and (("net2" in name) and ("net2_adabn" not in name))))
# net2_biases = ((name, param.data) for name, param in self.named_parameters() if (("bias" in name) and (("net2" in name) and ("net2_adabn" not in name))))
net3_weights = ((name, param.data) for name, param in self.named_parameters() if (("weight" in name) and (("net3" in name) and ("net3_adabn" not in name))))
net3_biases = ((name, param.data) for name, param in self.named_parameters() if (("bias" in name) and (("net3" in name) and ("net3_adabn" not in name))))
net4_weights = ((name, param.data) for name, param in self.named_parameters() if (("weight" in name) and (("net4" in name) and ("net4_adabn" not in name))))
net4_biases = ((name, param.data) for name, param in self.named_parameters() if (("bias" in name) and (("net4" in name) and ("net4_adabn" not in name))))
# init weights & bias
# self.ae.reset_parameters()
for name, params_data in net1_weights:
# print(name)
nn.init.xavier_uniform_(params_data)
for name, params_data in net1_biases:
nn.init.constant_(params_data, 0)
self.net1_adabn.reset_parameters()
self.net2.reset_parameters() # lstm reset parameters
self.net2_adabn.reset_parameters()
for name, params_data in net3_weights:
nn.init.xavier_uniform_(params_data)
for name, params_data in net3_biases:
nn.init.constant_(params_data, 0)
self.net3_adabn.reset_parameters()
for name, params_data in net4_weights:
nn.init.xavier_uniform_(params_data)
for name, params_data in net4_biases:
nn.init.constant_(params_data, 0)
def forward(self, input):
"""
compute the output of input according to the entire network model
"""
# print(input.shape)
# AutoEncoder
# input = self.ae.encoder(input)
# input = self.ae(input)
# MaxPool1d
maxPool1d_output = self.net1(input)
# maxPool1d_adabn_output = maxPool1d_output
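		# The recurring `x, y = f(y), None` pattern below overwrites each
		# intermediate tensor with None right after use, presumably to release
		# memory as data flows through the network.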
maxPool1d_adabn_output, maxPool1d_output = self.net1_adabn(maxPool1d_output), None
maxPool1d_adabn_t_output = maxPool1d_adabn_output.permute(0, 2, 1).contiguous()
# BiLSTM
(bilstm_output, _), maxPool1d_adabn_t_output = self.net2(maxPool1d_adabn_t_output, None), None
# MaxPooling1D time_steps
bilstm_output = bilstm_output.permute(0, 2, 1)
maxPooling_output, bilstm_output = F.max_pool1d(bilstm_output, kernel_size = bilstm_output.size(2)).squeeze(2), None
# maxPooling_adabn_output = maxPooling_output
maxPooling_adabn_output, maxPooling_output = self.net2_adabn(maxPooling_output), None
# get classifier
net3_output, maxPooling_adabn_output = self.net3(maxPooling_adabn_output), None
net3_adabn_output, net3_output = self.net3_adabn(net3_output), None
linear2_softmax_output, net3_adabn_output = self.net4(net3_adabn_output), None
return linear2_softmax_output
def update_adabn_running_stats(self):
"""
update adabn running states, update mu_j with mu_j_next to start next round
"""
self.net1_adabn.update_running_stats()
self.net2_adabn.update_running_stats()
self.net3_adabn.update_running_stats()
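		# Adaptation sketch (an assumed usage pattern, mirroring the calls in
		# trainAllLayers/getTestAccuracy below): forward batches from the new
		# domain, then call update_adabn_running_stats() so each AdaBN layer
		# swaps its source statistics for the freshly estimated target ones.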
def trainAllLayers(self, train_x, train_y, test_x = None, learning_rate = 0.001, n_epoches = 20, batch_size = 20, shuffle = True):
"""
train all layers of network model
"""
# print(os.environ["CUDA_VISIBLE_DEVICES"])
# CORAL
if self.enable_CORAL:
			if test_x is None:
print("ERROR: (in cnnblstm_with_adabn.trainAllLayers) test_x == None!")
return
# review train_x & test_x
train_x = train_x.view(-1, self.time_steps * self.n_features)
test_x = test_x.view(-1, self.time_steps * self.n_features)
# get CORAL(train_x, test_x)
train_x = Coral.CORAL_torch(train_x, test_x)
# review train_x
train_x = train_x.view(-1, self.n_features, self.time_steps)
		# optimize all network parameters
		# NOTE: the autoencoder (self.ae) is commented out in __init__, so the
		# original filter `if model not in [self.ae]` would raise AttributeError;
		# every child module is optimized instead.
		params = [{"params": model.parameters()} for model in self.children()]
optimizer = torch.optim.Adam(params, lr = learning_rate)
# the target label is not one-hotted
loss_func = nn.CrossEntropyLoss()
# init params
self.reset_parameters()
# load params
self.load_params()
# set train mode True
self.train()
# get parallel model
parallel_cba = self
if self.use_cuda:
# print("we use cuda!")
parallel_cba = torch.nn.DataParallel(self, device_ids = range(torch.cuda.device_count()))
# parallel_cba = parallel_cba.cuda()
# if use_cuda
if self.use_cuda:
train_x = train_x.cuda()
train_y = train_y.cuda()
"""
# get autoencoder
self.ae = AutoEncoder.train_AE(self.ae, train_x, train_x, n_epoches = 20)
self.ae.save_params()
"""
# get train_data
train_data = torch.utils.data.TensorDataset(train_x, train_y)
# Data Loader for easy mini-batch return in training
train_loader = torch.utils.data.DataLoader(dataset = train_data, batch_size = batch_size, shuffle = shuffle)
# training and testing
for epoch in range(n_epoches):
# init loss & acc
train_loss = 0
train_acc = 0
for step, (b_x, b_y) in enumerate(train_loader): # gives batch data
b_x = b_x.view(-1, self.n_features, self.time_steps) # reshape x to (batch, n_features, time_step)
if self.use_cuda:
b_x, b_y = Variable(b_x).cuda(), Variable(b_y).cuda()
else:
b_x, b_y = Variable(b_x), Variable(b_y)
"""
# get hidden
if self.use_cuda:
self.init_hidden(b_x.size(0) // torch.cuda.device_count())
else:
self.init_hidden(b_x.size(0))
"""
# update adabn running stats
self.update_adabn_running_stats()
# get output
output = parallel_cba(b_x) # CNN_BLSTM output
# get loss
loss = loss_func(output, b_y) # cross entropy loss
train_loss += loss.item() * len(b_y)
_, pre = torch.max(output, 1)
num_acc = (pre == b_y).sum()
train_acc += num_acc.item()
# backward
optimizer.zero_grad() # clear gradients for this training step
loss.backward() # backpropagation, compute gradients
optimizer.step() # apply gradients
# print loss
# if (step + 1) % 5 == 0:
# print("[{}/{}], train loss is: {:.6f}, train acc is: {:.6f}".format(step, len(train_loader), train_loss / ((step + 1) * batch_size), train_acc / ((step + 1) * batch_size)))
print("[{}/{}], train loss is: {:.6f}, train acc is: {:.6f}".format(len(train_loader), len(train_loader), train_loss / (len(train_loader) * batch_size), train_acc / (len(train_loader) * batch_size)))
# save params
self.save_params()
# print("train finish!")
def getTestAccuracy(self, test_x, test_y):
"""
test network model with test set
"""
# init params
self.reset_parameters()
# load params
self.load_params()
# set eval
self.eval()
# get parallel model
parallel_cba = self
if self.use_cuda:
# print("we use cuda!")
parallel_cba = torch.nn.DataParallel(self, device_ids = range(torch.cuda.device_count()))
# parallel_cba = parallel_cba.cuda()
# cuda test_data
with torch.no_grad():
if self.use_cuda:
test_x, test_y = Variable(test_x).cuda(), Variable(test_y).cuda()
else:
test_x, test_y = Variable(test_x), Variable(test_y)
"""
# get hidden
if self.use_cuda:
self.init_hidden(test_x.size(0) // torch.cuda.device_count())
else:
self.init_hidden(test_x.size(0))
"""
# update adabn running stats
self.update_adabn_running_stats()
# get output
with torch.no_grad():
output = parallel_cba(test_x)
# print(output)
prediction = torch.max(output, 1)[1]
pred_y = prediction.cpu().data.numpy()
# print(pred_y)
target_y = test_y.cpu().data.numpy()
# print(test_y)
accuracy = float((pred_y == target_y).astype(int).sum()) / float(target_y.size)
# print("Accuracy: ", str(accuracy))
return accuracy
def save_params(self):
"""
save params & adabn's inner stats
"""
self.save_adabn_variables()
torch.save(self.state_dict(), os.path.join(self.params_dir, cnnblstm_with_adabn.PARAMS_FILE))
# self.ae.save_params()
# print("save_params success!")
def save_adabn_variables(self):
"""
save adabn's inner stats
"""
self.net1_adabn.save_attrs()
self.net2_adabn.save_attrs()
self.net3_adabn.save_attrs()
def load_params(self):
"""
load params & adabn's inner stats
"""
self.load_adabn_variables()
if os.path.exists(os.path.join(self.params_dir, cnnblstm_with_adabn.PARAMS_FILE)):
if self.use_cuda:
self.load_state_dict(torch.load(os.path.join(self.params_dir, cnnblstm_with_adabn.PARAMS_FILE), map_location = torch.device('cuda')))
else:
self.load_state_dict(torch.load(os.path.join(self.params_dir, cnnblstm_with_adabn.PARAMS_FILE), map_location = torch.device('cpu')))
# print("load_params success!")
# self.ae.load_params()
def load_adabn_variables(self):
"""
load adabn's inner stats
"""
self.net1_adabn.load_attrs()
self.net2_adabn.load_attrs()
self.net3_adabn.load_attrs()
def get_model(self, pre_trained = False):
"""
get pretrained model
"""
if pre_trained:
self.load_params()
return self
if __name__ == '__main__':
use_cuda = torch.cuda.is_available()
if use_cuda:
cnnblstm = cnnblstm_with_adabn(use_cuda = use_cuda).cuda()
else:
cnnblstm = cnnblstm_with_adabn(use_cuda = use_cuda)
print(cnnblstm)
# get train_x, train_y
train_x = torch.rand(20, 3, 800, dtype = torch.float32)
train_y = torch.randint(10, (20, ), dtype = torch.int64)
# train_y = torch.LongTensor(20, 1).random_() % 10
print(train_x.type())
# train_y = torch.zeros(20, 10).scatter_(1, train_y, 1)
print(train_y)
	# trainAllLayers expects the raw tensors and builds its own TensorDataset
	# and DataLoader internally.
	cnnblstm.trainAllLayers(train_x, train_y)
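	# A minimal evaluation sketch on random tensors (hypothetical data with the
	# default shapes used above; real usage would load held-out samples):
	test_x = torch.rand(20, 3, 800, dtype = torch.float32)
	test_y = torch.randint(10, (20, ), dtype = torch.int64)
	print("test accuracy:", cnnblstm.getTestAccuracy(test_x, test_y))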
 | [((189, 218), 'sys.path.append', 'sys.path.append', (['"""../network"""'], {}), "('../network')\n", (204, 218), False, 'import sys\n'), ((289, 330), 'sys.path.append', 'sys.path.append', (['"""../network/AutoEncoder"""'], {}), "('../network/AutoEncoder')\n", (304, 330), False, 'import sys\n'), ((13068, 13093), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (13091, 13093), False, 'import torch\n'), ((13282, 13325), 'torch.rand', 'torch.rand', (['(20)', '(3)', '(800)'], {'dtype': 'torch.float32'}), '(20, 3, 800, dtype=torch.float32)\n', (13292, 13325), False, 'import torch\n'), ((13339, 13382), 'torch.randint', 'torch.randint', (['(10)', '(20,)'], {'dtype': 'torch.int64'}), '(10, (20,), dtype=torch.int64)\n', (13352, 13382), False, 'import torch\n'), ((13548, 13596), 'torch.utils.data.TensorDataset', 'torch.utils.data.TensorDataset', (['train_x', 'train_y'], {}), '(train_x, train_y)\n', (13578, 13596), False, 'import torch\n'), ((2066, 2246), 'lstm.LSTMHardSigmoid', 'LSTMHardSigmoid', ([], {'input_size': 'self.n_filters', 'hidden_size': 'self.n_hidden', 'num_layers': 'self.n_layers', 'dropout': '(0.2)', 'batch_first': '(True)', 'bidirectional': 'self.bidirectional', 'bias': '(True)'}), '(input_size=self.n_filters, hidden_size=self.n_hidden,\n num_layers=self.n_layers, dropout=0.2, batch_first=True, bidirectional=\n self.bidirectional, bias=True)\n', (2081, 2246), False, 'from lstm import LSTMHardSigmoid\n'), ((7921, 7963), 'torch.optim.Adam', 'torch.optim.Adam', (['params'], {'lr': 'learning_rate'}), '(params, lr=learning_rate)\n', (7937, 7963), False, 'import torch\n'), ((8019, 8040), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (8038, 8040), True, 'import torch.nn as nn\n'), ((8650, 8698), 'torch.utils.data.TensorDataset', 'torch.utils.data.TensorDataset', (['train_x', 'train_y'], {}), '(train_x, train_y)\n', (8680, 8698), False, 'import torch\n'), ((8771, 8862), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', ([], {'dataset': 'train_data', 'batch_size': 'batch_size', 'shuffle': 'shuffle'}), '(dataset=train_data, batch_size=batch_size,\n shuffle=shuffle)\n', (8798, 8862), False, 'import torch\n'), ((869, 900), 'os.path.exists', 'os.path.exists', (['self.params_dir'], {}), '(self.params_dir)\n', (883, 900), False, 'import os\n'), ((905, 930), 'os.mkdir', 'os.mkdir', (['self.params_dir'], {}), '(self.params_dir)\n', (913, 930), False, 'import os\n'), ((1355, 1456), 'torch.nn.Conv1d', 'nn.Conv1d', ([], {'in_channels': 'self.n_features', 'out_channels': 'self.n_filters', 'kernel_size': 'self.kernel_size'}), '(in_channels=self.n_features, out_channels=self.n_filters,\n kernel_size=self.kernel_size)\n', (1364, 1456), True, 'import torch.nn as nn\n'), ((1576, 1585), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1583, 1585), True, 'import torch.nn as nn\n'), ((1609, 1626), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)'}), '(p=0.5)\n', (1619, 1626), True, 'import torch.nn as nn\n'), ((1633, 1660), 'torch.nn.MaxPool1d', 'nn.MaxPool1d', ([], {'kernel_size': '(2)'}), '(kernel_size=2)\n', (1645, 1660), True, 'import torch.nn as nn\n'), ((2577, 2617), 'torch.nn.Linear', 'nn.Linear', (['n_blstm_output', '(50)'], {'bias': '(True)'}), '(n_blstm_output, 50, bias=True)\n', (2586, 2617), True, 'import torch.nn as nn\n'), ((2624, 2633), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (2631, 2633), True, 'import torch.nn as nn\n'), ((2866, 2883), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.2)'}), '(p=0.2)\n', (2876, 2883), True, 'import torch.nn as nn\n'), ((2890, 2930), 'torch.nn.Linear', 'nn.Linear', (['(50)', 'self.n_outputs'], {'bias': '(True)'}), '(50, self.n_outputs, bias=True)\n', (2899, 2930), True, 'import torch.nn as nn\n'), ((2937, 2954), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (2947, 2954), True, 'import torch.nn as nn\n'), ((3324, 3372), 'torch.zeros', 'torch.zeros', (['n_layers', 'batch_size', 'self.n_hidden'], {}), '(n_layers, batch_size, self.n_hidden)\n', (3335, 3372), False, 'import torch\n'), ((3389, 3437), 'torch.zeros', 'torch.zeros', (['n_layers', 'batch_size', 'self.n_hidden'], {}), '(n_layers, batch_size, self.n_hidden)\n', (3400, 3437), False, 'import torch\n'), ((5037, 5073), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['params_data'], {}), '(params_data)\n', (5060, 5073), True, 'import torch.nn as nn\n'), ((5117, 5150), 'torch.nn.init.constant_', 'nn.init.constant_', (['params_data', '(0)'], {}), '(params_data, 0)\n', (5134, 5150), True, 'import torch.nn as nn\n'), ((5325, 5361), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['params_data'], {}), '(params_data)\n', (5348, 5361), True, 'import torch.nn as nn\n'), ((5405, 5438), 'torch.nn.init.constant_', 'nn.init.constant_', (['params_data', '(0)'], {}), '(params_data, 0)\n', (5422, 5438), True, 'import torch.nn as nn\n'), ((5520, 5556), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['params_data'], {}), '(params_data)\n', (5543, 5556), True, 'import torch.nn as nn\n'), ((5600, 5633), 'torch.nn.init.constant_', 'nn.init.constant_', (['params_data', '(0)'], {}), '(params_data, 0)\n', (5617, 5633), True, 'import torch.nn as nn\n'), ((7658, 7692), 'Coral.CORAL_torch', 'Coral.CORAL_torch', (['train_x', 'test_x'], {}), '(train_x, test_x)\n', (7675, 7692), False, 'import Coral\n'), ((10969, 10984), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (10982, 10984), False, 'import torch\n'), ((11387, 11402), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (11400, 11402), False, 'import torch\n'), ((11470, 11490), 'torch.max', 'torch.max', (['output', '(1)'], {}), '(output, 1)\n', (11479, 11490), False, 'import torch\n'), ((11885, 11947), 'os.path.join', 'os.path.join', (['self.params_dir', 'cnnblstm_with_adabn.PARAMS_FILE'], {}), '(self.params_dir, cnnblstm_with_adabn.PARAMS_FILE)\n', (11897, 11947), False, 'import os\n'), ((12298, 12360), 'os.path.join', 'os.path.join', (['self.params_dir', 'cnnblstm_with_adabn.PARAMS_FILE'], {}), '(self.params_dir, cnnblstm_with_adabn.PARAMS_FILE)\n', (12310, 12360), False, 'import os\n'), ((1747, 1808), 'os.path.join', 'os.path.join', (['self.params_dir', 'cnnblstm_with_adabn.NET1_ADABN'], {}), '(self.params_dir, cnnblstm_with_adabn.NET1_ADABN)\n', (1759, 1808), False, 'import os\n'), ((2437, 2498), 'os.path.join', 'os.path.join', (['self.params_dir', 'cnnblstm_with_adabn.NET2_ADABN'], {}), '(self.params_dir, cnnblstm_with_adabn.NET2_ADABN)\n', (2449, 2498), False, 'import os\n'), ((2726, 2787), 'os.path.join', 'os.path.join', (['self.params_dir', 'cnnblstm_with_adabn.NET3_ADABN'], {}), '(self.params_dir, cnnblstm_with_adabn.NET3_ADABN)\n', (2738, 2787), False, 'import os\n'), ((9730, 9750), 'torch.max', 'torch.max', (['output', '(1)'], {}), '(output, 1)\n', (9739, 9750), False, 'import torch\n'), ((3170, 3218), 'torch.zeros', 'torch.zeros', (['n_layers', 'batch_size', 'self.n_hidden'], {}), '(n_layers, batch_size, self.n_hidden)\n', (3181, 3218), False, 'import torch\n'), ((3242, 3290), 'torch.zeros', 'torch.zeros', (['n_layers', 'batch_size', 'self.n_hidden'], {}), '(n_layers, batch_size, self.n_hidden)\n', (3253, 3290), False, 'import torch\n'), ((11107, 11123), 'torch.autograd.Variable', 'Variable', (['test_x'], {}), '(test_x)\n', (11115, 11123), False, 'from torch.autograd import Variable\n'), ((11125, 11141), 'torch.autograd.Variable', 'Variable', (['test_y'], {}), '(test_y)\n', (11133, 11141), False, 'from torch.autograd import Variable\n'), ((8320, 8345), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (8343, 8345), False, 'import torch\n'), ((9262, 9275), 'torch.autograd.Variable', 'Variable', (['b_x'], {}), '(b_x)\n', (9270, 9275), False, 'from torch.autograd import Variable\n'), ((9277, 9290), 'torch.autograd.Variable', 'Variable', (['b_y'], {}), '(b_y)\n', (9285, 9290), False, 'from torch.autograd import Variable\n'), ((10875, 10900), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (10898, 10900), False, 'import torch\n'), ((12420, 12482), 'os.path.join', 'os.path.join', (['self.params_dir', 'cnnblstm_with_adabn.PARAMS_FILE'], {}), '(self.params_dir, cnnblstm_with_adabn.PARAMS_FILE)\n', (12432, 12482), False, 'import os\n'), ((12567, 12629), 'os.path.join', 'os.path.join', (['self.params_dir', 'cnnblstm_with_adabn.PARAMS_FILE'], {}), '(self.params_dir, cnnblstm_with_adabn.PARAMS_FILE)\n', (12579, 12629), False, 'import os\n'), ((11028, 11044), 'torch.autograd.Variable', 'Variable', (['test_x'], {}), '(test_x)\n', (11036, 11044), False, 'from torch.autograd import Variable\n'), ((11053, 11069), 'torch.autograd.Variable', 'Variable', (['test_y'], {}), '(test_y)\n', (11061, 11069), False, 'from torch.autograd import Variable\n'), ((12499, 12519), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (12511, 12519), False, 'import torch\n'), ((12646, 12665), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (12658, 12665), False, 'import torch\n'), ((9193, 9206), 'torch.autograd.Variable', 'Variable', (['b_x'], {}), '(b_x)\n', (9201, 9206), False, 'from torch.autograd import Variable\n'), ((9215, 9228), 'torch.autograd.Variable', 'Variable', (['b_y'], {}), '(b_y)\n', (9223, 9228), False, 'from torch.autograd import Variable\n')] |
woffett/emmental | src/emmental/model.py | 87884fcd89662cca45f0ea0f78cff73380cc47c8 | """Emmental model."""
import itertools
import logging
import os
from collections import defaultdict
from collections.abc import Iterable
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
import numpy as np
import torch
from numpy import ndarray
from torch import Tensor, nn as nn
from torch.nn import ModuleDict
from tqdm import tqdm
from emmental.data import EmmentalDataLoader
from emmental.meta import Meta
from emmental.scorer import Scorer
from emmental.task import EmmentalTask
from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred
logger = logging.getLogger(__name__)
class EmmentalModel(nn.Module):
"""A class to build multi-task model.
Args:
name: Name of the model, defaults to None.
tasks: A task or a list of tasks.
"""
def __init__(
self,
name: Optional[str] = None,
tasks: Optional[Union[EmmentalTask, List[EmmentalTask]]] = None,
) -> None:
"""Initialize EmmentalModel."""
super().__init__()
self.name = name if name is not None else type(self).__name__
# Initiate the model attributes
self.module_pool: ModuleDict = ModuleDict()
self.task_names: Set[str] = set()
self.task_flows: Dict[str, Any] = dict() # TODO: make it concrete
self.loss_funcs: Dict[str, Callable] = dict()
self.output_funcs: Dict[str, Callable] = dict()
self.scorers: Dict[str, Scorer] = dict()
self.action_outputs: Dict[
str, Optional[List[Union[Tuple[str, str], Tuple[str, int]]]]
] = dict()
self.weights: Dict[str, float] = dict()
# Build network with given tasks
if tasks is not None:
self.add_tasks(tasks)
if Meta.config["meta_config"]["verbose"]:
logger.info(
f"Created emmental model {self.name} that contains "
f"task {self.task_names}."
)
# Move model to specified device
self._move_to_device()
def _move_to_device(self) -> None:
"""Move model to specified device."""
if Meta.config["model_config"]["device"] != -1:
if torch.cuda.is_available():
device = (
f"cuda:{Meta.config['model_config']['device']}"
if isinstance(Meta.config["model_config"]["device"], int)
else Meta.config["model_config"]["device"]
)
if Meta.config["meta_config"]["verbose"]:
logger.info(f"Moving model to GPU ({device}).")
self.to(torch.device(device))
else:
if Meta.config["meta_config"]["verbose"]:
logger.info("No cuda device available. Switch to cpu instead.")
def _to_dataparallel(self) -> None:
for key in self.module_pool.keys():
self.module_pool[key] = torch.nn.DataParallel(self.module_pool[key])
def _to_distributed_dataparallel(self) -> None:
for key in self.module_pool.keys():
self.module_pool[
key
] = torch.nn.parallel.DistributedDataParallel( # type: ignore
self.module_pool[key],
device_ids=[Meta.config["learner_config"]["local_rank"]],
output_device=Meta.config["learner_config"]["local_rank"],
find_unused_parameters=True,
)
def add_tasks(self, tasks: Union[EmmentalTask, List[EmmentalTask]]) -> None:
"""Build the MTL network using all tasks.
Args:
tasks: A task or a list of tasks.
"""
if not isinstance(tasks, Iterable):
tasks = [tasks]
for task in tasks:
self.add_task(task)
def add_task(self, task: EmmentalTask) -> None:
"""Add a single task into MTL network.
Args:
task: A task to add.
"""
if not isinstance(task, EmmentalTask):
raise ValueError(f"Unrecognized task type {task}.")
if task.name in self.task_names:
raise ValueError(
f"Found duplicate task {task.name}, different task should use "
f"different task name."
)
# Combine module_pool from all tasks
for key in task.module_pool.keys():
if key in self.module_pool.keys():
task.module_pool[key] = self.module_pool[key]
else:
self.module_pool[key] = task.module_pool[key]
# Collect task name
self.task_names.add(task.name)
# Collect task flow
self.task_flows[task.name] = task.task_flow
# Collect loss function
self.loss_funcs[task.name] = task.loss_func
# Collect output function
self.output_funcs[task.name] = task.output_func
# Collect action outputs
self.action_outputs[task.name] = task.action_outputs
# Collect scorer
self.scorers[task.name] = task.scorer
# Collect weight
self.weights[task.name] = task.weight
# Move model to specified device
self._move_to_device()
def update_task(self, task: EmmentalTask) -> None:
"""Update a existing task in MTL network.
Args:
task: A task to update.
"""
# Update module_pool with task
for key in task.module_pool.keys():
# Update the model's module with the task's module
self.module_pool[key] = task.module_pool[key]
# Update task flow
self.task_flows[task.name] = task.task_flow
# Update loss function
self.loss_funcs[task.name] = task.loss_func
# Update output function
self.output_funcs[task.name] = task.output_func
# Update action outputs
self.action_outputs[task.name] = task.action_outputs
# Update scorer
self.scorers[task.name] = task.scorer
# Update weight
self.weights[task.name] = task.weight
# Move model to specified device
self._move_to_device()
def remove_task(self, task_name: str) -> None:
"""Remove a existing task from MTL network.
Args:
task_name: The task name to remove.
"""
if task_name not in self.task_flows:
if Meta.config["meta_config"]["verbose"]:
logger.info(f"Task ({task_name}) not in the current model, skip...")
return
# Remove task by task_name
if Meta.config["meta_config"]["verbose"]:
logger.info(f"Removing Task {task_name}.")
self.task_names.remove(task_name)
del self.task_flows[task_name]
del self.loss_funcs[task_name]
del self.output_funcs[task_name]
del self.action_outputs[task_name]
del self.scorers[task_name]
del self.weights[task_name]
        # TODO: remove the modules only associated with that task
def __repr__(self) -> str:
"""Represent the model as a string."""
cls_name = type(self).__name__
return f"{cls_name}(name={self.name})"
def flow(self, X_dict: Dict[str, Any], task_names: List[str]) -> Dict[str, Any]:
"""Forward based on input and task flow.
Note:
We assume that all shared modules from all tasks are based on the
same input.
Args:
X_dict: The input data
task_names: The task names that needs to forward.
Returns:
The output of all forwarded modules
"""
X_dict = move_to_device(X_dict, Meta.config["model_config"]["device"])
output_dict = dict(_input_=X_dict)
# Call forward for each task
for task_name in task_names:
for action in self.task_flows[task_name]:
if action["name"] not in output_dict:
if action["inputs"]:
try:
input = [
output_dict[action_name][output_index]
for action_name, output_index in action["inputs"]
]
except Exception:
raise ValueError(f"Unrecognized action {action}.")
output = self.module_pool[action["module"]].forward(*input)
else:
output = self.module_pool[action["module"]].forward(output_dict)
if isinstance(output, tuple):
output = list(output)
if not isinstance(output, list) and not isinstance(output, dict):
output = [output]
output_dict[action["name"]] = output
return output_dict
def forward( # type: ignore
self,
uids: List[str],
X_dict: Dict[str, Any],
Y_dict: Dict[str, Tensor],
task_to_label_dict: Dict[str, str],
return_action_outputs=False,
) -> Union[
Tuple[
Dict[str, List[str]],
Dict[str, ndarray],
Dict[str, ndarray],
Dict[str, ndarray],
Dict[str, Dict[str, ndarray]],
],
Tuple[
Dict[str, List[str]],
Dict[str, ndarray],
Dict[str, ndarray],
Dict[str, ndarray],
],
]:
"""Forward function.
Args:
uids: The uids of input data.
X_dict: The input data.
Y_dict: The output data.
task_to_label_dict: The task to label mapping.
          return_action_outputs: Whether to return action_outputs or not,
            defaults to False.
Returns:
The (active) uids, loss, prob, gold, action_output (optional) in the batch of
all tasks.
"""
uid_dict: Dict[str, List[str]] = defaultdict(list)
loss_dict: Dict[str, ndarray] = defaultdict(float)
gold_dict: Dict[str, ndarray] = defaultdict(list)
prob_dict: Dict[str, ndarray] = defaultdict(list)
out_dict: Dict[str, Dict[str, ndarray]] = defaultdict(lambda: defaultdict(list))
task_names = (
list(task_to_label_dict.keys())
if isinstance(task_to_label_dict, dict)
else list(task_to_label_dict)
)
output_dict = self.flow(X_dict, task_names)
if Y_dict is not None:
# Calculate logit and loss for each task
for task_name, label_name in task_to_label_dict.items():
Y = Y_dict[label_name]
# Select the active samples
if Meta.config["learner_config"]["ignore_index"] is not None:
if len(Y.size()) == 1:
active = (
Y.detach() != Meta.config["learner_config"]["ignore_index"]
)
else:
active = torch.any(
Y.detach() != Meta.config["learner_config"]["ignore_index"],
dim=1,
)
else:
active = torch.BoolTensor([True] * Y.size()[0]) # type: ignore
# Only calculate the loss when active example exists
if active.any():
uid_dict[task_name] = [*itertools.compress(uids, active.numpy())]
loss_dict[task_name] = self.loss_funcs[task_name](
output_dict,
move_to_device(
Y_dict[label_name], Meta.config["model_config"]["device"]
),
move_to_device(active, Meta.config["model_config"]["device"]),
)
prob_dict[task_name] = (
self.output_funcs[task_name](output_dict)[
move_to_device(
active, Meta.config["model_config"]["device"]
)
]
.cpu()
.detach()
.numpy()
)
gold_dict[task_name] = Y_dict[label_name][active].cpu().numpy()
if (
return_action_outputs
and self.action_outputs[task_name] is not None
):
for action_name, output_index in self.action_outputs[task_name]:
out_dict[task_name][f"{action_name}_{output_index}"] = (
output_dict[action_name][output_index][
move_to_device(
active, Meta.config["model_config"]["device"]
)
]
.cpu()
.detach()
.numpy()
)
else:
# Calculate logit for each task
for task_name in task_to_label_dict:
uid_dict[task_name] = uids
prob_dict[task_name] = (
self.output_funcs[task_name](output_dict).cpu().detach().numpy()
)
if return_action_outputs and self.action_outputs[task_name] is not None:
for action_name, output_index in self.action_outputs[task_name]:
out_dict[task_name][f"{action_name}_{output_index}"] = (
output_dict[action_name][output_index]
.cpu()
.detach()
.numpy()
)
loss_dict = None
gold_dict = None
if return_action_outputs:
return uid_dict, loss_dict, prob_dict, gold_dict, out_dict
else:
return uid_dict, loss_dict, prob_dict, gold_dict
@torch.no_grad()
def predict(
self,
dataloader: EmmentalDataLoader,
return_preds: bool = False,
return_action_outputs: bool = True,
) -> Dict[str, Any]:
"""Predict from dataloader.
Args:
dataloader: The dataloader to predict.
          return_preds: Whether to return predictions or not, defaults to False.
          return_action_outputs: Whether to return action_outputs or not, defaults to True.
        Returns:
          The result dict with keys "uids", "golds", "probs", and "losses"
          (plus "outputs" and "preds" when requested).
"""
self.eval()
uid_dict: Dict[str, List[str]] = defaultdict(list)
prob_dict: Dict[str, List[Union[ndarray, int, float]]] = defaultdict(list)
pred_dict: Dict[str, List[ndarray]] = defaultdict(list)
gold_dict: Dict[str, List[Union[ndarray, int, float]]] = defaultdict(list)
out_dict: Dict[str, Dict[str, List[Union[ndarray, int, float]]]] = defaultdict(
lambda: defaultdict(list)
)
loss_dict: Dict[str, Union[ndarray, float]] = defaultdict(list) # type: ignore
if not dataloader.is_learnable:
gold_dict = None
loss_dict = None
# Collect dataloader information
task_to_label_dict = dataloader.task_to_label_dict
uid = dataloader.uid
for batch_num, bdict in tqdm(
enumerate(dataloader),
total=len(dataloader),
desc=f"Evaluating {dataloader.data_name} ({dataloader.split})",
):
            if isinstance(bdict, dict):
X_bdict = bdict
Y_bdict = None
else:
X_bdict, Y_bdict = bdict
if not dataloader.is_learnable:
Y_bdict = None
if return_action_outputs:
(
uid_bdict,
loss_bdict,
prob_bdict,
gold_bdict,
out_bdict,
) = self.forward( # type: ignore
X_bdict[uid],
X_bdict,
Y_bdict,
task_to_label_dict,
return_action_outputs=return_action_outputs,
)
else:
(
uid_bdict,
loss_bdict,
prob_bdict,
gold_bdict,
) = self.forward( # type: ignore
X_bdict[uid],
X_bdict,
Y_bdict,
task_to_label_dict,
return_action_outputs=return_action_outputs,
)
out_bdict = None
for task_name in uid_bdict.keys():
uid_dict[task_name].extend(uid_bdict[task_name])
prob_dict[task_name].extend(prob_bdict[task_name])
if dataloader.is_learnable:
gold_dict[task_name].extend(gold_bdict[task_name])
if len(loss_bdict[task_name].size()) == 0:
if loss_dict[task_name] == []:
loss_dict[task_name] = 0
loss_dict[task_name] += loss_bdict[task_name].item() * len(
uid_bdict[task_name]
)
else:
loss_dict[task_name].extend( # type: ignore
loss_bdict[task_name].cpu().numpy()
)
if return_action_outputs and out_bdict:
for task_name in out_bdict.keys():
for action_name in out_bdict[task_name].keys():
out_dict[task_name][action_name].extend(
out_bdict[task_name][action_name]
)
# Calculate average loss
if dataloader.is_learnable:
for task_name in uid_dict.keys():
if not isinstance(loss_dict[task_name], list):
loss_dict[task_name] /= len(uid_dict[task_name])
res = {
"uids": uid_dict,
"golds": gold_dict,
"probs": prob_dict,
"losses": loss_dict,
}
if return_action_outputs:
res["outputs"] = out_dict
if return_preds:
for task_name, prob in prob_dict.items():
pred_dict[task_name] = prob_to_pred(prob)
res["preds"] = pred_dict
return res
@torch.no_grad()
def score(
self,
dataloaders: Union[EmmentalDataLoader, List[EmmentalDataLoader]],
return_average: bool = True,
) -> Dict[str, float]:
"""Score the data from dataloader.
Args:
dataloaders: The dataloaders to score.
return_average: Whether to return average score.
Returns:
Score dict.
"""
self.eval()
if not isinstance(dataloaders, list):
dataloaders = [dataloaders]
metric_score_dict = dict()
if return_average:
micro_score_dict: defaultdict = defaultdict(list)
macro_score_dict: defaultdict = defaultdict(list)
macro_loss_dict: defaultdict = defaultdict(list)
for dataloader in dataloaders:
if not dataloader.is_learnable:
logger.warning(
f"Dataloader {dataloader.data_name} doesn't have gold data, "
f"continue..."
)
continue
predictions = self.predict(dataloader, return_preds=True)
for task_name in predictions["uids"].keys():
metric_score = self.scorers[task_name].score(
predictions["golds"][task_name],
predictions["probs"][task_name],
predictions["preds"][task_name],
predictions["uids"][task_name],
)
for metric_name, metric_value in metric_score.items():
identifier = construct_identifier(
task_name, dataloader.data_name, dataloader.split, metric_name
)
metric_score_dict[identifier] = metric_value
# Store the loss
identifier = construct_identifier(
task_name, dataloader.data_name, dataloader.split, "loss"
)
metric_score_dict[identifier] = np.mean(
predictions["losses"][task_name]
)
if return_average:
# Collect average score
identifier = construct_identifier(
task_name, dataloader.data_name, dataloader.split, "average"
)
metric_score_dict[identifier] = np.mean(list(metric_score.values()))
micro_score_dict[dataloader.split].extend(
list(metric_score.values())
)
macro_score_dict[dataloader.split].append(
metric_score_dict[identifier]
)
# Store the loss
identifier = construct_identifier(
task_name, dataloader.data_name, dataloader.split, "loss"
)
macro_loss_dict[dataloader.split].append(
metric_score_dict[identifier]
)
if return_average:
# Collect split-wise micro/macro average score
for split in micro_score_dict.keys():
identifier = construct_identifier(
"model", "all", split, "micro_average"
)
metric_score_dict[identifier] = np.mean(micro_score_dict[split])
identifier = construct_identifier(
"model", "all", split, "macro_average"
)
metric_score_dict[identifier] = np.mean(macro_score_dict[split])
identifier = construct_identifier("model", "all", split, "loss")
metric_score_dict[identifier] = np.mean(macro_loss_dict[split])
# Collect overall micro/macro average score/loss
if len(micro_score_dict):
identifier = construct_identifier(
"model", "all", "all", "micro_average"
)
metric_score_dict[identifier] = np.mean(
list(itertools.chain.from_iterable(micro_score_dict.values()))
)
if len(macro_score_dict):
identifier = construct_identifier(
"model", "all", "all", "macro_average"
)
metric_score_dict[identifier] = np.mean(
list(itertools.chain.from_iterable(macro_score_dict.values()))
)
if len(macro_loss_dict):
identifier = construct_identifier("model", "all", "all", "loss")
metric_score_dict[identifier] = np.mean(
list(itertools.chain.from_iterable(macro_loss_dict.values()))
)
# TODO: have a better to handle global evaluation metric
if Meta.config["learner_config"]["global_evaluation_metric_dict"]:
global_evaluation_metric_dict = Meta.config["learner_config"][
"global_evaluation_metric_dict"
]
for metric_name, metric in global_evaluation_metric_dict.items():
metric_score_dict[metric_name] = metric(metric_score_dict)
return metric_score_dict
def save(self, model_path: str) -> None:
"""Save the current model.
Args:
model_path: Saved model path.
"""
# Check existence of model saving directory and create if does not exist.
if not os.path.exists(os.path.dirname(model_path)):
os.makedirs(os.path.dirname(model_path))
state_dict = {
"model": {
"name": self.name,
"module_pool": self.collect_state_dict(),
# "task_names": self.task_names,
# "task_flows": self.task_flows,
# "loss_funcs": self.loss_funcs,
# "output_funcs": self.output_funcs,
# "scorers": self.scorers,
}
}
try:
torch.save(state_dict, model_path)
except BaseException:
logger.warning("Saving failed... continuing anyway.")
if Meta.config["meta_config"]["verbose"]:
logger.info(f"[{self.name}] Model saved in {model_path}")
def load(self, model_path: str) -> None:
"""Load model state_dict from file and reinitialize the model weights.
Args:
model_path: Saved model path.
"""
if not os.path.exists(model_path):
logger.error("Loading failed... Model does not exist.")
try:
checkpoint = torch.load(model_path, map_location=torch.device("cpu"))
except BaseException:
logger.error(f"Loading failed... Cannot load model from {model_path}")
raise
self.load_state_dict(checkpoint["model"]["module_pool"])
if Meta.config["meta_config"]["verbose"]:
logger.info(f"[{self.name}] Model loaded from {model_path}")
# Move model to specified device
self._move_to_device()
def collect_state_dict(self) -> Dict[str, Any]:
"""Collect the state dict."""
state_dict: Dict[str, Any] = defaultdict(list)
for module_name, module in self.module_pool.items():
if hasattr(module, "module"):
state_dict[module_name] = module.module.state_dict() # type: ignore
else:
state_dict[module_name] = module.state_dict()
return state_dict
def load_state_dict(self, state_dict: Dict[str, Any]) -> None: # type: ignore
"""Load the state dict.
Args:
state_dict: The state dict to load.
"""
for module_name, module_state_dict in state_dict.items():
if module_name in self.module_pool:
if hasattr(self.module_pool[module_name], "module"):
self.module_pool[module_name].module.load_state_dict(
module_state_dict
)
else:
self.module_pool[module_name].load_state_dict(module_state_dict)
else:
logger.info(f"Missing {module_name} in module_pool, skip it..")
 | [((604, 631), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (621, 631), False, 'import logging\n'), ((14036, 14051), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (14049, 14051), False, 'import torch\n'), ((18511, 18526), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (18524, 18526), False, 'import torch\n'), ((1190, 1202), 'torch.nn.ModuleDict', 'ModuleDict', ([], {}), '()\n', (1200, 1202), False, 'from torch.nn import ModuleDict\n'), ((7563, 7624), 'emmental.utils.utils.move_to_device', 'move_to_device', (['X_dict', "Meta.config['model_config']['device']"], {}), "(X_dict, Meta.config['model_config']['device'])\n", (7577, 7624), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((9861, 9878), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (9872, 9878), False, 'from collections import defaultdict\n'), ((9919, 9937), 'collections.defaultdict', 'defaultdict', (['float'], {}), '(float)\n', (9930, 9937), False, 'from collections import defaultdict\n'), ((9978, 9995), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (9989, 9995), False, 'from collections import defaultdict\n'), ((10036, 10053), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (10047, 10053), False, 'from collections import defaultdict\n'), ((14614, 14631), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (14625, 14631), False, 'from collections import defaultdict\n'), ((14697, 14714), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (14708, 14714), False, 'from collections import defaultdict\n'), ((14761, 14778), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (14772, 14778), False, 'from collections import defaultdict\n'), ((14844, 14861), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (14855, 14861), False, 'from collections import defaultdict\n'), ((15052, 15069), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (15063, 15069), False, 'from collections import defaultdict\n'), ((25629, 25646), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (25640, 25646), False, 'from collections import defaultdict\n'), ((2192, 2217), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2215, 2217), False, 'import torch\n'), ((2926, 2970), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['self.module_pool[key]'], {}), '(self.module_pool[key])\n', (2947, 2970), False, 'import torch\n'), ((3134, 3354), 'torch.nn.parallel.DistributedDataParallel', 'torch.nn.parallel.DistributedDataParallel', (['self.module_pool[key]'], {'device_ids': "[Meta.config['learner_config']['local_rank']]", 'output_device': "Meta.config['learner_config']['local_rank']", 'find_unused_parameters': '(True)'}), "(self.module_pool[key], device_ids\n =[Meta.config['learner_config']['local_rank']], output_device=Meta.\n config['learner_config']['local_rank'], find_unused_parameters=True)\n", (3175, 3354), False, 'import torch\n'), ((19127, 19144), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (19138, 19144), False, 'from collections import defaultdict\n'), ((19189, 19206), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (19200, 19206), False, 'from collections import defaultdict\n'), ((19250, 19267), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (19261, 19267), False, 'from collections import defaultdict\n'), ((24456, 24490), 'torch.save', 'torch.save', (['state_dict', 'model_path'], {}), '(state_dict, model_path)\n', (24466, 24490), False, 'import torch\n'), ((24915, 24941), 'os.path.exists', 'os.path.exists', (['model_path'], {}), '(model_path)\n', (24929, 24941), False, 'import os\n'), ((10124, 10141), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (10135, 10141), False, 'from collections import defaultdict\n'), ((14970, 14987), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (14981, 14987), False, 'from collections import defaultdict\n'), ((18429, 18447), 'emmental.utils.utils.prob_to_pred', 'prob_to_pred', (['prob'], {}), '(prob)\n', (18441, 18447), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((20325, 20404), 'emmental.utils.utils.construct_identifier', 'construct_identifier', (['task_name', 'dataloader.data_name', 'dataloader.split', '"""loss"""'], {}), "(task_name, dataloader.data_name, dataloader.split, 'loss')\n", (20345, 20404), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((20491, 20532), 'numpy.mean', 'np.mean', (["predictions['losses'][task_name]"], {}), "(predictions['losses'][task_name])\n", (20498, 20532), True, 'import numpy as np\n'), ((21680, 21740), 'emmental.utils.utils.construct_identifier', 'construct_identifier', (['"""model"""', '"""all"""', 'split', '"""micro_average"""'], {}), "('model', 'all', split, 'micro_average')\n", (21700, 21740), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((21827, 21859), 'numpy.mean', 'np.mean', (['micro_score_dict[split]'], {}), '(micro_score_dict[split])\n', (21834, 21859), True, 'import numpy as np\n'), ((21889, 21949), 'emmental.utils.utils.construct_identifier', 'construct_identifier', (['"""model"""', '"""all"""', 'split', '"""macro_average"""'], {}), "('model', 'all', split, 'macro_average')\n", (21909, 21949), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((22036, 22068), 'numpy.mean', 'np.mean', (['macro_score_dict[split]'], {}), '(macro_score_dict[split])\n', (22043, 22068), True, 'import numpy as np\n'), ((22098, 22149), 'emmental.utils.utils.construct_identifier', 'construct_identifier', (['"""model"""', '"""all"""', 'split', '"""loss"""'], {}), "('model', 'all', split, 'loss')\n", (22118, 22149), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((22198, 22229), 'numpy.mean', 'np.mean', (['macro_loss_dict[split]'], {}), '(macro_loss_dict[split])\n', (22205, 22229), True, 'import numpy as np\n'), ((22359, 22419), 'emmental.utils.utils.construct_identifier', 'construct_identifier', (['"""model"""', '"""all"""', '"""all"""', '"""micro_average"""'], {}), "('model', 'all', 'all', 'micro_average')\n", (22379, 22419), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((22683, 22743), 'emmental.utils.utils.construct_identifier', 'construct_identifier', (['"""model"""', '"""all"""', '"""all"""', '"""macro_average"""'], {}), "('model', 'all', 'all', 'macro_average')\n", (22703, 22743), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((23006, 23057), 'emmental.utils.utils.construct_identifier', 'construct_identifier', (['"""model"""', '"""all"""', '"""all"""', '"""loss"""'], {}), "('model', 'all', 'all', 'loss')\n", (23026, 23057), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((23940, 23967), 'os.path.dirname', 'os.path.dirname', (['model_path'], {}), '(model_path)\n', (23955, 23967), False, 'import os\n'), ((23994, 24021), 'os.path.dirname', 'os.path.dirname', (['model_path'], {}), '(model_path)\n', (24009, 24021), False, 'import os\n'), ((2623, 2643), 'torch.device', 'torch.device', (['device'], {}), '(device)\n', (2635, 2643), False, 'import torch\n'), ((20066, 20154), 'emmental.utils.utils.construct_identifier', 'construct_identifier', (['task_name', 'dataloader.data_name', 'dataloader.split', 'metric_name'], {}), '(task_name, dataloader.data_name, dataloader.split,\n metric_name)\n', (20086, 20154), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((20684, 20770), 'emmental.utils.utils.construct_identifier', 'construct_identifier', (['task_name', 'dataloader.data_name', 'dataloader.split', '"""average"""'], {}), "(task_name, dataloader.data_name, dataloader.split,\n 'average')\n", (20704, 20770), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((21250, 21329), 'emmental.utils.utils.construct_identifier', 'construct_identifier', (['task_name', 'dataloader.data_name', 'dataloader.split', '"""loss"""'], {}), "(task_name, dataloader.data_name, dataloader.split, 'loss')\n", (21270, 21329), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((25086, 25105), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (25098, 25105), False, 'import torch\n'), ((11523, 11596), 'emmental.utils.utils.move_to_device', 'move_to_device', (['Y_dict[label_name]', "Meta.config['model_config']['device']"], {}), "(Y_dict[label_name], Meta.config['model_config']['device'])\n", (11537, 11596), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((11676, 11737), 'emmental.utils.utils.move_to_device', 'move_to_device', (['active', "Meta.config['model_config']['device']"], {}), "(active, Meta.config['model_config']['device'])\n", (11690, 11737), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((11902, 11963), 'emmental.utils.utils.move_to_device', 'move_to_device', (['active', "Meta.config['model_config']['device']"], {}), "(active, Meta.config['model_config']['device'])\n", (11916, 11963), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n'), ((12705, 12766), 'emmental.utils.utils.move_to_device', 'move_to_device', (['active', "Meta.config['model_config']['device']"], {}), "(active, Meta.config['model_config']['device'])\n", (12719, 12766), False, 'from emmental.utils.utils import construct_identifier, move_to_device, prob_to_pred\n')] |
jangxx/OVRT_Soundpad | server/ws_server.py | 2f9b2cd19421bc7b5586a3dcded2998d381ba688 | import asyncio, json
from config import Config
from soundpad_manager import SoundpadManager
from version import BRIDGE_VERSION
import websockets
from sanic.log import logger
# Yes, I know it's lazy to run a separate WS and HTTP server when both could share one port,
# but I don't like Sanic's WS implementation, and this is just a quick-and-dirty project anyway, so there is no reason to get fancy.
class WebsocketServer:
def __init__(self, config: Config, sp_manager: SoundpadManager):
self._server = None
self._config = config
self._soundpad = sp_manager
# ephemeral state
self._state = {
"edit_mode": False,
"soundpad_connected": False,
"version": BRIDGE_VERSION,
}
self._index_sockets = set()
self._control_sockets = set()
def start(self):
port = self._config.get(["server", "ws_port"])
logger.info(f"Websocket server is running on port {port}")
self._server = asyncio.get_event_loop().run_until_complete(websockets.serve(self.connHandler, "localhost", port))
async def stop(self):
self._server.close()
await self._server.wait_closed()
async def changeState(self, key, value):
self._state[key] = value
await self.emitEvent("state-update", self._state)
async def commandHandler(self, socket, command, params):
if command == "register":
if params["as"] == "index":
self._index_sockets.add(socket)
elif params["as"] == "control":
self._control_sockets.add(socket)
await self.emitEvent("settings-change", self._config.getExternalSerialized(), socket=socket, index_sockets=False, control_sockets=False)
await self.emitEvent("state-update", self._state, socket=socket, index_sockets=False, control_sockets=False)
elif command == "change-settings":
if params["setting"] == [ "board", "rows" ] or params["setting"] == [ "board", "columns" ]:
if not 1 <= params["value"] <= 10:
return # invalid values are not allowed
self._config.set(params["setting"], params["value"])
await self.emitEvent("settings-change", self._config.getExternalSerialized())
elif command == "set-edit-mode":
self._state["edit_mode"] = params["value"]
await self.emitEvent("state-update", self._state)
elif command == "select-sound":
if not 0 <= params['page'] <= 9 or not 0 <= params['row'] <= 9 or not 0 <= params['col'] <= 9:
return # out of bounds
if params['page'] == 0 and self._config.exists([ "sounds", f"{params['row']},{params['col']}" ]):
self._config.delete([ "sounds", f"{params['row']},{params['col']}" ])
sound_index = f"{params['page']}:{params['row']},{params['col']}"
self._config.set([ "sounds", sound_index ], params["sound"])
await self.emitEvent("settings-change", self._config.getExternalSerialized(), index_sockets=False)
elif command == "play-sound":
sound_id = params["sound"]
self._soundpad.playSound(sound_id)
elif command == "stop-sound":
self._soundpad.stopSound()
elif command == "pause-sound":
self._soundpad.pauseSound()
elif command == "log":
if "message" in params:
logger.info("Log: " + params["message"])
else:
logger.info("Log: " + json.dumps(params))
async def emitEvent(self, event, data, socket=None, index_sockets=True, control_sockets=True):
msg = json.dumps({ "type": "event", "event": event, "data": data })
if socket is not None:
await socket.send(msg)
if index_sockets:
for socket in self._index_sockets:
await socket.send(msg)
if control_sockets:
for socket in self._control_sockets:
await socket.send(msg)
async def connHandler(self, socket, path):
print("Client connected")
try:
async for raw_msg in socket:
try:
msg = json.loads(raw_msg)
except Exception as err:
logger.error(f"Could not parse JSON: {repr(err)}")
continue
if not "type" in msg:
continue
if msg["type"] == "command":
if not "command" in msg or not "params" in msg:
continue
try:
await self.commandHandler(socket, msg["command"], msg["params"])
except Exception as e: # if we get garbage data just ignore
print(f"Error in commandHandler: {msg['command']}({msg['params']}): {repr(e)}")
pass
except websockets.ConnectionClosedError:
pass
finally:
if socket in self._index_sockets:
self._index_sockets.discard(socket)
if socket in self._control_sockets:
self._control_sockets.discard(socket)
print("Client disconnected") | [((886, 944), 'sanic.log.logger.info', 'logger.info', (['f"""Websocket server is running on port {port}"""'], {}), "(f'Websocket server is running on port {port}')\n", (897, 944), False, 'from sanic.log import logger\n'), ((3359, 3418), 'json.dumps', 'json.dumps', (["{'type': 'event', 'event': event, 'data': data}"], {}), "({'type': 'event', 'event': event, 'data': data})\n", (3369, 3418), False, 'import asyncio, json\n'), ((1007, 1060), 'websockets.serve', 'websockets.serve', (['self.connHandler', '"""localhost"""', 'port'], {}), "(self.connHandler, 'localhost', port)\n", (1023, 1060), False, 'import websockets\n'), ((963, 987), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (985, 987), False, 'import asyncio, json\n'), ((3797, 3816), 'json.loads', 'json.loads', (['raw_msg'], {}), '(raw_msg)\n', (3807, 3816), False, 'import asyncio, json\n'), ((3153, 3193), 'sanic.log.logger.info', 'logger.info', (["('Log: ' + params['message'])"], {}), "('Log: ' + params['message'])\n", (3164, 3193), False, 'from sanic.log import logger\n'), ((3231, 3249), 'json.dumps', 'json.dumps', (['params'], {}), '(params)\n', (3241, 3249), False, 'import asyncio, json\n')] |
CityPulse/dynamic-bus-scheduling | tests/route_generator_test.py | 7516283be5a374fe0a27715f4facee11c847f39f | #!/usr/local/bin/python
# -*- coding: utf-8 -*-
"""
- LICENCE
The MIT License (MIT)
Copyright (c) 2016 Eleftherios Anagnostopoulos for Ericsson AB (EU FP7 CityPulse Project)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
- DESCRIPTION OF DOCUMENTS
-- MongoDB Database Documents:
address_document: {
'_id', 'name', 'node_id', 'point': {'longitude', 'latitude'}
}
bus_line_document: {
'_id', 'bus_line_id', 'bus_stops': [{'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}}]
}
bus_stop_document: {
'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}
}
bus_stop_waypoints_document: {
'_id', 'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'waypoints': [[edge_object_id]]
}
bus_vehicle_document: {
'_id', 'bus_vehicle_id', 'maximum_capacity',
'routes': [{'starting_datetime', 'ending_datetime', 'timetable_id'}]
}
detailed_bus_stop_waypoints_document: {
'_id', 'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'waypoints': [[edge_document]]
}
edge_document: {
'_id', 'starting_node': {'osm_id', 'point': {'longitude', 'latitude'}},
'ending_node': {'osm_id', 'point': {'longitude', 'latitude'}},
'max_speed', 'road_type', 'way_id', 'traffic_density'
}
node_document: {
'_id', 'osm_id', 'tags', 'point': {'longitude', 'latitude'}
}
point_document: {
'_id', 'osm_id', 'point': {'longitude', 'latitude'}
}
timetable_document: {
'_id', 'timetable_id', 'bus_line_id', 'bus_vehicle_id',
'timetable_entries': [{
'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'departure_datetime', 'arrival_datetime', 'number_of_onboarding_passengers',
'number_of_deboarding_passengers', 'number_of_current_passengers',
'route': {
'total_distance', 'total_time', 'node_osm_ids', 'points', 'edges',
'distances_from_starting_node', 'times_from_starting_node',
'distances_from_previous_node', 'times_from_previous_node'
}
}],
'travel_requests': [{
'_id', 'client_id', 'bus_line_id',
'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'departure_datetime', 'arrival_datetime',
'starting_timetable_entry_index', 'ending_timetable_entry_index'
}]
}
traffic_event_document: {
'_id', 'event_id', 'event_type', 'event_level', 'point': {'longitude', 'latitude'}, 'datetime'
}
travel_request_document: {
'_id', 'client_id', 'bus_line_id',
'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'departure_datetime', 'arrival_datetime',
'starting_timetable_entry_index', 'ending_timetable_entry_index'
}
way_document: {
'_id', 'osm_id', 'tags', 'references'
}
-- Route Generator Responses:
get_route_between_two_bus_stops: {
'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'route': {
'total_distance', 'total_time', 'node_osm_ids', 'points', 'edges',
'distances_from_starting_node', 'times_from_starting_node',
'distances_from_previous_node', 'times_from_previous_node'
}
}
get_route_between_multiple_bus_stops: [{
'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'route': {
'total_distance', 'total_time', 'node_osm_ids', 'points', 'edges',
'distances_from_starting_node', 'times_from_starting_node',
'distances_from_previous_node', 'times_from_previous_node'
}
}]
get_waypoints_between_two_bus_stops: {
'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'waypoints': [[{
'_id', 'starting_node': {'osm_id', 'point': {'longitude', 'latitude'}},
'ending_node': {'osm_id', 'point': {'longitude', 'latitude'}},
'max_speed', 'road_type', 'way_id', 'traffic_density'
}]]
}
get_waypoints_between_multiple_bus_stops: [{
'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
'waypoints': [[{
'_id', 'starting_node': {'osm_id', 'point': {'longitude', 'latitude'}},
'ending_node': {'osm_id', 'point': {'longitude', 'latitude'}},
'max_speed', 'road_type', 'way_id', 'traffic_density'
}]]
}]
"""
import time
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
from src.common.logger import log
from src.common.parameters import testing_bus_stop_names
from src.route_generator.route_generator_client import get_route_between_two_bus_stops, \
get_route_between_multiple_bus_stops, get_waypoints_between_two_bus_stops, get_waypoints_between_multiple_bus_stops
__author__ = 'Eleftherios Anagnostopoulos'
__email__ = '[email protected]'
__credits__ = [
'Azadeh Bararsani (Senior Researcher at Ericsson AB) - email: [email protected]'
'Aneta Vulgarakis Feljan (Senior Researcher at Ericsson AB) - email: [email protected]'
]
def test_get_route_between_two_bus_stops(starting_bus_stop=None, ending_bus_stop=None,
starting_bus_stop_name=None, ending_bus_stop_name=None):
"""
:param starting_bus_stop: bus_stop_document
:param ending_bus_stop: bus_stop_document
:param starting_bus_stop_name: string
:param ending_bus_stop_name: string
"""
log(module_name='route_generator_test', log_type='INFO',
        log_message='test_get_route_between_two_bus_stops: starting')
start_time = time.time()
# response = {
# 'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
# 'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
# 'route': {
# 'total_distance', 'total_time', 'node_osm_ids', 'points', 'edges',
# 'distances_from_starting_node', 'times_from_starting_node',
# 'distances_from_previous_node', 'times_from_previous_node'
# }
# }
response = get_route_between_two_bus_stops(
starting_bus_stop=starting_bus_stop,
ending_bus_stop=ending_bus_stop,
starting_bus_stop_name=starting_bus_stop_name,
ending_bus_stop_name=ending_bus_stop_name
)
starting_bus_stop = response.get('starting_bus_stop')
ending_bus_stop = response.get('ending_bus_stop')
route = response.get('route')
if route is not None:
total_distance = route.get('total_distance')
total_time = route.get('total_time')
node_osm_ids = route.get('node_osm_ids')
points = route.get('points')
edges = route.get('edges')
distances_from_starting_node = route.get('distances_from_starting_node')
times_from_starting_node = route.get('times_from_starting_node')
distances_from_previous_node = route.get('distances_from_previous_node')
times_from_previous_node = route.get('times_from_previous_node')
output = '\nstarting_bus_stop: ' + str(starting_bus_stop) + \
'\nending_bus_stop: ' + str(ending_bus_stop) + \
'\ntotal_distance: ' + str(total_distance) + \
'\ntotal_time: ' + str(total_time) + \
'\nnode_osm_ids: ' + str(node_osm_ids) + \
'\npoints: ' + str(points) + \
'\nedges: ' + str(edges) + \
'\ndistances_from_starting_node: ' + str(distances_from_starting_node) + \
'\ntimes_from_starting_node: ' + str(times_from_starting_node) + \
'\ndistances_from_previous_node: ' + str(distances_from_previous_node) + \
'\ntimes_from_previous_node: ' + str(times_from_previous_node)
else:
output = '\nstarting_bus_stop: ' + str(starting_bus_stop) + \
'\nending_bus_stop: ' + str(ending_bus_stop) + \
'\nroute: None'
print output
elapsed_time = time.time() - start_time
time.sleep(0.1)
log(module_name='route_generator_test', log_type='INFO',
log_message='test_get_route_between_two_bus_stops: finished - elapsed_time = ' +
str(elapsed_time) + ' sec')
def test_get_route_between_multiple_bus_stops(bus_stops=None, bus_stop_names=None):
"""
:param bus_stops: [bus_stop_document]
:param bus_stop_names: [string]
"""
log(module_name='route_generator_test', log_type='INFO',
        log_message='test_get_route_between_multiple_bus_stops: starting')
start_time = time.time()
route_distance = 0
route_traveling_time = 0
# response = [{
# 'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
# 'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
# 'route': {
# 'total_distance', 'total_time', 'node_osm_ids', 'points', 'edges',
# 'distances_from_starting_node', 'times_from_starting_node',
# 'distances_from_previous_node', 'times_from_previous_node'
# }
# }]
response = get_route_between_multiple_bus_stops(
bus_stops=bus_stops,
bus_stop_names=bus_stop_names
)
for intermediate_response in response:
starting_bus_stop = intermediate_response.get('starting_bus_stop')
ending_bus_stop = intermediate_response.get('ending_bus_stop')
intermediate_route = intermediate_response.get('route')
if intermediate_route is not None:
total_distance = intermediate_route.get('total_distance')
route_distance += total_distance
total_time = intermediate_route.get('total_time')
route_traveling_time += total_time
node_osm_ids = intermediate_route.get('node_osm_ids')
points = intermediate_route.get('points')
edges = intermediate_route.get('edges')
distances_from_starting_node = intermediate_route.get('distances_from_starting_node')
times_from_starting_node = intermediate_route.get('times_from_starting_node')
distances_from_previous_node = intermediate_route.get('distances_from_previous_node')
times_from_previous_node = intermediate_route.get('times_from_previous_node')
output = '\nstarting_bus_stop: ' + str(starting_bus_stop) + \
'\nending_bus_stop: ' + str(ending_bus_stop) + \
'\ntotal_distance: ' + str(total_distance) + \
'\ntotal_time: ' + str(total_time) + \
'\nnode_osm_ids: ' + str(node_osm_ids) + \
'\npoints: ' + str(points) + \
'\nedges: ' + str(edges) + \
'\ndistances_from_starting_node: ' + str(distances_from_starting_node) + \
'\ntimes_from_starting_node: ' + str(times_from_starting_node) + \
'\ndistances_from_previous_node: ' + str(distances_from_previous_node) + \
'\ntimes_from_previous_node: ' + str(times_from_previous_node)
else:
output = '\nstarting_bus_stop: ' + str(starting_bus_stop) + \
'\nending_bus_stop: ' + str(ending_bus_stop) + \
'\nroute: None'
print output
route_average_speed = (route_distance / 1000) / (route_traveling_time / 3600)
print '\nroute_distance: ' + str(route_distance / 1000) + \
' - route_traveling_time: ' + str(route_traveling_time / 60) + \
' - route_average_speed: ' + str(route_average_speed)
elapsed_time = time.time() - start_time
time.sleep(0.1)
log(module_name='route_generator_test', log_type='INFO',
log_message='test_get_route_between_multiple_bus_stops: finished - elapsed_time = ' +
str(elapsed_time) + ' sec')
def test_get_waypoints_between_two_bus_stops(starting_bus_stop=None, ending_bus_stop=None,
starting_bus_stop_name=None, ending_bus_stop_name=None):
"""
:param starting_bus_stop: bus_stop_document
:param ending_bus_stop: bus_stop_document
:param starting_bus_stop_name: string
:param ending_bus_stop_name: string
"""
log(module_name='route_generator_test', log_type='INFO',
log_message='test_get_waypoints_between_two_bus_stops: starting')
start_time = time.time()
# response = {
# 'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
# 'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
# 'waypoints': [[{
# '_id', 'starting_node': {'osm_id', 'point': {'longitude', 'latitude'}},
# 'ending_node': {'osm_id', 'point': {'longitude', 'latitude'}},
# 'max_speed', 'road_type', 'way_id', 'traffic_density'
# }]]
# }
response = get_waypoints_between_two_bus_stops(
starting_bus_stop=starting_bus_stop,
ending_bus_stop=ending_bus_stop,
starting_bus_stop_name=starting_bus_stop_name,
ending_bus_stop_name=ending_bus_stop_name
)
starting_bus_stop = response.get('starting_bus_stop')
ending_bus_stop = response.get('ending_bus_stop')
waypoints = response.get('waypoints')
output = '\nstarting_bus_stop: ' + str(starting_bus_stop) + \
'\nending_bus_stop: ' + str(ending_bus_stop)
print output
for separate_waypoints in waypoints:
print 'waypoints: ' + str(separate_waypoints)
elapsed_time = time.time() - start_time
time.sleep(0.1)
log(module_name='route_generator_test', log_type='INFO',
log_message='test_get_waypoints_between_two_bus_stops: finished - elapsed_time = ' +
str(elapsed_time) + ' sec')
def test_get_waypoints_between_multiple_bus_stops(bus_stops=None, bus_stop_names=None):
"""
:param bus_stops: [bus_stop_document]
:param bus_stop_names: [string]
"""
log(module_name='route_generator_test', log_type='INFO',
log_message='test_get_waypoints_between_multiple_bus_stops: starting')
start_time = time.time()
# response = [{
# 'starting_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
# 'ending_bus_stop': {'_id', 'osm_id', 'name', 'point': {'longitude', 'latitude'}},
# 'waypoints': [[{
# '_id', 'starting_node': {'osm_id', 'point': {'longitude', 'latitude'}},
# 'ending_node': {'osm_id', 'point': {'longitude', 'latitude'}},
# 'max_speed', 'road_type', 'way_id', 'traffic_density'
# }]]
# }]
response = get_waypoints_between_multiple_bus_stops(
bus_stops=bus_stops,
bus_stop_names=bus_stop_names
)
for intermediate_response in response:
starting_bus_stop = intermediate_response.get('starting_bus_stop')
ending_bus_stop = intermediate_response.get('ending_bus_stop')
waypoints = intermediate_response.get('waypoints')
output = '\nstarting_bus_stop: ' + str(starting_bus_stop) + \
'\nending_bus_stop: ' + str(ending_bus_stop)
print output
for separate_waypoints in waypoints:
print 'waypoints: ' + str(separate_waypoints)
elapsed_time = time.time() - start_time
time.sleep(0.1)
log(module_name='route_generator_test', log_type='INFO',
log_message='test_get_waypoints_between_multiple_bus_stops: finished - elapsed_time = ' +
str(elapsed_time) + ' sec')
if __name__ == '__main__':
selection = ''
while True:
selection = raw_input(
'\n0. exit'
'\n1. test_get_route_between_two_bus_stops'
'\n2. test_get_route_between_multiple_bus_stops'
'\n3. test_get_waypoints_between_two_bus_stops'
'\n4. test_get_waypoints_between_multiple_bus_stops'
'\nSelection: '
)
if selection == '0':
break
elif selection == '1':
test_get_route_between_two_bus_stops(
starting_bus_stop_name=testing_bus_stop_names[0],
ending_bus_stop_name=testing_bus_stop_names[1]
)
elif selection == '2':
test_get_route_between_multiple_bus_stops(
bus_stop_names=testing_bus_stop_names
)
elif selection == '3':
test_get_waypoints_between_two_bus_stops(
starting_bus_stop_name=testing_bus_stop_names[0],
ending_bus_stop_name=testing_bus_stop_names[1]
)
elif selection == '4':
test_get_waypoints_between_multiple_bus_stops(
bus_stop_names=testing_bus_stop_names
)
else:
print 'Invalid input'
| [] |
gian1312/suchen | tensorforce/tests/test_model_save_restore.py | df863140fd8df1ac2e195cbdfa4756f09f962270 | from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import unittest
import pytest
from tensorforce import TensorForceError
from tensorforce.core.networks import LayeredNetwork
from tensorforce.models import DistributionModel
from tensorforce.tests.minimal_test import MinimalTest
from tensorforce.agents import PPOAgent
from tensorforce.execution import Runner
import tensorflow as tf
import numpy as np
from tensorforce.util import SavableComponent
import os
class SavableNetwork(LayeredNetwork, SavableComponent):
"""
Minimal implementation of a Network that can be saved and restored independently of the Model.
"""
def get_savable_variables(self):
return super(SavableNetwork, self).get_variables(include_nontrainable=False)
def _get_base_variable_scope(self):
return self.apply.variable_scope_name
def create_environment(spec):
return MinimalTest(spec)
def create_agent(environment, network_spec):
return PPOAgent(
update_mode=dict(
unit='episodes',
batch_size=4,
frequency=4
),
memory=dict(
type='latest',
include_next_states=False,
capacity=100
),
step_optimizer=dict(
type='adam',
learning_rate=1e-3
),
subsampling_fraction=0.3,
optimization_steps=20,
states=environment.states,
actions=environment.actions,
network=network_spec
)
class TestModelSaveRestore(unittest.TestCase):
@pytest.fixture(autouse=True)
def initdir(self, tmpdir):
tmpdir.chdir()
self._tmp_dir_path = str(tmpdir)
print("Using %s" % (self._tmp_dir_path, ))
def test_save_restore(self):
environment_spec = {"float": ()}
environment = create_environment(environment_spec)
network_spec = [
dict(type='dense', size=32)
]
agent = create_agent(environment, network_spec)
runner = Runner(agent=agent, environment=environment)
runner.run(episodes=100)
model_values = agent.model.session.run(agent.model.get_variables(
include_submodules=True,
include_nontrainable=False
))
save_path = agent.model.save(directory=self._tmp_dir_path + "/model")
print("Saved at: %s" % (save_path,))
runner.close()
agent = create_agent(environment, network_spec)
agent.model.restore(directory="", file=save_path)
restored_model_values = agent.model.session.run(agent.model.get_variables(
include_submodules=True,
include_nontrainable=False
))
assert len(model_values) == len(restored_model_values)
assert all([np.array_equal(v1, v2) for v1, v2 in zip(model_values, restored_model_values)])
agent.close()
def test_save_network(self):
"""
Test to validate that calls to save and restore of a SavableComponent successfully save and restore the
component's state.
"""
environment_spec = {"float": ()}
environment = create_environment(environment_spec)
network_spec = dict(
type=SavableNetwork,
layers=[dict(type='dense', size=1)]
)
agent = create_agent(environment, network_spec)
assert isinstance(agent.model.network, SavableComponent)
runner = Runner(agent=agent, environment=environment)
runner.run(episodes=100)
network_values = agent.model.session.run(agent.model.network.get_variables())
distribution = next(iter(agent.model.distributions.values()))
distribution_values = agent.model.session.run(distribution.get_variables())
save_path = self._tmp_dir_path + "/network"
agent.model.save_component(component_name=DistributionModel.COMPONENT_NETWORK, save_path=save_path)
runner.close()
assert os.path.isfile(save_path + ".data-00000-of-00001")
assert os.path.isfile(save_path + ".index")
agent = create_agent(environment, network_spec)
agent.model.restore_component(component_name=DistributionModel.COMPONENT_NETWORK, save_path=save_path)
# Ensure only the network variables are loaded
restored_network_values = agent.model.session.run(agent.model.network.get_variables(include_nontrainable=True))
distribution = next(iter(agent.model.distributions.values()))
restored_distribution_values = agent.model.session.run(distribution.get_variables())
assert len(restored_network_values) == len(network_values)
assert all([np.array_equal(v1, v2) for v1, v2 in zip(network_values, restored_network_values)])
assert len(restored_distribution_values) == len(distribution_values)
assert not all([np.array_equal(v1, v2) for v1, v2 in zip(distribution_values, restored_distribution_values)])
agent.close()
environment.close()
def test_pretrain_network(self):
"""
        Simulates training outside of Tensorforce and then loading the parameters into the agent's network.
"""
environment_spec = {"float": ()}
environment = create_environment(environment_spec)
size = environment.states["shape"]
output_size = 1
save_path = self._tmp_dir_path + "/network"
g = tf.Graph()
with g.as_default():
x = tf.placeholder(dtype=environment.states["type"], shape=[None, size])
layer = tf.layers.Dense(units=output_size)
y = layer(x)
y_ = tf.placeholder(dtype=environment.states["type"], shape=[None, output_size])
loss = tf.losses.mean_squared_error(y_, y)
optimizer = tf.train.AdamOptimizer(learning_rate=0.1)
train_step = optimizer.minimize(loss)
batch_size = 64
with tf.Session(graph=g) as sess:
sess.run(tf.global_variables_initializer())
for epoch in range(100):
batch = np.random.random([batch_size, size])
correct = np.ones(shape=[batch.shape[0], output_size])
loss_value, _ = sess.run([loss, train_step], {x: batch, y_: correct})
if epoch % 10 == 0:
print("epoch %d: %f" % (epoch, loss_value))
var_map = {
"dense0/apply/linear/apply/W:0": layer.kernel,
"dense0/apply/linear/apply/b:0": layer.bias
}
saver = tf.train.Saver(var_list=var_map)
saver.save(sess=sess, write_meta_graph=False, save_path=save_path)
network_spec = dict(
type=SavableNetwork,
layers=[dict(type='dense', size=output_size)],
)
agent = create_agent(environment, network_spec)
agent.model.restore_component(component_name=agent.model.COMPONENT_NETWORK, save_path=save_path)
agent.close()
def test_non_savable_component(self):
environment_spec = {"float": ()}
environment = create_environment(environment_spec)
network_spec = [dict(type='dense', size=32)]
agent = create_agent(environment, network_spec)
expected_message = "Component network must implement SavableComponent but is "
with pytest.raises(TensorForceError) as excinfo:
agent.model.restore_component(component_name="network", save_path=self._tmp_dir_path + "/network")
assert expected_message in str(excinfo.value)
with pytest.raises(TensorForceError) as excinfo:
agent.model.save_component(component_name="network", save_path=self._tmp_dir_path + "/network")
assert expected_message in str(excinfo.value)
with pytest.raises(TensorForceError) as excinfo:
agent.model.restore_component(component_name="non-existent", save_path=self._tmp_dir_path + "/network")
assert "Component non-existent must implement SavableComponent but is None" == str(excinfo.value)
agent.close()
| [((944, 961), 'tensorforce.tests.minimal_test.MinimalTest', 'MinimalTest', (['spec'], {}), '(spec)\n', (955, 961), False, 'from tensorforce.tests.minimal_test import MinimalTest\n'), ((1591, 1619), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (1605, 1619), False, 'import pytest\n'), ((2048, 2092), 'tensorforce.execution.Runner', 'Runner', ([], {'agent': 'agent', 'environment': 'environment'}), '(agent=agent, environment=environment)\n', (2054, 2092), False, 'from tensorforce.execution import Runner\n'), ((3462, 3506), 'tensorforce.execution.Runner', 'Runner', ([], {'agent': 'agent', 'environment': 'environment'}), '(agent=agent, environment=environment)\n', (3468, 3506), False, 'from tensorforce.execution import Runner\n'), ((3980, 4030), 'os.path.isfile', 'os.path.isfile', (["(save_path + '.data-00000-of-00001')"], {}), "(save_path + '.data-00000-of-00001')\n", (3994, 4030), False, 'import os\n'), ((4046, 4082), 'os.path.isfile', 'os.path.isfile', (["(save_path + '.index')"], {}), "(save_path + '.index')\n", (4060, 4082), False, 'import os\n'), ((5409, 5419), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (5417, 5419), True, 'import tensorflow as tf\n'), ((5465, 5533), 'tensorflow.placeholder', 'tf.placeholder', ([], {'dtype': "environment.states['type']", 'shape': '[None, size]'}), "(dtype=environment.states['type'], shape=[None, size])\n", (5479, 5533), True, 'import tensorflow as tf\n'), ((5554, 5588), 'tensorflow.layers.Dense', 'tf.layers.Dense', ([], {'units': 'output_size'}), '(units=output_size)\n', (5569, 5588), True, 'import tensorflow as tf\n'), ((5631, 5706), 'tensorflow.placeholder', 'tf.placeholder', ([], {'dtype': "environment.states['type']", 'shape': '[None, output_size]'}), "(dtype=environment.states['type'], shape=[None, output_size])\n", (5645, 5706), True, 'import tensorflow as tf\n'), ((5726, 5761), 'tensorflow.losses.mean_squared_error', 'tf.losses.mean_squared_error', (['y_', 'y'], {}), '(y_, y)\n', (5754, 5761), True, 'import tensorflow as tf\n'), ((5786, 5827), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': '(0.1)'}), '(learning_rate=0.1)\n', (5808, 5827), True, 'import tensorflow as tf\n'), ((7376, 7407), 'pytest.raises', 'pytest.raises', (['TensorForceError'], {}), '(TensorForceError)\n', (7389, 7407), False, 'import pytest\n'), ((7599, 7630), 'pytest.raises', 'pytest.raises', (['TensorForceError'], {}), '(TensorForceError)\n', (7612, 7630), False, 'import pytest\n'), ((7819, 7850), 'pytest.raises', 'pytest.raises', (['TensorForceError'], {}), '(TensorForceError)\n', (7832, 7850), False, 'import pytest\n'), ((2802, 2824), 'numpy.array_equal', 'np.array_equal', (['v1', 'v2'], {}), '(v1, v2)\n', (2816, 2824), True, 'import numpy as np\n'), ((4678, 4700), 'numpy.array_equal', 'np.array_equal', (['v1', 'v2'], {}), '(v1, v2)\n', (4692, 4700), True, 'import numpy as np\n'), ((5923, 5942), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'g'}), '(graph=g)\n', (5933, 5942), True, 'import tensorflow as tf\n'), ((6592, 6624), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {'var_list': 'var_map'}), '(var_list=var_map)\n', (6606, 6624), True, 'import tensorflow as tf\n'), ((4863, 4885), 'numpy.array_equal', 'np.array_equal', (['v1', 'v2'], {}), '(v1, v2)\n', (4877, 4885), True, 'import numpy as np\n'), ((5977, 6010), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (6008, 6010), True, 'import tensorflow as tf\n'), ((6081, 6117), 'numpy.random.random', 'np.random.random', (['[batch_size, size]'], {}), '([batch_size, size])\n', (6097, 6117), True, 'import numpy as np\n'), ((6148, 6192), 'numpy.ones', 'np.ones', ([], {'shape': '[batch.shape[0], output_size]'}), '(shape=[batch.shape[0], output_size])\n', (6155, 6192), True, 'import numpy as np\n')]
lihuiba/SoftSAN | guid.py | 1b8ab2cae92b7aac34211909b27d4ebe595275d7 | import random
import messages_pb2 as msg
def assign(x, y):
x.a=y.a; x.b=y.b; x.c=y.c; x.d=y.d
def isZero(x):
return (x.a==0 and x.b==0 and x.c==0 and x.d==0)
def setZero(x):
x.a=0; x.b=0; x.c=0; x.d=0
def toStr(x):
return "%08x-%08x-%08x-%08x" % (x.a, x.b, x.c, x.d)
def toTuple(x):
return (x.a, x.b, x.c, x.d)
def fromTuple(x):
ret=msg.Guid()
ret.a=x[0]
ret.b=x[1]
ret.c=x[2]
ret.d=x[3]
return ret
def generate(guid=None):
ret=guid or msg.Guid()
ret.a=random.randint(0, 0xffffffff)
ret.b=random.randint(0, 0xffffffff)
ret.c=random.randint(0, 0xffffffff)
ret.d=random.randint(0, 0xffffffff)
return ret
def fromStr(s):
ret=msg.Guid()
s=s.split('-')
ret.a=int(s[0], 16)
ret.b=int(s[1], 16)
ret.c=int(s[2], 16)
ret.d=int(s[3], 16)
return ret
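
# Editor's usage sketch (not part of the original module; assumes messages_pb2
# provides the Guid message used above). A generated guid should round-trip
# through its string form unchanged:
if __name__ == '__main__':
    g = generate()
    s = toStr(g)  # four zero-padded 32-bit hex words
    assert toTuple(fromStr(s)) == toTuple(g)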
| [((363, 373), 'messages_pb2.Guid', 'msg.Guid', ([], {}), '()\n', (371, 373), True, 'import messages_pb2 as msg\n'), ((512, 541), 'random.randint', 'random.randint', (['(0)', '(4294967295)'], {}), '(0, 4294967295)\n', (526, 541), False, 'import random\n'), ((552, 581), 'random.randint', 'random.randint', (['(0)', '(4294967295)'], {}), '(0, 4294967295)\n', (566, 581), False, 'import random\n'), ((592, 621), 'random.randint', 'random.randint', (['(0)', '(4294967295)'], {}), '(0, 4294967295)\n', (606, 621), False, 'import random\n'), ((632, 661), 'random.randint', 'random.randint', (['(0)', '(4294967295)'], {}), '(0, 4294967295)\n', (646, 661), False, 'import random\n'), ((702, 712), 'messages_pb2.Guid', 'msg.Guid', ([], {}), '()\n', (710, 712), True, 'import messages_pb2 as msg\n'), ((491, 501), 'messages_pb2.Guid', 'msg.Guid', ([], {}), '()\n', (499, 501), True, 'import messages_pb2 as msg\n')] |
kronael/mango-explorer | mango/__init__.py | 6292c089c2a3d1ff2cf0b50b815849451a50ec39 | # In --strict mode, mypy complains about imports unless they're done this way.
#
# It complains 'Module has no attribute ABC' or 'Module "mango" does not explicitly export
# attribute "XYZ"; implicit reexport disabled'. We could dial that back by using the
# --implicit-reexport parameter, but let's keep things strict.
#
# Each import then *must* be of the form `from .file import X as X`. (Until/unless there's
# a better way.)
#
from .account import Account as Account
from .account import AccountSlot as AccountSlot
from .accountflags import AccountFlags as AccountFlags
from .accountinfo import AccountInfo as AccountInfo
from .accountinfoconverter import build_account_info_converter as build_account_info_converter
from .accountinstrumentvalues import AccountInstrumentValues as AccountInstrumentValues
from .accountinstrumentvalues import PricedAccountInstrumentValues as PricedAccountInstrumentValues
from .accountliquidator import AccountLiquidator as AccountLiquidator
from .accountliquidator import NullAccountLiquidator as NullAccountLiquidator
from .accountscout import AccountScout as AccountScout
from .accountscout import ScoutReport as ScoutReport
from .addressableaccount import AddressableAccount as AddressableAccount
from .arguments import parse_args as parse_args
from .arguments import output as output
from .balancesheet import BalanceSheet as BalanceSheet
from .cache import Cache as Cache
from .cache import MarketCache as MarketCache
from .cache import PerpMarketCache as PerpMarketCache
from .cache import PriceCache as PriceCache
from .cache import RootBankCache as RootBankCache
from .client import BetterClient as BetterClient
from .client import BlockhashNotFoundException as BlockhashNotFoundException
from .client import ClientException as ClientException
from .client import CompoundException as CompoundException
from .client import CompoundRPCCaller as CompoundRPCCaller
from .client import FailedToFetchBlockhashException as FailedToFetchBlockhashException
from .client import NodeIsBehindException as NodeIsBehindException
from .client import RateLimitException as RateLimitException
from .client import RPCCaller as RPCCaller
from .client import SlotHolder as SlotHolder
from .client import TooManyRequestsRateLimitException as TooManyRequestsRateLimitException
from .client import TooMuchBandwidthRateLimitException as TooMuchBandwidthRateLimitException
from .client import TransactionException as TransactionException
from .combinableinstructions import CombinableInstructions as CombinableInstructions
from .constants import MangoConstants as MangoConstants
from .constants import DATA_PATH as DATA_PATH
from .constants import SOL_DECIMAL_DIVISOR as SOL_DECIMAL_DIVISOR
from .constants import SOL_DECIMALS as SOL_DECIMALS
from .constants import SOL_MINT_ADDRESS as SOL_MINT_ADDRESS
from .constants import SYSTEM_PROGRAM_ADDRESS as SYSTEM_PROGRAM_ADDRESS
from .constants import WARNING_DISCLAIMER_TEXT as WARNING_DISCLAIMER_TEXT
from .constants import version as version
from .context import Context as Context
from .contextbuilder import ContextBuilder as ContextBuilder
from .createmarketoperations import create_market_instruction_builder as create_market_instruction_builder
from .createmarketoperations import create_market_operations as create_market_operations
from .encoding import decode_binary as decode_binary
from .encoding import encode_binary as encode_binary
from .encoding import encode_key as encode_key
from .encoding import encode_int as encode_int
from .ensuremarketloaded import ensure_market_loaded as ensure_market_loaded
from .ensuremarketloaded import load_market_by_symbol as load_market_by_symbol
from .group import Group as Group
from .group import GroupSlot as GroupSlot
from .group import GroupSlotPerpMarket as GroupSlotPerpMarket
from .group import GroupSlotSpotMarket as GroupSlotSpotMarket
from .healthcheck import HealthCheck as HealthCheck
from .idl import IdlParser as IdlParser
from .idl import lazy_load_cached_idl_parser as lazy_load_cached_idl_parser
from .idsjsonmarketlookup import IdsJsonMarketLookup as IdsJsonMarketLookup
from .inventory import Inventory as Inventory
from .inventory import PerpInventoryAccountWatcher as PerpInventoryAccountWatcher
from .inventory import SpotInventoryAccountWatcher as SpotInventoryAccountWatcher
from .instructions import build_cancel_perp_order_instructions as build_cancel_perp_order_instructions
from .instructions import build_cancel_spot_order_instructions as build_cancel_spot_order_instructions
from .instructions import build_close_spl_account_instructions as build_close_spl_account_instructions
from .instructions import build_create_account_instructions as build_create_account_instructions
from .instructions import build_create_associated_spl_account_instructions as build_create_associated_spl_account_instructions
from .instructions import build_create_solana_account_instructions as build_create_solana_account_instructions
from .instructions import build_create_spl_account_instructions as build_create_spl_account_instructions
from .instructions import build_create_serum_open_orders_instructions as build_create_serum_open_orders_instructions
from .instructions import build_deposit_instructions as build_deposit_instructions
from .instructions import build_faucet_airdrop_instructions as build_faucet_airdrop_instructions
from .instructions import build_mango_consume_events_instructions as build_mango_consume_events_instructions
from .instructions import build_place_perp_order_instructions as build_place_perp_order_instructions
from .instructions import build_redeem_accrued_mango_instructions as build_redeem_accrued_mango_instructions
from .instructions import build_serum_consume_events_instructions as build_serum_consume_events_instructions
from .instructions import build_serum_place_order_instructions as build_serum_place_order_instructions
from .instructions import build_serum_settle_instructions as build_serum_settle_instructions
from .instructions import build_spot_place_order_instructions as build_spot_place_order_instructions
from .instructions import build_transfer_spl_tokens_instructions as build_transfer_spl_tokens_instructions
from .instructions import build_withdraw_instructions as build_withdraw_instructions
from .instructionreporter import InstructionReporter as InstructionReporter
from .instructionreporter import SerumInstructionReporter as SerumInstructionReporter
from .instructionreporter import MangoInstructionReporter as MangoInstructionReporter
from .instructionreporter import CompoundInstructionReporter as CompoundInstructionReporter
from .instructiontype import InstructionType as InstructionType
from .instrumentlookup import InstrumentLookup as InstrumentLookup
from .instrumentlookup import NullInstrumentLookup as NullInstrumentLookup
from .instrumentlookup import CompoundInstrumentLookup as CompoundInstrumentLookup
from .instrumentlookup import IdsJsonTokenLookup as IdsJsonTokenLookup
from .instrumentlookup import NonSPLInstrumentLookup as NonSPLInstrumentLookup
from .instrumentlookup import SPLTokenLookup as SPLTokenLookup
from .instrumentvalue import InstrumentValue as InstrumentValue
from .liquidatablereport import LiquidatableState as LiquidatableState
from .liquidatablereport import LiquidatableReport as LiquidatableReport
from .liquidationevent import LiquidationEvent as LiquidationEvent
from .liquidationprocessor import LiquidationProcessor as LiquidationProcessor
from .liquidationprocessor import LiquidationProcessorState as LiquidationProcessorState
from .loadedmarket import LoadedMarket as LoadedMarket
from .logmessages import expand_log_messages as expand_log_messages
from .lotsizeconverter import LotSizeConverter as LotSizeConverter
from .mangoinstruction import MangoInstruction as MangoInstruction
from .lotsizeconverter import NullLotSizeConverter as NullLotSizeConverter
from .market import DryRunMarket as DryRunMarket
from .market import InventorySource as InventorySource
from .market import Market as Market
from .marketlookup import CompoundMarketLookup as CompoundMarketLookup
from .marketlookup import MarketLookup as MarketLookup
from .marketlookup import NullMarketLookup as NullMarketLookup
from .marketoperations import MarketInstructionBuilder as MarketInstructionBuilder
from .marketoperations import MarketOperations as MarketOperations
from .marketoperations import NullMarketInstructionBuilder as NullMarketInstructionBuilder
from .marketoperations import NullMarketOperations as NullMarketOperations
from .metadata import Metadata as Metadata
from .modelstate import ModelState as ModelState
from .notification import CompoundNotificationTarget as CompoundNotificationTarget
from .notification import ConsoleNotificationTarget as ConsoleNotificationTarget
from .notification import CsvFileNotificationTarget as CsvFileNotificationTarget
from .notification import DiscordNotificationTarget as DiscordNotificationTarget
from .notification import FilteringNotificationTarget as FilteringNotificationTarget
from .notification import MailjetNotificationTarget as MailjetNotificationTarget
from .notification import NotificationHandler as NotificationHandler
from .notification import NotificationTarget as NotificationTarget
from .notification import TelegramNotificationTarget as TelegramNotificationTarget
from .notification import parse_notification_target as parse_notification_target
from .observables import CaptureFirstItem as CaptureFirstItem
from .observables import CollectingObserverSubscriber as CollectingObserverSubscriber
from .observables import DisposePropagator as DisposePropagator
from .observables import DisposeWrapper as DisposeWrapper
from .observables import EventSource as EventSource
from .observables import FunctionObserver as FunctionObserver
from .observables import LatestItemObserverSubscriber as LatestItemObserverSubscriber
from .observables import NullObserverSubscriber as NullObserverSubscriber
from .observables import PrintingObserverSubscriber as PrintingObserverSubscriber
from .observables import TimestampedPrintingObserverSubscriber as TimestampedPrintingObserverSubscriber
from .observables import create_backpressure_skipping_observer as create_backpressure_skipping_observer
from .observables import debug_print_item as debug_print_item
from .observables import log_subscription_error as log_subscription_error
from .observables import observable_pipeline_error_reporter as observable_pipeline_error_reporter
from .openorders import OpenOrders as OpenOrders
from .oracle import Oracle as Oracle
from .oracle import OracleProvider as OracleProvider
from .oracle import OracleSource as OracleSource
from .oracle import Price as Price
from .oracle import SupportedOracleFeature as SupportedOracleFeature
from .orderbookside import OrderBookSideType as OrderBookSideType
from .orderbookside import PerpOrderBookSide as PerpOrderBookSide
from .orders import Order as Order
from .orders import OrderType as OrderType
from .orders import OrderBook as OrderBook
from .orders import Side as Side
from .ownedinstrumentvalue import OwnedInstrumentValue as OwnedInstrumentValue
from .oraclefactory import create_oracle_provider as create_oracle_provider
from .parse_account_info_to_orders import parse_account_info_to_orders as parse_account_info_to_orders
from .perpaccount import PerpAccount as PerpAccount
from .perpeventqueue import PerpEvent as PerpEvent
from .perpeventqueue import PerpEventQueue as PerpEventQueue
from .perpeventqueue import PerpFillEvent as PerpFillEvent
from .perpeventqueue import PerpOutEvent as PerpOutEvent
from .perpeventqueue import PerpUnknownEvent as PerpUnknownEvent
from .perpeventqueue import UnseenPerpEventChangesTracker as UnseenPerpEventChangesTracker
from .perpmarket import PerpMarket as PerpMarket
from .perpmarket import PerpMarketStub as PerpMarketStub
from .perpmarketdetails import PerpMarketDetails as PerpMarketDetails
from .perpmarketoperations import PerpMarketInstructionBuilder as PerpMarketInstructionBuilder
from .perpmarketoperations import PerpMarketOperations as PerpMarketOperations
from .perpopenorders import PerpOpenOrders as PerpOpenOrders
from .placedorder import PlacedOrder as PlacedOrder
from .placedorder import PlacedOrdersContainer as PlacedOrdersContainer
from .publickey import encode_public_key_for_sorting as encode_public_key_for_sorting
from .reconnectingwebsocket import ReconnectingWebsocket as ReconnectingWebsocket
from .retrier import RetryWithPauses as RetryWithPauses
from .retrier import retry_context as retry_context
from .serumeventqueue import SerumEventQueue as SerumEventQueue
from .serumeventqueue import UnseenSerumEventChangesTracker as UnseenSerumEventChangesTracker
from .serummarket import SerumMarket as SerumMarket
from .serummarket import SerumMarketStub as SerumMarketStub
from .serummarketlookup import SerumMarketLookup as SerumMarketLookup
from .serummarketoperations import SerumMarketInstructionBuilder as SerumMarketInstructionBuilder
from .serummarketoperations import SerumMarketOperations as SerumMarketOperations
from .spotmarket import SpotMarket as SpotMarket
from .spotmarket import SpotMarketStub as SpotMarketStub
from .spotmarketoperations import SpotMarketInstructionBuilder as SpotMarketInstructionBuilder
from .spotmarketoperations import SpotMarketOperations as SpotMarketOperations
from .text import indent_collection_as_str as indent_collection_as_str
from .text import indent_item_by as indent_item_by
from .token import Instrument as Instrument
from .token import SolToken as SolToken
from .token import Token as Token
from .tokenaccount import TokenAccount as TokenAccount
from .tokenbank import BankBalances as BankBalances
from .tokenbank import InterestRates as InterestRates
from .tokenbank import NodeBank as NodeBank
from .tokenbank import RootBank as RootBank
from .tokenbank import TokenBank as TokenBank
from .tradeexecutor import ImmediateTradeExecutor as ImmediateTradeExecutor
from .tradeexecutor import NullTradeExecutor as NullTradeExecutor
from .tradeexecutor import TradeExecutor as TradeExecutor
from .tradehistory import TradeHistory as TradeHistory
from .transactionscout import TransactionScout as TransactionScout
from .transactionscout import fetch_all_recent_transaction_signatures as fetch_all_recent_transaction_signatures
from .transactionscout import mango_instruction_from_response as mango_instruction_from_response
from .valuation import AccountValuation as AccountValuation
from .valuation import TokenValuation as TokenValuation
from .valuation import Valuation as Valuation
from .version import Version as Version
from .wallet import Wallet as Wallet
from .walletbalancer import FilterSmallChanges as FilterSmallChanges
from .walletbalancer import FixedTargetBalance as FixedTargetBalance
from .walletbalancer import LiveAccountBalancer as LiveAccountBalancer
from .walletbalancer import LiveWalletBalancer as LiveWalletBalancer
from .walletbalancer import NullWalletBalancer as NullWalletBalancer
from .walletbalancer import PercentageTargetBalance as PercentageTargetBalance
from .walletbalancer import TargetBalance as TargetBalance
from .walletbalancer import WalletBalancer as WalletBalancer
from .walletbalancer import calculate_required_balance_changes as calculate_required_balance_changes
from .walletbalancer import parse_fixed_target_balance as parse_fixed_target_balance
from .walletbalancer import parse_target_balance as parse_target_balance
from .walletbalancer import sort_changes_for_trades as sort_changes_for_trades
from .watcher import LamdaUpdateWatcher as LamdaUpdateWatcher
from .watcher import ManualUpdateWatcher as ManualUpdateWatcher
from .watcher import Watcher as Watcher
from .watchers import build_group_watcher as build_group_watcher
from .watchers import build_account_watcher as build_account_watcher
from .watchers import build_cache_watcher as build_cache_watcher
from .watchers import build_spot_open_orders_watcher as build_spot_open_orders_watcher
from .watchers import build_serum_open_orders_watcher as build_serum_open_orders_watcher
from .watchers import build_perp_open_orders_watcher as build_perp_open_orders_watcher
from .watchers import build_price_watcher as build_price_watcher
from .watchers import build_serum_inventory_watcher as build_serum_inventory_watcher
from .watchers import build_orderbook_watcher as build_orderbook_watcher
from .websocketsubscription import IndividualWebSocketSubscriptionManager as IndividualWebSocketSubscriptionManager
from .websocketsubscription import SharedWebSocketSubscriptionManager as SharedWebSocketSubscriptionManager
from .websocketsubscription import WebSocketAccountSubscription as WebSocketAccountSubscription
from .websocketsubscription import WebSocketLogSubscription as WebSocketLogSubscription
from .websocketsubscription import WebSocketProgramSubscription as WebSocketProgramSubscription
from .websocketsubscription import WebSocketSubscription as WebSocketSubscription
from .websocketsubscription import WebSocketSubscriptionManager as WebSocketSubscriptionManager
from .layouts import layouts
import decimal
# Increased precision from 18 to 36 because for a decimal like:
# val = Decimal("17436036573.2030800")
#
# The following rounding operations would both throw decimal.InvalidOperation:
# val.quantize(Decimal('.000000001'))
# round(val, 9)
decimal.getcontext().prec = 36
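
# Worked example (editor's illustration, not part of the original module):
# quantizing the value above to nine decimal places needs 20 significant digits
# (11 integer digits + 9 fractional), so under prec = 18 both operations raise
# decimal.InvalidOperation, while under prec = 36 they succeed:
#
#   >>> from decimal import Decimal, getcontext
#   >>> getcontext().prec = 36
#   >>> Decimal("17436036573.2030800").quantize(Decimal('.000000001'))
#   Decimal('17436036573.203080000')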
| [((17390, 17410), 'decimal.getcontext', 'decimal.getcontext', ([], {}), '()\n', (17408, 17410), False, 'import decimal\n')] |
MariannaJan/LettersOfSherlock | letters_of_sherlock.py | cf356c002078d4e0e6bcf1a669bc8b358680460f | import lettercounter as lc
#Books form Gutenberg Project: https://www.gutenberg.org/ebooks/author/69
lc.showPlots(text_directory_pathname="./Books/",
title="Sir Arthur Conan Doyle's favourite letters",
legend_label_main="in Doyle's stories") | [((103, 253), 'lettercounter.showPlots', 'lc.showPlots', ([], {'text_directory_pathname': '"""./Books/"""', 'title': '"""Sir Arthur Conan Doyle\'s favourite letters"""', 'legend_label_main': '"""in Doyle\'s stories"""'}), '(text_directory_pathname=\'./Books/\', title=\n "Sir Arthur Conan Doyle\'s favourite letters", legend_label_main=\n "in Doyle\'s stories")\n', (115, 253), True, 'import lettercounter as lc\n')] |
mwaitzman/GOF2BountyBot | BB/bbObjects/items/bbTurret.py | b66026228b752b07ac4734ca74b60730dbd74995 | from .bbItem import bbItem
from ...bbConfig import bbData
class bbTurret(bbItem):
dps = 0.0
def __init__(self, name, aliases, dps=0.0, value=0, wiki="", manufacturer="", icon="", emoji=""):
super(bbTurret, self).__init__(name, aliases, value=value, wiki=wiki, manufacturer=manufacturer, icon=icon, emoji=emoji)
self.dps = dps
def statsStringShort(self):
return "*Dps: " + str(self.dps) + "*"
def getType(self):
return bbTurret
def fromDict(turretDict):
if turretDict["builtIn"]:
return bbData.builtInTurretObjs[turretDict["name"]]
else:
return bbTurret(turretDict["name"], turretDict["aliases"], dps=turretDict["dps"], value=turretDict["value"],
wiki=turretDict["wiki"] if "wiki" in turretDict else "", manufacturer=turretDict["manufacturer"] if "manufacturer" in turretDict else "",
icon=turretDict["icon"] if "icon" in turretDict else bbData.rocketIcon, emoji=turretDict["emoji"] if "emoji" in turretDict else "")
| [] |
s-maibuecher/what_can_i_cook | what_can_i_cook/urls.py | 07d0eb1e1862fad299477b800654e895d7f8829a | from django.urls import path
from what_can_i_cook.views import WCICFilterView, WCICResultView
app_name = "wcic"
urlpatterns = [
path("", WCICFilterView.as_view(), name="wcic-start"),
path("results/", WCICResultView.as_view(), name="wcic-results"),
]
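
# Editor's note (illustrative): with app_name = "wcic", these routes resolve
# through the namespace, e.g. reverse("wcic:wcic-start") -> "/" and
# reverse("wcic:wcic-results") -> "/results/".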
| [((145, 169), 'what_can_i_cook.views.WCICFilterView.as_view', 'WCICFilterView.as_view', ([], {}), '()\n', (167, 169), False, 'from what_can_i_cook.views import WCICFilterView, WCICResultView\n'), ((212, 236), 'what_can_i_cook.views.WCICResultView.as_view', 'WCICResultView.as_view', ([], {}), '()\n', (234, 236), False, 'from what_can_i_cook.views import WCICFilterView, WCICResultView\n')] |
justchris1/scap-security-guide | shared/templates/grub2_bootloader_argument/template.py | 030097afa80041fcdffc537a49c09896efedadca | import ssg.utils
def preprocess(data, lang):
data["arg_name_value"] = data["arg_name"] + "=" + data["arg_value"]
if lang == "oval":
# escape dot, this is used in oval regex
data["escaped_arg_name_value"] = data["arg_name_value"].replace(".", "\\.")
# replace . with _, this is used in test / object / state ids
data["sanitized_arg_name"] = ssg.utils.escape_id(data["arg_name"])
return data
| [] |
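To make the hook's effect concrete, a hypothetical input and the fields it would add (assuming the ssg package is importable and that ssg.utils.escape_id maps non-identifier characters to underscores):

data = {"arg_name": "net.ifnames", "arg_value": "0"}  # illustrative kernel argument
result = preprocess(data, "oval")
print(result["arg_name_value"])          # net.ifnames=0
print(result["escaped_arg_name_value"]) # net\.ifnames=0  (dot escaped for the OVAL regex)
print(result["sanitized_arg_name"])     # net_ifnames     (dot replaced for use in ids)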
NNDEV1/NMTWithLuongAttention | preprocess.py | e6f11d9e8c5f999d413fa0dc51219e979a8f975c | import tensorflow as tf
import os
import contractions
import re
import unicodedata
import pandas as pd
import numpy as np
import time
import rich
from rich.progress import track
import spacy
from config import params
#Preprocessing Text
class preprocess_text():
def __init__(self):
pass
def remove_pattern(self, text, pattern= r'[^a-zA-Z0-9.!?, ]', replace_with= ""):
return re.sub(pattern, replace_with, text)
def tokenize_sent(self, text, nlp):
doc= nlp(text)
return [sent.text for sent in doc.sents]
def tokenize_words(self, text, nlp):
doc= nlp(text)
return " ".join(tok.text for tok in doc)
def expand_contractions(self, text):
return contractions.fix(text)
def do_lemmatization(self, text, nlp):
doc= nlp(text)
return ' '.join(tok.lemma_ if tok.lemma_ != "-PRON-" else tok.text for tok in doc)
def add_sos_eos(self, text, sos= False, eos= False):
if (sos and eos):
return "<sos> " + text + " <eos>"
if eos:
return text + " <eos>"
if sos:
return "<sos> " + text
return text
def remove_accents(self, text):
return unicodedata.normalize('NFKD', text).encode('ascii', 'ignore').decode('UTF-8', 'ignore')
def call_preprocessing(df_col, nlp_en= True, lower_= True, remove_pattern_= False, tokenize_words_= False,
expand_contractions_= False, do_lemmatization_= False,
sos= False, eos= False, remove_accents_= False):
nlp= spacy.load('en_core_web_sm') if nlp_en else spacy.load('de_core_news_sm')
prep= preprocess_text()
if expand_contractions_:
df_col= df_col.map(lambda text: prep.expand_contractions(text))
if remove_accents_:
df_col= df_col.map(lambda text: prep.remove_accents(text))
if do_lemmatization_:
df_col= df_col.map(lambda text: prep.do_lemmatization(text, nlp))
if tokenize_words_:
df_col= df_col.map(lambda text: prep.tokenize_words(text, nlp))
if remove_pattern_:
        df_col= df_col.map(lambda text: prep.remove_pattern(text))
if eos or sos:
df_col= df_col.map(lambda text: prep.add_sos_eos(text, sos, eos))
if lower_:
df_col= df_col.map(lambda text: text.lower())
return df_col
def tokenizer(df_col, nlp_en= True):
vocab= set()
_= [[vocab.update([tok]) for tok in text.split(" ")] for text in df_col]
if not nlp_en:
vocab.update(["<sos>"])
vocab.update(["<eos>"])
tokenize= dict(zip(vocab, range(1, 1+len(vocab))))
detokenize= dict(zip(range(1, 1+len(vocab)), vocab))
return tokenize, detokenize, len(vocab)
def padding(txt_toks, max_len):
curr_ls= txt_toks.split(" ")
len_ls= len(curr_ls)
_= [curr_ls.append("<pad>") for i in range(max_len-len_ls) if len(curr_ls)<max_len]
return " ".join(curr_ls)
def make_minibatches(df, col1= 'rev_eng_tok', col2= 'teach_force_tok', col3= 'target_tok'):
enc_seq= np.array([df[col1].values[i] for i in range(len(df[col1]))])
enc_seq= tf.data.Dataset.from_tensor_slices(enc_seq).batch(params.batch_size)
teach_force_seq= np.array([df[col2].values[i] for i in range(len(df[col2]))])
teach_force_seq= tf.data.Dataset.from_tensor_slices(teach_force_seq).batch(params.batch_size)
y= np.array([df[col3].values[i] for i in range(len(df[col3]))])
y= tf.data.Dataset.from_tensor_slices(y).batch(params.batch_size)
return enc_seq, teach_force_seq, y
| [((736, 758), 'contractions.fix', 'contractions.fix', (['text'], {}), '(text)\n', (752, 758), False, 'import contractions\n'), ((1591, 1619), 'spacy.load', 'spacy.load', (['"""en_core_web_sm"""'], {}), "('en_core_web_sm')\n", (1601, 1619), False, 'import spacy\n'), ((1635, 1664), 'spacy.load', 'spacy.load', (['"""de_core_news_sm"""'], {}), "('de_core_news_sm')\n", (1645, 1664), False, 'import spacy\n'), ((3170, 3213), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['enc_seq'], {}), '(enc_seq)\n', (3204, 3213), True, 'import tensorflow as tf\n'), ((3343, 3394), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['teach_force_seq'], {}), '(teach_force_seq)\n', (3377, 3394), True, 'import tensorflow as tf\n'), ((3496, 3533), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['y'], {}), '(y)\n', (3530, 3533), True, 'import tensorflow as tf\n')] |
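A minimal sketch of the helpers above on a toy corpus; a pandas Series stands in for the dataframe column, and max_len is chosen arbitrarily.

import pandas as pd

corpus = pd.Series(["wie geht es dir", "gute nacht"])          # toy German column
corpus = corpus.map(lambda text: "<sos> " + text + " <eos>")  # add start/end markers
tokenize, detokenize, vocab_size = tokenizer(corpus, nlp_en=False)
padded = corpus.map(lambda text: padding(text, max_len=8))
print(padded[0])   # <sos> wie geht es dir <eos> <pad> <pad>
print(vocab_size)  # number of distinct tokens, incl. <sos>/<eos>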
johannesulf/dsigma | setup.py | 729337c94669f4a0fdacb51b175df1e13e26304c | from setuptools import setup, find_packages
from distutils.extension import Extension
from distutils.command.sdist import sdist
try:
from Cython.Build import cythonize
USE_CYTHON = True
except ImportError:
USE_CYTHON = False
ext = 'pyx' if USE_CYTHON else 'c'
extensions = [Extension(
'dsigma.precompute_engine', ['dsigma/precompute_engine.{}'.format(ext)],
extra_compile_args=['-Ofast', '-march=native'])]
if USE_CYTHON:
extensions = cythonize(extensions)
class sdist_with_cythonize(sdist):
def run(self):
cythonize(['dsigma/precompute_engine.pyx'])
sdist.run(self)
with open('README.md', 'r') as fstream:
long_description = fstream.read()
setup(
name='dsigma',
version='0.5.0',
description=('A Galaxy-Galaxy Lensing Pipeline'),
long_description=long_description,
long_description_content_type='text/markdown',
classifiers=[
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Astronomy',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
keywords='astronomy, weak-lensing',
url='https://github.com/johannesulf/dsigma',
author='Johannes Lange, Song Huang',
author_email='[email protected]',
packages=find_packages(),
install_requires=['numpy', 'astropy', 'scipy', 'scikit-learn',
'healpy'],
python_requires='>=3.4',
ext_modules=extensions,
cmdclass={'sdist': sdist_with_cythonize}
)
| [((462, 483), 'Cython.Build.cythonize', 'cythonize', (['extensions'], {}), '(extensions)\n', (471, 483), False, 'from Cython.Build import cythonize\n'), ((548, 591), 'Cython.Build.cythonize', 'cythonize', (["['dsigma/precompute_engine.pyx']"], {}), "(['dsigma/precompute_engine.pyx'])\n", (557, 591), False, 'from Cython.Build import cythonize\n'), ((600, 615), 'distutils.command.sdist.sdist.run', 'sdist.run', (['self'], {}), '(self)\n', (609, 615), False, 'from distutils.command.sdist import sdist\n'), ((1444, 1459), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1457, 1459), False, 'from setuptools import setup, find_packages\n')] |
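The try/except above lets the package build from a pre-generated C file when Cython is absent, while sdist_with_cythonize regenerates it at packaging time; a hypothetical release flow:

# Maintainer, with Cython installed (regenerates dsigma/precompute_engine.c):
#   python setup.py sdist
# End user, installing the resulting tarball (compiles the shipped C file,
# no Cython required):
#   pip install dsigma-0.5.0.tar.gz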
jtfan3/face_detection | face_detector/modules/mod_faceDetection.py | 82e3bc839bf12c956f3166c07012912a0638048f | import cv2
import mediapipe as mp
class FaceDetection():
# initialize the face detection class with arguments from https://google.github.io/mediapipe/solutions/face_detection.html
def __init__(self, model_selection = 0, threshold = 0.5):
self.model_selection = model_selection
self.threshold = threshold
self.mp_draw = mp.solutions.drawing_utils
self.face_detection = mp.solutions.face_detection.FaceDetection(model_selection = self.model_selection, min_detection_confidence = self.threshold)
    # gets bounding boxes using self.face_detection; returns a list of elements, each element = (score, bbox_dict)
def get_bboxs(self, frame):
mp_detections = self.face_detection.process(frame)
score_bboxs = []
if mp_detections.detections:
for detection in mp_detections.detections:
score = detection.score[0]
mp_bbox = detection.location_data.relative_bounding_box
bbox_dict = {
'x_min': mp_bbox.xmin,
'y_min': mp_bbox.ymin,
'w': mp_bbox.width,
'h': mp_bbox.height
}
score_bboxs.append([score, bbox_dict])
return score_bboxs
# draws the bbox onto the frame
def draw_bbox(self, face_probs, bbox_dict, frame, col = (255, 0, 255), gender = None, gender_score = None):
x_min, y_min, w, h = bbox_dict.values()
frame_h, frame_w, _ = frame.shape
bbox = int(x_min * frame_w), int(y_min * frame_h), int(w * frame_w), int(h * frame_h)
        # prepare text, depending on what attributes we predict
text = str(round(face_probs, 3))
if gender:
text = gender + ": " + str(round(gender_score, 2))
# draw bbox
cv2.rectangle(frame, bbox, col, 2)
cv2.putText(frame, text, (bbox[0], bbox[1] - 10),
cv2.FONT_HERSHEY_COMPLEX, 0.5, col, 1)
| [((410, 535), 'mediapipe.solutions.face_detection.FaceDetection', 'mp.solutions.face_detection.FaceDetection', ([], {'model_selection': 'self.model_selection', 'min_detection_confidence': 'self.threshold'}), '(model_selection=self.\n model_selection, min_detection_confidence=self.threshold)\n', (451, 535), True, 'import mediapipe as mp\n'), ((1814, 1848), 'cv2.rectangle', 'cv2.rectangle', (['frame', 'bbox', 'col', '(2)'], {}), '(frame, bbox, col, 2)\n', (1827, 1848), False, 'import cv2\n'), ((1857, 1949), 'cv2.putText', 'cv2.putText', (['frame', 'text', '(bbox[0], bbox[1] - 10)', 'cv2.FONT_HERSHEY_COMPLEX', '(0.5)', 'col', '(1)'], {}), '(frame, text, (bbox[0], bbox[1] - 10), cv2.FONT_HERSHEY_COMPLEX,\n 0.5, col, 1)\n', (1868, 1949), False, 'import cv2\n')] |
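MediaPipe reports boxes in coordinates normalized to [0, 1], which is why draw_bbox multiplies by the frame size; a worked example with made-up numbers:

frame_w, frame_h = 640, 480
x_min, y_min, w, h = 0.25, 0.10, 0.20, 0.30
bbox = int(x_min * frame_w), int(y_min * frame_h), int(w * frame_w), int(h * frame_h)
print(bbox)  # (160, 48, 128, 144) -- the pixel box handed to cv2.rectangle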
bmj-hackathon/ethberlinzwei-babelfish_3_0 | backend/0_publish_audio.py | e986ad1b9fa896f20d7cdd296d130d804f55ecfa | import sys
import logging
# loggers_dict = logging.Logger.manager.loggerDict
#
# logger = logging.getLogger()
# logger.handlers = []
#
# # Set level
# logger.setLevel(logging.DEBUG)
#
# # FORMAT = "%(asctime)s - %(levelno)s - %(module)-15s - %(funcName)-15s - %(message)s"
# # FORMAT = "%(asctime)s %(levelno)s: %(module)30s %(message)s"
# FORMAT = "%(levelno)s - %(module)-15s - %(funcName)-15s - %(message)s"
#
# DATE_FMT = "%Y-%m-%d %H:%M:%S"
# DATE_FMT = "%Y-%m-%d %H:%M:%S"
# formatter = logging.Formatter(FORMAT, DATE_FMT)
#
# # Create handler and assign
# handler = logging.StreamHandler(sys.stderr)
# handler.setFormatter(formatter)
# logger.handlers = [handler]
# logger.debug("Logging started")
#%%
# Standard imports
import os
from pathlib import Path
import json
from time import sleep
# Ocean imports
import squid_py
from squid_py.ocean.ocean import Ocean
from squid_py.config import Config
from pprint import pprint
import mantaray_utilities as manta_utils
from mantaray_utilities.user import password_map
#%% CONFIG
OCEAN_CONFIG_PATH = Path().cwd() / 'config_nile.ini'
assert OCEAN_CONFIG_PATH.exists(), "{} - path does not exist".format(OCEAN_CONFIG_PATH)
os.environ['OCEAN_CONFIG_PATH'] = str(OCEAN_CONFIG_PATH)
PASSWORD_PATH=Path().cwd() / ".nile_passwords"
assert PASSWORD_PATH.exists()
os.environ["PASSWORD_PATH"] = str(PASSWORD_PATH)
MARKET_PLACE_PROVIDER_ADDRESS="0x376817c638d2a04f475a73af37f7b51a2862d567"
os.environ["MARKET_PLACE_PROVIDER_ADDRESS"] = MARKET_PLACE_PROVIDER_ADDRESS
JSON_TEMPLATE = Path().cwd() / 'metadata_template.json'
assert JSON_TEMPLATE.exists()
#%% ARGPARSE
import argparse
parser = argparse.ArgumentParser(description='Publish audio')
parser.add_argument('--url', type=str, help='URL for input audio file')
parser.add_argument('--price', type=int, help='Selling price in Ocean token')
parser.add_argument('--reward', type=int, help='Reward offered in Ocean token')
parser.add_argument('--number-nodes', type=int, help='Number of processor nodes requested')
args = parser.parse_args()
logging.info("************************************************************".format())
logging.info("*** ETHBERLINZWEI HACKATHON ***".format())
logging.info("*** SPEECH2TEXT ***".format())
logging.info("*** STEP 1 - CLIENT REGISTERS A CLIP INTO OCEAN PROTOCOL ***".format())
logging.info("************************************************************".format())
logging.info("".format())
logging.info("(Step 1.1 not implemented - upload audio file from client to storage)".format())
logging.info("Publishing Audio to NILE network: {}".format(args.url))
logging.info("Will set price to {} OCEAN".format(args.price))
logging.info("Offering {} OCEAN reward".format(args.reward))
logging.info("Requesting {} processors".format(args.number_nodes))
logging.info("".format())
#%%
# Get the configuration file path for this environment
logging.info("Configuration file selected: {}".format(OCEAN_CONFIG_PATH))
# logging.critical("Deployment type: {}".format(manta_utils.config.get_deployment_type()))
logging.info("Squid API version: {}".format(squid_py.__version__))
#%%
# Instantiate Ocean with the default configuration file.
configuration = Config(OCEAN_CONFIG_PATH)
squid_py.ConfigProvider.set_config(configuration)
ocn = Ocean(configuration)
#%%
# Get a publisher account
publisher_acct = manta_utils.user.get_account_by_index(ocn,0)
#%%
logging.info("Publisher account address: {}".format(publisher_acct.address))
logging.info("Publisher account Testnet 'ETH' balance: {:>6.1f}".format(ocn.accounts.balance(publisher_acct).eth/10**18))
logging.info("Publisher account Testnet Ocean balance: {:>6.1f}".format(ocn.accounts.balance(publisher_acct).ocn/10**18))
def publish(url, price, reward, number_nodes):
# metadata = squid_py.ddo.metadata.Metadata.get_example()
# print('Name of asset:', metadata['base']['name'])
with open(JSON_TEMPLATE, 'r') as f:
metadata = json.load(f)
metadata['base']['files'][0]['url'] = url
metadata['base']['price'] = str(price)
metadata['additionalInformation']['reward'] = str(reward)
metadata['additionalInformation']['numberNodes'] = str(number_nodes)
ddo = ocn.assets.create(metadata, publisher_acct)
registered_did = ddo.did
logging.info("New asset registered at {}".format(str(registered_did)))
logging.info("Asset name: {}".format(metadata['base']['name']))
logging.info("Encrypted files to secret store, cipher text: [{}...] . ".format(ddo.metadata['base']['encryptedFiles'][:50]))
return registered_did
registered_did = publish(args.url, args.price, args.reward, args.number_nodes)
#TODO: Better handling based on receipt
print("Wait for the transaction to complete!")
sleep(10)
# %%
ddo = ocn.assets.resolve(registered_did)
# print("Asset '{}' resolved from Aquarius metadata storage: {}".format(ddo.did,ddo.metadata['base']['name']))
# %% [markdown]
# Similarly, we can verify that this asset is registered into the blockchain, and that you are the owner.
# %%
# We need the pure ID string as in the DID registry (a DID without the prefixes)
asset_id = squid_py.did.did_to_id(registered_did)
owner = ocn._keeper.did_registry.contract_concise.getDIDOwner(asset_id)
# print("Asset ID", asset_id, "owned by", owner)
assert str.lower(owner) == str.lower(publisher_acct.address)
logging.info("".format())
logging.info("Successfully registered Audio!".format())
logging.info("Asset Owner: {}".format(owner))
logging.info("Asset DID: {}".format(registered_did))
| [((1639, 1691), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Publish audio"""'}), "(description='Publish audio')\n", (1662, 1691), False, 'import argparse\n'), ((3251, 3276), 'squid_py.config.Config', 'Config', (['OCEAN_CONFIG_PATH'], {}), '(OCEAN_CONFIG_PATH)\n', (3257, 3276), False, 'from squid_py.config import Config\n'), ((3277, 3326), 'squid_py.ConfigProvider.set_config', 'squid_py.ConfigProvider.set_config', (['configuration'], {}), '(configuration)\n', (3311, 3326), False, 'import squid_py\n'), ((3333, 3353), 'squid_py.ocean.ocean.Ocean', 'Ocean', (['configuration'], {}), '(configuration)\n', (3338, 3353), False, 'from squid_py.ocean.ocean import Ocean\n'), ((3403, 3448), 'mantaray_utilities.user.get_account_by_index', 'manta_utils.user.get_account_by_index', (['ocn', '(0)'], {}), '(ocn, 0)\n', (3440, 3448), True, 'import mantaray_utilities as manta_utils\n'), ((4788, 4797), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (4793, 4797), False, 'from time import sleep\n'), ((5176, 5214), 'squid_py.did.did_to_id', 'squid_py.did.did_to_id', (['registered_did'], {}), '(registered_did)\n', (5198, 5214), False, 'import squid_py\n'), ((4000, 4012), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4009, 4012), False, 'import json\n'), ((1055, 1061), 'pathlib.Path', 'Path', ([], {}), '()\n', (1059, 1061), False, 'from pathlib import Path\n'), ((1248, 1254), 'pathlib.Path', 'Path', ([], {}), '()\n', (1252, 1254), False, 'from pathlib import Path\n'), ((1529, 1535), 'pathlib.Path', 'Path', ([], {}), '()\n', (1533, 1535), False, 'from pathlib import Path\n')] |
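For orientation, a hypothetical invocation of the script above (all values invented; the URL must point at a reachable audio file):

#   python 0_publish_audio.py --url https://example.com/clip.mp3 \
#       --price 10 --reward 5 --number-nodes 3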
guillerminaamorin/pyRofex | src/pyRofex/components/messages.py | 14fd623ab1f1a3213e51a9454485ed478912075f | # -*- coding: utf-8 -*-
"""
pyRofex.components.messages
Defines APIs messages templates
"""
# Template for a Market Data Subscription message
MARKET_DATA_SUBSCRIPTION = '{{"type":"smd","level":1, "entries":[{entries}],"products":[{symbols}]}}'
# Template for an Order Subscription message
ORDER_SUBSCRIPTION = '{{"type":"os","account":{{"id":"{a}"}},"snapshotOnlyActive":{snapshot}}}'
# Template to specify an instrument in a market data subscription message
INSTRUMENT = '{{"symbol":"{ticker}","marketId":"{market}"}}'
# Template to insert a Double Quote
DOUBLE_QUOTES = '"{item}"'
| [] |
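The doubled braces in these templates escape str.format, so only the named placeholders are substituted; a quick sketch of assembling a market-data subscription (entry and instrument names are illustrative):

entries = ",".join(DOUBLE_QUOTES.format(item=e) for e in ("BI", "OF"))
symbols = INSTRUMENT.format(ticker="DLR/DIC23", market="ROFX")
message = MARKET_DATA_SUBSCRIPTION.format(entries=entries, symbols=symbols)
print(message)
# {"type":"smd","level":1, "entries":["BI","OF"],"products":[{"symbol":"DLR/DIC23","marketId":"ROFX"}]}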
duboviy/async | course/task_6/flask_app.py | 5055daddc66e5335fb772aeb59493cc63e4a2739 | #!/usr/bin/env python3.4
from flask import Flask
import requests
from fibonacci import fibonacci as fib
app = Flask(__name__)
@app.route('/count/<key>')
def count(key):
return requests.get('http://127.0.0.1:8080/count/{}'.format(key)).text
@app.route('/fibonacci/<n>')
def fibonacci(n):
return str(fib(int(n)))
if __name__ == "__main__":
app.run(host='0.0.0.0', port=8082, debug=True)
| [((112, 127), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (117, 127), False, 'from flask import Flask\n')] |
Patralos/nexpose-client-python | nexpose/nexpose_vulnerabilityexception.py | bec81da29883b1b004046e29a9e7f7a6686467c1 | # Future Imports for py2/3 backwards compat.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from builtins import object
from .xml_utils import get_attribute, get_content_of
from future import standard_library
standard_library.install_aliases()
def fix_null(data):
if data == 'null':
return 0
return data
class VulnerabilityExceptionStatus(object):
UNDER_REVIEW = "Under Review"
APPROVED = "Approved"
REJECTED = "Rejected"
DELETED = "Deleted" # This state is also used for recalled exceptions!
class VulnerabilityExceptionReason(object):
FALSE_POSITIVE = "False Positive"
COMPENSATING_CONTROL = "Compensating Control"
ACCEPTABLE_USE = "Acceptable Use"
ACCEPTABLE_RISK = "Acceptable Risk"
OTHER = "Other"
class VulnerabilityExceptionScope(object):
ALL_INSTANCES = "All Instances"
ALL_INSTANCES_SPECIFIC_ASSET = "All Instances on a Specific Asset"
ALL_INSTANCES_SPECIFIC_SITE = "All Instances on a Specific Site"
SPECIFIC_INSTANCE_SPECIFIC_ASSET = "Specific Instance of Specific Asset"
class SiloVulnerabilityExceptionDetails(object):
@staticmethod
def CreateFromXML(xml_data):
details = SiloVulnerabilityExceptionDetails()
details.silo_id = get_attribute(xml_data, 'siloId', details.silo_id)
details.oldest_exception_creation_date = get_attribute(xml_data, 'oldestExceptionCreationDate', details.oldest_exception_creation_date) # TODO: date object
details.pending_exception_count = get_attribute(xml_data, 'pendingVulnExceptionsCount', details.pending_exception_count)
return details
def __init__(self):
self.silo_id = ''
self.oldest_exception_creation_date = 'N/A' # TODO: date object
self.pending_exception_count = 0
class VulnerabilityException(object):
@staticmethod
def CreateFromXML(xml_data):
details = VulnerabilityException()
details.id = int(get_attribute(xml_data, 'exception-id', details.id))
details.vulnerability_id = get_attribute(xml_data, 'vuln-id', details.vulnerability_id)
details.vulnerability_key = get_attribute(xml_data, 'vuln-key', details.vulnerability_key)
details.expiration_date = get_attribute(xml_data, 'expiration-date', details.expiration_date) # TODO: date object
details.submitter = get_attribute(xml_data, 'submitter', details.submitter)
details.submitter_comment = get_content_of(xml_data, 'submitter-comment', details.submitter_comment)
details.reviewer = get_attribute(xml_data, 'reviewer', details.reviewer)
details.reviewer_comment = get_content_of(xml_data, 'reviewer-comment', details.reviewer_comment)
details.status = get_attribute(xml_data, 'status', details.status)
details.reason = get_attribute(xml_data, 'reason', details.reason)
details.scope = get_attribute(xml_data, 'scope', details.scope)
details.asset_id = int(fix_null(get_attribute(xml_data, 'device-id', details.asset_id)))
details.asset_port = int(fix_null(get_attribute(xml_data, 'port-no', details.asset_port)))
return details
def __init__(self):
self.id = 0
self.vulnerability_id = ''
self.vulnerability_key = ''
self.expiration_date = '' # TODO: date object
self.submitter = ''
self.submitter_comment = ''
self.reviewer = ''
self.reviewer_comment = ''
self.status = ''
self.reason = ''
self.scope = ''
self.asset_id = 0
self.asset_port = 0
| [((271, 305), 'future.standard_library.install_aliases', 'standard_library.install_aliases', ([], {}), '()\n', (303, 305), False, 'from future import standard_library\n')] |
captainTOKIO/Premchand_Aug2022_fullstack_august_python1 | myproject/IND_Project/backend/signup/apps.py | 5fbbdd106a764c2f862cf933fdcd69d6bf4ebdf0 | from django.apps import AppConfig
class SignupConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'signup'
| [] |
cherish-web/pymc | pymc/mc_enum.py | 9c322abfdcceca0a78b633d85da23e1290c036c8 | # _*_ coding: utf-8 _*_
# @Time : 2021/3/29 08:57 AM
# @Author : cherish_peng
# @Email : [email protected]
# @File : cmd.py
# @Software : PyCharm
from enum import Enum
class EnumSubTitle(Enum):
Request4e = 0x5400
    # Request
    Request = 0x5000
    # Response
    Respond = 0xD000
Respond4e = 0xD400
class EnumEndCode(Enum):
    # Normal response
    Ok = 0x0000
    # Error response
    Err = 0x51C0
class EnumCmd(Enum):
    # Batch read
    ReadBatch = 0x0401
    # Batch write
    WriteBatch = 0x1401
class EnumSubCmd(Enum):
    # Memory extension module b7=0, b6=0: except for random read and monitor data registration
    # Bit-wise read/write
    Bit = 0x0001
    # Word-wise read/write
    Word = 0x0000
    # Memory extension module b7=1, b6=0: except for random read and monitor data registration
    # Bit-wise read/write
    BitEx = 0x0081
    # Word-wise read/write
    WordEx = 0x0080
class EnumType(Enum):
    # Bit type
    Bit = 0
    # Word type
    Word = 1
| [] |
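A sketch of how these constants might be combined when building a request; the layout below is a simplified illustration, not the complete MC 3E/4E frame:

import struct

header = struct.pack(
    "<HHH",                      # little-endian 16-bit words
    EnumSubTitle.Request.value,  # 0x5000
    EnumCmd.ReadBatch.value,     # 0x0401
    EnumSubCmd.Word.value,       # 0x0000
)
print(header.hex())  # 005001040000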
getsentry/sentry-data-schemas | py/sentry_data_schemas/__init__.py | 6b49188a66a24663737c4f5cf4708fe992d011c2 | from importlib.resources import path
from jsonschema_typed import JSONSchema
with path("sentry_data_schemas", "event.schema.json") as schema_path:
EventData = JSONSchema["var:sentry_data_schemas:schema_path"]
| [((83, 131), 'importlib.resources.path', 'path', (['"""sentry_data_schemas"""', '"""event.schema.json"""'], {}), "('sentry_data_schemas', 'event.schema.json')\n", (87, 131), False, 'from importlib.resources import path\n')] |
faroit/deep-fireball | predict.py | b37d08cb5b15359c363e7816fc7c163c1709a5ac | # elsewhere...
import pandas as pd
from keras.models import model_from_json
import random
import sys
import numpy as np
maxlen = 15
step = 3
df = pd.read_pickle('articles.pandas')
text = str.join(' ', df.text.tolist())
chars = set(text)
print('total chars:', len(chars))
char_indices = dict((c, i) for i, c in enumerate(chars))
indices_char = dict((i, c) for i, c in enumerate(chars))
start_index = random.randint(0, len(text) - maxlen - 1)
model = model_from_json(open('model.json').read())
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.load_weights('weights.h5')
def sample(a, temperature=1.0):
# helper function to sample an index from a probability array
a = np.log(a) / temperature
a = np.exp(a) / np.sum(np.exp(a))
return np.argmax(np.random.multinomial(1, a, 1))
for diversity in [0.25]:
print()
print('----- diversity:', diversity)
generated = ''
sentence = text[start_index: start_index + maxlen]
generated += sentence
print('----- Generating with seed: "' + sentence + '"')
sys.stdout.write(generated)
for i in range(200):
x = np.zeros((1, maxlen, len(chars)))
for t, char in enumerate(sentence):
x[0, t, char_indices[char]] = 1.
preds = model.predict(x, verbose=0)[0]
next_index = sample(preds, diversity)
next_char = indices_char[next_index]
generated += next_char
sentence = sentence[1:] + next_char
sys.stdout.write(next_char)
sys.stdout.flush()
print()
| [((149, 182), 'pandas.read_pickle', 'pd.read_pickle', (['"""articles.pandas"""'], {}), "('articles.pandas')\n", (163, 182), True, 'import pandas as pd\n'), ((1068, 1095), 'sys.stdout.write', 'sys.stdout.write', (['generated'], {}), '(generated)\n', (1084, 1095), False, 'import sys\n'), ((708, 717), 'numpy.log', 'np.log', (['a'], {}), '(a)\n', (714, 717), True, 'import numpy as np\n'), ((740, 749), 'numpy.exp', 'np.exp', (['a'], {}), '(a)\n', (746, 749), True, 'import numpy as np\n'), ((791, 821), 'numpy.random.multinomial', 'np.random.multinomial', (['(1)', 'a', '(1)'], {}), '(1, a, 1)\n', (812, 821), True, 'import numpy as np\n'), ((1481, 1508), 'sys.stdout.write', 'sys.stdout.write', (['next_char'], {}), '(next_char)\n', (1497, 1508), False, 'import sys\n'), ((1517, 1535), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1533, 1535), False, 'import sys\n'), ((759, 768), 'numpy.exp', 'np.exp', (['a'], {}), '(a)\n', (765, 768), True, 'import numpy as np\n')] |
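The sample() helper implements temperature scaling: divide log-probabilities by the temperature, re-normalize with a softmax, then draw from the multinomial. A standalone sketch of the effect on a toy distribution:

import numpy as np

probs = np.array([0.6, 0.3, 0.1])
for t in (0.25, 1.0, 2.0):
    scaled = np.exp(np.log(probs) / t)
    scaled /= scaled.sum()
    print(t, np.round(scaled, 3))
# t < 1 sharpens the distribution toward the argmax (more conservative text);
# t > 1 flattens it toward uniform (more surprising text).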
Zavioer/SIR-simulation-IBM-ESI | tests/simulation/test_container.py | 45a7b1d4f0e3cec8dcd8284e00f25386b6e77c58 | import unittest
from simulation import container
from simulation import person
class ContainerTestCase(unittest.TestCase):
def setUp(self) -> None:
self.box = container.Container(100, 1000, 300, 1, 0.5)
self.s_instance = person.Person(x=0, y=0, infection_probability=0.25,
recover_probability=0.2, dead_probability=0.05,
infection_range=0.8)
def test_01__check_if_dimensions_was_set_correctly(self):
width = 100
height = 100
self.assertEqual(self.box.width, width,
msg="Container width was set incorrect.")
self.assertEqual(self.box.height, height,
msg="Container height was set incorrect.")
print("> (test_01) Container dimensions are set correctly.")
def test_02__check_if_new_object_added_correctly_to_objects_list(self):
self.box.add_instances(1, "susceptible", infection_probability=0.4,
recover_probability=0.2,
dead_probability=0.05, infection_range=1.25)
self.assertEqual(len(self.box.object_list), 1,
msg="New instance was not correctly added to"
"objects list.")
print("> (test_02) New instance correctly added to object_list.")
def test_03__check_if_container_time_to_live_not_elapsed__return_bool(self):
self.assertIsInstance(self.box.is_alive(), bool,
msg="Box method is_alive was not return bool.")
print("> (test_03) Method is_alive() returns bool type.")
def test_04__check_if_container_lives_in_elapsed_time(self):
self.box.time_to_live = 0
self.assertFalse(self.box.is_alive(), msg="Container instance lives longer"
"than time_to_live attribute.")
print("> (test_04) Container can not have more cycles than time_to_live "
"attribute specified.")
def test_05__check_if_action_time_interval_is_positive(self):
self.assertGreater(self.box.action_interval, 0,
msg="action_interval parameters allows to insert"
"negative values.")
print("> (test_05) Parameter action_interval can not allows to insert "
"negative values.")
def test_06__check_if_container_can_lives(self):
self.box.time_to_live = 100
self.assertTrue(self.box.is_alive(), msg="Container does not live in "
"correctly specified time_to_live.")
print("> (test_06) Container live correctly base on time_to_live"
" parameter.")
def test_07__check_if_possible_move_distance_is_positive(self):
self.assertGreater(self.box.move_distance_length, 0,
msg="move_distance parameter value can be negative.")
print("> (test_07) Parameter move_distance can not be negative.")
def test_08__check_if_possible_move_distance_is_less_than_container_size(self):
self.assertLess(self.box.move_distance_length, self.box.width,
msg="Parameter move_distance can be longer than"
"container size.")
print("> (test_08) Parameter move_distance is smaller than container size.")
def test_09__check_if_action_time_interval_is_less_than_minute(self):
self.assertLessEqual(self.box.action_interval, 60,
msg="action_time_interval could be greater than"
"minute.")
print("> (test_09) Parameter time_interval could not be greater than minute.")
def test_10__check_if_group_could_be_grater_than_population(self):
self.assertRaises(ValueError, self.box.initial_set_up, 900, 100, 10, 0,
infection_probability=0.4, recover_probability=0.2,
dead_probability=0.05, infection_range=1.25)
print("> (test_10) All specified groups can not be greater than population.")
if __name__ == '__main__':
unittest.main()
| [((4184, 4199), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4197, 4199), False, 'import unittest\n'), ((173, 216), 'simulation.container.Container', 'container.Container', (['(100)', '(1000)', '(300)', '(1)', '(0.5)'], {}), '(100, 1000, 300, 1, 0.5)\n', (192, 216), False, 'from simulation import container\n'), ((243, 367), 'simulation.person.Person', 'person.Person', ([], {'x': '(0)', 'y': '(0)', 'infection_probability': '(0.25)', 'recover_probability': '(0.2)', 'dead_probability': '(0.05)', 'infection_range': '(0.8)'}), '(x=0, y=0, infection_probability=0.25, recover_probability=0.2,\n dead_probability=0.05, infection_range=0.8)\n', (256, 367), False, 'from simulation import person\n')] |
Tratty/pontoon | pontoon/base/migrations/0007_auto_20150710_0944.py | ecb903d72f9274f02137b16669cc3c5859f6329c | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import pontoon.base.models
class Migration(migrations.Migration):
dependencies = [
("base", "0006_auto_20150602_0616"),
]
operations = [
migrations.AddField(
model_name="locale",
name="cldr_plurals",
field=models.CommaSeparatedIntegerField(
blank=True,
max_length=11,
verbose_name=b"CLDR Plurals",
validators=[pontoon.base.models.validate_cldr],
),
),
migrations.AlterField(
model_name="resource",
name="format",
field=models.CharField(
blank=True,
max_length=20,
verbose_name=b"Format",
choices=[
(b"po", b"po"),
(b"xliff", b"xliff"),
(b"properties", b"properties"),
(b"dtd", b"dtd"),
(b"inc", b"inc"),
(b"ini", b"ini"),
(b"lang", b"lang"),
(b"l20n", b"l20n"),
],
),
),
migrations.AlterField(
model_name="translation",
name="date",
field=models.DateTimeField(auto_now_add=True),
),
]
| [((380, 523), 'django.db.models.CommaSeparatedIntegerField', 'models.CommaSeparatedIntegerField', ([], {'blank': '(True)', 'max_length': '(11)', 'verbose_name': "b'CLDR Plurals'", 'validators': '[pontoon.base.models.validate_cldr]'}), "(blank=True, max_length=11, verbose_name=\n b'CLDR Plurals', validators=[pontoon.base.models.validate_cldr])\n", (413, 523), False, 'from django.db import models, migrations\n'), ((721, 976), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(20)', 'verbose_name': "b'Format'", 'choices': "[(b'po', b'po'), (b'xliff', b'xliff'), (b'properties', b'properties'), (\n b'dtd', b'dtd'), (b'inc', b'inc'), (b'ini', b'ini'), (b'lang', b'lang'),\n (b'l20n', b'l20n')]"}), "(blank=True, max_length=20, verbose_name=b'Format', choices\n =[(b'po', b'po'), (b'xliff', b'xliff'), (b'properties', b'properties'),\n (b'dtd', b'dtd'), (b'inc', b'inc'), (b'ini', b'ini'), (b'lang', b'lang'\n ), (b'l20n', b'l20n')])\n", (737, 976), False, 'from django.db import models, migrations\n'), ((1345, 1384), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1365, 1384), False, 'from django.db import models, migrations\n')] |
ufo2011/platformio-core | platformio/project/commands/init.py | 0ceae62701731f8b32c34d7993a34dea34aea59c | # Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=line-too-long,too-many-arguments,too-many-locals
import json
import os
import click
from platformio import fs
from platformio.package.commands.install import install_project_dependencies
from platformio.package.manager.platform import PlatformPackageManager
from platformio.platform.exception import UnknownBoard
from platformio.project.config import ProjectConfig
from platformio.project.generator import ProjectGenerator
from platformio.project.helpers import is_platformio_project
def validate_boards(ctx, param, value): # pylint: disable=W0613
pm = PlatformPackageManager()
for id_ in value:
try:
pm.board_config(id_)
except UnknownBoard:
raise click.BadParameter(
"`%s`. Please search for board ID using `platformio boards` "
"command" % id_
)
return value
@click.command("init", short_help="Initialize a project or update existing")
@click.option(
"--project-dir",
"-d",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
@click.option("--ide", type=click.Choice(ProjectGenerator.get_supported_ides()))
@click.option("-e", "--environment", help="Update existing environment")
@click.option("-O", "--project-option", multiple=True)
@click.option("--env-prefix", default="")
@click.option("--no-install-dependencies", is_flag=True)
@click.option("-s", "--silent", is_flag=True)
def project_init_cmd(
project_dir,
board,
ide,
environment,
project_option,
env_prefix,
no_install_dependencies,
silent,
):
is_new_project = not is_platformio_project(project_dir)
if is_new_project:
if not silent:
print_header(project_dir)
init_base_project(project_dir)
if environment:
update_project_env(project_dir, environment, project_option)
elif board:
update_board_envs(project_dir, board, project_option, env_prefix)
# resolve project dependencies
if not no_install_dependencies and (environment or board):
install_project_dependencies(
options=dict(
project_dir=project_dir,
environments=[environment] if environment else [],
silent=silent,
)
)
if ide:
if not silent:
click.echo(
"Updating metadata for the %s IDE..." % click.style(ide, fg="cyan")
)
with fs.cd(project_dir):
config = ProjectConfig.get_instance(
os.path.join(project_dir, "platformio.ini")
)
config.validate()
ProjectGenerator(config, environment, ide, board).generate()
if is_new_project:
init_cvs_ignore(project_dir)
if not silent:
print_footer(is_new_project)
def print_header(project_dir):
if project_dir == os.getcwd():
click.secho("\nThe current working directory ", fg="yellow", nl=False)
try:
click.secho(project_dir, fg="cyan", nl=False)
except UnicodeEncodeError:
click.secho(json.dumps(project_dir), fg="cyan", nl=False)
click.secho(" will be used for the project.", fg="yellow")
click.echo("")
click.echo("The next files/directories have been created in ", nl=False)
try:
click.secho(project_dir, fg="cyan")
except UnicodeEncodeError:
click.secho(json.dumps(project_dir), fg="cyan")
click.echo("%s - Put project header files here" % click.style("include", fg="cyan"))
click.echo(
"%s - Put here project specific (private) libraries"
% click.style("lib", fg="cyan")
)
click.echo("%s - Put project source files here" % click.style("src", fg="cyan"))
click.echo(
"%s - Project Configuration File" % click.style("platformio.ini", fg="cyan")
)
def print_footer(is_new_project):
if is_new_project:
return click.secho(
"\nProject has been successfully initialized! Useful commands:\n"
"`pio run` - process/build project from the current directory\n"
"`pio run --target upload` or `pio run -t upload` "
"- upload firmware to a target\n"
"`pio run --target clean` - clean project (remove compiled files)"
"\n`pio run --help` - additional information",
fg="green",
)
return click.secho(
"Project has been successfully updated!",
fg="green",
)
def init_base_project(project_dir):
with fs.cd(project_dir):
config = ProjectConfig()
config.save()
dir_to_readme = [
(config.get("platformio", "src_dir"), None),
(config.get("platformio", "include_dir"), init_include_readme),
(config.get("platformio", "lib_dir"), init_lib_readme),
(config.get("platformio", "test_dir"), init_test_readme),
]
for (path, cb) in dir_to_readme:
if os.path.isdir(path):
continue
os.makedirs(path)
if cb:
cb(path)
def init_include_readme(include_dir):
with open(os.path.join(include_dir, "README"), mode="w", encoding="utf8") as fp:
fp.write(
"""
This directory is intended for project header files.
A header file is a file containing C declarations and macro definitions
to be shared between several project source files. You request the use of a
header file in your project source file (C, C++, etc) located in `src` folder
by including it, with the C preprocessing directive `#include'.
```src/main.c
#include "header.h"
int main (void)
{
...
}
```
Including a header file produces the same results as copying the header file
into each source file that needs it. Such copying would be time-consuming
and error-prone. With a header file, the related declarations appear
in only one place. If they need to be changed, they can be changed in one
place, and programs that include the header file will automatically use the
new version when next recompiled. The header file eliminates the labor of
finding and changing all the copies as well as the risk that a failure to
find one copy will result in inconsistencies within a program.
In C, the usual convention is to give header files names that end with `.h'.
It is most portable to use only letters, digits, dashes, and underscores in
header file names, and at most one dot.
Read more about using header files in official GCC documentation:
* Include Syntax
* Include Operation
* Once-Only Headers
* Computed Includes
https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
""",
)
def init_lib_readme(lib_dir):
with open(os.path.join(lib_dir, "README"), mode="w", encoding="utf8") as fp:
fp.write(
"""
This directory is intended for project specific (private) libraries.
PlatformIO will compile them to static libraries and link into executable file.
The source code of each library should be placed in a an own separate directory
("lib/your_library_name/[here are source files]").
For example, see a structure of the following two libraries `Foo` and `Bar`:
|--lib
| |
| |--Bar
| | |--docs
| | |--examples
| | |--src
| | |- Bar.c
| | |- Bar.h
| | |- library.json (optional, custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
| |
| |--Foo
| | |- Foo.c
| | |- Foo.h
| |
| |- README --> THIS FILE
|
|- platformio.ini
|--src
|- main.c
and a contents of `src/main.c`:
```
#include <Foo.h>
#include <Bar.h>
int main (void)
{
...
}
```
PlatformIO Library Dependency Finder will find automatically dependent
libraries scanning project source files.
More information about PlatformIO Library Dependency Finder
- https://docs.platformio.org/page/librarymanager/ldf.html
""",
)
def init_test_readme(test_dir):
with open(os.path.join(test_dir, "README"), mode="w", encoding="utf8") as fp:
fp.write(
"""
This directory is intended for PlatformIO Test Runner and project tests.
Unit Testing is a software testing method by which individual units of
source code, sets of one or more MCU program modules together with associated
control data, usage procedures, and operating procedures, are tested to
determine whether they are fit for use. Unit testing finds problems early
in the development cycle.
More information about PlatformIO Unit Testing:
- https://docs.platformio.org/en/latest/advanced/unit-testing/index.html
""",
)
def init_cvs_ignore(project_dir):
conf_path = os.path.join(project_dir, ".gitignore")
if os.path.isfile(conf_path):
return
with open(conf_path, mode="w", encoding="utf8") as fp:
fp.write(".pio\n")
def update_board_envs(project_dir, board_ids, project_option, env_prefix):
config = ProjectConfig(
os.path.join(project_dir, "platformio.ini"), parse_extra=False
)
used_boards = []
for section in config.sections():
cond = [section.startswith("env:"), config.has_option(section, "board")]
if all(cond):
used_boards.append(config.get(section, "board"))
pm = PlatformPackageManager()
modified = False
for id_ in board_ids:
board_config = pm.board_config(id_)
if id_ in used_boards:
continue
used_boards.append(id_)
modified = True
envopts = {"platform": board_config["platform"], "board": id_}
# find default framework for board
frameworks = board_config.get("frameworks")
if frameworks:
envopts["framework"] = frameworks[0]
for item in project_option:
if "=" not in item:
continue
_name, _value = item.split("=", 1)
envopts[_name.strip()] = _value.strip()
section = "env:%s%s" % (env_prefix, id_)
config.add_section(section)
for option, value in envopts.items():
config.set(section, option, value)
if modified:
config.save()
def update_project_env(project_dir, environment, project_option):
if not project_option:
return
config = ProjectConfig(
os.path.join(project_dir, "platformio.ini"), parse_extra=False
)
section = "env:%s" % environment
if not config.has_section(section):
config.add_section(section)
for item in project_option:
if "=" not in item:
continue
_name, _value = item.split("=", 1)
config.set(section, _name.strip(), _value.strip())
config.save()
| [((1497, 1572), 'click.command', "click.command('init', short_help='Initialize a project or update existing')", (1510, 1572), False, 'import click\n'), ((1759, 1848), 'click.option', "click.option('-b', '--board', multiple=True, metavar='ID', callback=\n    validate_boards)", (1771, 1848), False, 'import click\n'), ((1926, 1997), 'click.option', "click.option('-e', '--environment', help='Update existing environment')", (1938, 1997), False, 'import click\n'), ((1999, 2052), 'click.option', "click.option('-O', '--project-option', multiple=True)", (2011, 2052), False, 'import click\n'), ((2054, 2094), 'click.option', "click.option('--env-prefix', default='')", (2066, 2094), False, 'import click\n'), ((2096, 2151), 'click.option', "click.option('--no-install-dependencies', is_flag=True)", (2108, 2151), False, 'import click\n'), ((2153, 2197), 'click.option', "click.option('-s', '--silent', is_flag=True)", (2165, 2197), False, 'import click\n'), ((1193, 1217), 'platformio.package.manager.platform.PlatformPackageManager', 'PlatformPackageManager', ([], {}), '()\n', (1215, 1217), False, 'from platformio.package.manager.platform import PlatformPackageManager\n'), ((3996, 4068), 'click.echo', 'click.echo', (['"""The next files/directories have been created in """'], {'nl': '(False)'}), "('The next files/directories have been created in ', nl=False)\n", (4006, 4068), False, 'import click\n'), ((5148, 5213), 'click.secho', 'click.secho', (['"""Project has been successfully updated!"""'], {'fg': '"""green"""'}), "('Project has been successfully updated!', fg='green')\n", (5159, 5213), False, 'import click\n'), ((9333, 9372), 'os.path.join', 'os.path.join', (['project_dir', '""".gitignore"""'], {}), "(project_dir, '.gitignore')\n", (9345, 9372), False, 'import os\n'), ((9380, 9405), 'os.path.isfile', 'os.path.isfile', (['conf_path'], {}), '(conf_path)\n', (9394, 9405), False, 'import os\n'), ((9923, 9947), 'platformio.package.manager.platform.PlatformPackageManager', 'PlatformPackageManager', ([], {}), '()\n', (9945, 9947), False, 'from platformio.package.manager.platform import PlatformPackageManager\n'), ((2379, 2413), 'platformio.project.helpers.is_platformio_project', 'is_platformio_project', (['project_dir'], {}), '(project_dir)\n', (2400, 2413), False, 'from platformio.project.helpers import is_platformio_project\n'), ((1651, 1744), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'file_okay': '(False)', 'dir_okay': '(True)', 'writable': '(True)', 'resolve_path': '(True)'}), '(exists=True, file_okay=False, dir_okay=True, writable=True,\n    resolve_path=True)\n', (1661, 1744), False, 'import click\n'), ((3633, 3644), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3642, 3644), False, 'import os\n'), ((3654, 3727), 'click.secho', 'click.secho', (['"""\nThe current working directory """'], {'fg': '"""yellow"""', 'nl': '(False)'}), '("""\nThe current working directory """, fg=\'yellow\', nl=False)\n', (3665, 3727), False, 'import click\n'), ((3909, 3967), 'click.secho', 'click.secho', (['""" will be used for the project."""'], {'fg': '"""yellow"""'}), "(' will be used for the project.', fg='yellow')\n", (3920, 3967), False, 'import click\n'), ((3976, 3990), 'click.echo', 'click.echo', (['""""""'], {}), "('')\n", (3986, 3990), False, 'import click\n'), ((4086, 4121), 'click.secho', 'click.secho', (['project_dir'], {'fg': '"""cyan"""'}), "(project_dir, fg='cyan')\n", (4097, 4121), False, 'import click\n'), ((4687, 5035), 'click.secho', 'click.secho', (['"""\nProject has been successfully initialized! Useful commands:\n`pio run` - process/build project from the current directory\n`pio run --target upload` or `pio run -t upload` - upload firmware to a target\n`pio run --target clean` - clean project (remove compiled files)\n`pio run --help` - additional information"""'], {'fg': '"""green"""'}), '(\n    """\nProject has been successfully initialized! Useful commands:\n`pio run` - process/build project from the current directory\n`pio run --target upload` or `pio run -t upload` - upload firmware to a target\n`pio run --target clean` - clean project (remove compiled files)\n`pio run --help` - additional information"""\n    , fg=\'green\')\n', (4698, 5035), False, 'import click\n'), ((5284, 5302), 'platformio.fs.cd', 'fs.cd', (['project_dir'], {}), '(project_dir)\n', (5289, 5302), False, 'from platformio import fs\n'), ((5321, 5336), 'platformio.project.config.ProjectConfig', 'ProjectConfig', ([], {}), '()\n', (5334, 5336), False, 'from platformio.project.config import ProjectConfig\n'), ((9621, 9664), 'os.path.join', 'os.path.join', (['project_dir', '"""platformio.ini"""'], {}), "(project_dir, 'platformio.ini')\n", (9633, 9664), False, 'import os\n'), ((10945, 10988), 'os.path.join', 'os.path.join', (['project_dir', '"""platformio.ini"""'], {}), "(project_dir, 'platformio.ini')\n", (10957, 10988), False, 'import os\n'), ((3214, 3232), 'platformio.fs.cd', 'fs.cd', (['project_dir'], {}), '(project_dir)\n', (3219, 3232), False, 'from platformio import fs\n'), ((1885, 1922), 'platformio.project.generator.ProjectGenerator.get_supported_ides', 'ProjectGenerator.get_supported_ides', ([], {}), '()\n', (1920, 1922), False, 'from platformio.project.generator import ProjectGenerator\n'), ((3750, 3795), 'click.secho', 'click.secho', (['project_dir'], {'fg': '"""cyan"""', 'nl': '(False)'}), "(project_dir, fg='cyan', nl=False)\n", (3761, 3795), False, 'import click\n'), ((4263, 4296), 'click.style', 'click.style', (['"""include"""'], {'fg': '"""cyan"""'}), "('include', fg='cyan')\n", (4274, 4296), False, 'import click\n'), ((4385, 4414), 'click.style', 'click.style', (['"""lib"""'], {'fg': '"""cyan"""'}), "('lib', fg='cyan')\n", (4396, 4414), False, 'import click\n'), ((4475, 4504), 'click.style', 'click.style', (['"""src"""'], {'fg': '"""cyan"""'}), "('src', fg='cyan')\n", (4486, 4504), False, 'import click\n'), ((4566, 4606), 'click.style', 'click.style', (['"""platformio.ini"""'], {'fg': '"""cyan"""'}), "('platformio.ini', fg='cyan')\n", (4577, 4606), False, 'import click\n'), ((5722, 5741), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (5735, 5741), False, 'import os\n'), ((5780, 5797), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (5791, 5797), False, 'import os\n'), ((5896, 5931), 'os.path.join', 'os.path.join', (['include_dir', '"""README"""'], {}), "(include_dir, 'README')\n", (5908, 5931), False, 'import os\n'), ((7447, 7478), 'os.path.join', 'os.path.join', (['lib_dir', '"""README"""'], {}), "(lib_dir, 'README')\n", (7459, 7478), False, 'import os\n'), ((8647, 8679), 'os.path.join', 'os.path.join', (['test_dir', '"""README"""'], {}), "(test_dir, 'README')\n", (8659, 8679), False, 'import os\n'), ((1333, 1432), 'click.BadParameter', 'click.BadParameter', (["('`%s`. Please search for board ID using `platformio boards` command' % id_)"], {}), "(\n    '`%s`. Please search for board ID using `platformio boards` command' % id_)\n", (1351, 1432), False, 'import click\n'), ((3299, 3342), 'os.path.join', 'os.path.join', (['project_dir', '"""platformio.ini"""'], {}), "(project_dir, 'platformio.ini')\n", (3311, 3342), False, 'import os\n'), ((4173, 4196), 'json.dumps', 'json.dumps', (['project_dir'], {}), '(project_dir)\n', (4183, 4196), False, 'import json\n'), ((3159, 3186), 'click.style', 'click.style', (['ide'], {'fg': '"""cyan"""'}), "(ide, fg='cyan')\n", (3170, 3186), False, 'import click\n'), ((3399, 3448), 'platformio.project.generator.ProjectGenerator', 'ProjectGenerator', (['config', 'environment', 'ide', 'board'], {}), '(config, environment, ide, board)\n', (3415, 3448), False, 'from platformio.project.generator import ProjectGenerator\n'), ((3855, 3878), 'json.dumps', 'json.dumps', (['project_dir'], {}), '(project_dir)\n', (3865, 3878), False, 'import json\n')] |
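For context, update_board_envs above turns each board ID into an [env:...] section of platformio.ini; for a hypothetical board ID uno whose manifest reports platform atmelavr and frameworks starting with arduino, the generated section would look like:

[env:uno]
platform = atmelavr
board = uno
framework = arduino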
DeveloperLY/Python-practice | 12_module_release/message/__init__.py | 85062afee1dc6b60b7011b0e3800b65fc9b9e9b2 | from . import send_message
from . import receive_message | [] |
peopledoc/django-guardian | guardian/decorators.py | 459827c2329975113cbf0d11f4fd476b5689a055 | from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import PermissionDenied
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.utils.functional import wraps
from django.utils.http import urlquote
from django.db.models import Model, get_model
from django.db.models.base import ModelBase
from django.db.models.query import QuerySet
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext, TemplateDoesNotExist
from guardian.conf import settings as guardian_settings
from guardian.exceptions import GuardianError
def permission_required(perm, lookup_variables=None, **kwargs):
"""
Decorator for views that checks whether a user has a particular permission
enabled.
Optionally, instances for which check should be made may be passed as an
second argument or as a tuple parameters same as those passed to
``get_object_or_404`` but must be provided as pairs of strings.
:param login_url: if denied, user would be redirected to location set by
this parameter. Defaults to ``django.conf.settings.LOGIN_URL``.
:param redirect_field_name: name of the parameter passed if redirected.
Defaults to ``django.contrib.auth.REDIRECT_FIELD_NAME``.
:param return_403: if set to ``True`` then instead of redirecting to the
login page, response with status code 403 is returned (
``django.http.HttpResponseForbidden`` instance or rendered template -
see :setting:`GUARDIAN_RENDER_403`). Defaults to ``False``.
:param accept_global_perms: if set to ``True``, then *object level
permission* would be required **only if user does NOT have global
permission** for target *model*. If turned on, makes this decorator
like an extension over standard
``django.contrib.admin.decorators.permission_required`` as it would
check for global permissions first. Defaults to ``False``.
Examples::
@permission_required('auth.change_user', return_403=True)
def my_view(request):
return HttpResponse('Hello')
@permission_required('auth.change_user', (User, 'username', 'username'))
def my_view(request, username):
user = get_object_or_404(User, username=username)
return user.get_absolute_url()
@permission_required('auth.change_user',
(User, 'username', 'username', 'groups__name', 'group_name'))
def my_view(request, username, group_name):
user = get_object_or_404(User, username=username,
group__name=group_name)
return user.get_absolute_url()
"""
login_url = kwargs.pop('login_url', settings.LOGIN_URL)
redirect_field_name = kwargs.pop('redirect_field_name', REDIRECT_FIELD_NAME)
return_403 = kwargs.pop('return_403', False)
accept_global_perms = kwargs.pop('accept_global_perms', False)
# Check if perm is given as string in order not to decorate
# view function itself which makes debugging harder
if not isinstance(perm, basestring):
raise GuardianError("First argument must be in format: "
"'app_label.codename or a callable which return similar string'")
def decorator(view_func):
def _wrapped_view(request, *args, **kwargs):
# if more than one parameter is passed to the decorator we try to
# fetch object for which check would be made
obj = None
if lookup_variables:
model, lookups = lookup_variables[0], lookup_variables[1:]
# Parse model
if isinstance(model, basestring):
splitted = model.split('.')
if len(splitted) != 2:
raise GuardianError("If model should be looked up from "
"string it needs format: 'app_label.ModelClass'")
model = get_model(*splitted)
elif type(model) in (Model, ModelBase, QuerySet):
pass
else:
raise GuardianError("First lookup argument must always be "
"a model, string pointing at app/model or queryset. "
"Given: %s (type: %s)" % (model, type(model)))
# Parse lookups
if len(lookups) % 2 != 0:
raise GuardianError("Lookup variables must be provided "
"as pairs of lookup_string and view_arg")
lookup_dict = {}
for lookup, view_arg in zip(lookups[::2], lookups[1::2]):
if view_arg not in kwargs:
raise GuardianError("Argument %s was not passed "
"into view function" % view_arg)
lookup_dict[lookup] = kwargs[view_arg]
obj = get_object_or_404(model, **lookup_dict)
# Handles both original and with object provided permission check
# as ``obj`` defaults to None
has_perm = accept_global_perms and request.user.has_perm(perm)
if not has_perm and not request.user.has_perm(perm, obj):
if return_403:
if guardian_settings.RENDER_403:
try:
response = render_to_response(
guardian_settings.TEMPLATE_403, {},
RequestContext(request))
response.status_code = 403
return response
                        except TemplateDoesNotExist as e:
if settings.DEBUG:
raise e
elif guardian_settings.RAISE_403:
raise PermissionDenied
return HttpResponseForbidden()
else:
path = urlquote(request.get_full_path())
tup = login_url, redirect_field_name, path
return HttpResponseRedirect("%s?%s=%s" % tup)
return view_func(request, *args, **kwargs)
return wraps(view_func)(_wrapped_view)
return decorator
def permission_required_or_403(perm, *args, **kwargs):
"""
Simple wrapper for permission_required decorator.
Standard Django's permission_required decorator redirects user to login page
in case permission check failed. This decorator may be used to return
HttpResponseForbidden (status 403) instead of redirection.
The only difference between ``permission_required`` decorator is that this
one always set ``return_403`` parameter to ``True``.
"""
kwargs['return_403'] = True
return permission_required(perm, *args, **kwargs)
| [] |
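A minimal sketch of the 403 variant in use; assuming the usual Django view imports, a user lacking auth.change_user receives HTTP 403 instead of a login redirect:

from django.http import HttpResponse
from guardian.decorators import permission_required_or_403

@permission_required_or_403('auth.change_user')
def my_view(request):
    return HttpResponse('Hello')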
mpgarate/OST-fauxra | images/forms.py | d2aa554a082b14268c72220a0b19f2a306deb4d2 | from django import forms
from django.forms import ModelForm
from images.models import Image
class ImageForm(ModelForm):
class Meta:
model = Image
| [] |
MORIMOTO520212/Arm-crawler | WebIOPi-0.7.1/python/webiopi/devices/analog/__init__.py | 95dca0ea9485e4c20a0910687362010604331b55 | # Copyright 2012-2013 Eric Ptak - trouch.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from webiopi.decorators.rest import request, response
from webiopi.utils.types import M_JSON
class ADC():
def __init__(self, channelCount, resolution, vref):
self._analogCount = channelCount
self._analogResolution = resolution
self._analogMax = 2**resolution - 1
self._analogRef = vref
def __family__(self):
return "ADC"
def checkAnalogChannel(self, channel):
if not 0 <= channel < self._analogCount:
raise ValueError("Channel %d out of range [%d..%d]" % (channel, 0, self._analogCount-1))
def checkAnalogValue(self, value):
if not 0 <= value <= self._analogMax:
raise ValueError("Value %d out of range [%d..%d]" % (value, 0, self._analogMax))
@request("GET", "analog/count")
@response("%d")
def analogCount(self):
return self._analogCount
@request("GET", "analog/resolution")
@response("%d")
def analogResolution(self):
return self._analogResolution
@request("GET", "analog/max")
@response("%d")
def analogMaximum(self):
return int(self._analogMax)
@request("GET", "analog/vref")
@response("%.2f")
def analogReference(self):
return self._analogRef
def __analogRead__(self, channel, diff):
raise NotImplementedError
@request("GET", "analog/%(channel)d/integer")
@response("%d")
def analogRead(self, channel, diff=False):
self.checkAnalogChannel(channel)
return self.__analogRead__(channel, diff)
@request("GET", "analog/%(channel)d/float")
@response("%.2f")
def analogReadFloat(self, channel, diff=False):
return self.analogRead(channel, diff) / float(self._analogMax)
@request("GET", "analog/%(channel)d/volt")
@response("%.2f")
def analogReadVolt(self, channel, diff=False):
if self._analogRef == 0:
raise NotImplementedError
return self.analogReadFloat(channel, diff) * self._analogRef
@request("GET", "analog/*/integer")
@response(contentType=M_JSON)
def analogReadAll(self):
values = {}
for i in range(self._analogCount):
values[i] = self.analogRead(i)
return values
@request("GET", "analog/*/float")
@response(contentType=M_JSON)
def analogReadAllFloat(self):
values = {}
for i in range(self._analogCount):
values[i] = float("%.2f" % self.analogReadFloat(i))
return values
@request("GET", "analog/*/volt")
@response(contentType=M_JSON)
def analogReadAllVolt(self):
values = {}
for i in range(self._analogCount):
values[i] = float("%.2f" % self.analogReadVolt(i))
return values
class DAC(ADC):
def __init__(self, channelCount, resolution, vref):
ADC.__init__(self, channelCount, resolution, vref)
def __family__(self):
return "DAC"
def __analogWrite__(self, channel, value):
raise NotImplementedError
@request("POST", "analog/%(channel)d/integer/%(value)d")
@response("%d")
def analogWrite(self, channel, value):
self.checkAnalogChannel(channel)
self.checkAnalogValue(value)
self.__analogWrite__(channel, value)
return self.analogRead(channel)
@request("POST", "analog/%(channel)d/float/%(value)f")
@response("%.2f")
def analogWriteFloat(self, channel, value):
self.analogWrite(channel, int(value * self._analogMax))
return self.analogReadFloat(channel)
@request("POST", "analog/%(channel)d/volt/%(value)f")
@response("%.2f")
def analogWriteVolt(self, channel, value):
        self.analogWriteFloat(channel, value / self._analogRef)
return self.analogReadVolt(channel)
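# Hedged sketch (added for illustration; not part of the original module): a
# minimal in-memory driver showing the ADC contract above -- a subclass only
# supplies __analogRead__ and inherits every REST endpoint. The class name and
# the canned readings are made up.
class StaticADC(ADC):
    def __init__(self):
        # 4 channels, 10-bit resolution, 3.3V reference
        ADC.__init__(self, 4, 10, 3.3)
        self._readings = [0, 256, 512, 1023]
    def __analogRead__(self, channel, diff):
        # this toy driver ignores 'diff' (differential reads)
        return self._readings[channel]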
class PWM():
def __init__(self, channelCount, resolution, frequency):
self._pwmCount = channelCount
self._pwmResolution = resolution
self._pwmMax = 2**resolution - 1
self.frequency = frequency
self.period = 1.0/frequency
# Futaba servos standard
self.servo_neutral = 0.00152
self.servo_travel_time = 0.0004
self.servo_travel_angle = 45.0
self.reverse = [False for i in range(channelCount)]
def __family__(self):
return "PWM"
def checkPWMChannel(self, channel):
if not 0 <= channel < self._pwmCount:
raise ValueError("Channel %d out of range [%d..%d]" % (channel, 0, self._pwmCount-1))
def checkPWMValue(self, value):
if not 0 <= value <= self._pwmMax:
raise ValueError("Value %d out of range [%d..%d]" % (value, 0, self._pwmMax))
def __pwmRead__(self, channel):
raise NotImplementedError
def __pwmWrite__(self, channel, value):
raise NotImplementedError
@request("GET", "pwm/count")
@response("%d")
def pwmCount(self):
return self._pwmCount
@request("GET", "pwm/resolution")
@response("%d")
def pwmResolution(self):
return self._pwmResolution
@request("GET", "pwm/max")
@response("%d")
def pwmMaximum(self):
return int(self._pwmMax)
@request("GET", "pwm/%(channel)d/integer")
@response("%d")
def pwmRead(self, channel):
self.checkPWMChannel(channel)
return self.__pwmRead__(channel)
@request("GET", "pwm/%(channel)d/float")
@response("%.2f")
def pwmReadFloat(self, channel):
return self.pwmRead(channel) / float(self._pwmMax)
@request("POST", "pwm/%(channel)d/integer/%(value)d")
@response("%d")
def pwmWrite(self, channel, value):
self.checkPWMChannel(channel)
self.checkPWMValue(value)
self.__pwmWrite__(channel, value)
return self.pwmRead(channel)
@request("POST", "pwm/%(channel)d/float/%(value)f")
@response("%.2f")
def pwmWriteFloat(self, channel, value):
self.pwmWrite(channel, int(value * self._pwmMax))
return self.pwmReadFloat(channel)
def getReverse(self, channel):
        self.checkPWMChannel(channel)
return self.reverse[channel]
def setReverse(self, channel, value):
        self.checkPWMChannel(channel)
self.reverse[channel] = value
return value
def RatioToAngle(self, value):
f = value
f *= self.period
f -= self.servo_neutral
f *= self.servo_travel_angle
f /= self.servo_travel_time
return f
def AngleToRatio(self, value):
f = value
f *= self.servo_travel_time
f /= self.servo_travel_angle
f += self.servo_neutral
f /= self.period
return f
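    # Worked example (comments added for clarity; not in the original source),
    # assuming frequency=50 so period=0.02 and the Futaba defaults above:
    #   RatioToAngle(0.086): 0.086*0.02 = 0.00172; -0.00152 = 0.0002;
    #                        *45.0 = 0.009; /0.0004 = 22.5 degrees
    #   AngleToRatio(22.5):  22.5*0.0004 = 0.009; /45.0 = 0.0002;
    #                        +0.00152 = 0.00172; /0.02 = 0.086 (round trip)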
@request("GET", "pwm/%(channel)d/angle")
@response("%.2f")
def pwmReadAngle(self, channel):
f = self.pwmReadFloat(channel)
f = self.RatioToAngle(f)
        if self.reverse[channel]:
            f = -f
return f
@request("POST", "pwm/%(channel)d/angle/%(value)f")
@response("%.2f")
def pwmWriteAngle(self, channel, value):
if self.reverse[channel]:
f = -value
else:
f = value
f = self.AngleToRatio(f)
self.pwmWriteFloat(channel, f)
return self.pwmReadAngle(channel)
@request("GET", "pwm/*")
@response(contentType=M_JSON)
def pwmWildcard(self):
values = {}
for i in range(self._pwmCount):
val = self.pwmReadFloat(i)
values[i] = {}
values[i]["float"] = float("%.2f" % val)
values[i]["angle"] = float("%.2f" % self.RatioToAngle(val))
return values
DRIVERS = {}
DRIVERS["ads1x1x"] = ["ADS1014", "ADS1015", "ADS1114", "ADS1115"]
DRIVERS["mcp3x0x"] = ["MCP3002", "MCP3004", "MCP3008", "MCP3204", "MCP3208"]
DRIVERS["mcp4725"] = ["MCP4725"]
DRIVERS["mcp48XX"] = ["MCP4802", "MCP4812", "MCP4822"]
DRIVERS["mcp492X"] = ["MCP4921", "MCP4922"]
DRIVERS["pca9685"] = ["PCA9685"]
DRIVERS["pcf8591"] = ["PCF8591"]
| [((1370, 1400), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""analog/count"""'], {}), "('GET', 'analog/count')\n", (1377, 1400), False, 'from webiopi.decorators.rest import request, response\n'), ((1406, 1420), 'webiopi.decorators.rest.response', 'response', (['"""%d"""'], {}), "('%d')\n", (1414, 1420), False, 'from webiopi.decorators.rest import request, response\n'), ((1487, 1522), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""analog/resolution"""'], {}), "('GET', 'analog/resolution')\n", (1494, 1522), False, 'from webiopi.decorators.rest import request, response\n'), ((1528, 1542), 'webiopi.decorators.rest.response', 'response', (['"""%d"""'], {}), "('%d')\n", (1536, 1542), False, 'from webiopi.decorators.rest import request, response\n'), ((1623, 1651), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""analog/max"""'], {}), "('GET', 'analog/max')\n", (1630, 1651), False, 'from webiopi.decorators.rest import request, response\n'), ((1657, 1671), 'webiopi.decorators.rest.response', 'response', (['"""%d"""'], {}), "('%d')\n", (1665, 1671), False, 'from webiopi.decorators.rest import request, response\n'), ((1747, 1776), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""analog/vref"""'], {}), "('GET', 'analog/vref')\n", (1754, 1776), False, 'from webiopi.decorators.rest import request, response\n'), ((1782, 1798), 'webiopi.decorators.rest.response', 'response', (['"""%.2f"""'], {}), "('%.2f')\n", (1790, 1798), False, 'from webiopi.decorators.rest import request, response\n'), ((1955, 1999), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""analog/%(channel)d/integer"""'], {}), "('GET', 'analog/%(channel)d/integer')\n", (1962, 1999), False, 'from webiopi.decorators.rest import request, response\n'), ((2005, 2019), 'webiopi.decorators.rest.response', 'response', (['"""%d"""'], {}), "('%d')\n", (2013, 2019), False, 'from webiopi.decorators.rest import request, response\n'), ((2168, 2210), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""analog/%(channel)d/float"""'], {}), "('GET', 'analog/%(channel)d/float')\n", (2175, 2210), False, 'from webiopi.decorators.rest import request, response\n'), ((2216, 2232), 'webiopi.decorators.rest.response', 'response', (['"""%.2f"""'], {}), "('%.2f')\n", (2224, 2232), False, 'from webiopi.decorators.rest import request, response\n'), ((2366, 2407), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""analog/%(channel)d/volt"""'], {}), "('GET', 'analog/%(channel)d/volt')\n", (2373, 2407), False, 'from webiopi.decorators.rest import request, response\n'), ((2413, 2429), 'webiopi.decorators.rest.response', 'response', (['"""%.2f"""'], {}), "('%.2f')\n", (2421, 2429), False, 'from webiopi.decorators.rest import request, response\n'), ((2631, 2665), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""analog/*/integer"""'], {}), "('GET', 'analog/*/integer')\n", (2638, 2665), False, 'from webiopi.decorators.rest import request, response\n'), ((2671, 2699), 'webiopi.decorators.rest.response', 'response', ([], {'contentType': 'M_JSON'}), '(contentType=M_JSON)\n', (2679, 2699), False, 'from webiopi.decorators.rest import request, response\n'), ((2875, 2907), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""analog/*/float"""'], {}), "('GET', 'analog/*/float')\n", (2882, 2907), False, 'from webiopi.decorators.rest import request, response\n'), ((2913, 2941), 'webiopi.decorators.rest.response', 'response', ([], {'contentType': 'M_JSON'}), '(contentType=M_JSON)\n', (2921, 2941), False, 'from webiopi.decorators.rest import request, response\n'), ((3135, 3166), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""analog/*/volt"""'], {}), "('GET', 'analog/*/volt')\n", (3142, 3166), False, 'from webiopi.decorators.rest import request, response\n'), ((3172, 3200), 'webiopi.decorators.rest.response', 'response', ([], {'contentType': 'M_JSON'}), '(contentType=M_JSON)\n', (3180, 3200), False, 'from webiopi.decorators.rest import request, response\n'), ((3666, 3721), 'webiopi.decorators.rest.request', 'request', (['"""POST"""', '"""analog/%(channel)d/integer/%(value)d"""'], {}), "('POST', 'analog/%(channel)d/integer/%(value)d')\n", (3673, 3721), False, 'from webiopi.decorators.rest import request, response\n'), ((3727, 3741), 'webiopi.decorators.rest.response', 'response', (['"""%d"""'], {}), "('%d')\n", (3735, 3741), False, 'from webiopi.decorators.rest import request, response\n'), ((3962, 4015), 'webiopi.decorators.rest.request', 'request', (['"""POST"""', '"""analog/%(channel)d/float/%(value)f"""'], {}), "('POST', 'analog/%(channel)d/float/%(value)f')\n", (3969, 4015), False, 'from webiopi.decorators.rest import request, response\n'), ((4029, 4045), 'webiopi.decorators.rest.response', 'response', (['"""%.2f"""'], {}), "('%.2f')\n", (4037, 4045), False, 'from webiopi.decorators.rest import request, response\n'), ((4217, 4269), 'webiopi.decorators.rest.request', 'request', (['"""POST"""', '"""analog/%(channel)d/volt/%(value)f"""'], {}), "('POST', 'analog/%(channel)d/volt/%(value)f')\n", (4224, 4269), False, 'from webiopi.decorators.rest import request, response\n'), ((4283, 4299), 'webiopi.decorators.rest.response', 'response', (['"""%.2f"""'], {}), "('%.2f')\n", (4291, 4299), False, 'from webiopi.decorators.rest import request, response\n'), ((5536, 5563), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""pwm/count"""'], {}), "('GET', 'pwm/count')\n", (5543, 5563), False, 'from webiopi.decorators.rest import request, response\n'), ((5569, 5583), 'webiopi.decorators.rest.response', 'response', (['"""%d"""'], {}), "('%d')\n", (5577, 5583), False, 'from webiopi.decorators.rest import request, response\n'), ((5644, 5676), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""pwm/resolution"""'], {}), "('GET', 'pwm/resolution')\n", (5651, 5676), False, 'from webiopi.decorators.rest import request, response\n'), ((5682, 5696), 'webiopi.decorators.rest.response', 'response', (['"""%d"""'], {}), "('%d')\n", (5690, 5696), False, 'from webiopi.decorators.rest import request, response\n'), ((5771, 5796), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""pwm/max"""'], {}), "('GET', 'pwm/max')\n", (5778, 5796), False, 'from webiopi.decorators.rest import request, response\n'), ((5802, 5816), 'webiopi.decorators.rest.response', 'response', (['"""%d"""'], {}), "('%d')\n", (5810, 5816), False, 'from webiopi.decorators.rest import request, response\n'), ((5886, 5927), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""pwm/%(channel)d/integer"""'], {}), "('GET', 'pwm/%(channel)d/integer')\n", (5893, 5927), False, 'from webiopi.decorators.rest import request, response\n'), ((5933, 5947), 'webiopi.decorators.rest.response', 'response', (['"""%d"""'], {}), "('%d')\n", (5941, 5947), False, 'from webiopi.decorators.rest import request, response\n'), ((6069, 6108), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""pwm/%(channel)d/float"""'], {}), "('GET', 'pwm/%(channel)d/float')\n", (6076, 6108), False, 'from webiopi.decorators.rest import request, response\n'), ((6114, 6130), 'webiopi.decorators.rest.response', 'response', (['"""%.2f"""'], {}), "('%.2f')\n", (6122, 6130), False, 'from webiopi.decorators.rest import request, response\n'), ((6237, 6289), 'webiopi.decorators.rest.request', 'request', (['"""POST"""', '"""pwm/%(channel)d/integer/%(value)d"""'], {}), "('POST', 'pwm/%(channel)d/integer/%(value)d')\n", (6244, 6289), False, 'from webiopi.decorators.rest import request, response\n'), ((6295, 6309), 'webiopi.decorators.rest.response', 'response', (['"""%d"""'], {}), "('%d')\n", (6303, 6309), False, 'from webiopi.decorators.rest import request, response\n'), ((6515, 6565), 'webiopi.decorators.rest.request', 'request', (['"""POST"""', '"""pwm/%(channel)d/float/%(value)f"""'], {}), "('POST', 'pwm/%(channel)d/float/%(value)f')\n", (6522, 6565), False, 'from webiopi.decorators.rest import request, response\n'), ((6579, 6595), 'webiopi.decorators.rest.response', 'response', (['"""%.2f"""'], {}), "('%.2f')\n", (6587, 6595), False, 'from webiopi.decorators.rest import request, response\n'), ((7414, 7453), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""pwm/%(channel)d/angle"""'], {}), "('GET', 'pwm/%(channel)d/angle')\n", (7421, 7453), False, 'from webiopi.decorators.rest import request, response\n'), ((7459, 7475), 'webiopi.decorators.rest.response', 'response', (['"""%.2f"""'], {}), "('%.2f')\n", (7467, 7475), False, 'from webiopi.decorators.rest import request, response\n'), ((7701, 7751), 'webiopi.decorators.rest.request', 'request', (['"""POST"""', '"""pwm/%(channel)d/angle/%(value)f"""'], {}), "('POST', 'pwm/%(channel)d/angle/%(value)f')\n", (7708, 7751), False, 'from webiopi.decorators.rest import request, response\n'), ((7757, 7773), 'webiopi.decorators.rest.response', 'response', (['"""%.2f"""'], {}), "('%.2f')\n", (7765, 7773), False, 'from webiopi.decorators.rest import request, response\n'), ((8032, 8055), 'webiopi.decorators.rest.request', 'request', (['"""GET"""', '"""pwm/*"""'], {}), "('GET', 'pwm/*')\n", (8039, 8055), False, 'from webiopi.decorators.rest import request, response\n'), ((8061, 8089), 'webiopi.decorators.rest.response', 'response', ([], {'contentType': 'M_JSON'}), '(contentType=M_JSON)\n', (8069, 8089), False, 'from webiopi.decorators.rest import request, response\n')] |
dtroyer/osc-loco | osc_choochoo/tests/v1/test_train.py | 57119ab84528933da9cbcd57dcd4f5b842a58186 | # Copyright 2013 Nebula Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
import os
from osc_choochoo.tests import base
from osc_choochoo.tests import fakes
from osc_choochoo.v1 import train
# Load the plugin init module for the plugin list and show commands
plugin_name = 'osc_choochoo'
plugin_client = 'osc_choochoo.plugin'
class FakeTrainV1Client(object):
def __init__(self, **kwargs):
self.auth_token = kwargs['token']
self.management_url = kwargs['endpoint']
class TestTrainV1(base.TestCommand):
def setUp(self):
super(TestTrainV1, self).setUp()
self.app.client_manager.osc_choochoo = FakeTrainV1Client(
endpoint=fakes.AUTH_URL,
token=fakes.AUTH_TOKEN,
)
class TestTrainList(TestTrainV1):
def setUp(self):
super(TestTrainList, self).setUp()
# Get the command object to test
self.cmd = train.TrainList(self.app, None)
def test_train_list(self):
arglist = []
verifylist = []
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
collist = ('Name', )
datalist = ['1.txt', '2.txt']
with mock.patch('os.listdir') as mock_list:
mock_list.return_value = datalist
# DisplayCommandBase.take_action() returns two tuples
columns, data = self.cmd.take_action(parsed_args)
self.assertEqual(collist, columns)
for d in data:
self.assertTrue(d[0] + '.txt' in datalist)
class TestTrainShow(TestTrainV1):
def setUp(self):
super(TestTrainShow, self).setUp()
# Get the command object to test
self.cmd = train.TrainShow(self.app, None)
def test_train_show(self):
arglist = [
plugin_name,
]
verifylist = [
('name', plugin_name),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
collist = ['name', 'data']
datalist = [
plugin_name,
'dummy',
]
with mock.patch('io.open') as mock_open:
mock_open.return_value = mock.MagicMock()
m_file = mock_open.return_value.__enter__.return_value
m_file.read.return_value = 'dummy'
columns, data = self.cmd.take_action(parsed_args)
mock_open.assert_called_once_with(
os.path.join(
train.DATA_PATH,
plugin_name + '.txt',
)
)
self.assertEqual(collist, columns)
self.assertEqual(datalist, data)
| [((1438, 1469), 'osc_choochoo.v1.train.TrainList', 'train.TrainList', (['self.app', 'None'], {}), '(self.app, None)\n', (1453, 1469), False, 'from osc_choochoo.v1 import train\n'), ((2209, 2240), 'osc_choochoo.v1.train.TrainShow', 'train.TrainShow', (['self.app', 'None'], {}), '(self.app, None)\n', (2224, 2240), False, 'from osc_choochoo.v1 import train\n'), ((1700, 1724), 'mock.patch', 'mock.patch', (['"""os.listdir"""'], {}), "('os.listdir')\n", (1710, 1724), False, 'import mock\n'), ((2594, 2615), 'mock.patch', 'mock.patch', (['"""io.open"""'], {}), "('io.open')\n", (2604, 2615), False, 'import mock\n'), ((2667, 2683), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (2681, 2683), False, 'import mock\n'), ((2925, 2976), 'os.path.join', 'os.path.join', (['train.DATA_PATH', "(plugin_name + '.txt')"], {}), "(train.DATA_PATH, plugin_name + '.txt')\n", (2937, 2976), False, 'import os\n')] |
darioncassel/OmniCrawl | scripts/firefox-wrapper.py | 62317e07340df7eb758a1b8de80679b6d4293d49 | #!/usr/bin/env python3
import sys
from os.path import dirname, abspath, join
import subprocess
# Note this does not resolve symbolic links
# https://stackoverflow.com/a/17806123
FIREFOX_BINARY = join(dirname(abspath(__file__)), 'firefox')
argvs = list(sys.argv)
argvs[0] = FIREFOX_BINARY
# geckodriver will run `firefox -version` first to check the version
if len(sys.argv) == 2 and sys.argv[1] == '-version':
subprocess.check_call(argvs)
exit(0)
# First search for the -tmpprofile option
new_profile_path = None
for idx, argv in enumerate(sys.argv):
if argv == '-tmpprofile':
new_profile_path = sys.argv[idx + 1]
break
# If it's present, replace profile with tmp_profile
if new_profile_path:
for idx, argv in enumerate(sys.argv):
if argv == '-profile':
old_profile_path = sys.argv[idx + 1]
subprocess.check_call(['rm', '-r', new_profile_path])
subprocess.check_call(['cp', '-r', old_profile_path, new_profile_path])
argvs[idx+1] = new_profile_path
break
# Firefox will ignore the -tmpprofile option
subprocess.check_call(argvs)
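# Illustrative trace (comments added for clarity; not part of the original
# script), with hypothetical paths:
#   geckodriver runs:
#     firefox-wrapper -profile /tmp/rust_mozprofile.ABC -tmpprofile /tmp/copy.XYZ
#   the wrapper copies /tmp/rust_mozprofile.ABC to /tmp/copy.XYZ and executes:
#     firefox -profile /tmp/copy.XYZ -tmpprofile /tmp/copy.XYZ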
| [((1105, 1133), 'subprocess.check_call', 'subprocess.check_call', (['argvs'], {}), '(argvs)\n', (1126, 1133), False, 'import subprocess\n'), ((417, 445), 'subprocess.check_call', 'subprocess.check_call', (['argvs'], {}), '(argvs)\n', (438, 445), False, 'import subprocess\n'), ((210, 227), 'os.path.abspath', 'abspath', (['__file__'], {}), '(__file__)\n', (217, 227), False, 'from os.path import dirname, abspath, join\n'), ((859, 912), 'subprocess.check_call', 'subprocess.check_call', (["['rm', '-r', new_profile_path]"], {}), "(['rm', '-r', new_profile_path])\n", (880, 912), False, 'import subprocess\n'), ((925, 996), 'subprocess.check_call', 'subprocess.check_call', (["['cp', '-r', old_profile_path, new_profile_path]"], {}), "(['cp', '-r', old_profile_path, new_profile_path])\n", (946, 996), False, 'import subprocess\n')] |
whiteyhat/pretix | src/pretix/base/payment.py | 34d1fcf077a92765cd796d81d1aa6695d4801a9a | import json
import logging
from collections import OrderedDict
from decimal import ROUND_HALF_UP, Decimal
from typing import Any, Dict, Union
import pytz
from django import forms
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.dispatch import receiver
from django.forms import Form
from django.http import HttpRequest
from django.template.loader import get_template
from django.utils.timezone import now
from django.utils.translation import pgettext_lazy, ugettext_lazy as _
from django_countries import Countries
from i18nfield.forms import I18nFormField, I18nTextarea, I18nTextInput
from i18nfield.strings import LazyI18nString
from pretix.base.forms import PlaceholderValidator
from pretix.base.models import (
CartPosition, Event, InvoiceAddress, Order, OrderPayment, OrderRefund,
Quota,
)
from pretix.base.reldate import RelativeDateField, RelativeDateWrapper
from pretix.base.settings import SettingsSandbox
from pretix.base.signals import register_payment_providers
from pretix.base.templatetags.money import money_filter
from pretix.base.templatetags.rich_text import rich_text
from pretix.helpers.money import DecimalTextInput
from pretix.presale.views import get_cart_total
from pretix.presale.views.cart import cart_session, get_or_create_cart_id
logger = logging.getLogger(__name__)
class PaymentProviderForm(Form):
def clean(self):
cleaned_data = super().clean()
for k, v in self.fields.items():
val = cleaned_data.get(k)
if v._required and not val:
self.add_error(k, _('This field is required.'))
class BasePaymentProvider:
"""
This is the base class for all payment providers.
"""
def __init__(self, event: Event):
self.event = event
self.settings = SettingsSandbox('payment', self.identifier, event)
# Default values
if self.settings.get('_fee_reverse_calc') is None:
self.settings.set('_fee_reverse_calc', True)
def __str__(self):
return self.identifier
@property
def is_implicit(self) -> bool:
"""
        Returns whether or not this payment provider is an "implicit" payment provider that will
*always* and unconditionally be used if is_allowed() returns True and does not require any input.
This is intended to be used by the FreePaymentProvider, which skips the payment choice page.
By default, this returns ``False``. Please do not set this if you don't know exactly what you are doing.
"""
return False
@property
def is_meta(self) -> bool:
"""
        Returns whether or not this payment provider is a "meta" payment provider that only
works as a settings holder for other payment providers and should never be used directly. This
is a trick to implement payment gateways with multiple payment methods but unified payment settings.
Take a look at the built-in stripe provider to see how this might be used.
By default, this returns ``False``.
"""
return False
@property
def is_enabled(self) -> bool:
"""
        Returns whether or not this payment provider is enabled.
By default, this is determined by the value of the ``_enabled`` setting.
"""
return self.settings.get('_enabled', as_type=bool)
@property
def test_mode_message(self) -> str:
"""
If this property is set to a string, this will be displayed when this payment provider is selected
while the event is in test mode. You should use it to explain to your user how your plugin behaves,
e.g. if it falls back to a test mode automatically as well or if actual payments will be performed.
        If you do not set this (or return ``None``), pretix will show a default message warning the user
that this plugin does not support test mode payments.
"""
return None
def calculate_fee(self, price: Decimal) -> Decimal:
"""
        Calculate the fee for this payment provider which will be added to the
        final price before fees (but after taxes). It should include any taxes.
The default implementation makes use of the setting ``_fee_abs`` for an
absolute fee and ``_fee_percent`` for a percentage.
:param price: The total value without the payment method fee, after taxes.
"""
fee_abs = self.settings.get('_fee_abs', as_type=Decimal, default=0)
fee_percent = self.settings.get('_fee_percent', as_type=Decimal, default=0)
fee_reverse_calc = self.settings.get('_fee_reverse_calc', as_type=bool, default=True)
places = settings.CURRENCY_PLACES.get(self.event.currency, 2)
if fee_reverse_calc:
return ((price + fee_abs) * (1 / (1 - fee_percent / 100)) - price).quantize(
Decimal('1') / 10 ** places, ROUND_HALF_UP
)
else:
return (price * fee_percent / 100 + fee_abs).quantize(
Decimal('1') / 10 ** places, ROUND_HALF_UP
)
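    # Worked example (comments added for clarity; not part of the original
    # method): with hypothetical settings _fee_abs=0.30, _fee_percent=2.9 and a
    # two-decimal currency, a price of 100 gives
    #   reverse calculation: (100 + 0.30) / (1 - 0.029) - 100 -> 3.30
    #   plain calculation:   100 * 0.029 + 0.30 -> 3.20
    # The reverse variant grosses the total up so that the provider's
    # percentage of the final charged amount is passed on to the customer.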
@property
def verbose_name(self) -> str:
"""
A human-readable name for this payment provider. This should
be short but self-explaining. Good examples include 'Bank transfer'
and 'Credit card via Stripe'.
"""
raise NotImplementedError() # NOQA
@property
def public_name(self) -> str:
"""
A human-readable name for this payment provider to be shown to the public.
This should be short but self-explaining. Good examples include 'Bank transfer'
        and 'Credit card', but 'Credit card via Stripe' might be too explicit. By default,
this is the same as ``verbose_name``
"""
return self.verbose_name
@property
def identifier(self) -> str:
"""
A short and unique identifier for this payment provider.
This should only contain lowercase letters and in most
cases will be the same as your package name.
"""
raise NotImplementedError() # NOQA
@property
def abort_pending_allowed(self) -> bool:
"""
        Whether or not a user can abort a payment in pending state to switch to another
        payment method. This returns ``False`` by default, which is no guarantee that
        aborting a pending payment can never happen; it just hides the frontend button
        to avoid users accidentally committing double payments.
"""
return False
@property
def settings_form_fields(self) -> dict:
"""
When the event's administrator visits the event configuration
page, this method is called to return the configuration fields available.
It should therefore return a dictionary where the keys should be (unprefixed)
settings keys and the values should be corresponding Django form fields.
The default implementation returns the appropriate fields for the ``_enabled``,
``_fee_abs``, ``_fee_percent`` and ``_availability_date`` settings mentioned above.
We suggest that you return an ``OrderedDict`` object instead of a dictionary
and make use of the default implementation. Your implementation could look
like this::
@property
def settings_form_fields(self):
return OrderedDict(
list(super().settings_form_fields.items()) + [
('bank_details',
forms.CharField(
widget=forms.Textarea,
label=_('Bank account details'),
required=False
))
]
)
.. WARNING:: It is highly discouraged to alter the ``_enabled`` field of the default
implementation.
"""
places = settings.CURRENCY_PLACES.get(self.event.currency, 2)
d = OrderedDict([
('_enabled',
forms.BooleanField(
label=_('Enable payment method'),
required=False,
)),
('_availability_date',
RelativeDateField(
label=_('Available until'),
help_text=_('Users will not be able to choose this payment provider after the given date.'),
required=False,
)),
('_invoice_text',
I18nFormField(
label=_('Text on invoices'),
help_text=_('Will be printed just below the payment figures and above the closing text on invoices. '
'This will only be used if the invoice is generated before the order is paid. If the '
'invoice is generated later, it will show a text stating that it has already been paid.'),
required=False,
widget=I18nTextarea,
widget_kwargs={'attrs': {'rows': '2'}}
)),
('_total_min',
forms.DecimalField(
label=_('Minimum order total'),
help_text=_('This payment will be available only if the order total is equal to or exceeds the given '
'value. The order total for this purpose may be computed without taking the fees imposed '
'by this payment method into account.'),
localize=True,
required=False,
decimal_places=places,
widget=DecimalTextInput(places=places)
)),
('_total_max',
forms.DecimalField(
label=_('Maximum order total'),
help_text=_('This payment will be available only if the order total is equal to or below the given '
'value. The order total for this purpose may be computed without taking the fees imposed '
'by this payment method into account.'),
localize=True,
required=False,
decimal_places=places,
widget=DecimalTextInput(places=places)
)),
('_fee_abs',
forms.DecimalField(
label=_('Additional fee'),
help_text=_('Absolute value'),
localize=True,
required=False,
decimal_places=places,
widget=DecimalTextInput(places=places)
)),
('_fee_percent',
forms.DecimalField(
label=_('Additional fee'),
help_text=_('Percentage of the order total.'),
localize=True,
required=False,
)),
('_fee_reverse_calc',
forms.BooleanField(
label=_('Calculate the fee from the total value including the fee.'),
help_text=_('We recommend to enable this if you want your users to pay the payment fees of your '
'payment provider. <a href="{docs_url}" target="_blank" rel="noopener">Click here '
'for detailed information on what this does.</a> Don\'t forget to set the correct fees '
'above!').format(docs_url='https://docs.pretix.eu/en/latest/user/payments/fees.html'),
required=False
)),
('_restricted_countries',
forms.MultipleChoiceField(
label=_('Restrict to countries'),
choices=Countries(),
help_text=_('Only allow choosing this payment provider for invoice addresses in the selected '
'countries. If you don\'t select any country, all countries are allowed. This is only '
'enabled if the invoice address is required.'),
widget=forms.CheckboxSelectMultiple(
attrs={'class': 'scrolling-multiple-choice'}
),
required=False,
disabled=not self.event.settings.invoice_address_required
)),
])
d['_restricted_countries']._as_type = list
return d
def settings_form_clean(self, cleaned_data):
"""
Overriding this method allows you to inject custom validation into the settings form.
:param cleaned_data: Form data as per previous validations.
:return: Please return the modified cleaned_data
"""
return cleaned_data
def settings_content_render(self, request: HttpRequest) -> str:
"""
When the event's administrator visits the event configuration
page, this method is called. It may return HTML containing additional information
that is displayed below the form fields configured in ``settings_form_fields``.
"""
return ""
def render_invoice_text(self, order: Order, payment: OrderPayment) -> str:
"""
This is called when an invoice for an order with this payment provider is generated.
The default implementation returns the content of the _invoice_text configuration
variable (an I18nString), or an empty string if unconfigured. For paid orders, the
default implementation always renders a string stating that the invoice is already paid.
"""
if order.status == Order.STATUS_PAID:
return pgettext_lazy('invoice', 'The payment for this invoice has already been received.')
return self.settings.get('_invoice_text', as_type=LazyI18nString, default='')
@property
def payment_form_fields(self) -> dict:
"""
This is used by the default implementation of :py:meth:`payment_form`.
It should return an object similar to :py:attr:`settings_form_fields`.
The default implementation returns an empty dictionary.
"""
return {}
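    # Hedged sketch (comments added for illustration; not part of the original
    # class): a provider collecting one value during checkout. The field name
    # 'account' is made up.
    #
    #     @property
    #     def payment_form_fields(self):
    #         return OrderedDict([
    #             ('account', forms.CharField(label=_('Account number'))),
    #         ])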
def payment_form(self, request: HttpRequest) -> Form:
"""
This is called by the default implementation of :py:meth:`payment_form_render`
to obtain the form that is displayed to the user during the checkout
process. The default implementation constructs the form using
:py:attr:`payment_form_fields` and sets appropriate prefixes for the form
        and all fields and fills the form with data from the user's session.
If you overwrite this, we strongly suggest that you inherit from
``PaymentProviderForm`` (from this module) that handles some nasty issues about
required fields for you.
"""
form = PaymentProviderForm(
data=(request.POST if request.method == 'POST' and request.POST.get("payment") == self.identifier else None),
prefix='payment_%s' % self.identifier,
initial={
k.replace('payment_%s_' % self.identifier, ''): v
for k, v in request.session.items()
if k.startswith('payment_%s_' % self.identifier)
}
)
form.fields = self.payment_form_fields
for k, v in form.fields.items():
v._required = v.required
v.required = False
v.widget.is_required = False
return form
def _is_still_available(self, now_dt=None, cart_id=None, order=None):
now_dt = now_dt or now()
tz = pytz.timezone(self.event.settings.timezone)
availability_date = self.settings.get('_availability_date', as_type=RelativeDateWrapper)
if availability_date:
if self.event.has_subevents and cart_id:
availability_date = min([
availability_date.datetime(se).date()
for se in self.event.subevents.filter(
id__in=CartPosition.objects.filter(
cart_id=cart_id, event=self.event
).values_list('subevent', flat=True)
)
])
elif self.event.has_subevents and order:
availability_date = min([
availability_date.datetime(se).date()
for se in self.event.subevents.filter(
id__in=order.positions.values_list('subevent', flat=True)
)
])
elif self.event.has_subevents:
logger.error('Payment provider is not subevent-ready.')
return False
else:
availability_date = availability_date.datetime(self.event).date()
return availability_date >= now_dt.astimezone(tz).date()
return True
def is_allowed(self, request: HttpRequest, total: Decimal=None) -> bool:
"""
You can use this method to disable this payment provider for certain groups
of users, products or other criteria. If this method returns ``False``, the
user will not be able to select this payment method. This will only be called
during checkout, not on retrying.
The default implementation checks for the _availability_date setting to be either unset or in the future
and for the _total_max and _total_min requirements to be met. It also checks the ``_restrict_countries``
setting.
:param total: The total value without the payment method fee, after taxes.
.. versionchanged:: 1.17.0
The ``total`` parameter has been added. For backwards compatibility, this method is called again
without this parameter if it raises a ``TypeError`` on first try.
"""
timing = self._is_still_available(cart_id=get_or_create_cart_id(request))
pricing = True
if (self.settings._total_max is not None or self.settings._total_min is not None) and total is None:
raise ImproperlyConfigured('This payment provider does not support maximum or minimum amounts.')
if self.settings._total_max is not None:
pricing = pricing and total <= Decimal(self.settings._total_max)
if self.settings._total_min is not None:
pricing = pricing and total >= Decimal(self.settings._total_min)
def get_invoice_address():
if not hasattr(request, '_checkout_flow_invoice_address'):
cs = cart_session(request)
iapk = cs.get('invoice_address')
if not iapk:
request._checkout_flow_invoice_address = InvoiceAddress()
else:
try:
request._checkout_flow_invoice_address = InvoiceAddress.objects.get(pk=iapk, order__isnull=True)
except InvoiceAddress.DoesNotExist:
request._checkout_flow_invoice_address = InvoiceAddress()
return request._checkout_flow_invoice_address
if self.event.settings.invoice_address_required:
restricted_countries = self.settings.get('_restricted_countries', as_type=list)
if restricted_countries:
ia = get_invoice_address()
if str(ia.country) not in restricted_countries:
return False
return timing and pricing
def payment_form_render(self, request: HttpRequest, total: Decimal) -> str:
"""
When the user selects this provider as their preferred payment method,
they will be shown the HTML you return from this method.
The default implementation will call :py:meth:`payment_form`
and render the returned form. If your payment method doesn't require
the user to fill out form fields, you should just return a paragraph
of explanatory text.
"""
form = self.payment_form(request)
template = get_template('pretixpresale/event/checkout_payment_form_default.html')
ctx = {'request': request, 'form': form}
return template.render(ctx)
def checkout_confirm_render(self, request) -> str:
"""
If the user has successfully filled in their payment data, they will be redirected
to a confirmation page which lists all details of their order for a final review.
This method should return the HTML which should be displayed inside the
'Payment' box on this page.
In most cases, this should include a short summary of the user's input and
a short explanation on how the payment process will continue.
"""
raise NotImplementedError() # NOQA
def payment_pending_render(self, request: HttpRequest, payment: OrderPayment) -> str:
"""
Render customer-facing instructions on how to proceed with a pending payment
:return: HTML
"""
return ""
def checkout_prepare(self, request: HttpRequest, cart: Dict[str, Any]) -> Union[bool, str]:
"""
Will be called after the user selects this provider as their payment method.
If you provided a form to the user to enter payment data, this method should
at least store the user's input into their session.
This method should return ``False`` if the user's input was invalid, ``True``
if the input was valid and the frontend should continue with default behavior
or a string containing a URL if the user should be redirected somewhere else.
On errors, you should use Django's message framework to display an error message
to the user (or the normal form validation error messages).
The default implementation stores the input into the form returned by
:py:meth:`payment_form` in the user's session.
If your payment method requires you to redirect the user to an external provider,
this might be the place to do so.
.. IMPORTANT:: If this is called, the user has not yet confirmed their order.
You may NOT do anything which actually moves money.
:param cart: This dictionary contains at least the following keys:
positions:
A list of ``CartPosition`` objects that are annotated with the special
attributes ``count`` and ``total`` because multiple objects of the
same content are grouped into one.
raw:
The raw list of ``CartPosition`` objects in the users cart
total:
The overall total *including* the fee for the payment method.
payment_fee:
The fee for the payment method.
"""
form = self.payment_form(request)
if form.is_valid():
for k, v in form.cleaned_data.items():
request.session['payment_%s_%s' % (self.identifier, k)] = v
return True
else:
return False
def payment_is_valid_session(self, request: HttpRequest) -> bool:
"""
This is called at the time the user tries to place the order. It should return
``True`` if the user's session is valid and all data your payment provider requires
in future steps is present.
"""
raise NotImplementedError() # NOQA
def execute_payment(self, request: HttpRequest, payment: OrderPayment) -> str:
"""
After the user has confirmed their purchase, this method will be called to complete
the payment process. This is the place to actually move the money if applicable.
You will be passed an :py:class:`pretix.base.models.OrderPayment` object that contains
the amount of money that should be paid.
If you need any special behavior, you can return a string
containing the URL the user will be redirected to. If you are done with your process
you should return the user to the order's detail page.
        If the payment is completed, you should call ``payment.confirm()``. Please note that this might
raise a ``Quota.QuotaExceededException`` if (and only if) the payment term of this order is over and
some of the items are sold out. You should use the exception message to display a meaningful error
to the user.
The default implementation just returns ``None`` and therefore leaves the
order unpaid. The user will be redirected to the order's detail page by default.
On errors, you should raise a ``PaymentException``.
:param order: The order object
:param payment: An ``OrderPayment`` instance
"""
return None
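    # Hedged sketch (comments added for illustration; not part of the original
    # class): a typical synchronous implementation, mirroring the concrete
    # providers further down in this module.
    #
    #     def execute_payment(self, request, payment):
    #         try:
    #             payment.confirm()  # may raise Quota.QuotaExceededException
    #         except Quota.QuotaExceededException as e:
    #             raise PaymentException(str(e))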
def order_pending_mail_render(self, order: Order, payment: OrderPayment) -> str:
"""
After the user has submitted their order, they will receive a confirmation
email. You can return a string from this method if you want to add additional
information to this email.
:param order: The order object
:param payment: The payment object
"""
return ""
def order_change_allowed(self, order: Order) -> bool:
"""
Will be called to check whether it is allowed to change the payment method of
an order to this one.
The default implementation checks for the _availability_date setting to be either unset or in the future,
as well as for the _total_max, _total_min and _restricted_countries settings.
:param order: The order object
"""
ps = order.pending_sum
if self.settings._total_max is not None and ps > Decimal(self.settings._total_max):
return False
if self.settings._total_min is not None and ps < Decimal(self.settings._total_min):
return False
restricted_countries = self.settings.get('_restricted_countries', as_type=list)
if restricted_countries:
try:
ia = order.invoice_address
except InvoiceAddress.DoesNotExist:
return True
else:
if str(ia.country) not in restricted_countries:
return False
return self._is_still_available(order=order)
def payment_prepare(self, request: HttpRequest, payment: OrderPayment) -> Union[bool, str]:
"""
Will be called if the user retries to pay an unpaid order (after the user filled in
e.g. the form returned by :py:meth:`payment_form`) or if the user changes the payment
method.
It should return and report errors the same way as :py:meth:`checkout_prepare`, but
receives an ``Order`` object instead of a cart object.
Note: The ``Order`` object given to this method might be different from the version
        stored in the database as its total will already contain the payment fee for the
new payment method.
"""
form = self.payment_form(request)
if form.is_valid():
for k, v in form.cleaned_data.items():
request.session['payment_%s_%s' % (self.identifier, k)] = v
return True
else:
return False
def payment_control_render(self, request: HttpRequest, payment: OrderPayment) -> str:
"""
Will be called if the *event administrator* views the details of a payment.
It should return HTML code containing information regarding the current payment
status and, if applicable, next steps.
The default implementation returns the verbose name of the payment provider.
:param order: The order object
"""
return ''
def payment_refund_supported(self, payment: OrderPayment) -> bool:
"""
Will be called to check if the provider supports automatic refunding for this
payment.
"""
return False
def payment_partial_refund_supported(self, payment: OrderPayment) -> bool:
"""
Will be called to check if the provider supports automatic partial refunding for this
payment.
"""
return False
def execute_refund(self, refund: OrderRefund):
"""
        Will be called to execute a refund. Note that refunds have an amount property and can be partial.
This should transfer the money back (if possible).
On success, you should call ``refund.done()``.
On failure, you should raise a PaymentException.
"""
raise PaymentException(_('Automatic refunds are not supported by this payment provider.'))
def shred_payment_info(self, obj: Union[OrderPayment, OrderRefund]):
"""
When personal data is removed from an event, this method is called to scrub payment-related data
from a payment or refund. By default, it removes all info from the ``info`` attribute. You can override
this behavior if you want to retain attributes that are not personal data on their own, i.e. a
reference to a transaction in an external system. You can also override this to scrub more data, e.g.
data from external sources that is saved in LogEntry objects or other places.
:param order: An order
"""
obj.info = '{}'
obj.save(update_fields=['info'])
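# Hedged sketch (added for illustration; not part of the original module): the
# smallest useful provider built from the hooks documented above. The
# identifier and all naming are made up.
class DemoOnSiteProvider(BasePaymentProvider):
    identifier = 'demo_onsite'
    verbose_name = _('On-site payment (demo)')
    def payment_is_valid_session(self, request):
        # nothing is stored in the session, so every session is acceptable
        return True
    def checkout_confirm_render(self, request):
        return _('You will pay at the venue; the order stays pending until then.')
    def execute_payment(self, request, payment):
        # intentionally leave the payment pending; staff mark it paid manually
        return None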
class PaymentException(Exception):
pass
class FreeOrderProvider(BasePaymentProvider):
is_implicit = True
is_enabled = True
identifier = "free"
def checkout_confirm_render(self, request: HttpRequest) -> str:
return _("No payment is required as this order only includes products which are free of charge.")
def payment_is_valid_session(self, request: HttpRequest) -> bool:
return True
@property
def verbose_name(self) -> str:
return _("Free of charge")
def execute_payment(self, request: HttpRequest, payment: OrderPayment):
try:
payment.confirm(send_mail=False)
except Quota.QuotaExceededException as e:
raise PaymentException(str(e))
@property
def settings_form_fields(self) -> dict:
return {}
def is_allowed(self, request: HttpRequest, total: Decimal=None) -> bool:
from .services.cart import get_fees
total = get_cart_total(request)
total += sum([f.value for f in get_fees(self.event, request, total, None, None)])
return total == 0
def order_change_allowed(self, order: Order) -> bool:
return False
class BoxOfficeProvider(BasePaymentProvider):
is_implicit = True
is_enabled = True
identifier = "boxoffice"
verbose_name = _("Box office")
def execute_payment(self, request: HttpRequest, payment: OrderPayment):
try:
payment.confirm(send_mail=False)
except Quota.QuotaExceededException as e:
raise PaymentException(str(e))
@property
def settings_form_fields(self) -> dict:
return {}
def is_allowed(self, request: HttpRequest, total: Decimal=None) -> bool:
return False
def order_change_allowed(self, order: Order) -> bool:
return False
def payment_control_render(self, request, payment) -> str:
if not payment.info:
return
payment_info = json.loads(payment.info)
template = get_template('pretixcontrol/boxoffice/payment.html')
ctx = {
'request': request,
'event': self.event,
'settings': self.settings,
'payment_info': payment_info,
'payment': payment,
'provider': self,
}
return template.render(ctx)
class ManualPayment(BasePaymentProvider):
identifier = 'manual'
verbose_name = _('Manual payment')
@property
def test_mode_message(self):
return _('In test mode, you can just manually mark this order as paid in the backend after it has been '
'created.')
@property
def is_implicit(self):
return 'pretix.plugins.manualpayment' not in self.event.plugins
def is_allowed(self, request: HttpRequest, total: Decimal=None):
return 'pretix.plugins.manualpayment' in self.event.plugins and super().is_allowed(request, total)
def order_change_allowed(self, order: Order):
return 'pretix.plugins.manualpayment' in self.event.plugins and super().order_change_allowed(order)
@property
def public_name(self):
return str(self.settings.get('public_name', as_type=LazyI18nString))
@property
def settings_form_fields(self):
d = OrderedDict(
[
('public_name', I18nFormField(
label=_('Payment method name'),
widget=I18nTextInput,
)),
('checkout_description', I18nFormField(
label=_('Payment process description during checkout'),
help_text=_('This text will be shown during checkout when the user selects this payment method. '
'It should give a short explanation on this payment method.'),
widget=I18nTextarea,
)),
('email_instructions', I18nFormField(
label=_('Payment process description in order confirmation emails'),
help_text=_('This text will be included for the {payment_info} placeholder in order confirmation '
                                 'mails. It should instruct the user on how to proceed with the payment. You can use '
'the placeholders {order}, {total}, {currency} and {total_with_currency}'),
widget=I18nTextarea,
validators=[PlaceholderValidator(['{order}', '{total}', '{currency}', '{total_with_currency}'])],
)),
('pending_description', I18nFormField(
label=_('Payment process description for pending orders'),
help_text=_('This text will be shown on the order confirmation page for pending orders. '
                                 'It should instruct the user on how to proceed with the payment. You can use '
'the placeholders {order}, {total}, {currency} and {total_with_currency}'),
widget=I18nTextarea,
validators=[PlaceholderValidator(['{order}', '{total}', '{currency}', '{total_with_currency}'])],
)),
] + list(super().settings_form_fields.items())
)
d.move_to_end('_enabled', last=False)
return d
def payment_form_render(self, request) -> str:
return rich_text(
str(self.settings.get('checkout_description', as_type=LazyI18nString))
)
def checkout_prepare(self, request, total):
return True
def payment_is_valid_session(self, request):
return True
def checkout_confirm_render(self, request):
return self.payment_form_render(request)
def format_map(self, order):
return {
'order': order.code,
'total': order.total,
'currency': self.event.currency,
'total_with_currency': money_filter(order.total, self.event.currency)
}
def order_pending_mail_render(self, order) -> str:
msg = str(self.settings.get('email_instructions', as_type=LazyI18nString)).format_map(self.format_map(order))
return msg
def payment_pending_render(self, request, payment) -> str:
return rich_text(
str(self.settings.get('pending_description', as_type=LazyI18nString)).format_map(self.format_map(payment.order))
)
class OffsettingProvider(BasePaymentProvider):
is_enabled = True
identifier = "offsetting"
verbose_name = _("Offsetting")
is_implicit = True
def execute_payment(self, request: HttpRequest, payment: OrderPayment):
try:
payment.confirm()
except Quota.QuotaExceededException as e:
raise PaymentException(str(e))
def execute_refund(self, refund: OrderRefund):
code = refund.info_data['orders'][0]
try:
order = Order.objects.get(code=code, event__organizer=self.event.organizer)
except Order.DoesNotExist:
raise PaymentException(_('You entered an order that could not be found.'))
p = order.payments.create(
state=OrderPayment.PAYMENT_STATE_PENDING,
amount=refund.amount,
payment_date=now(),
provider='offsetting',
info=json.dumps({'orders': [refund.order.code]})
)
p.confirm()
@property
def settings_form_fields(self) -> dict:
return {}
def is_allowed(self, request: HttpRequest, total: Decimal=None) -> bool:
return False
def order_change_allowed(self, order: Order) -> bool:
return False
def payment_control_render(self, request: HttpRequest, payment: OrderPayment) -> str:
return _('Balanced against orders: %s' % ', '.join(payment.info_data['orders']))
@receiver(register_payment_providers, dispatch_uid="payment_free")
def register_payment_provider(sender, **kwargs):
return [FreeOrderProvider, BoxOfficeProvider, OffsettingProvider, ManualPayment]
| [((1331, 1358), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1348, 1358), False, 'import logging\n'), ((36977, 37042), 'django.dispatch.receiver', 'receiver', (['register_payment_providers'], {'dispatch_uid': '"""payment_free"""'}), "(register_payment_providers, dispatch_uid='payment_free')\n", (36985, 37042), False, 'from django.dispatch import receiver\n'), ((30517, 30532), 'django.utils.translation.ugettext_lazy', '_', (['"""Box office"""'], {}), "('Box office')\n", (30518, 30532), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((31609, 31628), 'django.utils.translation.ugettext_lazy', '_', (['"""Manual payment"""'], {}), "('Manual payment')\n", (31610, 31628), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((35685, 35700), 'django.utils.translation.ugettext_lazy', '_', (['"""Offsetting"""'], {}), "('Offsetting')\n", (35686, 35700), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((1826, 1876), 'pretix.base.settings.SettingsSandbox', 'SettingsSandbox', (["'payment'", 'self.identifier', 'event'], {}), "('payment', self.identifier, event)\n", (1841, 1876), False, 'from pretix.base.settings import SettingsSandbox\n'), ((4732, 4784), 'django.conf.settings.CURRENCY_PLACES.get', 'settings.CURRENCY_PLACES.get', (['self.event.currency', '(2)'], {}), '(self.event.currency, 2)\n', (4760, 4784), False, 'from django.conf import settings\n'), ((7969, 8021), 'django.conf.settings.CURRENCY_PLACES.get', 'settings.CURRENCY_PLACES.get', (['self.event.currency', '(2)'], {}), '(self.event.currency, 2)\n', (7997, 8021), False, 'from django.conf import settings\n'), ((15499, 15542), 'pytz.timezone', 'pytz.timezone', (['self.event.settings.timezone'], {}), '(self.event.settings.timezone)\n', (15512, 15542), False, 'import pytz\n'), ((19898, 19968), 'django.template.loader.get_template', 'get_template', (['"""pretixpresale/event/checkout_payment_form_default.html"""'], {}), "('pretixpresale/event/checkout_payment_form_default.html')\n", (19910, 19968), False, 'from django.template.loader import get_template\n'), ((29445, 29540), 'django.utils.translation.ugettext_lazy', '_', (['"""No payment is required as this order only includes products which are free of charge."""'], {}), "('No payment is required as this order only includes products which are free of charge.'\n    )\n", (29446, 29540), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((29692, 29711), 'django.utils.translation.ugettext_lazy', '_', (['"""Free of charge"""'], {}), "('Free of charge')\n", (29693, 29711), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((30156, 30179), 'pretix.presale.views.get_cart_total', 'get_cart_total', (['request'], {}), '(request)\n', (30170, 30179), False, 'from pretix.presale.views import get_cart_total\n'), ((31152, 31176), 'json.loads', 'json.loads', (['payment.info'], {}), '(payment.info)\n', (31162, 31176), False, 'import json\n'), ((31196, 31248), 'django.template.loader.get_template', 'get_template', (['"""pretixcontrol/boxoffice/payment.html"""'], {}), "('pretixcontrol/boxoffice/payment.html')\n", (31208, 31248), False, 'from django.template.loader import get_template\n'), ((31692, 31803), 'django.utils.translation.ugettext_lazy', '_', (['"""In test mode, you can just manually mark this order as paid in the backend after it has been created."""'], {}), "('In test mode, you can just manually mark this order as paid in the backend after it has been created.'\n    )\n", (31693, 31803), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((13557, 13644), 'django.utils.translation.pgettext_lazy', 'pgettext_lazy', (['"""invoice"""', '"""The payment for this invoice has already been received."""'], {}), "('invoice',\n 'The payment for this invoice has already been received.')\n", (13570, 13644), False, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((15480, 15485), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (15483, 15485), False, 'from django.utils.timezone import now\n'), ((17958, 18053), 'django.core.exceptions.ImproperlyConfigured', 'ImproperlyConfigured', (['"""This payment provider does not support maximum or minimum amounts."""'], {}), "(\n 'This payment provider does not support maximum or minimum amounts.')\n", (17978, 18053), False, 'from django.core.exceptions import ImproperlyConfigured\n'), ((28419, 28485), 'django.utils.translation.ugettext_lazy', '_', (['"""Automatic refunds are not supported by this payment provider."""'], {}), "('Automatic refunds are not supported by this payment provider.')\n", (28420, 28485), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((35090, 35136), 'pretix.base.templatetags.money.money_filter', 'money_filter', (['order.total', 'self.event.currency'], {}), '(order.total, self.event.currency)\n', (35102, 35136), False, 'from pretix.base.templatetags.money import money_filter\n'), ((36067, 36134), 'pretix.base.models.Order.objects.get', 'Order.objects.get', ([], {'code': 'code', 'event__organizer': 'self.event.organizer'}), '(code=code, event__organizer=self.event.organizer)\n', (36084, 36134), False, 'from pretix.base.models import CartPosition, Event, InvoiceAddress, Order, OrderPayment, OrderRefund, Quota\n'), ((17775, 17805), 'pretix.presale.views.cart.get_or_create_cart_id', 'get_or_create_cart_id', (['request'], {}), '(request)\n', (17796, 17805), False, 'from pretix.presale.views.cart import cart_session, get_or_create_cart_id\n'), ((18431, 18452), 'pretix.presale.views.cart.cart_session', 'cart_session', (['request'], {}), '(request)\n', (18443, 18452), False, 'from pretix.presale.views.cart import cart_session, get_or_create_cart_id\n'), ((25543, 25576), 'decimal.Decimal', 'Decimal', (['self.settings._total_max'], {}), '(self.settings._total_max)\n', (25550, 25576), False, 'from decimal import ROUND_HALF_UP, Decimal\n'), ((25661, 25694), 'decimal.Decimal', 'Decimal', (['self.settings._total_min'], {}), '(self.settings._total_min)\n', (25668, 25694), False, 'from decimal import ROUND_HALF_UP, Decimal\n'), ((36405, 36410), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (36408, 36410), False, 'from django.utils.timezone import now\n'), ((36464, 36507), 'json.dumps', 'json.dumps', (["{'orders': [refund.order.code]}"], {}), "({'orders': [refund.order.code]})\n", (36474, 36507), False, 'import json\n'), ((1607, 1635), 'django.utils.translation.ugettext_lazy', '_', (['"""This field is required."""'], {}), "('This field is required.')\n", (1608, 1635), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((4919, 4931), 'decimal.Decimal', 'Decimal', (['"""1"""'], {}), "('1')\n", (4926, 4931), False, 'from decimal import ROUND_HALF_UP, Decimal\n'), ((5073, 5085), 'decimal.Decimal', 'Decimal', (['"""1"""'], {}), "('1')\n", (5080, 5085), False, 'from decimal import ROUND_HALF_UP, Decimal\n'), ((18142, 18175), 'decimal.Decimal', 'Decimal', (['self.settings._total_max'], {}), '(self.settings._total_max)\n', (18149, 18175), False, 'from decimal import ROUND_HALF_UP, Decimal\n'), ((18269, 18302), 'decimal.Decimal', 'Decimal', (['self.settings._total_min'], {}), '(self.settings._total_min)\n', (18276, 18302), False, 'from decimal import ROUND_HALF_UP, Decimal\n'), ((18592, 18608), 'pretix.base.models.InvoiceAddress', 'InvoiceAddress', ([], {}), '()\n', (18606, 18608), False, 'from pretix.base.models import CartPosition, Event, InvoiceAddress, Order, OrderPayment, OrderRefund, Quota\n'), ((36205, 36255), 'django.utils.translation.ugettext_lazy', '_', (['"""You entered an order that could not be found."""'], {}), "('You entered an order that could not be found.')\n", (36206, 36255), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((18721, 18776), 'pretix.base.models.InvoiceAddress.objects.get', 'InvoiceAddress.objects.get', ([], {'pk': 'iapk', 'order__isnull': '(True)'}), '(pk=iapk, order__isnull=True)\n', (18747, 18776), False, 'from pretix.base.models import CartPosition, Event, InvoiceAddress, Order, OrderPayment, OrderRefund, Quota\n'), ((8129, 8155), 'django.utils.translation.ugettext_lazy', '_', (['"""Enable payment method"""'], {}), "('Enable payment method')\n", (8130, 8155), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((8297, 8317), 'django.utils.translation.ugettext_lazy', '_', (['"""Available until"""'], {}), "('Available until')\n", (8298, 8317), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((8346, 8432), 'django.utils.translation.ugettext_lazy', '_', (['"""Users will not be able to choose this payment provider after the given date."""'], {}), "('Users will not be able to choose this payment provider after the given date.'\n    )\n", (8347, 8432), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((8560, 8581), 'django.utils.translation.ugettext_lazy', '_', (['"""Text on invoices"""'], {}), "('Text on invoices')\n", (8561, 8581), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((8610, 8877), 'django.utils.translation.ugettext_lazy', '_', (['"""Will be printed just below the payment figures and above the closing text on invoices. This will only be used if the invoice is generated before the order is paid. If the invoice is generated later, it will show a text stating that it has already been paid."""'], {}), "('Will be printed just below the payment figures and above the closing text on invoices. This will only be used if the invoice is generated before the order is paid. If the invoice is generated later, it will show a text stating that it has already been paid.'\n    )\n", (8611, 8877), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((9165, 9189), 'django.utils.translation.ugettext_lazy', '_', (['"""Minimum order total"""'], {}), "('Minimum order total')\n", (9166, 9189), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((9218, 9440), 'django.utils.translation.ugettext_lazy', '_', (['"""This payment will be available only if the order total is equal to or exceeds the given value. The order total for this purpose may be computed without taking the fees imposed by this payment method into account."""'], {}), "('This payment will be available only if the order total is equal to or exceeds the given value. The order total for this purpose may be computed without taking the fees imposed by this payment method into account.'\n    )\n", (9219, 9440), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((9630, 9661), 'pretix.helpers.money.DecimalTextInput', 'DecimalTextInput', ([], {'places': 'places'}), '(places=places)\n', (9646, 9661), False, 'from pretix.helpers.money import DecimalTextInput\n'), ((9762, 9786), 'django.utils.translation.ugettext_lazy', '_', (['"""Maximum order total"""'], {}), "('Maximum order total')\n", (9763, 9786), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((9815, 10035), 'django.utils.translation.ugettext_lazy', '_', (['"""This payment will be available only if the order total is equal to or below the given value. The order total for this purpose may be computed without taking the fees imposed by this payment method into account."""'], {}), "('This payment will be available only if the order total is equal to or below the given value. The order total for this purpose may be computed without taking the fees imposed by this payment method into account.'\n    )\n", (9816, 10035), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((10225, 10256), 'pretix.helpers.money.DecimalTextInput', 'DecimalTextInput', ([], {'places': 'places'}), '(places=places)\n', (10241, 10256), False, 'from pretix.helpers.money import DecimalTextInput\n'), ((10355, 10374), 'django.utils.translation.ugettext_lazy', '_', (['"""Additional fee"""'], {}), "('Additional fee')\n", (10356, 10374), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((10403, 10422), 'django.utils.translation.ugettext_lazy', '_', (['"""Absolute value"""'], {}), "('Absolute value')\n", (10404, 10422), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((10553, 10584), 'pretix.helpers.money.DecimalTextInput', 'DecimalTextInput', ([], {'places': 'places'}), '(places=places)\n', (10569, 10584), False, 'from pretix.helpers.money import DecimalTextInput\n'), ((10687, 10706), 'django.utils.translation.ugettext_lazy', '_', (['"""Additional fee"""'], {}), "('Additional fee')\n", (10688, 10706), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((10735, 10770), 'django.utils.translation.ugettext_lazy', '_', (['"""Percentage of the order total."""'], {}), "('Percentage of the order total.')\n", (10736, 10770), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((10944, 11006), 'django.utils.translation.ugettext_lazy', '_', (['"""Calculate the fee from the total value including the fee."""'], {}), "('Calculate the fee from the total value including the fee.')\n", (10945, 11006), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((11620, 11646), 'django.utils.translation.ugettext_lazy', '_', (['"""Restrict to countries"""'], {}), "('Restrict to countries')\n", (11621, 11646), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((11673, 11684), 'django_countries.Countries', 'Countries', ([], {}), '()\n', (11682, 11684), False, 'from django_countries import Countries\n'), ((11713, 11930), 'django.utils.translation.ugettext_lazy', '_', (['"""Only allow choosing this payment provider for invoice addresses in the selected countries. If you don't select any country, all countries are allowed. 
This is only enabled if the invoice address is required."""'], {}), '("Only allow choosing this payment provider for invoice addresses in the selected countries. If you don\'t select any country, all countries are allowed. This is only enabled if the invoice address is required."\n )\n', (11714, 11930), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((12016, 12090), 'django.forms.CheckboxSelectMultiple', 'forms.CheckboxSelectMultiple', ([], {'attrs': "{'class': 'scrolling-multiple-choice'}"}), "(attrs={'class': 'scrolling-multiple-choice'})\n", (12044, 12090), False, 'from django import forms\n'), ((18898, 18914), 'pretix.base.models.InvoiceAddress', 'InvoiceAddress', ([], {}), '()\n', (18912, 18914), False, 'from pretix.base.models import CartPosition, Event, InvoiceAddress, Order, OrderPayment, OrderRefund, Quota\n'), ((32551, 32575), 'django.utils.translation.ugettext_lazy', '_', (['"""Payment method name"""'], {}), "('Payment method name')\n", (32552, 32575), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((32721, 32769), 'django.utils.translation.ugettext_lazy', '_', (['"""Payment process description during checkout"""'], {}), "('Payment process description during checkout')\n", (32722, 32769), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((32801, 32952), 'django.utils.translation.ugettext_lazy', '_', (['"""This text will be shown during checkout when the user selects this payment method. It should give a short explanation on this payment method."""'], {}), "('This text will be shown during checkout when the user selects this payment method. It should give a short explanation on this payment method.'\n )\n", (32802, 32952), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((33125, 33186), 'django.utils.translation.ugettext_lazy', '_', (['"""Payment process description in order confirmation emails"""'], {}), "('Payment process description in order confirmation emails')\n", (33126, 33186), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((33218, 33465), 'django.utils.translation.ugettext_lazy', '_', (['"""This text will be included for the {payment_info} placeholder in order confirmation mails. It should instruct the user on how to proceed with the payment. You can usethe placeholders {order}, {total}, {currency} and {total_with_currency}"""'], {}), "('This text will be included for the {payment_info} placeholder in order confirmation mails. It should instruct the user on how to proceed with the payment. You can usethe placeholders {order}, {total}, {currency} and {total_with_currency}'\n )\n", (33219, 33465), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((33792, 33843), 'django.utils.translation.ugettext_lazy', '_', (['"""Payment process description for pending orders"""'], {}), "('Payment process description for pending orders')\n", (33793, 33843), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((33875, 34106), 'django.utils.translation.ugettext_lazy', '_', (['"""This text will be shown on the order confirmation page for pending orders. It should instruct the user on how to proceed with the payment. You can usethe placeholders {order}, {total}, {currency} and {total_with_currency}"""'], {}), "('This text will be shown on the order confirmation page for pending orders. It should instruct the user on how to proceed with the payment. 
You can usethe placeholders {order}, {total}, {currency} and {total_with_currency}'\n )\n", (33876, 34106), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((11035, 11301), 'django.utils.translation.ugettext_lazy', '_', (['"""We recommend to enable this if you want your users to pay the payment fees of your payment provider. <a href="{docs_url}" target="_blank" rel="noopener">Click here for detailed information on what this does.</a> Don\'t forget to set the correct fees above!"""'], {}), '(\'We recommend to enable this if you want your users to pay the payment fees of your payment provider. <a href="{docs_url}" target="_blank" rel="noopener">Click here for detailed information on what this does.</a> Don\\\'t forget to set the correct fees above!\'\n )\n', (11036, 11301), True, 'from django.utils.translation import pgettext_lazy, ugettext_lazy as _\n'), ((33605, 33692), 'pretix.base.forms.PlaceholderValidator', 'PlaceholderValidator', (["['{order}', '{total}', '{currency}', '{total_with_currency}']"], {}), "(['{order}', '{total}', '{currency}',\n '{total_with_currency}'])\n", (33625, 33692), False, 'from pretix.base.forms import PlaceholderValidator\n'), ((34246, 34333), 'pretix.base.forms.PlaceholderValidator', 'PlaceholderValidator', (["['{order}', '{total}', '{currency}', '{total_with_currency}']"], {}), "(['{order}', '{total}', '{currency}',\n '{total_with_currency}'])\n", (34266, 34333), False, 'from pretix.base.forms import PlaceholderValidator\n'), ((15914, 15976), 'pretix.base.models.CartPosition.objects.filter', 'CartPosition.objects.filter', ([], {'cart_id': 'cart_id', 'event': 'self.event'}), '(cart_id=cart_id, event=self.event)\n', (15941, 15976), False, 'from pretix.base.models import CartPosition, Event, InvoiceAddress, Order, OrderPayment, OrderRefund, Quota\n')] |
sag-tgo/EPL_assert_demo | tests/AssertFail/run.py | a43541e4472dfab7da6538ae9f220b5e042d158c | from pysys.basetest import BaseTest
from apama.correlator import CorrelatorHelper
import os
class PySysTest(BaseTest):
def execute(self):
corr = CorrelatorHelper(self, name='correlator')
corr.start(logfile='correlator.log')
corr.injectEPL(os.getenv('APAMA_HOME','') + '/monitors/ManagementImpl.mon')
corr.injectEPL(os.getenv('APAMA_HOME','') + '/monitors/Management.mon')
corr.injectEPL('../../../src/Assert.mon')
corr.injectEPL('TestAssertFail.mon')
self.waitForGrep('correlator.log', 'Removed monitor TestAssertFail')
def validate(self):
self.assertGrep('correlator.log', r' (ERROR|WARN) .*', contains=False)
| [((149, 190), 'apama.correlator.CorrelatorHelper', 'CorrelatorHelper', (['self'], {'name': '"""correlator"""'}), "(self, name='correlator')\n", (165, 190), False, 'from apama.correlator import CorrelatorHelper\n'), ((247, 274), 'os.getenv', 'os.getenv', (['"""APAMA_HOME"""', '""""""'], {}), "('APAMA_HOME', '')\n", (256, 274), False, 'import os\n'), ((325, 352), 'os.getenv', 'os.getenv', (['"""APAMA_HOME"""', '""""""'], {}), "('APAMA_HOME', '')\n", (334, 352), False, 'import os\n')] |
Ziftr/stellard | src/beast/python/beast/env/ReadEnvFile_test.py | 626514cbbb2c6c2b6844315ca98a2bfcbca0b43d | from __future__ import absolute_import, division, print_function, unicode_literals
from unittest import TestCase
from beast.env.ReadEnvFile import read_env_file
from beast.util import Terminal
Terminal.CAN_CHANGE_COLOR = False
JSON = """
{
"FOO": "foo",
"BAR": "bar bar bar",
"CPPFLAGS": "-std=c++11 -frtti -fno-strict-aliasing -DWOMBAT"
}"""
ENV = """
# An env file.
FOO=foo
export BAR="bar bar bar"
CPPFLAGS=-std=c++11 -frtti -fno-strict-aliasing -DWOMBAT
# export BAZ=baz should be ignored.
"""
RESULT = {
'FOO': 'foo',
'BAR': 'bar bar bar',
'CPPFLAGS': '-std=c++11 -frtti -fno-strict-aliasing -DWOMBAT',
}
BAD_ENV = ENV + """
This line isn't right.
NO SPACES IN NAMES="valid value"
"""
class test_ReadEnvFile(TestCase):
def test_read_json(self):
self.assertEqual(read_env_file(JSON), RESULT)
def test_read_env(self):
self.assertEqual(read_env_file(ENV), RESULT)
def test_read_env_error(self):
errors = []
self.assertEqual(read_env_file(BAD_ENV, errors.append), RESULT)
self.assertEqual(errors, [
"WARNING: Didn't understand the following environment file lines:",
"11. >>> This line isn't right.",
'12. >>> NO SPACES IN NAMES="valid value"'])
| [((805, 824), 'beast.env.ReadEnvFile.read_env_file', 'read_env_file', (['JSON'], {}), '(JSON)\n', (818, 824), False, 'from beast.env.ReadEnvFile import read_env_file\n'), ((883, 901), 'beast.env.ReadEnvFile.read_env_file', 'read_env_file', (['ENV'], {}), '(ENV)\n', (896, 901), False, 'from beast.env.ReadEnvFile import read_env_file\n'), ((982, 1019), 'beast.env.ReadEnvFile.read_env_file', 'read_env_file', (['BAD_ENV', 'errors.append'], {}), '(BAD_ENV, errors.append)\n', (995, 1019), False, 'from beast.env.ReadEnvFile import read_env_file\n')] |
pptnz/swa_team2 | signin/tests.py | 253ae83d73c00245d359574d6a16f4eba9830950 | import json
from django.test import TestCase
from django.contrib.auth.models import User
from .models import CustomUser
from django.apps import apps
from .apps import SigninConfig
class SignInTest(TestCase):
def setUp(self):
self.django_user = User.objects.create_user(username='testusername', password='testpassword')
self.custom_user = CustomUser.objects.create(django_user=self.django_user)
def test_apps(self):
self.assertEqual(SigninConfig.name, 'signin')
self.assertEqual(apps.get_app_config('signin').name, 'signin')
def test_sign_in_redirect_page(self):
response = self.client.get('/')
self.assertRedirects(response, '/sign_in/')
def test_get(self):
response = self.client.get('/sign_in/')
self.assertEqual(response.status_code, 200)
def test_wrong_username(self):
response = self.client.post('/sign_in/', {'username': 'wrongusername', 'password': 'testpassword'})
self.assertEqual(response.status_code, 200)
def test_wrong_password(self):
response = self.client.post('/sign_in/', {'username': 'testusername', 'password': 'wrongpassword'})
self.assertEqual(response.status_code, 200)
def test_login(self):
response = self.client.post('/sign_in/', {'username': 'testusername', 'password': 'testpassword'})
self.assertRedirects(response, '/habitmaker/')
# todo: change this link
def test_login_other_page(self):
response = self.client.post('/sign_in/?next=/habitmaker/', {'username': 'testusername', 'password': 'testpassword'})
self.assertRedirects(response, '/habitmaker/')
def test_form_not_valid(self):
response = self.client.post('/sign_in/', {'username': 'testusername'})
self.assertEqual(response.status_code, 200)
def test_email_verification(self):
self.custom_user.authenticate_email()
self.assertTrue(self.custom_user.is_email_authenticated)
def test_already_signed_in(self):
self.client.login(username='testusername', password='testpassword')
response = self.client.get('/sign_in/')
self.assertRedirects(response, '/habitmaker/')
| [((259, 333), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""testusername"""', 'password': '"""testpassword"""'}), "(username='testusername', password='testpassword')\n", (283, 333), False, 'from django.contrib.auth.models import User\n'), ((522, 551), 'django.apps.apps.get_app_config', 'apps.get_app_config', (['"""signin"""'], {}), "('signin')\n", (541, 551), False, 'from django.apps import apps\n')] |
EliHar/BinaryTree-ADT | tree/list/BinaryNode.py | bf220eb8ccb04f6fee7d7a67ef7e9cd00cc6a4c1 | __author__ = 'Elias Haroun'
class BinaryNode(object):
def __init__(self, data, left, right):
self.data = data
self.left = left
self.right = right
def getData(self):
return self.data
def getLeft(self):
return self.left
def getRight(self):
return self.right
def setData(self, data):
self.data = data
def setLeft(self, aNode):
self.left = aNode
def setRight(self, aNode):
self.right = aNode
def hasLeft(self):
return self.getLeft() is not None
def hasRight(self):
return self.getRight() is not None
def isLeaf(self):
return not(self.hasLeft() | self.hasRight())
| [] |
AbrahmAB/booleannet | boolean2/tokenizer.py | a07124047d18a5b7265e050a234969ac58970c7a | """
Main tokenizer.
"""
from itertools import *
import sys, random
import util
import ply.lex as lex
class Lexer:
"""
Lexer for boolean rules
"""
literals = '=*,'
tokens = (
'LABEL', 'ID','STATE', 'ASSIGN', 'EQUAL',
'AND', 'OR', 'NOT',
'NUMBER', 'LPAREN','RPAREN', 'COMMA',
)
reserved = {
'and' : 'AND',
'or' : 'OR',
'not' : 'NOT',
'True' : 'STATE',
'False' : 'STATE',
'Random' : 'STATE',
}
def __init__(self, **kwargs):
# nothing here yet
self.lexer = lex.lex(object=self, **kwargs)
def t_ID( self, t):
"[a-zA-Z_\+\-][a-zA-Z_0-9\+\-]*"
# check for reserved words
t.type = self.reserved.get( t.value, 'ID')
return t
def t_LABEL (self, t):
"[0-9][0-9]*:"
t.value = int(t.value[:-1])
return t
def t_NUMBER(self, t):
"[\+-]*\d+\.?\d*"
try:
t.value = float(t.value)
except ValueError:
util.error( "value too large", t.value )
return t
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_ASSIGN = r'\*'
t_EQUAL = r'='
t_COMMA = r','
t_ignore = ' \t'
t_ignore_COMMENT = r'\#.*'
def t_newline(self, t):
"Newline handling"
r'\n+'
t.lexer.lineno += t.value.count("\n")
def t_error(self, t):
"Error message"
msg = "lexer error in '%s' at '%s'" % (self.last, t.value)
util.error( msg )
def tokenize_line(self, line ):
"Runs the lexer a single line retutns a list of tokens"
tokens = []
self.last = line
self.lexer.input( line )
while 1:
t = self.lexer.token()
if t:
tokens.append(t)
else:
break
return tokens
def tokenize_text(self, text):
"Runs the lexer on text and returns a list of lists of tokens"
return map( self.tokenize_line, util.split(text) )
def init_tokens( tokenlist ):
"""
    Returns elements of the list that are initializers
"""
def cond( elem ):
return elem[1].type == 'EQUAL'
return filter( cond, tokenlist)
def label_tokens( tokenlist ):
"""
Returns elements where the first token is a LABEL
(updating rules with labels)
"""
def cond( elem ):
return elem[0].type == 'LABEL'
return filter( cond, tokenlist)
def async_tokens( tokenlist ):
"""
Returns elements where the second token is ASSIGN
(updating rules with no LABELs)
"""
def cond( elem ):
return elem[1].type == 'ASSIGN'
return filter( cond, tokenlist)
def update_tokens( tokenlist ):
"""
Returns tokens that perform updates
"""
def cond( elem ):
return elem[1].type == 'ASSIGN' or elem[2].type == 'ASSIGN'
return filter( cond, tokenlist)
def get_nodes( tokenlist ):
"""
    Flattens the list of token lists and returns the values of all ID tokens
"""
def cond ( token ):
return token.type == 'ID'
def get( token):
return token.value
nodes = map(get, filter( cond, chain( *tokenlist )))
nodes = set(nodes)
util.check_case( nodes )
return nodes
def tok2line( tokens ):
"""
Turns a list of tokens into a line that can be parsed again
"""
elems = [ str(t.value) for t in tokens ]
if tokens[0].type == 'LABEL':
elems[0] = elems[0] + ':'
return ' '.join( elems )
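
# e.g. (hand-traced against the lexer above):
#   tok2line(Lexer().tokenize_line("1: A* = B"))  ->  "1: A * = B"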
def test():
"""
Main testrunnner
>>> import util
>>>
>>> text = '''
... A = B = True
... 1: A* = B
... 2: B* = A and B
... C* = not C
... E = False
... F = (1, 2, 3)
... '''
>>>
>>> lexer = Lexer()
>>> tokens = lexer.tokenize_text( text )
>>> tokens[0]
[LexToken(ID,'A',1,0), LexToken(EQUAL,'=',1,2), LexToken(ID,'B',1,4), LexToken(EQUAL,'=',1,6), LexToken(STATE,'True',1,8)]
>>> tokens[1]
[LexToken(LABEL,1,1,0), LexToken(ID,'A',1,3), LexToken(ASSIGN,'*',1,4), LexToken(EQUAL,'=',1,6), LexToken(ID,'B',1,8)]
>>> tokens[2]
[LexToken(LABEL,2,1,0), LexToken(ID,'B',1,3), LexToken(ASSIGN,'*',1,4), LexToken(EQUAL,'=',1,6), LexToken(ID,'A',1,8), LexToken(AND,'and',1,10), LexToken(ID,'B',1,14)]
>>> tokens[3]
[LexToken(ID,'C',1,0), LexToken(ASSIGN,'*',1,1), LexToken(EQUAL,'=',1,3), LexToken(NOT,'not',1,5), LexToken(ID,'C',1,9)]
>>>
>>> get_nodes( tokens )
set(['A', 'C', 'B', 'E', 'F'])
"""
# runs the local suite
import doctest
doctest.testmod( optionflags=doctest.ELLIPSIS + doctest.NORMALIZE_WHITESPACE )
def tokenize( text ):
"A one step tokenizer"
lexer = Lexer()
return lexer.tokenize_text( text )
def modify_states( text, turnon=[], turnoff=[] ):
"""
Turns nodes on and off and comments out lines
that contain assignment to any of the nodes
Will use the main lexer.
"""
turnon = util.as_set( turnon )
turnoff = util.as_set( turnoff )
tokens = tokenize( text )
init = init_tokens( tokens )
init_lines = map(tok2line, init)
# override the initial values
init_lines.extend( [ '%s=True' % node for node in turnon ] )
init_lines.extend( [ '%s=False' % node for node in turnoff ] )
alter = turnon | turnoff
update = update_tokens ( tokens )
update_lines = []
for token in update:
line = tok2line( token)
if token[0].value in alter or token[1].value in alter:
line = '#' + line
update_lines.append( line )
    lines = init_lines + update_lines
    return '\n'.join( lines )
if __name__ == '__main__':
test()
lexer = Lexer()
text = """
A = B = C = False
D = True
1: A* = B
2: B* = A and B
C* = not C
D* = A
"""
print modify_states( text, turnon=['A', 'B'], turnoff=['C'] )
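    # Hand-traced output sketch (init lines first, then the overrides, then the
    # update rules; rules touching A, B or C get commented out):
    #   A = B = C = False
    #   D = True
    #   A=True
    #   B=True
    #   C=False
    #   #1: A * = B
    #   #2: B * = A and B
    #   #C * = not C
    #   D * = A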
| [] |
tomzhang/aiida_core | aiida/cmdline/params/options/test_interactive.py | 949810e9f3daff0f748c5c9aa1dde4f5222bb49b | """Unit tests for the InteractiveOption."""
from __future__ import absolute_import
import unittest
import click
from click.testing import CliRunner
from click.types import IntParamType
from aiida.cmdline.params.options.interactive import InteractiveOption
from aiida.cmdline.params.options import NON_INTERACTIVE
class Only42IntParamType(IntParamType):
"""
Param type that only accepts 42 as valid value
"""
name = 'only42int'
def convert(self, value, param, ctx):
newval = super(Only42IntParamType, self).convert(value, param, ctx)
if newval != 42:
self.fail("Type validation: invalid, should be 42")
return newval
def __repr__(self):
return 'ONLY42INT'
class InteractiveOptionTest(unittest.TestCase):
"""Unit tests for InteractiveOption."""
# pylint: disable=too-many-public-methods, missing-docstring
def simple_command(self, **kwargs):
"""Return a simple command with one InteractiveOption, kwargs get relayed to the option."""
# pylint: disable=no-self-use
@click.command()
@click.option('--opt', prompt='Opt', cls=InteractiveOption, **kwargs)
@NON_INTERACTIVE()
def cmd(opt, non_interactive):
"""test command for InteractiveOption"""
# pylint: disable=unused-argument
click.echo(str(opt))
return cmd
@classmethod
def setUpClass(cls):
cls.runner = CliRunner()
def prompt_output(self, cli_input, converted=None):
"""Return expected output of simple_command, given a commandline cli_input string."""
# pylint: disable=no-self-use
return "Opt: {}\n{}\n".format(cli_input, converted or cli_input)
def test_prompt_str(self):
"""
scenario: using InteractiveOption with type=str
behaviour: giving no option prompts, accepts a string
"""
cmd = self.simple_command(type=str)
runner = CliRunner()
result = runner.invoke(cmd, [], input='TEST\n')
expected = self.prompt_output('TEST')
self.assertIsNone(result.exception)
self.assertIn(expected, result.output)
def test_prompt_empty_input(self):
"""
scenario: using InteractiveOption with type=str and invoking without options
behaviour: pressing enter on empty line at prompt repeats the prompt without a message
"""
cmd = self.simple_command(type=str)
runner = CliRunner()
result = runner.invoke(cmd, [], input='\nTEST\n')
expected = "Opt: \nOpt: TEST\nTEST\n"
self.assertIsNone(result.exception)
self.assertIn(expected, result.output)
def test_prompt_help_default(self):
"""
scenario: using InteractiveOption with type=str and no help parameter and invoking without options
behaviour: entering '?' leads to a default help message being printed and prompt repeated
"""
cmd = self.simple_command(type=str)
runner = CliRunner()
result = runner.invoke(cmd, [], input='?\nTEST\n')
expected_1 = 'Opt: ?\n'
expected_2 = 'Expecting text\n'
expected_3 = 'Opt: TEST\nTEST\n'
self.assertIsNone(result.exception)
self.assertIn(expected_1, result.output)
self.assertIn(expected_2, result.output)
self.assertIn(expected_3, result.output)
def test_prompt_help_custom(self):
"""
scenario: using InteractiveOption with type=str and help message and invoking without options
behaviour: entering '?' leads to the given help message being printed and the prompt repeated
"""
cmd = self.simple_command(type=str, help='Please enter some text')
runner = CliRunner()
result = runner.invoke(cmd, [], input='?\nTEST\n')
expected_1 = 'Opt: ?\n'
expected_2 = 'Please enter some text\n'
expected_3 = 'Opt: TEST\nTEST\n'
self.assertIsNone(result.exception)
self.assertIn(expected_1, result.output)
self.assertIn(expected_2, result.output)
self.assertIn(expected_3, result.output)
def test_prompt_simple(self):
"""
scenario: using InteractiveOption with type=bool
behaviour: giving no option prompts, accepts 'true'
"""
params = [(bool, 'true', 'True'), (int, '98', '98'), (float, '3.14e-7', '3.14e-07')]
for ptype, cli_input, output in params:
cmd = self.simple_command(type=ptype, help='help msg')
runner = CliRunner()
result = runner.invoke(cmd, [], input='\n?\n{}\n'.format(cli_input))
expected_1 = 'Opt: \nOpt: ?\n'
expected_2 = 'help msg\n'
expected_2 += self.prompt_output(cli_input, output)
self.assertIsNone(result.exception)
self.assertIn(expected_1, result.output)
self.assertIn(expected_2, result.output)
@staticmethod
def strip_line(text):
"""returns text without the last line"""
return text.rsplit('\n')[0]
def test_prompt_complex(self):
"""
scenario: using InteractiveOption with type=float
behaviour: giving no option prompts, accepts 3.14e-7
"""
params = [(click.File(), __file__), (click.Path(exists=True), __file__)]
for ptype, cli_input in params:
cmd = self.simple_command(type=ptype, help='help msg')
runner = CliRunner()
result = runner.invoke(cmd, [], input='\n?\n{}\n'.format(cli_input))
expected_1 = 'Opt: \nOpt: ?\n'
expected_2 = 'help msg\n'
expected_2 += self.strip_line(self.prompt_output(cli_input))
self.assertIsNone(result.exception)
self.assertIn(expected_1, result.output)
self.assertIn(expected_2, result.output)
def test_default_value_prompt(self):
"""
scenario: using InteractiveOption with a default value, invoke without options
behaviour: prompt, showing the default value, take default on empty cli_input.
"""
returns = []
cmd = self.simple_command(default='default')
result = self.runner.invoke(cmd, [], input='\n')
returns.append(result)
expected = 'Opt [default]: \ndefault\n'
self.assertIsNone(result.exception)
self.assertIn(expected, result.output)
result = self.runner.invoke(cmd, [], input='TEST\n')
returns.append(result)
expected = 'Opt [default]: TEST\nTEST\n'
self.assertIsNone(result.exception)
self.assertIn(expected, result.output)
return returns
def test_default_value_empty_opt(self):
"""
scenario: InteractiveOption with default value, invoke with empty option (--opt=)
behaviour: accept empty string as input
"""
cmd = self.simple_command(default='default')
runner = CliRunner()
result = runner.invoke(cmd, ['--opt='])
expected = '\n'
self.assertIsNone(result.exception)
self.assertEqual(result.output, expected)
def test_opt_given_valid(self):
"""
scenario: InteractiveOption, invoked with a valid value on the cmdline
behaviour: accept valid value
"""
cmd = self.simple_command(type=int)
runner = CliRunner()
result = runner.invoke(cmd, ['--opt=4'])
expected = '4\n'
self.assertIsNone(result.exception)
self.assertEqual(result.output, expected)
def test_opt_given_invalid(self):
"""
scenario: InteractiveOption, invoked with a valid value on the cmdline
behaviour: accept valid value
"""
cmd = self.simple_command(type=int)
runner = CliRunner()
result = runner.invoke(cmd, ['--opt=foo'])
self.assertIsNotNone(result.exception)
self.assertIn('Invalid value', result.output)
def test_non_interactive(self):
"""
scenario: InteractiveOption, invoked with only --non-interactive (and the option is required)
        behaviour: fail
"""
cmd = self.simple_command(required=True)
runner = CliRunner()
result = runner.invoke(cmd, ['--non-interactive'])
self.assertIsNotNone(result.exception)
self.assertIn('Usage: ', result.output)
self.assertIn('Missing option', result.output)
def test_non_interactive_default(self):
"""
scenario: InteractiveOption, invoked with only --non-interactive
behaviour: fail
"""
cmd = self.simple_command(default='default')
runner = CliRunner()
result = runner.invoke(cmd, ['--non-interactive'])
self.assertIsNone(result.exception)
self.assertEqual(result.output, 'default\n')
@staticmethod
def user_callback(_ctx, param, value):
"""
        A fake user callback used for testing.
:param _ctx: The click context
:param param: The parameter name
:param value: The parameter value
:return: The validated parameter
"""
if not value:
return -1
elif value != 42:
raise click.BadParameter('invalid', param=param)
else:
return value
def test_after_callback_valid(self):
"""
scenario: InteractiveOption with a user callback
action: invoke with valid value
behaviour: user callback runs & succeeds
"""
cmd = self.simple_command(callback=self.user_callback, type=int)
result = self.runner.invoke(cmd, ['--opt=42'])
self.assertIsNone(result.exception)
self.assertEqual(result.output, '42\n')
def test_after_callback_invalid(self):
"""
scenario: InteractiveOption with a user callback
action: invoke with invalid value of right type
behaviour: user callback runs & succeeds
"""
cmd = self.simple_command(callback=self.user_callback, type=int)
result = self.runner.invoke(cmd, ['--opt=234234'])
self.assertIsNotNone(result.exception)
self.assertIn('Invalid value', result.output)
self.assertIn('invalid', result.output)
def test_after_callback_wrong_typ(self):
"""
scenario: InteractiveOption with a user callback
action: invoke with invalid value of wrong type
behaviour: user callback does not run
"""
cmd = self.simple_command(callback=self.user_callback, type=int)
result = self.runner.invoke(cmd, ['--opt=bla'])
self.assertIsNotNone(result.exception)
self.assertIn('Invalid value', result.output)
self.assertIn('bla', result.output)
def test_after_callback_empty(self):
"""
scenario: InteractiveOption with a user callback
action: invoke with invalid value of wrong type
behaviour: user callback does not run
"""
cmd = self.simple_command(callback=self.user_callback, type=int)
result = self.runner.invoke(cmd, ['--opt='])
self.assertIsNotNone(result.exception)
self.assertIn('Invalid value', result.output)
self.assertNotIn('empty', result.output)
def test_after_validation_interactive(self):
"""
Test that the type validation gets called on values entered at a prompt.
Scenario:
* InteractiveOption with custom type and prompt set
* invoked without passing the options
* on prompt: first enter an invalid value, then a valid one
Behaviour:
* Prompt for the value
* reject invalid value, prompt again
* accept valid value
"""
cmd = self.simple_command(callback=self.user_callback, type=Only42IntParamType())
result = self.runner.invoke(cmd, [], input='23\n42\n')
self.assertIsNone(result.exception)
self.assertIn('Opt: 23\n', result.output)
self.assertIn('Type validation: invalid', result.output)
self.assertIn('Opt: 42\n42\n', result.output)
def test_after_callback_default_noninteractive(self):
"""
Test that the callback gets called on the default, in line with click 6 behaviour.
Scenario:
* InteractiveOption with user callback and invalid default
* invoke with no options and --non-interactive
Behaviour:
* the default value gets passed through the callback and rejected
"""
# pylint: disable=invalid-name
cmd = self.simple_command(callback=self.user_callback, type=int, default=23)
result = self.runner.invoke(cmd, ['--non-interactive'])
self.assertIsNotNone(result.exception)
self.assertIn('Invalid value', result.output)
def test_default_empty_empty_cli(self):
"""Test that default="" allows to pass an empty cli option."""
cmd = self.simple_command(default="", type=str)
result = self.runner.invoke(cmd, ['--opt='])
self.assertIsNone(result.exception)
self.assertEqual(result.output, '\n')
def test_default_empty_prompt(self):
"""Test that default="" allows to pass an empty cli option."""
cmd = self.simple_command(default="", type=str)
result = self.runner.invoke(cmd, input='\n')
expected = 'Opt []: \n\n'
self.assertIsNone(result.exception)
self.assertIn(expected, result.output)
def test_prompt_dynamic_default(self):
"""Test that dynamic defaults for prompting still work."""
def test_not_required_noninteractive(self):
cmd = self.simple_command(required=False)
result = self.runner.invoke(cmd, ['--non-interactive'])
self.assertIsNone(result.exception)
        # the captured output typically ends with a trailing \n
self.assertEqual(result.output, 'None\n')
def test_not_required_interactive(self):
cmd = self.simple_command(required=False)
result = self.runner.invoke(cmd, input='value\n')
expected = 'Opt: value\nvalue\n'
self.assertIsNone(result.exception)
self.assertIn(expected, result.output)
def test_not_required_noninteractive_default(self):
cmd = self.simple_command(required=False, default='')
result = self.runner.invoke(cmd, ['--non-interactive'])
self.assertIsNone(result.exception)
self.assertEqual(result.output, '\n')
def test_not_required_interactive_default(self):
cmd = self.simple_command(required=False, default='')
result = self.runner.invoke(cmd, input='\nnot needed\n')
expected = 'Opt []: \n\n'
self.assertIsNone(result.exception)
self.assertIn(expected, result.output)
| [((1079, 1094), 'click.command', 'click.command', ([], {}), '()\n', (1092, 1094), False, 'import click\n'), ((1104, 1172), 'click.option', 'click.option', (['"""--opt"""'], {'prompt': '"""Opt"""', 'cls': 'InteractiveOption'}), "('--opt', prompt='Opt', cls=InteractiveOption, **kwargs)\n", (1116, 1172), False, 'import click\n'), ((1182, 1199), 'aiida.cmdline.params.options.NON_INTERACTIVE', 'NON_INTERACTIVE', ([], {}), '()\n', (1197, 1199), False, 'from aiida.cmdline.params.options import NON_INTERACTIVE\n'), ((1456, 1467), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (1465, 1467), False, 'from click.testing import CliRunner\n'), ((1966, 1977), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (1975, 1977), False, 'from click.testing import CliRunner\n'), ((2476, 2487), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (2485, 2487), False, 'from click.testing import CliRunner\n'), ((3014, 3025), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (3023, 3025), False, 'from click.testing import CliRunner\n'), ((3749, 3760), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (3758, 3760), False, 'from click.testing import CliRunner\n'), ((6921, 6932), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (6930, 6932), False, 'from click.testing import CliRunner\n'), ((7338, 7349), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (7347, 7349), False, 'from click.testing import CliRunner\n'), ((7759, 7770), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (7768, 7770), False, 'from click.testing import CliRunner\n'), ((8176, 8187), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (8185, 8187), False, 'from click.testing import CliRunner\n'), ((8633, 8644), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (8642, 8644), False, 'from click.testing import CliRunner\n'), ((4537, 4548), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (4546, 4548), False, 'from click.testing import CliRunner\n'), ((5447, 5458), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (5456, 5458), False, 'from click.testing import CliRunner\n'), ((5257, 5269), 'click.File', 'click.File', ([], {}), '()\n', (5267, 5269), False, 'import click\n'), ((5283, 5306), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (5293, 5306), False, 'import click\n'), ((9185, 9227), 'click.BadParameter', 'click.BadParameter', (['"""invalid"""'], {'param': 'param'}), "('invalid', param=param)\n", (9203, 9227), False, 'import click\n')] |
fabmiz/osf.io | scripts/migration/migrate_registered_meta.py | 8d86af3f0a6e5388bd5b18383e68e27b65a66247 | """
Changes existing registered_meta on a node to new schema layout
required for the prereg-prize
"""
import json
import sys
import logging
from modularodm import Q
from framework.mongo import database as db
from framework.mongo.utils import from_mongo
from framework.transactions.context import TokuTransaction
from website.models import MetaSchema
from website.app import init_app
from website.project.metadata.schemas import _id_to_name
from scripts import utils as scripts_utils
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
def prepare_nodes(_db=None):
_db = _db or db
_db['node'].update(
{},
{
'$set': {
'registered_schema': []
}
},
multi=True
)
def from_json_or_fail(schema):
# Unstringify stored metadata
try:
schema = json.loads(schema) if schema else {}
except TypeError as e:
if isinstance(schema, dict):
pass
else:
raise e
return schema
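
# Illustrative shape of the rewrite performed in main() below (the schema and
# metaschema ids are hypothetical placeholders):
#   before: node['registered_meta']   == {'<schema_id>': '{"q1": "an answer"}'}
#   after:  node['registered_meta']   == {'<metaschema_id>': {'q1': {'value': 'an answer'}}}
#           node['registered_schema'] == ['<metaschema_id>']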
def main(dev=False, _db=None):
_db = _db or db
init_app(routes=False)
count = 0
skipped = 0
scripts_utils.add_file_logger(logger, __file__)
logger.info("Iterating over all registrations")
# convert registered_schema to list field
    prepare_nodes(_db)
node_documents = _db['node'].find({'is_registration': True})
for node in node_documents:
registered_schemas = []
registered_meta = {}
schemas = node['registered_meta']
if not schemas:
logger.info('Node: {0} is registered but has no registered_meta'.format(node['_id']))
continue
for schema_id, schema in schemas.iteritems():
name = _id_to_name(from_mongo(schema_id))
schema = from_json_or_fail(schema)
# append matching schema to node.registered_schema
try:
meta_schema = MetaSchema.find(
Q('name', 'eq', name)
).sort('-schema_version')[0]
except IndexError as e:
logger.error('No MetaSchema matching name: {0} found for node: {1}.'.format(name, node['_id']))
# Skip over missing schemas
skipped += 1
if dev:
continue
else:
raise e
else:
registered_meta[meta_schema._id] = {
key: {
'value': value
}
for key, value in schema.items()
}
registered_schemas.append(meta_schema._id)
        _db['node'].update(
{'_id': node['_id']},
{'$set': {
'registered_meta': registered_meta,
'registered_schema': registered_schemas
}}
)
count = count + 1
logger.info('Done with {0} nodes migrated and {1} nodes skipped.'.format(count, skipped))
if __name__ == '__main__':
dry_run = 'dry' in sys.argv
dev = 'dev' in sys.argv
with TokuTransaction():
main(dev=dev)
if dry_run:
raise RuntimeError('Dry run, rolling back transaction.')
| [((497, 524), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (514, 524), False, 'import logging\n'), ((1081, 1103), 'website.app.init_app', 'init_app', ([], {'routes': '(False)'}), '(routes=False)\n', (1089, 1103), False, 'from website.app import init_app\n'), ((1138, 1185), 'scripts.utils.add_file_logger', 'scripts_utils.add_file_logger', (['logger', '__file__'], {}), '(logger, __file__)\n', (1167, 1185), True, 'from scripts import utils as scripts_utils\n'), ((3065, 3082), 'framework.transactions.context.TokuTransaction', 'TokuTransaction', ([], {}), '()\n', (3080, 3082), False, 'from framework.transactions.context import TokuTransaction\n'), ((855, 873), 'json.loads', 'json.loads', (['schema'], {}), '(schema)\n', (865, 873), False, 'import json\n'), ((1734, 1755), 'framework.mongo.utils.from_mongo', 'from_mongo', (['schema_id'], {}), '(schema_id)\n', (1744, 1755), False, 'from framework.mongo.utils import from_mongo\n'), ((1951, 1972), 'modularodm.Q', 'Q', (['"""name"""', '"""eq"""', 'name'], {}), "('name', 'eq', name)\n", (1952, 1972), False, 'from modularodm import Q\n')] |
hypernicon/pyec | pyec/distribution/bayes/structure/basic.py | 7072835c97d476fc45ffc3b34f5c3ec607988e6d | """
Copyright (C) 2012 Alan J Lockett
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from numpy import *
import sys
import weakref
class CyclicException(Exception):
pass
class DuplicateEdgeException(Exception):
pass
class IrreversibleEdgeException(Exception):
pass
class StructureSearch(object):
def __init__(self, scorer, autocommit=False):
self.scorer = scorer
self.autocommit = autocommit
self.network = None
def canReverse(self, newChild, newParent):
"""
check to ensure reverse link is not already present
(In a DAG, it should not be)
"""
if newChild.parents.has_key(newParent.index):
return False
return True
def admissibleEdge(self, var1, var2):
"""Is edge admissible in a DAG?"""
if var1.index == var2.index:
return False
if var1.parents.has_key(var2.index):
return False
if var2.parents.has_key(var1.index):
return False
return True
def merge(self, net, other, data, allowCyclic=False):
"""add the edges from other to self, preventing cycles if asked"""
self.network = net
net.computeEdgeStatistics()
other.computeEdgeStatistics()
indexMap = dict([(v.index, v) for v in net.variables])
undoList = []
def undo(update=True):
for undo2 in reversed(undoList):
undo2(False)
for frm, to in other.edges:
try:
frm2 = indexMap[frm.index]
to2 = indexMap[to.index]
undo2 = self.addEdge(to2, frm2, data, allowCyclic)
frm2.children = None
undoList.append(undo2)
except Exception, msg:
pass
self.network = None
return undo
def cross(self, net, other, data, allowCyclic=False):
self.network = net
net.computeEdgeStatistics()
other.computeEdgeStatistics()
indexMap = dict([(v.index, v) for v in net.variables])
indexMap2 = dict([(v.index, v) for v in other.variables])
undoList = []
if len(net.edges) == 0: return other
if len(other.edges) == 0: return net
if len(net.edges) < net.numVariables / 2 and len(other.edges) < other.numVariables / 2:
return net
def undo(update=True):
for undo2 in reversed(undoList):
undo2(False)
for variable in net.variables:
# pick a parent
         if random.random_sample() < 0.5:
            # Add relationships from other, avoiding cycles.
            # items() snapshots the parent dict so removeEdge may mutate it.
            for idx, parent in variable.parents.items():
               undoList.append(self.removeEdge(idx, variable, data))
               parent.children = None
            v2 = indexMap2[variable.index]
            for idx, parent2 in v2.parents.iteritems():
               try:
                  parent = indexMap[parent2.index]
                  undoList.append(self.addEdge(variable, parent, data, allowCyclic))
                  parent.children = None
               except Exception, msg:
                  pass
net.computeEdgeStatistics()
self.network = None
return undo
def removeEdge(self, i, variable, data=None):
self.network.computeEdgeStatistics()
oldstate = self.network.getComputedState()
toRemove = variable.parents[i]
variable.removeParent(toRemove)
toRemove.children = None
self.network.dirty = True
netref = weakref.ref(self.network)
varref = weakref.ref(variable)
remref = weakref.ref(toRemove)
def undo(update=True):
var = varref()
rem = remref()
net = netref()
if var is not None and rem is not None and net is not None:
var.addParent(rem)
rem.children = None
net.restoreComputedState(oldstate)
try:
self.network.updateVar(variable, data)
except:
undo()
raise
return undo
def addEdge(self, child, parent, data = None, allowCyclic = False):
self.network.computeEdgeStatistics()
oldstate = self.network.getComputedState()
if child.parents.has_key(parent.index):
raise DuplicateEdgeException, "Edge already exists"
child.addParent(parent)
parent.children = None
self.network.dirty = True
parentref = weakref.ref(parent)
childref = weakref.ref(child)
netref = weakref.ref(self.network)
def undo(update=True):
parent = parentref()
child = childref()
network = netref()
if parent is not None and child is not None and network is not None:
parent.children = None
child.removeParent(parent)
network.restoreComputedState(oldstate)
if (not allowCyclic) and not self.network.isAcyclic():
undo()
raise CyclicException, "Adding an edge makes network cyclic"
try:
self.network.updateVar(child, data)
except:
undo()
raise
return undo
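
   # Usage sketch (my addition): a search step can trial an edge and roll back;
   # the scoring call shape here is hypothetical:
   #   undo = search.addEdge(child, parent, data)
   #   if score_got_worse:
   #      undo()   # restores the parent set and the cached network state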
def reverseEdge(self, i, variable, data=None, allowCyclic = False):
"""toReverse is new child, variable is new parent"""
self.network.computeEdgeStatistics()
oldstate = self.network.getComputedState()
toReverse = variable.parents[i]
if not self.canReverse(toReverse, variable):
raise IrreversibleEdgeException, "Edge reversal disallowed"
variable.removeParent(toReverse)
toReverse.addParent(variable)
variable.children = None
toReverse.children = None
self.network.dirty = True
varref = weakref.ref(variable)
revref = weakref.ref(toReverse)
netref = weakref.ref(self.network)
def undo(update=True):
variable = varref()
toReverse = revref()
network = netref()
if (variable is not None and
toReverse is not None and
network is not None):
variable.addParent(toReverse)
toReverse.removeParent(variable)
network.restoreComputedState(oldstate)
if (not allowCyclic) and not self.network.isAcyclic():
undo()
raise CyclicException, "Reversing an edge makes nework cyclic"
try:
self.network.updateVar(variable, data)
self.network.updateVar(toReverse, data)
except:
undo()
raise
return undo
def attempt(self, fn, exc):
try:
return fn()
except:
exc()
raise | [] |
pingrunhuang/CodeChallenge | graph/tsp.py | a8e5274e04c47d851836197907266418af4f1a22 | """
given a fully connected undirected graph (if no path exists between two cities, adding an arbitrarily long edge completes the graph without affecting the optimal tour),
find the lowest-cost tour for a salesman starting from a given vertex
"""
import time
class Edge:
def __init__(self, target, weight):
self.target = target
self.weight = weight
def __repr__(self):
return self.target
class TSP(object):
"""
This is a fully connected graph with edge weight value positive
"""
def __init__(self):
self.graph = {}
self.prev = {}
self.start = None
def add_vertex(self, name, edges):
self.graph[name] = edges
    def permutation(self, edge, result=None):
        # avoid the shared mutable default-argument pitfall
        if result is None:
            result = []
        if edge.target == self.start:
return result
for x in result:
if x.target == edge.target:
return result
result.append(edge)
for next_edge in self.graph[edge.target]:
self.permutation(next_edge, result)
return result
def tsp_recursive(self, start):
"""
Essentially, the tsp problem is a permutation problem
"""
self.start = start
result = []
for edge in self.graph[start]:
result.append(self.permutation(edge, [Edge(start, 0)]))
        smallest_val = float('inf')
        print(result)
        path = []
        for solution in result:
            total_cost = sum(map(lambda x: x.weight, solution))
            if total_cost < smallest_val:
path = solution
smallest_val = total_cost
return (smallest_val, path)
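
    # Reference sketch (my addition, not the author's method): an exhaustive
    # check could score every ordering of the remaining cities, e.g.
    #   from itertools import permutations
    #   candidate_tours = permutations(set(self.graph) - {start})
    # keeping the ordering whose summed edge weights along the tour is smallest.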
def tsp_dp(self, graph, start):
pass
if __name__ == "__main__":
tsp = TSP()
tsp.add_vertex('w', [Edge('y', 1), Edge('x', 6), Edge('z', 3)])
tsp.add_vertex('x', [Edge('w', 6), Edge('z', 3), Edge('y', 4)])
tsp.add_vertex('z', [Edge('y', 2), Edge('w', 3), Edge('x', 3)])
tsp.add_vertex('y', [Edge('w', 1), Edge('x', 3), Edge('z', 2)])
result = tsp.tsp_recursive('x')
print(result) | [] |
only-romano/junkyard | projects/code_combat/8_Cloudrip_Mountain/471-Distracting_Dungeon/distracting_dungeon.py | b60a25b2643f429cdafee438d20f9966178d6f36 | def moveBothTo(point):
while hero.distanceTo(point) > 1:
hero.move(point)
hero.command(peasant, "move", point)
peasant = hero.findNearest(hero.findFriends())
while True:
hero.command(peasant, "buildXY", "decoy", peasant.pos.x + 2, peasant.pos.y)
    nextPoint = {"x": hero.pos.x, "y": hero.pos.y + 28}
moveBothTo(nextPoint)
nextPoint = {"x": hero.pos.x + 28, "y": hero.pos.y}
    enemy = hero.findNearestEnemy()
while enemy:
while enemy.health > 0:
hero.attack(enemy)
enemy = hero.findNearestEnemy()
moveBothTo(nextPoint)
| [] |
pflun/learningAlgorithms | firstBadVersion.py | 3101e989488dfc8a56f1bf256a1c03a837fe7d97 | # The isBadVersion API is already defined for you.
# @param version, an integer
# @return a bool
# def isBadVersion(version):
class Solution(object):
def firstBadVersion(self, n):
start = 1
end = n
while start + 1 < end:
            mid = start + (end - start) // 2
if isBadVersion(mid):
end = mid
else:
start = mid
if isBadVersion(start):
return start
elif isBadVersion(end):
return end
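
# Local harness sketch (my addition; on the judge, isBadVersion is provided):
#   FIRST_BAD = 4
#   def isBadVersion(version):
#       return version >= FIRST_BAD
#   print(Solution().firstBadVersion(10))  # -> 4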
| [] |
QizaiMing/ergo-project-manager | issues/migrations/0001_initial.py | 2b02b2ab6d9e48bfccbbca8c05180b07177dcb77 | # Generated by Django 2.2.12 on 2020-05-01 03:34
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Issue',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('description', models.TextField(max_length=2000)),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('status', models.CharField(choices=[('To Do', 'To Do'), ('In Progress', 'In Progress'), ('Done', 'Done')], default='To Do', max_length=20)),
('priority', models.CharField(choices=[('Low', 'Low'), ('Medium', 'Medium'), ('High', 'High')], default='Low', max_length=20)),
('assignee', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assigned', to=settings.AUTH_USER_MODEL)),
('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issues', to=settings.AUTH_USER_MODEL)),
('linked_to', models.ManyToManyField(related_name='_issue_linked_to_+', to='issues.Issue')),
],
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('content', models.TextField(max_length=1000)),
('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to=settings.AUTH_USER_MODEL)),
('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='issues.Issue')),
],
),
migrations.CreateModel(
name='Attachment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('file', models.FileField(upload_to='media/files')),
('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='attachments', to='issues.Issue')),
],
),
]
| [((248, 305), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (279, 305), False, 'from django.db import migrations, models\n'), ((435, 528), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (451, 528), False, 'from django.db import migrations, models\n'), ((553, 585), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (569, 585), False, 'from django.db import migrations, models\n'), ((620, 653), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(2000)'}), '(max_length=2000)\n', (636, 653), False, 'from django.db import migrations, models\n'), ((684, 723), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (704, 723), False, 'from django.db import migrations, models\n'), ((755, 790), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (775, 790), False, 'from django.db import migrations, models\n'), ((820, 953), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('To Do', 'To Do'), ('In Progress', 'In Progress'), ('Done', 'Done')]", 'default': '"""To Do"""', 'max_length': '(20)'}), "(choices=[('To Do', 'To Do'), ('In Progress', 'In Progress'\n ), ('Done', 'Done')], default='To Do', max_length=20)\n", (836, 953), False, 'from django.db import migrations, models\n'), ((980, 1096), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('Low', 'Low'), ('Medium', 'Medium'), ('High', 'High')]", 'default': '"""Low"""', 'max_length': '(20)'}), "(choices=[('Low', 'Low'), ('Medium', 'Medium'), ('High',\n 'High')], default='Low', max_length=20)\n", (996, 1096), False, 'from django.db import migrations, models\n'), ((1124, 1245), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""assigned"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='assigned', to=settings.AUTH_USER_MODEL)\n", (1141, 1245), False, 'from django.db import migrations, models\n'), ((1271, 1390), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""issues"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='issues', to=settings.AUTH_USER_MODEL)\n", (1288, 1390), False, 'from django.db import migrations, models\n'), ((1418, 1494), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""_issue_linked_to_+"""', 'to': '"""issues.Issue"""'}), "(related_name='_issue_linked_to_+', to='issues.Issue')\n", (1440, 1494), False, 'from django.db import migrations, models\n'), ((1627, 1720), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1643, 1720), False, 'from django.db import migrations, models\n'), ((1747, 1786), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1767, 1786), False, 'from 
django.db import migrations, models\n'), ((1817, 1850), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)'}), '(max_length=1000)\n', (1833, 1850), False, 'from django.db import migrations, models\n'), ((1881, 2002), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""comments"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='comments', to=settings.AUTH_USER_MODEL)\n", (1898, 2002), False, 'from django.db import migrations, models\n'), ((2026, 2137), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""comments"""', 'to': '"""issues.Issue"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='comments', to='issues.Issue')\n", (2043, 2137), False, 'from django.db import migrations, models\n'), ((2268, 2361), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2284, 2361), False, 'from django.db import migrations, models\n'), ((2385, 2426), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': '"""media/files"""'}), "(upload_to='media/files')\n", (2401, 2426), False, 'from django.db import migrations, models\n'), ((2455, 2569), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""attachments"""', 'to': '"""issues.Issue"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='attachments', to='issues.Issue')\n", (2472, 2569), False, 'from django.db import migrations, models\n')] |
ffffff0x/python-hacker | com/binghe/hacker/tools/script/ak/check_virus.py | a2dc7f9031669a86bd2c87892c0a8c1e54bb2a79 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Date: 2019/2/22
# Created by 冰河
# Description: submits the generated bindshell.exe to vscan.novirusthanks.org for scanning
# Usage: python check_virus.py -f bindshell.exe
# Blog: https://blog.csdn.net/l1028386804
import re
import httplib
import time
import os
import optparse
from urlparse import urlparse
def printResults(url):
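    # Poll the scan URL until the server redirects (HTTP 302) to the analysis
    # page, then scrape the detection rate out of the report HTML.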
status = 200
host = urlparse(url)[1]
path = urlparse(url)[2]
if 'analysis' not in path:
while status != 302:
conn = httplib.HTTPConnection(host)
conn.request('GET', path)
resp = conn.getresponse()
status = resp.status
print '[+] Scanning file...'
conn.close()
time.sleep(15)
print '[+] Scan Complete.'
path = path.replace('file', 'analysis')
conn = httplib.HTTPConnection(host)
conn.request('GET', path)
resp = conn.getresponse()
data = resp.read()
conn.close()
reResults = re.findall(r'Detection rate:.*\)', data)
htmlStripRes = reResults[1]. \
replace('<font color=\'red\'>', ''). \
replace('</font>', '')
print '[+] ' + str(htmlStripRes)
def uploadFile(fileName):
print "[+] Uploading file to NoVirusThanks..."
fileContents = open(fileName, 'rb').read()
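    # Build the multipart/form-data body by hand; the boundary string below
    # must match the one declared in the Content-Type header.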
header = {'Content-Type': 'multipart/form-data; \
boundary=----WebKitFormBoundaryF17rwCZdGuPNPT9U'}
params = "------WebKitFormBoundaryF17rwCZdGuPNPT9U"
params += "\r\nContent-Disposition: form-data; " + \
"name=\"upfile\"; filename=\"" + str(fileName) + "\""
params += "\r\nContent-Type: " + \
"application/octet stream\r\n\r\n"
params += fileContents
params += "\r\n------WebKitFormBoundaryF17rwCZdGuPNPT9U"
params += "\r\nContent-Disposition: form-data; " + \
"name=\"submitfile\"\r\n"
params += "\r\nSubmit File\r\n"
params += "------WebKitFormBoundaryF17rwCZdGuPNPT9U--\r\n"
conn = httplib.HTTPConnection('vscan.novirusthanks.org')
conn.request("POST", "/", params, header)
response = conn.getresponse()
location = response.getheader('location')
conn.close()
return location
def main():
parser = optparse.OptionParser('usage %prog -f <filename>')
parser.add_option('-f', dest='fileName', type='string', \
help='specify filename')
(options, args) = parser.parse_args()
fileName = options.fileName
    if fileName is None:
print parser.usage
exit(0)
    elif not os.path.isfile(fileName):
print '[+] ' + fileName + ' does not exist.'
exit(0)
else:
loc = uploadFile(fileName)
printResults(loc)
if __name__ == '__main__':
main() | [] |
LoganHaug/reminder-bot | cogs/remind.py | 1bb1853b79e0299240a214e947e8bc29ed34e46e | import asyncio
from typing import Union
import datetime
import time
from discord.ext import commands
import yaml
from cogs import checks
import database
import utils
# Loads the repeating interval dictionary
with open("conversions.yml", "r") as conversion_file:
conversion_dict = yaml.load(conversion_file, Loader=yaml.Loader)
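# conversions.yml is assumed to map each interval name to its length in
# seconds (e.g. daily: 86400, weekly: 604800), since schedule_repeat() adds
# these values directly to Unix timestamps.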
prefix = utils.get_prefix()
class Remind(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.reminders = []
self.tasks = []
asyncio.create_task(self.update_schedule())
async def update_schedule(self):
"""Updates the schedule"""
reminders = database.get_reminders()
new_reminders = []
for reminder in reminders:
if reminder["date"] - time.time() < 0:
database.remove_reminder(reminder)
else:
new_reminders.append(reminder)
self.reminders.clear()
self.reminders.extend(new_reminders)
async def setup_reminders(self):
"""Sets up the reminders"""
await self.clear_tasks()
await self.update_schedule()
scheduled_reminders = []
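        # A coroutine's cr_frame is None once it has finished; while it is
        # alive, f_locals exposes the reminder argument bound to the task.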
for task in self.tasks:
if task.get_coro().cr_frame is not None:
scheduled_reminders.append(
task.get_coro().cr_frame.f_locals["reminder"]
)
# Create tasks for all reminders, call the remind function
for reminder in self.reminders:
if reminder not in scheduled_reminders:
task = asyncio.create_task(self.remind(reminder))
self.tasks.append(task)
scheduled_reminders.append(
task.get_coro().cr_frame.f_locals["reminder"]
)
# Run the tasks
asyncio.gather(*self.tasks)
async def clear_tasks(self):
        # Iterate over a copy: removing items from a list while iterating it skips elements
        for task in list(self.tasks):
            if task.done():
                self.tasks.remove(task)
async def remind(self, reminder: dict):
"""Execute one reminder"""
# Check if the reminder is in the future and if it exists in the database
if reminder["date"] > time.time() and database.get_reminders(**reminder) != []:
await asyncio.sleep(reminder["date"] - time.time())
            # Checks that the reminder still exists, in case it was deleted
if database.get_reminders(**reminder) != [] and reminder in self.reminders:
await self.bot.get_channel(reminder["channel"]).send(
f"Reminder:\n{reminder['reminder_text']}"
)
if reminder["repeating"] != False:
await self.schedule_repeat(reminder)
self.reminders.remove(reminder)
# Remove the reminder
database.remove_reminder(reminder)
# Remove a reminder that has passed
else:
database.remove_reminder(reminder)
async def schedule_repeat(self, reminder: dict):
"""Schedules a repeating reminder"""
if reminder["repeating"] and database.get_reminders(**reminder) != []:
# Calculate when the next reminder should be
reminder_date = datetime.datetime.fromtimestamp(
reminder["date"] + conversion_dict[reminder["repeating"]]
)
# Remove the old reminder
database.remove_reminder(reminder)
# Add the new reminder
database.insert_reminder(
reminder["guild"],
reminder["channel"],
reminder_date.year,
reminder_date.month,
reminder_date.day,
reminder_date.hour,
reminder_date.minute,
reminder["reminder_text"],
reminder["repeating"],
)
asyncio.create_task(self.setup_reminders())
@commands.command(
help="Date should be in month/day/year format, either with slashes or dashes (ex. month/day/year or month-day-year)\n\nRepeating is an interval of time after which the reminder should be sent again, must be either daily, weekly, biweekly, or triweekly\n\nText is the text the reminder will be sent with, wrap with quotations if this contains whitespace",
aliases=["reminder", "add_r", "ar"],
)
@commands.check(checks.is_operator)
async def add_reminder(
self,
ctx,
date: str,
user_time: str,
text: str,
repeating: Union[str, bool] = False,
):
"""Attempts to add a reminder"""
        # Parse the date and time; malformed input raises a UserInputError
try:
_date = utils.split_date(date)
_time = utils.split_time(user_time)
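            # split_date/split_time return dicts keyed by year/month/day and
            # hour/minute respectively (consumed by insert_reminder below)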
except UnboundLocalError:
raise commands.UserInputError("Date or time was not in the correct format.")
if repeating and repeating not in conversion_dict:
raise commands.UserInputError()
# Tries to insert the reminder
result = database.insert_reminder(
ctx.guild.id,
ctx.channel.id,
_date["year"],
_date["month"],
_date["day"],
_time["hour"],
_time["minute"],
text,
repeating,
)
# Sends a status message, and restarts the reminders
if result:
await asyncio.create_task(self.setup_reminders())
await ctx.send(
embed=utils.generate_embed(
"Reminder Stored",
f"{date}\n{user_time}\n{text}\nrepeating: {repeating}",
)
)
# This means the insertion of the reminder failed
else:
await ctx.send(
embed=utils.generate_embed(
"Error",
"`This reminder already exists in the database or is not in the future`",
)
)
@add_reminder.error
async def add_reminder_error(self, ctx, error):
"""Called when add_reminder() errors"""
print(error)
if isinstance(error, commands.errors.MissingRequiredArgument):
await ctx.send(
embed=utils.generate_embed(
"Error", f"`{error} Run {prefix}help add_reminder`"
)
)
elif isinstance(error, commands.errors.UserInputError):
await ctx.send(
embed=utils.generate_embed(
"Error", f"`{error} Run {prefix}help add_reminder`"
)
)
elif isinstance(error, commands.errors.CheckFailure):
await ctx.send(
embed=utils.generate_embed(
"Error", "`You do not have permissions for this command`"
)
)
else:
await ctx.send(
embed=utils.generate_embed(
"Error",
f"`An unexpected error has occured, run {prefix}help add_reminder`",
)
)
def setup(bot):
cog = Remind(bot)
bot.add_cog(cog)
asyncio.create_task(cog.setup_reminders())
| [((346, 364), 'utils.get_prefix', 'utils.get_prefix', ([], {}), '()\n', (362, 364), False, 'import utils\n'), ((288, 334), 'yaml.load', 'yaml.load', (['conversion_file'], {'Loader': 'yaml.Loader'}), '(conversion_file, Loader=yaml.Loader)\n', (297, 334), False, 'import yaml\n'), ((3895, 4313), 'discord.ext.commands.command', 'commands.command', ([], {'help': '"""Date should be in month/day/year format, either with slashes or dashes (ex. month/day/year or month-day-year)\n\nRepeating is an interval of time after which the reminder should be sent again, must be either daily, weekly, biweekly, or triweekly\n\nText is the text the reminder will be sent with, wrap with quotations if this contains whitespace"""', 'aliases': "['reminder', 'add_r', 'ar']"}), '(help=\n """Date should be in month/day/year format, either with slashes or dashes (ex. month/day/year or month-day-year)\n\nRepeating is an interval of time after which the reminder should be sent again, must be either daily, weekly, biweekly, or triweekly\n\nText is the text the reminder will be sent with, wrap with quotations if this contains whitespace"""\n , aliases=[\'reminder\', \'add_r\', \'ar\'])\n', (3911, 4313), False, 'from discord.ext import commands\n'), ((4332, 4366), 'discord.ext.commands.check', 'commands.check', (['checks.is_operator'], {}), '(checks.is_operator)\n', (4346, 4366), False, 'from discord.ext import commands\n'), ((644, 668), 'database.get_reminders', 'database.get_reminders', ([], {}), '()\n', (666, 668), False, 'import database\n'), ((1797, 1824), 'asyncio.gather', 'asyncio.gather', (['*self.tasks'], {}), '(*self.tasks)\n', (1811, 1824), False, 'import asyncio\n'), ((5041, 5194), 'database.insert_reminder', 'database.insert_reminder', (['ctx.guild.id', 'ctx.channel.id', "_date['year']", "_date['month']", "_date['day']", "_time['hour']", "_time['minute']", 'text', 'repeating'], {}), "(ctx.guild.id, ctx.channel.id, _date['year'], _date\n ['month'], _date['day'], _time['hour'], _time['minute'], text, repeating)\n", (5065, 5194), False, 'import database\n'), ((2801, 2835), 'database.remove_reminder', 'database.remove_reminder', (['reminder'], {}), '(reminder)\n', (2825, 2835), False, 'import database\n'), ((2906, 2940), 'database.remove_reminder', 'database.remove_reminder', (['reminder'], {}), '(reminder)\n', (2930, 2940), False, 'import database\n'), ((3204, 3299), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (["(reminder['date'] + conversion_dict[reminder['repeating']])"], {}), "(reminder['date'] + conversion_dict[reminder\n ['repeating']])\n", (3235, 3299), False, 'import datetime\n'), ((3375, 3409), 'database.remove_reminder', 'database.remove_reminder', (['reminder'], {}), '(reminder)\n', (3399, 3409), False, 'import database\n'), ((3457, 3685), 'database.insert_reminder', 'database.insert_reminder', (["reminder['guild']", "reminder['channel']", 'reminder_date.year', 'reminder_date.month', 'reminder_date.day', 'reminder_date.hour', 'reminder_date.minute', "reminder['reminder_text']", "reminder['repeating']"], {}), "(reminder['guild'], reminder['channel'],\n reminder_date.year, reminder_date.month, reminder_date.day,\n reminder_date.hour, reminder_date.minute, reminder['reminder_text'],\n reminder['repeating'])\n", (3481, 3685), False, 'import database\n'), ((4688, 4710), 'utils.split_date', 'utils.split_date', (['date'], {}), '(date)\n', (4704, 4710), False, 'import utils\n'), ((4731, 4758), 'utils.split_time', 'utils.split_time', (['user_time'], {}), '(user_time)\n', (4747, 4758), 
False, 'import utils\n'), ((4959, 4984), 'discord.ext.commands.UserInputError', 'commands.UserInputError', ([], {}), '()\n', (4982, 4984), False, 'from discord.ext import commands\n'), ((798, 832), 'database.remove_reminder', 'database.remove_reminder', (['reminder'], {}), '(reminder)\n', (822, 832), False, 'import database\n'), ((2165, 2176), 'time.time', 'time.time', ([], {}), '()\n', (2174, 2176), False, 'import time\n'), ((2181, 2215), 'database.get_reminders', 'database.get_reminders', ([], {}), '(**reminder)\n', (2203, 2215), False, 'import database\n'), ((3077, 3111), 'database.get_reminders', 'database.get_reminders', ([], {}), '(**reminder)\n', (3099, 3111), False, 'import database\n'), ((4811, 4881), 'discord.ext.commands.UserInputError', 'commands.UserInputError', (['"""Date or time was not in the correct format."""'], {}), "('Date or time was not in the correct format.')\n", (4834, 4881), False, 'from discord.ext import commands\n'), ((765, 776), 'time.time', 'time.time', ([], {}), '()\n', (774, 776), False, 'import time\n'), ((2376, 2410), 'database.get_reminders', 'database.get_reminders', ([], {}), '(**reminder)\n', (2398, 2410), False, 'import database\n'), ((2274, 2285), 'time.time', 'time.time', ([], {}), '()\n', (2283, 2285), False, 'import time\n'), ((5501, 5601), 'utils.generate_embed', 'utils.generate_embed', (['"""Reminder Stored"""', 'f"""{date}\n{user_time}\n{text}\nrepeating: {repeating}"""'], {}), '(\'Reminder Stored\',\n f"""{date}\n{user_time}\n{text}\nrepeating: {repeating}""")\n', (5521, 5601), False, 'import utils\n'), ((5792, 5899), 'utils.generate_embed', 'utils.generate_embed', (['"""Error"""', '"""`This reminder already exists in the database or is not in the future`"""'], {}), "('Error',\n '`This reminder already exists in the database or is not in the future`')\n", (5812, 5899), False, 'import utils\n'), ((6236, 6309), 'utils.generate_embed', 'utils.generate_embed', (['"""Error"""', 'f"""`{error} Run {prefix}help add_reminder`"""'], {}), "('Error', f'`{error} Run {prefix}help add_reminder`')\n", (6256, 6309), False, 'import utils\n'), ((6476, 6549), 'utils.generate_embed', 'utils.generate_embed', (['"""Error"""', 'f"""`{error} Run {prefix}help add_reminder`"""'], {}), "('Error', f'`{error} Run {prefix}help add_reminder`')\n", (6496, 6549), False, 'import utils\n'), ((6714, 6793), 'utils.generate_embed', 'utils.generate_embed', (['"""Error"""', '"""`You do not have permissions for this command`"""'], {}), "('Error', '`You do not have permissions for this command`')\n", (6734, 6793), False, 'import utils\n'), ((6910, 7012), 'utils.generate_embed', 'utils.generate_embed', (['"""Error"""', 'f"""`An unexpected error has occured, run {prefix}help add_reminder`"""'], {}), "('Error',\n f'`An unexpected error has occured, run {prefix}help add_reminder`')\n", (6930, 7012), False, 'import utils\n')] |
csengor/toraman_py | setup.py | 5cb7b39ae073ecc2adcb7cea83b79492ac5aa485 | import setuptools
from toraman.version import __version__
with open('README.md', 'r') as input_file:
long_description = input_file.read()
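# The README doubles as the PyPI long description; the content type below
# tells PyPI to render it as Markdown.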
setuptools.setup(
name='toraman',
version=__version__,
author='Çağatay Onur Şengör',
author_email='[email protected]',
description='A computer-assisted translation tool package',
    keywords=['CAT', 'computer-assisted translation', 'computer-aided translation', 'translation', 'free-to-use'],
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/csengor/toraman-py',
packages=setuptools.find_packages(),
install_requires=[
'lxml',
'python-levenshtein',
'regex'
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
],
)
| [((615, 641), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (639, 641), False, 'import setuptools\n')] |
li-ar/declarations.com.ua | declarations_site/cms_pages/migrations/0015_auto_20150615_0201.py | 343cd86cc5a4bd895f2859ed896728f6416ac223 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cms_pages', '0014_homepage_news_count'),
]
operations = [
migrations.AlterField(
model_name='newspage',
name='lead',
field=models.TextField(blank=True, verbose_name='Лід'),
preserve_default=True,
),
]
| [((355, 403), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'verbose_name': '"""Лід"""'}), "(blank=True, verbose_name='Лід')\n", (371, 403), False, 'from django.db import models, migrations\n')] |
nightlessbaron/pytorch-lightning | tests/models/test_grad_norm.py | 239bea5c29cef0d1a0cfb319de5dbc9227aa2a53 | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from unittest import mock
from unittest.mock import patch
import numpy as np
import pytest
from pytorch_lightning import Trainer
from tests.base import EvalModelTemplate
from tests.base.develop_utils import reset_seed
class ModelWithManualGradTracker(EvalModelTemplate):
def __init__(self, norm_type, *args, **kwargs):
super().__init__(*args, **kwargs)
self.stored_grad_norms, self.norm_type = [], float(norm_type)
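        # stored_grad_norms collects one dict of norms per backward pass so
        # the test can compare them against what the trainer logged.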
# validation spoils logger's metrics with `val_loss` records
validation_step = None
val_dataloader = None
def training_step(self, batch, batch_idx, optimizer_idx=None):
# just return a loss, no log or progress bar meta
x, y = batch
loss_val = self.loss(y, self(x.flatten(1, -1)))
return {'loss': loss_val}
def on_after_backward(self):
out, norms = {}, []
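        # Key format mirrors what Lightning logs for track_grad_norm,
        # e.g. 'grad_2.0_norm_<param_name>' plus a '..._total' entry.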
prefix = f'grad_{self.norm_type}_norm_'
for name, p in self.named_parameters():
if p.grad is None:
continue
# `np.linalg.norm` implementation likely uses fp64 intermediates
flat = p.grad.data.cpu().numpy().ravel()
norm = np.linalg.norm(flat, self.norm_type)
norms.append(norm)
out[prefix + name] = round(norm, 4)
# handle total norm
norm = np.linalg.norm(norms, self.norm_type)
out[prefix + 'total'] = round(norm, 4)
self.stored_grad_norms.append(out)
@mock.patch.dict(os.environ, {"PL_DEV_DEBUG": "1"})
@pytest.mark.parametrize("norm_type", [1., 1.25, 2, 3, 5, 10, 'inf'])
def test_grad_tracking(tmpdir, norm_type, rtol=5e-3):
# rtol=5e-3 respects the 3 decimals rounding in `.grad_norms` and above
reset_seed()
# use a custom grad tracking module and a list logger
model = ModelWithManualGradTracker(norm_type)
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=3,
track_grad_norm=norm_type,
log_every_n_steps=1, # request grad_norms every batch
)
result = trainer.fit(model)
assert result == 1, "Training failed"
logged_metrics = trainer.dev_debugger.logged_metrics
assert len(logged_metrics) == len(model.stored_grad_norms)
# compare the logged metrics against tracked norms on `.backward`
for mod, log in zip(model.stored_grad_norms, logged_metrics):
common = mod.keys() & log.keys()
log, mod = [log[k] for k in common], [mod[k] for k in common]
assert np.allclose(log, mod, rtol=rtol)
@pytest.mark.parametrize("log_every_n_steps", [1, 2, 3])
def test_grad_tracking_interval(tmpdir, log_every_n_steps):
""" Test that gradient norms get tracked in the right interval and that everytime the same keys get logged. """
trainer = Trainer(
default_root_dir=tmpdir,
track_grad_norm=2,
log_every_n_steps=log_every_n_steps,
max_steps=10,
)
with patch.object(trainer.logger, "log_metrics") as mocked:
model = EvalModelTemplate()
trainer.fit(model)
expected = trainer.global_step // log_every_n_steps
grad_norm_dicts = []
for _, kwargs in mocked.call_args_list:
metrics = kwargs.get("metrics", {})
grad_norm_dict = {k: v for k, v in metrics.items() if k.startswith("grad_")}
if grad_norm_dict:
grad_norm_dicts.append(grad_norm_dict)
assert len(grad_norm_dicts) == expected
assert all(grad_norm_dicts[0].keys() == g.keys() for g in grad_norm_dicts)
| [((2047, 2097), 'unittest.mock.patch.dict', 'mock.patch.dict', (['os.environ', "{'PL_DEV_DEBUG': '1'}"], {}), "(os.environ, {'PL_DEV_DEBUG': '1'})\n", (2062, 2097), False, 'from unittest import mock\n'), ((2099, 2168), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""norm_type"""', "[1.0, 1.25, 2, 3, 5, 10, 'inf']"], {}), "('norm_type', [1.0, 1.25, 2, 3, 5, 10, 'inf'])\n", (2122, 2168), False, 'import pytest\n'), ((3104, 3159), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""log_every_n_steps"""', '[1, 2, 3]'], {}), "('log_every_n_steps', [1, 2, 3])\n", (3127, 3159), False, 'import pytest\n'), ((2303, 2315), 'tests.base.develop_utils.reset_seed', 'reset_seed', ([], {}), '()\n', (2313, 2315), False, 'from tests.base.develop_utils import reset_seed\n'), ((2440, 2538), 'pytorch_lightning.Trainer', 'Trainer', ([], {'default_root_dir': 'tmpdir', 'max_epochs': '(3)', 'track_grad_norm': 'norm_type', 'log_every_n_steps': '(1)'}), '(default_root_dir=tmpdir, max_epochs=3, track_grad_norm=norm_type,\n log_every_n_steps=1)\n', (2447, 2538), False, 'from pytorch_lightning import Trainer\n'), ((3350, 3457), 'pytorch_lightning.Trainer', 'Trainer', ([], {'default_root_dir': 'tmpdir', 'track_grad_norm': '(2)', 'log_every_n_steps': 'log_every_n_steps', 'max_steps': '(10)'}), '(default_root_dir=tmpdir, track_grad_norm=2, log_every_n_steps=\n log_every_n_steps, max_steps=10)\n', (3357, 3457), False, 'from pytorch_lightning import Trainer\n'), ((1916, 1953), 'numpy.linalg.norm', 'np.linalg.norm', (['norms', 'self.norm_type'], {}), '(norms, self.norm_type)\n', (1930, 1953), True, 'import numpy as np\n'), ((3068, 3100), 'numpy.allclose', 'np.allclose', (['log', 'mod'], {'rtol': 'rtol'}), '(log, mod, rtol=rtol)\n', (3079, 3100), True, 'import numpy as np\n'), ((3502, 3545), 'unittest.mock.patch.object', 'patch.object', (['trainer.logger', '"""log_metrics"""'], {}), "(trainer.logger, 'log_metrics')\n", (3514, 3545), False, 'from unittest.mock import patch\n'), ((3573, 3592), 'tests.base.EvalModelTemplate', 'EvalModelTemplate', ([], {}), '()\n', (3590, 3592), False, 'from tests.base import EvalModelTemplate\n'), ((1755, 1791), 'numpy.linalg.norm', 'np.linalg.norm', (['flat', 'self.norm_type'], {}), '(flat, self.norm_type)\n', (1769, 1791), True, 'import numpy as np\n')] |
junjun315/tensorflow | tensorflow/tools/compatibility/renames_v2.py | 40b800fc24e1eea8642b79087925939121e8e25f | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=line-too-long
"""List of renames to apply when converting from TF 1.0 to TF 2.0.
THIS FILE IS AUTOGENERATED: To update, please run:
bazel build tensorflow/tools/compatibility/update:generate_v2_renames_map
bazel-bin/tensorflow/tools/compatibility/update/generate_v2_renames_map
This file should be updated whenever endpoints are deprecated.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
renames = {
'tf.AUTO_REUSE': 'tf.compat.v1.AUTO_REUSE',
'tf.AttrValue': 'tf.compat.v1.AttrValue',
'tf.COMPILER_VERSION': 'tf.version.COMPILER_VERSION',
'tf.CXX11_ABI_FLAG': 'tf.sysconfig.CXX11_ABI_FLAG',
'tf.ConditionalAccumulator': 'tf.compat.v1.ConditionalAccumulator',
'tf.ConditionalAccumulatorBase': 'tf.compat.v1.ConditionalAccumulatorBase',
'tf.ConfigProto': 'tf.compat.v1.ConfigProto',
'tf.DeviceSpec': 'tf.compat.v1.DeviceSpec',
'tf.Dimension': 'tf.compat.v1.Dimension',
'tf.Event': 'tf.compat.v1.Event',
'tf.FIFOQueue': 'tf.queue.FIFOQueue',
'tf.FixedLenFeature': 'tf.io.FixedLenFeature',
'tf.FixedLenSequenceFeature': 'tf.io.FixedLenSequenceFeature',
'tf.FixedLengthRecordReader': 'tf.compat.v1.FixedLengthRecordReader',
'tf.GIT_VERSION': 'tf.version.GIT_VERSION',
'tf.GPUOptions': 'tf.compat.v1.GPUOptions',
'tf.GRAPH_DEF_VERSION': 'tf.version.GRAPH_DEF_VERSION',
'tf.GRAPH_DEF_VERSION_MIN_CONSUMER': 'tf.version.GRAPH_DEF_VERSION_MIN_CONSUMER',
'tf.GRAPH_DEF_VERSION_MIN_PRODUCER': 'tf.version.GRAPH_DEF_VERSION_MIN_PRODUCER',
'tf.GraphDef': 'tf.compat.v1.GraphDef',
'tf.GraphKeys': 'tf.compat.v1.GraphKeys',
'tf.GraphOptions': 'tf.compat.v1.GraphOptions',
'tf.HistogramProto': 'tf.compat.v1.HistogramProto',
'tf.IdentityReader': 'tf.compat.v1.IdentityReader',
'tf.InteractiveSession': 'tf.compat.v1.InteractiveSession',
'tf.LMDBReader': 'tf.compat.v1.LMDBReader',
'tf.LogMessage': 'tf.compat.v1.LogMessage',
'tf.MONOLITHIC_BUILD': 'tf.sysconfig.MONOLITHIC_BUILD',
'tf.MetaGraphDef': 'tf.compat.v1.MetaGraphDef',
'tf.NameAttrList': 'tf.compat.v1.NameAttrList',
'tf.NoGradient': 'tf.no_gradient',
'tf.NodeDef': 'tf.compat.v1.NodeDef',
'tf.NotDifferentiable': 'tf.no_gradient',
'tf.OpError': 'tf.errors.OpError',
'tf.OptimizerOptions': 'tf.compat.v1.OptimizerOptions',
'tf.PaddingFIFOQueue': 'tf.queue.PaddingFIFOQueue',
'tf.Print': 'tf.compat.v1.Print',
'tf.PriorityQueue': 'tf.queue.PriorityQueue',
'tf.QUANTIZED_DTYPES': 'tf.dtypes.QUANTIZED_DTYPES',
'tf.QueueBase': 'tf.queue.QueueBase',
'tf.RandomShuffleQueue': 'tf.queue.RandomShuffleQueue',
'tf.ReaderBase': 'tf.compat.v1.ReaderBase',
'tf.RunMetadata': 'tf.compat.v1.RunMetadata',
'tf.RunOptions': 'tf.compat.v1.RunOptions',
'tf.Session': 'tf.compat.v1.Session',
'tf.SessionLog': 'tf.compat.v1.SessionLog',
'tf.SparseConditionalAccumulator': 'tf.sparse.SparseConditionalAccumulator',
'tf.SparseFeature': 'tf.io.SparseFeature',
'tf.SparseTensorValue': 'tf.compat.v1.SparseTensorValue',
'tf.Summary': 'tf.compat.v1.Summary',
'tf.SummaryMetadata': 'tf.compat.v1.SummaryMetadata',
'tf.TFRecordReader': 'tf.compat.v1.TFRecordReader',
'tf.TensorInfo': 'tf.compat.v1.TensorInfo',
'tf.TextLineReader': 'tf.compat.v1.TextLineReader',
'tf.VERSION': 'tf.version.VERSION',
'tf.VarLenFeature': 'tf.io.VarLenFeature',
'tf.VariableScope': 'tf.compat.v1.VariableScope',
'tf.WholeFileReader': 'tf.compat.v1.WholeFileReader',
'tf.accumulate_n': 'tf.math.accumulate_n',
'tf.add_check_numerics_ops': 'tf.compat.v1.add_check_numerics_ops',
'tf.add_to_collection': 'tf.compat.v1.add_to_collection',
'tf.add_to_collections': 'tf.compat.v1.add_to_collections',
'tf.all_variables': 'tf.compat.v1.all_variables',
'tf.angle': 'tf.math.angle',
'tf.app.run': 'tf.compat.v1.app.run',
'tf.assert_greater_equal': 'tf.compat.v1.assert_greater_equal',
'tf.assert_integer': 'tf.compat.v1.assert_integer',
'tf.assert_less_equal': 'tf.compat.v1.assert_less_equal',
'tf.assert_near': 'tf.compat.v1.assert_near',
'tf.assert_negative': 'tf.compat.v1.assert_negative',
'tf.assert_non_negative': 'tf.compat.v1.assert_non_negative',
'tf.assert_non_positive': 'tf.compat.v1.assert_non_positive',
'tf.assert_none_equal': 'tf.compat.v1.assert_none_equal',
'tf.assert_positive': 'tf.compat.v1.assert_positive',
'tf.assert_proper_iterable': 'tf.debugging.assert_proper_iterable',
'tf.assert_rank_at_least': 'tf.compat.v1.assert_rank_at_least',
'tf.assert_rank_in': 'tf.compat.v1.assert_rank_in',
'tf.assert_same_float_dtype': 'tf.debugging.assert_same_float_dtype',
'tf.assert_scalar': 'tf.compat.v1.assert_scalar',
'tf.assert_type': 'tf.compat.v1.assert_type',
'tf.assert_variables_initialized': 'tf.compat.v1.assert_variables_initialized',
'tf.assign': 'tf.compat.v1.assign',
'tf.assign_add': 'tf.compat.v1.assign_add',
'tf.assign_sub': 'tf.compat.v1.assign_sub',
'tf.batch_scatter_update': 'tf.compat.v1.batch_scatter_update',
'tf.betainc': 'tf.math.betainc',
'tf.ceil': 'tf.math.ceil',
'tf.check_numerics': 'tf.debugging.check_numerics',
'tf.cholesky': 'tf.linalg.cholesky',
'tf.cholesky_solve': 'tf.linalg.cholesky_solve',
'tf.clip_by_average_norm': 'tf.compat.v1.clip_by_average_norm',
'tf.colocate_with': 'tf.compat.v1.colocate_with',
'tf.conj': 'tf.math.conj',
'tf.container': 'tf.compat.v1.container',
'tf.convert_to_tensor_or_indexed_slices': 'tf.compat.v1.convert_to_tensor_or_indexed_slices',
'tf.convert_to_tensor_or_sparse_tensor': 'tf.compat.v1.convert_to_tensor_or_sparse_tensor',
'tf.count_up_to': 'tf.compat.v1.count_up_to',
'tf.create_partitioned_variables': 'tf.compat.v1.create_partitioned_variables',
'tf.cross': 'tf.linalg.cross',
'tf.cumprod': 'tf.math.cumprod',
'tf.data.make_initializable_iterator': 'tf.compat.v1.data.make_initializable_iterator',
'tf.data.make_one_shot_iterator': 'tf.compat.v1.data.make_one_shot_iterator',
'tf.debugging.is_finite': 'tf.math.is_finite',
'tf.debugging.is_inf': 'tf.math.is_inf',
'tf.debugging.is_nan': 'tf.math.is_nan',
'tf.debugging.is_non_decreasing': 'tf.math.is_non_decreasing',
'tf.debugging.is_strictly_increasing': 'tf.math.is_strictly_increasing',
'tf.decode_base64': 'tf.io.decode_base64',
'tf.decode_compressed': 'tf.io.decode_compressed',
'tf.decode_json_example': 'tf.io.decode_json_example',
'tf.decode_raw': 'tf.io.decode_raw',
'tf.delete_session_tensor': 'tf.compat.v1.delete_session_tensor',
'tf.depth_to_space': 'tf.compat.v1.depth_to_space',
'tf.dequantize': 'tf.quantization.dequantize',
'tf.deserialize_many_sparse': 'tf.io.deserialize_many_sparse',
'tf.diag': 'tf.linalg.tensor_diag',
'tf.diag_part': 'tf.linalg.tensor_diag_part',
'tf.digamma': 'tf.math.digamma',
'tf.dimension_at_index': 'tf.compat.dimension_at_index',
'tf.dimension_value': 'tf.compat.dimension_value',
'tf.disable_eager_execution': 'tf.compat.v1.disable_eager_execution',
'tf.disable_resource_variables': 'tf.compat.v1.disable_resource_variables',
'tf.disable_v2_batch_normalization': 'tf.compat.v1.disable_v2_batch_normalization',
'tf.disable_v2_behavior': 'tf.compat.v1.disable_v2_behavior',
'tf.disable_v2_tensorshape': 'tf.compat.v1.disable_v2_tensorshape',
'tf.distributions.Bernoulli': 'tf.compat.v1.distributions.Bernoulli',
'tf.distributions.Beta': 'tf.compat.v1.distributions.Beta',
'tf.distributions.Categorical': 'tf.compat.v1.distributions.Categorical',
'tf.distributions.Dirichlet': 'tf.compat.v1.distributions.Dirichlet',
'tf.distributions.DirichletMultinomial': 'tf.compat.v1.distributions.DirichletMultinomial',
'tf.distributions.Distribution': 'tf.compat.v1.distributions.Distribution',
'tf.distributions.Exponential': 'tf.compat.v1.distributions.Exponential',
'tf.distributions.FULLY_REPARAMETERIZED': 'tf.compat.v1.distributions.FULLY_REPARAMETERIZED',
'tf.distributions.Gamma': 'tf.compat.v1.distributions.Gamma',
'tf.distributions.Laplace': 'tf.compat.v1.distributions.Laplace',
'tf.distributions.Multinomial': 'tf.compat.v1.distributions.Multinomial',
'tf.distributions.NOT_REPARAMETERIZED': 'tf.compat.v1.distributions.NOT_REPARAMETERIZED',
'tf.distributions.Normal': 'tf.compat.v1.distributions.Normal',
'tf.distributions.RegisterKL': 'tf.compat.v1.distributions.RegisterKL',
'tf.distributions.ReparameterizationType': 'tf.compat.v1.distributions.ReparameterizationType',
'tf.distributions.StudentT': 'tf.compat.v1.distributions.StudentT',
'tf.distributions.Uniform': 'tf.compat.v1.distributions.Uniform',
'tf.distributions.kl_divergence': 'tf.compat.v1.distributions.kl_divergence',
'tf.div': 'tf.compat.v1.div',
'tf.dtypes.as_string': 'tf.strings.as_string',
'tf.enable_eager_execution': 'tf.compat.v1.enable_eager_execution',
'tf.enable_resource_variables': 'tf.compat.v1.enable_resource_variables',
'tf.enable_v2_batch_normalization': 'tf.compat.v1.enable_v2_batch_normalization',
'tf.enable_v2_behavior': 'tf.compat.v1.enable_v2_behavior',
'tf.enable_v2_tensorshape': 'tf.compat.v1.enable_v2_tensorshape',
'tf.encode_base64': 'tf.io.encode_base64',
'tf.erf': 'tf.math.erf',
'tf.erfc': 'tf.math.erfc',
'tf.expm1': 'tf.math.expm1',
'tf.fake_quant_with_min_max_args': 'tf.quantization.fake_quant_with_min_max_args',
'tf.fake_quant_with_min_max_args_gradient': 'tf.quantization.fake_quant_with_min_max_args_gradient',
'tf.fake_quant_with_min_max_vars': 'tf.quantization.fake_quant_with_min_max_vars',
'tf.fake_quant_with_min_max_vars_gradient': 'tf.quantization.fake_quant_with_min_max_vars_gradient',
'tf.fake_quant_with_min_max_vars_per_channel': 'tf.quantization.fake_quant_with_min_max_vars_per_channel',
'tf.fake_quant_with_min_max_vars_per_channel_gradient': 'tf.quantization.fake_quant_with_min_max_vars_per_channel_gradient',
'tf.feature_column.input_layer': 'tf.compat.v1.feature_column.input_layer',
'tf.feature_column.linear_model': 'tf.compat.v1.feature_column.linear_model',
'tf.fft': 'tf.signal.fft',
'tf.fft2d': 'tf.signal.fft2d',
'tf.fft3d': 'tf.signal.fft3d',
'tf.fixed_size_partitioner': 'tf.compat.v1.fixed_size_partitioner',
'tf.floordiv': 'tf.math.floordiv',
'tf.get_collection': 'tf.compat.v1.get_collection',
'tf.get_collection_ref': 'tf.compat.v1.get_collection_ref',
'tf.get_default_graph': 'tf.compat.v1.get_default_graph',
'tf.get_default_session': 'tf.compat.v1.get_default_session',
'tf.get_local_variable': 'tf.compat.v1.get_local_variable',
'tf.get_seed': 'tf.compat.v1.get_seed',
'tf.get_session_handle': 'tf.compat.v1.get_session_handle',
'tf.get_session_tensor': 'tf.compat.v1.get_session_tensor',
'tf.get_variable': 'tf.compat.v1.get_variable',
'tf.get_variable_scope': 'tf.compat.v1.get_variable_scope',
'tf.gfile.FastGFile': 'tf.compat.v1.gfile.FastGFile',
'tf.gfile.GFile': 'tf.io.gfile.GFile',
'tf.gfile.Open': 'tf.io.gfile.GFile',
'tf.global_norm': 'tf.linalg.global_norm',
'tf.global_variables': 'tf.compat.v1.global_variables',
'tf.global_variables_initializer': 'tf.compat.v1.global_variables_initializer',
'tf.glorot_normal_initializer': 'tf.compat.v1.glorot_normal_initializer',
'tf.glorot_uniform_initializer': 'tf.compat.v1.glorot_uniform_initializer',
'tf.graph_util.convert_variables_to_constants': 'tf.compat.v1.graph_util.convert_variables_to_constants',
'tf.graph_util.extract_sub_graph': 'tf.compat.v1.graph_util.extract_sub_graph',
'tf.graph_util.must_run_on_cpu': 'tf.compat.v1.graph_util.must_run_on_cpu',
'tf.graph_util.remove_training_nodes': 'tf.compat.v1.graph_util.remove_training_nodes',
'tf.graph_util.tensor_shape_from_node_def_name': 'tf.compat.v1.graph_util.tensor_shape_from_node_def_name',
'tf.ifft': 'tf.signal.ifft',
'tf.ifft2d': 'tf.signal.ifft2d',
'tf.ifft3d': 'tf.signal.ifft3d',
'tf.igamma': 'tf.math.igamma',
'tf.igammac': 'tf.math.igammac',
'tf.imag': 'tf.math.imag',
'tf.image.resize_area': 'tf.compat.v1.image.resize_area',
'tf.image.resize_bicubic': 'tf.compat.v1.image.resize_bicubic',
'tf.image.resize_bilinear': 'tf.compat.v1.image.resize_bilinear',
'tf.image.resize_nearest_neighbor': 'tf.compat.v1.image.resize_nearest_neighbor',
'tf.image.transpose_image': 'tf.compat.v1.image.transpose_image',
'tf.initialize_all_tables': 'tf.compat.v1.initialize_all_tables',
'tf.initialize_all_variables': 'tf.compat.v1.initialize_all_variables',
'tf.initialize_local_variables': 'tf.compat.v1.initialize_local_variables',
'tf.initialize_variables': 'tf.compat.v1.initialize_variables',
'tf.initializers.constant': 'tf.compat.v1.initializers.constant',
'tf.initializers.global_variables': 'tf.compat.v1.initializers.global_variables',
'tf.initializers.glorot_normal': 'tf.compat.v1.initializers.glorot_normal',
'tf.initializers.glorot_uniform': 'tf.compat.v1.initializers.glorot_uniform',
'tf.initializers.he_normal': 'tf.compat.v1.initializers.he_normal',
'tf.initializers.he_uniform': 'tf.compat.v1.initializers.he_uniform',
'tf.initializers.identity': 'tf.compat.v1.initializers.identity',
'tf.initializers.lecun_normal': 'tf.compat.v1.initializers.lecun_normal',
'tf.initializers.lecun_uniform': 'tf.compat.v1.initializers.lecun_uniform',
'tf.initializers.local_variables': 'tf.compat.v1.initializers.local_variables',
'tf.initializers.ones': 'tf.compat.v1.initializers.ones',
'tf.initializers.orthogonal': 'tf.compat.v1.initializers.orthogonal',
'tf.initializers.random_normal': 'tf.compat.v1.initializers.random_normal',
'tf.initializers.random_uniform': 'tf.compat.v1.initializers.random_uniform',
'tf.initializers.tables_initializer': 'tf.compat.v1.initializers.tables_initializer',
'tf.initializers.truncated_normal': 'tf.compat.v1.initializers.truncated_normal',
'tf.initializers.uniform_unit_scaling': 'tf.compat.v1.initializers.uniform_unit_scaling',
'tf.initializers.variables': 'tf.compat.v1.initializers.variables',
'tf.initializers.variance_scaling': 'tf.compat.v1.initializers.variance_scaling',
'tf.initializers.zeros': 'tf.compat.v1.initializers.zeros',
'tf.invert_permutation': 'tf.math.invert_permutation',
'tf.io.PaddingFIFOQueue': 'tf.queue.PaddingFIFOQueue',
'tf.io.PriorityQueue': 'tf.queue.PriorityQueue',
'tf.io.QueueBase': 'tf.queue.QueueBase',
'tf.io.RandomShuffleQueue': 'tf.queue.RandomShuffleQueue',
'tf.io.tf_record_iterator': 'tf.compat.v1.io.tf_record_iterator',
'tf.is_finite': 'tf.math.is_finite',
'tf.is_inf': 'tf.math.is_inf',
'tf.is_nan': 'tf.math.is_nan',
'tf.is_non_decreasing': 'tf.math.is_non_decreasing',
'tf.is_numeric_tensor': 'tf.debugging.is_numeric_tensor',
'tf.is_strictly_increasing': 'tf.math.is_strictly_increasing',
'tf.is_variable_initialized': 'tf.compat.v1.is_variable_initialized',
'tf.keras.initializers.Identity': 'tf.compat.v1.keras.initializers.Identity',
'tf.keras.initializers.Orthogonal': 'tf.compat.v1.keras.initializers.Orthogonal',
'tf.keras.initializers.TruncatedNormal': 'tf.compat.v1.keras.initializers.TruncatedNormal',
'tf.keras.initializers.VarianceScaling': 'tf.compat.v1.keras.initializers.VarianceScaling',
'tf.keras.initializers.constant': 'tf.compat.v1.keras.initializers.constant',
'tf.keras.initializers.glorot_normal': 'tf.compat.v1.keras.initializers.glorot_normal',
'tf.keras.initializers.glorot_uniform': 'tf.compat.v1.keras.initializers.glorot_uniform',
'tf.keras.initializers.he_normal': 'tf.compat.v1.keras.initializers.he_normal',
'tf.keras.initializers.he_uniform': 'tf.compat.v1.keras.initializers.he_uniform',
'tf.keras.initializers.identity': 'tf.compat.v1.keras.initializers.identity',
'tf.keras.initializers.lecun_normal': 'tf.compat.v1.keras.initializers.lecun_normal',
'tf.keras.initializers.lecun_uniform': 'tf.compat.v1.keras.initializers.lecun_uniform',
'tf.keras.initializers.normal': 'tf.compat.v1.keras.initializers.normal',
'tf.keras.initializers.ones': 'tf.compat.v1.keras.initializers.ones',
'tf.keras.initializers.orthogonal': 'tf.compat.v1.keras.initializers.orthogonal',
'tf.keras.initializers.random_normal': 'tf.compat.v1.keras.initializers.random_normal',
'tf.keras.initializers.random_uniform': 'tf.compat.v1.keras.initializers.random_uniform',
'tf.keras.initializers.truncated_normal': 'tf.compat.v1.keras.initializers.truncated_normal',
'tf.keras.initializers.uniform': 'tf.compat.v1.keras.initializers.uniform',
'tf.keras.initializers.zeros': 'tf.compat.v1.keras.initializers.zeros',
'tf.layers.AveragePooling1D': 'tf.compat.v1.layers.AveragePooling1D',
'tf.layers.AveragePooling2D': 'tf.compat.v1.layers.AveragePooling2D',
'tf.layers.AveragePooling3D': 'tf.compat.v1.layers.AveragePooling3D',
'tf.layers.BatchNormalization': 'tf.compat.v1.layers.BatchNormalization',
'tf.layers.Conv1D': 'tf.compat.v1.layers.Conv1D',
'tf.layers.Conv2D': 'tf.compat.v1.layers.Conv2D',
'tf.layers.Conv2DTranspose': 'tf.compat.v1.layers.Conv2DTranspose',
'tf.layers.Conv3D': 'tf.compat.v1.layers.Conv3D',
'tf.layers.Conv3DTranspose': 'tf.compat.v1.layers.Conv3DTranspose',
'tf.layers.Dense': 'tf.compat.v1.layers.Dense',
'tf.layers.Dropout': 'tf.compat.v1.layers.Dropout',
'tf.layers.Flatten': 'tf.compat.v1.layers.Flatten',
'tf.layers.InputSpec': 'tf.keras.layers.InputSpec',
'tf.layers.Layer': 'tf.compat.v1.layers.Layer',
'tf.layers.MaxPooling1D': 'tf.compat.v1.layers.MaxPooling1D',
'tf.layers.MaxPooling2D': 'tf.compat.v1.layers.MaxPooling2D',
'tf.layers.MaxPooling3D': 'tf.compat.v1.layers.MaxPooling3D',
'tf.layers.SeparableConv1D': 'tf.compat.v1.layers.SeparableConv1D',
'tf.layers.SeparableConv2D': 'tf.compat.v1.layers.SeparableConv2D',
'tf.layers.average_pooling1d': 'tf.compat.v1.layers.average_pooling1d',
'tf.layers.average_pooling2d': 'tf.compat.v1.layers.average_pooling2d',
'tf.layers.average_pooling3d': 'tf.compat.v1.layers.average_pooling3d',
'tf.layers.batch_normalization': 'tf.compat.v1.layers.batch_normalization',
'tf.layers.conv1d': 'tf.compat.v1.layers.conv1d',
'tf.layers.conv2d': 'tf.compat.v1.layers.conv2d',
'tf.layers.conv2d_transpose': 'tf.compat.v1.layers.conv2d_transpose',
'tf.layers.conv3d': 'tf.compat.v1.layers.conv3d',
'tf.layers.conv3d_transpose': 'tf.compat.v1.layers.conv3d_transpose',
'tf.layers.dense': 'tf.compat.v1.layers.dense',
'tf.layers.dropout': 'tf.compat.v1.layers.dropout',
'tf.layers.experimental.keras_style_scope': 'tf.compat.v1.layers.experimental.keras_style_scope',
'tf.layers.experimental.set_keras_style': 'tf.compat.v1.layers.experimental.set_keras_style',
'tf.layers.flatten': 'tf.compat.v1.layers.flatten',
'tf.layers.max_pooling1d': 'tf.compat.v1.layers.max_pooling1d',
'tf.layers.max_pooling2d': 'tf.compat.v1.layers.max_pooling2d',
'tf.layers.max_pooling3d': 'tf.compat.v1.layers.max_pooling3d',
'tf.layers.separable_conv1d': 'tf.compat.v1.layers.separable_conv1d',
'tf.layers.separable_conv2d': 'tf.compat.v1.layers.separable_conv2d',
'tf.lbeta': 'tf.math.lbeta',
'tf.lgamma': 'tf.math.lgamma',
'tf.lin_space': 'tf.linspace',
'tf.local_variables': 'tf.compat.v1.local_variables',
'tf.local_variables_initializer': 'tf.compat.v1.local_variables_initializer',
'tf.log': 'tf.math.log',
'tf.log1p': 'tf.math.log1p',
'tf.log_sigmoid': 'tf.math.log_sigmoid',
'tf.logging.DEBUG': 'tf.compat.v1.logging.DEBUG',
'tf.logging.ERROR': 'tf.compat.v1.logging.ERROR',
'tf.logging.FATAL': 'tf.compat.v1.logging.FATAL',
'tf.logging.INFO': 'tf.compat.v1.logging.INFO',
'tf.logging.TaskLevelStatusMessage': 'tf.compat.v1.logging.TaskLevelStatusMessage',
'tf.logging.WARN': 'tf.compat.v1.logging.WARN',
'tf.logging.debug': 'tf.compat.v1.logging.debug',
'tf.logging.error': 'tf.compat.v1.logging.error',
'tf.logging.fatal': 'tf.compat.v1.logging.fatal',
'tf.logging.flush': 'tf.compat.v1.logging.flush',
'tf.logging.get_verbosity': 'tf.compat.v1.logging.get_verbosity',
'tf.logging.info': 'tf.compat.v1.logging.info',
'tf.logging.log': 'tf.compat.v1.logging.log',
'tf.logging.log_every_n': 'tf.compat.v1.logging.log_every_n',
'tf.logging.log_first_n': 'tf.compat.v1.logging.log_first_n',
'tf.logging.log_if': 'tf.compat.v1.logging.log_if',
'tf.logging.set_verbosity': 'tf.compat.v1.logging.set_verbosity',
'tf.logging.vlog': 'tf.compat.v1.logging.vlog',
'tf.logging.warn': 'tf.compat.v1.logging.warn',
'tf.logging.warning': 'tf.compat.v1.logging.warning',
'tf.logical_xor': 'tf.math.logical_xor',
'tf.losses.absolute_difference': 'tf.compat.v1.losses.absolute_difference',
'tf.losses.add_loss': 'tf.compat.v1.losses.add_loss',
'tf.losses.compute_weighted_loss': 'tf.compat.v1.losses.compute_weighted_loss',
'tf.losses.cosine_distance': 'tf.compat.v1.losses.cosine_distance',
'tf.losses.get_losses': 'tf.compat.v1.losses.get_losses',
'tf.losses.get_regularization_loss': 'tf.compat.v1.losses.get_regularization_loss',
'tf.losses.get_regularization_losses': 'tf.compat.v1.losses.get_regularization_losses',
'tf.losses.get_total_loss': 'tf.compat.v1.losses.get_total_loss',
'tf.losses.hinge_loss': 'tf.compat.v1.losses.hinge_loss',
'tf.losses.huber_loss': 'tf.compat.v1.losses.huber_loss',
'tf.losses.log_loss': 'tf.compat.v1.losses.log_loss',
'tf.losses.mean_pairwise_squared_error': 'tf.compat.v1.losses.mean_pairwise_squared_error',
'tf.losses.mean_squared_error': 'tf.compat.v1.losses.mean_squared_error',
'tf.losses.sigmoid_cross_entropy': 'tf.compat.v1.losses.sigmoid_cross_entropy',
'tf.losses.softmax_cross_entropy': 'tf.compat.v1.losses.softmax_cross_entropy',
'tf.losses.sparse_softmax_cross_entropy': 'tf.compat.v1.losses.sparse_softmax_cross_entropy',
'tf.make_template': 'tf.compat.v1.make_template',
'tf.make_tensor_proto': 'tf.compat.v1.make_tensor_proto',
'tf.manip.gather_nd': 'tf.gather_nd',
'tf.manip.reshape': 'tf.reshape',
'tf.manip.reverse': 'tf.reverse',
'tf.manip.roll': 'tf.roll',
'tf.manip.scatter_nd': 'tf.scatter_nd',
'tf.manip.space_to_batch_nd': 'tf.space_to_batch_nd',
'tf.manip.tile': 'tf.tile',
'tf.matching_files': 'tf.io.matching_files',
'tf.matrix_band_part': 'tf.linalg.band_part',
'tf.matrix_determinant': 'tf.linalg.det',
'tf.matrix_diag': 'tf.linalg.diag',
'tf.matrix_diag_part': 'tf.linalg.diag_part',
'tf.matrix_inverse': 'tf.linalg.inv',
'tf.matrix_set_diag': 'tf.linalg.set_diag',
'tf.matrix_solve': 'tf.linalg.solve',
'tf.matrix_solve_ls': 'tf.linalg.lstsq',
'tf.matrix_transpose': 'tf.linalg.transpose',
'tf.matrix_triangular_solve': 'tf.linalg.triangular_solve',
'tf.metrics.accuracy': 'tf.compat.v1.metrics.accuracy',
'tf.metrics.auc': 'tf.compat.v1.metrics.auc',
'tf.metrics.average_precision_at_k': 'tf.compat.v1.metrics.average_precision_at_k',
'tf.metrics.false_negatives': 'tf.compat.v1.metrics.false_negatives',
'tf.metrics.false_negatives_at_thresholds': 'tf.compat.v1.metrics.false_negatives_at_thresholds',
'tf.metrics.false_positives': 'tf.compat.v1.metrics.false_positives',
'tf.metrics.false_positives_at_thresholds': 'tf.compat.v1.metrics.false_positives_at_thresholds',
'tf.metrics.mean': 'tf.compat.v1.metrics.mean',
'tf.metrics.mean_absolute_error': 'tf.compat.v1.metrics.mean_absolute_error',
'tf.metrics.mean_cosine_distance': 'tf.compat.v1.metrics.mean_cosine_distance',
'tf.metrics.mean_iou': 'tf.compat.v1.metrics.mean_iou',
'tf.metrics.mean_per_class_accuracy': 'tf.compat.v1.metrics.mean_per_class_accuracy',
'tf.metrics.mean_relative_error': 'tf.compat.v1.metrics.mean_relative_error',
'tf.metrics.mean_squared_error': 'tf.compat.v1.metrics.mean_squared_error',
'tf.metrics.mean_tensor': 'tf.compat.v1.metrics.mean_tensor',
'tf.metrics.percentage_below': 'tf.compat.v1.metrics.percentage_below',
'tf.metrics.precision': 'tf.compat.v1.metrics.precision',
'tf.metrics.precision_at_k': 'tf.compat.v1.metrics.precision_at_k',
'tf.metrics.precision_at_thresholds': 'tf.compat.v1.metrics.precision_at_thresholds',
'tf.metrics.precision_at_top_k': 'tf.compat.v1.metrics.precision_at_top_k',
'tf.metrics.recall': 'tf.compat.v1.metrics.recall',
'tf.metrics.recall_at_k': 'tf.compat.v1.metrics.recall_at_k',
'tf.metrics.recall_at_thresholds': 'tf.compat.v1.metrics.recall_at_thresholds',
'tf.metrics.recall_at_top_k': 'tf.compat.v1.metrics.recall_at_top_k',
'tf.metrics.root_mean_squared_error': 'tf.compat.v1.metrics.root_mean_squared_error',
'tf.metrics.sensitivity_at_specificity': 'tf.compat.v1.metrics.sensitivity_at_specificity',
'tf.metrics.sparse_average_precision_at_k': 'tf.compat.v1.metrics.sparse_average_precision_at_k',
'tf.metrics.sparse_precision_at_k': 'tf.compat.v1.metrics.sparse_precision_at_k',
'tf.metrics.specificity_at_sensitivity': 'tf.compat.v1.metrics.specificity_at_sensitivity',
'tf.metrics.true_negatives': 'tf.compat.v1.metrics.true_negatives',
'tf.metrics.true_negatives_at_thresholds': 'tf.compat.v1.metrics.true_negatives_at_thresholds',
'tf.metrics.true_positives': 'tf.compat.v1.metrics.true_positives',
'tf.metrics.true_positives_at_thresholds': 'tf.compat.v1.metrics.true_positives_at_thresholds',
'tf.min_max_variable_partitioner': 'tf.compat.v1.min_max_variable_partitioner',
'tf.model_variables': 'tf.compat.v1.model_variables',
'tf.moving_average_variables': 'tf.compat.v1.moving_average_variables',
'tf.nn.bidirectional_dynamic_rnn': 'tf.compat.v1.nn.bidirectional_dynamic_rnn',
'tf.nn.conv3d_backprop_filter_v2': 'tf.nn.conv3d_backprop_filter',
'tf.nn.ctc_beam_search_decoder_v2': 'tf.nn.ctc_beam_search_decoder',
'tf.nn.ctc_loss_v2': 'tf.nn.ctc_loss',
'tf.nn.depthwise_conv2d_native': 'tf.compat.v1.nn.depthwise_conv2d_native',
'tf.nn.depthwise_conv2d_native_backprop_filter': 'tf.nn.depthwise_conv2d_backprop_filter',
'tf.nn.depthwise_conv2d_native_backprop_input': 'tf.nn.depthwise_conv2d_backprop_input',
'tf.nn.dynamic_rnn': 'tf.compat.v1.nn.dynamic_rnn',
'tf.nn.log_uniform_candidate_sampler': 'tf.random.log_uniform_candidate_sampler',
'tf.nn.quantized_avg_pool': 'tf.compat.v1.nn.quantized_avg_pool',
'tf.nn.quantized_conv2d': 'tf.compat.v1.nn.quantized_conv2d',
'tf.nn.quantized_max_pool': 'tf.compat.v1.nn.quantized_max_pool',
'tf.nn.quantized_relu_x': 'tf.compat.v1.nn.quantized_relu_x',
'tf.nn.raw_rnn': 'tf.compat.v1.nn.raw_rnn',
'tf.nn.relu_layer': 'tf.compat.v1.nn.relu_layer',
'tf.nn.rnn_cell.BasicLSTMCell': 'tf.compat.v1.nn.rnn_cell.BasicLSTMCell',
'tf.nn.rnn_cell.BasicRNNCell': 'tf.compat.v1.nn.rnn_cell.BasicRNNCell',
'tf.nn.rnn_cell.DropoutWrapper': 'tf.compat.v1.nn.rnn_cell.DropoutWrapper',
'tf.nn.rnn_cell.GRUCell': 'tf.compat.v1.nn.rnn_cell.GRUCell',
'tf.nn.rnn_cell.LSTMCell': 'tf.compat.v1.nn.rnn_cell.LSTMCell',
'tf.nn.rnn_cell.MultiRNNCell': 'tf.compat.v1.nn.rnn_cell.MultiRNNCell',
'tf.nn.static_bidirectional_rnn': 'tf.compat.v1.nn.static_bidirectional_rnn',
'tf.nn.static_rnn': 'tf.compat.v1.nn.static_rnn',
'tf.nn.uniform_candidate_sampler': 'tf.random.uniform_candidate_sampler',
'tf.nn.xw_plus_b': 'tf.compat.v1.nn.xw_plus_b',
'tf.op_scope': 'tf.compat.v1.op_scope',
'tf.orthogonal_initializer': 'tf.compat.v1.orthogonal_initializer',
'tf.parse_single_sequence_example': 'tf.io.parse_single_sequence_example',
'tf.parse_tensor': 'tf.io.parse_tensor',
'tf.placeholder': 'tf.compat.v1.placeholder',
'tf.placeholder_with_default': 'tf.compat.v1.placeholder_with_default',
'tf.polygamma': 'tf.math.polygamma',
'tf.profiler.AdviceProto': 'tf.compat.v1.profiler.AdviceProto',
'tf.profiler.GraphNodeProto': 'tf.compat.v1.profiler.GraphNodeProto',
'tf.profiler.MultiGraphNodeProto': 'tf.compat.v1.profiler.MultiGraphNodeProto',
'tf.profiler.OpLogProto': 'tf.compat.v1.profiler.OpLogProto',
'tf.profiler.ProfileOptionBuilder': 'tf.compat.v1.profiler.ProfileOptionBuilder',
'tf.profiler.Profiler': 'tf.compat.v1.profiler.Profiler',
'tf.profiler.advise': 'tf.compat.v1.profiler.advise',
'tf.profiler.profile': 'tf.compat.v1.profiler.profile',
'tf.profiler.write_op_log': 'tf.compat.v1.profiler.write_op_log',
'tf.py_func': 'tf.compat.v1.py_func',
'tf.python_io.TFRecordCompressionType': 'tf.io.TFRecordCompressionType',
'tf.python_io.TFRecordOptions': 'tf.io.TFRecordOptions',
'tf.python_io.TFRecordWriter': 'tf.io.TFRecordWriter',
'tf.python_io.tf_record_iterator': 'tf.compat.v1.python_io.tf_record_iterator',
'tf.qr': 'tf.linalg.qr',
'tf.quantize': 'tf.quantization.quantize',
'tf.quantized_concat': 'tf.quantization.quantized_concat',
'tf.ragged.RaggedTensorValue': 'tf.compat.v1.ragged.RaggedTensorValue',
'tf.ragged.constant_value': 'tf.compat.v1.ragged.constant_value',
'tf.random.get_seed': 'tf.compat.v1.random.get_seed',
'tf.random.set_random_seed': 'tf.compat.v1.random.set_random_seed',
'tf.random_crop': 'tf.image.random_crop',
'tf.random_gamma': 'tf.random.gamma',
'tf.random_normal': 'tf.random.normal',
'tf.random_shuffle': 'tf.random.shuffle',
'tf.random_uniform': 'tf.random.uniform',
'tf.read_file': 'tf.io.read_file',
'tf.real': 'tf.math.real',
'tf.reciprocal': 'tf.math.reciprocal',
'tf.regex_replace': 'tf.strings.regex_replace',
'tf.report_uninitialized_variables': 'tf.compat.v1.report_uninitialized_variables',
'tf.reset_default_graph': 'tf.compat.v1.reset_default_graph',
'tf.resource_loader.get_data_files_path': 'tf.compat.v1.resource_loader.get_data_files_path',
'tf.resource_loader.get_path_to_datafile': 'tf.compat.v1.resource_loader.get_path_to_datafile',
'tf.resource_loader.get_root_dir_with_all_resources': 'tf.compat.v1.resource_loader.get_root_dir_with_all_resources',
'tf.resource_loader.load_resource': 'tf.compat.v1.resource_loader.load_resource',
'tf.resource_loader.readahead_file_path': 'tf.compat.v1.resource_loader.readahead_file_path',
'tf.reverse_v2': 'tf.reverse',
'tf.rint': 'tf.math.rint',
'tf.rsqrt': 'tf.math.rsqrt',
'tf.saved_model.Builder': 'tf.compat.v1.saved_model.Builder',
'tf.saved_model.LEGACY_INIT_OP_KEY': 'tf.compat.v1.saved_model.LEGACY_INIT_OP_KEY',
'tf.saved_model.MAIN_OP_KEY': 'tf.compat.v1.saved_model.MAIN_OP_KEY',
'tf.saved_model.build_tensor_info': 'tf.compat.v1.saved_model.build_tensor_info',
'tf.saved_model.builder.SavedModelBuilder': 'tf.compat.v1.saved_model.builder.SavedModelBuilder',
'tf.saved_model.constants.ASSETS_DIRECTORY': 'tf.saved_model.ASSETS_DIRECTORY',
'tf.saved_model.constants.ASSETS_KEY': 'tf.saved_model.ASSETS_KEY',
'tf.saved_model.constants.LEGACY_INIT_OP_KEY': 'tf.compat.v1.saved_model.constants.LEGACY_INIT_OP_KEY',
'tf.saved_model.constants.MAIN_OP_KEY': 'tf.compat.v1.saved_model.constants.MAIN_OP_KEY',
'tf.saved_model.constants.SAVED_MODEL_FILENAME_PB': 'tf.saved_model.SAVED_MODEL_FILENAME_PB',
'tf.saved_model.constants.SAVED_MODEL_FILENAME_PBTXT': 'tf.saved_model.SAVED_MODEL_FILENAME_PBTXT',
'tf.saved_model.constants.SAVED_MODEL_SCHEMA_VERSION': 'tf.saved_model.SAVED_MODEL_SCHEMA_VERSION',
'tf.saved_model.constants.VARIABLES_DIRECTORY': 'tf.saved_model.VARIABLES_DIRECTORY',
'tf.saved_model.constants.VARIABLES_FILENAME': 'tf.saved_model.VARIABLES_FILENAME',
'tf.saved_model.experimental.save': 'tf.saved_model.save',
'tf.saved_model.get_tensor_from_tensor_info': 'tf.compat.v1.saved_model.get_tensor_from_tensor_info',
'tf.saved_model.load': 'tf.compat.v1.saved_model.load',
'tf.saved_model.loader.load': 'tf.compat.v1.saved_model.loader.load',
'tf.saved_model.loader.maybe_saved_model_directory': 'tf.compat.v1.saved_model.loader.maybe_saved_model_directory',
'tf.saved_model.main_op.main_op': 'tf.compat.v1.saved_model.main_op.main_op',
'tf.saved_model.main_op.main_op_with_restore': 'tf.compat.v1.saved_model.main_op.main_op_with_restore',
'tf.saved_model.main_op_with_restore': 'tf.compat.v1.saved_model.main_op_with_restore',
'tf.saved_model.maybe_saved_model_directory': 'tf.compat.v1.saved_model.maybe_saved_model_directory',
'tf.saved_model.signature_constants.CLASSIFY_INPUTS': 'tf.saved_model.CLASSIFY_INPUTS',
'tf.saved_model.signature_constants.CLASSIFY_METHOD_NAME': 'tf.saved_model.CLASSIFY_METHOD_NAME',
'tf.saved_model.signature_constants.CLASSIFY_OUTPUT_CLASSES': 'tf.saved_model.CLASSIFY_OUTPUT_CLASSES',
'tf.saved_model.signature_constants.CLASSIFY_OUTPUT_SCORES': 'tf.saved_model.CLASSIFY_OUTPUT_SCORES',
'tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY': 'tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY',
'tf.saved_model.signature_constants.PREDICT_INPUTS': 'tf.saved_model.PREDICT_INPUTS',
'tf.saved_model.signature_constants.PREDICT_METHOD_NAME': 'tf.saved_model.PREDICT_METHOD_NAME',
'tf.saved_model.signature_constants.PREDICT_OUTPUTS': 'tf.saved_model.PREDICT_OUTPUTS',
'tf.saved_model.signature_constants.REGRESS_INPUTS': 'tf.saved_model.REGRESS_INPUTS',
'tf.saved_model.signature_constants.REGRESS_METHOD_NAME': 'tf.saved_model.REGRESS_METHOD_NAME',
'tf.saved_model.signature_constants.REGRESS_OUTPUTS': 'tf.saved_model.REGRESS_OUTPUTS',
'tf.saved_model.signature_def_utils.build_signature_def': 'tf.saved_model.build_signature_def',
'tf.saved_model.signature_def_utils.classification_signature_def': 'tf.saved_model.classification_signature_def',
'tf.saved_model.signature_def_utils.is_valid_signature': 'tf.saved_model.is_valid_signature',
'tf.saved_model.signature_def_utils.predict_signature_def': 'tf.saved_model.predict_signature_def',
'tf.saved_model.signature_def_utils.regression_signature_def': 'tf.saved_model.regression_signature_def',
'tf.saved_model.simple_save': 'tf.compat.v1.saved_model.simple_save',
'tf.saved_model.tag_constants.GPU': 'tf.saved_model.GPU',
'tf.saved_model.tag_constants.SERVING': 'tf.saved_model.SERVING',
'tf.saved_model.tag_constants.TPU': 'tf.saved_model.TPU',
'tf.saved_model.tag_constants.TRAINING': 'tf.saved_model.TRAINING',
'tf.saved_model.utils.build_tensor_info': 'tf.compat.v1.saved_model.utils.build_tensor_info',
'tf.saved_model.utils.get_tensor_from_tensor_info': 'tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info',
'tf.scatter_add': 'tf.compat.v1.scatter_add',
'tf.scatter_nd_add': 'tf.compat.v1.scatter_nd_add',
'tf.scatter_nd_sub': 'tf.compat.v1.scatter_nd_sub',
'tf.scatter_nd_update': 'tf.compat.v1.scatter_nd_update',
'tf.scatter_sub': 'tf.compat.v1.scatter_sub',
'tf.scatter_update': 'tf.compat.v1.scatter_update',
'tf.segment_max': 'tf.math.segment_max',
'tf.segment_mean': 'tf.math.segment_mean',
'tf.segment_min': 'tf.math.segment_min',
'tf.segment_prod': 'tf.math.segment_prod',
'tf.segment_sum': 'tf.math.segment_sum',
'tf.self_adjoint_eig': 'tf.linalg.eigh',
'tf.self_adjoint_eigvals': 'tf.linalg.eigvalsh',
'tf.serialize_many_sparse': 'tf.compat.v1.serialize_many_sparse',
'tf.serialize_sparse': 'tf.compat.v1.serialize_sparse',
'tf.serialize_tensor': 'tf.io.serialize_tensor',
'tf.set_random_seed': 'tf.compat.v1.set_random_seed',
'tf.setdiff1d': 'tf.compat.v1.setdiff1d',
'tf.sets.set_difference': 'tf.sets.difference',
'tf.sets.set_intersection': 'tf.sets.intersection',
'tf.sets.set_size': 'tf.sets.size',
'tf.sets.set_union': 'tf.sets.union',
'tf.space_to_depth': 'tf.compat.v1.space_to_depth',
'tf.sparse.matmul': 'tf.sparse.sparse_dense_matmul',
'tf.sparse.merge': 'tf.compat.v1.sparse.merge',
'tf.sparse.placeholder': 'tf.compat.v1.sparse.placeholder',
'tf.sparse.reduce_max_sparse': 'tf.compat.v1.sparse.reduce_max_sparse',
'tf.sparse.reduce_sum_sparse': 'tf.compat.v1.sparse.reduce_sum_sparse',
'tf.sparse_fill_empty_rows': 'tf.sparse.fill_empty_rows',
'tf.sparse_mask': 'tf.sparse.mask',
'tf.sparse_maximum': 'tf.sparse.maximum',
'tf.sparse_merge': 'tf.compat.v1.sparse_merge',
'tf.sparse_minimum': 'tf.sparse.minimum',
'tf.sparse_placeholder': 'tf.compat.v1.sparse_placeholder',
'tf.sparse_reduce_max_sparse': 'tf.compat.v1.sparse_reduce_max_sparse',
'tf.sparse_reduce_sum_sparse': 'tf.compat.v1.sparse_reduce_sum_sparse',
'tf.sparse_reorder': 'tf.sparse.reorder',
'tf.sparse_reset_shape': 'tf.sparse.reset_shape',
'tf.sparse_reshape': 'tf.sparse.reshape',
'tf.sparse_retain': 'tf.sparse.retain',
'tf.sparse_segment_mean': 'tf.compat.v1.sparse_segment_mean',
'tf.sparse_segment_sqrt_n': 'tf.compat.v1.sparse_segment_sqrt_n',
'tf.sparse_segment_sum': 'tf.compat.v1.sparse_segment_sum',
'tf.sparse_slice': 'tf.sparse.slice',
'tf.sparse_softmax': 'tf.sparse.softmax',
'tf.sparse_tensor_dense_matmul': 'tf.sparse.sparse_dense_matmul',
'tf.sparse_tensor_to_dense': 'tf.sparse.to_dense',
'tf.sparse_to_dense': 'tf.compat.v1.sparse_to_dense',
'tf.sparse_to_indicator': 'tf.sparse.to_indicator',
'tf.sparse_transpose': 'tf.sparse.transpose',
'tf.spectral.dct': 'tf.signal.dct',
'tf.spectral.fft': 'tf.signal.fft',
'tf.spectral.fft2d': 'tf.signal.fft2d',
'tf.spectral.fft3d': 'tf.signal.fft3d',
'tf.spectral.idct': 'tf.signal.idct',
'tf.spectral.ifft': 'tf.signal.ifft',
'tf.spectral.ifft2d': 'tf.signal.ifft2d',
'tf.spectral.ifft3d': 'tf.signal.ifft3d',
'tf.spectral.irfft': 'tf.signal.irfft',
'tf.spectral.irfft2d': 'tf.signal.irfft2d',
'tf.spectral.irfft3d': 'tf.signal.irfft3d',
'tf.spectral.rfft': 'tf.signal.rfft',
'tf.spectral.rfft2d': 'tf.signal.rfft2d',
'tf.spectral.rfft3d': 'tf.signal.rfft3d',
'tf.squared_difference': 'tf.math.squared_difference',
'tf.string_join': 'tf.strings.join',
'tf.string_strip': 'tf.strings.strip',
'tf.string_to_hash_bucket_fast': 'tf.strings.to_hash_bucket_fast',
'tf.string_to_hash_bucket_strong': 'tf.strings.to_hash_bucket_strong',
'tf.summary.Event': 'tf.compat.v1.summary.Event',
'tf.summary.FileWriter': 'tf.compat.v1.summary.FileWriter',
'tf.summary.FileWriterCache': 'tf.compat.v1.summary.FileWriterCache',
'tf.summary.SessionLog': 'tf.compat.v1.summary.SessionLog',
'tf.summary.Summary': 'tf.compat.v1.summary.Summary',
'tf.summary.SummaryDescription': 'tf.compat.v1.summary.SummaryDescription',
'tf.summary.TaggedRunMetadata': 'tf.compat.v1.summary.TaggedRunMetadata',
'tf.summary.audio': 'tf.compat.v1.summary.audio',
'tf.summary.get_summary_description': 'tf.compat.v1.summary.get_summary_description',
'tf.summary.histogram': 'tf.compat.v1.summary.histogram',
'tf.summary.image': 'tf.compat.v1.summary.image',
'tf.summary.merge': 'tf.compat.v1.summary.merge',
'tf.summary.merge_all': 'tf.compat.v1.summary.merge_all',
'tf.summary.scalar': 'tf.compat.v1.summary.scalar',
'tf.summary.tensor_summary': 'tf.compat.v1.summary.tensor_summary',
'tf.summary.text': 'tf.compat.v1.summary.text',
'tf.svd': 'tf.linalg.svd',
'tf.tables_initializer': 'tf.compat.v1.tables_initializer',
'tf.test.StubOutForTesting': 'tf.compat.v1.test.StubOutForTesting',
'tf.test.compute_gradient': 'tf.compat.v1.test.compute_gradient',
'tf.test.compute_gradient_error': 'tf.compat.v1.test.compute_gradient_error',
'tf.test.get_temp_dir': 'tf.compat.v1.test.get_temp_dir',
'tf.test.mock': 'tf.compat.v1.test.mock',
'tf.test.test_src_dir_path': 'tf.compat.v1.test.test_src_dir_path',
'tf.to_bfloat16': 'tf.compat.v1.to_bfloat16',
'tf.to_complex128': 'tf.compat.v1.to_complex128',
'tf.to_complex64': 'tf.compat.v1.to_complex64',
'tf.to_double': 'tf.compat.v1.to_double',
'tf.to_float': 'tf.compat.v1.to_float',
'tf.to_int32': 'tf.compat.v1.to_int32',
'tf.to_int64': 'tf.compat.v1.to_int64',
'tf.trace': 'tf.linalg.trace',
'tf.train.AdadeltaOptimizer': 'tf.compat.v1.train.AdadeltaOptimizer',
'tf.train.AdagradDAOptimizer': 'tf.compat.v1.train.AdagradDAOptimizer',
'tf.train.AdagradOptimizer': 'tf.compat.v1.train.AdagradOptimizer',
'tf.train.AdamOptimizer': 'tf.compat.v1.train.AdamOptimizer',
'tf.train.CheckpointSaverHook': 'tf.estimator.CheckpointSaverHook',
'tf.train.CheckpointSaverListener': 'tf.estimator.CheckpointSaverListener',
'tf.train.ChiefSessionCreator': 'tf.compat.v1.train.ChiefSessionCreator',
'tf.train.FeedFnHook': 'tf.estimator.FeedFnHook',
'tf.train.FinalOpsHook': 'tf.estimator.FinalOpsHook',
'tf.train.FtrlOptimizer': 'tf.compat.v1.train.FtrlOptimizer',
'tf.train.GlobalStepWaiterHook': 'tf.estimator.GlobalStepWaiterHook',
'tf.train.GradientDescentOptimizer': 'tf.compat.v1.train.GradientDescentOptimizer',
'tf.train.LoggingTensorHook': 'tf.estimator.LoggingTensorHook',
'tf.train.LooperThread': 'tf.compat.v1.train.LooperThread',
'tf.train.MomentumOptimizer': 'tf.compat.v1.train.MomentumOptimizer',
'tf.train.MonitoredSession': 'tf.compat.v1.train.MonitoredSession',
'tf.train.MonitoredTrainingSession': 'tf.compat.v1.train.MonitoredTrainingSession',
'tf.train.NanLossDuringTrainingError': 'tf.estimator.NanLossDuringTrainingError',
'tf.train.NanTensorHook': 'tf.estimator.NanTensorHook',
'tf.train.NewCheckpointReader': 'tf.compat.v1.train.NewCheckpointReader',
'tf.train.Optimizer': 'tf.compat.v1.train.Optimizer',
'tf.train.ProfilerHook': 'tf.estimator.ProfilerHook',
'tf.train.ProximalAdagradOptimizer': 'tf.compat.v1.train.ProximalAdagradOptimizer',
'tf.train.ProximalGradientDescentOptimizer': 'tf.compat.v1.train.ProximalGradientDescentOptimizer',
'tf.train.QueueRunner': 'tf.compat.v1.train.QueueRunner',
'tf.train.RMSPropOptimizer': 'tf.compat.v1.train.RMSPropOptimizer',
'tf.train.Saver': 'tf.compat.v1.train.Saver',
'tf.train.SaverDef': 'tf.compat.v1.train.SaverDef',
'tf.train.Scaffold': 'tf.compat.v1.train.Scaffold',
'tf.train.SecondOrStepTimer': 'tf.estimator.SecondOrStepTimer',
'tf.train.Server': 'tf.distribute.Server',
'tf.train.SessionCreator': 'tf.compat.v1.train.SessionCreator',
'tf.train.SessionManager': 'tf.compat.v1.train.SessionManager',
'tf.train.SessionRunArgs': 'tf.estimator.SessionRunArgs',
'tf.train.SessionRunContext': 'tf.estimator.SessionRunContext',
'tf.train.SessionRunHook': 'tf.estimator.SessionRunHook',
'tf.train.SessionRunValues': 'tf.estimator.SessionRunValues',
'tf.train.SingularMonitoredSession': 'tf.compat.v1.train.SingularMonitoredSession',
'tf.train.StepCounterHook': 'tf.estimator.StepCounterHook',
'tf.train.StopAtStepHook': 'tf.estimator.StopAtStepHook',
'tf.train.SummarySaverHook': 'tf.estimator.SummarySaverHook',
'tf.train.Supervisor': 'tf.compat.v1.train.Supervisor',
'tf.train.SyncReplicasOptimizer': 'tf.compat.v1.train.SyncReplicasOptimizer',
'tf.train.VocabInfo': 'tf.estimator.VocabInfo',
'tf.train.WorkerSessionCreator': 'tf.compat.v1.train.WorkerSessionCreator',
'tf.train.add_queue_runner': 'tf.compat.v1.train.add_queue_runner',
'tf.train.assert_global_step': 'tf.compat.v1.train.assert_global_step',
'tf.train.basic_train_loop': 'tf.compat.v1.train.basic_train_loop',
'tf.train.batch': 'tf.compat.v1.train.batch',
'tf.train.batch_join': 'tf.compat.v1.train.batch_join',
'tf.train.checkpoint_exists': 'tf.compat.v1.train.checkpoint_exists',
'tf.train.create_global_step': 'tf.compat.v1.train.create_global_step',
'tf.train.do_quantize_training_on_graphdef': 'tf.compat.v1.train.do_quantize_training_on_graphdef',
'tf.train.export_meta_graph': 'tf.compat.v1.train.export_meta_graph',
'tf.train.generate_checkpoint_state_proto': 'tf.compat.v1.train.generate_checkpoint_state_proto',
'tf.train.get_checkpoint_mtimes': 'tf.compat.v1.train.get_checkpoint_mtimes',
'tf.train.get_global_step': 'tf.compat.v1.train.get_global_step',
'tf.train.get_or_create_global_step': 'tf.compat.v1.train.get_or_create_global_step',
'tf.train.global_step': 'tf.compat.v1.train.global_step',
'tf.train.import_meta_graph': 'tf.compat.v1.train.import_meta_graph',
'tf.train.init_from_checkpoint': 'tf.compat.v1.train.init_from_checkpoint',
'tf.train.input_producer': 'tf.compat.v1.train.input_producer',
'tf.train.limit_epochs': 'tf.compat.v1.train.limit_epochs',
'tf.train.match_filenames_once': 'tf.io.match_filenames_once',
'tf.train.maybe_batch': 'tf.compat.v1.train.maybe_batch',
'tf.train.maybe_batch_join': 'tf.compat.v1.train.maybe_batch_join',
'tf.train.maybe_shuffle_batch': 'tf.compat.v1.train.maybe_shuffle_batch',
'tf.train.maybe_shuffle_batch_join': 'tf.compat.v1.train.maybe_shuffle_batch_join',
'tf.train.piecewise_constant': 'tf.compat.v1.train.piecewise_constant',
'tf.train.queue_runner.QueueRunner': 'tf.compat.v1.train.queue_runner.QueueRunner',
'tf.train.queue_runner.add_queue_runner': 'tf.compat.v1.train.queue_runner.add_queue_runner',
'tf.train.queue_runner.start_queue_runners': 'tf.compat.v1.train.queue_runner.start_queue_runners',
'tf.train.range_input_producer': 'tf.compat.v1.train.range_input_producer',
'tf.train.remove_checkpoint': 'tf.compat.v1.train.remove_checkpoint',
'tf.train.replica_device_setter': 'tf.compat.v1.train.replica_device_setter',
'tf.train.shuffle_batch': 'tf.compat.v1.train.shuffle_batch',
'tf.train.shuffle_batch_join': 'tf.compat.v1.train.shuffle_batch_join',
'tf.train.slice_input_producer': 'tf.compat.v1.train.slice_input_producer',
'tf.train.start_queue_runners': 'tf.compat.v1.train.start_queue_runners',
'tf.train.string_input_producer': 'tf.compat.v1.train.string_input_producer',
'tf.train.summary_iterator': 'tf.compat.v1.train.summary_iterator',
'tf.train.update_checkpoint_state': 'tf.compat.v1.train.update_checkpoint_state',
'tf.train.warm_start': 'tf.compat.v1.train.warm_start',
'tf.train.write_graph': 'tf.io.write_graph',
'tf.trainable_variables': 'tf.compat.v1.trainable_variables',
'tf.truncated_normal': 'tf.random.truncated_normal',
'tf.uniform_unit_scaling_initializer': 'tf.compat.v1.uniform_unit_scaling_initializer',
'tf.unsorted_segment_max': 'tf.math.unsorted_segment_max',
'tf.unsorted_segment_mean': 'tf.math.unsorted_segment_mean',
'tf.unsorted_segment_min': 'tf.math.unsorted_segment_min',
'tf.unsorted_segment_prod': 'tf.math.unsorted_segment_prod',
'tf.unsorted_segment_sqrt_n': 'tf.math.unsorted_segment_sqrt_n',
'tf.unsorted_segment_sum': 'tf.math.unsorted_segment_sum',
'tf.variable_axis_size_partitioner': 'tf.compat.v1.variable_axis_size_partitioner',
'tf.variable_op_scope': 'tf.compat.v1.variable_op_scope',
'tf.variable_scope': 'tf.compat.v1.variable_scope',
'tf.variables_initializer': 'tf.compat.v1.variables_initializer',
'tf.variance_scaling_initializer': 'tf.compat.v1.variance_scaling_initializer',
'tf.verify_tensor_all_finite': 'tf.compat.v1.verify_tensor_all_finite',
'tf.wrap_function': 'tf.compat.v1.wrap_function',
'tf.write_file': 'tf.io.write_file',
'tf.zeta': 'tf.math.zeta'
}
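# A minimal sketch of how a v1 -> v2 rename map like the one above is
# typically consumed (hypothetical helper name; real upgrade tooling also
# handles argument renames and reorderings, not just symbol lookups):
def lookup_v2_name(v1_name, renames):
    """Return the TF 2.x replacement for a 1.x symbol, or the symbol itself."""
    return renames.get(v1_name, v1_name)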
| [] |
Canpio/models | deep_speech_2/decoder.py | 72874de98fba93592edee42b776e3d876b1d5504 | """
CTC-like decoder utilities.
"""
from itertools import groupby
import numpy as np
def ctc_best_path_decode(probs_seq, vocabulary):
"""
Best path decoding, also called argmax decoding or greedy decoding.
    The path consisting of the most probable tokens is further post-processed
    to remove consecutive repetitions and all blanks.
:param probs_seq: 2-D list of probabilities over the vocabulary for each
character. Each element is a list of float probabilities
for one character.
:type probs_seq: list
:param vocabulary: Vocabulary list.
:type vocabulary: list
:return: Decoding result string.
    :rtype: basestring
"""
# dimension verification
for probs in probs_seq:
if not len(probs) == len(vocabulary) + 1:
raise ValueError("probs_seq dimension mismatchedd with vocabulary")
# argmax to get the best index for each time step
max_index_list = list(np.array(probs_seq).argmax(axis=1))
# remove consecutive duplicate indexes
index_list = [index_group[0] for index_group in groupby(max_index_list)]
# remove blank indexes
blank_index = len(vocabulary)
index_list = [index for index in index_list if index != blank_index]
# convert index list to string
return ''.join([vocabulary[index] for index in index_list])
def ctc_decode(probs_seq, vocabulary, method):
"""
    CTC-like sequence decoding from a sequence of likelihood probabilities.
:param probs_seq: 2-D list of probabilities over the vocabulary for each
character. Each element is a list of float probabilities
for one character.
:type probs_seq: list
:param vocabulary: Vocabulary list.
:type vocabulary: list
:param method: Decoding method name, with options: "best_path".
:type method: basestring
:return: Decoding result string.
    :rtype: basestring
"""
for prob_list in probs_seq:
if not len(prob_list) == len(vocabulary) + 1:
raise ValueError("probs dimension mismatchedd with vocabulary")
if method == "best_path":
return ctc_best_path_decode(probs_seq, vocabulary)
else:
raise ValueError("Decoding method [%s] is not supported.")
| [((1104, 1127), 'itertools.groupby', 'groupby', (['max_index_list'], {}), '(max_index_list)\n', (1111, 1127), False, 'from itertools import groupby\n'), ((973, 992), 'numpy.array', 'np.array', (['probs_seq'], {}), '(probs_seq)\n', (981, 992), True, 'import numpy as np\n')] |
Humbedooh/infrastructure-puppet | modules/gitbox/files/asfgit/hooks/sync.py | a85f797d847b80e877cd5b7c66513970f6f80703 | #!/usr/local/bin/python
import json
import socket
import sys
import asfgit.cfg as cfg
import asfgit.git as git
import asfgit.log as log
import asfgit.util as util
import subprocess, os, time
def main():
ghurl = "git@github:apache/%s.git" % cfg.repo_name
os.chdir("/x1/repos/asf/%s.git" % cfg.repo_name)
try:
for ref in git.stream_refs(sys.stdin):
if ref.is_rewrite():
print("Syncing %s (FORCED)..." % ref.name)
subprocess.check_call(["git", "push", "-f", ghurl, "%s:%s" % (ref.newsha, ref.name)])
else:
print("Syncing %s..." % ref.name)
subprocess.check_call(["git", "push", ghurl, "%s:%s" % (ref.newsha, ref.name)])
except subprocess.CalledProcessError as err:
util.abort("Could not sync with GitHub: %s" % err.output)
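if __name__ == "__main__":
    # Assumption: the asfgit hook runner normally imports this module and
    # calls main() itself; this guard only enables a manual invocation.
    main()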
| [((265, 313), 'os.chdir', 'os.chdir', (["('/x1/repos/asf/%s.git' % cfg.repo_name)"], {}), "('/x1/repos/asf/%s.git' % cfg.repo_name)\n", (273, 313), False, 'import subprocess, os, time\n'), ((341, 367), 'asfgit.git.stream_refs', 'git.stream_refs', (['sys.stdin'], {}), '(sys.stdin)\n', (356, 367), True, 'import asfgit.git as git\n'), ((768, 825), 'asfgit.util.abort', 'util.abort', (["('Could not sync with GitHub: %s' % err.output)"], {}), "('Could not sync with GitHub: %s' % err.output)\n", (778, 825), True, 'import asfgit.util as util\n'), ((469, 558), 'subprocess.check_call', 'subprocess.check_call', (["['git', 'push', '-f', ghurl, '%s:%s' % (ref.newsha, ref.name)]"], {}), "(['git', 'push', '-f', ghurl, '%s:%s' % (ref.newsha,\n ref.name)])\n", (490, 558), False, 'import subprocess, os, time\n'), ((631, 710), 'subprocess.check_call', 'subprocess.check_call', (["['git', 'push', ghurl, '%s:%s' % (ref.newsha, ref.name)]"], {}), "(['git', 'push', ghurl, '%s:%s' % (ref.newsha, ref.name)])\n", (652, 710), False, 'import subprocess, os, time\n')] |
asmodehn/rosimport | rosimport/_rosdef_loader.py | c63e4769650b1cf19f23fbaa65a356ffae20a536 | from __future__ import absolute_import, division, print_function
import contextlib
import importlib
import site
import tempfile
import shutil
from rosimport import genrosmsg_py, genrossrv_py
"""
A module to set up a custom importer for .msg and .srv files.
Upon import, it will first find the .msg file, then generate the Python module for it, then load it.
TODO...
"""
# We need to be extra careful with python versions
# Ref : https://docs.python.org/dev/library/importlib.html#importlib.import_module
# Ref : http://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path
# Note : Couldn't find a way to make imp.load_source deal with packages or relative imports (necessary for our generated message classes)
import os
import sys
import logging
# Class to allow dynamic search of packages
class RosSearchPath(dict):
"""
Class to allow dynamic search of packages.
This is where we hook up into python import mechanism in order to generate and discover
packages and messages we are depending on.
But it should not be used during the generation of multiple messages in only one package,
as this is too tricky to get right, and too easy to break by mistake.
"""
def __init__(self, **ros_package_paths):
# we use the default ROS_PACKAGE_PATH if already setup in environment.
# This allows us to find message definitions in a ROS distro (and collaborate with pyros_setup)
package_paths = {}
for distropath in [d for d in os.environ.get('ROS_PACKAGE_PATH', '').split(':') if os.path.exists(d)]:
for p in [pkgd for pkgd in os.listdir(distropath) if os.path.exists(os.path.join(distropath, pkgd, 'msg'))]:
package_paths[p] = package_paths.get(p, set()) | {os.path.join(distropath, p, 'msg')}
# we add any extra path
package_paths.update(ros_package_paths)
super(RosSearchPath, self).__init__(package_paths)
def try_import(self, item):
try:
# we need to import the .msg submodule (only one usable as dependency)
mod = importlib.import_module(item + '.msg')
# import succeeded : we should get the namespace path
# and add it to the list of paths to avoid going through this all over again...
for p in mod.__path__:
# Note we want dependencies here. dependencies are ALWAYS '.msg' files in 'msg' directory.
                msg_path = p  # mod.__path__ entries already point at the package's 'msg' directory
# We add a path only if we can find the 'msg' directory
                self[item] = self.get(item, set()) | ({msg_path} if os.path.exists(msg_path) else set())
return mod
except ImportError:
# import failed
return None
def __contains__(self, item):
""" True if D has a key k, else False. """
has = super(RosSearchPath, self).__contains__(item)
if not has: # attempt importing. solving ROS path setup problem with python import paths setup.
self.try_import(item)
# Note : if ROS is setup, rospkg.RosPack can find packages
# try again (might work now)
return super(RosSearchPath, self).__contains__(item)
def __getitem__(self, item):
""" x.__getitem__(y) <==> x[y] """
got = super(RosSearchPath, self).get(item)
if got is None:
# attempt discovery by relying on python core import feature.
self.try_import(item)
# Note : if ROS is setup, rospkg.RosPack can find packages
return super(RosSearchPath, self).get(item)
# singleton instance, to keep used ros package paths in cache
ros_import_search_path = RosSearchPath()
def RosLoader(rosdef_extension):
"""
Function generating ROS loaders.
This is used to keep .msg and .srv loaders very similar
"""
if rosdef_extension == '.msg':
loader_origin_subdir = 'msg'
loader_file_extension = rosdef_extension
loader_generated_subdir = 'msg'
loader_generator = genrosmsg_py
elif rosdef_extension == '.srv':
loader_origin_subdir = 'srv'
loader_file_extension = rosdef_extension
loader_generated_subdir = 'srv'
loader_generator = genrossrv_py
else:
raise RuntimeError("RosLoader for a format {0} other than .msg or .srv is not supported".format(rosdef_extension))
import filefinder2.machinery
class ROSDefLoader(filefinder2.machinery.SourceFileLoader):
"""
Python Loader for Rosdef files.
Note : We support ROS layout :
- msg/myMsg.msg
- srv/mySrv.srv
        - my_pkg/__init__.py # doesn't really matter (we rely on PEP 420)
OR inside the python code:
        - my_pkg/__init__.py # doesn't really matter (we rely on PEP 420)
- my_pkg/msg/myMsg.msg
- my_pkg/srv/mySrv.srv
BUT the following is also importable relatively,
which is especially useful for tests or intra-package ROS communication,
although it cannot be used as another package dependency (due to ROS limitations)
        - my_pkg/__init__.py # doesn't really matter (we rely on PEP 420)
        - my_pkg/subpkg/__init__.py # doesn't really matter (we rely on PEP 420)
- my_pkg/subpkg/msg/myMsg.msg
- my_pkg/subpkg/srv/mySrv.srv
        In that case myMsg.py will also be generated under my_pkg.msg,
        but can be imported relatively from my_pkg/subpkg/module.py with "from .msg import myMsg"
"""
rosimport_tempdir = os.path.join(tempfile.gettempdir(), 'rosimport')
def __init__(self, fullname, path):
self.logger = logging.getLogger(__name__)
# to normalize input
path = os.path.normpath(path)
# Doing this in each loader, in case we are running from different processes,
# avoiding to reload from same file (especially useful for boxed tests).
# But deterministic path to avoid regenerating from the same interpreter
rosimport_path = os.path.join(self.rosimport_tempdir, str(os.getpid()))
if not os.path.exists(rosimport_path):
os.makedirs(rosimport_path)
rospackage = fullname.partition('.')[0]
if os.path.isdir(path):
# if we get a package name ending with msg or srv and a non empty directory
if (
fullname.endswith(loader_origin_subdir) and
any([f.endswith(loader_file_extension) for f in os.listdir(path)])
):
# TODO : dynamic in memory generation (we do not need the file ultimately...)
outdir, gen_rosdef_pkgpath = loader_generator(
# generate message's python code at once, for this package level.
rosdef_files=[os.path.join(path, f) for f in os.listdir(path)],
package=fullname,
sitedir=rosimport_path,
search_path=ros_import_search_path,
)
# TODO : handle thrown exception (cleaner than hacking the search path dict...)
# try:
# generator.generate_messages(package, rosfiles, outdir, search_path)
# except genmsg.MsgNotFound as mnf:
# try:
# mod = importlib.import_module(mnf.package)
# # import succeeded : we should get the namespace path that has '/msg'
# # and add it to the list of paths to avoid going through this all over again...
# for p in mod.__path__:
# # Note we want dependencies here. dependencies are ALWAYS '.msg' files in 'msg' directory.
# msg_path = os.path.join(p, genmsg_MSG_DIR)
# # We add a path only if we can find the 'msg' directory
# search_path[mnf.package] = search_path[mnf.package] + ([msg_path] if os.path.exists(msg_path) else [])
# # Try generation again
# generator.generate_messages(package, rosfiles, outdir, search_path)
# except ImportError:
# # import failed
# return None
if not os.path.exists(gen_rosdef_pkgpath):
raise ImportError("{0} file not found".format(gen_rosdef_pkgpath))
# relying on usual source file loader since we have generated normal python code
super(ROSDefLoader, self).__init__(fullname, gen_rosdef_pkgpath)
def get_gen_path(self):
"""Returning the generated path matching the import"""
return self.path # TODO : maybe useless ?
# return os.path.join(self.outdir_pkg, loader_generated_subdir)
def __repr__(self):
return "ROSDefLoader/{0}({1}, {2})".format(loader_file_extension, self.name, self.path)
@staticmethod
def get_file_extension():
return loader_file_extension
@staticmethod
def get_origin_subdir():
return loader_origin_subdir
@staticmethod
def get_generated_subdir():
return loader_generated_subdir
return ROSDefLoader
ROSMsgLoader = RosLoader(rosdef_extension='.msg')
ROSSrvLoader = RosLoader(rosdef_extension='.srv')
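if __name__ == '__main__':
    # Minimal sketch of the public pieces (no ROS workspace assumed). In
    # normal use the loaders are registered with a filefinder2 path hook,
    # after which `import my_pkg.msg` triggers generation transparently.
    print(ROSMsgLoader.get_file_extension())      # '.msg'
    print(ROSSrvLoader.get_origin_subdir())       # 'srv'
    print(sorted(ros_import_search_path.keys()))  # packages discovered so far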
| [((2094, 2132), 'importlib.import_module', 'importlib.import_module', (["(item + '.msg')"], {}), "(item + '.msg')\n", (2117, 2132), False, 'import importlib\n'), ((5560, 5581), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (5579, 5581), False, 'import tempfile\n'), ((5668, 5695), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (5685, 5695), False, 'import logging\n'), ((5748, 5770), 'os.path.normpath', 'os.path.normpath', (['path'], {}), '(path)\n', (5764, 5770), False, 'import os\n'), ((6280, 6299), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (6293, 6299), False, 'import os\n'), ((1564, 1581), 'os.path.exists', 'os.path.exists', (['d'], {}), '(d)\n', (1578, 1581), False, 'import os\n'), ((2460, 2475), 'os.path.join', 'os.path.join', (['p'], {}), '(p)\n', (2472, 2475), False, 'import os\n'), ((6135, 6165), 'os.path.exists', 'os.path.exists', (['rosimport_path'], {}), '(rosimport_path)\n', (6149, 6165), False, 'import os\n'), ((6183, 6210), 'os.makedirs', 'os.makedirs', (['rosimport_path'], {}), '(rosimport_path)\n', (6194, 6210), False, 'import os\n'), ((1623, 1645), 'os.listdir', 'os.listdir', (['distropath'], {}), '(distropath)\n', (1633, 1645), False, 'import os\n'), ((6102, 6113), 'os.getpid', 'os.getpid', ([], {}), '()\n', (6111, 6113), False, 'import os\n'), ((1511, 1549), 'os.environ.get', 'os.environ.get', (['"""ROS_PACKAGE_PATH"""', '""""""'], {}), "('ROS_PACKAGE_PATH', '')\n", (1525, 1549), False, 'import os\n'), ((1664, 1701), 'os.path.join', 'os.path.join', (['distropath', 'pkgd', '"""msg"""'], {}), "(distropath, pkgd, 'msg')\n", (1676, 1701), False, 'import os\n'), ((1771, 1805), 'os.path.join', 'os.path.join', (['distropath', 'p', '"""msg"""'], {}), "(distropath, p, 'msg')\n", (1783, 1805), False, 'import os\n'), ((8502, 8536), 'os.path.exists', 'os.path.exists', (['gen_rosdef_pkgpath'], {}), '(gen_rosdef_pkgpath)\n', (8516, 8536), False, 'import os\n'), ((2615, 2639), 'os.path.exists', 'os.path.exists', (['msg_path'], {}), '(msg_path)\n', (2629, 2639), False, 'import os\n'), ((6562, 6578), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (6572, 6578), False, 'import os\n'), ((6894, 6915), 'os.path.join', 'os.path.join', (['path', 'f'], {}), '(path, f)\n', (6906, 6915), False, 'import os\n'), ((6925, 6941), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (6935, 6941), False, 'import os\n')] |
Ahuge/PyLeague | PyLeague/logger.py | ee8a14061c44c1c26a5102a05e33ad820f2b1b63 | import sys
def color(text, color):
if color == "blue":
color = "0;34m"
elif color == "green":
color = "0;32m"
elif color == "red":
color = "0;31m"
elif color == "yellow":
color = "0;33m"
else:
return text
return "\033[%s%s\033[0m\n" % (color, text)
class NotALogger(object):
def info(self, msg):
sys.stdout.write(
color(msg, "blue")
)
def error(self, msg):
sys.stdout.write(
color(msg, "red")
)
def warning(self, msg):
sys.stdout.write(
color(msg, "yellow")
)
def success(self, msg):
sys.stdout.write(
color(msg, "green")
)
def header(self):
msg = "=" * 50
msg += "\n" + "=" + (" " * 48) + "="
msg += "\n" + "=" + (" " * 48) + "="
msg += "\n" + ("=" * 50)
sys.stdout.write(
color(msg, "green")
)
def line(self):
sys.stdout.write(
color("-" * 50, "blue")
)
log = NotALogger()
__all__ = ["log"]
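if __name__ == "__main__":
    # Quick visual check: each level should render in its ANSI color.
    log.header()
    log.info("info renders blue")
    log.warning("warning renders yellow")
    log.error("error renders red")
    log.success("success renders green")
    log.line()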
| [] |
swtwsk/dbt-airflow-manifest-parser | setup.py | fae0049fb8ff3bc7a78488a48a31023f67fbeef3 | """dbt_airflow_factory module."""
from setuptools import find_packages, setup
with open("README.md") as f:
README = f.read()
# Runtime Requirements.
INSTALL_REQUIRES = ["pytimeparse==1.1.8"]
# Dev Requirements
EXTRA_REQUIRE = {
"tests": [
"pytest>=6.2.2, <7.0.0",
"pytest-cov>=2.8.0, <3.0.0",
"tox==3.21.1",
"pre-commit==2.9.3",
"pandas==1.2.5",
"apache-airflow[kubernetes]==2.2.0",
],
"docs": [
"sphinx==4.3.1",
"sphinx-rtd-theme==1.0.0",
"sphinx-click>=3.0,<3.1",
"myst-parser>=0.16, <0.17",
"docutils>=0.17,<0.18",
],
}
setup(
name="dbt-airflow-factory",
version="0.18.0",
description="Library to convert DBT manifest metadata to Airflow tasks",
long_description=README,
long_description_content_type="text/markdown",
license="Apache Software License (Apache 2.0)",
python_requires=">=3",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
keywords="dbt airflow manifest parser python",
author=u"Piotr Pekala",
author_email="[email protected]",
url="https://github.com/getindata/dbt-airflow-factory/",
packages=find_packages(exclude=["ez_setup", "examples", "tests", "docs"]),
include_package_data=True,
zip_safe=False,
install_requires=INSTALL_REQUIRES,
extras_require=EXTRA_REQUIRE,
)
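# Local development sketch (commands are assumptions inferred from the
# extras declared above, not documented invocations):
#   pip install -e ".[tests]"   # runtime plus test dependencies
#   pip install -e ".[docs]"    # Sphinx toolchain for the docs build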
| [((1299, 1363), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['ez_setup', 'examples', 'tests', 'docs']"}), "(exclude=['ez_setup', 'examples', 'tests', 'docs'])\n", (1312, 1363), False, 'from setuptools import find_packages, setup\n')] |
jmb/NightLightPi | nightlightpi/errorstrings.py | 82f5d37a35e3457e31ca100524011908e5b33c4d | # -*- coding: utf-8; -*-
"""Define error strings raised by the application."""
MISSING_CONFIG_VALUE = """
'{0}' is not specified or invalid in the config file!
""".strip()
| [] |
European-XFEL/karabo-bridge-py | karabo_bridge/tests/test_serialize.py | c4b2847b837ae7156640cb8f787fcf96ac7f632e | import numpy as np
import pytest
from karabo_bridge import serialize, deserialize
from .utils import compare_nested_dict
def test_serialize(data, protocol_version):
msg = serialize(data, protocol_version=protocol_version)
assert isinstance(msg, list)
d, m = deserialize(msg)
compare_nested_dict(data, d)
assert m['source1'] == {'timestamp.tid': 9876543210, 'timestamp': 12345678}
assert m['XMPL/DET/MOD0'] == {}
def test_serialize_with_metadata(data, metadata, protocol_version):
msg = serialize(data, metadata, protocol_version=protocol_version)
d, m = deserialize(msg)
compare_nested_dict(metadata, m)
def test_serialize_with_dummy_timestamps(data, protocol_version):
msg = serialize(data, protocol_version=protocol_version,
dummy_timestamps=True)
d, m = deserialize(msg)
assert set(m['XMPL/DET/MOD0']) == {'timestamp', 'timestamp.sec', 'timestamp.frac'}
assert set(m['source1']) == {'timestamp', 'timestamp.tid'}
assert m['source1']['timestamp.tid'] == 9876543210
assert m['source1']['timestamp'] == 12345678
def test_serialize_with_metadata_and_dummy_timestamp(data, metadata, protocol_version):
msg = serialize(data, metadata, protocol_version=protocol_version,
dummy_timestamps=True)
d, m = deserialize(msg)
compare_nested_dict(metadata, m)
def test_wrong_version(data):
with pytest.raises(ValueError):
serialize(data, protocol_version='3.0')
| [((179, 229), 'karabo_bridge.serialize', 'serialize', (['data'], {'protocol_version': 'protocol_version'}), '(data, protocol_version=protocol_version)\n', (188, 229), False, 'from karabo_bridge import serialize, deserialize\n'), ((275, 291), 'karabo_bridge.deserialize', 'deserialize', (['msg'], {}), '(msg)\n', (286, 291), False, 'from karabo_bridge import serialize, deserialize\n'), ((521, 581), 'karabo_bridge.serialize', 'serialize', (['data', 'metadata'], {'protocol_version': 'protocol_version'}), '(data, metadata, protocol_version=protocol_version)\n', (530, 581), False, 'from karabo_bridge import serialize, deserialize\n'), ((594, 610), 'karabo_bridge.deserialize', 'deserialize', (['msg'], {}), '(msg)\n', (605, 610), False, 'from karabo_bridge import serialize, deserialize\n'), ((726, 799), 'karabo_bridge.serialize', 'serialize', (['data'], {'protocol_version': 'protocol_version', 'dummy_timestamps': '(True)'}), '(data, protocol_version=protocol_version, dummy_timestamps=True)\n', (735, 799), False, 'from karabo_bridge import serialize, deserialize\n'), ((832, 848), 'karabo_bridge.deserialize', 'deserialize', (['msg'], {}), '(msg)\n', (843, 848), False, 'from karabo_bridge import serialize, deserialize\n'), ((1203, 1290), 'karabo_bridge.serialize', 'serialize', (['data', 'metadata'], {'protocol_version': 'protocol_version', 'dummy_timestamps': '(True)'}), '(data, metadata, protocol_version=protocol_version,\n dummy_timestamps=True)\n', (1212, 1290), False, 'from karabo_bridge import serialize, deserialize\n'), ((1319, 1335), 'karabo_bridge.deserialize', 'deserialize', (['msg'], {}), '(msg)\n', (1330, 1335), False, 'from karabo_bridge import serialize, deserialize\n'), ((1414, 1439), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1427, 1439), False, 'import pytest\n'), ((1449, 1488), 'karabo_bridge.serialize', 'serialize', (['data'], {'protocol_version': '"""3.0"""'}), "(data, protocol_version='3.0')\n", (1458, 1488), False, 'from karabo_bridge import serialize, deserialize\n')] |
bpedersen2/indico | indico/testing/fixtures/util.py | 8410ee5f8f8530a8692f3dd2d4015c3074b0aa30 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import inspect
from datetime import datetime
import freezegun
import pytest
from sqlalchemy import DateTime, cast
from sqlalchemy.sql.functions import _FunctionGenerator
@pytest.fixture
def monkeypatch_methods(monkeypatch):
"""Monkeypatch all methods from `cls` onto `target`.
This utility lets you easily mock multiple methods in an existing class.
In case of classmethods the binding will not be changed, i.e. `cls` will
keep pointing to the source class and not the target class.
"""
def _monkeypatch_methods(target, cls):
for name, method in inspect.getmembers(cls, inspect.ismethod):
if method.__self__ is None:
# For unbound methods we need to copy the underlying function
method = method.__func__
monkeypatch.setattr(f'{target}.{name}', method)
return _monkeypatch_methods
@pytest.fixture
def freeze_time(monkeypatch):
"""Return a function that freezes the current time.
It affects datetime.now, date.today, etc. and also SQLAlchemy's `func.now()`
which simply returns the current time from `datetime.now()` instead of
retrieving it using the actual `now()` function of PostgreSQL.
"""
freezers = []
orig_call = _FunctionGenerator.__call__
def FunctionGenerator_call(self, *args, **kwargs):
if self._FunctionGenerator__names == ['now']:
return cast(datetime.now().isoformat(), DateTime)
return orig_call(self, *args, **kwargs)
monkeypatch.setattr(_FunctionGenerator, '__call__', FunctionGenerator_call)
def _freeze_time(time_to_freeze):
freezer = freezegun.freeze_time(time_to_freeze)
freezer.start()
freezers.append(freezer)
yield _freeze_time
for freezer in reversed(freezers):
freezer.stop()
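# Usage sketch (hypothetical test, shown as a comment for illustration):
#
#     def test_rollover(freeze_time):
#         freeze_time('2020-12-31 23:59:59')
#         assert datetime.now().year == 2020
#
# Because func.now() is patched too, SQLAlchemy expressions built inside
# the test compare against the frozen timestamp rather than the DB clock.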
| [((797, 838), 'inspect.getmembers', 'inspect.getmembers', (['cls', 'inspect.ismethod'], {}), '(cls, inspect.ismethod)\n', (815, 838), False, 'import inspect\n'), ((1848, 1885), 'freezegun.freeze_time', 'freezegun.freeze_time', (['time_to_freeze'], {}), '(time_to_freeze)\n', (1869, 1885), False, 'import freezegun\n'), ((1624, 1638), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1636, 1638), False, 'from datetime import datetime\n')] |
RomashkaGang/Update_Checker | config.py | 1763ec5d8110462a72f5015abdc5c5be3e3c9498 | #!/usr/bin/env python3
# encoding: utf-8
import os
# Whether to enable debug mode. If enabled, exceptions raised while checking will no longer be ignored
# Recommended: enable in development, disable in production
DEBUG_ENABLE = False
# SQLite database file name
SQLITE_FILE = "saved.db"
# Log file name
LOG_FILE = "log.txt"
# Whether to enable logging
ENABLE_LOGGER = True
# Interval between check loops (default: 180 minutes)
LOOP_CHECK_INTERVAL = 180 * 60
# Proxy server
PROXIES = "127.0.0.1:1080"
# Request timeout
TIMEOUT = 20
# Whether the proxy is a SOCKS5 proxy
IS_SOCKS = False
# Whether to enable sending messages via the Telegram bot
ENABLE_SENDMESSAGE = False
# TG BOT TOKEN
TG_TOKEN = os.environ.get("TG_TOKEN", "")
# Send messages to...
TG_SENDTO = os.environ.get("TG_SENDTO", "")
if IS_SOCKS:
_PROXIES_DIC = {"http": "socks5h://%s" % PROXIES, "https": "socks5h://%s" % PROXIES}
else:
_PROXIES_DIC = {"http": PROXIES, "https": PROXIES}
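# Worked example: with IS_SOCKS = True and the default PROXIES value, the
# dict becomes {"http": "socks5h://127.0.0.1:1080",
#               "https": "socks5h://127.0.0.1:1080"},
# the mapping shape that HTTP clients such as `requests` expect.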
| [((453, 483), 'os.environ.get', 'os.environ.get', (['"""TG_TOKEN"""', '""""""'], {}), "('TG_TOKEN', '')\n", (467, 483), False, 'import os\n'), ((508, 539), 'os.environ.get', 'os.environ.get', (['"""TG_SENDTO"""', '""""""'], {}), "('TG_SENDTO', '')\n", (522, 539), False, 'import os\n')] |
zjj1002/aws-cloud-cmdb-system | cmdb-compliance/biz/handlers/asset_hipaa_data.py | 47982007688e5db1272435891cb654ab11d0d60a | from sqlalchemy import or_
from websdk.db_context import DBContext
from libs.base_handler import BaseHandler
from libs.pagination import pagination_util
from models.hipaa_data import HipaaData, model_to_dict
class HipaaDataHandler(BaseHandler):
@pagination_util
def get(self, *args, **kwargs):
key = self.get_argument('key', default=None, strip=True)
hipaa_data_list = []
with DBContext('r') as session:
if key:
                # fuzzy-match the key against every searchable column
hipaa_data_info = session.query(HipaaData).filter(
or_(HipaaData.profile.like('%{}%'.format(key)),
HipaaData.result.like('%{}%'.format(key)),
HipaaData.level.like('%{}%'.format(key)),
HipaaData.region.like('%{}%'.format(key)),
HipaaData.account_id.like('%{}%'.format(key)),
                        HipaaData.group.like('%{}%'.format(key)),
HipaaData.check_title.like('%{}%'.format(key)),
HipaaData.check_output.like('%{}%'.format(key)))
).filter(
HipaaData.result != "PASS"
).all()
else:
hipaa_data_info = session.query(HipaaData).filter(
HipaaData.result != "PASS"
).all()
for data in hipaa_data_info:
data_dict = model_to_dict(data)
hipaa_data_list.append(data_dict)
return hipaa_data_list
hipaa_data_host_urls = [
(r"/v1/cmdb/hipaa_data/", HipaaDataHandler),
]
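# Request sketch (hypothetical query value): a GET such as
#   /v1/cmdb/hipaa_data/?key=us-east-1
# returns the paginated non-PASS findings whose columns fuzzy-match the key.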
if __name__ == '__main__':
pass | [((412, 426), 'websdk.db_context.DBContext', 'DBContext', (['"""r"""'], {}), "('r')\n", (421, 426), False, 'from websdk.db_context import DBContext\n'), ((1480, 1499), 'models.hipaa_data.model_to_dict', 'model_to_dict', (['data'], {}), '(data)\n', (1493, 1499), False, 'from models.hipaa_data import HipaaData, model_to_dict\n')] |
hellocit/kadai2 | scripts/count.py | 896acc2394ea522d4b0d32db31321aea5b5f5dbd | #!/usr/bin/env python3
import rospy
from std_msgs.msg import Int32
import time
rospy.init_node('count')  # set the node name to 'count'
pub = rospy.Publisher('count_up', Int32, queue_size=1)  # create the 'count_up' publisher
rate = rospy.Rate(10)  # loop at 10 Hz
n = 0
time.sleep(2)
while not rospy.is_shutdown():
n += 1
if n % 3 == 0:
print("これは%d" % n)
pub.publish(n)
else:
pub.publish(n)
if n == 227:
print("\nThis is unko\n")
rate.sleep()
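# A matching subscriber sketch (hypothetical companion node):
#   rospy.Subscriber('count_up', Int32, lambda msg: rospy.loginfo(msg.data))
#   rospy.spin()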
| [((80, 104), 'rospy.init_node', 'rospy.init_node', (['"""count"""'], {}), "('count')\n", (95, 104), False, 'import rospy\n'), ((159, 207), 'rospy.Publisher', 'rospy.Publisher', (['"""count_up"""', 'Int32'], {'queue_size': '(1)'}), "('count_up', Int32, queue_size=1)\n", (174, 207), False, 'import rospy\n'), ((238, 252), 'rospy.Rate', 'rospy.Rate', (['(10)'], {}), '(10)\n', (248, 252), False, 'import rospy\n'), ((304, 317), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (314, 317), False, 'import time\n'), ((329, 348), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (346, 348), False, 'import rospy\n')] |
alexanderfefelov/nav-add-ons | snmp/nav/smidumps/ZyXEL_GS4012F_mib.py | c63d6942a9b8b1bf220efd7d33fb6be5f6bbb8af | # python version 1.0 DO NOT EDIT
#
# Generated by smidump version 0.4.8:
#
# smidump -f python ZYXEL-GS4012F-MIB
FILENAME = "mibs/ZyXEL/zyxel-GS4012F.mib"
MIB = {
"moduleName" : "ZYXEL-GS4012F-MIB",
"ZYXEL-GS4012F-MIB" : {
"nodetype" : "module",
"language" : "SMIv2",
"organization" :
"""ZyXEL""",
"contact" :
"""""",
"description" :
"""Fault event trap definitions""",
"revisions" : (
{
"date" : "2004-11-03 12:00",
"description" :
"""[Revision added by libsmi due to a LAST-UPDATED clause.]""",
},
{
"date" : "2004-11-01 12:00",
"description" :
"""[Revision added by libsmi due to a LAST-UPDATED clause.]""",
},
),
"identity node" : "faultTrapsMIB",
},
"imports" : (
{"module" : "RFC1155-SMI", "name" : "enterprises"},
{"module" : "SNMPv2-SMI", "name" : "OBJECT-TYPE"},
{"module" : "SNMPv2-TC", "name" : "RowStatus"},
{"module" : "SNMPv2-TC", "name" : "DateAndTime"},
{"module" : "SNMPv2-TC", "name" : "TruthValue"},
{"module" : "SNMPv2-TC", "name" : "StorageType"},
{"module" : "SNMPv2-TC", "name" : "MacAddress"},
{"module" : "RFC1213-MIB", "name" : "DisplayString"},
{"module" : "P-BRIDGE-MIB", "name" : "EnabledStatus"},
{"module" : "Q-BRIDGE-MIB", "name" : "PortList"},
{"module" : "BRIDGE-MIB", "name" : "dot1dBasePort"},
{"module" : "IF-MIB", "name" : "InterfaceIndexOrZero"},
{"module" : "SNMP-FRAMEWORK-MIB", "name" : "SnmpAdminString"},
{"module" : "INET-ADDRESS-MIB", "name" : "InetAddressType"},
{"module" : "INET-ADDRESS-MIB", "name" : "InetAddress"},
{"module" : "DISMAN-PING-MIB", "name" : "OperationResponseStatus"},
{"module" : "OSPF-MIB", "name" : "ospfIfIpAddress"},
{"module" : "OSPF-MIB", "name" : "ospfAddressLessIf"},
{"module" : "OSPF-MIB", "name" : "ospfAreaId"},
{"module" : "OSPF-MIB", "name" : "ospfNbrIpAddr"},
{"module" : "OSPF-MIB", "name" : "ospfNbrAddressLessIndex"},
{"module" : "OSPF-MIB", "name" : "ospfLsdbAreaId"},
{"module" : "OSPF-MIB", "name" : "ospfLsdbType"},
{"module" : "OSPF-MIB", "name" : "ospfLsdbLSID"},
{"module" : "OSPF-MIB", "name" : "ospfLsdbRouterId"},
{"module" : "OSPF-MIB", "name" : "ospfVirtIfAreaID"},
{"module" : "OSPF-MIB", "name" : "ospfVirtIfNeighbor"},
{"module" : "BRIDGE-MIB", "name" : "BridgeId"},
{"module" : "BRIDGE-MIB", "name" : "Timeout"},
),
"typedefs" : {
"UtcTimeStamp" : {
"basetype" : "Unsigned32",
"status" : "current",
"description" :
"""Universal Time Coordinated as a 32-bit value that designates
the number of seconds since Jan 1, 1970 12:00AM.""",
},
"EventIdNumber" : {
"basetype" : "Integer32",
"status" : "current",
"description" :
"""This textual convention describes the index that uniquely
identifies a fault event type in the entire system. Every fault
event type, e.g. link down, has a unique EventIdNumber.""",
},
"EventSeverity" : {
"basetype" : "Enumeration",
"status" : "current",
"critical" : {
"nodetype" : "namednumber",
"number" : "1"
},
"major" : {
"nodetype" : "namednumber",
"number" : "2"
},
"minor" : {
"nodetype" : "namednumber",
"number" : "3"
},
"informational" : {
"nodetype" : "namednumber",
"number" : "4"
},
"description" :
"""This textual convention describes the severity of a fault event.
The decreasing order of severity is shown in the textual
convention.""",
},
"EventServiceAffective" : {
"basetype" : "Enumeration",
"status" : "current",
"noServiceAffected" : {
"nodetype" : "namednumber",
"number" : "1"
},
"serviceAffected" : {
"nodetype" : "namednumber",
"number" : "2"
},
"description" :
"""This textual convention indicates whether an event is immediately
service affecting or not.""",
},
"InstanceType" : {
"basetype" : "Enumeration",
"status" : "current",
"unknown" : {
"nodetype" : "namednumber",
"number" : "1"
},
"node" : {
"nodetype" : "namednumber",
"number" : "2"
},
"shelf" : {
"nodetype" : "namednumber",
"number" : "3"
},
"line" : {
"nodetype" : "namednumber",
"number" : "4"
},
"switch" : {
"nodetype" : "namednumber",
"number" : "5"
},
"lsp" : {
"nodetype" : "namednumber",
"number" : "6"
},
"l2Interface" : {
"nodetype" : "namednumber",
"number" : "7"
},
"l3Interface" : {
"nodetype" : "namednumber",
"number" : "8"
},
"rowIndex" : {
"nodetype" : "namednumber",
"number" : "9"
},
"description" :
"""This textual convention describes the type of an instanceId
associated with each event and by that means specifies how
the instanceId variable should be interpreted.
Various instanceId types are specified below to enable fault
monitoring for different kind of devices from fixed
configuration pizza boxes to multi chassis nodes. All
instanceId types may not need to be used in every device
type.
Note also that instanceId semantics are element type dependent
(e.g. different kind of interface naming conventions may be used)
and thus instanceId usage may vary from element to element.
=========================================================================
Type Description Example form
of InstanceId
=========================================================================
unknown (1) unknown type - Irrelevant-
-------------------------------------------------------------------------
node (2) Associated with events originating from 1
the node. Used for general events that (Node number)
can not be associated with any specific
block. InstanceId value 1 is used for
single node equipment.
-------------------------------------------------------------------------
shelf (3) Associated with events originating from 1
the shelf. In the case of fixed (shelf number)
configuration devices this type is used
for events that are associated with the
physical enclosure, e.g. faults related
to fan etc. InstanceId value 1 is used
               for single shelf equipment.
-------------------------------------------------------------------------
line (4) Associated with events originating from
physical interfaces or associated
components such as line cards.
InstanceId usage examples for faults
originating from:
- Physical port: Simply port number, e.g. .......1
-------------------------------------------------------------------------
switch (5) Associated with events originating from 1
               a switch chip or a switch card.           (switch number)
               For single switch equipment InstanceId
               value 1 is used, for multi switch nodes
               InstanceId semantics is for further
study.
-------------------------------------------------------------------------
lsp (6) Associated with events originating from 1
a particular lsp. (lsp index)
NOTE: In this case the InstanceName
contains the lsp name and InstanceId
contains lsp index.
-------------------------------------------------------------------------
l2Interface(7) Associated with events originating from - TBD -
a particular layer 2 interface. Used for
layer 2 related events such as L2 control
protocol faults. InstanceId semantics is
for further study.
-------------------------------------------------------------------------
l3Interface(8) Associated with events originating from - TBD -
a particular layer 3 interface. Used for
layer 3 related events such as L3 control
protocol faults. InstanceId semantics is
for further study.
-------------------------------------------------------------------------
rowIndex (9) Associated with events reporting about a
'logical' or conceptual table that consists
of rows. The Instance Id is the index/key
for a row in the table. The format of the
Instance Id will simply be a series of decimal
               numbers separated by a '.':
=========================================================================""",
},
"EventPersistence" : {
"basetype" : "Enumeration",
"status" : "current",
"normal" : {
"nodetype" : "namednumber",
"number" : "1"
},
"delta" : {
"nodetype" : "namednumber",
"number" : "2"
},
"description" :
"""This textual convention indicates whether the event is delta
(automatically and immediately cleared) or normal (not
automatically cleared).""",
},
"MstiOrCistInstanceIndex" : {
"basetype" : "Integer32",
"status" : "current",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
"description" :
"""This textual convention is an extension of the
MstiInstanceIndex convention. This extension permits the
additional value of zero, which means Common and Internal
Spanning Tree (CIST).""",
},
}, # typedefs
"nodes" : {
"zyxel" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890",
}, # node
"products" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1",
}, # node
"accessSwitch" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5",
}, # node
"esSeries" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8",
}, # node
"gs4012f" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20",
}, # node
"sysInfo" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1",
}, # node
"sysSwPlatformMajorVers" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""SW platform major version, e.g. 3.""",
}, # scalar
"sysSwPlatformMinorVers" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""SW platform minor version, e.g. 50.""",
}, # scalar
"sysSwModelString" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Model letters, e.g. TJ""",
}, # scalar
"sysSwVersionControlNbr" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Version control number, e.g. 0.""",
}, # scalar
"sysSwDay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""SW compilation day, e.g. 19.""",
}, # scalar
"sysSwMonth" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""SW compilation month, e.g. 8.""",
}, # scalar
"sysSwYear" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""SW compilation year, e.g. 2004.""",
}, # scalar
"sysHwMajorVers" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""HW major version number, e.g. 1.""",
}, # scalar
"sysHwMinorVers" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""HW minor version number, e.g. 0.""",
}, # scalar
"sysSerialNumber" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial number""",
}, # scalar
"rateLimitSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.2",
}, # node
"rateLimitState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Ingress/egress rate limiting enabled/disabled for the switch.""",
}, # scalar
"rateLimitPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.2.2",
"status" : "current",
"description" :
"""""",
}, # table
"rateLimitPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.2.2.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in rateLimitPortTable.""",
}, # row
"rateLimitPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Ingress/egress rate limiting enabled/disabled on the port.""",
}, # column
"rateLimitPortCommitRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Commit rate in Kbit/s. The range of FE port is between 0 and 100,000. For GE port, the range is between 0 and 1000,000.""",
}, # column
"rateLimitPortPeakRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Peak rate in Kbit/s. The range of FE port is between 1 and 100,000. For GE port, the range is between 1 and 1000,000.""",
}, # column
"rateLimitPortEgrRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Egress rate in Mbit/s. The granularity of FE port is between 1 and 100. For GE port, the granularity is between 1 and 1000.""",
}, # column
"rateLimitPortPeakState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.2.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Ingress peak rate limiting enabled/disabled on the port.""",
}, # column
"rateLimitPortEgrState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.2.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Egress rate limiting enabled/disabled on the port.""",
}, # column
"rateLimitPortCommitState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.2.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Ingress commit rate limiting enabled/disabled on the port.""",
}, # column
"brLimitSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.3",
}, # node
"brLimitState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.3.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Broadcast/multicast/DLF rate limiting enabled/disabled for the switch.""",
}, # scalar
"brLimitPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.3.2",
"status" : "current",
"description" :
"""""",
}, # table
"brLimitPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.3.2.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in brLimitPortTable.""",
}, # row
"brLimitPortBrState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.3.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Broadcast rate limiting enabled/disabled on the port.""",
}, # column
"brLimitPortBrRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.3.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Allowed broadcast rate in pkts/s. For FE port,
the maximum value is 148800. For GE port, the maximum value is 262143.""",
}, # column
"brLimitPortMcState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.3.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Multicast rate limiting enabled/disabled on the port.""",
}, # column
"brLimitPortMcRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.3.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""AAllowed mullticast rate in pkts/s. For FE port,
the maximum value is 148800. For GE port, the maximum value is 262143.""",
}, # column
"brLimitPortDlfState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.3.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Destination lookup failure frames rate limiting enabled/disabled on the port.""",
}, # column
"brLimitPortDlfRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.3.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Allowed destination lookup failure frames rate in pkts/s.
For FE port, the maximum value is 148800. For GE port, the maximum value is 262143.""",
}, # column
"portSecuritySetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.4",
}, # node
"portSecurityState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.4.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"portSecurityPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.4.2",
"status" : "current",
"description" :
"""""",
}, # table
"portSecurityPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.4.2.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in portSecurityPortTable.""",
}, # row
"portSecurityPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.4.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Port Security enabled/disabled on the port.
Active(1) means this port only accept frames from static MAC addresses that are configured for the port,
and dynamic MAC address frames up to the number specified by portSecurityPortCount object.""",
}, # column
"portSecurityPortLearnState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.4.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""MAC address learning enabled/disable on the port.""",
}, # column
"portSecurityPortCount" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.4.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Number of (dynamic) MAC addresses that may be learned on the port.""",
}, # column
"portSecurityMacFreeze" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.4.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"Q-BRIDGE-MIB", "name" : "PortList"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"vlanTrunkSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.5",
}, # node
"vlanTrunkPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.5.1",
"status" : "current",
"description" :
"""""",
}, # table
"vlanTrunkPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.5.1.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in vlanTrunkPortTable.""",
}, # row
"vlanTrunkPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.5.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""VlanTrunking enabled/disabled on the port.
Active(1) to allow frames belonging to unknown
VLAN groups to pass through the switch.""",
}, # column
"ctlProtTransSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.6",
}, # node
"ctlProtTransState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.6.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Bridge control protocol transparency enabled/disabled for the switch""",
}, # scalar
"ctlProtTransTunnelPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.6.2",
"status" : "current",
"description" :
"""""",
}, # table
"ctlProtTransTunnelPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.6.2.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in ctlProtTransTunnelPortTable.""",
}, # row
"ctlProtTransTunnelMode" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.6.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"peer" : {
"nodetype" : "namednumber",
"number" : "0"
},
"tunnel" : {
"nodetype" : "namednumber",
"number" : "1"
},
"discard" : {
"nodetype" : "namednumber",
"number" : "2"
},
"network" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""Bridge control protocol transparency mode for the port.
Modes: Peer(0), Tunnel(1), Discard(2), Network(3)""",
}, # column
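    # A hedged sketch of decoding the "Enumeration" syntax entries used above
    # (kept as comments so the literal stays valid Python). The named numbers
    # sit next to "basetype" in the same dict, so they are filtered out by
    # nodetype. `MIB` is again an assumed name for the enclosing dictionary.
    #
    #   def enum_labels(entry):
    #       """Map named-number values to labels for an Enumeration object."""
    #       t = entry["syntax"]["type"]
    #       return {int(v["number"]): k for k, v in t.items()
    #               if isinstance(v, dict) and v.get("nodetype") == "namednumber"}
    #
    #   # enum_labels(MIB["ctlProtTransTunnelMode"])
    #   #   -> {0: 'peer', 1: 'tunnel', 2: 'discard', 3: 'network'}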
"vlanStackSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.7",
}, # node
"vlanStackState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.7.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""VLAN Stacking enabled/disabled for the switch.""",
}, # scalar
"vlanStackTpid" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.7.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""SP TPID in hex format, e.g. 8100.""",
}, # scalar
"vlanStackPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.7.3",
"status" : "current",
"description" :
"""""",
}, # table
"vlanStackPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.7.3.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in vlanStackPortTable.""",
}, # row
"vlanStackPortMode" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.7.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"normal" : {
"nodetype" : "namednumber",
"number" : "1"
},
"access" : {
"nodetype" : "namednumber",
"number" : "2"
},
"tunnel" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""Mode of the port.Set Access mode to have the switch add the SP TPID tag to all incoming
frames received on this port. Set Access mode for ingress ports at the
edge of the service provider's network. Set Tunnel mode (available for
Gigabit ports only) for egress ports at the edge of the service provider's
network. In order to support VLAN stacking on a port, the port must be able
to allow frames of 1526 Bytes (1522 Bytes + 4 Bytes for the second tag)
to pass through it. Access (0), tunnel (1)""",
}, # column
"vlanStackPortVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.7.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""VLAN ID used in service provider tag.""",
}, # column
"vlanStackPortPrio" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.7.3.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"prioriry-0" : {
"nodetype" : "namednumber",
"number" : "0"
},
"prioriry-1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"prioriry-2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"prioriry-3" : {
"nodetype" : "namednumber",
"number" : "3"
},
"prioriry-4" : {
"nodetype" : "namednumber",
"number" : "4"
},
"prioriry-5" : {
"nodetype" : "namednumber",
"number" : "5"
},
"prioriry-6" : {
"nodetype" : "namednumber",
"number" : "6"
},
"prioriry-7" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readwrite",
"description" :
"""Priority value for service provider tag.
0 is the lowest priority level and 7 is the highest.""",
}, # column
"dot1xSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.8",
}, # node
"portAuthState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.8.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""802.1x port authentication enabled/disabled for the switch.""",
}, # scalar
"portAuthTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.8.4",
"status" : "current",
"description" :
"""""",
}, # table
"portAuthEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.8.4.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in portAuthTable.""",
}, # row
"portAuthEntryState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.8.4.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""802.1x port authentication enabled or disabled on the port.""",
}, # column
"portReAuthEntryState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.8.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""802.1x port re-authentication enabled or disabled on the port.""",
}, # column
"portReAuthEntryTimer" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.8.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Re-authentication timer in seconds.""",
}, # column
"hwMonitorInfo" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9",
}, # node
"fanRpmTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.1",
"status" : "current",
"description" :
"""""",
}, # table
"fanRpmEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.1.1",
"status" : "current",
"linkage" : [
"fanRpmIndex",
],
"description" :
"""An entry in fanRpmTable.""",
}, # row
"fanRpmIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Index of FAN.""",
}, # column
"fanRpmCurValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Current speed in Revolutions Per Minute (RPM) on the fan.""",
}, # column
"fanRpmMaxValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Maximum speed measured in Revolutions Per Minute (RPM) on the fan.""",
}, # column
"fanRpmMinValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Minimum speed measured in Revolutions Per Minute (RPM) on the fan.""",
}, # column
"fanRpmLowThresh" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The minimum speed at which a normal fan should work.""",
}, # column
"fanRpmDescr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""'Normal' indicates that this fan is functioning above the minimum speed.
'Error' indicates that this fan is functioning below the minimum speed.""",
}, # column
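    # A small sketch (as comments) of navigating the hardware-monitor tables
    # here generically: the columns of a row ("entry") node are the nodes
    # whose OIDs sit directly beneath it. `MIB` is an assumed name.
    #
    #   def table_columns(mib, entry_name):
    #       """Column names under a row node, ordered by sub-identifier."""
    #       prefix = mib[entry_name]["oid"] + "."
    #       cols = [n for n, d in mib.items()
    #               if d.get("nodetype") == "column" and d["oid"].startswith(prefix)]
    #       return sorted(cols, key=lambda n: int(mib[n]["oid"].rsplit(".", 1)[1]))
    #
    #   # table_columns(MIB, "fanRpmEntry")
    #   #   -> ['fanRpmIndex', 'fanRpmCurValue', 'fanRpmMaxValue', ...]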
"tempTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.2",
"status" : "current",
"description" :
"""""",
}, # table
"tempEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.2.1",
"status" : "current",
"linkage" : [
"tempIndex",
],
"description" :
"""An entry in tempTable.""",
}, # row
"tempIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"mac" : {
"nodetype" : "namednumber",
"number" : "1"
},
"cpu" : {
"nodetype" : "namednumber",
"number" : "2"
},
"phy" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""Index of temperature unit. 1:MAC, 2:CPU, 3:PHY""",
}, # column
"tempCurValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The current temperature measured at this sensor.""",
}, # column
"tempMaxValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The maximum temperature measured at this sensor.""",
}, # column
"tempMinValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The minimum temperature measured at this sensor.""",
}, # column
"tempHighThresh" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The upper temperature limit at this sensor.""",
}, # column
"tempDescr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""'Normal' indicates temperatures below the threshold and 'Error' for those above.""",
}, # column
"voltageTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3",
"status" : "current",
"description" :
"""""",
}, # table
"voltageEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1",
"status" : "current",
"linkage" : [
"voltageIndex",
],
"description" :
"""An entry in voltageTable.""",
}, # row
"voltageIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Index of voltage.""",
}, # column
"voltageCurValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The current voltage reading.""",
}, # column
"voltageMaxValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The maximum voltage measured at this point.""",
}, # column
"voltageMinValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The minimum voltage measured at this point.""",
}, # column
"voltageNominalValue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The normal voltage at wchich the switch work.""",
}, # column
"voltageLowThresh" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The minimum voltage at which the switch should work.""",
}, # column
"voltageDescr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.9.3.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""'Normal' indicates that the voltage is within an acceptable operating range
at this point; otherwise 'Error' is displayed.""",
}, # column
"snmpSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10",
}, # node
"snmpGetCommunity" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"snmpSetCommunity" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"snmpTrapCommunity" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"snmpTrapDestTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.4",
"status" : "current",
"description" :
"""""",
}, # table
"snmpTrapDestEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.4.1",
"create" : "true",
"status" : "current",
"linkage" : [
"snmpTrapDestIP",
],
"description" :
"""An entry in snmpTrapDestTable.""",
}, # row
"snmpTrapDestIP" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.4.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "noaccess",
"description" :
"""IP address of trap destination.""",
}, # column
"snmpTrapDestRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
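    # Row-creation note (a sketch under assumptions, as comments): this table
    # is writable ("create" : "true" on snmpTrapDestEntry) and is indexed by
    # snmpTrapDestIP, an IpAddress, so the index is encoded as four extra OID
    # sub-identifiers. Creating a destination would mean setting the RowStatus
    # instance below to createAndGo(4). `MIB` is an assumed name.
    #
    #   def trap_dest_rowstatus_oid(mib, ip):
    #       """Instance OID of snmpTrapDestRowStatus for one destination IP."""
    #       return ".".join([mib["snmpTrapDestRowStatus"]["oid"]] + ip.split("."))
    #
    #   # trap_dest_rowstatus_oid(MIB, "192.0.2.1")
    #   #   -> '1.3.6.1.4.1.890.1.5.8.20.10.4.1.2.192.0.2.1'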
"snmpTrapDestPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The UDP port of the trap destination.""",
}, # column
"snmpTrapVersion" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.4.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"v1" : {
"nodetype" : "namednumber",
"number" : "0"
},
"v2c" : {
"nodetype" : "namednumber",
"number" : "1"
},
"v3" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""The SNMP protocol version to send traps.""",
}, # column
"snmpTrapUserName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.4.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""The user name for sending SNMPv3 traps.""",
}, # column
"snmpVersion" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"v2c" : {
"nodetype" : "namednumber",
"number" : "0"
},
"v3" : {
"nodetype" : "namednumber",
"number" : "1"
},
"v3v2c" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""The SNMP version to be used. v3v2c means that the manager
can get/set by SNMPv3 and can get by SNMPv2c.""",
}, # scalar
"snmpUserTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.6",
"status" : "current",
"description" :
"""A table that contains SNMPv3 user information.""",
}, # table
"snmpUserEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.6.1",
"status" : "current",
"linkage" : [
"snmpUserName",
],
"description" :
"""An entry of snmpUserTable.""",
}, # row
"snmpUserName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.6.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The user name.""",
}, # column
"snmpUserSecurityLevel" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.6.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"noAuthNoPriv" : {
"nodetype" : "namednumber",
"number" : "0"
},
"authNoPriv" : {
"nodetype" : "namednumber",
"number" : "1"
},
"authPriv" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""The level of security at which SNMP messages can be sent or
with which operations are being processed.""",
}, # column
"snmpUserAuthProtocol" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.6.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"md5" : {
"nodetype" : "namednumber",
"number" : "0"
},
"sha" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""The type of authentication protocol to be used.""",
}, # column
"snmpUserPrivProtocol" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.6.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"des" : {
"nodetype" : "namednumber",
"number" : "0"
},
"aes" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""The type of privacy protocol to be used.""",
}, # column
"snmpTrapGroupTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.7",
"status" : "current",
"description" :
"""""",
}, # table
"snmpTrapGroupEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.7.1",
"status" : "current",
"linkage" : [
"snmpTrapDestIP",
],
"description" :
"""An entry in snmpTrapGroupTable.""",
}, # row
"snmpTrapSystemGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.7.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"coldStart" : {
"nodetype" : "namednumber",
"number" : "0"
},
"warmStart" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fanSpeed" : {
"nodetype" : "namednumber",
"number" : "2"
},
"temperature" : {
"nodetype" : "namednumber",
"number" : "3"
},
"voltage" : {
"nodetype" : "namednumber",
"number" : "4"
},
"reset" : {
"nodetype" : "namednumber",
"number" : "5"
},
"timeSync" : {
"nodetype" : "namednumber",
"number" : "6"
},
"intrusionlock" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"snmpTrapInterfaceGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.7.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"linkup" : {
"nodetype" : "namednumber",
"number" : "0"
},
"linkdown" : {
"nodetype" : "namednumber",
"number" : "1"
},
"autonegotiation" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"snmpTrapAAAGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.7.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"authentication" : {
"nodetype" : "namednumber",
"number" : "0"
},
"accounting" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"snmpTrapIPGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.7.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"ping" : {
"nodetype" : "namednumber",
"number" : "0"
},
"traceroute" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"snmpTrapSwitchGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.10.7.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"stp" : {
"nodetype" : "namednumber",
"number" : "0"
},
"mactable" : {
"nodetype" : "namednumber",
"number" : "1"
},
"rmon" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
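    # A hedged sketch (comments only) of reading the BITS columns above: SNMP
    # encodes BITS as an octet string in which bit 0 is the most significant
    # bit of the first octet, so named number n maps to octet n // 8 under
    # mask 0x80 >> (n % 8). `value` is assumed to be the raw bytes of a GET.
    #
    #   def set_bits(entry, value):
    #       """Labels whose bit is set in a BITS value for one of these columns."""
    #       t = entry["syntax"]["type"]
    #       names = {int(v["number"]): k for k, v in t.items()
    #                if isinstance(v, dict) and v.get("nodetype") == "namednumber"}
    #       return [names[n] for n in sorted(names)
    #               if n // 8 < len(value) and value[n // 8] & (0x80 >> (n % 8))]
    #
    #   # set_bits(MIB["snmpTrapSwitchGroup"], b"\xc0") -> ['stp', 'mactable']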
"dateTimeSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11",
}, # node
"dateTimeServerType" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "1"
},
"daytime" : {
"nodetype" : "namednumber",
"number" : "2"
},
"time" : {
"nodetype" : "namednumber",
"number" : "3"
},
"ntp" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readwrite",
"description" :
"""The time service protocol.""",
}, # scalar
"dateTimeServerIP" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""IP address of time server.""",
}, # scalar
"dateTimeZone" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The time difference between UTC. Ex: +01""",
}, # scalar
"dateTimeNewDateYear" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The new date in year.""",
}, # scalar
"dateTimeNewDateMonth" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The new date in month.""",
}, # scalar
"dateTimeNewDateDay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The new date in day.""",
}, # scalar
"dateTimeNewTimeHour" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The new time in hour.""",
}, # scalar
"dateTimeNewTimeMinute" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The new time in minute.""",
}, # scalar
"dateTimeNewTimeSecond" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""The new time in second.""",
}, # scalar
"dateTimeDaylightSavingTimeSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10",
}, # node
"daylightSavingTimeState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service enabled/disabled for the switch.""",
}, # scalar
"daylightSavingTimeStartDateWeek" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"first" : {
"nodetype" : "namednumber",
"number" : "1"
},
"second" : {
"nodetype" : "namednumber",
"number" : "2"
},
"third" : {
"nodetype" : "namednumber",
"number" : "3"
},
"fourth" : {
"nodetype" : "namednumber",
"number" : "4"
},
"last" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service start week.""",
}, # scalar
"daylightSavingTimeStartDateDay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"sunday" : {
"nodetype" : "namednumber",
"number" : "0"
},
"monday" : {
"nodetype" : "namednumber",
"number" : "1"
},
"tuesday" : {
"nodetype" : "namednumber",
"number" : "2"
},
"wednesday" : {
"nodetype" : "namednumber",
"number" : "3"
},
"thursday" : {
"nodetype" : "namednumber",
"number" : "4"
},
"friday" : {
"nodetype" : "namednumber",
"number" : "5"
},
"saturday" : {
"nodetype" : "namednumber",
"number" : "6"
},
},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service start day.""",
}, # scalar
"daylightSavingTimeStartDateMonth" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"january" : {
"nodetype" : "namednumber",
"number" : "1"
},
"february" : {
"nodetype" : "namednumber",
"number" : "2"
},
"march" : {
"nodetype" : "namednumber",
"number" : "3"
},
"april" : {
"nodetype" : "namednumber",
"number" : "4"
},
"may" : {
"nodetype" : "namednumber",
"number" : "5"
},
"june" : {
"nodetype" : "namednumber",
"number" : "6"
},
"july" : {
"nodetype" : "namednumber",
"number" : "7"
},
"august" : {
"nodetype" : "namednumber",
"number" : "8"
},
"september" : {
"nodetype" : "namednumber",
"number" : "9"
},
"october" : {
"nodetype" : "namednumber",
"number" : "10"
},
"november" : {
"nodetype" : "namednumber",
"number" : "11"
},
"december" : {
"nodetype" : "namednumber",
"number" : "12"
},
},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service start month.""",
}, # scalar
"daylightSavingTimeStartDateHour" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service start time.""",
}, # scalar
"daylightSavingTimeEndDateWeek" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"first" : {
"nodetype" : "namednumber",
"number" : "1"
},
"second" : {
"nodetype" : "namednumber",
"number" : "2"
},
"third" : {
"nodetype" : "namednumber",
"number" : "3"
},
"fourth" : {
"nodetype" : "namednumber",
"number" : "4"
},
"last" : {
"nodetype" : "namednumber",
"number" : "5"
},
},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service end week.""",
}, # scalar
"daylightSavingTimeEndDateDay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"sunday" : {
"nodetype" : "namednumber",
"number" : "0"
},
"monday" : {
"nodetype" : "namednumber",
"number" : "1"
},
"tuesday" : {
"nodetype" : "namednumber",
"number" : "2"
},
"wednesday" : {
"nodetype" : "namednumber",
"number" : "3"
},
"thursday" : {
"nodetype" : "namednumber",
"number" : "4"
},
"friday" : {
"nodetype" : "namednumber",
"number" : "5"
},
"saturday" : {
"nodetype" : "namednumber",
"number" : "6"
},
},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service end day.""",
}, # scalar
"daylightSavingTimeEndDateMonth" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"january" : {
"nodetype" : "namednumber",
"number" : "1"
},
"february" : {
"nodetype" : "namednumber",
"number" : "2"
},
"march" : {
"nodetype" : "namednumber",
"number" : "3"
},
"april" : {
"nodetype" : "namednumber",
"number" : "4"
},
"may" : {
"nodetype" : "namednumber",
"number" : "5"
},
"june" : {
"nodetype" : "namednumber",
"number" : "6"
},
"july" : {
"nodetype" : "namednumber",
"number" : "7"
},
"august" : {
"nodetype" : "namednumber",
"number" : "8"
},
"september" : {
"nodetype" : "namednumber",
"number" : "9"
},
"october" : {
"nodetype" : "namednumber",
"number" : "10"
},
"november" : {
"nodetype" : "namednumber",
"number" : "11"
},
"december" : {
"nodetype" : "namednumber",
"number" : "12"
},
},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service end month.""",
}, # scalar
"daylightSavingTimeEndDateHour" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.11.10.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Daylight saving time service end time.""",
}, # scalar
"sysMgmt" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12",
}, # node
"sysMgmtConfigSave" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"config_1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"config_2" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""If setting value is given, the variable write index will be set and running-config will be written to the assigned configuration file.
If not, running-config will be written to the booting one.""",
}, # scalar
"sysMgmtBootupConfig" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"config_1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"config_2" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""The setting value (read index) will be written into non-volatile memory.
While rebooting, the variable write index is equal to read index initially.
You can change the value of write index by CLI / MIB.""",
}, # scalar
"sysMgmtReboot" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"nothing" : {
"nodetype" : "namednumber",
"number" : "0"
},
"reboot" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""Reboot switch from SNMP. 1:Reboot, 0:Nothing""",
}, # scalar
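    # A hedged sketch (comments only; pysnmp is an assumption, not something
    # this file uses) of driving the sysMgmtReboot scalar above. Scalars take
    # a trailing .0 instance, and 1 means reboot per the description.
    #
    #   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
    #                             ContextData, ObjectType, ObjectIdentity, setCmd)
    #   from pysnmp.proto.rfc1902 import Integer
    #
    #   err_ind, err_stat, err_idx, var_binds = next(setCmd(
    #       SnmpEngine(), CommunityData('private'),
    #       UdpTransportTarget(('192.0.2.10', 161)), ContextData(),
    #       ObjectType(ObjectIdentity('1.3.6.1.4.1.890.1.5.8.20.12.3.0'),
    #                  Integer(1))))  # 1 = reboot, 0 = nothing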
"sysMgmtDefaultConfig" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"nothing" : {
"nodetype" : "namednumber",
"number" : "0"
},
"reset_to_default" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""Erase running config and reset to default.""",
}, # scalar
"sysMgmtLastActionStatus" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"success" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fail" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""Display status of last mgmt action.""",
}, # scalar
"sysMgmtSystemStatus" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"sysAlarmDetected" : {
"nodetype" : "namednumber",
"number" : "0"
},
"sysTemperatureError" : {
"nodetype" : "namednumber",
"number" : "1"
},
"sysFanRPMError" : {
"nodetype" : "namednumber",
"number" : "2"
},
"sysVoltageRangeError" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""This variable indicates the status of the system.
The sysMgmtAlarmStatus is a bit map represented
a sum, therefore, it can represent multiple defects
simultaneously. The sysNoDefect should be set if and only if
no other flag is set.
The various bit positions are:
0 sysAlarmDetected
1 sysTemperatureError
2 sysFanRPMError
3 sysVoltageRangeError""",
}, # scalar
"sysMgmtCPUUsage" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Show device CPU load in %, it's the snapshot of CPU load when
getting the values.""",
}, # scalar
"sysMgmtCounterReset" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"enable" : {
"nodetype" : "namednumber",
"number" : "1"
},
"disable" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""Reset all port counters.""",
}, # scalar
"sysMgmtTftpServiceSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12.10",
}, # node
"sysMgmtTftpServerIp" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12.10.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
""" IP address of TFTP server""",
}, # scalar
"sysMgmtTftpRemoteFileName" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.12.10.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""The file name that you want to backup to or restore from TFTP server""",
}, # scalar
"layer2Setup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13",
}, # node
"vlanTypeSetup" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"dot1Q" : {
"nodetype" : "namednumber",
"number" : "1"
},
"port_based" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"igmpSnoopingStateSetup" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"tagVlanPortIsolationState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"stpState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"igmpFilteringStateSetup" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"unknownMulticastFrameForwarding" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"flooding" : {
"nodetype" : "namednumber",
"number" : "1"
},
"drop" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"multicastGrpHostTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Specify host timeout for all multicast groups when the specific port is in auto mode.""",
}, # scalar
"multicastGrpLeaveTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Specify leave timeout for all multicast groups.""",
}, # scalar
"reservedMulticastFrameForwarding" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"flooding" : {
"nodetype" : "namednumber",
"number" : "1"
},
"drop" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"igmpsnp8021pPriority" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Set the 802.1p priority of control messages for igmp-snooping(0~8, 8-No Change)""",
}, # scalar
"igmpsnpVlanMode" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"auto" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fixed" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"stpMode" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"rstp" : {
"nodetype" : "namednumber",
"number" : "1"
},
"mrstp" : {
"nodetype" : "namednumber",
"number" : "2"
},
"mstp" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"igmpsnpVlanTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.13",
"status" : "current",
"description" :
"""""",
}, # table
"igmpsnpVlanEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.13.1",
"create" : "true",
"status" : "current",
"linkage" : [
"igmpsnpVid",
],
"description" :
"""An entry in IgmpsnpVlanTable.""",
}, # row
"igmpsnpVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.13.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpsnpVlanName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.13.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"igmpsnpVlanRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.13.13.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ipSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14",
}, # node
"dnsIpAddress" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"defaultMgmt" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"in_band" : {
"nodetype" : "namednumber",
"number" : "0"
},
"out_of_band" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"defaultGateway" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"outOfBandIpSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.4",
}, # node
"outOfBandIp" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.4.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"outOfBandSubnetMask" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.4.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"outOfBandGateway" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.4.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"maxNumOfInbandIp" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"inbandIpTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.6",
"status" : "current",
"description" :
"""""",
}, # table
"inbandIpEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.6.1",
"create" : "true",
"status" : "current",
"linkage" : [
"inbandEntryIp",
"inbandEntrySubnetMask",
],
"description" :
"""An entry in inbandIpTable.""",
}, # row
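    # Index note (a sketch, as comments): inbandIpEntry is indexed by two
    # IpAddress objects, so an instance OID appends eight sub-identifiers,
    # four for each address. Hypothetical helper, with `MIB` assumed:
    #
    #   def inband_instance_oid(mib, column, ip, mask):
    #       """Instance OID of an inbandIpTable column for one (ip, mask) row."""
    #       return ".".join([mib[column]["oid"]] + ip.split(".") + mask.split("."))
    #
    #   # inband_instance_oid(MIB, "inbandEntryRowStatus", "192.0.2.2", "255.255.255.0")
    #   #   -> '1.3.6.1.4.1.890.1.5.8.20.14.6.1.4.192.0.2.2.255.255.255.0'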
"inbandEntryIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.6.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"inbandEntrySubnetMask" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.6.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"inbandEntryVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.6.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"inbandEntryRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.14.6.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"filterSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.15",
}, # node
"filterTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.15.1",
"status" : "current",
"description" :
"""""",
}, # table
"filterEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.15.1.1",
"create" : "true",
"status" : "current",
"linkage" : [
"filterMacAddr",
"filterVid",
],
"description" :
"""An entry in filterTable.""",
}, # row
"filterName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.15.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"filterActionState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.15.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"discard_source" : {
"nodetype" : "namednumber",
"number" : "1"
},
"discard_destination" : {
"nodetype" : "namednumber",
"number" : "2"
},
"both" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"filterMacAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.15.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"filterVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.15.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"filterRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.15.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mirrorSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.16",
}, # node
"mirrorState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.16.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"mirrorMonitorPort" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.16.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"mirrorTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.16.3",
"status" : "current",
"description" :
"""""",
}, # table
"mirrorEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.16.3.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in mirrorTable.""",
}, # row
"mirrorMirroredState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.16.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mirrorDirection" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.16.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"ingress" : {
"nodetype" : "namednumber",
"number" : "0"
},
"egress" : {
"nodetype" : "namednumber",
"number" : "1"
},
"both" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"aggrSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17",
}, # node
"aggrState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"aggrSystemPriority" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"aggrGroupTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17.3",
"status" : "current",
"description" :
"""""",
}, # table
"aggrGroupEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17.3.1",
"status" : "current",
"linkage" : [
"aggrGroupIndex",
],
"description" :
"""An entry in aggrGroupTable.""",
}, # row
"aggrGroupIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"aggrGroupState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"aggrGroupDynamicState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"aggrPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17.4",
"status" : "current",
"description" :
"""""",
}, # table
"aggrPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17.4.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in aggrPortTable.""",
}, # row
"aggrPortGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"t1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"t2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"t3" : {
"nodetype" : "namednumber",
"number" : "3"
},
"t4" : {
"nodetype" : "namednumber",
"number" : "4"
},
"t5" : {
"nodetype" : "namednumber",
"number" : "5"
},
"t6" : {
"nodetype" : "namednumber",
"number" : "6"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"aggrPortDynamicStateTimeout" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.17.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accessCtlSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18",
}, # node
"accessCtlTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.1",
"status" : "current",
"description" :
"""""",
}, # table
"accessCtlEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.1.1",
"status" : "current",
"linkage" : [
"accessCtlService",
],
"description" :
"""An entry in accessCtlTable.""",
}, # row
"accessCtlService" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"telnet" : {
"nodetype" : "namednumber",
"number" : "1"
},
"ssh" : {
"nodetype" : "namednumber",
"number" : "2"
},
"ftp" : {
"nodetype" : "namednumber",
"number" : "3"
},
"http" : {
"nodetype" : "namednumber",
"number" : "4"
},
"https" : {
"nodetype" : "namednumber",
"number" : "5"
},
"icmp" : {
"nodetype" : "namednumber",
"number" : "6"
},
"snmp" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"accessCtlEnable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accessCtlServicePort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accessCtlTimeout" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"securedClientTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.2",
"status" : "current",
"description" :
"""""",
}, # table
"securedClientEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.2.1",
"status" : "current",
"linkage" : [
"securedClientIndex",
],
"description" :
"""An entry in securedClientTable.""",
}, # row
"securedClientIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"securedClientEnable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"securedClientStartIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"securedClientEndIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"securedClientService" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.18.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"telnet" : {
"nodetype" : "namednumber",
"number" : "0"
},
"ftp" : {
"nodetype" : "namednumber",
"number" : "1"
},
"http" : {
"nodetype" : "namednumber",
"number" : "2"
},
"icmp" : {
"nodetype" : "namednumber",
"number" : "3"
},
"snmp" : {
"nodetype" : "namednumber",
"number" : "4"
},
"ssh" : {
"nodetype" : "namednumber",
"number" : "5"
},
"https" : {
"nodetype" : "namednumber",
"number" : "6"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"queuingMethodSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.19",
}, # node
"portQueuingMethodTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.19.1",
"status" : "current",
"description" :
"""""",
}, # table
"portQueuingMethodEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.19.1.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
"portQueuingMethodQueue",
],
"description" :
"""An entry in portQueuingMethodTable.""",
}, # row
"portQueuingMethodQueue" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.19.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""0...7""",
}, # column
"portQueuingMethodWeight" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.19.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0...15""",
}, # column
"dhcpSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20",
}, # node
"globalDhcpRelay" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.1",
}, # node
"globalDhcpRelayEnable" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"globalDhcpRelayOption82Enable" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"globalDhcpRelayInfoEnable" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"globalDhcpRelayInfoData" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"maxNumberOfGlobalDhcpRelayRemoteServer" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"globalDhcpRelayRemoteServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.1.6",
"status" : "current",
"description" :
"""""",
}, # table
"globalDhcpRelayRemoteServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.1.6.1",
"create" : "true",
"status" : "current",
"linkage" : [
"globalDhcpRelayRemoteServerIp",
],
"description" :
"""An entry in globalDhcpRelayRemoteServerTable.""",
}, # row
"globalDhcpRelayRemoteServerIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.1.6.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"globalDhcpRelayRemoteServerRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.1.6.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServer" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.2",
}, # node
"maxNumberOfDhcpServers" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The maximum number of DHCP server entries that can be created.
A value of 0 for this object implies that there exists settings for
global DHCP relay.""",
}, # scalar
"dhcpServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.2.2",
"status" : "current",
"description" :
"""""",
}, # table
"dhcpServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.2.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"dhcpServerVid",
],
"description" :
"""An entry in dhcpServerTable.""",
}, # row
"dhcpServerVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpServerStartAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServerPoolSize" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServerMask" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServerGateway" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.2.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServerPrimaryDNS" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.2.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServerSecondaryDNS" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.2.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpServerRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.2.2.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
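# --- Usage sketch (not part of the generated MIB data) -------------------
# Creating a dhcpServerEntry for VLAN 100 through its RowStatus column,
# using the standard SNMPv2-TC createAndGo(4) value. The agent address,
# community, and all IP values below are placeholders; sketch assumes
# pysnmp 4.x hlapi.
#
#   from pysnmp.hlapi import (setCmd, SnmpEngine, CommunityData,
#                             UdpTransportTarget, ContextData, ObjectType,
#                             ObjectIdentity, Integer32, IpAddress)
#   row, vid = '1.3.6.1.4.1.890.1.5.8.20.20.2.2.1', '100'  # indexed by dhcpServerVid
#   next(setCmd(
#       SnmpEngine(), CommunityData('private'),
#       UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#       ObjectType(ObjectIdentity(row + '.2.' + vid), IpAddress('10.0.100.10')),    # start address
#       ObjectType(ObjectIdentity(row + '.3.' + vid), Integer32(50)),               # pool size
#       ObjectType(ObjectIdentity(row + '.4.' + vid), IpAddress('255.255.255.0')),  # mask
#       ObjectType(ObjectIdentity(row + '.8.' + vid), Integer32(4))))               # RowStatus: createAndGo(4)
# -------------------------------------------------------------------------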
"dhcpRelay" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3",
}, # node
"dhcpRelayInfoData" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"maxNumberOfDhcpRelay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The maximum number of DHCP relay entries that can be created.
A value of 0 for this object implies that there exists settings for
global DHCP relay.""",
}, # scalar
"maxNumberOfDhcpRelayRemoteServer" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpRelayRemoteServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3.4",
"status" : "current",
"description" :
"""""",
}, # table
"dhcpRelayRemoteServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3.4.1",
"create" : "true",
"status" : "current",
"linkage" : [
"dhcpRelayVid",
"dhcpRelayRemoteServerIp",
],
"description" :
"""An entry in dhcpRelayRemoteServerTable.""",
}, # row
"dhcpRelayVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3.4.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpRelayRemoteServerIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpRelayRemoteServerRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpRelayTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3.5",
"status" : "current",
"description" :
"""""",
}, # table
"dhcpRelayEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3.5.1",
"status" : "current",
"linkage" : [
"dhcpRelayVid",
],
"description" :
"""An entry in dhcpRelayTable.""",
}, # row
"dhcpRelayOption82Enable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpRelayInfoEnable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.20.3.5.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"staticRouteSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.21",
}, # node
"maxNumberOfStaticRoutes" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.21.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"staticRouteTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.21.2",
"status" : "current",
"description" :
"""""",
}, # table
"staticRouteEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.21.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"staticRouteIp",
"staticRouteMask",
],
"description" :
"""An entry in staticRouteTable.""",
}, # row
"staticRouteName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.21.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"staticRouteIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.21.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"staticRouteMask" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.21.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"staticRouteGateway" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.21.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"staticRouteMetric" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.21.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"staticRouteRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.21.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
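# --- Usage sketch (not part of the generated MIB data) -------------------
# staticRouteEntry is indexed by staticRouteIp + staticRouteMask, so both
# IpAddress index values appear in the instance suffix (four
# sub-identifiers each). Destination, gateway, and agent details are
# placeholders; sketch assumes pysnmp 4.x hlapi.
#
#   from pysnmp.hlapi import (setCmd, SnmpEngine, CommunityData,
#                             UdpTransportTarget, ContextData, ObjectType,
#                             ObjectIdentity, Integer32, IpAddress)
#   row = '1.3.6.1.4.1.890.1.5.8.20.21.2.1'
#   idx = '.10.1.0.0.255.255.0.0'   # staticRouteIp . staticRouteMask
#   next(setCmd(
#       SnmpEngine(), CommunityData('private'),
#       UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#       ObjectType(ObjectIdentity(row + '.4' + idx), IpAddress('192.168.1.254')),  # gateway
#       ObjectType(ObjectIdentity(row + '.5' + idx), Integer32(2)),                # metric
#       ObjectType(ObjectIdentity(row + '.6' + idx), Integer32(4))))               # RowStatus: createAndGo(4)
# -------------------------------------------------------------------------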
"arpInfo" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.22",
}, # node
"arpTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.22.1",
"status" : "current",
"description" :
"""""",
}, # table
"arpEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.22.1.1",
"status" : "current",
"linkage" : [
"arpIpAddr",
"arpMacVid",
],
"description" :
"""An entry in arpTable.""",
}, # row
"arpIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.22.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpIpAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.22.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpMacAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.22.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpMacVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.22.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.22.1.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"static" : {
"nodetype" : "namednumber",
"number" : "1"
},
"dynamic" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""1-static, 2-dynamic""",
}, # column
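# --- Usage sketch (not part of the generated MIB data) -------------------
# Walking the read-only arpTable with pysnmp's nextCmd; the agent address
# and community are placeholders. lexicographicMode=False stops the walk
# at the end of the subtree.
#
#   from pysnmp.hlapi import (nextCmd, SnmpEngine, CommunityData,
#                             UdpTransportTarget, ContextData,
#                             ObjectType, ObjectIdentity)
#   for err, status, _, varBinds in nextCmd(
#           SnmpEngine(), CommunityData('public'),
#           UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#           ObjectType(ObjectIdentity('1.3.6.1.4.1.890.1.5.8.20.22.1')),  # arpTable
#           lexicographicMode=False):
#       if err or status:
#           break
#       for vb in varBinds:
#           print(vb.prettyPrint())  # arpIpAddr / arpMacAddr / arpType instances
# -------------------------------------------------------------------------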
"portOpModeSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23",
}, # node
"portOpModePortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1",
"status" : "current",
"description" :
"""""",
}, # table
"portOpModePortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in portOpModePortTable.""",
}, # row
"portOpModePortFlowCntl" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"off" : {
"nodetype" : "namednumber",
"number" : "0"
},
"on" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"portOpModePortName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "0",
"max" : "32"
},
],
"range" : {
"min" : "0",
"max" : "32"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"portOpModePortLinkUpType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"down" : {
"nodetype" : "namednumber",
"number" : "0"
},
"copper" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fiber" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"portOpModePortIntrusionLock" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"portOpModePortLBTestStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"underTesting" : {
"nodetype" : "namednumber",
"number" : "1"
},
"success" : {
"nodetype" : "namednumber",
"number" : "2"
},
"fail" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""This entry display latest loopback test status of port while performing loopback test.""",
}, # column
"portOpModePortCounterReset" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.23.1.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"enable" : {
"nodetype" : "namednumber",
"number" : "1"
},
"disable" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""This entry resets port counter.""",
}, # column
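# --- Usage sketch (not part of the generated MIB data) -------------------
# portOpModePortCounterReset above clears a port's counters when written
# with enable(1). Bridge port 5 and the agent details are placeholders;
# sketch assumes pysnmp 4.x hlapi.
#
#   from pysnmp.hlapi import (setCmd, SnmpEngine, CommunityData,
#                             UdpTransportTarget, ContextData,
#                             ObjectType, ObjectIdentity, Integer32)
#   oid = '1.3.6.1.4.1.890.1.5.8.20.23.1.1.8.5'  # ...PortCounterReset, port 5
#   next(setCmd(SnmpEngine(), CommunityData('private'),
#               UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#               ObjectType(ObjectIdentity(oid), Integer32(1))))  # enable(1)
# -------------------------------------------------------------------------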
"portBasedVlanSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.24",
}, # node
"portBasedVlanPortListTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.24.1",
"status" : "current",
"description" :
"""""",
}, # table
"portBasedVlanPortListEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.24.1.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in portBasedVlanPortListTable.""",
}, # row
"portBasedVlanPortListMembers" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.24.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"Q-BRIDGE-MIB", "name" : "PortList"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"multicastPortSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.25",
}, # node
"multicastPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.25.1",
"status" : "current",
"description" :
"""""",
}, # table
"multicastPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.25.1.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in multicastPortTable.""",
}, # row
"multicastPortImmediateLeave" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.25.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"multicastPortMaxGroupLimited" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.25.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"multicastPortMaxOfGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.25.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0..255""",
}, # column
"multicastPortIgmpFilteringProfile" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.25.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"multicastPortQuerierMode" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.25.1.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"auto" : {
"nodetype" : "namednumber",
"number" : "1"
},
"fixed" : {
"nodetype" : "namednumber",
"number" : "2"
},
"edge" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""Specify query mode for each port""",
}, # column
"multicastStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26",
}, # node
"multicastStatusTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.1",
"status" : "current",
"description" :
"""""",
}, # table
"multicastStatusEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.1.1",
"status" : "current",
"linkage" : [
"multicastStatusVlanID",
"multicastStatusPort",
"multicastStatusGroup",
],
"description" :
"""An entry in multicastStatusTable.""",
}, # row
"multicastStatusIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastStatusVlanID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastStatusPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastStatusGroup" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2",
"status" : "current",
"description" :
"""A count table of igmp query/report/leave message.""",
}, # table
"igmpCountEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2.1",
"status" : "current",
"linkage" : [
"igmpCountIndex",
],
"description" :
"""An entry in igmpCountTable.""",
}, # row
"igmpCountIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Index of IgmpCountEntry. 0 means total count in whole system""",
}, # column
"igmpCountInQuery" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountInReport" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountInLeave" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountInQueryDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountInReportDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountInLeaveDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountOutQuery" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountOutReport" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpCountOutLeave" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.2.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
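# --- Usage sketch (not part of the generated MIB data) -------------------
# igmpCountIndex 0 is documented above as the system-wide total, so the
# per-direction counters can be fetched at instance .0. Agent details are
# placeholders; sketch assumes pysnmp 4.x hlapi.
#
#   from pysnmp.hlapi import (getCmd, SnmpEngine, CommunityData,
#                             UdpTransportTarget, ContextData,
#                             ObjectType, ObjectIdentity)
#   base = '1.3.6.1.4.1.890.1.5.8.20.26.2.1'
#   next(getCmd(SnmpEngine(), CommunityData('public'),
#               UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#               ObjectType(ObjectIdentity(base + '.2.0')),    # igmpCountInQuery.0
#               ObjectType(ObjectIdentity(base + '.3.0')),    # igmpCountInReport.0
#               ObjectType(ObjectIdentity(base + '.4.0'))))   # igmpCountInLeave.0
# -------------------------------------------------------------------------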
"multicastVlanStatusTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.3",
"status" : "current",
"description" :
"""""",
}, # table
"multicastVlanStatusEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.3.1",
"status" : "current",
"linkage" : [
"multicastVlanStatusVlanID",
],
"description" :
"""An entry in multicastVlanStatusTable.""",
}, # row
"multicastVlanStatusVlanID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastVlanStatusType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"dynamic" : {
"nodetype" : "namednumber",
"number" : "1"
},
"mvr" : {
"nodetype" : "namednumber",
"number" : "2"
},
"static" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"multicastVlanQueryPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.26.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"Q-BRIDGE-MIB", "name" : "PortList"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpFilteringProfileSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.27",
}, # node
"igmpFilteringMaxNumberOfProfile" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.27.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"igmpFilteringProfileTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.27.2",
"status" : "current",
"description" :
"""""",
}, # table
"igmpFilteringProfileEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.27.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"igmpFilteringProfileName",
"igmpFilteringProfileStartAddress",
"igmpFilteringProfileEndAddress",
],
"description" :
"""An entry in igmpFilteringProfileTable.""",
}, # row
"igmpFilteringProfileName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.27.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpFilteringProfileStartAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.27.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpFilteringProfileEndAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.27.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"igmpFilteringProfileRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.27.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvrSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28",
}, # node
"maxNumberOfMVR" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"mvrTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.2",
"status" : "current",
"description" :
"""""",
}, # table
"mvrEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"mvrVlanID",
],
"description" :
"""An entry in mvrTable.""",
}, # row
"mvrVlanID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""1..4094""",
}, # column
"mvrName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvrMode" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"dynamic" : {
"nodetype" : "namednumber",
"number" : "0"
},
"compatible" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvrRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvr8021pPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Set the 802.1p priority of control messages within MVR (0~7)""",
}, # column
"mvrPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.3",
"status" : "current",
"description" :
"""""",
}, # table
"mvrPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.3.1",
"status" : "current",
"linkage" : [
"mvrVlanID",
"dot1dBasePort",
],
"description" :
"""An entry in mvrPortTable.""",
}, # row
"mvrPortRole" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "1"
},
"source_port" : {
"nodetype" : "namednumber",
"number" : "2"
},
"receiver_port" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvrPortTagging" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"maxNumberOfMvrGroup" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"mvrGroupTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.5",
"status" : "current",
"description" :
"""""",
}, # table
"mvrGroupEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.5.1",
"create" : "true",
"status" : "current",
"linkage" : [
"mvrVlanID",
"mvrGroupName",
],
"description" :
"""An entry in mvrGroupTable.""",
}, # row
"mvrGroupName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"mvrGroupStartAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.5.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvrGroupEndAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.5.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mvrGroupRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.28.5.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
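# --- Usage sketch (not part of the generated MIB data) -------------------
# Creating an MVR VLAN through mvrEntry (indexed by mvrVlanID). VLAN 200,
# the name 'tv', and the agent details are placeholders; sketch assumes
# pysnmp 4.x hlapi.
#
#   from pysnmp.hlapi import (setCmd, SnmpEngine, CommunityData,
#                             UdpTransportTarget, ContextData, ObjectType,
#                             ObjectIdentity, Integer32, OctetString)
#   row = '1.3.6.1.4.1.890.1.5.8.20.28.2.1'
#   next(setCmd(SnmpEngine(), CommunityData('private'),
#               UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#               ObjectType(ObjectIdentity(row + '.2.200'), OctetString('tv')),  # mvrName.200
#               ObjectType(ObjectIdentity(row + '.3.200'), Integer32(0)),       # mvrMode: dynamic(0)
#               ObjectType(ObjectIdentity(row + '.4.200'), Integer32(4))))      # RowStatus: createAndGo(4)
# -------------------------------------------------------------------------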
"layer3Setup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.29",
}, # node
"routerRipState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.29.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"routerIgmpState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.29.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"routerDvmrpState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.29.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"routerDvmrpThreshold" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.29.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"routerIpmcPortSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.30",
}, # node
"routerIpmcPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.30.1",
"status" : "current",
"description" :
"""""",
}, # table
"routerIpmcPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.30.1.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in routerIpmcPortTable.""",
}, # row
"routerIpmcPortEgressUntagVlan" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.30.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31",
}, # node
"routerVrrpMaxNumber" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Always set it as 14.""",
}, # scalar
"routerVrrpTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.2",
"status" : "current",
"description" :
"""""",
}, # table
"routerVrrpEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"routerDomainIpAddress",
"routerDomainIpMaskBits",
"routerVrrpVirtualID",
"routerVrrpUplinkGateway",
],
"description" :
"""An entry in routerVrrpTable.""",
}, # row
"routerVrrpVirtualID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerVrrpUplinkGateway" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerVrrpPreempt" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpInterval" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""1-255""",
}, # column
"routerVrrpPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""1-254""",
}, # column
"routerVrrpPrimaryVirtualIP" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpSecondaryVirtualIP" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.2.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"rpVrrpRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.2.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpDomainTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.3",
"status" : "current",
"description" :
"""""",
}, # table
"routerVrrpDomainEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.3.1",
"status" : "current",
"linkage" : [
"routerDomainIpAddress",
"routerDomainIpMaskBits",
],
"description" :
"""An entry in routerVrrpTable.""",
}, # row
"routerVrrpAuthType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"simple" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpAuthKey" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.31.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerVrrpStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.32",
}, # node
"routerVrrpStatusTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.32.1",
"status" : "current",
"description" :
"""""",
}, # table
"routerVrrpStatusEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.32.1.1",
"status" : "current",
"linkage" : [
"routerVrrpStatusIpAddress",
"routerVrrpStatusIpMaskBits",
"routerVrrpStatusVirtualID",
],
"description" :
""" """,
}, # row
"routerVrrpStatusIpAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.32.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerVrrpStatusIpMaskBits" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.32.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerVrrpStatusVirtualID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.32.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerVrrpStatusVRStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.32.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerVrrpStatusUpLinkStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.32.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerDomainSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33",
}, # node
"routerDomainTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.1",
"status" : "current",
"description" :
"""""",
}, # table
"routerDomainEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.1.1",
"status" : "current",
"linkage" : [
"routerDomainIpAddress",
"routerDomainIpMaskBits",
],
"description" :
"""An entry in routerDomainTable.""",
}, # row
"routerDomainIpAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerDomainIpMaskBits" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerDomainVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routerDomainIpTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.2",
"status" : "current",
"description" :
"""""",
}, # table
"routerDomainIpEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.2.1",
"status" : "current",
"linkage" : [
"routerDomainIpAddress",
"routerDomainIpMaskBits",
],
"description" :
"""An entry in routerDomainIpTable.""",
}, # row
"routerDomainIpRipDirection" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"outgoing" : {
"nodetype" : "namednumber",
"number" : "1"
},
"incoming" : {
"nodetype" : "namednumber",
"number" : "2"
},
"both" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerDomainIpRipVersion" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"v1" : {
"nodetype" : "namednumber",
"number" : "0"
},
"v2b" : {
"nodetype" : "namednumber",
"number" : "1"
},
"v2m" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerDomainIpIgmpVersion" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"igmp_v1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"igmp_v2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"igmp_v3" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"routerDomainIpDvmrp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.33.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"diffservSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.34",
}, # node
"diffservState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.34.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"diffservMapTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.34.2",
"status" : "current",
"description" :
"""""",
}, # table
"diffservMapEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.34.2.1",
"status" : "current",
"linkage" : [
"diffservMapDscp",
],
"description" :
"""An entry in diffservMapTable.""",
}, # row
"diffservMapDscp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.34.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""0-63""",
}, # column
"diffservMapPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.34.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0-7""",
}, # column
"diffservPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.34.3",
"status" : "current",
"description" :
"""""",
}, # table
"diffservPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.34.3.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in diffservPortTable.""",
}, # row
"diffservPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.34.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"clusterSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35",
}, # node
"clusterManager" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.1",
}, # node
"clusterMaxNumOfManager" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"clusterManagerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.1.2",
"status" : "current",
"description" :
"""""",
}, # table
"clusterManagerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.1.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"clusterManagerVid",
],
"description" :
"""An entry in clusterManagerTable.""",
}, # row
"clusterManagerVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.1.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterManagerName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.1.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"clusterManagerRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.1.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
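# clusterManagerEntry is creatable ("create" : "true"), so rows are added by
# writing clusterManagerRowStatus at the index clusterManagerVid. A minimal
# sketch using the standard SNMPv2-TC RowStatus named numbers (the snmp_set
# helper is hypothetical, not part of this MIB):
#
#   CREATE_AND_GO, DESTROY = 4, 6     # SNMPv2-TC RowStatus values
#   ROW_STATUS = "1.3.6.1.4.1.890.1.5.8.20.35.1.2.1.3"
#
#   def create_cluster_manager(vid):
#       snmp_set("%s.%d" % (ROW_STATUS, vid), CREATE_AND_GO)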
"clusterMembers" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.2",
}, # node
"clusterMaxNumOfMember" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"clusterMemberTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.2.2",
"status" : "current",
"description" :
"""""",
}, # table
"clusterMemberEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.2.2.1",
"create" : "true",
"status" : "current",
"linkage" : [
"clusterMemberMac",
],
"description" :
"""An entry in clusterMemberTable.""",
}, # row
"clusterMemberMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"clusterMemberName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterMemberModel" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterMemberPassword" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"clusterMemberRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.2.2.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"clusterCandidates" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.3",
}, # node
"clusterCandidateTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.3.1",
"status" : "current",
"description" :
"""""",
}, # table
"clusterCandidateEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.3.1.1",
"status" : "current",
"linkage" : [
"clusterCandidateMac",
],
"description" :
"""An entry in clusterCandidateTable.""",
}, # row
"clusterCandidateMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.3.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterCandidateName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.3.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterCandidateModel" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.3.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.4",
}, # node
"clusterStatusRole" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.4.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"none" : {
"nodetype" : "namednumber",
"number" : "0"
},
"manager" : {
"nodetype" : "namednumber",
"number" : "1"
},
"member" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"clusterStatusManager" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.4.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"clsuterStatusMaxNumOfMember" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.4.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"clusterStatusMemberTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.4.4",
"status" : "current",
"description" :
"""""",
}, # table
"clusterStatusMemberEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.4.4.1",
"status" : "current",
"linkage" : [
"clusterStatusMemberMac",
],
"description" :
"""An entry in clusterStatusMemberTable.""",
}, # row
"clusterStatusMemberMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.4.4.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterStatusMemberName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.4.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterStatusMemberModel" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.4.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"clusterStatusMemberStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.35.4.4.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"error" : {
"nodetype" : "namednumber",
"number" : "0"
},
"online" : {
"nodetype" : "namednumber",
"number" : "1"
},
"offline" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"faultMIB" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36",
"status" : "current",
}, # node
"eventObjects" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1",
}, # node
"eventTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1",
"status" : "current",
"description" :
"""A list of currently active fault events. All faults
of normal type regardless of their severity level
are recorded in the event table. When a normal
type fault is cleared it is deleted from the event
table.""",
}, # table
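# A minimal sketch of walking the active-fault table with pysnmp (assumptions:
# pysnmp is installed and an agent is reachable at 192.0.2.1 with community
# 'public'; neither is implied by this MIB):
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, nextCmd)
#
#   EVENT_TABLE = "1.3.6.1.4.1.890.1.5.8.20.36.1.1"
#   for err_ind, err_stat, err_idx, var_binds in nextCmd(
#           SnmpEngine(), CommunityData("public"),
#           UdpTransportTarget(("192.0.2.1", 161)), ContextData(),
#           ObjectType(ObjectIdentity(EVENT_TABLE)), lexicographicMode=False):
#       if err_ind or err_stat:
#           break
#       for oid, value in var_binds:
#           print(oid, value)         # one varbind per active-fault cell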
"eventEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1",
"status" : "current",
"linkage" : [
"eventSeqNum",
],
"description" :
"""An entry containing information about an
event in the event table.""",
}, # row
"eventSeqNum" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""This variable represents the sequence number of an event.
Sequence number is incremented monotonically starting
from 0 until it reaches its maximum and wraps around back
to 0.
Sequence number is incremented when
- the state of a normal type fault is set on (the same sequence
number is present in the events table as well as in the trap
that is sent to notify about the fault on event)
- delta event occurs (sequence number present in trap message)
- the state of a normal type fault is set off (sequence number
present in trap that is sent to notify for clearing).""",
}, # column
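# The wrap-around rule above in code form (a sketch; the Integer32 ceiling of
# 2**31 - 1 is an assumption, the description only says "its maximum"):
#
#   def next_event_seq_num(seq, maximum=2**31 - 1):
#       return 0 if seq >= maximum else seq + 1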
"eventEventId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "EventIdNumber"},
},
"access" : "readonly",
"description" :
"""This variable represents the event ID which uniquely
identifies the event in the entire system.""",
}, # column
"eventName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "40"
},
],
"range" : {
"min" : "0",
"max" : "40"
},
},
},
"access" : "readonly",
"description" :
"""This variable represents the name of the event, for
example 'Ethernet Link Down'""",
}, # column
"eventInstanceType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "InstanceType"},
},
"access" : "readonly",
"description" :
"""This variable represents the type of InstanceId of a
particular event in the event table. In brief
the instanceType refers to the type of sub-component
generating this event in the system, for example
switch (5). For more details see the textual
conventions section.
AFFECTS: eventInstanceId,
eventInstanceName""",
}, # column
"eventInstanceId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""This variable represents the InstanceId of a particular
event in the event current table. In brief the instanceId
refers to the sub-component generating this event in the
system, for example '1' for port 1. For more details see
the textual conventions section.
DEPENDS ON: eventInstanceType""",
}, # column
"eventInstanceName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""This variable is mainly used to store additional information
about the sub-component that is generating an event. For
example this field may specify what cooling fan is faulty.
DEPENDS ON: eventInstanceType""",
}, # column
"eventSeverity" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "EventSeverity"},
},
"access" : "readonly",
"description" :
"""This variable dictates the urgency of action when a event
occurs. There are four severity levels - Critical, Major,
Minor, and Informational. Critical events are those, which
require immediate operator intervention to prevent/reduce
system down time. Major events require quick attention and
Minor events possibly require some attention. Informational
events indicate the occurrence of events that may need to be
investigated.""",
}, # column
"eventSetTime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "UtcTimeStamp"},
},
"access" : "readonly",
"description" :
"""This table contains only normal events and this variable
represents the time when the event become active, i.e. the
number of seconds since Jan 1, 1970 12:00AM.""",
}, # column
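# eventSetTime is plain Unix time (seconds since the epoch), so converting it
# for display needs only the standard library:
#
#   from datetime import datetime, timezone
#   set_time = datetime.fromtimestamp(event_set_time, tz=timezone.utc)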
"eventDescription" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""This variable contains a description of the event and reasons
behind the event. This is a free format alpha-numeric string
that is set by the entity generating this event. This variable
may be empty if there is no useful information to report.
The maximum length of this string is 255 characters.""",
}, # column
"eventServAffective" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.36.1.1.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "EventServiceAffective"},
},
"access" : "readonly",
"description" :
"""This variable indicates whether the event is service affective or not""",
}, # column
"faultTrapsMIB" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.37",
"status" : "current",
}, # node
"trapInfoObjects" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.37.1",
}, # node
"trapRefSeqNum" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.37.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Indicates the former sequence number of a cleared event
in the event table. Not intended to be read; used only in
trap notifications.""",
}, # scalar
"trapPersistence" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.37.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "EventPersistence"},
},
"access" : "readonly",
"description" :
"""Indicates whether the event is delta (automatically and
immediately cleared) or normal (not automatically cleared).
Not intended to read but only used in trap notifications.""",
}, # scalar
"trapSenderNodeId" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.37.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Represents the node ID of the sending network element. If not
supported, it should be set to 0. Not intended to be read;
used only in trap notifications.""",
}, # scalar
"trapNotifications" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.37.2",
}, # node
"ipStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.38",
}, # node
"ipStatusTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.38.1",
"status" : "current",
"description" :
"""""",
}, # table
"ipStatusEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.38.1.1",
"status" : "current",
"linkage" : [
"ipStatusIPAddress",
"ipStatusVid",
],
"description" :
"""""",
}, # row
"ipStatusIPAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.38.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ipStatusVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.38.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ipStatusPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.38.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ipStatusType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.38.1.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"static" : {
"nodetype" : "namednumber",
"number" : "1"
},
"dynamic" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routingStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.39",
}, # node
"routingStatusTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.39.1",
"status" : "current",
"description" :
"""""",
}, # table
"routingStatusEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.39.1.1",
"status" : "current",
"linkage" : [
"routingStatusDestAddress",
],
"description" :
"""""",
}, # row
"routingStatusDestAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.39.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routingStatusDestMaskbits" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.39.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routingStatusGateway" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.39.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routingStatusInterface" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.39.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routingStatusMetric" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.39.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"routingStatusType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.39.1.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"rip" : {
"nodetype" : "namednumber",
"number" : "1"
},
"bgp" : {
"nodetype" : "namednumber",
"number" : "2"
},
"ospf" : {
"nodetype" : "namednumber",
"number" : "3"
},
"static" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfExt" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40",
}, # node
"ospfInterfaceTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.1",
"status" : "current",
"description" :
"""""",
}, # table
"ospfInterfaceEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.1.1",
"status" : "current",
"linkage" : [
"ospfIfIpAddress",
"ospfAddressLessIf",
],
"description" :
"""""",
}, # row
"ospfIfKeyId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ospfIfMaskbits" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfIfDesignatedRouterID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfIfBackupDesignatedRouterID" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfIfNbrCount" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfIfAdjacentNbrCount" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfIfHelloDueTime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfAreaExtTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.2",
"status" : "current",
"description" :
"""""",
}, # table
"ospfAreaExtEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.2.1",
"status" : "current",
"linkage" : [
"ospfAreaId",
],
"description" :
"""""",
}, # row
"ospfAreaExtName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ospfRedistributeRouteTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.3",
"status" : "current",
"description" :
"""""",
}, # table
"ospfRedistributeRouteEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.3.1",
"status" : "current",
"linkage" : [
"ospfRedistributeRouteProtocol",
],
"description" :
"""""",
}, # row
"ospfRedistributeRouteProtocol" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"rip" : {
"nodetype" : "namednumber",
"number" : "1"
},
"static" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfRedistributeRouteState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ospfRedistributeRouteType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ospfRedistributeRouteMetric" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ospfNbrExtTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.4",
"status" : "current",
"description" :
"""""",
}, # table
"ospfNbrExtEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.4.1",
"status" : "current",
"linkage" : [
"ospfNbrIpAddr",
"ospfNbrAddressLessIndex",
],
"description" :
"""""",
}, # row
"ospfNbrExtRole" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"dr" : {
"nodetype" : "namednumber",
"number" : "1"
},
"backup" : {
"nodetype" : "namednumber",
"number" : "2"
},
"dr_other" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfNbrExtDeadtime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfNbrExtInterface" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfNbrExtRXmtL" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.4.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfNbrExtRqstL" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.4.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfNbrExtDBsmL" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.4.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfLsdbExtTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.5",
"status" : "current",
"description" :
"""""",
}, # table
"ospfLsdbExtEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.5.1",
"status" : "current",
"linkage" : [
"ospfLsdbAreaId",
"ospfLsdbType",
"ospfLsdbLSID",
"ospfLsdbRouterId",
],
"description" :
"""""",
}, # row
"ospfLsdbExtLinkCount" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfLsdbExtRouteAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.5.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfLsdbExtRouteMaskbits" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.5.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ospfVirtualLinkTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.6",
"status" : "current",
"description" :
"""""",
}, # table
"ospfVirtualLinkEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.6.1",
"status" : "current",
"linkage" : [
"ospfVirtIfAreaID",
"ospfVirtIfNeighbor",
],
"description" :
"""""",
}, # row
"ospfVirtualLinkName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.6.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ospfVirtualLinkKeyId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.40.6.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"sysLogSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41",
}, # node
"sysLogState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""sysLog enabled/disabled for the switch.""",
}, # scalar
"sysLogTypeTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41.2",
"status" : "current",
"description" :
"""""",
}, # table
"sysLogTypeEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41.2.1",
"status" : "current",
"linkage" : [
"sysLogTypeIndex",
],
"description" :
"""An entry in sysLogTypeTable.""",
}, # row
"sysLogTypeIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"sysLogTypeName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"sysLogTypeState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"sysLogTypeFacility" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"local_user0" : {
"nodetype" : "namednumber",
"number" : "0"
},
"local_user1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"local_user2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"local_user3" : {
"nodetype" : "namednumber",
"number" : "3"
},
"local_user4" : {
"nodetype" : "namednumber",
"number" : "4"
},
"local_user5" : {
"nodetype" : "namednumber",
"number" : "5"
},
"local_user6" : {
"nodetype" : "namednumber",
"number" : "6"
},
"local_user7" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"sysLogServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41.3",
"status" : "current",
"description" :
"""""",
}, # table
"sysLogServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41.3.1",
"create" : "true",
"status" : "current",
"linkage" : [
"sysLogServerAddress",
],
"description" :
"""An entry in sysLogServerTable.""",
}, # row
"sysLogServerAddress" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"sysLogServerLogLevel" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"level0" : {
"nodetype" : "namednumber",
"number" : "0"
},
"level0-1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"level0-2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"level0-3" : {
"nodetype" : "namednumber",
"number" : "3"
},
"level0-4" : {
"nodetype" : "namednumber",
"number" : "4"
},
"level0-5" : {
"nodetype" : "namednumber",
"number" : "5"
},
"level0-6" : {
"nodetype" : "namednumber",
"number" : "6"
},
"level0-7" : {
"nodetype" : "namednumber",
"number" : "7"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"sysLogServerRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.41.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mrstp" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42",
}, # node
"mrstpSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1",
}, # node
"mrstpBridgeTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1",
"status" : "current",
"description" :
"""""",
}, # table
"mrstpBridgeEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1",
"status" : "current",
"linkage" : [
"mrstpBridgeIndex",
],
"description" :
"""An entry in mrstpBridgeTable.""",
}, # row
"mrstpBridgeIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The tree index of the MRSTP.""",
}, # column
"mrstpState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Enabled/disabled on the mrstp bridge.""",
}, # column
"mrstpProtocolSpecification" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"unknown" : {
"nodetype" : "namednumber",
"number" : "1"
},
"decLb100" : {
"nodetype" : "namednumber",
"number" : "2"
},
"ieee8021d" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""An indication of what version of the Spanning
Tree Protocol is being run. The value
'decLb100(2)' indicates the DEC LANbridge 100
Spanning Tree protocol. IEEE 802.1d
implementations will return 'ieee8021d(3)'. If
future versions of the IEEE Spanning Tree Protocol
are released that are incompatible with the
current version, a new value will be defined.""",
}, # column
"mrstpPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "65535"
},
],
"range" : {
"min" : "0",
"max" : "65535"
},
},
},
"access" : "readwrite",
"description" :
"""The value of the write-able portion of the Bridge
ID, i.e., the first two octets of the (8 octet
long) Bridge ID. The other (last) 6 octets of the
Bridge ID are given by the value of
dot1dBaseBridgeAddress.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.7""",
}, # column
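# The 8-octet Bridge ID layout described above, as a sketch: the writable
# 2-octet priority followed by the 6-octet dot1dBaseBridgeAddress:
#
#   def bridge_id(priority, base_mac):   # base_mac: 6 raw bytes
#       assert 0 <= priority <= 65535 and len(base_mac) == 6
#       return priority.to_bytes(2, "big") + base_mac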
"mrstpTimeSinceTopologyChange" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "TimeTicks"},
},
"access" : "readonly",
"description" :
"""The time (in hundredths of a second) since the
last time a topology change was detected by the
bridge entity.""",
"reference>" :
"""IEEE 802.1D-1990: Section 6.8.1.1.3""",
}, # column
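# TimeTicks here are hundredths of a second, so converting the reported
# value to seconds is a single division:
#
#   seconds_since_topology_change = mrstp_time_since_topology_change / 100.0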
"mrstpTopChanges" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.6",
"status" : "current",
"access" : "readonly",
"description" :
"""The total number of topology changes detected by
this bridge since the management entity was last
reset or initialized.""",
"reference>" :
"""IEEE 802.1D-1990: Section 6.8.1.1.3""",
}, # column
"mrstpDesignatedRoot" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "BridgeId"},
},
"access" : "readonly",
"description" :
"""The bridge identifier of the root of the spanning
tree as determined by the Spanning Tree Protocol
as executed by this node. This value is used as
the Root Identifier parameter in all Configuration
Bridge PDUs originated by this node.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.1""",
}, # column
"mrstpRootCost" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The cost of the path to the root as seen from
this bridge.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.2""",
}, # column
"mrstpRootPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The port number of the port which offers the
lowest cost path from this bridge to the root
bridge.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.3""",
}, # column
"mrstpMaxAge" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "Timeout"},
},
"access" : "readonly",
"description" :
"""The maximum age of Spanning Tree Protocol
information learned from the network on any port
before it is discarded, in units of hundredths of
a second. This is the actual value that this
bridge is currently using.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.4""",
}, # column
"mrstpHelloTime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "Timeout"},
},
"access" : "readonly",
"description" :
"""The amount of time between the transmission of
Configuration bridge PDUs by this node on any port
when it is the root of the spanning tree or trying
to become so, in units of hundredths of a second.
This is the actual value that this bridge is
currently using.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.5""",
}, # column
"mrstpHoldTime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""This time value determines the interval length
during which no more than two Configuration bridge
PDUs shall be transmitted by this node, in units
of hundredths of a second.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.14""",
}, # column
"mrstpForwardDelay" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "Timeout"},
},
"access" : "readonly",
"description" :
"""This time value, measured in units of hundredths
of a second, controls how fast a port changes its
spanning state when moving towards the Forwarding
state. The value determines how long the port
stays in each of the Listening and Learning
states, which precede the Forwarding state. This
value is also used, when a topology change has
been detected and is underway, to age all dynamic
entries in the Forwarding Database. [Note that
this value is the one that this bridge is
currently using, in contrast to
mrstpBridgeForwardDelay which is the value that
this bridge and all others would start using
if/when this bridge were to become the root.]""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.6""",
}, # column
"mrstpBridgeMaxAge" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"parent module" : {
"name" : "BRIDGE-MIB",
"type" : "Timeout",
},
"ranges" : [
{
"min" : "600",
"max" : "4000"
},
],
"range" : {
"min" : "600",
"max" : "4000"
},
},
},
"access" : "readwrite",
"description" :
"""The value that all bridges use for MaxAge when
this bridge is acting as the root. Note that
802.1D-1990 specifies that the range for this
parameter is related to the value of
mrstpBridgeHelloTime. The granularity of this
timer is specified by 802.1D-1990 to be 1 second.
An agent may return a badValue error if a set is
attempted to a value which is not a whole number
of seconds.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.8""",
}, # column
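# Because the agent may reject sets that are not a whole number of seconds,
# a client can pre-check the value (expressed in hundredths of a second,
# range 600..4000) before issuing the SNMP set:
#
#   def valid_bridge_max_age(v):
#       return 600 <= v <= 4000 and v % 100 == 0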
"mrstpBridgeHelloTime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"parent module" : {
"name" : "BRIDGE-MIB",
"type" : "Timeout",
},
"ranges" : [
{
"min" : "100",
"max" : "1000"
},
],
"range" : {
"min" : "100",
"max" : "1000"
},
},
},
"access" : "readwrite",
"description" :
"""The value that all bridges use for HelloTime when
this bridge is acting as the root. The
granularity of this timer is specified by 802.1D-
1990 to be 1 second. An agent may return a
badValue error if a set is attempted to a value
which is not a whole number of seconds.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.9""",
}, # column
"mrstpBridgeForwardDelay" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.1.1.16",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"parent module" : {
"name" : "BRIDGE-MIB",
"type" : "Timeout",
},
"ranges" : [
{
"min" : "400",
"max" : "3000"
},
],
"range" : {
"min" : "400",
"max" : "3000"
},
},
},
"access" : "readwrite",
"description" :
"""The value that all bridges use for ForwardDelay
when this bridge is acting as the root. Note that
802.1D-1990 specifies that the range for this
parameter is related to the value of
mrstpBridgeMaxAge. The granularity of this
timer is specified by 802.1D-1990 to be 1 second.
An agent may return a badValue error if a set is
attempted to a value which is not a whole number
of seconds.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.3.10""",
}, # column
"mrstpPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2",
"status" : "current",
"description" :
"""A table that contains port-specific information
for the Spanning Tree Protocol.""",
}, # table
"mrstpPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1",
"status" : "current",
"linkage" : [
"mrstpPort",
],
"description" :
"""A list of information maintained by every port
about the Spanning Tree Protocol state for that
port.""",
}, # row
"mrstpPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "readonly",
"description" :
"""The port number of the port for which this entry
contains Spanning Tree Protocol management
information.""",
"reference>" :
"""IEEE 802.1D-1990: Section 6.8.2.1.2""",
}, # column
"mrstpPortPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readwrite",
"description" :
"""The value of the priority field which is
contained in the first (in network byte order)
octet of the (2 octet long) Port ID. The other
octet of the Port ID is given by the value of
mrstpPort.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.1""",
}, # column
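# The 2-octet Port ID described above, as a sketch: priority in the first
# octet (network byte order), mrstpPort in the second. This assumes the port
# number fits in one octet, as the description implies:
#
#   def port_id(priority, port):
#       assert 0 <= priority <= 255 and 1 <= port <= 255
#       return bytes([priority, port])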
"mrstpPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"disabled" : {
"nodetype" : "namednumber",
"number" : "1"
},
"blocking" : {
"nodetype" : "namednumber",
"number" : "2"
},
"listening" : {
"nodetype" : "namednumber",
"number" : "3"
},
"learning" : {
"nodetype" : "namednumber",
"number" : "4"
},
"forwarding" : {
"nodetype" : "namednumber",
"number" : "5"
},
"broken" : {
"nodetype" : "namednumber",
"number" : "6"
},
},
},
"access" : "readonly",
"description" :
"""The port's current state as defined by
application of the Spanning Tree Protocol. This
state controls what action a port takes on
reception of a frame. If the bridge has detected
a port that is malfunctioning it will place that
port into the broken(6) state. For ports which
are disabled (see mrstpPortEnable), this object
will have a value of disabled(1).""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.2""",
}, # column
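# A decode map for the enumeration above, useful when reading raw values:
#
#   MRSTP_PORT_STATE = {1: "disabled", 2: "blocking", 3: "listening",
#                       4: "learning", 5: "forwarding", 6: "broken"}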
"mrstpPortEnable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"enabled" : {
"nodetype" : "namednumber",
"number" : "1"
},
"disabled" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""The enabled/disabled status of the port.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.2""",
}, # column
"mrstpPortPathCost" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "readwrite",
"description" :
"""The contribution of this port to the path cost of
paths towards the spanning tree root which include
this port. 802.1D-1990 recommends that the
default value of this parameter be in inverse
proportion to the speed of the attached LAN.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.3""",
}, # column
"mrstpPortDesignatedRoot" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "BridgeId"},
},
"access" : "readonly",
"description" :
"""The unique Bridge Identifier of the Bridge
recorded as the Root in the Configuration BPDUs
transmitted by the Designated Bridge for the
segment to which the port is attached.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.4""",
}, # column
"mrstpPortDesignatedCost" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The path cost of the Designated Port of the
segment connected to this port. This value is
compared to the Root Path Cost field in received
bridge PDUs.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.5""",
}, # column
"mrstpPortDesignatedBridge" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "BridgeId"},
},
"access" : "readonly",
"description" :
"""The Bridge Identifier of the bridge which this
port considers to be the Designated Bridge for
this port's segment.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.6""",
}, # column
"mrstpPortDesignatedPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "2",
"max" : "2"
},
],
"range" : {
"min" : "2",
"max" : "2"
},
},
},
"access" : "readonly",
"description" :
"""The Port Identifier of the port on the Designated
Bridge for this port's segment.""",
"reference>" :
"""IEEE 802.1D-1990: Section 4.5.5.7""",
}, # column
"mrstpPortForwardTransitions" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.10",
"status" : "current",
"access" : "readonly",
"description" :
"""The number of times this port has transitioned
from the Learning state to the Forwarding state.""",
}, # column
"mrstpPortOnBridgeIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.1.2.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Indetify the bridge index that this port joined to in MRSTP.""",
}, # column
"mrstpNotifications" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.2",
}, # node
"radiusServerSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43",
}, # node
"radiusAuthServerSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.1",
}, # node
"radiusAuthServerTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"radiusAuthServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.1.3",
"status" : "current",
"description" :
"""""",
}, # table
"radiusAuthServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.1.3.1",
"status" : "current",
"linkage" : [
"radiusAuthServerIndex",
],
"description" :
"""An entry in radiusAuthServerTable.""",
}, # row
"radiusAuthServerIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.1.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"radiusAuthServerIpAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.1.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"radiusAuthServerUdpPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.1.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"radiusAuthServerSharedSecret" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.1.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"radiusAcctServerSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.2",
}, # node
"radiusAcctServerTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"radiusAcctServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.2.2",
"status" : "current",
"description" :
"""""",
}, # table
"radiusAcctServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.2.2.1",
"status" : "current",
"linkage" : [
"radiusAcctServerIndex",
],
"description" :
"""An entry in radiusAcctServerTable.""",
}, # row
"radiusAcctServerIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"radiusAcctServerIpAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"radiusAcctServerUdpPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"radiusAcctServerSharedSecret" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.43.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"tacacsServerSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44",
}, # node
"tacacsAuthServerSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.1",
}, # node
"tacacsAuthServerTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"tacacsAuthServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.1.3",
"status" : "current",
"description" :
"""""",
}, # table
"tacacsAuthServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.1.3.1",
"status" : "current",
"linkage" : [
"tacacsAuthServerIndex",
],
"description" :
"""An entry in tacacsAuthServerTable.""",
}, # row
"tacacsAuthServerIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.1.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"tacacsAuthServerIpAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.1.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"tacacsAuthServerTcpPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.1.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"tacacsAuthServerSharedSecret" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.1.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"tacacsAcctServerSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.2",
}, # node
"tacacsAcctServerTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"tacacsAcctServerTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.2.2",
"status" : "current",
"description" :
"""""",
}, # table
"tacacsAcctServerEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.2.2.1",
"status" : "current",
"linkage" : [
"tacacsAcctServerIndex",
],
"description" :
"""An entry in tacacsAcctServerTable.""",
}, # row
"tacacsAcctServerIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "noaccess",
"description" :
"""""",
}, # column
"tacacsAcctServerIpAddr" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"tacacsAcctServerTcpPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"tacacsAcctServerSharedSecret" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.44.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"aaaSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45",
}, # node
"authenticationSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.1",
}, # node
"authenticationTypeTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.1.1",
"status" : "current",
"description" :
"""""",
}, # table
"authenticationTypeEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.1.1.1",
"status" : "current",
"linkage" : [
"authenticationTypeName",
],
"description" :
"""An entry in authenticationTypeTable.""",
}, # row
"authenticationTypeName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.1.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"authenticationTypeMethodList" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.1.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "OctetString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accountingSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2",
}, # node
"accountingUpdatePeriod" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"accountingTypeTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2",
"status" : "current",
"description" :
"""""",
}, # table
"accountingTypeEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2.1",
"status" : "current",
"linkage" : [
"accountingTypeName",
],
"description" :
"""An entry in accountingTypeTable.""",
}, # row
"accountingTypeName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"accountingTypeActive" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accountingTypeBroadcast" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accountingTypeMode" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"start-stop" : {
"nodetype" : "namednumber",
"number" : "1"
},
"stop-only" : {
"nodetype" : "namednumber",
"number" : "2"
},
"not-available" : {
"nodetype" : "namednumber",
"number" : "255"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accountingTypeMethod" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"radius" : {
"nodetype" : "namednumber",
"number" : "1"
},
"tacacs" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"accountingTypePrivilege" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.45.2.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"privilege-0" : {
"nodetype" : "namednumber",
"number" : "0"
},
"privilege-1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"privilege-2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"privilege-3" : {
"nodetype" : "namednumber",
"number" : "3"
},
"privilege-4" : {
"nodetype" : "namednumber",
"number" : "4"
},
"privilege-5" : {
"nodetype" : "namednumber",
"number" : "5"
},
"privilege-6" : {
"nodetype" : "namednumber",
"number" : "6"
},
"privilege-7" : {
"nodetype" : "namednumber",
"number" : "7"
},
"privilege-8" : {
"nodetype" : "namednumber",
"number" : "8"
},
"privilege-9" : {
"nodetype" : "namednumber",
"number" : "9"
},
"privilege-10" : {
"nodetype" : "namednumber",
"number" : "10"
},
"privilege-11" : {
"nodetype" : "namednumber",
"number" : "11"
},
"privilege-12" : {
"nodetype" : "namednumber",
"number" : "12"
},
"privilege-13" : {
"nodetype" : "namednumber",
"number" : "13"
},
"privilege-14" : {
"nodetype" : "namednumber",
"number" : "14"
},
"not-available" : {
"nodetype" : "namednumber",
"number" : "255"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpSnp" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100",
}, # node
"dhcpSnpVlanTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.1",
"status" : "current",
"description" :
"""""",
}, # table
"dhcpSnpVlanEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.1.1",
"status" : "current",
"linkage" : [
"dhcpSnpVlanEntryVid",
],
"description" :
"""""",
}, # row
"dhcpSnpVlanEntryVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpVlanEntryEnable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpSnpVlanEntryOption82Enable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpSnpVlanEntryInfo" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpSnpPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.2",
"status" : "current",
"description" :
"""""",
}, # table
"dhcpSnpPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.2.1",
"status" : "current",
"linkage" : [
"dhcpSnpPortEntryPort",
],
"description" :
"""""",
}, # row
"dhcpSnpPortEntryPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpPortEntryTrust" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"dhcpSnpPortEntryRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2048"
},
],
"range" : {
"min" : "0",
"max" : "2048"
},
},
},
"access" : "readwrite",
"description" :
"""0 means unlimited""",
}, # column
"dhcpSnpBindTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.3",
"status" : "current",
"description" :
"""""",
}, # table
"dhcpSnpBindEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.3.1",
"status" : "current",
"linkage" : [
"dhcpSnpBindEntryMac",
"dhcpSnpBindEntryVid",
],
"description" :
"""""",
}, # row
"dhcpSnpBindEntryMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpBindEntryVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpBindEntryIP" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpBindEntryLease" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpBindEntryType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.3.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"dynamic" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"dhcpSnpBindEntryPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.3.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
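# The dhcpSnpBindTable above exposes the DHCP snooping binding database,
# indexed by MAC address and VID, with read-only IP, lease, type and port
# columns. A minimal sketch of walking that table with pysnmp is kept as a
# comment so this generated module stays importable; the agent address and
# community string are placeholders, not values taken from this MIB:
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, nextCmd)
#
#   for errInd, errStat, errIdx, varBinds in nextCmd(
#           SnmpEngine(), CommunityData('public'),
#           UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#           ObjectType(ObjectIdentity('1.3.6.1.4.1.890.1.5.8.20.100.3')),
#           lexicographicMode=False):     # stop at the end of the subtree
#       if errInd or errStat:
#           break                         # bail out on any SNMP error
#       for vb in varBinds:
#           print(vb.prettyPrint())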
"dhcpSnpEnable" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"dhcpSnpDb" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5",
}, # node
"dhcpSnpDbAbort" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"dhcpSnpDbWriteDelay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"dhcpSnpDbUrl" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"dhcpSnpDbUrlRenew" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStat" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5",
}, # node
"dhcpSnpDbStatClear" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatDelayExpiry" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatAbortExpiry" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatLastSuccTime" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatLastFailTime" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatLastFailReason" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatTotalAttempt" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatStartupFail" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatSuccTrans" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatFailTrans" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatSuccRead" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatFailRead" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatSuccWrite" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatFailWrite" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDbStatLastIgnoreBindCol" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Last ignored: binding collision""",
}, # scalar
"dhcpSnpDbStatLastIgnoreExpireLease" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Last ignored: expired leases""",
}, # scalar
"dhcpSnpDbStatLastIgnoreInvalidIntf" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Last ignored: invalid interface""",
}, # scalar
"dhcpSnpDbStatLastIgnoreUnsuppVlan" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Last ignored: unsupported vlans""",
}, # scalar
"dhcpSnpDbStatLastIgnoreParse" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.21",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Last ignored: parsing error""",
}, # scalar
"dhcpSnpDbStatTotalIgnoreBindCol" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.22",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Total ignored: binding collision""",
}, # scalar
"dhcpSnpDbStatTotalIgnoreExpireLease" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.23",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Total ignored: expired leases""",
}, # scalar
"dhcpSnpDbStatTotalIgnoreInvalidIntf" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.24",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Total ignored: invalid interface""",
}, # scalar
"dhcpSnpDbStatTotalIgnoreUnsuppVlan" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.25",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Total ignored: unsupported vlans""",
}, # scalar
"dhcpSnpDbStatTotalIgnoreParse" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.26",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""Total ignored: parsing error""",
}, # scalar
"dhcpSnpDbStatLastIgnoreTime" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.5.5.27",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""""",
}, # scalar
"dhcpSnpDhcpVlan" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.6",
}, # node
"dhcpSnpDhcpVlanVid" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.100.6.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "4094"
},
],
"range" : {
"min" : "0",
"max" : "4094"
},
},
},
"access" : "readwrite",
"description" :
"""0: disable DHCP VLAN.""",
}, # scalar
"ipsg" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101",
}, # node
"ipsgTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101.1",
"status" : "current",
"description" :
"""""",
}, # table
"ipsgEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101.1.1",
"create" : "true",
"status" : "current",
"linkage" : [
"ipsgEntryMac",
"ipsgEntryVid",
],
"description" :
"""""",
}, # row
"ipsgEntryMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101.1.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ipsgEntryVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101.1.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ipsgEntryIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"ipsgEntryLease" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""second""",
}, # column
"ipsgEntryType" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101.1.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"static" : {
"nodetype" : "namednumber",
"number" : "1"
},
"dhcp" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"ipsgEntryPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0 means any port""",
}, # column
"ipsgEntryState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.101.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
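# ipsgEntry is a createable row ("create" : "true") keyed by MAC address and
# VID, so static IP source guard bindings are added through the usual SNMP
# RowStatus sequence on ipsgEntryState. A hedged sketch with pysnmp, kept as
# a comment; it assumes the standard index encoding for a fixed-length
# MacAddress (six sub-identifiers) followed by the VID, and uses placeholder
# agent, community, MAC and IP values:
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity,
#                             Integer32, IpAddress, setCmd)
#
#   mac = (0x00, 0x11, 0x22, 0x33, 0x44, 0x55)    # index part 1: MacAddress
#   vid = 100                                     # index part 2: VLAN ID
#   idx = '.'.join(str(x) for x in mac + (vid,))
#   base = '1.3.6.1.4.1.890.1.5.8.20.101.1.1'
#
#   next(setCmd(
#       SnmpEngine(), CommunityData('private'),
#       UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#       ObjectType(ObjectIdentity(base + '.3.' + idx), IpAddress('10.0.0.5')),
#       ObjectType(ObjectIdentity(base + '.7.' + idx), Integer32(4))))
#   # ipsgEntryState = 4 is RowStatus createAndGo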
"arpInspect" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102",
}, # node
"arpInspectSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1",
}, # node
"arpInspectState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectFilterAgingTime" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectLog" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.3",
}, # node
"arpInspectLogEntries" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.3.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1024"
},
],
"range" : {
"min" : "0",
"max" : "1024"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectLogRate" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.3.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1024"
},
],
"range" : {
"min" : "0",
"max" : "1024"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectLogInterval" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.3.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2147483647"
},
],
"range" : {
"min" : "0",
"max" : "2147483647"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectVlanTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.4",
"status" : "current",
"description" :
"""""",
}, # table
"arpInspectVlanEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.4.1",
"status" : "current",
"linkage" : [
"arpInspectVlanVid",
],
"description" :
"""""",
}, # row
"arpInspectVlanVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectVlanLog" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.4.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"all" : {
"nodetype" : "namednumber",
"number" : "1"
},
"none" : {
"nodetype" : "namednumber",
"number" : "2"
},
"permit" : {
"nodetype" : "namednumber",
"number" : "3"
},
"deny" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspectVlanStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.4.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"enabled" : {
"nodetype" : "namednumber",
"number" : "1"
},
"disabled" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspectPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.5",
"status" : "current",
"description" :
"""""",
}, # table
"arpInspectPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.5.1",
"status" : "current",
"linkage" : [
"arpInspectPortIndex",
],
"description" :
"""""",
}, # row
"arpInspectPortIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectPortTrust" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.5.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"trusted" : {
"nodetype" : "namednumber",
"number" : "1"
},
"untrusted" : {
"nodetype" : "namednumber",
"number" : "2"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspectPortRate" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.5.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "2048"
},
],
"range" : {
"min" : "0",
"max" : "2048"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspectPortInterval" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.1.5.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "15"
},
],
"range" : {
"min" : "1",
"max" : "15"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspectStatus" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2",
}, # node
"arpInspectFilterClear" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectLogClear" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"arpInspectFilterTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.3",
"status" : "current",
"description" :
"""""",
}, # table
"arpInspectFilterEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.3.1",
"create" : "true",
"status" : "current",
"linkage" : [
"arpInspectFilterMac",
"arpInspectFilterVid",
],
"description" :
"""""",
}, # row
"arpInspectFilterMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectFilterVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectFilterPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectFilterExpiry" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectFilterReason" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.3.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"macVid" : {
"nodetype" : "namednumber",
"number" : "1"
},
"port" : {
"nodetype" : "namednumber",
"number" : "2"
},
"ip" : {
"nodetype" : "namednumber",
"number" : "3"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectFilterRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.3.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"arpInspectLogTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.4",
"status" : "current",
"description" :
"""""",
}, # table
"arpInspectLogEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.4.1",
"status" : "current",
"linkage" : [
"arpInspectLogMac",
"arpInspectLogVid",
"arpInspectLogPort",
"arpInspectLogIp",
],
"description" :
"""""",
}, # row
"arpInspectLogMac" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.4.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectLogVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.4.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectLogPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectLogIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.4.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectLogNumPkt" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.4.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectLogTime" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.4.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DateAndTime"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.5",
"status" : "current",
"description" :
"""""",
}, # table
"arpInspectStatisticsEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.5.1",
"status" : "current",
"linkage" : [
"arpInspectStatisticsVid",
],
"description" :
"""""",
}, # row
"arpInspectStatisticsVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsReceived" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.5.1.2",
"status" : "current",
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsRequest" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.5.1.3",
"status" : "current",
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsReply" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.5.1.4",
"status" : "current",
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsForward" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.5.1.5",
"status" : "current",
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsDrop" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.5.1.6",
"status" : "current",
"access" : "readonly",
"description" :
"""""",
}, # column
"arpInspectStatisticsClear" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.102.2.5.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"trTCMSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103",
}, # node
"trTCMState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Two-rate three color marker enabled/disabled for the switch.""",
}, # scalar
"trTCMMode" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"color-aware" : {
"nodetype" : "namednumber",
"number" : "0"
},
"color-blind" : {
"nodetype" : "namednumber",
"number" : "1"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"trTCMPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103.3",
"status" : "current",
"description" :
"""""",
}, # table
"trTCMPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103.3.1",
"create" : "true",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in trTCMPortTable.""",
}, # row
"trTCMPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""Two-rate three color marker enabled/disabled on the port.""",
}, # column
"trTCMPortCIR" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Allowed CIR in pkts/s.""",
}, # column
"trTCMPortPIR" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""Allowed PIR in pkts/s.""",
}, # column
"trTCMPortDscpGreen" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0-63""",
}, # column
"trTCMPortDscpYellow" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103.3.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0-63""",
}, # column
"trTCMPortDscpRed" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.103.3.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""0-63""",
}, # column
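# trTCMPortCIR/trTCMPortPIR together with the three DSCP columns above
# describe a two-rate three-color marker in the spirit of RFC 2698: traffic
# at or below the committed rate is marked green, traffic between the
# committed and peak rates yellow, and traffic above the peak rate red. A
# deliberately simplified, rate-only illustration of that decision (a real
# trTCM uses two token buckets and burst sizes, which are ignored here):
#
#   def trtcm_color(rate_pps, cir, pir):
#       """Classify an observed packet rate against CIR/PIR in pkts/s."""
#       if rate_pps <= cir:
#           return 'green'     # remarked with trTCMPortDscpGreen
#       if rate_pps <= pir:
#           return 'yellow'    # remarked with trTCMPortDscpYellow
#       return 'red'           # remarked with trTCMPortDscpRed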
"loopGuardSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.104",
}, # node
"loopGuardState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.104.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"loopGuardPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.104.2",
"status" : "current",
"description" :
"""""",
}, # table
"loopGuardPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.104.2.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in loopGuardPortTable.""",
}, # row
"loopGuardPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.104.2.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"subnetBasedVlanSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.105",
}, # node
"subnetBasedVlanState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.105.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""subnet-based vlan feature enabled/disabled for the switch.""",
}, # scalar
"dhcpVlanOverrideState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.105.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""dhcp vlan override enabled/disabled when subnet-based vlan is enabled.""",
}, # scalar
"subnetBasedVlanTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.105.3",
"status" : "current",
"description" :
"""""",
}, # table
"subnetBasedVlanEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.105.3.1",
"create" : "true",
"status" : "current",
"linkage" : [
"subnetBasedVlanSrcIp",
"subnetBasedVlanSrcMaskBit",
],
"description" :
"""An entry in subnetBasedVlanTable.""",
}, # row
"subnetBasedVlanSrcIp" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.105.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""source ip for subnet-based vlan entry""",
}, # column
"subnetBasedVlanSrcMaskBit" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.105.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "32"
},
],
"range" : {
"min" : "1",
"max" : "32"
},
},
},
"access" : "readonly",
"description" :
"""source ip mask-bits for subnet-based vlan entry""",
}, # column
"subnetBasedVlanName" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.105.3.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"parent module" : {
"name" : "RFC1213-MIB",
"type" : "DisplayString",
},
"ranges" : [
{
"min" : "0",
"max" : "31"
},
],
"range" : {
"min" : "0",
"max" : "31"
},
},
},
"access" : "readwrite",
"description" :
"""name for subnet-based vlan entry""",
}, # column
"subnetBasedVlanVid" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.105.3.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "readwrite",
"description" :
"""vid for subnet-based vlan entry""",
}, # column
"subnetBasedVlanPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.105.3.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "7"
},
],
"range" : {
"min" : "0",
"max" : "7"
},
},
},
"access" : "readwrite",
"description" :
"""priority for subnet-based vlan entry""",
}, # column
"subnetBasedVlanEntryState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.105.3.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"macAuthenticationSetup" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.106",
}, # node
"macAuthenticationState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.106.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"macAuthenticationNamePrefix" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.106.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"macAuthenticationPassword" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.106.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"macAuthenticationTimeout" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.106.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"macAuthenticationPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.106.5",
"status" : "current",
"description" :
"""""",
}, # table
"macAuthenticationPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.106.5.1",
"status" : "current",
"linkage" : [
"dot1dBasePort",
],
"description" :
"""An entry in macAuthenticationPortTable.""",
}, # row
"macAuthenticationPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.106.5.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mstp" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107",
}, # node
"mstpGen" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.1",
}, # node
"mstpGenState" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
"""Enabled/disabled on the mrstp bridge.""",
}, # scalar
"mstpGenCfgIdName" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"RFC1213-MIB", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""The configuration name that identifies the MST
region and is used as one of the inputs in the
computation of the MST Configuration Identifier.""",
"reference>" :
"""12.12.3.4.2.b)""",
}, # scalar
"mstpGenCfgIdRevLevel" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readwrite",
"description" :
"""This object identifies the MST revision that
identifies the MST region and is used as one
of the inputs in the computation of the MST
configuration Identifier.""",
"reference>" :
"""12.12.3.4.2.c)""",
}, # scalar
"mstpGenCfgIdCfgDigest" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "16",
"max" : "16"
},
],
"range" : {
"min" : "16",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Configuration Digest.""",
"reference>" :
"""12.12.3.3.3.a.4""",
}, # scalar
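# mstpGenCfgIdCfgDigest is the 16-octet MST Configuration Digest. Per IEEE
# 802.1s (13.7) it is an HMAC-MD5, with a fixed signature key, over the
# 4096-entry table of 16-bit MSTID values indexed by VID. A sketch of that
# computation, kept as a comment and offered for illustration only (verify
# the key and table layout against the standard before relying on it):
#
#   import hashlib
#   import hmac
#
#   KEY = bytes.fromhex('13AC06A62E47FD51F95D2BA243CD0346')
#
#   def mst_config_digest(vid_to_mstid):
#       """vid_to_mstid maps VID -> MSTID; unmapped VIDs stay in the CIST (0)."""
#       table = bytearray(4096 * 2)       # entries 0 and 4095 remain zero
#       for vid, mstid in vid_to_mstid.items():
#           table[2 * vid:2 * vid + 2] = mstid.to_bytes(2, 'big')
#       return hmac.new(KEY, bytes(table), hashlib.md5).digest()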
"mstpGenHelloTime" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"parent module" : {
"name" : "BRIDGE-MIB",
"type" : "Timeout",
},
"ranges" : [
{
"min" : "1",
"max" : "10"
},
],
"range" : {
"min" : "1",
"max" : "10"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"mstpGenMaxAge" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"parent module" : {
"name" : "BRIDGE-MIB",
"type" : "Timeout",
},
"ranges" : [
{
"min" : "6",
"max" : "40"
},
],
"range" : {
"min" : "6",
"max" : "40"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"mstpGenForwardDelay" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"parent module" : {
"name" : "BRIDGE-MIB",
"type" : "Timeout",
},
"ranges" : [
{
"min" : "4",
"max" : "30"
},
],
"range" : {
"min" : "4",
"max" : "30"
},
},
},
"access" : "readwrite",
"description" :
"""""",
}, # scalar
"mstpGenMaxHops" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "4",
"max" : "30"
},
],
"range" : {
"min" : "4",
"max" : "30"
},
},
},
"access" : "readwrite",
"description" :
"""13.22.f)""",
}, # scalar
"mstpGenCistRootPathCost" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
""".""",
}, # scalar
"mstpGenCistRootBrid" : {
"nodetype" : "scalar",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "32",
"max" : "32"
},
],
"range" : {
"min" : "32",
"max" : "32"
},
},
},
"access" : "readonly",
"description" :
""".""",
}, # scalar
"mstMapTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.20",
"status" : "current",
"description" :
"""This table contains one entry for each instance of MSTP.""",
}, # table
"mstMapEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.20.1",
"create" : "true",
"status" : "current",
"linkage" : [
"mstMapIndex",
],
"description" :
"""A conceptual row containing the status of the MSTP instance.""",
}, # row
"mstMapIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.20.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "MstiOrCistInstanceIndex"},
},
"access" : "noaccess",
"description" :
"""Uniquely identifies an instance. The entry of this table with index 0
presents always, represents CIST. When SET operation """,
}, # column
"mstMapVlans1k" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.20.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "0",
"max" : "128"
},
],
"range" : {
"min" : "0",
"max" : "128"
},
},
},
"access" : "readwrite",
"description" :
"""A string of octets containing one bit per VLAN. The
first octet corresponds to VLANs with VlanIndex values
1 through 8; the second octet to VLANs 9 through
16 etc. The most significant bit of each octet
corresponds to the lowest VlanIndex value in that octet.
For each VLAN that is mapped to this MSTP instance,
the bit corresponding to that VLAN is set to '1'.
Empty (zero) most significant octets are not mandatory.""",
}, # column
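# The mstMapVlans1k/2k/3k/4k bitmaps all use the encoding described above:
# one bit per VLAN, most significant bit first. A small decoder, kept as a
# comment, that turns one of these octet strings back into VLAN IDs
# (base_vid is 1 for mstMapVlans1k, 1024 for 2k, 2048 for 3k, 3072 for 4k):
#
#   def vlans_from_bitmap(octets, base_vid=1):
#       """Decode an mstMapVlans* octet string into a list of VLAN IDs."""
#       vids = []
#       for i, octet in enumerate(octets):
#           for bit in range(8):
#               if octet & (0x80 >> bit):          # MSB = lowest VlanIndex
#                   vids.append(base_vid + 8 * i + bit)
#       return vids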
"mstMapVlans2k" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.20.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "0",
"max" : "128"
},
],
"range" : {
"min" : "0",
"max" : "128"
},
},
},
"access" : "readwrite",
"description" :
"""A string of octets containing one bit per VLAN for
VLANS with VlanIndex values 1024 through 2047. The
first octet corresponds to VLANs with VlanIndex values
1024 through 1031; the second octet to VLANs 1032
through 1039 etc. The most significant bit of each
octet corresponds to the lowest VlanIndex value in that
octet.
For each VLAN that is mapped to this MSTP instance,
the bit corresponding to that VLAN is set to '1'.
Empty (zero) most significant octets are not mandatory.""",
}, # column
"mstMapVlans3k" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.20.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "0",
"max" : "128"
},
],
"range" : {
"min" : "0",
"max" : "128"
},
},
},
"access" : "readwrite",
"description" :
"""A string of octets containing one bit per VLAN for
VLANS with VlanIndex values 2048 through 3071. The
first octet corresponds to VLANs with VlanIndex values
of 2048 through 2055; the second octet to VLANs 2056
through 2063 etc. The most significant bit of each
octet corresponds to the lowest VlanIndex value in that
octet.
For each VLAN that is mapped to this MSTP instance,
the bit corresponding to that VLAN is set to '1'.
Empty (zero) most significant octets are not mandatory.""",
}, # column
"mstMapVlans4k" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.20.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "OctetString",
"ranges" : [
{
"min" : "0",
"max" : "128"
},
],
"range" : {
"min" : "0",
"max" : "128"
},
},
},
"access" : "readwrite",
"description" :
"""A string of octets containing one bit per VLAN for
VLANS with VlanIndex values 3072 through 4095. The
first octet corresponds to VLANs with VlanIndex values
3072 through 3079; the second octet to VLANs 3080
through 3087 etc. The most significant bit of each
octet corresponds to the lowest VlanIndex value in that
octet.
For each VLAN that is mapped to this MSTP instance,
the bit corresponding to that VLAN is set to '1'.
Empty (zero) most significant octets are not mandatory.""",
}, # column
"mstMapRowStatus" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.20.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "RowStatus"},
},
"access" : "readwrite",
"description" :
"""""",
}, # column
"mstVlanTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.30",
"status" : "current",
"description" :
"""This table contains one entry for each VlanId.""",
}, # table
"mstVlanEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.30.1",
"status" : "current",
"linkage" : [
"mstVlanIndex",
],
"description" :
"""Information regarding the instance to which each Vlan is mapped.""",
}, # row
"mstVlanIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.30.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "4094"
},
],
"range" : {
"min" : "1",
"max" : "4094"
},
},
},
"access" : "noaccess",
"description" :
"""The VlanId for which this entry contains the instance mapped.""",
}, # column
"mstVlanMstIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.30.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "MstiOrCistInstanceIndex"},
},
"access" : "readonly",
"description" :
"""An integer with values ranging from 0 to 64 that identify a
the CIST/MSTI instance to which this VLAN is mapped""",
}, # column
"mstpPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.40",
"status" : "current",
"description" :
"""A table that contains generic information about
every port that is associated with this bridge.""",
}, # table
"mstpPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.40.1",
"status" : "current",
"linkage" : [
"mstpPortIndex",
],
"description" :
"""A list of information for each port of the
bridge.""",
}, # row
"mstpPortIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.40.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "noaccess",
"description" :
"""A unique value, greater than zero, for each Port.
The value for each interface sub-layer
must remain constant at least from one re-initialization
of the entity's network management system to the next re-
initialization.""",
}, # column
"mstpPortOperEdgePort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.40.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readonly",
"description" :
"""""",
"reference>" :
"""""",
}, # column
"mstpPortOperPointToPointMAC" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.40.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readonly",
"description" :
"""""",
"reference>" :
"""""",
}, # column
"mstpXstTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.50",
"status" : "current",
"description" :
""".""",
}, # table
"mstpXstEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.50.1",
"status" : "current",
"linkage" : [
"mstpXstId",
],
"description" :
""".""",
}, # row
"mstpXstId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.50.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "MstiOrCistInstanceIndex"},
},
"access" : "readonly",
"description" :
"""0 means CIST.""",
}, # column
"mstpXstBridgePriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.50.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "61440"
},
],
"range" : {
"min" : "0",
"max" : "61440"
},
},
},
"access" : "readwrite",
"default" : "32768",
"description" :
"""Bridge priority, in steps of 4096.""",
}, # column
"mstpXstBridgeId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.50.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "BridgeId"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstInternalRootCost" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.50.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstRootPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.50.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstTimeSinceTopologyChange" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.50.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "TimeTicks"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstTopologyChangesCount" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.50.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Counter32"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstPortTable" : {
"nodetype" : "table",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60",
"status" : "current",
"description" :
""".""",
}, # table
"mstpXstPortEntry" : {
"nodetype" : "row",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60.1",
"status" : "current",
"linkage" : [
"mstpXstPortXstId",
"mstpXstPortIndex",
],
"description" :
""".""",
"reference>" :
""".""",
}, # row
"mstpXstPortXstId" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"ZYXEL-GS4012F-MIB", "name" : "MstiOrCistInstanceIndex"},
},
"access" : "noaccess",
"description" :
"""0 means CIST.""",
}, # column
"mstpXstPortIndex" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "readonly",
"description" :
"""The value of mstpPortIndex of the Port
in mstpPortTable.""",
}, # column
"mstpXstPortEnable" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"P-BRIDGE-MIB", "name" : "EnabledStatus"},
},
"access" : "readwrite",
"description" :
""".""",
}, # column
"mstpXstPortPriority" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readwrite",
"default" : "128",
"description" :
"""Port priority, in steps of 16.""",
}, # column
"mstpXstPortPathCost" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "65535"
},
],
"range" : {
"min" : "1",
"max" : "65535"
},
},
},
"access" : "readwrite",
"description" :
""".""",
}, # column
"mstpXstPortState" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Enumeration",
"disabled" : {
"nodetype" : "namednumber",
"number" : "0"
},
"discarding" : {
"nodetype" : "namednumber",
"number" : "1"
},
"learning" : {
"nodetype" : "namednumber",
"number" : "2"
},
"forwarding" : {
"nodetype" : "namednumber",
"number" : "3"
},
"unknown" : {
"nodetype" : "namednumber",
"number" : "4"
},
},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstPortDesignatedRoot" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "BridgeId"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstPortDesignatedCost" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstPortDesignatedBridge" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"BRIDGE-MIB", "name" : "BridgeId"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpXstPortDesignatedPort" : {
"nodetype" : "column",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.60.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
""".""",
}, # column
"mstpNotifications" : {
"nodetype" : "node",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.70",
}, # node
}, # nodes
"notifications" : {
"eventOnTrap" : {
"nodetype" : "notification",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.37.2.1",
"status" : "current",
"objects" : {
"eventSeqNum" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventEventId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventName" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventSetTime" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventSeverity" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventInstanceType" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventInstanceId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventInstanceName" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventServAffective" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventDescription" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"trapPersistence" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"trapSenderNodeId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"sysObjectID" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
},
"description" :
"""This trap is used to inform network management system that a delta
fault event (events that are automatically cleared) has occurred
or a normal fault event (not automatically cleared) state has
been set on.
Objects are used as follows:
- eventSeqNum is the sequence number of the event. For normal
type of events must equal to the sequence number of the event
in the events table.
- eventEventId specifies what fault event has occurred.
- eventName specifies the name of the fault event.
- eventSetTime indicates when the fault event has occurred
(delta events) or when fault has been set on (normal events).
- eventSeverity reports the severity level of the event.
- eventInstanceType indicates what kind of object is faulty.
- eventInstanceId specifies what instance is faulty.
- eventInstanceName may contain textual description for
the faulty object.
- eventServAffective specifies whether the event is
immediately service affective.
- eventDescription reports possible additional information about the event.
- trapPersistence tells whether this event is a delta or normal event.
- trapSenderNodeId specifies the node ID of the sending network element if
configuring it is supported for the network element, otherwise 0.
- sysObjectID specifies what kind of equipment reports the fault event.
For more information see the eventTable specification""",
}, # notification
"eventClearedTrap" : {
"nodetype" : "notification",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.37.2.2",
"status" : "current",
"objects" : {
"eventSeqNum" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventEventId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventSetTime" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventInstanceType" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"eventInstanceId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"trapRefSeqNum" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"trapSenderNodeId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
"sysObjectID" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
},
"description" :
"""This trap is used to inform network management system that a normal
type fault event has been cleared (state set off).
Objects are used as follows:
- eventSeqNum is the sequence number of this clearing event. Note that
the sequence number of the cleared event is reported in the trapRefSeqNum
object.
- eventEventId specifies what event has been cleared.
- eventSetTime indicates when the fault event has been cleared.
- eventInstanceType indicates what kind of object has been
faulty.
- eventInstanceId specifies what instance has been faulty.
- trapRefSeqNum specifies the sequence number of the cleared event (i.e.
the sequence number was assigned for the event in the events table).
- trapSenderNodeId specifies the node ID of the sending network element if
configuring it is supported for the network element, otherwise 0.
- sysObjectID specifies what kind of equipment reports the clearing event.
For more information see the eventTable specification""",
}, # notification
"newRoot" : {
"nodetype" : "notification",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.2.1",
"status" : "current",
"objects" : {
"mrstpBridgeIndex" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
},
"description" :
"""""",
}, # notification
"topologyChange" : {
"nodetype" : "notification",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.42.2.2",
"status" : "current",
"objects" : {
"mrstpBridgeIndex" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
},
"description" :
"""""",
}, # notification
"newRoot" : {
"nodetype" : "notification",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.70.1",
"status" : "current",
"objects" : {
"mstpXstId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
},
"description" :
"""""",
}, # notification
"topologyChange" : {
"nodetype" : "notification",
"moduleName" : "ZYXEL-GS4012F-MIB",
"oid" : "1.3.6.1.4.1.890.1.5.8.20.107.70.2",
"status" : "current",
"objects" : {
"mstpXstId" : {
"nodetype" : "object",
"module" : "ZYXEL-GS4012F-MIB"
},
},
"description" :
"""""",
}, # notification
}, # notifications
}
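# --- Editor's illustration (not part of the generated MIB data): the
# mstMapVlans* descriptions above define a bitmap encoding -- one bit per
# VLAN, 128 octets per 1024-VLAN block, with the most significant bit of
# each octet holding the lowest VlanIndex. A hypothetical helper showing
# how such an octet string would be packed:
def _pack_vlan_bitmap(vlan_ids, base):
    """Pack VlanIndex values from [base, base + 1023] into a 128-octet bitmap."""
    octets = bytearray(128)
    for vid in vlan_ids:
        offset = vid - base
        if 0 <= offset < 1024:
            # MSB of each octet corresponds to the lowest VlanIndex in it.
            octets[offset // 8] |= 0x80 >> (offset % 8)
    return bytes(octets)
# Example: _pack_vlan_bitmap({1024, 1031}, base=1024)[0] == 0b10000001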
| [] |
yutiansut/RxPY | rx/concurrency/timeoutscheduler.py | c3bbba77f9ebd7706c949141725e220096deabd4 | import logging
from threading import Timer
from datetime import timedelta
from rx.core import Scheduler, Disposable
from rx.disposables import SingleAssignmentDisposable, CompositeDisposable
from .schedulerbase import SchedulerBase
log = logging.getLogger("Rx")
class TimeoutScheduler(SchedulerBase):
"""A scheduler that schedules work via a timed callback based upon platform."""
def schedule(self, action, state=None):
"""Schedules an action to be executed."""
disposable = SingleAssignmentDisposable()
def interval():
disposable.disposable = self.invoke_action(action, state)
timer = Timer(0, interval)
timer.setDaemon(True)
timer.start()
def dispose():
timer.cancel()
return CompositeDisposable(disposable, Disposable.create(dispose))
def schedule_relative(self, duetime, action, state=None):
"""Schedules an action to be executed after duetime."""
scheduler = self
timespan = self.to_timedelta(duetime)
if timespan == timedelta(0):
return scheduler.schedule(action, state)
disposable = SingleAssignmentDisposable()
def interval():
disposable.disposable = self.invoke_action(action, state)
seconds = timespan.total_seconds()
log.debug("timeout: %s", seconds)
timer = Timer(seconds, interval)
timer.setDaemon(True)
timer.start()
def dispose():
timer.cancel()
return CompositeDisposable(disposable, Disposable.create(dispose))
def schedule_absolute(self, duetime, action, state=None):
"""Schedules an action to be executed after duetime."""
duetime = self.to_datetime(duetime)
return self.schedule_relative(duetime - self.now, action, state)
def _start_timer(self, period, action):
timer = Timer(period, action)
timer.setDaemon(True)
timer.start()
return timer
timeout_scheduler = TimeoutScheduler()
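if __name__ == "__main__":
    # Editor's usage sketch (not part of the original module): scheduled
    # actions are invoked as action(scheduler, state) via invoke_action;
    # the sleep keeps the main thread alive long enough for the daemon
    # timer to fire.
    import time

    def action(scheduler, state):
        print("fired with state:", state)

    timeout_scheduler.schedule_relative(timedelta(seconds=0.1), action, 42)
    time.sleep(0.2)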
| [((241, 264), 'logging.getLogger', 'logging.getLogger', (['"""Rx"""'], {}), "('Rx')\n", (258, 264), False, 'import logging\n'), ((507, 535), 'rx.disposables.SingleAssignmentDisposable', 'SingleAssignmentDisposable', ([], {}), '()\n', (533, 535), False, 'from rx.disposables import SingleAssignmentDisposable, CompositeDisposable\n'), ((647, 665), 'threading.Timer', 'Timer', (['(0)', 'interval'], {}), '(0, interval)\n', (652, 665), False, 'from threading import Timer\n'), ((1155, 1183), 'rx.disposables.SingleAssignmentDisposable', 'SingleAssignmentDisposable', ([], {}), '()\n', (1181, 1183), False, 'from rx.disposables import SingleAssignmentDisposable, CompositeDisposable\n'), ((1381, 1405), 'threading.Timer', 'Timer', (['seconds', 'interval'], {}), '(seconds, interval)\n', (1386, 1405), False, 'from threading import Timer\n'), ((1891, 1912), 'threading.Timer', 'Timer', (['period', 'action'], {}), '(period, action)\n', (1896, 1912), False, 'from threading import Timer\n'), ((816, 842), 'rx.core.Disposable.create', 'Disposable.create', (['dispose'], {}), '(dispose)\n', (833, 842), False, 'from rx.core import Scheduler, Disposable\n'), ((1066, 1078), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (1075, 1078), False, 'from datetime import timedelta\n'), ((1557, 1583), 'rx.core.Disposable.create', 'Disposable.create', (['dispose'], {}), '(dispose)\n', (1574, 1583), False, 'from rx.core import Scheduler, Disposable\n')] |
edomin/vgazer | vgazer/version/custom_checker/inputproto.py | 3ffe64f2517cbfbe0b0292bacc9fbf7391687e76 | import requests
from bs4 import BeautifulSoup
def Check(auth, mirrors):
response = requests.get("https://www.x.org/releases/individual/proto/")
html = response.content.decode("utf-8")
parsedHtml = BeautifulSoup(html, "html.parser")
links = parsedHtml.find_all("a")
maxVersionMajor = -1
maxVersionMinor = -1
maxVersionPatch = -1
    maxVersionSubpatch = -1
    versionText = None  # stays None if no inputproto release links are found
for link in links:
if ("inputproto-" in link.text and ".tar.gz" in link.text
and ".sig" not in link.text):
version = link.text.split("-")[1].split(".tar.gz")[0].split(".")
versionMajor = int(version[0])
versionMinor = int(version[1])
if len(version) == 3:
versionPatch = int(version[2])
versionSubpatch = 0
elif len(version) == 2:
versionPatch = 0
versionSubpatch = 0
else:
versionPatch = int(version[2])
versionSubpatch = int(version[3])
if versionMajor > maxVersionMajor:
maxVersionMajor = versionMajor
maxVersionMinor = versionMinor
maxVersionPatch = versionPatch
maxVersionSubpatch = versionSubpatch
versionText = link.text.split("-")[1].split(".tar.gz")[0]
elif (versionMajor == maxVersionMajor
and versionMinor > maxVersionMinor):
maxVersionMinor = versionMinor
maxVersionPatch = versionPatch
maxVersionSubpatch = versionSubpatch
versionText = link.text.split("-")[1].split(".tar.gz")[0]
elif (versionMajor == maxVersionMajor
and versionMinor == maxVersionMinor
and versionPatch > maxVersionPatch):
maxVersionPatch = versionPatch
maxVersionSubpatch = versionSubpatch
versionText = link.text.split("-")[1].split(".tar.gz")[0]
elif (versionMajor == maxVersionMajor
and versionMinor == maxVersionMinor
and versionPatch == maxVersionPatch
and versionSubpatch > maxVersionSubpatch):
maxVersionSubpatch = versionSubpatch
versionText = link.text.split("-")[1].split(".tar.gz")[0]
return versionText
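if __name__ == "__main__":
    # Editor's usage sketch (not part of the original module): `auth` and
    # `mirrors` are accepted but unused by this checker, so placeholders
    # suffice. Requires network access to www.x.org.
    print(Check(auth=None, mirrors=None))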
| [((88, 148), 'requests.get', 'requests.get', (['"""https://www.x.org/releases/individual/proto/"""'], {}), "('https://www.x.org/releases/individual/proto/')\n", (100, 148), False, 'import requests\n'), ((210, 244), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""html.parser"""'], {}), "(html, 'html.parser')\n", (223, 244), False, 'from bs4 import BeautifulSoup\n')] |
briangrahamww/pandas-profiling | src/pandas_profiling/model/summary_helpers.py | 62f8e3fd81720d444041069191c4aacd03d79ad5 | import os
import string
from collections import Counter
from datetime import datetime
from functools import partial
from pathlib import Path
from typing import Optional
import numpy as np
import pandas as pd
from scipy.stats.stats import chisquare
from tangled_up_in_unicode import block, block_abbr, category, category_long, script
from pandas_profiling.config import Settings
from pandas_profiling.model.summary_helpers_image import (
extract_exif,
hash_image,
is_image_truncated,
open_image,
)
def mad(arr: np.ndarray) -> np.ndarray:
"""Median Absolute Deviation: a "Robust" version of standard deviation.
    Indicates variability of the sample.
https://en.wikipedia.org/wiki/Median_absolute_deviation
"""
return np.median(np.abs(arr - np.median(arr)))
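# Editor's note (illustrative, not part of the original module): a worked
# example of mad() --
#     >>> mad(np.array([1, 2, 3, 4, 9]))
#     1.0
# The median is 3, the absolute deviations are [2, 1, 0, 1, 6], and their
# median is 1; the outlier 9 barely moves the result, unlike the standard
# deviation (~2.8).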
def named_aggregate_summary(series: pd.Series, key: str) -> dict:
summary = {
f"max_{key}": np.max(series),
f"mean_{key}": np.mean(series),
f"median_{key}": np.median(series),
f"min_{key}": np.min(series),
}
return summary
def length_summary(series: pd.Series, summary: dict = None) -> dict:
if summary is None:
summary = {}
length = series.str.len()
summary.update({"length": length})
summary.update(named_aggregate_summary(length, "length"))
return summary
def file_summary(series: pd.Series) -> dict:
"""
Args:
series: series to summarize
Returns:
"""
# Transform
stats = series.map(lambda x: os.stat(x))
def convert_datetime(x: float) -> str:
return datetime.fromtimestamp(x).strftime("%Y-%m-%d %H:%M:%S")
# Transform some more
summary = {
"file_size": stats.map(lambda x: x.st_size),
"file_created_time": stats.map(lambda x: x.st_ctime).map(convert_datetime),
"file_accessed_time": stats.map(lambda x: x.st_atime).map(convert_datetime),
"file_modified_time": stats.map(lambda x: x.st_mtime).map(convert_datetime),
}
return summary
def path_summary(series: pd.Series) -> dict:
"""
Args:
series: series to summarize
Returns:
"""
# TODO: optimize using value counts
summary = {
"common_prefix": os.path.commonprefix(series.values.tolist())
or "No common prefix",
"stem_counts": series.map(lambda x: os.path.splitext(x)[0]).value_counts(),
"suffix_counts": series.map(lambda x: os.path.splitext(x)[1]).value_counts(),
"name_counts": series.map(lambda x: os.path.basename(x)).value_counts(),
"parent_counts": series.map(lambda x: os.path.dirname(x)).value_counts(),
"anchor_counts": series.map(lambda x: os.path.splitdrive(x)[0]).value_counts(),
}
summary["n_stem_unique"] = len(summary["stem_counts"])
summary["n_suffix_unique"] = len(summary["suffix_counts"])
summary["n_name_unique"] = len(summary["name_counts"])
summary["n_parent_unique"] = len(summary["parent_counts"])
summary["n_anchor_unique"] = len(summary["anchor_counts"])
return summary
def url_summary(series: pd.Series) -> dict:
"""
Args:
series: series to summarize
Returns:
"""
summary = {
"scheme_counts": series.map(lambda x: x.scheme).value_counts(),
"netloc_counts": series.map(lambda x: x.netloc).value_counts(),
"path_counts": series.map(lambda x: x.path).value_counts(),
"query_counts": series.map(lambda x: x.query).value_counts(),
"fragment_counts": series.map(lambda x: x.fragment).value_counts(),
}
return summary
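# Editor's note (illustrative, not part of the original module):
# url_summary() expects a Series of already-parsed URLs, i.e. objects
# exposing .scheme/.netloc/.path/etc., such as the tuples returned by
# urllib.parse.urlsplit:
#     from urllib.parse import urlsplit
#     s = pd.Series([urlsplit("https://example.com/a?q=1")])
#     url_summary(s)["scheme_counts"]  # -> https: 1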
def count_duplicate_hashes(image_descriptions: dict) -> int:
"""
Args:
image_descriptions:
Returns:
"""
counts = pd.Series(
[x["hash"] for x in image_descriptions if "hash" in x]
).value_counts()
return counts.sum() - len(counts)
def extract_exif_series(image_exifs: list) -> dict:
"""
Args:
image_exifs:
Returns:
"""
exif_keys = []
exif_values: dict = {}
for image_exif in image_exifs:
# Extract key
exif_keys.extend(list(image_exif.keys()))
# Extract values per key
for exif_key, exif_val in image_exif.items():
if exif_key not in exif_values:
exif_values[exif_key] = []
exif_values[exif_key].append(exif_val)
series = {"exif_keys": pd.Series(exif_keys, dtype=object).value_counts().to_dict()}
for k, v in exif_values.items():
series[k] = pd.Series(v).value_counts()
return series
def extract_image_information(
path: Path, exif: bool = False, hash: bool = False
) -> dict:
"""Extracts all image information per file, as opening files is slow
Args:
path: Path to the image
exif: extract exif information
hash: calculate hash (for duplicate detection)
Returns:
A dict containing image information
"""
information: dict = {}
image = open_image(path)
information["opened"] = image is not None
if image is not None:
information["truncated"] = is_image_truncated(image)
if not information["truncated"]:
information["size"] = image.size
if exif:
information["exif"] = extract_exif(image)
if hash:
information["hash"] = hash_image(image)
return information
def image_summary(series: pd.Series, exif: bool = False, hash: bool = False) -> dict:
"""
Args:
series: series to summarize
exif: extract exif information
hash: calculate hash (for duplicate detection)
Returns:
"""
image_information = series.apply(
partial(extract_image_information, exif=exif, hash=hash)
)
summary = {
"n_truncated": sum(
[1 for x in image_information if "truncated" in x and x["truncated"]]
),
"image_dimensions": pd.Series(
[x["size"] for x in image_information if "size" in x],
name="image_dimensions",
),
}
image_widths = summary["image_dimensions"].map(lambda x: x[0])
summary.update(named_aggregate_summary(image_widths, "width"))
image_heights = summary["image_dimensions"].map(lambda x: x[1])
summary.update(named_aggregate_summary(image_heights, "height"))
image_areas = image_widths * image_heights
summary.update(named_aggregate_summary(image_areas, "area"))
if hash:
summary["n_duplicate_hash"] = count_duplicate_hashes(image_information)
if exif:
exif_series = extract_exif_series(
[x["exif"] for x in image_information if "exif" in x]
)
summary["exif_keys_counts"] = exif_series["exif_keys"]
summary["exif_data"] = exif_series
return summary
def get_character_counts(series: pd.Series) -> Counter:
"""Function to return the character counts
Args:
series: the Series to process
Returns:
A dict with character counts
"""
return Counter(series.str.cat())
def counter_to_series(counter: Counter) -> pd.Series:
if not counter:
return pd.Series([], dtype=object)
counter_as_tuples = counter.most_common()
items, counts = zip(*counter_as_tuples)
return pd.Series(counts, index=items)
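# Editor's note (illustrative, not part of the original module):
# counter_to_series() keeps most-common order, so
# counter_to_series(Counter("aab")) yields a Series with index
# ['a', 'b'] and values [2, 1].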
def unicode_summary(series: pd.Series) -> dict:
# Unicode Character Summaries (category and script name)
character_counts = get_character_counts(series)
character_counts_series = counter_to_series(character_counts)
char_to_block = {key: block(key) for key in character_counts.keys()}
char_to_category_short = {key: category(key) for key in character_counts.keys()}
char_to_script = {key: script(key) for key in character_counts.keys()}
summary = {
"n_characters": len(character_counts_series),
"character_counts": character_counts_series,
"category_alias_values": {
key: category_long(value) for key, value in char_to_category_short.items()
},
"block_alias_values": {
key: block_abbr(value) for key, value in char_to_block.items()
},
}
# Retrieve original distribution
block_alias_counts: Counter = Counter()
per_block_char_counts: dict = {
k: Counter() for k in summary["block_alias_values"].values()
}
for char, n_char in character_counts.items():
block_name = summary["block_alias_values"][char]
block_alias_counts[block_name] += n_char
per_block_char_counts[block_name][char] = n_char
summary["block_alias_counts"] = counter_to_series(block_alias_counts)
summary["block_alias_char_counts"] = {
k: counter_to_series(v) for k, v in per_block_char_counts.items()
}
script_counts: Counter = Counter()
per_script_char_counts: dict = {k: Counter() for k in char_to_script.values()}
for char, n_char in character_counts.items():
script_name = char_to_script[char]
script_counts[script_name] += n_char
per_script_char_counts[script_name][char] = n_char
summary["script_counts"] = counter_to_series(script_counts)
summary["script_char_counts"] = {
k: counter_to_series(v) for k, v in per_script_char_counts.items()
}
category_alias_counts: Counter = Counter()
per_category_alias_char_counts: dict = {
k: Counter() for k in summary["category_alias_values"].values()
}
for char, n_char in character_counts.items():
category_alias_name = summary["category_alias_values"][char]
category_alias_counts[category_alias_name] += n_char
per_category_alias_char_counts[category_alias_name][char] += n_char
summary["category_alias_counts"] = counter_to_series(category_alias_counts)
summary["category_alias_char_counts"] = {
k: counter_to_series(v) for k, v in per_category_alias_char_counts.items()
}
# Unique counts
summary["n_category"] = len(summary["category_alias_counts"])
summary["n_scripts"] = len(summary["script_counts"])
summary["n_block_alias"] = len(summary["block_alias_counts"])
if len(summary["category_alias_counts"]) > 0:
summary["category_alias_counts"].index = summary[
"category_alias_counts"
].index.str.replace("_", " ")
return summary
def histogram_compute(
config: Settings,
finite_values: np.ndarray,
n_unique: int,
name: str = "histogram",
weights: Optional[np.ndarray] = None,
) -> dict:
stats = {}
bins = config.plot.histogram.bins
bins_arg = "auto" if bins == 0 else min(bins, n_unique)
stats[name] = np.histogram(finite_values, bins=bins_arg, weights=weights)
max_bins = config.plot.histogram.max_bins
if bins_arg == "auto" and len(stats[name][1]) > max_bins:
stats[name] = np.histogram(finite_values, bins=max_bins, weights=None)
return stats
def chi_square(
values: Optional[np.ndarray] = None, histogram: Optional[np.ndarray] = None
) -> dict:
if histogram is None:
histogram, _ = np.histogram(values, bins="auto")
return dict(chisquare(histogram)._asdict())
def word_summary(series: pd.Series) -> dict:
# TODO: preprocess (stopwords)
# TODO: configurable lowercase/punctuation etc.
word_lists = series.str.lower().str.split()
words = word_lists.explode()
words = words.str.strip(string.punctuation)
return {"word_counts": words.value_counts()}
| [((5143, 5159), 'pandas_profiling.model.summary_helpers_image.open_image', 'open_image', (['path'], {}), '(path)\n', (5153, 5159), False, 'from pandas_profiling.model.summary_helpers_image import extract_exif, hash_image, is_image_truncated, open_image\n'), ((7511, 7541), 'pandas.Series', 'pd.Series', (['counts'], {'index': 'items'}), '(counts, index=items)\n', (7520, 7541), True, 'import pandas as pd\n'), ((8482, 8491), 'collections.Counter', 'Counter', ([], {}), '()\n', (8489, 8491), False, 'from collections import Counter\n'), ((9056, 9065), 'collections.Counter', 'Counter', ([], {}), '()\n', (9063, 9065), False, 'from collections import Counter\n'), ((9578, 9587), 'collections.Counter', 'Counter', ([], {}), '()\n', (9585, 9587), False, 'from collections import Counter\n'), ((10939, 10998), 'numpy.histogram', 'np.histogram', (['finite_values'], {'bins': 'bins_arg', 'weights': 'weights'}), '(finite_values, bins=bins_arg, weights=weights)\n', (10951, 10998), True, 'import numpy as np\n'), ((929, 943), 'numpy.max', 'np.max', (['series'], {}), '(series)\n', (935, 943), True, 'import numpy as np\n'), ((969, 984), 'numpy.mean', 'np.mean', (['series'], {}), '(series)\n', (976, 984), True, 'import numpy as np\n'), ((1012, 1029), 'numpy.median', 'np.median', (['series'], {}), '(series)\n', (1021, 1029), True, 'import numpy as np\n'), ((1054, 1068), 'numpy.min', 'np.min', (['series'], {}), '(series)\n', (1060, 1068), True, 'import numpy as np\n'), ((5270, 5295), 'pandas_profiling.model.summary_helpers_image.is_image_truncated', 'is_image_truncated', (['image'], {}), '(image)\n', (5288, 5295), False, 'from pandas_profiling.model.summary_helpers_image import extract_exif, hash_image, is_image_truncated, open_image\n'), ((5893, 5949), 'functools.partial', 'partial', (['extract_image_information'], {'exif': 'exif', 'hash': 'hash'}), '(extract_image_information, exif=exif, hash=hash)\n', (5900, 5949), False, 'from functools import partial\n'), ((6127, 6221), 'pandas.Series', 'pd.Series', (["[x['size'] for x in image_information if 'size' in x]"], {'name': '"""image_dimensions"""'}), "([x['size'] for x in image_information if 'size' in x], name=\n 'image_dimensions')\n", (6136, 6221), True, 'import pandas as pd\n'), ((7377, 7404), 'pandas.Series', 'pd.Series', (['[]'], {'dtype': 'object'}), '([], dtype=object)\n', (7386, 7404), True, 'import pandas as pd\n'), ((7808, 7818), 'tangled_up_in_unicode.block', 'block', (['key'], {}), '(key)\n', (7813, 7818), False, 'from tangled_up_in_unicode import block, block_abbr, category, category_long, script\n'), ((7891, 7904), 'tangled_up_in_unicode.category', 'category', (['key'], {}), '(key)\n', (7899, 7904), False, 'from tangled_up_in_unicode import block, block_abbr, category, category_long, script\n'), ((7969, 7980), 'tangled_up_in_unicode.script', 'script', (['key'], {}), '(key)\n', (7975, 7980), False, 'from tangled_up_in_unicode import block, block_abbr, category, category_long, script\n'), ((8541, 8550), 'collections.Counter', 'Counter', ([], {}), '()\n', (8548, 8550), False, 'from collections import Counter\n'), ((9106, 9115), 'collections.Counter', 'Counter', ([], {}), '()\n', (9113, 9115), False, 'from collections import Counter\n'), ((9646, 9655), 'collections.Counter', 'Counter', ([], {}), '()\n', (9653, 9655), False, 'from collections import Counter\n'), ((11134, 11190), 'numpy.histogram', 'np.histogram', (['finite_values'], {'bins': 'max_bins', 'weights': 'None'}), '(finite_values, bins=max_bins, weights=None)\n', (11146, 11190), True, 'import 
numpy as np\n'), ((11376, 11409), 'numpy.histogram', 'np.histogram', (['values'], {'bins': '"""auto"""'}), "(values, bins='auto')\n", (11388, 11409), True, 'import numpy as np\n'), ((1569, 1579), 'os.stat', 'os.stat', (['x'], {}), '(x)\n', (1576, 1579), False, 'import os\n'), ((3853, 3918), 'pandas.Series', 'pd.Series', (["[x['hash'] for x in image_descriptions if 'hash' in x]"], {}), "([x['hash'] for x in image_descriptions if 'hash' in x])\n", (3862, 3918), True, 'import pandas as pd\n'), ((8199, 8219), 'tangled_up_in_unicode.category_long', 'category_long', (['value'], {}), '(value)\n', (8212, 8219), False, 'from tangled_up_in_unicode import block, block_abbr, category, category_long, script\n'), ((8332, 8349), 'tangled_up_in_unicode.block_abbr', 'block_abbr', (['value'], {}), '(value)\n', (8342, 8349), False, 'from tangled_up_in_unicode import block, block_abbr, category, category_long, script\n'), ((801, 815), 'numpy.median', 'np.median', (['arr'], {}), '(arr)\n', (810, 815), True, 'import numpy as np\n'), ((1643, 1668), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['x'], {}), '(x)\n', (1665, 1668), False, 'from datetime import datetime\n'), ((4663, 4675), 'pandas.Series', 'pd.Series', (['v'], {}), '(v)\n', (4672, 4675), True, 'import pandas as pd\n'), ((5445, 5464), 'pandas_profiling.model.summary_helpers_image.extract_exif', 'extract_exif', (['image'], {}), '(image)\n', (5457, 5464), False, 'from pandas_profiling.model.summary_helpers_image import extract_exif, hash_image, is_image_truncated, open_image\n'), ((5526, 5543), 'pandas_profiling.model.summary_helpers_image.hash_image', 'hash_image', (['image'], {}), '(image)\n', (5536, 5543), False, 'from pandas_profiling.model.summary_helpers_image import extract_exif, hash_image, is_image_truncated, open_image\n'), ((11427, 11447), 'scipy.stats.stats.chisquare', 'chisquare', (['histogram'], {}), '(histogram)\n', (11436, 11447), False, 'from scipy.stats.stats import chisquare\n'), ((2599, 2618), 'os.path.basename', 'os.path.basename', (['x'], {}), '(x)\n', (2615, 2618), False, 'import os\n'), ((2683, 2701), 'os.path.dirname', 'os.path.dirname', (['x'], {}), '(x)\n', (2698, 2701), False, 'import os\n'), ((4541, 4575), 'pandas.Series', 'pd.Series', (['exif_keys'], {'dtype': 'object'}), '(exif_keys, dtype=object)\n', (4550, 4575), True, 'import pandas as pd\n'), ((2427, 2446), 'os.path.splitext', 'os.path.splitext', (['x'], {}), '(x)\n', (2443, 2446), False, 'import os\n'), ((2514, 2533), 'os.path.splitext', 'os.path.splitext', (['x'], {}), '(x)\n', (2530, 2533), False, 'import os\n'), ((2766, 2787), 'os.path.splitdrive', 'os.path.splitdrive', (['x'], {}), '(x)\n', (2784, 2787), False, 'import os\n')] |
ZephyrII/competitive_colaboration | inverse_warp.py | a557d1e23ef2c0b8e3794f085a79bfffb860f9df | # Author: Anurag Ranjan
# Copyright (c) 2019, Anurag Ranjan
# All rights reserved.
# based on github.com/ClementPinard/SfMLearner-Pytorch
from __future__ import division
import torch
from torch.autograd import Variable
pixel_coords = None
def set_id_grid(depth):
global pixel_coords
b, h, w = depth.size()
i_range = Variable(torch.arange(0, h).view(1, h, 1).expand(1,h,w)).type_as(depth) # [1, H, W]
j_range = Variable(torch.arange(0, w).view(1, 1, w).expand(1,h,w)).type_as(depth) # [1, H, W]
ones = Variable(torch.ones(1,h,w)).type_as(depth)
pixel_coords = torch.stack((j_range, i_range, ones), dim=1) # [1, 3, H, W]
def check_sizes(input, input_name, expected):
condition = [input.ndimension() == len(expected)]
for i,size in enumerate(expected):
if size.isdigit():
condition.append(input.size(i) == int(size))
assert(all(condition)), "wrong size for {}, expected {}, got {}".format(input_name, 'x'.join(expected), list(input.size()))
def pixel2cam(depth, intrinsics_inv):
global pixel_coords
"""Transform coordinates in the pixel frame to the camera frame.
Args:
depth: depth maps -- [B, H, W]
intrinsics_inv: intrinsics_inv matrix for each element of batch -- [B, 3, 3]
Returns:
array of (u,v,1) cam coordinates -- [B, 3, H, W]
"""
b, h, w = depth.size()
if (pixel_coords is None) or pixel_coords.size(2) != h or pixel_coords.size(3) != w:
set_id_grid(depth)
current_pixel_coords = pixel_coords[:,:,:h,:w].expand(b,3,h,w).contiguous().view(b, 3, -1) # [B, 3, H*W]
cam_coords = intrinsics_inv.bmm(current_pixel_coords).view(b, 3, h, w)
return cam_coords * depth.unsqueeze(1)
def cam2pixel(cam_coords, proj_c2p_rot, proj_c2p_tr, padding_mode):
"""Transform coordinates in the camera frame to the pixel frame.
Args:
cam_coords: pixel coordinates defined in the first camera coordinates system -- [B, 4, H, W]
proj_c2p_rot: rotation matrix of cameras -- [B, 3, 4]
proj_c2p_tr: translation vectors of cameras -- [B, 3, 1]
Returns:
array of [-1,1] coordinates -- [B, 2, H, W]
"""
b, _, h, w = cam_coords.size()
cam_coords_flat = cam_coords.view(b, 3, -1) # [B, 3, H*W]
if proj_c2p_rot is not None:
pcoords = proj_c2p_rot.bmm(cam_coords_flat)
else:
pcoords = cam_coords_flat
if proj_c2p_tr is not None:
pcoords = pcoords + proj_c2p_tr # [B, 3, H*W]
X = pcoords[:, 0]
Y = pcoords[:, 1]
Z = pcoords[:, 2].clamp(min=1e-3)
X_norm = 2*(X / Z)/(w-1) - 1 # Normalized, -1 if on extreme left, 1 if on extreme right (x = w-1) [B, H*W]
Y_norm = 2*(Y / Z)/(h-1) - 1 # Idem [B, H*W]
if padding_mode == 'zeros':
X_mask = ((X_norm > 1)+(X_norm < -1)).detach()
        X_norm[X_mask] = 2  # make sure that no point in warped image is a combination of image and gray
Y_mask = ((Y_norm > 1)+(Y_norm < -1)).detach()
Y_norm[Y_mask] = 2
pixel_coords = torch.stack([X_norm, Y_norm], dim=2) # [B, H*W, 2]
return pixel_coords.view(b,h,w,2)
def euler2mat(angle):
"""Convert euler angles to rotation matrix.
Reference: https://github.com/pulkitag/pycaffe-utils/blob/master/rot_utils.py#L174
Args:
angle: rotation angle along 3 axis (in radians) -- size = [B, 3]
Returns:
Rotation matrix corresponding to the euler angles -- size = [B, 3, 3]
"""
B = angle.size(0)
x, y, z = angle[:,0], angle[:,1], angle[:,2]
cosz = torch.cos(z)
sinz = torch.sin(z)
zeros = z.detach()*0
ones = zeros.detach()+1
zmat = torch.stack([cosz, -sinz, zeros,
sinz, cosz, zeros,
zeros, zeros, ones], dim=1).view(B, 3, 3)
cosy = torch.cos(y)
siny = torch.sin(y)
ymat = torch.stack([cosy, zeros, siny,
zeros, ones, zeros,
-siny, zeros, cosy], dim=1).view(B, 3, 3)
cosx = torch.cos(x)
sinx = torch.sin(x)
xmat = torch.stack([ones, zeros, zeros,
zeros, cosx, -sinx,
zeros, sinx, cosx], dim=1).view(B, 3, 3)
rotMat = xmat.bmm(ymat).bmm(zmat)
return rotMat
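# Editor's sketch (not in the original): a 90-degree yaw (rotation about z)
# maps the x-axis onto the y-axis, which makes a quick sanity check:
#     import math
#     R = euler2mat(torch.tensor([[0.0, 0.0, math.pi / 2]]))  # [1, 3, 3]
#     R[0].mm(torch.tensor([[1.0], [0.0], [0.0]]))  # ~[[0.], [1.], [0.]]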
def quat2mat(quat):
"""Convert quaternion coefficients to rotation matrix.
Args:
        quat: first three coefficients of the rotation quaternion; the fourth is then computed to give the quaternion a norm of 1 -- size = [B, 3]
Returns:
Rotation matrix corresponding to the quaternion -- size = [B, 3, 3]
"""
norm_quat = torch.cat([quat[:,:1].detach()*0 + 1, quat], dim=1)
norm_quat = norm_quat/norm_quat.norm(p=2, dim=1, keepdim=True)
w, x, y, z = norm_quat[:,0], norm_quat[:,1], norm_quat[:,2], norm_quat[:,3]
B = quat.size(0)
w2, x2, y2, z2 = w.pow(2), x.pow(2), y.pow(2), z.pow(2)
wx, wy, wz = w*x, w*y, w*z
xy, xz, yz = x*y, x*z, y*z
rotMat = torch.stack([w2 + x2 - y2 - z2, 2*xy - 2*wz, 2*wy + 2*xz,
2*wz + 2*xy, w2 - x2 + y2 - z2, 2*yz - 2*wx,
2*xz - 2*wy, 2*wx + 2*yz, w2 - x2 - y2 + z2], dim=1).view(B, 3, 3)
return rotMat
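# Editor's sketch (not in the original): the zero vector is the identity
# rotation here, since a leading coefficient of 1 is prepended before
# normalization:
#     quat2mat(torch.zeros(1, 3))  # ~identity matrix, shape [1, 3, 3]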
def pose_vec2mat(vec, rotation_mode='euler'):
"""
Convert 6DoF parameters to transformation matrix.
    Args:
vec: 6DoF parameters in the order of tx, ty, tz, rx, ry, rz -- [B, 6]
Returns:
A transformation matrix -- [B, 3, 4]
"""
translation = vec[:, :3].unsqueeze(-1) # [B, 3, 1]
rot = vec[:,3:]
if rotation_mode == 'euler':
rot_mat = euler2mat(rot) # [B, 3, 3]
elif rotation_mode == 'quat':
rot_mat = quat2mat(rot) # [B, 3, 3]
transform_mat = torch.cat([rot_mat, translation], dim=2) # [B, 3, 4]
return transform_mat
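# Editor's sketch (not in the original): an all-zero 6DoF vector yields the
# identity transform [I | 0]:
#     pose_vec2mat(torch.zeros(1, 6))  # [1, 3, 4]; rotation ~I, translation 0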
def flow_warp(img, flow, padding_mode='zeros'):
"""
Inverse warp a source image to the target image plane.
Args:
img: the source image (where to sample pixels) -- [B, 3, H, W]
flow: flow map of the target image -- [B, 2, H, W]
Returns:
Source image warped to the target image plane
"""
check_sizes(img, 'img', 'BCHW')
check_sizes(flow, 'flow', 'B2HW')
bs, _, h, w = flow.size()
u = flow[:,0,:,:]
v = flow[:,1,:,:]
grid_x = Variable(torch.arange(0, w).view(1, 1, w).expand(1,h,w), requires_grad=False).type_as(u).expand_as(u) # [bs, H, W]
grid_y = Variable(torch.arange(0, h).view(1, h, 1).expand(1,h,w), requires_grad=False).type_as(v).expand_as(v) # [bs, H, W]
X = grid_x + u
Y = grid_y + v
X = 2*(X/(w-1.0) - 0.5)
Y = 2*(Y/(h-1.0) - 0.5)
grid_tf = torch.stack((X,Y), dim=3)
img_tf = torch.nn.functional.grid_sample(img, grid_tf, padding_mode=padding_mode)
return img_tf
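# Editor's sketch (not in the original): a zero flow field should reproduce
# the input image (exactly so on the older PyTorch releases this code
# targets, where grid_sample defaulted to align_corners=True):
#     img = torch.rand(1, 3, 4, 5)
#     flow_warp(img, torch.zeros(1, 2, 4, 5))  # ~img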
def pose2flow(depth, pose, intrinsics, intrinsics_inv, rotation_mode='euler', padding_mode=None):
"""
Converts pose parameters to rigid optical flow
"""
check_sizes(depth, 'depth', 'BHW')
check_sizes(pose, 'pose', 'B6')
check_sizes(intrinsics, 'intrinsics', 'B33')
check_sizes(intrinsics_inv, 'intrinsics', 'B33')
assert(intrinsics_inv.size() == intrinsics.size())
bs, h, w = depth.size()
grid_x = Variable(torch.arange(0, w).view(1, 1, w).expand(1,h,w), requires_grad=False).type_as(depth).expand_as(depth) # [bs, H, W]
grid_y = Variable(torch.arange(0, h).view(1, h, 1).expand(1,h,w), requires_grad=False).type_as(depth).expand_as(depth) # [bs, H, W]
cam_coords = pixel2cam(depth, intrinsics_inv) # [B,3,H,W]
pose_mat = pose_vec2mat(pose, rotation_mode) # [B,3,4]
# Get projection matrix for tgt camera frame to source pixel frame
proj_cam_to_src_pixel = intrinsics.bmm(pose_mat) # [B, 3, 4]
src_pixel_coords = cam2pixel(cam_coords, proj_cam_to_src_pixel[:,:,:3], proj_cam_to_src_pixel[:,:,-1:], padding_mode) # [B,H,W,2]
X = (w-1)*(src_pixel_coords[:,:,:,0]/2.0 + 0.5) - grid_x
Y = (h-1)*(src_pixel_coords[:,:,:,1]/2.0 + 0.5) - grid_y
return torch.stack((X,Y), dim=1)
def flow2oob(flow):
check_sizes(flow, 'flow', 'B2HW')
bs, _, h, w = flow.size()
u = flow[:,0,:,:]
v = flow[:,1,:,:]
grid_x = Variable(torch.arange(0, w).view(1, 1, w).expand(1,h,w), requires_grad=False).type_as(u).expand_as(u) # [bs, H, W]
grid_y = Variable(torch.arange(0, h).view(1, h, 1).expand(1,h,w), requires_grad=False).type_as(v).expand_as(v) # [bs, H, W]
X = grid_x + u
Y = grid_y + v
X = 2*(X/(w-1.0) - 0.5)
Y = 2*(Y/(h-1.0) - 0.5)
oob = (X.abs()>1).add(Y.abs()>1)>0
return oob
def occlusion_mask(grid, depth):
    check_sizes(grid, 'grid', 'BHW2')
check_sizes(depth, 'depth', 'BHW')
mask = grid
return mask
def inverse_warp(img, depth, pose, intrinsics, intrinsics_inv, rotation_mode='euler', padding_mode='zeros'):
"""
Inverse warp a source image to the target image plane.
Args:
img: the source image (where to sample pixels) -- [B, 3, H, W]
depth: depth map of the target image -- [B, H, W]
pose: 6DoF pose parameters from target to source -- [B, 6]
intrinsics: camera intrinsic matrix -- [B, 3, 3]
intrinsics_inv: inverse of the intrinsic matrix -- [B, 3, 3]
Returns:
Source image warped to the target image plane
"""
check_sizes(img, 'img', 'B3HW')
check_sizes(depth, 'depth', 'BHW')
check_sizes(pose, 'pose', 'B6')
check_sizes(intrinsics, 'intrinsics', 'B33')
check_sizes(intrinsics_inv, 'intrinsics', 'B33')
assert(intrinsics_inv.size() == intrinsics.size())
batch_size, _, img_height, img_width = img.size()
cam_coords = pixel2cam(depth, intrinsics_inv) # [B,3,H,W]
pose_mat = pose_vec2mat(pose, rotation_mode) # [B,3,4]
# Get projection matrix for tgt camera frame to source pixel frame
proj_cam_to_src_pixel = intrinsics.bmm(pose_mat) # [B, 3, 4]
src_pixel_coords = cam2pixel(cam_coords, proj_cam_to_src_pixel[:,:,:3], proj_cam_to_src_pixel[:,:,-1:], padding_mode) # [B,H,W,2]
projected_img = torch.nn.functional.grid_sample(img, src_pixel_coords, padding_mode=padding_mode)
return projected_img
| [((590, 634), 'torch.stack', 'torch.stack', (['(j_range, i_range, ones)'], {'dim': '(1)'}), '((j_range, i_range, ones), dim=1)\n', (601, 634), False, 'import torch\n'), ((3022, 3058), 'torch.stack', 'torch.stack', (['[X_norm, Y_norm]'], {'dim': '(2)'}), '([X_norm, Y_norm], dim=2)\n', (3033, 3058), False, 'import torch\n'), ((3539, 3551), 'torch.cos', 'torch.cos', (['z'], {}), '(z)\n', (3548, 3551), False, 'import torch\n'), ((3563, 3575), 'torch.sin', 'torch.sin', (['z'], {}), '(z)\n', (3572, 3575), False, 'import torch\n'), ((3797, 3809), 'torch.cos', 'torch.cos', (['y'], {}), '(y)\n', (3806, 3809), False, 'import torch\n'), ((3821, 3833), 'torch.sin', 'torch.sin', (['y'], {}), '(y)\n', (3830, 3833), False, 'import torch\n'), ((4003, 4015), 'torch.cos', 'torch.cos', (['x'], {}), '(x)\n', (4012, 4015), False, 'import torch\n'), ((4027, 4039), 'torch.sin', 'torch.sin', (['x'], {}), '(x)\n', (4036, 4039), False, 'import torch\n'), ((5697, 5737), 'torch.cat', 'torch.cat', (['[rot_mat, translation]'], {'dim': '(2)'}), '([rot_mat, translation], dim=2)\n', (5706, 5737), False, 'import torch\n'), ((6626, 6652), 'torch.stack', 'torch.stack', (['(X, Y)'], {'dim': '(3)'}), '((X, Y), dim=3)\n', (6637, 6652), False, 'import torch\n'), ((6665, 6737), 'torch.nn.functional.grid_sample', 'torch.nn.functional.grid_sample', (['img', 'grid_tf'], {'padding_mode': 'padding_mode'}), '(img, grid_tf, padding_mode=padding_mode)\n', (6696, 6737), False, 'import torch\n'), ((7992, 8018), 'torch.stack', 'torch.stack', (['(X, Y)'], {'dim': '(1)'}), '((X, Y), dim=1)\n', (8003, 8018), False, 'import torch\n'), ((10035, 10121), 'torch.nn.functional.grid_sample', 'torch.nn.functional.grid_sample', (['img', 'src_pixel_coords'], {'padding_mode': 'padding_mode'}), '(img, src_pixel_coords, padding_mode=\n padding_mode)\n', (10066, 10121), False, 'import torch\n'), ((3641, 3720), 'torch.stack', 'torch.stack', (['[cosz, -sinz, zeros, sinz, cosz, zeros, zeros, zeros, ones]'], {'dim': '(1)'}), '([cosz, -sinz, zeros, sinz, cosz, zeros, zeros, zeros, ones], dim=1)\n', (3652, 3720), False, 'import torch\n'), ((3846, 3925), 'torch.stack', 'torch.stack', (['[cosy, zeros, siny, zeros, ones, zeros, -siny, zeros, cosy]'], {'dim': '(1)'}), '([cosy, zeros, siny, zeros, ones, zeros, -siny, zeros, cosy], dim=1)\n', (3857, 3925), False, 'import torch\n'), ((4052, 4131), 'torch.stack', 'torch.stack', (['[ones, zeros, zeros, zeros, cosx, -sinx, zeros, sinx, cosx]'], {'dim': '(1)'}), '([ones, zeros, zeros, zeros, cosx, -sinx, zeros, sinx, cosx], dim=1)\n', (4063, 4131), False, 'import torch\n'), ((4937, 5125), 'torch.stack', 'torch.stack', (['[w2 + x2 - y2 - z2, 2 * xy - 2 * wz, 2 * wy + 2 * xz, 2 * wz + 2 * xy, w2 -\n x2 + y2 - z2, 2 * yz - 2 * wx, 2 * xz - 2 * wy, 2 * wx + 2 * yz, w2 -\n x2 - y2 + z2]'], {'dim': '(1)'}), '([w2 + x2 - y2 - z2, 2 * xy - 2 * wz, 2 * wy + 2 * xz, 2 * wz + \n 2 * xy, w2 - x2 + y2 - z2, 2 * yz - 2 * wx, 2 * xz - 2 * wy, 2 * wx + 2 *\n yz, w2 - x2 - y2 + z2], dim=1)\n', (4948, 5125), False, 'import torch\n'), ((536, 555), 'torch.ones', 'torch.ones', (['(1)', 'h', 'w'], {}), '(1, h, w)\n', (546, 555), False, 'import torch\n'), ((341, 359), 'torch.arange', 'torch.arange', (['(0)', 'h'], {}), '(0, h)\n', (353, 359), False, 'import torch\n'), ((440, 458), 'torch.arange', 'torch.arange', (['(0)', 'w'], {}), '(0, w)\n', (452, 458), False, 'import torch\n'), ((6280, 6298), 'torch.arange', 'torch.arange', (['(0)', 'w'], {}), '(0, w)\n', (6292, 6298), False, 'import torch\n'), ((6409, 6427), 'torch.arange', 'torch.arange', 
(['(0)', 'h'], {}), '(0, h)\n', (6421, 6427), False, 'import torch\n'), ((7208, 7226), 'torch.arange', 'torch.arange', (['(0)', 'w'], {}), '(0, w)\n', (7220, 7226), False, 'import torch\n'), ((7345, 7363), 'torch.arange', 'torch.arange', (['(0)', 'h'], {}), '(0, h)\n', (7357, 7363), False, 'import torch\n'), ((8175, 8193), 'torch.arange', 'torch.arange', (['(0)', 'w'], {}), '(0, w)\n', (8187, 8193), False, 'import torch\n'), ((8304, 8322), 'torch.arange', 'torch.arange', (['(0)', 'h'], {}), '(0, h)\n', (8316, 8322), False, 'import torch\n')] |
ramonsanabria/lingvo | lingvo/core/egdd.py | f38dc3801d36ed08a4117d4a66e6f1f10f76909d | # Lint as: python2, python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Exponentiated Gradient Delta-Delta optimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.training import optimizer
# pylint: enable=g-direct-tensorflow-import
class EGDD(optimizer.Optimizer):
"""A version of GD Momentum with adaptive gain and learning rate.
Exponentiated Gradient Delta-delta optimizer starts with a local gain of 1.0
for every weight and a lr_scale of 1.0 for all weights. The EGDD update rule
applies:
momentum <- mu * momentum + learning_rate * gain * grad
var <- var - lr_scale * momentum
The gain as well as the lr_scale are updated using the unnormalized
exponentiated gradient algorithm [KW97].
Reference: TBA
[KW97] Kivinen, J., & Warmuth, M. K. Exponentiated gradient versus gradient
descent for linear predictors. Information and Computation, 1997.
"""
def __init__(self,
learning_rate,
momentum,
beta=0.9,
gain_learning_rate=0.01,
scale_learning_rate=0.001,
initial_gain=1.0,
min_gain=1e-2,
max_gain=1e2,
initial_scale=1.0,
min_scale=1e-1,
max_scale=1e1,
use_directions=True,
use_signs=True,
name="EGDD"):
"""Construct a new EG-DD optimizer.
Args:
learning_rate: A `Tensor` or a floating point value. The learning rate.
momentum: A `Tensor` or a floating point value.
beta: `float` decay rate of the gradient EMA.
gain_learning_rate: `float` gain learning rate.
scale_learning_rate: `float` scale learning rate.
initial_gain: `float` initial gain.
min_gain: `float` minimum gain.
max_gain: `float` maximum gain,
initial_scale: `float` initial scale.
min_scale: `float` minimum learning rate scale.
max_scale: `float` maximum learning rate scale.
use_directions: `bool` whether to use directions only for scale updates.
use_signs: `bool` whether to use the signs for updating gains.
name: Optional name prefix for the operations created when applying
gradients.
"""
super(EGDD, self).__init__(False, name)
self._learning_rate = learning_rate
self._momentum = momentum
self._beta = beta
self._gain_learning_rate = gain_learning_rate
self._scale_learning_rate = scale_learning_rate
self._initial_gain = initial_gain
self._min_gain = min_gain
self._max_gain = max_gain
self._initial_scale = initial_scale
self._min_scale = min_scale
self._max_scale = max_scale
self._use_directions = use_directions
self._use_signs = use_signs
def _create_slots(self, var_list):
for v in var_list:
self._zeros_slot(v, "momentum", self._name)
self._zeros_slot(v, "gbar", self._name)
g_tensor = ops.convert_to_tensor(v)
gain_init = self._initial_gain * array_ops.ones_like(g_tensor)
_ = self._get_or_make_slot(v, self._initial_scale * array_ops.ones((1)),
"lr_scale", self._name)
_ = self._get_or_make_slot(v, gain_init, "gain", self._name)
_ = self._get_or_make_slot(v, array_ops.zeros((1)), "counter", self._name)
def _prepare(self):
learning_rate = self._call_if_callable(self._learning_rate)
self._learning_rate_tensor = ops.convert_to_tensor(
learning_rate, name="learning_rate")
momentum = self._call_if_callable(self._momentum)
self._momentum_tensor = ops.convert_to_tensor(momentum, name="momentum")
def _apply_dense(self, grad, var):
lr_scale = self.get_slot(var, "lr_scale")
momentum = self.get_slot(var, "momentum")
gbar = self.get_slot(var, "gbar")
gain = self.get_slot(var, "gain")
counter = self.get_slot(var, "counter")
counter_updated = state_ops.assign(counter, counter + 1)
# lr_scale update uses normalized grad and momentum to be independent of dim
normalized_grad = grad / (linalg_ops.norm(grad) + 1e-10)
normalized_momentum = momentum / (linalg_ops.norm(momentum) + 1e-10)
# Apply EG updates on lr_scale:
# grad_lr_scale = -inner_product(current_grad, old_momentum)
# lr_scale <- lr_scale * exp(-scale_learning_rate * grad_lr_scale)
lr_scale_unnormalized_updated = clip_ops.clip_by_value(
lr_scale * math_ops.exp(
self._scale_learning_rate * math_ops.reduce_sum(grad * momentum)),
self._min_scale, self._max_scale)
lr_scale_normalized_updated = clip_ops.clip_by_value(
lr_scale * math_ops.exp(self._scale_learning_rate * math_ops.reduce_sum(
normalized_grad * normalized_momentum)), self._min_scale,
self._max_scale)
lr_scale_updated = state_ops.assign(
lr_scale,
array_ops.where(self._use_directions, lr_scale_normalized_updated,
lr_scale_unnormalized_updated))
# remove the bias of zero initialization in gbar
corrected_gbar = gbar / (
1.0 - self._beta**math_ops.maximum(counter_updated - 1, 1))
# Apply EG updates on gain:
# grad_gain = - current_grad * old_gbar
# gain <- gain * exp(-gain_learning_rate * grad_gain)
gain_unnormalized_updated = clip_ops.clip_by_value(
gain * math_ops.exp(self._gain_learning_rate * grad * corrected_gbar),
self._min_gain, self._max_gain)
# Normalized update uses sign(grad) * sign(gbar) as a proxy for grad_gain.
gain_normalized_updated = clip_ops.clip_by_value(
gain * math_ops.exp(self._gain_learning_rate * math_ops.sign(grad) *
math_ops.sign(gbar)), self._min_gain,
self._max_gain)
gain_updated = state_ops.assign(
gain,
array_ops.where(self._use_signs, gain_normalized_updated,
gain_unnormalized_updated))
scaled_g = self._learning_rate_tensor * gain_updated * grad
with ops.control_dependencies([lr_scale_updated, scaled_g]):
momentum_updated = state_ops.assign(
momentum, self._momentum_tensor * momentum + scaled_g)
gbar_updated = state_ops.assign(
gbar, self._beta * gbar + (1.0 - self._beta) * grad)
with ops.control_dependencies([gbar_updated]):
return state_ops.assign_sub(var, lr_scale_updated * momentum_updated)
def _resource_apply_dense(self, grad, var):
return self._apply_dense(grad, var)
  # Sparse gradients are not handled currently; handling them is part of future work.
def _resource_apply_sparse(self, grad_values, var, grad_indices):
return control_flow_ops.no_op()
def _apply_sparse(self, grad, var):
return control_flow_ops.no_op()
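if __name__ == "__main__":
  # Editor's sketch (not part of the original optimizer): the core update
  # rule from the class docstring, replayed for one step in plain NumPy.
  # All names below are illustrative only; this does not touch the TF graph.
  import numpy as np
  lr, mu, lr_scale = 0.1, 0.9, 1.0
  gain = np.ones(3)              # per-weight gains (EG-updated in the real optimizer)
  momentum = np.zeros(3)
  var = np.array([1.0, -2.0, 0.5])
  grad = np.array([0.2, -0.1, 0.0])
  momentum = mu * momentum + lr * gain * grad
  var = var - lr_scale * momentum  # -> [0.98, -1.99, 0.5]
  print(var)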
| [((4580, 4638), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['learning_rate'], {'name': '"""learning_rate"""'}), "(learning_rate, name='learning_rate')\n", (4601, 4638), False, 'from tensorflow.python.framework import ops\n'), ((4730, 4778), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['momentum'], {'name': '"""momentum"""'}), "(momentum, name='momentum')\n", (4751, 4778), False, 'from tensorflow.python.framework import ops\n'), ((5051, 5089), 'tensorflow.python.ops.state_ops.assign', 'state_ops.assign', (['counter', '(counter + 1)'], {}), '(counter, counter + 1)\n', (5067, 5089), False, 'from tensorflow.python.ops import state_ops\n'), ((7753, 7777), 'tensorflow.python.ops.control_flow_ops.no_op', 'control_flow_ops.no_op', ([], {}), '()\n', (7775, 7777), False, 'from tensorflow.python.ops import control_flow_ops\n'), ((7828, 7852), 'tensorflow.python.ops.control_flow_ops.no_op', 'control_flow_ops.no_op', ([], {}), '()\n', (7850, 7852), False, 'from tensorflow.python.ops import control_flow_ops\n'), ((4082, 4106), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['v'], {}), '(v)\n', (4103, 4106), False, 'from tensorflow.python.framework import ops\n'), ((5993, 6094), 'tensorflow.python.ops.array_ops.where', 'array_ops.where', (['self._use_directions', 'lr_scale_normalized_updated', 'lr_scale_unnormalized_updated'], {}), '(self._use_directions, lr_scale_normalized_updated,\n lr_scale_unnormalized_updated)\n', (6008, 6094), False, 'from tensorflow.python.ops import array_ops\n'), ((6935, 7023), 'tensorflow.python.ops.array_ops.where', 'array_ops.where', (['self._use_signs', 'gain_normalized_updated', 'gain_unnormalized_updated'], {}), '(self._use_signs, gain_normalized_updated,\n gain_unnormalized_updated)\n', (6950, 7023), False, 'from tensorflow.python.ops import array_ops\n'), ((7118, 7172), 'tensorflow.python.framework.ops.control_dependencies', 'ops.control_dependencies', (['[lr_scale_updated, scaled_g]'], {}), '([lr_scale_updated, scaled_g])\n', (7142, 7172), False, 'from tensorflow.python.framework import ops\n'), ((7199, 7270), 'tensorflow.python.ops.state_ops.assign', 'state_ops.assign', (['momentum', '(self._momentum_tensor * momentum + scaled_g)'], {}), '(momentum, self._momentum_tensor * momentum + scaled_g)\n', (7215, 7270), False, 'from tensorflow.python.ops import state_ops\n'), ((7303, 7372), 'tensorflow.python.ops.state_ops.assign', 'state_ops.assign', (['gbar', '(self._beta * gbar + (1.0 - self._beta) * grad)'], {}), '(gbar, self._beta * gbar + (1.0 - self._beta) * grad)\n', (7319, 7372), False, 'from tensorflow.python.ops import state_ops\n'), ((7393, 7433), 'tensorflow.python.framework.ops.control_dependencies', 'ops.control_dependencies', (['[gbar_updated]'], {}), '([gbar_updated])\n', (7417, 7433), False, 'from tensorflow.python.framework import ops\n'), ((7448, 7510), 'tensorflow.python.ops.state_ops.assign_sub', 'state_ops.assign_sub', (['var', '(lr_scale_updated * momentum_updated)'], {}), '(var, lr_scale_updated * momentum_updated)\n', (7468, 7510), False, 'from tensorflow.python.ops import state_ops\n'), ((4146, 4175), 'tensorflow.python.ops.array_ops.ones_like', 'array_ops.ones_like', (['g_tensor'], {}), '(g_tensor)\n', (4165, 4175), False, 'from tensorflow.python.ops import array_ops\n'), ((4415, 4433), 'tensorflow.python.ops.array_ops.zeros', 'array_ops.zeros', (['(1)'], {}), '(1)\n', (4430, 4433), False, 'from tensorflow.python.ops import array_ops\n'), ((5202, 
5223), 'tensorflow.python.ops.linalg_ops.norm', 'linalg_ops.norm', (['grad'], {}), '(grad)\n', (5217, 5223), False, 'from tensorflow.python.ops import linalg_ops\n'), ((5271, 5296), 'tensorflow.python.ops.linalg_ops.norm', 'linalg_ops.norm', (['momentum'], {}), '(momentum)\n', (5286, 5296), False, 'from tensorflow.python.ops import linalg_ops\n'), ((6472, 6534), 'tensorflow.python.ops.math_ops.exp', 'math_ops.exp', (['(self._gain_learning_rate * grad * corrected_gbar)'], {}), '(self._gain_learning_rate * grad * corrected_gbar)\n', (6484, 6534), False, 'from tensorflow.python.ops import math_ops\n'), ((4234, 4251), 'tensorflow.python.ops.array_ops.ones', 'array_ops.ones', (['(1)'], {}), '(1)\n', (4248, 4251), False, 'from tensorflow.python.ops import array_ops\n'), ((6225, 6265), 'tensorflow.python.ops.math_ops.maximum', 'math_ops.maximum', (['(counter_updated - 1)', '(1)'], {}), '(counter_updated - 1, 1)\n', (6241, 6265), False, 'from tensorflow.python.ops import math_ops\n'), ((5611, 5647), 'tensorflow.python.ops.math_ops.reduce_sum', 'math_ops.reduce_sum', (['(grad * momentum)'], {}), '(grad * momentum)\n', (5630, 5647), False, 'from tensorflow.python.ops import math_ops\n'), ((5810, 5868), 'tensorflow.python.ops.math_ops.reduce_sum', 'math_ops.reduce_sum', (['(normalized_grad * normalized_momentum)'], {}), '(normalized_grad * normalized_momentum)\n', (5829, 5868), False, 'from tensorflow.python.ops import math_ops\n'), ((6814, 6833), 'tensorflow.python.ops.math_ops.sign', 'math_ops.sign', (['gbar'], {}), '(gbar)\n', (6827, 6833), False, 'from tensorflow.python.ops import math_ops\n'), ((6764, 6783), 'tensorflow.python.ops.math_ops.sign', 'math_ops.sign', (['grad'], {}), '(grad)\n', (6777, 6783), False, 'from tensorflow.python.ops import math_ops\n')] |
cloudspectatordevelopment/cudamat | examples/nn_cudamat.py | d26cf019a7855077b7d4344ae1a3202a156c5170 | # This file shows how to implement a single hidden layer neural network for
# performing binary classification on the GPU using cudamat.
from __future__ import division
import pdb
import time
import numpy as np
import cudamat as cm
from cudamat import learn as cl
import util
# initialize CUDA
cm.cublas_init()
# load data
util.load('mnist49.dat', globals())
# Put training data onto the GPU.
dat_train = dat_train/255.
dat_train = dat_train - (np.mean(dat_train, 1)+10**-8)[:, np.newaxis]
dev_train = cm.CUDAMatrix(dat_train)
dev_lbl = cm.CUDAMatrix(lbl_train)
# training parameters
epsilon = 0.01
momentum = 0.9
num_epochs = 30
batch_size = 128
num_batches = dat_train.shape[1]//batch_size
# model parameters
dim_in = dat_train.shape[0]
dim_out = 1
num_hid = 1024
# initialize weights
w_w1 = cm.CUDAMatrix(dim_in ** -0.5 * np.random.randn(dim_in, num_hid))
w_b1 = cm.CUDAMatrix(np.zeros((num_hid, 1)))
w_w2 = cm.CUDAMatrix(num_hid ** -0.5 * np.random.randn(num_hid, dim_out))
w_b2 = cm.CUDAMatrix(np.zeros((dim_out, 1)))
# initialize weight update matrices
wu_w1 = cm.empty(w_w1.shape).assign(0)
wu_b1 = cm.empty(w_b1.shape).assign(0)
wu_w2 = cm.empty(w_w2.shape).assign(0)
wu_b2 = cm.empty(w_b2.shape).assign(0)
# initialize temporary storage
h = cm.empty((num_hid, batch_size))
out = cm.empty((dim_out, batch_size))
delta = cm.empty((num_hid, batch_size))
# Train neural network.
start_time = time.time()
for epoch in range(num_epochs):
print("Epoch %i" % (epoch + 1))
err = []
for batch in range(num_batches):
# get current minibatch
inp = dev_train.slice(batch*batch_size,(batch + 1)*batch_size)
target = dev_lbl.slice(batch*batch_size,(batch + 1)*batch_size)
# forward pass
cm.dot(w_w1.T, inp, target = h)
h.add_col_vec(w_b1)
h.apply_sigmoid()
cm.dot(w_w2.T, h, target = out)
out.add_col_vec(w_b2)
out.apply_sigmoid()
# back prop errors
out.subtract(target) # compute error
# gradients for w_w2 and w_b2
wu_w2.add_dot(h, out.T, beta = momentum)
wu_b2.add_sums(out, axis = 1, beta = momentum)
# compute delta
cm.dot(w_w2, out, target = delta)
# delta = delta * h * (1 - h)
cl.mult_by_sigmoid_deriv(delta, h)
# gradients for w_w1 and w_b1
wu_w1.add_dot(inp, delta.T, beta = momentum)
wu_b1.add_sums(delta, axis = 1, beta = momentum)
# update weights
w_w1.subtract_mult(wu_w1, epsilon/batch_size)
w_b1.subtract_mult(wu_b1, epsilon/batch_size)
w_w2.subtract_mult(wu_w2, epsilon/batch_size)
w_b2.subtract_mult(wu_b2, epsilon/batch_size)
# calculate error on current minibatch
err.append(np.abs(out.asarray())>0.5)
print("Training misclassification rate: %f" % np.mean(err))
print("Time: %f" % (time.time() - start_time))
# Evaluate neural network on test data.
# Load test data onto the GPU.
dat_test = dat_test/255.
dat_test = dat_test - np.mean(dat_test, 1)[:, np.newaxis]
dev_test = cm.CUDAMatrix(dat_test)
dev_lbl = cm.CUDAMatrix(lbl_test)
# Initialize temporary storage.
h = cm.empty((num_hid, dat_test.shape[1]))
out = cm.empty((dim_out, dat_test.shape[1]))
# forward pass
cm.dot(w_w1.T, dev_test, target = h)
h.add_col_vec(w_b1)
h.apply_sigmoid()
cm.dot(w_w2.T, h, target = out)
out.add_col_vec(w_b2)
out.apply_sigmoid()
# compute error
out.subtract(dev_lbl)
print("Testing misclassification rate: %f" % np.mean(np.abs(out.asarray())>0.5))
cm.cublas_shutdown()
| [((296, 312), 'cudamat.cublas_init', 'cm.cublas_init', ([], {}), '()\n', (310, 312), True, 'import cudamat as cm\n'), ((506, 530), 'cudamat.CUDAMatrix', 'cm.CUDAMatrix', (['dat_train'], {}), '(dat_train)\n', (519, 530), True, 'import cudamat as cm\n'), ((541, 565), 'cudamat.CUDAMatrix', 'cm.CUDAMatrix', (['lbl_train'], {}), '(lbl_train)\n', (554, 565), True, 'import cudamat as cm\n'), ((1260, 1291), 'cudamat.empty', 'cm.empty', (['(num_hid, batch_size)'], {}), '((num_hid, batch_size))\n', (1268, 1291), True, 'import cudamat as cm\n'), ((1298, 1329), 'cudamat.empty', 'cm.empty', (['(dim_out, batch_size)'], {}), '((dim_out, batch_size))\n', (1306, 1329), True, 'import cudamat as cm\n'), ((1338, 1369), 'cudamat.empty', 'cm.empty', (['(num_hid, batch_size)'], {}), '((num_hid, batch_size))\n', (1346, 1369), True, 'import cudamat as cm\n'), ((1408, 1419), 'time.time', 'time.time', ([], {}), '()\n', (1417, 1419), False, 'import time\n'), ((3067, 3090), 'cudamat.CUDAMatrix', 'cm.CUDAMatrix', (['dat_test'], {}), '(dat_test)\n', (3080, 3090), True, 'import cudamat as cm\n'), ((3101, 3124), 'cudamat.CUDAMatrix', 'cm.CUDAMatrix', (['lbl_test'], {}), '(lbl_test)\n', (3114, 3124), True, 'import cudamat as cm\n'), ((3161, 3199), 'cudamat.empty', 'cm.empty', (['(num_hid, dat_test.shape[1])'], {}), '((num_hid, dat_test.shape[1]))\n', (3169, 3199), True, 'import cudamat as cm\n'), ((3206, 3244), 'cudamat.empty', 'cm.empty', (['(dim_out, dat_test.shape[1])'], {}), '((dim_out, dat_test.shape[1]))\n', (3214, 3244), True, 'import cudamat as cm\n'), ((3261, 3295), 'cudamat.dot', 'cm.dot', (['w_w1.T', 'dev_test'], {'target': 'h'}), '(w_w1.T, dev_test, target=h)\n', (3267, 3295), True, 'import cudamat as cm\n'), ((3338, 3367), 'cudamat.dot', 'cm.dot', (['w_w2.T', 'h'], {'target': 'out'}), '(w_w2.T, h, target=out)\n', (3344, 3367), True, 'import cudamat as cm\n'), ((3535, 3555), 'cudamat.cublas_shutdown', 'cm.cublas_shutdown', ([], {}), '()\n', (3553, 3555), True, 'import cudamat as cm\n'), ((888, 910), 'numpy.zeros', 'np.zeros', (['(num_hid, 1)'], {}), '((num_hid, 1))\n', (896, 910), True, 'import numpy as np\n'), ((1007, 1029), 'numpy.zeros', 'np.zeros', (['(dim_out, 1)'], {}), '((dim_out, 1))\n', (1015, 1029), True, 'import numpy as np\n'), ((833, 865), 'numpy.random.randn', 'np.random.randn', (['dim_in', 'num_hid'], {}), '(dim_in, num_hid)\n', (848, 865), True, 'import numpy as np\n'), ((951, 984), 'numpy.random.randn', 'np.random.randn', (['num_hid', 'dim_out'], {}), '(num_hid, dim_out)\n', (966, 984), True, 'import numpy as np\n'), ((1076, 1096), 'cudamat.empty', 'cm.empty', (['w_w1.shape'], {}), '(w_w1.shape)\n', (1084, 1096), True, 'import cudamat as cm\n'), ((1115, 1135), 'cudamat.empty', 'cm.empty', (['w_b1.shape'], {}), '(w_b1.shape)\n', (1123, 1135), True, 'import cudamat as cm\n'), ((1154, 1174), 'cudamat.empty', 'cm.empty', (['w_w2.shape'], {}), '(w_w2.shape)\n', (1162, 1174), True, 'import cudamat as cm\n'), ((1193, 1213), 'cudamat.empty', 'cm.empty', (['w_b2.shape'], {}), '(w_b2.shape)\n', (1201, 1213), True, 'import cudamat as cm\n'), ((1746, 1775), 'cudamat.dot', 'cm.dot', (['w_w1.T', 'inp'], {'target': 'h'}), '(w_w1.T, inp, target=h)\n', (1752, 1775), True, 'import cudamat as cm\n'), ((1842, 1871), 'cudamat.dot', 'cm.dot', (['w_w2.T', 'h'], {'target': 'out'}), '(w_w2.T, h, target=out)\n', (1848, 1871), True, 'import cudamat as cm\n'), ((2182, 2213), 'cudamat.dot', 'cm.dot', (['w_w2', 'out'], {'target': 'delta'}), '(w_w2, out, target=delta)\n', (2188, 2213), True, 'import cudamat as cm\n'), 
((2263, 2297), 'cudamat.learn.mult_by_sigmoid_deriv', 'cl.mult_by_sigmoid_deriv', (['delta', 'h'], {}), '(delta, h)\n', (2287, 2297), True, 'from cudamat import learn as cl\n'), ((3020, 3040), 'numpy.mean', 'np.mean', (['dat_test', '(1)'], {}), '(dat_test, 1)\n', (3027, 3040), True, 'import numpy as np\n'), ((449, 470), 'numpy.mean', 'np.mean', (['dat_train', '(1)'], {}), '(dat_train, 1)\n', (456, 470), True, 'import numpy as np\n'), ((2835, 2847), 'numpy.mean', 'np.mean', (['err'], {}), '(err)\n', (2842, 2847), True, 'import numpy as np\n'), ((2873, 2884), 'time.time', 'time.time', ([], {}), '()\n', (2882, 2884), False, 'import time\n')] |
znicholls/FAIR | fair/forcing/ozone_tr.py | 599c44ed140b069968ba7d1ca99de40218e42545 | from __future__ import division
import numpy as np
from ..constants import molwt
def regress(emissions,
beta=np.array([2.8249e-4, 1.0695e-4, -9.3604e-4, 99.7831e-4])):
"""Calculates tropospheric ozone forcing from precursor emissions.
Inputs: (nt x 40) emissions array
Keywords:
beta: 4-element array of regression coefficients of precursor
radiative efficiency, W m-2 (Mt yr-1)-1.
order is [CH4, CO, NMVOC, NOx]
Outputs:
tropospheric ozone ERF time series.
"""
if emissions.ndim==2:
em_CH4, em_CO, em_NMVOC, em_NOx = emissions[:,[3, 6, 7, 8]].T
else:
em_CH4, em_CO, em_NMVOC, em_NOx = emissions[[3, 6, 7, 8]]
F_CH4 = beta[0] * em_CH4
F_CO = beta[1] * em_CO
F_NMVOC = beta[2] * em_NMVOC
F_NOx = beta[3] * em_NOx
F = F_CH4 + F_CO + F_NMVOC + F_NOx
return F
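# Editor's note: worked example with illustrative numbers (not from the source).
# With em_CH4=300, em_CO=600, em_NMVOC=150, em_NOx=40 (Mt yr-1), the default betas give
# F = 2.8249e-4*300 + 1.0695e-4*600 - 9.3604e-4*150 + 99.7831e-4*40 ~= 0.41 W m-2.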
def cmip6_stevenson(emissions, C_CH4, T=0, feedback=False,
PI=np.array([722, 170, 10, 4.29]),
beta=np.array([1.77871043e-04, 5.80173377e-05, 2.09151270e-03,
1.94458719e-04])):
"""Calculates tropospheric ozone forcing from precursor emissions based on
Stevenson et al, 2013 10.5194/acp-13-3063-2013
Inputs:
emissions: (nt x 40) numpy array
C_CH4 : (nt) numpy array of methane concentrations, ppb
Keywords:
T : change in surface temperature since pre-industrial
feedback : True or False - include temperature feedback on ozone
forcing?
PI: : 4-element array of pre-industrial CH4 concentrations,
CO emissions, NMVOC emissions and NOx emissions
beta: : coefficients of how CH4 concentrations, CO emissions,
NMVOC emissions and NOx emissions affect forcing
Outputs:
tropospheric ozone ERF time series.
"""
# expand to 2D/1D if not already
if emissions.ndim == 1:
nspec = len(emissions)
emissions = emissions.reshape((1, nspec))
if np.isscalar(C_CH4):
        C_CH4 = np.full(emissions.shape[0], C_CH4)  # one value per timestep, so C_CH4[i] is valid for all i
year, em_CO, em_NMVOC, em_NOx = emissions[:,[0, 6, 7, 8]].T
nt = len(year)
F_CH4, F_CO, F_NMVOC, F_NOx = np.zeros((4,nt))
for i in range(nt):
F_CH4[i] = beta[0] * (C_CH4[i]-PI[0])
F_CO[i] = beta[1] * (em_CO[i]-PI[1])
F_NMVOC[i] = beta[2] * (em_NMVOC[i]-PI[2])
F_NOx[i] = beta[3] * (em_NOx[i]-PI[3])
# Include the effect of climate feedback? We fit a curve to the 2000, 2030
# and 2100 best estimates of feedback based on middle-of-the-road
# temperature projections.
def temperature_feedback(T, a=0.03189267, b=1.34966941, c=-0.03214807):
if T<=0:
return 0
else:
return a*np.exp(-b*T)+c
if feedback:
F = F_CH4 + F_CO + F_NMVOC + F_NOx + temperature_feedback(T)
else:
F = F_CH4 + F_CO + F_NMVOC + F_NOx
return F
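# Editor's note: illustrative magnitude check (numbers are assumptions, not from
# the source). With C_CH4 = 1800 ppb and the default pre-industrial 722 ppb, the
# CH4 term alone is F_CH4 = 1.77871043e-4 * (1800 - 722) ~= 0.192 W m-2.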
def stevenson(emissions, C_CH4, T=0, feedback=False, fix_pre1850_RCP=False,
PI=np.array([722, 170, 10, 4.29])):
"""Calculates tropospheric ozone forcing from precursor emissions based on
Stevenson et al, 2013 10.5194/acp-13-3063-2013
Inputs:
emissions: (nt x 40) numpy array
C_CH4 : (nt) numpy array of methane concentrations, ppb
Keywords:
T : change in surface temperature since pre-industrial
feedback : True or False - include temperature feedback on ozone
forcing?
fix_pre1850_RCP: Use different relationship for 1750/65 to 1850 based
on anthropogenic emissions from Skeie et al (2011)
for 1750 (atmos-chem-phys.net/11/11827/2011)
PI: : 4-element array of pre-industrial CH4 concentrations,
CO emissions, NMVOC emissions and NOx emissions
Outputs:
tropospheric ozone ERF time series.
"""
# expand to 2D/1D if not already
if emissions.ndim == 1:
nspec = len(emissions)
emissions = emissions.reshape((1, nspec))
if np.isscalar(C_CH4):
        C_CH4 = np.full(emissions.shape[0], C_CH4)  # one value per timestep, so C_CH4[i] is valid for all i
# numbers in denominator are 2000-1750 concs or emissions used in
# Stevenson and traced back to Lamarque et al 2010 for 2000
# https://www.atmos-chem-phys.net/10/7017/2010/
year, em_CO, em_NMVOC, em_NOx = emissions[:,[0, 6, 7, 8]].T
nt = len(year)
F_CH4, F_CO, F_NMVOC, F_NOx = np.zeros((4,nt))
for i in range(nt):
if year[i]>=1850 or fix_pre1850_RCP==False:
F_CH4[i] = 0.166/960 * (C_CH4[i]-PI[0])
F_CO[i] = 0.058/681.8 * (em_CO[i]-PI[1])
F_NMVOC[i] = 0.035/155.84 * (em_NMVOC[i]-PI[2])
F_NOx[i] = 0.119/61.16 * (em_NOx[i] *
molwt.NO / molwt.N - PI[3])
# The RCP scenarios give a negative forcing prior to ~1780. This is
# because the anthropogenic emissions are given to be zero in RCPs but
# not zero in the Skeie numbers which are used here. This can be fixed
# to give a more linear behaviour.
else:
F_CH4[i] = 0.166/960 * (C_CH4[i]-722)
F_CO[i] = 0.058/681.8 * 215.59 * em_CO[i] / 385.59
F_NMVOC[i] = 0.035/155.84 * 51.97 * em_NMVOC[i] / 61.97
F_NOx[i] = 0.119/61.16 * 7.31 * (em_NOx[i]
* molwt.NO / molwt.N) / 11.6
# Include the effect of climate feedback? We fit a curve to the 2000, 2030
# and 2100 best estimates of feedback based on middle-of-the-road
# temperature projections.
def temperature_feedback(T, a=0.03189267, b=1.34966941, c=-0.03214807):
if T<=0:
return 0
else:
return a*np.exp(-b*T)+c
if feedback:
F = F_CH4 + F_CO + F_NMVOC + F_NOx + temperature_feedback(T)
else:
F = F_CH4 + F_CO + F_NMVOC + F_NOx
return F
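# Editor's note: minimal usage sketch, not part of the original module; all
# numbers are made up. Run via `python -m fair.forcing.ozone_tr` so that the
# relative molwt import resolves.
if __name__ == '__main__':
    nt = 3
    demo_emissions = np.zeros((nt, 40))
    demo_emissions[:, 0] = [2000, 2050, 2100]  # year
    demo_emissions[:, 6] = 600.0               # CO emissions, Mt yr-1
    demo_emissions[:, 7] = 150.0               # NMVOC emissions, Mt yr-1
    demo_emissions[:, 8] = 40.0                # NOx emissions
    demo_C_CH4 = np.array([1750.0, 1850.0, 1950.0])  # ppb
    print(stevenson(demo_emissions, demo_C_CH4))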
| [((123, 182), 'numpy.array', 'np.array', (['[0.00028249, 0.00010695, -0.00093604, 0.00997831]'], {}), '([0.00028249, 0.00010695, -0.00093604, 0.00997831])\n', (131, 182), True, 'import numpy as np\n'), ((964, 994), 'numpy.array', 'np.array', (['[722, 170, 10, 4.29]'], {}), '([722, 170, 10, 4.29])\n', (972, 994), True, 'import numpy as np\n'), ((1006, 1078), 'numpy.array', 'np.array', (['[0.000177871043, 5.80173377e-05, 0.0020915127, 0.000194458719]'], {}), '([0.000177871043, 5.80173377e-05, 0.0020915127, 0.000194458719])\n', (1014, 1078), True, 'import numpy as np\n'), ((2076, 2094), 'numpy.isscalar', 'np.isscalar', (['C_CH4'], {}), '(C_CH4)\n', (2087, 2094), True, 'import numpy as np\n'), ((2247, 2264), 'numpy.zeros', 'np.zeros', (['(4, nt)'], {}), '((4, nt))\n', (2255, 2264), True, 'import numpy as np\n'), ((3069, 3099), 'numpy.array', 'np.array', (['[722, 170, 10, 4.29]'], {}), '([722, 170, 10, 4.29])\n', (3077, 3099), True, 'import numpy as np\n'), ((4159, 4177), 'numpy.isscalar', 'np.isscalar', (['C_CH4'], {}), '(C_CH4)\n', (4170, 4177), True, 'import numpy as np\n'), ((4517, 4534), 'numpy.zeros', 'np.zeros', (['(4, nt)'], {}), '((4, nt))\n', (4525, 4534), True, 'import numpy as np\n'), ((2112, 2122), 'numpy.ones', 'np.ones', (['(1)'], {}), '(1)\n', (2119, 2122), True, 'import numpy as np\n'), ((4195, 4205), 'numpy.ones', 'np.ones', (['(1)'], {}), '(1)\n', (4202, 4205), True, 'import numpy as np\n'), ((2815, 2829), 'numpy.exp', 'np.exp', (['(-b * T)'], {}), '(-b * T)\n', (2821, 2829), True, 'import numpy as np\n'), ((5798, 5812), 'numpy.exp', 'np.exp', (['(-b * T)'], {}), '(-b * T)\n', (5804, 5812), True, 'import numpy as np\n')] |
oarepo/oarepo-references-draft | tests/test_publish.py | 7e5ad4225c4ace9781d5de952c3765a65b33fd8e | import uuid
from invenio_indexer.api import RecordIndexer
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
from invenio_records_draft.api import RecordContext
from invenio_records_draft.proxies import current_drafts
from invenio_search import RecordsSearch, current_search, current_search_client
from sample.records.config import DraftRecord, PublishedRecord
from tests.helpers import disable_test_authenticated
def test_publish(app, db, schemas, mappings, prepare_es):
with disable_test_authenticated():
with db.session.begin_nested():
draft_uuid = uuid.uuid4()
rec1 = DraftRecord.create({
'id': '1',
'title': 'rec1'
}, id_=draft_uuid)
draft1_pid = PersistentIdentifier.create(
pid_type='drecid', pid_value='1', status=PIDStatus.REGISTERED,
object_type='rec', object_uuid=draft_uuid
)
published_uuid = uuid.uuid4()
published = PublishedRecord.create({
'id': '3',
'title': 'rec1a'
}, id_=published_uuid)
published_pid = PersistentIdentifier.create(
pid_type='recid', pid_value='3', status=PIDStatus.REGISTERED,
object_type='rec', object_uuid=published_uuid
)
draft2_uuid = uuid.uuid4()
rec2 = DraftRecord.create({
'id': '2',
'title': 'rec2',
'ref': {'$ref': 'http://localhost/drafts/records/1'},
'ref_pub': {'$ref': 'http://localhost/records/3'}
}, id_=draft2_uuid)
draft2_pid = PersistentIdentifier.create(
pid_type='drecid', pid_value='2', status=PIDStatus.REGISTERED,
object_type='rec', object_uuid=draft2_uuid
)
RecordIndexer().index(rec2)
current_search_client.indices.refresh()
current_search_client.indices.flush()
es_draft2 = RecordsSearch(index='draft-records-record-v1.0.0').\
get_record(draft2_pid.object_uuid).execute()
assert len(es_draft2.hits) == 1
current_drafts.publish(RecordContext(record=rec2, record_pid=draft2_pid))
published2_pid = PersistentIdentifier.get(pid_type='recid', pid_value=draft2_pid.pid_value)
pr = PublishedRecord.get_record(published2_pid.object_uuid)
assert pr.dumps() == {
'$schema': 'https://localhost/schemas/records/record-v1.0.0.json',
'id': '2',
'ref': {'$ref': 'http://localhost/records/1'},
'ref_pub': {'$ref': 'http://localhost/records/3'},
'title': 'rec2'
}
current_search_client.indices.refresh()
current_search_client.indices.flush()
es_published2 = RecordsSearch(index='records-record-v1.0.0').\
get_record(published2_pid.object_uuid).execute()
assert len(es_published2.hits) == 1
es_published2 = es_published2.hits[0].to_dict()
es_published2.pop('_created')
es_published2.pop('_updated')
assert es_published2 == {
'$schema': 'https://localhost/schemas/records/record-v1.0.0.json',
'id': '2',
'ref': {'published': '1'},
'ref_pub': {'published': '3'},
'title': 'rec2'}
es_draft2 = RecordsSearch(index='draft-records-record-v1.0.0').\
get_record(draft2_pid.object_uuid).execute()
assert len(es_draft2.hits) == 0
| [((501, 529), 'tests.helpers.disable_test_authenticated', 'disable_test_authenticated', ([], {}), '()\n', (527, 529), False, 'from tests.helpers import disable_test_authenticated\n'), ((1906, 1945), 'invenio_search.current_search_client.indices.refresh', 'current_search_client.indices.refresh', ([], {}), '()\n', (1943, 1945), False, 'from invenio_search import RecordsSearch, current_search, current_search_client\n'), ((1954, 1991), 'invenio_search.current_search_client.indices.flush', 'current_search_client.indices.flush', ([], {}), '()\n', (1989, 1991), False, 'from invenio_search import RecordsSearch, current_search, current_search_client\n'), ((2272, 2346), 'invenio_pidstore.models.PersistentIdentifier.get', 'PersistentIdentifier.get', ([], {'pid_type': '"""recid"""', 'pid_value': 'draft2_pid.pid_value'}), "(pid_type='recid', pid_value=draft2_pid.pid_value)\n", (2296, 2346), False, 'from invenio_pidstore.models import PersistentIdentifier, PIDStatus\n'), ((2360, 2414), 'sample.records.config.PublishedRecord.get_record', 'PublishedRecord.get_record', (['published2_pid.object_uuid'], {}), '(published2_pid.object_uuid)\n', (2386, 2414), False, 'from sample.records.config import DraftRecord, PublishedRecord\n'), ((2717, 2756), 'invenio_search.current_search_client.indices.refresh', 'current_search_client.indices.refresh', ([], {}), '()\n', (2754, 2756), False, 'from invenio_search import RecordsSearch, current_search, current_search_client\n'), ((2765, 2802), 'invenio_search.current_search_client.indices.flush', 'current_search_client.indices.flush', ([], {}), '()\n', (2800, 2802), False, 'from invenio_search import RecordsSearch, current_search, current_search_client\n'), ((596, 608), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (606, 608), False, 'import uuid\n'), ((629, 693), 'sample.records.config.DraftRecord.create', 'DraftRecord.create', (["{'id': '1', 'title': 'rec1'}"], {'id_': 'draft_uuid'}), "({'id': '1', 'title': 'rec1'}, id_=draft_uuid)\n", (647, 693), False, 'from sample.records.config import DraftRecord, PublishedRecord\n'), ((765, 903), 'invenio_pidstore.models.PersistentIdentifier.create', 'PersistentIdentifier.create', ([], {'pid_type': '"""drecid"""', 'pid_value': '"""1"""', 'status': 'PIDStatus.REGISTERED', 'object_type': '"""rec"""', 'object_uuid': 'draft_uuid'}), "(pid_type='drecid', pid_value='1', status=\n PIDStatus.REGISTERED, object_type='rec', object_uuid=draft_uuid)\n", (792, 903), False, 'from invenio_pidstore.models import PersistentIdentifier, PIDStatus\n'), ((975, 987), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (985, 987), False, 'import uuid\n'), ((1012, 1085), 'sample.records.config.PublishedRecord.create', 'PublishedRecord.create', (["{'id': '3', 'title': 'rec1a'}"], {'id_': 'published_uuid'}), "({'id': '3', 'title': 'rec1a'}, id_=published_uuid)\n", (1034, 1085), False, 'from sample.records.config import DraftRecord, PublishedRecord\n'), ((1160, 1301), 'invenio_pidstore.models.PersistentIdentifier.create', 'PersistentIdentifier.create', ([], {'pid_type': '"""recid"""', 'pid_value': '"""3"""', 'status': 'PIDStatus.REGISTERED', 'object_type': '"""rec"""', 'object_uuid': 'published_uuid'}), "(pid_type='recid', pid_value='3', status=\n PIDStatus.REGISTERED, object_type='rec', object_uuid=published_uuid)\n", (1187, 1301), False, 'from invenio_pidstore.models import PersistentIdentifier, PIDStatus\n'), ((1370, 1382), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1380, 1382), False, 'import uuid\n'), ((1402, 1580), 
'sample.records.config.DraftRecord.create', 'DraftRecord.create', (["{'id': '2', 'title': 'rec2', 'ref': {'$ref':\n 'http://localhost/drafts/records/1'}, 'ref_pub': {'$ref':\n 'http://localhost/records/3'}}"], {'id_': 'draft2_uuid'}), "({'id': '2', 'title': 'rec2', 'ref': {'$ref':\n 'http://localhost/drafts/records/1'}, 'ref_pub': {'$ref':\n 'http://localhost/records/3'}}, id_=draft2_uuid)\n", (1420, 1580), False, 'from sample.records.config import DraftRecord, PublishedRecord\n'), ((1676, 1815), 'invenio_pidstore.models.PersistentIdentifier.create', 'PersistentIdentifier.create', ([], {'pid_type': '"""drecid"""', 'pid_value': '"""2"""', 'status': 'PIDStatus.REGISTERED', 'object_type': '"""rec"""', 'object_uuid': 'draft2_uuid'}), "(pid_type='drecid', pid_value='2', status=\n PIDStatus.REGISTERED, object_type='rec', object_uuid=draft2_uuid)\n", (1703, 1815), False, 'from invenio_pidstore.models import PersistentIdentifier, PIDStatus\n'), ((2195, 2244), 'invenio_records_draft.api.RecordContext', 'RecordContext', ([], {'record': 'rec2', 'record_pid': 'draft2_pid'}), '(record=rec2, record_pid=draft2_pid)\n', (2208, 2244), False, 'from invenio_records_draft.api import RecordContext\n'), ((1869, 1884), 'invenio_indexer.api.RecordIndexer', 'RecordIndexer', ([], {}), '()\n', (1882, 1884), False, 'from invenio_indexer.api import RecordIndexer\n'), ((2013, 2063), 'invenio_search.RecordsSearch', 'RecordsSearch', ([], {'index': '"""draft-records-record-v1.0.0"""'}), "(index='draft-records-record-v1.0.0')\n", (2026, 2063), False, 'from invenio_search import RecordsSearch, current_search, current_search_client\n'), ((2828, 2872), 'invenio_search.RecordsSearch', 'RecordsSearch', ([], {'index': '"""records-record-v1.0.0"""'}), "(index='records-record-v1.0.0')\n", (2841, 2872), False, 'from invenio_search import RecordsSearch, current_search, current_search_client\n'), ((3380, 3430), 'invenio_search.RecordsSearch', 'RecordsSearch', ([], {'index': '"""draft-records-record-v1.0.0"""'}), "(index='draft-records-record-v1.0.0')\n", (3393, 3430), False, 'from invenio_search import RecordsSearch, current_search, current_search_client\n')] |
xiaotiansf/tiscamera | examples/ROS/tiscamera.py | 8451449788f7429621240e2bbce065d65c5ac10e | import os
import subprocess
from collections import namedtuple
import gi
gi.require_version("Gst", "1.0")
gi.require_version("Tcam", "0.1")
from gi.repository import Tcam, Gst, GLib, GObject
DeviceInfo = namedtuple("DeviceInfo", "status name identifier connection_type")
CameraProperty = namedtuple("CameraProperty", "status value min max default step type flags category group")
# Disable pylint false positives
# pylint:disable=E0712
class Camera:
""""""
def __init__(self, serial, width, height, framerate, color, liveview):
""" Constructor.
Creates the sink pipeline and the source pipeline.
:param serial: Serial number of the camera to use.
:param width: Width of the video format, e.g. 640, 1920 etc,
:param height: Height of the video format, e.g. 480, 1080
:param framerate: Numerator of the frame rate, e.g. 15, 30, 60 etc
:param color: If True, color is used, else gray scale
        :param liveview: If True, a separate live-view window is opened.
"""
Gst.init([])
self.height = height
self.width = width
self.sample = None
self.samplelocked = False
self.newsample = False
self.pid = -1
self.__remove_tmp_file()
pixelformat = "BGRx"
if not color:
pixelformat = "GRAY8"
if liveview:
p = 'tcambin serial="%s" name=source ! video/x-raw,format=%s,width=%d,height=%d,framerate=%d/1' % (serial, pixelformat, width, height, framerate,)
p += ' ! tee name=t'
p += ' t. ! queue ! videoconvert ! video/x-raw,format=RGB ,width=%d,height=%d,framerate=%d/1! shmsink socket-path=/tmp/ros_mem' % (width, height, framerate,)
p += ' t. ! queue ! videoconvert ! ximagesink'
else:
p = 'tcambin serial="%s" name=source ! video/x-raw,format=%s,width=%d,height=%d,framerate=%d/1' % (
serial, pixelformat, width, height, framerate,)
p += ' ! videoconvert ! video/x-raw,format=RGB ,width=%d,height=%d,framerate=%d/1! shmsink socket-path=/tmp/ros_mem' % (width, height, framerate,)
print(p)
try:
self.pipeline = Gst.parse_launch(p)
except GLib.Error as error:
raise RuntimeError("Error creating pipeline: {0}".format(error))
self.pipeline.set_state(Gst.State.READY)
if self.pipeline.get_state(10 * Gst.SECOND)[0] != Gst.StateChangeReturn.SUCCESS:
raise RuntimeError("Failed to start video stream.")
# Query a pointer to our source, so we can set properties.
self.source = self.pipeline.get_by_name("source")
# Create gscam_config variable with content
gscam = 'shmsrc socket-path=/tmp/ros_mem ! video/x-raw-rgb, width=%d,height=%d,framerate=%d/1' % (width, height, framerate,)
gscam += ',bpp=24,depth=24,blue_mask=16711680, green_mask=65280, red_mask=255 ! ffmpegcolorspace'
os.environ["GSCAM_CONFIG"] = gscam
def start_pipeline(self):
""" Starts the camera sink pipeline and the rosrun process
:return:
"""
try:
self.pipeline.set_state(Gst.State.PLAYING)
self.pid = subprocess.Popen(["rosrun", "gscam", "gscam"])
except GLib.Error as error:
print("Error starting pipeline: {0}".format(error))
raise
def stop_pipeline(self):
""" Stops the camera pipeline. Should also kill the rosrun process, but is not implemented
:return:
"""
self.pipeline.set_state(Gst.State.PAUSED)
self.pipeline.set_state(Gst.State.READY)
self.pipeline.set_state(Gst.State.NULL)
self.pid.kill()
def list_properties(self):
""" Helper function. List available properties
:return:
"""
for name in self.source.get_tcam_property_names():
print(name)
def get_property(self, property_name):
""" Return the value of the passed property.
Use list_properties for querying names of available properties.
:param property_name: Name of the property, e.g. Gain, Exposure, Gain Auto.
:return: Current value of the property.
"""
try:
return CameraProperty(*self.source.get_tcam_property(property_name))
except GLib.Error as error:
raise RuntimeError("Error get Property {0}: {1}", property_name, format(error))
def set_property(self, property_name, value):
""" Set a property. Use list_properties for querying names of available properties.
:param property_name: Name of the property, e.g. Gain, Exposure, Gain Auto.
:param value: Value to be set.
:return:
"""
try:
self.source.set_tcam_property(property_name, value)
except GLib.Error as error:
raise RuntimeError("Error set Property {0}: {1}", property_name, format(error))
def push_property(self, property_name):
""" Simplify push properties, like Auto Focus one push
:param property_name: Name of the property to be pushed
:return:
"""
try:
self.source.set_tcam_property(property_name, True)
except GLib.Error as error:
raise RuntimeError("Error set Property {0}: {1}", property_name, format(error))
def __remove_tmp_file(self):
""" Delete the memory file used by the pipelines to share memory
:return:
"""
try:
os.remove('/tmp/ros_mem')
except OSError:
pass
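# Editor's note: minimal usage sketch, not part of the original module. Running
# it needs a connected TIS camera, GStreamer and a ROS environment; the serial
# number below is a placeholder.
if __name__ == '__main__':
    cam = Camera("00000000", 640, 480, 30, color=True, liveview=False)
    cam.list_properties()
    cam.start_pipeline()
    cam.stop_pipeline()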
| [((74, 106), 'gi.require_version', 'gi.require_version', (['"""Gst"""', '"""1.0"""'], {}), "('Gst', '1.0')\n", (92, 106), False, 'import gi\n'), ((107, 140), 'gi.require_version', 'gi.require_version', (['"""Tcam"""', '"""0.1"""'], {}), "('Tcam', '0.1')\n", (125, 140), False, 'import gi\n'), ((207, 273), 'collections.namedtuple', 'namedtuple', (['"""DeviceInfo"""', '"""status name identifier connection_type"""'], {}), "('DeviceInfo', 'status name identifier connection_type')\n", (217, 273), False, 'from collections import namedtuple\n'), ((291, 386), 'collections.namedtuple', 'namedtuple', (['"""CameraProperty"""', '"""status value min max default step type flags category group"""'], {}), "('CameraProperty',\n 'status value min max default step type flags category group')\n", (301, 386), False, 'from collections import namedtuple\n'), ((1041, 1053), 'gi.repository.Gst.init', 'Gst.init', (['[]'], {}), '([])\n', (1049, 1053), False, 'from gi.repository import Tcam, Gst, GLib, GObject\n'), ((2194, 2213), 'gi.repository.Gst.parse_launch', 'Gst.parse_launch', (['p'], {}), '(p)\n', (2210, 2213), False, 'from gi.repository import Tcam, Gst, GLib, GObject\n'), ((3210, 3256), 'subprocess.Popen', 'subprocess.Popen', (["['rosrun', 'gscam', 'gscam']"], {}), "(['rosrun', 'gscam', 'gscam'])\n", (3226, 3256), False, 'import subprocess\n'), ((5513, 5538), 'os.remove', 'os.remove', (['"""/tmp/ros_mem"""'], {}), "('/tmp/ros_mem')\n", (5522, 5538), False, 'import os\n')] |
bertrand-caron/cv_blog_flask | helpers/config.py | ce779db31805f0b1a7bbc9a6f09a7d3fe1af74b2 | from typing import Dict, Any
from yaml import safe_load
def get_config() -> Dict[str, Any]:
try:
        with open('config/config.yml') as f:
            return safe_load(f.read())
except Exception as e:
raise Exception('ERROR: Missing config/config.yml file.') from e
CONFIG = get_config()
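# Editor's note: illustrative config/config.yml layout (keys are assumptions,
# not taken from the source repository):
#   site_name: My CV blog
#   debug: false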
| [] |
falckt/raman | raman/unmixing.py | 8f9fae0e211dd49cebaba98e71787bb663be8fcf | # Author: Tillmann Falck <[email protected]>
#
# License: BSD 3 clause
#
# SPDX-License-Identifier: BSD-3-Clause
import collections.abc
from itertools import product
import cvxpy as cp
import numpy as np
def sunsal_tv(A, Y, lambda_1, lambda_tv, sweep='prod', tv_type='iso', additional_constraint='none'):
r"""
Sparse unmixing via variable splitting and augmented Lagrangian and total variation (SUnSAL-TV)
solves the following optimization problem
min || Y - A * X ||_F + lambda_1 || X ||_1 + lambda_TV || X ||_TV
X
subject to X >= 0 # if additional_constraint is 'positive'
sum(X, axis=0) == 1 # if additional_constraint is 'sum_to_one'
with
|| X ||_1 = \sum_i | x_i | # for a flattened array X
|| X ||_TV = \sum_i (\sum_j |X_ij|^p)^(1/p) # p = 1 for non-isotropic and p = 2 for isotropic
Parameters
----------
A: array - N x L, spectral library, where L is the number of library elements and N the number of points in each spectrum
    Y: array - N x m_1 x ... x m_d, target spectra, m_1, ..., m_d are spatial dimensions
lambda_1: float - regularization constant for elementwise sparsity inducing term
lambda_TV: float - regularization constant for TV regularizer (sparse changes along spatial dimensions)
    sweep: {'prod', 'zip'} - how iterables of lambda_1/lambda_tv values are combined: Cartesian product ('prod') or pairwise ('zip')
tv_type: {'iso', 'non-iso'} - type of total variation norm, isotropic or non-isotropic
additional_constraint: {'none', 'positive', 'sum_to_one'} - additional constraint on solution
Returns
-------
X: array - L x m_1 x ... x m_d
References
----------
[1] M. Iordache, J. M. Bioucas-Dias and A. Plaza, "Total Variation Spatial Regularization for
Sparse Hyperspectral Unmixing," in IEEE Transactions on Geoscience and Remote Sensing,
vol. 50, no. 11, pp. 4484-4502, Nov. 2012.
[2] Matlab implementation, downloaded from
https://github.com/ricardoborsoi/MUA_SparseUnmixing/blob/57802d5b2f77649fb32c2e4c75258f8d91084f7d/sunsal_tv.m
[3] https://dsp.stackexchange.com/questions/57977/isotropic-and-anisotropic-in-the-total-variation-framework
"""
# get dimensions
num_spectra, lib_size = A.shape
sample_dims = Y.shape[1:]
assert Y.shape[0] == num_spectra, 'Size of library does not size of target variables'
# reshape Y from [spectra x Xpos x Ypos x ...] --> [spectra x (Xpos * Ypos * ...)]
Y = Y.reshape((num_spectra, -1))
num_samples = Y.shape[1]
# create optimization variables
positive_solution = (additional_constraint == 'positive')
X = cp.Variable((lib_size, num_samples), nonneg=positive_solution)
    p_lambda_1 = cp.Parameter(nonneg=True)   # scalar parameters keep the objective scalar
    p_lambda_tv = cp.Parameter(nonneg=True)
# calculate first differences in each direction
idx = np.r_[:num_samples]
idx_s = idx.reshape(sample_dims)
differences = []
for n, d in enumerate(sample_dims):
ia = np.ravel(idx_s.take(indices=np.r_[np.r_[1:d], 0], axis=n))
ib = np.ravel(idx_s.take(indices=np.r_[:d], axis=n))
differences.append(X[:, ia] - X[:, ib])
# compute TV norm
if tv_type == 'iso':
        # per-entry 2-norm across the spatial-difference directions; note that
        # x*x on two cvxpy expressions is not DCP-valid, hence the axis-wise norm
        D = cp.vstack([cp.vec(x) for x in differences])
        tv = cp.sum(cp.norm(D, p=2, axis=0))
elif tv_type == 'non-iso':
D = [cp.sum(cp.abs(x)) for x in differences]
tv = cp.sum(D)
else:
raise ValueError(f'TV norm type `{tv_type}` is not defined')
# define object function
obj = cp.norm(Y - A @ X, p='fro') + p_lambda_1 * cp.pnorm(X, p=1) + p_lambda_tv * tv
# constraints
constr = []
if additional_constraint == 'sum_to_one':
constr.append(cp.sum(X, axis=0) == 1)
    # optimization problem
prob = cp.Problem(cp.Minimize(obj), constr)
# init parameter sweep
# if lambda_1 and lambda_tv are scalar return result
# otherwise return a dict with (lambda_1, lambda_tv): result
lambda_scalar = True
    if not isinstance(lambda_1, collections.abc.Iterable):
lambda_1 = [lambda_1]
else:
lambda_scalar = False
    if not isinstance(lambda_tv, collections.abc.Iterable):
lambda_tv = [lambda_tv]
else:
lambda_scalar = False
if sweep == 'prod':
l_iter = product(lambda_1, lambda_tv)
elif sweep == 'zip':
l_iter = zip(lambda_1, lambda_tv)
else:
raise ValueError(f'Parameter sweep `{sweep}` not supported')
results = {}
for l_1, l_tv in l_iter:
p_lambda_1.value = l_1
p_lambda_tv.value = l_tv
# solution
prob.solve(solver=cp.SCS, verbose=True)
results[(l_1, l_tv)] = X.value.reshape((lib_size, ) + sample_dims)
if lambda_scalar:
return results.popitem()[1]
else:
return results
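# Editor's note: synthetic end-to-end sketch, not part of the original module;
# all sizes and values are made up, and it needs cvxpy's SCS solver installed.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    A_demo = rng.random((50, 3))               # N x L spectral library
    X_true = np.zeros((3, 4, 4))                # L x 4 x 4 abundances
    X_true[0] = 1.0
    Y_demo = np.tensordot(A_demo, X_true, axes=1)  # N x 4 x 4 target spectra
    X_hat = sunsal_tv(A_demo, Y_demo, lambda_1=1e-3, lambda_tv=1e-3)
    print(X_hat.shape)                         # -> (3, 4, 4)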
| [((2598, 2660), 'cvxpy.Variable', 'cp.Variable', (['(lib_size, num_samples)'], {'nonneg': 'positive_solution'}), '((lib_size, num_samples), nonneg=positive_solution)\n', (2609, 2660), True, 'import cvxpy as cp\n'), ((2678, 2706), 'cvxpy.Parameter', 'cp.Parameter', (['(1)'], {'nonneg': '(True)'}), '(1, nonneg=True)\n', (2690, 2706), True, 'import cvxpy as cp\n'), ((2725, 2753), 'cvxpy.Parameter', 'cp.Parameter', (['(1)'], {'nonneg': '(True)'}), '(1, nonneg=True)\n', (2737, 2753), True, 'import cvxpy as cp\n'), ((3249, 3258), 'cvxpy.sum', 'cp.sum', (['D'], {}), '(D)\n', (3255, 3258), True, 'import cvxpy as cp\n'), ((3739, 3755), 'cvxpy.Minimize', 'cp.Minimize', (['obj'], {}), '(obj)\n', (3750, 3755), True, 'import cvxpy as cp\n'), ((4236, 4264), 'itertools.product', 'product', (['lambda_1', 'lambda_tv'], {}), '(lambda_1, lambda_tv)\n', (4243, 4264), False, 'from itertools import product\n'), ((3225, 3234), 'cvxpy.sum', 'cp.sum', (['D'], {}), '(D)\n', (3231, 3234), True, 'import cvxpy as cp\n'), ((3356, 3365), 'cvxpy.sum', 'cp.sum', (['D'], {}), '(D)\n', (3362, 3365), True, 'import cvxpy as cp\n'), ((3485, 3512), 'cvxpy.norm', 'cp.norm', (['(Y - A @ X)'], {'p': '"""fro"""'}), "(Y - A @ X, p='fro')\n", (3492, 3512), True, 'import cvxpy as cp\n'), ((3528, 3544), 'cvxpy.pnorm', 'cp.pnorm', (['X'], {'p': '(1)'}), '(X, p=1)\n', (3536, 3544), True, 'import cvxpy as cp\n'), ((3667, 3684), 'cvxpy.sum', 'cp.sum', (['X'], {'axis': '(0)'}), '(X, axis=0)\n', (3673, 3684), True, 'import cvxpy as cp\n'), ((3310, 3319), 'cvxpy.abs', 'cp.abs', (['x'], {}), '(x)\n', (3316, 3319), True, 'import cvxpy as cp\n')] |
ucsb-cs48-w19/6pm-stock-trading | test_stock.py | daf70b684c15182753d8ca9b820238cf9cd5b75c | import pytest
def test_stock():
assert(0 == 0)
| [] |
damirishpreet/TM1py | TM1py/Objects/ElementAttribute.py | 8482d0787fd5a9e5eb05a0288c41b75fc1fc93ac | # -*- coding: utf-8 -*-
import json
from TM1py.Objects.TM1Object import TM1Object
class ElementAttribute(TM1Object):
""" Abstraction of TM1 Element Attributes
"""
valid_types = ['NUMERIC', 'STRING', 'ALIAS']
def __init__(self, name, attribute_type):
self.name = name
self.attribute_type = attribute_type
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def attribute_type(self):
return self._attribute_type
@attribute_type.setter
def attribute_type(self, value):
if value.upper() in ElementAttribute.valid_types:
self._attribute_type = value
else:
raise Exception('{} not a valid Attribute Type.'.format(value))
@property
def body_as_dict(self):
return {"Name": self._name, "Type": self._attribute_type}
@property
def body(self):
return json.dumps(self.body_as_dict, ensure_ascii=False)
@classmethod
def from_json(cls, element_attribute_as_json):
return cls.from_dict(json.loads(element_attribute_as_json))
@classmethod
def from_dict(cls, element_attribute_as_dict):
return cls(name=element_attribute_as_dict['Name'],
attribute_type=element_attribute_as_dict['Type'])
def __eq__(self, other):
return self.name == other
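# Editor's note: round-trip sketch, not part of the original module; the
# attribute name below is arbitrary.
if __name__ == '__main__':
    ea = ElementAttribute.from_json('{"Name": "Currency", "Type": "STRING"}')
    print(ea.body)  # -> {"Name": "Currency", "Type": "STRING"}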
| [((973, 1022), 'json.dumps', 'json.dumps', (['self.body_as_dict'], {'ensure_ascii': '(False)'}), '(self.body_as_dict, ensure_ascii=False)\n', (983, 1022), False, 'import json\n'), ((1121, 1158), 'json.loads', 'json.loads', (['element_attribute_as_json'], {}), '(element_attribute_as_json)\n', (1131, 1158), False, 'import json\n')] |
MaherClinc/stockly-bs | account.py | 4a2c5741673b85bee9100afef0b404520cb10b5d | from sqlalchemy import exc
from sqlalchemy.sql.expression import func
from models import Watchlist, Portfolio, Activity
from app import db
import metric
def buy_stock(ticker, units):
unit_price = metric.get_price(ticker)
total_price = units * unit_price
max_id = db.session.query(func.max(Activity.activity_id)).scalar()
if max_id is None:
old_buying_power = 100000
else:
old_buying_power = Activity.query.filter(Activity.activity_id == max_id).all()[0].buying_power
new_buying_power = old_buying_power - total_price
if new_buying_power > 0:
try:
db.session.add( Activity(ticker=ticker,
units=units, order_type= "b", unit_price=unit_price, total_price=total_price, buying_power=new_buying_power) )
update_portfolio_buy(ticker, units, total_price)
db.session.commit()
return { 'status': True, 'error': None }
except exc.SQLAlchemyError:
return { 'status': False, 'error': 'database error' }
else:
return { 'status': False, 'error': 'Insufficient Funds' }
def sell_stock(ticker, units):
unit_price = metric.get_price(ticker)
row = Portfolio.query.filter(Portfolio.ticker == ticker).all()
if len(row):
available_units = int(row[0].total_units)
units = min(available_units, units) if units >= 1 else int(available_units*units)
total_price = units * unit_price
max_id = db.session.query(func.max(Activity.activity_id)).scalar()
old_buying_power = Activity.query.filter(Activity.activity_id == max_id).all()[0].buying_power
new_buying_power = old_buying_power + total_price
try:
db.session.add( Activity(ticker=ticker,
units=units, order_type= "s", unit_price=unit_price, total_price=total_price, buying_power=new_buying_power) )
update_portfolio_sell(ticker, units, total_price)
db.session.commit()
return { 'status': True, 'amount': units, 'error': None }
except exc.SQLAlchemyError:
return { 'status': False, 'error': 'database error' }
else:
return { 'status': False, 'error': 'No Stock by this name' }
def update_portfolio_buy(ticker, units, total_price):
row = Portfolio.query.filter(Portfolio.ticker == ticker).all()
if len(row):
row[0].total_units = int(row[0].total_units) + units
row[0].total_invested = int(row[0].total_invested) + total_price
else:
db.session.add( Portfolio(ticker=ticker, total_units=units, total_invested=total_price) )
def update_portfolio_sell(ticker, units, total_price):
row = Portfolio.query.filter(Portfolio.ticker == ticker).all()
if len(row):
row[0].total_invested = int(row[0].total_invested) - ((int(row[0].total_invested)/int(row[0].total_units)) * units)
row[0].total_units = int(row[0].total_units) - units
Portfolio.query.filter(Portfolio.total_units == 0).delete()
def get_watchlist():
rows = Watchlist.query.all()
if len(rows):
watchlist = [row.ticker for row in rows]
else:
watchlist = []
return watchlist
def get_portfolio():
rows = Portfolio.query.all()
portfolio = [{'ticker':row.ticker, 'total_units':row.total_units, 'total_invested':row.total_invested} for row in rows]
return portfolio
def is_stock_in_watchlist(ticker):
rows = Watchlist.query.filter(Watchlist.ticker == ticker).all()
return True if len(rows) else False
def add_to_watchlist(ticker):
industry = metric.get_company(ticker)["industry"]
try:
db.session.add( Watchlist(ticker=ticker, industry=industry) )
db.session.commit()
return True
except exc.SQLAlchemyError:
return False
def remove_from_watchlist(ticker):
try:
Watchlist.query.filter(Watchlist.ticker == ticker).delete()
db.session.commit()
return True
except exc.SQLAlchemyError:
return False
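# Editor's note: usage sketch as comments only (not part of the original module);
# running it requires the Flask app and database from app.py to be initialised,
# and 'AAPL' is just a hypothetical ticker.
#   add_to_watchlist('AAPL')
#   buy_stock('AAPL', 10)   # -> {'status': True, 'error': None} on success
#   sell_stock('AAPL', 5)
#   print(get_portfolio())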
| [((202, 226), 'metric.get_price', 'metric.get_price', (['ticker'], {}), '(ticker)\n', (218, 226), False, 'import metric\n'), ((1165, 1189), 'metric.get_price', 'metric.get_price', (['ticker'], {}), '(ticker)\n', (1181, 1189), False, 'import metric\n'), ((3039, 3060), 'models.Watchlist.query.all', 'Watchlist.query.all', ([], {}), '()\n', (3058, 3060), False, 'from models import Watchlist, Portfolio, Activity\n'), ((3216, 3237), 'models.Portfolio.query.all', 'Portfolio.query.all', ([], {}), '()\n', (3235, 3237), False, 'from models import Watchlist, Portfolio, Activity\n'), ((3573, 3599), 'metric.get_company', 'metric.get_company', (['ticker'], {}), '(ticker)\n', (3591, 3599), False, 'import metric\n'), ((3699, 3718), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3716, 3718), False, 'from app import db\n'), ((3913, 3932), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3930, 3932), False, 'from app import db\n'), ((865, 884), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (882, 884), False, 'from app import db\n'), ((1200, 1250), 'models.Portfolio.query.filter', 'Portfolio.query.filter', (['(Portfolio.ticker == ticker)'], {}), '(Portfolio.ticker == ticker)\n', (1222, 1250), False, 'from models import Watchlist, Portfolio, Activity\n'), ((1960, 1979), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1977, 1979), False, 'from app import db\n'), ((2296, 2346), 'models.Portfolio.query.filter', 'Portfolio.query.filter', (['(Portfolio.ticker == ticker)'], {}), '(Portfolio.ticker == ticker)\n', (2318, 2346), False, 'from models import Watchlist, Portfolio, Activity\n'), ((2538, 2609), 'models.Portfolio', 'Portfolio', ([], {'ticker': 'ticker', 'total_units': 'units', 'total_invested': 'total_price'}), '(ticker=ticker, total_units=units, total_invested=total_price)\n', (2547, 2609), False, 'from models import Watchlist, Portfolio, Activity\n'), ((2678, 2728), 'models.Portfolio.query.filter', 'Portfolio.query.filter', (['(Portfolio.ticker == ticker)'], {}), '(Portfolio.ticker == ticker)\n', (2700, 2728), False, 'from models import Watchlist, Portfolio, Activity\n'), ((2946, 2996), 'models.Portfolio.query.filter', 'Portfolio.query.filter', (['(Portfolio.total_units == 0)'], {}), '(Portfolio.total_units == 0)\n', (2968, 2996), False, 'from models import Watchlist, Portfolio, Activity\n'), ((3430, 3480), 'models.Watchlist.query.filter', 'Watchlist.query.filter', (['(Watchlist.ticker == ticker)'], {}), '(Watchlist.ticker == ticker)\n', (3452, 3480), False, 'from models import Watchlist, Portfolio, Activity\n'), ((3645, 3688), 'models.Watchlist', 'Watchlist', ([], {'ticker': 'ticker', 'industry': 'industry'}), '(ticker=ticker, industry=industry)\n', (3654, 3688), False, 'from models import Watchlist, Portfolio, Activity\n'), ((294, 324), 'sqlalchemy.sql.expression.func.max', 'func.max', (['Activity.activity_id'], {}), '(Activity.activity_id)\n', (302, 324), False, 'from sqlalchemy.sql.expression import func\n'), ((640, 775), 'models.Activity', 'Activity', ([], {'ticker': 'ticker', 'units': 'units', 'order_type': '"""b"""', 'unit_price': 'unit_price', 'total_price': 'total_price', 'buying_power': 'new_buying_power'}), "(ticker=ticker, units=units, order_type='b', unit_price=unit_price,\n total_price=total_price, buying_power=new_buying_power)\n", (648, 775), False, 'from models import Watchlist, Portfolio, Activity\n'), ((1734, 1869), 'models.Activity', 'Activity', ([], {'ticker': 'ticker', 'units': 'units', 'order_type': '"""s"""', 
'unit_price': 'unit_price', 'total_price': 'total_price', 'buying_power': 'new_buying_power'}), "(ticker=ticker, units=units, order_type='s', unit_price=unit_price,\n total_price=total_price, buying_power=new_buying_power)\n", (1742, 1869), False, 'from models import Watchlist, Portfolio, Activity\n'), ((3845, 3895), 'models.Watchlist.query.filter', 'Watchlist.query.filter', (['(Watchlist.ticker == ticker)'], {}), '(Watchlist.ticker == ticker)\n', (3867, 3895), False, 'from models import Watchlist, Portfolio, Activity\n'), ((1490, 1520), 'sqlalchemy.sql.expression.func.max', 'func.max', (['Activity.activity_id'], {}), '(Activity.activity_id)\n', (1498, 1520), False, 'from sqlalchemy.sql.expression import func\n'), ((434, 487), 'models.Activity.query.filter', 'Activity.query.filter', (['(Activity.activity_id == max_id)'], {}), '(Activity.activity_id == max_id)\n', (455, 487), False, 'from models import Watchlist, Portfolio, Activity\n'), ((1558, 1611), 'models.Activity.query.filter', 'Activity.query.filter', (['(Activity.activity_id == max_id)'], {}), '(Activity.activity_id == max_id)\n', (1579, 1611), False, 'from models import Watchlist, Portfolio, Activity\n')] |
Tilapiatsu/blender-custom_conf | scripts/addons/kekit/ke_fit2grid.py | 05592fedf74e4b7075a6228b8448a5cda10f7753 | bl_info = {
"name": "ke_fit2grid",
"author": "Kjell Emanuelsson",
"category": "Modeling",
"version": (1, 0, 2),
"blender": (2, 80, 0),
}
import bpy
import bmesh
from mathutils import Vector
def fit_to_grid(co, grid):
x, y, z = round(co[0] / grid) * grid, round(co[1] / grid) * grid, round(co[2] / grid) * grid
return round(x, 5), round(y, 5), round(z, 5)
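# Editor's note: worked example (illustrative values): with grid=0.25,
# fit_to_grid((0.123, 0.456, 0.789), 0.25) -> (0.0, 0.5, 0.75).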
class VIEW3D_OT_ke_fit2grid(bpy.types.Operator):
bl_idname = "view3d.ke_fit2grid"
bl_label = "Fit2Grid"
bl_description = "EDIT: Snaps verts of selected VERTS/EDGES/FACES to nearest set world grid step."
bl_options = {'REGISTER', 'UNDO'}
set_grid: bpy.props.FloatProperty()
@classmethod
def poll(cls, context):
return context.object is not None
def execute(self, context):
if not self.set_grid:
grid_setting = bpy.context.scene.kekit.fit2grid
else:
grid_setting = self.set_grid
obj = context.object
if obj.type == 'MESH' and obj.data.is_editmode:
od = obj.data
bm = bmesh.from_edit_mesh(od)
obj_mtx = obj.matrix_world.copy()
verts = [v for v in bm.verts if v.select]
if verts:
vert_cos = [obj_mtx @ v.co for v in verts]
modified = []
for v,co in zip(verts, vert_cos):
new_coords = fit_to_grid(co, grid_setting)
old_coords = tuple([round(i, 5) for i in co])
if new_coords != old_coords:
v.co = obj_mtx.inverted() @ Vector(new_coords)
modified.append(v)
bpy.ops.mesh.select_all(action='DESELECT')
if modified:
for v in modified:
v.select = True
bmesh.update_edit_mesh(od)
bm.free()
bpy.ops.object.mode_set(mode="OBJECT")
bpy.ops.object.mode_set(mode='EDIT')
if modified:
bpy.ops.mesh.select_mode(type="VERT")
self.report({"INFO"}, "Fit2Grid: %i vert(s) not on grid" % len(modified))
else:
self.report({"INFO"}, "Fit2Grid: On grid - All good!")
else:
self.report({"INFO"}, "Fit2Grid: Nothing Selected?")
elif context.mode == "OBJECT":
new_loc = fit_to_grid(obj.location, grid_setting)
obj.location = new_loc
else:
self.report({"INFO"}, "Fit2Grid: Invalid object/mode - Aborted")
return {'FINISHED'}
# -------------------------------------------------------------------------------------------------
# Class Registration & Unregistration
# -------------------------------------------------------------------------------------------------
def register():
bpy.utils.register_class(VIEW3D_OT_ke_fit2grid)
def unregister():
bpy.utils.unregister_class(VIEW3D_OT_ke_fit2grid)
if __name__ == "__main__":
register()
| [((727, 752), 'bpy.props.FloatProperty', 'bpy.props.FloatProperty', ([], {}), '()\n', (750, 752), False, 'import bpy\n'), ((3011, 3058), 'bpy.utils.register_class', 'bpy.utils.register_class', (['VIEW3D_OT_ke_fit2grid'], {}), '(VIEW3D_OT_ke_fit2grid)\n', (3035, 3058), False, 'import bpy\n'), ((3082, 3131), 'bpy.utils.unregister_class', 'bpy.utils.unregister_class', (['VIEW3D_OT_ke_fit2grid'], {}), '(VIEW3D_OT_ke_fit2grid)\n', (3108, 3131), False, 'import bpy\n'), ((1149, 1173), 'bmesh.from_edit_mesh', 'bmesh.from_edit_mesh', (['od'], {}), '(od)\n', (1169, 1173), False, 'import bmesh\n'), ((1796, 1838), 'bpy.ops.mesh.select_all', 'bpy.ops.mesh.select_all', ([], {'action': '"""DESELECT"""'}), "(action='DESELECT')\n", (1819, 1838), False, 'import bpy\n'), ((1965, 1991), 'bmesh.update_edit_mesh', 'bmesh.update_edit_mesh', (['od'], {}), '(od)\n', (1987, 1991), False, 'import bmesh\n'), ((2034, 2072), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""OBJECT"""'}), "(mode='OBJECT')\n", (2057, 2072), False, 'import bpy\n'), ((2089, 2125), 'bpy.ops.object.mode_set', 'bpy.ops.object.mode_set', ([], {'mode': '"""EDIT"""'}), "(mode='EDIT')\n", (2112, 2125), False, 'import bpy\n'), ((2176, 2213), 'bpy.ops.mesh.select_mode', 'bpy.ops.mesh.select_mode', ([], {'type': '"""VERT"""'}), "(type='VERT')\n", (2200, 2213), False, 'import bpy\n'), ((1717, 1735), 'mathutils.Vector', 'Vector', (['new_coords'], {}), '(new_coords)\n', (1723, 1735), False, 'from mathutils import Vector, Matrix\n')] |
en0/pyavl3 | tests/test_update.py | c9dad3189da1f18e935e61d13d7c971aceafd895 | import unittest
from pyavl3 import AVLTree
class AVLTreeUpdateTest(unittest.TestCase):
def test_add_one(self):
a = AVLTree()
a.update({1:'a'})
self.assertEqual(len(a), 1)
| [((129, 138), 'pyavl3.AVLTree', 'AVLTree', ([], {}), '()\n', (136, 138), False, 'from pyavl3 import AVLTree\n')] |