repo_name (stringlengths 7-94) | repo_path (stringlengths 4-237) | repo_head_hexsha (stringlengths 40-40) | content (stringlengths 10-680k) | apis (stringlengths 2-680k) |
---|---|---|---|---|
LinkanDawang/FreshMallDemo | apps/orders/models.py | 5b8e2d2e8e137f609e8ac1e29ea013bb3ef34edb | from django.db import models
from utils.models import BaseModel
from users.models import User, Address
from goods.models import GoodsSKU
# Create your models here.
class OrderInfo(BaseModel):
"""订单信息"""
PAY_METHOD = ['1', '2']
PAY_METHOD_CHOICES = (
(1, "货到付款"),
(2, "支付宝"),
)
ORDER_STATUS_CHOICES = (
(1, "待支付"),
(2, "待发货"),
(3, "待收货"),
(4, "待评价"),
(5, "已完成"),
)
"""---------订单信息------------------------"""
PAY_METHODS = {
1: "货到付款",
2: "支付宝",
}
ORDER_STATUS = {
1: "待支付",
2: "待发货",
3: "待收货",
4: "待评价",
5: "已完成",
}
PAY_METHODS_ENUM = {
"CASH": 1,
"ALIPAY": 2
}
ORDER_STATUS_ENUM = {
"UNPAID": 1,
"UNSEND": 2,
"UNRECEIVED": 3,
"UNCOMMENT": 4,
"FINISHED": 5
}
order_id = models.CharField(max_length=64, primary_key=True, verbose_name="订单号")
user = models.ForeignKey(User, on_delete=models.CASCADE, verbose_name="下单用户")
address = models.ForeignKey(Address, on_delete=models.CASCADE, verbose_name="收货地址")
total_count = models.IntegerField(default=1, verbose_name="商品总数")
total_amount = models.DecimalField(max_digits=10, decimal_places=2, verbose_name="商品总金额")
trans_cost = models.DecimalField(max_digits=10, decimal_places=2, verbose_name="运费")
pay_method = models.SmallIntegerField(choices=PAY_METHOD_CHOICES, default=1, verbose_name="支付方式")
status = models.SmallIntegerField(choices=ORDER_STATUS_CHOICES, default=1, verbose_name="订单状态")
trade_id = models.CharField(max_length=100, unique=True, null=True, blank=True, verbose_name="支付编号")
class Meta:
db_table = "df_order_info"
class OrderGoods(BaseModel):
"""订单商品"""
order = models.ForeignKey(OrderInfo, on_delete=models.CASCADE, verbose_name="订单")
sku = models.ForeignKey(GoodsSKU, on_delete=models.CASCADE, verbose_name="订单商品")
count = models.IntegerField(default=1, verbose_name="数量")
price = models.DecimalField(max_digits=10, decimal_places=2, verbose_name="单价")
comment = models.TextField(default="", verbose_name="评价信息")
class Meta:
db_table = "df_order_goods"
| [((911, 980), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'primary_key': '(True)', 'verbose_name': '"""订单号"""'}), "(max_length=64, primary_key=True, verbose_name='订单号')\n", (927, 980), False, 'from django.db import models\n'), ((992, 1062), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'verbose_name': '"""下单用户"""'}), "(User, on_delete=models.CASCADE, verbose_name='下单用户')\n", (1009, 1062), False, 'from django.db import models\n'), ((1077, 1150), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Address'], {'on_delete': 'models.CASCADE', 'verbose_name': '"""收获地址"""'}), "(Address, on_delete=models.CASCADE, verbose_name='收获地址')\n", (1094, 1150), False, 'from django.db import models\n'), ((1169, 1220), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)', 'verbose_name': '"""商品总数"""'}), "(default=1, verbose_name='商品总数')\n", (1188, 1220), False, 'from django.db import models\n'), ((1240, 1314), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(10)', 'decimal_places': '(2)', 'verbose_name': '"""商品总金额"""'}), "(max_digits=10, decimal_places=2, verbose_name='商品总金额')\n", (1259, 1314), False, 'from django.db import models\n'), ((1332, 1403), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(10)', 'decimal_places': '(2)', 'verbose_name': '"""运费"""'}), "(max_digits=10, decimal_places=2, verbose_name='运费')\n", (1351, 1403), False, 'from django.db import models\n'), ((1421, 1509), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'choices': 'PAY_METHOD_CHOICES', 'default': '(1)', 'verbose_name': '"""支付方式"""'}), "(choices=PAY_METHOD_CHOICES, default=1,\n verbose_name='支付方式')\n", (1445, 1509), False, 'from django.db import models\n'), ((1519, 1609), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'choices': 'ORDER_STATUS_CHOICES', 'default': '(1)', 'verbose_name': '"""订单状态"""'}), "(choices=ORDER_STATUS_CHOICES, default=1,\n verbose_name='订单状态')\n", (1543, 1609), False, 'from django.db import models\n'), ((1621, 1714), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'unique': '(True)', 'null': '(True)', 'blank': '(True)', 'verbose_name': '"""支付编号"""'}), "(max_length=100, unique=True, null=True, blank=True,\n verbose_name='支付编号')\n", (1637, 1714), False, 'from django.db import models\n'), ((1821, 1894), 'django.db.models.ForeignKey', 'models.ForeignKey', (['OrderInfo'], {'on_delete': 'models.CASCADE', 'verbose_name': '"""订单"""'}), "(OrderInfo, on_delete=models.CASCADE, verbose_name='订单')\n", (1838, 1894), False, 'from django.db import models\n'), ((1905, 1979), 'django.db.models.ForeignKey', 'models.ForeignKey', (['GoodsSKU'], {'on_delete': 'models.CASCADE', 'verbose_name': '"""订单商品"""'}), "(GoodsSKU, on_delete=models.CASCADE, verbose_name='订单商品')\n", (1922, 1979), False, 'from django.db import models\n'), ((1992, 2041), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)', 'verbose_name': '"""数量"""'}), "(default=1, verbose_name='数量')\n", (2011, 2041), False, 'from django.db import models\n'), ((2054, 2125), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(10)', 'decimal_places': '(2)', 'verbose_name': '"""单价"""'}), "(max_digits=10, decimal_places=2, verbose_name='单价')\n", (2073, 2125), False, 'from django.db import models\n'), ((2140, 2189), 'django.db.models.TextField', 'models.TextField', ([], {'default': 
'""""""', 'verbose_name': '"""评价信息"""'}), "(default='', verbose_name='评价信息')\n", (2156, 2189), False, 'from django.db import models\n')] |
hunterhector/DDSemantics | event/arguments/prepare/event_vocab.py | 883ef1015bd21d9b8575d8000faf3b506a09f21c | from collections import defaultdict, Counter
import os
import gzip
import json
import pickle
from json.decoder import JSONDecodeError
import logging
from typing import Dict
import pdb
from event import util
from event.arguments.prepare.slot_processor import get_simple_dep, is_propbank_dep
logger = logging.getLogger(__name__)
class TypedEventVocab:
unk_predicate = "unk_predicate-pred"
unk_arg_word = "unk_argument"
unk_frame = "unk_frame"
unk_fe = "unk_fe"
unk_prep = "unk_preposition"
unk_dep = "unk_dep"
unobserved_fe = "__unobserved_fe__"
unobserved_arg = "__unobserved_arg__"
ghost = "__ghost_component__"
def __init__(self, vocab_dir, event_data=None):
self.lookups: Dict[str, Dict[str, int]] = {}
self.oovs: Dict[str, str] = {}
self.vocab_dir = vocab_dir
if not os.path.exists(os.path.join(vocab_dir, "predicate.vocab")):
if event_data is None:
logging.error(
"Vocabulary file does not exist and no data was provided for counting."
)
raise ValueError("event_data is required to build a new vocabulary")
logger.info("Counting vocabulary.")
vocab_counters = self.get_vocab_count(event_data)
for vocab_name, counter in vocab_counters.items():
raw_vocab_path = os.path.join(vocab_dir, vocab_name + ".vocab")
with open(raw_vocab_path, "w") as out:
for key, value in counter.most_common():
out.write("{}\t{}\n".format(key, value))
logger.info("Done vocabulary counting.")
# Now filter the vocabulary.
logger.info("Filtering vocabulary.")
filtered_vocab = self.filter_vocab(vocab_counters)
logger.info("Done filtering.")
logger.info("Writing filtered vocab to disk.")
for key, vocab in filtered_vocab.items():
with open(os.path.join(self.vocab_dir, key + ".vocab"), "w") as out:
for token, count in vocab:
out.write("{}\t{}\n".format(token, count))
self.pickle_counts()
logger.info("Done.")
else:
logger.info("Will not overwrite vocabulary, using existing.")
if not self.unpickle_counts():
logger.info("Reading counts from .vocab files.")
f_name: str
for f_name in os.listdir(vocab_dir):
if "_" in f_name and f_name.endswith(".vocab"):
vocab_type = f_name.split("_")[0]
else:
continue
self.lookups[vocab_type] = {}
self.oovs[vocab_type] = "unk_" + vocab_type
with open(os.path.join(vocab_dir, f_name)) as vocab_file:
index = 0
for line in vocab_file:
word, count = line.strip().split("\t")
self.lookups[vocab_type][word] = index
index += 1
logger.info(
"Loaded {} types for {}".format(
len(self.lookups[vocab_type]), vocab_type
)
)
self.pickle_counts()
def pickle_counts(self):
with open(os.path.join(self.vocab_dir, "lookups.pickle"), "wb") as out:
pickle.dump(self.lookups, out)
with open(os.path.join(self.vocab_dir, "oovs.pickle"), "wb") as out:
pickle.dump(self.oovs, out)
def unpickle_counts(self):
lookup_pickle = os.path.join(self.vocab_dir, "lookups.pickle")
oov_pickle = os.path.join(self.vocab_dir, "oovs.pickle")
if os.path.exists(lookup_pickle) and os.path.exists(oov_pickle):
logger.info("Directly loading pickled counts.")
with open(lookup_pickle, "rb") as lp:
self.lookups = pickle.load(lp)
with open(oov_pickle, "rb") as op:
self.oovs = pickle.load(op)
return True
else:
return False
def get_vocab_word(self, word, key):
if not word:
return self.oovs[key]
if word in self.lookups[key]:
return word
else:
return self.oovs[key]
@classmethod
def make_arg(cls, text, role):
if role == "NA":
return text + "-" + cls.unk_dep
else:
return text + "-" + role
@staticmethod
def make_predicate(text):
return text.lower() + "-pred"
@staticmethod
def make_fe(frame, fe):
# Do not use the combined (frame, fe) format, to alleviate sparsity.
return fe
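# Hedged illustration (not part of the original module; the values follow
# directly from the helpers above):
#   >>> TypedEventVocab.make_arg("dog", "dobj")
#   'dog-dobj'
#   >>> TypedEventVocab.make_arg("dog", "NA")
#   'dog-unk_dep'
#   >>> TypedEventVocab.make_predicate("Eat")
#   'eat-pred'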
def get_arg_entity_rep(self, arg, entity_text):
# If a specific entity text is provided.
rep = self.oovs["argument"]
if entity_text is not None:
# Use the argument's own text.
rep = self.get_vocab_word(entity_text, "argument")
if rep == self.oovs["argument"]:
# Use the text after the hyphen.
if "-" in entity_text:
rep = self.get_vocab_word(entity_text.split("-")[-1], "argument")
arg_text = arg["text"].lower()
if rep == self.oovs["argument"]:
# Fall back to use the argument's own text.
rep = self.get_vocab_word(arg_text, "argument")
if rep == self.oovs["argument"]:
if "-" in arg_text:
rep = self.get_vocab_word(arg_text.split("-")[-1], "argument")
if rep == self.oovs["argument"]:
# Fall back to NER tag.
if "ner" in arg:
rep = arg["ner"]
return rep
@classmethod
def get_unk_arg_rep(cls):
# This will create a fully unknown argument; try to back off to
# a partially unknown argument if possible.
return cls.make_arg(cls.unk_arg_word, cls.unk_dep)
@classmethod
def get_unk_arg_with_dep(cls, dep):
"""Return a backoff version of the representation by using the
actual dep, but unk_arg
Args:
dep
"""
return cls.make_arg(cls.unk_arg_word, dep)
@classmethod
def get_arg_rep_no_dep(cls, entity_rep):
"""Return the backoff version of the argument representation by using
the unk_dep, but the actual entity.
Args:
entity_rep:
Returns:
"""
return cls.make_arg(entity_rep, cls.unk_dep)
def get_arg_rep(self, dep, entity_rep):
if dep.startswith("prep"):
dep = self.get_vocab_word(dep, "preposition")
arg_rep = self.make_arg(entity_rep, dep)
return arg_rep
def get_pred_rep(self, event):
"""
Take the predicate and get the vocab word for it. This first uses the
predicate itself; if that is not found, it tries the verb form.
:param event:
:return:
"""
pred = self.get_vocab_word(event["predicate"], "predicate")
if pred == self.oovs["predicate"]:
# Try to see if the verb form helps.
if "verb_form" in event:
pred = self.get_vocab_word(event["verb_form"], "predicate")
return self.make_predicate(pred)
def get_fe_rep(self, frame_name, fe_role):
# return self.make_fe(frame_name, fe_role)
return self.get_vocab_word(self.make_fe(frame_name, fe_role), "fe")
@staticmethod
def filter_by_count(counter, min_count):
return [
(key, count) for key, count in counter.most_common() if count >= min_count
]
def filter_vocab(
self,
vocab_counters,
top_num_prep=150,
min_token_count=500,
min_fe_count=50,
min_frame_count=5,
):
filtered_vocab = {
"predicate_min_%d"
% min_token_count: self.filter_by_count(
vocab_counters["predicate"], min_token_count
),
"argument_min_%d"
% min_token_count: self.filter_by_count(
vocab_counters["argument"], min_token_count
),
"preposition_top_%d"
% top_num_prep: vocab_counters["preposition"].most_common(top_num_prep),
"fe_min_%d"
% min_fe_count: self.filter_by_count(vocab_counters["fe"], min_fe_count),
"frame_min_%d"
% min_frame_count: self.filter_by_count(
vocab_counters["frame"], min_frame_count
),
}
for key, counts in filtered_vocab.items():
# Use the base key name for the vocabulary, not including the
# cutoff (i.e. predicate_min_50 -> predicate).
name = key.split("_")[0]
# Put the OOV token as the first token in the vocab file.
oov = "unk_" + name
counts.insert(0, (oov, 0))
self.lookups[name] = {}
self.oovs[name] = oov
index = 0
for term, _ in counts:
self.lookups[name][term] = index
index += 1
return filtered_vocab
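# Hedged note on the layout produced above: each filtered vocabulary stores
# its own "unk_<name>" token at index 0, followed by the surviving tokens in
# descending frequency order.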
def get_vocab_count(self, data_path):
vocab_counters = defaultdict(Counter)
doc_count = 0
event_count = 0
with gzip.open(data_path) as data:
for line in data:
doc_info = json.loads(line)
for event in doc_info["events"]:
event_count += 1
predicate = event["predicate"]
vocab_counters["predicate"][predicate] += 1
frame = event["frame"]
if not frame == "NA":
vocab_counters["frame"][frame] += 1
for arg in event["arguments"]:
fe_name = arg["feName"]
syn_role = arg["dep"]
arg_text = arg["text"].lower()
vocab_counters["argument"][arg_text] += 1
if not fe_name == "NA":
vocab_counters["fe"][
self.make_fe(event["frame"], fe_name)
] += 1
if syn_role.startswith("prep"):
vocab_counters["preposition"][syn_role] += 1
doc_count += 1
if doc_count % 1000 == 0:
print(
"\rCounted vocab for {} events in "
"{} docs.".format(event_count, doc_count),
end="",
)
return vocab_counters
class EmbbedingVocab:
def __init__(self, vocab_file, with_padding=False, extras=None):
self.vocab_file = vocab_file
self.vocab = {}
self.tf = []
self.extras = []
self.pad = "__PADDING__"
self.padded = False
if with_padding:
# Paddings should be at 0.
self.padded = True
self.vocab[self.pad] = 0
self.tf.append(0)
if extras:
for name in extras:
self.add_extra(name)
self.__read_vocab()
@staticmethod
def with_extras(vocab_file):
"""
Create an EmbbedingVocab with unknown-word slots and a padding slot.
Args:
vocab_file:
Returns:
"""
return EmbbedingVocab(
vocab_file,
True,
[
TypedEventVocab.unk_frame,
TypedEventVocab.unk_fe,
TypedEventVocab.get_unk_arg_rep(),
TypedEventVocab.unobserved_arg,
TypedEventVocab.unobserved_fe,
TypedEventVocab.ghost,
],
)
def get_index(self, token, unk):
try:
return self.vocab[token]
except KeyError:
if unk:
return self.vocab[unk]
else:
return -1
def extra_size(self):
return len(self.extras)
def add_extra(self, name):
"""Add extra dimensions into the embedding vocab, used for special
tokens.
Args:
name:
Returns:
"""
if name in self.extras:
logger.info(
f"Extra {name} already exist in vocabulary "
f"at index {self.vocab[name]}"
)
return self.vocab[name]
else:
self.extras.append(name)
extra_index = len(self.vocab)
self.vocab[name] = extra_index
self.tf.append(0)
logger.info(
f"Adding {name} as extra dimension {extra_index} "
f"to {self.vocab_file}"
)
return extra_index
def get_size(self):
return len(self.vocab)
def vocab_items(self):
return self.vocab.items()
def get_term_freq(self, token):
return self.tf[self.get_index(token, None)]
def __read_vocab(self):
with open(self.vocab_file) as din:
index = len(self.vocab)
for line in din:
word, count = line.split()
self.vocab[word] = index
self.tf.append(int(count))
index += 1
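# Hedged usage sketch (the file name is hypothetical): a vocab file holds one
# "<token>\t<count>" pair per line. with_extras() reserves index 0 for padding
# and the next six indices for the unk/unobserved/ghost tokens, so the first
# token read from the file lands at index 7 (here assuming "eat-pred" is the
# file's first token):
#   >>> ev = EmbbedingVocab.with_extras("predicate.vocab")
#   >>> ev.get_index("eat-pred", TypedEventVocab.unk_predicate)
#   7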
def create_sentences(
doc,
event_vocab,
output_path,
include_frame=False,
use_simple_dep=False,
prop_arg_only=False,
):
if include_frame:
print("Adding frames to sentences.")
doc_count = 0
event_count = 0
with gzip.open(doc) as data, gzip.open(output_path, "w") as out:
for line in data:
try:
doc_info = json.loads(line)
except JSONDecodeError:
continue
sentence = []
represent_by_id = {}
for entity in doc_info["entities"]:
eid = entity["entityId"]
represent = entity["representEntityHead"]
represent_by_id[eid] = represent
for event in doc_info["events"]:
event_count += 1
sentence.append(event_vocab.get_pred_rep(event))
if include_frame and not event["frame"] == "NA":
frame = event_vocab.get_vocab_word(event["frame"], "frame")
sentence.append(frame)
for arg in event["arguments"]:
dep = arg["dep"]
if (
arg["argStart"] == event["predicateStart"]
and arg["argEnd"] == event["predicateEnd"]
):
dep = "root"
if use_simple_dep:
dep = get_simple_dep(dep)
if prop_arg_only and not is_propbank_dep(dep):
continue
sentence.append(
event_vocab.get_arg_rep(
dep, event_vocab.get_arg_entity_rep(arg, None)
)
)
if include_frame and not arg["feName"] == "NA":
fe = event_vocab.get_fe_rep(frame, arg["feName"])
if not fe == event_vocab.oovs["fe"]:
sentence.append(fe)
if "NA" in sentence:
pdb.set_trace()
doc_count += 1
out.write(str.encode(" ".join(sentence) + "\n"))
if event_count % 1000 == 0:
print(
"\rCreated sentences for {} documents, "
"{} events.".format(doc_count, event_count),
end="",
)
print(
"\rCreated sentences for {} documents, "
"{} events.\n".format(doc_count, event_count),
end="",
)
def write_sentences(
sent_out, event_data, event_vocab, include_frame, simple_dep, prop_arg
):
if not os.path.exists(sent_out):
os.makedirs(sent_out)
fname = "sent_with_frames.gz" if include_frame else "sent_pred_only.gz"
out = os.path.join(sent_out, fname)
if not os.path.exists(out):
create_sentences(
event_data,
event_vocab,
out,
include_frame=include_frame,
use_simple_dep=simple_dep,
prop_arg_only=prop_arg,
)
else:
logger.info(f"Will not overwrite {out}")
def main(event_data, vocab_dir, sent_out, prop_arg):
if not os.path.exists(vocab_dir):
os.makedirs(vocab_dir)
event_vocab = TypedEventVocab(vocab_dir, event_data=event_data)
logger.info("Done loading vocabulary.")
# The 3 booleans are: include_frame, simple_dep, prop_arg
if prop_arg:
# For propbank style training.
logger.info("Creating event sentences in propbank style")
# Generate the with-frame and without-frame versions for PropBank, always
# using simple deps and PropBank-style arguments.
write_sentences(sent_out, event_data, event_vocab, False, True, True)
write_sentences(sent_out, event_data, event_vocab, True, True, True)
else:
# For framenet style training.
logger.info("Creating event sentences in FrameNet style")
# Generate the with-frame and without-frame versions for FrameNet, always
# using complex deps and FrameNet-style arguments.
write_sentences(sent_out, event_data, event_vocab, True, False, False)
write_sentences(sent_out, event_data, event_vocab, False, False, False)
if __name__ == "__main__":
parser = util.OptionPerLineParser(
description="Event Vocabulary.", fromfile_prefix_chars="@"
)
parser.add_argument("--vocab_dir", type=str, help="Vocabulary directory.")
parser.add_argument("--input_data", type=str, help="Input data.")
parser.add_argument("--sent_out", type=str, help="Sentence out dir.")
parser.add_argument(
"--prop_arg", action="store_true", help="Propbank arg only.", default=False
)
util.set_basic_log()
args = parser.parse_args()
main(args.input_data, args.vocab_dir, args.sent_out, args.prop_arg)
| [((301, 328), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (318, 328), False, 'import logging\n'), ((16161, 16190), 'os.path.join', 'os.path.join', (['sent_out', 'fname'], {}), '(sent_out, fname)\n', (16173, 16190), False, 'import os\n'), ((17634, 17722), 'event.util.OptionPerLineParser', 'util.OptionPerLineParser', ([], {'description': '"""Event Vocabulary."""', 'fromfile_prefix_chars': '"""@"""'}), "(description='Event Vocabulary.',\n fromfile_prefix_chars='@')\n", (17658, 17722), False, 'from event import util\n'), ((18076, 18096), 'event.util.set_basic_log', 'util.set_basic_log', ([], {}), '()\n', (18094, 18096), False, 'from event import util\n'), ((3633, 3679), 'os.path.join', 'os.path.join', (['self.vocab_dir', '"""lookups.pickle"""'], {}), "(self.vocab_dir, 'lookups.pickle')\n", (3645, 3679), False, 'import os\n'), ((3701, 3744), 'os.path.join', 'os.path.join', (['self.vocab_dir', '"""oovs.pickle"""'], {}), "(self.vocab_dir, 'oovs.pickle')\n", (3713, 3744), False, 'import os\n'), ((9242, 9262), 'collections.defaultdict', 'defaultdict', (['Counter'], {}), '(Counter)\n', (9253, 9262), False, 'from collections import defaultdict, Counter\n'), ((13602, 13616), 'gzip.open', 'gzip.open', (['doc'], {}), '(doc)\n', (13611, 13616), False, 'import gzip\n'), ((13626, 13653), 'gzip.open', 'gzip.open', (['output_path', '"""w"""'], {}), "(output_path, 'w')\n", (13635, 13653), False, 'import gzip\n'), ((16017, 16041), 'os.path.exists', 'os.path.exists', (['sent_out'], {}), '(sent_out)\n', (16031, 16041), False, 'import os\n'), ((16051, 16072), 'os.makedirs', 'os.makedirs', (['sent_out'], {}), '(sent_out)\n', (16062, 16072), False, 'import os\n'), ((16202, 16221), 'os.path.exists', 'os.path.exists', (['out'], {}), '(out)\n', (16216, 16221), False, 'import os\n'), ((16566, 16591), 'os.path.exists', 'os.path.exists', (['vocab_dir'], {}), '(vocab_dir)\n', (16580, 16591), False, 'import os\n'), ((16601, 16623), 'os.makedirs', 'os.makedirs', (['vocab_dir'], {}), '(vocab_dir)\n', (16612, 16623), False, 'import os\n'), ((3428, 3458), 'pickle.dump', 'pickle.dump', (['self.lookups', 'out'], {}), '(self.lookups, out)\n', (3439, 3458), False, 'import pickle\n'), ((3549, 3576), 'pickle.dump', 'pickle.dump', (['self.oovs', 'out'], {}), '(self.oovs, out)\n', (3560, 3576), False, 'import pickle\n'), ((3757, 3786), 'os.path.exists', 'os.path.exists', (['lookup_pickle'], {}), '(lookup_pickle)\n', (3771, 3786), False, 'import os\n'), ((3791, 3817), 'os.path.exists', 'os.path.exists', (['oov_pickle'], {}), '(oov_pickle)\n', (3805, 3817), False, 'import os\n'), ((9324, 9344), 'gzip.open', 'gzip.open', (['data_path'], {}), '(data_path)\n', (9333, 9344), False, 'import gzip\n'), ((864, 906), 'os.path.join', 'os.path.join', (['vocab_dir', '"""predicate.vocab"""'], {}), "(vocab_dir, 'predicate.vocab')\n", (876, 906), False, 'import os\n'), ((960, 1038), 'logging.error', 'logging.error', (['"""Vocabulary file not exist and not data provided for counting."""'], {}), "('Vocabulary file not exist and not data provided for counting.')\n", (973, 1038), False, 'import logging\n'), ((1287, 1333), 'os.path.join', 'os.path.join', (['vocab_dir', "(vocab_name + '.vocab')"], {}), "(vocab_dir, vocab_name + '.vocab')\n", (1299, 1333), False, 'import os\n'), ((2402, 2423), 'os.listdir', 'os.listdir', (['vocab_dir'], {}), '(vocab_dir)\n', (2412, 2423), False, 'import os\n'), ((3354, 3400), 'os.path.join', 'os.path.join', (['self.vocab_dir', '"""lookups.pickle"""'], {}), "(self.vocab_dir, 
'lookups.pickle')\n", (3366, 3400), False, 'import os\n'), ((3478, 3521), 'os.path.join', 'os.path.join', (['self.vocab_dir', '"""oovs.pickle"""'], {}), "(self.vocab_dir, 'oovs.pickle')\n", (3490, 3521), False, 'import os\n'), ((3960, 3975), 'pickle.load', 'pickle.load', (['lp'], {}), '(lp)\n', (3971, 3975), False, 'import pickle\n'), ((4051, 4066), 'pickle.load', 'pickle.load', (['op'], {}), '(op)\n', (4062, 4066), False, 'import pickle\n'), ((9411, 9427), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (9421, 9427), False, 'import json\n'), ((13732, 13748), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (13742, 13748), False, 'import json\n'), ((1905, 1949), 'os.path.join', 'os.path.join', (['self.vocab_dir', "(key + '.vocab')"], {}), "(self.vocab_dir, key + '.vocab')\n", (1917, 1949), False, 'import os\n'), ((14776, 14795), 'event.arguments.prepare.slot_processor.get_simple_dep', 'get_simple_dep', (['dep'], {}), '(dep)\n', (14790, 14795), False, 'from event.arguments.prepare.slot_processor import get_simple_dep, is_propbank_dep\n'), ((15425, 15440), 'pdb.set_trace', 'pdb.set_trace', ([], {}), '()\n', (15438, 15440), False, 'import pdb\n'), ((2756, 2787), 'os.path.join', 'os.path.join', (['vocab_dir', 'f_name'], {}), '(vocab_dir, f_name)\n', (2768, 2787), False, 'import os\n'), ((14842, 14862), 'event.arguments.prepare.slot_processor.is_propbank_dep', 'is_propbank_dep', (['dep'], {}), '(dep)\n', (14857, 14862), False, 'from event.arguments.prepare.slot_processor import get_simple_dep, is_propbank_dep\n')] |
dexinl/kids_math | 20.py | 48f6c37e221bbd2484ad19861c61e5ed7d3aa09e | #!/usr/bin/python
import random
count = 20
test_set = []
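# Build 20 unique addition problems with both addends of at least 2 and sums
# of at most 19, i.e. practice sums within 20.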
while count:
a = random.randrange(3, 20)
b = random.randrange(3, 20)
if a > b and a - b > 1:
if (b, a-b) not in test_set:
test_set.append((b, a-b))
count -= 1
elif b > a and b - a > 1:
if (a, b-a) not in test_set:
test_set.append((a, b-a))
count -= 1
else:
continue
for (a, b) in test_set:
print(" %2d + %2d = " % (a, b))
| [] |
xlam/autovirt | autovirt/equipment/domain/equipment.py | a19f9237c8b1123ce4f4b8b396dc88122019d4f8 | from enum import Enum
from functools import reduce
from math import ceil
from typing import Optional, Tuple
from autovirt import utils
from autovirt.structs import UnitEquipment, RepairOffer
logger = utils.get_logger()
# maximum allowed equipment price
PRICE_MAX = 100000
# value to add and sub from offer quality when filtering
QUALITY_DELTA = 3
class QualityType(Enum):
INSTALLED = "quality"
REQUIRED = "quality_required"
def quantity_to_repair(units: list[UnitEquipment]) -> int:
"""Calculate total quantity of equipment to repair on given units"""
return sum([unit.wear_quantity for unit in units])
def quantity_total(units: list[UnitEquipment]) -> int:
"""Calculate total equipment count on given units"""
return sum([unit.quantity for unit in units])
def filter_offers(
offers: list[RepairOffer], quality: float, quantity: int
) -> list[RepairOffer]:
# keep offers with quality in the open interval (quality - QUALITY_DELTA,
# quality + QUALITY_DELTA) that have enough repair parts and an acceptable price
filtered = list(filter(lambda x: x.quality > quality - QUALITY_DELTA, offers))
filtered = list(filter(lambda x: x.quality < quality + QUALITY_DELTA, filtered))
filtered = list(filter(lambda x: x.quantity > quantity, filtered))
filtered = list(filter(lambda x: x.price < PRICE_MAX, filtered))
return filtered
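# Hedged example: with quality=30 and QUALITY_DELTA=3, only offers whose
# quality lies strictly between 27 and 33, that stock more parts than needed,
# and that cost less than PRICE_MAX survive the filter.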
def expected_quality(
qual_rep: float, qual_inst: float, items_total: int, items_wear: int
) -> float:
return (
qual_inst * (items_total - items_wear) + qual_rep * items_wear
) / items_total
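# Hedged worked example (the value follows from the formula above): repairing
# 20 of 100 items at repair quality 40 when the installed quality is 30 gives
# (30 * 80 + 40 * 20) / 100:
#   >>> expected_quality(40.0, 30.0, 100, 20)
#   32.0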
def select_offer(
offers: list[RepairOffer], units: list[UnitEquipment], quality: float = None
) -> RepairOffer:
if not quality:
quality = units[0].quality_required
qnt_rep = quantity_to_repair(units)
qnt_total = quantity_total(units)
qual_min = utils.get_min(units, QualityType.INSTALLED.value)
qual_exp = [
expected_quality(o.quality, qual_min, qnt_total, qnt_rep) for o in offers
]
qual_diff = [abs(qual - quality) for qual in qual_exp]
diff_norm = utils.normalize_array(qual_diff)
price_norm = utils.normalize_array([o.price for o in offers])
qp_dist = [p + q for (p, q) in zip(price_norm, diff_norm)]
summary: list = [
[o, price_norm[i], qual_exp[i], qual_diff[i], diff_norm[i], qp_dist[i]]
for i, o in enumerate(offers)
if qual_exp[i] >= quality
]
logger.info(f"listing filtered offers for quality of {quality}:")
for o in summary:
logger.info(
f"id: {o[0].id}, quality: {o[0].quality}, price: {o[0].price},"
f" quantity: {o[0].quantity}, qual_exp: {o[2]:.2f}, qp: {o[5]:.3f}"
)
minimum_qp_item = reduce(lambda x, y: x if x[5] < y[5] else y, summary)
return minimum_qp_item[0]
def select_offer_to_raise_quality(
unit: UnitEquipment, offers: list[RepairOffer], margin: float = 0
) -> Optional[Tuple[RepairOffer, int]]:
required = unit.quality_required + margin
quality_coeff = unit.quantity * (required - unit.quality)
offers = list(filter(lambda o: o.quality >= required, offers))
if not offers:
return None
offer = offers[0]
count_to_replace = ceil(quality_coeff / (offer.quality - unit.quality))
price = count_to_replace * offer.price
for offer_ in offers[1:]:
count = ceil(quality_coeff / (offer_.quality - unit.quality))
price_ = count * offer_.price
if price_ < price:
offer = offer_
count_to_replace = count
return offer, count_to_replace
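# Hedged worked example for the count formula above: with 10 installed items
# at quality 20 and a required quality of 30 (margin 0), quality_coeff is
# 10 * (30 - 20) = 100, so an offer at quality 45 needs
# ceil(100 / (45 - 20)) = 4 replacement parts.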
def split_by_quality(
units: list[UnitEquipment], quality_type: QualityType = QualityType.REQUIRED
) -> dict[float, list[UnitEquipment]]:
"""Split units by quality (required or installed)"""
res: dict[float, list[UnitEquipment]] = {}
for unit in units:
quality = getattr(unit, quality_type.value)
if quality not in res.keys():
res[quality] = []
res[quality].append(unit)
return res
def split_mismatch_quality_units(
units: list[UnitEquipment],
) -> tuple[list[UnitEquipment], list[UnitEquipment]]:
"""Split units into 'normal' and 'mismatch' groups.
Mismatched units have installed equipment of lower quality than required.
We need to treat them in a different manner than normal units while repairing.
"""
normal = []
mismatch = []
for unit in units:
if unit.quality < unit.quality_required:
mismatch.append(unit)
else:
normal.append(unit)
return normal, mismatch
| [((247, 265), 'autovirt.utils.get_logger', 'utils.get_logger', ([], {}), '()\n', (263, 265), False, 'from autovirt import utils\n'), ((1852, 1901), 'autovirt.utils.get_min', 'utils.get_min', (['units', 'QualityType.INSTALLED.value'], {}), '(units, QualityType.INSTALLED.value)\n', (1865, 1901), False, 'from autovirt import utils\n'), ((2082, 2114), 'autovirt.utils.normalize_array', 'utils.normalize_array', (['qual_diff'], {}), '(qual_diff)\n', (2103, 2114), False, 'from autovirt import utils\n'), ((2132, 2180), 'autovirt.utils.normalize_array', 'utils.normalize_array', (['[o.price for o in offers]'], {}), '([o.price for o in offers])\n', (2153, 2180), False, 'from autovirt import utils\n'), ((2728, 2781), 'functools.reduce', 'reduce', (['(lambda x, y: x if x[5] < y[5] else y)', 'summary'], {}), '(lambda x, y: x if x[5] < y[5] else y, summary)\n', (2734, 2781), False, 'from functools import reduce\n'), ((3218, 3270), 'math.ceil', 'ceil', (['(quality_coeff / (offer.quality - unit.quality))'], {}), '(quality_coeff / (offer.quality - unit.quality))\n', (3222, 3270), False, 'from math import ceil\n'), ((3360, 3413), 'math.ceil', 'ceil', (['(quality_coeff / (offer_.quality - unit.quality))'], {}), '(quality_coeff / (offer_.quality - unit.quality))\n', (3364, 3413), False, 'from math import ceil\n')] |
mtn/advent16 | day09/part2.py | 0df34237485ee1246532e9eda0ef643e6950d13e | #!/usr/bin/env python3
import re
with open("input.txt") as f:
content = f.read().strip()
def ulen(content):
ans = 0
i = 0
while i < len(content):
if content[i] == "(":
end = content[i:].find(")") + i
instr = content[i+1:end]
chars, times = map(int, content[i+1:end].split("x"))
to_copy = content[end+1:end+1+chars]
to_copy_len = ulen(to_copy)
ans += times * to_copy_len
i = end + 1 + chars
else:
ans += 1
i += 1
return ans
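# Sanity checks (hedged; these figures are the worked examples from the
# puzzle text): markers inside a copied section are expanded recursively.
assert ulen("(3x3)XYZ") == 9
assert ulen("X(8x2)(3x3)ABCY") == 20
assert ulen("(27x12)(20x12)(13x14)(7x10)(1x12)A") == 241920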
print(ulen(content))
| [] |
Nexuscompute/Cirq | cirq-core/cirq/contrib/quimb/mps_simulator_test.py | 640ef8f82d6a56ec95361388ce7976e096cca906 | # pylint: disable=wrong-or-nonexistent-copyright-notice
import itertools
import math
import numpy as np
import pytest
import sympy
import cirq
import cirq.contrib.quimb as ccq
import cirq.testing
from cirq import value
def assert_same_output_as_dense(circuit, qubit_order, initial_state=0, grouping=None):
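# Run the circuit on both the MPS simulator and the dense state-vector
# simulator, then check that the final states agree numerically and that no
# measurements were recorded.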
mps_simulator = ccq.mps_simulator.MPSSimulator(grouping=grouping)
ref_simulator = cirq.Simulator()
actual = mps_simulator.simulate(circuit, qubit_order=qubit_order, initial_state=initial_state)
expected = ref_simulator.simulate(circuit, qubit_order=qubit_order, initial_state=initial_state)
np.testing.assert_allclose(
actual.final_state.to_numpy(), expected.final_state_vector, atol=1e-4
)
assert len(actual.measurements) == 0
def test_various_gates_1d():
gate_op_cls = [cirq.I, cirq.H, cirq.X, cirq.Y, cirq.Z, cirq.T]
cross_gate_op_cls = [cirq.CNOT, cirq.SWAP]
q0, q1 = cirq.LineQubit.range(2)
for q0_gate_op in gate_op_cls:
for q1_gate_op in gate_op_cls:
for cross_gate_op in cross_gate_op_cls:
circuit = cirq.Circuit(q0_gate_op(q0), q1_gate_op(q1), cross_gate_op(q0, q1))
for initial_state in range(2 * 2):
assert_same_output_as_dense(
circuit=circuit, qubit_order=[q0, q1], initial_state=initial_state
)
def test_various_gates_1d_flip():
q0, q1 = cirq.LineQubit.range(2)
circuit = cirq.Circuit(cirq.H(q1), cirq.CNOT(q1, q0))
assert_same_output_as_dense(circuit=circuit, qubit_order=[q0, q1])
assert_same_output_as_dense(circuit=circuit, qubit_order=[q1, q0])
def test_various_gates_2d():
gate_op_cls = [cirq.I, cirq.H]
cross_gate_op_cls = [cirq.CNOT, cirq.SWAP]
q0, q1, q2, q3, q4, q5 = cirq.GridQubit.rect(3, 2)
for q0_gate_op in gate_op_cls:
for q1_gate_op in gate_op_cls:
for q2_gate_op in gate_op_cls:
for q3_gate_op in gate_op_cls:
for cross_gate_op1 in cross_gate_op_cls:
for cross_gate_op2 in cross_gate_op_cls:
circuit = cirq.Circuit(
q0_gate_op(q0),
q1_gate_op(q1),
cross_gate_op1(q0, q1),
q2_gate_op(q2),
q3_gate_op(q3),
cross_gate_op2(q3, q1),
)
assert_same_output_as_dense(
circuit=circuit, qubit_order=[q0, q1, q2, q3, q4, q5]
)
def test_grouping():
q0, q1, q2 = cirq.LineQubit.range(3)
circuit = cirq.Circuit(
cirq.X(q0) ** 0.1,
cirq.Y(q1) ** 0.2,
cirq.Z(q2) ** 0.3,
cirq.CNOT(q0, q1),
cirq.Y(q1) ** 0.4,
)
groupings = [
None,
{q0: 0, q1: 1, q2: 2},
{q0: 0, q1: 0, q2: 1},
{q0: 0, q1: 1, q2: 0},
{q0: 1, q1: 0, q2: 0},
{q0: 0, q1: 0, q2: 0},
]
for grouping in groupings:
for initial_state in range(2 * 2 * 2):
assert_same_output_as_dense(
circuit=circuit,
qubit_order=[q0, q1, q2],
initial_state=initial_state,
grouping=grouping,
)
def test_grouping_does_not_overlap():
q0, q1 = cirq.LineQubit.range(2)
mps_simulator = ccq.mps_simulator.MPSSimulator(grouping={q0: 0})
with pytest.raises(ValueError, match="Grouping must cover exactly the qubits"):
mps_simulator.simulate(cirq.Circuit(), qubit_order={q0: 0, q1: 1})
def test_same_partial_trace():
qubit_order = cirq.LineQubit.range(2)
q0, q1 = qubit_order
mps_simulator = ccq.mps_simulator.MPSSimulator()
for _ in range(50):
for initial_state in range(4):
circuit = cirq.testing.random_circuit(qubit_order, 3, 0.9)
expected_density_matrix = cirq.final_density_matrix(
circuit, qubit_order=qubit_order, initial_state=initial_state
)
expected_partial_trace = cirq.partial_trace(
expected_density_matrix.reshape(2, 2, 2, 2), keep_indices=[0]
)
final_state = mps_simulator.simulate(
circuit, qubit_order=qubit_order, initial_state=initial_state
).final_state
actual_density_matrix = final_state.partial_trace([q0, q1])
actual_partial_trace = final_state.partial_trace([q0])
np.testing.assert_allclose(actual_density_matrix, expected_density_matrix, atol=1e-4)
np.testing.assert_allclose(actual_partial_trace, expected_partial_trace, atol=1e-4)
def test_probs_dont_sum_up_to_one():
q0 = cirq.NamedQid('q0', dimension=2)
circuit = cirq.Circuit(cirq.measure(q0))
simulator = ccq.mps_simulator.MPSSimulator(
simulation_options=ccq.mps_simulator.MPSOptions(sum_prob_atol=-0.5)
)
with pytest.raises(ValueError, match="Sum of probabilities exceeds tolerance"):
simulator.run(circuit, repetitions=1)
def test_empty():
q0 = cirq.NamedQid('q0', dimension=2)
q1 = cirq.NamedQid('q1', dimension=3)
q2 = cirq.NamedQid('q2', dimension=5)
circuit = cirq.Circuit()
for initial_state in range(2 * 3 * 5):
assert_same_output_as_dense(
circuit=circuit, qubit_order=[q0, q1, q2], initial_state=initial_state
)
def test_cnot():
q0, q1 = cirq.LineQubit.range(2)
circuit = cirq.Circuit(cirq.CNOT(q0, q1))
for initial_state in range(4):
assert_same_output_as_dense(
circuit=circuit, qubit_order=[q0, q1], initial_state=initial_state
)
def test_cnot_flipped():
q0, q1 = cirq.LineQubit.range(2)
circuit = cirq.Circuit(cirq.CNOT(q1, q0))
for initial_state in range(4):
assert_same_output_as_dense(
circuit=circuit, qubit_order=[q0, q1], initial_state=initial_state
)
def test_simulation_state():
q0, q1 = qubit_order = cirq.LineQubit.range(2)
circuit = cirq.Circuit(cirq.CNOT(q1, q0))
mps_simulator = ccq.mps_simulator.MPSSimulator()
ref_simulator = cirq.Simulator()
for initial_state in range(4):
args = mps_simulator._create_simulation_state(initial_state=initial_state, qubits=(q0, q1))
actual = mps_simulator.simulate(circuit, qubit_order=qubit_order, initial_state=args)
expected = ref_simulator.simulate(
circuit, qubit_order=qubit_order, initial_state=initial_state
)
np.testing.assert_allclose(
actual.final_state.to_numpy(), expected.final_state_vector, atol=1e-4
)
assert len(actual.measurements) == 0
def test_three_qubits():
q0, q1, q2 = cirq.LineQubit.range(3)
circuit = cirq.Circuit(cirq.CCX(q0, q1, q2))
with pytest.raises(ValueError, match="Can only handle 1 and 2 qubit operations"):
assert_same_output_as_dense(circuit=circuit, qubit_order=[q0, q1, q2])
def test_measurement_1qubit():
q0, q1 = cirq.LineQubit.range(2)
circuit = cirq.Circuit(cirq.X(q0), cirq.H(q1), cirq.measure(q1))
simulator = ccq.mps_simulator.MPSSimulator()
result = simulator.run(circuit, repetitions=100)
assert sum(result.measurements['q(1)'])[0] < 80
assert sum(result.measurements['q(1)'])[0] > 20
def test_reset():
q = cirq.LineQubit(0)
simulator = ccq.mps_simulator.MPSSimulator()
c = cirq.Circuit(cirq.X(q), cirq.reset(q), cirq.measure(q))
assert simulator.sample(c)['q(0)'][0] == 0
c = cirq.Circuit(cirq.H(q), cirq.reset(q), cirq.measure(q))
assert simulator.sample(c)['q(0)'][0] == 0
c = cirq.Circuit(cirq.reset(q), cirq.measure(q))
assert simulator.sample(c)['q(0)'][0] == 0
def test_measurement_2qubits():
q0, q1, q2 = cirq.LineQubit.range(3)
circuit = cirq.Circuit(cirq.H(q0), cirq.H(q1), cirq.H(q2), cirq.measure(q0, q2))
simulator = ccq.mps_simulator.MPSSimulator()
repetitions = 1024
measurement = simulator.run(circuit, repetitions=repetitions).measurements['q(0),q(2)']
result_counts = {'00': 0, '01': 0, '10': 0, '11': 0}
for i in range(repetitions):
key = str(measurement[i, 0]) + str(measurement[i, 1])
result_counts[key] += 1
for result_count in result_counts.values():
# Expected value is 1/4:
assert result_count > repetitions * 0.15
assert result_count < repetitions * 0.35
def test_measurement_str():
q0 = cirq.NamedQid('q0', dimension=3)
circuit = cirq.Circuit(cirq.measure(q0))
simulator = ccq.mps_simulator.MPSSimulator()
result = simulator.run(circuit, repetitions=7)
assert str(result) == "q0 (d=3)=0000000"
def test_trial_result_str():
q0 = cirq.LineQubit(0)
final_simulator_state = ccq.mps_simulator.MPSState(
qubits=(q0,),
prng=value.parse_random_state(0),
simulation_options=ccq.mps_simulator.MPSOptions(),
)
result = ccq.mps_simulator.MPSTrialResult(
params=cirq.ParamResolver({}),
measurements={'m': np.array([[1]])},
final_simulator_state=final_simulator_state,
)
assert 'output state: TensorNetwork' in str(result)
def test_trial_result_repr_pretty():
q0 = cirq.LineQubit(0)
final_simulator_state = ccq.mps_simulator.MPSState(
qubits=(q0,),
prng=value.parse_random_state(0),
simulation_options=ccq.mps_simulator.MPSOptions(),
)
result = ccq.mps_simulator.MPSTrialResult(
params=cirq.ParamResolver({}),
measurements={'m': np.array([[1]])},
final_simulator_state=final_simulator_state,
)
cirq.testing.assert_repr_pretty_contains(result, 'output state: TensorNetwork')
cirq.testing.assert_repr_pretty(result, "cirq.MPSTrialResult(...)", cycle=True)
def test_empty_step_result():
q0 = cirq.LineQubit(0)
sim = ccq.mps_simulator.MPSSimulator()
step_result = next(sim.simulate_moment_steps(cirq.Circuit(cirq.measure(q0))))
assert 'TensorNetwork' in str(step_result)
def test_step_result_repr_pretty():
q0 = cirq.LineQubit(0)
sim = ccq.mps_simulator.MPSSimulator()
step_result = next(sim.simulate_moment_steps(cirq.Circuit(cirq.measure(q0))))
cirq.testing.assert_repr_pretty_contains(step_result, 'TensorNetwork')
cirq.testing.assert_repr_pretty(step_result, "cirq.MPSSimulatorStepResult(...)", cycle=True)
def test_state_equal():
q0, q1 = cirq.LineQubit.range(2)
state0 = ccq.mps_simulator.MPSState(
qubits=(q0,),
prng=value.parse_random_state(0),
simulation_options=ccq.mps_simulator.MPSOptions(cutoff=1e-3, sum_prob_atol=1e-3),
)
state1a = ccq.mps_simulator.MPSState(
qubits=(q1,),
prng=value.parse_random_state(0),
simulation_options=ccq.mps_simulator.MPSOptions(cutoff=1e-3, sum_prob_atol=1e-3),
)
state1b = ccq.mps_simulator.MPSState(
qubits=(q1,),
prng=value.parse_random_state(0),
simulation_options=ccq.mps_simulator.MPSOptions(cutoff=1729.0, sum_prob_atol=1e-3),
)
assert state0 == state0
assert state0 != state1a
assert state1a != state1b
def test_random_circuits_equal_more_rows():
circuit = cirq.testing.random_circuit(
qubits=cirq.GridQubit.rect(3, 2), n_moments=6, op_density=1.0
)
qubits = circuit.all_qubits()
assert_same_output_as_dense(circuit, qubits)
def test_supremacy_equal_more_cols():
circuit = cirq.testing.random_circuit(
qubits=cirq.GridQubit.rect(2, 3), n_moments=6, op_density=1.0
)
qubits = circuit.all_qubits()
assert_same_output_as_dense(circuit, qubits)
def test_tensor_index_names():
qubits = cirq.LineQubit.range(12)
qubit_map = {qubit: i for i, qubit in enumerate(qubits)}
state = ccq.mps_simulator.MPSState(qubits=qubit_map, prng=value.parse_random_state(0))
assert state.i_str(0) == "i_00"
assert state.i_str(11) == "i_11"
assert state.mu_str(0, 3) == "mu_0_3"
assert state.mu_str(3, 0) == "mu_0_3"
def test_simulate_moment_steps_sample():
q0, q1 = cirq.LineQubit.range(2)
circuit = cirq.Circuit(cirq.H(q0), cirq.CNOT(q0, q1))
simulator = ccq.mps_simulator.MPSSimulator()
for i, step in enumerate(simulator.simulate_moment_steps(circuit)):
if i == 0:
np.testing.assert_almost_equal(
step._simulator_state().to_numpy(),
np.asarray([1.0 / math.sqrt(2), 0.0, 1.0 / math.sqrt(2), 0.0]),
)
# There are two "Tensor()" copies in the string.
assert len(str(step).split('Tensor(')) == 3
samples = step.sample([q0, q1], repetitions=10)
for sample in samples:
assert np.array_equal(sample, [True, False]) or np.array_equal(
sample, [False, False]
)
np.testing.assert_almost_equal(
step._simulator_state().to_numpy(),
np.asarray([1.0 / math.sqrt(2), 0.0, 1.0 / math.sqrt(2), 0.0]),
)
else:
np.testing.assert_almost_equal(
step._simulator_state().to_numpy(),
np.asarray([1.0 / math.sqrt(2), 0.0, 0.0, 1.0 / math.sqrt(2)]),
)
# There are two "Tensor()" copies in the string.
assert len(str(step).split('Tensor(')) == 3
samples = step.sample([q0, q1], repetitions=10)
for sample in samples:
assert np.array_equal(sample, [True, True]) or np.array_equal(
sample, [False, False]
)
def test_sample_seed():
q = cirq.NamedQubit('q')
circuit = cirq.Circuit(cirq.H(q), cirq.measure(q))
simulator = ccq.mps_simulator.MPSSimulator(seed=1234)
result = simulator.run(circuit, repetitions=20)
measured = result.measurements['q']
result_string = ''.join(map(lambda x: str(int(x[0])), measured))
assert result_string == '01011001110111011011'
def test_run_no_repetitions():
q0 = cirq.LineQubit(0)
simulator = ccq.mps_simulator.MPSSimulator()
circuit = cirq.Circuit(cirq.H(q0), cirq.measure(q0))
result = simulator.run(circuit, repetitions=0)
assert len(result.measurements['q(0)']) == 0
def test_run_parameters_not_resolved():
a = cirq.LineQubit(0)
simulator = ccq.mps_simulator.MPSSimulator()
circuit = cirq.Circuit(cirq.XPowGate(exponent=sympy.Symbol('a'))(a), cirq.measure(a))
with pytest.raises(ValueError, match='symbols were not specified'):
_ = simulator.run_sweep(circuit, cirq.ParamResolver({}))
def test_deterministic_gate_noise():
q = cirq.LineQubit(0)
circuit = cirq.Circuit(cirq.I(q), cirq.measure(q))
simulator1 = ccq.mps_simulator.MPSSimulator(noise=cirq.X)
result1 = simulator1.run(circuit, repetitions=10)
simulator2 = ccq.mps_simulator.MPSSimulator(noise=cirq.X)
result2 = simulator2.run(circuit, repetitions=10)
assert result1 == result2
simulator3 = ccq.mps_simulator.MPSSimulator(noise=cirq.Z)
result3 = simulator3.run(circuit, repetitions=10)
assert result1 != result3
def test_nondeterministic_mixture_noise():
q = cirq.LineQubit(0)
circuit = cirq.Circuit(cirq.I(q), cirq.measure(q))
simulator = ccq.mps_simulator.MPSSimulator(
noise=cirq.ConstantQubitNoiseModel(cirq.depolarize(0.5))
)
result1 = simulator.run(circuit, repetitions=50)
result2 = simulator.run(circuit, repetitions=50)
assert result1 != result2
def test_unsupported_noise_fails():
with pytest.raises(ValueError, match='noise must be unitary or mixture but was'):
ccq.mps_simulator.MPSSimulator(noise=cirq.amplitude_damp(0.5))
def test_state_copy():
sim = ccq.mps_simulator.MPSSimulator()
q = cirq.LineQubit(0)
circuit = cirq.Circuit(cirq.H(q), cirq.H(q))
state_Ms = []
for step in sim.simulate_moment_steps(circuit):
state_Ms.append(step.state.M)
for x, y in itertools.combinations(state_Ms, 2):
assert len(x) == len(y)
for i in range(len(x)):
assert not np.shares_memory(x[i], y[i])
def test_simulation_state_initializer():
s = ccq.mps_simulator.MPSState(
qubits=(cirq.LineQubit(0),),
prng=np.random.RandomState(0),
classical_data=cirq.ClassicalDataDictionaryStore(
_records={cirq.MeasurementKey('test'): [(4,)]}
),
)
assert s.qubits == (cirq.LineQubit(0),)
assert s.log_of_measurement_results == {'test': [4]}
def test_act_on_gate():
args = ccq.mps_simulator.MPSState(qubits=cirq.LineQubit.range(3), prng=np.random.RandomState(0))
cirq.act_on(cirq.X, args, [cirq.LineQubit(1)])
np.testing.assert_allclose(
args.state_vector().reshape((2, 2, 2)),
cirq.one_hot(index=(0, 1, 0), shape=(2, 2, 2), dtype=np.complex64),
)
def test_deprecated():
prng = np.random.RandomState(0)
with cirq.testing.assert_deprecated('log_of_measurement_results', deadline='0.16', count=2):
_ = ccq.mps_simulator.MPSState(
qubits=cirq.LineQubit.range(3), prng=prng, log_of_measurement_results={}
)
with cirq.testing.assert_deprecated('positional', deadline='0.16'):
_ = ccq.mps_simulator.MPSState(cirq.LineQubit.range(3), prng=prng)
| [((330, 379), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {'grouping': 'grouping'}), '(grouping=grouping)\n', (360, 379), True, 'import cirq.contrib.quimb as ccq\n'), ((400, 416), 'cirq.Simulator', 'cirq.Simulator', ([], {}), '()\n', (414, 416), False, 'import cirq\n'), ((934, 957), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(2)'], {}), '(2)\n', (954, 957), False, 'import cirq\n'), ((1441, 1464), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(2)'], {}), '(2)\n', (1461, 1464), False, 'import cirq\n'), ((1810, 1835), 'cirq.GridQubit.rect', 'cirq.GridQubit.rect', (['(3)', '(2)'], {}), '(3, 2)\n', (1829, 1835), False, 'import cirq\n'), ((2726, 2749), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(3)'], {}), '(3)\n', (2746, 2749), False, 'import cirq\n'), ((3456, 3479), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(2)'], {}), '(2)\n', (3476, 3479), False, 'import cirq\n'), ((3500, 3548), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {'grouping': '{q0: 0}'}), '(grouping={q0: 0})\n', (3530, 3548), True, 'import cirq.contrib.quimb as ccq\n'), ((3760, 3783), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(2)'], {}), '(2)\n', (3780, 3783), False, 'import cirq\n'), ((3830, 3862), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {}), '()\n', (3860, 3862), True, 'import cirq.contrib.quimb as ccq\n'), ((4841, 4873), 'cirq.NamedQid', 'cirq.NamedQid', (['"""q0"""'], {'dimension': '(2)'}), "('q0', dimension=2)\n", (4854, 4873), False, 'import cirq\n'), ((5210, 5242), 'cirq.NamedQid', 'cirq.NamedQid', (['"""q0"""'], {'dimension': '(2)'}), "('q0', dimension=2)\n", (5223, 5242), False, 'import cirq\n'), ((5252, 5284), 'cirq.NamedQid', 'cirq.NamedQid', (['"""q1"""'], {'dimension': '(3)'}), "('q1', dimension=3)\n", (5265, 5284), False, 'import cirq\n'), ((5294, 5326), 'cirq.NamedQid', 'cirq.NamedQid', (['"""q2"""'], {'dimension': '(5)'}), "('q2', dimension=5)\n", (5307, 5326), False, 'import cirq\n'), ((5341, 5355), 'cirq.Circuit', 'cirq.Circuit', ([], {}), '()\n', (5353, 5355), False, 'import cirq\n'), ((5562, 5585), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(2)'], {}), '(2)\n', (5582, 5585), False, 'import cirq\n'), ((5834, 5857), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(2)'], {}), '(2)\n', (5854, 5857), False, 'import cirq\n'), ((6124, 6147), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(2)'], {}), '(2)\n', (6144, 6147), False, 'import cirq\n'), ((6214, 6246), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {}), '()\n', (6244, 6246), True, 'import cirq.contrib.quimb as ccq\n'), ((6267, 6283), 'cirq.Simulator', 'cirq.Simulator', ([], {}), '()\n', (6281, 6283), False, 'import cirq\n'), ((6857, 6880), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(3)'], {}), '(3)\n', (6877, 6880), False, 'import cirq\n'), ((7142, 7165), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(2)'], {}), '(2)\n', (7162, 7165), False, 'import cirq\n'), ((7252, 7284), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {}), '()\n', (7282, 7284), True, 'import cirq.contrib.quimb as ccq\n'), ((7471, 7488), 'cirq.LineQubit', 'cirq.LineQubit', (['(0)'], {}), '(0)\n', (7485, 7488), False, 'import cirq\n'), ((7505, 7537), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {}), '()\n', (7535, 7537), True, 'import cirq.contrib.quimb as ccq\n'), 
((7911, 7934), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(3)'], {}), '(3)\n', (7931, 7934), False, 'import cirq\n'), ((8037, 8069), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {}), '()\n', (8067, 8069), True, 'import cirq.contrib.quimb as ccq\n'), ((8590, 8622), 'cirq.NamedQid', 'cirq.NamedQid', (['"""q0"""'], {'dimension': '(3)'}), "('q0', dimension=3)\n", (8603, 8622), False, 'import cirq\n'), ((8685, 8717), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {}), '()\n', (8715, 8717), True, 'import cirq.contrib.quimb as ccq\n'), ((8855, 8872), 'cirq.LineQubit', 'cirq.LineQubit', (['(0)'], {}), '(0)\n', (8869, 8872), False, 'import cirq\n'), ((9352, 9369), 'cirq.LineQubit', 'cirq.LineQubit', (['(0)'], {}), '(0)\n', (9366, 9369), False, 'import cirq\n'), ((9749, 9828), 'cirq.testing.assert_repr_pretty_contains', 'cirq.testing.assert_repr_pretty_contains', (['result', '"""output state: TensorNetwork"""'], {}), "(result, 'output state: TensorNetwork')\n", (9789, 9828), False, 'import cirq\n'), ((9833, 9912), 'cirq.testing.assert_repr_pretty', 'cirq.testing.assert_repr_pretty', (['result', '"""cirq.MPSTrialResult(...)"""'], {'cycle': '(True)'}), "(result, 'cirq.MPSTrialResult(...)', cycle=True)\n", (9864, 9912), False, 'import cirq\n'), ((9954, 9971), 'cirq.LineQubit', 'cirq.LineQubit', (['(0)'], {}), '(0)\n', (9968, 9971), False, 'import cirq\n'), ((9982, 10014), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {}), '()\n', (10012, 10014), True, 'import cirq.contrib.quimb as ccq\n'), ((10191, 10208), 'cirq.LineQubit', 'cirq.LineQubit', (['(0)'], {}), '(0)\n', (10205, 10208), False, 'import cirq\n'), ((10219, 10251), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {}), '()\n', (10249, 10251), True, 'import cirq.contrib.quimb as ccq\n'), ((10338, 10408), 'cirq.testing.assert_repr_pretty_contains', 'cirq.testing.assert_repr_pretty_contains', (['step_result', '"""TensorNetwork"""'], {}), "(step_result, 'TensorNetwork')\n", (10378, 10408), False, 'import cirq\n'), ((10413, 10509), 'cirq.testing.assert_repr_pretty', 'cirq.testing.assert_repr_pretty', (['step_result', '"""cirq.MPSSimulatorStepResult(...)"""'], {'cycle': '(True)'}), "(step_result,\n 'cirq.MPSSimulatorStepResult(...)', cycle=True)\n", (10444, 10509), False, 'import cirq\n'), ((10545, 10568), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(2)'], {}), '(2)\n', (10565, 10568), False, 'import cirq\n'), ((11799, 11823), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(12)'], {}), '(12)\n', (11819, 11823), False, 'import cirq\n'), ((12190, 12213), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(2)'], {}), '(2)\n', (12210, 12213), False, 'import cirq\n'), ((12289, 12321), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {}), '()\n', (12319, 12321), True, 'import cirq.contrib.quimb as ccq\n'), ((13737, 13757), 'cirq.NamedQubit', 'cirq.NamedQubit', (['"""q"""'], {}), "('q')\n", (13752, 13757), False, 'import cirq\n'), ((13829, 13870), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {'seed': '(1234)'}), '(seed=1234)\n', (13859, 13870), True, 'import cirq.contrib.quimb as ccq\n'), ((14125, 14142), 'cirq.LineQubit', 'cirq.LineQubit', (['(0)'], {}), '(0)\n', (14139, 14142), False, 'import cirq\n'), ((14159, 14191), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 
'ccq.mps_simulator.MPSSimulator', ([], {}), '()\n', (14189, 14191), True, 'import cirq.contrib.quimb as ccq\n'), ((14399, 14416), 'cirq.LineQubit', 'cirq.LineQubit', (['(0)'], {}), '(0)\n', (14413, 14416), False, 'import cirq\n'), ((14433, 14465), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {}), '()\n', (14463, 14465), True, 'import cirq.contrib.quimb as ccq\n'), ((14740, 14757), 'cirq.LineQubit', 'cirq.LineQubit', (['(0)'], {}), '(0)\n', (14754, 14757), False, 'import cirq\n'), ((14831, 14875), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {'noise': 'cirq.X'}), '(noise=cirq.X)\n', (14861, 14875), True, 'import cirq.contrib.quimb as ccq\n'), ((14948, 14992), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {'noise': 'cirq.X'}), '(noise=cirq.X)\n', (14978, 14992), True, 'import cirq.contrib.quimb as ccq\n'), ((15096, 15140), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {'noise': 'cirq.Z'}), '(noise=cirq.Z)\n', (15126, 15140), True, 'import cirq.contrib.quimb as ccq\n'), ((15279, 15296), 'cirq.LineQubit', 'cirq.LineQubit', (['(0)'], {}), '(0)\n', (15293, 15296), False, 'import cirq\n'), ((15839, 15871), 'cirq.contrib.quimb.mps_simulator.MPSSimulator', 'ccq.mps_simulator.MPSSimulator', ([], {}), '()\n', (15869, 15871), True, 'import cirq.contrib.quimb as ccq\n'), ((15881, 15898), 'cirq.LineQubit', 'cirq.LineQubit', (['(0)'], {}), '(0)\n', (15895, 15898), False, 'import cirq\n'), ((16073, 16108), 'itertools.combinations', 'itertools.combinations', (['state_Ms', '(2)'], {}), '(state_Ms, 2)\n', (16095, 16108), False, 'import itertools\n'), ((16993, 17017), 'numpy.random.RandomState', 'np.random.RandomState', (['(0)'], {}), '(0)\n', (17014, 17017), True, 'import numpy as np\n'), ((1493, 1503), 'cirq.H', 'cirq.H', (['q1'], {}), '(q1)\n', (1499, 1503), False, 'import cirq\n'), ((1505, 1522), 'cirq.CNOT', 'cirq.CNOT', (['q1', 'q0'], {}), '(q1, q0)\n', (1514, 1522), False, 'import cirq\n'), ((2868, 2885), 'cirq.CNOT', 'cirq.CNOT', (['q0', 'q1'], {}), '(q0, q1)\n', (2877, 2885), False, 'import cirq\n'), ((3559, 3632), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Grouping must cover exactly the qubits"""'}), "(ValueError, match='Grouping must cover exactly the qubits')\n", (3572, 3632), False, 'import pytest\n'), ((4901, 4917), 'cirq.measure', 'cirq.measure', (['q0'], {}), '(q0)\n', (4913, 4917), False, 'import cirq\n'), ((5060, 5133), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Sum of probabilities exceeds tolerance"""'}), "(ValueError, match='Sum of probabilities exceeds tolerance')\n", (5073, 5133), False, 'import pytest\n'), ((5613, 5630), 'cirq.CNOT', 'cirq.CNOT', (['q0', 'q1'], {}), '(q0, q1)\n', (5622, 5630), False, 'import cirq\n'), ((5885, 5902), 'cirq.CNOT', 'cirq.CNOT', (['q1', 'q0'], {}), '(q1, q0)\n', (5894, 5902), False, 'import cirq\n'), ((6175, 6192), 'cirq.CNOT', 'cirq.CNOT', (['q1', 'q0'], {}), '(q1, q0)\n', (6184, 6192), False, 'import cirq\n'), ((6908, 6928), 'cirq.CCX', 'cirq.CCX', (['q0', 'q1', 'q2'], {}), '(q0, q1, q2)\n', (6916, 6928), False, 'import cirq\n'), ((6940, 7015), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Can only handle 1 and 2 qubit operations"""'}), "(ValueError, match='Can only handle 1 and 2 qubit operations')\n", (6953, 7015), False, 'import pytest\n'), ((7193, 7203), 'cirq.X', 'cirq.X', (['q0'], {}), '(q0)\n', (7199, 7203), False, 
'import cirq\n'), ((7205, 7215), 'cirq.H', 'cirq.H', (['q1'], {}), '(q1)\n', (7211, 7215), False, 'import cirq\n'), ((7217, 7233), 'cirq.measure', 'cirq.measure', (['q1'], {}), '(q1)\n', (7229, 7233), False, 'import cirq\n'), ((7559, 7568), 'cirq.X', 'cirq.X', (['q'], {}), '(q)\n', (7565, 7568), False, 'import cirq\n'), ((7570, 7583), 'cirq.reset', 'cirq.reset', (['q'], {}), '(q)\n', (7580, 7583), False, 'import cirq\n'), ((7585, 7600), 'cirq.measure', 'cirq.measure', (['q'], {}), '(q)\n', (7597, 7600), False, 'import cirq\n'), ((7670, 7679), 'cirq.H', 'cirq.H', (['q'], {}), '(q)\n', (7676, 7679), False, 'import cirq\n'), ((7681, 7694), 'cirq.reset', 'cirq.reset', (['q'], {}), '(q)\n', (7691, 7694), False, 'import cirq\n'), ((7696, 7711), 'cirq.measure', 'cirq.measure', (['q'], {}), '(q)\n', (7708, 7711), False, 'import cirq\n'), ((7781, 7794), 'cirq.reset', 'cirq.reset', (['q'], {}), '(q)\n', (7791, 7794), False, 'import cirq\n'), ((7796, 7811), 'cirq.measure', 'cirq.measure', (['q'], {}), '(q)\n', (7808, 7811), False, 'import cirq\n'), ((7962, 7972), 'cirq.H', 'cirq.H', (['q0'], {}), '(q0)\n', (7968, 7972), False, 'import cirq\n'), ((7974, 7984), 'cirq.H', 'cirq.H', (['q1'], {}), '(q1)\n', (7980, 7984), False, 'import cirq\n'), ((7986, 7996), 'cirq.H', 'cirq.H', (['q2'], {}), '(q2)\n', (7992, 7996), False, 'import cirq\n'), ((7998, 8018), 'cirq.measure', 'cirq.measure', (['q0', 'q2'], {}), '(q0, q2)\n', (8010, 8018), False, 'import cirq\n'), ((8650, 8666), 'cirq.measure', 'cirq.measure', (['q0'], {}), '(q0)\n', (8662, 8666), False, 'import cirq\n'), ((12241, 12251), 'cirq.H', 'cirq.H', (['q0'], {}), '(q0)\n', (12247, 12251), False, 'import cirq\n'), ((12253, 12270), 'cirq.CNOT', 'cirq.CNOT', (['q0', 'q1'], {}), '(q0, q1)\n', (12262, 12270), False, 'import cirq\n'), ((13785, 13794), 'cirq.H', 'cirq.H', (['q'], {}), '(q)\n', (13791, 13794), False, 'import cirq\n'), ((13796, 13811), 'cirq.measure', 'cirq.measure', (['q'], {}), '(q)\n', (13808, 13811), False, 'import cirq\n'), ((14219, 14229), 'cirq.H', 'cirq.H', (['q0'], {}), '(q0)\n', (14225, 14229), False, 'import cirq\n'), ((14231, 14247), 'cirq.measure', 'cirq.measure', (['q0'], {}), '(q0)\n', (14243, 14247), False, 'import cirq\n'), ((14539, 14554), 'cirq.measure', 'cirq.measure', (['a'], {}), '(a)\n', (14551, 14554), False, 'import cirq\n'), ((14565, 14626), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""symbols were not specified"""'}), "(ValueError, match='symbols were not specified')\n", (14578, 14626), False, 'import pytest\n'), ((14785, 14794), 'cirq.I', 'cirq.I', (['q'], {}), '(q)\n', (14791, 14794), False, 'import cirq\n'), ((14796, 14811), 'cirq.measure', 'cirq.measure', (['q'], {}), '(q)\n', (14808, 14811), False, 'import cirq\n'), ((15324, 15333), 'cirq.I', 'cirq.I', (['q'], {}), '(q)\n', (15330, 15333), False, 'import cirq\n'), ((15335, 15350), 'cirq.measure', 'cirq.measure', (['q'], {}), '(q)\n', (15347, 15350), False, 'import cirq\n'), ((15656, 15731), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""noise must be unitary or mixture but was"""'}), "(ValueError, match='noise must be unitary or mixture but was')\n", (15669, 15731), False, 'import pytest\n'), ((15926, 15935), 'cirq.H', 'cirq.H', (['q'], {}), '(q)\n', (15932, 15935), False, 'import cirq\n'), ((15937, 15946), 'cirq.H', 'cirq.H', (['q'], {}), '(q)\n', (15943, 15946), False, 'import cirq\n'), ((16883, 16949), 'cirq.one_hot', 'cirq.one_hot', ([], {'index': '(0, 1, 0)', 'shape': '(2, 2, 2)', 'dtype': 'np.complex64'}), '(index=(0, 1, 0), shape=(2, 2, 2), dtype=np.complex64)\n', (16895, 16949), False, 'import cirq\n'), ((17027, 17118), 'cirq.testing.assert_deprecated', 'cirq.testing.assert_deprecated', (['"""log_of_measurement_results"""'], {'deadline': '"""0.16"""', 'count': '(2)'}), "('log_of_measurement_results', deadline=\n    '0.16', count=2)\n", (17057, 17118), False, 'import cirq\n'), ((17259, 17320), 'cirq.testing.assert_deprecated', 'cirq.testing.assert_deprecated', (['"""positional"""'], {'deadline': '"""0.16"""'}), "('positional', deadline='0.16')\n", (17289, 17320), False, 'import cirq\n'), ((2787, 2797), 'cirq.X', 'cirq.X', (['q0'], {}), '(q0)\n', (2793, 2797), False, 'import cirq\n'), ((2814, 2824), 'cirq.Y', 'cirq.Y', (['q1'], {}), '(q1)\n', (2820, 2824), False, 'import cirq\n'), ((2841, 2851), 'cirq.Z', 'cirq.Z', (['q2'], {}), '(q2)\n', (2847, 2851), False, 'import cirq\n'), ((2895, 2905), 'cirq.Y', 'cirq.Y', (['q1'], {}), '(q1)\n', (2901, 2905), False, 'import cirq\n'), ((3665, 3679), 'cirq.Circuit', 'cirq.Circuit', ([], {}), '()\n', (3677, 3679), False, 'import cirq\n'), ((3949, 3997), 'cirq.testing.random_circuit', 'cirq.testing.random_circuit', (['qubit_order', '(3)', '(0.9)'], {}), '(qubit_order, 3, 0.9)\n', (3976, 3997), False, 'import cirq\n'), ((4036, 4129), 'cirq.final_density_matrix', 'cirq.final_density_matrix', (['circuit'], {'qubit_order': 'qubit_order', 'initial_state': 'initial_state'}), '(circuit, qubit_order=qubit_order, initial_state=\n    initial_state)\n', (4061, 4129), False, 'import cirq\n'), ((4611, 4702), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['actual_density_matrix', 'expected_density_matrix'], {'atol': '(0.0001)'}), '(actual_density_matrix, expected_density_matrix,\n    atol=0.0001)\n', (4637, 4702), True, 'import numpy as np\n'), ((4709, 4798), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['actual_partial_trace', 'expected_partial_trace'], {'atol': '(0.0001)'}), '(actual_partial_trace, expected_partial_trace,\n    atol=0.0001)\n', (4735, 4798), True, 'import numpy as np\n'), ((4995, 5043), 'cirq.contrib.quimb.mps_simulator.MPSOptions', 'ccq.mps_simulator.MPSOptions', ([], {'sum_prob_atol': '(-0.5)'}), '(sum_prob_atol=-0.5)\n', (5023, 5043), True, 'import cirq.contrib.quimb as ccq\n'), ((8964, 8991), 'cirq.value.parse_random_state', 'value.parse_random_state', (['(0)'], {}), '(0)\n', (8988, 8991), False, 'from cirq import value\n'), ((9020, 9050), 'cirq.contrib.quimb.mps_simulator.MPSOptions', 'ccq.mps_simulator.MPSOptions', ([], {}), '()\n', (9048, 9050), True, 'import cirq.contrib.quimb as ccq\n'), ((9120, 9142), 'cirq.ParamResolver', 'cirq.ParamResolver', (['{}'], {}), '({})\n', (9138, 9142), False, 'import cirq\n'), ((9461, 9488), 'cirq.value.parse_random_state', 'value.parse_random_state', (['(0)'], {}), '(0)\n', (9485, 9488), False, 'from cirq import value\n'), ((9517, 9547), 'cirq.contrib.quimb.mps_simulator.MPSOptions', 'ccq.mps_simulator.MPSOptions', ([], {}), '()\n', (9545, 9547), True, 'import cirq.contrib.quimb as ccq\n'), ((9617, 9639), 'cirq.ParamResolver', 'cirq.ParamResolver', (['{}'], {}), '({})\n', (9635, 9639), False, 'import cirq\n'), ((10645, 10672), 'cirq.value.parse_random_state', 'value.parse_random_state', (['(0)'], {}), '(0)\n', (10669, 10672), False, 'from cirq import value\n'), ((10701, 10764), 'cirq.contrib.quimb.mps_simulator.MPSOptions', 'ccq.mps_simulator.MPSOptions', ([], {'cutoff': '(0.001)', 'sum_prob_atol': '(0.001)'}), '(cutoff=0.001, sum_prob_atol=0.001)\n', (10729, 10764), True, 'import cirq.contrib.quimb as ccq\n'), ((10847, 10874), 'cirq.value.parse_random_state', 'value.parse_random_state', (['(0)'], {}), '(0)\n', (10871, 10874), False, 'from cirq import value\n'), ((10903, 10966), 'cirq.contrib.quimb.mps_simulator.MPSOptions', 'ccq.mps_simulator.MPSOptions', ([], {'cutoff': '(0.001)', 'sum_prob_atol': '(0.001)'}), '(cutoff=0.001, sum_prob_atol=0.001)\n', (10931, 10966), True, 'import cirq.contrib.quimb as ccq\n'), ((11049, 11076), 'cirq.value.parse_random_state', 'value.parse_random_state', (['(0)'], {}), '(0)\n', (11073, 11076), False, 'from cirq import value\n'), ((11105, 11169), 'cirq.contrib.quimb.mps_simulator.MPSOptions', 'ccq.mps_simulator.MPSOptions', ([], {'cutoff': '(1729.0)', 'sum_prob_atol': '(0.001)'}), '(cutoff=1729.0, sum_prob_atol=0.001)\n', (11133, 11169), True, 'import cirq.contrib.quimb as ccq\n'), ((11367, 11392), 'cirq.GridQubit.rect', 'cirq.GridQubit.rect', (['(3)', '(2)'], {}), '(3, 2)\n', (11386, 11392), False, 'import cirq\n'), ((11609, 11634), 'cirq.GridQubit.rect', 'cirq.GridQubit.rect', (['(2)', '(3)'], {}), '(2, 3)\n', (11628, 11634), False, 'import cirq\n'), ((11947, 11974), 'cirq.value.parse_random_state', 'value.parse_random_state', (['(0)'], {}), '(0)\n', (11971, 11974), False, 'from cirq import value\n'), ((14669, 14691), 'cirq.ParamResolver', 'cirq.ParamResolver', (['{}'], {}), '({})\n', (14687, 14691), False, 'import cirq\n'), ((16355, 16379), 'numpy.random.RandomState', 'np.random.RandomState', (['(0)'], {}), '(0)\n', (16376, 16379), True, 'import numpy as np\n'), ((16539, 16556), 'cirq.LineQubit', 'cirq.LineQubit', (['(0)'], {}), '(0)\n', (16553, 16556), False, 'import cirq\n'), ((16687, 16710), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(3)'], {}), '(3)\n', (16707, 16710), False, 'import cirq\n'), ((16717, 16741), 'numpy.random.RandomState', 'np.random.RandomState', (['(0)'], {}), '(0)\n', (16738, 16741), True, 'import numpy as np\n'), ((16775, 16792), 'cirq.LineQubit', 'cirq.LineQubit', (['(1)'], {}), '(1)\n', (16789, 16792), False, 'import cirq\n'), ((17361, 17384), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(3)'], {}), '(3)\n', (17381, 17384), False, 'import cirq\n'), ((9171, 9186), 'numpy.array', 'np.array', (['[[1]]'], {}), '([[1]])\n', (9179, 9186), True, 'import numpy as np\n'), ((9668, 9683), 'numpy.array', 'np.array', (['[[1]]'], {}), '([[1]])\n', (9676, 9683), True, 'import numpy as np\n'), ((10077, 10093), 'cirq.measure', 'cirq.measure', (['q0'], {}), '(q0)\n', (10089, 10093), False, 'import cirq\n'), ((10314, 10330), 'cirq.measure', 'cirq.measure', (['q0'], {}), '(q0)\n', (10326, 10330), False, 'import cirq\n'), ((15444, 15464), 'cirq.depolarize', 'cirq.depolarize', (['(0.5)'], {}), '(0.5)\n', (15459, 15464), False, 'import cirq\n'), ((15778, 15802), 'cirq.amplitude_damp', 'cirq.amplitude_damp', (['(0.5)'], {}), '(0.5)\n', (15797, 15802), False, 'import cirq\n'), ((16197, 16225), 'numpy.shares_memory', 'np.shares_memory', (['x[i]', 'y[i]'], {}), '(x[i], y[i])\n', (16213, 16225), True, 'import numpy as np\n'), ((16321, 16338), 'cirq.LineQubit', 'cirq.LineQubit', (['(0)'], {}), '(0)\n', (16335, 16338), False, 'import cirq\n'), ((17174, 17197), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(3)'], {}), '(3)\n', (17194, 17197), False, 'import cirq\n'), ((12839, 12876), 'numpy.array_equal', 'np.array_equal', (['sample', '[True, False]'], {}), '(sample, [True, False])\n', (12853, 12876), True, 'import numpy as np\n'), ((12880, 12918), 'numpy.array_equal', 'np.array_equal', (['sample', '[False, False]'], {}), '(sample, [False, False])\n', (12894, 12918), True, 'import numpy as np\n'), ((13586, 13622), 'numpy.array_equal', 'np.array_equal', (['sample', '[True, True]'], {}), '(sample, [True, True])\n', (13600, 13622), True, 'import numpy as np\n'), ((13626, 13664), 'numpy.array_equal', 'np.array_equal', (['sample', '[False, False]'], {}), '(sample, [False, False])\n', (13640, 13664), True, 'import numpy as np\n'), ((14516, 14533), 'sympy.Symbol', 'sympy.Symbol', (['"""a"""'], {}), "('a')\n", (14528, 14533), False, 'import sympy\n'), ((16461, 16488), 'cirq.MeasurementKey', 'cirq.MeasurementKey', (['"""test"""'], {}), "('test')\n", (16480, 16488), False, 'import cirq\n'), ((12544, 12556), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (12553, 12556), False, 'import math\n'), ((12569, 12581), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (12578, 12581), False, 'import math\n'), ((13087, 13099), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (13096, 13099), False, 'import math\n'), ((13112, 13124), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (13121, 13124), False, 'import math\n'), ((13291, 13303), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (13300, 13303), False, 'import math\n'), ((13321, 13333), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (13330, 13333), False, 'import math\n')] |
winding-lines/determined | e2e_tests/tests/config.py | 231e1ac1df9d77cabc09b724ca2f8070eac0da73 | import os
from pathlib import Path
from typing import Any, Dict
from determined.common import util
MASTER_SCHEME = "http"
MASTER_IP = "localhost"
MASTER_PORT = "8080"
DET_VERSION = None
DEFAULT_MAX_WAIT_SECS = 1800
MAX_TASK_SCHEDULED_SECS = 30
MAX_TRIAL_BUILD_SECS = 90
DEFAULT_TF1_CPU_IMAGE = "determinedai/environments:py-3.7-pytorch-1.7-tf-1.15-cpu-08f9c9b"
DEFAULT_TF2_CPU_IMAGE = (
"determinedai/environments:py-3.8-pytorch-1.9-lightning-1.3-tf-2.4-cpu-08f9c9b"
)
DEFAULT_TF1_GPU_IMAGE = "determinedai/environments:cuda-10.2-pytorch-1.7-tf-1.15-gpu-08f9c9b"
DEFAULT_TF2_GPU_IMAGE = (
"determinedai/environments:cuda-11.1-pytorch-1.9-lightning-1.3-tf-2.4-gpu-08f9c9b"
)
TF1_CPU_IMAGE = os.environ.get("TF1_CPU_IMAGE") or DEFAULT_TF1_CPU_IMAGE
TF2_CPU_IMAGE = os.environ.get("TF2_CPU_IMAGE") or DEFAULT_TF2_CPU_IMAGE
TF1_GPU_IMAGE = os.environ.get("TF1_GPU_IMAGE") or DEFAULT_TF1_GPU_IMAGE
TF2_GPU_IMAGE = os.environ.get("TF2_GPU_IMAGE") or DEFAULT_TF2_GPU_IMAGE
GPU_ENABLED = os.environ.get("DET_TEST_GPU_ENABLED", "1") not in ("0", "false")
PROJECT_ROOT_PATH = Path(__file__).resolve().parents[2]
def fixtures_path(path: str) -> str:
return os.path.join(os.path.dirname(__file__), "fixtures", path)
def tutorials_path(path: str) -> str:
return os.path.join(os.path.dirname(__file__), "../../examples/tutorials", path)
def cv_examples_path(path: str) -> str:
return os.path.join(os.path.dirname(__file__), "../../examples/computer_vision", path)
def nlp_examples_path(path: str) -> str:
return os.path.join(os.path.dirname(__file__), "../../examples/nlp", path)
def nas_examples_path(path: str) -> str:
return os.path.join(os.path.dirname(__file__), "../../examples/nas", path)
def meta_learning_examples_path(path: str) -> str:
return os.path.join(os.path.dirname(__file__), "../../examples/meta_learning", path)
def gan_examples_path(path: str) -> str:
return os.path.join(os.path.dirname(__file__), "../../examples/gan", path)
def decision_trees_examples_path(path: str) -> str:
return os.path.join(os.path.dirname(__file__), "../../examples/decision_trees", path)
def features_examples_path(path: str) -> str:
return os.path.join(os.path.dirname(__file__), "../../examples/features", path)
def model_hub_examples_path(path: str) -> str:
return os.path.join(os.path.dirname(__file__), "../../model_hub/examples", path)
def graphs_examples_path(path: str) -> str:
return os.path.join(os.path.dirname(__file__), "../../examples/graphs", path)
def load_config(config_path: str) -> Any:
with open(config_path) as f:
config = util.safe_load_yaml_with_exceptions(f)
return config
def make_master_url(suffix: str = "") -> str:
return "{}://{}:{}/{}".format(MASTER_SCHEME, MASTER_IP, MASTER_PORT, suffix)
def set_global_batch_size(config: Dict[Any, Any], batch_size: int) -> Dict[Any, Any]:
config = config.copy()
config["hyperparameters"]["global_batch_size"] = batch_size
return config
def set_slots_per_trial(config: Dict[Any, Any], slots: int) -> Dict[Any, Any]:
config = config.copy()
config.setdefault("resources", {})
config["resources"]["slots_per_trial"] = slots
return config
def set_max_length(config: Dict[Any, Any], max_length: Dict[str, int]) -> Dict[Any, Any]:
config = config.copy()
config["searcher"]["max_length"] = max_length
return config
def set_min_validation_period(
config: Dict[Any, Any], min_validation_period: Dict[str, int]
) -> Dict[Any, Any]:
config = config.copy()
config["min_validation_period"] = min_validation_period
return config
def set_min_checkpoint_period(
config: Dict[Any, Any], min_checkpoint_period: Dict[str, int]
) -> Dict[Any, Any]:
config = config.copy()
config["min_checkpoint_period"] = min_checkpoint_period
return config
def set_aggregation_frequency(config: Dict[Any, Any], aggregation_frequency: int) -> Dict[Any, Any]:
config = config.copy()
config.setdefault("optimizations", {})
config["optimizations"]["aggregation_frequency"] = aggregation_frequency
return config
def set_tensor_auto_tuning(config: Dict[Any, Any], auto_tune: bool) -> Dict[Any, Any]:
config = config.copy()
config.setdefault("optimizations", {})
config["optimizations"]["auto_tune_tensor_fusion"] = auto_tune
return config
def set_image(config: Dict[Any, Any], cpu_image: str, gpu_image: str) -> Dict[Any, Any]:
config = config.copy()
config.setdefault("environment", {})
config["environment"]["image"] = {"cpu": cpu_image, "gpu": gpu_image}
return config
def set_tf1_image(config: Dict[Any, Any]) -> Dict[Any, Any]:
return set_image(config, TF1_CPU_IMAGE, TF1_GPU_IMAGE)
def set_tf2_image(config: Dict[Any, Any]) -> Dict[Any, Any]:
return set_image(config, TF2_CPU_IMAGE, TF2_GPU_IMAGE)
def set_shared_fs_data_layer(config: Dict[Any, Any]) -> Dict[Any, Any]:
config = config.copy()
config["data_layer"] = {}
config["data_layer"]["type"] = "shared_fs"
return config
def set_s3_data_layer(config: Dict[Any, Any]) -> Dict[Any, Any]:
config = config.copy()
config["data_layer"] = {}
config["data_layer"]["type"] = "s3"
config["data_layer"]["bucket"] = "yogadl-test"
config["data_layer"]["bucket_directory_path"] = "determined_integration_tests"
return config
def set_random_seed(config: Dict[Any, Any], seed: int) -> Dict[Any, Any]:
config = config.copy()
config.setdefault("reproducibility", {})
config["reproducibility"]["experiment_seed"] = seed
return config
def set_hparam(config: Dict[Any, Any], name: str, value: Any) -> Dict[Any, Any]:
config = config.copy()
config["hyperparameters"][name] = {"type": "const", "val": value}
return config
def set_perform_initial_validation(config: Dict[Any, Any], init_val: bool) -> Dict[Any, Any]:
config = config.copy()
config["perform_initial_validation"] = init_val
return config
def set_pod_spec(config: Dict[Any, Any], pod_spec: Dict[Any, Any]) -> Dict[Any, Any]:
config = config.copy()
config.setdefault("environment", {})
config["environment"]["pod_spec"] = pod_spec
return config
def set_profiling_enabled(config: Dict[Any, Any]) -> Dict[Any, Any]:
config = config.copy()
config.setdefault("profiling", {})
config["profiling"]["enabled"] = True
return config
def set_entrypoint(config: Dict[Any, Any], entrypoint: str) -> Dict[Any, Any]:
config = config.copy()
config["entrypoint"] = entrypoint
return config
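# Illustrative note (not part of the original file): each set_* helper above
# copies the config dict and returns it, so the helpers compose by nesting.
# Assuming a valid experiment config dict named `config`:
#
#     config = set_slots_per_trial(set_tf2_image(config), 2)
#     config = set_max_length(config, {"batches": 100})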
| [((703, 734), 'os.environ.get', 'os.environ.get', (['"""TF1_CPU_IMAGE"""'], {}), "('TF1_CPU_IMAGE')\n", (717, 734), False, 'import os\n'), ((776, 807), 'os.environ.get', 'os.environ.get', (['"""TF2_CPU_IMAGE"""'], {}), "('TF2_CPU_IMAGE')\n", (790, 807), False, 'import os\n'), ((849, 880), 'os.environ.get', 'os.environ.get', (['"""TF1_GPU_IMAGE"""'], {}), "('TF1_GPU_IMAGE')\n", (863, 880), False, 'import os\n'), ((922, 953), 'os.environ.get', 'os.environ.get', (['"""TF2_GPU_IMAGE"""'], {}), "('TF2_GPU_IMAGE')\n", (936, 953), False, 'import os\n'), ((993, 1036), 'os.environ.get', 'os.environ.get', (['"""DET_TEST_GPU_ENABLED"""', '"""1"""'], {}), "('DET_TEST_GPU_ENABLED', '1')\n", (1007, 1036), False, 'import os\n'), ((1179, 1204), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1194, 1204), False, 'import os\n'), ((1288, 1313), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1303, 1313), False, 'import os\n'), ((1415, 1440), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1430, 1440), False, 'import os\n'), ((1549, 1574), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1564, 1574), False, 'import os\n'), ((1671, 1696), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1686, 1696), False, 'import os\n'), ((1803, 1828), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1818, 1828), False, 'import os\n'), ((1935, 1960), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1950, 1960), False, 'import os\n'), ((2068, 2093), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2083, 2093), False, 'import os\n'), ((2206, 2231), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2221, 2231), False, 'import os\n'), ((2339, 2364), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2354, 2364), False, 'import os\n'), ((2470, 2495), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2485, 2495), False, 'import os\n'), ((2622, 2660), 'determined.common.util.safe_load_yaml_with_exceptions', 'util.safe_load_yaml_with_exceptions', (['f'], {}), '(f)\n', (2657, 2660), False, 'from determined.common import util\n'), ((1080, 1094), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1084, 1094), False, 'from pathlib import Path\n')] |
nickmflorin/django-proper-architecture-testing | src/greenbudget/app/subaccount/serializers.py | da7c4019697e85f921695144375d2f548f1e98ad | from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers, exceptions
from greenbudget.lib.rest_framework_utils.fields import ModelChoiceField
from greenbudget.lib.rest_framework_utils.serializers import (
EnhancedModelSerializer)
from greenbudget.app.budget.models import BaseBudget
from greenbudget.app.common.serializers import (
EntitySerializer,
AbstractBulkUpdateSerializer,
create_bulk_create_serializer
)
from greenbudget.app.fringe.models import Fringe
from greenbudget.app.group.models import (
BudgetSubAccountGroup,
TemplateSubAccountGroup
)
from .models import SubAccount, BudgetSubAccount, TemplateSubAccount
class SubAccountSimpleSerializer(EnhancedModelSerializer):
id = serializers.IntegerField(read_only=True)
type = serializers.CharField(read_only=True)
identifier = serializers.CharField(
required=False,
allow_blank=False,
allow_null=True,
trim_whitespace=False
)
description = serializers.CharField(
required=False,
allow_blank=False,
allow_null=True,
trim_whitespace=False
)
name = serializers.CharField(
required=False,
allow_blank=True,
allow_null=False,
trim_whitespace=False
)
class Meta:
model = SubAccount
fields = ('id', 'name', 'identifier', 'type', 'description')
class SubAccountSerializer(SubAccountSimpleSerializer):
created_by = serializers.PrimaryKeyRelatedField(read_only=True)
updated_by = serializers.PrimaryKeyRelatedField(read_only=True)
created_at = serializers.DateTimeField(read_only=True)
updated_at = serializers.DateTimeField(read_only=True)
quantity = serializers.IntegerField(
required=False,
allow_null=True
)
rate = serializers.FloatField(required=False, allow_null=True)
multiplier = serializers.FloatField(required=False, allow_null=True)
estimated = serializers.FloatField(read_only=True)
unit = ModelChoiceField(
required=False,
choices=SubAccount.UNITS,
allow_null=True
)
budget = serializers.PrimaryKeyRelatedField(read_only=True)
subaccounts = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
ancestors = EntitySerializer(many=True, read_only=True)
siblings = EntitySerializer(many=True, read_only=True)
account = serializers.IntegerField(read_only=True, source='account.pk')
object_id = serializers.IntegerField(read_only=True)
parent_type = serializers.ChoiceField(
choices=["account", "subaccount"],
read_only=True
)
fringes = serializers.PrimaryKeyRelatedField(
many=True,
required=False,
queryset=Fringe.objects.filter(budget__trash=False)
)
class Meta:
model = SubAccount
fields = SubAccountSimpleSerializer.Meta.fields + (
            'created_by', 'updated_by', 'created_at',
'updated_at', 'quantity', 'rate', 'multiplier', 'unit', 'account',
'object_id', 'parent_type', 'ancestors', 'estimated', 'subaccounts',
'budget', 'siblings', 'fringes')
def validate(self, attrs):
if self.instance is not None and self.instance.subaccounts.count() != 0:
if any([field in attrs for field in self.instance.DERIVING_FIELDS]):
raise exceptions.ValidationError(
"Field can only be updated when the sub account is not "
"derived."
)
return super().validate(attrs)
class BudgetSubAccountSerializer(SubAccountSerializer):
actual = serializers.FloatField(read_only=True)
variance = serializers.FloatField(read_only=True)
group = serializers.PrimaryKeyRelatedField(
required=False,
allow_null=True,
queryset=BudgetSubAccountGroup.objects.all()
)
class Meta:
model = BudgetSubAccount
fields = SubAccountSerializer.Meta.fields + (
'actual', 'variance', 'group')
class TemplateSubAccountSerializer(SubAccountSerializer):
group = serializers.PrimaryKeyRelatedField(
required=False,
allow_null=True,
queryset=TemplateSubAccountGroup.objects.all()
)
class Meta:
model = TemplateSubAccount
fields = SubAccountSerializer.Meta.fields + ('group', )
def create_bulk_create_subaccounts_serializer(model_cls):
data_serializer = BudgetSubAccountSerializer
if model_cls is TemplateSubAccount:
data_serializer = TemplateSubAccountSerializer
base_serializer = create_bulk_create_serializer(data_serializer)
class BulkCreateSubAccountsSerializer(base_serializer):
class Meta(base_serializer.Meta):
model = BaseBudget
def get_serializer_context(self, instance):
return {'parent': instance}
def perform_save(self, serializer, instance, validated_data):
# Note that the updated_by argument is the user updating the
# Account by adding new SubAccount(s), so the SubAccount(s)
# should be denoted as having been created by this user.
return serializer.save(
updated_by=validated_data['updated_by'],
created_by=validated_data['updated_by'],
object_id=instance.pk,
content_type=ContentType.objects.get_for_model(model_cls),
parent=instance,
budget=instance.budget
)
return BulkCreateSubAccountsSerializer
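# Illustrative usage sketch (not in the original module): the factory is keyed
# on the concrete model class, e.g.
#
#     serializer_cls = create_bulk_create_subaccounts_serializer(BudgetSubAccount)
#     serializer = serializer_cls(data=payload)  # `payload` is a hypothetical request body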
def create_subaccount_bulk_change_serializer(model_cls):
base_serializer = BudgetSubAccountSerializer
if model_cls is TemplateSubAccount:
base_serializer = TemplateSubAccountSerializer
class SubAccountBulkChangeSerializer(base_serializer):
id = serializers.PrimaryKeyRelatedField(
required=True,
queryset=model_cls.objects.all()
)
def validate_id(self, instance):
account = self.parent.parent.instance
if account != instance.parent:
raise exceptions.ValidationError(
"The sub-account %s does not belong to account %s."
% (instance.pk, account.pk)
)
return instance
return SubAccountBulkChangeSerializer
def create_bulk_update_subaccounts_serializer(model_cls):
class BulkUpdateSubAccountsSerializer(AbstractBulkUpdateSerializer):
data = create_subaccount_bulk_change_serializer(model_cls)(
many=True, nested=True)
class Meta:
model = BaseBudget
fields = ('data', )
def update(self, instance, validated_data):
for subaccount, change in validated_data['data']:
serializer = SubAccountSerializer(
instance=subaccount,
data=change,
partial=True
)
serializer.is_valid(raise_exception=True)
serializer.save(
updated_by=validated_data['updated_by'],
suppress_budget_update=validated_data.get(
'suppress_budget_update', False)
)
return instance
return BulkUpdateSubAccountsSerializer
| [((760, 800), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (784, 800), False, 'from rest_framework import serializers, exceptions\n'), ((812, 849), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (833, 849), False, 'from rest_framework import serializers, exceptions\n'), ((867, 967), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(False)', 'allow_blank': '(False)', 'allow_null': '(True)', 'trim_whitespace': '(False)'}), '(required=False, allow_blank=False, allow_null=True,\n    trim_whitespace=False)\n', (888, 967), False, 'from rest_framework import serializers, exceptions\n'), ((1020, 1120), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(False)', 'allow_blank': '(False)', 'allow_null': '(True)', 'trim_whitespace': '(False)'}), '(required=False, allow_blank=False, allow_null=True,\n    trim_whitespace=False)\n', (1041, 1120), False, 'from rest_framework import serializers, exceptions\n'), ((1166, 1266), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(False)', 'allow_blank': '(True)', 'allow_null': '(False)', 'trim_whitespace': '(False)'}), '(required=False, allow_blank=True, allow_null=False,\n    trim_whitespace=False)\n', (1187, 1266), False, 'from rest_framework import serializers, exceptions\n'), ((1489, 1539), 'rest_framework.serializers.PrimaryKeyRelatedField', 'serializers.PrimaryKeyRelatedField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (1523, 1539), False, 'from rest_framework import serializers, exceptions\n'), ((1557, 1607), 'rest_framework.serializers.PrimaryKeyRelatedField', 'serializers.PrimaryKeyRelatedField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (1591, 1607), False, 'from rest_framework import serializers, exceptions\n'), ((1625, 1666), 'rest_framework.serializers.DateTimeField', 'serializers.DateTimeField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (1650, 1666), False, 'from rest_framework import serializers, exceptions\n'), ((1684, 1725), 'rest_framework.serializers.DateTimeField', 'serializers.DateTimeField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (1709, 1725), False, 'from rest_framework import serializers, exceptions\n'), ((1741, 1798), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'required': '(False)', 'allow_null': '(True)'}), '(required=False, allow_null=True)\n', (1765, 1798), False, 'from rest_framework import serializers, exceptions\n'), ((1832, 1887), 'rest_framework.serializers.FloatField', 'serializers.FloatField', ([], {'required': '(False)', 'allow_null': '(True)'}), '(required=False, allow_null=True)\n', (1854, 1887), False, 'from rest_framework import serializers, exceptions\n'), ((1905, 1960), 'rest_framework.serializers.FloatField', 'serializers.FloatField', ([], {'required': '(False)', 'allow_null': '(True)'}), '(required=False, allow_null=True)\n', (1927, 1960), False, 'from rest_framework import serializers, exceptions\n'), ((1977, 2015), 'rest_framework.serializers.FloatField', 'serializers.FloatField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (1999, 2015), False, 'from rest_framework import serializers, exceptions\n'), ((2027, 2102), 'greenbudget.lib.rest_framework_utils.fields.ModelChoiceField', 'ModelChoiceField', ([], {'required': '(False)', 'choices': 'SubAccount.UNITS', 'allow_null': '(True)'}), '(required=False, choices=SubAccount.UNITS, allow_null=True)\n', (2043, 2102), False, 'from greenbudget.lib.rest_framework_utils.fields import ModelChoiceField\n'), ((2146, 2196), 'rest_framework.serializers.PrimaryKeyRelatedField', 'serializers.PrimaryKeyRelatedField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (2180, 2196), False, 'from rest_framework import serializers, exceptions\n'), ((2215, 2276), 'rest_framework.serializers.PrimaryKeyRelatedField', 'serializers.PrimaryKeyRelatedField', ([], {'many': '(True)', 'read_only': '(True)'}), '(many=True, read_only=True)\n', (2249, 2276), False, 'from rest_framework import serializers, exceptions\n'), ((2293, 2336), 'greenbudget.app.common.serializers.EntitySerializer', 'EntitySerializer', ([], {'many': '(True)', 'read_only': '(True)'}), '(many=True, read_only=True)\n', (2309, 2336), False, 'from greenbudget.app.common.serializers import EntitySerializer, AbstractBulkUpdateSerializer, create_bulk_create_serializer\n'), ((2352, 2395), 'greenbudget.app.common.serializers.EntitySerializer', 'EntitySerializer', ([], {'many': '(True)', 'read_only': '(True)'}), '(many=True, read_only=True)\n', (2368, 2395), False, 'from greenbudget.app.common.serializers import EntitySerializer, AbstractBulkUpdateSerializer, create_bulk_create_serializer\n'), ((2410, 2471), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)', 'source': '"""account.pk"""'}), "(read_only=True, source='account.pk')\n", (2434, 2471), False, 'from rest_framework import serializers, exceptions\n'), ((2488, 2528), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (2512, 2528), False, 'from rest_framework import serializers, exceptions\n'), ((2547, 2621), 'rest_framework.serializers.ChoiceField', 'serializers.ChoiceField', ([], {'choices': "['account', 'subaccount']", 'read_only': '(True)'}), "(choices=['account', 'subaccount'], read_only=True)\n", (2570, 2621), False, 'from rest_framework import serializers, exceptions\n'), ((3668, 3706), 'rest_framework.serializers.FloatField', 'serializers.FloatField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (3690, 3706), False, 'from rest_framework import serializers, exceptions\n'), ((3722, 3760), 'rest_framework.serializers.FloatField', 'serializers.FloatField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (3744, 3760), False, 'from rest_framework import serializers, exceptions\n'), ((4625, 4671), 'greenbudget.app.common.serializers.create_bulk_create_serializer', 'create_bulk_create_serializer', (['data_serializer'], {}), '(data_serializer)\n', (4654, 4671), False, 'from greenbudget.app.common.serializers import EntitySerializer, AbstractBulkUpdateSerializer, create_bulk_create_serializer\n'), ((2754, 2796), 'greenbudget.app.fringe.models.Fringe.objects.filter', 'Fringe.objects.filter', ([], {'budget__trash': '(False)'}), '(budget__trash=False)\n', (2775, 2796), False, 'from greenbudget.app.fringe.models import Fringe\n'), ((3875, 3910), 'greenbudget.app.group.models.BudgetSubAccountGroup.objects.all', 'BudgetSubAccountGroup.objects.all', ([], {}), '()\n', (3908, 3910), False, 'from greenbudget.app.group.models import BudgetSubAccountGroup, TemplateSubAccountGroup\n'), ((4238, 4275), 'greenbudget.app.group.models.TemplateSubAccountGroup.objects.all', 'TemplateSubAccountGroup.objects.all', ([], {}), '()\n', (4273, 4275), False, 'from greenbudget.app.group.models import BudgetSubAccountGroup, TemplateSubAccountGroup\n'), ((3404, 3501), 'rest_framework.exceptions.ValidationError', 'exceptions.ValidationError', (['"""Field can only be updated when the sub account is not derived."""'], {}), "(\n    'Field can only be updated when the sub account is not derived.')\n", (3430, 3501), False, 'from rest_framework import serializers, exceptions\n'), ((6130, 6246), 'rest_framework.exceptions.ValidationError', 'exceptions.ValidationError', (["('The sub-account %s does not belong to account %s.' % (instance.pk,\n    account.pk))"], {}), "(\n    'The sub-account %s does not belong to account %s.' % (instance.pk,\n    account.pk))\n", (6156, 6246), False, 'from rest_framework import serializers, exceptions\n'), ((5403, 5447), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['model_cls'], {}), '(model_cls)\n', (5436, 5447), False, 'from django.contrib.contenttypes.models import ContentType\n')] |
busunkim96/dbnd | modules/dbnd/src/dbnd/_core/tracking/managers/callable_tracking.py | 0191fdcd4c4fbd35006f1026d1a55b2abab9097b | import contextlib
import logging
import typing
from typing import Any, Dict, Tuple
import attr
from dbnd._core.configuration import get_dbnd_project_config
from dbnd._core.constants import (
RESULT_PARAM,
DbndTargetOperationStatus,
DbndTargetOperationType,
TaskRunState,
)
from dbnd._core.current import (
    current_task_run,
    get_databand_run,
    is_verbose,
)
from dbnd._core.errors.errors_utils import log_exception
from dbnd._core.log.external_exception_logging import log_exception_to_server
from dbnd._core.parameter.parameter_definition import ParameterDefinition
from dbnd._core.parameter.parameter_value import ParameterFilters
from dbnd._core.settings import TrackingConfig
from dbnd._core.task.tracking_task import TrackingTask
from dbnd._core.task_build.task_context import try_get_current_task
from dbnd._core.task_build.task_definition import TaskDefinition
from dbnd._core.task_build.task_results import FuncResultParameter
from dbnd._core.task_run.task_run import TaskRun
from dbnd._core.task_run.task_run_error import TaskRunError
from dbnd._core.utils.callable_spec import args_to_kwargs
from dbnd._core.utils.timezone import utcnow
from targets import InMemoryTarget, Target
from targets.value_meta import ValueMetaConf
from targets.values import get_value_type_of_obj
if typing.TYPE_CHECKING:
from dbnd._core.task_build.task_decorator import TaskDecorator
logger = logging.getLogger(__name__)
@attr.s
class TrackedFuncCallWithResult(object):
call_args = attr.ib() # type: Tuple[Any]
call_kwargs = attr.ib() # type: Dict[str,Any]
callable = attr.ib()
result = attr.ib(default=None)
def set_result(self, value):
self.result = value
return value
def invoke(self):
func = self.callable
return func(*self.call_args, **self.call_kwargs)
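# Illustrative sketch (not part of the original module): the attrs container
# above is used roughly like this, with `user_func` standing in for any
# tracked callable:
#
#     func_call = TrackedFuncCallWithResult(
#         callable=user_func, call_args=(1, 2), call_kwargs={"x": 3}
#     )
#     func_call.set_result(func_call.invoke())  # result == user_func(1, 2, x=3)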
class CallableTrackingManager(object):
def __init__(self, task_decorator):
# type: (CallableTrackingManager, TaskDecorator) -> None
self.task_decorator = task_decorator
self._tracking_task_definition = None
self._call_count = 0
self._call_as_func = False
self._max_call_count = get_dbnd_project_config().max_calls_per_run
@property
def callable(self):
return self.task_decorator.class_or_func
def get_tracking_task_definition(self):
if not self._tracking_task_definition:
self._tracking_task_definition = self._build_tracking_task_definition()
return self._tracking_task_definition
def _build_tracking_task_definition(self):
return TaskDefinition.from_task_decorator(task_decorator=self.task_decorator)
def _call_count_limit_exceeded(self):
if not self._call_as_func:
self._call_count += 1
if self._call_count > self._max_call_count:
logger.info(
"Reached maximum tracking limit of {} tasks. Running function regularly.".format(
self._max_call_count
)
)
self._call_as_func = True
return self._call_as_func
@contextlib.contextmanager
def tracking_context(self, call_args, call_kwargs):
        user_code_called = False  # whether we reached the execution of user code
        user_code_finished = False  # whether we got past the execution of user code
func_call = None
try:
# 1. check that we don't have too many calls
if self._call_count_limit_exceeded():
yield _do_nothing_decorator
return
# 2. Start or reuse existing "main tracking task" that is root for tracked tasks
if not try_get_current_task():
"""
                try to get an existing task; if none exists, try to get/create an inplace task run
"""
from dbnd._core.tracking.script_tracking_manager import (
try_get_inplace_tracking_task_run,
)
                inplace_tracking_task = try_get_inplace_tracking_task_run()
                if not inplace_tracking_task:
# we didn't manage to start inplace tracking task run, we will not be able to track
yield _do_nothing_decorator
return
tracking_task_definition = self.get_tracking_task_definition()
callable_spec = tracking_task_definition.task_decorator.get_callable_spec()
func_call = TrackedFuncCallWithResult(
callable=self.callable,
call_args=tuple(call_args), # prevent original call_args modification
call_kwargs=dict(call_kwargs), # prevent original kwargs modification
)
            # replace any positional argument with a kwarg where possible
args, kwargs = args_to_kwargs(
callable_spec.args, func_call.call_args, func_call.call_kwargs,
)
# instantiate inline task
task = TrackingTask.for_func(tracking_task_definition, args, kwargs)
# update upstream/downstream relations - needed for correct tracking
            # the task can be an upstream, as it was executed already
parent_task = current_task_run().task
if not parent_task.task_dag.has_upstream(task):
parent_task.set_upstream(task)
# checking if any of the inputs are the outputs of previous task.
# we can add that task as upstream.
dbnd_run = get_databand_run()
call_kwargs_as_targets = dbnd_run.target_origin.get_for_map(kwargs)
for value_origin in call_kwargs_as_targets.values():
up_task = value_origin.origin_target.task
task.set_upstream(up_task)
# creating task_run as a task we found mid-run
task_run = dbnd_run.create_task_run_at_execution_time(
task, task_engine=current_task_run().task_engine
)
should_capture_log = TrackingConfig.current().capture_tracking_log
with task_run.runner.task_run_execution_context(
handle_sigterm=True, capture_log=should_capture_log
):
task_run.set_task_run_state(state=TaskRunState.RUNNING)
_log_inputs(task_run)
# if we reached this line, then all tracking initialization is
# finished successfully, and we're going to execute user code
user_code_called = True
try:
# tracking_context is context manager - user code will run on yield
yield func_call.set_result
# if we reached this line, this means that user code finished
# successfully without any exceptions
user_code_finished = True
except Exception as ex:
task_run.finished_time = utcnow()
error = TaskRunError.build_from_ex(ex, task_run)
task_run.set_task_run_state(TaskRunState.FAILED, error=error)
raise
else:
task_run.finished_time = utcnow()
# func_call.result should contain result, log it
_log_result(task_run, func_call.result)
task_run.set_task_run_state(TaskRunState.SUCCESS)
except Exception:
if user_code_called and not user_code_finished:
                # if we started calling the user code but never reached the
                # user_code_finished line, there was a user-code exception - just re-raise it
raise
            # otherwise we either never reached the user-code call or already passed it,
            # so it's some dbnd tracking error - just log it
if func_call:
_handle_tracking_error("tracking-init", func_call)
else:
log_exception_to_server()
        # if we didn't reach the user_code_called=True line, there was an error during
# dbnd tracking initialization, so nothing is done - user function wasn't called yet
if not user_code_called:
# tracking_context is context manager - user code will run on yield
yield _do_nothing_decorator
return
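# Illustrative sketch (not in the original source) of how the context manager
# above is consumed by a decorator wrapper; `manager` and `f` are hypothetical:
#
#     with manager.tracking_context(args, kwargs) as set_result:
#         set_result(f(*args, **kwargs))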
def _handle_tracking_error(msg, func_call=None):
log_exception_to_server()
location = " for %s" % func_call.callable if func_call else ""
    msg = "Failed during dbnd %s%s, ignoring, and continuing without tracking" % (
msg,
location,
)
if is_verbose():
logger.warning(
msg, exc_info=True,
)
else:
logger.info(msg)
def _do_nothing_decorator(f):
return f
def _log_inputs(task_run):
"""
For tracking mode. Logs InMemoryTarget inputs.
"""
try:
params = task_run.task._params
for param_value in params.get_param_values(ParameterFilters.INPUTS):
param, value = param_value.parameter, param_value.value
if isinstance(param_value, InMemoryTarget):
try:
param = param.modify(
value_meta_conf=ValueMetaConf(
log_preview=True, log_schema=True,
)
)
task_run.tracker.log_parameter_data(
parameter=param,
target=param_value,
value=value,
operation_type=DbndTargetOperationType.read,
operation_status=DbndTargetOperationStatus.OK,
)
except Exception as ex:
log_exception(
"Failed to log input param to tracking store.",
ex=ex,
non_critical=True,
)
except Exception as ex:
log_exception(
"Failed to log input params to tracking store.", ex=ex, non_critical=True
)
def _log_result(task_run, result):
# type: (TaskRun, Any) -> None
"""
For tracking mode. Logs the task result and adds it to the target_origin map to support relationships between
dynamic tasks.
"""
try:
result_param = task_run.task.task_params.get_param_value(RESULT_PARAM)
if not result_param:
logger.debug(
"No result params to log for task {}".format(task_run.task_af_id)
)
return
        # we know the parameter value is a target because this is an output param
# the target is created in the task creation
result_param_def, result_target = result_param.parameter, result_param.value
# spread result into relevant fields.
if isinstance(result_param_def, FuncResultParameter):
# assign all returned values to relevant band Outputs
if result is None:
return
for result_name, value in result_param_def.named_results(result):
            # we know the parameter value is a target because this is an output param
# the target is created in the task creation
parameter_value = task_run.task.task_params.get_param_value(result_name)
_log_parameter_value(
task_run,
parameter_definition=parameter_value.parameter,
target=parameter_value.value,
value=value,
)
else:
_log_parameter_value(
task_run,
parameter_definition=result_param_def,
target=result_target,
value=result,
)
except Exception as ex:
log_exception(
"Failed to log result to tracking store.", ex=ex, non_critical=True
)
def _log_parameter_value(task_run, parameter_definition, target, value):
# type: (TaskRun, ParameterDefinition, Target, Any) -> None
# make sure it will be logged correctly
parameter_definition = parameter_definition.modify(
value_meta_conf=ValueMetaConf(log_preview=True, log_schema=True)
)
try:
        # handle the case where the result is a Proxy
value_type = get_value_type_of_obj(value, parameter_definition.value_type)
task_run.run.target_origin.add(target, value, value_type)
except Exception as ex:
log_exception(
"Failed to register result to target tracking.", ex=ex, non_critical=True
)
try:
task_run.tracker.log_parameter_data(
parameter=parameter_definition, # was: task_run.task.task_definition.task_class.result,
target=target,
value=value,
operation_type=DbndTargetOperationType.write, # is it write? (or log?)
operation_status=DbndTargetOperationStatus.OK,
)
except Exception as ex:
log_exception(
"Failed to log result to tracking store.", ex=ex, non_critical=True
)
| [((1443, 1470), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1460, 1470), False, 'import logging\n'), ((1538, 1547), 'attr.ib', 'attr.ib', ([], {}), '()\n', (1545, 1547), False, 'import attr\n'), ((1587, 1596), 'attr.ib', 'attr.ib', ([], {}), '()\n', (1594, 1596), False, 'import attr\n'), ((1636, 1645), 'attr.ib', 'attr.ib', ([], {}), '()\n', (1643, 1645), False, 'import attr\n'), ((1659, 1680), 'attr.ib', 'attr.ib', ([], {'default': 'None'}), '(default=None)\n', (1666, 1680), False, 'import attr\n'), ((8459, 8484), 'dbnd._core.log.external_exception_logging.log_exception_to_server', 'log_exception_to_server', ([], {}), '()\n', (8482, 8484), False, 'from dbnd._core.log.external_exception_logging import log_exception_to_server\n'), ((8682, 8694), 'dbnd._core.current.is_verbose', 'is_verbose', ([], {}), '()\n', (8692, 8694), False, 'from dbnd._core.current import current_task_run, get_databand_run, is_verbose, try_get_current_task\n'), ((2623, 2693), 'dbnd._core.task_build.task_definition.TaskDefinition.from_task_decorator', 'TaskDefinition.from_task_decorator', ([], {'task_decorator': 'self.task_decorator'}), '(task_decorator=self.task_decorator)\n', (2657, 2693), False, 'from dbnd._core.task_build.task_definition import TaskDefinition\n'), ((12375, 12436), 'targets.values.get_value_type_of_obj', 'get_value_type_of_obj', (['value', 'parameter_definition.value_type'], {}), '(value, parameter_definition.value_type)\n', (12396, 12436), False, 'from targets.values import get_value_type_of_obj\n'), ((2206, 2231), 'dbnd._core.configuration.get_dbnd_project_config', 'get_dbnd_project_config', ([], {}), '()\n', (2229, 2231), False, 'from dbnd._core.configuration import get_dbnd_project_config\n'), ((4874, 4952), 'dbnd._core.utils.callable_spec.args_to_kwargs', 'args_to_kwargs', (['callable_spec.args', 'func_call.call_args', 'func_call.call_kwargs'], {}), '(callable_spec.args, func_call.call_args, func_call.call_kwargs)\n', (4888, 4952), False, 'from dbnd._core.utils.callable_spec import args_to_kwargs\n'), ((5042, 5103), 'dbnd._core.task.tracking_task.TrackingTask.for_func', 'TrackingTask.for_func', (['tracking_task_definition', 'args', 'kwargs'], {}), '(tracking_task_definition, args, kwargs)\n', (5063, 5103), False, 'from dbnd._core.task.tracking_task import TrackingTask\n'), ((5569, 5587), 'dbnd._core.current.get_databand_run', 'get_databand_run', ([], {}), '()\n', (5585, 5587), False, 'from dbnd._core.current import current_task_run, get_databand_run, is_verbose, try_get_current_task\n'), ((10038, 10130), 'dbnd._core.errors.errors_utils.log_exception', 'log_exception', (['"""Failed to log input params to tracking store."""'], {'ex': 'ex', 'non_critical': '(True)'}), "('Failed to log input params to tracking store.', ex=ex,\n    non_critical=True)\n", (10051, 10130), False, 'from dbnd._core.errors.errors_utils import log_exception\n'), ((11882, 11968), 'dbnd._core.errors.errors_utils.log_exception', 'log_exception', (['"""Failed to log result to tracking store."""'], {'ex': 'ex', 'non_critical': '(True)'}), "('Failed to log result to tracking store.', ex=ex,\n    non_critical=True)\n", (11895, 11968), False, 'from dbnd._core.errors.errors_utils import log_exception\n'), ((12250, 12298), 'targets.value_meta.ValueMetaConf', 'ValueMetaConf', ([], {'log_preview': '(True)', 'log_schema': '(True)'}), '(log_preview=True, log_schema=True)\n', (12263, 12298), False, 'from targets.value_meta import ValueMetaConf\n'), ((12539, 12631), 'dbnd._core.errors.errors_utils.log_exception', 'log_exception', (['"""Failed to register result to target tracking."""'], {'ex': 'ex', 'non_critical': '(True)'}), "('Failed to register result to target tracking.', ex=ex,\n    non_critical=True)\n", (12552, 12631), False, 'from dbnd._core.errors.errors_utils import log_exception\n'), ((13047, 13133), 'dbnd._core.errors.errors_utils.log_exception', 'log_exception', (['"""Failed to log result to tracking store."""'], {'ex': 'ex', 'non_critical': '(True)'}), "('Failed to log result to tracking store.', ex=ex,\n    non_critical=True)\n", (13060, 13133), False, 'from dbnd._core.errors.errors_utils import log_exception\n'), ((3723, 3745), 'dbnd._core.task_build.task_context.try_get_current_task', 'try_get_current_task', ([], {}), '()\n', (3743, 3745), False, 'from dbnd._core.task_build.task_context import try_get_current_task\n'), ((4073, 4108), 'dbnd._core.tracking.script_tracking_manager.try_get_inplace_tracking_task_run', 'try_get_inplace_tracking_task_run', ([], {}), '()\n', (4106, 4108), False, 'from dbnd._core.tracking.script_tracking_manager import try_get_inplace_tracking_task_run\n'), ((5288, 5306), 'dbnd._core.current.current_task_run', 'current_task_run', ([], {}), '()\n', (5304, 5306), False, 'from dbnd._core.current import current_task_run, get_databand_run, is_verbose, try_get_current_task\n'), ((6074, 6098), 'dbnd._core.settings.TrackingConfig.current', 'TrackingConfig.current', ([], {}), '()\n', (6096, 6098), False, 'from dbnd._core.settings import TrackingConfig\n'), ((7256, 7264), 'dbnd._core.utils.timezone.utcnow', 'utcnow', ([], {}), '()\n', (7262, 7264), False, 'from dbnd._core.utils.timezone import utcnow\n'), ((8027, 8052), 'dbnd._core.log.external_exception_logging.log_exception_to_server', 'log_exception_to_server', ([], {}), '()\n', (8050, 8052), False, 'from dbnd._core.log.external_exception_logging import log_exception_to_server\n'), ((5995, 6013), 'dbnd._core.current.current_task_run', 'current_task_run', ([], {}), '()\n', (6011, 6013), False, 'from dbnd._core.current import current_task_run, get_databand_run, is_verbose, try_get_current_task\n'), ((7002, 7010), 'dbnd._core.utils.timezone.utcnow', 'utcnow', ([], {}), '()\n', (7008, 7010), False, 'from dbnd._core.utils.timezone import utcnow\n'), ((7040, 7080), 'dbnd._core.task_run.task_run_error.TaskRunError.build_from_ex', 'TaskRunError.build_from_ex', (['ex', 'task_run'], {}), '(ex, task_run)\n', (7066, 7080), False, 'from dbnd._core.task_run.task_run_error import TaskRunError\n'), ((9819, 9910), 'dbnd._core.errors.errors_utils.log_exception', 'log_exception', (['"""Failed to log input param to tracking store."""'], {'ex': 'ex', 'non_critical': '(True)'}), "('Failed to log input param to tracking store.', ex=ex,\n    non_critical=True)\n", (9832, 9910), False, 'from dbnd._core.errors.errors_utils import log_exception\n'), ((9291, 9339), 'targets.value_meta.ValueMetaConf', 'ValueMetaConf', ([], {'log_preview': '(True)', 'log_schema': '(True)'}), '(log_preview=True, log_schema=True)\n', (9304, 9339), False, 'from targets.value_meta import ValueMetaConf\n')] |
Benardi/redis-basics | api.py | 614a15afe47780886bb6088f4ae45c6a7cbc6e22 | import logging
from json import loads, dumps
from datetime import timedelta
from argparse import ArgumentParser
from redis import Redis
from flask import Response, Flask, request
app = Flask(__name__)
log = logging.getLogger(__name__)
parser = ArgumentParser()
parser.add_argument("-a", "--address",
action="store", dest="address",
type=str, required=True,
help="Address for api")
parser.add_argument("-p", "--port",
action="store", dest="port",
type=str, required=True,
help="Port for api")
parser.add_argument("-c", "--crt",
action="store", dest="cert",
type=str, required=False,
help="Path to certificate for this API")
parser.add_argument("-k", "--key",
action="store", dest="key",
type=str, required=False,
help="Path to key of certificate used by this API")
parser.add_argument("-rp", "--redis-port",
action="store", dest="redis-port",
type=str, required=True,
help="Port for Redis client")
args = vars(parser.parse_args())
api_address = args["address"]
api_port = args["port"]
api_cert = args["cert"]
api_key = args["key"]
redis_port = args["redis-port"]
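# Illustrative launch command derived from the flags defined above (the
# concrete values are made up):
#
#     python api.py -a 0.0.0.0 -p 5000 -rp 6379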
r = Redis(port=redis_port, charset="utf-8", decode_responses=True)
@app.route("/hash", methods=['POST'])
def create_redis_hash():
data = loads(request.data)
success = r.hmset(data["key"], data["pairs"])
if data.get("expire") is not None:
expiration = timedelta(**data.get("expire"))
r.expire(data["key"], expiration)
response_body = {"success": success}
response_body[data["key"]] = r.hgetall(data["key"])
return Response(dumps(response_body), status=200, mimetype="application/json")
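# Illustrative request body for the POST /hash endpoint above; the field
# names come from the handler, the concrete values are made up:
#
#     {"key": "user:1",
#      "pairs": {"name": "ada", "lang": "py"},
#      "expire": {"minutes": 5}}   # kwargs are forwarded to datetime.timedelta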
@app.route("/hash", methods=['PUT'])
def update_redis_hash():
data = loads(request.data)
success = r.hmset(data["key"], data["pairs"])
if data.get("expire") is not None:
expiration = timedelta(**data.get("expire"))
r.expire(data["key"], expiration)
if data.get("newkey") is not None:
r.rename(data["key"], data["newkey"])
response_body = {"success": success}
if data.get("newkey") is not None:
response_body[data["newkey"]] = r.hgetall(data["newkey"])
else:
response_body[data["key"]] = r.hgetall(data["key"])
return Response(dumps(response_body), status=200, mimetype="application/json")
@app.route("/hash", methods=['GET'])
def get_redis_hash():
response_body = {"success": True}
key = request.headers.get("key")
response_body[key] = r.hgetall(key)
return Response(dumps(response_body), status=200, mimetype="application/json")
@app.route("/key", methods=['DELETE'])
def delete_redis_key():
status = 200
key = request.headers.get("key")
success = r.delete(key)
if not success:
status = 404
response_body = {"success": bool(success)}
return Response(dumps(response_body), status=status, mimetype="application/json")
@app.route("/list", methods=['POST'])
def create_redis_list():
data = loads(request.data)
strat = data.get("strategy")
    if strat == "left":  # a None strategy falls through to the default right push
length = r.lpush(data["key"], *data["values"])
else:
length = r.rpush(data["key"], *data["values"])
response_body = {"length": length}
response_body[data["key"]] = r.lrange(data["key"], 0, -1)
return Response(dumps(response_body), status=200, mimetype="application/json")
@app.route("/list", methods=['GET'])
def get_entire_list():
response_body = {"success": True}
key = request.headers.get("key")
response_body[key] = r.lrange(key, 0, -1)
return Response(dumps(response_body), status=200, mimetype="application/json")
@app.route("/list/<idx>", methods=['GET'])
def get_list_at_idx(idx):
response_body = {"success": True}
key = request.headers.get("key")
response_body[key] = {}
response_body[key][str(idx)] = r.lindex(key, idx)
return Response(dumps(response_body), status=200, mimetype="application/json")
@app.route("/set", methods=['POST'])
def create_add_set():
data = loads(request.data)
length = r.sadd(data["key"], *data["values"])
response_body = {"length": length}
response_body[data["key"]] = list(r.smembers(data["key"]))
return Response(dumps(response_body), status=200, mimetype="application/json")
@app.route("/set/<n_items>", methods=['GET'])
def get_n_items_set(n_items):
    response_body = {"success": True}
    key = request.headers.get("key")
    response_body[key] = list(r.srandmember(key, n_items))
return Response(dumps(response_body), status=200, mimetype="application/json")
@app.route("/set", methods=['GET'])
def get_set():
    response_body = {"success": True}
    key = request.headers.get("key")
    response_body[key] = list(r.smembers(key))
return Response(dumps(response_body), status=200, mimetype="application/json")
def start_api(address, port, clnt_cert=None, clnt_key=None):
if clnt_cert is None or clnt_key is None:
app.run(host=address, port=port, debug=False)
else:
app.run(host=address, port=port,
ssl_context=(clnt_cert, clnt_key), debug=False)
if api_cert is None or api_key is None:
start_api(api_address, api_port)
else:
start_api(api_address, api_port, api_cert, api_key)
| [((197, 212), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (202, 212), False, 'from flask import Response, Flask, request\n'), ((219, 246), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (236, 246), False, 'import logging\n'), ((257, 273), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (271, 273), False, 'from argparse import ArgumentParser\n'), ((1383, 1445), 'redis.Redis', 'Redis', ([], {'port': 'redis_port', 'charset': '"""utf-8"""', 'decode_responses': '(True)'}), "(port=redis_port, charset='utf-8', decode_responses=True)\n", (1388, 1445), False, 'from redis import Redis\n'), ((1522, 1541), 'json.loads', 'loads', (['request.data'], {}), '(request.data)\n', (1527, 1541), False, 'from json import loads, dumps\n'), ((1985, 2004), 'json.loads', 'loads', (['request.data'], {}), '(request.data)\n', (1990, 2004), False, 'from json import loads, dumps\n'), ((2681, 2707), 'flask.request.headers.get', 'request.headers.get', (['"""key"""'], {}), "('key')\n", (2700, 2707), False, 'from flask import Response, Flask, request\n'), ((2924, 2950), 'flask.request.headers.get', 'request.headers.get', (['"""key"""'], {}), "('key')\n", (2943, 2950), False, 'from flask import Response, Flask, request\n'), ((3231, 3250), 'json.loads', 'loads', (['request.data'], {}), '(request.data)\n', (3236, 3250), False, 'from json import loads, dumps\n'), ((3747, 3773), 'flask.request.headers.get', 'request.headers.get', (['"""key"""'], {}), "('key')\n", (3766, 3773), False, 'from flask import Response, Flask, request\n'), ((4024, 4050), 'flask.request.headers.get', 'request.headers.get', (['"""key"""'], {}), "('key')\n", (4043, 4050), False, 'from flask import Response, Flask, request\n'), ((4290, 4309), 'json.loads', 'loads', (['request.data'], {}), '(request.data)\n', (4295, 4309), False, 'from json import loads, dumps\n'), ((4678, 4704), 'flask.request.headers.get', 'request.headers.get', (['"""key"""'], {}), "('key')\n", (4697, 4704), False, 'from flask import Response, Flask, request\n'), ((4957, 4983), 'flask.request.headers.get', 'request.headers.get', (['"""key"""'], {}), "('key')\n", (4976, 4983), False, 'from flask import Response, Flask, request\n'), ((1847, 1867), 'json.dumps', 'dumps', (['response_body'], {}), '(response_body)\n', (1852, 1867), False, 'from json import loads, dumps\n'), ((2509, 2529), 'json.dumps', 'dumps', (['response_body'], {}), '(response_body)\n', (2514, 2529), False, 'from json import loads, dumps\n'), ((2769, 2789), 'json.dumps', 'dumps', (['response_body'], {}), '(response_body)\n', (2774, 2789), False, 'from json import loads, dumps\n'), ((3089, 3109), 'json.dumps', 'dumps', (['response_body'], {}), '(response_body)\n', (3094, 3109), False, 'from json import loads, dumps\n'), ((3574, 3594), 'json.dumps', 'dumps', (['response_body'], {}), '(response_body)\n', (3579, 3594), False, 'from json import loads, dumps\n'), ((3842, 3862), 'json.dumps', 'dumps', (['response_body'], {}), '(response_body)\n', (3847, 3862), False, 'from json import loads, dumps\n'), ((4155, 4175), 'json.dumps', 'dumps', (['response_body'], {}), '(response_body)\n', (4160, 4175), False, 'from json import loads, dumps\n'), ((4489, 4509), 'json.dumps', 'dumps', (['response_body'], {}), '(response_body)\n', (4494, 4509), False, 'from json import loads, dumps\n'), ((4793, 4813), 'json.dumps', 'dumps', (['response_body'], {}), '(response_body)\n', (4798, 4813), False, 'from json import loads, dumps\n'), ((5060, 5080), 'json.dumps', 'dumps', (['response_body'], {}), '(response_body)\n', (5065, 5080), False, 'from json import loads, dumps\n')] |
Ki-Seki/gadgets | zhihu_spider/ZhihuSpider/spiders/zhihu.py | 6e031e1f6536a15b48e3beb80ba8bf31d2a3db7a | """
Before starting this spider, Chrome must be started by hand; the cmd commands are:
cd into the directory that contains the Chrome executable
run: chrome.exe --remote-debugging-port=9222
Then open http://127.0.0.1:9222/json in the browser address bar; if the page shows JSON data, the manual start succeeded.
After starting this spider, watch the command line for prompts!
What must be set in settings:
# ROBOTSTXT_OBEY = False  # if this is not disabled, the parse method will not run
# COOKIES_ENABLED = True  # so that Request objects pass cookies along automatically
# USER_AGENT = an appropriate value
# DOWNLOADER_MIDDLEWARES configured so the user agent is rotated automatically
"""
import re
import json
import datetime
import scrapy
from scrapy.loader import ItemLoader
from urllib import parse
from ZhihuSpider.utils.browsezhihu import get_cookies
from ZhihuSpider import settings
from ZhihuSpider.items import ZhihuQuestionItem, ZhihuAnswerItem
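# Illustrative helper (a sketch added here, not part of the original spider):
# verifies that the manually launched remote-debugging Chrome instance from the
# module docstring is reachable before crawling starts.
def _debug_chrome_is_up(port=9222):
    import requests  # assumed available in the environment; only needed for this check
    try:
        return requests.get('http://127.0.0.1:{}/json'.format(port), timeout=2).ok
    except requests.RequestException:
        return False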
class ZhihuSpider(scrapy.Spider):
name = 'zhihu'
allowed_domains = ['zhihu.com']
start_urls = ['http://zhihu.com/']
    # Generic URL for requesting the first page of answers to a question
# 0: question id, 1: offset, 2: limit
start_answer_urls = 'https://www.zhihu.com/api/v4/questions/{0}/answers?include=data%5B*%5D.is_normal%2Cadmin_closed_comment%2Creward_info%2Cis_collapsed%2Cannotation_action%2Cannotation_detail%2Ccollapse_reason%2Cis_sticky%2Ccollapsed_by%2Csuggest_edit%2Ccomment_count%2Ccan_comment%2Ccontent%2Ceditable_content%2Cattachment%2Cvoteup_count%2Creshipment_settings%2Ccomment_permission%2Ccreated_time%2Cupdated_time%2Creview_info%2Crelevant_info%2Cquestion%2Cexcerpt%2Cis_labeled%2Cpaid_info%2Cpaid_info_content%2Crelationship.is_authorized%2Cis_author%2Cvoting%2Cis_thanked%2Cis_nothelp%2Cis_recognized%3Bdata%5B*%5D.mark_infos%5B*%5D.url%3Bdata%5B*%5D.author.follower_count%2Cvip_info%2Cbadge%5B*%5D.topics%3Bdata%5B*%5D.settings.table_of_content.enabled&offset={1}&limit={2}&sort_by=default&platform=desktop'
headers = {
"HOST": "www.zhihu.com",
"Referer": "https://www.zhihu.com",
"User-Agent": settings.USER_AGENT
}
    # Extract every URL on the home page that points to a question
def parse(self, response, **kwargs):
        # .extract() is a parsel.selection function; it pulls the data field out of each element in the set
all_urls = response.css("a::attr(href)").extract()
        # urllib.parse.urljoin can merge two partial URLs into one
all_urls = [parse.urljoin(response.url, url) for url in all_urls]
all_urls = filter(lambda x: True if x.startswith("https") else False, all_urls)
for url in all_urls:
            # (/|$) matches either a '/' or the end of the string
            match_obj = re.match(r"(.*zhihu.com/question/(\d+))(/|$).*", url)
            if match_obj:  # the URL contains a link to a question page
question_url = match_obj.group(1)
question_id = match_obj.group(2)
                yield scrapy.Request(question_url, callback=self.parse_question, headers=self.headers,
                                     meta={"question_id": question_id, "url": question_url})  # meta passes values down to the callback
def parse_question(self, response):
"""
        Extract the question item from a question page.
        """
        # With ItemLoader, every field value is collected as a list
item_loader = ItemLoader(item=ZhihuQuestionItem(), response=response)
item_loader.add_value("question_id", response.meta.get("question_id", 0)) # 使用 meta 来加载
item_loader.add_css("topics", "head > meta[name=keywords]::attr(content)")
item_loader.add_value("url", response.meta.get("url", ''))
item_loader.add_css("title", "h1.QuestionHeader-title::text")
item_loader.add_css("content", ".QuestionRichText span:nth-child(1)::text")
item_loader.add_css("answer_num", ".List-headerText > span::text, .ViewAll:nth-child(1) > a::text")
item_loader.add_css("comments_num", ".QuestionHeader-Comment button::text")
item_loader.add_css("watch_user_num", ".NumberBoard-itemValue::attr(title)")
item_loader.add_css("click_num", ".NumberBoard-itemValue::attr(title)")
        # On fetching create_time / update_time:
        # request the question's log URL, then pass the item_loader contents above down as a meta dict,
        # and finally let get_create_update_of_question assemble the question_item and yield it.
        # The unfinished partial implementation is sketched below:
# tmp = response.css(".QuestionHeader-menu > a").extract()[0]
# log_url = parse.urljoin(self.start_urls[0], tmp)
# yield scrapy.Request(log_url, callback=self.get_create_update_of_question, headers=self.headers, meta=......)
question_item = item_loader.load_item()
yield question_item
yield scrapy.Request(self.start_answer_urls.format(response.meta.get("question_id", ''), 0, 20)
, callback=self.parse_answer, headers=self.headers)
# def get_create_update_of_question(self, response):
# pass
def parse_answer(self, response):
"""
        Extract answer items from a page of the answers API.
"""
answer_json = json.loads(response.text)
is_end = answer_json["paging"]["is_end"]
next_url = answer_json["paging"]["next"]
for answer in answer_json["data"]:
answer_item = ZhihuAnswerItem()
answer_item["answer_id"] = answer["id"]
answer_item["url"] = answer["url"]
answer_item["question_id"] = answer["question"]["id"]
answer_item["author_id"] = answer["author"]["id"]
answer_item["content"] = answer["content"] if "content" in answer else None
answer_item["praise_num"] = answer["voteup_count"]
answer_item["comments_num"] = answer["comment_count"]
answer_item["create_time"] = answer["created_time"]
answer_item["update_time"] = answer["updated_time"]
answer_item["crawl_time"] = datetime.datetime.now()
yield answer_item
if not is_end:
yield scrapy.Request(next_url, callback=self.parse_answer, headers=self.headers)
def start_requests(self):
        # Before using selenium, start Chrome with the following cmd commands:
# cd "C:\Program Files\Google\Chrome\Application"
# chrome.exe --remote-debugging-port=9222
        # The python line below cannot be used: os.system blocks waiting for the command to return, unless run in a separate thread
# os.system('"C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe" --remote-debugging-port=9222')
cookies = get_cookies()
yield scrapy.Request(url=self.start_urls[0], dont_filter=True, cookies=cookies)
| [((4606, 4631), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (4616, 4631), False, 'import json\n'), ((5969, 5982), 'ZhihuSpider.utils.browsezhihu.get_cookies', 'get_cookies', ([], {}), '()\n', (5980, 5982), False, 'from ZhihuSpider.utils.browsezhihu import get_cookies\n'), ((2082, 2114), 'urllib.parse.urljoin', 'parse.urljoin', (['response.url', 'url'], {}), '(response.url, url)\n', (2095, 2114), False, 'from urllib import parse\n'), ((2310, 2363), 're.match', 're.match', (['"""(.*zhihu.com/question/(\\\\d+))(/|$).*"""', 'url'], {}), "('(.*zhihu.com/question/(\\\\d+))(/|$).*', url)\n", (2318, 2363), False, 'import re\n'), ((4800, 4817), 'ZhihuSpider.items.ZhihuAnswerItem', 'ZhihuAnswerItem', ([], {}), '()\n', (4815, 4817), False, 'from ZhihuSpider.items import ZhihuQuestionItem, ZhihuAnswerItem\n'), ((5431, 5454), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5452, 5454), False, 'import datetime\n'), ((5997, 6070), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'self.start_urls[0]', 'dont_filter': '(True)', 'cookies': 'cookies'}), '(url=self.start_urls[0], dont_filter=True, cookies=cookies)\n', (6011, 6070), False, 'import scrapy\n'), ((2901, 2920), 'ZhihuSpider.items.ZhihuQuestionItem', 'ZhihuQuestionItem', ([], {}), '()\n', (2918, 2920), False, 'from ZhihuSpider.items import ZhihuQuestionItem, ZhihuAnswerItem\n'), ((5528, 5602), 'scrapy.Request', 'scrapy.Request', (['next_url'], {'callback': 'self.parse_answer', 'headers': 'self.headers'}), '(next_url, callback=self.parse_answer, headers=self.headers)\n', (5542, 5602), False, 'import scrapy\n'), ((2539, 2680), 'scrapy.Request', 'scrapy.Request', (['question_url'], {'callback': 'self.parse_question', 'headers': 'self.headers', 'meta': "{'question_id': question_id, 'url': question_url}"}), "(question_url, callback=self.parse_question, headers=self.\n headers, meta={'question_id': question_id, 'url': question_url})\n", (2553, 2680), False, 'import scrapy\n')] |
Kyle-Kyle/angr | tests/test_bindiff.py | 345b2131a7a67e3a6ffc7d9fd475146a3e12f837 | import nose
import angr
import logging
l = logging.getLogger("angr.tests.test_bindiff")
import os
test_location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests')
# TODO: make a better test
def test_bindiff_x86_64():
binary_path_1 = os.path.join(test_location, 'x86_64', 'bindiff_a')
binary_path_2 = os.path.join(test_location, 'x86_64', 'bindiff_b')
b = angr.Project(binary_path_1, load_options={"auto_load_libs": False})
b2 = angr.Project(binary_path_2, load_options={"auto_load_libs": False})
bindiff = b.analyses.BinDiff(b2)
identical_functions = bindiff.identical_functions
differing_functions = bindiff.differing_functions
unmatched_functions = bindiff.unmatched_functions
# check identical functions
nose.tools.assert_in((0x40064c, 0x40066a), identical_functions)
# check differing functions
nose.tools.assert_in((0x400616, 0x400616), differing_functions)
# check unmatched functions
nose.tools.assert_less_equal(len(unmatched_functions[0]), 1)
nose.tools.assert_less_equal(len(unmatched_functions[1]), 2)
# check for no major regressions
nose.tools.assert_greater(len(identical_functions), len(differing_functions))
nose.tools.assert_less(len(differing_functions), 4)
# check a function diff
fdiff = bindiff.get_function_diff(0x400616, 0x400616)
    block_matches = {(a.addr, b.addr) for a, b in fdiff.block_matches}
nose.tools.assert_in((0x40064a, 0x400668), block_matches)
nose.tools.assert_in((0x400616, 0x400616), block_matches)
nose.tools.assert_in((0x40061e, 0x40061e), block_matches)
def run_all():
functions = globals()
all_functions = dict(filter((lambda kv: kv[0].startswith('test_')), functions.items()))
for f in sorted(all_functions.keys()):
if hasattr(all_functions[f], '__call__'):
all_functions[f]()
if __name__ == "__main__":
logging.getLogger("angr.analyses.bindiff").setLevel(logging.DEBUG)
import sys
if len(sys.argv) > 1:
globals()['test_' + sys.argv[1]]()
else:
run_all()
| [((44, 88), 'logging.getLogger', 'logging.getLogger', (['"""angr.tests.test_bindiff"""'], {}), "('angr.tests.test_bindiff')\n", (61, 88), False, 'import logging\n'), ((281, 331), 'os.path.join', 'os.path.join', (['test_location', '"""x86_64"""', '"""bindiff_a"""'], {}), "(test_location, 'x86_64', 'bindiff_a')\n", (293, 331), False, 'import os\n'), ((352, 402), 'os.path.join', 'os.path.join', (['test_location', '"""x86_64"""', '"""bindiff_b"""'], {}), "(test_location, 'x86_64', 'bindiff_b')\n", (364, 402), False, 'import os\n'), ((411, 478), 'angr.Project', 'angr.Project', (['binary_path_1'], {'load_options': "{'auto_load_libs': False}"}), "(binary_path_1, load_options={'auto_load_libs': False})\n", (423, 478), False, 'import angr\n'), ((488, 555), 'angr.Project', 'angr.Project', (['binary_path_2'], {'load_options': "{'auto_load_libs': False}"}), "(binary_path_2, load_options={'auto_load_libs': False})\n", (500, 555), False, 'import angr\n'), ((792, 853), 'nose.tools.assert_in', 'nose.tools.assert_in', (['(4195916, 4195946)', 'identical_functions'], {}), '((4195916, 4195946), identical_functions)\n', (812, 853), False, 'import nose\n'), ((892, 953), 'nose.tools.assert_in', 'nose.tools.assert_in', (['(4195862, 4195862)', 'differing_functions'], {}), '((4195862, 4195862), differing_functions)\n', (912, 953), False, 'import nose\n'), ((1457, 1512), 'nose.tools.assert_in', 'nose.tools.assert_in', (['(4195914, 4195944)', 'block_matches'], {}), '((4195914, 4195944), block_matches)\n', (1477, 1512), False, 'import nose\n'), ((1519, 1574), 'nose.tools.assert_in', 'nose.tools.assert_in', (['(4195862, 4195862)', 'block_matches'], {}), '((4195862, 4195862), block_matches)\n', (1539, 1574), False, 'import nose\n'), ((1581, 1636), 'nose.tools.assert_in', 'nose.tools.assert_in', (['(4195870, 4195870)', 'block_matches'], {}), '((4195870, 4195870), block_matches)\n', (1601, 1636), False, 'import nose\n'), ((145, 171), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (161, 171), False, 'import os\n'), ((1929, 1971), 'logging.getLogger', 'logging.getLogger', (['"""angr.analyses.bindiff"""'], {}), "('angr.analyses.bindiff')\n", (1946, 1971), False, 'import logging\n')] |
nucluster/us_states | main/handle_file.py | 26cca38990b9afb6a2b8cc4d1365409428793c6d | from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
# def handle_uploaded_file(f):
# with open('screenshot.png', 'wb') as destination:
# # for chunk in f.chunks():
# # destination.write(chunk)
# destination.write(f)
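# Read the image file as raw bytes ('rb') so it can be passed to an upload handler.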
with open(
BASE_DIR/'media'/'Greater_coat_of_arms_of_the_United_States.png', 'rb'
) as file:
flag = file.read()
# handle_uploaded_file(flag)
print(type(flag))
print(len(flag))
# print(flag)
# for place in sys.path:
# print(place) | [((38, 52), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (42, 52), False, 'from pathlib import Path\n')] |
iicarus-bit/google-ctf | 2018/finals/pwn-gdb-as-a-service/web_challenge/challenge/gaas.py | 4eb8742bca58ff071ff8f6814d41d9ec7eb1db4b | #!/usr/bin/env python3
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from aiohttp import web
import capstone
import functools
from gdbproc import GDBProcess
import socketio
import asyncio
import codecs
import os
enable_logging = False
premium = 'PREMIUM' in os.environ
if premium:
access_key = os.getenv('PREMIUM_KEY')
runnable = ['/home/user/printwebflag']
else:
access_key = os.getenv('TRIAL_KEY')
runnable = ['/bin/sleep', '20']
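# x86-64 instructions are at most 15 bytes long, hence the read size used when disassembling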
MAX_INSN_LEN = 15
capstone_md = capstone.Cs(capstone.CS_ARCH_X86, capstone.CS_MODE_64)
sio = socketio.AsyncServer()
app = web.Application()
sio.attach(app)
with open('index.html') as f:
index_html = f.read()
async def index(request):
  if 'key' not in request.cookies:
return web.Response(status=401, text='permission denied (missing key)', content_type='text/html')
if request.cookies['key'] != access_key:
return web.Response(status=401, text='permission denied (invalid key)', content_type='text/html')
return web.Response(text=index_html, content_type='text/html')
app.add_routes([web.get('/', index),
web.get('/{name}', index)])
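# Illustrative only (not part of the original service): every request must
# carry the access key as a cookie; by default aiohttp serves on port 8080, e.g.
#   curl -H 'Cookie: key=<ACCESS_KEY>' http://localhost:8080/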
gdb_sessions = {}
stop_queue_readers = {}
async def on_shutdown(app):
  # gather needs the coroutines unpacked, and the key list snapshotted because
  # delete_gdb_process mutates gdb_sessions while the tasks run
  await asyncio.gather(*(delete_gdb_process(sid) for sid in list(gdb_sessions)))
app.on_shutdown.append(on_shutdown)
def log(msg):
if enable_logging:
print('[*] {}'.format(msg))
@sio.on('connect')
def connect(sid, environ):
log('connected {}'.format(sid))
  if 'key={}'.format(access_key) not in environ.get('HTTP_COOKIE', ''):
    log('access_key not found {}'.format(environ.get('HTTP_COOKIE', '')))
return False
@sio.on('disconnect')
async def disconnect(sid):
log('disconnected {}'.format(sid))
await delete_gdb_process(sid)
async def stop_queue_reader(sid, queue):
while True:
pkt = await queue.get()
await update_all(sid)
async def create_gdb_process(sid):
stop_queue = asyncio.Queue()
gdb_sessions[sid] = await GDBProcess.create(runnable, stop_queue, env={'KEY': access_key}, log_fn=log)
loop = asyncio.get_event_loop()
stop_queue_readers[sid] = loop.create_task(stop_queue_reader(sid, stop_queue))
async def delete_gdb_process(sid):
if sid in gdb_sessions:
stop_queue_readers[sid].cancel()
del stop_queue_readers[sid]
await gdb_sessions[sid].release()
del gdb_sessions[sid]
@sio.on('start')
async def start(sid):
await delete_gdb_process(sid)
await create_gdb_process(sid)
# Reading registers doesn't work on ubuntu 18.04 for some reason.
# Step once as a work around
step(sid)
async def update_all(sid):
log('updating sid {}'.format(sid))
regs_task = getregs(sid)
maps_task = getmaps(sid)
asm_task = getasm(sid, {'addr': await gdb_sessions[sid].get_reg('rip'), 'count': 100})
await asyncio.gather(regs_task, maps_task, asm_task)
log('update done')
@sio.on('step')
def step(sid):
gdb_sessions[sid].step()
@sio.on('cont')
def cont(sid):
gdb_sessions[sid].cont()
@sio.on('stop')
def stop(sid):
gdb_sessions[sid].interrupt()
async def getregs(sid):
regs = await gdb_sessions[sid].get_regs()
await sio.emit('regs', regs, room=sid)
@sio.on('mem')
async def getmem(sid, msg):
addr = msg['addr']
count = msg['count']
data = gdb_sessions[sid].read_mem(addr, count)
await sio.emit('mem', {'addr': addr, 'data': data}, room=sid)
async def getmaps(sid):
maps = gdb_sessions[sid].maps()
await sio.emit('maps', maps, room=sid)
@sio.on('break')
async def setbreakpoint(sid, data):
addr = data['addr']
await gdb_sessions[sid].set_breakpoint(addr)
await sio.emit('breakpoints', gdb_sessions[sid].breakpoints(), room=sid)
@sio.on('unbreak')
async def rmbreakpoint(sid, data):
addr = data['addr']
await gdb_sessions[sid].remove_breakpoint(addr)
await sio.emit('breakpoints', gdb_sessions[sid].breakpoints(), room=sid)
@sio.on('search')
async def search(sid, data):
q = data['q']
qtype = data['type']
await sio.emit('search_result', gdb_sessions[sid].search(q.encode(), qtype), room=sid)
async def getasm(sid, data):
addr = data['addr']
count = data['count']
result = []
for _ in range(count):
data = gdb_sessions[sid].read_mem(addr, MAX_INSN_LEN)
try:
disasm = next(capstone_md.disasm_lite(data, addr))
except StopIteration:
break
result.append(disasm)
addr += disasm[1]
await sio.emit('asm', result, room=sid)
if __name__ == '__main__':
web.run_app(app)
| [((1008, 1062), 'capstone.Cs', 'capstone.Cs', (['capstone.CS_ARCH_X86', 'capstone.CS_MODE_64'], {}), '(capstone.CS_ARCH_X86, capstone.CS_MODE_64)\n', (1019, 1062), False, 'import capstone\n'), ((1070, 1092), 'socketio.AsyncServer', 'socketio.AsyncServer', ([], {}), '()\n', (1090, 1092), False, 'import socketio\n'), ((1099, 1116), 'aiohttp.web.Application', 'web.Application', ([], {}), '()\n', (1114, 1116), False, 'from aiohttp import web\n'), ((830, 854), 'os.getenv', 'os.getenv', (['"""PREMIUM_KEY"""'], {}), "('PREMIUM_KEY')\n", (839, 854), False, 'import os\n'), ((917, 939), 'os.getenv', 'os.getenv', (['"""TRIAL_KEY"""'], {}), "('TRIAL_KEY')\n", (926, 939), False, 'import os\n'), ((1506, 1561), 'aiohttp.web.Response', 'web.Response', ([], {'text': 'index_html', 'content_type': '"""text/html"""'}), "(text=index_html, content_type='text/html')\n", (1518, 1561), False, 'from aiohttp import web\n'), ((2408, 2423), 'asyncio.Queue', 'asyncio.Queue', ([], {}), '()\n', (2421, 2423), False, 'import asyncio\n'), ((2538, 2562), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (2560, 2562), False, 'import asyncio\n'), ((4904, 4920), 'aiohttp.web.run_app', 'web.run_app', (['app'], {}), '(app)\n', (4915, 4920), False, 'from aiohttp import web\n'), ((1261, 1355), 'aiohttp.web.Response', 'web.Response', ([], {'status': '(401)', 'text': '"""permission denied (missing key)"""', 'content_type': '"""text/html"""'}), "(status=401, text='permission denied (missing key)',\n content_type='text/html')\n", (1273, 1355), False, 'from aiohttp import web\n'), ((1406, 1500), 'aiohttp.web.Response', 'web.Response', ([], {'status': '(401)', 'text': '"""permission denied (invalid key)"""', 'content_type': '"""text/html"""'}), "(status=401, text='permission denied (invalid key)',\n content_type='text/html')\n", (1418, 1500), False, 'from aiohttp import web\n'), ((1579, 1598), 'aiohttp.web.get', 'web.get', (['"""/"""', 'index'], {}), "('/', index)\n", (1586, 1598), False, 'from aiohttp import web\n'), ((1616, 1641), 'aiohttp.web.get', 'web.get', (['"""/{name}"""', 'index'], {}), "('/{name}', index)\n", (1623, 1641), False, 'from aiohttp import web\n'), ((2452, 2528), 'gdbproc.GDBProcess.create', 'GDBProcess.create', (['runnable', 'stop_queue'], {'env': "{'KEY': access_key}", 'log_fn': 'log'}), "(runnable, stop_queue, env={'KEY': access_key}, log_fn=log)\n", (2469, 2528), False, 'from gdbproc import GDBProcess\n'), ((3270, 3316), 'asyncio.gather', 'asyncio.gather', (['regs_task', 'maps_task', 'asm_task'], {}), '(regs_task, maps_task, asm_task)\n', (3284, 3316), False, 'import asyncio\n')] |
BubuLK/sfepy | examples/multi_physics/piezo_elasticity.py | 3e8e2082c26d574dc334fe3a0e0eeb723f7a6657 | r"""
Piezo-elasticity problem - linear elastic material with piezoelectric
effects.
Find :math:`\ul{u}`, :math:`\phi` such that:
.. math::
- \omega^2 \int_{Y} \rho\ \ul{v} \cdot \ul{u}
+ \int_{Y} D_{ijkl}\ e_{ij}(\ul{v}) e_{kl}(\ul{u})
- \int_{Y_2} g_{kij}\ e_{ij}(\ul{v}) \nabla_k \phi
= 0
\;, \quad \forall \ul{v} \;,
\int_{Y_2} g_{kij}\ e_{ij}(\ul{u}) \nabla_k \psi
+ \int_{Y} K_{ij} \nabla_i \psi \nabla_j \phi
= 0
\;, \quad \forall \psi \;,
where
.. math::
D_{ijkl} = \mu (\delta_{ik} \delta_{jl}+\delta_{il} \delta_{jk}) +
\lambda \ \delta_{ij} \delta_{kl}
\;.
"""
from __future__ import absolute_import
import os
import numpy as nm
from sfepy import data_dir
from sfepy.discrete.fem import MeshIO
from sfepy.mechanics.matcoefs import stiffness_from_lame
import six
def post_process(out, pb, state, extend=False):
"""
Calculate and output the strain and stresses for the given state.
"""
from sfepy.base.base import Struct
from sfepy.discrete.fem import extend_cell_data
ev = pb.evaluate
strain = ev('ev_cauchy_strain.i.Y(u)', mode='el_avg')
stress = ev('ev_cauchy_stress.i.Y(inclusion.D, u)', mode='el_avg')
piezo = -ev('ev_piezo_stress.i.Y2(inclusion.coupling, phi)',
mode='el_avg')
piezo = extend_cell_data(piezo, pb.domain, 'Y2', val=0.0)
piezo_strain = ev('ev_piezo_strain.i.Y(inclusion.coupling, u)',
mode='el_avg')
out['cauchy_strain'] = Struct(name='output_data', mode='cell',
data=strain, dofs=None)
out['elastic_stress'] = Struct(name='output_data', mode='cell',
data=stress, dofs=None)
out['piezo_stress'] = Struct(name='output_data', mode='cell',
data=piezo, dofs=None)
out['piezo_strain'] = Struct(name='output_data', mode='cell',
data=piezo_strain, dofs=None)
out['total_stress'] = Struct(name='output_data', mode='cell',
data=stress + piezo, dofs=None)
return out
filename_mesh = data_dir + '/meshes/2d/special/circle_in_square.mesh'
## filename_mesh = data_dir + '/meshes/2d/special/circle_in_square_small.mesh'
## filename_mesh = data_dir + '/meshes/3d/special/cube_sphere.mesh'
## filename_mesh = data_dir + '/meshes/2d/special/cube_cylinder.mesh'
omega = 1
omega_squared = omega**2
conf_dir = os.path.dirname(__file__)
io = MeshIO.any_from_filename(filename_mesh, prefix_dir=conf_dir)
bbox, dim = io.read_bounding_box(ret_dim=True)
geom = {3 : '3_4', 2 : '2_3'}[dim]
x_left, x_right = bbox[:,0]
options = {
'post_process_hook' : 'post_process',
}
regions = {
'Y' : 'all',
'Y1' : 'cells of group 1',
'Y2' : 'cells of group 2',
'Y2_Surface': ('r.Y1 *v r.Y2', 'facet'),
'Left' : ('vertices in (x < %f)' % (x_left + 1e-3), 'facet'),
'Right' : ('vertices in (x > %f)' % (x_right - 1e-3), 'facet'),
}
fields = {
'displacement' : ('real', dim, 'Y', 1),
'potential' : ('real', 1, 'Y', 1),
}
variables = {
'u' : ('unknown field', 'displacement', 0),
'v' : ('test field', 'displacement', 'u'),
'phi' : ('unknown field', 'potential', 1),
'psi' : ('test field', 'potential', 'phi'),
}
ebcs = {
'u1' : ('Left', {'u.all' : 0.0}),
'u2' : ('Right', {'u.0' : 0.1}),
'phi' : ('Y2_Surface', {'phi.all' : 0.0}),
}
def get_inclusion_pars(ts, coor, mode=None, **kwargs):
"""TODO: implement proper 3D -> 2D transformation of constitutive
matrices."""
if mode == 'qp':
_, dim = coor.shape
sym = (dim + 1) * dim // 2
dielectric = nm.eye(dim, dtype=nm.float64)
# !!!
coupling = nm.ones((dim, sym), dtype=nm.float64)
# coupling[0,1] = 0.2
out = {
# Lame coefficients in 1e+10 Pa.
'D' : stiffness_from_lame(dim=2, lam=0.1798, mu=0.148),
# dielectric tensor
'dielectric' : dielectric,
# piezoelectric coupling
'coupling' : coupling,
'density' : nm.array([[0.1142]]), # in 1e4 kg/m3
}
for key, val in six.iteritems(out):
out[key] = val[None, ...]
return out
materials = {
'inclusion' : (None, 'get_inclusion_pars')
}
functions = {
'get_inclusion_pars' : (get_inclusion_pars,),
}
integrals = {
'i' : 2,
}
equations = {
'1' : """- %f * dw_volume_dot.i.Y(inclusion.density, v, u)
+ dw_lin_elastic.i.Y(inclusion.D, v, u)
- dw_piezo_coupling.i.Y2(inclusion.coupling, v, phi)
= 0""" % omega_squared,
'2' : """dw_piezo_coupling.i.Y2(inclusion.coupling, u, psi)
+ dw_diffusion.i.Y(inclusion.dielectric, psi, phi)
= 0""",
}
solvers = {
'ls' : ('ls.scipy_direct', {}),
'newton' : ('nls.newton',
{'i_max' : 1,
'eps_a' : 1e-10,
}),
}
| [((2455, 2480), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2470, 2480), False, 'import os\n'), ((2486, 2546), 'sfepy.discrete.fem.MeshIO.any_from_filename', 'MeshIO.any_from_filename', (['filename_mesh'], {'prefix_dir': 'conf_dir'}), '(filename_mesh, prefix_dir=conf_dir)\n', (2510, 2546), False, 'from sfepy.discrete.fem import MeshIO\n'), ((1312, 1361), 'sfepy.discrete.fem.extend_cell_data', 'extend_cell_data', (['piezo', 'pb.domain', '"""Y2"""'], {'val': '(0.0)'}), "(piezo, pb.domain, 'Y2', val=0.0)\n", (1328, 1361), False, 'from sfepy.discrete.fem import extend_cell_data\n'), ((1496, 1559), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'strain', 'dofs': 'None'}), "(name='output_data', mode='cell', data=strain, dofs=None)\n", (1502, 1559), False, 'from sfepy.base.base import Struct\n'), ((1622, 1685), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'stress', 'dofs': 'None'}), "(name='output_data', mode='cell', data=stress, dofs=None)\n", (1628, 1685), False, 'from sfepy.base.base import Struct\n'), ((1747, 1809), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'piezo', 'dofs': 'None'}), "(name='output_data', mode='cell', data=piezo, dofs=None)\n", (1753, 1809), False, 'from sfepy.base.base import Struct\n'), ((1869, 1938), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'piezo_strain', 'dofs': 'None'}), "(name='output_data', mode='cell', data=piezo_strain, dofs=None)\n", (1875, 1938), False, 'from sfepy.base.base import Struct\n'), ((1998, 2069), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': '(stress + piezo)', 'dofs': 'None'}), "(name='output_data', mode='cell', data=stress + piezo, dofs=None)\n", (2004, 2069), False, 'from sfepy.base.base import Struct\n'), ((3676, 3705), 'numpy.eye', 'nm.eye', (['dim'], {'dtype': 'nm.float64'}), '(dim, dtype=nm.float64)\n', (3682, 3705), True, 'import numpy as nm\n'), ((3739, 3776), 'numpy.ones', 'nm.ones', (['(dim, sym)'], {'dtype': 'nm.float64'}), '((dim, sym), dtype=nm.float64)\n', (3746, 3776), True, 'import numpy as nm\n'), ((4179, 4197), 'six.iteritems', 'six.iteritems', (['out'], {}), '(out)\n', (4192, 4197), False, 'import six\n'), ((3890, 3938), 'sfepy.mechanics.matcoefs.stiffness_from_lame', 'stiffness_from_lame', ([], {'dim': '(2)', 'lam': '(0.1798)', 'mu': '(0.148)'}), '(dim=2, lam=0.1798, mu=0.148)\n', (3909, 3938), False, 'from sfepy.mechanics.matcoefs import stiffness_from_lame\n'), ((4107, 4127), 'numpy.array', 'nm.array', (['[[0.1142]]'], {}), '([[0.1142]])\n', (4115, 4127), True, 'import numpy as nm\n')] |
rafaelbarretomg/Uninter | 01-logica-de-programacao-e-algoritmos/Aula 06/01 Tuplas/1.2 Desempacotamento de parametros em funcoes/ex01.py | 1f84b0103263177122663e991db3a8aeb106a959 | # Unpacking parameters in functions
# summing the values of a tuple
def soma(*num):
    total = 0  # renamed from 'soma' so the accumulator does not shadow the function name
    print('Tupla: {}'.format(num))
    for i in num:
        total += i
    return total
# Main program
print('Resultado: {}\n'.format(soma(1, 2)))
print('Resultado: {}\n'.format(soma(1, 2, 3, 4, 5, 6, 7, 8, 9)))
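# Illustrative addition (not in the original exercise): the * operator also
# unpacks an existing sequence into the positional arguments of a call.
valores = (10, 20, 30)  # hypothetical sample tuple
print('Resultado: {}\n'.format(soma(*valores)))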
| [] |
theallknowng/eKheti | services/model.py | 85e74f26bde7454293617ba727002c5c81402140 | import pandas
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
from keras.utils import np_utils
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import LabelEncoder
from sklearn.pipeline import Pipeline
import keras
import sys
import json
import requests
import numpy as np
# define baseline model
def baseline_model():
    # create model: 28 input features in, 7 output classes
    model = Sequential()
    model.add(Dense(56, input_dim=28, activation='relu'))
    model.add(Dense(112, activation='relu'))  # Keras ignores input_dim on non-first layers, so it is dropped here
    model.add(Dense(7, activation='softmax'))
# Compile model
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
return model
model = baseline_model()
model.load_weights("../model1.h5")
# data = sys.argv[1]
# data = '{"pH min":5.7,"pH max":7,"nitrogen min":109,"nitrogen max":146,"phosphorus min":20,"phosphorus max":30,"potasium min":78,"potasium max":115,"calcium min":270,"calcium max":990,"magnesium min":46,"magnesium max":96,"sulphur min":10,"sulphur max":10,"iron min":44,"iron max":46,"zinc min":3.87,"zinc max":5.87,"manganese min":4.81,"manganese max":4.81,"copper min":21,"copper max":26,"boron min":1.25,"boron max":2.25,"temperature min":25,"temperature max":35,"precipitation min":50,"precipitation max":60,"irrigation":"yes ","region":"barshi"}'
# data = '{"pH min":7.6,"pH max":7.6,"nitrogen min":150.53,"nitrogen max":150.53,"phosphorus min":55.96,"phosphorus max":55.96,"potasium min":728,"potasium max":728,"calcium min":45.56,"calcium max":45.56,"magnesium min":36.46,"magnesium max":36.46,"sulphur min":44.69,"sulphur max":44.69,"iron min":2.7,"iron max":2.7,"zinc min":0.49,"zinc max":0.49,"manganese min":2.16,"manganese max":2.16,"copper min":3.5,"copper max":3.5,"boron min":0.63,"boron max":0.63,"temperature min":21,"temperature max":31,"precipitation min":60.18,"precipitation max":60.18,"irrigation":"yes ","region":"barshi"}'
data= '{"pH min":5.7,"pH max":7,"nitrogen min":109,"nitrogen max":146,"phosphorus min":20,"phosphorus max":30,"potasium min":78,"potasium max":115,"calcium min":270,"calcium max":990,"magnesium min":46,"magnesium max":96,"sulphur min":10,"sulphur max":10,"iron min":44,"iron max":46,"zinc min":3.87,"zinc max":5.87,"manganese min":4.81,"manganese max":4.81,"copper min":21,"copper max":26,"boron min":1.25,"boron max":2.25,"temperature min":25,"temperature max":35,"precipitation min":50,"precipitation max":60,"irrigation":"yes ","region":"barshi"}'
data = json.loads(data)
dataframe = pandas.DataFrame(data,index=[0])
dataset = dataframe.values
X = dataset[:,0:28].astype(float)
op = model.predict(X)
#op = model.predict_classes(X)
#print(op)
#classes = np.argmax(op)
#print(classes)
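# np.argsort sorts ascending, so best_n[0] lists class indices from least to most likely; best_n[0][-1] is the top prediction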
best_n = np.argsort(op, axis=1)[:,-7:]
print(best_n[0])
| [((2587, 2603), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (2597, 2603), False, 'import json\n'), ((2620, 2653), 'pandas.DataFrame', 'pandas.DataFrame', (['data'], {'index': '[0]'}), '(data, index=[0])\n', (2636, 2653), False, 'import pandas\n'), ((496, 508), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (506, 508), False, 'from keras.models import Sequential\n'), ((2828, 2850), 'numpy.argsort', 'np.argsort', (['op'], {'axis': '(1)'}), '(op, axis=1)\n', (2838, 2850), True, 'import numpy as np\n'), ((523, 565), 'keras.layers.Dense', 'Dense', (['(56)'], {'input_dim': '(28)', 'activation': '"""relu"""'}), "(56, input_dim=28, activation='relu')\n", (528, 565), False, 'from keras.layers import Dense\n'), ((581, 624), 'keras.layers.Dense', 'Dense', (['(112)'], {'input_dim': '(56)', 'activation': '"""relu"""'}), "(112, input_dim=56, activation='relu')\n", (586, 624), False, 'from keras.layers import Dense\n'), ((640, 670), 'keras.layers.Dense', 'Dense', (['(7)'], {'activation': '"""softmax"""'}), "(7, activation='softmax')\n", (645, 670), False, 'from keras.layers import Dense\n')] |
erhuabushuo/sentry | tests/sentry/api/endpoints/test_project_details.py | 8b3bad10155aaacfdff80910e5972e64304e880c | from django.core.urlresolvers import reverse
from sentry.models import Project
from sentry.testutils import APITestCase
class ProjectDetailsTest(APITestCase):
def test_simple(self):
project = self.project # force creation
self.login_as(user=self.user)
url = reverse('sentry-api-0-project-details', kwargs={'project_id': project.id})
response = self.client.get(url)
assert response.status_code == 200
assert response.data['id'] == str(project.id)
class ProjectUpdateTest(APITestCase):
def test_simple(self):
project = self.project # force creation
self.login_as(user=self.user)
url = reverse('sentry-api-0-project-details', kwargs={'project_id': project.id})
resp = self.client.put(url, data={
'name': 'hello world',
'slug': 'foobar',
})
assert resp.status_code == 200, resp.content
project = Project.objects.get(id=project.id)
assert project.name == 'hello world'
assert project.slug == 'foobar'
class ProjectDeleteTest(APITestCase):
def test_simple(self):
project = self.create_project()
self.login_as(user=self.user)
url = reverse('sentry-api-0-project-details', kwargs={'project_id': project.id})
with self.settings(SENTRY_PROJECT=0):
response = self.client.delete(url)
assert response.status_code == 204
assert not Project.objects.filter(id=project.id).exists()
def test_internal_project(self):
project = self.create_project()
self.login_as(user=self.user)
url = reverse('sentry-api-0-project-details', kwargs={'project_id': project.id})
with self.settings(SENTRY_PROJECT=project.id):
response = self.client.delete(url)
assert response.status_code == 403
| [((289, 363), 'django.core.urlresolvers.reverse', 'reverse', (['"""sentry-api-0-project-details"""'], {'kwargs': "{'project_id': project.id}"}), "('sentry-api-0-project-details', kwargs={'project_id': project.id})\n", (296, 363), False, 'from django.core.urlresolvers import reverse\n'), ((669, 743), 'django.core.urlresolvers.reverse', 'reverse', (['"""sentry-api-0-project-details"""'], {'kwargs': "{'project_id': project.id}"}), "('sentry-api-0-project-details', kwargs={'project_id': project.id})\n", (676, 743), False, 'from django.core.urlresolvers import reverse\n'), ((934, 968), 'sentry.models.Project.objects.get', 'Project.objects.get', ([], {'id': 'project.id'}), '(id=project.id)\n', (953, 968), False, 'from sentry.models import Project\n'), ((1215, 1289), 'django.core.urlresolvers.reverse', 'reverse', (['"""sentry-api-0-project-details"""'], {'kwargs': "{'project_id': project.id}"}), "('sentry-api-0-project-details', kwargs={'project_id': project.id})\n", (1222, 1289), False, 'from django.core.urlresolvers import reverse\n'), ((1626, 1700), 'django.core.urlresolvers.reverse', 'reverse', (['"""sentry-api-0-project-details"""'], {'kwargs': "{'project_id': project.id}"}), "('sentry-api-0-project-details', kwargs={'project_id': project.id})\n", (1633, 1700), False, 'from django.core.urlresolvers import reverse\n'), ((1447, 1484), 'sentry.models.Project.objects.filter', 'Project.objects.filter', ([], {'id': 'project.id'}), '(id=project.id)\n', (1469, 1484), False, 'from sentry.models import Project\n')] |
databook1/python-pptx | tests/test_table.py | 87ca6bf34f9ced17cc4f3c94cf141069429e7583 | # encoding: utf-8
"""Unit-test suite for `pptx.table` module."""
import pytest
from pptx.dml.fill import FillFormat
from pptx.dml.border import BorderFormat
from pptx.enum.text import MSO_ANCHOR
from pptx.oxml.ns import qn
from pptx.oxml.table import CT_Table, CT_TableCell, TcRange
from pptx.shapes.graphfrm import GraphicFrame
from pptx.table import (
_Cell,
_CellCollection,
_Column,
_ColumnCollection,
_Row,
_RowCollection,
Table,
)
from pptx.text.text import TextFrame
from pptx.util import Inches, Length, Pt
from .unitutil.cxml import element, xml
from .unitutil.mock import call, class_mock, instance_mock, property_mock
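# The suite follows python-pptx's Describe* test convention: one class per
# public object, with cxml strings (built and compared via unitutil.cxml's
# element() and xml()) standing in for full XML part fixtures.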
class DescribeTable(object):
"""Unit-test suite for `pptx.table.Table` objects."""
def it_provides_access_to_its_cells(self, tbl_, tc_, _Cell_, cell_):
row_idx, col_idx = 4, 2
tbl_.tc.return_value = tc_
_Cell_.return_value = cell_
table = Table(tbl_, None)
cell = table.cell(row_idx, col_idx)
tbl_.tc.assert_called_once_with(row_idx, col_idx)
_Cell_.assert_called_once_with(tc_, table)
assert cell is cell_
def it_provides_access_to_its_columns(self, request):
columns_ = instance_mock(request, _ColumnCollection)
_ColumnCollection_ = class_mock(
request, "pptx.table._ColumnCollection", return_value=columns_
)
tbl = element("a:tbl")
table = Table(tbl, None)
columns = table.columns
_ColumnCollection_.assert_called_once_with(tbl, table)
assert columns is columns_
def it_can_iterate_its_grid_cells(self, request, _Cell_):
tbl = element("a:tbl/(a:tr/(a:tc,a:tc),a:tr/(a:tc,a:tc))")
expected_tcs = tbl.xpath(".//a:tc")
expected_cells = _Cell_.side_effect = [
instance_mock(request, _Cell, name="cell%d" % idx) for idx in range(4)
]
table = Table(tbl, None)
cells = list(table.iter_cells())
assert cells == expected_cells
assert _Cell_.call_args_list == [call(tc, table) for tc in expected_tcs]
def it_provides_access_to_its_rows(self, request):
rows_ = instance_mock(request, _RowCollection)
_RowCollection_ = class_mock(
request, "pptx.table._RowCollection", return_value=rows_
)
tbl = element("a:tbl")
table = Table(tbl, None)
rows = table.rows
_RowCollection_.assert_called_once_with(tbl, table)
assert rows is rows_
def it_updates_graphic_frame_width_on_width_change(self, dx_fixture):
table, expected_width = dx_fixture
table.notify_width_changed()
assert table._graphic_frame.width == expected_width
def it_updates_graphic_frame_height_on_height_change(self, dy_fixture):
table, expected_height = dy_fixture
table.notify_height_changed()
assert table._graphic_frame.height == expected_height
# fixtures -------------------------------------------------------
@pytest.fixture
def dx_fixture(self, graphic_frame_):
tbl_cxml = "a:tbl/a:tblGrid/(a:gridCol{w=111},a:gridCol{w=222})"
table = Table(element(tbl_cxml), graphic_frame_)
expected_width = 333
return table, expected_width
@pytest.fixture
def dy_fixture(self, graphic_frame_):
tbl_cxml = "a:tbl/(a:tr{h=100},a:tr{h=200})"
table = Table(element(tbl_cxml), graphic_frame_)
expected_height = 300
return table, expected_height
# fixture components ---------------------------------------------
@pytest.fixture
def _Cell_(self, request):
return class_mock(request, "pptx.table._Cell")
@pytest.fixture
def cell_(self, request):
return instance_mock(request, _Cell)
@pytest.fixture
def graphic_frame_(self, request):
return instance_mock(request, GraphicFrame)
@pytest.fixture
def tbl_(self, request):
return instance_mock(request, CT_Table)
@pytest.fixture
def tc_(self, request):
return instance_mock(request, CT_TableCell)
class DescribeTableBooleanProperties(object):
def it_knows_its_boolean_property_settings(self, boolprop_get_fixture):
table, boolprop_name, expected_value = boolprop_get_fixture
boolprop_value = getattr(table, boolprop_name)
assert boolprop_value is expected_value
def it_can_change_its_boolean_property_settings(self, boolprop_set_fixture):
table, boolprop_name, new_value, expected_xml = boolprop_set_fixture
setattr(table, boolprop_name, new_value)
assert table._tbl.xml == expected_xml
# fixtures -------------------------------------------------------
@pytest.fixture(
params=[
("a:tbl", "first_row", False),
("a:tbl/a:tblPr", "first_row", False),
("a:tbl/a:tblPr{firstRow=1}", "first_row", True),
("a:tbl/a:tblPr{firstRow=0}", "first_row", False),
("a:tbl/a:tblPr{firstRow=true}", "first_row", True),
("a:tbl/a:tblPr{firstRow=false}", "first_row", False),
("a:tbl/a:tblPr{firstCol=1}", "first_col", True),
("a:tbl/a:tblPr{lastRow=0}", "last_row", False),
("a:tbl/a:tblPr{lastCol=true}", "last_col", True),
("a:tbl/a:tblPr{bandRow=false}", "horz_banding", False),
("a:tbl/a:tblPr", "vert_banding", False),
]
)
def boolprop_get_fixture(self, request):
tbl_cxml, boolprop_name, expected_value = request.param
table = Table(element(tbl_cxml), None)
return table, boolprop_name, expected_value
@pytest.fixture(
params=[
("a:tbl", "first_row", True, "a:tbl/a:tblPr{firstRow=1}"),
("a:tbl", "first_row", False, "a:tbl/a:tblPr"),
("a:tbl/a:tblPr", "first_row", True, "a:tbl/a:tblPr{firstRow=1}"),
("a:tbl/a:tblPr", "first_row", False, "a:tbl/a:tblPr"),
(
"a:tbl/a:tblPr{firstRow=true}",
"first_row",
True,
"a:tbl/a:tblPr{firstRow=1}",
),
("a:tbl/a:tblPr{firstRow=false}", "first_row", False, "a:tbl/a:tblPr"),
(
"a:tbl/a:tblPr{bandRow=1}",
"first_row",
True,
"a:tbl/a:tblPr{bandRow=1,firstRow=1}",
),
("a:tbl", "first_col", True, "a:tbl/a:tblPr{firstCol=1}"),
("a:tbl", "last_row", True, "a:tbl/a:tblPr{lastRow=1}"),
("a:tbl", "last_col", True, "a:tbl/a:tblPr{lastCol=1}"),
("a:tbl", "horz_banding", True, "a:tbl/a:tblPr{bandRow=1}"),
("a:tbl", "vert_banding", True, "a:tbl/a:tblPr{bandCol=1}"),
]
)
def boolprop_set_fixture(self, request):
tbl_cxml, boolprop_name, new_value, expected_tbl_cxml = request.param
table = Table(element(tbl_cxml), None)
expected_xml = xml(expected_tbl_cxml)
return table, boolprop_name, new_value, expected_xml
class Describe_Cell(object):
"""Unit-test suite for `pptx.table._Cell` object."""
def it_is_equal_to_other_instance_having_same_tc(self):
tc = element("a:tc")
other_tc = element("a:tc")
cell = _Cell(tc, None)
cell_with_same_tc = _Cell(tc, None)
cell_with_other_tc = _Cell(other_tc, None)
assert cell == cell_with_same_tc
assert cell != cell_with_other_tc
def it_has_a_fill(self, fill_fixture):
cell = fill_fixture
assert isinstance(cell.fill, FillFormat)
def it_knows_whether_it_is_merge_origin_cell(self, origin_fixture):
tc, expected_value = origin_fixture
cell = _Cell(tc, None)
is_merge_origin = cell.is_merge_origin
assert is_merge_origin is expected_value
def it_knows_whether_it_is_spanned(self, spanned_fixture):
tc, expected_value = spanned_fixture
cell = _Cell(tc, None)
is_spanned = cell.is_spanned
assert is_spanned is expected_value
def it_knows_its_margin_settings(self, margin_get_fixture):
cell, margin_prop_name, expected_value = margin_get_fixture
margin_value = getattr(cell, margin_prop_name)
assert margin_value == expected_value
def it_can_change_its_margin_settings(self, margin_set_fixture):
cell, margin_prop_name, new_value, expected_xml = margin_set_fixture
setattr(cell, margin_prop_name, new_value)
assert cell._tc.xml == expected_xml
def it_raises_on_margin_assigned_other_than_int_or_None(
self, margin_raises_fixture
):
cell, margin_attr_name, val_of_invalid_type = margin_raises_fixture
with pytest.raises(TypeError):
setattr(cell, margin_attr_name, val_of_invalid_type)
def it_can_merge_a_range_of_cells(self, TcRange_, tc_range_):
tbl = element("a:tbl/(a:tr/(a:tc,a:tc),a:tr/(a:tc,a:tc))")
tc, other_tc = tbl.tc(0, 0), tbl.tc(1, 1)
TcRange_.return_value = tc_range_
tc_range_.contains_merged_cell = False
tc_range_.dimensions = 2, 2
def tcs(*rowcols):
return (tbl.tc(*rowcol) for rowcol in rowcols)
tc_range_.iter_top_row_tcs.return_value = tcs((0, 0), (0, 1))
tc_range_.iter_left_col_tcs.return_value = tcs((0, 0), (1, 0))
tc_range_.iter_except_left_col_tcs.return_value = tcs((0, 1), (1, 1))
tc_range_.iter_except_top_row_tcs.return_value = tcs((1, 0), (1, 1))
expected_xml = xml(
"a:tbl/(a:tr/(a:tc{gridSpan=2,rowSpan=2},a:tc{rowSpan=2,hMerge=1"
"}),a:tr/(a:tc{gridSpan=2,vMerge=1},a:tc{hMerge=1,vMerge=1}))"
)
cell, other_cell = _Cell(tc, None), _Cell(other_tc, None)
cell.merge(other_cell)
TcRange_.assert_called_once_with(tc, other_tc)
tc_range_.move_content_to_origin.assert_called_once_with()
assert tbl.xml == expected_xml
def but_it_raises_when_cells_are_from_different_tables(self, TcRange_, tc_range_):
TcRange_.return_value = tc_range_
tc_range_.in_same_table = False
cell, other_cell = _Cell(None, None), _Cell(None, None)
with pytest.raises(ValueError) as e:
cell.merge(other_cell)
assert "different table" in str(e.value)
def and_it_raises_when_range_contains_merged_cell(self, TcRange_, tc_range_):
TcRange_.return_value = tc_range_
tc_range_.contains_merged_cell = True
cell, other_cell = _Cell(None, None), _Cell(None, None)
with pytest.raises(ValueError) as e:
cell.merge(other_cell)
assert "contains one or more merged cells" in str(e.value)
def it_knows_how_many_rows_the_merge_spans(self, height_fixture):
tc, expected_value = height_fixture
cell = _Cell(tc, None)
span_height = cell.span_height
assert span_height == expected_value
def it_knows_how_many_columns_the_merge_spans(self, width_fixture):
tc, expected_value = width_fixture
cell = _Cell(tc, None)
span_width = cell.span_width
assert span_width == expected_value
def it_can_split_a_merged_cell(self, split_fixture):
origin_tc, range_tcs = split_fixture
cell = _Cell(origin_tc, None)
cell.split()
assert all(tc.gridSpan == 1 for tc in range_tcs)
assert all(tc.rowSpan == 1 for tc in range_tcs)
assert all(not tc.hMerge for tc in range_tcs)
assert all(not tc.vMerge for tc in range_tcs)
def but_it_raises_when_cell_to_be_split_is_not_merge_origin(self):
tc = element("a:tbl/a:tr/a:tc").xpath("//a:tc")[0]
cell = _Cell(tc, None)
with pytest.raises(ValueError) as e:
cell.split()
assert "not a merge-origin cell" in str(e.value)
def it_knows_what_text_it_contains(self, text_frame_prop_, text_frame_):
text_frame_prop_.return_value = text_frame_
text_frame_.text = "foobar"
cell = _Cell(None, None)
text = cell.text
assert text == "foobar"
def it_can_change_its_text(self, text_frame_prop_, text_frame_):
text_frame_prop_.return_value = text_frame_
cell = _Cell(None, None)
cell.text = "føøbår"
assert text_frame_.text == "føøbår"
def it_knows_its_vertical_anchor_setting(self, anchor_get_fixture):
cell, expected_value = anchor_get_fixture
assert cell.vertical_anchor == expected_value
def it_can_change_its_vertical_anchor(self, anchor_set_fixture):
cell, new_value, expected_xml = anchor_set_fixture
cell.vertical_anchor = new_value
assert cell._tc.xml == expected_xml
def it_knows_it_has_border_settings(self, border_fixture):
cell = border_fixture
assert isinstance(cell.border_left, BorderFormat)
assert isinstance(cell.border_right, BorderFormat)
assert isinstance(cell.border_top, BorderFormat)
assert isinstance(cell.border_bottom, BorderFormat)
assert isinstance(cell.border_tl_br, BorderFormat)
assert isinstance(cell.border_bl_tr, BorderFormat)
# fixtures -------------------------------------------------------
@pytest.fixture(
params=[
("a:tc", None),
("a:tc/a:tcPr", None),
("a:tc/a:tcPr{anchor=t}", MSO_ANCHOR.TOP),
("a:tc/a:tcPr{anchor=ctr}", MSO_ANCHOR.MIDDLE),
("a:tc/a:tcPr{anchor=b}", MSO_ANCHOR.BOTTOM),
]
)
def anchor_get_fixture(self, request):
tc_cxml, expected_value = request.param
cell = _Cell(element(tc_cxml), None)
return cell, expected_value
@pytest.fixture(
params=[
("a:tc", None, "a:tc"),
("a:tc", MSO_ANCHOR.TOP, "a:tc/a:tcPr{anchor=t}"),
("a:tc", MSO_ANCHOR.MIDDLE, "a:tc/a:tcPr{anchor=ctr}"),
("a:tc", MSO_ANCHOR.BOTTOM, "a:tc/a:tcPr{anchor=b}"),
("a:tc/a:tcPr{anchor=t}", MSO_ANCHOR.MIDDLE, "a:tc/a:tcPr{anchor=ctr}"),
("a:tc/a:tcPr{anchor=ctr}", None, "a:tc/a:tcPr"),
]
)
def anchor_set_fixture(self, request):
tc_cxml, new_value, expected_tc_cxml = request.param
cell = _Cell(element(tc_cxml), None)
expected_xml = xml(expected_tc_cxml)
return cell, new_value, expected_xml
@pytest.fixture
def fill_fixture(self, cell):
return cell
@pytest.fixture
def border_fixture(self, cell):
return cell
@pytest.fixture(
params=[("a:tc", 1), ("a:tc{gridSpan=2}", 1), ("a:tc{rowSpan=42}", 42)]
)
def height_fixture(self, request):
tc_cxml, expected_value = request.param
tc = element(tc_cxml)
return tc, expected_value
@pytest.fixture(
params=[
("a:tc/a:tcPr{marL=82296}", "margin_left", Inches(0.09)),
("a:tc/a:tcPr{marR=73152}", "margin_right", Inches(0.08)),
("a:tc/a:tcPr{marT=64008}", "margin_top", Inches(0.07)),
("a:tc/a:tcPr{marB=54864}", "margin_bottom", Inches(0.06)),
("a:tc", "margin_left", Inches(0.1)),
("a:tc/a:tcPr", "margin_right", Inches(0.1)),
("a:tc", "margin_top", Inches(0.05)),
("a:tc/a:tcPr", "margin_bottom", Inches(0.05)),
]
)
def margin_get_fixture(self, request):
tc_cxml, margin_prop_name, expected_value = request.param
cell = _Cell(element(tc_cxml), None)
return cell, margin_prop_name, expected_value
@pytest.fixture(
params=[
("a:tc", "margin_left", Inches(0.08), "a:tc/a:tcPr{marL=73152}"),
("a:tc", "margin_right", Inches(0.08), "a:tc/a:tcPr{marR=73152}"),
("a:tc", "margin_top", Inches(0.08), "a:tc/a:tcPr{marT=73152}"),
("a:tc", "margin_bottom", Inches(0.08), "a:tc/a:tcPr{marB=73152}"),
("a:tc", "margin_left", None, "a:tc"),
("a:tc/a:tcPr{marL=42}", "margin_left", None, "a:tc/a:tcPr"),
]
)
def margin_set_fixture(self, request):
tc_cxml, margin_prop_name, new_value, expected_tc_cxml = request.param
cell = _Cell(element(tc_cxml), None)
expected_xml = xml(expected_tc_cxml)
return cell, margin_prop_name, new_value, expected_xml
@pytest.fixture(
params=["margin_left", "margin_right", "margin_top", "margin_bottom"]
)
def margin_raises_fixture(self, request):
margin_prop_name = request.param
cell = _Cell(element("a:tc"), None)
val_of_invalid_type = "foobar"
return cell, margin_prop_name, val_of_invalid_type
@pytest.fixture(
params=[
("a:tc", False),
("a:tc{gridSpan=1}", False),
("a:tc{hMerge=1}", False),
("a:tc{gridSpan=2,vMerge=1}", False),
("a:tc{gridSpan=2}", True),
("a:tc{rowSpan=2}", True),
("a:tc{gridSpan=2,rowSpan=3}", True),
]
)
def origin_fixture(self, request):
tc_cxml, expected_value = request.param
tc = element(tc_cxml)
return tc, expected_value
@pytest.fixture(
params=[
("a:tc", False),
("a:tc{gridSpan=2}", False),
("a:tc{hMerge=1}", True),
("a:tc{gridSpan=2,vMerge=1}", True),
("a:tc{rowSpan=2,hMerge=true}", True),
("a:tc{gridSpan=2,rowSpan=3}", False),
]
)
def spanned_fixture(self, request):
tc_cxml, expected_value = request.param
tc = element(tc_cxml)
return tc, expected_value
@pytest.fixture(
params=[
(
"a:tbl/(a:tr/(a:tc{gridSpan=2},a:tc{hMerge=1}),a:tr/(a:tc,a:tc))",
0,
[0, 1],
),
(
"a:tbl/(a:tr/(a:tc{rowSpan=2},a:tc),a:tr/(a:tc{vMerge=1},a:tc))",
0,
[0, 2],
),
(
"a:tbl/(a:tr/(a:tc{gridSpan=2,rowSpan=2},a:tc{hMerge=1,rowSpan=2}),"
"a:tr/(a:tc{gridSpan=2,vMerge=1},a:tc{hMerge=1,vMerge=1}))",
0,
[0, 1, 2, 3],
),
]
)
def split_fixture(self, request):
tbl_cxml, origin_tc_idx, range_tc_idxs = request.param
tcs = element(tbl_cxml).xpath("//a:tc")
origin_tc = tcs[origin_tc_idx]
range_tcs = tuple(tcs[idx] for idx in range_tc_idxs)
return origin_tc, range_tcs
@pytest.fixture(
params=[("a:tc", 1), ("a:tc{rowSpan=2}", 1), ("a:tc{gridSpan=24}", 24)]
)
def width_fixture(self, request):
tc_cxml, expected_value = request.param
tc = element(tc_cxml)
return tc, expected_value
# fixture components ---------------------------------------------
@pytest.fixture
def cell(self):
return _Cell(element("a:tc"), None)
@pytest.fixture
def TcRange_(self, request):
return class_mock(request, "pptx.table.TcRange")
@pytest.fixture
def tc_range_(self, request):
return instance_mock(request, TcRange)
@pytest.fixture
def text_frame_(self, request):
return instance_mock(request, TextFrame)
@pytest.fixture
def text_frame_prop_(self, request):
return property_mock(request, _Cell, "text_frame")
class Describe_CellCollection(object):
def it_knows_how_many_cells_it_contains(self, len_fixture):
cells, expected_count = len_fixture
assert len(cells) == expected_count
def it_can_iterate_over_the_cells_it_contains(self, iter_fixture):
cell_collection, _Cell_, calls, expected_cells = iter_fixture
cells = list(cell_collection)
assert _Cell_.call_args_list == calls
assert cells == expected_cells
def it_supports_indexed_access(self, _Cell_, cell_):
tr = element("a:tr/(a:tc, a:tc, a:tc)")
tcs = tr.xpath("//a:tc")
_Cell_.return_value = cell_
cell_collection = _CellCollection(tr, None)
cell = cell_collection[1]
_Cell_.assert_called_once_with(tcs[1], cell_collection)
assert cell is cell_
def it_raises_on_indexed_access_out_of_range(self):
cells = _CellCollection(element("a:tr/a:tc"), None)
with pytest.raises(IndexError):
cells[-1]
with pytest.raises(IndexError):
cells[9]
# fixtures -------------------------------------------------------
@pytest.fixture(params=["a:tr", "a:tr/a:tc", "a:tr/(a:tc, a:tc, a:tc)"])
def iter_fixture(self, request, _Cell_):
tr_cxml = request.param
tr = element(tr_cxml)
tcs = tr.xpath("//a:tc")
cell_collection = _CellCollection(tr, None)
expected_cells = [
instance_mock(request, _Cell, name="cell%d" % idx)
for idx in range(len(tcs))
]
_Cell_.side_effect = expected_cells
calls = [call(tc, cell_collection) for tc in tcs]
return cell_collection, _Cell_, calls, expected_cells
@pytest.fixture(params=[("a:tr", 0), ("a:tr/a:tc", 1), ("a:tr/(a:tc, a:tc)", 2)])
def len_fixture(self, request):
tr_cxml, expected_len = request.param
cells = _CellCollection(element(tr_cxml), None)
return cells, expected_len
# fixture components ---------------------------------------------
@pytest.fixture
def _Cell_(self, request):
return class_mock(request, "pptx.table._Cell")
@pytest.fixture
def cell_(self, request):
return instance_mock(request, _Cell)
class Describe_Column(object):
def it_knows_its_width(self, width_get_fixture):
column, expected_value = width_get_fixture
width = column.width
assert width == expected_value
assert isinstance(width, Length)
def it_can_change_its_width(self, width_set_fixture):
column, new_width, expected_xml, parent_ = width_set_fixture
column.width = new_width
assert column._gridCol.xml == expected_xml
parent_.notify_width_changed.assert_called_once_with()
# fixtures -------------------------------------------------------
@pytest.fixture(
params=[("a:gridCol{w=914400}", Inches(1)), ("a:gridCol{w=10pt}", Pt(10))]
)
def width_get_fixture(self, request):
gridCol_cxml, expected_value = request.param
column = _Column(element(gridCol_cxml), None)
return column, expected_value
@pytest.fixture(
params=[
("a:gridCol{w=12pt}", Inches(1), "a:gridCol{w=914400}"),
("a:gridCol{w=1234}", Inches(1), "a:gridCol{w=914400}"),
]
)
def width_set_fixture(self, request, parent_):
gridCol_cxml, new_width, expected_gridCol_cxml = request.param
column = _Column(element(gridCol_cxml), parent_)
expected_xml = xml(expected_gridCol_cxml)
return column, new_width, expected_xml, parent_
# fixture components ---------------------------------------------
@pytest.fixture
def parent_(self, request):
return instance_mock(request, _ColumnCollection)
class Describe_ColumnCollection(object):
def it_knows_how_many_columns_it_contains(self, len_fixture):
columns, expected_count = len_fixture
assert len(columns) == expected_count
def it_can_iterate_over_the_columns_it_contains(self, iter_fixture):
columns, expected_gridCol_lst = iter_fixture
count = 0
for idx, column in enumerate(columns):
assert isinstance(column, _Column)
assert column._gridCol is expected_gridCol_lst[idx]
count += 1
assert count == len(expected_gridCol_lst)
def it_supports_indexed_access(self, getitem_fixture):
columns, expected_gridCol_lst = getitem_fixture
for idx, gridCol in enumerate(expected_gridCol_lst):
column = columns[idx]
assert isinstance(column, _Column)
assert column._gridCol is gridCol
def it_raises_on_indexed_access_out_of_range(self):
columns = _ColumnCollection(element("a:tbl/a:tblGrid/a:gridCol"), None)
with pytest.raises(IndexError):
columns[-1]
with pytest.raises(IndexError):
columns[9]
# fixtures -------------------------------------------------------
@pytest.fixture(
params=[
"a:tbl/a:tblGrid",
"a:tbl/a:tblGrid/a:gridCol",
"a:tbl/a:tblGrid/(a:gridCol, a:gridCol, a:gridCol)",
]
)
def getitem_fixture(self, request):
tbl_cxml = request.param
tbl = element(tbl_cxml)
columns = _ColumnCollection(tbl, None)
expected_column_lst = tbl.xpath("//a:gridCol")
return columns, expected_column_lst
@pytest.fixture(
params=[
"a:tbl/a:tblGrid",
"a:tbl/a:tblGrid/a:gridCol",
"a:tbl/a:tblGrid/(a:gridCol, a:gridCol, a:gridCol)",
]
)
def iter_fixture(self, request):
tbl_cxml = request.param
tbl = element(tbl_cxml)
columns = _ColumnCollection(tbl, None)
expected_column_lst = tbl.xpath("//a:gridCol")
return columns, expected_column_lst
@pytest.fixture(
params=[
("a:tbl/a:tblGrid", 0),
("a:tbl/a:tblGrid/a:gridCol", 1),
("a:tbl/a:tblGrid/(a:gridCol,a:gridCol)", 2),
]
)
def len_fixture(self, request):
tbl_cxml, expected_len = request.param
columns = _ColumnCollection(element(tbl_cxml), None)
return columns, expected_len
class Describe_Row(object):
def it_knows_its_height(self, height_get_fixture):
row, expected_value = height_get_fixture
height = row.height
assert height == expected_value
assert isinstance(height, Length)
def it_can_change_its_height(self, height_set_fixture):
row, new_height, expected_xml, parent_ = height_set_fixture
row.height = new_height
assert row._tr.xml == expected_xml
parent_.notify_height_changed.assert_called_once_with()
def it_provides_access_to_its_cells(self, cells_fixture):
row, _CellCollection_, cells_ = cells_fixture
cells = row.cells
_CellCollection_.assert_called_once_with(row._tr, row)
assert cells is cells_
# fixtures -------------------------------------------------------
@pytest.fixture
def cells_fixture(self, _CellCollection_, cells_):
row = _Row(element("a:tr"), None)
return row, _CellCollection_, cells_
@pytest.fixture(params=[("a:tr{h=914400}", Inches(1)), ("a:tr{h=10pt}", Pt(10))])
def height_get_fixture(self, request):
tr_cxml, expected_value = request.param
row = _Row(element(tr_cxml), None)
return row, expected_value
@pytest.fixture(
params=[
("a:tr{h=12pt}", Inches(1), "a:tr{h=914400}"),
("a:tr{h=1234}", Inches(1), "a:tr{h=914400}"),
]
)
def height_set_fixture(self, request, parent_):
tr_cxml, new_height, expected_tr_cxml = request.param
row = _Row(element(tr_cxml), parent_)
expected_xml = xml(expected_tr_cxml)
return row, new_height, expected_xml, parent_
# fixture components ---------------------------------------------
@pytest.fixture
def _CellCollection_(self, request, cells_):
return class_mock(request, "pptx.table._CellCollection", return_value=cells_)
@pytest.fixture
def cells_(self, request):
return instance_mock(request, _CellCollection)
@pytest.fixture
def parent_(self, request):
return instance_mock(request, _RowCollection)
class Describe_RowCollection(object):
def it_knows_how_many_rows_it_contains(self, len_fixture):
rows, expected_count = len_fixture
assert len(rows) == expected_count
def it_can_iterate_over_the_rows_it_contains(self, iter_fixture):
rows, expected_tr_lst = iter_fixture
count = 0
for idx, row in enumerate(rows):
assert isinstance(row, _Row)
assert row._tr is expected_tr_lst[idx]
count += 1
assert count == len(expected_tr_lst)
def it_supports_indexed_access(self, getitem_fixture):
rows, expected_tr_lst = getitem_fixture
for idx, tr in enumerate(expected_tr_lst):
row = rows[idx]
assert isinstance(row, _Row)
assert row._tr is tr
def it_raises_on_indexed_access_out_of_range(self):
rows = _RowCollection(element("a:tbl/a:tr"), None)
with pytest.raises(IndexError):
rows[-1]
with pytest.raises(IndexError):
rows[9]
# fixtures -------------------------------------------------------
@pytest.fixture(params=["a:tbl", "a:tbl/a:tr", "a:tbl/(a:tr, a:tr, a:tr)"])
def getitem_fixture(self, request):
tbl_cxml = request.param
tbl = element(tbl_cxml)
rows = _RowCollection(tbl, None)
expected_row_lst = tbl.findall(qn("a:tr"))
return rows, expected_row_lst
@pytest.fixture(params=["a:tbl", "a:tbl/a:tr", "a:tbl/(a:tr, a:tr, a:tr)"])
def iter_fixture(self, request):
tbl_cxml = request.param
tbl = element(tbl_cxml)
rows = _RowCollection(tbl, None)
expected_row_lst = tbl.findall(qn("a:tr"))
return rows, expected_row_lst
@pytest.fixture(params=[("a:tbl", 0), ("a:tbl/a:tr", 1), ("a:tbl/(a:tr, a:tr)", 2)])
def len_fixture(self, request):
tbl_cxml, expected_len = request.param
rows = _RowCollection(element(tbl_cxml), None)
return rows, expected_len
| [((4728, 5322), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[('a:tbl', 'first_row', False), ('a:tbl/a:tblPr', 'first_row', False), (\n 'a:tbl/a:tblPr{firstRow=1}', 'first_row', True), (\n 'a:tbl/a:tblPr{firstRow=0}', 'first_row', False), (\n 'a:tbl/a:tblPr{firstRow=true}', 'first_row', True), (\n 'a:tbl/a:tblPr{firstRow=false}', 'first_row', False), (\n 'a:tbl/a:tblPr{firstCol=1}', 'first_col', True), (\n 'a:tbl/a:tblPr{lastRow=0}', 'last_row', False), (\n 'a:tbl/a:tblPr{lastCol=true}', 'last_col', True), (\n 'a:tbl/a:tblPr{bandRow=false}', 'horz_banding', False), (\n 'a:tbl/a:tblPr', 'vert_banding', False)]"}), "(params=[('a:tbl', 'first_row', False), ('a:tbl/a:tblPr',\n 'first_row', False), ('a:tbl/a:tblPr{firstRow=1}', 'first_row', True),\n ('a:tbl/a:tblPr{firstRow=0}', 'first_row', False), (\n 'a:tbl/a:tblPr{firstRow=true}', 'first_row', True), (\n 'a:tbl/a:tblPr{firstRow=false}', 'first_row', False), (\n 'a:tbl/a:tblPr{firstCol=1}', 'first_col', True), (\n 'a:tbl/a:tblPr{lastRow=0}', 'last_row', False), (\n 'a:tbl/a:tblPr{lastCol=true}', 'last_col', True), (\n 'a:tbl/a:tblPr{bandRow=false}', 'horz_banding', False), (\n 'a:tbl/a:tblPr', 'vert_banding', False)])\n", (4742, 5322), False, 'import pytest\n'), ((5651, 6490), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[('a:tbl', 'first_row', True, 'a:tbl/a:tblPr{firstRow=1}'), ('a:tbl',\n 'first_row', False, 'a:tbl/a:tblPr'), ('a:tbl/a:tblPr', 'first_row', \n True, 'a:tbl/a:tblPr{firstRow=1}'), ('a:tbl/a:tblPr', 'first_row', \n False, 'a:tbl/a:tblPr'), ('a:tbl/a:tblPr{firstRow=true}', 'first_row', \n True, 'a:tbl/a:tblPr{firstRow=1}'), ('a:tbl/a:tblPr{firstRow=false}',\n 'first_row', False, 'a:tbl/a:tblPr'), ('a:tbl/a:tblPr{bandRow=1}',\n 'first_row', True, 'a:tbl/a:tblPr{bandRow=1,firstRow=1}'), ('a:tbl',\n 'first_col', True, 'a:tbl/a:tblPr{firstCol=1}'), ('a:tbl', 'last_row', \n True, 'a:tbl/a:tblPr{lastRow=1}'), ('a:tbl', 'last_col', True,\n 'a:tbl/a:tblPr{lastCol=1}'), ('a:tbl', 'horz_banding', True,\n 'a:tbl/a:tblPr{bandRow=1}'), ('a:tbl', 'vert_banding', True,\n 'a:tbl/a:tblPr{bandCol=1}')]"}), "(params=[('a:tbl', 'first_row', True,\n 'a:tbl/a:tblPr{firstRow=1}'), ('a:tbl', 'first_row', False,\n 'a:tbl/a:tblPr'), ('a:tbl/a:tblPr', 'first_row', True,\n 'a:tbl/a:tblPr{firstRow=1}'), ('a:tbl/a:tblPr', 'first_row', False,\n 'a:tbl/a:tblPr'), ('a:tbl/a:tblPr{firstRow=true}', 'first_row', True,\n 'a:tbl/a:tblPr{firstRow=1}'), ('a:tbl/a:tblPr{firstRow=false}',\n 'first_row', False, 'a:tbl/a:tblPr'), ('a:tbl/a:tblPr{bandRow=1}',\n 'first_row', True, 'a:tbl/a:tblPr{bandRow=1,firstRow=1}'), ('a:tbl',\n 'first_col', True, 'a:tbl/a:tblPr{firstCol=1}'), ('a:tbl', 'last_row', \n True, 'a:tbl/a:tblPr{lastRow=1}'), ('a:tbl', 'last_col', True,\n 'a:tbl/a:tblPr{lastCol=1}'), ('a:tbl', 'horz_banding', True,\n 'a:tbl/a:tblPr{bandRow=1}'), ('a:tbl', 'vert_banding', True,\n 'a:tbl/a:tblPr{bandCol=1}')])\n", (5665, 6490), False, 'import pytest\n'), ((13255, 13463), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[('a:tc', None), ('a:tc/a:tcPr', None), ('a:tc/a:tcPr{anchor=t}',\n MSO_ANCHOR.TOP), ('a:tc/a:tcPr{anchor=ctr}', MSO_ANCHOR.MIDDLE), (\n 'a:tc/a:tcPr{anchor=b}', MSO_ANCHOR.BOTTOM)]"}), "(params=[('a:tc', None), ('a:tc/a:tcPr', None), (\n 'a:tc/a:tcPr{anchor=t}', MSO_ANCHOR.TOP), ('a:tc/a:tcPr{anchor=ctr}',\n MSO_ANCHOR.MIDDLE), ('a:tc/a:tcPr{anchor=b}', MSO_ANCHOR.BOTTOM)])\n", (13269, 13463), False, 'import pytest\n'), ((13718, 14069), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[('a:tc', None, 'a:tc'), ('a:tc', 
MSO_ANCHOR.TOP, 'a:tc/a:tcPr{anchor=t}'),\n ('a:tc', MSO_ANCHOR.MIDDLE, 'a:tc/a:tcPr{anchor=ctr}'), ('a:tc',\n MSO_ANCHOR.BOTTOM, 'a:tc/a:tcPr{anchor=b}'), ('a:tc/a:tcPr{anchor=t}',\n MSO_ANCHOR.MIDDLE, 'a:tc/a:tcPr{anchor=ctr}'), (\n 'a:tc/a:tcPr{anchor=ctr}', None, 'a:tc/a:tcPr')]"}), "(params=[('a:tc', None, 'a:tc'), ('a:tc', MSO_ANCHOR.TOP,\n 'a:tc/a:tcPr{anchor=t}'), ('a:tc', MSO_ANCHOR.MIDDLE,\n 'a:tc/a:tcPr{anchor=ctr}'), ('a:tc', MSO_ANCHOR.BOTTOM,\n 'a:tc/a:tcPr{anchor=b}'), ('a:tc/a:tcPr{anchor=t}', MSO_ANCHOR.MIDDLE,\n 'a:tc/a:tcPr{anchor=ctr}'), ('a:tc/a:tcPr{anchor=ctr}', None,\n 'a:tc/a:tcPr')])\n", (13732, 14069), False, 'import pytest\n'), ((14544, 14636), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[('a:tc', 1), ('a:tc{gridSpan=2}', 1), ('a:tc{rowSpan=42}', 42)]"}), "(params=[('a:tc', 1), ('a:tc{gridSpan=2}', 1), (\n 'a:tc{rowSpan=42}', 42)])\n", (14558, 14636), False, 'import pytest\n'), ((16335, 16424), 'pytest.fixture', 'pytest.fixture', ([], {'params': "['margin_left', 'margin_right', 'margin_top', 'margin_bottom']"}), "(params=['margin_left', 'margin_right', 'margin_top',\n 'margin_bottom'])\n", (16349, 16424), False, 'import pytest\n'), ((16670, 16912), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[('a:tc', False), ('a:tc{gridSpan=1}', False), ('a:tc{hMerge=1}', False), (\n 'a:tc{gridSpan=2,vMerge=1}', False), ('a:tc{gridSpan=2}', True), (\n 'a:tc{rowSpan=2}', True), ('a:tc{gridSpan=2,rowSpan=3}', True)]"}), "(params=[('a:tc', False), ('a:tc{gridSpan=1}', False), (\n 'a:tc{hMerge=1}', False), ('a:tc{gridSpan=2,vMerge=1}', False), (\n 'a:tc{gridSpan=2}', True), ('a:tc{rowSpan=2}', True), (\n 'a:tc{gridSpan=2,rowSpan=3}', True)])\n", (16684, 16912), False, 'import pytest\n'), ((17164, 17389), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[('a:tc', False), ('a:tc{gridSpan=2}', False), ('a:tc{hMerge=1}', True), (\n 'a:tc{gridSpan=2,vMerge=1}', True), ('a:tc{rowSpan=2,hMerge=true}', \n True), ('a:tc{gridSpan=2,rowSpan=3}', False)]"}), "(params=[('a:tc', False), ('a:tc{gridSpan=2}', False), (\n 'a:tc{hMerge=1}', True), ('a:tc{gridSpan=2,vMerge=1}', True), (\n 'a:tc{rowSpan=2,hMerge=true}', True), ('a:tc{gridSpan=2,rowSpan=3}', \n False)])\n", (17178, 17389), False, 'import pytest\n'), ((17630, 17987), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[('a:tbl/(a:tr/(a:tc{gridSpan=2},a:tc{hMerge=1}),a:tr/(a:tc,a:tc))', 0, [0,\n 1]), ('a:tbl/(a:tr/(a:tc{rowSpan=2},a:tc),a:tr/(a:tc{vMerge=1},a:tc))',\n 0, [0, 2]), (\n 'a:tbl/(a:tr/(a:tc{gridSpan=2,rowSpan=2},a:tc{hMerge=1,rowSpan=2}),a:tr/(a:tc{gridSpan=2,vMerge=1},a:tc{hMerge=1,vMerge=1}))'\n , 0, [0, 1, 2, 3])]"}), "(params=[(\n 'a:tbl/(a:tr/(a:tc{gridSpan=2},a:tc{hMerge=1}),a:tr/(a:tc,a:tc))', 0, [\n 0, 1]), (\n 'a:tbl/(a:tr/(a:tc{rowSpan=2},a:tc),a:tr/(a:tc{vMerge=1},a:tc))', 0, [0,\n 2]), (\n 'a:tbl/(a:tr/(a:tc{gridSpan=2,rowSpan=2},a:tc{hMerge=1,rowSpan=2}),a:tr/(a:tc{gridSpan=2,vMerge=1},a:tc{hMerge=1,vMerge=1}))'\n , 0, [0, 1, 2, 3])])\n", (17644, 17987), False, 'import pytest\n'), ((18519, 18611), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[('a:tc', 1), ('a:tc{rowSpan=2}', 1), ('a:tc{gridSpan=24}', 24)]"}), "(params=[('a:tc', 1), ('a:tc{rowSpan=2}', 1), (\n 'a:tc{gridSpan=24}', 24)])\n", (18533, 18611), False, 'import pytest\n'), ((20502, 20573), 'pytest.fixture', 'pytest.fixture', ([], {'params': "['a:tr', 'a:tr/a:tc', 'a:tr/(a:tc, a:tc, a:tc)']"}), "(params=['a:tr', 'a:tr/a:tc', 'a:tr/(a:tc, a:tc, a:tc)'])\n", (20516, 20573), False, 'import pytest\n'), ((21077, 21162), 
'pytest.fixture', 'pytest.fixture', ([], {'params': "[('a:tr', 0), ('a:tr/a:tc', 1), ('a:tr/(a:tc, a:tc)', 2)]"}), "(params=[('a:tr', 0), ('a:tr/a:tc', 1), ('a:tr/(a:tc, a:tc)', 2)]\n )\n", (21091, 21162), False, 'import pytest\n'), ((24380, 24508), 'pytest.fixture', 'pytest.fixture', ([], {'params': "['a:tbl/a:tblGrid', 'a:tbl/a:tblGrid/a:gridCol',\n 'a:tbl/a:tblGrid/(a:gridCol, a:gridCol, a:gridCol)']"}), "(params=['a:tbl/a:tblGrid', 'a:tbl/a:tblGrid/a:gridCol',\n 'a:tbl/a:tblGrid/(a:gridCol, a:gridCol, a:gridCol)'])\n", (24394, 24508), False, 'import pytest\n'), ((24823, 24951), 'pytest.fixture', 'pytest.fixture', ([], {'params': "['a:tbl/a:tblGrid', 'a:tbl/a:tblGrid/a:gridCol',\n 'a:tbl/a:tblGrid/(a:gridCol, a:gridCol, a:gridCol)']"}), "(params=['a:tbl/a:tblGrid', 'a:tbl/a:tblGrid/a:gridCol',\n 'a:tbl/a:tblGrid/(a:gridCol, a:gridCol, a:gridCol)'])\n", (24837, 24951), False, 'import pytest\n'), ((25263, 25394), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[('a:tbl/a:tblGrid', 0), ('a:tbl/a:tblGrid/a:gridCol', 1), (\n 'a:tbl/a:tblGrid/(a:gridCol,a:gridCol)', 2)]"}), "(params=[('a:tbl/a:tblGrid', 0), ('a:tbl/a:tblGrid/a:gridCol',\n 1), ('a:tbl/a:tblGrid/(a:gridCol,a:gridCol)', 2)])\n", (25277, 25394), False, 'import pytest\n'), ((28847, 28921), 'pytest.fixture', 'pytest.fixture', ([], {'params': "['a:tbl', 'a:tbl/a:tr', 'a:tbl/(a:tr, a:tr, a:tr)']"}), "(params=['a:tbl', 'a:tbl/a:tr', 'a:tbl/(a:tr, a:tr, a:tr)'])\n", (28861, 28921), False, 'import pytest\n'), ((29163, 29237), 'pytest.fixture', 'pytest.fixture', ([], {'params': "['a:tbl', 'a:tbl/a:tr', 'a:tbl/(a:tr, a:tr, a:tr)']"}), "(params=['a:tbl', 'a:tbl/a:tr', 'a:tbl/(a:tr, a:tr, a:tr)'])\n", (29177, 29237), False, 'import pytest\n'), ((29476, 29564), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[('a:tbl', 0), ('a:tbl/a:tr', 1), ('a:tbl/(a:tr, a:tr)', 2)]"}), "(params=[('a:tbl', 0), ('a:tbl/a:tr', 1), (\n 'a:tbl/(a:tr, a:tr)', 2)])\n", (29490, 29564), False, 'import pytest\n'), ((943, 960), 'pptx.table.Table', 'Table', (['tbl_', 'None'], {}), '(tbl_, None)\n', (948, 960), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((1438, 1454), 'pptx.table.Table', 'Table', (['tbl', 'None'], {}), '(tbl, None)\n', (1443, 1454), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((1918, 1934), 'pptx.table.Table', 'Table', (['tbl', 'None'], {}), '(tbl, None)\n', (1923, 1934), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((2373, 2389), 'pptx.table.Table', 'Table', (['tbl', 'None'], {}), '(tbl, None)\n', (2378, 2389), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((7274, 7289), 'pptx.table._Cell', '_Cell', (['tc', 'None'], {}), '(tc, None)\n', (7279, 7289), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((7318, 7333), 'pptx.table._Cell', '_Cell', (['tc', 'None'], {}), '(tc, None)\n', (7323, 7333), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((7363, 7384), 'pptx.table._Cell', '_Cell', (['other_tc', 'None'], {}), '(other_tc, None)\n', (7368, 7384), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((7722, 7737), 'pptx.table._Cell', '_Cell', (['tc', 'None'], {}), 
'(tc, None)\n', (7727, 7737), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((7960, 7975), 'pptx.table._Cell', '_Cell', (['tc', 'None'], {}), '(tc, None)\n', (7965, 7975), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((10841, 10856), 'pptx.table._Cell', '_Cell', (['tc', 'None'], {}), '(tc, None)\n', (10846, 10856), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((11072, 11087), 'pptx.table._Cell', '_Cell', (['tc', 'None'], {}), '(tc, None)\n', (11077, 11087), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((11287, 11309), 'pptx.table._Cell', '_Cell', (['origin_tc', 'None'], {}), '(origin_tc, None)\n', (11292, 11309), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((11700, 11715), 'pptx.table._Cell', '_Cell', (['tc', 'None'], {}), '(tc, None)\n', (11705, 11715), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((12025, 12042), 'pptx.table._Cell', '_Cell', (['None', 'None'], {}), '(None, None)\n', (12030, 12042), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((12239, 12256), 'pptx.table._Cell', '_Cell', (['None', 'None'], {}), '(None, None)\n', (12244, 12256), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((20029, 20054), 'pptx.table._CellCollection', '_CellCollection', (['tr', 'None'], {}), '(tr, None)\n', (20044, 20054), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((20740, 20765), 'pptx.table._CellCollection', '_CellCollection', (['tr', 'None'], {}), '(tr, None)\n', (20755, 20765), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((24689, 24717), 'pptx.table._ColumnCollection', '_ColumnCollection', (['tbl', 'None'], {}), '(tbl, None)\n', (24706, 24717), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((25129, 25157), 'pptx.table._ColumnCollection', '_ColumnCollection', (['tbl', 'None'], {}), '(tbl, None)\n', (25146, 25157), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((29042, 29067), 'pptx.table._RowCollection', '_RowCollection', (['tbl', 'None'], {}), '(tbl, None)\n', (29056, 29067), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((29355, 29380), 'pptx.table._RowCollection', '_RowCollection', (['tbl', 'None'], {}), '(tbl, None)\n', (29369, 29380), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((8729, 8753), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (8742, 8753), False, 'import pytest\n'), ((9731, 9746), 'pptx.table._Cell', '_Cell', (['tc', 'None'], {}), '(tc, None)\n', (9736, 9746), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((9748, 9769), 'pptx.table._Cell', '_Cell', (['other_tc', 'None'], {}), '(other_tc, None)\n', (9753, 9769), 
False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((10161, 10178), 'pptx.table._Cell', '_Cell', (['None', 'None'], {}), '(None, None)\n', (10166, 10178), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((10180, 10197), 'pptx.table._Cell', '_Cell', (['None', 'None'], {}), '(None, None)\n', (10185, 10197), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((10212, 10237), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (10225, 10237), False, 'import pytest\n'), ((10526, 10543), 'pptx.table._Cell', '_Cell', (['None', 'None'], {}), '(None, None)\n', (10531, 10543), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((10545, 10562), 'pptx.table._Cell', '_Cell', (['None', 'None'], {}), '(None, None)\n', (10550, 10562), False, 'from pptx.table import _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table\n'), ((10577, 10602), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (10590, 10602), False, 'import pytest\n'), ((11730, 11755), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (11743, 11755), False, 'import pytest\n'), ((20314, 20339), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (20327, 20339), False, 'import pytest\n'), ((20376, 20401), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (20389, 20401), False, 'import pytest\n'), ((24188, 24213), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (24201, 24213), False, 'import pytest\n'), ((24252, 24277), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (24265, 24277), False, 'import pytest\n'), ((28661, 28686), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (28674, 28686), False, 'import pytest\n'), ((28722, 28747), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (28735, 28747), False, 'import pytest\n'), ((29107, 29117), 'pptx.oxml.ns.qn', 'qn', (['"""a:tr"""'], {}), "('a:tr')\n", (29109, 29117), False, 'from pptx.oxml.ns import qn\n'), ((29420, 29430), 'pptx.oxml.ns.qn', 'qn', (['"""a:tr"""'], {}), "('a:tr')\n", (29422, 29430), False, 'from pptx.oxml.ns import qn\n'), ((14891, 14903), 'pptx.util.Inches', 'Inches', (['(0.09)'], {}), '(0.09)\n', (14897, 14903), False, 'from pptx.util import Inches, Length, Pt\n'), ((14962, 14974), 'pptx.util.Inches', 'Inches', (['(0.08)'], {}), '(0.08)\n', (14968, 14974), False, 'from pptx.util import Inches, Length, Pt\n'), ((15031, 15043), 'pptx.util.Inches', 'Inches', (['(0.07)'], {}), '(0.07)\n', (15037, 15043), False, 'from pptx.util import Inches, Length, Pt\n'), ((15103, 15115), 'pptx.util.Inches', 'Inches', (['(0.06)'], {}), '(0.06)\n', (15109, 15115), False, 'from pptx.util import Inches, Length, Pt\n'), ((15154, 15165), 'pptx.util.Inches', 'Inches', (['(0.1)'], {}), '(0.1)\n', (15160, 15165), False, 'from pptx.util import Inches, Length, Pt\n'), ((15212, 15223), 'pptx.util.Inches', 'Inches', (['(0.1)'], {}), '(0.1)\n', (15218, 15223), False, 'from pptx.util import Inches, Length, Pt\n'), ((15261, 15273), 'pptx.util.Inches', 'Inches', (['(0.05)'], {}), '(0.05)\n', (15267, 15273), False, 'from pptx.util import Inches, Length, Pt\n'), ((15321, 15333), 'pptx.util.Inches', 'Inches', 
(['(0.05)'], {}), '(0.05)\n', (15327, 15333), False, 'from pptx.util import Inches, Length, Pt\n'), ((15635, 15647), 'pptx.util.Inches', 'Inches', (['(0.08)'], {}), '(0.08)\n', (15641, 15647), False, 'from pptx.util import Inches, Length, Pt\n'), ((15714, 15726), 'pptx.util.Inches', 'Inches', (['(0.08)'], {}), '(0.08)\n', (15720, 15726), False, 'from pptx.util import Inches, Length, Pt\n'), ((15791, 15803), 'pptx.util.Inches', 'Inches', (['(0.08)'], {}), '(0.08)\n', (15797, 15803), False, 'from pptx.util import Inches, Length, Pt\n'), ((15871, 15883), 'pptx.util.Inches', 'Inches', (['(0.08)'], {}), '(0.08)\n', (15877, 15883), False, 'from pptx.util import Inches, Length, Pt\n'), ((22261, 22270), 'pptx.util.Inches', 'Inches', (['(1)'], {}), '(1)\n', (22267, 22270), False, 'from pptx.util import Inches, Length, Pt\n'), ((22295, 22301), 'pptx.util.Pt', 'Pt', (['(10)'], {}), '(10)\n', (22297, 22301), False, 'from pptx.util import Inches, Length, Pt\n'), ((22570, 22579), 'pptx.util.Inches', 'Inches', (['(1)'], {}), '(1)\n', (22576, 22579), False, 'from pptx.util import Inches, Length, Pt\n'), ((22639, 22648), 'pptx.util.Inches', 'Inches', (['(1)'], {}), '(1)\n', (22645, 22648), False, 'from pptx.util import Inches, Length, Pt\n'), ((26665, 26674), 'pptx.util.Inches', 'Inches', (['(1)'], {}), '(1)\n', (26671, 26674), False, 'from pptx.util import Inches, Length, Pt\n'), ((26694, 26700), 'pptx.util.Pt', 'Pt', (['(10)'], {}), '(10)\n', (26696, 26700), False, 'from pptx.util import Inches, Length, Pt\n'), ((26941, 26950), 'pptx.util.Inches', 'Inches', (['(1)'], {}), '(1)\n', (26947, 26950), False, 'from pptx.util import Inches, Length, Pt\n'), ((27000, 27009), 'pptx.util.Inches', 'Inches', (['(1)'], {}), '(1)\n', (27006, 27009), False, 'from pptx.util import Inches, Length, Pt\n')] |
luispedro/imread | imread/tests/test_bmp.py | 7960b744623fe03e6d968893a539bca969715860 | import numpy as np
from imread import imread
from . import file_path
def test_read():
im = imread(file_path('star1.bmp'))
assert np.any(im)
assert im.shape == (128, 128, 3)
def test_indexed():
im = imread(file_path('py-installer-indexed.bmp'))
assert np.any(im)
assert im.shape == (352, 162, 3)
assert np.any(im[:,:,0])
assert np.any(im[:,:,1])
assert np.any(im[:,:,2])
| [((138, 148), 'numpy.any', 'np.any', (['im'], {}), '(im)\n', (144, 148), True, 'import numpy as np\n'), ((273, 283), 'numpy.any', 'np.any', (['im'], {}), '(im)\n', (279, 283), True, 'import numpy as np\n'), ((332, 353), 'numpy.any', 'np.any', (['im[:, :, (0)]'], {}), '(im[:, :, (0)])\n', (338, 353), True, 'import numpy as np\n'), ((361, 382), 'numpy.any', 'np.any', (['im[:, :, (1)]'], {}), '(im[:, :, (1)])\n', (367, 382), True, 'import numpy as np\n'), ((390, 411), 'numpy.any', 'np.any', (['im[:, :, (2)]'], {}), '(im[:, :, (2)])\n', (396, 411), True, 'import numpy as np\n')] |
v3l0c1r4pt0r/bl60x-flash | bl60x_flash/main.py | 065770004629c3e5bf98057677e7a6ca566e9c4a | from serial import Serial
from tqdm import tqdm
import binascii
import hashlib
import struct
import time
import sys
import os
def if_read(ser, data_len):
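    # Read exactly data_len bytes; returns (1, data) on success or
    # (0, partial_data) if the port timed out first.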
data = bytearray(0)
received = 0
while received < data_len:
tmp = ser.read(data_len - received)
if len(tmp) == 0:
break
else:
data += tmp
received += len(tmp)
if len(data) != data_len:
return (0, data)
return (1, data)
def reset(ser):
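    # Pulse RTS to reset the chip (this assumes RTS is wired to the reset
    # line, as on common BL60x dev boards).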
ser.setRTS(0)
time.sleep(0.2)
reset_cnt = 2
while reset_cnt > 0:
ser.setRTS(1)
time.sleep(0.005)
ser.setRTS(0)
time.sleep(0.1)
ser.setRTS(1)
time.sleep(0.005)
ser.setRTS(0)
time.sleep(0.005)
reset_cnt -= 1
def handshake(ser):
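    # Wiggle RTS/DTR to enter the UART BootROM (assumes the usual dev-board
    # wiring where these lines drive the reset and boot-select pins).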
ser.setRTS(1)
time.sleep(0.2)
ser.setRTS(0)
time.sleep(0.05)
ser.setRTS(1)
ser.setDTR(1)
time.sleep(0.1)
ser.setDTR(0)
time.sleep(0.1)
def expect_ok(ser):
    data = ser.read(2)
    if len(data) < 2:
        raise ValueError('timed out waiting for response')
    if data[0] != 0x4f or data[1] != 0x4b:
        err = ser.read(2)
        raise ValueError(binascii.hexlify(err))
def expect_data(ser):
expect_ok(ser)
    data_len = ser.read(2)
    data_len = struct.unpack('<h', data_len)[0]
    data = ser.read(data_len)
return data
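
# The BootROM load commands below share one framing: a command byte, a zero
# byte, a 16-bit little-endian payload length, then the payload; each command
# is answered with 'OK'.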
def cmd_load_seg_header(ser, file):
header = file.read(0x10)
ser.write(b'\x17\x00\x10\x00' + header)
data = expect_data(ser)
seg_addr, seg_len = struct.unpack('<II', data[0:8])
print(f'{seg_len} bytes @ {hex(seg_addr)}')
return seg_len
def cmd_load_seg_data(ser, data):
ser.write(b'\x18\x00' + struct.pack('<H', len(data)) + data)
expect_ok(ser)
def cmd_load_boot_header(ser, file):
header = file.read(0xb0)
ser.write(b'\x11\x00\xb0\x00' + header)
expect_ok(ser)
def cmd_check_image(ser):
ser.write(b'\x19\x00\x00\x00')
expect_ok(ser)
def cmd_run_image(ser):
ser.write(b'\x1a\x00\x00\x00')
expect_ok(ser)
def load_image(ser, file):
image = open(file, 'rb')
cmd_load_boot_header(ser, image)
total = cmd_load_seg_header(ser, image)
sent = 0
with tqdm(total=total, unit='byte', unit_scale=True) as pbar:
while sent != total:
chunk = image.read(min(total-sent, 4080))
cmd_load_seg_data(ser, chunk)
sent = sent + len(chunk)
pbar.update(len(chunk))
cmd_check_image(ser)
cmd_run_image(ser)
def empty_buffer(ser):
timeout = ser.timeout
ser.timeout = 0.1
if_read(ser, 10000)
ser.timeout = timeout
def send_sync(ser):
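    # Drain stale input, then send ~6 ms worth of 0x55 bytes (10 bits per
    # byte on the wire) so the loader can synchronise; it should answer 'OK'.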
empty_buffer(ser)
ser.write(b'\x55' * int(0.006 * ser.baudrate / 10))
expect_ok(ser)
def efl_write_cmd(ser, id, payload = b''):
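    # eflash-loader frame: [cmd id][1-byte checksum][16-bit LE payload length]
    # [payload]; the checksum is the low byte of the sum of the length bytes
    # and the payload.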
plen = len(payload)
plen_data = struct.pack('<h', plen)
checksum = struct.pack('<h', sum(plen_data + payload) & 0xff)[0:1]
data = bytes([id]) + checksum + plen_data + payload
ser.write(data)
def efl_cmd_read_memory(ser, addr):
# there is a length parameter here but it doesn't seem to work correctly
efl_write_cmd(ser, 0x51, struct.pack('<II', addr, 0x4))
return expect_data(ser)
def efl_cmd_write_memory(ser, addr, data):
efl_write_cmd(ser, 0x50, struct.pack('<I', len(data)) + data)
expect_ok(ser)
def efl_cmd_read_jid(ser):
efl_write_cmd(ser, 0x36)
return expect_data(ser)
def efl_cmd_flash_erase(ser, addr, len):
end_addr = addr + len - 1
efl_write_cmd(ser, 0x30, struct.pack('<II', addr, end_addr))
timeout = ser.timeout
ser.timeout = 10.0
expect_ok(ser)
ser.timeout = timeout
print(f'Erased {len} bytes @ {hex(addr)}')
def efl_cmd_flash_write(ser, addr, data):
efl_write_cmd(ser, 0x31, struct.pack('<I', addr) + data)
expect_ok(ser)
def efl_cmd_flash_write_check(ser):
efl_write_cmd(ser, 0x3a)
expect_ok(ser)
def efl_cmd_flash_xip_read_start(ser):
efl_write_cmd(ser, 0x60)
expect_ok(ser)
def efl_cmd_flash_xip_read_sha(ser, addr, len):
efl_write_cmd(ser, 0x3e, struct.pack('<II', addr, len))
return expect_data(ser)
def efl_cmd_flash_xip_read_finish(ser):
efl_write_cmd(ser, 0x61)
expect_ok(ser)
def efl_cmd_reset(ser):
efl_write_cmd(ser, 0x21)
expect_ok(ser)
def efl_program_img(ser, addr, data):
data_len = len(data)
efl_cmd_flash_erase(ser, addr, data_len)
print(f'Programming {data_len} bytes @ {hex(addr)}')
sent = 0
with tqdm(total=data_len, unit='byte', unit_scale=True) as pbar:
while sent != data_len:
buf_len = min(2048, data_len - sent)
buf = data[sent:sent + buf_len]
efl_cmd_flash_write(ser, addr + sent, buf)
sent = sent + buf_len
pbar.update(buf_len)
efl_cmd_flash_write_check(ser)
sha256sum = hashlib.sha256(data).digest()
efl_cmd_flash_xip_read_start(ser)
device_sum = efl_cmd_flash_xip_read_sha(ser, addr, data_len)
efl_cmd_flash_xip_read_finish(ser)
if device_sum != sha256sum:
print('Verification failed')
print('Host SHA256:', binascii.hexlify(sha256sum))
print('BL SHA256:', binascii.hexlify(device_sum))
return False
print('Verified by XIP SHA256 hash')
return True
def prepend_fw_header(img, header_file):
if img[0:4] == b'BFNP':
print('Image already has FW header')
return img
with open(header_file, 'rb') as f:
header = f.read()
img = header + (b'\xFF' * (4096-len(header))) + img
return img
def get_contrib_path(name):
    return os.path.join(os.path.dirname(os.path.realpath(__file__)), 'contrib', name)
def main():
if len(sys.argv) < 3:
print(f'Usage: {sys.argv[0]} <serial port> <firmware bin>')
sys.exit(1)
ser = Serial(sys.argv[1], baudrate=500000, timeout=2)
handshake(ser)
reset(ser)
send_sync(ser)
time.sleep(0.1)
print('Loading helper binary')
load_image(ser, get_contrib_path('eflash_loader_40m.bin'))
time.sleep(0.2)
print()
# at this point, the eflash loader binary is running with efl_ commands
# (which seems to work with a higher baudrate)
ser.baudrate = 2000000
send_sync(ser)
with open(sys.argv[2], 'rb') as f:
data = f.read()
data = prepend_fw_header(data, get_contrib_path('bootheader.bin'))
efl_program_img(ser, 0x10000, data)
efl_cmd_reset(ser)
if __name__ == "__main__":
main()
| [((528, 543), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (538, 543), False, 'import time\n'), ((858, 873), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (868, 873), False, 'import time\n'), ((898, 914), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (908, 914), False, 'import time\n'), ((958, 973), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (968, 973), False, 'import time\n'), ((998, 1013), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (1008, 1013), False, 'import time\n'), ((1499, 1530), 'struct.unpack', 'struct.unpack', (['"""<II"""', 'data[0:8]'], {}), "('<II', data[0:8])\n", (1512, 1530), False, 'import struct\n'), ((2842, 2865), 'struct.pack', 'struct.pack', (['"""<h"""', 'plen'], {}), "('<h', plen)\n", (2853, 2865), False, 'import struct\n'), ((5929, 5976), 'serial.Serial', 'Serial', (['sys.argv[1]'], {'baudrate': '(500000)', 'timeout': '(2)'}), '(sys.argv[1], baudrate=500000, timeout=2)\n', (5935, 5976), False, 'from serial import Serial\n'), ((6038, 6053), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (6048, 6053), False, 'import time\n'), ((6159, 6174), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (6169, 6174), False, 'import time\n'), ((621, 638), 'time.sleep', 'time.sleep', (['(0.005)'], {}), '(0.005)\n', (631, 638), False, 'import time\n'), ((671, 686), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (681, 686), False, 'import time\n'), ((719, 736), 'time.sleep', 'time.sleep', (['(0.005)'], {}), '(0.005)\n', (729, 736), False, 'import time\n'), ((769, 786), 'time.sleep', 'time.sleep', (['(0.005)'], {}), '(0.005)\n', (779, 786), False, 'import time\n'), ((1260, 1284), 'struct.unpack', 'struct.unpack', (['"""<h"""', 'len'], {}), "('<h', len)\n", (1273, 1284), False, 'import struct\n'), ((2193, 2240), 'tqdm.tqdm', 'tqdm', ([], {'total': 'total', 'unit': '"""byte"""', 'unit_scale': '(True)'}), "(total=total, unit='byte', unit_scale=True)\n", (2197, 2240), False, 'from tqdm import tqdm\n'), ((3163, 3190), 'struct.pack', 'struct.pack', (['"""<II"""', 'addr', '(4)'], {}), "('<II', addr, 4)\n", (3174, 3190), False, 'import struct\n'), ((3550, 3584), 'struct.pack', 'struct.pack', (['"""<II"""', 'addr', 'end_addr'], {}), "('<II', addr, end_addr)\n", (3561, 3584), False, 'import struct\n'), ((4121, 4150), 'struct.pack', 'struct.pack', (['"""<II"""', 'addr', 'len'], {}), "('<II', addr, len)\n", (4132, 4150), False, 'import struct\n'), ((4548, 4598), 'tqdm.tqdm', 'tqdm', ([], {'total': 'data_len', 'unit': '"""byte"""', 'unit_scale': '(True)'}), "(total=data_len, unit='byte', unit_scale=True)\n", (4552, 4598), False, 'from tqdm import tqdm\n'), ((5904, 5915), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5912, 5915), False, 'import sys\n'), ((1158, 1179), 'binascii.hexlify', 'binascii.hexlify', (['err'], {}), '(err)\n', (1174, 1179), False, 'import binascii\n'), ((3807, 3830), 'struct.pack', 'struct.pack', (['"""<I"""', 'addr'], {}), "('<I', addr)\n", (3818, 3830), False, 'import struct\n'), ((4916, 4936), 'hashlib.sha256', 'hashlib.sha256', (['data'], {}), '(data)\n', (4930, 4936), False, 'import hashlib\n'), ((5193, 5220), 'binascii.hexlify', 'binascii.hexlify', (['sha256sum'], {}), '(sha256sum)\n', (5209, 5220), False, 'import binascii\n'), ((5253, 5281), 'binascii.hexlify', 'binascii.hexlify', (['device_sum'], {}), '(device_sum)\n', (5269, 5281), False, 'import binascii\n'), ((5725, 5751), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (5741, 5751), False, 'import 
os\n')] |
zerofox-oss/yelp-avro | lang/py/test/test_avro_builder.py | 913f95a4c34386d0fe9aff843b1a8ea362a1a2c5 | # -*- coding: utf-8 -*-
import unittest
from avro import avro_builder
from avro import schema
class TestAvroSchemaBuilder(unittest.TestCase):
def setUp(self):
self.builder = avro_builder.AvroSchemaBuilder()
def tearDown(self):
del self.builder
@property
def name(self):
return 'foo'
@property
def namespace(self):
return 'ns'
@property
def aliases(self):
return ['new_foo']
@property
def doc(self):
return 'sample doc'
@property
def metadata(self):
return {'key1': 'val1', 'key2': 'val2'}
@property
def enum_symbols(self):
return ['a', 'b']
@property
def fixed_size(self):
return 16
@property
def another_name(self):
return 'bar'
@property
def invalid_schemas(self):
undefined_schema_name = 'unknown'
yield undefined_schema_name
non_avro_schema = {'foo': 'bar'}
yield non_avro_schema
named_schema_without_name = {'name': '', 'type': 'fixed', 'size': 16}
yield named_schema_without_name
invalid_schema = {'name': 'foo', 'type': 'enum', 'symbols': ['a', 'a']}
yield invalid_schema
none_schema = None
yield none_schema
@property
def invalid_names(self):
missing_name = None
yield missing_name
reserved_name = 'int'
yield reserved_name
non_string_name = 100
yield non_string_name
@property
def duplicate_name_err(self):
return '"{0}" is already in use.'
def test_create_primitive_types(self):
self.assertEqual('null', self.builder.create_null())
self.assertEqual('boolean', self.builder.create_boolean())
self.assertEqual('int', self.builder.create_int())
self.assertEqual('long', self.builder.create_long())
self.assertEqual('float', self.builder.create_float())
self.assertEqual('double', self.builder.create_double())
self.assertEqual('bytes', self.builder.create_bytes())
self.assertEqual('string', self.builder.create_string())
def test_create_enum(self):
actual_json = self.builder.begin_enum(self.name, self.enum_symbols).end()
expected_json = {
'type': 'enum',
'name': self.name,
'symbols': self.enum_symbols
}
self.assertEqual(expected_json, actual_json)
def test_create_enum_with_optional_attributes(self):
actual_json = self.builder.begin_enum(
self.name,
self.enum_symbols,
self.namespace,
self.aliases,
self.doc,
**self.metadata
).end()
expected_json = {
'type': 'enum',
'name': self.name,
'symbols': self.enum_symbols,
'namespace': self.namespace,
'aliases': self.aliases,
'doc': self.doc
}
expected_json.update(self.metadata)
self.assertEqual(expected_json, actual_json)
def test_create_enum_with_invalid_name(self):
for invalid_name in self.invalid_names:
self.builder.clear()
with self.assertRaises(schema.SchemaParseException):
self.builder.begin_enum(invalid_name, self.enum_symbols).end()
def test_create_enum_with_dup_name(self):
with self.assertRaisesRegexp(
schema.SchemaParseException,
self.duplicate_name_err.format(self.name)
):
self.builder.begin_record(self.name)
self.builder.add_field(
self.another_name,
self.builder.begin_enum(self.name, self.enum_symbols).end()
)
self.builder.end()
def test_create_enum_with_invalid_symbols(self):
self.single_test_create_enum_with_invalid_symbols(None)
self.single_test_create_enum_with_invalid_symbols('')
self.single_test_create_enum_with_invalid_symbols('a')
self.single_test_create_enum_with_invalid_symbols(['a', 1])
self.single_test_create_enum_with_invalid_symbols([1, 2, 3])
self.single_test_create_enum_with_invalid_symbols(['a', 'a'])
def single_test_create_enum_with_invalid_symbols(self, invalid_symbols):
self.builder.clear()
with self.assertRaises(schema.AvroException):
self.builder.begin_enum(self.name, invalid_symbols).end()
def test_create_fixed(self):
actual_json = self.builder.begin_fixed(self.name, self.fixed_size).end()
expected_json = {
'type': 'fixed',
'name': self.name,
'size': self.fixed_size
}
self.assertEqual(expected_json, actual_json)
def test_create_fixed_with_optional_attributes(self):
actual_json = self.builder.begin_fixed(
self.name,
self.fixed_size,
self.namespace,
self.aliases,
**self.metadata
).end()
expected_json = {
'type': 'fixed',
'name': self.name,
'size': self.fixed_size,
'namespace': self.namespace,
'aliases': self.aliases,
}
expected_json.update(self.metadata)
self.assertEqual(expected_json, actual_json)
def test_create_fixed_with_invalid_name(self):
for invalid_name in self.invalid_names:
self.builder.clear()
with self.assertRaises(schema.SchemaParseException):
self.builder.begin_fixed(invalid_name, self.fixed_size).end()
def test_create_fixed_with_dup_name(self):
with self.assertRaisesRegexp(
schema.SchemaParseException,
self.duplicate_name_err.format(self.name)
):
self.builder.begin_record(self.name)
self.builder.add_field(
self.another_name,
self.builder.begin_fixed(self.name, self.fixed_size).end()
)
self.builder.end()
def test_create_fixed_with_invalid_size(self):
self.single_test_create_fixed_with_invalid_size(None)
self.single_test_create_fixed_with_invalid_size('ten')
def single_test_create_fixed_with_invalid_size(self, invalid_size):
self.builder.clear()
with self.assertRaises(schema.AvroException):
self.builder.begin_fixed(self.name, invalid_size).end()
def test_create_array(self):
actual_json = self.builder.begin_array(self.builder.create_int()).end()
expected_json = {'type': 'array', 'items': 'int'}
self.assertEqual(expected_json, actual_json)
def test_create_array_with_optional_attributes(self):
actual_json = self.builder.begin_array(
self.builder.create_int(),
**self.metadata
).end()
expected_json = {'type': 'array', 'items': 'int'}
expected_json.update(self.metadata)
self.assertEqual(expected_json, actual_json)
def test_create_array_with_complex_type(self):
actual_json = self.builder.begin_array(
self.builder.begin_enum(self.name, self.enum_symbols).end()
).end()
expected_json = {
'type': 'array',
'items': {
'type': 'enum',
'name': self.name,
'symbols': self.enum_symbols
}
}
self.assertEqual(expected_json, actual_json)
def test_create_array_with_invalid_items_type(self):
for invalid_schema in self.invalid_schemas:
self.builder.clear()
with self.assertRaises(schema.AvroException):
self.builder.begin_array(invalid_schema).end()
def test_create_map(self):
actual_json = self.builder.begin_map(self.builder.create_string()).end()
expected_json = {'type': 'map', 'values': 'string'}
self.assertEqual(expected_json, actual_json)
def test_create_map_with_optional_attributes(self):
actual_json = self.builder.begin_map(
self.builder.create_string(),
**self.metadata
).end()
expected_json = {'type': 'map', 'values': 'string'}
expected_json.update(self.metadata)
self.assertEqual(expected_json, actual_json)
def test_create_map_with_complex_type(self):
actual_json = self.builder.begin_map(
self.builder.begin_fixed(self.name, self.fixed_size).end()
).end()
expected_json = {
'type': 'map',
'values': {
'type': 'fixed',
'name': self.name,
'size': self.fixed_size
}
}
self.assertEqual(expected_json, actual_json)
def test_create_map_with_invalid_values_type(self):
for invalid_schema in self.invalid_schemas:
self.builder.clear()
with self.assertRaises(schema.AvroException):
self.builder.begin_map(invalid_schema).end()
def test_create_record(self):
self.builder.begin_record(self.name)
self.builder.add_field(
'bar1',
self.builder.create_int()
)
self.builder.add_field(
'bar2',
self.builder.begin_map(self.builder.create_double()).end()
)
actual_json = self.builder.end()
expected_json = {
'type': 'record',
'name': self.name,
'fields': [
{'name': 'bar1', 'type': 'int'},
{'name': 'bar2', 'type': {'type': 'map', 'values': 'double'}}
]
}
self.assertEqual(expected_json, actual_json)
def test_create_record_with_optional_attributes(self):
self.builder.begin_record(
self.name,
namespace=self.namespace,
aliases=self.aliases,
doc=self.doc,
**self.metadata
)
self.builder.add_field(
self.another_name,
self.builder.create_int()
)
actual_json = self.builder.end()
expected_json = {
'type': 'record',
'name': self.name,
'fields': [{'name': self.another_name, 'type': 'int'}],
'namespace': self.namespace,
'aliases': self.aliases,
'doc': self.doc
}
expected_json.update(self.metadata)
self.assertEqual(expected_json, actual_json)
def test_create_field_with_optional_attributes(self):
self.builder.begin_record(self.name)
self.builder.add_field(
self.another_name,
self.builder.create_boolean(),
has_default=True,
default_value=True,
sort_order='ascending',
aliases=self.aliases,
doc=self.doc,
**self.metadata
)
actual_json = self.builder.end()
expected_field = {
'name': self.another_name,
'type': 'boolean',
'default': True,
'order': 'ascending',
'aliases': self.aliases,
'doc': self.doc
}
expected_field.update(self.metadata)
expected_json = {
'type': 'record',
'name': self.name,
'fields': [expected_field]
}
self.assertEqual(expected_json, actual_json)
def test_create_record_with_no_field(self):
actual_json = self.builder.begin_record(self.name).end()
expected_json = {'type': 'record', 'name': self.name, 'fields': []}
self.assertEqual(expected_json, actual_json)
def test_create_record_with_invalid_name(self):
for invalid_name in self.invalid_names:
self.builder.clear()
with self.assertRaises(schema.SchemaParseException):
self.builder.begin_record(invalid_name)
self.builder.add_field(
self.another_name,
self.builder.create_int()
)
self.builder.end()
def test_create_record_with_dup_name(self):
with self.assertRaisesRegexp(
schema.SchemaParseException,
self.duplicate_name_err.format(self.name)
):
self.builder.begin_record(self.another_name)
self.builder.add_field(
'bar1',
self.builder.begin_enum(self.name, self.enum_symbols).end()
)
self.builder.add_field(
'bar2',
self.builder.begin_record(self.name).end()
)
self.builder.end()
def test_create_record_with_dup_field_name(self):
with self.assertRaisesRegexp(
schema.SchemaParseException,
"{0} already in use.".format(self.another_name)
):
self.builder.begin_record(self.name)
self.builder.add_field(
self.another_name,
self.builder.create_int()
)
self.builder.add_field(
self.another_name,
self.builder.create_string()
)
self.builder.end()
def test_create_field_with_invalid_type(self):
for invalid_schema in self.invalid_schemas:
self.builder.clear()
with self.assertRaises(schema.SchemaParseException):
self.builder.begin_record(self.name)
self.builder.add_field(
self.another_name,
invalid_schema
)
self.builder.end()
def test_create_field_with_invalid_sort_order(self):
with self.assertRaises(schema.SchemaParseException):
self.builder.begin_record(self.name)
self.builder.add_field(
self.another_name,
self.builder.create_int(),
sort_order='asc'
)
self.builder.end()
def test_create_union(self):
actual_json = self.builder.begin_union(
self.builder.create_null(),
self.builder.create_string(),
self.builder.begin_enum(self.name, self.enum_symbols).end()
).end()
expected_json = [
'null',
'string',
{'type': 'enum', 'name': self.name, 'symbols': self.enum_symbols}
]
self.assertEqual(expected_json, actual_json)
def test_create_union_with_empty_sub_schemas(self):
actual_json = self.builder.begin_union().end()
expected_json = []
self.assertEqual(expected_json, actual_json)
def test_create_union_with_nested_union_schema(self):
with self.assertRaises(schema.SchemaParseException):
self.builder.begin_union(
self.builder.begin_union(self.builder.create_int()).end()
).end()
def test_create_union_with_invalid_schema(self):
for invalid_schema in self.invalid_schemas:
self.builder.clear()
with self.assertRaises(schema.SchemaParseException):
self.builder.begin_union(invalid_schema).end()
def test_create_union_with_dup_primitive_schemas(self):
with self.assertRaises(schema.SchemaParseException):
self.builder.begin_union(
self.builder.create_int(),
self.builder.create_int()
).end()
def test_create_union_with_dup_named_schemas(self):
with self.assertRaises(schema.SchemaParseException):
self.builder.begin_union(
self.builder.begin_enum(self.name, self.enum_symbols).end(),
self.builder.begin_fixed(self.name, self.fixed_size).end()
).end()
def test_create_union_with_dup_complex_schemas(self):
with self.assertRaises(schema.SchemaParseException):
self.builder.begin_union(
self.builder.begin_map(self.builder.create_int()).end(),
self.builder.begin_map(self.builder.create_int()).end()
).end()
def test_create_nullable_type(self):
# non-union schema type
actual_json = self.builder.begin_nullable_type(
self.builder.create_int()
).end()
expected_json = ['null', 'int']
self.assertEqual(expected_json, actual_json)
# union schema type
actual_json = self.builder.begin_nullable_type(
[self.builder.create_int()]
).end()
expected_json = ['null', 'int']
self.assertEqual(expected_json, actual_json)
def test_create_nullable_type_with_default_value(self):
# non-union schema type
actual_json = self.builder.begin_nullable_type(
self.builder.create_int(),
10
).end()
expected_json = ['int', 'null']
self.assertEqual(expected_json, actual_json)
# union schema type
actual_json = self.builder.begin_nullable_type(
[self.builder.create_int()],
10
).end()
expected_json = ['int', 'null']
self.assertEqual(expected_json, actual_json)
def test_create_nullable_type_with_null_type(self):
actual_json = self.builder.begin_nullable_type(
self.builder.create_null()
).end()
expected_json = 'null'
self.assertEqual(expected_json, actual_json)
def test_create_nullable_type_with_nullable_type(self):
actual_json = self.builder.begin_nullable_type(
self.builder.begin_union(
self.builder.create_null(),
self.builder.create_long()
).end(),
10
).end()
expected_json = ['null', 'long']
self.assertEqual(expected_json, actual_json)
def test_create_nullable_type_with_invalid_type(self):
for invalid_schema in self.invalid_schemas:
self.builder.clear()
with self.assertRaises(schema.SchemaParseException):
self.builder.begin_nullable_type(invalid_schema)
def test_create_schema_with_preloaded_json(self):
schema_json = {
'type': 'record',
'name': self.name,
'fields': [
{'name': 'field', 'type': {'type': 'map', 'values': 'double'}}
]
}
self.builder.begin_with_schema_json(schema_json)
self.builder.add_field(
'field_new',
self.builder.create_int()
)
actual_json = self.builder.end()
expected_json = schema_json.copy()
expected_json['fields'].append({'name': 'field_new', 'type': 'int'})
self.assertEqual(expected_json, actual_json)
def test_removed_field(self):
self.builder.begin_record(self.name)
self.builder.add_field('bar1', self.builder.create_int())
self.builder.add_field('bar2', self.builder.create_int())
self.builder.remove_field('bar1')
actual_json = self.builder.end()
expected_json = {
'type': 'record',
'name': self.name,
'fields': [{'name': 'bar2', 'type': 'int'}]
}
self.assertEqual(expected_json, actual_json)
def test_removed_nonexistent_field(self):
schema_json = {
'type': 'record',
'name': self.name,
'fields': [{'name': 'bar2', 'type': 'int'}]
}
with self.assertRaises(avro_builder.AvroBuildInvalidOperation):
self.builder.begin_with_schema_json(schema_json)
self.builder.remove_field('bar1')
self.builder.end()
if __name__ == '__main__':
unittest.main()
| [((19622, 19637), 'unittest.main', 'unittest.main', ([], {}), '()\n', (19635, 19637), False, 'import unittest\n'), ((190, 222), 'avro.avro_builder.AvroSchemaBuilder', 'avro_builder.AvroSchemaBuilder', ([], {}), '()\n', (220, 222), False, 'from avro import avro_builder\n')] |
domluna/fun_with_ffi | monte_py/__init__.py | 9fc197b11a3470395db517657d624f0a3aa06958 | import random
def estimate_pi(sims, needles):
trials = []
    for _ in range(sims):
trials.append(simulate_pi(needles))
mean = sum(trials) / sims
return mean
# Sample points uniformly in the square [-1, 1] x [-1, 1]; the fraction that
# lands inside the inscribed unit circle estimates pi / 4.
def simulate_pi(needles):
    hits = 0  # how many samples land inside the circle
    for _ in range(needles):
        x = random.uniform(-1., 1.)
        y = random.uniform(-1., 1.)
if x*x + y*y <= 1.0:
hits += 1
return 4. * (hits / float(needles))
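
# Minimal usage sketch (the entry point below is an assumed addition, not part
# of the original API): average 20 independent runs of 100,000 samples each.
if __name__ == '__main__':
    print(estimate_pi(20, 100000))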
| [((317, 342), 'random.uniform', 'random.uniform', (['(-1.0)', '(1.0)'], {}), '(-1.0, 1.0)\n', (331, 342), False, 'import random\n'), ((353, 376), 'random.uniform', 'random.uniform', (['(-1)', '(1.0)'], {}), '(-1, 1.0)\n', (367, 376), False, 'import random\n')] |
UVA-DSI/circuitpython | tools/mpy_ld.py | 35ee4add63a604320d2fbd4e30baef2b5675f9a7 | #!/usr/bin/env python3
#
# This file is part of the MicroPython project, http://micropython.org/
#
# The MIT License (MIT)
#
# Copyright (c) 2019 Damien P. George
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Link .o files to .mpy
"""
import sys, os, struct, re
from elftools.elf import elffile
sys.path.append(os.path.dirname(__file__) + "/../py")
import makeqstrdata as qstrutil
# MicroPython constants
MPY_VERSION = 5
MP_NATIVE_ARCH_X86 = 1
MP_NATIVE_ARCH_X64 = 2
MP_NATIVE_ARCH_ARMV7M = 5
MP_NATIVE_ARCH_ARMV7EMSP = 7
MP_NATIVE_ARCH_ARMV7EMDP = 8
MP_NATIVE_ARCH_XTENSA = 9
MP_NATIVE_ARCH_XTENSAWIN = 10
MP_CODE_BYTECODE = 2
MP_CODE_NATIVE_VIPER = 4
MP_SCOPE_FLAG_VIPERRELOC = 0x20
MP_SCOPE_FLAG_VIPERRODATA = 0x40
MP_SCOPE_FLAG_VIPERBSS = 0x80
MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE = 1
MICROPY_PY_BUILTINS_STR_UNICODE = 2
MP_SMALL_INT_BITS = 31
QSTR_WINDOW_SIZE = 32
# ELF constants
R_386_32 = 1
R_X86_64_64 = 1
R_XTENSA_32 = 1
R_386_PC32 = 2
R_X86_64_PC32 = 2
R_ARM_ABS32 = 2
R_386_GOT32 = 3
R_ARM_REL32 = 3
R_386_PLT32 = 4
R_X86_64_PLT32 = 4
R_XTENSA_PLT = 6
R_386_GOTOFF = 9
R_386_GOTPC = 10
R_ARM_THM_CALL = 10
R_XTENSA_DIFF32 = 19
R_XTENSA_SLOT0_OP = 20
R_ARM_BASE_PREL = 25 # aka R_ARM_GOTPC
R_ARM_GOT_BREL = 26 # aka R_ARM_GOT32
R_ARM_THM_JUMP24 = 30
R_X86_64_REX_GOTPCRELX = 42
R_386_GOT32X = 43
################################################################################
# Architecture configuration
def asm_jump_x86(entry):
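    # JMP rel32: opcode 0xE9, displacement relative to the end of this
    # 5-byte instruction, hence entry - 5.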
return struct.pack("<BI", 0xE9, entry - 5)
def asm_jump_arm(entry):
b_off = entry - 4
if b_off >> 11 == 0 or b_off >> 11 == -1:
# Signed value fits in 12 bits
b0 = 0xE000 | (b_off >> 1 & 0x07FF)
b1 = 0
else:
# Use large jump
b0 = 0xF000 | (b_off >> 12 & 0x07FF)
b1 = 0xB800 | (b_off >> 1 & 0x7FF)
return struct.pack("<HH", b0, b1)
def asm_jump_xtensa(entry):
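    # Xtensa J (unconditional jump): a 3-byte instruction whose 18-bit signed
    # offset is taken relative to the instruction address plus 4.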
jump_offset = entry - 4
jump_op = jump_offset << 6 | 6
return struct.pack("<BH", jump_op & 0xFF, jump_op >> 8)
class ArchData:
def __init__(self, name, mpy_feature, qstr_entry_size, word_size, arch_got, asm_jump):
self.name = name
self.mpy_feature = mpy_feature
self.qstr_entry_size = qstr_entry_size
self.word_size = word_size
self.arch_got = arch_got
self.asm_jump = asm_jump
self.separate_rodata = name == "EM_XTENSA" and qstr_entry_size == 4
ARCH_DATA = {
"x86": ArchData(
"EM_386",
MP_NATIVE_ARCH_X86 << 2
| MICROPY_PY_BUILTINS_STR_UNICODE
| MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE,
2,
4,
(R_386_PC32, R_386_GOT32, R_386_GOT32X),
asm_jump_x86,
),
"x64": ArchData(
"EM_X86_64",
MP_NATIVE_ARCH_X64 << 2
| MICROPY_PY_BUILTINS_STR_UNICODE
| MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE,
2,
8,
(R_X86_64_REX_GOTPCRELX,),
asm_jump_x86,
),
"armv7m": ArchData(
"EM_ARM",
MP_NATIVE_ARCH_ARMV7M << 2 | MICROPY_PY_BUILTINS_STR_UNICODE,
2,
4,
(R_ARM_GOT_BREL,),
asm_jump_arm,
),
"armv7emsp": ArchData(
"EM_ARM",
MP_NATIVE_ARCH_ARMV7EMSP << 2 | MICROPY_PY_BUILTINS_STR_UNICODE,
2,
4,
(R_ARM_GOT_BREL,),
asm_jump_arm,
),
"armv7emdp": ArchData(
"EM_ARM",
MP_NATIVE_ARCH_ARMV7EMDP << 2 | MICROPY_PY_BUILTINS_STR_UNICODE,
2,
4,
(R_ARM_GOT_BREL,),
asm_jump_arm,
),
"xtensa": ArchData(
"EM_XTENSA",
MP_NATIVE_ARCH_XTENSA << 2 | MICROPY_PY_BUILTINS_STR_UNICODE,
2,
4,
(R_XTENSA_32, R_XTENSA_PLT),
asm_jump_xtensa,
),
"xtensawin": ArchData(
"EM_XTENSA",
MP_NATIVE_ARCH_XTENSAWIN << 2 | MICROPY_PY_BUILTINS_STR_UNICODE,
4,
4,
(R_XTENSA_32, R_XTENSA_PLT),
asm_jump_xtensa,
),
}
################################################################################
# Helper functions
def align_to(value, align):
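    # Round value up to the next multiple of align (align must be a power of 2).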
return (value + align - 1) & ~(align - 1)
def unpack_u24le(data, offset):
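    # Read a 24-bit little-endian value from data (pack_u24le is the writer).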
return data[offset] | data[offset + 1] << 8 | data[offset + 2] << 16
def pack_u24le(data, offset, value):
data[offset] = value & 0xFF
data[offset + 1] = value >> 8 & 0xFF
data[offset + 2] = value >> 16 & 0xFF
def xxd(text):
for i in range(0, len(text), 16):
print("{:08x}:".format(i), end="")
for j in range(4):
off = i + j * 4
if off < len(text):
d = int.from_bytes(text[off : off + 4], "little")
print(" {:08x}".format(d), end="")
print()
# Log levels: messages at or below the current log_level are printed, so
# smaller numbers are enabled first.
LOG_LEVEL_1 = 1
LOG_LEVEL_2 = 2
LOG_LEVEL_3 = 3
log_level = LOG_LEVEL_1
def log(level, msg):
if level <= log_level:
print(msg)
################################################################################
# Qstr extraction
def extract_qstrs(source_files):
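    # Scan (preprocessed) C sources for MP_QSTR_xxx tokens; tokens wrapped in
    # MP_OBJ_NEW_QSTR(...) are collected separately from bare qstr values.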
def read_qstrs(f):
with open(f) as f:
vals = set()
objs = set()
for line in f:
while line:
m = re.search(r"MP_OBJ_NEW_QSTR\((MP_QSTR_[A-Za-z0-9_]*)\)", line)
if m:
objs.add(m.group(1))
else:
m = re.search(r"MP_QSTR_[A-Za-z0-9_]*", line)
if m:
vals.add(m.group())
if m:
s = m.span()
line = line[: s[0]] + line[s[1] :]
else:
line = ""
return vals, objs
static_qstrs = ["MP_QSTR_" + qstrutil.qstr_escape(q) for q in qstrutil.static_qstr_list]
qstr_vals = set()
qstr_objs = set()
for f in source_files:
vals, objs = read_qstrs(f)
qstr_vals.update(vals)
qstr_objs.update(objs)
qstr_vals.difference_update(static_qstrs)
return static_qstrs, qstr_vals, qstr_objs
################################################################################
# Linker
class LinkError(Exception):
pass
class Section:
def __init__(self, name, data, alignment, filename=None):
self.filename = filename
self.name = name
self.data = data
self.alignment = alignment
self.addr = 0
self.reloc = []
@staticmethod
def from_elfsec(elfsec, filename):
assert elfsec.header.sh_addr == 0
return Section(elfsec.name, elfsec.data(), elfsec.data_alignment, filename)
class GOTEntry:
def __init__(self, name, sym, link_addr=0):
self.name = name
self.sym = sym
self.offset = None
self.link_addr = link_addr
def isexternal(self):
return self.sec_name.startswith(".external")
def istext(self):
return self.sec_name.startswith(".text")
def isrodata(self):
return self.sec_name.startswith((".rodata", ".data.rel.ro"))
def isbss(self):
return self.sec_name.startswith(".bss")
class LiteralEntry:
def __init__(self, value, offset):
self.value = value
self.offset = offset
class LinkEnv:
def __init__(self, arch):
self.arch = ARCH_DATA[arch]
self.sections = [] # list of sections in order of output
self.literal_sections = [] # list of literal sections (xtensa only)
self.known_syms = {} # dict of symbols that are defined
self.unresolved_syms = [] # list of unresolved symbols
self.mpy_relocs = [] # list of relocations needed in the output .mpy file
def check_arch(self, arch_name):
if arch_name != self.arch.name:
raise LinkError("incompatible arch")
def print_sections(self):
log(LOG_LEVEL_2, "sections:")
for sec in self.sections:
log(LOG_LEVEL_2, " {:08x} {} size={}".format(sec.addr, sec.name, len(sec.data)))
def find_addr(self, name):
if name in self.known_syms:
s = self.known_syms[name]
return s.section.addr + s["st_value"]
raise LinkError("unknown symbol: {}".format(name))
def build_got_generic(env):
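    # Create one GOT entry per global symbol referenced via a GOT-type
    # relocation for the target architecture.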
env.got_entries = {}
for sec in env.sections:
for r in sec.reloc:
s = r.sym
if not (
s.entry["st_info"]["bind"] == "STB_GLOBAL"
and r["r_info_type"] in env.arch.arch_got
):
continue
s_type = s.entry["st_info"]["type"]
assert s_type in ("STT_NOTYPE", "STT_FUNC", "STT_OBJECT"), s_type
assert s.name
if s.name in env.got_entries:
continue
env.got_entries[s.name] = GOTEntry(s.name, s)
def build_got_xtensa(env):
env.got_entries = {}
env.lit_entries = {}
env.xt_literals = {}
# Extract the values from the literal table
for sec in env.literal_sections:
assert len(sec.data) % env.arch.word_size == 0
# Look through literal relocations to find any global pointers that should be GOT entries
for r in sec.reloc:
s = r.sym
s_type = s.entry["st_info"]["type"]
assert s_type in ("STT_NOTYPE", "STT_FUNC", "STT_OBJECT", "STT_SECTION"), s_type
assert r["r_info_type"] in env.arch.arch_got
assert r["r_offset"] % env.arch.word_size == 0
# This entry is a global pointer
existing = struct.unpack_from("<I", sec.data, r["r_offset"])[0]
if s_type == "STT_SECTION":
assert r["r_addend"] == 0
name = "{}+0x{:x}".format(s.section.name, existing)
else:
assert existing == 0
name = s.name
if r["r_addend"] != 0:
name = "{}+0x{:x}".format(name, r["r_addend"])
idx = "{}+0x{:x}".format(sec.filename, r["r_offset"])
env.xt_literals[idx] = name
if name in env.got_entries:
# Deduplicate GOT entries
continue
env.got_entries[name] = GOTEntry(name, s, existing)
        # Go through all literal entries, finding those that aren't global pointers and so must be actual literals
for i in range(0, len(sec.data), env.arch.word_size):
idx = "{}+0x{:x}".format(sec.filename, i)
if idx not in env.xt_literals:
# This entry is an actual literal
value = struct.unpack_from("<I", sec.data, i)[0]
env.xt_literals[idx] = value
if value in env.lit_entries:
# Deduplicate literals
continue
env.lit_entries[value] = LiteralEntry(
value, len(env.lit_entries) * env.arch.word_size
)
def populate_got(env):
# Compute GOT destination addresses
for got_entry in env.got_entries.values():
sym = got_entry.sym
if hasattr(sym, "resolved"):
sym = sym.resolved
sec = sym.section
addr = sym["st_value"]
got_entry.sec_name = sec.name
got_entry.link_addr += sec.addr + addr
# Get sorted GOT, sorted by external, text, rodata, bss so relocations can be combined
got_list = sorted(
env.got_entries.values(),
key=lambda g: g.isexternal() + 2 * g.istext() + 3 * g.isrodata() + 4 * g.isbss(),
)
# Layout and populate the GOT
offset = 0
for got_entry in got_list:
got_entry.offset = offset
offset += env.arch.word_size
o = env.got_section.addr + got_entry.offset
env.full_text[o : o + env.arch.word_size] = got_entry.link_addr.to_bytes(
env.arch.word_size, "little"
)
# Create a relocation for each GOT entry
for got_entry in got_list:
if got_entry.name == "mp_fun_table":
dest = "mp_fun_table"
elif got_entry.name.startswith("mp_fun_table+0x"):
dest = int(got_entry.name.split("+")[1], 16) // env.arch.word_size
elif got_entry.sec_name.startswith(".text"):
dest = ".text"
elif got_entry.sec_name.startswith(".rodata"):
dest = ".rodata"
elif got_entry.sec_name.startswith(".data.rel.ro"):
dest = ".data.rel.ro"
elif got_entry.sec_name.startswith(".bss"):
dest = ".bss"
else:
assert 0, (got_entry.name, got_entry.sec_name)
env.mpy_relocs.append((".text", env.got_section.addr + got_entry.offset, dest))
# Print out the final GOT
log(LOG_LEVEL_2, "GOT: {:08x}".format(env.got_section.addr))
for g in got_list:
log(
LOG_LEVEL_2,
" {:08x} {} -> {}+{:08x}".format(g.offset, g.name, g.sec_name, g.link_addr),
)
def populate_lit(env):
log(LOG_LEVEL_2, "LIT: {:08x}".format(env.lit_section.addr))
for lit_entry in env.lit_entries.values():
value = lit_entry.value
log(LOG_LEVEL_2, " {:08x} = {:08x}".format(lit_entry.offset, value))
o = env.lit_section.addr + lit_entry.offset
env.full_text[o : o + env.arch.word_size] = value.to_bytes(env.arch.word_size, "little")
def do_relocation_text(env, text_addr, r):
# Extract relevant info about symbol that's being relocated
s = r.sym
s_bind = s.entry["st_info"]["bind"]
s_shndx = s.entry["st_shndx"]
s_type = s.entry["st_info"]["type"]
r_offset = r["r_offset"] + text_addr
r_info_type = r["r_info_type"]
try:
# only for RELA sections
r_addend = r["r_addend"]
except KeyError:
r_addend = 0
# Default relocation type and name for logging
reloc_type = "le32"
log_name = None
if (
env.arch.name == "EM_386"
and r_info_type in (R_386_PC32, R_386_PLT32)
or env.arch.name == "EM_X86_64"
and r_info_type in (R_X86_64_PC32, R_X86_64_PLT32)
or env.arch.name == "EM_ARM"
and r_info_type in (R_ARM_REL32, R_ARM_THM_CALL, R_ARM_THM_JUMP24)
or s_bind == "STB_LOCAL"
and env.arch.name == "EM_XTENSA"
and r_info_type == R_XTENSA_32 # not GOT
):
# Standard relocation to fixed location within text/rodata
if hasattr(s, "resolved"):
s = s.resolved
sec = s.section
if env.arch.separate_rodata and sec.name.startswith(".rodata"):
raise LinkError("fixed relocation to rodata with rodata referenced via GOT")
if sec.name.startswith(".bss"):
raise LinkError(
"{}: fixed relocation to bss (bss variables can't be static)".format(s.filename)
)
if sec.name.startswith(".external"):
raise LinkError(
"{}: fixed relocation to external symbol: {}".format(s.filename, s.name)
)
addr = sec.addr + s["st_value"]
reloc = addr - r_offset + r_addend
if r_info_type in (R_ARM_THM_CALL, R_ARM_THM_JUMP24):
# Both relocations have the same bit pattern to rewrite:
# R_ARM_THM_CALL: bl
# R_ARM_THM_JUMP24: b.w
reloc_type = "thumb_b"
elif (
env.arch.name == "EM_386"
and r_info_type == R_386_GOTPC
or env.arch.name == "EM_ARM"
and r_info_type == R_ARM_BASE_PREL
):
# Relocation to GOT address itself
assert s.name == "_GLOBAL_OFFSET_TABLE_"
addr = env.got_section.addr
reloc = addr - r_offset + r_addend
elif (
env.arch.name == "EM_386"
and r_info_type in (R_386_GOT32, R_386_GOT32X)
or env.arch.name == "EM_ARM"
and r_info_type == R_ARM_GOT_BREL
):
        # Relocation pointing to GOT
reloc = addr = env.got_entries[s.name].offset
elif env.arch.name == "EM_X86_64" and r_info_type == R_X86_64_REX_GOTPCRELX:
        # Relocation pointing to GOT
got_entry = env.got_entries[s.name]
addr = env.got_section.addr + got_entry.offset
reloc = addr - r_offset + r_addend
elif env.arch.name == "EM_386" and r_info_type == R_386_GOTOFF:
# Relocation relative to GOT
addr = s.section.addr + s["st_value"]
reloc = addr - env.got_section.addr + r_addend
elif env.arch.name == "EM_XTENSA" and r_info_type == R_XTENSA_SLOT0_OP:
# Relocation pointing to GOT, xtensa specific
sec = s.section
if sec.name.startswith(".text"):
# it looks like R_XTENSA_SLOT0_OP into .text is already correctly relocated
return
assert sec.name.startswith(".literal"), sec.name
lit_idx = "{}+0x{:x}".format(sec.filename, r_addend)
lit_ptr = env.xt_literals[lit_idx]
if isinstance(lit_ptr, str):
addr = env.got_section.addr + env.got_entries[lit_ptr].offset
log_name = "GOT {}".format(lit_ptr)
else:
addr = env.lit_section.addr + env.lit_entries[lit_ptr].offset
log_name = "LIT"
reloc = addr - r_offset
reloc_type = "xtensa_l32r"
elif env.arch.name == "EM_XTENSA" and r_info_type == R_XTENSA_DIFF32:
if s.section.name.startswith(".text"):
# it looks like R_XTENSA_DIFF32 into .text is already correctly relocated
return
assert 0
else:
# Unknown/unsupported relocation
assert 0, r_info_type
# Write relocation
if reloc_type == "le32":
(existing,) = struct.unpack_from("<I", env.full_text, r_offset)
struct.pack_into("<I", env.full_text, r_offset, (existing + reloc) & 0xFFFFFFFF)
elif reloc_type == "thumb_b":
b_h, b_l = struct.unpack_from("<HH", env.full_text, r_offset)
existing = (b_h & 0x7FF) << 12 | (b_l & 0x7FF) << 1
if existing >= 0x400000: # 2's complement
existing -= 0x800000
new = existing + reloc
b_h = (b_h & 0xF800) | (new >> 12) & 0x7FF
b_l = (b_l & 0xF800) | (new >> 1) & 0x7FF
struct.pack_into("<HH", env.full_text, r_offset, b_h, b_l)
elif reloc_type == "xtensa_l32r":
l32r = unpack_u24le(env.full_text, r_offset)
assert l32r & 0xF == 1 # RI16 encoded l32r
l32r_imm16 = l32r >> 8
l32r_imm16 = (l32r_imm16 + reloc >> 2) & 0xFFFF
l32r = l32r & 0xFF | l32r_imm16 << 8
pack_u24le(env.full_text, r_offset, l32r)
else:
assert 0, reloc_type
# Log information about relocation
if log_name is None:
if s_type == "STT_SECTION":
log_name = s.section.name
else:
log_name = s.name
log(LOG_LEVEL_3, " {:08x} {} -> {:08x}".format(r_offset, log_name, addr))
def do_relocation_data(env, text_addr, r):
s = r.sym
s_type = s.entry["st_info"]["type"]
r_offset = r["r_offset"] + text_addr
r_info_type = r["r_info_type"]
try:
# only for RELA sections
r_addend = r["r_addend"]
except KeyError:
r_addend = 0
if (
env.arch.name == "EM_386"
and r_info_type == R_386_32
or env.arch.name == "EM_X86_64"
and r_info_type == R_X86_64_64
or env.arch.name == "EM_ARM"
and r_info_type == R_ARM_ABS32
or env.arch.name == "EM_XTENSA"
and r_info_type == R_XTENSA_32
):
# Relocation in data.rel.ro to internal/external symbol
if env.arch.word_size == 4:
struct_type = "<I"
elif env.arch.word_size == 8:
struct_type = "<Q"
sec = s.section
assert r_offset % env.arch.word_size == 0
addr = sec.addr + s["st_value"] + r_addend
if s_type == "STT_SECTION":
log_name = sec.name
else:
log_name = s.name
log(LOG_LEVEL_3, " {:08x} -> {} {:08x}".format(r_offset, log_name, addr))
if env.arch.separate_rodata:
data = env.full_rodata
else:
data = env.full_text
(existing,) = struct.unpack_from(struct_type, data, r_offset)
if sec.name.startswith((".text", ".rodata", ".data.rel.ro", ".bss")):
struct.pack_into(struct_type, data, r_offset, existing + addr)
kind = sec.name
elif sec.name == ".external.mp_fun_table":
assert addr == 0
kind = s.mp_fun_table_offset
else:
assert 0, sec.name
if env.arch.separate_rodata:
base = ".rodata"
else:
base = ".text"
env.mpy_relocs.append((base, r_offset, kind))
else:
# Unknown/unsupported relocation
assert 0, r_info_type
def load_object_file(env, felf):
with open(felf, "rb") as f:
elf = elffile.ELFFile(f)
env.check_arch(elf["e_machine"])
# Get symbol table
symtab = list(elf.get_section_by_name(".symtab").iter_symbols())
# Load needed sections from ELF file
sections_shndx = {} # maps elf shndx to Section object
for idx, s in enumerate(elf.iter_sections()):
if s.header.sh_type in ("SHT_PROGBITS", "SHT_NOBITS"):
if s.data_size == 0:
# Ignore empty sections
pass
elif s.name.startswith((".literal", ".text", ".rodata", ".data.rel.ro", ".bss")):
sec = Section.from_elfsec(s, felf)
sections_shndx[idx] = sec
if s.name.startswith(".literal"):
env.literal_sections.append(sec)
else:
env.sections.append(sec)
elif s.name.startswith(".data"):
raise LinkError("{}: {} non-empty".format(felf, s.name))
else:
# Ignore section
pass
elif s.header.sh_type in ("SHT_REL", "SHT_RELA"):
shndx = s.header.sh_info
if shndx in sections_shndx:
sec = sections_shndx[shndx]
sec.reloc_name = s.name
sec.reloc = list(s.iter_relocations())
for r in sec.reloc:
r.sym = symtab[r["r_info_sym"]]
# Link symbols to their sections, and update known and unresolved symbols
for sym in symtab:
sym.filename = felf
shndx = sym.entry["st_shndx"]
if shndx in sections_shndx:
# Symbol with associated section
sym.section = sections_shndx[shndx]
if sym["st_info"]["bind"] == "STB_GLOBAL":
# Defined global symbol
if sym.name in env.known_syms and not sym.name.startswith(
"__x86.get_pc_thunk."
):
raise LinkError("duplicate symbol: {}".format(sym.name))
env.known_syms[sym.name] = sym
elif sym.entry["st_shndx"] == "SHN_UNDEF" and sym["st_info"]["bind"] == "STB_GLOBAL":
# Undefined global symbol, needs resolving
env.unresolved_syms.append(sym)
def link_objects(env, native_qstr_vals_len, native_qstr_objs_len):
# Build GOT information
if env.arch.name == "EM_XTENSA":
build_got_xtensa(env)
else:
build_got_generic(env)
    # Create GOT section
got_size = len(env.got_entries) * env.arch.word_size
env.got_section = Section("GOT", bytearray(got_size), env.arch.word_size)
if env.arch.name == "EM_XTENSA":
env.sections.insert(0, env.got_section)
else:
env.sections.append(env.got_section)
# Create optional literal section
if env.arch.name == "EM_XTENSA":
lit_size = len(env.lit_entries) * env.arch.word_size
env.lit_section = Section("LIT", bytearray(lit_size), env.arch.word_size)
env.sections.insert(1, env.lit_section)
# Create section to contain mp_native_qstr_val_table
env.qstr_val_section = Section(
".text.QSTR_VAL",
bytearray(native_qstr_vals_len * env.arch.qstr_entry_size),
env.arch.qstr_entry_size,
)
env.sections.append(env.qstr_val_section)
# Create section to contain mp_native_qstr_obj_table
env.qstr_obj_section = Section(
".text.QSTR_OBJ", bytearray(native_qstr_objs_len * env.arch.word_size), env.arch.word_size
)
env.sections.append(env.qstr_obj_section)
# Resolve unknown symbols
mp_fun_table_sec = Section(".external.mp_fun_table", b"", 0)
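    # Map well-known runtime symbols to their slot in mp_fun_table; 68 is
    # assumed to be the index of the first entry (mp_type_type) in the
    # mp_fun_table layout this linker targets.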
fun_table = {
key: 68 + idx
for idx, key in enumerate(
[
"mp_type_type",
"mp_type_str",
"mp_type_list",
"mp_type_dict",
"mp_type_fun_builtin_0",
"mp_type_fun_builtin_1",
"mp_type_fun_builtin_2",
"mp_type_fun_builtin_3",
"mp_type_fun_builtin_var",
"mp_stream_read_obj",
"mp_stream_readinto_obj",
"mp_stream_unbuffered_readline_obj",
"mp_stream_write_obj",
]
)
}
for sym in env.unresolved_syms:
assert sym["st_value"] == 0
if sym.name == "_GLOBAL_OFFSET_TABLE_":
pass
elif sym.name == "mp_fun_table":
sym.section = Section(".external", b"", 0)
elif sym.name == "mp_native_qstr_val_table":
sym.section = env.qstr_val_section
elif sym.name == "mp_native_qstr_obj_table":
sym.section = env.qstr_obj_section
elif sym.name in env.known_syms:
sym.resolved = env.known_syms[sym.name]
else:
if sym.name in fun_table:
sym.section = mp_fun_table_sec
sym.mp_fun_table_offset = fun_table[sym.name]
else:
raise LinkError("{}: undefined symbol: {}".format(sym.filename, sym.name))
# Align sections, assign their addresses, and create full_text
env.full_text = bytearray(env.arch.asm_jump(8)) # dummy, to be filled in later
env.full_rodata = bytearray(0)
env.full_bss = bytearray(0)
for sec in env.sections:
if env.arch.separate_rodata and sec.name.startswith((".rodata", ".data.rel.ro")):
data = env.full_rodata
elif sec.name.startswith(".bss"):
data = env.full_bss
else:
data = env.full_text
sec.addr = align_to(len(data), sec.alignment)
data.extend(b"\x00" * (sec.addr - len(data)))
data.extend(sec.data)
env.print_sections()
populate_got(env)
if env.arch.name == "EM_XTENSA":
populate_lit(env)
# Fill in relocations
for sec in env.sections:
if not sec.reloc:
continue
log(
LOG_LEVEL_3,
"{}: {} relocations via {}:".format(sec.filename, sec.name, sec.reloc_name),
)
for r in sec.reloc:
if sec.name.startswith((".text", ".rodata")):
do_relocation_text(env, sec.addr, r)
elif sec.name.startswith(".data.rel.ro"):
do_relocation_data(env, sec.addr, r)
else:
assert 0, sec.name
################################################################################
# .mpy output
class MPYOutput:
def open(self, fname):
self.f = open(fname, "wb")
self.prev_base = -1
self.prev_offset = -1
def close(self):
self.f.close()
def write_bytes(self, buf):
self.f.write(buf)
def write_uint(self, val):
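        # Encode val as a variable-length unsigned integer: 7 bits per byte,
        # most-significant group first, with the high bit set on every byte
        # except the last.  For example, 0x1234 encodes to b"\xa4\x34".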
b = bytearray()
b.insert(0, val & 0x7F)
val >>= 7
while val:
b.insert(0, 0x80 | (val & 0x7F))
val >>= 7
self.write_bytes(b)
def write_qstr(self, s):
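        # Static qstrs are written as a two-byte reference (0, index + 1);
        # any other qstr is written as its length shifted left by one,
        # followed by the raw ASCII bytes.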
if s in qstrutil.static_qstr_list:
self.write_bytes(bytes([0, qstrutil.static_qstr_list.index(s) + 1]))
else:
s = bytes(s, "ascii")
self.write_uint(len(s) << 1)
self.write_bytes(s)
def write_reloc(self, base, offset, dest, n):
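        # Pack one relocation record into a single byte: dest shifted left by
        # one, with the low bit flagging a multi-entry run (for dest <= 2) or
        # the presence of an explicit offset (for dest >= 6).  The offset
        # (with the base section in its low bit) and the run length n follow
        # as varuints when needed.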
need_offset = not (base == self.prev_base and offset == self.prev_offset + 1)
self.prev_offset = offset + n - 1
if dest <= 2:
dest = (dest << 1) | (n > 1)
else:
assert 6 <= dest <= 127
assert n == 1
dest = dest << 1 | need_offset
assert 0 <= dest <= 0xFE, dest
self.write_bytes(bytes([dest]))
if need_offset:
if base == ".text":
base = 0
elif base == ".rodata":
base = 1
self.write_uint(offset << 1 | base)
if n > 1:
self.write_uint(n)
def build_mpy(env, entry_offset, fmpy, native_qstr_vals, native_qstr_objs):
# Write jump instruction to start of text
jump = env.arch.asm_jump(entry_offset)
env.full_text[: len(jump)] = jump
log(LOG_LEVEL_1, "arch: {}".format(env.arch.name))
log(LOG_LEVEL_1, "text size: {}".format(len(env.full_text)))
if len(env.full_rodata):
log(LOG_LEVEL_1, "rodata size: {}".format(len(env.full_rodata)))
log(LOG_LEVEL_1, "bss size: {}".format(len(env.full_bss)))
log(LOG_LEVEL_1, "GOT entries: {}".format(len(env.got_entries)))
# xxd(env.full_text)
out = MPYOutput()
out.open(fmpy)
# MPY: header
out.write_bytes(
bytearray(
[
ord("C"),
MPY_VERSION,
env.arch.mpy_feature,
MP_SMALL_INT_BITS,
QSTR_WINDOW_SIZE,
]
)
)
# MPY: kind/len
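    # the text length is stored shifted left by 2, with the code kind
    # (relative to MP_CODE_BYTECODE) packed into the low bits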
out.write_uint(len(env.full_text) << 2 | (MP_CODE_NATIVE_VIPER - MP_CODE_BYTECODE))
# MPY: machine code
out.write_bytes(env.full_text)
# MPY: n_qstr_link (assumes little endian)
out.write_uint(len(native_qstr_vals) + len(native_qstr_objs))
for q in range(len(native_qstr_vals)):
off = env.qstr_val_section.addr + q * env.arch.qstr_entry_size
out.write_uint(off << 2)
out.write_qstr(native_qstr_vals[q])
for q in range(len(native_qstr_objs)):
off = env.qstr_obj_section.addr + q * env.arch.word_size
out.write_uint(off << 2 | 3)
out.write_qstr(native_qstr_objs[q])
# MPY: scope_flags
scope_flags = MP_SCOPE_FLAG_VIPERRELOC
if len(env.full_rodata):
scope_flags |= MP_SCOPE_FLAG_VIPERRODATA
if len(env.full_bss):
scope_flags |= MP_SCOPE_FLAG_VIPERBSS
out.write_uint(scope_flags)
# MPY: n_obj
out.write_uint(0)
# MPY: n_raw_code
out.write_uint(0)
# MPY: rodata and/or bss
if len(env.full_rodata):
rodata_const_table_idx = 1
out.write_uint(len(env.full_rodata))
out.write_bytes(env.full_rodata)
if len(env.full_bss):
bss_const_table_idx = bool(env.full_rodata) + 1
out.write_uint(len(env.full_bss))
# MPY: relocation information
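    # consecutive relocations with the same kind and base and adjacent
    # offsets are merged into runs, each emitted as a single record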
prev_kind = None
for base, addr, kind in env.mpy_relocs:
if isinstance(kind, str) and kind.startswith(".text"):
kind = 0
elif kind in (".rodata", ".data.rel.ro"):
if env.arch.separate_rodata:
kind = rodata_const_table_idx
else:
kind = 0
elif isinstance(kind, str) and kind.startswith(".bss"):
kind = bss_const_table_idx
elif kind == "mp_fun_table":
kind = 6
else:
kind = 7 + kind
assert addr % env.arch.word_size == 0, addr
offset = addr // env.arch.word_size
if kind == prev_kind and base == prev_base and offset == prev_offset + 1:
prev_n += 1
prev_offset += 1
else:
if prev_kind is not None:
out.write_reloc(prev_base, prev_offset - prev_n + 1, prev_kind, prev_n)
prev_kind = kind
prev_base = base
prev_offset = offset
prev_n = 1
if prev_kind is not None:
out.write_reloc(prev_base, prev_offset - prev_n + 1, prev_kind, prev_n)
# MPY: sentinel for end of relocations
out.write_bytes(b"\xff")
out.close()
################################################################################
# main
def do_preprocess(args):
if args.output is None:
assert args.files[0].endswith(".c")
args.output = args.files[0][:-1] + "config.h"
static_qstrs, qstr_vals, qstr_objs = extract_qstrs(args.files)
with open(args.output, "w") as f:
print(
"#include <stdint.h>\n"
"typedef uintptr_t mp_uint_t;\n"
"typedef intptr_t mp_int_t;\n"
"typedef uintptr_t mp_off_t;",
file=f,
)
for i, q in enumerate(static_qstrs):
print("#define %s (%u)" % (q, i + 1), file=f)
for i, q in enumerate(sorted(qstr_vals)):
print("#define %s (mp_native_qstr_val_table[%d])" % (q, i), file=f)
for i, q in enumerate(sorted(qstr_objs)):
print(
"#define MP_OBJ_NEW_QSTR_%s ((mp_obj_t)mp_native_qstr_obj_table[%d])" % (q, i),
file=f,
)
if args.arch == "xtensawin":
qstr_type = "uint32_t" # esp32 can only read 32-bit values from IRAM
else:
qstr_type = "uint16_t"
print("extern const {} mp_native_qstr_val_table[];".format(qstr_type), file=f)
print("extern const mp_uint_t mp_native_qstr_obj_table[];", file=f)
def do_link(args):
if args.output is None:
assert args.files[0].endswith(".o")
args.output = args.files[0][:-1] + "mpy"
native_qstr_vals = []
native_qstr_objs = []
if args.qstrs is not None:
with open(args.qstrs) as f:
for l in f:
m = re.match(r"#define MP_QSTR_([A-Za-z0-9_]*) \(mp_native_", l)
if m:
native_qstr_vals.append(m.group(1))
else:
m = re.match(r"#define MP_OBJ_NEW_QSTR_MP_QSTR_([A-Za-z0-9_]*)", l)
if m:
native_qstr_objs.append(m.group(1))
log(LOG_LEVEL_2, "qstr vals: " + ", ".join(native_qstr_vals))
log(LOG_LEVEL_2, "qstr objs: " + ", ".join(native_qstr_objs))
env = LinkEnv(args.arch)
try:
for file in args.files:
load_object_file(env, file)
link_objects(env, len(native_qstr_vals), len(native_qstr_objs))
build_mpy(env, env.find_addr("mpy_init"), args.output, native_qstr_vals, native_qstr_objs)
except LinkError as er:
print("LinkError:", er.args[0])
sys.exit(1)
def main():
import argparse
    cmd_parser = argparse.ArgumentParser(description="Link native object files into a .mpy file.")
cmd_parser.add_argument(
"--verbose", "-v", action="count", default=1, help="increase verbosity"
)
cmd_parser.add_argument("--arch", default="x64", help="architecture")
cmd_parser.add_argument("--preprocess", action="store_true", help="preprocess source files")
cmd_parser.add_argument("--qstrs", default=None, help="file defining additional qstrs")
cmd_parser.add_argument(
"--output", "-o", default=None, help="output .mpy file (default to input with .o->.mpy)"
)
cmd_parser.add_argument("files", nargs="+", help="input files")
args = cmd_parser.parse_args()
global log_level
log_level = args.verbose
if args.preprocess:
do_preprocess(args)
else:
do_link(args)
if __name__ == "__main__":
main()
| [((2482, 2516), 'struct.pack', 'struct.pack', (['"""<BI"""', '(233)', '(entry - 5)'], {}), "('<BI', 233, entry - 5)\n", (2493, 2516), False, 'import sys, os, struct, re\n'), ((2845, 2871), 'struct.pack', 'struct.pack', (['"""<HH"""', 'b0', 'b1'], {}), "('<HH', b0, b1)\n", (2856, 2871), False, 'import sys, os, struct, re\n'), ((2976, 3023), 'struct.pack', 'struct.pack', (['"""<BH"""', '(jump_op & 255)', '(jump_op >> 8)'], {}), "('<BH', jump_op & 255, jump_op >> 8)\n", (2987, 3023), False, 'import sys, os, struct, re\n'), ((35721, 35787), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run scripts on the pyboard."""'}), "(description='Run scripts on the pyboard.')\n", (35744, 35787), False, 'import argparse\n'), ((1329, 1354), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1344, 1354), False, 'import sys, os, struct, re\n'), ((18532, 18581), 'struct.unpack_from', 'struct.unpack_from', (['"""<I"""', 'env.full_text', 'r_offset'], {}), "('<I', env.full_text, r_offset)\n", (18550, 18581), False, 'import sys, os, struct, re\n'), ((18590, 18668), 'struct.pack_into', 'struct.pack_into', (['"""<I"""', 'env.full_text', 'r_offset', '(existing + reloc & 4294967295)'], {}), "('<I', env.full_text, r_offset, existing + reloc & 4294967295)\n", (18606, 18668), False, 'import sys, os, struct, re\n'), ((21018, 21065), 'struct.unpack_from', 'struct.unpack_from', (['struct_type', 'data', 'r_offset'], {}), '(struct_type, data, r_offset)\n', (21036, 21065), False, 'import sys, os, struct, re\n'), ((21737, 21755), 'elftools.elf.elffile.ELFFile', 'elffile.ELFFile', (['f'], {}), '(f)\n', (21752, 21755), False, 'from elftools.elf import elffile\n'), ((6763, 6786), 'makeqstrdata.qstr_escape', 'qstrutil.qstr_escape', (['q'], {}), '(q)\n', (6783, 6786), True, 'import makeqstrdata as qstrutil\n'), ((18724, 18774), 'struct.unpack_from', 'struct.unpack_from', (['"""<HH"""', 'env.full_text', 'r_offset'], {}), "('<HH', env.full_text, r_offset)\n", (18742, 18774), False, 'import sys, os, struct, re\n'), ((19059, 19117), 'struct.pack_into', 'struct.pack_into', (['"""<HH"""', 'env.full_text', 'r_offset', 'b_h', 'b_l'], {}), "('<HH', env.full_text, r_offset, b_h, b_l)\n", (19075, 19117), False, 'import sys, os, struct, re\n'), ((21156, 21218), 'struct.pack_into', 'struct.pack_into', (['struct_type', 'data', 'r_offset', '(existing + addr)'], {}), '(struct_type, data, r_offset, existing + addr)\n', (21172, 21218), False, 'import sys, os, struct, re\n'), ((35657, 35668), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (35665, 35668), False, 'import sys, os, struct, re\n'), ((10537, 10586), 'struct.unpack_from', 'struct.unpack_from', (['"""<I"""', 'sec.data', "r['r_offset']"], {}), "('<I', sec.data, r['r_offset'])\n", (10555, 10586), False, 'import sys, os, struct, re\n'), ((34833, 34893), 're.match', 're.match', (['"""#define MP_QSTR_([A-Za-z0-9_]*) \\\\(mp_native_"""', 'l'], {}), "('#define MP_QSTR_([A-Za-z0-9_]*) \\\\(mp_native_', l)\n", (34841, 34893), False, 'import sys, os, struct, re\n'), ((6209, 6272), 're.search', 're.search', (['"""MP_OBJ_NEW_QSTR\\\\((MP_QSTR_[A-Za-z0-9_]*)\\\\)"""', 'line'], {}), "('MP_OBJ_NEW_QSTR\\\\((MP_QSTR_[A-Za-z0-9_]*)\\\\)', line)\n", (6218, 6272), False, 'import sys, os, struct, re\n'), ((11552, 11589), 'struct.unpack_from', 'struct.unpack_from', (['"""<I"""', 'sec.data', 'i'], {}), "('<I', sec.data, i)\n", (11570, 11589), False, 'import sys, os, struct, re\n'), ((35018, 35080), 're.match', 're.match', (['"""#define 
MP_OBJ_NEW_QSTR_MP_QSTR_([A-Za-z0-9_]*)"""', 'l'], {}), "('#define MP_OBJ_NEW_QSTR_MP_QSTR_([A-Za-z0-9_]*)', l)\n", (35026, 35080), False, 'import sys, os, struct, re\n'), ((6397, 6437), 're.search', 're.search', (['"""MP_QSTR_[A-Za-z0-9_]*"""', 'line'], {}), "('MP_QSTR_[A-Za-z0-9_]*', line)\n", (6406, 6437), False, 'import sys, os, struct, re\n'), ((28900, 28934), 'makeqstrdata.static_qstr_list.index', 'qstrutil.static_qstr_list.index', (['s'], {}), '(s)\n', (28931, 28934), True, 'import makeqstrdata as qstrutil\n')] |
edzzn/Manejo_Liberia | ui_mant_libros.py | c735d35b32fc53839acfc48d4e088e69983edf16 | from PyQt4 import QtGui
from ui_mant_libros_new import NewLibrosWindow
from ui_mant_libros_edit import EditLibrosWindow
from ui_mant_libros_id_edit import GetIdEditWindow
# Debug only
import inspect
class MenuLibros(QtGui.QWidget):
"""
    Menu window for editing books
"""
def __init__(self):
super(MenuLibros, self).__init__()
self.createButtons()
self.setWindowTitle('Mantenimiento Libros')
self.setWindowIcon(QtGui.QIcon('images/user-plus.png'))
self.setWindowTitle("Mantenimiento Libros")
self.setGeometry(650, 300, 150, 100)
def createButtons(self):
btn_new_libros = QtGui.QPushButton('Nuevo')
btn_new_libros.clicked.connect(self.open_new_libros_window)
btn_edit_libros = QtGui.QPushButton('Editar')
btn_edit_libros.clicked.connect(self.open_edit_libros_window)
btn_list_libros = QtGui.QPushButton('Listar')
btn_list_libros.clicked.connect(self.close)
btn_delete_libros = QtGui.QPushButton('Eliminar')
btn_delete_libros.clicked.connect(self.close)
hbox = QtGui.QHBoxLayout()
hbox.addWidget(btn_new_libros)
hbox.addWidget(btn_edit_libros)
hbox.addWidget(btn_list_libros)
hbox.addWidget(btn_delete_libros)
vbox = QtGui.QVBoxLayout()
vbox.addLayout(hbox)
self.setLayout(vbox)
def open_new_libros_window(self):
self.new_libros_view = NewLibrosWindow()
self.new_libros_view.show()
print(inspect.stack()[0][3])
self.close()
def open_edit_libros_window(self):
self.edit_libros_view = GetIdEditWindow()
self.edit_libros_view.show()
print(inspect.stack()[0][3])
self.close()
def open_list_reserva_window(self):
# self.new_reserva_view = NewReserva()
# self.new_reserva_view.show()
print(inspect.stack()[0][3])
self.close()
def open_delete_reserva_window(self):
# self.new_reserva_view = NewReserva()
# self.new_reserva_view.show()
print(inspect.stack()[0][3])
self.close()
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
mainWin = MenuLibros()
mainWin.show()
sys.exit(app.exec_())
| [((2188, 2216), 'PyQt4.QtGui.QApplication', 'QtGui.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (2206, 2216), False, 'from PyQt4 import QtGui\n'), ((658, 684), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['"""Nuevo"""'], {}), "('Nuevo')\n", (675, 684), False, 'from PyQt4 import QtGui\n'), ((780, 807), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['"""Editar"""'], {}), "('Editar')\n", (797, 807), False, 'from PyQt4 import QtGui\n'), ((905, 932), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['"""Listar"""'], {}), "('Listar')\n", (922, 932), False, 'from PyQt4 import QtGui\n'), ((1014, 1043), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['"""Eliminar"""'], {}), "('Eliminar')\n", (1031, 1043), False, 'from PyQt4 import QtGui\n'), ((1115, 1134), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (1132, 1134), False, 'from PyQt4 import QtGui\n'), ((1311, 1330), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (1328, 1330), False, 'from PyQt4 import QtGui\n'), ((1460, 1477), 'ui_mant_libros_new.NewLibrosWindow', 'NewLibrosWindow', ([], {}), '()\n', (1475, 1477), False, 'from ui_mant_libros_new import NewLibrosWindow\n'), ((1645, 1662), 'ui_mant_libros_id_edit.GetIdEditWindow', 'GetIdEditWindow', ([], {}), '()\n', (1660, 1662), False, 'from ui_mant_libros_id_edit import GetIdEditWindow\n'), ((469, 504), 'PyQt4.QtGui.QIcon', 'QtGui.QIcon', (['"""images/user-plus.png"""'], {}), "('images/user-plus.png')\n", (480, 504), False, 'from PyQt4 import QtGui\n'), ((1528, 1543), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (1541, 1543), False, 'import inspect\n'), ((1714, 1729), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (1727, 1729), False, 'import inspect\n'), ((1900, 1915), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (1913, 1915), False, 'import inspect\n'), ((2088, 2103), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (2101, 2103), False, 'import inspect\n')] |
MrStonkus/PokerAi | env/gym_poker_ai/envs/tests/holdem_calc/holdem_argparser.py | 9c43c3a7a9c3ac01f4ee9e3f1f95f0786c35de99 | import argparse
import re
import holdem_calc.holdem_functions as holdem_functions
# Wrapper class which holds the arguments for library calls
# Mocks actual argparse object
class LibArgs:
def __init__(self, board, exact, num, input_file, hole_cards):
self.board = board
self.cards = hole_cards
self.n = num
self.input = input_file
self.exact = exact
# Parses arguments passed to holdem_calc as a library call
def parse_lib_args(args):
error_check_arguments(args)
# Parse hole cards and board
hole_cards, board = None, None
if not args.input:
hole_cards, board = parse_cards(args.cards, args.board)
return hole_cards, args.n, args.exact, board, args.input
# Parses command line arguments to holdem_calc
def parse_args():
# Define possible command line arguments
parser = argparse.ArgumentParser(
description="Find the odds that a Texas Hold'em hand will win. Note "
"that cards must be given in the following format: As, Jc, Td, 3h.")
parser.add_argument("cards", nargs="*", type=str, metavar="hole card",
help="Hole cards you want to find the odds for.")
parser.add_argument("-b", "--board", nargs="*", type=str, metavar="card",
help="Add board cards")
parser.add_argument("-e", "--exact", action="store_true",
help="Find exact odds by enumerating every possible "
"board")
parser.add_argument("-n", type=int, default=100000,
help="Run N Monte Carlo simulations")
parser.add_argument("-i", "--input", type=str,
help="Read hole cards and boards from an input file. "
"Commandline arguments for hole cards and board will "
"be ignored")
# Parse command line arguments and check for errors
args = parser.parse_args()
error_check_arguments(args)
# Parse hole cards and board
hole_cards, board = None, None
if not args.input:
hole_cards, board = parse_cards(args.cards, args.board)
return hole_cards, args.n, args.exact, board, args.input
# Parses a line taken from the input file and returns the hole cards and board
def parse_file_args(line):
if line is None or len(line) == 0:
print(line)
print("Invalid format")
exit()
values = line.split("|")
if len(values) > 2 or len(values) < 1:
print(line)
print("Invalid format")
exit()
hole_cards = values[0].split()
all_cards = list(hole_cards)
board = None
if len(values) == 2:
board = values[1].split()
all_cards.extend(board)
error_check_cards(all_cards)
return parse_cards(hole_cards, board)
# Parses hole cards and board
def parse_cards(cards, board):
hole_cards = create_hole_cards(cards)
if board:
board = parse_board(board)
return hole_cards, board
# Error check the command line arguments
def error_check_arguments(args):
# Check that the number of Monte Carlo simulations is a positive number
if args.n <= 0:
print("Number of Monte Carlo simulations must be positive.")
exit()
# Check that we can open the specified input file
if args.input:
file_name = args.input
try:
input_file = open(file_name, 'r')
input_file.close()
except IOError:
print("Error opening file " + file_name)
exit()
# Check to make sure all cards are of a valid format
all_cards = list(args.cards)
if args.board:
all_cards.extend(args.board)
error_check_cards(all_cards)
# Checking that the hole cards + board are formatted properly and unique
def error_check_cards(all_cards):
card_re = re.compile('[AKQJT98765432][scdh]')
for card in all_cards:
if card != "?" and not card_re.match(card):
print("Invalid card given.")
exit()
else:
if all_cards.count(card) != 1 and card != "?":
print("The cards given must be unique.")
exit()
# Returns tuple of two-tuple hole_cards: e.g. ((As, Ks), (Ad, Kd), (Jh, Th))
def create_hole_cards(raw_hole_cards):
# Checking that there are an even number of hole cards
if (raw_hole_cards is None or len(raw_hole_cards) < 2 or
len(raw_hole_cards) % 2):
print("You must provide a non-zero even number of hole cards")
exit()
# Create two-tuples out of hole cards
hole_cards, current_hole_cards = [], []
for hole_card in raw_hole_cards:
if hole_card != "?":
current_card = holdem_functions.Card(hole_card)
current_hole_cards.append(current_card)
else:
current_hole_cards.append(None)
if len(current_hole_cards) == 2:
if None in current_hole_cards:
if (current_hole_cards[0] is not None or
current_hole_cards[1] is not None):
print("Unknown hole cards must come in pairs")
exit()
hole_cards.append((current_hole_cards[0], current_hole_cards[1]))
current_hole_cards = []
    if hole_cards.count((None, None)) > 1:
        print("Can only have one set of unknown hole cards")
        # exit on invalid input, matching the other validation paths
        exit()
return tuple(hole_cards)
# Returns list of board cards: e.g. [As Ks Ad Kd]
def parse_board(board):
if len(board) > 5 or len(board) < 3:
print("Board must have a length of 3, 4, or 5.")
exit()
if "?" in board:
print("Board cannot have unknown cards")
exit()
return create_cards(board)
# Instantiates new cards from the arguments and returns them in a tuple
def create_cards(card_strings):
return [holdem_functions.Card(arg) for arg in card_strings]
| [((858, 1027), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Find the odds that a Texas Hold\'em hand will win. Note that cards must be given in the following format: As, Jc, Td, 3h."""'}), '(description=\n "Find the odds that a Texas Hold\'em hand will win. Note that cards must be given in the following format: As, Jc, Td, 3h."\n )\n', (881, 1027), False, 'import argparse\n'), ((4551, 4586), 're.compile', 're.compile', (['"""[AKQJT98765432][scdh]"""'], {}), "('[AKQJT98765432][scdh]')\n", (4561, 4586), False, 'import re\n'), ((6531, 6557), 'holdem_calc.holdem_functions.Card', 'holdem_functions.Card', (['arg'], {}), '(arg)\n', (6552, 6557), True, 'import holdem_calc.holdem_functions as holdem_functions\n'), ((5421, 5453), 'holdem_calc.holdem_functions.Card', 'holdem_functions.Card', (['hole_card'], {}), '(hole_card)\n', (5442, 5453), True, 'import holdem_calc.holdem_functions as holdem_functions\n')] |
KarlDorogy/Cisc-327-Course-Project-Group-20 | qbay/controllers.py | 0e2c003f78bbdd932381a7a8cbc3aa757da18b24 | from flask import render_template, request, session, redirect
from qbay.models import *
from datetime import date
from qbay import app
def authenticate(inner_function):
"""
:param inner_function: any python function that accepts a user object
Wrap any python function and check the current session to see if
    the user has logged in. If logged in, it will call the inner_function
with the logged in user object.
To wrap a function, we can put a decoration on that function.
Example:
@authenticate
def home_page(user):
pass
"""
def wrapped_inner():
        # check whether we stored the login key in the session
if 'logged_in' in session:
email = session['logged_in']
try:
user = User.query.filter_by(email=email).one_or_none()
if user:
# if the user exists, call the inner_function
# with user as parameter
return inner_function(user)
            except Exception:
                return redirect('/login')
            # the session has an email but no matching user; force a fresh login
            return redirect('/login')
else:
# else, redirect to the login page
return redirect('/login')
# return the wrapped version of the inner_function:
return wrapped_inner
@app.route('/login', methods=['GET'])
def login_get():
return render_template('login.html', message='Please login')
@app.route('/login', methods=['POST'])
def login_post():
email = request.form.get('email')
password = request.form.get('password')
user = login(email, password)
if user:
session['logged_in'] = user.email
"""
        Session is an object that contains shared information
        between a user's browser and the end server.
        Typically it is packed and stored in the browser cookies.
        It will be passed along with every request the browser makes
        to this service. Here we store the user object into the
        session, so we can tell if the client has already logged in
        on subsequent requests.
"""
# success! go back to the home page
# code 303 is to force a 'GET' request
return redirect('/', code=303)
else:
return render_template('login.html', message='login failed')
@app.route('/')
@authenticate
def home(user):
# gets a list of products that the logged in user owns
user_products = get_products(user.email)
# gets list of user purchased products
products = get_transaction(user.email)
return render_template('index.html', user=user,
owned_products=user_products, orders=products)
@app.route('/register', methods=['GET'])
def register_get():
# templates are stored in the templates folder
return render_template('register.html', message='')
@app.route('/register', methods=['POST'])
def register_post():
email = request.form.get('email')
name = request.form.get('name')
password = request.form.get('password')
password2 = request.form.get('password2')
error_message = None
if password != password2:
error_message = "The passwords do not match"
else:
# use backend api to register the user
success = register(name, email, password)
if not success:
error_message = "Registration Failed."
    # if there are any error messages when registering the new user
    # at the backend, go back to the register page.
if error_message:
return render_template('register.html', message=error_message)
else:
return redirect('/login')
@app.route('/updateuser', methods=['Get'])
def update_user_get():
return render_template('updateuser.html',
message='Please enter new info below:')
@app.route('/updateuser', methods=['POST'])
def update_user_post():
# retrieves current logged in user's email
user_email = session['logged_in']
name = request.form.get('name')
shipping_address = request.form.get('shippingaddress')
postal_code = request.form.get('postalcode')
error_message = None
# use backend api to update the user attributes
success = update_user(user_email, name, shipping_address, postal_code)
if not success:
error_message = "Updating of User Profile Failed."
    # if there are any error messages when updating the user profile
    # at the backend, go back to the update page.
if error_message:
return render_template('updateuser.html', message=error_message)
else:
return redirect('/', code=303)
@app.route('/updateproduct', methods=['Get'])
def update_product_get():
return render_template('updateproduct.html',
message="Please enter new product info below:",
pName=request.args.get('pName'))
@app.route('/updateproduct', methods=['POST'])
def update_product_post():
new_price = int(request.form.get('new_price'))
new_title = request.form.get('new_title')
new_description = request.form.get('new_description')
title = request.form.get('title')
# use backend api to update the user attributes
success = update_product(new_price, new_title, new_description, title)
error_message = None
if not success:
error_message = "Product Update Failed"
    # if there are any error messages when updating the product
    # at the backend, go back to the update product page.
if error_message:
return render_template('updateproduct.html', message=error_message,
pName=request.args.get('pName'))
else:
return redirect('/', code=303)
@app.route('/createproduct', methods=['Get'])
def create_product_get():
return render_template('createproduct.html',
message='Please enter product info below:')
@app.route('/createproduct', methods=['POST'])
def create_product_post():
# retrieves current logged in user's email
owner_email = session['logged_in']
today = date.today()
    # format today's date as YYYY-MM-DD
    last_modified_date = today.strftime("%Y-%m-%d")
price = int(request.form.get('price'))
title = request.form.get('title')
description = request.form.get('description')
error_message = None
# use backend api to update the user attributes
success = create_product(price, title, description,
last_modified_date, owner_email)
if not success:
error_message = "Product Creation Failed."
    # if there are any error messages when creating the product
    # at the backend, go back to the create product page.
if error_message:
return render_template('createproduct.html', message=error_message)
else:
return redirect('/', code=303)
@app.route('/listings', methods=['GET'])
def available_products_get():
# retrieves current logged in user's email
user_email = session['logged_in']
# gets other user products that are available to purchase
products = get_listings(user_email)
return render_template('available_products.html',
available_products=products)
@app.route('/placeorder', methods=['GET'])
def place_order_get():
return render_template('placeorder.html',
message="Please confirm the purchase below:",
pTitle=request.args.get('pTitle'),
pPrice=request.args.get('pPrice'))
@app.route('/placeorder', methods=['POST'])
def place_order_post():
new_owner = session['logged_in']
product_title = request.args.get('pTitle')
# use backend api to place the product order
success = place_order(new_owner, product_title)
error_message = None
if not success:
error_message = "Placing Order Failed"
    # if there are any error messages when ordering the product
    # at the backend, go back to the available product listings page.
if error_message:
return render_template('available_products.html',
message=error_message)
else:
return redirect('/', code=303)
@app.route('/logout')
def logout():
if 'logged_in' in session:
session.pop('logged_in', None)
return redirect('/')
| [((1255, 1291), 'qbay.app.route', 'app.route', (['"""/login"""'], {'methods': "['GET']"}), "('/login', methods=['GET'])\n", (1264, 1291), False, 'from qbay import app\n'), ((1377, 1414), 'qbay.app.route', 'app.route', (['"""/login"""'], {'methods': "['POST']"}), "('/login', methods=['POST'])\n", (1386, 1414), False, 'from qbay import app\n'), ((2257, 2271), 'qbay.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (2266, 2271), False, 'from qbay import app\n'), ((2629, 2668), 'qbay.app.route', 'app.route', (['"""/register"""'], {'methods': "['GET']"}), "('/register', methods=['GET'])\n", (2638, 2668), False, 'from qbay import app\n'), ((2799, 2839), 'qbay.app.route', 'app.route', (['"""/register"""'], {'methods': "['POST']"}), "('/register', methods=['POST'])\n", (2808, 2839), False, 'from qbay import app\n'), ((3571, 3612), 'qbay.app.route', 'app.route', (['"""/updateuser"""'], {'methods': "['Get']"}), "('/updateuser', methods=['Get'])\n", (3580, 3612), False, 'from qbay import app\n'), ((3753, 3795), 'qbay.app.route', 'app.route', (['"""/updateuser"""'], {'methods': "['POST']"}), "('/updateuser', methods=['POST'])\n", (3762, 3795), False, 'from qbay import app\n'), ((4545, 4589), 'qbay.app.route', 'app.route', (['"""/updateproduct"""'], {'methods': "['Get']"}), "('/updateproduct', methods=['Get'])\n", (4554, 4589), False, 'from qbay import app\n'), ((4805, 4850), 'qbay.app.route', 'app.route', (['"""/updateproduct"""'], {'methods': "['POST']"}), "('/updateproduct', methods=['POST'])\n", (4814, 4850), False, 'from qbay import app\n'), ((5625, 5669), 'qbay.app.route', 'app.route', (['"""/createproduct"""'], {'methods': "['Get']"}), "('/createproduct', methods=['Get'])\n", (5634, 5669), False, 'from qbay import app\n'), ((5820, 5865), 'qbay.app.route', 'app.route', (['"""/createproduct"""'], {'methods': "['POST']"}), "('/createproduct', methods=['POST'])\n", (5829, 5865), False, 'from qbay import app\n'), ((6842, 6881), 'qbay.app.route', 'app.route', (['"""/listings"""'], {'methods': "['GET']"}), "('/listings', methods=['GET'])\n", (6851, 6881), False, 'from qbay import app\n'), ((7214, 7255), 'qbay.app.route', 'app.route', (['"""/placeorder"""'], {'methods': "['GET']"}), "('/placeorder', methods=['GET'])\n", (7223, 7255), False, 'from qbay import app\n'), ((7528, 7570), 'qbay.app.route', 'app.route', (['"""/placeorder"""'], {'methods': "['POST']"}), "('/placeorder', methods=['POST'])\n", (7537, 7570), False, 'from qbay import app\n'), ((8188, 8208), 'qbay.app.route', 'app.route', (['"""/logout"""'], {}), "('/logout')\n", (8197, 8208), False, 'from qbay import app\n'), ((1320, 1373), 'flask.render_template', 'render_template', (['"""login.html"""'], {'message': '"""Please login"""'}), "('login.html', message='Please login')\n", (1335, 1373), False, 'from flask import render_template, request, session, redirect\n'), ((1445, 1470), 'flask.request.form.get', 'request.form.get', (['"""email"""'], {}), "('email')\n", (1461, 1470), False, 'from flask import render_template, request, session, redirect\n'), ((1486, 1514), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (1502, 1514), False, 'from flask import render_template, request, session, redirect\n'), ((2510, 2601), 'flask.render_template', 'render_template', (['"""index.html"""'], {'user': 'user', 'owned_products': 'user_products', 'orders': 'products'}), "('index.html', user=user, owned_products=user_products,\n orders=products)\n", (2525, 2601), False, 'from flask import render_template, 
request, session, redirect\n'), ((2751, 2795), 'flask.render_template', 'render_template', (['"""register.html"""'], {'message': '""""""'}), "('register.html', message='')\n", (2766, 2795), False, 'from flask import render_template, request, session, redirect\n'), ((2873, 2898), 'flask.request.form.get', 'request.form.get', (['"""email"""'], {}), "('email')\n", (2889, 2898), False, 'from flask import render_template, request, session, redirect\n'), ((2910, 2934), 'flask.request.form.get', 'request.form.get', (['"""name"""'], {}), "('name')\n", (2926, 2934), False, 'from flask import render_template, request, session, redirect\n'), ((2950, 2978), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (2966, 2978), False, 'from flask import render_template, request, session, redirect\n'), ((2995, 3024), 'flask.request.form.get', 'request.form.get', (['"""password2"""'], {}), "('password2')\n", (3011, 3024), False, 'from flask import render_template, request, session, redirect\n'), ((3647, 3721), 'flask.render_template', 'render_template', (['"""updateuser.html"""'], {'message': '"""Please enter new info below:"""'}), "('updateuser.html', message='Please enter new info below:')\n", (3662, 3721), False, 'from flask import render_template, request, session, redirect\n'), ((3918, 3942), 'flask.request.form.get', 'request.form.get', (['"""name"""'], {}), "('name')\n", (3934, 3942), False, 'from flask import render_template, request, session, redirect\n'), ((3966, 4001), 'flask.request.form.get', 'request.form.get', (['"""shippingaddress"""'], {}), "('shippingaddress')\n", (3982, 4001), False, 'from flask import render_template, request, session, redirect\n'), ((4020, 4050), 'flask.request.form.get', 'request.form.get', (['"""postalcode"""'], {}), "('postalcode')\n", (4036, 4050), False, 'from flask import render_template, request, session, redirect\n'), ((4945, 4974), 'flask.request.form.get', 'request.form.get', (['"""new_title"""'], {}), "('new_title')\n", (4961, 4974), False, 'from flask import render_template, request, session, redirect\n'), ((4997, 5032), 'flask.request.form.get', 'request.form.get', (['"""new_description"""'], {}), "('new_description')\n", (5013, 5032), False, 'from flask import render_template, request, session, redirect\n'), ((5045, 5070), 'flask.request.form.get', 'request.form.get', (['"""title"""'], {}), "('title')\n", (5061, 5070), False, 'from flask import render_template, request, session, redirect\n'), ((5707, 5793), 'flask.render_template', 'render_template', (['"""createproduct.html"""'], {'message': '"""Please enter product info below:"""'}), "('createproduct.html', message=\n 'Please enter product info below:')\n", (5722, 5793), False, 'from flask import render_template, request, session, redirect\n'), ((5991, 6003), 'datetime.date.today', 'date.today', ([], {}), '()\n', (6001, 6003), False, 'from datetime import date\n'), ((6230, 6255), 'flask.request.form.get', 'request.form.get', (['"""title"""'], {}), "('title')\n", (6246, 6255), False, 'from flask import render_template, request, session, redirect\n'), ((6274, 6305), 'flask.request.form.get', 'request.form.get', (['"""description"""'], {}), "('description')\n", (6290, 6305), False, 'from flask import render_template, request, session, redirect\n'), ((7111, 7182), 'flask.render_template', 'render_template', (['"""available_products.html"""'], {'available_products': 'products'}), "('available_products.html', available_products=products)\n", (7126, 7182), False, 'from flask 
import render_template, request, session, redirect\n'), ((7652, 7678), 'flask.request.args.get', 'request.args.get', (['"""pTitle"""'], {}), "('pTitle')\n", (7668, 7678), False, 'from flask import render_template, request, session, redirect\n'), ((8304, 8317), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (8312, 8317), False, 'from flask import render_template, request, session, redirect\n'), ((2151, 2174), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(303)'}), "('/', code=303)\n", (2159, 2174), False, 'from flask import render_template, request, session, redirect\n'), ((2200, 2253), 'flask.render_template', 'render_template', (['"""login.html"""'], {'message': '"""login failed"""'}), "('login.html', message='login failed')\n", (2215, 2253), False, 'from flask import render_template, request, session, redirect\n'), ((3468, 3523), 'flask.render_template', 'render_template', (['"""register.html"""'], {'message': 'error_message'}), "('register.html', message=error_message)\n", (3483, 3523), False, 'from flask import render_template, request, session, redirect\n'), ((3549, 3567), 'flask.redirect', 'redirect', (['"""/login"""'], {}), "('/login')\n", (3557, 3567), False, 'from flask import render_template, request, session, redirect\n'), ((4435, 4492), 'flask.render_template', 'render_template', (['"""updateuser.html"""'], {'message': 'error_message'}), "('updateuser.html', message=error_message)\n", (4450, 4492), False, 'from flask import render_template, request, session, redirect\n'), ((4518, 4541), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(303)'}), "('/', code=303)\n", (4526, 4541), False, 'from flask import render_template, request, session, redirect\n'), ((4898, 4927), 'flask.request.form.get', 'request.form.get', (['"""new_price"""'], {}), "('new_price')\n", (4914, 4927), False, 'from flask import render_template, request, session, redirect\n'), ((5598, 5621), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(303)'}), "('/', code=303)\n", (5606, 5621), False, 'from flask import render_template, request, session, redirect\n'), ((6191, 6216), 'flask.request.form.get', 'request.form.get', (['"""price"""'], {}), "('price')\n", (6207, 6216), False, 'from flask import render_template, request, session, redirect\n'), ((6729, 6789), 'flask.render_template', 'render_template', (['"""createproduct.html"""'], {'message': 'error_message'}), "('createproduct.html', message=error_message)\n", (6744, 6789), False, 'from flask import render_template, request, session, redirect\n'), ((6815, 6838), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(303)'}), "('/', code=303)\n", (6823, 6838), False, 'from flask import render_template, request, session, redirect\n'), ((8038, 8103), 'flask.render_template', 'render_template', (['"""available_products.html"""'], {'message': 'error_message'}), "('available_products.html', message=error_message)\n", (8053, 8103), False, 'from flask import render_template, request, session, redirect\n'), ((8161, 8184), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(303)'}), "('/', code=303)\n", (8169, 8184), False, 'from flask import render_template, request, session, redirect\n'), ((8262, 8292), 'flask.session.pop', 'session.pop', (['"""logged_in"""', 'None'], {}), "('logged_in', None)\n", (8273, 8292), False, 'from flask import render_template, request, session, redirect\n'), ((1151, 1169), 'flask.redirect', 'redirect', (['"""/login"""'], {}), "('/login')\n", (1159, 1169), False, 'from flask import render_template, request, 
session, redirect\n'), ((4775, 4800), 'flask.request.args.get', 'request.args.get', (['"""pName"""'], {}), "('pName')\n", (4791, 4800), False, 'from flask import render_template, request, session, redirect\n'), ((7434, 7460), 'flask.request.args.get', 'request.args.get', (['"""pTitle"""'], {}), "('pTitle')\n", (7450, 7460), False, 'from flask import render_template, request, session, redirect\n'), ((7497, 7523), 'flask.request.args.get', 'request.args.get', (['"""pPrice"""'], {}), "('pPrice')\n", (7513, 7523), False, 'from flask import render_template, request, session, redirect\n'), ((5546, 5571), 'flask.request.args.get', 'request.args.get', (['"""pName"""'], {}), "('pName')\n", (5562, 5571), False, 'from flask import render_template, request, session, redirect\n'), ((1052, 1070), 'flask.redirect', 'redirect', (['"""/login"""'], {}), "('/login')\n", (1060, 1070), False, 'from flask import render_template, request, session, redirect\n')] |
stadtulm/cykel | gbfs/serializers.py | b292d958330279654c49beafc3f95a0067274472 | from datetime import timedelta
from django.utils.timezone import now
from preferences import preferences
from rest_framework import fields, serializers
from bikesharing.models import Bike, Station, VehicleType
from cykel.serializers import EnumFieldSerializer
class TimestampSerializer(fields.CharField):
def to_representation(self, value):
return value.timestamp()
class GbfsFreeBikeStatusSerializer(serializers.HyperlinkedModelSerializer):
bike_id = serializers.CharField(source="non_static_bike_uuid", read_only=True)
vehicle_type_id = serializers.CharField(read_only=True)
last_reported = TimestampSerializer(read_only=True)
class Meta:
model = Bike
fields = (
"bike_id",
"vehicle_type_id",
"current_range_meters",
"last_reported",
)
def to_representation(self, instance):
representation = super().to_representation(instance)
        # defined by GBFS 2.1: the field is required only if the vehicle has a motor
if (
instance.vehicle_type is not None
and instance.vehicle_type.propulsion_type
== VehicleType.PropulsionType.HUMAN
):
representation.pop("current_range_meters")
        # default to False; TODO: maybe make this configurable later
        representation["is_reserved"] = False
        # default to False; TODO: maybe make this configurable later
        representation["is_disabled"] = False
public_geolocation = instance.public_geolocation()
if public_geolocation is not None:
pos = public_geolocation.geo
if pos and pos.x and pos.y:
representation["lat"] = pos.y
representation["lon"] = pos.x
return representation # only return bikes with public geolocation
class GbfsVehicleOnStationSerializer(GbfsFreeBikeStatusSerializer):
def to_representation(self, instance):
representation = super().to_representation(instance)
if representation is None:
return None
representation.pop("lat")
representation.pop("lon")
return representation
class GbfsStationInformationSerializer(serializers.HyperlinkedModelSerializer):
name = serializers.CharField(source="station_name", read_only=True)
capacity = serializers.IntegerField(source="max_bikes", read_only=True)
station_id = serializers.CharField(source="id", read_only=True)
class Meta:
model = Station
fields = (
"name",
"capacity",
"station_id",
)
def to_representation(self, instance):
representation = super().to_representation(instance)
if (
instance.location is not None
and instance.location.x
and instance.location.y
):
representation["lat"] = instance.location.y
representation["lon"] = instance.location.x
return representation
class GbfsStationStatusSerializer(serializers.HyperlinkedModelSerializer):
station_id = serializers.CharField(source="id", read_only=True)
vehicles = serializers.SerializerMethodField()
def get_vehicles(self, obj):
        # if configured, filter out vehicles whose last location report
        # is older than the allowed silent time period
bsp = preferences.BikeSharePreferences
if bsp.gbfs_hide_bikes_after_location_report_silence:
available_bikes = obj.bike_set.filter(
availability_status=Bike.Availability.AVAILABLE,
last_reported__gte=now()
- timedelta(hours=bsp.gbfs_hide_bikes_after_location_report_hours),
)
else:
available_bikes = obj.bike_set.filter(
availability_status=Bike.Availability.AVAILABLE
)
vehicles = GbfsVehicleOnStationSerializer(available_bikes, many=True).data
return list(filter(lambda val: val is not None, vehicles))
class Meta:
model = Station
fields = (
"station_id",
"vehicles",
)
def to_representation(self, instance):
representation = super().to_representation(instance)
representation["num_bikes_available"] = len(representation["vehicles"])
representation["num_docks_available"] = (
instance.max_bikes - representation["num_bikes_available"]
)
if representation["num_bikes_available"] > 0:
representation["last_reported"] = max(
(
vehicle["last_reported"]
if vehicle["last_reported"] is not None
else 0
)
for vehicle in representation["vehicles"]
)
else:
            # If no bike is at the station, last_reported is the current time.
            # It is unclear whether this is the intended behavior of the field
            # or whether it should be the timestamp of the last bike removed,
            # but the latter is not easy to implement.
representation["last_reported"] = int(now().timestamp())
def drop_last_reported(obj):
obj.pop("last_reported")
return obj
representation["vehicles"] = list(
map(drop_last_reported, representation["vehicles"])
)
        status = instance.status == Station.Status.ACTIVE
representation["is_installed"] = status
representation["is_renting"] = status
representation["is_returning"] = status
return representation


class GbfsVehicleTypeSerializer(serializers.HyperlinkedModelSerializer):
vehicle_type_id = serializers.CharField(source="id", read_only=True)
form_factor = EnumFieldSerializer(
read_only=True,
mapping={
VehicleType.FormFactor.BIKE: "bicycle",
VehicleType.FormFactor.ESCOOTER: "scooter",
VehicleType.FormFactor.CAR: "car",
VehicleType.FormFactor.MOPED: "moped",
VehicleType.FormFactor.OTHER: "other",
},
)
propulsion_type = EnumFieldSerializer(
read_only=True,
mapping={
VehicleType.PropulsionType.HUMAN: "human",
VehicleType.PropulsionType.ELECTRIC_ASSIST: "electric_assist",
VehicleType.PropulsionType.ELECTRIC: "electric",
VehicleType.PropulsionType.COMBUSTION: "combustion",
},
)
def to_representation(self, instance):
        data = super().to_representation(instance)
        # Defined by GBFS 2.1: max_range_meters is required only if the vehicle has a motor.
if instance.propulsion_type == VehicleType.PropulsionType.HUMAN:
data.pop("max_range_meters")
return data
class Meta:
model = VehicleType
fields = (
"vehicle_type_id",
"form_factor",
"propulsion_type",
"max_range_meters",
"name",
)
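

# --- Hedged usage sketch (not part of the original module) ---
# A minimal illustration of how these serializers could feed GBFS endpoints,
# assuming plain function-based views; the view name and URL layout below
# are hypothetical.
#
# from django.http import JsonResponse
#
# def gbfs_free_bike_status(request):
#     bikes = Bike.objects.filter(availability_status=Bike.Availability.AVAILABLE)
#     vehicles = GbfsFreeBikeStatusSerializer(bikes, many=True).data
#     # Bikes without a public geolocation serialize to None and are dropped.
#     vehicles = [v for v in vehicles if v is not None]
#     return JsonResponse({"data": {"bikes": vehicles}})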
| [((474, 542), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""non_static_bike_uuid"""', 'read_only': '(True)'}), "(source='non_static_bike_uuid', read_only=True)\n", (495, 542), False, 'from rest_framework import fields, serializers\n'), ((565, 602), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (586, 602), False, 'from rest_framework import fields, serializers\n'), ((2254, 2314), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""station_name"""', 'read_only': '(True)'}), "(source='station_name', read_only=True)\n", (2275, 2314), False, 'from rest_framework import fields, serializers\n'), ((2330, 2390), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'source': '"""max_bikes"""', 'read_only': '(True)'}), "(source='max_bikes', read_only=True)\n", (2354, 2390), False, 'from rest_framework import fields, serializers\n'), ((2408, 2458), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""id"""', 'read_only': '(True)'}), "(source='id', read_only=True)\n", (2429, 2458), False, 'from rest_framework import fields, serializers\n'), ((3078, 3128), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""id"""', 'read_only': '(True)'}), "(source='id', read_only=True)\n", (3099, 3128), False, 'from rest_framework import fields, serializers\n'), ((3144, 3179), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (3177, 3179), False, 'from rest_framework import fields, serializers\n'), ((5674, 5724), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""id"""', 'read_only': '(True)'}), "(source='id', read_only=True)\n", (5695, 5724), False, 'from rest_framework import fields, serializers\n'), ((5743, 5998), 'cykel.serializers.EnumFieldSerializer', 'EnumFieldSerializer', ([], {'read_only': '(True)', 'mapping': "{VehicleType.FormFactor.BIKE: 'bicycle', VehicleType.FormFactor.ESCOOTER:\n 'scooter', VehicleType.FormFactor.CAR: 'car', VehicleType.FormFactor.\n MOPED: 'moped', VehicleType.FormFactor.OTHER: 'other'}"}), "(read_only=True, mapping={VehicleType.FormFactor.BIKE:\n 'bicycle', VehicleType.FormFactor.ESCOOTER: 'scooter', VehicleType.\n FormFactor.CAR: 'car', VehicleType.FormFactor.MOPED: 'moped',\n VehicleType.FormFactor.OTHER: 'other'})\n", (5762, 5998), False, 'from cykel.serializers import EnumFieldSerializer\n'), ((6102, 6368), 'cykel.serializers.EnumFieldSerializer', 'EnumFieldSerializer', ([], {'read_only': '(True)', 'mapping': "{VehicleType.PropulsionType.HUMAN: 'human', VehicleType.PropulsionType.\n ELECTRIC_ASSIST: 'electric_assist', VehicleType.PropulsionType.ELECTRIC:\n 'electric', VehicleType.PropulsionType.COMBUSTION: 'combustion'}"}), "(read_only=True, mapping={VehicleType.PropulsionType.\n HUMAN: 'human', VehicleType.PropulsionType.ELECTRIC_ASSIST:\n 'electric_assist', VehicleType.PropulsionType.ELECTRIC: 'electric',\n VehicleType.PropulsionType.COMBUSTION: 'combustion'})\n", (6121, 6368), False, 'from cykel.serializers import EnumFieldSerializer\n'), ((3593, 3598), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (3596, 3598), False, 'from django.utils.timezone import now\n'), ((3617, 3681), 'datetime.timedelta', 'timedelta', ([], {'hours': 'bsp.gbfs_hide_bikes_after_location_report_hours'}), '(hours=bsp.gbfs_hide_bikes_after_location_report_hours)\n', (3626, 3681), 
False, 'from datetime import timedelta\n'), ((5100, 5105), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (5103, 5105), False, 'from django.utils.timezone import now\n')] |
ngomile/anime-downloader | anime_downloader/extractors/vidstream.py | 14d9cebe8aa4eb9d906b937d7c19fedfa737d184 | import logging
import re
from anime_downloader.extractors.base_extractor import BaseExtractor
from anime_downloader.sites import helpers
logger = logging.getLogger(__name__)


class VidStream(BaseExtractor):
def _get_data(self):
        QUALITIES = {
            "360": [],
            "480": [],
            "720": [],
            "1080": [],
        }
        # Collapse accidental extra slashes ('https:////' -> 'https://') in the URL.
        url = self.url.replace('https:////', 'https://')
soup = helpers.get(url).text
regex = r'https://vidstreaming\.io/download\?[^"]*'
        download = re.search(regex, soup).group()
soup = helpers.soupify(helpers.get(download))
links = soup.select('div.mirror_link')[0].select('div.dowload > a')
        for quality in QUALITIES:
            for link in links:
                if quality in link.text:
                    QUALITIES[quality].append(link.get('href'))
        # Fall back to the first mirror link if no quality-specific link was found.
        if any(QUALITIES.values()):
            stream_url = QUALITIES[self.quality[:-1]][0]
        else:
            stream_url = links[0].get('href')
return {
'stream_url': stream_url,
'referer': download
}
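
# Hedged usage sketch (illustrative only): BaseExtractor's constructor
# signature is assumed here, and the URL/quality values are hypothetical.
#
# extractor = VidStream("https://vidstreaming.io/streaming.php?id=...", quality="720p")
# data = extractor._get_data()
# print(data["stream_url"], data["referer"])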
| [((147, 174), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (164, 174), False, 'import logging\n'), ((410, 426), 'anime_downloader.sites.helpers.get', 'helpers.get', (['url'], {}), '(url)\n', (421, 426), False, 'from anime_downloader.sites import helpers\n'), ((585, 606), 'anime_downloader.sites.helpers.get', 'helpers.get', (['download'], {}), '(download)\n', (596, 606), False, 'from anime_downloader.sites import helpers\n'), ((511, 533), 're.search', 're.search', (['regex', 'soup'], {}), '(regex, soup)\n', (520, 533), False, 'import re\n')] |
time-crunched/nlp-toolbox | gui/sum_v1/views.py | b732abd0b2c6b265971efe04a4d70ebe20d2ee8f | import time
import os
from django.shortcuts import render, redirect
from django.http import JsonResponse
from django.views import View
from django.conf import settings
from .forms import File_uploadForm
from .models import File_upload, SummaryRes
from sim_v1.textsummary import TEXTSummary
summary_document_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'media', 'sum_v1', 'upload')
# summary_document_dir = r'C:\Users\ERDIG\Dropbox\Python\nlp_v1\media\sum_v1\upload'
summary_extraction_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'media', 'sum_v1', 'temp')
# summary_extraction_dir = r'C:\Users\ERDIG\Dropbox\Python\nlp_v1\media\sum_v1\temp'
summary_ratio = 0.01


class Upload(View):
def post(self, request):
time.sleep(1) # You don't need this line. This is just to delay the process so you can see the progress bar testing locally.
form = File_uploadForm(self.request.POST, self.request.FILES)
print(form.errors)
if form.is_valid():
document = form.save()
data = {'is_valid': True, 'name': document.file.name, 'url': document.file.url}
else:
data = {'is_valid': False}
return JsonResponse(data)
def get(self, request):
for document in File_upload.objects.all():
document.file.delete()
document.delete()
doc_list = File_upload.objects.all()
form = File_uploadForm()
return render(self.request, 'upload.html', {'documents': doc_list, 'form': form,})


def sum_words(request):
    if request.method == 'POST':
        form = File_uploadForm(request.POST)
        if form.is_valid():
            form.save()
            request.session['sum_words'] = form.cleaned_data['sum_words']
    return redirect('sum_v1:summarize')


def clear_database(request):
for document in File_upload.objects.all():
document.file.delete()
document.delete()
return redirect(request.POST.get('next'))


def Summarize(request):
SummaryRes.objects.all().delete()
summary_word_count = request.session['sum_words']
for document in os.listdir(summary_document_dir):
for filename in os.listdir(summary_extraction_dir):
os.remove(os.path.join(summary_extraction_dir, filename))
text_dir = os.path.join(summary_document_dir, document)
summary = TEXTSummary(text_dir, summary_extraction_dir, summary_ratio, summary_word_count)
summary.textextraction()
summary.summary()
        SummaryRes.objects.create(doc=document, summary=summary.summary)
results = SummaryRes.objects.all()
return render(request, 'summarize.html', {'results': results})
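

# Hedged wiring sketch (illustrative only): the redirect('sum_v1:summarize')
# calls above imply urlpatterns roughly like the following; all paths and
# names other than 'summarize' are assumptions.
#
# from django.urls import path
# from . import views
#
# app_name = 'sum_v1'
# urlpatterns = [
#     path('upload/', views.Upload.as_view(), name='upload'),
#     path('sum-words/', views.sum_words, name='sum_words'),
#     path('summarize/', views.Summarize, name='summarize'),
#     path('clear/', views.clear_database, name='clear_database'),
# ]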
| [((1876, 1904), 'django.shortcuts.redirect', 'redirect', (['"""sum_v1:summarize"""'], {}), "('sum_v1:summarize')\n", (1884, 1904), False, 'from django.shortcuts import render, redirect\n'), ((2224, 2256), 'os.listdir', 'os.listdir', (['summary_document_dir'], {}), '(summary_document_dir)\n', (2234, 2256), False, 'import os\n'), ((2743, 2798), 'django.shortcuts.render', 'render', (['request', '"""summarize.html"""', "{'results': results}"], {}), "(request, 'summarize.html', {'results': results})\n", (2749, 2798), False, 'from django.shortcuts import render, redirect\n'), ((346, 371), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (361, 371), False, 'import os\n'), ((539, 564), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (554, 564), False, 'import os\n'), ((755, 768), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (765, 768), False, 'import time\n'), ((1207, 1225), 'django.http.JsonResponse', 'JsonResponse', (['data'], {}), '(data)\n', (1219, 1225), False, 'from django.http import JsonResponse\n'), ((1468, 1542), 'django.shortcuts.render', 'render', (['self.request', '"""upload.html"""', "{'documents': doc_list, 'form': form}"], {}), "(self.request, 'upload.html', {'documents': doc_list, 'form': form})\n", (1474, 1542), False, 'from django.shortcuts import render, redirect\n'), ((2283, 2317), 'os.listdir', 'os.listdir', (['summary_extraction_dir'], {}), '(summary_extraction_dir)\n', (2293, 2317), False, 'import os\n'), ((2409, 2453), 'os.path.join', 'os.path.join', (['summary_document_dir', 'document'], {}), '(summary_document_dir, document)\n', (2421, 2453), False, 'import os\n'), ((2473, 2558), 'sim_v1.textsummary.TEXTSummary', 'TEXTSummary', (['text_dir', 'summary_extraction_dir', 'summary_ratio', 'summary_word_count'], {}), '(text_dir, summary_extraction_dir, summary_ratio, summary_word_count\n )\n', (2484, 2558), False, 'from sim_v1.textsummary import TEXTSummary\n'), ((2341, 2387), 'os.path.join', 'os.path.join', (['summary_extraction_dir', 'filename'], {}), '(summary_extraction_dir, filename)\n', (2353, 2387), False, 'import os\n')] |
dannyqwertz/home-assistant | homeassistant/components/websocket_api/__init__.py | 688bdc6532e514afbdc8efd1f574a7b5c9e8d280 | """
Websocket based API for Home Assistant.
For more details about this component, please refer to the documentation at
https://developers.home-assistant.io/docs/external_api_websocket.html
"""
from homeassistant.core import callback
from homeassistant.loader import bind_hass
from . import commands, connection, const, decorators, http, messages
DOMAIN = const.DOMAIN
DEPENDENCIES = ('http',)
# Backwards compat / Make it easier to integrate
# pylint: disable=invalid-name
ActiveConnection = connection.ActiveConnection
BASE_COMMAND_MESSAGE_SCHEMA = messages.BASE_COMMAND_MESSAGE_SCHEMA
error_message = messages.error_message
result_message = messages.result_message
async_response = decorators.async_response
require_admin = decorators.require_admin
ws_require_user = decorators.ws_require_user
# pylint: enable=invalid-name


@bind_hass
@callback
def async_register_command(hass, command, handler, schema):
"""Register a websocket command."""
handlers = hass.data.get(DOMAIN)
if handlers is None:
handlers = hass.data[DOMAIN] = {}
handlers[command] = (handler, schema)


async def async_setup(hass, config):
"""Initialize the websocket API."""
hass.http.register_view(http.WebsocketAPIView)
commands.async_register_commands(hass)
return True
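

# Hedged usage sketch (illustrative only): how an integration could register a
# custom command with async_register_command. The command name, handler, and
# schema below are hypothetical.
#
# import voluptuous as vol
#
# @async_response
# async def handle_ping(hass, connection, msg):
#     """Reply to an example ping command."""
#     connection.send_message(result_message(msg['id'], {'pong': True}))
#
# async_register_command(
#     hass, 'example/ping', handle_ping,
#     BASE_COMMAND_MESSAGE_SCHEMA.extend({vol.Required('type'): 'example/ping'}),
# )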
| [] |
Lenders-Cooperative/Django-DocuSign | test_app/settings.py | 676d966065f6e1e64e1f0db9b7691b9f0c5d73a5 | #
# Created on Tue Dec 21 2021
#
# Copyright (c) 2021 Lenders Cooperative, a division of Summit Technology Group, Inc.
#
"""
Django settings for test_app project.
Generated by 'django-admin startproject' using Django 3.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import environ
env = environ.Env()
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "uhtgm(e2y3@5%0x!wy#re#fn+51h*ck88^ocm7d1=hx^@(&7$7"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"los_docusign.apps.LosDocusignConfig",
"test_app.test_organization.apps.TestOrganizationConfig",
"django_lc_utils",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "test_app.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "test_app.wsgi.application"
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": "docusign_new_poc",
"USER": "postgres",
"PASSWORD": "admin",
"HOST": "localhost",
"PORT": "5432",
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = "/static/"
# Note: this redefines the BASE_DIR set near the top of this file (it now
# points at the app directory rather than the project root).
BASE_DIR = Path(__file__).resolve().parent
DOCUSIGN_API_ACCOUNT_ID = env(
"DOCUSIGN_API_ACCOUNT_ID", default="<Docusign API Account Id >"
)
DOCUSIGN_CLIENT_ID = env("DOCUSIGN_CLIENT_ID", default="<Docusign Client Id>")
DOCUSIGN_API_ENDPOINT = env(
"DOCUSIGN_API_ENDPOINT", default="https://demo.docusign.net/restapi/v2.1/accounts/"
)
DOCUSIGN_TOKEN_EXPIRY_IN_SECONDS = env("DOCUSIGN_TOKEN_EXPIRY_IN_SECONDS", default=3600)
DOCUSIGN_AUTHORIZATION_SERVER = env(
"DOCUSIGN_AUTHORIZATION_SERVER", default="account-d.docusign.com"
)
DOCUSIGN_PRIVATE_KEY_FILE = env(
"DOCUSIGN_PRIVATE_KEY_FILE",
default="<Private Key file data>",
)
DOCUSIGN_ENABLE_KBA = env("DOCUSIGN_ENABLE_KBA", default=False)
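
# Hedged usage note (illustrative only): django-environ reads these values from
# the process environment, so the defaults above can be overridden without code
# changes, e.g.:
#
#   export DOCUSIGN_API_ACCOUNT_ID="<your-account-id>"
#   export DOCUSIGN_ENABLE_KBA=True
#
# Note that env("DOCUSIGN_ENABLE_KBA", default=False) returns the *string*
# "True" when the variable is set; use env.bool(...) if a real boolean is needed.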
| [((478, 491), 'environ.Env', 'environ.Env', ([], {}), '()\n', (489, 491), False, 'import environ\n'), ((3481, 3495), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (3485, 3495), False, 'from pathlib import Path\n'), ((569, 583), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (573, 583), False, 'from pathlib import Path\n')] |
doug-lovett/test-schemas-dl | tests/unit/ppr/test_search_query.py | a05e87b983f2c3559c081dd65aff05e2c67e6186 | # Copyright © 2020 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test Suite to ensure the PPR Search Query schema is valid.
"""
import copy
from registry_schemas import validate
from registry_schemas.example_data.ppr import SEARCH_QUERY
def test_valid_search_query_ind_debtor():
"""Assert that the schema is performing as expected for a search by individual debtor."""
query = copy.deepcopy(SEARCH_QUERY)
query['type'] = 'INDIVIDUAL_DEBTOR'
del query['criteria']['debtorName']['business']
del query['criteria']['value']
del query['clientReferenceId']
del query['startDateTime']
del query['endDateTime']
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert is_valid
def test_valid_search_query_bus_debtor():
"""Assert that the schema is performing as expected for a search by business debtor."""
query = copy.deepcopy(SEARCH_QUERY)
query['type'] = 'BUSINESS_DEBTOR'
del query['criteria']['debtorName']['first']
del query['criteria']['debtorName']['second']
del query['criteria']['debtorName']['last']
del query['criteria']['value']
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert is_valid
def test_valid_search_query_airdot():
"""Assert that the schema is performing as expected for a search by aircraft DOT."""
query = copy.deepcopy(SEARCH_QUERY)
query['type'] = 'AIRCRAFT_DOT'
del query['criteria']['debtorName']
query['criteria']['value'] = 'CFYXW'
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert is_valid
def test_valid_search_query_regnum():
"""Assert that the schema is performing as expected for a search by registration number."""
query = copy.deepcopy(SEARCH_QUERY)
query['type'] = 'REGISTRATION_NUMBER'
del query['criteria']['debtorName']
query['criteria']['value'] = '023001B'
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert is_valid
def test_valid_search_query_mhrnum():
"""Assert that the schema is performing as expected for a search by MHR number."""
query = copy.deepcopy(SEARCH_QUERY)
query['type'] = 'MHR_NUMBER'
del query['criteria']['debtorName']
query['criteria']['value'] = '21324'
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert is_valid
def test_valid_search_query_serialnum():
"""Assert that the schema is performing as expected for a search by serial number."""
query = copy.deepcopy(SEARCH_QUERY)
query['type'] = 'SERIAL_NUMBER'
del query['criteria']['debtorName']
query['criteria']['value'] = 'KM8J3CA46JU622994'
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert is_valid
def test_invalid_search_query_missing_type():
"""Assert that an invalid search query fails - type is missing."""
query = copy.deepcopy(SEARCH_QUERY)
del query['type']
del query['criteria']['debtorName']['business']
del query['criteria']['value']
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_search_query_missing_criteria():
"""Assert that an invalid search query fails - criteria is missing."""
query = copy.deepcopy(SEARCH_QUERY)
del query['criteria']
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_search_query_type():
"""Assert that an invalid search query fails - type is invalid."""
query = copy.deepcopy(SEARCH_QUERY)
query['type'] = 'XXXXXXXX'
del query['criteria']['debtorName']['business']
del query['criteria']['value']
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_search_query_criteria():
"""Assert that an invalid search query fails - criteria is invalid."""
query = copy.deepcopy(SEARCH_QUERY)
del query['criteria']['debtorName']['business']
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_search_query_busname():
"""Assert that an invalid search query fails - business name is too short."""
query = copy.deepcopy(SEARCH_QUERY)
del query['criteria']['debtorName']['first']
del query['criteria']['debtorName']['second']
del query['criteria']['debtorName']['last']
del query['criteria']['value']
query['criteria']['debtorName']['business'] = 'XXXX'
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_search_query_value():
"""Assert that an invalid search query fails - value is too long."""
query = copy.deepcopy(SEARCH_QUERY)
del query['criteria']['debtorName']
query['criteria']['value'] = 'XxxxxxxxxxxxxxxxxxxxXxxxxxxxxxxxxxxxxxxxXxxxxxxxxxx'
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_search_query_debtor():
"""Assert that an invalid search query fails - debtor name is invalid."""
query = copy.deepcopy(SEARCH_QUERY)
del query['criteria']['value']
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_search_query_firstname():
"""Assert that an invalid search query fails - debtor first name is too long."""
query = copy.deepcopy(SEARCH_QUERY)
del query['criteria']['value']
del query['criteria']['debtorName']['business']
query['criteria']['debtorName']['first'] = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_search_query_secondname():
"""Assert that an invalid search query fails - debtor second name is too long."""
query = copy.deepcopy(SEARCH_QUERY)
del query['criteria']['value']
del query['criteria']['debtorName']['business']
query['criteria']['debtorName']['second'] = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_search_query_lastname():
"""Assert that an invalid search query fails - debtor last name is too long."""
query = copy.deepcopy(SEARCH_QUERY)
del query['criteria']['value']
del query['criteria']['debtorName']['business']
query['criteria']['debtorName']['last'] = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_search_query_clientref():
"""Assert that an invalid search query fails - client reference id is too long."""
query = copy.deepcopy(SEARCH_QUERY)
del query['criteria']['value']
del query['criteria']['debtorName']['business']
query['clientReferenceId'] = 'XxxxxxxxxxXxxxxxxxxxX'
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_search_query_startts():
"""Assert that an invalid search query fails - start date time format is invalid."""
query = copy.deepcopy(SEARCH_QUERY)
del query['criteria']['value']
del query['criteria']['debtorName']['business']
query['startDateTime'] = 'Xxxxxxxxxx'
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_search_query_endts():
"""Assert that an invalid search query fails - end date time format is invalid."""
query = copy.deepcopy(SEARCH_QUERY)
del query['criteria']['value']
del query['criteria']['debtorName']['business']
query['endDateTime'] = 'Xxxxxxxxxx'
is_valid, errors = validate(query, 'searchQuery', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
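

# Hedged consolidation sketch (illustrative only): the repetitive
# "mutate one field, expect failure" tests above could also be written with
# pytest.mark.parametrize; the lambda mutations below are hypothetical.
#
# import pytest
#
# @pytest.mark.parametrize('mutate', [
#     lambda q: q.pop('type'),
#     lambda q: q.update(type='XXXXXXXX'),
#     lambda q: q.update(startDateTime='Xxxxxxxxxx'),
#     lambda q: q.update(endDateTime='Xxxxxxxxxx'),
# ])
# def test_invalid_search_query_parametrized(mutate):
#     query = copy.deepcopy(SEARCH_QUERY)
#     del query['criteria']['debtorName']['business']
#     del query['criteria']['value']
#     mutate(query)
#     is_valid, _ = validate(query, 'searchQuery', 'ppr')
#     assert not is_valid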
| [((921, 948), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (934, 948), False, 'import copy\n'), ((1195, 1232), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (1203, 1232), False, 'from registry_schemas import validate\n'), ((1495, 1522), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (1508, 1522), False, 'import copy\n'), ((1767, 1804), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (1775, 1804), False, 'from registry_schemas import validate\n'), ((2060, 2087), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (2073, 2087), False, 'import copy\n'), ((2228, 2265), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (2236, 2265), False, 'from registry_schemas import validate\n'), ((2528, 2555), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (2541, 2555), False, 'import copy\n'), ((2705, 2742), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (2713, 2742), False, 'from registry_schemas import validate\n'), ((2996, 3023), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (3009, 3023), False, 'import copy\n'), ((3162, 3199), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (3170, 3199), False, 'from registry_schemas import validate\n'), ((3459, 3486), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (3472, 3486), False, 'import copy\n'), ((3640, 3677), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (3648, 3677), False, 'from registry_schemas import validate\n'), ((3923, 3950), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (3936, 3950), False, 'import copy\n'), ((4084, 4121), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (4092, 4121), False, 'from registry_schemas import validate\n'), ((4378, 4405), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (4391, 4405), False, 'import copy\n'), ((4456, 4493), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (4464, 4493), False, 'from registry_schemas import validate\n'), ((4734, 4761), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (4747, 4761), False, 'import copy\n'), ((4904, 4941), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (4912, 4941), False, 'from registry_schemas import validate\n'), ((5190, 5217), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (5203, 5217), False, 'import copy\n'), ((5294, 5331), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (5302, 5331), False, 'from registry_schemas import validate\n'), ((5586, 5613), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (5599, 5613), False, 'import copy\n'), ((5877, 5914), 
'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (5885, 5914), False, 'from registry_schemas import validate\n'), ((6158, 6185), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (6171, 6185), False, 'import copy\n'), ((6337, 6374), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (6345, 6374), False, 'from registry_schemas import validate\n'), ((6624, 6651), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (6637, 6651), False, 'import copy\n'), ((6711, 6748), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (6719, 6748), False, 'from registry_schemas import validate\n'), ((7008, 7035), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (7021, 7035), False, 'import copy\n'), ((7229, 7266), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (7237, 7266), False, 'from registry_schemas import validate\n'), ((7528, 7555), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (7541, 7555), False, 'import copy\n'), ((7750, 7787), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (7758, 7787), False, 'from registry_schemas import validate\n'), ((8046, 8073), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (8059, 8073), False, 'import copy\n'), ((8266, 8303), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (8274, 8303), False, 'from registry_schemas import validate\n'), ((8565, 8592), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (8578, 8592), False, 'import copy\n'), ((8761, 8798), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (8769, 8798), False, 'from registry_schemas import validate\n'), ((9060, 9087), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (9073, 9087), False, 'import copy\n'), ((9241, 9278), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (9249, 9278), False, 'from registry_schemas import validate\n'), ((9536, 9563), 'copy.deepcopy', 'copy.deepcopy', (['SEARCH_QUERY'], {}), '(SEARCH_QUERY)\n', (9549, 9563), False, 'import copy\n'), ((9715, 9752), 'registry_schemas.validate', 'validate', (['query', '"""searchQuery"""', '"""ppr"""'], {}), "(query, 'searchQuery', 'ppr')\n", (9723, 9752), False, 'from registry_schemas import validate\n')] |
kharnam/devopsipy | devopsipy/decorators.py | c3379b1dd5f66e71c826045afde1702030e495d4 | """
Module to contain Pywork decorators
"""
__author__ = 'sergey kharnam'
import re
import time
import itertools
import logging
log = logging.getLogger(__name__)
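import functools


# Hedged example (illustrative only): one decorator this module could host,
# consistent with its docstring; the name `retry` and its behavior are
# assumptions, not part of the original file.
def retry(times=3, delay=1.0):
    """Retry the wrapped callable, logging each failed attempt."""
    def deco(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in itertools.count(1):
                try:
                    return func(*args, **kwargs)
                except Exception:
                    if attempt >= times:
                        raise
                    log.warning('%s failed (attempt %d/%d), retrying in %.1fs',
                                func.__name__, attempt, times, delay)
                    time.sleep(delay)
        return wrapper
    return deco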
| [((137, 164), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (154, 164), False, 'import logging\n')] |
stephenfin/django-rest-framework | tests/test_decorators.py | 9d001cd84c1239d708b1528587c183ef30e38c31 | from __future__ import unicode_literals
import pytest
from django.test import TestCase
from rest_framework import status
from rest_framework.authentication import BasicAuthentication
from rest_framework.decorators import (
action, api_view, authentication_classes, detail_route, list_route,
parser_classes, permission_classes, renderer_classes, schema,
throttle_classes
)
from rest_framework.parsers import JSONParser
from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from rest_framework.schemas import AutoSchema
from rest_framework.test import APIRequestFactory
from rest_framework.throttling import UserRateThrottle
from rest_framework.views import APIView
class DecoratorTestCase(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def _finalize_response(self, request, response, *args, **kwargs):
response.request = request
return APIView.finalize_response(self, request, response, *args, **kwargs)
def test_api_view_incorrect(self):
"""
If @api_view is not applied correct, we should raise an assertion.
"""
@api_view
def view(request):
return Response()
request = self.factory.get('/')
self.assertRaises(AssertionError, view, request)
def test_api_view_incorrect_arguments(self):
"""
If @api_view is missing arguments, we should raise an assertion.
"""
with self.assertRaises(AssertionError):
@api_view('GET')
def view(request):
return Response()
def test_calling_method(self):
@api_view(['GET'])
def view(request):
return Response({})
request = self.factory.get('/')
response = view(request)
assert response.status_code == status.HTTP_200_OK
request = self.factory.post('/')
response = view(request)
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
def test_calling_put_method(self):
@api_view(['GET', 'PUT'])
def view(request):
return Response({})
request = self.factory.put('/')
response = view(request)
assert response.status_code == status.HTTP_200_OK
request = self.factory.post('/')
response = view(request)
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
def test_calling_patch_method(self):
@api_view(['GET', 'PATCH'])
def view(request):
return Response({})
request = self.factory.patch('/')
response = view(request)
assert response.status_code == status.HTTP_200_OK
request = self.factory.post('/')
response = view(request)
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
def test_renderer_classes(self):
@api_view(['GET'])
@renderer_classes([JSONRenderer])
def view(request):
return Response({})
request = self.factory.get('/')
response = view(request)
assert isinstance(response.accepted_renderer, JSONRenderer)
def test_parser_classes(self):
@api_view(['GET'])
@parser_classes([JSONParser])
def view(request):
assert len(request.parsers) == 1
assert isinstance(request.parsers[0], JSONParser)
return Response({})
request = self.factory.get('/')
view(request)
def test_authentication_classes(self):
@api_view(['GET'])
@authentication_classes([BasicAuthentication])
def view(request):
assert len(request.authenticators) == 1
assert isinstance(request.authenticators[0], BasicAuthentication)
return Response({})
request = self.factory.get('/')
view(request)
def test_permission_classes(self):
@api_view(['GET'])
@permission_classes([IsAuthenticated])
def view(request):
return Response({})
request = self.factory.get('/')
response = view(request)
assert response.status_code == status.HTTP_403_FORBIDDEN
def test_throttle_classes(self):
class OncePerDayUserThrottle(UserRateThrottle):
rate = '1/day'
@api_view(['GET'])
@throttle_classes([OncePerDayUserThrottle])
def view(request):
return Response({})
request = self.factory.get('/')
response = view(request)
assert response.status_code == status.HTTP_200_OK
response = view(request)
assert response.status_code == status.HTTP_429_TOO_MANY_REQUESTS
def test_schema(self):
"""
Checks CustomSchema class is set on view
"""
class CustomSchema(AutoSchema):
pass
@api_view(['GET'])
@schema(CustomSchema())
def view(request):
return Response({})
assert isinstance(view.cls.schema, CustomSchema)
class ActionDecoratorTestCase(TestCase):
def test_defaults(self):
@action(detail=True)
def test_action(request):
"""Description"""
assert test_action.mapping == {'get': 'test_action'}
assert test_action.detail is True
assert test_action.url_path == 'test_action'
assert test_action.url_name == 'test-action'
assert test_action.kwargs == {
'name': 'Test action',
'description': 'Description',
}
def test_detail_required(self):
with pytest.raises(AssertionError) as excinfo:
@action()
def test_action(request):
raise NotImplementedError
assert str(excinfo.value) == "@action() missing required argument: 'detail'"
def test_method_mapping_http_methods(self):
# All HTTP methods should be mappable
@action(detail=False, methods=[])
def test_action():
raise NotImplementedError
for name in APIView.http_method_names:
def method():
raise NotImplementedError
# Python 2.x compatibility - cast __name__ to str
method.__name__ = str(name)
getattr(test_action.mapping, name)(method)
# ensure the mapping returns the correct method name
for name in APIView.http_method_names:
assert test_action.mapping[name] == name
def test_view_name_kwargs(self):
"""
'name' and 'suffix' are mutually exclusive kwargs used for generating
a view's display name.
"""
# by default, generate name from method
@action(detail=True)
def test_action(request):
raise NotImplementedError
assert test_action.kwargs == {
'description': None,
'name': 'Test action',
}
# name kwarg supersedes name generation
@action(detail=True, name='test name')
def test_action(request):
raise NotImplementedError
assert test_action.kwargs == {
'description': None,
'name': 'test name',
}
# suffix kwarg supersedes name generation
@action(detail=True, suffix='Suffix')
def test_action(request):
raise NotImplementedError
assert test_action.kwargs == {
'description': None,
'suffix': 'Suffix',
}
# name + suffix is a conflict.
with pytest.raises(TypeError) as excinfo:
action(detail=True, name='test name', suffix='Suffix')
assert str(excinfo.value) == "`name` and `suffix` are mutually exclusive arguments."
def test_method_mapping(self):
@action(detail=False)
def test_action(request):
raise NotImplementedError
@test_action.mapping.post
def test_action_post(request):
raise NotImplementedError
# The secondary handler methods should not have the action attributes
for name in ['mapping', 'detail', 'url_path', 'url_name', 'kwargs']:
assert hasattr(test_action, name) and not hasattr(test_action_post, name)
def test_method_mapping_already_mapped(self):
@action(detail=True)
def test_action(request):
raise NotImplementedError
msg = "Method 'get' has already been mapped to '.test_action'."
with self.assertRaisesMessage(AssertionError, msg):
@test_action.mapping.get
def test_action_get(request):
raise NotImplementedError
def test_method_mapping_overwrite(self):
@action(detail=True)
def test_action():
raise NotImplementedError
msg = ("Method mapping does not behave like the property decorator. You "
"cannot use the same method name for each mapping declaration.")
with self.assertRaisesMessage(AssertionError, msg):
@test_action.mapping.post
def test_action():
raise NotImplementedError
def test_detail_route_deprecation(self):
with pytest.warns(DeprecationWarning) as record:
@detail_route()
def view(request):
raise NotImplementedError
assert len(record) == 1
assert str(record[0].message) == (
"`detail_route` is deprecated and will be removed in "
"3.10 in favor of `action`, which accepts a `detail` bool. Use "
"`@action(detail=True)` instead."
)
def test_list_route_deprecation(self):
with pytest.warns(DeprecationWarning) as record:
@list_route()
def view(request):
raise NotImplementedError
assert len(record) == 1
assert str(record[0].message) == (
"`list_route` is deprecated and will be removed in "
"3.10 in favor of `action`, which accepts a `detail` bool. Use "
"`@action(detail=False)` instead."
)
def test_route_url_name_from_path(self):
# pre-3.8 behavior was to base the `url_name` off of the `url_path`
with pytest.warns(DeprecationWarning):
@list_route(url_path='foo_bar')
def view(request):
raise NotImplementedError
assert view.url_path == 'foo_bar'
assert view.url_name == 'foo-bar'
| [((856, 875), 'rest_framework.test.APIRequestFactory', 'APIRequestFactory', ([], {}), '()\n', (873, 875), False, 'from rest_framework.test import APIRequestFactory\n'), ((997, 1064), 'rest_framework.views.APIView.finalize_response', 'APIView.finalize_response', (['self', 'request', 'response', '*args'], {}), '(self, request, response, *args, **kwargs)\n', (1022, 1064), False, 'from rest_framework.views import APIView\n'), ((1714, 1731), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (1722, 1731), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((2122, 2146), 'rest_framework.decorators.api_view', 'api_view', (["['GET', 'PUT']"], {}), "(['GET', 'PUT'])\n", (2130, 2146), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((2539, 2565), 'rest_framework.decorators.api_view', 'api_view', (["['GET', 'PATCH']"], {}), "(['GET', 'PATCH'])\n", (2547, 2565), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((2956, 2973), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (2964, 2973), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((2983, 3015), 'rest_framework.decorators.renderer_classes', 'renderer_classes', (['[JSONRenderer]'], {}), '([JSONRenderer])\n', (2999, 3015), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((3263, 3280), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (3271, 3280), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((3290, 3318), 'rest_framework.decorators.parser_classes', 'parser_classes', (['[JSONParser]'], {}), '([JSONParser])\n', (3304, 3318), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((3602, 3619), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (3610, 3619), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((3629, 3674), 'rest_framework.decorators.authentication_classes', 'authentication_classes', (['[BasicAuthentication]'], {}), '([BasicAuthentication])\n', (3651, 3674), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((3977, 3994), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (3985, 3994), False, 'from rest_framework.decorators import action, api_view, 
authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((4004, 4041), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated]'], {}), '([IsAuthenticated])\n', (4022, 4041), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((4371, 4388), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (4379, 4388), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((4398, 4440), 'rest_framework.decorators.throttle_classes', 'throttle_classes', (['[OncePerDayUserThrottle]'], {}), '([OncePerDayUserThrottle])\n', (4414, 4440), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((4907, 4924), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (4915, 4924), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((5156, 5175), 'rest_framework.decorators.action', 'action', ([], {'detail': '(True)'}), '(detail=True)\n', (5162, 5175), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((5960, 5992), 'rest_framework.decorators.action', 'action', ([], {'detail': '(False)', 'methods': '[]'}), '(detail=False, methods=[])\n', (5966, 5992), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((6722, 6741), 'rest_framework.decorators.action', 'action', ([], {'detail': '(True)'}), '(detail=True)\n', (6728, 6741), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((6990, 7027), 'rest_framework.decorators.action', 'action', ([], {'detail': '(True)', 'name': '"""test name"""'}), "(detail=True, name='test name')\n", (6996, 7027), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((7276, 7312), 'rest_framework.decorators.action', 'action', ([], {'detail': '(True)', 'suffix': '"""Suffix"""'}), "(detail=True, suffix='Suffix')\n", (7282, 7312), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((7796, 7816), 'rest_framework.decorators.action', 'action', ([], {'detail': '(False)'}), '(detail=False)\n', (7802, 7816), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((8303, 8322), 
'rest_framework.decorators.action', 'action', ([], {'detail': '(True)'}), '(detail=True)\n', (8309, 8322), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((8704, 8723), 'rest_framework.decorators.action', 'action', ([], {'detail': '(True)'}), '(detail=True)\n', (8710, 8723), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((1269, 1279), 'rest_framework.response.Response', 'Response', ([], {}), '()\n', (1277, 1279), False, 'from rest_framework.response import Response\n'), ((1587, 1602), 'rest_framework.decorators.api_view', 'api_view', (['"""GET"""'], {}), "('GET')\n", (1595, 1602), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((1778, 1790), 'rest_framework.response.Response', 'Response', (['{}'], {}), '({})\n', (1786, 1790), False, 'from rest_framework.response import Response\n'), ((2193, 2205), 'rest_framework.response.Response', 'Response', (['{}'], {}), '({})\n', (2201, 2205), False, 'from rest_framework.response import Response\n'), ((2612, 2624), 'rest_framework.response.Response', 'Response', (['{}'], {}), '({})\n', (2620, 2624), False, 'from rest_framework.response import Response\n'), ((3062, 3074), 'rest_framework.response.Response', 'Response', (['{}'], {}), '({})\n', (3070, 3074), False, 'from rest_framework.response import Response\n'), ((3472, 3484), 'rest_framework.response.Response', 'Response', (['{}'], {}), '({})\n', (3480, 3484), False, 'from rest_framework.response import Response\n'), ((3851, 3863), 'rest_framework.response.Response', 'Response', (['{}'], {}), '({})\n', (3859, 3863), False, 'from rest_framework.response import Response\n'), ((4088, 4100), 'rest_framework.response.Response', 'Response', (['{}'], {}), '({})\n', (4096, 4100), False, 'from rest_framework.response import Response\n'), ((4487, 4499), 'rest_framework.response.Response', 'Response', (['{}'], {}), '({})\n', (4495, 4499), False, 'from rest_framework.response import Response\n'), ((5003, 5015), 'rest_framework.response.Response', 'Response', (['{}'], {}), '({})\n', (5011, 5015), False, 'from rest_framework.response import Response\n'), ((5626, 5655), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (5639, 5655), False, 'import pytest\n'), ((5681, 5689), 'rest_framework.decorators.action', 'action', ([], {}), '()\n', (5687, 5689), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((7553, 7577), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (7566, 7577), False, 'import pytest\n'), ((7602, 7656), 'rest_framework.decorators.action', 'action', ([], {'detail': '(True)', 'name': '"""test name"""', 'suffix': '"""Suffix"""'}), "(detail=True, name='test name', suffix='Suffix')\n", (7608, 7656), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((9182, 9214), 'pytest.warns', 'pytest.warns', 
(['DeprecationWarning'], {}), '(DeprecationWarning)\n', (9194, 9214), False, 'import pytest\n'), ((9239, 9253), 'rest_framework.decorators.detail_route', 'detail_route', ([], {}), '()\n', (9251, 9253), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((9660, 9692), 'pytest.warns', 'pytest.warns', (['DeprecationWarning'], {}), '(DeprecationWarning)\n', (9672, 9692), False, 'import pytest\n'), ((9717, 9729), 'rest_framework.decorators.list_route', 'list_route', ([], {}), '()\n', (9727, 9729), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((10213, 10245), 'pytest.warns', 'pytest.warns', (['DeprecationWarning'], {}), '(DeprecationWarning)\n', (10225, 10245), False, 'import pytest\n'), ((10260, 10290), 'rest_framework.decorators.list_route', 'list_route', ([], {'url_path': '"""foo_bar"""'}), "(url_path='foo_bar')\n", (10270, 10290), False, 'from rest_framework.decorators import action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes\n'), ((1657, 1667), 'rest_framework.response.Response', 'Response', ([], {}), '()\n', (1665, 1667), False, 'from rest_framework.response import Response\n')] |
CRE2525/open-tamil | tamilmorse/morse_encode.py | ffc02509f7b8a6a17644c85799a475a8ba623954 | ## -*- coding: utf-8 -*-
#(C) 2018 Muthiah Annamalai
# This file is part of Open-Tamil project
# You may use or distribute this file under terms of MIT license
import codecs
import json
import tamil
import sys
import os
#e.g. python morse_encode.py கலைஞர்
CURRDIR = os.path.dirname(os.path.realpath(__file__))


def encode(text):
    with codecs.open(os.path.join(CURRDIR, "data", "madurai_tamilmorse.json"), "r", "utf-8") as fp:
codebook = json.loads(fp.read())
    output = [codebook.get(l, l) for l in tamil.utf8.get_letters(text)]
return u" ".join(output)


if __name__ == u"__main__":
    # sys.argv entries are already text on Python 3; only byte strings (Python 2) need decoding.
    args = [a.decode("utf-8") if isinstance(a, bytes) else a for a in sys.argv[1:]]
    # Print the result so the CLI example above actually produces output.
    print(encode(u" ".join(args)))
| [((284, 310), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (300, 310), False, 'import os\n'), ((351, 407), 'os.path.join', 'os.path.join', (['CURRDIR', '"""data"""', '"""madurai_tamilmorse.json"""'], {}), "(CURRDIR, 'data', 'madurai_tamilmorse.json')\n", (363, 407), False, 'import os\n'), ((508, 536), 'tamil.utf8.get_letters', 'tamil.utf8.get_letters', (['text'], {}), '(text)\n', (530, 536), False, 'import tamil\n')] |
Xrenya/algorithms | Leetcode/Python/_1721.py | aded82cacde2f4f2114241907861251e0e2e5638 | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
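    # Approach 1: copy node values into an array, swap the k-th from each end,
    # then rebuild the list -- O(n) time, O(n) extra space.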
def swapNodes(self, head: Optional[ListNode], k: int) -> Optional[ListNode]:
temp = head
array = []
while temp:
array.append(temp.val)
temp = temp.next
array[k - 1], array[len(array) - k] = array[len(array) - k], array[k - 1]
head = ListNode(0)
dummy = head
for num in array:
dummy.next = ListNode(num)
dummy = dummy.next
return head.next
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
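    # Approach 2: count the nodes in one pass, then advance two pointers and
    # swap values in place -- O(n) time, O(1) extra space.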
def swapNodes(self, head: Optional[ListNode], k: int) -> Optional[ListNode]:
if head is None or head.next is None:
return head
slow = fast = cnt = head
counter = 0
while cnt:
counter += 1
cnt = cnt.next
for _ in range(k - 1):
slow = slow.next
for _ in range(counter - k):
fast = fast.next
slow.val, fast.val = fast.val, slow.val
return head
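

# Hedged test harness (illustrative only): the helpers below are assumptions
# and rely on the ListNode scaffold that LeetCode provides.
#
# def build(values):
#     head = tail = ListNode(values[0])
#     for v in values[1:]:
#         tail.next = ListNode(v)
#         tail = tail.next
#     return head
#
# def to_list(head):
#     out = []
#     while head:
#         out.append(head.val)
#         head = head.next
#     return out
#
# assert to_list(Solution().swapNodes(build([1, 2, 3, 4, 5]), 2)) == [1, 4, 3, 2, 5]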
| [] |
electrumsv/electrumsv | contrib/functional_tests/functional/test_reorg.py | a2d9027ccec338cadfca778888e6ef7f077b1651 | """
Warning - this will reset all components back to a blank state before running the simulation
Runs node1, electrumx1 and electrumsv1 and loads the default wallet on the daemon (so that newly
submitted blocks will be synchronized by ElectrumSV)
reorged txid: 'a1fa9460ca105c1396cd338f7fa202bf79a9d244d730e91e19f6302a05b2f07a'
"""
import asyncio
import os
from pathlib import Path
import pytest
import pytest_asyncio
from electrumsv_node import electrumsv_node
from electrumsv_sdk import utils
import logging
import requests
from contrib.functional_tests.websocket_client import TxStateWSClient
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("simulate-fresh-reorg")
async def wait_for_reorg_transaction_update(reorged_txids, reorg_height):
MAX_WAIT_TIME = 10 # seconds
async with TxStateWSClient() as ws_client:
try:
await asyncio.wait_for(ws_client.block_until_confirmed_and_height_updated(
reorged_txids, reorg_height), MAX_WAIT_TIME)
except asyncio.TimeoutError:
logger.exception(f"timed out after {MAX_WAIT_TIME} seconds")
raise
class TestReorg:
@classmethod
def setup_class(cls):
pass
@classmethod
def teardown_class(cls):
pass
@pytest.mark.asyncio
def test_reorg(self, event_loop):
async def test_reorg():
payload = {
"password": "test"
}
REORGED_TXIDS = "a1fa9460ca105c1396cd338f7fa202bf79a9d244d730e91e19f6302a05b2f07a"
# Load the default wallet on ElectrumSV daemon
url = f"http://127.0.0.1:9999/v1/regtest/dapp/wallets/worker1.sqlite/load_wallet"
result = requests.post(url, json=payload)
result.raise_for_status()
# Submit node1 blocks to node
if electrumsv_node.is_node_running():
utils.submit_blocks_from_file(node_id='node1',
filepath=Path(MODULE_DIR).joinpath('../reorg_blocks/node1_blocks.dat'))
            else:
                logger.error("node unavailable")  # no active exception here, so plain error logging
try:
await wait_for_reog_transaction_update([REORGED_TXIDS], 201)
# Todo check state of get_balance; get_coin_state; get_transaction_history
# Submit node2 blocks to node
if electrumsv_node.is_node_running():
utils.submit_blocks_from_file(node_id='node1',
filepath=Path(MODULE_DIR).joinpath('../reorg_blocks/node2_blocks.dat'))
                else:
                    logger.error("node unavailable")  # no active exception here, so plain error logging
await wait_for_reog_transaction_update([REORGED_TXIDS], 202)
except asyncio.TimeoutError:
pytest.xfail("work in progress alongside refactoring changes...")
# Todo check state of get_balance; get_coin_state; get_transaction_history
event_loop.run_until_complete(test_reorg())
| [((655, 695), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (674, 695), False, 'import logging\n'), ((705, 746), 'logging.getLogger', 'logging.getLogger', (['"""simulate-fresh-reorg"""'], {}), "('simulate-fresh-reorg')\n", (722, 746), False, 'import logging\n'), ((628, 653), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (643, 653), False, 'import os\n'), ((871, 888), 'contrib.functional_tests.websocket_client.TxStateWSClient', 'TxStateWSClient', ([], {}), '()\n', (886, 888), False, 'from contrib.functional_tests.websocket_client import TxStateWSClient\n'), ((1768, 1800), 'requests.post', 'requests.post', (['url'], {'json': 'payload'}), '(url, json=payload)\n', (1781, 1800), False, 'import requests\n'), ((1897, 1930), 'electrumsv_node.electrumsv_node.is_node_running', 'electrumsv_node.is_node_running', ([], {}), '()\n', (1928, 1930), False, 'from electrumsv_node import electrumsv_node\n'), ((2410, 2443), 'electrumsv_node.electrumsv_node.is_node_running', 'electrumsv_node.is_node_running', ([], {}), '()\n', (2441, 2443), False, 'from electrumsv_node import electrumsv_node\n'), ((2822, 2887), 'pytest.xfail', 'pytest.xfail', (['"""work in progress alongside refactoring changes..."""'], {}), "('work in progress alongside refactoring changes...')\n", (2834, 2887), False, 'import pytest\n'), ((2024, 2040), 'pathlib.Path', 'Path', (['MODULE_DIR'], {}), '(MODULE_DIR)\n', (2028, 2040), False, 'from pathlib import Path\n'), ((2545, 2561), 'pathlib.Path', 'Path', (['MODULE_DIR'], {}), '(MODULE_DIR)\n', (2549, 2561), False, 'from pathlib import Path\n')] |
KhanhThiVo/SimuRLacra | Pyrado/pyrado/environments/mujoco/wam_bic.py | fdeaf2059c2ed80ea696f018c29290510b5c4cb9 | # Copyright (c) 2020, Fabio Muratore, Honda Research Institute Europe GmbH, and
# Technical University of Darmstadt.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of Fabio Muratore, Honda Research Institute Europe GmbH,
# or Technical University of Darmstadt, nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL FABIO MURATORE, HONDA RESEARCH INSTITUTE EUROPE GMBH,
# OR TECHNICAL UNIVERSITY OF DARMSTADT BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import mujoco_py
import numpy as np
import os.path as osp
from init_args_serializer import Serializable
from typing import Optional
import pyrado
from pyrado.environments.barrett_wam import (
goal_pos_init_sim_4dof,
goal_pos_init_sim_7dof,
init_qpos_des_4dof,
init_qpos_des_7dof,
act_space_bic_4dof,
act_space_bic_7dof,
wam_q_limits_up_7dof,
wam_q_limits_lo_7dof,
torque_space_wam_4dof,
torque_space_wam_7dof,
wam_pgains_7dof,
wam_dgains_7dof,
wam_pgains_4dof,
wam_dgains_4dof,
)
from pyrado.environments.mujoco.base import MujocoSimEnv
from pyrado.spaces.base import Space
from pyrado.spaces.box import BoxSpace
from pyrado.spaces.singular import SingularStateSpace
from pyrado.tasks.base import Task
from pyrado.tasks.condition_only import ConditionOnlyTask
from pyrado.tasks.desired_state import DesStateTask
from pyrado.tasks.final_reward import BestStateFinalRewTask, FinalRewTask, FinalRewMode
from pyrado.tasks.goalless import GoallessTask
from pyrado.tasks.masked import MaskedTask
from pyrado.tasks.parallel import ParallelTasks
from pyrado.tasks.reward_functions import ZeroPerStepRewFcn, ExpQuadrErrRewFcn, QuadrErrRewFcn
from pyrado.tasks.sequential import SequentialTasks
from pyrado.utils.data_types import EnvSpec
from pyrado.utils.input_output import print_cbt
class WAMBallInCupSim(MujocoSimEnv, Serializable):
"""
WAM robotic arm from Barrett technologies for the ball-in-the-cup task, controlled by a PD controller.
.. note::
When using the `reset()` function, always pass a meaningful `init_state`
.. seealso::
[1] https://github.com/psclklnk/self-paced-rl/tree/master/sprl/envs/ball_in_a_cup.py
"""
name: str = "wam-bic"
def __init__(
self,
num_dof: int,
frame_skip: int = 4,
dt: Optional[float] = None,
max_steps: int = pyrado.inf,
fixed_init_state: bool = True,
stop_on_collision: bool = True,
observe_ball: bool = False,
observe_cup: bool = False,
task_args: Optional[dict] = None,
):
"""
Constructor
:param num_dof: number of degrees of freedom (4 or 7), depending on which Barrett WAM setup being used
:param frame_skip: number of simulation frames for which the same action is held, results in a multiplier of
the time step size `dt`
:param dt: by default the time step size is the one from the mujoco config file multiplied by the number of
frame skips (legacy from OpenAI environments). By passing an explicit `dt` value, this can be
                    overwritten. A possible use case is if you know that you recorded a trajectory with a specific `dt`.
:param max_steps: max number of simulation time steps
:param fixed_init_state: enables/disables deterministic, fixed initial state
:param stop_on_collision: set the `failed` flag in the `dict` returned by `_mujoco_step()` to true, if the ball
collides with something else than the desired parts of the cup. This causes the
episode to end. Keep in mind that in case of a negative step reward and no final
cost on failing, this might result in undesired behavior.
:param observe_ball: if `True`, include the 2-dim (x-z plane) cartesian ball position into the observation
:param observe_cup: if `True`, include the 2-dim (x-z plane) cartesian cup position into the observation
:param task_args: arguments for the task construction
"""
Serializable._init(self, locals())
self.fixed_init_state = fixed_init_state
self.observe_ball = observe_ball
self.observe_cup = observe_cup
# Initialize num DoF specific variables
self._num_dof = num_dof
if num_dof == 4:
graph_file_name = "wam_4dof_bic.xml"
self.qpos_des_init = init_qpos_des_4dof
self.p_gains = wam_pgains_4dof
self.d_gains = wam_dgains_4dof
init_ball_pos = np.array([0.723, 0.0, 1.168])
init_cup_goal = goal_pos_init_sim_4dof
elif num_dof == 7:
graph_file_name = "wam_7dof_bic.xml"
self.qpos_des_init = init_qpos_des_7dof
self.p_gains = wam_pgains_7dof
self.d_gains = wam_dgains_7dof
init_ball_pos = np.array([0.828, 0.0, 1.131])
init_cup_goal = goal_pos_init_sim_7dof
else:
raise pyrado.ValueErr(given=num_dof, eq_constraint="4 or 7")
model_path = osp.join(pyrado.MUJOCO_ASSETS_DIR, graph_file_name)
super().__init__(model_path, frame_skip, dt, max_steps, task_args)
# Actual initial joint position (when the WAM moved to the home position)
if num_dof == 4:
self.init_qpos[:4] = np.array([0.0, 0.63, 0.0, 1.27])
self.init_qpos[4] = -0.34 # angle of the first rope segment relative to the cup bottom plate
else:
self.init_qpos[:7] = np.array([0.0, 0.65, 0.0, 1.41, 0.0, -0.28, -1.57])
self.init_qpos[7] = -0.21 # angle of the first rope segment relative to the cup bottom plate
# Set the actual stable initial position. This position would be reached after some time using the internal
# PD controller to stabilize at self._qpos_des_init.
# The initial position of the ball in cartesian coordinates
self._init_state = np.concatenate([self.init_qpos, self.init_qvel, init_ball_pos, init_cup_goal])
if self.fixed_init_state:
self._init_space = SingularStateSpace(self._init_state)
else:
            # Perturb each motor joint and the first rope segment joint by up to plus/minus one degree
init_state_up = self._init_state.copy()
init_state_up[: self._num_dof] += np.pi / 180 * np.array([0.1, 1, 0.5, 1.0, 0.1, 1.0, 1.0])[: self._num_dof]
init_state_lo = self._init_state.copy()
init_state_lo[: self._num_dof] -= np.pi / 180 * np.array([0.1, 1, 0.5, 1.0, 0.1, 1.0, 1.0])[: self._num_dof]
self._init_space = BoxSpace(init_state_lo, init_state_up)
        # Bodies to check for collision
self._collision_bodies = [
"wam/base_link",
"wam/shoulder_yaw_link",
"wam/shoulder_pitch_link",
"wam/upper_arm_link",
"wam/forearm_link",
"wrist_palm_link",
"wam/wrist_pitch_link",
"wam/wrist_yaw_link",
]
if self._num_dof == 4:
self._collision_bodies = self._collision_bodies[:6]
# We access a private attribute since a method like 'model.geom_names[geom_id]' cannot be used because
# not every geom has a name
self._collision_geom_ids = [self.model._geom_name2id[name] for name in ["cup_geom1", "cup_geom2"]]
self.stop_on_collision = stop_on_collision
self.camera_config = dict(
distance=2.7,
trackbodyid=0, # id of the body to track
elevation=-30, # camera rotation around the axis in the plane
azimuth=-90, # camera rotation around the camera's vertical axis
)
@property
def num_dof(self) -> int:
""" Get the number of degrees of freedom. """
return self._num_dof
@property
def torque_space(self) -> Space:
""" Get the space of joint torques. """
return torque_space_wam_7dof if self._num_dof == 7 else torque_space_wam_4dof
@property
def state_space(self) -> Space:
# The state space has the same shape as the init space (including ball and cup)
state_shape = np.concatenate([self.init_qpos, self.init_qvel, np.empty(3), np.empty(3)]).shape
state_lo, state_up = np.full(state_shape, -pyrado.inf), np.full(state_shape, pyrado.inf)
# Ensure that joint limits of the arm are not reached (5 deg safety margin)
state_lo[: self._num_dof] = wam_q_limits_lo_7dof[: self._num_dof]
state_up[: self._num_dof] = wam_q_limits_up_7dof[: self._num_dof]
return BoxSpace(state_lo, state_up)
@property
def obs_space(self) -> Space:
# Observing the normalized time and optionally the cup and ball position
obs_lo, obs_up, labels = [0.0], [1.0], ["t"]
if self.observe_ball:
obs_lo.extend([-3.0, -3.0])
obs_up.extend([3.0, 3.0])
labels.extend(["ball_x", "ball_z"])
if self.observe_cup:
obs_lo.extend([-3.0, -3.0])
obs_up.extend([3.0, 3.0])
labels.extend(["cup_x", "cup_z"])
return BoxSpace(obs_lo, obs_up, labels=labels)
@property
def act_space(self) -> Space:
# Running a PD controller on joint positions and velocities
return act_space_bic_7dof if self._num_dof == 7 else act_space_bic_4dof
@classmethod
def get_nominal_domain_param(cls, num_dof: int = 7) -> dict:
if num_dof == 7:
return dict(
cup_scale=1.0, # scaling factor for the radius of the cup [-] (should be >0.65)
rope_length=0.41, # length of the rope [m]
ball_mass=0.024, # mass of the ball [kg]
joint_1_damping=0.05, # damping of motor joints [N/s] (default value is small)
joint_2_damping=0.05, # damping of motor joints [N/s] (default value is small)
joint_3_damping=0.05, # damping of motor joints [N/s] (default value is small)
joint_4_damping=0.05, # damping of motor joints [N/s] (default value is small)
joint_5_damping=0.05, # damping of motor joints [N/s] (default value is small)
joint_6_damping=0.05, # damping of motor joints [N/s] (default value is small)
joint_7_damping=0.05, # damping of motor joints [N/s] (default value is small)
joint_1_dryfriction=0.4, # dry friction coefficient of motor joint 1 [-]
joint_2_dryfriction=0.4, # dry friction coefficient of motor joint 2 [-]
joint_3_dryfriction=0.4, # dry friction coefficient of motor joint 3 [-]
joint_4_dryfriction=0.4, # dry friction coefficient of motor joint 4 [-]
joint_5_dryfriction=0.4, # dry friction coefficient of motor joint 5 [-]
joint_6_dryfriction=0.4, # dry friction coefficient of motor joint 6 [-]
joint_7_dryfriction=0.4, # dry friction coefficient of motor joint 7 [-]
rope_damping=1e-4, # damping of rope joints [N/s] (reasonable values are 6e-4 to 1e-6)
)
elif num_dof == 4:
return dict(
cup_scale=1.0, # scaling factor for the radius of the cup [-] (should be >0.65)
rope_length=0.41, # length of the rope [m]
ball_mass=0.024, # mass of the ball [kg]
joint_1_damping=0.05, # damping of motor joints [N/s] (default value is small)
joint_2_damping=0.05, # damping of motor joints [N/s] (default value is small)
joint_3_damping=0.05, # damping of motor joints [N/s] (default value is small)
joint_4_damping=0.05, # damping of motor joints [N/s] (default value is small)
joint_1_dryfriction=0.4, # dry friction coefficient of motor joint 1 [-]
joint_2_dryfriction=0.4, # dry friction coefficient of motor joint 2 [-]
joint_3_dryfriction=0.4, # dry friction coefficient of motor joint 3 [-]
joint_4_dryfriction=0.4, # dry friction coefficient of motor joint 4 [-]
rope_damping=1e-4, # damping of rope joints [N/s] (reasonable values are 6e-4 to 1e-6)
)
else:
raise pyrado.ValueErr(given=num_dof, eq_constraint="4 or 7")
def _create_task(self, task_args: dict) -> Task:
if task_args.get("sparse_rew_fcn", False):
# Create a task with binary reward
return self._create_main_task(task_args)
else:
# Create two (or three) parallel running task.
# 1.) Main task: Desired state task for the cartesian ball distance
# 2.) Deviation task: Desired state task for the cartesian- and joint deviation from the init position
            # 3.) Binary Bonus: Adds a binary bonus when the ball is caught [inactive by default]
return ParallelTasks(
[
self._create_main_task(task_args),
self._create_deviation_task(task_args),
self._create_main_task(
dict(
sparse_rew_fcn=True,
success_bonus=task_args.get("success_bonus", 0),
)
),
]
)
def _create_main_task(self, task_args: dict) -> Task:
# Create a DesStateTask that masks everything but the ball position
idcs = list(range(self.state_space.flat_dim - 6, self.state_space.flat_dim - 3)) # Cartesian ball position
spec = EnvSpec(
self.spec.obs_space,
self.spec.act_space,
self.spec.state_space.subspace(self.spec.state_space.create_mask(idcs)),
)
# If we do not use copy(), state_des coming from MuJoCo is a reference and updates automatically at each step.
# Note: sim.forward() + get_body_xpos() results in wrong output for state_des, as sim has not been updated to
# init_space.sample(), which is first called in reset()
if task_args.get("sparse_rew_fcn", False):
factor = task_args.get("success_bonus", 1)
# Binary final reward task
main_task = FinalRewTask(
ConditionOnlyTask(
spec,
condition_fcn=self.check_ball_in_cup,
is_success_condition=True,
),
mode=FinalRewMode(always_positive=True),
factor=factor,
)
            # Yield -1 on fail after the main task is done (successfully or not)
dont_fail_after_succ_task = FinalRewTask(
GoallessTask(spec, ZeroPerStepRewFcn()),
mode=FinalRewMode(always_negative=True),
factor=factor,
)
# Augment the binary task with an endless dummy task, to avoid early stopping
task = SequentialTasks((main_task, dont_fail_after_succ_task))
return MaskedTask(self.spec, task, idcs)
else:
state_des = self.sim.data.get_site_xpos("cup_goal") # this is a reference
# state_des_ball = self.sim.data.get_site_xpos("cup_goal") # this is a reference
# state_des_cup = np.array([0.82521, 0, 1.4469]) if self._num_dof == 7 else np.array([0.758, 0, 1.5])
# state_des = np.concatenate([state_des_ball, state_des_cup])
R_default = np.diag([0, 0, 1, 1e-2, 1e-2, 1e-1]) if self._num_dof == 7 else np.diag([0, 0, 1e-2, 1e-2])
rew_fcn = ExpQuadrErrRewFcn(
Q=task_args.get("Q", np.diag([2e1, 1e-4, 2e1])), # distance ball - cup; shouldn't move in y-direction
R=task_args.get("R", R_default), # last joint is really unreliable for 7 dof, thus punish more
)
task = DesStateTask(spec, state_des, rew_fcn)
# Wrap the masked DesStateTask to add a bonus for the best state in the rollout
return BestStateFinalRewTask(
MaskedTask(self.spec, task, idcs),
factor=task_args.get("final_factor", 0.05 * self.max_steps),
)
def _create_deviation_task(self, task_args: dict) -> Task:
idcs = list(range(self.state_space.flat_dim - 3, self.state_space.flat_dim)) # Cartesian cup goal position
spec = EnvSpec(
self.spec.obs_space,
self.spec.act_space,
self.spec.state_space.subspace(self.spec.state_space.create_mask(idcs)),
)
# init cup goal position
state_des = goal_pos_init_sim_7dof if self._num_dof == 7 else goal_pos_init_sim_4dof
rew_fcn = QuadrErrRewFcn(
Q=task_args.get("Q_dev", np.diag([2e-1, 1e-6, 5e0])), # Cartesian distance from init cup position
R=task_args.get(
"R_dev", np.zeros((self.act_space.shape[0], self.act_space.shape[0]))
), # joint space distance from init pose, interferes with R_default from _create_main_task
)
task = DesStateTask(spec, state_des, rew_fcn)
return MaskedTask(self.spec, task, idcs)
def _adapt_model_file(self, xml_model: str, domain_param: dict) -> str:
# First replace special domain parameters
cup_scale = domain_param.pop("cup_scale", None)
rope_length = domain_param.pop("rope_length", None)
if cup_scale is not None:
# See [1, l.93-96]
xml_model = xml_model.replace("[scale_mesh]", str(cup_scale * 0.001))
xml_model = xml_model.replace("[pos_mesh]", str(0.055 - (cup_scale - 1.0) * 0.023))
xml_model = xml_model.replace("[pos_goal]", str(0.1165 + (cup_scale - 1.0) * 0.0385))
xml_model = xml_model.replace("[size_cup]", str(cup_scale * 0.038))
xml_model = xml_model.replace("[size_cup_inner]", str(cup_scale * 0.03))
if rope_length is not None:
# The rope consists of 30 capsules
xml_model = xml_model.replace("[pos_capsule]", str(rope_length / 30))
# Each joint is at the top of each capsule (therefore negative direction from center)
xml_model = xml_model.replace("[pos_capsule_joint]", str(-rope_length / 60))
# Pure visualization component
xml_model = xml_model.replace("[size_capsule_geom]", str(rope_length / 72))
# Resolve mesh directory and replace the remaining domain parameters
return super()._adapt_model_file(xml_model, domain_param)
def _mujoco_step(self, act: np.ndarray) -> dict:
assert self.act_space.contains(act, verbose=True)
# Get the desired positions and velocities for the selected joints
        qpos_des = self.qpos_des_init.copy()  # the desired trajectory is relative to self.qpos_des_init
qvel_des = np.zeros_like(qpos_des)
if self._num_dof == 4:
np.add.at(qpos_des, [1, 3], act[:2])
np.add.at(qvel_des, [1, 3], act[2:])
elif self._num_dof == 7:
np.add.at(qpos_des, [1, 3, 5], act[:3])
np.add.at(qvel_des, [1, 3, 5], act[3:])
# Compute the position and velocity errors
err_pos = qpos_des - self.state[: self._num_dof]
err_vel = qvel_des - self.state[self.model.nq : self.model.nq + self._num_dof]
# Compute the torques for the PD controller and clip them to their max values
torque = self.p_gains * err_pos + self.d_gains * err_vel
torque = self.torque_space.project_to(torque)
# Apply the torques to the robot
self.sim.data.qfrc_applied[: self._num_dof] = torque
# Call MuJoCo
try:
self.sim.step()
mjsim_crashed = False
except mujoco_py.builder.MujocoException:
# When MuJoCo recognized instabilities in the simulation, it simply kills it.
# Instead, we want the episode to end with a failure.
mjsim_crashed = True
qpos, qvel = self.sim.data.qpos.copy(), self.sim.data.qvel.copy()
ball_pos = self.sim.data.get_body_xpos("ball").copy()
cup_goal = self.sim.data.get_site_xpos("cup_goal").copy()
self.state = np.concatenate([qpos, qvel, ball_pos, cup_goal])
# If desired, check for collisions of the ball with the robot
ball_collided = self.check_ball_collisions() if self.stop_on_collision else False
# If state is out of bounds (this is normally checked by the task, but does not work because of the mask)
state_oob = False if self.state_space.contains(self.state) else True
return dict(
qpos_des=qpos_des,
qvel_des=qvel_des,
qpos=qpos[: self._num_dof],
qvel=qvel[: self._num_dof],
ball_pos=ball_pos,
cup_pos=cup_goal,
failed=mjsim_crashed or ball_collided or state_oob,
)
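    # Minimal standalone sketch (added) of the PD control law implemented above;
    # the gain and limit values here are made-up placeholders, not the
    # wam_pgains_*/wam_dgains_* constants imported at the top of this file:
    #     p_gains = np.array([200.0, 300.0, 100.0, 100.0])
    #     d_gains = np.array([7.0, 15.0, 5.0, 2.5])
    #     tau_max = np.array([150.0, 125.0, 40.0, 60.0])
    #     def pd_torque(q_des, qd_des, q, qd):
    #         tau = p_gains * (q_des - q) + d_gains * (qd_des - qd)
    #         return np.clip(tau, -tau_max, tau_max)  # stands in for torque_space.project_to()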
def check_ball_collisions(self, verbose: bool = False) -> bool:
"""
Check if an undesired collision with the ball occurs.
:param verbose: print messages on collision
:return: `True` if the ball collides with something else than the central parts of the cup
"""
for i in range(self.sim.data.ncon):
# Get current contact object
contact = self.sim.data.contact[i]
# Extract body-id and body-name of both contact geoms
body1 = self.model.geom_bodyid[contact.geom1]
body1_name = self.model.body_names[body1]
body2 = self.model.geom_bodyid[contact.geom2]
body2_name = self.model.body_names[body2]
# Evaluate if the ball collides with part of the WAM (collision bodies)
# or the connection of WAM and cup (geom_ids)
c1 = body1_name == "ball" and (
body2_name in self._collision_bodies or contact.geom2 in self._collision_geom_ids
)
c2 = body2_name == "ball" and (
body1_name in self._collision_bodies or contact.geom1 in self._collision_geom_ids
)
if c1 or c2:
if verbose:
print_cbt(
f"Undesired collision of {body1_name} and {body2_name} detected!",
"y",
)
return True
return False
def check_ball_in_cup(self, *args, verbose: bool = False):
"""
Check if the ball is in the cup.
:param verbose: print messages when ball is in the cup
:return: `True` if the ball is in the cup
"""
for i in range(self.sim.data.ncon):
# Get current contact object
contact = self.sim.data.contact[i]
# Extract body-id and body-name of both contact geoms
body1 = self.model.geom_bodyid[contact.geom1]
body1_name = self.model.body_names[body1]
body2 = self.model.geom_bodyid[contact.geom2]
body2_name = self.model.body_names[body2]
            # Evaluate if the ball is in contact with the inner part of the cup
cup_inner_id = self.model._geom_name2id["cup_inner"]
c1 = body1_name == "ball" and contact.geom2 == cup_inner_id
c2 = body2_name == "ball" and contact.geom1 == cup_inner_id
if c1 or c2:
if verbose:
print_cbt(f"The ball is in the cup at time step {self.curr_step}.", "y")
return True
return False
def observe(self, state: np.ndarray) -> np.ndarray:
# TODO: Debug print-outs, should be removed in future...
# if self._curr_step == 0:
# print_cbt(f'cup xpos: {self.sim.data.get_body_xpos("cup").copy()}', 'b') # center of frame
# print_cbt(f'cup xipos: {self.sim.data.get_body_xipos("cup").copy()}', 'b') # center of mass
# Observe the normalized time
obs = [self._curr_step / self.max_steps]
# Extract the (x, z) cartesian position of cup and ball (the robot operates in the x-z plane).
# Note: the cup_goal is the mujoco site object marking the goal position for the ball. It is not identical
# to the coordinate system origin of the rigid body object 'cup'
if self.observe_ball:
obs.extend([state[-3], state[-1]])
if self.observe_cup:
obs.extend([state[-6], state[-4]])
return np.array(obs)
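# Hedged usage sketch (added): the reset()/step() interface below comes from the
# MujocoSimEnv base class and is an assumption beyond what this file shows.
# env = WAMBallInCupSim(num_dof=7, max_steps=500)
# state = env.reset()
# for _ in range(env.max_steps):
#     state, rew, done, info = env.step(env.act_space.sample_uniform())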
| [((6412, 6463), 'os.path.join', 'osp.join', (['pyrado.MUJOCO_ASSETS_DIR', 'graph_file_name'], {}), '(pyrado.MUJOCO_ASSETS_DIR, graph_file_name)\n', (6420, 6463), True, 'import os.path as osp\n'), ((7297, 7375), 'numpy.concatenate', 'np.concatenate', (['[self.init_qpos, self.init_qvel, init_ball_pos, init_cup_goal]'], {}), '([self.init_qpos, self.init_qvel, init_ball_pos, init_cup_goal])\n', (7311, 7375), True, 'import numpy as np\n'), ((9941, 9969), 'pyrado.spaces.box.BoxSpace', 'BoxSpace', (['state_lo', 'state_up'], {}), '(state_lo, state_up)\n', (9949, 9969), False, 'from pyrado.spaces.box import BoxSpace\n'), ((10477, 10516), 'pyrado.spaces.box.BoxSpace', 'BoxSpace', (['obs_lo', 'obs_up'], {'labels': 'labels'}), '(obs_lo, obs_up, labels=labels)\n', (10485, 10516), False, 'from pyrado.spaces.box import BoxSpace\n'), ((18445, 18483), 'pyrado.tasks.desired_state.DesStateTask', 'DesStateTask', (['spec', 'state_des', 'rew_fcn'], {}), '(spec, state_des, rew_fcn)\n', (18457, 18483), False, 'from pyrado.tasks.desired_state import DesStateTask\n'), ((18500, 18533), 'pyrado.tasks.masked.MaskedTask', 'MaskedTask', (['self.spec', 'task', 'idcs'], {}), '(self.spec, task, idcs)\n', (18510, 18533), False, 'from pyrado.tasks.masked import MaskedTask\n'), ((20225, 20248), 'numpy.zeros_like', 'np.zeros_like', (['qpos_des'], {}), '(qpos_des)\n', (20238, 20248), True, 'import numpy as np\n'), ((21581, 21629), 'numpy.concatenate', 'np.concatenate', (['[qpos, qvel, ball_pos, cup_goal]'], {}), '([qpos, qvel, ball_pos, cup_goal])\n', (21595, 21629), True, 'import numpy as np\n'), ((25878, 25891), 'numpy.array', 'np.array', (['obs'], {}), '(obs)\n', (25886, 25891), True, 'import numpy as np\n'), ((5899, 5928), 'numpy.array', 'np.array', (['[0.723, 0.0, 1.168]'], {}), '([0.723, 0.0, 1.168])\n', (5907, 5928), True, 'import numpy as np\n'), ((6680, 6712), 'numpy.array', 'np.array', (['[0.0, 0.63, 0.0, 1.27]'], {}), '([0.0, 0.63, 0.0, 1.27])\n', (6688, 6712), True, 'import numpy as np\n'), ((6866, 6917), 'numpy.array', 'np.array', (['[0.0, 0.65, 0.0, 1.41, 0.0, -0.28, -1.57]'], {}), '([0.0, 0.65, 0.0, 1.41, 0.0, -0.28, -1.57])\n', (6874, 6917), True, 'import numpy as np\n'), ((7441, 7477), 'pyrado.spaces.singular.SingularStateSpace', 'SingularStateSpace', (['self._init_state'], {}), '(self._init_state)\n', (7459, 7477), False, 'from pyrado.spaces.singular import SingularStateSpace\n'), ((7962, 8000), 'pyrado.spaces.box.BoxSpace', 'BoxSpace', (['init_state_lo', 'init_state_up'], {}), '(init_state_lo, init_state_up)\n', (7970, 8000), False, 'from pyrado.spaces.box import BoxSpace\n'), ((9624, 9657), 'numpy.full', 'np.full', (['state_shape', '(-pyrado.inf)'], {}), '(state_shape, -pyrado.inf)\n', (9631, 9657), True, 'import numpy as np\n'), ((9659, 9691), 'numpy.full', 'np.full', (['state_shape', 'pyrado.inf'], {}), '(state_shape, pyrado.inf)\n', (9666, 9691), True, 'import numpy as np\n'), ((16334, 16389), 'pyrado.tasks.sequential.SequentialTasks', 'SequentialTasks', (['(main_task, dont_fail_after_succ_task)'], {}), '((main_task, dont_fail_after_succ_task))\n', (16349, 16389), False, 'from pyrado.tasks.sequential import SequentialTasks\n'), ((16410, 16443), 'pyrado.tasks.masked.MaskedTask', 'MaskedTask', (['self.spec', 'task', 'idcs'], {}), '(self.spec, task, idcs)\n', (16420, 16443), False, 'from pyrado.tasks.masked import MaskedTask\n'), ((17249, 17287), 'pyrado.tasks.desired_state.DesStateTask', 'DesStateTask', (['spec', 'state_des', 'rew_fcn'], {}), '(spec, state_des, rew_fcn)\n', (17261, 17287), False, 'from pyrado.tasks.desired_state import DesStateTask\n'), ((20292, 20328), 'numpy.add.at', 'np.add.at', (['qpos_des', '[1, 3]', 'act[:2]'], {}), '(qpos_des, [1, 3], act[:2])\n', (20301, 20328), True, 'import numpy as np\n'), ((20341, 20377), 'numpy.add.at', 'np.add.at', (['qvel_des', '[1, 3]', 'act[2:]'], {}), '(qvel_des, [1, 3], act[2:])\n', (20350, 20377), True, 'import numpy as np\n'), ((6222, 6251), 'numpy.array', 'np.array', (['[0.828, 0.0, 1.131]'], {}), '([0.828, 0.0, 1.131])\n', (6230, 6251), True, 'import numpy as np\n'), ((6335, 6389), 'pyrado.ValueErr', 'pyrado.ValueErr', ([], {'given': 'num_dof', 'eq_constraint': '"""4 or 7"""'}), "(given=num_dof, eq_constraint='4 or 7')\n", (6350, 6389), False, 'import pyrado\n'), ((13643, 13697), 'pyrado.ValueErr', 'pyrado.ValueErr', ([], {'given': 'num_dof', 'eq_constraint': '"""4 or 7"""'}), "(given=num_dof, eq_constraint='4 or 7')\n", (13658, 13697), False, 'import pyrado\n'), ((15658, 15750), 'pyrado.tasks.condition_only.ConditionOnlyTask', 'ConditionOnlyTask', (['spec'], {'condition_fcn': 'self.check_ball_in_cup', 'is_success_condition': '(True)'}), '(spec, condition_fcn=self.check_ball_in_cup,\n is_success_condition=True)\n', (15675, 15750), False, 'from pyrado.tasks.condition_only import ConditionOnlyTask\n'), ((16852, 16887), 'numpy.diag', 'np.diag', (['[0, 0, 1, 0.01, 0.01, 0.1]'], {}), '([0, 0, 1, 0.01, 0.01, 0.1])\n', (16859, 16887), True, 'import numpy as np\n'), ((16916, 16943), 'numpy.diag', 'np.diag', (['[0, 0, 0.01, 0.01]'], {}), '([0, 0, 0.01, 0.01])\n', (16923, 16943), True, 'import numpy as np\n'), ((17439, 17472), 'pyrado.tasks.masked.MaskedTask', 'MaskedTask', (['self.spec', 'task', 'idcs'], {}), '(self.spec, task, idcs)\n', (17449, 17472), False, 'from pyrado.tasks.masked import MaskedTask\n'), ((20423, 20462), 'numpy.add.at', 'np.add.at', (['qpos_des', '[1, 3, 5]', 'act[:3]'], {}), '(qpos_des, [1, 3, 5], act[:3])\n', (20432, 20462), True, 'import numpy as np\n'), ((20475, 20514), 'numpy.add.at', 'np.add.at', (['qvel_des', '[1, 3, 5]', 'act[3:]'], {}), '(qvel_des, [1, 3, 5], act[3:])\n', (20484, 20514), True, 'import numpy as np\n'), ((7697, 7740), 'numpy.array', 'np.array', (['[0.1, 1, 0.5, 1.0, 0.1, 1.0, 1.0]'], {}), '([0.1, 1, 0.5, 1.0, 0.1, 1.0, 1.0])\n', (7705, 7740), True, 'import numpy as np\n'), ((7870, 7913), 'numpy.array', 'np.array', (['[0.1, 1, 0.5, 1.0, 0.1, 1.0, 1.0]'], {}), '([0.1, 1, 0.5, 1.0, 0.1, 1.0, 1.0])\n', (7878, 7913), True, 'import numpy as np\n'), ((9562, 9573), 'numpy.empty', 'np.empty', (['(3)'], {}), '(3)\n', (9570, 9573), True, 'import numpy as np\n'), ((9575, 9586), 'numpy.empty', 'np.empty', (['(3)'], {}), '(3)\n', (9583, 9586), True, 'import numpy as np\n'), ((15848, 15882), 'pyrado.tasks.final_reward.FinalRewMode', 'FinalRewMode', ([], {'always_positive': '(True)'}), '(always_positive=True)\n', (15860, 15882), False, 'from pyrado.tasks.final_reward import BestStateFinalRewTask, FinalRewTask, FinalRewMode\n'), ((16100, 16119), 'pyrado.tasks.reward_functions.ZeroPerStepRewFcn', 'ZeroPerStepRewFcn', ([], {}), '()\n', (16117, 16119), False, 'from pyrado.tasks.reward_functions import ZeroPerStepRewFcn, ExpQuadrErrRewFcn, QuadrErrRewFcn\n'), ((16143, 16177), 'pyrado.tasks.final_reward.FinalRewMode', 'FinalRewMode', ([], {'always_negative': '(True)'}), '(always_negative=True)\n', (16155, 16177), False, 'from pyrado.tasks.final_reward import BestStateFinalRewTask, FinalRewTask, FinalRewMode\n'), ((18127, 18153), 'numpy.diag', 'np.diag', (['[0.2, 1e-06, 5.0]'], {}), '([0.2, 1e-06, 5.0])\n', (18134, 18153), True, 'import numpy as np\n'), ((18255, 18315), 'numpy.zeros', 'np.zeros', (['(self.act_space.shape[0], self.act_space.shape[0])'], {}), '((self.act_space.shape[0], self.act_space.shape[0]))\n', (18263, 18315), True, 'import numpy as np\n'), ((23540, 23625), 'pyrado.utils.input_output.print_cbt', 'print_cbt', (['f"""Undesired collision of {body1_name} and {body2_name} detected!"""', '"""y"""'], {}), "(f'Undesired collision of {body1_name} and {body2_name} detected!',\n 'y')\n", (23549, 23625), False, 'from pyrado.utils.input_output import print_cbt\n'), ((24834, 24906), 'pyrado.utils.input_output.print_cbt', 'print_cbt', (['f"""The ball is in the cup at time step {self.curr_step}."""', '"""y"""'], {}), "(f'The ball is in the cup at time step {self.curr_step}.', 'y')\n", (24843, 24906), False, 'from pyrado.utils.input_output import print_cbt\n'), ((17022, 17051), 'numpy.diag', 'np.diag', (['[20.0, 0.0001, 20.0]'], {}), '([20.0, 0.0001, 20.0])\n', (17029, 17051), True, 'import numpy as np\n')]
ToninoTarsi/pyRasp | pyRasp.py | a46bb1dc38c7547b60e24189ecf34310da770042 | # pyRasp
# Copyright (c) Tonino Tarsi 2020. Licensed under MIT.
# requirements:
# Python 3
# pip install pyyaml
# pip install requests
# pip install f90nml
from downloadGFSA import downloadGFSA
from prepare_wps import prepare_wps
from ungrib import ungrib
from metgrid import metgrid
from prepare_wrf import prepare_wrf
from real import real
from wrf import wrf
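# The calls below run the standard WPS/WRF preprocessing chain in order:
# GFS download -> WPS namelist -> ungrib -> metgrid -> WRF namelist -> real -> wrf,
# with each step consuming the previous step's output.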
result = downloadGFSA(True)
prepare_wps(result)
ungrib()
metgrid()
prepare_wrf(result)
real()
wrf()
| [((375, 393), 'downloadGFSA.downloadGFSA', 'downloadGFSA', (['(True)'], {}), '(True)\n', (387, 393), False, 'from downloadGFSA import downloadGFSA\n'), ((394, 413), 'prepare_wps.prepare_wps', 'prepare_wps', (['result'], {}), '(result)\n', (405, 413), False, 'from prepare_wps import prepare_wps\n'), ((414, 422), 'ungrib.ungrib', 'ungrib', ([], {}), '()\n', (420, 422), False, 'from ungrib import ungrib\n'), ((423, 432), 'metgrid.metgrid', 'metgrid', ([], {}), '()\n', (430, 432), False, 'from metgrid import metgrid\n'), ((433, 452), 'prepare_wrf.prepare_wrf', 'prepare_wrf', (['result'], {}), '(result)\n', (444, 452), False, 'from prepare_wrf import prepare_wrf\n'), ((453, 459), 'real.real', 'real', ([], {}), '()\n', (457, 459), False, 'from real import real\n'), ((460, 465), 'wrf.wrf', 'wrf', ([], {}), '()\n', (463, 465), False, 'from wrf import wrf\n')] |
namuan/crypto-rider | app/strategies/ema_bb_alligator_strategy.py | f5b47ada60a7cef07e66609e2e92993619c6bfbe | import pandas as pd
import ta
from app.common import reshape_data
from app.strategies.base_strategy import BaseStrategy
pd.set_option("display.max_columns", None)
pd.set_option("display.width", None)
class EMABBAlligatorStrategy(BaseStrategy):
BUY_SIGNAL = "buy_signal"
SELL_SIGNAL = "sell_signal"
def calculate_indicators(self):
        df = self.load_df(limit=1000)
        # Accessing these columns triggers the lazy (stockstats-style) computation
        # of the 3-period EMA and the Bollinger middle band on the frame.
        _ = df["close_3_ema"]
        _ = df["boll"]
ao = ta.momentum.AwesomeOscillatorIndicator(high=df["high"], low=df["low"])
df["AO"] = ao.ao()
return df
def can_sell(self, df):
prev_candle = self.candle(df)
last_ema = prev_candle["close_3_ema"]
last_bb = prev_candle["boll"]
return [
last_ema < last_bb,
(self.candle(df, rewind=-2)["AO"] > 0)
& (self.candle(df, rewind=-1)["AO"] < 0),
prev_candle["volume"] > 0,
]
def can_buy(self, df):
prev_candle = self.candle(df)
last_ema = prev_candle["close_3_ema"]
last_bb = prev_candle["boll"]
return [
last_ema > last_bb,
(self.candle(df, rewind=-2)["AO"] < 0)
& (self.candle(df, rewind=-1)["AO"] > 0),
prev_candle["volume"] > 0,
]
def alert_message(self, df):
prev_candle = self.candle(df)
last_close = prev_candle["close"]
last_ao = prev_candle["AO"]
        return "Close: {:.2f}, Awesome Oscillator value: {:.2f}".format(
            last_close, last_ao
        )
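# Hedged illustration (added) of the AO zero-cross test used in can_buy()/can_sell()
# above, written against the same dataframe; exact row alignment depends on how
# BaseStrategy.candle() indexes bars, so treat this as a sketch:
# ao = df["AO"]
# bullish_flip = (ao.shift(2) < 0) & (ao.shift(1) > 0)   # AO crossed above zero
# bearish_flip = (ao.shift(2) > 0) & (ao.shift(1) < 0)   # AO crossed below zero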
| [((122, 164), 'pandas.set_option', 'pd.set_option', (['"""display.max_columns"""', 'None'], {}), "('display.max_columns', None)\n", (135, 164), True, 'import pandas as pd\n'), ((165, 201), 'pandas.set_option', 'pd.set_option', (['"""display.width"""', 'None'], {}), "('display.width', None)\n", (178, 201), True, 'import pandas as pd\n'), ((451, 521), 'ta.momentum.AwesomeOscillatorIndicator', 'ta.momentum.AwesomeOscillatorIndicator', ([], {'high': "df['high']", 'low': "df['low']"}), "(high=df['high'], low=df['low'])\n", (489, 521), False, 'import ta\n')] |
TomasBelskis/PythonAutomation | BasicScripts/basics.py | dd2e30abb214e37d84a8952deb834074abdc84a2 | # Python Basics
# String concatenation
added_strings = str(32) + "_342"
# Getting input
input_from_user = input()
# Basic print function
print(input_from_user)
# Mixing boolean and comparison operations
if (4 < 5) and (5 < 6):
print("True")
# Basic if & elif flow
name = 'Alice'  # sample values so this snippet runs standalone
age = 30
if name == 'Alice':
print('Hi, Alice.')
elif age < 12:
print("You are not Alice, kiddo.")
elif age > 2000:
print('Unlike you, Alice is not an undead, immortal vampire.')
elif age > 100:
print('You are not Alice, grannie.')
# Loops in Python 3
spam = 0
while spam < 5:
print('Spam, spam!')
spam = spam + 1
# Access loop
while True:
print('Who are you?')
name = input()
if name != 'Joe':
continue
print('Hello, Joe. What is the password? (It is a fish.)')
password = input()
    if password == 'swordfish':
break
print('Access granted.')
# For loops using range function
print("My name is")
for i in range(5):
print('Jimmy Five Times (' + str(i) + ')')
# Using starting range
for i in range(12, 16):
print(i)
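# Using a step argument
for i in range(0, 10, 2):
    print(i)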
# Importing modules
import random
for i in range(5):
print(random.randint(1, 10))
# Exiting a python program
import sys
while True:
print('Type exit to exit.')
response = input()
if response == 'exit':
sys.exit()
print('You typed ' + response + '.')
| [] |
wilcoln/klazor | env.example.py | 8f3c40a03a7e61c07eceb6cdbe4d1bb05693727e | DATABASE_OPTIONS = {
'database': 'klazor',
'user': 'root',
'password': '',
'charset': 'utf8mb4',
}
HOSTS = ['127.0.0.1', '67.209.115.211']
| [] |
lzantal/djskell | misc/_local_settings.py | cef71bab8a4dd163b632128666c315e228cc8f0f | """
Django settings.
Generated by 'django-admin startproject' using Django 2.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
#DEBUG = False
DEBUG = True
SERVE_STATIC = DEBUG
ALLOWED_HOSTS = []
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.oracle'
#'ENGINE': 'django.db.backends.mysql',
#'ENGINE': 'django.db.backends.sqlite3',
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'mydatabase',
'USER': 'mydatabaseuser',
'PASSWORD': 'mypassword',
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
| [] |
pedrohd21/Agenda-Django | contacts/forms.py | c48a90d76094523fd2060ff735faefbf3c2f808d | from django import forms
from .models import Contact
class ContactForm(forms.ModelForm):
class Meta:
model = Contact
fields = ('name', 'number', 'email', 'category', 'description')
| [] |
ziegenberg/awx | awx/api/urls/ad_hoc_command.py | a3e29317c5d4220fffe28370ec73c73802255246 | # Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from django.urls import re_path
from awx.api.views import (
AdHocCommandList,
AdHocCommandDetail,
AdHocCommandCancel,
AdHocCommandRelaunch,
AdHocCommandAdHocCommandEventsList,
AdHocCommandActivityStreamList,
AdHocCommandNotificationsList,
AdHocCommandStdout,
)
urls = [
re_path(r'^$', AdHocCommandList.as_view(), name='ad_hoc_command_list'),
re_path(r'^(?P<pk>[0-9]+)/$', AdHocCommandDetail.as_view(), name='ad_hoc_command_detail'),
re_path(r'^(?P<pk>[0-9]+)/cancel/$', AdHocCommandCancel.as_view(), name='ad_hoc_command_cancel'),
re_path(r'^(?P<pk>[0-9]+)/relaunch/$', AdHocCommandRelaunch.as_view(), name='ad_hoc_command_relaunch'),
re_path(r'^(?P<pk>[0-9]+)/events/$', AdHocCommandAdHocCommandEventsList.as_view(), name='ad_hoc_command_ad_hoc_command_events_list'),
re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', AdHocCommandActivityStreamList.as_view(), name='ad_hoc_command_activity_stream_list'),
re_path(r'^(?P<pk>[0-9]+)/notifications/$', AdHocCommandNotificationsList.as_view(), name='ad_hoc_command_notifications_list'),
re_path(r'^(?P<pk>[0-9]+)/stdout/$', AdHocCommandStdout.as_view(), name='ad_hoc_command_stdout'),
]
__all__ = ['urls']
| [((383, 409), 'awx.api.views.AdHocCommandList.as_view', 'AdHocCommandList.as_view', ([], {}), '()\n', (407, 409), False, 'from awx.api.views import AdHocCommandList, AdHocCommandDetail, AdHocCommandCancel, AdHocCommandRelaunch, AdHocCommandAdHocCommandEventsList, AdHocCommandActivityStreamList, AdHocCommandNotificationsList, AdHocCommandStdout\n'), ((474, 502), 'awx.api.views.AdHocCommandDetail.as_view', 'AdHocCommandDetail.as_view', ([], {}), '()\n', (500, 502), False, 'from awx.api.views import AdHocCommandList, AdHocCommandDetail, AdHocCommandCancel, AdHocCommandRelaunch, AdHocCommandAdHocCommandEventsList, AdHocCommandActivityStreamList, AdHocCommandNotificationsList, AdHocCommandStdout\n'), ((576, 604), 'awx.api.views.AdHocCommandCancel.as_view', 'AdHocCommandCancel.as_view', ([], {}), '()\n', (602, 604), False, 'from awx.api.views import AdHocCommandList, AdHocCommandDetail, AdHocCommandCancel, AdHocCommandRelaunch, AdHocCommandAdHocCommandEventsList, AdHocCommandActivityStreamList, AdHocCommandNotificationsList, AdHocCommandStdout\n'), ((680, 710), 'awx.api.views.AdHocCommandRelaunch.as_view', 'AdHocCommandRelaunch.as_view', ([], {}), '()\n', (708, 710), False, 'from awx.api.views import AdHocCommandList, AdHocCommandDetail, AdHocCommandCancel, AdHocCommandRelaunch, AdHocCommandAdHocCommandEventsList, AdHocCommandActivityStreamList, AdHocCommandNotificationsList, AdHocCommandStdout\n'), ((786, 830), 'awx.api.views.AdHocCommandAdHocCommandEventsList.as_view', 'AdHocCommandAdHocCommandEventsList.as_view', ([], {}), '()\n', (828, 830), False, 'from awx.api.views import AdHocCommandList, AdHocCommandDetail, AdHocCommandCancel, AdHocCommandRelaunch, AdHocCommandAdHocCommandEventsList, AdHocCommandActivityStreamList, AdHocCommandNotificationsList, AdHocCommandStdout\n'), ((933, 973), 'awx.api.views.AdHocCommandActivityStreamList.as_view', 'AdHocCommandActivityStreamList.as_view', ([], {}), '()\n', (971, 973), False, 'from awx.api.views import AdHocCommandList, AdHocCommandDetail, AdHocCommandCancel, AdHocCommandRelaunch, AdHocCommandAdHocCommandEventsList, AdHocCommandActivityStreamList, AdHocCommandNotificationsList, AdHocCommandStdout\n'), ((1068, 1107), 'awx.api.views.AdHocCommandNotificationsList.as_view', 'AdHocCommandNotificationsList.as_view', ([], {}), '()\n', (1105, 1107), False, 'from awx.api.views import AdHocCommandList, AdHocCommandDetail, AdHocCommandCancel, AdHocCommandRelaunch, AdHocCommandAdHocCommandEventsList, AdHocCommandActivityStreamList, AdHocCommandNotificationsList, AdHocCommandStdout\n'), ((1193, 1221), 'awx.api.views.AdHocCommandStdout.as_view', 'AdHocCommandStdout.as_view', ([], {}), '()\n', (1219, 1221), False, 'from awx.api.views import AdHocCommandList, AdHocCommandDetail, AdHocCommandCancel, AdHocCommandRelaunch, AdHocCommandAdHocCommandEventsList, AdHocCommandActivityStreamList, AdHocCommandNotificationsList, AdHocCommandStdout\n')] |
icexmoon/python-learning-notes | note5/package_test5.py | 838c91d896404290b89992b6517be1b6a79df41f | #test.py
from time_tools import *
# print(compareTimestamp(111,222))
time.showNowTime()
# now time is XX:XX:XX | [] |
fabiommendes/fgarcade | fgarcade/sprites.py | 2bfdb3ca18cb8260048ccfc9e84524987c322221 | import arcade
from arcade import FACE_RIGHT, FACE_DOWN, FACE_UP, FACE_LEFT
class AnimatedWalkingSprite(arcade.Sprite):
def __init__(self, scale: float = 1,
image_x: float = 0, image_y: float = 0,
center_x: float = 0, center_y: float = 0, *,
stand_left, stand_right, left, right, up, down, step=20):
super().__init__(scale=scale, image_x=image_x, image_y=image_y,
center_x=center_x, center_y=center_y)
self.state = FACE_RIGHT
self.stand_right_texture = stand_right
self.stand_left_texture = stand_left
self.walk_left_textures = left
self.walk_right_textures = right
self.walk_up_textures = up
self.walk_down_textures = down
self.cur_texture_index = 0
self.texture_change_distance = step
self.last_texture_change_center_x = 0
self.last_texture_change_center_y = 0
self._update_direction(FACE_RIGHT, self.stand_right_texture)
self.textures = [self._texture]
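    # Hedged construction sketch (added; texture file names are placeholders):
    #     player = AnimatedWalkingSprite(
    #         scale=0.5,
    #         stand_left=arcade.load_texture("stand_left.png"),
    #         stand_right=arcade.load_texture("stand_right.png"),
    #         left=[arcade.load_texture("walk_left_0.png")],
    #         right=[arcade.load_texture("walk_right_0.png")],
    #         up=[arcade.load_texture("walk_up_0.png")],
    #         down=[arcade.load_texture("walk_down_0.png")],
    #     )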
def _update_direction(self, state, texture):
self.last_texture_change_center_x = self.center_x
self.last_texture_change_center_y = self.center_y
self.state = state
self.cur_texture_index = 0
self._texture = texture
    def _rotate(self, delta, textures):
        # Advance to the next animation frame once the sprite has moved far enough
        # since the last frame change ('textures' used to shadow the builtin 'list').
        if abs(delta) >= self.texture_change_distance:
            self.cur_texture_index += 1
            self.last_texture_change_center_x = self.center_x
            self.last_texture_change_center_y = self.center_y
            self._texture = textures[self.cur_texture_index % len(textures)]
def update_animation(self):
tol = 1.
# Falling
if self.change_y <= -tol:
if self.state != FACE_DOWN:
self._update_direction(FACE_DOWN, self.walk_down_textures[0])
else:
self._rotate(self.center_y - self.last_texture_change_center_y,
self.walk_down_textures)
# Jumping
elif self.change_y >= tol:
if self.state != FACE_UP:
self._update_direction(FACE_UP, self.walk_up_textures[0])
else:
self._rotate(self.center_y - self.last_texture_change_center_y,
self.walk_up_textures)
# Going left
elif self.change_x <= -tol:
if self.state != FACE_LEFT:
self._update_direction(FACE_LEFT, self.stand_left_texture)
else:
self._rotate(self.center_x - self.last_texture_change_center_x,
self.walk_left_textures)
# Going right
elif self.change_x >= tol:
if self.state != FACE_RIGHT:
self._update_direction(FACE_RIGHT, self.stand_right_texture)
else:
self._rotate(self.center_x - self.last_texture_change_center_x,
self.walk_right_textures)
elif abs(self.change_x) < tol and self.state == FACE_DOWN:
self._update_direction(FACE_RIGHT, self.stand_right_texture)
self.textures[0] = self._texture
self.width = self._texture.width * self.scale
self.height = self._texture.height * self.scale | [] |
gh-schen/SiriusEpiClassifier | src/mafUtility.py | 617e0243a95fe1014acfeca25ff6f6ba617d366f | from numpy.core.fromnumeric import transpose
from sklearn import linear_model
from scipy.special import logit
from scipy import stats
from copy import deepcopy
from numpy import random, concatenate, quantile, matmul, transpose
import logging
class singleRegModel():
"""
data struct for running a single regression test
"""
def __init__(self, regressor):
self.regressor = regressor
self.mmodel = None
# params
self.quantile_limit_ = 0.95
def train_binary(self, x_train, y_train):
self.mmodel = deepcopy(self.regressor)
self.mmodel.fit(x_train, y_train)
def train_quant(self, init_x, follow_x, init_y, follow_iter):
self.train_binary(init_x, init_y)
if follow_x is None:
logging.warning("No samples have missing MAF - no follow up training")
return
for i in range(follow_iter):
init_preds = self.mmodel.predict(init_x)
upper_limit = quantile(init_preds, self.quantile_limit_)
follow_y = self.mmodel.predict(follow_x)
follow_y[follow_y > upper_limit] = upper_limit
x_merge = concatenate((init_x, follow_x))
y_merge = concatenate((init_y, follow_y))
self.mmodel = deepcopy(self.regressor)
self.mmodel.fit(x_merge, y_merge)
def predict_prob(self, input_x):
preds = matmul(input_x, transpose(self.mmodel.coef_)) + self.mmodel.intercept_
probs = preds[:,0]
return probs
def predict_quant(self, input_x):
#preds = matmul(input_x, transpose(self.mmodel.coef_)) + self.mmodel.intercept_
#print(preds, self.mmodel.predict(input_x))
#probs = preds[:,0]
#return probs
return self.mmodel.predict(input_x)
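# Hedged usage sketch (added): init_x/init_y hold samples with observed MAF,
# follow_x the samples whose MAF is missing; each follow-up iteration imputes
# them with the current model, caps the imputations at the 95th percentile of
# the in-sample predictions (quantile_limit_), and refits on the pooled data.
# srm = singleRegModel(linear_model.Ridge(alpha=1.0))
# srm.train_quant(init_x, follow_x, init_y, follow_iter=5)
# preds = srm.predict_quant(test_x)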
class predOutcome():
"""
store output for prediction
"""
def __init__(self):
self.true_y = None
self.test_y = None
self.train_ys = [] # with CV training can have multiple results
        self.cancer_status = None # binary: 0 for normal and 1 for cancer | [((557, 581), 'copy.deepcopy', 'deepcopy', (['self.regressor'], {}), '(self.regressor)\n', (565, 581), False, 'from copy import deepcopy\n'), ((775, 845), 'logging.warning', 'logging.warning', (['"No samples have missing MAF - no follow up training"'], {}), "('No samples have missing MAF - no follow up training')\n", (790, 845), False, 'import logging\n'), ((982, 1024), 'numpy.quantile', 'quantile', (['init_preds', 'self.quantile_limit_'], {}), '(init_preds, self.quantile_limit_)\n', (990, 1024), False, 'from numpy import random, concatenate, quantile, matmul, transpose\n'), ((1160, 1191), 'numpy.concatenate', 'concatenate', (['(init_x, follow_x)'], {}), '((init_x, follow_x))\n', (1171, 1191), False, 'from numpy import random, concatenate, quantile, matmul, transpose\n'), ((1214, 1245), 'numpy.concatenate', 'concatenate', (['(init_y, follow_y)'], {}), '((init_y, follow_y))\n', (1225, 1245), False, 'from numpy import random, concatenate, quantile, matmul, transpose\n'), ((1273, 1297), 'copy.deepcopy', 'deepcopy', (['self.regressor'], {}), '(self.regressor)\n', (1281, 1297), False, 'from copy import deepcopy\n'), ((1415, 1443), 'numpy.transpose', 'transpose', (['self.mmodel.coef_'], {}), '(self.mmodel.coef_)\n', (1424, 1443), False, 'from numpy import random, concatenate, quantile, matmul, transpose\n')]
hanyas/sds | examples/linreg.py | 3c195fb9cbd88a9284287d62c0eacb6afc4598a7 | import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from sklearn.linear_model import ARDRegression, LinearRegression
# Parameters of the example
np.random.seed(0)
n_samples, n_features = 100, 100
# Create Gaussian data
X = np.random.randn(n_samples, n_features)
# Create weights with a precision lambda_ of 4.
lambda_ = 4.
w = np.zeros(n_features)
# Only keep 10 weights of interest
relevant_features = np.random.randint(0, n_features, 10)
for i in relevant_features:
w[i] = stats.norm.rvs(loc=0, scale=1. / np.sqrt(lambda_))
# Create noise with a precision alpha of 50.
alpha_ = 50.
noise = stats.norm.rvs(loc=0, scale=1. / np.sqrt(alpha_), size=n_samples)
# Create the target
y = np.dot(X, w) + noise
clf = ARDRegression(fit_intercept=False, n_iter=1000)
clf.fit(X, y)
ols = LinearRegression(fit_intercept=False)
ols.fit(X, y)
from copy import deepcopy
from sds.distributions.lingauss import SingleOutputLinearGaussianWithKnownPrecision
from sds.distributions.lingauss import SingleOutputLinearGaussianWithKnownMean
from sds.distributions.gaussian import GaussianWithPrecision
from sds.distributions.gaussian import GaussianWithKnownMeanAndDiagonalPrecision
from sds.distributions.gamma import Gamma
likelihood_precision_prior = Gamma(dim=1, alphas=np.ones((1, )),
betas=1e-6 * np.ones((1, )))
parameter_precision_prior = Gamma(dim=n_features, alphas=np.ones((n_features, )),
betas=1e-6 * np.ones((n_features, )))
likelihood_precision_posterior = deepcopy(likelihood_precision_prior)
parameter_precision_posterior = deepcopy(parameter_precision_prior)
parameter_posterior = None
for i in range(100):
# parameter posterior
alphas = parameter_precision_posterior.mean()
parameter_prior = GaussianWithPrecision(dim=n_features,
mu=np.zeros((n_features, )),
lmbda=np.diag(alphas))
parameter_posterior = deepcopy(parameter_prior)
beta = likelihood_precision_posterior.mean()
likelihood_known_precision = SingleOutputLinearGaussianWithKnownPrecision(column_dim=n_features,
lmbda=beta,
affine=False)
stats = likelihood_known_precision.statistics(X, y)
parameter_posterior.nat_param = parameter_prior.nat_param + stats
# likelihood precision posterior
param = parameter_posterior.mean()
likelihood_known_mean = SingleOutputLinearGaussianWithKnownMean(column_dim=n_features,
W=param, affine=False)
stats = likelihood_known_mean.statistics(X, y)
likelihood_precision_posterior.nat_param = likelihood_precision_prior.nat_param + stats
# parameter precision posterior
parameter_likelihood = GaussianWithKnownMeanAndDiagonalPrecision(dim=n_features)
param = parameter_posterior.mean()
stats = parameter_likelihood.statistics(param)
parameter_precision_posterior.nat_param = parameter_precision_prior.nat_param + stats
our_ard = parameter_posterior.mode()
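# Quick sanity check (added; not in the original script): the ARD posterior mode
# should place its large weights on (roughly) the 10 planted features.
# print(sorted(set(relevant_features)), np.flatnonzero(np.abs(our_ard) > 1e-1))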
from sds.distributions.composite import MatrixNormalGamma
from sds.distributions.lingauss import LinearGaussianWithDiagonalPrecision
M = np.zeros((1, n_features))
K = 1e-16 * np.eye(n_features)
alphas = 1e-16 * np.ones((1, ))
betas = 1e-16 * np.ones((1, ))
prior = MatrixNormalGamma(column_dim=n_features, row_dim=1,
M=M, K=K, alphas=alphas, betas=betas)
posterior = deepcopy(prior)
likelihood = LinearGaussianWithDiagonalPrecision(column_dim=n_features,
row_dim=1,
affine=False)
stats = likelihood.statistics(X, np.atleast_2d(y).T)
posterior.nat_param = prior.nat_param + stats
our_ols = posterior.mode()[0]
plt.figure(figsize=(6, 5))
plt.title("Weights of the model")
plt.plot(w, color='orange', linestyle='-', linewidth=2, label="Ground truth")
plt.plot(clf.coef_, color='darkblue', linestyle='-', linewidth=2, label="Sklearn ARD")
plt.plot(our_ard, color='red', linestyle='-', linewidth=2, label="Our ARD")
# plt.plot(ols.coef_, color='yellowgreen', linestyle=':', linewidth=2, label="Sklearn OLS")
# plt.plot(our_ols.flatten(), color='cyan', linestyle='-', linewidth=2, label="Our OLS")
plt.xlabel("Features")
plt.ylabel("Values of the weights")
plt.legend(loc=1)
plt.show()
| [((170, 187), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (184, 187), True, 'import numpy as np\n'), ((248, 286), 'numpy.random.randn', 'np.random.randn', (['n_samples', 'n_features'], {}), '(n_samples, n_features)\n', (263, 286), True, 'import numpy as np\n'), ((352, 372), 'numpy.zeros', 'np.zeros', (['n_features'], {}), '(n_features)\n', (360, 372), True, 'import numpy as np\n'), ((428, 464), 'numpy.random.randint', 'np.random.randint', (['(0)', 'n_features', '(10)'], {}), '(0, n_features, 10)\n', (445, 464), True, 'import numpy as np\n'), ((740, 787), 'sklearn.linear_model.ARDRegression', 'ARDRegression', ([], {'fit_intercept': '(False)', 'n_iter': '(1000)'}), '(fit_intercept=False, n_iter=1000)\n', (753, 787), False, 'from sklearn.linear_model import ARDRegression, LinearRegression\n'), ((809, 846), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {'fit_intercept': '(False)'}), '(fit_intercept=False)\n', (825, 846), False, 'from sklearn.linear_model import ARDRegression, LinearRegression\n'), ((1555, 1591), 'copy.deepcopy', 'deepcopy', (['likelihood_precision_prior'], {}), '(likelihood_precision_prior)\n', (1563, 1591), False, 'from copy import deepcopy\n'), ((1624, 1659), 'copy.deepcopy', 'deepcopy', (['parameter_precision_prior'], {}), '(parameter_precision_prior)\n', (1632, 1659), False, 'from copy import deepcopy\n'), ((3380, 3405), 'numpy.zeros', 'np.zeros', (['(1, n_features)'], {}), '((1, n_features))\n', (3388, 3405), True, 'import numpy as np\n'), ((3509, 3602), 'sds.distributions.composite.MatrixNormalGamma', 'MatrixNormalGamma', ([], {'column_dim': 'n_features', 'row_dim': '(1)', 'M': 'M', 'K': 'K', 'alphas': 'alphas', 'betas': 'betas'}), '(column_dim=n_features, row_dim=1, M=M, K=K, alphas=alphas,\n betas=betas)\n', (3526, 3602), False, 'from sds.distributions.composite import MatrixNormalGamma\n'), ((3638, 3653), 'copy.deepcopy', 'deepcopy', (['prior'], {}), '(prior)\n', (3646, 3653), False, 'from copy import deepcopy\n'), ((3667, 3754), 'sds.distributions.lingauss.LinearGaussianWithDiagonalPrecision', 'LinearGaussianWithDiagonalPrecision', ([], {'column_dim': 'n_features', 'row_dim': '(1)', 'affine': '(False)'}), '(column_dim=n_features, row_dim=1,\n affine=False)\n', (3702, 3754), False, 'from sds.distributions.lingauss import LinearGaussianWithDiagonalPrecision\n'), ((3980, 4006), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 5)'}), '(figsize=(6, 5))\n', (3990, 4006), True, 'import matplotlib.pyplot as plt\n'), ((4007, 4040), 'matplotlib.pyplot.title', 'plt.title', (['"""Weights of the model"""'], {}), "('Weights of the model')\n", (4016, 4040), True, 'import matplotlib.pyplot as plt\n'), ((4041, 4118), 'matplotlib.pyplot.plot', 'plt.plot', (['w'], {'color': '"""orange"""', 'linestyle': '"""-"""', 'linewidth': '(2)', 'label': '"""Ground truth"""'}), "(w, color='orange', linestyle='-', linewidth=2, label='Ground truth')\n", (4049, 4118), True, 'import matplotlib.pyplot as plt\n'), ((4119, 4210), 'matplotlib.pyplot.plot', 'plt.plot', (['clf.coef_'], {'color': '"""darkblue"""', 'linestyle': '"""-"""', 'linewidth': '(2)', 'label': '"""Sklearn ARD"""'}), "(clf.coef_, color='darkblue', linestyle='-', linewidth=2, label=\n 'Sklearn ARD')\n", (4127, 4210), True, 'import matplotlib.pyplot as plt\n'), ((4206, 4281), 'matplotlib.pyplot.plot', 'plt.plot', (['our_ard'], {'color': '"""red"""', 'linestyle': '"""-"""', 'linewidth': '(2)', 'label': '"""Our ARD"""'}), "(our_ard, color='red', linestyle='-', linewidth=2, label='Our ARD')\n", (4214, 4281), True, 'import matplotlib.pyplot as plt\n'), ((4464, 4486), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Features"""'], {}), "('Features')\n", (4474, 4486), True, 'import matplotlib.pyplot as plt\n'), ((4487, 4522), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Values of the weights"""'], {}), "('Values of the weights')\n", (4497, 4522), True, 'import matplotlib.pyplot as plt\n'), ((4523, 4540), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(1)'}), '(loc=1)\n', (4533, 4540), True, 'import matplotlib.pyplot as plt\n'), ((4542, 4552), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4550, 4552), True, 'import matplotlib.pyplot as plt\n'), ((712, 724), 'numpy.dot', 'np.dot', (['X', 'w'], {}), '(X, w)\n', (718, 724), True, 'import numpy as np\n'), ((2011, 2036), 'copy.deepcopy', 'deepcopy', (['parameter_prior'], {}), '(parameter_prior)\n', (2019, 2036), False, 'from copy import deepcopy\n'), ((2120, 2218), 'sds.distributions.lingauss.SingleOutputLinearGaussianWithKnownPrecision', 'SingleOutputLinearGaussianWithKnownPrecision', ([], {'column_dim': 'n_features', 'lmbda': 'beta', 'affine': '(False)'}), '(column_dim=n_features, lmbda=\n beta, affine=False)\n', (2164, 2218), False, 'from sds.distributions.lingauss import SingleOutputLinearGaussianWithKnownPrecision\n'), ((2602, 2691), 'sds.distributions.lingauss.SingleOutputLinearGaussianWithKnownMean', 'SingleOutputLinearGaussianWithKnownMean', ([], {'column_dim': 'n_features', 'W': 'param', 'affine': '(False)'}), '(column_dim=n_features, W=param,\n affine=False)\n', (2641, 2691), False, 'from sds.distributions.lingauss import SingleOutputLinearGaussianWithKnownMean\n'), ((2964, 3021), 'sds.distributions.gaussian.GaussianWithKnownMeanAndDiagonalPrecision', 'GaussianWithKnownMeanAndDiagonalPrecision', ([], {'dim': 'n_features'}), '(dim=n_features)\n', (3005, 3021), False, 'from sds.distributions.gaussian import GaussianWithKnownMeanAndDiagonalPrecision\n'), ((3418, 3436), 'numpy.eye', 'np.eye', (['n_features'], {}), '(n_features)\n', (3424, 3436), True, 'import numpy as np\n'), ((3454, 3467), 'numpy.ones', 'np.ones', (['(1,)'], {}), '((1,))\n', (3461, 3467), True, 'import numpy as np\n'), ((3485, 3498), 'numpy.ones', 'np.ones', (['(1,)'], {}), '((1,))\n', (3492, 3498), True, 'import numpy as np\n'), ((1286, 1299), 'numpy.ones', 'np.ones', (['(1,)'], {}), '((1,))\n', (1293, 1299), True, 'import numpy as np\n'), ((1424, 1446), 'numpy.ones', 'np.ones', (['(n_features,)'], {}), '((n_features,))\n', (1431, 1446), True, 'import numpy as np\n'), ((3883, 3899), 'numpy.atleast_2d', 'np.atleast_2d', (['y'], {}), '(y)\n', (3896, 3899), True, 'import numpy as np\n'), ((654, 669), 'numpy.sqrt', 'np.sqrt', (['alpha_'], {}), '(alpha_)\n', (661, 669), True, 'import numpy as np\n'), ((1350, 1363), 'numpy.ones', 'np.ones', (['(1,)'], {}), '((1,))\n', (1357, 1363), True, 'import numpy as np\n'), ((1496, 1518), 'numpy.ones', 'np.ones', (['(n_features,)'], {}), '((n_features,))\n', (1503, 1518), True, 'import numpy as np\n'), ((1892, 1915), 'numpy.zeros', 'np.zeros', (['(n_features,)'], {}), '((n_features,))\n', (1900, 1915), True, 'import numpy as np\n'), ((1968, 1983), 'numpy.diag', 'np.diag', (['alphas'], {}), '(alphas)\n', (1975, 1983), True, 'import numpy as np\n'), ((537, 553), 'numpy.sqrt', 'np.sqrt', (['lambda_'], {}), '(lambda_)\n', (544, 553), True, 'import numpy as np\n')]
KarizCache/serverless | optimal/tompkins/examples/dask_scheduling_problem_nonetcontention.py | c5735afee29e104f3909f3b0140e993d461a5420 | #!/usr/bin/python3
import os
import json
import re
import ast
from graphviz import Digraph
import pandas as pd
# color the graph
import graph_tool.all as gt
import copy
import matplotlib.colors as mcolors
import sys
import utils
from tompkins.ilp import schedule, jobs_when_where
from collections import defaultdict
from pulp import value
import seaborn as sns
def get_benchmarks():
benchmarks = {}
for _file in os.listdir(stats_dir):
try:
bnch = _file.rsplit('.', 1)[0]
assert os.path.isfile(os.path.join(stats_dir, f'{bnch}.iopt'))
app = bnch #, scheduler = bnch.rsplit(':', 1)
scheduler = 'vanilla'
benchmarks[bnch] = {'app': app, 'scheduler': scheduler, 'benchmark': bnch}
except AssertionError:
pass
return benchmarks
def build_graph(benchmark):
css_colors = list(mcolors.CSS4_COLORS.keys())
gfile = os.path.join(stats_dir, f'{benchmark}.iopt')
with open(gfile, 'r') as fd:
raw = fd.read().split('\n')
g = gt.Graph(directed=True)
vid_to_vx = {}
name_to_vid = {}
g.vertex_properties['name'] = g.new_vertex_property("string")
g.vertex_properties['worker'] = g.new_vertex_property("string")
g.vertex_properties['color'] = g.new_vertex_property("string", '#e0e0e0')
g.vertex_properties['icolor'] = g.new_vertex_property("int")
g.vertex_properties['output_size'] = g.new_vertex_property("int")
g.vertex_properties['runtime'] = g.new_vertex_property("float")
for ln in raw:
if ln.startswith('v'):
_, vid, name, runtime, output_size = ln.split(',', 4)
v = g.add_vertex()
vid_to_vx[vid] = v
name_to_vid[name] = vid
g.vp.name[v] = name
g.vp.runtime[v] = float(runtime) # 1 second
g.vp.output_size[v] = float(output_size) # 1GB
g.vp.color[v] = '#e0e0e0'
for ln in raw:
if ln.startswith('e'):
_, vsrc, vdst = ln.split(',')
g.add_edge(vid_to_vx[vsrc], vid_to_vx[vdst])
return g
def get_runtime_statistics(benchmark):
tasks = []
statistics = {}
jfile = os.path.join(stats_dir, f'{benchmark}.json')
with open(jfile, 'r') as fd:
stats = ast.literal_eval(fd.read())
for ts in stats:
        ops = 'ts'  # ts.replace("(", '').replace(')', '').split("'")[1].split('-')[0]
statistics[ts] = {'key': ts, 'op': ops,
'output_size': stats[ts]['msg']['nbytes'], 'worker': stats[ts]['worker'].split(':')[1].replace('/', '')}
startsstops = stats[ts]['msg']['startstops']
for ss in startsstops:
if ss['action'] == 'compute':
statistics[ts]['compute_end'] = ss['stop']
statistics[ts]['compute_start'] = ss['start']
statistics[ts]['runtime'] = ss['stop'] - ss['start']
cfile = os.path.join(stats_dir, f'{benchmark}.colors')
with open(cfile, 'r') as cfd:
raw = cfd.read().split('\n')
for ln in raw:
if not ln:
continue
ts, color = ln.split(',')
#ts += ')'
statistics[ts]['color'] = int(color)
return statistics
def plot_graph(g, benchmark, optimal=False):
print(benchmark["benchmark"])
post = ".optimal" if optimal else ""
dg = Digraph('G', filename=f'{benchmark["benchmark"]}{post}.gv', format='png')
for v in g.vertices():
dg.attr('node', shape='ellipse', style="filled,solid",
penwidth="3",
fillcolor=g.vp.color[v],
color=worker_color[g.vp.statistics[v]['worker']])
#if benchmark['scheduler'] == "vanilla":
# dg.node(f'{v}')
#else:
dg.node(f'{v}, color({g.vp.icolor[v]})')
for e in g.edges():
#if benchmark['scheduler'] == "vanilla":
# dg.edge(f'{e.source()}', f'{e.target()}')
#else:
dg.edge(f'{e.source()}, color({g.vp.icolor[e.source()]})',
f'{e.target()}, color({g.vp.icolor[e.target()]})')
dg.view(os.path.join(f'{results_dir}',f'{benchmark["benchmark"]}{post}'), quiet=False)
import pulp as pl
import time
def find_optimal(g, bw):
n_workers = 4
workers = [f'w{i}' for i in range(n_workers)]
# Job Release Times - Additional constraints on availablility of Jobs
# R = np.zeros(n)
R = defaultdict(lambda:0)
# Maximum makespan
M = 100
B = defaultdict(lambda:1)
agents = workers
jobs = []
for v in g.vertices():
jobs.append(f't{v}')
n = len(jobs)
m = len(agents)
P = defaultdict(lambda:0)
for e in g.edges():
P[f't{e.source()}',f't{e.target()}'] = 1
# computation
D = defaultdict(lambda:0)
for v in g.vertices():
for a in agents:
D[f't{v}', a] = g.vp.runtime[v] # statistics[g.vp.name[v]]['runtime']
# Communication Delay matrix - Cost of sending results of job from
# agent to agent
#bw = 10*(1<<30)/(1<<3)
bw = bw*(1<<20)/(1<<3)
C = defaultdict(lambda:0)
for v in g.vertices():
for a in agents:
for b in agents:
C[f't{v}', a, b] = 0 if a == b else g.vp.output_size[v]/bw # 0 --> cost_serialization
start = time.time()
# Set up the Mixed Integer Linear Program
prob, X, S, Cmax = schedule(jobs, agents, D, C, R, B, P, M)
solver = pl.GUROBI_CMD()
prob.solve(solver)
latency = time.time() - start
print('-----------------------------------------------> constraints', len(prob.constraints.keys()))
print('----------------------------------------------> # of variables', prob.numVariables())
print('---------------------------------------------->', latency)
print("Makespan: ", value(Cmax))
sched = jobs_when_where(prob, X, S, Cmax)
print("Schedule: ", sched)
sched2 = []
for j in sched:
new = j + (j[1] + D[j[0], j[2]], g.vp.name[int(j[0].replace('t', ''))])
sched2.append(new)
print("Schedule: ", sched2)
return sched2, {'makespan': value(Cmax),
'constraints': len(prob.constraints.keys()),
'variables': prob.numVariables(),
'time': float(latency)}
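# Note (illustrative, not from the original script): each tuple appended to
# ``sched2`` above has the form (job, start_time, agent, end_time, task_name),
# where end_time = start_time + D[job, agent]. A hypothetical entry such as
#     ('t3', 0.0, 'w1', 1.7, 'sum-part-3')
# means task t3 runs on worker w1 from t=0.0 to t=1.7 under the MILP schedule.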
results_dir = './benchmarks'
stats_dir='./benchmarks'
benchmarks = get_benchmarks()
#benchmarks = ['dom4x61GB1B', 'dom2x41GB1B', 'tree4x61GB1B']
for bnch in benchmarks:
for bw in [1*1024, 16*1024, 512, 32*1024, 8*1024, 4*1024, 2*1024, 256, 128, 64, 32]:
print(f'process {bnch}')
g = build_graph(bnch)
sched2, stats = find_optimal(g, bw)
with open(f'{results_dir}/optimal_compuation_stats.csv', 'a') as fd:
fd.write(f'{bnch},{stats["makespan"]},{stats["constraints"]},{stats["variables"]},{stats["time"]},no,{bw}\n')
with open(f'{results_dir}/{bnch}.nonetworkcontention.{bw}mbps.optimal', 'w') as fd:
for s in sched2:
fd.write(f'v,{s[0]},{s[1]},{s[2]}\n')
#fd.write(f'{s[4]},{s[3]},{s[0]},{s[1]},{s[2]}\n')
#v = int(s[0].replace('t', ''))
#g.vp.worker[v] = s[2]
break
#break
| [((626, 647), 'os.listdir', 'os.listdir', (['stats_dir'], {}), '(stats_dir)\n', (636, 647), False, 'import os\n'), ((1123, 1167), 'os.path.join', 'os.path.join', (['stats_dir', 'f"""{benchmark}.iopt"""'], {}), "(stats_dir, f'{benchmark}.iopt')\n", (1135, 1167), False, 'import os\n'), ((2469, 2513), 'os.path.join', 'os.path.join', (['stats_dir', 'f"""{benchmark}.json"""'], {}), "(stats_dir, f'{benchmark}.json')\n", (2481, 2513), False, 'import os\n'), ((3238, 3284), 'os.path.join', 'os.path.join', (['stats_dir', 'f"""{benchmark}.colors"""'], {}), "(stats_dir, f'{benchmark}.colors')\n", (3250, 3284), False, 'import os\n'), ((3700, 3773), 'graphviz.Digraph', 'Digraph', (['"""G"""'], {'filename': 'f"""{benchmark[\'benchmark\']}{post}.gv"""', 'format': '"""png"""'}), '(\'G\', filename=f"{benchmark[\'benchmark\']}{post}.gv", format=\'png\')\n', (3707, 3773), False, 'from graphviz import Digraph\n'), ((4745, 4768), 'collections.defaultdict', 'defaultdict', (['(lambda : 0)'], {}), '(lambda : 0)\n', (4756, 4768), False, 'from collections import defaultdict\n'), ((4811, 4834), 'collections.defaultdict', 'defaultdict', (['(lambda : 1)'], {}), '(lambda : 1)\n', (4822, 4834), False, 'from collections import defaultdict\n'), ((4979, 5002), 'collections.defaultdict', 'defaultdict', (['(lambda : 0)'], {}), '(lambda : 0)\n', (4990, 5002), False, 'from collections import defaultdict\n'), ((5105, 5128), 'collections.defaultdict', 'defaultdict', (['(lambda : 0)'], {}), '(lambda : 0)\n', (5116, 5128), False, 'from collections import defaultdict\n'), ((5417, 5440), 'collections.defaultdict', 'defaultdict', (['(lambda : 0)'], {}), '(lambda : 0)\n', (5428, 5440), False, 'from collections import defaultdict\n'), ((5635, 5646), 'time.time', 'time.time', ([], {}), '()\n', (5644, 5646), False, 'import time\n'), ((5716, 5756), 'tompkins.ilp.schedule', 'schedule', (['jobs', 'agents', 'D', 'C', 'R', 'B', 'P', 'M'], {}), '(jobs, agents, D, C, R, B, P, M)\n', (5724, 5756), False, 'from tompkins.ilp import schedule, jobs_when_where\n'), ((5770, 5785), 'pulp.GUROBI_CMD', 'pl.GUROBI_CMD', ([], {}), '()\n', (5783, 5785), True, 'import pulp as pl\n'), ((6164, 6197), 'tompkins.ilp.jobs_when_where', 'jobs_when_where', (['prob', 'X', 'S', 'Cmax'], {}), '(prob, X, S, Cmax)\n', (6179, 6197), False, 'from tompkins.ilp import schedule, jobs_when_where\n'), ((1083, 1109), 'matplotlib.colors.CSS4_COLORS.keys', 'mcolors.CSS4_COLORS.keys', ([], {}), '()\n', (1107, 1109), True, 'import matplotlib.colors as mcolors\n'), ((1250, 1273), 'graph_tool.all.Graph', 'gt.Graph', ([], {'directed': '(True)'}), '(directed=True)\n', (1258, 1273), True, 'import graph_tool.all as gt\n'), ((4434, 4499), 'os.path.join', 'os.path.join', (['f"""{results_dir}"""', 'f"""{benchmark[\'benchmark\']}{post}"""'], {}), '(f\'{results_dir}\', f"{benchmark[\'benchmark\']}{post}")\n', (4446, 4499), False, 'import os\n'), ((5823, 5834), 'time.time', 'time.time', ([], {}), '()\n', (5832, 5834), False, 'import time\n'), ((6139, 6150), 'pulp.value', 'value', (['Cmax'], {}), '(Cmax)\n', (6144, 6150), False, 'from pulp import value\n'), ((6437, 6448), 'pulp.value', 'value', (['Cmax'], {}), '(Cmax)\n', (6442, 6448), False, 'from pulp import value\n'), ((739, 778), 'os.path.join', 'os.path.join', (['stats_dir', 'f"""{bnch}.iopt"""'], {}), "(stats_dir, f'{bnch}.iopt')\n", (751, 778), False, 'import os\n')] |
gerhardgossen/harbor | tests/apitests/python/test_robot_account.py | 1d03b8727acb9a3935bf45cd76b61f87c68e2a08 | from __future__ import absolute_import
import unittest
from testutils import ADMIN_CLIENT
from testutils import TEARDOWN
from library.user import User
from library.project import Project
from library.repository import Repository
from library.repository import pull_harbor_image
from library.repository import push_image_to_project
from testutils import harbor_server
from library.base import _assert_status_code
class TestProjects(unittest.TestCase):
@classmethod
def setUp(self):
self.project = Project()
self.user = User()
self.repo = Repository()
@classmethod
def tearDown(self):
print "Case completed"
@unittest.skipIf(TEARDOWN == False, "Test data won't be erased.")
def test_ClearData(self):
#1. Delete repository(RA) by user(UA);
self.repo.delete_repoitory(TestProjects.project_ra_name_a, TestProjects.repo_name_in_project_a.split('/')[1], **TestProjects.USER_RA_CLIENT)
self.repo.delete_repoitory(TestProjects.project_ra_name_b, TestProjects.repo_name_in_project_b.split('/')[1], **TestProjects.USER_RA_CLIENT)
self.repo.delete_repoitory(TestProjects.project_ra_name_c, TestProjects.repo_name_in_project_c.split('/')[1], **TestProjects.USER_RA_CLIENT)
self.repo.delete_repoitory(TestProjects.project_ra_name_a, TestProjects.repo_name_pa.split('/')[1], **TestProjects.USER_RA_CLIENT)
#2. Delete project(PA);
self.project.delete_project(TestProjects.project_ra_id_a, **TestProjects.USER_RA_CLIENT)
self.project.delete_project(TestProjects.project_ra_id_b, **TestProjects.USER_RA_CLIENT)
self.project.delete_project(TestProjects.project_ra_id_c, **TestProjects.USER_RA_CLIENT)
#3. Delete user(UA).
self.user.delete_user(TestProjects.user_ra_id, **ADMIN_CLIENT)
def testRobotAccount(self):
"""
Test case:
Robot Account
Test step and expected result:
1. Create user(UA);
2. Create private project(PA), private project(PB) and public project(PC) by user(UA);
3. Push image(ImagePA) to project(PA), image(ImagePB) to project(PB) and image(ImagePC) to project(PC) by user(UA);
        4. Create a new robot account(RA) with pull and push privilege in project(PA) by user(UA);
        5. Check robot account info, it should have both pull and push privileges;
6. Pull image(ImagePA) from project(PA) by robot account(RA), it must be successful;
7. Push image(ImageRA) to project(PA) by robot account(RA), it must be successful;
8. Push image(ImageRA) to project(PB) by robot account(RA), it must be not successful;
9. Pull image(ImagePB) from project(PB) by robot account(RA), it must be not successful;
10. Pull image from project(PC), it must be successful;
11. Push image(ImageRA) to project(PC) by robot account(RA), it must be not successful;
12. Update action property of robot account(RA);
13. Pull image(ImagePA) from project(PA) by robot account(RA), it must be not successful;
14. Push image(ImageRA) to project(PA) by robot account(RA), it must be not successful;
15. Delete robot account(RA), it must be not successful.
Tear down:
1. Delete repository(RA) by user(UA);
2. Delete project(PA);
3. Delete user(UA).
"""
url = ADMIN_CLIENT["endpoint"]
admin_name = ADMIN_CLIENT["username"]
admin_password = ADMIN_CLIENT["password"]
user_ra_password = "Aa123456"
image_project_a = "haproxy"
image_project_b = "hello-world"
image_project_c = "httpd"
image_robot_account = "alpine"
tag = "latest"
print "#1. Create user(UA);"
TestProjects.user_ra_id, user_ra_name = self.user.create_user(user_password = user_ra_password, **ADMIN_CLIENT)
TestProjects.USER_RA_CLIENT=dict(endpoint = url, username = user_ra_name, password = user_ra_password)
print "#2. Create private project(PA), private project(PB) and public project(PC) by user(UA);"
TestProjects.project_ra_id_a, TestProjects.project_ra_name_a = self.project.create_project(metadata = {"public": "false"}, **TestProjects.USER_RA_CLIENT)
TestProjects.project_ra_id_b, TestProjects.project_ra_name_b = self.project.create_project(metadata = {"public": "false"}, **TestProjects.USER_RA_CLIENT)
TestProjects.project_ra_id_c, TestProjects.project_ra_name_c = self.project.create_project(metadata = {"public": "true"}, **TestProjects.USER_RA_CLIENT)
print "#3. Push image(ImagePA) to project(PA), image(ImagePB) to project(PB) and image(ImagePC) to project(PC) by user(UA);"
TestProjects.repo_name_in_project_a, tag_a = push_image_to_project(TestProjects.project_ra_name_a, harbor_server, user_ra_name, user_ra_password, image_project_a, tag)
TestProjects.repo_name_in_project_b, tag_b = push_image_to_project(TestProjects.project_ra_name_b, harbor_server, user_ra_name, user_ra_password, image_project_b, tag)
TestProjects.repo_name_in_project_c, tag_c = push_image_to_project(TestProjects.project_ra_name_c, harbor_server, user_ra_name, user_ra_password, image_project_c, tag)
print "#4. Create a new robot account(RA) with pull and push privilige in project(PA) by user(UA);"
robot_id, robot_account = self.project.add_project_robot_account(TestProjects.project_ra_id_a, TestProjects.project_ra_name_a,
                                                                          2441000531, **TestProjects.USER_RA_CLIENT)
print robot_account.name
print robot_account.token
print "#5. Check robot account info, it should has both pull and push priviliges;"
data = self.project.get_project_robot_account_by_id(TestProjects.project_ra_id_a, robot_id, **TestProjects.USER_RA_CLIENT)
_assert_status_code(robot_account.name, data.name)
print "#6. Pull image(ImagePA) from project(PA) by robot account(RA), it must be successful;"
pull_harbor_image(harbor_server, robot_account.name, robot_account.token, TestProjects.repo_name_in_project_a, tag_a)
print "#7. Push image(ImageRA) to project(PA) by robot account(RA), it must be successful;"
TestProjects.repo_name_pa, _ = push_image_to_project(TestProjects.project_ra_name_a, harbor_server, robot_account.name, robot_account.token, image_robot_account, tag)
print "#8. Push image(ImageRA) to project(PB) by robot account(RA), it must be not successful;"
push_image_to_project(TestProjects.project_ra_name_b, harbor_server, robot_account.name, robot_account.token, image_robot_account, tag, expected_error_message = "unauthorized to access repository")
print "#9. Pull image(ImagePB) from project(PB) by robot account(RA), it must be not successful;"
pull_harbor_image(harbor_server, robot_account.name, robot_account.token, TestProjects.repo_name_in_project_b, tag_b, expected_error_message = "unauthorized to access repository")
print "#10. Pull image from project(PC), it must be successful;"
pull_harbor_image(harbor_server, robot_account.name, robot_account.token, TestProjects.repo_name_in_project_c, tag_c)
print "#11. Push image(ImageRA) to project(PC) by robot account(RA), it must be not successful;"
push_image_to_project(TestProjects.project_ra_name_c, harbor_server, robot_account.name, robot_account.token, image_robot_account, tag, expected_error_message = "unauthorized to access repository")
print "#12. Update action property of robot account(RA);"
self.project.disable_project_robot_account(TestProjects.project_ra_id_a, robot_id, True, **TestProjects.USER_RA_CLIENT)
print "#13. Pull image(ImagePA) from project(PA) by robot account(RA), it must be not successful;"
pull_harbor_image(harbor_server, robot_account.name, robot_account.token, TestProjects.repo_name_in_project_a, tag_a, expected_login_error_message = "unauthorized: authentication required")
print "#14. Push image(ImageRA) to project(PA) by robot account(RA), it must be not successful;"
push_image_to_project(TestProjects.project_ra_name_a, harbor_server, robot_account.name, robot_account.token, image_robot_account, tag, expected_login_error_message = "unauthorized: authentication required")
print "#15. Delete robot account(RA), it must be not successful."
self.project.delete_project_robot_account(TestProjects.project_ra_id_a, robot_id, **TestProjects.USER_RA_CLIENT)
if __name__ == '__main__':
unittest.main() | [] |
ErikKalkoken/slackchannel2pdf | slackchannel2pdf/locales.py | 2848dfaaffbf9a5255c6dbe87dcc1e90d062b820 | import datetime as dt
import logging
from babel import Locale, UnknownLocaleError
from babel.dates import format_datetime, format_time, format_date
import pytz
from tzlocal import get_localzone
from . import settings
logger = logging.getLogger(__name__)
class LocaleHelper:
"""Helpers for converting date & time according to current locale and timezone"""
def __init__(
self,
my_locale: Locale = None,
my_tz: pytz.BaseTzInfo = None,
author_info: dict = None,
) -> None:
"""
Args:
- my_locale: Primary locale to use
- my_tz: Primary timezone to use
- author_info: locale and timezone to use from this Slack response
if my_locale and/or my_tz are not given
"""
self._locale = self._determine_locale(my_locale, author_info)
self._timezone = self._determine_timezone(my_tz, author_info)
@staticmethod
def _determine_locale(my_locale: Locale = None, author_info: dict = None) -> Locale:
if my_locale:
if not isinstance(my_locale, Locale):
raise TypeError("my_locale must be a babel Locale object")
else:
if author_info:
try:
my_locale = Locale.parse(author_info["locale"], sep="-")
except UnknownLocaleError:
logger.warning("Could not use locale info from Slack")
my_locale = Locale.default()
else:
my_locale = Locale.default()
if not my_locale:
my_locale = Locale.parse(settings.FALLBACK_LOCALE)
return my_locale
@staticmethod
def _determine_timezone(
my_tz: pytz.BaseTzInfo = None, author_info: dict = None
) -> pytz.BaseTzInfo:
if my_tz:
if not isinstance(my_tz, pytz.BaseTzInfo):
raise TypeError("my_tz must be of type pytz")
else:
if author_info:
try:
my_tz = pytz.timezone(author_info["tz"])
except pytz.exceptions.UnknownTimeZoneError:
logger.warning("Could not use timezone info from Slack")
my_tz = get_localzone()
else:
my_tz = get_localzone()
if not my_tz:
my_tz = pytz.UTC
return my_tz
@property
def locale(self) -> Locale:
return self._locale
@property
def timezone(self) -> pytz.BaseTzInfo:
return self._timezone
def format_date_full_str(self, my_datetime: dt.datetime) -> str:
return format_date(my_datetime, format="full", locale=self.locale)
def format_datetime_str(self, my_datetime: dt.datetime) -> str:
"""returns formated datetime string for given dt using locale"""
return format_datetime(my_datetime, format="short", locale=self.locale)
def get_datetime_formatted_str(self, ts: int) -> str:
"""return given timestamp as formated datetime string using locale"""
my_datetime = self.get_datetime_from_ts(ts)
return format_datetime(my_datetime, format="short", locale=self.locale)
def get_time_formatted_str(self, ts: int) -> str:
"""return given timestamp as formated datetime string using locale"""
my_datetime = self.get_datetime_from_ts(ts)
return format_time(my_datetime, format="short", locale=self.locale)
def get_datetime_from_ts(self, ts: int) -> dt.datetime:
"""returns datetime object of a unix timestamp with local timezone"""
my_datetime = dt.datetime.fromtimestamp(float(ts), pytz.UTC)
return my_datetime.astimezone(self.timezone)
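# --- Usage sketch (illustrative; not part of the original module) ---
# The Slack-style ``author_info`` dict below is a hypothetical example of the
# payload this helper expects; babel, pytz and tzlocal must be installed.
if __name__ == "__main__":
    helper = LocaleHelper(author_info={"locale": "en-US", "tz": "Europe/Berlin"})
    print(helper.get_datetime_formatted_str(1577836800))  # e.g. '1/1/20, 1:00 AM'
    print(helper.get_time_formatted_str(1577836800))  # e.g. '1:00 AM'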
| [((230, 257), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (247, 257), False, 'import logging\n'), ((2602, 2661), 'babel.dates.format_date', 'format_date', (['my_datetime'], {'format': '"""full"""', 'locale': 'self.locale'}), "(my_datetime, format='full', locale=self.locale)\n", (2613, 2661), False, 'from babel.dates import format_datetime, format_time, format_date\n'), ((2819, 2883), 'babel.dates.format_datetime', 'format_datetime', (['my_datetime'], {'format': '"""short"""', 'locale': 'self.locale'}), "(my_datetime, format='short', locale=self.locale)\n", (2834, 2883), False, 'from babel.dates import format_datetime, format_time, format_date\n'), ((3088, 3152), 'babel.dates.format_datetime', 'format_datetime', (['my_datetime'], {'format': '"""short"""', 'locale': 'self.locale'}), "(my_datetime, format='short', locale=self.locale)\n", (3103, 3152), False, 'from babel.dates import format_datetime, format_time, format_date\n'), ((3353, 3413), 'babel.dates.format_time', 'format_time', (['my_datetime'], {'format': '"""short"""', 'locale': 'self.locale'}), "(my_datetime, format='short', locale=self.locale)\n", (3364, 3413), False, 'from babel.dates import format_datetime, format_time, format_date\n'), ((1581, 1619), 'babel.Locale.parse', 'Locale.parse', (['settings.FALLBACK_LOCALE'], {}), '(settings.FALLBACK_LOCALE)\n', (1593, 1619), False, 'from babel import Locale, UnknownLocaleError\n'), ((1514, 1530), 'babel.Locale.default', 'Locale.default', ([], {}), '()\n', (1528, 1530), False, 'from babel import Locale, UnknownLocaleError\n'), ((2266, 2281), 'tzlocal.get_localzone', 'get_localzone', ([], {}), '()\n', (2279, 2281), False, 'from tzlocal import get_localzone\n'), ((1256, 1300), 'babel.Locale.parse', 'Locale.parse', (["author_info['locale']"], {'sep': '"""-"""'}), "(author_info['locale'], sep='-')\n", (1268, 1300), False, 'from babel import Locale, UnknownLocaleError\n'), ((2009, 2041), 'pytz.timezone', 'pytz.timezone', (["author_info['tz']"], {}), "(author_info['tz'])\n", (2022, 2041), False, 'import pytz\n'), ((1451, 1467), 'babel.Locale.default', 'Locale.default', ([], {}), '()\n', (1465, 1467), False, 'from babel import Locale, UnknownLocaleError\n'), ((2208, 2223), 'tzlocal.get_localzone', 'get_localzone', ([], {}), '()\n', (2221, 2223), False, 'from tzlocal import get_localzone\n')] |
glemaitre/ramp-board-1 | databoard/databoard/default_config.py | a5e9b423a55d196d38232fd94b2f7d53fb35d9d8 | import os
class Config(object):
# FLASK GENERAL CONFIG PARAMETERS
SECRET_KEY = os.getenv('DATABOARD_SECRET_KEY', 'abcdefghijkl')
# abs max upload file size, to throw 413, before saving it
WTF_CSRF_ENABLED = True
LOG_FILENAME = None # if None, output to screen
MAX_CONTENT_LENGTH = 1024 * 1024 * 1024
DEBUG = False
TESTING = False
# FLASK MAIL CONFIG PARAMETERS
MAIL_SERVER = os.getenv('DATABOARD_MAIL_SERVER', 'smtp.gmail.com')
MAIL_PORT = os.getenv('DATABOARD_MAIL_PORT', 587)
MAIL_USERNAME = os.getenv('DATABOARD_MAIL_USERNAME', 'user')
MAIL_PASSWORD = os.getenv('DATABOARD_MAIL_PASSWORD', 'password')
MAIL_DEFAULT_SENDER = (
os.getenv('DATABOARD_MAIL_SENDER_ALIAS', 'RAMP admin'),
os.getenv('DATABOARD_MAIL_SENDER', '[email protected]')
)
MAIL_RECIPIENTS = []
MAIL_USE_TLS = False
MAIL_USE_SSL = True
MAIL_DEBUG = False
SQLALCHEMY_TRACK_MODIFICATIONS = True
SQLALCHEMY_DATABASE_URI = os.getenv('DATABOARD_DB_URL')
SQLALCHEMY_MIGRATE_REPO = os.getenv('DATABOARD_DB_MIGRATE_REPO')
SQLALCHEMY_RECORD_QUERIES = (
True if os.getenv('DATABOARD_DB_PERF', 0) else False
)
class RampConfig(object):
RAMP_ADMIN_MAILS = os.getenv('DATABOARD_ADMIN_MAILS', [])
RAMP_KITS_DIR = 'ramp-kits'
RAMP_DATA_DIR = 'ramp-data'
RAMP_SUBMISSIONS_DIR = 'submissions'
RAMP_SANDBOX_DIR = 'starting_kit'
RAMP_SERVER_PORT = 8080
# make it False if parallel training is not working
# is_parallelize
RAMP_PARALLELIZE = bool(os.getenv('DATABOARD_PARALLELIZE', 1))
######################################################################
class ProductionConfig(Config):
DEPLOYMENT_PATH = os.getenv(
'DATABOARD_DEPLOYMENT_PATH', '/tmp/databoard')
class DevelopmentConfig(Config):
DEBUG = True
MAIL_DEBUG = True
SQLALCHEMY_DATABASE_URI = os.getenv(
'DATABOARD_DB_URL_TEST',
'postgresql://mrramp:mrramp@localhost/databoard_test'
)
DEPLOYMENT_PATH = os.getenv(
'DATABOARD_DEPLOYMENT_PATH_TEST', '/tmp/databoard_test')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.getenv(
'DATABOARD_DB_URL_TEST',
'postgresql://mrramp:mrramp@localhost/databoard_test'
)
DEPLOYMENT_PATH = os.getenv(
'DATABOARD_DEPLOYMENT_PATH_TEST',
'/tmp/databoard_test',
)
| [((89, 138), 'os.getenv', 'os.getenv', (['"""DATABOARD_SECRET_KEY"""', '"""abcdefghijkl"""'], {}), "('DATABOARD_SECRET_KEY', 'abcdefghijkl')\n", (98, 138), False, 'import os\n'), ((419, 471), 'os.getenv', 'os.getenv', (['"""DATABOARD_MAIL_SERVER"""', '"""smtp.gmail.com"""'], {}), "('DATABOARD_MAIL_SERVER', 'smtp.gmail.com')\n", (428, 471), False, 'import os\n'), ((488, 525), 'os.getenv', 'os.getenv', (['"""DATABOARD_MAIL_PORT"""', '(587)'], {}), "('DATABOARD_MAIL_PORT', 587)\n", (497, 525), False, 'import os\n'), ((546, 590), 'os.getenv', 'os.getenv', (['"""DATABOARD_MAIL_USERNAME"""', '"""user"""'], {}), "('DATABOARD_MAIL_USERNAME', 'user')\n", (555, 590), False, 'import os\n'), ((611, 659), 'os.getenv', 'os.getenv', (['"""DATABOARD_MAIL_PASSWORD"""', '"""password"""'], {}), "('DATABOARD_MAIL_PASSWORD', 'password')\n", (620, 659), False, 'import os\n'), ((995, 1024), 'os.getenv', 'os.getenv', (['"""DATABOARD_DB_URL"""'], {}), "('DATABOARD_DB_URL')\n", (1004, 1024), False, 'import os\n'), ((1055, 1093), 'os.getenv', 'os.getenv', (['"""DATABOARD_DB_MIGRATE_REPO"""'], {}), "('DATABOARD_DB_MIGRATE_REPO')\n", (1064, 1093), False, 'import os\n'), ((1246, 1284), 'os.getenv', 'os.getenv', (['"""DATABOARD_ADMIN_MAILS"""', '[]'], {}), "('DATABOARD_ADMIN_MAILS', [])\n", (1255, 1284), False, 'import os\n'), ((1730, 1786), 'os.getenv', 'os.getenv', (['"""DATABOARD_DEPLOYMENT_PATH"""', '"""/tmp/databoard"""'], {}), "('DATABOARD_DEPLOYMENT_PATH', '/tmp/databoard')\n", (1739, 1786), False, 'import os\n'), ((1900, 1993), 'os.getenv', 'os.getenv', (['"""DATABOARD_DB_URL_TEST"""', '"""postgresql://mrramp:mrramp@localhost/databoard_test"""'], {}), "('DATABOARD_DB_URL_TEST',\n 'postgresql://mrramp:mrramp@localhost/databoard_test')\n", (1909, 1993), False, 'import os\n'), ((2034, 2100), 'os.getenv', 'os.getenv', (['"""DATABOARD_DEPLOYMENT_PATH_TEST"""', '"""/tmp/databoard_test"""'], {}), "('DATABOARD_DEPLOYMENT_PATH_TEST', '/tmp/databoard_test')\n", (2043, 2100), False, 'import os\n'), ((2190, 2283), 'os.getenv', 'os.getenv', (['"""DATABOARD_DB_URL_TEST"""', '"""postgresql://mrramp:mrramp@localhost/databoard_test"""'], {}), "('DATABOARD_DB_URL_TEST',\n 'postgresql://mrramp:mrramp@localhost/databoard_test')\n", (2199, 2283), False, 'import os\n'), ((2324, 2390), 'os.getenv', 'os.getenv', (['"""DATABOARD_DEPLOYMENT_PATH_TEST"""', '"""/tmp/databoard_test"""'], {}), "('DATABOARD_DEPLOYMENT_PATH_TEST', '/tmp/databoard_test')\n", (2333, 2390), False, 'import os\n'), ((696, 750), 'os.getenv', 'os.getenv', (['"""DATABOARD_MAIL_SENDER_ALIAS"""', '"""RAMP admin"""'], {}), "('DATABOARD_MAIL_SENDER_ALIAS', 'RAMP admin')\n", (705, 750), False, 'import os\n'), ((760, 818), 'os.getenv', 'os.getenv', (['"""DATABOARD_MAIL_SENDER"""', '"""[email protected]"""'], {}), "('DATABOARD_MAIL_SENDER', '[email protected]')\n", (769, 818), False, 'import os\n'), ((1144, 1177), 'os.getenv', 'os.getenv', (['"""DATABOARD_DB_PERF"""', '(0)'], {}), "('DATABOARD_DB_PERF', 0)\n", (1153, 1177), False, 'import os\n'), ((1563, 1600), 'os.getenv', 'os.getenv', (['"""DATABOARD_PARALLELIZE"""', '(1)'], {}), "('DATABOARD_PARALLELIZE', 1)\n", (1572, 1600), False, 'import os\n')] |
carlsummer/python_developer_tools | python_developer_tools/cv/bases/pool/AvgPool2d.py | a8c4365b7cc601cda55648cdfd8c0cb1faae132f | # !/usr/bin/env python
# -- coding: utf-8 --
# @Author zengxiaohui
# Datatime:8/31/2021 1:37 PM
# @File:GlobalAvgPool2d
import torch.nn as nn
from python_developer_tools.cv.bases.activates.swish import h_swish
class GlobalAvgPool2d(nn.Module):
""" Fast implementation of global average pooling from
TResNet: High Performance GPU-Dedicated Architecture
https://arxiv.org/pdf/2003.13630.pdf
Args:
flatten (bool, optional): whether spatial dimensions should be squeezed
"""
def __init__(self, flatten: bool = False) -> None:
super().__init__()
self.flatten = flatten
def forward(self, x):
if self.flatten:
in_size = x.size()
return x.view((in_size[0], in_size[1], -1)).mean(dim=2)
else:
return x.view(x.size(0), x.size(1), -1).mean(-1).view(x.size(0), x.size(1), 1, 1)
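# Quick shape check (illustrative; not part of the original module): a
# (N, C, H, W) feature map reduces to (N, C, 1, 1), or to (N, C) when
# ``flatten=True``.
#
#     import torch
#     gap = GlobalAvgPool2d(flatten=True)
#     print(gap(torch.randn(2, 64, 7, 7)).shape)  # torch.Size([2, 64])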
class SwishAdaptiveAvgPool2d(nn.Module):
    def __init__(self, inplace=True):
        super().__init__()
        self.avgpool = nn.Sequential(
nn.ReLU6(inplace=inplace),
nn.AdaptiveAvgPool2d((1, 1)),
h_swish()
)
def forward(self, x):
return self.avgpool(x) | [((1036, 1061), 'torch.nn.ReLU6', 'nn.ReLU6', ([], {'inplace': 'inplace'}), '(inplace=inplace)\n', (1044, 1061), True, 'import torch.nn as nn\n'), ((1075, 1103), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1, 1)'], {}), '((1, 1))\n', (1095, 1103), True, 'import torch.nn as nn\n'), ((1117, 1126), 'python_developer_tools.cv.bases.activates.swish.h_swish', 'h_swish', ([], {}), '()\n', (1124, 1126), False, 'from python_developer_tools.cv.bases.activates.swish import h_swish\n')] |
nordme/expyfun | expyfun/_utils.py | e644bba8cbfb6edd2a076099536417d4854d64af | """Some utility functions"""
# Authors: Eric Larson <[email protected]>
#
# License: BSD (3-clause)
import warnings
import operator
from copy import deepcopy
import subprocess
import importlib
import os
import os.path as op
import inspect
import sys
import tempfile
import ssl
from shutil import rmtree
import atexit
import json
from functools import partial
from distutils.version import LooseVersion
from numpy import sqrt, convolve, ones
import logging
import datetime
from timeit import default_timer as clock
from threading import Timer
import numpy as np
import scipy as sp
from ._externals import decorator
# set this first thing to make sure it "takes"
try:
import pyglet
pyglet.options['debug_gl'] = False
del pyglet
except Exception:
pass
# for py3k (eventually)
if sys.version.startswith('2'):
string_types = basestring # noqa
input = raw_input # noqa, input is raw_input in py3k
text_type = unicode # noqa
from __builtin__ import reload
from urllib2 import urlopen # noqa
from cStringIO import StringIO # noqa
else:
string_types = str
text_type = str
from urllib.request import urlopen
input = input
from io import StringIO # noqa, analysis:ignore
from importlib import reload # noqa, analysis:ignore
###############################################################################
# LOGGING
EXP = 25
logging.addLevelName(EXP, 'EXP')
def exp(self, message, *args, **kwargs):
"""Experiment-level logging."""
self.log(EXP, message, *args, **kwargs)
logging.Logger.exp = exp
logger = logging.getLogger('expyfun')
def flush_logger():
"""Flush expyfun logger"""
for handler in logger.handlers:
handler.flush()
def set_log_level(verbose=None, return_old_level=False):
"""Convenience function for setting the logging level
Parameters
----------
verbose : bool, str, int, or None
The verbosity of messages to print. If a str, it can be either DEBUG,
INFO, WARNING, ERROR, or CRITICAL. Note that these are for
convenience and are equivalent to passing in logging.DEBUG, etc.
For bool, True is the same as 'INFO', False is the same as 'WARNING'.
If None, the environment variable EXPYFUN_LOGGING_LEVEL is read, and if
it doesn't exist, defaults to INFO.
return_old_level : bool
If True, return the old verbosity level.
"""
if verbose is None:
verbose = get_config('EXPYFUN_LOGGING_LEVEL', 'INFO')
elif isinstance(verbose, bool):
verbose = 'INFO' if verbose is True else 'WARNING'
if isinstance(verbose, string_types):
verbose = verbose.upper()
logging_types = dict(DEBUG=logging.DEBUG, INFO=logging.INFO,
WARNING=logging.WARNING, ERROR=logging.ERROR,
CRITICAL=logging.CRITICAL)
if verbose not in logging_types:
raise ValueError('verbose must be of a valid type')
verbose = logging_types[verbose]
old_verbose = logger.level
logger.setLevel(verbose)
return (old_verbose if return_old_level else None)
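# Usage sketch (illustrative): temporarily raise verbosity, then restore it.
#
#     old_level = set_log_level('DEBUG', return_old_level=True)
#     ...  # run the noisy section
#     set_log_level(old_level)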
def set_log_file(fname=None,
output_format='%(asctime)s - %(levelname)-7s - %(message)s',
overwrite=None):
"""Convenience function for setting the log to print to a file
Parameters
----------
fname : str, or None
Filename of the log to print to. If None, stdout is used.
To suppress log outputs, use set_log_level('WARN').
output_format : str
Format of the output messages. See the following for examples:
http://docs.python.org/dev/howto/logging.html
e.g., "%(asctime)s - %(levelname)s - %(message)s".
overwrite : bool, or None
Overwrite the log file (if it exists). Otherwise, statements
will be appended to the log (default). None is the same as False,
but additionally raises a warning to notify the user that log
entries will be appended.
"""
handlers = logger.handlers
for h in handlers:
if isinstance(h, logging.FileHandler):
h.close()
logger.removeHandler(h)
if fname is not None:
if op.isfile(fname) and overwrite is None:
warnings.warn('Log entries will be appended to the file. Use '
'overwrite=False to avoid this message in the '
'future.')
mode = 'w' if overwrite is True else 'a'
lh = logging.FileHandler(fname, mode=mode)
else:
""" we should just be able to do:
lh = logging.StreamHandler(sys.stdout)
but because doctests uses some magic on stdout, we have to do this:
"""
lh = logging.StreamHandler(WrapStdOut())
lh.setFormatter(logging.Formatter(output_format))
# actually add the stream handler
logger.addHandler(lh)
###############################################################################
# RANDOM UTILITIES
building_doc = any('sphinx-build' in ((''.join(i[4]).lower() + i[1])
if i[4] is not None else '')
for i in inspect.stack())
def run_subprocess(command, **kwargs):
"""Run command using subprocess.Popen
Run command and wait for command to complete. If the return code was zero
then return, otherwise raise CalledProcessError.
By default, this will also add stdout= and stderr=subproces.PIPE
to the call to Popen to suppress printing to the terminal.
Parameters
----------
command : list of str
Command to run as subprocess (see subprocess.Popen documentation).
**kwargs : objects
        Keyword arguments to pass to ``subprocess.Popen``.
Returns
-------
stdout : str
Stdout returned by the process.
stderr : str
Stderr returned by the process.
"""
# code adapted with permission from mne-python
kw = dict(stderr=subprocess.PIPE, stdout=subprocess.PIPE)
kw.update(kwargs)
p = subprocess.Popen(command, **kw)
stdout_, stderr = p.communicate()
output = (stdout_.decode(), stderr.decode())
if p.returncode:
err_fun = subprocess.CalledProcessError.__init__
if 'output' in _get_args(err_fun):
raise subprocess.CalledProcessError(p.returncode, command, output)
else:
raise subprocess.CalledProcessError(p.returncode, command)
return output
class ZeroClock(object):
"""Clock that uses "clock" function but starts at zero on init."""
def __init__(self):
self._start_time = clock()
def get_time(self):
"""Get time."""
return clock() - self._start_time
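# Illustrative use (not part of the original module): ZeroClock reports the
# time elapsed since its construction.
#
#     zc = ZeroClock()
#     ...  # do some work
#     elapsed_secs = zc.get_time()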
def date_str():
"""Produce a date string for the current date and time
Returns
-------
datestr : str
The date string.
"""
return str(datetime.datetime.today()).replace(':', '_')
class WrapStdOut(object):
"""Ridiculous class to work around how doctest captures stdout."""
def __getattr__(self, name):
# Even more ridiculous than this class, this must be sys.stdout (not
# just stdout) in order for this to work (tested on OSX and Linux)
return getattr(sys.stdout, name)
class _TempDir(str):
"""Class for creating and auto-destroying temp dir
This is designed to be used with testing modules.
We cannot simply use __del__() method for cleanup here because the rmtree
function may be cleaned up before this object, so we use the atexit module
instead. Passing del_after and print_del kwargs to the constructor are
helpful primarily for debugging purposes.
"""
def __new__(self, del_after=True, print_del=False):
new = str.__new__(self, tempfile.mkdtemp())
self._del_after = del_after
self._print_del = print_del
return new
def __init__(self):
self._path = self.__str__()
atexit.register(self.cleanup)
def cleanup(self):
if self._del_after is True:
if self._print_del is True:
print('Deleting {} ...'.format(self._path))
rmtree(self._path, ignore_errors=True)
def check_units(units):
"""Ensure user passed valid units type
Parameters
----------
units : str
Must be ``'norm'``, ``'deg'``, or ``'pix'``.
"""
good_units = ['norm', 'pix', 'deg']
if units not in good_units:
raise ValueError('"units" must be one of {}, not {}'
''.format(good_units, units))
###############################################################################
# DECORATORS
# Following deprecated class copied from scikit-learn
class deprecated(object):
"""Decorator to mark a function or class as deprecated.
Issue a warning when the function is called/the class is instantiated and
adds a warning to the docstring.
The optional extra argument will be appended to the deprecation message
and the docstring. Note: to use this with the default value for extra, put
in an empty of parentheses:
>>> from expyfun._utils import deprecated
>>> deprecated() # doctest: +ELLIPSIS
<expyfun._utils.deprecated object at ...>
>>> @deprecated()
... def some_function(): pass
"""
# Adapted from http://wiki.python.org/moin/PythonDecoratorLibrary,
# but with many changes.
# scikit-learn will not import on all platforms b/c it can be
# sklearn or scikits.learn, so a self-contained example is used above
def __init__(self, extra=''):
"""
Parameters
----------
extra: string
to be added to the deprecation messages
"""
self.extra = extra
def __call__(self, obj):
"""Call."""
if isinstance(obj, type):
return self._decorate_class(obj)
else:
return self._decorate_fun(obj)
def _decorate_class(self, cls):
msg = "Class %s is deprecated" % cls.__name__
if self.extra:
msg += "; %s" % self.extra
# FIXME: we should probably reset __new__ for full generality
init = cls.__init__
def wrapped(*args, **kwargs):
warnings.warn(msg, category=DeprecationWarning)
return init(*args, **kwargs)
cls.__init__ = wrapped
wrapped.__name__ = '__init__'
wrapped.__doc__ = self._update_doc(init.__doc__)
wrapped.deprecated_original = init
return cls
def _decorate_fun(self, fun):
"""Decorate function fun"""
msg = "Function %s is deprecated" % fun.__name__
if self.extra:
msg += "; %s" % self.extra
def wrapped(*args, **kwargs):
warnings.warn(msg, category=DeprecationWarning)
return fun(*args, **kwargs)
wrapped.__name__ = fun.__name__
wrapped.__dict__ = fun.__dict__
wrapped.__doc__ = self._update_doc(fun.__doc__)
return wrapped
def _update_doc(self, olddoc):
newdoc = "DEPRECATED"
if self.extra:
newdoc = "%s: %s" % (newdoc, self.extra)
if olddoc:
newdoc = "%s\n\n%s" % (newdoc, olddoc)
return newdoc
if hasattr(inspect, 'signature'): # py35
def _get_args(function, varargs=False):
params = inspect.signature(function).parameters
args = [key for key, param in params.items()
if param.kind not in (param.VAR_POSITIONAL, param.VAR_KEYWORD)]
if varargs:
varargs = [param.name for param in params.values()
if param.kind == param.VAR_POSITIONAL]
if len(varargs) == 0:
varargs = None
return args, varargs
else:
return args
else:
def _get_args(function, varargs=False):
out = inspect.getargspec(function) # args, varargs, keywords, defaults
if varargs:
return out[:2]
else:
return out[0]
@decorator
def verbose_dec(function, *args, **kwargs):
"""Improved verbose decorator to allow functions to override log-level
    Do not call this directly to set global verbosity level, instead use
set_log_level().
Parameters
----------
function : callable
Function to be decorated by setting the verbosity level.
Returns
-------
dec - function
The decorated function
"""
arg_names = _get_args(function)
if len(arg_names) > 0 and arg_names[0] == 'self':
default_level = getattr(args[0], 'verbose', None)
else:
default_level = None
    if 'verbose' in arg_names:
verbose_level = args[arg_names.index('verbose')]
else:
verbose_level = default_level
if verbose_level is not None:
old_level = set_log_level(verbose_level, True)
# set it back if we get an exception
try:
ret = function(*args, **kwargs)
except Exception:
set_log_level(old_level)
raise
set_log_level(old_level)
return ret
else:
ret = function(*args, **kwargs)
return ret
def _new_pyglet():
import pyglet
return LooseVersion(pyglet.version) >= LooseVersion('1.4')
def _has_video():
if _new_pyglet():
try:
from pyglet.media.codecs.ffmpeg import FFmpegSource # noqa
except ImportError:
return False
else:
try:
from pyglet.media.avbin import AVbinSource # noqa
except ImportError:
try:
from pyglet.media.sources.avbin import AVbinSource # noqa
except ImportError:
return False
return True
def requires_video():
"""Requires FFmpeg/AVbin decorator."""
import pytest
return pytest.mark.skipif(not _has_video(), reason='Requires FFmpeg/AVbin')
def requires_opengl21(func):
"""Requires OpenGL decorator."""
import pytest
import pyglet.gl
vendor = pyglet.gl.gl_info.get_vendor()
version = pyglet.gl.gl_info.get_version()
sufficient = pyglet.gl.gl_info.have_version(2, 0)
return pytest.mark.skipif(not sufficient,
reason='OpenGL too old: %s %s'
% (vendor, version,))(func)
def requires_lib(lib):
"""Requires lib decorator."""
import pytest
try:
importlib.import_module(lib)
except Exception as exp:
val = True
reason = 'Needs %s (%s)' % (lib, exp)
else:
val = False
reason = ''
return pytest.mark.skipif(val, reason=reason)
def _has_scipy_version(version):
return (LooseVersion(sp.__version__) >= LooseVersion(version))
def _get_user_home_path():
"""Return standard preferences path"""
# this has been checked on OSX64, Linux64, and Win32
val = os.getenv('APPDATA' if 'nt' == os.name.lower() else 'HOME', None)
if val is None:
raise ValueError('expyfun config file path could '
'not be determined, please report this '
'error to expyfun developers')
return val
def fetch_data_file(fname):
"""Fetch example remote file
Parameters
----------
fname : str
The remote filename to get. If the filename already exists
on the local system, the file will not be fetched again.
Returns
-------
fname : str
The filename on the local system where the file was downloaded.
"""
path = get_config('EXPYFUN_DATA_PATH', op.join(_get_user_home_path(),
'.expyfun', 'data'))
fname_out = op.join(path, fname)
if not op.isdir(op.dirname(fname_out)):
os.makedirs(op.dirname(fname_out))
fname_url = ('https://github.com/LABSN/expyfun-data/raw/master/{0}'
''.format(fname))
try:
# until we get proper certificates
context = ssl._create_unverified_context()
this_urlopen = partial(urlopen, context=context)
except AttributeError:
context = None
this_urlopen = urlopen
if not op.isfile(fname_out):
try:
with open(fname_out, 'wb') as fid:
www = this_urlopen(fname_url, timeout=30.0)
try:
fid.write(www.read())
finally:
www.close()
except Exception:
os.remove(fname_out)
raise
return fname_out
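# Usage sketch (illustrative; the remote filename below is hypothetical):
#
#     local_path = fetch_data_file('audio/example.wav')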
def get_config_path():
r"""Get path to standard expyfun config file.
Returns
-------
config_path : str
The path to the expyfun configuration file. On windows, this
will be '%APPDATA%\.expyfun\expyfun.json'. On every other
system, this will be $HOME/.expyfun/expyfun.json.
"""
val = op.join(_get_user_home_path(), '.expyfun', 'expyfun.json')
return val
# List the known configuration values
known_config_types = ('RESPONSE_DEVICE',
'AUDIO_CONTROLLER',
'DB_OF_SINE_AT_1KHZ_1RMS',
'EXPYFUN_EYELINK',
'SOUND_CARD_API',
'SOUND_CARD_BACKEND',
'SOUND_CARD_FS',
'SOUND_CARD_NAME',
'SOUND_CARD_FIXED_DELAY',
'TDT_CIRCUIT_PATH',
'TDT_DELAY',
'TDT_INTERFACE',
'TDT_MODEL',
'TDT_TRIG_DELAY',
'TRIGGER_CONTROLLER',
'TRIGGER_ADDRESS',
'WINDOW_SIZE',
'SCREEN_NUM',
'SCREEN_WIDTH',
'SCREEN_DISTANCE',
'SCREEN_SIZE_PIX',
'EXPYFUN_LOGGING_LEVEL',
)
# These allow for partial matches: 'NAME_1' is okay key if 'NAME' is listed
known_config_wildcards = ()
def get_config(key=None, default=None, raise_error=False):
"""Read expyfun preference from env, then expyfun config
Parameters
----------
key : str
The preference key to look for. The os environment is searched first,
then the expyfun config file is parsed.
default : str | None
Value to return if the key is not found.
raise_error : bool
If True, raise an error if the key is not found (instead of returning
default).
Returns
-------
value : str | None
The preference key value.
"""
if key is not None and not isinstance(key, string_types):
raise ValueError('key must be a string')
# first, check to see if key is in env
if key is not None and key in os.environ:
return os.environ[key]
# second, look for it in expyfun config file
config_path = get_config_path()
if not op.isfile(config_path):
key_found = False
val = default
else:
with open(config_path, 'r') as fid:
config = json.load(fid)
if key is None:
return config
key_found = True if key in config else False
val = config.get(key, default)
if not key_found and raise_error is True:
meth_1 = 'os.environ["%s"] = VALUE' % key
meth_2 = 'expyfun.utils.set_config("%s", VALUE)' % key
raise KeyError('Key "%s" not found in environment or in the '
'expyfun config file:\n%s\nTry either:\n'
' %s\nfor a temporary solution, or:\n'
' %s\nfor a permanent one. You can also '
'set the environment variable before '
'running python.'
% (key, config_path, meth_1, meth_2))
return val
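# Usage sketch (illustrative; SOUND_CARD_FS is one of the known_config_types
# above, and the fallback value is a made-up example):
#
#     fs = int(get_config('SOUND_CARD_FS', '44100'))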
def set_config(key, value):
"""Set expyfun preference in config
Parameters
----------
key : str | None
The preference key to set. If None, a tuple of the valid
keys is returned, and ``value`` is ignored.
value : str | None
The value to assign to the preference key. If None, the key is
deleted.
"""
if key is None:
return sorted(known_config_types)
if not isinstance(key, string_types):
raise ValueError('key must be a string')
# While JSON allow non-string types, we allow users to override config
# settings using env, which are strings, so we enforce that here
if not isinstance(value, string_types) and value is not None:
raise ValueError('value must be a string or None')
if key not in known_config_types and not \
any(k in key for k in known_config_wildcards):
warnings.warn('Setting non-standard config type: "%s"' % key)
# Read all previous values
config_path = get_config_path()
if op.isfile(config_path):
with open(config_path, 'r') as fid:
config = json.load(fid)
else:
config = dict()
logger.info('Attempting to create new expyfun configuration '
'file:\n%s' % config_path)
if value is None:
config.pop(key, None)
else:
config[key] = value
# Write all values
directory = op.split(config_path)[0]
if not op.isdir(directory):
os.mkdir(directory)
with open(config_path, 'w') as fid:
json.dump(config, fid, sort_keys=True, indent=0)
###############################################################################
# MISC
def fake_button_press(ec, button='1', delay=0.):
"""Fake a button press after a delay
Notes
-----
This function only works with the keyboard controller (not TDT)!
It uses threads to ensure that control is passed back, so other commands
can be called (like wait_for_presses).
"""
def send():
ec._response_handler._on_pyglet_keypress(button, [], True)
Timer(delay, send).start() if delay > 0. else send()
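# Illustrative use (assumes a hypothetical ExperimentController instance
# ``ec``): control returns immediately, so a subsequent wait-for-press call
# can pick up the simulated press.
#
#     fake_button_press(ec, button='1', delay=0.5)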
def fake_mouse_click(ec, pos, button='left', delay=0.):
"""Fake a mouse click after a delay"""
button = dict(left=1, middle=2, right=4)[button] # trans to pyglet
def send():
ec._mouse_handler._on_pyglet_mouse_click(pos[0], pos[1], button, [])
Timer(delay, send).start() if delay > 0. else send()
def _check_pyglet_version(raise_error=False):
"""Check pyglet version, return True if usable.
"""
import pyglet
is_usable = LooseVersion(pyglet.version) >= LooseVersion('1.2')
if raise_error is True and is_usable is False:
raise ImportError('On Linux, you must run at least Pyglet '
'version 1.2, and you are running '
'{0}'.format(pyglet.version))
return is_usable
def _wait_secs(secs, ec=None):
"""Wait a specified number of seconds.
Parameters
----------
secs : float
Number of seconds to wait.
ec : None | expyfun.ExperimentController instance
The ExperimentController.
Notes
-----
This function uses a while loop. Although this slams the CPU, it will
guarantee that events (keypresses, etc.) are processed.
"""
# hog the cpu, checking time
t0 = clock()
if ec is not None:
while (clock() - t0) < secs:
ec._dispatch_events()
ec.check_force_quit()
else:
wins = _get_display().get_windows()
for win in wins:
win.dispatch_events()
def running_rms(signal, win_length):
"""RMS of ``signal`` with rectangular window ``win_length`` samples long.
Parameters
----------
signal : array_like
        The (1-dimensional) signal of interest.
win_length : int
Length (in samples) of the rectangular window
"""
return sqrt(convolve(signal ** 2, ones(win_length) / win_length, 'valid'))
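# Worked example (illustrative; not part of the original module): for a
# constant signal the running RMS equals the signal's absolute value.
#
#     import numpy as np
#     sig = 0.5 * np.ones(10)
#     assert np.allclose(running_rms(sig, win_length=4), 0.5)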
def _fix_audio_dims(signal, n_channels):
"""Make it so a valid audio buffer is in the standard dimensions
Parameters
----------
signal : array_like
The signal whose dimensions should be checked and fixed.
n_channels : int
The number of channels that the output should have.
If the input is mono and n_channels=2, it will be tiled to be
shape (2, n_samples). Otherwise, the number of channels in signal
must match n_channels.
Returns
-------
signal_fixed : array
The signal with standard dimensions (n_channels, N).
"""
# Check requested channel output
n_channels = int(operator.index(n_channels))
signal = np.asarray(np.atleast_2d(signal), dtype=np.float32)
# Check dimensionality
if signal.ndim != 2:
raise ValueError('Sound data must have one or two dimensions, got %s.'
% (signal.ndim,))
# Return data with correct dimensions
if n_channels == 2 and signal.shape[0] == 1:
signal = np.tile(signal, (n_channels, 1))
if signal.shape[0] != n_channels:
raise ValueError('signal channel count %d did not match required '
'channel count %d' % (signal.shape[0], n_channels))
return signal
def _sanitize(text_like):
"""Cast as string, encode as UTF-8 and sanitize any escape characters.
"""
return text_type(text_like).encode('unicode_escape').decode('utf-8')
def _sort_keys(x):
"""Sort and return keys of dict"""
keys = list(x.keys()) # note: not thread-safe
idx = np.argsort([str(k) for k in keys])
keys = [keys[ii] for ii in idx]
return keys
def object_diff(a, b, pre=''):
"""Compute all differences between two python variables
Parameters
----------
a : object
Currently supported: dict, list, tuple, ndarray, int, str, bytes,
float, StringIO, BytesIO.
b : object
Must be same type as ``a``.
pre : str
String to prepend to each line.
Returns
-------
diffs : str
A string representation of the differences.
Notes
-----
Taken from mne-python with permission.
"""
out = ''
if type(a) != type(b):
out += pre + ' type mismatch (%s, %s)\n' % (type(a), type(b))
elif isinstance(a, dict):
k1s = _sort_keys(a)
k2s = _sort_keys(b)
m1 = set(k2s) - set(k1s)
if len(m1):
out += pre + ' x1 missing keys %s\n' % (m1)
for key in k1s:
if key not in k2s:
out += pre + ' x2 missing key %s\n' % key
else:
out += object_diff(a[key], b[key], pre + 'd1[%s]' % repr(key))
elif isinstance(a, (list, tuple)):
if len(a) != len(b):
out += pre + ' length mismatch (%s, %s)\n' % (len(a), len(b))
else:
for xx1, xx2 in zip(a, b):
out += object_diff(xx1, xx2, pre='')
elif isinstance(a, (string_types, int, float, bytes)):
if a != b:
out += pre + ' value mismatch (%s, %s)\n' % (a, b)
elif a is None:
if b is not None:
out += pre + ' a is None, b is not (%s)\n' % (b)
elif isinstance(a, np.ndarray):
if not np.array_equal(a, b):
out += pre + ' array mismatch\n'
else:
raise RuntimeError(pre + ': unsupported type %s (%s)' % (type(a), a))
return out
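# Illustrative example (not part of the original module):
#
#     >>> print(object_diff({'a': 1}, {'a': 2}, pre='x'))  # doctest: +SKIP
#     xd1['a'] value mismatch (1, 2)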
def _check_skip_backend(backend):
from expyfun._sound_controllers import _import_backend
import pytest
if isinstance(backend, dict): # actually an AC
backend = backend['SOUND_CARD_BACKEND']
try:
_import_backend(backend)
except Exception as exc:
pytest.skip('Skipping test for backend %s: %s' % (backend, exc))
def _check_params(params, keys, defaults, name):
if not isinstance(params, dict):
raise TypeError('{0} must be a dict, got type {1}'
.format(name, type(params)))
params = deepcopy(params)
if not isinstance(params, dict):
raise TypeError('{0} must be a dict, got {1}'
.format(name, type(params)))
# Set sensible defaults for values that are not passed
for k in keys:
params[k] = params.get(k, get_config(k, defaults.get(k, None)))
# Check keys
for k in params.keys():
if k not in keys:
raise KeyError('Unrecognized key in {0}["{1}"], must be '
'one of {2}'.format(name, k, ', '.join(keys)))
return params
def _get_display():
import pyglet
try:
display = pyglet.canvas.get_display()
except AttributeError: # < 1.4
display = pyglet.window.get_platform().get_default_display()
return display
 | [((797, 824), 'sys.version.startswith', 'sys.version.startswith', (['"""2"""'], {}), "('2')\n", (819, 824), False, 'import sys\n'), ((1390, 1422), 'logging.addLevelName', 'logging.addLevelName', (['EXP', '"""EXP"""'], {}), "(EXP, 'EXP')\n", (1410, 1422), False, 'import logging\n'), ((1582, 1610), 'logging.getLogger', 'logging.getLogger', (['"""expyfun"""'], {}), "('expyfun')\n", (1599, 1610), False, 'import logging\n'), ((6044, 6075), 'subprocess.Popen', 'subprocess.Popen', (['command'], {}), '(command, **kw)\n', (6060, 6075), False, 'import subprocess\n'), ((13999, 14029), 'pyglet.gl.gl_info.get_vendor', 'pyglet.gl.gl_info.get_vendor', ([], {}), '()\n', (14027, 14029), False, 'import pyglet\n'), ((14044, 14075), 'pyglet.gl.gl_info.get_version', 'pyglet.gl.gl_info.get_version', ([], {}), '()\n', (14073, 14075), False, 'import pyglet\n'), ((14093, 14129), 'pyglet.gl.gl_info.have_version', 'pyglet.gl.gl_info.have_version', (['(2)', '(0)'], {}), '(2, 0)\n', (14123, 14129), False, 'import pyglet\n'), ((14573, 14611), 'pytest.mark.skipif', 'pytest.mark.skipif', (['val'], {'reason': 'reason'}), '(val, reason=reason)\n', (14591, 14611), False, 'import pytest\n'), ((15660, 15680), 'os.path.join', 'op.join', (['path', 'fname'], {}), '(path, fname)\n', (15667, 15680), True, 'import os.path as op\n'), ((20812, 20834), 'os.path.isfile', 'op.isfile', (['config_path'], {}), '(config_path)\n', (20821, 20834), True, 'import os.path as op\n'), ((23146, 23153), 'timeit.default_timer', 'clock', ([], {}), '()\n', (23151, 23153), True, 'from timeit import default_timer as clock\n'), ((27768, 27784), 'copy.deepcopy', 'deepcopy', (['params'], {}), '(params)\n', (27776, 27784), False, 'from copy import deepcopy\n'), ((4503, 4540), 'logging.FileHandler', 'logging.FileHandler', (['fname'], {'mode': 'mode'}), '(fname, mode=mode)\n', (4522, 4540), False, 'import logging\n'), ((4810, 4842), 'logging.Formatter', 'logging.Formatter', (['output_format'], {}), '(output_format)\n', (4827, 4842), False, 'import logging\n'), ((6618, 6625), 'timeit.default_timer', 'clock', ([], {}), '()\n', (6623, 6625), True, 'from timeit import default_timer as clock\n'), ((7945, 7974), 'atexit.register', 'atexit.register', (['self.cleanup'], {}), '(self.cleanup)\n', (7960, 7974), False, 'import atexit\n'), ((11845, 11873), 'inspect.getargspec', 'inspect.getargspec', (['function'], {}), '(function)\n', (11863, 11873), False, 'import inspect\n'), ((13199, 13227), 'distutils.version.LooseVersion', 'LooseVersion', (['pyglet.version'], {}), '(pyglet.version)\n', (13211, 13227), False, 'from distutils.version import LooseVersion\n'), ((13231, 13250), 'distutils.version.LooseVersion', 'LooseVersion', (['"""1.4"""'], {}), "('1.4')\n", (13243, 13250), False, 'from distutils.version import LooseVersion\n'), ((14141, 14231), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not sufficient)'], {'reason': "('OpenGL too old: %s %s' % (vendor, version))"}), "(not sufficient, reason='OpenGL too old: %s %s' % (vendor,\n version))\n", (14159, 14231), False, 'import pytest\n'), ((14389, 14417), 'importlib.import_module', 'importlib.import_module', (['lib'], {}), '(lib)\n', (14412, 14417), False, 'import importlib\n'), ((14659, 14687), 'distutils.version.LooseVersion', 'LooseVersion', (['sp.__version__'], {}), '(sp.__version__)\n', (14671, 14687), False, 'from distutils.version import LooseVersion\n'), ((14691, 14712), 'distutils.version.LooseVersion', 'LooseVersion', (['version'], {}), '(version)\n', (14703, 14712), False, 'from distutils.version import LooseVersion\n'), ((15945, 15977), 'ssl._create_unverified_context', 'ssl._create_unverified_context', ([], {}), '()\n', (15975, 15977), False, 'import ssl\n'), ((16001, 16034), 'functools.partial', 'partial', (['urlopen'], {'context': 'context'}), '(urlopen, context=context)\n', (16008, 16034), False, 'from functools import partial\n'), ((16127, 16147), 'os.path.isfile', 'op.isfile', (['fname_out'], {}), '(fname_out)\n', (16136, 16147), True, 'import os.path as op\n'), ((18869, 18891), 'os.path.isfile', 'op.isfile', (['config_path'], {}), '(config_path)\n', (18878, 18891), True, 'import os.path as op\n'), ((20675, 20736), 'warnings.warn', 'warnings.warn', (['(\'Setting non-standard config type: "%s"\' % key)'], {}), '(\'Setting non-standard config type: "%s"\' % key)\n', (20688, 20736), False, 'import warnings\n'), ((21197, 21218), 'os.path.split', 'op.split', (['config_path'], {}), '(config_path)\n', (21205, 21218), True, 'import os.path as op\n'), ((21233, 21252), 'os.path.isdir', 'op.isdir', (['directory'], {}), '(directory)\n', (21241, 21252), True, 'import os.path as op\n'), ((21262, 21281), 'os.mkdir', 'os.mkdir', (['directory'], {}), '(directory)\n', (21270, 21281), False, 'import os\n'), ((21330, 21378), 'json.dump', 'json.dump', (['config', 'fid'], {'sort_keys': '(True)', 'indent': '(0)'}), '(config, fid, sort_keys=True, indent=0)\n', (21339, 21378), False, 'import json\n'), ((22384, 22412), 'distutils.version.LooseVersion', 'LooseVersion', (['pyglet.version'], {}), '(pyglet.version)\n', (22396, 22412), False, 'from distutils.version import LooseVersion\n'), ((22416, 22435), 'distutils.version.LooseVersion', 'LooseVersion', (['"""1.2"""'], {}), "('1.2')\n", (22428, 22435), False, 'from distutils.version import LooseVersion\n'), ((24441, 24467), 'operator.index', 'operator.index', (['n_channels'], {}), '(n_channels)\n', (24455, 24467), False, 'import operator\n'), ((24493, 24514), 'numpy.atleast_2d', 'np.atleast_2d', (['signal'], {}), '(signal)\n', (24506, 24514), True, 'import numpy as np\n'), ((24816, 24848), 'numpy.tile', 'np.tile', (['signal', '(n_channels, 1)'], {}), '(signal, (n_channels, 1))\n', (24823, 24848), True, 'import numpy as np\n'), ((27428, 27452), 'expyfun._sound_controllers._import_backend', '_import_backend', (['backend'], {}), '(backend)\n', (27443, 27452), False, 'from expyfun._sound_controllers import _import_backend\n'), ((28379, 28406), 'pyglet.canvas.get_display', 'pyglet.canvas.get_display', ([], {}), '()\n', (28404, 28406), False, 'import pyglet\n'), ((4215, 4231), 'os.path.isfile', 'op.isfile', (['fname'], {}), '(fname)\n', (4224, 4231), True, 'import os.path as op\n'), ((4267, 4392), 'warnings.warn', 'warnings.warn', (['"""Log entries will be appended to the file. Use overwrite=False to avoid this message in the future."""'], {}), "(\n 'Log entries will be appended to the file. Use overwrite=False to avoid this message in the future.'\n )\n", (4280, 4392), False, 'import warnings\n'), ((5174, 5189), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (5187, 5189), False, 'import inspect\n'), ((6303, 6363), 'subprocess.CalledProcessError', 'subprocess.CalledProcessError', (['p.returncode', 'command', 'output'], {}), '(p.returncode, command, output)\n', (6332, 6363), False, 'import subprocess\n'), ((6396, 6448), 'subprocess.CalledProcessError', 'subprocess.CalledProcessError', (['p.returncode', 'command'], {}), '(p.returncode, command)\n', (6425, 6448), False, 'import subprocess\n'), ((6690, 6697), 'timeit.default_timer', 'clock', ([], {}), '()\n', (6695, 6697), True, 'from timeit import default_timer as clock\n'), ((7765, 7783), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (7781, 7783), False, 'import tempfile\n'), ((8147, 8185), 'shutil.rmtree', 'rmtree', (['self._path'], {'ignore_errors': '(True)'}), '(self._path, ignore_errors=True)\n', (8153, 8185), False, 'from shutil import rmtree\n'), ((10220, 10267), 'warnings.warn', 'warnings.warn', (['msg'], {'category': 'DeprecationWarning'}), '(msg, category=DeprecationWarning)\n', (10233, 10267), False, 'import warnings\n'), ((10740, 10787), 'warnings.warn', 'warnings.warn', (['msg'], {'category': 'DeprecationWarning'}), '(msg, category=DeprecationWarning)\n', (10753, 10787), False, 'import warnings\n'), ((11328, 11355), 'inspect.signature', 'inspect.signature', (['function'], {}), '(function)\n', (11345, 11355), False, 'import inspect\n'), ((15701, 15722), 'os.path.dirname', 'op.dirname', (['fname_out'], {}), '(fname_out)\n', (15711, 15722), True, 'import os.path as op\n'), ((15745, 15766), 'os.path.dirname', 'op.dirname', (['fname_out'], {}), '(fname_out)\n', (15755, 15766), True, 'import os.path as op\n'), ((19016, 19030), 'json.load', 'json.load', (['fid'], {}), '(fid)\n', (19025, 19030), False, 'import json\n'), ((20901, 20915), 'json.load', 'json.load', (['fid'], {}), '(fid)\n', (20910, 20915), False, 'import json\n'), ((27490, 27554), 'pytest.skip', 'pytest.skip', (["('Skipping test for backend %s: %s' % (backend, exc))"], {}), "('Skipping test for backend %s: %s' % (backend, exc))\n", (27501, 27554), False, 'import pytest\n'), ((6885, 6910), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (6908, 6910), False, 'import datetime\n'), ((14884, 14899), 'os.name.lower', 'os.name.lower', ([], {}), '()\n', (14897, 14899), False, 'import os\n'), ((16427, 16447), 'os.remove', 'os.remove', (['fname_out'], {}), '(fname_out)\n', (16436, 16447), False, 'import os\n'), ((21865, 21883), 'threading.Timer', 'Timer', (['delay', 'send'], {}), '(delay, send)\n', (21870, 21883), False, 'from threading import Timer\n'), ((22189, 22207), 'threading.Timer', 'Timer', (['delay', 'send'], {}), '(delay, send)\n', (22194, 22207), False, 'from threading import Timer\n'), ((23192, 23199), 'timeit.default_timer', 'clock', ([], {}), '()\n', (23197, 23199), True, 'from timeit import default_timer as clock\n'), ((23735, 23751), 'numpy.ones', 'ones', (['win_length'], {}), '(win_length)\n', (23739, 23751), False, 'from numpy import sqrt, convolve, ones\n'), ((28461, 28489), 'pyglet.window.get_platform', 'pyglet.window.get_platform', ([], {}), '()\n', (28487, 28489), False, 'import pyglet\n'), ((27028, 27048), 'numpy.array_equal', 'np.array_equal', (['a', 'b'], {}), '(a, b)\n', (27042, 27048), True, 'import numpy as np\n')] |
delimatorres/foodbasket | mixin.py | 2f043d713337581be2165259cdbba4e4a24b656b | import signal
class KillableProcess(object):
def __init__(self):
self.interrupt = False
signal.signal(signal.SIGTERM, self._signal_handler)
signal.signal(signal.SIGINT, self._signal_handler)
def _signal_handler(self, sign, frame):
self.interrupt = True | [((110, 161), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'self._signal_handler'], {}), '(signal.SIGTERM, self._signal_handler)\n', (123, 161), False, 'import signal\n'), ((170, 220), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'self._signal_handler'], {}), '(signal.SIGINT, self._signal_handler)\n', (183, 220), False, 'import signal\n')] |
liubaishuo-github/peening-post-processor | test5.py | 61f4c2d2385469bc1e9d1b7a692b72eb6afd7f75 | def HAHA():
    return 1, 2, 3
a = HAHA()
print(a)
print(a[0])
| [] |
jsun94/nimble | torch/_fx/graph_module.py | e5c899a69677818b1becc58100577441e15ede13 | import torch
import torch.overrides
import linecache
from typing import Type, Dict, List, Any, Union
from .graph import Graph
import copy
# normal exec loses the source code, however we can patch
# the linecache module to still recover it.
# using exec_with_source will add it to our local cache
# and then tools like TorchScript will be able to get source info.
_next_id = 0
def exec_with_source(src: str, globals: Dict[str, Any]):
global _next_id
key = f'<eval_with_key_{_next_id}>'
_next_id += 1
_eval_cache[key] = [line + '\n' for line in src.splitlines()]
exec(compile(src, key, 'exec'), globals)
# patch linecache so that any code we exec using exec_with_source
# works with inspect
_eval_cache : Dict[str, List[str]] = {}
_orig_getlines = linecache.getlines
def patched_getline(*args, **kwargs):
if args[0] in _eval_cache:
return _eval_cache[args[0]]
return _orig_getlines(*args, **kwargs)
linecache.getlines = patched_getline
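# Illustrative sketch (not part of the original module): with the patch in
# place, source generated at runtime stays recoverable through inspect, e.g.:
#
#     import inspect
#     scope: Dict[str, Any] = {}
#     exec_with_source("def f(x):\n    return x + 1\n", scope)
#     print(inspect.getsource(scope['f']))  # prints the generated source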
def _forward_from_src(src : str):
gbls: Dict[str, Any] = {
'torch': torch
}
exec_with_source(src, gbls)
return gbls['forward']
def deserialize_graphmodule(body : dict) -> torch.nn.Module:
"""
Deserialize a GraphModule given the dictionary of the original module,
using the code to reconstruct the graph. We delete the actual graph before
saving the dictionary so that changes to the in-memory graph format do not
get serialized.
"""
# We create a dummy class here because symbolic_trace pulls the forward()
# function off of the class, rather than the instance
class CodeOnlyModule(torch.nn.Module):
def __init__(self, body):
super().__init__()
self.__dict__ = body
CodeOnlyModule.forward = _forward_from_src(body['code'])
from .symbolic_trace import Tracer
    # we shouldn't trace into any of the submodules,
    # because they were not traced in the original GraphModule
class KeepModules(Tracer):
def is_leaf_module(self, _: torch.nn.Module, __: str) -> bool:
return True
return KeepModules().trace(CodeOnlyModule(body))
# copy an attribute value with qualified name 'target' from 'from_module' to 'to_module'
# This installs empty Modules where none exist yet if they are subpaths of target
def _copy_attr(from_module: torch.nn.Module, to_module: torch.nn.Module, target: str):
*prefix, field = target.split('.')
for item in prefix:
f = getattr(from_module, item)
t = getattr(to_module, item, None)
if f is t:
# we have already installed one of its parents
# (e.g. target = root.linear.weight, but we have already installed root.linear)
# once we install a parent, we no longer need to copy the children
# since all the needed properties will already be present
return
if t is None:
t = torch.nn.Module()
setattr(to_module, item, t)
from_module, to_module = f, t
setattr(to_module, field, getattr(from_module, field))
# Assign attribute 'from_obj' to the qualified name 'target' on 'to_module'
# This installs empty Modules where none exist yet if they are subpaths of target
def _assign_attr(from_obj: Any, to_module: torch.nn.Module, target: str):
*prefix, field = target.split('.')
for item in prefix:
t = getattr(to_module, item, None)
if t is None:
t = torch.nn.Module()
setattr(to_module, item, t)
to_module = t
setattr(to_module, field, from_obj)
class GraphModule(torch.nn.Module):
"""
GraphModule is an nn.Module generated from an fx.Graph. GraphModule has
important attributes:
graph : The graph from which this GraphModule was generated
code : The Python source code for the function generated from `graph`
forward : The Python method generated from `graph`
Note that when `graph` is reassigned, `code` and `forward` will be automatically
regenerated.
"""
def __new__(cls: 'Type[GraphModule]', *args, **kwargs):
# each instance of a graph module needs its own forward method
# so create a new singleton class for each instance.
# it is a subclass of the user-defined class, the only difference
# is an extra layer to install the forward method
class GraphModuleImpl(cls): # type: ignore
pass
return super().__new__(GraphModuleImpl)
def __init__(self, root: Union[torch.nn.Module, Dict[str, Any]], graph: Graph):
"""
Construct a GraphModule.
root - `root` can either be an nn.Module instance or a Dict mapping strings to any attribute type.
- In the case that `root` is a Module, any references to Module-based objects (via qualified
name) in the Graph's Nodes' `target` field will be copied over from the respective place
within `root`'s Module hierarchy into the GraphModule's module hierarchy.
- In the case that `root` is a dict, the qualified name found in a Node's `target` will be
looked up directly in the dict's keys. The object mapped to by the Dict will be copied
over into the appropriate place within the GraphModule's module hierarchy.
graph - `graph` contains the nodes this GraphModule should use for code generation
"""
super().__init__()
if isinstance(root, torch.nn.Module):
if hasattr(root, 'training'):
self.training = root.training
for node in graph.nodes:
if node.op in ['get_attr', 'call_module']:
assert isinstance(node.target, str)
_copy_attr(root, self, node.target)
elif isinstance(root, dict):
targets_to_copy = []
for node in graph.nodes:
if node.op in ['get_attr', 'call_module']:
assert isinstance(node.target, str)
if node.target not in root:
raise RuntimeError('Node ' + str(node) + ' referenced target ' + node.target +
' but that target was not provided in `root`!')
targets_to_copy.append(node.target)
# Sort targets in ascending order of the # of atoms.
# This will ensure that less deeply nested attributes are assigned
# before more deeply nested attributes. For example, foo.bar
# will be assigned before foo.bar.baz. Otherwise, we might assign
# the user-provided `foo.bar` and wipe out the previously-assigned
# `foo.bar.baz`
targets_to_copy.sort(key=lambda t: t.count('.'))
for target_to_copy in targets_to_copy:
_assign_attr(root[target_to_copy], self, target_to_copy)
else:
raise RuntimeError('Unsupported type ' + str(root) + ' passed for root!')
self.graph = graph
# TorchScript breaks trying to compile the graph setter because of the
# continued string literal. Issue here: https://github.com/pytorch/pytorch/issues/44842
#
# Shouldn't be an issue since these methods shouldn't be used in TorchScript anyway
__jit_unused_properties__ = ['graph']
@property
def graph(self):
return self._graph
@graph.setter
def graph(self, val) -> None:
self._graph = val
body, result, free_variables = self._graph.python_code(root_module='self')
body = '\n'.join(' ' + line for line in body.split('\n')) + '\n'
self.code = f"""\
def forward(self, {', '.join(free_variables)}):
{body}
return {result}
"""
cls = type(self)
cls.forward = _forward_from_src(self.code)
def __reduce__(self):
dict_without_graph = self.__dict__.copy()
del dict_without_graph['_graph']
return (deserialize_graphmodule, (dict_without_graph,))
# because __reduce__ is defined for serialization,
# we need to define deepcopy otherwise it will call __reduce__
# and cause symbolic tracing to occur every time we try to copy the object
def __deepcopy__(self, memo):
fake_mod = torch.nn.Module()
fake_mod.__dict__ = copy.deepcopy(self.__dict__)
return GraphModule(fake_mod, self.graph)
def __copy__(self):
return GraphModule(self, self.graph)
def __str__(self) -> str:
orig_str = super().__str__()
return '\n'.join([orig_str, self.code])
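# Illustrative usage sketch (assumes the sibling symbolic_trace module exposes
# a symbolic_trace() helper, as the public torch.fx API later did):
#
#     class M(torch.nn.Module):
#         def forward(self, x):
#             return x + 1
#
#     gm = symbolic_trace(M())  # returns a GraphModule
#     print(gm.code)           # generated Python source of forward()
#     gm.graph = gm.graph      # reassigning `graph` regenerates `code`/`forward`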
# workarounds for issues in __torch_function__
# WAR for __torch_function__ not handling tensor lists,
# fix is in https://github.com/pytorch/pytorch/pull/34725
# orig_cat = torch.cat
# def patched_cat(*args, **kwargs):
# tensors = args[0]
# for t in tensors:
# if isinstance(t, Proxy):
# return t.__torch_function__(patched_cat, (), args, kwargs)
# return orig_cat(*args, **kwargs)
# patched_cat.__module__ = 'torch'
# patched_cat.__name__ = 'cat'
# torch.cat = patched_cat
| [((8260, 8277), 'torch.nn.Module', 'torch.nn.Module', ([], {}), '()\n', (8275, 8277), False, 'import torch\n'), ((8306, 8334), 'copy.deepcopy', 'copy.deepcopy', (['self.__dict__'], {}), '(self.__dict__)\n', (8319, 8334), False, 'import copy\n'), ((2927, 2944), 'torch.nn.Module', 'torch.nn.Module', ([], {}), '()\n', (2942, 2944), False, 'import torch\n'), ((3460, 3477), 'torch.nn.Module', 'torch.nn.Module', ([], {}), '()\n', (3475, 3477), False, 'import torch\n')] |
robot0nfire/behem0th | RequestHandler.py | 3931f2a9a2f00b95d82ccb3c5e7c13b3fbb5f4d7 | #
# Copyright (c) 2016 Christoph Heiss <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import os
import json
import struct
import threading
import socket
import queue
import tempfile
import base64
import select
from behem0th import utils, log
BLOCK_SIZE = 4096
class Route:
def handle(self, data, request):
raise NotImplementedError
def send(self, data):
self.handler.send(self.route_name, data)
class FilelistRoute(Route):
def handle(self, data, request):
if request.is_client:
request.client._filelist = data
request.client._rlock.release()
else:
files, events = request.client._merge_filelist(data)
with request.client._rlock:
self.send(request.client._filelist)
for e in events:
request.queue_event(e)
for f in files:
request.queue_file(f[0], f[1])
"""
{
"action": "<action>",
"path": "<relpath-to-file>"
}
<action> can be either 'receive' or 'send'
The payload is sent as base64-encoded chunks (BLOCK_SIZE bytes each)
"""
class FileRoute(Route):
def handle(self, data, request):
action = data['action']
path = data['path']
if action == 'receive':
tmpf = tempfile.NamedTemporaryFile(delete=False)
buffer = b''
for chunk in request.recv():
buffer += chunk
if len(buffer) >= BLOCK_SIZE:
tmpf.write(base64.b64decode(buffer[:BLOCK_SIZE]))
					buffer = buffer[BLOCK_SIZE:]
tmpf.write(base64.b64decode(buffer))
tmpf.close()
# watchdog reports a file-deleted and a file-created event, so ignore both.
request.client._ignore_next_fsevent(path)
request.client._ignore_next_fsevent(path)
os.rename(tmpf.name, request.client._abspath(path))
request.client._update_metadata(path)
request.client._event_handler._dispatch(
'received', request.client, path, 'file'
)
elif action == 'send':
request.queue_file('send', path)
else:
			log.warn('FileRoute: Unknown action \'{0}\', ignoring.', action)
# If we are the 'server', we also need to distribute all file request
# to all other clients.
if not request.is_client:
action = 'send' if action == 'receive' else 'request'
request.client._run_on_peers('queue_file', request, action, path)
"""
{
"type": "<type>",
"path": "<relpath-to-file>"
}
<type> can be one of 'file-created', 'file-deleted', 'file-moved'.
'file-moved' events additionally carry a "dest" key with the destination path.
"""
class EventRoute(Route):
def handle(self, data, request):
f_type, event = data['type'].split('-')
path = data['path']
abspath = request.client._abspath(path)
request.client._ignore_next_fsevent(path)
# TODO: factor out common code with Client._handle_fsevent() and Client._merge_filelist()
if event == 'created':
# create the file/directory
if f_type == 'file':
open(abspath, 'a').close()
else:
os.mkdir(abspath, 0o755)
request.client._add_to_filelist(path, f_type)
elif event == 'deleted':
request.client._remove_from_filelist(path)
os.remove(abspath)
elif event == 'moved':
request.client._remove_from_filelist(path)
			os.rename(abspath, request.client._abspath(data['dest']))
request.client._add_to_filelist(data['dest'], f_type)
else:
log.warn('EventRoute: Unknown event {0}', data)
# For rationale, see FileRoute.handle()
if not request.is_client:
request.client._run_on_peers('queue_event', request, data)
ROUTES = {
'filelist': FilelistRoute(),
'file': FileRoute(),
'event': EventRoute()
}
"""
behem0th's protocol is completely text-based, using utf-8 encoding and
encoded in JSON for easy parsing.
A request usually looks like this:
{ "route": "<route-name>", "data": "<data>" }
'data' holds additional data which is then passed to the route.
There is no special format designed for 'data' and is specific to each route.
After each request there is a newline to separate them. (think of HTTP)
If a route needs to transfer additional data (a 'payload'), it has to send it
in a text-based format, e.g. base-64 encoding for binary data.
After the payload, if any, there has to be another newline to separate it from
the next request.
"""
class RequestHandler(threading.Thread):
req_handler_num = 0
def __init__(self, **kwargs):
super().__init__()
self.daemon = True
self.sync_queue = queue.Queue()
self.routes = {}
self.recvbuf = b''
RequestHandler.req_handler_num += 1
self.name = "request-handler-{0}".format(RequestHandler.req_handler_num)
for key, value in kwargs.items():
setattr(self, key, value)
with self.client._rlock:
self.client._peers.append(self)
self.sock.setblocking(0)
self.is_client = bool(self.client._sock)
for name, route in ROUTES.items():
route.route_name = name
route.handler = self
self.routes[name] = route
def setup(self):
log.info('Connected to {0}:{1}', self.address[0], self.address[1])
		# If self.client has an (active) socket, it is a client and
		# thus needs to start syncing up with the server.
if self.is_client:
# Lock the client until the filelist has been sent back by the server.
self.client._rlock.acquire()
self.send('filelist', self.client._filelist)
def close(self):
self.sync_queue.put({'action': 'exit'})
try:
self.sock.shutdown(socket.SHUT_RDWR)
except OSError:
pass
def handle(self, data):
try:
data = json.loads(data)
except ValueError:
log.error('Received invalid data: {0}', data)
return
route = data['route']
data = data['data']
log.info_v('Handling {0}, data:\n{1}', route, data)
if route in self.routes:
self.routes[route].handle(data, self)
else:
log.error("Data received on unknown route '{0}'!", route)
def send(self, route, data):
request = json.dumps({'route': route, 'data': data}) + '\n'
self.sock.sendall(request.encode())
def recv(self):
if self.recvbuf:
			# This needs special handling because there could be multiple
			# requests in recvbuf. If this is the case, we can only yield the first
			# one and have to leave the others in recvbuf.
index = self.recvbuf.find(b'\n')
if index == -1:
yield self.recvbuf
self.recvbuf = None
else:
yield self.recvbuf[:index]
self.recvbuf = self.recvbuf[index+1:]
				return
while 1:
select.select([self.sock], [], [])
chunk = self.sock.recv(1024)
if not len(chunk):
# If select has signaled the socket is readable, yet .recv()
# returns zero bytes, the other end probably performed
# a close() or shutdown() on the socket.
break
index = chunk.find(b'\n')
if index == -1:
yield chunk
else:
yield chunk[:index]
self.recvbuf = chunk[index+1:]
break
def queue_file(self, action, path):
self.sync_queue.put({
'action': action + '-file',
'path': path
})
def queue_event(self, event):
self.sync_queue.put({
'action': 'send-event',
'event': event
})
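	# Entries on sync_queue are dicts of the shape
	# {'action': 'exit' | 'send-file' | 'request-file' | 'send-event', ...},
	# consumed one at a time by sync_worker() below.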
def sync_worker(self):
while 1:
entry = self.sync_queue.get()
log.info_v('Processing {0}', entry)
if entry['action'] == 'exit':
break
elif entry['action'] == 'send-file':
path = entry['path']
abspath = self.client._abspath(path)
self.send('file', {
'path': path,
'action': 'receive'
})
for buf in utils.read_file_seq(abspath, BLOCK_SIZE):
self.sock.sendall(base64.b64encode(buf))
self.sock.sendall(b'\n')
self.client._event_handler._dispatch(
'sent', self.client, path, 'file'
)
elif entry['action'] == 'request-file':
self.send('file', {
'path': entry['path'],
'action': 'send'
})
elif entry['action'] == 'send-event':
self.send('event', entry['event'])
self.sync_queue.task_done()
def run(self):
self.setup()
utils.create_thread(self.sync_worker,
name=self.name.replace('request-handler', 'sync-worker'))
while 1:
buffer = b''
for chunk in self.recv():
buffer += chunk
if not len(buffer):
break
self.handle(buffer.decode())
log.info('Disconnected from {0}:{1}', self.address[0], self.address[1])
self.close()
| [((5195, 5208), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (5206, 5208), False, 'import queue\n'), ((5702, 5768), 'behem0th.log.info', 'log.info', (['"""Connected to {0}:{1}"""', 'self.address[0]', 'self.address[1]'], {}), "('Connected to {0}:{1}', self.address[0], self.address[1])\n", (5710, 5768), False, 'from behem0th import utils, log\n'), ((6386, 6440), 'behem0th.log.info_v', 'log.info_v', (['"""Handling {0}, data:\n{1}"""', 'route', 'data'], {}), '("""Handling {0}, data:\n{1}""", route, data)\n', (6396, 6440), False, 'from behem0th import utils, log\n'), ((8860, 8931), 'behem0th.log.info', 'log.info', (['"""Disconnected from {0}:{1}"""', 'self.address[0]', 'self.address[1]'], {}), "('Disconnected from {0}:{1}', self.address[0], self.address[1])\n", (8868, 8931), False, 'from behem0th import utils, log\n'), ((2150, 2191), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (2177, 2191), False, 'import tempfile\n'), ((6239, 6255), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (6249, 6255), False, 'import json\n'), ((6518, 6575), 'behem0th.log.error', 'log.error', (['"""Data received on unknown route \'{0}\'!"""', 'route'], {}), '("Data received on unknown route \'{0}\'!", route)\n', (6527, 6575), False, 'from behem0th import utils, log\n'), ((6620, 6662), 'json.dumps', 'json.dumps', (["{'route': route, 'data': data}"], {}), "({'route': route, 'data': data})\n", (6630, 6662), False, 'import json\n'), ((7147, 7181), 'select.select', 'select.select', (['[self.sock]', '[]', '[]'], {}), '([self.sock], [], [])\n', (7160, 7181), False, 'import select\n'), ((7855, 7890), 'behem0th.log.info_v', 'log.info_v', (['"""Processing {0}"""', 'entry'], {}), "('Processing {0}', entry)\n", (7865, 7890), False, 'from behem0th import utils, log\n'), ((2399, 2423), 'base64.b64decode', 'base64.b64decode', (['buffer'], {}), '(buffer)\n', (2415, 2423), False, 'import base64\n'), ((2877, 2938), 'behem0th.log.warn', 'log.warn', (['"""FileRoute: Unknown action \'{0}\', igoring."""', 'action'], {}), '("FileRoute: Unknown action \'{0}\', igoring.", action)\n', (2885, 2938), False, 'from behem0th import utils, log\n'), ((3755, 3777), 'os.mkdir', 'os.mkdir', (['abspath', '(493)'], {}), '(abspath, 493)\n', (3763, 3777), False, 'import os\n'), ((3907, 3925), 'os.remove', 'os.remove', (['abspath'], {}), '(abspath)\n', (3916, 3925), False, 'import os\n'), ((6280, 6325), 'behem0th.log.error', 'log.error', (['"""Received invalid data: {0}"""', 'data'], {}), "('Received invalid data: {0}', data)\n", (6289, 6325), False, 'from behem0th import utils, log\n'), ((4001, 4033), 'os.rename', 'os.rename', (['abspath', "data['dest']"], {}), "(abspath, data['dest'])\n", (4010, 4033), False, 'import os\n'), ((4103, 4150), 'behem0th.log.warn', 'log.warn', (['"""EventRoute: Unknown event {0}"""', 'data'], {}), "('EventRoute: Unknown event {0}', data)\n", (4111, 4150), False, 'from behem0th import utils, log\n'), ((8134, 8174), 'behem0th.utils.read_file_seq', 'utils.read_file_seq', (['abspath', 'BLOCK_SIZE'], {}), '(abspath, BLOCK_SIZE)\n', (8153, 8174), False, 'from behem0th import utils, log\n'), ((2312, 2349), 'base64.b64decode', 'base64.b64decode', (['buffer[:BLOCK_SIZE]'], {}), '(buffer[:BLOCK_SIZE])\n', (2328, 2349), False, 'import base64\n'), ((8199, 8220), 'base64.b64encode', 'base64.b64encode', (['buf'], {}), '(buf)\n', (8215, 8220), False, 'import base64\n')] |
haochuanwei/hover | tests/utils/test_metrics.py | 53eb38c718e44445b18a97e391b7f90270802b04 | from hover.utils.metrics import classification_accuracy
import numpy as np
def test_classification_accuracy():
true = np.array([1, 2, 3, 4, 5, 6, 7, 7])
pred = np.array([1, 2, 3, 4, 5, 6, 7, 8])
accl = classification_accuracy(true, pred)
accr = classification_accuracy(pred, true)
assert np.allclose(accl, 7/8)
assert np.allclose(accr, 7/8)
| [((123, 157), 'numpy.array', 'np.array', (['[1, 2, 3, 4, 5, 6, 7, 7]'], {}), '([1, 2, 3, 4, 5, 6, 7, 7])\n', (131, 157), True, 'import numpy as np\n'), ((169, 203), 'numpy.array', 'np.array', (['[1, 2, 3, 4, 5, 6, 7, 8]'], {}), '([1, 2, 3, 4, 5, 6, 7, 8])\n', (177, 203), True, 'import numpy as np\n'), ((215, 250), 'hover.utils.metrics.classification_accuracy', 'classification_accuracy', (['true', 'pred'], {}), '(true, pred)\n', (238, 250), False, 'from hover.utils.metrics import classification_accuracy\n'), ((262, 297), 'hover.utils.metrics.classification_accuracy', 'classification_accuracy', (['pred', 'true'], {}), '(pred, true)\n', (285, 297), False, 'from hover.utils.metrics import classification_accuracy\n'), ((309, 333), 'numpy.allclose', 'np.allclose', (['accl', '(7 / 8)'], {}), '(accl, 7 / 8)\n', (320, 333), True, 'import numpy as np\n'), ((343, 367), 'numpy.allclose', 'np.allclose', (['accr', '(7 / 8)'], {}), '(accr, 7 / 8)\n', (354, 367), True, 'import numpy as np\n')] |
rgirish28/blenderseed | scripts/blenderseed.package.py | fee897620d0348f4ea1f5722e1a82c3682ca0178 | #!/usr/bin/python
#
# This source file is part of appleseed.
# Visit https://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2017-2018 Esteban Tovagliari, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the 'Software'), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from __future__ import print_function
from distutils import archive_util, dir_util
from xml.etree.ElementTree import ElementTree
import argparse
import colorama
import datetime
import glob
import os
import platform
import re
import shutil
import stat
import subprocess
import sys
import time
import traceback
import urllib
#--------------------------------------------------------------------------------------------------
# Constants.
#--------------------------------------------------------------------------------------------------
VERSION = "1.1.0"
SETTINGS_FILENAME = "blenderseed.package.configuration.xml"
#--------------------------------------------------------------------------------------------------
# Utility functions.
#--------------------------------------------------------------------------------------------------
GREEN_CHECKMARK = u"{0}\u2713{1}".format(colorama.Style.BRIGHT + colorama.Fore.GREEN, colorama.Style.RESET_ALL)
RED_CROSSMARK = u"{0}\u2717{1}".format(colorama.Style.BRIGHT + colorama.Fore.RED, colorama.Style.RESET_ALL)
def trace(message):
# encode('utf-8') is required to support output redirection to files or pipes.
print(u" {0}{1}{2}".format(colorama.Style.DIM + colorama.Fore.WHITE, message, colorama.Style.RESET_ALL).encode('utf-8'))
def info(message):
print(u" {0}".format(message).encode('utf-8'))
def progress(message):
print(u" {0}...".format(message).encode('utf-8'))
def warning(message):
print(u" {0}Warning: {1}.{2}".format(colorama.Style.BRIGHT + colorama.Fore.MAGENTA, message, colorama.Style.RESET_ALL).encode('utf-8'))
def fatal(message):
print(u"{0}Fatal: {1}. Aborting.{2}".format(colorama.Style.BRIGHT + colorama.Fore.RED, message, colorama.Style.RESET_ALL).encode('utf-8'))
if sys.exc_info()[0]:
print(traceback.format_exc())
sys.exit(1)
def exe(filepath):
return filepath + ".exe" if os.name == "nt" else filepath
def safe_delete_file(path):
try:
if os.path.exists(path):
os.remove(path)
except OSError:
fatal("Failed to delete file '" + path + "'")
def on_rmtree_error(func, path, exc_info):
# path contains the path of the file that couldn't be removed.
# Let's just assume that it's read-only and unlink it.
os.chmod(path, stat.S_IWRITE)
os.unlink(path)
def safe_delete_directory(path):
Attempts = 10
for attempt in range(Attempts):
try:
if os.path.exists(path):
shutil.rmtree(path, onerror=on_rmtree_error)
return
except OSError:
if attempt < Attempts - 1:
time.sleep(0.5)
else:
fatal("Failed to delete directory '" + path + "'")
def safe_delete_directory_recursively(root_path, directory_name):
safe_delete_directory(os.path.join(root_path, directory_name))
for entry in os.listdir(root_path):
subdirectory = os.path.join(root_path, entry)
if os.path.isdir(subdirectory):
safe_delete_directory_recursively(subdirectory, directory_name)
def safe_make_directory(path):
if not os.path.isdir(path):
os.makedirs(path)
def pushd(path):
old_path = os.getcwd()
os.chdir(path)
return old_path
def copy_glob(input_pattern, output_path):
for input_file in glob.glob(input_pattern):
shutil.copy(input_file, output_path)
#--------------------------------------------------------------------------------------------------
# Settings.
#--------------------------------------------------------------------------------------------------
class Settings:
def load(self):
self.this_dir = os.path.dirname(os.path.realpath(__file__))
self.root_dir = os.path.join(self.this_dir, "..")
print("Loading settings from " + SETTINGS_FILENAME + "...")
tree = ElementTree()
try:
tree.parse(SETTINGS_FILENAME)
except IOError:
fatal("Failed to load configuration file '" + SETTINGS_FILENAME + "'")
self.__load_values(tree)
def print_summary(self):
print("")
print(" Platform: " + self.platform)
print(" Path to appleseed release: " + self.appleseed_release_path)
print(" Path to appleseed binaries: " + self.appleseed_bin_path)
print(" Path to appleseed libraries: " + self.appleseed_lib_path)
print(" Path to appleseed shaders: " + self.appleseed_shaders_path)
print(" Path to appleseed schemas: " + self.appleseed_schemas_path)
print(" Path to appleseed settings: " + self.appleseed_settings_path)
print(" Path to appleseed.python: " + self.appleseed_python_path)
print(" Path to maketx: " + self.maketx_path)
print(" Output directory: " + self.output_dir)
print("")
def __load_values(self, tree):
self.platform = self.__get_required(tree, "platform")
self.appleseed_release_path = self.__get_required(tree, "appleseed_release_path")
os.environ['APPLESEED'] = self.appleseed_release_path
self.appleseed_bin_path = os.path.expandvars(self.__get_required(tree, "appleseed_bin_path"))
self.appleseed_lib_path = os.path.expandvars(self.__get_required(tree, "appleseed_lib_path"))
self.appleseed_shaders_path = os.path.expandvars(self.__get_required(tree, "appleseed_shaders_path"))
self.appleseed_schemas_path = os.path.expandvars(self.__get_required(tree, "appleseed_schemas_path"))
self.appleseed_settings_path = os.path.expandvars(self.__get_required(tree, "appleseed_settings_path"))
self.appleseed_python_path = os.path.expandvars(self.__get_required(tree, "appleseed_python_path"))
self.maketx_path = os.path.expandvars(self.__get_required(tree, "maketx_path"))
self.output_dir = os.path.expandvars(self.__get_required(tree, "output_dir"))
def __get_required(self, tree, key):
value = tree.findtext(key)
if value is None:
fatal("Missing value \"{0}\" in configuration file".format(key))
return value
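# For reference, a minimal configuration file could look like the sketch below
# (key names are taken from __load_values() above; the root element name and
# all values are placeholders):
#
#   <settings>
#       <platform>linux</platform>
#       <appleseed_release_path>/path/to/appleseed</appleseed_release_path>
#       ...
#       <output_dir>/tmp/packages</output_dir>
#   </settings>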
#--------------------------------------------------------------------------------------------------
# Base package builder.
#--------------------------------------------------------------------------------------------------
class PackageBuilder(object):
def __init__(self, settings, package_version, build_date, no_release=False):
self.settings = settings
self.package_version = package_version
self.build_date = build_date
self.no_release = no_release
def build_package(self):
print("Building package:")
print("")
self.orchestrate()
print("")
print("The package was successfully built.")
def orchestrate(self):
self.remove_leftovers()
self.copy_appleseed_python()
self.copy_binaries()
self.copy_dependencies()
self.copy_schemas()
self.copy_shaders()
self.download_settings_files()
self.remove_pyc_files()
self.post_process_package()
if not self.no_release:
self.deploy_blenderseed_to_stage()
self.clean_stage()
self.build_final_zip_file()
self.remove_stage()
def remove_leftovers(self):
progress("Removing leftovers from previous invocations")
safe_delete_directory(os.path.join(self.settings.root_dir, "appleseed"))
safe_delete_directory("blenderseed")
def copy_appleseed_python(self):
progress("Copying appleseed.python to root directory")
# Create destination directory.
lib_dir = os.path.join(self.settings.root_dir, "appleseed", "lib")
safe_make_directory(lib_dir)
# Copy appleseed.python.
dir_util.copy_tree(self.settings.appleseed_python_path, lib_dir)
# Remove _appleseedpython.so (Python 2) since blenderseed only needs _appleseedpython3.so (Python 3).
# TODO: implement properly.
safe_delete_file(os.path.join(lib_dir, "appleseed", "_appleseedpython.so"))
safe_delete_file(os.path.join(lib_dir, "appleseed", "_appleseedpython.pyd"))
def copy_binaries(self):
progress("Copying binaries to root directory")
# Create destination directory.
bin_dir = os.path.join(self.settings.root_dir, "appleseed", "bin")
safe_make_directory(bin_dir)
# Copy appleseed binaries.
for bin in [exe("appleseed.cli")]:
shutil.copy(os.path.join(self.settings.appleseed_bin_path, bin), bin_dir)
# Copy maketx.
shutil.copy(exe(self.settings.maketx_path), bin_dir)
def copy_schemas(self):
progress("Copying schemas to root directory")
dir_util.copy_tree(self.settings.appleseed_schemas_path, os.path.join(self.settings.root_dir, "appleseed", "schemas"))
safe_delete_file(os.path.join(self.settings.root_dir, "appleseed", "schemas", ".gitignore"))
def copy_shaders(self):
progress("Copying shaders to root directory")
# Create destination directory.
shaders_dir = os.path.join(self.settings.root_dir, "appleseed", "shaders")
safe_make_directory(shaders_dir)
self.__do_copy_shaders(os.path.join(self.settings.appleseed_shaders_path, "appleseed"), shaders_dir)
self.__do_copy_shaders(os.path.join(self.settings.appleseed_shaders_path, "blenderseed"), shaders_dir)
def __do_copy_shaders(self, source_dir, target_dir):
for root, dirs, files in os.walk(source_dir):
for f in files:
if f.endswith(".oso"):
shutil.copy(os.path.join(root, f), target_dir)
def download_settings_files(self):
progress("Downloading settings files to root directory")
# Create destination directory.
settings_dir = os.path.join(self.settings.root_dir, "appleseed", "settings")
safe_make_directory(settings_dir)
for file in ["appleseed.cli.xml"]:
urllib.urlretrieve(
"https://raw.githubusercontent.com/appleseedhq/appleseed/master/sandbox/settings/{0}".format(file),
os.path.join(settings_dir, file))
def remove_pyc_files(self):
progress("Removing pyc files from root directory")
for root, dirs, files in os.walk(os.path.join(self.settings.root_dir, "appleseed", "lib")):
for f in files:
if f.endswith(".pyc"):
safe_delete_file(os.path.join(root, f))
def deploy_blenderseed_to_stage(self):
progress("Deploying blenderseed to staging directory")
shutil.copytree(self.settings.root_dir, "blenderseed", ignore=shutil.ignore_patterns("scripts"))
def clean_stage(self):
progress("Cleaning staging directory")
safe_delete_directory_recursively("blenderseed", "__pycache__")
for subdirectory in [".git", ".idea", "archives", "docs", "scripts", "tests"]:
safe_delete_directory(os.path.join("blenderseed", subdirectory))
for file in [".gitignore", "README.md"]:
safe_delete_file(os.path.join("blenderseed", file))
def build_final_zip_file(self):
progress("Building final zip file from staging directory")
package_name = "blenderseed-{0}-{1}-{2}".format(self.package_version, self.settings.platform, self.build_date)
package_path = os.path.join(self.settings.output_dir, package_name)
archive_util.make_zipfile(package_path, "blenderseed")
info("Package path: {0}".format(package_path + ".zip"))
def remove_stage(self):
progress("Deleting staging directory")
safe_delete_directory("blenderseed")
def run(self, cmdline):
trace("Running command line: {0}".format(cmdline))
os.system(cmdline)
def run_subprocess(self, cmdline):
p = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
return p.returncode, out, err
#--------------------------------------------------------------------------------------------------
# Windows package builder.
#--------------------------------------------------------------------------------------------------
class WindowsPackageBuilder(PackageBuilder):
def copy_dependencies(self):
progress("Windows-specific: Copying dependencies")
bin_dir = self.settings.appleseed_bin_path
for dll in ["appleseed.dll", "appleseed.shared.dll"]:
shutil.copy(os.path.join(bin_dir, dll), os.path.join(self.settings.root_dir, "appleseed", "bin"))
def post_process_package(self):
pass
#--------------------------------------------------------------------------------------------------
# Mac package builder.
#--------------------------------------------------------------------------------------------------
class MacPackageBuilder(PackageBuilder):
SYSTEM_LIBS_PREFIXES = [
"/System/Library/",
"/usr/lib/libcurl",
"/usr/lib/libc++",
"/usr/lib/libbz2",
"/usr/lib/libSystem",
#"/usr/lib/libz",
"/usr/lib/libncurses",
"/usr/lib/libobjc.A.dylib"
]
def copy_dependencies(self):
progress("Mac-specific: Copying dependencies")
# Create destination directory.
lib_dir = os.path.join(self.settings.root_dir, "appleseed", "lib")
safe_make_directory(lib_dir)
# Copy appleseed libraries.
for lib in ["libappleseed.dylib", "libappleseed.shared.dylib"]:
shutil.copy(os.path.join(self.settings.appleseed_lib_path, lib), lib_dir)
# Get shared libs needed by binaries.
all_libs = set()
for bin in glob.glob(os.path.join(self.settings.root_dir, "appleseed", "bin", "*")):
libs = self.__get_dependencies_for_file(bin)
all_libs = all_libs.union(libs)
# Get shared libs needed by appleseed.python.
appleseedpython_libs = self.__get_dependencies_for_file(
os.path.join(self.settings.root_dir, "appleseed", "lib", "appleseed", "_appleseedpython3.so"))
all_libs = all_libs.union(appleseedpython_libs)
# Get shared libs needed by libraries.
# TODO: we're not computing the full transitive closure here!
lib_libs = set()
for lib in all_libs:
libs = self.__get_dependencies_for_file(lib)
lib_libs = lib_libs.union(libs)
all_libs = all_libs.union(lib_libs)
if True:
# Print dependencies.
trace(" Dependencies:")
for lib in all_libs:
trace(" {0}".format(lib))
# Copy needed libs to lib directory.
for lib in all_libs:
if True:
trace(" Copying {0} to {1}...".format(lib, lib_dir))
shutil.copy(lib, lib_dir)
def post_process_package(self):
progress("Mac-specific: Post-processing package")
self.__fixup_binaries()
def __fixup_binaries(self):
progress("Mac-specific: Fixing up binaries")
self.set_libraries_ids()
self.__change_library_paths_in_libraries()
self.__change_library_paths_in_executables()
def set_libraries_ids(self):
lib_dir = os.path.join(self.settings.root_dir, "appleseed", "lib")
for dirpath, dirnames, filenames in os.walk(lib_dir):
for filename in filenames:
ext = os.path.splitext(filename)[1]
if ext == ".dylib" or ext == ".so":
lib_path = os.path.join(dirpath, filename)
self.__set_library_id(lib_path, filename)
def __change_library_paths_in_libraries(self):
lib_dir = os.path.join(self.settings.root_dir, "appleseed", "lib")
for dirpath, dirnames, filenames in os.walk(lib_dir):
for filename in filenames:
ext = os.path.splitext(filename)[1]
if ext == ".dylib" or ext == ".so":
lib_path = os.path.join(dirpath, filename)
self.__change_library_paths_in_binary(lib_path)
def __change_library_paths_in_executables(self):
bin_dir = os.path.join(self.settings.root_dir, "appleseed", "bin")
for dirpath, dirnames, filenames in os.walk(bin_dir):
for filename in filenames:
ext = os.path.splitext(filename)[1]
if ext != ".py" and ext != ".conf":
exe_path = os.path.join(dirpath, filename)
self.__change_library_paths_in_binary(exe_path)
# Can be used on executables and dynamic libraries.
def __change_library_paths_in_binary(self, bin_path):
progress("Patching {0}".format(bin_path))
bin_dir = os.path.dirname(bin_path)
lib_dir = os.path.join(self.settings.root_dir, "appleseed", "lib")
path_to_appleseed_lib = os.path.relpath(lib_dir, bin_dir)
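        # e.g. a binary in appleseed/bin gets "../lib" here, so its dependencies
        # are rewritten below to "@loader_path/../lib/<libname>".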
# fix_paths set to False because we must retrieve the unmodified dependency in order to replace it by the correct one.
for lib_path in self.__get_dependencies_for_file(bin_path, fix_paths=False):
lib_name = os.path.basename(lib_path)
if path_to_appleseed_lib == ".":
self.__change_library_path(bin_path, lib_path, "@loader_path/{0}".format(lib_name))
else:
self.__change_library_path(bin_path, lib_path, "@loader_path/{0}/{1}".format(path_to_appleseed_lib, lib_name))
def __set_library_id(self, target, name):
self.run('install_name_tool -id "{0}" {1}'.format(name, target))
def __change_library_path(self, target, old, new):
self.run('install_name_tool -change "{0}" "{1}" {2}'.format(old, new, target))
def __get_dependencies_for_file(self, filepath, fix_paths=True):
filename = os.path.basename(filepath)
loader_path = os.path.dirname(filepath)
rpath = "/usr/local/lib/" # TODO: a great simplification
if True:
trace("Gathering dependencies for file")
trace(" {0}".format(filepath))
trace("with @loader_path set to")
trace(" {0}".format(loader_path))
trace("and @rpath hardcoded to")
trace(" {0}".format(rpath))
returncode, out, err = self.run_subprocess(["otool", "-L", filepath])
if returncode != 0:
fatal("Failed to invoke otool(1) to get dependencies for {0}: {1}".format(filepath, err))
libs = set()
for line in out.split("\n")[1:]: # skip the first line
line = line.strip()
# Ignore empty lines.
if len(line) == 0:
continue
# Parse the line.
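            # A typical otool -L line looks like (illustrative):
            #   /usr/local/lib/libfoo.dylib (compatibility version 1.0.0, current version 1.2.3)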
m = re.match(r"(.*) \(compatibility version .*, current version .*\)", line)
if not m:
fatal("Failed to parse line from otool(1) output: " + line)
lib = m.group(1)
# Ignore self-references (why do these happen?).
if lib == filename:
continue
# Ignore system libs.
if self.__is_system_lib(lib):
continue
# Ignore Qt frameworks.
if re.search(r"Qt.*\.framework", lib):
continue
if fix_paths:
# Handle libs relative to @loader_path.
lib = lib.replace("@loader_path", loader_path)
# Handle libs relative to @rpath.
lib = lib.replace("@rpath", rpath)
# Try to handle other relative libs.
if not os.path.isabs(lib):
# TODO: generalize to a collection of user-specified search paths.
candidate = os.path.join(loader_path, lib)
if not os.path.exists(candidate):
candidate = os.path.join("/usr/local/lib/", lib)
if os.path.exists(candidate):
info("Resolved relative dependency {0} as {1}".format(lib, candidate))
lib = candidate
libs.add(lib)
if True:
trace("Dependencies for file {0}:".format(filepath))
for lib in libs:
if os.path.isfile(lib):
trace(u" {0} {1}".format(GREEN_CHECKMARK, lib))
else:
trace(u" {0} {1}".format(RED_CROSSMARK, lib))
# Don't check for missing dependencies if we didn't attempt to fix them.
if fix_paths:
for lib in libs:
if not os.path.isfile(lib):
fatal("Dependency {0} could not be found on disk".format(lib))
return libs
def __is_system_lib(self, lib):
for prefix in self.SYSTEM_LIBS_PREFIXES:
if lib.startswith(prefix):
return True
return False
#--------------------------------------------------------------------------------------------------
# Linux package builder.
#--------------------------------------------------------------------------------------------------
class LinuxPackageBuilder(PackageBuilder):
SYSTEM_LIBS_PREFIXES = [
"linux",
"librt",
"libpthread",
"libGL",
"libX",
"libselinux",
"libICE",
"libSM",
"libdl",
"libm.so",
"libgcc",
"libc.so",
"/lib64/ld-linux-",
"libstdc++",
"libxcb",
"libdrm",
"libnsl",
"libuuid",
"libgthread",
"libglib",
"libgobject",
"libglapi",
"libffi",
"libfontconfig",
"libutil",
"libpython",
"libxshmfence.so"
]
def plugin_extension(self):
return ".so"
def copy_dependencies(self):
progress("Linux-specific: Copying dependencies")
# Create destination directory.
lib_dir = os.path.join(self.settings.root_dir, "appleseed", "lib")
safe_make_directory(lib_dir)
# Copy appleseed libraries.
for lib in ["libappleseed.so", "libappleseed.shared.so"]:
shutil.copy(os.path.join(self.settings.appleseed_lib_path, lib), lib_dir)
# Get shared libs needed by binaries.
all_libs = set()
for bin in glob.glob(os.path.join(self.settings.root_dir, "appleseed", "bin", "*")):
libs = self.__get_dependencies_for_file(bin)
all_libs = all_libs.union(libs)
# Get shared libs needed by appleseed.python.
appleseedpython_libs = self.__get_dependencies_for_file(
os.path.join(self.settings.root_dir, "appleseed", "lib", "appleseed", "_appleseedpython3.so"))
all_libs = all_libs.union(appleseedpython_libs)
# Get shared libs needed by libraries.
lib_libs = set()
for lib in all_libs:
libs = self.__get_dependencies_for_file(lib)
lib_libs = lib_libs.union(libs)
all_libs = all_libs.union(lib_libs)
# Copy all shared libraries.
for lib in all_libs:
shutil.copy(lib, lib_dir)
def post_process_package(self):
progress("Linux-specific: Post-processing package")
for bin in glob.glob(os.path.join(self.settings.root_dir, "appleseed", "bin", "*")):
self.run("chrpath -r \$ORIGIN/../lib " + bin)
for lib in glob.glob(os.path.join(self.settings.root_dir, "appleseed", "lib", "*.so")):
self.run("chrpath -d " + lib)
appleseed_python_dir = os.path.join(self.settings.root_dir, "appleseed", "lib", "appleseed")
for py_cpp_module in glob.glob(os.path.join(appleseed_python_dir, "*.so")):
self.run("chrpath -r \$ORIGIN/../ " + py_cpp_module)
def __is_system_lib(self, lib):
for prefix in self.SYSTEM_LIBS_PREFIXES:
if lib.startswith(prefix):
return True
return False
def __get_dependencies_for_file(self, filepath):
returncode, out, err = self.run_subprocess(["ldd", filepath])
if returncode != 0:
fatal("Failed to invoke ldd(1) to get dependencies for {0}: {1}".format(filepath, err))
libs = set()
for line in out.split("\n"):
line = line.strip()
# Ignore empty lines.
if len(line) == 0:
continue
# Ignore system libs.
if self.__is_system_lib(line):
continue
# Ignore appleseed libs.
if "libappleseed" in line:
continue
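            # A typical ldd line looks like (illustrative):
            #   libfoo.so.1 => /usr/lib/libfoo.so.1 (0x00007f...)
            # so the third whitespace-separated field is the resolved path.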
libs.add(line.split()[2])
return libs
#--------------------------------------------------------------------------------------------------
# Entry point.
#--------------------------------------------------------------------------------------------------
def main():
colorama.init()
parser = argparse.ArgumentParser(description="build a blenderseed package from sources")
parser.add_argument("--nozip", action="store_true", help="copies appleseed binaries to blenderseed folder but does not build a release package")
args = parser.parse_args()
no_release = args.nozip
package_version = subprocess.Popen("git describe --long", stdout=subprocess.PIPE, shell=True).stdout.read().strip()
build_date = datetime.date.today().isoformat()
print("blenderseed.package version " + VERSION)
print("")
settings = Settings()
settings.load()
settings.print_summary()
if os.name == "nt":
package_builder = WindowsPackageBuilder(settings, package_version, build_date, no_release)
elif os.name == "posix" and platform.mac_ver()[0] != "":
package_builder = MacPackageBuilder(settings, package_version, build_date, no_release)
elif os.name == "posix" and platform.mac_ver()[0] == "":
package_builder = LinuxPackageBuilder(settings, package_version, build_date, no_release)
else:
fatal("Unsupported platform: " + os.name)
package_builder.build_package()
if __name__ == "__main__":
main()
 | [((3170, 3181), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3178, 3181), False, 'import sys\n'), ((3614, 3643), 'os.chmod', 'os.chmod', (['path', 'stat.S_IWRITE'], {}), '(path, stat.S_IWRITE)\n', (3622, 3643), False, 'import os\n'), ((3648, 3663), 'os.unlink', 'os.unlink', (['path'], {}), '(path)\n', (3657, 3663), False, 'import os\n'), ((4216, 4237), 'os.listdir', 'os.listdir', (['root_path'], {}), '(root_path)\n', (4226, 4237), False, 'import os\n'), ((4534, 4545), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4543, 4545), False, 'import os\n'), ((4550, 4564), 'os.chdir', 'os.chdir', (['path'], {}), '(path)\n', (4558, 4564), False, 'import os\n'), ((4652, 4676), 'glob.glob', 'glob.glob', (['input_pattern'], {}), '(input_pattern)\n', (4661, 4676), False, 'import glob\n'), ((26294, 26309), 'colorama.init', 'colorama.init', ([], {}), '()\n', (26307, 26309), False, 'import colorama\n'), ((26324, 26403), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""build a blenderseed package from sources"""'}), "(description='build a blenderseed package from sources')\n", (26347, 26403), False, 'import argparse\n'), ((3109, 3123), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3121, 3123), False, 'import sys\n'), ((3315, 3335), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (3329, 3335), False, 'import os\n'), ((4157, 4196), 'os.path.join', 'os.path.join', (['root_path', 'directory_name'], {}), '(root_path, directory_name)\n', (4169, 4196), False, 'import os\n'), ((4262, 4292), 'os.path.join', 'os.path.join', (['root_path', 'entry'], {}), '(root_path, entry)\n', (4274, 4292), False, 'import os\n'), ((4304, 4331), 'os.path.isdir', 'os.path.isdir', (['subdirectory'], {}), '(subdirectory)\n', (4317, 4331), False, 'import os\n'), ((4453, 4472), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (4466, 4472), False, 'import os\n'), ((4482, 4499), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (4493, 4499), False, 'import os\n'), ((4686, 4722), 'shutil.copy', 'shutil.copy', (['input_file', 'output_path'], {}), '(input_file, output_path)\n', (4697, 4722), False, 'import shutil\n'), ((5067, 5100), 'os.path.join', 'os.path.join', (['self.this_dir', '""".."""'], {}), "(self.this_dir, '..')\n", (5079, 5100), False, 'import os\n'), ((5185, 5198), 'xml.etree.ElementTree.ElementTree', 'ElementTree', ([], {}), '()\n', (5196, 5198), False, 'from xml.etree.ElementTree import ElementTree\n'), ((9079, 9135), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""lib"""'], {}), "(self.settings.root_dir, 'appleseed', 'lib')\n", (9091, 9135), False, 'import os\n'), ((9215, 9279), 'distutils.dir_util.copy_tree', 'dir_util.copy_tree', (['self.settings.appleseed_python_path', 'lib_dir'], {}), '(self.settings.appleseed_python_path, lib_dir)\n', (9233, 9279), False, 'from distutils import archive_util, dir_util\n'), ((9740, 9796), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""bin"""'], {}), "(self.settings.root_dir, 'appleseed', 'bin')\n", (9752, 9796), False, 'import os\n'), ((10542, 10602), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""shaders"""'], {}), "(self.settings.root_dir, 'appleseed', 'shaders')\n", (10554, 10602), False, 'import os\n'), ((10956, 10975), 'os.walk', 'os.walk', (['source_dir'], {}), '(source_dir)\n', (10963, 10975), False, 'import os\n'), ((11280, 11341), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""settings"""'], {}), "(self.settings.root_dir, 'appleseed', 'settings')\n", (11292, 11341), False, 'import os\n'), ((12830, 12882), 'os.path.join', 'os.path.join', (['self.settings.output_dir', 'package_name'], {}), '(self.settings.output_dir, package_name)\n', (12842, 12882), False, 'import os\n'), ((12891, 12945), 'distutils.archive_util.make_zipfile', 'archive_util.make_zipfile', (['package_path', '"""blenderseed"""'], {}), "(package_path, 'blenderseed')\n", (12916, 12945), False, 'from distutils import archive_util, dir_util\n'), ((13227, 13245), 'os.system', 'os.system', (['cmdline'], {}), '(cmdline)\n', (13236, 13245), False, 'import os\n'), ((13298, 13371), 'subprocess.Popen', 'subprocess.Popen', (['cmdline'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(cmdline, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (13314, 13371), False, 'import subprocess\n'), ((14770, 14826), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""lib"""'], {}), "(self.settings.root_dir, 'appleseed', 'lib')\n", (14782, 14826), False, 'import os\n'), ((16702, 16758), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""lib"""'], {}), "(self.settings.root_dir, 'appleseed', 'lib')\n", (16714, 16758), False, 'import os\n'), ((16803, 16819), 'os.walk', 'os.walk', (['lib_dir'], {}), '(lib_dir)\n', (16810, 16819), False, 'import os\n'), ((17159, 17215), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""lib"""'], {}), "(self.settings.root_dir, 'appleseed', 'lib')\n", (17171, 17215), False, 'import os\n'), ((17260, 17276), 'os.walk', 'os.walk', (['lib_dir'], {}), '(lib_dir)\n', (17267, 17276), False, 'import os\n'), ((17624, 17680), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""bin"""'], {}), "(self.settings.root_dir, 'appleseed', 'bin')\n", (17636, 17680), False, 'import os\n'), ((17725, 17741), 'os.walk', 'os.walk', (['bin_dir'], {}), '(bin_dir)\n', (17732, 17741), False, 'import os\n'), ((18200, 18225), 'os.path.dirname', 'os.path.dirname', (['bin_path'], {}), '(bin_path)\n', (18215, 18225), False, 'import os\n'), ((18244, 18300), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""lib"""'], {}), "(self.settings.root_dir, 'appleseed', 'lib')\n", (18256, 18300), False, 'import os\n'), ((18333, 18366), 'os.path.relpath', 'os.path.relpath', (['lib_dir', 'bin_dir'], {}), '(lib_dir, bin_dir)\n', (18348, 18366), False, 'import os\n'), ((19271, 19297), 'os.path.basename', 'os.path.basename', (['filepath'], {}), '(filepath)\n', (19287, 19297), False, 'import os\n'), ((19321, 19346), 'os.path.dirname', 'os.path.dirname', (['filepath'], {}), '(filepath)\n', (19336, 19346), False, 'import os\n'), ((23362, 23418), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""lib"""'], {}), "(self.settings.root_dir, 'appleseed', 'lib')\n", (23374, 23418), False, 'import os\n'), ((24967, 25036), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""lib"""', '"""appleseed"""'], {}), "(self.settings.root_dir, 'appleseed', 'lib', 'appleseed')\n", (24979, 25036), False, 'import os\n'), ((3142, 3164), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3162, 3164), False, 'import traceback\n'), ((3349, 3364), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (3358, 3364), False, 'import os\n'), ((3781, 3801), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (3795, 3801), False, 'import os\n'), ((5015, 5041), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (5031, 5041), False, 'import os\n'), ((8823, 8872), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""'], {}), "(self.settings.root_dir, 'appleseed')\n", (8835, 8872), False, 'import os\n'), ((9452, 9509), 'os.path.join', 'os.path.join', (['lib_dir', '"""appleseed"""', '"""_appleseedpython.so"""'], {}), "(lib_dir, 'appleseed', '_appleseedpython.so')\n", (9464, 9509), False, 'import os\n'), ((9536, 9594), 'os.path.join', 'os.path.join', (['lib_dir', '"""appleseed"""', '"""_appleseedpython.pyd"""'], {}), "(lib_dir, 'appleseed', '_appleseedpython.pyd')\n", (9548, 9594), False, 'import os\n'), ((10233, 10293), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""schemas"""'], {}), "(self.settings.root_dir, 'appleseed', 'schemas')\n", (10245, 10293), False, 'import os\n'), ((10320, 10394), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""schemas"""', '""".gitignore"""'], {}), "(self.settings.root_dir, 'appleseed', 'schemas', '.gitignore')\n", (10332, 10394), False, 'import os\n'), ((10676, 10739), 'os.path.join', 'os.path.join', (['self.settings.appleseed_shaders_path', '"""appleseed"""'], {}), "(self.settings.appleseed_shaders_path, 'appleseed')\n", (10688, 10739), False, 'import os\n'), ((10785, 10850), 'os.path.join', 'os.path.join', (['self.settings.appleseed_shaders_path', '"""blenderseed"""'], {}), "(self.settings.appleseed_shaders_path, 'blenderseed')\n", (10797, 10850), False, 'import os\n'), ((11759, 11815), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""lib"""'], {}), "(self.settings.root_dir, 'appleseed', 'lib')\n", (11771, 11815), False, 'import os\n'), ((15160, 15221), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""bin"""', '"""*"""'], {}), "(self.settings.root_dir, 'appleseed', 'bin', '*')\n", (15172, 15221), False, 'import os\n'), ((15457, 15554), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""lib"""', '"""appleseed"""', '"""_appleseedpython3.so"""'], {}), "(self.settings.root_dir, 'appleseed', 'lib', 'appleseed',\n '_appleseedpython3.so')\n", (15469, 15554), False, 'import os\n'), ((16274, 16299), 'shutil.copy', 'shutil.copy', (['lib', 'lib_dir'], {}), '(lib, lib_dir)\n', (16285, 16299), False, 'import shutil\n'), ((18602, 18628), 'os.path.basename', 'os.path.basename', (['lib_path'], {}), '(lib_path)\n', (18618, 18628), False, 'import os\n'), ((20181, 20254), 're.match', 're.match', (['"""(.*) \\\\(compatibility version .*, current version .*\\\\)"""', 'line'], {}), "('(.*) \\\\(compatibility version .*, current version .*\\\\)', line)\n", (20189, 20254), False, 'import re\n'), ((20654, 20688), 're.search', 're.search', (['"""Qt.*\\\\.framework"""', 'lib'], {}), "('Qt.*\\\\.framework', lib)\n", (20663, 20688), False, 'import re\n'), ((23746, 23807), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""bin"""', '"""*"""'], {}), "(self.settings.root_dir, 'appleseed', 'bin', '*')\n", (23758, 23807), False, 'import os\n'), ((24043, 24140), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""lib"""', '"""appleseed"""', '"""_appleseedpython3.so"""'], {}), "(self.settings.root_dir, 'appleseed', 'lib', 'appleseed',\n '_appleseedpython3.so')\n", (24055, 24140), False, 'import os\n'), ((24520, 24545), 'shutil.copy', 'shutil.copy', (['lib', 'lib_dir'], {}), '(lib, lib_dir)\n', (24531, 24545), False, 'import shutil\n'), ((24673, 24734), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""bin"""', '"""*"""'], {}), "(self.settings.root_dir, 'appleseed', 'bin', '*')\n", (24685, 24734), False, 'import os\n'), ((24825, 24889), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""lib"""', '"""*.so"""'], {}), "(self.settings.root_dir, 'appleseed', 'lib', '*.so')\n", (24837, 24889), False, 'import os\n'), ((25076, 25118), 'os.path.join', 'os.path.join', (['appleseed_python_dir', '"""*.so"""'], {}), "(appleseed_python_dir, '*.so')\n", (25088, 25118), False, 'import os\n'), ((26753, 26774), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (26772, 26774), False, 'import datetime\n'), ((3819, 3863), 'shutil.rmtree', 'shutil.rmtree', (['path'], {'onerror': 'on_rmtree_error'}), '(path, onerror=on_rmtree_error)\n', (3832, 3863), False, 'import shutil\n'), ((9937, 9988), 'os.path.join', 'os.path.join', (['self.settings.appleseed_bin_path', 'bin'], {}), '(self.settings.appleseed_bin_path, bin)\n', (9949, 9988), False, 'import os\n'), ((11592, 11624), 'os.path.join', 'os.path.join', (['settings_dir', 'file'], {}), '(settings_dir, file)\n', (11604, 11624), False, 'import os\n'), ((12122, 12155), 'shutil.ignore_patterns', 'shutil.ignore_patterns', (['"""scripts"""'], {}), "('scripts')\n", (12144, 12155), False, 'import shutil\n'), ((12427, 12468), 'os.path.join', 'os.path.join', (['"""blenderseed"""', 'subdirectory'], {}), "('blenderseed', subdirectory)\n", (12439, 12468), False, 'import os\n'), ((12549, 12582), 'os.path.join', 'os.path.join', (['"""blenderseed"""', 'file'], {}), "('blenderseed', file)\n", (12561, 12582), False, 'import os\n'), ((13951, 13977), 'os.path.join', 'os.path.join', (['bin_dir', 'dll'], {}), '(bin_dir, dll)\n', (13963, 13977), False, 'import os\n'), ((13979, 14035), 'os.path.join', 'os.path.join', (['self.settings.root_dir', '"""appleseed"""', '"""bin"""'], {}), "(self.settings.root_dir, 'appleseed', 'bin')\n", (13991, 14035), False, 'import os\n'), ((14997, 15048), 'os.path.join', 'os.path.join', (['self.settings.appleseed_lib_path', 'lib'], {}), '(self.settings.appleseed_lib_path, lib)\n', (15009, 15048), False, 'import os\n'), ((21680, 21699), 'os.path.isfile', 'os.path.isfile', (['lib'], {}), '(lib)\n', (21694, 21699), False, 'import os\n'), ((23583, 23634), 'os.path.join', 'os.path.join', (['self.settings.appleseed_lib_path', 'lib'], {}), '(self.settings.appleseed_lib_path, lib)\n', (23595, 23634), False, 'import os\n'), ((3962, 3977), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (3972, 3977), False, 'import time\n'), ((16882, 16908), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (16898, 16908), False, 'import os\n'), ((16995, 17026), 'os.path.join', 'os.path.join', (['dirpath', 'filename'], {}), '(dirpath, filename)\n', (17007, 17026), False, 'import os\n'), ((17339, 17365), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (17355, 17365), False, 'import os\n'), ((17452, 17483), 'os.path.join', 'os.path.join', (['dirpath', 'filename'], {}), '(dirpath, filename)\n', (17464, 17483), False, 'import os\n'), ((17804, 17830), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (17820, 17830), False, 'import os\n'), ((17917, 17948), 'os.path.join', 'os.path.join', (['dirpath, 
filename)\n', (17929, 17948), False, 'import os\n'), ((21040, 21058), 'os.path.isabs', 'os.path.isabs', (['lib'], {}), '(lib)\n', (21053, 21058), False, 'import os\n'), ((21179, 21209), 'os.path.join', 'os.path.join', (['loader_path', 'lib'], {}), '(loader_path, lib)\n', (21191, 21209), False, 'import os\n'), ((21360, 21385), 'os.path.exists', 'os.path.exists', (['candidate'], {}), '(candidate)\n', (21374, 21385), False, 'import os\n'), ((22019, 22038), 'os.path.isfile', 'os.path.isfile', (['lib'], {}), '(lib)\n', (22033, 22038), False, 'import os\n'), ((27086, 27104), 'platform.mac_ver', 'platform.mac_ver', ([], {}), '()\n', (27102, 27104), False, 'import platform\n'), ((11076, 11097), 'os.path.join', 'os.path.join', (['root', 'f'], {}), '(root, f)\n', (11088, 11097), False, 'import os\n'), ((11922, 11943), 'os.path.join', 'os.path.join', (['root', 'f'], {}), '(root, f)\n', (11934, 11943), False, 'import os\n'), ((21237, 21262), 'os.path.exists', 'os.path.exists', (['candidate'], {}), '(candidate)\n', (21251, 21262), False, 'import os\n'), ((21300, 21336), 'os.path.join', 'os.path.join', (['"""/usr/local/lib/"""', 'lib'], {}), "('/usr/local/lib/', lib)\n", (21312, 21336), False, 'import os\n'), ((26637, 26712), 'subprocess.Popen', 'subprocess.Popen', (['"""git describe --long"""'], {'stdout': 'subprocess.PIPE', 'shell': '(True)'}), "('git describe --long', stdout=subprocess.PIPE, shell=True)\n", (26653, 26712), False, 'import subprocess\n'), ((27242, 27260), 'platform.mac_ver', 'platform.mac_ver', ([], {}), '()\n', (27258, 27260), False, 'import platform\n')] |
viad00/code_olymp | uts/uts_17_aut_py/2/A.py | 90f20f9fd075e8967d02baf7554fcf24f4ae089c | ser = int(input())  # number of elements (read but not otherwise used)
mas = list(map(int, input().split()))
mas.sort()
print(*mas)
| [] |
ongchi/wagtail-katex | wagtailkatex/wagtail_hooks.py | c64b491e765e6b87a90d7cd8602153826ee9fe07 | from django.utils.translation import gettext
from wagtail.admin.rich_text.editors.draftail import features as draftail_features
from wagtail.core import hooks
from .richtext import KaTeXEntityElementHandler, katex_entity_decorator
@hooks.register('register_rich_text_features')
def register_katex_features(features):
    """
    Registering the `katex` feature, which uses the `KATEX` Draft.js entity type,
    and is stored as HTML with a `<div data-katex-embed="c = \\pm\\sqrt{a^2 + b^2}">` tag.
    """
    features.default_features.append('katex')
feature_name = 'katex-embed'
type_ = 'KATEX-EMBED'
features.register_editor_plugin(
'draftail',
feature_name,
draftail_features.EntityFeature(
{
'type': type_,
'icon': 'square-root-alt',
'description': gettext('Equation'),
},
js=[
'wagtailkatex/katex/katex.min.js',
'wagtailkatex/wagtailkatex.js',
],
css={
'all': [
'wagtailkatex/katex/katex.min.css',
]
}
)
)
features.register_converter_rule('contentstate', feature_name, {
'from_database_format': {'div[data-katex-embed]': KaTeXEntityElementHandler()},
'to_database_format': {'entity_decorators': {type_: katex_entity_decorator}},
})
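
# Hedged illustration (shape of the markup is taken from the docstring above):
# with the converter rules just registered, a KATEX-EMBED entity is stored in
# the database as HTML of the form
#   <div data-katex-embed="c = \pm\sqrt{a^2 + b^2}"></div>
# and converted back into a Draft.js entity by KaTeXEntityElementHandler.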
| [((236, 281), 'wagtail.core.hooks.register', 'hooks.register', (['"""register_rich_text_features"""'], {}), "('register_rich_text_features')\n", (250, 281), False, 'from wagtail.core import hooks\n'), ((855, 874), 'django.utils.translation.gettext', 'gettext', (['"""Equation"""'], {}), "('Equation')\n", (862, 874), False, 'from django.utils.translation import gettext\n')] |
real-digital/esque-wire | esque_wire/protocol/serializers/api/elect_preferred_leaders_request.py | eb02c49f38b89ad5e5d25aad15fb4ad795e52807 | ###############################################################
# Autogenerated module. Please don't modify. #
# Edit according file in protocol_generator/templates instead #
###############################################################
from typing import Dict
from ...structs.api.elect_preferred_leaders_request import ElectPreferredLeadersRequestData, TopicPartition
from ._main_serializers import ArraySerializer, ClassSerializer, Schema, int32Serializer, stringSerializer
topicPartitionSchemas: Dict[int, Schema] = {
0: [("topic", stringSerializer), ("partition_id", ArraySerializer(int32Serializer))]
}
topicPartitionSerializers: Dict[int, ClassSerializer[TopicPartition]] = {
version: ClassSerializer(TopicPartition, schema) for version, schema in topicPartitionSchemas.items()
}
topicPartitionSerializers[-1] = topicPartitionSerializers[0]
electPreferredLeadersRequestDataSchemas: Dict[int, Schema] = {
0: [("topic_partitions", ArraySerializer(topicPartitionSerializers[0])), ("timeout_ms", int32Serializer)]
}
electPreferredLeadersRequestDataSerializers: Dict[int, ClassSerializer[ElectPreferredLeadersRequestData]] = {
version: ClassSerializer(ElectPreferredLeadersRequestData, schema)
for version, schema in electPreferredLeadersRequestDataSchemas.items()
}
electPreferredLeadersRequestDataSerializers[-1] = electPreferredLeadersRequestDataSerializers[0]
| [] |
arunrordell/RackHD | test/tests/bootstrap/test_api20_windows_bootstrap.py | 079c21f45cb38f538c502363aa1ff86dbcac3169 | '''
Copyright 2017 Dell Inc. or its subsidiaries. All Rights Reserved.
This script tests arbitrary payload of the RackHD API 2.0 OS bootstrap workflows.
The default case is running a minimum payload Windows OS install.
Other Windows-type OS install cases can be specified by creating a payload file and specifying it using the '-extra' argument.
This test takes 30-45 minutes to run.
Example payload file (installed in configuration dir):
{"bootstrap-payload":
{"name": "Graph.InstallWindowsServer",
"options": {"defaults": {"version": "2012",
"repo": "http://172.31.128.1:8080/repo/winpe",
"smbRepo": "\\\\172.31.128.1\\windowsServer2012",
"productkey": "XXXXX-XXXXX-XXXXX-XXXXX-XXXXX",
"username": "rackhduser",
"password": "RackHDRocks",
"smbUser": "vagrant",
"smbPassword": "vagrant"}}}
}
Example command line using external payload file:
python run_tests.py -stack 4 -test tests/bootstrap/test_api20_windows_bootstrap.py -extra base_windows_2012_install.json
The RackHD Windows installation workflow requires special configuration of the RackHD server:
- A customized WinPE environment installed on RackHD server as documented here:
https://github.com/RackHD/on-tools/tree/master/winpe
- Samba installed on the RackHD server and configured as documented here:
http://rackhd.readthedocs.io/en/latest/rackhd/install_os.html?highlight=os%20install
- Windows 2012 installation distro installed on RackHD server or equivalent NFS mount.
- Windows 2012 activation key in the installation payload file.
'''
import fit_path # NOQA: unused import
from nose.plugins.attrib import attr
import fit_common
import flogging
import random
import json
import time
from nosedep import depends
from datetime import datetime
log = flogging.get_loggers()
# sample default base payload
PAYLOAD = {"name": "Graph.InstallWindowsServer",
"options": {"defaults": {"version": "2012",
"repo": "http://172.31.128.1:8080/repo/winpe",
"smbRepo": "\\\\172.31.128.1\\windowsServer2012",
"productkey": "XXXXX-XXXXX-XXXXX-XXXXX-XXXXX",
"username": "rackhduser",
"password": "RackHDRocks",
"smbUser": "vagrant",
"smbPassword": "vagrant"}}}
# if an external payload file is specified, use that
config = fit_common.fitcfg().get('bootstrap-payload', None)
if config:
PAYLOAD = config
# recursively walk the workflow response and log every value stored under the given key (used to surface 'error' fields)
def findall(obj, key):
if isinstance(obj, dict):
for k, v in obj.items():
if k == key:
log.error(" workflow error: %s", v)
findall(v, key)
elif isinstance(obj, list):
for item in obj:
findall(item, key)
else:
pass
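
# Hedged example (response body hypothetical): findall walks nested dicts and
# lists, so a call such as
#   findall({'tasks': [{'error': 'ipmi timeout'}]}, 'error')
# would emit " workflow error: ipmi timeout" through log.error above.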
# this routine polls a workflow task ID for completion
def wait_for_workflow_complete(instanceid, start_time, waittime=3200, cycle=30):
log.info_1(" Workflow started at time: " + str(datetime.fromtimestamp(start_time)))
while time.time() - start_time < waittime: # limit test to waittime seconds
result = fit_common.rackhdapi("/api/2.0/workflows/" + instanceid)
if result['status'] != 200:
log.error(" HTTP error: " + result['text'])
return False
if result['json']['status'] in ['running', 'pending']:
log.info_5("{} workflow status: {}".format(result['json']['injectableName'], result['json']['status']))
fit_common.time.sleep(cycle)
elif result['json']['status'] == 'succeeded':
log.info_1("{} workflow status: {}".format(result['json']['injectableName'], result['json']['status']))
end_time = time.time()
log.info_1(" Workflow completed at time: " + str(datetime.fromtimestamp(end_time)))
log.info_1(" Workflow duration: " + str(end_time - start_time))
return True
else:
end_time = time.time()
log.info_1(" Workflow failed at time: " + str(datetime.fromtimestamp(end_time)))
log.info_1(" Workflow duration: " + str(end_time - start_time))
try:
res = json.loads(result['text'])
findall(res, "error")
            except ValueError:  # response body was not valid JSON
res = result['text']
log.error(" Workflow failed: status: %s", result['json']['status'])
log.error(" Data: %s", json.dumps(res, indent=4, separators=(',', ':')))
return False
try:
res = json.loads(result['text'])
    except ValueError:  # response body was not valid JSON
res = result['text']
log.error(" Workflow Timeout: " + json.dumps(res, indent=4, separators=(',', ':')))
return False
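
# Usage sketch (node id hypothetical): post a workflow against a node, then
# poll the returned instance id until it succeeds or times out, mirroring the
# tests below.
#   result = fit_common.rackhdapi('/api/2.0/nodes/' + node_id + '/workflows',
#                                 action='post', payload=PAYLOAD)
#   assert wait_for_workflow_complete(result['json']['instanceId'], time.time())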
# ------------------------ Tests -------------------------------------
@attr(all=False)
class api20_bootstrap_windows(fit_common.unittest.TestCase):
@classmethod
def setUpClass(cls):
# Get the list of nodes
NODECATALOG = fit_common.node_select()
assert (len(NODECATALOG) != 0), "There are no nodes currently discovered"
# Select one node at random
cls.__NODE = NODECATALOG[random.randint(0, len(NODECATALOG) - 1)]
        # Print node ID, node BMC MAC, node type
nodeinfo = fit_common.rackhdapi('/api/2.0/nodes/' + cls.__NODE)['json']
nodesku = fit_common.rackhdapi(nodeinfo.get('sku'))['json']['name']
monurl = "/api/2.0/nodes/" + cls.__NODE + "/catalogs/bmc"
mondata = fit_common.rackhdapi(monurl, action="get")
catalog = mondata['json']
bmcresult = mondata['status']
if bmcresult != 200:
log.info_1(" Node ID: " + cls.__NODE)
log.info_1(" Error on catalog/bmc command")
else:
log.info_1(" Node ID: " + cls.__NODE)
log.info_1(" Node SKU: " + nodesku)
log.info_1(" Node BMC Mac: %s", catalog.get('data')['MAC Address'])
log.info_1(" Node BMC IP Addr: %s", catalog.get('data')['IP Address'])
log.info_1(" Node BMC IP Addr Src: %s", catalog.get('data')['IP Address Source'])
# delete active workflows for specified node
result = fit_common.cancel_active_workflows(cls.__NODE)
assert (result is True), "There are still some active workflows running against the node"
def test01_node_check(self):
# Log node data
nodeinfo = fit_common.rackhdapi('/api/2.0/nodes/' + self.__class__.__NODE)['json']
nodesku = fit_common.rackhdapi(nodeinfo.get('sku'))['json']['name']
log.info_1(" Node ID: %s ", self.__class__.__NODE)
log.info_1(" Node SKU: %s ", nodesku)
log.info_1(" Graph Name: Graph.PowerOn.Node")
# Ensure the compute node is powered on and reachable
result = fit_common.rackhdapi('/api/2.0/nodes/' +
self.__class__.__NODE +
'/workflows',
action='post', payload={"name": "Graph.PowerOn.Node"})
self.assertEqual(result['status'], 201, "Node Power on workflow API failed, see logs.")
self.assertTrue(wait_for_workflow_complete(result['json']['instanceId'], time.time(), 50, 5),
"Node Power on workflow failed, see logs.")
@depends(after=test01_node_check)
def test02_os_install(self):
# Log node data
nodeinfo = fit_common.rackhdapi('/api/2.0/nodes/' + self.__class__.__NODE)['json']
nodesku = fit_common.rackhdapi(nodeinfo.get('sku'))['json']['name']
log.info_1(" Node ID: " + self.__class__.__NODE)
log.info_1(" Node SKU: " + nodesku)
log.info_1(" Graph Name: Graph.InstallWindowsServer")
log.info_1(" Payload: " + fit_common.json.dumps(PAYLOAD))
# launch workflow
workflowid = None
result = fit_common.rackhdapi('/api/2.0/nodes/' +
self.__class__.__NODE +
'/workflows',
action='post', payload=PAYLOAD)
if result['status'] == 201:
# workflow running
log.info_1(" InstanceID: " + result['json']['instanceId'])
workflowid = result['json']['instanceId']
else:
# workflow failed with response code
log.error(" InstanceID: " + result['text'])
self.fail("Workflow failed with response code: " + result['status'])
self.assertTrue(wait_for_workflow_complete(workflowid, time.time()), "OS Install workflow failed, see logs.")
if __name__ == '__main__':
fit_common.unittest.main()
| [((2005, 2027), 'flogging.get_loggers', 'flogging.get_loggers', ([], {}), '()\n', (2025, 2027), False, 'import flogging\n'), ((5140, 5155), 'nose.plugins.attrib.attr', 'attr', ([], {'all': '(False)'}), '(all=False)\n', (5144, 5155), False, 'from nose.plugins.attrib import attr\n'), ((7640, 7672), 'nosedep.depends', 'depends', ([], {'after': 'test01_node_check'}), '(after=test01_node_check)\n', (7647, 7672), False, 'from nosedep import depends\n'), ((8964, 8990), 'fit_common.unittest.main', 'fit_common.unittest.main', ([], {}), '()\n', (8988, 8990), False, 'import fit_common\n'), ((2724, 2743), 'fit_common.fitcfg', 'fit_common.fitcfg', ([], {}), '()\n', (2741, 2743), False, 'import fit_common\n'), ((3504, 3560), 'fit_common.rackhdapi', 'fit_common.rackhdapi', (["('/api/2.0/workflows/' + instanceid)"], {}), "('/api/2.0/workflows/' + instanceid)\n", (3524, 3560), False, 'import fit_common\n'), ((4891, 4917), 'json.loads', 'json.loads', (["result['text']"], {}), "(result['text'])\n", (4901, 4917), False, 'import json\n'), ((5313, 5337), 'fit_common.node_select', 'fit_common.node_select', ([], {}), '()\n', (5335, 5337), False, 'import fit_common\n'), ((5821, 5863), 'fit_common.rackhdapi', 'fit_common.rackhdapi', (['monurl'], {'action': '"""get"""'}), "(monurl, action='get')\n", (5841, 5863), False, 'import fit_common\n'), ((6511, 6557), 'fit_common.cancel_active_workflows', 'fit_common.cancel_active_workflows', (['cls.__NODE'], {}), '(cls.__NODE)\n', (6545, 6557), False, 'import fit_common\n'), ((7120, 7257), 'fit_common.rackhdapi', 'fit_common.rackhdapi', (["('/api/2.0/nodes/' + self.__class__.__NODE + '/workflows')"], {'action': '"""post"""', 'payload': "{'name': 'Graph.PowerOn.Node'}"}), "('/api/2.0/nodes/' + self.__class__.__NODE +\n '/workflows', action='post', payload={'name': 'Graph.PowerOn.Node'})\n", (7140, 7257), False, 'import fit_common\n'), ((8196, 8310), 'fit_common.rackhdapi', 'fit_common.rackhdapi', (["('/api/2.0/nodes/' + self.__class__.__NODE + '/workflows')"], {'action': '"""post"""', 'payload': 'PAYLOAD'}), "('/api/2.0/nodes/' + self.__class__.__NODE +\n '/workflows', action='post', payload=PAYLOAD)\n", (8216, 8310), False, 'import fit_common\n'), ((3416, 3427), 'time.time', 'time.time', ([], {}), '()\n', (3425, 3427), False, 'import time\n'), ((3869, 3897), 'fit_common.time.sleep', 'fit_common.time.sleep', (['cycle'], {}), '(cycle)\n', (3890, 3897), False, 'import fit_common\n'), ((4997, 5045), 'json.dumps', 'json.dumps', (['res'], {'indent': '(4)', 'separators': "(',', ':')"}), "(res, indent=4, separators=(',', ':'))\n", (5007, 5045), False, 'import json\n'), ((5600, 5652), 'fit_common.rackhdapi', 'fit_common.rackhdapi', (["('/api/2.0/nodes/' + cls.__NODE)"], {}), "('/api/2.0/nodes/' + cls.__NODE)\n", (5620, 5652), False, 'import fit_common\n'), ((6733, 6796), 'fit_common.rackhdapi', 'fit_common.rackhdapi', (["('/api/2.0/nodes/' + self.__class__.__NODE)"], {}), "('/api/2.0/nodes/' + self.__class__.__NODE)\n", (6753, 6796), False, 'import fit_common\n'), ((7749, 7812), 'fit_common.rackhdapi', 'fit_common.rackhdapi', (["('/api/2.0/nodes/' + self.__class__.__NODE)"], {}), "('/api/2.0/nodes/' + self.__class__.__NODE)\n", (7769, 7812), False, 'import fit_common\n'), ((3369, 3403), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['start_time'], {}), '(start_time)\n', (3391, 3403), False, 'from datetime import datetime\n'), ((4091, 4102), 'time.time', 'time.time', ([], {}), '()\n', (4100, 4102), False, 'import time\n'), ((4336, 4347), 'time.time', 'time.time', 
([], {}), '()\n', (4345, 4347), False, 'import time\n'), ((7545, 7556), 'time.time', 'time.time', ([], {}), '()\n', (7554, 7556), False, 'import time\n'), ((8094, 8124), 'fit_common.json.dumps', 'fit_common.json.dumps', (['PAYLOAD'], {}), '(PAYLOAD)\n', (8115, 8124), False, 'import fit_common\n'), ((8876, 8887), 'time.time', 'time.time', ([], {}), '()\n', (8885, 8887), False, 'import time\n'), ((4556, 4582), 'json.loads', 'json.loads', (["result['text']"], {}), "(result['text'])\n", (4566, 4582), False, 'import json\n'), ((4793, 4841), 'json.dumps', 'json.dumps', (['res'], {'indent': '(4)', 'separators': "(',', ':')"}), "(res, indent=4, separators=(',', ':'))\n", (4803, 4841), False, 'import json\n'), ((4164, 4196), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['end_time'], {}), '(end_time)\n', (4186, 4196), False, 'from datetime import datetime\n'), ((4406, 4438), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['end_time'], {}), '(end_time)\n', (4428, 4438), False, 'from datetime import datetime\n')] |
till-h/alexa | random_number.py | 47891eb97fff375500a032b23fef7a2681b50735 | from flask import Flask, render_template
from flask_ask import Ask, statement
import random
app = Flask(__name__)
ask = Ask(app, '/')
@ask.intent('RandomNumber', convert={'lowerLimit': int, 'upperLimit': int})
def hello(lowerLimit, upperLimit):
    if lowerLimit is None:
        lowerLimit = 0
    if upperLimit is None:
        upperLimit = 100
number = random.randint(lowerLimit, upperLimit)
text = render_template('random_number', lowerLimit=lowerLimit, upperLimit=upperLimit, number=number)
return statement(text).simple_card('Flask-Ask Random Number', text)
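
# Hedged example (utterance hypothetical): a request such as "give me a random
# number between one and ten" with slots lowerLimit=1, upperLimit=10 reaches
# hello(1, 10) through the convert mapping above; missing slots fall back to
# the 0..100 defaults.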
if __name__ == '__main__':
app.run(debug=True) | [((99, 114), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (104, 114), False, 'from flask import Flask, render_template\n'), ((121, 134), 'flask_ask.Ask', 'Ask', (['app', '"""/"""'], {}), "(app, '/')\n", (124, 134), False, 'from flask_ask import Ask, statement\n'), ((341, 379), 'random.randint', 'random.randint', (['lowerLimit', 'upperLimit'], {}), '(lowerLimit, upperLimit)\n', (355, 379), False, 'import random\n'), ((388, 486), 'flask.render_template', 'render_template', (['"""random_number"""'], {'lowerLimit': 'lowerLimit', 'upperLimit': 'upperLimit', 'number': 'number'}), "('random_number', lowerLimit=lowerLimit, upperLimit=\n upperLimit, number=number)\n", (403, 486), False, 'from flask import Flask, render_template\n'), ((490, 505), 'flask_ask.statement', 'statement', (['text'], {}), '(text)\n', (499, 505), False, 'from flask_ask import Ask, statement\n')] |
askerlee/rift | model/losses.py | d4dbf42b82f1f83dfab18f8da8fe3a1d0a716fa2 | import torch
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
import torchvision.models as models
from model.laplacian import LapLoss
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class EPE(nn.Module):
def __init__(self):
super(EPE, self).__init__()
def forward(self, flow, gt, loss_mask):
loss_map = (flow - gt.detach()) ** 2
loss_map = (loss_map.sum(1, True) + 1e-6) ** 0.5
return (loss_map * loss_mask)
class Ternary(nn.Module):
def __init__(self):
super(Ternary, self).__init__()
patch_size = 7
out_channels = patch_size * patch_size
self.w = np.eye(out_channels).reshape(
(patch_size, patch_size, 1, out_channels))
self.w = np.transpose(self.w, (3, 2, 0, 1))
self.w = torch.tensor(self.w).float().to(device)
def transform(self, img):
patches = F.conv2d(img, self.w, padding=3, bias=None)
transf = patches - img
transf_norm = transf / torch.sqrt(0.81 + transf**2)
return transf_norm
def rgb2gray(self, rgb):
r, g, b = rgb[:, 0:1, :, :], rgb[:, 1:2, :, :], rgb[:, 2:3, :, :]
gray = 0.2989 * r + 0.5870 * g + 0.1140 * b
return gray
def hamming(self, t1, t2):
dist = (t1 - t2) ** 2
dist_norm = torch.mean(dist / (0.1 + dist), 1, True)
return dist_norm
def valid_mask(self, t, padding):
n, _, h, w = t.size()
inner = torch.ones(n, 1, h - 2 * padding, w - 2 * padding).type_as(t)
mask = F.pad(inner, [padding] * 4)
return mask
def forward(self, img0, img1):
img0 = self.transform(self.rgb2gray(img0))
img1 = self.transform(self.rgb2gray(img1))
return self.hamming(img0, img1) * self.valid_mask(img0, 1)
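
# Minimal sketch (shapes hypothetical): inputs are RGB batches; the output is a
# per-pixel census-transform distance map with the 1-pixel border masked to zero.
#   d = Ternary()(torch.rand(1, 3, 64, 64), torch.rand(1, 3, 64, 64))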
class SOBEL(nn.Module):
def __init__(self):
super(SOBEL, self).__init__()
self.kernelX = torch.tensor([
[1, 0, -1],
[2, 0, -2],
[1, 0, -1],
]).float()
self.kernelY = self.kernelX.clone().T
self.kernelX = self.kernelX.unsqueeze(0).unsqueeze(0).to(device)
self.kernelY = self.kernelY.unsqueeze(0).unsqueeze(0).to(device)
def forward(self, pred, gt):
N, C, H, W = pred.shape[0], pred.shape[1], pred.shape[2], pred.shape[3]
img_stack = torch.cat(
[pred.reshape(N*C, 1, H, W), gt.reshape(N*C, 1, H, W)], 0)
sobel_stack_x = F.conv2d(img_stack, self.kernelX, padding=1)
sobel_stack_y = F.conv2d(img_stack, self.kernelY, padding=1)
pred_X, gt_X = sobel_stack_x[:N*C], sobel_stack_x[N*C:]
pred_Y, gt_Y = sobel_stack_y[:N*C], sobel_stack_y[N*C:]
L1X, L1Y = torch.abs(pred_X-gt_X), torch.abs(pred_Y-gt_Y)
loss = (L1X+L1Y)
return loss
class MeanShift(nn.Conv2d):
def __init__(self, data_mean, data_std, data_range=1, norm=True):
c = len(data_mean)
super(MeanShift, self).__init__(c, c, kernel_size=1)
std = torch.Tensor(data_std)
self.weight.data = torch.eye(c).view(c, c, 1, 1)
if norm:
self.weight.data.div_(std.view(c, 1, 1, 1))
self.bias.data = -1 * data_range * torch.Tensor(data_mean)
self.bias.data.div_(std)
else:
self.weight.data.mul_(std.view(c, 1, 1, 1))
self.bias.data = data_range * torch.Tensor(data_mean)
self.requires_grad = False
class VGGPerceptualLoss(torch.nn.Module):
def __init__(self, rank=0):
super(VGGPerceptualLoss, self).__init__()
blocks = []
pretrained = True
self.vgg_pretrained_features = models.vgg19(pretrained=pretrained).features
self.normalize = MeanShift([0.485, 0.456, 0.406], [0.229, 0.224, 0.225], norm=True).cuda()
for param in self.parameters():
param.requires_grad = False
def forward(self, X, Y, indices=None):
X = self.normalize(X)
Y = self.normalize(Y)
indices = [2, 7, 12, 21, 30]
weights = [1.0/2.6, 1.0/4.8, 1.0/3.7, 1.0/5.6, 10/1.5]
k = 0
loss = 0
for i in range(indices[-1]):
X = self.vgg_pretrained_features[i](X)
Y = self.vgg_pretrained_features[i](Y)
if (i+1) in indices:
loss += weights[k] * (X - Y.detach()).abs().mean() * 0.1
k += 1
return loss
# flow can have any number of channels.
# https://github.com/coolbeam/OIFlow/blob/main/utils/tools.py
def flow_smooth_delta(flow, if_second_order=False):
def gradient(x):
D_dx = x[:, :, :, 1:] - x[:, :, :, :-1]
D_dy = x[:, :, 1:] - x[:, :, :-1]
return D_dx, D_dy
dx, dy = gradient(flow)
# dx2, dxdy = gradient(dx)
# dydx, dy2 = gradient(dy)
if if_second_order:
dx2, dxdy = gradient(dx)
dydx, dy2 = gradient(dy)
smooth_loss = dx.abs().mean() + dy.abs().mean() + dx2.abs().mean() + dxdy.abs().mean() + dydx.abs().mean() + dy2.abs().mean()
else:
smooth_loss = dx.abs().mean() + dy.abs().mean()
# smooth_loss = dx.abs().mean() + dy.abs().mean() # + dx2.abs().mean() + dxdy.abs().mean() + dydx.abs().mean() + dy2.abs().mean()
    # For now, leave out the second-order smoothness loss; enabling it seems too aggressive and keeps the photo loss from decreasing. TODO
return smooth_loss
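
# Minimal sketch (shapes hypothetical): penalize the mean absolute first-order
# spatial gradients of a flow batch.
#   flow = torch.randn(2, 4, 64, 64)
#   loss = flow_smooth_delta(flow, if_second_order=False)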
# flow should have 4 channels.
# https://github.com/coolbeam/OIFlow/blob/main/utils/tools.py
# weight_type='exp' seems to perform better than 'gauss'.
def edge_aware_smoothness_order1(img0, img1, flow, constant=1.0, weight_type='exp', error_type='L1'):
def weight_fn(x):
if weight_type == 'gauss':
y = x ** 2
elif weight_type == 'exp':
y = torch.abs(x)
else:
            raise ValueError('unknown weight_type: %s' % weight_type)
return y
def gradient_xy(img):
gx = img[:, :, :, :-1] - img[:, :, :, 1:]
gy = img[:, :, :-1, :] - img[:, :, 1:, :]
return gx, gy
def gradweight_xy(img0, img1):
img0_gx, img0_gy = gradient_xy(img0)
img1_gx, img1_gy = gradient_xy(img1)
img0_wx = torch.exp(-torch.mean(weight_fn(constant * img0_gx), 1, keepdim=True))
img0_wy = torch.exp(-torch.mean(weight_fn(constant * img0_gy), 1, keepdim=True))
img1_wx = torch.exp(-torch.mean(weight_fn(constant * img1_gx), 1, keepdim=True))
img1_wy = torch.exp(-torch.mean(weight_fn(constant * img1_gy), 1, keepdim=True))
# First two flow channels: 1->0 flow. So use img1 weights.
# Second two flow channels: 0->1 flow. So use img0 weights.
# weights_x and weights_y are for x and y's spatial gradients, respectively.
weights_x = torch.cat([img1_wx, img1_wx, img0_wx, img0_wx], dim=1)
weights_y = torch.cat([img1_wy, img0_wy, img0_wy, img1_wy], dim=1)
return weights_x, weights_y
def error_fn(x):
if error_type == 'L1':
y = torch.abs(x)
elif error_type == 'abs_robust':
y = (torch.abs(x) + 0.01).pow(0.4)
else:
            raise ValueError('unknown error_type: %s' % error_type)
return y
# The flow gradients along x, y axes, respectively.
# flow_gx, flow_gy have the same number of channels as flow.
# No matter the flow is x- or y-flow, it should be smooth along both x and y axes.
# I.e., a y-flow should also be smooth along x-axis, and x-flow should also be smooth along y-axis.
flow_gx, flow_gy = gradient_xy(flow)
# weights_x, weights_y both have 4 channels, same as flow_gx and flow_gy (if the input flow has 4 channels).
weights_x, weights_y = gradweight_xy(img0, img1)
smoothness_x = error_fn(flow_gx) * weights_x
smoothness_y = error_fn(flow_gy) * weights_y
return torch.mean(smoothness_x) + torch.mean(smoothness_y)
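
# Minimal sketch (shapes hypothetical): the 4 flow channels stack the 1->0 and
# 0->1 flows, matching the per-channel weights built in gradweight_xy above.
#   img0, img1 = torch.rand(2, 3, 64, 64), torch.rand(2, 3, 64, 64)
#   flow = torch.randn(2, 4, 64, 64)
#   loss = edge_aware_smoothness_order1(img0, img1, flow)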
# Dual teaching helps slightly.
def dual_teaching_loss(mid_gt, img_stu, flow_stu, img_tea, flow_tea):
loss_distill = 0
# Ws[0]: weight of teacher -> student.
# Ws[1]: weight of student -> teacher.
# Two directions could take different weights.
# Set Ws[1] to 0 to disable student -> teacher.
Ws = [1, 0.5]
use_lap_loss = False
# Laplacian loss performs better in earlier epochs, but worse in later epochs.
# Moreover, Laplacian loss is significantly slower.
if use_lap_loss:
loss_fun = LapLoss(max_levels=3, reduction='none')
else:
loss_fun = nn.L1Loss(reduction='none')
for i in range(2):
student_error = loss_fun(img_stu, mid_gt).mean(1, True)
teacher_error = loss_fun(img_tea, mid_gt).mean(1, True)
# distill_mask indicates where the warped images according to student's prediction
# is worse than that of the teacher.
# If at some points, the warped image of the teacher is better than the student,
# then regard the flow at these points are more accurate, and use them to teach the student.
distill_mask = (student_error > teacher_error + 0.01).float().detach()
# loss_distill is the sum of the distillation losses at 2 directions.
loss_distill += Ws[i] * ((flow_tea.detach() - flow_stu).abs() * distill_mask).mean()
# Swap student and teacher, and calculate the distillation loss again.
img_stu, flow_stu, img_tea, flow_tea = \
img_tea, flow_tea, img_stu, flow_stu
# The distillation loss from the student to the teacher is given a smaller weight.
# loss_distill = loss_distill / 2
return loss_distill
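
# Minimal sketch (tensors hypothetical): both predictions are warped middle
# frames; wherever one side warps better, its flow supervises the other.
#   loss = dual_teaching_loss(mid_gt, img_stu, flow_stu, img_tea, flow_tea)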
if __name__ == '__main__':
img0 = torch.zeros(3, 3, 256, 256).float().to(device)
img1 = torch.tensor(np.random.normal(
0, 1, (3, 3, 256, 256))).float().to(device)
ternary_loss = Ternary()
print(ternary_loss(img0, img1).shape)
| [((191, 216), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (214, 216), False, 'import torch\n'), ((779, 813), 'numpy.transpose', 'np.transpose', (['self.w', '(3, 2, 0, 1)'], {}), '(self.w, (3, 2, 0, 1))\n', (791, 813), True, 'import numpy as np\n'), ((920, 963), 'torch.nn.functional.conv2d', 'F.conv2d', (['img', 'self.w'], {'padding': '(3)', 'bias': 'None'}), '(img, self.w, padding=3, bias=None)\n', (928, 963), True, 'import torch.nn.functional as F\n'), ((1340, 1380), 'torch.mean', 'torch.mean', (['(dist / (0.1 + dist))', '(1)', '(True)'], {}), '(dist / (0.1 + dist), 1, True)\n', (1350, 1380), False, 'import torch\n'), ((1568, 1595), 'torch.nn.functional.pad', 'F.pad', (['inner', '([padding] * 4)'], {}), '(inner, [padding] * 4)\n', (1573, 1595), True, 'import torch.nn.functional as F\n'), ((2470, 2514), 'torch.nn.functional.conv2d', 'F.conv2d', (['img_stack', 'self.kernelX'], {'padding': '(1)'}), '(img_stack, self.kernelX, padding=1)\n', (2478, 2514), True, 'import torch.nn.functional as F\n'), ((2539, 2583), 'torch.nn.functional.conv2d', 'F.conv2d', (['img_stack', 'self.kernelY'], {'padding': '(1)'}), '(img_stack, self.kernelY, padding=1)\n', (2547, 2583), True, 'import torch.nn.functional as F\n'), ((3025, 3047), 'torch.Tensor', 'torch.Tensor', (['data_std'], {}), '(data_std)\n', (3037, 3047), False, 'import torch\n'), ((6642, 6696), 'torch.cat', 'torch.cat', (['[img1_wx, img1_wx, img0_wx, img0_wx]'], {'dim': '(1)'}), '([img1_wx, img1_wx, img0_wx, img0_wx], dim=1)\n', (6651, 6696), False, 'import torch\n'), ((6717, 6771), 'torch.cat', 'torch.cat', (['[img1_wy, img0_wy, img0_wy, img1_wy]'], {'dim': '(1)'}), '([img1_wy, img0_wy, img0_wy, img1_wy], dim=1)\n', (6726, 6771), False, 'import torch\n'), ((7684, 7708), 'torch.mean', 'torch.mean', (['smoothness_x'], {}), '(smoothness_x)\n', (7694, 7708), False, 'import torch\n'), ((7711, 7735), 'torch.mean', 'torch.mean', (['smoothness_y'], {}), '(smoothness_y)\n', (7721, 7735), False, 'import torch\n'), ((8272, 8311), 'model.laplacian.LapLoss', 'LapLoss', ([], {'max_levels': '(3)', 'reduction': '"""none"""'}), "(max_levels=3, reduction='none')\n", (8279, 8311), False, 'from model.laplacian import LapLoss\n'), ((8341, 8368), 'torch.nn.L1Loss', 'nn.L1Loss', ([], {'reduction': '"""none"""'}), "(reduction='none')\n", (8350, 8368), True, 'import torch.nn as nn\n'), ((1026, 1056), 'torch.sqrt', 'torch.sqrt', (['(0.81 + transf ** 2)'], {}), '(0.81 + transf ** 2)\n', (1036, 1056), False, 'import torch\n'), ((2732, 2756), 'torch.abs', 'torch.abs', (['(pred_X - gt_X)'], {}), '(pred_X - gt_X)\n', (2741, 2756), False, 'import torch\n'), ((2756, 2780), 'torch.abs', 'torch.abs', (['(pred_Y - gt_Y)'], {}), '(pred_Y - gt_Y)\n', (2765, 2780), False, 'import torch\n'), ((3679, 3714), 'torchvision.models.vgg19', 'models.vgg19', ([], {'pretrained': 'pretrained'}), '(pretrained=pretrained)\n', (3691, 3714), True, 'import torchvision.models as models\n'), ((6878, 6890), 'torch.abs', 'torch.abs', (['x'], {}), '(x)\n', (6887, 6890), False, 'import torch\n'), ((677, 697), 'numpy.eye', 'np.eye', (['out_channels'], {}), '(out_channels)\n', (683, 697), True, 'import numpy as np\n'), ((1491, 1541), 'torch.ones', 'torch.ones', (['n', '(1)', '(h - 2 * padding)', '(w - 2 * padding)'], {}), '(n, 1, h - 2 * padding, w - 2 * padding)\n', (1501, 1541), False, 'import torch\n'), ((1932, 1982), 'torch.tensor', 'torch.tensor', (['[[1, 0, -1], [2, 0, -2], [1, 0, -1]]'], {}), '([[1, 0, -1], [2, 0, -2], [1, 0, -1]])\n', (1944, 1982), False, 'import 
torch\n'), ((3075, 3087), 'torch.eye', 'torch.eye', (['c'], {}), '(c)\n', (3084, 3087), False, 'import torch\n'), ((3225, 3248), 'torch.Tensor', 'torch.Tensor', (['data_mean'], {}), '(data_mean)\n', (3237, 3248), False, 'import torch\n'), ((3398, 3421), 'torch.Tensor', 'torch.Tensor', (['data_mean'], {}), '(data_mean)\n', (3410, 3421), False, 'import torch\n'), ((5683, 5695), 'torch.abs', 'torch.abs', (['x'], {}), '(x)\n', (5692, 5695), False, 'import torch\n'), ((9474, 9501), 'torch.zeros', 'torch.zeros', (['(3)', '(3)', '(256)', '(256)'], {}), '(3, 3, 256, 256)\n', (9485, 9501), False, 'import torch\n'), ((831, 851), 'torch.tensor', 'torch.tensor', (['self.w'], {}), '(self.w)\n', (843, 851), False, 'import torch\n'), ((9545, 9585), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)', '(3, 3, 256, 256)'], {}), '(0, 1, (3, 3, 256, 256))\n', (9561, 9585), True, 'import numpy as np\n'), ((6949, 6961), 'torch.abs', 'torch.abs', (['x'], {}), '(x)\n', (6958, 6961), False, 'import torch\n')] |
righetti/swarmrobotics | project/python/swarm_simulation.py | f8f6bf72c3aae1f432f3306aebb48fd32a6dd2a7 | import numpy as np
import pybullet as p
import itertools
from robot import Robot
class World():
def __init__(self):
# create the physics simulator
self.physicsClient = p.connect(p.GUI)
p.setGravity(0,0,-9.81)
self.max_communication_distance = 2.0
# We will integrate every 4ms (250Hz update)
self.dt = 1./250.
p.setPhysicsEngineParameter(self.dt, numSubSteps=1)
# Create the plane.
self.planeId = p.loadURDF("../models/plane.urdf")
p.changeDynamics(self.planeId, -1, lateralFriction=5., rollingFriction=0)
self.goalId = p.loadURDF("../models/goal.urdf")
self.goalId = p.loadURDF("../models/goal2.urdf")
# the balls
self.ball1 = p.loadURDF("../models/ball1.urdf")
p.resetBasePositionAndOrientation(self.ball1, [2., 4., 0.5], (0., 0., 0.5, 0.5))
self.ball2 = p.loadURDF("../models/ball2.urdf")
p.resetBasePositionAndOrientation(self.ball2, [4., 2., 0.5], (0., 0., 0.5, 0.5))
p.resetDebugVisualizerCamera(7.0,90.0, -43.0, (1., 1., 0.0))
# Add objects
wallId = p.loadSDF("../models/walls.sdf")[0]
p.resetBasePositionAndOrientation(wallId, [0., -1., 0], (0., 0., 0.5, 0.5))
wallId = p.loadSDF("../models/walls.sdf")[0]
p.resetBasePositionAndOrientation(wallId, [0., 1., 0], (0., 0., 0.5, 0.5))
wallId = p.loadSDF("../models/walls.sdf")[0]
p.resetBasePositionAndOrientation(wallId, [3., -1., 0], (0., 0., 0.5, 0.5))
wallId = p.loadSDF("../models/walls.sdf")[0]
p.resetBasePositionAndOrientation(wallId, [3., 1., 0], (0., 0., 0.5, 0.5))
wallId = p.loadSDF("../models/walls.sdf")[0]
p.resetBasePositionAndOrientation(wallId, [1., 2., 0], (0., 0., 0., 1.))
wallId = p.loadSDF("../models/walls.sdf")[0]
p.resetBasePositionAndOrientation(wallId, [2., -2., 0], (0., 0., 0., 1.))
# tube
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-1., 5., 0], (0., 0., 0., 1.))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-1., 6., 0], (0., 0., 0., 1.))
# #arena
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-2, 4., 0], (0., 0., 0.5, 0.5))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-2., 7., 0], (0., 0., 0.5, 0.5))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-2., 9., 0], (0., 0., 0.5, 0.5))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-2., 11., 0], (0., 0., 0.5, 0.5))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-2., 13., 0], (0., 0., 0.5, 0.5))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-3., 3., 0], (0., 0., 0., 1.))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-5., 3., 0], (0., 0., 0., 1.))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-7., 3., 0], (0., 0., 0., 1.))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-8, 4., 0], (0., 0., 0.5, 0.5))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-8., 6., 0], (0., 0., 0.5, 0.5))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-8., 8., 0], (0., 0., 0.5, 0.5))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-8., 10., 0], (0., 0., 0.5, 0.5))
# wallId = p.loadSDF("../models/walls.sdf")[0]
# p.resetBasePositionAndOrientation(wallId, [-8., 12., 0], (0., 0., 0.5, 0.5))
# create 6 robots
self.robots = []
for (i,j) in itertools.product(range(3), range(2)):
self.robots.append(Robot([1. * i + 0.5, 1. * j - 0.5, 0.3], 2*i+j, self.dt))
p.stepSimulation()
self.time = 0.0
self.stepSimulation()
self.stepSimulation()
def reset(self):
"""
Resets the position of all the robots
"""
for r in self.robots:
r.reset()
p.stepSimulation()
def stepSimulation(self):
"""
Simulates one step simulation
"""
# for each robot construct list of neighbors
for r in self.robots:
r.neighbors = [] #reset neighbors
r.messages_received = [] #reset message received
pos1, or1 = r.get_pos_and_orientation()
for j,r2 in enumerate(self.robots):
if(r.id != r2.id):
pos2, or2 = r2.get_pos_and_orientation()
if(np.linalg.norm(pos1-pos2) < self.max_communication_distance):
r.neighbors.append(j)
# for each robot send and receive messages
for i,r in enumerate(self.robots):
for msg in r.messages_to_send:
if msg[0] in r.neighbors: #then we can send the message
self.robots[msg[0]].messages_received.append([i, msg[1]]) #add the sender id
r.messages_to_send = []
# update the controllers
if self.time > 1.0:
for r in self.robots:
r.compute_controller()
# do one simulation step
p.stepSimulation()
self.time += self.dt
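
# Typical driver loop (sketch; step count and duration are illustrative):
#   world = World()
#   for _ in range(2500):  # ~10 simulated seconds at dt = 1/250
#       world.stepSimulation()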
| [((194, 210), 'pybullet.connect', 'p.connect', (['p.GUI'], {}), '(p.GUI)\n', (203, 210), True, 'import pybullet as p\n'), ((219, 244), 'pybullet.setGravity', 'p.setGravity', (['(0)', '(0)', '(-9.81)'], {}), '(0, 0, -9.81)\n', (231, 244), True, 'import pybullet as p\n'), ((386, 437), 'pybullet.setPhysicsEngineParameter', 'p.setPhysicsEngineParameter', (['self.dt'], {'numSubSteps': '(1)'}), '(self.dt, numSubSteps=1)\n', (413, 437), True, 'import pybullet as p\n'), ((490, 524), 'pybullet.loadURDF', 'p.loadURDF', (['"""../models/plane.urdf"""'], {}), "('../models/plane.urdf')\n", (500, 524), True, 'import pybullet as p\n'), ((533, 607), 'pybullet.changeDynamics', 'p.changeDynamics', (['self.planeId', '(-1)'], {'lateralFriction': '(5.0)', 'rollingFriction': '(0)'}), '(self.planeId, -1, lateralFriction=5.0, rollingFriction=0)\n', (549, 607), True, 'import pybullet as p\n'), ((630, 663), 'pybullet.loadURDF', 'p.loadURDF', (['"""../models/goal.urdf"""'], {}), "('../models/goal.urdf')\n", (640, 663), True, 'import pybullet as p\n'), ((686, 720), 'pybullet.loadURDF', 'p.loadURDF', (['"""../models/goal2.urdf"""'], {}), "('../models/goal2.urdf')\n", (696, 720), True, 'import pybullet as p\n'), ((771, 805), 'pybullet.loadURDF', 'p.loadURDF', (['"""../models/ball1.urdf"""'], {}), "('../models/ball1.urdf')\n", (781, 805), True, 'import pybullet as p\n'), ((814, 903), 'pybullet.resetBasePositionAndOrientation', 'p.resetBasePositionAndOrientation', (['self.ball1', '[2.0, 4.0, 0.5]', '(0.0, 0.0, 0.5, 0.5)'], {}), '(self.ball1, [2.0, 4.0, 0.5], (0.0, 0.0, \n 0.5, 0.5))\n', (847, 903), True, 'import pybullet as p\n'), ((916, 950), 'pybullet.loadURDF', 'p.loadURDF', (['"""../models/ball2.urdf"""'], {}), "('../models/ball2.urdf')\n", (926, 950), True, 'import pybullet as p\n'), ((959, 1048), 'pybullet.resetBasePositionAndOrientation', 'p.resetBasePositionAndOrientation', (['self.ball2', '[4.0, 2.0, 0.5]', '(0.0, 0.0, 0.5, 0.5)'], {}), '(self.ball2, [4.0, 2.0, 0.5], (0.0, 0.0, \n 0.5, 0.5))\n', (992, 1048), True, 'import pybullet as p\n'), ((1049, 1112), 'pybullet.resetDebugVisualizerCamera', 'p.resetDebugVisualizerCamera', (['(7.0)', '(90.0)', '(-43.0)', '(1.0, 1.0, 0.0)'], {}), '(7.0, 90.0, -43.0, (1.0, 1.0, 0.0))\n', (1077, 1112), True, 'import pybullet as p\n'), ((1202, 1281), 'pybullet.resetBasePositionAndOrientation', 'p.resetBasePositionAndOrientation', (['wallId', '[0.0, -1.0, 0]', '(0.0, 0.0, 0.5, 0.5)'], {}), '(wallId, [0.0, -1.0, 0], (0.0, 0.0, 0.5, 0.5))\n', (1235, 1281), True, 'import pybullet as p\n'), ((1339, 1417), 'pybullet.resetBasePositionAndOrientation', 'p.resetBasePositionAndOrientation', (['wallId', '[0.0, 1.0, 0]', '(0.0, 0.0, 0.5, 0.5)'], {}), '(wallId, [0.0, 1.0, 0], (0.0, 0.0, 0.5, 0.5))\n', (1372, 1417), True, 'import pybullet as p\n'), ((1475, 1554), 'pybullet.resetBasePositionAndOrientation', 'p.resetBasePositionAndOrientation', (['wallId', '[3.0, -1.0, 0]', '(0.0, 0.0, 0.5, 0.5)'], {}), '(wallId, [3.0, -1.0, 0], (0.0, 0.0, 0.5, 0.5))\n', (1508, 1554), True, 'import pybullet as p\n'), ((1612, 1690), 'pybullet.resetBasePositionAndOrientation', 'p.resetBasePositionAndOrientation', (['wallId', '[3.0, 1.0, 0]', '(0.0, 0.0, 0.5, 0.5)'], {}), '(wallId, [3.0, 1.0, 0], (0.0, 0.0, 0.5, 0.5))\n', (1645, 1690), True, 'import pybullet as p\n'), ((1748, 1826), 'pybullet.resetBasePositionAndOrientation', 'p.resetBasePositionAndOrientation', (['wallId', '[1.0, 2.0, 0]', '(0.0, 0.0, 0.0, 1.0)'], {}), '(wallId, [1.0, 2.0, 0], (0.0, 0.0, 0.0, 1.0))\n', (1781, 1826), True, 'import pybullet as p\n'), 
((1882, 1961), 'pybullet.resetBasePositionAndOrientation', 'p.resetBasePositionAndOrientation', (['wallId', '[2.0, -2.0, 0]', '(0.0, 0.0, 0.0, 1.0)'], {}), '(wallId, [2.0, -2.0, 0], (0.0, 0.0, 0.0, 1.0))\n', (1915, 1961), True, 'import pybullet as p\n'), ((4600, 4618), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (4616, 4618), True, 'import pybullet as p\n'), ((5790, 5808), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (5806, 5808), True, 'import pybullet as p\n'), ((1158, 1190), 'pybullet.loadSDF', 'p.loadSDF', (['"""../models/walls.sdf"""'], {}), "('../models/walls.sdf')\n", (1167, 1190), True, 'import pybullet as p\n'), ((1295, 1327), 'pybullet.loadSDF', 'p.loadSDF', (['"""../models/walls.sdf"""'], {}), "('../models/walls.sdf')\n", (1304, 1327), True, 'import pybullet as p\n'), ((1431, 1463), 'pybullet.loadSDF', 'p.loadSDF', (['"""../models/walls.sdf"""'], {}), "('../models/walls.sdf')\n", (1440, 1463), True, 'import pybullet as p\n'), ((1568, 1600), 'pybullet.loadSDF', 'p.loadSDF', (['"""../models/walls.sdf"""'], {}), "('../models/walls.sdf')\n", (1577, 1600), True, 'import pybullet as p\n'), ((1704, 1736), 'pybullet.loadSDF', 'p.loadSDF', (['"""../models/walls.sdf"""'], {}), "('../models/walls.sdf')\n", (1713, 1736), True, 'import pybullet as p\n'), ((1838, 1870), 'pybullet.loadSDF', 'p.loadSDF', (['"""../models/walls.sdf"""'], {}), "('../models/walls.sdf')\n", (1847, 1870), True, 'import pybullet as p\n'), ((4327, 4345), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (4343, 4345), True, 'import pybullet as p\n'), ((4257, 4319), 'robot.Robot', 'Robot', (['[1.0 * i + 0.5, 1.0 * j - 0.5, 0.3]', '(2 * i + j)', 'self.dt'], {}), '([1.0 * i + 0.5, 1.0 * j - 0.5, 0.3], 2 * i + j, self.dt)\n', (4262, 4319), False, 'from robot import Robot\n'), ((5138, 5165), 'numpy.linalg.norm', 'np.linalg.norm', (['(pos1 - pos2)'], {}), '(pos1 - pos2)\n', (5152, 5165), True, 'import numpy as np\n')] |
wt/boto | boto/ec2/elb/__init__.py | 83d5b256c8333307233e1ec7c1e21696e8d32437 | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
"""
This module provides an interface to the Elastic Compute Cloud (EC2)
load balancing service from AWS.
"""
from boto.connection import AWSQueryConnection
from boto.ec2.instanceinfo import InstanceInfo
from boto.ec2.elb.loadbalancer import LoadBalancer, LoadBalancerZones
from boto.ec2.elb.instancestate import InstanceState
from boto.ec2.elb.healthcheck import HealthCheck
from boto.ec2.elb.listelement import ListElement
from boto.regioninfo import RegionInfo, get_regions, load_regions
import boto
RegionData = load_regions().get('elasticloadbalancing', {})
def regions():
"""
Get all available regions for the ELB service.
:rtype: list
:return: A list of :class:`boto.RegionInfo` instances
"""
return get_regions('elasticloadbalancing', connection_cls=ELBConnection)
def connect_to_region(region_name, **kw_params):
"""
Given a valid region name, return a
:class:`boto.ec2.elb.ELBConnection`.
:param str region_name: The name of the region to connect to.
:rtype: :class:`boto.ec2.ELBConnection` or ``None``
:return: A connection to the given region, or None if an invalid region
name is given
"""
for region in regions():
if region.name == region_name:
return region.connect(**kw_params)
return None
class ELBConnection(AWSQueryConnection):
APIVersion = boto.config.get('Boto', 'elb_version', '2012-06-01')
DefaultRegionName = boto.config.get('Boto', 'elb_region_name', 'us-east-1')
DefaultRegionEndpoint = boto.config.get('Boto', 'elb_region_endpoint',
'elasticloadbalancing.us-east-1.amazonaws.com')
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None, debug=0,
https_connection_factory=None, region=None, path='/',
security_token=None, validate_certs=True, profile_name=None):
"""
Init method to create a new connection to EC2 Load Balancing Service.
.. note:: The region argument is overridden by the region specified in
the boto configuration file.
"""
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint)
self.region = region
super(ELBConnection, self).__init__(aws_access_key_id,
aws_secret_access_key,
is_secure, port, proxy, proxy_port,
proxy_user, proxy_pass,
self.region.endpoint, debug,
https_connection_factory, path,
security_token,
validate_certs=validate_certs,
profile_name=profile_name)
def _required_auth_capability(self):
return ['ec2']
def build_list_params(self, params, items, label):
if isinstance(items, basestring):
items = [items]
for index, item in enumerate(items):
params[label % (index + 1)] = item
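        # Sketch: build_list_params(params, ['a', 'b'], 'Names.member.%d')
        # sets params['Names.member.1'] = 'a' and params['Names.member.2'] = 'b'.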
def get_all_load_balancers(self, load_balancer_names=None):
"""
Retrieve all load balancers associated with your account.
:type load_balancer_names: list
:keyword load_balancer_names: An optional list of load balancer names.
:rtype: :py:class:`boto.resultset.ResultSet`
:return: A ResultSet containing instances of
:class:`boto.ec2.elb.loadbalancer.LoadBalancer`
"""
params = {}
if load_balancer_names:
self.build_list_params(params, load_balancer_names,
'LoadBalancerNames.member.%d')
return self.get_list('DescribeLoadBalancers', params,
[('member', LoadBalancer)])
def create_load_balancer(self, name, zones, listeners=None, subnets=None,
security_groups=None, scheme='internet-facing', complex_listeners=None):
"""
Create a new load balancer for your account. By default the load
balancer will be created in EC2. To create a load balancer inside a
VPC, parameter zones must be set to None and subnets must not be None.
The load balancer will be automatically created under the VPC that
contains the subnet(s) specified.
:type name: string
:param name: The mnemonic name associated with the new load balancer
:type zones: List of strings
:param zones: The names of the availability zone(s) to add.
:type listeners: List of tuples
:param listeners: Each tuple contains three or four values,
(LoadBalancerPortNumber, InstancePortNumber, Protocol,
[SSLCertificateId]) where LoadBalancerPortNumber and
InstancePortNumber are integer values between 1 and 65535,
            Protocol is a string containing either 'TCP', 'SSL', 'HTTP', or
            'HTTPS'; SSLCertificateID is the ARN of an AWS IAM
certificate, and must be specified when doing HTTPS.
:type subnets: list of strings
:param subnets: A list of subnet IDs in your VPC to attach to
your LoadBalancer.
:type security_groups: list of strings
:param security_groups: The security groups assigned to your
LoadBalancer within your VPC.
:type scheme: string
:param scheme: The type of a LoadBalancer. By default, Elastic
Load Balancing creates an internet-facing LoadBalancer with
a publicly resolvable DNS name, which resolves to public IP
addresses.
Specify the value internal for this option to create an
internal LoadBalancer with a DNS name that resolves to
private IP addresses.
This option is only available for LoadBalancers attached
to an Amazon VPC.
:type complex_listeners: List of tuples
:param complex_listeners: Each tuple contains four or five values,
(LoadBalancerPortNumber, InstancePortNumber, Protocol, InstanceProtocol,
SSLCertificateId).
Where:
- LoadBalancerPortNumber and InstancePortNumber are integer
values between 1 and 65535
            - Protocol and InstanceProtocol are strings containing either 'TCP',
'SSL', 'HTTP', or 'HTTPS'
- SSLCertificateId is the ARN of an SSL certificate loaded into
AWS IAM
:rtype: :class:`boto.ec2.elb.loadbalancer.LoadBalancer`
:return: The newly created
:class:`boto.ec2.elb.loadbalancer.LoadBalancer`
"""
if not listeners and not complex_listeners:
# Must specify one of the two options
return None
params = {'LoadBalancerName': name,
'Scheme': scheme}
# Handle legacy listeners
if listeners:
for index, listener in enumerate(listeners):
i = index + 1
protocol = listener[2].upper()
params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
params['Listeners.member.%d.InstancePort' % i] = listener[1]
params['Listeners.member.%d.Protocol' % i] = listener[2]
if protocol == 'HTTPS' or protocol == 'SSL':
params['Listeners.member.%d.SSLCertificateId' % i] = listener[3]
# Handle the full listeners
if complex_listeners:
for index, listener in enumerate(complex_listeners):
i = index + 1
protocol = listener[2].upper()
InstanceProtocol = listener[3].upper()
params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
params['Listeners.member.%d.InstancePort' % i] = listener[1]
params['Listeners.member.%d.Protocol' % i] = listener[2]
params['Listeners.member.%d.InstanceProtocol' % i] = listener[3]
if protocol == 'HTTPS' or protocol == 'SSL':
params['Listeners.member.%d.SSLCertificateId' % i] = listener[4]
if zones:
self.build_list_params(params, zones, 'AvailabilityZones.member.%d')
if subnets:
self.build_list_params(params, subnets, 'Subnets.member.%d')
if security_groups:
self.build_list_params(params, security_groups,
'SecurityGroups.member.%d')
load_balancer = self.get_object('CreateLoadBalancer',
params, LoadBalancer)
load_balancer.name = name
load_balancer.listeners = listeners
load_balancer.availability_zones = zones
load_balancer.subnets = subnets
load_balancer.security_groups = security_groups
return load_balancer
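    # Illustrative usage (editor's sketch, not part of the original module):
    # with a boto ELB connection, a simple HTTP balancer in EC2-Classic could
    # be created like this; the name and zone below are hypothetical:
    #
    #   conn = boto.connect_elb()
    #   lb = conn.create_load_balancer('my-lb', ['us-east-1a'],
    #                                  listeners=[(80, 8080, 'HTTP')])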
def create_load_balancer_listeners(self, name, listeners=None, complex_listeners=None):
"""
Creates a Listener (or group of listeners) for an existing
Load Balancer
:type name: string
:param name: The name of the load balancer to create the listeners for
:type listeners: List of tuples
:param listeners: Each tuple contains three or four values,
(LoadBalancerPortNumber, InstancePortNumber, Protocol,
[SSLCertificateId]) where LoadBalancerPortNumber and
InstancePortNumber are integer values between 1 and 65535,
            Protocol is a string containing either 'TCP', 'SSL', 'HTTP', or
            'HTTPS'; SSLCertificateId is the ARN of an AWS IAM
            certificate, and must be specified when doing HTTPS or SSL.
:type complex_listeners: List of tuples
:param complex_listeners: Each tuple contains four or five values,
(LoadBalancerPortNumber, InstancePortNumber, Protocol, InstanceProtocol,
SSLCertificateId).
Where:
- LoadBalancerPortNumber and InstancePortNumber are integer
values between 1 and 65535
            - Protocol and InstanceProtocol are strings containing either
              'TCP', 'SSL', 'HTTP', or 'HTTPS'
- SSLCertificateId is the ARN of an SSL certificate loaded into
AWS IAM
:return: The status of the request
"""
if not listeners and not complex_listeners:
# Must specify one of the two options
return None
params = {'LoadBalancerName': name}
# Handle the simple listeners
if listeners:
for index, listener in enumerate(listeners):
i = index + 1
protocol = listener[2].upper()
params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
params['Listeners.member.%d.InstancePort' % i] = listener[1]
params['Listeners.member.%d.Protocol' % i] = listener[2]
if protocol == 'HTTPS' or protocol == 'SSL':
params['Listeners.member.%d.SSLCertificateId' % i] = listener[3]
# Handle the full listeners
if complex_listeners:
for index, listener in enumerate(complex_listeners):
i = index + 1
protocol = listener[2].upper()
params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
params['Listeners.member.%d.InstancePort' % i] = listener[1]
params['Listeners.member.%d.Protocol' % i] = listener[2]
params['Listeners.member.%d.InstanceProtocol' % i] = listener[3]
if protocol == 'HTTPS' or protocol == 'SSL':
params['Listeners.member.%d.SSLCertificateId' % i] = listener[4]
return self.get_status('CreateLoadBalancerListeners', params)
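    # Illustrative usage (editor's sketch, not part of the original module):
    # adding an HTTPS listener requires the four-value tuple form with an IAM
    # server certificate ARN (the ARN below is hypothetical):
    #
    #   conn.create_load_balancer_listeners(
    #       'my-lb',
    #       [(443, 8443, 'HTTPS',
    #         'arn:aws:iam::123456789012:server-certificate/my-cert')])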
def delete_load_balancer(self, name):
"""
Delete a Load Balancer from your account.
:type name: string
:param name: The name of the Load Balancer to delete
"""
params = {'LoadBalancerName': name}
return self.get_status('DeleteLoadBalancer', params)
def delete_load_balancer_listeners(self, name, ports):
"""
Deletes a load balancer listener (or group of listeners)
:type name: string
        :param name: The name of the load balancer to delete the listeners from
        :type ports: List of integers
:param ports: Each int represents the port on the ELB to be removed
:return: The status of the request
"""
params = {'LoadBalancerName': name}
for index, port in enumerate(ports):
params['LoadBalancerPorts.member.%d' % (index + 1)] = port
return self.get_status('DeleteLoadBalancerListeners', params)
def enable_availability_zones(self, load_balancer_name, zones_to_add):
"""
        Add availability zones to an existing Load Balancer.
        All zones must be in the same region as the Load Balancer.
        Adding zones that are already registered with the Load Balancer
        has no effect.
:type load_balancer_name: string
:param load_balancer_name: The name of the Load Balancer
        :type zones_to_add: List of strings
        :param zones_to_add: The names of the zone(s) to add.
:rtype: List of strings
:return: An updated list of zones for this Load Balancer.
"""
params = {'LoadBalancerName': load_balancer_name}
self.build_list_params(params, zones_to_add,
'AvailabilityZones.member.%d')
obj = self.get_object('EnableAvailabilityZonesForLoadBalancer',
params, LoadBalancerZones)
return obj.zones
def disable_availability_zones(self, load_balancer_name, zones_to_remove):
"""
Remove availability zones from an existing Load Balancer.
All zones must be in the same region as the Load Balancer.
Removing zones that are not registered with the Load Balancer
has no effect.
        You cannot remove all zones from a Load Balancer.
:type load_balancer_name: string
:param load_balancer_name: The name of the Load Balancer
        :type zones_to_remove: List of strings
        :param zones_to_remove: The names of the zone(s) to remove.
:rtype: List of strings
:return: An updated list of zones for this Load Balancer.
"""
params = {'LoadBalancerName': load_balancer_name}
self.build_list_params(params, zones_to_remove,
'AvailabilityZones.member.%d')
obj = self.get_object('DisableAvailabilityZonesForLoadBalancer',
params, LoadBalancerZones)
return obj.zones
def modify_lb_attribute(self, load_balancer_name, attribute, value):
"""Changes an attribute of a Load Balancer
:type load_balancer_name: string
:param load_balancer_name: The name of the Load Balancer
:type attribute: string
:param attribute: The attribute you wish to change.
* crossZoneLoadBalancing - Boolean (true)
* accessLog - :py:class:`AccessLogAttribute` instance
* connectionDraining - :py:class:`ConnectionDrainingAttribute` instance
:type value: string
:param value: The new value for the attribute
:rtype: bool
:return: Whether the operation succeeded or not
"""
bool_reqs = ('crosszoneloadbalancing',)
if attribute.lower() in bool_reqs:
if isinstance(value, bool):
if value:
value = 'true'
else:
value = 'false'
params = {'LoadBalancerName': load_balancer_name}
if attribute.lower() == 'crosszoneloadbalancing':
params['LoadBalancerAttributes.CrossZoneLoadBalancing.Enabled'
] = value
elif attribute.lower() == 'accesslog':
            params['LoadBalancerAttributes.AccessLog.Enabled'] = \
                'true' if value.enabled else 'false'
params['LoadBalancerAttributes.AccessLog.S3BucketName'] = \
value.s3_bucket_name
params['LoadBalancerAttributes.AccessLog.S3BucketPrefix'] = \
value.s3_bucket_prefix
params['LoadBalancerAttributes.AccessLog.EmitInterval'] = \
value.emit_interval
elif attribute.lower() == 'connectiondraining':
            params['LoadBalancerAttributes.ConnectionDraining.Enabled'] = \
                'true' if value.enabled else 'false'
params['LoadBalancerAttributes.ConnectionDraining.Timeout'] = \
value.timeout
else:
raise ValueError('InvalidAttribute', attribute)
return self.get_status('ModifyLoadBalancerAttributes', params,
verb='GET')
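    # Illustrative usage (editor's sketch, not part of the original module):
    # enabling cross-zone load balancing on an existing balancer; the boolean
    # is converted to 'true'/'false' by the method above:
    #
    #   conn.modify_lb_attribute('my-lb', 'crossZoneLoadBalancing', True)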
def get_all_lb_attributes(self, load_balancer_name):
"""Gets all Attributes of a Load Balancer
:type load_balancer_name: string
:param load_balancer_name: The name of the Load Balancer
        :rtype: boto.ec2.elb.attributes.LbAttributes
:return: The attribute object of the ELB.
"""
from boto.ec2.elb.attributes import LbAttributes
params = {'LoadBalancerName': load_balancer_name}
return self.get_object('DescribeLoadBalancerAttributes',
params, LbAttributes)
def get_lb_attribute(self, load_balancer_name, attribute):
"""Gets an attribute of a Load Balancer
This will make an EC2 call for each method call.
:type load_balancer_name: string
:param load_balancer_name: The name of the Load Balancer
:type attribute: string
:param attribute: The attribute you wish to see.
* accessLog - :py:class:`AccessLogAttribute` instance
* crossZoneLoadBalancing - Boolean
* connectionDraining - :py:class:`ConnectionDrainingAttribute` instance
:rtype: Attribute dependent
        :return: The current value of the attribute
"""
attributes = self.get_all_lb_attributes(load_balancer_name)
if attribute.lower() == 'accesslog':
return attributes.access_log
if attribute.lower() == 'crosszoneloadbalancing':
return attributes.cross_zone_load_balancing.enabled
if attribute.lower() == 'connectiondraining':
return attributes.connection_draining
return None
def register_instances(self, load_balancer_name, instances):
"""
Add new Instances to an existing Load Balancer.
:type load_balancer_name: string
:param load_balancer_name: The name of the Load Balancer
:type instances: List of strings
        :param instances: The instance IDs of the EC2 instances to add.
:rtype: List of strings
:return: An updated list of instances for this Load Balancer.
"""
params = {'LoadBalancerName': load_balancer_name}
self.build_list_params(params, instances,
'Instances.member.%d.InstanceId')
return self.get_list('RegisterInstancesWithLoadBalancer',
params, [('member', InstanceInfo)])
def deregister_instances(self, load_balancer_name, instances):
"""
Remove Instances from an existing Load Balancer.
:type load_balancer_name: string
:param load_balancer_name: The name of the Load Balancer
:type instances: List of strings
        :param instances: The instance IDs of the EC2 instances to remove.
:rtype: List of strings
:return: An updated list of instances for this Load Balancer.
"""
params = {'LoadBalancerName': load_balancer_name}
self.build_list_params(params, instances,
'Instances.member.%d.InstanceId')
return self.get_list('DeregisterInstancesFromLoadBalancer',
params, [('member', InstanceInfo)])
def describe_instance_health(self, load_balancer_name, instances=None):
"""
        Get the current state of all Instances registered to a Load Balancer.
:type load_balancer_name: string
:param load_balancer_name: The name of the Load Balancer
:type instances: List of strings
        :param instances: The instance IDs of the EC2 instances
to return status for. If not provided,
the state of all instances will be returned.
:rtype: List of :class:`boto.ec2.elb.instancestate.InstanceState`
:return: list of state info for instances in this Load Balancer.
"""
params = {'LoadBalancerName': load_balancer_name}
if instances:
self.build_list_params(params, instances,
'Instances.member.%d.InstanceId')
return self.get_list('DescribeInstanceHealth', params,
[('member', InstanceState)])
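    # Illustrative usage (editor's sketch, not part of the original module):
    # collecting the IDs of instances currently in service; attribute names
    # assume boto.ec2.elb.instancestate.InstanceState:
    #
    #   states = conn.describe_instance_health('my-lb')
    #   healthy = [s.instance_id for s in states if s.state == 'InService']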
def configure_health_check(self, name, health_check):
"""
Define a health check for the EndPoints.
:type name: string
:param name: The mnemonic name associated with the load balancer
:type health_check: :class:`boto.ec2.elb.healthcheck.HealthCheck`
:param health_check: A HealthCheck object populated with the desired
values.
:rtype: :class:`boto.ec2.elb.healthcheck.HealthCheck`
:return: The updated :class:`boto.ec2.elb.healthcheck.HealthCheck`
"""
params = {'LoadBalancerName': name,
'HealthCheck.Timeout': health_check.timeout,
'HealthCheck.Target': health_check.target,
'HealthCheck.Interval': health_check.interval,
'HealthCheck.UnhealthyThreshold': health_check.unhealthy_threshold,
'HealthCheck.HealthyThreshold': health_check.healthy_threshold}
return self.get_object('ConfigureHealthCheck', params, HealthCheck)
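    # Illustrative usage (editor's sketch, not part of the original module):
    # building a HealthCheck and applying it; the target string and threshold
    # values below are hypothetical:
    #
    #   from boto.ec2.elb import HealthCheck
    #   hc = HealthCheck(interval=20, healthy_threshold=3,
    #                    unhealthy_threshold=5, target='HTTP:8080/health')
    #   conn.configure_health_check('my-lb', hc)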
def set_lb_listener_SSL_certificate(self, lb_name, lb_port,
ssl_certificate_id):
"""
Sets the certificate that terminates the specified listener's SSL
connections. The specified certificate replaces any prior certificate
that was used on the same LoadBalancer and port.
"""
params = {'LoadBalancerName': lb_name,
'LoadBalancerPort': lb_port,
'SSLCertificateId': ssl_certificate_id}
return self.get_status('SetLoadBalancerListenerSSLCertificate', params)
def create_app_cookie_stickiness_policy(self, name, lb_name, policy_name):
"""
Generates a stickiness policy with sticky session lifetimes that follow
that of an application-generated cookie. This policy can only be
associated with HTTP listeners.
This policy is similar to the policy created by
CreateLBCookieStickinessPolicy, except that the lifetime of the special
Elastic Load Balancing cookie follows the lifetime of the
application-generated cookie specified in the policy configuration. The
load balancer only inserts a new stickiness cookie when the application
response includes a new application cookie.
If the application cookie is explicitly removed or expires, the session
stops being sticky until a new application cookie is issued.
"""
params = {'CookieName': name,
'LoadBalancerName': lb_name,
'PolicyName': policy_name}
return self.get_status('CreateAppCookieStickinessPolicy', params)
def create_lb_cookie_stickiness_policy(self, cookie_expiration_period,
lb_name, policy_name):
"""
Generates a stickiness policy with sticky session lifetimes controlled
by the lifetime of the browser (user-agent) or a specified expiration
        period. This policy can be associated only with HTTP listeners.
When a load balancer implements this policy, the load balancer uses a
special cookie to track the backend server instance for each request.
When the load balancer receives a request, it first checks to see if
this cookie is present in the request. If so, the load balancer sends
the request to the application server specified in the cookie. If not,
the load balancer sends the request to a server that is chosen based on
the existing load balancing algorithm.
A cookie is inserted into the response for binding subsequent requests
from the same user to that server. The validity of the cookie is based
on the cookie expiration time, which is specified in the policy
configuration.
None may be passed for cookie_expiration_period.
"""
params = {'LoadBalancerName': lb_name,
'PolicyName': policy_name}
if cookie_expiration_period is not None:
params['CookieExpirationPeriod'] = cookie_expiration_period
return self.get_status('CreateLBCookieStickinessPolicy', params)
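    # Illustrative usage (editor's sketch, not part of the original module):
    # a five-minute duration-based stickiness policy might be created with:
    #
    #   conn.create_lb_cookie_stickiness_policy(300, 'my-lb', 'sticky-5min')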
def create_lb_policy(self, lb_name, policy_name, policy_type, policy_attributes):
"""
        Creates a new policy that contains the necessary attributes depending on
the policy type. Policies are settings that are saved for your load
balancer and that can be applied to the front-end listener, or
the back-end application server.
"""
params = {'LoadBalancerName': lb_name,
'PolicyName': policy_name,
'PolicyTypeName': policy_type}
        # Send an explicit empty PolicyAttributes field when no attributes
        # are supplied.
        if policy_attributes:
            for index, (name, value) in enumerate(policy_attributes.iteritems(), 1):
                params['PolicyAttributes.member.%d.AttributeName' % index] = name
                params['PolicyAttributes.member.%d.AttributeValue' % index] = value
        else:
            params['PolicyAttributes'] = ''
return self.get_status('CreateLoadBalancerPolicy', params)
def delete_lb_policy(self, lb_name, policy_name):
"""
Deletes a policy from the LoadBalancer. The specified policy must not
be enabled for any listeners.
"""
params = {'LoadBalancerName': lb_name,
'PolicyName': policy_name}
return self.get_status('DeleteLoadBalancerPolicy', params)
def set_lb_policies_of_listener(self, lb_name, lb_port, policies):
"""
Associates, updates, or disables a policy with a listener on the load
balancer. Currently only zero (0) or one (1) policy can be associated
with a listener.
"""
params = {'LoadBalancerName': lb_name,
'LoadBalancerPort': lb_port}
        if policies:
self.build_list_params(params, policies, 'PolicyNames.member.%d')
else:
params['PolicyNames'] = ''
return self.get_status('SetLoadBalancerPoliciesOfListener', params)
def set_lb_policies_of_backend_server(self, lb_name, instance_port, policies):
"""
Replaces the current set of policies associated with a port on which
the back-end server is listening with a new set of policies.
"""
params = {'LoadBalancerName': lb_name,
'InstancePort': instance_port}
if policies:
self.build_list_params(params, policies, 'PolicyNames.member.%d')
else:
params['PolicyNames'] = ''
return self.get_status('SetLoadBalancerPoliciesForBackendServer', params)
def apply_security_groups_to_lb(self, name, security_groups):
"""
Applies security groups to the load balancer.
Applying security groups that are already registered with the
Load Balancer has no effect.
:type name: string
:param name: The name of the Load Balancer
:type security_groups: List of strings
:param security_groups: The name of the security group(s) to add.
:rtype: List of strings
:return: An updated list of security groups for this Load Balancer.
"""
params = {'LoadBalancerName': name}
self.build_list_params(params, security_groups,
'SecurityGroups.member.%d')
return self.get_list('ApplySecurityGroupsToLoadBalancer',
params, None)
def attach_lb_to_subnets(self, name, subnets):
"""
Attaches load balancer to one or more subnets.
Attaching subnets that are already registered with the
Load Balancer has no effect.
:type name: string
:param name: The name of the Load Balancer
:type subnets: List of strings
        :param subnets: The IDs of the subnet(s) to add.
:rtype: List of strings
:return: An updated list of subnets for this Load Balancer.
"""
params = {'LoadBalancerName': name}
self.build_list_params(params, subnets,
'Subnets.member.%d')
return self.get_list('AttachLoadBalancerToSubnets',
params, None)
def detach_lb_from_subnets(self, name, subnets):
"""
Detaches load balancer from one or more subnets.
:type name: string
:param name: The name of the Load Balancer
:type subnets: List of strings
        :param subnets: The IDs of the subnet(s) to detach.
:rtype: List of strings
:return: An updated list of subnets for this Load Balancer.
"""
params = {'LoadBalancerName': name}
self.build_list_params(params, subnets,
'Subnets.member.%d')
return self.get_list('DetachLoadBalancerFromSubnets',
params, None)
| [((1919, 1984), 'boto.regioninfo.get_regions', 'get_regions', (['"""elasticloadbalancing"""'], {'connection_cls': 'ELBConnection'}), "('elasticloadbalancing', connection_cls=ELBConnection)\n", (1930, 1984), False, 'from boto.regioninfo import RegionInfo, get_regions, load_regions\n'), ((2547, 2599), 'boto.config.get', 'boto.config.get', (['"""Boto"""', '"""elb_version"""', '"""2012-06-01"""'], {}), "('Boto', 'elb_version', '2012-06-01')\n", (2562, 2599), False, 'import boto\n'), ((2624, 2679), 'boto.config.get', 'boto.config.get', (['"""Boto"""', '"""elb_region_name"""', '"""us-east-1"""'], {}), "('Boto', 'elb_region_name', 'us-east-1')\n", (2639, 2679), False, 'import boto\n'), ((2708, 2806), 'boto.config.get', 'boto.config.get', (['"""Boto"""', '"""elb_region_endpoint"""', '"""elasticloadbalancing.us-east-1.amazonaws.com"""'], {}), "('Boto', 'elb_region_endpoint',\n 'elasticloadbalancing.us-east-1.amazonaws.com')\n", (2723, 2806), False, 'import boto\n'), ((1701, 1715), 'boto.regioninfo.load_regions', 'load_regions', ([], {}), '()\n', (1713, 1715), False, 'from boto.regioninfo import RegionInfo, get_regions, load_regions\n'), ((3473, 3541), 'boto.regioninfo.RegionInfo', 'RegionInfo', (['self', 'self.DefaultRegionName', 'self.DefaultRegionEndpoint'], {}), '(self, self.DefaultRegionName, self.DefaultRegionEndpoint)\n', (3483, 3541), False, 'from boto.regioninfo import RegionInfo, get_regions, load_regions\n')] |
atomse/basis_set_exchange | basis_set_exchange/cli/bse_cli.py | 7ffd64082c14d2f61eb43f1c2d44792e8b0e394e | '''
Command line interface for the basis set exchange
'''
import argparse
import argcomplete
from .. import version
from .bse_handlers import bse_cli_handle_subcmd
from .check import cli_check_normalize_args
from .complete import (cli_case_insensitive_validator,
cli_family_completer, cli_role_completer, cli_bsname_completer,
cli_write_fmt_completer, cli_read_fmt_completer, cli_reffmt_completer)
def run_bse_cli():
################################################################################################
# NOTE: I am deliberately not using the 'choices' argument in add_argument. I could use it
# for formats, etc, however I wouldn't want to use it for basis set names. Therefore, I handle
# all of that manually so that error output is consistent and clean
################################################################################################
########################################
# Main global options
########################################
    parser = argparse.ArgumentParser(description='Command line interface for the basis set exchange')
parser.add_argument('-V', action='version', version='basis_set_exchange ' + version())
parser.add_argument('-d', '--data-dir', metavar='PATH', help='Override which data directory to use')
parser.add_argument('-o', '--output', metavar='PATH', help='Output to given file rather than stdout')
subparsers = parser.add_subparsers(metavar='subcommand', dest='subcmd')
subparsers.required = True # https://bugs.python.org/issue9253#msg186387
########################################
# Listing of data-independent info
########################################
# list-formats subcommand
    subp = subparsers.add_parser('list-formats', help='Output a list of basis set formats that can be used when obtaining a basis set')
subp.add_argument('-n', '--no-description', action='store_true', help='Print only the format names')
# list-writer-formats subcommand
    subp = subparsers.add_parser('list-writer-formats', help='Output a list of available basis set formats that can be written')
subp.add_argument('-n', '--no-description', action='store_true', help='Print only the format names')
# list-reader-formats
subp = subparsers.add_parser('list-reader-formats', help='Output a list of basis set formats that can be read')
subp.add_argument('-n', '--no-description', action='store_true', help='Print only the format names')
# list-ref-formats subcommand
    subp = subparsers.add_parser('list-ref-formats', help='Output a list of all available reference formats and descriptions')
subp.add_argument('-n', '--no-description', action='store_true', help='Print only the reference format names')
# list-roles subcommand
    subp = subparsers.add_parser('list-roles', help='Output a list of all available roles and descriptions')
subp.add_argument('-n', '--no-description', action='store_true', help='Print only the role names')
########################################
# Listing of general info and metadata
########################################
# get-data-dir
subparsers.add_parser('get-data-dir', help='Output the default data directory of this package')
# list-basis-sets subcommand
    subp = subparsers.add_parser('list-basis-sets', help='Output a list of all available basis sets and descriptions')
subp.add_argument('-n', '--no-description', action='store_true', help='Print only the basis set names')
subp.add_argument('-f', '--family', help='Limit the basis set list to only the specified family').completer = cli_family_completer
subp.add_argument('-r', '--role', help='Limit the basis set list to only the specified role').completer = cli_role_completer
subp.add_argument('-s', '--substr', help='Limit the basis set list to only basis sets whose name contains the specified substring')
subp.add_argument('-e', '--elements', help='Limit the basis set list to only basis sets that contain all the given elements')
# list-families subcommand
subparsers.add_parser('list-families', help='Output a list all available basis set families')
# lookup-by-role
subp = subparsers.add_parser('lookup-by-role', help='Lookup a companion/auxiliary basis by primary basis and role')
subp.add_argument('basis', help='Name of the primary basis we want the auxiliary basis for').completer = cli_bsname_completer
subp.add_argument('role', help='Role of the auxiliary basis to look for').completer = cli_role_completer
#################################
# Output of info
#################################
# get-basis subcommand
subp = subparsers.add_parser('get-basis', help='Output a formatted basis set')
subp.add_argument('basis', help='Name of the basis set to output').completer = cli_bsname_completer
subp.add_argument('fmt', help='Which format to output the basis set as').completer = cli_write_fmt_completer
subp.add_argument('--elements', help='Which elements of the basis set to output. Default is all defined in the given basis')
subp.add_argument('--version', help='Which version of the basis set to output. Default is the latest version')
subp.add_argument('--noheader', action='store_true', help='Do not output the header at the top')
subp.add_argument('--unc-gen', action='store_true', help='Remove general contractions')
subp.add_argument('--unc-spdf', action='store_true', help='Remove combined sp, spd, ... contractions')
    subp.add_argument('--unc-seg', action='store_true', help='Remove segmented contractions')
subp.add_argument('--opt-gen', action='store_true', help='Optimize general contractions')
subp.add_argument('--make-gen', action='store_true', help='Make the basis set as generally-contracted as possible')
# get-refs subcommand
subp = subparsers.add_parser('get-refs', help='Output references for a basis set')
subp.add_argument('basis', help='Name of the basis set to output the references for').completer = cli_bsname_completer
subp.add_argument('reffmt', help='Which format to output the references as').completer = cli_reffmt_completer
subp.add_argument('--elements', help='Which elements to output the references for. Default is all defined in the given basis.')
subp.add_argument('--version', help='Which version of the basis set to get the references for')
# get-info subcommand
subp = subparsers.add_parser('get-info', help='Output general info and metadata for a basis set')
subp.add_argument('basis', help='Name of the basis set to output the info for').completer = cli_bsname_completer
# get-notes subcommand
subp = subparsers.add_parser('get-notes', help='Output the notes for a basis set')
subp.add_argument('basis', help='Name of the basis set to output the notes for').completer = cli_bsname_completer
# get-family subcommand
subp = subparsers.add_parser('get-family', help='Output the family of a basis set')
subp.add_argument('basis', help='Name of the basis set to output the family for').completer = cli_bsname_completer
# get-versions subcommand
    subp = subparsers.add_parser('get-versions', help='Output a list of all available versions of a basis set')
subp.add_argument('basis', help='Name of the basis set to list the versions of').completer = cli_bsname_completer
subp.add_argument('-n', '--no-description', action='store_true', help='Print only the version numbers')
# get-family-notes subcommand
subp = subparsers.add_parser('get-family-notes', help='Get the notes of a family of basis sets')
    subp.add_argument('family', type=str.lower, help='The basis set family to get the notes of').completer = cli_family_completer
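    # Illustrative invocations (editor's sketch, not part of the original
    # source; assumes the CLI is installed as `bse` and uses the subcommands
    # defined above):
    #
    #   bse list-basis-sets --family pople
    #   bse get-basis cc-pvdz nwchem
    #   bse get-refs cc-pvdz bib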
#################################
# Converting basis sets
#################################
subp = subparsers.add_parser('convert-basis', help='Convert basis set files from one format to another')
subp.add_argument('input_file', type=str, help='Basis set file to convert')
subp.add_argument('output_file', type=str, help='Converted basis set file')
    subp.add_argument('--in-fmt', type=str, default=None, help='Input format (default: autodetected from input filename)').completer = cli_read_fmt_completer
    subp.add_argument('--out-fmt', type=str, default=None, help='Output format (default: autodetected from output filename)').completer = cli_write_fmt_completer
#################################
# Creating bundles
#################################
subp = subparsers.add_parser('create-bundle', help='Create a bundle of basis sets')
subp.add_argument('fmt', help='Which format to output the basis set as').completer = cli_write_fmt_completer
subp.add_argument('reffmt', help='Which format to output the references as').completer = cli_reffmt_completer
subp.add_argument('bundle_file', help='Bundle/Archive file to create')
subp.add_argument('--archive-type', help='Override the type of archive to create (zip or tbz)')
#############################
# DONE WITH SUBCOMMANDS
#############################
# setup autocomplete
argcomplete.autocomplete(parser, validator=cli_case_insensitive_validator)
# Now parse and handle the args
args = parser.parse_args()
# Check and make sure basis sets, roles, etc, are valid
args = cli_check_normalize_args(args)
# Actually generate the output
output = bse_cli_handle_subcmd(args)
if args.output:
with open(args.output, 'w', encoding='utf-8') as outfile:
outfile.write(output)
else:
print(output)
return 0
| [((1064, 1130), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Description of your program"""'}), "(description='Description of your program')\n", (1087, 1130), False, 'import argparse\n'), ((9187, 9261), 'argcomplete.autocomplete', 'argcomplete.autocomplete', (['parser'], {'validator': 'cli_case_insensitive_validator'}), '(parser, validator=cli_case_insensitive_validator)\n', (9211, 9261), False, 'import argcomplete\n')] |
hanjungwoo1/CodingTest | Backjoon/1929.py | 0112488d04dd53cea1c869439341fb602e699f2a | """
Input example
3 16
Output example
3
5
7
11
13
"""
import math
left, right = map(int, input().split())

# Sieve of Eratosthenes over [0, right]: array[i] stays True while i is
# still presumed prime. 1 is not prime, so mark it off explicitly.
array = [True] * (right + 1)
array[1] = False

# Cross out every multiple of each prime up to sqrt(right).
for i in range(2, int(math.sqrt(right)) + 1):
    if array[i]:
        j = 2
        while i * j <= right:
            array[i * j] = False
            j += 1

# Print the primes that fall inside [left, right].
for i in range(left, right + 1):
    if array[i]:
print(i) | [((166, 182), 'math.sqrt', 'math.sqrt', (['right'], {}), '(right)\n', (175, 182), False, 'import math\n')] |
tianyapiaozi/tensorflow | tensorflow/tools/quantization/quantize_graph_test.py | fb3ce0467766a8e91f1da0ad7ada7c24fde7a73a | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests the graph quantization script.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import numpy as np
from tensorflow.core.framework import graph_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops as ops_lib
from tensorflow.python.platform import flags as flags_lib
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
from tensorflow.tools.quantization import quantize_graph
flags = flags_lib
FLAGS = flags.FLAGS
def run_graph_def(graph_def, input_map, outputs):
graph = ops_lib.Graph()
with graph.as_default():
importer.import_graph_def(graph_def, input_map={}, name="")
with session.Session(graph=graph) as sess:
results = sess.run(outputs, feed_dict=input_map)
return results
def test_mat_mul(m, n, k, a, b):
"""Tests a MatMul replacement."""
a_constant_name = "a_constant"
b_constant_name = "b_constant"
mat_mul_name = "mat_mul"
float_graph_def = graph_pb2.GraphDef()
a_constant = quantize_graph.create_constant_node(
a_constant_name, value=a, dtype=dtypes.float32, shape=[m, k])
float_graph_def.node.extend([a_constant])
b_constant = quantize_graph.create_constant_node(
b_constant_name, value=b, dtype=dtypes.float32, shape=[k, n])
float_graph_def.node.extend([b_constant])
mat_mul_node = quantize_graph.create_node("MatMul", mat_mul_name,
[a_constant_name, b_constant_name])
quantize_graph.set_attr_dtype(mat_mul_node, "T", dtypes.float32)
quantize_graph.set_attr_bool(mat_mul_node, "transpose_a", False)
quantize_graph.set_attr_bool(mat_mul_node, "transpose_b", False)
float_graph_def.node.extend([mat_mul_node])
test_graph(float_graph_def, {}, [mat_mul_name])
def test_conv(depth, image_width, image_height, image_batch_count, filter_size,
filter_count, stride, padding, input_values, filter_values):
"""Tests a Conv replacement."""
input_constant_name = "input_constant"
filter_constant_name = "filter_constant"
conv_name = "conv"
float_graph_def = graph_pb2.GraphDef()
input_constant = quantize_graph.create_constant_node(
input_constant_name,
value=input_values,
dtype=dtypes.float32,
shape=[image_batch_count, image_height, image_width, depth])
float_graph_def.node.extend([input_constant])
filter_constant = quantize_graph.create_constant_node(
filter_constant_name,
value=filter_values,
dtype=dtypes.float32,
shape=[filter_size, filter_size, depth, filter_count])
float_graph_def.node.extend([filter_constant])
conv_node = quantize_graph.create_node(
"Conv2D", conv_name, [input_constant_name, filter_constant_name])
quantize_graph.set_attr_dtype(conv_node, "T", dtypes.float32)
quantize_graph.set_attr_int_list(conv_node, "strides", [1, stride, stride, 1])
quantize_graph.set_attr_string(conv_node, "padding", padding)
float_graph_def.node.extend([conv_node])
test_graph(float_graph_def, {}, [conv_name])
def are_tensors_near(a, b, tolerance):
"""Tests whether two tensors are nearly identical.
This is a specialized comparison function designed to help debug problems with
quantization. It prints out information about the differences between tensors
on failure, paying special attention to possible biases by looking at the mean
and absolute average errors.
Args:
a: First comparison tensor.
b: Second comparison tensor.
tolerance: Float value indicating how large an error between values is ok.
Returns:
Boolean indicating whether the two inputs were close enough.
"""
flat_a = a.flatten()
flat_b = b.flatten()
if len(flat_a) != len(flat_b):
tf_logging.info("Tensors are different sizes: " + str(len(flat_a)) + " vs "
+ str(len(flat_b)))
return False
value_count = len(flat_a)
how_many_different = 0
total_difference = 0
total_abs_difference = 0
for index in range(value_count):
a_value = flat_a[index]
b_value = flat_b[index]
difference = a_value - b_value
total_difference += difference
total_abs_difference += abs(difference)
if abs(difference) > tolerance:
how_many_different += 1
mean_difference = total_difference / value_count
mean_abs_difference = total_abs_difference / value_count
proportion_different = (how_many_different * 1.0) / value_count
if how_many_different == 0:
return True
else:
tf_logging.info("Tensors have {0} different values ({1}%), with mean"
" difference {2} and mean absolute difference {3}".format(
how_many_different, proportion_different * 100,
mean_difference, mean_abs_difference))
return False
def get_top_value(input_values):
max_value = None
max_index = None
for index, value in enumerate(input_values.flatten()):
    if max_value is None or value > max_value:
max_value = value
max_index = index
return max_index, max_value
def test_graph(float_graph_def, input_map, output_names, log_graph=False):
"""Runs the float graph through the rewriter and tests the results."""
float_results = run_graph_def(
float_graph_def, input_map,
[output_name + ":0" for output_name in output_names])
# TODO(petewarden): round test is currently failing because there is no
# RoundToSteps op available.
# round_rewriter = quantize_graph.GraphRewriter(float_graph_def, "round")
# round_graph_def = round_rewriter.rewrite(output_name)
# round_results = run_graph_def(round_graph_def, input_map,
# [output_name + ":0"])
# assert are_tensors_near(expected, round_results[0], 1.0)
#
# TODO(petewarden): Add test for "quantize" mode.
eightbit_rewriter = quantize_graph.GraphRewriter(
float_graph_def, "eightbit", quantized_input_range=None)
eightbit_graph_def = eightbit_rewriter.rewrite(output_names)
eightbit_results = run_graph_def(
eightbit_graph_def, input_map,
[output_name + ":0" for output_name in output_names])
for expected, result in zip(float_results, eightbit_results):
assert are_tensors_near(expected, result, 1.0)
if log_graph:
tf_logging.info("8bit:\n%s", str(eightbit_graph_def))
# Test the weights_rounded mode. This uses the default bit_depth.
weights_rounded_rewriter = quantize_graph.GraphRewriter(
float_graph_def, "weights_rounded", quantized_input_range=None)
weights_rounded_graph_def = weights_rounded_rewriter.rewrite(output_names)
weights_rounded_results = run_graph_def(
weights_rounded_graph_def, input_map,
[output_name + ":0" for output_name in output_names])
for expected, result in zip(float_results, weights_rounded_results):
assert are_tensors_near(expected, result, 1.0)
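# Editor's sketch (not part of the original tests): the core rewrite flow the
# helpers above exercise reduces to constructing a float GraphDef, rewriting
# it, and re-running it; "output_node" below is a hypothetical node name:
#
#   rewriter = quantize_graph.GraphRewriter(
#       float_graph_def, "eightbit", quantized_input_range=None)
#   quantized_graph_def = rewriter.rewrite(["output_node"])
#   results = run_graph_def(quantized_graph_def, {}, ["output_node:0"])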
class QuantizeGraphTest(test.TestCase):
def test_negative_const_problem(self):
shape_constant_name = "shape_constant"
shape_constant = quantize_graph.create_constant_node(
shape_constant_name, value=-0.8, dtype=dtypes.float32, shape=[1])
quantization_result = quantize_graph.quantize_weight_eightbit(
shape_constant, b"MIN_COMBINED")
self.assertEqual(4, len(quantization_result))
def test_odd_padding_problem(self):
"""Tests one error case we ran into in a real graph."""
test_conv(1, 4, 4, 1, 3, 1, 2, b"SAME",
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
[1, 2, 3, 4, 5, 6, 7, 8, 9])
def test_mat_mul_tiny(self):
    # These tests are added to test the degenerate case where
    # min(matrix) == max(matrix), which used to cause problems.
test_mat_mul(1, 1, 1, [2], [3])
test_mat_mul(1, 2, 1, [1], [2, 3])
test_mat_mul(1, 1, 2, [1, 1], [1, 1])
test_mat_mul(1, 1, 2, [0, 0], [1, 1])
# The general case.
test_mat_mul(1, 1, 2, [1, 2], [1, 2])
def test_mat_mul_small(self):
test_mat_mul(2, 4, 3, [1, 2, 3, 4, 5, 6],
[7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18])
def test_conv(self):
test_conv(1, 4, 3, 1, 3, 1, 1, b"SAME",
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
[1, 4, 7, 2, 5, 8, 3, 6, 9])
def test_reshape(self):
"""Tests that MatMul->Reshape->MatMul avoids extra quantize/dequantize."""
def make_matmul(name, a, b):
n = quantize_graph.create_node("MatMul", name, [a.name, b.name])
quantize_graph.set_attr_dtype(n, "T", dtypes.float32)
quantize_graph.set_attr_bool(n, "transpose_a", False)
quantize_graph.set_attr_bool(n, "transpose_b", False)
return n
# matmul_1 = input*weight_1
input_node = quantize_graph.create_constant_node(
"input", value=[0, 1, 2, 3], dtype=dtypes.float32, shape=[4, 1])
weight_1_node = quantize_graph.create_constant_node(
"weight_1",
value=[.5, .6, .7, .8, .9],
dtype=dtypes.float32,
shape=[1, 5])
matmul_1_node = make_matmul("matmul_1", input_node, weight_1_node)
# Reshape 4x5 to 10x2.
new_shape_node = quantize_graph.create_constant_node(
"new_shape_node", value=[10, 2], dtype=dtypes.int32, shape=[2])
reshape_node = quantize_graph.create_node(
"Reshape", "reshape", [matmul_1_node.name, new_shape_node.name])
quantize_graph.set_attr_dtype(reshape_node, "T", dtypes.float32)
# matmul_2_node = reshape*weight_2
weight_2_node = quantize_graph.create_constant_node(
"weight_2", value=[1.5, 2.5], dtype=dtypes.float32, shape=[2, 1])
matmul_2_node = make_matmul("matmul_2", reshape_node, weight_2_node)
g = graph_pb2.GraphDef()
g.node.extend([
input_node, weight_1_node, matmul_1_node, new_shape_node, reshape_node,
weight_2_node, matmul_2_node
])
# Test the graph
test_graph(g, {}, ["matmul_2"])
# Verify there is only one Quantize and one Requantize op.
eightbit_rewriter = quantize_graph.GraphRewriter(
g, "eightbit", quantized_input_range=None)
eightbit_graph_def = eightbit_rewriter.rewrite(["matmul_2"])
ops = [node.op for node in eightbit_graph_def.node]
# No quantize since all inputs are const and can be quantized up-front.
self.assertEqual(0, ops.count("QuantizeV2") + ops.count("Quantize"))
self.assertEqual(1, ops.count("QuantizedReshape"))
# One dequantize at the end.
self.assertEqual(1, ops.count("Dequantize"))
def test_quantize_array(self):
    # Test invalid parameters (empty array, or 0 buckets).
self.assertRaises(ValueError, quantize_graph.quantize_array, np.array([]),
2)
self.assertRaises(ValueError, quantize_graph.quantize_array,
np.array([1, 2]), 0)
# Test input array of length 1.
arr = np.array([1])
qarr = quantize_graph.quantize_array(arr, 1)
self.assertEqual(arr, qarr)
qarr = quantize_graph.quantize_array(arr, 2)
self.assertEqual(arr, qarr)
# Test input array with all elements equal.
arr = np.array([1, 1, 1])
qarr = quantize_graph.quantize_array(arr, 10)
self.assertTrue((np.array([1, 1, 1]) == qarr).all())
# Test "normal" input arrays.
arr = np.array([0, 0.3, 0.6, 1])
qarr = quantize_graph.quantize_array(arr, 1)
self.assertTrue((np.array([0.5, 0.5, 0.5, 0.5]) == qarr).all())
qarr = quantize_graph.quantize_array(arr, 2)
self.assertTrue((np.array([0.25, 0.25, 0.75, 0.75]) == qarr).all())
qarr = quantize_graph.quantize_array(arr.reshape((2, 2)), 2)
self.assertTrue((np.array([[0.25, 0.25], [0.75, 0.75]]) == qarr).all())
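  # Editor's note (sketch, not part of the original tests): quantize_array(
  # arr, num_buckets) spreads num_buckets equal-width buckets over
  # [min(arr), max(arr)] and replaces each value with its bucket midpoint,
  # which is what the expected arrays above encode (e.g. two buckets over
  # [0, 1] give 0.25 and 0.75).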
def test_non_float_concat(self):
concat_dim = quantize_graph.create_constant_node(
"concat_dim", value=0, dtype=dtypes.int32, shape=[])
a = quantize_graph.create_constant_node(
"a",
value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
dtype=dtypes.int32,
shape=[2, 2, 3])
b = quantize_graph.create_constant_node(
"b",
value=[13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24],
dtype=dtypes.int32,
shape=[2, 2, 3])
concat = quantize_graph.create_node("Concat", "concat",
[concat_dim.name, a.name, b.name])
quantize_graph.set_attr_int(concat, "N", 2)
quantize_graph.set_attr_dtype(concat, "T", dtypes.int32)
g = graph_pb2.GraphDef()
g.node.extend([concat_dim, a, b, concat])
test_graph(g, {}, [concat.name])
def test_non_float_reshape(self):
a = quantize_graph.create_constant_node(
"a",
value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
dtype=dtypes.int32,
shape=[2, 2, 3])
shape = quantize_graph.create_constant_node(
"shape", value=[12], dtype=dtypes.int32, shape=[1])
reshape = quantize_graph.create_node("Reshape", "reshape",
[a.name, shape.name])
quantize_graph.set_attr_dtype(reshape, "T", dtypes.int32)
g = graph_pb2.GraphDef()
g.node.extend([a, shape, reshape])
test_graph(g, {}, [reshape.name])
def test_concat(self):
shape_constant_name = "shape_constant"
a_constant_name = "a_constant"
b_constant_name = "b_constant"
concat_name = "concat"
float_graph_def = graph_pb2.GraphDef()
shape_constant = quantize_graph.create_constant_node(
shape_constant_name, value=0, dtype=dtypes.int32, shape=[])
float_graph_def.node.extend([shape_constant])
a_constant = quantize_graph.create_constant_node(
a_constant_name,
value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
dtype=dtypes.float32,
shape=[2, 2, 3])
float_graph_def.node.extend([a_constant])
b_constant = quantize_graph.create_constant_node(
b_constant_name,
value=[13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24],
dtype=dtypes.float32,
shape=[2, 2, 3])
float_graph_def.node.extend([b_constant])
concat_node = quantize_graph.create_node(
"Concat", concat_name,
[shape_constant_name, a_constant_name, b_constant_name])
quantize_graph.set_attr_int(concat_node, "N", 2)
quantize_graph.set_attr_dtype(concat_node, "T", dtypes.float32)
float_graph_def.node.extend([concat_node])
test_graph(float_graph_def, {}, [concat_name])
# Verify the concat is quantized.
eightbit_rewriter = quantize_graph.GraphRewriter(
float_graph_def, "eightbit", quantized_input_range=None)
eightbit_graph_def = eightbit_rewriter.rewrite([concat_name])
ops = [node.op for node in eightbit_graph_def.node]
self.assertEqual(1, ops.count("QuantizedConcat"))
def test_multiple_outputs(self):
input_constant_name = "input_constant"
split_constant_name = "split_constant"
split_name = "split"
concat_constant_name = "concat_constant"
concat_name = "concat"
float_graph_def = graph_pb2.GraphDef()
input_constant = quantize_graph.create_constant_node(
input_constant_name,
value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
dtype=dtypes.float32,
shape=[2, 6])
float_graph_def.node.extend([input_constant])
split_constant = quantize_graph.create_constant_node(
split_constant_name, value=1, dtype=dtypes.int32, shape=[])
float_graph_def.node.extend([split_constant])
split_node = quantize_graph.create_node(
"Split", split_name, [split_constant_name, input_constant_name])
quantize_graph.set_attr_int(split_node, "num_split", 2)
quantize_graph.set_attr_dtype(split_node, "T", dtypes.float32)
float_graph_def.node.extend([split_node])
concat_constant = quantize_graph.create_constant_node(
concat_constant_name, value=1, dtype=dtypes.int32, shape=[])
float_graph_def.node.extend([concat_constant])
concat_node = quantize_graph.create_node(
"Concat", concat_name,
[concat_constant_name, split_name + ":0", split_name + ":1"])
quantize_graph.set_attr_int(concat_node, "N", 2)
quantize_graph.set_attr_dtype(concat_node, "T", dtypes.float32)
float_graph_def.node.extend([concat_node])
test_graph(float_graph_def, {}, [concat_name])
def test_node_name_from_input(self):
self.assertEqual("SomeName",
quantize_graph.node_name_from_input("^SomeName:2"))
def test_unique_node_name_from_input(self):
self.assertEqual("__hat__SomeName__port__2",
quantize_graph.unique_node_name_from_input("^SomeName:2"))
def test_identity(self):
input_constant_name = "input_constant"
identity_name = "identity"
float_graph_def = graph_pb2.GraphDef()
input_constant = quantize_graph.create_constant_node(
input_constant_name,
value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
dtype=dtypes.float32,
shape=[2, 6])
float_graph_def.node.extend([input_constant])
identity_node = quantize_graph.create_node("Identity", identity_name,
[input_constant_name])
quantize_graph.set_attr_dtype(identity_node, "T", dtypes.float32)
float_graph_def.node.extend([identity_node])
mul_name = "mul"
mul_node = quantize_graph.create_node("Mul", mul_name,
[identity_name, identity_name])
quantize_graph.set_attr_dtype(mul_node, "T", dtypes.float32)
float_graph_def.node.extend([mul_node])
test_graph(float_graph_def, {}, [mul_name])
def test_keep_control_edges(self):
no_op_name = "no_op"
a_constant_name = "a_constant"
b_constant_name = "b_constant"
a_check_name = "a_check"
b_check_name = "b_check"
a_identity_name = "a_identity"
b_identity_name = "b_identity"
add_name = "add"
graph_def = graph_pb2.GraphDef()
no_op = quantize_graph.create_node("NoOp", no_op_name, [])
graph_def.node.extend([no_op])
a_constant = quantize_graph.create_constant_node(
a_constant_name, value=1, dtype=dtypes.float32, shape=[])
graph_def.node.extend([a_constant])
a_check_node = quantize_graph.create_node("CheckNumerics", a_check_name,
[a_constant_name])
graph_def.node.extend([a_check_node])
a_identity_node = quantize_graph.create_node(
"Identity", a_identity_name,
[a_constant_name, "^" + a_check_name, "^" + no_op_name])
graph_def.node.extend([a_identity_node])
b_constant = quantize_graph.create_constant_node(
b_constant_name, value=1, dtype=dtypes.float32, shape=[])
graph_def.node.extend([b_constant])
b_check_node = quantize_graph.create_node("CheckNumerics", b_check_name,
[b_constant_name])
graph_def.node.extend([b_check_node])
b_identity_node = quantize_graph.create_node(
"Identity", b_identity_name, [b_constant_name, "^" + b_check_name])
graph_def.node.extend([b_identity_node])
add_node = quantize_graph.create_node("Add", add_name,
[a_identity_name, b_identity_name])
quantize_graph.set_attr_dtype(add_node, "T", dtypes.float32)
graph_def.node.extend([add_node])
expected_output = graph_pb2.GraphDef()
no_op = quantize_graph.create_node("NoOp", no_op_name, [])
expected_output.node.extend([no_op])
a_constant = quantize_graph.create_constant_node(
a_constant_name, value=1, dtype=dtypes.float32, shape=[])
expected_output.node.extend([a_constant])
a_identity_node = quantize_graph.create_node(
"Identity", a_identity_name, [a_constant_name, "^" + no_op_name])
expected_output.node.extend([a_identity_node])
b_constant = quantize_graph.create_constant_node(
b_constant_name, value=1, dtype=dtypes.float32, shape=[])
expected_output.node.extend([b_constant])
add_node = quantize_graph.create_node("Add", add_name,
[a_identity_name, b_constant_name])
quantize_graph.set_attr_dtype(add_node, "T", dtypes.float32)
expected_output.node.extend([add_node])
expected_output.versions.CopyFrom(graph_def.versions)
expected_output.library.CopyFrom(graph_def.library)
output = graph_util.remove_training_nodes(graph_def)
stripped_output = graph_util.extract_sub_graph(output, [add_name])
self.assertProtoEquals(expected_output, stripped_output)
def test_batch_norm(self):
input_constant_name = "input_constant"
mean_constant_name = "mean_constant"
variance_constant_name = "variance_constant"
beta_constant_name = "beta_constant"
gamma_constant_name = "gamma_constant"
batch_norm_name = "batch_norm"
float_graph_def = graph_pb2.GraphDef()
input_constant = quantize_graph.create_constant_node(
input_constant_name,
value=[1, 4, 2, 5, 3, 6, -1, -4, -2, -5, -3, -6],
dtype=dtypes.float32,
shape=[1, 1, 6, 2])
float_graph_def.node.extend([input_constant])
mean_constant = quantize_graph.create_constant_node(
mean_constant_name, value=[10, 20], dtype=dtypes.float32, shape=[2])
float_graph_def.node.extend([mean_constant])
variance_constant = quantize_graph.create_constant_node(
variance_constant_name,
value=[0.25, 0.5],
dtype=dtypes.float32,
shape=[2])
float_graph_def.node.extend([variance_constant])
beta_constant = quantize_graph.create_constant_node(
beta_constant_name, value=[0.1, 0.6], dtype=dtypes.float32, shape=[2])
float_graph_def.node.extend([beta_constant])
gamma_constant = quantize_graph.create_constant_node(
gamma_constant_name, value=[0, 0], dtype=dtypes.float32, shape=[2])
float_graph_def.node.extend([gamma_constant])
batch_norm_node = quantize_graph.create_node(
"BatchNormWithGlobalNormalization", batch_norm_name, [
input_constant_name, mean_constant_name, variance_constant_name,
beta_constant_name, gamma_constant_name
])
quantize_graph.set_attr_dtype(batch_norm_node, "T", dtypes.float32)
quantize_graph.set_attr_bool(batch_norm_node, "scale_after_normalization",
False)
quantize_graph.set_attr_float(batch_norm_node, "variance_epsilon", 0.001)
float_graph_def.node.extend([batch_norm_node])
test_graph(float_graph_def, {}, [batch_norm_name])
def test_max_pool(self):
input_constant_name = "input_constant"
max_pool_name = "max_pool"
float_graph_def = graph_pb2.GraphDef()
input_constant = quantize_graph.create_constant_node(
input_constant_name,
value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
dtype=dtypes.float32,
shape=[1, 2, 6, 1])
float_graph_def.node.extend([input_constant])
max_pool_node = quantize_graph.create_node("MaxPool", max_pool_name,
[input_constant_name])
quantize_graph.set_attr_int_list(max_pool_node, "ksize", [1, 2, 2, 1])
quantize_graph.set_attr_int_list(max_pool_node, "strides", [1, 1, 1, 1])
quantize_graph.set_attr_string(max_pool_node, "padding", b"SAME")
float_graph_def.node.extend([max_pool_node])
test_graph(float_graph_def, {}, [max_pool_name])
def test_avg_pool(self):
input_constant_name = "input_constant"
avg_pool_name = "avg_pool"
float_graph_def = graph_pb2.GraphDef()
input_constant = quantize_graph.create_constant_node(
input_constant_name,
value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
dtype=dtypes.float32,
shape=[1, 2, 6, 1])
float_graph_def.node.extend([input_constant])
avg_pool_node = quantize_graph.create_node("AvgPool", avg_pool_name,
[input_constant_name])
quantize_graph.set_attr_dtype(avg_pool_node, "T", dtypes.float32)
quantize_graph.set_attr_int_list(avg_pool_node, "ksize", [1, 2, 2, 1])
quantize_graph.set_attr_int_list(avg_pool_node, "strides", [1, 1, 1, 1])
quantize_graph.set_attr_string(avg_pool_node, "padding", b"SAME")
float_graph_def.node.extend([avg_pool_node])
test_graph(float_graph_def, {}, [avg_pool_name])
def test_relu(self):
input_constant_name = "input_constant"
relu_name = "relu"
float_graph_def = graph_pb2.GraphDef()
input_constant = quantize_graph.create_constant_node(
input_constant_name,
value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
dtype=dtypes.float32,
shape=[1, 2, 6, 1])
float_graph_def.node.extend([input_constant])
relu_node = quantize_graph.create_node("Relu", relu_name,
[input_constant_name])
quantize_graph.set_attr_dtype(relu_node, "T", dtypes.float32)
float_graph_def.node.extend([relu_node])
test_graph(float_graph_def, {}, [relu_name])
def test_relu_w_fake_quant_w_min_max_vars(self):
input_node = quantize_graph.create_constant_node(
"input",
value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
dtype=dtypes.float32,
shape=[1, 2, 6, 1])
relu_node = quantize_graph.create_node("Relu", "relu", [input_node.name])
quantize_graph.set_attr_dtype(relu_node, "T", dtypes.float32)
min_node = quantize_graph.create_constant_node(
"min_bias_add", value=0, dtype=dtypes.float32, shape=[])
max_node = quantize_graph.create_constant_node(
"max_bias_add", value=12, dtype=dtypes.float32, shape=[])
fake_quant_node = quantize_graph.create_node(
"FakeQuantWithMinMaxVars", "fake_quant",
[relu_node.name, min_node.name, max_node.name])
float_graph_def = graph_pb2.GraphDef()
float_graph_def.node.extend(
[input_node, relu_node, min_node, max_node, fake_quant_node])
test_graph(float_graph_def, {}, [fake_quant_node.name], log_graph=True)
# Verify there is only one Quantize and one Requantize op.
eightbit_rewriter = quantize_graph.GraphRewriter(
float_graph_def, "eightbit", quantized_input_range=None)
eightbit_graph_def = eightbit_rewriter.rewrite([fake_quant_node.name])
ops = [node.op for node in eightbit_graph_def.node]
# No quantize since all inputs are const and can be quantized up-front.
self.assertEqual(0, ops.count("QuantizeV2") + ops.count("Quantize"))
# One dequantize at the end.
self.assertEqual(1, ops.count("Dequantize"))
def test_relu6(self):
input_constant_name = "input_constant"
relu6_name = "relu6"
float_graph_def = graph_pb2.GraphDef()
input_constant = quantize_graph.create_constant_node(
input_constant_name,
value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
dtype=dtypes.float32,
shape=[1, 2, 6, 1])
float_graph_def.node.extend([input_constant])
relu6_node = quantize_graph.create_node("Relu6", relu6_name,
[input_constant_name])
quantize_graph.set_attr_dtype(relu6_node, "T", dtypes.float32)
float_graph_def.node.extend([relu6_node])
test_graph(float_graph_def, {}, [relu6_name])
def test_bias_add(self):
input_constant_name = "input_constant"
offset_constant_name = "offset_constant"
bias_add_name = "bias_add"
float_graph_def = graph_pb2.GraphDef()
input_constant = quantize_graph.create_constant_node(
input_constant_name,
value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
dtype=dtypes.float32,
shape=[1, 1, 2, 6])
float_graph_def.node.extend([input_constant])
offset_constant = quantize_graph.create_constant_node(
offset_constant_name,
value=[1, 2, 3, 4, 5, 6],
dtype=dtypes.float32,
shape=[6])
float_graph_def.node.extend([offset_constant])
bias_add_node = quantize_graph.create_node(
"BiasAdd", bias_add_name, [input_constant_name, offset_constant_name])
quantize_graph.set_attr_dtype(bias_add_node, "T", dtypes.float32)
float_graph_def.node.extend([bias_add_node])
test_graph(float_graph_def, {}, [bias_add_name])
def test_quantized_input_range_errors(self):
with self.assertRaises(ValueError):
# Invalid mode.
quantize_graph.GraphRewriter(graph_pb2.GraphDef(), "weights_rounded",
[0, 1])
with self.assertRaises(ValueError):
# Invalid range.
quantize_graph.GraphRewriter(graph_pb2.GraphDef(), "eightbit", [0, -1])
def test_quantized_input_range_bias_add(self):
input_shape = [1, 1, 2, 6]
input_n = quantize_graph.create_node("Placeholder", "input", [])
quantize_graph.set_attr_dtype(input_n, "dtype", dtypes.float32)
quantize_graph.set_attr_shape(input_n, "shape", input_shape)
offset_n = quantize_graph.create_constant_node(
"offset", value=[1, 2, 3, 4, 5, 6], dtype=dtypes.float32, shape=[6])
bias_add_n = quantize_graph.create_node("BiasAdd", "bias_add",
[input_n.name, offset_n.name])
quantize_graph.set_attr_dtype(bias_add_n, "T", dtypes.float32)
float_graph_def = graph_pb2.GraphDef()
float_graph_def.node.extend([input_n, offset_n, bias_add_n])
input_map = {
input_n.name + ":0":
np.reshape([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], input_shape)
}
self._RunTestsForQuantizedInputRange(float_graph_def, input_map,
[bias_add_n.name], [-1, 20.])
self._RunTestsForQuantizedInputRange(float_graph_def, input_map,
[bias_add_n.name], [0, 12.])
def test_quantized_input_range_mat_mul(self):
shapes = [[3, 2], [2, 4]]
inputs = []
for i, shape in enumerate(shapes):
node = quantize_graph.create_node("Placeholder", "input_%s" % i, [])
quantize_graph.set_attr_dtype(node, "dtype", dtypes.float32)
quantize_graph.set_attr_shape(node, "shape", shape)
inputs.append(node)
mat_mul_node = quantize_graph.create_node("MatMul", "mat_mul",
[n.name for n in inputs])
quantize_graph.set_attr_dtype(mat_mul_node, "T", dtypes.float32)
float_graph_def = graph_pb2.GraphDef()
float_graph_def.node.extend(inputs + [mat_mul_node])
input_map = {
inputs[0].name + ":0":
np.reshape([1, 2, 3, 4, 5, 6], shapes[0]),
inputs[1].name + ":0":
np.reshape([.8, .7, .6, .5, .4, .3, .2, .1], shapes[1])
}
self._RunTestsForQuantizedInputRange(float_graph_def, input_map,
[mat_mul_node.name], [-1, 20.])
self._RunTestsForQuantizedInputRange(float_graph_def, input_map,
[mat_mul_node.name], [0, 6.])
  def _RunTestsForQuantizedInputRange(self, float_graph_def, input_map,
                                      output_names, input_range):
    if sys.version_info[0] == 3:
      # uint8->quint8 conversion for numpy is not working currently.
      return
    quantized_input_map = {}
    for k, v in input_map.items():
      arr = [
          int(round((n - input_range[0]) * 255 /
                    (input_range[1] - input_range[0]))) for n in v.flat
      ]
      arr = np.array(arr, np.uint8)
      arr = arr.reshape(v.shape)
      arr = arr.astype(dtypes.quint8.as_numpy_dtype)
      quantized_input_map[k] = arr
    output_tensors = [output_name + ":0" for output_name in output_names]
    float_results = run_graph_def(float_graph_def, input_map, output_tensors)
    # Quantize treating the input as quantized in range <input_range>.
    rewriter = quantize_graph.GraphRewriter(float_graph_def, "eightbit",
                                            input_range)
    graph_def = rewriter.rewrite(output_names)
    results = run_graph_def(graph_def, quantized_input_map, output_tensors)
    for expected, result in zip(float_results, results):
      assert are_tensors_near(expected, result, .5)
    ops = [node.op for node in graph_def.node]
    self.assertEqual(0, ops.count("QuantizeV2") + ops.count("Quantize"))
    self.assertEqual(len(output_names), ops.count("Dequantize"))
    # Quantize without treating input as quantized.
    rewriter = quantize_graph.GraphRewriter(
        float_graph_def, "eightbit", quantized_input_range=None)
    graph_def = rewriter.rewrite(output_names)
    results = run_graph_def(graph_def, input_map, output_tensors)
    for expected, result in zip(float_results, results):
      assert are_tensors_near(expected, result, .5)
    ops = [node.op for node in graph_def.node]
    self.assertEqual(
        len(input_map), ops.count("QuantizeV2") + ops.count("Quantize"))
    self.assertEqual(len(output_names), ops.count("Dequantize"))

  def test_bias_add_w_fake_quant_w_min_max_vars(self):
    input_node = quantize_graph.create_constant_node(
        "input",
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
        dtype=dtypes.float32,
        shape=[1, 1, 2, 5])
    offset_node = quantize_graph.create_constant_node(
        "offset", value=[1, 2, 3, 4, 5], dtype=dtypes.float32, shape=[5])
    bias_add_node = quantize_graph.create_node(
        "BiasAdd", "bias_add", [input_node.name, offset_node.name])
    quantize_graph.set_attr_dtype(bias_add_node, "T", dtypes.float32)
    min_node = quantize_graph.create_constant_node(
        "min_bias_add", value=-.5, dtype=dtypes.float32, shape=[])
    max_node = quantize_graph.create_constant_node(
        "max_bias_add", value=15.5, dtype=dtypes.float32, shape=[])
    fake_quant_node = quantize_graph.create_node(
        "FakeQuantWithMinMaxVars", "fake_quant",
        [bias_add_node.name, min_node.name, max_node.name])
    float_graph_def = graph_pb2.GraphDef()
    float_graph_def.node.extend([
        input_node, offset_node, bias_add_node, min_node, max_node,
        fake_quant_node
    ])
    test_graph(float_graph_def, {}, [fake_quant_node.name], log_graph=True)
    # Verify the rewrite quantizes the constant inputs up-front and takes its
    # requantization range from the FakeQuant min/max. Pass in
    # fallback_quantization_range, although it will have no effect because the
    # FakeQuantWithMinMaxVars are used instead.
    eightbit_rewriter = quantize_graph.GraphRewriter(
        float_graph_def,
        "eightbit",
        quantized_input_range=None,
        fallback_quantization_range=[-100, 100])
    eightbit_graph_def = eightbit_rewriter.rewrite([fake_quant_node.name])
    ops = [node.op for node in eightbit_graph_def.node]
    node_names = [node.name for node in eightbit_graph_def.node]
    # No quantize since all inputs are const and can be quantized up-front.
    self.assertEqual(0, ops.count("QuantizeV2") + ops.count("Quantize"))
    # One dequantize at the end.
    self.assertEqual(1, ops.count("Dequantize"))
    # The fallback constants are not in the graph.
    self.assertEqual(0, node_names.count("fallback_quantization_min_value"))
    self.assertEqual(0, node_names.count("fallback_quantization_max_value"))

  def test_bias_add_w_fallback_min_max_vars(self):
    input_node = quantize_graph.create_constant_node(
        "input",
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
        dtype=dtypes.float32,
        shape=[1, 1, 2, 5])
    offset_node = quantize_graph.create_constant_node(
        "offset", value=[1, 2, 3, 4, 5], dtype=dtypes.float32, shape=[5])
    bias_add_node = quantize_graph.create_node(
        "BiasAdd", "bias_add", [input_node.name, offset_node.name])
    quantize_graph.set_attr_dtype(bias_add_node, "T", dtypes.float32)
    float_graph_def = graph_pb2.GraphDef()
    float_graph_def.node.extend([input_node, offset_node, bias_add_node])
    test_graph(float_graph_def, {}, [bias_add_node.name], log_graph=True)
    # Verify that requantization uses the fallback range directly, so the
    # rewritten graph needs no RequantizationRange op.
    eightbit_rewriter = quantize_graph.GraphRewriter(
        float_graph_def,
        "eightbit",
        quantized_input_range=None,
        fallback_quantization_range=[-.5, 15.5])
    eightbit_graph_def = eightbit_rewriter.rewrite([bias_add_node.name])
    ops = [node.op for node in eightbit_graph_def.node]
    node_names = [node.name for node in eightbit_graph_def.node]
    # No quantize since all inputs are const and can be quantized up-front.
    self.assertEqual(0, ops.count("QuantizeV2") + ops.count("Quantize"))
    # One dequantize at the end.
    self.assertEqual(1, ops.count("Dequantize"))
    # No RequantizationRange op.
    self.assertEqual(0, ops.count("RequantizationRange"))
    # The fallback constants are in the graph.
    self.assertEqual(1, node_names.count("fallback_quantization_min_value"))
    self.assertEqual(1, node_names.count("fallback_quantization_max_value"))

  def test_remove_redundant_quantization(self):
    a_constant_name = "a_constant"
    a_constant_min_name = "a_constant_min"
    a_constant_max_name = "a_constant_max"
    a_dequantize_name = "a_dequantize"
    a_quantize_name = "a_quantize"
    b_constant_name = "b_constant"
    b_constant_min_name = "b_constant_min"
    b_constant_max_name = "b_constant_max"
    b_dequantize_name = "b_dequantize"
    b_quantize_name = "b_quantize"
    mat_mul_name = "mat_mul"
    graph_def = graph_pb2.GraphDef()
    a_constant = quantize_graph.create_constant_node(
        a_constant_name, value=(0,), dtype=dtypes.quint8, shape=[])
    graph_def.node.extend([a_constant])
    a_constant_min = quantize_graph.create_constant_node(
        a_constant_min_name, value=2, dtype=dtypes.float32, shape=[])
    graph_def.node.extend([a_constant_min])
    a_constant_max = quantize_graph.create_constant_node(
        a_constant_max_name, value=2, dtype=dtypes.float32, shape=[])
    graph_def.node.extend([a_constant_max])
    a_dequantize_node = quantize_graph.create_node(
        "Dequantize", a_dequantize_name,
        [a_constant_name, a_constant_min_name, a_constant_max_name])
    quantize_graph.set_attr_dtype(a_dequantize_node, "T", dtypes.uint8)
    graph_def.node.extend([a_dequantize_node])
    a_quantize_node = quantize_graph.create_node(
        "QuantizeV2", a_quantize_name,
        [a_dequantize_name, a_dequantize_name + ":1", a_dequantize_name + ":2"])
    quantize_graph.set_attr_dtype(a_quantize_node, "T", dtypes.uint8)
    graph_def.node.extend([a_quantize_node])
    b_constant = quantize_graph.create_constant_node(
        b_constant_name, value=(0,), dtype=dtypes.quint8, shape=[])
    graph_def.node.extend([b_constant])
    b_constant_min = quantize_graph.create_constant_node(
        b_constant_min_name, value=3, dtype=dtypes.float32, shape=[])
    graph_def.node.extend([b_constant_min])
    b_constant_max = quantize_graph.create_constant_node(
        b_constant_max_name, value=3, dtype=dtypes.float32, shape=[])
    graph_def.node.extend([b_constant_max])
    b_dequantize_node = quantize_graph.create_node(
        "Dequantize", b_dequantize_name,
        [b_constant_name, b_constant_min_name, b_constant_max_name])
    quantize_graph.set_attr_dtype(b_dequantize_node, "T", dtypes.uint8)
    graph_def.node.extend([b_dequantize_node])
    b_quantize_node = quantize_graph.create_node(
        "QuantizeV2", b_quantize_name,
        [b_dequantize_name, b_dequantize_name + ":1", b_dequantize_name + ":2"])
    quantize_graph.set_attr_dtype(b_quantize_node, "T", dtypes.uint8)
    graph_def.node.extend([b_quantize_node])
    mat_mul_node = quantize_graph.create_node("QuantizedMatMul", mat_mul_name, [
        a_quantize_name, b_quantize_name, a_quantize_name + ":1",
        a_quantize_name + ":2", b_quantize_name + ":1", b_quantize_name + ":2"
    ])
    quantize_graph.set_attr_dtype(mat_mul_node, "T1", dtypes.uint8)
    quantize_graph.set_attr_dtype(mat_mul_node, "T2", dtypes.int32)
    graph_def.node.extend([mat_mul_node])

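    # The expected graph keeps only the constants, with the QuantizedMatMul
    # wired straight to them.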
    expected_output = graph_pb2.GraphDef()
    a_constant = quantize_graph.create_constant_node(
        a_constant_name, value=(0,), dtype=dtypes.quint8, shape=[])
    expected_output.node.extend([a_constant])
    a_constant_min = quantize_graph.create_constant_node(
        a_constant_min_name, value=2, dtype=dtypes.float32, shape=[])
    expected_output.node.extend([a_constant_min])
    a_constant_max = quantize_graph.create_constant_node(
        a_constant_max_name, value=2, dtype=dtypes.float32, shape=[])
    expected_output.node.extend([a_constant_max])
    b_constant = quantize_graph.create_constant_node(
        b_constant_name, value=(0,), dtype=dtypes.quint8, shape=[])
    expected_output.node.extend([b_constant])
    b_constant_min = quantize_graph.create_constant_node(
        b_constant_min_name, value=3, dtype=dtypes.float32, shape=[])
    expected_output.node.extend([b_constant_min])
    b_constant_max = quantize_graph.create_constant_node(
        b_constant_max_name, value=3, dtype=dtypes.float32, shape=[])
    expected_output.node.extend([b_constant_max])
    mat_mul_node = quantize_graph.create_node("QuantizedMatMul", mat_mul_name, [
        a_constant_name, b_constant_name, a_constant_min_name,
        a_constant_max_name, b_constant_min_name, b_constant_max_name
    ])
    quantize_graph.set_attr_dtype(mat_mul_node, "T1", dtypes.uint8)
    quantize_graph.set_attr_dtype(mat_mul_node, "T2", dtypes.int32)
    expected_output.node.extend([mat_mul_node])
    expected_output.versions.CopyFrom(graph_def.versions)
    expected_output.library.CopyFrom(graph_def.library)
    rewriter = quantize_graph.GraphRewriter(
        graph_def, [mat_mul_name], quantized_input_range=None)
    output = rewriter.remove_redundant_quantization(graph_def)
    stripped_output = graph_util.extract_sub_graph(output, [mat_mul_name])
    self.assertProtoEquals(expected_output, stripped_output)

if __name__ == "__main__":
test.main()
| [((1481, 1496), 'tensorflow.python.framework.ops.Graph', 'ops_lib.Graph', ([], {}), '()\n', (1494, 1496), True, 'from tensorflow.python.framework import ops as ops_lib\n'), ((1888, 1908), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (1906, 1908), False, 'from tensorflow.core.framework import graph_pb2\n'), ((1924, 2026), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['a_constant_name'], {'value': 'a', 'dtype': 'dtypes.float32', 'shape': '[m, k]'}), '(a_constant_name, value=a, dtype=dtypes.\n float32, shape=[m, k])\n', (1959, 2026), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((2088, 2190), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['b_constant_name'], {'value': 'b', 'dtype': 'dtypes.float32', 'shape': '[k, n]'}), '(b_constant_name, value=b, dtype=dtypes.\n float32, shape=[k, n])\n', (2123, 2190), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((2254, 2344), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""MatMul"""', 'mat_mul_name', '[a_constant_name, b_constant_name]'], {}), "('MatMul', mat_mul_name, [a_constant_name,\n b_constant_name])\n", (2280, 2344), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((2387, 2451), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['mat_mul_node', '"""T"""', 'dtypes.float32'], {}), "(mat_mul_node, 'T', dtypes.float32)\n", (2416, 2451), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((2454, 2518), 'tensorflow.tools.quantization.quantize_graph.set_attr_bool', 'quantize_graph.set_attr_bool', (['mat_mul_node', '"""transpose_a"""', '(False)'], {}), "(mat_mul_node, 'transpose_a', False)\n", (2482, 2518), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((2521, 2585), 'tensorflow.tools.quantization.quantize_graph.set_attr_bool', 'quantize_graph.set_attr_bool', (['mat_mul_node', '"""transpose_b"""', '(False)'], {}), "(mat_mul_node, 'transpose_b', False)\n", (2549, 2585), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((3000, 3020), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (3018, 3020), False, 'from tensorflow.core.framework import graph_pb2\n'), ((3040, 3207), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['input_constant_name'], {'value': 'input_values', 'dtype': 'dtypes.float32', 'shape': '[image_batch_count, image_height, image_width, depth]'}), '(input_constant_name, value=input_values,\n dtype=dtypes.float32, shape=[image_batch_count, image_height,\n image_width, depth])\n', (3075, 3207), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((3293, 3457), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['filter_constant_name'], {'value': 'filter_values', 'dtype': 'dtypes.float32', 'shape': '[filter_size, filter_size, depth, filter_count]'}), '(filter_constant_name, value=\n filter_values, dtype=dtypes.float32, shape=[filter_size, filter_size,\n depth, filter_count])\n', (3328, 3457), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((3537, 3633), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Conv2D"""', 
'conv_name', '[input_constant_name, filter_constant_name]'], {}), "('Conv2D', conv_name, [input_constant_name,\n filter_constant_name])\n", (3563, 3633), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((3639, 3700), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['conv_node', '"""T"""', 'dtypes.float32'], {}), "(conv_node, 'T', dtypes.float32)\n", (3668, 3700), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((3703, 3781), 'tensorflow.tools.quantization.quantize_graph.set_attr_int_list', 'quantize_graph.set_attr_int_list', (['conv_node', '"""strides"""', '[1, stride, stride, 1]'], {}), "(conv_node, 'strides', [1, stride, stride, 1])\n", (3735, 3781), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((3784, 3845), 'tensorflow.tools.quantization.quantize_graph.set_attr_string', 'quantize_graph.set_attr_string', (['conv_node', '"""padding"""', 'padding'], {}), "(conv_node, 'padding', padding)\n", (3814, 3845), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((6689, 6778), 'tensorflow.tools.quantization.quantize_graph.GraphRewriter', 'quantize_graph.GraphRewriter', (['float_graph_def', '"""eightbit"""'], {'quantized_input_range': 'None'}), "(float_graph_def, 'eightbit',\n quantized_input_range=None)\n", (6717, 6778), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((7266, 7362), 'tensorflow.tools.quantization.quantize_graph.GraphRewriter', 'quantize_graph.GraphRewriter', (['float_graph_def', '"""weights_rounded"""'], {'quantized_input_range': 'None'}), "(float_graph_def, 'weights_rounded',\n quantized_input_range=None)\n", (7294, 7362), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((42440, 42451), 'tensorflow.python.platform.test.main', 'test.main', ([], {}), '()\n', (42449, 42451), False, 'from tensorflow.python.platform import test\n'), ((1528, 1587), 'tensorflow.python.framework.importer.import_graph_def', 'importer.import_graph_def', (['graph_def'], {'input_map': '{}', 'name': '""""""'}), "(graph_def, input_map={}, name='')\n", (1553, 1587), False, 'from tensorflow.python.framework import importer\n'), ((1595, 1623), 'tensorflow.python.client.session.Session', 'session.Session', ([], {'graph': 'graph'}), '(graph=graph)\n', (1610, 1623), False, 'from tensorflow.python.client import session\n'), ((7860, 7966), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['shape_constant_name'], {'value': '(-0.8)', 'dtype': 'dtypes.float32', 'shape': '[1]'}), '(shape_constant_name, value=-0.8, dtype=\n dtypes.float32, shape=[1])\n', (7895, 7966), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((7997, 8069), 'tensorflow.tools.quantization.quantize_graph.quantize_weight_eightbit', 'quantize_graph.quantize_weight_eightbit', (['shape_constant', "b'MIN_COMBINED'"], {}), "(shape_constant, b'MIN_COMBINED')\n", (8036, 8069), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((9532, 9637), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""input"""'], {'value': '[0, 1, 2, 3]', 'dtype': 'dtypes.float32', 'shape': '[4, 1]'}), "('input', value=[0, 1, 2, 3], dtype=\n dtypes.float32, shape=[4, 1])\n", (9567, 9637), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((9662, 9783), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 
'quantize_graph.create_constant_node', (['"""weight_1"""'], {'value': '[0.5, 0.6, 0.7, 0.8, 0.9]', 'dtype': 'dtypes.float32', 'shape': '[1, 5]'}), "('weight_1', value=[0.5, 0.6, 0.7, 0.8, \n 0.9], dtype=dtypes.float32, shape=[1, 5])\n", (9697, 9783), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((9927, 10031), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""new_shape_node"""'], {'value': '[10, 2]', 'dtype': 'dtypes.int32', 'shape': '[2]'}), "('new_shape_node', value=[10, 2], dtype=\n dtypes.int32, shape=[2])\n", (9962, 10031), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((10055, 10150), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Reshape"""', '"""reshape"""', '[matmul_1_node.name, new_shape_node.name]'], {}), "('Reshape', 'reshape', [matmul_1_node.name,\n new_shape_node.name])\n", (10081, 10150), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((10160, 10224), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['reshape_node', '"""T"""', 'dtypes.float32'], {}), "(reshape_node, 'T', dtypes.float32)\n", (10189, 10224), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((10285, 10391), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""weight_2"""'], {'value': '[1.5, 2.5]', 'dtype': 'dtypes.float32', 'shape': '[2, 1]'}), "('weight_2', value=[1.5, 2.5], dtype=\n dtypes.float32, shape=[2, 1])\n", (10320, 10391), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((10478, 10498), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (10496, 10498), False, 'from tensorflow.core.framework import graph_pb2\n'), ((10789, 10860), 'tensorflow.tools.quantization.quantize_graph.GraphRewriter', 'quantize_graph.GraphRewriter', (['g', '"""eightbit"""'], {'quantized_input_range': 'None'}), "(g, 'eightbit', quantized_input_range=None)\n", (10817, 10860), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((11629, 11642), 'numpy.array', 'np.array', (['[1]'], {}), '([1])\n', (11637, 11642), True, 'import numpy as np\n'), ((11654, 11691), 'tensorflow.tools.quantization.quantize_graph.quantize_array', 'quantize_graph.quantize_array', (['arr', '(1)'], {}), '(arr, 1)\n', (11683, 11691), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((11735, 11772), 'tensorflow.tools.quantization.quantize_graph.quantize_array', 'quantize_graph.quantize_array', (['arr', '(2)'], {}), '(arr, 2)\n', (11764, 11772), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((11863, 11882), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (11871, 11882), True, 'import numpy as np\n'), ((11894, 11932), 'tensorflow.tools.quantization.quantize_graph.quantize_array', 'quantize_graph.quantize_array', (['arr', '(10)'], {}), '(arr, 10)\n', (11923, 11932), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((12034, 12060), 'numpy.array', 'np.array', (['[0, 0.3, 0.6, 1]'], {}), '([0, 0.3, 0.6, 1])\n', (12042, 12060), True, 'import numpy as np\n'), ((12072, 12109), 'tensorflow.tools.quantization.quantize_graph.quantize_array', 'quantize_graph.quantize_array', (['arr', '(1)'], {}), '(arr, 1)\n', (12101, 12109), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((12189, 
12226), 'tensorflow.tools.quantization.quantize_graph.quantize_array', 'quantize_graph.quantize_array', (['arr', '(2)'], {}), '(arr, 2)\n', (12218, 12226), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((12493, 12586), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""concat_dim"""'], {'value': '(0)', 'dtype': 'dtypes.int32', 'shape': '[]'}), "('concat_dim', value=0, dtype=dtypes.\n int32, shape=[])\n", (12528, 12586), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((12599, 12728), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""a"""'], {'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]', 'dtype': 'dtypes.int32', 'shape': '[2, 2, 3]'}), "('a', value=[1, 2, 3, 4, 5, 6, 7, 8, 9, \n 10, 11, 12], dtype=dtypes.int32, shape=[2, 2, 3])\n", (12634, 12728), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((12765, 12902), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""b"""'], {'value': '[13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]', 'dtype': 'dtypes.int32', 'shape': '[2, 2, 3]'}), "('b', value=[13, 14, 15, 16, 17, 18, 19,\n 20, 21, 22, 23, 24], dtype=dtypes.int32, shape=[2, 2, 3])\n", (12800, 12902), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((12945, 13031), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Concat"""', '"""concat"""', '[concat_dim.name, a.name, b.name]'], {}), "('Concat', 'concat', [concat_dim.name, a.name, b.\n name])\n", (12971, 13031), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((13071, 13114), 'tensorflow.tools.quantization.quantize_graph.set_attr_int', 'quantize_graph.set_attr_int', (['concat', '"""N"""', '(2)'], {}), "(concat, 'N', 2)\n", (13098, 13114), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((13119, 13175), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['concat', '"""T"""', 'dtypes.int32'], {}), "(concat, 'T', dtypes.int32)\n", (13148, 13175), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((13185, 13205), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (13203, 13205), False, 'from tensorflow.core.framework import graph_pb2\n'), ((13334, 13463), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""a"""'], {'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]', 'dtype': 'dtypes.int32', 'shape': '[2, 2, 3]'}), "('a', value=[1, 2, 3, 4, 5, 6, 7, 8, 9, \n 10, 11, 12], dtype=dtypes.int32, shape=[2, 2, 3])\n", (13369, 13463), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((13504, 13595), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""shape"""'], {'value': '[12]', 'dtype': 'dtypes.int32', 'shape': '[1]'}), "('shape', value=[12], dtype=dtypes.int32,\n shape=[1])\n", (13539, 13595), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((13615, 13685), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Reshape"""', '"""reshape"""', '[a.name, shape.name]'], {}), "('Reshape', 'reshape', [a.name, shape.name])\n", (13641, 13685), False, 'from tensorflow.tools.quantization 
import quantize_graph\n'), ((13731, 13788), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['reshape', '"""T"""', 'dtypes.int32'], {}), "(reshape, 'T', dtypes.int32)\n", (13760, 13788), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((13798, 13818), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (13816, 13818), False, 'from tensorflow.core.framework import graph_pb2\n'), ((14085, 14105), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (14103, 14105), False, 'from tensorflow.core.framework import graph_pb2\n'), ((14127, 14227), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['shape_constant_name'], {'value': '(0)', 'dtype': 'dtypes.int32', 'shape': '[]'}), '(shape_constant_name, value=0, dtype=\n dtypes.int32, shape=[])\n', (14162, 14227), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((14299, 14442), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['a_constant_name'], {'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]', 'dtype': 'dtypes.float32', 'shape': '[2, 2, 3]'}), '(a_constant_name, value=[1, 2, 3, 4, 5, \n 6, 7, 8, 9, 10, 11, 12], dtype=dtypes.float32, shape=[2, 2, 3])\n', (14334, 14442), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((14534, 14685), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['b_constant_name'], {'value': '[13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]', 'dtype': 'dtypes.float32', 'shape': '[2, 2, 3]'}), '(b_constant_name, value=[13, 14, 15, 16,\n 17, 18, 19, 20, 21, 22, 23, 24], dtype=dtypes.float32, shape=[2, 2, 3])\n', (14569, 14685), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((14779, 14889), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Concat"""', 'concat_name', '[shape_constant_name, a_constant_name, b_constant_name]'], {}), "('Concat', concat_name, [shape_constant_name,\n a_constant_name, b_constant_name])\n", (14805, 14889), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((14907, 14955), 'tensorflow.tools.quantization.quantize_graph.set_attr_int', 'quantize_graph.set_attr_int', (['concat_node', '"""N"""', '(2)'], {}), "(concat_node, 'N', 2)\n", (14934, 14955), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((14960, 15023), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['concat_node', '"""T"""', 'dtypes.float32'], {}), "(concat_node, 'T', dtypes.float32)\n", (14989, 15023), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((15186, 15275), 'tensorflow.tools.quantization.quantize_graph.GraphRewriter', 'quantize_graph.GraphRewriter', (['float_graph_def', '"""eightbit"""'], {'quantized_input_range': 'None'}), "(float_graph_def, 'eightbit',\n quantized_input_range=None)\n", (15214, 15275), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((15700, 15720), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (15718, 15720), False, 'from tensorflow.core.framework import graph_pb2\n'), ((15742, 15885), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['input_constant_name'], 
{'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]', 'dtype': 'dtypes.float32', 'shape': '[2, 6]'}), '(input_constant_name, value=[1, 2, 3, 4,\n 5, 6, 7, 8, 9, 10, 11, 12], dtype=dtypes.float32, shape=[2, 6])\n', (15777, 15885), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((15986, 16086), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['split_constant_name'], {'value': '(1)', 'dtype': 'dtypes.int32', 'shape': '[]'}), '(split_constant_name, value=1, dtype=\n dtypes.int32, shape=[])\n', (16021, 16086), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((16158, 16253), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Split"""', 'split_name', '[split_constant_name, input_constant_name]'], {}), "('Split', split_name, [split_constant_name,\n input_constant_name])\n", (16184, 16253), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((16263, 16318), 'tensorflow.tools.quantization.quantize_graph.set_attr_int', 'quantize_graph.set_attr_int', (['split_node', '"""num_split"""', '(2)'], {}), "(split_node, 'num_split', 2)\n", (16290, 16318), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((16323, 16385), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['split_node', '"""T"""', 'dtypes.float32'], {}), "(split_node, 'T', dtypes.float32)\n", (16352, 16385), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((16454, 16555), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['concat_constant_name'], {'value': '(1)', 'dtype': 'dtypes.int32', 'shape': '[]'}), '(concat_constant_name, value=1, dtype=\n dtypes.int32, shape=[])\n', (16489, 16555), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((16629, 16745), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Concat"""', 'concat_name', "[concat_constant_name, split_name + ':0', split_name + ':1']"], {}), "('Concat', concat_name, [concat_constant_name, \n split_name + ':0', split_name + ':1'])\n", (16655, 16745), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((16762, 16810), 'tensorflow.tools.quantization.quantize_graph.set_attr_int', 'quantize_graph.set_attr_int', (['concat_node', '"""N"""', '(2)'], {}), "(concat_node, 'N', 2)\n", (16789, 16810), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((16815, 16878), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['concat_node', '"""T"""', 'dtypes.float32'], {}), "(concat_node, 'T', dtypes.float32)\n", (16844, 16878), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((17424, 17444), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (17442, 17444), False, 'from tensorflow.core.framework import graph_pb2\n'), ((17466, 17609), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['input_constant_name'], {'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]', 'dtype': 'dtypes.float32', 'shape': '[2, 6]'}), '(input_constant_name, value=[1, 2, 3, 4,\n 5, 6, 7, 8, 9, 10, 11, 12], dtype=dtypes.float32, shape=[2, 6])\n', (17501, 17609), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((17709, 17785), 
'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Identity"""', 'identity_name', '[input_constant_name]'], {}), "('Identity', identity_name, [input_constant_name])\n", (17735, 17785), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((17837, 17902), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['identity_node', '"""T"""', 'dtypes.float32'], {}), "(identity_node, 'T', dtypes.float32)\n", (17866, 17902), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((17989, 18064), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Mul"""', 'mul_name', '[identity_name, identity_name]'], {}), "('Mul', mul_name, [identity_name, identity_name])\n", (18015, 18064), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((18111, 18171), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['mul_node', '"""T"""', 'dtypes.float32'], {}), "(mul_node, 'T', dtypes.float32)\n", (18140, 18171), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((18563, 18583), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (18581, 18583), False, 'from tensorflow.core.framework import graph_pb2\n'), ((18596, 18646), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""NoOp"""', 'no_op_name', '[]'], {}), "('NoOp', no_op_name, [])\n", (18622, 18646), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((18699, 18797), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['a_constant_name'], {'value': '(1)', 'dtype': 'dtypes.float32', 'shape': '[]'}), '(a_constant_name, value=1, dtype=dtypes.\n float32, shape=[])\n', (18734, 18797), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((18861, 18937), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""CheckNumerics"""', 'a_check_name', '[a_constant_name]'], {}), "('CheckNumerics', a_check_name, [a_constant_name])\n", (18887, 18937), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((19048, 19165), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Identity"""', 'a_identity_name', "[a_constant_name, '^' + a_check_name, '^' + no_op_name]"], {}), "('Identity', a_identity_name, [a_constant_name, \n '^' + a_check_name, '^' + no_op_name])\n", (19074, 19165), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((19240, 19338), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['b_constant_name'], {'value': '(1)', 'dtype': 'dtypes.float32', 'shape': '[]'}), '(b_constant_name, value=1, dtype=dtypes.\n float32, shape=[])\n', (19275, 19338), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((19402, 19478), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""CheckNumerics"""', 'b_check_name', '[b_constant_name]'], {}), "('CheckNumerics', b_check_name, [b_constant_name])\n", (19428, 19478), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((19589, 19688), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Identity"""', 'b_identity_name', "[b_constant_name, 
'^' + b_check_name]"], {}), "('Identity', b_identity_name, [b_constant_name, \n '^' + b_check_name])\n", (19615, 19688), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((19753, 19832), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Add"""', 'add_name', '[a_identity_name, b_identity_name]'], {}), "('Add', add_name, [a_identity_name, b_identity_name])\n", (19779, 19832), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((19879, 19939), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['add_node', '"""T"""', 'dtypes.float32'], {}), "(add_node, 'T', dtypes.float32)\n", (19908, 19939), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((20001, 20021), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (20019, 20021), False, 'from tensorflow.core.framework import graph_pb2\n'), ((20034, 20084), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""NoOp"""', 'no_op_name', '[]'], {}), "('NoOp', no_op_name, [])\n", (20060, 20084), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((20143, 20241), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['a_constant_name'], {'value': '(1)', 'dtype': 'dtypes.float32', 'shape': '[]'}), '(a_constant_name, value=1, dtype=dtypes.\n float32, shape=[])\n', (20178, 20241), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((20314, 20411), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Identity"""', 'a_identity_name', "[a_constant_name, '^' + no_op_name]"], {}), "('Identity', a_identity_name, [a_constant_name, \n '^' + no_op_name])\n", (20340, 20411), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((20484, 20582), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['b_constant_name'], {'value': '(1)', 'dtype': 'dtypes.float32', 'shape': '[]'}), '(b_constant_name, value=1, dtype=dtypes.\n float32, shape=[])\n', (20519, 20582), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((20648, 20727), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Add"""', 'add_name', '[a_identity_name, b_constant_name]'], {}), "('Add', add_name, [a_identity_name, b_constant_name])\n", (20674, 20727), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((20774, 20834), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['add_node', '"""T"""', 'dtypes.float32'], {}), "(add_node, 'T', dtypes.float32)\n", (20803, 20834), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((21007, 21050), 'tensorflow.python.framework.graph_util.remove_training_nodes', 'graph_util.remove_training_nodes', (['graph_def'], {}), '(graph_def)\n', (21039, 21050), False, 'from tensorflow.python.framework import graph_util\n'), ((21073, 21121), 'tensorflow.python.framework.graph_util.extract_sub_graph', 'graph_util.extract_sub_graph', (['output', '[add_name]'], {}), '(output, [add_name])\n', (21101, 21121), False, 'from tensorflow.python.framework import graph_util\n'), ((21487, 21507), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (21505, 21507), False, 'from 
tensorflow.core.framework import graph_pb2\n'), ((21529, 21681), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['input_constant_name'], {'value': '[1, 4, 2, 5, 3, 6, -1, -4, -2, -5, -3, -6]', 'dtype': 'dtypes.float32', 'shape': '[1, 1, 6, 2]'}), '(input_constant_name, value=[1, 4, 2, 5,\n 3, 6, -1, -4, -2, -5, -3, -6], dtype=dtypes.float32, shape=[1, 1, 6, 2])\n', (21564, 21681), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((21781, 21889), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['mean_constant_name'], {'value': '[10, 20]', 'dtype': 'dtypes.float32', 'shape': '[2]'}), '(mean_constant_name, value=[10, 20],\n dtype=dtypes.float32, shape=[2])\n', (21816, 21889), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((21968, 22084), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['variance_constant_name'], {'value': '[0.25, 0.5]', 'dtype': 'dtypes.float32', 'shape': '[2]'}), '(variance_constant_name, value=[0.25, \n 0.5], dtype=dtypes.float32, shape=[2])\n', (22003, 22084), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((22186, 22296), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['beta_constant_name'], {'value': '[0.1, 0.6]', 'dtype': 'dtypes.float32', 'shape': '[2]'}), '(beta_constant_name, value=[0.1, 0.6],\n dtype=dtypes.float32, shape=[2])\n', (22221, 22296), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((22372, 22479), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['gamma_constant_name'], {'value': '[0, 0]', 'dtype': 'dtypes.float32', 'shape': '[2]'}), '(gamma_constant_name, value=[0, 0],\n dtype=dtypes.float32, shape=[2])\n', (22407, 22479), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((22557, 22752), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""BatchNormWithGlobalNormalization"""', 'batch_norm_name', '[input_constant_name, mean_constant_name, variance_constant_name,\n beta_constant_name, gamma_constant_name]'], {}), "('BatchNormWithGlobalNormalization',\n batch_norm_name, [input_constant_name, mean_constant_name,\n variance_constant_name, beta_constant_name, gamma_constant_name])\n", (22583, 22752), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((22792, 22859), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['batch_norm_node', '"""T"""', 'dtypes.float32'], {}), "(batch_norm_node, 'T', dtypes.float32)\n", (22821, 22859), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((22864, 22950), 'tensorflow.tools.quantization.quantize_graph.set_attr_bool', 'quantize_graph.set_attr_bool', (['batch_norm_node', '"""scale_after_normalization"""', '(False)'], {}), "(batch_norm_node, 'scale_after_normalization', \n False)\n", (22892, 22950), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((22983, 23056), 'tensorflow.tools.quantization.quantize_graph.set_attr_float', 'quantize_graph.set_attr_float', (['batch_norm_node', '"""variance_epsilon"""', '(0.001)'], {}), "(batch_norm_node, 'variance_epsilon', 0.001)\n", (23012, 23056), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((23287, 
23307), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (23305, 23307), False, 'from tensorflow.core.framework import graph_pb2\n'), ((23329, 23478), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['input_constant_name'], {'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]', 'dtype': 'dtypes.float32', 'shape': '[1, 2, 6, 1]'}), '(input_constant_name, value=[1, 2, 3, 4,\n 5, 6, 7, 8, 9, 10, 11, 12], dtype=dtypes.float32, shape=[1, 2, 6, 1])\n', (23364, 23478), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((23578, 23653), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""MaxPool"""', 'max_pool_name', '[input_constant_name]'], {}), "('MaxPool', max_pool_name, [input_constant_name])\n", (23604, 23653), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((23705, 23775), 'tensorflow.tools.quantization.quantize_graph.set_attr_int_list', 'quantize_graph.set_attr_int_list', (['max_pool_node', '"""ksize"""', '[1, 2, 2, 1]'], {}), "(max_pool_node, 'ksize', [1, 2, 2, 1])\n", (23737, 23775), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((23780, 23852), 'tensorflow.tools.quantization.quantize_graph.set_attr_int_list', 'quantize_graph.set_attr_int_list', (['max_pool_node', '"""strides"""', '[1, 1, 1, 1]'], {}), "(max_pool_node, 'strides', [1, 1, 1, 1])\n", (23812, 23852), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((23857, 23922), 'tensorflow.tools.quantization.quantize_graph.set_attr_string', 'quantize_graph.set_attr_string', (['max_pool_node', '"""padding"""', "b'SAME'"], {}), "(max_pool_node, 'padding', b'SAME')\n", (23887, 23922), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((24149, 24169), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (24167, 24169), False, 'from tensorflow.core.framework import graph_pb2\n'), ((24191, 24340), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['input_constant_name'], {'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]', 'dtype': 'dtypes.float32', 'shape': '[1, 2, 6, 1]'}), '(input_constant_name, value=[1, 2, 3, 4,\n 5, 6, 7, 8, 9, 10, 11, 12], dtype=dtypes.float32, shape=[1, 2, 6, 1])\n', (24226, 24340), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((24440, 24515), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""AvgPool"""', 'avg_pool_name', '[input_constant_name]'], {}), "('AvgPool', avg_pool_name, [input_constant_name])\n", (24466, 24515), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((24567, 24632), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['avg_pool_node', '"""T"""', 'dtypes.float32'], {}), "(avg_pool_node, 'T', dtypes.float32)\n", (24596, 24632), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((24637, 24707), 'tensorflow.tools.quantization.quantize_graph.set_attr_int_list', 'quantize_graph.set_attr_int_list', (['avg_pool_node', '"""ksize"""', '[1, 2, 2, 1]'], {}), "(avg_pool_node, 'ksize', [1, 2, 2, 1])\n", (24669, 24707), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((24712, 24784), 'tensorflow.tools.quantization.quantize_graph.set_attr_int_list', 'quantize_graph.set_attr_int_list', 
(['avg_pool_node', '"""strides"""', '[1, 1, 1, 1]'], {}), "(avg_pool_node, 'strides', [1, 1, 1, 1])\n", (24744, 24784), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((24789, 24854), 'tensorflow.tools.quantization.quantize_graph.set_attr_string', 'quantize_graph.set_attr_string', (['avg_pool_node', '"""padding"""', "b'SAME'"], {}), "(avg_pool_node, 'padding', b'SAME')\n", (24819, 24854), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((25069, 25089), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (25087, 25089), False, 'from tensorflow.core.framework import graph_pb2\n'), ((25111, 25260), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['input_constant_name'], {'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]', 'dtype': 'dtypes.float32', 'shape': '[1, 2, 6, 1]'}), '(input_constant_name, value=[1, 2, 3, 4,\n 5, 6, 7, 8, 9, 10, 11, 12], dtype=dtypes.float32, shape=[1, 2, 6, 1])\n', (25146, 25260), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((25356, 25424), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Relu"""', 'relu_name', '[input_constant_name]'], {}), "('Relu', relu_name, [input_constant_name])\n", (25382, 25424), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((25472, 25533), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['relu_node', '"""T"""', 'dtypes.float32'], {}), "(relu_node, 'T', dtypes.float32)\n", (25501, 25533), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((25697, 25834), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""input"""'], {'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]', 'dtype': 'dtypes.float32', 'shape': '[1, 2, 6, 1]'}), "('input', value=[1, 2, 3, 4, 5, 6, 7, 8,\n 9, 10, 11, 12], dtype=dtypes.float32, shape=[1, 2, 6, 1])\n", (25732, 25834), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((25880, 25941), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Relu"""', '"""relu"""', '[input_node.name]'], {}), "('Relu', 'relu', [input_node.name])\n", (25906, 25941), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((25946, 26007), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['relu_node', '"""T"""', 'dtypes.float32'], {}), "(relu_node, 'T', dtypes.float32)\n", (25975, 26007), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((26024, 26121), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""min_bias_add"""'], {'value': '(0)', 'dtype': 'dtypes.float32', 'shape': '[]'}), "('min_bias_add', value=0, dtype=dtypes.\n float32, shape=[])\n", (26059, 26121), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((26141, 26239), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""max_bias_add"""'], {'value': '(12)', 'dtype': 'dtypes.float32', 'shape': '[]'}), "('max_bias_add', value=12, dtype=dtypes.\n float32, shape=[])\n", (26176, 26239), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((26266, 26386), 'tensorflow.tools.quantization.quantize_graph.create_node', 
'quantize_graph.create_node', (['"""FakeQuantWithMinMaxVars"""', '"""fake_quant"""', '[relu_node.name, min_node.name, max_node.name]'], {}), "('FakeQuantWithMinMaxVars', 'fake_quant', [\n relu_node.name, min_node.name, max_node.name])\n", (26292, 26386), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((26422, 26442), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (26440, 26442), False, 'from tensorflow.core.framework import graph_pb2\n'), ((26710, 26799), 'tensorflow.tools.quantization.quantize_graph.GraphRewriter', 'quantize_graph.GraphRewriter', (['float_graph_def', '"""eightbit"""'], {'quantized_input_range': 'None'}), "(float_graph_def, 'eightbit',\n quantized_input_range=None)\n", (26738, 26799), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((27284, 27304), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (27302, 27304), False, 'from tensorflow.core.framework import graph_pb2\n'), ((27326, 27475), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['input_constant_name'], {'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]', 'dtype': 'dtypes.float32', 'shape': '[1, 2, 6, 1]'}), '(input_constant_name, value=[1, 2, 3, 4,\n 5, 6, 7, 8, 9, 10, 11, 12], dtype=dtypes.float32, shape=[1, 2, 6, 1])\n', (27361, 27475), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((27572, 27642), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Relu6"""', 'relu6_name', '[input_constant_name]'], {}), "('Relu6', relu6_name, [input_constant_name])\n", (27598, 27642), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((27691, 27753), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['relu6_node', '"""T"""', 'dtypes.float32'], {}), "(relu6_node, 'T', dtypes.float32)\n", (27720, 27753), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((28019, 28039), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (28037, 28039), False, 'from tensorflow.core.framework import graph_pb2\n'), ((28061, 28210), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['input_constant_name'], {'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]', 'dtype': 'dtypes.float32', 'shape': '[1, 1, 2, 6]'}), '(input_constant_name, value=[1, 2, 3, 4,\n 5, 6, 7, 8, 9, 10, 11, 12], dtype=dtypes.float32, shape=[1, 1, 2, 6])\n', (28096, 28210), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((28312, 28432), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['offset_constant_name'], {'value': '[1, 2, 3, 4, 5, 6]', 'dtype': 'dtypes.float32', 'shape': '[6]'}), '(offset_constant_name, value=[1, 2, 3, 4,\n 5, 6], dtype=dtypes.float32, shape=[6])\n', (28347, 28432), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((28533, 28634), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""BiasAdd"""', 'bias_add_name', '[input_constant_name, offset_constant_name]'], {}), "('BiasAdd', bias_add_name, [input_constant_name,\n offset_constant_name])\n", (28559, 28634), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((28644, 28709), 
'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['bias_add_node', '"""T"""', 'dtypes.float32'], {}), "(bias_add_node, 'T', dtypes.float32)\n", (28673, 28709), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((29277, 29331), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Placeholder"""', '"""input"""', '[]'], {}), "('Placeholder', 'input', [])\n", (29303, 29331), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((29336, 29399), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['input_n', '"""dtype"""', 'dtypes.float32'], {}), "(input_n, 'dtype', dtypes.float32)\n", (29365, 29399), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((29404, 29464), 'tensorflow.tools.quantization.quantize_graph.set_attr_shape', 'quantize_graph.set_attr_shape', (['input_n', '"""shape"""', 'input_shape'], {}), "(input_n, 'shape', input_shape)\n", (29433, 29464), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((29480, 29588), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""offset"""'], {'value': '[1, 2, 3, 4, 5, 6]', 'dtype': 'dtypes.float32', 'shape': '[6]'}), "('offset', value=[1, 2, 3, 4, 5, 6],\n dtype=dtypes.float32, shape=[6])\n", (29515, 29588), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((29611, 29696), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""BiasAdd"""', '"""bias_add"""', '[input_n.name, offset_n.name]'], {}), "('BiasAdd', 'bias_add', [input_n.name, offset_n.name]\n )\n", (29637, 29696), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((29740, 29802), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['bias_add_n', '"""T"""', 'dtypes.float32'], {}), "(bias_add_n, 'T', dtypes.float32)\n", (29769, 29802), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((29826, 29846), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (29844, 29846), False, 'from tensorflow.core.framework import graph_pb2\n'), ((30701, 30774), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""MatMul"""', '"""mat_mul"""', '[n.name for n in inputs]'], {}), "('MatMul', 'mat_mul', [n.name for n in inputs])\n", (30727, 30774), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((30825, 30889), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['mat_mul_node', '"""T"""', 'dtypes.float32'], {}), "(mat_mul_node, 'T', dtypes.float32)\n", (30854, 30889), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((30913, 30933), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (30931, 30933), False, 'from tensorflow.core.framework import graph_pb2\n'), ((32355, 32425), 'tensorflow.tools.quantization.quantize_graph.GraphRewriter', 'quantize_graph.GraphRewriter', (['float_graph_def', '"""eightbit"""', 'input_range'], {}), "(float_graph_def, 'eightbit', input_range)\n", (32383, 32425), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((32955, 33044), 'tensorflow.tools.quantization.quantize_graph.GraphRewriter', 'quantize_graph.GraphRewriter', (['float_graph_def', 
'"""eightbit"""'], {'quantized_input_range': 'None'}), "(float_graph_def, 'eightbit',\n quantized_input_range=None)\n", (32983, 33044), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((33552, 33681), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""input"""'], {'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]', 'dtype': 'dtypes.float32', 'shape': '[1, 1, 2, 5]'}), "('input', value=[1, 2, 3, 4, 5, 6, 7, 8,\n 9, 10], dtype=dtypes.float32, shape=[1, 1, 2, 5])\n", (33587, 33681), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((33729, 33835), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""offset"""'], {'value': '[1, 2, 3, 4, 5]', 'dtype': 'dtypes.float32', 'shape': '[5]'}), "('offset', value=[1, 2, 3, 4, 5], dtype=\n dtypes.float32, shape=[5])\n", (33764, 33835), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((33860, 33950), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""BiasAdd"""', '"""bias_add"""', '[input_node.name, offset_node.name]'], {}), "('BiasAdd', 'bias_add', [input_node.name,\n offset_node.name])\n", (33886, 33950), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((33960, 34025), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['bias_add_node', '"""T"""', 'dtypes.float32'], {}), "(bias_add_node, 'T', dtypes.float32)\n", (33989, 34025), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((34042, 34142), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""min_bias_add"""'], {'value': '(-0.5)', 'dtype': 'dtypes.float32', 'shape': '[]'}), "('min_bias_add', value=-0.5, dtype=\n dtypes.float32, shape=[])\n", (34077, 34142), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((34161, 34261), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""max_bias_add"""'], {'value': '(15.5)', 'dtype': 'dtypes.float32', 'shape': '[]'}), "('max_bias_add', value=15.5, dtype=\n dtypes.float32, shape=[])\n", (34196, 34261), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((34288, 34412), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""FakeQuantWithMinMaxVars"""', '"""fake_quant"""', '[bias_add_node.name, min_node.name, max_node.name]'], {}), "('FakeQuantWithMinMaxVars', 'fake_quant', [\n bias_add_node.name, min_node.name, max_node.name])\n", (34314, 34412), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((34448, 34468), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (34466, 34468), False, 'from tensorflow.core.framework import graph_pb2\n'), ((34901, 35031), 'tensorflow.tools.quantization.quantize_graph.GraphRewriter', 'quantize_graph.GraphRewriter', (['float_graph_def', '"""eightbit"""'], {'quantized_input_range': 'None', 'fallback_quantization_range': '[-100, 100]'}), "(float_graph_def, 'eightbit',\n quantized_input_range=None, fallback_quantization_range=[-100, 100])\n", (34929, 35031), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((35765, 35894), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""input"""'], 
{'value': '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]', 'dtype': 'dtypes.float32', 'shape': '[1, 1, 2, 5]'}), "('input', value=[1, 2, 3, 4, 5, 6, 7, 8,\n 9, 10], dtype=dtypes.float32, shape=[1, 1, 2, 5])\n", (35800, 35894), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((35942, 36048), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['"""offset"""'], {'value': '[1, 2, 3, 4, 5]', 'dtype': 'dtypes.float32', 'shape': '[5]'}), "('offset', value=[1, 2, 3, 4, 5], dtype=\n dtypes.float32, shape=[5])\n", (35977, 36048), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((36073, 36163), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""BiasAdd"""', '"""bias_add"""', '[input_node.name, offset_node.name]'], {}), "('BiasAdd', 'bias_add', [input_node.name,\n offset_node.name])\n", (36099, 36163), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((36173, 36238), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['bias_add_node', '"""T"""', 'dtypes.float32'], {}), "(bias_add_node, 'T', dtypes.float32)\n", (36202, 36238), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((36262, 36282), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (36280, 36282), False, 'from tensorflow.core.framework import graph_pb2\n'), ((36553, 36684), 'tensorflow.tools.quantization.quantize_graph.GraphRewriter', 'quantize_graph.GraphRewriter', (['float_graph_def', '"""eightbit"""'], {'quantized_input_range': 'None', 'fallback_quantization_range': '[-0.5, 15.5]'}), "(float_graph_def, 'eightbit',\n quantized_input_range=None, fallback_quantization_range=[-0.5, 15.5])\n", (36581, 36684), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((37914, 37934), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (37932, 37934), False, 'from tensorflow.core.framework import graph_pb2\n'), ((37952, 38052), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['a_constant_name'], {'value': '(0,)', 'dtype': 'dtypes.quint8', 'shape': '[]'}), '(a_constant_name, value=(0,), dtype=\n dtypes.quint8, shape=[])\n', (37987, 38052), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((38118, 38220), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['a_constant_min_name'], {'value': '(2)', 'dtype': 'dtypes.float32', 'shape': '[]'}), '(a_constant_min_name, value=2, dtype=\n dtypes.float32, shape=[])\n', (38153, 38220), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((38290, 38392), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['a_constant_max_name'], {'value': '(2)', 'dtype': 'dtypes.float32', 'shape': '[]'}), '(a_constant_max_name, value=2, dtype=\n dtypes.float32, shape=[])\n', (38325, 38392), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((38465, 38590), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Dequantize"""', 'a_dequantize_name', '[a_constant_name, a_constant_min_name, a_constant_max_name]'], {}), "('Dequantize', a_dequantize_name, [\n a_constant_name, a_constant_min_name, a_constant_max_name])\n", (38491, 38590), False, 'from 
tensorflow.tools.quantization import quantize_graph\n'), ((38607, 38674), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['a_dequantize_node', '"""T"""', 'dtypes.uint8'], {}), "(a_dequantize_node, 'T', dtypes.uint8)\n", (38636, 38674), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((38744, 38879), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""QuantizeV2"""', 'a_quantize_name', "[a_dequantize_name, a_dequantize_name + ':1', a_dequantize_name + ':2']"], {}), "('QuantizeV2', a_quantize_name, [\n a_dequantize_name, a_dequantize_name + ':1', a_dequantize_name + ':2'])\n", (38770, 38879), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((38896, 38961), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['a_quantize_node', '"""T"""', 'dtypes.uint8'], {}), "(a_quantize_node, 'T', dtypes.uint8)\n", (38925, 38961), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((39024, 39124), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['b_constant_name'], {'value': '(0,)', 'dtype': 'dtypes.quint8', 'shape': '[]'}), '(b_constant_name, value=(0,), dtype=\n dtypes.quint8, shape=[])\n', (39059, 39124), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((39190, 39292), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['b_constant_min_name'], {'value': '(3)', 'dtype': 'dtypes.float32', 'shape': '[]'}), '(b_constant_min_name, value=3, dtype=\n dtypes.float32, shape=[])\n', (39225, 39292), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((39362, 39464), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['b_constant_max_name'], {'value': '(3)', 'dtype': 'dtypes.float32', 'shape': '[]'}), '(b_constant_max_name, value=3, dtype=\n dtypes.float32, shape=[])\n', (39397, 39464), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((39537, 39662), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Dequantize"""', 'b_dequantize_name', '[b_constant_name, b_constant_min_name, b_constant_max_name]'], {}), "('Dequantize', b_dequantize_name, [\n b_constant_name, b_constant_min_name, b_constant_max_name])\n", (39563, 39662), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((39679, 39746), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['b_dequantize_node', '"""T"""', 'dtypes.uint8'], {}), "(b_dequantize_node, 'T', dtypes.uint8)\n", (39708, 39746), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((39816, 39951), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""QuantizeV2"""', 'b_quantize_name', "[b_dequantize_name, b_dequantize_name + ':1', b_dequantize_name + ':2']"], {}), "('QuantizeV2', b_quantize_name, [\n b_dequantize_name, b_dequantize_name + ':1', b_dequantize_name + ':2'])\n", (39842, 39951), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((39968, 40033), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['b_quantize_node', '"""T"""', 'dtypes.uint8'], {}), "(b_quantize_node, 'T', dtypes.uint8)\n", (39997, 40033), False, 'from 
tensorflow.tools.quantization import quantize_graph\n'), ((40098, 40299), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""QuantizedMatMul"""', 'mat_mul_name', "[a_quantize_name, b_quantize_name, a_quantize_name + ':1', a_quantize_name +\n ':2', b_quantize_name + ':1', b_quantize_name + ':2']"], {}), "('QuantizedMatMul', mat_mul_name, [\n a_quantize_name, b_quantize_name, a_quantize_name + ':1', \n a_quantize_name + ':2', b_quantize_name + ':1', b_quantize_name + ':2'])\n", (40124, 40299), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((40316, 40379), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['mat_mul_node', '"""T1"""', 'dtypes.uint8'], {}), "(mat_mul_node, 'T1', dtypes.uint8)\n", (40345, 40379), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((40384, 40447), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['mat_mul_node', '"""T2"""', 'dtypes.int32'], {}), "(mat_mul_node, 'T2', dtypes.int32)\n", (40413, 40447), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((40513, 40533), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (40531, 40533), False, 'from tensorflow.core.framework import graph_pb2\n'), ((40551, 40651), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['a_constant_name'], {'value': '(0,)', 'dtype': 'dtypes.quint8', 'shape': '[]'}), '(a_constant_name, value=(0,), dtype=\n dtypes.quint8, shape=[])\n', (40586, 40651), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((40723, 40825), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['a_constant_min_name'], {'value': '(2)', 'dtype': 'dtypes.float32', 'shape': '[]'}), '(a_constant_min_name, value=2, dtype=\n dtypes.float32, shape=[])\n', (40758, 40825), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((40901, 41003), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['a_constant_max_name'], {'value': '(2)', 'dtype': 'dtypes.float32', 'shape': '[]'}), '(a_constant_max_name, value=2, dtype=\n dtypes.float32, shape=[])\n', (40936, 41003), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((41075, 41175), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['b_constant_name'], {'value': '(0,)', 'dtype': 'dtypes.quint8', 'shape': '[]'}), '(b_constant_name, value=(0,), dtype=\n dtypes.quint8, shape=[])\n', (41110, 41175), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((41247, 41349), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['b_constant_min_name'], {'value': '(3)', 'dtype': 'dtypes.float32', 'shape': '[]'}), '(b_constant_min_name, value=3, dtype=\n dtypes.float32, shape=[])\n', (41282, 41349), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((41425, 41527), 'tensorflow.tools.quantization.quantize_graph.create_constant_node', 'quantize_graph.create_constant_node', (['b_constant_max_name'], {'value': '(3)', 'dtype': 'dtypes.float32', 'shape': '[]'}), '(b_constant_max_name, value=3, dtype=\n dtypes.float32, shape=[])\n', (41460, 41527), False, 'from tensorflow.tools.quantization 
import quantize_graph\n'), ((41601, 41789), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""QuantizedMatMul"""', 'mat_mul_name', '[a_constant_name, b_constant_name, a_constant_min_name, a_constant_max_name,\n b_constant_min_name, b_constant_max_name]'], {}), "('QuantizedMatMul', mat_mul_name, [\n a_constant_name, b_constant_name, a_constant_min_name,\n a_constant_max_name, b_constant_min_name, b_constant_max_name])\n", (41627, 41789), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((41807, 41870), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['mat_mul_node', '"""T1"""', 'dtypes.uint8'], {}), "(mat_mul_node, 'T1', dtypes.uint8)\n", (41836, 41870), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((41875, 41938), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['mat_mul_node', '"""T2"""', 'dtypes.int32'], {}), "(mat_mul_node, 'T2', dtypes.int32)\n", (41904, 41938), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((42117, 42204), 'tensorflow.tools.quantization.quantize_graph.GraphRewriter', 'quantize_graph.GraphRewriter', (['graph_def', '[mat_mul_name]'], {'quantized_input_range': 'None'}), '(graph_def, [mat_mul_name],\n quantized_input_range=None)\n', (42145, 42204), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((42295, 42347), 'tensorflow.python.framework.graph_util.extract_sub_graph', 'graph_util.extract_sub_graph', (['output', '[mat_mul_name]'], {}), '(output, [mat_mul_name])\n', (42323, 42347), False, 'from tensorflow.python.framework import graph_util\n'), ((9226, 9286), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""MatMul"""', 'name', '[a.name, b.name]'], {}), "('MatMul', name, [a.name, b.name])\n", (9252, 9286), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((9293, 9346), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['n', '"""T"""', 'dtypes.float32'], {}), "(n, 'T', dtypes.float32)\n", (9322, 9346), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((9353, 9406), 'tensorflow.tools.quantization.quantize_graph.set_attr_bool', 'quantize_graph.set_attr_bool', (['n', '"""transpose_a"""', '(False)'], {}), "(n, 'transpose_a', False)\n", (9381, 9406), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((9413, 9466), 'tensorflow.tools.quantization.quantize_graph.set_attr_bool', 'quantize_graph.set_attr_bool', (['n', '"""transpose_b"""', '(False)'], {}), "(n, 'transpose_b', False)\n", (9441, 9466), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((11436, 11448), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (11444, 11448), True, 'import numpy as np\n'), ((11562, 11578), 'numpy.array', 'np.array', (['[1, 2]'], {}), '([1, 2])\n', (11570, 11578), True, 'import numpy as np\n'), ((17072, 17122), 'tensorflow.tools.quantization.quantize_graph.node_name_from_input', 'quantize_graph.node_name_from_input', (['"""^SomeName:2"""'], {}), "('^SomeName:2')\n", (17107, 17122), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((17241, 17298), 'tensorflow.tools.quantization.quantize_graph.unique_node_name_from_input', 'quantize_graph.unique_node_name_from_input', (['"""^SomeName:2"""'], {}), "('^SomeName:2')\n", (17283, 17298), False, 'from 
tensorflow.tools.quantization import quantize_graph\n'), ((29972, 30036), 'numpy.reshape', 'np.reshape', (['[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]', 'input_shape'], {}), '([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], input_shape)\n', (29982, 30036), True, 'import numpy as np\n'), ((30469, 30530), 'tensorflow.tools.quantization.quantize_graph.create_node', 'quantize_graph.create_node', (['"""Placeholder"""', "('input_%s' % i)", '[]'], {}), "('Placeholder', 'input_%s' % i, [])\n", (30495, 30530), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((30537, 30597), 'tensorflow.tools.quantization.quantize_graph.set_attr_dtype', 'quantize_graph.set_attr_dtype', (['node', '"""dtype"""', 'dtypes.float32'], {}), "(node, 'dtype', dtypes.float32)\n", (30566, 30597), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((30604, 30655), 'tensorflow.tools.quantization.quantize_graph.set_attr_shape', 'quantize_graph.set_attr_shape', (['node', '"""shape"""', 'shape'], {}), "(node, 'shape', shape)\n", (30633, 30655), False, 'from tensorflow.tools.quantization import quantize_graph\n'), ((31053, 31094), 'numpy.reshape', 'np.reshape', (['[1, 2, 3, 4, 5, 6]', 'shapes[0]'], {}), '([1, 2, 3, 4, 5, 6], shapes[0])\n', (31063, 31094), True, 'import numpy as np\n'), ((31139, 31202), 'numpy.reshape', 'np.reshape', (['[0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1]', 'shapes[1]'], {}), '([0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1], shapes[1])\n', (31149, 31202), True, 'import numpy as np\n'), ((31971, 31994), 'numpy.array', 'np.array', (['arr', 'np.uint8'], {}), '(arr, np.uint8)\n', (31979, 31994), True, 'import numpy as np\n'), ((28957, 28977), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (28975, 28977), False, 'from tensorflow.core.framework import graph_pb2\n'), ((29139, 29159), 'tensorflow.core.framework.graph_pb2.GraphDef', 'graph_pb2.GraphDef', ([], {}), '()\n', (29157, 29159), False, 'from tensorflow.core.framework import graph_pb2\n'), ((11954, 11973), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (11962, 11973), True, 'import numpy as np\n'), ((12131, 12161), 'numpy.array', 'np.array', (['[0.5, 0.5, 0.5, 0.5]'], {}), '([0.5, 0.5, 0.5, 0.5])\n', (12139, 12161), True, 'import numpy as np\n'), ((12248, 12282), 'numpy.array', 'np.array', (['[0.25, 0.25, 0.75, 0.75]'], {}), '([0.25, 0.25, 0.75, 0.75])\n', (12256, 12282), True, 'import numpy as np\n'), ((12385, 12423), 'numpy.array', 'np.array', (['[[0.25, 0.25], [0.75, 0.75]]'], {}), '([[0.25, 0.25], [0.75, 0.75]])\n', (12393, 12423), True, 'import numpy as np\n')] |
aroiginfraplan/giscube-admin | layerserver/migrations/0001_initial.py | b7f3131b0186f847f3902df97f982cb288b16a49 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-04-26 09:14
import colorfield.fields
from django.db import migrations, models
import django.db.models.deletion
import giscube.utils
class Migration(migrations.Migration):
initial = True
dependencies = [
('giscube', '0002_update'),
]
operations = [
migrations.CreateModel(
name='GeoJsonLayer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50, unique=True)),
('title', models.CharField(blank=True, max_length=100, null=True)),
('description', models.TextField(blank=True, null=True)),
('keywords', models.CharField(blank=True, max_length=200, null=True)),
('active', models.BooleanField(default=True)),
('visibility', models.CharField(choices=[('private', 'Private'), ('public', 'Public')], default='private', max_length=10)),
('visible_on_geoportal', models.BooleanField(default=False)),
('shapetype', models.CharField(blank=True, choices=[('marker', 'Marker'), ('line', 'Line'), ('polygon', 'Polygon'), ('Circle', 'Circle')], max_length=20, null=True)),
('shape_radius', models.IntegerField(blank=True, null=True)),
('stroke_color', colorfield.fields.ColorField(blank=True, default=b'#FF3333', max_length=18, null=True)),
('stroke_width', models.IntegerField(blank=True, default=1, null=True)),
('stroke_dash_array', models.CharField(blank=True, default='', max_length=25, null=True)),
('fill_color', colorfield.fields.ColorField(blank=True, default=b'#FFC300', max_length=18, null=True)),
('fill_opacity', models.DecimalField(blank=True, decimal_places=1, default=1, max_digits=2, null=True)),
('url', models.CharField(blank=True, max_length=100, null=True)),
('data_file', models.FileField(blank=True, null=True, upload_to=giscube.utils.unique_service_directory)),
('service_path', models.CharField(max_length=255)),
('cache_time', models.IntegerField(blank=True, null=True)),
('last_fetch_on', models.DateField(blank=True, null=True)),
('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='giscube.Category')),
],
options={
'verbose_name': 'GeoJSONLayer',
'verbose_name_plural': 'GeoJSONLayers',
},
),
]
| [((450, 543), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (466, 543), False, 'from django.db import migrations, models\n'), ((567, 611), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'unique': '(True)'}), '(max_length=50, unique=True)\n', (583, 611), False, 'from django.db import migrations, models\n'), ((640, 695), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(100)', 'null': '(True)'}), '(blank=True, max_length=100, null=True)\n', (656, 695), False, 'from django.db import migrations, models\n'), ((730, 769), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (746, 769), False, 'from django.db import migrations, models\n'), ((801, 856), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(200)', 'null': '(True)'}), '(blank=True, max_length=200, null=True)\n', (817, 856), False, 'from django.db import migrations, models\n'), ((886, 919), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (905, 919), False, 'from django.db import migrations, models\n'), ((953, 1063), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('private', 'Private'), ('public', 'Public')]", 'default': '"""private"""', 'max_length': '(10)'}), "(choices=[('private', 'Private'), ('public', 'Public')],\n default='private', max_length=10)\n", (969, 1063), False, 'from django.db import migrations, models\n'), ((1103, 1137), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1122, 1137), False, 'from django.db import migrations, models\n'), ((1170, 1325), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('marker', 'Marker'), ('line', 'Line'), ('polygon', 'Polygon'), ('Circle',\n 'Circle')]", 'max_length': '(20)', 'null': '(True)'}), "(blank=True, choices=[('marker', 'Marker'), ('line', 'Line'\n ), ('polygon', 'Polygon'), ('Circle', 'Circle')], max_length=20, null=True)\n", (1186, 1325), False, 'from django.db import migrations, models\n'), ((1356, 1398), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1375, 1398), False, 'from django.db import migrations, models\n'), ((1556, 1609), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'default': '(1)', 'null': '(True)'}), '(blank=True, default=1, null=True)\n', (1575, 1609), False, 'from django.db import migrations, models\n'), ((1650, 1716), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(25)', 'null': '(True)'}), "(blank=True, default='', max_length=25, null=True)\n", (1666, 1716), False, 'from django.db import migrations, models\n'), ((1872, 1961), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'blank': '(True)', 'decimal_places': '(1)', 'default': '(1)', 'max_digits': '(2)', 'null': '(True)'}), '(blank=True, decimal_places=1, default=1, max_digits=2,\n null=True)\n', (1891, 1961), False, 'from django.db import migrations, models\n'), ((1984, 2039), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': 
'(100)', 'null': '(True)'}), '(blank=True, max_length=100, null=True)\n', (2000, 2039), False, 'from django.db import migrations, models\n'), ((2072, 2166), 'django.db.models.FileField', 'models.FileField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': 'giscube.utils.unique_service_directory'}), '(blank=True, null=True, upload_to=giscube.utils.\n unique_service_directory)\n', (2088, 2166), False, 'from django.db import migrations, models\n'), ((2197, 2229), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (2213, 2229), False, 'from django.db import migrations, models\n'), ((2263, 2305), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2282, 2305), False, 'from django.db import migrations, models\n'), ((2342, 2381), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2358, 2381), False, 'from django.db import migrations, models\n'), ((2413, 2527), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""giscube.Category"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.SET_NULL, to='giscube.Category')\n", (2430, 2527), False, 'from django.db import migrations, models\n')] |
pirica/fortnite-leaks-image-generator | SETTINGS.py | c23633862fd7d2286700f932e5dab41decd2ff72 | backgroundurl = "https://storage.needpix.com/rsynced_images/colored-background.jpg" # <- Needs to be an image URL!
lang = "en" # <- language code
displayset = True # <- Display the Set of the Item
raritytext = True # <- Display the Rarity of the Item
typeconfig = {
"BannerToken": True,
"AthenaBackpack": True,
"AthenaPetCarrier": True,
"AthenaPet": True,
"AthenaPickaxe": True,
"AthenaCharacter": True,
"AthenaSkyDiveContrail": True,
"AthenaGlider": True,
"AthenaDance": True,
"AthenaEmoji": True,
"AthenaLoadingScreen": True,
"AthenaMusicPack": True,
"AthenaSpray": True,
"AthenaToy": True,
"AthenaBattleBus": True,
"AthenaItemWrap": True
}
interval = 5 # <- Time (in seconds) until the bot checks for leaks again | Recommended: 7
watermark = "" # <- Leave it empty if you don't want one
watermarksize = 25 # <- Size of the Watermark
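
# Example (illustrative): to stop posting a cosmetic type, flip its flag in
# typeconfig above, e.g.
#   typeconfig["AthenaGlider"] = False
#   typeconfig["AthenaSpray"] = False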
| [] |
rajeevs1992/pyhealthvault | src/healthvaultlib/tests/testbase.py | 2b6fa7c1687300bcc2e501368883fbb13dc80495 | import unittest
import settings
from healthvaultlib.helpers.connection import Connection
class TestBase(unittest.TestCase):
def setUp(self):
self.connection = self.get_connection()
def get_connection(self):
conn = Connection(settings.HV_APPID, settings.HV_SERVICE_SERVER)
conn.thumbprint = settings.APP_THUMBPRINT
conn.publickey = settings.APP_PUBLIC_KEY
conn.privatekey = settings.APP_PRIVATE_KEY
conn.connect()
conn.set_person_and_record(settings.OFFLINE_PERSON_ID, settings.OFFLINE_RECORD_ID)
return conn
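
# Minimal (hypothetical) test built on this base class; the class and method
# names below are illustrative, not part of the library:
#
#   class ConnectionSmokeTest(TestBase):
#       def test_connection_established(self):
#           self.assertIsNotNone(self.connection)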
| [((250, 307), 'healthvaultlib.helpers.connection.Connection', 'Connection', (['settings.HV_APPID', 'settings.HV_SERVICE_SERVER'], {}), '(settings.HV_APPID, settings.HV_SERVICE_SERVER)\n', (260, 307), False, 'from healthvaultlib.helpers.connection import Connection\n')] |
StepicOrg/stepik-apps | apps/extensions/migrations/0012_imports_path_urlfield_to_charfield.py | 5825bc9b2444ad4690681964d1bed172706f8796 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-09 03:01
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('extensions', '0011_auto_20170502_0908'),
]
operations = [
migrations.AlterField(
model_name='extension',
name='imports_path',
field=models.CharField(default='imports/', max_length=255),
),
]
| [((413, 465), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""imports/"""', 'max_length': '(255)'}), "(default='imports/', max_length=255)\n", (429, 465), False, 'from django.db import migrations, models\n')] |
secureosv/pythia | regtests/bench/thread_collision.py | 459f9e2bc0bb2da57e9fa8326697d9ef3386883a | '''
multi-threading (python3 version)
https://docs.python.org/3/library/threading.html
'''
from time import perf_counter as clock  # time.clock was removed in Python 3.8
import threading
THREADS=2
lock = threading.Lock()
A = 0
B = 0
C = 0
def test_globals():
global A, B, C
for i in range(1024*1024):
lock.acquire()
A += 1
B += 2
C = A + B
lock.release()
def main():
print( 'starting threading test')
starttime = clock()
threads = []
for i in range(THREADS):
t = threading.Thread( target=test_globals, args=() )
t.start()
threads.append( t )
for t in threads:
t.join()
print( clock()-starttime)
print('A:', A)
print('B:', B)
print('C:', C)
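
# Informational note: even under CPython's GIL, `A += 1` is not atomic; it
# compiles to separate load and store bytecodes, so without the lock two
# threads could interleave between them and lose updates.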
main() | [((150, 166), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (164, 166), False, 'import threading\n'), ((377, 384), 'time.clock', 'clock', ([], {}), '()\n', (382, 384), False, 'from time import clock\n'), ((431, 477), 'threading.Thread', 'threading.Thread', ([], {'target': 'test_globals', 'args': '()'}), '(target=test_globals, args=())\n', (447, 477), False, 'import threading\n'), ((554, 561), 'time.clock', 'clock', ([], {}), '()\n', (559, 561), False, 'from time import clock\n')] |
scooler/checkers | game/board.py | 90bfe8702c6005c767a8673caed6e7e2f0ce5879 | import numpy as np
class Board:
"""
0 - black
1 - white
"""
def __init__(self):
board = [
[0, 1] * 4,
[1, 0] * 4
] * 4
players_board = [
[0, 1] * 4, # player 1
[1, 0] * 4
] + [[0] * 8] * 4 + [ # 4 rows of nothing
[0, 2] * 4, # player 2
[2, 0] * 4
]
self.board = np.array(board)
self.players_board = np.array(players_board)
self.x_size = 8
self.y_size = 8
# def move(self, x, y, current_player):
# self.board[x, y] = current_player
# def are_same_and_non_zero(self, array):
# return np.unique(array).size == 1 and array[0] != 0
# def is_board_full(self):
# return not np.any(np.unique(self.board) == 0)
    def is_finished(self):
        """Is the game finished? Placeholder: always reports True for now."""
        # TODO: implement real end-of-game detection for checkers (a player
        # has no pieces or no legal moves left); the commented-out checks
        # below were carried over from a tic-tac-toe board and do not apply.
        return True
# for i in range(0, self.x_size): # rows
# if self.are_same_and_non_zero(self.board[i, :]):
# self.player_who_won = self.board[i, 0]
# self.result = 'Won {} - row {}'.format(self.player(self.player_who_won), i)
# return True
# for i in range(0, self.y_size): # columns
# if self.are_same_and_non_zero(self.board[:, i]):
# self.player_who_won = self.board[0, i]
# self.result = 'Won {} - col {}'.format(self.player(self.player_who_won), i)
# return True
# if self.are_same_and_non_zero(np.diag(self.board)): # diagonal
# self.player_who_won = self.board[1, 1]
# self.result = 'Won {} - diagonal {}'.format(self.player(self.player_who_won), i)
# return True
# if self.are_same_and_non_zero(np.diag(np.flipud(self.board))): # anty-diagonal
# self.player_who_won = self.board[1, 1]
# self.result = 'Won {} - anty-diagonal {}'.format(self.player(self.player_who_won), i)
# return True
# if self.is_board_full():
# self.player_who_won = 0 # nobody
# self.result = 'Draw'
# return True # draw
return False
def show(self):
# print(self.board)
# print(self.players_board)
return
# def player(self, player_no):
# if player_no == 1: return 'Player 1 (X)'
# if player_no == 2: return 'Player 2 (O)'
# def show_player_info(self, player_no):
# print("It's turn of ", self.player(player_no))
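
# Quick sanity check (illustrative, not part of the original file):
#
#   b = Board()
#   assert b.board.shape == (8, 8)
#   assert (b.players_board == 1).sum() == 8   # player 1's men
#   assert (b.players_board == 2).sum() == 8   # player 2's men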
| [((331, 346), 'numpy.array', 'np.array', (['board'], {}), '(board)\n', (339, 346), True, 'import numpy as np\n'), ((372, 395), 'numpy.array', 'np.array', (['players_board'], {}), '(players_board)\n', (380, 395), True, 'import numpy as np\n')] |
vogelfenx/storagebot | utils/get_season_things_price.py | 64ab07b068bf645d7cdf5bb1cd5db91c0e2a9228 | def get_season_things_price(thing, amount, price):
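    # Note: the user-facing strings below are Russian. 'Стоимость составит X'
    # means 'The cost will be X'; 'р./неделю' and 'р./месяц' mean 'RUB/week'
    # and 'RUB/month'.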
if thing == 'wheel':
wheel_price = price[thing]['month'] * amount
return f'Стоимость составит {wheel_price}/месяц'
else:
other_thing_price_week = price[thing]['week'] * amount
other_thing_price_month = price[thing]['month'] * amount
return f'Стоимость составит {other_thing_price_week} р./неделю' + \
f' или {other_thing_price_month} р./месяц' | [] |
zhester/zge | zge/engine.py | 246096a8c1fd26472091aac747a3fffda58f3072 | """
Zoe Game Engine Core Implementation
===================================
Requirements
------------
[pygame](http://www.pygame.org/)
"""
# core packages
# third-party packages
import pygame
# local package
import layer
__version__ = '0.0.0'
#=============================================================================
class Engine( object ):
"""
Simple game engine object.
"""
#=========================================================================
def __init__( self, size ):
"""
Initializes an Engine object.
"""
# pygame initialization
pygame.init()
# initialize the root display surface
self.window = pygame.display.set_mode( size, 0, 32 )
# set the title bar text and iconification text
pygame.display.set_caption( 'Demonstration', 'Demo' )
# set the application icon
icon = pygame.image.load( '../assets/z32.png' )
pygame.display.set_icon( icon )
# create a list of normal display layers
self._layers = []
# create a transparent "top" layer for overlayed information
self._top = layer.TextLayer()
# initialize last tick value
self._last_tick = pygame.time.get_ticks()
self._last_wait = 0
# set an FPS cap
self._fps = 0.0
self._fps_limit = 120.0
self._tick_step = int( round( 1000.0 / self._fps_limit ) )
# engine is currently running
self._is_running = False
# short debug string for various things
self._debug = ''
#=========================================================================
def run( self ):
"""
Run the game loop (does not return until the application quits).
"""
# update tick value before entering the loop
self._last_tick = pygame.time.get_ticks()
# execute infinite application loop
self._is_running = True
while self._is_running:
# process event queue
for event in pygame.event.get():
# check for quit event
if event.type == pygame.QUIT:
self._is_running = False
# check for key event
elif ( event.type == pygame.KEYDOWN ) \
or ( event.type == pygame.KEYUP ) :
self.trigger_key_event( event )
# exit application loop if done
if self._is_running == False:
break
# update the game display
self.update()
# ZIH - simulate hard work
#pygame.time.delay( 3 )
# compute duration of last event/render loop
end_tick = pygame.time.get_ticks()
delta = end_tick - self._last_tick
self._last_tick = end_tick
# update FPS value
if delta > 0:
self._fps = 1000.0 / float( delta )
else:
self._fps = self._fps_limit
# compute remaining time available inside this iteration
if delta < self._tick_step:
self._last_wait = self._tick_step - delta
else:
self._last_wait = 0
# let the OS do other stuff on this core
pygame.time.wait( self._last_wait )
# shut down pygame
pygame.quit()
# return exit status
return 0
#=========================================================================
def trigger_key_event( self, event ):
"""
Initiates key input events.
"""
# ZIH - temp, just seeing how to poll the keys
mods = pygame.key.get_mods()
mod_bits = [
( pygame.KMOD_ALT, 'A' ),
( pygame.KMOD_CTRL, 'C' ),
( pygame.KMOD_SHIFT, 'S' )
]
mod_str = ''.join( b[ 1 ] for b in mod_bits if b[ 0 ] & mods )
if event.type == pygame.KEYUP:
self._debug = '({})'.format( mod_str )
elif event.type == pygame.KEYDOWN:
self._debug = '({}){}'.format(
mod_str,
pygame.key.name( event.key )
)
#=========================================================================
def update( self ):
"""
Updates the display.
"""
# update overlayed information
self._top.set_text(
' [ fps:{:4.0f} sch:{:3} tck:{:08} dbg:{} ]'.format(
self._fps,
self._last_wait,
self._last_tick,
self._debug
)
)
# draw the display on the back buffer
self._draw_layers()
# update the display (swap video buffers)
pygame.display.update()
#=========================================================================
def _draw_layers( self ):
"""
Blits all the display layers onto the back buffer.
"""
# fill the background
self.window.fill( ( 32, 32, 32 ) )
# blit all user layers
        for display_layer in self._layers:
            display_layer.blit( self.window )
# blit the top layer
self._top.blit( self.window )
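
# Minimal (hypothetical) entry point; the window size is illustrative:
#
#   if __name__ == '__main__':
#       engine = Engine( ( 640, 480 ) )
#       raise SystemExit( engine.run() )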
| [((617, 630), 'pygame.init', 'pygame.init', ([], {}), '()\n', (628, 630), False, 'import pygame\n'), ((700, 736), 'pygame.display.set_mode', 'pygame.display.set_mode', (['size', '(0)', '(32)'], {}), '(size, 0, 32)\n', (723, 736), False, 'import pygame\n'), ((804, 855), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Demonstration"""', '"""Demo"""'], {}), "('Demonstration', 'Demo')\n", (830, 855), False, 'import pygame\n'), ((909, 947), 'pygame.image.load', 'pygame.image.load', (['"""../assets/z32.png"""'], {}), "('../assets/z32.png')\n", (926, 947), False, 'import pygame\n'), ((958, 987), 'pygame.display.set_icon', 'pygame.display.set_icon', (['icon'], {}), '(icon)\n', (981, 987), False, 'import pygame\n'), ((1156, 1173), 'layer.TextLayer', 'layer.TextLayer', ([], {}), '()\n', (1171, 1173), False, 'import layer\n'), ((1238, 1261), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (1259, 1261), False, 'import pygame\n'), ((1870, 1893), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (1891, 1893), False, 'import pygame\n'), ((3407, 3420), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (3418, 3420), False, 'import pygame\n'), ((3722, 3743), 'pygame.key.get_mods', 'pygame.key.get_mods', ([], {}), '()\n', (3741, 3743), False, 'import pygame\n'), ((4795, 4818), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (4816, 4818), False, 'import pygame\n'), ((2063, 2081), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (2079, 2081), False, 'import pygame\n'), ((2755, 2778), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (2776, 2778), False, 'import pygame\n'), ((3335, 3368), 'pygame.time.wait', 'pygame.time.wait', (['self._last_wait'], {}), '(self._last_wait)\n', (3351, 3368), False, 'import pygame\n'), ((5166, 5189), 'layer.blit', 'layer.blit', (['self.window'], {}), '(self.window)\n', (5176, 5189), False, 'import layer\n'), ((4182, 4208), 'pygame.key.name', 'pygame.key.name', (['event.key'], {}), '(event.key)\n', (4197, 4208), False, 'import pygame\n')] |
CHESyrian/Estebyan | Authentication/migrations/0004_auto_20201115_1105.py | 015c0a8e95d033af04ba949942da79a4f5a90488 | # Generated by Django 3.0.6 on 2020-11-15 09:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Authentication', '0003_auto_20201113_2210'),
]
operations = [
migrations.AlterField(
model_name='profiles',
name='Qu_Shares',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='profiles',
name='Questionnais',
field=models.IntegerField(default=0),
),
]
| [((348, 378), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (367, 378), False, 'from django.db import migrations, models\n'), ((508, 538), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (527, 538), False, 'from django.db import migrations, models\n')] |
EdisonBr/MockDados | dashboard/urls.py | c625cba7b93a8f31609549241c5aa71932e26b2d |
from django.urls import path
from .views import dashboard_cost, dashboard_energy, MotorDataListView
app_name = 'dashboard'
urlpatterns = [
path('', MotorDataListView.as_view(), name='dashboard_custom'),
#path('', dashboard_custom, name='dashboard_custom'),
path('energy', dashboard_energy, name='dashboard_energy'),
path('cost', dashboard_cost, name='dashboard_cost'),
]
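
# With app_name set above, these routes reverse (illustratively) as e.g.
#   reverse('dashboard:dashboard_energy')   # -> '<mount prefix>/energy'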
| [((336, 393), 'django.urls.path', 'path', (['"""energy"""', 'dashboard_energy'], {'name': '"""dashboard_energy"""'}), "('energy', dashboard_energy, name='dashboard_energy')\n", (340, 393), False, 'from django.urls import path, re_path\n'), ((399, 450), 'django.urls.path', 'path', (['"""cost"""', 'dashboard_cost'], {'name': '"""dashboard_cost"""'}), "('cost', dashboard_cost, name='dashboard_cost')\n", (403, 450), False, 'from django.urls import path, re_path\n')] |
ejgarcia1991/Courses-and-other-non-professional-projects | Coursera/Python for Everybody Specialization/Python for everybody basics/hourly rate.py | 94794dd1d6cf626de174330311e3fde4d10cd460 | hrs = input("Enter Hours:")
rate = input("Enter rate:")
pay = float(hrs) * float(rate)
print("Pay: " +str(pay)) | [] |
smunaut/litex-boards | litex_boards/platforms/xilinx_kcu105.py | caac75c7dbcba68d9f4fb948107cb5d6ff60e05f | #
# This file is part of LiteX-Boards.
#
# Copyright (c) 2017-2019 Florent Kermarrec <[email protected]>
# SPDX-License-Identifier: BSD-2-Clause
from litex.build.generic_platform import *
from litex.build.xilinx import XilinxPlatform, VivadoProgrammer
# IOs ----------------------------------------------------------------------------------------------
_io = [
# Clk / Rst
("clk125", 0,
Subsignal("p", Pins("G10"), IOStandard("LVDS")),
Subsignal("n", Pins("F10"), IOStandard("LVDS"))
),
("clk300", 0,
Subsignal("p", Pins("AK17"), IOStandard("DIFF_SSTL12")),
Subsignal("n", Pins("AK16"), IOStandard("DIFF_SSTL12"))
),
("cpu_reset", 0, Pins("AN8"), IOStandard("LVCMOS18")),
# Leds
("user_led", 0, Pins("AP8"), IOStandard("LVCMOS18")),
("user_led", 1, Pins("H23"), IOStandard("LVCMOS18")),
("user_led", 2, Pins("P20"), IOStandard("LVCMOS18")),
("user_led", 3, Pins("P21"), IOStandard("LVCMOS18")),
("user_led", 4, Pins("N22"), IOStandard("LVCMOS18")),
("user_led", 5, Pins("M22"), IOStandard("LVCMOS18")),
("user_led", 6, Pins("R23"), IOStandard("LVCMOS18")),
("user_led", 7, Pins("P23"), IOStandard("LVCMOS18")),
# Buttons
("user_btn_c", 0, Pins("AE10"), IOStandard("LVCMOS18")),
("user_btn_n", 0, Pins("AD10"), IOStandard("LVCMOS18")),
("user_btn_s", 0, Pins("AF8"), IOStandard("LVCMOS18")),
("user_btn_w", 0, Pins("AF9"), IOStandard("LVCMOS18")),
("user_btn_e", 0, Pins("AE8"), IOStandard("LVCMOS18")),
# Switches
("user_dip_btn", 0, Pins("AN16"), IOStandard("LVCMOS12")),
("user_dip_btn", 1, Pins("AN19"), IOStandard("LVCMOS12")),
("user_dip_btn", 2, Pins("AP18"), IOStandard("LVCMOS12")),
("user_dip_btn", 3, Pins("AN14"), IOStandard("LVCMOS12")),
# SMA
("user_sma_clock", 0,
Subsignal("p", Pins("D23"), IOStandard("LVDS")),
Subsignal("n", Pins("C23"), IOStandard("LVDS"))
),
("user_sma_clock_p", 0, Pins("D23"), IOStandard("LVCMOS18")),
("user_sma_clock_n", 0, Pins("C23"), IOStandard("LVCMOS18")),
("user_sma_gpio", 0,
Subsignal("p", Pins("H27"), IOStandard("LVDS")),
Subsignal("n", Pins("G27"), IOStandard("LVDS"))
),
("user_sma_gpio_p", 0, Pins("H27"), IOStandard("LVCMOS18")),
("user_sma_gpio_n", 0, Pins("G27"), IOStandard("LVCMOS18")),
# I2C
("i2c", 0,
Subsignal("scl", Pins("J24")),
Subsignal("sda", Pins("J25")),
IOStandard("LVCMOS18")
),
# Serial
("serial", 0,
Subsignal("cts", Pins("L23")),
Subsignal("rts", Pins("K27")),
Subsignal("tx", Pins("K26")),
Subsignal("rx", Pins("G25")),
IOStandard("LVCMOS18")
),
# SPIFlash
("spiflash", 0, # clock needs to be accessed through primitive
Subsignal("cs_n", Pins("U7")),
Subsignal("dq", Pins("AC7 AB7 AA7 Y7")),
IOStandard("LVCMOS18")
),
("spiflash", 1, # clock needs to be accessed through primitive
Subsignal("cs_n", Pins("G26")),
Subsignal("dq", Pins("M20 L20 R21 R22")),
IOStandard("LVCMOS18")
),
# SDCard
("spisdcard", 0,
Subsignal("clk", Pins("AL10")),
Subsignal("cs_n", Pins("AH8")),
Subsignal("mosi", Pins("AD9"), Misc("PULLUP")),
Subsignal("miso", Pins("AP9"), Misc("PULLUP")),
Misc("SLEW=FAST"),
IOStandard("LVCMOS18")
),
("sdcard", 0,
Subsignal("clk", Pins("AL10")),
Subsignal("cmd", Pins("AD9"), Misc("PULLUP True")),
Subsignal("data", Pins("AP9 AN9 AH9 AH8"), Misc("PULLUP True")),
Misc("SLEW=FAST"),
IOStandard("LVCMOS18")
),
# Rotary Encoder
("rotary", 0,
Subsignal("a", Pins("Y21")),
Subsignal("b", Pins("AD26")),
Subsignal("push", Pins("AF28")),
IOStandard("LVCMOS18")
),
# HDMI
("hdmi", 0,
Subsignal("d", Pins(
"AK11 AP11 AP13 AN13 AN11 AM11 AN12 AM12",
"AL12 AK12 AL13 AK13 AD11 AH12 AG12 AJ11",
"AG10 AK8")),
Subsignal("de", Pins("AE11")),
Subsignal("clk", Pins("AF13")),
Subsignal("vsync", Pins("AH13")),
Subsignal("hsync", Pins("AE13")),
Subsignal("spdif", Pins("AE12")),
Subsignal("spdif_out", Pins("AF12")),
IOStandard("LVCMOS18")
),
# DDR4 SDRAM
("ddram", 0,
Subsignal("a", Pins(
"AE17 AH17 AE18 AJ15 AG16 AL17 AK18 AG17",
"AF18 AH19 AF15 AD19 AJ14 AG19"),
IOStandard("SSTL12_DCI")),
Subsignal("ba", Pins("AF17 AL15"), IOStandard("SSTL12_DCI")),
Subsignal("bg", Pins("AG15"), IOStandard("SSTL12_DCI")),
Subsignal("ras_n", Pins("AF14"), IOStandard("SSTL12_DCI")), # A16
Subsignal("cas_n", Pins("AG14"), IOStandard("SSTL12_DCI")), # A15
Subsignal("we_n", Pins("AD16"), IOStandard("SSTL12_DCI")), # A14
Subsignal("cs_n", Pins("AL19"), IOStandard("SSTL12_DCI")),
Subsignal("act_n", Pins("AH14"), IOStandard("SSTL12_DCI")),
#Subsignal("ten", Pins("AH16"), IOStandard("SSTL12_DCI")),
#Subsignal("alert_n", Pins("AJ16"), IOStandard("SSTL12_DCI")),
#Subsignal("par", Pins("AD18"), IOStandard("SSTL12_DCI")),
Subsignal("dm", Pins("AD21 AE25 AJ21 AM21 AH26 AN26 AJ29 AL32"),
IOStandard("POD12_DCI")),
Subsignal("dq", Pins(
"AE23 AG20 AF22 AF20 AE22 AD20 AG22 AE20",
"AJ24 AG24 AJ23 AF23 AH23 AF24 AH22 AG25",
"AL22 AL25 AM20 AK23 AK22 AL24 AL20 AL23",
"AM24 AN23 AN24 AP23 AP25 AN22 AP24 AM22",
"AH28 AK26 AK28 AM27 AJ28 AH27 AK27 AM26",
"AL30 AP29 AM30 AN28 AL29 AP28 AM29 AN27",
"AH31 AH32 AJ34 AK31 AJ31 AJ30 AH34 AK32",
"AN33 AP33 AM34 AP31 AM32 AN31 AL34 AN32"),
IOStandard("POD12_DCI"),
Misc("PRE_EMPHASIS=RDRV_240"),
Misc("EQUALIZATION=EQ_LEVEL2")),
Subsignal("dqs_p", Pins("AG21 AH24 AJ20 AP20 AL27 AN29 AH33 AN34"),
IOStandard("DIFF_POD12_DCI"),
Misc("PRE_EMPHASIS=RDRV_240"),
Misc("EQUALIZATION=EQ_LEVEL2")),
Subsignal("dqs_n", Pins("AH21 AJ25 AK20 AP21 AL28 AP30 AJ33 AP34"),
IOStandard("DIFF_POD12_DCI"),
Misc("PRE_EMPHASIS=RDRV_240"),
Misc("EQUALIZATION=EQ_LEVEL2")),
Subsignal("clk_p", Pins("AE16"), IOStandard("DIFF_SSTL12_DCI")),
Subsignal("clk_n", Pins("AE15"), IOStandard("DIFF_SSTL12_DCI")),
Subsignal("cke", Pins("AD15"), IOStandard("SSTL12_DCI")),
Subsignal("odt", Pins("AJ18"), IOStandard("SSTL12_DCI")),
Subsignal("reset_n", Pins("AL18"), IOStandard("LVCMOS12")),
Misc("SLEW=FAST"),
),
# PCIe
("pcie_x1", 0,
Subsignal("rst_n", Pins("K22"), IOStandard("LVCMOS18")),
Subsignal("clk_p", Pins("AB6")),
Subsignal("clk_n", Pins("AB5")),
Subsignal("rx_p", Pins("AB2")),
Subsignal("rx_n", Pins("AB1")),
Subsignal("tx_p", Pins("AC4")),
Subsignal("tx_n", Pins("AC3"))
),
("pcie_x2", 0,
Subsignal("rst_n", Pins("K22"), IOStandard("LVCMOS18")),
Subsignal("clk_p", Pins("AB6")),
Subsignal("clk_n", Pins("AB5")),
Subsignal("rx_p", Pins("AB2 AD2")),
Subsignal("rx_n", Pins("AB1 AD1")),
Subsignal("tx_p", Pins("AC4 AE4")),
Subsignal("tx_n", Pins("AC3 AE3"))
),
("pcie_x4", 0,
Subsignal("rst_n", Pins("K22"), IOStandard("LVCMOS18")),
Subsignal("clk_p", Pins("AB6")),
Subsignal("clk_n", Pins("AB5")),
Subsignal("rx_p", Pins("AB2 AD2 AF2 AH2")),
Subsignal("rx_n", Pins("AB1 AD1 AF1 AH1")),
Subsignal("tx_p", Pins("AC4 AE4 AG4 AH6")),
Subsignal("tx_n", Pins("AC3 AE3 AG3 AH5"))
),
("pcie_x8", 0,
Subsignal("rst_n", Pins("K22"), IOStandard("LVCMOS18")),
Subsignal("clk_p", Pins("AB6")),
Subsignal("clk_n", Pins("AB5")),
Subsignal("rx_p", Pins("AB2 AD2 AF2 AH2 AJ4 AK2 AM2 AP2")),
Subsignal("rx_n", Pins("AB1 AD1 AF1 AH1 AJ3 AK1 AM1 AP1")),
Subsignal("tx_p", Pins("AC4 AE4 AG4 AH6 AK6 AL4 AM6 AN4")),
Subsignal("tx_n", Pins("AC3 AE3 AG3 AH5 AK5 AL3 AM5 AN3"))
),
# SGMII Clk
("sgmii_clock", 0,
Subsignal("p", Pins("P26"), IOStandard("LVDS_25")),
Subsignal("n", Pins("N26"), IOStandard("LVDS_25"))
),
# SI570
("si570_refclk", 0,
Subsignal("p", Pins("P6")),
Subsignal("n", Pins("P5"))
),
# SMA
("user_sma_mgt_refclk", 0,
Subsignal("p", Pins("V6")),
Subsignal("n", Pins("V5"))
),
("user_sma_mgt_tx", 0,
Subsignal("p", Pins("R4")),
Subsignal("n", Pins("R3"))
),
("user_sma_mgt_rx", 0,
Subsignal("p", Pins("P2")),
Subsignal("n", Pins("P1"))
),
# SFP
("sfp", 0,
Subsignal("txp", Pins("U4")),
Subsignal("txn", Pins("U3")),
Subsignal("rxp", Pins("T2")),
Subsignal("rxn", Pins("T1"))
),
("sfp_tx", 0,
Subsignal("p", Pins("U4")),
Subsignal("n", Pins("U3")),
),
("sfp_rx", 0,
Subsignal("p", Pins("T2")),
Subsignal("n", Pins("T1")),
),
("sfp_tx_disable_n", 0, Pins("AL8"), IOStandard("LVCMOS18")),
("sfp", 1,
Subsignal("txp", Pins("W4")),
Subsignal("txn", Pins("W3")),
Subsignal("rxp", Pins("V2")),
Subsignal("rxn", Pins("V1"))
),
("sfp_tx", 1,
Subsignal("p", Pins("W4")),
Subsignal("n", Pins("W3")),
),
("sfp_rx", 1,
Subsignal("p", Pins("V2")),
Subsignal("n", Pins("V1")),
),
("sfp_tx_disable_n", 1, Pins("D28"), IOStandard("LVCMOS18")),
]
# Connectors ---------------------------------------------------------------------------------------
_connectors = [
("HPC", {
"DP0_C2M_P" : "F6",
"DP0_C2M_N" : "F5",
"DP0_M2C_P" : "E4",
"DP0_M2C_N" : "E3",
"DP1_C2M_P" : "D6",
"DP1_C2M_N" : "D5",
"DP1_M2C_P" : "D2",
"DP1_M2C_N" : "D1",
"DP2_C2M_P" : "C4",
"DP2_C2M_N" : "C3",
"DP2_M2C_P" : "B2",
"DP2_M2C_N" : "B1",
"DP3_C2M_P" : "B6",
"DP3_C2M_N" : "B5",
"DP3_M2C_P" : "A4",
"DP3_M2C_N" : "A3",
"DP4_C2M_P" : "N4",
"DP4_C2M_N" : "N3",
"DP4_M2C_P" : "M2",
"DP4_M2C_N" : "M1",
"DP5_C2M_P" : "J4",
"DP5_C2M_N" : "J3",
"DP5_M2C_P" : "H2",
"DP5_M2C_N" : "H1",
"DP6_C2M_P" : "L4",
"DP6_C2M_N" : "L3",
"DP6_M2C_P" : "K2",
"DP6_M2C_N" : "K1",
"DP7_C2M_P" : "G4",
"DP7_C2M_N" : "G3",
"DP7_M2C_P" : "F2",
"DP7_M2C_N" : "F1",
"LA06_P" : "D13",
"LA06_N" : "C13",
"LA10_P" : "L8",
"LA10_N" : "K8",
"LA14_P" : "B10",
"LA14_N" : "A10",
"LA18_CC_P" : "E22",
"LA18_CC_N" : "E23",
"LA27_P" : "H21",
"LA27_N" : "G21",
"HA01_CC_P" : "E16",
"HA01_CC_N" : "D16",
"HA05_P" : "J15",
"HA05_N" : "J14",
"HA09_P" : "F18",
"HA09_N" : "F17",
"HA13_P" : "B14",
"HA13_N" : "A14",
"HA16_P" : "A19",
"HA16_N" : "A18",
"HA20_P" : "C19",
"HA20_N" : "B19",
"CLK1_M2C_P" : "E25",
"CLK1_M2C_N" : "D25",
"LA00_CC_P" : "H11",
"LA00_CC_N" : "G11",
"LA03_P" : "A13",
"LA03_N" : "A12",
"LA08_P" : "J8",
"LA08_N" : "H8",
"LA12_P" : "E10",
"LA12_N" : "D10",
"LA16_P" : "B9",
"LA16_N" : "A9",
"LA20_P" : "B24",
"LA20_N" : "A24",
"LA22_P" : "G24",
"LA22_N" : "F25",
"LA25_P" : "D20",
"LA25_N" : "D21",
"LA29_P" : "B20",
"LA29_N" : "A20",
"LA31_P" : "B25",
"LA31_N" : "A25",
"LA33_P" : "A27",
"LA33_N" : "A28",
"HA03_P" : "G15",
"HA03_N" : "G14",
"HA07_P" : "L19",
"HA07_N" : "L18",
"HA11_P" : "J19",
"HA11_N" : "J18",
"HA14_P" : "F15",
"HA14_N" : "F14",
"HA18_P" : "B17",
"HA18_N" : "B16",
"HA22_P" : "C18",
"HA22_N" : "C17",
"GBTCLK1_M2C_P" : "H6",
"GBTCLK1_M2C_N" : "H5",
"GBTCLK0_M2C_P" : "K6",
"GBTCLK0_M2C_N" : "K5",
"LA01_CC_P" : "G9",
"LA01_CC_N" : "F9",
"LA05_P" : "L13",
"LA05_N" : "K13",
"LA09_P" : "J9",
"LA09_N" : "H9",
"LA13_P" : "D9",
"LA13_N" : "C9",
"LA17_CC_P" : "D24",
"LA17_CC_N" : "C24",
"LA23_P" : "G22",
"LA23_N" : "F22",
"LA26_P" : "G20",
"LA26_N" : "F20",
"PG_M2C" : "L27",
"HA00_CC_P" : "G17",
"HA00_CC_N" : "G16",
"HA04_P" : "G19",
"HA04_N" : "F19",
"HA08_P" : "K18",
"HA08_N" : "K17",
"HA12_P" : "K16",
"HA12_N" : "J16",
"HA15_P" : "D14",
"HA15_N" : "C14",
"HA19_P" : "D19",
"HA19_N" : "D18",
"PRSNT_M2C_B" : "H24",
"CLK0_M2C_P" : "H12",
"CLK0_M2C_N" : "G12",
"LA02_P" : "K10",
"LA02_N" : "J10",
"LA04_P" : "L12",
"LA04_N" : "K12",
"LA07_P" : "F8",
"LA07_N" : "E8",
"LA11_P" : "K11",
"LA11_N" : "J11",
"LA15_P" : "D8",
"LA15_N" : "C8",
"LA19_P" : "C21",
"LA19_N" : "C22",
"LA21_P" : "F23",
"LA21_N" : "F24",
"LA24_P" : "E20",
"LA24_N" : "E21",
"LA28_P" : "B21",
"LA28_N" : "B22",
"LA30_P" : "C26",
"LA30_N" : "B26",
"LA32_P" : "E26",
"LA32_N" : "D26",
"HA02_P" : "H19",
"HA02_N" : "H18",
"HA06_P" : "L15",
"HA06_N" : "K15",
"HA10_P" : "H17",
"HA10_N" : "H16",
"HA17_CC_P" : "E18",
"HA17_CC_N" : "E17",
"HA21_P" : "E15",
"HA21_N" : "D15",
"HA23_P" : "B15",
"HA23_N" : "A15",
}
),
("LPC", {
"GBTCLK0_M2C_P" : "AA24",
"GBTCLK0_M2C_N" : "AA25",
"LA01_CC_P" : "W25",
"LA01_CC_N" : "Y25",
"LA05_P" : "V27",
"LA05_N" : "V28",
"LA09_P" : "V26",
"LA09_N" : "W26",
"LA13_P" : "AA20",
"LA13_N" : "AB20",
"LA17_CC_P" : "AA32",
"LA17_CC_N" : "AB32",
"LA23_P" : "AD30",
"LA23_N" : "AD31",
"LA26_P" : "AF33",
"LA26_N" : "AG34",
"CLK0_M2C_P" : "AA24",
"CLK0_M2C_N" : "AA25",
"LA02_P" : "AA22",
"LA02_N" : "AB22",
"LA04_P" : "U26",
"LA04_N" : "U27",
"LA07_P" : "V22",
"LA07_N" : "V23",
"LA11_P" : "V21",
"LA11_N" : "W21",
"LA15_P" : "AB25",
"LA15_N" : "AB26",
"LA19_P" : "AA29",
"LA19_N" : "AB29",
"LA21_P" : "AC33",
"LA21_N" : "AD33",
"LA24_P" : "AE32",
"LA24_N" : "AF32",
"LA28_P" : "V31",
"LA28_N" : "W31",
"LA30_P" : "Y31",
"LA30_N" : "Y32",
"LA32_P" : "W30",
"LA32_N" : "Y30",
"LA06_P" : "V29",
"LA06_N" : "W29",
"LA10_P" : "T22",
"LA10_N" : "T23",
"LA14_P" : "U21",
"LA14_N" : "U22",
"LA18_CC_P" : "AB30",
"LA18_CC_N" : "AB31",
"LA27_P" : "AG31",
"LA27_N" : "AG32",
"CLK1_M2C_P" : "AC31",
"CLK1_M2C_N" : "AC32",
"LA00_CC_P" : "W23",
"LA00_CC_N" : "W24",
"LA03_P" : "W28",
"LA03_N" : "Y28",
"LA08_P" : "U24",
"LA08_N" : "U25",
"LA12_P" : "AC22",
"LA12_N" : "AC23",
"LA16_P" : "AB21",
"LA16_N" : "AC21",
"LA20_P" : "AA34",
"LA20_N" : "AB34",
"LA22_P" : "AC34",
"LA22_N" : "AD34",
"LA25_P" : "AE33",
"LA25_N" : "AF34",
"LA29_P" : "U34",
"LA29_N" : "V34",
"LA31_P" : "V33",
"LA31_N" : "W34",
"LA33_P" : "W33",
"LA33_N" : "Y33",
}
),
("pmod0", "AK25 AN21 AH18 AM19 AE26 AF25 AE21 AM17"),
("pmod1", "AL14 AM14 AP16 AP15 AM16 AM15 AN18 AN17"),
]
# Platform -----------------------------------------------------------------------------------------
class Platform(XilinxPlatform):
default_clk_name = "clk125"
default_clk_period = 1e9/125e6
def __init__(self):
XilinxPlatform.__init__(self, "xcku040-ffva1156-2-e", _io, _connectors, toolchain="vivado")
def create_programmer(self):
return VivadoProgrammer()
def do_finalize(self, fragment):
XilinxPlatform.do_finalize(self, fragment)
self.add_period_constraint(self.lookup_request("clk125", loose=True), 1e9/125e6)
self.add_period_constraint(self.lookup_request("clk300", loose=True), 1e9/300e6)
self.add_platform_command("set_property INTERNAL_VREF 0.84 [get_iobanks 44]")
self.add_platform_command("set_property INTERNAL_VREF 0.84 [get_iobanks 45]")
self.add_platform_command("set_property INTERNAL_VREF 0.84 [get_iobanks 46]")
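
# Typical (illustrative) use from a LiteX design; the resource name below is
# one of those defined in _io above:
#
#   platform = Platform()
#   led = platform.request("user_led", 0)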
| [((18057, 18152), 'litex.build.xilinx.XilinxPlatform.__init__', 'XilinxPlatform.__init__', (['self', '"""xcku040-ffva1156-2-e"""', '_io', '_connectors'], {'toolchain': '"""vivado"""'}), "(self, 'xcku040-ffva1156-2-e', _io, _connectors,\n toolchain='vivado')\n", (18080, 18152), False, 'from litex.build.xilinx import XilinxPlatform, VivadoProgrammer\n'), ((18198, 18216), 'litex.build.xilinx.VivadoProgrammer', 'VivadoProgrammer', ([], {}), '()\n', (18214, 18216), False, 'from litex.build.xilinx import XilinxPlatform, VivadoProgrammer\n'), ((18263, 18305), 'litex.build.xilinx.XilinxPlatform.do_finalize', 'XilinxPlatform.do_finalize', (['self', 'fragment'], {}), '(self, fragment)\n', (18289, 18305), False, 'from litex.build.xilinx import XilinxPlatform, VivadoProgrammer\n')] |
erinleeryan/2020adventofcode | code/advent_of_code_day3.py | 69f21d3458f57d8fcf006c451416e0509a66cd7a | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import numpy as np
import math
# In[2]:
fileObj = open('../data/advent_of_code_input_day_three.txt', "r") #opens the file in read mode.
items = fileObj. read(). splitlines() #puts the file into an array.
# In[3]:
#print (items)
def split(line):
return list(line)
holding = []
for i, line in enumerate(items):
result = split(line)
holding.append(result)
holding = np.array(holding)
holding[holding == '.'] = 0
holding[holding == '#'] = 1
holding = holding.astype(int)
print (holding)
# In[7]:
def dup_and_count(rightstep, downstep, basedata):
needed_slope_elements = math.floor(basedata.shape[0]/downstep)
replications_needed = (needed_slope_elements* rightstep)/basedata.shape[1]
duplicated = np.tile(basedata, math.ceil(replications_needed))
right = np.arange(0,(needed_slope_elements)*rightstep, rightstep).astype(int)
down = np.arange(0,(needed_slope_elements)*downstep,downstep).astype(int)
moves = []
for ii in range(len(right)):
moves.append(duplicated[down[ii], right[ii]])
hits = np.sum(moves)
return hits
down1_right3 = dup_and_count(3,1,holding)
down1_right1 = dup_and_count(1,1,holding)
down1_right5 = dup_and_count(5,1,holding)
down1_right7 = dup_and_count(7,1,holding)
down2_right1 = dup_and_count(1,2,holding)
results = np.array([down1_right3, down1_right1, down1_right5, down1_right7, down2_right1], dtype=np.int64)
print(results)
product = np.prod(results)
print (product)
# In[ ]:
| [((438, 455), 'numpy.array', 'np.array', (['holding'], {}), '(holding)\n', (446, 455), True, 'import numpy as np\n'), ((1372, 1472), 'numpy.array', 'np.array', (['[down1_right3, down1_right1, down1_right5, down1_right7, down2_right1]'], {'dtype': 'np.int64'}), '([down1_right3, down1_right1, down1_right5, down1_right7,\n down2_right1], dtype=np.int64)\n', (1380, 1472), True, 'import numpy as np\n'), ((1495, 1511), 'numpy.prod', 'np.prod', (['results'], {}), '(results)\n', (1502, 1511), True, 'import numpy as np\n'), ((651, 691), 'math.floor', 'math.floor', (['(basedata.shape[0] / downstep)'], {}), '(basedata.shape[0] / downstep)\n', (661, 691), False, 'import math\n'), ((1120, 1133), 'numpy.sum', 'np.sum', (['moves'], {}), '(moves)\n', (1126, 1133), True, 'import numpy as np\n'), ((804, 834), 'math.ceil', 'math.ceil', (['replications_needed'], {}), '(replications_needed)\n', (813, 834), False, 'import math\n'), ((848, 906), 'numpy.arange', 'np.arange', (['(0)', '(needed_slope_elements * rightstep)', 'rightstep'], {}), '(0, needed_slope_elements * rightstep, rightstep)\n', (857, 906), True, 'import numpy as np\n'), ((929, 985), 'numpy.arange', 'np.arange', (['(0)', '(needed_slope_elements * downstep)', 'downstep'], {}), '(0, needed_slope_elements * downstep, downstep)\n', (938, 985), True, 'import numpy as np\n')] |
Wyverns010/Body-Keypoints-Detection | input_handler.py | 980445da5e87c898a00a8ef1c9e1e143d09d4643 | import os
import traceback
class InputHandler:
    IMAGES_PARENT_FOLDER = './images'

    def __init__(self):
        # Bug fix: this was a plain local variable before, so the attribute
        # never actually existed on the instance.
        self.filesList = []

    def listFiles(self, path=''):
        if path != '':
            self.IMAGES_PARENT_FOLDER = path
        try:
            # Bug fix: this previously assigned to self.listFiles, which
            # replaced the method itself with a list after the first call.
            self.filesList = [
                os.path.join(self.IMAGES_PARENT_FOLDER, imageFile)
                for imageFile in os.listdir(self.IMAGES_PARENT_FOLDER)
                if os.path.isfile(os.path.join(self.IMAGES_PARENT_FOLDER, imageFile))
            ]
        except OSError:
            # Bug fix: traceback.print_exec() does not exist; print_exc()
            # prints the traceback itself.
            traceback.print_exc()
        return self.filesList
if __name__ == '__main__':
obj = InputHandler()
print(obj.listFiles()) | [((290, 340), 'os.path.join', 'os.path.join', (['self.IMAGES_PARENT_FOLDER', 'imageFile'], {}), '(self.IMAGES_PARENT_FOLDER, imageFile)\n', (302, 340), False, 'import os\n'), ((357, 394), 'os.listdir', 'os.listdir', (['self.IMAGES_PARENT_FOLDER'], {}), '(self.IMAGES_PARENT_FOLDER)\n', (367, 394), False, 'import os\n'), ((528, 550), 'traceback.print_exec', 'traceback.print_exec', ([], {}), '()\n', (548, 550), False, 'import traceback\n'), ((442, 492), 'os.path.join', 'os.path.join', (['self.IMAGES_PARENT_FOLDER', 'imageFile'], {}), '(self.IMAGES_PARENT_FOLDER, imageFile)\n', (454, 492), False, 'import os\n')] |
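A short usage sketch of the class above; it assumes the default `./images` directory (or whatever path you pass) exists and is readable:

handler = InputHandler()
for image_path in handler.listFiles():         # scans the default './images' folder
    print(image_path)
print(handler.listFiles('/tmp/other_images'))  # or point it at another folder per call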
misc0110/bepasty-server | docker/autoconfig.py | 662179671220d680fed57aa90894ffebf57dd4c7 | #!/usr/bin/python
import os
import sys
SITENAME = os.environ.get("BEPASTY_SITENAME", None)
if SITENAME is None:
print("\n\nEnvironment variable BEPASTY_SITENAME must be set.")
sys.exit(1)
SECRET_KEY = os.environ.get("BEPASTY_SECRET_KEY", None)
if SECRET_KEY is None:
print("\n\nEnvironment variable BEPASTY_SECRET_KEY must be set.")
sys.exit(1)
APP_BASE_PATH = os.environ.get("BEPASTY_APP_BASE_PATH", None)
STORAGE_FILESYSTEM_DIRECTORY = os.environ.get(
"BEPASTY_STORAGE_FILESYSTEM_DIRECTORY", "/app/data",
)
DEFAULT_PERMISSIONS = os.environ.get("BEPASTY_DEFAULT_PERMISSIONS", "create,read")
PERMISSIONS = {}
admin_secret = os.environ.get("BEPASTY_ADMIN_SECRET", None)
if admin_secret is not None:
PERMISSIONS.update({admin_secret: "admin,list,create,modify,read,delete"})
try:
max_allowed_file_size = os.environ.get("BEPASTY_MAX_ALLOWED_FILE_SIZE", 5000000000)
MAX_ALLOWED_FILE_SIZE = int(max_allowed_file_size)
except ValueError as err:
print("\n\nInvalid BEPASTY_MAX_ALLOWED_FILE_SIZE: %s", str(err))
sys.exit(1)
try:
max_body_size = os.environ.get("BEPASTY_MAX_BODY_SIZE", 1040384)
MAX_BODY_SIZE = int(max_body_size)
except ValueError as err:
print("\n\nInvalid BEPASTY_MAX_BODY_SIZE: %s", str(err))
sys.exit(1)
| [((52, 92), 'os.environ.get', 'os.environ.get', (['"""BEPASTY_SITENAME"""', 'None'], {}), "('BEPASTY_SITENAME', None)\n", (66, 92), False, 'import os\n'), ((212, 254), 'os.environ.get', 'os.environ.get', (['"""BEPASTY_SECRET_KEY"""', 'None'], {}), "('BEPASTY_SECRET_KEY', None)\n", (226, 254), False, 'import os\n'), ((381, 426), 'os.environ.get', 'os.environ.get', (['"""BEPASTY_APP_BASE_PATH"""', 'None'], {}), "('BEPASTY_APP_BASE_PATH', None)\n", (395, 426), False, 'import os\n'), ((459, 526), 'os.environ.get', 'os.environ.get', (['"""BEPASTY_STORAGE_FILESYSTEM_DIRECTORY"""', '"""/app/data"""'], {}), "('BEPASTY_STORAGE_FILESYSTEM_DIRECTORY', '/app/data')\n", (473, 526), False, 'import os\n'), ((557, 617), 'os.environ.get', 'os.environ.get', (['"""BEPASTY_DEFAULT_PERMISSIONS"""', '"""create,read"""'], {}), "('BEPASTY_DEFAULT_PERMISSIONS', 'create,read')\n", (571, 617), False, 'import os\n'), ((651, 695), 'os.environ.get', 'os.environ.get', (['"""BEPASTY_ADMIN_SECRET"""', 'None'], {}), "('BEPASTY_ADMIN_SECRET', None)\n", (665, 695), False, 'import os\n'), ((186, 197), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (194, 197), False, 'import sys\n'), ((352, 363), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (360, 363), False, 'import sys\n'), ((838, 897), 'os.environ.get', 'os.environ.get', (['"""BEPASTY_MAX_ALLOWED_FILE_SIZE"""', '(5000000000)'], {}), "('BEPASTY_MAX_ALLOWED_FILE_SIZE', 5000000000)\n", (852, 897), False, 'import os\n'), ((1090, 1138), 'os.environ.get', 'os.environ.get', (['"""BEPASTY_MAX_BODY_SIZE"""', '(1040384)'], {}), "('BEPASTY_MAX_BODY_SIZE', 1040384)\n", (1104, 1138), False, 'import os\n'), ((1052, 1063), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1060, 1063), False, 'import sys\n'), ((1269, 1280), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1277, 1280), False, 'import sys\n')] |
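Since the module calls `sys.exit(1)` when a required variable is missing, a quick way to exercise it locally is to seed the environment before importing. A hedged sketch — the `autoconfig` module name matches the file above, and all values are illustrative:

import os

os.environ.setdefault("BEPASTY_SITENAME", "paste.example.org")
os.environ.setdefault("BEPASTY_SECRET_KEY", "change-me-in-production")
os.environ.setdefault("BEPASTY_ADMIN_SECRET", "admin-token")

import autoconfig  # the config module above; exits with status 1 if a required variable is missing

print(autoconfig.PERMISSIONS)            # {'admin-token': 'admin,list,create,modify,read,delete'}
print(autoconfig.MAX_ALLOWED_FILE_SIZE)  # 5000000000 unless overridden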
drslump/pysh | pysh/transforms/alpha/bangexpr.py | 673cdf2b5ea95dc3209cb294bb91cb2f298bb888 | from io import StringIO
import re
import tokenize
import os
from collections import deque, ChainMap
from functools import lru_cache
from enum import Enum
import pysh
from pysh.path import PathWrapper, Path
from typing import List, Callable, Iterator, Tuple, NamedTuple, Deque, Union, Any
TBangTransformer = Callable[ [List[str]], Iterator[str]]
# runtime symbols
__all__ = ['BangExpr', 'BangOp', 'BangSeq', 'BangGlob', 'BangEnv', 'BangBang']
class BangTokenType(Enum):
OPAQUE = 'OPAQUE'
GLOB = 'GLOB'
LOCAL = 'LOCAL'
ENV = 'ENV'
EXPR = 'EXPR'
OP = 'OP'
class BangToken(NamedTuple):
type: BangTokenType
value: str
span: Tuple[int, int]
TBangLexerToken = Tuple[str, str, Tuple[int,int]]
class BangLexer:
def _tokener(self, token, transformer=lambda x: x, **kwargs):
def cb(s, v):
v = transformer(v, **kwargs)
return None if v is None else (token, v, (s.match.start(), s.match.end()))
return cb
@lru_cache() # it's intended for this to be global
def build_scanner(self):
t = self._tokener
return re.Scanner([
(r'\#.+', t('COMMENT', lambda v: v[1:])),
(r'\\.', t('ESCAPE')),
(r"'( \\. | [^\\']+ )+'", t('SQS', lambda v: v[1:-1])),
(r'"( \\. | [^\\"]+ )+"', t('DQS', lambda v: v[1:-1])),
(r'\$[A-Za-z_][A-Za-z0-9_]*', t('VAR', lambda v: v[1:])),
(r'\${( \\. | [^\\}]+ )+}', t('EXPR', lambda v: v[2:-1])),
(r'[|<>^]+', t('OP')),
(r'[A-Za-z0-9_%*+:.,=/@~\[\]{}-]+', t('OPAQUE')),
(r'\s+', t('WS')),
], flags=re.X)
@lru_cache()
def build_dqs_scanner(self):
t = self._tokener
return re.Scanner([
(r'\\.', t('ESCAPE')),
(r'\$[A-Za-z_][A-Za-z0-9_]*', t('VAR', lambda v: v[1:])),
(r'\${( \\. | [^\\}]+ )+}', t('EXPR', lambda v: v[2:-1])),
(r'[^\\\$]+', t('SQS')) # handle as single quoted
], flags=re.X)
def scan_dqs(self, code: str, offset=0) -> Iterator[TBangLexerToken]:
        tokens, remaining = self.build_dqs_scanner().scan(code)
if remaining:
raise SyntaxError('Unexpected char <{}> at position {}'.format(remaining[0], len(code)-len(remaining)))
for tkn, val, pos in tokens:
yield tkn, val, (offset+pos[0], offset+pos[1])
def demux_dqs(self, tokens: Iterator[TBangLexerToken]) -> Iterator[TBangLexerToken]:
""" Split double quoted strings into parts
"""
for tkn, val, pos in tokens:
if tkn == 'DQS':
yield from self.scan_dqs(val, offset=pos[0]+1)
else:
yield tkn, val, pos
def scan(self, code: str) -> Iterator[BangToken]:
tokens, remaining = self.build_scanner().scan(code)
if remaining:
raise SyntaxError('Unexpected char at position {}'.format(len(code)-len(remaining)))
# Add a terminating token so we can simplify the parsing
tokens.append(('END', '', (len(code),len(code))))
last_token = last_pos = None
for token, value, pos in self.demux_dqs(tokens):
assert token != 'DQS' # double quoted are demuxed
# Inject whitespace operator if needed
if token != 'OP' and last_token and last_token == 'WS':
yield BangToken(BangTokenType.OP, ' ', last_pos)
if token in ('COMMENT', 'END'):
continue
elif token == 'WS':
pass
elif token == 'OP':
value = value.strip()
yield BangToken(BangTokenType.OP, value, pos)
else:
if token == 'OPAQUE':
                    if re.search(r'(?<!\\)[~*?{]', value):
yield BangToken(BangTokenType.GLOB, value, pos)
else:
yield BangToken(BangTokenType.OPAQUE, value, pos)
elif token in ('ESCAPE', 'SQS'):
#TODO: handle special escapes \n
value = re.sub(r'\\(.)', r'\1', value)
yield BangToken(BangTokenType.OPAQUE, value, pos)
elif token in ('VAR', 'EXPR'):
value = value.strip()
if value.isalnum() and not value.isdigit():
if value.isupper():
yield BangToken(BangTokenType.ENV, value, pos)
else:
yield BangToken(BangTokenType.LOCAL, value, pos)
else:
assert token == 'EXPR'
value = re.sub(r'\\(.)', r'\1', value)
yield BangToken(BangTokenType.EXPR, value, pos)
else:
assert False, 'unexpected {}, what happened?'.format(token)
last_token, last_pos = token, pos
class BangEnv:
__slots__ = ('name',)
def __init__(self, name):
self.name = name
def __repr__(self):
return 'BangEnv<{}>'.format(self.name)
class BangSeq:
__slots__ = ('items',)
def __init__(self, *items):
self.items = items
def __repr__(self):
return 'BangSeq<{!r}>'.format(self.items)
class BangOp:
__slots__ = ('op',)
def __init__(self, op):
self.op = op
def __repr__(self):
return 'BangOp<{}>'.format(self.op)
class BangGlob:
__slots__ = ('glob',)
def __init__(self, glob):
self.glob = glob
def __repr__(self):
return 'BangGlob<{}>'.format(self.glob)
class BangExpr:
__slots__ = ('args', 'vars')
def __init__(self, *args, locals=None, globals=None):
assert locals is not None
assert globals is not None
self.args = args
self.vars = ChainMap(locals, globals)
def eval_command(self, mut_args):
arg = mut_args.popleft()
cmd = self.vars.get(str(arg))
if cmd is None:
raise RuntimeError('Unable to find {}'.format(arg))
while mut_args:
if isinstance(mut_args[0], BangOp):
break
arg = mut_args.popleft()
cmd = cmd(self.eval_expr(arg))
return cmd
def eval_expr(self, expr: Any) -> Union[str, Iterator[Path]]:
if isinstance(expr, BangSeq):
return self.eval_seq(expr)
elif isinstance(expr, BangEnv):
return os.environ[expr.name]
elif isinstance(expr, BangGlob):
return PathWrapper().glob(expr.glob)
else:
return str(expr)
def eval_seq(self, seq: BangSeq) -> Union[str, Iterator[Path]]:
exprs: Deque[Any] = deque(seq.items)
accum = ''
while exprs:
expr = exprs.popleft()
if isinstance(expr, BangGlob):
if exprs:
raise RuntimeError('Globbing can only occur at the end of a seq')
return PathWrapper(accum).glob(expr.glob)
accum += self.eval_expr(expr)
return accum
def eval(self):
mut_args = deque(self.args)
cmd = self.eval_command(mut_args)
while mut_args:
arg = mut_args.popleft()
assert isinstance(arg, BangOp), 'Expected OP but found: {}'.format(arg)
assert len(mut_args) > 0, 'No operands left!'
if arg.op == '|':
cmd |= self.eval_command(mut_args)
elif arg.op == '^':
cmd ^= self.eval_command(mut_args)
elif arg.op == '>':
cmd = cmd > self.eval_expr(mut_args.popleft())
elif arg.op == '>>':
cmd = cmd >> self.eval_expr(mut_args.popleft())
else:
raise RuntimeError('Unsupported operator {}'.format(arg.op))
return cmd
def __str__(self):
return str(self.eval())
def __repr__(self):
return 'BangExpr<{!r}>'.format(self.args)
class BangBang:
__slots__ = ('code',)
def __init__(self, code):
self.code = code
def eval(self):
#TODO: Detect shebang and use it instead of default shell
import sys, subprocess
result = subprocess.run(
['bash', '-c', self.code],
encoding='utf-8',
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if result.stderr:
print(result.stderr, file=sys.stderr)
if result.returncode > 0:
if result.stdout:
print(result.stdout)
raise pysh.ExitStatusError(result.returncode)
return result.stdout
def __str__(self):
return str(self.eval())
def __repr__(self):
return 'BangBang<{}>'.format(self.code)
def parse_bangexpr(code: str) -> str:
as_str = lambda s: "'{}'".format(s.replace("\\", "\\\\").replace("'", "\\'"))
lexer = BangLexer().scan(code)
seq = []
exprs = []
while True:
tkn = next(lexer, None)
if tkn and tkn.type != BangTokenType.OP:
if tkn.type in (BangTokenType.LOCAL, BangTokenType.EXPR):
seq.append(tkn.value)
elif tkn.type == BangTokenType.ENV:
seq.append('pysh.BangEnv({})'.format(as_str(tkn.value)))
elif tkn.type == BangTokenType.OPAQUE:
seq.append('{}'.format(as_str(tkn.value)))
elif tkn.type == BangTokenType.GLOB:
seq.append('pysh.BangGlob({})'.format(as_str(tkn.value)))
else:
assert False, 'Unexpected token {}'.format(tkn.type)
continue
if seq:
if len(seq) > 1:
exprs.append('pysh.BangSeq({})'.format(', '.join(seq)))
else:
exprs.append(seq[0])
seq = []
if not tkn:
break
assert tkn.type == BangTokenType.OP
if tkn.value == ' ':
continue
exprs.append('pysh.BangOp("{}")'.format(tkn.value))
# We need to provide locals/globals so we can resolve commands to variables
return 'pysh.BangExpr({}, locals=locals(), globals=globals())'.format(', '.join(exprs))
def transform(code: StringIO, transformer: TBangTransformer) -> Iterator[str]:
""" Scans python code to transform bang expressions.
Given some python code it will extract bang expressions and process
them with a callback that can report back the transformation.
Returns a generator that allows to consume the transformed code
line by line.
"""
tokens = tokenize.generate_tokens(code.readline)
bangexpr = [] # type: List[str]
bangcont = False
prebang = None
ptkn = None
indent = 0
bang_indent = -100
last_bang_line = -100
for ctkn in tokens:
if ctkn.type == tokenize.INDENT:
indent += 1
if last_bang_line + 1 == ctkn.start[0]:
bang_indent = indent
elif ctkn.type == tokenize.DEDENT:
indent -= 1
if bang_indent > indent:
bang_indent = -100
# due to continuations we can't rely on NEWLINE tokens, instead we have
# use the lexical information to detect when we're on a new line
#TODO: Support indent/dedent for multiline
if ptkn and ctkn.start[0] > ptkn.start[0]:
if bangcont or bang_indent == indent:
if ctkn.type is tokenize.ENDMARKER:
raise SyntaxError('BangExpr continuation at program end')
line = ctkn.line.rstrip('\r\n')
bangexpr.append(line)
bangcont = line.endswith('\\')
last_bang_line = ctkn.start[0]
elif bangexpr:
lines = list(transformer(bangexpr))
assert len(lines) <= len(bangexpr)
if lines and prebang:
lines[0] = prebang + lines[0]
yield from lines
bangexpr = []
last_bang_line = ptkn.start[0]
else:
yield ptkn.line
ptkn = ctkn
if bangexpr:
continue
if ctkn.string == '!':
col = ctkn.start[1]
prebang = ctkn.line[0:col]
line = ctkn.line[col+1:].lstrip(' \t').rstrip('\r\n')
bangexpr.append(line.rstrip('\\'))
bangcont = line.endswith('\\')
last_bang_line = ctkn.start[0]
assert not bangexpr, bangexpr
def transformer(lines: List[str]) -> Iterator[str]:
if lines[0].startswith('!'):
#TODO: Detect $ident to expose them on env when evaluated
lines[0] = lines[0][1:]
code = '\n'.join(lines)
        code = code.strip().replace("\\", "\\\\").replace("'", "\\'")  # escape backslashes before quotes
code = "pysh.BangBang('{}')".format(code)
lines = code.split('\n')
for line in lines:
yield line
else:
yield from parse_bangexpr(' '.join(lines)).split('\n')
from io import StringIO
code = r'''
foo = ! ls foo${bar}.* \
| grep foo
> /dev/null
foo = r' ls foo${bar} ' >> expr
expr<' ls foo${bar} '
!! #!/bin/fish
ls .*
'''.strip()
#TODO: !! is probably better solved with:
# locals are solved with inspect.frame.f_locals
# sh << r'''
# # << means with variables interpolated
# # < is plain text
# ls .*
# '''
for line in transform(StringIO(code), transformer):
print(line.rstrip('\n'))
from pysh.command import command
ls = command('ls')
grep = command('grep')
bar = 10
print('::BangExpr::')
be = BangExpr('ls', BangSeq('foo', bar, BangGlob('.*')), BangOp("|"), 'grep', 'foo', 'baz', BangOp(">"), '/dev/null', locals=locals(), globals=globals())
# print(be)
print('::BangBang::')
bb = BangBang('''#!/bin/bash
ls *.py''')
print(bb) | [((13474, 13487), 'pysh.command.command', 'command', (['"""ls"""'], {}), "('ls')\n", (13481, 13487), False, 'from pysh.command import command\n'), ((13495, 13510), 'pysh.command.command', 'command', (['"""grep"""'], {}), "('grep')\n", (13502, 13510), False, 'from pysh.command import command\n'), ((989, 1000), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (998, 1000), False, 'from functools import lru_cache\n'), ((1646, 1657), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (1655, 1657), False, 'from functools import lru_cache\n'), ((10568, 10607), 'tokenize.generate_tokens', 'tokenize.generate_tokens', (['code.readline'], {}), '(code.readline)\n', (10592, 10607), False, 'import tokenize\n'), ((13375, 13389), 'io.StringIO', 'StringIO', (['code'], {}), '(code)\n', (13383, 13389), False, 'from io import StringIO\n'), ((5822, 5847), 'collections.ChainMap', 'ChainMap', (['locals', 'globals'], {}), '(locals, globals)\n', (5830, 5847), False, 'from collections import deque, ChainMap\n'), ((6697, 6713), 'collections.deque', 'deque', (['seq.items'], {}), '(seq.items)\n', (6702, 6713), False, 'from collections import deque, ChainMap\n'), ((7108, 7124), 'collections.deque', 'deque', (['self.args'], {}), '(self.args)\n', (7113, 7124), False, 'from collections import deque, ChainMap\n'), ((8208, 8320), 'subprocess.run', 'subprocess.run', (["['bash', '-c', self.code]"], {'encoding': '"""utf-8"""', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['bash', '-c', self.code], encoding='utf-8', stdout=\n subprocess.PIPE, stderr=subprocess.PIPE)\n", (8222, 8320), False, 'import sys, subprocess\n'), ((8550, 8589), 'pysh.ExitStatusError', 'pysh.ExitStatusError', (['result.returncode'], {}), '(result.returncode)\n', (8570, 8589), False, 'import pysh\n'), ((6968, 6986), 'pysh.path.PathWrapper', 'PathWrapper', (['accum'], {}), '(accum)\n', (6979, 6986), False, 'from pysh.path import PathWrapper, Path\n'), ((3748, 3783), 're.search', 're.search', (['"""(?!<\\\\\\\\)[~*?{]"""', 'value'], {}), "('(?!<\\\\\\\\)[~*?{]', value)\n", (3757, 3783), False, 'import re\n'), ((6527, 6540), 'pysh.path.PathWrapper', 'PathWrapper', ([], {}), '()\n', (6538, 6540), False, 'from pysh.path import PathWrapper, Path\n'), ((4086, 4117), 're.sub', 're.sub', (['"""\\\\\\\\(.)"""', '"""\\\\1"""', 'value'], {}), "('\\\\\\\\(.)', '\\\\1', value)\n", (4092, 4117), False, 'import re\n'), ((4671, 4702), 're.sub', 're.sub', (['"""\\\\\\\\(.)"""', '"""\\\\1"""', 'value'], {}), "('\\\\\\\\(.)', '\\\\1', value)\n", (4677, 4702), False, 'import re\n')] |
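To see the lexer stage in isolation, the scanner can be driven directly. Roughly, a command line decomposes into OPAQUE/LOCAL/GLOB/OP tokens as sketched here (the listed output is approximate):

for token in BangLexer().scan('ls foo${bar}.* | grep foo'):
    print(token.type.name, repr(token.value), token.span)
# roughly: OPAQUE 'ls', OP ' ', OPAQUE 'foo', LOCAL 'bar', GLOB '.*',
#          OP '|', OP ' ', OPAQUE 'grep', OP ' ', OPAQUE 'foo'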
Vikas-kum/incubator-mxnet | example/bayesian-methods/data_loader.py | ba02bf2fe2da423caa59ddb3fd5e433b90b730bf | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import numpy
import os
import ssl
def load_mnist(training_num=50000):
data_path = os.path.join(os.path.dirname(os.path.realpath('__file__')), 'mnist.npz')
if not os.path.isfile(data_path):
from six.moves import urllib
origin = (
'https://github.com/sxjscience/mxnet/raw/master/example/bayesian-methods/mnist.npz'
)
print('Downloading data from %s to %s' % (origin, data_path))
ctx = ssl._create_unverified_context()
with urllib.request.urlopen(origin, context=ctx) as u, open(data_path, 'wb') as f:
f.write(u.read())
print('Done!')
dat = numpy.load(data_path)
X = (dat['X'][:training_num] / 126.0).astype('float32')
Y = dat['Y'][:training_num]
X_test = (dat['X_test'] / 126.0).astype('float32')
Y_test = dat['Y_test']
Y = Y.reshape((Y.shape[0],))
Y_test = Y_test.reshape((Y_test.shape[0],))
return X, Y, X_test, Y_test
def load_toy():
training_data = numpy.loadtxt('toy_data_train.txt')
testing_data = numpy.loadtxt('toy_data_test_whole.txt')
X = training_data[:, 0].reshape((training_data.shape[0], 1))
Y = training_data[:, 1].reshape((training_data.shape[0], 1))
X_test = testing_data[:, 0].reshape((testing_data.shape[0], 1))
Y_test = testing_data[:, 1].reshape((testing_data.shape[0], 1))
return X, Y, X_test, Y_test
def load_synthetic(theta1, theta2, sigmax, num=20):
flag = numpy.random.randint(0, 2, (num,))
X = flag * numpy.random.normal(theta1, sigmax, (num,)) \
+ (1.0 - flag) * numpy.random.normal(theta1 + theta2, sigmax, (num,))
return X
| [((1456, 1477), 'numpy.load', 'numpy.load', (['data_path'], {}), '(data_path)\n', (1466, 1477), False, 'import numpy\n'), ((1803, 1838), 'numpy.loadtxt', 'numpy.loadtxt', (['"""toy_data_train.txt"""'], {}), "('toy_data_train.txt')\n", (1816, 1838), False, 'import numpy\n'), ((1858, 1898), 'numpy.loadtxt', 'numpy.loadtxt', (['"""toy_data_test_whole.txt"""'], {}), "('toy_data_test_whole.txt')\n", (1871, 1898), False, 'import numpy\n'), ((2262, 2296), 'numpy.random.randint', 'numpy.random.randint', (['(0)', '(2)', '(num,)'], {}), '(0, 2, (num,))\n', (2282, 2296), False, 'import numpy\n'), ((996, 1021), 'os.path.isfile', 'os.path.isfile', (['data_path'], {}), '(data_path)\n', (1010, 1021), False, 'import os\n'), ((1269, 1301), 'ssl._create_unverified_context', 'ssl._create_unverified_context', ([], {}), '()\n', (1299, 1301), False, 'import ssl\n'), ((941, 969), 'os.path.realpath', 'os.path.realpath', (['"""__file__"""'], {}), "('__file__')\n", (957, 969), False, 'import os\n'), ((1315, 1358), 'six.moves.urllib.request.urlopen', 'urllib.request.urlopen', (['origin'], {'context': 'ctx'}), '(origin, context=ctx)\n', (1337, 1358), False, 'from six.moves import urllib\n'), ((2312, 2355), 'numpy.random.normal', 'numpy.random.normal', (['theta1', 'sigmax', '(num,)'], {}), '(theta1, sigmax, (num,))\n', (2331, 2355), False, 'import numpy\n'), ((2383, 2435), 'numpy.random.normal', 'numpy.random.normal', (['(theta1 + theta2)', 'sigmax', '(num,)'], {}), '(theta1 + theta2, sigmax, (num,))\n', (2402, 2435), False, 'import numpy\n')] |
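A quick sanity check of the synthetic sampler above: with a 50/50 mixture the empirical mean should sit near the midpoint of the two component means.

X = load_synthetic(theta1=0.0, theta2=1.0, sigmax=0.5, num=100000)
print(X.shape)   # (100000,)
print(X.mean())  # roughly 0.5 = (theta1 + (theta1 + theta2)) / 2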
mickeyckm/nanodegree-freshtomatoes | start.py | 12776f7e46d6c42a4755a0b81e60eb1a5a65de08 | import os
import tmdbsimple as tmdb
import media
import fresh_tomatoes as ft
movies = []
if os.environ.get('TMDB_API', False):
# Retrieve API KEY
tmdb.API_KEY = os.environ['TMDB_API']
# TMDB Movie Ids
movie_ids = [271110, 297761, 246655, 278154, 135397, 188927]
# Get Configuration
configuration = tmdb.Configuration().info()
image_base_url = configuration['images']['secure_base_url']
image_width = "w500"
for movie_id in movie_ids:
m = tmdb.Movies(movie_id)
# Retrieve Image URL
minfo = m.info()
poster_image_url = image_base_url + image_width + minfo['poster_path']
# Retrieve Youtube Video URL
videos = m.videos()
video = videos['results'][0]
youtube_url = 'https://youtube.com/watch?v=' + video['key']
# Append Movie object
movie = media.Movie(m.title)
movie.storyline = m.overview
movie.poster_url = poster_image_url
movie.trailer_url = youtube_url
movies.append(movie)
else:
# Avatar
avatar = media.Movie("Avatar")
avatar.storyline = ("A paraplegic marine dispatched to the moon Pandora "
"on a unique mission becomes torn between following "
"his orders and protecting the world he feels is "
"his home.")
avatar.poster_url = ("https://upload.wikimedia.org/wikipedia/"
"en/b/b0/Avatar-Teaser-Poster.jpg")
avatar.trailer_url = "https://www.youtube.com/watch?v=-9ceBgWV8io"
# Deadpool
deadpool = media.Movie("Deadpool")
deadpool.storyline = ("A fast-talking mercenary with a morbid sense of "
"humor is subjected to a rogue experiment that "
"leaves him with accelerated healing powers and a "
"quest for revenge.")
deadpool.poster_url = ("https://upload.wikimedia.org/wikipedia/en/4/46/"
"Deadpool_poster.jpg")
deadpool.trailer_url = "https://www.youtube.com/watch?v=gtTfd6tISfw"
# Ghostbusters
ghostbusters = media.Movie("Ghostbusters")
ghostbusters.storyline = ("Following a ghost invasion of Manhattan, "
"paranormal enthusiasts Erin Gilbert and Abby "
"Yates, nuclear engineer Jillian Holtzmann, "
"and subway worker Patty Tolan band together "
"to stop the otherworldly threat.")
ghostbusters.poster_url = ("https://upload.wikimedia.org/wikipedia/"
"en/3/32/Ghostbusters_2016_film_poster.png")
ghostbusters.trailer_url = "https://www.youtube.com/watch?v=w3ugHP-yZXw"
# Olympus
olympus = media.Movie("Olympus Has Fallen")
olympus.storyline = ("Disgraced Secret Service agent (and former "
"presidential guard) Mike Banning finds himself "
"trapped inside the White House in the wake of a "
"terrorist attack; using his inside knowledge, "
"Banning works with national security to rescue "
"the President from his kidnappers.")
olympus.poster_url = ("https://upload.wikimedia.org/wikipedia/en/b/bf/"
"Olympus_Has_Fallen_poster.jpg")
olympus.trailer_url = "https://www.youtube.com/watch?v=vwx1f0kyNwI"
# Angry Birds
angry_birds = media.Movie("The Angry Birds Movie")
angry_birds.storyline = ("Find out why the birds are so angry. When an "
"island populated by happy, flightless birds "
"is visited by mysterious green piggies, it's "
"up to three unlikely outcasts - Red, Chuck "
"and Bomb - to figure out what the pigs are up "
"to.")
angry_birds.poster_url = ("https://upload.wikimedia.org/wikipedia/en/f/"
"f9/The_Angry_Birds_Movie_poster.png")
angry_birds.trailer_url = "https://www.youtube.com/watch?v=1U2DKKqxHgE"
# Ironman
ironman = media.Movie("Iron Man")
ironman.storyline = ("After being held captive in an Afghan cave, "
"billionaire engineer Tony Stark creates a unique "
"weaponized suit of armor to fight evil.")
ironman.poster_url = ("https://upload.wikimedia.org/wikipedia/en/7/70/"
"Ironmanposter.JPG")
ironman.trailer_url = "https://www.youtube.com/watch?v=8hYlB38asDY"
movies = [avatar, deadpool, ghostbusters, olympus, angry_birds, ironman]
ft.open_movies_page(movies)
| [((94, 127), 'os.environ.get', 'os.environ.get', (['"""TMDB_API"""', '(False)'], {}), "('TMDB_API', False)\n", (108, 127), False, 'import os\n'), ((4715, 4742), 'fresh_tomatoes.open_movies_page', 'ft.open_movies_page', (['movies'], {}), '(movies)\n', (4734, 4742), True, 'import fresh_tomatoes as ft\n'), ((1066, 1087), 'media.Movie', 'media.Movie', (['"""Avatar"""'], {}), "('Avatar')\n", (1077, 1087), False, 'import media\n'), ((1586, 1609), 'media.Movie', 'media.Movie', (['"""Deadpool"""'], {}), "('Deadpool')\n", (1597, 1609), False, 'import media\n'), ((2127, 2154), 'media.Movie', 'media.Movie', (['"""Ghostbusters"""'], {}), "('Ghostbusters')\n", (2138, 2154), False, 'import media\n'), ((2781, 2814), 'media.Movie', 'media.Movie', (['"""Olympus Has Fallen"""'], {}), "('Olympus Has Fallen')\n", (2792, 2814), False, 'import media\n'), ((3493, 3529), 'media.Movie', 'media.Movie', (['"""The Angry Birds Movie"""'], {}), "('The Angry Birds Movie')\n", (3504, 3529), False, 'import media\n'), ((4200, 4223), 'media.Movie', 'media.Movie', (['"""Iron Man"""'], {}), "('Iron Man')\n", (4211, 4223), False, 'import media\n'), ((487, 508), 'tmdbsimple.Movies', 'tmdb.Movies', (['movie_id'], {}), '(movie_id)\n', (498, 508), True, 'import tmdbsimple as tmdb\n'), ((861, 881), 'media.Movie', 'media.Movie', (['m.title'], {}), '(m.title)\n', (872, 881), False, 'import media\n'), ((326, 346), 'tmdbsimple.Configuration', 'tmdb.Configuration', ([], {}), '()\n', (344, 346), True, 'import tmdbsimple as tmdb\n')] |
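The script only relies on `media.Movie` accepting a title and exposing three assignable attributes. A minimal sketch of what that module could contain — the project's real `media.py` is not shown here:

class Movie:
    """Holds the metadata fresh_tomatoes needs to render one movie tile."""

    def __init__(self, title):
        self.title = title
        self.storyline = ""
        self.poster_url = ""
        self.trailer_url = ""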
sarafs1926/qiskit-metal | qiskit_metal/_gui/elements_ui.py | cf2ce8125ebe8f21b6d1b85362466fd57db2cada | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './elements_ui.ui',
# licensing of './elements_ui.ui' applies.
#
# Created: Wed Jun 16 14:29:03 2021
# by: pyside2-uic running on PySide2 5.13.2
#
# WARNING! All changes made in this file will be lost!
from PySide2 import QtCore, QtGui, QtWidgets
class Ui_ElementsWindow(object):
def setupUi(self, ElementsWindow):
ElementsWindow.setObjectName("ElementsWindow")
ElementsWindow.resize(841, 623)
self.centralwidget = QtWidgets.QWidget(ElementsWindow)
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout_2.setSpacing(0)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setSizeConstraint(
QtWidgets.QLayout.SetDefaultConstraint)
self.verticalLayout.setObjectName("verticalLayout")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.btn_refresh = QtWidgets.QPushButton(self.centralwidget)
self.btn_refresh.setCursor(QtCore.Qt.ClosedHandCursor)
self.btn_refresh.setText("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/refresh"), QtGui.QIcon.Normal,
QtGui.QIcon.Off)
self.btn_refresh.setIcon(icon)
self.btn_refresh.setIconSize(QtCore.QSize(20, 20))
self.btn_refresh.setAutoDefault(False)
self.btn_refresh.setDefault(False)
self.btn_refresh.setFlat(True)
self.btn_refresh.setObjectName("btn_refresh")
self.horizontalLayout.addWidget(self.btn_refresh)
self.label = QtWidgets.QLabel(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum,
QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(
self.label.sizePolicy().hasHeightForWidth())
self.label.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label.setFont(font)
self.label.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing |
QtCore.Qt.AlignVCenter)
self.label.setObjectName("label")
self.horizontalLayout.addWidget(self.label)
self.combo_element_type = QtWidgets.QComboBox(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred,
QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(
self.combo_element_type.sizePolicy().hasHeightForWidth())
self.combo_element_type.setSizePolicy(sizePolicy)
self.combo_element_type.setCurrentText("")
self.combo_element_type.setSizeAdjustPolicy(
QtWidgets.QComboBox.AdjustToContents)
self.combo_element_type.setObjectName("combo_element_type")
self.horizontalLayout.addWidget(self.combo_element_type)
self.line = QtWidgets.QFrame(self.centralwidget)
self.line.setFrameShape(QtWidgets.QFrame.VLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.horizontalLayout.addWidget(self.line)
self.label_3 = QtWidgets.QLabel(self.centralwidget)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.horizontalLayout.addWidget(self.label_3)
self.label_2 = QtWidgets.QLabel(self.centralwidget)
self.label_2.setObjectName("label_2")
self.horizontalLayout.addWidget(self.label_2)
self.lineEdit = QtWidgets.QLineEdit(self.centralwidget)
self.lineEdit.setObjectName("lineEdit")
self.horizontalLayout.addWidget(self.lineEdit)
self.label_4 = QtWidgets.QLabel(self.centralwidget)
self.label_4.setObjectName("label_4")
self.horizontalLayout.addWidget(self.label_4)
self.lineEdit_2 = QtWidgets.QLineEdit(self.centralwidget)
self.lineEdit_2.setObjectName("lineEdit_2")
self.horizontalLayout.addWidget(self.lineEdit_2)
self.line_2 = QtWidgets.QFrame(self.centralwidget)
self.line_2.setFrameShape(QtWidgets.QFrame.VLine)
self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_2.setObjectName("line_2")
self.horizontalLayout.addWidget(self.line_2)
self.verticalLayout.addLayout(self.horizontalLayout)
self.tableElements = QtWidgets.QTableView(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(
self.tableElements.sizePolicy().hasHeightForWidth())
self.tableElements.setSizePolicy(sizePolicy)
self.tableElements.setProperty("showDropIndicator", False)
self.tableElements.setDragDropOverwriteMode(False)
self.tableElements.setAlternatingRowColors(True)
self.tableElements.setSortingEnabled(False)
self.tableElements.setObjectName("tableElements")
self.verticalLayout.addWidget(self.tableElements)
self.verticalLayout_2.addLayout(self.verticalLayout)
ElementsWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar()
self.menubar.setGeometry(QtCore.QRect(0, 0, 841, 22))
self.menubar.setObjectName("menubar")
ElementsWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(ElementsWindow)
self.statusbar.setEnabled(True)
self.statusbar.setObjectName("statusbar")
ElementsWindow.setStatusBar(self.statusbar)
self.retranslateUi(ElementsWindow)
QtCore.QObject.connect(self.combo_element_type,
QtCore.SIGNAL("currentIndexChanged(QString)"),
ElementsWindow.combo_element_type)
QtCore.QObject.connect(self.btn_refresh, QtCore.SIGNAL("clicked()"),
ElementsWindow.force_refresh)
QtCore.QMetaObject.connectSlotsByName(ElementsWindow)
def retranslateUi(self, ElementsWindow):
ElementsWindow.setWindowTitle(
QtWidgets.QApplication.translate("ElementsWindow", "MainWindow",
None, -1))
self.btn_refresh.setToolTip(
QtWidgets.QApplication.translate("ElementsWindow",
"Force refresh the table ", None,
-1))
self.btn_refresh.setStatusTip(
QtWidgets.QApplication.translate("ElementsWindow",
"Force refresh the table ", None,
-1))
self.btn_refresh.setWhatsThis(
QtWidgets.QApplication.translate("ElementsWindow",
"Force refresh the table ", None,
-1))
self.btn_refresh.setAccessibleDescription(
QtWidgets.QApplication.translate("ElementsWindow",
"Force refresh the table ", None,
-1))
self.label.setText(
QtWidgets.QApplication.translate("ElementsWindow", "Element type: ",
None, -1))
self.combo_element_type.setToolTip(
QtWidgets.QApplication.translate(
"ElementsWindow",
"<html><head/><body><p>Select the element table you wish to view</p></body></html>",
None, -1))
self.label_3.setText(
QtWidgets.QApplication.translate("ElementsWindow", " Filter: ",
None, -1))
self.label_2.setText(
QtWidgets.QApplication.translate("ElementsWindow", "Component: ",
None, -1))
self.label_4.setText(
QtWidgets.QApplication.translate("ElementsWindow", " Layer: ",
None, -1))
from . import main_window_rc_rc
| [((531, 564), 'PySide2.QtWidgets.QWidget', 'QtWidgets.QWidget', (['ElementsWindow'], {}), '(ElementsWindow)\n', (548, 564), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((655, 696), 'PySide2.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.centralwidget'], {}), '(self.centralwidget)\n', (676, 696), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((896, 919), 'PySide2.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', ([], {}), '()\n', (917, 919), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((1111, 1134), 'PySide2.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (1132, 1134), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((1226, 1267), 'PySide2.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1247, 1267), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((1383, 1396), 'PySide2.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (1394, 1396), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((1868, 1904), 'PySide2.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1884, 1904), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((1926, 2014), 'PySide2.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Minimum'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Minimum)\n', (1947, 2014), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((2292, 2305), 'PySide2.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (2303, 2305), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((2658, 2697), 'PySide2.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2677, 2697), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((2719, 2809), 'PySide2.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Minimum'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Minimum)\n', (2740, 2809), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((3405, 3441), 'PySide2.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3421, 3441), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((3670, 3706), 'PySide2.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3686, 3706), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((3722, 3735), 'PySide2.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (3733, 3735), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((3948, 3984), 'PySide2.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3964, 3984), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((4109, 4148), 'PySide2.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4128, 4148), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((4275, 4311), 'PySide2.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4291, 4311), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((4438, 4477), 'PySide2.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4457, 4477), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((4609, 4645), 'PySide2.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.centralwidget'], 
{}), '(self.centralwidget)\n', (4625, 4645), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((4951, 4991), 'PySide2.QtWidgets.QTableView', 'QtWidgets.QTableView', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4971, 4991), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((5013, 5105), 'PySide2.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Expanding'], {}), '(QtWidgets.QSizePolicy.Expanding, QtWidgets.\n QSizePolicy.Expanding)\n', (5034, 5105), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((5879, 5899), 'PySide2.QtWidgets.QMenuBar', 'QtWidgets.QMenuBar', ([], {}), '()\n', (5897, 5899), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((6081, 6117), 'PySide2.QtWidgets.QStatusBar', 'QtWidgets.QStatusBar', (['ElementsWindow'], {}), '(ElementsWindow)\n', (6101, 6117), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((6650, 6703), 'PySide2.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['ElementsWindow'], {}), '(ElementsWindow)\n', (6687, 6703), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((1420, 1446), 'PySide2.QtGui.QPixmap', 'QtGui.QPixmap', (['""":/refresh"""'], {}), "(':/refresh')\n", (1433, 1446), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((1584, 1604), 'PySide2.QtCore.QSize', 'QtCore.QSize', (['(20)', '(20)'], {}), '(20, 20)\n', (1596, 1604), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((5933, 5960), 'PySide2.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(841)', '(22)'], {}), '(0, 0, 841, 22)\n', (5945, 5960), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((6391, 6436), 'PySide2.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""currentIndexChanged(QString)"""'], {}), "('currentIndexChanged(QString)')\n", (6404, 6436), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((6553, 6579), 'PySide2.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""clicked()"""'], {}), "('clicked()')\n", (6566, 6579), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((6801, 6875), 'PySide2.QtWidgets.QApplication.translate', 'QtWidgets.QApplication.translate', (['"""ElementsWindow"""', '"""MainWindow"""', 'None', '(-1)'], {}), "('ElementsWindow', 'MainWindow', None, -1)\n", (6833, 6875), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((6971, 7063), 'PySide2.QtWidgets.QApplication.translate', 'QtWidgets.QApplication.translate', (['"""ElementsWindow"""', '"""Force refresh the table """', 'None', '(-1)'], {}), "('ElementsWindow',\n 'Force refresh the table ', None, -1)\n", (7003, 7063), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((7202, 7294), 'PySide2.QtWidgets.QApplication.translate', 'QtWidgets.QApplication.translate', (['"""ElementsWindow"""', '"""Force refresh the table """', 'None', '(-1)'], {}), "('ElementsWindow',\n 'Force refresh the table ', None, -1)\n", (7234, 7294), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((7433, 7525), 'PySide2.QtWidgets.QApplication.translate', 'QtWidgets.QApplication.translate', (['"""ElementsWindow"""', '"""Force refresh the table """', 'None', '(-1)'], {}), "('ElementsWindow',\n 'Force refresh the table ', None, -1)\n", (7465, 7525), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((7676, 7768), 'PySide2.QtWidgets.QApplication.translate', 'QtWidgets.QApplication.translate', (['"""ElementsWindow"""', '"""Force refresh the table """', 'None', '(-1)'], {}), "('ElementsWindow',\n 'Force refresh the table ', None, -1)\n", 
(7708, 7768), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((7896, 7974), 'PySide2.QtWidgets.QApplication.translate', 'QtWidgets.QApplication.translate', (['"""ElementsWindow"""', '"""Element type: """', 'None', '(-1)'], {}), "('ElementsWindow', 'Element type: ', None, -1)\n", (7928, 7974), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((8077, 8231), 'PySide2.QtWidgets.QApplication.translate', 'QtWidgets.QApplication.translate', (['"""ElementsWindow"""', '"""<html><head/><body><p>Select the element table you wish to view</p></body></html>"""', 'None', '(-1)'], {}), "('ElementsWindow',\n '<html><head/><body><p>Select the element table you wish to view</p></body></html>'\n , None, -1)\n", (8109, 8231), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((8315, 8390), 'PySide2.QtWidgets.QApplication.translate', 'QtWidgets.QApplication.translate', (['"""ElementsWindow"""', '""" Filter: """', 'None', '(-1)'], {}), "('ElementsWindow', ' Filter: ', None, -1)\n", (8347, 8390), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((8479, 8554), 'PySide2.QtWidgets.QApplication.translate', 'QtWidgets.QApplication.translate', (['"""ElementsWindow"""', '"""Component: """', 'None', '(-1)'], {}), "('ElementsWindow', 'Component: ', None, -1)\n", (8511, 8554), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n'), ((8643, 8717), 'PySide2.QtWidgets.QApplication.translate', 'QtWidgets.QApplication.translate', (['"""ElementsWindow"""', '""" Layer: """', 'None', '(-1)'], {}), "('ElementsWindow', ' Layer: ', None, -1)\n", (8675, 8717), False, 'from PySide2 import QtCore, QtGui, QtWidgets\n')] |
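Generated `pyside2-uic` classes like the one above are used by instantiating them against a window; the standard PySide2 pattern, sketched standalone:

import sys
from PySide2 import QtWidgets

app = QtWidgets.QApplication(sys.argv)
window = QtWidgets.QMainWindow()
ui = Ui_ElementsWindow()
ui.setupUi(window)  # builds the widgets and wires up the declared signal connections
window.show()
sys.exit(app.exec_())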
manishaverma1012/programs | Python/function.py | dd77546219eab2f2ee81dd0d599b78ebd8f95957 | def cube(number):
    return number * number * number

digit = input("The cube of which number do you want? > ")
result = cube(int(digit))
print(result)
| [] |
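Note that `int(digit)` raises ValueError on non-numeric input; a slightly hardened variant of the prompt:

while True:
    digit = input("The cube of which number do you want? > ")
    try:
        print(cube(int(digit)))
        break
    except ValueError:
        print("Please enter a whole number.")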
elifesciences/proofreader-python | tests/test_runner.py | 89d807253e17a1731c7ce15f7dd382e49c1c835a | try:
from unittest.mock import patch
except ImportError: # pragma: no cover
from mock import patch
from proofreader.runner import run, _run_command
def test_it_will_return_1_exit_code_on_failure(bad_py_file):
try:
run(targets=[bad_py_file.strpath])
except SystemExit as exception:
assert exception.code == 1
def test_it_will_return_zero_exit_code_on_success(good_py_file):
try:
run(targets=[good_py_file.strpath])
except SystemExit as exception:
assert exception.code == 0
def test_it_returns_zero_exit_code_on_builtin_shadowing_fail(builtin_fail_py_file):
try:
run(targets=[builtin_fail_py_file.strpath])
except SystemExit as exception:
assert exception.code == 0
def test_run_command_will_return_a_bool():
with patch('proofreader.runner.Popen') as mock_popen:
mock_popen.returncode = 0
result = _run_command('dummy_cmd', [''], [''])
assert isinstance(result, bool)
def test_will_return_zero_on_success_with_license_check(good_py_file):
try:
run(targets=[good_py_file.strpath], check_licenses=True)
except SystemExit as exception:
assert exception.code == 0
| [((238, 272), 'proofreader.runner.run', 'run', ([], {'targets': '[bad_py_file.strpath]'}), '(targets=[bad_py_file.strpath])\n', (241, 272), False, 'from proofreader.runner import run, _run_command\n'), ((428, 463), 'proofreader.runner.run', 'run', ([], {'targets': '[good_py_file.strpath]'}), '(targets=[good_py_file.strpath])\n', (431, 463), False, 'from proofreader.runner import run, _run_command\n'), ((638, 681), 'proofreader.runner.run', 'run', ([], {'targets': '[builtin_fail_py_file.strpath]'}), '(targets=[builtin_fail_py_file.strpath])\n', (641, 681), False, 'from proofreader.runner import run, _run_command\n'), ((807, 840), 'mock.patch', 'patch', (['"""proofreader.runner.Popen"""'], {}), "('proofreader.runner.Popen')\n", (812, 840), False, 'from mock import patch\n'), ((907, 944), 'proofreader.runner._run_command', '_run_command', (['"""dummy_cmd"""', "['']", "['']"], {}), "('dummy_cmd', [''], [''])\n", (919, 944), False, 'from proofreader.runner import run, _run_command\n'), ((1075, 1131), 'proofreader.runner.run', 'run', ([], {'targets': '[good_py_file.strpath]', 'check_licenses': '(True)'}), '(targets=[good_py_file.strpath], check_licenses=True)\n', (1078, 1131), False, 'from proofreader.runner import run, _run_command\n')] |
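The `good_py_file`/`bad_py_file` fixtures are defined elsewhere in the test suite; a hedged `conftest.py` sketch of what they could look like (file contents are illustrative):

import pytest

@pytest.fixture
def good_py_file(tmpdir):
    path = tmpdir.join("good.py")
    path.write('"""A clean module that should pass every check."""\nVALUE = 1\n')
    return path  # a py.path.local, so the tests can use .strpath

@pytest.fixture
def bad_py_file(tmpdir):
    path = tmpdir.join("bad.py")
    path.write("import os\nx=  1 ;\n")  # unused import and bad spacing: linters should fail
    return path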
wofeicaoge/Tanim | tanim/core/container/container.py | 8ef17834a4ba51092f28c0d5becec25aecd01a62 | from tanim.utils.config_ops import digest_config
from tanim.utils.iterables import list_update
# Currently, this is only used by both Scene and Mobject.
# Still, we abstract its functionality here, albeit purely nominally.
# All actual implementation has to be handled by derived classes for now.
class Container(object):
def __init__(self, **kwargs):
digest_config(self, kwargs)
self.submobjects = [] # Is it really better to name it submobjects?
def add(self, *mobjects):
if self in mobjects:
raise Exception("Mobject cannot contain self")
self.submobjects = list_update(self.submobjects, mobjects)
return self
def add_to_back(self, *mobjects):
self.remove(*mobjects)
self.submobjects = list(mobjects) + self.submobjects
return self
    def remove(self, *mobjects):
for mobject in mobjects:
for submod in self.submobjects:
if isinstance(submod, GroupContainer):
submod.remove(mobject)
elif mobject == submod:
self.submobjects.remove(mobject)
return self
class GroupContainer(Container):
    def __init__(self, *containers, **kwargs):
        # Initialise Container first so self.submobjects exists before add()
        super().__init__(**kwargs)
        self.add(*containers)
| [((368, 395), 'tanim.utils.config_ops.digest_config', 'digest_config', (['self', 'kwargs'], {}), '(self, kwargs)\n', (381, 395), False, 'from tanim.utils.config_ops import digest_config\n'), ((619, 658), 'tanim.utils.iterables.list_update', 'list_update', (['self.submobjects', 'mobjects'], {}), '(self.submobjects, mobjects)\n', (630, 658), False, 'from tanim.utils.iterables import list_update\n')] |
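A small sketch of the containment behaviour above, using plain Containers with no extra config:

parent = Container()
child_a, child_b = Container(), Container()
parent.add(child_a, child_b)    # submobjects == [child_a, child_b]
parent.add_to_back(child_b)     # submobjects == [child_b, child_a]
parent.remove(child_a)          # submobjects == [child_b]
print(len(parent.submobjects))  # 1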
ZACHSTRIVES/AUCSS-StaffPlatform | article.py | f2d6597853e85b06f057292025d83edbb4184361 | from config import *
def fetch_all_article():
try:
cur = db.cursor()
sql = "SELECT * FROM article WHERE article_status='N'"
db.ping(reconnect=True)
cur.execute(sql)
result = cur.fetchall()
db.commit()
cur.close()
return result
except Exception as e:
print(e)
def add_article_to_db(title, due):
try:
cur = db.cursor()
sql = "INSERT INTO article(article_title,article_dueday)VALUES ('%s','%s')" % (title, due)
db.ping(reconnect=True)
cur.execute(sql)
db.commit()
cur.close()
except Exception as e:
print(e)
def fetch_all_mkt_staff():
try:
cur = db.cursor()
sql = "SELECT Name,email FROM user WHERE type=5"
db.ping(reconnect=True)
cur.execute(sql)
result = cur.fetchall()
db.commit()
cur.close()
return result
except Exception as e:
print(e)
def get_article_id(title):
try:
cur = db.cursor()
sql = "SELECT article_id FROM article WHERE article_title='%s' AND article_status='N'" % title
db.ping(reconnect=True)
cur.execute(sql)
result = cur.fetchone()
db.commit()
cur.close()
return result
except Exception as e:
print(e)
def add_works_to_db(article_id, type, staff, work_due):
try:
cur = db.cursor()
sql = "INSERT INTO article_works(works_type,works_article,works_dueday,works_staff)VALUES (%s,%s,'%s','%s');" % (
type, article_id, work_due, staff)
db.ping(reconnect=True)
cur.execute(sql)
db.commit()
cur.close()
except Exception as e:
print(e)
def get_article_s_work(id):
try:
cur = db.cursor()
sql = "SELECT * FROM article_works WHERE works_article=%s ORDER BY works_type" % id
db.ping(reconnect=True)
cur.execute(sql)
result = cur.fetchall()
db.commit()
cur.close()
return result
except Exception as e:
print(e)
def get_user_name(email):
try:
cur = db.cursor()
sql = "SELECT Name FROM user WHERE email='%s'" % email
db.ping(reconnect=True)
cur.execute(sql)
result = cur.fetchone()
db.commit()
cur.close()
return result
except Exception as e:
print(e)
def get_works_list(articles):
    res = {}
    for article in articles:
        article_id = article[0]  # avoid shadowing the built-in id()
        work = []
        for w in get_article_s_work(article_id):
            work.append([w[0], w[1], w[3], get_user_name(w[5])[0]])
        res[article_id] = work
    return res
def get_your_task_with_article(email, id):
try:
cur = db.cursor()
sql = "SELECT * FROM article_works WHERE works_staff='%s' AND works_article=%s" % (email, id)
db.ping(reconnect=True)
cur.execute(sql)
result = cur.fetchall()
db.commit()
cur.close()
return result
except Exception as e:
print(e)
def get_task_list(email, articles):
res = {}
for a in articles:
        article_id = a[0]  # avoid shadowing the built-in id()
        tasks = get_your_task_with_article(email, article_id)
        res[article_id] = tasks
return res
def update_finish_status(type, id):
try:
type = int(type)
cur = db.cursor()
        if type == 1:
            sql = "UPDATE article SET banner_status='Y' WHERE article_id=%s" % id
        elif type == 2:
            sql = "UPDATE article SET text_status='Y' WHERE article_id=%s" % id
        elif type == 3:
            sql = "UPDATE article SET style_status='Y' WHERE article_id=%s" % id
        else:
            raise ValueError("unknown work type: %s" % type)
db.ping(reconnect=True)
cur.execute(sql)
db.commit()
cur.close()
except Exception as e:
print(e)
def update_task_status(id):
try:
cur = db.cursor()
sql = "UPDATE article_works SET is_finished='Y' WHERE works_num=%s" % id
db.ping(reconnect=True)
cur.execute(sql)
db.commit()
cur.close()
except Exception as e:
print(e)
def finish_task_in_db(task, article, type):
update_task_status(task)
update_finish_status(type, article)
def count_person_performance(type, email):
try:
cur = db.cursor()
sql = "SELECT * FROM article_works WHERE works_staff='%s' AND works_type=%s AND is_finished='Y'" % (email, type)
db.ping(reconnect=True)
cur.execute(sql)
res = cur.fetchall()
db.commit()
cur.close()
return res
except Exception as e:
print(e)
def count_performance():
all_staff = fetch_all_mkt_staff()
performance_list = []
for s in all_staff:
email = s[1]
banner = count_person_performance(1, email)
text = count_person_performance(2, email)
style = count_person_performance(3, email)
p_list = [s[0], len(banner), len(text), len(style)]
performance_list.append(p_list)
return performance_list
| [] |
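Every query in this module interpolates values with `%` string formatting, which is vulnerable to SQL injection. A sketch of the same lookup with driver-side parameter binding — assuming `db` is a PyMySQL-style connection, as the `ping(reconnect=True)` calls suggest:

def get_user_name_safe(email):
    try:
        db.ping(reconnect=True)
        cur = db.cursor()
        cur.execute("SELECT Name FROM user WHERE email=%s", (email,))  # the driver escapes the value
        result = cur.fetchone()
        cur.close()
        return result
    except Exception as e:
        print(e)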
dwang-ischool/w205 | 12-Querying-Data-II/just_filtering.py | ebcdf684dc653951691faaa2787896a2d2406539 | #!/usr/bin/env python
"""Extract events from kafka and write them to hdfs
"""
import json
from pyspark.sql import SparkSession, Row
from pyspark.sql.functions import udf
@udf('boolean')
def is_purchase(event_as_json):
event = json.loads(event_as_json)
if event['event_type'] == 'purchase_sword':
return True
return False
def main():
"""main
"""
spark = SparkSession \
.builder \
.appName("ExtractEventsJob") \
.getOrCreate()
raw_events = spark \
.read \
.format("kafka") \
.option("kafka.bootstrap.servers", "kafka:29092") \
.option("subscribe", "events") \
.option("startingOffsets", "earliest") \
.option("endingOffsets", "latest") \
.load()
purchase_events = raw_events \
.select(raw_events.value.cast('string').alias('raw'),
raw_events.timestamp.cast('string')) \
.filter(is_purchase('raw'))
extracted_purchase_events = purchase_events \
.rdd \
.map(lambda r: Row(timestamp=r.timestamp, **json.loads(r.raw))) \
.toDF()
extracted_purchase_events.printSchema()
extracted_purchase_events.show()
if __name__ == "__main__":
main()
| [((173, 187), 'pyspark.sql.functions.udf', 'udf', (['"""boolean"""'], {}), "('boolean')\n", (176, 187), False, 'from pyspark.sql.functions import udf\n'), ((232, 257), 'json.loads', 'json.loads', (['event_as_json'], {}), '(event_as_json)\n', (242, 257), False, 'import json\n'), ((389, 437), 'pyspark.sql.SparkSession.builder.appName', 'SparkSession.builder.appName', (['"""ExtractEventsJob"""'], {}), "('ExtractEventsJob')\n", (417, 437), False, 'from pyspark.sql import SparkSession, Row\n'), ((1072, 1089), 'json.loads', 'json.loads', (['r.raw'], {}), '(r.raw)\n', (1082, 1089), False, 'import json\n')] |
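The UDF only inspects `event_type`, so the predicate can be unit-tested without a Spark session — assuming pyspark's UserDefinedFunction exposes the wrapped callable as `.func`, as recent versions do:

import json

purchase = json.dumps({"event_type": "purchase_sword"})
other = json.dumps({"event_type": "default"})
print(is_purchase.func(purchase))  # True; .func is the plain Python callable behind the UDF
print(is_purchase.func(other))     # False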
navjotk/pysz | test.py | 6d75aa4fe24713ed893a2301c143006dace6fd77 | import numpy as np
from pysz import compress, decompress
def test_compress_decompress():
a = np.linspace(0, 100, num=1000000).reshape((100, 100, 100)).astype(np.float32)
tolerance = 0.0001
compressed = compress(a, tolerance=tolerance)
recovered = decompress(compressed, a.shape, a.dtype)
    assert a.shape == recovered.shape
    assert np.allclose(a, recovered, atol=tolerance)
test_compress_decompress()
| [((216, 248), 'pysz.compress', 'compress', (['a'], {'tolerance': 'tolerance'}), '(a, tolerance=tolerance)\n', (224, 248), False, 'from pysz import compress, decompress\n'), ((266, 306), 'pysz.decompress', 'decompress', (['compressed', 'a.shape', 'a.dtype'], {}), '(compressed, a.shape, a.dtype)\n', (276, 306), False, 'from pysz import compress, decompress\n'), ((362, 403), 'numpy.allclose', 'np.allclose', (['a', 'recovered'], {'atol': 'tolerance'}), '(a, recovered, atol=tolerance)\n', (373, 403), True, 'import numpy as np\n'), ((99, 131), 'numpy.linspace', 'np.linspace', (['(0)', '(100)'], {'num': '(1000000)'}), '(0, 100, num=1000000)\n', (110, 131), True, 'import numpy as np\n')] |
PasaLab/SparkDQ | sparkdq/outliers/params/KSigmaParams.py | 16d50210747ef7de03cf36d689ce26ff7445f63a | import json
from sparkdq.outliers.params.OutlierSolverParams import OutlierSolverParams
from sparkdq.outliers.OutlierSolver import OutlierSolver
class KSigmaParams(OutlierSolverParams):
def __init__(self, deviation=1.5):
self.deviation = deviation
def model(self):
return OutlierSolver.kSigma
@staticmethod
def from_json(json_str):
d = json.loads(json_str)
return KSigmaParams(d["deviation"])
| [((382, 402), 'json.loads', 'json.loads', (['json_str'], {}), '(json_str)\n', (392, 402), False, 'import json\n')] |
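Round-trip usage of the params class above:

params = KSigmaParams.from_json('{"deviation": 2.0}')
print(params.deviation)  # 2.0
print(params.model())    # OutlierSolver.kSigma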
dunzoit/alerta-contrib | webhooks/sentry/alerta_sentry.py | 57dd47d5bb0c994fce036ae1eea2c3a88ef352c4 |
from alerta.models.alert import Alert
from alerta.webhooks import WebhookBase
class SentryWebhook(WebhookBase):
def incoming(self, query_string, payload):
# For Sentry v9
# Defaults to value before Sentry v9
if 'request' in payload.get('event'):
key = 'request'
else:
key = 'sentry.interfaces.Http'
if payload.get('event')[key]['env'].get('ENV', 'prod') == 'prod':
environment = 'Production'
else:
environment = 'Development'
if payload['level'] == 'error':
severity = 'critical'
else:
severity = 'ok'
return Alert(
resource=payload['culprit'],
event=payload['event']['event_id'],
environment=environment,
severity=severity,
service=[payload['project']],
group='Application',
value=payload['level'],
text='{}\n{}\n{}'.format(payload['message'], payload['event'].get('title', ''), payload['url']),
tags=['{}={}'.format(k, v) for k, v in payload['event']['tags']],
attributes={'modules': ['{}=={}'.format(k, v) for k, v in payload['event']['modules'].items()]},
origin='sentry.io',
raw_data=str(payload)
)
| [] |
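A hedged sketch of the minimal Sentry payload shape the handler above consumes; the field names follow the accesses in `incoming`, and all values are illustrative:

payload = {
    "level": "error",
    "culprit": "app.views.checkout",
    "message": "ZeroDivisionError: division by zero",
    "url": "https://sentry.example.com/issues/1/",
    "project": "webshop",
    "event": {
        "event_id": "abc123",
        "title": "ZeroDivisionError",
        "tags": [["server_name", "web-1"]],
        "modules": {"django": "2.2.28"},
        "request": {"env": {"ENV": "prod"}},
    },
}

alert = SentryWebhook().incoming(query_string="", payload=payload)
print(alert.severity)  # 'critical', because level == 'error'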