import pytest
from django.urls import resolve, reverse

from begameshopapp.users.models import User

pytestmark = pytest.mark.django_db


def test_detail(user: User):
    assert (
        reverse("users:detail", kwargs={"username": user.username})
        == f"/users/{user.username}/"
    )
    assert resolve(f"/users/{user.username}/").view_name == "users:detail"


def test_update():
    assert reverse("users:update") == "/users/~update/"
    assert resolve("/users/~update/").view_name == "users:update"


def test_redirect():
    assert reverse("users:redirect") == "/users/~redirect/"
    assert resolve("/users/~redirect/").view_name == "users:redirect"
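These URL tests follow the cookiecutter-django layout; for context, a hypothetical sketch of the users URLconf they exercise (the real begameshopapp.users.urls module is not included here, and the view names are assumptions):

# Hypothetical URLconf matching the routes asserted above; view names are assumed.
from django.urls import path

from begameshopapp.users.views import (
    user_detail_view,
    user_redirect_view,
    user_update_view,
)

app_name = "users"
urlpatterns = [
    path("~redirect/", view=user_redirect_view, name="redirect"),
    path("~update/", view=user_update_view, name="update"),
    path("<str:username>/", view=user_detail_view, name="detail"),
]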
import numpy as np from tqdm import tqdm from math import exp import os import signal import json import argparse from dataset import CRSdataset from model import BERTModel, SASRecModel, SASBERT import torch.nn as nn from torch import optim import torch from nltk.translate.bleu_score import sentence_bleu import nltk import re import pickle import logging import time import torch.nn.functional as F import transformers from transformers import BertModel, BertTokenizer, BertConfig import pandas as pd import numpy as np from tqdm import tqdm from torch.utils.data import * import ipdb import math import random from os.path import join import inspect import re from torch.optim import Adam def var_name(p): for line in inspect.getframeinfo(inspect.currentframe().f_back)[3]: m = re.search(r'\bvarname\s*\(\s*([A-Za-z_][A-Za-z0-9_]*)\s*\)', line) if m: return m.group(1) def set_seed(args): random.seed(args.seed) np.random.seed(args.seed) torch.manual_seed(args.seed) if args.use_cuda: torch.backends.cudnn.deterministic = True torch.backends.cudnn.benchmark = False def create_logger(args): """ 将日志输出到日志文件和控制台 """ logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') # 创建一个handler,用于写入日志文件 file_handler = logging.FileHandler(filename=args.log_path) file_handler.setFormatter(formatter) file_handler.setLevel(logging.INFO) logger.addHandler(file_handler) # 创建一个handler,用于将日志输出到控制台 console = logging.StreamHandler() console.setLevel(logging.DEBUG) console.setFormatter(formatter) logger.addHandler(console) return logger def setup_args(): train = argparse.ArgumentParser() train.add_argument("-model_type", "--model_type", type=str, default='Ours') train.add_argument("-exp_name", "--exp_name", type=str, default='modelv1') # about train setting train.add_argument("-batch_size", "--batch_size", type=int, default=8) # todo train.add_argument("-lr_bert", "--lr_bert", type=float, default=1e-5) train.add_argument("-lr_sasrec", "--lr_sasrec", type=float, default=1e-3) train.add_argument("-epoch", "--epoch", type=int, default=500) train.add_argument("-use_cuda", "--use_cuda", type=bool, default=True) train.add_argument("-gpu", "--gpu", type=str, default='1') train.add_argument('--do_eval', action='store_true') train.add_argument("-use_size", "--use_size", type=int, default=-1) # pad_size,与其他模型不统一 train.add_argument("-seed", "--seed", type=int, default=43) # todo train.add_argument("-max_c_length", "--max_c_length", type=int, default=256) # pad_size,与其他模型不统一 # about model setting train.add_argument("-init_add", "--init_add", action="store_true", default=False) train.add_argument("-bert_path","--bert_path",type=str,\ default="../../pretrain_model/wwm_ext/", help='要加载的模型的位置') train.add_argument("-model_save_path", "--model_save_path", type=str, default='saved_model/{}') # todo train.add_argument("-sasrec_save_path","--sasrec_save_path",type=str, \ default='sasrec_{}.pth') # todo train.add_argument("-fusion_save_path","--fusion_save_path",type=str, \ default='fusion_save_path_{}.pth') # todo train.add_argument("-load_exp_name","--load_exp_name",type=str, \ default='v1') train.add_argument("-model_load_path", "--model_load_path", type=str, default='saved_model/{}') # todo train.add_argument("-load_model", "--load_model", action="store_true", default=False) train.add_argument("-sasrec_load_path","--sasrec_load_path",type=str,\ default="sasrec_{}.pth", help='要加载的模型的位置') train.add_argument("-fusion_load_path","--fusion_load_path",type=str, \ 
default='fusion_save_path_{}.pth') # todo # about dataset and data setting train.add_argument("--raw", action="store_true", default=False) train.add_argument("-train_data_file","--train_data_file",type=str,\ default="../../data/train_data.pkl", help='要处理的数据的位置') train.add_argument("-valid_data_file","--valid_data_file",type=str,\ default="../../data/valid_data.pkl", help='要处理的数据的位置') train.add_argument("-test_data_file","--test_data_file",type=str,\ default="../../data/test_data.pkl", help='要处理的数据的位置') train.add_argument("-vocab_path","--vocab_path",type=str,\ default="../../pretrain_model/wwm_ext/vocab.txt", help='用于初始化分词器的字典') # other train.add_argument('--log_path', default='log/{}.log', type=str, required=False, help='训练日志存放位置') #todo # SASRec train.add_argument("--hidden_size", type=int, default=50, \ help="hidden size of transformer model") train.add_argument("--num_hidden_layers", type=int, default=2, \ help="number of layers") train.add_argument('--num_attention_heads', default=1, type=int) train.add_argument('--hidden_act', default="gelu", type=str) # gelu relu train.add_argument("--attention_probs_dropout_prob", type=float, \ default=0.2, help="attention dropout p") train.add_argument("--hidden_dropout_prob", type=float, default=0.2, \ help="hidden dropout p") train.add_argument("--initializer_range", type=float, default=0.02) train.add_argument('--max_seq_length', default=100, type=int) train.add_argument('--item_size', default=33834, type=int) # train.add_argument("--weight_decay", type=float, default=0.0000, help="weight_decay of adam") train.add_argument("--adam_beta1", type=float, default=0.9, help="adam first beta value") train.add_argument("--adam_beta2", type=float, default=0.99, help="adam second beta value") train.add_argument("--sasrec_emb_save_path", type=str, default='saved_model/sasrec_embed.pth') train.add_argument("--is_save_sasrec_embed", default=False, action='store_true') return train class TrainLoop_Ours(): def __init__(self, opt, args): self.opt = opt self.args = args self.batch_size = self.opt['batch_size'] self.epoch = self.opt['epoch'] self.use_cuda = opt['use_cuda'] self.device = "cuda:{}".format( self.args.gpu) if self.use_cuda else 'cpu' self.args.device = self.device self.build_data() self.build_model() self.optimizer = self.model.get_optimizer() def build_data(self): # 初始化分词器 self.tokenizer = BertTokenizer( vocab_file=self.opt['vocab_path']) # 初始化分词器 # build and save self.dataset self.dataset = {'train': None, 'valid': None, 'test': None} self.dataset_loader = {'train': None, 'valid': None, 'test': None} for subset in self.dataset: self.dataset[subset] = CRSdataset(logger, subset, self.opt[f'{subset}_data_file'], self.args, self.tokenizer) self.dataset_loader[subset] = torch.utils.data.DataLoader( dataset=self.dataset[subset], batch_size=self.batch_size, shuffle=True) # self.args.item_size += 1 self.movie_num = self.dataset['train'].movie_num self.args.item_size = self.dataset['train'].movie_num def build_model(self): # BERT用来编码用户回复历史 # SAS用来编码用户交互的item历史 self.model = SASBERT(self.opt, self.args, self.movie_num) if self.use_cuda: self.model.to(self.device) # todo def train(self): losses = [] # 预报一次清零一IC best_val_NDCG = 0.0 gen_stop = False patience = 0 max_patience = 3 for i in range(self.epoch): train_loss = [] for batch_idx, batch_data in tqdm( enumerate(self.dataset_loader['train'])): ####################################### 检验输入输出ok # print("[Context] ", batch_data[0]) # print("[Context] ", '\n'.join(self.vector2sentence(batch_data[0]))) # 
print("[Movie]", batch_data[3]) # ipdb.set_trace() self.model.train() # model前向 self.zero_grad() # 梯度清0 batch_data = [data.to(self.device) for data in batch_data] logit = self.model(batch_data) y = batch_data[3] loss = self.model.compute_loss(logit, y, 'train') train_loss.append(loss.item()) losses.append(loss.item()) loss.backward() # model反向, 计算梯度 self.optimizer.step() # 利用梯度更新参数 if (batch_idx + 1) % 50 == 0: # 从上次预报到现在为止的loss均值,每50个batch预报一次 loss = sum(losses) / len(losses) logger.info('loss is %.4f' % (loss)) losses = [] logger.info( f'Epoch {i}, train loss = {sum(train_loss)/len(train_loss)}') # 验证集 # metrics_test = self.val('train') metrics_test = self.val('valid') _ = self.val('test') if best_val_NDCG > metrics_test["NDCG50"]: patience += 1 logger.info(f"[Patience = {patience}]") if patience >= max_patience: gen_stop = True else: patience = 0 best_val_NDCG = metrics_test["NDCG50"] self.model.save_model('BERT SASRec Fusion') logger.info(f"[Model saved]") if gen_stop == True: break def val(self, subset): assert subset in ['train', 'test', 'valid'] self.model.eval() val_dataset_loader = self.dataset_loader[subset] metrics_test = { "Loss": 0, "NDCG1": 0, "NDCG10": 0, "NDCG50": 0, "MRR1": 0, "MRR10": 0, "MRR50": 0, "count": 0 } losses = [] # for batch_idx, batch_data in tqdm(enumerate(val_dataset_loader)): for batch_idx, batch_data in enumerate(val_dataset_loader): # print("[Context] ", batch_data[0]) # print("[Context] ", '\n'.join(self.vector2sentence(batch_data[0]))) # print("[MovieHistory]\n", batch_data[4]) # print("[MovieMask]\n", batch_data[6]) # print("[Movie]", batch_data[3]) # ipdb.set_trace() with torch.no_grad(): # 不需要反向传播梯度 batch_data = [data.to(self.device) for data in batch_data] logit = self.model(batch_data) # 前向推断 y = batch_data[3] # Movie loss = self.model.compute_loss(logit, y) self.compute_metircs(logit, y, metrics_test) losses.append(loss.item()) # test 结束 metrics_test['Loss'] = sum(losses) / len(losses) for key in metrics_test: if 'NDCG' in key or 'MRR' in key: # metrics_test[key] = round(metrics_test[key] / metrics_test['count'] * 3, 4) metrics_test[key] = round(metrics_test[key] / metrics_test['count'], 4) logger.info(f"{subset} set's metrics = {metrics_test}") return metrics_test def compute_metircs(self, logit, y, metrics): for K in [1, 10, 50]: # pred = logit.max(-1, keepdim=True)[1] # acc += pred.eq(y.view_as(pred)).sum().item() # 记得加item() pred, pred_id = torch.topk(logit, K, dim=1) # id=[batch_size, K] for i, gt in enumerate(y): gt = gt.item() cand_ids = pred_id[i].tolist() if gt in cand_ids: rank = cand_ids.index(gt) metrics['NDCG' + str(K)] += 1.0 / math.log(rank + 2.0, 2) metrics['MRR' + str(K)] += 1.0 / (rank + 1.0) assert len(y.shape) == 1 metrics['count'] += y.shape[0] # metrics['count'] = int(metrics['count']/3) def vector2sentence(self, batch_sen, compat=True): # 一个batch的sentence 从id换成token sentences = [] # for sen in batch_sen.numpy(): # sentences.append(self.tokenizer.convert_ids_to_tokens(sen)) for sen in batch_sen.numpy().tolist(): sentence = [] for word in sen: if word != 0: sentence.append(self.tokenizer.convert_ids_to_tokens(word)) # elif word==3: # sentence.append('_UNK_') if compat: sentence = ''.join(sentence) sentences.append(sentence) return sentences @classmethod def optim_opts(self): """ Fetch optimizer selection. By default, collects everything in torch.optim, as well as importing: - qhm / qhmadam if installed from github.com/facebookresearch/qhoptim Override this (and probably call super()) to add your own optimizers. 
""" # first pull torch.optim in optims = { k.lower(): v for k, v in optim.__dict__.items() if not k.startswith('__') and k[0].isupper() } try: import apex.optimizers.fused_adam as fused_adam optims['fused_adam'] = fused_adam.FusedAdam except ImportError: pass try: # https://openreview.net/pdf?id=S1fUpoR5FQ from qhoptim.pyt import QHM, QHAdam optims['qhm'] = QHM optims['qhadam'] = QHAdam except ImportError: # no QHM installed pass logger.info(optims) return optims def init_optim(self): bert_param_optimizer = list( self.model.BERT.named_parameters()) # 模型参数名字列表 other_param_optimizer = list(self.model.SASRec.named_parameters()) + \ list(self.model.fusion.named_parameters()) # no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight'] # optimizer_grouped_parameters = [ # {'params': [p for n, p in bert_param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01}, # {'params': [p for n, p in bert_param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}] # self.optimizer = transformers.AdamW(optimizer_grouped_parameters, lr=self.opt['lr_bert']) self.optimizer = transformers.AdamW([{ 'params': bert_param_optimizer, 'lr': self.opt['lr_bert'] }, { 'params': other_param_optimizer }], lr=self.opt['lr_sasrec']) # self.scheduler = transformers.WarmupLinearSchedule(\ # self.optimizer, warmup_steps=self.opt['warmup_steps'], t_total=len(self.dataset_loader['train']) * self.epoch) def zero_grad(self): """ Zero out optimizer. It is recommended you call this in train_step. It automatically handles gradient accumulation if agent is called with --update-freq. """ self.optimizer.zero_grad() class TrainLoop_SASRec(): def __init__(self, opt, args): self.opt = opt self.args = args self.batch_size = self.args.batch_size self.epoch = self.args.epoch self.use_cuda = self.args.use_cuda self.device = "cuda:{}".format( self.args.gpu) if self.use_cuda else 'cpu' self.args.device = self.device self.build_data() # default_neg_sampled:[batch_size, item_num+1]: [gt, all_item_id] self.default_neg_sampled = torch.tensor([0] + [i for i in range(1, self.args.item_size)], dtype=torch.long)\ .repeat(self.args.batch_size, 1)\ .to(self.device) self.build_model() self.init_optim() def build_data(self): # 初始化分词器 self.tokenizer = BertTokenizer( vocab_file=self.args.vocab_path) # 初始化分词器 # build and save self.dataset self.dataset = {'train': None, 'valid': None, 'test': None} self.dataset_loader = {'train': None, 'valid': None, 'test': None} for subset in self.dataset: self.dataset[subset] = CRSdataset(logger, subset, self.opt[f'{subset}_data_file'], self.args, self.tokenizer) self.dataset_loader[subset] = torch.utils.data.DataLoader( dataset=self.dataset[subset], batch_size=self.batch_size, shuffle=True) # self.dataset['train'].movie_num 是增加了unk之后的电影数量,+1是他们提高1位,增加0的电影总数 self.item_size = self.dataset['train'].movie_num + 1 self.args.item_size = self.item_size def build_model(self): self.model = SASRecModel(args=self.args) if self.args.load_model: self.model.load_model(self.args.sasrec_load_path) if self.use_cuda: self.model.to(self.device) def train(self): losses = [] # 预报一次清零一IC best_val_NDCG = 0.0 gen_stop = False patience = 0 max_patience = 5 for i in range(self.epoch): train_loss = [] # for batch_idx, batch_data in tqdm(enumerate(self.rec_train_dataloader)): for batch_idx, batch_data in enumerate(self.dataset_loader['train']): self.model.train() self.zero_grad() batch_data = [data.to(self.device) for data in batch_data] input_ids, target_pos, input_mask, sample_negs = batch_data[-4:] # 
print(input_ids) # print(target_pos) # input_ids: [batch_size, seq_len] # input_mask:[batch_size, seq_len] # sequence_output:[batch_size, seq_len, hidden_size] sequence_output = self.model(input_ids, input_mask, self.args.use_cuda) # sequence_output:[batch_size, seq_len, hidden_size] # pos_ids:[batch, seq_len] # neg_ids:[batch, seq_len] # loss:[batch*seq_len, 1] loss = self.model.cross_entropy(sequence_output, pos_ids=target_pos, neg_ids=sample_negs, use_cuda=self.use_cuda) train_loss.append(loss.item()) losses.append(loss.item()) loss.backward() # loss反向传播 self.optimizer.step() if (batch_idx + 1) % 1000000000000000 == 0: loss = sum(losses) / len(losses) logger.info('loss is %.4f' % (loss)) losses = [] logger.info( f'Epoch {i}, train loss = {sum(train_loss)/len(train_loss)}') # metrics_test = self.val('train') metrics_test = self.val('valid') _ = self.val('test') if best_val_NDCG > metrics_test["NDCG50"]: patience += 1 logger.info(f"[Patience = {patience}]") if patience >= max_patience: gen_stop = True else: patience = 0 best_val_NDCG = metrics_test["NDCG50"] self.model.save_model(self.args.sasrec_save_path) logger.info(f"[Model saved in {self.args.sasrec_save_path}]") if gen_stop: break # metrics_test = self.val('test') def val(self, subset): assert subset in ['train', 'test', 'valid'] self.model.eval() val_dataset_loader = self.dataset_loader[subset] metrics_test = { "Loss": 0, "NDCG1": 0, "NDCG10": 0, "NDCG50": 0, "MRR1": 0, "MRR10": 0, "MRR50": 0, "count": 0 } losses = [] for batch_idx, batch_data in enumerate(val_dataset_loader): with torch.no_grad(): batch_data = [data.to(self.device) for data in batch_data] _, _, _, predict_ids, input_ids, target_pos, input_mask, sample_negs = batch_data # print(input_ids) # print(target_pos) # print(predict_ids) # sequence_output:[batch_size, seq_len, hidden_size] sequence_output = self.model(input_ids, input_mask, self.args.use_cuda) # loss:[batch*seq_len, 1] loss = self.model.cross_entropy(sequence_output, target_pos, sample_negs, self.use_cuda) # [batch_size, item_num] for i in range(predict_ids.shape[0]): self.default_neg_sampled[i][0] = predict_ids[i] # seq_out: [batch, seq_len, hidden] # test_neg_sample: [batch, item_num] # test_logits: [batch, item_num] # 推荐的结果 test_logits = self.predict( sequence_output, self.default_neg_sampled[:predict_ids.shape[0]], self.use_cuda) self.compute_metircs(test_logits, metrics_test) losses.append(loss.item()) # test 结束 metrics_test['Loss'] = sum(losses) / len(losses) for key in metrics_test: if 'NDCG' in key or 'MRR' in key: metrics_test[key] = round( metrics_test[key] / metrics_test['count'], 4) logger.info(f"{subset} set's metrics = {metrics_test}") return metrics_test # seq_out: [batch, seq_len, hidden] # test_neg_sample: [batch, item_num] # test_logits: [batch, item_num] def predict(self, seq_out, test_neg_sample, use_cuda=True): # shorten: 只要每个batch最后一个item的representation与所有candidate representation的点击 # test_item_emb: [batch, item_num, hidden_size] test_item_emb = self.model.embeddings.item_embeddings(test_neg_sample) # seq_out:[batch, seq_len, hidden] # => [batch,1,hidden] seq_out = seq_out[:, -1, :].unsqueeze(1) # batch 中的最后一个item # seq_out: [batch, 1, hidden] # test_item_emb: [batch, item_num, hidden_size] # test_logits:[batch, 1, item_num] test_logits = torch.matmul(seq_out, test_item_emb.transpose(1, 2)) # print(test_logits.shape) #p # test_logits: [batch, item_num], 即可计算出每个batch最后一个item与所有candidate item的内积分数 test_logits = test_logits[:, -1, :] return test_logits def compute_metircs(self, logit, 
metrics): MRR1, NDCG1 = self.get_metric(logit, topk=1) # ipdb.set_trace() metrics['MRR1'] += MRR1 metrics['NDCG1'] += NDCG1 MRR10, NDCG10 = self.get_metric(logit, topk=10) metrics['MRR10'] += MRR10 metrics['NDCG10'] += NDCG10 MRR50, NDCG50 = self.get_metric(logit, topk=50) metrics['MRR50'] += MRR50 metrics['NDCG50'] += NDCG50 metrics['count'] += 1 # test_logits: [batch, item_num] def get_metric(self, test_logits, topk=10): NDCG = 0.0 MRR = 0.0 # [batch] 最终每个 example 中 正确答案的排位 ranks = test_logits.argsort(descending=True).argsort()[:, 0].cpu() ranks_size = int(ranks.size(0)) for rank in ranks: if rank < topk: NDCG += float(1.0 / np.log2(rank + 2.0)) MRR += float(1.0 / np.array(rank + 1.0)) return MRR / ranks_size, NDCG / ranks_size def save_embed(self): torch.save(self.model.embeddings.item_embeddings.state_dict(), self.args.sasrec_emb_save_path) def vector2sentence(self, batch_sen, compat=True): # 一个batch的sentence 从id换成token sentences = [] # for sen in batch_sen.numpy(): # sentences.append(self.tokenizer.convert_ids_to_tokens(sen)) for sen in batch_sen.numpy().tolist(): sentence = [] for word in sen: if word != 0: sentence.append(self.tokenizer.convert_ids_to_tokens(word)) # elif word==3: # sentence.append('_UNK_') if compat: sentence = ''.join(sentence) sentences.append(sentence) return sentences @classmethod def optim_opts(self): """ Fetch optimizer selection. By default, collects everything in torch.optim, as well as importing: - qhm / qhmadam if installed from github.com/facebookresearch/qhoptim Override this (and probably call super()) to add your own optimizers. """ # first pull torch.optim in optims = { k.lower(): v for k, v in optim.__dict__.items() if not k.startswith('__') and k[0].isupper() } try: import apex.optimizers.fused_adam as fused_adam optims['fused_adam'] = fused_adam.FusedAdam except ImportError: pass try: # https://openreview.net/pdf?id=S1fUpoR5FQ from qhoptim.pyt import QHM, QHAdam optims['qhm'] = QHM optims['qhadam'] = QHAdam except ImportError: # no QHM installed pass logger.info(optims) return optims def init_optim(self): betas = (self.args.adam_beta1, self.args.adam_beta2) self.optimizer = Adam(self.model.parameters(), lr=self.args.lr_sasrec, betas=betas, weight_decay=self.args.weight_decay) print("Total Parameters:", sum([p.nelement() for p in self.model.parameters()])) def zero_grad(self): """ Zero out optimizer. It is recommended you call this in train_step. It automatically handles gradient accumulation if agent is called with --update-freq. 
""" self.optimizer.zero_grad() class TrainLoop_BERT(): def __init__(self, opt, args): self.opt = opt self.args = args self.batch_size = self.opt['batch_size'] self.epoch = self.opt['epoch'] self.use_cuda = opt['use_cuda'] self.device = "cuda:{}".format( self.args.gpu) if self.use_cuda else 'cpu' self.args.device = self.device self.build_data() self.build_model() self.init_optim() def build_data(self): self.tokenizer = BertTokenizer( vocab_file=self.opt['vocab_path']) # 初始化分词器 # build and save dataset self.dataset = {'train': None, 'valid': None, 'test': None} self.dataset_loader = {'train': None, 'valid': None, 'test': None} for subset in self.dataset: self.dataset[subset] = CRSdataset(logger, subset, self.opt[f'{subset}_data_file'], self.args, self.tokenizer) self.dataset_loader[subset] = torch.utils.data.DataLoader( dataset=self.dataset[subset], batch_size=self.batch_size, shuffle=True) self.movie_num = self.dataset['train'].movie_num def build_model(self): self.model = BERTModel(self.args, self.movie_num) if self.use_cuda: self.model.to(self.device) def train(self): losses = [] # 预报一次清零一次 best_val_NDCG = 0.0 gen_stop = False patience = 0 max_patience = 5 for i in range(self.epoch): train_loss = [] for batch_idx, batch_data in tqdm(enumerate(self.dataset_loader['train'])): self.model.train() self.zero_grad() contexts, types, masks, y, _, _, _, _ = (data.to( self.device) for data in batch_data) # 检验输入输出ok # logger.info("[Context] ", batch_data[0]) # logger.info("[Context] ", '\n'.join(self.vector2sentence(contexts.cpu()))) # logger.info("[GT] ", y) # ipdb.set_trace() # logit: [batch_size, num_class] logit = self.model([contexts, types, masks], raw_return=False) # logger.info(logit[y]) # logit: [batch_size, num_class] # y:[batch_size] # loss:[batch_size] loss = self.model.compute_loss(logit, y, 'train') train_loss.append(loss.item()) losses.append(loss.item()) loss.backward() self.optimizer.step() # logger.info('loss = ', loss) if (batch_idx + 1) % 50 == 0: # 从上次预报到现在为止的loss均值,每50个batch预报一次 loss = sum(losses) / len(losses) logger.info('loss is %.4f' % (loss)) losses = [] logger.info(f'Epoch {i}, train loss = {sum(train_loss)/len(train_loss)}') # metrics_test = self.val('train') metrics_test = self.val('valid') _ = self.val('test') if best_val_NDCG > metrics_test["NDCG50"]: patience += 1 logger.info(f"[Patience = {patience}]") if patience >= max_patience: gen_stop = True else: patience = 0 best_val_NDCG = metrics_test["NDCG50"] self.model.save_model(self.opt['model_save_path']) logger.info("[Model saved in {}]".format( self.opt['model_save_path'])) if gen_stop: break def val(self, subset): assert subset in ['train', 'test', 'valid'] self.model.eval() val_dataset_loader = self.dataset_loader[subset] metrics_test = { "Loss": 0, "NDCG1": 0, "NDCG10": 0, "NDCG50": 0, "MRR1": 0, "MRR10": 0, "MRR50": 0, "count": 0 } losses = [] for batch_idx, batch_data in enumerate(val_dataset_loader): with torch.no_grad(): contexts, types, masks, y, _, _, _, _ = (data.to(self.device) for data in batch_data) # logit: [batch_size, num_class] logit = self.model([contexts, types, masks], raw_return=False) # ipdb.set_trace() # logit: [batch_size, num_class] # y: [batch_size] # loss:[batch_size] loss = self.model.compute_loss(logit, y) self.compute_metircs(logit, y, metrics_test) losses.append(loss.item()) metrics_test['Loss'] = sum(losses) / len(losses) for key in metrics_test: if 'NDCG' in key or 'MRR' in key: metrics_test[key] = round(metrics_test[key] / metrics_test['count'], 4) logger.info(f"{subset} set's 
metrics = {metrics_test}") return metrics_test def compute_metircs(self, logit, y, metrics): for K in [1, 10, 50]: # pred = logit.max(-1, keepdim=True)[1] # acc += pred.eq(y.view_as(pred)).sum().item() # 记得加item() pred, pred_id = torch.topk(logit, K, dim=1) # id=[batch_size, K] for i, gt in enumerate(y): gt = gt.item() cand_ids = pred_id[i].tolist() if gt in cand_ids: rank = cand_ids.index(gt) metrics['NDCG' + str(K)] += 1.0 / math.log(rank + 2.0, 2) metrics['MRR' + str(K)] += 1.0 / (rank + 1.0) # metrics['count'] += 1 # metrics['count'] = int(metrics['count']/3) assert len(y.shape) == 1 metrics['count'] += y.shape[0] def vector2sentence(self, batch_sen, compat=True): # 一个batch的sentence 从id换成token sentences = [] # for sen in batch_sen.numpy(): # sentences.append(self.tokenizer.convert_ids_to_tokens(sen)) for sen in batch_sen.numpy().tolist(): sentence = [] for word in sen: if word != 0: sentence.append(self.tokenizer.convert_ids_to_tokens(word)) # elif word==3: # sentence.append('_UNK_') if compat: sentence = ''.join(sentence) sentences.append(sentence) return sentences @classmethod def optim_opts(self): """ Fetch optimizer selection. By default, collects everything in torch.optim, as well as importing: - qhm / qhmadam if installed from github.com/facebookresearch/qhoptim Override this (and probably call super()) to add your own optimizers. """ # first pull torch.optim in optims = { k.lower(): v for k, v in optim.__dict__.items() if not k.startswith('__') and k[0].isupper() } try: import apex.optimizers.fused_adam as fused_adam optims['fused_adam'] = fused_adam.FusedAdam except ImportError: pass try: # https://openreview.net/pdf?id=S1fUpoR5FQ from qhoptim.pyt import QHM, QHAdam optims['qhm'] = QHM optims['qhadam'] = QHAdam except ImportError: # no QHM installed pass logger.info(optims) return optims def init_optim(self): param_optimizer = list(self.model.bert.named_parameters()) # 模型参数名字列表 no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight'] optimizer_grouped_parameters = [{ 'params': [p for n, p in param_optimizer] }] fc_optimizer = list(self.model.fc.named_parameters()) # 模型参数名字列表 optimizer_grouped_parameters += [{ 'params': [p for n, p in fc_optimizer], 'lr': self.opt['lr_sasrec'] }] # self.optimizer = transformers.AdamW(self.model.parameters(), lr=self.opt['lr']) self.optimizer = transformers.AdamW(optimizer_grouped_parameters, lr=self.opt['lr_bert']) # self.scheduler = transformers.WarmupLinearSchedule( # self.optimizer, warmup_steps=self.opt['warmup_steps'], t_total=len(self.dataset_loader['train']) * self.epoch) def zero_grad(self): """ Zero out optimizer. It is recommended you call this in train_step. It automatically handles gradient accumulation if agent is called with --update-freq. 
""" self.optimizer.zero_grad() def main(): args = setup_args().parse_args() args.log_path = args.log_path.format(args.exp_name) global logger logger = create_logger(args) logger.info(vars(args)) if not args.do_eval: args.model_save_path = args.model_save_path.format(args.exp_name) if not os.path.exists(args.model_save_path): os.mkdir(args.model_save_path) args.fusion_save_path = join(args.model_save_path, args.fusion_save_path.format(args.exp_name)) args.sasrec_save_path = join(args.model_save_path, args.sasrec_save_path.format(args.exp_name)) if args.load_model: args.model_load_path = args.model_load_path.format(args.load_exp_name) if not os.path.exists(args.model_load_path): logger.info('!No existing load exp dictionary') exit(0) args.fusion_load_path = join(args.model_load_path, args.fusion_load_path.format(args.load_exp_name)) args.sasrec_load_path = join(args.model_load_path, args.sasrec_load_path.format(args.load_exp_name)) set_seed(args) if args.model_type == 'Ours': loop = TrainLoop_Ours(vars(args), args) elif args.model_type == 'BERT': loop = TrainLoop_BERT(vars(args), args) elif args.model_type == 'SASRec': loop = TrainLoop_SASRec(vars(args), args) if args.do_eval: loop.val('test') if args.is_save_sasrec_embed: loop.save_embed() else: loop.train() if __name__ == '__main__': main()
// This code is part of the project "Theoretically Efficient Parallel Graph
// Algorithms Can Be Fast and Scalable", presented at Symposium on Parallelism
// in Algorithms and Architectures, 2018.
// Copyright (c) 2018 Laxman Dhulipala, Guy Blelloch, and Julian Shun
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

#pragma once

#include "macros.h"
#include "pbbslib/sequence_ops.h"

namespace gbbs {
namespace intersection {

template <class Nghs>
inline size_t intersect(Nghs *A, Nghs *B, uintE a, uintE b) {
  uintT i = 0, j = 0, nA = A->degree, nB = B->degree;
  auto nghA = A->neighbors;
  auto nghB = B->neighbors;
  size_t ans = 0;
  while (i < nA && j < nB) {
    if (std::get<0>(nghA[i]) == std::get<0>(nghB[j]))
      i++, j++, ans++;
    else if (std::get<0>(nghA[i]) < std::get<0>(nghB[j]))
      i++;
    else
      j++;
  }
  return ans;
}

template <class Nghs, class F>
inline size_t intersect_f(Nghs *A, Nghs *B, const F &f) {
  uintT i = 0, j = 0, nA = A->degree, nB = B->degree;
  auto nghA = A->neighbors;
  auto nghB = B->neighbors;
  uintE a = A->id, b = B->id;
  size_t ans = 0;
  while (i < nA && j < nB) {
    if (std::get<0>(nghA[i]) == std::get<0>(nghB[j])) {
      f(a, b, std::get<0>(nghA[i]));
      i++, j++, ans++;
    } else if (std::get<0>(nghA[i]) < std::get<0>(nghB[j])) {
      i++;
    } else {
      j++;
    }
  }
  return ans;
}

constexpr const size_t _bs_merge_base = 32;
constexpr const size_t _seq_merge_thresh = 2048;

template <class SeqA, class SeqB, class F>
size_t seq_merge_full(SeqA &A, SeqB &B, F &f) {
  using T = typename SeqA::value_type;
  size_t nA = A.size(), nB = B.size();
  size_t i = 0, j = 0;
  size_t ct = 0;
  while (i < nA && j < nB) {
    const T &a = A[i];
    const T &b = B[j];
    if (a == b) {
      f(a);
      i++;
      j++;
      ct++;
    } else if (a < b) {
      i++;
    } else {
      j++;
    }
  }
  return ct;
}

template <class SeqA, class SeqB, class F>
size_t seq_merge(const SeqA &A, const SeqB &B, const F &f) {
  using T = typename SeqA::value_type;
  size_t nA = A.size();
  size_t ct = 0;
  for (size_t i = 0; i < nA; i++) {
    const T &a = A[i];
    size_t mB = pbbslib::binary_search(B, a, std::less<T>());
    if (mB < B.size() && a == B[mB]) {
      f(a);
      ct++;
    }
  }
  return ct;
}

template <class SeqA, class SeqB, class F>
size_t merge(const SeqA &A, const SeqB &B, const F &f) {
  using T = typename SeqA::value_type;
  size_t nA = A.size();
  size_t nB = B.size();
  size_t nR = nA + nB;
  if (nR < _seq_merge_thresh) {  // handles (small, small) using linear-merge
    return intersection::seq_merge_full(A, B, f);
  } else if (nB < nA) {
    return intersection::merge(B, A, f);
  } else if (nA < _bs_merge_base) {
    return intersection::seq_merge(A, B, f);
  } else {
    size_t mA = nA / 2;
    size_t mB = pbbslib::binary_search(B, A[mA], std::less<T>());
    size_t m_left = 0;
    size_t m_right = 0;
    par_do(
        [&]() { m_left = intersection::merge(A.slice(0, mA), B.slice(0, mB), f); },
        [&]() { m_right = intersection::merge(A.slice(mA, nA), B.slice(mB, nB), f); });
    return m_left + m_right;
  }
}

template <class Nghs, class F>
inline size_t intersect_f_par(Nghs *A, Nghs *B, const F &f) {
  uintT nA = A->degree, nB = B->degree;
  uintE *nghA = (uintE *)(A->neighbors);
  uintE *nghB = (uintE *)(B->neighbors);

  // Will not work if W is not pbbslib::empty, should assert.
  auto seqA = pbbslib::make_sequence<uintE>(nghA, nA);
  auto seqB = pbbslib::make_sequence<uintE>(nghB, nB);

  uintE a = A->id;
  uintE b = B->id;
  auto merge_f = [&](uintE ngh) { f(a, b, ngh); };
  return intersection::merge(seqA, seqB, merge_f);
}

}  // namespace intersection
}  // namespace gbbs
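The merge routine above intersects two sorted neighbor sequences by recursive splitting: small inputs fall back to a linear merge, a short first sequence is binary-searched element by element into the second, and otherwise A is cut at its midpoint, B is cut at the matching position found by binary search, and the two halves are intersected in parallel via par_do. A sequential Python sketch of the same recursion, included only to illustrate the control flow (thresholds and names are illustrative, not part of the library):

from bisect import bisect_left

SEQ_MERGE_THRESH = 2048  # illustrative analogue of _seq_merge_thresh
BS_MERGE_BASE = 32       # illustrative analogue of _bs_merge_base

def intersect_sorted(a, b, emit):
    """Count (and emit) the common elements of two sorted lists, merge-style."""
    if len(a) + len(b) < SEQ_MERGE_THRESH:        # small inputs: plain linear merge
        count, i, j = 0, 0, 0
        while i < len(a) and j < len(b):
            if a[i] == b[j]:
                emit(a[i]); count += 1; i += 1; j += 1
            elif a[i] < b[j]:
                i += 1
            else:
                j += 1
        return count
    if len(b) < len(a):                           # keep a as the shorter side
        return intersect_sorted(b, a, emit)
    if len(a) < BS_MERGE_BASE:                    # short a: binary-search each element
        count = 0
        for x in a:
            k = bisect_left(b, x)
            if k < len(b) and b[k] == x:
                emit(x); count += 1
        return count
    mid_a = len(a) // 2                           # split a at its midpoint,
    mid_b = bisect_left(b, a[mid_a])              # split b by binary search,
    return (intersect_sorted(a[:mid_a], b[:mid_b], emit) +   # then recurse on both halves
            intersect_sorted(a[mid_a:], b[mid_b:], emit))    # (done in parallel in GBBS)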
#!/usr/bin/env python3
import utils

utils.check_version((3, 7))
utils.clear()

print("Hello there! Would you like to learn more about me?")
answer = input(": ")
answer = answer.lower()
if answer == "yes":
    print("""My name is Nolan McIntire.
My current favorite game would be League of Legends, though it would be hard
to choose my favorite game of all time. When it comes to the class, I am really
only concerned with the amount of work, but I think I should be fine.
I am excited about a few new games coming out soon, as well as a few of my classes.
My stackoverflow id number is 11991293.
The URL to my github profile is https://github.com/RockTank10""")
else:
    print("Ok, I see how it is...")
print("Have a good day!")
#!/usr/bin/env python3 # Copyright (c) 2014-2018 The Machinecoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the fundrawtransaction RPC.""" from decimal import Decimal from test_framework.test_framework import MachinecoinTestFramework from test_framework.util import ( assert_equal, assert_fee_amount, assert_greater_than, assert_greater_than_or_equal, assert_raises_rpc_error, connect_nodes_bi, count_bytes, find_vout_for_address, ) def get_unspent(listunspent, amount): for utx in listunspent: if utx['amount'] == amount: return utx raise AssertionError('Could not find unspent with amount={}'.format(amount)) class RawTransactionsTest(MachinecoinTestFramework): def set_test_params(self): self.num_nodes = 4 self.setup_clean_chain = True def skip_test_if_missing_module(self): self.skip_if_no_wallet() def setup_network(self, split=False): self.setup_nodes() connect_nodes_bi(self.nodes, 0, 1) connect_nodes_bi(self.nodes, 1, 2) connect_nodes_bi(self.nodes, 0, 2) connect_nodes_bi(self.nodes, 0, 3) def run_test(self): min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee'] # This test is not meant to test fee estimation and we'd like # to be sure all txs are sent at a consistent desired feerate for node in self.nodes: node.settxfee(min_relay_tx_fee) # if the fee's positive delta is higher than this value tests will fail, # neg. delta always fail the tests. # The size of the signature of every input may be at most 2 bytes larger # than a minimum sized signature. # = 2 bytes * minRelayTxFeePerByte feeTolerance = 2 * min_relay_tx_fee/1000 self.nodes[2].generate(1) self.sync_all() self.nodes[0].generate(121) self.sync_all() # ensure that setting changePosition in fundraw with an exact match is handled properly rawmatch = self.nodes[2].createrawtransaction([], {self.nodes[2].getnewaddress():50}) rawmatch = self.nodes[2].fundrawtransaction(rawmatch, {"changePosition":1, "subtractFeeFromOutputs":[0]}) assert_equal(rawmatch["changepos"], -1) watchonly_address = self.nodes[0].getnewaddress() watchonly_pubkey = self.nodes[0].getaddressinfo(watchonly_address)["pubkey"] watchonly_amount = Decimal(200) self.nodes[3].importpubkey(watchonly_pubkey, "", True) watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount) # Lock UTXO so nodes[0] doesn't accidentally spend it watchonly_vout = find_vout_for_address(self.nodes[0], watchonly_txid, watchonly_address) self.nodes[0].lockunspent(False, [{"txid": watchonly_txid, "vout": watchonly_vout}]) self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0) self.nodes[0].generate(1) self.sync_all() ############### # simple test # ############### inputs = [ ] outputs = { self.nodes[0].getnewaddress() : 1.0 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert(len(dec_tx['vin']) > 0) #test that we have enough inputs ############################## # simple test with two coins # ############################## inputs = [ ] outputs = { self.nodes[0].getnewaddress() : 2.2 } rawtx = self.nodes[2].createrawtransaction(inputs, 
outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert(len(dec_tx['vin']) > 0) #test if we have enough inputs ############################## # simple test with two coins # ############################## inputs = [ ] outputs = { self.nodes[0].getnewaddress() : 2.6 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert(len(dec_tx['vin']) > 0) assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '') ################################ # simple test with two outputs # ################################ inputs = [ ] outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 for out in dec_tx['vout']: totalOut += out['value'] assert(len(dec_tx['vin']) > 0) assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '') ######################################################################### # test a fundrawtransaction with a VIN greater than the required amount # ######################################################################### utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}] outputs = { self.nodes[0].getnewaddress() : 1.0 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 for out in dec_tx['vout']: totalOut += out['value'] assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee ##################################################################### # test a fundrawtransaction with which will not get a change output # ##################################################################### utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}] outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee - feeTolerance } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 for out in dec_tx['vout']: totalOut += out['value'] assert_equal(rawtxfund['changepos'], -1) assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee #################################################### # test a fundrawtransaction with an invalid option # #################################################### utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ] outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) 
assert_raises_rpc_error(-3, "Unexpected key foo", self.nodes[2].fundrawtransaction, rawtx, {'foo':'bar'}) # reserveChangeKey was deprecated and is now removed assert_raises_rpc_error(-3, "Unexpected key reserveChangeKey", lambda: self.nodes[2].fundrawtransaction(hexstring=rawtx, options={'reserveChangeKey': True})) ############################################################ # test a fundrawtransaction with an invalid change address # ############################################################ utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ] outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) assert_raises_rpc_error(-5, "changeAddress must be a valid machinecoin address", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':'foobar'}) ############################################################ # test a fundrawtransaction with a provided change address # ############################################################ utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ] outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) change = self.nodes[2].getnewaddress() assert_raises_rpc_error(-8, "changePosition out of bounds", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':change, 'changePosition':2}) rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 0}) dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) out = dec_tx['vout'][0] assert_equal(change, out['scriptPubKey']['addresses'][0]) ######################################################### # test a fundrawtransaction with a provided change type # ######################################################### utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ] outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[2].fundrawtransaction, rawtx, {'change_type': None}) assert_raises_rpc_error(-5, "Unknown change type ''", self.nodes[2].fundrawtransaction, rawtx, {'change_type': ''}) rawtx = self.nodes[2].fundrawtransaction(rawtx, {'change_type': 'bech32'}) dec_tx = self.nodes[2].decoderawtransaction(rawtx['hex']) assert_equal('witness_v0_keyhash', dec_tx['vout'][rawtx['changepos']]['scriptPubKey']['type']) ######################################################################### # test a fundrawtransaction with a VIN smaller than the required amount # ######################################################################### utx = get_unspent(self.nodes[2].listunspent(), 1) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}] outputs = { self.nodes[0].getnewaddress() : 1.0 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) # 4-byte version + 1-byte vin count + 36-byte prevout then script_len rawtx = rawtx[:82] + "0100" + rawtx[84:] dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex']) rawtxfund = 
self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 matchingOuts = 0 for i, out in enumerate(dec_tx['vout']): totalOut += out['value'] if out['scriptPubKey']['addresses'][0] in outputs: matchingOuts+=1 else: assert_equal(i, rawtxfund['changepos']) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex']) assert_equal(matchingOuts, 1) assert_equal(len(dec_tx['vout']), 2) ########################################### # test a fundrawtransaction with two VINs # ########################################### utx = get_unspent(self.nodes[2].listunspent(), 1) utx2 = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ] outputs = { self.nodes[0].getnewaddress() : 6.0 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 matchingOuts = 0 for out in dec_tx['vout']: totalOut += out['value'] if out['scriptPubKey']['addresses'][0] in outputs: matchingOuts+=1 assert_equal(matchingOuts, 1) assert_equal(len(dec_tx['vout']), 2) matchingIns = 0 for vinOut in dec_tx['vin']: for vinIn in inputs: if vinIn['txid'] == vinOut['txid']: matchingIns+=1 assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params ######################################################### # test a fundrawtransaction with two VINs and two vOUTs # ######################################################### utx = get_unspent(self.nodes[2].listunspent(), 1) utx2 = get_unspent(self.nodes[2].listunspent(), 5) inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ] outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 matchingOuts = 0 for out in dec_tx['vout']: totalOut += out['value'] if out['scriptPubKey']['addresses'][0] in outputs: matchingOuts+=1 assert_equal(matchingOuts, 2) assert_equal(len(dec_tx['vout']), 3) ############################################## # test a fundrawtransaction with invalid vin # ############################################## inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin! 
outputs = { self.nodes[0].getnewaddress() : 1.0} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx) ############################################################ #compare fee of a standard pubkeyhash transaction inputs = [] outputs = {self.nodes[1].getnewaddress():1.1} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) fundedTx = self.nodes[0].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1) signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert(feeDelta >= 0 and feeDelta <= feeTolerance) ############################################################ ############################################################ #compare fee of a standard pubkeyhash transaction with multiple outputs inputs = [] outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) fundedTx = self.nodes[0].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[0].sendmany("", outputs) signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert(feeDelta >= 0 and feeDelta <= feeTolerance) ############################################################ ############################################################ #compare fee of a 2of2 multisig p2sh transaction # create 2of2 addr addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[1].getnewaddress() addr1Obj = self.nodes[1].getaddressinfo(addr1) addr2Obj = self.nodes[1].getaddressinfo(addr2) mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] inputs = [] outputs = {mSigObj:1.1} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) fundedTx = self.nodes[0].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[0].sendtoaddress(mSigObj, 1.1) signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert(feeDelta >= 0 and feeDelta <= feeTolerance) ############################################################ ############################################################ #compare fee of a standard pubkeyhash transaction # create 4of5 addr addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[1].getnewaddress() addr3 = self.nodes[1].getnewaddress() addr4 = self.nodes[1].getnewaddress() addr5 = self.nodes[1].getnewaddress() addr1Obj = self.nodes[1].getaddressinfo(addr1) addr2Obj = self.nodes[1].getaddressinfo(addr2) addr3Obj = self.nodes[1].getaddressinfo(addr3) addr4Obj = self.nodes[1].getaddressinfo(addr4) addr5Obj = self.nodes[1].getaddressinfo(addr5) mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])['address'] inputs = [] outputs = {mSigObj:1.1} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) fundedTx = self.nodes[0].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[0].sendtoaddress(mSigObj, 1.1) signedFee = 
self.nodes[0].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert(feeDelta >= 0 and feeDelta <= feeTolerance) ############################################################ ############################################################ # spend a 2of2 multisig transaction over fundraw # create 2of2 addr addr1 = self.nodes[2].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[2].getaddressinfo(addr1) addr2Obj = self.nodes[2].getaddressinfo(addr2) mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] # send 1.2 MAC to msig addr txId = self.nodes[0].sendtoaddress(mSigObj, 1.2) self.sync_all() self.nodes[1].generate(1) self.sync_all() oldBalance = self.nodes[1].getbalance() inputs = [] outputs = {self.nodes[1].getnewaddress():1.1} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) fundedTx = self.nodes[2].fundrawtransaction(rawtx) signedTx = self.nodes[2].signrawtransactionwithwallet(fundedTx['hex']) txId = self.nodes[2].sendrawtransaction(signedTx['hex']) self.sync_all() self.nodes[1].generate(1) self.sync_all() # make sure funds are received at node1 assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance()) ############################################################ # locked wallet test self.nodes[1].encryptwallet("test") self.stop_nodes() self.start_nodes() # This test is not meant to test fee estimation and we'd like # to be sure all txs are sent at a consistent desired feerate for node in self.nodes: node.settxfee(min_relay_tx_fee) connect_nodes_bi(self.nodes,0,1) connect_nodes_bi(self.nodes,1,2) connect_nodes_bi(self.nodes,0,2) connect_nodes_bi(self.nodes,0,3) # Again lock the watchonly UTXO or nodes[0] may spend it, because # lockunspent is memory-only and thus lost on restart self.nodes[0].lockunspent(False, [{"txid": watchonly_txid, "vout": watchonly_vout}]) self.sync_all() # drain the keypool self.nodes[1].getnewaddress() self.nodes[1].getrawchangeaddress() inputs = [] outputs = {self.nodes[0].getnewaddress():1.1} rawtx = self.nodes[1].createrawtransaction(inputs, outputs) # fund a transaction that requires a new key for the change output # creating the key must be impossible because the wallet is locked assert_raises_rpc_error(-4, "Keypool ran out, please call keypoolrefill first", self.nodes[1].fundrawtransaction, rawtx) #refill the keypool self.nodes[1].walletpassphrase("test", 100) self.nodes[1].keypoolrefill(8) #need to refill the keypool to get an internal change address self.nodes[1].walletlock() assert_raises_rpc_error(-13, "walletpassphrase", self.nodes[1].sendtoaddress, self.nodes[0].getnewaddress(), 1.2) oldBalance = self.nodes[0].getbalance() inputs = [] outputs = {self.nodes[0].getnewaddress():1.1} rawtx = self.nodes[1].createrawtransaction(inputs, outputs) fundedTx = self.nodes[1].fundrawtransaction(rawtx) #now we need to unlock self.nodes[1].walletpassphrase("test", 600) signedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex']) txId = self.nodes[1].sendrawtransaction(signedTx['hex']) self.nodes[1].generate(1) self.sync_all() # make sure funds are received at node1 assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance()) ############################################### # multiple (~19) inputs tx test | Compare fee # ############################################### #empty node1, send some small coins from node0 to node1 self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 
self.nodes[1].getbalance(), "", "", True) self.sync_all() self.nodes[0].generate(1) self.sync_all() for i in range(0,20): self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01) self.nodes[0].generate(1) self.sync_all() #fund a tx with ~20 small inputs inputs = [] outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04} rawtx = self.nodes[1].createrawtransaction(inputs, outputs) fundedTx = self.nodes[1].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[1].sendmany("", outputs) signedFee = self.nodes[1].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs ############################################# # multiple (~19) inputs tx test | sign/send # ############################################# #again, empty node1, send some small coins from node0 to node1 self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True) self.sync_all() self.nodes[0].generate(1) self.sync_all() for i in range(0,20): self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01) self.nodes[0].generate(1) self.sync_all() #fund a tx with ~20 small inputs oldBalance = self.nodes[0].getbalance() inputs = [] outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04} rawtx = self.nodes[1].createrawtransaction(inputs, outputs) fundedTx = self.nodes[1].fundrawtransaction(rawtx) fundedAndSignedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex']) txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex']) self.sync_all() self.nodes[0].generate(1) self.sync_all() assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward ##################################################### # test fundrawtransaction with OP_RETURN and no vin # ##################################################### rawtx = "0100000000010000000000000000066a047465737400000000" dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(len(dec_tx['vin']), 0) assert_equal(len(dec_tx['vout']), 1) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert_greater_than(len(dec_tx['vin']), 0) # at least one vin assert_equal(len(dec_tx['vout']), 2) # one change output added ################################################## # test a fundrawtransaction using only watchonly # ################################################## inputs = [] outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) result = self.nodes[3].fundrawtransaction(rawtx, {'includeWatching': True }) res_dec = self.nodes[0].decoderawtransaction(result["hex"]) assert_equal(len(res_dec["vin"]), 1) assert_equal(res_dec["vin"][0]["txid"], watchonly_txid) assert("fee" in result.keys()) assert_greater_than(result["changepos"], -1) ############################################################### # test fundrawtransaction using the entirety of watched funds # ############################################################### inputs = [] outputs = {self.nodes[2].getnewaddress() : watchonly_amount} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) # Backward compatibility test (2nd param is includeWatching) result = self.nodes[3].fundrawtransaction(rawtx, True) res_dec = self.nodes[0].decoderawtransaction(result["hex"]) assert_equal(len(res_dec["vin"]), 2) 
assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid) assert_greater_than(result["fee"], 0) assert_greater_than(result["changepos"], -1) assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10) signedtx = self.nodes[3].signrawtransactionwithwallet(result["hex"]) assert(not signedtx["complete"]) signedtx = self.nodes[0].signrawtransactionwithwallet(signedtx["hex"]) assert(signedtx["complete"]) self.nodes[0].sendrawtransaction(signedtx["hex"]) self.nodes[0].generate(1) self.sync_all() ####################### # Test feeRate option # ####################### # Make sure there is exactly one input so coin selection can't skew the result assert_equal(len(self.nodes[3].listunspent(1)), 1) inputs = [] outputs = {self.nodes[3].getnewaddress() : 1} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) result = self.nodes[3].fundrawtransaction(rawtx) # uses min_relay_tx_fee (set by settxfee) result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee}) result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10*min_relay_tx_fee}) assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[3].fundrawtransaction, rawtx, {"feeRate": 1}) result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex']) assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate) assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate) ################################ # Test no address reuse occurs # ################################ result3 = self.nodes[3].fundrawtransaction(rawtx) res_dec = self.nodes[0].decoderawtransaction(result3["hex"]) changeaddress = "" for out in res_dec['vout']: if out['value'] > 1.0: changeaddress += out['scriptPubKey']['addresses'][0] assert(changeaddress != "") nextaddr = self.nodes[3].getnewaddress() # Now the change address key should be removed from the keypool assert(changeaddress != nextaddr) ###################################### # Test subtractFeeFromOutputs option # ###################################### # Make sure there is exactly one input so coin selection can't skew the result assert_equal(len(self.nodes[3].listunspent(1)), 1) inputs = [] outputs = {self.nodes[2].getnewaddress(): 1} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) result = [self.nodes[3].fundrawtransaction(rawtx), # uses min_relay_tx_fee (set by settxfee) self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": []}), # empty subtraction list self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0]}), # uses min_relay_tx_fee (set by settxfee) self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee}), self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee, "subtractFeeFromOutputs": [0]})] dec_tx = [self.nodes[3].decoderawtransaction(tx_['hex']) for tx_ in result] output = [d['vout'][1 - r['changepos']]['value'] for d, r in zip(dec_tx, result)] change = [d['vout'][r['changepos']]['value'] for d, r in zip(dec_tx, result)] assert_equal(result[0]['fee'], result[1]['fee'], result[2]['fee']) assert_equal(result[3]['fee'], result[4]['fee']) assert_equal(change[0], change[1]) assert_equal(output[0], output[1]) assert_equal(output[0], output[2] + result[2]['fee']) assert_equal(change[0] + result[0]['fee'], change[2]) assert_equal(output[3], output[4] + result[4]['fee']) assert_equal(change[3] + result[3]['fee'], change[4]) inputs = [] outputs = 
{self.nodes[2].getnewaddress(): value for value in (1.0, 1.1, 1.2, 1.3)} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) result = [self.nodes[3].fundrawtransaction(rawtx), # split the fee between outputs 0, 2, and 3, but not output 1 self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0, 2, 3]})] dec_tx = [self.nodes[3].decoderawtransaction(result[0]['hex']), self.nodes[3].decoderawtransaction(result[1]['hex'])] # Nested list of non-change output amounts for each transaction output = [[out['value'] for i, out in enumerate(d['vout']) if i != r['changepos']] for d, r in zip(dec_tx, result)] # List of differences in output amounts between normal and subtractFee transactions share = [o0 - o1 for o0, o1 in zip(output[0], output[1])] # output 1 is the same in both transactions assert_equal(share[1], 0) # the other 3 outputs are smaller as a result of subtractFeeFromOutputs assert_greater_than(share[0], 0) assert_greater_than(share[2], 0) assert_greater_than(share[3], 0) # outputs 2 and 3 take the same share of the fee assert_equal(share[2], share[3]) # output 0 takes at least as much share of the fee, and no more than 2 satoshis more, than outputs 2 and 3 assert_greater_than_or_equal(share[0], share[2]) assert_greater_than_or_equal(share[2] + Decimal(2e-8), share[0]) # the fee is the same in both transactions assert_equal(result[0]['fee'], result[1]['fee']) # the total subtracted from the outputs is equal to the fee assert_equal(share[0] + share[2] + share[3], result[0]['fee']) if __name__ == '__main__': RawTransactionsTest().main()
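The subtractFeeFromOutputs assertions above pin down how the fee is shared: the selected outputs split it evenly, and the first selected output may pay at most len(selected) - 1 satoshis more than the rest, which is what an even split with the remainder assigned to the first output would produce. A rough sketch of that arithmetic (illustrative only; the fee figure is made up and this is not the wallet's actual coin-selection code):

fee = 10000                       # satoshis, made-up figure for illustration
selected = [0, 2, 3]              # outputs named in subtractFeeFromOutputs
base, remainder = divmod(fee, len(selected))
shares = {i: base for i in selected}
shares[selected[0]] += remainder  # leftover satoshis land on the first selected output
# shares == {0: 3334, 2: 3333, 3: 3333}: outputs 2 and 3 pay the same share,
# and output 0 pays at most len(selected) - 1 == 2 satoshis more, matching the asserts.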
$(function() { $('#grade').click(function() { MicroModal.show('grade-modal') }) })
from django.urls import path from . import views app_name = "suppliers" urlpatterns = [ path("", views.SupplierListView.as_view(), name="supplier-list"), path("search/", views.SupplierSearch.as_view(), name="supplier-search"), path( "detail/<slug:slug>", views.SupplierDetailView.as_view(), name="supplier-detail" ), path("create", views.SupplierCreateView.as_view(), name="supplier-create"), path( "update/<slug:slug>", views.SupplierUpdateView.as_view(), name="supplier-update" ), path( "delete/<slug:slug>", views.SupplierDeleteView.as_view(), name="supplier-delete" ), ]
from django.shortcuts import render, HttpResponse from django.utils.safestring import mark_safe import datetime from datetime import timedelta import json import requests import csv import os from bs4 import BeautifulSoup import numpy as np import pandas as pd from .models import CSV def home_page(request): print("date:", datetime.date.today().day) day = datetime.date.today().day month = datetime.date.today().month year = datetime.date.today().year return render(request, "Home_page.html", {'Day': str(day).zfill(2), 'Month': str(month).zfill(2), 'Year': year}) def graph_plot(request): if request.method == 'POST': # print(request.POST) a = request.POST['from'] b = request.POST['to'] ans = a.split('-') bns = b.split('-') aa = datetime.date(int(ans[0]),int(ans[1]),int(ans[2])) bb = datetime.date(int(bns[0]),int(bns[1]),int(bns[2])) query_results = [] last = [] t = ['x', '00:00', '00:05', '00:10', '00:15', '00:20', '00:25', '00:30', '00:35', '00:40', '00:45', '00:50', '00:55', '01:00', '01:05', '01:10', '01:15', '01:20', '01:25', '01:30', '01:35', '01:40', '01:45', '01:50', '01:55', '02:00', '02:05', '02:10', '02:15', '02:20', '02:25', '02:30', '02:35', '02:40', '02:45', '02:50', '02:55', '03:00', '03:05', '03:10', '03:15', '03:20', '03:25', '03:30', '03:35', '03:40', '03:45', '03:50', '03:55', '04:00', '04:05', '04:10', '04:15', '04:20', '04:25', '04:30', '04:35', '04:40', '04:45', '04:50', '04:55', '05:00', '05:05', '05:10', '05:15', '05:20', '05:25', '05:30', '05:35', '05:40', '05:45', '05:50', '05:55', '06:00', '06:05', '06:10', '06:15', '06:20', '06:25', '06:30', '06:35', '06:40', '06:45', '06:50', '06:55', '07:00', '07:05', '07:10', '07:15', '07:20', '07:25', '07:30', '07:35', '07:40', '07:45', '07:50', '07:55', '08:00', '08:05', '08:10', '08:15', '08:20', '08:25', '08:30', '08:35', '08:40', '08:45', '08:50', '08:55', '09:00', '09:05', '09:10', '09:15', '09:20', '09:25', '09:30', '09:35', '09:40', '09:45', '09:50', '09:55', '10:00', '10:05', '10:10', '10:15', '10:20', '10:25', '10:30', '10:35', '10:40', '10:45', '10:50', '10:55', '11:00', '11:05', '11:10', '11:15', '11:20', '11:25', '11:30', '11:35', '11:40', '11:45', '11:50', '11:55', '12:00', '12:05', '12:10', '12:15', '12:20', '12:25', '12:30', '12:35', '12:40', '12:45', '12:50', '12:55', '13:00', '13:05', '13:10', '13:15', '13:20', '13:25', '13:30', '13:35', '13:40', '13:45', '13:50', '13:55', '14:00', '14:05', '14:10', '14:15', '14:20', '14:25', '14:30', '14:35', '14:40', '14:45', '14:50', '14:55', '15:00', '15:05', '15:10', '15:15', '15:20', '15:25', '15:30', '15:35', '15:40', '15:45', '15:50', '15:55', '16:00', '16:05', '16:10', '16:15', '16:20', '16:25', '16:30', '16:35', '16:40', '16:45', '16:50', '16:55', '17:00', '17:05', '17:10', '17:15', '17:20', '17:25', '17:30', '17:35', '17:40', '17:45', '17:50', '17:55', '18:00', '18:05', '18:10', '18:15', '18:20', '18:25', '18:30', '18:35', '18:40', '18:45', '18:50', '18:55', '19:00', '19:05', '19:10', '19:15', '19:20', '19:25', '19:30', '19:35', '19:40', '19:45', '19:50', '19:55', '20:00', '20:05', '20:10', '20:15', '20:20', '20:25', '20:30', '20:35', '20:40', '20:45', '20:50', '20:55', '21:00', '21:05', '21:10', '21:15', '21:20', '21:25', '21:30', '21:35', '21:40', '21:45', '21:50', '21:55', '22:00', '22:05', '22:10', '22:15', '22:20', '22:25', '22:30', '22:35', '22:40', '22:45', '22:50', '22:55', '23:00', '23:05', '23:10', '23:15', '23:20', '23:25', '23:30', '23:35', '23:40', '23:45', '23:50', '23:55'] for i in range((bb-aa).days+1): query_results.append(CSV.objects.filter(date = 
aa+timedelta(days=i)).order_by('timestamp')) q = [str(aa+timedelta(days=i))] for x in query_results[i]: q.append(x.load_value) last.append(q) last.insert(0,t) else: last = None # print ('sadf',last) cont = { # "query_results":query_results, 'Load':last, # 'Tarikh':a, # 'T':mark_safe(t), # 'Load': [x.load_value for x in query_results], # 'Time': [x.timestamp for x in query_results], } return HttpResponse(json.dumps(cont),content_type='application/json') def forecasted_plot(request): if request.method == 'POST': qq = request.POST['fc'] ans = qq.split('-') aa = datetime.date(int(ans[0]),int(ans[1]),int(ans[2])) day = aa.day month = aa.month year = aa.year query_results = [] l = [] query_results.append((CSV.objects.filter(date = aa).order_by('timestamp'))) q = [str(aa)] for x in query_results[0]: q.append(x.load_value) # weights = [0.8019, 0.0426, 0.0226, -0.0169, 0.1497] ARIMA_load = ['Forecasted with ARIMA'] WMA_load = ['Forecasted with WMA'] SMA_load = ['Forecasted with SMA'] LSTM_load = ['Forecasted with LSTM'] SES_load = ['Forecasted with SES'] GRU_load = ['Forecasted with GRU'] RNN_load = ['Forecasted with RNN'] csv_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) print(str(day).zfill(2)+"-"+str(month).zfill(2)+"-"+str(year)+'.csv') arima_csv = pd.read_csv(os.path.join(csv_path, 'predictions/ARIMA/'+str(day).zfill(2)+"-"+str(month).zfill(2)+"-"+str(year)+'.csv'))['load'].values ARIMA_load.extend(list(arima_csv)) wma_csv = pd.read_csv(os.path.join(csv_path, 'predictions/WMA/'+str(day).zfill(2)+"-"+str(month).zfill(2)+"-"+str(year)+'.csv'))['load'].values WMA_load.extend(list(wma_csv)) sma_csv = pd.read_csv(os.path.join(csv_path, 'predictions/SMA/'+str(day).zfill(2)+"-"+str(month).zfill(2)+"-"+str(year)+'.csv'))['load'].values SMA_load.extend(list(sma_csv)) ses_csv = pd.read_csv(os.path.join(csv_path, 'predictions/SES/'+str(day).zfill(2)+"-"+str(month).zfill(2)+"-"+str(year)+'.csv'))['load'].values SES_load.extend(list(ses_csv)) lstm_csv = pd.read_csv(os.path.join(csv_path, 'predictions/LSTM/'+str(day).zfill(2)+"-"+str(month).zfill(2)+"-"+str(year)+'.csv'))['load'].values LSTM_load.extend(list(lstm_csv)) gru_csv = pd.read_csv(os.path.join(csv_path, 'predictions/GRU/'+str(day).zfill(2)+"-"+str(month).zfill(2)+"-"+str(year)+'.csv'))['load'].values GRU_load.extend(list(gru_csv)) rnn_csv = pd.read_csv(os.path.join(csv_path, 'predictions/RNN/'+str(day).zfill(2)+"-"+str(month).zfill(2)+"-"+str(year)+'.csv'))['load'].values RNN_load.extend(list(rnn_csv)) l.append(SMA_load) l.append(WMA_load) l.append(SES_load) l.append(ARIMA_load) l.append(LSTM_load) l.append(GRU_load) l.append(RNN_load) l.append(q) #q==actual redu = q.count(None) length = len(q)-redu t = ['x', '00:00', '00:05', '00:10', '00:15', '00:20', '00:25', '00:30', '00:35', '00:40', '00:45', '00:50', '00:55', '01:00', '01:05', '01:10', '01:15', '01:20', '01:25', '01:30', '01:35', '01:40', '01:45', '01:50', '01:55', '02:00', '02:05', '02:10', '02:15', '02:20', '02:25', '02:30', '02:35', '02:40', '02:45', '02:50', '02:55', '03:00', '03:05', '03:10', '03:15', '03:20', '03:25', '03:30', '03:35', '03:40', '03:45', '03:50', '03:55', '04:00', '04:05', '04:10', '04:15', '04:20', '04:25', '04:30', '04:35', '04:40', '04:45', '04:50', '04:55', '05:00', '05:05', '05:10', '05:15', '05:20', '05:25', '05:30', '05:35', '05:40', '05:45', '05:50', '05:55', '06:00', '06:05', '06:10', '06:15', '06:20', '06:25', '06:30', '06:35', '06:40', '06:45', '06:50', '06:55', '07:00', '07:05', '07:10', '07:15', '07:20', '07:25', '07:30', '07:35', 
'07:40', '07:45', '07:50', '07:55', '08:00', '08:05', '08:10', '08:15', '08:20', '08:25', '08:30', '08:35', '08:40', '08:45', '08:50', '08:55', '09:00', '09:05', '09:10', '09:15', '09:20', '09:25', '09:30', '09:35', '09:40', '09:45', '09:50', '09:55', '10:00', '10:05', '10:10', '10:15', '10:20', '10:25', '10:30', '10:35', '10:40', '10:45', '10:50', '10:55', '11:00', '11:05', '11:10', '11:15', '11:20', '11:25', '11:30', '11:35', '11:40', '11:45', '11:50', '11:55', '12:00', '12:05', '12:10', '12:15', '12:20', '12:25', '12:30', '12:35', '12:40', '12:45', '12:50', '12:55', '13:00', '13:05', '13:10', '13:15', '13:20', '13:25', '13:30', '13:35', '13:40', '13:45', '13:50', '13:55', '14:00', '14:05', '14:10', '14:15', '14:20', '14:25', '14:30', '14:35', '14:40', '14:45', '14:50', '14:55', '15:00', '15:05', '15:10', '15:15', '15:20', '15:25', '15:30', '15:35', '15:40', '15:45', '15:50', '15:55', '16:00', '16:05', '16:10', '16:15', '16:20', '16:25', '16:30', '16:35', '16:40', '16:45', '16:50', '16:55', '17:00', '17:05', '17:10', '17:15', '17:20', '17:25', '17:30', '17:35', '17:40', '17:45', '17:50', '17:55', '18:00', '18:05', '18:10', '18:15', '18:20', '18:25', '18:30', '18:35', '18:40', '18:45', '18:50', '18:55', '19:00', '19:05', '19:10', '19:15', '19:20', '19:25', '19:30', '19:35', '19:40', '19:45', '19:50', '19:55', '20:00', '20:05', '20:10', '20:15', '20:20', '20:25', '20:30', '20:35', '20:40', '20:45', '20:50', '20:55', '21:00', '21:05', '21:10', '21:15', '21:20', '21:25', '21:30', '21:35', '21:40', '21:45', '21:50', '21:55', '22:00', '22:05', '22:10', '22:15', '22:20', '22:25', '22:30', '22:35', '22:40', '22:45', '22:50', '22:55', '23:00', '23:05', '23:10', '23:15', '23:20', '23:25', '23:30', '23:35', '23:40', '23:45', '23:50', '23:55'] l.insert(0, t) else: l = None print(l) def mean_absolute_percentage_error(y_pred, y_true): print('mape:', y_pred, y_true) try: y_true, y_pred = np.array(y_true), np.array(y_pred) mape = np.mean(np.abs((y_true - y_pred) / y_true)) * 100 except Exception as e: mape = e return mape def root_mean_square_error(y_pred, y_true): print('rmse:', y_pred, y_true) try: y_true, y_pred = np.array(y_true), np.array(y_pred) rmse = np.sqrt((y_pred - y_true)**2).mean() except Exception as e: rmse = e return rmse cont = { # "query_results":query_results, 'forecasted_Load': l, 'rmseSMA': round(root_mean_square_error(l[1][1:length], l[8][1:length]), 2), 'rmseWMA': round(root_mean_square_error(l[2][1:length], l[8][1:length]), 2), 'rmseSES': round(root_mean_square_error(l[3][1:length], l[8][1:length]), 2), 'rmseARIMA': round(root_mean_square_error(l[4][1:length], l[8][1:length]), 2), 'rmseLSTM': round(root_mean_square_error(l[5][1:length], l[8][1:length]), 2), 'rmseGRU': round(root_mean_square_error(l[6][1:length], l[8][1:length]), 2), 'rmseRNN': round(root_mean_square_error(l[7][1:length], l[8][1:length]), 2), 'mapeSMA': round(mean_absolute_percentage_error(l[1][1:length], l[8][1:length]),2), 'mapeWMA': round(mean_absolute_percentage_error(l[2][1:length], l[8][1:length]),2), 'mapeSES': round(mean_absolute_percentage_error(l[3][1:length], l[8][1:length]),2), 'mapeARIMA': round(mean_absolute_percentage_error(l[4][1:length], l[8][1:length]),2), 'mapeLSTM': round(mean_absolute_percentage_error(l[5][1:length], l[8][1:length]),2), 'mapeGRU': round(mean_absolute_percentage_error(l[6][1:length], l[8][1:length]),2), 'mapeRNN': round(mean_absolute_percentage_error(l[7][1:length], l[8][1:length]),2), # 'Tarikh':a, # 'T':mark_safe(t), # 'Load': [x.load_value for x in query_results], # 'Time': 
[x.timestamp for x in query_results], } # print(cont) return HttpResponse(json.dumps(cont),content_type='application/json')
import http from flask import Blueprint, jsonify, request, render_template, redirect, url_for from helpers.trello import search_cards, get_client, create_webhook, update_webhook from formatters.trello.cards import format_card_extension_data_response from auth import hubspot_signature_required from repositories import AssociationsRepository, WebhooksRepository module = Blueprint("trello.associations", __name__) @module.route("/search") def search(): query = request.args.get("q") cards = search_cards(query=query) response = [{"name": card.name, "id": card.id} for card in cards] return jsonify(response) @module.route("/search_frame", methods=["GET"]) def search_frame(): deal_name = request.args.get("dealname") return render_template("trello/cards/search_frame.html", deal_name=deal_name) @module.route("/search_frame", methods=["POST"]) def create_association(): deal_id = request.args.get("hs_object_id") card_id = request.form.get("card_id") AssociationsRepository.create(deal_id, card_id) callback_url = url_for("trello.webhooks.handle", _external=True) webhooks = WebhooksRepository.find_by(card_id=card_id) if len(webhooks) > 0: for webhook in webhooks: update_webhook(webhook_id=webhook.webhook_id, callback_url=callback_url) webhook.url = callback_url WebhooksRepository.save(webhook) else: webhook_data = create_webhook(callback_url=callback_url, card_id=card_id) WebhooksRepository.create( webhook_id=webhook_data["id"], card_id=card_id, url=callback_url, ) return redirect(url_for("trello.associations.search_frame_success")) @module.route("/search_frame_success", methods=["GET"]) def search_frame_success(): return render_template("trello/cards/search_frame_success.html") @module.route("/", methods=["DELETE"]) @hubspot_signature_required def delete_association(): deal_id = request.args.get("hs_object_id") AssociationsRepository.delete_by_deal_id(deal_id) return "", http.HTTPStatus.NO_CONTENT @module.route("/card_extension") @hubspot_signature_required def card_extension_data(): deal_id = request.args["hs_object_id"] deal_associated = AssociationsRepository.is_deal_associated(deal_id) card = None if deal_associated: association = AssociationsRepository.find_one_by_deal_id(deal_id) trello = get_client() card = trello.get_card(card_id=association.card_id) card.members = [trello.get_member(m) for m in card.idMembers] response = format_card_extension_data_response( deal_associated=deal_associated, card=card ) return jsonify(response)
from empire.core import * from empire.data_structures.interfaces.abstract_map import AbstractMap from empire.util.log import * from copy import deepcopy T = TypeVar('T') U = TypeVar('U') class ESMap(AbstractMap): """ Basic map implementation that does not throw exceptions. Please note that it does not stop errors such as trying to call merge using an integer as value. It mostly traps errors in get/del operations and adds the safe_get() method (from SafeDict class) """ def __init__(self, data: Union[AbstractMap, dict, None] = None): if data: if type(data) is dict: self._inner_dict: Dict[T, U] = dict(data) else: self._inner_dict: Dict[T, U] = dict(data.get_python_dict()) else: self._inner_dict: Dict[T, U] = {} @property def keys(self) -> Union[KeysView[T], Any]: return self._inner_dict.keys() @property def values(self) -> Union[ValuesView[U], Any]: return self._inner_dict.values() @property def is_empty(self) -> bool: return len(self._inner_dict) == 0 def clear(self): self._inner_dict.clear() def get(self, key: T, default: U = None) -> U: return self._inner_dict.get(key, default) def safe_get(self, key: T, default_value: U = None, triggers_on_absent_key: bool = True, triggers_on_exception: bool = True, triggers_on_none_value: bool = True, triggers_on_empty_string: bool = True, triggers_on_stringified_nones: bool = True) -> U: """ Safely attempts to get the value at 'key' from 'dictionary'. :param key: the key to get the value from :param default_value: the default value to return :param triggers_on_absent_key: returns the default value if 'key' is not in 'dictionary' :param triggers_on_exception: returns the default value if any exception would occur :param triggers_on_none_value: returns the default value if the value at 'key' is None :param triggers_on_empty_string: returns the default value if the value at 'key' is an empty strings :param triggers_on_stringified_nones: the value 'None' can be encountered using boto """ try: if key in self._inner_dict.keys(): dict_value = self._inner_dict[key] if triggers_on_none_value and dict_value is None: return default_value elif type(dict_value) is str: if (triggers_on_empty_string and dict_value == '') or \ (triggers_on_stringified_nones and dict_value.lower() == 'none'): return default_value else: return self._inner_dict[key] else: return self._inner_dict[key] else: if triggers_on_absent_key: return default_value else: return None except Exception: if triggers_on_exception: return default_value else: return None def get_and_delete(self, key: T, default: U = None) -> U: if key not in self._inner_dict: return default else: return self._inner_dict.pop(key, default) def get_python_dict(self) -> dict: return self._inner_dict def get_set_if_not_exist(self, key: T, default: U = None) -> U: return self._inner_dict.setdefault(key, default) def initialize_keys(self, keys: Iterable[T], value: U = None): for key in keys: self._inner_dict[key] = value def keys_copy(self) -> List[T]: return list(self._inner_dict.keys()) def merge(self, other: Union[AbstractMap, dict]) -> bool: if type(other) is dict: self._inner_dict.update(other) else: for key in other.keys: self._inner_dict[key] = other[key] return True def values_copy(self) -> List[U]: return list(self._inner_dict.values()) def __add__(self, other: Union[AbstractMap, dict]) -> AbstractMap: new = ESMap() other_keys: KeysView[T] = other.keys() if type(other) is dict else other.keys for key in other_keys: new[key] = other[key] for key in self._inner_dict.keys(): new[key] = self._inner_dict[key] return new def 
__and__(self, other: Union[AbstractMap, dict]) -> AbstractMap: new = ESMap() other_keys: KeysView[T] = other.keys() if type(other) is dict else other.keys for key in other_keys: if key in self._inner_dict: new[key] = other[key] return new def __contains__(self, item: Union[T, Tuple[T, U]]) -> bool: if type(item) is tuple: return item in self._inner_dict.items() else: return item in self._inner_dict def __copy__(self): try: return ESMap(self._inner_dict) except Exception as error: Log.error('An error occurred while copying map', __file__, get_function_name(), error=error) return ESMap() def __deepcopy__(self, memodict={}): try: return ESMap(deepcopy(self._inner_dict)) except Exception as error: Log.error('An error occurred in deep copy', __file__, get_function_name(), error=error) return ESMap() def __delitem__(self, key: T): if key in self._inner_dict: del self._inner_dict[key] def __eq__(self, other) -> bool: try: return self._inner_dict == other except Exception: return False def __ge__(self, other) -> bool: try: return len(self._inner_dict) >= len(other) except Exception: return False def __gt__(self, other) -> bool: try: return len(self._inner_dict) > len(other) except Exception: return False def __getitem__(self, item: T) -> U: if item in self._inner_dict: return self._inner_dict[item] else: return None def __iadd__(self, other: Union[AbstractMap, dict]) -> AbstractMap: other_keys: KeysView[T] = other.keys() if type(other) is dict else other.keys for key in other_keys: if key not in self._inner_dict: self._inner_dict[key] = other[key] return self def __iand__(self, other: Union[AbstractMap, dict]) -> AbstractMap: other_keys: KeysView[T] = other.keys() if type(other) is dict else other.keys for key in self.keys_copy(): if key not in other_keys: del self._inner_dict[key] else: self._inner_dict[key] = other[key] return self def __ior__(self, other: Union[AbstractMap, dict]) -> AbstractMap: self.merge(other) return self def __iter__(self): return self._inner_dict.__iter__() def __ixor__(self, other: Union[AbstractMap, dict]) -> AbstractMap: other_keys: KeysView[T] = other.keys() if type(other) is dict else other.keys for key in self.keys_copy(): if key not in other_keys: del self._inner_dict[key] return self def __le__(self, other) -> bool: try: return len(self._inner_dict) <= len(other) except Exception: return False def __len__(self) -> int: return len(self._inner_dict) def __lt__(self, other) -> bool: try: return len(self._inner_dict) < len(other) except Exception: return False def __ne__(self, other) -> bool: return not (self == other) def __or__(self, other: Union[AbstractMap, dict]) -> AbstractMap: new = ESMap() other_keys: KeysView[T] = other.keys() if type(other) is dict else other.keys for key in self._inner_dict.keys(): new[key] = self._inner_dict[key] for key in other_keys: new[key] = other[key] return new def __setitem__(self, key: T, value: U): self._inner_dict[key] = value def __repr__(self) -> str: return repr(self._inner_dict) def __reversed__(self): return self._inner_dict.__reversed__() def __str__(self) -> str: return str(self._inner_dict) def __xor__(self, other: Union[AbstractMap, dict]) -> AbstractMap: new = ESMap() other_keys: KeysView[T] = other.keys() if type(other) is dict else other.keys for key in other_keys: if key in self._inner_dict: new[key] = self._inner_dict[key] return new if __name__ == '__main__': bm = BasicMap() bm2 = BasicMap({'x': 32, 'y': 64, 'z': 128}) bm3 = BasicMap(bm2) bm4 = BasicMap() assert bm2 == bm3 assert bm != bm2 bm['a'] = 1 bm['b'] = 2 
bm['x'] = 16 bm4.initialize_keys(['d', 'e', 'f', 'g', 'h', 'i'], 0) assert bm < bm4 assert bm <= bm4 assert bm4 > bm2 assert bm4 >= bm2 bm5 = bm | bm2 # merges bm and bm2, bm2 has precedence over bm bm6 = bm & bm2 # keeps common keys between bm and bm2, bm2 has precedence over bm bm7 = bm ^ bm2 # keeps common keys between bm and bm2, bm has precedence over bm2 bm8 = bm + bm2 # merges bm and bm2, bm has precedence over bm2 print('BM5', bm5) print('BM6', bm6) print('BM7', bm7) print('BM8', bm8)
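A small sketch of how the safe_get trigger flags behave (not part of the module; it only exercises the ESMap class defined above):

m = ESMap({'a': None, 'b': '', 'c': 'None', 'd': 7})
assert m.safe_get('a', 'fallback') == 'fallback'        # None value triggers the default
assert m.safe_get('b', 'fallback') == 'fallback'        # empty string triggers the default
assert m.safe_get('c', 'fallback') == 'fallback'        # stringified 'None' triggers the default
assert m.safe_get('missing', 'fallback') == 'fallback'  # absent key triggers the default
assert m.safe_get('d', 'fallback') == 7                 # ordinary value is returned unchanged
assert m.safe_get('b', 'fallback', triggers_on_empty_string=False) == ''  # flag disabled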
export default "M8.6 9.6C9 10.2 9.5 10.7 10.2 11H14.2C14.5 10.9 14.7 10.7 14.9 10.5C15.9 9.5 16.3 8 15.8 6.7L15.7 6.5C15.6 6.2 15.4 6 15.2 5.8C15.1 5.6 14.9 5.5 14.8 5.3C14.4 5 14 4.7 13.6 4.3C12.7 3.4 12.6 2 13.1 1C12.6 1.1 12.1 1.4 11.7 1.8C10.2 3 9.6 5.1 10.3 7V7.2C10.3 7.3 10.2 7.4 10.1 7.5C10 7.6 9.8 7.5 9.7 7.4L9.6 7.3C9 6.5 8.9 5.3 9.3 4.3C8.4 5.1 7.9 6.4 8 7.7C8 8 8.1 8.3 8.2 8.6C8.2 8.9 8.4 9.3 8.6 9.6M12.3 8.1C12.4 7.6 12.2 7.2 12.1 6.8C12 6.4 12 6 12.2 5.6L12.5 6.2C12.9 6.8 13.6 7 13.8 7.8V8.1C13.8 8.6 13.6 9.1 13.3 9.4C13.1 9.5 12.9 9.7 12.7 9.7C12.1 9.9 11.4 9.6 11 9.2C11.8 9.2 12.2 8.6 12.3 8.1M15 12V14H14L13 22H11L10 14H9V12H15Z"
// @flow strict import getIcon from './get-icon'; import { ICONS } from '../constants'; test('getIcon', () => { expect(getIcon('twitter')).toBe(ICONS.TWITTER); expect(getIcon('github')).toBe(ICONS.GITHUB); expect(getIcon('vkontakte')).toBe(ICONS.VKONTAKTE); expect(getIcon('telegram')).toEqual(ICONS.TELEGRAM); expect(getIcon('email')).toEqual(ICONS.EMAIL); expect(getIcon('rss')).toEqual(ICONS.RSS); expect(getIcon('linkedin')).toEqual(ICONS.LINKEDIN); expect(getIcon('instagram')).toEqual(ICONS.INSTAGRAM); expect(getIcon('line')).toEqual(ICONS.LINE); expect(getIcon('facebook')).toEqual(ICONS.FACEBOOK); expect(getIcon('gitlab')).toEqual(ICONS.GITLAB); expect(getIcon('weibo')).toEqual(ICONS.WEIBO); expect(getIcon('codepen')).toEqual(ICONS.CODEPEN); expect(getIcon('youtube')).toEqual(ICONS.YOUTUBE); expect(getIcon('soundcloud')).toEqual(ICONS.SOUNDCLOUD); expect(getIcon('medium')).toEqual(ICONS.MEDIUM); });
""" Copyright (c) 2018 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import numpy as np import traceback from collections import namedtuple from caffe._caffe import log as LOG from caffe._caffe import Layer as BaseLayer InputDetection = namedtuple('InputDetection', 'item_id, det_conf, anchor_id,' 'xmin, ymin, xmax, ymax,' 'x_pos, y_pos') OutputDetection = namedtuple('OutputDetection', 'item_id, action,' 'det_conf, action_conf,' 'xmin, ymin, xmax, ymax,' 'anchor_id, x_pos, y_pos') INPUT_RECORD_SIZE = 10 OUTPUT_RECORD_SIZE = 11 class ExtendedActionsDetectionOutputLayer(BaseLayer): @staticmethod def _translate_prediction(record): bbox = InputDetection(item_id=int(record[0]), det_conf=float(record[2]), anchor_id=int(record[7]), xmin=float(record[3]), ymin=float(record[4]), xmax=float(record[5]), ymax=float(record[6]), x_pos=int(record[8]), y_pos=int(record[9])) return bbox @staticmethod def _parse_detections(data, record_size, converter): assert data.size % record_size == 0, 'incorrect record_size' records = data.reshape([-1, record_size]) detections = [] for record in records: detection = converter(record) detections.append(detection) return detections @staticmethod def _match_detections_with_actions(detections, anchors): actions = [] for det in detections: action_prediction =\ anchors[det.anchor_id][det.item_id, det.y_pos, det.x_pos, :] action_label = np.argmax(action_prediction) action_conf = np.max(action_prediction) action = OutputDetection(item_id=det.item_id, action=action_label, det_conf=det.det_conf, action_conf=action_conf, xmin=det.xmin, ymin=det.ymin, xmax=det.xmax, ymax=det.ymax, anchor_id=det.anchor_id, x_pos=det.x_pos, y_pos=det.y_pos) actions.append(action) return actions @staticmethod def _convert_actions_to_blob(actions, record_size): records = [] for action in actions: record = [action.item_id, action.action, action.det_conf, action.action_conf, action.xmin, action.ymin, action.xmax, action.ymax, action.anchor_id, action.x_pos, action.y_pos] assert len(record) == record_size records.append(record) out_blob = np.array(records, dtype=np.float32) out_blob = out_blob.reshape([1, 1, len(records), record_size]) return out_blob def _load_params(self, param_str): layer_params = eval(param_str) assert 'num_anchors' in layer_params self._num_anchors = layer_params['num_anchors'] def _init_states(self): pass def setup(self, bottom, top): self._load_params(self.param_str) self._init_states() def forward(self, bottom, top): try: assert len(bottom) == self._num_anchors + 1 assert len(top) == 1 detections_data = np.array(bottom[0].data) anchors_data = [] for i in xrange(self._num_anchors): anchors_data.append(np.array(bottom[i + 1].data)) all_detections = self._parse_detections( detections_data, INPUT_RECORD_SIZE, self._translate_prediction) all_actions = self._match_detections_with_actions(all_detections, anchors_data) matches_blob = self._convert_actions_to_blob(all_actions, OUTPUT_RECORD_SIZE) out_shape = matches_blob.shape top[0].reshape(out_shape[0], out_shape[1], out_shape[2], out_shape[3]) 
top[0].data[...] = matches_blob except Exception: LOG('ExtendedActionsDetectionOutputLayer exception: {}'.format(traceback.format_exc())) exit() def backward(self, top, propagate_down, bottom): pass def reshape(self, bottom, top): top[0].reshape(1, 1, 1, OUTPUT_RECORD_SIZE)
// Copyright 2009 the Sputnik authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /** * @name: S15.5.5.1_A1; * @section: 15.5.5.1; * @assertion: length property contains the number of characters in the String value represented by this String object; * @description: Create strings and check its length; */ var __str__instance = new String("ABC\u0041\u0042\u0043"); ////////////////////////////////////////////////////////////////////////////// //CHECK#1 if (__str__instance.length !== 6) { $ERROR('#1: var __str__instance = new String("ABC\\u0041\\u0042\\u0043"); __str__instance.length === 6, where __str__instance is new String("ABC\\u0041\\u0042\\u0043"). Actual: __str__instance.length ==='+__str__instance.length ); } // ////////////////////////////////////////////////////////////////////////////// __str__instance = new String; ////////////////////////////////////////////////////////////////////////////// //CHECK#2 if (__str__instance.length !== 0) { $ERROR('#2: __str__instance = new String; __str__instance.length === 0, where __str__instance is new String. Actual: __str__instance.length ==='+__str__instance.length ); } // //////////////////////////////////////////////////////////////////////////////
module.exports = { ...require("./25"), ...require("./antonia"), ...require("./arantxa"), ...require("./axelserrat"), ...require("./azriel"), ...require("./caps"), ...require("./da_uiz"), ...require("./f"), ...require("./fibonaxis"), ...require("./holi"), ...require("./linen"), ...require("./love"), ...require("./manueh"), ...require("./matiasdrogas"), ...require("./matutec"), ...require("./monikml"), ...require("./periko"), ...require("./pozo"), ...require("./sartenazo"), };
const db = require('../data/db-config');

function getPlants() {
  return db('plants');
}

function getPlantByID(plant_id) {
  return db('plants').where('plant_id', plant_id).first();
}

function getPlantsByUserId(user_id) {
  // select * from plants
  // left join user_plants
  //   on user_plants.plant_id = plants.plant_id
  // where user_plants.user_id = {user_id};
  return db('plants')
    .leftJoin('user_plants', {
      'user_plants.plant_id': 'plants.plant_id',
    })
    .where('user_plants.user_id', user_id);
}

async function createUser(user) {
  const [newUser] = await db('plants').insert(user, [
    'plant_id',
    'username',
    'password',
    'phoneNumber',
  ]);
  return newUser;
}

async function updateUser(user, plant_id) {
  const [updatedUser] = await db('plants')
    .where('plant_id', plant_id)
    .update(user, ['plant_id', 'username', 'password', 'phoneNumber']);
  return updatedUser;
}

async function deleteUser(plant_id) {
  const [deletedUser] = await db('plants')
    .where('plant_id', plant_id)
    .delete(['plant_id', 'username', 'password', 'phoneNumber']);
  return deletedUser;
}

module.exports = {
  getPlants,
  getPlantByID,
  getPlantsByUserId,
  createUser,
  updateUser,
  deleteUser,
};
from kandbox_planner.planner_engine.rl.env.reward.reward_function import RewardFunction import kandbox_planner.util.planner_date_util as date_util class WithinWorkingHourReward(RewardFunction): """ Has the following members """ rule_code = "within_working_hour" rule_name = "Job is between start and end time of the worker" message_template = "Job time ({}-{}) is out of working hour" def evalute_normal_single_worker_n_job(self, env=None, job = None): # worker = None, worker = env.workers_dict[job['scheduled_worker_code']] result = { 'score': 0, 'message':[self.rule_name], } # return score, violated_rules (negative values) # return self.weight * 1 for day_i, working_slot in enumerate(worker['working_minutes']): working_slot_with_day = [ working_slot[0] + (24*60*day_i), working_slot[1] + (24*60*day_i), ] cliped_slot = date_util.clip_time_period( p1=working_slot_with_day, p2=[job['assigned_start_minutes'], job['assigned_start_minutes'] + job['scheduled_duration_minutes']] ) if len(cliped_slot) > 1: if ( cliped_slot[0] == job['assigned_start_minutes'] ) & ( cliped_slot[1] == job['assigned_start_minutes'] + job['scheduled_duration_minutes']): result['score'] = 1 return result # Partial fit, reject for now #TODO result['score'] = -1 result['message'] = self.message_template.format(job['assigned_start_minutes'], job['assigned_start_minutes'] + job['scheduled_duration_minutes']) return result else: continue # If the start time does not fall in working hour, reject it. result['score'] = -1 result['message'] = self.message_template.format(job['assigned_start_minutes'], job['assigned_start_minutes'] + job['scheduled_duration_minutes']) return result ''' def evalute_action_normal(self, env=None, action = None): a_job = self.generate_virtural_job_from_action(env, action) worker = env.workers_dict[a_job['scheduled_worker_code']] return self.evalute_normal_single_worker(env, worker, a_job) def evalute_normal(self, env=None, job_index_list = []): # return score, violated_rules (negative values) # return self.weight * 1 if len(job_index_list) < 1: return -1 job_index_list = []#TODO for job_i in job_index_list: cur_job = env.jobs[job_i] if len(cur_job['assigned_workers'] ) < 1: # Job not assigned return -1 found_partial_slot = False no_slot_found = False for assignment in cur_job['assigned_workers']: if self.evalute_normal_single_worker(env, env.workers_dict[assignment['worker_code']], cur_job ) == -1: return -1 return 1 '''
import { loadFixture, Nuxt } from '../utils' describe.posix('basic sockets', () => { test('/', async () => { const options = await loadFixture('sockets') const nuxt = new Nuxt(options) await nuxt.ready() await nuxt.server.listen() const { html } = await nuxt.server.renderRoute('/') expect(html).toContain('<h1>Served over sockets!</h1>') await nuxt.close() }) })
import React from 'react' import Textfit from 'react-textfit' import styled from '@emotion/styled' const Container = styled.div([], props => ({ fontWeight: 'bold', width: '100vw', padding: '0 2.5vw', textAlign: 'center', ...(props.background && { background: props.background }), ...(props.color && { color: props.color }), ...(props.caps && { textTransform: 'uppercase' }) })) export default function Fit({ caps, children, className, compressor, max = 250, style = {}, ...rest }) { return ( <Container className={className} style={style} caps={caps} {...rest}> <Textfit compressor={compressor} mode="single" max={max}>{children}</Textfit> </Container> ) }
module.exports = { extends: [ 'stylelint-config-recommended-scss', 'stylelint-config-rational-order' ], ignoreFiles: ['src/**/dist/*.{css,scss}'], rules: { 'font-family-no-missing-generic-family-keyword': null, 'no-descending-specificity': null, 'selector-pseudo-class-no-unknown': [ true, { ignorePseudoClasses: ['global'] } ] } }
zipdata({"4818501":[23,"北名古屋市","熊之庄","御榊60番地"],"4810038":[23,"北名古屋市","徳重"],"4810046":[23,"北名古屋市","石橋"],"4810033":[23,"北名古屋市","西之保"],"4810043":[23,"北名古屋市","沖村"],"4818510":[23,"北名古屋市","熊之庄","十二社66-3"],"4810039":[23,"北名古屋市","法成寺"],"4810000":[23,"北名古屋市",""],"4818555":[23,"北名古屋市","山之腰","天神東18"],"4818543":[23,"北名古屋市","九之坪","西城屋敷51"],"4810036":[23,"北名古屋市","山之腰"],"4810034":[23,"北名古屋市","北野"],"4818531":[23,"北名古屋市","西之保","清水田15番地"],"4810011":[23,"北名古屋市","高田寺"],"4818504":[23,"北名古屋市","熊之庄","古井281"],"4810001":[23,"北名古屋市","六ツ師"],"4818558":[23,"北名古屋市","沖村","西ノ川1"],"4810013":[23,"北名古屋市","二子"],"4818511":[23,"北名古屋市","高田寺","東の川12"],"4810032":[23,"北名古屋市","弥勒寺西"],"4810031":[23,"北名古屋市","弥勒寺東"],"4810006":[23,"北名古屋市","熊之庄"],"4818515":[23,"北名古屋市","熊之庄","十二社45-2"],"4818550":[23,"北名古屋市","沖村","山ノ神54番地"],"4818503":[23,"北名古屋市","熊之庄","古井281"],"4818601":[23,"北名古屋市","宇福寺","神明65"],"4810041":[23,"北名古屋市","九之坪"],"4818535":[23,"北名古屋市","徳重","西沼65番地"],"4818681":[23,"北名古屋市","沖村","権現35-2"],"4818540":[23,"北名古屋市","沖村","岡1"],"4810045":[23,"北名古屋市","中之郷"],"4818513":[23,"北名古屋市","鹿田","東若宮3962-2"],"4810003":[23,"北名古屋市","能田"],"4810044":[23,"北名古屋市","加島新田"],"4810035":[23,"北名古屋市","宇福寺"],"4818539":[23,"北名古屋市","徳重","北出52-2"],"4818505":[23,"北名古屋市","六ツ師","女夫越1番地"],"4810042":[23,"北名古屋市","野崎"],"4810002":[23,"北名古屋市","片場"],"4818502":[23,"北名古屋市","熊之庄","古井281"],"4810012":[23,"北名古屋市","久地野"],"4810005":[23,"北名古屋市","薬師寺"],"4818678":[23,"北名古屋市","宇福寺","神明51"],"4818508":[23,"北名古屋市","六ツ師","女夫越5"],"4818588":[23,"北名古屋市","熊之庄","御榊53番地"],"4810004":[23,"北名古屋市","鹿田"],"4818533":[23,"北名古屋市","九之坪","半野27"],"4818615":[23,"北名古屋市","西之保","青野東73"],"4810037":[23,"北名古屋市","鍜治ケ一色"],"4810014":[23,"北名古屋市","井瀬木"],"4818526":[23,"北名古屋市","片場","新町29番地"]} );
/* Author: Sai Bhargav */

// Static responses
const beginMessage = `Hi. Thank you for using your personal assistant.\nYou could try asking things like:\n1. Who won the player of the match, with the teams and the season\n2. Who won the toss\n3. Who won the match\nYou can also chit-chat with the bot by asking some friendly questions, and the assistant will respond to you.`

const restartEN = `What else can I do for you?`

const StartDialog = `Hola, welcome to the cricket assistant.\n\nYou can chit-chat with me.\n\nYou can ask me:\n\nWho won the Man of the Match,\n\nwho is the winner, and\n\nwho won the toss from the 2008-2019 IPL seasons.`

module.exports = {
    // TVresHI, TVresEN,
    beginMessage, restartEN, StartDialog
}
""" Copyright (c) 2015-2021 Ad Schellevis <[email protected]> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ import ipaddress import os import subprocess import sys def parse_address(addr): parse_result = {'port': '0'} if addr.count(':') > 1: # parse IPv6 address parse_result['addr'] = addr.split('[')[0] parse_result['ipproto'] = 'ipv6' if addr.find('[') > -1: parse_result['port'] = addr.split('[')[1].split(']')[0] else: # parse IPv4 address parse_result['ipproto'] = 'ipv4' parse_result['addr'] = addr.split(':')[0] if addr.find(':') > -1: parse_result['port'] = addr.split(':')[1] return parse_result def fetch_rule_labels(): result = dict() descriptions = dict() # query descriptions from active ruleset so we can search and display rule descriptions as well. 
if os.path.isfile('/tmp/rules.debug'): with open('/tmp/rules.debug', "rt", encoding="utf-8") as f_in: for line in f_in: lbl = line.split(' label ')[-1] if line.find(' label ') > -1 else "" rule_label = lbl.split('"')[1] if lbl.count('"') >= 2 else None descriptions[rule_label] = ''.join(lbl.split('"')[2:]).strip().strip('# : ') sp = subprocess.run(['/sbin/pfctl', '-vvPsr'], capture_output=True, text=True) for line in sp.stdout.strip().split('\n'): if line.startswith('@'): line_id = line.split()[0][1:] if line.find(' label ') > -1: rid = ''.join(line.split(' label ')[-1:]).strip()[1:].split('"')[0] result[line_id] = {'rid': rid, 'descr': None} if rid in descriptions: result[line_id]['descr'] = descriptions[rid] return result def query_states(rule_label, filter_str): result = list() try: filter_network = ipaddress.ip_network(filter_str.strip()) except ValueError: filter_network = None rule_labels = fetch_rule_labels() sp = subprocess.run(['/sbin/pfctl', '-vvs', 'state'], capture_output=True, text=True) record = None for line in sp.stdout.strip().split('\n'): parts = line.split() if line.startswith(" ") and len(parts) > 1 and record: if parts[0] == 'age': for part in line.split(","): part = part.strip() if part.startswith("rule "): record["rule"] = part.split()[-1] if record["rule"] in rule_labels: record["label"] = rule_labels[record["rule"]]["rid"] record["descr"] = rule_labels[record["rule"]]["descr"] elif part.startswith("age "): record["age"] = part.split()[-1] elif part.startswith("expires in"): record["expires"] = part.split()[-1] elif part.endswith("pkts"): record["pkts"] = [int(s) for s in part.split()[0].split(':')] elif part.endswith("bytes"): record["bytes"] = [int(s) for s in part.split()[0].split(':')] elif parts[0] == "id:": # XXX: in order to kill a state, we need to pass both the id and the creator, so it seeems to make # sense to uniquely identify the state by the combined number record["id"] = "%s/%s" % (parts[1], parts[3]) search_line = " ".join(str(item) for item in filter(None, record.values())) if rule_label != "" and record['label'].lower().find(rule_label) == -1: # label continue elif filter_network is not None: try: match = False for field in ['src_addr', 'dst_addr', 'nat_addr']: addr = ipaddress.ip_network(record[field]) if field is not None and ipaddress.ip_network(filter_network).overlaps(addr): match = True break if not match: continue except: continue elif filter_str != "" and search_line.lower().find(filter_str.lower()) == -1: # apply filter when provided continue if parts[0] == "id:": # append to response result.append(record) elif len(parts) >= 6: record = { 'label': '', 'descr': '', 'nat_addr': None, 'nat_port': None, 'iface': parts[0], 'proto': parts[1], 'src_addr': parse_address(parts[2])['addr'], 'src_port': parse_address(parts[2])['port'], 'ipproto': parse_address(parts[2])['ipproto'] } if parts[3].find('(') > -1: # NAT enabled record['nat_addr'] = parts[3][1:].split(':')[0] if parts[3].find(':') > -1: record['nat_port'] = parts[3].split(':')[1][:-1] record['dst_addr'] = parse_address(parts[-2])['addr'] record['dst_port'] = parse_address(parts[-2])['port'] if parts[-3] == '->': record['direction'] = 'out' else: record['direction'] = 'in' record['state'] = parts[-1] return result
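pfctl prints IPv4 endpoints as addr:port but IPv6 endpoints as addr[port], which is why parse_address above branches on the colon count. A few expected results, as a sketch (not part of the OPNsense script itself):

assert parse_address('192.168.1.10:443') == {'port': '443', 'ipproto': 'ipv4', 'addr': '192.168.1.10'}
assert parse_address('10.0.0.1') == {'port': '0', 'ipproto': 'ipv4', 'addr': '10.0.0.1'}
assert parse_address('2001:db8::1[8080]') == {'port': '8080', 'addr': '2001:db8::1', 'ipproto': 'ipv6'}
assert parse_address('fe80::1') == {'port': '0', 'addr': 'fe80::1', 'ipproto': 'ipv6'}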
/* * Header Messages * * This contains all the text for the Header component. */ import { defineMessages } from "react-intl"; export const scope = "boilerplate.components.Header"; export default defineMessages({ home: { id: `${scope}.home`, defaultMessage: "HOME", }, about: { id: `${scope}.about`, defaultMessage: "ABOUT", }, projects: { id: `${scope}.projects`, defaultMessage: "PROJECTS", }, resume: { id: `${scope}.resume`, defaultMessage: "RESUME", }, });
(function() { /** * Image utility. * @static * @constructor */ tracking.Image = {}; /** * Computes gaussian blur. Adapted from * https://github.com/kig/canvasfilters. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array. * @param {number} width The image width. * @param {number} height The image height. * @param {number} diameter Gaussian blur diameter, must be greater than 1. * @return {array} The edge pixels in a linear [r,g,b,a,...] array. */ tracking.Image.blur = function(pixels, width, height, diameter) { diameter = Math.abs(diameter); if (diameter <= 1) { throw new Error('Diameter should be greater than 1.'); } var radius = diameter / 2; var len = Math.ceil(diameter) + (1 - (Math.ceil(diameter) % 2)); var weights = new Float32Array(len); var rho = (radius + 0.5) / 3; var rhoSq = rho * rho; var gaussianFactor = 1 / Math.sqrt(2 * Math.PI * rhoSq); var rhoFactor = -1 / (2 * rho * rho); var wsum = 0; var middle = Math.floor(len / 2); for (var i = 0; i < len; i++) { var x = i - middle; var gx = gaussianFactor * Math.exp(x * x * rhoFactor); weights[i] = gx; wsum += gx; } for (var j = 0; j < weights.length; j++) { weights[j] /= wsum; } return this.separableConvolve(pixels, width, height, weights, weights, false); }; /** * Computes the integral image for summed, squared, rotated and sobel pixels. * @param {array} pixels The pixels in a linear [r,g,b,a,...] array to loop * through. * @param {number} width The image width. * @param {number} height The image height. * @param {array} opt_integralImage Empty array of size `width * height` to * be filled with the integral image values. If not specified compute sum * values will be skipped. * @param {array} opt_integralImageSquare Empty array of size `width * * height` to be filled with the integral image squared values. If not * specified compute squared values will be skipped. * @param {array} opt_tiltedIntegralImage Empty array of size `width * * height` to be filled with the rotated integral image values. If not * specified compute sum values will be skipped. * @param {array} opt_integralImageSobel Empty array of size `width * * height` to be filled with the integral image of sobel values. If not * specified compute sobel filtering will be skipped. 
* @static */ tracking.Image.computeIntegralImage = function(pixels, width, height, opt_integralImage, opt_integralImageSquare, opt_tiltedIntegralImage, opt_integralImageSobel) { if (arguments.length < 4) { throw new Error('You should specify at least one output array in the order: sum, square, tilted, sobel.'); } var pixelsSobel; if (opt_integralImageSobel) { pixelsSobel = tracking.Image.sobel(pixels, width, height); } for (var i = 0; i < height; i++) { for (var j = 0; j < width; j++) { var w = i * width * 4 + j * 4; var pixel = ~~(pixels[w] * 0.299 + pixels[w + 1] * 0.587 + pixels[w + 2] * 0.114); if (opt_integralImage) { this.computePixelValueSAT_(opt_integralImage, width, i, j, pixel); } if (opt_integralImageSquare) { this.computePixelValueSAT_(opt_integralImageSquare, width, i, j, pixel * pixel); } if (opt_tiltedIntegralImage) { var w1 = w - width * 4; var pixelAbove = ~~(pixels[w1] * 0.299 + pixels[w1 + 1] * 0.587 + pixels[w1 + 2] * 0.114); this.computePixelValueRSAT_(opt_tiltedIntegralImage, width, i, j, pixel, pixelAbove || 0); } if (opt_integralImageSobel) { this.computePixelValueSAT_(opt_integralImageSobel, width, i, j, pixelsSobel[w]); } } } }; /** * Helper method to compute the rotated summed area table (RSAT) by the * formula: * * RSAT(x, y) = RSAT(x-1, y-1) + RSAT(x+1, y-1) - RSAT(x, y-2) + I(x, y) + I(x, y-1) * * @param {number} width The image width. * @param {array} RSAT Empty array of size `width * height` to be filled with * the integral image values. If not specified compute sum values will be * skipped. * @param {number} i Vertical position of the pixel to be evaluated. * @param {number} j Horizontal position of the pixel to be evaluated. * @param {number} pixel Pixel value to be added to the integral image. * @static * @private */ tracking.Image.computePixelValueRSAT_ = function(RSAT, width, i, j, pixel, pixelAbove) { var w = i * width + j; RSAT[w] = (RSAT[w - width - 1] || 0) + (RSAT[w - width + 1] || 0) - (RSAT[w - width - width] || 0) + pixel + pixelAbove; }; /** * Helper method to compute the summed area table (SAT) by the formula: * * SAT(x, y) = SAT(x, y-1) + SAT(x-1, y) + I(x, y) - SAT(x-1, y-1) * * @param {number} width The image width. * @param {array} SAT Empty array of size `width * height` to be filled with * the integral image values. If not specified compute sum values will be * skipped. * @param {number} i Vertical position of the pixel to be evaluated. * @param {number} j Horizontal position of the pixel to be evaluated. * @param {number} pixel Pixel value to be added to the integral image. * @static * @private */ tracking.Image.computePixelValueSAT_ = function(SAT, width, i, j, pixel) { var w = i * width + j; SAT[w] = (SAT[w - width] || 0) + (SAT[w - 1] || 0) + pixel - (SAT[w - width - 1] || 0); }; /** * Converts a color from a colorspace based on an RGB color model to a * grayscale representation of its luminance. The coefficients represent the * measured intensity perception of typical trichromat humans, in * particular, human vision is most sensitive to green and least sensitive * to blue. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array. * @param {number} width The image width. * @param {number} height The image height. * @param {boolean} fillRGBA If the result should fill all RGBA values with the gray scale * values, instead of returning a single value per pixel. * @param {Uint8ClampedArray} The grayscale pixels in a linear array ([p,p,p,a,...] if fillRGBA * is true and [p1, p2, p3, ...] if fillRGBA is false). 
* @static */ tracking.Image.grayscale = function(pixels, width, height, fillRGBA) { var gray = new Uint8ClampedArray(fillRGBA ? pixels.length : pixels.length >> 2); var p = 0; var w = 0; for (var i = 0; i < height; i++) { for (var j = 0; j < width; j++) { var value = pixels[w] * 0.299 + pixels[w + 1] * 0.587 + pixels[w + 2] * 0.114; gray[p++] = value; if (fillRGBA) { gray[p++] = value; gray[p++] = value; gray[p++] = pixels[w + 3]; } w += 4; } } return gray; }; /** * Fast horizontal separable convolution. A point spread function (PSF) is * said to be separable if it can be broken into two one-dimensional * signals: a vertical and a horizontal projection. The convolution is * performed by sliding the kernel over the image, generally starting at the * top left corner, so as to move the kernel through all the positions where * the kernel fits entirely within the boundaries of the image. Adapted from * https://github.com/kig/canvasfilters. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array. * @param {number} width The image width. * @param {number} height The image height. * @param {array} weightsVector The weighting vector, e.g [-1,0,1]. * @param {number} opaque * @return {array} The convoluted pixels in a linear [r,g,b,a,...] array. */ tracking.Image.horizontalConvolve = function(pixels, width, height, weightsVector, opaque) { var side = weightsVector.length; var halfSide = Math.floor(side / 2); var output = new Float32Array(width * height * 4); var alphaFac = opaque ? 1 : 0; for (var y = 0; y < height; y++) { for (var x = 0; x < width; x++) { var sy = y; var sx = x; var offset = (y * width + x) * 4; var r = 0; var g = 0; var b = 0; var a = 0; for (var cx = 0; cx < side; cx++) { var scy = sy; var scx = Math.min(width - 1, Math.max(0, sx + cx - halfSide)); var poffset = (scy * width + scx) * 4; var wt = weightsVector[cx]; r += pixels[poffset] * wt; g += pixels[poffset + 1] * wt; b += pixels[poffset + 2] * wt; a += pixels[poffset + 3] * wt; } output[offset] = r; output[offset + 1] = g; output[offset + 2] = b; output[offset + 3] = a + alphaFac * (255 - a); } } return output; }; /** * Fast vertical separable convolution. A point spread function (PSF) is * said to be separable if it can be broken into two one-dimensional * signals: a vertical and a horizontal projection. The convolution is * performed by sliding the kernel over the image, generally starting at the * top left corner, so as to move the kernel through all the positions where * the kernel fits entirely within the boundaries of the image. Adapted from * https://github.com/kig/canvasfilters. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array. * @param {number} width The image width. * @param {number} height The image height. * @param {array} weightsVector The weighting vector, e.g [-1,0,1]. * @param {number} opaque * @return {array} The convoluted pixels in a linear [r,g,b,a,...] array. */ tracking.Image.verticalConvolve = function(pixels, width, height, weightsVector, opaque) { var side = weightsVector.length; var halfSide = Math.floor(side / 2); var output = new Float32Array(width * height * 4); var alphaFac = opaque ? 
1 : 0; for (var y = 0; y < height; y++) { for (var x = 0; x < width; x++) { var sy = y; var sx = x; var offset = (y * width + x) * 4; var r = 0; var g = 0; var b = 0; var a = 0; for (var cy = 0; cy < side; cy++) { var scy = Math.min(height - 1, Math.max(0, sy + cy - halfSide)); var scx = sx; var poffset = (scy * width + scx) * 4; var wt = weightsVector[cy]; r += pixels[poffset] * wt; g += pixels[poffset + 1] * wt; b += pixels[poffset + 2] * wt; a += pixels[poffset + 3] * wt; } output[offset] = r; output[offset + 1] = g; output[offset + 2] = b; output[offset + 3] = a + alphaFac * (255 - a); } } return output; }; /** * Fast separable convolution. A point spread function (PSF) is said to be * separable if it can be broken into two one-dimensional signals: a * vertical and a horizontal projection. The convolution is performed by * sliding the kernel over the image, generally starting at the top left * corner, so as to move the kernel through all the positions where the * kernel fits entirely within the boundaries of the image. Adapted from * https://github.com/kig/canvasfilters. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array. * @param {number} width The image width. * @param {number} height The image height. * @param {array} horizWeights The horizontal weighting vector, e.g [-1,0,1]. * @param {array} vertWeights The vertical vector, e.g [-1,0,1]. * @param {number} opaque * @return {array} The convoluted pixels in a linear [r,g,b,a,...] array. */ tracking.Image.separableConvolve = function(pixels, width, height, horizWeights, vertWeights, opaque) { var vertical = this.verticalConvolve(pixels, width, height, vertWeights, opaque); return this.horizontalConvolve(vertical, width, height, horizWeights, opaque); }; /** * Compute image edges using Sobel operator. Computes the vertical and * horizontal gradients of the image and combines the computed images to * find edges in the image. The way we implement the Sobel filter here is by * first grayscaling the image, then taking the horizontal and vertical * gradients and finally combining the gradient images to make up the final * image. Adapted from https://github.com/kig/canvasfilters. * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array. * @param {number} width The image width. * @param {number} height The image height. * @return {array} The edge pixels in a linear [r,g,b,a,...] array. */ tracking.Image.sobel = function(pixels, width, height) { pixels = this.grayscale(pixels, width, height, true); var output = new Float32Array(width * height * 4); var sobelSignVector = new Float32Array([-1, 0, 1]); var sobelScaleVector = new Float32Array([1, 2, 1]); var vertical = this.separableConvolve(pixels, width, height, sobelSignVector, sobelScaleVector); var horizontal = this.separableConvolve(pixels, width, height, sobelScaleVector, sobelSignVector); for (var i = 0; i < output.length; i += 4) { var v = vertical[i]; var h = horizontal[i]; var p = Math.sqrt(h * h + v * v); output[i] = p; output[i + 1] = p; output[i + 2] = p; output[i + 3] = 255; } return output; }; /** * Equalizes the histogram of a grayscale image, normalizing the * brightness and increasing the contrast of the image. * @param {pixels} pixels The grayscale pixels in a linear array. * @param {number} width The image width. * @param {number} height The image height. * @return {array} The equalized grayscale pixels in a linear array. 
*/ tracking.Image.equalizeHist = function(pixels, width, height){ var equalized = new Uint8ClampedArray(pixels.length); var histogram = new Array(256); for(var i=0; i < 256; i++) histogram[i] = 0; for(var i=0; i < pixels.length; i++){ equalized[i] = pixels[i]; histogram[pixels[i]]++; } var prev = histogram[0]; for(var i=0; i < 256; i++){ histogram[i] += prev; prev = histogram[i]; } var norm = 255 / pixels.length; for(var i=0; i < pixels.length; i++) equalized[i] = (histogram[pixels[i]] * norm + 0.5) | 0; return equalized; } }());
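The grayscale, separable-convolution, Sobel and histogram-equalization helpers above all operate on flat pixel arrays, so they can be chained straight off a canvas. A minimal usage sketch, assuming a browser page where the tracking global defined above is loaded; the canvas element id is hypothetical:

// Grab raw RGBA pixels from a canvas (element id 'frame' is made up for this sketch).
var canvas = document.getElementById('frame');
var context = canvas.getContext('2d');
var imageData = context.getImageData(0, 0, canvas.width, canvas.height);

// Edge map: sobel() grayscales internally and returns a linear [r,g,b,a,...] Float32Array.
var edges = tracking.Image.sobel(imageData.data, canvas.width, canvas.height);

// Contrast stretch: equalizeHist() expects a single-channel grayscale array,
// so call grayscale() with fillRGBA = false (one gray value per pixel).
var gray = tracking.Image.grayscale(imageData.data, canvas.width, canvas.height, false);
var equalized = tracking.Image.equalizeHist(gray, canvas.width, canvas.height);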
"use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.serve = exports.configureRig = exports.deployAzureStorage = exports.packageSolution = exports.configureWebpack = void 0; const ConfigureWebpackTask_1 = require("./webpack/ConfigureWebpackTask"); const PackageSolutionTask_1 = require("./packageSolution/PackageSolutionTask"); const DeployAzureStorageTask_1 = require("./deployAzureStorage/DeployAzureStorageTask"); const ConfigureRigTask_1 = require("./configJson/ConfigureRigTask"); const SpfxServeTask_1 = require("./spfxServe/SpfxServeTask"); var GulpTaskTerminalProvider_1 = require("./utilities/GulpTaskTerminalProvider"); Object.defineProperty(exports, "GulpTaskTerminalProvider", { enumerable: true, get: function () { return GulpTaskTerminalProvider_1.GulpTaskTerminalProvider; } }); __exportStar(require("./cumulativeManifestProcessor/GulpTaskCumulativeManifestProcessor"), exports); exports.configureWebpack = new ConfigureWebpackTask_1.ConfigureWebpackTask(); exports.packageSolution = new PackageSolutionTask_1.PackageSolutionTask(); exports.deployAzureStorage = new DeployAzureStorageTask_1.DeployAzureStorageTask(); exports.configureRig = new ConfigureRigTask_1.ConfigureRigTask(); exports.serve = new SpfxServeTask_1.SpfxServeTask(); //# sourceMappingURL=index.js.map
a = [] if not a: print('he ')
import React from "react"; import { connect } from "react-redux"; import styles from "./PartyInput.module.css"; import coverStyles from "../../CoverPage.module.css"; import changeParty from "../../../../../actions/coverPage/changeParty"; class PartyInput extends React.Component { state = { parties: { plaintiff: { text: "", error: true }, defendant: { text: "", error: true }, }, }; componentDidMount = () => { const { propParties } = this.props; this.setState({ parties: propParties }); }; onChange = (e) => { const { changeParty } = this.props; const { parties } = this.state; this.setState({ parties: { ...parties, [e.target.name]: { ...parties[e.target.name], text: e.target.value, }, }, }); changeParty(e); }; render() { const { party, propParties } = this.props; const { parties } = this.state; const errorStyle = propParties[party].error ? styles.error : ""; return ( <form className={coverStyles.form}> <input className={`${coverStyles.textInput} ${errorStyle}`} type="text" name={party} value={parties[party].text} placeholder={party} onChange={(e) => this.onChange(e)} id={`${party}Input`} /> </form> ); } } const mapStateToProps = (state) => { const { plaintiff, defendant } = state.cover; if (plaintiff !== undefined && defendant !== undefined) { return { propParties: { plaintiff, defendant, }, }; } else { return { propParties: { plaintiff: { text: "", error: true }, defendant: { text: "", error: true }, }, }; } }; export default connect(mapStateToProps, { changeParty })(PartyInput);
module.exports = { name: 'ping', description: 'Ping!', cooldown: 3, execute(message, args) { message.channel.send('Pong!'); }, };
/* * This header is generated by classdump-dyld 1.0 * on Saturday, June 1, 2019 at 6:52:24 PM Mountain Standard Time * Operating System: Version 12.1.1 (Build 16C5050a) * Image Source: /System/Library/PrivateFrameworks/AMPCoreUI.framework/AMPCoreUI * classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos. */ #import <AMPCoreUI/AMPCoreUI-Structs.h> #import <UIKitCore/UIViewController.h> @class UIImageView; @interface AMPPrivacyIconViewController : UIViewController { UIImageView* _imageView; } @property (nonatomic,retain) UIImageView * imageView; //@synthesize imageView=_imageView - In the implementation block -(id)initWithImage:(id)arg1 ; -(UIImageView *)imageView; -(CGSize)preferredContentSize; -(void)viewDidLoad; -(void)viewWillLayoutSubviews; -(void)setImageView:(UIImageView *)arg1 ; @end
/*! @azure/msal-browser v2.22.1 2022-03-07 */ 'use strict'; import { __extends, __awaiter, __generator } from '../_virtual/_tslib.js'; import { StringUtils, ThrottlingUtils, ClientAuthError } from '@azure/msal-common'; import { BrowserAuthError } from '../error/BrowserAuthError.js'; import { TemporaryCacheKeys, ApiId } from '../utils/BrowserConstants.js'; import { InteractionHandler } from './InteractionHandler.js'; /* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. */ var RedirectHandler = /** @class */ (function (_super) { __extends(RedirectHandler, _super); function RedirectHandler(authCodeModule, storageImpl, authCodeRequest, browserRequestLogger, browserCrypto) { var _this = _super.call(this, authCodeModule, storageImpl, authCodeRequest, browserRequestLogger) || this; _this.browserCrypto = browserCrypto; return _this; } /** * Redirects window to given URL. * @param urlNavigate */ RedirectHandler.prototype.initiateAuthRequest = function (requestUrl, params) { return __awaiter(this, void 0, void 0, function () { var navigationOptions, navigate; return __generator(this, function (_a) { switch (_a.label) { case 0: this.browserRequestLogger.verbose("RedirectHandler.initiateAuthRequest called"); if (!!StringUtils.isEmpty(requestUrl)) return [3 /*break*/, 7]; // Cache start page, returns to this page after redirectUri if navigateToLoginRequestUrl is true if (params.redirectStartPage) { this.browserRequestLogger.verbose("RedirectHandler.initiateAuthRequest: redirectStartPage set, caching start page"); this.browserStorage.setTemporaryCache(TemporaryCacheKeys.ORIGIN_URI, params.redirectStartPage, true); } // Set interaction status in the library. this.browserStorage.setTemporaryCache(TemporaryCacheKeys.CORRELATION_ID, this.authCodeRequest.correlationId, true); this.browserStorage.cacheCodeRequest(this.authCodeRequest, this.browserCrypto); this.browserRequestLogger.infoPii("RedirectHandler.initiateAuthRequest: Navigate to: " + requestUrl); navigationOptions = { apiId: ApiId.acquireTokenRedirect, timeout: params.redirectTimeout, noHistory: false }; if (!(typeof params.onRedirectNavigate === "function")) return [3 /*break*/, 4]; this.browserRequestLogger.verbose("RedirectHandler.initiateAuthRequest: Invoking onRedirectNavigate callback"); navigate = params.onRedirectNavigate(requestUrl); if (!(navigate !== false)) return [3 /*break*/, 2]; this.browserRequestLogger.verbose("RedirectHandler.initiateAuthRequest: onRedirectNavigate did not return false, navigating"); return [4 /*yield*/, params.navigationClient.navigateExternal(requestUrl, navigationOptions)]; case 1: _a.sent(); return [2 /*return*/]; case 2: this.browserRequestLogger.verbose("RedirectHandler.initiateAuthRequest: onRedirectNavigate returned false, stopping navigation"); return [2 /*return*/]; case 3: return [3 /*break*/, 6]; case 4: // Navigate window to request URL this.browserRequestLogger.verbose("RedirectHandler.initiateAuthRequest: Navigating window to navigate url"); return [4 /*yield*/, params.navigationClient.navigateExternal(requestUrl, navigationOptions)]; case 5: _a.sent(); return [2 /*return*/]; case 6: return [3 /*break*/, 8]; case 7: // Throw error if request URL is empty. this.browserRequestLogger.info("RedirectHandler.initiateAuthRequest: Navigate url is empty"); throw BrowserAuthError.createEmptyNavigationUriError(); case 8: return [2 /*return*/]; } }); }); }; /** * Handle authorization code response in the window. 
* @param hash */ RedirectHandler.prototype.handleCodeResponseFromHash = function (locationHash, state, authority, networkModule, clientId) { return __awaiter(this, void 0, void 0, function () { var stateKey, requestState, authCodeResponse, nonceKey, cachedNonce, cachedCcsCred, tokenResponse; return __generator(this, function (_a) { switch (_a.label) { case 0: this.browserRequestLogger.verbose("RedirectHandler.handleCodeResponse called"); // Check that location hash isn't empty. if (StringUtils.isEmpty(locationHash)) { throw BrowserAuthError.createEmptyHashError(locationHash); } // Interaction is completed - remove interaction status. this.browserStorage.setInteractionInProgress(false); stateKey = this.browserStorage.generateStateKey(state); requestState = this.browserStorage.getTemporaryCache(stateKey); if (!requestState) { throw ClientAuthError.createStateNotFoundError("Cached State"); } authCodeResponse = this.authModule.handleFragmentResponse(locationHash, requestState); nonceKey = this.browserStorage.generateNonceKey(requestState); cachedNonce = this.browserStorage.getTemporaryCache(nonceKey); // Assign code to request this.authCodeRequest.code = authCodeResponse.code; if (!authCodeResponse.cloud_instance_host_name) return [3 /*break*/, 2]; return [4 /*yield*/, this.updateTokenEndpointAuthority(authCodeResponse.cloud_instance_host_name, authority, networkModule)]; case 1: _a.sent(); _a.label = 2; case 2: authCodeResponse.nonce = cachedNonce || undefined; authCodeResponse.state = requestState; // Add CCS parameters if available if (authCodeResponse.client_info) { this.authCodeRequest.clientInfo = authCodeResponse.client_info; } else { cachedCcsCred = this.checkCcsCredentials(); if (cachedCcsCred) { this.authCodeRequest.ccsCredential = cachedCcsCred; } } // Remove throttle if it exists if (clientId) { ThrottlingUtils.removeThrottle(this.browserStorage, clientId, this.authCodeRequest); } return [4 /*yield*/, this.authModule.acquireToken(this.authCodeRequest, authCodeResponse)]; case 3: tokenResponse = _a.sent(); this.browserStorage.cleanRequestByState(state); return [2 /*return*/, tokenResponse]; } }); }); }; return RedirectHandler; }(InteractionHandler)); export { RedirectHandler }; //# sourceMappingURL=RedirectHandler.js.map
import React, { Fragment, useEffect, useState } from "react"; import { useTranslation } from "react-i18next"; import { BreakLine, Card, CardSubHeader, StatusTable, Row, SubmitBar, Loader, CardSectionHeader, ConnectingCheckPoints, CheckPoint, ActionBar, Menu, LinkButton, Toast, Rating, ActionLinks, } from "@egovernments/digit-ui-react-components"; import ActionModal from "./Modal"; import TLCaption from "../../../components/TLCaption"; import { useQueryClient } from "react-query"; import { Link, useHistory, useParams } from "react-router-dom"; import { actions } from "react-table"; const ApplicationDetails = (props) => { const tenantId = Digit.ULBService.getCurrentTenantId(); const state = tenantId.split(".")[0]; const { t } = useTranslation(); const history = useHistory(); const queryClient = useQueryClient(); let { id: applicationNumber } = useParams(); const [displayMenu, setDisplayMenu] = useState(false); const [selectedAction, setSelectedAction] = useState(null); const [config, setCurrentConfig] = useState({}); const [showModal, setShowModal] = useState(false); const [showToast, setShowToast] = useState(null); const DSO = Digit.UserService.hasAccess(["FSM_DSO"]) || false; // console.log("find DSO here", DSO) const { isLoading, isError, data: applicationDetails, error } = Digit.Hooks.fsm.useApplicationDetail(t, tenantId, applicationNumber); const { isLoading: isDataLoading, isSuccess, data: applicationData } = Digit.Hooks.fsm.useSearch( tenantId, { applicationNos: applicationNumber }, { staleTime: Infinity } ); const { isLoading: updatingApplication, isError: updateApplicationError, data: updateResponse, error: updateError, mutate, } = Digit.Hooks.fsm.useApplicationActions(tenantId); const workflowDetails = Digit.Hooks.useWorkflowDetails({ tenantId: applicationDetails?.tenantId || tenantId, id: applicationNumber, moduleCode: "FSM", role: "FSM_EMPLOYEE", serviceData: applicationDetails, }); useEffect(() => { if (showToast) { workflowDetails.revalidate(); } }, [showToast]); function onActionSelect(action) { setSelectedAction(action); setDisplayMenu(false); } useEffect(() => { switch (selectedAction) { case "DSO_ACCEPT": case "ACCEPT": case "ASSIGN": case "GENERATE_DEMAND": case "FSM_GENERATE_DEMAND": case "REASSIGN": case "COMPLETE": case "COMPLETED": case "CANCEL": case "SENDBACK": case "DSO_REJECT": case "REJECT": case "DECLINE": case "REASSING": return setShowModal(true); case "SUBMIT": case "FSM_SUBMIT": return history.push("/digit-ui/employee/fsm/modify-application/" + applicationNumber); case "PAY": case "FSM_PAY": case "ADDITIONAL_PAY_REQUEST": return history.push(`/digit-ui/employee/payment/collect/FSM.TRIP_CHARGES/${applicationNumber}`); default: console.log("default case"); break; } }, [selectedAction]); //TODO: remove after conformation that no information of this sort is needed // const getTimelineCaptions = (checkpoint) => { // if (checkpoint.status === "COMPLAINT_FILED" && complaintDetails?.audit) { // const caption = { // date: Digit.DateUtils.ConvertTimestampToDate(complaintDetails.audit.details.createdTime), // name: complaintDetails.audit.citizen.name, // mobileNumber: complaintDetails.audit.citizen.mobileNumber, // source: complaintDetails.audit.source, // }; // return <TLCaption data={caption} />; // } // return checkpoint.caption && checkpoint.caption.length !== 0 ? 
<TLCaption data={checkpoint.caption[0]} /> : null; // }; const closeModal = () => { setSelectedAction(null); setShowModal(false); }; const closeToast = () => { setShowToast(null); }; const submitAction = (data) => { // console.log("find submit action data here", data); mutate(data, { onError: (error, variables) => { // console.log("find error here",error) setShowToast({ key: "error", action: error }); setTimeout(closeToast, 5000); }, onSuccess: (data, variables) => { setShowToast({ key: "success", action: selectedAction }); setTimeout(closeToast, 5000); queryClient.invalidateQueries("FSM_CITIZEN_SEARCH"); const inbox = queryClient.getQueryData("FUNCTION_RESET_INBOX"); inbox?.revalidate(); }, }); closeModal(); }; const getTimelineCaptions = (checkpoint) => { // console.log("tl", checkpoint); const __comment = checkpoint?.comment?.split("~"); const reason = __comment ? __comment[0] : null; const reason_comment = __comment ? __comment[1] : null; if (checkpoint.status === "CREATED") { const caption = { date: checkpoint?.auditDetails?.created, name: checkpoint?.assigner, mobileNumber: applicationData?.citizen.mobileNumber, source: applicationData?.source || "", }; return <TLCaption data={caption} />; } else if ( checkpoint.status === "PENDING_APPL_FEE_PAYMENT" || checkpoint.status === "ASSING_DSO" || checkpoint.status === "PENDING_DSO_APPROVAL" || checkpoint.status === "DSO_REJECTED" || checkpoint.status === "CANCELED" || checkpoint.status === "REJECTED" ) { const caption = { date: checkpoint?.auditDetails?.created, name: checkpoint?.assigner, comment: reason ? t(`ES_ACTION_REASON_${reason}`) : null, otherComment: reason_comment ? reason_comment : null, }; return <TLCaption data={caption} />; } else if (checkpoint.status === "DSO_INPROGRESS") { const caption = { name: checkpoint?.assigner, mobileNumber: checkpoint?.assigner?.mobileNumber, date: `${t("CS_FSM_EXPECTED_DATE")} ${Digit.DateUtils.ConvertTimestampToDate(applicationData?.possibleServiceDate)}`, }; return <TLCaption data={caption} />; } else if (checkpoint.status === "COMPLETED") { return ( <div> <Rating withText={true} text={t(`ES_FSM_YOU_RATED`)} currentRating={checkpoint.rating} /> <Link to={`/digit-ui/employee/fsm/rate-view/${applicationNumber}`}> <ActionLinks>{t("CS_FSM_RATE_VIEW")}</ActionLinks> </Link> </div> ); } }; if (isLoading) { return <Loader />; } return ( <React.Fragment> {!isLoading ? ( <React.Fragment> <Card style={{ position: "relative" }}> {/* {!DSO && ( <LinkButton label={<span style={{ color: "#f47738", marginLeft: "8px" }}>{t("ES_APPLICATION_DETAILS_VIEW_AUDIT_TRAIL")}</span>} style={{ position: "absolute", top: 0, right: 20 }} onClick={() => { history.push(props.parentRoute + "/application-audit/" + applicationNumber); }} /> )} */} {applicationDetails?.applicationDetails.map((detail, index) => ( <React.Fragment key={index}> {index === 0 ? 
( <CardSubHeader style={{ marginBottom: "16px" }}>{t(detail.title)}</CardSubHeader> ) : ( <CardSectionHeader style={{ marginBottom: "16px", marginTop: "32px" }}>{t(detail.title)}</CardSectionHeader> )} <StatusTable> {detail?.values?.map((value, index) => { if (value.map === true && value.value !== "N/A") { return <Row key={t(value.title)} label={t(value.title)} text={<img src={t(value.value)} alt="" />} />; } return ( <Row key={t(value.title)} label={t(value.title)} text={t(value.value) || "N/A"} last={index === detail?.values?.length - 1} caption={value.caption} className="border-none" /> ); })} </StatusTable> </React.Fragment> ))} <BreakLine /> {(workflowDetails?.isLoading || isDataLoading) && <Loader />} {!workflowDetails?.isLoading && !isDataLoading && ( <Fragment> <CardSectionHeader style={{ marginBottom: "16px", marginTop: "32px" }}> {t("ES_APPLICATION_DETAILS_APPLICATION_TIMELINE")} </CardSectionHeader> {workflowDetails?.data?.timeline && workflowDetails?.data?.timeline?.length === 1 ? ( <CheckPoint isCompleted={true} label={t("CS_COMMON_" + workflowDetails?.data?.timeline[0]?.status)} customChild={getTimelineCaptions(workflowDetails?.data?.timeline[0])} /> ) : ( <ConnectingCheckPoints> {workflowDetails?.data?.timeline && workflowDetails?.data?.timeline.map((checkpoint, index, arr) => { return ( <React.Fragment key={index}> <CheckPoint keyValue={index} isCompleted={index === 0} label={t("CS_COMMON_FSM_" + checkpoint.status)} customChild={getTimelineCaptions(checkpoint)} /> </React.Fragment> ); })} </ConnectingCheckPoints> )} </Fragment> )} </Card> {/* {console.log("above show modal", showModal)} */} {showModal ? ( <ActionModal t={t} action={selectedAction} tenantId={tenantId} state={state} id={applicationNumber} closeModal={closeModal} submitAction={submitAction} actionData={workflowDetails?.data?.timeline} /> ) : null} {showToast && ( <Toast error={showToast.key === "error" ? true : false} label={t(showToast.key === "success" ? `ES_FSM_${showToast.action}_UPDATE_SUCCESS` : showToast.action)} onClose={closeToast} /> )} {!workflowDetails?.isLoading && workflowDetails?.data?.nextActions?.length > 0 && ( <ActionBar> {displayMenu && workflowDetails?.data?.nextActions ? ( <Menu localeKeyPrefix={"ES_FSM"} options={workflowDetails?.data?.nextActions.map((action) => action.action)} t={t} onSelect={onActionSelect} /> ) : null} <SubmitBar label={t("ES_COMMON_TAKE_ACTION")} onSubmit={() => setDisplayMenu(!displayMenu)} /> </ActionBar> )} </React.Fragment> ) : ( <Loader /> )} </React.Fragment> ); }; export default ApplicationDetails;
/**************************************************************************** Copyright (c) 2019-2022 Xiamen Yaji Software Co., Ltd. http://www.cocos.com Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated engine source code (the "Software"), a limited, worldwide, royalty-free, non-assignable, revocable and non-exclusive license to use Cocos Creator solely to develop games on your target platforms. You shall not use Cocos Creator software for developing other software or tools that's used for developing games. You are not granted to publish, distribute, sublicense, and/or sell copies of Cocos Creator. The software or tools in this License Agreement are licensed, not sold. Xiamen Yaji Software Co., Ltd. reserves all rights not expressly granted to you. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ****************************************************************************/ #pragma once #include "GLES2Std.h" #include "gfx-base/GFXShader.h" namespace cc { namespace gfx { struct GLES2GPUShader; class CC_GLES2_API GLES2Shader final : public Shader { public: GLES2Shader(); ~GLES2Shader() override; inline GLES2GPUShader *gpuShader() const { return _gpuShader; } protected: void doInit(const ShaderInfo &info) override; void doDestroy() override; GLES2GPUShader *_gpuShader = nullptr; }; } // namespace gfx } // namespace cc
export{}from"a"
__filename__ = 'multithread_demo.py' __author__ = '[email protected]' import multiprocessing import time import random class HelloWorld(object): def __init__(self): self.my_number = 1 self.my_number_2 = multiprocessing.Value('i', 1) #self.lock = threading.Lock() self.lock = multiprocessing.Lock() def thread_target1(self, parameter = None): if parameter: print '{0} this parameter has been passed'.format(str(parameter)) try: time.sleep(parameter) print 'Wake up time' except: #who cares pass else: print 'hello world.... this is stupid' time.sleep(10) return 'More stupid stuff' def thread_target2(self, parameter = None): time.sleep(.1*random.randint(0,10)) self.my_number += 1 time.sleep(float(parameter)) self.my_number += 1 print self.my_number def thread_target3(self, parameter = None): time.sleep(.1*random.randint(0,10)) with self.my_number_2.get_lock(): self.my_number_2.value += 1 time.sleep(float(parameter)) with self.my_number_2.get_lock(): self.my_number_2.value += 1 print self.my_number_2.value def demo1(self): for i in range(10): #this_thread = threading.Thread(target = self.thread_target1, args = (i,)).start() this_thread = multiprocessing.Process(target = self.thread_target1).start() #print 'Thread count: {0}'.format(threading.active_count()) print 'Process count: {0}'.format(multiprocessing.active_children()) #This should return something print this_thread def demo2(self): for i in range(10): #this_thread = threading.Thread(target = self.thread_target1, args = (i,)).start() this_process = multiprocessing.Process(target = self.thread_target1, args = (i,)) this_process.daemon = True this_process.start() #print 'Thread count: {0}'.format(threading.active_count()) print 'Process count: {0}'.format(multiprocessing.active_children()) time.sleep(60) def demo3(self): for i in range(10): #this_thread = threading.Thread(target = self.thread_target1, args = (i,)).start() this_process = multiprocessing.Process(target = self.thread_target2, args = (i,)) this_process.daemon = False this_process.start() #print 'Thread count: {0}'.format(threading.active_count()) print 'Process count: {0} My Number: {1}'.format(multiprocessing.active_children(), self.my_number) #print 'Thread count: {0} My Number: {1}'.format(threading.active_count(), self.my_number) def demo4(self): for i in range(10): #this_thread = threading.Thread(target = self.thread_target1, args = (i,)).start() this_process = multiprocessing.Process(target = self.thread_target3, args = (i,)) this_process.daemon = False this_process.start() #print 'Thread count: {0}'.format(threading.active_count()) print 'Process count: {0} My Number {1}'.format(multiprocessing.active_children(), self.my_number) #print 'Thread count: {0} My Number: {1}'.format(threading.active_count(), self.my_number) test = HelloWorld() #test.demo1() #test.demo2() #test.demo3() test.demo4()
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import json import logging import os import appengine_blobstore as blobstore from appengine_wrappers import urlfetch import object_store from file_system import FileSystem, StatInfo from StringIO import StringIO from future import Future from zipfile import ZipFile, BadZipfile ZIP_KEY = 'zipball' USERNAME = None PASSWORD = None def _MakeKey(version): return ZIP_KEY + '.' + str(version) class _AsyncFetchFutureZip(object): def __init__(self, fetcher, blobstore, key_to_set, key_to_delete=None): self._fetcher = fetcher self._fetch = fetcher.FetchAsync(ZIP_KEY, username=USERNAME, password=PASSWORD) self._blobstore = blobstore self._key_to_set = key_to_set self._key_to_delete = key_to_delete def Get(self): try: result = self._fetch.Get() # Check if Github authentication failed. if result.status_code == 401: logging.error('Github authentication failed for %s, falling back to ' 'unauthenticated.' % USERNAME) blob = self._fetcher.Fetch(ZIP_KEY).content else: blob = result.content except urlfetch.DownloadError as e: logging.error('Bad github zip file: %s' % e) return None if self._key_to_delete is not None: self._blobstore.Delete(_MakeKey(self._key_to_delete), blobstore.BLOBSTORE_GITHUB) try: return_zip = ZipFile(StringIO(blob)) except BadZipfile as e: logging.error('Bad github zip file: %s' % e) return None self._blobstore.Set(_MakeKey(self._key_to_set), blob, blobstore.BLOBSTORE_GITHUB) return return_zip class GithubFileSystem(FileSystem): """FileSystem implementation which fetches resources from github. """ def __init__(self, fetcher, object_store, blobstore): self._fetcher = fetcher self._object_store = object_store self._blobstore = blobstore self._version = None self._GetZip(self.Stat(ZIP_KEY).version) def _GetZip(self, version): blob = self._blobstore.Get(_MakeKey(version), blobstore.BLOBSTORE_GITHUB) if blob is not None: try: self._zip_file = Future(value=ZipFile(StringIO(blob))) except BadZipfile as e: self._blobstore.Delete(_MakeKey(version), blobstore.BLOBSTORE_GITHUB) logging.error('Bad github zip file: %s' % e) self._zip_file = Future(value=None) else: self._zip_file = Future( delegate=_AsyncFetchFutureZip(self._fetcher, self._blobstore, version, key_to_delete=self._version)) self._version = version def _ReadFile(self, path): try: zip_file = self._zip_file.Get() except Exception as e: logging.error('Github ReadFile error: %s' % e) return '' if zip_file is None: logging.error('Bad github zip file.') return '' prefix = zip_file.namelist()[0][:-1] return zip_file.read(prefix + path) def _ListDir(self, path): try: zip_file = self._zip_file.Get() except Exception as e: logging.error('Github ListDir error: %s' % e) return [] if zip_file is None: logging.error('Bad github zip file.') return [] filenames = zip_file.namelist() # Take out parent directory name (GoogleChrome-chrome-app-samples-c78a30f) filenames = [f[len(filenames[0]) - 1:] for f in filenames] # Remove the path of the directory we're listing from the filenames. filenames = [f[len(path):] for f in filenames if f != path and f.startswith(path)] # Remove all files not directly in this directory. 
return [f for f in filenames if f[:-1].count('/') == 0] def Read(self, paths, binary=False): version = self.Stat(ZIP_KEY).version if version != self._version: self._GetZip(version) result = {} for path in paths: if path.endswith('/'): result[path] = self._ListDir(path) else: result[path] = self._ReadFile(path) return Future(value=result) def _DefaultStat(self, path): version = 0 # Cache for a minute so we don't try to keep fetching bad data. self._object_store.Set(path, version, object_store.GITHUB_STAT, time=60) return StatInfo(version) def Stat(self, path): version = self._object_store.Get(path, object_store.GITHUB_STAT).Get() if version is not None: return StatInfo(version) try: result = self._fetcher.Fetch('commits/HEAD', username=USERNAME, password=PASSWORD) except urlfetch.DownloadError as e: logging.error('GithubFileSystem Stat: %s' % e) return self._DefaultStat(path) # Check if Github authentication failed. if result.status_code == 401: logging.error('Github authentication failed for %s, falling back to ' 'unauthenticated.' % USERNAME) try: result = self._fetcher.Fetch('commits/HEAD') except urlfetch.DownloadError as e: logging.error('GithubFileSystem Stat: %s' % e) return self._DefaultStat(path) version = (json.loads(result.content).get('commit', {}) .get('tree', {}) .get('sha', None)) # Check if the JSON was valid, and set to 0 if not. if version is not None: self._object_store.Set(path, version, object_store.GITHUB_STAT) else: logging.warning('Problem fetching commit hash from github.') return self._DefaultStat(path) return StatInfo(version)
import json, boto3, base64, hashlib def main(event, context): ''' - triggered by core/authentication - event => {credentials: {key: ''}, options: {key: ''}} - returns a connection object with (at least) a 'mask' property, which is overlaid onto _/connection/{connection_id}.json to enable administrator access - remove this function completely to disable administrator access ''' env = context.client_context.env if context.client_context and context.client_context.env else event.get('_env', {}) client_context = base64.b64encode(bytes(json.dumps({'env': env}), 'utf-8')).decode('utf-8') connection_record = {'name': '', 'mask': {}} if event.get('credentials') and event.get('options'): if hashlib.sha512(bytes(event['credentials'].get('key', ''), 'utf-8')).hexdigest() == event['options'].get('key'): connection_record = {'@name': event['options'].get('@name', 'system'), 'mask': {'*': '*'}} return connection_record
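The handler above only returns the administrator mask when the SHA-512 hex digest of credentials.key equals options.key. A small sketch of a matching event, with a made-up secret, to illustrate the contract described in the docstring:

import hashlib

# Hypothetical secret; options.key must be the sha512 hex digest of credentials.key.
secret = 'example-admin-key'
event = {
    'credentials': {'key': secret},
    'options': {'key': hashlib.sha512(secret.encode('utf-8')).hexdigest(), '@name': 'admin'},
}
# With a Lambda context carrying client_context, main(event, context) would return
# {'@name': 'admin', 'mask': {'*': '*'}}; any mismatch falls back to an empty mask.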
import { responseFromJson } from "@chiselstrike/api" export default async function chisel(req) { if (req.method == 'GET') { try { let resp_json = []; await Person.cursor().forEach(p => resp_json.push(p)) return responseFromJson(resp_json); } catch (e) { return responseFromJson(e, 500); } } return responseFromJson("Only GET is allowed", 405); }
// TODO: Include packages needed for this application const inquirer = require('inquirer'); const fs = require('fs') const generateMarkdown = require('./utils/generateMarkdown') // TODO: Create an array of questions for user input const questions = inquirer.prompt([ { type: 'input', name: 'title', message: 'What is your Project Title?' }, { type: 'input', name: 'description', message: 'Describe your Project?' }, { type: 'input', name: 'installationInstructions', message: 'What are your Installation Instructions?' }, { type: 'input', name: 'usageInformation', message: 'What is your Projects Usage Information?' }, { type: 'input', name: 'contributionGuidelines', message: 'What is your Projects Contribution Guidelines?' }, { type: 'input', name: 'testInstructions', message: 'What is your Projects Test Instructions?' }, { type: 'input', name: 'githubUser', message: 'Please Enter Your Github Username-' }, { type: 'input', name: 'emailAddress', message: 'What is your Email Address?' }, { type: 'list', name: 'license', message: 'Choose the Licence used for this Project-', choices: [ 'Apache License 2.0', 'GNU General Public License v3.0', 'MIT License', 'BSD 2-Clause "Simplified" License', 'BSD 3-Clause "New" or "Revised" License', 'Boost Software License 1.0', 'Creative Commons Zero v1.0 Universal', 'Eclipse Public License 2.0', 'GNU General Public License v2.0', 'GNU Lesser General Public License v2.1', 'Mozilla Public License 2.0', 'The Unlicense', ] } ]).then((data) => { let markdown = generateMarkdown(data) writeToFile(markdown, 'sample.md') }) function writeToFile(markdown, fileName) { fs.writeFile(fileName, markdown, (err) => err ? console.log(err) : console.log('Successfully created sample.md!') ); } // TODO: Create a function to initialize app function init() { questions } // Function call to initialize app init();
module.exports = function(app) { app.get('/api/currentUser', function(req, res) { res.json(req.user); }); };
#include "../dll/zeroload/zeroload.h" #include <stdlib.h> #include <stdio.h> void print_hash(const char *str) { printf("zl_compute_hash(\"%s\", 0) = %08x\n", str, zl_compute_hash(str, 0)); } void print_hashes() { print_hash("ntdll.dll"); print_hash("kernel32.dll"); print_hash("VirtualAlloc"); print_hash("VirtualFree"); print_hash("VirtualLock"); print_hash("VirtualUnlock"); print_hash("LoadLibraryA"); print_hash("SearchPathA"); print_hash("CreateFileA"); print_hash("ReadFile"); print_hash("GetFileSize"); print_hash("CloseHandle"); print_hash("NtFlushInstructionCache"); } void test_local_load(char *szDll) { DWORD dwBytesRead = 0; LPBYTE lpFileAddr = NULL; PZEROLOAD_STATE pState = zl_state_init(TRUE, FALSE, 100); lpFileAddr = zl_load_read_library_file(pState, "dll.dll", dwBytesRead); zl_load_image(pState, lpFileAddr, NULL, 0); } int main(int argc, char *argv[]) { print_hashes(); test_local_load("dll.dll"); system("PAUSE"); return 0; }
/** * Generated bundle index. Do not edit. */ export * from './public-api'; //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi8uLi8uLi8uLi8uLi8uLi8uLi8uLi9zcmMvY2RrL2NsaXBib2FyZC9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQTs7R0FFRztBQUVILGNBQWMsY0FBYyxDQUFDIiwic291cmNlc0NvbnRlbnQiOlsiLyoqXG4gKiBHZW5lcmF0ZWQgYnVuZGxlIGluZGV4LiBEbyBub3QgZWRpdC5cbiAqL1xuXG5leHBvcnQgKiBmcm9tICcuL3B1YmxpYy1hcGknO1xuIl19
/* * Common values for SHA algorithms */ #ifndef _CRYPTO_SHA_H #define _CRYPTO_SHA_H #include <linux/types.h> #define SHA1_DIGEST_SIZE 20 #define SHA1_BLOCK_SIZE 64 #define SHA224_DIGEST_SIZE 28 #define SHA224_BLOCK_SIZE 64 #define SHA256_DIGEST_SIZE 32 #define SHA256_BLOCK_SIZE 64 #define SHA384_DIGEST_SIZE 48 #define SHA384_BLOCK_SIZE 128 #define SHA512_DIGEST_SIZE 64 #define SHA512_BLOCK_SIZE 128 #define SHA1_H0 0x67452301UL #define SHA1_H1 0xefcdab89UL #define SHA1_H2 0x98badcfeUL #define SHA1_H3 0x10325476UL #define SHA1_H4 0xc3d2e1f0UL #define SHA224_H0 0xc1059ed8UL #define SHA224_H1 0x367cd507UL #define SHA224_H2 0x3070dd17UL #define SHA224_H3 0xf70e5939UL #define SHA224_H4 0xffc00b31UL #define SHA224_H5 0x68581511UL #define SHA224_H6 0x64f98fa7UL #define SHA224_H7 0xbefa4fa4UL #define SHA256_H0 0x6a09e667UL #define SHA256_H1 0xbb67ae85UL #define SHA256_H2 0x3c6ef372UL #define SHA256_H3 0xa54ff53aUL #define SHA256_H4 0x510e527fUL #define SHA256_H5 0x9b05688cUL #define SHA256_H6 0x1f83d9abUL #define SHA256_H7 0x5be0cd19UL #define SHA384_H0 0xcbbb9d5dc1059ed8ULL #define SHA384_H1 0x629a292a367cd507ULL #define SHA384_H2 0x9159015a3070dd17ULL #define SHA384_H3 0x152fecd8f70e5939ULL #define SHA384_H4 0x67332667ffc00b31ULL #define SHA384_H5 0x8eb44a8768581511ULL #define SHA384_H6 0xdb0c2e0d64f98fa7ULL #define SHA384_H7 0x47b5481dbefa4fa4ULL #define SHA512_H0 0x6a09e667f3bcc908ULL #define SHA512_H1 0xbb67ae8584caa73bULL #define SHA512_H2 0x3c6ef372fe94f82bULL #define SHA512_H3 0xa54ff53a5f1d36f1ULL #define SHA512_H4 0x510e527fade682d1ULL #define SHA512_H5 0x9b05688c2b3e6c1fULL #define SHA512_H6 0x1f83d9abfb41bd6bULL #define SHA512_H7 0x5be0cd19137e2179ULL struct sha1_state { u64 count; u32 state[SHA1_DIGEST_SIZE / 4]; u8 buffer[SHA1_BLOCK_SIZE]; }; struct sha256_state { u64 count; u32 state[SHA256_DIGEST_SIZE / 4]; u8 buf[SHA256_BLOCK_SIZE]; u8 fgFirstPacket; }; struct sha512_state { u64 count[2]; u64 state[SHA512_DIGEST_SIZE / 8]; u8 buf[SHA512_BLOCK_SIZE]; }; #endif
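The SHA*_H* constants above are the standard initial hash values, meant to seed the state structs declared at the bottom of the header. A hedged sketch of how a SHA-256 context might be seeded with them (the helper name is hypothetical, not part of this header):

/* Hypothetical init helper: load the SHA-256 IV defined above into sha256_state. */
static inline void example_sha256_init(struct sha256_state *sctx)
{
	sctx->state[0] = SHA256_H0;
	sctx->state[1] = SHA256_H1;
	sctx->state[2] = SHA256_H2;
	sctx->state[3] = SHA256_H3;
	sctx->state[4] = SHA256_H4;
	sctx->state[5] = SHA256_H5;
	sctx->state[6] = SHA256_H6;
	sctx->state[7] = SHA256_H7;
	sctx->count = 0;
}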
"""Utility functions for stat evaluation.""" import numpy as np from frites.utils import nonsorted_unique from frites.dataset.ds_utils import multi_to_uni_conditions def permute_mi_vector(y, suj, mi_type='cc', inference='rfx', n_perm=1000, random_state=None): """Permute regressor variable for performing non-parameteric statistics. Parameters ---------- y : array_like Array of shape (n_epochs,) to be permuted suj : array_like Array of shape (n_epochs,) used for permuting per subject mi_type : {'cc', 'cd', 'ccd'} Mutual information type inference : {'ffx', 'rfx'} Inference type (fixed or random effect) n_perm : int | 1000 Number of permutations to return random_state : int | None Fix the random state of the machine (use it for reproducibility). If None, a random state is randomly assigned. Returns ------- y_p : list List of length (n_perm,) of random permutation of the regressor """ # fix the random starting point rnd_start = np.random.randint(1000) if not isinstance( random_state, int) else random_state y_p = [] for p in range(n_perm): rnd = np.random.RandomState(rnd_start + p) if inference == 'ffx': # FFX (FIXED EFFECT) # subject-wise randomization y_p += [rnd.permutation(y)] elif inference == 'rfx': # RFX (RANDOM EFFECT) _y = y.copy() for s in np.unique(suj): # find everywhere the subject is present is_suj = suj == s # randomize per subject _y[is_suj] = rnd.permutation(y[is_suj]) y_p += [_y] assert len(y_p) == n_perm return y_p def permute_mi_trials(suj, inference='rfx', n_perm=1000, random_state=None): """Generate random partitions for swapping trials. Parameters ---------- suj : array_like Array of shape (n_epochs,) used for permuting per subject inference : {'ffx', 'rfx'} Inference type (fixed or random effect) n_perm : int | 1000 Number of permutations to return random_state : int | None Fix the random state of the machine (use it for reproducibility). If None, a random state is randomly assigned. Returns ------- y_p : list List of length (n_perm,) of random partitions for permuting trials """ # fix the random starting point rnd_start = np.random.randint(1000) if not isinstance( random_state, int) else random_state n_trials = len(suj) y_p = [] for p in range(n_perm): rnd = np.random.RandomState(rnd_start + p) y = np.arange(n_trials) if inference == 'ffx': # FFX (FIXED EFFECT) # subject-wise randomization y_p += [rnd.permutation(y)] elif inference == 'rfx': # RFX (RANDOM EFFECT) _y = y.copy() for s in np.unique(suj): # find everywhere the subject is present is_suj = suj == s # randomize per subject _y[is_suj] = rnd.permutation(y[is_suj]) y_p += [_y] assert len(y_p) == n_perm return y_p def bootstrap_partitions(n_epochs, *groups, n_partitions=200, random_state=None): """Generate partitions for bootstrap. Parameters ---------- n_epochs : int Number of epochs groups : array_like Groups within which permutations are performed. Should be arrays of shape (n_epochs,) and of type int n_partitions : int | 200 Number of partitions to get random_state : int | None Fix the random state of the machine (use it for reproducibility). If None, a random state is randomly assigned. 
Returns ------- partitions : list List of arrays describing the partitions within groups or not """ from sklearn.utils import resample # define the random state rnd = np.random.randint(1000) if not isinstance( random_state, int) else random_state # manage groups if not len(groups): groups = np.zeros((n_epochs), dtype=int) else: if len(groups) == 1: groups = groups[0] else: groups = multi_to_uni_conditions( [np.stack(groups, axis=1)], var_name='boot', verbose=False)[0] u_groups = nonsorted_unique(groups) # generate the partitions partitions = [] for n_p in range(n_partitions): _part = np.arange(n_epochs) for n_g in u_groups: is_group = groups == n_g n_group = is_group.sum() _part[is_group] = resample( _part[is_group], n_samples=n_group, random_state=rnd + n_p) partitions.append(_part) return partitions def dist_to_ci(dist, cis=[99], inference='ffx', rfx_es='mi', pop_mean=None): """Extract confidence bounds of a distribution. Parameters ---------- dist : array_like Distribution of shape (n_boots, 1, n_times) cis : list | [99] List of confidence levels inference : {'ffx', 'rfx'} Statistical model of the group rfx_es : {'mi', 'tvalues'} RFX effect size type. Use either 'mi' (for MI in bits) or 'tvalues' if a t-test is required pop_mean : float | None Value to use for performing the t-test Returns ------- cis : array_like Array describing the bounds of the confidence intervals. This array has a shape of (n_cis, 2, n_times) """ assert inference in ['ffx', 'rfx'] assert isinstance(cis, (list, tuple, np.ndarray)) assert rfx_es in ['mi', 'tvalues'] assert dist.ndim == 3 # group level effect for the rfx if (inference == 'rfx') and (rfx_es == 'mi'): dist = dist.mean(1, keepdims=True) elif (inference == 'rfx') and (rfx_es == 'tvalues'): raise NotImplementedError() # assert isinstance(pop_mean, (int, float)) # from frites.config import CONFIG # s_hat = CONFIG['TTEST_MNE_SIGMA'] # sigma = s_hat * np.var(dist, axis=1, ddof=1).max() # dist = ttest_1samp(dist, pop_mean, axis=1, implementation='mne', # method='absolute', sigma=sigma)[:, np.newaxis, :] assert dist.shape[1] == 1 # (n_boots, 1, n_times) _, _, n_times = dist.shape # find bounds x_ci = np.zeros((len(cis), 2, n_times)) for n_ci, ci in enumerate(cis): half_alpha = (100. - ci) / 2. x_ci[n_ci, 0, :] = np.percentile(dist, half_alpha, axis=0) x_ci[n_ci, 1, :] = np.percentile(dist, (100. - half_alpha), axis=0) return x_ci
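The permutation and bootstrap helpers above only need the regressor and the per-epoch subject labels. A small usage sketch with toy arrays, assuming the functions defined above are in scope and scikit-learn is available for bootstrap_partitions:

import numpy as np

# Toy data: 8 epochs recorded from 2 subjects.
y = np.arange(8, dtype=float)
suj = np.array([0, 0, 0, 0, 1, 1, 1, 1])

# Random-effect permutations shuffle y within each subject independently.
y_p = permute_mi_vector(y, suj, mi_type='cc', inference='rfx', n_perm=5, random_state=0)
assert len(y_p) == 5 and all(p.shape == y.shape for p in y_p)

# Bootstrap partitions resample epoch indices within each subject group.
parts = bootstrap_partitions(len(y), suj, n_partitions=3, random_state=0)
assert len(parts) == 3 and all(p.shape == (8,) for p in parts)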
"use strict"; var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "default", { enumerable: true, get: function get() { return _Collapse.default; } }); Object.defineProperty(exports, "unstable_StrictModeCollapse", { enumerable: true, get: function get() { return _StrictModeCollapse.default; } }); var _Collapse = _interopRequireDefault(require("./Collapse")); var _StrictModeCollapse = _interopRequireDefault(require("./StrictModeCollapse"));
import React, { useContext, useState } from "react"; import Container from "../../components/Container"; import { Input } from "antd"; import Context from "../../context/context"; import "./help.css"; const InputField = ({ cb, name }) => { return ( <Input onChange={cb} size="large" placeholder={"Enter " + name} style={{ borderRadius: 20, padding: 15, fontSize: 20, fontWeight: "700", }} /> ); }; const Episode = ({ value, epSetData }) => { return ( <div> <h4>Episode</h4> <InputField name={"Name"} cb={(e) => { epSetData({ ...value, name: e.target.value }); }} /> <InputField name={"Descrip"} cb={(e) => { epSetData({ ...value, desc: e.target.value }); }} /> <InputField name={"Episode Number"} cb={(e) => { epSetData({ ...value, episodeNumber: e.target.value }); }} /> <InputField name={"Air Date"} cb={(e) => { epSetData({ ...value, airDate: e.target.value }); }} /> </div> ); }; const Season = ({ value, setData }) => { return ( <div style={{ marginLeft: 50, marginBottom: 20, marginTop: 10 }}> <h3>Season</h3> <InputField name={"season name"} cb={(e) => { setData({ ...value, name: e.target.value }); }} /> <InputField name={"season number"} cb={(e) => { setData({ ...value, seasonNumber: e.target.value }); }} /> <div style={{ marginLeft: 15 }}> {value.episodes.map((episode, i) => { return ( <Episode epSetData={(values) => { setData({ ...value, episodes: value.episodes.map((a, j) => i === j ? { ...a, ...values } : a ), }); }} /> ); })} </div> <button onClick={() => { setData({ ...value, episodes: [...value.episodes, { picture: "null" }], }); }} > new Episode </button> </div> ); }; const Help = () => { const [contentState, setContentState] = useState({}); const [seasonState, setSeasonState] = useState([]); const { newContent } = useContext(Context); return ( <div> <Container> <div style={{ display: "flex", flexDirection: "row", justifyContent: "space-between", }} > <div> <h2>Add Content</h2> </div> </div> {/* {JSON.stringify({ ...contentState, avatar: "null", thumbnail: "null", seasons: seasonState, })} */} <form onSubmit={(e) => { e.preventDefault(); }} > <InputField name={"name"} cb={(e) => { setContentState((contnt) => { contnt.name = e.target.value; return contnt; }); }} /> <InputField name={"desc"} cb={(e) => { setContentState((contnt) => { contnt.desc = e.target.value; return contnt; }); }} /> <InputField name={"director"} cb={(e) => { setContentState((contnt) => { contnt.director = e.target.value; return contnt; }); }} /> <InputField name={"trailer"} cb={(e) => { setContentState((contnt) => { contnt.trailer = e.target.value; return contnt; }); }} /> <InputField name={"start date"} cb={(e) => { setContentState((contnt) => { contnt.startDate = e.target.value; return contnt; }); }} /> <InputField name={"end date"} cb={(e) => { setContentState((contnt) => { contnt.endDate = e.target.value; return contnt; }); }} /> <InputField name={"imbd score"} cb={(e) => { setContentState((contnt) => { contnt.imbdScore = e.target.value; return contnt; }); }} /> </form> <div> {seasonState.map((aSeason, i) => { return ( <div key={i}> <Season value={aSeason} setData={(values) => setSeasonState((a) => { return a.map((b, j) => j === i ? { ...b, ...values } : b ); }) } /> </div> ); })} </div> <button style={{ border: "white" }} onClick={() => { setSeasonState((a) => { return [...a, { episodes: [] }]; }); }} > Add Season </button> <button onClick={() => { newContent({ ...contentState, avatar: "null", thumbnail: "null", seasons: seasonState, }); }} > Submit </button> </Container> </div> ); }; export default Help;
const setScore = require('../src/js/helpers/setScore'); jest.mock('../src/js/helpers/setScore'); describe('Testing the post functionality', () => { it('Should save the score into the API with filled fields', () => { setScore.mockResolvedValue({ result: 'Leaderboard score created correctly.', }); setScore('Test', 4000) .then((data) => { expect(data.result).toMatch('Leaderboard score created correctly.'); }); }); it('Should not save the score into the API with empty score', () => { setScore.mockResolvedValue({ result: 'You need to provide a valid score for the leaderboard', }); setScore('Test', '') .then((data) => { expect(data.result).toMatch('You need to provide a valid score for the leaderboard'); }); }); });
const moment = require('moment'); // The original date 'Thu Apr 11 2019 18:39:00 GMT+0800' is taken out from the database // and needs to be formatted as a local format '2019-04-11' // (arr, 'YYYY-MM-DD') // Shorthand. The element of arr must be a string. // (arr, 'workdate') // Shorthand // (arr, ['begindate', 'enddate']) // Shorthand // (arr, {format: 'YYYY-MM-DD', date: ['begindate', 'enddate']}) /** @name my.formatDate */ const fn = (arr, options) => { if (!arr.length) return arr; let format = 'YYYY-MM-DD'; let dateNames = ['date']; // The default name of the date field // Simple processing if the array element is a pure string if (typeof arr[0] === 'string') { // Short form: // (arr, 'YYYY-MM-DD') <= (arr, {format: 'YYYY-MM-DD'}) if (typeof options === 'string') { format = options; } return arr.map(date => moment(date).format(format)); } // If the array element is an object, the date field for each specified // name is processed. Support for the following short form: // (arr, ['begindate', 'enddate']) <= (arr, {date: ['begindate', 'enddate']}) // If the argument is a string or an array, then it is date if (typeof options === 'string' || Array.isArray(options)) { dateNames = options; } // If the argument is an object, it is a standard argument (may have date and/or format arguments) else if (options && typeof options === 'object') { options.date && (dateNames = options.date); options.format && (format = options.format); } // If date is a string, it is converted to an array (because multiple date fields are to be supported) if (typeof dateNames === 'string') { dateNames = [dateNames]; } // Process each array element and return the processed array return arr.map(row => { dateNames.forEach(dateName => { const dateValue = row[dateName]; if (dateValue) { row[dateName] = moment(dateValue).format(format); } }); return row; }); }; module.exports = fn;
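The comments above list several shorthand call forms; a few usage sketches with made-up rows, calling the exported helper (named fn here, matching the local name above):

// Rows as objects: name the date fields to reformat, default format 'YYYY-MM-DD'.
const rows = [{ begindate: 'Thu Apr 11 2019 18:39:00 GMT+0800', enddate: 'Fri Apr 12 2019 09:00:00 GMT+0800' }];
const formatted = fn(rows, ['begindate', 'enddate']);

// Rows as plain strings: pass the format string directly.
const days = fn(['2019-04-11T10:39:00Z', '2019-04-12T08:00:00Z'], 'YYYY/MM/DD');

// Full form: custom format plus an explicit date field name.
const custom = fn(rows, { format: 'DD/MM/YYYY', date: 'begindate' });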
/**************************************************************************** * arch/ceva/src/common/up_releasepending.c * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The * ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * ****************************************************************************/ /**************************************************************************** * Included Files ****************************************************************************/ #include <nuttx/config.h> #include <debug.h> #include <nuttx/arch.h> #include "sched/sched.h" #include "up_internal.h" /**************************************************************************** * Public Functions ****************************************************************************/ /**************************************************************************** * Name: up_release_pending * * Description: * Release and ready-to-run tasks that have * collected in the pending task list. This can call a * context switch if a new task is placed at the head of * the ready to run list. * ****************************************************************************/ void up_release_pending(void) { struct tcb_s *rtcb = this_task(); sinfo("From TCB=%p\n", rtcb); /* Merge the g_pendingtasks list into the ready-to-run task list */ if (sched_mergepending()) { /* The currently active task has changed! We will need to switch * contexts. */ /* Update scheduler parameters */ sched_suspend_scheduler(rtcb); /* Are we operating in interrupt context? */ if (CURRENT_REGS) { /* Yes, then we have to do things differently. Just copy the * CURRENT_REGS into the OLD rtcb. */ rtcb->xcp.regs = CURRENT_REGS; /* Restore the exception context of the rtcb at the (new) head * of the ready-to-run task list. */ rtcb = this_task(); /* Update scheduler parameters */ sched_resume_scheduler(rtcb); /* Then switch contexts */ CURRENT_REGS = rtcb->xcp.regs; } /* No, then we will need to perform the user context switch */ else { struct tcb_s *nexttcb = this_task(); /* Update scheduler parameters */ sched_resume_scheduler(nexttcb); /* Switch context to the context of the task at the head of the * ready to run list. */ up_switchcontext(&rtcb->xcp.regs, nexttcb->xcp.regs); /* up_switchcontext forces a context switch to the task at the * head of the ready-to-run list. It does not 'return' in the * normal sense. When it does return, it is because the blocked * task is again ready to run and has execution priority. */ } } }
'use strict'; var $ = require('jquery'); var App = require('../../app'); var Backbone = require('backbone'); var Marionette = require('backbone.marionette'); var NUSMods = require('../../nusmods'); var _ = require('underscore'); var selectResultTemplate = require('../templates/select_result.hbs'); var template = require('../templates/select.hbs'); require('select2'); module.exports = Marionette.ItemView.extend({ className: 'form-group', template: template, events: { 'select2-selecting': 'onSelect2Selecting' }, ui: { 'input': 'input' }, onMouseenter: function (event) { var button = $(event.currentTarget); button.children('span').hide(); button.children('i').removeClass('hidden'); }, onMouseleave: function (event) { var button = $(event.currentTarget); button.children('span').show(); button.children('i').addClass('hidden'); }, onMouseup: function (event) { event.stopPropagation(); var button = $(event.currentTarget); var add = button.hasClass('add'); App.request((add ? 'add' : 'remove') + 'Module', button.data('semester'), button.data('code')); button .toggleClass('add remove label-default nm-module-added') .prop('title', (add ? 'Add to' : 'Remove from') + 'Timetable') .children('i').toggleClass('fa-plus fa-times'); }, onSelect2Open: function () { $('#select2-drop') .on('mouseenter', 'a', this.onMouseenter) .on('mouseleave', 'a', this.onMouseleave) .on('mouseup', 'a', this.onMouseup); }, onSelect2Selecting: function(event) { event.preventDefault(); Backbone.history.navigate('modules/' + event.val, {trigger: true}); this.ui.input.select2('close'); this.$(':focus').blur(); }, onShow: function () { _.bindAll(this, 'onMouseup', 'onSelect2Open'); var PAGE_SIZE = 50; this.ui.input.select2({ multiple: true, formatResult: function (object) { return selectResultTemplate(object); }, query: function (options) { NUSMods.getCodesAndTitles().then(function (data) { var i, results = [], pushResult = function (i) { var code = data[i].ModuleCode; var semesters = data[i].Semesters; var sems = [{semester: 1}, {semester: 2}]; for (var j = 0; j < semesters.length; j++) { var semester = semesters[j]; if (semester === 1 || semester === 2) { sems[semester - 1].offered = true; sems[semester - 1].selected = App.request('isModuleSelected', semester, code); } } return results.push({ id: code, semesters: sems, text: code + ' ' + data[i].ModuleTitle }); }; var re = new RegExp(options.term, 'i'); for (i = options.context || 0; i < data.length; i++) { if (!options.term || data[i].ModuleCode.search(re) !== -1 || data[i].ModuleTitle.search(re) !== -1) { if (pushResult(i) === PAGE_SIZE) { i++; break; } } } options.callback({ context: i, more: i < data.length, results: results }); }); } }); this.ui.input.one('select2-open', this.onSelect2Open); var that = this; this.ui.input.on('select2-open', this.showDarkBackdrop); this.ui.input.on('select2-close', function () { that.$(':focus').blur(); that.hideDarkBackdrop(); }); }, showDarkBackdrop: function () { var $modalBackdrop = $('<div class="modal-backdrop nm-search-backdrop"></div>'); $('body') .addClass('modal-open') .append($modalBackdrop); setTimeout(function () { $modalBackdrop.addClass('in'); }, 0); }, hideDarkBackdrop: function () { $('body') .removeClass('modal-open'); var $modalBackdrop = $('.nm-search-backdrop'); $modalBackdrop .one('webkitTransitionEnd otransitionend oTransitionEnd msTransitionEnd transitionend', function () { $modalBackdrop.remove(); }) .removeClass('in'); } });
"use strict"; var env = require('./env'); function AsyncLoopbackConnection(url) { var m = url.match(/loopback:(\w+)/); if (!m) { throw new Error('invalid url'); } this.id = m[1]; this.lstn = {}; this.queue = []; if (this.id in AsyncLoopbackConnection.pipes) { throw new Error('duplicate'); } AsyncLoopbackConnection.pipes[this.id] = this; var pair = this.pair(); if (pair && pair.queue.length) { pair.write(); } } AsyncLoopbackConnection.pipes = {}; env.streams.loopback = AsyncLoopbackConnection; AsyncLoopbackConnection.prototype.pair = function () { var pairId = this.id.match(/./g).reverse().join(''); return AsyncLoopbackConnection.pipes[pairId]; }; AsyncLoopbackConnection.prototype.on = function (evname, fn) { if (evname in this.lstn) { throw new Error('multiple listeners not supported'); } this.lstn[evname] = fn; }; AsyncLoopbackConnection.prototype.receive = function (string) { this.lstn.data && this.lstn.data(string); }; AsyncLoopbackConnection.prototype.write = function (obj) { var self = this; obj && self.queue.push(obj.toString()); setTimeout(function () { var pair = self.pair(); if (!pair) { return; } while (self.queue.length) { pair.receive(self.queue.shift()); } }, 1); }; AsyncLoopbackConnection.prototype.close = function () { delete AsyncLoopbackConnection.pipes[this.id]; var pair = this.pair(); pair && pair.close(); this.lstn.close && this.lstn.close(); };
/*! WOW - v1.1.2 - 2015-08-19 * Copyright (c) 2015 Matthieu Aussaguel; Licensed MIT */ (function () { var a, b, c, d, e, f = function (a, b) { return function () { return a.apply(b, arguments) } }, g = [].indexOf || function (a) { for (var b = 0, c = this.length; c > b; b++)if (b in this && this[b] === a) return b; return -1 }; b = function () { function a() { } return a.prototype.extend = function (a, b) { var c, d; for (c in b) d = b[c], null == a[c] && (a[c] = d); return a }, a.prototype.isMobile = function (a) { return /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(a) }, a.prototype.createEvent = function (a, b, c, d) { var e; return null == b && (b = !1), null == c && (c = !1), null == d && (d = null), null != document.createEvent ? (e = document.createEvent("CustomEvent"), e.initCustomEvent(a, b, c, d)) : null != document.createEventObject ? (e = document.createEventObject(), e.eventType = a) : e.eventName = a, e }, a.prototype.emitEvent = function (a, b) { return null != a.dispatchEvent ? a.dispatchEvent(b) : b in (null != a) ? a[b]() : "on" + b in (null != a) ? a["on" + b]() : void 0 }, a.prototype.addEvent = function (a, b, c) { return null != a.addEventListener ? a.addEventListener(b, c, !1) : null != a.attachEvent ? a.attachEvent("on" + b, c) : a[b] = c }, a.prototype.removeEvent = function (a, b, c) { return null != a.removeEventListener ? a.removeEventListener(b, c, !1) : null != a.detachEvent ? a.detachEvent("on" + b, c) : delete a[b] }, a.prototype.innerHeight = function () { return "innerHeight" in window ? window.innerHeight : document.documentElement.clientHeight }, a }(), c = this.WeakMap || this.MozWeakMap || (c = function () { function a() { this.keys = [], this.values = [] } return a.prototype.get = function (a) { var b, c, d, e, f; for (f = this.keys, b = d = 0, e = f.length; e > d; b = ++d)if (c = f[b], c === a) return this.values[b] }, a.prototype.set = function (a, b) { var c, d, e, f, g; for (g = this.keys, c = e = 0, f = g.length; f > e; c = ++e)if (d = g[c], d === a) return void (this.values[c] = b); return this.keys.push(a), this.values.push(b) }, a }()), a = this.MutationObserver || this.WebkitMutationObserver || this.MozMutationObserver || (a = function () { function a() { "undefined" != typeof console && null !== console && console.warn("MutationObserver is not supported by your browser."), "undefined" != typeof console && null !== console && console.warn("WOW.js cannot detect dom mutations, please call .sync() after loading new content.") } return a.notSupported = !0, a.prototype.observe = function () { }, a }()), d = this.getComputedStyle || function (a) { return this.getPropertyValue = function (b) { var c; return "float" === b && (b = "styleFloat"), e.test(b) && b.replace(e, function (a, b) { return b.toUpperCase() }), (null != (c = a.currentStyle) ? 
c[b] : void 0) || null }, this }, e = /(\-([a-z]){1})/g, this.WOW = function () { function e(a) { null == a && (a = {}), this.scrollCallback = f(this.scrollCallback, this), this.scrollHandler = f(this.scrollHandler, this), this.resetAnimation = f(this.resetAnimation, this), this.start = f(this.start, this), this.scrolled = !0, this.config = this.util().extend(a, this.defaults), null != a.scrollContainer && (this.config.scrollContainer = document.querySelector(a.scrollContainer)), this.animationNameCache = new c, this.wowEvent = this.util().createEvent(this.config.boxClass) } return e.prototype.defaults = { boxClass: "wow", animateClass: "animated", offset: 0, mobile: !0, live: !0, callback: null, scrollContainer: null }, e.prototype.init = function () { var a; return this.element = window.document.documentElement, "interactive" === (a = document.readyState) || "complete" === a ? this.start() : this.util().addEvent(document, "DOMContentLoaded", this.start), this.finished = [] }, e.prototype.start = function () { var b, c, d, e; if (this.stopped = !1, this.boxes = function () { var a, c, d, e; for (d = this.element.querySelectorAll("." + this.config.boxClass), e = [], a = 0, c = d.length; c > a; a++)b = d[a], e.push(b); return e }.call(this), this.all = function () { var a, c, d, e; for (d = this.boxes, e = [], a = 0, c = d.length; c > a; a++)b = d[a], e.push(b); return e }.call(this), this.boxes.length) if (this.disabled()) this.resetStyle(); else for (e = this.boxes, c = 0, d = e.length; d > c; c++)b = e[c], this.applyStyle(b, !0); return this.disabled() || (this.util().addEvent(this.config.scrollContainer || window, "scroll", this.scrollHandler), this.util().addEvent(window, "resize", this.scrollHandler), this.interval = setInterval(this.scrollCallback, 50)), this.config.live ? new a(function (a) { return function (b) { var c, d, e, f, g; for (g = [], c = 0, d = b.length; d > c; c++)f = b[c], g.push(function () { var a, b, c, d; for (c = f.addedNodes || [], d = [], a = 0, b = c.length; b > a; a++)e = c[a], d.push(this.doSync(e)); return d }.call(a)); return g } }(this)).observe(document.body, {childList: !0, subtree: !0}) : void 0 }, e.prototype.stop = function () { return this.stopped = !0, this.util().removeEvent(this.config.scrollContainer || window, "scroll", this.scrollHandler), this.util().removeEvent(window, "resize", this.scrollHandler), null != this.interval ? clearInterval(this.interval) : void 0 }, e.prototype.sync = function () { return a.notSupported ? this.doSync(this.element) : void 0 }, e.prototype.doSync = function (a) { var b, c, d, e, f; if (null == a && (a = this.element), 1 === a.nodeType) { for (a = a.parentNode || a, e = a.querySelectorAll("." + this.config.boxClass), f = [], c = 0, d = e.length; d > c; c++)b = e[c], g.call(this.all, b) < 0 ? (this.boxes.push(b), this.all.push(b), this.stopped || this.disabled() ? 
this.resetStyle() : this.applyStyle(b, !0), f.push(this.scrolled = !0)) : f.push(void 0); return f } }, e.prototype.show = function (a) { return this.applyStyle(a), a.className = a.className + " " + this.config.animateClass, null != this.config.callback && this.config.callback(a), this.util().emitEvent(a, this.wowEvent), this.util().addEvent(a, "animationend", this.resetAnimation), this.util().addEvent(a, "oanimationend", this.resetAnimation), this.util().addEvent(a, "webkitAnimationEnd", this.resetAnimation), this.util().addEvent(a, "MSAnimationEnd", this.resetAnimation), a }, e.prototype.applyStyle = function (a, b) { var c, d, e; return d = a.getAttribute("data-wow-duration"), c = a.getAttribute("data-wow-delay"), e = a.getAttribute("data-wow-iteration"), this.animate(function (f) { return function () { return f.customStyle(a, b, d, c, e) } }(this)) }, e.prototype.animate = function () { return "requestAnimationFrame" in window ? function (a) { return window.requestAnimationFrame(a) } : function (a) { return a() } }(), e.prototype.resetStyle = function () { var a, b, c, d, e; for (d = this.boxes, e = [], b = 0, c = d.length; c > b; b++)a = d[b], e.push(a.style.visibility = "visible"); return e }, e.prototype.resetAnimation = function (a) { var b; return a.type.toLowerCase().indexOf("animationend") >= 0 ? (b = a.target || a.srcElement, b.className = b.className.replace(this.config.animateClass, "").trim()) : void 0 }, e.prototype.customStyle = function (a, b, c, d, e) { return b && this.cacheAnimationName(a), a.style.visibility = b ? "hidden" : "visible", c && this.vendorSet(a.style, {animationDuration: c}), d && this.vendorSet(a.style, {animationDelay: d}), e && this.vendorSet(a.style, {animationIterationCount: e}), this.vendorSet(a.style, {animationName: b ? "none" : this.cachedAnimationName(a)}), a }, e.prototype.vendors = ["moz", "webkit"], e.prototype.vendorSet = function (a, b) { var c, d, e, f; d = []; for (c in b) e = b[c], a["" + c] = e, d.push(function () { var b, d, g, h; for (g = this.vendors, h = [], b = 0, d = g.length; d > b; b++)f = g[b], h.push(a["" + f + c.charAt(0).toUpperCase() + c.substr(1)] = e); return h }.call(this)); return d }, e.prototype.vendorCSS = function (a, b) { var c, e, f, g, h, i; for (h = d(a), g = h.getPropertyCSSValue(b), f = this.vendors, c = 0, e = f.length; e > c; c++)i = f[c], g = g || h.getPropertyCSSValue("-" + i + "-" + b); return g }, e.prototype.animationName = function (a) { var b; try { b = this.vendorCSS(a, "animation-name").cssText } catch (c) { b = d(a).getPropertyValue("animation-name") } return "none" === b ? "" : b }, e.prototype.cacheAnimationName = function (a) { return this.animationNameCache.set(a, this.animationName(a)) }, e.prototype.cachedAnimationName = function (a) { return this.animationNameCache.get(a) }, e.prototype.scrollHandler = function () { return this.scrolled = !0 }, e.prototype.scrollCallback = function () { var a; return !this.scrolled || (this.scrolled = !1, this.boxes = function () { var b, c, d, e; for (d = this.boxes, e = [], b = 0, c = d.length; c > b; b++)a = d[b], a && (this.isVisible(a) ? this.show(a) : e.push(a)); return e }.call(this), this.boxes.length || this.config.live) ? 
void 0 : this.stop() }, e.prototype.offsetTop = function (a) { for (var b; void 0 === a.offsetTop;)a = a.parentNode; for (b = a.offsetTop; a = a.offsetParent;)b += a.offsetTop; return b }, e.prototype.isVisible = function (a) { var b, c, d, e, f; return c = a.getAttribute("data-wow-offset") || this.config.offset, f = this.config.scrollContainer && this.config.scrollContainer.scrollTop || window.pageYOffset, e = f + Math.min(this.element.clientHeight, this.util().innerHeight()) - c, d = this.offsetTop(a), b = d + a.clientHeight, e >= d && b >= f }, e.prototype.util = function () { return null != this._util ? this._util : this._util = new b }, e.prototype.disabled = function () { return !this.config.mobile && this.util().isMobile(navigator.userAgent) }, e }() }).call(this);
(window.webpackJsonp=window.webpackJsonp||[]).push([[87],{528:function(t,a,e){"use strict";e.r(a);var r=e(57),s=Object(r.a)({},(function(){var t=this,a=t.$createElement,e=t._self._c||a;return e("ContentSlotsDistributor",{attrs:{"slot-key":t.$parent.slotKey}},[e("h1",{attrs:{id:"_5-mvvm框架速查-vue"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#_5-mvvm框架速查-vue"}},[t._v("#")]),t._v(" 5.MVVM框架速查(Vue)")]),t._v(" "),e("h2",{attrs:{id:"🐢第一部分-vue"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#🐢第一部分-vue"}},[t._v("#")]),t._v(" 🐢第一部分:Vue")]),t._v(" "),e("hr"),t._v(" "),e("h2",{attrs:{id:"🦀第二部分-vue-cli"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#🦀第二部分-vue-cli"}},[t._v("#")]),t._v(" 🦀第二部分:Vue-Cli")]),t._v(" "),e("hr"),t._v(" "),e("h2",{attrs:{id:"🐟第三部分-axios"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#🐟第三部分-axios"}},[t._v("#")]),t._v(" 🐟第三部分:Axios")]),t._v(" "),e("hr"),t._v(" "),e("h2",{attrs:{id:"🐡第四部分-vue-router"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#🐡第四部分-vue-router"}},[t._v("#")]),t._v(" 🐡第四部分:Vue-Router")]),t._v(" "),e("hr"),t._v(" "),e("h2",{attrs:{id:"🐬第五部分-vuex"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#🐬第五部分-vuex"}},[t._v("#")]),t._v(" 🐬第五部分:Vuex")]),t._v(" "),e("hr"),t._v(" "),e("h2",{attrs:{id:"🦐第六部分-element-ui"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#🦐第六部分-element-ui"}},[t._v("#")]),t._v(" 🦐第六部分:Element.ui")]),t._v(" "),e("hr"),t._v(" "),e("h2",{attrs:{id:"📚参考列表-致敬"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#📚参考列表-致敬"}},[t._v("#")]),t._v(" 📚参考列表(致敬)")])])}),[],!1,null,null,null);a.default=s.exports}}]);
/* * This header is generated by classdump-dyld 1.5 * on Tuesday, November 10, 2020 at 10:19:54 PM Mountain Standard Time * Operating System: Version 14.2 (Build 18K57) * Image Source: /System/Library/PrivateFrameworks/AppleMediaServicesUI.framework/AppleMediaServicesUI * classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos. Updated by Kevin Bradley. */ #import <AppleMediaServicesUI/AMSUICommonViewController.h> #import <libobjc.A.dylib/AMSUIWebPagePresenter.h> @protocol AMSUIWebPagePresenter; @class AMSUIWebAppearance, AMSUIWebLoadingPageModel, UIViewController, AMSUIWebClientContext, AMSUILoadingView, AMSBinaryPromise, UIView, NSString; @interface AMSUIWebPlaceholderViewController : AMSUICommonViewController <AMSUIWebPagePresenter> { BOOL _animateFadeIn; BOOL _hasAppeared; BOOL _isVisible; BOOL _shouldSnapshot; AMSUIWebAppearance* _appearance; AMSUIWebLoadingPageModel* _model; UIViewController*<AMSUIWebPagePresenter> _originalViewController; AMSUIWebClientContext* _context; AMSUILoadingView* _loadingView; AMSBinaryPromise* _snapshotPromise; UIView* _snapshotView; UIView* _visibleView; } @property (nonatomic,retain) AMSUIWebClientContext * context; //@synthesize context=_context - In the implementation block @property (assign,nonatomic) BOOL hasAppeared; //@synthesize hasAppeared=_hasAppeared - In the implementation block @property (assign,nonatomic) BOOL isVisible; //@synthesize isVisible=_isVisible - In the implementation block @property (nonatomic,retain) AMSUILoadingView * loadingView; //@synthesize loadingView=_loadingView - In the implementation block @property (assign,nonatomic) BOOL shouldSnapshot; //@synthesize shouldSnapshot=_shouldSnapshot - In the implementation block @property (nonatomic,retain) AMSBinaryPromise * snapshotPromise; //@synthesize snapshotPromise=_snapshotPromise - In the implementation block @property (nonatomic,retain) UIView * snapshotView; //@synthesize snapshotView=_snapshotView - In the implementation block @property (nonatomic,retain) UIView * visibleView; //@synthesize visibleView=_visibleView - In the implementation block @property (assign,nonatomic) BOOL animateFadeIn; //@synthesize animateFadeIn=_animateFadeIn - In the implementation block @property (nonatomic,retain) AMSUIWebAppearance * appearance; //@synthesize appearance=_appearance - In the implementation block @property (nonatomic,retain) AMSUIWebLoadingPageModel * model; //@synthesize model=_model - In the implementation block @property (nonatomic,retain) UIViewController*<AMSUIWebPagePresenter> originalViewController; //@synthesize originalViewController=_originalViewController - In the implementation block @property (readonly) unsigned long long hash; @property (readonly) Class superclass; @property (copy,readonly) NSString * description; @property (copy,readonly) NSString * debugDescription; -(void)dealloc; -(AMSUIWebClientContext *)context; -(void)setContext:(AMSUIWebClientContext *)arg1 ; -(id)initWithContext:(id)arg1 ; -(AMSUIWebLoadingPageModel *)model; -(BOOL)isVisible; -(void)setModel:(AMSUIWebLoadingPageModel *)arg1 ; -(AMSUIWebAppearance *)appearance; -(void)setAppearance:(AMSUIWebAppearance *)arg1 ; -(void)viewWillLayoutSubviews; -(void)loadView; -(void)viewWillAppear:(BOOL)arg1 ; -(void)viewDidAppear:(BOOL)arg1 ; -(void)viewDidDisappear:(BOOL)arg1 ; -(UIView *)snapshotView; -(void)setSnapshotView:(UIView *)arg1 ; -(AMSUILoadingView *)loadingView; -(void)setLoadingView:(AMSUILoadingView *)arg1 ; -(BOOL)hasAppeared; -(void)setHasAppeared:(BOOL)arg1 ; 
-(void)setIsVisible:(BOOL)arg1 ; -(void)_applyAppearance; -(void)willPresentPageModel:(id)arg1 appearance:(id)arg2 ; -(void)willAppearAfterDismiss; -(id)initWithModel:(id)arg1 context:(id)arg2 appearance:(id)arg3 ; -(void)setAnimateFadeIn:(BOOL)arg1 ; -(id)initWithSnapshot:(id)arg1 context:(id)arg2 appearance:(id)arg3 ; -(void)setOriginalViewController:(UIViewController*<AMSUIWebPagePresenter>)arg1 ; -(void)awaitSnapshotWithCompletion:(/*^block*/id)arg1 ; -(AMSBinaryPromise *)snapshotPromise; -(void)_transitionToLoadingAnimated:(BOOL)arg1 ; -(void)_transitionToSnapshot; -(void)_startReappearTransitionTimerAnimated:(BOOL)arg1 ; -(UIView *)visibleView; -(BOOL)animateFadeIn; -(void)setVisibleView:(UIView *)arg1 ; -(void)_replacePrimaryViewWithView:(id)arg1 animated:(BOOL)arg2 ; -(UIViewController*<AMSUIWebPagePresenter>)originalViewController; -(BOOL)shouldSnapshot; -(void)setShouldSnapshot:(BOOL)arg1 ; -(void)setSnapshotPromise:(AMSBinaryPromise *)arg1 ; @end
/** * Copyright (c) 2013-present, Facebook, Inc. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @flow * @format */ 'use strict'; const invariant = require('invariant'); const {GraphQLList} = require('graphql'); const {IRVisitor, SchemaUtils} = require('graphql-compiler'); const {getStorageKey, stableCopy} = require('relay-runtime'); import type {Batch, Fragment} from 'graphql-compiler'; import type { ConcreteArgument, ConcreteArgumentDefinition, ConcreteFragment, ConcreteField, ConcreteLinkedField, ConcreteSelection, ConcreteScalarField, RequestNode, } from 'relay-runtime'; const {getRawType, isAbstractType, getNullableType} = SchemaUtils; declare function generate(node: Batch): RequestNode; declare function generate(node: Fragment): ConcreteFragment; /** * @public * * Converts a GraphQLIR node into a plain JS object representation that can be * used at runtime. */ function generate(node: Batch | Fragment): RequestNode | ConcreteFragment { invariant( ['Batch', 'Fragment'].indexOf(node.kind) >= 0, 'RelayCodeGenerator: Unknown AST kind `%s`. Source: %s.', node.kind, getErrorMessage(node), ); return IRVisitor.visit(node, RelayCodeGenVisitor); } const RelayCodeGenVisitor = { leave: { Batch(node): RequestNode { invariant(node.requests.length !== 0, 'Batch must contain Requests.'); if (isSingleRequest(node)) { const request = node.requests[0]; return { kind: 'Request', operationKind: request.root.operation, name: node.name, id: request.id, text: request.text, metadata: node.metadata, fragment: node.fragment, operation: { kind: 'Operation', name: request.root.name, argumentDefinitions: request.root.argumentDefinitions, selections: flattenArray(request.root.selections), }, }; } else { return { kind: 'BatchRequest', operationKind: node.requests[0].root.operation, name: node.name, metadata: node.metadata, fragment: node.fragment, requests: node.requests.map(request => { const isDeferrableFragment = request.metadata && request.metadata.deferrable; const operation = isDeferrableFragment ? { kind: 'DeferrableOperation', name: request.root.name, argumentDefinitions: request.root.argumentDefinitions, selections: flattenArray(request.root.selections), fragmentName: request.metadata.fragmentName, rootFieldVariable: request.metadata.rootFieldVariable, } : { kind: 'Operation', name: request.root.name, argumentDefinitions: request.root.argumentDefinitions, selections: flattenArray(request.root.selections), }; return { name: request.name, id: request.id, text: request.text, argumentDependencies: request.argumentDependencies.map( dependency => ({ name: dependency.argumentName, fromRequestName: dependency.fromName, fromRequestPath: dependency.fromPath, ifList: dependency.ifList, ifNull: dependency.ifNull, maxRecurse: dependency.maxRecurse, }), ), operation, }; }), }; } }, Fragment(node): ConcreteFragment { return { kind: 'Fragment', name: node.name, type: node.type.toString(), metadata: node.metadata || null, argumentDefinitions: node.argumentDefinitions, selections: flattenArray(node.selections), }; }, LocalArgumentDefinition(node): ConcreteArgumentDefinition { return { kind: 'LocalArgument', name: node.name, type: node.type.toString(), defaultValue: node.defaultValue, }; }, RootArgumentDefinition(node): ConcreteArgumentDefinition { return { kind: 'RootArgument', name: node.name, type: node.type ? 
node.type.toString() : null, }; }, Condition(node, key, parent, ancestors): ConcreteSelection { invariant( node.condition.kind === 'Variable', 'RelayCodeGenerator: Expected static `Condition` node to be ' + 'pruned or inlined. Source: %s.', getErrorMessage(ancestors[0]), ); return { kind: 'Condition', passingValue: node.passingValue, condition: node.condition.variableName, selections: flattenArray(node.selections), }; }, FragmentSpread(node): ConcreteSelection { return { kind: 'FragmentSpread', name: node.name, args: valuesOrNull(sortByName(node.args)), }; }, DeferrableFragmentSpread(node): ConcreteSelection { return { kind: 'DeferrableFragmentSpread', name: node.name, args: valuesOrNull(sortByName(node.args)), rootFieldVariable: node.rootFieldVariable, storageKey: node.storageKey, }; }, InlineFragment(node): ConcreteSelection { return { kind: 'InlineFragment', type: node.typeCondition.toString(), selections: flattenArray(node.selections), }; }, LinkedField(node): Array<ConcreteSelection> { // Note: it is important that the arguments of this field be sorted to // ensure stable generation of storage keys for equivalent arguments // which may have originally appeared in different orders across an app. const handles = (node.handles && node.handles.map(handle => { return { kind: 'LinkedHandle', alias: node.alias, name: node.name, args: valuesOrNull(sortByName(node.args)), handle: handle.name, key: handle.key, filters: handle.filters, }; })) || []; const type = getRawType(node.type); const field: ConcreteLinkedField = { kind: 'LinkedField', alias: node.alias, name: node.name, storageKey: null, args: valuesOrNull(sortByName(node.args)), concreteType: !isAbstractType(type) ? type.toString() : null, plural: isPlural(node.type), selections: flattenArray(node.selections), }; // Precompute storageKey if possible field.storageKey = getStaticStorageKey(field); return [field].concat(handles); }, ScalarField(node): Array<ConcreteSelection> { // Note: it is important that the arguments of this field be sorted to // ensure stable generation of storage keys for equivalent arguments // which may have originally appeared in different orders across an app. const handles = (node.handles && node.handles.map(handle => { return { kind: 'ScalarHandle', alias: node.alias, name: node.name, args: valuesOrNull(sortByName(node.args)), handle: handle.name, key: handle.key, filters: handle.filters, }; })) || []; const field: ConcreteScalarField = { kind: 'ScalarField', alias: node.alias, name: node.name, args: valuesOrNull(sortByName(node.args)), selections: valuesOrUndefined(flattenArray(node.selections)), storageKey: null, }; // Precompute storageKey if possible field.storageKey = getStaticStorageKey(field); return [field].concat(handles); }, Variable(node, key, parent): ConcreteArgument { return { kind: 'Variable', name: parent.name, variableName: node.variableName, type: parent.type ? parent.type.toString() : null, }; }, Literal(node, key, parent): ConcreteArgument { return { kind: 'Literal', name: parent.name, value: stableCopy(node.value), type: parent.type ? parent.type.toString() : null, }; }, Argument(node, key, parent, ancestors): ?ConcreteArgument { if (['Variable', 'Literal'].indexOf(node.value.kind) < 0) { const valueString = JSON.stringify(node.value, null, 2); throw new Error( 'RelayCodeGenerator: Complex argument values (Lists or ' + 'InputObjects with nested variables) are not supported, argument ' + `\`${node.name}\` had value \`${valueString}\`. 
` + `Source: ${getErrorMessage(ancestors[0])}.`, ); } return node.value.value !== null ? node.value : null; }, }, }; function isSingleRequest(batch: Batch): boolean { return ( batch.requests.length === 1 && batch.requests[0].argumentDependencies.length === 0 ); } function isPlural(type: any): boolean { return getNullableType(type) instanceof GraphQLList; } function valuesOrUndefined<T>(array: ?Array<T>): ?Array<T> { return !array || array.length === 0 ? undefined : array; } function valuesOrNull<T>(array: ?Array<T>): ?Array<T> { return !array || array.length === 0 ? null : array; } function flattenArray<T>(array: Array<Array<T>>): Array<T> { return array ? Array.prototype.concat.apply([], array) : []; } function sortByName<T: {name: string}>(array: Array<T>): Array<T> { return array instanceof Array ? array.sort((a, b) => (a.name < b.name ? -1 : a.name > b.name ? 1 : 0)) : array; } function getErrorMessage(node: any): string { return `document ${node.name}`; } /** * Pre-computes storage key if possible and advantageous. Storage keys are * generated for fields with supplied arguments that are all statically known * (ie. literals, no variables) at build time. */ function getStaticStorageKey(field: ConcreteField): ?string { if ( !field.args || field.args.length === 0 || field.args.some(arg => arg.kind !== 'Literal') ) { return null; } return getStorageKey(field, {}); } module.exports = {generate};
# Generated by Django 2.1.5 on 2019-04-08 00:33 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('app', '0009_post_visibleto'), ] operations = [ migrations.AddField( model_name='remotefriend', name='displayName', field=models.CharField(blank=True, max_length=50, null=True, unique=True), ), migrations.AddField( model_name='remotefriend', name='host', field=models.URLField(blank=True, null=True), ), migrations.AddField( model_name='remotefriend', name='url', field=models.URLField(blank=True, null=True), ), migrations.AddField( model_name='remotefriendrequest', name='displayName', field=models.CharField(blank=True, max_length=50, null=True, unique=True), ), migrations.AddField( model_name='remotefriendrequest', name='host', field=models.URLField(blank=True, null=True), ), migrations.AddField( model_name='remotefriendrequest', name='url', field=models.URLField(blank=True, null=True), ), migrations.AlterField( model_name='post', name='visibleTo', field=models.ManyToManyField(blank=True, related_name='visibleTo', to='app.Author'), ), migrations.AlterField( model_name='remotefriend', name='author', field=models.URLField(blank=True, null=True), ), ]
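For illustration, the migration above implies roughly the following model shape; a minimal sketch assuming the app's models.py simply mirrors the migrated fields (field names are taken from the migration, everything else is illustrative):

# Hypothetical reconstruction of the fields this migration produces; the real
# app.models module may define additional fields, relations and Meta options.
from django.db import models

class RemoteFriend(models.Model):
    author = models.URLField(blank=True, null=True)  # AlterField in this migration
    displayName = models.CharField(blank=True, max_length=50, null=True, unique=True)
    host = models.URLField(blank=True, null=True)
    url = models.URLField(blank=True, null=True)

class RemoteFriendRequest(models.Model):
    displayName = models.CharField(blank=True, max_length=50, null=True, unique=True)
    host = models.URLField(blank=True, null=True)
    url = models.URLField(blank=True, null=True)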
#!/usr/bin/env node /* * Copyright 2019 balena.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ const path = require('path') const Bluebird = require('bluebird') const chalk = require('chalk') const gitBranch = require('git-branch') const runner = require('./runner') const packageJSON = require('../package.json') const ENV_VAR_NETLIFY_TOKEN = 'NETLIFY_AUTH_TOKEN' const TOKEN_NETLIFY = process.env[ENV_VAR_NETLIFY_TOKEN] const OPTION_COMMAND = process.argv[2] || 'deploy' const OPTION_DEPLOY = OPTION_COMMAND === 'deploy' && TOKEN_NETLIFY const OPTION_CONTRACT_PATH = process.argv[3] const OPTIONS_OUTPUT_DIRECTORY = path.resolve(process.cwd(), 'dist') // Get a list of paths were the contract file might live // in. The choice is just one if the user passes it as // a command line argument. const contractPaths = OPTION_CONTRACT_PATH ? [ path.resolve(process.cwd(), OPTION_CONTRACT_PATH) ] : [ path.resolve(process.cwd(), 'meta.json') ] const printHeader = () => { console.error(` / / __ _ _ __ __| |_ __ / / / _\` | '_ \\ / _\` | '__| / /__| (_| | | | | (_| | | \\____/\\__,_|_| |_|\\__,_|_| `) console.error(packageJSON.description) console.error(`Version v${packageJSON.version}`) console.error() } const log = (message) => { console.log(chalk.blue('[landr]'), message) } const abort = (message) => { console.error(chalk.red('[error]'), message) process.exit(1) } const loadContract = async (paths) => { for (const contractPath of paths) { log(`Trying to load repository contract from ${contractPath}`) try { require(contractPath) return contractPath } catch (error) { continue } } return null } Bluebird.try(async () => { printHeader() if (OPTION_COMMAND !== 'deploy' && OPTION_COMMAND !== 'build') { abort(`Unknown command: ${OPTION_COMMAND}`) } // TODO: Add option to generate contract, instead of loading it from FS const contractPath = await loadContract(contractPaths) if (!contractPath) { abort('Could not load contract file') } if (OPTION_COMMAND === 'deploy' && !OPTION_DEPLOY) { abort(`Omitting deployment. Please set ${ENV_VAR_NETLIFY_TOKEN}`) } const branch = await gitBranch(process.cwd()) log(`Current branch is ${branch}`) const results = await runner.run({ contractPath, branch, outputDir: OPTIONS_OUTPUT_DIRECTORY, deploy: Boolean(OPTION_DEPLOY), netlifyToken: TOKEN_NETLIFY, logger: log }) if (OPTION_DEPLOY && TOKEN_NETLIFY) { const domainSetupUrl = `${results.adminUrl}/settings/domain/setup` log(`Visit ${results.url}`) log(`Head over to ${domainSetupUrl} to setup a different domain`) } }).catch((error) => { console.error(error) process.exit(1) })
/* * This header is generated by classdump-dyld 1.5 * on Tuesday, November 10, 2020 at 10:16:47 PM Mountain Standard Time * Operating System: Version 14.2 (Build 18K57) * Image Source: /System/Library/PrivateFrameworks/ATFoundation.framework/ATFoundation * classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos. Updated by Kevin Bradley. */ @class ATMovingAverage; @interface ATThroughputCalculator : NSObject { BOOL _suspended; double _lastUpdate; double _startTime; double _currentValue; ATMovingAverage* _average; } @property (nonatomic,readonly) double throughput; -(id)init; -(void)resume; -(void)suspend; -(double)throughput; -(BOOL)update:(double)arg1 ; @end
import codecs import copy import json import os import shutil from functools import reduce from urllib import parse as url_parser from typing import Dict from pathlib import Path import traceback from flask import Response from .logger_helper import get_logger from . import context _logger = get_logger() """ File management system. Manages the mock data and data groups stored on disk. """ class FileSystemError(Exception): pass class FileManager: """ Entry point of the file system. Manages all data files of the mock server. Data files are stored under the data directory as data groups; each data group contains several pieces of data plus one matching-rule file. Data groups may inherit from each other [note: avoid circular inheritance]. """ def __init__(self): self.data_dir = None self.data_groups: Dict[str, DataGroup] = dict() self.current_data_group = None def set_temp_group(self, path): temp_group = DataGroup.create_from_dir(path) self.current_data_group = temp_group def set_root(self, path): data_root = Path(path).absolute() if not data_root.exists(): try: data_root.mkdir(parents=True) except Exception: raise FileSystemError(f'Data root path not found, and creating a new dir failed. {path}') if not os.path.isdir(path): raise FileSystemError(f'Setting data root path failed. {path} is not a dir') self.data_dir = path self.scan() def scan(self): self.data_groups.clear() # Data groups only live directly under the data directory, so no recursive lookup is needed data_group_dir_names = os.listdir(self.data_dir) for dir_name in data_group_dir_names: group = DataGroup.create_from_dir(os.path.join(self.data_dir, dir_name)) if group: self.data_groups[group.name] = group def add_group(self, name, json_obj): """ Create a new data group. If the data group already exists, merge the new data into the existing group. """ group_abspath = os.path.abspath(os.path.join(self.data_dir, name)) if not os.path.exists(group_abspath): os.mkdir(group_abspath) new_group = DataGroup.create_from_data(group_abspath, json_obj) old_group = self.data_groups.get(name) if old_group: old_group.merge(new_group) else: self.data_groups[new_group.name] = new_group self.scan() def update_group(self, origin_name, name, json_obj): """ Update a data group. """ group = self.data_groups.get(origin_name) if group: group.conf = json_obj group.write_conf() if origin_name != name: group.rename(name) self.scan() def set_current_data_group(self, name): if not name or name == '': self.current_data_group = None return True target_group = self.data_groups.get(name) if target_group: self.current_data_group = target_group return True else: return False class DataGroup: """ A mock data group. Uses the conf.json file in the group directory to decide whether a request matches a piece of mock data. If it matches, the response from the mock data is returned; otherwise None is returned. """ CONF = 'conf.json' @classmethod def create_from_data(cls, dir_path, json_data): group = cls(dir_path) group.conf = json_data group.write_conf() group.scan() return group @classmethod def create_from_dir(cls, dir_path): if not os.path.isdir(dir_path): return None sub_file_names = os.listdir(dir_path) if DataGroup.CONF not in sub_file_names: return None group = cls(dir_path) group.read_conf() group.scan() return group def __init__(self, dir_path): self.dir_path = dir_path self.name = os.path.basename(dir_path) self.conf_path = os.path.abspath(os.path.join(dir_path, DataGroup.CONF)) self.conf = None self.data_dict: Dict[str, Data] = {} def scan(self): self.data_dict = {} sub_file_names = os.listdir(self.dir_path) for file_name in sub_file_names: data = Data.create_from_dir(os.path.join(self.dir_path, file_name)) if data: self.data_dict[data.name] = data def merge(self, group): self.conf['filters'] += group.conf['filters'] self.conf['parent'] = group.conf['parent'] self.write_conf() def rename(self, name): new_path = os.path.join(os.path.dirname(self.dir_path), name) os.rename(self.dir_path, new_path) self.name = name self.dir_path = os.path.abspath(new_path) self.conf_path = 
os.path.abspath(os.path.join(new_path, DataGroup.CONF)) def delete(self): shutil.rmtree(self.dir_path) def read_conf(self): self.conf = json.loads(codecs.open(self.conf_path, 'r', 'utf-8').read()) def write_conf(self): f = codecs.open(self.conf_path, 'w', 'utf-8') conf_str = json.dumps(self.conf, ensure_ascii=False, indent=4) f.write(conf_str) f.close() def get_response(self, url, request_data=None): for req_filter in self.conf['filters']: contents = req_filter['contents'] # Skip empty filter conditions if len(contents) == 0: continue # Check whether every string in filters/contents appears in the url hit = True for content in contents: if content not in url: hit = False break # If the filter has a (manually configured) body_filter and the request body is not empty, also match against the request body if hit and 'body_filter' in req_filter.keys() and request_data is not None: try: if req_filter['body_filter'] not in request_data.decode(): hit = False except Exception: traceback.print_exc() _logger.error(f'Please check the config file: the filter for URL {contents} has a body_filter field, which triggers validation of the request data!') # Not a hit, keep looking if not hit: continue # Hit: return the mock data data = self.data_dict[req_filter['response']] resp_headers = [('lyrebird', 'mock;'+data.name)] for item in data.resp_headers: if isinstance(item, str): name = item value = data.resp_headers[name] elif isinstance(item, list): name = item[0] value = item[1] else: continue if name.lower() in ('content-length', 'connection', 'content-encoding', 'transfer-encoding'): continue resp_headers.append((name, value)) return Response(data.resp_data, data.resp_code, resp_headers) # If a parent is configured in conf, look it up recursively if self.conf.get('parent'): parent_path = self.conf['parent'] if not os.path.isabs(parent_path): parent_path = os.path.abspath(os.path.join(self.dir_path, parent_path)) if not os.path.exists(parent_path): raise FileSystemError('Can not find parent data group') parent_group = DataGroup.create_from_dir(parent_path) return parent_group.get_response(url) # No data matched return None def add_data(self, name, json_data): data_dir = os.path.join(self.dir_path, name) if not os.path.exists(data_dir): os.mkdir(data_dir) Data.create_from_data(data_dir, json_data) self.scan() def update_data(self, origin_name, name, json_data): data = self.data_dict.get(origin_name) data.json_data = json_data data.write_file() data.rename(name) self.scan() def add_data_and_filter(self, flow): url = flow['request']['url'] result = url_parser.urlparse(url) resp_name = result.path.replace('/', '.')[1:]+'_'+flow['id'] self.add_data(resp_name, flow) # TODO make simple filters. 
if result.path == '' or result.path == '/': filter_contents = [result.hostname] else: filter_contents = [result.path] self.conf['filters'].append({'contents':filter_contents, 'response':resp_name}) self.write_conf() self.scan() class Data: """ Manages the files of a single piece of mock data. request.json > stores the request headers and URL request_data.json request_data.form request_data.bin > store the request body; the extension depends on the format response.json > stores the response status code and headers response_data.json response_data.bin > store the response data; the extension depends on the format """ @classmethod def create_from_data(cls, data_dir, json_data): data = cls(data_dir) data.json_data = json_data data.write_file() return data @classmethod def create_from_dir(cls, data_dir): if not os.path.isdir(data_dir): return None return cls(data_dir) def __init__(self, path): self.name = os.path.basename(path) self.path = path self.json_data = None @property def resp_data(self): contents = os.listdir(self.path) if 'response_data.json' in contents: resp_body_file = os.path.join(self.path, 'response_data.json') elif 'response_data.bin' in contents: resp_body_file = os.path.join(self.path, 'response_data.bin') else: return '' f = codecs.open(resp_body_file, 'r', 'utf-8') res = f.read() f.close() return res @property def resp_headers(self): f = codecs.open(os.path.join(self.path, 'response.json')) resp = json.loads(f.read()) f.close() return resp['headers'] @property def resp_code(self): f = codecs.open(os.path.join(self.path, 'response.json')) resp = json.loads(f.read()) f.close() return resp['code'] def rename(self, name): new_path = os.path.join(os.path.dirname(self.path), name) os.rename(self.path, new_path) self.name = name self.path = new_path def write_file(self): if not self.json_data: raise FileSystemError('Write to file error. json_data is not set') req = self.json_data.get('request') if req: req_header = {'url': req.get('url'), 'method': req.get('method', 'GET'), 'headers': req.get('headers')} if req_header: self._write_file('request', req_header) req_data = req.get('data') if req_data: self._write_file('request_data', req_data) resp = self.json_data.get('response') if resp: # Strip the data-source marker before saving resp_header = {'code': resp.get('code', 200), 'headers': copy.deepcopy(resp.get('headers'))} resp_header['headers'].pop('lyrebird', None) if resp_header: self._write_file('response', resp_header) resp_data = resp.get('data') if resp_data: self._write_file('response_data', resp_data) def _write_file(self, name, data): if isinstance(data, (str, bytes)): name = name + '.bin' if isinstance(data, str): data = data.encode() f = codecs.open(os.path.join(self.path, name), 'wb') f.write(data) f.close() elif isinstance(data, (dict, list)): name = name + '.json' data = json.dumps(data, ensure_ascii=False, indent=4) f = codecs.open(os.path.join(self.path, name), 'w', 'utf-8') f.write(data) f.close() else: raise FileSystemError(f'Write to file error. 
Unsupported type {type(data)}') def read_file(self): self.json_data = dict() req_h = self._read_file('request') req_data = self._read_file('request_data') self.json_data['request'] = req = dict() if req_h: req['url'] = req_h.get('url') req['method'] = req_h.get('method') req['headers'] = req_h.get('headers') if req_data: req['data'] = req_data resp_h = self._read_file('response') resp_data = self._read_file('response_data') self.json_data['response'] = resp = dict() if resp_h: resp['code'] = resp_h.get('code') resp['headers'] = resp_h.get('headers') if resp_data: resp['data'] = resp_data def _read_file(self, name): file_path = os.path.join(self.path, name+'.json') if os.path.exists(file_path): return json.loads(codecs.open(file_path, 'r', 'utf-8').read()) file_path = os.path.join(self.path, name+'.bin') if os.path.exists(file_path): return codecs.open(file_path, 'rb').read().decode() def delete(self): shutil.rmtree(self.path) class DataGroupConfBuilder: def __init__(self): self.host = False self.path = True self.split_path = False self.query = False self.split_query = False self._conf = dict() self._conf['parent'] = None self._conf['filters'] = [] def add_filter_by_req_ctx(self, req_ctx_dict): conf = context.application.conf _filter = dict() url_filter_contents = [] ctx_id = req_ctx_dict['id'] url = req_ctx_dict['request']['url'] result = url_parser.urlparse(url) ''' hotfix new conf did not contains record settings if conf.get('mock') and conf.get('mock').get('record'): record = conf.get('mock').get('record') self.host = record.get('host', self.host) self.path = record.get('path', self.path) self.split_path = record.get('split_path', self.split_path) self.query = record.get('query', self.query) self.split_query = record.get('split_query', self.split_query) ''' if self.host: url_filter_contents.append(result.hostname) if self.path and len(result.path) > 1: if self.split_path: path_items = result.path.split('/') url_filter_contents += path_items else: url_filter_contents.append(result.path) if self.query and len(result.query) > 0: if self.split_query: query_items = result.query.split('&') url_filter_contents += query_items else: url_filter_contents.append(result.query) _filter['contents'] = list(filter(lambda x: True if len(x) > 0 else False, url_filter_contents)) _filter['response'] = result.path.replace('/', '.')[1:] + '_' + ctx_id self._conf['filters'].append(_filter) return self def build(self): return self._conf class DataHelper: @staticmethod def to_dict(req, req_data, resp, resp_data): data = dict() req_dict = data['request'] = dict() if req: req_dict.update(json.loads(req)) if req_data: try: req_data = json.loads(req_data) except Exception: pass req_dict['data'] = req_data resp_dict = data['response'] = dict() if resp: resp_dict.update(json.loads(resp)) if resp_data: try: resp_data = json.loads(resp_data) except Exception: pass resp_dict['data'] = resp_data return data
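A minimal usage sketch of the file manager above, following the data-group layout described in its docstrings; the directory name, group name, flow id and URL below are illustrative placeholders, not values from the source:

# Illustrative only: './mock_data', 'login_group' and the URL are made-up names.
fm = FileManager()
fm.set_root('./mock_data')                      # creates/scans data groups under ./mock_data

# Create a group and record one request/response pair plus a matching filter
fm.add_group('login_group', {'parent': None, 'filters': []})
group = fm.data_groups['login_group']
group.add_data_and_filter({
    'id': '0001',
    'request': {'url': 'http://example.com/api/login', 'method': 'POST', 'headers': {}},
    'response': {'code': 200, 'headers': {}, 'data': {'ok': True}},
})

# Activate the group and resolve a request against it
fm.set_current_data_group('login_group')
resp = fm.current_data_group.get_response('http://example.com/api/login')
if resp is not None:
    print(resp.status_code)                     # flask.Response built from the stored mock data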
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals import frappe from frappe.model.document import get_controller from frappe.utils import get_datetime, nowdate, get_url from frappe.website.router import get_pages, get_all_page_context_from_doctypes from six import iteritems from six.moves.urllib.parse import quote, urljoin no_cache = 1 base_template_path = "templates/www/sitemap.xml" def get_context(context): """generate the sitemap XML""" # the site might be accessible from multiple host_names # for e.g gadgets.erpnext.com and gadgetsinternational.com # so it should be picked from the request host = frappe.utils.get_host_name_from_request() links = [] for route, page in iteritems(get_pages()): if page.sitemap: links.append({ "loc": get_url(quote(page.name.encode("utf-8"))), "lastmod": nowdate() }) for route, data in iteritems(get_public_pages_from_doctypes()): links.append({ "loc": get_url(quote((route or "").encode("utf-8"))), "lastmod": get_datetime(data.get("modified")).strftime("%Y-%m-%d") }) return {"links":links} def get_public_pages_from_doctypes(): '''Returns pages from doctypes that are publicly accessible''' def get_sitemap_routes(): routes = {} doctypes_with_web_view = [d.name for d in frappe.db.get_all('DocType', { 'has_web_view': 1, 'allow_guest_to_view': 1 })] for doctype in doctypes_with_web_view: controller = get_controller(doctype) meta = frappe.get_meta(doctype) condition_field = meta.is_published_field or controller.website.condition_field try: res = frappe.db.get_all(doctype, ['route', 'name', 'modified'], { condition_field: 1 }) for r in res: routes[r.route] = {"doctype": doctype, "name": r.name, "modified": r.modified} except Exception as e: if not frappe.db.is_missing_column(e): raise e return routes return frappe.cache().get_value("sitemap_routes", get_sitemap_routes)
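For illustration, each entry in the returned links list becomes one url element in the rendered sitemap; a minimal sketch of that mapping with made-up values (the actual rendering is done by the template at templates/www/sitemap.xml):

# Illustrative only: the loc/lastmod values are placeholders.
links = [{"loc": "https://gadgets.erpnext.com/about", "lastmod": "2019-04-08"}]
xml_entries = "\n".join(
    f"<url><loc>{link['loc']}</loc><lastmod>{link['lastmod']}</lastmod></url>"
    for link in links
)
print(xml_entries)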
# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You may # not use this file except in compliance with the License. A copy of the # License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is distributed # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either # express or implied. See the License for the specific language governing # permissions and limitations under the License. """Integration tests for the SageMaker Endpoint API. """ import boto3 import pytest import logging import time from typing import Dict from acktest.aws import s3 from acktest.resources import random_suffix_name from acktest.k8s import resource as k8s from e2e import ( service_marker, create_sagemaker_resource, wait_sagemaker_endpoint_status, wait_resource_endpoint_status, ) from e2e.replacement_values import REPLACEMENT_VALUES from e2e.common import config as cfg FAIL_UPDATE_ERROR_MESSAGE = "unable to update endpoint. check FailureReason" @pytest.fixture(scope="module") def name_suffix(): return random_suffix_name("xgboost-endpoint", 32) @pytest.fixture(scope="module") def single_container_model(name_suffix): model_resource_name = name_suffix + "-model" replacements = REPLACEMENT_VALUES.copy() replacements["MODEL_NAME"] = model_resource_name model_reference, model_spec, model_resource = create_sagemaker_resource( resource_plural=cfg.MODEL_RESOURCE_PLURAL, resource_name=model_resource_name, spec_file="xgboost_model", replacements=replacements, ) assert model_resource is not None assert k8s.get_resource_arn(model_resource) is not None yield (model_reference, model_resource) _, deleted = k8s.delete_custom_resource(model_reference) assert deleted @pytest.fixture(scope="module") def multi_variant_config(name_suffix, single_container_model): config_resource_name = name_suffix + "-multi-variant-config" (_, model_resource) = single_container_model model_resource_name = model_resource["spec"].get("modelName", None) replacements = REPLACEMENT_VALUES.copy() replacements["ENDPOINT_CONFIG_NAME"] = config_resource_name replacements["MODEL_NAME"] = model_resource_name config_reference, config_spec, config_resource = create_sagemaker_resource( resource_plural=cfg.ENDPOINT_CONFIG_RESOURCE_PLURAL, resource_name=config_resource_name, spec_file="endpoint_config_multi_variant", replacements=replacements, ) assert config_resource is not None assert k8s.get_resource_arn(config_resource) is not None yield (config_reference, config_resource) _, deleted = k8s.delete_custom_resource(config_reference) assert deleted @pytest.fixture(scope="module") def single_variant_config(name_suffix, single_container_model): config_resource_name = name_suffix + "-single-variant-config" (_, model_resource) = single_container_model model_resource_name = model_resource["spec"].get("modelName", None) replacements = REPLACEMENT_VALUES.copy() replacements["ENDPOINT_CONFIG_NAME"] = config_resource_name replacements["MODEL_NAME"] = model_resource_name config_reference, config_spec, config_resource = create_sagemaker_resource( resource_plural=cfg.ENDPOINT_CONFIG_RESOURCE_PLURAL, resource_name=config_resource_name, spec_file="endpoint_config_single_variant", replacements=replacements, ) assert config_resource is not None assert k8s.get_resource_arn(config_resource) is not None yield (config_reference, config_resource) _, deleted = k8s.delete_custom_resource(config_reference) assert deleted 
@pytest.fixture(scope="module") def xgboost_endpoint(name_suffix, single_variant_config): endpoint_resource_name = name_suffix (_, config_resource) = single_variant_config config_resource_name = config_resource["spec"].get("endpointConfigName", None) replacements = REPLACEMENT_VALUES.copy() replacements["ENDPOINT_NAME"] = endpoint_resource_name replacements["ENDPOINT_CONFIG_NAME"] = config_resource_name reference, spec, resource = create_sagemaker_resource( resource_plural=cfg.ENDPOINT_RESOURCE_PLURAL, resource_name=endpoint_resource_name, spec_file="endpoint_base", replacements=replacements, ) assert resource is not None yield (reference, resource, spec) # Delete the k8s resource if not already deleted by tests if k8s.get_resource_exists(reference): _, deleted = k8s.delete_custom_resource(reference) assert deleted @pytest.fixture(scope="module") def faulty_config(name_suffix, single_container_model): replacements = REPLACEMENT_VALUES.copy() # copy model data to a temp S3 location and delete it after model is created on SageMaker model_bucket = replacements["SAGEMAKER_DATA_BUCKET"] copy_source = { "Bucket": model_bucket, "Key": "sagemaker/model/xgboost-mnist-model.tar.gz", } model_destination_key = "sagemaker/model/delete/xgboost-mnist-model.tar.gz" s3.copy_object(model_bucket, copy_source, model_destination_key) model_resource_name = name_suffix + "faulty-model" replacements["MODEL_NAME"] = model_resource_name replacements["MODEL_LOCATION"] = f"s3://{model_bucket}/{model_destination_key}" model_reference, model_spec, model_resource = create_sagemaker_resource( resource_plural=cfg.MODEL_RESOURCE_PLURAL, resource_name=model_resource_name, spec_file="xgboost_model_with_model_location", replacements=replacements, ) assert model_resource is not None model_resource = k8s.get_resource(model_reference) assert k8s.get_resource_arn(model_resource) is not None s3.delete_object(model_bucket, model_destination_key) config_resource_name = name_suffix + "-faulty-config" (_, model_resource) = single_container_model model_resource_name = model_resource["spec"].get("modelName", None) replacements["ENDPOINT_CONFIG_NAME"] = config_resource_name config_reference, config_spec, config_resource = create_sagemaker_resource( resource_plural=cfg.ENDPOINT_CONFIG_RESOURCE_PLURAL, resource_name=config_resource_name, spec_file="endpoint_config_multi_variant", replacements=replacements, ) assert config_resource is not None assert k8s.get_resource_arn(config_resource) is not None yield (config_reference, config_resource) for cr in (model_reference, config_reference): _, deleted = k8s.delete_custom_resource(cr) assert deleted @service_marker @pytest.mark.canary class TestEndpoint: status_creating: str = "Creating" status_inservice: str = "InService" status_udpating: str = "Updating" def _get_resource_endpoint_arn(self, resource: Dict): assert ( "ackResourceMetadata" in resource["status"] and "arn" in resource["status"]["ackResourceMetadata"] ) return resource["status"]["ackResourceMetadata"]["arn"] def _describe_sagemaker_endpoint(self, sagemaker_client, endpoint_name: str): try: return sagemaker_client.describe_endpoint(EndpointName=endpoint_name) except BaseException: logging.error( f"SageMaker could not find a endpoint with the name {endpoint_name}" ) return None def _assert_endpoint_status_in_sync( self, sagemaker_client, endpoint_name, reference, expected_status ): assert ( wait_sagemaker_endpoint_status( sagemaker_client, endpoint_name, expected_status ) == wait_resource_endpoint_status(reference, expected_status, 
2) == expected_status ) def create_endpoint_test(self, sagemaker_client, xgboost_endpoint): (reference, resource, _) = xgboost_endpoint assert k8s.get_resource_exists(reference) # endpoint has correct arn and status endpoint_name = resource["spec"].get("endpointName", None) assert endpoint_name is not None assert ( self._get_resource_endpoint_arn(resource) == self._describe_sagemaker_endpoint(sagemaker_client, endpoint_name)[ "EndpointArn" ] ) # endpoint transitions Creating -> InService state self._assert_endpoint_status_in_sync( sagemaker_client, endpoint_name, reference, self.status_creating ) assert k8s.wait_on_condition(reference, "ACK.ResourceSynced", "False") self._assert_endpoint_status_in_sync( sagemaker_client, endpoint_name, reference, self.status_inservice ) assert k8s.wait_on_condition(reference, "ACK.ResourceSynced", "True") def update_endpoint_failed_test( self, sagemaker_client, single_variant_config, faulty_config, xgboost_endpoint ): (endpoint_reference, _, endpoint_spec) = xgboost_endpoint (_, faulty_config_resource) = faulty_config faulty_config_name = faulty_config_resource["spec"].get( "endpointConfigName", None ) endpoint_spec["spec"]["endpointConfigName"] = faulty_config_name endpoint_resource = k8s.patch_custom_resource(endpoint_reference, endpoint_spec) endpoint_resource = k8s.wait_resource_consumed_by_controller(endpoint_reference) assert endpoint_resource is not None # endpoint transitions Updating -> InService state self._assert_endpoint_status_in_sync( sagemaker_client, endpoint_reference.name, endpoint_reference, self.status_udpating, ) assert k8s.wait_on_condition(endpoint_reference, "ACK.ResourceSynced", "False") endpoint_resource = k8s.get_resource(endpoint_reference) assert ( endpoint_resource["status"].get("lastEndpointConfigNameForUpdate", None) == faulty_config_name ) self._assert_endpoint_status_in_sync( sagemaker_client, endpoint_reference.name, endpoint_reference, self.status_inservice, ) assert k8s.wait_on_condition(endpoint_reference, "ACK.ResourceSynced", "True") assert k8s.assert_condition_state_message( endpoint_reference, "ACK.Terminal", "True", FAIL_UPDATE_ERROR_MESSAGE, ) endpoint_resource = k8s.get_resource(endpoint_reference) assert endpoint_resource["status"].get("failureReason", None) is not None # additional check: endpoint using old endpoint config (_, old_config_resource) = single_variant_config current_config_name = endpoint_resource["status"].get( "latestEndpointConfigName" ) assert ( current_config_name is not None and current_config_name == old_config_resource["spec"].get("endpointConfigName", None) ) def update_endpoint_successful_test( self, sagemaker_client, multi_variant_config, xgboost_endpoint ): (endpoint_reference, endpoint_resource, endpoint_spec) = xgboost_endpoint endpoint_name = endpoint_resource["spec"].get("endpointName", None) production_variants = self._describe_sagemaker_endpoint( sagemaker_client, endpoint_name )["ProductionVariants"] old_variant_instance_count = production_variants[0]["CurrentInstanceCount"] old_variant_name = production_variants[0]["VariantName"] (_, new_config_resource) = multi_variant_config new_config_name = new_config_resource["spec"].get("endpointConfigName", None) endpoint_spec["spec"]["endpointConfigName"] = new_config_name endpoint_resource = k8s.patch_custom_resource(endpoint_reference, endpoint_spec) endpoint_resource = k8s.wait_resource_consumed_by_controller(endpoint_reference) assert endpoint_resource is not None # endpoint transitions Updating -> InService state 
self._assert_endpoint_status_in_sync( sagemaker_client, endpoint_reference.name, endpoint_reference, self.status_udpating, ) assert k8s.wait_on_condition(endpoint_reference, "ACK.ResourceSynced", "False") assert k8s.assert_condition_state_message( endpoint_reference, "ACK.Terminal", "False", None ) endpoint_resource = k8s.get_resource(endpoint_reference) assert ( endpoint_resource["status"].get("lastEndpointConfigNameForUpdate", None) == new_config_name ) self._assert_endpoint_status_in_sync( sagemaker_client, endpoint_reference.name, endpoint_reference, self.status_inservice, ) assert k8s.wait_on_condition(endpoint_reference, "ACK.ResourceSynced", "True") assert k8s.assert_condition_state_message( endpoint_reference, "ACK.Terminal", "False", None ) endpoint_resource = k8s.get_resource(endpoint_reference) assert endpoint_resource["status"].get("failureReason", None) is None # RetainAllVariantProperties - variant properties were retained + is a multi-variant endpoint new_production_variants = self._describe_sagemaker_endpoint( sagemaker_client, endpoint_name )["ProductionVariants"] assert len(new_production_variants) > 1 new_variant_instance_count = None for variant in new_production_variants: if variant["VariantName"] == old_variant_name: new_variant_instance_count = variant["CurrentInstanceCount"] assert new_variant_instance_count == old_variant_instance_count def delete_endpoint_test(self, sagemaker_client, xgboost_endpoint): (reference, resource, _) = xgboost_endpoint endpoint_name = resource["spec"].get("endpointName", None) _, deleted = k8s.delete_custom_resource(reference) assert deleted # resource is removed from management from controller side if call to deleteEndpoint succeeds. # Sagemaker also removes a 'Deleting' endpoint pretty quickly, but there might be a lag # We wait maximum of 30 seconds for this clean up to happen endpoint_deleted = False for _ in range(3): time.sleep(10) if ( self._describe_sagemaker_endpoint(sagemaker_client, endpoint_name) is None ): endpoint_deleted = True break assert endpoint_deleted def test_driver( self, sagemaker_client, single_variant_config, faulty_config, multi_variant_config, xgboost_endpoint, ): self.create_endpoint_test(sagemaker_client, xgboost_endpoint) self.update_endpoint_failed_test( sagemaker_client, single_variant_config, faulty_config, xgboost_endpoint ) # Note: the test has been intentionally ordered to run a successful update after a failed update # check that controller updates the endpoint, removes the terminal condition and clears the failure reason self.update_endpoint_successful_test( sagemaker_client, multi_variant_config, xgboost_endpoint ) self.delete_endpoint_test(sagemaker_client, xgboost_endpoint)
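The tests above lean on helpers such as wait_sagemaker_endpoint_status imported from the e2e package; a rough sketch of what such a poller typically does, using only the standard boto3 describe_endpoint call (the function name, timeout and interval here are assumptions, not the actual helper):

import time

def wait_sagemaker_endpoint_status_sketch(sagemaker_client, endpoint_name,
                                           expected_status, timeout=1800, interval=30):
    """Poll DescribeEndpoint until the endpoint reaches expected_status (illustrative only)."""
    deadline = time.time() + timeout
    status = None
    while time.time() < deadline:
        status = sagemaker_client.describe_endpoint(EndpointName=endpoint_name)["EndpointStatus"]
        if status == expected_status:
            return status
        time.sleep(interval)
    return status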
import saltclass import numpy as np train_X = np.array([[10, 0, 0], [0, 20, 0], [4, 13, 5]]) train_y = np.array([0, 1, 1]) vocab = ['statistics', 'medicine', 'crime'] object_from_df = saltclass.SALT(train_X, train_y, vocabulary=vocab, language='en') X = np.array([[10, 12, 0], [14, 3, 52]]) object_from_df.enrich(method='kmeans', include_unlabeled=True, unlabeled_matrix=X) object_from_df.enrich(method='kmeans', include_unlabeled=True, unlabeled_dir='D:/Data/unlabeled/') object_from_file = saltclass.SALT.data_from_dir(train_dir='D:/data/train2/', language='en') object_from_file.enrich(method='lda') object_from_file.train(classifier='svm') object_from_file.print_info() prediction = object_from_file.predict(data_file='second_test.txt') print(object_from_file.vocabulary) print(object_from_file.newdata) print([k for (k, v) in object_from_file.vocabulary.items() if object_from_file.newdata[0][v] != 0]) print(prediction) # stc_object = STClassifier(train_X, train_y, vocabulary=['statistics', 'medicine', 'crime'], language='en') # stc_object.kmeans_enrich(num_clusters=2) # stc_object.train(classifier='SVM') # stc_object.print_info() # prediction = stc_object.predict(data_file='first_test.txt') # print(stc_object.newdata) # print(prediction) # object_from_file = stclassifier.STClassifier.from_data_dir(train_dir='D:/train/', language='en') # object_from_file.print_info() # print(object_from_file.vocabulary) # object_from_file.kmeans_enrich(num_clusters=2) # print(object_from_file.X) # object_from_file.train(classifier='SVM', gamma=3) # prediction = object_from_file.predict(data_file='first_test.txt') # print(object_from_file.newdata) # print(prediction) # print(STClassifier.__init__.__doc__) # print(help(STClassifier.__init__))
# -*- coding: utf-8 -*- """ meraki_sdk This file was automatically generated for meraki by APIMATIC v2.0 ( https://apimatic.io ). """ from meraki_sdk.api_helper import APIHelper from meraki_sdk.configuration import Configuration from meraki_sdk.controllers.base_controller import BaseController from meraki_sdk.http.auth.custom_header_auth import CustomHeaderAuth class MGPortForwardingRulesController(BaseController): """A Controller to access Endpoints in the meraki_sdk API.""" def get_device_cellular_gateway_settings_port_forwarding_rules(self, serial): """Does a GET request to /devices/{serial}/cellularGateway/settings/portForwardingRules. Returns the port forwarding rules for a single MG. Args: serial (string): TODO: type description here. Example: Returns: mixed: Response from the API. Successful operation Raises: APIException: When an error occurs while fetching the data from the remote API. This exception includes the HTTP Response code, an error message, and the HTTP body that was received in the request. """ # Validate required parameters self.validate_parameters(serial=serial) # Prepare query URL _url_path = '/devices/{serial}/cellularGateway/settings/portForwardingRules' _url_path = APIHelper.append_url_with_template_parameters(_url_path, { 'serial': serial }) _query_builder = Configuration.base_uri _query_builder += _url_path _query_url = APIHelper.clean_url(_query_builder) # Prepare headers _headers = { 'accept': 'application/json' } # Prepare and execute request _request = self.http_client.get(_query_url, headers=_headers) CustomHeaderAuth.apply(_request) _context = self.execute_request(_request) self.validate_response(_context) # Return appropriate type return APIHelper.json_deserialize(_context.response.raw_body) def update_device_cellular_gateway_settings_port_forwarding_rules(self, options=dict()): """Does a PUT request to /devices/{serial}/cellularGateway/settings/portForwardingRules. Updates the port forwarding rules for a single MG. Args: options (dict, optional): Key-value pairs for any of the parameters to this API Endpoint. All parameters to the endpoint are supplied through the dictionary with their names being the key and their desired values being the value. A list of parameters that can be used are:: serial -- string -- TODO: type description here. Example: update_device_cellular_gateway_settings_port_forwarding_rul es -- UpdateDeviceCellularGatewaySettingsPortForwardingRulesM odel -- TODO: type description here. Example: Returns: mixed: Response from the API. Successful operation Raises: APIException: When an error occurs while fetching the data from the remote API. This exception includes the HTTP Response code, an error message, and the HTTP body that was received in the request. 
""" # Validate required parameters self.validate_parameters(serial=options.get("serial")) # Prepare query URL _url_path = '/devices/{serial}/cellularGateway/settings/portForwardingRules' _url_path = APIHelper.append_url_with_template_parameters(_url_path, { 'serial': options.get('serial', None) }) _query_builder = Configuration.base_uri _query_builder += _url_path _query_url = APIHelper.clean_url(_query_builder) # Prepare headers _headers = { 'accept': 'application/json', 'content-type': 'application/json; charset=utf-8' } # Prepare and execute request _request = self.http_client.put(_query_url, headers=_headers, parameters=APIHelper.json_serialize(options.get('update_device_cellular_gateway_settings_port_forwarding_rules'))) CustomHeaderAuth.apply(_request) _context = self.execute_request(_request) self.validate_response(_context) # Return appropriate type return APIHelper.json_deserialize(_context.response.raw_body)
import Plugin from 'paella-core/js/core/Plugin'; import { loadPluginsOfType } from './Plugin'; const g_shortcuts = {}; export async function loadKeyShortcutPlugins(player) { await loadPluginsOfType(player, "keyshortcut", async (plugin) => { const shortcuts = await plugin.getKeys(); shortcuts.forEach(shortcut => { g_shortcuts[shortcut.keyCode] = g_shortcuts[shortcut.keyCode] || []; shortcut.plugin = plugin; g_shortcuts[shortcut.keyCode].push(shortcut); }); }); console.log(g_shortcuts); window.onkeyup = async (event) => { const shortcut = g_shortcuts[event.code]; if (shortcut) { await shortcut.forEach(async s => { const altStatus = !s.keyModifiers?.altKey || (s.keyModifiers?.altKey && event.altKey); const ctrlStatus = !s.keyModifiers?.ctrlKey || (s.keyModifiers?.ctrlKey && event.ctrlKey); const shiftStatus = !s.keyModifiers?.shiftKey || (s.keyModifiers?.shiftKey && event.shiftKey); if (altStatus && ctrlStatus && shiftStatus) { await s.action(event); } }); } } } export const KeyCodes = { "Digit1": "Digit1", "Digit2": "Digit2", "Digit3": "Digit3", "Digit4": "Digit4", "Digit5": "Digit5", "Digit6": "Digit6", "Digit7": "Digit7", "Digit8": "Digit8", "Digit9": "Digit9", "Digit0": "Digit0", "KeyA": "KeyA", "KeyB": "KeyB", "KeyC": "KeyC", "KeyD": "KeyD", "KeyE": "KeyE", "KeyF": "KeyF", "KeyG": "KeyG", "KeyH": "KeyH", "KeyI": "KeyI", "KeyJ": "KeyJ", "KeyK": "KeyK", "KeyL": "KeyL", "KeyM": "KeyM", "KeyN": "KeyN", "KeyO": "KeyO", "KeyP": "KeyP", "KeyQ": "KeyQ", "KeyR": "KeyR", "KeyS": "KeyS", "KeyT": "KeyT", "KeyU": "KeyU", "KeyV": "KeyV", "KeyW": "KeyW", "KeyX": "KeyX", "KeyY": "KeyY", "KeyZ": "KeyZ", "Comma": "Comma", "Period": "Period", "Semicolon": "Semicolon", "Quote": "Quote", "BracketLeft": "BracketLeft", "BracketRight": "BracketRight", "Backquote": "Backquote", "Backslash": "Backslash", "Minus": "Minus", "Equal": "Equal", "AltLeft": "AltLeft", "AltRight": "AltRight", "CapsLock": "CapsLock", "ControlLeft": "ControlLeft", "ControlRight": "ControlRight", "OSLeft": "OSLeft", "OSRight": "OSRight", "ShiftLeft": "ShiftLeft", "ShiftRight": "ShiftRight", "ContextMenu": "ContextMenu", "Enter": "Enter", "Space": "Space", "Tab": "Tab", "Delete": "Delete", "End": "End", "Help": "Help", "Home": "Home", "Insert": "Insert", "PageDown": "PageDown", "PageUp": "PageUp", "ArrowDown": "ArrowDown", "ArrowLeft": "ArrowLeft", "ArrowRight": "ArrowRight", "ArrowUp": "ArrowUp", "Escape": "Escape", "PrintScreen": "PrintScreen", "ScrollLock": "ScrollLock", "Pause": "Pause" }; export default class KeyShortcutPlugin extends Plugin { get type() { return "keyshortcut"; } /** * * @returns [{ keyCode: KeyCode, keyModifiers: [KeyModifiers], description: string, action: async function }] */ async getKeys() { return []; } }
import numpy as np from scipy.linalg import solve # 8.1 5(3) x1 = np.array([4.0, 4.2, 4.5, 4.7, 5.1, 5.5, 5.9, 6.3, 6.8, 7.1]) y1 = np.array([102.56, 113.18, 130.11, 142.05, 167.53, 195.14, 224.87, 256.73, 299.50, 326.72]) polyCoeff1 = np.polyfit(x1, y1, 3) poly1 = np.poly1d(polyCoeff1) error1 = np.sum((poly1(x1) - y1) ** 2) print(poly1) print(error1) # 8.2 example1 A = np.array([ [1, 1/2, 1/3], [1/2, 1/3, 1/4], [1/3, 1/4, 1/5] ]) b = np.array([2/np.pi, 1/np.pi, (np.pi**2 - 4)/(np.pi**3)]) polyCoeff2 = solve(A, b)[::-1] poly2 = np.poly1d(polyCoeff2) print(poly2)
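# Sanity check (illustrative, not part of the original exercise): A above is the 3x3
# Hilbert matrix of monomial inner products on [0, 1] and b holds the moments
# int_0^1 x**k * sin(pi*x) dx for k = 0, 1, 2, so poly2 is the continuous
# least-squares quadratic fit of sin(pi*x) on [0, 1]. Assumes scipy is available.
from scipy.integrate import quad

for k, bk in enumerate(b):
    moment, _ = quad(lambda x, k=k: x**k * np.sin(np.pi * x), 0.0, 1.0)
    assert abs(moment - bk) < 1e-9, (k, moment, bk)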
""" The module contains functions facilitating setting tight-binding parameters and initializing Hamiltonian objects from a Python dictionary. """ from __future__ import absolute_import import sys import numpy as np from nanonet.tb.orbitals import Orbitals from nanonet.tb import tb_params as dme from nanonet.tb.hamiltonian import Hamiltonian from nanonet.tb.hamiltonian_sparse import HamiltonianSp def set_tb_params(**kwargs): """Initialize a set of the user-defined tight-binding parameters. Parameters ---------- **kwargs : Returns ------- """ for item in kwargs: if item.startswith('PARAMS_') or item.startswith('OV_'): setattr(dme, item, kwargs[item]) def initializer(**kwargs): """Creates a Hamiltonian object from a set of parameters stored in a Python dictionary. This functions is used by CLI scripts to create Hamiltonian objects from a configuration file (normally in a yaml format) which is previously parsed into a Python dictionary data structure. Parameters ---------- kwargs : dict Dictionary of parameters needed to make a Hamiltonian object. **kwargs : Returns ------- """ set_tb_params(**kwargs) Orbitals.orbital_sets = kwargs.get('orbital_sets', {'Si': 'SiliconSP3D5S', 'H': 'HydrogenS'}) sys.modules[__name__].VERBOSITY = kwargs.get('VERBOSITY', 1) xyz = kwargs.get('xyz', {}) nn_distance = kwargs.get('nn_distance', 2.7) sparse = kwargs.get('sparse', 0) sigma = kwargs.get('sigma', 1.1) num_eigs = kwargs.get('num_eigs', 14) if sparse: h = HamiltonianSp(xyz=xyz, nn_distance=nn_distance, sigma=sigma, num_eigs=num_eigs) else: h = Hamiltonian(xyz=xyz, nn_distance=nn_distance) h.initialize() primitive_cell = kwargs.get('primitive_cell', [0, 0, 0]) if np.sum(np.abs(np.array(primitive_cell))) > 0: h.set_periodic_bc(primitive_cell=primitive_cell) return h
(function() { 'use strict'; var jhiAlert = { template: '<div class="alerts" ng-cloak="">' + '<div ng-repeat="alert in $ctrl.alerts" ng-class="[alert.position, {\'toast\': alert.toast}]">' + '<uib-alert ng-cloak="" type="{{alert.type}}" close="alert.close($ctrl.alerts)"><pre ng-bind-html="alert.msg"></pre></uib-alert>' + '</div>' + '</div>', controller: jhiAlertController }; angular .module('BlurAdmin.theme.components') .component('jhiAlert', jhiAlert); jhiAlertController.$inject = ['$scope', 'AlertService']; function jhiAlertController($scope, AlertService) { var vm = this; vm.alerts = AlertService.get(); $scope.$on('$destroy', function () { vm.alerts = []; }); } })();
'use strict' const path = require('path') const utils = require('./utils') const webpack = require('webpack') const config = require('../config') const merge = require('webpack-merge') const baseWebpackConfig = require('./webpack.base.conf') const CopyWebpackPlugin = require('copy-webpack-plugin') const HtmlWebpackPlugin = require('html-webpack-plugin') const ExtractTextPlugin = require('extract-text-webpack-plugin') const OptimizeCSSPlugin = require('optimize-css-assets-webpack-plugin') const UglifyJsPlugin = require('uglifyjs-webpack-plugin') function resolve (dir) { return path.join(__dirname, '..', dir) } const env = require('../config/prod.env') const webpackConfig = merge(baseWebpackConfig, { module: { rules: utils.styleLoaders({ sourceMap: config.build.productionSourceMap, extract: true, usePostCSS: true }) }, devtool: config.build.productionSourceMap ? config.build.devtool : false, output: { path: config.build.assetsRoot, filename: utils.assetsPath('js/[name].[chunkhash].js'), chunkFilename: utils.assetsPath('js/[id].[chunkhash].js') }, plugins: [ // http://vuejs.github.io/vue-loader/en/workflow/production.html new webpack.DefinePlugin({ 'process.env': env }), new UglifyJsPlugin({ uglifyOptions: { compress: { warnings: false } }, sourceMap: config.build.productionSourceMap, parallel: true }), // extract css into its own file new ExtractTextPlugin({ filename: utils.assetsPath('css/[name].[contenthash].css'), // Setting the following option to `false` will not extract CSS from codesplit chunks. // Their CSS will instead be inserted dynamically with style-loader when the codesplit chunk has been loaded by webpack. // increasing file size: https://github.com/vuejs-templates/webpack/issues/1110 allChunks: false, }), // Compress extracted CSS. We are using this plugin so that possible // duplicated CSS from different components can be deduped. new OptimizeCSSPlugin({ cssProcessorOptions: config.build.productionSourceMap ? { safe: true, map: { inline: false } } : { safe: true } }), // generate dist index.html with correct asset hash for caching. 
// you can customize output by editing /index.html // see https://github.com/ampedandwired/html-webpack-plugin new HtmlWebpackPlugin({ filename: config.build.index, template: 'index.html', inject: true, favicon: resolve('favicon.ico'), title: 'vue-yixiu', minify: { removeComments: true, collapseWhitespace: true, removeAttributeQuotes: true // more options: // https://github.com/kangax/html-minifier#options-quick-reference }, // necessary to consistently work with multiple chunks via CommonsChunkPlugin chunksSortMode: 'dependency' }), // keep module.id stable when vender modules does not change new webpack.HashedModuleIdsPlugin(), // enable scope hoisting new webpack.optimize.ModuleConcatenationPlugin(), // split vendor js into its own file new webpack.optimize.CommonsChunkPlugin({ name: 'vendor', minChunks (module) { // any required modules inside node_modules are extracted to vendor return ( module.resource && /\.js$/.test(module.resource) && module.resource.indexOf( path.join(__dirname, '../node_modules') ) === 0 ) } }), // extract webpack runtime and module manifest to its own file in order to // prevent vendor hash from being updated whenever app bundle is updated new webpack.optimize.CommonsChunkPlugin({ name: 'manifest', minChunks: Infinity }), // This instance extracts shared chunks from code splitted chunks and bundles them // in a separate chunk, similar to the vendor chunk // see: https://webpack.js.org/plugins/commons-chunk-plugin/#extra-async-commons-chunk new webpack.optimize.CommonsChunkPlugin({ name: 'app', async: 'vendor-async', children: true, minChunks: 3 }), // copy custom static assets new CopyWebpackPlugin([ { from: path.resolve(__dirname, '../static'), to: config.build.assetsSubDirectory, ignore: ['.*'] } ]) ] }) if (config.build.productionGzip) { const CompressionWebpackPlugin = require('compression-webpack-plugin') webpackConfig.plugins.push( new CompressionWebpackPlugin({ asset: '[path].gz[query]', algorithm: 'gzip', test: new RegExp( '\\.(' + config.build.productionGzipExtensions.join('|') + ')$' ), threshold: 10240, minRatio: 0.8 }) ) } if (config.build.bundleAnalyzerReport) { const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin webpackConfig.plugins.push(new BundleAnalyzerPlugin()) } module.exports = webpackConfig
# -*- coding: utf-8 -*- import os from flask import _app_ctx_stack from .query import Query from .base import BaseOpenDirectory # CONFIGURATION OPEN_DIRECTORY_SERVER = os.environ.get('OPEN_DIRECTORY_SERVER', 'localhost') OPEN_DIRECTORY_BASE_DN = os.environ.get('OPEN_DIRECTORY_BASE_DN', None) class OpenDirectory(BaseOpenDirectory): def __init__(self, app=None): config = { 'OPEN_DIRECTORY_SERVER': OPEN_DIRECTORY_SERVER, 'OPEN_DIRECTORY_BASE_DN': OPEN_DIRECTORY_BASE_DN, } super().__init__(**config) self.app = app if app is not None: self.init_app(app) def init_app(self, app): """Initialize the extension with the application. :param app: The :class:`flask.Flask` application to register the extension with. """ self.config.update(app.config) if not hasattr(app, 'extensions'): app.extensions = {} # pragma: no cover app.extensions['open_directory'] = self if hasattr(app, 'teardown_appcontext'): app.teardown_appcontext(self.teardown) else: app.teardown_request(self.teardown) # pragma: no cover def teardown(self, exception): """Clean-up for the extension. """ ctx = _app_ctx_stack.top if ctx is not None: if hasattr(ctx, 'open_directory_connection'): ctx.open_directory_connection.unbind() del(ctx.open_directory_connection) def query(self, model=None, **kwargs) -> Query: """Create a query with this instance as it's ``open_directory`` attribute. :param model: An optional :class:`ModelABC` subclass to set for the query. :param kwargs: Extra key-word arguments to pass to the :class:`Query`, constructor. """ kwargs['open_directory'] = self kwargs['model'] = model return Query(**kwargs)
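# Wiring sketch (not part of the extension module above): register the extension on a
# Flask app and obtain a Query bound to it. The host and base DN values are
# hypothetical; normally they come from the OPEN_DIRECTORY_* environment variables.
from flask import Flask

app = Flask(__name__)
app.config['OPEN_DIRECTORY_SERVER'] = 'od.example.com'        # hypothetical host
app.config['OPEN_DIRECTORY_BASE_DN'] = 'dc=example,dc=com'    # hypothetical base DN

open_directory = OpenDirectory(app)   # or OpenDirectory() now and init_app(app) later
query = open_directory.query()        # Query with open_directory=self and model=None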
#ifdef __OBJC__ #import <UIKit/UIKit.h> #else #ifndef FOUNDATION_EXPORT #if defined(__cplusplus) #define FOUNDATION_EXPORT extern "C" #else #define FOUNDATION_EXPORT extern #endif #endif #endif #import "UIButton+LXExpandBtn.h" FOUNDATION_EXPORT double LXCategoryVersionNumber; FOUNDATION_EXPORT const unsigned char LXCategoryVersionString[];
import { extend } from '../shared/utils.js'; export default function moduleExtendParams(params, allModulesParams) { return function extendParams(obj = {}) { const moduleParamName = Object.keys(obj)[0]; const moduleParams = obj[moduleParamName]; if (typeof moduleParams !== 'object' || moduleParams === null) { extend(allModulesParams, obj); return; } if (['navigation', 'pagination', 'scrollbar'].indexOf(moduleParamName) >= 0 && params[moduleParamName] === true) { params[moduleParamName] = { auto: true }; } if (!(moduleParamName in params && 'enabled' in moduleParams)) { extend(allModulesParams, obj); return; } if (params[moduleParamName] === true) { params[moduleParamName] = { enabled: true }; } if (typeof params[moduleParamName] === 'object' && !('enabled' in params[moduleParamName])) { params[moduleParamName].enabled = true; } if (!params[moduleParamName]) params[moduleParamName] = { enabled: false }; extend(allModulesParams, obj); }; }
const ExceptionInterface = Jymfony.Component.Routing.Exception.ExceptionInterface; /** * Exception thrown when a mandatory parameter is missing during url generation. * * @memberOf Jymfony.Component.Routing.Exception */ export default class MissingMandatoryParametersException extends mix(InvalidArgumentException, ExceptionInterface) { }
//// [moduleAssignmentCompat1.js] var A; (function (A) { var C = (function () { function C() { } return C; })(); A.C = C; })(A || (A = {})); var B; (function (B) { var C = (function () { function C() { } return C; })(); B.C = C; var D = (function () { function D() { } return D; })(); })(B || (B = {})); var a; var b; // no error a = b; b = a;
describe('Core.getCellMetaAtRow', () => { const id = 'testContainer'; beforeEach(function() { this.$container = $(`<div id="${id}"></div>`).appendTo('body'); }); afterEach(function() { if (this.$container) { destroy(); this.$container.remove(); } }); it('should return a row of cell meta in a form of an array', () => { handsontable(); const rowOfMeta = getCellMetaAtRow(0); expect(rowOfMeta.length).toBe(5); expect(rowOfMeta[0].row).toBe(0); expect(rowOfMeta[1].row).toBe(0); expect(rowOfMeta[2].row).toBe(0); expect(rowOfMeta[3].row).toBe(0); expect(rowOfMeta[4].row).toBe(0); expect(rowOfMeta[0].col).toBe(0); expect(rowOfMeta[1].col).toBe(1); expect(rowOfMeta[2].col).toBe(2); expect(rowOfMeta[3].col).toBe(3); expect(rowOfMeta[4].col).toBe(4); expect(rowOfMeta[0].prop).toBe(0); expect(rowOfMeta[1].prop).toBe(1); expect(rowOfMeta[2].prop).toBe(2); expect(rowOfMeta[3].prop).toBe(3); expect(rowOfMeta[4].prop).toBe(4); }); });
from configparser import ConfigParser

from dotenv import load_dotenv

from mop.azure.utils.create_configuration import change_dir, OPERATIONSPATH, CONFVARIABLES


class PyPolicyRunner():
    def __init__(self):
        load_dotenv()
        with change_dir(OPERATIONSPATH):
            self.config = ConfigParser()
            self.config.read(CONFVARIABLES)

    def run(self, tenant_id, subscription_id, client_id, client_secret, customer_id, shared_key):
        pass
import React from 'react'; import { View, Text } from 'react-native'; import { Actions } from 'react-native-router-flux'; import ButtonSideMenu from './buttonSideMenu'; import LinearGradient from 'react-native-linear-gradient';//eslint-disable-line const styles = { header: { borderWidth: 1, borderColor: '#bce8f1', }, headerText: { paddingTop: 11, paddingBottom: 10, paddingLeft: 20, color: '#31708f', fontWeight: 'bold', backgroundColor: 'transparent', }, }; const SideMenu = () => <View> <LinearGradient colors={['#d9edf7', '#c4e3f3']} style={styles.header} > <Text style={styles.headerText}>Menu</Text> </LinearGradient> <ButtonSideMenu title="Employees" onPress={Actions.employees} /> <ButtonSideMenu title="News" onPress={Actions.news} /> <ButtonSideMenu title="Blusers" onPress={Actions.employees} /> <ButtonSideMenu title="Blusers" onPress={Actions.news} /> </View> ; export default SideMenu;
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import absolute_import, print_function import os import sys from pex.common import die, open_zip from pex.executor import Executor from pex.interpreter import PythonInterpreter from pex.interpreter_constraints import matched_interpreters from pex.tracer import TRACER from pex.variables import ENV __all__ = ('bootstrap_pex',) def pex_info_name(entry_point): """Return the PEX-INFO for an entry_point""" return os.path.join(entry_point, 'PEX-INFO') def is_compressed(entry_point): return os.path.exists(entry_point) and not os.path.exists(pex_info_name(entry_point)) def read_pexinfo_from_directory(entry_point): with open(pex_info_name(entry_point), 'rb') as fp: return fp.read() def read_pexinfo_from_zip(entry_point): with open_zip(entry_point) as zf: return zf.read('PEX-INFO') def read_pex_info_content(entry_point): """Return the raw content of a PEX-INFO.""" if is_compressed(entry_point): return read_pexinfo_from_zip(entry_point) else: return read_pexinfo_from_directory(entry_point) def get_pex_info(entry_point): """Return the PexInfo object for an entry point.""" from . import pex_info pex_info_content = read_pex_info_content(entry_point) if pex_info_content: return pex_info.PexInfo.from_json(pex_info_content) raise ValueError('Invalid entry_point: %s' % entry_point) def find_in_path(target_interpreter): if os.path.exists(target_interpreter): return target_interpreter for directory in os.getenv('PATH', '').split(os.pathsep): try_path = os.path.join(directory, target_interpreter) if os.path.exists(try_path): return try_path def find_compatible_interpreters(pex_python_path, compatibility_constraints): """Find all compatible interpreters on the system within the supplied constraints and use PEX_PYTHON_PATH if it is set. If not, fall back to interpreters on $PATH. """ if pex_python_path: interpreters = [] for binary in pex_python_path.split(os.pathsep): try: interpreters.append(PythonInterpreter.from_binary(binary)) except Executor.ExecutionError: print("Python interpreter %s in PEX_PYTHON_PATH failed to load properly." % binary, file=sys.stderr) if not interpreters: die('PEX_PYTHON_PATH was defined, but no valid interpreters could be identified. Exiting.') else: if not os.getenv('PATH', ''): # no $PATH, use sys.executable interpreters = [PythonInterpreter.get()] else: # get all qualifying interpreters found in $PATH interpreters = PythonInterpreter.all() return list(matched_interpreters( interpreters, compatibility_constraints, meet_all_constraints=True)) def _select_pex_python_interpreter(target_python, compatibility_constraints): target = find_in_path(target_python) if not target: die('Failed to find interpreter specified by PEX_PYTHON: %s' % target) if compatibility_constraints: pi = PythonInterpreter.from_binary(target) if not list(matched_interpreters([pi], compatibility_constraints, meet_all_constraints=True)): die('Interpreter specified by PEX_PYTHON (%s) is not compatible with specified ' 'interpreter constraints: %s' % (target, str(compatibility_constraints))) if not os.path.exists(target): die('Target interpreter specified by PEX_PYTHON %s does not exist. Exiting.' 
% target) return target def _select_interpreter(pex_python_path, compatibility_constraints): compatible_interpreters = find_compatible_interpreters( pex_python_path, compatibility_constraints) if not compatible_interpreters: die('Failed to find compatible interpreter for constraints: %s' % str(compatibility_constraints)) # TODO: https://github.com/pantsbuild/pex/issues/430 target = min(compatible_interpreters).binary if os.path.exists(target): return target def maybe_reexec_pex(compatibility_constraints): """ Handle environment overrides for the Python interpreter to use when executing this pex. This function supports interpreter filtering based on interpreter constraints stored in PEX-INFO metadata. If PEX_PYTHON is set in a pexrc, it attempts to obtain the binary location of the interpreter specified by PEX_PYTHON. If PEX_PYTHON_PATH is set, it attempts to search the path for a matching interpreter in accordance with the interpreter constraints. If both variables are present in a pexrc, this function gives precedence to PEX_PYTHON_PATH and errors out if no compatible interpreters can be found on said path. If neither variable is set, fall through to plain pex execution using PATH searching or the currently executing interpreter. :param compatibility_constraints: list of requirements-style strings that constrain the Python interpreter to re-exec this pex with. """ if os.environ.pop('SHOULD_EXIT_BOOTSTRAP_REEXEC', None): # We've already been here and selected an interpreter. Continue to execution. return target = None with TRACER.timed('Selecting runtime interpreter based on pexrc', V=3): if ENV.PEX_PYTHON and not ENV.PEX_PYTHON_PATH: # preserve PEX_PYTHON re-exec for backwards compatibility # TODO: Kill this off completely in favor of PEX_PYTHON_PATH # https://github.com/pantsbuild/pex/issues/431 target = _select_pex_python_interpreter(ENV.PEX_PYTHON, compatibility_constraints) elif ENV.PEX_PYTHON_PATH: target = _select_interpreter(ENV.PEX_PYTHON_PATH, compatibility_constraints) if target and os.path.realpath(target) != os.path.realpath(sys.executable): cmdline = [target] + sys.argv TRACER.log('Re-executing: cmdline="%s", sys.executable="%s", PEX_PYTHON="%s", ' 'PEX_PYTHON_PATH="%s", COMPATIBILITY_CONSTRAINTS="%s"' % (cmdline, sys.executable, ENV.PEX_PYTHON, ENV.PEX_PYTHON_PATH, compatibility_constraints)) ENV.delete('PEX_PYTHON') ENV.delete('PEX_PYTHON_PATH') os.environ['SHOULD_EXIT_BOOTSTRAP_REEXEC'] = '1' os.execve(target, cmdline, ENV.copy()) def bootstrap_pex(entry_point): from .finders import register_finders register_finders() pex_info = get_pex_info(entry_point) maybe_reexec_pex(pex_info.interpreter_constraints) from . import pex pex.PEX(entry_point).execute() def bootstrap_pex_env(entry_point): """Bootstrap the current runtime environment using a given pex.""" from .environment import PEXEnvironment from .finders import register_finders from .pex_info import PexInfo register_finders() PEXEnvironment(entry_point, PexInfo.from_pex(entry_point)).activate()
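# Illustrative snippet (not part of the bootstrap itself): exercising the interpreter
# discovery helper above directly. The interpreter paths and the constraint string are
# placeholders; constraints use the same requirement-style syntax stored in PEX-INFO.
if __name__ == '__main__':
  illustrative_path = os.pathsep.join(['/usr/bin/python3.8', '/usr/bin/python3.9'])
  for interpreter in find_compatible_interpreters(illustrative_path, ['CPython>=3.6,<4']):
    print(interpreter.binary)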
# ***********************************
#  Author: Pedro Jorge De Los Santos
#  E-mail: [email protected]
#  Blog: numython.github.io
#  License: MIT License
# ***********************************
from nusa.core import Element, Model
import nusa.core as nc
import numpy as np
import numpy.linalg as la


class Node(nc.Node):
    def __init__(self, coordinates):
        nc.Node.__init__(self, coordinates)
        self.z = coordinates[2]


class LT3D4(Element):
    """
    Linear tetrahedral element (4 nodes) for 3D finite element analysis

    *nodes* : Tuple of :class:`~nusa.core.Node`
        Connectivity for element
    *E* : float
        Young modulus
    *nu* : float
        Poisson's Ratio
    """
    def __init__(self, nodes, E, nu):
        Element.__init__(self, etype="LT3D4")
        self.nodes = nodes
        self.E = E
        self.nu = nu

    @property
    def V(self):
        n1, n2, n3, n4 = self.get_nodes()
        V = np.array([[1, n1.x, n1.y, n1.z],
                      [1, n2.x, n2.y, n2.z],
                      [1, n3.x, n3.y, n3.z],
                      [1, n4.x, n4.y, n4.z]])
        return la.det(V)/6

    @property
    def D(self):
        E, nu = self.E, self.nu
        mult = E/((1+nu)*(1-2*nu))
        k1 = 1-nu
        k2 = (1-2*nu)/2
        M = np.array([
            [k1, nu, nu,  0,  0,  0],
            [nu, k1, nu,  0,  0,  0],
            [nu, nu, k1,  0,  0,  0],
            [ 0,  0,  0, k2,  0,  0],
            [ 0,  0,  0,  0, k2,  0],
            [ 0,  0,  0,  0,  0, k2]])
        return mult*M

    @property
    def B(self):
        # Strain-displacement matrix: assembly is still unfinished here (a2 currently
        # duplicates a1 and nothing is returned); a possible completion is sketched
        # after this module.
        n1, n2, n3, n4 = self.get_nodes()
        x1, y1, z1 = n1.x, n1.y, n1.z
        x2, y2, z2 = n2.x, n2.y, n2.z
        x3, y3, z3 = n3.x, n3.y, n3.z
        x4, y4, z4 = n4.x, n4.y, n4.z
        a1 = np.array([[x2, y2, z2], [x3, y3, z3], [x4, y4, z4]])
        a2 = np.array([[x2, y2, z2], [x3, y3, z3], [x4, y4, z4]])

    def get_element_stiffness(self):
        # Not implemented yet; see the sketch after this module.
        pass

    def get_nodes(self):
        return self.nodes


if __name__ == '__main__':
    E, nu = 200e9, 0.3
    coords = {1: (0, 0, 0), 2: (1, 0, 0), 3: (0, 1, 0), 4: (0, 0, 1)}
    nodos = []
    for n in coords.keys():
        x, y, z = coords[n][0], coords[n][1], coords[n][2]
        nd = Node((x, y, z))
        nodos.append(nd)
    e1 = LT3D4(nodos, E, nu)
    print(e1.V, e1.D)
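# Possible completion sketch (an assumption, not the author's implementation): the
# shape-function gradients of the linear tetrahedron come from inverting the nodal
# coordinate matrix, B is the 6x12 strain-displacement matrix in the strain ordering
# (exx, eyy, ezz, gxy, gyz, gzx) assumed to match D above, and Ke = V * B^T D B
# because strains are constant over the element.
def lt3d4_b_matrix(element):
    n1, n2, n3, n4 = element.get_nodes()
    M = np.array([[1, n.x, n.y, n.z] for n in (n1, n2, n3, n4)], dtype=float)
    C = la.inv(M)  # column i holds the coefficients of N_i in the basis (1, x, y, z)
    B = np.zeros((6, 12))
    for i in range(4):
        bx, by, bz = C[1, i], C[2, i], C[3, i]  # dNi/dx, dNi/dy, dNi/dz (constant)
        cols = slice(3*i, 3*i + 3)
        B[0, cols] = [bx, 0, 0]
        B[1, cols] = [0, by, 0]
        B[2, cols] = [0, 0, bz]
        B[3, cols] = [by, bx, 0]   # engineering shear gamma_xy
        B[4, cols] = [0, bz, by]   # gamma_yz
        B[5, cols] = [bz, 0, bx]   # gamma_zx
    return B


def lt3d4_stiffness(element):
    B = lt3d4_b_matrix(element)
    return element.V*(B.T @ element.D @ B)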
import os import urllib.request import subprocess import glob import cv2 import math import numpy as np from config import get_args def load_txt(file_path, mode='trainval'): filename = 'ava_file_names_{}_v2.1.txt'.format(mode) filename = os.path.join(file_path, filename) with open(filename, 'r') as f: video_names = f.readlines() return video_names def is_video(video_name, output_dir): filename = os.path.join(output_dir, video_name) return os.path.isfile(filename) def video_crawler(video_name, mode='trainval', output_dir=''): url = 'https://s3.amazonaws.com/ava-dataset/{}/{}'.format(mode, video_name) if is_video(video_name, output_dir): print('Already exist video: {0}'.format(video_name)) else: print('Download video: {0}'.format(video_name)) output_dir = os.path.join(output_dir, video_name) urllib.request.urlretrieve(url, output_dir) def ava_crawler(output_dir, args, mode='trainval'): file_path = os.path.join(args.base_dir, 'data/ava_file_names') video_names = load_txt(file_path, mode) output_dir = os.path.join(output_dir, mode) if not os.path.isdir(output_dir): os.mkdir(output_dir) for i, video_name in enumerate(video_names): print('count: {0}, video_name: {1}'.format(i, video_name)) video_crawler(video_name, mode=mode, output_dir=output_dir) def video_crop(video_name, video_path, cropped_dir, mode='trainval'): start_time = 900 end_time = 1800 origin_video_filename = '{}/{}'.format(video_path, video_name) cropped_video_filename = '{}/{}.mp4'.format(cropped_dir, video_name.split('.')[0]) status = False if not os.path.isfile(origin_video_filename): print('Video does not exist: {0}'.format(video_name)) elif os.path.isfile(cropped_video_filename): print('Already exist cropped video: {0}'.format(video_name)) else: command = [ 'ffmpeg', '-i', '"%s"' % origin_video_filename, '-ss', str(start_time), '-t', str(end_time - start_time), '-c:v', 'libx264', '-c:a', 'ac3', '-threads', '1', '-loglevel', 'panic', '"{}"'.format(cropped_video_filename) ] command = ' '.join(command) try: print("\tProcessing video: {}".format(video_name)) output = subprocess.check_output(command, shell=True, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as err: # print('status :: ', status, ', error print :: ', err.output.decode('euc-kr')) return status, err.output status = os.path.exists(cropped_video_filename) return status def process_frame(frame, output_folder, video_id, frame_number, current_second, resize_min_size=400, jpg_quality=85): # Compute output dimensions height, width, _ = frame.shape ratio = float(height) / float(width) if ratio > 1.0: W = resize_min_size H = int(ratio * float(W)) else: H = resize_min_size W = int(float(H) / ratio) # Resize frame resized_frame = cv2.resize(frame, (W, H)) # Generate destination path frame_number = str(frame_number) current_second = '0' * (4 - len(str(current_second))) + str(current_second) frame_number = '0' * (2 - len(frame_number))+frame_number dst_filename = "{}_{}_{}.jpg".format(video_id, current_second, frame_number) dst_filename = os.path.join(output_folder, dst_filename) # Save frame cv2.imwrite(dst_filename, resized_frame, [cv2.IMWRITE_JPEG_QUALITY, jpg_quality]) def video2frame(video_path, output_folder, video_id, resize_min_size=400, fps=25): print('video_path :: ', video_path) video_capture = cv2.VideoCapture(video_path) video_fps = video_capture.get(cv2.CAP_PROP_FPS) frame_count = int(video_capture.get(cv2.CAP_PROP_FRAME_COUNT)) frame_time_step = 1 / float(frame_count) print("FPS: {}".format(video_fps)) print("frameCount: {}".format(frame_count)) 
print("frame_time_step: {}".format(frame_time_step)) current_second = 0 if video_fps > 29: lin = np.linspace(0, math.floor(video_fps), fps) else: lin = np.linspace(0, math.floor(video_fps)-1, fps) while video_capture.isOpened(): f = 0 frame_number = 0 total_frame = 0 for i, elem in enumerate(lin): ret, frame = video_capture.read() total_frame += 1 if ret: process_frame(frame, output_folder, video_id, frame_number, current_second, resize_min_size=resize_min_size) frame_number += 1 if i != 0: f = int(elem) - int(lin[i-1]) else: f = int(lin[i+1]) - int(elem) for _ in range(f-1): ret, frame = video_capture.read() total_frame += 1 if total_frame >= video_fps: break else: break if total_frame >= video_fps: break ret, frame = video_capture.retrieve() if not ret: break current_second += 1 def main(args): # 1. download ava video save_video_dir = os.path.join(args.base_dir, 'video') if not os.path.isdir(save_video_dir): os.mkdir(save_video_dir) for data_mode in ['trainval', 'test']: ava_crawler(save_video_dir, args, mode=data_mode) # 2. Crop video cropped_dir = os.path.join(args.base_dir, 'cropped_video') if not os.path.isdir(cropped_dir): os.mkdir(cropped_dir) for data_mode in ['trainval', 'test']: video_path = save_video_dir video_path = os.path.join(video_path, data_mode) cropped_path = os.path.join(cropped_dir, data_mode) if not os.path.isdir(cropped_path): os.mkdir(cropped_path) video_list = glob.glob(video_path + '/*') for video in video_list: video_name = os.path.basename(video) video_crop(video_name, video_path, cropped_path, mode=data_mode) # 3. Video to Frame frame_dir = os.path.join(args.base_dir, 'frame') if not os.path.isdir(frame_dir): os.mkdir(frame_dir) for data_mode in ['trainval', 'test']: cropped_path = os.path.join(cropped_dir, data_mode) video_list = glob.glob(cropped_path + '/*.mp4') frame_path = os.path.join(frame_dir, data_mode) if not os.path.isdir(frame_path): os.mkdir(frame_path) for video in video_list: video_name = os.path.basename(video).split('.')[0] out_dir = os.path.join(frame_path, video_name) if not os.path.isdir(out_dir): os.mkdir(out_dir) video2frame(video, out_dir, video_name) if __name__ == "__main__": args = get_args() main(args)
# Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys # sys.path.insert(0, os.path.abspath(".")) # -- Project information ----------------------------------------------------- project = "Simple Navier-Stokes Solver" copyright = "2022, Naoki Hori" author = "Naoki Hori" # The full version, including alpha/beta/rc tags release = "0.4" # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named "sphinx.ext.*") or your custom # ones. sys.path.append(os.path.abspath("./ext")) extensions = [ "myliteralinclude", "details", "sphinx.ext.mathjax", ] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = [] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = "alabaster" html_theme_options = { "fixed_sidebar": "true", "github_user": "NaokiHori", "github_repo": "SimpleNavierStokesSolver", "github_type": "true", } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] rst_prolog = """ .. role:: c-lang(code) :language: c .. role:: python(code) :language: python .. role:: sh(code) :language: sh """ mathjax_path = "https://cdn.jsdelivr.net/npm/mathjax@2/MathJax.js?config=TeX-AMS-MML_HTMLorMML" mathjax3_config = { "TeX": { "Macros": { "der": ["{\\dfrac{\\partial #1}{\\partial #2}}", 2], "mst": ["{\\gamma^{#1 #2}}", 2], "gx": ["{\\xi}"], "gy": ["{\\eta}"], "ux": ["{u_x}"], "uy": ["{u_y}"], "intrp": ["{\\overline{#1}^{#2}}", 2], "diffe": ["{\\delta_{#2} {#1}}", 2], "vat": ["{\\left. {#1} \\right|_{#2}}", 2], "ave": ["{\\left\\langle {#1} \\right\\rangle_{#2}}", 2], "pimm": ["i-1 "], "pim": ["i-\\frac{1}{2}"], "pic": ["i "], "pip": ["i+\\frac{1}{2}"], "pipp": ["i+1 "], "pjmm": ["j-1 "], "pjm": ["j-\\frac{1}{2}"], "pjc": ["j "], "pjp": ["j+\\frac{1}{2}"], "pjpp": ["j+1 "], "ximm": ["i-\\frac{1}{2}"], "xim": ["i "], "xic": ["i+\\frac{1}{2}"], "xip": ["i+1 "], "xipp": ["i+\\frac{3}{2}"], "xjmm": ["j-1 "], "xjm": ["j-\\frac{1}{2}"], "xjc": ["j "], "xjp": ["j+\\frac{1}{2}"], "xjpp": ["j+1 "], "yimm": ["i-1 "], "yim": ["i-\\frac{1}{2}"], "yic": ["i "], "yip": ["i+\\frac{1}{2}"], "yipp": ["i+1 "], "yjmm": ["j-\\frac{1}{2}"], "yjm": ["j "], "yjc": ["j+\\frac{1}{2}"], "yjp": ["j+1 "], "yjpp": ["j+\\frac{3}{2}"], } } }
# SOURCE: https://github.com/clinicalml/cfrnet, MIT-License import tensorflow as tf import numpy as np SQRT_CONST = 1e-10 def get_nonlinearity_by_name(name): if name.lower() == 'elu': return tf.nn.elu else: return tf.nn.relu def build_mlp(x, num_layers=1, num_units=16, dropout=0.0, nonlinearity=tf.nn.elu, weight_initialisation_std=0.1): input_dim = int(x.shape[-1]) h_in, weights_in, biases_in = [x], [], [] for i in range(0, num_layers): if i == 0: ''' If using variable selection, first layer is just rescaling''' weights_in.append(tf.Variable(tf.random_normal([input_dim, num_units], stddev=weight_initialisation_std / np.sqrt(input_dim)))) else: weights_in.append(tf.Variable(tf.random_normal([num_units, num_units], stddev=weight_initialisation_std / np.sqrt(num_units)))) biases_in.append(tf.Variable(tf.zeros([1, num_units]))) z = tf.matmul(h_in[i], weights_in[i]) + biases_in[i] h_in.append(nonlinearity(z)) h_in[i + 1] = tf.nn.dropout(h_in[i + 1], 1.0 - dropout) h_rep = h_in[len(h_in) - 1] return h_rep, weights_in, biases_in def validation_split(D_exp, val_fraction): """ Construct a train/validation split """ n = D_exp['x'].shape[0] if val_fraction > 0: n_valid = int(val_fraction*n) n_train = n-n_valid I = np.random.permutation(range(0,n)) I_train = I[:n_train] I_valid = I[n_train:] else: I_train = range(n) I_valid = [] return I_train, I_valid def log(logfile,str): """ Log a string in a file """ with open(logfile,'a') as f: f.write(str+'\n') print str def save_config(fname): """ Save configuration """ flagdict = FLAGS.__dict__['__flags'] s = '\n'.join(['%s: %s' % (k,str(flagdict[k])) for k in sorted(flagdict.keys())]) f = open(fname,'w') f.write(s) f.close() def load_data(fname): """ Load data set """ if fname[-3:] == 'npz': data_in = np.load(fname) data = {'x': data_in['x'], 't': data_in['t'], 'yf': data_in['yf']} try: data['ycf'] = data_in['ycf'] except: data['ycf'] = None else: if FLAGS.sparse>0: data_in = np.loadtxt(open(fname+'.y',"rb"),delimiter=",") x = load_sparse(fname+'.x') else: data_in = np.loadtxt(open(fname,"rb"),delimiter=",") x = data_in[:,5:] data['x'] = x data['t'] = data_in[:,0:1] data['yf'] = data_in[:,1:2] data['ycf'] = data_in[:,2:3] data['HAVE_TRUTH'] = not data['ycf'] is None data['dim'] = data['x'].shape[1] data['n'] = data['x'].shape[0] return data def load_sparse(fname): """ Load sparse data set """ E = np.loadtxt(open(fname,"rb"),delimiter=",") H = E[0,:] n = int(H[0]) d = int(H[1]) E = E[1:,:] S = sparse.coo_matrix((E[:,2],(E[:,0]-1,E[:,1]-1)),shape=(n,d)) S = S.todense() return S def safe_sqrt(x, lbound=SQRT_CONST): ''' Numerically safe version of TensorFlow sqrt ''' return tf.sqrt(tf.clip_by_value(x, lbound, np.inf)) def lindisc(X,p,t): ''' Linear MMD ''' it = tf.where(t>0)[:,0] ic = tf.where(t<1)[:,0] Xc = tf.gather(X,ic) Xt = tf.gather(X,it) mean_control = tf.reduce_mean(Xc,reduction_indices=0) mean_treated = tf.reduce_mean(Xt,reduction_indices=0) c = tf.square(2*p-1)*0.25 f = tf.sign(p-0.5) mmd = tf.reduce_sum(tf.square(p*mean_treated - (1-p)*mean_control)) mmd = f*(p-0.5) + safe_sqrt(c + mmd) return mmd def mmd2_lin(X,t,p): ''' Linear MMD ''' it = tf.where(t>0)[:,0] ic = tf.where(t<1)[:,0] Xc = tf.gather(X,ic) Xt = tf.gather(X,it) mean_control = tf.reduce_mean(Xc,reduction_indices=0) mean_treated = tf.reduce_mean(Xt,reduction_indices=0) mmd = tf.reduce_sum(tf.square(2.0*p*mean_treated - 2.0*(1.0-p)*mean_control)) return mmd def mmd2_rbf(X,t,p,sig): """ Computes the l2-RBF MMD for X given t """ it = tf.where(t>0)[:,0] ic = tf.where(t<1)[:,0] Xc = 
tf.gather(X,ic) Xt = tf.gather(X,it) Kcc = tf.exp(-pdist2sq(Xc,Xc)/tf.square(sig)) Kct = tf.exp(-pdist2sq(Xc,Xt)/tf.square(sig)) Ktt = tf.exp(-pdist2sq(Xt,Xt)/tf.square(sig)) m = tf.to_float(tf.shape(Xc)[0]) n = tf.to_float(tf.shape(Xt)[0]) mmd = tf.square(1.0-p)/(m*(m-1.0))*(tf.reduce_sum(Kcc)-m) mmd = mmd + tf.square(p)/(n*(n-1.0))*(tf.reduce_sum(Ktt)-n) mmd = mmd - 2.0*p*(1.0-p)/(m*n)*tf.reduce_sum(Kct) mmd = 4.0*mmd return mmd def pdist2sq(X,Y): """ Computes the squared Euclidean distance between all pairs x in X, y in Y """ C = -2*tf.matmul(X,tf.transpose(Y)) nx = tf.reduce_sum(tf.square(X),1,keep_dims=True) ny = tf.reduce_sum(tf.square(Y),1,keep_dims=True) D = (C + tf.transpose(ny)) + nx return D def pdist2(X,Y): """ Returns the tensorflow pairwise distance matrix """ return safe_sqrt(pdist2sq(X,Y)) def pop_dist(X,t): it = tf.where(t>0)[:,0] ic = tf.where(t<1)[:,0] Xc = tf.gather(X,ic) Xt = tf.gather(X,it) nc = tf.to_float(tf.shape(Xc)[0]) nt = tf.to_float(tf.shape(Xt)[0]) ''' Compute distance matrix''' M = pdist2(Xt,Xc) return M def wasserstein(X,t,p,lam=10,its=10,sq=False,backpropT=False): """ Returns the Wasserstein distance between treatment groups """ it = tf.where(t>0)[:,0] ic = tf.where(t<1)[:,0] Xc = tf.gather(X,ic) Xt = tf.gather(X,it) nc = tf.to_float(tf.shape(Xc)[0]) nt = tf.to_float(tf.shape(Xt)[0]) ''' Compute distance matrix''' if sq: M = pdist2sq(Xt,Xc) else: M = safe_sqrt(pdist2sq(Xt,Xc)) ''' Estimate lambda and delta ''' M_mean = tf.reduce_mean(M) M_drop = tf.nn.dropout(M,10/(nc*nt)) delta = tf.stop_gradient(tf.reduce_max(M)) eff_lam = tf.stop_gradient(lam/M_mean) ''' Compute new distance matrix ''' Mt = M row = delta*tf.ones(tf.shape(M[0:1,:])) col = tf.concat([delta*tf.ones(tf.shape(M[:,0:1])),tf.zeros((1,1))], 0) Mt = tf.concat([M, row], 0) Mt = tf.concat([Mt, col], 1) ''' Compute marginal vectors ''' a = tf.concat([p*tf.ones(tf.shape(tf.where(t>0)[:,0:1]))/nt, (1-p)*tf.ones((1,1))], 0) b = tf.concat([(1-p)*tf.ones(tf.shape(tf.where(t<1)[:,0:1]))/nc, p*tf.ones((1,1))], 0) ''' Compute kernel matrix''' Mlam = eff_lam*Mt K = tf.exp(-Mlam) + 1e-6 # added constant to avoid nan U = K*Mt ainvK = K/a u = a for i in range(0,its): u = 1.0/(tf.matmul(ainvK,(b/tf.transpose(tf.matmul(tf.transpose(u),K))))) v = b/(tf.transpose(tf.matmul(tf.transpose(u),K))) T = u*(tf.transpose(v)*K) if not backpropT: T = tf.stop_gradient(T) E = T*Mt D = 2*tf.reduce_sum(E) return D, Mlam def simplex_project(x,k): """ Projects a vector x onto the k-simplex """ d = x.shape[0] mu = np.sort(x,axis=0)[::-1] nu = (np.cumsum(mu)-k)/range(1,d+1) I = [i for i in range(0,d) if mu[i]>nu[i]] theta = nu[I[-1]] w = np.maximum(x-theta,0) return w
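# Small illustration (not from the original repository) of simplex_project above:
# projecting a point onto the probability simplex (k = 1) yields a non-negative
# vector whose entries sum to k.
w_example = simplex_project(np.array([0.6, 0.3, 0.4]), 1)  # -> array([0.5, 0.2, 0.3]), sums to 1.0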
import numpy as np from collections import defaultdict # the type of float to use throughout the session. _FLOATX = 'float32' _EPSILON = 10e-8 _UID_PREFIXES = defaultdict(int) _IMAGE_DIM_ORDERING = 'th' def epsilon(): '''Returns the value of the fuzz factor used in numeric expressions. ''' return _EPSILON def set_epsilon(e): '''Sets the value of the fuzz factor used in numeric expressions. ''' global _EPSILON _EPSILON = e def floatx(): '''Returns the default float type, as a string (e.g. 'float16', 'float32', 'float64'). ''' return _FLOATX def set_floatx(floatx): global _FLOATX if floatx not in {'float16', 'float32', 'float64'}: raise Exception('Unknown floatx type: ' + str(floatx)) _FLOATX = str(floatx) def cast_to_floatx(x): '''Cast a Numpy array to floatx. ''' return np.asarray(x, dtype=_FLOATX) def image_dim_ordering(): '''Returns the image dimension ordering convention ('th' or 'tf'). ''' return _IMAGE_DIM_ORDERING def set_image_dim_ordering(dim_ordering): '''Sets the value of the image dimension ordering convention ('th' or 'tf'). ''' global _IMAGE_DIM_ORDERING if dim_ordering not in {'tf', 'th'}: raise Exception('Unknown dim_ordering:', dim_ordering) _IMAGE_DIM_ORDERING = str(dim_ordering) def get_uid(prefix=''): _UID_PREFIXES[prefix] += 1 return _UID_PREFIXES[prefix]
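# Usage illustration (not part of the module above): the setters mutate module-level
# defaults, so subsequent helpers pick up the new values.
set_floatx('float64')
example = cast_to_floatx([1, 2, 3])     # ndarray with dtype float64
set_image_dim_ordering('tf')
uid = get_uid('dense')                  # 1 on the first call, 2 on the next, per prefix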
/** * Copyright IBM Corp. 2019, 2020 * * This source code is licensed under the Apache-2.0 license found in the * LICENSE file in the root directory of this source tree. * * Code generated by @carbon/icon-build-helpers. DO NOT EDIT. */ 'use strict'; var Icon = require('../Icon-1083255b.js'); var React = require('react'); require('@carbon/icon-helpers'); require('prop-types'); function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } var React__default = /*#__PURE__*/_interopDefaultLegacy(React); var _path, _path2; var Calculator16 = /*#__PURE__*/React__default['default'].forwardRef(function Calculator16(_ref, ref) { var children = _ref.children, rest = Icon._objectWithoutProperties(_ref, ["children"]); return /*#__PURE__*/React__default['default'].createElement(Icon.Icon, Icon._extends({ width: 16, height: 16, viewBox: "0 0 32 32", xmlns: "http://www.w3.org/2000/svg", fill: "currentColor", ref: ref }, rest), _path || (_path = /*#__PURE__*/React__default['default'].createElement("path", { d: "M26,4V28H6V4H26m0-2H6A2,2,0,0,0,4,4V28a2,2,0,0,0,2,2H26a2,2,0,0,0,2-2V4A2,2,0,0,0,26,2Z" })), _path2 || (_path2 = /*#__PURE__*/React__default['default'].createElement("path", { d: "M9 23H11V25H9zM21 23H23V25H21zM9 18H11V20H9zM21 18H23V20H21zM9 13H11V15H9zM15 23H17V25H15zM15 18H17V20H15zM15 13H17V15H15zM21 13H23V15H21zM9 7H23V10H9z" })), children); }); module.exports = Calculator16;
from reapy import reascript_api as RPR


class Source:

    def __init__(self, id):
        self.id = id

    def __eq__(self, other):
        # Check the type first so comparing with a non-Source object cannot raise.
        return isinstance(other, Source) and self.id == other.id

    @property
    def _args(self):
        return self.id,

    def delete(self):
        """
        Delete source. Be sure that no references to source remain.
        """
        RPR.PCM_Source_Destroy(self.id)

    @property
    def filename(self):
        """
        Return source file name.

        Returns
        -------
        filename : str
            Source file name.
        """
        _, filename, _ = RPR.GetMediaSourceFileName(self.id, "", 10**5)
        return filename

    @property
    def length(self, unit="seconds"):
        """
        Return source length in `unit`.

        Parameters
        ----------
        unit : {"beats", "seconds"}

        Returns
        -------
        length : float
            Source length in `unit`.
        """
        length, _, is_quantized = RPR.GetMediaSourceLength(self.id, 0)
        if is_quantized:
            if unit == "beats":
                return length
            elif unit == "seconds":
                raise NotImplementedError
        else:
            if unit == "beats":
                raise NotImplementedError
            elif unit == "seconds":
                return length

    @property
    def n_channels(self):
        """
        Return number of channels in source media.

        Returns
        -------
        n_channels : int
            Number of channels in source media.
        """
        n_channels = RPR.GetMediaSourceNumChannels(self.id)
        return n_channels

    @property
    def sample_rate(self):
        """
        Return source sample rate.

        Returns
        -------
        sample_rate : int
            Source sample rate.
        """
        sample_rate = RPR.GetMediaSourceSampleRate(self.id)
        return sample_rate

    @property
    def type(self):
        """
        Return source type ("WAV", "MIDI", etc.).

        Returns
        -------
        type : str
            Source type.
        """
        _, type, _ = RPR.GetMediaSourceType(self.id, "", 10**5)
        return type
// MOST Web Framework 2.0 Codename Blueshift Copyright (c) 2017-2022, THEMOST LP All rights reserved const {FunctionContext} = require('./functions'); /** * @augments DataModel */ class DataFilterResolver { constructor() { // } resolveMember(member, callback) { if (/\//.test(member)) { let arr = member.split('/'); callback(null, arr.slice(arr.length - 2).join('.')); } else { callback(null, this.viewAdapter.concat('.', member)); } } resolveMethod(name, args, callback) { callback = callback || function () { }; if (typeof DataFilterResolver.prototype[name] === 'function') { let a = args || []; a.push(callback); try { return DataFilterResolver.prototype[name].apply(this, a); } catch (e) { return callback(e); } } callback(); } /** * @param {Function} callback */ me(callback) { let functionContext = new FunctionContext(this.context, this); functionContext.user().then(function (value) { callback(null, value); }).catch(function (err) { callback(err); }); } /** * @param {Function} callback */ now(callback) { callback(null, new Date()); } /** * @param {Function} callback */ today(callback) { let res = new Date(); res.setHours(0, 0, 0, 0); callback(null, res); } /** * @param {Function} callback */ lang(callback) { let culture = this.context.culture(); if (culture) { return callback(null, culture.substr(0, 2)); } else { return callback(null, "en"); } } user(callback) { return this.me(callback); } } module.exports = { DataFilterResolver };
// Copyright (c) 2011-present, Facebook, Inc. All rights reserved. // This source code is licensed under both the GPLv2 (found in the // COPYING file in the root directory) and Apache 2.0 License // (found in the LICENSE.Apache file in the root directory). // Copyright (c) 2011 The LevelDB Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. See the AUTHORS file for names of contributors. // // A WriteBatchWithIndex with a binary searchable index built for all the keys // inserted. #pragma once #ifndef ROCKSDB_LITE #include <memory> #include <string> #include <vector> #include "rocksdb/comparator.h" #include "rocksdb/iterator.h" #include "rocksdb/slice.h" #include "rocksdb/status.h" #include "rocksdb/write_batch.h" #include "rocksdb/write_batch_base.h" namespace ROCKSDB_NAMESPACE { class ColumnFamilyHandle; class Comparator; class DB; class ReadCallback; struct ReadOptions; struct DBOptions; enum WriteType { kPutRecord, kMergeRecord, kDeleteRecord, kSingleDeleteRecord, kDeleteRangeRecord, kLogDataRecord, kXIDRecord, kUnknownRecord, }; // an entry for Put, Merge, Delete, or SingleDelete entry for write batches. // Used in WBWIIterator. struct WriteEntry { WriteType type = kUnknownRecord; Slice key; Slice value; }; // Iterator of one column family out of a WriteBatchWithIndex. class WBWIIterator { public: virtual ~WBWIIterator() {} virtual bool Valid() const = 0; virtual void SeekToFirst() = 0; virtual void SeekToLast() = 0; virtual void Seek(const Slice& key) = 0; virtual void SeekForPrev(const Slice& key) = 0; virtual void Next() = 0; virtual void Prev() = 0; // the return WriteEntry is only valid until the next mutation of // WriteBatchWithIndex virtual WriteEntry Entry() const = 0; virtual Status status() const = 0; }; // A WriteBatchWithIndex with a binary searchable index built for all the keys // inserted. // In Put(), Merge() Delete(), or SingleDelete(), the same function of the // wrapped will be called. At the same time, indexes will be built. // By calling GetWriteBatch(), a user will get the WriteBatch for the data // they inserted, which can be used for DB::Write(). // A user can call NewIterator() to create an iterator. class WriteBatchWithIndex : public WriteBatchBase { public: // backup_index_comparator: the backup comparator used to compare keys // within the same column family, if column family is not given in the // interface, or we can't find a column family from the column family handle // passed in, backup_index_comparator will be used for the column family. // reserved_bytes: reserved bytes in underlying WriteBatch // max_bytes: maximum size of underlying WriteBatch in bytes // overwrite_key: if true, overwrite the key in the index when inserting // the same key as previously, so iterator will never // show two entries with the same key. 
explicit WriteBatchWithIndex( const Comparator* backup_index_comparator = BytewiseComparator(), size_t reserved_bytes = 0, bool overwrite_key = false, size_t max_bytes = 0); ~WriteBatchWithIndex() override; WriteBatchWithIndex(WriteBatchWithIndex&&); WriteBatchWithIndex& operator=(WriteBatchWithIndex&&); using WriteBatchBase::Put; Status Put(ColumnFamilyHandle* column_family, const Slice& key, const Slice& value) override; Status Put(const Slice& key, const Slice& value) override; using WriteBatchBase::Merge; Status Merge(ColumnFamilyHandle* column_family, const Slice& key, const Slice& value) override; Status Merge(const Slice& key, const Slice& value) override; using WriteBatchBase::Delete; Status Delete(ColumnFamilyHandle* column_family, const Slice& key) override; Status Delete(const Slice& key) override; using WriteBatchBase::SingleDelete; Status SingleDelete(ColumnFamilyHandle* column_family, const Slice& key) override; Status SingleDelete(const Slice& key) override; using WriteBatchBase::DeleteRange; Status DeleteRange(ColumnFamilyHandle* /* column_family */, const Slice& /* begin_key */, const Slice& /* end_key */) override { return Status::NotSupported( "DeleteRange unsupported in WriteBatchWithIndex"); } Status DeleteRange(const Slice& /* begin_key */, const Slice& /* end_key */) override { return Status::NotSupported( "DeleteRange unsupported in WriteBatchWithIndex"); } using WriteBatchBase::PutLogData; Status PutLogData(const Slice& blob) override; using WriteBatchBase::Clear; void Clear() override; using WriteBatchBase::GetWriteBatch; WriteBatch* GetWriteBatch() override; // Create an iterator of a column family. User can call iterator.Seek() to // search to the next entry of or after a key. Keys will be iterated in the // order given by index_comparator. For multiple updates on the same key, // each update will be returned as a separate entry, in the order of update // time. // // The returned iterator should be deleted by the caller. WBWIIterator* NewIterator(ColumnFamilyHandle* column_family); // Create an iterator of the default column family. WBWIIterator* NewIterator(); // Will create a new Iterator that will use WBWIIterator as a delta and // base_iterator as base. // // This function is only supported if the WriteBatchWithIndex was // constructed with overwrite_key=true. // // The returned iterator should be deleted by the caller. // The base_iterator is now 'owned' by the returned iterator. Deleting the // returned iterator will also delete the base_iterator. // // Updating write batch with the current key of the iterator is not safe. // We strongly recommend users not to do it. It will invalidate the current // key() and value() of the iterator. This invalidation happens even before // the write batch update finishes. The state may recover after Next() is // called. Iterator* NewIteratorWithBase(ColumnFamilyHandle* column_family, Iterator* base_iterator, const ReadOptions* opts = nullptr); // default column family Iterator* NewIteratorWithBase(Iterator* base_iterator); // Similar to DB::Get() but will only read the key from this batch. // If the batch does not have enough data to resolve Merge operations, // MergeInProgress status may be returned. Status GetFromBatch(ColumnFamilyHandle* column_family, const DBOptions& options, const Slice& key, std::string* value); // Similar to previous function but does not require a column_family. // Note: An InvalidArgument status will be returned if there are any Merge // operators for this key. Use previous method instead. 
Status GetFromBatch(const DBOptions& options, const Slice& key, std::string* value) { return GetFromBatch(nullptr, options, key, value); } // Similar to DB::Get() but will also read writes from this batch. // // This function will query both this batch and the DB and then merge // the results using the DB's merge operator (if the batch contains any // merge requests). // // Setting read_options.snapshot will affect what is read from the DB // but will NOT change which keys are read from the batch (the keys in // this batch do not yet belong to any snapshot and will be fetched // regardless). Status GetFromBatchAndDB(DB* db, const ReadOptions& read_options, const Slice& key, std::string* value); // An overload of the above method that receives a PinnableSlice Status GetFromBatchAndDB(DB* db, const ReadOptions& read_options, const Slice& key, PinnableSlice* value); Status GetFromBatchAndDB(DB* db, const ReadOptions& read_options, ColumnFamilyHandle* column_family, const Slice& key, std::string* value); // An overload of the above method that receives a PinnableSlice Status GetFromBatchAndDB(DB* db, const ReadOptions& read_options, ColumnFamilyHandle* column_family, const Slice& key, PinnableSlice* value); void MultiGetFromBatchAndDB(DB* db, const ReadOptions& read_options, ColumnFamilyHandle* column_family, const size_t num_keys, const Slice* keys, PinnableSlice* values, Status* statuses, bool sorted_input); // Records the state of the batch for future calls to RollbackToSavePoint(). // May be called multiple times to set multiple save points. void SetSavePoint() override; // Remove all entries in this batch (Put, Merge, Delete, SingleDelete, // PutLogData) since the most recent call to SetSavePoint() and removes the // most recent save point. // If there is no previous call to SetSavePoint(), behaves the same as // Clear(). // // Calling RollbackToSavePoint invalidates any open iterators on this batch. // // Returns Status::OK() on success, // Status::NotFound() if no previous call to SetSavePoint(), // or other Status on corruption. Status RollbackToSavePoint() override; // Pop the most recent save point. // If there is no previous call to SetSavePoint(), Status::NotFound() // will be returned. // Otherwise returns Status::OK(). Status PopSavePoint() override; void SetMaxBytes(size_t max_bytes) override; size_t GetDataSize() const; private: friend class PessimisticTransactionDB; friend class WritePreparedTxn; friend class WriteUnpreparedTxn; friend class WriteBatchWithIndex_SubBatchCnt_Test; // Returns the number of sub-batches inside the write batch. A sub-batch // starts right before inserting a key that is a duplicate of a key in the // last sub-batch. size_t SubBatchCnt(); Status GetFromBatchAndDB(DB* db, const ReadOptions& read_options, ColumnFamilyHandle* column_family, const Slice& key, PinnableSlice* value, ReadCallback* callback); void MultiGetFromBatchAndDB(DB* db, const ReadOptions& read_options, ColumnFamilyHandle* column_family, const size_t num_keys, const Slice* keys, PinnableSlice* values, Status* statuses, bool sorted_input, ReadCallback* callback); struct Rep; std::unique_ptr<Rep> rep; }; } // namespace ROCKSDB_NAMESPACE #endif // !ROCKSDB_LITE
function(page, callback){ var that = this; var key = '%GOOGLEAPIKEY_PSI_NEW%'; //<-- add your API key here https://developers.google.com/speed/docs/insights/v2/first-app#APIKey din't forget to enable it for Google Page Speed Insights if(key==='%'+'GOOGLEAPIKEY_PSI_NEW%'){ callback(that.createResult('SPEED', '"Page Speed Insights v5 FCP and FID desktop" rule not yet enabled! Set <a href="https://developers.google.com/speed/docs/insights/v5/get-started" target="_blank">Google API Key</a> in <a href="'+that.getGlobals().rulesUrl+'" target="_blank">Settings</a>.', "warning")); return; } var strategy = 'desktop'; var url = page.getURL('first'); var type = 'info'; var fcp_color = 'lightgreen'; var fid_color = 'lightgreen'; var psi ='https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url='+encodeURIComponent(url)+'&strategy='+strategy+'&key='+key; fetch(psi) .then( function(response) { if (response.status !== 200) { callback(that.createResult('SPEED', 'No Page Speed Insights v5 FCP and FID desktop data. (Response Status '+response.status+' '+response.text+')', "warning")); return; } response.json().then(function(data) { if (data.loadingExperience.hasOwnProperty('metrics')){ var first_contentful_paint_data=data.loadingExperience.metrics.FIRST_CONTENTFUL_PAINT_MS.percentile; var first_input_delay_data=data.loadingExperience.metrics.FIRST_INPUT_DELAY_MS.percentile; var first_contentful_paint_score=data.loadingExperience.metrics.FIRST_CONTENTFUL_PAINT_MS.category; var first_input_delay_score=data.loadingExperience.metrics.FIRST_INPUT_DELAY_MS.category; } else { callback(that.createResult('SPEED', 'No Page Speed Insights v5 FCP and FID desktop data.', "warning")); return; } if(first_contentful_paint_data > 1000) { fcp_color = "orange"; } if(first_contentful_paint_data > 2500) { fcp_color = "red"; } if(first_input_delay_data > 50) { fid_color = "orange"; } if(first_input_delay_data > 250) { fid_color = "red"; } callback(that.createResult( 'SPEED', 'First Contentful Paint (FCP): <span style="background-color:'+fcp_color+';font-weight:bold;">&nbsp;'+first_contentful_paint_data+'&nbsp;</span> <span style="background-color:'+fcp_color+';font-weight:bold;">&nbsp;'+first_contentful_paint_score+'&nbsp;</span><br>First Input Delay (FID): <span style="background-color:'+fid_color+';font-weight:bold;">&nbsp;'+first_input_delay_data+'&nbsp;</span> <span style="background-color:'+fid_color+';font-weight:bold;">&nbsp;'+first_input_delay_score+'&nbsp;</span><br><a href="https://developers.google.com/speed/pagespeed/insights/?hl=en&url='+url+'" target="_blank">Page Speed Insights</a>', type)); }); } ) .catch(function(err) { callback(that.createResult('SPEED', 'No Page Speed Insights v5 FCP and FID <b>desktop</b> data. '+err+' <a href="https://developers.google.com/speed/pagespeed/insights/?hl=en&url='+url+'" target="_blank">Page Speed Insights</a>', "warning")); }); //callback(this.createResult('test', "async test", "warning")); //return this.waitForAsync(); //return('async'); }
"use strict";function t(t){return t&&"object"==typeof t&&"default"in t?t.default:t}Object.defineProperty(exports,"__esModule",{value:!0});var e,n,r,i,o,a=t(require("jsbi")),u=t(require("tiny-invariant")),s=(t(require("tiny-warning")),require("@ethersproject/address")),c=t(require("big.js")),d=t(require("toformat")),f=t(require("decimal.js-light")),p=require("@ethersproject/solidity"),h=require("@ethersproject/contracts"),l=require("@ethersproject/networks"),m=require("@ethersproject/providers"),v=t(require("@uniswap/v2-core/build/IUniswapV2Pair.json"));(r=exports.ChainId||(exports.ChainId={}))[r.MAINNET=1]="MAINNET",r[r.ROPSTEN=3]="ROPSTEN",r[r.RINKEBY=4]="RINKEBY",r[r["GÖRLI"]=5]="GÖRLI",r[r.KOVAN=42]="KOVAN",r[r.BSCMAINNET=56]="BSCMAINNET",r[r.BSCTESTNET=97]="BSCTESTNET",r[r.MATICMAINNET=137]="MATICMAINNET",r[r.AVALANCHE=43114]="AVALANCHE",(i=exports.TradeType||(exports.TradeType={}))[i.EXACT_INPUT=0]="EXACT_INPUT",i[i.EXACT_OUTPUT=1]="EXACT_OUTPUT",(o=exports.Rounding||(exports.Rounding={}))[o.ROUND_DOWN=0]="ROUND_DOWN",o[o.ROUND_HALF_UP=1]="ROUND_HALF_UP",o[o.ROUND_UP=2]="ROUND_UP";var y,T=function(t){switch(t){case exports.ChainId.BSCMAINNET:case exports.ChainId.BSCTESTNET:return!0;default:return!1}},w=function(t){switch(t){case exports.ChainId.MATICMAINNET:return!0;default:return!1}},A=function(t){switch(t){case exports.ChainId.AVALANCHE:return!0;default:return!1}},I=((e={})[exports.ChainId.AVALANCHE]="0xefa94DE7a4656D787667C749f7E1223D71E9FD88",e),x=function(t){return T(t)?"0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73":w(t)?"0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32":A(t)?I[exports.ChainId.AVALANCHE]:"0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f"},g="0x96e8ac4277198ff8b6f785478aa9a39f403cb768dd02cbee326c3e7da348845f",E="0x00fb7f630766e6a796048ea87d01acd3068e8ff67d078148a3fa3f4a84f69bd5",k=function(t){return T(t)?E:w(t)?"0x96e8ac4277198ff8b6f785478aa9a39f403cb768dd02cbee326c3e7da348845f":A(t)?"0x40231f6b438bce0797c9ada29b718a87ea0a5cea3fe9a771abdd76bd41a3e545":g},C=a.BigInt(1e3),N=a.BigInt(0),b=a.BigInt(1),O=a.BigInt(2),q=a.BigInt(3),B=a.BigInt(5),R=a.BigInt(10),_=a.BigInt(100),S=a.BigInt(997),P=a.BigInt(1e3),D=a.BigInt(9975),U=a.BigInt(1e4);!function(t){t.uint8="uint8",t.uint256="uint256"}(y||(y={}));var F=((n={})[y.uint8]=a.BigInt("0xff"),n[y.uint256]=a.BigInt("0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"),n);function H(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}function M(t,e,n){return e&&H(t.prototype,e),n&&H(t,n),t}function j(){return(j=Object.assign||function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var r in n)Object.prototype.hasOwnProperty.call(n,r)&&(t[r]=n[r])}return t}).apply(this,arguments)}function W(t,e){t.prototype=Object.create(e.prototype),t.prototype.constructor=t,t.__proto__=e}function L(t){return(L=Object.setPrototypeOf?Object.getPrototypeOf:function(t){return t.__proto__||Object.getPrototypeOf(t)})(t)}function V(t,e){return(V=Object.setPrototypeOf||function(t,e){return t.__proto__=e,t})(t,e)}function X(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],(function(){}))),!0}catch(t){return!1}}function K(t,e,n){return(K=X()?Reflect.construct:function(t,e,n){var r=[null];r.push.apply(r,e);var i=new(Function.bind.apply(t,r));return 
n&&V(i,n.prototype),i}).apply(null,arguments)}function Y(t){var e="function"==typeof Map?new Map:void 0;return(Y=function(t){if(null===t||-1===Function.toString.call(t).indexOf("[native code]"))return t;if("function"!=typeof t)throw new TypeError("Super expression must either be null or a function");if(void 0!==e){if(e.has(t))return e.get(t);e.set(t,n)}function n(){return K(t,arguments,L(this).constructor)}return n.prototype=Object.create(t.prototype,{constructor:{value:n,enumerable:!1,writable:!0,configurable:!0}}),V(n,t)})(t)}function G(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}function J(t,e){(null==e||e>t.length)&&(e=t.length);for(var n=0,r=new Array(e);n<e;n++)r[n]=t[n];return r}function Q(t,e){var n;if("undefined"==typeof Symbol||null==t[Symbol.iterator]){if(Array.isArray(t)||(n=function(t,e){if(t){if("string"==typeof t)return J(t,void 0);var n=Object.prototype.toString.call(t).slice(8,-1);return"Object"===n&&t.constructor&&(n=t.constructor.name),"Map"===n||"Set"===n?Array.from(t):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?J(t,void 0):void 0}}(t))||e&&t&&"number"==typeof t.length){n&&(t=n);var r=0;return function(){return r>=t.length?{done:!0}:{done:!1,value:t[r++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}return(n=t[Symbol.iterator]()).next.bind(n)}var $="setPrototypeOf"in Object,z=function(t){function e(){var n;return(n=t.call(this)||this).isInsufficientReservesError=!0,n.name=n.constructor.name,$&&Object.setPrototypeOf(G(n),(this instanceof e?this.constructor:void 0).prototype),n}return W(e,t),e}(Y(Error)),Z=function(t){function e(){var n;return(n=t.call(this)||this).isInsufficientInputAmountError=!0,n.name=n.constructor.name,$&&Object.setPrototypeOf(G(n),(this instanceof e?this.constructor:void 0).prototype),n}return W(e,t),e}(Y(Error));function tt(t,e){a.greaterThanOrEqual(t,N)||u(!1),a.lessThanOrEqual(t,F[e])||u(!1)}function et(t){try{return s.getAddress(t)}catch(t){u(!1)}}function nt(t){return t instanceof a?t:a.BigInt("bigint"==typeof t?t.toString():t)}function rt(t){tt(t,y.uint256);var e,n=N;if(a.greaterThan(t,q))for(n=t,e=a.add(a.divide(t,O),b);a.lessThan(e,n);)n=e,e=a.divide(a.add(a.divide(t,e),e),O);else a.notEqual(t,N)&&(n=b);return n}function it(t,e,n,r){if(n>0||u(!1),t.length<=n||u(!1),0===t.length)return t.push(e),null;var i=t.length===n;if(i&&r(t[t.length-1],e)<=0)return e;for(var o=0,a=t.length;o<a;){var s=o+a>>>1;r(t[s],e)<=0?o=s+1:a=s}return t.splice(o,0,e),i?t.pop():null}var ot,at=function(){function t(t,e,n){tt(a.BigInt(t),y.uint8),this.decimals=t,this.symbol=e,this.name=n}return t.getETHER=function(e){return T(e)?new t(18,"BNB","Binance"):w(e)?new t(18,"MATIC","Polygon"):A(e)?new t(18,"AVAX","Avalanche"):new t(18,"ETH","Ether")},t}(),ut=at.getETHER,st=function(t){function e(e,n,r,i,o){var a;return(a=t.call(this,r,i,o)||this).chainId=e,a.address=et(n),a}W(e,t);var n=e.prototype;return n.equals=function(t){return this===t||this.chainId===t.chainId&&this.address===t.address},n.sortsBefore=function(t){return this.chainId!==t.chainId&&u(!1),this.address===t.address&&u(!1),this.address.toLowerCase()<t.address.toLowerCase()},e}(at);function ct(t,e){return t instanceof st&&e instanceof st?t.equals(e):!(t instanceof st||e instanceof st||t!==e)}var dt,ft,pt=((ot={})[exports.ChainId.MAINNET]=new 
st(exports.ChainId.MAINNET,"0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",18,"WETH","Wrapped Ether"),ot[exports.ChainId.ROPSTEN]=new st(exports.ChainId.ROPSTEN,"0xc778417E063141139Fce010982780140Aa0cD5Ab",18,"WETH","Wrapped Ether"),ot[exports.ChainId.RINKEBY]=new st(exports.ChainId.RINKEBY,"0xc778417E063141139Fce010982780140Aa0cD5Ab",18,"WETH","Wrapped Ether"),ot[exports.ChainId.GÖRLI]=new st(exports.ChainId.GÖRLI,"0xB4FBF271143F4FBf7B91A5ded31805e42b2208d6",18,"WETH","Wrapped Ether"),ot[exports.ChainId.KOVAN]=new st(exports.ChainId.KOVAN,"0xd0A1E359811322d97991E03f863a0C30C2cF029C",18,"WETH","Wrapped Ether"),ot[exports.ChainId.BSCMAINNET]=new st(exports.ChainId.BSCMAINNET,"0xbb4CdB9CBd36B01bD1cBaEBF2De08d9173bc095c",18,"WBNB","Wrapped BNB"),ot[exports.ChainId.BSCTESTNET]=new st(exports.ChainId.BSCTESTNET,"0xaE8E19eFB41e7b96815649A6a60785e1fbA84C1e",18,"WBNB","Wrapped BNB"),ot[exports.ChainId.MATICMAINNET]=new st(exports.ChainId.MATICMAINNET,"0x0d500B1d8E8eF31E21C99d1Db9A6444d3ADf1270",18,"WMATIC","Wrapped Matic"),ot[exports.ChainId.AVALANCHE]=new st(exports.ChainId.AVALANCHE,"0xB31f66AA3C1e785363F0875A1B74E27b85FD66c7",18,"WAVAX","Wrapped Avax"),ot),ht=d(f),lt=d(c),mt=((dt={})[exports.Rounding.ROUND_DOWN]=ht.ROUND_DOWN,dt[exports.Rounding.ROUND_HALF_UP]=ht.ROUND_HALF_UP,dt[exports.Rounding.ROUND_UP]=ht.ROUND_UP,dt),vt=((ft={})[exports.Rounding.ROUND_DOWN]=0,ft[exports.Rounding.ROUND_HALF_UP]=1,ft[exports.Rounding.ROUND_UP]=3,ft),yt=function(){function t(t,e){void 0===e&&(e=b),this.numerator=nt(t),this.denominator=nt(e)}var e=t.prototype;return e.invert=function(){return new t(this.denominator,this.numerator)},e.add=function(e){var n=e instanceof t?e:new t(nt(e));return a.equal(this.denominator,n.denominator)?new t(a.add(this.numerator,n.numerator),this.denominator):new t(a.add(a.multiply(this.numerator,n.denominator),a.multiply(n.numerator,this.denominator)),a.multiply(this.denominator,n.denominator))},e.subtract=function(e){var n=e instanceof t?e:new t(nt(e));return a.equal(this.denominator,n.denominator)?new t(a.subtract(this.numerator,n.numerator),this.denominator):new t(a.subtract(a.multiply(this.numerator,n.denominator),a.multiply(n.numerator,this.denominator)),a.multiply(this.denominator,n.denominator))},e.lessThan=function(e){var n=e instanceof t?e:new t(nt(e));return a.lessThan(a.multiply(this.numerator,n.denominator),a.multiply(n.numerator,this.denominator))},e.equalTo=function(e){var n=e instanceof t?e:new t(nt(e));return a.equal(a.multiply(this.numerator,n.denominator),a.multiply(n.numerator,this.denominator))},e.greaterThan=function(e){var n=e instanceof t?e:new t(nt(e));return a.greaterThan(a.multiply(this.numerator,n.denominator),a.multiply(n.numerator,this.denominator))},e.multiply=function(e){var n=e instanceof t?e:new t(nt(e));return new t(a.multiply(this.numerator,n.numerator),a.multiply(this.denominator,n.denominator))},e.divide=function(e){var n=e instanceof t?e:new t(nt(e));return new t(a.multiply(this.numerator,n.denominator),a.multiply(this.denominator,n.numerator))},e.toSignificant=function(t,e,n){void 0===e&&(e={groupSeparator:""}),void 0===n&&(n=exports.Rounding.ROUND_HALF_UP),Number.isInteger(t)||u(!1),t>0||u(!1),ht.set({precision:t+1,rounding:mt[n]});var r=new ht(this.numerator.toString()).div(this.denominator.toString()).toSignificantDigits(t);return r.toFormat(r.decimalPlaces(),e)},e.toFixed=function(t,e,n){return void 0===e&&(e={groupSeparator:""}),void 0===n&&(n=exports.Rounding.ROUND_HALF_UP),Number.isInteger(t)||u(!1),t>=0||u(!1),lt.DP=t,lt.RM=vt[n],new 
lt(this.numerator.toString()).div(this.denominator.toString()).toFormat(t,e)},M(t,[{key:"quotient",get:function(){return a.divide(this.numerator,this.denominator)}},{key:"remainder",get:function(){return new t(a.remainder(this.numerator,this.denominator),this.denominator)}}]),t}(),Tt=d(c),wt=function(t){function e(e,n){var r,i=nt(n);return tt(i,y.uint256),(r=t.call(this,i,a.exponentiate(R,a.BigInt(e.decimals)))||this).currency=e,r}W(e,t),e.ether=function(t,n){return new e(ut(n),t)};var n=e.prototype;return n.add=function(t){return ct(this.currency,t.currency)||u(!1),new e(this.currency,a.add(this.raw,t.raw))},n.subtract=function(t){return ct(this.currency,t.currency)||u(!1),new e(this.currency,a.subtract(this.raw,t.raw))},n.toSignificant=function(e,n,r){return void 0===e&&(e=6),void 0===r&&(r=exports.Rounding.ROUND_DOWN),t.prototype.toSignificant.call(this,e,n,r)},n.toFixed=function(e,n,r){return void 0===e&&(e=this.currency.decimals),void 0===r&&(r=exports.Rounding.ROUND_DOWN),e<=this.currency.decimals||u(!1),t.prototype.toFixed.call(this,e,n,r)},n.toExact=function(t){return void 0===t&&(t={groupSeparator:""}),Tt.DP=this.currency.decimals,new Tt(this.numerator.toString()).div(this.denominator.toString()).toFormat(t)},M(e,[{key:"raw",get:function(){return this.numerator}}]),e}(yt),At=function(t){function e(e,n){var r;return(r=t.call(this,e,n)||this).token=e,r}W(e,t);var n=e.prototype;return n.add=function(t){return this.token.equals(t.token)||u(!1),new e(this.token,a.add(this.raw,t.raw))},n.subtract=function(t){return this.token.equals(t.token)||u(!1),new e(this.token,a.subtract(this.raw,t.raw))},e}(wt),It=function(t){function e(e,n,r,i){var o;return(o=t.call(this,i,r)||this).baseCurrency=e,o.quoteCurrency=n,o.scalar=new yt(a.exponentiate(R,a.BigInt(e.decimals)),a.exponentiate(R,a.BigInt(n.decimals))),o}W(e,t),e.fromRoute=function(t){for(var n,r=[],i=Q(t.pairs.entries());!(n=i()).done;){var o=n.value,a=o[1];r.push(t.path[o[0]].equals(a.token0)?new e(a.reserve0.currency,a.reserve1.currency,a.reserve0.raw,a.reserve1.raw):new e(a.reserve1.currency,a.reserve0.currency,a.reserve1.raw,a.reserve0.raw))}return r.slice(1).reduce((function(t,e){return t.multiply(e)}),r[0])};var n=e.prototype;return n.invert=function(){return new e(this.quoteCurrency,this.baseCurrency,this.numerator,this.denominator)},n.multiply=function(n){ct(this.quoteCurrency,n.baseCurrency)||u(!1);var r=t.prototype.multiply.call(this,n);return new e(this.baseCurrency,n.quoteCurrency,r.denominator,r.numerator)},n.quote=function(e,n){return ct(e.currency,this.baseCurrency)||u(!1),this.quoteCurrency instanceof st?new At(this.quoteCurrency,t.prototype.multiply.call(this,e.raw).quotient):wt.ether(t.prototype.multiply.call(this,e.raw).quotient,n)},n.toSignificant=function(t,e,n){return void 0===t&&(t=6),this.adjusted.toSignificant(t,e,n)},n.toFixed=function(t,e,n){return void 0===t&&(t=4),this.adjusted.toFixed(t,e,n)},M(e,[{key:"raw",get:function(){return new yt(this.numerator,this.denominator)}},{key:"adjusted",get:function(){return t.prototype.multiply.call(this,this.scalar)}}]),e}(yt),xt={},gt=function(){function t(e,n){var r=e.token.sortsBefore(n.token)?[e,n]:[n,e];this.liquidityToken=new st(r[0].token.chainId,t.getAddress(r[0].token,r[1].token),18,"UNI-V2","Uniswap V2"),this.tokenAmounts=r}t.getAddress=function(t,e){var n,r,i,o,a,u=t.sortsBefore(e)?[t,e]:[e,t],c=u[0].chainId;return void 0===(null===(n=xt)||void 0===n||null===(r=n[u[0].address])||void 0===r?void 
0:r[u[1].address])&&(xt=j({},xt,((a={})[u[0].address]=j({},null===(i=xt)||void 0===i?void 0:i[u[0].address],((o={})[u[1].address]=s.getCreate2Address(x(c),p.keccak256(["bytes"],[p.pack(["address","address"],[u[0].address,u[1].address])]),k(c)),o)),a))),xt[u[0].address][u[1].address]};var e=t.prototype;return e.involvesToken=function(t){return t.equals(this.token0)||t.equals(this.token1)},e.priceOf=function(t){return this.involvesToken(t)||u(!1),t.equals(this.token0)?this.token0Price:this.token1Price},e.reserveOf=function(t){return this.involvesToken(t)||u(!1),t.equals(this.token0)?this.reserve0:this.reserve1},e.getOutputAmount=function(e){if(this.involvesToken(e.token)||u(!1),a.equal(this.reserve0.raw,N)||a.equal(this.reserve1.raw,N))throw new z;var n=this.reserveOf(e.token),r=this.reserveOf(e.token.equals(this.token0)?this.token1:this.token0),i=a.multiply(e.raw,T(this.chainId)?D:S),o=a.multiply(i,r.raw),s=a.add(a.multiply(n.raw,T(this.chainId)?U:P),i),c=new At(e.token.equals(this.token0)?this.token1:this.token0,a.divide(o,s));if(a.equal(c.raw,N))throw new Z;return[c,new t(n.add(e),r.subtract(c))]},e.getInputAmount=function(e){if(this.involvesToken(e.token)||u(!1),a.equal(this.reserve0.raw,N)||a.equal(this.reserve1.raw,N)||a.greaterThanOrEqual(e.raw,this.reserveOf(e.token).raw))throw new z;var n=this.reserveOf(e.token),r=this.reserveOf(e.token.equals(this.token0)?this.token1:this.token0),i=a.multiply(a.multiply(r.raw,e.raw),T(this.chainId)?U:P),o=a.multiply(a.subtract(n.raw,e.raw),T(this.chainId)?D:S),s=new At(e.token.equals(this.token0)?this.token1:this.token0,a.add(a.divide(i,o),b));return[s,new t(r.add(s),n.subtract(e))]},e.getLiquidityMinted=function(t,e,n){t.token.equals(this.liquidityToken)||u(!1);var r,i=e.token.sortsBefore(n.token)?[e,n]:[n,e];if(i[0].token.equals(this.token0)&&i[1].token.equals(this.token1)||u(!1),a.equal(t.raw,N))r=a.subtract(rt(a.multiply(i[0].raw,i[1].raw)),C);else{var o=a.divide(a.multiply(i[0].raw,t.raw),this.reserve0.raw),s=a.divide(a.multiply(i[1].raw,t.raw),this.reserve1.raw);r=a.lessThanOrEqual(o,s)?o:s}if(!a.greaterThan(r,N))throw new Z;return new At(this.liquidityToken,r)},e.getLiquidityValue=function(t,e,n,r,i){var o;if(void 0===r&&(r=!1),this.involvesToken(t)||u(!1),e.token.equals(this.liquidityToken)||u(!1),n.token.equals(this.liquidityToken)||u(!1),a.lessThanOrEqual(n.raw,e.raw)||u(!1),r){i||u(!1);var s=nt(i);if(a.equal(s,N))o=e;else{var c=rt(a.multiply(this.reserve0.raw,this.reserve1.raw)),d=rt(s);if(a.greaterThan(c,d)){var f=a.multiply(e.raw,a.subtract(c,d)),p=a.add(a.multiply(c,B),d),h=a.divide(f,p);o=e.add(new At(this.liquidityToken,h))}else o=e}}else o=e;return new At(t,a.divide(a.multiply(n.raw,this.reserveOf(t).raw),o.raw))},M(t,[{key:"token0Price",get:function(){return new It(this.token0,this.token1,this.tokenAmounts[0].raw,this.tokenAmounts[1].raw)}},{key:"token1Price",get:function(){return new It(this.token1,this.token0,this.tokenAmounts[1].raw,this.tokenAmounts[0].raw)}},{key:"chainId",get:function(){return this.token0.chainId}},{key:"token0",get:function(){return this.tokenAmounts[0].token}},{key:"token1",get:function(){return this.tokenAmounts[1].token}},{key:"reserve0",get:function(){return this.tokenAmounts[0]}},{key:"reserve1",get:function(){return this.tokenAmounts[1]}}]),t}(),Et=function(){function t(t,e,n){t.length>0||u(!1),t.every((function(e){return e.chainId===t[0].chainId}))||u(!1),e instanceof st&&t[0].involvesToken(e)||e===ut(t[0].chainId)&&t[0].involvesToken(pt[t[0].chainId])||u(!1),void 0===n||n instanceof 
st&&t[t.length-1].involvesToken(n)||n===ut(t[0].chainId)&&t[t.length-1].involvesToken(pt[t[0].chainId])||u(!1);for(var r,i=[e instanceof st?e:pt[t[0].chainId]],o=Q(t.entries());!(r=o()).done;){var a=r.value,s=a[1],c=i[a[0]];c.equals(s.token0)||c.equals(s.token1)||u(!1);var d=c.equals(s.token0)?s.token1:s.token0;i.push(d)}this.pairs=t,this.path=i,this.midPrice=It.fromRoute(this),this.input=e,this.output=null!=n?n:i[i.length-1]}return M(t,[{key:"chainId",get:function(){return this.pairs[0].chainId}}]),t}(),kt=new yt(_),Ct=function(t){function e(){return t.apply(this,arguments)||this}W(e,t);var n=e.prototype;return n.toSignificant=function(t,e,n){return void 0===t&&(t=5),this.multiply(kt).toSignificant(t,e,n)},n.toFixed=function(t,e,n){return void 0===t&&(t=2),this.multiply(kt).toFixed(t,e,n)},e}(yt);function Nt(t,e){return ct(t.inputAmount.currency,e.inputAmount.currency)||u(!1),ct(t.outputAmount.currency,e.outputAmount.currency)||u(!1),t.outputAmount.equalTo(e.outputAmount)?t.inputAmount.equalTo(e.inputAmount)?0:t.inputAmount.lessThan(e.inputAmount)?-1:1:t.outputAmount.lessThan(e.outputAmount)?1:-1}function bt(t,e){var n=Nt(t,e);return 0!==n?n:t.priceImpact.lessThan(e.priceImpact)?-1:t.priceImpact.greaterThan(e.priceImpact)?1:t.route.path.length-e.route.path.length}function Ot(t,e){return t instanceof At?t:t.currency===ut(e)?new At(pt[e],t.raw):void u(!1)}function qt(t,e){return t instanceof st?t:t===ut(e)?pt[e]:void u(!1)}var Bt=function(){function t(t,e,n){var r=new Array(t.path.length),i=new Array(t.pairs.length);if(n===exports.TradeType.EXACT_INPUT){ct(e.currency,t.input)||u(!1),r[0]=Ot(e,t.chainId);for(var o=0;o<t.path.length-1;o++){var a=t.pairs[o].getOutputAmount(r[o]),s=a[1];r[o+1]=a[0],i[o]=s}}else{ct(e.currency,t.output)||u(!1),r[r.length-1]=Ot(e,t.chainId);for(var c=t.path.length-1;c>0;c--){var d=t.pairs[c-1].getInputAmount(r[c]),f=d[1];r[c-1]=d[0],i[c-1]=f}}var p,h,l,m=t.chainId;this.route=t,this.tradeType=n,this.inputAmount=n===exports.TradeType.EXACT_INPUT?e:t.input===ut(m)?wt.ether(r[0].raw,m):r[0],this.outputAmount=n===exports.TradeType.EXACT_OUTPUT?e:t.output===ut(m)?wt.ether(r[r.length-1].raw,m):r[r.length-1],this.executionPrice=new It(this.inputAmount.currency,this.outputAmount.currency,this.inputAmount.raw,this.outputAmount.raw),this.nextMidPrice=It.fromRoute(new Et(i,t.input)),this.priceImpact=(p=this.outputAmount,l=(h=t.midPrice.raw.multiply(this.inputAmount.raw)).subtract(p.raw).divide(h),new Ct(l.numerator,l.denominator))}t.exactIn=function(e,n){return new t(e,n,exports.TradeType.EXACT_INPUT)},t.exactOut=function(e,n){return new t(e,n,exports.TradeType.EXACT_OUTPUT)};var e=t.prototype;return e.minimumAmountOut=function(t){if(t.lessThan(N)&&u(!1),this.tradeType===exports.TradeType.EXACT_OUTPUT)return this.outputAmount;var e=new yt(b).add(t).invert().multiply(this.outputAmount.raw).quotient;return this.outputAmount instanceof At?new At(this.outputAmount.token,e):wt.ether(e,this.route.chainId)},e.maximumAmountIn=function(t){if(t.lessThan(N)&&u(!1),this.tradeType===exports.TradeType.EXACT_INPUT)return this.inputAmount;var e=new yt(b).add(t).multiply(this.inputAmount.raw).quotient;return this.inputAmount instanceof At?new At(this.inputAmount.token,e):wt.ether(e,this.route.chainId)},t.bestTradeExactIn=function(e,n,r,i,o,a,s){var c=void 0===i?{}:i,d=c.maxNumResults,f=void 0===d?3:d,p=c.maxHops,h=void 0===p?3:p;void 0===o&&(o=[]),void 0===a&&(a=n),void 0===s&&(s=[]),e.length>0||u(!1),h>0||u(!1),a===n||o.length>0||u(!1);var l=n instanceof At?n.token.chainId:r instanceof 
st?r.chainId:void 0;void 0===l&&u(!1);for(var m=Ot(n,l),v=qt(r,l),y=0;y<e.length;y++){var T=e[y];if((T.token0.equals(m.token)||T.token1.equals(m.token))&&!T.reserve0.equalTo(N)&&!T.reserve1.equalTo(N)){var w=void 0;try{w=T.getOutputAmount(m)[0]}catch(t){if(t.isInsufficientInputAmountError)continue;throw t}if(w.token.equals(v))it(s,new t(new Et([].concat(o,[T]),a.currency,r),a,exports.TradeType.EXACT_INPUT),f,bt);else if(h>1&&e.length>1){var A=e.slice(0,y).concat(e.slice(y+1,e.length));t.bestTradeExactIn(A,w,r,{maxNumResults:f,maxHops:h-1},[].concat(o,[T]),a,s)}}}return s},t.bestTradeExactOut=function(e,n,r,i,o,a,s){var c=void 0===i?{}:i,d=c.maxNumResults,f=void 0===d?3:d,p=c.maxHops,h=void 0===p?3:p;void 0===o&&(o=[]),void 0===a&&(a=r),void 0===s&&(s=[]),e.length>0||u(!1),h>0||u(!1),a===r||o.length>0||u(!1);var l=r instanceof At?r.token.chainId:n instanceof st?n.chainId:void 0;void 0===l&&u(!1);for(var m=Ot(r,l),v=qt(n,l),y=0;y<e.length;y++){var T=e[y];if((T.token0.equals(m.token)||T.token1.equals(m.token))&&!T.reserve0.equalTo(N)&&!T.reserve1.equalTo(N)){var w=void 0;try{w=T.getInputAmount(m)[0]}catch(t){if(t.isInsufficientReservesError)continue;throw t}if(w.token.equals(v))it(s,new t(new Et([T].concat(o),n,a.currency),a,exports.TradeType.EXACT_OUTPUT),f,bt);else if(h>1&&e.length>1){var A=e.slice(0,y).concat(e.slice(y+1,e.length));t.bestTradeExactOut(A,n,w,{maxNumResults:f,maxHops:h-1},[T].concat(o),a,s)}}}return s},t}();function Rt(t){return"0x"+t.raw.toString(16)}var _t,St=function(){function t(){}return t.swapCallParameters=function(t,e){var n=t.inputAmount.currency===ut(t.route.chainId),r=t.outputAmount.currency===ut(t.route.chainId);n&&r&&u(!1),!("ttl"in e)||e.ttl>0||u(!1);var i,o,a,s=et(e.recipient),c=Rt(t.maximumAmountIn(e.allowedSlippage)),d=Rt(t.minimumAmountOut(e.allowedSlippage)),f=t.route.path.map((function(t){return t.address})),p="ttl"in e?"0x"+(Math.floor((new Date).getTime()/1e3)+e.ttl).toString(16):"0x"+e.deadline.toString(16),h=Boolean(e.feeOnTransfer);switch(t.tradeType){case exports.TradeType.EXACT_INPUT:n?(i=h?"swapExactETHForTokensSupportingFeeOnTransferTokens":"swapExactETHForTokens",o=[d,f,s,p],a=c):r?(i=h?"swapExactTokensForETHSupportingFeeOnTransferTokens":"swapExactTokensForETH",o=[c,d,f,s,p],a="0x0"):(i=h?"swapExactTokensForTokensSupportingFeeOnTransferTokens":"swapExactTokensForTokens",o=[c,d,f,s,p],a="0x0");break;case exports.TradeType.EXACT_OUTPUT:h&&u(!1),n?(i="swapETHForExactTokens",o=[d,f,s,p],a=c):r?(i="swapTokensForExactETH",o=[d,c,f,s,p],a="0x0"):(i="swapTokensForExactTokens",o=[d,c,f,s,p],a="0x0")}return{methodName:i,args:o,value:a}},t}(),Pt=[{constant:!0,inputs:[],name:"decimals",outputs:[{name:"",type:"uint8"}],payable:!1,stateMutability:"view",type:"function"},{constant:!0,inputs:[{name:"",type:"address"}],name:"balanceOf",outputs:[{name:"",type:"uint256"}],payable:!1,stateMutability:"view",type:"function"}],Dt=((_t={})[exports.ChainId.MAINNET]={"0xE0B7927c4aF23765Cb51314A0E0521A9645F0E2A":9},_t[exports.ChainId.BSCMAINNET]={"0xE0B7927c4aF23765Cb51314A0E0521A9645F0E2A":9},_t),Ut=function(){function t(){}return t.fetchTokenData=function(t,e,n,r,i){try{var o,a,u=function(n){return new st(t,e,n,r,i)};void 0===n&&(n=m.getDefaultProvider(l.getNetwork(t)));var s="number"==typeof(null===(o=Dt)||void 0===o||null===(a=o[t])||void 0===a?void 0:a[e]);return Promise.resolve(s?u(Dt[t][e]):Promise.resolve(new h.Contract(e,Pt,n).decimals().then((function(n){var r,i,o;return Dt=j({},Dt,((o={})[t]=j({},null===(r=Dt)||void 0===r?void 
0:r[t],((i={})[e]=n,i)),o)),n}))).then(u))}catch(t){return Promise.reject(t)}},t.fetchPairData=function(t,e,n){try{void 0===n&&(n=m.getDefaultProvider(l.getNetwork(t.chainId))),t.chainId!==e.chainId&&u(!1);var r=gt.getAddress(t,e);return Promise.resolve(new h.Contract(r,v.abi,n).getReserves()).then((function(n){var r=n[0],i=n[1],o=t.sortsBefore(e)?[r,i]:[i,r];return new gt(new At(t,o[0]),new At(e,o[1]))}))}catch(t){return Promise.reject(t)}},t}();exports.JSBI=a,exports.Currency=at,exports.CurrencyAmount=wt,exports.FACTORY_ADDRESS="0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f",exports.FACTORY_ADDRESS_BSC="0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73",exports.Fetcher=Ut,exports.Fraction=yt,exports.GET_ETHER=ut,exports.GET_FACTORY_ADDRESS=x,exports.GET_INIT_CODE_HASH=k,exports.INIT_CODE_HASH=g,exports.INIT_CODE_HASH_BSC=E,exports.InsufficientInputAmountError=Z,exports.InsufficientReservesError=z,exports.MINIMUM_LIQUIDITY=C,exports.Pair=gt,exports.Percent=Ct,exports.Price=It,exports.Route=Et,exports.Router=St,exports.Token=st,exports.TokenAmount=At,exports.Trade=Bt,exports.WETH=pt,exports.currencyEquals=ct,exports.inputOutputComparator=Nt,exports.isAVAX=A,exports.isBSC=T,exports.isMATIC=w,exports.tradeComparator=bt; //# sourceMappingURL=sdk.cjs.production.min.js.map
from running.config import Configuration
import pytest


def test_override():
    c = Configuration({
        "a": {"b": 1, "c": 42},
        "d": ["foo", "bar"]
    })
    c.override("a.c", 43)
    c.override("d.1", "buzz")
    assert c.get("a")["b"] == 1
    assert c.get("a")["c"] == 43
    assert c.get("d") == ["foo", "buzz"]


def test_combine():
    c1 = Configuration({
        "a": {"b": 1, "c": 42},
        "d": ["foo", "bar"]
    })
    c2 = Configuration({
        "a": {"b": 2, "e": 43},
        "d": ["fizz", "buzz"],
        "f": 100
    })
    c = c1.combine(c2)
    assert c.get("a") == {"b": 2, "c": 42, "e": 43}
    assert c.get("d") == ["foo", "bar", "fizz", "buzz"]
    assert c.get("f") == 100


def test_combine_fail():
    c1 = Configuration({
        "a": "val1",
        "b": "b"
    })
    c2 = Configuration({
        "a": "val2",
        "c": "c"
    })
    with pytest.raises(TypeError):
        c1.combine(c2)


def test_resolve_suites():
    c = Configuration({"suites": {
        "dacapo2006": {
            "type": "DaCapo",
            "release": "2006",
            "path": "/usr/share/benchmarks/dacapo/dacapo-2006-10-MR2.jar",
            "timing_iteration": 3
        }
    }})
    c.resolve_class()
    dacapo2006 = c.get("suites")["dacapo2006"]
    assert dacapo2006.release == "2006"
    assert dacapo2006.path.stem == "dacapo-2006-10-MR2"


def test_resolve_modifiers():
    c = Configuration({"modifiers": {
        "ss": {
            "type": "EnvVar",
            "var": "MMTK_PLAN",
            "val": "SemiSpace"
        }
    }})
    c.resolve_class()
    ss = c.get("modifiers")["ss"]
    assert ss.var == "MMTK_PLAN"
    assert ss.val == "SemiSpace"


def test_resolve_jvms():
    c = Configuration({"runtimes": {
        "adoptopenjdk-8": {
            "type": "OpenJDK",
            "release": "8",
            "home": "/usr/lib/jvm/adoptopenjdk-8-hotspot-amd64"
        }
    }})
    c.resolve_class()
    jdk8 = c.get("runtimes")["adoptopenjdk-8"]
    assert str(
        jdk8.executable) == "/usr/lib/jvm/adoptopenjdk-8-hotspot-amd64/bin/java"
    assert jdk8.release == 8
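# Illustrative sketch only: a minimal dotted-path override helper showing the
# behaviour that test_override() above exercises ("a.c" walks into a nested
# dict, "d.1" indexes into a list). The real running.config.Configuration
# implementation may differ; `_override_sketch` and `data` are made-up names.
def _override_sketch(data, dotted_key, value):
    *parents, last = dotted_key.split(".")
    node = data
    for part in parents:
        node = node[int(part)] if isinstance(node, list) else node[part]
    if isinstance(node, list):
        node[int(last)] = value
    else:
        node[last] = value


data = {"a": {"b": 1, "c": 42}, "d": ["foo", "bar"]}
_override_sketch(data, "a.c", 43)
_override_sketch(data, "d.1", "buzz")
assert data == {"a": {"b": 1, "c": 43}, "d": ["foo", "buzz"]}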
from wisdem.aeroelasticse.runFAST_pywrapper import runFAST_pywrapper, runFAST_pywrapper_batch from wisdem.aeroelasticse.CaseGen_IEC import CaseGen_IEC eagle = False iec = CaseGen_IEC() iec.Turbine_Class = 'III' # I, II, III, IV iec.Turbulence_Class = 'A' iec.D = 198. iec.z_hub = 119. TMax = 120. Vrated = 8.3 Ttrans = 30. TStart = 0. # Turbine Data iec.init_cond = {} # can leave as {} if data not available iec.init_cond[("ElastoDyn","RotSpeed")] = {'U':[3., 5., 7., 8.3, 25.]} iec.init_cond[("ElastoDyn","RotSpeed")]['val'] = [2.92, 4.88, 6.81, 7.835, 7.88] iec.init_cond[("ElastoDyn","BlPitch1")] = {'U':[3., 8.2, 9., 11., 13., 15., 17., 19., 21., 23., 25]} iec.init_cond[("ElastoDyn","BlPitch1")]['val'] = [0., 0., 2.93, 8.76, 12.01, 14.82, 17.37, 19.73, 21.96, 24.08, 26.10] iec.init_cond[("ElastoDyn","BlPitch2")] = iec.init_cond[("ElastoDyn","BlPitch1")] iec.init_cond[("ElastoDyn","BlPitch3")] = iec.init_cond[("ElastoDyn","BlPitch1")] # DLC inputs iec.dlc_inputs = {} # iec.dlc_inputs['DLC'] = [1.1, 1.3, 1.4, 1.5, 5.1] # iec.dlc_inputs['U'] = [[3., 5., 7., 9., 11., 13., 15., 17., 19., 21., 23., 25], [3., 5., 7., 9., 11., 13., 15., 17., 19., 21., 23., 25],[Vrated - 2., Vrated, Vrated + 2.],[3., 5., 7., 9., 11., 13., 15., 17., 19., 21., 23., 25], [Vrated - 2., Vrated, Vrated + 2., 25.]] # iec.dlc_inputs['Seeds'] = [range(1,7), range(1,7),[],[], range(1,7)] # iec.dlc_inputs['Yaw'] = [[], [], [], [], []] iec.dlc_inputs['DLC'] = [1.1] iec.dlc_inputs['U'] = [[9.]] iec.dlc_inputs['Seeds'] = [[2]] iec.dlc_inputs['Yaw'] = [[]] iec.PC_MaxRat = 2. iec.TStart = Ttrans iec.TMax = TMax # wind file length iec.transient_dir_change = 'both' # '+','-','both': sign for transient events in EDC, EWS iec.transient_shear_orientation = 'both' # 'v','h','both': vertical or horizontal shear for EWS # Naming, file management, etc iec.wind_dir = 'outputs/wind' iec.case_name_base = 'IEA10' if eagle: iec.Turbsim_exe = '/projects/windse/importance_sampling/WT_Codes/Turbsim/TurbSim/bin/TurbSim_glin64' iec.cores = 36 else: iec.Turbsim_exe = '/Users/pbortolo/work/2_openfast/TurbSim/bin/TurbSim_glin64' iec.cores = 1 iec.debug_level = 2 iec.parallel_windfile_gen = True iec.run_dir = 'outputs/IEA10' # Run case generator / wind file writing case_inputs = {} case_inputs[('Fst','OutFileFmt')] = {'vals':[1], 'group':0} case_inputs[("Fst","CompHydro")] = {'vals':[0], 'group':0} case_inputs[("Fst","CompSub")] = {'vals':[0], 'group':0} case_inputs[("Fst","DT")] = {'vals':[0.01], 'group':0} case_inputs[("Fst","DT_Out")] = {'vals':[1.], 'group':0} case_inputs[("Fst","SttsTime")] = {'vals':[10.], 'group':0} case_inputs[("Fst","TMax")] = {'vals':[TMax], 'group':0} case_inputs[("Fst","TStart")] = {'vals':[TStart], 'group':0} case_inputs[("ElastoDyn","PtfmSgDOF")] = {'vals':["False"], 'group':0} case_inputs[("ElastoDyn","PtfmSwDOF")] = {'vals':["False"], 'group':0} case_inputs[("ElastoDyn","PtfmHvDOF")] = {'vals':["False"], 'group':0} case_inputs[("ElastoDyn","PtfmRDOF")] = {'vals':["False"], 'group':0} case_inputs[("ElastoDyn","PtfmPDOF")] = {'vals':["False"], 'group':0} case_inputs[("ElastoDyn","PtfmYDOF")] = {'vals':["False"], 'group':0} case_inputs[("ElastoDyn","TwFADOF1")] = {'vals':["True"], 'group':0} case_inputs[("ElastoDyn","TwFADOF2")] = {'vals':["True"], 'group':0} case_inputs[("ElastoDyn","TwSSDOF1")] = {'vals':["True"], 'group':0} case_inputs[("ElastoDyn","TwSSDOF2")] = {'vals':["True"], 'group':0} case_inputs[("ElastoDyn","FlapDOF1")] = {'vals':["True"], 'group':0} case_inputs[("ElastoDyn","FlapDOF2")] = {'vals':["True"], 
'group':0} case_inputs[("ElastoDyn","EdgeDOF")] = {'vals':["True"], 'group':0} case_inputs[("ElastoDyn","DrTrDOF")] = {'vals':["False"], 'group':0} case_inputs[("ElastoDyn","GenDOF")] = {'vals':["True"], 'group':0} case_inputs[("ElastoDyn","YawDOF")] = {'vals':["False"], 'group':0} case_inputs[("AeroDyn15","WakeMod")] = {'vals':[1], 'group':0} case_inputs[("AeroDyn15","DBEMT_Mod")] = {'vals':[1], 'group':0} case_inputs[("AeroDyn15","tau1_const")] = {'vals':[20], 'group':0} case_inputs[("AeroDyn15","AFAeroMod")] = {'vals':[2], 'group':0} case_inputs[("AeroDyn15","TwrPotent")] = {'vals':[0], 'group':0} case_inputs[("AeroDyn15","TwrShadow")] = {'vals':['False'], 'group':0} case_inputs[("AeroDyn15","TwrAero")] = {'vals':['False'], 'group':0} case_inputs[("AeroDyn15","SkewMod")] = {'vals':[2], 'group':0} case_inputs[("AeroDyn15","TipLoss")] = {'vals':['True'], 'group':0} case_inputs[("AeroDyn15","HubLoss")] = {'vals':['True'], 'group':0} case_inputs[("AeroDyn15","TanInd")] = {'vals':['True'], 'group':0} case_inputs[("AeroDyn15","AIDrag")] = {'vals':['True'], 'group':0} case_inputs[("AeroDyn15","TIDrag")] = {'vals':['True'], 'group':0} case_inputs[("AeroDyn15","UseBlCm")] = {'vals':['True'], 'group':0} case_list, case_name_list = iec.execute(case_inputs=case_inputs) # Run FAST cases fastBatch = runFAST_pywrapper_batch(FAST_ver='OpenFAST',dev_branch = True) if eagle: fastBatch.FAST_exe = '/home/pbortolo/openfast/build/glue-codes/openfast/openfast' # Path to executable fastBatch.FAST_InputFile = 'OpenFAST_BAR_01.fst' # FAST input file (ext=.fst) fastBatch.FAST_directory = '//Users/pbortolo/work/3_projects/5_IEAtask37/IEA-10.0-198-RWT/openfast' # Path to fst directory files else: fastBatch.FAST_exe = '/Users/pbortolo/work/2_openfast/openfast/build/glue-codes/openfast/openfast' # Path to executable fastBatch.FAST_InputFile = 'IEA-10.0-198-RWT.fst' # FAST input file (ext=.fst) fastBatch.FAST_directory = '/Users/pbortolo/work/3_projects/5_IEAtask37/IEA-10.0-198-RWT/openfast' # Path to fst directory files fastBatch.FAST_runDirectory = iec.run_dir fastBatch.case_list = case_list fastBatch.case_name_list = case_name_list fastBatch.debug_level = 2 if eagle: fastBatch.run_multi(36) else: fastBatch.run_serial()
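# Illustrative sketch only, not the wisdem CaseGen implementation: every entry
# in case_inputs above uses group 0 with a single value, so (under that reading
# of the ('vals', 'group') convention) the settings reduce to one flat
# {(module, key): value} dict shared by every generated wind/seed case.
flat_settings = {key: spec['vals'][0]
                 for key, spec in case_inputs.items()
                 if spec['group'] == 0 and len(spec['vals']) == 1}
assert flat_settings[("Fst", "TMax")] == TMax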
// @ts-check
"use strict";

const TestDiscovery = require("./helper/test-discovery");
const TestCase = require("./helper/test-case");
const path = require("path");
const { argv } = require("yargs");

const testSuite =
	(argv.language)
		? TestDiscovery.loadSomeTests(__dirname + "/languages", argv.language)
		// load complete test suite
		: TestDiscovery.loadAllTests(__dirname + "/languages");

const pretty = 'pretty' in argv;

// define tests for all tests in all languages in the test suite
for (const language in testSuite) {
	if (!testSuite.hasOwnProperty(language)) {
		continue;
	}

	(function (language, testFiles) {
		describe("Testing language '" + language + "'", function () {
			this.timeout(10000);

			for (const filePath of testFiles) {
				const fileName = path.basename(filePath, path.extname(filePath));

				it("– should pass test case '" + fileName + "'", function () {
					if (path.extname(filePath) === '.test') {
						TestCase.runTestCase(language, filePath, pretty);
					} else {
						TestCase.runTestsWithHooks(language, require(filePath));
					}
				});
			}
		});
	})(language, testSuite[language]);
}
# standard libraries import threading import numpy import queue import logging from nion.utils import Event from nion.utils import Observable from nion.swift.model import HardwareSource from ..aux_files.config import read_data class OptSpecDevice(Observable.Observable): def __init__(self, MANUFACTURER): self.property_changed_event = Event.Event() self.property_changed_power_event = Event.Event() self.communicating_event = Event.Event() self.busy_event = Event.Event() self.send_gratings = Event.Event() self.warn_panel = Event.Event() self.send_data = Event.Event() self.warn_panel_over = Event.Event() self.__queue = queue.Queue() self.__running = False self.__successful = False self.__model = MANUFACTURER self.__thread = None def init(self): if self.__model == 'DEBUG': from . import spec_vi as optSpec elif self.__model == 'ATTOLIGHT': from . import spec_attolight as optSpec elif self.__model == 'PRINCETON': from . import spec as optSpec self.__sendmessage = optSpec.SENDMYMESSAGEFUNC(self.sendMessageFactory()) if self.__model == 'PRINCETON': set_file = read_data.FileManager('global_settings') SERIAL_PORT_PRINCETON = set_file.settings["spectrometer"]["COM_PRINCETON"] self.__Spec = optSpec.OptSpectrometer(self.__sendmessage, SERIAL_PORT_PRINCETON) else: self.__Spec = optSpec.OptSpectrometer(self.__sendmessage) if not self.__Spec.success: return False self.__gratings = self.__Spec.gratingNames() self.send_gratings.fire(self.__gratings) self.__lpmms = self.__Spec.gratingLPMM() self.__fl = self.__Spec.get_specFL() self.__cameraSize = 25.6 self.__cameraPixels = self.__Spec.camera_pixels() self.__cameraName = self.__Spec.which_camera() self.__devAngle = self.__Spec.deviation_angle() self.__eirecamera = HardwareSource.HardwareSourceManager().get_hardware_source_for_hardware_source_id( self.__cameraName) return (True and self.__eirecamera is not None) def upt(self): self.property_changed_event.fire('wav_f') self.property_changed_event.fire('grating_f') self.property_changed_event.fire('entrance_slit_f') self.property_changed_event.fire('exit_slit_f') self.property_changed_event.fire('which_slit_f') self.property_changed_event.fire('lpmm_f') self.property_changed_event.fire('dispersion_nmmm_f') self.property_changed_event.fire('pixel_size_f') self.property_changed_event.fire('dispersion_pixels_f') self.property_changed_event.fire('fov_f') self.property_changed_event.fire('camera_size_f') self.property_changed_event.fire('camera_pixels_f') self.property_changed_event.fire('focalLength_f') self.upt_calibs() if not self.__successful: self.__successful = True def upt_values(self): self.property_changed_event.fire('wav_f') self.property_changed_event.fire('lpmm_f') self.property_changed_event.fire('dispersion_nmmm_f') self.property_changed_event.fire('pixel_size_f') self.property_changed_event.fire('dispersion_pixels_f') self.property_changed_event.fire('fov_f') self.property_changed_event.fire('camera_size_f') self.property_changed_event.fire('camera_pixels_f') self.property_changed_event.fire('focalLength_f') self.upt_calibs() def upt_calibs(self): if self.__eirecamera.camera.camera_model == 'Newton' or self.__eirecamera.camera.camera_model == 'ProEM+: 1600xx(2)B eXcelon': self.__eirecamera.camera.calibration = [{"offset": 0, "scale": 1, "units": ""}, {"offset": self.__wl - self.dispersion_f * self.__cameraSize / 2., "scale": self.dispersion_f * self.__cameraSize / self.__cameraPixels, "units": "nm"}] else: logging.info('***OPT SPECT***: Camera not configured in upt_calibs.') # elif 
self.__eirecamera.camera.camera_model == 'ProEM+: 1600xx(2)B eXcelon': # if self.__eirecamera.camera.sizey == 1: # self.__eirecamera.camera.calibration = [{"offset": self.__wl - self.dispersion_f * self.__cameraSize / 2., # "scale": self.dispersion_f * self.__cameraSize / self.__cameraPixels, # "units": "nm"}] # else: # self.__eirecamera.camera.calibration = [{"offset": 0, "scale": 1, "units": ""}, # {"offset": self.__wl - self.dispersion_f * self.__cameraSize / 2., # "scale": self.dispersion_f * self.__cameraSize / self.__cameraPixels, # "units": "nm"}] def measure(self): self.__running = True self.busy_event.fire('') self.warn_panel.fire() self.__thread = threading.Thread(target=self.measureThread) self.__thread.start() def measureThread(self): index = 0 while self.__running: cam_data = self.__eirecamera.grab_next_to_finish()[0] cam_hor = numpy.sum(cam_data.data, axis=0) if len(cam_data.data.shape) > 1 else cam_data.data cam_total = numpy.sum(cam_hor) self.send_data.fire(cam_total, index) index += 1 if index == 200: index = 0 def abort(self): try: if self.__running: self.__running = False self.__thread.join() self.warn_panel_over.fire() except AttributeError: #No measure was happening pass self.property_changed_event.fire('') def sendMessageFactory(self): def sendMessage(message): if message: self.__running = False self.upt_values() self.property_changed_event.fire('wav_f') if self.__successful: self.upt_calibs() return sendMessage @property def wav_f(self): try: self.__wl = self.__Spec.get_wavelength() return format(self.__wl, '.3f') except AttributeError: return 'None' @wav_f.setter def wav_f(self, value): if self.__wl != float(value) and 0 <= float(value) <= 2500: self.__wl = float(value) self.busy_event.fire("") if not self.__running: threading.Thread(target=self.__Spec.set_wavelength, args=(self.__wl,)).start() self.__running = True @property def grating_f(self): try: self.__grating = self.__Spec.get_grating() return self.__grating except AttributeError: return 0 @grating_f.setter def grating_f(self, value): if self.__grating != value: self.__grating = value self.busy_event.fire("") if not self.__running: threading.Thread(target=self.__Spec.set_grating, args=(self.__grating,)).start() self.__running = True @property def lpmm_f(self): try: return self.__lpmms[self.__grating] except AttributeError: return 'None' @property def inc_angle_f(self): try: return self.dif_angle_f - self.__devAngle except AttributeError: return 'None' @property def dif_angle_f(self): ''' This is somewhat complicated. devAngle is a spectrometer property and are simple a contraint between two slits (central and camera center) and two angles. Incidence minus diffraction angle is always constant in a given spectrometer. Please see equation 2.4 in diffraction grating handbook by Christopher Palmer. abs2 is the incidence plus the diffracted angle divided by two. ''' try: ab2 = numpy.arcsin((1 / 2. * 1e-6 * self.__wl * self.lpmm_f) / numpy.cos(self.__devAngle / 2.)) return (2 * ab2 + self.__devAngle) / 2. except AttributeError: return 'None' @property def dispersion_f(self): ''' Also confusing but just derivate diffraction equation. Note that alpha depends on wavelength but its derivative is zero because input is fixed. We wanna see difracted beam angle dispersion and not entrance. See diffraction grating handbook by Christopher Palmer. This is often called reciprocal linear dispersion. It is measured in nm/mm. 
''' try: return 1e6 / self.__lpmms[self.__grating] * numpy.cos(self.dif_angle_f) / self.__fl except AttributeError: return 'None' @property def entrance_slit_f(self): try: self.__entrance_slit = self.__Spec.get_entrance() return self.__entrance_slit except AttributeError: return 'None' @entrance_slit_f.setter def entrance_slit_f(self, value): if self.__entrance_slit != float(value) and 0 <= float(value) <= 5000: self.__entrance_slit = float(value) self.busy_event.fire("") if not self.__running: threading.Thread(target=self.__Spec.set_entrance, args=(self.__entrance_slit,)).start() self.__running = True @property def exit_slit_f(self): try: self.__exit_slit = self.__Spec.get_exit() return self.__exit_slit except AttributeError: return 'None' @exit_slit_f.setter def exit_slit_f(self, value): if self.__exit_slit != float(value) and 0 <= float(value) <= 5000: self.__exit_slit = float(value) self.busy_event.fire("") if not self.__running: threading.Thread(target=self.__Spec.set_exit, args=(self.__exit_slit,)).start() self.__running = True @property def which_slit_f(self): try: self.__slit_choice = self.__Spec.get_which() return self.__slit_choice except AttributeError: return -1 @which_slit_f.setter def which_slit_f(self, value): if self.__slit_choice != value: self.__slit_choice = value self.busy_event.fire("") if not self.__running: threading.Thread(target=self.__Spec.set_which, args=(self.__slit_choice,)).start() self.__running = True @property def camera_size_f(self): try: return format(self.__cameraSize, '.1f') except AttributeError: return 'None' @camera_size_f.setter def camera_size_f(self, value): self.__cameraSize = float(value) self.upt_values() @property def camera_pixels_f(self): try: return format(self.__cameraPixels, '.0f') except AttributeError: return 'None' @camera_pixels_f.setter def camera_pixels_f(self, value): self.__cameraPixels = int(value) self.upt_values() @property def focalLength_f(self): try: return format(self.__fl, '.0f') except AttributeError: return 'None' @focalLength_f.setter def focalLength_f(self, value): self.__fl = int(value) self.upt_values() @property def pixel_size_f(self): try: return self.__cameraSize / self.__cameraPixels * 1e3 except AttributeError: return 'None' @property def dispersion_nmmm_f(self): try: return format(self.dispersion_f, '.3f') except ValueError: return 'None' except AttributeError: return 'None' @property def dispersion_pixels_f(self): try: return format(self.dispersion_f * self.__cameraSize / self.__cameraPixels, '.3f') except AttributeError: return 'None' @property def fov_f(self): try: return format(self.dispersion_f * self.__cameraSize, '.3f') except AttributeError: return 'None'
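# A minimal standalone sketch (not part of OptSpecDevice) of the grating
# relations used by dif_angle_f, dispersion_f and fov_f above. The numbers are
# assumed example values (150 l/mm grating, 320 mm focal length, 25.6 mm
# camera, 0.52 rad deviation angle, 600 nm centre wavelength); in the class
# they come from the spectrometer hardware at runtime.
import numpy

wl_nm = 600.0    # centre wavelength [nm]
lpmm = 150.0     # grating groove density [lines/mm]
fl_mm = 320.0    # spectrometer focal length [mm]
cam_mm = 25.6    # camera sensor width [mm]
dev = 0.52       # deviation angle [rad], a fixed spectrometer constraint

# Fixed-deviation grating equation (see the dif_angle_f docstring):
# (alpha + beta) / 2 follows from the centre wavelength and groove density.
ab2 = numpy.arcsin((0.5e-6 * wl_nm * lpmm) / numpy.cos(dev / 2.0))
beta = (2.0 * ab2 + dev) / 2.0  # diffraction angle [rad]

# Reciprocal linear dispersion [nm/mm] and the spectral field of view that
# falls on the camera, mirroring dispersion_f and fov_f.
dispersion = 1e6 / lpmm * numpy.cos(beta) / fl_mm
fov_nm = dispersion * cam_mm
print(round(dispersion, 3), round(fov_nm, 3))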
from featuretools import Relationship, Timedelta, primitives from featuretools.entityset.relationship import RelationshipPath from featuretools.primitives.base import ( AggregationPrimitive, PrimitiveBase, TransformPrimitive ) from featuretools.primitives.utils import serialize_primitive from featuretools.utils.wrangle import ( _check_time_against_column, _check_timedelta ) from featuretools.variable_types import ( Boolean, Categorical, Datetime, DatetimeTimeIndex, Discrete, Id, Index, Numeric, NumericTimeIndex, Variable ) class FeatureBase(object): def __init__(self, entity, base_features, relationship_path, primitive, name=None, names=None): """Base class for all features Args: entity (Entity): entity this feature is being calculated for base_features (list[FeatureBase]): list of base features for primitive relationship_path (RelationshipPath): path from this entity to the entity of the base features. primitive (:class:`.PrimitiveBase`): primitive to calculate. if not initialized when passed, gets initialized with no arguments """ assert all(isinstance(f, FeatureBase) for f in base_features), \ "All base features must be features" self.entity_id = entity.id self.entityset = entity.entityset.metadata self.base_features = base_features # initialize if not already initialized if not isinstance(primitive, PrimitiveBase): primitive = primitive() self.primitive = primitive self.relationship_path = relationship_path self._name = name self._names = names assert self._check_input_types(), ("Provided inputs don't match input " "type requirements") def __getitem__(self, key): assert self.number_output_features > 1, \ 'can only access slice of multi-output feature' assert self.number_output_features > key, \ 'index is higher than the number of outputs' return FeatureOutputSlice(self, key) @classmethod def from_dictionary(cls, arguments, entityset, dependencies, primitives_deserializer): raise NotImplementedError("Must define from_dictionary on FeatureBase subclass") def rename(self, name): """Rename Feature, returns copy""" feature_copy = self.copy() feature_copy._name = name return feature_copy def copy(self): raise NotImplementedError("Must define copy on FeatureBase subclass") def get_name(self): if not self._name: self._name = self.generate_name() return self._name def get_names(self): if not self._names: self._names = self.generate_names() return self._names def get_feature_names(self): n = self.number_output_features if n == 1: names = [self.get_name()] else: names = self.get_names() return names def get_function(self): return self.primitive.get_function() def get_dependencies(self, deep=False, ignored=None, copy=True): """Returns features that are used to calculate this feature ..note:: If you only want the features that make up the input to the feature function use the base_features attribute instead. 
""" deps = [] for d in self.base_features[:]: deps += [d] if hasattr(self, "where") and self.where: deps += [self.where] if ignored is None: ignored = set([]) deps = [d for d in deps if d.unique_name() not in ignored] if deep: for dep in deps[:]: # copy so we don't modify list we iterate over deep_deps = dep.get_dependencies(deep, ignored) deps += deep_deps return deps def get_depth(self, stop_at=None): """Returns depth of feature""" max_depth = 0 stop_at_set = set() if stop_at is not None: stop_at_set = set([i.unique_name() for i in stop_at]) if self.unique_name() in stop_at_set: return 0 for dep in self.get_dependencies(deep=True, ignored=stop_at_set): max_depth = max(dep.get_depth(stop_at=stop_at), max_depth) return max_depth + 1 def _check_input_types(self): if len(self.base_features) == 0: return True input_types = self.primitive.input_types if input_types is not None: if type(input_types[0]) != list: input_types = [input_types] for t in input_types: zipped = list(zip(t, self.base_features)) if all([issubclass(f.variable_type, v) for v, f in zipped]): return True else: return True return False @property def entity(self): """Entity this feature belongs too""" return self.entityset[self.entity_id] @property def number_output_features(self): return self.primitive.number_output_features def __repr__(self): return "<Feature: %s>" % (self.get_name()) def hash(self): return hash(self.get_name() + self.entity.id) def __hash__(self): # logger.warning("To hash a feature, use feature.hash()") return self.hash() @property def variable_type(self): feature = self variable_type = self.primitive.return_type while variable_type is None: # get variable_type of first base feature base_feature = feature.base_features[0] variable_type = base_feature.variable_type # only the original time index should exist # so make this feature's return type just a Datetime if variable_type == DatetimeTimeIndex: variable_type = Datetime elif variable_type == NumericTimeIndex: variable_type = Numeric elif variable_type == Index: variable_type = Categorical # direct features should keep the Id return type, but all other features should get # converted to Categorical if not isinstance(feature, DirectFeature) and variable_type == Id: variable_type = Categorical feature = base_feature return variable_type @property def default_value(self): return self.primitive.default_value def get_arguments(self): raise NotImplementedError("Must define get_arguments on FeatureBase subclass") def to_dictionary(self): return { 'type': type(self).__name__, 'dependencies': [dep.unique_name() for dep in self.get_dependencies()], 'arguments': self.get_arguments(), } def _handle_binary_comparision(self, other, Primitive, PrimitiveScalar): if isinstance(other, FeatureBase): return Feature([self, other], primitive=Primitive) return Feature([self], primitive=PrimitiveScalar(other)) def __eq__(self, other): """Compares to other by equality""" return self._handle_binary_comparision(other, primitives.Equal, primitives.EqualScalar) def __ne__(self, other): """Compares to other by non-equality""" return self._handle_binary_comparision(other, primitives.NotEqual, primitives.NotEqualScalar) def __gt__(self, other): """Compares if greater than other""" return self._handle_binary_comparision(other, primitives.GreaterThan, primitives.GreaterThanScalar) def __ge__(self, other): """Compares if greater than or equal to other""" return self._handle_binary_comparision(other, primitives.GreaterThanEqualTo, primitives.GreaterThanEqualToScalar) def __lt__(self, 
other): """Compares if less than other""" return self._handle_binary_comparision(other, primitives.LessThan, primitives.LessThanScalar) def __le__(self, other): """Compares if less than or equal to other""" return self._handle_binary_comparision(other, primitives.LessThanEqualTo, primitives.LessThanEqualToScalar) def __add__(self, other): """Add other""" return self._handle_binary_comparision(other, primitives.AddNumeric, primitives.AddNumericScalar) def __radd__(self, other): return self.__add__(other) def __sub__(self, other): """Subtract other""" return self._handle_binary_comparision(other, primitives.SubtractNumeric, primitives.SubtractNumericScalar) def __rsub__(self, other): return Feature([self], primitive=primitives.ScalarSubtractNumericFeature(other)) def __div__(self, other): """Divide by other""" return self._handle_binary_comparision(other, primitives.DivideNumeric, primitives.DivideNumericScalar) def __truediv__(self, other): return self.__div__(other) def __rtruediv__(self, other): return self.__rdiv__(other) def __rdiv__(self, other): return Feature([self], primitive=primitives.DivideByFeature(other)) def __mul__(self, other): """Multiply by other""" if isinstance(other, FeatureBase): if self.variable_type == Boolean and other.variable_type == Boolean: return Feature([self, other], primitive=primitives.MultiplyBoolean) return self._handle_binary_comparision(other, primitives.MultiplyNumeric, primitives.MultiplyNumericScalar) def __rmul__(self, other): return self.__mul__(other) def __mod__(self, other): """Take modulus of other""" return self._handle_binary_comparision(other, primitives.ModuloNumeric, primitives.ModuloNumericScalar) def __rmod__(self, other): return Feature([self], primitive=primitives.ModuloByFeature(other)) def __and__(self, other): return self.AND(other) def __rand__(self, other): return Feature([other, self], primitive=primitives.And) def __or__(self, other): return self.OR(other) def __ror__(self, other): return Feature([other, self], primitive=primitives.Or) def __not__(self, other): return self.NOT(other) def __abs__(self): return Feature([self], primitive=primitives.Absolute) def __neg__(self): return Feature([self], primitive=primitives.Negate) def AND(self, other_feature): """Logical AND with other_feature""" return Feature([self, other_feature], primitive=primitives.And) def OR(self, other_feature): """Logical OR with other_feature""" return Feature([self, other_feature], primitive=primitives.Or) def NOT(self): """Creates inverse of feature""" return Feature([self], primitive=primitives.Not) def isin(self, list_of_output): return Feature([self], primitive=primitives.IsIn(list_of_outputs=list_of_output)) def is_null(self): """Compares feature to null by equality""" return Feature([self], primitive=primitives.IsNull) def __invert__(self): return self.NOT() def unique_name(self): return u"%s: %s" % (self.entity_id, self.get_name()) def relationship_path_name(self): return self.relationship_path.name class IdentityFeature(FeatureBase): """Feature for entity that is equivalent to underlying variable""" def __init__(self, variable, name=None): entity_id = variable.entity_id self.variable = variable.entityset.metadata[entity_id][variable.id] self.return_type = type(variable) super(IdentityFeature, self).__init__(entity=variable.entity, base_features=[], relationship_path=RelationshipPath([]), primitive=PrimitiveBase, name=name) @classmethod def from_dictionary(cls, arguments, entityset, dependencies, primitives_deserializer): entity_id = 
arguments['entity_id'] variable_id = arguments['variable_id'] variable = entityset[entity_id][variable_id] return cls(variable=variable, name=arguments['name']) def copy(self): """Return copy of feature""" return IdentityFeature(self.variable) def generate_name(self): return self.variable.name def get_depth(self, stop_at=None): return 0 def get_arguments(self): return { 'name': self._name, 'variable_id': self.variable.id, 'entity_id': self.variable.entity_id, } @property def variable_type(self): return type(self.variable) class DirectFeature(FeatureBase): """Feature for child entity that inherits a feature value from a parent entity""" input_types = [Variable] return_type = None def __init__(self, base_feature, child_entity, relationship=None, name=None): base_feature = _check_feature(base_feature) self.parent_entity = base_feature.entity relationship = self._handle_relationship(child_entity, relationship) super(DirectFeature, self).__init__(entity=child_entity, base_features=[base_feature], relationship_path=RelationshipPath([(True, relationship)]), primitive=PrimitiveBase, name=name) def _handle_relationship(self, child_entity, relationship): if relationship: relationship_child = relationship.child_entity assert child_entity.id == relationship_child.id, \ 'child_entity must be the relationship child entity' assert self.parent_entity.id == relationship.parent_entity.id, \ 'Base feature must be defined on the relationship parent entity' else: child_relationships = child_entity.entityset.get_forward_relationships(child_entity.id) possible_relationships = (r for r in child_relationships if r.parent_entity.id == self.parent_entity.id) relationship = next(possible_relationships, None) if not relationship: raise RuntimeError('No relationship from "%s" to "%s" found.' % (child_entity.id, self.parent_entity.id)) # Check for another path. elif next(possible_relationships, None): message = "There are multiple relationships to the base entity. " \ "You must specify a relationship." 
raise RuntimeError(message) return relationship @classmethod def from_dictionary(cls, arguments, entityset, dependencies, primitives_deserializer): base_feature = dependencies[arguments['base_feature']] relationship = Relationship.from_dictionary(arguments['relationship'], entityset) child_entity = relationship.child_entity return cls(base_feature=base_feature, child_entity=child_entity, relationship=relationship, name=arguments['name']) @property def variable(self): return self.base_features[0].variable @property def number_output_features(self): return self.base_features[0].number_output_features @property def default_value(self): return self.base_features[0].default_value def copy(self): """Return copy of feature""" _is_forward, relationship = self.relationship_path[0] return DirectFeature(self.base_features[0], self.entity, relationship=relationship) @property def variable_type(self): return self.base_features[0].variable_type def generate_name(self): return self._name_from_base(self.base_features[0].get_name()) def get_feature_names(self): return [self._name_from_base(base_name) for base_name in self.base_features[0].get_feature_names()] def get_arguments(self): _is_forward, relationship = self.relationship_path[0] return { 'name': self._name, 'base_feature': self.base_features[0].unique_name(), 'relationship': relationship.to_dictionary(), } def _name_from_base(self, base_name): return u"%s.%s" % (self.relationship_path_name(), base_name) class AggregationFeature(FeatureBase): # Feature to condition this feature by in # computation (e.g. take the Count of products where the product_id is # "basketball".) where = None #: (str or :class:`.Timedelta`): Use only some amount of previous data from # each time point during calculation use_previous = None def __init__(self, base_features, parent_entity, primitive, relationship_path=None, use_previous=None, where=None, name=None): if hasattr(base_features, '__iter__'): base_features = [_check_feature(bf) for bf in base_features] msg = "all base features must share the same entity" assert len(set([bf.entity for bf in base_features])) == 1, msg else: base_features = [_check_feature(base_features)] for bf in base_features: if bf.number_output_features > 1: raise ValueError("Cannot stack on whole multi-output feature.") self.child_entity = base_features[0].entity relationship_path, self._path_is_unique = \ self._handle_relationship_path(parent_entity, relationship_path) self.parent_entity = parent_entity.entityset.metadata[parent_entity.id] if where is not None: self.where = _check_feature(where) msg = "Where feature must be defined on child entity {}".format( self.child_entity.id) assert self.where.entity.id == self.child_entity.id, msg if use_previous: assert self.child_entity.time_index is not None, ( "Applying function that requires time index to entity that " "doesn't have one") self.use_previous = _check_timedelta(use_previous) assert len(base_features) > 0 time_index = base_features[0].entity.time_index time_col = base_features[0].entity[time_index] assert time_index is not None, ("Use previous can only be defined " "on entities with a time index") assert _check_time_against_column(self.use_previous, time_col) super(AggregationFeature, self).__init__(entity=parent_entity, base_features=base_features, relationship_path=relationship_path, primitive=primitive, name=name) def _handle_relationship_path(self, parent_entity, relationship_path): if relationship_path: assert all(not is_forward for is_forward, _r in relationship_path), \ 'All 
relationships in path must be backward' _is_forward, first_relationship = relationship_path[0] first_parent = first_relationship.parent_entity assert parent_entity.id == first_parent.id, \ 'parent_entity must match first relationship in path.' _is_forward, last_relationship = relationship_path[-1] assert self.child_entity.id == last_relationship.child_entity.id, \ 'Base feature must be defined on the entity at the end of relationship_path' path_is_unique = parent_entity.entityset \ .has_unique_forward_path(self.child_entity.id, parent_entity.id) else: paths = parent_entity.entityset \ .find_backward_paths(parent_entity.id, self.child_entity.id) first_path = next(paths, None) if not first_path: raise RuntimeError('No backward path from "%s" to "%s" found.' % (parent_entity.id, self.child_entity.id)) # Check for another path. elif next(paths, None): message = "There are multiple possible paths to the base entity. " \ "You must specify a relationship path." raise RuntimeError(message) relationship_path = RelationshipPath([(False, r) for r in first_path]) path_is_unique = True return relationship_path, path_is_unique @classmethod def from_dictionary(cls, arguments, entityset, dependencies, primitives_deserializer): base_features = [dependencies[name] for name in arguments['base_features']] relationship_path = [Relationship.from_dictionary(r, entityset) for r in arguments['relationship_path']] parent_entity = relationship_path[0].parent_entity relationship_path = RelationshipPath([(False, r) for r in relationship_path]) primitive = primitives_deserializer.deserialize_primitive(arguments['primitive']) use_previous_data = arguments['use_previous'] use_previous = use_previous_data and Timedelta.from_dictionary(use_previous_data) where_name = arguments['where'] where = where_name and dependencies[where_name] return cls(base_features=base_features, parent_entity=parent_entity, primitive=primitive, relationship_path=relationship_path, use_previous=use_previous, where=where, name=arguments['name']) def copy(self): return AggregationFeature(self.base_features, parent_entity=self.parent_entity, relationship_path=self.relationship_path, primitive=self.primitive, use_previous=self.use_previous, where=self.where) def _where_str(self): if self.where is not None: where_str = u" WHERE " + self.where.get_name() else: where_str = '' return where_str def _use_prev_str(self): if self.use_previous is not None and hasattr(self.use_previous, 'get_name'): use_prev_str = u", Last {}".format(self.use_previous.get_name()) else: use_prev_str = u'' return use_prev_str def generate_name(self): return self.primitive.generate_name(base_feature_names=[bf.get_name() for bf in self.base_features], relationship_path_name=self.relationship_path_name(), parent_entity_id=self.parent_entity.id, where_str=self._where_str(), use_prev_str=self._use_prev_str()) def generate_names(self): return self.primitive.generate_names(base_feature_names=[bf.get_name() for bf in self.base_features], relationship_path_name=self.relationship_path_name(), parent_entity_id=self.parent_entity.id, where_str=self._where_str(), use_prev_str=self._use_prev_str()) def get_arguments(self): return { 'name': self._name, 'base_features': [feat.unique_name() for feat in self.base_features], 'relationship_path': [r.to_dictionary() for _, r in self.relationship_path], 'primitive': serialize_primitive(self.primitive), 'where': self.where and self.where.unique_name(), 'use_previous': self.use_previous and self.use_previous.get_arguments(), } def 
relationship_path_name(self): if self._path_is_unique: return self.child_entity.id else: return self.relationship_path.name class TransformFeature(FeatureBase): def __init__(self, base_features, primitive, name=None): # Any edits made to this method should also be made to the # new_class_init method in make_trans_primitive if hasattr(base_features, '__iter__'): base_features = [_check_feature(bf) for bf in base_features] msg = "all base features must share the same entity" assert len(set([bf.entity for bf in base_features])) == 1, msg else: base_features = [_check_feature(base_features)] for bf in base_features: if bf.number_output_features > 1: raise ValueError("Cannot stack on whole multi-output feature.") super(TransformFeature, self).__init__(entity=base_features[0].entity, base_features=base_features, relationship_path=RelationshipPath([]), primitive=primitive, name=name) @classmethod def from_dictionary(cls, arguments, entityset, dependencies, primitives_deserializer): base_features = [dependencies[name] for name in arguments['base_features']] primitive = primitives_deserializer.deserialize_primitive(arguments['primitive']) return cls(base_features=base_features, primitive=primitive, name=arguments['name']) def copy(self): return TransformFeature(self.base_features, self.primitive) def generate_name(self): return self.primitive.generate_name(base_feature_names=[bf.get_name() for bf in self.base_features]) def generate_names(self): return self.primitive.generate_names(base_feature_names=[bf.get_name() for bf in self.base_features]) def get_arguments(self): return { 'name': self._name, 'base_features': [feat.unique_name() for feat in self.base_features], 'primitive': serialize_primitive(self.primitive) } class GroupByTransformFeature(TransformFeature): def __init__(self, base_features, primitive, groupby, name=None): if not isinstance(groupby, FeatureBase): groupby = IdentityFeature(groupby) assert issubclass(groupby.variable_type, Discrete) self.groupby = groupby if hasattr(base_features, '__iter__'): base_features.append(groupby) else: base_features = [base_features, groupby] super(GroupByTransformFeature, self).__init__(base_features=base_features, primitive=primitive, name=name) @classmethod def from_dictionary(cls, arguments, entityset, dependencies, primitives_deserializer): base_features = [dependencies[name] for name in arguments['base_features']] primitive = primitives_deserializer.deserialize_primitive(arguments['primitive']) groupby = dependencies[arguments['groupby']] return cls(base_features=base_features, primitive=primitive, groupby=groupby, name=arguments['name']) def copy(self): # the groupby feature is appended to base_features in the __init__ # so here we separate them again return GroupByTransformFeature(self.base_features[:-1], self.primitive, self.groupby) def generate_name(self): # exclude the groupby feature from base_names since it has a special # place in the feature name base_names = [bf.get_name() for bf in self.base_features[:-1]] _name = self.primitive.generate_name(base_names) return u"{} by {}".format(_name, self.groupby.get_name()) def generate_names(self): base_names = [bf.get_name() for bf in self.base_features[:-1]] _names = self.primitive.generate_names(base_names) names = [name + " by {}".format(self.groupby.get_name()) for name in _names] return names def get_arguments(self): # Do not include groupby in base_features. 
feature_names = [feat.unique_name() for feat in self.base_features if feat.unique_name() != self.groupby.unique_name()] return { 'name': self._name, 'base_features': feature_names, 'primitive': serialize_primitive(self.primitive), 'groupby': self.groupby.unique_name(), } class Feature(object): """ Alias to create feature. Infers the feature type based on init parameters. """ def __new__(self, base, entity=None, groupby=None, parent_entity=None, primitive=None, use_previous=None, where=None): # either direct or indentity if primitive is None and entity is None: return IdentityFeature(base) elif primitive is None and entity is not None: return DirectFeature(base, entity) elif primitive is not None and parent_entity is not None: assert isinstance(primitive, AggregationPrimitive) or issubclass(primitive, AggregationPrimitive) return AggregationFeature(base, parent_entity=parent_entity, use_previous=use_previous, where=where, primitive=primitive) elif primitive is not None: assert (isinstance(primitive, TransformPrimitive) or issubclass(primitive, TransformPrimitive)) if groupby is not None: return GroupByTransformFeature(base, primitive=primitive, groupby=groupby) return TransformFeature(base, primitive=primitive) raise Exception("Unrecognized feature initialization") class FeatureOutputSlice(FeatureBase): """ Class to access specific multi output feature column """ def __init__(self, base_feature, n, name=None): base_features = [base_feature] self.num_output_parent = base_feature.number_output_features msg = "cannot access slice from single output feature" assert(self.num_output_parent > 1), msg msg = "cannot access column that is not between 0 and " + str(self.num_output_parent - 1) assert(n < self.num_output_parent), msg self.n = n self._name = name self.base_features = base_features self.base_feature = base_features[0] self.entity_id = base_feature.entity_id self.entityset = base_feature.entityset self.primitive = base_feature.primitive self.relationship_path = base_feature.relationship_path def __getitem__(self, key): raise ValueError("Cannot get item from slice of multi output feature") def generate_name(self): return self.base_feature.get_names()[self.n] @property def number_output_features(self): return 1 def get_arguments(self): return { 'name': self._name, 'base_feature': self.base_feature, 'n': self.n } @classmethod def from_dictionary(cls, arguments, entityset, dependencies, primitives_deserializer): base_feature = arguments['base_feature'] n = arguments['n'] name = arguments['name'] return cls(base_feature=base_feature, n=n, name=name) def _check_feature(feature): if isinstance(feature, Variable): return IdentityFeature(feature) elif isinstance(feature, FeatureBase): return feature raise Exception("Not a feature")
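A minimal usage sketch of the Feature alias defined above, assuming a hypothetical featuretools EntitySet `es` with "sessions", "log" and "customers" entities; the entity and variable names are illustrative only, but the dispatch mirrors Feature.__new__: no primitive yields an IdentityFeature or DirectFeature, a primitive plus parent_entity yields an AggregationFeature, and a primitive alone yields a TransformFeature.

import featuretools as ft

# Hypothetical EntitySet `es` and column names, used only to illustrate dispatch.
device = ft.Feature(es["sessions"]["device"])                        # IdentityFeature
device_on_log = ft.Feature(device, entity=es["log"])                 # DirectFeature
session_count = ft.Feature(device,
                           parent_entity=es["customers"],
                           primitive=ft.primitives.Count)            # AggregationFeature
start_hour = ft.Feature(es["sessions"]["session_start"],
                        primitive=ft.primitives.Hour)                # TransformFeature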
const passport = require('passport');
const router = require('express').Router();
const InstagramStrategy = require('passport-instagram').Strategy;
const { User } = require('../db/models');
const instagramAPI = require('../db/models/instagramAPI');

module.exports = router;

if (!process.env.INSTAGRAM_CLIENT_ID || !process.env.INSTAGRAM_CLIENT_SECRET) {
  console.log('Instagram client ID / secret not found. Skipping Instagram OAuth.');
} else {
  const instagramConfig = {
    clientID: process.env.INSTAGRAM_CLIENT_ID,
    clientSecret: process.env.INSTAGRAM_CLIENT_SECRET,
    callbackURL: process.env.INSTAGRAM_CALLBACK
  };

  const strategy = new InstagramStrategy(
    instagramConfig,
    (token, refreshToken, profile, done) => {
      const instagramId = profile.id;
      const name = profile.displayName;
      instagramAPI.setImages(token);
      User.findOrCreate({
        where: { instagramId },
        defaults: { name }
      })
        .then(([user]) => done(null, user))
        .catch(done);
    }
  );

  passport.use(strategy);

  router.get('/', passport.authenticate('instagram', { scope: 'basic' }));

  router.get(
    '/callback',
    passport.authenticate('instagram', {
      successRedirect: '/home',
      failureRedirect: '/login'
    })
  );
}
//
//  FGViewController.h
//  iFugaDemo
//
//  Created by Sergey Gavrilyuk on 12-07-16.
//  Copyright (c) 2012 Sergey Gavrilyuk. All rights reserved.
//
//  Permission is hereby granted, free of charge, to any person obtaining a copy
//  of this software and associated documentation files (the "Software"), to deal
//  in the Software without restriction, including without limitation the rights
//  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
//  copies of the Software, and to permit persons to whom the Software is
//  furnished to do so, subject to the following conditions:
//
//  The above copyright notice and this permission notice shall be included in
//  all copies or substantial portions of the Software.
//
//  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
//  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
//  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
//  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
//  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
//  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
//  THE SOFTWARE.
//

#import <UIKit/UIKit.h>
#import "FGController.h"

@interface FGViewController : UIViewController<FGControllerDelegate>
{
    FGController* _fgController;
}

@property (retain, nonatomic) IBOutlet UIImageView *imageView;

@end
import torch import torch.nn as nn __all__ = ['OPS', 'ResNetBasicblock', 'SearchSpaceNames'] OPS = { 'none' : lambda C_in, C_out, stride, affine, track_running_stats: Zero(C_in, C_out, stride), 'avg_pool_3x3' : lambda C_in, C_out, stride, affine, track_running_stats: POOLING(C_in, C_out, stride, 'avg', affine, track_running_stats), 'max_pool_3x3' : lambda C_in, C_out, stride, affine, track_running_stats: POOLING(C_in, C_out, stride, 'max', affine, track_running_stats), 'nor_conv_7x7' : lambda C_in, C_out, stride, affine, track_running_stats: ReLUConvBN(C_in, C_out, (7,7), (stride,stride), (3,3), (1,1), affine, track_running_stats), 'nor_conv_3x3' : lambda C_in, C_out, stride, affine, track_running_stats: ReLUConvBN(C_in, C_out, (3,3), (stride,stride), (1,1), (1,1), affine, track_running_stats), 'nor_conv_1x1' : lambda C_in, C_out, stride, affine, track_running_stats: ReLUConvBN(C_in, C_out, (1,1), (stride,stride), (0,0), (1,1), affine, track_running_stats), 'sep_conv_3x3' : lambda C_in, C_out, stride, affine, track_running_stats: DualSepConv(C_in, C_out, (3,3), (stride,stride), (1,1), (1,1), affine, track_running_stats), 'sep_conv_5x5' : lambda C_in, C_out, stride, affine, track_running_stats: DualSepConv(C_in, C_out, (5,5), (stride,stride), (2,2), (1,1), affine, track_running_stats), 'dil_conv_3x3' : lambda C_in, C_out, stride, affine, track_running_stats: SepConv(C_in, C_out, (3,3), (stride,stride), (2,2), (2,2), affine, track_running_stats), 'dil_conv_5x5' : lambda C_in, C_out, stride, affine, track_running_stats: SepConv(C_in, C_out, (5,5), (stride,stride), (4,4), (2,2), affine, track_running_stats), 'skip_connect' : lambda C_in, C_out, stride, affine, track_running_stats: Identity() if stride == 1 and C_in == C_out else FactorizedReduce(C_in, C_out, stride, affine, track_running_stats), } CONNECT_NAS_BENCHMARK = ['none', 'skip_connect', 'nor_conv_3x3'] NAS_BENCH_201 = ['none', 'skip_connect', 'nor_conv_1x1', 'nor_conv_3x3', 'avg_pool_3x3'] DARTS_SPACE = ['none', 'skip_connect', 'sep_conv_3x3', 'sep_conv_5x5', 'dil_conv_3x3', 'dil_conv_5x5', 'avg_pool_3x3', 'max_pool_3x3'] SearchSpaceNames = {'connect-nas' : CONNECT_NAS_BENCHMARK, 'nas-bench-201': NAS_BENCH_201, 'darts' : DARTS_SPACE} class ReLUConvBN(nn.Module): def __init__(self, C_in, C_out, kernel_size, stride, padding, dilation, affine, track_running_stats=True): super(ReLUConvBN, self).__init__() self.op = nn.Sequential( nn.ReLU(inplace=False), # TODO change to post-activation nn.Conv2d(C_in, C_out, kernel_size, stride=stride, padding=padding, dilation=dilation, bias=False), nn.BatchNorm2d(C_out, affine=affine, track_running_stats=track_running_stats), # nn.ReLU(inplace=True), ) def forward(self, x): return self.op(x) class SepConv(nn.Module): def __init__(self, C_in, C_out, kernel_size, stride, padding, dilation, affine, track_running_stats=True): super(SepConv, self).__init__() self.op = nn.Sequential( nn.ReLU(inplace=False), nn.Conv2d(C_in, C_in, kernel_size=kernel_size, stride=stride, padding=padding, dilation=dilation, groups=C_in, bias=False), nn.Conv2d(C_in, C_out, kernel_size=1, padding=0, bias=False), nn.BatchNorm2d(C_out, affine=affine, track_running_stats=track_running_stats), ) def forward(self, x): return self.op(x) class DualSepConv(nn.Module): def __init__(self, C_in, C_out, kernel_size, stride, padding, dilation, affine, track_running_stats=True): super(DualSepConv, self).__init__() self.op_a = SepConv(C_in, C_in , kernel_size, stride, padding, dilation, affine, track_running_stats) self.op_b = SepConv(C_in, 
C_out, kernel_size, 1, padding, dilation, affine, track_running_stats) def forward(self, x): x = self.op_a(x) x = self.op_b(x) return x class ResNetBasicblock(nn.Module): def __init__(self, inplanes, planes, stride, affine=True): super(ResNetBasicblock, self).__init__() assert stride == 1 or stride == 2, 'invalid stride {:}'.format(stride) self.conv_a = ReLUConvBN(inplanes, planes, 3, stride, 1, 1, affine) self.conv_b = ReLUConvBN( planes, planes, 3, 1, 1, 1, affine) if stride == 2: self.downsample = nn.Sequential( nn.AvgPool2d(kernel_size=2, stride=2, padding=0), nn.Conv2d(inplanes, planes, kernel_size=1, stride=1, padding=0, bias=False)) elif inplanes != planes: self.downsample = ReLUConvBN(inplanes, planes, 1, 1, 0, 1, affine) else: self.downsample = None self.in_dim = inplanes self.out_dim = planes self.stride = stride self.num_conv = 2 def extra_repr(self): string = '{name}(inC={in_dim}, outC={out_dim}, stride={stride})'.format(name=self.__class__.__name__, **self.__dict__) return string def forward(self, inputs): basicblock = self.conv_a(inputs) basicblock = self.conv_b(basicblock) if self.downsample is not None: residual = self.downsample(inputs) else: residual = inputs return residual + basicblock class POOLING(nn.Module): def __init__(self, C_in, C_out, stride, mode, affine=True, track_running_stats=True): super(POOLING, self).__init__() if C_in == C_out: self.preprocess = None else: self.preprocess = ReLUConvBN(C_in, C_out, 1, 1, 0, 1, affine, track_running_stats) if mode == 'avg' : self.op = nn.AvgPool2d(3, stride=stride, padding=1, count_include_pad=False) elif mode == 'max': self.op = nn.MaxPool2d(3, stride=stride, padding=1) else : raise ValueError('Invalid mode={:} in POOLING'.format(mode)) def forward(self, inputs): if self.preprocess: x = self.preprocess(inputs) else : x = inputs return self.op(x) class Identity(nn.Module): def __init__(self): super(Identity, self).__init__() def forward(self, x): return x class Zero(nn.Module): def __init__(self, C_in, C_out, stride): super(Zero, self).__init__() self.C_in = C_in self.C_out = C_out self.stride = stride self.is_zero = True def forward(self, x): if self.C_in == self.C_out: if self.stride == 1: return x.mul(0.) else : return x[:,:,::self.stride,::self.stride].mul(0.) 
else: shape = list(x.shape) shape[1] = self.C_out zeros = x.new_zeros(shape, dtype=x.dtype, device=x.device) return zeros def extra_repr(self): return 'C_in={C_in}, C_out={C_out}, stride={stride}'.format(**self.__dict__) class FactorizedReduce(nn.Module): def __init__(self, C_in, C_out, stride, affine, track_running_stats): super(FactorizedReduce, self).__init__() self.stride = stride self.C_in = C_in self.C_out = C_out self.relu = nn.ReLU(inplace=False) if stride == 2: #assert C_out % 2 == 0, 'C_out : {:}'.format(C_out) C_outs = [C_out // 2, C_out - C_out // 2] self.convs = nn.ModuleList() for i in range(2): self.convs.append( nn.Conv2d(C_in, C_outs[i], 1, stride=stride, padding=0, bias=False) ) self.pad = nn.ConstantPad2d((0, 1, 0, 1), 0) elif stride == 1: self.conv = nn.Conv2d(C_in, C_out, 1, stride=stride, padding=0, bias=False) else: raise ValueError('Invalid stride : {:}'.format(stride)) self.bn = nn.BatchNorm2d(C_out, affine=affine, track_running_stats=track_running_stats) def forward(self, x): if self.stride == 2: x = self.relu(x) y = self.pad(x) out = torch.cat([self.convs[0](x), self.convs[1](y[:,:,1:,1:])], dim=1) else: out = self.conv(x) out = self.bn(out) # if self.stride == 2: # y = self.pad(x) # out = torch.cat([self.convs[0](x), self.convs[1](y[:,:,1:,1:])], dim=1) # else: # out = self.conv(x) # out = self.bn(out) # out = self.relu(out) return out def extra_repr(self): return 'C_in={C_in}, C_out={C_out}, stride={stride}'.format(**self.__dict__) # Auto-ReID: Searching for a Part-Aware ConvNet for Person Re-Identification, ICCV 2019 class PartAwareOp(nn.Module): def __init__(self, C_in, C_out, stride, part=4): super().__init__() self.part = 4 self.hidden = C_in // 3 self.avg_pool = nn.AdaptiveAvgPool2d(1) self.local_conv_list = nn.ModuleList() for i in range(self.part): self.local_conv_list.append( nn.Sequential(nn.ReLU(), nn.Conv2d(C_in, self.hidden, 1), nn.BatchNorm2d(self.hidden, affine=True)) ) self.W_K = nn.Linear(self.hidden, self.hidden) self.W_Q = nn.Linear(self.hidden, self.hidden) if stride == 2 : self.last = FactorizedReduce(C_in + self.hidden, C_out, 2) elif stride == 1: self.last = FactorizedReduce(C_in + self.hidden, C_out, 1) else: raise ValueError('Invalid Stride : {:}'.format(stride)) def forward(self, x): batch, C, H, W = x.size() assert H >= self.part, 'input size too small : {:} vs {:}'.format(x.shape, self.part) IHs = [0] for i in range(self.part): IHs.append( min(H, int((i+1)*(float(H)/self.part))) ) local_feat_list = [] for i in range(self.part): feature = x[:, :, IHs[i]:IHs[i+1], :] xfeax = self.avg_pool(feature) xfea = self.local_conv_list[i]( xfeax ) local_feat_list.append( xfea ) part_feature = torch.cat(local_feat_list, dim=2).view(batch, -1, self.part) part_feature = part_feature.transpose(1,2).contiguous() part_K = self.W_K(part_feature) part_Q = self.W_Q(part_feature).transpose(1,2).contiguous() weight_att = torch.bmm(part_K, part_Q) attention = torch.softmax(weight_att, dim=2) aggreateF = torch.bmm(attention, part_feature).transpose(1,2).contiguous() features = [] for i in range(self.part): feature = aggreateF[:, :, i:i+1].expand(batch, self.hidden, IHs[i+1]-IHs[i]) feature = feature.view(batch, self.hidden, IHs[i+1]-IHs[i], 1) features.append( feature ) features = torch.cat(features, dim=2).expand(batch, self.hidden, H, W) final_fea = torch.cat((x,features), dim=1) outputs = self.last( final_fea ) return outputs # Searching for A Robust Neural Architecture in Four GPU Hours class GDAS_Reduction_Cell(nn.Module): def __init__(self, C_prev_prev, 
C_prev, C, reduction_prev, multiplier, affine, track_running_stats): super(GDAS_Reduction_Cell, self).__init__() if reduction_prev: self.preprocess0 = FactorizedReduce(C_prev_prev, C, 2, affine, track_running_stats) else: self.preprocess0 = ReLUConvBN(C_prev_prev, C, 1, 1, 0, 1, affine, track_running_stats) self.preprocess1 = ReLUConvBN(C_prev, C, 1, 1, 0, 1, affine, track_running_stats) self.multiplier = multiplier self.reduction = True self.ops1 = nn.ModuleList( [nn.Sequential( nn.ReLU(inplace=False), nn.Conv2d(C, C, (1, 3), stride=(1, 2), padding=(0, 1), groups=8, bias=False), nn.Conv2d(C, C, (3, 1), stride=(2, 1), padding=(1, 0), groups=8, bias=False), nn.BatchNorm2d(C, affine=True), nn.ReLU(inplace=False), nn.Conv2d(C, C, 1, stride=1, padding=0, bias=False), nn.BatchNorm2d(C, affine=True)), nn.Sequential( nn.ReLU(inplace=False), nn.Conv2d(C, C, (1, 3), stride=(1, 2), padding=(0, 1), groups=8, bias=False), nn.Conv2d(C, C, (3, 1), stride=(2, 1), padding=(1, 0), groups=8, bias=False), nn.BatchNorm2d(C, affine=True), nn.ReLU(inplace=False), nn.Conv2d(C, C, 1, stride=1, padding=0, bias=False), nn.BatchNorm2d(C, affine=True))]) self.ops2 = nn.ModuleList( [nn.Sequential( nn.MaxPool2d(3, stride=1, padding=1), nn.BatchNorm2d(C, affine=True)), nn.Sequential( nn.MaxPool2d(3, stride=2, padding=1), nn.BatchNorm2d(C, affine=True))]) def forward(self, s0, s1, drop_prob = -1): s0 = self.preprocess0(s0) s1 = self.preprocess1(s1) X0 = self.ops1[0] (s0) X1 = self.ops1[1] (s1) if self.training and drop_prob > 0.: X0, X1 = drop_path(X0, drop_prob), drop_path(X1, drop_prob) #X2 = self.ops2[0] (X0+X1) X2 = self.ops2[0] (s0) X3 = self.ops2[1] (s1) if self.training and drop_prob > 0.: X2, X3 = drop_path(X2, drop_prob), drop_path(X3, drop_prob) return torch.cat([X0, X1, X2, X3], dim=1)
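A short, self-contained sketch of how the OPS factory table at the top of this file is typically consumed: every entry is a callable with the signature (C_in, C_out, stride, affine, track_running_stats), so candidate operations can be built by name and applied to the same tensor. The channel and spatial sizes below are arbitrary.

import torch

# Build a few candidate operations from the OPS factory table defined above.
candidates = ['nor_conv_3x3', 'avg_pool_3x3', 'skip_connect']
ops = [OPS[name](16, 16, 1, True, True) for name in candidates]

x = torch.randn(2, 16, 32, 32)             # (batch, channels, height, width)
for name, op in zip(candidates, ops):
    out = op(x)
    print(name, tuple(out.shape))          # stride 1 with C_in == C_out keeps (2, 16, 32, 32)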
/** * @version 0.9 */ import axios from 'axios' import VersionCheck from 'react-native-version-check' import countries from '../../../../../assets/jsons/other/country-codes' import Log from '../../../../../services/Log/Log' import BlocksoftDict from '../../../../../../crypto/common/BlocksoftDict' import currencyActions from '../../../../Stores/Currency/CurrencyActions' export default { maxVersion: 71, updateQuery: { 1: { queryString: `ALTER TABLE account ADD COLUMN transactions_scan_time INTEGER NULL`, checkQueryString: false, checkQueryField : false }, 3: { queryString: `ALTER TABLE currency ADD COLUMN is_hidden INTEGER NOT NULL DEFAULT 0`, // if = 1 - removed }, 4: { queryString: `ALTER TABLE card ADD COLUMN country_code VARCHAR(32) NULL`, // if = 'ua' - ukraine afterFunction: async (dbInterface) => { try { const { array: cards } = await dbInterface.setQueryString('SELECT * FROM card').query() for(let i = 0; i < cards.length; i++){ const link = `https://lookup.binlist.net/${cards[i].number}` Log.log('DB/Update Migration 4 axios ' + link) const res = await axios.get(link) await dbInterface .setTableName('card') .setUpdateData({ key: { id: cards[i].id }, updateObj: { country_code: res.data.country.numeric } }) .update() } } catch (e) { Log.err('DB/Update afterFunction - Migration 4 error', e) } } }, 5: { afterFunction: async (dbInterface) => { await dbInterface.setQueryString(`INSERT INTO settings ([paramKey], [paramValue]) VALUES ('local_currency', 'USD')`).query() } }, 6: { queryString: `ALTER TABLE card ADD COLUMN currency VARCHAR(32) NULL`, afterFunction: async (dbInterface) => { const { array: cards } = await dbInterface.setQueryString(`SELECT * FROM card`).query() for(let i = 0; i < cards.length; i++){ const tmpCountry = countries.find(item => item.iso === cards[i].country_code) await dbInterface .setTableName('card') .setUpdateData({ key: { id: cards[i].id }, updateObj: { currency: tmpCountry.currencyCode } }) .update() } } }, 7: { afterFunction: async (dbInterface) => { try { const { array: cards } = await dbInterface.setQueryString('SELECT * FROM card').query() for(let i = 0; i < cards.length; i++){ const link =`https://lookup.binlist.net/${cards[i].number}` Log.log('DB/Update Migration 7 axios ' + link) const res = await axios.get(link) await dbInterface .setTableName('card') .setUpdateData({ key: { id: cards[i].id }, updateObj: { country_code: res.data.country.numeric } }) .update() } } catch (e) { Log.err('DB/Update afterFunction - Migration 7 error', e) } } }, 8: { afterFunction: async (dbInterface) => { const { array: cards } = await dbInterface.setQueryString(`SELECT * FROM card`).query() for(let i = 0; i < cards.length; i++){ const tmpCountry = countries.find(item => item.iso === cards[i].country_code) await dbInterface .setTableName('card') .setUpdateData({ key: { id: cards[i].id }, updateObj: { currency: tmpCountry.currencyCode } }) .update() } } }, 9: { queryString: `ALTER TABLE account_balance ADD COLUMN balance_fix DECIMAL(50,20) NULL`, }, 10: { queryString: `ALTER TABLE account_balance ADD COLUMN balance_txt VARCHAR(256) NULL`, }, 11: { afterFunction: async (dbInterface) => { await dbInterface.setQueryString(`ALTER TABLE account_balance RENAME TO tmp`).query() await dbInterface.setQueryString(`CREATE TABLE IF NOT EXISTS account_balance ( id INTEGER PRIMARY KEY AUTOINCREMENT, balance_fix DECIMAL(50,20) NULL, balance_txt VARCHAR(256) NULL, balance_scan_time INTEGER NOT NULL, status INTEGER NOT NULL, currency_code VARCHAR(32) NOT NULL, wallet_hash VARCHAR(256) NOT NULL, 
account_id INTEGER NOT NULL, FOREIGN KEY(wallet_hash) REFERENCES wallet(wallet_hash), FOREIGN KEY(account_id) REFERENCES account(id) )`).query() await dbInterface.setQueryString(` INSERT INTO account_balance(balance_fix, balance_txt, balance_scan_time, status, currency_code, wallet_hash, account_id) SELECT balance_fix, balance_txt, balance_scan_time, status, currency_code, wallet_hash, account_id FROM tmp `).query() await dbInterface.setQueryString(`DROP TABLE tmp`).query() } }, 12: { afterFunction: async (dbInterface) => { try { const { array: cryptocurrencies } = await dbInterface.setQueryString(`SELECT * FROM currency`).query() const addedCryptocurrencies = [] let item, currencyCode if (cryptocurrencies) { for(item of cryptocurrencies){ addedCryptocurrencies.push(item.currency_code) } for(currencyCode of BlocksoftDict.Codes) { if(addedCryptocurrencies.indexOf(currencyCode) === -1){ await currencyActions.addCurrency({ currencyCode: currencyCode }, 1, 0) } } } Log.log('DB/Update afterFunction - Migration 9 finish') } catch (e) { Log.err('DB/Update afterFunction - Migration 9 error', e) } } }, 13: { queryString: `ALTER TABLE account_balance ADD COLUMN balance_provider VARCHAR(256) NULL`, }, 14: { queryString: `ALTER TABLE wallet ADD COLUMN wallet_is_backed_up INTEGER NULL`, afterFunction: async () => { try { Log.log('DB/Update afterFunction - Migration 14 started') await currencyActions.addCurrency({ currencyCode: 'TRX' }, 1, 0) Log.log('DB/Update afterFunction - Migration 14 finish') } catch (e) { Log.err('DB/Update afterFunction - Migration 14 error', e) } } }, 15 : { queryString : ` CREATE TABLE IF NOT EXISTS custom_currency ( id INTEGER PRIMARY KEY AUTOINCREMENT, is_hidden INTEGER NOT NULL DEFAULT 0, currency_code VARCHAR(32) NOT NULL, currency_symbol VARCHAR(32) NOT NULL, currency_name VARCHAR(256) NOT NULL, token_type VARCHAR(32) NOT NULL, token_address VARCHAR(256) NOT NULL, token_decimals INTEGER NOT NULL, token_json TEXT NULL ) ` }, 16 : { queryString : ` CREATE TABLE IF NOT EXISTS transactions_used_outputs ( id INTEGER PRIMARY KEY AUTOINCREMENT, currency_code VARCHAR(256) NULL, output_tx_id VARCHAR(256) NULL, output_vout VARCHAR(256) NULL, output_address VARCHAR(256) NULL, use_tx_id VARCHAR(256) NULL, created_at DATETIME NULL ) ` }, 17: { queryString: `ALTER TABLE wallet ADD COLUMN wallet_is_subscribed INTEGER NULL`, }, 18: { queryString: `ALTER TABLE wallet ADD COLUMN wallet_is_subscribed_json TEXT NULL`, }, 19 : { queryString : ` CREATE TABLE IF NOT EXISTS transactions_created_inputs ( id INTEGER PRIMARY KEY AUTOINCREMENT, currency_code VARCHAR(256) NULL, input_address VARCHAR(256) NULL, input_amount INTEGER NULL, input_index INTEGER NULL, from_address VARCHAR(256) NULL, use_tx_id VARCHAR(256) NULL, created_at DATETIME NULL ) ` }, 20 : { queryString : ` CREATE TABLE IF NOT EXISTS transactions_scanners_tmp ( id INTEGER PRIMARY KEY AUTOINCREMENT, currency_code VARCHAR(256) NULL, address VARCHAR(256) NULL, tmp_key VARCHAR(256) NULL, tmp_sub_key VARCHAR(256) NULL, tmp_val TEXT, created_at DATETIME NULL ) ` }, 21: { queryString: `ALTER TABLE account ADD COLUMN already_shown INTEGER NULL`, afterFunction: async (dbInterface) => { try { await dbInterface.setQueryString(`UPDATE account SET currency_code='BTC' WHERE currency_code='BTC_SEGWIT'`).query() await dbInterface.setQueryString(`UPDATE account_balance SET currency_code='BTC' WHERE currency_code='BTC_SEGWIT'`).query() await dbInterface.setQueryString(`UPDATE transactions SET currency_code='BTC' WHERE 
currency_code='BTC_SEGWIT'`).query() Log.log('DB/Update afterFunction - Migration 21 finish') } catch (e) { Log.err('DB/Update afterFunction - Migration 21 error', e) } } }, 22: { queryString: `ALTER TABLE account ADD COLUMN wallet_pub_id INTEGER NULL`, }, 23: { queryString: `CREATE TABLE IF NOT EXISTS wallet_pub ( id INTEGER PRIMARY KEY AUTOINCREMENT, currency_code VARCHAR(256) NOT NULL, wallet_hash VARCHAR(256) NOT NULL, wallet_pub_type VARCHAR(256) NOT NULL, wallet_pub_value VARCHAR(256) NOT NULL, wallet_pub_last_index INTEGER NULL, status INTEGER NULL, balance_fix DECIMAL(50,20) NULL, balance_txt VARCHAR(256) NULL, balance_provider VARCHAR(256) NULL, balance_scan_time INTEGER NOT NULL, transactions_scan_time INTEGER NULL, FOREIGN KEY(wallet_hash) REFERENCES wallet(wallet_hash) )` }, 24: { queryString: `ALTER TABLE wallet ADD COLUMN wallet_is_hd INTEGER NULL` }, 25: { queryString: `ALTER TABLE transactions_used_outputs ADD COLUMN account_id INTEGER NULL` }, 26: { queryString: `ALTER TABLE transactions_used_outputs ADD COLUMN wallet_hash VARCHAR(256) NULL` }, 27: { queryString: `DELETE FROM transactions WHERE currency_code = 'ETH' OR currency_code LIKE 'ETH_%'` }, 28: { queryString: `ALTER TABLE wallet ADD COLUMN wallet_use_unconfirmed INTEGER NULL` }, 29: { queryString: `ALTER TABLE account_balance ADD COLUMN balance_scan_log TEXT NULL` }, 30: { queryString: `ALTER TABLE wallet_pub ADD COLUMN balance_scan_log TEXT NULL` }, 31: { queryString: `ALTER TABLE account ADD COLUMN transactions_scan_log TEXT NULL` }, 32: { queryString: `ALTER TABLE transactions ADD COLUMN transactions_scan_time INTEGER NULL` }, 33: { queryString: `ALTER TABLE transactions ADD COLUMN transactions_scan_log TEXT NULL` }, 34: { queryString: `ALTER TABLE wallet ADD COLUMN wallet_is_hide_transaction_for_fee TEXT NULL`, afterFunction: async (dbInterface) => { try { const { array: wallets } = await dbInterface.setQueryString('SELECT * FROM wallet').query() if (wallets && wallets.length > 0) { for (const wallet of wallets) { await dbInterface.setQueryString(`UPDATE wallet SET wallet_is_hide_transaction_for_fee=1 WHERE wallet_hash='${wallet.wallet_hash}'`).query() } } Log.log('DB/Update afterFunction - Migration 34 finish') } catch (e) { Log.err('DB/Update afterFunction - Migration 34 error ' + e.message) } } }, 35: { queryString: `ALTER TABLE currency ADD COLUMN price_change_percentage_24h DECIMAL(18,10) NULL` }, 36: { queryString: `ALTER TABLE currency ADD COLUMN price_change_24h DECIMAL(18,10) NULL` }, 37: { queryString: `ALTER TABLE currency ADD COLUMN price_high_24h DECIMAL(18,10) NULL` }, 38: { queryString: `ALTER TABLE currency ADD COLUMN price_low_24h DECIMAL(18,10) NULL` }, 39: { queryString: `ALTER TABLE currency ADD COLUMN price_last_updated DATETIME NULL` }, 40: { queryString: `ALTER TABLE currency ADD COLUMN price_provider VARCHAR(255) NULL` }, 41: { queryString: `ALTER TABLE card ADD COLUMN card_verification_json VARCHAR(256) NULL` }, 42: { queryString: `ALTER TABLE account_balance ADD COLUMN unconfirmed_fix DECIMAL(50,20) NULL` }, 43 : { queryString: `ALTER TABLE account_balance ADD COLUMN unconfirmed_txt VARCHAR(256) NULL` }, 44: { queryString: `ALTER TABLE wallet_pub ADD COLUMN unconfirmed_fix DECIMAL(50,20) NULL` }, 45 : { queryString: `ALTER TABLE wallet_pub ADD COLUMN unconfirmed_txt VARCHAR(256) NULL` }, 46 : { queryString: `CREATE TABLE IF NOT EXISTS app_task ( id INTEGER PRIMARY KEY AUTOINCREMENT, wallet_hash VARCHAR(256) NULL, currency_code VARCHAR(256) NULL, task_group VARCHAR(256) NULL, task_name 
VARCHAR(32) NOT NULL, task_json TEXT NULL, task_status INTEGER NULL, task_created INTEGER NOT NULL, task_started INTEGER NULL, task_log TEXT NULL, task_finished INTEGER NULL )` }, 47 : { queryString: ` CREATE TABLE IF NOT EXISTS app_news ( id INTEGER PRIMARY KEY AUTOINCREMENT, wallet_hash VARCHAR(256) NULL, currency_code VARCHAR(256) NULL, news_source VARCHAR(256) NULL, news_group VARCHAR(256) NULL, news_priority INTEGER NULL, news_name VARCHAR(256) NOT NULL, news_json TEXT NULL, news_custom_title TEXT NULL, news_custom_text TEXT NULL, news_custom_created INTEGER NULL, news_status INTEGER NULL, news_created INTEGER NOT NULL, news_log TEXT NULL, news_shown_popup INTEGER NULL, news_shown_list INTEGER NULL, news_removed INTEGER NULL ) ` }, 48 : { queryString: `ALTER TABLE wallet_pub ADD COLUMN transactions_scan_log TEXT NULL` }, 49 : { queryString: `ALTER TABLE app_news ADD COLUMN news_need_popup INTEGER NULL` }, 50 : { queryString: `ALTER TABLE app_news ADD COLUMN news_server_id VARCHAR(256) NULL` }, 51 : { queryString : `ALTER TABLE transactions ADD transaction_fee_currency_code VARCHAR(256) NULL` }, 52: { queryString: `ALTER TABLE wallet ADD COLUMN wallet_use_legacy INTEGER NULL` }, 53: { afterFunction: async (dbInterface) => { try { const version = VersionCheck.getCurrentVersion().split('.')[2] if(version >= 407) { await dbInterface.setQueryString(`INSERT INTO settings ([paramKey], [paramValue]) VALUES ('btcShowTwoAddress', '1')`).query() } Log.log('DB/Update afterFunction - Migration 53 finish') } catch (e) { Log.err('DB/Update afterFunction - Migration 53', e) } } }, 54: { queryString: `ALTER TABLE wallet ADD COLUMN wallet_allow_replace_by_fee INTEGER NULL` }, 55: { queryString: `ALTER TABLE transactions ADD COLUMN transactions_other_hashes TEXT NULL` }, 56: { queryString: `DELETE FROM app_news` }, 57: { queryString: `DROP TABLE IF EXISTS wallet_backup` }, 58 : { queryString : `DROP TABLE IF EXISTS currency_history` }, 59 : { queryString : `UPDATE account SET transactions_scan_log=''` }, 60 : { queryString : `UPDATE account_balance SET balance_scan_log=''` }, 61 : { queryString : `UPDATE wallet_pub SET transactions_scan_log='', balance_scan_log=''` }, 62: { queryString: `ALTER TABLE account_balance ADD COLUMN balance_scan_block VARCHAR(32) NULL` }, 63: { queryString: `ALTER TABLE wallet_pub ADD COLUMN balance_scan_block VARCHAR(32) NULL` }, 64: { queryString: `ALTER TABLE transactions ADD COLUMN hidden_at DATETIME NULL` }, 65 : { queryString : `UPDATE transactions SET transactions_scan_log=''` }, 66 : { queryString : `UPDATE wallet SET wallet_allow_replace_by_fee=1` }, 67: { afterFunction: async () => { try { Log.log('DB/Update afterFunction - Migration 67 started') // dont put it or like this - slow phones are complaining ((( await currencyActions.addCurrency({ currencyCode: 'ETH_DAIM' }, 1, 0) Log.log('DB/Update afterFunction - Migration 67 finish') } catch (e) { Log.log('DB/Update afterFunction - Migration 67 error ' + e.message) } } }, 68: { queryString: `ALTER TABLE account ADD COLUMN changes_log TEXT NULL` }, 69: { queryString: `ALTER TABLE account ADD COLUMN is_main INTEGER NULL DEFAULT 1` }, 70: { queryString: `ALTER TABLE wallet ADD COLUMN wallet_cashback VARCHAR(256) NULL` }, 71: { queryString: `ALTER TABLE app_news ADD COLUMN news_unique_key VARCHAR(256) NULL` }, } }
(function(d){ const l = d['lt'] = d['lt'] || {}; l.dictionary=Object.assign( l.dictionary||{}, {"%0 of %1":"","Align center":"Centruoti","Align left":"Lygiuoti į kairę","Align right":"Lygiuoti į dešinę",Aquamarine:"Aquamarine",Big:"Didelis",Black:"Juoda","Block quote":"Citata",Blue:"Mėlyna",Bold:"Paryškintas","Bulleted List":"Sąrašas",Cancel:"Atšaukti","Cannot upload file:":"Negalima įkelti failo:","Centered image":"Vaizdas centre","Change image text alternative":"Pakeisti vaizdo alternatyvųjį tekstą","Choose heading":"Pasirinkite antraštę",Column:"Stulpelis","Could not insert image at the current position.":"Nepavyko įterpti vaizdo į dabartinę vietą.","Could not obtain resized image URL.":"Nepavyko gauti pakeisto dydžio paveiksliuko URL.","Decrease indent":"Sumažinti atitraukimą",Default:"Numatyta","Delete column":"Ištrinti stulpelį","Delete row":"Ištrinti eilutę","Dim grey":"Pilkšva","Document colors":"",Downloadable:"","Dropdown toolbar":"","Edit block":"Redaguoti bloką","Edit link":"Keisti nuorodą","Editor toolbar":"","Enter image caption":"Įveskite vaizdo antraštę","Font Background Color":"Šrifto fono spalva","Font Color":"Šrifto spalva","Font Family":"Šrifto šeima","Font Size":"Šrifto dydis","Full size image":"Pilno dydžio vaizdas",Green:"Žalia",Grey:"Pilka","Header column":"Antraštės stulpelis","Header row":"Antraštės eilutė",Heading:"Antraštė","Heading 1":"Antraštė 1","Heading 2":"Antraštė 2","Heading 3":"Antraštė 3","Heading 4":"Antraštė 4","Heading 5":"Antraštė 5","Heading 6":"Antraštė 6",Huge:"Milžiniškas","Image resize list":"","Image toolbar":"","image widget":"vaizdų valdiklis","Increase indent":"Padidinti atitraukimą","Insert column left":"Įterpti stulpelį kairėje","Insert column right":"Įterpti stulpelį dešinėje","Insert image":"Įterpti vaizdą","Insert image or file":"Įterpti vaizdą ar failą","Insert media":"Įterpkite media","Insert row above":"Įterpti eilutę aukščiau","Insert row below":"Įterpti eilutę žemiau","Insert table":"Įterpti lentelę","Inserting image failed":"Nepavyko įterpti vaizdo",Italic:"Kursyvas",Justify:"Lygiuoti per visą plotį","Left aligned image":"Vaizdas kairėje","Light blue":"Šviesiai mėlyna","Light green":"Šviesiai žalia","Light grey":"Šviesiai pilka",Link:"Pridėti nuorodą","Link URL":"Nuorodos URL","Media URL":"Media URL","media widget":"media valdiklis","Merge cell down":"Prijungti langelį apačioje","Merge cell left":"Prijungti langelį kairėje","Merge cell right":"Prijungti langelį dešinėje","Merge cell up":"Prijungti langelį viršuje","Merge cells":"Sujungti langelius",Next:"","Numbered List":"Numeruotas rąrašas","Open in a new tab":"","Open link in new tab":"Atidaryti nuorodą naujame skirtuke",Orange:"Oranžinė",Original:"",Paragraph:"Paragrafas","Paste the media URL in the input.":"Įklijuokite media URL adresą į įvedimo lauką.",Previous:"",Purple:"Violetinė",Red:"Raudona",Redo:"Pirmyn","Remove color":"Pašalinti spalvą","Resize image":"","Resize image to %0":"","Resize image to the original size":"","Rich Text Editor":"Raiškiojo teksto redaktorius","Rich Text Editor, %0":"Raiškiojo teksto redaktorius, %0","Right aligned image":"Vaizdas dešinėje",Row:"Eilutė",Save:"Išsaugoti","Select column":"","Select row":"","Selecting resized image failed":"Nepavyko pasirinkti pakeisto vaizdo","Show more items":"","Side image":"Vaizdas šone",Small:"Mažas","Split cell horizontally":"Padalinti langelį horizontaliai","Split cell vertically":"Padalinti langelį vertikaliai","Table toolbar":"","Text alignment":"Teksto lygiavimas","Text alignment toolbar":"","Text 
alternative":"Alternatyvusis tekstas","The URL must not be empty.":"URL negali būti tuščias.","This link has no URL":"Ši nuorda neturi URL","This media URL is not supported.":"Šis media URL yra nepalaikomas.",Tiny:"Mažytis","Tip: Paste the URL into the content to embed faster.":"Patarimas: norėdami greičiau įterpti media tiesiog įklijuokite URL į turinį.",Turquoise:"Turkio",Underline:"Pabrauktas",Undo:"Atgal",Unlink:"Pašalinti nuorodą","Upload failed":"Įkelti nepavyko","Upload in progress":"Įkelima",White:"Balta",Yellow:"Geltona"} );l.getPluralForm=function(n){return (n % 10 == 1 && (n % 100 > 19 || n % 100 < 11) ? 0 : (n % 10 >= 2 && n % 10 <=9) && (n % 100 > 19 || n % 100 < 11) ? 1 : n % 1 != 0 ? 2: 3);;};})(window.CKEDITOR_TRANSLATIONS||(window.CKEDITOR_TRANSLATIONS={}));
import FWCore.ParameterSet.Config as cms

# magnetic field
# cms geometry
# tracker geometry
# tracker numbering
# KFUpdatorESProducer
from TrackingTools.KalmanUpdators.KFUpdatorESProducer_cfi import *
# Chi2MeasurementEstimatorESProducer
from TrackingTools.KalmanUpdators.Chi2MeasurementEstimator_cfi import *
# KFTrajectoryFitterESProducer
# KFTrajectorySmootherESProducer
# KFFittingSmootherESProducer
from TrackingTools.TrackFitters.RungeKuttaFitters_cff import *
# PropagatorWithMaterialESProducer
from TrackingTools.MaterialEffects.MaterialPropagator_cfi import *
# PropagatorWithMaterialESProducer
from TrackingTools.MaterialEffects.OppositeMaterialPropagator_cfi import *
# stripCPE
from RecoLocalTracker.SiStripRecHitConverter.StripCPEfromTrackAngle_cfi import *
from RecoLocalTracker.SiStripRecHitConverter.SiStripRecHitMatcher_cfi import *
# TransientTrackingBuilder
from RecoTracker.TransientTrackingRecHit.TransientTrackingRecHitBuilder_cfi import *
import RecoTracker.TrackProducer.TrackProducer_cfi

# include TrackProducer and clone with new module label
cosmictrackfinderCosmics = RecoTracker.TrackProducer.TrackProducer_cfi.TrackProducer.clone(
    src = 'cosmicCandidateFinderP5',
    TTRHBuilder = 'WithTrackAngle',
    AlgorithmName = 'cosmic',
    Fitter = 'RKFittingSmoother',
    TrajectoryInEvent = True
)
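As a hedged illustration of the standard CMSSW configuration idiom used above, the cloned producer can itself be cloned with selected parameters overridden; the new label and the override chosen here are assumptions for illustration, not part of this fragment.

# Hypothetical variant: clone the producer defined above and override one parameter.
cosmictrackfinderCosmicsNoTraj = cosmictrackfinderCosmics.clone(
    TrajectoryInEvent = False   # drop trajectories from the event (illustrative choice)
)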
/**
 * Provides a bridge between the Managers Namespace and the resolving of const DSUs
 * @namespace Resolvers
 */
module.exports = {
    getProductResolver: require('./ProductResolver'),
    getBatchResolver: require('./BatchResolver')
}