import { combineReducers } from "redux";
import calculator from "./calculator";
export default combineReducers({ calculator });
|
import React from 'react';
import { makeStyles } from '@material-ui/core/styles';
import GridList from '@material-ui/core/GridList';
import GridListTile from '@material-ui/core/GridListTile';
import GridListTileBar from '@material-ui/core/GridListTileBar';
import Modal from '@material-ui/core/Modal';
import ChevronLeft from '@material-ui/icons/ChevronLeft';
import ChevronRight from '@material-ui/icons/ChevronRight';
function rand() {
return 4;
// return Math.round(Math.random() * 20) - 10;
}
function getModalStyle() {
const top = 50 + rand();
const left = 50 + rand();
return {
top: `${top}%`,
left: `${left}%`,
transform: `translate(-${top}%, -${left}%)`,
};
}
const useStyles = makeStyles(theme => ({
root: {
display: 'flex',
flexWrap: 'wrap',
justifyContent: 'space-around',
overflow: 'hidden',
backgroundColor: theme.palette.background.paper,
},
gridList: {
flexWrap: 'nowrap',
// Promote the list into its own layer on Chrome. This costs memory but helps keep the FPS high.
transform: 'translateZ(0)',
},
title: {
fontSize: '0.8rem',
},
titleBar: {
background:
'linear-gradient(to top, rgba(0,0,0,0.7) 0%, rgba(0,0,0,0.3) 70%, rgba(0,0,0,0) 100%)',
},
paper: {
position: 'absolute',
backgroundColor: theme.palette.background.paper,
boxShadow: theme.shadows[5],
outline: 'none',
},
img: {
width: '250px',
},
arrow: {
color: "#737475",
fontSize: '4rem',
cursor: 'pointer',
marginBottom: '54%',
}
}));
export default function HorizontalGallery({ source }) {
const classes = useStyles();
const [open, setOpen] = React.useState(false);
// getModalStyle is not a pure function, so we compute the style only on the first render
const [modalStyle] = React.useState(getModalStyle);
const handleOpen = () => {
setOpen(true);
};
const handleClose = () => {
setOpen(false);
};
// selected index
const [selectedImage, setSelectedImage] = React.useState(0);
const handleImageClick = (img) => {
handleOpen();
setSelectedImage(img);
}
const selectedImageInfo = source[selectedImage];
const handleImageNext = (step) => {
setSelectedImage(selectedImage + step);
}
return (
<div className={classes.root}>
<GridList className={classes.gridList} cols={5.5}>
{source.map((tile, index) => (
<GridListTile key={tile.img} onClick={() => { handleImageClick(index) }} style={{ cursor: 'pointer', }}>
<img src={tile.img} alt={tile.title} />
<GridListTileBar
title={tile.title}
classes={{
root: classes.titleBar,
title: classes.title,
}}
subtitle={<span>{tile.author}</span>}
/>
</GridListTile>
))}
</GridList>
<Modal
aria-labelledby="image-modal-title"
aria-describedby="image-modal-description"
open={open}
onClose={handleClose}
>
<div style={modalStyle} className={classes.paper}>
{selectedImage !== 0
? <ChevronLeft className={classes.arrow} onClick={() => { handleImageNext(-1) }} />
: <ChevronLeft className={classes.arrow} style={{ opacity: 0 }} />}
{selectedImageInfo && <img src={selectedImageInfo.img} alt={selectedImageInfo.title} className={classes.img} />}
{selectedImage !== source.length - 1
? <ChevronRight className={classes.arrow} onClick={() => { handleImageNext(1) }} />
: <ChevronRight className={classes.arrow} style={{ opacity: 0 }} />}
</div>
</Modal>
</div>
);
}
|
import React from 'react';
import ReactDOM from 'react-dom';
import App from './App';
import {BrowserRouter as Router} from 'react-router-dom'
import {MuiPickersUtilsProvider} from '@material-ui/pickers';
import DateFnsUtils from '@date-io/date-fns';
import * as serviceWorker from './serviceWorker';
ReactDOM.render(
<Router>
<MuiPickersUtilsProvider utils={DateFnsUtils}>
<App/>
</MuiPickersUtilsProvider>
</Router>,
document.getElementById('root')
);
serviceWorker.unregister(); |
# Create a dictionary of accounts to make deletion easier to manage
from Account import *
accountsDict = {}
nextAccountNumber = 0
# Create two accounts
oAccount = Account("Ryan", 100, "ryanpassword")
ryansAccountNumber = nextAccountNumber
accountsDict[ryansAccountNumber] = oAccount
nextAccountNumber += 1
oAccount = Account("Larry", 320, "larrypassword")
larrysAccountNumber = nextAccountNumber
accountsDict[larrysAccountNumber] = oAccount
print("calling methods on the two accounts...")
accountsDict[ryansAccountNumber].deposit(50, "ryanpassword")
accountsDict[larrysAccountNumber].withdraw(20, "larrypassword")
# Create a third account interactively
userName = input("Enter the username for the account: ")
userBalance = int(input("Enter the starting balance for the account: "))
userPassword = input("Enter the password for the user: ")
# Add the new account to the dictionary
oAccount = Account(userName, userBalance, userPassword)
newAccountNumber = nextAccountNumber
accountsDict[newAccountNumber] = oAccount
nextAccountNumber += 1
# Add some cash to the new account
accountsDict[newAccountNumber].deposit(450, userPassword)
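# A minimal sketch of what the imported Account class might look like. The real
# Account module is not shown here, so the names and behaviour below are assumptions
# inferred only from how the class is used above (constructor plus password-checked
# deposit/withdraw methods).
#
# class Account:
#     def __init__(self, name, balance, password):
#         self.name = name
#         self.balance = balance
#         self.password = password
#
#     def deposit(self, amount, password):
#         if password != self.password:
#             print("Sorry, incorrect password")
#             return None
#         self.balance += amount
#         return self.balance
#
#     def withdraw(self, amount, password):
#         if password != self.password:
#             print("Sorry, incorrect password")
#             return None
#         if amount > self.balance:
#             print("You cannot withdraw more than you have in the account")
#             return None
#         self.balance -= amount
#         return self.balance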
|
# Copyright 2017 Neural Networks and Deep Learning lab, MIPT
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import collections
from typing import Dict, Any
import json
import numpy as np
import tensorflow as tf
from tensorflow.contrib.layers import xavier_initializer as xav
from deeppavlov.core.layers import tf_attention_mechanisms as am
from deeppavlov.core.layers import tf_layers
from deeppavlov.core.common.errors import ConfigError
from deeppavlov.core.commands.utils import expand_path
from deeppavlov.core.common.registry import register
from deeppavlov.core.models.lr_scheduled_tf_model import LRScheduledTFModel
from deeppavlov.core.models.component import Component
from deeppavlov.core.common.log import get_logger
from deeppavlov.models.go_bot.tracker import Tracker
import deeppavlov.models.go_bot.templates as templ
log = get_logger(__name__)
@register("go_bot")
class GoalOrientedBot(LRScheduledTFModel):
"""
The dialogue bot is based on https://arxiv.org/abs/1702.03274, which introduces
Hybrid Code Networks that combine an RNN with domain-specific knowledge
and system action templates.
The network handles dialogue policy management.
Takes features of an utterance as input and predicts the label of a bot action
(classification task).
An LSTM with a dense layer for input features and a dense layer for its output.
Softmax is used as an output activation function.
Todo:
add docstring for trackers.
Parameters:
tokenizer: one of tokenizers from
:doc:`deeppavlov.models.tokenizers </apiref/models/tokenizers>` module.
tracker: dialogue state tracker from
:doc:`deeppavlov.models.go_bot.tracker </apiref/models/go_bot>`.
hidden_size: size of rnn hidden layer.
action_size: size of rnn output (equals to number of bot actions).
obs_size: input features' size (must be equal to sum of output sizes of
``bow_embedder``, ``embedder``, ``intent_classifier``,
``tracker.num_features`` plus the size of context features (=6) and
``action_size``).
dropout_rate: probability of weights dropping out.
l2_reg_coef: l2 regularization weight (applied to input and output layer).
dense_size: rnn input size.
attention_mechanism: describes attention applied to embeddings of input tokens.
* **type** – type of attention mechanism, possible values are ``'general'``, ``'bahdanau'``, ``'light_general'``, ``'light_bahdanau'``, ``'cs_general'`` and ``'cs_bahdanau'``.
* **hidden_size** – attention hidden state size.
* **max_num_tokens** – maximum number of input tokens.
* **depth** – number of averages used in constrained attentions
(``'cs_bahdanau'`` or ``'cs_general'``).
* **action_as_key** – whether to use action from previous timestep as key
to attention.
* **intent_as_key** – use utterance intents as attention key or not.
* **projected_align** – whether to use output projection.
network_parameters: dictionary with network parameters (for compatibility with release 0.1.1, deprecated in the future)
template_path: file with mapping between actions and text templates
for response generation.
template_type: type of used response templates in string format.
word_vocab: vocabulary of input word tokens
(:class:`~deeppavlov.core.data.vocab.DefaultVocabulary` recommended).
bow_embedder: instance of one-hot word encoder
:class:`~deeppavlov.models.embedders.bow_embedder.BoWEmbedder`.
embedder: one of embedders from
:doc:`deeppavlov.models.embedders </apiref/models/embedders>` module.
slot_filler: component that outputs slot values for a given utterance
(:class:`~deeppavlov.models.slotfill.slotfill.DstcSlotFillingNetwork`
recommended).
intent_classifier: component that outputs intents probability distribution
for a given utterance (
:class:`~deeppavlov.models.classifiers.keras_classification_model.KerasClassificationModel`
recommended).
database: database that will be used during inference to perform
``api_call_action`` actions and get ``'db_result'`` result (
:class:`~deeppavlov.core.data.sqlite_database.Sqlite3Database`
recommended).
api_call_action: label of the action that corresponds to database api call
(it must be present in your ``template_path`` file), during interaction
it will be used to get ``'db_result'`` from ``database``.
use_action_mask: if ``True``, network output will be applied with a mask
over allowed actions.
debug: whether to display debug output.
"""
GRAPH_PARAMS = ["hidden_size", "action_size", "dense_size", "obs_size",
"attention_mechanism"]
DEPRECATED = ["end_learning_rate", "decay_steps", "decay_power"]
def __init__(self,
tokenizer: Component,
tracker: Tracker,
template_path: str,
save_path: str,
hidden_size: int = 128,
obs_size: int = None,
action_size: int = None,
dropout_rate: float = 0.,
l2_reg_coef: float = 0.,
dense_size: int = None,
attention_mechanism: dict = None,
network_parameters: Dict[str, Any] = {},
load_path: str = None,
template_type: str = "DefaultTemplate",
word_vocab: Component = None,
bow_embedder: Component = None,
embedder: Component = None,
slot_filler: Component = None,
intent_classifier: Component = None,
database: Component = None,
api_call_action: str = None, # TODO: make it unrequired
use_action_mask: bool = False,
debug: bool = False,
**kwargs):
if any(p in network_parameters for p in self.DEPRECATED):
log.warning(f"parameters {self.DEPRECATED} are deprecated,"
" for learning rate schedule documentation see"
" deeppavlov.core.models.lr_scheduled_tf_model"
" or read gitub tutorial on super convergence.")
if 'learning_rate' in network_parameters:
kwargs['learning_rate'] = network_parameters.pop('learning_rate')
super().__init__(load_path=load_path, save_path=save_path, **kwargs)
self.tokenizer = tokenizer
self.tracker = tracker
self.bow_embedder = bow_embedder
self.embedder = embedder
self.slot_filler = slot_filler
self.intent_classifier = intent_classifier
self.use_action_mask = use_action_mask
self.debug = debug
self.word_vocab = word_vocab
template_path = expand_path(template_path)
template_type = getattr(templ, template_type)
log.info("[loading templates from {}]".format(template_path))
self.templates = templ.Templates(template_type).load(template_path)
self.n_actions = len(self.templates)
log.info("{} templates loaded".format(self.n_actions))
self.database = database
self.api_call_id = None
if api_call_action is not None:
self.api_call_id = self.templates.actions.index(api_call_action)
self.intents = []
if callable(self.intent_classifier):
self.intents = self.intent_classifier.get_main_component().classes
new_network_parameters = {
'hidden_size': hidden_size,
'action_size': action_size,
'obs_size': obs_size,
'dropout_rate': dropout_rate,
'l2_reg_coef': l2_reg_coef,
'dense_size': dense_size,
'attn': attention_mechanism
}
if 'attention_mechanism' in network_parameters:
network_parameters['attn'] = network_parameters.pop('attention_mechanism')
new_network_parameters.update(network_parameters)
self._init_network(**new_network_parameters)
self.reset()
def _init_network(self, hidden_size, action_size, obs_size, dropout_rate,
l2_reg_coef, dense_size, attn):
# initialize network
dense_size = dense_size or hidden_size
if obs_size is None:
obs_size = 6 + self.tracker.num_features + self.n_actions
if callable(self.bow_embedder):
obs_size += len(self.word_vocab)
if callable(self.embedder):
obs_size += self.embedder.dim
if callable(self.intent_classifier):
obs_size += len(self.intents)
log.info(f"Calculated input size for `GoalOrientedBotNetwork` is {obs_size}")
if action_size is None:
action_size = self.n_actions
if attn:
attn['token_size'] = attn.get('token_size') or self.embedder.dim
attn['action_as_key'] = attn.get('action_as_key', False)
attn['intent_as_key'] = attn.get('intent_as_key', False)
key_size = 0
if attn['action_as_key']:
key_size += self.n_actions
if attn['intent_as_key'] and callable(self.intent_classifier):
key_size += len(self.intents)
key_size = key_size or 1
attn['key_size'] = attn.get('key_size') or key_size
# specify model options
self.opt = {
'hidden_size': hidden_size,
'action_size': action_size,
'obs_size': obs_size,
'dense_size': dense_size,
'dropout_rate': dropout_rate,
'l2_reg_coef': l2_reg_coef,
'attention_mechanism': attn
}
# initialize parameters
self._init_network_params()
# build computational graph
self._build_graph()
# initialize session
self.sess = tf.Session()
self.sess.run(tf.global_variables_initializer())
if tf.train.checkpoint_exists(str(self.load_path.resolve())):
log.info("[initializing `{}` from saved]".format(self.__class__.__name__))
self.load()
else:
log.info("[initializing `{}` from scratch]".format(self.__class__.__name__))
def _encode_context(self, context, db_result=None):
# tokenize input
tokens = self.tokenizer([context.lower().strip()])[0]
if self.debug:
log.debug("Tokenized text= `{}`".format(' '.join(tokens)))
# Bag of words features
bow_features = []
if callable(self.bow_embedder):
tokens_idx = self.word_vocab(tokens)
bow_features = self.bow_embedder([tokens_idx])[0]
bow_features = bow_features.astype(np.float32)
# Embeddings
emb_features = []
emb_context = np.array([], dtype=np.float32)
if callable(self.embedder):
if self.attn:
if tokens:
pad = np.zeros((self.attn.max_num_tokens,
self.attn.token_size),
dtype=np.float32)
sen = np.array(self.embedder([tokens])[0])
# TODO: batch sizes greater than 1 are not supported here
emb_context = np.concatenate((pad, sen))
emb_context = emb_context[-self.attn.max_num_tokens:]
else:
emb_context = np.zeros((self.attn.max_num_tokens,
self.attn.token_size),
dtype=np.float32)
else:
emb_features = self.embedder([tokens], mean=True)[0]
# random embedding instead of zeros
if np.all(emb_features < 1e-20):
emb_dim = self.embedder.dim
emb_features = np.fabs(np.random.normal(0, 1/emb_dim, emb_dim))
# Intent features
intent_features = []
if callable(self.intent_classifier):
intent_features = self.intent_classifier([context])[0]
if self.debug:
intent = self.intents[np.argmax(intent_features[0])]
log.debug("Predicted intent = `{}`".format(intent))
attn_key = np.array([], dtype=np.float32)
if self.attn:
if self.attn.action_as_key:
attn_key = np.hstack((attn_key, self.prev_action))
if self.attn.intent_as_key:
attn_key = np.hstack((attn_key, intent_features))
if len(attn_key) == 0:
attn_key = np.array([1], dtype=np.float32)
# Text entity features
if callable(self.slot_filler):
self.tracker.update_state(self.slot_filler([tokens])[0])
if self.debug:
log.debug("Slot vals: {}".format(self.slot_filler([tokens])))
state_features = self.tracker.get_features()
# Other features
result_matches_state = 0.
if self.db_result is not None:
result_matches_state = all(v == self.db_result.get(s)
for s, v in self.tracker.get_state().items()
if v != 'dontcare') * 1.
context_features = np.array([bool(db_result) * 1.,
(db_result == {}) * 1.,
(self.db_result is None) * 1.,
bool(self.db_result) * 1.,
(self.db_result == {}) * 1.,
result_matches_state],
dtype=np.float32)
if self.debug:
log.debug("Context features = {}".format(context_features))
debug_msg = "num bow features = {}, ".format(len(bow_features)) +\
"num emb features = {}, ".format(len(emb_features)) +\
"num intent features = {}, ".format(len(intent_features)) +\
"num state features = {}, ".format(len(state_features)) +\
"num context features = {}, ".format(len(context_features)) +\
"prev_action shape = {}".format(len(self.prev_action))
log.debug(debug_msg)
concat_feats = np.hstack((bow_features, emb_features, intent_features,
state_features, context_features, self.prev_action))
return concat_feats, emb_context, attn_key
def _encode_response(self, act):
return self.templates.actions.index(act)
def _decode_response(self, action_id):
"""
Convert action template id and entities from tracker
to final response.
"""
template = self.templates.templates[int(action_id)]
slots = self.tracker.get_state()
if self.db_result is not None:
for k, v in self.db_result.items():
slots[k] = str(v)
resp = template.generate_text(slots)
# in api calls replace unknown slots to "dontcare"
if (self.templates.ttype is templ.DualTemplate) and\
(action_id == self.api_call_id):
resp = re.sub("#([A-Za-z]+)", "dontcare", resp).lower()
if self.debug:
log.debug("Pred response = {}".format(resp))
return resp
def calc_action_mask(self, previous_action):
mask = np.ones(self.n_actions, dtype=np.float32)
if self.use_action_mask:
known_entities = {**self.tracker.get_state(), **(self.db_result or {})}
for a_id in range(self.n_actions):
tmpl = str(self.templates.templates[a_id])
for entity in set(re.findall('#([A-Za-z]+)', tmpl)):
if entity not in known_entities:
mask[a_id] = 0.
# forbid two api calls in a row
if np.any(previous_action):
prev_act_id = np.argmax(previous_action)
if prev_act_id == self.api_call_id:
mask[prev_act_id] = 0.
return mask
def prepare_data(self, x, y):
b_features, b_u_masks, b_a_masks, b_actions = [], [], [], []
b_emb_context, b_keys = [], [] # for attention
max_num_utter = max(len(d_contexts) for d_contexts in x)
for d_contexts, d_responses in zip(x, y):
self.reset()
if self.debug:
preds = self._infer_dialog(d_contexts)
d_features, d_a_masks, d_actions = [], [], []
d_emb_context, d_key = [], [] # for attention
for context, response in zip(d_contexts, d_responses):
if context.get('db_result') is not None:
self.db_result = context['db_result']
features, emb_context, key = \
self._encode_context(context['text'], context.get('db_result'))
d_features.append(features)
d_emb_context.append(emb_context)
d_key.append(key)
d_a_masks.append(self.calc_action_mask(self.prev_action))
action_id = self._encode_response(response['act'])
d_actions.append(action_id)
# previous action is teacher-forced here
self.prev_action *= 0.
self.prev_action[action_id] = 1.
if self.debug:
log.debug("True response = `{}`".format(response['text']))
if preds[0].lower() != response['text'].lower():
log.debug("Pred response = `{}`".format(preds[0]))
preds = preds[1:]
if d_a_masks[-1][action_id] != 1.:
log.warn("True action forbidden by action mask.")
# padding to max_num_utter
num_padds = max_num_utter - len(d_contexts)
d_features.extend([np.zeros_like(d_features[0])] * num_padds)
d_emb_context.extend([np.zeros_like(d_emb_context[0])] * num_padds)
d_key.extend([np.zeros_like(d_key[0])] * num_padds)
d_u_mask = [1] * len(d_contexts) + [0] * num_padds
d_a_masks.extend([np.zeros_like(d_a_masks[0])] * num_padds)
d_actions.extend([0] * num_padds)
b_features.append(d_features)
b_emb_context.append(d_emb_context)
b_keys.append(d_key)
b_u_masks.append(d_u_mask)
b_a_masks.append(d_a_masks)
b_actions.append(d_actions)
return b_features, b_emb_context, b_keys, b_u_masks, b_a_masks, b_actions
def train_on_batch(self, x, y):
return self.network_train_on_batch(*self.prepare_data(x, y))
def _infer(self, context, db_result=None, prob=False):
if db_result is not None:
self.db_result = db_result
features, emb_context, key = self._encode_context(context, db_result)
action_mask = self.calc_action_mask(self.prev_action)
probs = self.network_call([[features]], [[emb_context]], [[key]],
[[action_mask]], prob=True)
pred_id = np.argmax(probs)
# one-hot encoding seems to work better than probabilities
if prob:
self.prev_action = probs
else:
self.prev_action *= 0
self.prev_action[pred_id] = 1
return self._decode_response(pred_id)
def _infer_dialog(self, contexts):
self.reset()
res = []
for context in contexts:
if context.get('prev_resp_act') is not None:
action_id = self._encode_response(context.get('prev_resp_act'))
# previous action is teacher-forced
self.prev_action *= 0.
self.prev_action[action_id] = 1.
res.append(self._infer(context['text'], db_result=context.get('db_result')))
return res
def make_api_call(self, slots):
db_results = []
if self.database is not None:
# filter slot keys with value equal to 'dontcare' as
# there is no such value in database records
# and remove unknown slot keys (for example, 'this' in dstc2 tracker)
db_slots = {s: v for s, v in slots.items()
if (v != 'dontcare') and (s in self.database.keys)}
db_results = self.database([db_slots])[0]
else:
log.warn("No database specified.")
log.info("Made api_call with {}, got {} results.".format(slots, len(db_results)))
# filter api results if there are more than one
if len(db_results) > 1:
db_results = [r for r in db_results if r != self.db_result]
return db_results[0] if db_results else {}
def __call__(self, batch):
if isinstance(batch[0], str):
res = []
for x in batch:
pred = self._infer(x)
# if made api_call, then respond with next prediction
prev_act_id = np.argmax(self.prev_action)
if prev_act_id == self.api_call_id:
db_result = self.make_api_call(self.tracker.get_state())
res.append(self._infer(x, db_result=db_result))
else:
res.append(pred)
return res
return [self._infer_dialog(x) for x in batch]
def reset(self):
self.tracker.reset_state()
self.db_result = None
self.prev_action = np.zeros(self.n_actions, dtype=np.float32)
self.reset_network_state()
if self.debug:
log.debug("Bot reset.")
def destroy(self):
if callable(getattr(self.slot_filler, 'destroy', None)):
self.slot_filler.destroy()
if callable(getattr(self.embedder, 'destroy', None)):
self.embedder.destroy()
if callable(getattr(self.intent_classifier, 'destroy', None)):
self.intent_classifier.destroy()
super().destroy()
def network_call(self, features, emb_context, key, action_mask, prob=False):
feed_dict = {
self._features: features,
self._dropout_keep_prob: 1.,
self._utterance_mask: [[1.]],
self._initial_state: (self.state_c, self.state_h),
self._action_mask: action_mask
}
if self.attn:
feed_dict[self._emb_context] = emb_context
feed_dict[self._key] = key
probs, prediction, state =\
self.sess.run([self._probs, self._prediction, self._state],
feed_dict=feed_dict)
self.state_c, self.state_h = state
if prob:
return probs
return prediction
def network_train_on_batch(self, features, emb_context, key, utter_mask,
action_mask, action):
feed_dict = {
self._dropout_keep_prob: 1.,
self._utterance_mask: utter_mask,
self._features: features,
self._action: action,
self._action_mask: action_mask
}
if self.attn:
feed_dict[self._emb_context] = emb_context
feed_dict[self._key] = key
_, loss_value, prediction = \
self.sess.run([self._train_op, self._loss, self._prediction],
feed_dict=feed_dict)
return {'loss': loss_value}
def _init_network_params(self):
self.dropout_rate = self.opt['dropout_rate']
self.hidden_size = self.opt['hidden_size']
self.action_size = self.opt['action_size']
self.obs_size = self.opt['obs_size']
self.dense_size = self.opt['dense_size']
self.l2_reg = self.opt['l2_reg_coef']
attn = self.opt.get('attention_mechanism')
if attn:
self.opt['attention_mechanism'] = attn
self.attn = \
collections.namedtuple('attention_mechanism', attn.keys())(**attn)
self.obs_size -= attn['token_size']
else:
self.attn = None
def _build_graph(self):
self._add_placeholders()
# build body
_logits, self._state = self._build_body()
# probabilities normalization : elemwise multiply with action mask
_logits_exp = tf.multiply(tf.exp(_logits), self._action_mask)
_logits_exp_sum = tf.expand_dims(tf.reduce_sum(_logits_exp, -1), -1)
self._probs = tf.squeeze(_logits_exp / _logits_exp_sum, name='probs')
# loss, train and predict operations
self._prediction = tf.argmax(self._probs, axis=-1, name='prediction')
# _weights = tf.expand_dims(self._utterance_mask, -1)
# TODO: try multiplying logits to action_mask
onehots = tf.one_hot(self._action, self.action_size)
_loss_tensor = tf.nn.softmax_cross_entropy_with_logits_v2(
logits=_logits, labels=onehots
)
# multiply with batch utterance mask
_loss_tensor = tf.multiply(_loss_tensor, self._utterance_mask)
self._loss = tf.reduce_mean(_loss_tensor, name='loss')
self._loss += self.l2_reg * tf.losses.get_regularization_loss()
self._train_op = self.get_train_op(self._loss)
def _add_placeholders(self):
self._dropout_keep_prob = tf.placeholder_with_default(1.0,
shape=[],
name='dropout_prob')
self._features = tf.placeholder(tf.float32,
[None, None, self.obs_size],
name='features')
self._action = tf.placeholder(tf.int32,
[None, None],
name='ground_truth_action')
self._action_mask = tf.placeholder(tf.float32,
[None, None, self.action_size],
name='action_mask')
self._utterance_mask = tf.placeholder(tf.float32,
shape=[None, None],
name='utterance_mask')
self._batch_size = tf.shape(self._features)[0]
zero_state = tf.zeros([self._batch_size, self.hidden_size], dtype=tf.float32)
_initial_state_c = \
tf.placeholder_with_default(zero_state, shape=[None, self.hidden_size])
_initial_state_h = \
tf.placeholder_with_default(zero_state, shape=[None, self.hidden_size])
self._initial_state = tf.nn.rnn_cell.LSTMStateTuple(_initial_state_c,
_initial_state_h)
if self.attn:
_emb_context_shape = \
[None, None, self.attn.max_num_tokens, self.attn.token_size]
self._emb_context = tf.placeholder(tf.float32,
_emb_context_shape,
name='emb_context')
self._key = tf.placeholder(tf.float32,
[None, None, self.attn.key_size],
name='key')
def _build_body(self):
# input projection
_units = tf.layers.dense(self._features, self.dense_size,
kernel_regularizer=tf.nn.l2_loss,
kernel_initializer=xav())
if self.attn:
attn_scope = "attention_mechanism/{}".format(self.attn.type)
with tf.variable_scope(attn_scope):
if self.attn.type == 'general':
_attn_output = am.general_attention(
self._key,
self._emb_context,
hidden_size=self.attn.hidden_size,
projected_align=self.attn.projected_align)
elif self.attn.type == 'bahdanau':
_attn_output = am.bahdanau_attention(
self._key,
self._emb_context,
hidden_size=self.attn.hidden_size,
projected_align=self.attn.projected_align)
elif self.attn.type == 'cs_general':
_attn_output = am.cs_general_attention(
self._key,
self._emb_context,
hidden_size=self.attn.hidden_size,
depth=self.attn.depth,
projected_align=self.attn.projected_align)
elif self.attn.type == 'cs_bahdanau':
_attn_output = am.cs_bahdanau_attention(
self._key,
self._emb_context,
hidden_size=self.attn.hidden_size,
depth=self.attn.depth,
projected_align=self.attn.projected_align)
elif self.attn.type == 'light_general':
_attn_output = am.light_general_attention(
self._key,
self._emb_context,
hidden_size=self.attn.hidden_size,
projected_align=self.attn.projected_align)
elif self.attn.type == 'light_bahdanau':
_attn_output = am.light_bahdanau_attention(
self._key,
self._emb_context,
hidden_size=self.attn.hidden_size,
projected_align=self.attn.projected_align)
else:
raise ValueError("wrong value for attention mechanism type")
_units = tf.concat([_units, _attn_output], -1)
_units = tf_layers.variational_dropout(_units,
keep_prob=self._dropout_keep_prob)
# recurrent network unit
_lstm_cell = tf.nn.rnn_cell.LSTMCell(self.hidden_size)
_utter_lengths = tf.to_int32(tf.reduce_sum(self._utterance_mask, axis=-1))
_output, _state = tf.nn.dynamic_rnn(_lstm_cell,
_units,
time_major=False,
initial_state=self._initial_state,
sequence_length=_utter_lengths)
_output = tf.reshape(_output, (self._batch_size, -1, self.hidden_size))
_output = tf_layers.variational_dropout(_output,
keep_prob=self._dropout_keep_prob)
# output projection
_logits = tf.layers.dense(_output, self.action_size,
kernel_regularizer=tf.nn.l2_loss,
kernel_initializer=xav(), name='logits')
return _logits, _state
def load(self, *args, **kwargs):
self.load_params()
super().load(*args, **kwargs)
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
self.save_params()
def save_params(self):
path = str(self.save_path.with_suffix('.json').resolve())
log.info('[saving parameters to {}]'.format(path))
with open(path, 'w', encoding='utf8') as fp:
json.dump(self.opt, fp)
def load_params(self):
path = str(self.load_path.with_suffix('.json').resolve())
log.info('[loading parameters from {}]'.format(path))
with open(path, 'r', encoding='utf8') as fp:
params = json.load(fp)
for p in self.GRAPH_PARAMS:
if self.opt.get(p) != params.get(p):
raise ConfigError("`{}` parameter must be equal to saved model "
"parameter value `{}`, but is equal to `{}`"
.format(p, params.get(p), self.opt.get(p)))
def process_event(self, event_name, data):
super().process_event(event_name, data)
def reset_network_state(self):
# set zero state
self.state_c = np.zeros([1, self.hidden_size], dtype=np.float32)
self.state_h = np.zeros([1, self.hidden_size], dtype=np.float32)
|
export default vm => [
{
attrs: {
prop: 'name',
minWidth: '200',
'show-overflow-tooltip': true,
label: 'Name'
},
render: (h, scope) => {
let childNode = [h('span', {}, scope.row.name)];
return h('span', {}, childNode);
}
},
{
prop: 'code',
minWidth: '120',
label: 'Code'
},
{
prop: 'path',
minWidth: '140',
label: 'Path'
},
{
attrs: {
prop: 'isRouter',
minWidth: '100',
label: 'Route'
},
render: (h, scope) =>
h(
'el-tag',
{
props: {
effect: 'plain',
size: 'mini',
type: scope.row.isRouter === 1 ? 'success' : 'danger'
}
},
scope.row.isRouter === 1 ? 'Yes' : 'No'
)
},
{
prop: 'cmpPath',
minWidth: '140',
label: 'Component Path'
},
{
attrs: {
prop: 'hidden',
minWidth: '100',
label: 'Hidden'
},
render: (h, scope) =>
h(
'el-tag',
{
props: {
effect: 'plain',
size: 'mini',
type: scope.row.hidden === 1 ? 'success' : 'danger'
}
},
scope.row.hidden === 1 ? 'Yes' : 'No'
)
},
{
prop: 'sort',
minWidth: '80',
label: 'Sort'
},
{
attrs: {
prop: 'icon',
minWidth: '80',
label: 'Icon'
},
render: (h, scope) =>
h('svg-icon', {
props: {
'icon-class': scope.row.icon
},
style: {
color: '#999'
}
})
},
{
attrs: {
prop: 'statusName',
minWidth: '80',
label: 'Status'
},
render: (h, scope) =>
h(
'el-tag',
{
props: {
effect: 'plain',
size: 'mini',
type: scope.row.status === 1 ? 'success' : 'danger'
}
},
scope.row.statusName
)
},
{
prop: 'createdAt',
minWidth: '160',
label: 'Created At'
},
{
attrs: {
prop: 'action',
width: '100',
fixed: vm.mobile ? false : 'right',
label: 'Actions'
},
render: (h, scope) => {
let childNode = [
h(
'el-button',
{
props: {
type: 'text',
size: 'small'
},
on: {
click: e => {
e.stopPropagation();
vm.handleDataUpdate(scope.row.id);
}
},
directives: [
{
name: 'permission',
value: 'edit-menu'
}
]
},
'Edit'
)
];
return h('div', {}, childNode);
}
}
];
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class Waitress {
constructor(validator, timeoutFormatter) {
this.waiters = new Map();
this.timeoutFormatter = timeoutFormatter;
this.validator = validator;
this.currentID = 0;
}
resolve(payload) {
for (const entry of this.waiters.entries()) {
const index = entry[0];
const waiter = entry[1];
if (waiter.timedout) {
this.waiters.delete(index);
}
else if (this.validator(payload, waiter.matcher)) {
clearTimeout(waiter.timer);
waiter.resolved = true;
waiter.resolve(payload);
this.waiters.delete(index);
}
}
}
remove(ID) {
const waiter = this.waiters.get(ID);
if (waiter) {
if (!waiter.timedout && waiter.timer) {
clearTimeout(waiter.timer);
}
this.waiters.delete(ID);
}
}
waitFor(matcher, timeout) {
const ID = this.currentID++;
const promise = new Promise((resolve, reject) => {
const object = { matcher, resolve, reject, timedout: false, resolved: false, ID };
this.waiters.set(ID, object);
});
const start = () => {
const waiter = this.waiters.get(ID);
if (waiter && !waiter.resolved && !waiter.timer) {
waiter.timer = setTimeout(() => {
const message = this.timeoutFormatter(matcher, timeout);
waiter.timedout = true;
waiter.reject(new Error(message));
}, timeout);
}
return { promise, ID };
};
return { ID, start };
}
}
exports.default = Waitress;
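// A minimal usage sketch: the validator/formatter bodies below are assumptions,
// while the Waitress calls themselves come from this file (waitFor returns
// { ID, start }, start() arms the timeout and returns the promise, and resolve()
// fulfils any waiter whose matcher the validator accepts).
//
//   const waitress = new Waitress(
//       (payload, matcher) => payload.id === matcher.id,          // validator
//       (matcher, timeout) => `Timed out after ${timeout}ms`      // timeoutFormatter
//   );
//   const { ID, start } = waitress.waitFor({ id: 1 }, 1000);
//   const { promise } = start();
//   waitress.resolve({ id: 1, data: "payload" });                 // fulfils the promise
//   promise.then(console.log);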
//# sourceMappingURL=waitress.js.map |
import pandas as pd
from backlight.labelizer.common import Label
from backlight.labelizer.common import TernaryDirection as TD
from backlight.signal.signal import Signal
def _r(a: int, b: int) -> float:
return a / b if b != 0 else 0.0
def calc_ternary_metrics(sig: Signal, lbl: Label) -> pd.DataFrame:
"""Compute metrics on ternary signal.
Args:
sig : Signal to evaluate.
lbl : Correct answer.
Returns:
DataFrame of metrics.
"""
sig = sig.dropna()
lbl = lbl.dropna()
uu = ((sig.pred == TD.U.value) & (lbl.label == TD.U.value)).sum()
un = ((sig.pred == TD.U.value) & (lbl.label == TD.N.value)).sum()
ud = ((sig.pred == TD.U.value) & (lbl.label == TD.D.value)).sum()
nu = ((sig.pred == TD.N.value) & (lbl.label == TD.U.value)).sum()
nn = ((sig.pred == TD.N.value) & (lbl.label == TD.N.value)).sum()
nd = ((sig.pred == TD.N.value) & (lbl.label == TD.D.value)).sum()
du = ((sig.pred == TD.D.value) & (lbl.label == TD.U.value)).sum()
dn = ((sig.pred == TD.D.value) & (lbl.label == TD.N.value)).sum()
dd = ((sig.pred == TD.D.value) & (lbl.label == TD.D.value)).sum()
total = len(sig)
cnt_u = uu + un + ud
cnt_n = nu + nn + nd
cnt_d = du + dn + dd
hit_ratio = _r(uu + dd, uu + ud + du + dd)
hit_ratio_u = _r(uu, uu + ud)
hit_ratio_d = _r(dd, du + dd)
hedge_ratio = _r(uu + un + dn + dd, uu + un + ud + du + dn + dd)
hedge_ratio_u = _r(uu + un, uu + un + ud)
hedge_ratio_d = _r(dn + dd, du + dn + dd)
neutral_ratio = _r(cnt_n, total)
coverage = _r(cnt_u + cnt_d, total) # = 1.0 - neutral_ratio
coverage_u = _r(cnt_u, total)
coverage_d = _r(cnt_d, total)
lbl = lbl.reindex(sig.index)
pl = lbl[sig.pred != TD.N.value].label_diff.copy()
pl.loc[sig.pred == TD.D.value] *= -1
avg_pl = pl.mean()
total_pl = pl.sum()
m = pd.DataFrame.from_records(
[
("cnt_uu", uu),
("cnt_un", un),
("cnt_ud", ud),
("cnt_nu", nu),
("cnt_nn", nn),
("cnt_nd", nd),
("cnt_du", du),
("cnt_dn", dn),
("cnt_dd", dd),
("cnt_total", total),
("hit_ratio", hit_ratio),
("hit_ratio_u", hit_ratio_u),
("hit_ratio_d", hit_ratio_d),
("hedge_ratio", hedge_ratio),
("hedge_ratio_u", hedge_ratio_u),
("hedge_ratio_d", hedge_ratio_d),
("neutral_ratio", neutral_ratio),
("coverage", coverage),
("coverage_u", coverage_u),
("coverage_d", coverage_d),
("avg_pl", avg_pl),
("total_pl", total_pl),
]
).set_index(0)
del m.index.name
m.columns = ["metrics"]
return m.T
|
var reactComponentSymbol = Symbol.for("r2wc.reactComponent");
var renderSymbol = Symbol.for("r2wc.reactRender");
var shouldRenderSymbol = Symbol.for("r2wc.shouldRender");
var define = {
// Creates a getter/setter that re-renders every time a property is set.
expando: function (receiver, key, value) {
Object.defineProperty(receiver, key, {
enumerable: true,
get: function () {
return value;
},
set: function (newValue) {
value = newValue;
this[renderSymbol]();
}
});
receiver[renderSymbol]();
}
}
/**
* Converts a React component into a web component by wrapping it in a Proxy object.
* @param {ReactComponent} ReactComponent - React component to wrap.
* @param {React} React - React instance.
* @param {ReactDOM} ReactDOM - ReactDOM instance.
* @param {Object} options - Optional parameters
* @param {String?} options.shadow - Use shadow DOM rather than light DOM.
* @param {String} embeddStyle - Inline CSS injected when options.shadow, options.css and options.embeddCss are all set.
*/
export default function (ReactComponent, React, ReactDOM, options = {}, embeddStyle = "") {
var eventHandling = {
dispatchEvent: {},
addEventListener: {},
removeEventListener: {}
}
var renderAddedProperties = { isConnected: "isConnected" in HTMLElement.prototype };
var rendering = false;
// Create the web component "class"
var WebComponent = function () {
var self = Reflect.construct(HTMLElement, arguments, this.constructor);
if (options.shadow) {
var sr = self.attachShadow({ mode: 'open' });
}
return self;
};
// Make the class extend HTMLElement
var targetPrototype = Object.create(HTMLElement.prototype);
targetPrototype.constructor = WebComponent;
// But have that prototype be wrapped in a proxy.
var proxyPrototype = new Proxy(targetPrototype, {
has: function (target, key) {
return key in ReactComponent.propTypes ||
key in targetPrototype;
},
// when any undefined property is set, create a getter/setter that re-renders
set: function (target, key, value, receiver) {
if (rendering) {
renderAddedProperties[key] = true;
}
if (typeof key === "symbol" || renderAddedProperties[key] || key in target) {
return Reflect.set(target, key, value, receiver);
} else {
define.expando(receiver, key, value)
}
return true;
},
// makes sure the property looks writable
getOwnPropertyDescriptor: function (target, key) {
var own = Reflect.getOwnPropertyDescriptor(target, key);
if (own) {
return own;
}
if (key in ReactComponent.propTypes) {
return { configurable: true, enumerable: true, writable: true, value: undefined };
}
}
});
WebComponent.prototype = proxyPrototype;
// Setup lifecycle methods
targetPrototype.connectedCallback = function () {
// Once connected, it will keep updating the innerHTML.
// We could add a render method to allow this as well.
this[shouldRenderSymbol] = true;
this[renderSymbol]();
};
targetPrototype[renderSymbol] = function () {
if (this[shouldRenderSymbol] === true) {
var data = {};
Object.keys(this).forEach(function (key) {
if (renderAddedProperties[key] !== false) {
data[key] = this[key];
}
}, this);
rendering = true;
// Container is either shadow DOM or light DOM depending on `shadow` option.
let container = options.shadow ? this.shadowRoot : this;
// add eventhandling stuff
let dispatchEvent = function (ev) { container.dispatchEvent(ev) };
let addEventListener = function (n, f) { container.addEventListener(n, f) };
let removeEventListener = function (n, f) { container.removeEventListener(n, f) };
eventHandling.dispatchEvent = dispatchEvent;
eventHandling.addEventListener = addEventListener;
eventHandling.removeEventListener = removeEventListener;
// Use react to render element in container
this[reactComponentSymbol] = ReactDOM.render(React.createElement(ReactComponent, data), container);
// adding styleSheets
if (options.css && options.shadow) {
if (options.embeddCss) {
let style = document.createElement('style');
style.innerHTML = embeddStyle;
container.appendChild(style);
} else {
let style = document.createElement('link');
style["rel"] = "stylesheet";
style["type"] = "text/css";
style["href"] = options.css;
container.appendChild(style);
}
}
rendering = false;
}
};
// Handle attributes changing
if (ReactComponent.propTypes) {
WebComponent.observedAttributes = Object.keys(ReactComponent.propTypes);
targetPrototype.attributeChangedCallback = function (name, oldValue, newValue) {
// TODO: handle type conversion
this[name] = newValue;
};
}
WebComponent.eventHandling = eventHandling;
return WebComponent;
}
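// A minimal usage sketch (the import path, component and tag names are illustrative,
// not from this file): the returned class extends HTMLElement, so it can be
// registered as a custom element.
//
//   import reactToWebComponent from "./react-to-webcomponent";
//   const Greeting = (props) => React.createElement("h1", null, `Hello, ${props.name}`);
//   customElements.define(
//     "my-greeting",
//     reactToWebComponent(Greeting, React, ReactDOM, { shadow: true })
//   );
//
// Attributes listed in `Greeting.propTypes` are then observed and forwarded as
// props on each render, as handled by `observedAttributes` above.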
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Professor(models.Model):
user = models.OneToOneField(User, null=True, blank=True, on_delete=models.SET_NULL)  # on_delete is required on Django 2+; SET_NULL assumed since the field is nullable
name = models.CharField(max_length=100)
info = models.TextField(max_length=1000)
image = models.ImageField(null=True, blank=True)
hidden = models.BooleanField(default=False)
def __str__(self):
return self.name
|
(function($){
//var baseURL = 'https://almiranezgecilie68358.ipage.com/BSIT2019/goodbuy/';
//var baseURL = 'http://localhost/goodBuy1/';
var baseURL = 'http://paranaque.goodbuy-bolinao.com/';
// var baseURL = 'http://localhost/goodbuyParanaque/';
var timer = null;
var timeout = 500;
//var baseURL = 'https://almiranezgecilie68358.ipage.com/BSIT2019/goodbuy/';
//sessionStorage.setItem('terminal_id',0);
//sessionStorage.setItem('start_day_success',false);
$('#btn-pay').attr("disabled", true);
$('#payment').on('keyup', function () {
compute_change('#payment');
var finalPayment = parseFloat($(this).val() || 0) + parseFloat($('#use-pts').val() || 0);
if((parseFloat($(this).val() || 0) < parseFloat($('#grand-total').val() || 0)) || (finalPayment < parseFloat($('#grand-total').val() || 0)))
{
$('#btn-pay').attr("disabled", true);
}
else
{
$('#add-error').empty();
$('#btn-pay').attr("disabled",false);
}
});
$("#div-prod-info").hide();
if(sessionStorage.getItem('start_day_success') == 'true')
{
$('#form-pos :input').removeAttr("disabled");
}
else
{
$('#day-status').html('Start Day');
$('#form-pos :input').attr('disabled','disabled');
$("#cashier-mode-menu").children().prop('disabled',true);
$("#btn-end-day").prop('disabled',true);
}
disable_rpt_type();
$("#rpt-type").on('changed.bs.select', function (event, clickedIndex, newValue, oldValue) {
disable_rpt_type();
});
$("#div-card").hide();
refreshCardList(baseURL, 'cashier/sales_management/get_membership_id');
$('#mem-id').on('keyup', function () {
var address = "cashier/sales_management/get_card_no";
if($(this).val())
{
$('#ul-card').empty();
$.ajax({
type: "POST",
url: baseURL + address,
context: document.body,
cache : false,
data: $('#dp-form').serialize() + "&ajax_card=" +$(this).val(),
dataType: 'json',
success: function(data) {
resultCtr = 1;
$("#div-card").show();
$.each(data, function(i, p) {
$.each(p, function(i, p) {
if(p)
{
if(resultCtr == 1)
$('#ul-card').empty();
//$('#ul-prod-name').empty();
resultCtr++;
result = p.toString().split(',');
if (result[2])
{
$("#ul-card").append('<li class="li-card li-ajax" data-value="'+result[0]+'" data-points="'+ result[1]+'">'+result[2]+'</li>');
}
else
$("#ul-card").append('<li class="li-card li-ajax" data-value="'+result[0]+'" data-points="'+ result[1]+'">'+result[0]+'</li>');
}
});
});
},
error: function(req, textStatus, errorThrown) {
//alert('Prod Name Keyup Error: ' + textStatus + ' ' +errorThrown);
}
});
}
else
{
$('#stored-pts').html('');
}
$("#div-card").hide();
});
var href = null;
if($("#rpt-type").val() == 'X')
{
href = 'cashier/x_report/'+$('#date-from').val()+'\\'+$('#date-to').val()+'\\'+$('#terminal').val()+'\\'+$('#cashier').val();
}
else if($("#rpt-type").val() == 'Z')
{
href = baseURL + 'cashier/z_report/'+$('#date-from').val()+'\\'+$('#date-to').val()+'\\'+$('#terminal').val()+'\\'+$('#cashier').val();
}
$("#btn-rpt").on('click', function () {
if($("#rpt-type").val() == 'open')
{
href = baseURL + 'cashier/open_report/'+$('#date-from').val()+'\\'+$('#date-to').val()+'\\'+$('#terminal').val()+'\\'+$('#cashier').val();
}
window.open(href,'popup', 'width=800,height=650,scrollbars=no,resizable=yes'); //'width=800,height=650,scrollbars=no,resizable=yes'
});
if (sessionStorage.getItem('cashier_success') == 'true') {
showNotification('bg-black','Paid Successfully!', 'top', 'right', null, null);
sessionStorage.setItem('cashier_success','false');
}
if((window.location.href.indexOf('cashier/cashier_mode') > -1) || window.location.href.indexOf("return_item/replacement/") > -1)
//tag_scanner(baseURL);
refreshProdCodeList(baseURL, "cashier/sales_management/get_sale_prod_info", false);
//refreshProdCodeList(baseURL, "admin/sales_management/get_sale_prod_info", true);
$("#div-prod-code").hide();
submit_form(baseURL, '#form-return', "#btn-submit-return",$('#form-return').attr('action'), 'cashier/return_item/replacement/');
$('#prod-code').on('keyup', function () {
var address = "cashier/sales_management/get_prod_codes";
clearTimeout(timer);
timer = setTimeout(function(){
generate_prod_codes(baseURL, address, '#prod-code');
}, timeout);
});
$('#prod-code2').on('keyup', function () {
var address = "cashier/sales_management/get_prod_codes";
clearTimeout(timer);
timer = setTimeout(function(){
generate_prod_codes(baseURL, address, '#prod-code2');
}, timeout);
});
$('.check-return').on('click', function () {
if(window.location.href.indexOf("return_item/replacement") > -1)
{
var title = "Are you sure you want to leave this page and cancel return transaction?";
var msg = "This action cannot be undone.";
var url = baseURL + "cashier/return_item/cancel_return/"+$('#trans-id').val();
showConfirmMessage(title, msg, url);
return false;
}
});
$('#add-prod').on('click', function () {
get_pv_info(baseURL, 'cashier/get_pv_info');
updateScroll();
$("#prod-code").focus();
return false;
});
$('#btn-open-sday').on('click', function () {
submit_modal_form('#modal-start-day','#btn-start-day', '#form-start-day', $('#form-start-day').attr('action'), null);
return false;
});
$('#add-prod2').on('click', function () {
get_pv_info2(baseURL, 'cashier/get_pv_info');
return false;
});
$("#cash").prop('disabled',true);
$('#btn-save').on('click', function (e) {
e.preventDefault();
e.stopImmediatePropagation();
$('#btn-save').attr("disabled", true);
$('.validation-errors').empty();
$.ajax({
type: "POST",
url: $('#form-replacement').attr('action'),
context: document.body,
cache : false,
data: $('#form-replacement').serialize(),
dataType: 'json',
success: function(data) {
$.each(data, function (i,p) {
$('#'+i+'_error').html(p);
if(data.error)
$('#cash_error').html(data.error_msg);
});
if(data.success)
{
window.location = baseURL + 'cashier/return_item/receipt/'+data.trans_id;
}
},
error: function(req, textStatus, errorThrown) {
//this runs when the server responds with something other than a 200 OK HTTP status
alert('Save Error: ' + textStatus + ' ' +errorThrown);
},
complete: function(){
$('#btn-save').attr("disabled", false);
}
});
});
special_discount();
$('#sdisc-type').on('changed.bs.select', function (event, clickedIndex, newValue, oldValue) {
special_discount();
});
$('#discount').on('keyup', function () {
if($("input[name='d-type']:checked").val() == 'percent' && parseFloat($(this).val() ) > 100)
{
$('#add-error').html('Invalid discount percent.');
$('#add-prod').attr("disabled", true);
}
else
{
$('#add-error').empty();
$('#add-prod').attr("disabled",false);
}
});
$('#receipt-discount').on('keyup', function () {
if($("input[name='d-type2']:checked").val() == 'percent' && parseFloat($(this).val() ) > 100)
{
$('#d-perc-error').html('Invalid discount percent.');
$('#btn-pay').attr("disabled", true);
}
else
{
$('#btn-pay').attr("disabled", false);
$('#d-perc-error').empty();
$('#new-totall').val(new_total_amt());
if(new_total_amt() == 0)
{
$('#grand-total').val(parseFloat($('#totall').val()));
$('#earned-pts').html('EARNED POINTS: '+ Math.abs(parseFloat($('#totall').val())/200) );
}
else
{
$('#grand-total').val(parseFloat($('#new-totall').val()));
$('#earned-pts').html('EARNED POINTS: '+ Math.abs(parseFloat($('#new-totall').val())/200) );
}
special_discount();
compute_change('#payment');
}
});
$('#btn-pay').click(function(e) {
e.preventDefault();
e.stopImmediatePropagation();
$('.validation-errors').empty();
$('#btn-pay').attr("disabled", true);
$.ajax({
type: "POST",
url: $('#form-pos').attr('action'),
context: document.body,
cache : false,
data: $('#form-pos').serialize(),
dataType: 'json',
success: function(data) {
$.each(data, function (i,p) {
$('#'+i+'_error').html(p);
if(data.error == 'true') $('[name="'+i+'"]').val(p);
});
if(data.success)
{
window.location = baseURL + 'cashier/cashier_mode/'+data.invoice;
sessionStorage.setItem('cashier_success','true');
}
$(this).prop("disabled", false);
},
error: function(req, textStatus, errorThrown) {
//this runs when the server responds with something other than a 200 OK HTTP status
alert('Pay Error: ' + textStatus + ' ' +errorThrown);
},
complete: function(){
$('#btn-pay').attr("disabled", false);
}
});
});
$('#open-payout').on('click', function () {
submit_modal_form('#modal-payout','#btn-payout', '#form-payout', $('#form-payout').attr('action'), null);
return false;
});
$('#open-ns').on('click', function () {
submit_modal_form('#modal-ns','#btn-ns', '#form-ns', $('#form-ns').attr('action'), null);
return false;
});
$('.cashier-confirm').on('click', function () {
showConfirmMessage($(this).attr('data-title'), $(this).attr('data-msg'), $(this).attr('data-url'));
return false;
});
$('.open-print-window').on('click', function() {
window.open($(this).attr('data-href'),'popup', 'width=800,height=650,scrollbars=no,resizable=yes'); //'width=800,height=650,scrollbars=no,resizable=yes'
});
$('#dt-orders').DataTable({
"stateSave": true
});
$('#dt-orders tbody').on('click', '.open-deliver-date', function () {
//load_modal_form('#modal-add-qty','#form-add-qty', 'admin/inventory/get_prod_id/'+ $(this).attr('data-prod-id'));
$('#modal-order-no').attr('value', $(this).attr('data-order-no'));
$('#modal-deliver-date').modal('show');
submit_modal_form('#modal-deliver-date','#btn-set-deliver-date', '#form-set-deliver-date', $(this).attr('data-href'), null);
return false;
});
$('#dt-orders tbody').on('click', '.confirm', function () {
showConfirmMessage($(this).attr('data-title'), $(this).attr('data-msg'), $(this).attr('data-url'));
return false;
});
$('#discount').attr("disabled", true);
$('#receipt-discount').attr("disabled", true);
$('.d-type').on('click', function () {
if($(this).val() == 'none')
{
$('#discount').attr("disabled", true);
$('#discount').val("");
}
else
$('#discount').attr("disabled", false);
});
$('.d-type2').on('click', function () {
if($(this).val() == 'none')
{
$('#receipt-discount').attr("disabled", true);
$('#receipt-discount').val("");
$('#new-totall').val(0);
$('#grand-total').val(parseFloat($('#totall').val()));
}
else
$('#receipt-discount').attr("disabled", false);
});
var sessionTimer = setInterval(function(){
$.ajax({
url: baseURL + 'cashier/sessiontimeout',
context: document.body,
cache : false,
beforeSend: function(){},
success: function(data){
console.info(data);
}
});
},120000);
}(jQuery));
//https://stackoverflow.com/questions/18614301/keep-overflow-div-scrolled-to-bottom-unless-user-scrolls-up
function special_discount()
{
var total = total_amt();
var newTotal = new_total_amt();
if($('#sdisc-type').val() == 'SC' || $('#sdisc-type').val() == 'PWD')
{
if(newTotal == 0)
{
$('#totall').val(total - (total*0.20));
$('#grand-total').val($('#totall').val());
$('#earned-pts').html('EARNED POINTS: '+ Math.abs(parseFloat($('#totall').val())/200) );
}
else
{
$('#new-totall').val(newTotal - (newTotal*0.20));
$('#grand-total').val($('#new-totall').val());
$('#earned-pts').html('EARNED POINTS: '+ Math.abs(parseFloat($('#new-totall').val())/200) );
}
}
else
{
if(newTotal == 0)
{
$('#totall').val(total);
$('#grand-total').val($('#totall').val());
$('#earned-pts').html('EARNED POINTS: '+ Math.abs(parseFloat($('#totall').val())/200) );
}
else
{
$('#new-totall').val(newTotal);
$('#grand-total').val($('#new-totall').val());
$('#earned-pts').html('EARNED POINTS: '+ Math.abs(parseFloat($('#new-totall').val())/200) );
}
}
}
function compute_change(paymentSelector)
{
var cash = parseFloat($(paymentSelector).val());
if(new_total_amt() > 0)
$('#change').html((cash-new_total_amt()));
else
$('#change').html((cash-total_amt()));
}
function disable_rpt_type()
{
if($("#rpt-type").val() == 'X')
$(".rpt-date").prop('disabled',true);
else
$(".rpt-date").prop('disabled',false);
}
function updateScroll(){
var element = document.getElementById("item-list");
element.scrollTop = element.scrollHeight;
}
function generate_prod_codes(baseURL, address, selector)
{
$( ".validation-errors" ).empty();
if($(selector).val())
{
$('#ul-prod-code').empty();
$.ajax({
type: "POST",
url: baseURL + address,
context: document.body,
cache : false,
data: $('#dp-form').serialize() + "&ajax_prod_code=" +$(selector).val(),
dataType: 'json',
success: function(data) {
resultCtr = 1;
$.each(data, function(i, p) {
$.each(p, function(i, p) {
$("#div-prod-code").show();
//end
if(p)
{
if(resultCtr == 1)
$('#ul-prod-name').empty();
resultCtr++;
dropdownArray = p.toString().split('DIV');
if(dropdownArray[0] == 0)
$("#ul-prod-code").append('<li class="li-prod-code li-ajax" style="color:red;" data-value="'+dropdownArray[2]+'">'+dropdownArray[1]+'</li>');
else
$("#ul-prod-code").append('<li class="li-prod-code li-ajax" data-value="'+dropdownArray[2]+'">'+dropdownArray[1]+'</li>');
}
});
});
},
error: function(req, textStatus, errorThrown) {
//alert('Generate Prod. Code Error: ' + textStatus + ' ' +errorThrown);
}
});
}
$("#div-prod-code").hide();
}
function refreshCardList(baseURL, address) {
$('#ul-card li').off();
$("#ul-card").on("click", '.li-card' , function(){
card = $(this).attr('data-value');
if(card.length == 11)
{
$.ajax({
type: "POST",
url: baseURL + address,
data: $('#dp-form').serialize() + "&ajax_card=" +card,
dataType: 'json',
success: function(data) {
$("#div-card").show();
$.each(data, function(i, p) {
$.each(p, function(i, p) {
if(p)
{
result = p.toString().split(',');
$('#mem-id').val(result[0]);
$('#stored-pts').html('<b> STORED POINTS: '+result[1]+'</b>');
$("#div-card").hide();
}
});
});
},
error: function(req, textStatus, errorThrown) {
//alert('Refresh Card Error: ' + textStatus + ' ' +errorThrown);
}
});
}
else
{
$('#mem-id').val( $(this).attr('data-value'));
$('#stored-pts').html('<b>STORED PTS: </b>' +$(this).attr('data-points'));
}
$("#div-card").hide();
});
}
function refreshProdCodeList(baseURL, address, replacement) {
$('#ul-prod-code li').off();
$("#ul-prod-code").on("click", '.li-prod-code' , function(){
sku = $(this).attr('data-value');
$("#prod-code").val($(this).attr('data-value'));
$("#prod-code2").val($(this).attr('data-value'));
$("#prod-qty2").val(1);
$("#prod-qty").val(1);
$.ajax({
type: "POST",
url: baseURL + address,
context: document.body,
cache : false,
data: $('#dp-form').serialize() + "&ajax_sku=" +sku,
dataType: 'json',
success: function(data) {
$("#div-prod-info").show();
$.each(data, function(i, p) {
$.each(p, function(i, p) {
if(p)
{
//console.log(i.toString() + " " + p.toString());
if(i.toString() == 'sku')
{
$("#prod-code").val(p.toString());
$("#prod-qty").val(1);
$("#prod-code2").val(p.toString());
$("#prod-qty2").val(1);
//console.log(p.toString());
}
$('#'+i.toString()).val(p.toString());
}
});
});
},
error: function(req, textStatus, errorThrown) {
//alert('Refresh Prod. Code Error: ' + textStatus + ' ' +errorThrown);
}
});
$("#div-prod-code").hide();
});
}
function get_pv_info2(baseURL, address)
{
$.ajax({
type: "POST",
url: baseURL + address,
context: document.body,
cache : false,
data: $('#dp-form').serialize() + "&ajax_sku=" +$('#prod-code2').val(),
dataType: 'json',
success: function(data) {
resultCtr = 1;
sku = stock = price = null;
$.each(data, function(i, p) {
$.each(p, function(i, p) {
if(p)
{
result = p.toString().split('#')
sku = result[0];
name = result[1];
price = result[2];
stock = result[3];
options = result[4];
//console.log(p.toString());
}
});
});
sku = $('#prod-code2').val();
qty = parseInt($('#prod-qty2').val(), 10);
$('.validation-errors').empty();
if((!qty || qty == 0) && !sku)
{
$('#add-error').html('Error: Product code and quantity are required.');
}
else if(!sku)
{
$('#add-error').html('Error: Product code is required.');
}
else if(!qty || qty == 0)
{
$('#add-error').html('Error: Product quantity is required.');
}
else if( qty <= 0 || stock < qty || stock == 0)
{
$('#add-error').html('Error: Invalid Product Quantity.');
}
else
{
discount = 0;
amt = qty * price;
var index = getIndex('input[name^="sku[]"]', sku);
if(index < 0)
clientQty = qty;
else
clientQty = parseInt($('input[name^="qty[]"]').eq(index).val(), 10) + qty;
if(!clientQty)
clientQty = 0;
//console.log('SKU: '+ sku + ' QTY: ' + clientQty + ' INDEX: '+ index);
invalid_qty(baseURL, 'return', sku, clientQty, name + '('+options+')', price, discount, qty, amt, index);
$('#prod-code2').val(null);
$('#prod-qty2').val(null);
$("#div-prod-info").hide();
}
},
error: function(req, textStatus, errorThrown) {
alert('Prod Name Keyup Error: ' + textStatus + ' ' +errorThrown);
}
});
}
function get_pv_info(baseURL, address)
{
$.ajax({
type: "POST",
url: baseURL + address,
context: document.body,
cache : false,
data: $('#dp-form').serialize() + "&ajax_sku=" +$('#prod-code').val(),
dataType: 'json',
success: function(data) {
resultCtr = 1;
sku = stock = price = null;
$.each(data, function(i, p) {
$.each(p, function(i, p) {
if(p)
{
result = p.toString().split('#')
sku = result[0];
name = result[1];
price = result[2];
stock = result[3];
options = result[4];
}
});
});
sku = $('#prod-code').val();
qty = parseInt($('#prod-qty').val(), 10);
$('.validation-errors').empty();
if(!sku)
{
$('#add-error').html('Error: Product code is required.');
}
else if(!qty || qty == 0)
{
$('#add-error').html('Error: Product quantity is required.');
}
else if( qty <= 0 || stock < qty || stock == 0)
{
$('#add-error').html('Error: Invalid Product Quantity.');
}
else
{
if($("input[name='d-type']:checked").val() == 'amount')
{
if($('#discount').val())
{
discount = (price * qty ) - parseFloat($('#discount').val());
amt = parseFloat($('#discount').val());
}
else
{
discount = 0;
amt = price;
}
}
else if($("input[name='d-type']:checked").val() == 'percent')
{
if($('#discount').val())
{
amt = (price * qty) - ((price * qty) * (parseFloat($('#discount').val()) / 100));
discount = (price * qty) - amt;
}
else
{
discount = 0;
amt = price;
}
//discount = price - (qty *(price * (parseFloat($('#discount').val()) / 100)));
}
else
{
discount = 0;
amt = qty * price;
}
var index = getIndex('input[name^="sku[]"]', sku);
if(index < 0)
clientQty = qty;
else
clientQty = parseInt($('input[name^="qty[]"]').eq(index).val(), 10) + qty;
//console.log('Index: '+index + ' Qty: ' + clientQty);
if(!clientQty)
clientQty = 0;
invalid_qty(baseURL, 'cashier_mode',sku, clientQty, name + '('+options+')', price, discount, qty, amt, index);
//End invalid quantity
$('#prod-code').val(null);
$('#prod-qty').val(1);
//$('#receipt-discount').val(null);
$('#discount').val(null);
$(".non").prop("checked", true);
$("#div-prod-info").hide();
}
//console.log(sku);
},
error: function(req, textStatus, errorThrown) {
alert('Prod Name Keyup Error: ' + textStatus + ' ' +errorThrown);
}
});
}
function invalid_qty(baseURL, caller, sku, clientQty, name, price, discount, qty, amt, index)
{
$.ajax({
type: "POST",
url: baseURL + 'cashier/sales_management/check_qty',
context: document.body,
cache : false,
data: $('#dp-form').serialize() + "&ajax_sku=" + sku + "&ajax_qty=" +clientQty,
dataType: 'json',
success: function(data) {
$.each(data, function (i,p) {
if(data.error == true)
{
if(caller == 'return')
$('#cash_error').html(data.msg);
else
$('#add-error').html(data.msg);
hasError = true;
}
else
{
if(caller == 'return')
addToReplacementBox(sku, name, price, qty, amt, index);
else
addToInvoiceBox(sku, name, price, discount, qty, amt, index);
refreshRemoveItem();
total_amt_qty();
$('#totall').val(total_amt());
$('#earned-pts').html('EARNED POINTS: '+ total_amt()/200);
$('#grand-total').val($('#totall').val());
special_discount();
compute_change('#payment');
}
});
},
error: function(req, textStatus, errorThrown) {
alert('Check Qty Error: ' + textStatus + ' ' +errorThrown);
}
});
}
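// Editor's note: getIndex below returns the zero-based row index of the first element
// matched by `selector` whose value equals `value`, or -1 when no row matches
// (it is used to merge duplicate SKUs into an existing invoice row).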
function getIndex(selector, value)
{
var ctr = -1;
var finalCtr = -1;
$(selector).each(function() {
ctr++;
if(value == $(this).val())
{
finalCtr = ctr;
return false; // return false breaks out of the jQuery .each loop at the first match
}
});
if(finalCtr != -1)
return finalCtr;
else
return -1;
}
function total_amt_qty()
{
$('#total-amt').empty();
$('#totall').empty();
var total = 0;
$('input[name^="amt[]"]').each(function() {
total = total + parseFloat($(this).val());
});
var returnAmt = parseFloat($('#return-amt').val());
$('#total-amt').html(total);
$('#totall').val(total);
$('#balance').html();
//Math.abs(total - returnAmt)
$('.input-total-amt').val(total);
$('.input-total-amt').html('₱ ' + total);
var balance = returnAmt - total;
$('.input-balance').val(balance);
$('.input-balance').html('₱ ' + balance);
/*
let balance = Math.abs(total - returnAmt);
if(total < returnAmt)
{
$('.input-balance').val(balance);
$('.input-balance').html('₱ ' + balance);
}
else
{
$('.input-balance').val(0);
$('.input-balance').html('₱ ' + 0);
}
*/
if(total > returnAmt)
{
$("#cash").prop('required',true);
$("#cash").prop('disabled',false);
}
else
{
$("#cash").prop('required',false);
$("#cash").prop('disabled',true);
$("#cash").val( 0);
}
$('#total-qty').empty();
total = 0;
$('input[name^="qty[]"]').each(function() {
total = total + parseInt($(this).val(), 10);
});
$('#total-qty').html(total);
}
//if ctr
function total_qty()
{
var total = 0;
$('input[name^="qty[]"]').each(function() {
total = total + parseInt($(this).val(), 10);
});
return total;
}
function total_amt()
{
var total = 0;
$('input[name^="amt[]"]').each(function() {
total = total + parseFloat($(this).val());
});
return total;
}
function total_discount()
{
var total = 0;
$('input[name^="discount[]"]').each(function() {
total = total + parseFloat($(this).val());
});
return total;
}
function new_total_amt()
{
var amt = 0;
var total = total_amt();
var receiptDiscount = $('#receipt-discount').val();
if($("input[name='d-type2']:checked").val() == 'amount')
{
amt = receiptDiscount;
}
else if($("input[name='d-type2']:checked").val() == 'percent')
{
amt = total_amt() - ( total_amt() * (parseFloat(receiptDiscount) / 100));
}
else
{
amt = total_amt();
}
return amt || 0;
}
function addToReplacementBox(sku, name, price, qty, amt, index)
{
if(index != -1)
{
//$('#'+sku).attr('value', );
var currentAmt = parseFloat($('input[name^="amt[]"]').eq(index).val()) + amt;
var currentQty = parseInt($('input[name^="qty[]"]').eq(index).val(), 10) +qty;
$('input[name^="amt[]"]').eq(index).val(currentAmt);
$('input[name^="qty[]"]').eq(index).val(currentQty);
$('#amt-'+sku).empty();
$('#qty-'+sku).empty();
$('#amt-'+sku).html(currentAmt);
$('#qty-'+sku).html(currentQty);
}
else
{
$('.remove-item').last().remove();
$("#dt-invoice").find('tbody')
.append($('<tr>')
.append($('<td>')
.append($('<p>'+name+'</p>')
)
.append($('<input style="border: none;border-color: transparent;" readonly>')
.attr('type', 'hidden')
.attr('value', sku)
.attr('name', 'sku[]')
)
)
.append($('<td>')
.append($('<p>'+price+'</p>')
)
)
.append($('<td>')
.append($('<div id="qty-'+sku+'">'+qty+'</div>')
)
)
.append($('<td>')
.append($('<div id="amt-'+sku+'">'+amt+'</div>')
)
)
.append($('<td>')
.append($('<button>')
.attr('class', 'btn btn-xs bg-red waves-effect remove-item')
.attr('type', 'button')
.attr('id', sku)
.text('x')
)
.append($('<input style="border: none;border-color: transparent;">')
.attr('type', 'hidden')
.attr('value', qty)
.attr('name', 'qty[]')
)
.append($('<input style="border: none;border-color: transparent;">')
.attr('type', 'hidden')
.attr('value', amt)
.attr('name', 'amt[]')
)
)
);
}
}
function addToInvoiceBox(sku, name, price, discount, qty, amt, index)
{
if(index != -1)
{
//$('#'+sku).attr('value', );
var currentAmt = parseFloat($('input[name^="amt[]"]').eq(index).val()) + amt;
var currentDiscount = parseFloat($('input[name^="discount[]"]').eq(index).val()) + discount;
var currentQty = parseInt($('input[name^="qty[]"]').eq(index).val(), 10) +qty;
$('input[name^="amt[]"]').eq(index).val(currentAmt);
$('input[name^="discount[]"]').eq(index).val(currentDiscount);
$('input[name^="qty[]"]').eq(index).val(currentQty);
$('#amt-'+sku).empty();
$('#qty-'+sku).empty();
$('#discount-'+sku).empty();
$('#amt-'+sku).html(currentAmt);
$('#qty-'+sku).html(currentQty);
$('#discount-'+sku).html(currentDiscount);
}
else
{
$('.remove-item').last().remove();
$("#dt-invoice").find('tbody')
.append($('<tr>')
.append($('<td>')
.append($('<p>'+name+'</p>')
)
.append($('<input style="border: none;border-color: transparent;" readonly>')
.attr('type', 'hidden')
.attr('value', sku)
.attr('name', 'sku[]')
)
)
.append($('<td>')
.append($('<p>'+price+'</p>')
)
)
.append($('<td>')
.append($('<div id="discount-'+sku+'">'+discount+'</div>')
)
)
.append($('<td>')
.append($('<div id="qty-'+sku+'">'+qty+'</div>')
)
)
.append($('<td>')
.append($('<div id="amt-'+sku+'">'+amt+'</div>')
)
)
.append($('<td>')
.append($('<button>')
.attr('class', 'btn btn-xs bg-red waves-effect remove-item')
.attr('type', 'button')
.attr('id', sku)
.text('x')
)
.append($('<input style="border: none;border-color: transparent;">')
.attr('type', 'hidden')
.attr('value', qty)
.attr('name', 'qty[]')
)
.append($('<input style="border: none;border-color: transparent;">')
.attr('type', 'hidden')
.attr('value', amt)
.attr('name', 'amt[]')
)
.append($('<input style="border: none;border-color: transparent;">')
.attr('type', 'hidden')
.attr('value', discount)
.attr('name', 'discount[]')
)
)
);
}
}
function refreshRemoveItem() {
// Remove handler from existing elements
$('.remove-item').off();
// Re-add event handler for all matching elements
$(".remove-item").on('click', function () {
$('#cash_error').empty();
var selector = '#' + $(this).attr('id');
$(selector).parent().parent().remove();
total_amt_qty();
$('#grand-total').val($('#totall').val());
special_discount();
compute_change('#payment');
$('#earned-pts').html('EARNED POINTS: '+ total_amt()/200);
});
}
function submit_modal_form(modal, submitBtn, formSelector, submitURL, func)
{
$(modal).modal('show');
//console.log($(formSelector));
$(formSelector)[0].reset();//[0]
$('.validation-errors').empty();
$(submitBtn).click(function(e) {
e.preventDefault();
e.stopImmediatePropagation();
$.ajax({
type: "POST",
url: submitURL,
context: document.body,
cache : false,
data: $(formSelector).serialize(),
dataType: 'json',
success: function(data) {
$.each(data, function (i,p) {
$('#'+i+'_error').html(p);
if(data.error == 'true')
$('[name="'+i+'"]').val(p);
if(data.refresh == 'true')
location.reload();
});
if(data.start_day_success)
{
window.location.href = baseURL+'cashier/cashier_mode';
}
else if(data.success)
{
window.location.reload();
$(formSelector)[0].reset();
}
},
error: function(req, textStatus, errorThrown) {
//this is going to happen when you send something different from a 200 OK HTTP
//alert('Submit Modal Error: ' + textStatus + ' ' +errorThrown);
alert('An error occurred. Please try again.');
location.reload();
}
});
});
}
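// Illustrative call of submit_modal_form (editor's sketch; the modal, button and form
// selectors and the endpoint below are hypothetical, not selectors defined in this file):
//   submit_modal_form('#start-day-modal', '#btn-start-day', '#start-day-form',
//                     baseURL + 'cashier/start_day', null);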
/*
function tag_scanner(baseURL)
{
var successBeep = new Audio(baseURL + 'assets/beep.mp3');
var failedBeep = new Audio(baseURL + 'assets/longBeep.mp3');
const player = document.getElementById('player2');
if($('#is-mobile').val())
{
const constraints = {
video: true, video: { facingMode: { exact: "environment" } },
};
navigator.mediaDevices.getUserMedia(constraints)
.then((stream) => {
player.srcObject = stream;
});
}
else
{
const constraints = {
video: true,
};
navigator.mediaDevices.getUserMedia(constraints)
.then((stream) => {
player.srcObject = stream;
});
}
$("#div-prod-info").hide();
var qr = new QCodeDecoder();
if (!(qr.isCanvasSupported() && qr.hasGetUserMedia())) {
alert('Your browser doesn\'t match the required specs.');
throw new Error('Canvas and getUserMedia are required');
}
var video = document.getElementById('player2');
result = null;
resultHandler = function(err, result) {
if(result && result.indexOf('Name =') != -1)
{
result = result.split('Name =');
result = result[0].split(' = ');
result = result[1].trim();
var patt = new RegExp(/^[A-Z]{4}([1-9][0-9]{0,2}|9000)$/);
var match = patt.test(result);
if(($("#prod-code").val() != result))
{
address = "cashier/sales_management/get_sale_prod_info";
$.ajax({
type: "POST",
url: baseURL + address,
context: document.body,
cache : false,
data: $('#dp-form').serialize() + "&ajax_sku=" +result,
dataType: 'json',
success: function(data) {
$('.validation-errors').empty();
//showNotification('bg-teal','Product Scanned!', 'top', 'right', null, null);
$.each(data, function(i, p) {
if(i.toString() == 'error')
{
$('#scan-error').html(p.toString());
failedBeep.play();
}
else
{
$.each(p, function(i, p) {
if(p)
{
$("#div-prod-info").show();
$("#prod-code").val(result);
$("#prod-code2").val(result);
if(i.toString() == 'sku')
{
$("#prod-code").val(p.toString());
$("#prod-code2").val(p.toString());
}
else
$("#" + i.toString()).val(p.toString());
}
});
//if($("#prod-qty").val() >= 1)
//$("#prod-qty").val(1+parseInt($("#prod-qty").val(), 10));
//else
$("#prod-qty").val(1);
$("#prod-qty2").val(1);
successBeep.play();
}
});
},
error: function(req, textStatus, errorThrown) {
alert('Tag Scanner Error: ' + textStatus + ' ' +errorThrown);
}
});
}
}
else
{
if(result)
{
successBeep.play();
result = result.split(',');
address = "cashier/sales_management/get_points";
var str = result[1];
if(str.length > 15)
result[1] = result[1].substring(0,15);
$('#mem-id').val(result[1]);
$.ajax({
type: "POST",
url: baseURL + address,
context: document.body,
cache : false,
data: $('#dp-form').serialize() + "&ajax_membership_id=" +result[1],
dataType: 'json',
success: function(data) {
$('.validation-errors').empty();
//showNotification('bg-teal','Product Scanned!', 'top', 'right', null, null);
$.each(data, function(i, p) {
$('#stored-pts').html('<b> STORED POINTS: '+data.points+'</b>');
});
},
error: function(req, textStatus, errorThrown) {
alert('Tag Scanner Error: ' + textStatus + ' ' +errorThrown);
}
});
}
}
}
qr.decodeFromCamera(video, resultHandler);
}
*/
function showNotification(colorName, text, placementFrom, placementAlign, animateEnter, animateExit) {
if (colorName === null || colorName === '') { colorName = 'bg-black'; }
if (text === null || text === '') { text = 'Turning standard Bootstrap alerts'; }
if (animateEnter === null || animateEnter === '') { animateEnter = 'animated fadeInDown'; }
if (animateExit === null || animateExit === '') { animateExit = 'animated fadeOutUp'; }
var allowDismiss = true;
$.notify({
message: text
},
{
type: colorName,
allow_dismiss: allowDismiss,
newest_on_top: true,
timer: 1000,
placement: {
from: placementFrom,
align: placementAlign
},
animate: {
enter: animateEnter,
exit: animateExit
},
template: '<div data-notify="container" class="bootstrap-notify-container alert alert-dismissible {0} ' + (allowDismiss ? "p-r-35" : "") + '" role="alert">' +
'<button type="button" aria-hidden="true" class="close" data-notify="dismiss">×</button>' +
'<span data-notify="icon"></span> ' +
'<span data-notify="title">{1}</span> ' +
'<span data-notify="message">{2}</span>' +
'<div class="progress" data-notify="progressbar">' +
'<div class="progress-bar progress-bar-{0}" role="progressbar" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100" style="width: 0%;"></div>' +
'</div>' +
'<a href="{3}" target="{4}" data-notify="url"></a>' +
'</div>'
});
}
function showConfirmMessage(t, m, url, redirectURL = null, caller = null) {
swal({
title: t,
text: m,
type: "warning",
showCancelButton: true,
confirmButtonColor: "#DD6B55",
confirmButtonText: "Yes",
closeOnConfirm: false
}, function (isConfirm) {
if (isConfirm) {
if(redirectURL)
window.location.replace(redirectURL);
else
window.location.replace(url);
}
//swal("Deleted!", "Your imaginary file has been deleted.", "success");
});
}
function submit_form(baseURL, formSelector, submitBtn, submitURL, redirectURL)
{
$('.validation-errors').empty();
$(submitBtn).click(function(e) {
e.preventDefault();
e.stopImmediatePropagation();
$(submitBtn).attr("disabled", true);
$.ajax({
type: "POST",
url: submitURL,
context: document.body,
cache : false,
data: $(formSelector).serialize(),
dataType: 'json',
success: function(data) {
$.each(data, function (i,p) {
$('#'+i+'_error').html(p);
if(i == 'trans-url')
{
window.location = baseURL+redirectURL+p.toString();
console.log(baseURL+redirectURL+p.toString());
}
if(i == 'success')
{
window.location = baseURL+redirectURL;
}
});
},
error: function(req, textStatus, errorThrown) {
//alert('Form Submit Error: ' + textStatus + ' ' +errorThrown);
alert('An error occurred. Please try again.');
},
complete: function(){
$(submitBtn).attr("disabled", false);
}
});
});
}
/*$(".chk-sku").change(function() {
if(this.checked) {
parseFloat($('#'+$(".chk-sku").val()+'-amt').text());
}
});*/
/*$('#sbtn-submit-return').on('click', function(e) {
e.preventDefault();
$('.rowCont tr').each(function(row, tr){
if($(this).find('input').is(':checked')) { // I have added the if condition here
var TableData = new Array();
TableData[row] = {
"sku[]" : $(tr).find('td:eq(0)').text(),
"qty[]" : $(tr).find('td:eq(3)').text()
}
TableData = JSON.stringify(TableData);
console.log(TableData);
$.ajax({
type : "POST",
url : baseURL + "cashier/return_item/add",
cache : "false",
data : {data:TableData},
success : function(result){
console.log(result);
}
});
} // Here for the checkbox if condition
}); // each function
}); // clicking orderSave button
*/ |
export default rlux;
|
# coding: utf-8
"""
Timeline events
This feature allows an app to create and configure custom events that can show up in the timelines of certain CRM objects like contacts, companies, or deals. You'll find multiple use cases for this API in the sections below. # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from hubspot.crm.timeline.configuration import Configuration
class TimelineEventTemplateCreateRequest(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
"object_type": "str",
"name": "str",
"header_template": "str",
"detail_template": "str",
"tokens": "list[TimelineEventTemplateToken]",
}
attribute_map = {
"object_type": "objectType",
"name": "name",
"header_template": "headerTemplate",
"detail_template": "detailTemplate",
"tokens": "tokens",
}
def __init__(
self,
object_type=None,
name=None,
header_template=None,
detail_template=None,
tokens=None,
local_vars_configuration=None,
): # noqa: E501
"""TimelineEventTemplateCreateRequest - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._object_type = None
self._name = None
self._header_template = None
self._detail_template = None
self._tokens = None
self.discriminator = None
self.object_type = object_type
self.name = name
if header_template is not None:
self.header_template = header_template
if detail_template is not None:
self.detail_template = detail_template
self.tokens = tokens
@property
def object_type(self):
"""Gets the object_type of this TimelineEventTemplateCreateRequest. # noqa: E501
The type of CRM object you want this template applicable for. Currently we support [contacts, companies, deals]. # noqa: E501
:return: The object_type of this TimelineEventTemplateCreateRequest. # noqa: E501
:rtype: str
"""
return self._object_type
@object_type.setter
def object_type(self, object_type):
"""Sets the object_type of this TimelineEventTemplateCreateRequest.
The type of CRM object you want this template applicable for. Currently we support [contacts, companies, deals]. # noqa: E501
:param object_type: The object_type of this TimelineEventTemplateCreateRequest. # noqa: E501
:type: str
"""
if (
self.local_vars_configuration.client_side_validation and object_type is None
): # noqa: E501
raise ValueError(
"Invalid value for `object_type`, must not be `None`"
) # noqa: E501
self._object_type = object_type
@property
def name(self):
"""Gets the name of this TimelineEventTemplateCreateRequest. # noqa: E501
The template name. # noqa: E501
:return: The name of this TimelineEventTemplateCreateRequest. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this TimelineEventTemplateCreateRequest.
The template name. # noqa: E501
:param name: The name of this TimelineEventTemplateCreateRequest. # noqa: E501
:type: str
"""
if (
self.local_vars_configuration.client_side_validation and name is None
): # noqa: E501
raise ValueError(
"Invalid value for `name`, must not be `None`"
) # noqa: E501
self._name = name
@property
def header_template(self):
"""Gets the header_template of this TimelineEventTemplateCreateRequest. # noqa: E501
This uses Markdown syntax with Handlebars and event-specific data to render HTML on a timeline as a header. # noqa: E501
:return: The header_template of this TimelineEventTemplateCreateRequest. # noqa: E501
:rtype: str
"""
return self._header_template
@header_template.setter
def header_template(self, header_template):
"""Sets the header_template of this TimelineEventTemplateCreateRequest.
This uses Markdown syntax with Handlebars and event-specific data to render HTML on a timeline as a header. # noqa: E501
:param header_template: The header_template of this TimelineEventTemplateCreateRequest. # noqa: E501
:type: str
"""
self._header_template = header_template
@property
def detail_template(self):
"""Gets the detail_template of this TimelineEventTemplateCreateRequest. # noqa: E501
This uses Markdown syntax with Handlebars and event-specific data to render HTML on a timeline when you expand the details. # noqa: E501
:return: The detail_template of this TimelineEventTemplateCreateRequest. # noqa: E501
:rtype: str
"""
return self._detail_template
@detail_template.setter
def detail_template(self, detail_template):
"""Sets the detail_template of this TimelineEventTemplateCreateRequest.
This uses Markdown syntax with Handlebars and event-specific data to render HTML on a timeline when you expand the details. # noqa: E501
:param detail_template: The detail_template of this TimelineEventTemplateCreateRequest. # noqa: E501
:type: str
"""
self._detail_template = detail_template
@property
def tokens(self):
"""Gets the tokens of this TimelineEventTemplateCreateRequest. # noqa: E501
A collection of tokens that can be used as custom properties on the event and to create fully fledged CRM objects. # noqa: E501
:return: The tokens of this TimelineEventTemplateCreateRequest. # noqa: E501
:rtype: list[TimelineEventTemplateToken]
"""
return self._tokens
@tokens.setter
def tokens(self, tokens):
"""Sets the tokens of this TimelineEventTemplateCreateRequest.
A collection of tokens that can be used as custom properties on the event and to create fully fledged CRM objects. # noqa: E501
:param tokens: The tokens of this TimelineEventTemplateCreateRequest. # noqa: E501
:type: list[TimelineEventTemplateToken]
"""
if (
self.local_vars_configuration.client_side_validation and tokens is None
): # noqa: E501
raise ValueError(
"Invalid value for `tokens`, must not be `None`"
) # noqa: E501
self._tokens = tokens
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TimelineEventTemplateCreateRequest):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, TimelineEventTemplateCreateRequest):
return True
return self.to_dict() != other.to_dict()
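# --- Illustrative usage (editor's sketch, not generated code) ---------------------
# A minimal example of building a create request and serializing it with to_dict().
# The field values below are hypothetical; tokens is left empty here, although a real
# request would usually include TimelineEventTemplateToken instances.
if __name__ == "__main__":
    example_request = TimelineEventTemplateCreateRequest(
        object_type="contacts",
        name="Example registration event",
        header_template="Registered for **{{webinarName}}**",
        tokens=[],
    )
    print(example_request.to_dict())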
|
const nock = require('nock')
const mergePullRequest = require('../bin/merge-pr')
process.env.GH_TOKEN = '12345token'
process.env.CIRCLE_SHA1 = 'e07e589fbeb379'
process.env.CIRCLE_PULL_REQUEST = 'https://github.com/NewThingsCo/circleci-automerge/pull/1'
process.env.CIRCLE_PROJECT_USERNAME = 'NewThingsCo'
process.env.CIRCLE_PROJECT_REPONAME = 'circleci-automerge'
const disableConsoleLog = () => {
console.log = function() {}
}
beforeAll(disableConsoleLog)
afterEach(nock.cleanAll)
const testMergeRequest = function (uri, reqBody) {
const {headers} = this.req
expect(uri).toBe('/repos/NewThingsCo/circleci-automerge/pulls/1/merge')
expect(headers.authorization[0]).toBe('token 12345token')
expect(headers.accept[0]).toBe('application/vnd.github.v3+json')
expect(reqBody).toBe('{"sha":"e07e589fbeb379"}')
return {message: 'Pull Request successfully merged'}
}
test('should merge given pull request', async () => {
nock('https://api.github.com')
.put('/repos/NewThingsCo/circleci-automerge/pulls/1/merge')
.reply(200, testMergeRequest)
const response = await mergePullRequest()
expect(response).toEqual({message: 'Pull Request successfully merged'})
})
test('should handle service errors when merging pull request', async () => {
nock('https://api.github.com')
.put('/repos/NewThingsCo/circleci-automerge/pulls/1/merge')
.reply(401, 'Unauthorized')
try {
await mergePullRequest()
} catch (error) {
expect(error).toEqual('Unauthorized')
}
})
test('should handle network errors when merging pull request', async () => {
nock('https://api.github.com')
.put('/repos/NewThingsCo/circleci-automerge/pulls/1/merge')
.replyWithError('Unexpected error')
try {
await mergePullRequest()
} catch (error) {
expect(error.type).toEqual('system')
expect(error.message).toEqual('request to https://api.github.com/repos/NewThingsCo/circleci-automerge/pulls/1/merge failed, reason: Unexpected error')
}
})
test('should fallback to github api if CIRCLE_PULL_REQUEST not set', async () => {
delete process.env.CIRCLE_PULL_REQUEST
nock('https://api.github.com')
.get('/repos/NewThingsCo/circleci-automerge/pulls?state=open')
.reply(200, [{number: 1, head: {sha: 'e07e589fbeb379'}}])
nock('https://api.github.com')
.put('/repos/NewThingsCo/circleci-automerge/pulls/1/merge')
.reply(200, testMergeRequest)
const response = await mergePullRequest()
expect(response).toEqual({message: 'Pull Request successfully merged'})
})
|
export default {
GENERIC_ERROR: 'Something went wrong. Please contact us',
EDIT_DETAILS_MENTOR_SUCCESS: 'Your details were updated successfully',
EDIT_DETAILS_APPLICATION_SUBMITTED:
"Thanks for joining us! We'll approve your application ASAP.",
EDIT_DETAILS_DELETE_ACCOUNT_CONFIRM:
'Are you sure you want to delete your account?',
CARD_APPLY_TOOLTIP:
"Submit your details to get connect with mentors",
CARD_ANONYMOUS_LIKE_TOOLTIP:
'Click here to Login / Register<br />to add this mentor to favorites',
LOGOUT: 'Logout',
};
|
# Copyright 2019 The SQLFlow Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sqlflow_submitter.tensorflow.train import train
from sqlflow_submitter.tensorflow.predict import pred
datasource = "mysql://root:root@tcp(127.0.0.1:3306)/?maxAllowedPacket=0"
select = "SELECT * FROM iris.train;"
validate_select = "SELECT * FROM iris.test;"
feature_column_names = [
"sepal_length",
"sepal_width",
"petal_length",
"petal_width"]
feature_column_code = '''feature_columns=[tf.feature_column.numeric_column("sepal_length", shape=[1]),
tf.feature_column.numeric_column("sepal_width", shape=[1]),
tf.feature_column.numeric_column("petal_length", shape=[1]),
tf.feature_column.numeric_column("petal_width", shape=[1])]'''
feature_metas = {
"sepal_length": {
"feature_name": "sepal_length",
"dtype": "float32",
"delimiter": "",
"shape": [1],
"is_sparse": "false" == "true"
},
"sepal_width": {
"feature_name": "sepal_width",
"dtype": "float32",
"delimiter": "",
"shape": [1],
"is_sparse": "false" == "true"
},
"petal_length": {
"feature_name": "petal_length",
"dtype": "float32",
"delimiter": "",
"shape": [1],
"is_sparse": "false" == "true"
},
"petal_width": {
"feature_name": "petal_width",
"dtype": "float32",
"delimiter": "",
"shape": [1],
"is_sparse": "false" == "true"
}}
label_meta = {
"feature_name": "class",
"dtype": "int64",
"delimiter": "",
"shape": [1],
"is_sparse": "false" == "true"
}
if __name__ == "__main__":
train(is_keras_model=False,
datasource=datasource,
estimator="tf.estimator.DNNClassifier",
select=select,
validate_select=validate_select,
feature_column_code=feature_column_code,
feature_column_names=feature_column_names,
feature_metas=feature_metas,
label_meta=label_meta,
model_params={"n_classes": 3, "hidden_units":[10,20]},
save="mymodel",
batch_size=1,
epochs=1,
verbose=0)
pred(is_keras_model=False,
datasource=datasource,
estimator="tf.estimator.DNNClassifier",
select=select,
result_table="iris.predict",
feature_column_code=feature_column_code,
feature_column_names=feature_column_names,
feature_metas=feature_metas,
label_meta=label_meta,
model_params={"n_classes": 3, "hidden_units":[10,20]},
save="mymodel",
batch_size=1)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import base64
import json
from flask import Flask, request
import argparse
import sys
import os.path
from datetime import datetime
from PIL import Image
import numpy as np
import os
import io
import tensorflow as tf
from tensorflow.python.platform import gfile
import captcha_model as captcha
import config
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
IMAGE_WIDTH = config.IMAGE_WIDTH
IMAGE_HEIGHT = config.IMAGE_HEIGHT
CHAR_SETS = config.CHAR_SETS
CLASSES_NUM = config.CLASSES_NUM
CHARS_NUM = config.CHARS_NUM
FLAGS = None
def one_hot_to_texts(recog_result):
texts = []
for i in range(recog_result.shape[0]):  # range (not xrange) keeps this compatible with Python 3
index = recog_result[i]
texts.append(''.join([CHAR_SETS[i] for i in index]))
return texts
def input_data(image_base64):
images = np.zeros([1, IMAGE_HEIGHT*IMAGE_WIDTH], dtype='float32')
files = []
file_name = "cap"
image = Image.open(io.BytesIO(base64.b64decode(image_base64)))
image_gray = image.convert('L')
image_resize = image_gray.resize(size=(IMAGE_WIDTH, IMAGE_HEIGHT))
image.close()
input_img = np.array(image_resize, dtype='float32')
input_img = np.multiply(input_img.flatten(), 1./255) - 0.5
images[0, :] = input_img
base_name = os.path.basename(file_name)
files.append(base_name)
return images, files
def run_predict(image_base64):
with tf.Graph().as_default(), tf.device('/cpu:0'):
input_images, input_filenames = input_data(image_base64)
images = tf.constant(input_images)
logits = captcha.inference(images, keep_prob=1)
result = captcha.output(logits)
saver = tf.train.Saver()
sess = tf.Session()
saver.restore(sess, tf.train.latest_checkpoint(FLAGS.checkpoint_dir))
# print(tf.train.latest_checkpoint(FLAGS.checkpoint_dir))
recog_result = sess.run(result)
sess.close()
text = one_hot_to_texts(recog_result)
total_count = len(input_filenames)
result = ""
for i in range(total_count):
result = text[i]
return result
api = Flask("hello")
@api.route('/ping', methods=['POST'])
def ping():
data = json.loads(request.get_data(as_text=True))
res = run_predict(data['image'])
return res
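# Illustrative client call (editor's sketch, not part of the service): assumes the
# server is running locally on Flask's default port 5000 and that "captcha.jpg" is a
# sample captcha image on disk.
#
#   import base64, requests
#   with open("captcha.jpg", "rb") as f:
#       img_b64 = base64.b64encode(f.read()).decode("utf-8")
#   print(requests.post("http://127.0.0.1:5000/ping", json={"image": img_b64}).text)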
def main(_):
api.run(host="0.0.0.0")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--checkpoint_dir',
type=str,
default='./captcha_train',
help='Directory where to restore checkpoint.'
)
parser.add_argument(
'--captcha_dir',
type=str,
default='./data/test_data/test.jpg',
help='Directory where to get captcha images.'
)
FLAGS, unparsed = parser.parse_known_args()
tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
|
import React, { useEffect } from "react"
import { Link } from "gatsby"
import Layout from "../components/layout"
import Image from "../components/image"
import SEO from "../components/seo"
import "./styles.css"
const IndexPage = () => {
useEffect(() => {
setTimeout(function () {
window.ReactNativeWebView.postMessage("List of Chinese apps banned by the Indian Government.")
}, 2000)
}, [])
return (
<Layout className="index">
<SEO title="Home" />
<h1
style={{
fontSize: 26,
textAlign: "center",
marginBottom: 25,
color: "rgb(58, 82, 217)",
}}
>
List of Chinese apps banned by the Indian Government.
</h1>
<div className="logo-wrap">
<ul>
<li>
<Image filename="1.png" alt="tiktok" /> Tiktok
<span>Downloads- 1B+</span>
</li>
<li>
<Image filename="2.png" alt="Shareit" /> Shareit
<span>Downloads- 1B+</span>
</li>
<li>
<Image filename="3.png" alt="Kwai" /> Kwai
<span>Downloads- 100M+</span>
</li>
<li>
<Image filename="4.png" alt="UC Browser" /> UC Browser
<span>Downloads- 500M+</span>
</li>
<li>
<Image filename="5.png" alt="Baidu map" /> Baidu map
<span>Downloads- 1M+</span>
</li>
<li>
<Image filename="6.png" alt="Shein" /> Shein
<span>Downloads- 100M+</span>
</li>
<li>
<Image filename="7.png" alt=" Clash of Kings" /> Clash of Kings
<span>Downloads- 50M+</span>
</li>
<li>
<Image filename="8.png" alt=" DU battery saver" /> DU battery saver
<span>Downloads- 100K+</span>
</li>
<li>
<Image filename="9.png" alt="Helo" /> Helo
<span>Downloads- 100M+</span>
</li>
<li>
<Image filename="10.png" alt="Likee" /> Likee
<span>Downloads- 500M+</span>
</li>
<li>
<Image filename="11.png" alt="YouCam makeup" /> YouCam makeup
<span>Downloads- 100M+</span>
</li>
<li>Mi Community</li>
<li>CM Browser</li>
<li>Virus Cleaner</li>
<li>APUS Browser</li>
<li>ROMWE</li>
<li>Club Factory</li>
<li>Newsdog</li>
<li>Beauty Plus</li>
<li>WeChat</li>
<li>UC News</li>
<li>QQ Mail</li>
<li>Weibo</li>
<li>Xender</li>
<li>QQ Music</li>
<li>QQ Newsfeed</li>
<li>Bigo Live</li>
<li>SelfieCity</li>
<li>Mail Master</li>
<li>Parallel Space</li>
<li>Mi Video Call Xiaomi</li>
<li>WeSync</li>
<li>ES File Explorer</li>
<li>Viva Video QU Video Inc</li>
<li>Meitu</li>
<li>Vigo Video</li>
<li>New Video Status</li>
<li>DU Recorder</li>
<li>Vault- Hide</li>
<li>Cache Cleaner DU App studio</li>
<li>DU Cleaner</li>
<li>DU Browser</li>
<li>Hago Play With New Friends</li>
<li>Cam Scanner</li>
<li>Clean Master Cheetah Mobile</li>
<li>Wonder Camera</li>
<li>Photo Wonder</li>
<li>QQ Player</li>
<li>We Meet</li>
<li>Sweet Selfie</li>
<li>Baidu Translate</li>
<li>Vmate</li>
<li>QQ International</li>
<li>QQ Security Center</li>
<li>QQ Launcher</li>
<li>U Video</li>
<li>V fly Status Video</li>
<li>Mobile Legends</li>
<li>DU Privacy</li>
</ul>
</div>
</Layout>
)
}
export default IndexPage
|
# -*- coding: utf-8 -*-
"""Representation of edit operations in HGVS variants
NARefAlt and AARefAlt are abstractions of several major variant
types. They are distinguished by whether the ref and alt elements
of the structure. The HGVS grammar for NA and AA are subtly
different (e.g., the ref AA in a protein substitution is part of the
location).
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import attr
from bioutils.sequences import aa_to_aa1, aa1_to_aa3
import vvhgvs
from vvhgvs.exceptions import HGVSError, HGVSUnsupportedOperationError
import six
@attr.s(slots=True)
class Edit(object):
def format(self, conf=None):
return str(self)
def _format_config_na(self, conf=None):
max_ref_length = vvhgvs.global_config.formatting.max_ref_length
if conf and "max_ref_length" in conf:
max_ref_length = conf["max_ref_length"]
return max_ref_length
def _format_config_aa(self, conf=None):
p_3_letter = vvhgvs.global_config.formatting.p_3_letter
p_term_asterisk = vvhgvs.global_config.formatting.p_term_asterisk
if conf and "p_3_letter" in conf and conf["p_3_letter"] is not None:
p_3_letter = conf["p_3_letter"]
if conf and "p_term_asterisk" in conf and conf["p_term_asterisk"] is not None:
p_term_asterisk = conf["p_term_asterisk"]
return p_3_letter, p_term_asterisk
def _del_ins_lengths(self, ilen):
raise HGVSUnsupportedOperationError("internal function _del_ins_lengths not implemented for this variant type")
@attr.s(slots=True)
class NARefAlt(Edit):
"""
represents substitutions, deletions, insertions, and indels.
:ivar ref: reference sequence or length
:ivar alt: alternate sequence
:ivar uncertain: boolean indicating whether the variant is uncertain/undetermined
"""
ref = attr.ib(default=None)
alt = attr.ib(default=None)
uncertain = attr.ib(default=False)
@property
def ref_s(self):
"""
returns a string representing the ref sequence, if it is not None and smells like a sequence
>>> NARefAlt("ACGT").ref_s
u'ACGT'
>>> NARefAlt("7").ref_s
>>> NARefAlt(7).ref_s
"""
return self.ref if (isinstance(self.ref, six.string_types) and self.ref and self.ref[0] in "ACGTUN") else None
@property
def ref_n(self):
"""
returns an integer, either from the `ref` instance variable if it's a number, or the length of
ref if it's a string, or None otherwise
>>> NARefAlt("ACGT").ref_n
4
>>> NARefAlt("7").ref_n
7
>>> NARefAlt(7).ref_n
7
"""
try:
return int(self.ref)
except ValueError:
return len(self.ref) if self.ref else None
def format(self, conf=None):
if self.ref is None and self.alt is None:
raise HGVSError("RefAlt: ref and alt sequences are both undefined")
max_ref_length = self._format_config_na(conf)
if max_ref_length is not None:
ref = self.ref_s
if ref is None or len(ref) > max_ref_length:
ref = ''
else:
ref = self.ref
# subst and delins
if self.ref is not None and self.alt is not None:
if self.ref == self.alt:
s = "{ref}=".format(ref=ref)
elif len(self.alt) == 1 and len(self.ref) == 1 and not self.ref.isdigit(): # don't turn del5insT into 5>T
s = "{self.ref}>{self.alt}".format(self=self)
else:
s = "del{ref}ins{alt}".format(ref=ref, alt=self.alt)
# del case
elif self.ref is not None:
s = "del{ref}".format(ref=ref)
# ins case
else: # self.alt is not None
s = "ins{self.alt}".format(self=self)
return "(" + s + ")" if self.uncertain else s
__str__ = format
def _set_uncertain(self):
"""sets the uncertain flag to True; used primarily by the HGVS grammar
:returns: self
"""
self.uncertain = True
return self
@property
def type(self):
"""return the type of this Edit
:returns: edit type (str)
"""
if self.ref is not None and self.alt is not None:
if self.ref == self.alt:
edit_type = "identity"
elif len(self.alt) == 1 and len(self.ref) == 1 and not self.ref.isdigit():
edit_type = "sub"
else:
edit_type = "delins"
elif self.ref is not None:
edit_type = "del"
else:
edit_type = "ins"
return edit_type
def _del_ins_lengths(self, ilen):
"""returns (del_len, ins_len).
Unspecified ref or alt returns None for del_len or ins_len respectively.
"""
if self.ref == self.alt:
del_len = ins_len = 0
else:
del_len = 0 if self.ref is None else ilen
ins_len = 0 if self.alt is None else len(self.alt)
return (del_len, ins_len)
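# Quick orientation (editor's note, not part of the original module): how NARefAlt
# renders the common edit kinds through format() above. The deletion form also shows
# the ref sequence when it is short enough for the configured max_ref_length.
#
#   NARefAlt(ref="A", alt="T").format()    -> "A>T"     (substitution)
#   NARefAlt(ref=None, alt="G").format()   -> "insG"    (insertion)
#   NARefAlt(ref="AC", alt=None).format()  -> "del..."  (deletion, ref possibly elided)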
@attr.s(slots=True)
class AARefAlt(Edit):
ref = attr.ib(default=None)
alt = attr.ib(default=None)
uncertain = attr.ib(default=False)
def __attrs_post_init__(self):
self.ref = aa_to_aa1(self.ref)
self.alt = aa_to_aa1(self.alt)
def format(self, conf=None):
if self.ref is None and self.alt is None:
# raise HGVSError("RefAlt: ref and alt sequences are both undefined")
return "="
p_3_letter, p_term_asterisk = self._format_config_aa(conf)
# subst and delins
if self.ref is not None and self.alt is not None:
if self.ref == self.alt:
if p_3_letter:
s = "{ref}=".format(ref=aa1_to_aa3(self.ref))
if p_term_asterisk and s == "Ter=":
s = "*="
else:
s = "{ref}=".format(ref=self.ref)
elif len(self.ref) == 1 and len(self.alt) == 1:
if p_3_letter:
s = aa1_to_aa3(self.alt)
if p_term_asterisk and s == "Ter":
s = "*"
else:
s = self.alt
else:
if p_3_letter:
s = "delins{alt}".format(alt=aa1_to_aa3(self.alt))
if p_term_asterisk and s == "delinsTer":
s = "delins*"
else:
s = "delins{alt}".format(alt=self.alt)
# del case
elif self.ref is not None and self.alt is None:
s = "del"
# ins case
elif self.ref is None and self.alt is not None:
if p_3_letter:
s = "ins{alt}".format(alt=aa1_to_aa3(self.alt))
if p_term_asterisk and s == "insTer":
s = "ins*"
else:
s = "ins{alt}".format(alt=self.alt)
else:
raise RuntimeError("Should not be here")
return "(" + s + ")" if self.uncertain else s
__str__ = format
def _set_uncertain(self):
"""sets the uncertain flag to True; used primarily by the HGVS grammar
:returns: self
"""
self.uncertain = True
return self
@property
def type(self):
"""return the type of this Edit
:returns: edit type (str)
"""
if self.ref is not None and self.alt is not None:
if self.ref == self.alt:
edit_type = "identity"
elif len(self.ref) == 1 and len(self.alt) == 1:
edit_type = "sub"
else:
edit_type = "delins"
elif self.ref is not None and self.alt is None:
edit_type = "del"
elif self.ref is None and self.alt is not None:
edit_type = "ins"
return edit_type
def _del_ins_lengths(self, ilen):
"""returns (del_len, ins_len).
Unspecified ref or alt returns None for del_len or ins_len respectively.
"""
if self.ref == self.alt:
del_len = ins_len = 0
else:
del_len = 0 if (self.ref is None or self.alt == "") else ilen
ins_len = 0 if self.alt is None else len(self.alt)
return (del_len, ins_len)
@attr.s(slots=True)
class AASub(AARefAlt):
def format(self, conf=None):
p_3_letter, p_term_asterisk = self._format_config_aa(conf)
if p_3_letter:
s = aa1_to_aa3(self.alt) if self.alt != "?" else self.alt
if p_term_asterisk and s == "Ter":
s = "*"
else:
s = self.alt
return "(" + s + ")" if self.uncertain else s
__str__ = format
@property
def type(self):
"""return the type of this Edit
:returns: edit type (str)
"""
return "sub"
@attr.s(slots=True)
class AAFs(Edit):
ref = attr.ib(default=None)
alt = attr.ib(default=None)
length = attr.ib(default=None)
uncertain = attr.ib(default=False)
def __attrs_post_init__(self):
self.ref = aa_to_aa1(self.ref)
self.alt = aa_to_aa1(self.alt)
def format(self, conf=None):
p_3_letter, p_term_asterisk = self._format_config_aa(conf)
st_length = self.length or ""
if p_3_letter:
if p_term_asterisk:
s = "{alt}fs*{length}".format(alt=aa1_to_aa3(self.alt), length=st_length)
else:
s = "{alt}fsTer{length}".format(alt=aa1_to_aa3(self.alt), length=st_length)
else:
s = "{alt}fs*{length}".format(alt=self.alt, length=st_length)
return "(" + s + ")" if self.uncertain else s
__str__ = format
def _set_uncertain(self):
"""sets the uncertain flag to True; used primarily by the HGVS grammar
:returns: self
"""
self.uncertain = True
return self
@property
def type(self):
"""return the type of this Edit
:returns: edit type (str)
"""
return "fs"
@attr.s(slots=True)
class AAExt(Edit):
ref = attr.ib(default=None)
alt = attr.ib(default=None)
aaterm = attr.ib(default=None)
length = attr.ib(default=None)
uncertain = attr.ib(default=False)
def __attrs_post_init__(self):
self.ref = aa_to_aa1(self.ref)
self.alt = aa_to_aa1(self.alt)
self.aaterm = aa_to_aa1(self.aaterm)
def format(self, conf=None):
p_3_letter, p_term_asterisk = self._format_config_aa(conf)
st_alt = self.alt or ""
st_aaterm = self.aaterm or ""
st_length = self.length or ""
if p_3_letter:
st_alt = aa1_to_aa3(st_alt)
st_aaterm = aa1_to_aa3(st_aaterm)
if p_term_asterisk and st_alt == "Ter":
st_alt = "*"
if p_term_asterisk and st_aaterm == "Ter":
st_aaterm = "*"
s = "{alt}ext{term}{length}".format(alt=st_alt, term=st_aaterm, length=st_length)
return "(" + s + ")" if self.uncertain else s
__str__ = format
def _set_uncertain(self):
"""sets the uncertain flag to True; used primarily by the HGVS grammar
:returns: self
"""
self.uncertain = True
return self
@property
def type(self):
"""return the type of this Edit
:returns: edit type (str)
"""
return "ext"
def _del_ins_lengths(self, ilen):
"""returns (del_len, ins_len).
Unspecified ref or alt returns None for del_len or ins_len respectively.
"""
return (0, abs(self.length))
@attr.s(slots=True)
class Dup(Edit):
ref = attr.ib(default=None)
uncertain = attr.ib(default=False)
def format(self, conf=None):
max_ref_length = self._format_config_na(conf)
if max_ref_length is not None:
ref = self.ref_s
if ref is None or len(ref) > max_ref_length:
ref = ''
else:
ref = self.ref
return "dup" + (ref or "")
__str__ = format
@property
def ref_s(self):
"""
returns a string representing the ref sequence, if it is not None and smells like a sequence
"""
return self.ref if (isinstance(self.ref, six.string_types) and self.ref and self.ref[0] in "ACGTUN") else None
def _set_uncertain(self):
"""sets the uncertain flag to True; used primarily by the HGVS grammar
:returns: self
"""
self.uncertain = True
return self
@property
def type(self):
"""return the type of this Edit
:returns: edit type (str)
"""
return "dup"
def _del_ins_lengths(self, ilen):
"""returns (del_len, ins_len).
Unspecified ref or alt returns None for del_len or ins_len respectively.
"""
if self.ref is not None and self.ref != "":
assert len(self.ref) == ilen
return (0, ilen)
@attr.s(slots=True)
class Repeat(Edit):
ref = attr.ib(default=None)
min = attr.ib(default=None)
max = attr.ib(default=None)
uncertain = attr.ib(default=False)
def format(self, conf=None):
if self.min > self.max:
raise HGVSError("Repeat min count must be less than or equal to max count")
max_ref_length = self._format_config_na(conf)
ref = self.ref
if max_ref_length is not None and (ref is None or len(ref) > max_ref_length):
ref = ''
if self.min == self.max:
return "{ref}[{min}]".format(ref=ref, min=self.min)
return "{ref}({min}_{max})".format(ref=ref, min=self.min, max=self.max)
__str__ = format
def _set_uncertain(self):
"""sets the uncertain flag to True; used primarily by the HGVS grammar
:returns: self
"""
self.uncertain = True
return self
@property
def type(self):
"""return the type of this Edit
:returns: edit type (str)
"""
return "repeat"
@attr.s(slots=True)
class NACopy(Edit):
"""Represent copy number variants (Invitae-specific use)
This class is intended for Invitae use only and does not represent
a standard HGVS concept. The class may be changed, moved, or
removed without notice.
"""
copy = attr.ib(default=None)
uncertain = attr.ib(default=False)
def __str__(self):
s = "copy{}".format(self.copy)
return "(" + s + ")" if self.uncertain else s
def _set_uncertain(self):
"""sets the uncertain flag to True; used primarily by the HGVS grammar
:returns: self
"""
self.uncertain = True
return self
@property
def type(self):
"""return the type of this Edit
:returns: edit type (str)
"""
return "copy"
def _del_ins_lengths(self, ilen):
"""returns (del_len, ins_len).
Unspecified ref or alt returns None for del_len or ins_len respectively.
"""
return (0, ilen * self.copy)
@attr.s(slots=True)
class Inv(Edit):
"""Inversion
"""
ref = attr.ib(default=None)
uncertain = attr.ib(default=False)
def __str__(self):
return "inv"
def _set_uncertain(self):
"""sets the uncertain flag to True; used primarily by the HGVS grammar
:returns: self
"""
self.uncertain = True
return self
@property
def ref_s(self):
return self.ref if (isinstance(self.ref, six.string_types) and self.ref and self.ref[0] in "ACGTUN") else None
@property
def ref_n(self):
"""
returns an integer, either from the `seq` instance variable if it's a number,
or None otherwise
"""
try:
return int(self.ref)
except ValueError:
return None
@property
def type(self):
"""return the type of this Edit
:returns: edit type (str)
"""
return "inv"
def _del_ins_lengths(self, ilen):
"""returns (del_len, ins_len).
Unspecified ref or alt returns None for del_len or ins_len respectively.
"""
return (ilen, ilen)
@attr.s(slots=True)
class Conv(Edit):
"""Conversion
"""
from_ac = attr.ib(default=None)
from_type = attr.ib(default=None)
from_pos = attr.ib(default=None)
uncertain = attr.ib(default=False)
def __str__(self):
if self.from_ac and self.from_type and self.from_pos:
s = "con{self.from_ac}:{self.from_type}.{self.from_pos}".format(self=self)
else:
s = "con"
return "(" + s + ")" if self.uncertain else s
def _set_uncertain(self):
"""sets the uncertain flag to True; used primarily by the HGVS grammar
:returns: self
"""
self.uncertain = True
return self
@property
def type(self):
"""return the type of this Edit
:returns: edit type (str)
"""
return "con"
if __name__ == "__main__":
import doctest
doctest.testmod()
# <LICENSE>
# Copyright 2018 HGVS Contributors (https://github.com/biocommons/hgvs)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# </LICENSE>
|
'use strict'
const connection = require('../connection')
const test = require('tape')
const dedent = require('dedent')
const Table = require('./partitioning')
const sinon = require('sinon')
test('getPartitionStrings', function (t) {
const sandbox = sinon.sandbox.create()
const expected = dedent`
PARTITION \`from20110125\` VALUES LESS THAN (TO_DAYS('2011-01-26')),
PARTITION \`from20110126\` VALUES LESS THAN (TO_DAYS('2011-01-27')),
PARTITION \`from20110127\` VALUES LESS THAN (TO_DAYS('2011-01-28')),
PARTITION \`from20110128\` VALUES LESS THAN (TO_DAYS('2011-01-29')),
PARTITION \`from20110129\` VALUES LESS THAN (TO_DAYS('2011-01-30')),
PARTITION \`from20110130\` VALUES LESS THAN (TO_DAYS('2011-01-31')),
PARTITION \`from20110131\` VALUES LESS THAN (TO_DAYS('2011-02-01')),`
sandbox.useFakeTimers(new Date('2011-01-31').getTime())
const result = Table.getPartitionStrings()
t.equal(result, expected)
sandbox.restore()
t.end()
})
test('getPartitionsThatShouldExist', function (t) {
t.plan(1)
const sandbox = sinon.sandbox.create()
const clock = sandbox.useFakeTimers()
clock.tick(new Date('2017-04-16').getTime())
const currentPartitions = [
'from20170412',
'from20170413',
'from20170414'
]
const dataRetention = 5
const result = Table.getPartitionsThatShouldExist(dataRetention, currentPartitions)
t.deepEqual(result, [
{ name: 'from20170412', description: 736797 },
{ name: 'from20170413', description: 736798 },
{ name: 'from20170414', description: 736799 },
{ name: 'from20170415', description: 736800 },
{ name: 'from20170416', description: 736801 }
])
sandbox.restore()
t.end()
})
test('cleanup', function (t) {
connection.destroy()
t.end()
})
|
const config = {
siteTitle: 'نتائج السودان ',
siteTitleShort: 'نتائج السودان',
siteTitleAlt: 'نتائج السودان',
siteLogo: '/logos/logo-1024.png',
siteUrl: 'https://sudansscr.com',
pathPrefix: '',
dateFromFormat: 'YYYY-MM-DD',
dateFormat: 'MMMM Do, YYYY',
siteDescription:
'موقع مختص في تقديم وعرض نتائج الشهادة السودانية وشهادة الاساس ويساعد في استخراج الشهادة بصورة مجانية من وزارة التربية والتعليم.',
siteRss: '/rss.xml',
googleAnalyticsID: 'UA-163125090-1',
postDefaultCategoryID: 'نتائج السودان ',
userName: 'نتائج السودان ',
userEmail: '[email protected]',
userTwitter: 'omar_m7a',
gatsby_disqus_name: 'disqus_hjOB22hyru',
menuLinks: [
{
name: 'الشهادة السودانية',
link: '/categories/الشهادة-السودانية/'
},
{
name: 'شهادة الاساس ',
link: '/categories/شهادة-الاساس/'
},
{
name: ' استخراج نتيجة الشهادة السودانية',
link: 'https://results.sudansscr.com/'
}
],
themeColor: '#3F80FF', // Used for setting manifest and progress theme colors.
backgroundColor: '#ffffff'
};
// Make sure pathPrefix is empty if not needed
if (config.pathPrefix === '/') {
config.pathPrefix = '';
} else {
// Make sure pathPrefix only contains the first forward slash
config.pathPrefix = `/${config.pathPrefix.replace(/^\/|\/$/g, '')}`;
}
// Make sure siteUrl doesn't have an ending forward slash
if (config.siteUrl.substr(-1) === '/')
config.siteUrl = config.siteUrl.slice(0, -1);
// Make sure siteRss has a starting forward slash
if (config.siteRss && config.siteRss[0] !== '/')
config.siteRss = `/${config.siteRss}`;
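// Illustrative results of the normalization above (editor's note):
//   pathPrefix: '/' -> '', 'blog/' -> '/blog', '' -> '/'
//   siteUrl: 'https://example.com/' -> 'https://example.com'
//   siteRss: 'rss.xml' -> '/rss.xml'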
module.exports = config;
|
/*!
* EaselPlugin 3.4.2
* https://greensock.com
*
* @license Copyright 2020, GreenSock. All rights reserved.
* Subject to the terms at https://greensock.com/standard-license or for Club GreenSock members, the agreement issued with that membership.
* @author: Jack Doyle, [email protected]
*/
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e=e||self).window=e.window||{})}(this,function(e){"use strict";function k(){return"undefined"!=typeof window}function l(){return h||k()&&(h=window.gsap)&&h.registerPlugin&&h}function m(){return r||t&&t.createjs||t||{}}function n(e){return console.warn(e)}function o(e){var t=e.getBounds&&e.getBounds();t||(t=e.nominalBounds||{x:0,y:0,width:100,height:100},e.setBounds&&e.setBounds(t.x,t.y,t.width,t.height)),e.cache&&e.cache(t.x,t.y,t.width,t.height),n("EaselPlugin: for filters to display in EaselJS, you must call the object's cache() method first. GSAP attempted to use the target's getBounds() for the cache but that may not be completely accurate. "+e)}function p(e,t,r){(b=b||m().ColorFilter)||n("EaselPlugin error: The EaselJS ColorFilter JavaScript file wasn't loaded.");for(var i,l,s,u,a,f,c=e.filters||[],d=c.length;d--;)if(c[d]instanceof b){l=c[d];break}if(l||(l=new b,c.push(l),e.filters=c),s=l.clone(),null!=t.tint)i=h.utils.splitColor(t.tint),u=null!=t.tintAmount?+t.tintAmount:1,s.redOffset=i[0]*u,s.greenOffset=i[1]*u,s.blueOffset=i[2]*u,s.redMultiplier=s.greenMultiplier=s.blueMultiplier=1-u;else for(a in t)"exposure"!==a&&"brightness"!==a&&(s[a]=+t[a]);for(null!=t.exposure?(s.redOffset=s.greenOffset=s.blueOffset=255*(t.exposure-1),s.redMultiplier=s.greenMultiplier=s.blueMultiplier=1):null!=t.brightness&&(u=t.brightness-1,s.redOffset=s.greenOffset=s.blueOffset=0<u?255*u:0,s.redMultiplier=s.greenMultiplier=s.blueMultiplier=1-Math.abs(u)),d=8;d--;)l[a=M[d]]!==s[a]&&(f=r.add(l,a,l[a],s[a]))&&(f.op="easel_colorFilter");r._props.push("easel_colorFilter"),e.cacheID||o(e)}function u(e,t){if(!(e instanceof Array&&t instanceof Array))return t;var r,i,n=[],l=0,o=0;for(r=0;r<4;r++){for(i=0;i<5;i++)o=4===i?e[l+4]:0,n[l+i]=e[l]*t[i]+e[l+1]*t[i+5]+e[l+2]*t[i+10]+e[l+3]*t[i+15]+o;l+=5}return n}function z(e,t,r){(d=d||m().ColorMatrixFilter)||n("EaselPlugin: The EaselJS ColorMatrixFilter JavaScript file wasn't loaded.");for(var i,l,s,a,f=e.filters||[],c=f.length;-1<--c;)if(f[c]instanceof d){s=f[c];break}for(s||(s=new d(w.slice()),f.push(s),e.filters=f),l=s.matrix,i=w.slice(),null!=t.colorize&&(i=function _colorize(e,t,r){isNaN(r)&&(r=1);var i=h.utils.splitColor(t),n=i[0]/255,l=i[1]/255,o=i[2]/255,s=1-r;return u([s+r*n*x,r*n*y,r*n*_,0,0,r*l*x,s+r*l*y,r*l*_,0,0,r*o*x,r*o*y,s+r*o*_,0,0,0,0,0,1,0],e)}(i,t.colorize,Number(t.colorizeAmount))),null!=t.contrast&&(i=function _setContrast(e,t){return isNaN(t)?e:u([t+=.01,0,0,0,128*(1-t),0,t,0,0,128*(1-t),0,0,t,0,128*(1-t),0,0,0,1,0],e)}(i,Number(t.contrast))),null!=t.hue&&(i=function _setHue(e,t){if(isNaN(t))return e;t*=Math.PI/180;var r=Math.cos(t),i=Math.sin(t);return u([x+r*(1-x)+i*-x,y+r*-y+i*-y,_+r*-_+i*(1-_),0,0,x+r*-x+.143*i,y+r*(1-y)+.14*i,_+r*-_+-.283*i,0,0,x+r*-x+i*-(1-x),y+r*-y+i*y,_+r*(1-_)+i*_,0,0,0,0,0,1,0,0,0,0,0,1],e)}(i,Number(t.hue))),null!=t.saturation&&(i=function _setSaturation(e,t){if(isNaN(t))return e;var r=1-t,i=r*x,n=r*y,l=r*_;return u([i+t,n,l,0,0,i,n+t,l,0,0,i,n,l+t,0,0,0,0,0,1,0],e)}(i,Number(t.saturation))),c=i.length;-1<--c;)i[c]!==l[c]&&(a=r.add(l,c,l[c],i[c]))&&(a.op="easel_colorMatrixFilter");r._props.push("easel_colorMatrixFilter"),e.cacheID||o(),r._matrix=l}function A(e){h=e||l(),k()&&(t=window),h&&(g=1)}var 
h,g,t,r,b,d,M="redMultiplier,greenMultiplier,blueMultiplier,alphaMultiplier,redOffset,greenOffset,blueOffset,alphaOffset".split(","),w=[1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0],x=.212671,y=.71516,_=.072169,i={version:"3.4.2",name:"easel",init:function init(e,t,r,i,l){var o,s,u,a,f,c,d;for(o in g||(A(),h||n("Please gsap.registerPlugin(EaselPlugin)")),this.target=e,t)if(f=t[o],"colorFilter"===o||"tint"===o||"tintAmount"===o||"exposure"===o||"brightness"===o)u||(p(e,t.colorFilter||t,this),u=!0);else if("saturation"===o||"contrast"===o||"hue"===o||"colorize"===o||"colorizeAmount"===o)a||(z(e,t.colorMatrixFilter||t,this),a=!0);else if("frame"===o){if("string"==typeof f&&"="!==f.charAt(1)&&(c=e.labels))for(d=0;d<c.length;d++)c[d].label===f&&(f=c[d].position);(s=this.add(e,"gotoAndStop",e.currentFrame,f,i,l,Math.round))&&(s.op=o)}else null!=e[o]&&this.add(e,o,"get",f)},render:function render(e,t){for(var r=t._pt;r;)r.r(e,r.d),r=r._next;t.target.cacheID&&t.target.updateCache()},register:A,registerCreateJS:function(e){r=e}};l()&&h.registerPlugin(i),e.EaselPlugin=i,e.default=i;if (typeof(window)==="undefined"||window!==e){Object.defineProperty(e,"__esModule",{value:!0})} else {delete e.default}});
|
import enrollment from './enrollment'
import download from './download'
import { Router } from 'express'
import actions from './actions'
import tickets from './tickets'
import destroy from './destroy'
import refund from './refund'
import create from './create'
import update from './update'
import cancel from './cancel'
import edit from './edit'
import list from './list'
import show from './show'
import voyd from './void'
const router = new Router({ mergeParams: true })
router.get('/', list)
router.post('/', create)
router.get('/download', download)
router.get('/:id', show)
router.get('/:id/actions', actions)
router.get('/:id/edit', edit)
router.get('/:id/enrollment', enrollment)
router.get('/:id/tickets', tickets)
router.patch('/:id/cancel', cancel)
router.patch('/:id/refund', refund)
router.patch('/:id/void', voyd)
router.patch('/:id', update)
router.delete('/:id', destroy)
export default router
|
import json
import boto3
import time
from pprint import pprint
import urllib
import base64
print("loading function")
kinesis_client = boto3.client("kinesis")
dynamodb_client = boto3.resource("dynamodb")
def lambda_handler(event, context):
# record_set = []
# event
print("Event is :")
pprint(event)
# record
print("Record is : ")
record = event["Records"][0]["kinesis"]
pprint(record)
# data and partition from record is :
print("Data and Partitoon Key is : ")
data = record["data"]
partition_key = record["partitionKey"]
# decode them
data = str(base64.b64decode(data).decode("utf-8"))
data = json.loads(data)
partition_key = partition_key
print(type(data))
pprint(data)
pprint(partition_key)
# write data to dynamo db table
    table = dynamodb_client.Table("taxi_fleet_data2")
# for k in data.keys():
# data[k] = str(data[k])
response = table.put_item(Item=data)
pprint(response)
print("/home/lost+found/)
|
!function(global,factory){if("function"==typeof define&&define.amd)define("/Plugin/icheck",["exports","Plugin"],factory);else if("undefined"!=typeof exports)factory(exports,require("Plugin"));else{var mod={exports:{}};factory(mod.exports,global.Plugin),global.PluginIcheck=mod.exports}}(this,function(exports,_Plugin2){"use strict";Object.defineProperty(exports,"__esModule",{value:!0});var _Plugin3=babelHelpers.interopRequireDefault(_Plugin2),ICheck=function(_Plugin){function ICheck(){return babelHelpers.classCallCheck(this,ICheck),babelHelpers.possibleConstructorReturn(this,(ICheck.__proto__||Object.getPrototypeOf(ICheck)).apply(this,arguments))}return babelHelpers.inherits(ICheck,_Plugin),babelHelpers.createClass(ICheck,[{key:"getName",value:function(){return"iCheck"}}],[{key:"getDefaults",value:function(){return{}}}]),ICheck}(_Plugin3.default);_Plugin3.default.register("iCheck",ICheck),exports.default=ICheck}); |
import 'jest-canvas-mock';
import Phaser from 'phaser';
import GetNameScene from '../Scenes/GetNameScene';
test('GetName-Scene is an instance of Scene', () => {
expect(GetNameScene.prototype instanceof Phaser.Scene).toBe(true);
});
test('GetName-Scene is not an instance of Text', () => {
expect(GetNameScene.prototype instanceof Phaser.GameObjects.Text).not.toBe(true);
}); |
"use strict";
const simple = { // * Simple instruments
notObject : function notObject ( str ) {
let result = false
if ( str == null ) { result = true }
if ( typeof str == 'string' ) { result = true }
if ( typeof str == 'number' ) { result = true }
if ( typeof str == 'boolean' ) { result = true }
if ( typeof str == 'function' ) { result = true }
return result
} // notObject func.
, isObject : str => !simple.notObject(str)
, copy : obj => JSON.parse ( JSON.stringify(obj) )
, folderKind : test => test instanceof Array ? 'array' : 'object'
, getIterator : list => Object.keys ( list )
, removeLast ( path ) {
let list = path.split('/')
list.pop()
return list.join('/')
} // removeLast func.
, getPenult ( path ) {
let list = path.split ( '/' )
list.pop()
return list.pop()
} // getPenult func.
, getUlt ( path ) {
let list = path.split ( '/' )
return list.pop()
} // getUlt func.
} // simple
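/**
 * EXAMPLE USAGE (illustrative sketch only, not part of the original module;
 * results assume the `simple` helpers defined above):
 *   simple.notObject('abc')      // true  -- strings count as "not object"
 *   simple.isObject({a: 1})      // true
 *   simple.copy({a: {b: 2}})     // deep copy via JSON round-trip
 *   simple.folderKind([])        // 'array'
 *   simple.removeLast('a/b/c')   // 'a/b'
 *   simple.getPenult('a/b/c')    // 'b'
 *   simple.getUlt('a/b/c')       // 'c'
 */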
module.exports = simple |
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GovernorVotesQuorumFraction__factory = void 0;
const ethers_1 = require("ethers");
const _abi = [
{
anonymous: false,
inputs: [
{
indexed: false,
internalType: "uint256",
name: "proposalId",
type: "uint256",
},
],
name: "ProposalCanceled",
type: "event",
},
{
anonymous: false,
inputs: [
{
indexed: false,
internalType: "uint256",
name: "proposalId",
type: "uint256",
},
{
indexed: false,
internalType: "address",
name: "proposer",
type: "address",
},
{
indexed: false,
internalType: "address[]",
name: "targets",
type: "address[]",
},
{
indexed: false,
internalType: "uint256[]",
name: "values",
type: "uint256[]",
},
{
indexed: false,
internalType: "string[]",
name: "signatures",
type: "string[]",
},
{
indexed: false,
internalType: "bytes[]",
name: "calldatas",
type: "bytes[]",
},
{
indexed: false,
internalType: "uint256",
name: "startBlock",
type: "uint256",
},
{
indexed: false,
internalType: "uint256",
name: "endBlock",
type: "uint256",
},
{
indexed: false,
internalType: "string",
name: "description",
type: "string",
},
],
name: "ProposalCreated",
type: "event",
},
{
anonymous: false,
inputs: [
{
indexed: false,
internalType: "uint256",
name: "proposalId",
type: "uint256",
},
],
name: "ProposalExecuted",
type: "event",
},
{
anonymous: false,
inputs: [
{
indexed: false,
internalType: "uint256",
name: "oldQuorumNumerator",
type: "uint256",
},
{
indexed: false,
internalType: "uint256",
name: "newQuorumNumerator",
type: "uint256",
},
],
name: "QuorumNumeratorUpdated",
type: "event",
},
{
anonymous: false,
inputs: [
{
indexed: true,
internalType: "address",
name: "voter",
type: "address",
},
{
indexed: false,
internalType: "uint256",
name: "proposalId",
type: "uint256",
},
{
indexed: false,
internalType: "uint8",
name: "support",
type: "uint8",
},
{
indexed: false,
internalType: "uint256",
name: "weight",
type: "uint256",
},
{
indexed: false,
internalType: "string",
name: "reason",
type: "string",
},
],
name: "VoteCast",
type: "event",
},
{
inputs: [],
name: "BALLOT_TYPEHASH",
outputs: [
{
internalType: "bytes32",
name: "",
type: "bytes32",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [],
name: "COUNTING_MODE",
outputs: [
{
internalType: "string",
name: "",
type: "string",
},
],
stateMutability: "pure",
type: "function",
},
{
inputs: [
{
internalType: "uint256",
name: "proposalId",
type: "uint256",
},
{
internalType: "uint8",
name: "support",
type: "uint8",
},
],
name: "castVote",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [
{
internalType: "uint256",
name: "proposalId",
type: "uint256",
},
{
internalType: "uint8",
name: "support",
type: "uint8",
},
{
internalType: "uint8",
name: "v",
type: "uint8",
},
{
internalType: "bytes32",
name: "r",
type: "bytes32",
},
{
internalType: "bytes32",
name: "s",
type: "bytes32",
},
],
name: "castVoteBySig",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [
{
internalType: "uint256",
name: "proposalId",
type: "uint256",
},
{
internalType: "uint8",
name: "support",
type: "uint8",
},
{
internalType: "string",
name: "reason",
type: "string",
},
],
name: "castVoteWithReason",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [
{
internalType: "address[]",
name: "targets",
type: "address[]",
},
{
internalType: "uint256[]",
name: "values",
type: "uint256[]",
},
{
internalType: "bytes[]",
name: "calldatas",
type: "bytes[]",
},
{
internalType: "bytes32",
name: "descriptionHash",
type: "bytes32",
},
],
name: "execute",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "payable",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "account",
type: "address",
},
{
internalType: "uint256",
name: "blockNumber",
type: "uint256",
},
],
name: "getVotes",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [
{
internalType: "uint256",
name: "proposalId",
type: "uint256",
},
{
internalType: "address",
name: "account",
type: "address",
},
],
name: "hasVoted",
outputs: [
{
internalType: "bool",
name: "",
type: "bool",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [
{
internalType: "address[]",
name: "targets",
type: "address[]",
},
{
internalType: "uint256[]",
name: "values",
type: "uint256[]",
},
{
internalType: "bytes[]",
name: "calldatas",
type: "bytes[]",
},
{
internalType: "bytes32",
name: "descriptionHash",
type: "bytes32",
},
],
name: "hashProposal",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "pure",
type: "function",
},
{
inputs: [],
name: "name",
outputs: [
{
internalType: "string",
name: "",
type: "string",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [
{
internalType: "uint256",
name: "proposalId",
type: "uint256",
},
],
name: "proposalDeadline",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [
{
internalType: "uint256",
name: "proposalId",
type: "uint256",
},
],
name: "proposalSnapshot",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [],
name: "proposalThreshold",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [
{
internalType: "address[]",
name: "targets",
type: "address[]",
},
{
internalType: "uint256[]",
name: "values",
type: "uint256[]",
},
{
internalType: "bytes[]",
name: "calldatas",
type: "bytes[]",
},
{
internalType: "string",
name: "description",
type: "string",
},
],
name: "propose",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [
{
internalType: "uint256",
name: "blockNumber",
type: "uint256",
},
],
name: "quorum",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [],
name: "quorumDenominator",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [],
name: "quorumNumerator",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "target",
type: "address",
},
{
internalType: "uint256",
name: "value",
type: "uint256",
},
{
internalType: "bytes",
name: "data",
type: "bytes",
},
],
name: "relay",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [
{
internalType: "uint256",
name: "proposalId",
type: "uint256",
},
],
name: "state",
outputs: [
{
internalType: "enum IGovernor.ProposalState",
name: "",
type: "uint8",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [
{
internalType: "bytes4",
name: "interfaceId",
type: "bytes4",
},
],
name: "supportsInterface",
outputs: [
{
internalType: "bool",
name: "",
type: "bool",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [],
name: "token",
outputs: [
{
internalType: "contract ERC20Votes",
name: "",
type: "address",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [
{
internalType: "uint256",
name: "newQuorumNumerator",
type: "uint256",
},
],
name: "updateQuorumNumerator",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [],
name: "version",
outputs: [
{
internalType: "string",
name: "",
type: "string",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [],
name: "votingDelay",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [],
name: "votingPeriod",
outputs: [
{
internalType: "uint256",
name: "",
type: "uint256",
},
],
stateMutability: "view",
type: "function",
},
{
stateMutability: "payable",
type: "receive",
},
];
class GovernorVotesQuorumFraction__factory {
static createInterface() {
return new ethers_1.utils.Interface(_abi);
}
static connect(address, signerOrProvider) {
return new ethers_1.Contract(address, _abi, signerOrProvider);
}
}
exports.GovernorVotesQuorumFraction__factory = GovernorVotesQuorumFraction__factory;
GovernorVotesQuorumFraction__factory.abi = _abi;
//# sourceMappingURL=GovernorVotesQuorumFraction__factory.js.map |
import React from 'react';
const td = (props) => {
const { children } = props;
return (
<td>{ children }</td>
);
};
export default td;
|
# ===============================================================================
# Copyright 2019 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
from traits.api import List, HasTraits, Str, Int
from traitsui.api import TabularEditor, View, UItem
from traitsui.tabular_adapter import TabularAdapter
class Identifier(HasTraits):
identifier = Str
irradiation = Str
level = Str
position = Int
class Adapter(TabularAdapter):
columns = [('Identifier', 'identifier'),
('Irradiation', 'irradiation'),
('Level', 'level'),
('Position', 'position')]
class AssociatedIdentifiersView(HasTraits):
items = List
def add_items(self, irposs):
def factory(irpos):
i = Identifier(identifier=irpos.identifier,
irradiation=irpos.level.irradiation.name,
level=irpos.level.name,
position=irpos.position)
return i
self.items.extend([factory(i) for i in irposs])
def traits_view(self):
v = View(UItem('items', editor=TabularEditor(adapter=Adapter())),
width=500, resizable=True, title='Associated Irradiation Positions')
return v
# ============= EOF =============================================
|
!function(){"use strict";function e(e){function r(e){return e.toLocaleLowerCase?e.toLocaleLowerCase():e.toLowerCase()}function t(e){return e.replace(d,"\x00$1\x00").replace(/\0$/,"").replace(/^\0/,"").split("\x00")}function n(e,r){return(!e.match(i)||1===r)&&parseFloat(e)||e.replace(f," ").replace(u,"")||0}e=e||{};var a=e.desc?-1:1,o=-a,i=/^0/,f=/\s+/g,u=/^\s+|\s+$/g,c=/[^\x00-\x80]/,s=/^0x[0-9a-f]+$/i,d=/(0x[\da-fA-F]+|(^[\+\-]?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?(?=\D|\s|$))|\d+)/g,p=/(^([\w ]+,?[\w ]+)?[\w ]+,?[\w ]+\d+:\d+(:\d+)?[\w ]?|^\d{1,4}[\/\-]\d{1,4}[\/\-]\d{1,4}|^\w+, \w+ \d+, \d{4})/,l=e.insensitive?function(e){return r(""+e).replace(u,"")}:function(e){return(""+e).replace(u,"")};return function(e,r){var i=l(e),f=l(r);if(!i&&!f)return 0;if(!i&&f)return o;if(i&&!f)return a;var u=t(i),d=t(f),w=parseInt(i.match(s),16)||1!==u.length&&Date.parse(i),x=parseInt(f.match(s),16)||w&&f.match(p)&&Date.parse(f)||null;if(x){if(x>w)return o;if(w>x)return a}for(var m=u.length,h=d.length,v=0,g=Math.max(m,h);g>v;v++){var N=n(u[v]||"",m),y=n(d[v]||"",h);if(isNaN(N)!==isNaN(y))return isNaN(N)?a:o;if(c.test(N+y)&&N.localeCompare){var C=N.localeCompare(y);if(C>0)return a;if(0>C)return o;if(v===g-1)return 0}else{if(y>N)return o;if(N>y)return a}}return 0}}"object"==typeof exports&&"object"==typeof module?module.exports=e:"function"==typeof define&&define.amd?define([],function(){return e}):"object"==typeof exports?exports.natsort=e:("undefined"!=typeof window?window:this).natsort=e}(); |
var ie = document.all != null;
var moz = !ie && document.getElementById != null && document.layers == null;
/**
* @dependencies Bs_Misc.lib.js
* @author sam blum <sam-at-blueshoes-dot-org>, andrej arn <andrej-at-blueshoes-dot-org>
* @package javascript_core
* @subpackage lang
* @copyright blueshoes.org
*/
/**
* tells if the given var is null as we (from blueshoes) understand it.
* we think it's null if one of these is true:
* - the type is 'undefined' (typeof())
* - var == null
*
* javascript thinks that a null value (var x = null) is of type 'object'.
* we don't think that's funny. look at these lines:
* var a = new Object(); alert(typeof(a)); if (a) alert('true'); => object, and true
* var b = null; alert(typeof(b)); if (b) alert('true'); => object, BUT NOT TRUE!
*
* @param mixed theVar
* @return bool
*/
function bs_isNull(theVar) {
if (typeof(theVar) == 'undefined') return true;
if (theVar == null) return true;
return false;
}
/**
* Any object but not null returns TRUE
* @param mixed theVar
* @return bool
*/
function bs_isObject(theVar) {
  var ret = false;
if (typeof(theVar) == 'object') {
ret = !bs_isNull(theVar);
}
return ret;
}
/**
* undefined, null and empty string return true. but the number 0 does not.
* @param mixed theVar
* @return bool
*/
function bs_isEmpty(theVar) {
if (bs_isNull(theVar)) return true;
if (theVar == '') return true;
return false;
}
/**
* A more precise type detect
* The extra types we want are
* 'null' for null (not 'object' as js defines it)
* 'array' for Array (not 'object' as js defines it)
*
* @status experimental
*/
function bs_typeOf(theVar) {
  var ret = 'undefined';
switch (typeof(theVar)) {
case 'boolean': ret = 'boolean'; break;
case 'number': ret = 'number'; break;
case 'string': ret = 'string'; break;
case 'function': ret = 'function'; break;
case 'object':
if (bs_isNull(theVar)) {
ret = 'null';
break;
}
if (theVar.concat && theVar.join && theVar.sort && theVar.pop) { // not 100% but 99.9%
ret = 'array';
break;
}
break;
case 'undefined':
default:
ret = 'undefined';
}
return ret;
}
/**
* Intelligent is TRUE tester.
* Tells whether a value should be considered as true or not.
* this is useful for ini files for example where all these values
* should be treated as TRUE:
* Yes, Y, Ja, Oui, On, True (string or bool), 1 (string or int) => all case insensitive
* everything else, like No, Off, False, 0, is treated as FALSE.
* @param string value
* @return bool
*/
function bs_isTrue(value) {
var trueVals = new Array('true','on','y','yes',1,'1','ja','oui');
if (value == '') return false;
if (typeof(value) == 'string') value = value.toLowerCase();
if (value == true) return true;
for (var i=0; i<trueVals.length; i++) {
    if (value == String(trueVals[i]).toLowerCase()) return true;
}
return false;
}
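/**
 * EXAMPLE USAGE (illustrative only; results assume bs_isTrue() as defined above):
 *   bs_isTrue('Yes')   // true
 *   bs_isTrue('on')    // true
 *   bs_isTrue(1)       // true
 *   bs_isTrue('Off')   // false
 *   bs_isTrue(0)       // false
 */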
/**
* tells if object is an instance of the class (constructor) specified.
*
* example: instanceOf(myArray, Array);
*
* this works like js 1.4: myArray instanceof Array
*
* @param object object
* @param constructor constructor
* @return bool
*/
function instanceOf(object, constructor) {
while (object != null) {
if (object == constructor.prototype) return true;
object = object.__proto__;
}
return false;
}
/**
 * Merge arrays AND objects from left to right. That is, the last param overwrites the first if
 * keys are the same.
 *
 * @param object obj1
 * @param object obj2 (overwrites obj1)
* @return object (false if one of the params isn't an object!)
*/
function bs_arrayMerge(obj1, obj2) {
if (!bs_isObject(obj1) || !bs_isObject(obj2)) return false;
for (var key in obj2) {obj1[key] = obj2[key];}
return obj1;
}
/**
* Takes a vector ( == js-Array() ) or string and transforms it to a hash of key => TRUE.
*
* Sample:
* aArray = new Array('a', 'b', 'c');
* aHash = bs_arrayFlip(aArray);
*
* aHash is now aHash['a'] = true
* aHash['b'] = true
* aHash['c'] = true
* @param aArray an Array()
* @return a hash ( == js-Object() )
*/
function bs_arrayFlip(aArray) {
var aHash = new Object();
  var type = bs_typeOf(aArray);
if (type == 'array') {
for (var i=0; i<aArray.length; i++) {
aHash[aArray[i]] = true;
}
} else if (type == 'string') {
if (aArray != '') {
aHash[aArray] = true;
}
}
return aHash;
}
/**
* takes a querystring like ?foo=bar&hello=world and returns an array:
* array['foo'] = bar
* array['hello'] = world
*
* does not like special cases yet, avoid things like:
* ?foo[0]=bar&foo[1]=wodka
*
* @param string queryString (if not given then window.location.search is used.)
* @return array (hash, may be empty)
*/
function queryStringToHash(queryString) {
if (typeof(queryString) == 'undefined') {
var queryString = window.location.search;
}
var ret = new Array;
if (bs_isEmpty(queryString)) return ret;
queryString = queryString.substr(1);
if (bs_isEmpty(queryString)) return ret;
var junks = queryString.split('&');
for (var i=0; i<junks.length; i++) {
var x = junks[i].split('=');
if (x.length == 2) {
ret[x[0]] = x[1];
} else {
ret[x[0]] = '';
}
}
return ret;
}
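/**
 * EXAMPLE USAGE (illustrative only):
 *   var params = queryStringToHash('?foo=bar&hello=world');
 *   params['foo']    // 'bar'
 *   params['hello']  // 'world'
 */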
/**
* dumps any var similar to php's var_dump.
*
* - loops arrays/objects recursively
* - omits functions (by default)
* - alerts the result (or optionally returns it)
*
*
* EXAMPLE USAGE:
* var a = new Object();
* a[0] = 30;
* a['sam'] = 15;
* a['bob'] = 20;
* a['str'] = 'blah';
* a['arr'] = new Array('foo', 'bar', 'lala');
* a.push = function(param) { var a; }
* dump(a, false, true);
*
* param showFunctions:
 * old: if functions should be included as well (with code),
* default is false (which means functions are still listed).
* new: 0 = not listed
* 1 = function "functionname" is listed
* 2 = function with code is listed
* the default is int 0.
*
*
* @param mixed theVar (your variable)
* @param bool doReturn (if set to true then the result will be returned instead of alert()ed.)
* @param mixed showFunctions (see above)
* @param string _out (the temp output when called recursively. used internally only!)
* @param string _indent (indent on recursive loop for nicer results. used internally only!)
* @param int _numCall (function call counter, used internally only!)
* @return mixed (see above)
* @author blueshoes.org (andrej-at-blueshoes-dot-org)
*/
function dump(theVar, doReturn, showFunctions, _out, _indent, _numCall) {
if (!_indent) {
_indent = ' ';
_bsDumpOverallNumCall = 1;
} else {
_indent += ' ';
_bsDumpOverallNumCall++;
}
if (_bsDumpOverallNumCall < 8) {
if (_out) {
var isInternal = true;
} else {
_out = '';
_numCall = 1;
}
var goOn = true;
if (_numCall > 10) {
goOn = false;
if (!doReturn) {
goOn = confirm("There have been 10 recursive calls so far. Maybe you have an endless loop. Do you want to continue?");
}
if (!goOn) {
_out += _indent + "error/warning: nesting levels too deep (>10 times)!\n";
} else {
_numCall = 0;
}
}
if (goOn) {
switch (typeof(theVar)) {
case 'object':
for (var key in theVar) {
switch (typeof(theVar[key])) {
case 'function':
if (typeof(showFunctions) == 'boolean') {
if (showFunctions) {
_out += _indent + 'function "' + key + '" => ' + theVar[key] + "\n";
} else {
_out += _indent + 'function "' + key + "\n";
}
} else {
if (showFunctions == 2) {
_out += _indent + 'function "' + key + '" => ' + theVar[key] + "\n";
} else if (showFunctions == 1) {
_out += _indent + 'function "' + key + "\n";
} else { //0
//ignore it. default.
}
}
break;
case 'undefined':
//do nothing
break;
case 'object':
_out += _indent + key;
//if (theVar[key] instanceof Array) {
if (instanceOf(theVar[key], Array)) {
_out += ' (Array) => \n';
//} else if (theVar[key] instanceof Date) {
} else if (instanceOf(theVar[key], Date)) {
_out += ' (Date) => '+ theVar[key] +'\n';
} else {
_out += ' (Object) => \n';
}
_out = dump(theVar[key], doReturn, showFunctions, _out, _indent + " ", _numCall+1);
break;
case 'number':
//if (theVar instanceof Date) alert('date');
if (instanceOf(theVar, Date)) alert('date');
default:
_out += _indent + typeof(theVar[key]) + ' "' + key + '" => ' + theVar[key] + "\n";
}
}
break;
default:
_out += _indent + typeof(theVar) + ' => ' + theVar + "\n";
}
}
}
if (isInternal || doReturn) {
return _out;
} else {
alert(_out);
return;
}
}
function Position(x, y) {
this.x = x;
this.y = y;
};
/**
 * calculates the absolute x and y position of your element, relative to the upper left corner of the page.
*
* param stopIfAbsolute:
* if something is in a container that is positioned absolute
* (or has set overflow to auto or scroll which causes the same result) you can
* stop going up and asking parents.
*
* @param element el (return of document.getElementById().)
* @param bool stopIfAbsolute (see above, default is false)
* @return object (with .x and .y values set)
* @throws bool false
*/
function getAbsolutePos(el, stopIfAbsolute) {
if (bs_isNull(el)) {
return {x:0, y:0};
}
var res = {x:el.offsetLeft, y:el.offsetTop};
if (el.offsetParent) {
if (el.offsetParent.currentStyle && el.offsetParent.currentStyle.position) {
var position = el.offsetParent.currentStyle.position;
var overflow = el.offsetParent.currentStyle.overflow;
} else if (document.defaultView) {
var position = document.defaultView.getComputedStyle(el, null).getPropertyValue("position");
var overflow = document.defaultView.getComputedStyle(el, null).getPropertyValue("overflow");
} else {
return false; //throw
}
if ((stopIfAbsolute != true ) || ((position != 'absolute') && (position != 'relative') && (overflow != 'auto') && (overflow != 'scroll'))) {
var tmp = getAbsolutePos(el.offsetParent, stopIfAbsolute);
res.x += tmp.x;
res.y += tmp.y;
}
}
return res;
};
/**
* returns the dimensions of an element (position-x, position-y, width, height)
* @access public
* @param element elm (return of document.getElementById().)
* @param bool stopIfAbsolute (see above, default is false)
* @return object (with .x .y .w .h set for x-axis, y-axis, width and height)
* @see getAbsolutePos()
*/
function getElementDimensions(elm, stopIfAbsolute) {
var ret = getAbsolutePos(elm, stopIfAbsolute);
if (!ret) return ret;
ret.w = elm.offsetWidth;
ret.h = elm.offsetHeight;
return ret;
}
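/**
 * EXAMPLE USAGE (illustrative only; the element id 'myDiv' is hypothetical):
 *   var dim = getElementDimensions(document.getElementById('myDiv'));
 *   // dim.x / dim.y -> absolute position, dim.w / dim.h -> width / height
 */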
/**
* finds the actual background color of the elment specified. the color does not need to be
* set for that element, it can be in the parent element.
* @param mixed elm (element or element id)
* @return string (ie: hex code like '#FFFFFF'. moz: string like 'rgb(230, 230, 230)'
* @throws bool false
* @since bs-4.6
*/
function bs_findBackgroundColor(elm) {
if (typeof(elm) == 'string') {
elm = document.getElementById(elm);
}
if (typeof(elm) == 'undefined') return false;
if (moz) {
try {
var col = document.defaultView.getComputedStyle(elm, null).getPropertyValue("background-color");
} catch (e) {
return false;
}
} else {
if (typeof(elm.currentStyle) == 'undefined') return false;
var col = elm.currentStyle.backgroundColor;
}
if ((typeof(col) != 'undefined') && (col != 'transparent') && (col != '')) {
return col;
} else {
return bs_findBackgroundColor(elm.parentNode);
}
}
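/**
 * EXAMPLE USAGE (illustrative only; 'myCell' is a hypothetical element id):
 *   var bg = bs_findBackgroundColor('myCell');
 *   // e.g. '#FFFFFF' (ie) or 'rgb(255, 255, 255)' (moz), or false on failure
 */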
/**
* toggles the visibility of the tag types specified.
*
* this is useful to hide all select and iframe elements on a webpage
* so that layers can 'overlap' them.
* also flash and java used to be un-overlappable using z-index. is that
* still the case?
*
* @param bool show (true = show, false = hide)
* @param array tags (default is 'select' and 'iframe')
* @return void
*/
function bs_toggleVisibility(show, tags) {
try {
if (typeof(tags) == 'undefined') tags = new Array('select', 'iframe');
for (var tag in tags) {
var elms = document.getElementsByTagName(tags[tag]);
for (var e = 0; e < elms.length; e++) {
elms[e].style.visibility = (show) ? 'visible' : 'hidden';
}
}
} catch (e) {
//unsupported browser
}
}
// see http://www.crockford.com/javascript/inheritance.html
Function.prototype.method = function (name, func) {
this.prototype[name] = func;
return this;
};
Function.method('inherits', function (parent) {
var d = 0, p = (this.prototype = new parent());
this.method('uber', function uber(name) {
var f, r, t = d, v = parent.prototype;
if (t) {
while (t) {
v = v.constructor.prototype;
t -= 1;
}
f = v[name];
} else {
f = p[name];
if (f == this[name]) {
f = v[name];
}
}
d += 1;
r = f.apply(this, Array.prototype.slice.apply(arguments, [1]));
d -= 1;
return r;
});
return this;
});
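/**
 * EXAMPLE USAGE of method()/inherits()/uber() (illustrative only; the class
 * names below are hypothetical, not part of this library):
 *   function Animal() {}
 *   Animal.method('speak', function () { return 'generic noise'; });
 *   function Dog() {}
 *   Dog.inherits(Animal);
 *   Dog.method('speak', function () { return this.uber('speak') + ' (barked)'; });
 *   new Dog().speak(); // 'generic noise (barked)'
 */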
Function.method('swiss', function (parent) {
for (var i = 1; i < arguments.length; i += 1) {
var name = arguments[i];
this.prototype[name] = parent.prototype[name];
}
return this;
});
//andrej
Function.method('extend', function (object) {
  for (var property in object.prototype) {
this.prototype[property] = object.prototype[property];
}
});
/*
Object.prototype.extend = function(object) {
for (property in object.prototype) {
this.prototype[property] = object.prototype[property];
}
}
*/
/**
* How do I emulate some of IE's special DOM functions in mozilla/gecko/NS6+
* - insertAdjacentElement
* - insertAdjacentHTML
* - insertAdjacentText
* - innerText
*/
if ("undefined" != typeof(HTMLElement)) {
if ("undefined" == typeof(HTMLElement.insertAdjacentElement)) {
HTMLElement.prototype.insertAdjacentElement = function(where, parsedNode) {
switch(where){
case 'beforeBegin':
this.parentNode.insertBefore(parsedNode,this)
break;
case 'afterBegin':
this.insertBefore(parsedNode,this.firstChild);
break;
case 'beforeEnd':
this.appendChild(parsedNode);
break;
case 'afterEnd':
if (this.nextSibling) this.parentNode.insertBefore(parsedNode,this.nextSibling);
else this.parentNode.appendChild(parsedNode);
break;
}
}
/*
HTMLElement.prototype.insertAdjacentHTML = function(where, htmlStr) {
var r = this.ownerDocument.createRange();
r.setStartBefore(this);
var parsedHTML = r.createContextualFragment(htmlStr);
this.insertAdjacentElement(where,parsedHTML);
}
HTMLElement.prototype.insertAdjacentText = function(where, txtStr) {
var parsedText = document.createTextNode(txtStr);
this.insertAdjacentElement(where,parsedText);
}
*/
} // end if
/*
if (("undefined" == typeof(HTMLElement.innerText)) && moz) {
HTMLElement.prototype.innerText getter = function() { //
return this.innerHTML.replace(/<[^>]+>/g,"");
}
HTMLElement.prototype.innerText setter = function(txtStr) { //
var parsedText = document.createTextNode(txtStr);
this.innerHTML = "";
this.appendChild(parsedText);
}
HTMLElement.prototype.innerText = function(txtStr) { //
var parsedText = document.createTextNode(txtStr);
this.innerHTML = "";
this.appendChild(parsedText);
}
} // end if
*/
} // end if
///*
if (moz) { // set up ie environment for Moz
extendEventObject();
emulateAttachEvent();
//emulateFromToElement();
emulateEventHandlers(["click", "dblclick", "mouseover", "mouseout",
"mousedown", "mouseup", "mousemove",
"keydown", "keypress", "keyup"]);
//emulateDocumentAll();
//emulateElement()
emulateCurrentStyle(["left", "right", "top", "bottom", "width", "height"]);
emulateHTMLModel();
// Mozilla returns the wrong button number
Event.LEFT = 1;
Event.MIDDLE = 2;
Event.RIGHT = 3;
}
else {
Event = {};
// IE is returning wrong button number as well :-)
Event.LEFT = 1;
Event.MIDDLE = 4;
Event.RIGHT = 2;
}
//*/
/*
* Extends the event object with srcElement, cancelBubble, returnValue,
* fromElement and toElement
*/
function extendEventObject() {
Event.prototype.__defineSetter__("returnValue", function (b) {
if (!b) this.preventDefault();
return b;
});
Event.prototype.__defineSetter__("cancelBubble", function (b) {
if (b) this.stopPropagation();
return b;
});
Event.prototype.__defineGetter__("srcElement", function () {
var node = this.target;
while (node.nodeType != 1) node = node.parentNode;
return node;
});
Event.prototype.__defineGetter__("fromElement", function () {
var node;
if (this.type == "mouseover")
node = this.relatedTarget;
else if (this.type == "mouseout")
node = this.target;
if (!node) return;
while (node.nodeType != 1) node = node.parentNode;
return node;
});
Event.prototype.__defineGetter__("toElement", function () {
var node;
if (this.type == "mouseout")
node = this.relatedTarget;
else if (this.type == "mouseover")
node = this.target;
if (!node) return;
while (node.nodeType != 1) node = node.parentNode;
return node;
});
Event.prototype.__defineGetter__("offsetX", function () {
return this.layerX;
});
Event.prototype.__defineGetter__("offsetY", function () {
return this.layerY;
});
}
/*
* Emulates element.attachEvent as well as detachEvent
*/
function emulateAttachEvent() {
HTMLDocument.prototype.attachEvent =
HTMLElement.prototype.attachEvent = function (sType, fHandler) {
var shortTypeName = sType.replace(/on/, "");
fHandler._ieEmuEventHandler = function (e) {
window.event = e;
return fHandler();
};
this.addEventListener(shortTypeName, fHandler._ieEmuEventHandler, false);
};
HTMLDocument.prototype.detachEvent =
HTMLElement.prototype.detachEvent = function (sType, fHandler) {
var shortTypeName = sType.replace(/on/, "");
if (typeof fHandler._ieEmuEventHandler == "function")
this.removeEventListener(shortTypeName, fHandler._ieEmuEventHandler, false);
else
this.removeEventListener(shortTypeName, fHandler, true);
};
}
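/*
 * EXAMPLE USAGE of the attachEvent emulation (illustrative only; the handler
 * below is hypothetical). In Mozilla, the wrapper assigns window.event before
 * invoking the handler, so IE-style code keeps working:
 *   function onBodyClick() { alert('clicked: ' + window.event.srcElement.tagName); }
 *   document.attachEvent('onclick', onBodyClick);
 *   // later:
 *   document.detachEvent('onclick', onBodyClick);
 */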
/*
* This function binds the event object passed along in an
* event to window.event
*/
function emulateEventHandlers(eventNames) {
for (var i = 0; i < eventNames.length; i++) {
document.addEventListener(eventNames[i], function (e) {
window.event = e;
}, true); // using capture
}
}
/*
* Simple emulation of document.all
* this one is far from complete. Be cautious
*/
function emulateAllModel() {
var allGetter = function () {
var a = this.getElementsByTagName("*");
var node = this;
a.tags = function (sTagName) {
return node.getElementsByTagName(sTagName);
};
return a;
};
HTMLDocument.prototype.__defineGetter__("all", allGetter);
HTMLElement.prototype.__defineGetter__("all", allGetter);
}
function extendElementModel() {
HTMLElement.prototype.__defineGetter__("parentElement", function () {
if (this.parentNode == this.ownerDocument) return null;
return this.parentNode;
});
HTMLElement.prototype.__defineGetter__("children", function () {
var tmp = [];
var j = 0;
var n;
for (var i = 0; i < this.childNodes.length; i++) {
n = this.childNodes[i];
if (n.nodeType == 1) {
tmp[j++] = n;
if (n.name) { // named children
if (!tmp[n.name])
tmp[n.name] = [];
tmp[n.name][tmp[n.name].length] = n;
}
if (n.id) // child with id
tmp[n.id] = n
}
}
return tmp;
});
HTMLElement.prototype.contains = function (oEl) {
if (oEl == this) return true;
if (oEl == null) return false;
return this.contains(oEl.parentNode);
};
}
/*
document.defaultView.getComputedStyle(el1, null).getPropertyValue('top');
*/
function emulateCurrentStyle(properties) {
HTMLElement.prototype.__defineGetter__("currentStyle", function () {
var cs = {};
var el = this;
for (var i = 0; i < properties.length; i++) {
//cs.__defineGetter__(properties[i], function () {
// window.status = "i: " + i ;
// return document.defaultView.getComputedStyle(el, null).getPropertyValue(properties[i]);
//});
cs.__defineGetter__(properties[i], encapsulateObjects(el, properties[i]));
}
return cs;
});
}
// used internally for emualteCurrentStyle
function encapsulateObjects(el, sProperty) {
return function () {
return document.defaultView.getComputedStyle(el, null).getPropertyValue(sProperty);
};
}
function emulateHTMLModel() {
  // This function is used to generate an HTML string for the text properties/methods.
  // It replaces '\n' with "<BR>", collapses consecutive white space,
  // and also replaces some special characters.
function convertTextToHTML(s) {
    s = s.replace(/\&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;").replace(/\n/g, "<BR>");
    while (/\s\s/.test(s))
      s = s.replace(/\s\s/, " &nbsp;");
return s.replace(/\s/g, " ");
}
HTMLElement.prototype.insertAdjacentHTML = function (sWhere, sHTML) {
var df; // : DocumentFragment
var r = this.ownerDocument.createRange();
switch (String(sWhere).toLowerCase()) {
case "beforebegin":
r.setStartBefore(this);
df = r.createContextualFragment(sHTML);
this.parentNode.insertBefore(df, this);
break;
case "afterbegin":
r.selectNodeContents(this);
r.collapse(true);
df = r.createContextualFragment(sHTML);
this.insertBefore(df, this.firstChild);
break;
case "beforeend":
r.selectNodeContents(this);
r.collapse(false);
df = r.createContextualFragment(sHTML);
this.appendChild(df);
break;
case "afterend":
r.setStartAfter(this);
df = r.createContextualFragment(sHTML);
this.parentNode.insertBefore(df, this.nextSibling);
break;
}
};
HTMLElement.prototype.__defineSetter__("outerHTML", function (sHTML) {
var r = this.ownerDocument.createRange();
r.setStartBefore(this);
var df = r.createContextualFragment(sHTML);
this.parentNode.replaceChild(df, this);
return sHTML;
});
HTMLElement.prototype.__defineGetter__("canHaveChildren", function () {
switch (this.tagName) {
case "AREA":
case "BASE":
case "BASEFONT":
case "COL":
case "FRAME":
case "HR":
case "IMG":
case "BR":
case "INPUT":
case "ISINDEX":
case "LINK":
case "META":
case "PARAM":
return false;
}
return true;
});
HTMLElement.prototype.__defineGetter__("outerHTML", function () {
var attr, attrs = this.attributes;
var str = "<" + this.tagName;
for (var i = 0; i < attrs.length; i++) {
attr = attrs[i];
if (attr.specified)
str += " " + attr.name + '="' + attr.value + '"';
}
if (!this.canHaveChildren)
return str + ">";
return str + ">" + this.innerHTML + "</" + this.tagName + ">";
});
HTMLElement.prototype.__defineSetter__("innerText", function (sText) {
this.innerHTML = convertTextToHTML(sText);
return sText;
});
var tmpGet;
HTMLElement.prototype.__defineGetter__("innerText", tmpGet = function () {
var r = this.ownerDocument.createRange();
r.selectNodeContents(this);
return r.toString();
});
HTMLElement.prototype.__defineSetter__("outerText", function (sText) {
this.outerHTML = convertTextToHTML(sText);
return sText;
});
HTMLElement.prototype.__defineGetter__("outerText", tmpGet);
HTMLElement.prototype.insertAdjacentText = function (sWhere, sText) {
this.insertAdjacentHTML(sWhere, convertTextToHTML(sText));
};
}
/**
* encodes a string to be used as filename. this is used for example for the texttype class.
*
* NOTE: THIS IS THE JAVASCRIPT IMPLEMENTATION OF core/file/Bs_FileUtil.class.php
*
* examples:
* 1) this is a
* multiline string
* becomes: "this_eis_ea_e_nmultiline_estring
* _e = space (empty), _n = newline
*
* @access public
* @var string $filename
* @param char $e (escape character, default is the underscore '_'.)
* @return string
*/
function encodeFilename(filename, e) {
if (typeof(e) == 'undefined') e = '_';
  //now replace everything that's not 0-9 a-z A-Z with its ASCII value, eg '_038' for '&'.
var ret = '';
for (var i=0; i<filename.length; i++) {
var chr = filename.substr(i,1);
if (chr == e) {
ret += chr;
continue;
}
    var ord = chr.charCodeAt(0);
if ((ord < 48) || (ord > 122) || ((ord > 57) && (ord < 65)) || ((ord > 90) && (ord < 97))) {
if (ord < 10) {
ret += e + '00' + ord;
} else if (ord < 100) {
ret += e + '0' + ord;
} else {
ret += e + ord;
}
} else {
ret += chr;
}
}
return ret;
}
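/**
 * EXAMPLE USAGE (illustrative only; results assume encodeFilename() as defined above):
 *   encodeFilename('a&b.txt')       // 'a_038b_046txt'  (& -> _038, . -> _046)
 *   encodeFilename('foo bar', '-')  // 'foo-032bar'     (space -> ASCII 32, escaped with '-')
 */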
|
'use babel';
import axios from 'axios'
const search = crate => `https://crates.io/api/v1/crates?page=1&per_page=20&q=${crate}&sort=`
export default {
provide() {
return {
selector: '.source.toml',
inclusionPriority: 1,
excludeLowerPriority: true,
suggestionPriority: 2,
getSuggestions({
editor,
bufferPosition,
scopeDescriptor,
prefix
}) {
// Avoids completion for '[..]' sections and 'crate = {..}'
if(bufferPosition.column !== prefix.length) {
return [];
}
{
// Is this inside the [dependencies] section?
const text = editor.getTextInBufferRange([[0, 0], bufferPosition])
const last_bracket = text.lastIndexOf('[')
if(last_bracket == -1) {
return []
}
const last_section = text.substring(last_bracket, last_bracket + 14)
if(last_section != '[dependencies]') {
return []
}
}
return new Promise(resolve => {
axios.get(search(prefix)).then(({data}) => {
const suggestions = data.crates.map(({
name,
max_version,
description
}) => ({
text: `${name} = "${max_version}"`,
description
}))
resolve(suggestions)
})
})
}
}
},
activate(state) {},
deactivate() {},
serialize() {}
};
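/*
 * EXAMPLE (illustrative only): for a prefix like "serde", each crates.io hit is
 * mapped to an autocomplete-plus suggestion shaped like
 *   { text: 'serde = "1.0.0"', description: 'A serialization framework' }
 * (the version and description shown here are placeholders, not real API results).
 */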
|
const { User } = require("../models");
const bcrypt = require("bcrypt");
const jwt = require("jsonwebtoken");
exports.signin = (req, res) => {
/*
*POST api/auth-admin/signin
* this function signin or login
*/
const { username, password } = req.body;
User.findOne({
where: {
username: username
}
}).then((user) => {
if (user) {
const checkPassword = bcrypt.compareSync(password, user.password); // true
if (checkPassword) {
const token = jwt.sign(
{
user: {
id: user.id,
username: user.username
}
},
"secret"
);
res.status(200).json({
message: "Success Signin",
data: { token, role: user.role }
});
} else {
res.status(403).json({
message: "Invalid Signin"
});
}
} else {
res.status(403).json({
message: "Invalid Signin"
});
}
});
};
exports.signup = (req, res) => {
/*
*POST api/auth-admin/signup
* this function signup
*/
const { username, password, name, role } = req.body;
const hashPassword = bcrypt.hashSync(password, 10);
console.log(username);
User.create({
username,
password: hashPassword,
name,
role
})
.then((user) => {
res.status(201).json({
message: "Success Create User",
data: user
});
})
.catch((err) => {
if (err.errors[0].message) {
const message = err.errors[0].message;
res.status(403).json({
message: message
});
} else {
res.status(500).json({
message: "Something Went Wrong"
});
}
});
};
exports.me = (req, res) => {
/*
* GET api/auth-admin/me
* this function get me
*/
const { id } = req.user;
User.findOne({
where: {
id: id
}
})
.then((user) => {
res.status(200).json({
message: "Success Read User",
data: user
});
})
.catch((err) => {
res.status(500).json({
message: "Something Went Wrong"
});
});
};
exports.update = (req, res) => {
const { id } = req.params;
const { username, password, name, role } = req.body;
const hashPassword = bcrypt.hashSync(password, 10);
User.findOne({
where: { id: id }
})
.then((user) => {
if (user) {
user
.update({
username,
password: hashPassword,
name,
role
})
.then((updatedUser) => {
delCache("User");
res.status(200).json({
message: "Success Update User",
data: user
});
})
.catch((err) => {
if (err.errors[0].message) {
const message = err.errors[0].message;
res.status(403).json({
message: message
});
} else {
res.status(500).json({
message: "Something Went Wrong"
});
}
});
} else {
res.status(404).json({
message: "User Not Found"
});
}
})
.catch((err) => {
if (err.errors[0].message) {
const message = err.errors[0].message;
res.status(403).json({
message: message
});
} else {
res.status(500).json({
message: "Something Went Wrong"
});
}
});
};
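/*
 * EXAMPLE USAGE (illustrative sketch only; the require path and the verifyToken
 * middleware are assumptions, not part of this module):
 *
 *   const express = require("express");
 *   const authAdmin = require("./controllers/authAdmin"); // this module
 *   const router = express.Router();
 *
 *   router.post("/api/auth-admin/signin", authAdmin.signin);
 *   router.post("/api/auth-admin/signup", authAdmin.signup);
 *   router.get("/api/auth-admin/me", verifyToken, authAdmin.me); // verifyToken must populate req.user
 */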
|
/*
* $Id: rawdeflate.js,v 0.4 2013/02/23 01:55:59 dankogai Exp dankogai $
*
* Original:
* http://www.onicos.com/staff/iz/amuse/javascript/expert/deflate.txt
*/
(function(){
/* Copyright (C) 1999 Masanao Izumo <[email protected]>
* Version: 1.0.1
* LastModified: Dec 25 1999
*/
/* Interface:
* data = zip_deflate(src);
*/
/* constant parameters */
var zip_WSIZE = 32768; // Sliding Window size
var zip_STORED_BLOCK = 0;
var zip_STATIC_TREES = 1;
var zip_DYN_TREES = 2;
/* for deflate */
var zip_DEFAULT_LEVEL = 6;
var zip_FULL_SEARCH = true;
var zip_INBUFSIZ = 32768; // Input buffer size
var zip_INBUF_EXTRA = 64; // Extra buffer
var zip_OUTBUFSIZ = 1024 * 8;
var zip_window_size = 2 * zip_WSIZE;
var zip_MIN_MATCH = 3;
var zip_MAX_MATCH = 258;
var zip_BITS = 16;
// for SMALL_MEM
var zip_LIT_BUFSIZE = 0x2000;
var zip_HASH_BITS = 13;
// for MEDIUM_MEM
// var zip_LIT_BUFSIZE = 0x4000;
// var zip_HASH_BITS = 14;
// for BIG_MEM
// var zip_LIT_BUFSIZE = 0x8000;
// var zip_HASH_BITS = 15;
if(zip_LIT_BUFSIZE > zip_INBUFSIZ)
alert("error: zip_INBUFSIZ is too small");
if((zip_WSIZE<<1) > (1<<zip_BITS))
alert("error: zip_WSIZE is too large");
if(zip_HASH_BITS > zip_BITS-1)
alert("error: zip_HASH_BITS is too large");
if(zip_HASH_BITS < 8 || zip_MAX_MATCH != 258)
alert("error: Code too clever");
var zip_DIST_BUFSIZE = zip_LIT_BUFSIZE;
var zip_HASH_SIZE = 1 << zip_HASH_BITS;
var zip_HASH_MASK = zip_HASH_SIZE - 1;
var zip_WMASK = zip_WSIZE - 1;
var zip_NIL = 0; // Tail of hash chains
var zip_TOO_FAR = 4096;
var zip_MIN_LOOKAHEAD = zip_MAX_MATCH + zip_MIN_MATCH + 1;
var zip_MAX_DIST = zip_WSIZE - zip_MIN_LOOKAHEAD;
var zip_SMALLEST = 1;
var zip_MAX_BITS = 15;
var zip_MAX_BL_BITS = 7;
var zip_LENGTH_CODES = 29;
var zip_LITERALS =256;
var zip_END_BLOCK = 256;
var zip_L_CODES = zip_LITERALS + 1 + zip_LENGTH_CODES;
var zip_D_CODES = 30;
var zip_BL_CODES = 19;
var zip_REP_3_6 = 16;
var zip_REPZ_3_10 = 17;
var zip_REPZ_11_138 = 18;
var zip_HEAP_SIZE = 2 * zip_L_CODES + 1;
var zip_H_SHIFT = parseInt((zip_HASH_BITS + zip_MIN_MATCH - 1) /
zip_MIN_MATCH);
/* variables */
var zip_free_queue;
var zip_qhead, zip_qtail;
var zip_initflag;
var zip_outbuf = null;
var zip_outcnt, zip_outoff;
var zip_complete;
var zip_window;
var zip_d_buf;
var zip_l_buf;
var zip_prev;
var zip_bi_buf;
var zip_bi_valid;
var zip_block_start;
var zip_ins_h;
var zip_hash_head;
var zip_prev_match;
var zip_match_available;
var zip_match_length;
var zip_prev_length;
var zip_strstart;
var zip_match_start;
var zip_eofile;
var zip_lookahead;
var zip_max_chain_length;
var zip_max_lazy_match;
var zip_compr_level;
var zip_good_match;
var zip_nice_match;
var zip_dyn_ltree;
var zip_dyn_dtree;
var zip_static_ltree;
var zip_static_dtree;
var zip_bl_tree;
var zip_l_desc;
var zip_d_desc;
var zip_bl_desc;
var zip_bl_count;
var zip_heap;
var zip_heap_len;
var zip_heap_max;
var zip_depth;
var zip_length_code;
var zip_dist_code;
var zip_base_length;
var zip_base_dist;
var zip_flag_buf;
var zip_last_lit;
var zip_last_dist;
var zip_last_flags;
var zip_flags;
var zip_flag_bit;
var zip_opt_len;
var zip_static_len;
var zip_deflate_data;
var zip_deflate_pos;
/* objects (deflate) */
var zip_DeflateCT = function() {
this.fc = 0; // frequency count or bit string
this.dl = 0; // father node in Huffman tree or length of bit string
}
var zip_DeflateTreeDesc = function() {
this.dyn_tree = null; // the dynamic tree
this.static_tree = null; // corresponding static tree or NULL
this.extra_bits = null; // extra bits for each code or NULL
this.extra_base = 0; // base index for extra_bits
this.elems = 0; // max number of elements in the tree
this.max_length = 0; // max bit length for the codes
this.max_code = 0; // largest code with non zero frequency
}
/* Values for max_lazy_match, good_match and max_chain_length, depending on
* the desired pack level (0..9). The values given below have been tuned to
* exclude worst case performance for pathological files. Better values may be
* found for specific files.
*/
var zip_DeflateConfiguration = function(a, b, c, d) {
this.good_length = a; // reduce lazy search above this match length
this.max_lazy = b; // do not perform lazy search above this match length
this.nice_length = c; // quit search above this match length
this.max_chain = d;
}
var zip_DeflateBuffer = function() {
this.next = null;
this.len = 0;
this.ptr = new Array(zip_OUTBUFSIZ);
this.off = 0;
}
/* constant tables */
var zip_extra_lbits = new Array(
0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0);
var zip_extra_dbits = new Array(
0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13);
var zip_extra_blbits = new Array(
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7);
var zip_bl_order = new Array(
16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15);
var zip_configuration_table = new Array(
new zip_DeflateConfiguration(0, 0, 0, 0),
new zip_DeflateConfiguration(4, 4, 8, 4),
new zip_DeflateConfiguration(4, 5, 16, 8),
new zip_DeflateConfiguration(4, 6, 32, 32),
new zip_DeflateConfiguration(4, 4, 16, 16),
new zip_DeflateConfiguration(8, 16, 32, 32),
new zip_DeflateConfiguration(8, 16, 128, 128),
new zip_DeflateConfiguration(8, 32, 128, 256),
new zip_DeflateConfiguration(32, 128, 258, 1024),
new zip_DeflateConfiguration(32, 258, 258, 4096));
/* routines (deflate) */
var zip_deflate_start = function(level) {
var i;
if(!level)
level = zip_DEFAULT_LEVEL;
else if(level < 1)
level = 1;
else if(level > 9)
level = 9;
zip_compr_level = level;
zip_initflag = false;
zip_eofile = false;
if(zip_outbuf != null)
return;
zip_free_queue = zip_qhead = zip_qtail = null;
zip_outbuf = new Array(zip_OUTBUFSIZ);
zip_window = new Array(zip_window_size);
zip_d_buf = new Array(zip_DIST_BUFSIZE);
zip_l_buf = new Array(zip_INBUFSIZ + zip_INBUF_EXTRA);
zip_prev = new Array(1 << zip_BITS);
zip_dyn_ltree = new Array(zip_HEAP_SIZE);
for(i = 0; i < zip_HEAP_SIZE; i++)
zip_dyn_ltree[i] = new zip_DeflateCT();
zip_dyn_dtree = new Array(2*zip_D_CODES+1);
for(i = 0; i < 2*zip_D_CODES+1; i++)
zip_dyn_dtree[i] = new zip_DeflateCT();
zip_static_ltree = new Array(zip_L_CODES+2);
for(i = 0; i < zip_L_CODES+2; i++)
zip_static_ltree[i] = new zip_DeflateCT();
zip_static_dtree = new Array(zip_D_CODES);
for(i = 0; i < zip_D_CODES; i++)
zip_static_dtree[i] = new zip_DeflateCT();
zip_bl_tree = new Array(2*zip_BL_CODES+1);
for(i = 0; i < 2*zip_BL_CODES+1; i++)
zip_bl_tree[i] = new zip_DeflateCT();
zip_l_desc = new zip_DeflateTreeDesc();
zip_d_desc = new zip_DeflateTreeDesc();
zip_bl_desc = new zip_DeflateTreeDesc();
zip_bl_count = new Array(zip_MAX_BITS+1);
zip_heap = new Array(2*zip_L_CODES+1);
zip_depth = new Array(2*zip_L_CODES+1);
zip_length_code = new Array(zip_MAX_MATCH-zip_MIN_MATCH+1);
zip_dist_code = new Array(512);
zip_base_length = new Array(zip_LENGTH_CODES);
zip_base_dist = new Array(zip_D_CODES);
zip_flag_buf = new Array(parseInt(zip_LIT_BUFSIZE / 8));
}
var zip_deflate_end = function() {
zip_free_queue = zip_qhead = zip_qtail = null;
zip_outbuf = null;
zip_window = null;
zip_d_buf = null;
zip_l_buf = null;
zip_prev = null;
zip_dyn_ltree = null;
zip_dyn_dtree = null;
zip_static_ltree = null;
zip_static_dtree = null;
zip_bl_tree = null;
zip_l_desc = null;
zip_d_desc = null;
zip_bl_desc = null;
zip_bl_count = null;
zip_heap = null;
zip_depth = null;
zip_length_code = null;
zip_dist_code = null;
zip_base_length = null;
zip_base_dist = null;
zip_flag_buf = null;
}
var zip_reuse_queue = function(p) {
p.next = zip_free_queue;
zip_free_queue = p;
}
var zip_new_queue = function() {
var p;
if(zip_free_queue != null)
{
p = zip_free_queue;
zip_free_queue = zip_free_queue.next;
}
else
p = new zip_DeflateBuffer();
p.next = null;
p.len = p.off = 0;
return p;
}
var zip_head1 = function(i) {
return zip_prev[zip_WSIZE + i];
}
var zip_head2 = function(i, val) {
return zip_prev[zip_WSIZE + i] = val;
}
/* put_byte is used for the compressed output, put_ubyte for the
* uncompressed output. However unlzw() uses window for its
* suffix table instead of its output buffer, so it does not use put_ubyte
* (to be cleaned up).
*/
var zip_put_byte = function(c) {
zip_outbuf[zip_outoff + zip_outcnt++] = c;
if(zip_outoff + zip_outcnt == zip_OUTBUFSIZ)
zip_qoutbuf();
}
/* Output a 16 bit value, lsb first */
var zip_put_short = function(w) {
w &= 0xffff;
if(zip_outoff + zip_outcnt < zip_OUTBUFSIZ - 2) {
zip_outbuf[zip_outoff + zip_outcnt++] = (w & 0xff);
zip_outbuf[zip_outoff + zip_outcnt++] = (w >>> 8);
} else {
zip_put_byte(w & 0xff);
zip_put_byte(w >>> 8);
}
}
/* ==========================================================================
* Insert string s in the dictionary and set match_head to the previous head
* of the hash chain (the most recent string with same hash key). Return
* the previous length of the hash chain.
 * IN assertion: all calls to INSERT_STRING are made with consecutive
* input characters and the first MIN_MATCH bytes of s are valid
* (except for the last MIN_MATCH-1 bytes of the input file).
*/
var zip_INSERT_STRING = function() {
zip_ins_h = ((zip_ins_h << zip_H_SHIFT)
^ (zip_window[zip_strstart + zip_MIN_MATCH - 1] & 0xff))
& zip_HASH_MASK;
zip_hash_head = zip_head1(zip_ins_h);
zip_prev[zip_strstart & zip_WMASK] = zip_hash_head;
zip_head2(zip_ins_h, zip_strstart);
}
/* Send a code of the given tree. c and tree must not have side effects */
var zip_SEND_CODE = function(c, tree) {
zip_send_bits(tree[c].fc, tree[c].dl);
}
/* Mapping from a distance to a distance code. dist is the distance - 1 and
* must not have side effects. dist_code[256] and dist_code[257] are never
* used.
*/
var zip_D_CODE = function(dist) {
return (dist < 256 ? zip_dist_code[dist]
: zip_dist_code[256 + (dist>>7)]) & 0xff;
}
/* ==========================================================================
 * Compares two subtrees, using the tree depth as tie breaker when
* the subtrees have equal frequency. This minimizes the worst case length.
*/
var zip_SMALLER = function(tree, n, m) {
return tree[n].fc < tree[m].fc ||
(tree[n].fc == tree[m].fc && zip_depth[n] <= zip_depth[m]);
}
/* ==========================================================================
* read string data
*/
var zip_read_buff = function(buff, offset, n) {
var i;
for(i = 0; i < n && zip_deflate_pos < zip_deflate_data.length; i++)
buff[offset + i] =
zip_deflate_data.charCodeAt(zip_deflate_pos++) & 0xff;
return i;
}
/* ==========================================================================
* Initialize the "longest match" routines for a new file
*/
var zip_lm_init = function() {
var j;
/* Initialize the hash table. */
for(j = 0; j < zip_HASH_SIZE; j++)
// zip_head2(j, zip_NIL);
zip_prev[zip_WSIZE + j] = 0;
/* prev will be initialized on the fly */
/* Set the default configuration parameters:
*/
zip_max_lazy_match = zip_configuration_table[zip_compr_level].max_lazy;
zip_good_match = zip_configuration_table[zip_compr_level].good_length;
if(!zip_FULL_SEARCH)
zip_nice_match = zip_configuration_table[zip_compr_level].nice_length;
zip_max_chain_length = zip_configuration_table[zip_compr_level].max_chain;
zip_strstart = 0;
zip_block_start = 0;
zip_lookahead = zip_read_buff(zip_window, 0, 2 * zip_WSIZE);
if(zip_lookahead <= 0) {
zip_eofile = true;
zip_lookahead = 0;
return;
}
zip_eofile = false;
/* Make sure that we always have enough lookahead. This is important
* if input comes from a device such as a tty.
*/
while(zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile)
zip_fill_window();
/* If lookahead < MIN_MATCH, ins_h is garbage, but this is
* not important since only literal bytes will be emitted.
*/
zip_ins_h = 0;
for(j = 0; j < zip_MIN_MATCH - 1; j++) {
// UPDATE_HASH(ins_h, window[j]);
zip_ins_h = ((zip_ins_h << zip_H_SHIFT) ^ (zip_window[j] & 0xff)) & zip_HASH_MASK;
}
}
/* ==========================================================================
* Set match_start to the longest match starting at the given string and
* return its length. Matches shorter or equal to prev_length are discarded,
* in which case the result is equal to prev_length and match_start is
* garbage.
* IN assertions: cur_match is the head of the hash chain for the current
* string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
*/
var zip_longest_match = function(cur_match) {
var chain_length = zip_max_chain_length; // max hash chain length
var scanp = zip_strstart; // current string
var matchp; // matched string
var len; // length of current match
var best_len = zip_prev_length; // best match length so far
/* Stop when cur_match becomes <= limit. To simplify the code,
* we prevent matches with the string of window index 0.
*/
var limit = (zip_strstart > zip_MAX_DIST ? zip_strstart - zip_MAX_DIST : zip_NIL);
var strendp = zip_strstart + zip_MAX_MATCH;
var scan_end1 = zip_window[scanp + best_len - 1];
var scan_end = zip_window[scanp + best_len];
/* Do not waste too much time if we already have a good match: */
if(zip_prev_length >= zip_good_match)
chain_length >>= 2;
// Assert(encoder->strstart <= window_size-MIN_LOOKAHEAD, "insufficient lookahead");
do {
// Assert(cur_match < encoder->strstart, "no future");
matchp = cur_match;
/* Skip to next match if the match length cannot increase
* or if the match length is less than 2:
*/
if(zip_window[matchp + best_len] != scan_end ||
zip_window[matchp + best_len - 1] != scan_end1 ||
zip_window[matchp] != zip_window[scanp] ||
zip_window[++matchp] != zip_window[scanp + 1]) {
continue;
}
/* The check at best_len-1 can be removed because it will be made
* again later. (This heuristic is not always a win.)
* It is not necessary to compare scan[2] and match[2] since they
* are always equal when the other bytes match, given that
* the hash keys are equal and that HASH_BITS >= 8.
*/
scanp += 2;
matchp++;
/* We check for insufficient lookahead only every 8th comparison;
* the 256th check will be made at strstart+258.
*/
do {
} while(zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
scanp < strendp);
len = zip_MAX_MATCH - (strendp - scanp);
scanp = strendp - zip_MAX_MATCH;
if(len > best_len) {
zip_match_start = cur_match;
best_len = len;
if(zip_FULL_SEARCH) {
if(len >= zip_MAX_MATCH) break;
} else {
if(len >= zip_nice_match) break;
}
scan_end1 = zip_window[scanp + best_len-1];
scan_end = zip_window[scanp + best_len];
}
} while((cur_match = zip_prev[cur_match & zip_WMASK]) > limit
&& --chain_length != 0);
return best_len;
}
/* ==========================================================================
* Fill the window when the lookahead becomes insufficient.
* Updates strstart and lookahead, and sets eofile if end of input file.
* IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0
* OUT assertions: at least one byte has been read, or eofile is set;
* file reads are performed for at least two bytes (required for the
* translate_eol option).
*/
var zip_fill_window = function() {
var n, m;
// Amount of free space at the end of the window.
var more = zip_window_size - zip_lookahead - zip_strstart;
/* If the window is almost full and there is insufficient lookahead,
* move the upper half to the lower one to make room in the upper half.
*/
if(more == -1) {
/* Very unlikely, but possible on 16 bit machine if strstart == 0
* and lookahead == 1 (input done one byte at time)
*/
more--;
} else if(zip_strstart >= zip_WSIZE + zip_MAX_DIST) {
/* By the IN assertion, the window is not empty so we can't confuse
* more == 0 with more == 64K on a 16 bit machine.
*/
// Assert(window_size == (ulg)2*WSIZE, "no sliding with BIG_MEM");
// System.arraycopy(window, WSIZE, window, 0, WSIZE);
for(n = 0; n < zip_WSIZE; n++)
zip_window[n] = zip_window[n + zip_WSIZE];
zip_match_start -= zip_WSIZE;
zip_strstart -= zip_WSIZE; /* we now have strstart >= MAX_DIST: */
zip_block_start -= zip_WSIZE;
for(n = 0; n < zip_HASH_SIZE; n++) {
m = zip_head1(n);
zip_head2(n, m >= zip_WSIZE ? m - zip_WSIZE : zip_NIL);
}
for(n = 0; n < zip_WSIZE; n++) {
/* If n is not on any hash chain, prev[n] is garbage but
* its value will never be used.
*/
m = zip_prev[n];
zip_prev[n] = (m >= zip_WSIZE ? m - zip_WSIZE : zip_NIL);
}
more += zip_WSIZE;
}
// At this point, more >= 2
if(!zip_eofile) {
n = zip_read_buff(zip_window, zip_strstart + zip_lookahead, more);
if(n <= 0)
zip_eofile = true;
else
zip_lookahead += n;
}
}
/* ==========================================================================
 * Processes a new input file and returns its compressed length. This
 * function does not perform lazy evaluation of matches and inserts
* new strings in the dictionary only for unmatched strings or for short
* matches. It is used only for the fast compression options.
*/
var zip_deflate_fast = function() {
while(zip_lookahead != 0 && zip_qhead == null) {
var flush; // set if current block must be flushed
/* Insert the string window[strstart .. strstart+2] in the
* dictionary, and set hash_head to the head of the hash chain:
*/
zip_INSERT_STRING();
/* Find the longest match, discarding those <= prev_length.
* At this point we have always match_length < MIN_MATCH
*/
if(zip_hash_head != zip_NIL &&
zip_strstart - zip_hash_head <= zip_MAX_DIST) {
/* To simplify the code, we prevent matches with the string
* of window index 0 (in particular we have to avoid a match
* of the string with itself at the start of the input file).
*/
zip_match_length = zip_longest_match(zip_hash_head);
/* longest_match() sets match_start */
if(zip_match_length > zip_lookahead)
zip_match_length = zip_lookahead;
}
if(zip_match_length >= zip_MIN_MATCH) {
// check_match(strstart, match_start, match_length);
flush = zip_ct_tally(zip_strstart - zip_match_start,
zip_match_length - zip_MIN_MATCH);
zip_lookahead -= zip_match_length;
/* Insert new strings in the hash table only if the match length
* is not too large. This saves time but degrades compression.
*/
if(zip_match_length <= zip_max_lazy_match) {
zip_match_length--; // string at strstart already in hash table
do {
zip_strstart++;
zip_INSERT_STRING();
/* strstart never exceeds WSIZE-MAX_MATCH, so there are
* always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
* these bytes are garbage, but it does not matter since
* the next lookahead bytes will be emitted as literals.
*/
} while(--zip_match_length != 0);
zip_strstart++;
} else {
zip_strstart += zip_match_length;
zip_match_length = 0;
zip_ins_h = zip_window[zip_strstart] & 0xff;
// UPDATE_HASH(ins_h, window[strstart + 1]);
zip_ins_h = ((zip_ins_h<<zip_H_SHIFT) ^ (zip_window[zip_strstart + 1] & 0xff)) & zip_HASH_MASK;
//#if MIN_MATCH != 3
// Call UPDATE_HASH() MIN_MATCH-3 more times
//#endif
}
} else {
/* No match, output a literal byte */
flush = zip_ct_tally(0, zip_window[zip_strstart] & 0xff);
zip_lookahead--;
zip_strstart++;
}
if(flush) {
zip_flush_block(0);
zip_block_start = zip_strstart;
}
/* Make sure that we always have enough lookahead, except
* at the end of the input file. We need MAX_MATCH bytes
* for the next match, plus MIN_MATCH bytes to insert the
* string following the next match.
*/
while(zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile)
zip_fill_window();
}
}
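/* ==========================================================================
 * Same as deflate_fast, but achieves better compression. We use a lazy
 * evaluation for matches: a match is finally adopted only if there is
 * no better match at the next window position.
 */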
var zip_deflate_better = function() {
/* Process the input block. */
while(zip_lookahead != 0 && zip_qhead == null) {
/* Insert the string window[strstart .. strstart+2] in the
* dictionary, and set hash_head to the head of the hash chain:
*/
zip_INSERT_STRING();
/* Find the longest match, discarding those <= prev_length.
*/
zip_prev_length = zip_match_length;
zip_prev_match = zip_match_start;
zip_match_length = zip_MIN_MATCH - 1;
if(zip_hash_head != zip_NIL &&
zip_prev_length < zip_max_lazy_match &&
zip_strstart - zip_hash_head <= zip_MAX_DIST) {
/* To simplify the code, we prevent matches with the string
* of window index 0 (in particular we have to avoid a match
* of the string with itself at the start of the input file).
*/
zip_match_length = zip_longest_match(zip_hash_head);
/* longest_match() sets match_start */
if(zip_match_length > zip_lookahead)
zip_match_length = zip_lookahead;
/* Ignore a length 3 match if it is too distant: */
if(zip_match_length == zip_MIN_MATCH &&
zip_strstart - zip_match_start > zip_TOO_FAR) {
/* If prev_match is also MIN_MATCH, match_start is garbage
* but we will ignore the current match anyway.
*/
zip_match_length--;
}
}
/* If there was a match at the previous step and the current
* match is not better, output the previous match:
*/
if(zip_prev_length >= zip_MIN_MATCH &&
zip_match_length <= zip_prev_length) {
var flush; // set if current block must be flushed
// check_match(strstart - 1, prev_match, prev_length);
flush = zip_ct_tally(zip_strstart - 1 - zip_prev_match,
zip_prev_length - zip_MIN_MATCH);
/* Insert in hash table all strings up to the end of the match.
* strstart-1 and strstart are already inserted.
*/
zip_lookahead -= zip_prev_length - 1;
zip_prev_length -= 2;
do {
zip_strstart++;
zip_INSERT_STRING();
/* strstart never exceeds WSIZE-MAX_MATCH, so there are
* always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
* these bytes are garbage, but it does not matter since the
* next lookahead bytes will always be emitted as literals.
*/
} while(--zip_prev_length != 0);
zip_match_available = 0;
zip_match_length = zip_MIN_MATCH - 1;
zip_strstart++;
if(flush) {
zip_flush_block(0);
zip_block_start = zip_strstart;
}
} else if(zip_match_available != 0) {
/* If there was no match at the previous position, output a
* single literal. If there was a match but the current match
* is longer, truncate the previous match to a single literal.
*/
if(zip_ct_tally(0, zip_window[zip_strstart - 1] & 0xff)) {
zip_flush_block(0);
zip_block_start = zip_strstart;
}
zip_strstart++;
zip_lookahead--;
} else {
/* There is no previous match to compare with, wait for
* the next step to decide.
*/
zip_match_available = 1;
zip_strstart++;
zip_lookahead--;
}
/* Make sure that we always have enough lookahead, except
* at the end of the input file. We need MAX_MATCH bytes
* for the next match, plus MIN_MATCH bytes to insert the
* string following the next match.
*/
while(zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile)
zip_fill_window();
}
}
var zip_init_deflate = function() {
if(zip_eofile)
return;
zip_bi_buf = 0;
zip_bi_valid = 0;
zip_ct_init();
zip_lm_init();
zip_qhead = null;
zip_outcnt = 0;
zip_outoff = 0;
zip_match_available = 0;
if(zip_compr_level <= 3)
{
zip_prev_length = zip_MIN_MATCH - 1;
zip_match_length = 0;
}
else
{
zip_match_length = zip_MIN_MATCH - 1;
zip_match_available = 0;
}
zip_complete = false;
}
/* ==========================================================================
 * Deflate as much input as possible and copy up to buff_size bytes of
 * compressed output into buff, starting at off. Returns the number of
 * bytes written. Dispatches to deflate_fast() for low compression levels
 * and to deflate_better() (lazy match evaluation) otherwise.
 */
var zip_deflate_internal = function(buff, off, buff_size) {
var n;
if(!zip_initflag)
{
zip_init_deflate();
zip_initflag = true;
if(zip_lookahead == 0) { // empty
zip_complete = true;
return 0;
}
}
if((n = zip_qcopy(buff, off, buff_size)) == buff_size)
return buff_size;
if(zip_complete)
return n;
if(zip_compr_level <= 3) // optimized for speed
zip_deflate_fast();
else
zip_deflate_better();
if(zip_lookahead == 0) {
if(zip_match_available != 0)
zip_ct_tally(0, zip_window[zip_strstart - 1] & 0xff);
zip_flush_block(1);
zip_complete = true;
}
return n + zip_qcopy(buff, n + off, buff_size - n);
}
var zip_qcopy = function(buff, off, buff_size) {
var n, i, j;
n = 0;
while(zip_qhead != null && n < buff_size)
{
i = buff_size - n;
if(i > zip_qhead.len)
i = zip_qhead.len;
// System.arraycopy(qhead.ptr, qhead.off, buff, off + n, i);
for(j = 0; j < i; j++)
buff[off + n + j] = zip_qhead.ptr[zip_qhead.off + j];
zip_qhead.off += i;
zip_qhead.len -= i;
n += i;
if(zip_qhead.len == 0) {
var p;
p = zip_qhead;
zip_qhead = zip_qhead.next;
zip_reuse_queue(p);
}
}
if(n == buff_size)
return n;
if(zip_outoff < zip_outcnt) {
i = buff_size - n;
if(i > zip_outcnt - zip_outoff)
i = zip_outcnt - zip_outoff;
// System.arraycopy(outbuf, outoff, buff, off + n, i);
for(j = 0; j < i; j++)
buff[off + n + j] = zip_outbuf[zip_outoff + j];
zip_outoff += i;
n += i;
if(zip_outcnt == zip_outoff)
zip_outcnt = zip_outoff = 0;
}
return n;
}
/* ==========================================================================
* Allocate the match buffer, initialize the various tables and save the
* location of the internal file attribute (ascii/binary) and method
* (DEFLATE/STORE).
*/
var zip_ct_init = function() {
var n; // iterates over tree elements
var bits; // bit counter
var length; // length value
var code; // code value
var dist; // distance index
if(zip_static_dtree[0].dl != 0) return; // ct_init already called
zip_l_desc.dyn_tree = zip_dyn_ltree;
zip_l_desc.static_tree = zip_static_ltree;
zip_l_desc.extra_bits = zip_extra_lbits;
zip_l_desc.extra_base = zip_LITERALS + 1;
zip_l_desc.elems = zip_L_CODES;
zip_l_desc.max_length = zip_MAX_BITS;
zip_l_desc.max_code = 0;
zip_d_desc.dyn_tree = zip_dyn_dtree;
zip_d_desc.static_tree = zip_static_dtree;
zip_d_desc.extra_bits = zip_extra_dbits;
zip_d_desc.extra_base = 0;
zip_d_desc.elems = zip_D_CODES;
zip_d_desc.max_length = zip_MAX_BITS;
zip_d_desc.max_code = 0;
zip_bl_desc.dyn_tree = zip_bl_tree;
zip_bl_desc.static_tree = null;
zip_bl_desc.extra_bits = zip_extra_blbits;
zip_bl_desc.extra_base = 0;
zip_bl_desc.elems = zip_BL_CODES;
zip_bl_desc.max_length = zip_MAX_BL_BITS;
zip_bl_desc.max_code = 0;
// Initialize the mapping length (0..255) -> length code (0..28)
length = 0;
for(code = 0; code < zip_LENGTH_CODES-1; code++) {
zip_base_length[code] = length;
for(n = 0; n < (1<<zip_extra_lbits[code]); n++)
zip_length_code[length++] = code;
}
// Assert (length == 256, "ct_init: length != 256");
/* Note that the length 255 (match length 258) can be represented
* in two different ways: code 284 + 5 bits or code 285, so we
* overwrite length_code[255] to use the best encoding:
*/
zip_length_code[length-1] = code;
/* Initialize the mapping dist (0..32K) -> dist code (0..29) */
dist = 0;
for(code = 0 ; code < 16; code++) {
zip_base_dist[code] = dist;
for(n = 0; n < (1<<zip_extra_dbits[code]); n++) {
zip_dist_code[dist++] = code;
}
}
// Assert (dist == 256, "ct_init: dist != 256");
dist >>= 7; // from now on, all distances are divided by 128
for( ; code < zip_D_CODES; code++) {
zip_base_dist[code] = dist << 7;
for(n = 0; n < (1<<(zip_extra_dbits[code]-7)); n++)
zip_dist_code[256 + dist++] = code;
}
// Assert (dist == 256, "ct_init: 256+dist != 512");
// Construct the codes of the static literal tree
for(bits = 0; bits <= zip_MAX_BITS; bits++)
zip_bl_count[bits] = 0;
n = 0;
while(n <= 143) { zip_static_ltree[n++].dl = 8; zip_bl_count[8]++; }
while(n <= 255) { zip_static_ltree[n++].dl = 9; zip_bl_count[9]++; }
while(n <= 279) { zip_static_ltree[n++].dl = 7; zip_bl_count[7]++; }
while(n <= 287) { zip_static_ltree[n++].dl = 8; zip_bl_count[8]++; }
/* Codes 286 and 287 do not exist, but we must include them in the
* tree construction to get a canonical Huffman tree (longest code
* all ones)
*/
zip_gen_codes(zip_static_ltree, zip_L_CODES + 1);
/* The static distance tree is trivial: */
for(n = 0; n < zip_D_CODES; n++) {
zip_static_dtree[n].dl = 5;
zip_static_dtree[n].fc = zip_bi_reverse(n, 5);
}
// Initialize the first block of the first file:
zip_init_block();
}
/* ==========================================================================
* Initialize a new block.
*/
var zip_init_block = function() {
var n; // iterates over tree elements
// Initialize the trees.
for(n = 0; n < zip_L_CODES; n++) zip_dyn_ltree[n].fc = 0;
for(n = 0; n < zip_D_CODES; n++) zip_dyn_dtree[n].fc = 0;
for(n = 0; n < zip_BL_CODES; n++) zip_bl_tree[n].fc = 0;
zip_dyn_ltree[zip_END_BLOCK].fc = 1;
zip_opt_len = zip_static_len = 0;
zip_last_lit = zip_last_dist = zip_last_flags = 0;
zip_flags = 0;
zip_flag_bit = 1;
}
/* ==========================================================================
* Restore the heap property by moving down the tree starting at node k,
* exchanging a node with the smallest of its two sons if necessary, stopping
* when the heap property is re-established (each father smaller than its
* two sons).
*/
var zip_pqdownheap = function(
tree, // the tree to restore
k) { // node to move down
var v = zip_heap[k];
var j = k << 1; // left son of k
while(j <= zip_heap_len) {
// Set j to the smallest of the two sons:
if(j < zip_heap_len &&
zip_SMALLER(tree, zip_heap[j + 1], zip_heap[j]))
j++;
// Exit if v is smaller than both sons
if(zip_SMALLER(tree, v, zip_heap[j]))
break;
// Exchange v with the smallest son
zip_heap[k] = zip_heap[j];
k = j;
// And continue down the tree, setting j to the left son of k
j <<= 1;
}
zip_heap[k] = v;
}
/* ==========================================================================
* Compute the optimal bit lengths for a tree and update the total bit length
* for the current block.
* IN assertion: the fields freq and dad are set, heap[heap_max] and
* above are the tree nodes sorted by increasing frequency.
* OUT assertions: the field len is set to the optimal bit length, the
* array bl_count contains the frequencies for each bit length.
* The length opt_len is updated; static_len is also updated if stree is
* not null.
*/
var zip_gen_bitlen = function(desc) { // the tree descriptor
var tree = desc.dyn_tree;
var extra = desc.extra_bits;
var base = desc.extra_base;
var max_code = desc.max_code;
var max_length = desc.max_length;
var stree = desc.static_tree;
var h; // heap index
var n, m; // iterate over the tree elements
var bits; // bit length
var xbits; // extra bits
var f; // frequency
var overflow = 0; // number of elements with bit length too large
for(bits = 0; bits <= zip_MAX_BITS; bits++)
zip_bl_count[bits] = 0;
/* In a first pass, compute the optimal bit lengths (which may
* overflow in the case of the bit length tree).
*/
tree[zip_heap[zip_heap_max]].dl = 0; // root of the heap
for(h = zip_heap_max + 1; h < zip_HEAP_SIZE; h++) {
n = zip_heap[h];
bits = tree[tree[n].dl].dl + 1;
if(bits > max_length) {
bits = max_length;
overflow++;
}
tree[n].dl = bits;
// We overwrite tree[n].dl which is no longer needed
if(n > max_code)
continue; // not a leaf node
zip_bl_count[bits]++;
xbits = 0;
if(n >= base)
xbits = extra[n - base];
f = tree[n].fc;
zip_opt_len += f * (bits + xbits);
if(stree != null)
zip_static_len += f * (stree[n].dl + xbits);
}
if(overflow == 0)
return;
// This happens for example on obj2 and pic of the Calgary corpus
// Find the first bit length which could increase:
do {
bits = max_length - 1;
while(zip_bl_count[bits] == 0)
bits--;
zip_bl_count[bits]--; // move one leaf down the tree
zip_bl_count[bits + 1] += 2; // move one overflow item as its brother
zip_bl_count[max_length]--;
/* The brother of the overflow item also moves one step up,
* but this does not affect bl_count[max_length]
*/
overflow -= 2;
} while(overflow > 0);
/* Now recompute all bit lengths, scanning in increasing frequency.
* h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
* lengths instead of fixing only the wrong ones. This idea is taken
* from 'ar' written by Haruhiko Okumura.)
*/
for(bits = max_length; bits != 0; bits--) {
n = zip_bl_count[bits];
while(n != 0) {
m = zip_heap[--h];
if(m > max_code)
continue;
if(tree[m].dl != bits) {
zip_opt_len += (bits - tree[m].dl) * tree[m].fc;
	tree[m].dl = bits; // store the adjusted bit length (dl), not the frequency
}
n--;
}
}
}
/* ==========================================================================
* Generate the codes for a given tree and bit counts (which need not be
* optimal).
* IN assertion: the array bl_count contains the bit length statistics for
* the given tree and the field len is set for all tree elements.
* OUT assertion: the field code is set for all tree elements of non
* zero code length.
*/
var zip_gen_codes = function(tree, // the tree to decorate
max_code) { // largest code with non zero frequency
var next_code = new Array(zip_MAX_BITS+1); // next code value for each bit length
var code = 0; // running code value
var bits; // bit index
var n; // code index
/* The distribution counts are first used to generate the code values
* without bit reversal.
*/
for(bits = 1; bits <= zip_MAX_BITS; bits++) {
code = ((code + zip_bl_count[bits-1]) << 1);
next_code[bits] = code;
}
/* Check that the bit counts in bl_count are consistent. The last code
* must be all ones.
*/
// Assert (code + encoder->bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
// "inconsistent bit counts");
// Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
for(n = 0; n <= max_code; n++) {
var len = tree[n].dl;
if(len == 0)
continue;
// Now reverse the bits
tree[n].fc = zip_bi_reverse(next_code[len]++, len);
// Tracec(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
// n, (isgraph(n) ? n : ' '), len, tree[n].fc, next_code[len]-1));
}
}
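/* Worked example (illustrative, not part of the original source): for code
 * lengths {A:1, B:2, C:3, D:3} the counts are bl_count[1]=1, bl_count[2]=1,
 * bl_count[3]=2, so the loop above yields next_code[1]=0, next_code[2]=2,
 * next_code[3]=6, and the codes assigned before bit reversal are A=0, B=10,
 * C=110, D=111; the longest code is all ones, as the commented assertion
 * above expects.
 */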
/* ==========================================================================
* Construct one Huffman tree and assigns the code bit strings and lengths.
* Update the total bit length for the current block.
* IN assertion: the field freq is set for all tree elements.
* OUT assertions: the fields len and code are set to the optimal bit length
* and corresponding code. The length opt_len is updated; static_len is
* also updated if stree is not null. The field max_code is set.
*/
var zip_build_tree = function(desc) { // the tree descriptor
var tree = desc.dyn_tree;
var stree = desc.static_tree;
var elems = desc.elems;
var n, m; // iterate over heap elements
var max_code = -1; // largest code with non zero frequency
var node = elems; // next internal node of the tree
/* Construct the initial heap, with least frequent element in
* heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
* heap[0] is not used.
*/
zip_heap_len = 0;
zip_heap_max = zip_HEAP_SIZE;
for(n = 0; n < elems; n++) {
if(tree[n].fc != 0) {
zip_heap[++zip_heap_len] = max_code = n;
zip_depth[n] = 0;
} else
tree[n].dl = 0;
}
/* The pkzip format requires that at least one distance code exists,
* and that at least one bit should be sent even if there is only one
* possible code. So to avoid special checks later on we force at least
* two codes of non zero frequency.
*/
while(zip_heap_len < 2) {
var xnew = zip_heap[++zip_heap_len] = (max_code < 2 ? ++max_code : 0);
tree[xnew].fc = 1;
zip_depth[xnew] = 0;
zip_opt_len--;
if(stree != null)
zip_static_len -= stree[xnew].dl;
// new is 0 or 1 so it does not have extra bits
}
desc.max_code = max_code;
/* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
* establish sub-heaps of increasing lengths:
*/
for(n = zip_heap_len >> 1; n >= 1; n--)
zip_pqdownheap(tree, n);
/* Construct the Huffman tree by repeatedly combining the least two
* frequent nodes.
*/
do {
n = zip_heap[zip_SMALLEST];
zip_heap[zip_SMALLEST] = zip_heap[zip_heap_len--];
zip_pqdownheap(tree, zip_SMALLEST);
m = zip_heap[zip_SMALLEST]; // m = node of next least frequency
// keep the nodes sorted by frequency
zip_heap[--zip_heap_max] = n;
zip_heap[--zip_heap_max] = m;
// Create a new node father of n and m
tree[node].fc = tree[n].fc + tree[m].fc;
// depth[node] = (char)(MAX(depth[n], depth[m]) + 1);
if(zip_depth[n] > zip_depth[m] + 1)
zip_depth[node] = zip_depth[n];
else
zip_depth[node] = zip_depth[m] + 1;
tree[n].dl = tree[m].dl = node;
// and insert the new node in the heap
zip_heap[zip_SMALLEST] = node++;
zip_pqdownheap(tree, zip_SMALLEST);
} while(zip_heap_len >= 2);
zip_heap[--zip_heap_max] = zip_heap[zip_SMALLEST];
/* At this point, the fields freq and dad are set. We can now
* generate the bit lengths.
*/
zip_gen_bitlen(desc);
// The field len is now set, we can generate the bit codes
zip_gen_codes(tree, max_code);
}
/* ==========================================================================
* Scan a literal or distance tree to determine the frequencies of the codes
* in the bit length tree. Updates opt_len to take into account the repeat
* counts. (The contribution of the bit length codes will be added later
* during the construction of bl_tree.)
*/
var zip_scan_tree = function(tree,// the tree to be scanned
max_code) { // and its largest code of non zero frequency
var n; // iterates over all tree elements
var prevlen = -1; // last emitted length
var curlen; // length of current code
var nextlen = tree[0].dl; // length of next code
var count = 0; // repeat count of the current code
var max_count = 7; // max repeat count
var min_count = 4; // min repeat count
if(nextlen == 0) {
max_count = 138;
min_count = 3;
}
tree[max_code + 1].dl = 0xffff; // guard
for(n = 0; n <= max_code; n++) {
curlen = nextlen;
nextlen = tree[n + 1].dl;
if(++count < max_count && curlen == nextlen)
continue;
else if(count < min_count)
zip_bl_tree[curlen].fc += count;
else if(curlen != 0) {
if(curlen != prevlen)
zip_bl_tree[curlen].fc++;
zip_bl_tree[zip_REP_3_6].fc++;
} else if(count <= 10)
zip_bl_tree[zip_REPZ_3_10].fc++;
else
zip_bl_tree[zip_REPZ_11_138].fc++;
count = 0; prevlen = curlen;
if(nextlen == 0) {
max_count = 138;
min_count = 3;
} else if(curlen == nextlen) {
max_count = 6;
min_count = 3;
} else {
max_count = 7;
min_count = 4;
}
}
}
/* ==========================================================================
* Send a literal or distance tree in compressed form, using the codes in
* bl_tree.
*/
var zip_send_tree = function(tree, // the tree to be scanned
max_code) { // and its largest code of non zero frequency
var n; // iterates over all tree elements
var prevlen = -1; // last emitted length
var curlen; // length of current code
var nextlen = tree[0].dl; // length of next code
var count = 0; // repeat count of the current code
var max_count = 7; // max repeat count
var min_count = 4; // min repeat count
/* tree[max_code+1].dl = -1; */ /* guard already set */
if(nextlen == 0) {
max_count = 138;
min_count = 3;
}
for(n = 0; n <= max_code; n++) {
curlen = nextlen;
nextlen = tree[n+1].dl;
if(++count < max_count && curlen == nextlen) {
continue;
} else if(count < min_count) {
do { zip_SEND_CODE(curlen, zip_bl_tree); } while(--count != 0);
} else if(curlen != 0) {
if(curlen != prevlen) {
zip_SEND_CODE(curlen, zip_bl_tree);
count--;
}
// Assert(count >= 3 && count <= 6, " 3_6?");
zip_SEND_CODE(zip_REP_3_6, zip_bl_tree);
zip_send_bits(count - 3, 2);
} else if(count <= 10) {
zip_SEND_CODE(zip_REPZ_3_10, zip_bl_tree);
zip_send_bits(count-3, 3);
} else {
zip_SEND_CODE(zip_REPZ_11_138, zip_bl_tree);
zip_send_bits(count-11, 7);
}
count = 0;
prevlen = curlen;
if(nextlen == 0) {
max_count = 138;
min_count = 3;
} else if(curlen == nextlen) {
max_count = 6;
min_count = 3;
} else {
max_count = 7;
min_count = 4;
}
}
}
/* ==========================================================================
* Construct the Huffman tree for the bit lengths and return the index in
* bl_order of the last bit length code to send.
*/
var zip_build_bl_tree = function() {
var max_blindex; // index of last bit length code of non zero freq
// Determine the bit length frequencies for literal and distance trees
zip_scan_tree(zip_dyn_ltree, zip_l_desc.max_code);
zip_scan_tree(zip_dyn_dtree, zip_d_desc.max_code);
// Build the bit length tree:
zip_build_tree(zip_bl_desc);
/* opt_len now includes the length of the tree representations, except
* the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
*/
/* Determine the number of bit length codes to send. The pkzip format
* requires that at least 4 bit length codes be sent. (appnote.txt says
* 3 but the actual value used is 4.)
*/
for(max_blindex = zip_BL_CODES-1; max_blindex >= 3; max_blindex--) {
if(zip_bl_tree[zip_bl_order[max_blindex]].dl != 0) break;
}
/* Update opt_len to include the bit length tree and counts */
zip_opt_len += 3*(max_blindex+1) + 5+5+4;
// Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
// encoder->opt_len, encoder->static_len));
return max_blindex;
}
/* ==========================================================================
* Send the header for a block using dynamic Huffman trees: the counts, the
* lengths of the bit length codes, the literal tree and the distance tree.
* IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
*/
var zip_send_all_trees = function(lcodes, dcodes, blcodes) { // number of codes for each tree
var rank; // index in bl_order
// Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
// Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
// "too many codes");
// Tracev((stderr, "\nbl counts: "));
zip_send_bits(lcodes-257, 5); // not +255 as stated in appnote.txt
zip_send_bits(dcodes-1, 5);
zip_send_bits(blcodes-4, 4); // not -3 as stated in appnote.txt
for(rank = 0; rank < blcodes; rank++) {
// Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
zip_send_bits(zip_bl_tree[zip_bl_order[rank]].dl, 3);
}
// send the literal tree
zip_send_tree(zip_dyn_ltree,lcodes-1);
// send the distance tree
zip_send_tree(zip_dyn_dtree,dcodes-1);
}
/* ==========================================================================
* Determine the best encoding for the current block: dynamic trees, static
* trees or store, and output the encoded block to the zip file.
*/
var zip_flush_block = function(eof) { // true if this is the last block for a file
var opt_lenb, static_lenb; // opt_len and static_len in bytes
var max_blindex; // index of last bit length code of non zero freq
var stored_len; // length of input block
stored_len = zip_strstart - zip_block_start;
zip_flag_buf[zip_last_flags] = zip_flags; // Save the flags for the last 8 items
// Construct the literal and distance trees
zip_build_tree(zip_l_desc);
// Tracev((stderr, "\nlit data: dyn %ld, stat %ld",
// encoder->opt_len, encoder->static_len));
zip_build_tree(zip_d_desc);
// Tracev((stderr, "\ndist data: dyn %ld, stat %ld",
// encoder->opt_len, encoder->static_len));
/* At this point, opt_len and static_len are the total bit lengths of
* the compressed block data, excluding the tree representations.
*/
/* Build the bit length tree for the above two trees, and get the index
* in bl_order of the last bit length code to send.
*/
max_blindex = zip_build_bl_tree();
// Determine the best encoding. Compute first the block length in bytes
opt_lenb = (zip_opt_len +3+7)>>3;
static_lenb = (zip_static_len+3+7)>>3;
// Trace((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u dist %u ",
// opt_lenb, encoder->opt_len,
// static_lenb, encoder->static_len, stored_len,
// encoder->last_lit, encoder->last_dist));
if(static_lenb <= opt_lenb)
opt_lenb = static_lenb;
if(stored_len + 4 <= opt_lenb // 4: two words for the lengths
&& zip_block_start >= 0) {
var i;
/* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
* Otherwise we can't have processed more than WSIZE input bytes since
* the last block flush, because compression would have been
* successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
* transform a block into a stored block.
*/
zip_send_bits((zip_STORED_BLOCK<<1)+eof, 3); /* send block type */
zip_bi_windup(); /* align on byte boundary */
zip_put_short(stored_len);
zip_put_short(~stored_len);
// copy block
/*
p = &window[block_start];
for(i = 0; i < stored_len; i++)
put_byte(p[i]);
*/
for(i = 0; i < stored_len; i++)
zip_put_byte(zip_window[zip_block_start + i]);
} else if(static_lenb == opt_lenb) {
zip_send_bits((zip_STATIC_TREES<<1)+eof, 3);
zip_compress_block(zip_static_ltree, zip_static_dtree);
} else {
zip_send_bits((zip_DYN_TREES<<1)+eof, 3);
zip_send_all_trees(zip_l_desc.max_code+1,
zip_d_desc.max_code+1,
max_blindex+1);
zip_compress_block(zip_dyn_ltree, zip_dyn_dtree);
}
zip_init_block();
if(eof != 0)
zip_bi_windup();
}
/* ==========================================================================
* Save the match info and tally the frequency counts. Return true if
* the current block must be flushed.
*/
var zip_ct_tally = function(
dist, // distance of matched string
lc) { // match length-MIN_MATCH or unmatched char (if dist==0)
zip_l_buf[zip_last_lit++] = lc;
if(dist == 0) {
// lc is the unmatched char
zip_dyn_ltree[lc].fc++;
} else {
// Here, lc is the match length - MIN_MATCH
dist--; // dist = match distance - 1
// Assert((ush)dist < (ush)MAX_DIST &&
// (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
// (ush)D_CODE(dist) < (ush)D_CODES, "ct_tally: bad match");
zip_dyn_ltree[zip_length_code[lc]+zip_LITERALS+1].fc++;
zip_dyn_dtree[zip_D_CODE(dist)].fc++;
zip_d_buf[zip_last_dist++] = dist;
zip_flags |= zip_flag_bit;
}
zip_flag_bit <<= 1;
// Output the flags if they fill a byte
if((zip_last_lit & 7) == 0) {
zip_flag_buf[zip_last_flags++] = zip_flags;
zip_flags = 0;
zip_flag_bit = 1;
}
// Try to guess if it is profitable to stop the current block here
if(zip_compr_level > 2 && (zip_last_lit & 0xfff) == 0) {
// Compute an upper bound for the compressed length
var out_length = zip_last_lit * 8;
var in_length = zip_strstart - zip_block_start;
var dcode;
for(dcode = 0; dcode < zip_D_CODES; dcode++) {
out_length += zip_dyn_dtree[dcode].fc * (5 + zip_extra_dbits[dcode]);
}
out_length >>= 3;
// Trace((stderr,"\nlast_lit %u, last_dist %u, in %ld, out ~%ld(%ld%%) ",
// encoder->last_lit, encoder->last_dist, in_length, out_length,
// 100L - out_length*100L/in_length));
if(zip_last_dist < parseInt(zip_last_lit/2) &&
out_length < parseInt(in_length/2))
return true;
}
return (zip_last_lit == zip_LIT_BUFSIZE-1 ||
zip_last_dist == zip_DIST_BUFSIZE);
/* We avoid equality with LIT_BUFSIZE because of wraparound at 64K
* on 16 bit machines and because stored blocks are restricted to
* 64K-1 bytes.
*/
}
/* ==========================================================================
* Send the block data compressed using the given Huffman trees
*/
var zip_compress_block = function(
ltree, // literal tree
dtree) { // distance tree
var dist; // distance of matched string
var lc; // match length or unmatched char (if dist == 0)
var lx = 0; // running index in l_buf
var dx = 0; // running index in d_buf
var fx = 0; // running index in flag_buf
var flag = 0; // current flags
var code; // the code to send
var extra; // number of extra bits to send
if(zip_last_lit != 0) do {
if((lx & 7) == 0)
flag = zip_flag_buf[fx++];
lc = zip_l_buf[lx++] & 0xff;
if((flag & 1) == 0) {
zip_SEND_CODE(lc, ltree); /* send a literal byte */
// Tracecv(isgraph(lc), (stderr," '%c' ", lc));
} else {
// Here, lc is the match length - MIN_MATCH
code = zip_length_code[lc];
zip_SEND_CODE(code+zip_LITERALS+1, ltree); // send the length code
extra = zip_extra_lbits[code];
if(extra != 0) {
lc -= zip_base_length[code];
zip_send_bits(lc, extra); // send the extra length bits
}
dist = zip_d_buf[dx++];
// Here, dist is the match distance - 1
code = zip_D_CODE(dist);
// Assert (code < D_CODES, "bad d_code");
zip_SEND_CODE(code, dtree); // send the distance code
extra = zip_extra_dbits[code];
if(extra != 0) {
dist -= zip_base_dist[code];
zip_send_bits(dist, extra); // send the extra distance bits
}
} // literal or match pair ?
flag >>= 1;
} while(lx < zip_last_lit);
zip_SEND_CODE(zip_END_BLOCK, ltree);
}
/* ==========================================================================
* Send a value on a given number of bits.
* IN assertion: length <= 16 and value fits in length bits.
*/
var zip_Buf_size = 16; // bit size of bi_buf
var zip_send_bits = function(
value, // value to send
length) { // number of bits
/* If not enough room in bi_buf, use (valid) bits from bi_buf and
* (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
* unused bits in value.
*/
if(zip_bi_valid > zip_Buf_size - length) {
zip_bi_buf |= (value << zip_bi_valid);
zip_put_short(zip_bi_buf);
zip_bi_buf = (value >> (zip_Buf_size - zip_bi_valid));
zip_bi_valid += length - zip_Buf_size;
} else {
zip_bi_buf |= value << zip_bi_valid;
zip_bi_valid += length;
}
}
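/* Worked example (illustrative): with bi_valid == 14 and a 5-bit value,
 * 14 > 16 - 5, so the low 2 bits of the value are appended to bi_buf, the
 * filled 16-bit bi_buf is flushed with zip_put_short(), the remaining 3 high
 * bits start the new bi_buf, and bi_valid becomes 14 + 5 - 16 = 3.
 */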
/* ==========================================================================
* Reverse the first len bits of a code, using straightforward code (a faster
* method would use a table)
* IN assertion: 1 <= len <= 15
*/
var zip_bi_reverse = function(
code, // the value to invert
len) { // its bit length
var res = 0;
do {
res |= code & 1;
code >>= 1;
res <<= 1;
} while(--len > 0);
return res >> 1;
}
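/* Examples (illustrative): zip_bi_reverse(0x01, 5) returns 0x10
 * (00001 -> 10000), and zip_bi_reverse(0x6, 3) returns 0x3 (110 -> 011).
 */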
/* ==========================================================================
* Write out any remaining bits in an incomplete byte.
*/
var zip_bi_windup = function() {
if(zip_bi_valid > 8) {
zip_put_short(zip_bi_buf);
} else if(zip_bi_valid > 0) {
zip_put_byte(zip_bi_buf);
}
zip_bi_buf = 0;
zip_bi_valid = 0;
}
var zip_qoutbuf = function() {
if(zip_outcnt != 0) {
var q, i;
q = zip_new_queue();
if(zip_qhead == null)
zip_qhead = zip_qtail = q;
else
zip_qtail = zip_qtail.next = q;
q.len = zip_outcnt - zip_outoff;
// System.arraycopy(zip_outbuf, zip_outoff, q.ptr, 0, q.len);
for(i = 0; i < q.len; i++)
q.ptr[i] = zip_outbuf[zip_outoff + i];
zip_outcnt = zip_outoff = 0;
}
}
var zip_deflate = function(str, level) {
var i, j;
zip_deflate_data = str;
zip_deflate_pos = 0;
if(typeof level == "undefined")
level = zip_DEFAULT_LEVEL;
zip_deflate_start(level);
var buff = new Array(1024);
var aout = [];
while((i = zip_deflate_internal(buff, 0, buff.length)) > 0) {
var cbuf = new Array(i);
for(j = 0; j < i; j++){
cbuf[j] = String.fromCharCode(buff[j]);
}
aout[aout.length] = cbuf.join("");
}
zip_deflate_data = null; // G.C.
return aout.join("");
}
if (! window.RawDeflate) RawDeflate = {};
RawDeflate.deflate = zip_deflate;
})();
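// Usage sketch (illustrative, assuming this file is loaded in a browser):
// "level" is 1..9 and defaults to zip_DEFAULT_LEVEL when omitted; the return
// value is a JavaScript string whose character codes are the raw DEFLATE
// bytes (no zlib/gzip header).
//
//   var compressed = RawDeflate.deflate("hello hello hello hello", 6);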
|
define(['jquery'], function ($) {
return $('#sandbox');
});
|
#! /usr/bin/env python
# -*- coding: UTF-8 -*-
import wx
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import os
import time
import threading
import inspect
import ctypes
from _main import RTxxx_main
from _main import RTyyyy_main
from ui import RTyyyy_uidef
from ui import RTxxx_uidef
from ui import uidef
from ui import uivar
from ui import uilang
from ui import ui_cfg_flexspinor
from ui import ui_cfg_flexspinand
from ui import ui_cfg_semcnor
from ui import ui_cfg_semcnand
from ui import ui_cfg_usdhcsd
from ui import ui_cfg_usdhcmmc
from ui import ui_cfg_recoveryspinor
g_main_win = None
g_task_detectUsbhid = None
g_task_playSound = None
g_task_increaseGauge = None
g_task_accessMem = None
g_RTyyyy_task_allInOneAction = None
g_RTxxx_task_allInOneAction = None
g_RTyyyy_task_showSettedEfuse = None
def _async_raise(tid, exctype):
tid = ctypes.c_long(tid)
if not inspect.isclass(exctype):
exctype = type(exctype)
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, ctypes.py_object(exctype))
if res == 0:
raise ValueError("invalid thread id")
elif res != 1:
ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, None)
raise SystemError("PyThreadState_SetAsyncExc failed")
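# Illustrative sketch (not part of the tool): _async_raise() is typically used
# to stop a worker thread by injecting SystemExit into it, e.g.
#
#   worker = threading.Thread(target=some_loop)   # hypothetical worker thread
#   worker.start()
#   _async_raise(worker.ident, SystemExit)
#
# The exception is delivered only when the target thread next executes Python
# bytecode, so a thread blocked inside a C call may not stop immediately.
# The _stopTask() helper further down wraps _async_raise() exactly this way.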
class secBootMain(RTxxx_main.secBootRTxxxMain):
def __init__(self, parent):
RTxxx_main.secBootRTxxxMain.__init__(self, parent)
self.isAccessMemTaskPending = False
self.accessMemType = ''
self.lastTime = None
def _startGaugeTimer( self ):
self.lastTime = time.time()
self.initGauge()
def _stopGaugeTimer( self ):
self.deinitGauge()
self.updateCostTime()
def callbackSetMcuSeries( self, event ):
pass
def callbackSetMcuDevice( self, event ):
self.setTargetSetupValue()
self._switchEfuseGroup()
self._setUartUsbPort()
if self.isMcuSeriesChanged:
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_callbackSetMcuSeries()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_callbackSetMcuSeries()
else:
pass
self.isMcuSeriesChanged = False
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_callbackSetMcuDevice()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_callbackSetMcuDevice()
else:
pass
def callbackSetBootDevice( self, event ):
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_callbackSetBootDevice()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_callbackSetBootDevice()
else:
pass
def callbackBootDeviceConfiguration( self, event ):
if self.bootDevice == RTyyyy_uidef.kBootDevice_FlexspiNor or \
self.bootDevice == RTxxx_uidef.kBootDevice_FlexspiNor or \
self.bootDevice == RTxxx_uidef.kBootDevice_QuadspiNor:
if self.tgt.isSipFlexspiNorDevice:
self.popupMsgBox(uilang.kMsgLanguageContentDict['bootDeviceInfo_hasOnchipSerialNor'][self.languageIndex])
return
if self.checkIfSubWinHasBeenOpened():
return
if self.bootDevice == RTyyyy_uidef.kBootDevice_FlexspiNor or \
self.bootDevice == RTxxx_uidef.kBootDevice_FlexspiNor or \
self.bootDevice == RTxxx_uidef.kBootDevice_QuadspiNor:
flexspiNorFrame = ui_cfg_flexspinor.secBootUiCfgFlexspiNor(None)
if self.bootDevice == RTxxx_uidef.kBootDevice_QuadspiNor:
flexspiNorFrame.SetTitle(uilang.kSubLanguageContentDict['quadspinor_title'][self.languageIndex])
else:
flexspiNorFrame.SetTitle(uilang.kSubLanguageContentDict['flexspinor_title'][self.languageIndex])
flexspiNorFrame.setNecessaryInfo(self.mcuSeries, self.tgt.flexspiFreqs, self.cfgFdcbBinFilename)
flexspiNorFrame.Show(True)
elif self.bootDevice == RTyyyy_uidef.kBootDevice_FlexspiNand:
flexspiNandFrame = ui_cfg_flexspinand.secBootUiFlexspiNand(None)
flexspiNandFrame.SetTitle(u"FlexSPI NAND Device Configuration")
flexspiNandFrame.Show(True)
elif self.bootDevice == RTyyyy_uidef.kBootDevice_SemcNor:
semcNorFrame = ui_cfg_semcnor.secBootUiSemcNor(None)
semcNorFrame.SetTitle(u"SEMC NOR Device Configuration")
semcNorFrame.Show(True)
elif self.bootDevice == RTyyyy_uidef.kBootDevice_SemcNand:
semcNandFrame = ui_cfg_semcnand.secBootUiCfgSemcNand(None)
semcNandFrame.SetTitle(uilang.kSubLanguageContentDict['semcnand_title'][self.languageIndex])
semcNandFrame.setNecessaryInfo(self.tgt.isSwEccSetAsDefaultInNandOpt)
semcNandFrame.Show(True)
elif self.bootDevice == RTyyyy_uidef.kBootDevice_UsdhcSd:
usdhcSdFrame = ui_cfg_usdhcsd.secBootUiUsdhcSd(None)
usdhcSdFrame.SetTitle(uilang.kSubLanguageContentDict['usdhcsd_title'][self.languageIndex])
usdhcSdFrame.Show(True)
elif self.bootDevice == RTyyyy_uidef.kBootDevice_UsdhcMmc:
usdhcMmcFrame = ui_cfg_usdhcmmc.secBootUiUsdhcMmc(None)
usdhcMmcFrame.SetTitle(uilang.kSubLanguageContentDict['usdhcmmc_title'][self.languageIndex])
usdhcMmcFrame.Show(True)
elif self.bootDevice == RTyyyy_uidef.kBootDevice_LpspiNor or \
self.bootDevice == RTxxx_uidef.kBootDevice_FlexcommSpiNor:
recoverySpiNorFrame = ui_cfg_recoveryspinor.secBootUiCfgRecoverySpiNor(None)
if self.bootDevice == RTxxx_uidef.kBootDevice_FlexcommSpiNor:
recoverySpiNorFrame.SetTitle(uilang.kSubLanguageContentDict['flexcommspinor_title'][self.languageIndex])
else:
recoverySpiNorFrame.SetTitle(uilang.kSubLanguageContentDict['lpspinor_title'][self.languageIndex])
recoverySpiNorFrame.setNecessaryInfo(self.mcuSeries)
recoverySpiNorFrame.Show(True)
else:
pass
def _setUartUsbPort( self ):
usbIdList = []
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
usbIdList = self.RTyyyy_getUsbid()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
usbIdList = self.RTxxx_getUsbid()
else:
pass
retryToDetectUsb = False
showError = True
self.setPortSetupValue(self.connectStage, usbIdList, retryToDetectUsb, showError)
def callbackSetUartPort( self, event ):
self._setUartUsbPort()
def callbackSetUsbhidPort( self, event ):
self._setUartUsbPort()
def callbackSetOneStep( self, event ):
if not self.isToolRunAsEntryMode:
self.getOneStepConnectMode()
else:
self.initOneStepConnectMode()
self.popupMsgBox(uilang.kMsgLanguageContentDict['connectError_cannotSetOneStep'][self.languageIndex])
def callbackConnectToDevice( self, event ):
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_callbackConnectToDevice()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_callbackConnectToDevice()
else:
pass
def callbackSetSecureBootType( self, event ):
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_callbackSetSecureBootType()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_callbackSetSecureBootType()
else:
pass
def callbackAllInOneAction( self, event ):
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_callbackAllInOneAction()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_callbackAllInOneAction()
else:
pass
def callbackChangedAppFile( self, event ):
self.getUserAppFilePath()
self.setCostTime(0)
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_setSecureBootButtonColor()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_setSecureBootButtonColor()
else:
pass
def callbackSetAppFormat( self, event ):
self.getUserAppFileFormat()
def callbackGenImage( self, event ):
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_callbackGenImage()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_callbackGenImage()
else:
pass
def callbackFlashImage( self, event ):
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_callbackFlashImage()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_callbackFlashImage()
else:
pass
def task_doAccessMem( self ):
while True:
if self.isAccessMemTaskPending:
if self.accessMemType == 'ScanFuse':
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_scanAllFuseRegions()
if self.isSbFileEnabledToGen:
self.initSbEfuseBdfileContent()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_scanAllOtpRegions()
else:
pass
elif self.accessMemType == 'BurnFuse':
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_burnAllFuseRegions()
if self.isSbFileEnabledToGen:
self.genSbEfuseImage()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_burnAllOtpRegions()
else:
pass
elif self.accessMemType == 'SaveFuse':
self.saveFuseRegions()
elif self.accessMemType == 'LoadFuse':
self.loadFuseRegions()
elif self.accessMemType == 'ReadMem':
if self.connectStage == uidef.kConnectStage_ExternalMemory:
self.readRamMemory()
elif self.connectStage == uidef.kConnectStage_Reset:
self.readBootDeviceMemory()
else:
pass
elif self.accessMemType == 'EraseMem':
self.eraseBootDeviceMemory()
elif self.accessMemType == 'WriteMem':
if self.connectStage == uidef.kConnectStage_ExternalMemory:
self.writeRamMemory()
elif self.connectStage == uidef.kConnectStage_Reset:
self.writeBootDeviceMemory()
else:
pass
else:
pass
self.isAccessMemTaskPending = False
self._stopGaugeTimer()
time.sleep(1)
def callbackScanFuse( self, event ):
if self.connectStage == uidef.kConnectStage_ExternalMemory or \
self.connectStage == uidef.kConnectStage_Reset:
self._startGaugeTimer()
self.isAccessMemTaskPending = True
self.accessMemType = 'ScanFuse'
else:
self.popupMsgBox(uilang.kMsgLanguageContentDict['connectError_hasnotEnterFl'][self.languageIndex])
def callbackBurnFuse( self, event ):
if self.connectStage == uidef.kConnectStage_ExternalMemory or \
self.connectStage == uidef.kConnectStage_Reset:
self._startGaugeTimer()
self.isAccessMemTaskPending = True
self.accessMemType = 'BurnFuse'
else:
self.popupMsgBox(uilang.kMsgLanguageContentDict['connectError_hasnotEnterFl'][self.languageIndex])
def callbackSaveFuse( self, event ):
if self.connectStage == uidef.kConnectStage_ExternalMemory or \
self.connectStage == uidef.kConnectStage_Reset:
self._startGaugeTimer()
self.isAccessMemTaskPending = True
self.accessMemType = 'SaveFuse'
else:
self.popupMsgBox(uilang.kMsgLanguageContentDict['connectError_hasnotEnterFl'][self.languageIndex])
def callbackLoadFuse( self, event ):
if self.connectStage == uidef.kConnectStage_ExternalMemory or \
self.connectStage == uidef.kConnectStage_Reset:
self._startGaugeTimer()
self.isAccessMemTaskPending = True
self.accessMemType = 'LoadFuse'
else:
self.popupMsgBox(uilang.kMsgLanguageContentDict['connectError_hasnotEnterFl'][self.languageIndex])
def callbackViewMem( self, event ):
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_callbackViewMem()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_callbackViewMem()
else:
pass
def callbackClearMem( self, event ):
self.clearMem()
def _doReadMem( self ):
if self.connectStage == uidef.kConnectStage_ExternalMemory or \
self.connectStage == uidef.kConnectStage_Reset:
self._startGaugeTimer()
self.isAccessMemTaskPending = True
self.accessMemType = 'ReadMem'
else:
self.popupMsgBox(uilang.kMsgLanguageContentDict['connectError_hasnotEnterFl'][self.languageIndex])
def callbackReadMem( self, event ):
if not self.isToolRunAsEntryMode:
self._doReadMem()
else:
self.popupMsgBox(uilang.kMsgLanguageContentDict['operMemError_notAvailUnderEntry'][self.languageIndex])
def _doEraseMem( self ):
if self.connectStage == uidef.kConnectStage_Reset:
self._startGaugeTimer()
self.isAccessMemTaskPending = True
self.accessMemType = 'EraseMem'
else:
self.popupMsgBox(uilang.kMsgLanguageContentDict['connectError_hasnotCfgBootDevice'][self.languageIndex])
def callbackEraseMem( self, event ):
if not self.isToolRunAsEntryMode:
self._doEraseMem()
else:
self.popupMsgBox(uilang.kMsgLanguageContentDict['operMemError_notAvailUnderEntry'][self.languageIndex])
def _doWriteMem( self ):
if self.connectStage == uidef.kConnectStage_ExternalMemory or \
self.connectStage == uidef.kConnectStage_Reset:
self._startGaugeTimer()
self.isAccessMemTaskPending = True
self.accessMemType = 'WriteMem'
else:
self.popupMsgBox(uilang.kMsgLanguageContentDict['connectError_hasnotEnterFl'][self.languageIndex])
def callbackWriteMem( self, event ):
if not self.isToolRunAsEntryMode:
self._doWriteMem()
else:
self.popupMsgBox(uilang.kMsgLanguageContentDict['operMemError_notAvailUnderEntry'][self.languageIndex])
def _doExecuteApp( self ):
if self.connectStage == uidef.kConnectStage_ExternalMemory or \
self.connectStage == uidef.kConnectStage_Reset:
self.executeAppInFlexram()
else:
self.popupMsgBox(uilang.kMsgLanguageContentDict['connectError_hasnotEnterFl'][self.languageIndex])
def callbackExecuteApp( self, event ):
if not self.isToolRunAsEntryMode:
self._doExecuteApp()
else:
self.popupMsgBox(uilang.kMsgLanguageContentDict['operMemError_notAvailUnderEntry'][self.languageIndex])
def callbackClearLog( self, event ):
self.clearLog()
def callbackSaveLog( self, event ):
self.saveLog()
def _stopTask( self, thread ):
_async_raise(thread.ident, SystemExit)
def _deinitToolToExit( self ):
uivar.setAdvancedSettings(uidef.kAdvancedSettings_Tool, self.toolCommDict)
uivar.deinitVar()
#exit(0)
self._stopTask(g_task_detectUsbhid)
self._stopTask(g_task_playSound)
self._stopTask(g_task_increaseGauge)
self._stopTask(g_task_accessMem)
self._stopTask(g_RTyyyy_task_allInOneAction)
self._stopTask(g_RTxxx_task_allInOneAction)
self._stopTask(g_RTyyyy_task_showSettedEfuse)
global g_main_win
g_main_win.Show(False)
try:
self.Destroy()
except:
pass
def callbackExit( self, event ):
self._deinitToolToExit()
def callbackClose( self, event ):
self._deinitToolToExit()
def _switchToolRunMode( self ):
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_switchToolRunMode()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_switchToolRunMode()
else:
pass
self.enableOneStepForEntryMode()
def callbackSetRunModeAsEntry( self, event ):
self.setToolRunMode()
self._switchToolRunMode()
def callbackSetRunModeAsMaster( self, event ):
self.setToolRunMode()
self._switchToolRunMode()
def callbackSetUsbDetectionAsDynamic( self, event ):
self.setUsbDetection()
def callbackSetUsbDetectionAsStatic( self, event ):
self.setUsbDetection()
def callbackSetSoundEffectAsContra( self, event ):
self.setSoundEffect()
def callbackSetSoundEffectAsMario( self, event ):
self.setSoundEffect()
def callbackSetSoundEffectAsQuiet( self, event ):
self.setSoundEffect()
def callbackSetGenSbFileAsYes( self, event ):
self.setGenSbFile()
def callbackSetGenSbFileAsNo( self, event ):
self.setGenSbFile()
def callbackSetImageReadbackAsAutomatic( self, event ):
self.setImageReadback()
def callbackSetImageReadbackAsManual( self, event ):
self.setImageReadback()
def callbackSetFlashloaderResidentToDefault( self, event ):
self.setFlashloaderResident()
def callbackSetFlashloaderResidentToItcm( self, event ):
self.setFlashloaderResident()
def callbackSetFlashloaderResidentToDtcm( self, event ):
self.setFlashloaderResident()
def callbackSetFlashloaderResidentToOcram( self, event ):
self.setFlashloaderResident()
def _switchEfuseGroup( self ):
self.setEfuseGroup()
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_updateFuseGroupText()
self.RTyyyy_updateFuseRegionField()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_updateOtpGroupText()
self.RTxxx_updateOtpRegionField()
else:
pass
def callbackSetEfuseGroupTo0( self, event ):
self._switchEfuseGroup()
def callbackSetEfuseGroupTo1( self, event ):
self._switchEfuseGroup()
def callbackSetEfuseGroupTo2( self, event ):
self._switchEfuseGroup()
def callbackSetEfuseGroupTo3( self, event ):
self._switchEfuseGroup()
def callbackSetEfuseGroupTo4( self, event ):
self._switchEfuseGroup()
def callbackSetEfuseGroupTo5( self, event ):
self._switchEfuseGroup()
def callbackSetEfuseGroupTo6( self, event ):
self._switchEfuseGroup()
def callbackSetEfuseLockerAsAutomatic( self, event ):
self.setEfuseLocker()
def callbackSetEfuseLockerAsManual( self, event ):
self.setEfuseLocker()
def _switchFlexspiXipRegion( self ):
self.setFlexspiXipRegion()
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_updateFlexspiNorMemBase()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
pass
else:
pass
def callbackSetFlexspiXipRegionTo0( self, event ):
self._switchFlexspiXipRegion()
def callbackSetFlexspiXipRegionTo1( self, event ):
self._switchFlexspiXipRegion()
def _doSetLanguage( self ):
self.setLanguage()
if self.mcuSeries in uidef.kMcuSeries_iMXRTyyyy:
self.RTyyyy_setLanguage()
elif self.mcuSeries == uidef.kMcuSeries_iMXRTxxx:
self.RTxxx_setLanguage()
else:
pass
def callbackSetLanguageAsEnglish( self, event ):
self._doSetLanguage()
def callbackSetLanguageAsChinese( self, event ):
self._doSetLanguage()
def callbackShowHomePage( self, event ):
msgText = ((uilang.kMsgLanguageContentDict['homePage_info'][self.languageIndex]))
wx.MessageBox(msgText, uilang.kMsgLanguageContentDict['homePage_title'][self.languageIndex], wx.OK | wx.ICON_INFORMATION)
def callbackShowAboutAuthor( self, event ):
msgText = ((uilang.kMsgLanguageContentDict['aboutAuthor_author'][self.languageIndex]) +
(uilang.kMsgLanguageContentDict['aboutAuthor_email1'][self.languageIndex]) +
(uilang.kMsgLanguageContentDict['aboutAuthor_email2'][self.languageIndex]) +
(uilang.kMsgLanguageContentDict['aboutAuthor_blog'][self.languageIndex]))
wx.MessageBox(msgText, uilang.kMsgLanguageContentDict['aboutAuthor_title'][self.languageIndex], wx.OK | wx.ICON_INFORMATION)
def callbackShowContributors( self, event ):
msgText = ((uilang.kMsgLanguageContentDict['contributors_info'][self.languageIndex]))
wx.MessageBox(msgText, uilang.kMsgLanguageContentDict['contributors_title'][self.languageIndex], wx.OK | wx.ICON_INFORMATION)
def callbackShowSpecialThanks( self, event ):
msgText = ((uilang.kMsgLanguageContentDict['specialThanks_info'][self.languageIndex]))
wx.MessageBox(msgText, uilang.kMsgLanguageContentDict['specialThanks_title'][self.languageIndex], wx.OK | wx.ICON_INFORMATION)
def callbackShowRevisionHistory( self, event ):
msgText = ((uilang.kMsgLanguageContentDict['revisionHistory_v1_0_0'][self.languageIndex]) +
(uilang.kMsgLanguageContentDict['revisionHistory_v1_1_0'][self.languageIndex]) +
(uilang.kMsgLanguageContentDict['revisionHistory_v1_2_0'][self.languageIndex]) +
(uilang.kMsgLanguageContentDict['revisionHistory_v1_3_0'][self.languageIndex]) +
(uilang.kMsgLanguageContentDict['revisionHistory_v1_4_0'][self.languageIndex]) +
(uilang.kMsgLanguageContentDict['revisionHistory_v2_0_0'][self.languageIndex]) +
(uilang.kMsgLanguageContentDict['revisionHistory_v2_1_0'][self.languageIndex]) +
(uilang.kMsgLanguageContentDict['revisionHistory_v2_2_0'][self.languageIndex]) +
(uilang.kMsgLanguageContentDict['revisionHistory_v2_3_0'][self.languageIndex]) +
(uilang.kMsgLanguageContentDict['revisionHistory_v2_3_1'][self.languageIndex]))
wx.MessageBox(msgText, uilang.kMsgLanguageContentDict['revisionHistory_title'][self.languageIndex], wx.OK | wx.ICON_INFORMATION)
if __name__ == '__main__':
app = wx.App()
g_main_win = secBootMain(None)
g_main_win.SetTitle(u"NXP MCU Boot Utility v2.3.1")
g_main_win.Show()
g_task_detectUsbhid = threading.Thread(target=g_main_win.task_doDetectUsbhid)
g_task_detectUsbhid.setDaemon(True)
g_task_detectUsbhid.start()
g_task_playSound = threading.Thread(target=g_main_win.task_doPlaySound)
g_task_playSound.setDaemon(True)
g_task_playSound.start()
g_task_increaseGauge = threading.Thread(target=g_main_win.task_doIncreaseGauge)
g_task_increaseGauge.setDaemon(True)
g_task_increaseGauge.start()
g_task_accessMem = threading.Thread(target=g_main_win.task_doAccessMem)
g_task_accessMem.setDaemon(True)
g_task_accessMem.start()
g_RTyyyy_task_allInOneAction = threading.Thread(target=g_main_win.RTyyyy_task_doAllInOneAction)
g_RTyyyy_task_allInOneAction.setDaemon(True)
g_RTyyyy_task_allInOneAction.start()
g_RTxxx_task_allInOneAction = threading.Thread(target=g_main_win.RTxxx_task_doAllInOneAction)
g_RTxxx_task_allInOneAction.setDaemon(True)
g_RTxxx_task_allInOneAction.start()
g_RTyyyy_task_showSettedEfuse = threading.Thread(target=g_main_win.RTyyyy_task_doShowSettedEfuse)
g_RTyyyy_task_showSettedEfuse.setDaemon(True)
g_RTyyyy_task_showSettedEfuse.start()
app.MainLoop()
|
var searchData=
[
['_5fbaseurls',['_BaseUrls',['../class_simple_web_socket_server_library_1_1_simple_web_socket_http_server_1_1_web_socket_http_server.html#a70e5c60e525049aed662d4f174413f43',1,'SimpleWebSocketServerLibrary::SimpleWebSocketHttpServer::WebSocketHttpServer']]],
['_5fbuffersize',['_BufferSize',['../class_simple_web_socket_server_library_1_1_simple_web_socket_handler_1_1_web_socket_handler.html#a82f255d10f7ada8915b0083a90aa488e',1,'SimpleWebSocketServerLibrary.SimpleWebSocketHandler.WebSocketHandler._BufferSize()'],['../class_simple_web_socket_server_library_1_1_w_socket_server_1_1_web_socket_server.html#a6d5baef80e87cbb0f7b6588498ea33c0',1,'SimpleWebSocketServerLibrary.WSocketServer.WebSocketServer._BufferSize()']]],
['_5fclientinfo',['_ClientInfo',['../class_simple_web_socket_server_library_1_1_w_socket_server_1_1_web_socket_server.html#a43967b2701368bc8a17258f14e22707b',1,'SimpleWebSocketServerLibrary::WSocketServer::WebSocketServer']]],
['_5flistwithconnections',['_ListWithConnections',['../class_simple_web_socket_server_library_1_1_simple_web_socket_handler_1_1_web_socket_handler.html#a4cba1d5137e38a568d552bc6c6c6d68c',1,'SimpleWebSocketServerLibrary::SimpleWebSocketHandler::WebSocketHandler']]],
['_5fport',['_Port',['../class_simple_web_socket_server_library_1_1_simple_web_socket_http_server_1_1_web_socket_http_server.html#abe8db2f7583ba87d2731ca04a751ed28',1,'SimpleWebSocketServerLibrary::SimpleWebSocketHttpServer::WebSocketHttpServer']]],
['_5fstop',['_Stop',['../class_simple_web_socket_server_library_1_1_simple_web_socket_http_server_1_1_web_socket_http_server.html#ae945702bb623e9f65e14f23668c86cb1',1,'SimpleWebSocketServerLibrary::SimpleWebSocketHttpServer::WebSocketHttpServer']]],
['_5fwebsockethandler',['_WebsocketHandler',['../class_simple_web_socket_server_library_1_1_simple_web_socket_server.html#a4bfb888a3f666e48f561a55373eb4bd3',1,'SimpleWebSocketServerLibrary.SimpleWebSocketServer._WebsocketHandler()'],['../class_simple_web_socket_server_library_1_1_simple_web_socket_http_server_1_1_web_socket_http_server.html#a4de0d450cb58e0ddabc5a270dd7806c8',1,'SimpleWebSocketServerLibrary.SimpleWebSocketHttpServer.WebSocketHttpServer._WebsocketHandler()']]],
['_5fwebsockethttpserver',['_WebSocketHttpServer',['../class_simple_web_socket_server_library_1_1_simple_web_socket_server.html#af16d2b86849d66572a4bbf06ddc5e94e',1,'SimpleWebSocketServerLibrary::SimpleWebSocketServer']]]
];
|
module.exports = {
"stories": [
"../src/**/*.stories.mdx",
"../src/**/*.stories.@(js|jsx|ts|tsx)"
],
"addons": [
"@storybook/addon-links",
"@storybook/addon-essentials",
]
} |
/*
	Helper to select an HTML element:
	getElement('div')  -> selects a <div></div> element
	getElement('.div') -> selects an element with the class "div": <div class="div"></div>
	getElement('#div') -> selects an element with the ID "div": <div id="div"></div>
*/
function getElement(element) {
return window.document.querySelector(element)
}
// Helper to create a new HTML element
function newElement(element) {
return window.document.createElement(element);
}
// Create the <span></span> elements for the 12 dial numbers
var clockFace = getElement('.clock-face');
for (var i = 1; i <= 12; i++) {
var number = newElement('span');
number.innerHTML = `${i}`;
number.className = `number`;
clockFace.appendChild(number);
}
// Grab the hour, minute and second hand <div></div> elements
var clockHour = getElement('.clock-hour');
var clockMinute = getElement('.clock-minute');
var clockSecond = getElement('.clock-second');
// Update the clock hands to show the current time
function setDate() {
	const horaAtual = new Date(); // Date() exposes system information, including the current time
	const hora = horaAtual.getHours(); // current hour from the system clock
	const minutos = horaAtual.getMinutes(); // current minutes
	const segundos = horaAtual.getSeconds(); // current seconds
/*
Para transformar os segundos em graus:
0% = 0deg
100% = 360deg
( segundos / 60 ) -> teremos a porcentagem
( segundos / 60) * 360 -> teremos o valor em graus
Assim, 30 segundos são 180 graus
*/
const horaDeg = ((hora / 12) * 360) + 90; // Aqui dividimos por 12, pois são as 12h
const minutosDeg = ((minutos / 60) * 360) + 90;
const segundosDeg = ((segundos / 60) * 360) + 90; // Aqui adicionamos 90 porque nosso relógio inicia na posição 90 graus. Assim deixamos a posição do ponteiro correta
clockHour.style.transform = `rotate(${horaDeg}deg)`;
clockMinute.style.transform = `rotate(${minutosDeg}deg)`;
    // Rotating the clock hands
clockSecond.style.transform = `rotate(${segundosDeg}deg)`;
}
// Run the function once every second
setInterval(setDate, 1000); |
const path = require('path');
const mongoose = require('mongoose');
const bcrypt = require("bcrypt");
require('dotenv').config({ path: path.resolve(__dirname, '../../../.env') })
const mongoURL = process.env.auth;
const mongoAdmin = process.env.authuser;
const mongoPW = process.env.authpw;
const bocAuth = `mongodb://${mongoAdmin}:${mongoPW}@${mongoURL}:27017/bocauth`;
const bocAuthOptions = {
useNewUrlParser: true,
useUnifiedTopology: true
};
const connection = mongoose.createConnection(bocAuth, bocAuthOptions);
mongoose.connect(bocAuth, bocAuthOptions);
const UserSchema = new mongoose.Schema({
email: { type: String, required: true, unique: true },
password: { type: String, required: false },
firstName: { type: String, required: false},
googleId: { type: String, required: false},
lastName: { type: String, required: false}
});
var User = mongoose.model('User', UserSchema);
var findUserByEmail = async function (email, cb) {
try{
User.findOne( {email: email}, function (err, data) {
if (err){
cb(err)
}
else{
cb(null, data)
}
});
} catch(err) {
cb(err);
}
}
var addNewUser = async function (data, cb) {
var newUser = new User(data);
newUser.save(function (err) {
if (err) {
cb(err);
} else {
cb(null, newUser)
}
});
}
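// A minimal usage sketch (hypothetical call site, not part of this module; the
// require path is assumed). Both helpers follow the Node-style (err, data)
// callback convention used above:
//
//   const { findUserByEmail, addNewUser } = require('./user');
//   findUserByEmail('jane@example.com', function (err, user) {
//     if (err) return console.error(err);
//     if (!user) {
//       addNewUser({ email: 'jane@example.com', firstName: 'Jane' }, function (err, created) {
//         if (err) return console.error(err);
//         console.log('created', created.email);
//       });
//     }
//   });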
module.exports.connection = connection;
module.exports.user = User;
module.exports.addNewUser = addNewUser;
module.exports.findUserByEmail = findUserByEmail; |
# -*- coding: utf-8 -*-
'''
Created on 2019-04-20
@FileName: base_dao.py
@Description: Base DAO
@author: 'Aaron.Qiu'
@version V1.0.0
'''
from sqlalchemy import create_engine,Column,String,Integer,ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker,relationship
'''
# default
engine = create_engine('mysql://scott:tiger@localhost/foo')
# mysql-python
engine = create_engine('mysql+mysqldb://scott:tiger@localhost/foo')
engine = create_engine("mysql+mysqldb://weiyz:123@localhost:3306/test")
# MySQL-connector-python
engine = create_engine('mysql+mysqlconnector://scott:tiger@localhost/foo')
# OurSQL
engine = create_engine('mysql+oursql://scott:tiger@localhost/foo')
'''
Base = declarative_base()
# Define the Component model:
class Component(Base):
    # Table name:
__tablename__ = 't_component'
    # Table columns:
id = Column(Integer, primary_key=True)
category1 = Column(String(256))
category2 = Column(String(256))
category3 = Column(String(256))
category4 = Column(String(256))
category_type_num = Column(String(64))
def __repr__(self):
return "id:%d,category1:%s,category2:%s,category3:%s,category4:%s,categoryTypeNum:%s" % (self.id, self.category1, self.category2,self.category3,self.category4,self.category_type_num)
# Define the ComponentProps model:
class ComponentProps(Base):
    # Table name:
__tablename__ = 't_component_prop'
    # Table columns:
id = Column(Integer, primary_key=True)
prop_name = Column(String(512))
prop_value = Column(String(1024))
category_type_num = Column(String(64))
# category_type_num = Column(String(64), ForeignKey('t_component.category_type_num'))
    # Note: a foreign key is declared as "table_name.column_name". When deciding between the table and the mapped class, a string argument usually refers to the table name, while an object refers to the mapped class.
component_id = Column(Integer, ForeignKey('t_component.id'))
component = relationship('Component', backref="ComponentProps")
# component = relationship('Component', foreign_keys=category_type_num)
# componentFollow = relationship('Component', foreign_keys=component_id)
engine = create_engine('sqlite:///app.db?check_same_thread=False', echo=True)
Session = sessionmaker(bind=engine)
def create_db():
    # Create the tables
Base.metadata.create_all(engine)
def drop_db():
    # Drop the tables
Base.metadata.drop_all(engine)
|
import React, { Component } from 'react';
import { searchProduct, fetchProducts } from '../../_action/searchActions';
import { connect } from 'react-redux';
export class SearchForm extends Component {
onChange = e => {
this.props.searchProduct(e.target.value);
}
onSubmit = e => {
e.preventDefault();
this.props.fetchProducts(this.props.text);
//this.props.setLoading();
}
render() {
return ( <div className="jumbotron jumbotron-fluid mt-5 text-center">
<div className="container">
<h4>Building Product Selection Platform</h4>
<form id="searchForm" onSubmit={this.onSubmit}>
<input
type="text"
className="form-control"
name="searchText"
placeholder="Search Products ..."
onChange={this.onChange}
/>
<button type="submit" className="btn btn-primary btn-bg mt-3">
Search
</button>
</form>
</div>
</div>
);
}
}
const mapStateToProps = state => ({
text: state.products.text
})
export default connect(mapStateToProps, {searchProduct, fetchProducts})(SearchForm); |
import React from "react"
import Layout from "../components/layout"
import SEO from "../components/seo"
const NotFoundPage = () => (
<Layout>
<SEO title="404 error" />
    <div className="gg">
<h1
style={{
color: "white",
fontFamily: "Courier New",
fontWeight: "lighter",
}}
>
page not found :(
</h1>
<p>oh no! you must be lost...</p>
      <a href="/" className="arrows4">
<p>go back home →</p>
</a>
</div>
</Layout>
)
export default NotFoundPage
|
/*!
* jquery.fancytree.gridnav.js
*
* Support keyboard navigation for trees with embedded input controls.
* (Extension module for jquery.fancytree.js: https://github.com/mar10/fancytree/)
*
* Copyright (c) 2014, Martin Wendt (http://wwWendt.de)
*
* Released under the MIT license
* https://github.com/mar10/fancytree/wiki/LicenseInfo
*
* @version 2.3.0
* @date 2014-08-17T10:39
*/
;(function($, window, document, undefined) {
"use strict";
/*******************************************************************************
* Private functions and variables
*/
// Allow these navigation keys even when input controls are focused
var KC = $.ui.keyCode,
// which keys are *not* handled by embedded control, but passed to tree
// navigation handler:
NAV_KEYS = {
"text": [KC.UP, KC.DOWN],
"checkbox": [KC.UP, KC.DOWN, KC.LEFT, KC.RIGHT],
"radiobutton": [KC.UP, KC.DOWN, KC.LEFT, KC.RIGHT],
"select-one": [KC.LEFT, KC.RIGHT],
"select-multiple": [KC.LEFT, KC.RIGHT]
};
/* Calculate TD column index (considering colspans).*/
function getColIdx($tr, $td) {
var colspan,
td = $td.get(0),
idx = 0;
$tr.children().each(function () {
if( this === td ) {
return false;
}
colspan = $(this).prop("colspan");
idx += colspan ? colspan : 1;
});
return idx;
}
/* Find TD at given column index (considering colspans).*/
function findTdAtColIdx($tr, colIdx) {
var colspan,
res = null,
idx = 0;
$tr.children().each(function () {
if( idx >= colIdx ) {
res = $(this);
return false;
}
colspan = $(this).prop("colspan");
idx += colspan ? colspan : 1;
});
return res;
}
/* Find adjacent cell for a given direction. Skip empty cells and consider merged cells */
function findNeighbourTd($target, keyCode){
var $tr, colIdx,
$td = $target.closest("td"),
$tdNext = null;
switch( keyCode ){
case KC.LEFT:
$tdNext = $td.prev();
break;
case KC.RIGHT:
$tdNext = $td.next();
break;
case KC.UP:
case KC.DOWN:
$tr = $td.parent();
colIdx = getColIdx($tr, $td);
while( true ) {
$tr = (keyCode === KC.UP) ? $tr.prev() : $tr.next();
if( !$tr.length ) {
break;
}
// Skip hidden rows
if( $tr.is(":hidden") ) {
continue;
}
// Find adjacent cell in the same column
$tdNext = findTdAtColIdx($tr, colIdx);
					// Skip cells that don't contain a focusable element
if( $tdNext && $tdNext.find(":input").length ) {
break;
}
}
break;
}
return $tdNext;
}
/*******************************************************************************
* Extension code
*/
$.ui.fancytree.registerExtension({
name: "gridnav",
version: "0.0.1",
// Default options for this extension.
options: {
autofocusInput: false, // Focus first embedded input if node gets activated
handleCursorKeys: true // Allow UP/DOWN in inputs to move to prev/next node
},
treeInit: function(ctx){
// gridnav requires the table extension to be loaded before itself
this._requireExtension("table", true, true);
this._super(ctx);
this.$container.addClass("fancytree-ext-gridnav");
// Activate node if embedded input gets focus (due to a click)
this.$container.on("focusin", function(event){
var ctx2,
node = $.ui.fancytree.getNode(event.target);
if( node && !node.isActive() ){
// Call node.setActive(), but also pass the event
ctx2 = ctx.tree._makeHookContext(node, event);
ctx.tree._callHook("nodeSetActive", ctx2, true);
}
});
},
nodeSetActive: function(ctx, flag) {
var $outer,
opts = ctx.options.gridnav,
node = ctx.node,
event = ctx.originalEvent || {},
triggeredByInput = $(event.target).is(":input");
flag = (flag !== false);
this._super(ctx, flag);
if( flag ){
if( ctx.options.titlesTabbable ){
if( !triggeredByInput ) {
$(node.span).find("span.fancytree-title").focus();
node.setFocus();
}
// If one node is tabbable, the container no longer needs to be
ctx.tree.$container.attr("tabindex", "-1");
// ctx.tree.$container.removeAttr("tabindex");
} else if( opts.autofocusInput && !triggeredByInput ){
// Set focus to input sub input (if node was clicked, but not
// when TAB was pressed )
$outer = $(node.tr || node.span);
$outer.find(":input:enabled:first").focus();
}
}
},
nodeKeydown: function(ctx) {
var inputType, handleKeys, $td,
opts = ctx.options.gridnav,
event = ctx.originalEvent,
$target = $(event.target);
// jQuery
inputType = $target.is(":input:enabled") ? $target.prop("type") : null;
// ctx.tree.debug("ext-gridnav nodeKeydown", event, inputType);
if( inputType && opts.handleCursorKeys ){
handleKeys = NAV_KEYS[inputType];
if( handleKeys && $.inArray(event.which, handleKeys) >= 0 ){
$td = findNeighbourTd($target, event.which);
// ctx.node.debug("ignore keydown in input", event.which, handleKeys);
if( $td && $td.length ) {
$td.find(":input:enabled").focus();
// Prevent Fancytree default navigation
return false;
}
}
return true;
}
ctx.tree.debug("ext-gridnav NOT HANDLED", event, inputType);
return this._super(ctx);
}
});
}(jQuery, window, document));
|
import React, { useEffect, useState } from "react";
import "./Dashboard.scss";
import { Col, Row, Spinner } from "react-bootstrap";
import { useSelector } from "react-redux";
import { ProjectColumn } from "./ProjectColumn/ProjectColumn";
import { IssueColumn } from "./IssueColumn/IssueColumn";
import { TimelineColumn } from "components/TimelineColumn/TimelineColumn";
import { useHistory } from "react-router-dom";
const Dashboard = () => {
const projects = useSelector((state) => state.projects.data);
const user = useSelector((state) => state.user);
const [isLoading, setIsLoading] = useState(true);
const history = useHistory();
useEffect(() => {
if (isLoading) {
if (user.info.accessToken === "") {
history.push("/");
}
}
return () => {
setIsLoading(false);
};
});
// Wait if user or projects hasn't finished being fetched from store
if (user.info.accessToken === "" || projects === undefined) {
return <Spinner animation="border" variant="primary" />;
} else {
// If there are timelines that exist, include timeline column
if (user.timelines.data.length > 0) {
return (
<React.Fragment>
{/* Wrapper for entire page*/}
<div className="dashboard">
<Row style={{ width: "1000px" }}>
<Col sm={6}>
{/* Project column */}
<ProjectColumn
userProjects={user.projects.data}
projects={projects}
/>
{/* Assigned to me column*/}
<IssueColumn
header="Assigned to Me"
userIssues={user.assignedIssues.data}
/>
{/* Column for watched issues*/}
<IssueColumn
header="Watched Issues"
userIssues={user.watchedIssues.data}
/>
</Col>
<Col sm={6}>
<TimelineColumn timelines={user.timelines.data} />
</Col>
</Row>
</div>
</React.Fragment>
);
}
// Else no timelines exist, don't include timeline column
else {
return (
<React.Fragment>
{/* Wrapper for entire page*/}
<div className="dashboard">
<Row
style={{
display: "flex",
justifyContent: "center",
width: "1000px",
}}
>
<Col sm={6}>
{/* Project column */}
<ProjectColumn
userProjects={user.projects.data}
projects={projects}
/>
{/* Assigned to me column*/}
<IssueColumn
header="Assigned to Me"
userIssues={user.assignedIssues.data}
/>
{/* Column for watched issues*/}
<IssueColumn
header="Watched Issues"
userIssues={user.watchedIssues.data}
/>
</Col>
</Row>
</div>
</React.Fragment>
);
}
}
};
export default Dashboard;
|
const MuState = require('./MuState');
describe('MuState', ()=>{
let initialState, muState;
beforeEach(()=>{
initialState = {
a: {
a1: {
a2: {
a3: 3,
a4: 4
}
}
},
b: {
b1: 5,
b2: 6
},
c: {
c1: [1, 2, 3],
c2: [{c3: 3}, {c4: 4}]
}
};
muState = new MuState(initialState);
});
it('should return the same object if values have not changed', ()=>{
const ref1 = muState.b;
const ref2 = muState.b;
const ref3 = muState.a.a1;
const ref4 = muState.a.a1;
expect(ref1).toBe(ref2);
expect(ref3).toBe(ref4);
});
it('should return a new object if any descendant value has changed', ()=>{
const ref1 = muState.b;
muState.b.b1 = 10;
const ref2 = muState.b;
const ref3 = muState.a;
muState.a.a1.a2.a3 = 10;
const ref4 = muState.a;
expect(ref1).not.toBe(ref2);
expect(ref3).not.toBe(ref4);
});
it('should support setting values', ()=>{
const ref1 = muState.b;
muState.b.b1 = 10;
const ref2 = muState.b;
expect(muState.b.b1).toBe(10);
});
it('should support arrays in the chain of objects', ()=>{
const ref1 = muState.c;
muState.c.c2[0].c3 = 4;
const ref2 = muState.c;
expect(ref1).not.toBe(ref2);
expect(muState.c.c2[0].c3).toBe(4);
});
it('should support adding to an array using the push method', ()=>{
const ref1 = muState.c.c1;
muState.c.c1.push(4);
const ref2 = muState.c.c1;
expect(ref1).not.toBe(ref2);
expect(muState.c.c1).toEqual([1,2,3,4]);
});
it('should support removing array items with the pop method', ()=>{
const ref1 = muState.c.c1;
const poppedValue = muState.c.c1.pop();
const ref2 = muState.c.c1;
expect(ref1).not.toBe(ref2);
expect(poppedValue).toBe(3);
expect(muState.c.c1).toEqual([1,2]);
});
it('should support adding object keys', ()=>{
const ref1 = muState.a;
muState.a.a1.a2.a5 = 10;
const ref2 = muState.a;
expect(ref1).not.toBe(ref2);
expect(muState.a.a1.a2).toEqual({a3: 3, a4: 4, a5: 10});
});
it('should support deleting object keys', ()=>{
const ref1 = muState.a;
delete muState.a.a1.a2.a4;
const ref2 = muState.a;
expect(ref1).not.toBe(ref2);
expect(muState.a.a1.a2).toEqual({a3: 3});
});
}); |
import day from 'dayjs';
import { insertAt, filterBy } from '@/utils/array';
import {
ADD_SIDECAR, _FLAGGED, MODE, _CREATE, _CLONE, _STAGE
} from '@/config/query-params';
import { escapeHtml } from '@/utils/string';
import { DATE_FORMAT, TIME_FORMAT } from '@/store/prefs';
import { PRIVATE } from '@/plugins/steve/resource-proxy';
import { RIO } from '@/config/types';
import { formatSi } from '@/utils/units';
import { get } from '@/utils/object';
const EMPTY = {};
export default {
applyDefaults(mode) {
const spec = this.spec;
if ( mode === _CREATE || mode === _CLONE ) {
delete spec.app;
spec.version = 'v0';
} else if ( mode === _STAGE ) {
spec.app = this.app;
delete spec.version;
}
if ( mode === _CREATE ) {
spec.weight = 10000;
} else if ( mode === _CLONE ) {
delete spec.weight;
} else if ( mode === _STAGE ) {
spec.weight = 0;
}
},
app() {
const spec = this.spec || EMPTY;
const status = this.status || EMPTY;
const metadata = this.metadata || EMPTY;
return spec.app || status.computedApp || metadata.name;
},
version() {
const spec = this.spec || EMPTY;
const status = this.status || EMPTY;
const uid = ((this.metadata || EMPTY)['uid'] || '').replace(/-.*$/, '');
return spec.version || status.computedVersion || uid || '?';
},
nameDisplay() {
const version = this.version;
if ( version === 'v0' ) {
return this.app;
}
return `${ this.app }@${ this.version }`;
},
namespaceApp() {
return `${ this.metadata.namespace }:${ this.app }`;
},
imageDisplay() {
if ( this.spec.build && !this.spec.image ) {
return 'Building from Git...';
}
return (this.spec.image || '')
.replace(/^(index\.)?docker.io\/(library\/)?/i, '')
.replace(/@sha256:[0-9a-f]+$/i, '')
.replace(/:latest$/i, '')
.replace(/localhost:5442\/(.*)/i, '$1 (local)');
},
createdDisplay() {
const dateFormat = escapeHtml( this.$rootGetters['prefs/get'](DATE_FORMAT));
const timeFormat = escapeHtml( this.$rootGetters['prefs/get'](TIME_FORMAT));
return day(this.metadata.creationTimestamp).format(`${ dateFormat } ${ timeFormat }`);
},
versionWithDateDisplay() {
return `${ this.version } (${ this.createdDisplay })`;
},
scales() {
const status = this.status || {};
const scaleStatus = status.scaleStatus || {};
const auto = !!this.spec.autoscale;
const fixed = (typeof this.spec.replicas === 'undefined' ? 1 : this.spec.replicas || 0);
const available = scaleStatus.available || 0;
const current = (typeof status.computedReplicas === 'undefined' ? available : status.computedReplicas || 0);
const unavailable = scaleStatus.unavailable || 0;
const global = this.spec.global === true;
let desired = fixed;
let min, max;
if ( auto ) {
min = this.spec.autoscale.minReplicas;
max = this.spec.autoscale.maxReplicas;
desired = `${ min } - ${ max }`;
}
if ( global ) {
desired = current;
} else if ( typeof this[PRIVATE].pendingScale === 'number' ) {
desired = this[PRIVATE].pendingScale;
}
    const missing = desired - available - unavailable; // positive: pods still starting, negative: pods stopping
return {
global,
auto,
min,
max,
current,
desired,
available,
unavailable,
starting: missing > 0 ? missing : 0,
stopping: missing < 0 ? -1 * missing : 0,
};
},
showDesiredScale() {
const scales = this.scales;
return !scales.global && scales.current !== scales.desired;
},
complexScale() {
const { stopping, starting, unavailable } = this.scales;
return stopping !== 0 || starting !== 0 || unavailable !== 0;
},
scaleParts() {
const {
available, unavailable, starting, stopping
} = this.scales;
const out = [
{
label: 'Available',
color: 'bg-success',
textColor: 'text-success',
value: available
},
{
label: 'Unavailable',
color: 'bg-error',
textColor: 'text-error',
value: unavailable
},
];
if ( starting ) {
out.push({
label: 'Starting',
color: 'bg-info',
textColor: 'text-info',
value: starting
});
}
if ( stopping ) {
out.push({
label: 'Stopping',
color: 'bg-warning',
textColor: 'text-warning',
value: stopping
});
}
return out;
},
scaleUp() {
return () => {
let scale;
if ( this.scales.global ) {
return;
}
if ( this[PRIVATE].scaleTimer ) {
scale = this[PRIVATE].pendingScale;
} else {
scale = this.scales.desired;
}
scale = scale || 0;
this[PRIVATE].pendingScale = scale + 1;
this.saveScale();
};
},
scaleDown() {
return () => {
let scale;
if ( this.scales.global ) {
return;
}
if ( this[PRIVATE].scaleTimer ) {
scale = this[PRIVATE].pendingScale;
} else {
scale = this.scales.desired;
}
scale = scale || 1;
this[PRIVATE].pendingScale = Math.max(scale - 1, 0);
this.saveScale();
};
},
saveScale() {
return () => {
if ( this[PRIVATE].scaleTimer ) {
clearTimeout(this[PRIVATE].scaleTimer);
}
this[PRIVATE].scaleTimer = setTimeout(async() => {
try {
await this.patch([{
op: 'replace',
path: '/spec/replicas',
value: this[PRIVATE].pendingScale
}]);
} catch (err) {
this.$dispatch('growl/fromError', { title: 'Error updating scale', err }, { root: true });
}
this[PRIVATE].scaleTimer = null;
this[PRIVATE].pendingScale = null;
}, 500);
};
},
allVersions() {
const services = this.$getters['all'](RIO.SERVICE);
const out = filterBy(services, {
app: this.app,
'metadata.namespace': this.metadata.namespace,
});
return out;
},
weightsOfApp() {
let desired = 0;
let current = 0;
let count = 0;
for ( const service of this.allVersions ) {
const weights = service.weights;
desired += weights.desired || 0;
current += weights.current || 0;
count++;
}
return {
desired,
current,
count
};
},
weights() {
let current = 0;
let desired = 0;
const fromSpec = this.spec.weight;
if ( this.status ) {
const fromStatus = this.status.computedWeight;
if ( typeof fromStatus === 'number' ) {
current = fromStatus;
} else if ( typeof fromSpec === 'number' ) {
current = fromSpec;
}
if ( typeof fromSpec === 'number' ) {
desired = fromSpec;
} else if ( typeof fromStatus === 'number' ) {
desired = fromStatus;
}
}
return { current, desired };
},
weightsPercent() {
const self = this.weights;
const app = this.weightsOfApp;
let desired = 0;
let current = 0;
if ( self.desired && app.desired ) {
desired = self.desired / app.desired * 100;
}
if ( self.current && app.current ) {
current = self.current / app.current * 100;
}
return { current, desired };
},
saveWeightPercent() {
return (newPercent) => {
const appInfo = this.weightsOfApp;
const totalWeight = appInfo.desired;
const currentPercent = (totalWeight === 0 ? 0 : this.weights.desired / totalWeight);
const currentWeight = this.spec.weight || 0;
const totalOfOthers = totalWeight - currentWeight;
const count = appInfo.count;
if ( currentPercent === 100 ) {
if ( newPercent === 100 ) {
return;
} else if ( newPercent === 0 ) {
return this.saveWeight(0);
}
const weight = newWeight(100 - newPercent) / (count - 1);
for ( const svc of this.allVersions ) {
if ( svc.id === this.id ) {
continue;
}
svc.saveWeight(weight);
}
} else if ( totalOfOthers === 0 || newPercent === 100 ) {
this.saveWeight(10000);
for ( const svc of this.allVersions ) {
if ( svc.id === this.id ) {
continue;
}
svc.saveWeight(0);
}
} else {
const weight = newWeight(newPercent);
this.saveWeight(weight);
}
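      // Worked example for newWeight() below (hypothetical numbers): if the other
      // versions of this app currently hold 10000 weight units in total and this
      // version should end up with 20% of traffic, then
      //   newWeight(20) = Math.round(10000 / (1 - 0.2)) - 10000 = 2500,
      // and 2500 / (2500 + 10000) is indeed 20% of the combined weight.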
function newWeight(percent) {
if ( percent === 0 ) {
return 0;
}
const out = Math.round(totalOfOthers / (1 - (percent / 100))) - totalOfOthers;
return out;
}
};
},
saveWeight() {
return async(neu) => {
console.log('Save Weight', this.spec.app, this.spec.version, neu); // eslint-disable-line no-console
try {
await this.patch([{
op: 'replace',
path: '/spec/weight',
value: neu
}]);
} catch (err) {
this.$dispatch('growl/fromError', { title: 'Error updating weight', err }, { root: true });
}
};
},
pauseOrResume() {
return async(pause = true) => {
try {
await this.patch([{
op: 'replace',
path: '/spec/rollout/pause',
value: pause
}]);
} catch (err) {
this.$dispatch('growl/fromError', { title: 'Error updating pause', err }, { root: true });
}
};
},
pause() {
this.pauseOrResume(true);
},
resume() {
this.pauseOrResume(false);
},
goToStage() {
return (moreQuery = {}) => {
const location = this.detailLocation;
location.query = {
...location.query,
[MODE]: _STAGE,
...moreQuery
};
this.currentRouter().push(location);
};
},
_availableActions() {
const links = this.links || {};
const out = this._standardActions;
let isPaused = false;
if ( this.spec.rollout && this.spec.rollout.pause ) {
isPaused = true;
}
insertAt(out, 2, {
action: 'pause',
label: 'Pause Rollout',
icon: 'icon icon-gear',
enabled: !!links.update && !isPaused,
});
insertAt(out, 2, {
action: 'resume',
label: 'Resume Rollout',
icon: 'icon icon-gear',
enabled: !!links.update && isPaused,
});
insertAt(out, 2, {
action: 'addSidecar',
label: 'Add a Sidecar',
icon: 'icon icon-circle-plus',
enabled: !!links.update,
});
insertAt(out, 2, {
action: 'goToStage',
label: 'Stage New Version',
icon: 'icon icon-copy',
enabled: !!links.update,
});
insertAt(out, 2, { divider: true });
return out;
},
addSidecar() {
return () => {
return this.goToEdit({ [ADD_SIDECAR]: _FLAGGED });
};
},
networkBytes() {
const read = get(this, 'metadata.computed.fields.readBytesPerSecond') || 0;
const write = get(this, 'metadata.computed.fields.writeBytesPerSecond') || 0;
return read + write;
},
networkDisplay() {
return formatSi(this.networkBytes, { suffix: 'Bps' });
},
p95() {
const out = get(this, 'metadata.computed.fields.p95') || 0;
return out;
},
p95Display() {
return `${ this.p95 }ms`;
},
connections() {
const out = get(this, 'metadata.computed.fields.openConnections') || 0;
return out;
},
};
|
# create element in matrix
arr = [ [1,2,4,29],
[3,4,6,19] ]
print(arr)
# create matrix of set size
N = 3
M = 2
A = []
for i in range(N):
A.append([0]*M)
print(A)
# with generate nums
import random
N = 3
M = 2
A = [[0]*M for i in range(N)]
# fill in with random num
for i in range(N):
for j in range(M):
A[i][j] = random.randint(0,20)
print(A)
print("# - - - #")
# output matrix
for row in A:
for elem in row:
print(elem, end = ' ')
print()
print("# - - - #")
for i in range(len(A)):
for j in range(len(A[i])):
print(A[i][j], end = ' ')
print()
# input manually
A = []
for i in range(3):
A.append(list(map(int, input().split())))
A = []
for i in range(3):
row = input().split()
    for j in range(len(row)):
        row[j] = int(row[j])
A.append(row)
|
import Ext_field_Panel from '../../Ext/field/Panel.js';
export default class Ext_form_Panel extends Ext_field_Panel {
static PROPERTIES() { return [
'activeChildTabIndex',
'activeItem',
'alignSelf',
'allowFocusingDisabledChildren',
'alwaysOnTop',
'anchor',
'anchorPosition',
'api',
'ariaAttributes',
'ariaDescribedBy',
'ariaLabel',
'ariaLabelledBy',
'autoDestroy',
'autoSize',
'axisLock',
'baseParams',
'bbar',
'bind',
'bodyBorder',
'bodyCls',
'bodyPadding',
'bodyStyle',
'border',
'bottom',
'bubbleDirty',
'buttonAlign',
'buttons',
'buttonToolbar',
'cardSwitchAnimation',
'centered',
'closable',
'closeAction',
'closeToolText',
'cls',
'collapsed',
'collapsible',
'constrainAlign',
'contentEl',
'control',
'controller',
'data',
'defaultFocus',
'defaultListenerScope',
'defaults',
'defaultToolWeights',
'defaultType',
'dirty',
'disabled',
'displayed',
'docked',
'draggable',
'enableSubmissionForm',
'enctype',
'fieldDefaults',
'fieldSeparators',
'flex',
'floated',
'focusableContainer',
'focusCls',
'fullscreen',
'header',
'headerPosition',
'height',
'hidden',
'hideAnimation',
'hideMode',
'hideOnMaskTap',
'html',
'icon',
'iconAlign',
'iconCls',
'id',
'inactiveChildTabIndex',
'innerCls',
'inputBorders',
'instanceCls',
'itemId',
'items',
'jsonSubmit',
'keyMap',
'keyMapEnabled',
'keyMapTarget',
'layout',
'lbar',
'left',
'listeners',
'manageBorders',
'margin',
'masked',
'maxHeight',
'maxWidth',
'method',
'minButtonWidth',
'minHeight',
'minWidth',
'modal',
'modelValidation',
'multipartDetection',
'name',
'nameable',
'nameHolder',
'padding',
'paramOrder',
'paramsAsHash',
'plugins',
'publishes',
'rbar',
'record',
'reference',
'referenceHolder',
'relative',
'renderTo',
'resetFocusPosition',
'resizable',
'right',
'ripple',
'scrollable',
'session',
'shadow',
'shareableName',
'shim',
'showAnimation',
'standardButtons',
'standardSubmit',
'stateful',
'statefulDefaults',
'stateId',
'style',
'submitOnAction',
'tabIndex',
'tbar',
'timeout',
'title',
'titleAlign',
'titleCollapse',
'toFrontOnShow',
'toolDefaults',
'tools',
'tooltip',
'top',
'touchAction',
'tpl',
'tplWriteMode',
'trackResetOnLoad',
'translatable',
'twoWayBindable',
'ui',
'url',
'userCls',
'userSelectable',
'viewModel',
'weight',
'weighted',
'width',
'x',
'xtype',
'y',
'zIndex',
]};
static EVENTS() { return [
{name:'activate', parameters:'newActiveItem,sender,oldActiveItem'},
{name:'activeItemchange', parameters:'sender,value,oldValue'},
{name:'add', parameters:'sender,item,index'},
{name:'added', parameters:'sender,container,index'},
{name:'beforeactiveItemchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforebottomchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforecenteredchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforecollapse', parameters:'sender'},
{name:'beforedisabledchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforedockedchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforeexpand', parameters:'sender'},
{name:'beforeheightchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforehiddenchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforehide', parameters:'sender'},
{name:'beforeleftchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforemaxHeightchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforemaxWidthchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforeminHeightchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforeminWidthchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforeorientationchange', parameters:''},
{name:'beforeresizedragstart', parameters:'sender,context'},
{name:'beforerightchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforescrollablechange', parameters:'sender,value,oldValue,undefined'},
{name:'beforeshow', parameters:'sender'},
{name:'beforesubmit', parameters:'sender,values,options,e'},
{name:'beforetofront', parameters:'sender'},
{name:'beforetopchange', parameters:'sender,value,oldValue,undefined'},
{name:'beforewidthchange', parameters:'sender,value,oldValue,undefined'},
{name:'blur', parameters:'sender,event'},
{name:'bottomchange', parameters:'sender,value,oldValue'},
{name:'centeredchange', parameters:'sender,value,oldValue'},
{name:'collapse', parameters:'sender'},
{name:'deactivate', parameters:'oldActiveItem,sender,newActiveItem'},
{name:'destroy', parameters:''},
{name:'dirtychange', parameters:'sender,dirty'},
{name:'disabledchange', parameters:'sender,value,oldValue'},
{name:'dockedchange', parameters:'sender,value,oldValue'},
{name:'drawerhide', parameters:'sender'},
{name:'drawershow', parameters:'sender'},
{name:'erased', parameters:'sender'},
{name:'exception', parameters:'sender,result'},
{name:'expand', parameters:'sender'},
{name:'floatingchange', parameters:'sender,positioned'},
{name:'focus', parameters:'sender,event'},
{name:'focusenter', parameters:'sender,event'},
{name:'focusleave', parameters:'sender,event'},
{name:'fullscreen', parameters:'sender'},
{name:'heightchange', parameters:'sender,value,oldValue'},
{name:'hiddenchange', parameters:'sender,value,oldValue'},
{name:'hide', parameters:'sender'},
{name:'initialize', parameters:'sender'},
{name:'leftchange', parameters:'sender,value,oldValue'},
{name:'maxHeightchange', parameters:'sender,value,oldValue'},
{name:'maxWidthchange', parameters:'sender,value,oldValue'},
{name:'minHeightchange', parameters:'sender,value,oldValue'},
{name:'minWidthchange', parameters:'sender,value,oldValue'},
{name:'move', parameters:'sender,item,toIndex,fromIndex'},
{name:'moved', parameters:'sender,container,toIndex,fromIndex'},
{name:'orientationchange', parameters:''},
{name:'painted', parameters:'sender,element'},
{name:'positionedchange', parameters:'sender,positioned'},
{name:'remove', parameters:'sender,item,index'},
{name:'removed', parameters:'sender,container,index'},
{name:'renderedchange', parameters:'sender,item,rendered'},
{name:'resize', parameters:'element,info'},
{name:'resizedrag', parameters:'sender,context'},
{name:'resizedragcancel', parameters:'sender,context'},
{name:'resizedragend', parameters:'sender,context'},
{name:'resizedragstart', parameters:'sender,context'},
{name:'rightchange', parameters:'sender,value,oldValue'},
{name:'scrollablechange', parameters:'sender,value,oldValue'},
{name:'show', parameters:'sender'},
{name:'submit', parameters:'sender,result,e'},
{name:'tofront', parameters:'sender'},
{name:'topchange', parameters:'sender,value,oldValue'},
{name:'updatedata', parameters:'sender,newData'},
{name:'widthchange', parameters:'sender,value,oldValue'},
{name:'ready', parameters:'cmp,cmpObj'},
{name:'created', parameters:'cmp'}
]};
static getProperties(properties) {
properties = properties.concat(Ext_form_Panel.PROPERTIES());
return Ext_field_Panel.getProperties(properties);
}
static getEvents(events) {
events = events.concat(Ext_form_Panel.EVENTS());
return Ext_field_Panel.getEvents(events);
}
static get observedAttributes() {
var attrs = super.observedAttributes
Ext_form_Panel.PROPERTIES().forEach(function (property, index, array) {
attrs.push(property)
})
Ext_form_Panel.EVENTS().forEach(function (eventparameter, index, array) {
attrs.push('on' + eventparameter.name)
})
return attrs
}
constructor(properties, events) {
super (
properties.concat(Ext_form_Panel.PROPERTIES()),
events.concat(Ext_form_Panel.EVENTS())
)
}
connectedCallback() {
super.connectedCallback()
}
attributeChangedCallback(attrName, oldVal, newVal) {
super.attributeChangedCallback(attrName, oldVal, newVal)
}
}
|
(function() {var implementors = {};
implementors["core"] = [];
if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() |
function clickdelButton(e, formId) {
e.preventDefault();
if(confirm('Do you really want to delete this?')) {
document.getElementById(formId).submit();
}
} |
# Copyright (C) 2021 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
import logging
import six
from blinkpy.common.net.luci_auth import LuciAuth
_log = logging.getLogger(__name__)
# These characters always appear at the beginning of the RPC response.
SEARCHBUILDS_RESPONSE_PREFIX = b")]}'"
class Rpc(object):
def __init__(self, host):
self._host = host
def luci_rpc(self, url, data):
"""Fetches json data through Luci RPCs
Args:
url: url for the rpc call
data: the request body in json format
Returns:
On success: Returns the json representation of the response.
Otherwise: None
"""
luci_token = LuciAuth(self._host).get_access_token()
headers = {
'Authorization': 'Bearer ' + luci_token,
'Accept': 'application/json',
'Content-Type': 'application/json',
}
if six.PY3:
body = json.dumps(data).encode("utf-8")
else:
body = json.dumps(data)
response = self._host.web.request('POST', url, data=body, headers=headers)
if response.getcode() == 200:
response_body = response.read()
if response_body.startswith(SEARCHBUILDS_RESPONSE_PREFIX):
response_body = response_body[len(SEARCHBUILDS_RESPONSE_PREFIX
):]
return json.loads(response_body)
_log.error(
"RPC request failed. Status=%s, url=%s" %
(response.status, url))
_log.debug("Full RPC response: %s" % str(response))
return None
|
angular.module("umbraco").controller("bulkEdit.dashboard.controller", function(
$scope,
appState,
bulkEditApi,
contentResource,
dataTypeResource,
dialogService,
navigationService,
notificationsService) {
// Initialization Methods ////////////////////////////////////////////////////
/**
* @method init
* @description Triggered on the controller loaded, kicks off any initialization functions.
*/
$scope.init = function() {
$scope.setVariables();
$scope.listenForExit();
$scope.buildDocTypeOptions();
$scope.getSavedSearches();
};
/**
* @method setVariables
* @description Sets up the initial variables for the view.
*/
$scope.setVariables = function() {
$scope.config = {
hideBreadcrumbs: false,
hideIdCol: false,
hideNav: true,
itemsPerPage: 10
};
$scope.currentPage = 0;
$scope.currentSavedSearchPage = 0;
$scope.allDocTypes = [];
$scope.doctypes = [
{ Name: "-Select Doctype-", Alias: "", Id: 0 }
];
$scope.doctype = $scope.doctypes[0];
$scope.haveSetEditorWatcher = false;
$scope.isSelectingProperty = false;
$scope.isRowDirty = [];
$scope.isSaving = [];
$scope.properties = [{ Name: "-Select Property-", Id: 0 }];
$scope.resultProperties = [];
$scope.propertiesToEdit = [];
$scope.propertyEditors = [];
$scope.propertyToAdd = $scope.properties[0];
$scope.results = [];
$scope.savedSearches = [];
$scope.showSavedSearch = false;
$scope.startNode = {
icon: "",
id: 0,
key: "",
name: ""
};
};
// Event Handler Methods /////////////////////////////////////////////////////
/**
* @method addPropertyToEditList
* @returns {void}
* @description Adds the property selected to the edit list and creates
* a matching editor for each search result.
*/
$scope.addPropertyToEditList = function() {
var property = $scope.propertyToAdd;
$scope.propertiesToEdit.push(property);
// Get the property editor for the property.
$scope
.getPropertyEditor(property.DataTypeDefinitionId)
.then(function(editor) {
// Loop through all results.
for (var i = 0; i < $scope.results.length; i++) {
// Add the editor to a list of editors for the result.
var thisEditor = JSON.parse(JSON.stringify(editor));
$scope.addEditorForPropertyToResultItem(
property.Alias,
thisEditor,
i
);
}
});
// Start a watcher to see when the editor updates.
$scope.startWatchingEditors();
// Reset propertyToAdd to '-select property-' for select element.
$scope.propertyToAdd = $scope.getFilteredAvailableProperties()[
0
];
$scope.isSelectingProperty = false;
};
/**
* @method exportAsCsv
* @returns {void}
* @description Called when the 'Export as CSV' button is clicked. Builds
* a CSV file and downloads it.
*/
$scope.exportAsCsv = function() {
var csvUrl = "/Umbraco/backoffice/ORCCsv/CsvExport/GetPublishedContent";
var data = {
format: "Csv",
contentTypeAlias: $scope.doctype.Alias,
rootId: $scope.startNode.id
};
$scope.openPage("GET", csvUrl, data);
};
/**
* @method handleStartNodePickerSelection
* @param {Object} data - modal object returned by dialogService.contentPicker()
* @description Event handler triggered by a content picker dialog. If there
* is a node selected, updates $scope.startNode with the node's information.
*/
$scope.handleStartNodePickerSelection = function(data) {
if (data) {
$scope.startNode = data;
}
};
/**
* @method loadDocType
* @returns {void}
* @description Handles the doctype selection action, loading the doc type's
* details to build a list of selectable properties.
*/
$scope.loadDocType = function() {
if ($scope.doctype.Id !== 0) {
$scope.getDocTypeById(
$scope.doctype.Id,
function(response) {
$scope.properties = $scope.buildPropertiesForDoctype(
response
);
$scope.properties = $scope.sortArrayAlphaByProp(
$scope.properties,
"Name"
);
$scope.properties.unshift({
Name: "-Select Property-",
Id: 0
});
}
);
}
};
/**
* @method onConfigDialogConfirmation
* @param {Object} data
* @returns {void}
* @description Handles results from config dialog changes, modifying the
* configuration accordingly.
*/
$scope.onConfigDialogConfirmation = function(data) {
if (data) {
$scope.config = JSON.parse(JSON.stringify(data));
// reset current page to avoid issues with current page being too high.
$scope.currentPage = 0;
$scope.currentSavedSearchPage = 0;
if ($scope.config.hideNav) {
$scope.hideNav();
} else {
$scope.showNav();
}
}
};
/**
* @method onDeleteSavedSearchDialogConfirmation
* @param {Object} data
* @returns {void}
* @description Delete the indicated saved search and then update the list
* of saved searches. Called by a delete confirmation dialog.
*/
$scope.onDeleteSavedSearchDialogConfirmation = function(data) {
bulkEditApi.deleteSavedSearchByGuid(data.guid).then(function(result) {
$scope.getSavedSearches();
});
};
/**
* @method openConfigDialog
* @returns {void}
* @description Open the dialog for configuring the editor's settings.
*/
$scope.openConfigDialog = function() {
dialogService.open({
template: "/App_Plugins/UmbracoBulkEdit/views/configDialog.html",
dialogData: JSON.parse(JSON.stringify($scope.config)),
callback: $scope.onConfigDialogConfirmation
});
};
/**
* @method openDeleteSavedSearchDialog
* @param {string} guid
* @returns {void}
* @description Opens a dialog to confirm whether or not to delete the
* saved search indicated by the GUID.
*/
$scope.openDeleteSavedSearchDialog = function(guid) {
dialogService.open({
template: "/App_Plugins/UmbracoBulkEdit/views/deleteSavedSearchDialog.html",
dialogData: { guid: guid },
callback: $scope.onDeleteSavedSearchDialogConfirmation
});
};
/**
* @method openPropertySelection
* @returns {void}
* @description Toggles open the select element that contains the list of
* properties to potentially edit.
*/
$scope.openPropertySelection = function() {
$scope.isSelectingProperty = true;
};
/**
* @method openStartNodePicker
* @returns {void}
* @description Opens the content picker dialog for the start node, and sends
* the data returned to $scope.handleStartNodePickerSelection.
*/
$scope.openStartNodePicker = function() {
dialogService.contentPicker({
multipicker: false,
callback: $scope.handleStartNodePickerSelection
});
dialogService.closeAll();
};
/**
* @method runSavedSearch
* @param {JSON} search
* @returns {void}
* @description Run a previously saved search by setting node id and
* alias then triggering $scope.search().
*/
$scope.runSavedSearch = function(search) {
var options = JSON.parse(search.Options);
contentResource.getById(options.rootId).then(function(data) {
$scope.startNode = data;
for (var i = 0; i < $scope.doctypes.length; i++) {
var doctype = $scope.doctypes[i];
if (doctype.Alias == options.alias) {
$scope.doctype = $scope.doctypes[i];
}
}
$scope.loadDocType();
$scope.search();
});
};
/**
* @method saveAll
* @returns {void}
* @description Saves all nodes on the page via API.
*/
$scope.saveAll = function() {
notificationsService.info("Saving...", "saving all nodes.");
var nodesToSave = [];
var perPage = $scope.config.itemsPerPage;
for (var i = $scope.currentPage * perPage; i < ($scope.currentPage + 1) * perPage; i++) {
if ($scope.results[i]) {
var node = $scope.results[i];
var editors = $scope.propertyEditors[i];
var nodeToSave = {
id: node.Id,
properties: []
};
for (var j = 0; j < $scope.propertiesToEdit.length; j++) {
var propToEdit = $scope.propertiesToEdit[j];
var editor = editors[j];
nodeToSave.properties.push({
alias: propToEdit.Alias,
value: editor.value
});
$scope.results[i][propToEdit.Alias] = editor.value;
}
nodesToSave.push(nodeToSave);
}
}
bulkEditApi.saveNodes(nodesToSave).then(function(result) {
notificationsService.success(
"Saved!",
"All nodes were successfully saved."
);
});
};
/**
* @method saveNode
* @param {number} index - The index of the result to save.
* @returns {void}
* @description Saves the result node at the indicated index.
*/
$scope.saveNode = function(index) {
$scope.isSaving[index] = true;
var node = $scope.results[index];
notificationsService.info(
"Saving...",
"saving node " + node.Id + "."
);
var editors = $scope.propertyEditors[index];
for (var i = 0; i < $scope.propertiesToEdit.length; i++) {
var propToEdit = $scope.propertiesToEdit[i];
var editor = editors[i];
var savedCount = 0;
bulkEditApi.savePropertyForNode(node.Id, propToEdit.Alias, editor.value).then(function(result) {
$scope.isSaving[index] = false;
$scope.overwritePropValue(
propToEdit.Alias,
editor.value,
index
);
savedCount += 1;
if (savedCount >= $scope.propertiesToEdit.length) {
notificationsService.success("Saved!", "Node " + node.Id + " was successfully saved.");
}
});
}
};
/**
* @method search
* @returns {void}
* @description Called when 'Search' button is clicked. Requests a list of
* matching content.
*/
$scope.search = function() {
$scope.getContent($scope.startNode, $scope.doctype.Alias).then(function(results) {
$scope.showSavedSearch = false;
$scope.saveSearchIfUnique(
$scope.startNode,
$scope.doctype
);
});
if ($scope.config.hideNav) {
$scope.currentPage = 0;
$scope.hideNav();
}
};
/**
* @method toggleSavedSearchPanel
* @returns {void}
* @description Toggles the visibility of the saved search dialog.
*/
$scope.toggleSavedSearchPanel = function() {
$scope.showSavedSearch = !$scope.showSavedSearch;
};
// Helper Methods ////////////////////////////////////////////////////////////
/**
* @method addEditorForPropertyToResultItem
* @param {string} propertyAlias - the alias of the property the editor is for.
     * @param {JSON} editor - the config data for the editor
* @param {number} index - the index of the item to add.
* @returns {void}
* @description Assigns the property editor that matches the property alias
* onto a result item, assigning the applicable value to the editor from
* the result.
*/
$scope.addEditorForPropertyToResultItem = function(propertyAlias, editor, index) {
var result = $scope.results[index];
var value = result[propertyAlias];
editor.value = value;
var editors = [];
if ($scope.propertyEditors.length > index) {
editors = $scope.propertyEditors[index];
}
editors.push(editor);
$scope.propertyEditors[index] = editors;
};
/**
* @method buildDocTypeOptions
* @returns {void}
* @description Builds an array of docTypes from what is available in the
* Umbraco back office. Will attempt to sort out non-docType content types,
* but must rely on naming conventions to do so as contentTypeResource.getAll()'s
* response has no indicator as to whether a contentType is a docType.
* NOTE: Currently not being used as we've hard-wired selectable doctypes. --Kyle
*/
$scope.buildDocTypeOptions = function() {
bulkEditApi.getAllContentTypes().then(function(response) {
var types = response.data.results;
if (types && types.length > 0) {
$scope.allDocTypes = JSON.parse(JSON.stringify(types));
$scope.doctypes = [];
for (var i = 0; i < types.length; i++) {
var type = types[i];
$scope.doctypes.push(types[i]);
}
// Sort types alphabetically.
$scope.doctypes = $scope.sortArrayAlphaByProp(
$scope.doctypes,
"Name"
);
$scope.doctypes.unshift({
Name: "-Select Doctype-",
Alias: "",
Id: 0
});
$scope.doctype = $scope.doctypes[0];
}
});
};
/**
* @method buildPropertiesForDoctype
* @param {JSON} doctype
* @returns {Object[]}
* @description Builds an array of properties in the provided doctype.
*/
$scope.buildPropertiesForDoctype = function(doctype) {
var properties = [];
if (doctype && doctype.PropertyGroups && doctype.PropertyGroups.length > 0) {
for (var i = 0; i < doctype.PropertyGroups.length; i++) {
var group = doctype.PropertyGroups[i];
if (
group &&
group.PropertyTypes &&
group.PropertyTypes.length > 0
) {
for (var j = 0; j < group.PropertyTypes.length; j++) {
var property = group.PropertyTypes[j];
if (property.PropertyEditorAlias.indexOf("Grid") < 0) {
properties.push(property);
}
}
}
}
}
if (doctype && doctype.ContentTypeComposition && doctype.ContentTypeComposition.length > 0) {
for (var i = 0; i < doctype.ContentTypeComposition.length; i++) {
var contentTypeProperties = $scope.buildPropertiesForDoctype(doctype.ContentTypeComposition[i]);
if (contentTypeProperties.length > 0) {
properties = properties.concat(contentTypeProperties);
}
}
}
return properties;
};
/**
* @method checkIfResultRowsAreDirty
* @returns {boolean[]}
* @description Iterates through all result nodes, compares the state of
* their editors with their original values for edited properties, and
* determines if that row is "dirty" or not.
*/
$scope.checkIfResultRowsAreDirty = function() {
var allEditors = JSON.parse(
JSON.stringify($scope.propertyEditors)
);
var propsToEdit = JSON.parse(
JSON.stringify($scope.propertiesToEdit)
);
var results = JSON.parse(JSON.stringify($scope.results));
for (var i = 0; i < results.length; i++) {
var isDirty = false;
var node = results[i];
var editors = allEditors[i];
if (editors && editors.length > 0) {
for (var j = 0; j < editors.length; j++) {
var propToEdit = propsToEdit[j];
var editor = editors[j];
if (node[propToEdit.Alias] !== editor.value) {
isDirty = true;
}
}
}
$scope.isRowDirty[i] = isDirty;
}
return $scope.isRowDirty;
};
/**
* @method getContent
* @param {Object} node
* @param {string} doctypeAlias
* @returns {void}
* @description Calls API for list of content with matching doctype alias
* that is beneath the node.
*/
$scope.getContent = function(node, doctypeAlias) {
return bulkEditApi.getMatchingContent(node.id, doctypeAlias).then(function(response) {
if (response && response.data) {
$scope.results = response.data;
$scope.propertiesToEdit = [];
$scope.propertyEditors = [];
$scope.resultProperties = $scope.properties;
$scope.propertyToAdd = $scope.resultProperties[
0
];
console.info("Results", $scope.results);
return $scope.results;
}
}, function(error) {
console.error("Error with getContent() in bulkEdit.dashboard.controller.js: ", error);
});
};
/**
* @method getCurrentPage
* @returns {Object[]}
* @description Returns an array of results for displaying on the
* current page.
*/
$scope.getCurrentPage = function() {
var results = [];
var perPage = $scope.config.itemsPerPage;
if ($scope.results.length > $scope.currentPage * perPage) {
for (var i = $scope.currentPage * perPage; i < ($scope.currentPage + 1) * perPage; i++) {
if ($scope.results[i]) {
results.push($scope.results[i]);
}
}
}
return results;
};
/**
* @method getCurrentSavedSearchPage
* @returns {Object[]}
* @description Returns an array of saved searches for displaying on the
* current page.
*/
$scope.getCurrentSavedSearchPage = function() {
var searches = [];
var perPage = $scope.config.itemsPerPage;
if ($scope.savedSearches.length > $scope.currentSavedSearchPage * perPage) {
for (var i = $scope.currentSavedSearchPage * perPage; i < ($scope.currentSavedSearchPage + 1) * perPage; i++) {
if ($scope.savedSearches[i]) {
searches.push($scope.savedSearches[i]);
}
}
}
return searches;
};
$scope.getDocTypeById = function(id, callback) {
var doctype = false;
for (var i = 0; i < $scope.allDocTypes.length; i++) {
if (id == $scope.allDocTypes[i].Id) {
doctype = JSON.parse(
JSON.stringify($scope.allDocTypes[i])
);
}
}
if (callback) {
callback(doctype);
}
};
/**
* @method getEditCellClass
* @returns {string}
* @description Returns a string for the cell class with the needed spans.
*/
$scope.getEditCellClass = function(defaultClass) {
var classes = "cell ";
if (defaultClass && typeof defaultClass == "string") {
classes += defaultClass + " ";
}
var length = $scope.propertiesToEdit.length;
if (length < 2) {
if ($scope.config.hideIdCol) {
classes += "span9";
} else {
classes += "span8";
}
} else if (length == 2) {
if ($scope.config.hideIdCol) {
classes += "span4-5";
} else {
classes += "span4";
}
} else if (length == 3) {
if ($scope.config.hideIdCol) {
classes += "span3";
} else {
classes += "span2-67";
}
}
return classes;
};
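    // For example (values taken from the branches above, no defaultClass passed):
    // with two properties selected and the Id column visible each edit cell gets
    // "cell span4"; hiding the Id column widens it to "cell span4-5".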
/**
* @method getFilteredAvailableProperties
* @returns {Array}
* @description Returns a filtered selection of resultProperties that aren't
* already selected.
*/
$scope.getFilteredAvailableProperties = function() {
var available = [];
var props = $scope.resultProperties;
var selected = $scope.propertiesToEdit;
for (var i = 0; i < props.length; i++) {
var prop = props[i];
var propAlreadySelected = false;
for (var j = 0; j < selected.length; j++) {
if (selected[j].Id == prop.Id) {
propAlreadySelected = true;
}
}
if (!propAlreadySelected) {
available.push(prop);
}
}
return available;
};
/**
     * @method getJsonProp
* @param {string} stringifiedJSON
* @param {string} paramName
* @returns {any}
* @description Return the value of the indicated parameter belonging to the
* JSON object.
*/
$scope.getJsonProp = function(stringifiedJSON, paramName) {
try {
var jsonAsObject = JSON.parse(stringifiedJSON);
return jsonAsObject[paramName];
} catch (err) {
return "";
}
};
/**
* @method getPages
* @returns {number[]}
* @description returns an array of page numbers.
*/
$scope.getPages = function() {
var pages = [];
var current = $scope.currentPage;
var shouldAddFirst = false;
var shouldAddLast = false;
var maxPage = Math.ceil(
$scope.results.length / $scope.config.itemsPerPage
);
var max = 0;
var min = 0;
if (current < 6 && maxPage > 10) {
max = 9;
shouldAddLast = true;
} else if (maxPage < 11) {
max = maxPage - 1;
} else {
shouldAddFirst = true;
if (maxPage - current > 5) {
shouldAddLast = true;
max = current + 5;
min = current - 4;
} else {
min = maxPage - 10;
max = maxPage - 1;
}
}
if (shouldAddFirst) {
pages.push(1);
}
for (var i = min; i <= max; i++) {
pages.push(i + 1);
}
if (shouldAddLast) {
pages.push(maxPage);
}
return pages;
};
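    /*
     * Illustration of the paging window built above, using hypothetical numbers:
     * with itemsPerPage = 10 and 250 results (so maxPage = 25),
     *   currentPage = 0  -> [1..10, 25]    (leading window plus the last page)
     *   currentPage = 12 -> [1, 9..18, 25] (first page, sliding window, last page)
     *   currentPage = 23 -> [1, 16..25]    (first page plus the trailing window)
     */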
/**
* @method getPath
* @param {string} result
* @returns {string}
     * @description Create a breadcrumb-style path to represent the location of
* a node.
*/
$scope.getPath = function(result) {
var path = result.Path.split(", ").join(" > ");
return path;
};
/**
* @method getPropertyEditor
* @param {number} id
* @returns {promise} - JSON
* @description Returns the editor config object for the datatype with
* the matching datatype after fetching it from the API.
*/
$scope.getPropertyEditor = function(id) {
return bulkEditApi.getDataTypeById(id).then(function(result) {
if (result && result !== null) {
var data = result.data;
var editor = {
alias: "propEditor",
config: data.config,
label: "Placeholder",
view: data.view,
value: null
};
return editor;
} else {
return false;
}
});
};
/**
* @method getResultIndex
* @param {Object} result
* @returns {number}
* @description Looks at the node result passed to it and determines its
* index in the array of $scope.results. Returns that number.
*/
$scope.getResultIndex = function(result) {
var index = 0;
for (var i = 0; i < $scope.results.length; i++) {
if (result.Id == $scope.results[i].Id) {
index = i;
}
}
return index;
};
/**
* @method getSavedSearches
* @returns {Array} an Array of Object
* @description Request a list of previous bulk edit searches from the API.
*/
$scope.getSavedSearches = function() {
return bulkEditApi.getAllSavedSearches().then(function(response) {
$scope.savedSearches = response.data.results;
return response.data.results;
});
};
/**
* @method getSavedSearchPages
* @returns {Array} an Array of Object
* @description Builds an array of the previously saved search listings that
* are visible on the current page and returns them.
*/
$scope.getSavedSearchPages = function() {
var pages = [];
var current = $scope.currentSavedSearchPage;
var shouldAddFirst = false;
var shouldAddLast = false;
var maxPage = Math.ceil(
$scope.savedSearches.length / $scope.config.itemsPerPage
);
var max = 0;
var min = 0;
if (current < 6 && maxPage > 10) {
max = 9;
shouldAddLast = true;
} else if (maxPage < 11) {
max = maxPage - 1;
} else {
shouldAddFirst = true;
if (maxPage - current > 5) {
shouldAddLast = true;
max = current + 5;
min = current - 4;
} else {
min = maxPage - 10;
max = maxPage - 1;
}
}
if (shouldAddFirst) {
pages.push(1);
}
for (var i = min; i <= max; i++) {
pages.push(i + 1);
}
if (shouldAddLast) {
pages.push(maxPage);
}
return pages;
};
/**
* @method gotoPage
* @param {number} page
* @returns {void}
* @description Changes the currently viewed page of results.
*/
$scope.gotoPage = function(page) {
page = Number(page);
page = page - 1;
$scope.currentPage = page;
};
/**
* @method gotoSavedSearchPage
* @param {number} page
* @returns {void}
* @description Changes the currently viewed page of saved searches.
*/
$scope.gotoSavedSearchPage = function(page) {
page = Number(page);
page = page - 1;
$scope.currentSavedSearchPage = page;
};
/**
* @method hideNav
* @returns {void}
* @description Hides the navigation panel so we have more space to work
* with.
*/
$scope.hideNav = function() {
// hide the tree.
appState.setGlobalState("showNavigation", false);
// get the width of the remaining left column.
var lc = document.querySelector("#leftcolumn");
var columnWidth = window.getComputedStyle(lc).width;
// manually change the 'left' property of the #contentwrapper to
// hide the whitespace created by collapsing the menu.
var cw = document.querySelector("#contentwrapper");
var styles = cw.getAttribute("style");
if (styles == null) {
styles = "";
}
styles += " left: " + columnWidth + ";";
cw.setAttribute("style", styles);
// listen for resize because it'll auto-pop the sidebar.
window.addEventListener(
"resize",
$scope.resetWrapperOffsetOnResize
);
};
/**
* @method listenForExit
* @returns {void}
* @description Listen for when the $scope is exited/destroyed, to finish up some stuff.
*/
$scope.listenForExit = function() {
$scope.$on('$destroy', function() {
// reset the nav to its default state.
$scope.showNav();
});
};
/**
* @method openPage
* @param {string} verb - must be 'GET' or 'POST'
* @param {string} url
* @param {JSON} data
* @param {string} target (can be a name or "_blank", defaults to "_self")
*/
$scope.openPage = function(verb, url, data, target) {
var form = document.createElement("form");
form.action = url;
form.method = verb;
form.target = target || "_self";
if (data) {
for (var key in data) {
var input = document.createElement("textarea");
input.name = key;
input.value = typeof data[key] === "object"
? JSON.stringify(data[key])
: data[key];
form.appendChild(input);
}
}
form.style.display = "none";
document.body.appendChild(form);
form.submit();
};
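// Usage sketch (hypothetical URL and payload): POST the current results to an export
// endpoint in a new tab via the hidden form built above.
// $scope.openPage("POST", "/umbraco/backoffice/BulkEdit/Export", { rows: $scope.results }, "_blank");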
/**
* @method overwritePropValue
* @param {string} alias
* @param {any} value
* @param {number} index
* @returns {JSON}
* @description overwrites the property on the client-side version of the
* result's property at the given index to sync it with the prop editor
* value.
*/
$scope.overwritePropValue = function(alias, value, index) {
$scope.results[index][alias] = value;
return $scope.results[index];
};
/**
* @method resetWrapperOffsetOnResize
* @returns {void}
* @description Removes the style we applied to the #contentwrapper div.
* We call this when the page is resized because Umbraco re-activates
* the navigation menu, which will overlap.
*/
$scope.resetWrapperOffsetOnResize = function() {
var cw = document.querySelector("#contentwrapper");
cw.setAttribute("style", "");
window.removeEventListener(
"resize",
$scope.resetWrapperOffsetOnResize
);
};
/**
* @method saveSearchIfUnique
* @param {JSON} startNode
* @param {JSON} docType
* @returns {promise}
* @description If we haven't saved this search before, save a new one.
*/
$scope.saveSearchIfUnique = function(startNode, docType) {
var searchName = "All " + docType.Alias + " under " + startNode.name;
var isUnique = true;
// Loop through every existing saved search to make sure we're not
// requesting to save a duplicate.
for (var i = 0; i < $scope.savedSearches.length; i++) {
var search = $scope.savedSearches[i];
var options = JSON.parse(search.Options);
if (options.rootId == startNode.id && options.alias == docType.Alias) {
isUnique = false;
}
}
// If it's unique, go ahead and post the search then update our
// saved searches from the server.
if (isUnique) {
return bulkEditApi.postSavedSearch(searchName, startNode.id, docType.Alias).then(function(response) {
return $scope.getSavedSearches();
});
} else {
return $scope.savedSearches;
}
};
/**
* @method showNav
* @returns {void}
* @description Resets the tree to its default display state.
*/
$scope.showNav = function() {
// show the tree.
appState.setGlobalState("showNavigation", true);
$scope.resetWrapperOffsetOnResize();
};
/**
* @method sortArrayAlphaByProp
* @param {Object[]} array
* @param {string} propName
* @returns {Object[]}
* @description Sorts the provided array alphabetically per the named
* property, and then returns the sorted array.
*/
$scope.sortArrayAlphaByProp = function(array, propName) {
array.sort(function(a, b) {
var textA = a[propName].toUpperCase();
var textB = b[propName].toUpperCase();
return textA < textB ? -1 : textA > textB ? 1 : 0;
});
return array;
};
/**
* @method startWatchingEditors
* @returns {void}
* @description Starts $scope.watch on propertyEditors to help the scope
* know when those are being touched.
*/
$scope.startWatchingEditors = function() {
if (!$scope.haveSetEditorWatcher) {
$scope.$watch("propertyEditors", function() { $scope.checkIfResultRowsAreDirty(); }, true);
$scope.$watch("results", function() { $scope.checkIfResultRowsAreDirty(); }, true);
$scope.haveSetEditorWatcher = true;
}
};
// Call $scope.init() ////////////////////////////////////////////////////////
$scope.init();
});
|
# -*- coding: utf-8 -*-
"""Top-level package for hypothesis_torch."""
__author__ = """Lea Provenzano"""
__email__ = '[email protected]'
__version__ = '0.0.0'
|
from flaky import (
flaky,
)
@flaky(max_runs=3)
def test_miner_hashrate(web3_empty, wait_for_miner_start):
web3 = web3_empty
hashrate = web3.eth.hashrate
assert hashrate > 0
|
module.exports={A:{A:{"1":"E A B","2":"I D F pB"},B:{"1":"C N O Q J K L","2":"b KB OB R S T U M W X G"},C:{"2":"0 1 2 3 4 5 6 7 8 9 oB WB H c I D F E A B C N O Q J K L d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB eB IB VB P LB MB NB Y PB QB RB SB TB UB JB HB a XB YB ZB b KB OB nB R S T U M W X G wB zB"},D:{"2":"0 1 2 3 4 5 6 7 8 9 H c I D F E A B C N O Q J K L d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB eB IB VB P LB MB NB Y PB QB RB SB TB UB JB HB a XB YB ZB b KB OB R S T U M W X G 3B fB gB"},E:{"2":"H c I D F E A B C N O hB aB jB kB lB mB bB Z V qB rB"},F:{"2":"0 1 2 3 4 5 6 7 8 9 E B C Q J K L d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB IB P LB MB NB Y PB QB RB SB TB UB JB HB a sB tB uB vB Z cB xB V"},G:{"2":"F aB yB dB 0B 1B 2B bC 4B 5B 6B 7B 8B 9B AC BC CC DC EC FC GC HC"},H:{"2":"IC"},I:{"2":"WB H G JC KC LC MC dB NC OC"},J:{"2":"D A"},K:{"2":"A B C P Z cB V"},L:{"2":"G"},M:{"2":"M"},N:{"1":"A B"},O:{"2":"PC"},P:{"2":"H QC RC SC TC UC bB VC WC XC YC"},Q:{"2":"ZC"},R:{"2":"aC"},S:{"2":"iB"}},B:6,C:"JPEG XR image format"};
|
// We register the TypeScript evaluator in gatsby-config so we don't need to do
// it in any other .js file. It automatically reads TypeScript config from
// tsconfig.json.
// eslint-disable-next-line @typescript-eslint/no-var-requires
require("ts-node").register({ project: "./tsconfig.gatsby.json" })
// Use a TypeScript version of gatsby-config.js.
module.exports = require("./src/gatsby-api/config")
|
"""setup.py file."""
import uuid
from setuptools import setup, find_packages
try: # for pip >= 10
from pip._internal.req import parse_requirements
except ImportError: # for pip <= 9.0.3
from pip.req import parse_requirements
__author__ = 'David Barroso <[email protected]>'
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
try:
reqs = [str(ir.req) for ir in install_reqs]
except AttributeError:  # newer pip exposes ir.requirement instead of ir.req
reqs = [str(ir.requirement) for ir in install_reqs]
setup(
name="napalm-fortios",
version="0.4.1",
packages=find_packages(),
author="David Barroso",
author_email="[email protected]",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm-fortios",
include_package_data=True,
install_requires=reqs,
)
|
#!/usr/bin/env node
/*
Copyright 2016 The Trustees of University of Arizona
Licensed under the Apache License, Version 2.0 (the "License" );
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
var util = require('util');
var utils = require('./utils.js');
var clientConfig = require('./client_config.js');
var restler = require('restler');
var minimist = require('minimist');
var fs = require('fs');
var async = require('async');
var path = require('path');
function get_default_client_config_path() {
return util.format("%s/%s", __dirname, "client_config.json");
}
function parse_args(args) {
var options = {
user: "",
password: "",
config_path: ""
};
// skip first two args
// 1: node
// 2: *.js script
var argv = minimist(args.slice(2));
// parse
options.user = argv.u || "";
options.password = argv.p || "";
options.config_path = argv.c || get_default_client_config_path();
return options;
}
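// Example invocation (hypothetical credentials and script name); -c falls back to the
// bundled client_config.json when omitted:
// node list_datasets.js -u alice -p secret -c /path/to/client_config.json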
function check_config(conf) {
if(conf.user && conf.password && conf.service_host && conf.service_port > 0) {
return true;
}
return false;
}
function list_datasets(host, port, user, password, callback) {
var url = util.format("http://%s:%d/datasets/list", host, port);
restler.get(url).on('complete', function(result, response) {
if(result instanceof Error) {
//utils.log_error(util.format("[%s:%d] %s", host, port, result));
callback(result, null);
return;
} else {
//utils.log_info(util.format("[%s:%d] %s", host, port, JSON.stringify(result)));
callback(null, result);
return;
}
});
}
(function main() {
utils.log_info("List datasets");
var param = parse_args(process.argv);
var client_config = clientConfig.get_config(param.config_path, {
"user": param.user,
"password": param.password
});
if(client_config == null) {
utils.log_error("cannot read configuration");
process.exit(1);
}
if(!check_config(client_config)) {
utils.log_error("arguments are not given properly");
process.exit(1);
}
try {
var host = client_config.service_host;
var port = client_config.service_port;
list_datasets(host, port, client_config.user, client_config.password, function(err, result) {
if(err) {
utils.log_error(util.format("[%s:%d] %s", host, port, err));
return;
}
for(var i in result) {
var item = result[i];
console.log(JSON.stringify(item));
//utils.log_info(JSON.stringify(item));
}
process.exit(0);
});
} catch (e) {
utils.log_error(util.format("Exception occured: %s", e));
process.exit(1);
}
})();
|
/* Determine whether the client is a mobile device */
function isMobile(){
return navigator.userAgent.match(/iPhone|iPad|iPod|Android|android|BlackBerry|IEMobile/i) ? true : false;
}
// Determine whether this is the online ShowDoc site
function is_showdoc_online(){
var host = window.location.host;
if(host.indexOf("showdoc.cc") > -1 || host.indexOf("wu.com") > -1){
return true;
}else{
return false;
}
}
// Add a color to the text
function set_text_color( id , color){
var cookie_key = "is_"+id+"_click";
var is_click = getCookie(cookie_key);
if (!is_click) {
$("#"+id).css("color",color);
};
$("#"+id).click(function(){
var is_click = getCookie(cookie_key);
if (!is_click) {
$(this).css("color","");
setCookie(cookie_key , 1 , 900);
};
});
}
/// Set a cookie
function setCookie(NameOfCookie, value, expiredays)
{
//@params: three variables used to set the new cookie:
//the name of the cookie, the cookie value to store,
// and the time at which the cookie expires.
// The following lines convert the number of days into a valid date.
var ExpireDate = new Date ();
ExpireDate.setTime(ExpireDate.getTime() + (expiredays * 24 * 3600 * 1000));
// The next line stores the cookie; simply assigning to "document.cookie" is enough.
// Note that the date is converted to GMT time via the toGMTString() function.
document.cookie = NameOfCookie + "=" + escape(value) +
((expiredays == null) ? "" : "; expires=" + ExpireDate.toGMTString());
}
/// Get a cookie value
function getCookie(NameOfCookie)
{
// First check whether any cookie exists at all.
// If none exists, document.cookie has a length of 0.
if (document.cookie.length > 0)
{
// Next check whether the cookie name is present in document.cookie.
// Because more than one cookie can be stored, a non-zero document.cookie length does not guarantee that the cookie we want exists,
//so this step is needed to see whether the cookie we want is there.
//If the variable begin ends up as -1, the cookie does not exist.
begin = document.cookie.indexOf(NameOfCookie+"=");
if (begin != -1)
{
// Our cookie exists.
begin += NameOfCookie.length+1;//start position of the cookie value
end = document.cookie.indexOf(";", begin);//end position
if (end == -1) end = document.cookie.length;//no ";" found, so end is the end of the string
return unescape(document.cookie.substring(begin, end)); }
}
return null;
// Return null when the cookie does not exist
}
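// Usage sketch (hypothetical names and values) tying the helpers together:
// setCookie("is_notice_click", 1, 7);            // remember a click for 7 days
// if (getCookie("is_notice_click")) { /* already clicked */ }
// delCookie("is_notice_click");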
/// Delete a cookie
function delCookie (NameOfCookie)
{
// This function checks whether the cookie is set; if it is, its expiry time is moved into the past,
//and the operating system is left to clean the cookie up at a suitable time.
if (getCookie(NameOfCookie)) {
document.cookie = NameOfCookie + "=" +
"; expires=Thu, 01-Jan-70 00:00:01 GMT";
}
} |
// @flow
import React from 'react'
import APILoader from '../utils/APILoader'
import isFun from '../utils/isFun'
import log from '../utils/log'
import { toLnglat } from '../utils/common'
import withPropsReactive from '../utils/withPropsReactive'
const Component = React.Component
const Children = React.Children
const containerStyle = {
width: '100%',
height: '100%'
}
const wrapperStyle = {
width: '100%',
height: '100%',
position: 'relative'
}
// Native supported dynamic props by Amap
const NativeDynamicProps: Array<string> = [
'layers',
'zoom',
'center',
'labelzIndex',
// 'lang', native error in JSSDK when 3D viewMode
'mapStyle',
'features',
'cursor',
'pitch'
]
/*
* Props below can set by 'setStatus' altogether
*/
const StatusDynamicProps: Array<string> = [
'animateEnable',
'doubleClickZoom',
'dragEnable',
'isHotspot',
'jogEnable',
'keyboardEnable',
'resizeEnable',
'rotateEnable',
'scrollWheel',
'touchZoom',
'zoomEnable'
]
const StaticProps: Array<string> = [
'view',
'zooms',
'showIndoorMap',
'indoorMap',
'expandZoomRange',
'showBuildingBlock',
'viewMode',
'pitchEnable',
'buildingAnimation',
'skyColor'
]
const CreateProps = NativeDynamicProps.concat(StatusDynamicProps, StaticProps)
// const reservedPropName = [
// 'amapkey',
// 'version',
// 'useAMapUI',
// 'onInstanceCreated',
// 'events',
// 'loading',
// 'plugins'
// ]
const defaultOpts = {
MapType: {
showRoad: false,
showTraffic: false,
defaultType: 0
},
ToolBar: {
position: 'RB',
noIpLocate: true,
locate: true,
liteStyle: true,
autoPosition: false
},
OverView: {},
ControlBar: {}
}
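// A hedged example of the `plugins` prop consumed by setPlugins() below: entries can be
// plain names or {name, options} objects, and options.visible === false skips installation.
// plugins={['Scale', {name: 'ToolBar', options: {position: 'RB', visible: true}}]}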
class BaseMap extends Component<MapProps, {mapLoaded: boolean}> {
pluginMap: Object
loader: Object
map: Object
mapWrapper: ?HTMLDivElement
setterMap: Object
converterMap: Object
constructor(props: MapProps) {
super(props)
this.state = {
mapLoaded: false
}
const self = this
this.setterMap = {
zoom(val) {
self.map.setZoom(val)
},
cursor(val) {
self.map.setDefaultCursor(val)
},
labelzIndex(val) {
self.map.setlabelzIndex(val)
}
}
this.converterMap = {
center: toLnglat,
mapStyle: styleStr => {
if (styleStr.indexOf('amap://styles') === 0) {
return styleStr
}
return `amap://styles/${styleStr}`
}
}
if (typeof window !== 'undefined') {
this.pluginMap = {}
new APILoader({
key: props.amapkey,
useAMapUI: props.useAMapUI,
version: props.version,
protocol: props.protocol
}).load().then(() => {
this.createInstance()
if (!this.state.mapLoaded) {
this.setState({
mapLoaded: true
})
}
})
}
}
get instance() {
return this.map
}
componentWillReceiveProps(nextProps: MapProps) {
if (this.state.mapLoaded) {
this.updateMapProps(this.props, nextProps)
}
}
renderChildren() {
return Children.map(this.props.children, (child) => {
if (child) {
const cType = child.type
/* Do not inject map-related props into the following two kinds of children:
* 1. components that explicitly declare they do not want the injection
* 2. DOM elements
*/
if (cType.preventAmap || (typeof cType === 'string')) {
return child
}
return React.cloneElement(child, {
__map__: this.map
})
}
return child
})
}
createInstance() {
if (!this.map) {
const options = this.buildCreateOptions()
this.map = new window.AMap.Map(this.mapWrapper, options)
// install map plugins
this.setPlugins(this.props)
this.props.onInstanceCreated && this.props.onInstanceCreated()
}
}
buildCreateOptions() {
const props = this.props
const options = {}
CreateProps.forEach((key) => {
if (key in props) {
options[key] = this.getSetterValue(key, props)
}
})
return options
}
updateMapProps(prevProps: MapProps, nextProps: MapProps) {
const nextMapStatus = {}
let statusChangeFlag = false
let statusPropExist = false
StatusDynamicProps.forEach((key) => {
if (key in nextProps) {
statusPropExist = true
if (this.detectPropChanged(key, prevProps, nextProps)) {
statusChangeFlag = true
nextMapStatus[key] = nextProps[key]
}
}
})
statusChangeFlag && this.map.setStatus(nextMapStatus)
if (statusPropExist && 'status' in nextProps) {
log.warning(`The props below can each be configured individually, or all together through the 'status' prop; please do not use both approaches at the same time.\n(${StatusDynamicProps.join(', ')})`)
}
StaticProps.forEach((key) => {
if (key in nextProps) {
if (this.detectPropChanged(key, prevProps, nextProps)) {
log.warning(`'${key}' is a static prop and cannot be modified after the map instance has been created`)
}
}
})
this.setPlugins(nextProps)
}
getSetterValue(key: string, props: MapProps) {
if (key in this.converterMap) {
return this.converterMap[key](props[key])
}
return props[key]
}
detectPropChanged(key: string, prevProps: MapProps, nextProps: MapProps) {
return prevProps[key] !== nextProps[key]
}
setPlugins(props: MapProps) {
const pluginList = ['Scale', 'ToolBar', 'MapType', 'OverView', 'ControlBar']
if ('plugins' in props) {
const plugins = props.plugins
if (plugins && plugins.length) {
plugins.forEach((p) => {
let name, config, visible
if (typeof p === 'string') {
name = p
config = null
visible = true
} else {
name = p.name
config = p.options || {}
visible = (('visible' in config) && (typeof config.visible === 'boolean')) ? config.visible : true
delete config.visible
}
const idx = pluginList.indexOf(name)
if (idx === -1) {
log.warning(`There is no plugin named '${name}'; please check the spelling`)
} else {
if (visible) {
pluginList.splice(idx, 1)
this.installPlugin(name, config)
}
}
})
}
}
this.removeOrDisablePlugins(pluginList)
}
removeOrDisablePlugins(plugins: any[]) {
if (plugins && plugins.length) {
plugins.forEach((p) => {
if (p in this.pluginMap) {
// ControlBar has no 'hide' method
if (p === 'ControlBar') {
this.map.removeControl(this.pluginMap[p])
delete this.pluginMap[p]
} else {
this.pluginMap[p].hide()
}
}
})
}
}
installPlugin(name: string, opts: ?Object) {
opts = opts || {}
switch (name) {
case 'Scale':
case 'ToolBar':
case 'OverView':
case 'MapType':
this.setMapPlugin(name, opts)
break
case 'ControlBar':
this.setControlBar(opts)
break
default:
// do nothing
}
}
setMapPlugin(name: string, opts: Object) {
if (this.pluginMap[name]) {
this.pluginMap[name].show()
} else {
const { onCreated, ...restOpts } = opts
const initOpts = {...defaultOpts[name], ...restOpts}
this.map.plugin([`AMap.${name}`], () => {
this.pluginMap[name] = new window.AMap[name](initOpts)
this.map.addControl(this.pluginMap[name])
if (isFun(onCreated)) {
onCreated(this.pluginMap[name])
}
})
}
}
setControlBar(opts: Object) {
if (this.pluginMap.ControlBar) {
// do nothing
} else {
const { onCreated, ...restOpts } = opts
const initOpts = {...defaultOpts.ControlBar, ...restOpts}
this.map.plugin(['AMap.ControlBar'], () => {
this.pluginMap.ControlBar = new window.AMap.ControlBar(initOpts)
this.map.addControl(this.pluginMap.ControlBar)
if (isFun(onCreated)) {
onCreated(this.pluginMap.ControlBar)
}
})
}
}
render() {
return (<div style={wrapperStyle}>
<div ref={(div)=>{ this.mapWrapper = div }} style={containerStyle}>
{
this.state.mapLoaded ? null : this.props.loading || null
}
</div>
<div>{ this.state.mapLoaded ? this.renderChildren() : null }</div>
</div>)
}
}
export default withPropsReactive(BaseMap)
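// Usage sketch (hypothetical key and coordinate shape; `center` is passed through the
// toLnglat converter registered above):
// <BaseMap amapkey="your-amap-key" zoom={12} center={{longitude: 116.39, latitude: 39.9}}
//   onInstanceCreated={() => console.log('map ready')} />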
|
"use strict";
// Test rhdp-search-results component
xdescribe('Search Results (list)', function() {
var wc;
beforeEach(function() {
document.body.insertBefore(document.createElement('rhdp-search-results'), document.body.firstChild);
wc = document.body.firstChild;
});
afterEach(function() {
document.body.removeChild(document.body.firstChild);
});
it('should have a results object', function() {
var results = {hits: {hits: []}};
wc.set('results', results);
expect(wc.results).toEqual(results);
});
it('should have named slots (top and bottom)', function() {
var slots = wc.shadowRoot.querySelectorAll('slot');
expect(slots[0].name).toEqual('top');
expect(slots[2].name).toEqual('bottom');
});
}); |
import React, {useState} from 'react';
import clsx from 'clsx';
import {
useThemeConfig,
useAnnouncementBar,
MobileSecondaryMenuFiller,
ThemeClassNames,
useScrollPosition,
useWindowSize,
} from '@docusaurus/theme-common';
import Logo from '@theme/Logo';
import IconArrow from '@theme/IconArrow';
import {translate} from '@docusaurus/Translate';
import {DocSidebarItems} from '@theme/DocSidebarItem';
import styles from './styles.module.css';
import SidebarAd from '../../components/SidebarAd';
import CarbonAd from '../../components/CarbonAd';
function useShowAnnouncementBar() {
const {isActive} = useAnnouncementBar();
const [showAnnouncementBar, setShowAnnouncementBar] = useState(isActive);
useScrollPosition(
({scrollY}) => {
if (isActive) {
setShowAnnouncementBar(scrollY === 0);
}
},
[isActive],
);
return isActive && showAnnouncementBar;
}
function HideableSidebarButton({onClick}) {
return (
<button
type="button"
title={translate({
id: 'theme.docs.sidebar.collapseButtonTitle',
message: 'Collapse sidebar',
description: 'The title attribute for collapse button of doc sidebar',
})}
aria-label={translate({
id: 'theme.docs.sidebar.collapseButtonAriaLabel',
message: 'Collapse sidebar',
description: 'The title attribute for collapse button of doc sidebar',
})}
className={clsx(
'button button--secondary button--outline',
styles.collapseSidebarButton,
)}
onClick={onClick}>
<IconArrow className={styles.collapseSidebarButtonIcon} />
</button>
);
}
function DocSidebarDesktop({path, sidebar, onCollapse, isHidden}) {
const showAnnouncementBar = useShowAnnouncementBar();
const {
navbar: {hideOnScroll},
hideableSidebar,
} = useThemeConfig();
return (
<div
className={clsx(styles.sidebar, {
[styles.sidebarWithHideableNavbar]: hideOnScroll,
[styles.sidebarHidden]: isHidden,
})}>
{hideOnScroll && <Logo tabIndex={-1} className={styles.sidebarLogo} />}
<nav
className={clsx('menu thin-scrollbar', styles.menu, {
[styles.menuWithAnnouncementBar]: showAnnouncementBar,
})}>
<ul className={clsx(ThemeClassNames.docs.docSidebarMenu, 'menu__list')}>
<DocSidebarItems items={sidebar} activePath={path} level={1} />
</ul>
</nav>
{hideableSidebar && <HideableSidebarButton onClick={onCollapse} />}
</div>
);
}
const DocSidebarMobileSecondaryMenu = ({toggleSidebar, sidebar, path}) => {
return (
<ul className={clsx(ThemeClassNames.docs.docSidebarMenu, 'menu__list')}>
<DocSidebarItems
items={sidebar}
activePath={path}
onItemClick={() => toggleSidebar()}
level={1}
/>
<div className="margin--md">
<SidebarAd position="mobile_sidebar" />
</div>
</ul>
);
};
function DocSidebarMobile(props) {
return (
<MobileSecondaryMenuFiller
component={DocSidebarMobileSecondaryMenu}
props={props}
/>
);
}
const DocSidebarDesktopMemo = React.memo(DocSidebarDesktop);
const DocSidebarMobileMemo = React.memo(DocSidebarMobile);
export default function DocSidebar(props) {
const windowSize = useWindowSize(); // Desktop sidebar visible on hydration: need SSR rendering
const shouldRenderSidebarDesktop =
windowSize === 'desktop' || windowSize === 'ssr'; // Mobile sidebar not visible on hydration: can avoid SSR rendering
const shouldRenderSidebarMobile = windowSize === 'mobile';
return (
<>
{shouldRenderSidebarDesktop && <DocSidebarDesktopMemo {...props} />}
{shouldRenderSidebarMobile && <DocSidebarMobileMemo {...props} />}
</>
);
}
|
"use strict";
let Supervisor = {
identification : 0,
processes : {},
send : function (party, message, data, identifier) {
/* <CONNECTION-FOR-(party)>.send({
identifier : identifier,
message : message,
data : data
}); */
},
receive : function (message, data, identifier) {
// Supervisor.receive, for the most part, treats input that comes directly from the server as trusted and does not fully validate it (although it does do some checking for convenience).
var unsuccessful = function (reason) {
return {
success : false,
reason : reason
};
};
if (arguments.length < 2 || arguments.length > 3) {
// This is actually an issue with the way Supervisor is invoked
return unsuccessful("Supervisor.receive() expects 2 or 3 arguments, but received " + arguments.length + ".");
}
// Check the format of the arguments
if (typeof message !== "string")
return unsuccessful("The parameter `message` should have been a string, but had type `" + (typeof message) + "`.");
if (typeof data !== "object")
return unsuccessful("The parameter `data` should have been an object, but had type `" + (typeof data) + "`.");
if (arguments.length >= 3) {
if (typeof identifier !== "number")
return unsuccessful("The parameter `identifier` should have been a number, but had type `" + (typeof identifier) + "`.");
if (identifier !== Math.floor(identifier) || identifier < 0)
return unsuccessful("The parameter `identifier` should have been a natural number, but had value `" + identifier + "`.");
}
switch (message) {
case "initiate":
// Initiate a battle between two parties
// data: parties, data, rules, flags, callback
identifier = Supervisor.identification;
// Check that all the rules are matched
var valid = true;
// data.rules.banned.items
// Pokémon (form(e)s)
// moves
// abilities
if (!data.hasOwnProperty("rules"))
return unsuccessful("The parameter `data` should have had a `rules` property.");
if (typeof data.rules !== "object")
return unsuccessful("The parameter `data.rules` should have been an object, but had type `" + (typeof data.rules) + "`.");
if (!data.rules.hasOwnProperty("clauses"))
return unsuccessful("The parameter `data.rules` should have had a `clauses` property.");
if (!Array.isArray(data.rules.clauses))
return unsuccessful("The parameter `data.rules.clauses` should have been an array.");
if (data.rules.timer !== null && typeof data.rules.timer !== "number")
return unsuccessful("The parameter `data.rules.timer` should have been a number or null, but had type `" + (typeof data.rules.timer) + "`.");
if (foreach(data.rules.clauses, function (clause) {
if (typeof clause !== "object")
return true;
if (!clause.hasOwnProperty("regards"))
return true;
switch (clause.regards) {
case "Pokémon":
break;
case "party":
break;
case "move":
break;
}
})) {
return unsuccessful("One of the elements of `data.rules.clauses` was malformed.");
}
if (valid) {
if (!data.hasOwnProperty("parties"))
return unsuccessful("The parameter `data` should have had a `parties` property.");
if (!Array.isArray(data.parties))
return unsuccessful("The parameter `data.parties` should have been an array.");
if (!data.hasOwnProperty("data"))
return unsuccessful("The parameter `data` should have had a `data` property.");
if (typeof data.data !== "object")
return unsuccessful("The parameter `data.data` should have been an object, but had type `" + (typeof data.data) + "`.");
var battle = BattleContext();
Supervisor.processes[identifier] = {
parties : data.parties,
parameters : data.data,
rules : data.rules,
relay : [],
relayed : 0,
battle : battle,
timer : null
};
if (!data.data.hasOwnProperty("teamA"))
return unsuccessful("The parameter `data.data` should have had a `teamA` property.");
if (typeof data.data.teamA !== "object")
return unsuccessful("The parameter `data.data.teamA` should have been an object, but had type `" + (typeof data.data.teamA) + "`.");
if (!data.data.hasOwnProperty("teamB"))
return unsuccessful("The parameter `data.data` should have had a `teamB` property.");
if (typeof data.data.teamA !== "object")
return unsuccessful("The parameter `data.data.teamB` should have been an object, but had type `" + (typeof data.data.teamB) + "`.");
// It would be good to validate teamA and teamB, but we're trusting the server anyway — the responses in `initialise` are really just to help out anyone debugging the function
var teamA = new trainer(data.data.teamA.trainer), teamB = new trainer(data.data.teamB.trainer);
teamA.type = data.data.teamA.type;
teamB.type = data.data.teamB.type;
var callback = function (flags, trainers) {
Supervisor.countdown(identifier, true);
data.callback(flags, trainers, Supervisor.record(identifier));
battle.destroy();
delete Supervisor.processes[identifier];
};
if (!data.data.hasOwnProperty("seed"))
return unsuccessful("The parameter `data.data` should have had a `seed` property.");
if (typeof data.data.seed !== "number")
return unsuccessful("The parameter `data.data.seed` should have been a number, but had type `" + (typeof data.data.seed) + "`.");
if (!data.data.hasOwnProperty("parameters"))
return unsuccessful("The parameter `data.data` should have had a `parameters` property.");
if (typeof data.data.parameters !== "object")
return unsuccessful("The parameter `data.data.parameters` should have been an object, but had type `" + (typeof data.data.parameters) + "`.");
battle.random.seed = data.data.seed;
var illegalBattle;
if (teamA.identification === 0) { /* Code for wild battles */
illegalBattle = !battle.beginWildBattle(teamB, teamA.party.pokemon, data.data.parameters, callback);
} else if (teamB.identification === 0) {
illegalBattle = !battle.beginWildBattle(teamA, teamB.party.pokemon, data.data.parameters, callback);
} else {
illegalBattle = !battle.beginOnline(data.data.seed, teamA, teamB, data.data.parameters, callback);
}
if (illegalBattle) {
return unsuccessful("The battle was illegal in some form (probably due to one of the trainers not having any valid Pokémon).");
}
foreach(data.parties, function (party) {
Supervisor.send(party, "initiate", {
rules : data.rules,
data : data.data
}, identifier);
});
Supervisor.countdown(identifier);
return {
success : true,
identification : Supervisor.identification ++
};
} else {
return {
success : false,
reason : "The parties did not conform to the battle rules."
};
}
case "spectate":
// Another party joins a battle (as a spectator)
// data: spectators
if (!data.hasOwnProperty("spectators"))
return unsuccessful("The parameter `data` should have had a `spectators` property.");
if (!Array.isArray(data.spectators))
return unsuccessful("The parameter `data.spectators` should have been an array.");
var process = Supervisor.processes[identifier], parties = [];
foreach(data.spectators, function (spectator) {
var party = spectator.party;
parties.push(party);
// Initiate the party's battle
var recording = JSONCopy(process.parameters);
if (spectator.perspective !== recording.teamA.trainer.identification) {
if (spectator.perspective === recording.teamB.trainer.identification) {
var temp = recording.teamA;
recording.teamA = recording.teamB;
recording.teamB = temp;
} else {
return unsuccessful("One of the spectators was trying to observe from the perspective of a trainer who was not battling.");
}
}
Supervisor.send(party, "initiate", {
rules : process.rules,
data : recording
}, identifier);
// Bring the party up to date on all the actions taken so far
Supervisor.send(party, "actions", process.relay.slice(0, process.relayed), identifier);
});
process.parties = process.parties.concat(parties);
return {
success : true
};
case "leave":
// A party stops spectating a battle
// data: parties
if (!data.hasOwnProperty("parties"))
return unsuccessful("The parameter `data` should have had a `parties` property.");
if (!Array.isArray(data.parties))
return unsuccessful("The parameter `data.parties` should have been an array.");
var process = Supervisor.processes[identifier];
foreach(data.parties, function (party) {
process.parties.removeElementsOfValue(party);
Supervisor.send(party, "terminate", "stopped spectating", identifier);
});
return {
success: true
};
case "terminate":
// Terminates a battle that is in progress
// data: reason
if (!data.hasOwnProperty("reason"))
return unsuccessful("The parameter `data` should have had a `reason` property.");
var process = Supervisor.processes[identifier];
process.battle.end({
"outcome" : "termination"
}, true);
foreach(process.parties, function (party) {
Supervisor.send(party, "terminate", data.reason, identifier);
});
delete Supervisor.processes[identifier];
return {
success : true,
process : process
};
case "force":
// Forces a particular outcome during a battle
// There is very little good reason to use this, apart from for testing reasons. It can be easily abused.
var process = Supervisor.processes[identifier];
var alliedVictory = null;
if (data !== null) {
if ((data.hasOwnProperty("winner") && data.winner === process.battle.alliedTrainers.first().identification) || (data.hasOwnProperty("loser") && data.loser === process.battle.opposingTrainers.first().identification))
alliedVictory = true;
else if ((data.hasOwnProperty("winner") && data.winner === process.battle.opposingTrainers.first().identification) || (data.hasOwnProperty("loser") && data.loser === process.battle.alliedTrainers.first().identification))
alliedVictory = false;
}
process.battle.end({
"outcome" : alliedVictory === null ? "draw" : (alliedVictory === true ? "allied victory" : (alliedVictory === false ? "opposing victory" : "termination")),
"forced" : true
}, true);
foreach(process.parties, function (party) {
Supervisor.send(party, "force", data, identifier);
});
delete Supervisor.processes[identifier];
return {
success : true,
process : process
};
case "relay":
// Sends data between two battling parties
// data: party, team, data (party here being not a Pokémon party, but a participant)
// Assumes the party sending the data was one of the parties involved in the process it is sending to
// The party should be an identifier matches up with a trainer team
var process = Supervisor.processes[identifier], valid = true;
if (!data.hasOwnProperty("data"))
return unsuccessful("The parameter `data` should have had a `data` property.");
if (!Array.isArray(data.data))
return unsuccessful("The parameter `data.data` should have been an array.");
if (!foreach(data.data, function (datum) {
if (typeof datum !== "object" || datum === null)
return true;
})) {
foreach(data.data, function (action) {
action.trainer = data.team;
});
// Assumes that the correct number of actions will be sent at once (i.e. no split data packets)
var issues = [];
var selection = process.relay.slice(process.relayed).filter(action => action.action === "command" && action.trainer === data.team).length;
if (process.battle.communicationForTrainerIsValid(data.team, data.data, selection, issues)) {
process.relay = process.relay.concat(data.data);
var actionsToSend = process.relay.slice(process.relayed);
if (process.battle.state.kind === "waiting" && process.battle.hasCommunicationForTrainers(process.battle.state.for, actionsToSend)) {
process.battle.receiveActions(actionsToSend);
foreach(process.parties, function (party) {
Supervisor.send(party, "actions", actionsToSend, identifier);
});
Supervisor.countdown(identifier, process.battle.finished);
process.relayed = process.relay.length;
}
} else {
return {
success : false,
reason : "The input sent by the client was invalid.",
party : data.party,
input : data.data,
issues : issues
};
}
} else {
return unsuccessful("One of the elements of `data.data` was not an object.");
}
return {
success : true
};
case "sync":
// Checks the clients for the different parties are in sync with the main battle
// data: party, data : { state }
var process = Supervisor.processes[identifier], battle = process.battle, issues = [];
var assert = function (parameter, server, client) {
if ((typeof server !== "object" && client !== server) || (typeof server === "object" && JSON.stringify(server) !== JSON.stringify(client))) {
issues.push({
"reason" : "desynchronised",
"party" : data.party,
"state" : parameter,
"server" : server,
"client" : client
});
}
};
assert("turn", battle.turns, data.state.turn);
assert("seed", battle.random.seed, data.state.seed);
assert("weather", battle.weather, data.state.weather);
foreach(battle.allTrainers(), function (trainer) {
assert("trainer: " + trainer.identification, trainer.store(), data.state.trainers[trainer.identification]);
});
if (issues.notEmpty()) {
return {
success : false,
reason : "Not every process was in sync.",
issues : issues
};
} else {
return {
success : true
};
}
case "replay":
// Plays a recorded battle for a player
// data: recording, spectators
if (!data.hasOwnProperty("spectators"))
return unsuccessful("The parameter `data` should have had a `spectators` property.");
if (!Array.isArray(data.spectators))
return unsuccessful("The parameter `data.spectators` should have been an array.");
if (!data.hasOwnProperty("recording"))
return unsuccessful("The parameter `data` should have had a `recording` property.");
if (typeof data.recording !== "object")
return unsuccessful("The parameter `data.recording` should been an object, but had type `" + (typeof data.recording) + "`.");
var process = data.recording, parties = [];
foreach(data.spectators, function (spectator) {
var party = spectator.party;
parties.push(party);
// Initiate the party's battle
var recording = JSONCopy(process.parameters);
if (spectator.perspective !== recording.teamA.trainer.identification) {
if (spectator.perspective === recording.teamB.trainer.identification) {
var temp = recording.teamA;
recording.teamA = recording.teamB;
recording.teamB = temp;
} else {
return unsuccessful("One of the spectators was trying to observe from the perspective of a trainer who was not battling.");
}
}
Supervisor.send(party, "initiate", {
rules : process.rules,
data : recording
}, identifier);
// Bring the party up to date on all the actions taken so far
Supervisor.send(party, "actions", process.relay, identifier);
});
process.parties = process.parties.concat(parties);
return {
success : true
};
default:
// An invalid `message` value has been sent
return {
success : false,
reason : "The `message` parameter was not valid (" + message + ")."
};
}
},
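// Usage sketch (hypothetical identifiers): a server relaying one trainer's queued commands
// to battle process 0 might call:
// Supervisor.receive("relay", { party: partyConnectionId, team: trainerIdentification, data: actionsFromClient }, 0);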
countdown : function (identifier, cancel) {
var unsuccessful = function (reason) {
return {
success : false,
reason : reason
};
};
if (Supervisor.processes.hasOwnProperty(identifier)) {
var process = Supervisor.processes[identifier];
if (process.timer !== null) {
clearTimeout(process.timer);
process.timer = null;
foreach(process.parties, function (party) {
Supervisor.send(party, "countdown", null, identifier);
});
}
if (!cancel && process.rules.timer !== null) {
process.timer = setTimeout(function () {
var waitingFor = process.battle.trainersWaitingFor(process.battle.state.for, process.relay.slice(process.relayed));
if (waitingFor.notEmpty()) {
Supervisor.receive("force", waitingFor.length === 1 ? {
loser : waitingFor.first()
} : null, identifier);
}
}, process.rules.timer);
foreach(process.parties, function (party) {
Supervisor.send(party, "countdown", {
correction : 0, // Should be some estimation of the time it takes to send a message to the party
duration : process.rules.timer
}, identifier);
});
}
} else {
return unsuccessful("No battle existed with the provided identifier `" + identifier + "`.");
}
},
record : function (identifier, recordUnfinishedBattle) {
var unsuccessful = function (reason) {
return {
success : false,
reason : reason
};
};
if (Supervisor.processes.hasOwnProperty(identifier)) {
var process = Supervisor.processes[identifier];
if (!process.battle.active || recordUnfinishedBattle) {
return {
success : true,
recording : JSONCopy({
parameters : process.parameters,
rules : process.rules,
relay : process.relay
})
};
} else {
return unsuccessful("The battle you tried to record has not finished yet.");
}
} else {
return unsuccessful("No battle existed with the provided identifier `" + identifier + "`.");
}
}
}; |
const mix = require('laravel-mix');
/*
|--------------------------------------------------------------------------
| Mix Asset Management
|--------------------------------------------------------------------------
|
| Mix provides a clean, fluent API for defining some Webpack build steps
| for your Laravel application. By default, we are compiling the Sass
| file for the application as well as bundling up all the JS files.
|
*/
mix.js('resources/js/app.js', 'public/js').js('resources/js/js/form.js', 'public/js')
.sass('resources/sass/app.scss', 'public/css');
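// A possible extension (not enabled here): cache-bust the compiled assets in production builds.
// if (mix.inProduction()) {
//     mix.version();
// }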
|
/**
* Created by yumodev on 18/2/5.
*/
'use strict';
import React, {Component} from 'react'
import {StyleSheet, Text, View, Dimensions} from 'react-native'
let totalWidth = Dimensions.get('window').width;
let totalHeight = Dimensions.get('window').height;
let ConfirmDialog = React.createClass({
render : function () {
return(
<View style={styles.confirmCont} >
<View style={styles.dialogStyle}>
<Text style={styles.textPromptStyle}> {this.props.promptToUser}</Text>
<Text style={styles.yesButton} onPress={this.props.userConfirmed} numberOfLines={3}>OK</Text>
<Text style={styles.cancelButton} onPress={this.props.userCanceled} numberOfLines={3}>Cancel</Text>
</View>
</View>
)
}
});
let styles = StyleSheet.create({
confirmCont:{
position:'absolute',
top:0,
width:totalWidth,
height:totalHeight,
backgroundColor:'rgba(52,52,52, 0.5)'
},
dialogStyle:{
position:'absolute',
top:totalHeight * 0.4,
left:totalWidth * 0.1,
right:totalWidth * 0.1,
width:totalWidth * 0.8,
height : totalHeight * 0.3,
backgroundColor:'white'
},
textPromptStyle:{
position:'absolute',
top:10,
left:10,
fontSize:20,
color:'black'
},
yesButton:{
position:'absolute',
bottom:10,
left:10,
width: totalWidth * 0.35,
height: totalHeight * 0.12,
backgroundColor:'gray',
fontSize:20,
color:'white',
textAlign:'center'
},
cancelButton:{
position:'absolute',
bottom:10,
right:10,
width:totalWidth * 0.35,
height:totalHeight * 0.12,
backgroundColor:'gray',
fontSize:20,
color:'white',
textAlign:'center'
}
});
export default ConfirmDialog;
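// Usage sketch (hypothetical handlers; these are the props the component reads):
// <ConfirmDialog
//   promptToUser="Delete this item?"
//   userConfirmed={() => this.setState({showDialog: false})}
//   userCanceled={() => this.setState({showDialog: false})} />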
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tf.data.experimental.SqlDataset`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl.testing import parameterized
import sqlite3
from tensorflow.python.data.experimental.ops import readers
from tensorflow.python.data.kernel_tests import checkpoint_test_base
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.framework import combinations
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
class SqlDatasetTestBase(test_base.DatasetTestBase):
"""Base class for setting up and testing SqlDataset."""
def _createSqlDataset(self,
query,
output_types,
driver_name="sqlite",
num_repeats=1):
dataset = readers.SqlDataset(driver_name, self.data_source_name, query,
output_types).repeat(num_repeats)
return dataset
def setUp(self):
super(SqlDatasetTestBase, self).setUp()
self.data_source_name = os.path.join(test.get_temp_dir(), "tftest.sqlite")
conn = sqlite3.connect(self.data_source_name)
c = conn.cursor()
c.execute("DROP TABLE IF EXISTS students")
c.execute("DROP TABLE IF EXISTS people")
c.execute("DROP TABLE IF EXISTS townspeople")
c.execute("DROP TABLE IF EXISTS data")
c.execute(
"CREATE TABLE IF NOT EXISTS students (id INTEGER NOT NULL PRIMARY KEY, "
"first_name VARCHAR(100), last_name VARCHAR(100), motto VARCHAR(100), "
"school_id VARCHAR(100), favorite_nonsense_word VARCHAR(100), "
"desk_number INTEGER, income INTEGER, favorite_number INTEGER, "
"favorite_big_number INTEGER, favorite_negative_number INTEGER, "
"favorite_medium_sized_number INTEGER, brownie_points INTEGER, "
"account_balance INTEGER, registration_complete INTEGER)")
c.executemany(
"INSERT INTO students (first_name, last_name, motto, school_id, "
"favorite_nonsense_word, desk_number, income, favorite_number, "
"favorite_big_number, favorite_negative_number, "
"favorite_medium_sized_number, brownie_points, account_balance, "
"registration_complete) "
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
[("John", "Doe", "Hi!", "123", "n\0nsense", 9, 0, 2147483647,
9223372036854775807, -2, 32767, 0, 0, 1),
("Jane", "Moe", "Hi again!", "1000", "nonsense\0", 127, -20000,
-2147483648, -9223372036854775808, -128, -32768, 255, 65535, 0)])
c.execute(
"CREATE TABLE IF NOT EXISTS people (id INTEGER NOT NULL PRIMARY KEY, "
"first_name VARCHAR(100), last_name VARCHAR(100), state VARCHAR(100))")
c.executemany(
"INSERT INTO PEOPLE (first_name, last_name, state) VALUES (?, ?, ?)",
[("Benjamin", "Franklin", "Pennsylvania"), ("John", "Doe",
"California")])
c.execute(
"CREATE TABLE IF NOT EXISTS townspeople (id INTEGER NOT NULL PRIMARY "
"KEY, first_name VARCHAR(100), last_name VARCHAR(100), victories "
"FLOAT, accolades FLOAT, triumphs FLOAT)")
c.executemany(
"INSERT INTO townspeople (first_name, last_name, victories, "
"accolades, triumphs) VALUES (?, ?, ?, ?, ?)",
[("George", "Washington", 20.00,
1331241.321342132321324589798264627463827647382647382643874,
9007199254740991.0),
("John", "Adams", -19.95,
1331241321342132321324589798264627463827647382647382643874.0,
9007199254740992.0)])
c.execute("CREATE TABLE IF NOT EXISTS data (col1 INTEGER)")
c.executemany("INSERT INTO DATA VALUES (?)", [(0,), (1,), (2,)])
conn.commit()
conn.close()
class SqlDatasetTest(SqlDatasetTestBase, parameterized.TestCase):
# Test that SqlDataset can read from a database table.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSet(self):
for _ in range(2): # Run twice to verify statelessness of db operations.
dataset = self._createSqlDataset(
query="SELECT first_name, last_name, motto FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string),
num_repeats=2)
self.assertDatasetProduces(
dataset,
expected_output=[(b"John", b"Doe", b"Hi!"),
(b"Jane", b"Moe", b"Hi again!")] * 2,
num_test_iterations=2)
# Test that SqlDataset works on a join query.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetJoinQuery(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT students.first_name, state, motto FROM students "
"INNER JOIN people "
"ON students.first_name = people.first_name "
"AND students.last_name = people.last_name",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
self.assertEqual((b"John", b"California", b"Hi!"),
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that SqlDataset can read a database entry with a null-terminator
# in the middle of the text and place the entry in a `string` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetNullTerminator(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, favorite_nonsense_word "
"FROM students ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
self.assertEqual((b"John", b"Doe", b"n\0nsense"), self.evaluate(get_next()))
self.assertEqual((b"Jane", b"Moe", b"nonsense\0"),
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that SqlDataset works when used on two different queries.
# Because the output types of the dataset must be determined at graph-creation
# time, the two queries must have the same number and types of columns.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetReuseSqlDataset(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, motto FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
self.assertEqual((b"John", b"Doe", b"Hi!"), self.evaluate(get_next()))
self.assertEqual((b"Jane", b"Moe", b"Hi again!"), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, state FROM people "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
self.assertEqual((b"John", b"Doe", b"California"),
self.evaluate(get_next()))
self.assertEqual((b"Benjamin", b"Franklin", b"Pennsylvania"),
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that an `OutOfRangeError` is raised on the first call to
# `get_next_str_only` if result set is empty.
@combinations.generate(test_base.default_test_combinations())
def testReadEmptyResultSet(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, motto FROM students "
"WHERE first_name = 'Nonexistent'",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that an error is raised when `driver_name` is invalid.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetWithInvalidDriverName(self):
with self.assertRaises(errors.InvalidArgumentError):
dataset = self._createSqlDataset(
driver_name="sqlfake",
query="SELECT first_name, last_name, motto FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string))
self.assertDatasetProduces(dataset, expected_output=[])
# Test that an error is raised when a column name in `query` is nonexistent
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetWithInvalidColumnName(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, fake_column FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
with self.assertRaises(errors.UnknownError):
self.evaluate(get_next())
# Test that an error is raised when there is a syntax error in `query`.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetOfQueryWithSyntaxError(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELEmispellECT first_name, last_name, motto FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
with self.assertRaises(errors.UnknownError):
self.evaluate(get_next())
# Test that an error is raised when the number of columns in `query`
# does not match the length of `output_types`.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetWithMismatchBetweenColumnsAndOutputTypes(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(get_next())
# Test that no results are returned when `query` is an insert query rather
# than a select query. In particular, the error refers to the number of
# output types passed to the op not matching the number of columns in the
# result set of the query (namely, 0 for an insert statement.)
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetOfInsertQuery(self):
get_next = self.getNext(
self._createSqlDataset(
query="INSERT INTO students (first_name, last_name, motto) "
"VALUES ('Foo', 'Bar', 'Baz'), ('Fizz', 'Buzz', 'Fizzbuzz')",
output_types=(dtypes.string, dtypes.string, dtypes.string)))
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer from a SQLite database table and
# place it in an `int8` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt8(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, desk_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int8)))
self.assertEqual((b"John", 9), self.evaluate(get_next()))
self.assertEqual((b"Jane", 127), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a negative or 0-valued integer from a
# SQLite database table and place it in an `int8` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt8NegativeAndZero(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, income, favorite_negative_number "
"FROM students "
"WHERE first_name = 'John' ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int8, dtypes.int8)))
self.assertEqual((b"John", 0, -2), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a large (positive or negative) integer from
# a SQLite database table and place it in an `int8` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt8MaxValues(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT desk_number, favorite_negative_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.int8, dtypes.int8)))
self.assertEqual((9, -2), self.evaluate(get_next()))
# Max and min values of int8
self.assertEqual((127, -128), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer from a SQLite database table and
# place it in an `int16` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt16(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, desk_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int16)))
self.assertEqual((b"John", 9), self.evaluate(get_next()))
self.assertEqual((b"Jane", 127), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a negative or 0-valued integer from a
# SQLite database table and place it in an `int16` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt16NegativeAndZero(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, income, favorite_negative_number "
"FROM students "
"WHERE first_name = 'John' ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int16, dtypes.int16)))
self.assertEqual((b"John", 0, -2), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a large (positive or negative) integer from
# a SQLite database table and place it in an `int16` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt16MaxValues(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, favorite_medium_sized_number "
"FROM students ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int16)))
# Max value of int16
self.assertEqual((b"John", 32767), self.evaluate(get_next()))
# Min value of int16
self.assertEqual((b"Jane", -32768), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer from a SQLite database table and
# place it in an `int32` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt32(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, desk_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int32)))
self.assertEqual((b"John", 9), self.evaluate(get_next()))
self.assertEqual((b"Jane", 127), self.evaluate(get_next()))
# Test that `SqlDataset` can read a negative or 0-valued integer from a
# SQLite database table and place it in an `int32` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt32NegativeAndZero(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, income FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int32)))
self.assertEqual((b"John", 0), self.evaluate(get_next()))
self.assertEqual((b"Jane", -20000), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a large (positive or negative) integer from
# a SQLite database table and place it in an `int32` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt32MaxValues(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, favorite_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int32)))
# Max value of int32
self.assertEqual((b"John", 2147483647), self.evaluate(get_next()))
# Min value of int32
self.assertEqual((b"Jane", -2147483648), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a numeric `varchar` from a SQLite database
# table and place it in an `int32` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt32VarCharColumnAsInt(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, school_id FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int32)))
self.assertEqual((b"John", 123), self.evaluate(get_next()))
self.assertEqual((b"Jane", 1000), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer from a SQLite database table
# and place it in an `int64` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt64(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, desk_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int64)))
self.assertEqual((b"John", 9), self.evaluate(get_next()))
self.assertEqual((b"Jane", 127), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a negative or 0-valued integer from a
# SQLite database table and place it in an `int64` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt64NegativeAndZero(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, income FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int64)))
self.assertEqual((b"John", 0), self.evaluate(get_next()))
self.assertEqual((b"Jane", -20000), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a large (positive or negative) integer from
# a SQLite database table and place it in an `int64` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetInt64MaxValues(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, favorite_big_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.int64)))
# Max value of int64
self.assertEqual((b"John", 9223372036854775807), self.evaluate(get_next()))
# Min value of int64
self.assertEqual((b"Jane", -9223372036854775808), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer from a SQLite database table and
# place it in a `uint8` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetUInt8(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, desk_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.uint8)))
self.assertEqual((b"John", 9), self.evaluate(get_next()))
self.assertEqual((b"Jane", 127), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read the minimum and maximum uint8 values from a
# SQLite database table and place them in `uint8` tensors.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetUInt8MinAndMaxValues(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, brownie_points FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.uint8)))
# Min value of uint8
self.assertEqual((b"John", 0), self.evaluate(get_next()))
# Max value of uint8
self.assertEqual((b"Jane", 255), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer from a SQLite database table
# and place it in a `uint16` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetUInt16(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, desk_number FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.uint16)))
self.assertEqual((b"John", 9), self.evaluate(get_next()))
self.assertEqual((b"Jane", 127), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read the minimum and maximum uint16 values from a
# SQLite database table and place them in `uint16` tensors.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetUInt16MinAndMaxValues(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, account_balance FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.uint16)))
# Min value of uint16
self.assertEqual((b"John", 0), self.evaluate(get_next()))
# Max value of uint16
self.assertEqual((b"Jane", 65535), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a 0-valued and a 1-valued integer from a
# SQLite database table and place them in `bool` tensors as `False` and
# `True` respectively.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetBool(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, registration_complete FROM students "
"ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.bool)))
self.assertEqual((b"John", True), self.evaluate(get_next()))
self.assertEqual((b"Jane", False), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read an integer that is not 0-valued or 1-valued
# from a SQLite database table and place it as `True` in a `bool` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetBoolNotZeroOrOne(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, favorite_medium_sized_number "
"FROM students ORDER BY first_name DESC",
output_types=(dtypes.string, dtypes.bool)))
self.assertEqual((b"John", True), self.evaluate(get_next()))
self.assertEqual((b"Jane", True), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a float from a SQLite database table
# and place it in a `float64` tensor.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetFloat64(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, victories FROM townspeople "
"ORDER BY first_name",
output_types=(dtypes.string, dtypes.string, dtypes.float64)))
self.assertEqual((b"George", b"Washington", 20.0),
self.evaluate(get_next()))
self.assertEqual((b"John", b"Adams", -19.95), self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a float from a SQLite database table beyond
# the precision of 64-bit IEEE, without throwing an error. Test that
# `SqlDataset` identifies such a value as equal to itself.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetFloat64OverlyPrecise(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, accolades FROM townspeople "
"ORDER BY first_name",
output_types=(dtypes.string, dtypes.string, dtypes.float64)))
self.assertEqual(
(b"George", b"Washington",
1331241.321342132321324589798264627463827647382647382643874),
self.evaluate(get_next()))
self.assertEqual(
(b"John", b"Adams",
1331241321342132321324589798264627463827647382647382643874.0),
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can read a float from a SQLite database table,
# representing the largest integer representable as a 64-bit IEEE float
# such that the previous integer is also representable as a 64-bit IEEE float.
# Test that `SqlDataset` can distinguish these two numbers.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetFloat64LargestConsecutiveWholeNumbersNotEqual(self):
get_next = self.getNext(
self._createSqlDataset(
query="SELECT first_name, last_name, triumphs FROM townspeople "
"ORDER BY first_name",
output_types=(dtypes.string, dtypes.string, dtypes.float64)))
self.assertNotEqual((b"George", b"Washington", 9007199254740992.0),
self.evaluate(get_next()))
self.assertNotEqual((b"John", b"Adams", 9007199254740991.0),
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# Test that `SqlDataset` can stop correctly when combined with `batch`.
@combinations.generate(test_base.default_test_combinations())
def testReadResultSetWithBatchStop(self):
dataset = self._createSqlDataset(
query="SELECT * FROM data", output_types=(dtypes.int32))
dataset = dataset.map(lambda x: array_ops.identity(x))
get_next = self.getNext(dataset.batch(2))
self.assertAllEqual(self.evaluate(get_next()), [0, 1])
self.assertAllEqual(self.evaluate(get_next()), [2])
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
class SqlDatasetCheckpointTest(SqlDatasetTestBase,
checkpoint_test_base.CheckpointTestBase,
parameterized.TestCase):
def _build_dataset(self, num_repeats):
data_source_name = os.path.join(test.get_temp_dir(), "tftest.sqlite")
driver_name = array_ops.placeholder_with_default(
array_ops.constant("sqlite", dtypes.string), shape=[])
query = ("SELECT first_name, last_name, motto FROM students ORDER BY "
"first_name DESC")
output_types = (dtypes.string, dtypes.string, dtypes.string)
return readers.SqlDataset(driver_name, data_source_name, query,
output_types).repeat(num_repeats)
@combinations.generate(
combinations.times(test_base.default_test_combinations(),
checkpoint_test_base.default_test_combinations()))
def test(self, verify_fn):
num_repeats = 4
num_outputs = num_repeats * 2
verify_fn(self, lambda: self._build_dataset(num_repeats), num_outputs)
if __name__ == "__main__":
test.main()
|
function Person(initialAge) {
    // Validate initialAge; fall back to 0 for negative values.
    if (initialAge >= 0) {
        this.age = initialAge;
    } else {
        this.age = 0;
        console.log("Age is not valid, setting age to 0.");
    }
    // Print the statement that matches the person's current age bracket.
    this.amIOld = function() {
        if (this.age < 13) {
            console.log("You are young.");
        } else if (this.age >= 13 && this.age < 18) {
            console.log("You are a teenager.");
        } else {
            console.log("You are old.");
        }
    };
    // Increment the person's age by one year.
    this.yearPasses = function() {
        this.age++;
    };
}
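// Usage sketch (illustrative only; the variable name below is hypothetical):
//   const p = new Person(12);
//   p.amIOld();     // logs "You are young."
//   p.yearPasses(); // p.age is now 13
//   p.amIOld();     // logs "You are a teenager."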
|
import { createSlice } from '@reduxjs/toolkit';
import { getCategories } from '../data/repo';
const categoriesSlice = createSlice({
name: 'categories',
initialState: { data: [] },
reducers: {
updateCategories: (state, action) => {
state.data = action.payload.data.categories;
}
}
});
export const getCategoriesAsync = () => dispatch => {
getCategories()
.then(result => dispatch(categoriesSlice.actions.updateCategories(result)))
};
export const selectCategories = state => state.categories.data;
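// Usage sketch (illustrative, not part of this module): a React component could dispatch the
// thunk and read the slice via react-redux hooks. The component name, import path, and the
// category fields (`id`, `name`) are assumptions about the surrounding app.
//   import React, { useEffect } from 'react';
//   import { useDispatch, useSelector } from 'react-redux';
//   import { getCategoriesAsync, selectCategories } from './categories';
//
//   function CategoryList() {
//     const dispatch = useDispatch();
//     const categories = useSelector(selectCategories);
//     useEffect(() => { dispatch(getCategoriesAsync()); }, [dispatch]);
//     return <ul>{categories.map(c => <li key={c.id}>{c.name}</li>)}</ul>;
//   }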
export default categoriesSlice.reducer;
|
import {metadata} from '../src/metadata';
import {decorators} from '../src/decorators';
describe('metadata', () => {
it('can be located by key', () => {
var found = metadata.getOwn(metadata.resource, HasMetadata);
expect(found instanceof SampleMetadata).toBe(true);
});
it('can be normalized to handle the fallback metadata location', () => {
var found = metadata.getOwn(metadata.resource, HasFallbackMetadata);
expect(found instanceof SampleMetadata).toBe(true);
});
it('can override base metadata', () => {
var found = metadata.getOwn(metadata.resource, OverridesMetadata);
expect(found.id).toBe(3);
});
it('can inherit base metadata when searching deep by type', () => {
var found = metadata.get(metadata.resource, DerivedWithBaseMetadata);
expect(found instanceof SampleMetadata).toBe(true);
});
it('can be added with function', () => {
class Annotated {}
decorators(new sampleES7Decorator()).on(Annotated);
var found = metadata.getOwn(metadata.resource, Annotated);
expect(found instanceof SampleMetadata).toBe(true);
});
describe('when searching', () => {
it('returns undefined if the input type is falsy', () => {
expect(metadata.getOwn(metadata.resource, undefined)).toBe(undefined);
expect(metadata.getOwn(metadata.resource, null)).toBe(undefined);
});
it('returns undefined if no metadata is defined for the type', () => {
var found = metadata.getOwn(metadata.resource, HasNoMetadata);
expect(found).toBe(undefined);
});
it('returns the base metadata when searching deep if no metadata is defined for the type', () => {
var found = metadata.get(metadata.resource, DerivedWithBaseMetadata);
expect(found instanceof SampleMetadata).toBe(true);
});
});
class SampleMetadata {
constructor(id) {
this.id = id;
}
}
function sampleES7Decorator(value){
return function(target){
metadata.define(metadata.resource, new SampleMetadata(value), target);
}
}
let HasMetadata = decorators(sampleES7Decorator()).on(class {});
let HasFallbackMetadata = decorators(sampleES7Decorator()).on(class {});
let HasOneMetadataInstance = decorators(sampleES7Decorator()).on(class {});
let OverridesMetadata = decorators(sampleES7Decorator(3)).on(class extends HasMetadata {});
class DerivedWithBaseMetadata extends HasMetadata {}
metadata.define('another', 'foo', DerivedWithBaseMetadata);
class HasNoMetadata {}
class DerivedTypeWithNoMetadata extends HasMetadata {}
});
|
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
var winston_1 = __importDefault(require("winston"));
var smashggJsLevels = {
levels: {
error: 0,
warn: 1,
info: 2,
verbose: 3,
debug: 4,
queries: 5
},
colors: {
error: 'red',
warn: 'yellow',
info: 'green',
verbose: 'blue',
debug: 'magenta',
queries: 'cyan'
}
};
var tempLogger = winston_1.default.createLogger({
level: 'info',
levels: smashggJsLevels.levels,
format: winston_1.default.format.combine(winston_1.default.format.splat(), winston_1.default.format.simple(), winston_1.default.format.colorize()),
transports: [
new winston_1.default.transports.Console()
]
});
var logQuery = function (msg) {
tempLogger.log('queries', msg);
};
var logger = Object.assign(tempLogger, { queries: logQuery });
exports.default = logger;
winston_1.default.addColors(smashggJsLevels.colors);
function setLogLevel(level) {
logger.level = level;
}
exports.setLogLevel = setLogLevel;
function addLog(type, options) {
switch (type.toLowerCase()) {
case 'console':
logger.add(new winston_1.default.transports.Console(options));
break;
case 'file':
logger.add(new winston_1.default.transports.File(options));
break;
default:
throw new Error(type + " is not valid for addLog. Valid values: [console, file]");
}
}
exports.addLog = addLog;
function disableLog() {
logger.transports.forEach(function (transport) {
transport.silent = true;
});
}
exports.disableLog = disableLog;
function enableLog() {
logger.transports.forEach(function (transport) {
transport.silent = false;
});
}
exports.enableLog = enableLog;
exports.levels = {
QUERIES: 'queries',
DEBUG: 'debug',
VERBOSE: 'verbose',
INFO: 'info',
WARN: 'warn',
ERROR: 'error'
};
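// Usage sketch (illustrative; the require path is an assumption about how this compiled module is consumed):
//   const logger = require('./logger').default;
//   const { setLogLevel, addLog, levels } = require('./logger');
//   setLogLevel(levels.DEBUG);                       // raise verbosity through the custom level map
//   addLog('file', { filename: 'smashgg.log' });     // attach an additional file transport
//   logger.info('fetching tournament data');         // standard level
//   logger.queries('query TournamentQuery { ... }'); // custom "queries" level wired up above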
|
"use strict";
/* Generated from https://d3teyb21fexa9r.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json, version 1.13.0 */
Object.defineProperty(exports, "__esModule", { value: true });
const resource_1 = require("../resource");
class SecurityGroup extends resource_1.ResourceBase {
constructor(properties) {
super('AWS::ElastiCache::SecurityGroup', properties);
}
}
exports.default = SecurityGroup;
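// Usage sketch (illustrative; assumes this module is consumed like the other generated resource
// classes in this package, and that `Description` (the documented property of
// AWS::ElastiCache::SecurityGroup) is what the caller wants to set):
//   const SecurityGroup = require('./securityGroup').default;
//   const cacheSecurityGroup = new SecurityGroup({ Description: 'ElastiCache security group' });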
|
const canFinish = require("../main/canFinish.js");
// Question
// There are a total of numCourses courses you have to take, labeled from 0 to numCourses - 1. You are given an array prerequisites where prerequisites[i] = [ai, bi] indicates that you must take course bi first if you want to take course ai.
// For example, the pair [0, 1], indicates that to take course 0 you have to first take course 1.
// Return true if you can finish all courses. Otherwise, return false.
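// A minimal sketch of one way the imported canFinish could be implemented (Kahn's topological
// sort over the prerequisite graph). This is illustrative only and is not the module under test;
// `canFinishSketch` is a made-up name so it does not shadow the import above.
const canFinishSketch = (numCourses, prerequisites) => {
  const indegree = new Array(numCourses).fill(0);
  const adjacency = Array.from({ length: numCourses }, () => []);
  for (const [course, prereq] of prerequisites) {
    adjacency[prereq].push(course); // finishing `prereq` unlocks `course`
    indegree[course] += 1;
  }
  // Start with courses that have no prerequisites and peel the graph layer by layer.
  const queue = [];
  indegree.forEach((deg, course) => { if (deg === 0) queue.push(course); });
  let finished = 0;
  while (queue.length > 0) {
    const course = queue.shift();
    finished += 1;
    for (const next of adjacency[course]) {
      if (--indegree[next] === 0) queue.push(next);
    }
  }
  // All courses are finishable exactly when no cycle kept a course from being dequeued.
  return finished === numCourses;
};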
describe("コースが終了可能かの結果を返却する", () => {
describe("canFinish(number numCourses, number[][] prerequisites) を実行するとき", () => {
test("numCourses = 2, prerequisites = 3を引数に [[1,0]]が返却されること", () => {
expect(canFinish(2, [[1, 0]])).toBeTruthy();
});
test("numCourses = 2, prerequisites = 3を引数に [[1,0],[0,1]]が返却されること", () => {
expect(
canFinish(2, [
[1, 0],
[0, 1],
])
).toBeFalsy();
});
});
});
|
// / <reference types="Cypress" />
import ProductPageObject from '../../../../support/pages/module/sw-product.page-object';
/**
* @deprecated tag:v6.5.0 - will be removed, use `sw-promotion-v2` instead
* @feature-deprecated (flag:FEATURE_NEXT_13810)
*/
describe('Promotion: Test promotion with individual codes', () => {
// eslint-disable-next-line no-undef
before(() => {
cy.onlyOnFeature('FEATURE_NEXT_13810');
});
beforeEach(() => {
cy.loginViaApi()
.then(() => {
return cy.createDefaultFixture('promotion');
})
.then(() => {
return cy.createProductFixture();
})
.then(() => {
return cy.createCustomerFixture();
})
.then(() => {
cy.openInitialPage(`${Cypress.env('admin')}#/sw/promotion/index`);
});
});
it('@marketing: use individual promotion codes', () => {
const page = new ProductPageObject();
// Request we want to wait for later
cy.intercept({
url: `**/${Cypress.env('apiPath')}/promotion`,
method: 'POST'
}).as('saveData');
cy.intercept({
url: `**/${Cypress.env('apiPath')}/_action/sync`,
method: 'POST'
}).as('saveDiscount');
// Active code in promotion
cy.contains(`${page.elements.dataGridRow}--0 a`, 'Thunder Tuesday').click();
cy.get('#sw-field--promotion-name').should('be.visible');
cy.get('input[name="sw-field--promotion-active"]').click();
cy.get('.sw-promotion-sales-channel-select').typeMultiSelectAndCheck('Storefront');
cy.get('.sw-promotion-sales-channel-select .sw-select-selection-list__input')
.type('{esc}');
cy.get('input[name="sw-field--promotion-useCodes"]').click();
cy.get('input[name="sw-field--promotion-useIndividualCodes"]').click();
// Set individual code
cy.get('.sw-promotion-code-form__link-manage-individual').should('be.visible');
cy.get('.sw-promotion-code-form__link-manage-individual').click();
cy.get('.sw-promotion-code-form__modal-individual').should('be.visible');
cy.get('#sw-field--promotion-individualCodePattern').typeAndCheck('code-%d');
cy.get('.sw-promotion-individualcodes__top-bar > .sw-button')
.click();
cy.wait('@filteredResultCall').its('response.statusCode').should('equal', 200);
cy.awaitAndCheckNotification('Generated 10 new codes.');
cy.get('.sw-modal__close').click();
cy.get('.sw-modal').should('not.exist');
// Add discount
cy.get('a[title="Discounts"]').click();
cy.get(page.elements.loader).should('not.exist');
cy.get('.sw-button--ghost').should('be.visible');
cy.contains('.sw-button--ghost', 'Add discount').click();
cy.get(page.elements.loader).should('not.exist');
cy.wait('@filteredResultCall').its('response.statusCode').should('equal', 200);
cy.get('.sw-promotion-discount-component').should('be.visible');
cy.get('.sw-promotion-discount-component__discount-value').should('be.visible');
cy.get('.sw-promotion-discount-component__discount-value input')
.clear()
.type('54');
// Save final promotion
cy.get('.sw-promotion-detail__save-action').click();
cy.wait('@saveDiscount').its('response.statusCode').should('equal', 200);
// Verify Promotion in Storefront
cy.visit('/');
cy.get('.product-box').should('be.visible');
cy.get('.btn-buy').click();
cy.get('.offcanvas.is-open').should('be.visible');
cy.get('#addPromotionOffcanvasCartInput').type('code-0');
cy.get('#addPromotionOffcanvasCart').click();
cy.get('.alert-success .icon-checkmark-circle').should('be.visible');
cy.get('.cart-item-promotion .cart-item-label').contains('Thunder Tuesday');
});
it('@marketing: use invalid individual promotion codes', () => {
const page = new ProductPageObject();
// Active code in promotion
cy.contains(`${page.elements.dataGridRow}--0 a`, 'Thunder Tuesday').click();
cy.get('#sw-field--promotion-name').should('be.visible');
cy.get('input[name="sw-field--promotion-active"]').click();
cy.get('.sw-promotion-sales-channel-select').typeMultiSelectAndCheck('Storefront');
cy.get('.sw-promotion-sales-channel-select .sw-select-selection-list__input')
.type('{esc}');
cy.get('input[name="sw-field--promotion-useCodes"]').click();
cy.get('input[name="sw-field--promotion-useIndividualCodes"]').click();
// Set individual code
cy.get('.sw-promotion-code-form__link-manage-individual').should('be.visible');
cy.get('.sw-promotion-code-form__link-manage-individual').click();
cy.get('.sw-promotion-code-form__modal-individual').should('be.visible');
cy.get('#sw-field--promotion-individualCodePattern').typeAndCheck('my-code');
cy.get('.sw-promotion-individualcodes__top-bar > .sw-button')
.click();
cy.get('.sw-notifications__notification--0 > .sw-alert__body').should('be.visible');
cy.get('#sw-field--promotion-individualCodePattern').clear().typeAndCheck('code-%d');
cy.get('#sw-field--generateCount').clear().typeAndCheck(11);
cy.get('.sw-promotion-individualcodes__top-bar > .sw-button')
.click();
cy.get('.sw-notifications__notification--0 > .sw-alert__body').should('be.visible');
cy.get('#sw-field--promotion-individualCodePattern').clear().typeAndCheck('code-%s');
cy.get('#sw-field--generateCount').clear().typeAndCheck(27);
cy.get('.sw-promotion-individualcodes__top-bar > .sw-button')
.click();
cy.get('.sw-notifications__notification--0 > .sw-alert__body').should('be.visible');
});
it('@marketing: create promotion with promotion name', () => {
cy.get('a[href="#/sw/promotion/create"]').click();
// Create promotion
cy.get('.sw-promotion-detail').should('be.visible');
cy.get('input[name="sw-field--promotion-active"]').click();
cy.get('.sw-promotion-sales-channel-select').typeMultiSelectAndCheck('Storefront');
cy.get('.sw-promotion-sales-channel-select .sw-select-selection-list__input')
.type('{esc}');
cy.get('input[name="sw-field--promotion-useCodes"]').click();
cy.get('input[name="sw-field--promotion-useIndividualCodes"]').click();
// Set individual code
cy.get('.sw-promotion-code-form__link-manage-individual').should('be.visible');
cy.get('.sw-promotion-code-form__link-manage-individual').click();
cy.get('.sw-notifications__notification--0 > .sw-alert__body').should('be.visible');
});
});
|
var searchData=
[
['c_5fcentroid_5flist',['C_Centroid_List',['../class_c___centroid___list.html',1,'']]],
['c_5fdmum',['C_DMUM',['../class_c___d_m_u_m.html',1,'']]],
['c_5fdmum_5fresult',['C_DMUM_Result',['../struct_c___d_m_u_m___result.html',1,'']]],
['c_5fedge',['C_Edge',['../class_c___edge.html',1,'']]],
['c_5fhistogram',['C_Histogram',['../class_c___histogram.html',1,'']]],
['c_5fhistogram_5fcomparison',['C_Histogram_Comparison',['../class_c___histogram___comparison.html',1,'']]],
['c_5ftimer',['C_Timer',['../class_c___timer.html',1,'']]],
['c_5ftritom',['C_Tritom',['../class_c___tritom.html',1,'']]],
['c_5ftrixel',['C_Trixel',['../class_c___trixel.html',1,'']]],
['c_5fvertex',['C_Vertex',['../class_c___vertex.html',1,'']]]
];
|
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = global || self, factory(global.mobx = {}));
}(this, (function (exports) { 'use strict';
var niceErrors = {
0: "Invalid value for configuration 'enforceActions', expected 'never', 'always' or 'observed'",
1: function _(annotationType, key) {
return "Cannot apply '" + annotationType + "' to '" + key.toString() + "': Field not found.";
},
/*
2(prop) {
return `invalid decorator for '${prop.toString()}'`
},
3(prop) {
return `Cannot decorate '${prop.toString()}': action can only be used on properties with a function value.`
},
4(prop) {
return `Cannot decorate '${prop.toString()}': computed can only be used on getter properties.`
},
*/
5: "'keys()' can only be used on observable objects, arrays, sets and maps",
6: "'values()' can only be used on observable objects, arrays, sets and maps",
7: "'entries()' can only be used on observable objects, arrays and maps",
8: "'set()' can only be used on observable objects, arrays and maps",
9: "'remove()' can only be used on observable objects, arrays and maps",
10: "'has()' can only be used on observable objects, arrays and maps",
11: "'get()' can only be used on observable objects, arrays and maps",
12: "Invalid annotation",
13: "Dynamic observable objects cannot be frozen. If you're passing observables to 3rd party component/function that calls Object.freeze, pass copy instead: toJS(observable)",
14: "Intercept handlers should return nothing or a change object",
15: "Observable arrays cannot be frozen. If you're passing observables to 3rd party component/function that calls Object.freeze, pass copy instead: toJS(observable)",
16: "Modification exception: the internal structure of an observable array was changed.",
17: function _(index, length) {
return "[mobx.array] Index out of bounds, " + index + " is larger than " + length;
},
18: "mobx.map requires Map polyfill for the current browser. Check babel-polyfill or core-js/es6/map.js",
19: function _(other) {
return "Cannot initialize from classes that inherit from Map: " + other.constructor.name;
},
20: function _(other) {
return "Cannot initialize map from " + other;
},
21: function _(dataStructure) {
return "Cannot convert to map from '" + dataStructure + "'";
},
22: "mobx.set requires Set polyfill for the current browser. Check babel-polyfill or core-js/es6/set.js",
23: "It is not possible to get index atoms from arrays",
24: function _(thing) {
return "Cannot obtain administration from " + thing;
},
25: function _(property, name) {
return "the entry '" + property + "' does not exist in the observable map '" + name + "'";
},
26: "please specify a property",
27: function _(property, name) {
return "no observable property '" + property.toString() + "' found on the observable object '" + name + "'";
},
28: function _(thing) {
return "Cannot obtain atom from " + thing;
},
29: "Expecting some object",
30: "invalid action stack. did you forget to finish an action?",
31: "missing option for computed: get",
32: function _(name, derivation) {
return "Cycle detected in computation " + name + ": " + derivation;
},
33: function _(name) {
return "The setter of computed value '" + name + "' is trying to update itself. Did you intend to update an _observable_ value, instead of the computed property?";
},
34: function _(name) {
return "[ComputedValue '" + name + "'] It is not possible to assign a new value to a computed value.";
},
35: "There are multiple, different versions of MobX active. Make sure MobX is loaded only once or use `configure({ isolateGlobalState: true })`",
36: "isolateGlobalState should be called before MobX is running any reactions",
37: function _(method) {
return "[mobx] `observableArray." + method + "()` mutates the array in-place, which is not allowed inside a derivation. Use `array.slice()." + method + "()` instead";
},
38: "'ownKeys()' can only be used on observable objects",
39: "'defineProperty()' can only be used on observable objects"
};
var errors = niceErrors ;
function die(error) {
for (var _len = arguments.length, args = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
args[_key - 1] = arguments[_key];
}
{
var e = typeof error === "string" ? error : errors[error];
if (typeof e === "function") e = e.apply(null, args);
throw new Error("[MobX] " + e);
}
}
var mockGlobal = {};
function getGlobal() {
if (typeof globalThis !== "undefined") {
return globalThis;
}
if (typeof window !== "undefined") {
return window;
}
if (typeof global !== "undefined") {
return global;
}
if (typeof self !== "undefined") {
return self;
}
return mockGlobal;
}
var assign = Object.assign;
var getDescriptor = Object.getOwnPropertyDescriptor;
var defineProperty = Object.defineProperty;
var objectPrototype = Object.prototype;
var EMPTY_ARRAY = [];
Object.freeze(EMPTY_ARRAY);
var EMPTY_OBJECT = {};
Object.freeze(EMPTY_OBJECT);
var hasProxy = typeof Proxy !== "undefined";
var plainObjectString = /*#__PURE__*/Object.toString();
function assertProxies() {
if (!hasProxy) {
die( "`Proxy` objects are not available in the current environment. Please configure MobX to enable a fallback implementation.`" );
}
}
function warnAboutProxyRequirement(msg) {
if ( globalState.verifyProxies) {
die("MobX is currently configured to be able to run in ES5 mode, but in ES5 MobX won't be able to " + msg);
}
}
function getNextId() {
return ++globalState.mobxGuid;
}
/**
* Makes sure that the provided function is invoked at most once.
*/
function once(func) {
var invoked = false;
return function () {
if (invoked) {
return;
}
invoked = true;
return func.apply(this, arguments);
};
}
var noop = function noop() {};
function isFunction(fn) {
return typeof fn === "function";
}
function isStringish(value) {
var t = typeof value;
switch (t) {
case "string":
case "symbol":
case "number":
return true;
}
return false;
}
function isObject(value) {
return value !== null && typeof value === "object";
}
function isPlainObject(value) {
if (!isObject(value)) {
return false;
}
var proto = Object.getPrototypeOf(value);
if (proto == null) {
return true;
}
var protoConstructor = Object.hasOwnProperty.call(proto, "constructor") && proto.constructor;
return typeof protoConstructor === "function" && protoConstructor.toString() === plainObjectString;
} // https://stackoverflow.com/a/37865170
function isGenerator(obj) {
var constructor = obj == null ? void 0 : obj.constructor;
if (!constructor) {
return false;
}
if ("GeneratorFunction" === constructor.name || "GeneratorFunction" === constructor.displayName) {
return true;
}
return false;
}
function addHiddenProp(object, propName, value) {
defineProperty(object, propName, {
enumerable: false,
writable: true,
configurable: true,
value: value
});
}
function addHiddenFinalProp(object, propName, value) {
defineProperty(object, propName, {
enumerable: false,
writable: false,
configurable: true,
value: value
});
}
function createInstanceofPredicate(name, theClass) {
var propName = "isMobX" + name;
theClass.prototype[propName] = true;
return function (x) {
return isObject(x) && x[propName] === true;
};
}
function isES6Map(thing) {
return thing instanceof Map;
}
function isES6Set(thing) {
return thing instanceof Set;
}
var hasGetOwnPropertySymbols = typeof Object.getOwnPropertySymbols !== "undefined";
/**
* Returns the following: own enumerable keys and symbols.
*/
function getPlainObjectKeys(object) {
var keys = Object.keys(object); // Not supported in IE, so there are not going to be symbol props anyway...
if (!hasGetOwnPropertySymbols) {
return keys;
}
var symbols = Object.getOwnPropertySymbols(object);
if (!symbols.length) {
return keys;
}
return [].concat(keys, symbols.filter(function (s) {
return objectPrototype.propertyIsEnumerable.call(object, s);
}));
} // From Immer utils
// Returns all own keys, including non-enumerable and symbolic
var ownKeys = typeof Reflect !== "undefined" && Reflect.ownKeys ? Reflect.ownKeys : hasGetOwnPropertySymbols ? function (obj) {
return Object.getOwnPropertyNames(obj).concat(Object.getOwnPropertySymbols(obj));
} :
/* istanbul ignore next */
Object.getOwnPropertyNames;
function stringifyKey(key) {
if (typeof key === "string") {
return key;
}
if (typeof key === "symbol") {
return key.toString();
}
return new String(key).toString();
}
function toPrimitive(value) {
return value === null ? null : typeof value === "object" ? "" + value : value;
}
function hasProp(target, prop) {
return objectPrototype.hasOwnProperty.call(target, prop);
} // From Immer utils
var getOwnPropertyDescriptors = Object.getOwnPropertyDescriptors || function getOwnPropertyDescriptors(target) {
// Polyfill needed for Hermes and IE, see https://github.com/facebook/hermes/issues/274
var res = {}; // Note: without polyfill for ownKeys, symbols won't be picked up
ownKeys(target).forEach(function (key) {
res[key] = getDescriptor(target, key);
});
return res;
};
function _defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
Object.defineProperty(target, descriptor.key, descriptor);
}
}
function _createClass(Constructor, protoProps, staticProps) {
if (protoProps) _defineProperties(Constructor.prototype, protoProps);
if (staticProps) _defineProperties(Constructor, staticProps);
Object.defineProperty(Constructor, "prototype", {
writable: false
});
return Constructor;
}
function _extends() {
_extends = Object.assign || function (target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) {
if (Object.prototype.hasOwnProperty.call(source, key)) {
target[key] = source[key];
}
}
}
return target;
};
return _extends.apply(this, arguments);
}
function _inheritsLoose(subClass, superClass) {
subClass.prototype = Object.create(superClass.prototype);
subClass.prototype.constructor = subClass;
_setPrototypeOf(subClass, superClass);
}
function _setPrototypeOf(o, p) {
_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
o.__proto__ = p;
return o;
};
return _setPrototypeOf(o, p);
}
function _assertThisInitialized(self) {
if (self === void 0) {
throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
}
return self;
}
function _unsupportedIterableToArray(o, minLen) {
if (!o) return;
if (typeof o === "string") return _arrayLikeToArray(o, minLen);
var n = Object.prototype.toString.call(o).slice(8, -1);
if (n === "Object" && o.constructor) n = o.constructor.name;
if (n === "Map" || n === "Set") return Array.from(o);
if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen);
}
function _arrayLikeToArray(arr, len) {
if (len == null || len > arr.length) len = arr.length;
for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i];
return arr2;
}
function _createForOfIteratorHelperLoose(o, allowArrayLike) {
var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"];
if (it) return (it = it.call(o)).next.bind(it);
if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") {
if (it) o = it;
var i = 0;
return function () {
if (i >= o.length) return {
done: true
};
return {
done: false,
value: o[i++]
};
};
}
throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
}
var storedAnnotationsSymbol = /*#__PURE__*/Symbol("mobx-stored-annotations");
/**
* Creates a function that acts as
* - decorator
* - annotation object
*/
function createDecoratorAnnotation(annotation) {
function decorator(target, property) {
storeAnnotation(target, property, annotation);
}
return Object.assign(decorator, annotation);
}
/**
* Stores annotation to prototype,
* so it can be inspected later by `makeObservable` called from constructor
*/
function storeAnnotation(prototype, key, annotation) {
if (!hasProp(prototype, storedAnnotationsSymbol)) {
addHiddenProp(prototype, storedAnnotationsSymbol, _extends({}, prototype[storedAnnotationsSymbol]));
} // @override must override something
if ( isOverride(annotation) && !hasProp(prototype[storedAnnotationsSymbol], key)) {
var fieldName = prototype.constructor.name + ".prototype." + key.toString();
die("'" + fieldName + "' is decorated with 'override', " + "but no such decorated member was found on prototype.");
} // Cannot re-decorate
assertNotDecorated(prototype, annotation, key); // Ignore override
if (!isOverride(annotation)) {
prototype[storedAnnotationsSymbol][key] = annotation;
}
}
function assertNotDecorated(prototype, annotation, key) {
if ( !isOverride(annotation) && hasProp(prototype[storedAnnotationsSymbol], key)) {
var fieldName = prototype.constructor.name + ".prototype." + key.toString();
var currentAnnotationType = prototype[storedAnnotationsSymbol][key].annotationType_;
var requestedAnnotationType = annotation.annotationType_;
die("Cannot apply '@" + requestedAnnotationType + "' to '" + fieldName + "':" + ("\nThe field is already decorated with '@" + currentAnnotationType + "'.") + "\nRe-decorating fields is not allowed." + "\nUse '@override' decorator for methods overriden by subclass.");
}
}
/**
* Collects annotations from prototypes and stores them on target (instance)
*/
function collectStoredAnnotations(target) {
if (!hasProp(target, storedAnnotationsSymbol)) {
if ( !target[storedAnnotationsSymbol]) {
die("No annotations were passed to makeObservable, but no decorated members have been found either");
} // We need a copy as we will remove annotation from the list once it's applied.
addHiddenProp(target, storedAnnotationsSymbol, _extends({}, target[storedAnnotationsSymbol]));
}
return target[storedAnnotationsSymbol];
}
var $mobx = /*#__PURE__*/Symbol("mobx administration");
var Atom = /*#__PURE__*/function () {
// for effective unobserving. BaseAtom has true, for extra optimization, so its onBecomeUnobserved never gets called, because it's not needed
/**
* Create a new atom. For debugging purposes it is recommended to give it a name.
* The onBecomeObserved and onBecomeUnobserved callbacks can be used for resource management.
*/
function Atom(name_) {
if (name_ === void 0) {
name_ = "Atom@" + getNextId() ;
}
this.name_ = void 0;
this.isPendingUnobservation_ = false;
this.isBeingObserved_ = false;
this.observers_ = new Set();
this.diffValue_ = 0;
this.lastAccessedBy_ = 0;
this.lowestObserverState_ = IDerivationState_.NOT_TRACKING_;
this.onBOL = void 0;
this.onBUOL = void 0;
this.name_ = name_;
} // onBecomeObservedListeners
var _proto = Atom.prototype;
_proto.onBO = function onBO() {
if (this.onBOL) {
this.onBOL.forEach(function (listener) {
return listener();
});
}
};
_proto.onBUO = function onBUO() {
if (this.onBUOL) {
this.onBUOL.forEach(function (listener) {
return listener();
});
}
}
/**
* Invoke this method to notify mobx that your atom has been used somehow.
* Returns true if there is currently a reactive context.
*/
;
_proto.reportObserved = function reportObserved$1() {
return reportObserved(this);
}
/**
* Invoke this method _after_ this atom has changed to signal mobx that all its observers should invalidate.
*/
;
_proto.reportChanged = function reportChanged() {
startBatch();
propagateChanged(this);
endBatch();
};
_proto.toString = function toString() {
return this.name_;
};
return Atom;
}();
var isAtom = /*#__PURE__*/createInstanceofPredicate("Atom", Atom);
function createAtom(name, onBecomeObservedHandler, onBecomeUnobservedHandler) {
if (onBecomeObservedHandler === void 0) {
onBecomeObservedHandler = noop;
}
if (onBecomeUnobservedHandler === void 0) {
onBecomeUnobservedHandler = noop;
}
var atom = new Atom(name); // default `noop` listener will not initialize the hook Set
if (onBecomeObservedHandler !== noop) {
onBecomeObserved(atom, onBecomeObservedHandler);
}
if (onBecomeUnobservedHandler !== noop) {
onBecomeUnobserved(atom, onBecomeUnobservedHandler);
}
return atom;
}
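// Usage sketch for the createAtom helper above (illustrative; `minuteAtom` and the handler
// bodies are made up):
//   var minuteAtom = createAtom(
//       "Minute",
//       function () { /* start ticking when the first observer arrives */ },
//       function () { /* stop ticking when the last observer leaves */ });
//   // In a getter: minuteAtom.reportObserved() marks the value as used by the current derivation.
//   // When the underlying value changes: minuteAtom.reportChanged() invalidates all observers.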
function identityComparer(a, b) {
return a === b;
}
function structuralComparer(a, b) {
return deepEqual(a, b);
}
function shallowComparer(a, b) {
return deepEqual(a, b, 1);
}
function defaultComparer(a, b) {
if (Object.is) {
return Object.is(a, b);
}
return a === b ? a !== 0 || 1 / a === 1 / b : a !== a && b !== b;
}
var comparer = {
identity: identityComparer,
structural: structuralComparer,
"default": defaultComparer,
shallow: shallowComparer
};
function deepEnhancer(v, _, name) {
// it is an observable already, done
if (isObservable(v)) {
return v;
} // something that can be converted and mutated?
if (Array.isArray(v)) {
return observable.array(v, {
name: name
});
}
if (isPlainObject(v)) {
return observable.object(v, undefined, {
name: name
});
}
if (isES6Map(v)) {
return observable.map(v, {
name: name
});
}
if (isES6Set(v)) {
return observable.set(v, {
name: name
});
}
if (typeof v === "function" && !isAction(v) && !isFlow(v)) {
if (isGenerator(v)) {
return flow(v);
} else {
return autoAction(name, v);
}
}
return v;
}
function shallowEnhancer(v, _, name) {
if (v === undefined || v === null) {
return v;
}
if (isObservableObject(v) || isObservableArray(v) || isObservableMap(v) || isObservableSet(v)) {
return v;
}
if (Array.isArray(v)) {
return observable.array(v, {
name: name,
deep: false
});
}
if (isPlainObject(v)) {
return observable.object(v, undefined, {
name: name,
deep: false
});
}
if (isES6Map(v)) {
return observable.map(v, {
name: name,
deep: false
});
}
if (isES6Set(v)) {
return observable.set(v, {
name: name,
deep: false
});
}
{
die("The shallow modifier / decorator can only used in combination with arrays, objects, maps and sets");
}
}
function referenceEnhancer(newValue) {
// never turn into an observable
return newValue;
}
function refStructEnhancer(v, oldValue) {
if ( isObservable(v)) {
die("observable.struct should not be used with observable values");
}
if (deepEqual(v, oldValue)) {
return oldValue;
}
return v;
}
var OVERRIDE = "override";
var override = /*#__PURE__*/createDecoratorAnnotation({
annotationType_: OVERRIDE,
make_: make_,
extend_: extend_
});
function isOverride(annotation) {
return annotation.annotationType_ === OVERRIDE;
}
function make_(adm, key) {
// Must not be plain object
if ( adm.isPlainObject_) {
die("Cannot apply '" + this.annotationType_ + "' to '" + adm.name_ + "." + key.toString() + "':" + ("\n'" + this.annotationType_ + "' cannot be used on plain objects."));
} // Must override something
if ( !hasProp(adm.appliedAnnotations_, key)) {
die("'" + adm.name_ + "." + key.toString() + "' is annotated with '" + this.annotationType_ + "', " + "but no such annotated member was found on prototype.");
}
return 0
/* Cancel */
;
}
function extend_(adm, key, descriptor, proxyTrap) {
die("'" + this.annotationType_ + "' can only be used with 'makeObservable'");
}
function createActionAnnotation(name, options) {
return {
annotationType_: name,
options_: options,
make_: make_$1,
extend_: extend_$1
};
}
function make_$1(adm, key, descriptor, source) {
var _this$options_;
// bound
if ((_this$options_ = this.options_) != null && _this$options_.bound) {
return this.extend_(adm, key, descriptor, false) === null ? 0
/* Cancel */
: 1
/* Break */
;
} // own
if (source === adm.target_) {
return this.extend_(adm, key, descriptor, false) === null ? 0
/* Cancel */
: 2
/* Continue */
;
} // prototype
if (isAction(descriptor.value)) {
// A prototype could have been annotated already by other constructor,
// rest of the proto chain must be annotated already
return 1
/* Break */
;
}
var actionDescriptor = createActionDescriptor(adm, this, key, descriptor, false);
defineProperty(source, key, actionDescriptor);
return 2
/* Continue */
;
}
function extend_$1(adm, key, descriptor, proxyTrap) {
var actionDescriptor = createActionDescriptor(adm, this, key, descriptor);
return adm.defineProperty_(key, actionDescriptor, proxyTrap);
}
function assertActionDescriptor(adm, _ref, key, _ref2) {
var annotationType_ = _ref.annotationType_;
var value = _ref2.value;
if ( !isFunction(value)) {
die("Cannot apply '" + annotationType_ + "' to '" + adm.name_ + "." + key.toString() + "':" + ("\n'" + annotationType_ + "' can only be used on properties with a function value."));
}
}
function createActionDescriptor(adm, annotation, key, descriptor, // provides ability to disable safeDescriptors for prototypes
safeDescriptors) {
var _annotation$options_, _annotation$options_$, _annotation$options_2, _annotation$options_$2, _annotation$options_3, _annotation$options_4, _adm$proxy_2;
if (safeDescriptors === void 0) {
safeDescriptors = globalState.safeDescriptors;
}
assertActionDescriptor(adm, annotation, key, descriptor);
var value = descriptor.value;
if ((_annotation$options_ = annotation.options_) != null && _annotation$options_.bound) {
var _adm$proxy_;
value = value.bind((_adm$proxy_ = adm.proxy_) != null ? _adm$proxy_ : adm.target_);
}
return {
value: createAction((_annotation$options_$ = (_annotation$options_2 = annotation.options_) == null ? void 0 : _annotation$options_2.name) != null ? _annotation$options_$ : key.toString(), value, (_annotation$options_$2 = (_annotation$options_3 = annotation.options_) == null ? void 0 : _annotation$options_3.autoAction) != null ? _annotation$options_$2 : false, // https://github.com/mobxjs/mobx/discussions/3140
(_annotation$options_4 = annotation.options_) != null && _annotation$options_4.bound ? (_adm$proxy_2 = adm.proxy_) != null ? _adm$proxy_2 : adm.target_ : undefined),
// Non-configurable for classes
// prevents accidental field redefinition in subclass
configurable: safeDescriptors ? adm.isPlainObject_ : true,
// https://github.com/mobxjs/mobx/pull/2641#issuecomment-737292058
enumerable: false,
// Non-observable, therefore non-writable
// Also prevents rewriting in subclass constructor
writable: safeDescriptors ? false : true
};
}
function createFlowAnnotation(name, options) {
return {
annotationType_: name,
options_: options,
make_: make_$2,
extend_: extend_$2
};
}
function make_$2(adm, key, descriptor, source) {
var _this$options_;
// own
if (source === adm.target_) {
return this.extend_(adm, key, descriptor, false) === null ? 0
/* Cancel */
: 2
/* Continue */
;
} // prototype
// bound - must annotate protos to support super.flow()
if ((_this$options_ = this.options_) != null && _this$options_.bound && (!hasProp(adm.target_, key) || !isFlow(adm.target_[key]))) {
if (this.extend_(adm, key, descriptor, false) === null) {
return 0
/* Cancel */
;
}
}
if (isFlow(descriptor.value)) {
// A prototype could have been annotated already by other constructor,
// rest of the proto chain must be annotated already
return 1
/* Break */
;
}
var flowDescriptor = createFlowDescriptor(adm, this, key, descriptor, false, false);
defineProperty(source, key, flowDescriptor);
return 2
/* Continue */
;
}
function extend_$2(adm, key, descriptor, proxyTrap) {
var _this$options_2;
var flowDescriptor = createFlowDescriptor(adm, this, key, descriptor, (_this$options_2 = this.options_) == null ? void 0 : _this$options_2.bound);
return adm.defineProperty_(key, flowDescriptor, proxyTrap);
}
function assertFlowDescriptor(adm, _ref, key, _ref2) {
var annotationType_ = _ref.annotationType_;
var value = _ref2.value;
if ( !isFunction(value)) {
die("Cannot apply '" + annotationType_ + "' to '" + adm.name_ + "." + key.toString() + "':" + ("\n'" + annotationType_ + "' can only be used on properties with a generator function value."));
}
}
function createFlowDescriptor(adm, annotation, key, descriptor, bound, // provides ability to disable safeDescriptors for prototypes
safeDescriptors) {
if (safeDescriptors === void 0) {
safeDescriptors = globalState.safeDescriptors;
}
assertFlowDescriptor(adm, annotation, key, descriptor);
var value = descriptor.value; // In case of flow.bound, the descriptor can be from already annotated prototype
if (!isFlow(value)) {
value = flow(value);
}
if (bound) {
var _adm$proxy_;
// We do not keep original function around, so we bind the existing flow
value = value.bind((_adm$proxy_ = adm.proxy_) != null ? _adm$proxy_ : adm.target_); // This is normally set by `flow`, but `bind` returns new function...
value.isMobXFlow = true;
}
return {
value: value,
// Non-configurable for classes
// prevents accidental field redefinition in subclass
configurable: safeDescriptors ? adm.isPlainObject_ : true,
// https://github.com/mobxjs/mobx/pull/2641#issuecomment-737292058
enumerable: false,
// Non-observable, therefore non-writable
// Also prevents rewriting in subclass constructor
writable: safeDescriptors ? false : true
};
}
function createComputedAnnotation(name, options) {
return {
annotationType_: name,
options_: options,
make_: make_$3,
extend_: extend_$3
};
}
function make_$3(adm, key, descriptor) {
return this.extend_(adm, key, descriptor, false) === null ? 0
/* Cancel */
: 1
/* Break */
;
}
function extend_$3(adm, key, descriptor, proxyTrap) {
assertComputedDescriptor(adm, this, key, descriptor);
return adm.defineComputedProperty_(key, _extends({}, this.options_, {
get: descriptor.get,
set: descriptor.set
}), proxyTrap);
}
function assertComputedDescriptor(adm, _ref, key, _ref2) {
var annotationType_ = _ref.annotationType_;
var get = _ref2.get;
if ( !get) {
die("Cannot apply '" + annotationType_ + "' to '" + adm.name_ + "." + key.toString() + "':" + ("\n'" + annotationType_ + "' can only be used on getter(+setter) properties."));
}
}
function createObservableAnnotation(name, options) {
return {
annotationType_: name,
options_: options,
make_: make_$4,
extend_: extend_$4
};
}
function make_$4(adm, key, descriptor) {
return this.extend_(adm, key, descriptor, false) === null ? 0
/* Cancel */
: 1
/* Break */
;
}
function extend_$4(adm, key, descriptor, proxyTrap) {
var _this$options_$enhanc, _this$options_;
assertObservableDescriptor(adm, this, key, descriptor);
return adm.defineObservableProperty_(key, descriptor.value, (_this$options_$enhanc = (_this$options_ = this.options_) == null ? void 0 : _this$options_.enhancer) != null ? _this$options_$enhanc : deepEnhancer, proxyTrap);
}
function assertObservableDescriptor(adm, _ref, key, descriptor) {
var annotationType_ = _ref.annotationType_;
if ( !("value" in descriptor)) {
die("Cannot apply '" + annotationType_ + "' to '" + adm.name_ + "." + key.toString() + "':" + ("\n'" + annotationType_ + "' cannot be used on getter/setter properties"));
}
}
var AUTO = "true";
var autoAnnotation = /*#__PURE__*/createAutoAnnotation();
function createAutoAnnotation(options) {
return {
annotationType_: AUTO,
options_: options,
make_: make_$5,
extend_: extend_$5
};
}
function make_$5(adm, key, descriptor, source) {
var _this$options_3, _this$options_4;
// getter -> computed
if (descriptor.get) {
return computed.make_(adm, key, descriptor, source);
} // lone setter -> action setter
if (descriptor.set) {
// TODO make action applicable to setter and delegate to action.make_
var set = createAction(key.toString(), descriptor.set); // own
if (source === adm.target_) {
return adm.defineProperty_(key, {
configurable: globalState.safeDescriptors ? adm.isPlainObject_ : true,
set: set
}) === null ? 0
/* Cancel */
: 2
/* Continue */
;
} // proto
defineProperty(source, key, {
configurable: true,
set: set
});
return 2
/* Continue */
;
} // function on proto -> autoAction/flow
if (source !== adm.target_ && typeof descriptor.value === "function") {
var _this$options_2;
if (isGenerator(descriptor.value)) {
var _this$options_;
var flowAnnotation = (_this$options_ = this.options_) != null && _this$options_.autoBind ? flow.bound : flow;
return flowAnnotation.make_(adm, key, descriptor, source);
}
var actionAnnotation = (_this$options_2 = this.options_) != null && _this$options_2.autoBind ? autoAction.bound : autoAction;
return actionAnnotation.make_(adm, key, descriptor, source);
} // other -> observable
// Copy props from proto as well, see test:
// "decorate should work with Object.create"
var observableAnnotation = ((_this$options_3 = this.options_) == null ? void 0 : _this$options_3.deep) === false ? observable.ref : observable; // if function respect autoBind option
if (typeof descriptor.value === "function" && (_this$options_4 = this.options_) != null && _this$options_4.autoBind) {
var _adm$proxy_;
descriptor.value = descriptor.value.bind((_adm$proxy_ = adm.proxy_) != null ? _adm$proxy_ : adm.target_);
}
return observableAnnotation.make_(adm, key, descriptor, source);
}
function extend_$5(adm, key, descriptor, proxyTrap) {
var _this$options_5, _this$options_6;
// getter -> computed
if (descriptor.get) {
return computed.extend_(adm, key, descriptor, proxyTrap);
} // lone setter -> action setter
if (descriptor.set) {
// TODO make action applicable to setter and delegate to action.extend_
return adm.defineProperty_(key, {
configurable: globalState.safeDescriptors ? adm.isPlainObject_ : true,
set: createAction(key.toString(), descriptor.set)
}, proxyTrap);
} // other -> observable
// if function respect autoBind option
if (typeof descriptor.value === "function" && (_this$options_5 = this.options_) != null && _this$options_5.autoBind) {
var _adm$proxy_2;
descriptor.value = descriptor.value.bind((_adm$proxy_2 = adm.proxy_) != null ? _adm$proxy_2 : adm.target_);
}
var observableAnnotation = ((_this$options_6 = this.options_) == null ? void 0 : _this$options_6.deep) === false ? observable.ref : observable;
return observableAnnotation.extend_(adm, key, descriptor, proxyTrap);
}
var OBSERVABLE = "observable";
var OBSERVABLE_REF = "observable.ref";
var OBSERVABLE_SHALLOW = "observable.shallow";
var OBSERVABLE_STRUCT = "observable.struct"; // Predefined bags of create observable options, to avoid allocating temporary option objects
// in the majority of cases
var defaultCreateObservableOptions = {
deep: true,
name: undefined,
defaultDecorator: undefined,
proxy: true
};
Object.freeze(defaultCreateObservableOptions);
function asCreateObservableOptions(thing) {
return thing || defaultCreateObservableOptions;
}
var observableAnnotation = /*#__PURE__*/createObservableAnnotation(OBSERVABLE);
var observableRefAnnotation = /*#__PURE__*/createObservableAnnotation(OBSERVABLE_REF, {
enhancer: referenceEnhancer
});
var observableShallowAnnotation = /*#__PURE__*/createObservableAnnotation(OBSERVABLE_SHALLOW, {
enhancer: shallowEnhancer
});
var observableStructAnnotation = /*#__PURE__*/createObservableAnnotation(OBSERVABLE_STRUCT, {
enhancer: refStructEnhancer
});
var observableDecoratorAnnotation = /*#__PURE__*/createDecoratorAnnotation(observableAnnotation);
function getEnhancerFromOptions(options) {
return options.deep === true ? deepEnhancer : options.deep === false ? referenceEnhancer : getEnhancerFromAnnotation(options.defaultDecorator);
}
function getAnnotationFromOptions(options) {
var _options$defaultDecor;
return options ? (_options$defaultDecor = options.defaultDecorator) != null ? _options$defaultDecor : createAutoAnnotation(options) : undefined;
}
function getEnhancerFromAnnotation(annotation) {
var _annotation$options_$, _annotation$options_;
return !annotation ? deepEnhancer : (_annotation$options_$ = (_annotation$options_ = annotation.options_) == null ? void 0 : _annotation$options_.enhancer) != null ? _annotation$options_$ : deepEnhancer;
}
/**
* Turns an object, array or function into a reactive structure.
* @param v the value which should become observable.
*/
function createObservable(v, arg2, arg3) {
// @observable someProp;
if (isStringish(arg2)) {
storeAnnotation(v, arg2, observableAnnotation);
return;
} // already observable - ignore
if (isObservable(v)) {
return v;
} // plain object
if (isPlainObject(v)) {
return observable.object(v, arg2, arg3);
} // Array
if (Array.isArray(v)) {
return observable.array(v, arg2);
} // Map
if (isES6Map(v)) {
return observable.map(v, arg2);
} // Set
if (isES6Set(v)) {
return observable.set(v, arg2);
} // other object - ignore
if (typeof v === "object" && v !== null) {
return v;
} // anything else
return observable.box(v, arg2);
}
Object.assign(createObservable, observableDecoratorAnnotation);
var observableFactories = {
box: function box(value, options) {
var o = asCreateObservableOptions(options);
return new ObservableValue(value, getEnhancerFromOptions(o), o.name, true, o.equals);
},
array: function array(initialValues, options) {
var o = asCreateObservableOptions(options);
return (globalState.useProxies === false || o.proxy === false ? createLegacyArray : createObservableArray)(initialValues, getEnhancerFromOptions(o), o.name);
},
map: function map(initialValues, options) {
var o = asCreateObservableOptions(options);
return new ObservableMap(initialValues, getEnhancerFromOptions(o), o.name);
},
set: function set(initialValues, options) {
var o = asCreateObservableOptions(options);
return new ObservableSet(initialValues, getEnhancerFromOptions(o), o.name);
},
object: function object(props, decorators, options) {
return extendObservable(globalState.useProxies === false || (options == null ? void 0 : options.proxy) === false ? asObservableObject({}, options) : asDynamicObservableObject({}, options), props, decorators);
},
ref: /*#__PURE__*/createDecoratorAnnotation(observableRefAnnotation),
shallow: /*#__PURE__*/createDecoratorAnnotation(observableShallowAnnotation),
deep: observableDecoratorAnnotation,
struct: /*#__PURE__*/createDecoratorAnnotation(observableStructAnnotation)
}; // eslint-disable-next-line
var observable = /*#__PURE__*/assign(createObservable, observableFactories);
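// Usage sketch for the assembled `observable` factory (illustrative values only):
//   var box = observable.box(1);             // ObservableValue with .get()/.set()
//   var list = observable.array([1, 2, 3]);  // proxied observable array (legacy array if proxies are off)
//   var state = observable({ count: 0 });    // plain object routed to observable.object above
//   box.set(2); state.count++;               // both report changes to any active derivations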
var COMPUTED = "computed";
var COMPUTED_STRUCT = "computed.struct";
var computedAnnotation = /*#__PURE__*/createComputedAnnotation(COMPUTED);
var computedStructAnnotation = /*#__PURE__*/createComputedAnnotation(COMPUTED_STRUCT, {
equals: comparer.structural
});
/**
* Decorator for class properties: @computed get value() { return expr; }.
* For legacy purposes also invokable as ES5 observable created: `computed(() => expr)`;
*/
var computed = function computed(arg1, arg2) {
if (isStringish(arg2)) {
// @computed
return storeAnnotation(arg1, arg2, computedAnnotation);
}
if (isPlainObject(arg1)) {
// @computed({ options })
return createDecoratorAnnotation(createComputedAnnotation(COMPUTED, arg1));
} // computed(expr, options?)
{
if (!isFunction(arg1)) {
die("First argument to `computed` should be an expression.");
}
if (isFunction(arg2)) {
die("A setter as second argument is no longer supported, use `{ set: fn }` option instead");
}
}
var opts = isPlainObject(arg2) ? arg2 : {};
opts.get = arg1;
opts.name || (opts.name = arg1.name || "");
/* for generated name */
return new ComputedValue(opts);
};
Object.assign(computed, computedAnnotation);
computed.struct = /*#__PURE__*/createDecoratorAnnotation(computedStructAnnotation);
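// Usage sketch for `computed` as defined above (illustrative only, not executed by this bundle):
// const price = observable.box(10);
// const vat = computed(function () { return price.get() * 0.21; }, { name: "vat" });
// vat.get(); // cached while observed; recomputed lazily when `price` changes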
var _getDescriptor$config, _getDescriptor;
// mobx versions
var currentActionId = 0;
var nextActionId = 1;
var isFunctionNameConfigurable = (_getDescriptor$config = (_getDescriptor = /*#__PURE__*/getDescriptor(function () {}, "name")) == null ? void 0 : _getDescriptor.configurable) != null ? _getDescriptor$config : false; // we can safely recycle this object
var tmpNameDescriptor = {
value: "action",
configurable: true,
writable: false,
enumerable: false
};
function createAction(actionName, fn, autoAction, ref) {
if (autoAction === void 0) {
autoAction = false;
}
{
if (!isFunction(fn)) {
die("`action` can only be invoked on functions");
}
if (typeof actionName !== "string" || !actionName) {
die("actions should have valid names, got: '" + actionName + "'");
}
}
function res() {
return executeAction(actionName, autoAction, fn, ref || this, arguments);
}
res.isMobxAction = true;
if (isFunctionNameConfigurable) {
tmpNameDescriptor.value = actionName;
Object.defineProperty(res, "name", tmpNameDescriptor);
}
return res;
}
function executeAction(actionName, canRunAsDerivation, fn, scope, args) {
var runInfo = _startAction(actionName, canRunAsDerivation, scope, args);
try {
return fn.apply(scope, args);
} catch (err) {
runInfo.error_ = err;
throw err;
} finally {
_endAction(runInfo);
}
}
function _startAction(actionName, canRunAsDerivation, // true for autoAction
scope, args) {
var notifySpy_ = isSpyEnabled() && !!actionName;
var startTime_ = 0;
if ( notifySpy_) {
startTime_ = Date.now();
var flattenedArgs = args ? Array.from(args) : EMPTY_ARRAY;
spyReportStart({
type: ACTION,
name: actionName,
object: scope,
arguments: flattenedArgs
});
}
var prevDerivation_ = globalState.trackingDerivation;
var runAsAction = !canRunAsDerivation || !prevDerivation_;
startBatch();
var prevAllowStateChanges_ = globalState.allowStateChanges; // by default preserve previous allow
if (runAsAction) {
untrackedStart();
prevAllowStateChanges_ = allowStateChangesStart(true);
}
var prevAllowStateReads_ = allowStateReadsStart(true);
var runInfo = {
runAsAction_: runAsAction,
prevDerivation_: prevDerivation_,
prevAllowStateChanges_: prevAllowStateChanges_,
prevAllowStateReads_: prevAllowStateReads_,
notifySpy_: notifySpy_,
startTime_: startTime_,
actionId_: nextActionId++,
parentActionId_: currentActionId
};
currentActionId = runInfo.actionId_;
return runInfo;
}
function _endAction(runInfo) {
if (currentActionId !== runInfo.actionId_) {
die(30);
}
currentActionId = runInfo.parentActionId_;
if (runInfo.error_ !== undefined) {
globalState.suppressReactionErrors = true;
}
allowStateChangesEnd(runInfo.prevAllowStateChanges_);
allowStateReadsEnd(runInfo.prevAllowStateReads_);
endBatch();
if (runInfo.runAsAction_) {
untrackedEnd(runInfo.prevDerivation_);
}
if ( runInfo.notifySpy_) {
spyReportEnd({
time: Date.now() - runInfo.startTime_
});
}
globalState.suppressReactionErrors = false;
}
function allowStateChanges(allowStateChanges, func) {
var prev = allowStateChangesStart(allowStateChanges);
try {
return func();
} finally {
allowStateChangesEnd(prev);
}
}
function allowStateChangesStart(allowStateChanges) {
var prev = globalState.allowStateChanges;
globalState.allowStateChanges = allowStateChanges;
return prev;
}
function allowStateChangesEnd(prev) {
globalState.allowStateChanges = prev;
}
var _Symbol$toPrimitive;
var CREATE = "create";
_Symbol$toPrimitive = Symbol.toPrimitive;
var ObservableValue = /*#__PURE__*/function (_Atom, _Symbol$toPrimitive2) {
_inheritsLoose(ObservableValue, _Atom);
function ObservableValue(value, enhancer, name_, notifySpy, equals) {
var _this;
if (name_ === void 0) {
name_ = "ObservableValue@" + getNextId() ;
}
if (notifySpy === void 0) {
notifySpy = true;
}
if (equals === void 0) {
equals = comparer["default"];
}
_this = _Atom.call(this, name_) || this;
_this.enhancer = void 0;
_this.name_ = void 0;
_this.equals = void 0;
_this.hasUnreportedChange_ = false;
_this.interceptors_ = void 0;
_this.changeListeners_ = void 0;
_this.value_ = void 0;
_this.dehancer = void 0;
_this.enhancer = enhancer;
_this.name_ = name_;
_this.equals = equals;
_this.value_ = enhancer(value, undefined, name_);
if ( notifySpy && isSpyEnabled()) {
// only notify spy if this is a stand-alone observable
spyReport({
type: CREATE,
object: _assertThisInitialized(_this),
observableKind: "value",
debugObjectName: _this.name_,
newValue: "" + _this.value_
});
}
return _this;
}
var _proto = ObservableValue.prototype;
_proto.dehanceValue = function dehanceValue(value) {
if (this.dehancer !== undefined) {
return this.dehancer(value);
}
return value;
};
_proto.set = function set(newValue) {
var oldValue = this.value_;
newValue = this.prepareNewValue_(newValue);
if (newValue !== globalState.UNCHANGED) {
var notifySpy = isSpyEnabled();
if ( notifySpy) {
spyReportStart({
type: UPDATE,
object: this,
observableKind: "value",
debugObjectName: this.name_,
newValue: newValue,
oldValue: oldValue
});
}
this.setNewValue_(newValue);
if ( notifySpy) {
spyReportEnd();
}
}
};
_proto.prepareNewValue_ = function prepareNewValue_(newValue) {
checkIfStateModificationsAreAllowed(this);
if (hasInterceptors(this)) {
var change = interceptChange(this, {
object: this,
type: UPDATE,
newValue: newValue
});
if (!change) {
return globalState.UNCHANGED;
}
newValue = change.newValue;
} // apply modifier
newValue = this.enhancer(newValue, this.value_, this.name_);
return this.equals(this.value_, newValue) ? globalState.UNCHANGED : newValue;
};
_proto.setNewValue_ = function setNewValue_(newValue) {
var oldValue = this.value_;
this.value_ = newValue;
this.reportChanged();
if (hasListeners(this)) {
notifyListeners(this, {
type: UPDATE,
object: this,
newValue: newValue,
oldValue: oldValue
});
}
};
_proto.get = function get() {
this.reportObserved();
return this.dehanceValue(this.value_);
};
_proto.intercept_ = function intercept_(handler) {
return registerInterceptor(this, handler);
};
_proto.observe_ = function observe_(listener, fireImmediately) {
if (fireImmediately) {
listener({
observableKind: "value",
debugObjectName: this.name_,
object: this,
type: UPDATE,
newValue: this.value_,
oldValue: undefined
});
}
return registerListener(this, listener);
};
_proto.raw = function raw() {
// used by MST to get the undehanced value
return this.value_;
};
_proto.toJSON = function toJSON() {
return this.get();
};
_proto.toString = function toString() {
return this.name_ + "[" + this.value_ + "]";
};
_proto.valueOf = function valueOf() {
return toPrimitive(this.get());
};
_proto[_Symbol$toPrimitive2] = function () {
return this.valueOf();
};
return ObservableValue;
}(Atom, _Symbol$toPrimitive);
var isObservableValue = /*#__PURE__*/createInstanceofPredicate("ObservableValue", ObservableValue);
var _Symbol$toPrimitive$1;
/**
* A node in the state dependency root that observes other nodes, and can be observed itself.
*
* ComputedValue will remember the result of the computation for the duration of the batch, or
* while being observed.
*
* During this time it will recompute only when one of its direct dependencies changed,
* but only when it is being accessed with `ComputedValue.get()`.
*
 * Implementation description:
 * 1. The first time it is accessed it will compute and remember the result,
 *    and give back the remembered result until 2. happens.
 * 2. The first time any deep dependency changes, propagate POSSIBLY_STALE to all observers, wait for 3.
 * 3. When it is accessed, recompute if any shallow dependency changed;
 *    if the result changed, propagate STALE to all observers that were POSSIBLY_STALE from the last step.
 *    Go back to step 2 either way.
 *
 * If at any point it is outside a batch and isn't observed: reset everything and go to 1.
*/
_Symbol$toPrimitive$1 = Symbol.toPrimitive;
var ComputedValue = /*#__PURE__*/function (_Symbol$toPrimitive2) {
// nodes we are looking at. Our value depends on these nodes
// during tracking it's an array with new observed observers
// to check for cycles
// N.B: unminified as it is used by MST
/**
* Create a new computed value based on a function expression.
*
* The `name` property is for debug purposes only.
*
* The `equals` property specifies the comparer function to use to determine if a newly produced
* value differs from the previous value. Two comparers are provided in the library; `defaultComparer`
* compares based on identity comparison (===), and `structuralComparer` deeply compares the structure.
* Structural comparison can be convenient if you always produce a new aggregated object and
* don't want to notify observers if it is structurally the same.
* This is useful for working with vectors, mouse coordinates etc.
*/
function ComputedValue(options) {
this.dependenciesState_ = IDerivationState_.NOT_TRACKING_;
this.observing_ = [];
this.newObserving_ = null;
this.isBeingObserved_ = false;
this.isPendingUnobservation_ = false;
this.observers_ = new Set();
this.diffValue_ = 0;
this.runId_ = 0;
this.lastAccessedBy_ = 0;
this.lowestObserverState_ = IDerivationState_.UP_TO_DATE_;
this.unboundDepsCount_ = 0;
this.value_ = new CaughtException(null);
this.name_ = void 0;
this.triggeredBy_ = void 0;
this.isComputing_ = false;
this.isRunningSetter_ = false;
this.derivation = void 0;
this.setter_ = void 0;
this.isTracing_ = TraceMode.NONE;
this.scope_ = void 0;
this.equals_ = void 0;
this.requiresReaction_ = void 0;
this.keepAlive_ = void 0;
this.onBOL = void 0;
this.onBUOL = void 0;
if (!options.get) {
die(31);
}
this.derivation = options.get;
this.name_ = options.name || ( "ComputedValue@" + getNextId() );
if (options.set) {
this.setter_ = createAction( this.name_ + "-setter" , options.set);
}
this.equals_ = options.equals || (options.compareStructural || options.struct ? comparer.structural : comparer["default"]);
this.scope_ = options.context;
this.requiresReaction_ = options.requiresReaction;
this.keepAlive_ = !!options.keepAlive;
}
var _proto = ComputedValue.prototype;
_proto.onBecomeStale_ = function onBecomeStale_() {
propagateMaybeChanged(this);
};
_proto.onBO = function onBO() {
if (this.onBOL) {
this.onBOL.forEach(function (listener) {
return listener();
});
}
};
_proto.onBUO = function onBUO() {
if (this.onBUOL) {
this.onBUOL.forEach(function (listener) {
return listener();
});
}
}
/**
* Returns the current value of this computed value.
* Will evaluate its computation first if needed.
*/
;
_proto.get = function get() {
if (this.isComputing_) {
die(32, this.name_, this.derivation);
}
if (globalState.inBatch === 0 && // !globalState.trackingDerivation &&
this.observers_.size === 0 && !this.keepAlive_) {
if (shouldCompute(this)) {
this.warnAboutUntrackedRead_();
startBatch(); // See perf test 'computed memoization'
this.value_ = this.computeValue_(false);
endBatch();
}
} else {
reportObserved(this);
if (shouldCompute(this)) {
var prevTrackingContext = globalState.trackingContext;
if (this.keepAlive_ && !prevTrackingContext) {
globalState.trackingContext = this;
}
if (this.trackAndCompute()) {
propagateChangeConfirmed(this);
}
globalState.trackingContext = prevTrackingContext;
}
}
var result = this.value_;
if (isCaughtException(result)) {
throw result.cause;
}
return result;
};
_proto.set = function set(value) {
if (this.setter_) {
if (this.isRunningSetter_) {
die(33, this.name_);
}
this.isRunningSetter_ = true;
try {
this.setter_.call(this.scope_, value);
} finally {
this.isRunningSetter_ = false;
}
} else {
die(34, this.name_);
}
};
_proto.trackAndCompute = function trackAndCompute() {
// N.B: unminified as it is used by MST
var oldValue = this.value_;
var wasSuspended =
/* see #1208 */
this.dependenciesState_ === IDerivationState_.NOT_TRACKING_;
var newValue = this.computeValue_(true);
var changed = wasSuspended || isCaughtException(oldValue) || isCaughtException(newValue) || !this.equals_(oldValue, newValue);
if (changed) {
this.value_ = newValue;
if ( isSpyEnabled()) {
spyReport({
observableKind: "computed",
debugObjectName: this.name_,
object: this.scope_,
type: "update",
oldValue: oldValue,
newValue: newValue
});
}
}
return changed;
};
_proto.computeValue_ = function computeValue_(track) {
this.isComputing_ = true; // don't allow state changes during computation
var prev = allowStateChangesStart(false);
var res;
if (track) {
res = trackDerivedFunction(this, this.derivation, this.scope_);
} else {
if (globalState.disableErrorBoundaries === true) {
res = this.derivation.call(this.scope_);
} else {
try {
res = this.derivation.call(this.scope_);
} catch (e) {
res = new CaughtException(e);
}
}
}
allowStateChangesEnd(prev);
this.isComputing_ = false;
return res;
};
_proto.suspend_ = function suspend_() {
if (!this.keepAlive_) {
clearObserving(this);
this.value_ = undefined; // don't hold on to computed value!
if ( this.isTracing_ !== TraceMode.NONE) {
console.log("[mobx.trace] Computed value '" + this.name_ + "' was suspended and it will recompute on the next access.");
}
}
};
_proto.observe_ = function observe_(listener, fireImmediately) {
var _this = this;
var firstTime = true;
var prevValue = undefined;
return autorun(function () {
// TODO: why is this in a different place than the spyReport() function? in all other observables it's called in the same place
var newValue = _this.get();
if (!firstTime || fireImmediately) {
var prevU = untrackedStart();
listener({
observableKind: "computed",
debugObjectName: _this.name_,
type: UPDATE,
object: _this,
newValue: newValue,
oldValue: prevValue
});
untrackedEnd(prevU);
}
firstTime = false;
prevValue = newValue;
});
};
_proto.warnAboutUntrackedRead_ = function warnAboutUntrackedRead_() {
if (this.isTracing_ !== TraceMode.NONE) {
console.log("[mobx.trace] Computed value '" + this.name_ + "' is being read outside a reactive context. Doing a full recompute.");
}
if (typeof this.requiresReaction_ === "boolean" ? this.requiresReaction_ : globalState.computedRequiresReaction) {
console.warn("[mobx] Computed value '" + this.name_ + "' is being read outside a reactive context. Doing a full recompute.");
}
};
_proto.toString = function toString() {
return this.name_ + "[" + this.derivation.toString() + "]";
};
_proto.valueOf = function valueOf() {
return toPrimitive(this.get());
};
_proto[_Symbol$toPrimitive2] = function () {
return this.valueOf();
};
return ComputedValue;
}(_Symbol$toPrimitive$1);
var isComputedValue = /*#__PURE__*/createInstanceofPredicate("ComputedValue", ComputedValue);
var IDerivationState_;
(function (IDerivationState_) {
// before being run or (outside batch and not being observed)
// at this point derivation is not holding any data about dependency tree
IDerivationState_[IDerivationState_["NOT_TRACKING_"] = -1] = "NOT_TRACKING_"; // no shallow dependency changed since last computation
// won't recalculate derivation
// this is what makes mobx fast
IDerivationState_[IDerivationState_["UP_TO_DATE_"] = 0] = "UP_TO_DATE_"; // some deep dependency changed, but don't know if shallow dependency changed
// will require to check first if UP_TO_DATE or POSSIBLY_STALE
// currently only ComputedValue will propagate POSSIBLY_STALE
//
// having this state is second big optimization:
// don't have to recompute on every dependency change, but only when it's needed
IDerivationState_[IDerivationState_["POSSIBLY_STALE_"] = 1] = "POSSIBLY_STALE_"; // A shallow dependency has changed since last computation and the derivation
// will need to recompute when it's needed next.
IDerivationState_[IDerivationState_["STALE_"] = 2] = "STALE_";
})(IDerivationState_ || (IDerivationState_ = {}));
var TraceMode;
(function (TraceMode) {
TraceMode[TraceMode["NONE"] = 0] = "NONE";
TraceMode[TraceMode["LOG"] = 1] = "LOG";
TraceMode[TraceMode["BREAK"] = 2] = "BREAK";
})(TraceMode || (TraceMode = {}));
var CaughtException = function CaughtException(cause) {
this.cause = void 0;
this.cause = cause; // Empty
};
function isCaughtException(e) {
return e instanceof CaughtException;
}
/**
* Finds out whether any dependency of the derivation has actually changed.
* If dependenciesState is 1 then it will recalculate dependencies,
* if any dependency changed it will propagate it by changing dependenciesState to 2.
*
* By iterating over the dependencies in the same order that they were reported and
 * stopping on the first change, recalculations are only performed for ComputedValues
 * that will be tracked by the derivation. That is because we assume that if the first x
 * dependencies of the derivation don't change, then the derivation should run the same way
 * up until accessing the x-th dependency.
*/
function shouldCompute(derivation) {
switch (derivation.dependenciesState_) {
case IDerivationState_.UP_TO_DATE_:
return false;
case IDerivationState_.NOT_TRACKING_:
case IDerivationState_.STALE_:
return true;
case IDerivationState_.POSSIBLY_STALE_:
{
// state propagation can occur outside of action/reactive context #2195
var prevAllowStateReads = allowStateReadsStart(true);
var prevUntracked = untrackedStart(); // no need for those computeds to be reported, they will be picked up in trackDerivedFunction.
var obs = derivation.observing_,
l = obs.length;
for (var i = 0; i < l; i++) {
var obj = obs[i];
if (isComputedValue(obj)) {
if (globalState.disableErrorBoundaries) {
obj.get();
} else {
try {
obj.get();
} catch (e) {
// we are not interested in the value *or* exception at this moment, but if there is one, notify all
untrackedEnd(prevUntracked);
allowStateReadsEnd(prevAllowStateReads);
return true;
}
} // if ComputedValue `obj` actually changed it will be computed and propagated to its observers.
// and `derivation` is an observer of `obj`
// invariantShouldCompute(derivation)
if (derivation.dependenciesState_ === IDerivationState_.STALE_) {
untrackedEnd(prevUntracked);
allowStateReadsEnd(prevAllowStateReads);
return true;
}
}
}
changeDependenciesStateTo0(derivation);
untrackedEnd(prevUntracked);
allowStateReadsEnd(prevAllowStateReads);
return false;
}
}
}
function isComputingDerivation() {
return globalState.trackingDerivation !== null; // filter out actions inside computations
}
function checkIfStateModificationsAreAllowed(atom) {
var hasObservers = atom.observers_.size > 0; // Should not be possible to change observed state outside strict mode, except during initialization, see #563
if (!globalState.allowStateChanges && (hasObservers || globalState.enforceActions === "always")) {
console.warn("[MobX] " + (globalState.enforceActions ? "Since strict-mode is enabled, changing (observed) observable values without using an action is not allowed. Tried to modify: " : "Side effects like changing state are not allowed at this point. Are you trying to modify state from, for example, a computed value or the render function of a React component? You can wrap side effects in 'runInAction' (or decorate functions with 'action') if needed. Tried to modify: ") + atom.name_);
}
}
function checkIfStateReadsAreAllowed(observable) {
if ( !globalState.allowStateReads && globalState.observableRequiresReaction) {
console.warn("[mobx] Observable '" + observable.name_ + "' being read outside a reactive context.");
}
}
/**
* Executes the provided function `f` and tracks which observables are being accessed.
* The tracking information is stored on the `derivation` object and the derivation is registered
* as observer of any of the accessed observables.
*/
function trackDerivedFunction(derivation, f, context) {
var prevAllowStateReads = allowStateReadsStart(true); // pre allocate array allocation + room for variation in deps
// array will be trimmed by bindDependencies
changeDependenciesStateTo0(derivation);
derivation.newObserving_ = new Array(derivation.observing_.length + 100);
derivation.unboundDepsCount_ = 0;
derivation.runId_ = ++globalState.runId;
var prevTracking = globalState.trackingDerivation;
globalState.trackingDerivation = derivation;
globalState.inBatch++;
var result;
if (globalState.disableErrorBoundaries === true) {
result = f.call(context);
} else {
try {
result = f.call(context);
} catch (e) {
result = new CaughtException(e);
}
}
globalState.inBatch--;
globalState.trackingDerivation = prevTracking;
bindDependencies(derivation);
warnAboutDerivationWithoutDependencies(derivation);
allowStateReadsEnd(prevAllowStateReads);
return result;
}
function warnAboutDerivationWithoutDependencies(derivation) {
if (derivation.observing_.length !== 0) {
return;
}
if (typeof derivation.requiresObservable_ === "boolean" ? derivation.requiresObservable_ : globalState.reactionRequiresObservable) {
console.warn("[mobx] Derivation '" + derivation.name_ + "' is created/updated without reading any observable value.");
}
}
/**
* diffs newObserving with observing.
* update observing to be newObserving with unique observables
* notify observers that become observed/unobserved
*/
function bindDependencies(derivation) {
// invariant(derivation.dependenciesState !== IDerivationState.NOT_TRACKING, "INTERNAL ERROR bindDependencies expects derivation.dependenciesState !== -1");
var prevObserving = derivation.observing_;
var observing = derivation.observing_ = derivation.newObserving_;
var lowestNewObservingDerivationState = IDerivationState_.UP_TO_DATE_; // Go through all new observables and check diffValue: (this list can contain duplicates):
// 0: first occurrence, change to 1 and keep it
// 1: extra occurrence, drop it
var i0 = 0,
l = derivation.unboundDepsCount_;
for (var i = 0; i < l; i++) {
var dep = observing[i];
if (dep.diffValue_ === 0) {
dep.diffValue_ = 1;
if (i0 !== i) {
observing[i0] = dep;
}
i0++;
} // Upcast is 'safe' here, because if dep is IObservable, `dependenciesState` will be undefined,
// not hitting the condition
if (dep.dependenciesState_ > lowestNewObservingDerivationState) {
lowestNewObservingDerivationState = dep.dependenciesState_;
}
}
observing.length = i0;
derivation.newObserving_ = null; // newObserving shouldn't be needed outside tracking (statement moved down to work around FF bug, see #614)
// Go through all old observables and check diffValue: (it is unique after last bindDependencies)
// 0: it's not in new observables, unobserve it
// 1: it keeps being observed, don't want to notify it. change to 0
l = prevObserving.length;
while (l--) {
var _dep = prevObserving[l];
if (_dep.diffValue_ === 0) {
removeObserver(_dep, derivation);
}
_dep.diffValue_ = 0;
} // Go through all new observables and check diffValue: (now it should be unique)
// 0: it was set to 0 in last loop. don't need to do anything.
// 1: it wasn't observed, let's observe it. set back to 0
while (i0--) {
var _dep2 = observing[i0];
if (_dep2.diffValue_ === 1) {
_dep2.diffValue_ = 0;
addObserver(_dep2, derivation);
}
} // Some new observed derivations may become stale during this derivation computation
// so they have had no chance to propagate staleness (#916)
if (lowestNewObservingDerivationState !== IDerivationState_.UP_TO_DATE_) {
derivation.dependenciesState_ = lowestNewObservingDerivationState;
derivation.onBecomeStale_();
}
}
function clearObserving(derivation) {
// invariant(globalState.inBatch > 0, "INTERNAL ERROR clearObserving should be called only inside batch");
var obs = derivation.observing_;
derivation.observing_ = [];
var i = obs.length;
while (i--) {
removeObserver(obs[i], derivation);
}
derivation.dependenciesState_ = IDerivationState_.NOT_TRACKING_;
}
function untracked(action) {
var prev = untrackedStart();
try {
return action();
} finally {
untrackedEnd(prev);
}
}
function untrackedStart() {
var prev = globalState.trackingDerivation;
globalState.trackingDerivation = null;
return prev;
}
function untrackedEnd(prev) {
globalState.trackingDerivation = prev;
}
function allowStateReadsStart(allowStateReads) {
var prev = globalState.allowStateReads;
globalState.allowStateReads = allowStateReads;
return prev;
}
function allowStateReadsEnd(prev) {
globalState.allowStateReads = prev;
}
/**
 * Needed to keep `lowestObserverState` correct when changing from (2 or 1) to 0.
*
*/
function changeDependenciesStateTo0(derivation) {
if (derivation.dependenciesState_ === IDerivationState_.UP_TO_DATE_) {
return;
}
derivation.dependenciesState_ = IDerivationState_.UP_TO_DATE_;
var obs = derivation.observing_;
var i = obs.length;
while (i--) {
obs[i].lowestObserverState_ = IDerivationState_.UP_TO_DATE_;
}
}
/**
* These values will persist if global state is reset
*/
var persistentKeys = ["mobxGuid", "spyListeners", "enforceActions", "computedRequiresReaction", "reactionRequiresObservable", "observableRequiresReaction", "allowStateReads", "disableErrorBoundaries", "runId", "UNCHANGED", "useProxies"];
var MobXGlobals = function MobXGlobals() {
this.version = 6;
this.UNCHANGED = {};
this.trackingDerivation = null;
this.trackingContext = null;
this.runId = 0;
this.mobxGuid = 0;
this.inBatch = 0;
this.pendingUnobservations = [];
this.pendingReactions = [];
this.isRunningReactions = false;
this.allowStateChanges = false;
this.allowStateReads = true;
this.enforceActions = true;
this.spyListeners = [];
this.globalReactionErrorHandlers = [];
this.computedRequiresReaction = false;
this.reactionRequiresObservable = false;
this.observableRequiresReaction = false;
this.disableErrorBoundaries = false;
this.suppressReactionErrors = false;
this.useProxies = true;
this.verifyProxies = false;
this.safeDescriptors = true;
};
var canMergeGlobalState = true;
var isolateCalled = false;
var globalState = /*#__PURE__*/function () {
var global = /*#__PURE__*/getGlobal();
if (global.__mobxInstanceCount > 0 && !global.__mobxGlobals) {
canMergeGlobalState = false;
}
if (global.__mobxGlobals && global.__mobxGlobals.version !== new MobXGlobals().version) {
canMergeGlobalState = false;
}
if (!canMergeGlobalState) {
// Because this is an IIFE we need to give isolateCalled a chance to change,
// so we run this check after the event loop has completed at least one iteration
setTimeout(function () {
if (!isolateCalled) {
die(35);
}
}, 1);
return new MobXGlobals();
} else if (global.__mobxGlobals) {
global.__mobxInstanceCount += 1;
if (!global.__mobxGlobals.UNCHANGED) {
global.__mobxGlobals.UNCHANGED = {};
} // make merge backward compatible
return global.__mobxGlobals;
} else {
global.__mobxInstanceCount = 1;
return global.__mobxGlobals = /*#__PURE__*/new MobXGlobals();
}
}();
function isolateGlobalState() {
if (globalState.pendingReactions.length || globalState.inBatch || globalState.isRunningReactions) {
die(36);
}
isolateCalled = true;
if (canMergeGlobalState) {
var global = getGlobal();
if (--global.__mobxInstanceCount === 0) {
global.__mobxGlobals = undefined;
}
globalState = new MobXGlobals();
}
}
function getGlobalState() {
return globalState;
}
/**
* For testing purposes only; this will break the internal state of existing observables,
* but can be used to get back at a stable state after throwing errors
*/
function resetGlobalState() {
var defaultGlobals = new MobXGlobals();
for (var key in defaultGlobals) {
if (persistentKeys.indexOf(key) === -1) {
globalState[key] = defaultGlobals[key];
}
}
globalState.allowStateChanges = !globalState.enforceActions;
}
function hasObservers(observable) {
return observable.observers_ && observable.observers_.size > 0;
}
function getObservers(observable) {
return observable.observers_;
} // function invariantObservers(observable: IObservable) {
// const list = observable.observers
// const map = observable.observersIndexes
// const l = list.length
// for (let i = 0; i < l; i++) {
// const id = list[i].__mapid
// if (i) {
// invariant(map[id] === i, "INTERNAL ERROR maps derivation.__mapid to index in list") // for performance
// } else {
// invariant(!(id in map), "INTERNAL ERROR observer on index 0 shouldn't be held in map.") // for performance
// }
// }
// invariant(
// list.length === 0 || Object.keys(map).length === list.length - 1,
// "INTERNAL ERROR there is no junk in map"
// )
// }
function addObserver(observable, node) {
// invariant(node.dependenciesState !== -1, "INTERNAL ERROR, can add only dependenciesState !== -1");
// invariant(observable._observers.indexOf(node) === -1, "INTERNAL ERROR add already added node");
// invariantObservers(observable);
observable.observers_.add(node);
if (observable.lowestObserverState_ > node.dependenciesState_) {
observable.lowestObserverState_ = node.dependenciesState_;
} // invariantObservers(observable);
// invariant(observable._observers.indexOf(node) !== -1, "INTERNAL ERROR didn't add node");
}
function removeObserver(observable, node) {
// invariant(globalState.inBatch > 0, "INTERNAL ERROR, remove should be called only inside batch");
// invariant(observable._observers.indexOf(node) !== -1, "INTERNAL ERROR remove already removed node");
// invariantObservers(observable);
observable.observers_["delete"](node);
if (observable.observers_.size === 0) {
// deleting last observer
queueForUnobservation(observable);
} // invariantObservers(observable);
// invariant(observable._observers.indexOf(node) === -1, "INTERNAL ERROR remove already removed node2");
}
function queueForUnobservation(observable) {
if (observable.isPendingUnobservation_ === false) {
// invariant(observable._observers.length === 0, "INTERNAL ERROR, should only queue for unobservation unobserved observables");
observable.isPendingUnobservation_ = true;
globalState.pendingUnobservations.push(observable);
}
}
/**
* Batch starts a transaction, at least for purposes of memoizing ComputedValues when nothing else does.
* During a batch `onBecomeUnobserved` will be called at most once per observable.
* Avoids unnecessary recalculations.
*/
function startBatch() {
globalState.inBatch++;
}
function endBatch() {
if (--globalState.inBatch === 0) {
runReactions(); // the batch is actually about to finish, all unobserving should happen here.
var list = globalState.pendingUnobservations;
for (var i = 0; i < list.length; i++) {
var observable = list[i];
observable.isPendingUnobservation_ = false;
if (observable.observers_.size === 0) {
if (observable.isBeingObserved_) {
// if this observable had reactive observers, trigger the hooks
observable.isBeingObserved_ = false;
observable.onBUO();
}
if (observable instanceof ComputedValue) {
// computed values are automatically torn down when the last observer leaves
// this process happens recursively, this computed might be the last observable of another, etc..
observable.suspend_();
}
}
}
globalState.pendingUnobservations = [];
}
}
function reportObserved(observable) {
checkIfStateReadsAreAllowed(observable);
var derivation = globalState.trackingDerivation;
if (derivation !== null) {
/**
 * Simple optimization: give each derivation run a unique id (runId).
 * Check whether the last time this observable was accessed the same runId was used;
 * if so, the relation is already known.
*/
if (derivation.runId_ !== observable.lastAccessedBy_) {
observable.lastAccessedBy_ = derivation.runId_; // Tried storing newObserving, or observing, or both as Set, but performance didn't come close...
derivation.newObserving_[derivation.unboundDepsCount_++] = observable;
if (!observable.isBeingObserved_ && globalState.trackingContext) {
observable.isBeingObserved_ = true;
observable.onBO();
}
}
return true;
} else if (observable.observers_.size === 0 && globalState.inBatch > 0) {
queueForUnobservation(observable);
}
return false;
} // function invariantLOS(observable: IObservable, msg: string) {
// // it's expensive so better not run it in production, but temporarily helpful for testing
// const min = getObservers(observable).reduce((a, b) => Math.min(a, b.dependenciesState), 2)
// if (min >= observable.lowestObserverState) return // <- the only assumption about `lowestObserverState`
// throw new Error(
// "lowestObserverState is wrong for " +
// msg +
// " because " +
// min +
// " < " +
// observable.lowestObserverState
// )
// }
/**
 * NOTE: the current propagation mechanism will behave unexpectedly in the case of self-rerunning autoruns.
 * It will propagate changes to observers from the previous run.
 * It's hard, or maybe impossible (with reasonable perf), to get this right with the current approach.
 * Hopefully self-rerunning autoruns aren't a feature people should depend on.
 * Also, most basic use cases should be ok.
*/
// Called by Atom when its value changes
function propagateChanged(observable) {
// invariantLOS(observable, "changed start");
if (observable.lowestObserverState_ === IDerivationState_.STALE_) {
return;
}
observable.lowestObserverState_ = IDerivationState_.STALE_; // Ideally we use for..of here, but the downcompiled version is really slow...
observable.observers_.forEach(function (d) {
if (d.dependenciesState_ === IDerivationState_.UP_TO_DATE_) {
if ( d.isTracing_ !== TraceMode.NONE) {
logTraceInfo(d, observable);
}
d.onBecomeStale_();
}
d.dependenciesState_ = IDerivationState_.STALE_;
}); // invariantLOS(observable, "changed end");
} // Called by ComputedValue when it recalculates and its value has changed
function propagateChangeConfirmed(observable) {
// invariantLOS(observable, "confirmed start");
if (observable.lowestObserverState_ === IDerivationState_.STALE_) {
return;
}
observable.lowestObserverState_ = IDerivationState_.STALE_;
observable.observers_.forEach(function (d) {
if (d.dependenciesState_ === IDerivationState_.POSSIBLY_STALE_) {
d.dependenciesState_ = IDerivationState_.STALE_;
if ( d.isTracing_ !== TraceMode.NONE) {
logTraceInfo(d, observable);
}
} else if (d.dependenciesState_ === IDerivationState_.UP_TO_DATE_ // this happens during computing of `d`, just keep lowestObserverState up to date.
) {
observable.lowestObserverState_ = IDerivationState_.UP_TO_DATE_;
}
}); // invariantLOS(observable, "confirmed end");
} // Used by computed when its dependency changed, but we don't want to immediately recompute.
function propagateMaybeChanged(observable) {
// invariantLOS(observable, "maybe start");
if (observable.lowestObserverState_ !== IDerivationState_.UP_TO_DATE_) {
return;
}
observable.lowestObserverState_ = IDerivationState_.POSSIBLY_STALE_;
observable.observers_.forEach(function (d) {
if (d.dependenciesState_ === IDerivationState_.UP_TO_DATE_) {
d.dependenciesState_ = IDerivationState_.POSSIBLY_STALE_;
d.onBecomeStale_();
}
}); // invariantLOS(observable, "maybe end");
}
function logTraceInfo(derivation, observable) {
console.log("[mobx.trace] '" + derivation.name_ + "' is invalidated due to a change in: '" + observable.name_ + "'");
if (derivation.isTracing_ === TraceMode.BREAK) {
var lines = [];
printDepTree(getDependencyTree(derivation), lines, 1); // prettier-ignore
new Function("debugger;\n/*\nTracing '" + derivation.name_ + "'\n\nYou are entering this break point because derivation '" + derivation.name_ + "' is being traced and '" + observable.name_ + "' is now forcing it to update.\nJust follow the stacktrace you should now see in the devtools to see precisely what piece of your code is causing this update\nThe stackframe you are looking for is at least ~6-8 stack-frames up.\n\n" + (derivation instanceof ComputedValue ? derivation.derivation.toString().replace(/[*]\//g, "/") : "") + "\n\nThe dependencies for this derivation are:\n\n" + lines.join("\n") + "\n*/\n ")();
}
}
function printDepTree(tree, lines, depth) {
if (lines.length >= 1000) {
lines.push("(and many more)");
return;
}
lines.push("" + "\t".repeat(depth - 1) + tree.name);
if (tree.dependencies) {
tree.dependencies.forEach(function (child) {
return printDepTree(child, lines, depth + 1);
});
}
}
var Reaction = /*#__PURE__*/function () {
// nodes we are looking at. Our value depends on these nodes
function Reaction(name_, onInvalidate_, errorHandler_, requiresObservable_) {
if (name_ === void 0) {
name_ = "Reaction@" + getNextId() ;
}
this.name_ = void 0;
this.onInvalidate_ = void 0;
this.errorHandler_ = void 0;
this.requiresObservable_ = void 0;
this.observing_ = [];
this.newObserving_ = [];
this.dependenciesState_ = IDerivationState_.NOT_TRACKING_;
this.diffValue_ = 0;
this.runId_ = 0;
this.unboundDepsCount_ = 0;
this.isDisposed_ = false;
this.isScheduled_ = false;
this.isTrackPending_ = false;
this.isRunning_ = false;
this.isTracing_ = TraceMode.NONE;
this.name_ = name_;
this.onInvalidate_ = onInvalidate_;
this.errorHandler_ = errorHandler_;
this.requiresObservable_ = requiresObservable_;
}
var _proto = Reaction.prototype;
_proto.onBecomeStale_ = function onBecomeStale_() {
this.schedule_();
};
_proto.schedule_ = function schedule_() {
if (!this.isScheduled_) {
this.isScheduled_ = true;
globalState.pendingReactions.push(this);
runReactions();
}
};
_proto.isScheduled = function isScheduled() {
return this.isScheduled_;
}
/**
* internal, use schedule() if you intend to kick off a reaction
*/
;
_proto.runReaction_ = function runReaction_() {
if (!this.isDisposed_) {
startBatch();
this.isScheduled_ = false;
var prev = globalState.trackingContext;
globalState.trackingContext = this;
if (shouldCompute(this)) {
this.isTrackPending_ = true;
try {
this.onInvalidate_();
if (this.isTrackPending_ && isSpyEnabled()) {
// onInvalidate didn't trigger track right away..
spyReport({
name: this.name_,
type: "scheduled-reaction"
});
}
} catch (e) {
this.reportExceptionInDerivation_(e);
}
}
globalState.trackingContext = prev;
endBatch();
}
};
_proto.track = function track(fn) {
if (this.isDisposed_) {
return; // console.warn("Reaction already disposed") // Note: Not a warning / error in mobx 4 either
}
startBatch();
var notify = isSpyEnabled();
var startTime;
if ( notify) {
startTime = Date.now();
spyReportStart({
name: this.name_,
type: "reaction"
});
}
this.isRunning_ = true;
var prevReaction = globalState.trackingContext; // reactions could create reactions...
globalState.trackingContext = this;
var result = trackDerivedFunction(this, fn, undefined);
globalState.trackingContext = prevReaction;
this.isRunning_ = false;
this.isTrackPending_ = false;
if (this.isDisposed_) {
// disposed during last run. Clean up everything that was bound after the dispose call.
clearObserving(this);
}
if (isCaughtException(result)) {
this.reportExceptionInDerivation_(result.cause);
}
if ( notify) {
spyReportEnd({
time: Date.now() - startTime
});
}
endBatch();
};
_proto.reportExceptionInDerivation_ = function reportExceptionInDerivation_(error) {
var _this = this;
if (this.errorHandler_) {
this.errorHandler_(error, this);
return;
}
if (globalState.disableErrorBoundaries) {
throw error;
}
var message = "[mobx] Encountered an uncaught exception that was thrown by a reaction or observer component, in: '" + this + "'" ;
if (!globalState.suppressReactionErrors) {
console.error(message, error);
/** If debugging brought you here, please, read the above message :-). Tnx! */
} else {
console.warn("[mobx] (error in reaction '" + this.name_ + "' suppressed, fix error of causing action below)");
} // prettier-ignore
if ( isSpyEnabled()) {
spyReport({
type: "error",
name: this.name_,
message: message,
error: "" + error
});
}
globalState.globalReactionErrorHandlers.forEach(function (f) {
return f(error, _this);
});
};
_proto.dispose = function dispose() {
if (!this.isDisposed_) {
this.isDisposed_ = true;
if (!this.isRunning_) {
// if disposed while running, clean up later. Maybe not optimal, but rare case
startBatch();
clearObserving(this);
endBatch();
}
}
};
_proto.getDisposer_ = function getDisposer_() {
var r = this.dispose.bind(this);
r[$mobx] = this;
return r;
};
_proto.toString = function toString() {
return "Reaction[" + this.name_ + "]";
};
_proto.trace = function trace$1(enterBreakPoint) {
if (enterBreakPoint === void 0) {
enterBreakPoint = false;
}
trace(this, enterBreakPoint);
};
return Reaction;
}();
function onReactionError(handler) {
globalState.globalReactionErrorHandlers.push(handler);
return function () {
var idx = globalState.globalReactionErrorHandlers.indexOf(handler);
if (idx >= 0) {
globalState.globalReactionErrorHandlers.splice(idx, 1);
}
};
}
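// Usage sketch for `onReactionError` defined above (illustrative only, not executed by this bundle;
// the handler body is an assumption of the sketch):
// const offError = onReactionError(function (error, reaction) {
//   console.error("reaction failed:", reaction.toString(), error);
// });
// offError(); // unregister the global handler again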
/**
* Magic number alert!
 * Defines how many times a reaction is allowed to re-trigger itself
 * before it is assumed to be stuck in a never-ending loop...
*/
var MAX_REACTION_ITERATIONS = 100;
var reactionScheduler = function reactionScheduler(f) {
return f();
};
function runReactions() {
// Trampolining: if runReactions is already running, new reactions will be picked up
if (globalState.inBatch > 0 || globalState.isRunningReactions) {
return;
}
reactionScheduler(runReactionsHelper);
}
function runReactionsHelper() {
globalState.isRunningReactions = true;
var allReactions = globalState.pendingReactions;
var iterations = 0; // While running reactions, new reactions might be triggered.
// Hence we work with two variables and check whether
// we converge to no remaining reactions after a while.
while (allReactions.length > 0) {
if (++iterations === MAX_REACTION_ITERATIONS) {
console.error( "Reaction doesn't converge to a stable state after " + MAX_REACTION_ITERATIONS + " iterations." + (" Probably there is a cycle in the reactive function: " + allReactions[0]) );
allReactions.splice(0); // clear reactions
}
var remainingReactions = allReactions.splice(0);
for (var i = 0, l = remainingReactions.length; i < l; i++) {
remainingReactions[i].runReaction_();
}
}
globalState.isRunningReactions = false;
}
var isReaction = /*#__PURE__*/createInstanceofPredicate("Reaction", Reaction);
function setReactionScheduler(fn) {
var baseScheduler = reactionScheduler;
reactionScheduler = function reactionScheduler(f) {
return fn(function () {
return baseScheduler(f);
});
};
}
function isSpyEnabled() {
return !!globalState.spyListeners.length;
}
function spyReport(event) {
if (!globalState.spyListeners.length) {
return;
}
var listeners = globalState.spyListeners;
for (var i = 0, l = listeners.length; i < l; i++) {
listeners[i](event);
}
}
function spyReportStart(event) {
var change = _extends({}, event, {
spyReportStart: true
});
spyReport(change);
}
var END_EVENT = {
type: "report-end",
spyReportEnd: true
};
function spyReportEnd(change) {
if (change) {
spyReport(_extends({}, change, {
type: "report-end",
spyReportEnd: true
}));
} else {
spyReport(END_EVENT);
}
}
function spy(listener) {
{
globalState.spyListeners.push(listener);
return once(function () {
globalState.spyListeners = globalState.spyListeners.filter(function (l) {
return l !== listener;
});
});
}
}
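// Usage sketch for `spy` defined above (illustrative only, not executed by this bundle;
// only event fields emitted by spyReportStart above are assumed):
// const stopSpy = spy(function (event) {
//   if (event.type === "action") console.log("action fired:", event.name);
// });
// stopSpy();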
var ACTION = "action";
var ACTION_BOUND = "action.bound";
var AUTOACTION = "autoAction";
var AUTOACTION_BOUND = "autoAction.bound";
var DEFAULT_ACTION_NAME = "<unnamed action>";
var actionAnnotation = /*#__PURE__*/createActionAnnotation(ACTION);
var actionBoundAnnotation = /*#__PURE__*/createActionAnnotation(ACTION_BOUND, {
bound: true
});
var autoActionAnnotation = /*#__PURE__*/createActionAnnotation(AUTOACTION, {
autoAction: true
});
var autoActionBoundAnnotation = /*#__PURE__*/createActionAnnotation(AUTOACTION_BOUND, {
autoAction: true,
bound: true
});
function createActionFactory(autoAction) {
var res = function action(arg1, arg2) {
// action(fn() {})
if (isFunction(arg1)) {
return createAction(arg1.name || DEFAULT_ACTION_NAME, arg1, autoAction);
} // action("name", fn() {})
if (isFunction(arg2)) {
return createAction(arg1, arg2, autoAction);
} // @action
if (isStringish(arg2)) {
return storeAnnotation(arg1, arg2, autoAction ? autoActionAnnotation : actionAnnotation);
} // action("name") & @action("name")
if (isStringish(arg1)) {
return createDecoratorAnnotation(createActionAnnotation(autoAction ? AUTOACTION : ACTION, {
name: arg1,
autoAction: autoAction
}));
}
{
die("Invalid arguments for `action`");
}
};
return res;
}
var action = /*#__PURE__*/createActionFactory(false);
Object.assign(action, actionAnnotation);
var autoAction = /*#__PURE__*/createActionFactory(true);
Object.assign(autoAction, autoActionAnnotation);
action.bound = /*#__PURE__*/createDecoratorAnnotation(actionBoundAnnotation);
autoAction.bound = /*#__PURE__*/createDecoratorAnnotation(autoActionBoundAnnotation);
function runInAction(fn) {
return executeAction(fn.name || DEFAULT_ACTION_NAME, false, fn, this, undefined);
}
function isAction(thing) {
return isFunction(thing) && thing.isMobxAction === true;
}
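// Usage sketch for `action` and `runInAction` defined above (illustrative only, not executed
// by this bundle; `counter` is an assumed example value):
// const counter = observable({ value: 0 });
// const increment = action("increment", function () { counter.value++; });
// increment();
// runInAction(function () { counter.value += 10; });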
/**
* Creates a named reactive view and keeps it alive, so that the view is always
* updated if one of the dependencies changes, even when the view is not further used by something else.
* @param view The reactive view
* @returns disposer function, which can be used to stop the view from being updated in the future.
*/
function autorun(view, opts) {
var _opts$name, _opts;
if (opts === void 0) {
opts = EMPTY_OBJECT;
}
{
if (!isFunction(view)) {
die("Autorun expects a function as first argument");
}
if (isAction(view)) {
die("Autorun does not accept actions since actions are untrackable");
}
}
var name = (_opts$name = (_opts = opts) == null ? void 0 : _opts.name) != null ? _opts$name : view.name || "Autorun@" + getNextId() ;
var runSync = !opts.scheduler && !opts.delay;
var reaction;
if (runSync) {
// normal autorun
reaction = new Reaction(name, function () {
this.track(reactionRunner);
}, opts.onError, opts.requiresObservable);
} else {
var scheduler = createSchedulerFromOptions(opts); // debounced autorun
var isScheduled = false;
reaction = new Reaction(name, function () {
if (!isScheduled) {
isScheduled = true;
scheduler(function () {
isScheduled = false;
if (!reaction.isDisposed_) {
reaction.track(reactionRunner);
}
});
}
}, opts.onError, opts.requiresObservable);
}
function reactionRunner() {
view(reaction);
}
reaction.schedule_();
return reaction.getDisposer_();
}
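// Usage sketch for `autorun`, including the `delay` option handled by createSchedulerFromOptions
// below (illustrative only, not executed by this bundle; `person` is an assumed example value):
// const person = observable({ first: "Jane", last: "Doe" });
// const disposeAutorun = autorun(function () {
//   console.log(person.first + " " + person.last);
// }, { delay: 100 }); // scheduled via setTimeout, at most one pending run
// runInAction(function () { person.first = "John"; });
// disposeAutorun();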
var run = function run(f) {
return f();
};
function createSchedulerFromOptions(opts) {
return opts.scheduler ? opts.scheduler : opts.delay ? function (f) {
return setTimeout(f, opts.delay);
} : run;
}
function reaction(expression, effect, opts) {
var _opts$name2;
if (opts === void 0) {
opts = EMPTY_OBJECT;
}
{
if (!isFunction(expression) || !isFunction(effect)) {
die("First and second argument to reaction should be functions");
}
if (!isPlainObject(opts)) {
die("Third argument of reactions should be an object");
}
}
var name = (_opts$name2 = opts.name) != null ? _opts$name2 : "Reaction@" + getNextId() ;
var effectAction = action(name, opts.onError ? wrapErrorHandler(opts.onError, effect) : effect);
var runSync = !opts.scheduler && !opts.delay;
var scheduler = createSchedulerFromOptions(opts);
var firstTime = true;
var isScheduled = false;
var value;
var oldValue;
var equals = opts.compareStructural ? comparer.structural : opts.equals || comparer["default"];
var r = new Reaction(name, function () {
if (firstTime || runSync) {
reactionRunner();
} else if (!isScheduled) {
isScheduled = true;
scheduler(reactionRunner);
}
}, opts.onError, opts.requiresObservable);
function reactionRunner() {
isScheduled = false;
if (r.isDisposed_) {
return;
}
var changed = false;
r.track(function () {
var nextValue = allowStateChanges(false, function () {
return expression(r);
});
changed = firstTime || !equals(value, nextValue);
oldValue = value;
value = nextValue;
});
if (firstTime && opts.fireImmediately) {
effectAction(value, oldValue, r);
} else if (!firstTime && changed) {
effectAction(value, oldValue, r);
}
firstTime = false;
}
r.schedule_();
return r.getDisposer_();
}
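// Usage sketch for `reaction` defined above (illustrative only, not executed by this bundle;
// `cart` is an assumed example value):
// const cart = observable({ items: [] });
// const stopReaction = reaction(
//   function () { return cart.items.length; },                                // tracked expression
//   function (length, prevLength) { console.log(prevLength, "->", length); }, // untracked effect
//   { fireImmediately: false }
// );
// runInAction(function () { cart.items.push("apple"); }); // effect runs with (1, 0)
// stopReaction();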
function wrapErrorHandler(errorHandler, baseFn) {
return function () {
try {
return baseFn.apply(this, arguments);
} catch (e) {
errorHandler.call(this, e);
}
};
}
var ON_BECOME_OBSERVED = "onBO";
var ON_BECOME_UNOBSERVED = "onBUO";
function onBecomeObserved(thing, arg2, arg3) {
return interceptHook(ON_BECOME_OBSERVED, thing, arg2, arg3);
}
function onBecomeUnobserved(thing, arg2, arg3) {
return interceptHook(ON_BECOME_UNOBSERVED, thing, arg2, arg3);
}
function interceptHook(hook, thing, arg2, arg3) {
var atom = typeof arg3 === "function" ? getAtom(thing, arg2) : getAtom(thing);
var cb = isFunction(arg3) ? arg3 : arg2;
var listenersKey = hook + "L";
if (atom[listenersKey]) {
atom[listenersKey].add(cb);
} else {
atom[listenersKey] = new Set([cb]);
}
return function () {
var hookListeners = atom[listenersKey];
if (hookListeners) {
hookListeners["delete"](cb);
if (hookListeners.size === 0) {
delete atom[listenersKey];
}
}
};
}
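// Usage sketch for the onBecomeObserved / onBecomeUnobserved hooks above (illustrative only,
// not executed by this bundle; `ticker` is an assumed example value):
// const ticker = observable.box(0);
// const offBO = onBecomeObserved(ticker, function () { console.log("ticker gained its first observer"); });
// const offBUO = onBecomeUnobserved(ticker, function () { console.log("ticker lost its last observer"); });
// offBO();
// offBUO();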
var NEVER = "never";
var ALWAYS = "always";
var OBSERVED = "observed"; // const IF_AVAILABLE = "ifavailable"
function configure(options) {
if (options.isolateGlobalState === true) {
isolateGlobalState();
}
var useProxies = options.useProxies,
enforceActions = options.enforceActions;
if (useProxies !== undefined) {
globalState.useProxies = useProxies === ALWAYS ? true : useProxies === NEVER ? false : typeof Proxy !== "undefined";
}
if (useProxies === "ifavailable") {
globalState.verifyProxies = true;
}
if (enforceActions !== undefined) {
var ea = enforceActions === ALWAYS ? ALWAYS : enforceActions === OBSERVED;
globalState.enforceActions = ea;
globalState.allowStateChanges = ea === true || ea === ALWAYS ? false : true;
}
["computedRequiresReaction", "reactionRequiresObservable", "observableRequiresReaction", "disableErrorBoundaries", "safeDescriptors"].forEach(function (key) {
if (key in options) {
globalState[key] = !!options[key];
}
});
globalState.allowStateReads = !globalState.observableRequiresReaction;
if ( globalState.disableErrorBoundaries === true) {
console.warn("WARNING: Debug feature only. MobX will NOT recover from errors when `disableErrorBoundaries` is enabled.");
}
if (options.reactionScheduler) {
setReactionScheduler(options.reactionScheduler);
}
}
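// Usage sketch for `configure` defined above (illustrative only, not executed by this bundle;
// only flags that the function reads are used):
// configure({
//   enforceActions: "observed",      // warn when observed state is mutated outside actions
//   useProxies: "always",
//   computedRequiresReaction: false,
//   disableErrorBoundaries: false
// });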
function extendObservable(target, properties, annotations, options) {
{
if (arguments.length > 4) {
die("'extendObservable' expected 2-4 arguments");
}
if (typeof target !== "object") {
die("'extendObservable' expects an object as first argument");
}
if (isObservableMap(target)) {
die("'extendObservable' should not be used on maps, use map.merge instead");
}
if (!isPlainObject(properties)) {
die("'extendObservable' only accepts plain objects as second argument");
}
if (isObservable(properties) || isObservable(annotations)) {
die("Extending an object with another observable (object) is not supported");
}
} // Pull descriptors first, so we don't have to deal with props added by administration ($mobx)
var descriptors = getOwnPropertyDescriptors(properties);
var adm = asObservableObject(target, options)[$mobx];
startBatch();
try {
ownKeys(descriptors).forEach(function (key) {
adm.extend_(key, descriptors[key], // must pass "undefined" for { key: undefined }
!annotations ? true : key in annotations ? annotations[key] : true);
});
} finally {
endBatch();
}
return target;
}
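// Usage sketch for `extendObservable` defined above (illustrative only, not executed by this
// bundle; `base` is an assumed example object and the getter is auto-annotated as a computed):
// const base = { kind: "widget" };
// extendObservable(base, {
//   count: 0,
//   get label() { return this.kind + ":" + this.count; }
// });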
function getDependencyTree(thing, property) {
return nodeToDependencyTree(getAtom(thing, property));
}
function nodeToDependencyTree(node) {
var result = {
name: node.name_
};
if (node.observing_ && node.observing_.length > 0) {
result.dependencies = unique(node.observing_).map(nodeToDependencyTree);
}
return result;
}
function getObserverTree(thing, property) {
return nodeToObserverTree(getAtom(thing, property));
}
function nodeToObserverTree(node) {
var result = {
name: node.name_
};
if (hasObservers(node)) {
result.observers = Array.from(getObservers(node)).map(nodeToObserverTree);
}
return result;
}
function unique(list) {
return Array.from(new Set(list));
}
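// Usage sketch for the introspection helpers above (illustrative only, not executed by this
// bundle; `model` is an assumed example value and the reported names are abbreviated):
// const model = observable({ a: 1, get twice() { return this.a * 2; } });
// const stop = autorun(function () { return model.twice; });
// getDependencyTree(model, "twice"); // { name: "...twice", dependencies: [{ name: "...a" }] }
// getObserverTree(model, "twice");   // { name: "...twice", observers: [{ name: "Autorun@..." }] }
// stop();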
var generatorId = 0;
function FlowCancellationError() {
this.message = "FLOW_CANCELLED";
}
FlowCancellationError.prototype = /*#__PURE__*/Object.create(Error.prototype);
function isFlowCancellationError(error) {
return error instanceof FlowCancellationError;
}
var flowAnnotation = /*#__PURE__*/createFlowAnnotation("flow");
var flowBoundAnnotation = /*#__PURE__*/createFlowAnnotation("flow.bound", {
bound: true
});
var flow = /*#__PURE__*/Object.assign(function flow(arg1, arg2) {
// @flow
if (isStringish(arg2)) {
return storeAnnotation(arg1, arg2, flowAnnotation);
} // flow(fn)
if ( arguments.length !== 1) {
die("Flow expects single argument with generator function");
}
var generator = arg1;
var name = generator.name || "<unnamed flow>"; // Implementation based on https://github.com/tj/co/blob/master/index.js
var res = function res() {
var ctx = this;
var args = arguments;
var runId = ++generatorId;
var gen = action(name + " - runid: " + runId + " - init", generator).apply(ctx, args);
var rejector;
var pendingPromise = undefined;
var promise = new Promise(function (resolve, reject) {
var stepId = 0;
rejector = reject;
function onFulfilled(res) {
pendingPromise = undefined;
var ret;
try {
ret = action(name + " - runid: " + runId + " - yield " + stepId++, gen.next).call(gen, res);
} catch (e) {
return reject(e);
}
next(ret);
}
function onRejected(err) {
pendingPromise = undefined;
var ret;
try {
ret = action(name + " - runid: " + runId + " - yield " + stepId++, gen["throw"]).call(gen, err);
} catch (e) {
return reject(e);
}
next(ret);
}
function next(ret) {
if (isFunction(ret == null ? void 0 : ret.then)) {
// an async iterator
ret.then(next, reject);
return;
}
if (ret.done) {
return resolve(ret.value);
}
pendingPromise = Promise.resolve(ret.value);
return pendingPromise.then(onFulfilled, onRejected);
}
onFulfilled(undefined); // kick off the process
});
promise.cancel = action(name + " - runid: " + runId + " - cancel", function () {
try {
if (pendingPromise) {
cancelPromise(pendingPromise);
} // Finally block can return (or yield) stuff..
var _res = gen["return"](undefined); // eat anything that promise would do, it's cancelled!
var yieldedPromise = Promise.resolve(_res.value);
yieldedPromise.then(noop, noop);
cancelPromise(yieldedPromise); // maybe it can be cancelled :)
// reject our original promise
rejector(new FlowCancellationError());
} catch (e) {
rejector(e); // there could be a throwing finally block
}
});
return promise;
};
res.isMobXFlow = true;
return res;
}, flowAnnotation);
flow.bound = /*#__PURE__*/createDecoratorAnnotation(flowBoundAnnotation);
function cancelPromise(promise) {
if (isFunction(promise.cancel)) {
promise.cancel();
}
}
function flowResult(result) {
return result; // just tricking TypeScript :)
}
function isFlow(fn) {
return (fn == null ? void 0 : fn.isMobXFlow) === true;
}
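// Usage sketch for `flow` defined above (illustrative only, not executed by this bundle;
// the "/api/data" endpoint and availability of a global `fetch` are assumptions of the sketch):
// const remote = observable({ data: null });
// const fetchData = flow(function* fetchData() {
//   const response = yield fetch("/api/data"); // hypothetical endpoint
//   remote.data = yield response.json();       // each resumed step runs inside an action
// });
// const promise = fetchData();
// promise.cancel(); // rejects the promise with FlowCancellationError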
function interceptReads(thing, propOrHandler, handler) {
var target;
if (isObservableMap(thing) || isObservableArray(thing) || isObservableValue(thing)) {
target = getAdministration(thing);
} else if (isObservableObject(thing)) {
if ( !isStringish(propOrHandler)) {
return die("InterceptReads can only be used with a specific property, not with an object in general");
}
target = getAdministration(thing, propOrHandler);
} else {
return die("Expected observable map, object or array as first argument");
}
if ( target.dehancer !== undefined) {
return die("An intercept reader was already established");
}
target.dehancer = typeof propOrHandler === "function" ? propOrHandler : handler;
return function () {
target.dehancer = undefined;
};
}
function intercept(thing, propOrHandler, handler) {
if (isFunction(handler)) {
return interceptProperty(thing, propOrHandler, handler);
} else {
return interceptInterceptable(thing, propOrHandler);
}
}
function interceptInterceptable(thing, handler) {
return getAdministration(thing).intercept_(handler);
}
function interceptProperty(thing, property, handler) {
return getAdministration(thing, property).intercept_(handler);
}
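// Usage sketch for `intercept` on a single property (illustrative only, not executed by this
// bundle; `profile` is an assumed example value):
// const profile = observable({ age: 30 });
// const stopIntercept = intercept(profile, "age", function (change) {
//   if (change.newValue < 0) return null; // returning null drops the update
//   return change;
// });
// stopIntercept();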
function _isComputed(value, property) {
if (property === undefined) {
return isComputedValue(value);
}
if (isObservableObject(value) === false) {
return false;
}
if (!value[$mobx].values_.has(property)) {
return false;
}
var atom = getAtom(value, property);
return isComputedValue(atom);
}
function isComputed(value) {
if ( arguments.length > 1) {
return die("isComputed expects only 1 argument. Use isComputedProp to inspect the observability of a property");
}
return _isComputed(value);
}
function isComputedProp(value, propName) {
if ( !isStringish(propName)) {
return die("isComputed expected a property name as second argument");
}
return _isComputed(value, propName);
}
function _isObservable(value, property) {
if (!value) {
return false;
}
if (property !== undefined) {
if ( (isObservableMap(value) || isObservableArray(value))) {
return die("isObservable(object, propertyName) is not supported for arrays and maps. Use map.has or array.length instead.");
}
if (isObservableObject(value)) {
return value[$mobx].values_.has(property);
}
return false;
} // For first check, see #701
return isObservableObject(value) || !!value[$mobx] || isAtom(value) || isReaction(value) || isComputedValue(value);
}
function isObservable(value) {
if ( arguments.length !== 1) {
die("isObservable expects only 1 argument. Use isObservableProp to inspect the observability of a property");
}
return _isObservable(value);
}
function isObservableProp(value, propName) {
if ( !isStringish(propName)) {
return die("expected a property name as second argument");
}
return _isObservable(value, propName);
}
function keys(obj) {
if (isObservableObject(obj)) {
return obj[$mobx].keys_();
}
if (isObservableMap(obj) || isObservableSet(obj)) {
return Array.from(obj.keys());
}
if (isObservableArray(obj)) {
return obj.map(function (_, index) {
return index;
});
}
die(5);
}
function values(obj) {
if (isObservableObject(obj)) {
return keys(obj).map(function (key) {
return obj[key];
});
}
if (isObservableMap(obj)) {
return keys(obj).map(function (key) {
return obj.get(key);
});
}
if (isObservableSet(obj)) {
return Array.from(obj.values());
}
if (isObservableArray(obj)) {
return obj.slice();
}
die(6);
}
function entries(obj) {
if (isObservableObject(obj)) {
return keys(obj).map(function (key) {
return [key, obj[key]];
});
}
if (isObservableMap(obj)) {
return keys(obj).map(function (key) {
return [key, obj.get(key)];
});
}
if (isObservableSet(obj)) {
return Array.from(obj.entries());
}
if (isObservableArray(obj)) {
return obj.map(function (key, index) {
return [index, key];
});
}
die(7);
}
function set(obj, key, value) {
if (arguments.length === 2 && !isObservableSet(obj)) {
startBatch();
var _values = key;
try {
for (var _key in _values) {
set(obj, _key, _values[_key]);
}
} finally {
endBatch();
}
return;
}
if (isObservableObject(obj)) {
obj[$mobx].set_(key, value);
} else if (isObservableMap(obj)) {
obj.set(key, value);
} else if (isObservableSet(obj)) {
obj.add(key);
} else if (isObservableArray(obj)) {
if (typeof key !== "number") {
key = parseInt(key, 10);
}
if (key < 0) {
die("Invalid index: '" + key + "'");
}
startBatch();
if (key >= obj.length) {
obj.length = key + 1;
}
obj[key] = value;
endBatch();
} else {
die(8);
}
}
function remove(obj, key) {
if (isObservableObject(obj)) {
obj[$mobx].delete_(key);
} else if (isObservableMap(obj)) {
obj["delete"](key);
} else if (isObservableSet(obj)) {
obj["delete"](key);
} else if (isObservableArray(obj)) {
if (typeof key !== "number") {
key = parseInt(key, 10);
}
obj.splice(key, 1);
} else {
die(9);
}
}
function has(obj, key) {
if (isObservableObject(obj)) {
return obj[$mobx].has_(key);
} else if (isObservableMap(obj)) {
return obj.has(key);
} else if (isObservableSet(obj)) {
return obj.has(key);
} else if (isObservableArray(obj)) {
return key >= 0 && key < obj.length;
}
die(10);
}
function get(obj, key) {
if (!has(obj, key)) {
return undefined;
}
if (isObservableObject(obj)) {
return obj[$mobx].get_(key);
} else if (isObservableMap(obj)) {
return obj.get(key);
} else if (isObservableArray(obj)) {
return obj[key];
}
die(11);
}
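// Usage sketch (illustrative; assumes `observable`, `set`, `get`, `has`, `remove`, `keys`
// exported from this module):
//
//   const todo = observable({ title: "write docs" });
//   set(todo, "done", false); // adds a new observable property
//   has(todo, "done");        // true, and tracked when called inside a derivation
//   get(todo, "done");        // false
//   keys(todo);               // ["title", "done"]
//   remove(todo, "done");     // deletes the property and notifies observers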
function apiDefineProperty(obj, key, descriptor) {
if (isObservableObject(obj)) {
return obj[$mobx].defineProperty_(key, descriptor);
}
die(39);
}
function apiOwnKeys(obj) {
if (isObservableObject(obj)) {
return obj[$mobx].ownKeys_();
}
die(38);
}
function observe(thing, propOrCb, cbOrFire, fireImmediately) {
if (isFunction(cbOrFire)) {
return observeObservableProperty(thing, propOrCb, cbOrFire, fireImmediately);
} else {
return observeObservable(thing, propOrCb, cbOrFire);
}
}
function observeObservable(thing, listener, fireImmediately) {
return getAdministration(thing).observe_(listener, fireImmediately);
}
function observeObservableProperty(thing, property, listener, fireImmediately) {
return getAdministration(thing, property).observe_(listener, fireImmediately);
}
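// Usage sketch (illustrative; assumes `observable` and `observe` exported from this module):
//
//   const person = observable({ firstName: "Ada" });
//   const dispose = observe(person, change => {
//       // change: { type: "add" | "update" | "remove", name, newValue, oldValue, ... }
//       console.log(change.type, change.name);
//   });
//   person.firstName = "Grace"; // logs: "update firstName"
//   dispose();
//
//   // A single property can be observed too, optionally firing immediately:
//   observe(person, "firstName", change => console.log(change.newValue), true);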
function cache(map, key, value) {
map.set(key, value);
return value;
}
function toJSHelper(source, __alreadySeen) {
if (source == null || typeof source !== "object" || source instanceof Date || !isObservable(source)) {
return source;
}
if (isObservableValue(source) || isComputedValue(source)) {
return toJSHelper(source.get(), __alreadySeen);
}
if (__alreadySeen.has(source)) {
return __alreadySeen.get(source);
}
if (isObservableArray(source)) {
var res = cache(__alreadySeen, source, new Array(source.length));
source.forEach(function (value, idx) {
res[idx] = toJSHelper(value, __alreadySeen);
});
return res;
}
if (isObservableSet(source)) {
var _res = cache(__alreadySeen, source, new Set());
source.forEach(function (value) {
_res.add(toJSHelper(value, __alreadySeen));
});
return _res;
}
if (isObservableMap(source)) {
var _res2 = cache(__alreadySeen, source, new Map());
source.forEach(function (value, key) {
_res2.set(key, toJSHelper(value, __alreadySeen));
});
return _res2;
} else {
// must be observable object
var _res3 = cache(__alreadySeen, source, {});
apiOwnKeys(source).forEach(function (key) {
if (objectPrototype.propertyIsEnumerable.call(source, key)) {
_res3[key] = toJSHelper(source[key], __alreadySeen);
}
});
return _res3;
}
}
/**
 * Recursively converts an observable to its non-observable native counterpart.
 * It does NOT recurse into non-observables; these are left as they are, even if they contain observables.
 * Computed and other non-enumerable properties are completely ignored.
 * Complex scenarios require a custom solution, e.g. implementing `toJSON` or using the `serializr` library.
*/
function toJS(source, options) {
if ( options) {
die("toJS no longer supports options");
}
return toJSHelper(source, new Map());
}
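// Usage sketch (illustrative; assumes `observable` and `toJS` exported from this module):
//
//   const state = observable({ todos: [{ title: "test" }], tags: new Map([["a", 1]]) });
//   const plain = toJS(state);
//   // plain is a deep, non-observable copy: a plain object holding a plain array and a plain Map.
//   // Computed values and other non-enumerable properties are not included.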
function trace() {
var enterBreakPoint = false;
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
if (typeof args[args.length - 1] === "boolean") {
enterBreakPoint = args.pop();
}
var derivation = getAtomFromArgs(args);
if (!derivation) {
return die("'trace(break?)' can only be used inside a tracked computed value or a Reaction. Consider passing in the computed value or reaction explicitly");
}
if (derivation.isTracing_ === TraceMode.NONE) {
console.log("[mobx.trace] '" + derivation.name_ + "' tracing enabled");
}
derivation.isTracing_ = enterBreakPoint ? TraceMode.BREAK : TraceMode.LOG;
}
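// Usage sketch (illustrative; assumes `observable`, `computed`, `autorun`, `trace` exported from this module):
//
//   const base = observable.box(1);
//   const doubled = computed(() => {
//       trace();                    // logs which dependencies caused this computed to re-evaluate
//       return base.get() * 2;
//   });
//   autorun(() => {
//       trace(true);                // same, but also enters the debugger when the reaction is invalidated
//       console.log(doubled.get());
//   });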
function getAtomFromArgs(args) {
switch (args.length) {
case 0:
return globalState.trackingDerivation;
case 1:
return getAtom(args[0]);
case 2:
return getAtom(args[0], args[1]);
}
}
/**
* During a transaction no views are updated until the end of the transaction.
* The transaction will be run synchronously nonetheless.
*
* @param action a function that updates some reactive state
* @returns any value that was returned by the 'action' parameter.
*/
function transaction(action, thisArg) {
if (thisArg === void 0) {
thisArg = undefined;
}
startBatch();
try {
return action.apply(thisArg);
} finally {
endBatch();
}
}
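// Usage sketch (illustrative; assumes `observable`, `autorun`, `transaction` exported from this module):
//
//   const a = observable.box(1);
//   const b = observable.box(2);
//   autorun(() => console.log(a.get() + b.get())); // logs 3
//
//   transaction(() => {
//       a.set(10);
//       b.set(20);
//   }); // observers run once at the end of the batch: logs 30, never an intermediate 12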
function when(predicate, arg1, arg2) {
if (arguments.length === 1 || arg1 && typeof arg1 === "object") {
return whenPromise(predicate, arg1);
}
return _when(predicate, arg1, arg2 || {});
}
function _when(predicate, effect, opts) {
var timeoutHandle;
if (typeof opts.timeout === "number") {
var error = new Error("WHEN_TIMEOUT");
timeoutHandle = setTimeout(function () {
if (!disposer[$mobx].isDisposed_) {
disposer();
if (opts.onError) {
opts.onError(error);
} else {
throw error;
}
}
}, opts.timeout);
}
opts.name = opts.name || "When@" + getNextId() ;
var effectAction = createAction( opts.name + "-effect" , effect); // eslint-disable-next-line
var disposer = autorun(function (r) {
// predicate should not change state
var cond = allowStateChanges(false, predicate);
if (cond) {
r.dispose();
if (timeoutHandle) {
clearTimeout(timeoutHandle);
}
effectAction();
}
}, opts);
return disposer;
}
function whenPromise(predicate, opts) {
if ( opts && opts.onError) {
return die("the options 'onError' and 'promise' cannot be combined");
}
var cancel;
var res = new Promise(function (resolve, reject) {
var disposer = _when(predicate, resolve, _extends({}, opts, {
onError: reject
}));
cancel = function cancel() {
disposer();
reject(new Error("WHEN_CANCELLED"));
};
});
res.cancel = cancel;
return res;
}
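// Usage sketch (illustrative; assumes `observable` and `when` exported from this module):
//
//   const store = observable({ loaded: false });
//
//   // Callback form: the effect runs once, as soon as the predicate becomes true.
//   when(() => store.loaded, () => console.log("ready"));
//
//   // Promise form (no effect): supports a timeout and cancellation.
//   const p = when(() => store.loaded, { timeout: 5000 });
//   p.then(() => console.log("ready"), err => console.warn(err)); // rejects on timeout or p.cancel()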
function getAdm(target) {
return target[$mobx];
} // Optimization: we don't need the intermediate objects and could have a completely custom administration for DynamicObjects,
// and skip either the internal values map, or the base object with its property descriptors!
var objectProxyTraps = {
has: function has(target, name) {
if ( globalState.trackingDerivation) {
warnAboutProxyRequirement("detect new properties using the 'in' operator. Use 'has' from 'mobx' instead.");
}
return getAdm(target).has_(name);
},
get: function get(target, name) {
return getAdm(target).get_(name);
},
set: function set(target, name, value) {
var _getAdm$set_;
if (!isStringish(name)) {
return false;
}
if ( !getAdm(target).values_.has(name)) {
warnAboutProxyRequirement("add a new observable property through direct assignment. Use 'set' from 'mobx' instead.");
} // null (intercepted) -> true (success)
return (_getAdm$set_ = getAdm(target).set_(name, value, true)) != null ? _getAdm$set_ : true;
},
deleteProperty: function deleteProperty(target, name) {
var _getAdm$delete_;
{
warnAboutProxyRequirement("delete properties from an observable object. Use 'remove' from 'mobx' instead.");
}
if (!isStringish(name)) {
return false;
} // null (intercepted) -> true (success)
return (_getAdm$delete_ = getAdm(target).delete_(name, true)) != null ? _getAdm$delete_ : true;
},
defineProperty: function defineProperty(target, name, descriptor) {
var _getAdm$definePropert;
{
warnAboutProxyRequirement("define property on an observable object. Use 'defineProperty' from 'mobx' instead.");
} // null (intercepted) -> true (success)
return (_getAdm$definePropert = getAdm(target).defineProperty_(name, descriptor)) != null ? _getAdm$definePropert : true;
},
ownKeys: function ownKeys(target) {
if ( globalState.trackingDerivation) {
warnAboutProxyRequirement("iterate keys to detect added / removed properties. Use 'keys' from 'mobx' instead.");
}
return getAdm(target).ownKeys_();
},
preventExtensions: function preventExtensions(target) {
die(13);
}
};
function asDynamicObservableObject(target, options) {
var _target$$mobx, _target$$mobx$proxy_;
assertProxies();
target = asObservableObject(target, options);
return (_target$$mobx$proxy_ = (_target$$mobx = target[$mobx]).proxy_) != null ? _target$$mobx$proxy_ : _target$$mobx.proxy_ = new Proxy(target, objectProxyTraps);
}
function hasInterceptors(interceptable) {
return interceptable.interceptors_ !== undefined && interceptable.interceptors_.length > 0;
}
function registerInterceptor(interceptable, handler) {
var interceptors = interceptable.interceptors_ || (interceptable.interceptors_ = []);
interceptors.push(handler);
return once(function () {
var idx = interceptors.indexOf(handler);
if (idx !== -1) {
interceptors.splice(idx, 1);
}
});
}
function interceptChange(interceptable, change) {
var prevU = untrackedStart();
try {
// Interceptor can modify the array, copy it to avoid concurrent modification, see #1950
var interceptors = [].concat(interceptable.interceptors_ || []);
for (var i = 0, l = interceptors.length; i < l; i++) {
change = interceptors[i](change);
if (change && !change.type) {
die(14);
}
if (!change) {
break;
}
}
return change;
} finally {
untrackedEnd(prevU);
}
}
function hasListeners(listenable) {
return listenable.changeListeners_ !== undefined && listenable.changeListeners_.length > 0;
}
function registerListener(listenable, handler) {
var listeners = listenable.changeListeners_ || (listenable.changeListeners_ = []);
listeners.push(handler);
return once(function () {
var idx = listeners.indexOf(handler);
if (idx !== -1) {
listeners.splice(idx, 1);
}
});
}
function notifyListeners(listenable, change) {
var prevU = untrackedStart();
var listeners = listenable.changeListeners_;
if (!listeners) {
return;
}
listeners = listeners.slice();
for (var i = 0, l = listeners.length; i < l; i++) {
listeners[i](change);
}
untrackedEnd(prevU);
}
function makeObservable(target, annotations, options) {
var adm = asObservableObject(target, options)[$mobx];
startBatch();
try {
var _annotations;
if ("development" !== "production" && annotations && target[storedAnnotationsSymbol]) {
die("makeObservable second arg must be nullish when using decorators. Mixing @decorator syntax with annotations is not supported.");
} // Default to decorators
(_annotations = annotations) != null ? _annotations : annotations = collectStoredAnnotations(target); // Annotate
ownKeys(annotations).forEach(function (key) {
return adm.make_(key, annotations[key]);
});
} finally {
endBatch();
}
return target;
} // proto[keysSymbol] = new Set<PropertyKey>()
var keysSymbol = /*#__PURE__*/Symbol("mobx-keys");
function makeAutoObservable(target, overrides, options) {
{
if (!isPlainObject(target) && !isPlainObject(Object.getPrototypeOf(target))) {
die("'makeAutoObservable' can only be used for classes that don't have a superclass");
}
if (isObservableObject(target)) {
die("makeAutoObservable can only be used on objects not already made observable");
}
} // Optimization: avoid visiting protos
// Assumes that annotation.make_/.extend_ works the same for plain objects
if (isPlainObject(target)) {
return extendObservable(target, target, overrides, options);
}
var adm = asObservableObject(target, options)[$mobx]; // Optimization: cache keys on proto
  // Assumes makeAutoObservable can be called only once per object and can't be used in subclasses
if (!target[keysSymbol]) {
var proto = Object.getPrototypeOf(target);
var keys = new Set([].concat(ownKeys(target), ownKeys(proto)));
keys["delete"]("constructor");
keys["delete"]($mobx);
addHiddenProp(proto, keysSymbol, keys);
}
startBatch();
try {
target[keysSymbol].forEach(function (key) {
return adm.make_(key, // must pass "undefined" for { key: undefined }
!overrides ? true : key in overrides ? overrides[key] : true);
});
} finally {
endBatch();
}
return target;
}
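// Usage sketch (illustrative; assumes `makeAutoObservable`, `makeObservable`, `observable`,
// `computed` and `action` exported from this module):
//
//   class TodoStore {
//       todos = [];
//       constructor() {
//           // fields -> observable, getters -> computed, methods -> action; `report: false` opts a member out
//           makeAutoObservable(this, { report: false });
//       }
//       get unfinished() { return this.todos.filter(t => !t.done).length; }
//       add(title) { this.todos.push({ title: title, done: false }); }
//       report() { console.log(this.todos.length + " todos"); }
//   }
//
//   // Explicit equivalent inside the constructor:
//   //   makeObservable(this, { todos: observable, unfinished: computed, add: action });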
var SPLICE = "splice";
var UPDATE = "update";
var MAX_SPLICE_SIZE = 10000; // See e.g. https://github.com/mobxjs/mobx/issues/859
var arrayTraps = {
get: function get(target, name) {
var adm = target[$mobx];
if (name === $mobx) {
return adm;
}
if (name === "length") {
return adm.getArrayLength_();
}
if (typeof name === "string" && !isNaN(name)) {
return adm.get_(parseInt(name));
}
if (hasProp(arrayExtensions, name)) {
return arrayExtensions[name];
}
return target[name];
},
set: function set(target, name, value) {
var adm = target[$mobx];
if (name === "length") {
adm.setArrayLength_(value);
}
if (typeof name === "symbol" || isNaN(name)) {
target[name] = value;
} else {
// numeric string
adm.set_(parseInt(name), value);
}
return true;
},
preventExtensions: function preventExtensions() {
die(15);
}
};
var ObservableArrayAdministration = /*#__PURE__*/function () {
// this is the prop that gets proxied, so can't replace it!
function ObservableArrayAdministration(name, enhancer, owned_, legacyMode_) {
if (name === void 0) {
name = "ObservableArray@" + getNextId() ;
}
this.owned_ = void 0;
this.legacyMode_ = void 0;
this.atom_ = void 0;
this.values_ = [];
this.interceptors_ = void 0;
this.changeListeners_ = void 0;
this.enhancer_ = void 0;
this.dehancer = void 0;
this.proxy_ = void 0;
this.lastKnownLength_ = 0;
this.owned_ = owned_;
this.legacyMode_ = legacyMode_;
this.atom_ = new Atom(name);
this.enhancer_ = function (newV, oldV) {
return enhancer(newV, oldV, name + "[..]" );
};
}
var _proto = ObservableArrayAdministration.prototype;
_proto.dehanceValue_ = function dehanceValue_(value) {
if (this.dehancer !== undefined) {
return this.dehancer(value);
}
return value;
};
_proto.dehanceValues_ = function dehanceValues_(values) {
if (this.dehancer !== undefined && values.length > 0) {
return values.map(this.dehancer);
}
return values;
};
_proto.intercept_ = function intercept_(handler) {
return registerInterceptor(this, handler);
};
_proto.observe_ = function observe_(listener, fireImmediately) {
if (fireImmediately === void 0) {
fireImmediately = false;
}
if (fireImmediately) {
listener({
observableKind: "array",
object: this.proxy_,
debugObjectName: this.atom_.name_,
type: "splice",
index: 0,
added: this.values_.slice(),
addedCount: this.values_.length,
removed: [],
removedCount: 0
});
}
return registerListener(this, listener);
};
_proto.getArrayLength_ = function getArrayLength_() {
this.atom_.reportObserved();
return this.values_.length;
};
_proto.setArrayLength_ = function setArrayLength_(newLength) {
if (typeof newLength !== "number" || isNaN(newLength) || newLength < 0) {
die("Out of range: " + newLength);
}
var currentLength = this.values_.length;
if (newLength === currentLength) {
return;
} else if (newLength > currentLength) {
var newItems = new Array(newLength - currentLength);
for (var i = 0; i < newLength - currentLength; i++) {
newItems[i] = undefined;
      } // Array.fill is not available everywhere...
this.spliceWithArray_(currentLength, 0, newItems);
} else {
this.spliceWithArray_(newLength, currentLength - newLength);
}
};
_proto.updateArrayLength_ = function updateArrayLength_(oldLength, delta) {
if (oldLength !== this.lastKnownLength_) {
die(16);
}
this.lastKnownLength_ += delta;
if (this.legacyMode_ && delta > 0) {
reserveArrayBuffer(oldLength + delta + 1);
}
};
_proto.spliceWithArray_ = function spliceWithArray_(index, deleteCount, newItems) {
var _this = this;
checkIfStateModificationsAreAllowed(this.atom_);
var length = this.values_.length;
if (index === undefined) {
index = 0;
} else if (index > length) {
index = length;
} else if (index < 0) {
index = Math.max(0, length + index);
}
if (arguments.length === 1) {
deleteCount = length - index;
} else if (deleteCount === undefined || deleteCount === null) {
deleteCount = 0;
} else {
deleteCount = Math.max(0, Math.min(deleteCount, length - index));
}
if (newItems === undefined) {
newItems = EMPTY_ARRAY;
}
if (hasInterceptors(this)) {
var change = interceptChange(this, {
object: this.proxy_,
type: SPLICE,
index: index,
removedCount: deleteCount,
added: newItems
});
if (!change) {
return EMPTY_ARRAY;
}
deleteCount = change.removedCount;
newItems = change.added;
}
newItems = newItems.length === 0 ? newItems : newItems.map(function (v) {
return _this.enhancer_(v, undefined);
});
if (this.legacyMode_ || "development" !== "production") {
var lengthDelta = newItems.length - deleteCount;
this.updateArrayLength_(length, lengthDelta); // checks if internal array wasn't modified
}
var res = this.spliceItemsIntoValues_(index, deleteCount, newItems);
if (deleteCount !== 0 || newItems.length !== 0) {
this.notifyArraySplice_(index, newItems, res);
}
return this.dehanceValues_(res);
};
_proto.spliceItemsIntoValues_ = function spliceItemsIntoValues_(index, deleteCount, newItems) {
if (newItems.length < MAX_SPLICE_SIZE) {
var _this$values_;
return (_this$values_ = this.values_).splice.apply(_this$values_, [index, deleteCount].concat(newItems));
} else {
// The items removed by the splice
      var res = this.values_.slice(index, index + deleteCount); // The items that should remain at the end of the array
var oldItems = this.values_.slice(index + deleteCount); // New length is the previous length + addition count - deletion count
this.values_.length += newItems.length - deleteCount;
for (var i = 0; i < newItems.length; i++) {
this.values_[index + i] = newItems[i];
}
for (var _i = 0; _i < oldItems.length; _i++) {
this.values_[index + newItems.length + _i] = oldItems[_i];
}
return res;
}
};
_proto.notifyArrayChildUpdate_ = function notifyArrayChildUpdate_(index, newValue, oldValue) {
var notifySpy = !this.owned_ && isSpyEnabled();
var notify = hasListeners(this);
var change = notify || notifySpy ? {
observableKind: "array",
object: this.proxy_,
type: UPDATE,
debugObjectName: this.atom_.name_,
index: index,
newValue: newValue,
oldValue: oldValue
    } : null; // The reason this is on the right-hand side (and not above) is that the uglifier can then drop it, so it won't
    // cause any runtime overhead in development mode without NODE_ENV set, unless spying is enabled
if ( notifySpy) {
spyReportStart(change);
}
this.atom_.reportChanged();
if (notify) {
notifyListeners(this, change);
}
if ( notifySpy) {
spyReportEnd();
}
};
_proto.notifyArraySplice_ = function notifyArraySplice_(index, added, removed) {
var notifySpy = !this.owned_ && isSpyEnabled();
var notify = hasListeners(this);
var change = notify || notifySpy ? {
observableKind: "array",
object: this.proxy_,
debugObjectName: this.atom_.name_,
type: SPLICE,
index: index,
removed: removed,
added: added,
removedCount: removed.length,
addedCount: added.length
} : null;
if ( notifySpy) {
spyReportStart(change);
}
this.atom_.reportChanged(); // conform: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/observe
if (notify) {
notifyListeners(this, change);
}
if ( notifySpy) {
spyReportEnd();
}
};
_proto.get_ = function get_(index) {
if (index < this.values_.length) {
this.atom_.reportObserved();
return this.dehanceValue_(this.values_[index]);
}
console.warn( "[mobx] Out of bounds read: " + index );
};
_proto.set_ = function set_(index, newValue) {
var values = this.values_;
if (index < values.length) {
// update at index in range
checkIfStateModificationsAreAllowed(this.atom_);
var oldValue = values[index];
if (hasInterceptors(this)) {
var change = interceptChange(this, {
type: UPDATE,
object: this.proxy_,
index: index,
newValue: newValue
});
if (!change) {
return;
}
newValue = change.newValue;
}
newValue = this.enhancer_(newValue, oldValue);
var changed = newValue !== oldValue;
if (changed) {
values[index] = newValue;
this.notifyArrayChildUpdate_(index, newValue, oldValue);
}
} else if (index === values.length) {
// add a new item
this.spliceWithArray_(index, 0, [newValue]);
} else {
// out of bounds
die(17, index, values.length);
}
};
return ObservableArrayAdministration;
}();
function createObservableArray(initialValues, enhancer, name, owned) {
if (name === void 0) {
name = "ObservableArray@" + getNextId() ;
}
if (owned === void 0) {
owned = false;
}
assertProxies();
var adm = new ObservableArrayAdministration(name, enhancer, owned, false);
addHiddenFinalProp(adm.values_, $mobx, adm);
var proxy = new Proxy(adm.values_, arrayTraps);
adm.proxy_ = proxy;
if (initialValues && initialValues.length) {
var prev = allowStateChangesStart(true);
adm.spliceWithArray_(0, 0, initialValues);
allowStateChangesEnd(prev);
}
return proxy;
} // eslint-disable-next-line
var arrayExtensions = {
clear: function clear() {
return this.splice(0);
},
replace: function replace(newItems) {
var adm = this[$mobx];
return adm.spliceWithArray_(0, adm.values_.length, newItems);
},
// Used by JSON.stringify
toJSON: function toJSON() {
return this.slice();
},
/*
   * Functions that alter the internal structure of the array (based on lib.es6.d.ts).
   * Since these functions alter the inner structure of the array, they have side effects.
   * Because they have side effects, they should not be used in computed functions,
   * and for that reason they do not call dependencyState.notifyObserved
*/
splice: function splice(index, deleteCount) {
for (var _len = arguments.length, newItems = new Array(_len > 2 ? _len - 2 : 0), _key = 2; _key < _len; _key++) {
newItems[_key - 2] = arguments[_key];
}
var adm = this[$mobx];
switch (arguments.length) {
case 0:
return [];
case 1:
return adm.spliceWithArray_(index);
case 2:
return adm.spliceWithArray_(index, deleteCount);
}
return adm.spliceWithArray_(index, deleteCount, newItems);
},
spliceWithArray: function spliceWithArray(index, deleteCount, newItems) {
return this[$mobx].spliceWithArray_(index, deleteCount, newItems);
},
push: function push() {
var adm = this[$mobx];
for (var _len2 = arguments.length, items = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
items[_key2] = arguments[_key2];
}
adm.spliceWithArray_(adm.values_.length, 0, items);
return adm.values_.length;
},
pop: function pop() {
return this.splice(Math.max(this[$mobx].values_.length - 1, 0), 1)[0];
},
shift: function shift() {
return this.splice(0, 1)[0];
},
unshift: function unshift() {
var adm = this[$mobx];
for (var _len3 = arguments.length, items = new Array(_len3), _key3 = 0; _key3 < _len3; _key3++) {
items[_key3] = arguments[_key3];
}
adm.spliceWithArray_(0, 0, items);
return adm.values_.length;
},
reverse: function reverse() {
// reverse by default mutates in place before returning the result
// which makes it both a 'derivation' and a 'mutation'.
if (globalState.trackingDerivation) {
die(37, "reverse");
}
this.replace(this.slice().reverse());
return this;
},
sort: function sort() {
// sort by default mutates in place before returning the result
// which goes against all good practices. Let's not change the array in place!
if (globalState.trackingDerivation) {
die(37, "sort");
}
var copy = this.slice();
copy.sort.apply(copy, arguments);
this.replace(copy);
return this;
},
remove: function remove(value) {
var adm = this[$mobx];
var idx = adm.dehanceValues_(adm.values_).indexOf(value);
if (idx > -1) {
this.splice(idx, 1);
return true;
}
return false;
}
};
/**
 * Wrap functions from Array.prototype.
 * Without this everything still works, but these wrappers are
 * faster because they operate on the unproxied values.
*/
addArrayExtension("concat", simpleFunc);
addArrayExtension("flat", simpleFunc);
addArrayExtension("includes", simpleFunc);
addArrayExtension("indexOf", simpleFunc);
addArrayExtension("join", simpleFunc);
addArrayExtension("lastIndexOf", simpleFunc);
addArrayExtension("slice", simpleFunc);
addArrayExtension("toString", simpleFunc);
addArrayExtension("toLocaleString", simpleFunc); // map
addArrayExtension("every", mapLikeFunc);
addArrayExtension("filter", mapLikeFunc);
addArrayExtension("find", mapLikeFunc);
addArrayExtension("findIndex", mapLikeFunc);
addArrayExtension("flatMap", mapLikeFunc);
addArrayExtension("forEach", mapLikeFunc);
addArrayExtension("map", mapLikeFunc);
addArrayExtension("some", mapLikeFunc); // reduce
addArrayExtension("reduce", reduceLikeFunc);
addArrayExtension("reduceRight", reduceLikeFunc);
function addArrayExtension(funcName, funcFactory) {
if (typeof Array.prototype[funcName] === "function") {
arrayExtensions[funcName] = funcFactory(funcName);
}
} // Report and delegate to dehanced array
function simpleFunc(funcName) {
return function () {
var adm = this[$mobx];
adm.atom_.reportObserved();
var dehancedValues = adm.dehanceValues_(adm.values_);
return dehancedValues[funcName].apply(dehancedValues, arguments);
};
} // Make sure callbacks receive the correct array arg #2326
function mapLikeFunc(funcName) {
return function (callback, thisArg) {
var _this2 = this;
var adm = this[$mobx];
adm.atom_.reportObserved();
var dehancedValues = adm.dehanceValues_(adm.values_);
return dehancedValues[funcName](function (element, index) {
return callback.call(thisArg, element, index, _this2);
});
};
} // Make sure callbacks receive the correct array arg #2326
function reduceLikeFunc(funcName) {
return function () {
var _this3 = this;
var adm = this[$mobx];
adm.atom_.reportObserved();
var dehancedValues = adm.dehanceValues_(adm.values_); // #2432 - reduce behavior depends on arguments.length
var callback = arguments[0];
arguments[0] = function (accumulator, currentValue, index) {
return callback(accumulator, currentValue, index, _this3);
};
return dehancedValues[funcName].apply(dehancedValues, arguments);
};
}
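// Usage sketch (illustrative; assumes `observable` exported from this module):
//
//   const xs = observable([1, 2, 3]);
//   xs.map(function (value, index, array) {
//       // `array` is the observable proxy itself (see #2326), while iteration
//       // runs over the dehanced internal values for speed.
//       return value * 2;
//   });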
var isObservableArrayAdministration = /*#__PURE__*/createInstanceofPredicate("ObservableArrayAdministration", ObservableArrayAdministration);
function isObservableArray(thing) {
return isObject(thing) && isObservableArrayAdministration(thing[$mobx]);
}
var _Symbol$iterator, _Symbol$toStringTag;
var ObservableMapMarker = {};
var ADD = "add";
var DELETE = "delete"; // just extend Map? See also https://gist.github.com/nestharus/13b4d74f2ef4a2f4357dbd3fc23c1e54
// But: https://github.com/mobxjs/mobx/issues/1556
_Symbol$iterator = Symbol.iterator;
_Symbol$toStringTag = Symbol.toStringTag;
var ObservableMap = /*#__PURE__*/function (_Symbol$iterator2, _Symbol$toStringTag2) {
// hasMap, not hashMap >-).
function ObservableMap(initialData, enhancer_, name_) {
var _this = this;
if (enhancer_ === void 0) {
enhancer_ = deepEnhancer;
}
if (name_ === void 0) {
name_ = "ObservableMap@" + getNextId() ;
}
this.enhancer_ = void 0;
this.name_ = void 0;
this[$mobx] = ObservableMapMarker;
this.data_ = void 0;
this.hasMap_ = void 0;
this.keysAtom_ = void 0;
this.interceptors_ = void 0;
this.changeListeners_ = void 0;
this.dehancer = void 0;
this.enhancer_ = enhancer_;
this.name_ = name_;
if (!isFunction(Map)) {
die(18);
}
this.keysAtom_ = createAtom( this.name_ + ".keys()" );
this.data_ = new Map();
this.hasMap_ = new Map();
allowStateChanges(true, function () {
_this.merge(initialData);
});
}
var _proto = ObservableMap.prototype;
_proto.has_ = function has_(key) {
return this.data_.has(key);
};
_proto.has = function has(key) {
var _this2 = this;
if (!globalState.trackingDerivation) {
return this.has_(key);
}
var entry = this.hasMap_.get(key);
if (!entry) {
var newEntry = entry = new ObservableValue(this.has_(key), referenceEnhancer, this.name_ + "." + stringifyKey(key) + "?" , false);
this.hasMap_.set(key, newEntry);
onBecomeUnobserved(newEntry, function () {
return _this2.hasMap_["delete"](key);
});
}
return entry.get();
};
_proto.set = function set(key, value) {
var hasKey = this.has_(key);
if (hasInterceptors(this)) {
var change = interceptChange(this, {
type: hasKey ? UPDATE : ADD,
object: this,
newValue: value,
name: key
});
if (!change) {
return this;
}
value = change.newValue;
}
if (hasKey) {
this.updateValue_(key, value);
} else {
this.addValue_(key, value);
}
return this;
};
_proto["delete"] = function _delete(key) {
var _this3 = this;
checkIfStateModificationsAreAllowed(this.keysAtom_);
if (hasInterceptors(this)) {
var change = interceptChange(this, {
type: DELETE,
object: this,
name: key
});
if (!change) {
return false;
}
}
if (this.has_(key)) {
var notifySpy = isSpyEnabled();
var notify = hasListeners(this);
var _change = notify || notifySpy ? {
observableKind: "map",
debugObjectName: this.name_,
type: DELETE,
object: this,
oldValue: this.data_.get(key).value_,
name: key
} : null;
if ( notifySpy) {
spyReportStart(_change);
} // TODO fix type
transaction(function () {
var _this3$hasMap_$get;
_this3.keysAtom_.reportChanged();
(_this3$hasMap_$get = _this3.hasMap_.get(key)) == null ? void 0 : _this3$hasMap_$get.setNewValue_(false);
var observable = _this3.data_.get(key);
observable.setNewValue_(undefined);
_this3.data_["delete"](key);
});
if (notify) {
notifyListeners(this, _change);
}
if ( notifySpy) {
spyReportEnd();
}
return true;
}
return false;
};
_proto.updateValue_ = function updateValue_(key, newValue) {
var observable = this.data_.get(key);
newValue = observable.prepareNewValue_(newValue);
if (newValue !== globalState.UNCHANGED) {
var notifySpy = isSpyEnabled();
var notify = hasListeners(this);
var change = notify || notifySpy ? {
observableKind: "map",
debugObjectName: this.name_,
type: UPDATE,
object: this,
oldValue: observable.value_,
name: key,
newValue: newValue
} : null;
if ( notifySpy) {
spyReportStart(change);
} // TODO fix type
observable.setNewValue_(newValue);
if (notify) {
notifyListeners(this, change);
}
if ( notifySpy) {
spyReportEnd();
}
}
};
_proto.addValue_ = function addValue_(key, newValue) {
var _this4 = this;
checkIfStateModificationsAreAllowed(this.keysAtom_);
transaction(function () {
var _this4$hasMap_$get;
var observable = new ObservableValue(newValue, _this4.enhancer_, _this4.name_ + "." + stringifyKey(key) , false);
_this4.data_.set(key, observable);
newValue = observable.value_; // value might have been changed
(_this4$hasMap_$get = _this4.hasMap_.get(key)) == null ? void 0 : _this4$hasMap_$get.setNewValue_(true);
_this4.keysAtom_.reportChanged();
});
var notifySpy = isSpyEnabled();
var notify = hasListeners(this);
var change = notify || notifySpy ? {
observableKind: "map",
debugObjectName: this.name_,
type: ADD,
object: this,
name: key,
newValue: newValue
} : null;
if ( notifySpy) {
spyReportStart(change);
} // TODO fix type
if (notify) {
notifyListeners(this, change);
}
if ( notifySpy) {
spyReportEnd();
}
};
_proto.get = function get(key) {
if (this.has(key)) {
return this.dehanceValue_(this.data_.get(key).get());
}
return this.dehanceValue_(undefined);
};
_proto.dehanceValue_ = function dehanceValue_(value) {
if (this.dehancer !== undefined) {
return this.dehancer(value);
}
return value;
};
_proto.keys = function keys() {
this.keysAtom_.reportObserved();
return this.data_.keys();
};
_proto.values = function values() {
var self = this;
var keys = this.keys();
return makeIterable({
next: function next() {
var _keys$next = keys.next(),
done = _keys$next.done,
value = _keys$next.value;
return {
done: done,
value: done ? undefined : self.get(value)
};
}
});
};
_proto.entries = function entries() {
var self = this;
var keys = this.keys();
return makeIterable({
next: function next() {
var _keys$next2 = keys.next(),
done = _keys$next2.done,
value = _keys$next2.value;
return {
done: done,
value: done ? undefined : [value, self.get(value)]
};
}
});
};
_proto[_Symbol$iterator2] = function () {
return this.entries();
};
_proto.forEach = function forEach(callback, thisArg) {
for (var _iterator = _createForOfIteratorHelperLoose(this), _step; !(_step = _iterator()).done;) {
var _step$value = _step.value,
key = _step$value[0],
value = _step$value[1];
callback.call(thisArg, value, key, this);
}
}
/** Merge another object into this object, returns this. */
;
_proto.merge = function merge(other) {
var _this5 = this;
if (isObservableMap(other)) {
other = new Map(other);
}
transaction(function () {
if (isPlainObject(other)) {
getPlainObjectKeys(other).forEach(function (key) {
return _this5.set(key, other[key]);
});
} else if (Array.isArray(other)) {
other.forEach(function (_ref) {
var key = _ref[0],
value = _ref[1];
return _this5.set(key, value);
});
} else if (isES6Map(other)) {
if (other.constructor !== Map) {
die(19, other);
}
other.forEach(function (value, key) {
return _this5.set(key, value);
});
} else if (other !== null && other !== undefined) {
die(20, other);
}
});
return this;
};
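  // Usage sketch (illustrative; assumes `observable` exported from this module):
  //
  //   const m = observable.map({ a: 1 });
  //   m.merge({ b: 2 });             // plain object: keys are copied in
  //   m.merge([["c", 3]]);           // array of [key, value] entries
  //   m.merge(new Map([["d", 4]]));  // plain ES6 Map (Map subclasses are rejected, see die(19) above)
  //   // m now holds the keys a, b, c and d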
_proto.clear = function clear() {
var _this6 = this;
transaction(function () {
untracked(function () {
for (var _iterator2 = _createForOfIteratorHelperLoose(_this6.keys()), _step2; !(_step2 = _iterator2()).done;) {
var key = _step2.value;
_this6["delete"](key);
}
});
});
};
_proto.replace = function replace(values) {
var _this7 = this;
// Implementation requirements:
// - respect ordering of replacement map
// - allow interceptors to run and potentially prevent individual operations
// - don't recreate observables that already exist in original map (so we don't destroy existing subscriptions)
    // - don't _keysAtom.reportChanged if the keys of the resulting map are identical (order matters!)
// - note that result map may differ from replacement map due to the interceptors
transaction(function () {
// Convert to map so we can do quick key lookups
var replacementMap = convertToMap(values);
var orderedData = new Map(); // Used for optimization
var keysReportChangedCalled = false; // Delete keys that don't exist in replacement map
// if the key deletion is prevented by interceptor
// add entry at the beginning of the result map
for (var _iterator3 = _createForOfIteratorHelperLoose(_this7.data_.keys()), _step3; !(_step3 = _iterator3()).done;) {
var key = _step3.value;
// Concurrently iterating/deleting keys
// iterator should handle this correctly
if (!replacementMap.has(key)) {
var deleted = _this7["delete"](key); // Was the key removed?
if (deleted) {
// _keysAtom.reportChanged() was already called
keysReportChangedCalled = true;
} else {
// Delete prevented by interceptor
var value = _this7.data_.get(key);
orderedData.set(key, value);
}
}
} // Merge entries
for (var _iterator4 = _createForOfIteratorHelperLoose(replacementMap.entries()), _step4; !(_step4 = _iterator4()).done;) {
var _step4$value = _step4.value,
_key = _step4$value[0],
_value = _step4$value[1];
// We will want to know whether a new key is added
var keyExisted = _this7.data_.has(_key); // Add or update value
_this7.set(_key, _value); // The addition could have been prevent by interceptor
if (_this7.data_.has(_key)) {
// The update could have been prevented by interceptor
// and also we want to preserve existing values
// so use value from _data map (instead of replacement map)
var _value2 = _this7.data_.get(_key);
orderedData.set(_key, _value2); // Was a new key added?
if (!keyExisted) {
// _keysAtom.reportChanged() was already called
keysReportChangedCalled = true;
}
}
} // Check for possible key order change
if (!keysReportChangedCalled) {
if (_this7.data_.size !== orderedData.size) {
// If size differs, keys are definitely modified
_this7.keysAtom_.reportChanged();
} else {
var iter1 = _this7.data_.keys();
var iter2 = orderedData.keys();
var next1 = iter1.next();
var next2 = iter2.next();
while (!next1.done) {
if (next1.value !== next2.value) {
_this7.keysAtom_.reportChanged();
break;
}
next1 = iter1.next();
next2 = iter2.next();
}
}
} // Use correctly ordered map
_this7.data_ = orderedData;
});
return this;
};
_proto.toString = function toString() {
return "[object ObservableMap]";
};
_proto.toJSON = function toJSON() {
return Array.from(this);
};
/**
* Observes this object. Triggers for the events 'add', 'update' and 'delete'.
* See: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/observe
* for callback details
*/
_proto.observe_ = function observe_(listener, fireImmediately) {
if ( fireImmediately === true) {
die("`observe` doesn't support fireImmediately=true in combination with maps.");
}
return registerListener(this, listener);
};
_proto.intercept_ = function intercept_(handler) {
return registerInterceptor(this, handler);
};
_createClass(ObservableMap, [{
key: "size",
get: function get() {
this.keysAtom_.reportObserved();
return this.data_.size;
}
}, {
key: _Symbol$toStringTag2,
get: function get() {
return "Map";
}
}]);
return ObservableMap;
}(_Symbol$iterator, _Symbol$toStringTag); // eslint-disable-next-line
var isObservableMap = /*#__PURE__*/createInstanceofPredicate("ObservableMap", ObservableMap);
function convertToMap(dataStructure) {
if (isES6Map(dataStructure) || isObservableMap(dataStructure)) {
return dataStructure;
} else if (Array.isArray(dataStructure)) {
return new Map(dataStructure);
} else if (isPlainObject(dataStructure)) {
var map = new Map();
for (var key in dataStructure) {
map.set(key, dataStructure[key]);
}
return map;
} else {
return die(21, dataStructure);
}
}
var _Symbol$iterator$1, _Symbol$toStringTag$1;
var ObservableSetMarker = {};
_Symbol$iterator$1 = Symbol.iterator;
_Symbol$toStringTag$1 = Symbol.toStringTag;
var ObservableSet = /*#__PURE__*/function (_Symbol$iterator2, _Symbol$toStringTag2) {
function ObservableSet(initialData, enhancer, name_) {
if (enhancer === void 0) {
enhancer = deepEnhancer;
}
if (name_ === void 0) {
name_ = "ObservableSet@" + getNextId() ;
}
this.name_ = void 0;
this[$mobx] = ObservableSetMarker;
this.data_ = new Set();
this.atom_ = void 0;
this.changeListeners_ = void 0;
this.interceptors_ = void 0;
this.dehancer = void 0;
this.enhancer_ = void 0;
this.name_ = name_;
if (!isFunction(Set)) {
die(22);
}
this.atom_ = createAtom(this.name_);
this.enhancer_ = function (newV, oldV) {
return enhancer(newV, oldV, name_);
};
if (initialData) {
this.replace(initialData);
}
}
var _proto = ObservableSet.prototype;
_proto.dehanceValue_ = function dehanceValue_(value) {
if (this.dehancer !== undefined) {
return this.dehancer(value);
}
return value;
};
_proto.clear = function clear() {
var _this = this;
transaction(function () {
untracked(function () {
for (var _iterator = _createForOfIteratorHelperLoose(_this.data_.values()), _step; !(_step = _iterator()).done;) {
var value = _step.value;
_this["delete"](value);
}
});
});
};
_proto.forEach = function forEach(callbackFn, thisArg) {
for (var _iterator2 = _createForOfIteratorHelperLoose(this), _step2; !(_step2 = _iterator2()).done;) {
var value = _step2.value;
callbackFn.call(thisArg, value, value, this);
}
};
_proto.add = function add(value) {
var _this2 = this;
checkIfStateModificationsAreAllowed(this.atom_);
if (hasInterceptors(this)) {
var change = interceptChange(this, {
type: ADD,
object: this,
newValue: value
});
if (!change) {
return this;
} // ideally, value = change.value would be done here, so that values can be
      // changed by the interceptor. The same applies to other Set and Map APIs.
}
if (!this.has(value)) {
transaction(function () {
_this2.data_.add(_this2.enhancer_(value, undefined));
_this2.atom_.reportChanged();
});
var notifySpy = isSpyEnabled();
var notify = hasListeners(this);
var _change = notify || notifySpy ? {
observableKind: "set",
debugObjectName: this.name_,
type: ADD,
object: this,
newValue: value
} : null;
if (notifySpy && "development" !== "production") {
spyReportStart(_change);
}
if (notify) {
notifyListeners(this, _change);
}
if (notifySpy && "development" !== "production") {
spyReportEnd();
}
}
return this;
};
_proto["delete"] = function _delete(value) {
var _this3 = this;
if (hasInterceptors(this)) {
var change = interceptChange(this, {
type: DELETE,
object: this,
oldValue: value
});
if (!change) {
return false;
}
}
if (this.has(value)) {
var notifySpy = isSpyEnabled();
var notify = hasListeners(this);
var _change2 = notify || notifySpy ? {
observableKind: "set",
debugObjectName: this.name_,
type: DELETE,
object: this,
oldValue: value
} : null;
if (notifySpy && "development" !== "production") {
spyReportStart(_change2);
}
transaction(function () {
_this3.atom_.reportChanged();
_this3.data_["delete"](value);
});
if (notify) {
notifyListeners(this, _change2);
}
if (notifySpy && "development" !== "production") {
spyReportEnd();
}
return true;
}
return false;
};
_proto.has = function has(value) {
this.atom_.reportObserved();
return this.data_.has(this.dehanceValue_(value));
};
_proto.entries = function entries() {
var nextIndex = 0;
var keys = Array.from(this.keys());
var values = Array.from(this.values());
return makeIterable({
next: function next() {
var index = nextIndex;
nextIndex += 1;
return index < values.length ? {
value: [keys[index], values[index]],
done: false
} : {
done: true
};
}
});
};
_proto.keys = function keys() {
return this.values();
};
_proto.values = function values() {
this.atom_.reportObserved();
var self = this;
var nextIndex = 0;
var observableValues = Array.from(this.data_.values());
return makeIterable({
next: function next() {
return nextIndex < observableValues.length ? {
value: self.dehanceValue_(observableValues[nextIndex++]),
done: false
} : {
done: true
};
}
});
};
_proto.replace = function replace(other) {
var _this4 = this;
if (isObservableSet(other)) {
other = new Set(other);
}
transaction(function () {
if (Array.isArray(other)) {
_this4.clear();
other.forEach(function (value) {
return _this4.add(value);
});
} else if (isES6Set(other)) {
_this4.clear();
other.forEach(function (value) {
return _this4.add(value);
});
} else if (other !== null && other !== undefined) {
die("Cannot initialize set from " + other);
}
});
return this;
};
_proto.observe_ = function observe_(listener, fireImmediately) {
// ... 'fireImmediately' could also be true?
if ( fireImmediately === true) {
die("`observe` doesn't support fireImmediately=true in combination with sets.");
}
return registerListener(this, listener);
};
_proto.intercept_ = function intercept_(handler) {
return registerInterceptor(this, handler);
};
_proto.toJSON = function toJSON() {
return Array.from(this);
};
_proto.toString = function toString() {
return "[object ObservableSet]";
};
_proto[_Symbol$iterator2] = function () {
return this.values();
};
_createClass(ObservableSet, [{
key: "size",
get: function get() {
this.atom_.reportObserved();
return this.data_.size;
}
}, {
key: _Symbol$toStringTag2,
get: function get() {
return "Set";
}
}]);
return ObservableSet;
}(_Symbol$iterator$1, _Symbol$toStringTag$1); // eslint-disable-next-line
var isObservableSet = /*#__PURE__*/createInstanceofPredicate("ObservableSet", ObservableSet);
var descriptorCache = /*#__PURE__*/Object.create(null);
var REMOVE = "remove";
var ObservableObjectAdministration = /*#__PURE__*/function () {
  function ObservableObjectAdministration(target_, values_, name_, // Used any time an annotation is not explicitly provided
defaultAnnotation_) {
if (values_ === void 0) {
values_ = new Map();
}
if (defaultAnnotation_ === void 0) {
defaultAnnotation_ = autoAnnotation;
}
this.target_ = void 0;
this.values_ = void 0;
this.name_ = void 0;
this.defaultAnnotation_ = void 0;
this.keysAtom_ = void 0;
this.changeListeners_ = void 0;
this.interceptors_ = void 0;
this.proxy_ = void 0;
this.isPlainObject_ = void 0;
this.appliedAnnotations_ = void 0;
this.pendingKeys_ = void 0;
this.target_ = target_;
this.values_ = values_;
this.name_ = name_;
this.defaultAnnotation_ = defaultAnnotation_;
this.keysAtom_ = new Atom( this.name_ + ".keys" ); // Optimization: we use this frequently
this.isPlainObject_ = isPlainObject(this.target_);
if ( !isAnnotation(this.defaultAnnotation_)) {
die("defaultAnnotation must be valid annotation");
}
{
// Prepare structure for tracking which fields were already annotated
this.appliedAnnotations_ = {};
}
}
var _proto = ObservableObjectAdministration.prototype;
_proto.getObservablePropValue_ = function getObservablePropValue_(key) {
return this.values_.get(key).get();
};
_proto.setObservablePropValue_ = function setObservablePropValue_(key, newValue) {
var observable = this.values_.get(key);
if (observable instanceof ComputedValue) {
observable.set(newValue);
return true;
} // intercept
if (hasInterceptors(this)) {
var change = interceptChange(this, {
type: UPDATE,
object: this.proxy_ || this.target_,
name: key,
newValue: newValue
});
if (!change) {
return null;
}
newValue = change.newValue;
}
newValue = observable.prepareNewValue_(newValue); // notify spy & observers
if (newValue !== globalState.UNCHANGED) {
var notify = hasListeners(this);
var notifySpy = isSpyEnabled();
var _change = notify || notifySpy ? {
type: UPDATE,
observableKind: "object",
debugObjectName: this.name_,
object: this.proxy_ || this.target_,
oldValue: observable.value_,
name: key,
newValue: newValue
} : null;
if ( notifySpy) {
spyReportStart(_change);
}
observable.setNewValue_(newValue);
if (notify) {
notifyListeners(this, _change);
}
if ( notifySpy) {
spyReportEnd();
}
}
return true;
};
_proto.get_ = function get_(key) {
if (globalState.trackingDerivation && !hasProp(this.target_, key)) {
// Key doesn't exist yet, subscribe for it in case it's added later
this.has_(key);
}
return this.target_[key];
}
/**
* @param {PropertyKey} key
* @param {any} value
* @param {Annotation|boolean} annotation true - use default annotation, false - copy as is
* @param {boolean} proxyTrap whether it's called from proxy trap
* @returns {boolean|null} true on success, false on failure (proxyTrap + non-configurable), null when cancelled by interceptor
*/
;
_proto.set_ = function set_(key, value, proxyTrap) {
if (proxyTrap === void 0) {
proxyTrap = false;
}
// Don't use .has(key) - we care about own
if (hasProp(this.target_, key)) {
// Existing prop
if (this.values_.has(key)) {
// Observable (can be intercepted)
return this.setObservablePropValue_(key, value);
} else if (proxyTrap) {
// Non-observable - proxy
return Reflect.set(this.target_, key, value);
} else {
// Non-observable
this.target_[key] = value;
return true;
}
} else {
// New prop
return this.extend_(key, {
value: value,
enumerable: true,
writable: true,
configurable: true
}, this.defaultAnnotation_, proxyTrap);
}
} // Trap for "in"
;
_proto.has_ = function has_(key) {
if (!globalState.trackingDerivation) {
// Skip key subscription outside derivation
return key in this.target_;
}
this.pendingKeys_ || (this.pendingKeys_ = new Map());
var entry = this.pendingKeys_.get(key);
if (!entry) {
entry = new ObservableValue(key in this.target_, referenceEnhancer, this.name_ + "." + stringifyKey(key) + "?" , false);
this.pendingKeys_.set(key, entry);
}
return entry.get();
}
/**
* @param {PropertyKey} key
* @param {Annotation|boolean} annotation true - use default annotation, false - ignore prop
*/
;
_proto.make_ = function make_(key, annotation) {
if (annotation === true) {
annotation = this.defaultAnnotation_;
}
if (annotation === false) {
return;
}
assertAnnotable(this, annotation, key);
if (!(key in this.target_)) {
var _this$target_$storedA;
// Throw on missing key, except for decorators:
// Decorator annotations are collected from whole prototype chain.
// When called from super() some props may not exist yet.
// However we don't have to worry about missing prop,
// because the decorator must have been applied to something.
if ((_this$target_$storedA = this.target_[storedAnnotationsSymbol]) != null && _this$target_$storedA[key]) {
return; // will be annotated by subclass constructor
} else {
die(1, annotation.annotationType_, this.name_ + "." + key.toString());
}
}
var source = this.target_;
while (source && source !== objectPrototype) {
var descriptor = getDescriptor(source, key);
if (descriptor) {
var outcome = annotation.make_(this, key, descriptor, source);
if (outcome === 0
/* Cancel */
) {
return;
}
if (outcome === 1
/* Break */
) {
break;
}
}
source = Object.getPrototypeOf(source);
}
recordAnnotationApplied(this, annotation, key);
}
/**
* @param {PropertyKey} key
* @param {PropertyDescriptor} descriptor
* @param {Annotation|boolean} annotation true - use default annotation, false - copy as is
* @param {boolean} proxyTrap whether it's called from proxy trap
* @returns {boolean|null} true on success, false on failure (proxyTrap + non-configurable), null when cancelled by interceptor
*/
;
_proto.extend_ = function extend_(key, descriptor, annotation, proxyTrap) {
if (proxyTrap === void 0) {
proxyTrap = false;
}
if (annotation === true) {
annotation = this.defaultAnnotation_;
}
if (annotation === false) {
return this.defineProperty_(key, descriptor, proxyTrap);
}
assertAnnotable(this, annotation, key);
var outcome = annotation.extend_(this, key, descriptor, proxyTrap);
if (outcome) {
recordAnnotationApplied(this, annotation, key);
}
return outcome;
}
/**
* @param {PropertyKey} key
* @param {PropertyDescriptor} descriptor
* @param {boolean} proxyTrap whether it's called from proxy trap
* @returns {boolean|null} true on success, false on failure (proxyTrap + non-configurable), null when cancelled by interceptor
*/
;
_proto.defineProperty_ = function defineProperty_(key, descriptor, proxyTrap) {
if (proxyTrap === void 0) {
proxyTrap = false;
}
try {
startBatch(); // Delete
var deleteOutcome = this.delete_(key);
if (!deleteOutcome) {
// Failure or intercepted
return deleteOutcome;
} // ADD interceptor
if (hasInterceptors(this)) {
var change = interceptChange(this, {
object: this.proxy_ || this.target_,
name: key,
type: ADD,
newValue: descriptor.value
});
if (!change) {
return null;
}
var newValue = change.newValue;
if (descriptor.value !== newValue) {
descriptor = _extends({}, descriptor, {
value: newValue
});
}
} // Define
if (proxyTrap) {
if (!Reflect.defineProperty(this.target_, key, descriptor)) {
return false;
}
} else {
defineProperty(this.target_, key, descriptor);
} // Notify
this.notifyPropertyAddition_(key, descriptor.value);
} finally {
endBatch();
}
return true;
} // If original descriptor becomes relevant, move this to annotation directly
;
_proto.defineObservableProperty_ = function defineObservableProperty_(key, value, enhancer, proxyTrap) {
if (proxyTrap === void 0) {
proxyTrap = false;
}
try {
startBatch(); // Delete
var deleteOutcome = this.delete_(key);
if (!deleteOutcome) {
// Failure or intercepted
return deleteOutcome;
} // ADD interceptor
if (hasInterceptors(this)) {
var change = interceptChange(this, {
object: this.proxy_ || this.target_,
name: key,
type: ADD,
newValue: value
});
if (!change) {
return null;
}
value = change.newValue;
}
var cachedDescriptor = getCachedObservablePropDescriptor(key);
var descriptor = {
configurable: globalState.safeDescriptors ? this.isPlainObject_ : true,
enumerable: true,
get: cachedDescriptor.get,
set: cachedDescriptor.set
}; // Define
if (proxyTrap) {
if (!Reflect.defineProperty(this.target_, key, descriptor)) {
return false;
}
} else {
defineProperty(this.target_, key, descriptor);
}
var observable = new ObservableValue(value, enhancer, "development" !== "production" ? this.name_ + "." + key.toString() : "ObservableObject.key", false);
this.values_.set(key, observable); // Notify (value possibly changed by ObservableValue)
this.notifyPropertyAddition_(key, observable.value_);
} finally {
endBatch();
}
return true;
} // If original descriptor becomes relevant, move this to annotation directly
;
_proto.defineComputedProperty_ = function defineComputedProperty_(key, options, proxyTrap) {
if (proxyTrap === void 0) {
proxyTrap = false;
}
try {
startBatch(); // Delete
var deleteOutcome = this.delete_(key);
if (!deleteOutcome) {
// Failure or intercepted
return deleteOutcome;
} // ADD interceptor
if (hasInterceptors(this)) {
var change = interceptChange(this, {
object: this.proxy_ || this.target_,
name: key,
type: ADD,
newValue: undefined
});
if (!change) {
return null;
}
}
options.name || (options.name = "development" !== "production" ? this.name_ + "." + key.toString() : "ObservableObject.key");
options.context = this.proxy_ || this.target_;
var cachedDescriptor = getCachedObservablePropDescriptor(key);
var descriptor = {
configurable: globalState.safeDescriptors ? this.isPlainObject_ : true,
enumerable: false,
get: cachedDescriptor.get,
set: cachedDescriptor.set
}; // Define
if (proxyTrap) {
if (!Reflect.defineProperty(this.target_, key, descriptor)) {
return false;
}
} else {
defineProperty(this.target_, key, descriptor);
}
this.values_.set(key, new ComputedValue(options)); // Notify
this.notifyPropertyAddition_(key, undefined);
} finally {
endBatch();
}
return true;
}
/**
* @param {PropertyKey} key
* @param {PropertyDescriptor} descriptor
* @param {boolean} proxyTrap whether it's called from proxy trap
* @returns {boolean|null} true on success, false on failure (proxyTrap + non-configurable), null when cancelled by interceptor
*/
;
_proto.delete_ = function delete_(key, proxyTrap) {
if (proxyTrap === void 0) {
proxyTrap = false;
}
// No such prop
if (!hasProp(this.target_, key)) {
return true;
} // Intercept
if (hasInterceptors(this)) {
var change = interceptChange(this, {
object: this.proxy_ || this.target_,
name: key,
type: REMOVE
}); // Cancelled
if (!change) {
return null;
}
} // Delete
try {
var _this$pendingKeys_, _this$pendingKeys_$ge;
startBatch();
var notify = hasListeners(this);
var notifySpy = "development" !== "production" && isSpyEnabled();
var observable = this.values_.get(key); // Value needed for spies/listeners
var value = undefined; // Optimization: don't pull the value unless we will need it
if (!observable && (notify || notifySpy)) {
var _getDescriptor;
value = (_getDescriptor = getDescriptor(this.target_, key)) == null ? void 0 : _getDescriptor.value;
} // delete prop (do first, may fail)
if (proxyTrap) {
if (!Reflect.deleteProperty(this.target_, key)) {
return false;
}
} else {
delete this.target_[key];
} // Allow re-annotating this field
if ("development" !== "production") {
delete this.appliedAnnotations_[key];
} // Clear observable
if (observable) {
this.values_["delete"](key); // for computed, value is undefined
if (observable instanceof ObservableValue) {
value = observable.value_;
} // Notify: autorun(() => obj[key]), see #1796
propagateChanged(observable);
} // Notify "keys/entries/values" observers
this.keysAtom_.reportChanged(); // Notify "has" observers
// "in" as it may still exist in proto
(_this$pendingKeys_ = this.pendingKeys_) == null ? void 0 : (_this$pendingKeys_$ge = _this$pendingKeys_.get(key)) == null ? void 0 : _this$pendingKeys_$ge.set(key in this.target_); // Notify spies/listeners
if (notify || notifySpy) {
var _change2 = {
type: REMOVE,
observableKind: "object",
object: this.proxy_ || this.target_,
debugObjectName: this.name_,
oldValue: value,
name: key
};
if ("development" !== "production" && notifySpy) {
spyReportStart(_change2);
}
if (notify) {
notifyListeners(this, _change2);
}
if ("development" !== "production" && notifySpy) {
spyReportEnd();
}
}
} finally {
endBatch();
}
return true;
}
/**
* Observes this object. Triggers for the events 'add', 'update' and 'delete'.
* See: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/observe
* for callback details
*/
;
_proto.observe_ = function observe_(callback, fireImmediately) {
if ( fireImmediately === true) {
die("`observe` doesn't support the fire immediately property for observable objects.");
}
return registerListener(this, callback);
};
_proto.intercept_ = function intercept_(handler) {
return registerInterceptor(this, handler);
};
_proto.notifyPropertyAddition_ = function notifyPropertyAddition_(key, value) {
var _this$pendingKeys_2, _this$pendingKeys_2$g;
var notify = hasListeners(this);
var notifySpy = isSpyEnabled();
if (notify || notifySpy) {
var change = notify || notifySpy ? {
type: ADD,
observableKind: "object",
debugObjectName: this.name_,
object: this.proxy_ || this.target_,
name: key,
newValue: value
} : null;
if ( notifySpy) {
spyReportStart(change);
}
if (notify) {
notifyListeners(this, change);
}
if ( notifySpy) {
spyReportEnd();
}
}
(_this$pendingKeys_2 = this.pendingKeys_) == null ? void 0 : (_this$pendingKeys_2$g = _this$pendingKeys_2.get(key)) == null ? void 0 : _this$pendingKeys_2$g.set(true); // Notify "keys/entries/values" observers
this.keysAtom_.reportChanged();
};
_proto.ownKeys_ = function ownKeys_() {
this.keysAtom_.reportObserved();
return ownKeys(this.target_);
};
_proto.keys_ = function keys_() {
// Returns enumerable && own, but unfortunately keysAtom will report on ANY key change.
// There is no way to distinguish between Object.keys(object) and Reflect.ownKeys(object) - both are handled by ownKeys trap.
// We can either over-report in Object.keys(object) or under-report in Reflect.ownKeys(object)
// We choose to over-report in Object.keys(object), because:
// - typically it's used with simple data objects
// - when symbolic/non-enumerable keys are relevant Reflect.ownKeys works as expected
this.keysAtom_.reportObserved();
return Object.keys(this.target_);
};
return ObservableObjectAdministration;
}();
function asObservableObject(target, options) {
var _options$name;
if ( options && isObservableObject(target)) {
die("Options can't be provided for already observable objects.");
}
if (hasProp(target, $mobx)) {
if ( !(getAdministration(target) instanceof ObservableObjectAdministration)) {
die("Cannot convert '" + getDebugName(target) + "' into observable object:" + "\nThe target is already observable of different type." + "\nExtending builtins is not supported.");
}
return target;
}
if ( !Object.isExtensible(target)) {
die("Cannot make the designated object observable; it is not extensible");
}
var name = (_options$name = options == null ? void 0 : options.name) != null ? _options$name : (isPlainObject(target) ? "ObservableObject" : target.constructor.name) + "@" + getNextId() ;
var adm = new ObservableObjectAdministration(target, new Map(), String(name), getAnnotationFromOptions(options));
addHiddenProp(target, $mobx, adm);
return target;
}
var isObservableObjectAdministration = /*#__PURE__*/createInstanceofPredicate("ObservableObjectAdministration", ObservableObjectAdministration);
function getCachedObservablePropDescriptor(key) {
return descriptorCache[key] || (descriptorCache[key] = {
get: function get() {
return this[$mobx].getObservablePropValue_(key);
},
set: function set(value) {
return this[$mobx].setObservablePropValue_(key, value);
}
});
}
function isObservableObject(thing) {
if (isObject(thing)) {
return isObservableObjectAdministration(thing[$mobx]);
}
return false;
}
function recordAnnotationApplied(adm, annotation, key) {
var _adm$target_$storedAn;
{
adm.appliedAnnotations_[key] = annotation;
} // Remove applied decorator annotation so we don't try to apply it again in subclass constructor
(_adm$target_$storedAn = adm.target_[storedAnnotationsSymbol]) == null ? true : delete _adm$target_$storedAn[key];
}
function assertAnnotable(adm, annotation, key) {
// Valid annotation
if ( !isAnnotation(annotation)) {
die("Cannot annotate '" + adm.name_ + "." + key.toString() + "': Invalid annotation.");
}
/*
// Configurable, not sealed, not frozen
// Possibly not needed, just a little better error than the one thrown by engine.
// Cases where this would be useful the most (subclass field initializer) are not interceptable by this.
if (__DEV__) {
const configurable = getDescriptor(adm.target_, key)?.configurable
const frozen = Object.isFrozen(adm.target_)
const sealed = Object.isSealed(adm.target_)
if (!configurable || frozen || sealed) {
const fieldName = `${adm.name_}.${key.toString()}`
const requestedAnnotationType = annotation.annotationType_
let error = `Cannot apply '${requestedAnnotationType}' to '${fieldName}':`
if (frozen) {
error += `\nObject is frozen.`
}
if (sealed) {
error += `\nObject is sealed.`
}
if (!configurable) {
error += `\nproperty is not configurable.`
// Mention only if caused by us to avoid confusion
if (hasProp(adm.appliedAnnotations!, key)) {
error += `\nTo prevent accidental re-definition of a field by a subclass, `
error += `all annotated fields of non-plain objects (classes) are not configurable.`
}
}
die(error)
}
}
*/
// Not annotated
if ( !isOverride(annotation) && hasProp(adm.appliedAnnotations_, key)) {
var fieldName = adm.name_ + "." + key.toString();
var currentAnnotationType = adm.appliedAnnotations_[key].annotationType_;
var requestedAnnotationType = annotation.annotationType_;
die("Cannot apply '" + requestedAnnotationType + "' to '" + fieldName + "':" + ("\nThe field is already annotated with '" + currentAnnotationType + "'.") + "\nRe-annotating fields is not allowed." + "\nUse 'override' annotation for methods overriden by subclass.");
}
}
/**
* This array buffer contains two lists of properties, so that all arrays
* can recycle their property definitions, which significantly improves performance of creating
* properties on the fly.
*/
var OBSERVABLE_ARRAY_BUFFER_SIZE = 0; // Typescript workaround to make sure ObservableArray extends Array
var StubArray = function StubArray() {};
function inherit(ctor, proto) {
if (Object.setPrototypeOf) {
Object.setPrototypeOf(ctor.prototype, proto);
} else if (ctor.prototype.__proto__ !== undefined) {
ctor.prototype.__proto__ = proto;
} else {
ctor.prototype = proto;
}
}
inherit(StubArray, Array.prototype); // Weex proto freeze protection was here,
// but it is unclear why the hack is needed, as MobX never changed the prototype
// anyway, so removed it in V6
var LegacyObservableArray = /*#__PURE__*/function (_StubArray, _Symbol$toStringTag, _Symbol$iterator) {
_inheritsLoose(LegacyObservableArray, _StubArray);
function LegacyObservableArray(initialValues, enhancer, name, owned) {
var _this;
if (name === void 0) {
name = "ObservableArray@" + getNextId() ;
}
if (owned === void 0) {
owned = false;
}
_this = _StubArray.call(this) || this;
var adm = new ObservableArrayAdministration(name, enhancer, owned, true);
adm.proxy_ = _assertThisInitialized(_this);
addHiddenFinalProp(_assertThisInitialized(_this), $mobx, adm);
if (initialValues && initialValues.length) {
var prev = allowStateChangesStart(true); // @ts-ignore
_this.spliceWithArray(0, 0, initialValues);
allowStateChangesEnd(prev);
}
return _this;
}
var _proto = LegacyObservableArray.prototype;
_proto.concat = function concat() {
this[$mobx].atom_.reportObserved();
for (var _len = arguments.length, arrays = new Array(_len), _key = 0; _key < _len; _key++) {
arrays[_key] = arguments[_key];
}
return Array.prototype.concat.apply(this.slice(), //@ts-ignore
arrays.map(function (a) {
return isObservableArray(a) ? a.slice() : a;
}));
};
_proto[_Symbol$iterator] = function () {
var self = this;
var nextIndex = 0;
return makeIterable({
next: function next() {
return nextIndex < self.length ? {
value: self[nextIndex++],
done: false
} : {
done: true,
value: undefined
};
}
});
};
_createClass(LegacyObservableArray, [{
key: "length",
get: function get() {
return this[$mobx].getArrayLength_();
},
set: function set(newLength) {
this[$mobx].setArrayLength_(newLength);
}
}, {
key: _Symbol$toStringTag,
get: function get() {
return "Array";
}
}]);
return LegacyObservableArray;
}(StubArray, Symbol.toStringTag, Symbol.iterator);
Object.entries(arrayExtensions).forEach(function (_ref) {
var prop = _ref[0],
fn = _ref[1];
if (prop !== "concat") {
addHiddenProp(LegacyObservableArray.prototype, prop, fn);
}
});
function createArrayEntryDescriptor(index) {
return {
enumerable: false,
configurable: true,
get: function get() {
return this[$mobx].get_(index);
},
set: function set(value) {
this[$mobx].set_(index, value);
}
};
}
function createArrayBufferItem(index) {
defineProperty(LegacyObservableArray.prototype, "" + index, createArrayEntryDescriptor(index));
}
function reserveArrayBuffer(max) {
if (max > OBSERVABLE_ARRAY_BUFFER_SIZE) {
for (var index = OBSERVABLE_ARRAY_BUFFER_SIZE; index < max + 100; index++) {
createArrayBufferItem(index);
}
OBSERVABLE_ARRAY_BUFFER_SIZE = max;
}
}
reserveArrayBuffer(1000);
function createLegacyArray(initialValues, enhancer, name) {
return new LegacyObservableArray(initialValues, enhancer, name);
}
function getAtom(thing, property) {
if (typeof thing === "object" && thing !== null) {
if (isObservableArray(thing)) {
if (property !== undefined) {
die(23);
}
return thing[$mobx].atom_;
}
if (isObservableSet(thing)) {
return thing[$mobx];
}
if (isObservableMap(thing)) {
if (property === undefined) {
return thing.keysAtom_;
}
var observable = thing.data_.get(property) || thing.hasMap_.get(property);
if (!observable) {
die(25, property, getDebugName(thing));
}
return observable;
}
if (isObservableObject(thing)) {
if (!property) {
return die(26);
}
var _observable = thing[$mobx].values_.get(property);
if (!_observable) {
die(27, property, getDebugName(thing));
}
return _observable;
}
if (isAtom(thing) || isComputedValue(thing) || isReaction(thing)) {
return thing;
}
} else if (isFunction(thing)) {
if (isReaction(thing[$mobx])) {
// disposer function
return thing[$mobx];
}
}
die(28);
}
function getAdministration(thing, property) {
if (!thing) {
die(29);
}
if (property !== undefined) {
return getAdministration(getAtom(thing, property));
}
if (isAtom(thing) || isComputedValue(thing) || isReaction(thing)) {
return thing;
}
if (isObservableMap(thing) || isObservableSet(thing)) {
return thing;
}
if (thing[$mobx]) {
return thing[$mobx];
}
die(24, thing);
}
function getDebugName(thing, property) {
var named;
if (property !== undefined) {
named = getAtom(thing, property);
} else if (isAction(thing)) {
return thing.name;
} else if (isObservableObject(thing) || isObservableMap(thing) || isObservableSet(thing)) {
named = getAdministration(thing);
} else {
// valid for arrays as well
named = getAtom(thing);
}
return named.name_;
}
var toString = objectPrototype.toString;
function deepEqual(a, b, depth) {
if (depth === void 0) {
depth = -1;
}
return eq(a, b, depth);
} // Copied from https://github.com/jashkenas/underscore/blob/5c237a7c682fb68fd5378203f0bf22dce1624854/underscore.js#L1186-L1289
// Internal recursive comparison function for `isEqual`.
function eq(a, b, depth, aStack, bStack) {
// Identical objects are equal. `0 === -0`, but they aren't identical.
// See the [Harmony `egal` proposal](http://wiki.ecmascript.org/doku.php?id=harmony:egal).
if (a === b) {
return a !== 0 || 1 / a === 1 / b;
} // `null` or `undefined` only equal to itself (strict comparison).
if (a == null || b == null) {
return false;
} // `NaN`s are equivalent, but non-reflexive.
if (a !== a) {
return b !== b;
} // Exhaust primitive checks
var type = typeof a;
if (type !== "function" && type !== "object" && typeof b != "object") {
return false;
} // Compare `[[Class]]` names.
var className = toString.call(a);
if (className !== toString.call(b)) {
return false;
}
switch (className) {
// Strings, numbers, regular expressions, dates, and booleans are compared by value.
case "[object RegExp]": // RegExps are coerced to strings for comparison (Note: '' + /a/i === '/a/i')
case "[object String]":
// Primitives and their corresponding object wrappers are equivalent; thus, `"5"` is
// equivalent to `new String("5")`.
return "" + a === "" + b;
case "[object Number]":
// `NaN`s are equivalent, but non-reflexive.
// Object(NaN) is equivalent to NaN.
if (+a !== +a) {
return +b !== +b;
} // An `egal` comparison is performed for other numeric values.
return +a === 0 ? 1 / +a === 1 / b : +a === +b;
case "[object Date]":
case "[object Boolean]":
// Coerce dates and booleans to numeric primitive values. Dates are compared by their
// millisecond representations. Note that invalid dates with millisecond representations
// of `NaN` are not equivalent.
return +a === +b;
case "[object Symbol]":
return typeof Symbol !== "undefined" && Symbol.valueOf.call(a) === Symbol.valueOf.call(b);
case "[object Map]":
case "[object Set]":
// Maps and Sets are unwrapped to arrays of entry-pairs, adding an incidental level.
// Hide this extra level by increasing the depth.
if (depth >= 0) {
depth++;
}
break;
} // Unwrap any wrapped objects.
a = unwrap(a);
b = unwrap(b);
var areArrays = className === "[object Array]";
if (!areArrays) {
if (typeof a != "object" || typeof b != "object") {
return false;
} // Objects with different constructors are not equivalent, but `Object`s or `Array`s
// from different frames are.
var aCtor = a.constructor,
bCtor = b.constructor;
if (aCtor !== bCtor && !(isFunction(aCtor) && aCtor instanceof aCtor && isFunction(bCtor) && bCtor instanceof bCtor) && "constructor" in a && "constructor" in b) {
return false;
}
}
if (depth === 0) {
return false;
} else if (depth < 0) {
depth = -1;
} // Assume equality for cyclic structures. The algorithm for detecting cyclic
// structures is adapted from ES 5.1 section 15.12.3, abstract operation `JO`.
// Initializing stack of traversed objects.
// It's done here since we only need them for objects and arrays comparison.
aStack = aStack || [];
bStack = bStack || [];
var length = aStack.length;
while (length--) {
// Linear search. Performance is inversely proportional to the number of
// unique nested structures.
if (aStack[length] === a) {
return bStack[length] === b;
}
} // Add the first object to the stack of traversed objects.
aStack.push(a);
bStack.push(b); // Recursively compare objects and arrays.
if (areArrays) {
// Compare array lengths to determine if a deep comparison is necessary.
length = a.length;
if (length !== b.length) {
return false;
} // Deep compare the contents, ignoring non-numeric properties.
while (length--) {
if (!eq(a[length], b[length], depth - 1, aStack, bStack)) {
return false;
}
}
} else {
// Deep compare objects.
var keys = Object.keys(a);
var key;
length = keys.length; // Ensure that both objects contain the same number of properties before comparing deep equality.
if (Object.keys(b).length !== length) {
return false;
}
while (length--) {
// Deep compare each member
key = keys[length];
if (!(hasProp(b, key) && eq(a[key], b[key], depth - 1, aStack, bStack))) {
return false;
}
}
} // Remove the first object from the stack of traversed objects.
aStack.pop();
bStack.pop();
return true;
}
function unwrap(a) {
if (isObservableArray(a)) {
return a.slice();
}
if (isES6Map(a) || isObservableMap(a)) {
return Array.from(a.entries());
}
if (isES6Set(a) || isObservableSet(a)) {
return Array.from(a.entries());
}
return a;
}
function makeIterable(iterator) {
iterator[Symbol.iterator] = getSelf;
return iterator;
}
function getSelf() {
return this;
}
function isAnnotation(thing) {
return (// Can be function
thing instanceof Object && typeof thing.annotationType_ === "string" && isFunction(thing.make_) && isFunction(thing.extend_)
);
}
/**
* (c) Michel Weststrate 2015 - 2020
* MIT Licensed
*
* Welcome to the mobx sources! To get a global overview of how MobX internally works,
* this is a good place to start:
* https://medium.com/@mweststrate/becoming-fully-reactive-an-in-depth-explanation-of-mobservable-55995262a254#.xvbh6qd74
*
* Source folders:
* ===============
*
* - api/ Most of the public static methods exposed by the module can be found here.
* - core/ Implementation of the MobX algorithm; atoms, derivations, reactions, dependency trees, optimizations. Cool stuff can be found here.
* - types/ All the magic that is needed to have observable objects, arrays and values is in this folder. Including the modifiers like `asFlat`.
* - utils/ Utility stuff.
*
*/
["Symbol", "Map", "Set"].forEach(function (m) {
var g = getGlobal();
if (typeof g[m] === "undefined") {
die("MobX requires global '" + m + "' to be available or polyfilled");
}
});
if (typeof __MOBX_DEVTOOLS_GLOBAL_HOOK__ === "object") {
// See: https://github.com/andykog/mobx-devtools/
__MOBX_DEVTOOLS_GLOBAL_HOOK__.injectMobx({
spy: spy,
extras: {
getDebugName: getDebugName
},
$mobx: $mobx
});
}
exports.$mobx = $mobx;
exports.FlowCancellationError = FlowCancellationError;
exports.ObservableMap = ObservableMap;
exports.ObservableSet = ObservableSet;
exports.Reaction = Reaction;
exports._allowStateChanges = allowStateChanges;
exports._allowStateChangesInsideComputed = runInAction;
exports._allowStateReadsEnd = allowStateReadsEnd;
exports._allowStateReadsStart = allowStateReadsStart;
exports._autoAction = autoAction;
exports._endAction = _endAction;
exports._getAdministration = getAdministration;
exports._getGlobalState = getGlobalState;
exports._interceptReads = interceptReads;
exports._isComputingDerivation = isComputingDerivation;
exports._resetGlobalState = resetGlobalState;
exports._startAction = _startAction;
exports.action = action;
exports.autorun = autorun;
exports.comparer = comparer;
exports.computed = computed;
exports.configure = configure;
exports.createAtom = createAtom;
exports.defineProperty = apiDefineProperty;
exports.entries = entries;
exports.extendObservable = extendObservable;
exports.flow = flow;
exports.flowResult = flowResult;
exports.get = get;
exports.getAtom = getAtom;
exports.getDebugName = getDebugName;
exports.getDependencyTree = getDependencyTree;
exports.getObserverTree = getObserverTree;
exports.has = has;
exports.intercept = intercept;
exports.isAction = isAction;
exports.isBoxedObservable = isObservableValue;
exports.isComputed = isComputed;
exports.isComputedProp = isComputedProp;
exports.isFlow = isFlow;
exports.isFlowCancellationError = isFlowCancellationError;
exports.isObservable = isObservable;
exports.isObservableArray = isObservableArray;
exports.isObservableMap = isObservableMap;
exports.isObservableObject = isObservableObject;
exports.isObservableProp = isObservableProp;
exports.isObservableSet = isObservableSet;
exports.keys = keys;
exports.makeAutoObservable = makeAutoObservable;
exports.makeObservable = makeObservable;
exports.observable = observable;
exports.observe = observe;
exports.onBecomeObserved = onBecomeObserved;
exports.onBecomeUnobserved = onBecomeUnobserved;
exports.onReactionError = onReactionError;
exports.override = override;
exports.ownKeys = apiOwnKeys;
exports.reaction = reaction;
exports.remove = remove;
exports.runInAction = runInAction;
exports.set = set;
exports.spy = spy;
exports.toJS = toJS;
exports.trace = trace;
exports.transaction = transaction;
exports.untracked = untracked;
exports.values = values;
exports.when = when;
Object.defineProperty(exports, '__esModule', { value: true });
})));
//# sourceMappingURL=mobx.umd.development.js.map
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mainConfig.ui',
# licensing of 'mainConfig.ui' applies.
#
# Created: Sat Mar 23 13:32:36 2019
# by: PyQt5-uic running on PyQt5 5.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_ct_MainConfig(object):
def setMainConfig(self, ct_MainConfig):
ct_MainConfig.setObjectName("ct_MainConfig")
ct_MainConfig.resize(1000, 600)
ct_MainConfig.setStyleSheet("border:none")
self.frameMainConfig = QtWidgets.QFrame(ct_MainConfig)
self.frameMainConfig.setGeometry(QtCore.QRect(0, 0, 1000, 600))
self.frameMainConfig.setObjectName("frameMainConfig")
self.fr_TituloConfig = QtWidgets.QFrame(self.frameMainConfig)
self.fr_TituloConfig.setGeometry(QtCore.QRect(0, 0, 1000, 60))
self.fr_TituloConfig.setStyleSheet("border: none")
self.fr_TituloConfig.setObjectName("fr_TituloConfig")
self.lb_tituloConfig = QtWidgets.QLabel(self.fr_TituloConfig)
self.lb_tituloConfig.setGeometry(QtCore.QRect(10, 15, 271, 30))
font = QtGui.QFont()
font.setFamily("DejaVu Sans")
font.setPointSize(18)
font.setWeight(75)
font.setBold(True)
self.lb_tituloConfig.setFont(font)
self.lb_tituloConfig.setStyleSheet("color: #FFF")
self.lb_tituloConfig.setObjectName("lb_tituloConfig")
self.ct_config = QtWidgets.QFrame(self.frameMainConfig)
self.ct_config.setGeometry(QtCore.QRect(0, 100, 1000, 500))
self.ct_config.setStyleSheet("background: #FFF;\n"
"border: none")
self.ct_config.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.ct_config.setFrameShadow(QtWidgets.QFrame.Raised)
self.ct_config.setObjectName("ct_config")
self.fr_menuConfig = QtWidgets.QFrame(self.frameMainConfig)
self.fr_menuConfig.setGeometry(QtCore.QRect(0, 60, 1000, 40))
self.fr_menuConfig.setStyleSheet("background:#E1DFE0;\n"
"border: none;\n"
"border-bottom: 2px solid #069;")
self.fr_menuConfig.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.fr_menuConfig.setFrameShadow(QtWidgets.QFrame.Raised)
self.fr_menuConfig.setObjectName("fr_menuConfig")
self.bt_confEmpresa = QtWidgets.QPushButton(self.fr_menuConfig)
self.bt_confEmpresa.setGeometry(QtCore.QRect(5, 2, 170, 36))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setWeight(75)
font.setBold(True)
self.bt_confEmpresa.setFont(font)
self.bt_confEmpresa.setCursor(QtCore.Qt.PointingHandCursor)
self.bt_confEmpresa.setFocusPolicy(QtCore.Qt.NoFocus)
self.bt_confEmpresa.setAutoFillBackground(False)
self.bt_confEmpresa.setStyleSheet("QPushButton{\n"
"background: #40A286 ;\n"
"border: none;\n"
"color: #FFF;\n"
"border-top-left-radius: 4px;\n"
"border-top-right-radius: 4px;\n"
"margin-top: 7px;\n"
"}\n"
"QPushButton:hover {\n"
"background: #7AB32E;\n"
"margin-top: 0;\n"
"}\n"
"QPushButton:disabled {\n"
"background: #7AB32E;\n"
"margin-top: 0;\n"
"}\n"
"")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(
"../../RSP/Images/icon/tag-new.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.bt_confEmpresa.setIcon(icon)
self.bt_confEmpresa.setIconSize(QtCore.QSize(25, 25))
self.bt_confEmpresa.setFlat(True)
self.bt_confEmpresa.setObjectName("bt_confEmpresa")
self.bt_confUser = QtWidgets.QPushButton(self.fr_menuConfig)
self.bt_confUser.setGeometry(QtCore.QRect(185, 2, 170, 36))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setWeight(75)
font.setBold(True)
self.bt_confUser.setFont(font)
self.bt_confUser.setCursor(QtCore.Qt.PointingHandCursor)
self.bt_confUser.setFocusPolicy(QtCore.Qt.NoFocus)
self.bt_confUser.setAutoFillBackground(False)
self.bt_confUser.setStyleSheet("QPushButton{\n"
"background: #40A286 ;\n"
"border: none;\n"
"color: #FFF;\n"
"border-top-left-radius: 4px;\n"
"border-top-right-radius: 4px;\n"
"margin-top: 7px;\n"
"}\n"
"QPushButton:hover {\n"
"background: #7AB32E;\n"
"margin-top: 0;\n"
"}\n"
"QPushButton:disabled {\n"
"background: #7AB32E;\n"
"margin-top: 0;\n"
"}\n"
"")
self.bt_confUser.setIcon(icon)
self.bt_confUser.setIconSize(QtCore.QSize(25, 25))
self.bt_confUser.setFlat(True)
self.bt_confUser.setObjectName("bt_confUser")
self.bt_confDB = QtWidgets.QPushButton(self.fr_menuConfig)
self.bt_confDB.setEnabled(True)
self.bt_confDB.setGeometry(QtCore.QRect(365, 2, 170, 36))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setWeight(75)
font.setBold(True)
self.bt_confDB.setFont(font)
self.bt_confDB.setCursor(QtCore.Qt.PointingHandCursor)
self.bt_confDB.setFocusPolicy(QtCore.Qt.NoFocus)
self.bt_confDB.setAutoFillBackground(False)
self.bt_confDB.setStyleSheet("QPushButton{\n"
"background: #40A286 ;\n"
"border: none;\n"
"color: #FFF;\n"
"border-top-left-radius: 4px;\n"
"border-top-right-radius: 4px;\n"
"margin-top: 7px;\n"
"}\n"
"QPushButton:hover {\n"
"background: #7AB32E;\n"
"margin-top: 0;\n"
"}\n"
"QPushButton:disabled {\n"
"background: #7AB32E;\n"
"margin-top: 0;\n"
"}\n"
"")
self.bt_confDB.setIcon(icon)
self.bt_confDB.setIconSize(QtCore.QSize(25, 25))
self.bt_confDB.setFlat(True)
self.bt_confDB.setObjectName("bt_confDB")
self.tradMainConfig(ct_MainConfig)
QtCore.QMetaObject.connectSlotsByName(ct_MainConfig)
def tradMainConfig(self, ct_MainConfig):
ct_MainConfig.setWindowTitle(QtWidgets.QApplication.translate(
"ct_MainConfig", "Frame", None, -1))
self.lb_tituloConfig.setText(QtWidgets.QApplication.translate(
"ct_MainConfig", "CONFIGURAÇÃO", None, -1))
self.bt_confEmpresa.setText(QtWidgets.QApplication.translate(
"ct_MainConfig", "EMPRESA", None, -1))
self.bt_confEmpresa.setShortcut(
QtWidgets.QApplication.translate("ct_MainConfig", "F7", None, -1))
self.bt_confUser.setText(QtWidgets.QApplication.translate(
"ct_MainConfig", "USUÁRIOS", None, -1))
self.bt_confUser.setShortcut(
QtWidgets.QApplication.translate("ct_MainConfig", "F7", None, -1))
self.bt_confDB.setText(QtWidgets.QApplication.translate(
"ct_MainConfig", "BANCO DE DADOS", None, -1))
self.bt_confDB.setShortcut(
QtWidgets.QApplication.translate("ct_MainConfig", "F7", None, -1))
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
tslib_1.__exportStar(require("@styled-icons/material/NoMeetingRoom"), exports);
|
'use strict';
const fs = require('fs');
module.exports = app => {
async function close() {
try {
if (app.grpcServer) await app.grpcServer.close();
} catch (err) {
app.logger.error('[Close App Error]', err);
}
}
app.beforeStart(() => {
fs.writeFileSync('pid', process.pid);
});
app.ready(() => {
if (app.config.startAfterInit) return;
const pid = fs.readFileSync('pid', { encoding: 'utf8' });
if (Number(pid) === process.pid) {
const grpcServer = new app.GrpcServer(app);
grpcServer.start();
Reflect.defineProperty(app, 'grpcServer', { value: grpcServer });
}
});
app.beforeClose(async () => {
await close();
});
process.on('beforeExit', async () => {
await close();
});
};
|
export default function getDataStructures(result) {
while (
!result.meta ||
!result.meta.classes ||
!result.meta.classes.includes('dataStructures')
) {
result = result.content[0];
}
return result.content.map(entry => entry.content[0]);
}
|
/*
* ***** BEGIN LICENSE BLOCK *****
* Zimbra Collaboration Suite Web Client
* Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Zimbra, Inc.
*
* The contents of this file are subject to the Zimbra Public License
* Version 1.3 ("License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.zimbra.com/license.
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.
* ***** END LICENSE BLOCK *****
*/
/**
* _MLIFETIME XModelItem
**/
MLifetime_XModelItem = function () {}
XModelItemFactory.createItemType("_MLIFETIME_", "mlifetime", MLifetime_XModelItem);
MLifetime_XModelItem.prototype.validateType = function (value) {
var val = "";
if(value == ZaMsg.Unlimited) {
val = "0";
} else if(value != null && value.length >0) {
if(value.length > 1) {
val = value.substr(0, value.length-1);
} else {
val = "0";
}
}
if(val)
val = XModelItem.prototype.validateNumber.call(this, val);
return value;
}
/**
* XFormItem class: "lifetime" (composite item)
* this item is used in the Admin UI to display fields such as session token lifetime
* instance values are strings that contain numbers and characters (matching /^([0-9])+([dhms])?$/)
* values d, h, m, and s mean 1 day, 1 hour, 1 minute and 1 second
* 1d means 1 day, 4d means 4 days, 4h means 4 hours, etc.
*
* @class Lifetime_XFormItem
* @constructor Lifetime_XFormItem
* @author Greg Solovyev
**/
Lifetime_XFormItem = function() {}
XFormItemFactory.createItemType("_LIFETIME_", "lifetime", Lifetime_XFormItem, Composite_XFormItem);
Lifetime_XFormItem.prototype.visibilityChecks = [ZaItem.hasReadPermission];
Lifetime_XFormItem.prototype.enableDisableChecks = [ZaItem.hasWritePermission];
Lifetime_XFormItem.prototype.nowrap = false;
Lifetime_XFormItem.prototype.labelWrap = true;
Lifetime_XFormItem.prototype.initializeItems = function(){
this.items = [
{type:_TEXTFIELD_, ref:".", labelLocation:_NONE_, cssClass:"admin_xform_number_input",
visibilityChecks:[],
enableDisableChecks:[],
getDisplayValue:function (itemVal) {
var val = "";
if(itemVal != null && itemVal.length >0) {
if(itemVal.length > 1) {
val = parseInt(itemVal);
} else {
if(itemVal == "0") {
val = "0";
} else {
val = "";
}
}
}
this.getParentItem()._numericPart = val;
return ((!val || val=="0") ? ZaMsg.Unlimited : val);
},
elementChanged:function(numericPart, instanceValue, event) {
if (numericPart == ZaMsg.Unlimited) {
numericPart = 0;
}
var val = numericPart + this.getParentItem()._stringPart;
this.getParentItem()._numericPart = numericPart;
this.getForm().itemChanged(this.getParentItem(), val, event);
}
},
{type:_OSELECT1_, ref:".", labelLocation:_NONE_, choices: ZaModel.getTimeChoices(),
visibilityChecks:[],
enableDisableChecks:[],
getDisplayValue:function (itemVal){
var val = "d";
if(itemVal != null && itemVal.length >0) {
if(itemVal.length > 1) {
val = itemVal.substr(itemVal.length-1, 1);
} else if (itemVal != "0") {
val = (itemVal == "d" || itemVal == "h" || itemVal== "m" || itemVal == "s") ? itemVal : "d";
}
}
this.getParentItem()._stringPart = val;
return val;
},
elementChanged:function(stringPart,instanceValue, event) {
var val = this.getParentItem()._numericPart + stringPart;
this.getParentItem()._stringPart = stringPart;
this.getForm().itemChanged(this.getParentItem(), val, event);
}
}
];
Composite_XFormItem.prototype.initializeItems.call(this);
}
Lifetime_XFormItem.prototype.items = [];
Lifetime_XFormItem.prototype.getDisplayElement = function () {
return this.getElement(this.getId() + "_display");
}
Lifetime1_XFormItem = function() {}
XFormItemFactory.createItemType("_LIFETIME1_", "lifetime1", Lifetime1_XFormItem, Composite_XFormItem);
Lifetime1_XFormItem.prototype.nowrap = false;
Lifetime1_XFormItem.prototype.labelWrap = true;
Lifetime1_XFormItem.prototype.visibilityChecks = [ZaItem.hasReadPermission];
Lifetime1_XFormItem.prototype.enableDisableChecks = [ZaItem.hasWritePermission];
Lifetime1_XFormItem.prototype.initializeItems = function() {
this.items = [
{type:_TEXTFIELD_, ref:".", labelLocation:_NONE_,cssClass:"admin_xform_number_input",
visibilityChecks:[],
enableDisableChecks:[],
getDisplayValue:function (itemVal) {
var val = "";
if(itemVal != null && itemVal.length >0) {
if(itemVal.length > 1) {
val = itemVal.substr(0, itemVal.length-1);
} else {
if(itemVal == "0") {
val = "0";
} else {
val = "";
}
}
}
this.getParentItem()._numericPart = val;
return val;
},
elementChanged:function(numericPart, instanceValue, event) {
var val = numericPart + this.getParentItem()._stringPart;
this.getForm().itemChanged(this.getParentItem(), val, event);
}
},
{type:_OSELECT1_, ref:".", labelLocation:_NONE_, choices:ZaModel.getTimeChoices1(),
visibilityChecks:[],
enableDisableChecks:[],
getDisplayValue:function (itemVal){
var val = "d";
if(itemVal != null && itemVal.length >0) {
if(itemVal.length > 1) {
val = itemVal.substr(itemVal.length-1, 1);
} else if (itemVal != "0") {
val = (itemVal == "d" || itemVal == "h" || itemVal== "m" || itemVal == "s") ? itemVal : "d";
}
}
this.getParentItem()._stringPart = val;
return val;
},
elementChanged:function(stringPart,instanceValue, event) {
var val = this.getParentItem()._numericPart + stringPart;
this.getForm().itemChanged(this.getParentItem(), val, event);
}
}
];
Composite_XFormItem.prototype.initializeItems.call(this);
}
Lifetime1_XFormItem.prototype.items = [];
Lifetime2_XFormItem = function() {}
Lifetime2_XFormItem.prototype.nowrap = false;
Lifetime2_XFormItem.prototype.labelWrap = true;
XFormItemFactory.createItemType("_LIFETIME2_", "lifetime2", Lifetime2_XFormItem, Lifetime1_XFormItem);
Lifetime2_XFormItem.prototype.initializeItems = function () {
this.items = [
{type:_TEXTFIELD_, ref:".", labelLocation:_NONE_,cssClass:"admin_xform_number_input",
visibilityChecks:[],
enableDisableChecks:[],
getDisplayValue:function (itemVal) {
var val = "";
if(itemVal != null && itemVal.length >0) {
if(itemVal.length > 1) {
val = itemVal.substr(0, itemVal.length-1);
} else {
if(itemVal == "0") {
val = "0";
} else {
val = "";
}
}
}
this.getParentItem()._numericPart = val;
this.getParentItem()._stringPart="d";
return val;
},
elementChanged:function(numericPart, instanceValue, event) {
var val = numericPart + "d";
this.getForm().itemChanged(this.getParentItem(), val, event);
}
},
{type:_OUTPUT_, ref:null, labelLocation:_NONE_, value:"d",getDisplayValue:function (itemVal){ return AjxMsg.days;}}
];
Composite_XFormItem.prototype.initializeItems.call(this);
};
Lifetime2_XFormItem.prototype.items = [];
/**
* _LIFETIME_MINUTES_ XForm item type allows time interval to be expressed only in minutes
**/
LifetimeMinutes_XFormItem = function() {}
LifetimeMinutes_XFormItem.prototype.nowrap = false;
LifetimeMinutes_XFormItem.prototype.labelWrap = true;
XFormItemFactory.createItemType("_LIFETIME_MINUTES_", "lifetime_minutes", LifetimeMinutes_XFormItem, Lifetime1_XFormItem);
LifetimeMinutes_XFormItem.prototype.initializeItems = function () {
this.items = [
{type:_TEXTFIELD_, ref:".", labelLocation:_NONE_,cssClass:"admin_xform_number_input",
visibilityChecks:[],
enableDisableChecks:[],
getDisplayValue:function (itemVal) {
var val = "";
if(itemVal != null && itemVal.length >0) {
if(itemVal.length > 1) {
val = itemVal.substr(0, itemVal.length-1);
} else {
if(itemVal == "0") {
val = "0";
} else {
val = "";
}
}
}
this.getParentItem()._numericPart = val;
this.getParentItem()._stringPart="m";
return val;
},
elementChanged:function(numericPart, instanceValue, event) {
var val = numericPart + "m";
this.getForm().itemChanged(this.getParentItem(), val, event);
}
},
{type:_OUTPUT_, ref:null, labelLocation:_NONE_, value:"m",getDisplayValue:function (itemVal){ return AjxMsg.minutes;}}
];
Composite_XFormItem.prototype.initializeItems.call(this);
};
LifetimeMinutes_XFormItem.prototype.items = [];
Long_Lifetime_XFormItem = function() {}
XFormItemFactory.createItemType("_LONG_LIFETIME_", "long_lifetime", Long_Lifetime_XFormItem, Composite_XFormItem);
Long_Lifetime_XFormItem.prototype.visibilityChecks = [ZaItem.hasReadPermission];
Long_Lifetime_XFormItem.prototype.enableDisableChecks = [ZaItem.hasWritePermission];
Long_Lifetime_XFormItem.prototype.nowrap = false;
Long_Lifetime_XFormItem.prototype.labelWrap = true;
Long_Lifetime_XFormItem.prototype.initializeItems = function(){
this.items = [
{type:_TEXTFIELD_, ref:".", labelLocation:_NONE_, cssClass:"admin_xform_number_input",
visibilityChecks:[],
enableDisableChecks:[],
getDisplayValue:function (itemVal) {
var val = "";
if(itemVal != null && itemVal.length >0) {
if(itemVal.length > 1) {
val = parseInt(itemVal);
} else {
if(itemVal == "0") {
val = "0";
} else {
val = "";
}
}
}
this.getParentItem()._numericPart = val;
return ((!val || val=="0") ? "0" : val);
},
elementChanged:function(numericPart, instanceValue, event) {
if (numericPart == ZaMsg.Unlimited) {
numericPart = 0;
}
var val = numericPart + this.getParentItem()._stringPart;
this.getParentItem()._numericPart = numericPart;
this.getForm().itemChanged(this.getParentItem(), val, event);
}
},
{type:_OSELECT1_, ref:".", labelLocation:_NONE_, choices: ZaModel.getLongTimeChoices,
visibilityChecks:[],
enableDisableChecks:[],
getDisplayValue:function (itemVal){
var val = "d";
if(itemVal != null && itemVal.length >0) {
if(itemVal.length > 1) {
val = itemVal.substr(itemVal.length-1, 1);
} else if (itemVal != "0") {
val = (itemVal == "h" || itemVal == "d" || itemVal== "m" || itemVal == "y") ? itemVal : "d";
}
}
this.getParentItem()._stringPart = val;
return val;
},
elementChanged:function(stringPart,instanceValue, event) {
var val = this.getParentItem()._numericPart + stringPart;
this.getParentItem()._stringPart = stringPart;
this.getForm().itemChanged(this.getParentItem(), val, event);
}
}
];
Composite_XFormItem.prototype.initializeItems.call(this);
}
Long_Lifetime_XFormItem.prototype.items = [];
Long_Lifetime_XFormItem.prototype.getDisplayElement = function () {
return this.getElement(this.getId() + "_display");
} |
/* eslint-env mocha */
'use strict'
const assert = require('assert')
const CRC = require('crc-32')
const { OrderBook } = require('../../../lib/models')
describe('OrderBook model', () => {
it('constructor: integrates snapshot', () => {
const entries = [
[100, 2, 10],
[200, 2, -10]
]
const ob = new OrderBook(entries)
assert.deepEqual(ob.bids, [entries[0]])
assert.deepEqual(ob.asks, [entries[1]])
})
it('topBid/topAsk: returns the top bid/ask, or null', () => {
const ob = new OrderBook([
[140, 1, 10],
[145, 1, 10],
[148, 1, 10],
[149, 1, 10],
[151, 1, -10],
[152, 1, -10],
[158, 1, -10],
[160, 1, -10]
])
assert.equal(ob.topBid(), 149)
assert.equal(ob.topAsk(), 151)
})
it('topBidLevel/topAskLevel: returns the top bid/ask levels, or null', () => {
const ob = new OrderBook([
[140, 1, 10],
[145, 1, 10],
[148, 1, 10],
[149, 1, 10],
[151, 1, -10],
[152, 1, -10],
[158, 1, -10],
[160, 1, -10]
])
assert.deepEqual(ob.topBidLevel(), [149, 1, 10])
assert.deepEqual(ob.topAskLevel(), [151, 1, -10])
})
it('checksum: returns expected value for normal OB', () => {
const ob = new OrderBook({
bids: [[6000, 1, 1], [5900, 1, 2]],
asks: [[6100, 1, -3], [6200, 1, -4]]
})
assert.equal(ob.checksum(), CRC.str('6000:1:6100:-3:5900:2:6200:-4'))
})
it('checksum: returns expected value for raw OB', () => {
const ob = new OrderBook({
bids: [[100, 6000, 1], [101, 6000, 2]], // first field is order ID here
asks: [[102, 6100, -3], [103, 6100, -4]]
}, true)
assert.equal(ob.checksum(), CRC.str('100:1:102:-3:101:2:103:-4'))
})
it('checksumArr: returns expected value for normal OB', () => {
const ob = [
[6000, 1, 1],
[5900, 1, 2],
[6100, 1, -3],
[6200, 1, -4]
]
assert.equal(
OrderBook.checksumArr(ob),
CRC.str('6000:1:6100:-3:5900:2:6200:-4')
)
})
it('checksumArr: returns expected value for raw OB', () => {
const ob = [
[100, 6000, 1],
[101, 6000, 2],
[102, 6100, -3],
[103, 6100, -4]
]
assert.equal(
OrderBook.checksumArr(ob, true),
CRC.str('100:1:102:-3:101:2:103:-4')
)
})
it('updateWith: correctly applies update', () => {
const entries = [
[100, 2, 10],
[200, 2, -10]
]
const ob = new OrderBook(entries)
assert(ob.updateWith([100, 3, 15])) // update bid
assert(ob.updateWith([200, 3, -15])) // update ask
assert.deepEqual(ob.bids, [[100, 3, 15]])
assert.deepEqual(ob.asks, [[200, 3, -15]])
assert(ob.updateWith([100, 0, 15])) // remove bid
assert(ob.updateWith([200, 0, -15])) // remove ask
assert.equal(ob.bids.length, 0)
assert.equal(ob.asks.length, 0)
assert(ob.updateWith([150, 1, 2])) // add bid
assert(ob.updateWith([100, 1, 1])) // add bid
assert(ob.updateWith([160, 1, 3])) // add bid
assert(ob.updateWith([161, 1, -3])) // add ask
assert(ob.updateWith([200, 1, -1])) // add ask
assert(ob.updateWith([175, 1, -2])) // add ask
assert.equal(ob.bids.length, 3)
assert.equal(ob.asks.length, 3)
assert.deepEqual(ob.bids, [
[160, 1, 3],
[150, 1, 2],
[100, 1, 1]
])
assert.deepEqual(ob.asks, [
[161, 1, -3],
[175, 1, -2],
[200, 1, -1]
])
assert(ob.updateWith([160, 2, 4])) // update top bid
assert.deepEqual(ob.bids, [
[160, 2, 4],
[150, 1, 2],
[100, 1, 1]
])
assert(ob.updateWith([150, 0, 2])) // remove middle bid
assert.deepEqual(ob.bids, [
[160, 2, 4],
[100, 1, 1]
])
assert(ob.updateWith([159, 1, 42])) // insert middle bid
assert.deepEqual(ob.bids, [
[160, 2, 4],
[159, 1, 42],
[100, 1, 1]
])
assert(ob.updateWith([159.9, 2, 7])) // insert another bid
assert.deepEqual(ob.bids, [
[160, 2, 4],
[159.9, 2, 7],
[159, 1, 42],
[100, 1, 1]
])
assert.deepEqual(ob.asks, [ // verify asks
[161, 1, -3],
[175, 1, -2],
[200, 1, -1]
])
assert(ob.updateWith([161, 2, -4])) // update top ask
assert.deepEqual(ob.asks, [
[161, 2, -4],
[175, 1, -2],
[200, 1, -1]
])
assert(ob.updateWith([175, 0, -2])) // remove middle ask
assert.deepEqual(ob.asks, [
[161, 2, -4],
[200, 1, -1]
])
assert(ob.updateWith([175, 1, -42])) // insert middle ask
assert.deepEqual(ob.asks, [
[161, 2, -4],
[175, 1, -42],
[200, 1, -1]
])
assert(ob.updateWith([170, 2, -7])) // insert another ask
assert.deepEqual(ob.asks, [
[161, 2, -4],
[170, 2, -7],
[175, 1, -42],
[200, 1, -1]
])
assert.deepEqual(ob.bids, [ // verify bids
[160, 2, 4],
[159.9, 2, 7],
[159, 1, 42],
[100, 1, 1]
])
})
it('updateWith: correctly applies update (raw books)', () => {
let _id = Date.now()
const id = () => _id++
const idBidA = id()
const idBidB = id()
const idBidC = id()
const idBidD = id()
const idBidE = id()
const idBidF = id()
const idAskA = id()
const idAskB = id()
const idAskC = id()
const idAskD = id()
const idAskE = id()
const idAskF = id()
const entries = [
[idBidA, 100, 10],
[idAskA, 200, -10]
]
const ob = new OrderBook(entries, true)
assert(ob.updateWith([idBidA, 100, 15])) // update bid
assert(ob.updateWith([idAskA, 200, -15])) // update ask
assert.deepEqual(ob.bids, [[idBidA, 100, 15]])
assert.deepEqual(ob.asks, [[idAskA, 200, -15]])
assert(ob.updateWith([idBidA, 0, 15])) // remove bid
assert(ob.updateWith([idAskA, 0, -15])) // remove ask
assert.equal(ob.bids.length, 0)
assert.equal(ob.asks.length, 0)
assert(ob.updateWith([idBidC, 150, 2])) // add bid
assert(ob.updateWith([idBidB, 100, 1])) // add bid
assert(ob.updateWith([idBidD, 160, 3])) // add bid
assert(ob.updateWith([idAskD, 161, -3])) // add ask
assert(ob.updateWith([idAskB, 200, -1])) // add ask
assert(ob.updateWith([idAskC, 175, -2])) // add ask
assert.equal(ob.bids.length, 3)
assert.equal(ob.asks.length, 3)
assert.deepEqual(ob.bids, [
[idBidD, 160, 3],
[idBidC, 150, 2],
[idBidB, 100, 1]
])
assert.deepEqual(ob.asks, [
[idAskD, 161, -3],
[idAskC, 175, -2],
[idAskB, 200, -1]
])
assert(ob.updateWith([idBidD, 160, 4])) // update top bid
assert.deepEqual(ob.bids, [
[idBidD, 160, 4],
[idBidC, 150, 2],
[idBidB, 100, 1]
])
assert(ob.updateWith([idBidC, 0, 2])) // remove middle bid
assert.deepEqual(ob.bids, [
[idBidD, 160, 4],
[idBidB, 100, 1]
])
assert(ob.updateWith([idBidE, 159, 42])) // insert middle bid
assert.deepEqual(ob.bids, [
[idBidD, 160, 4],
[idBidE, 159, 42],
[idBidB, 100, 1]
])
assert(ob.updateWith([idBidF, 159.9, 7])) // insert another bid
assert.deepEqual(ob.bids, [
[idBidD, 160, 4],
[idBidF, 159.9, 7],
[idBidE, 159, 42],
[idBidB, 100, 1]
])
assert.deepEqual(ob.asks, [ // verify asks
[idAskD, 161, -3],
[idAskC, 175, -2],
[idAskB, 200, -1]
])
assert(ob.updateWith([idAskD, 161, -4])) // update top ask
assert.deepEqual(ob.asks, [
[idAskD, 161, -4],
[idAskC, 175, -2],
[idAskB, 200, -1]
])
assert(ob.updateWith([idAskC, 0, -2])) // remove middle ask
assert.deepEqual(ob.asks, [
[idAskD, 161, -4],
[idAskB, 200, -1]
])
assert(ob.updateWith([idAskE, 165, -42])) // insert middle ask
assert.deepEqual(ob.asks, [
[idAskD, 161, -4],
[idAskE, 165, -42],
[idAskB, 200, -1]
])
assert(ob.updateWith([idAskF, 162, -7])) // insert another ask
assert.deepEqual(ob.asks, [
[idAskD, 161, -4],
[idAskF, 162, -7],
[idAskE, 165, -42],
[idAskB, 200, -1]
])
assert.deepEqual(ob.bids, [ // verify bids
[idBidD, 160, 4],
[idBidF, 159.9, 7],
[idBidE, 159, 42],
[idBidB, 100, 1]
])
})
it('updateWith: maintains sort', () => {
const ob = new OrderBook([
[100, 100, 10],
[200, 200, -10]
])
assert(ob.updateWith([20, 5, 10]))
assert(ob.updateWith([150, 5, 10]))
assert(ob.updateWith([80, 5, 10]))
assert(ob.updateWith([300, 5, -10]))
assert(ob.updateWith([40, 5, 10]))
assert(ob.updateWith([130, 5, 10]))
assert(ob.updateWith([342, 5, -10]))
assert(ob.updateWith([457, 5, -10]))
for (let i = 0; i < ob.bids.length - 2; i++) {
assert(ob.bids[i][0] > ob.bids[i + 1][0])
}
for (let i = 0; i < ob.asks.length - 2; i++) {
assert(ob.asks[i][0] < ob.asks[i + 1][0])
}
})
it('updateWith: emits an update event', (done) => {
const ob = new OrderBook([
[100, 2, 10],
[200, 2, -10]
])
ob.on('update', () => {
done()
})
assert(ob.updateWith([20, 5, 10]))
})
it('midPrice: calculates mid price', () => {
const entries = [
[100, 2, 10],
[200, 2, -10]
]
const ob = new OrderBook(entries)
assert.equal(ob.midPrice(), 150)
})
it('getEntry: returns null for unknown entries', () => {
const entries = [
[100, 2, 10],
[200, 2, -10]
]
const ob = new OrderBook(entries)
const entry = ob.getEntry(300)
assert.equal(entry, null)
})
it('getEntry: returns entry even with only one OB side', () => {
const entriesA = [[100, 2, 10]]
const entriesB = [[200, 2, -10]]
const obA = new OrderBook(entriesA)
const obB = new OrderBook(entriesB)
assert.deepEqual(obA.getEntry(100), { price: 100, count: 2, amount: 10 })
assert.deepEqual(obB.getEntry(200), { price: 200, count: 2, amount: -10 })
})
it('getEntry: unserializes entry before returning', () => {
const entries = [
[100, 2, 10],
[200, 2, -10]
]
const ob = new OrderBook(entries)
const entry = ob.getEntry(100)
assert.equal(entry.price, 100)
assert.equal(entry.count, 2)
assert.equal(entry.amount, 10)
})
it('updateArrayOBWith: returns false for unknown entry', () => {
const ob = [
[100, 2, 10],
[200, 2, -10]
]
assert(!OrderBook.updateArrayOBWith(ob, [300, 0, -1]))
assert(!OrderBook.updateArrayOBWith(ob, [300, 0, 1]))
})
it('updateArrayOBWith: correctly applies update', () => {
const ob = [
[100, 2, 10],
[200, 2, -10]
]
assert(OrderBook.updateArrayOBWith(ob, [100, 0, 1])) // general manipulation
assert(OrderBook.updateArrayOBWith(ob, [150, 1, 16]))
assert(OrderBook.updateArrayOBWith(ob, [200, 7, -42]))
assert(OrderBook.updateArrayOBWith(ob, [121, 3, 14]))
assert(OrderBook.updateArrayOBWith(ob, [300, 1, -4]))
assert.deepEqual(ob, [
[300, 1, -4],
[200, 7, -42],
[150, 1, 16],
[121, 3, 14]
])
assert(OrderBook.updateArrayOBWith(ob, [130, 1, 10])) // add middle bid
assert.deepEqual(ob, [
[300, 1, -4],
[200, 7, -42],
[150, 1, 16],
[130, 1, 10],
[121, 3, 14]
])
assert(OrderBook.updateArrayOBWith(ob, [140, 1, 20])) // add another bid
assert.deepEqual(ob, [
[300, 1, -4],
[200, 7, -42],
[150, 1, 16],
[140, 1, 20],
[130, 1, 10],
[121, 3, 14]
])
assert(OrderBook.updateArrayOBWith(ob, [140, 1, 42])) // update the new bid
assert.deepEqual(ob, [
[300, 1, -4],
[200, 7, -42],
[150, 1, 16],
[140, 1, 42],
[130, 1, 10],
[121, 3, 14]
])
assert(OrderBook.updateArrayOBWith(ob, [130, 0, 42])) // remove a bid
assert.deepEqual(ob, [
[300, 1, -4],
[200, 7, -42],
[150, 1, 16],
[140, 1, 42],
[121, 3, 14]
])
assert(OrderBook.updateArrayOBWith(ob, [250, 1, -10])) // add middle ask
assert.deepEqual(ob, [
[300, 1, -4],
[250, 1, -10],
[200, 7, -42],
[150, 1, 16],
[140, 1, 42],
[121, 3, 14]
])
assert(OrderBook.updateArrayOBWith(ob, [220, 1, -20])) // add another ask
assert.deepEqual(ob, [
[300, 1, -4],
[250, 1, -10],
[220, 1, -20],
[200, 7, -42],
[150, 1, 16],
[140, 1, 42],
[121, 3, 14]
])
assert(OrderBook.updateArrayOBWith(ob, [220, 1, -42])) // update the new ask
assert.deepEqual(ob, [
[300, 1, -4],
[250, 1, -10],
[220, 1, -42],
[200, 7, -42],
[150, 1, 16],
[140, 1, 42],
[121, 3, 14]
])
assert(OrderBook.updateArrayOBWith(ob, [300, 0, -4])) // remove an ask
assert.deepEqual(ob, [
[250, 1, -10],
[220, 1, -42],
[200, 7, -42],
[150, 1, 16],
[140, 1, 42],
[121, 3, 14]
])
})
it('updateArrayOBWith: correctly applies update (raw books)', () => {
let _id = Date.now()
const id = () => _id++
const idBidA = id()
const idBidB = id()
const idBidC = id()
const idAskA = id()
const idAskB = id()
const ob = [
[idBidA, 100, 10],
[idAskA, 200, -10]
]
assert(OrderBook.updateArrayOBWith(ob, [idBidA, 0, 10], true)) // general manipulation
assert(OrderBook.updateArrayOBWith(ob, [idBidB, 150, 16], true))
assert(OrderBook.updateArrayOBWith(ob, [idAskA, 200, -42], true))
assert(OrderBook.updateArrayOBWith(ob, [idBidC, 121, 14], true))
assert(OrderBook.updateArrayOBWith(ob, [idAskB, 300, -4], true))
assert.deepEqual(ob, [
[idAskB, 300, -4],
[idAskA, 200, -42],
[idBidB, 150, 16],
[idBidC, 121, 14]
])
const idBidD = id()
assert(OrderBook.updateArrayOBWith(ob, [idBidD, 130, 10], true)) // add middle bid
assert.deepEqual(ob, [
[idAskB, 300, -4],
[idAskA, 200, -42],
[idBidB, 150, 16],
[idBidD, 130, 10],
[idBidC, 121, 14]
])
const idBidE = id()
assert(OrderBook.updateArrayOBWith(ob, [idBidE, 140, 20], true)) // add another bid
assert.deepEqual(ob, [
[idAskB, 300, -4],
[idAskA, 200, -42],
[idBidB, 150, 16],
[idBidE, 140, 20],
[idBidD, 130, 10],
[idBidC, 121, 14]
])
assert(OrderBook.updateArrayOBWith(ob, [idBidE, 140, 42], true)) // update the new bid
assert.deepEqual(ob, [
[idAskB, 300, -4],
[idAskA, 200, -42],
[idBidB, 150, 16],
[idBidE, 140, 42],
[idBidD, 130, 10],
[idBidC, 121, 14]
])
assert(OrderBook.updateArrayOBWith(ob, [idBidD, 0, 42], true)) // remove a bid
assert.deepEqual(ob, [
[idAskB, 300, -4],
[idAskA, 200, -42],
[idBidB, 150, 16],
[idBidE, 140, 42],
[idBidC, 121, 14]
])
const idAskC = id()
assert(OrderBook.updateArrayOBWith(ob, [idAskC, 250, -10], true)) // add middle ask
assert.deepEqual(ob, [
[idAskB, 300, -4],
[idAskC, 250, -10],
[idAskA, 200, -42],
[idBidB, 150, 16],
[idBidE, 140, 42],
[idBidC, 121, 14]
])
const idAskD = id()
assert(OrderBook.updateArrayOBWith(ob, [idAskD, 220, -20], true)) // add another ask
assert.deepEqual(ob, [
[idAskB, 300, -4],
[idAskC, 250, -10],
[idAskD, 220, -20],
[idAskA, 200, -42],
[idBidB, 150, 16],
[idBidE, 140, 42],
[idBidC, 121, 14]
])
assert(OrderBook.updateArrayOBWith(ob, [idAskD, 220, -42], true)) // update the new ask
assert.deepEqual(ob, [
[idAskB, 300, -4],
[idAskC, 250, -10],
[idAskD, 220, -42],
[idAskA, 200, -42],
[idBidB, 150, 16],
[idBidE, 140, 42],
[idBidC, 121, 14]
])
assert(OrderBook.updateArrayOBWith(ob, [idAskB, 0, -4], true)) // remove an ask
assert.deepEqual(ob, [
[idAskC, 250, -10],
[idAskD, 220, -42],
[idAskA, 200, -42],
[idBidB, 150, 16],
[idBidE, 140, 42],
[idBidC, 121, 14]
])
})
it('unserialize: returns bid/asks map for snapshots', () => {
const obData = [
[100, 2, 10],
[200, 2, -10]
]
const ob = OrderBook.unserialize(obData)
assert.equal(typeof ob, 'object')
assert.equal(Object.keys(ob).length, 2)
assert.deepEqual(ob.bids, [{ price: 100, count: 2, amount: 10 }])
assert.deepEqual(ob.asks, [{ price: 200, count: 2, amount: -10 }])
})
it('unserialize: returns map for entries', () => {
const entry = OrderBook.unserialize([150, 0, -1])
assert.deepEqual(entry, {
price: 150,
count: 0,
amount: -1
})
})
it('unserialize: supports raw books', () => {
const entry = OrderBook.unserialize([[1337, 150, -1], [1338, 151, 1]], true)
const exp = {
asks: [{
orderID: 1337,
price: 150,
amount: -1
}],
bids: [{
orderID: 1338,
price: 151,
amount: 1
}]
}
assert.deepEqual(entry, exp)
})
})
|
##########
# Part of the code modified from:
# https://github.com/asappresearch/emergent-comms-negotiation
##########
import torch
from torch import nn, autograd
from torch.autograd import Variable
import torch.nn.functional as F
import numpy as np
eps = 1e-6
class NumberSequenceEncoder(nn.Module):
def __init__(self, num_values, embedding_size=100):
super().__init__()
self.embedding_size = embedding_size
self.num_values = num_values
        self.embedding = nn.Embedding(num_values, embedding_size)  # learnable lookup table mapping each of the num_values tokens to an embedding vector
self.lstm = nn.LSTMCell(input_size=embedding_size, hidden_size=embedding_size)
def forward(self, x):
batch_size = x.size()[0]
seq_len = x.size()[1]
x = x.transpose(0, 1) # now 0 is sequence dim, 1 is batch dim - they've been transposed
x = self.embedding(x)
type_constr = torch.cuda if x.is_cuda else torch
state = (
Variable(type_constr.FloatTensor(batch_size, self.embedding_size).fill_(0)),
Variable(type_constr.FloatTensor(batch_size, self.embedding_size).fill_(0))
) # hidden state, cell state - initialized with zeros to form a clean-slate LSTM
for s in range(seq_len):
state = self.lstm(x[s], state) # takes the whole batch but only at embedded timestep s
return state[0] # hidden state of size (batch_size, embedding_size)
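# Illustrative sketch (not part of the original model): how NumberSequenceEncoder is meant
# to be driven. A batch of integer sequences is embedded token by token and an LSTMCell is
# stepped along the sequence dimension; only the final hidden state is returned. The batch
# below (4 sequences of length 3 over 10 possible values) is an assumption chosen purely
# for demonstration; the helper is defined but never called.
def _sketch_number_sequence_encoding():
    enc = NumberSequenceEncoder(num_values=10, embedding_size=100)
    x = torch.LongTensor([[1, 2, 3], [4, 5, 6], [0, 1, 2], [3, 4, 5]])  # (batch=4, seq_len=3)
    h = enc(x)  # final hidden state, shape (4, 100)
    return h.size()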
class TermPolicy(nn.Module):
def __init__(self, embedding_size=100):
super().__init__()
self.h1 = nn.Linear(embedding_size, 1)
def forward(self, thoughtvector, testing):
logits = self.h1(thoughtvector)
response_probs = torch.clamp(F.sigmoid(logits), eps, 1-eps) # acceptance probabilities
stochastic_draws = 0
res_greedy = (response_probs.data >= 0.5).view(-1, 1).float()
log_g = None
if not testing:
a = torch.bernoulli(response_probs) # sample a decision for the batch elems
g = a.detach() * response_probs + (1 - a.detach()) * (1 - response_probs)
log_g = g.log()
a = a.data
else:
a = res_greedy
stochastic_draws += thoughtvector.size()[0]
matches_greedy = (res_greedy == a)
matches_greedy_count = matches_greedy.int().sum()
# response_probs = response_probs + eps
entropy = - (response_probs * response_probs.log() + (1-response_probs)*(1-response_probs).log()).sum()
# for the batch:
# probs of acceptance, probs of sampled decisions, sampled decisions, bernoulli entropies, how many decisions are argmaxes in batch
return response_probs, log_g, a.byte(), entropy, matches_greedy_count, stochastic_draws
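# Illustrative sketch (assumption, not taken from the original repository): the log_g kept by
# TermPolicy is the log-probability of the sampled Bernoulli decision,
#   g = a * p + (1 - a) * (1 - p),
# which is the term a REINFORCE-style update later scales by the reward. A minimal standalone
# version of that bookkeeping, with the same Bernoulli entropy as above:
def _sketch_bernoulli_logprob(probs):
    # probs: acceptance probabilities in (0, 1), e.g. a (batch_size, 1) float tensor
    a = torch.bernoulli(probs)                               # sampled 0/1 decisions
    g = a.detach() * probs + (1 - a.detach()) * (1 - probs)  # probability of the action taken
    entropy = -(probs * probs.log() + (1 - probs) * (1 - probs).log()).sum()
    return g.log(), a, entropy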
class ProposalPolicy(nn.Module):
def __init__(self, embedding_size=100, num_counts=6, num_items=3):
super().__init__()
self.num_counts = num_counts
self.num_items = num_items
self.embedding_size = embedding_size
self.fcs = []
for i in range(num_items):
fc = nn.Linear(embedding_size, num_counts) # linear for each item
self.fcs.append(fc)
self.__setattr__('h1_%s' % i, fc)
def forward(self, x, testing):
batch_size = x.size()[0]
nodes = []
entropy = 0
matches_argmax_count = 0
type_constr = torch.cuda if x.is_cuda else torch
stochastic_draws = 0
proposal = type_constr.LongTensor(batch_size, self.num_items).fill_(0)
for i in range(self.num_items):
logits = self.fcs[i](x)
probs = torch.clamp(F.softmax(logits), eps, 1-eps)
_, res_greedy = probs.data.max(1)
res_greedy = res_greedy.view(-1, 1).long()
log_g = None
if not testing:
a = torch.multinomial(probs, num_samples=1)
g = torch.gather(probs, 1, Variable(a.data)) # place probs on indices specified in samples
log_g = g.log()
a = a.data
else:
a = res_greedy
matches_argmax = res_greedy == a
matches_argmax_count += matches_argmax.int().sum()
stochastic_draws += batch_size
if log_g is not None:
nodes.append(log_g)
# probs = probs + eps
            entropy += (- probs * probs.log()).sum()  # probs come from a softmax, so summing over the count dimension gives the categorical entropy
proposal[:, i] = a[:, 0]
return nodes, proposal, entropy, matches_argmax_count, stochastic_draws
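# Illustrative sketch (assumption): for every item, ProposalPolicy samples a count from a
# categorical distribution, records the log-probability of the sampled index via gather, and
# accumulates the categorical entropy used for regularization. A standalone version of that
# per-item step, reusing the module-level eps:
def _sketch_categorical_sample(logits):
    # logits: (batch_size, num_counts) unnormalized scores
    probs = torch.clamp(F.softmax(logits, dim=1), eps, 1 - eps)
    a = torch.multinomial(probs, num_samples=1)   # sampled count index, shape (batch_size, 1)
    log_g = torch.gather(probs, 1, a).log()       # log-probability of the sampled index
    entropy = (- probs * probs.log()).sum()       # summed categorical entropy
    return a, log_g, entropy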
class ProposalReprPolicy(nn.Module):
def __init__(self, embedding_size=100, num_counts=6, num_items=3):
super().__init__()
self.num_counts = num_counts
self.num_items = num_items
self.embedding_size = embedding_size
self.fcs = []
for i in range(num_items):
# linear for each item. Takes the same embedding (size 100) plus onehot of the same item in the hidden proposal
fc = nn.Linear(embedding_size+num_counts, num_counts)
self.fcs.append(fc)
self.__setattr__('h1_%s' % i, fc)
def forward(self, x, hidden_proposal, testing):
batch_size = x.size()[0]
nodes = []
entropy = 0
matches_argmax_count = 0
type_constr = torch.cuda if x.is_cuda else torch
stochastic_draws = 0
hidden_proposal_onehot = torch.zeros(batch_size, self.num_items, self.num_counts)
if x.is_cuda:
hidden_proposal_onehot = hidden_proposal_onehot.cuda()
hidden_proposal_onehot.scatter_(2, hidden_proposal.unsqueeze(2), 1) # dim, index, src val
proposal = type_constr.LongTensor(batch_size, self.num_items).fill_(0) # new public proposal
for i in range(self.num_items):
cur_item_hidden_proposal = hidden_proposal_onehot[:, i, :]
logits = self.fcs[i](torch.cat([x, cur_item_hidden_proposal], dim=1))
probs = torch.clamp(F.softmax(logits), eps, 1-eps)
_, res_greedy = probs.data.max(1)
res_greedy = res_greedy.view(-1, 1).long()
log_g = None
if not testing:
a = torch.multinomial(probs, num_samples=1)
g = torch.gather(probs, 1, Variable(a.data))
log_g = g.log()
a = a.data
else:
a = res_greedy
matches_argmax = (res_greedy == a)
matches_argmax_count += matches_argmax.int().sum()
stochastic_draws += batch_size
if log_g is not None:
nodes.append(log_g)
# probs = probs + eps
            entropy += (- probs * probs.log()).sum()  # probs come from a softmax, so summing over the count dimension gives the categorical entropy
proposal[:, i] = a[:, 0]
return nodes, proposal, entropy, matches_argmax_count, stochastic_draws
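# Illustrative sketch (assumption): the scatter_ call in ProposalReprPolicy.forward turns the
# integer hidden proposal of shape (batch_size, num_items) into a one-hot tensor of shape
# (batch_size, num_items, num_counts) by writing 1 at each proposed count index. Minimal
# standalone version of that conversion:
def _sketch_proposal_onehot(hidden_proposal, num_counts=6):
    # hidden_proposal: LongTensor (batch_size, num_items) with values in [0, num_counts)
    batch_size, num_items = hidden_proposal.size()
    onehot = torch.zeros(batch_size, num_items, num_counts)
    onehot.scatter_(2, hidden_proposal.unsqueeze(2), 1)  # dim=2, index, scalar source value
    return onehot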
class RedistributionPolicy(nn.Module):
def __init__(self, embedding_size=100):
super().__init__()
self.enable_redist = nn.Linear(embedding_size, 1)
self.favor_first_player = nn.Linear(embedding_size, 1)
def forward(self, thoughtvector, testing, mid_move_indices):
enable_probs = torch.clamp(F.sigmoid(self.enable_redist(thoughtvector)), eps, 1-eps)
favor_probs = torch.clamp(F.sigmoid(self.favor_first_player(thoughtvector)), eps, 1-eps)
enable_decision = torch.bernoulli(enable_probs).long() # sample a decision for the batch elems
enable_argmax = (enable_decision == (enable_probs > 0.5).long())
enable_logprob = (enable_probs * enable_decision.float().detach() + (1 - enable_probs) * (1 - enable_decision.float().detach())).log()
enable_entropy = - enable_probs*enable_probs.log() - (1-enable_probs)*(1-enable_probs).log()
enable_decision[mid_move_indices] = 0
enable_argmax[mid_move_indices] = 0
enable_logprob[mid_move_indices] = 0
enable_entropy[mid_move_indices] = 0
# to save probs but also avoid backward problems from in-place computation
enable_probs_clone = enable_probs.clone()
enable_probs_clone[mid_move_indices] = 0
enable_argmax_count = enable_argmax.sum().item()
enable_entropy = enable_entropy.sum()
favor_decision = torch.bernoulli(favor_probs).long()
favor_argmax = (favor_decision == (favor_probs > 0.5).long())
favor_logprob = (favor_probs * favor_decision.float().detach() + (1 - favor_probs) * (1 - favor_decision.float().detach())).log()
favor_entropy = - favor_probs*favor_probs.log() - (1-favor_probs)*(1-favor_probs).log()
favor_decision[enable_decision == 0] = 0
favor_argmax[enable_decision == 0] = 0
favor_logprob[enable_decision == 0] = 0
favor_entropy[enable_decision == 0] = 0
# to save probs but also avoid backward problems from in-place computation
favor_probs_clone = favor_probs.clone()
favor_probs_clone[enable_decision == 0] = 0
favor_argmax_count = favor_argmax.sum().item()
favor_entropy = favor_entropy.sum()
return enable_logprob, enable_probs_clone, enable_decision.data.byte(), enable_entropy, favor_logprob, favor_probs_clone, favor_decision.data.byte(), favor_entropy, enable_argmax_count, favor_argmax_count, thoughtvector.size(0)-len(mid_move_indices), enable_decision.sum().item()
class AgentModel(nn.Module):
def __init__(
self, enable_binding_comm, enable_cheap_comm,
response_entropy_reg,
utterance_entropy_reg,
proposal_entropy_reg,
hidden_embedding_size=30,):
super().__init__()
self.response_entropy_reg = response_entropy_reg
self.utterance_entropy_reg = utterance_entropy_reg
self.proposal_entropy_reg = proposal_entropy_reg
self.hidden_embedding_size = hidden_embedding_size
self.enable_binding_comm = enable_binding_comm # ignored here, the proposal is predicted but then blocked in ecn.py
self.enable_cheap_comm = enable_cheap_comm
self.lstm = nn.LSTMCell(
input_size=hidden_embedding_size,
hidden_size=hidden_embedding_size)
self.combined_net = nn.Sequential(nn.Linear(19, hidden_embedding_size), nn.ReLU())
# self.combined_net = nn.Sequential(nn.Linear(19, hidden_embedding_size), nn.ReLU(), nn.Linear(hidden_embedding_size, hidden_embedding_size), nn.ReLU())
self.response_policy = TermPolicy(embedding_size=hidden_embedding_size)
self.proposal_policy = ProposalPolicy(embedding_size=hidden_embedding_size)
if self.enable_cheap_comm:
self.proposal_repr_policy = ProposalReprPolicy(embedding_size=hidden_embedding_size)
def forward(self, pool, utility, own_proposal, own_message,
opponent_proposal, opponent_message, hidden_state, cell_state, deterministic, timestep):
if deterministic:
            raise NotImplementedError  # deterministic evaluation is disabled for now; evaluating with stochastic actions makes more sense here
forward_stats = {}
batch_size = pool.size()[0]
type_constr = torch.cuda if pool.is_cuda else torch
timestep_formatted = np.reshape(np.repeat(np.array([timestep]), batch_size), (batch_size, 1))
timestep_formatted = torch.from_numpy(timestep_formatted).float()
if pool.is_cuda:
            timestep_formatted = timestep_formatted.cuda()  # .cuda() is not in-place; keep the returned tensor
h_t = torch.cat([ten.float() for ten in [pool, utility, own_proposal, own_message, opponent_proposal, opponent_message, timestep_formatted]], -1) # (static game context, utterance, proposal)
h_t = self.combined_net(h_t) # act on (static game context, utterance, proposal) with linear and relu
        hidden_state, cell_state = self.lstm(h_t, (hidden_state, cell_state))  # one LSTM step over the whole batch at this timestep
h_t = hidden_state
entropy_loss = 0
nodes = []
# probs of acceptance, probs of sampled decisions, sampled decisions, bernoulli entropies, how many decisions are argmaxes in batch, num of decisions
response_probs, response_node, response, response_entropy, response_matches_argmax_count, forward_stats['response_stochastic_draws'] = self.response_policy(h_t, testing=deterministic)
forward_stats['response_prob'] = response_probs.sum().item()
forward_stats['response_entropy'] = response_entropy.item()
forward_stats['response_matches_argmax_count'] = response_matches_argmax_count.sum().item()
nodes.append(response_node)
entropy_loss -= self.response_entropy_reg * response_entropy # maximize entropy so minimize loss ~ (-entropy)
proposal_nodes, proposal, prop_entropy, prop_matches_argmax_count, forward_stats['prop_stochastic_draws'] = self.proposal_policy(h_t, testing=deterministic)
forward_stats['prop_entropy'] = prop_entropy.item()
forward_stats['prop_matches_argmax_count'] = prop_matches_argmax_count.sum().item()
nodes += proposal_nodes
entropy_loss -= self.proposal_entropy_reg * prop_entropy
utterance = None
if self.enable_cheap_comm:
utterance_nodes, utterance, utt_entropy, utt_matches_argmax_count, forward_stats['utt_stochastic_draws'] = self.proposal_repr_policy(h_t, proposal, testing=deterministic)
forward_stats['utt_entropy'] = utt_entropy.item()
forward_stats['utt_matches_argmax_count'] = utt_matches_argmax_count.sum().item()
nodes += utterance_nodes
entropy_loss -= self.utterance_entropy_reg * utt_entropy
else:
forward_stats['utt_entropy'] = 0
forward_stats['utt_matches_argmax_count'] = 0
forward_stats['utt_stochastic_draws'] = 0
utterance = type_constr.LongTensor(batch_size, 3).zero_()
return nodes, response, utterance, proposal, entropy_loss, hidden_state, cell_state, forward_stats
class ArbitratorModel(nn.Module):
def __init__(self,
entropy_reg,
share_utilities,
hidden_embedding_size=30,):
super().__init__()
self.entropy_reg = entropy_reg
self.share_utilities = share_utilities
self.hidden_embedding_size = hidden_embedding_size
input_size = 17 if self.share_utilities else 11
        print('arbitrator input size:', input_size)
self.combined_net = nn.Sequential(nn.Linear(input_size, hidden_embedding_size), nn.ReLU())
# self.combined_net = nn.Sequential(nn.Linear(input_size, hidden_embedding_size), nn.ReLU(), nn.Linear(hidden_embedding_size, hidden_embedding_size), nn.ReLU())
self.lstm = nn.LSTMCell(
input_size=hidden_embedding_size,
hidden_size=hidden_embedding_size)
self.redist_policy = RedistributionPolicy(hidden_embedding_size)
def forward(self, pool, utilities0, utilities1, proposal, message, game_finished, timestep, deterministic, hidden_state, cell_state):
if deterministic:
            raise NotImplementedError  # deterministic evaluation is disabled for now; evaluating with stochastic actions makes more sense here
forward_stats = {}
batch_size = pool.size()[0]
final_move_indices = game_finished[:, 0].nonzero()
mid_move_indices = (1-game_finished[:, 0]).nonzero()
type_constr = torch.cuda if pool.is_cuda else torch
msg_encoded = message.clone() # encoding the prev message
prop_encoded = proposal.clone() # encoding final proposal with same net if it's non-zero
timestep_formatted = np.reshape(np.repeat(np.array([timestep]), batch_size), (batch_size, 1))
timestep_formatted = torch.from_numpy(timestep_formatted)
if pool.is_cuda:
            timestep_formatted = timestep_formatted.cuda()  # .cuda() is not in-place; keep the returned tensor
prop_encoded[mid_move_indices, :] = 0 # no access to the proposal if game hasn't ended
msg_encoded[final_move_indices, :] = 0 # the final message doesn't get through
if self.share_utilities:
input_tens = [pool, utilities0, utilities1, prop_encoded, msg_encoded, game_finished, timestep_formatted]
else:
input_tens = [pool, prop_encoded, msg_encoded, game_finished, timestep_formatted]
h_t = torch.cat([ten.float() for ten in input_tens], -1) # (static game context, utterance, proposal)
h_t = self.combined_net(h_t) # act on (static game context, utterance, proposal) with linear and relu
        hidden_state, cell_state = self.lstm(h_t, (hidden_state, cell_state))  # one LSTM step over the whole batch at this timestep
h_t = hidden_state
enable_logprob, forward_stats['enable_probs'], enable_decision, enable_entropy, \
favor_logprob, forward_stats['favor_probs'], favor_decision, favor_entropy, \
forward_stats['enable_argmax_count'], forward_stats['favor_argmax_count'], \
forward_stats['enable_draws'], forward_stats['favor_draws'] = \
self.redist_policy(h_t, testing=deterministic, mid_move_indices=mid_move_indices)
forward_stats['enable_probs'] = forward_stats['enable_probs'].sum().item()
forward_stats['enable_decision'] = enable_decision.sum().item()
forward_stats['enable_entropy'] = enable_entropy.item()
forward_stats['favor_probs'] = forward_stats['favor_probs'].sum().item()
forward_stats['favor_decision'] = favor_decision.sum().item()
forward_stats['favor_entropy'] = favor_entropy.item()
nodes = [enable_logprob, favor_logprob]
entropy_loss = -(enable_entropy + favor_entropy) * self.entropy_reg
return nodes, enable_decision, favor_decision, entropy_loss, hidden_state, cell_state, forward_stats
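# Hedged illustration (appended here, not part of the original model code): the
# RedistributionPolicy above scores a sampled binary decision d with
# log(p*d + (1-p)*(1-d)), which for d in {0, 1} equals the usual Bernoulli
# log-likelihood d*log(p) + (1-d)*log(1-p); its entropy term is
# -p*log(p) - (1-p)*log(1-p). The tiny check below only demonstrates that
# identity on random values; the batch size is an arbitrary assumption.
def _bernoulli_logprob_sanity_check(batch_size=8):
    p = torch.rand(batch_size, 1).clamp(1e-6, 1 - 1e-6)
    d = torch.bernoulli(p)
    lhs = (p * d + (1 - p) * (1 - d)).log()
    rhs = d * p.log() + (1 - d) * (1 - p).log()
    assert torch.allclose(lhs, rhs), 'both log-probability forms should agree'
    return lhs, rhs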
|
import _pickle as pickle
import os
import time
import numpy as np
import shutil
import tensorflow as tf
import reader
from common import Common
class Model:
topk = 10
num_batches_to_log = 100
def __init__(self, config):
self.config = config
self.sess = tf.Session()
self.eval_queue = None
self.predict_queue = None
self.eval_placeholder = None
self.predict_placeholder = None
self.eval_predicted_indices_op, self.eval_top_values_op, self.eval_true_target_strings_op, self.eval_topk_values = None, None, None, None
self.predict_top_indices_op, self.predict_top_scores_op, self.predict_target_strings_op = None, None, None
self.subtoken_to_index = None
if config.LOAD_PATH:
self.load_model(sess=None)
else:
with open('{}.dict.c2s'.format(config.TRAIN_PATH), 'rb') as file:
subtoken_to_count = pickle.load(file)
node_to_count = pickle.load(file)
target_to_count = pickle.load(file)
max_contexts = pickle.load(file)
self.num_training_examples = pickle.load(file)
print('Dictionaries loaded.')
if self.config.DATA_NUM_CONTEXTS <= 0:
self.config.DATA_NUM_CONTEXTS = max_contexts
self.subtoken_to_index, self.index_to_subtoken, self.subtoken_vocab_size = \
Common.load_vocab_from_dict(subtoken_to_count, add_values=[Common.PAD, Common.UNK],
max_size=config.SUBTOKENS_VOCAB_MAX_SIZE)
print('Loaded subtoken vocab. size: %d' % self.subtoken_vocab_size)
self.target_to_index, self.index_to_target, self.target_vocab_size = \
Common.load_vocab_from_dict(target_to_count, add_values=[Common.PAD, Common.UNK, Common.SOS],
max_size=config.TARGET_VOCAB_MAX_SIZE)
print('Loaded target word vocab. size: %d' % self.target_vocab_size)
self.node_to_index, self.index_to_node, self.nodes_vocab_size = \
Common.load_vocab_from_dict(node_to_count, add_values=[Common.PAD, Common.UNK], max_size=None)
print('Loaded nodes vocab. size: %d' % self.nodes_vocab_size)
self.epochs_trained = 0
def close_session(self):
self.sess.close()
def train(self):
print('Starting training')
start_time = time.time()
batch_num = 0
sum_loss = 0
best_f1 = 0
best_epoch = 0
best_f1_precision = 0
best_f1_recall = 0
epochs_no_improve = 0
self.queue_thread = reader.Reader(subtoken_to_index=self.subtoken_to_index,
node_to_index=self.node_to_index,
target_to_index=self.target_to_index,
config=self.config)
optimizer, train_loss = self.build_training_graph(self.queue_thread.get_output())
self.print_hyperparams()
print('Number of trainable params:',
np.sum([np.prod(v.get_shape().as_list()) for v in tf.trainable_variables()]))
self.initialize_session_variables(self.sess)
        print('Initialized variables')
if self.config.LOAD_PATH:
self.load_model(self.sess)
time.sleep(1)
print('Started reader...')
multi_batch_start_time = time.time()
for iteration in range(1, (self.config.NUM_EPOCHS // self.config.SAVE_EVERY_EPOCHS) + 1):
self.queue_thread.reset(self.sess)
try:
while True:
batch_num += 1
_, batch_loss = self.sess.run([optimizer, train_loss])
sum_loss += batch_loss
if batch_num % self.num_batches_to_log == 0:
self.trace(sum_loss, batch_num, multi_batch_start_time)
sum_loss = 0
multi_batch_start_time = time.time()
except tf.errors.OutOfRangeError:
self.epochs_trained += self.config.SAVE_EVERY_EPOCHS
print('Finished %d epochs' % self.config.SAVE_EVERY_EPOCHS)
results, precision, recall, f1 = self.evaluate()
print('Accuracy after %d epochs: %.5f' % (self.epochs_trained, results))
print('After %d epochs: Precision: %.5f, recall: %.5f, F1: %.5f' % (
self.epochs_trained, precision, recall, f1))
if f1 > best_f1:
best_f1 = f1
best_f1_precision = precision
best_f1_recall = recall
best_epoch = self.epochs_trained
epochs_no_improve = 0
self.save_model(self.sess, self.config.SAVE_PATH)
else:
epochs_no_improve += self.config.SAVE_EVERY_EPOCHS
if epochs_no_improve >= self.config.PATIENCE:
print('Not improved for %d epochs, stopping training' % self.config.PATIENCE)
print('Best scores - epoch %d: ' % best_epoch)
print('Precision: %.5f, recall: %.5f, F1: %.5f' % (best_f1_precision, best_f1_recall, best_f1))
return
if self.config.SAVE_PATH:
self.save_model(self.sess, self.config.SAVE_PATH + '.final')
print('Model saved in file: %s' % self.config.SAVE_PATH)
elapsed = int(time.time() - start_time)
print("Training time: %sh%sm%ss\n" % ((elapsed // 60 // 60), (elapsed // 60) % 60, elapsed % 60))
def trace(self, sum_loss, batch_num, multi_batch_start_time):
multi_batch_elapsed = time.time() - multi_batch_start_time
avg_loss = sum_loss / (self.num_batches_to_log * self.config.BATCH_SIZE)
print('Average loss at batch %d: %f, \tthroughput: %d samples/sec' % (batch_num, avg_loss,
self.config.BATCH_SIZE * self.num_batches_to_log / (
multi_batch_elapsed if multi_batch_elapsed > 0 else 1)))
def evaluate(self, release=False):
eval_start_time = time.time()
if self.eval_queue is None:
self.eval_queue = reader.Reader(subtoken_to_index=self.subtoken_to_index,
node_to_index=self.node_to_index,
target_to_index=self.target_to_index,
config=self.config, is_evaluating=True)
reader_output = self.eval_queue.get_output()
self.eval_predicted_indices_op, self.eval_topk_values, _, _ = \
self.build_test_graph(reader_output)
self.eval_true_target_strings_op = reader_output[reader.TARGET_STRING_KEY]
self.saver = tf.train.Saver(max_to_keep=10)
if self.config.LOAD_PATH and not self.config.TRAIN_PATH:
self.initialize_session_variables(self.sess)
self.load_model(self.sess)
if release:
release_name = self.config.LOAD_PATH + '.release'
print('Releasing model, output model: %s' % release_name)
self.saver.save(self.sess, release_name)
shutil.copyfile(src=self.config.LOAD_PATH + '.dict', dst=release_name + '.dict')
return None
model_dirname = os.path.dirname(self.config.SAVE_PATH if self.config.SAVE_PATH else self.config.LOAD_PATH)
ref_file_name = model_dirname + '/ref.txt'
predicted_file_name = model_dirname + '/pred.txt'
if not os.path.exists(model_dirname):
os.makedirs(model_dirname)
with open(model_dirname + '/log.txt', 'w') as output_file, open(ref_file_name, 'w') as ref_file, open(
predicted_file_name,
'w') as pred_file:
num_correct_predictions = 0
total_predictions = 0
total_prediction_batches = 0
true_positive, false_positive, false_negative = 0, 0, 0
self.eval_queue.reset(self.sess)
start_time = time.time()
try:
while True:
predicted_indices, true_target_strings, top_values = self.sess.run(
[self.eval_predicted_indices_op, self.eval_true_target_strings_op, self.eval_topk_values],
)
true_target_strings = Common.binary_to_string_list(true_target_strings)
ref_file.write(
'\n'.join(
[name.replace(Common.internal_delimiter, ' ') for name in true_target_strings]) + '\n')
if self.config.BEAM_WIDTH > 0:
# predicted indices: (batch, time, beam_width)
predicted_strings = [[[self.index_to_target[i] for i in timestep] for timestep in example] for
example in predicted_indices]
predicted_strings = [list(map(list, zip(*example))) for example in
predicted_strings] # (batch, top-k, target_length)
pred_file.write('\n'.join(
[' '.join(Common.filter_impossible_names(words)) for words in predicted_strings[0]]) + '\n')
else:
predicted_strings = [[self.index_to_target[i] for i in example]
for example in predicted_indices]
pred_file.write('\n'.join(
[' '.join(Common.filter_impossible_names(words)) for words in predicted_strings]) + '\n')
num_correct_predictions = self.update_correct_predictions(num_correct_predictions, output_file,
zip(true_target_strings,
predicted_strings))
true_positive, false_positive, false_negative = self.update_per_subtoken_statistics(
zip(true_target_strings, predicted_strings),
true_positive, false_positive, false_negative)
total_predictions += len(true_target_strings)
total_prediction_batches += 1
if total_prediction_batches % self.num_batches_to_log == 0:
elapsed = time.time() - start_time
self.trace_evaluation(output_file, num_correct_predictions, total_predictions, elapsed)
except tf.errors.OutOfRangeError:
pass
print('Done testing, epoch reached')
output_file.write(str(num_correct_predictions / total_predictions) + '\n')
# Common.compute_bleu(ref_file_name, predicted_file_name)
elapsed = int(time.time() - eval_start_time)
precision, recall, f1 = self.calculate_results(true_positive, false_positive, false_negative)
print("Evaluation time: %sh%sm%ss" % ((elapsed // 60 // 60), (elapsed // 60) % 60, elapsed % 60))
return num_correct_predictions / total_predictions, precision, recall, f1
def update_correct_predictions(self, num_correct_predictions, output_file, results):
for original_name, predicted in results:
if self.config.BEAM_WIDTH > 0:
predicted = predicted[0]
original_name_parts = original_name.split(Common.internal_delimiter)
filtered_original = Common.filter_impossible_names(original_name_parts)
filtered_predicted_parts = Common.filter_impossible_names(predicted)
output_file.write('Original: ' + Common.internal_delimiter.join(original_name_parts) +
' , predicted 1st: ' + Common.internal_delimiter.join(
[target for target in filtered_predicted_parts]) + '\n')
if filtered_original == filtered_predicted_parts or Common.unique(filtered_original) == Common.unique(
filtered_predicted_parts) or ''.join(filtered_original) == ''.join(filtered_predicted_parts):
num_correct_predictions += 1
return num_correct_predictions
def update_per_subtoken_statistics(self, results, true_positive, false_positive, false_negative):
for original_name, predicted in results:
if self.config.BEAM_WIDTH > 0:
predicted = predicted[0]
filtered_predicted_names = Common.filter_impossible_names(predicted)
filtered_original_subtokens = Common.filter_impossible_names(original_name.split(Common.internal_delimiter))
if ''.join(filtered_original_subtokens) == ''.join(filtered_predicted_names):
true_positive += len(filtered_original_subtokens)
continue
for subtok in filtered_predicted_names:
if subtok in filtered_original_subtokens:
true_positive += 1
else:
false_positive += 1
for subtok in filtered_original_subtokens:
                if subtok not in filtered_predicted_names:
false_negative += 1
return true_positive, false_positive, false_negative
def print_hyperparams(self):
print('Training batch size:\t\t\t', self.config.BATCH_SIZE)
print('Dataset path:\t\t\t\t', self.config.TRAIN_PATH)
print('Training file path:\t\t\t', self.config.TRAIN_PATH + '.train.c2s')
print('Validation path:\t\t\t', self.config.TEST_PATH)
print('Taking max contexts from each example:\t', self.config.MAX_CONTEXTS)
print('Random path sampling:\t\t\t', self.config.RANDOM_CONTEXTS)
print('Embedding size:\t\t\t\t', self.config.EMBEDDINGS_SIZE)
if self.config.BIRNN:
print('Using BiLSTMs, each of size:\t\t', self.config.RNN_SIZE // 2)
else:
print('Uni-directional LSTM of size:\t\t', self.config.RNN_SIZE)
print('Decoder size:\t\t\t\t', self.config.DECODER_SIZE)
print('Decoder layers:\t\t\t\t', self.config.NUM_DECODER_LAYERS)
print('Max path lengths:\t\t\t', self.config.MAX_PATH_LENGTH)
print('Max subtokens in a token:\t\t', self.config.MAX_NAME_PARTS)
print('Max target length:\t\t\t', self.config.MAX_TARGET_PARTS)
print('Embeddings dropout keep_prob:\t\t', self.config.EMBEDDINGS_DROPOUT_KEEP_PROB)
print('LSTM dropout keep_prob:\t\t\t', self.config.RNN_DROPOUT_KEEP_PROB)
print('============================================')
@staticmethod
def calculate_results(true_positive, false_positive, false_negative):
if true_positive + false_positive > 0:
precision = true_positive / (true_positive + false_positive)
else:
precision = 0
if true_positive + false_negative > 0:
recall = true_positive / (true_positive + false_negative)
else:
recall = 0
if precision + recall > 0:
f1 = 2 * precision * recall / (precision + recall)
else:
f1 = 0
return precision, recall, f1
@staticmethod
def trace_evaluation(output_file, correct_predictions, total_predictions, elapsed):
accuracy_message = str(correct_predictions / total_predictions)
throughput_message = "Prediction throughput: %d" % int(total_predictions / (elapsed if elapsed > 0 else 1))
output_file.write(accuracy_message + '\n')
output_file.write(throughput_message)
# print(accuracy_message)
print(throughput_message)
def build_training_graph(self, input_tensors):
target_index = input_tensors[reader.TARGET_INDEX_KEY]
target_lengths = input_tensors[reader.TARGET_LENGTH_KEY]
path_source_indices = input_tensors[reader.PATH_SOURCE_INDICES_KEY]
node_indices = input_tensors[reader.NODE_INDICES_KEY]
path_target_indices = input_tensors[reader.PATH_TARGET_INDICES_KEY]
valid_context_mask = input_tensors[reader.VALID_CONTEXT_MASK_KEY]
path_source_lengths = input_tensors[reader.PATH_SOURCE_LENGTHS_KEY]
path_lengths = input_tensors[reader.PATH_LENGTHS_KEY]
path_target_lengths = input_tensors[reader.PATH_TARGET_LENGTHS_KEY]
with tf.variable_scope('model'):
subtoken_vocab = tf.get_variable('SUBTOKENS_VOCAB',
shape=(self.subtoken_vocab_size, self.config.EMBEDDINGS_SIZE),
dtype=tf.float32,
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,
mode='FAN_OUT',
uniform=True))
target_words_vocab = tf.get_variable('TARGET_WORDS_VOCAB',
shape=(self.target_vocab_size, self.config.EMBEDDINGS_SIZE),
dtype=tf.float32,
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,
mode='FAN_OUT',
uniform=True))
nodes_vocab = tf.get_variable('NODES_VOCAB', shape=(self.nodes_vocab_size, self.config.EMBEDDINGS_SIZE),
dtype=tf.float32,
initializer=tf.contrib.layers.variance_scaling_initializer(factor=1.0,
mode='FAN_OUT',
uniform=True))
# (batch, max_contexts, decoder_size)
batched_contexts = self.compute_contexts(subtoken_vocab=subtoken_vocab, nodes_vocab=nodes_vocab,
source_input=path_source_indices, nodes_input=node_indices,
target_input=path_target_indices,
valid_mask=valid_context_mask,
path_source_lengths=path_source_lengths,
path_lengths=path_lengths, path_target_lengths=path_target_lengths)
batch_size = tf.shape(target_index)[0]
outputs, final_states = self.decode_outputs(target_words_vocab=target_words_vocab,
target_input=target_index, batch_size=batch_size,
batched_contexts=batched_contexts,
valid_mask=valid_context_mask)
step = tf.Variable(0, trainable=False)
logits = outputs.rnn_output # (batch, max_output_length, dim * 2 + rnn_size)
crossent = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=target_index, logits=logits)
target_words_nonzero = tf.sequence_mask(target_lengths + 1,
maxlen=self.config.MAX_TARGET_PARTS + 1, dtype=tf.float32)
loss = tf.reduce_sum(crossent * target_words_nonzero) / tf.to_float(batch_size)
if self.config.USE_MOMENTUM:
learning_rate = tf.train.exponential_decay(0.01, step * self.config.BATCH_SIZE,
self.num_training_examples,
0.95, staircase=True)
optimizer = tf.train.MomentumOptimizer(learning_rate, 0.95, use_nesterov=True)
train_op = optimizer.minimize(loss, global_step=step)
else:
params = tf.trainable_variables()
gradients = tf.gradients(loss, params)
clipped_gradients, _ = tf.clip_by_global_norm(gradients, clip_norm=5)
optimizer = tf.train.AdamOptimizer()
train_op = optimizer.apply_gradients(zip(clipped_gradients, params))
self.saver = tf.train.Saver(max_to_keep=10)
return train_op, loss
def decode_outputs(self, target_words_vocab, target_input, batch_size, batched_contexts, valid_mask,
is_evaluating=False):
num_contexts_per_example = tf.count_nonzero(valid_mask, axis=-1)
start_fill = tf.fill([batch_size],
self.target_to_index[Common.SOS]) # (batch, )
decoder_cell = tf.nn.rnn_cell.MultiRNNCell([
tf.nn.rnn_cell.LSTMCell(self.config.DECODER_SIZE) for _ in range(self.config.NUM_DECODER_LAYERS)
])
contexts_sum = tf.reduce_sum(batched_contexts * tf.expand_dims(valid_mask, -1),
axis=1) # (batch_size, dim * 2 + rnn_size)
contexts_average = tf.divide(contexts_sum, tf.to_float(tf.expand_dims(num_contexts_per_example, -1)))
fake_encoder_state = tuple(tf.nn.rnn_cell.LSTMStateTuple(contexts_average, contexts_average) for _ in
range(self.config.NUM_DECODER_LAYERS))
projection_layer = tf.layers.Dense(self.target_vocab_size, use_bias=False)
if is_evaluating and self.config.BEAM_WIDTH > 0:
batched_contexts = tf.contrib.seq2seq.tile_batch(batched_contexts, multiplier=self.config.BEAM_WIDTH)
num_contexts_per_example = tf.contrib.seq2seq.tile_batch(num_contexts_per_example,
multiplier=self.config.BEAM_WIDTH)
attention_mechanism = tf.contrib.seq2seq.LuongAttention(
num_units=self.config.DECODER_SIZE,
memory=batched_contexts
)
# TF doesn't support beam search with alignment history
should_save_alignment_history = is_evaluating and self.config.BEAM_WIDTH == 0
decoder_cell = tf.contrib.seq2seq.AttentionWrapper(decoder_cell, attention_mechanism,
attention_layer_size=self.config.DECODER_SIZE,
alignment_history=should_save_alignment_history)
if is_evaluating:
if self.config.BEAM_WIDTH > 0:
decoder_initial_state = decoder_cell.zero_state(dtype=tf.float32,
batch_size=batch_size * self.config.BEAM_WIDTH)
decoder_initial_state = decoder_initial_state.clone(
cell_state=tf.contrib.seq2seq.tile_batch(fake_encoder_state, multiplier=self.config.BEAM_WIDTH))
decoder = tf.contrib.seq2seq.BeamSearchDecoder(
cell=decoder_cell,
embedding=target_words_vocab,
start_tokens=start_fill,
end_token=self.target_to_index[Common.PAD],
initial_state=decoder_initial_state,
beam_width=self.config.BEAM_WIDTH,
output_layer=projection_layer,
length_penalty_weight=0.0)
else:
helper = tf.contrib.seq2seq.GreedyEmbeddingHelper(target_words_vocab, start_fill, 0)
initial_state = decoder_cell.zero_state(batch_size, tf.float32).clone(cell_state=fake_encoder_state)
decoder = tf.contrib.seq2seq.BasicDecoder(cell=decoder_cell, helper=helper, initial_state=initial_state,
output_layer=projection_layer)
else:
decoder_cell = tf.nn.rnn_cell.DropoutWrapper(decoder_cell,
output_keep_prob=self.config.RNN_DROPOUT_KEEP_PROB)
target_words_embedding = tf.nn.embedding_lookup(target_words_vocab,
tf.concat([tf.expand_dims(start_fill, -1), target_input],
axis=-1)) # (batch, max_target_parts, dim * 2 + rnn_size)
helper = tf.contrib.seq2seq.TrainingHelper(inputs=target_words_embedding,
sequence_length=tf.ones([batch_size], dtype=tf.int32) * (
self.config.MAX_TARGET_PARTS + 1))
initial_state = decoder_cell.zero_state(batch_size, tf.float32).clone(cell_state=fake_encoder_state)
decoder = tf.contrib.seq2seq.BasicDecoder(cell=decoder_cell, helper=helper, initial_state=initial_state,
output_layer=projection_layer)
outputs, final_states, final_sequence_lengths = tf.contrib.seq2seq.dynamic_decode(decoder,
maximum_iterations=self.config.MAX_TARGET_PARTS + 1)
return outputs, final_states
def calculate_path_abstraction(self, path_embed, path_lengths, valid_contexts_mask, is_evaluating=False):
return self.path_rnn_last_state(is_evaluating, path_embed, path_lengths, valid_contexts_mask)
def path_rnn_last_state(self, is_evaluating, path_embed, path_lengths, valid_contexts_mask):
# path_embed: (batch, max_contexts, max_path_length+1, dim)
# path_length: (batch, max_contexts)
# valid_contexts_mask: (batch, max_contexts)
max_contexts = tf.shape(path_embed)[1]
flat_paths = tf.reshape(path_embed, shape=[-1, self.config.MAX_PATH_LENGTH,
self.config.EMBEDDINGS_SIZE]) # (batch * max_contexts, max_path_length+1, dim)
flat_valid_contexts_mask = tf.reshape(valid_contexts_mask, [-1]) # (batch * max_contexts)
lengths = tf.multiply(tf.reshape(path_lengths, [-1]),
tf.cast(flat_valid_contexts_mask, tf.int32)) # (batch * max_contexts)
if self.config.BIRNN:
rnn_cell_fw = tf.nn.rnn_cell.LSTMCell(self.config.RNN_SIZE / 2)
rnn_cell_bw = tf.nn.rnn_cell.LSTMCell(self.config.RNN_SIZE / 2)
if not is_evaluating:
rnn_cell_fw = tf.nn.rnn_cell.DropoutWrapper(rnn_cell_fw,
output_keep_prob=self.config.RNN_DROPOUT_KEEP_PROB)
rnn_cell_bw = tf.nn.rnn_cell.DropoutWrapper(rnn_cell_bw,
output_keep_prob=self.config.RNN_DROPOUT_KEEP_PROB)
_, (state_fw, state_bw) = tf.nn.bidirectional_dynamic_rnn(
cell_fw=rnn_cell_fw,
cell_bw=rnn_cell_bw,
inputs=flat_paths,
dtype=tf.float32,
sequence_length=lengths)
final_rnn_state = tf.concat([state_fw.h, state_bw.h], axis=-1) # (batch * max_contexts, rnn_size)
else:
rnn_cell = tf.nn.rnn_cell.LSTMCell(self.config.RNN_SIZE)
if not is_evaluating:
rnn_cell = tf.nn.rnn_cell.DropoutWrapper(rnn_cell, output_keep_prob=self.config.RNN_DROPOUT_KEEP_PROB)
_, state = tf.nn.dynamic_rnn(
cell=rnn_cell,
inputs=flat_paths,
dtype=tf.float32,
sequence_length=lengths
)
final_rnn_state = state.h # (batch * max_contexts, rnn_size)
return tf.reshape(final_rnn_state,
shape=[-1, max_contexts, self.config.RNN_SIZE]) # (batch, max_contexts, rnn_size)
def compute_contexts(self, subtoken_vocab, nodes_vocab, source_input, nodes_input,
target_input, valid_mask, path_source_lengths, path_lengths, path_target_lengths,
is_evaluating=False):
source_word_embed = tf.nn.embedding_lookup(params=subtoken_vocab,
ids=source_input) # (batch, max_contexts, max_name_parts, dim)
path_embed = tf.nn.embedding_lookup(params=nodes_vocab,
ids=nodes_input) # (batch, max_contexts, max_path_length+1, dim)
target_word_embed = tf.nn.embedding_lookup(params=subtoken_vocab,
ids=target_input) # (batch, max_contexts, max_name_parts, dim)
source_word_mask = tf.expand_dims(
tf.sequence_mask(path_source_lengths, maxlen=self.config.MAX_NAME_PARTS, dtype=tf.float32),
-1) # (batch, max_contexts, max_name_parts, 1)
target_word_mask = tf.expand_dims(
tf.sequence_mask(path_target_lengths, maxlen=self.config.MAX_NAME_PARTS, dtype=tf.float32),
-1) # (batch, max_contexts, max_name_parts, 1)
source_words_sum = tf.reduce_sum(source_word_embed * source_word_mask,
axis=2) # (batch, max_contexts, dim)
path_nodes_aggregation = self.calculate_path_abstraction(path_embed, path_lengths, valid_mask,
is_evaluating) # (batch, max_contexts, rnn_size)
target_words_sum = tf.reduce_sum(target_word_embed * target_word_mask, axis=2) # (batch, max_contexts, dim)
context_embed = tf.concat([source_words_sum, path_nodes_aggregation, target_words_sum],
axis=-1) # (batch, max_contexts, dim * 2 + rnn_size)
if not is_evaluating:
context_embed = tf.nn.dropout(context_embed, self.config.EMBEDDINGS_DROPOUT_KEEP_PROB)
batched_embed = tf.layers.dense(inputs=context_embed, units=self.config.DECODER_SIZE,
activation=tf.nn.tanh, trainable=not is_evaluating, use_bias=False)
return batched_embed
def build_test_graph(self, input_tensors):
target_index = input_tensors[reader.TARGET_INDEX_KEY]
path_source_indices = input_tensors[reader.PATH_SOURCE_INDICES_KEY]
node_indices = input_tensors[reader.NODE_INDICES_KEY]
path_target_indices = input_tensors[reader.PATH_TARGET_INDICES_KEY]
valid_mask = input_tensors[reader.VALID_CONTEXT_MASK_KEY]
path_source_lengths = input_tensors[reader.PATH_SOURCE_LENGTHS_KEY]
path_lengths = input_tensors[reader.PATH_LENGTHS_KEY]
path_target_lengths = input_tensors[reader.PATH_TARGET_LENGTHS_KEY]
with tf.variable_scope('model', reuse=self.get_should_reuse_variables()):
subtoken_vocab = tf.get_variable('SUBTOKENS_VOCAB',
shape=(self.subtoken_vocab_size, self.config.EMBEDDINGS_SIZE),
dtype=tf.float32, trainable=False)
target_words_vocab = tf.get_variable('TARGET_WORDS_VOCAB',
shape=(self.target_vocab_size, self.config.EMBEDDINGS_SIZE),
dtype=tf.float32, trainable=False)
nodes_vocab = tf.get_variable('NODES_VOCAB',
shape=(self.nodes_vocab_size, self.config.EMBEDDINGS_SIZE),
dtype=tf.float32, trainable=False)
batched_contexts = self.compute_contexts(subtoken_vocab=subtoken_vocab, nodes_vocab=nodes_vocab,
source_input=path_source_indices, nodes_input=node_indices,
target_input=path_target_indices,
valid_mask=valid_mask,
path_source_lengths=path_source_lengths,
path_lengths=path_lengths, path_target_lengths=path_target_lengths,
is_evaluating=True)
outputs, final_states = self.decode_outputs(target_words_vocab=target_words_vocab,
target_input=target_index, batch_size=tf.shape(target_index)[0],
batched_contexts=batched_contexts, valid_mask=valid_mask,
is_evaluating=True)
if self.config.BEAM_WIDTH > 0:
predicted_indices = outputs.predicted_ids
topk_values = outputs.beam_search_decoder_output.scores
attention_weights = [tf.no_op()]
else:
predicted_indices = outputs.sample_id
topk_values = tf.constant(1, shape=(1, 1), dtype=tf.float32)
attention_weights = tf.squeeze(final_states.alignment_history.stack(), 1)
return predicted_indices, topk_values, target_index, attention_weights
def predict(self, predict_data_lines):
if self.predict_queue is None:
self.predict_queue = reader.Reader(subtoken_to_index=self.subtoken_to_index,
node_to_index=self.node_to_index,
target_to_index=self.target_to_index,
config=self.config, is_evaluating=True)
self.predict_placeholder = tf.placeholder(tf.string)
reader_output = self.predict_queue.process_from_placeholder(self.predict_placeholder)
reader_output = {key: tf.expand_dims(tensor, 0) for key, tensor in reader_output.items()}
self.predict_top_indices_op, self.predict_top_scores_op, _, self.attention_weights_op = \
self.build_test_graph(reader_output)
self.predict_source_string = reader_output[reader.PATH_SOURCE_STRINGS_KEY]
self.predict_path_string = reader_output[reader.PATH_STRINGS_KEY]
self.predict_path_target_string = reader_output[reader.PATH_TARGET_STRINGS_KEY]
self.predict_target_strings_op = reader_output[reader.TARGET_STRING_KEY]
self.initialize_session_variables(self.sess)
self.saver = tf.train.Saver()
self.load_model(self.sess)
results = []
for line in predict_data_lines:
predicted_indices, top_scores, true_target_strings, attention_weights, path_source_string, path_strings, path_target_string = self.sess.run(
[self.predict_top_indices_op, self.predict_top_scores_op, self.predict_target_strings_op,
self.attention_weights_op,
self.predict_source_string, self.predict_path_string, self.predict_path_target_string],
feed_dict={self.predict_placeholder: line})
top_scores = np.squeeze(top_scores, axis=0)
path_source_string = path_source_string.reshape((-1))
path_strings = path_strings.reshape((-1))
path_target_string = path_target_string.reshape((-1))
predicted_indices = np.squeeze(predicted_indices, axis=0)
true_target_strings = Common.binary_to_string(true_target_strings[0])
if self.config.BEAM_WIDTH > 0:
predicted_strings = [[self.index_to_target[sugg] for sugg in timestep]
for timestep in predicted_indices] # (target_length, top-k)
predicted_strings = list(map(list, zip(*predicted_strings))) # (top-k, target_length)
top_scores = [np.exp(np.sum(s)) for s in zip(*top_scores)]
else:
predicted_strings = [self.index_to_target[idx]
for idx in predicted_indices] # (batch, target_length)
attention_per_path = None
if self.config.BEAM_WIDTH == 0:
attention_per_path = self.get_attention_per_path(path_source_string, path_strings, path_target_string,
attention_weights)
results.append((true_target_strings, predicted_strings, top_scores, attention_per_path))
return results
@staticmethod
def get_attention_per_path(source_strings, path_strings, target_strings, attention_weights):
# attention_weights: (time, contexts)
results = []
for time_step in attention_weights:
attention_per_context = {}
for source, path, target, weight in zip(source_strings, path_strings, target_strings, time_step):
string_triplet = (
Common.binary_to_string(source), Common.binary_to_string(path), Common.binary_to_string(target))
attention_per_context[string_triplet] = weight
results.append(attention_per_context)
return results
def save_model(self, sess, path):
save_target = path + '_iter%d' % self.epochs_trained
dirname = os.path.dirname(save_target)
if not os.path.exists(dirname):
os.makedirs(dirname)
self.saver.save(sess, save_target)
dictionaries_path = save_target + '.dict'
with open(dictionaries_path, 'wb') as file:
pickle.dump(self.subtoken_to_index, file)
pickle.dump(self.index_to_subtoken, file)
pickle.dump(self.subtoken_vocab_size, file)
pickle.dump(self.target_to_index, file)
pickle.dump(self.index_to_target, file)
pickle.dump(self.target_vocab_size, file)
pickle.dump(self.node_to_index, file)
pickle.dump(self.index_to_node, file)
pickle.dump(self.nodes_vocab_size, file)
pickle.dump(self.num_training_examples, file)
pickle.dump(self.epochs_trained, file)
pickle.dump(self.config, file)
print('Saved after %d epochs in: %s' % (self.epochs_trained, save_target))
def load_model(self, sess):
        if sess is not None:
self.saver.restore(sess, self.config.LOAD_PATH)
print('Done loading model')
with open(self.config.LOAD_PATH + '.dict', 'rb') as file:
if self.subtoken_to_index is not None:
return
print('Loading dictionaries from: ' + self.config.LOAD_PATH)
self.subtoken_to_index = pickle.load(file)
self.index_to_subtoken = pickle.load(file)
self.subtoken_vocab_size = pickle.load(file)
self.target_to_index = pickle.load(file)
self.index_to_target = pickle.load(file)
self.target_vocab_size = pickle.load(file)
self.node_to_index = pickle.load(file)
self.index_to_node = pickle.load(file)
self.nodes_vocab_size = pickle.load(file)
self.num_training_examples = pickle.load(file)
self.epochs_trained = pickle.load(file)
saved_config = pickle.load(file)
self.config.take_model_hyperparams_from(saved_config)
print('Done loading dictionaries')
@staticmethod
def initialize_session_variables(sess):
sess.run(tf.group(tf.global_variables_initializer(), tf.local_variables_initializer(), tf.tables_initializer()))
def get_should_reuse_variables(self):
if self.config.TRAIN_PATH:
return True
else:
return None
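# Hedged illustration (appended here, not part of the original code2seq model): the
# evaluation loop above accumulates subtoken-level true positives, false positives
# and false negatives, and calculate_results turns them into precision, recall and
# F1. The counts below are made up purely to show the arithmetic.
def _example_subtoken_scores():
    tp, fp, fn = 70, 30, 20
    precision, recall, f1 = Model.calculate_results(tp, fp, fn)
    # precision = 70 / (70 + 30) = 0.7
    # recall    = 70 / (70 + 20) ~= 0.778
    # f1        = 2 * precision * recall / (precision + recall) ~= 0.737
    return precision, recall, f1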
|
/**
* This file contains styles for SignUp component
*/
import styled from "styled-components";
export const StyledDisplayFlexDiv = styled.div`
display: flex;
`;
export const StyledFlex1Div = styled.div`
flex: 1;
`;
|
export default {
WELCOME_TO_KIMG: '欢迎光临KIMG',
ROTATE: '旋转角度',
BACKGROUND: '背景色',
GRAY: '灰化',
QUALITY: '图片质量',
AUTO_ORIENT: '自动旋正',
OUTPUT_FORMAT: '输出格式',
STRIP: '瘦身',
TAB_BASIC: '基础设置',
TAB_SCALE: '缩放设置',
TAB_CROP: '裁剪设置',
TAB_WATERMARK: '水印设置',
CONVERT_PANEL: '转换面板',
ADMIN_PANEL: '管理面板',
LOAD_IMAGE_BY_MD5: '通过Md5sum从Kimg服务器中加载图片',
DELETE_IMAGE_CONFIRM: '是否确定删除图片?',
BTN_DELETE: '删除',
BTN_UPLOAD: '上传',
TEXT_UPLOAD: '点击或拖动图片到此区域上传',
HINT_UPLOAD: '支持单个图片上传',
ERR_LOAD_IMAGE: '加载图片 {md5sum} 失败。',
ERR_UPLOAD_IMAGE: '{name} 上传失败。',
SUCCESS_UPLOAD_IMAGE: '{name} 上传成功。',
SUCCESS_DELETE_IMAGE: '删除图片 {md5sum} 成功。',
ERR_DELETE_IMAGE: '删除图片 {md5sum} 失败。',
ERR_INVALID_IMAGE: '无效的图片 md5sum: {md5sum}.',
SIZE: '尺寸',
WIDTH: '宽度',
HEIGHT: '高度',
FORMAT: '格式',
CROP_ENABLE: '开启裁剪',
CROP_GRAVITY: '裁剪原点',
GRAVITY_NW: '左上',
GRAVITY_N: '上',
GRAVITY_NE: '右上',
GRAVITY_W: '左',
GRAVITY_C: '中',
GRAVITY_E: '右',
GRAVITY_SW: '左下',
GRAVITY_S: '下',
GRAVITY_SE: '右下',
CROP_WIDTH: '裁剪宽度',
CROP_HEIGHT: '裁剪高度',
OFFSET_MODE: '偏移模式',
OFFSET_X: '偏移X',
OFFSET_Y: '偏移Y',
CROP_LT: '左上',
CROP_LB: '左下',
CROP_RT: '右上',
CROP_RB: '右下',
FORMAT_NONE: '原图一致',
FORMAT_JPG: 'JPG',
FORMAT_PNG: 'PNG',
FORMAT_WEBP: 'WEBP',
FORMAT_GIF: 'GIF',
SCALE_ENABLE: '开启缩放',
SCALE_BY: '缩放方式',
SCALE_BY_PIXEL: '像素缩放',
SCALE_BY_PERCENT: '百分比缩放',
SCALE_SCOPE: '范围',
SCALE_SCOPE_WH: '宽度和高度',
SCALE_SCOPE_OW: '只有宽度',
SCALE_SCOPE_OH: '只有高度',
SCALE_MODE: '缩放模式',
SCALE_WIDTH: '缩放宽度',
SCALE_HEIGHT: '缩放高度',
SCALE_PERCENT: '缩放百分比',
SCALE_WIDTH_PERCENT: '宽度缩放百分比',
SCALE_HEIGHT_PERCENT: '高度缩放百分比',
WATERMARK_ENABLE: '开启水印',
WATERMARK_TEXT: '水印文本',
WATERMARK_LOGO: '水印图片',
WATERMARK_LOGO_W: '水印图片宽度',
WATERMARK_LOGO_H: '水印图片高度',
WATERMARK_FONT_NAME: '字体名称',
WATERMARK_FONT_SIZE: '文字大小',
WATERMARK_FONT_COLOR: '文字颜色',
WATERMARK_STROKE_COLOR: '描边颜色',
WATERMARK_STROKE_WIDTH: '描边宽度',
WATERMARK_GRAVITY: '水印位置',
WATERMARK_X: '边距 X',
WATERMARK_Y: '边距 Y',
WATERMARK_ROTATE: '旋转角度',
WATERMARK_OPACITY: '不透明度',
}
|
const autoprefixer = require('autoprefixer');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const { getBrowsersList } = require('../../../common/getBrowsersList');
/**
 * Build the loader chain for style files
 * @param {*} isProd whether this is a production build
 * @param {*} enableModule whether to enable CSS Modules
 * @param {*} enableLess whether the source files are Less
 */
function getStyleLoader(isProd, enableModule = false, enableLess = false) {
const result = [
    // in production extract CSS into its own file; in development inject it with style-loader
isProd ? MiniCssExtractPlugin.loader : 'style-loader',
    // css-loader
    {
      loader: 'css-loader',
      options: {
        // number of loaders applied before css-loader (for @import-ed resources)
        importLoaders: enableLess ? 2 : 1,
        // CSS Modules
        modules: enableModule
          ? {
              // options-object form used by newer css-loader versions
              localIdentName: '[path][name]-[local]-[hash:5]',
            }
          : enableModule,
        // export class names as camelCase; dashed names are converted and the
        // original dashed keys are removed from the exported locals
        localsConvention: 'camelCaseOnly',
      },
    },
// postcss-loader
{
loader: 'postcss-loader',
options: {
postcssOptions: {
plugins: [[autoprefixer(getBrowsersList(isProd))]],
},
},
},
];
if (enableLess) {
    // Less sources additionally need less-loader
result.push({
loader: 'less-loader',
options: {
lessOptions: {
javascriptEnabled: true,
},
},
});
}
return result;
}
/**
 * Build the common style rules shared by the webpack configs
 * @param {*} isProd whether this is a production build
 */
function getCommonStyleLoader(isProd) {
return [
{
// css
test: function test(filePath) {
return /\.css$/.test(filePath) && !/\.module\.css$/.test(filePath);
},
use: getStyleLoader(isProd, false, false),
},
{
// less
test: function test(filePath) {
return /\.less$/.test(filePath) && !/\.module\.less$/.test(filePath);
},
use: getStyleLoader(isProd, false, true),
},
{
// css(css module)
test: /\.(module).css$/,
use: getStyleLoader(isProd, true, false),
},
{
// less(less module)
test: /\.(module).less$/,
use: getStyleLoader(isProd, true, true),
},
];
}
module.exports = {
getCommonStyleLoader,
};
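// Hedged usage sketch (appended here, not part of the original module): one plausible
// way to wire these rules into a webpack config. The NODE_ENV check and the config
// shape are illustrative assumptions, not taken from this project.
if (require.main === module) {
  const isProd = process.env.NODE_ENV === 'production';
  const exampleWebpackConfig = {
    module: {
      // four rules: plain css, plain less, css modules, less modules
      rules: [...getCommonStyleLoader(isProd)],
    },
  };
  console.log('style rule count:', exampleWebpackConfig.module.rules.length);
}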
|
import { createSelector } from 'reselect';
const selectRaw = (state) => state.document.list;
const selectLoading = createSelector(
[selectRaw],
(raw) => raw.loading,
);
const selectExportLoading = createSelector(
[selectRaw],
(raw) => raw.exportLoading,
);
const selectRows = createSelector(
[selectRaw],
(raw) => raw.rows,
);
const selectCount = createSelector(
[selectRaw],
(raw) => raw.count,
);
const selectHasRows = createSelector(
[selectCount],
(count) => count > 0,
);
const selectOrderBy = createSelector(
[selectRaw],
(raw) => {
const sorter = raw.sorter;
if (!sorter) {
return null;
}
if (!sorter.columnKey) {
return null;
}
let direction =
sorter.order === 'descend' ? 'DESC' : 'ASC';
return `${sorter.columnKey}_${direction}`;
},
);
const selectFilter = createSelector(
[selectRaw],
(raw) => {
return raw.filter;
},
);
const selectLimit = createSelector([selectRaw], (raw) => {
const pagination = raw.pagination;
return pagination.pageSize;
});
const selectOffset = createSelector(
[selectRaw],
(raw) => {
const pagination = raw.pagination;
if (!pagination || !pagination.pageSize) {
return 0;
}
const current = pagination.current || 1;
return (current - 1) * pagination.pageSize;
},
);
const selectPagination = createSelector(
[selectRaw, selectCount],
(raw, count) => {
return {
...raw.pagination,
total: count,
showSizeChanger: true,
};
},
);
const selectSelectedKeys = createSelector(
[selectRaw],
(raw) => {
return raw.selectedKeys;
},
);
const selectSelectedRows = createSelector(
[selectRaw, selectRows],
(raw, rows) => {
return rows.filter((row) =>
raw.selectedKeys.includes(row.id),
);
},
);
export default {
selectLoading,
selectRows,
selectCount,
selectOrderBy,
selectLimit,
selectFilter,
selectOffset,
selectPagination,
selectSelectedKeys,
selectSelectedRows,
selectHasRows,
selectExportLoading,
};
|
import React from "react"
import { useStaticQuery, Link, graphql } from "gatsby"
const ListLink = props => (
<li style={{ display: `inline-block`, marginRight: `1rem` }}>
<Link to={props.to}>{props.children}</Link>
</li>
)
export default function Layout({ children }) {
const data = useStaticQuery(
graphql`
query {
site {
siteMetadata {
title
}
}
}
`
)
return (
<div style={{ margin: `3rem auto`, maxWidth: 650, padding: `0 1rem` }}>
<header style={{ marginBottom: `1.5rem` }}>
<Link to="/" style={{ textShadow: `none`, backgroundImage: `none` }}>
<h3 style={{ display: `inline` }}>{data.site.siteMetadata.title}</h3>
</Link>
<ul style={{ listStyle: `none`, float: `right` }}>
<ListLink to="/">Home</ListLink>
<ListLink to="/about/">About</ListLink>
</ul>
</header>
{children}
</div>
)
} |
'use strict'
module.exports = function(content, file){
if(file.extras && file.extras.isPage){
return content.replace(/#end\s*$/, ' #require("'+file.id+'") \n #end' );
}
return content;
};
|
var callbackArguments = [];
var argument1 = function callback(){callbackArguments.push(arguments)};
var argument2 = true;
var argument3 = function callback(){callbackArguments.push(arguments)};
var argument4 = 823;
var argument5 = "";
var argument6 = function callback(){callbackArguments.push(arguments)};
var argument7 = true;
var argument8 = true;
var argument9 = function callback(){callbackArguments.push(arguments)};
var base_0 = ["v","4","+",")",":i"]
var r_0= undefined
try {
r_0 = base_0.some(argument1,argument2)
}
catch(e) {
r_0= "Error"
}
var base_1 = ["v","4","+",")",":i"]
var r_1= undefined
try {
r_1 = base_1.some(argument3,argument4,argument5)
}
catch(e) {
r_1= "Error"
}
var base_2 = ["v","4","+",")",":i"]
var r_2= undefined
try {
r_2 = base_2.some(argument6,argument7,argument8)
}
catch(e) {
r_2= "Error"
}
var base_3 = ["v","4","+",")",":i"]
var r_3= undefined
try {
r_3 = base_3.some(argument9)
}
catch(e) {
r_3= "Error"
}
function serialize(array){
return array.map(function(a){
if (a === null || a == undefined) return a;
var name = a.constructor.name;
if (name==='Object' || name=='Boolean'|| name=='Array'||name=='Number'||name=='String')
return JSON.stringify(a);
return name;
});
}
setTimeout(function(){
require("fs").writeFileSync("./experiments/some/someEmpty/test701.json",JSON.stringify({"baseObjects":serialize([base_0,base_1,base_2,base_3]),"returnObjects":serialize([r_0,r_1,r_2,r_3]),"callbackArgs":callbackArguments}))
},300) |
from __future__ import print_function
import sys
import os
import json
import webbrowser
import urllib
from yapsy import IPlugin
from awsume import awsumepy
# Python 3 compatibility (python 3 has urlencode in parse sub-module)
URLENCODE = getattr(urllib, 'parse', urllib).urlencode
# Python 3 compatibility (python 3 has urlopen in parse sub-module)
URLOPEN = getattr(urllib, 'request', urllib).urlopen
class AwsumeConsole(IPlugin.IPlugin):
"""The AWS Management Console plugin. Opens an assumed-role to the AWS management console."""
TARGET_VERSION = '3.0.0'
def add_arguments(self, argument_parser):
"""Add the console flag."""
argument_parser.add_argument('-c', '--console',
action='store_true',
default=False,
dest='open_console',
help='Open the AWS console to the AWSume\'d credentials')
argument_parser.add_argument('-cl', '--console-link',
action='store_true',
default=False,
dest='open_console_link',
help='Show the link to open the console with the credentials')
return argument_parser
def pre_awsume(self, app, args):
"""If no profile_name is given to AWSume, check the environment for credentials."""
#use the environment variables to open
if args.open_console_link:
args.open_console = True
if args.open_console is True and args.profile_name is None:
credentials, region = self.get_environment_credentials()
response = self.make_aws_federation_request(credentials)
signin_token = self.get_signin_token(response)
console_url = self.get_console_url(signin_token, region)
self.open_browser_to_url(console_url, args)
exit(0)
def post_awsume(self,
app,
args,
profiles,
user_session,
role_session):
"""Open the console using the currently AWSume'd credentials."""
if args.open_console is True:
if not role_session:
awsumepy.safe_print('Cannot use these credentials to open the AWS Console.')
return
credentials, region = self.get_session_temp_credentials(role_session)
response = self.make_aws_federation_request(credentials)
signin_token = self.get_signin_token(response)
console_url = self.get_console_url(signin_token, region)
self.open_browser_to_url(console_url, args)
def get_environment_credentials(self):
"""Get session credentials from the environment."""
aws_region = 'us-east-1'
if 'AWS_PROFILE' in os.environ:
credentials_profiles = awsumepy.read_ini_file(awsumepy.AWS_CREDENTIALS_FILE)
auto_profile = credentials_profiles[os.environ['AWS_PROFILE']]
temp_credentials = {
'sessionId': auto_profile['aws_access_key_id'],
'sessionKey': auto_profile['aws_secret_access_key'],
'sessionToken': auto_profile['aws_session_token']
}
if auto_profile.get('aws_region'):
aws_region = auto_profile.get('aws_region')
elif os.environ.get('AWS_ACCESS_KEY_ID') and os.environ.get('AWS_SECRET_ACCESS_KEY') and os.environ.get('AWS_SESSION_TOKEN'):
temp_credentials = {
'sessionId': os.environ['AWS_ACCESS_KEY_ID'],
'sessionKey': os.environ['AWS_SECRET_ACCESS_KEY'],
'sessionToken': os.environ['AWS_SESSION_TOKEN']
}
if os.environ.get('AWS_REGION'):
aws_region = os.environ['AWS_REGION']
else:
awsumepy.safe_print('Cannot use these credentials to open the AWS Console.')
exit(0)
json_temp_credentials = json.dumps(temp_credentials)
return json_temp_credentials, aws_region
def get_session_temp_credentials(self, session):
"""Create a properly formatted json string of the given session. Return the session and the region to use."""
if session.get('AccessKeyId') and session.get('SecretAccessKey') and session.get('SessionToken'):
aws_region = 'us-east-1'
temp_credentials = {
'sessionId': session['AccessKeyId'],
'sessionKey': session['SecretAccessKey'],
}
if 'SessionToken' in session:
temp_credentials['sessionToken'] = session['SessionToken']
if session.get('region'):
aws_region = session['region']
#format the credentials into a json formatted string
json_temp_credentials = json.dumps(temp_credentials)
return json_temp_credentials, aws_region
awsumepy.safe_print('Cannot use these credentials to open the AWS Console.')
exit(0)
def make_aws_federation_request(self, temp_credentials):
"""Make the AWS federation request to get the signin token."""
params = {
'Action': 'getSigninToken',
'Session': temp_credentials,
}
request_url = 'https://signin.aws.amazon.com/federation?'
response = URLOPEN(request_url + URLENCODE(params))
return response
def get_signin_token(self, aws_response):
"""Get the signin token from the aws federation response."""
raw = aws_response.read()
try:
return json.loads(raw)['SigninToken']
        except getattr(json.decoder, 'JSONDecodeError', ValueError):
            # in Python 3 the response body is bytes; decode it before parsing
return json.loads(raw.decode())['SigninToken']
def get_console_url(self, aws_signin_token, aws_region):
"""Get the url to open the browser to."""
params = {
'Action': 'login',
'Issuer': '',
'Destination': 'https://console.aws.amazon.com/console/home?region=' + aws_region,
'SigninToken': aws_signin_token
}
url = 'https://signin.aws.amazon.com/federation?'
url += URLENCODE(params)
return url
def open_browser_to_url(self, url, args):
"""Open the default browser to the given url. If that fails, display the url."""
if args.open_console_link:
awsumepy.safe_print(url)
else:
try:
webbrowser.open(url)
except Exception:
awsumepy.safe_print('Cannot open browser, here is the link:')
awsumepy.safe_print(url)
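# Hedged illustration (appended here, not part of the original plugin): the console
# URL produced by get_console_url above is just the AWS federation 'login' endpoint
# with a signin token and a destination region. The token below is a fake
# placeholder; nothing is sent over the network.
def _example_console_url():
    plugin = AwsumeConsole()
    fake_signin_token = 'FAKE-SIGNIN-TOKEN'
    return plugin.get_console_url(fake_signin_token, 'us-east-1')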
|
#!/usr/bin/env python
# heatmap - High performance heatmap creation in C.
#
# The MIT License (MIT)
#
# Copyright (c) 2013 Lucas Beyer
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from os.path import join as pjoin, dirname
from random import gauss
from ctypes import CDLL, c_ulong, c_ubyte
import Image
w, h, npoints = 256, 512, 1000
# Load the heatmap library using ctypes
libhm = CDLL(pjoin(dirname(__file__), '..', 'libheatmap.so'))
# Create the heatmap object with the given dimensions (in pixel).
hm = libhm.heatmap_new(w, h)
# Add a bunch of random points to the heatmap now.
for x, y in ((int(gauss(w*0.5, w/6.0)), int(gauss(h*0.5, h/6.0))) for _ in xrange(npoints)):
libhm.heatmap_add_point(hm, c_ulong(x), c_ulong(y))
# This creates an image out of the heatmap.
# `rawimg` now contains the image data in 32-bit RGBA.
rawimg = (c_ubyte*(w*h*4))()
libhm.heatmap_render_default_to(hm, rawimg)
# Now that we've got a finished heatmap picture, we don't need the map anymore.
libhm.heatmap_free(hm)
# Use the PIL (for example) to make a png file out of that.
img = Image.frombuffer('RGBA', (w, h), rawimg, 'raw', 'RGBA', 0, 1)
img.save('heatmap.png')
|
// get color depending on the candidate
var colors = {
'amadou': '#FB7102'
, 'nicolas': '#FB7132'
, 'avello': '#1B2638'
, 'calonne': '#1B2640'
, 'scales': '#7AB42D'
, 'egron': '#E50000'
, 'pernot': '#FF0011'
, 'raimbourg': '#8297BE'
, 'rebora': '#8297CE'
, 'barrely': '#8297DE'
, 'rineau': '#8297EE'
, 'koriat': '#8298EE'
, 'raynaud': '#4299EE'
, 'legeay': '#8299EE'
, 'semsar_behague': '#8291EE'
, 'magaud': '#8292EE'
, 'merand': '#8293EE'
, 'cram': '#8294EE'
};
var fullNames = {
'amadou': 'Aude Amadou'
, 'avello': 'avellot'
, 'nicolas': 'nicolas'
, 'calonne': 'calonne'
, 'scales': 'scales'
, 'egron': 'Vincent Egron'
, 'pernot': 'pernot'
, 'raimbourg': 'raimbourg'
, 'raynaud': 'raynaud'
, 'rebora': 'rebora'
, 'barrely': 'barrely'
, 'rineau': 'rineau'
, 'koriat': 'koriat'
, 'legeay': 'legeay'
, 'magaud': 'magaud'
, 'semsar_behague': 'semsar_behague'
, 'merand': 'merand'
, 'cram': 'cram'
};
var partis = {
'amadou': 'LREM'
, 'avello': 'FN'
, 'cram': 'UPR'
, 'raynaud': 'LO'
, 'nicolas': 'EELV'
, 'calonne': 'DLF'
, 'scales': 'LCPHAP'
, 'egron': 'FI'
, 'pernot': 'PCF'
, 'raimbourg': 'PS'
, 'rebora': 'PFE'
, 'koriat': 'blancs'
, 'barrely': 'REG'
, 'rineau': 'Pchrétien'
, 'legeay': 'animalistes'
, 'magaud': 'PCR'
, 'semsar_behague': 'RDG'
, 'merand': 'UDI'
};
//
var resultsd = {};
var resultsdreze = {};
var resultsdbouguenais = {};
var resultsdsaintseb = {};
var resultsg = {};
var geojson;
var map;
var legend;
var osmAttrib = '<a href="http://data.nantes.fr/donnees/detail/decoupage-geographique-des-bureaux-de-vote-de-la-ville-de-nantes/" target="_blank">Découpage</a>';
var osm;
//
$.ajax({
url: "results_1detail.json",
dataType: 'json',
async: false,
success: function(data) {resultsd = data;}
});
$.ajax({
url: "bouguenais.json",
dataType: 'json',
async: false,
success: function(data) {resultsdbouguenais = data;}
});
$.ajax({
url: "saintseb.json",
dataType: 'json',
async: false,
success: function(data) {resultsdsaintseb = data;}
});
$.ajax({
url: "reze.json",
dataType: 'json',
async: false,
success: function(data) {resultsdreze = data;}
});
$.ajax({
url: "results_1general.json",
dataType: 'json',
async: false,
success: function(data) {resultsg = data;}
})
$.extend(resultsd, resultsdreze);
$.extend(resultsd, resultsdbouguenais);
$.extend(resultsd, resultsdsaintseb);
function resetHighlight(e) {
geojson.setStyle(bureauxStyle);
legend.update();
}
function onEachFeature(feature, layer) {
layer.on({mouseover: highlightFeature, mouseout: resetHighlight});
}
function highlightFeature(e) {
var layer = e.target;
legend.update(layer.feature.properties);
layer.setStyle({weight: 4});
}
function launchmap() {
map = L.map('map', {maxZoom: 17, minZoom: 11}).setView([47.22, -1.55], 12);
map.attributionControl.setPrefix(osmAttrib);
map.setMaxBounds(new L.LatLngBounds(new L.LatLng(47.230752, -1.694697),new L.LatLng(47.128512, -1.413313)));
L.tileLayer("https://tilestream.makina-corpus.net/v2/nantes-desaturate/{z}/{x}/{y}.png",{attribution:'© Contributeurs <a href="http://osm.org/copyright">OpenStreetMap</a>'}).addTo(map)
if (L.Browser.touch) { L.control.touchHover().addTo(map); }
geojson = L.geoJson(
bureaux,
{style: bureauxStyle, onEachFeature: onEachFeature}).addTo(map);
legend = L.control({position: 'topright'});
legend.onAdd = function (map) {
this._div = L.DomUtil.create('div', 'legend info');
this.update();
return this._div;
};
};
|
from mybitbank.libs.jsonrpc.proxy import ServiceProxy, JSONRPCException
from .json import loads, dumps, JSONEncodeException, JSONDecodeException
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: requiredElement.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import bcl_pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='requiredElement.proto',
package='Entities.AlgoGen',
serialized_pb=_b('\n\x15requiredElement.proto\x12\x10\x45ntities.AlgoGen\x1a\tbcl.proto\"H\n\x14List_RequiredElement\x12\x30\n\x05items\x18\x01 \x03(\x0b\x32!.Entities.AlgoGen.RequiredElement\"4\n\x1cRequiredBusinessFleetElement\x12\x14\n\tIdJourney\x18\x01 \x01(\x05:\x01\x30\"\x84\x05\n\x0fRequiredElement\x12\x1c\n\x11IdRequiredElement\x18\x01 \x01(\x05:\x01\x30\x12\x14\n\tIdAddress\x18\x02 \x01(\x05:\x01\x30\x12\x1f\n\x08\x44uration\x18\x03 \x01(\x0b\x32\r.bcl.TimeSpan\x12\"\n\x0bPlannedDate\x18\x04 \x01(\x0b\x32\r.bcl.DateTime\x12!\n\nActualDate\x18\x05 \x01(\x0b\x32\r.bcl.DateTime\x12Q\n\x15TypeOfRequiredElement\x18\x06 \x01(\x0e\x32\'.Entities.AlgoGen.TypeOfRequiredElement:\tDeparture\x12J\n\x17RequiredEmployeeElement\x18\n \x01(\x0b\x32).Entities.AlgoGen.RequiredEmployeeElement\x12T\n\x1cRequiredBusinessFleetElement\x18\x0b \x01(\x0b\x32..Entities.AlgoGen.RequiredBusinessFleetElement\x12P\n\x1aRequiredInternFleetElement\x18\x0c \x01(\x0b\x32,.Entities.AlgoGen.RequiredInternFleetElement\x12\x44\n\x14RequiredFleetElement\x18\r \x01(\x0b\x32&.Entities.AlgoGen.RequiredFleetElement\x12H\n\x16RequiredVehicleElement\x18\x0e \x01(\x0b\x32(.Entities.AlgoGen.RequiredVehicleElement\"\x19\n\x17RequiredEmployeeElement\"\x16\n\x14RequiredFleetElement\"\x7f\n\x1aRequiredInternFleetElement\x12\x61\n TypeOfRequiredInternFleetElement\x18\x01 \x01(\x0e\x32\x32.Entities.AlgoGen.TypeOfRequiredInternFleetElement:\x03One\"\x18\n\x16RequiredVehicleElement*3\n\x15TypeOfRequiredElement\x12\r\n\tDeparture\x10\x00\x12\x0b\n\x07\x41rrival\x10\x01*+\n TypeOfRequiredInternFleetElement\x12\x07\n\x03One\x10\x00')
,
dependencies=[bcl_pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_TYPEOFREQUIREDELEMENT = _descriptor.EnumDescriptor(
name='TypeOfRequiredElement',
full_name='Entities.AlgoGen.TypeOfRequiredElement',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='Departure', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Arrival', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=1035,
serialized_end=1086,
)
_sym_db.RegisterEnumDescriptor(_TYPEOFREQUIREDELEMENT)
TypeOfRequiredElement = enum_type_wrapper.EnumTypeWrapper(_TYPEOFREQUIREDELEMENT)
_TYPEOFREQUIREDINTERNFLEETELEMENT = _descriptor.EnumDescriptor(
name='TypeOfRequiredInternFleetElement',
full_name='Entities.AlgoGen.TypeOfRequiredInternFleetElement',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='One', index=0, number=0,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=1088,
serialized_end=1131,
)
_sym_db.RegisterEnumDescriptor(_TYPEOFREQUIREDINTERNFLEETELEMENT)
TypeOfRequiredInternFleetElement = enum_type_wrapper.EnumTypeWrapper(_TYPEOFREQUIREDINTERNFLEETELEMENT)
Departure = 0
Arrival = 1
One = 0
_LIST_REQUIREDELEMENT = _descriptor.Descriptor(
name='List_RequiredElement',
full_name='Entities.AlgoGen.List_RequiredElement',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='items', full_name='Entities.AlgoGen.List_RequiredElement.items', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=54,
serialized_end=126,
)
_REQUIREDBUSINESSFLEETELEMENT = _descriptor.Descriptor(
name='RequiredBusinessFleetElement',
full_name='Entities.AlgoGen.RequiredBusinessFleetElement',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='IdJourney', full_name='Entities.AlgoGen.RequiredBusinessFleetElement.IdJourney', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=128,
serialized_end=180,
)
_REQUIREDELEMENT = _descriptor.Descriptor(
name='RequiredElement',
full_name='Entities.AlgoGen.RequiredElement',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='IdRequiredElement', full_name='Entities.AlgoGen.RequiredElement.IdRequiredElement', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='IdAddress', full_name='Entities.AlgoGen.RequiredElement.IdAddress', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='Duration', full_name='Entities.AlgoGen.RequiredElement.Duration', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='PlannedDate', full_name='Entities.AlgoGen.RequiredElement.PlannedDate', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ActualDate', full_name='Entities.AlgoGen.RequiredElement.ActualDate', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='TypeOfRequiredElement', full_name='Entities.AlgoGen.RequiredElement.TypeOfRequiredElement', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='RequiredEmployeeElement', full_name='Entities.AlgoGen.RequiredElement.RequiredEmployeeElement', index=6,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='RequiredBusinessFleetElement', full_name='Entities.AlgoGen.RequiredElement.RequiredBusinessFleetElement', index=7,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='RequiredInternFleetElement', full_name='Entities.AlgoGen.RequiredElement.RequiredInternFleetElement', index=8,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='RequiredFleetElement', full_name='Entities.AlgoGen.RequiredElement.RequiredFleetElement', index=9,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='RequiredVehicleElement', full_name='Entities.AlgoGen.RequiredElement.RequiredVehicleElement', index=10,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=183,
serialized_end=827,
)
_REQUIREDEMPLOYEEELEMENT = _descriptor.Descriptor(
name='RequiredEmployeeElement',
full_name='Entities.AlgoGen.RequiredEmployeeElement',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=441,
serialized_end=466,
)
_REQUIREDFLEETELEMENT = _descriptor.Descriptor(
name='RequiredFleetElement',
full_name='Entities.AlgoGen.RequiredFleetElement',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=685,
serialized_end=707,
)
_REQUIREDINTERNFLEETELEMENT = _descriptor.Descriptor(
name='RequiredInternFleetElement',
full_name='Entities.AlgoGen.RequiredInternFleetElement',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='TypeOfRequiredInternFleetElement', full_name='Entities.AlgoGen.RequiredInternFleetElement.TypeOfRequiredInternFleetElement', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=880,
serialized_end=1007,
)
_REQUIREDVEHICLEELEMENT = _descriptor.Descriptor(
name='RequiredVehicleElement',
full_name='Entities.AlgoGen.RequiredVehicleElement',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=755,
serialized_end=779,
)
_LIST_REQUIREDELEMENT.fields_by_name['items'].message_type = _REQUIREDELEMENT
_REQUIREDELEMENT.fields_by_name['Duration'].message_type = bcl_pb2._TIMESPAN
_REQUIREDELEMENT.fields_by_name['PlannedDate'].message_type = bcl_pb2._DATETIME
_REQUIREDELEMENT.fields_by_name['ActualDate'].message_type = bcl_pb2._DATETIME
_REQUIREDELEMENT.fields_by_name['TypeOfRequiredElement'].enum_type = _TYPEOFREQUIREDELEMENT
_REQUIREDELEMENT.fields_by_name['RequiredEmployeeElement'].message_type = _REQUIREDEMPLOYEEELEMENT
_REQUIREDELEMENT.fields_by_name['RequiredBusinessFleetElement'].message_type = _REQUIREDBUSINESSFLEETELEMENT
_REQUIREDELEMENT.fields_by_name['RequiredInternFleetElement'].message_type = _REQUIREDINTERNFLEETELEMENT
_REQUIREDELEMENT.fields_by_name['RequiredFleetElement'].message_type = _REQUIREDFLEETELEMENT
_REQUIREDELEMENT.fields_by_name['RequiredVehicleElement'].message_type = _REQUIREDVEHICLEELEMENT
_REQUIREDINTERNFLEETELEMENT.fields_by_name['TypeOfRequiredInternFleetElement'].enum_type = _TYPEOFREQUIREDINTERNFLEETELEMENT
DESCRIPTOR.message_types_by_name['List_RequiredElement'] = _LIST_REQUIREDELEMENT
DESCRIPTOR.message_types_by_name['RequiredBusinessFleetElement'] = _REQUIREDBUSINESSFLEETELEMENT
DESCRIPTOR.message_types_by_name['RequiredElement'] = _REQUIREDELEMENT
DESCRIPTOR.message_types_by_name['RequiredEmployeeElement'] = _REQUIREDEMPLOYEEELEMENT
DESCRIPTOR.message_types_by_name['RequiredFleetElement'] = _REQUIREDFLEETELEMENT
DESCRIPTOR.message_types_by_name['RequiredInternFleetElement'] = _REQUIREDINTERNFLEETELEMENT
DESCRIPTOR.message_types_by_name['RequiredVehicleElement'] = _REQUIREDVEHICLEELEMENT
DESCRIPTOR.enum_types_by_name['TypeOfRequiredElement'] = _TYPEOFREQUIREDELEMENT
DESCRIPTOR.enum_types_by_name['TypeOfRequiredInternFleetElement'] = _TYPEOFREQUIREDINTERNFLEETELEMENT
List_RequiredElement = _reflection.GeneratedProtocolMessageType('List_RequiredElement', (_message.Message,), dict(
DESCRIPTOR = _LIST_REQUIREDELEMENT,
__module__ = 'requiredElement_pb2'
# @@protoc_insertion_point(class_scope:Entities.AlgoGen.List_RequiredElement)
))
_sym_db.RegisterMessage(List_RequiredElement)
RequiredBusinessFleetElement = _reflection.GeneratedProtocolMessageType('RequiredBusinessFleetElement', (_message.Message,), dict(
DESCRIPTOR = _REQUIREDBUSINESSFLEETELEMENT,
__module__ = 'requiredElement_pb2'
# @@protoc_insertion_point(class_scope:Entities.AlgoGen.RequiredBusinessFleetElement)
))
_sym_db.RegisterMessage(RequiredBusinessFleetElement)
RequiredElement = _reflection.GeneratedProtocolMessageType('RequiredElement', (_message.Message,), dict(
DESCRIPTOR = _REQUIREDELEMENT,
__module__ = 'requiredElement_pb2'
# @@protoc_insertion_point(class_scope:Entities.AlgoGen.RequiredElement)
))
_sym_db.RegisterMessage(RequiredElement)
RequiredEmployeeElement = _reflection.GeneratedProtocolMessageType('RequiredEmployeeElement', (_message.Message,), dict(
DESCRIPTOR = _REQUIREDEMPLOYEEELEMENT,
__module__ = 'requiredElement_pb2'
# @@protoc_insertion_point(class_scope:Entities.AlgoGen.RequiredEmployeeElement)
))
_sym_db.RegisterMessage(RequiredEmployeeElement)
RequiredFleetElement = _reflection.GeneratedProtocolMessageType('RequiredFleetElement', (_message.Message,), dict(
DESCRIPTOR = _REQUIREDFLEETELEMENT,
__module__ = 'requiredElement_pb2'
# @@protoc_insertion_point(class_scope:Entities.AlgoGen.RequiredFleetElement)
))
_sym_db.RegisterMessage(RequiredFleetElement)
RequiredInternFleetElement = _reflection.GeneratedProtocolMessageType('RequiredInternFleetElement', (_message.Message,), dict(
DESCRIPTOR = _REQUIREDINTERNFLEETELEMENT,
__module__ = 'requiredElement_pb2'
# @@protoc_insertion_point(class_scope:Entities.AlgoGen.RequiredInternFleetElement)
))
_sym_db.RegisterMessage(RequiredInternFleetElement)
RequiredVehicleElement = _reflection.GeneratedProtocolMessageType('RequiredVehicleElement', (_message.Message,), dict(
DESCRIPTOR = _REQUIREDVEHICLEELEMENT,
__module__ = 'requiredElement_pb2'
# @@protoc_insertion_point(class_scope:Entities.AlgoGen.RequiredVehicleElement)
))
_sym_db.RegisterMessage(RequiredVehicleElement)
# @@protoc_insertion_point(module_scope)
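# --- Hedged usage sketch (appended for illustration; not produced by protoc) ---
# Builds one RequiredElement with the classes registered above and round-trips
# it through the wire format. Field values are arbitrary examples.
if __name__ == '__main__':
    element = RequiredElement(
        IdRequiredElement=1,
        IdAddress=42,
        TypeOfRequiredElement=Arrival)
    wire = element.SerializeToString()
    decoded = RequiredElement()
    decoded.ParseFromString(wire)
    assert decoded.IdAddress == 42
    print(decoded)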
|
import { existsSync } from 'fs'
import { readFile } from 'fs/promises'
// Load ../config.json if it exists; fall back to an empty object otherwise.
const file = new URL('../config.json', import.meta.url)
const config = existsSync(file) ? JSON.parse(await readFile(file, 'utf8')) : {}
// When a token is supplied through the environment, override every key listed
// in config_example.json with its environment variable (keys missing from the
// environment end up undefined).
if (process.env.token) {
  const exampleKeys = Object.keys(JSON.parse(await readFile(new URL('../config_example.json', import.meta.url), 'utf8')))
  for (const key of exampleKeys) {
    config[key] = process.env[key]
  }
}
export const { token, appId, clientSecret, guildId, adminId, cookie, geniusAppId } = config
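// --- Hedged usage sketch (not from the original source) --------------------
// Elsewhere in the project these values would typically be consumed like:
//
//     import { token, appId } from './config.js'   // path is an assumption
//     client.login(token)                          // e.g. a discord.js client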
|
from __future__ import unicode_literals
from babeldjango.templatetags.babel import currencyfmt
from django.core.urlresolvers import reverse
from django.http import JsonResponse
from django.shortcuts import get_object_or_404, redirect, render
from django.template.response import TemplateResponse
from ..core.utils import to_local_currency, get_user_shipping_country
from ..product.models import ProductVariant
from ..shipping.utils import get_shipment_options
from .forms import ReplaceCartLineForm, CountryForm
from .models import Cart
from .utils import (check_product_availability_and_warn, get_or_empty_db_cart,
get_cart_data)
@get_or_empty_db_cart(cart_queryset=Cart.objects.for_display())
def index(request, cart):
discounts = request.discounts
cart_lines = []
check_product_availability_and_warn(request, cart)
for line in cart.lines.all():
initial = {'quantity': line.get_quantity()}
form = ReplaceCartLineForm(None, cart=cart, variant=line.variant,
initial=initial, discounts=discounts)
cart_lines.append({
'variant': line.variant,
'get_price_per_item': line.get_price_per_item(discounts),
'get_total': line.get_total(discounts=discounts),
'form': form})
default_country = get_user_shipping_country(request)
country_form = CountryForm(initial={'country': default_country})
default_country_options = get_shipment_options(default_country)
cart_data = get_cart_data(
cart, default_country_options, request.currency, request.discounts)
ctx = {
'cart_lines': cart_lines,
'country_form': country_form,
'default_country_options': default_country_options}
ctx.update(cart_data)
return TemplateResponse(
request, 'cart/index.html', ctx)
@get_or_empty_db_cart(cart_queryset=Cart.objects.for_display())
def get_shipping_options(request, cart):
country_form = CountryForm(request.POST or None)
if country_form.is_valid():
shipments = country_form.get_shipment_options()
else:
shipments = None
ctx = {
'default_country_options': shipments,
'country_form': country_form}
cart_data = get_cart_data(
cart, shipments, request.currency, request.discounts)
ctx.update(cart_data)
return TemplateResponse(
request, 'cart/_subtotal_table.html', ctx)
@get_or_empty_db_cart()
def update(request, cart, variant_id):
if not request.is_ajax():
return redirect('cart:index')
variant = get_object_or_404(ProductVariant, pk=variant_id)
discounts = request.discounts
status = None
form = ReplaceCartLineForm(request.POST, cart=cart, variant=variant,
discounts=discounts)
if form.is_valid():
form.save()
response = {'variantId': variant_id,
'subtotal': 0,
'total': 0,
'cart': {
'numItems': cart.quantity,
'numLines': len(cart)
}}
updated_line = cart.get_line(form.cart_line.variant)
if updated_line:
response['subtotal'] = currencyfmt(
updated_line.get_total(discounts=discounts).gross,
updated_line.get_total(discounts=discounts).currency)
if cart:
cart_total = cart.get_total(discounts=discounts)
response['total'] = currencyfmt(
cart_total.gross,
cart_total.currency)
local_cart_total = to_local_currency(cart_total, request.currency)
if local_cart_total:
response['localTotal'] = currencyfmt(
local_cart_total.gross,
local_cart_total.currency)
status = 200
elif request.POST is not None:
response = {'error': form.errors}
status = 400
return JsonResponse(response, status=status)
@get_or_empty_db_cart(cart_queryset=Cart.objects.for_display())
def summary(request, cart):
def prepare_line_data(line):
product_class = line.variant.product.product_class
attributes = product_class.variant_attributes.all()
first_image = line.variant.get_first_image()
price_per_item = line.get_price_per_item(discounts=request.discounts)
line_total = line.get_total(discounts=request.discounts)
return {
'product': line.variant.product,
'variant': line.variant.name,
'quantity': line.quantity,
'attributes': line.variant.display_variant(attributes),
'image': first_image,
'price_per_item': currencyfmt(
price_per_item.gross, price_per_item.currency),
'line_total': currencyfmt(line_total.gross, line_total.currency),
'update_url': reverse('cart:update-line',
kwargs={'variant_id': line.variant_id}),
'variant_url': line.variant.get_absolute_url()}
if cart.quantity == 0:
data = {'quantity': 0}
else:
cart_total = cart.get_total(discounts=request.discounts)
data = {
'quantity': cart.quantity,
'total': currencyfmt(cart_total.gross, cart_total.currency),
'lines': [prepare_line_data(line) for line in cart.lines.all()]}
return render(request, 'cart-dropdown.html', data)
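# --- Hedged wiring sketch (hypothetical; not part of this module) -----------
# One way these views could be mounted so that the 'cart:index' and
# 'cart:update-line' names used by redirect()/reverse() above resolve, assuming
# the old-style django.conf.urls API used elsewhere in this codebase:
#
#     from django.conf.urls import url
#     from . import views
#
#     urlpatterns = [
#         url(r'^$', views.index, name='index'),
#         url(r'^shipping-options/$', views.get_shipping_options,
#             name='get-shipping-options'),
#         url(r'^update/(?P<variant_id>\d+)/$', views.update, name='update-line'),
#         url(r'^summary/$', views.summary, name='summary'),
#     ]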
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
import torch.nn as nn
from .yolo_head import YOLOXHead
from .yolo_pafpn import YOLOPAFPN
class YOLOX(nn.Module):
"""
YOLOX model module. The module list is defined by create_yolov3_modules function.
The network returns loss values from three YOLO layers during training
and detection results during test.
"""
def __init__(self, backbone=None, head=None):
super().__init__()
if backbone is None:
backbone = YOLOPAFPN()
if head is None:
head = YOLOXHead(80)
self.backbone = backbone
self.head = head
def forward(self, x, targets=None):
# fpn output content features of [dark3, dark4, dark5]
fpn_outs = self.backbone(x)
        if self.training:
            # Training path: the head needs ground-truth targets to compute the losses.
            assert targets is not None
            outputs, _ = self.head(
                fpn_outs, targets, x
            )
        else:
            # Inference path: the head decodes and returns detection outputs directly.
            outputs = self.head(fpn_outs)
return outputs
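# --- Hedged usage sketch (not part of the original module) ------------------
# Runs a dummy forward pass in eval mode with the default 80-class head; the
# input size (1, 3, 640, 640) is just a common YOLOX resolution, not something
# mandated by this file.
if __name__ == "__main__":
    import torch
    model = YOLOX()
    model.eval()
    with torch.no_grad():
        dummy = torch.zeros(1, 3, 640, 640)
        predictions = model(dummy)
    print(predictions.shape if hasattr(predictions, "shape") else type(predictions))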
|