export { default } from './positionElement';
|
import pandas as pd
import numpy as np
import dgl
import torch
from sklearn.preprocessing import StandardScaler, QuantileTransformer
class MacroDataset():
def __init__(self, DF_PATH, SEED, TASK, LABELNAME, MODEL, NX_GRAPHS, NORM=None):
'''
Initializes a MacroDataset object
Args:
DF_PATH: str, path to DataFrame containing all macromolecules and corresponding labels
SEED: int, random seed for shuffling dataset
TASK: str, 'classification' or 'regression'
LABELNAME: str, name of label to classify
MODEL: str, model architecture ('GCN' and 'GAT' graphs get self-loops added)
NX_GRAPHS: dict, dictionary of featurized NetworkX graph for each macromolecule ID
NORM: str, normalization method for regression dataset (default=None)
Attributes:
IDs: list, list of macromolecule IDs in dataset
graphs: list, list of graphs corresponding to each ID
labels: list, list of labels corresponding to each ID
masks: list, list of masks corresponding to each ID
task: str, classification or regression
n_tasks: int, number of tasks
classtype: str, binary, multilabel, or multiclass for classification tasks
normalizer: StandardScaler or QuantileTransformer for normalization
'''
self._df = pd.read_csv(DF_PATH)
self._seed = SEED
self._labelname = LABELNAME
self._model = MODEL
self._nx_graphs = NX_GRAPHS
self._norm = NORM
self.task = TASK
self.normalizer = None
self._convert_dgl()
def _convert_dgl(self):
''' Utility function for conversion of featurized NetworkX to featurized DGL '''
IDs_nonshuffle = self._df['ID'].tolist()
np.random.seed(self._seed)
indices = np.random.RandomState(seed=self._seed).permutation(np.arange(len(IDs_nonshuffle)))
IDs_shuffle = list(np.array(IDs_nonshuffle)[indices])
self.IDs = []
graphs_feat = []
for idnum in IDs_shuffle:
if str(idnum) in self._nx_graphs.keys():
self.IDs.append(idnum)
graphs_feat.append(self._nx_graphs[str(idnum)])
if self._model == 'GCN' or self._model == 'GAT':
graphs_list = [dgl.from_networkx(graph_feat, node_attrs=['h'], edge_attrs=['e'], idtype=torch.int32) for graph_feat in graphs_feat]
self.graphs = [dgl.add_self_loop(graph) for graph in graphs_list]
else:
self.graphs = [dgl.from_networkx(graph_feat, node_attrs=['h'], edge_attrs=['e'], idtype=torch.int32) for graph_feat in graphs_feat]
if self.task == 'classification':
self._classificationlabel()
elif self.task == 'regression':
self._regressionlabel()
def _classificationlabel(self):
''' Utility function for assigning macromolecule labels for classification task '''
label_list = self._getclass()
labelname_list = self._df[self._labelname].tolist()
comma_check = False
for labelname in labelname_list:
if labelname is not None and not pd.isna(labelname) and labelname != '':
if ',' in labelname:
comma_check = True
if len(label_list) == 2 and comma_check == False:
self.classtype = 'binary'
self._binarylabelizer()
elif len(label_list) > 2 and comma_check == False:
self.classtype = 'multiclass'
self._multilabelizer('mc')
elif len(label_list) > 2 and comma_check == True:
self.classtype = 'multilabel'
self._multilabelizer('ml')
def _binarylabelizer(self):
''' Utility function for assigning macromolecule labels and number of tasks for binary classification task '''
labels = []
masks = []
label_list = self._getclass()
for graph in self.IDs:
label_str = self._df[self._df['ID']==graph][[self._labelname]].values[0].tolist()
mask_tmp = [1]
if pd.isnull(label_str[0]):
mask_tmp[0] = 0
label_str[0] = label_list[0]  # placeholder label; the zeroed mask excludes this entry
label_tensor = torch.FloatTensor([label_list.index(label_str[0])])
labels.append(label_tensor)
mask_tensor = torch.FloatTensor(mask_tmp)
masks.append(mask_tensor)
self.labels = labels
self.masks = masks
self.n_tasks = 1
def _multilabelizer(self, tasktype):
''' Utility function for assigning macromolecule labels and number of tasks for multilabel/multiclass classification task
Args:
tasktype : str, 'ml' for multilabel or 'mc' for multiclass
'''
labels = []
masks = []
label_list = self._getclass()
self.n_tasks = len(label_list)
for graph in self.IDs:
classname = self._df[self._df['ID']==graph][[self._labelname]].values[0].tolist()[0]
if classname is not None and not pd.isna(classname) and classname != '':
itemlist = []
if ',' in classname:
itemlist += classname.strip().split(', ')
else:
itemlist = [classname.strip()]
label_str = [0 for a in range(len(label_list))]
for item in itemlist:
nameindex = label_list.index(item.replace(',','').strip())
label_str[nameindex] = 1
if tasktype == 'mc':
mask_tmp = [1]
elif tasktype == 'ml':
mask_tmp = [1 for a in range(len(label_list))]
else:
label_str = [0 for a in range(len(label_list))]
if tasktype == 'mc':
mask_tmp = [0]
elif tasktype == 'ml':
mask_tmp = [0 for a in range(len(label_list))]
label_tensor = torch.FloatTensor(label_str)
mask_tensor = torch.FloatTensor(mask_tmp)
labels.append(label_tensor)
masks.append(mask_tensor)
self.labels = labels
self.masks = masks
def _getclass(self):
''' Utility function for getting list of unique macromolecule labels
Returns:
unique_list: list, list of unique macromolecule labels
'''
classlist = self._df[self._labelname].tolist()
unique_list = []
for classname in classlist:
if classname is not None and not pd.isna(classname) and classname != '':
class_list = []
if ',' in classname:
class_list += classname.strip().split(', ')
else:
class_list = [classname.strip()]
for elem in class_list:
if elem.replace(',','').strip() not in unique_list:
unique_list.append(elem.replace(',','').strip())
return unique_list
def _regressionlabel(self):
''' Utility function for assigning macromolecule labels and number of tasks for regression task '''
if self._norm == 'qt':
self._quantiletransform()
elif self._norm == 'ss':
self._standardscaler()
self.n_tasks = 1
def _quantiletransform(self):
''' Utility function for normalizing regression labels using quantile transform '''
df_list = self._df['avg'].tolist()
data_list = [val for val in df_list if val is not None and not pd.isna(val) and val != '']
qt = QuantileTransformer(n_quantiles = len(data_list), random_state = self._seed)
qt.fit(np.array(data_list).reshape(-1, 1))
labels = []
masks = []
for graph in self.IDs:
label_orig = self._df[self._df['ID']==graph][['avg']].values[0].tolist()
label_scale = list(qt.transform(np.array(label_orig).reshape(-1, 1)))[0]
mask_tmp = [1]
if pd.isnull(label_orig[0]):
mask_tmp[0] = 0
label_scale[0] = 0
label_tensor = torch.FloatTensor(label_scale)
labels.append(label_tensor)
mask_tensor = torch.FloatTensor(mask_tmp)
masks.append(mask_tensor)
self.labels = labels
self.masks = masks
self.normalizer = qt
def _standardscaler(self):
''' Utility function for normalizing regression labels using standard scaler '''
df_list = self._df['avg'].tolist()
data_list = [val for val in df_list if val is not None and not pd.isna(val) and val != '']
scaler = StandardScaler()
scaler.fit(np.array(data_list).reshape(-1, 1))
labels = []
masks = []
for graph in self.IDs:
label_orig = self._df[self._df['ID']==graph][['avg']].values[0].tolist()
label_scale = list(scaler.transform(np.array(label_orig).reshape(-1, 1)))[0]
mask_tmp = [1]
if pd.isnull(label_orig[0]):
mask_tmp[0] = 0
label_scale[0] = 0
label_tensor = torch.FloatTensor(label_scale)
labels.append(label_tensor)
mask_tensor = torch.FloatTensor(mask_tmp)
masks.append(mask_tensor)
self.labels = labels
self.masks = masks
self.normalizer = scaler
def __getitem__(self, idx):
'''Utility function for getting datapoint with index
Args:
idx : int, index of datapoint
Returns:
self.IDs[idx], self.graphs[idx], self.labels[idx], self.masks[idx]: ID, graph, label, mask of specified index
'''
return self.IDs[idx], self.graphs[idx], self.labels[idx], self.masks[idx]
def __len__(self):
'''Utility function to find number of graphs in the dataset
Returns:
len(self.graphs): int, number of graphs in dataset
'''
return len(self.graphs)
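# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module). The CSV path, the
# 'activity' label column, and the toy graphs below are illustrative only;
# the class expects a CSV with an 'ID' column plus the chosen LABELNAME
# column, and a dict of featurized NetworkX graphs keyed by string ID.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import networkx as nx

    def toy_graph():
        # Tiny two-node graph with node feature 'h' and edge feature 'e'
        g = nx.DiGraph()
        g.add_node(0, h=torch.zeros(4))
        g.add_node(1, h=torch.ones(4))
        g.add_edge(0, 1, e=torch.zeros(2))
        g.add_edge(1, 0, e=torch.zeros(2))
        return g

    df_toy = pd.DataFrame({'ID': [1, 2], 'activity': ['active', 'inactive']})
    df_toy.to_csv('toy_macro_labels.csv', index=False)  # hypothetical scratch file

    dataset = MacroDataset(DF_PATH='toy_macro_labels.csv', SEED=42,
                           TASK='classification', LABELNAME='activity',
                           MODEL='GCN', NX_GRAPHS={'1': toy_graph(), '2': toy_graph()})
    print(len(dataset))                        # 2
    macro_id, graph, label, mask = dataset[0]  # ID, DGL graph, label tensor, mask tensor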
|
const authController = {};
authController.getAccessToken = (req, res, next) => {
const { accessToken } = req.cookies;
res.locals.accessToken = accessToken;
next();
};
authController.saveAccessToken = (req, res, next) => {
const { user } = req;
res.cookie('accessToken', user.accessToken, { maxAge: 36000000 });
res.cookie('uuid', user.userId, { maxAge: 36000000 });
next();
};
module.exports = authController;
|
/*-
* Copyright (c) 2012 Stephan Arts. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY [LICENSOR] "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if !defined (LIBGUARDIAN_INSIDE_LIBGUARDIAN_H) && !defined(LIBGUARDIAN_COMPILATION)
#error "Only <libguardian/libguardian.h> can be included directly, this file may disappear or change contents"
#endif
#ifndef __GUARDIAN_TYPES_H__
#define __GUARDIAN_TYPES_H__
#define FALSE 0
#define TRUE 1
#endif /* __GUARDIAN_TYPES_H__ */
|
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
# check if MongoDB is enabled and running
def test_mongo_running_and_enabled(host):
mongo = host.service("mongod")
assert mongo.is_running
assert mongo.is_enabled
# check if MongoDB is listening on port 27017
def test_port27017_open(host):
sock = host.socket("tcp://0.0.0.0:27017")
assert sock.is_listening
# check if configuration file contains the required line
def test_config_file(host):
config_file = host.file('/etc/mongod.conf')
assert config_file.contains('bindIp: 0.0.0.0')
assert config_file.is_file
|
require('./bootstrap');
import Vue from 'vue';
import VueRouter from 'vue-router';
Vue.use(VueRouter);
const routes = [
{ path: '/users', component: require('./components/UsersComponent.vue').default },
{ path: '/edit/:id', component: require('./components/EditComponent.vue').default ,name: 'edit' },
{ path: '/create', component: require('./components/InsertComponent.vue').default },
]
const router = new VueRouter({
routes // short for `routes: routes`
});
const app = new Vue({
el: '#app',
router
});
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import torch.optim
from . import FairseqOptimizer, register_optimizer
@register_optimizer('adagrad')
class Adagrad(FairseqOptimizer):
def __init__(self, args, params):
super().__init__(args, params)
self._optimizer = torch.optim.Adagrad(params, **self.optimizer_config)
@staticmethod
def add_args(parser):
"""Add optimizer-specific arguments to the parser."""
# fmt: off
parser.add_argument('--weight-decay', '--wd', default=0.0, type=float, metavar='WD',
help='weight decay')
# fmt: on
@property
def optimizer_config(self):
"""
Return a kwarg dictionary that will be used to override optimizer
args stored in checkpoints. This allows us to load a checkpoint and
resume training using a different set of optimizer args, e.g., with a
different learning rate.
"""
return {
'lr': self.args.lr[0],
'weight_decay': self.args.weight_decay,
}
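if __name__ == '__main__':
    # Hedged usage sketch (not part of fairseq): the wrapper above is normally
    # constructed by the fairseq trainer, so this only illustrates how the
    # kwarg dict returned by `optimizer_config` maps onto the underlying torch
    # optimizer, e.g. when resuming a checkpoint with a different learning
    # rate. The layer and hyperparameter values below are made up.
    import torch.nn as nn
    layer = nn.Linear(4, 2)
    optimizer_config = {'lr': 0.1, 'weight_decay': 0.01}
    optimizer = torch.optim.Adagrad(layer.parameters(), **optimizer_config)
    print(optimizer.defaults['lr'])  # 0.1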
|
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @emails oncall+metro_bundler
* @format
*/
'use strict';
const {
addModuleIdsToModuleWrapper,
inlineModuleIds,
createIdForPathFn,
getModuleCodeAndMap,
} = require('../util');
const {any} = jasmine;
describe('addModuleIdsToModuleWrapper', () => {
const path = 'path/to/file';
const createModule = (dependencies = []) => ({
dependencies,
file: {code: '__d(function(){});', isModule: true, path},
});
it('completes the module wrapped with module ID, and an array of dependency IDs', () => {
const dependencies = [
{id: 'a', path: 'path/to/a.js'},
{id: 'b', path: 'location/of/b.js'},
];
const module = createModule(dependencies);
const idForPath = jest.fn().mockImplementation(({path: inputPath}) => {
switch (inputPath) {
case path:
return 12;
case dependencies[0].path:
return 345;
case dependencies[1].path:
return 6;
}
throw new Error(`Unexpected path: ${inputPath}`);
});
expect(addModuleIdsToModuleWrapper(module, idForPath)).toEqual(
'__d(function(){},12,[345,6]);',
);
});
it('omits the array of dependency IDs if it is empty', () => {
const module = createModule();
expect(addModuleIdsToModuleWrapper(module, () => 98)).toEqual(
`__d(function(){},${98});`,
);
});
});
describe('`inlineModuleIds`:', () => {
const path = 'path/to/file';
const basicCode = `
__d(function(require, depMap) {
require(depMap[0]);
require(depMap[1]);
});
`;
const createModule = (dependencies = []) => ({
dependencies,
file: {code: basicCode, isModule: true, path},
});
const reUsedVariableCode = `
__d(function(require, depMap) {
require(depMap[0]);
require(depMap[1]);
function anotherScope(depMap) {
return depMap[1337];
}
});
`;
const createReUsedVariableModule = (dependencies = []) => ({
dependencies,
file: {code: reUsedVariableCode, isModule: true, path},
});
it('inlines module ids', () => {
const dependencies = [
{id: 'a', path: 'path/to/a.js'},
{id: 'b', path: 'location/of/b.js'},
];
const module = createModule(dependencies);
const idForPath = jest.fn().mockImplementation(({path: inputPath}) => {
switch (inputPath) {
case path:
return 12;
case dependencies[0].path:
return 345;
case dependencies[1].path:
return 6;
}
throw new Error(`Unexpected path: ${inputPath}`);
});
expect(inlineModuleIds(module, idForPath).moduleCode).toEqual(
'__d(function(require,depMap){require(345);require(6);},12);',
);
});
it('inlines module ids using a globally reserved name for depMap', () => {
const dependencies = [
{id: 'a', path: 'path/to/a.js'},
{id: 'b', path: 'location/of/b.js'},
];
const module = createModule(dependencies);
const idForPath = jest.fn().mockImplementation(({path: inputPath}) => {
switch (inputPath) {
case path:
return 12;
case dependencies[0].path:
return 345;
case dependencies[1].path:
return 6;
}
throw new Error(`Unexpected path: ${inputPath}`);
});
expect(
inlineModuleIds(module, idForPath, {dependencyMapReservedName: 'depMap'})
.moduleCode,
).toMatchInlineSnapshot(`
"
__d(function(require, depMap) {
require(345 );
require(6 );
},12);
"
`);
});
it('does not inline false positives, when not using a globally reserved name for dep map', () => {
const dependencies = [
{id: 'a', path: 'path/to/a.js'},
{id: 'b', path: 'location/of/b.js'},
];
const module = createReUsedVariableModule(dependencies);
const idForPath = jest.fn().mockImplementation(({path: inputPath}) => {
switch (inputPath) {
case path:
return 12;
case dependencies[0].path:
return 345;
case dependencies[1].path:
return 6;
}
throw new Error(`Unexpected path: ${inputPath}`);
});
expect(inlineModuleIds(module, idForPath).moduleCode).toMatchInlineSnapshot(
'"__d(function(require,depMap){require(345);require(6);function anotherScope(depMap){return depMap[1337];}},12);"',
);
});
});
describe('`createIdForPathFn`', () => {
let idForPath;
beforeEach(() => {
idForPath = createIdForPathFn();
});
it('returns a number for a string', () => {
expect(idForPath({path: 'arbitrary'})).toEqual(any(Number));
});
it('returns consecutive numbers', () => {
const strings = [
'arbitrary string',
'looking/like/a/path',
'/absolute/path/to/file.js',
'/more files/are here',
];
strings.forEach((string, i) => {
expect(idForPath({path: string})).toEqual(i);
});
});
it('returns the same id if the same string is passed in again', () => {
const path = 'this/is/an/arbitrary/path.js';
const id = idForPath({path});
idForPath({path: '/other/file'});
idForPath({path: 'and/another/file'});
expect(idForPath({path})).toEqual(id);
});
});
describe('getModuleCodeAndMap', () => {
it('returns empty x_facebook_sources field when map has no sources', () => {
const {moduleMap} = getModuleCodeAndMap(
{
dependencies: [],
file: {
code: '__d(function(){});',
map: {
version: 3,
mappings: '',
names: [],
sources: [],
},
functionMap: {
mappings: '',
names: [],
},
path: 'path/to/file',
type: 'module',
libraryIdx: null,
},
},
() => 0,
{enableIDInlining: false},
);
expect(moduleMap.x_facebook_sources).toEqual([]);
});
it('omits x_facebook_sources field entirely when map is sectioned', () => {
const {moduleMap} = getModuleCodeAndMap(
{
dependencies: [],
file: {
code: '__d(function(){});',
map: {
version: 3,
sections: [],
},
functionMap: {
mappings: '',
names: [],
},
path: 'path/to/file',
type: 'module',
libraryIdx: null,
},
},
() => 0,
{enableIDInlining: false},
);
expect(moduleMap.x_facebook_sources).toEqual(undefined);
});
});
|
from typing import List

class Solution:
def findMinArrowShots(self, points: List[List[int]]) -> int:
if not points:
return 0
points.sort(key = lambda x: x[0])
start = points[0][0]
end = points[0][1]
ans = len(points)
for i in range(1, len(points)):
if start <= points[i][0] <= end:
ans -= 1
if points[i][1] < end:
end = points[i][1]
else:
start = points[i][0]
end = points[i][1]
return ans
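if __name__ == '__main__':
    # Hedged sanity check (not part of the original solution): the balloon
    # intervals below are an illustrative example where two arrows suffice.
    print(Solution().findMinArrowShots([[10, 16], [2, 8], [1, 6], [7, 12]]))  # expected: 2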
|
/**
* Copyright (c) 2013-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
if (process.env.NODE_ENV !== 'production') {
var REACT_ELEMENT_TYPE = (typeof Symbol === 'function' &&
Symbol.for &&
Symbol.for('react.element')) ||
0xeac7;
var isValidElement = function(object) {
return typeof object === 'object' &&
object !== null &&
object.$$typeof === REACT_ELEMENT_TYPE;
};
// By explicitly using `prop-types` you are opting into new development behavior.
// http://fb.me/prop-types-in-prod
var throwOnDirectAccess = true;
module.exports = require('./factoryWithTypeCheckers')(isValidElement, throwOnDirectAccess);
} else {
// By explicitly using `prop-types` you are opting into new production behavior.
// http://fb.me/prop-types-in-prod
module.exports = require('./factoryWithThrowingShims')();
}
|
//
// MessageUtil.h
// GoBots
//
// Created by Rax on 9/3/17.
// Copyright © 2017 The Tech Time. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface MessageUtil : NSObject
+(NSString*) getRandomVictoryMessage;
@end
|
import assign from 'lodash/assign';
import includes from 'lodash/includes';
import isEmpty from 'lodash/isEmpty';
import isString from 'lodash/isString';
import pick from 'lodash/pick';
angular.module('controllers').controller(
'controllers.Domain.Redirection.edit',
class RedirectionEdit {
/**
* @constructs RedirectionEdit
* @param {Object} $scope
* @param {Object} $rootScope
* @param {Object} Alerter
* @param {Object} DomainRedirection
* @param {Object} WucValidator
*/
constructor(
$scope,
$rootScope,
$translate,
Alerter,
DomainRedirection,
WucValidator,
) {
this.$scope = $scope;
this.$rootScope = $rootScope;
this.$translate = $translate;
this.alerter = Alerter;
this.service = DomainRedirection;
this.validator = WucValidator;
}
$onInit() {
this.displayName = this.$scope.currentActionData.displayName;
this.redirection = angular.copy(
this.$scope.currentActionData.redirection,
);
this.redirectionTarget = this.redirection.targetDisplayName;
this.loading = false;
this.errors = {
redirectionTarget: false,
targetLength: false,
ortTitle: false,
ortKeywords: false,
ortDescription: false,
};
this.shouldIncludeDomain = false;
this.unknownFieldDisplayTypeErrorMessageId =
'domain_tab_REDIRECTION_edit_fail';
// Binds this to the functions used in the scope
this.$scope.editRedirection = this.editRedirection.bind(this);
this.$scope.editContainsNoError = this.editContainsNoError.bind(this);
}
/**
* @returns {Boolean} True if the length is valid and the target is not empty
*/
isTargetValid() {
const completeTarget = this.getCompleteTarget();
return (
!isEmpty(this.redirectionTarget) &&
this.constructor.isLengthValid(completeTarget)
);
}
/**
* @returns {String} Message for the label for a non-ort redirection
*/
getNonOrtLabel() {
let message;
switch (this.redirection.fieldDisplayType) {
case 'A':
message = 'domain_tab_REDIRECTION_edit_server_ipv4_label';
break;
case 'AAAA':
message = 'domain_tab_REDIRECTION_edit_server_ipv6_label';
break;
case 'CNAME':
message = 'domain_tab_REDIRECTION_edit_server_cname_label';
break;
default: {
const errorMessage = this.$translate.instant(
this.unknownFieldDisplayTypeErrorMessageId,
{ t0: this.redirection.fieldDisplayType },
);
this.alerter.alertFromSWS(
this.$translate.instant('domain_tab_REDIRECTION_edit_fail', {
t0: this.displayName,
}),
errorMessage,
this.$scope.alerts.main,
);
this.$scope.resetAction();
}
}
return this.$translate.instant(message);
}
/**
* @returns {Boolean} True if an error was detected in one of the inputs
*/
inputContainErrors() {
return includes(this.errors, true);
}
/**
* @returns {String} The target + the zone if the checkbox is selected
*/
getCompleteTarget() {
let { redirectionTarget } = this;
if (this.shouldIncludeDomain && isString(this.redirection.zone)) {
redirectionTarget += `.${this.redirection.zone}`;
}
return redirectionTarget;
}
/**
* Updates the error label for a non-ort redirection
*/
changeErrorLabel() {
let message;
if (this.inputContainErrors()) {
switch (this.redirection.fieldDisplayType) {
case 'A':
message = 'domain_tab_REDIRECTION_edit_server_ipv4_error';
break;
case 'AAAA':
message = 'domain_tab_REDIRECTION_edit_server_ipv6_error';
break;
case 'CNAME':
message = 'domain_tab_REDIRECTION_edit_server_cname_error';
break;
default: {
const errorMessage = this.$translate.instant(
this.unknownFieldDisplayTypeErrorMessageId,
{ t0: this.redirection.fieldDisplayType },
);
this.alerter.alertFromSWS(
this.$translate.instant('domain_tab_REDIRECTION_edit_fail', {
t0: this.displayName,
}),
errorMessage,
this.$scope.alerts.main,
);
this.$scope.resetAction();
}
}
}
this.errorLabel = this.$translate.instant(message);
}
/**
* Detects errors on ort redirections
*/
redirectionChange() {
if (this.redirection.isOrt) {
this.errors.ortTitle = !this.constructor.isLengthValid(
this.redirection.title,
);
this.errors.ortKeywords = !this.constructor.isLengthValid(
this.redirection.keywords,
);
this.errors.ortDescription = !this.constructor.isLengthValid(
this.redirection.description,
);
}
}
/**
* Detects errors on all redirections
*/
redirectionTargetChange() {
if (isEmpty(this.redirectionTarget)) {
this.errors.redirectionTarget = true;
} else if (this.redirection.isOrt) {
this.errors.redirectionTarget = !this.validator.isValidURL(
this.redirectionTarget,
);
this.errors.targetLength = !this.isTargetValid();
} else {
switch (this.redirection.fieldDisplayType) {
case 'A':
this.errors.redirectionTarget = !this.validator.isValidIpv4(
this.redirectionTarget,
);
break;
case 'AAAA':
this.errors.redirectionTarget = !this.validator.isValidIpv6(
this.redirectionTarget,
);
break;
case 'CNAME': {
const domainName = this.getCompleteTarget();
this.errors.redirectionTarget = !this.validator.isValidDomain(
domainName,
);
break;
}
default: {
const errorMessage = this.$translate.instant(
this.unknownFieldDisplayTypeErrorMessageId,
{ t0: this.redirection.fieldDisplayType },
);
this.alerter.alertFromSWS(
this.$translate.instant('domain_tab_REDIRECTION_edit_fail', {
t0: this.displayName,
}),
errorMessage,
this.$scope.alerts.main,
);
this.$scope.resetAction();
}
}
this.changeErrorLabel();
}
}
/**
* @returns {Boolean} True if the total url size (target + subdomain)
* is less than 245
*/
isTargetLengthValid() {
let value = this.redirectionTarget;
const shouldAppendZone =
this.shouldIncludeDomain &&
isString(this.redirection.fieldDisplayType) &&
this.redirection.fieldDisplayType.toUpperCase() === 'CNAME';
if (shouldAppendZone) {
value += `${this.redirection.zone}.`;
}
return this.constructor.isLengthValid(value);
}
/**
* Is a parameter short enough to fit in the PUT request?
* @param {String} value
* @returns {Boolean} True if the value length is below the allowed maximum
*/
static isLengthValid(value) {
const maxSize = 245;
const valueLength = isString(value) ? value.length : 0;
return maxSize - valueLength > 0;
}
/**
* @returns {Boolean} If the page is not loading and doesn't contain errors
*/
editContainsNoError() {
return !this.isLoading && !this.inputContainErrors();
}
/**
* Calls the service to put the redirection
*/
editRedirection() {
let data = {
target: this.includeDomainToTarget(),
};
let method = 'put';
this.isLoading = true;
if (this.redirection.isOrt) {
data = assign(
data,
pick(this.redirection, ['description', 'keywords', 'title']),
);
method = 'putOrt';
}
this.service[method](this.redirection.id, data)
.then(() =>
this.alerter.success(
this.$translate.instant('domain_tab_REDIRECTION_edit_success', {
t0: this.displayName,
}),
this.$scope.alerts.main,
),
)
.catch((err) =>
this.alerter.alertFromSWS(
this.$translate.instant('domain_tab_REDIRECTION_edit_fail', {
t0: this.displayName,
}),
err,
this.$scope.alerts.main,
),
)
.finally(() => {
this.isLoading = false;
this.$scope.resetAction();
this.$rootScope.$broadcast('domain.tabs.redirection.reload', true);
this.shouldIncludeDomain = false;
});
}
/**
* Includes a dot at the end of the target name if the target is a CNAME
* and the user doesn't want the domain included at the end of the target
* @returns {String} Redirection target with a trailing dot if needed
*/
includeDomainToTarget() {
const endsWithDot = this.redirectionTarget.match(/.*\.$/);
const targetIsCNAME =
isString(this.redirection.fieldDisplayType) &&
this.redirection.fieldDisplayType.toUpperCase() === 'CNAME';
const shouldDeleteTrailingDot =
endsWithDot && this.shouldIncludeDomain && targetIsCNAME;
const shouldAddTrailingDot =
!endsWithDot && !this.shouldIncludeDomain && targetIsCNAME;
if (shouldDeleteTrailingDot) {
return this.redirectionTarget.substring(
0,
this.redirectionTarget.length - 1,
);
}
if (shouldAddTrailingDot) {
return `${this.redirectionTarget}.`;
}
return this.redirectionTarget;
}
},
);
|
cordova.define('cordova/plugin_list', function(require, exports, module) {
module.exports = [
{
"file": "plugins/cordova-plugin-battery-status/www/battery.js",
"id": "cordova-plugin-battery-status.battery",
"pluginId": "cordova-plugin-battery-status",
"clobbers": [
"navigator.battery"
]
},
{
"file": "plugins/cordova-plugin-battery-status/src/browser/BatteryProxy.js",
"id": "cordova-plugin-battery-status.Battery",
"pluginId": "cordova-plugin-battery-status",
"runs": true
},
{
"file": "plugins/cordova-plugin-camera/www/CameraConstants.js",
"id": "cordova-plugin-camera.Camera",
"pluginId": "cordova-plugin-camera",
"clobbers": [
"Camera"
]
},
{
"file": "plugins/cordova-plugin-camera/www/CameraPopoverOptions.js",
"id": "cordova-plugin-camera.CameraPopoverOptions",
"pluginId": "cordova-plugin-camera",
"clobbers": [
"CameraPopoverOptions"
]
},
{
"file": "plugins/cordova-plugin-camera/www/Camera.js",
"id": "cordova-plugin-camera.camera",
"pluginId": "cordova-plugin-camera",
"clobbers": [
"navigator.camera"
]
},
{
"file": "plugins/cordova-plugin-camera/src/browser/CameraProxy.js",
"id": "cordova-plugin-camera.CameraProxy",
"pluginId": "cordova-plugin-camera",
"runs": true
},
{
"file": "plugins/cordova-plugin-file/www/DirectoryEntry.js",
"id": "cordova-plugin-file.DirectoryEntry",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.DirectoryEntry"
]
},
{
"file": "plugins/cordova-plugin-file/www/DirectoryReader.js",
"id": "cordova-plugin-file.DirectoryReader",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.DirectoryReader"
]
},
{
"file": "plugins/cordova-plugin-file/www/Entry.js",
"id": "cordova-plugin-file.Entry",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.Entry"
]
},
{
"file": "plugins/cordova-plugin-file/www/File.js",
"id": "cordova-plugin-file.File",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.File"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileEntry.js",
"id": "cordova-plugin-file.FileEntry",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileEntry"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileError.js",
"id": "cordova-plugin-file.FileError",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileError"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileReader.js",
"id": "cordova-plugin-file.FileReader",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileReader"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileSystem.js",
"id": "cordova-plugin-file.FileSystem",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileSystem"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileUploadOptions.js",
"id": "cordova-plugin-file.FileUploadOptions",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileUploadOptions"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileUploadResult.js",
"id": "cordova-plugin-file.FileUploadResult",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileUploadResult"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileWriter.js",
"id": "cordova-plugin-file.FileWriter",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileWriter"
]
},
{
"file": "plugins/cordova-plugin-file/www/Flags.js",
"id": "cordova-plugin-file.Flags",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.Flags"
]
},
{
"file": "plugins/cordova-plugin-file/www/LocalFileSystem.js",
"id": "cordova-plugin-file.LocalFileSystem",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.LocalFileSystem"
],
"merges": [
"window"
]
},
{
"file": "plugins/cordova-plugin-file/www/Metadata.js",
"id": "cordova-plugin-file.Metadata",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.Metadata"
]
},
{
"file": "plugins/cordova-plugin-file/www/ProgressEvent.js",
"id": "cordova-plugin-file.ProgressEvent",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.ProgressEvent"
]
},
{
"file": "plugins/cordova-plugin-file/www/fileSystems.js",
"id": "cordova-plugin-file.fileSystems",
"pluginId": "cordova-plugin-file"
},
{
"file": "plugins/cordova-plugin-file/www/requestFileSystem.js",
"id": "cordova-plugin-file.requestFileSystem",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.requestFileSystem"
]
},
{
"file": "plugins/cordova-plugin-file/www/resolveLocalFileSystemURI.js",
"id": "cordova-plugin-file.resolveLocalFileSystemURI",
"pluginId": "cordova-plugin-file",
"merges": [
"window"
]
},
{
"file": "plugins/cordova-plugin-file/www/browser/isChrome.js",
"id": "cordova-plugin-file.isChrome",
"pluginId": "cordova-plugin-file",
"runs": true
},
{
"file": "plugins/cordova-plugin-file/www/browser/Preparing.js",
"id": "cordova-plugin-file.Preparing",
"pluginId": "cordova-plugin-file",
"runs": true
},
{
"file": "plugins/cordova-plugin-file/src/browser/FileProxy.js",
"id": "cordova-plugin-file.browserFileProxy",
"pluginId": "cordova-plugin-file",
"runs": true
},
{
"file": "plugins/cordova-plugin-file/www/fileSystemPaths.js",
"id": "cordova-plugin-file.fileSystemPaths",
"pluginId": "cordova-plugin-file",
"merges": [
"cordova"
],
"runs": true
},
{
"file": "plugins/cordova-plugin-file/www/browser/FileSystem.js",
"id": "cordova-plugin-file.firefoxFileSystem",
"pluginId": "cordova-plugin-file",
"merges": [
"window.FileSystem"
]
},
{
"file": "plugins/cordova-plugin-media-capture/www/CaptureAudioOptions.js",
"id": "cordova-plugin-media-capture.CaptureAudioOptions",
"pluginId": "cordova-plugin-media-capture",
"clobbers": [
"CaptureAudioOptions"
]
},
{
"file": "plugins/cordova-plugin-media-capture/www/CaptureImageOptions.js",
"id": "cordova-plugin-media-capture.CaptureImageOptions",
"pluginId": "cordova-plugin-media-capture",
"clobbers": [
"CaptureImageOptions"
]
},
{
"file": "plugins/cordova-plugin-media-capture/www/CaptureVideoOptions.js",
"id": "cordova-plugin-media-capture.CaptureVideoOptions",
"pluginId": "cordova-plugin-media-capture",
"clobbers": [
"CaptureVideoOptions"
]
},
{
"file": "plugins/cordova-plugin-media-capture/www/CaptureError.js",
"id": "cordova-plugin-media-capture.CaptureError",
"pluginId": "cordova-plugin-media-capture",
"clobbers": [
"CaptureError"
]
},
{
"file": "plugins/cordova-plugin-media-capture/www/MediaFileData.js",
"id": "cordova-plugin-media-capture.MediaFileData",
"pluginId": "cordova-plugin-media-capture",
"clobbers": [
"MediaFileData"
]
},
{
"file": "plugins/cordova-plugin-media-capture/www/MediaFile.js",
"id": "cordova-plugin-media-capture.MediaFile",
"pluginId": "cordova-plugin-media-capture",
"clobbers": [
"MediaFile"
]
},
{
"file": "plugins/cordova-plugin-media-capture/www/capture.js",
"id": "cordova-plugin-media-capture.capture",
"pluginId": "cordova-plugin-media-capture",
"clobbers": [
"navigator.device.capture"
]
},
{
"file": "plugins/cordova-plugin-media-capture/src/browser/CaptureProxy.js",
"id": "cordova-plugin-media-capture.CaptureProxy",
"pluginId": "cordova-plugin-media-capture",
"runs": true
},
{
"file": "plugins/cordova-plugin-device-motion/www/Acceleration.js",
"id": "cordova-plugin-device-motion.Acceleration",
"pluginId": "cordova-plugin-device-motion",
"clobbers": [
"Acceleration"
]
},
{
"file": "plugins/cordova-plugin-device-motion/www/accelerometer.js",
"id": "cordova-plugin-device-motion.accelerometer",
"pluginId": "cordova-plugin-device-motion",
"clobbers": [
"navigator.accelerometer"
]
},
{
"file": "plugins/cordova-plugin-device-motion/src/browser/AccelerometerProxy.js",
"id": "cordova-plugin-device-motion.AccelerometerProxy",
"pluginId": "cordova-plugin-device-motion",
"runs": true
},
{
"file": "plugins/cordova-plugin-contacts/www/contacts.js",
"id": "cordova-plugin-contacts.contacts",
"pluginId": "cordova-plugin-contacts",
"clobbers": [
"navigator.contacts"
]
},
{
"file": "plugins/cordova-plugin-contacts/www/Contact.js",
"id": "cordova-plugin-contacts.Contact",
"pluginId": "cordova-plugin-contacts",
"clobbers": [
"Contact"
]
},
{
"file": "plugins/cordova-plugin-contacts/www/ContactAddress.js",
"id": "cordova-plugin-contacts.ContactAddress",
"pluginId": "cordova-plugin-contacts",
"clobbers": [
"ContactAddress"
]
},
{
"file": "plugins/cordova-plugin-contacts/www/ContactError.js",
"id": "cordova-plugin-contacts.ContactError",
"pluginId": "cordova-plugin-contacts",
"clobbers": [
"ContactError"
]
},
{
"file": "plugins/cordova-plugin-contacts/www/ContactField.js",
"id": "cordova-plugin-contacts.ContactField",
"pluginId": "cordova-plugin-contacts",
"clobbers": [
"ContactField"
]
},
{
"file": "plugins/cordova-plugin-contacts/www/ContactFindOptions.js",
"id": "cordova-plugin-contacts.ContactFindOptions",
"pluginId": "cordova-plugin-contacts",
"clobbers": [
"ContactFindOptions"
]
},
{
"file": "plugins/cordova-plugin-contacts/www/ContactName.js",
"id": "cordova-plugin-contacts.ContactName",
"pluginId": "cordova-plugin-contacts",
"clobbers": [
"ContactName"
]
},
{
"file": "plugins/cordova-plugin-contacts/www/ContactOrganization.js",
"id": "cordova-plugin-contacts.ContactOrganization",
"pluginId": "cordova-plugin-contacts",
"clobbers": [
"ContactOrganization"
]
},
{
"file": "plugins/cordova-plugin-contacts/www/ContactFieldType.js",
"id": "cordova-plugin-contacts.ContactFieldType",
"pluginId": "cordova-plugin-contacts",
"merges": [
""
]
},
{
"file": "plugins/cordova-plugin-device/www/device.js",
"id": "cordova-plugin-device.device",
"pluginId": "cordova-plugin-device",
"clobbers": [
"device"
]
},
{
"file": "plugins/cordova-plugin-device/src/browser/DeviceProxy.js",
"id": "cordova-plugin-device.DeviceProxy",
"pluginId": "cordova-plugin-device",
"runs": true
},
{
"file": "plugins/cordova-plugin-device-orientation/www/CompassError.js",
"id": "cordova-plugin-device-orientation.CompassError",
"pluginId": "cordova-plugin-device-orientation",
"clobbers": [
"CompassError"
]
},
{
"file": "plugins/cordova-plugin-device-orientation/www/CompassHeading.js",
"id": "cordova-plugin-device-orientation.CompassHeading",
"pluginId": "cordova-plugin-device-orientation",
"clobbers": [
"CompassHeading"
]
},
{
"file": "plugins/cordova-plugin-device-orientation/www/compass.js",
"id": "cordova-plugin-device-orientation.compass",
"pluginId": "cordova-plugin-device-orientation",
"clobbers": [
"navigator.compass"
]
},
{
"file": "plugins/cordova-plugin-device-orientation/src/browser/CompassProxy.js",
"id": "cordova-plugin-device-orientation.CompassProxy",
"pluginId": "cordova-plugin-device-orientation",
"runs": true
},
{
"file": "plugins/cordova-plugin-dialogs/www/notification.js",
"id": "cordova-plugin-dialogs.notification",
"pluginId": "cordova-plugin-dialogs",
"merges": [
"navigator.notification"
]
},
{
"file": "plugins/cordova-plugin-dialogs/www/browser/notification.js",
"id": "cordova-plugin-dialogs.notification_browser",
"pluginId": "cordova-plugin-dialogs",
"merges": [
"navigator.notification"
]
},
{
"file": "plugins/cordova-plugin-file-transfer/www/FileTransferError.js",
"id": "cordova-plugin-file-transfer.FileTransferError",
"pluginId": "cordova-plugin-file-transfer",
"clobbers": [
"window.FileTransferError"
]
},
{
"file": "plugins/cordova-plugin-file-transfer/www/FileTransfer.js",
"id": "cordova-plugin-file-transfer.FileTransfer",
"pluginId": "cordova-plugin-file-transfer",
"clobbers": [
"window.FileTransfer"
]
},
{
"file": "plugins/cordova-plugin-file-transfer/www/browser/FileTransfer.js",
"id": "cordova-plugin-file-transfer.BrowserFileTransfer",
"pluginId": "cordova-plugin-file-transfer",
"clobbers": [
"window.FileTransfer"
]
},
{
"file": "plugins/cordova-plugin-globalization/www/GlobalizationError.js",
"id": "cordova-plugin-globalization.GlobalizationError",
"pluginId": "cordova-plugin-globalization",
"clobbers": [
"window.GlobalizationError"
]
},
{
"file": "plugins/cordova-plugin-globalization/www/globalization.js",
"id": "cordova-plugin-globalization.globalization",
"pluginId": "cordova-plugin-globalization",
"clobbers": [
"navigator.globalization"
]
},
{
"file": "plugins/cordova-plugin-globalization/www/browser/moment.js",
"id": "cordova-plugin-globalization.moment",
"pluginId": "cordova-plugin-globalization",
"runs": true
},
{
"file": "plugins/cordova-plugin-globalization/src/browser/GlobalizationProxy.js",
"id": "cordova-plugin-globalization.GlobalizationProxy",
"pluginId": "cordova-plugin-globalization",
"runs": true
},
{
"file": "plugins/cordova-plugin-inappbrowser/www/inappbrowser.js",
"id": "cordova-plugin-inappbrowser.inappbrowser",
"pluginId": "cordova-plugin-inappbrowser",
"clobbers": [
"cordova.InAppBrowser.open",
"window.open"
]
},
{
"file": "plugins/cordova-plugin-inappbrowser/src/browser/InAppBrowserProxy.js",
"id": "cordova-plugin-inappbrowser.InAppBrowserProxy",
"pluginId": "cordova-plugin-inappbrowser",
"merges": [
""
]
},
{
"file": "plugins/cordova-plugin-media/www/MediaError.js",
"id": "cordova-plugin-media.MediaError",
"pluginId": "cordova-plugin-media",
"clobbers": [
"window.MediaError"
]
},
{
"file": "plugins/cordova-plugin-media/www/Media.js",
"id": "cordova-plugin-media.Media",
"pluginId": "cordova-plugin-media",
"clobbers": [
"window.Media"
]
},
{
"file": "plugins/cordova-plugin-media/www/browser/Media.js",
"id": "cordova-plugin-media.BrowserMedia",
"pluginId": "cordova-plugin-media",
"clobbers": [
"window.Media"
]
},
{
"file": "plugins/cordova-plugin-network-information/www/network.js",
"id": "cordova-plugin-network-information.network",
"pluginId": "cordova-plugin-network-information",
"clobbers": [
"navigator.connection",
"navigator.network.connection"
]
},
{
"file": "plugins/cordova-plugin-network-information/www/Connection.js",
"id": "cordova-plugin-network-information.Connection",
"pluginId": "cordova-plugin-network-information",
"clobbers": [
"Connection"
]
},
{
"file": "plugins/cordova-plugin-network-information/src/browser/network.js",
"id": "cordova-plugin-network-information.NetworkInfoProxy",
"pluginId": "cordova-plugin-network-information",
"runs": true
},
{
"file": "plugins/cordova-plugin-splashscreen/www/splashscreen.js",
"id": "cordova-plugin-splashscreen.SplashScreen",
"pluginId": "cordova-plugin-splashscreen",
"clobbers": [
"navigator.splashscreen"
]
},
{
"file": "plugins/cordova-plugin-splashscreen/src/browser/SplashScreenProxy.js",
"id": "cordova-plugin-splashscreen.SplashScreenProxy",
"pluginId": "cordova-plugin-splashscreen",
"runs": true
},
{
"file": "plugins/cordova-plugin-statusbar/www/statusbar.js",
"id": "cordova-plugin-statusbar.statusbar",
"pluginId": "cordova-plugin-statusbar",
"clobbers": [
"window.StatusBar"
]
},
{
"file": "plugins/cordova-plugin-statusbar/src/browser/statusbar.js",
"id": "cordova-plugin-statusbar.statusbar.Browser",
"pluginId": "cordova-plugin-statusbar",
"merges": [
"window.StatusBar"
]
},
{
"file": "plugins/cordova-plugin-vibration/www/vibration.js",
"id": "cordova-plugin-vibration.notification",
"pluginId": "cordova-plugin-vibration",
"merges": [
"navigator.notification",
"navigator"
]
},
{
"file": "plugins/cordova-plugin-vibration/src/browser/Vibration.js",
"id": "cordova-plugin-vibration.Vibration",
"pluginId": "cordova-plugin-vibration",
"merges": [
"navigator.notification",
"navigator"
]
}
];
module.exports.metadata =
// TOP OF METADATA
{
"cordova-plugin-battery-status": "1.1.2",
"cordova-plugin-camera": "2.1.1",
"cordova-plugin-compat": "1.2.0",
"cordova-plugin-file": "4.3.3",
"cordova-plugin-media-capture": "1.2.0",
"cordova-plugin-console": "1.0.7",
"cordova-plugin-device-motion": "1.2.5",
"cordova-plugin-contacts": "2.0.1",
"cordova-plugin-device": "1.1.6",
"cordova-plugin-device-orientation": "1.0.7",
"cordova-plugin-dialogs": "1.2.1",
"cordova-plugin-file-transfer": "1.5.1",
"cordova-plugin-geolocation": "2.1.0",
"cordova-plugin-globalization": "1.0.7",
"cordova-plugin-inappbrowser": "1.3.0",
"cordova-plugin-media": "2.2.0",
"cordova-plugin-network-information": "1.2.1",
"cordova-plugin-splashscreen": "3.2.2",
"cordova-plugin-statusbar": "2.1.3",
"cordova-plugin-vibration": "2.1.5",
"cordova-plugin-whitelist": "1.2.2"
}
// BOTTOM OF METADATA
}); |
import * as Three from 'three';
import React from 'react';
var WIDTH = 80;
var DEPTH = 80;
var HEIGHT = 200;
var textureLoader = new Three.TextureLoader();
var woodMaterial = textureLoader.load(null);
var bookTexture1 = textureLoader.load(null);
var bookTexture2 = textureLoader.load(null);
var bookTexture3 = textureLoader.load(null);
var objectMaxLOD = makeObjectMaxLOD();
var objectMinLOD = makeObjectMinLOD();
function makeObjectMaxLOD() {
var bookcase = new Three.Mesh();
//Bookcase
var backGeometry = new Three.BoxGeometry(0.03, 2, 0.8);
var wood = new Three.MeshPhongMaterial({ map: woodMaterial });
var backside = new Three.Mesh(backGeometry, wood);
backside.position.set(0, 1, 0);
bookcase.add(backside);
var sideGeometry = new Three.BoxGeometry(0.3, 2, 0.03);
var side1 = new Three.Mesh(sideGeometry, wood);
side1.position.set(0.15, 1, 0.4);
bookcase.add(side1);
var side2 = new Three.Mesh(sideGeometry, wood);
side2.position.set(0.15, 1, -0.4);
bookcase.add(side2);
var bottomGeometry = new Three.BoxGeometry(0.3, 0.03, 0.8);
var bottomPanel = new Three.Mesh(bottomGeometry, wood);
bottomPanel.position.set(0.15, 2, 0);
bookcase.add(bottomPanel);
var topGeometry = new Three.BoxGeometry(0.3, 0.03, 0.8);
var topPanel = new Three.Mesh(topGeometry, wood);
topPanel.position.set(0.15, 0.015, 0);
bookcase.add(topPanel);
//shelves
for (var i = 1; i < 5; i++) {
var shelveGeometry = new Three.BoxGeometry(0.3, 0.03, 0.8);
var shelve = new Three.Mesh(shelveGeometry, wood);
shelve.position.set(0.15, 0.015 + i * 0.4, 0);
bookcase.add(shelve);
}
function choiceTexture() {
return Math.floor(Math.random() * 3);
}
//book
var bookGeometry = new Three.BoxGeometry(0.24, 0.32, 0.76);
var bookMaterial = [new Three.MeshLambertMaterial({ map: bookTexture1 }), new Three.MeshLambertMaterial({ map: bookTexture2 }), new Three.MeshLambertMaterial({ map: bookTexture3 })];
var book1 = new Three.Mesh(bookGeometry, bookMaterial[choiceTexture()]);
book1.position.set(0.15, 0.59, 0);
bookcase.add(book1);
var book2 = new Three.Mesh(bookGeometry, bookMaterial[choiceTexture()]);
book2.position.set(0.15, 0.99, 0);
bookcase.add(book2);
var book3 = new Three.Mesh(bookGeometry, bookMaterial[choiceTexture()]);
book3.position.set(0.15, 0.19, 0);
bookcase.add(book3);
var book4 = new Three.Mesh(bookGeometry, bookMaterial[choiceTexture()]);
book4.position.set(0.15, 1.39, 0);
bookcase.add(book4);
var book5 = new Three.Mesh(bookGeometry, bookMaterial[choiceTexture()]);
book5.position.set(0.15, 1.79, 0);
bookcase.add(book5);
return bookcase;
}
function makeObjectMinLOD() {
var bookcase = new Three.Mesh();
var textureLoader = new Three.TextureLoader();
var woodMaterial = textureLoader.load(null);
//Bookcase
var backGeometry = new Three.BoxGeometry(0.03, 2, 0.8);
var wood = new Three.MeshPhongMaterial({ map: woodMaterial });
var backside = new Three.Mesh(backGeometry, wood);
backside.position.set(0, 1, 0);
bookcase.add(backside);
var sideGeometry = new Three.BoxGeometry(0.3, 2, 0.03);
var side1 = new Three.Mesh(sideGeometry, wood);
side1.position.set(0.15, 1, 0.4);
bookcase.add(side1);
var side2 = new Three.Mesh(sideGeometry, wood);
side2.position.set(0.15, 1, -0.4);
bookcase.add(side2);
var bottomGeometry = new Three.BoxGeometry(0.3, 0.03, 0.8);
var bottomPanel = new Three.Mesh(bottomGeometry, wood);
bottomPanel.position.set(0.15, 2, 0);
bookcase.add(bottomPanel);
var topGeometry = new Three.BoxGeometry(0.3, 0.03, 0.8);
var topPanel = new Three.Mesh(topGeometry, wood);
topPanel.position.set(0.15, 0.015, 0);
bookcase.add(topPanel);
//shelves
for (var i = 1; i < 5; i++) {
var shelveGeometry = new Three.BoxGeometry(0.3, 0.03, 0.8);
var shelve = new Three.Mesh(shelveGeometry, wood);
shelve.position.set(0.15, 0.015 + i * 0.4, 0);
bookcase.add(shelve);
}
return bookcase;
}
export default {
name: 'bookcase',
prototype: 'items',
info: {
tag: ['furnishings', 'wood'],
title: 'bookcase',
description: 'bookcase',
image: null
},
properties: {
altitude: {
label: 'altitude',
type: 'length-measure',
defaultValue: {
length: 0,
unit: 'cm'
}
}
},
render2D: function render2D(element, layer, scene) {
var angle = element.rotation + 90;
var textRotation = 0;
if (Math.sin(angle * Math.PI / 180) < 0) {
textRotation = 180;
}
var rect_style = { stroke: element.selected ? '#0096fd' : '#000', strokeWidth: '2px', fill: '#84e1ce' };
return React.createElement(
'g',
{ transform: 'translate(' + -WIDTH / 2 + ',' + -DEPTH / 2 + ')' },
React.createElement('rect', { key: '1', x: '0', y: '0', width: WIDTH, height: DEPTH, style: rect_style }),
React.createElement(
'text',
{ key: '2', x: '0', y: '0',
transform: 'translate(' + WIDTH / 2 + ', ' + DEPTH / 2 + ') scale(1,-1) rotate(' + textRotation + ')',
style: { textAnchor: 'middle', fontSize: '11px' } },
element.type
)
);
},
render3D: function render3D(element, layer, scene) {
var newAltitude = element.properties.get('altitude').get('length');
/**************** lod max ******************/
var bookcaseMaxLOD = new Three.Object3D();
bookcaseMaxLOD.add(objectMaxLOD.clone());
var value = new Three.Box3().setFromObject(bookcaseMaxLOD);
var deltaX = Math.abs(value.max.x - value.min.x);
var deltaY = Math.abs(value.max.y - value.min.y);
var deltaZ = Math.abs(value.max.z - value.min.z);
bookcaseMaxLOD.rotation.y += Math.PI / 2;
bookcaseMaxLOD.position.y += newAltitude;
bookcaseMaxLOD.position.z += WIDTH / 2;
bookcaseMaxLOD.scale.set(WIDTH / deltaX, HEIGHT / deltaY, DEPTH / deltaZ);
/**************** lod min ******************/
var bookcaseMinLOD = new Three.Object3D();
bookcaseMinLOD.add(objectMinLOD.clone());
bookcaseMinLOD.rotation.y += Math.PI / 2;
bookcaseMinLOD.position.y += newAltitude;
bookcaseMinLOD.position.z += WIDTH / 2;
bookcaseMinLOD.scale.set(WIDTH / deltaX, HEIGHT / deltaY, DEPTH / deltaZ);
/**** all level of detail ***/
var lod = new Three.LOD();
lod.addLevel(bookcaseMaxLOD, 200);
lod.addLevel(bookcaseMinLOD, 900);
lod.updateMatrix();
lod.matrixAutoUpdate = false;
if (element.selected) {
var bbox = new Three.BoxHelper(lod, 0x99c3fb);
bbox.material.linewidth = 5;
bbox.renderOrder = 1000;
bbox.material.depthTest = false;
lod.add(bbox);
}
return Promise.resolve(lod);
}
}; |
import * as actionType from '../actionType';
import utils from '@app/utils';
import { fromJS, List } from 'immutable';
let defaultState = utils.lastRecord && utils.lastRecord.redoPathList && utils.lastRecord.redoPathList.length !== 0 ?
List(utils.lastRecord.redoPathList) : List([]);
export default (state = defaultState, action = {}) => {
switch (action.type) {
case actionType.SETREDOLIST:
return action.data;
default:
return state;
}
};
|
import React from 'react';
import { configure, setAddon, addDecorator } from '@storybook/react';
import infoAddon from '@storybook/addon-info';
import Container from './Container';
// addDecorator(checkA11y);
addDecorator(story => <Container story={story} />);
setAddon(infoAddon);
function loadStories() {
const req = require.context('../src/components', true, /.stories.jsx$/);
req.keys().forEach(filename => req(filename));
const packageReq = require.context('../packages', true, /-story\.js/);
packageReq.keys().forEach(filename => packageReq(filename));
}
configure(loadStories, module);
|
#!/usr/bin/env python
# coding: utf-8
# # Publications markdown generator for academicpages
#
# Takes a set of bibtex files of publications and converts them for use with [academicpages.github.io](academicpages.github.io). This is an interactive Jupyter notebook ([see more info here](http://jupyter-notebook-beginner-guide.readthedocs.io/en/latest/what_is_jupyter.html)).
#
# The core python code is also in `pubsFromBibs.py`.
# Run either version from the `markdown_generator` folder after updating the publist dictionary with the items below (a sample BibTeX entry is sketched after the dictionary):
# * bib file names
# * specific venue keys based on your bib file preferences
# * any specific pre-text for specific files
# * Collection Name (future feature)
#
# TODO: Make this work with other databases of citations,
# TODO: Merge this with the existing TSV parsing solution
from pybtex.database.input import bibtex
from time import strptime
import string
import html
import os
import re
#todo: incorporate different collection types rather than a catch all publications, requires other changes to template
publist = {
"proceeding": {
"file" : "proceedings.bib",
"venuekey": "booktitle",
"venue-pretext": "In the proceedings of ",
"collection" : {"name":"publications",
"permalink":"/publication/"}
},
"journal":{
"file": "pubs.bib",
"venuekey" : "journal",
"venue-pretext" : "",
"collection" : {"name":"publications",
"permalink":"/publication/"}
}
}
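# ---------------------------------------------------------------------------
# Hedged example (not from the original notebook): a minimal entry of the kind
# pubs.bib is expected to contain. The field names match the keys read below
# ("title", "author", "year", "month", "url", "note") and the "journal"
# venuekey; the values are illustrative placeholders.
#
# @article{doe2020example,
#     title   = {An Example Publication},
#     author  = {Doe, Jane},
#     journal = {Journal of Examples},
#     year    = {2020},
#     month   = {4},
#     url     = {https://example.org/paper.pdf},
#     note    = {Short excerpt shown on the publication page.}
# }
# ---------------------------------------------------------------------------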
html_escape_table = {
"&": "&",
'"': """,
"'": "'"
}
def html_escape(text):
"""Produce entities within text."""
return "".join(html_escape_table.get(c,c) for c in text)
for pubsource in publist:
parser = bibtex.Parser()
bibdata = parser.parse_file(publist[pubsource]["file"])
#loop through the individual references in a given bibtex file
for bib_id in bibdata.entries:
#reset default date
pub_year = "1900"
pub_month = "01"
pub_day = "01"
b = bibdata.entries[bib_id].fields
try:
pub_year = f'{b["year"]}'
#todo: this hack for month and day needs some cleanup
if "month" in b.keys():
if(len(b["month"])<3):
pub_month = "0"+b["month"]
pub_month = pub_month[-2:]
elif(b["month"] not in range(12)):
tmnth = strptime(b["month"][:3],'%b').tm_mon
pub_month = "{:02d}".format(tmnth)
else:
pub_month = str(b["month"])
if "day" in b.keys():
pub_day = str(b["day"])
pub_date = pub_year+"-"+pub_month+"-"+pub_day
#strip out {} as needed (some bibtex entries that maintain formatting)
clean_title = b["title"].replace("{", "").replace("}","").replace("\\","").replace(" ","-")
url_slug = re.sub("\\[.*\\]|[^a-zA-Z0-9_-]", "", clean_title)
url_slug = url_slug.replace("--","-")
md_filename = (str(pub_date) + "-" + url_slug + ".md").replace("--","-")
html_filename = (str(pub_date) + "-" + url_slug).replace("--","-")
#Build Citation from text
citation = ""
#citation authors - todo - add highlighting for primary author?
for author in bibdata.entries[bib_id].persons["author"]:
citation = citation+" "+author.first_names[0]+" "+author.last_names[0]+", "
#citation title
citation = citation + "\"" + html_escape(b["title"].replace("{", "").replace("}","").replace("\\","")) + ".\""
#add venue logic depending on citation type
venue = publist[pubsource]["venue-pretext"]+b[publist[pubsource]["venuekey"]].replace("{", "").replace("}","").replace("\\","")
citation = citation + " " + html_escape(venue)
citation = citation + ", " + pub_year + "."
## YAML variables
md = "---\ntitle: \"" + html_escape(b["title"].replace("{", "").replace("}","").replace("\\","")) + '"\n'
md += """collection: """ + publist[pubsource]["collection"]["name"]
md += """\npermalink: """ + publist[pubsource]["collection"]["permalink"] + html_filename
note = False
if "note" in b.keys():
if len(str(b["note"])) > 5:
md += "\nexcerpt: '" + html_escape(b["note"]) + "'"
note = True
md += "\ndate: " + str(pub_date)
md += "\nvenue: '" + html_escape(venue) + "'"
url = False
if "url" in b.keys():
if len(str(b["url"])) > 5:
md += "\npaperurl: '" + b["url"] + "'"
url = True
md += "\ncitation: '" + html_escape(citation) + "'"
md += "\n---"
## Markdown description for individual page
if note:
md += "\n" + html_escape(b["note"]) + "\n"
if url:
md += "\n[Access paper here](" + b["url"] + "){:target=\"_blank\"}\n"
else:
md += "\nUse [Google Scholar](https://scholar.google.com/scholar?q="+html.escape(clean_title.replace("-","+"))+"){:target=\"_blank\"} for full citation"
md_filename = os.path.basename(md_filename)
with open("../_publications/" + md_filename, 'w') as f:
f.write(md)
print(f'SUCCESSFULLY PARSED {bib_id}: \"', b["title"][:60],"..."*(len(b['title'])>60),"\"")
# field may not exist for a reference
except KeyError as e:
print(f'WARNING Missing Expected Field {e} from entry {bib_id}: \"', b["title"][:30],"..."*(len(b['title'])>30),"\"")
continue |
"""
Class Features
Name: cpl_hmc_finalizer
Author(s): Fabio Delogu ([email protected])
Date: '20200401'
Version: '3.0.0'
"""
#######################################################################################
# Library
import logging
import os
from hmc.algorithm.default.lib_default_args import logger_name
from hmc.algorithm.debug.lib_debug import read_workspace_obj, write_workspace_obj
from hmc.driver.dataset.drv_dataset_hmc_base_destination import ModelDestination
# Log
log_stream = logging.getLogger(logger_name)
# Debug
# import matplotlib.pylab as plt
#######################################################################################
# -------------------------------------------------------------------------------------
# Class to finalize model application
class ModelFinalizer:
# -------------------------------------------------------------------------------------
# Method time info
def __init__(self, collection_dynamic, obj_geo_reference=None, obj_args=None, obj_run=None, obj_ancillary=None):
# -------------------------------------------------------------------------------------
# Store information in global workspace
self.collection_dynamic = collection_dynamic
self.obj_geo_reference = obj_geo_reference
self.obj_args = obj_args
self.obj_run = obj_run
self.obj_ancillary = obj_ancillary
self.driver_io_destination = ModelDestination(
collection_dynamic=self.collection_dynamic,
obj_dataset=self.obj_args.obj_datasets,
obj_geo_reference=self.obj_geo_reference,
template_time=self.obj_args.obj_template_time_ref,
template_run_def=self.obj_run.obj_template_run_filled,
template_run_ref=self.obj_args.obj_template_run_ref,
template_analysis_def=self.obj_run.obj_template_analysis_filled,
template_static=self.obj_args.obj_template_dset_static_ref,
template_outcome=self.obj_args.obj_template_dset_outcome_ref)
self.flag_cleaning_dynamic_outcome = self.obj_args.obj_datasets['Flags'][
'cleaning_ancillary_data_dynamic_outcome']
# -------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------
# Method to configure outcome datasets
def configure_dynamic_datasets(self, time_series_collections, time_info_collections,
static_datasets_collections, ancillary_datasets_collections,
ancillary_tag_type='dynamic_outcome'):
# Starting info
log_stream.info(' #### Configure dynamic outcome datasets ... ')
file_path_ancillary = ancillary_datasets_collections[ancillary_tag_type]
if self.flag_cleaning_dynamic_outcome:
if os.path.exists(file_path_ancillary):
os.remove(file_path_ancillary)
if not os.path.exists(file_path_ancillary):
# Get template variable(s)
template_run_filled = self.obj_run.obj_template_run_filled
# Method to organize outcome datasets
outcome_datasets_obj = self.driver_io_destination.organize_data_dynamic(
time_series_collections, static_datasets_collections, template_run_filled,
tag_exectype='SIM', tag_datadriver='outcome')
# Method to analyze outcome datasets
self.driver_io_destination.analyze_data_dynamic(
time_series_collections, time_info_collections, static_datasets_collections, outcome_datasets_obj,
tag_exectype='SIM', tag_datatype='ARCHIVE', tag_datadriver='outcome')
# Method to organize state datasets
state_datasets_obj = self.driver_io_destination.organize_data_dynamic(
time_series_collections, static_datasets_collections, template_run_filled,
tag_exectype='SIM', tag_datadriver='state')
# Method to analyze state datasets
self.driver_io_destination.analyze_data_dynamic(
time_series_collections, time_info_collections, static_datasets_collections, state_datasets_obj,
tag_exectype='SIM', tag_datatype='ARCHIVE', tag_datadriver='state')
# Retrieve dynamic collections
dynamic_datasets_collections = self.driver_io_destination.dset_collections_dynamic
# Method to write dynamic datasets collections
write_workspace_obj(file_path_ancillary, dynamic_datasets_collections)
# Ending info
log_stream.info(' #### Configure dynamic outcome datasets ... DONE')
elif os.path.exists(file_path_ancillary):
# Method to read dynamic datasets collections
dynamic_datasets_collections = read_workspace_obj(file_path_ancillary)
# Ending info
log_stream.info(' #### Configure dynamic outcome datasets ... LOADED. Restore file: ' + file_path_ancillary)
else:
# Error in ancillary file
log_stream.info(' #### Configure dynamic outcome datasets ... FAILED')
log_stream.error(' ===> Ancillary file for dynamic outcome datasets collections is not correctly defined')
raise RuntimeError('Bad definition of ancillary dynamic file')
return dynamic_datasets_collections
# -------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------
# Method to configure summary datasets
def configure_summary_datasets(self, time_series_collections, time_info_collections,
static_datasets_collections, outcome_datasets_collections):
# Starting info
log_stream.info(' #### Configure summary outcome datasets ... ')
# Get template variable(s)
template_run_filled = self.obj_run.obj_template_run_filled
# Method to organize summary datasets
summary_datasets_obj = self.driver_io_destination.organize_data_summary(
time_series_collections, static_datasets_collections, template_run_filled,
tag_exectype='POST_PROCESSING', tag_datadriver='summary')
# Method to analyze summary datasets
self.driver_io_destination.analyze_data_summary(
outcome_datasets_collections,
time_series_collections, time_info_collections, static_datasets_collections, summary_datasets_obj,
template_run_filled,
tag_exectype='POST_PROCESSING', tag_datadriver='summary')
# Ending info
log_stream.info(' #### Configure summary outcome datasets ... DONE')
# -------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------
|
(xui.Locale.en||(xui.Locale.en={})).inline={
$_$:1,
ok:'O K',
cancel:'Cancel',
set:'SET',
today:'Today',
now:'Now',
yes:'Yes',
no:'No',
noFlash:'No Flash PlugIn!',
transparent:'transparent',
required:'Required field',
required2:'There is a required field that needs to be filled out',
invalid:'Invalid field',
invalid2:'There is an invalid field'
};
xui.Locale.en.date={
WEEKS:{
'0':'Su',
'1':'Mo',
'2':'Tu',
'3':'We',
'4':'Th',
'5':'Fr',
'6':'Sa',
'7':'WK'
},
VIEWS:{
'10 ms':'10 millisecond',
'100 ms':'100 milliseconds',
'1 s':'1 second',
'10 s':'10 seconds',
'1 n':'1 minute',
'5 n':'5 minutes',
'10 n':'10 minutes',
'30 n':'30 minutes',
'1 h':'1 hour',
'2 h':'2 hours',
'6 h':'6 hours',
'1 d':'1 day',
'1 w':'1 week',
'15 d':'15 days',
'1 m':'1 month',
'1 q':'1 quarter',
'1 y':'1 year',
'1 de':'10 years',
'1 c':'1 century'
},
MONTHS:{
'1':'Jan.',
'2':'Feb.',
'3':'Mar.',
'4':'Apr.',
'5':'May.',
'6':'Jun.',
'7':'Jul.',
'8':'Aug.',
'9':'Sep.',
'10':'Oct.',
'11':'Nov.',
'12':'Dec.'
},
MS:'ms',
S:'s',
N:'n',
H:'h',
D:'d',
W:'w',
M:'m',
Q:'q',
Y:'y',
DE:'de',
C:'c',
HN:function(n,a,b){return (a.length==1?'0':'')+a+":"+(b.length==1?'0':'')+b},
DHN:function(n,a,b,c){return a +'th '+ (b.length==1?'0':'')+b + ":" +(c.length==1?'0':'')+c },
MDHN:function(n,a,b,c,d){return b+ 'th ' + xui.getRes('date.MONTHS.'+a) + " " + (c.length==1?'0':'')+c + ":" + (d.length==1?'0':'')+d},
HNS:function(n,a,b,c){return (a.length==1?'0':'')+a+":"+(b.length==1?'0':'')+b+":"+(c.length==1?'0':'')+c},
HNSMS:function(n,a,b,c,d){return (a.length==1?'0':'')+a+":"+(b.length==1?'0':'')+b+":"+(c.length==1?'0':'')+c+ ' ' +(d.length==1?'00':d.length==2?'0':'')+d},
YM:function(n,a,b){return xui.getRes('date.MONTHS.'+b)+' '+a},
YQ:function(n,a,b){return b+'Q ' + a},
YMD:function(n,a,b,c){return a+'-'+(b.length==1?'0':'')+b+'-'+(c.length==1?'0':'')+c},
YMD2:function(n,a,b,c){return xui.getRes('date.MONTHS.'+b)+' '+c+', '+a},
MD:function(n,a,b){return xui.getRes('date.MONTHS.'+a) + " "+ b},
YMDH:function(n,a,b,c,d){return a+'-'+(b.length==1?'0':'')+b+'-'+(c.length==1?'0':'')+c + ' ' +(d.length==1?'0':'')+d+':00'},
YMDHN:function(n,a,b,c,d,e){return a+'-'+(b.length==1?'0':'')+b+'-'+(c.length==1?'0':'')+c + ' ' +(d.length==1?'0':'')+d+":"+(e.length==1?'0':'')+e},
YMDHNS:function(n,a,b,c,d,e,f){return a+'-'+(b.length==1?'0':'')+b+'-'+(c.length==1?'0':'')+c + ' ' +(d.length==1?'0':'')+d+":"+(e.length==1?'0':'')+e+":"+(f.length==1?'0':'')+f},
ALL:function(n,a,b,c,d,e,f,g){return a+'-'+(b.length==1?'0':'')+b+'-'+(c.length==1?'0':'')+c + ' ' +(d.length==1?'0':'')+d+":"+(e.length==1?'0':'')+e+":"+(f.length==1?'0':'')+f +" " +(g.length==1?'00':g.length==2?'0':'')+g}
};
xui.Locale.en.color={
LIST:{
"FFFFFF":"White",
"FFFFF0":"Ivory",
"FFFFE0":"Light Yellow",
"FFFF00":"Yellow",
"FFFAFA":"Snow",
"FFFAF0":"Floral White",
"FFFACD":"Lemon Chiffon",
"FFF8DC":"Cornislk",
"FFF5EE":"Sea Shell",
"FFF0F5":"Lavender Blush",
"FFEFD5":"Papaya Whip",
"FFEBCD":"Blanched Almond",
"FFE4E1":"Misty Rose",
"FFE4C4":"Bisque",
"FFE4B5":"Moccasin",
"FFDEAD":"Navajo White",
"FFDAB9":"Peach Puff",
"FFD700":"Gold",
"FFC0CB":"Pink",
"FFB6C1 ":"Light Pink",
"FFA500":"Orange",
"FFA07A":"Light Salmon",
"FF8C00":"Dark Orange",
"FF7F50":"Coral",
"FF69B4":"Hot Pink",
"FF6347":"Tomato",
"FF4500":"Orange Red",
"FF1493":"Deep Pink",
"FF00FF":"Magenta",
"FF0000":"Red",
"FDF5E6":"Old Lace",
"FAFAD2":"Light Goldenrod Yellow",
"FAF0E6":"Linen",
"FAEBD7":"Antique White",
"FA8072":"Salmon",
"F8F8FF":"Ghost White",
"F5FFFA":"Medium Spring Green",
"F5F5F5":"White Smoke",
"F5DEB3":"Wheat",
"F4A460":"Sandy Brown",
"F0FFFF":"Azure",
"F0FFF0":"Honeydew",
"F0F8FF":"Alice Blue",
"F0E68C":"Khaki",
"F08080":"Light Coral",
"EEE8AA":"Pale Godenrod",
"EE82EE":"Violet",
"E9967A":"Dark Salmon",
"E6E6FA":"Lavender",
"E1FFFF":"Light Cyan",
"DEB887":"Bruly Wood",
"DDA0DD":"plum",
"DCDCDC":"Gainsboro",
"DC143C":"Crimson",
"DB7093":"Pale Violet Red",
"DAA520":"Gold Enrod",
"DA70D6":"Orchid",
"D8BFD8":"Thistle",
"D3D3D3":"Light Grey",
"D2B48C":"Tan",
"D2691E":"Chocolate",
"CD853F":"Peru",
"CD5C5C":"Indian Red",
"C71585":"Medium Violet Red",
"C0C0C0":"Silver",
"BDB76B":"Dark Khaki",
"BC8F8F":"Rosy Brown",
"BA55D3":"Medium Orchid",
"B22222":"Fire Brick",
"B0E0E6":"Pow Der Blue",
"B0C4DE":"Light Steel Blue",
"AFEEEE":"Pale Turquoise",
"ADFF2F":"Green Yellow",
"ADD8E6":"Light BLue",
"A9A9A9":"Dark Gray",
"A52A2A":"Brown",
"A0522D":"Sienna",
"9932CC":"Dark Orchid",
"98FB98":"Pale Green",
"9400D3":"Dark Voilet",
"9370DB":"Medium Purple",
"90EE90":"Light Green",
"8FBC8F":"Dark Sea Green",
"8B4513":"Saddle Brown",
"8B008B":"Dark Magenta",
"8B0000":"Dark Red",
"8A2BE2":"Blue Violet",
"87CEFA":"Light Sky Blue",
"87CEEB":"Sky Blue",
"808080":"Gray",
"808000":"Olive",
"800080":"Purple",
"800000":"Maroon",
"7FFFAA":"Auqamarin",
"7FFF00":"Chartreuse",
"7CFC00":"Lawn Green",
"7B68EE":"Medium Slate Blue",
"778899":"Light Slate Gray",
"708090":"Slate Gray",
"6B8E23":"Beige",
"6A5ACD":"Slate Blue",
"696969":"Dim Gray",
"6495ED":"Cornflower Blue",
"5F9EA0":"Cadet Blue",
"556B2F":"Olive Drab",
"4B0082":"Indigo",
"48D1CC":"Medium Turquoise",
"483D8B":"Dark Slate Blue",
"4682B4":"Steel Blue",
"4169E1":"Royal Blue",
"40E0D0":"Turquoise",
"3CB371":"Spring Green",
"32CD32":"Lime Green",
"2F4F4F":"Dark Slate Gray",
"2E8B57":"Sea Green",
"228B22":"Forest Green",
"20B2AA":"Light Sea Green",
"1E90FF":"Doder Blue",
"191970":"Midnight Blue",
"00FFFF":"Aqua",
"00FF7F":"Mint Cream",
"00FF00":"Lime",
"00FA9A":"Medium Aquamarine",
"00CED1":"Dark Turquoise",
"00BFFF":"Deep Sky Blue",
"008B8B":"Dark Cyan",
"008080":"Teal",
"008000":"Green",
"006400":"Dark Green",
"0000FF":"Blue",
"0000CD":"Medium Blue",
"00008B":"Dark Blue",
"000080":"Navy",
"000000":"Black"
}
};
xui.Locale.en.editor={
bold:'Bold',
italic:'Italic',
underline:'Underline',
strikethrough:'Strikethrough',
subscript:'Subscript',
superscript:'Superscript',
forecolor:'Font Color',
bgcolor:'Background Color',
left:'Align Left',
center:'Align Center',
right:'Align Right',
justify:'Justify',
indent:'Indent',
outdent:'Outdent',
ol:'Ordered List',
ul:'Unordered List',
hr:'Insert Horizontal Rule',
unlink:'Remove Link',
removeformat:'Remove Formatting',
html:"HTML Editor",
insertimage:'Insert Image',
insertimage2:'Image URL:',
createlink:'Insert Link',
createlink2:'Link URL:',
fontsize:'Font Size',
fontname:'Font Family',
formatblock:'Font Block',
fontsizeList:'1,1(8pt);2,2(10pt);3,3(12pt);4,4(14pt);5,5(18pt);6,6(24pt);...,...',
fontnameList:'Arial;Arial Black;Comic Sans MS;Courier New;Impact;Tahoma;Times New Roman;Trebuchet MS;Verdana;...',
formatblockList:'p,Normal;h1,Heading1;h2,Heading2;h3,Heading3;h4,Heading4;h5,Heading5;h6,Heading6;...,...'
}; |
const base = require('./jest.config.base.js')
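// Root Jest configuration: extends the shared base config and aggregates the per-package project configs.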
module.exports = {
...base,
projects:
[
'<rootDir>/packages/**/jest.config.js'
],
coverageDirectory: '<rootDir>/coverage/'
}
|
import Vue from 'vue'
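// Vue 2 style global event bus: components presumably communicate via eventBus.$emit(EVENT_NAME, payload) and eventBus.$on(EVENT_NAME, handler), using the event name constants below.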
export const eventBus = new Vue()
export const EVENT_UPDATE_CONFIG = 'updateConfig'
export const EVENT_UPDATE_SERIES_LIST = 'update series list'
export const EVENT_UPDATE_SIZE = 'update size'
export const EVENT_UPDATE_THEME = 'update theme'
export const EVENT_RE_RENDER = 're-render'
export const EVENT_UPDATED = 'updated'
export const EVENT_PAINT = 'paint'
export const EVENT_FORCE_UPDATE = 'force update'
|
import pull from "lodash/pull";
import { types as t } from "@babel/core";
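// Plugin sketch: rewrites `__proto__` assignments and object-literal `__proto__`
// keys into calls to the `defaults` / `extends` runtime helpers, so prototype
// mutation is replaced by property copying.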
export default function() {
function isProtoKey(node) {
return t.isLiteral(t.toComputedKey(node, node.key), { value: "__proto__" });
}
function isProtoAssignmentExpression(node) {
const left = node.left;
return (
t.isMemberExpression(left) &&
t.isLiteral(t.toComputedKey(left, left.property), { value: "__proto__" })
);
}
function buildDefaultsCallExpression(expr, ref, file) {
return t.expressionStatement(
t.callExpression(file.addHelper("defaults"), [ref, expr.right]),
);
}
return {
visitor: {
AssignmentExpression(path, file) {
if (!isProtoAssignmentExpression(path.node)) return;
const nodes = [];
const left = path.node.left.object;
const temp = path.scope.maybeGenerateMemoised(left);
if (temp) {
nodes.push(
t.expressionStatement(t.assignmentExpression("=", temp, left)),
);
}
nodes.push(
buildDefaultsCallExpression(
path.node,
t.cloneNode(temp || left),
file,
),
);
if (temp) nodes.push(t.cloneNode(temp));
path.replaceWithMultiple(nodes);
},
ExpressionStatement(path, file) {
const expr = path.node.expression;
if (!t.isAssignmentExpression(expr, { operator: "=" })) return;
if (isProtoAssignmentExpression(expr)) {
path.replaceWith(
buildDefaultsCallExpression(expr, expr.left.object, file),
);
}
},
ObjectExpression(path, file) {
let proto;
const { node } = path;
for (const prop of (node.properties: Array)) {
if (isProtoKey(prop)) {
proto = prop.value;
pull(node.properties, prop);
}
}
if (proto) {
const args = [t.objectExpression([]), proto];
if (node.properties.length) args.push(node);
path.replaceWith(t.callExpression(file.addHelper("extends"), args));
}
},
},
};
}
|
import React from 'react'
import ReactDOM from 'react-dom'
import './styles/index.scss'
import App from './App'
import * as serviceWorker from './serviceWorker'
ReactDOM.render(
<React.StrictMode>
<App />
</React.StrictMode>,
document.getElementById('root')
)
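// Opting out of offline caching; switching to serviceWorker.register() would enable the service worker.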
serviceWorker.unregister()
|
import torch
import numpy as np
import torch.nn.functional as F
from torch_utils.ops.upfirdn2d import upfirdn2d
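# Generate reference inputs, outputs and input gradients of upfirdn2d for several
# batches and save them to '05_grad.npz', presumably for cross-implementation
# comparison of the op and its backward pass.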
dic = {}
batch_size = 2
for batch_idx in range(8):
x_shape = [1, 512, 9, 9]
f_shape = [4, 4]
up = 1
down = 1
padding = [1, 1, 1, 1]
flip_filter = False
gain = 4
# x_shape = [1, 3, 4, 4]
# f_shape = [4, 4]
# up = 2
# down = 1
# padding = [2, 1, 2, 1]
# flip_filter = False
# gain = 4
# x_shape = [1, 512, 17, 17]
# f_shape = [4, 4]
# up = 1
# down = 1
# padding = [1, 1, 1, 1]
# flip_filter = False
# gain = 4
# x_shape = [1, 3, 8, 8]
# f_shape = [4, 4]
# up = 2
# down = 1
# padding = [2, 1, 2, 1]
# flip_filter = False
# gain = 4
# x_shape = [1, 512, 33, 33]
# f_shape = [4, 4]
# up = 1
# down = 1
# padding = [1, 1, 1, 1]
# flip_filter = False
# gain = 4
# x_shape = [1, 3, 16, 16]
# f_shape = [4, 4]
# up = 2
# down = 1
# padding = [2, 1, 2, 1]
# flip_filter = False
# gain = 4
# x_shape = [1, 512, 65, 65]
# f_shape = [4, 4]
# up = 1
# down = 1
# padding = [1, 1, 1, 1]
# flip_filter = False
# gain = 4
# x_shape = [1, 3, 32, 32]
# f_shape = [4, 4]
# up = 2
# down = 1
# padding = [2, 1, 2, 1]
# flip_filter = False
# gain = 4
# x_shape = [1, 256, 129, 129]
# f_shape = [4, 4]
# up = 1
# down = 1
# padding = [1, 1, 1, 1]
# flip_filter = False
# gain = 4
# x_shape = [1, 3, 64, 64]
# f_shape = [4, 4]
# up = 2
# down = 1
# padding = [2, 1, 2, 1]
# flip_filter = False
# gain = 4
# x_shape = [1, 128, 257, 257]
# f_shape = [4, 4]
# up = 1
# down = 1
# padding = [1, 1, 1, 1]
# flip_filter = False
# gain = 4
# x_shape = [1, 3, 128, 128]
# f_shape = [4, 4]
# up = 2
# down = 1
# padding = [2, 1, 2, 1]
# flip_filter = False
# gain = 4
# x_shape = [1, 64, 513, 513]
# f_shape = [4, 4]
# up = 1
# down = 1
# padding = [1, 1, 1, 1]
# flip_filter = False
# gain = 4
# x_shape = [1, 3, 256, 256]
# f_shape = [4, 4]
# up = 2
# down = 1
# padding = [2, 1, 2, 1]
# flip_filter = False
# gain = 4
x_shape = [1, 512, 8, 8]
f_shape = [4, 4]
up = 1
down = 2
padding = [1, 1, 1, 1]
flip_filter = False
gain = 1
x_shape = [2, 3, 76, 76]
f_shape = [12, ]
up = 1
down = 2
padding = [-1, -1, -1, -1]
flip_filter = True
gain = 1
x_shape[0] = batch_size
f = torch.randn(f_shape)
x = torch.randn(x_shape)
x.requires_grad_(True)
y = upfirdn2d(x, f, up=up, down=down, padding=padding, flip_filter=flip_filter, gain=gain)
dy_dx = torch.autograd.grad(outputs=[y.sum()], inputs=[x], create_graph=True, only_inputs=True)[0]
dic['batch_%.3d.dy_dx'%batch_idx] = dy_dx.cpu().detach().numpy()
dic['batch_%.3d.y'%batch_idx] = y.cpu().detach().numpy()
dic['batch_%.3d.x'%batch_idx] = x.cpu().detach().numpy()
dic['batch_%.3d.f'%batch_idx] = f.cpu().detach().numpy()
np.savez('05_grad', **dic)
print()
|
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
class Tests:
viewport_config_updated = (
"Viewport is now configured for test",
"Failed to configure viewport for test"
)
first_viewport_active_instance_count = (
"Expected number of instances found in left viewport",
"Unexpected number of instances found in left viewport"
)
second_viewport_inactive_instance_count = (
"No instances found in right viewport",
"Unexpectedly found instances in right viewport while not active"
)
first_viewport_inactive_instance_count = (
"No instances found in left viewport",
"Unexpectedly found instances in left viewport while not active"
)
second_viewport_active_instance_count = (
"Expected number of instances found in right viewport",
"Unexpected number of instances found in right viewport"
)
def LayerSpawner_InstancesRefreshUsingCorrectViewportCamera():
"""
Summary:
Test that the Dynamic Vegetation System is using the current Editor viewport camera as the center
of the spawn area for vegetation. To verify this, we create two separate Editor viewports pointed
at two different vegetation areas, and verify that as we switch between active viewports, only the
area directly underneath that viewport's camera has vegetation.
"""
import os
import azlmbr.legacy.general as general
import azlmbr.math as math
import editor_python_test_tools.hydra_editor_utils as hydra
from largeworlds.large_worlds_utils import editor_dynveg_test_helper as dynveg
from editor_python_test_tools.utils import Report
from editor_python_test_tools.utils import TestHelper as helper
# Open an existing simple level
hydra.open_base_level()
# Set up a test environment to validate that switching viewports correctly changes which camera
# the vegetation system uses.
# The test environment consists of the following:
# - two 32 x 32 x 1 box shapes located far apart that emit a surface with no tags
# - two 32 x 32 x 32 vegetation areas that place vegetation on the boxes
# Initialize some constants for our test.
# The boxes are intentionally shifted by 0.5 meters to ensure that we get a predictable number
# of vegetation points. By default, vegetation plants on grid corners, so if our boxes are aligned
# with grid corner points, the right/bottom edges will include more points than we might intuitively expect.
# By shifting by 0.5 meters, the vegetation grid points don't fall on the box edges, making the total count
# more predictable.
first_entity_center_point = math.Vector3(0.5, 0.5, 100.0)
# The second box needs to be far enough away from the first that the vegetation system will never spawn instances
# in both at the same time.
second_entity_center_point = math.Vector3(1024.5, 1024.5, 100.0)
box_size = 32.0
surface_height = 1.0
# By default, vegetation spawns 20 instances per 16 meters, so for our box of 32 meters, we should have
# ((20 instances / 16 m) * 32 m) ^ 2 instances.
filled_vegetation_area_instance_count = (20 * 2) * (20 * 2)
# Change the Editor view to contain two viewports
general.set_view_pane_layout(1)
get_view_pane_layout_success = helper.wait_for_condition(lambda: (general.get_view_pane_layout() == 1), 2)
get_viewport_count_success = helper.wait_for_condition(lambda: (general.get_viewport_count() == 2), 2)
Report.critical_result(Tests.viewport_config_updated, get_view_pane_layout_success and get_viewport_count_success)
# Set the view in the first viewport to point down at the first box
general.set_active_viewport(0)
helper.wait_for_condition(lambda: general.get_active_viewport() == 0, 2)
general.set_current_view_position(first_entity_center_point.x, first_entity_center_point.y,
first_entity_center_point.z + 30.0)
general.set_current_view_rotation(-85.0, 0.0, 0.0)
# Set the view in the second viewport to point down at the second box
general.set_active_viewport(1)
helper.wait_for_condition(lambda: general.get_active_viewport() == 1, 2)
general.set_current_view_position(second_entity_center_point.x, second_entity_center_point.y,
second_entity_center_point.z + 30.0)
general.set_current_view_rotation(-85.0, 0.0, 0.0)
# Create the "flat surface" entities to use as our vegetation surfaces
first_surface_entity = dynveg.create_surface_entity("Surface 1", first_entity_center_point, box_size, box_size,
surface_height)
second_surface_entity = dynveg.create_surface_entity("Surface 2", second_entity_center_point, box_size, box_size,
surface_height)
# Create the two vegetation areas
pink_flower_asset_path = os.path.join("assets", "objects", "foliage", "grass_flower_pink.azmodel")
pink_flower_prefab = dynveg.create_temp_mesh_prefab(pink_flower_asset_path, "SpawnerViewportRefresh_PinkFlower")[0]
first_veg_entity = dynveg.create_temp_prefab_vegetation_area("Veg Area 1", first_entity_center_point, box_size, box_size,
box_size, pink_flower_prefab)
second_veg_entity = dynveg.create_temp_prefab_vegetation_area("Veg Area 2", second_entity_center_point, box_size, box_size,
box_size, pink_flower_prefab)
# When the first viewport is active, the first area should be full of instances, and the second should be empty
general.set_active_viewport(0)
helper.wait_for_condition(lambda: general.get_active_viewport() == 0, 2)
viewport_0_success = helper.wait_for_condition(lambda: dynveg.validate_instance_count(first_entity_center_point,
box_size / 2.0,
filled_vegetation_area_instance_count), 5)
viewport_1_success = helper.wait_for_condition(lambda: dynveg.validate_instance_count(second_entity_center_point,
box_size / 2.0, 0), 5)
Report.result(Tests.first_viewport_active_instance_count, viewport_0_success)
Report.result(Tests.second_viewport_inactive_instance_count, viewport_1_success)
# When the second viewport is active, the second area should be full of instances, and the first should be empty
general.set_active_viewport(1)
helper.wait_for_condition(lambda: general.get_active_viewport() == 1, 2)
viewport_0_success = helper.wait_for_condition(lambda: dynveg.validate_instance_count(first_entity_center_point,
box_size / 2.0, 0), 5)
Report.result(Tests.first_viewport_inactive_instance_count, viewport_0_success)
viewport_1_success = helper.wait_for_condition(lambda: dynveg.validate_instance_count(second_entity_center_point,
box_size / 2.0,
filled_vegetation_area_instance_count), 5)
Report.result(Tests.second_viewport_active_instance_count, viewport_1_success)
if __name__ == "__main__":
from editor_python_test_tools.utils import Report
Report.start_test(LayerSpawner_InstancesRefreshUsingCorrectViewportCamera)
|
// Parameter configuration
export default {
lang: 'lang', // key for the locally stored language setting
timeout: 1000 * 60 * 10, // API request timeout
warningDay: 7, // how many days of recent data the alarm center fetches, 7 by default
timeInterval: 300000, // data refresh interval, 5 minutes by default (alarm center, charts, soil monitoring, environment monitoring, device status)
historyInterval: 60000, // refresh the history data list every 1 minute by default
measurementTnterval: 60000, // switch to the next measurement type's chart every 1 minute by default
historyLength: 100, // number of history records to fetch, 100 by default
detectChartDay: 1 // environment / soil monitoring charts show 1 day of data by default
}
|
/* This file is generated by createIcons.js any changes will be lost. */
import createIcon from '../createIcon';
var OutlinedBuildingIcon = createIcon({
name: 'OutlinedBuildingIcon',
height: 512,
width: 448,
svgPath: 'M128 148v-40c0-6.6 5.4-12 12-12h40c6.6 0 12 5.4 12 12v40c0 6.6-5.4 12-12 12h-40c-6.6 0-12-5.4-12-12zm140 12h40c6.6 0 12-5.4 12-12v-40c0-6.6-5.4-12-12-12h-40c-6.6 0-12 5.4-12 12v40c0 6.6 5.4 12 12 12zm-128 96h40c6.6 0 12-5.4 12-12v-40c0-6.6-5.4-12-12-12h-40c-6.6 0-12 5.4-12 12v40c0 6.6 5.4 12 12 12zm128 0h40c6.6 0 12-5.4 12-12v-40c0-6.6-5.4-12-12-12h-40c-6.6 0-12 5.4-12 12v40c0 6.6 5.4 12 12 12zm-76 84v-40c0-6.6-5.4-12-12-12h-40c-6.6 0-12 5.4-12 12v40c0 6.6 5.4 12 12 12h40c6.6 0 12-5.4 12-12zm76 12h40c6.6 0 12-5.4 12-12v-40c0-6.6-5.4-12-12-12h-40c-6.6 0-12 5.4-12 12v40c0 6.6 5.4 12 12 12zm180 124v36H0v-36c0-6.6 5.4-12 12-12h19.5V24c0-13.3 10.7-24 24-24h337c13.3 0 24 10.7 24 24v440H436c6.6 0 12 5.4 12 12zM79.5 463H192v-67c0-6.6 5.4-12 12-12h40c6.6 0 12 5.4 12 12v67h112.5V49L80 48l-.5 415z',
yOffset: '',
xOffset: '',
transform: ''
});
export default OutlinedBuildingIcon; |
var {
GraphQLSchema,
GraphQLObjectType,
GraphQLString,
GraphQLList,
GraphQLInt,
GraphQLFloat
} = require('graphql');
var search = require('../../API/fbAPI').search;
const fbQueryType = module.exports = new GraphQLObjectType({
name:'fbQuery',
description:'Query user, page, event, group, place, placetopic.',
fields: () => ({
queryUser:{
type: new GraphQLList(fbUserType),
args:{ q: { type:GraphQLString } },
resolve: (_,args) => search(args,'user')
},
queryPage:{
type: new GraphQLList(fbPageType),
args:{ q: { type:GraphQLString } },
resolve: (_,args) => search(args,'page')
},
queryPlace: {
type: new GraphQLList(fbPlaceType),
args:{ q: { type:GraphQLString } },
resolve: (_,args) => search(args,'place')
},
queryEvent: {
type: new GraphQLList(fbEventType),
args:{ q: { type:GraphQLString } },
resolve: (_,args) => search(args,'event')
},
queryGroup: {
type: new GraphQLList(fbGroupType),
args:{ q: { type:GraphQLString } },
resolve: (_,args) => search(args,'group')
},
queryPlaceTopic:{
type: new GraphQLList(fbPlaceTopicType),
args:{ q: { type:GraphQLString } },
resolve: (_,args) => search(args,'placetopic')
}
})
});
const fbUserType = require('./facebook-type/fbUserType');
const fbPageType = require('./facebook-type/fbPageType');
const fbEventType = require('./facebook-type/fbEventType');
const fbPlaceType = require('./facebook-type/fbPlaceType');
const fbGroupType = require('./facebook-type/fbGroupType');
const fbPlaceTopicType = require('./facebook-type/fbPlaceTopicType');
|
import ScatterView from './Scatter';
import BarView from './Bar';
import TableView from './Table';
import ParCoordsView from './ParCoordsView';
import HistView from './Hist';
import RFFeatureView from './RFFeature';
import ClusteringView from './Clustering';
import RegressionView from './Regression';
import ClassificationView from './Classification';
import PeriodicTableView from './PeriodicTable';
import PieView from './Pie';
import Scatter3DView from './Scatter3D';
import Molecule3DView from './Molecule3D';
import HeatMapView from './HeatMap';
import ImageViewView from './ImageView';
import PairwiseCorrelationView from './PairwiseCorrelation';
import CustomView from './Custom';
const config = [
// Custom
// {
// type: 'custom',
// name: 'Custom',
// category: 'Custom',
// version: 0.7,
// devStage: "Beta",
// component: CustomView,
// settings: {
// options: {
// extent: {
// width: 400,
// height: 400,
// },
// },
// },
// },
// visualizations
{
type: 'scatter',
name: 'Scatter',
category: 'Visualization',
version: 1.0,
devStage: "Stable Release",
component: ScatterView,
settings: {
options: {
extent: {
width: 400,
height: 400,
},
},
},
},
{
type: 'bar',
name: 'Bar',
category: 'Visualization',
version: 0.8,
devStage: "Beta",
component: BarView,
settings: {
options: {
extent: {
width: 400,
height: 400,
},
colorMap: 'Category10',
},
},
},
{
type: 'table',
name: 'Table',
category: 'Visualization',
version: 1.2,
devStage: "Stable Release",
component: TableView,
settings: {
columns: [],
// selectionColor: 'orange',
// nonselectionColor: `#${Greys9[3].toString(16)}`,
// options: { extent: { width: 800, height: 400 } },
},
},
{
type: 'periodic-table',
name: 'Periodic Table',
category: 'Visualization',
version: 1.0,
devStage: "Stable Release",
component: PeriodicTableView,
settings: {
columns: [],
},
},
{
type: 'parcoords',
name: 'Parallel Coordinates',
category: 'Visualization',
component: ParCoordsView,
settings: {
axes: [],
// selectionColor: 'orange',
// nonselectionColor: `#${Greys9[3].toString(16)}`,
// options: { extent: { width: 800, height: 400 } },
},
},
{
type: 'molecule3D',
name: 'Molecule3D',
category: 'Visualization',
version: 1.0,
devStage: "Stable Release",
component: Molecule3DView,
settings: {
options: {
extent: {
width: 500,
height: 400,
},
bkgCol: "#ffffff",
txtCol: "#000000",
},
},
},
{
type: 'imageView',
name: 'ImageView',
category: 'Visualization',
version: 1.0,
devStage: "Stable Release",
component: ImageViewView,
settings: {
options: {
title: "",
caption: "",
extent: {
width: 300,
height: 300,
},
border: {
color: "black",
style: "solid",
size: 2,
}
},
},
},
// data processing
// analysis
{
type: 'pairwise-correlation',
name: 'PairwiseCorrelation',
category: 'Analysis',
version: 0.8,
devStage: "Beta",
component: PairwiseCorrelationView,
settings: {
options: {
extent: { width: 600, height: 600 },
maskEnabled: true,
}
},
},
{
type: 'histogram',
name: 'Histogram',
category: 'Analysis',
version: 1.0,
devStage: "Stable Release",
component: HistView,
settings: {
bins: 10,
mappings: {
n: 'hist',
bins: 'binEdges',
},
},
},
{
type: 'clustering',
name: 'Clustering',
category: 'Analysis',
version: 1.0,
devStage: "Stable Release",
component: ClusteringView,
settings: {
method: 'KMeans',
numberOfClusters: 3,
featureColumns: [],
mappings: {
dimension: 'cids',
measures: ['counts'],
},
},
},
{
type: 'feature-importance',
name: 'Feature Importance (RF)',
category: 'Analysis',
version: 1.0,
devStage: "Stable Release",
component: RFFeatureView,
settings: {
featureColumns: [],
targetColumn: '',
mappings: {
dimension: 'features',
measures: ['importance'],
},
},
},
{
type: 'pie',
name: 'Pie',
category: 'Analysis',
version: 1.0,
devStage: "Stable Release",
component: PieView,
settings: {
bins: 7,
options: {
colorMap: 'Category20c',
extent: {
width: 0,
height: 0,
},
}
},
},
{
type: 'scatter3D',
name: 'Scatter3D',
category: 'Analysis',
version: 1.0,
devStage: "Stable Release",
component: Scatter3DView,
settings: {
method: "Manual",
options: {
axisTitles: ['x', 'y', 'z'],
marker: {
size: 2,
color: 'red',
opacity: 0.8,
},
colorMap: 'Category20c',
extent: { width: 450, height: 450 },
camera: {
eye: {x: 1.25, y: 1.25, z: 1.25},
up: {x: 0, y: 0, z: 1},
center: {x: 0, y: 0, z: 0},
},
}
},
},
{
type: 'heatmap',
name: 'HeatMap',
category: 'Analysis',
version: 0.9,
devStage: "Beta",
component: HeatMapView,
settings: {
// bins: 5,
options: {
colorMap: 'Category10',
extent: {
width: undefined,
height: 0,
},
}
},
},
// machine learning
{
type: 'regression',
name: 'Regression',
category: 'Machine Learning',
version: 1.0,
devStage: "Stable Release",
component: RegressionView,
settings: {
method: 'Linear',
featureColumns: [],
targetColumn: '',
folds: 5,
mappings: {},
},
},
{
type: 'classification',
name: 'Classification',
category: 'Machine Learning',
version: 1.0,
devStage: "Stable Release",
component: ClassificationView,
settings: {
method: 'RandomForest',
featureColumns: [],
targetColumn: '',
mappings: {},
},
},
];
export default config;
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('reviewers', '0003_auto_20150727_1017'),
]
operations = [
migrations.RemoveField(
model_name='editorsubscription',
name='addon',
),
migrations.RemoveField(
model_name='editorsubscription',
name='user',
),
migrations.DeleteModel(
name='EditorSubscription',
),
]
|
"""
pghoard
Copyright (c) 2015 Ohmu Ltd
See LICENSE for details
"""
# pylint: disable=attribute-defined-outside-init
from .base import CONSTANT_TEST_RSA_PUBLIC_KEY, CONSTANT_TEST_RSA_PRIVATE_KEY
from .test_wal import wal_header_for_file
from http.client import HTTPConnection
from pghoard import postgres_command, wal
from pghoard.archive_sync import ArchiveSync
from pghoard.common import get_pg_wal_directory
from pghoard.pgutil import create_connection_string
from pghoard.postgres_command import archive_command, restore_command
from pghoard.restore import HTTPRestore, Restore
from pghoard.rohmu.encryptor import Encryptor
from queue import Queue
import json
import logging
import os
import psycopg2
import pytest
import socket
import time
@pytest.fixture
def http_restore(pghoard):
pgdata = get_pg_wal_directory(pghoard.config["backup_sites"][pghoard.test_site])
return HTTPRestore("localhost", pghoard.config["http_port"], site=pghoard.test_site, pgdata=pgdata)
class TestWebServer:
def test_requesting_status(self, pghoard):
pghoard.write_backup_state_to_json_file()
conn = HTTPConnection(host="127.0.0.1", port=pghoard.config["http_port"])
response = conn.request("GET", "/status")
response = conn.getresponse()
response_parsed = json.loads(response.read().decode('utf-8'))
assert response.status == 200
# "startup_time": "2016-06-23T14:53:25.840787",
assert response_parsed['startup_time'] is not None
response = conn.request("GET", "/status/somesite")
response = conn.getresponse()
assert response.status == 400
response = conn.request("GET", "/somesite/status")
response = conn.getresponse()
assert response.status == 404
response = conn.request("GET", "/{}/status".format(pghoard.test_site))
response = conn.getresponse()
assert response.status == 501
def test_list_empty_basebackups(self, pghoard, http_restore, capsys): # pylint: disable=redefined-outer-name
# List with direct HttpRestore access
assert http_restore.list_basebackups() == []
http_restore.show_basebackup_list()
out, _ = capsys.readouterr()
assert pghoard.test_site in out
# list using restore command over http
Restore().run([
"list-basebackups-http",
"--host", "localhost",
"--port", str(pghoard.config["http_port"]),
"--site", pghoard.test_site,
])
out, _ = capsys.readouterr()
assert pghoard.test_site in out
# list using restore command with direct object storage access
Restore().run([
"list-basebackups",
"--config", pghoard.config_path,
"--site", pghoard.test_site,
])
out, _ = capsys.readouterr()
assert pghoard.test_site in out
def _run_and_wait_basebackup(self, pghoard, db, mode):
pghoard.create_backup_site_paths(pghoard.test_site)
backup_dir = os.path.join(pghoard.config["backup_sites"][pghoard.test_site]["object_storage"]["directory"],
pghoard.test_site, "basebackup")
if not os.path.exists(backup_dir):
backups_before = set()
else:
backups_before = set(f for f in os.listdir(backup_dir) if not f.endswith(".metadata"))
basebackup_path = os.path.join(pghoard.config["backup_location"], pghoard.test_site, "basebackup")
q = Queue()
pghoard.config["backup_sites"][pghoard.test_site]["basebackup_mode"] = mode
pghoard.create_basebackup(pghoard.test_site, db.user, basebackup_path, q)
result = q.get(timeout=60)
assert result["success"]
backups_after = set(f for f in os.listdir(backup_dir) if not f.endswith(".metadata"))
new_backups = backups_after - backups_before
assert len(new_backups) == 1
return new_backups.pop()
def test_basebackups(self, capsys, db, http_restore, pghoard): # pylint: disable=redefined-outer-name
final_location = self._run_and_wait_basebackup(pghoard, db, "pipe")
backups = http_restore.list_basebackups()
assert len(backups) == 1
assert backups[0]["size"] > 0
assert backups[0]["name"] == os.path.join(pghoard.test_site, "basebackup", os.path.basename(final_location))
# make sure they show up on the printable listing, too
http_restore.show_basebackup_list()
out, _ = capsys.readouterr()
assert "{} MB".format(int(backups[0]["metadata"]["original-file-size"]) // (1024 ** 2)) in out
assert backups[0]["name"] in out
def test_wal_fetch_optimization(self, pghoard):
# inject fake WAL and timeline files for testing
invalid_wal_name = "000000060000000000000001"
valid_wal_name = "000000060000000000000002"
wal_name_output = "optimization_output_filename"
output_path = os.path.join(
get_pg_wal_directory(pghoard.config["backup_sites"][pghoard.test_site]), wal_name_output)
invalid_wal_path = os.path.join(
get_pg_wal_directory(pghoard.config["backup_sites"][pghoard.test_site]), invalid_wal_name)
valid_wal_path = os.path.join(
get_pg_wal_directory(pghoard.config["backup_sites"][pghoard.test_site]), valid_wal_name)
with open(valid_wal_path, "wb") as out_file:
out_file.write(wal_header_for_file(os.path.basename(valid_wal_path)))
with open(invalid_wal_path, "wb") as out_file:
# We use the wrong WAL file's name to generate the header on purpose to see that our check works
out_file.write(wal_header_for_file(os.path.basename(valid_wal_path)))
restore_command(
site=pghoard.test_site,
xlog=os.path.basename(valid_wal_name),
host="127.0.0.1",
port=pghoard.config["http_port"],
output=output_path,
retry_interval=0.1)
assert os.path.exists(output_path)
os.unlink(output_path)
with pytest.raises(postgres_command.PGCError):
restore_command(
site=pghoard.test_site,
xlog=os.path.basename(invalid_wal_name),
host="127.0.0.1",
port=pghoard.config["http_port"],
output=output_path,
retry_interval=0.1)
assert not os.path.exists(output_path)
os.unlink(invalid_wal_path)
def test_archiving(self, pghoard):
store = pghoard.transfer_agents[0].get_object_storage(pghoard.test_site)
# inject fake WAL and timeline files for testing
for xlog_type, wal_name in [
("xlog", "0000000000000000000000CC"),
("timeline", "0000000F.history")]:
foo_path = os.path.join(get_pg_wal_directory(pghoard.config["backup_sites"][pghoard.test_site]), wal_name)
with open(foo_path, "wb") as out_file:
if xlog_type == "xlog":
out_file.write(wal_header_for_file(wal_name))
else:
out_file.write(b"1 2 3\n")
archive_command(host="localhost", port=pghoard.config["http_port"],
site=pghoard.test_site, xlog=wal_name)
archive_path = os.path.join(pghoard.test_site, xlog_type, wal_name)
store.get_metadata_for_key(archive_path)
store.delete_key(archive_path)
os.unlink(foo_path)
def _switch_wal(self, db, count):
conn = psycopg2.connect(create_connection_string(db.user))
conn.autocommit = True
cursor = conn.cursor()
if conn.server_version >= 100000:
cursor.execute("SELECT pg_walfile_name(pg_current_wal_lsn())")
else:
cursor.execute("SELECT pg_xlogfile_name(pg_current_xlog_location())")
start_wal = cursor.fetchone()[0]
cursor.execute("CREATE TABLE IF NOT EXISTS testint (i INT)")
for n in range(count):
cursor.execute("INSERT INTO testint (i) VALUES (%s)", [n])
if conn.server_version >= 100000:
cursor.execute("SELECT pg_switch_wal()")
else:
cursor.execute("SELECT pg_switch_xlog()")
if conn.server_version >= 100000:
cursor.execute("SELECT pg_walfile_name(pg_current_wal_lsn())")
else:
cursor.execute("SELECT pg_xlogfile_name(pg_current_xlog_location())")
end_wal = cursor.fetchone()[0]
conn.close()
return start_wal, end_wal
def test_archive_sync(self, db, pghoard):
log = logging.getLogger("test_archive_sync")
store = pghoard.transfer_agents[0].get_object_storage(pghoard.test_site)
def list_archive(folder):
if folder == "timeline":
matcher = wal.TIMELINE_RE.match
else:
matcher = wal.WAL_RE.match
path_to_list = "{}/{}".format(pghoard.test_site, folder)
files_found, files_total = 0, 0
for obj in store.list_path(path_to_list):
fname = os.path.basename(obj["name"])
files_total += 1
if matcher(fname):
files_found += 1
yield fname
log.info("Listed %r, %r out of %r matched %r pattern", path_to_list, files_found, files_total, folder)
# create a basebackup to start with
self._run_and_wait_basebackup(pghoard, db, "pipe")
# force a couple of wal segment switches
start_wal, _ = self._switch_wal(db, 4)
# we should have at least 4 WAL files now (there may be more in
# case other tests created them -- we share a single postgresql
# cluster between all tests)
pg_wal_dir = get_pg_wal_directory(pghoard.config["backup_sites"][pghoard.test_site])
pg_wals = {f for f in os.listdir(pg_wal_dir) if wal.WAL_RE.match(f) and f > start_wal}
assert len(pg_wals) >= 4
# create a couple of "recycled" xlog files that we must ignore
last_wal = sorted(pg_wals)[-1]
dummy_data = b"x" * (16 * 2 ** 20)
def write_dummy_wal(inc):
filename = "{:024X}".format((int(last_wal, 16) + inc))
print("Writing dummy WAL file", filename)
open(os.path.join(pg_wal_dir, filename), "wb").write(dummy_data)
return filename
recycled1 = write_dummy_wal(1)
recycled2 = write_dummy_wal(2)
# check what we have archived, there should be at least the three
# above WALs that are NOT there at the moment
archived_wals = set(list_archive("xlog"))
assert len(pg_wals - archived_wals) >= 4
# now perform an archive sync
arsy = ArchiveSync()
arsy.run(["--site", pghoard.test_site, "--config", pghoard.config_path])
# and now archive should include all our WALs
archived_wals = set(list_archive("xlog"))
# the recycled files must not appear in archived files
assert recycled1 not in archived_wals
assert recycled2 not in archived_wals
# the regular wals must be archived
assert archived_wals.issuperset(pg_wals)
# if we delete a wal file that's not the latest archival it should
# get synced to the archive as we don't have a basebackup newer than
# it
current_wal = arsy.get_current_wal_file()
old_wals = sorted(wal for wal in pg_wals if wal < current_wal)
store.delete_key(os.path.join(pghoard.test_site, "xlog", old_wals[-2]))
arsy.run(["--site", pghoard.test_site, "--config", pghoard.config_path])
archived_wals = set(list_archive("xlog"))
assert archived_wals.issuperset(pg_wals)
# delete the topmost wal file, this should cause resync too
store.delete_key(os.path.join(pghoard.test_site, "xlog", old_wals[-1]))
arsy.run(["--site", pghoard.test_site, "--config", pghoard.config_path])
archived_wals = set(list_archive("xlog"))
assert archived_wals.issuperset(pg_wals)
# let's do a little dance to turn our DB into a standby and then
# promote it, forcing a timeline switch
db.kill(force=False)
with open(os.path.join(db.pgdata, "recovery.conf"), "w") as fp:
fp.write(
"standby_mode = 'on'\n"
"recovery_target_timeline = 'latest'\n"
"restore_command = 'false'\n"
)
# start PG and promote it
db.run_pg()
db.run_cmd("pg_ctl", "-D", db.pgdata, "promote")
time.sleep(5) # TODO: instead of sleeping, poll the db until ready
# we should have a single timeline file in pg_xlog/pg_wal now
pg_wal_timelines = {f for f in os.listdir(pg_wal_dir) if wal.TIMELINE_RE.match(f)}
assert len(pg_wal_timelines) > 0
# but there should be nothing archived as archive_command wasn't setup
archived_timelines = set(list_archive("timeline"))
assert len(archived_timelines) == 0
# let's hit archive sync
arsy.run(["--site", pghoard.test_site, "--config", pghoard.config_path])
# now we should have an archived timeline
archived_timelines = set(list_archive("timeline"))
assert archived_timelines.issuperset(pg_wal_timelines)
assert "00000002.history" in archived_timelines
# let's take a new basebackup
self._run_and_wait_basebackup(pghoard, db, "basic")
# nuke archives and resync them
for name in list_archive(folder="timeline"):
store.delete_key(os.path.join(pghoard.test_site, "timeline", name))
for name in list_archive(folder="xlog"):
store.delete_key(os.path.join(pghoard.test_site, "xlog", name))
self._switch_wal(db, 1)
arsy.run(["--site", pghoard.test_site, "--config", pghoard.config_path])
archived_wals = set(list_archive("xlog"))
# assume the same timeline file as before and one to three wal files
assert len(archived_wals) >= 1
assert len(archived_wals) <= 3
archived_timelines = set(list_archive("timeline"))
assert list(archived_timelines) == ["00000002.history"]
def test_archive_command_with_invalid_file(self, pghoard):
# only WAL and timeline (.history) files can be archived
bl_label = "000000010000000000000002.00000028.backup"
bl_file = "xlog/{}".format(bl_label)
wal_path = os.path.join(get_pg_wal_directory(pghoard.config["backup_sites"][pghoard.test_site]), bl_label)
backup_wal_path = os.path.join(pghoard.config["backup_location"], pghoard.test_site, bl_file)
with open(wal_path, "w") as fp:
fp.write("jee")
# backup labels are ignored - archiving returns success but file won't appear on disk
archive_command(host="127.0.0.1", port=pghoard.config["http_port"],
site=pghoard.test_site, xlog=bl_label)
assert not os.path.exists(backup_wal_path)
# any other files raise an error
with pytest.raises(postgres_command.PGCError) as excinfo:
archive_command(host="127.0.0.1", port=pghoard.config["http_port"],
site=pghoard.test_site, xlog=bl_label + ".x")
assert excinfo.value.exit_code == postgres_command.EXIT_ARCHIVE_FAIL
assert not os.path.exists(backup_wal_path + ".x")
def test_get_invalid(self, pghoard, tmpdir):
ne_wal_seg = "0000FFFF0000000C000000FE"
nonexistent_wal = "/{}/archive/{}".format(pghoard.test_site, ne_wal_seg)
# x-pghoard-target-path missing
conn = HTTPConnection(host="127.0.0.1", port=pghoard.config["http_port"])
conn.request("GET", nonexistent_wal)
status = conn.getresponse().status
assert status == 400
# missing WAL file
headers = {"x-pghoard-target-path": str(tmpdir.join("test_get_invalid"))}
conn.request("GET", nonexistent_wal, headers=headers)
status = conn.getresponse().status
assert status == 404
# no x-pghoard-target-path for head
headers = {"x-pghoard-target-path": str(tmpdir.join("test_get_invalid"))}
conn.request("HEAD", nonexistent_wal, headers=headers)
status = conn.getresponse().status
assert status == 400
# missing WAL file
headers = {"x-pghoard-target-path": str(tmpdir.join("test_get_invalid"))}
conn.request("HEAD", nonexistent_wal)
status = conn.getresponse().status
assert status == 404
# missing WAL file using restore_command
with pytest.raises(postgres_command.PGCError) as excinfo:
restore_command(site=pghoard.test_site, xlog=os.path.basename(nonexistent_wal),
host="127.0.0.1", port=pghoard.config["http_port"],
output=None, retry_interval=0.1)
assert excinfo.value.exit_code == postgres_command.EXIT_NOT_FOUND
# write failures, this should be retried a couple of times
# start by making sure we can access the file normally
valid_wal_seg = "0000DDDD0000000D000000FC"
valid_wal = "/{}/xlog/{}".format(pghoard.test_site, valid_wal_seg)
store = pghoard.transfer_agents[0].get_object_storage(pghoard.test_site)
store.store_file_from_memory(valid_wal, wal_header_for_file(valid_wal_seg), metadata={"a": "b"})
conn.request("HEAD", valid_wal)
status = conn.getresponse().status
assert status == 200
restore_command(site=pghoard.test_site, xlog=os.path.basename(valid_wal),
host="127.0.0.1", port=pghoard.config["http_port"],
output=None, retry_interval=0.1)
# write to non-existent directory
headers = {"x-pghoard-target-path": str(tmpdir.join("NA", "test_get_invalid"))}
conn.request("GET", valid_wal, headers=headers)
status = conn.getresponse().status
assert status == 400
# inject a failure by making a static function fail
failures = [0, ""]
def get_failing_func(orig_func):
def failing_func(*args):
if failures[0] > 0:
failures[0] -= 1
raise Exception("test_get_invalid failure: {}".format(failures[1]))
return orig_func(*args)
return failing_func
for ta in pghoard.transfer_agents:
store = ta.get_object_storage(pghoard.test_site)
store.get_contents_to_string = get_failing_func(store.get_contents_to_string)
prefetch_n = pghoard.config["restore_prefetch"]
try:
# we should have two retries + all prefetch operations
pghoard.webserver.server.prefetch_404.clear()
failures[0] = 2 + prefetch_n
failures[1] = "test_two_fails_success"
headers = {"x-pghoard-target-path": str(tmpdir.join("test_get_invalid_2"))}
conn.request("GET", valid_wal, headers=headers)
status = conn.getresponse().status
assert status == 201
assert failures[0] == 0
# so we should have a hard failure after three attempts
pghoard.webserver.server.prefetch_404.clear()
failures[0] = 4 + prefetch_n
failures[1] = "test_three_fails_error"
headers = {"x-pghoard-target-path": str(tmpdir.join("test_get_invalid_3"))}
conn.request("GET", valid_wal, headers=headers)
status = conn.getresponse().status
assert status == 500
assert failures[0] == 1
finally:
# clear transfer cache to avoid using our failing versions
for ta in pghoard.transfer_agents:
ta.site_transfers = {}
def test_restore_command_retry(self, pghoard):
failures = [0, ""]
orig_http_request = postgres_command.http_request
def fail_http_request(*args):
if failures[0] > 0:
failures[0] -= 1
raise socket.error("test_restore_command_retry failure: {}".format(failures[1]))
return orig_http_request(*args)
postgres_command.http_request = fail_http_request
# create a valid WAL file and make sure we can restore it normally
wal_seg = "E" * 24
wal_path = "/{}/xlog/{}".format(pghoard.test_site, wal_seg)
store = pghoard.transfer_agents[0].get_object_storage(pghoard.test_site)
store.store_file_from_memory(wal_path, wal_header_for_file(wal_seg), metadata={"a": "b"})
restore_command(site=pghoard.test_site, xlog=wal_seg, output=None,
host="127.0.0.1", port=pghoard.config["http_port"],
retry_interval=0.1)
# now make the webserver fail all attempts
failures[0] = 4
failures[1] = "four fails"
# restore should fail
with pytest.raises(postgres_command.PGCError) as excinfo:
restore_command(site=pghoard.test_site, xlog=wal_seg, output=None,
host="127.0.0.1", port=pghoard.config["http_port"],
retry_interval=0.1)
assert excinfo.value.exit_code == postgres_command.EXIT_ABORT
assert failures[0] == 1 # fail_http_request should have 1 failure left
# try with two failures, this should work on the third try
failures[0] = 2
failures[1] = "two fails"
restore_command(site=pghoard.test_site, xlog=wal_seg, output=None,
host="127.0.0.1", port=pghoard.config["http_port"],
retry_interval=0.1)
assert failures[0] == 0
postgres_command.http_request = orig_http_request
def test_get_archived_file(self, pghoard):
wal_seg_prev_tli = "00000001000000000000000F"
wal_seg = "00000002000000000000000F"
wal_file = "xlog/{}".format(wal_seg)
# NOTE: create WAL header for the "previous" timeline, this should be accepted
content = wal_header_for_file(wal_seg_prev_tli)
wal_dir = get_pg_wal_directory(pghoard.config["backup_sites"][pghoard.test_site])
archive_path = os.path.join(pghoard.test_site, wal_file)
compressor = pghoard.Compressor()
compressed_content = compressor.compress(content) + (compressor.flush() or b"")
metadata = {
"compression-algorithm": pghoard.config["compression"]["algorithm"],
"original-file-size": len(content),
}
store = pghoard.transfer_agents[0].get_object_storage(pghoard.test_site)
store.store_file_from_memory(archive_path, compressed_content, metadata=metadata)
restore_command(site=pghoard.test_site, xlog=wal_seg, output=None,
host="127.0.0.1", port=pghoard.config["http_port"])
restore_target = os.path.join(wal_dir, wal_seg)
restore_command(site=pghoard.test_site, xlog=wal_seg, output=restore_target,
host="127.0.0.1", port=pghoard.config["http_port"])
assert os.path.exists(restore_target) is True
with open(restore_target, "rb") as fp:
restored_data = fp.read()
assert content == restored_data
# test the same thing using restore as 'pghoard_postgres_command'
tmp_out = os.path.join(wal_dir, restore_target + ".cmd")
postgres_command.main([
"--host", "localhost",
"--port", str(pghoard.config["http_port"]),
"--site", pghoard.test_site,
"--mode", "restore",
"--output", tmp_out,
"--xlog", wal_seg,
])
with open(tmp_out, "rb") as fp:
restored_data = fp.read()
assert content == restored_data
def test_get_encrypted_archived_file(self, pghoard):
wal_seg = "000000090000000000000010"
content = wal_header_for_file(wal_seg)
compressor = pghoard.Compressor()
compressed_content = compressor.compress(content) + (compressor.flush() or b"")
encryptor = Encryptor(CONSTANT_TEST_RSA_PUBLIC_KEY)
encrypted_content = encryptor.update(compressed_content) + encryptor.finalize()
wal_dir = get_pg_wal_directory(pghoard.config["backup_sites"][pghoard.test_site])
archive_path = os.path.join(pghoard.test_site, "xlog", wal_seg)
metadata = {
"compression-algorithm": pghoard.config["compression"]["algorithm"],
"original-file-size": len(content),
"encryption-key-id": "testkey",
}
store = pghoard.transfer_agents[0].get_object_storage(pghoard.test_site)
store.store_file_from_memory(archive_path, encrypted_content, metadata=metadata)
pghoard.webserver.config["backup_sites"][pghoard.test_site]["encryption_keys"] = {
"testkey": {
"public": CONSTANT_TEST_RSA_PUBLIC_KEY,
"private": CONSTANT_TEST_RSA_PRIVATE_KEY,
},
}
restore_target = os.path.join(wal_dir, wal_seg)
restore_command(site=pghoard.test_site, xlog=wal_seg, output=restore_target,
host="127.0.0.1", port=pghoard.config["http_port"])
assert os.path.exists(restore_target)
with open(restore_target, "rb") as fp:
restored_data = fp.read()
assert content == restored_data
def test_requesting_basebackup(self, pghoard):
nonexistent_basebackup = "/{}/archive/basebackup".format(pghoard.test_site)
conn = HTTPConnection(host="127.0.0.1", port=pghoard.config["http_port"])
status = conn.request("PUT", nonexistent_basebackup)
status = conn.getresponse().status
assert status == 201
assert pghoard.requested_basebackup_sites == {"test_requesting_basebackup"}
|
const path = require('path');
const MiniCssExtractPlugin = require("mini-css-extract-plugin");
const HtmlWebpackPlugin = require('html-webpack-plugin');
module.exports = {
name: 'my-app',
// target: ['web', 'es2015'], // by default the browserslist config is used if present
entry: {
index: './src/index.js',
about: './src/about.js',
},
output: {
filename: '[name].bundle.js',
},
// Use persistent caching
cache: {
// Cache to the filesystem
type: 'filesystem',
cacheDirectory: path.resolve(__dirname, '.temp_cache'),
buildDependencies: {
// Invalidate the cache whenever this config file changes
config: [__filename ]
}
},
resolve: {
extensions: ['.js', '.json', '.jsx']
},
module: {
rules: [
{
test: /\.txt$/,
use: 'raw-loader'
},
{
test: /(\.js$|\.jsx$)/,
exclude: /node_modules/,
use: {
loader: 'babel-loader'
}
},
{
test: /\.scss$/,
use: [{
loader: MiniCssExtractPlugin.loader
}, {
loader: 'css-loader',
options: {
modules: {
localIdentName: '[path][name]__[local]--[hash:base64:5]', // https://github.com/rails/webpacker/issues/2197
}
}
}, {
loader: 'sass-loader'
}]
}
]
},
optimization: {
splitChunks: {
chunks: 'all'
},
// chunkIds: 'named' // default is "named" in development and "deterministic" in production
},
plugins: [
new HtmlWebpackPlugin({
chunks: ['index'],
filename: 'index.html',
template: 'src/index.html'
}),
new HtmlWebpackPlugin({
chunks: ['about'],
filename: 'about.html',
template: 'src/about.html'
}),
new MiniCssExtractPlugin({
filename: "[name].css",
chunkFilename: "[id].css"
})
],
// Enable top-level await
experiments: {
topLevelAwait: true
},
devServer: {
host: '0.0.0.0'
}
};
|
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const mySchema = new Schema({
chat: {
type: Schema.ObjectId,
ref: 'Chat'
},
user: {
type: Schema.ObjectId,
ref: 'User'
},
message: {
type: String,
required: true
},
date: Date,
file: String
});
const model = mongoose.model('Message', mySchema);
module.exports = model; |
const textProps = {
image: require("../../../assets/img/faces/team-1.jpg").default,
text: "Ray Tompson",
};
export default textProps;
|
#ifndef __PERF_CACHE_H
#define __PERF_CACHE_H
#include <stdbool.h>
#include "util.h"
#include "strbuf.h"
#include "../perf.h"
#define CMD_EXEC_PATH "--exec-path"
#define CMD_PERF_DIR "--perf-dir="
#define CMD_WORK_TREE "--work-tree="
#define CMD_DEBUGFS_DIR "--debugfs-dir="
#define PERF_DIR_ENVIRONMENT "PERF_DIR"
#define PERF_WORK_TREE_ENVIRONMENT "PERF_WORK_TREE"
#define EXEC_PATH_ENVIRONMENT "PERF_EXEC_PATH"
#define DEFAULT_PERF_DIR_ENVIRONMENT ".perf"
#define PERF_DEBUGFS_ENVIRONMENT "PERF_DEBUGFS_DIR"
typedef int (*config_fn_t)(const char *, const char *, void *);
extern int perf_default_config(const char *, const char *, void *);
extern int perf_config(config_fn_t fn, void *);
extern int perf_config_int(const char *, const char *);
extern int perf_config_bool(const char *, const char *);
extern int config_error_nonbool(const char *);
/* pager.c */
extern void setup_pager(void);
extern const char *pager_program;
extern int pager_in_use(void);
extern int pager_use_color;
extern int use_browser;
#ifdef NO_NEWT_SUPPORT
static inline void setup_browser(void)
{
setup_pager();
}
static inline void exit_browser(bool wait_for_ok __used) {}
#else
void setup_browser(void);
void exit_browser(bool wait_for_ok);
#endif
char *alias_lookup(const char *alias);
int split_cmdline(char *cmdline, const char ***argv);
#define alloc_nr(x) (((x)+16)*3/2)
/*
* Realloc the buffer pointed at by variable 'x' so that it can hold
* at least 'nr' entries; the number of entries currently allocated
* is 'alloc', using the standard growing factor alloc_nr() macro.
*
* DO NOT USE any expression with side-effect for 'x' or 'alloc'.
*/
#define ALLOC_GROW(x, nr, alloc) \
do { \
if ((nr) > alloc) { \
if (alloc_nr(alloc) < (nr)) \
alloc = (nr); \
else \
alloc = alloc_nr(alloc); \
x = xrealloc((x), alloc * sizeof(*(x))); \
} \
} while(0)
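/*
 * Hypothetical usage sketch (not taken from perf itself): grow a dynamic
 * array before appending a new element.
 *
 *	struct entry *items = NULL;
 *	int nr = 0, alloc = 0;
 *	ALLOC_GROW(items, nr + 1, alloc);
 *	items[nr++] = new_entry;
 */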
static inline int is_absolute_path(const char *path)
{
return path[0] == '/';
}
const char *make_nonrelative_path(const char *path);
char *strip_path_suffix(const char *path, const char *suffix);
extern char *mkpath(const char *fmt, ...) __attribute__((format (printf, 1, 2)));
extern char *perf_path(const char *fmt, ...) __attribute__((format (printf, 1, 2)));
extern char *perf_pathdup(const char *fmt, ...)
__attribute__((format (printf, 1, 2)));
extern size_t strlcpy(char *dest, const char *src, size_t size);
#endif /* __PERF_CACHE_H */
|
/**
* @overview In a SAML application customize the mapping between the Auth0 user and the SAML attributes
* @gallery true
* @category enrich profile
*
* SAML Attributes mapping
*
* If the application the user is logging in to is SAML (like Salesforce for instance), you can customize the mapping between the Auth0 user and the SAML attributes.
* Below you can see that we are mapping `user_id` to the NameID, `email` to `http://schemas.../emailaddress`, etc.
*
* For more information about SAML options, look at <https://docs.auth0.com/saml-configuration>.
*/
function (user, context, callback) {
context.samlConfiguration.mappings = {
"http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier": "user_id",
"http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress": "email",
"http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name": "name",
"http://schemas.xmlsoap.org/ws/2005/05/identity/claims/food": "user_metadata.favorite_food",
"http://schemas.xmlsoap.org/ws/2005/05/identity/claims/address": "app_metadata.shipping_address"
};
callback(null, user, context);
}
|
#################################################################################
#
# Project Title: Send emails to yourself when execution is done
# Author: Sam Showalter
# Date: 2021-08-12
#
#################################################################################
#################################################################################
# Module Imports
#################################################################################
import smtplib, ssl
import os
import logging
import sys
import base64
import datetime as dt
from getpass import getpass
#################################################################################
# Dictionary template for all executions
#################################################################################
EMAIL_TEMPLATES = {
"train_finished": """\
Subject: {} agent finished training on {}
Hi! Cameleon Email Bot here. Your training job has finished.
Please see details of the execution below:
- Agent: {}
- Environment: {}
- Framework: {}
Planned Execution:
- Number of epochs: {}
- Number of episodes: {}
- Number of timesteps: {}
Actual Execution:
- Total epochs: {}
- Total episodes: {}
- Total timesteps: {}
Time information:
- Total time elapsed: {}
- Current time: {}
Other information:
- Random seed: {}
- Checkpoint epochs: {}
- Output directory:
+ {}
""",
"failure":"""\
Subject: {} agent FAILED while {} on {}
Hi! Cameleon Email Bot here. Your training job has FAILED :(.
Please see details of the failure and execution progress below:
------------------------------------------------------------------
Failure Message:
---------------
{}
Failure StackTrace:
------------------
{}
------------------------------------------------------------------
Training Progress:
- Agent: {}
- Environment: {}
- Framework: {}
Planned Execution:
- Number of epochs: {}
- Number of episodes: {}
- Number of timesteps: {}
Actual Execution:
- Total epochs: {}
- Total episodes: {}
- Total timesteps: {}
Time information:
- Total time elapsed: {}
- Current time: {}
Other information:
- Random seed: {}
- Checkpoint epochs: {}
- Output directory:
+ {}
"""
}
#################################################################################
# Function-Class Declaration
#################################################################################
class CameleonEmailBot(object):
"""
System to send you emails when training, rollouts,
or other experiments are finished
"""
def __init__(self,
email_sender,
email_receiver,
email_server = "smtp.mail.yahoo.com",
port = 465
):
self.email_server = email_server
self.email_sender = email_sender
self.email_receiver = email_receiver
self.message_type = None
self.port = port
# Not that secure, but better than nothing
        pwd = getpass(prompt="Please enter app password for email {}:"\
.format(self.email_sender))
self.password = base64.b64encode(pwd.encode("utf-8"))
def send_email(self, message_type, args):
"""Send email to user with updates on execution
:message_type: str: Type of message (e.g. failure)
:args: Argparse.Args: User-defined arguments
"""
self.message_type = message_type
email_router = {'train_finished':self._write_email_train_finished,
'failure':self._write_email_failure}
# Send email
message = email_router[self.message_type](args)
logging.info(message)
context = ssl.create_default_context()
with smtplib.SMTP_SSL(self.email_server, self.port, context=context) as server:
server.login(self.email_sender,
base64.b64decode(self.password).decode('utf8'))
server.sendmail(self.email_sender, self.email_receiver, message)
logging.info("Email successfully sent!")
def _write_email_train_finished(self, args):
"""Write email for training finishing
:args: Argparse args
:returns: Message for email
"""
return EMAIL_TEMPLATES[self.message_type].format(
args.model_name,
args.env_name,
args.model_name,
args.env_name,
args.framework,
args.num_epochs,
args.num_episodes,
args.num_timesteps,
args.epochs_total,
args.episodes_total,
args.timesteps_total,
dt.timedelta(seconds = round(args.time_total_s)),
dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
args.seed,
args.checkpoint_epochs,
args.outdir)
def _write_email_failure(self,args):
"""Write email for training finishing
:args: Argparse args
:returns: Message for email
"""
return EMAIL_TEMPLATES[self.message_type].format(
args.model_name,
args.execution_type,
args.env_name,
args.failure_message,
args.failure_stacktrace,
args.model_name,
args.env_name,
args.framework,
args.num_epochs,
args.num_episodes,
args.num_timesteps,
args.epochs_total,
args.episodes_total,
args.timesteps_total,
dt.timedelta(seconds = round(args.time_total_s)),
dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
args.seed,
args.checkpoint_epochs,
args.outdir)
#################################################################################
# Main Method
#################################################################################
#################################################################################
# Main Method
#################################################################################
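#################################################################################
# Illustrative usage sketch (an assumption, not part of the original module);
# `args` must be an argparse.Namespace carrying the fields referenced in the
# templates above (model_name, env_name, framework, num_epochs, ...):
#
#   bot = CameleonEmailBot(email_sender="sender@example.com",
#                          email_receiver="receiver@example.com")
#   bot.send_email("train_finished", args)
#################################################################################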
|
const bcrypt = require('bcryptjs');
// I thought of declaring these constants outside, in a separate file,
// but casually tampering with these variables might lead to the loss of all the information
// in the database or a drastic performance drop.
// Most of the time, you do not want to change these variables.
const constants = {
// Do not just play around with salt rounds.
// The rounds are actually 2^rounds. So the system
// would take double the time if the rounds are increased by 1
saltRounds: 12,
// WARNING: DO NOT, DO NOT change this or you will basically lose all the
// existing information about the users in the database.
// You will have to re-register all the users in the system.
pepper: '$2a$10$WgvGi9k0FI6WwOgx/X148e',
};
exports.encrypt = password => new Promise((resolve, reject) => {
// eslint-disable-next-line consistent-return
bcrypt.genSalt(constants.saltRounds, (err, salt) => {
if (err) {
return reject(err);
}
// eslint-disable-next-line no-shadow
bcrypt.hash(`${password}${constants.pepper}`, salt, (err, hash) => {
if (err) {
return reject(err);
}
return resolve({ salt, hash });
});
});
});
exports.compare = (password, hash) => new Promise((resolve, reject) => {
bcrypt.compare(`${password}${constants.pepper}`, hash, (err, isSame) => {
if (err) {
return reject(err);
}
return resolve(isSame);
});
});
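/*
 * Illustrative usage sketch (an assumption: this module is saved as ./crypt.js):
 *
 *   const crypt = require('./crypt');
 *   crypt.encrypt('hunter2')
 *     .then(({ hash }) => crypt.compare('hunter2', hash))
 *     .then(isSame => console.log(isSame)); // true
 */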
|
import torch as t
from torch import nn
from torch.utils.data import Dataset, DataLoader, random_split
from torch.optim import Adam
import numpy as np
import cv2
import os
batch_size = 16
n_epochs = 1000
img_size = np.array((1088, 1700)) # Size of the incoming images
channels = 4 # Number of images in each event
lr = 0.00001 #0.0001
b1, b2 = 0.9, 0.999 # Betas for Adam optimization function
n_ch = 128 # Number of channels in initial convolution layers
sample_interval = 30 # Save a generated image every sample_interval number of batches
normalization_vals = t.Tensor([150, 150, 550]) # Values to normalize the positions
train_frac = 0.9 # Train on this fraction of the data, evaluate on the rest
#write_dir = './write_dir'
data_dir = './data/'
im_folder = 'full_single_24/'
pos_file = 'single_pos_24.txt'
#model_path = './model_path'
#os.makedirs(write_dir, exist_ok=True)
class Data(Dataset):
def __init__(self):
'''
self:
        run_ev      List of tuples (run, event)
        data        Dictionary of all 4 images in an event with tuples (run, event) as keys
        pos_dict    Dictionary of 3d positions of the bubble in an event with tuples (run, event) as keys
        len         Total number of events
Images must be named: run-event-camera.png
run is 10 character run number
        event is a one or two digit event number
camera is one digit camera number
'''
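        # Illustrative filename based on the convention above:
        # '1234567890-12-3.png' -> run='1234567890', event=12, camera=3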
im_dir = data_dir + im_folder
# --------------------------------
# Load event images in groups of 4
# --------------------------------
def sort_key(im_name):
return int(im_name[-5])
# Create a list containing sublists of the image names for each event
ims_by_event = []
im_list = os.listdir(im_dir)
run_ev_set = set([im[:-6] for im in im_list]) # Using a set makes sure elements are unique
for run_ev in run_ev_set:
event_ims = []
for im in im_list:
if im[:-6] == run_ev:
event_ims.append(im)
if 0 < len(event_ims) < 5:
event_ims = sorted(event_ims, key=sort_key)
ims_by_event.append(event_ims)
self.len = len(ims_by_event)
# For each sublist in ims_by_event, append 'blank' if the camera did not image the bubble in that event
for j, event_ims in enumerate(ims_by_event):
if len(event_ims) == 4:
continue
cams_with_ims = [int(im_name[-5]) for im_name in event_ims]
for k in range(4):
if not k in cams_with_ims:
ims_by_event[j].insert(k, 'blank')
        # Build a list of (run, event) tuples corresponding to the events in ims_by_event
self.run_ev = []
for event in ims_by_event:
for im_name in event:
if im_name != 'blank':
run = im_name[:10]
ev = int(im_name[11:-6])
self.run_ev.append((run,ev))
break
# Load images. Load a blank image where 'blank' is found
self.data = t.empty((self.len, 4, *img_size))
blank = t.zeros([*img_size])
for i, event in enumerate(ims_by_event):
loaded_ims_for_current_event = t.empty((4, *img_size))
for j, im_name in enumerate(event):
# Load a blank image if the image name is 'blank'
if im_name == 'blank':
loaded_ims_for_current_event[j] = blank
# Otherwise, load the image
else:
im = t.from_numpy(cv2.imread(im_dir + im_name, cv2.IMREAD_GRAYSCALE)//255)
# Stop if image is not the required dimension
loaded_ims_for_current_event[j] = im
# Update the data dictionary
self.data[i] = loaded_ims_for_current_event
if i % 100 == 0:
print('Loaded images from event %i of %i' % (i, len(ims_by_event)))
# ---------------------
# Load bubble 3D positions
# ---------------------
f = open(data_dir + pos_file, 'r')
self.pos_dict = {}
for line in f:
ld = line[:-2].split(' ')
self.pos_dict.update( { (ld[0], int(ld[1])) : t.Tensor((float(ld[2]), float(ld[3]), float(ld[4]))) } )
f.close()
for key in self.pos_dict:
self.pos_dict[key] /= normalization_vals
__len__ = lambda self: self.len
def __getitem__(self, index):
key = self.run_ev[index]
return self.data[index], self.pos_dict[key]
dataset = Data()
train_len = int(train_frac * dataset.len)
valid_len = dataset.len - train_len
train_data, valid_data = random_split(dataset, (train_len, valid_len))
train_loader = DataLoader(dataset=train_data, batch_size=batch_size, shuffle=True)
valid_loader = DataLoader(dataset=valid_data, batch_size=batch_size, shuffle=True)
class Model(nn.Module):
def __init__(self):
super(Model, self).__init__()
mp_dim = lambda ds, ks, s=1, p=0, d=1: int((ds + 2*p - d*(ks - 1) - 1 + s)//s)
def convolution_block(in_filters, out_filters, bn=True):
block = [
nn.Conv2d(in_filters, out_filters, kernel_size=3, stride=1, padding=1),
nn.LeakyReLU(0.2, inplace=True),
nn.MaxPool2d(kernel_size=2, stride=2),
nn.Dropout2d(0.25)
]
if bn:
block.append(nn.BatchNorm2d(out_filters, eps=0.1))
return block
self.conv_blocks = nn.Sequential(
*convolution_block(channels, 12, bn=False),
*convolution_block(12, 24),
*convolution_block(24, 48)
# Maybe add some linear layers here
)
d_size = img_size // 2**3
self.lin_layers = nn.Sequential(
nn.Linear(48*np.prod(d_size), 100),
nn.ReLU(inplace=True),
nn.Linear(100, 3),
nn.Tanh()
)
def forward(self, imgs):
out = self.conv_blocks(imgs)
        out = out.view(out.shape[0], -1)  # use the actual batch size so a smaller final batch still works
pos = self.lin_layers(out)
return pos
model = Model()
loss_func = nn.MSELoss()
optimizer = Adam(model.parameters(), lr=lr, betas=(b1, b2))
def train():
model.train()
train_loss = 0
for data in train_loader:
ims, pos = data
# Reset gradients to zero
optimizer.zero_grad()
# Generate a predicted 3d position
prediction = model(ims)
# Compute the loss
loss = loss_func(prediction, pos)
# Update weights
loss.backward()
optimizer.step()
train_loss += loss.detach().item()
prediction_corrected = prediction.detach() * normalization_vals
pos_corrected = pos.detach() * normalization_vals
dist = t.norm(prediction_corrected - pos_corrected, dim=1)
mean_dist = t.mean(dist)
return train_loss/(len(train_loader)*batch_size), mean_dist
def evaluate():
model.eval()
valid_loss = 0
for data in valid_loader:
ims, pos = data
# Generate a predicted 3d position
prediction = model(ims)
# Compute the loss
loss = loss_func(prediction, pos)
valid_loss += loss.detach().item()
prediction_corrected = prediction.detach() * normalization_vals
pos_corrected = pos.detach() * normalization_vals
dist = t.norm(prediction_corrected - pos_corrected, dim=1)
mean_dist = t.mean(dist)
return valid_loss/(len(valid_loader)*batch_size), mean_dist
for epoch in range(n_epochs):
# Train the network
train_loss, train_dist = train()
# Evaluate the network
valid_loss, valid_dist = evaluate()
print(
'[Epoch %d/%d] [Train Loss: %f] [Valid Loss: %f] [Train Distance: %f] [Valid Distance: %f]'
% (epoch, n_epochs, train_loss, valid_loss, train_dist, valid_dist)
)
'''
class Model(nn.Module):
def __init__(self):
super(Model, self).__init__()
def convolution_block(in_filters, out_filters, bn=True):
block = [nn.Conv2d(in_filters, out_filters, kernel_size=3, stride=2, padding=1),
nn.LeakyReLU(0.2, inplace=True),
nn.Dropout2d(0.25)]
if bn:
block.append(nn.BatchNorm2d(out_filters, 0.8))
return block
self.model = nn.Sequential(
*convolution_block(channels, n_ch//8, bn=False),
*convolution_block(n_ch//8, n_ch//4),
*convolution_block(n_ch//4, n_ch//2),
*convolution_block(n_ch//2, n_ch)
)
# The height and width of downsampled image
ds_size = img_size // 2 ** 4
self.lin_layer = nn.Sequential(nn.Linear(n_ch * ds_size ** 2, 3), nn.Tanh())
def forward(self, img):
out = self.model(img)
out = out.view(out.shape[0], -1)
pos = self.lin_layer(out)
return pos
model = Model()
loss_func = nn.MSELoss()
optimizer = Adam(model.parameters(), lr=lr, betas=(b1, b2))
def train():
model.train()
train_loss = 0
for data in train_loader:
ims, pos = data
# Reset gradients to zero
optimizer.zero_grad()
# Generate a predicted 3d position
prediction = model(ims)
# Compute the loss
loss = loss_func(prediction, pos)
# Update weights
loss.backward()
optimizer.step()
train_loss += loss.detach().item()
prediction_corrected = prediction.detach() * normalization_vals
pos_corrected = pos.detach() * normalization_vals
dist = t.norm(prediction_corrected - pos_corrected, dim=1)
mean_dist = t.mean(dist)
return train_loss/(len(train_loader)*batch_size), mean_dist
def evaluate():
model.eval()
valid_loss = 0
for data in valid_loader:
ims, pos = data
# Generate a predicted 3d position
prediction = model(ims)
# Compute the loss
loss = loss_func(prediction, pos)
valid_loss += loss.detach().item()
prediction_corrected = prediction.detach() * normalization_vals
pos_corrected = pos.detach() * normalization_vals
dist = t.norm(prediction_corrected - pos_corrected, dim=1)
mean_dist = t.mean(dist)
return valid_loss/(len(valid_loader)*batch_size), mean_dist
for epoch in range(n_epochs):
# Train the network
train_loss, train_dist = train()
# Evaluate the network
valid_loss, valid_dist = evaluate()
print(
'[Epoch %d/%d] [Train Loss: %f] [Valid Loss: %f] [Train Distance: %f] [Valid Distance: %f]'
% (epoch, n_epochs, train_loss, valid_loss, train_dist, valid_dist)
)
'''
'''
def generate_noise(name):
if name == 'Gaussian':
return t.normal(mean=0, std=1, size=(batch_size, latent_dim))
return t.rand((batch_size, latent_dim))
def save_model():
    print('Saving generator model to ' + model_path)
t.save(G, model_path)
# Loss function
adversarial_loss = nn.BCELoss()
G = Generator()
D = Discriminator()
# Initialize weights
#G.apply(weights_init_normal)
#D.apply(weights_init_normal)
# Optimizers
optimizer_G = Adam(G.parameters(), lr=lr, betas=(b1, b2))
optimizer_D = Adam(D.parameters(), lr=lr, betas=(b1, b2))
try:
for epoch in range(n_epochs):
for i, data_imgs in enumerate(dataloader):
# Configure generator input
data_shape = data_imgs.shape
data_imgs = data_imgs.view(data_imgs.shape[0], channels, data_imgs.shape[-2], data_imgs.shape[-1])
# Adversarial ground truths
valid = t.ones((data_shape[0], 1), requires_grad=False)
fake = t.zeros((data_shape[0], 1), requires_grad=False)
# ----------------
# Train Generator
# ----------------
optimizer_G.zero_grad()
# Sample noise as generator input
noise = generate_noise('Gaussian')
# Generate a batch of images
gen_imgs = G(noise)
gen_imgs = gen_imgs[:data_shape[0]]
# Loss measures generator's ability to fool the discriminator
G_loss = adversarial_loss(D(gen_imgs), valid)
G_loss.backward()
optimizer_G.step()
# --------------------
# Train Discriminator
# --------------------
optimizer_D.zero_grad()
# Measure discriminator's ability to classify real from generated samples
real_loss = adversarial_loss(D(data_imgs), valid)
fake_loss = adversarial_loss(D(gen_imgs.detach()), fake)
D_loss = (real_loss + fake_loss) / 2
D_loss.backward()
optimizer_D.step()
# ----------------
# Log Progress
# ----------------
print(
"[Epoch %d/%d] [Batch %d/%d] [D loss: %f] [G loss: %f]"
% (epoch, n_epochs, i, len(dataloader), D_loss.item(), G_loss.item())
)
batches_done = epoch*len(dataloader) + i
if batches_done % sample_interval == 0:
os.makedirs(write_dir + str(batches_done), exist_ok=True)
for j, gen_im in enumerate(gen_imgs):
for k, im in enumerate(gen_im):
cv2.imwrite('%s%d/%d_%d.png' % (write_dir, batches_done, j, k), im.detach().numpy()*255)
save_model()
except KeyboardInterrupt:
save_model()
'''
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef UI_OZONE_PLATFORM_DRM_GPU_DRM_SURFACE_H_
#define UI_OZONE_PLATFORM_DRM_GPU_DRM_SURFACE_H_
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gfx/skia_util.h"
#include "ui/gfx/swap_result.h"
#include "ui/ozone/ozone_export.h"
#include "ui/ozone/public/surface_ozone_canvas.h"
class SkImage;
class SkSurface;
namespace ui {
class DrmBuffer;
class DrmWindow;
class HardwareDisplayController;
class OZONE_EXPORT DrmSurface : public SurfaceOzoneCanvas {
public:
DrmSurface(DrmWindow* window_delegate);
~DrmSurface() override;
// SurfaceOzoneCanvas:
skia::RefPtr<SkSurface> GetSurface() override;
void ResizeCanvas(const gfx::Size& viewport_size) override;
void PresentCanvas(const gfx::Rect& damage) override;
scoped_ptr<gfx::VSyncProvider> CreateVSyncProvider() override;
private:
void SchedulePageFlip();
// Callback for SchedulePageFlip(). This will signal when the page flip event
// has completed.
void OnPageFlip(gfx::SwapResult result);
DrmWindow* window_delegate_;
// The actual buffers used for painting.
scoped_refptr<DrmBuffer> front_buffer_;
scoped_refptr<DrmBuffer> back_buffer_;
skia::RefPtr<SkSurface> surface_;
gfx::Rect last_damage_;
// Keep track of the requested image and damage for the last presentation.
// This will be used to update the scanout buffers once the previous page flip
// events completes.
skia::RefPtr<SkImage> pending_image_;
gfx::Rect pending_image_damage_;
bool pending_pageflip_ = false;
base::WeakPtrFactory<DrmSurface> weak_ptr_factory_;
DISALLOW_COPY_AND_ASSIGN(DrmSurface);
};
} // namespace ui
#endif // UI_OZONE_PLATFORM_DRM_GPU_DRM_SURFACE_H_
|
# -*- coding: utf-8 -*-
"""
Created on Sun Sep 8 14:56:22 2019
@author: positiveoutlier
An element is a leader if it is greater than the sum of all the elements to
its right. Given an input list of integers, return a list of the leaders.
Expected input: list with integers
Expected output: list with integers
"""
def array_leaders(numbers):
leaders = []
for index in range(len(numbers)):
if numbers[index] > sum(numbers[(index+1):]):
leaders.append(numbers[index])
return leaders
array_leaders([16, 17, 4, 3, 5, 2])
# Using list comprehension in combination with enumerate
def array_leaders2(numbers):
return [n for (i, n) in enumerate(numbers) if n > sum(numbers[(i+1):])]
array_leaders2([16, 17, 4, 3, 5, 2])
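# Both calls return [17, 5, 2] for this input: 17 > 4+3+5+2, 5 > 2, and 2 > 0 (the empty tail).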
|
let fs = require('fs');
let arraynum = fs.readFileSync('data.txt').toString().split(",");
/* Sort the array in ascending numeric order; the default sort() compares strings lexicographically */
let numbers = arraynum.sort((a, b) => Number(a) - Number(b));
let file= fs.createWriteStream('output.txt')
for (let i = 0; i < numbers.length; i++)
{
if(i == numbers.length-1)
{
        file.write(numbers[i])
}
else{
file.write(numbers[i] + ',')
}
}
file.end() |
const express = require('express')
const router = express.Router()
router.get('/', (req, res) => {
res.send({
session:req.session
})
})
module.exports = router |
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import copy
import math
import utils
from encoder import make_encoder
from decoder import make_decoder
LOG_FREQ = 10000
def gaussian_logprob(noise, log_std):
"""Compute Gaussian log probability."""
residual = (-0.5 * noise.pow(2) - log_std).sum(-1, keepdim=True)
return residual - 0.5 * np.log(2 * np.pi) * noise.size(-1)
def squash(mu, pi, log_pi):
"""Apply squashing function.
See appendix C from https://arxiv.org/pdf/1812.05905.pdf.
"""
mu = torch.tanh(mu)
if pi is not None:
pi = torch.tanh(pi)
if log_pi is not None:
log_pi -= torch.log(F.relu(1 - pi.pow(2)) + 1e-6).sum(-1, keepdim=True)
return mu, pi, log_pi
def weight_init(m):
"""Custom weight init for Conv2D and Linear layers."""
if isinstance(m, nn.Linear):
nn.init.orthogonal_(m.weight.data)
m.bias.data.fill_(0.0)
elif isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d):
# delta-orthogonal init from https://arxiv.org/pdf/1806.05393.pdf
assert m.weight.size(2) == m.weight.size(3)
m.weight.data.fill_(0.0)
m.bias.data.fill_(0.0)
mid = m.weight.size(2) // 2
gain = nn.init.calculate_gain('relu')
nn.init.orthogonal_(m.weight.data[:, :, mid, mid], gain)
class Actor(nn.Module):
"""MLP actor network."""
def __init__(
self, obs_shape, action_shape, hidden_dim, encoder_type,
encoder_feature_dim, log_std_min, log_std_max, num_layers, num_filters
):
super().__init__()
self.encoder = make_encoder(
encoder_type, obs_shape, encoder_feature_dim, num_layers,
num_filters
)
self.log_std_min = log_std_min
self.log_std_max = log_std_max
self.trunk = nn.Sequential(
nn.Linear(self.encoder.feature_dim, hidden_dim), nn.ReLU(),
nn.Linear(hidden_dim, hidden_dim), nn.ReLU(),
nn.Linear(hidden_dim, 2 * action_shape[0])
)
self.outputs = dict()
self.apply(weight_init)
def forward(
self, obs, compute_pi=True, compute_log_pi=True, detach_encoder=False
):
obs = self.encoder(obs, detach=detach_encoder)
mu, log_std = self.trunk(obs).chunk(2, dim=-1)
# constrain log_std inside [log_std_min, log_std_max]
log_std = torch.tanh(log_std)
log_std = self.log_std_min + 0.5 * (
self.log_std_max - self.log_std_min
) * (log_std + 1)
self.outputs['mu'] = mu
self.outputs['std'] = log_std.exp()
if compute_pi:
std = log_std.exp()
noise = torch.randn_like(mu)
pi = mu + noise * std
else:
pi = None
entropy = None
if compute_log_pi:
log_pi = gaussian_logprob(noise, log_std)
else:
log_pi = None
mu, pi, log_pi = squash(mu, pi, log_pi)
return mu, pi, log_pi, log_std
def log(self, L, step, log_freq=LOG_FREQ):
if step % log_freq != 0:
return
for k, v in self.outputs.items():
L.log_histogram('train_actor/%s_hist' % k, v, step)
L.log_param('train_actor/fc1', self.trunk[0], step)
L.log_param('train_actor/fc2', self.trunk[2], step)
L.log_param('train_actor/fc3', self.trunk[4], step)
class QFunction(nn.Module):
"""MLP for q-function."""
def __init__(self, obs_dim, action_dim, hidden_dim):
super().__init__()
self.trunk = nn.Sequential(
nn.Linear(obs_dim + action_dim, hidden_dim), nn.ReLU(),
nn.Linear(hidden_dim, hidden_dim), nn.ReLU(),
nn.Linear(hidden_dim, 1)
)
def forward(self, obs, action):
assert obs.size(0) == action.size(0)
obs_action = torch.cat([obs, action], dim=1)
return self.trunk(obs_action)
class Ag_Critic(nn.Module):
"""Critic network, employes two q-functions."""
def __init__(
self, obs_shape, action_shape, hidden_dim, encoder_type,
encoder_feature_dim, num_layers, num_filters
):
super().__init__()
self.encoder = make_encoder(
encoder_type, obs_shape, encoder_feature_dim, num_layers,
num_filters
)
self.Q1 = QFunction(
self.encoder.feature_dim, action_shape[0], hidden_dim
)
self.Q2 = QFunction(
self.encoder.feature_dim, action_shape[0], hidden_dim
)
self.outputs = dict()
self.apply(weight_init)
def forward(self, obs, action, detach_encoder=False):
        # detach_encoder allows stopping gradient propagation to the encoder
obs = self.encoder(obs, detach=detach_encoder)
q1 = self.Q1(obs, action)
self.outputs['q1'] = q1
return q1
def log(self, L, step, log_freq=LOG_FREQ):
if step % log_freq != 0:
return
self.encoder.log(L, step, log_freq)
for k, v in self.outputs.items():
L.log_histogram('train_critic/%s_hist' % k, v, step)
class Critic(nn.Module):
"""Critic network, employes two q-functions."""
def __init__(
self, obs_shape, action_shape, hidden_dim, encoder_type,
encoder_feature_dim, num_layers, num_filters
):
super().__init__()
self.encoder = make_encoder(
encoder_type, obs_shape, encoder_feature_dim, num_layers,
num_filters
)
self.Q1 = QFunction(
self.encoder.feature_dim, action_shape[0], hidden_dim
)
self.Q2 = QFunction(
self.encoder.feature_dim, action_shape[0], hidden_dim
)
self.outputs = dict()
self.apply(weight_init)
def forward(self, obs, action, detach_encoder=False):
        # detach_encoder allows stopping gradient propagation to the encoder
obs = self.encoder(obs, detach=detach_encoder)
q1 = self.Q1(obs, action)
q2 = self.Q2(obs, action)
self.outputs['q1'] = q1
self.outputs['q2'] = q2
return q1, q2
def log(self, L, step, log_freq=LOG_FREQ):
if step % log_freq != 0:
return
self.encoder.log(L, step, log_freq)
for k, v in self.outputs.items():
L.log_histogram('train_critic/%s_hist' % k, v, step)
for i in range(3):
L.log_param('train_critic/q1_fc%d' % i, self.Q1.trunk[i * 2], step)
L.log_param('train_critic/q2_fc%d' % i, self.Q2.trunk[i * 2], step)
class SacTransferAgent(object):
"""SAC+AE algorithm."""
def __init__(
self,
obs_shape,
action_shape,
device,
hidden_dim=256,
discount=0.99,
init_temperature=0.01,
alpha_lr=1e-3,
alpha_beta=0.9,
actor_lr=1e-3,
actor_beta=0.9,
actor_log_std_min=-10,
actor_log_std_max=2,
actor_update_freq=2,
critic_lr=1e-3,
critic_beta=0.9,
critic_tau=0.005,
critic_target_update_freq=2,
encoder_type='identity',
encoder_feature_dim=50,
encoder_lr=1e-3,
encoder_tau=0.005,
num_layers=4,
num_filters=32,
no_entropy=False,
lts_ratio=0.5,
q1 = False,
):
self.device = device
self.discount = discount
self.critic_tau = critic_tau
self.encoder_tau = encoder_tau
self.actor_update_freq = actor_update_freq
self.critic_target_update_freq = critic_target_update_freq
self.encoder_type = encoder_type
self.actor = Actor(
obs_shape, action_shape, hidden_dim, encoder_type,
encoder_feature_dim, actor_log_std_min, actor_log_std_max,
num_layers, num_filters
).to(device)
self.critic = Critic(
obs_shape, action_shape, hidden_dim, encoder_type,
encoder_feature_dim, num_layers, num_filters
).to(device)
self.critic_target = Critic(
obs_shape, action_shape, hidden_dim, encoder_type,
encoder_feature_dim, num_layers, num_filters
).to(device)
'''
self.real_critic = Critic(
obs_shape, action_shape, hidden_dim, encoder_type,
encoder_feature_dim, num_layers, num_filters
).to(device)
self.real_critic_target = Critic(
obs_shape, action_shape, hidden_dim, encoder_type,
encoder_feature_dim, num_layers, num_filters
).to(device)
'''
self.critic_target.load_state_dict(self.critic.state_dict())
self.ag_critic = Ag_Critic(
obs_shape, action_shape, hidden_dim, encoder_type,
encoder_feature_dim, num_layers, num_filters
).to(device)
# tie encoders between actor and critic
self.actor.encoder.copy_conv_weights_from(self.critic.encoder)
self.zero_alpha = no_entropy
self.lts_ratio = lts_ratio
self.q1 = q1
self.log_alpha = torch.tensor(np.log(init_temperature)).to(device)
self.log_alpha.requires_grad = True
# set target entropy to -|A|
self.target_entropy = -np.prod(action_shape)
# optimizers
self.actor_optimizer = torch.optim.Adam(
self.actor.parameters(), lr=actor_lr, betas=(actor_beta, 0.999)
)
self.critic_optimizer = torch.optim.Adam(
self.critic.parameters(), lr=critic_lr, betas=(critic_beta, 0.999)
)
self.ag_critic_optimizer = torch.optim.Adam(
self.ag_critic.parameters(), lr=critic_lr, betas=(critic_beta, 0.999)
)
self.log_alpha_optimizer = torch.optim.Adam(
[self.log_alpha], lr=alpha_lr, betas=(alpha_beta, 0.999)
)
self.train()
self.critic_target.train()
def train(self, training=True):
self.training = training
self.actor.train(training)
self.critic.train(training)
self.ag_critic.train(training)
@property
def alpha(self):
if self.zero_alpha:
return self.log_alpha.exp() * 0
else:
return self.log_alpha.exp()
def set_zero_alpha(self):
self.zero_alpha = True
def warm_start_from(self,expert):
self.critic.encoder.duplicate_conv_weights_from(expert.critic.encoder)
def select_action(self, obs):
with torch.no_grad():
obs = torch.FloatTensor(obs).to(self.device)
obs = obs.unsqueeze(0)
mu, _, _, _ = self.actor(
obs, compute_pi=False, compute_log_pi=False
)
return mu.cpu().data.numpy().flatten()
def sample_action(self, obs):
with torch.no_grad():
obs = torch.FloatTensor(obs).to(self.device)
obs = obs.unsqueeze(0)
mu, pi, _, _ = self.actor(obs, compute_log_pi=False)
return pi.cpu().data.numpy().flatten()
def update_critic(self, bc_agent, expert, obs, action, reward, next_obs, not_done, L, step):
with torch.no_grad():
_, policy_action, log_pi, _ = self.actor(next_obs)
target_Q1, target_Q2 = self.critic_target(next_obs, policy_action)
target_V = torch.min(target_Q1,target_Q2) - self.alpha.detach() * log_pi
            #target_V = bc_agent.value_net(next_obs)
            target_Q = reward + (not_done * self.discount * target_V)
# get current Q estimates
#current_Q1, current_Q2 = self.critic(obs, action)
current_Q1, current_Q2 = self.critic(obs, action, detach_encoder=False)
critic_loss = F.mse_loss(current_Q1,
target_Q) + F.mse_loss(current_Q2, target_Q)
L.log('train_critic/loss', critic_loss, step)
# Optimize the critic
self.critic_optimizer.zero_grad()
critic_loss.backward()
self.critic_optimizer.step()
self.critic.log(L, step)
def update_ag_critic(self, bc_agent, expert, obs, action, reward, next_obs, not_done, L, step):
with torch.no_grad():
target_V1 = torch.zeros(len(obs),1).to(self.device)
target_V2 = torch.zeros(len(obs),1).to(self.device)
for i in range(10):
_, policy_action, log_pi, _ = expert.actor(next_obs)
target_Q1, target_Q2 = expert.critic(next_obs, policy_action)
target_V1 = target_V1 + target_Q1 - expert.alpha.detach() * log_pi
target_V2 = target_V2 + target_Q2 - expert.alpha.detach() * log_pi
target_V = torch.min(target_V1, target_V2) / 10
#target_V = bc_agent.value_net(next_obs)
target_Q = reward + (not_done * self.discount * target_V)
# get current Q estimates
#current_Q1, current_Q2 = self.critic(obs, action)
current_Q1 = self.ag_critic(obs, action, detach_encoder=False)
critic_loss = F.mse_loss(current_Q1,target_Q)
#L.log('train_critic/loss', critic_loss, step)
# Optimize the critic
self.ag_critic_optimizer.zero_grad()
critic_loss.backward()
self.ag_critic_optimizer.step()
self.ag_critic.log(L, step)
def update_actor_and_alpha(self, obs, L, step, ratio = -1):
# detach encoder, so we don't update it with the actor loss
_, pi, log_pi, log_std = self.actor(obs, detach_encoder=True)
actor_Q1, actor_Q2 = self.critic(obs, pi, detach_encoder=True)
ag_Q = self.ag_critic(obs, pi, detach_encoder=True)
if self.q1:
actor_Q = actor_Q1
else:
actor_Q = torch.min(actor_Q1, actor_Q2)
#ag_Q = torch.min(ag_Q1,ag_Q2)
'''
if ratio >= 0:
Q = ratio * actor_Q + (1-ratio) * ag_Q
actor_loss = (self.alpha.detach() * log_pi - Q).mean()
else:
actor_loss = (self.alpha.detach() * log_pi - ag_Q).mean()
'''
if np.random.rand() > self.lts_ratio:
actor_loss = (self.alpha.detach() * log_pi - actor_Q).mean()
else:
actor_loss = (self.alpha.detach() * log_pi - ag_Q).mean()
#Q = 0.2 * actor_Q + 0.8 * ag_Q
#actor_loss = (self.alpha.detach() * log_pi - Q).mean()
#actor_loss = (self.alpha.detach() * log_pi - ag_Q).mean()
L.log('train_actor/loss', actor_loss, step)
L.log('train_actor/target_entropy', self.target_entropy, step)
#entropy = 0.5 * log_std.shape[1] * (1.0 + np.log(2 * np.pi)
# ) + log_std.sum(dim=-1)
#L.log('train_actor/entropy', entropy.mean(), step)
# optimize the actor
self.actor_optimizer.zero_grad()
actor_loss.backward()
self.actor_optimizer.step()
self.actor.log(L, step)
#self.log_alpha_optimizer.zero_grad()
#alpha_loss = (self.alpha *
# (-log_pi - self.target_entropy).detach()).mean()
#L.log('train_alpha/loss', alpha_loss, step)
L.log('train_alpha/value', self.alpha, step)
#alpha_loss.backward()
#self.log_alpha_optimizer.step()
def update(self, replay_buffer, bc_agent, expert, L, step, total_steps=0):
obs, state, action, reward, next_obs, next_state, not_done = replay_buffer.sample()
L.log('train/batch_reward', reward.mean(), step)
if self.encoder_type == 'identity':
self.update_critic(bc_agent, expert, state, action, reward, next_state, not_done, L, step)
self.update_ag_critic(bc_agent, expert, state, action, reward, next_state, not_done, L, step)
else:
self.update_critic(bc_agent, expert, obs, action, reward, next_obs, not_done, L, step)
self.update_ag_critic(bc_agent, expert, obs, action, reward, next_obs, not_done, L, step)
if step % self.actor_update_freq == 0:
if total_steps != 0:
q_ratio = step / total_steps
else:
q_ratio = -1
if self.encoder_type == 'identity':
self.update_actor_and_alpha(state, L, step, ratio = q_ratio)
else:
self.update_actor_and_alpha(obs, L, step, ratio = q_ratio)
if step % self.critic_target_update_freq == 0:
utils.soft_update_params(
self.critic.Q1, self.critic_target.Q1, self.critic_tau
)
utils.soft_update_params(
self.critic.Q2, self.critic_target.Q2, self.critic_tau
)
utils.soft_update_params(
self.critic.encoder, self.critic_target.encoder,
self.encoder_tau
)
def save(self, model_dir, step):
torch.save(
self.actor.state_dict(), '%s/actor_%s.pt' % (model_dir, step)
)
torch.save(
self.critic.state_dict(), '%s/critic_%s.pt' % (model_dir, step)
)
torch.save(
self.ag_critic.state_dict(), '%s/ag_critic_%s.pt' % (model_dir, step)
)
def load(self, model_dir, step, no_entropy=False, post_step=199999):
self.actor.load_state_dict(
torch.load('%s/actor_%s.pt' % (model_dir, step), map_location=self.device)
)
if no_entropy:
self.critic.load_state_dict(
torch.load('%s/post_critic_%s.pt' % (model_dir, post_step), map_location=self.device)
)
#self.critic_target.load_state_dict(
# torch.load('%s/post_critic_target_%s.pt' % (model_dir, post_step), map_location=self.device)
#)
self.critic_target.load_state_dict(self.critic.state_dict())
self.ag_critic.load_state_dict(
torch.load('%s/post_critic_%s.pt' % (model_dir, post_step), map_location=self.device)
)
else:
self.critic.load_state_dict(
torch.load('%s/critic_%s.pt' % (model_dir, step), map_location=self.device)
)
#self.critic_target.load_state_dict(
# torch.load('%s/post_critic_target_%s.pt' % (model_dir, post_step), map_location=self.device)
#)
self.critic_target.load_state_dict(self.critic.state_dict())
self.ag_critic.load_state_dict(
torch.load('%s/critic_%s.pt' % (model_dir, step), map_location=self.device)
)
#self.actor.encoder.copy_conv_weights_from(self.critic.encoder)
self.log_alpha.data.copy_(torch.log(torch.load('%s/alpha_%s.pt' % (model_dir, step), map_location=self.device)))
|
var callbackArguments = [];
var argument1 = function() {
callbackArguments.push(arguments)
return undefined; };
var argument2 = false;
var argument3 = {"213":"","9.655439531837537e+307":"","1.4558911257947117e+308":1.4400994143766396e+308,"C2":627,"":1.4451062225792429e+308,",":"^VrZ"};
var argument4 = function() {
callbackArguments.push(arguments)
return undefined; };
var argument5 = function() {
callbackArguments.push(arguments)
return -32.198798293614736; };
var argument6 = r_0;
var argument7 = function() {
callbackArguments.push(arguments)
return false; };
var argument8 = {"1.7733320229902411e+308":8.470017191379436e+307,"Kv":"bjn","1.0414621755576014e+308":8.405250956246933e+307,"[t":"K'"};
var argument9 = ["{",969,"a",655,"Vma",5e-324,"[v",-1,627];
var base_0 = [627,893,1.7976931348623157e+308,705,655,403,655]
var r_0= undefined
try {
r_0 = base_0.reduce(argument1,argument2,argument3)
}
catch(e) {
r_0= "Error"
}
var base_1 = [627,893,1.7976931348623157e+308,705,655,403,655]
var r_1= undefined
try {
r_1 = base_1.reduce(argument4)
}
catch(e) {
r_1= "Error"
}
var base_2 = [627,893,1.7976931348623157e+308,705,655,403,655]
var r_2= undefined
try {
r_2 = base_2.reduce(argument5,argument6)
}
catch(e) {
r_2= "Error"
}
var base_3 = [627,893,1.7976931348623157e+308,705,655,403,655]
var r_3= undefined
try {
r_3 = base_3.reduce(argument7,argument8,argument9)
}
catch(e) {
r_3= "Error"
}
function serialize(array){
return array.map(function(a){
if (a === null || a == undefined) return a;
var name = a.constructor.name;
if (name==='Object' || name=='Boolean'|| name=='Array'||name=='Number'||name=='String')
return JSON.stringify(a);
return name;
});
}
setTimeout(function(){
require("fs").writeFileSync("./experiments/reduce/reduceQC/test480.json",JSON.stringify({"baseObjects":serialize([base_0,base_1,base_2,base_3]),"returnObjects":serialize([r_0,r_1,r_2,r_3]),"callbackArgs":callbackArguments}))
},300) |
import os
from config import config as config_
config = config_.GetConfig(os.path.join('config', 'runner.config'))
import datetime
from data import city_visit
from data import point
start_end_coordinates = point.Coordinates(40.763582, -73.988470)
day1 = city_visit.DayVisitParameters(
start_datetime=datetime.datetime(2019, 7, 1, 10, 0, 0),
end_datetime=datetime.datetime(2019, 7, 1, 19, 0, 0),
lunch_start_datetime=datetime.datetime(2019, 7, 1, 14, 0, 0),
lunch_hours=1.,
start_coordinates=start_end_coordinates,
end_coordinates=start_end_coordinates)
day2 = city_visit.DayVisitParameters(
start_datetime=datetime.datetime(2019, 7, 2, 10, 0, 0),
end_datetime=datetime.datetime(2019, 7, 2, 17, 0, 0),
lunch_start_datetime=datetime.datetime(2019, 7, 1, 14, 0, 0),
lunch_hours=1.,
start_coordinates=start_end_coordinates,
end_coordinates=start_end_coordinates)
from data import city_visit
from data import point
visit_location = city_visit.VisitLocation('New York City')
parameters_point_types = point.PointType(
city_tours=90,
landmarks=90,
nature=10,
museums=10,
shopping=50,
dining=50)
parameters_age_groups = point.AgeGroup(
senior=None,
adult=90,
junior=None,
child=None,
toddlers=10)
city_visit_parameters = city_visit.CityVisitParameters(
visit_location=visit_location,
day_visit_parameterss=[day1, day2],
point_type=parameters_point_types,
age_group=parameters_age_groups)
from data import test_util as point_test_util
points_input = list(point_test_util.GetPointsInput('data', 'test_nyc_1.csv').values())
from config import config as config_
city_visit_finder = config_.GetCityVisitFinder(config)
city_visit_accumulator_generator = config_.GetCityVisitAccumulatorGenerator(config)
city_visit = city_visit_finder.FindCityVisit(points_input, city_visit_parameters, city_visit_accumulator_generator)
print('Your schedule:')
print(city_visit)
|
// Copyright The OpenTelemetry Authors
// SPDX-License-Identifier: Apache-2.0
#pragma once
#include <algorithm>
#include <atomic>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <map>
#include <memory>
#include <mutex>
#include <sstream>
#include <vector>
#include "opentelemetry/nostd/shared_ptr.h"
#include "opentelemetry/nostd/string_view.h"
#include "opentelemetry/nostd/unique_ptr.h"
#include "opentelemetry/nostd/variant.h"
#include "opentelemetry/common/key_value_iterable_view.h"
#include "opentelemetry/trace/span.h"
#include "opentelemetry/trace/span_context_kv_iterable_view.h"
#include "opentelemetry/trace/span_id.h"
#include "opentelemetry/trace/trace_id.h"
#include "opentelemetry/trace/tracer_provider.h"
#include "opentelemetry/sdk/trace/exporter.h"
#include "opentelemetry/exporters/etw/etw_config.h"
#include "opentelemetry/exporters/etw/etw_fields.h"
#include "opentelemetry/exporters/etw/etw_properties.h"
#include "opentelemetry/exporters/etw/etw_provider.h"
#include "opentelemetry/exporters/etw/utils.h"
OPENTELEMETRY_BEGIN_NAMESPACE
namespace exporter
{
namespace etw
{
class Span;
/**
 * @brief Template that allows instantiating a new Span object for the header-only forward-declared
* etw::Span type
*
* @tparam SpanType Expected to be etw::Span
* @tparam TracerType expected to be etw::Tracer
* @param objPtr Pointer to parent
* @param name Span Name
* @param options Span Options
* @return Span instance
*/
template <class SpanType, class TracerType>
SpanType *new_span(TracerType *objPtr,
nostd::string_view name,
const opentelemetry::trace::StartSpanOptions &options)
{
return new (std::nothrow) SpanType{*objPtr, name, options};
}
/**
 * @brief Template that allows converting an etw::Span pointer to a smart shared pointer to
* `opentelemetry::trace::Span`
* @tparam SpanType Expected to be etw::Span
* @param ptr Pointer to etw::Span
* @return Smart shared pointer to `opentelemetry::trace::Span`
*/
template <class SpanType>
nostd::shared_ptr<opentelemetry::trace::Span> to_span_ptr(SpanType *ptr)
{
return nostd::shared_ptr<opentelemetry::trace::Span>{ptr};
}
class TracerProvider;
/**
* @brief Utility template for obtaining Span Name
* @tparam T etw::Span
* @param t instance of etw::Span
* @return Span Name
*/
template <class T>
std::string GetName(T &t)
{
auto sV = t.GetName();
return std::string(sV.data(), sV.length());
}
/**
* @brief Utility template to obtain Span start time
* @tparam T etw::Span
* @param t instance of etw::Span
* @return Span Start timestamp
*/
template <class T>
common::SystemTimestamp GetStartTime(T &t)
{
return t.GetStartTime();
}
/**
* @brief Utility template to obtain Span end time
* @tparam T etw::Span
* @param t instance of etw::Span
* @return Span Stop timestamp
*/
template <class T>
common::SystemTimestamp GetEndTime(T &t)
{
return t.GetEndTime();
}
class Properties;
/**
* @brief Utility template to store Attributes on Span
* @tparam T etw::Span
* @param instance instance of etw::Span
* @param t Properties to store as Attributes
*/
template <class T>
void SetSpanAttributes(T &instance, Properties &t)
{
instance.SetAttributes(t);
}
/**
* @brief Utility template to obtain Span Attributes
* @tparam T etw::Span
* @param instance instance of etw::Span
* @return ref to Span Attributes
*/
template <class T>
Properties &GetSpanAttributes(T &instance)
{
return instance.GetAttributes();
}
template <class T>
void UpdateStatus(T &t, Properties &props)
{
t.UpdateStatus(props);
}
/**
 * @brief Tracer class that allows sending spans to the ETW Provider.
*/
class Tracer : public opentelemetry::trace::Tracer
{
/**
* @brief Parent provider of this Tracer
*/
etw::TracerProvider &tracerProvider_;
/**
* @brief ProviderId (Name or GUID)
*/
std::string provId;
/**
* @brief Encoding (Manifest, MessagePack or XML)
*/
ETWProvider::EventFormat encoding;
/**
* @brief Provider Handle
*/
ETWProvider::Handle &provHandle;
opentelemetry::trace::TraceId traceId_;
std::atomic<bool> isClosed_{true};
/**
* @brief ETWProvider is a singleton that aggregates all ETW writes.
* @return
*/
static ETWProvider &etwProvider()
{
static ETWProvider instance; // C++11 magic static
return instance;
}
/**
 * @brief Internal method that populates Links to other Spans.
* Span links are in hexadecimal representation, comma-separated in their
* order of appearance.
*
* @param attributes
* @param links
*/
virtual void DecorateLinks(Properties &attributes,
const opentelemetry::trace::SpanContextKeyValueIterable &links) const
{
// Add `SpanLinks` attribute if the list is not empty
if (links.size())
{
size_t idx = 0;
std::string linksValue;
links.ForEachKeyValue(
[&](opentelemetry::trace::SpanContext ctx, const common::KeyValueIterable &) {
            if (!linksValue.empty())
            {
              linksValue += ',';
            }
            linksValue += ToLowerBase16(ctx.span_id());
idx++;
return true;
});
attributes[ETW_FIELD_SPAN_LINKS] = linksValue;
}
}
/**
* @brief Allow our friendly etw::Span to end itself on Tracer.
* @param span
* @param
*/
virtual void EndSpan(const Span &span,
const opentelemetry::trace::Span *parentSpan = nullptr,
const opentelemetry::trace::EndSpanOptions & = {})
{
const auto &cfg = GetConfiguration(tracerProvider_);
const opentelemetry::trace::Span &spanBase =
reinterpret_cast<const opentelemetry::trace::Span &>(span);
auto spanContext = spanBase.GetContext();
// Populate Span with presaved attributes
    Span &currentSpan = const_cast<Span &>(span);
Properties evt = GetSpanAttributes(currentSpan);
evt[ETW_FIELD_NAME] = GetName(span);
if (cfg.enableSpanId)
{
evt[ETW_FIELD_SPAN_ID] = ToLowerBase16(spanContext.span_id());
}
if (cfg.enableTraceId)
{
evt[ETW_FIELD_TRACE_ID] = ToLowerBase16(spanContext.trace_id());
}
// Populate ActivityId if enabled
GUID ActivityId;
LPGUID ActivityIdPtr = nullptr;
if (cfg.enableActivityId)
{
if (CopySpanIdToActivityId(spanBase.GetContext().span_id(), ActivityId))
{
ActivityIdPtr = &ActivityId;
}
}
// Populate RelatedActivityId if enabled
GUID RelatedActivityId;
LPGUID RelatedActivityIdPtr = nullptr;
if (cfg.enableRelatedActivityId)
{
if (parentSpan != nullptr)
{
if (CopySpanIdToActivityId(parentSpan->GetContext().span_id(), RelatedActivityId))
{
RelatedActivityIdPtr = &RelatedActivityId;
}
}
}
if (cfg.enableActivityTracking)
{
// TODO: check what EndSpanOptions should be supported for this exporter.
// The only option available currently (end_steady_time) does not apply.
//
// This event on Span Stop enables generation of "non-transactional"
// OpCode=Stop in alignment with TraceLogging Activity "EventSource"
// spec.
etwProvider().write(provHandle, evt, ActivityIdPtr, RelatedActivityIdPtr, 2, encoding);
}
{
// Now since the span has ended, we need to emit the "Span" event that
// contains the entire span information, attributes, time, etc. on it.
evt[ETW_FIELD_NAME] = ETW_VALUE_SPAN;
evt[ETW_FIELD_PAYLOAD_NAME] = GetName(span);
// Add timing details in ISO8601 format, which adequately represents
// the actual time, taking Timezone into consideration. This is NOT
// local time, but rather UTC time (Z=0).
std::chrono::system_clock::time_point startTime = GetStartTime(currentSpan);
std::chrono::system_clock::time_point endTime = GetEndTime(currentSpan);
int64_t startTimeMs =
std::chrono::duration_cast<std::chrono::milliseconds>(startTime.time_since_epoch())
.count();
int64_t endTimeMs =
std::chrono::duration_cast<std::chrono::milliseconds>(endTime.time_since_epoch()).count();
// It may be more optimal to enable passing timestamps as UTC milliseconds
// since Unix epoch instead of string, but that implies additional tooling
// is needed to convert it, rendering it NOT human-readable.
evt[ETW_FIELD_STARTTIME] = utils::formatUtcTimestampMsAsISO8601(startTimeMs);
#ifdef ETW_FIELD_ENDTTIME
// ETW has its own precise timestamp at envelope layer for every event.
// However, in some scenarios it is easier to deal with ISO8601 strings.
// In that case we convert the app-created timestamp and place it into
// Payload[$ETW_FIELD_TIME] field. The option configurable at compile-time.
evt[ETW_FIELD_ENDTTIME] = utils::formatUtcTimestampMsAsISO8601(endTimeMs);
#endif
// Duration of Span in milliseconds
evt[ETW_FIELD_DURATION] = endTimeMs - startTimeMs;
// Presently we assume that all spans are server spans
evt[ETW_FIELD_SPAN_KIND] = uint32_t(opentelemetry::trace::SpanKind::kServer);
UpdateStatus(currentSpan, evt);
etwProvider().write(provHandle, evt, ActivityIdPtr, RelatedActivityIdPtr, 0, encoding);
}
}
const opentelemetry::trace::TraceId &trace_id() { return traceId_; }
friend class Span;
/**
* @brief Init a reference to etw::ProviderHandle
* @return Provider Handle
*/
ETWProvider::Handle &initProvHandle()
{
isClosed_ = false;
return etwProvider().open(provId, encoding);
}
public:
/**
* @brief Tracer constructor
* @param parent Parent TraceProvider
* @param providerId ProviderId - Name or GUID
* @param encoding ETW encoding format to use.
*/
Tracer(etw::TracerProvider &parent,
nostd::string_view providerId = "",
ETWProvider::EventFormat encoding = ETWProvider::EventFormat::ETW_MANIFEST)
: opentelemetry::trace::Tracer(),
tracerProvider_(parent),
provId(providerId.data(), providerId.size()),
encoding(encoding),
provHandle(initProvHandle())
{
// Generate random GUID
GUID trace_id;
CoCreateGuid(&trace_id);
// Populate TraceId of the Tracer with the above GUID
const auto *traceIdPtr = reinterpret_cast<const uint8_t *>(std::addressof(trace_id));
nostd::span<const uint8_t, opentelemetry::trace::TraceId::kSize> traceIdBytes(
traceIdPtr, traceIdPtr + opentelemetry::trace::TraceId::kSize);
traceId_ = opentelemetry::trace::TraceId(traceIdBytes);
}
/**
* @brief Start Span
* @param name Span name
* @param attributes Span attributes
* @param links Span links
* @param options Span options
* @return
*/
nostd::shared_ptr<opentelemetry::trace::Span> StartSpan(
nostd::string_view name,
const common::KeyValueIterable &attributes,
const opentelemetry::trace::SpanContextKeyValueIterable &links,
const opentelemetry::trace::StartSpanOptions &options = {}) noexcept override
{
#ifdef OPENTELEMETRY_RTTI_ENABLED
common::KeyValueIterable &attribs = const_cast<common::KeyValueIterable &>(attributes);
Properties *evt = dynamic_cast<Properties *>(&attribs);
if (evt != nullptr)
{
      // Pass as a reference to the original modifiable collection without creating a copy
return StartSpan(name, *evt, links, options);
}
#endif
Properties evtCopy = attributes;
return StartSpan(name, evtCopy, links, options);
}
/**
* @brief Start Span
* @param name Span name
* @param attributes Span attributes
* @param links Span links
* @param options Span options
* @return
*/
virtual nostd::shared_ptr<opentelemetry::trace::Span> StartSpan(
nostd::string_view name,
Properties &evt,
const opentelemetry::trace::SpanContextKeyValueIterable &links,
const opentelemetry::trace::StartSpanOptions &options = {}) noexcept
{
const auto &cfg = GetConfiguration(tracerProvider_);
// Parent Context:
// - either use current span
// - or attach to parent SpanContext specified in options
opentelemetry::trace::SpanContext parentContext = GetCurrentSpan()->GetContext();
if (nostd::holds_alternative<opentelemetry::trace::SpanContext>(options.parent))
{
auto span_context = nostd::get<opentelemetry::trace::SpanContext>(options.parent);
if (span_context.IsValid())
{
parentContext = span_context;
}
}
// Populate Etw.RelatedActivityId at envelope level if enabled
GUID RelatedActivityId;
LPCGUID RelatedActivityIdPtr = nullptr;
if (cfg.enableAutoParent)
{
if (cfg.enableRelatedActivityId)
{
if (CopySpanIdToActivityId(parentContext.span_id(), RelatedActivityId))
{
RelatedActivityIdPtr = &RelatedActivityId;
}
}
}
// This template pattern allows us to forward-declare the etw::Span,
// create an instance of it, then assign it to tracer::Span result.
auto currentSpan = new_span<Span, Tracer>(this, name, options);
nostd::shared_ptr<opentelemetry::trace::Span> result = to_span_ptr<Span>(currentSpan);
auto spanContext = result->GetContext();
// Decorate with additional standard fields
std::string eventName = name.data();
// Populate Etw.EventName attribute at envelope level
evt[ETW_FIELD_NAME] = eventName;
// Populate Payload["SpanId"] attribute
// Populate Payload["ParentSpanId"] attribute if parent Span is valid
if (cfg.enableSpanId)
{
if (parentContext.IsValid())
{
evt[ETW_FIELD_SPAN_PARENTID] = ToLowerBase16(parentContext.span_id());
}
evt[ETW_FIELD_SPAN_ID] = ToLowerBase16(spanContext.span_id());
}
// Populate Etw.Payload["TraceId"] attribute
if (cfg.enableTraceId)
{
evt[ETW_FIELD_TRACE_ID] = ToLowerBase16(spanContext.trace_id());
}
// Populate Etw.ActivityId at envelope level if enabled
GUID ActivityId;
LPCGUID ActivityIdPtr = nullptr;
if (cfg.enableActivityId)
{
if (CopySpanIdToActivityId(result.get()->GetContext().span_id(), ActivityId))
{
ActivityIdPtr = &ActivityId;
}
}
// Links
DecorateLinks(evt, links);
// Remember Span attributes to be passed down to ETW on Span end
SetSpanAttributes(*currentSpan, evt);
if (cfg.enableActivityTracking)
{
// TODO: add support for options that are presently ignored :
// - options.kind
// - options.start_steady_time
// - options.start_system_time
etwProvider().write(provHandle, evt, ActivityIdPtr, RelatedActivityIdPtr, 1, encoding);
}
return result;
}
/**
* @brief Force flush data to Tracer, spending up to given amount of microseconds to flush.
* NOTE: this method has no effect for the realtime streaming Tracer.
*
* @param timeout Allow Tracer to drop data if timeout is reached
* @return
*/
void ForceFlushWithMicroseconds(uint64_t) noexcept override {}
/**
* @brief Close tracer, spending up to given amount of microseconds to flush and close.
* NOTE: This method decrements the reference count on current ETW Provider Handle and
* closes it if reference count on that provider handle is zero.
*
* @param timeout Allow Tracer to drop data if timeout is reached.
* @return
*/
void CloseWithMicroseconds(uint64_t) noexcept override
{
// Close once only
if (!isClosed_.exchange(true))
{
etwProvider().close(provHandle);
}
}
/**
* @brief Add event data to span associated with tracer.
* @param span Parent span.
* @param name Event name.
* @param timestamp Event timestamp.
* @param attributes Event attributes.
* @return
*/
void AddEvent(opentelemetry::trace::Span &span,
nostd::string_view name,
common::SystemTimestamp timestamp,
const common::KeyValueIterable &attributes) noexcept
{
#ifdef OPENTELEMETRY_RTTI_ENABLED
common::KeyValueIterable &attribs = const_cast<common::KeyValueIterable &>(attributes);
Properties *evt = dynamic_cast<Properties *>(&attribs);
if (evt != nullptr)
{
      // Pass as a reference to the original modifiable collection without creating a copy
return AddEvent(span, name, timestamp, *evt);
}
#endif
// Pass a copy converted to Properties object on stack
Properties evtCopy = attributes;
return AddEvent(span, name, timestamp, evtCopy);
}
/**
* @brief Add event data to span associated with tracer.
* @param span Parent span.
* @param name Event name.
* @param timestamp Event timestamp.
* @param attributes Event attributes.
* @return
*/
void AddEvent(opentelemetry::trace::Span &span,
nostd::string_view name,
common::SystemTimestamp timestamp,
Properties &evt) noexcept
{
// TODO: respect originating timestamp. Do we need to reserve
// a special 'Timestamp' field or is it an overkill? The delta
// between when `AddEvent` API is called and when ETW layer
// timestamp is appended is nanos- to micros-, thus handling
// the explicitly provided timestamp is only necessary in case
// if a process wants to submit back-dated or future-dated
// timestamp. Unless there is a strong ask from any ETW customer
// to have it, this feature (custom timestamp) remains unimplemented.
(void)timestamp;
const auto &cfg = GetConfiguration(tracerProvider_);
evt[ETW_FIELD_NAME] = name.data();
const auto &spanContext = span.GetContext();
if (cfg.enableSpanId)
{
evt[ETW_FIELD_SPAN_ID] = ToLowerBase16(spanContext.span_id());
}
if (cfg.enableTraceId)
{
evt[ETW_FIELD_TRACE_ID] = ToLowerBase16(spanContext.trace_id());
}
LPGUID ActivityIdPtr = nullptr;
GUID ActivityId;
if (cfg.enableActivityId)
{
if (CopySpanIdToActivityId(spanContext.span_id(), ActivityId))
{
ActivityIdPtr = &ActivityId;
}
}
#ifdef HAVE_FIELD_TIME
{
auto timeNow = std::chrono::system_clock::now().time_since_epoch();
auto millis = std::chrono::duration_cast<std::chrono::milliseconds>(timeNow).count();
evt[ETW_FIELD_TIME] = utils::formatUtcTimestampMsAsISO8601(millis);
}
#endif
etwProvider().write(provHandle, evt, ActivityIdPtr, nullptr, 0, encoding);
}
/**
* @brief Add event data to span associated with tracer.
* @param span Span.
* @param name Event name.
* @param timestamp Event timestamp.
* @return
*/
void AddEvent(opentelemetry::trace::Span &span,
nostd::string_view name,
common::SystemTimestamp timestamp) noexcept
{
AddEvent(span, name, timestamp, sdk::GetEmptyAttributes());
}
/**
* @brief Add event data to span associated with tracer.
* @param span Span.
* @param name Event name.
*/
void AddEvent(opentelemetry::trace::Span &span, nostd::string_view name)
{
AddEvent(span, name, std::chrono::system_clock::now(), sdk::GetEmptyAttributes());
}
/**
* @brief Tracer destructor.
*/
virtual ~Tracer() { CloseWithMicroseconds(0); }
};
/**
* @brief etw::Span allows to send event data to ETW listener.
*/
class Span : public opentelemetry::trace::Span
{
protected:
friend class Tracer;
/**
* @brief Span properties are attached on "Span" event on end of Span.
*/
Properties attributes_;
common::SystemTimestamp start_time_;
common::SystemTimestamp end_time_;
opentelemetry::trace::StatusCode status_code_{opentelemetry::trace::StatusCode::kUnset};
std::string status_description_;
/**
* @brief Owner Tracer of this Span
*/
Tracer &owner_;
/**
* @brief Span name.
*/
nostd::string_view name_;
/**
* @brief Attribute indicating that the span has ended.
*/
std::atomic<bool> has_ended_{false};
/**
* @brief Attribute indicating that the span has started.
*/
std::atomic<bool> has_started_{false};
/**
* @brief Parent Span of this nested Span (optional)
*/
Span *parent_{nullptr};
/**
* @brief Get Parent Span of this nested Span.
* @return Pointer to Parent or nullptr if no Parent.
*/
Span *GetParent() const { return parent_; }
opentelemetry::trace::SpanContext context_;
const opentelemetry::trace::SpanContext CreateContext()
{
GUID activity_id;
// Generate random GUID
CoCreateGuid(&activity_id);
const auto *activityIdPtr = reinterpret_cast<const uint8_t *>(std::addressof(activity_id));
// Populate SpanId with that GUID
nostd::span<const uint8_t, opentelemetry::trace::SpanId::kSize> spanIdBytes(
activityIdPtr, activityIdPtr + opentelemetry::trace::SpanId::kSize);
const opentelemetry::trace::SpanId spanId(spanIdBytes);
// Inherit trace_id from Tracer
const opentelemetry::trace::TraceId traceId{owner_.trace_id()};
// TODO: TraceFlags are not supported by ETW exporter.
const opentelemetry::trace::TraceFlags flags{0};
// TODO: Remote parent is not supported by ETW exporter.
const bool hasRemoteParent = false;
return opentelemetry::trace::SpanContext{traceId, spanId, flags, hasRemoteParent};
}
public:
/**
* @brief Update Properties object with current Span status
* @param evt
*/
void UpdateStatus(Properties &evt)
{
/* Should we avoid populating this extra field if status is unset? */
if ((status_code_ == opentelemetry::trace::StatusCode::kUnset) ||
(status_code_ == opentelemetry::trace::StatusCode::kOk))
{
evt[ETW_FIELD_SUCCESS] = "True";
evt[ETW_FIELD_STATUSCODE] = uint32_t(status_code_);
evt[ETW_FIELD_STATUSMESSAGE] = status_description_;
}
else
{
evt[ETW_FIELD_SUCCESS] = "False";
evt[ETW_FIELD_STATUSCODE] = uint32_t(status_code_);
evt[ETW_FIELD_STATUSMESSAGE] = status_description_;
}
}
/**
* @brief Get start time of this Span.
* @return
*/
common::SystemTimestamp GetStartTime() { return start_time_; }
/**
* @brief Get end time of this Span.
* @return
*/
common::SystemTimestamp GetEndTime() { return end_time_; }
/**
* @brief Get Span Name.
* @return Span Name.
*/
nostd::string_view GetName() const { return name_; }
/**
* @brief Span constructor
* @param owner Owner Tracer
* @param name Span name
* @param options Span options
* @param parent Parent Span (optional)
* @return
*/
Span(Tracer &owner,
nostd::string_view name,
const opentelemetry::trace::StartSpanOptions &options,
Span *parent = nullptr) noexcept
: opentelemetry::trace::Span(),
owner_(owner),
parent_(parent),
context_(CreateContext()),
start_time_(std::chrono::system_clock::now())
{
name_ = name;
UNREFERENCED_PARAMETER(options);
}
/**
* @brief Span Destructor
*/
~Span() { End(); }
/**
* @brief Add named event with no attributes.
* @param name Event name.
* @return
*/
void AddEvent(nostd::string_view name) noexcept override { owner_.AddEvent(*this, name); }
/**
* @brief Add named event with custom timestamp.
* @param name
* @param timestamp
* @return
*/
void AddEvent(nostd::string_view name, common::SystemTimestamp timestamp) noexcept override
{
owner_.AddEvent(*this, name, timestamp);
}
/**
* @brief Add named event with custom timestamp and attributes.
* @param name Event name.
* @param timestamp Event timestamp.
* @param attributes Event attributes.
* @return
*/
void AddEvent(nostd::string_view name,
common::SystemTimestamp timestamp,
const common::KeyValueIterable &attributes) noexcept override
{
owner_.AddEvent(*this, name, timestamp, attributes);
}
/**
* @brief Set Span status
* @param code Span status code.
* @param description Span description.
* @return
*/
void SetStatus(opentelemetry::trace::StatusCode code,
nostd::string_view description) noexcept override
{
status_code_ = code;
status_description_ = description.data();
}
void SetAttributes(Properties attributes) { attributes_ = attributes; }
/**
* @brief Obtain span attributes specified at Span start.
* NOTE: please consider that this method is NOT thread-safe.
*
* @return ref to Properties collection
*/
Properties &GetAttributes() { return attributes_; }
/**
* @brief Sets an attribute on the Span. If the Span previously contained a mapping
* for the key, the old value is replaced.
*
* @param key
* @param value
* @return
*/
void SetAttribute(nostd::string_view key, const common::AttributeValue &value) noexcept override
{
// don't override fields propagated from span data.
if (key == ETW_FIELD_NAME || key == ETW_FIELD_SPAN_ID || key == ETW_FIELD_TRACE_ID)
{
return;
}
attributes_[std::string{key}].FromAttributeValue(value);
}
/**
* @brief Update Span name.
*
* NOTE: this method is a no-op for streaming implementation.
* We cannot change the Span name after it started streaming.
*
* @param name
* @return
*/
void UpdateName(nostd::string_view) noexcept override
{
// We can't do that!
// name_ = name;
}
/**
* @brief End Span.
* @param EndSpanOptions
* @return
*/
void End(const opentelemetry::trace::EndSpanOptions &options = {}) noexcept override
{
end_time_ = std::chrono::system_clock::now();
if (!has_ended_.exchange(true))
{
owner_.EndSpan(*this, parent_, options);
}
}
/**
* @brief Obtain SpanContext
* @return
*/
opentelemetry::trace::SpanContext GetContext() const noexcept override { return context_; }
/**
* @brief Check if Span is recording data.
* @return
*/
bool IsRecording() const noexcept override
{
// For streaming implementation this should return the state of ETW Listener.
// In certain unprivileged environments, ex. containers, it is impossible
// to determine if a listener is registered. Thus, we always return true.
return true;
}
virtual void SetToken(nostd::unique_ptr<context::Token> &&token) noexcept
{
// TODO: not implemented
UNREFERENCED_PARAMETER(token);
}
/// <summary>
/// Get Owner tracer of this Span
/// </summary>
/// <returns></returns>
opentelemetry::trace::Tracer &tracer() const noexcept { return this->owner_; }
};
/**
* @brief ETW TracerProvider
*/
class TracerProvider : public opentelemetry::trace::TracerProvider
{
public:
/**
* @brief TracerProvider options supplied during initialization.
*/
TelemetryProviderConfiguration config_;
/**
* @brief Construct instance of TracerProvider with given options
* @param options Configuration options
*/
TracerProvider(TelemetryProviderOptions options) : opentelemetry::trace::TracerProvider()
{
// By default we ensure that all events carry their TraceId and SpanId
GetOption(options, "enableTraceId", config_.enableTraceId, true);
GetOption(options, "enableSpanId", config_.enableSpanId, true);
// Backwards-compatibility option that allows reusing the ETW-specific parenting described here:
// https://docs.microsoft.com/en-us/uwp/api/windows.foundation.diagnostics.loggingoptions.relatedactivityid
// https://docs.microsoft.com/en-us/windows/win32/api/evntprov/nf-evntprov-eventwritetransfer
// Emit separate events compatible with TraceLogging Activity/Start and Activity/Stop
// format for every Span emitted.
GetOption(options, "enableActivityTracking", config_.enableActivityTracking, false);
// Map current `SpanId` to ActivityId - GUID that uniquely identifies this activity. If NULL,
// ETW gets the identifier from the thread local storage. For details on getting this
// identifier, see EventActivityIdControl.
GetOption(options, "enableActivityId", config_.enableActivityId, false);
// Map parent `SpanId` to RelatedActivityId - Activity identifier from the previous
// component. Use this parameter to link your component's events to the previous component's
// events.
GetOption(options, "enableRelatedActivityId", config_.enableRelatedActivityId, false);
// When a new Span is started, the current span automatically becomes its parent.
GetOption(options, "enableAutoParent", config_.enableAutoParent, false);
// Determines what encoding to use for ETW events: TraceLogging Dynamic, MsgPack, XML, etc.
config_.encoding = GetEncoding(options);
}
TracerProvider() : opentelemetry::trace::TracerProvider()
{
config_.enableTraceId = true;
config_.enableSpanId = true;
config_.enableActivityId = false;
config_.enableActivityTracking = false;
config_.enableRelatedActivityId = false;
config_.enableAutoParent = false;
config_.encoding = ETWProvider::EventFormat::ETW_MANIFEST;
}
/**
* @brief Obtain ETW Tracer.
* @param name ProviderId (instrumentation name) - Name or GUID
*
* @param args Additional arguments that controls `codec` of the provider.
* Possible values are:
* - "ETW" - 'classic' Trace Logging Dynamic manifest ETW events.
* - "MSGPACK" - MessagePack-encoded binary payload ETW events.
* - "XML" - XML events (reserved for future use)
* @return
*/
nostd::shared_ptr<opentelemetry::trace::Tracer> GetTracer(
nostd::string_view name,
nostd::string_view args = "",
nostd::string_view schema_url = "") override
{
UNREFERENCED_PARAMETER(args);
UNREFERENCED_PARAMETER(schema_url);
ETWProvider::EventFormat evtFmt = config_.encoding;
return nostd::shared_ptr<opentelemetry::trace::Tracer>{new (std::nothrow)
Tracer(*this, name, evtFmt)};
}
};
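/*
 * Illustrative usage sketch (not part of the exporter itself; the provider name
 * "MyProviderName" and the span/event names below are hypothetical):
 *
 *   exporter::etw::TracerProvider tp;
 *   auto tracer = tp.GetTracer("MyProviderName", "ETW");
 *   auto span   = tracer->StartSpan("MySpan");
 *   span->SetAttribute("key", "value");
 *   span->AddEvent("MyEvent");
 *   span->End();
 */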
} // namespace etw
} // namespace exporter
OPENTELEMETRY_END_NAMESPACE
|
import 'bootstrap/js/src/tooltip'
import $ from 'jquery'
import { DateTime } from 'luxon'
import { observeRemoveJq } from './mutation-observer'
window.DateTime = DateTime
class TimeActualizer {
constructor(options) {
this.options = options
this.updateTime()
this.instanceMap = new Map()
this.strings = {
today: this.now.toRelativeCalendar(),
yDay: this.now.minus({ days: 1 }).toRelativeCalendar(),
y2Day: this.now.minus({ days: 2 }).toRelativeCalendar(),
}
this.intervalId = setInterval(::this.updateInstances, 60000)
this.fullFormat = {
year: 'numeric',
// weekday: 'long',
month: 'long',
day: 'numeric',
hour: 'numeric',
minute: 'numeric',
// second: 'numeric',
}
}
onRemoveElement(domNode) {
this.instanceMap.delete(domNode)
}
updateTime() {
this.now = DateTime.local().setLocale(this.options.languageInfo.code)
this.isoToday = this.now.toISODate()
this.isoYesterday = this.now.minus({ days: 1 }).toISODate()
this.isoDayBeforeYesterday = this.now.minus({ days: 2 }).toISODate()
}
updateOneInstance(data) {
const { jqElement, luxonInstance } = data
const isoDate = luxonInstance.toISODate()
if (isoDate === this.isoToday) {
const formattedTime = luxonInstance.toLocaleString(DateTime.TIME_SIMPLE)
jqElement.text(`${this.strings.today}, ${formattedTime}`)
} else if (isoDate === this.isoYesterday) {
const formattedTime = luxonInstance.toLocaleString(DateTime.TIME_SIMPLE)
jqElement.text(`${this.strings.yDay}, ${formattedTime}`)
} else if (isoDate === this.isoDayBeforeYesterday) {
const formattedTime = luxonInstance.toLocaleString(DateTime.TIME_SIMPLE)
jqElement.text(`${this.strings.y2Day}, ${formattedTime}`)
} else {
jqElement.text(luxonInstance.toLocaleString(this.fullFormat))
}
jqElement.attr('data-original-title', luxonInstance.toRelative({
style: 'long',
}))
}
updateInstances(newInstancesMap) {
this.updateTime()
const instanceMap = newInstancesMap || this.instanceMap
for (const data of instanceMap.values()) this.updateOneInstance(data)
}
add(jqTimeElements) {
const newInstancesMap = new Map()
for (const domNode of jqTimeElements) {
const jqElement = $(domNode)
const luxonInstance = DateTime.fromISO(jqElement.attr('datetime'))
if (!this.instanceMap.has(domNode)) {
this.instanceMap.set(domNode, { jqElement, luxonInstance })
newInstancesMap.set(domNode, { jqElement, luxonInstance })
jqElement.tooltip()
}
}
this.updateInstances(newInstancesMap)
observeRemoveJq(jqTimeElements, ::this.onRemoveElement)
}
}
let timeActualizer
export function init(options) {
timeActualizer = new TimeActualizer(options)
}
export function add(jqTimeElements) {
$.when($.ready).then(() => {
timeActualizer.add(jqTimeElements)
})
}
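// Illustrative usage sketch (the selector and languageInfo value are hypothetical;
// init() must be called before add()):
//   init({ languageInfo: { code: 'en' } })
//   add($('time[datetime]'))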
|
// Copyright (c) 2016, KomputerKult Research Labs
//
// Author: Shen Noether <[email protected]>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are
// permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of
// conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list
// of conditions and the following disclaimer in the documentation and/or other
// materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be
// used to endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#pragma once
#ifndef RCT_TYPES_H
#define RCT_TYPES_H
#include <cstddef>
#include <vector>
#include <iostream>
#include <cinttypes>
#include <sodium/crypto_verify_32.h>
extern "C" {
#include "crypto/crypto-ops.h"
#include "crypto/random.h"
#include "crypto/keccak.h"
}
#include "crypto/generic-ops.h"
#include "crypto/crypto.h"
#include "hex.h"
#include "span.h"
#include "memwipe.h"
#include "serialization/containers.h"
#include "serialization/debug_archive.h"
#include "serialization/binary_archive.h"
#include "serialization/json_archive.h"
//Define this flag when debugging to get additional info on the console
#ifdef DBG
#define DP(x) dp(x)
#else
#define DP(x)
#endif
//atomic units of komputerkoins
#define ATOMS 64
//for printing large ints
//Namespace specifically for ring ct code
namespace rct {
//basic ops containers
typedef unsigned char * Bytes;
// Can contain a secret or public key
// similar to secret_key / public_key of crypto-ops,
// but uses unsigned chars,
// also includes an operator for accessing the i'th byte.
struct key {
unsigned char & operator[](int i) {
return bytes[i];
}
unsigned char operator[](int i) const {
return bytes[i];
}
bool operator==(const key &k) const { return !crypto_verify_32(bytes, k.bytes); }
unsigned char bytes[32];
};
typedef std::vector<key> keyV; //vector of keys
typedef std::vector<keyV> keyM; //matrix of keys (indexed by column first)
//containers For CT operations
//if it's representing a private ctkey then "dest" contains the secret key of the address
// while "mask" contains a, where C = aG + bH is the CT Pedersen commitment and b is the amount
// (b, the amount, is stored separately)
//if it's representing a public ctkey, then "dest" = P the address, "mask" = C the commitment
struct ctkey {
key dest;
key mask; //C here if public
};
typedef std::vector<ctkey> ctkeyV;
typedef std::vector<ctkeyV> ctkeyM;
//used for multisig data
struct multisig_kLRki {
key k;
key L;
key R;
key ki;
~multisig_kLRki() { memwipe(&k, sizeof(k)); }
};
struct multisig_out {
std::vector<key> c; // for all inputs
std::vector<key> mu_p; // for all inputs
std::vector<key> c0; // for all inputs
BEGIN_SERIALIZE_OBJECT()
FIELD(c)
FIELD(mu_p)
if (!mu_p.empty() && mu_p.size() != c.size())
return false;
END_SERIALIZE()
};
//data for passing the amount to the receiver secretly
// If the Pedersen commitment to an amount is C = aG + bH,
// "mask" contains the 32 byte key a
// "amount" contains a hex representation (in 32 bytes) of a 64 bit number
// passing these to the receiver secretly is the purpose of the ECDH exchange
struct ecdhTuple {
key mask;
key amount;
BEGIN_SERIALIZE_OBJECT()
FIELD(mask) // not saved from v2 BPs
FIELD(amount)
END_SERIALIZE()
};
//containers for representing amounts
typedef uint64_t xmr_amount;
typedef unsigned int bits[ATOMS];
typedef key key64[64];
struct boroSig {
key64 s0;
key64 s1;
key ee;
};
//Container for precomp
struct geDsmp {
ge_dsmp k;
};
//just contains the necessary keys to represent MLSAG sigs
//c.f. https://eprint.iacr.org/2015/1098
struct mgSig {
keyM ss;
key cc;
keyV II;
BEGIN_SERIALIZE_OBJECT()
FIELD(ss)
FIELD(cc)
// FIELD(II) - not serialized, it can be reconstructed
END_SERIALIZE()
};
// CLSAG signature
struct clsag {
keyV s; // scalars
key c1;
key I; // signing key image
key D; // commitment key image
BEGIN_SERIALIZE_OBJECT()
FIELD(s)
FIELD(c1)
// FIELD(I) - not serialized, it can be reconstructed
FIELD(D)
END_SERIALIZE()
};
//contains the data for a Borromean sig
// also contains the "Ci" values such that
// \sum Ci = C
// and the signature proves that each Ci is either
// a Pedersen commitment to 0 or to 2^i
//thus proving that C is in the range of [0, 2^64]
struct rangeSig {
boroSig asig;
key64 Ci;
BEGIN_SERIALIZE_OBJECT()
FIELD(asig)
FIELD(Ci)
END_SERIALIZE()
};
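// Worked sketch: to commit to b = 5 = 2^0 + 2^2, pick masks a_i with sum(a_i) = a and set
// Ci = a_i*G + 2^i*H for i in {0, 2}, Ci = a_i*G otherwise; then sum(Ci) = a*G + 5*H = C,
// while no individual Ci reveals whether its bit of the amount is set.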
struct Bulletproof
{
rct::keyV V;
rct::key A, S, T1, T2;
rct::key taux, mu;
rct::keyV L, R;
rct::key a, b, t;
Bulletproof():
A({}), S({}), T1({}), T2({}), taux({}), mu({}), a({}), b({}), t({}) {}
Bulletproof(const rct::key &V, const rct::key &A, const rct::key &S, const rct::key &T1, const rct::key &T2, const rct::key &taux, const rct::key &mu, const rct::keyV &L, const rct::keyV &R, const rct::key &a, const rct::key &b, const rct::key &t):
V({V}), A(A), S(S), T1(T1), T2(T2), taux(taux), mu(mu), L(L), R(R), a(a), b(b), t(t) {}
Bulletproof(const rct::keyV &V, const rct::key &A, const rct::key &S, const rct::key &T1, const rct::key &T2, const rct::key &taux, const rct::key &mu, const rct::keyV &L, const rct::keyV &R, const rct::key &a, const rct::key &b, const rct::key &t):
V(V), A(A), S(S), T1(T1), T2(T2), taux(taux), mu(mu), L(L), R(R), a(a), b(b), t(t) {}
bool operator==(const Bulletproof &other) const { return V == other.V && A == other.A && S == other.S && T1 == other.T1 && T2 == other.T2 && taux == other.taux && mu == other.mu && L == other.L && R == other.R && a == other.a && b == other.b && t == other.t; }
BEGIN_SERIALIZE_OBJECT()
// Commitments aren't saved, they're restored via outPk
// FIELD(V)
FIELD(A)
FIELD(S)
FIELD(T1)
FIELD(T2)
FIELD(taux)
FIELD(mu)
FIELD(L)
FIELD(R)
FIELD(a)
FIELD(b)
FIELD(t)
if (L.empty() || L.size() != R.size())
return false;
END_SERIALIZE()
};
size_t n_bulletproof_amounts(const Bulletproof &proof);
size_t n_bulletproof_max_amounts(const Bulletproof &proof);
size_t n_bulletproof_amounts(const std::vector<Bulletproof> &proofs);
size_t n_bulletproof_max_amounts(const std::vector<Bulletproof> &proofs);
//A container to hold all signatures necessary for RingCT
// rangeSigs holds all the rangeproof data of a transaction
// MG holds the MLSAG signature of a transaction
// mixRing holds all the public keypairs (P, C) for a transaction
// ecdhInfo holds an encoded mask / amount to be passed to each receiver
// outPk contains public keypairs which are destinations (P, C),
// P = address, C = commitment to amount
enum {
RCTTypeNull = 0,
RCTTypeFull = 1,
RCTTypeSimple = 2,
RCTTypeBulletproof = 3,
RCTTypeBulletproof2 = 4,
RCTTypeCLSAG = 5,
};
enum RangeProofType { RangeProofBorromean, RangeProofBulletproof, RangeProofMultiOutputBulletproof, RangeProofPaddedBulletproof };
struct RCTConfig {
RangeProofType range_proof_type;
int bp_version;
BEGIN_SERIALIZE_OBJECT()
VERSION_FIELD(0)
VARINT_FIELD(range_proof_type)
VARINT_FIELD(bp_version)
END_SERIALIZE()
};
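// Illustrative sketch (the values are an example, not part of the original code):
//   RCTConfig cfg{ RangeProofPaddedBulletproof, /* bp_version */ 2 };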
struct rctSigBase {
uint8_t type;
key message;
ctkeyM mixRing; //the set of all pubkeys / copy
//pairs that you mix with
keyV pseudoOuts; //C - for simple rct
std::vector<ecdhTuple> ecdhInfo;
ctkeyV outPk;
xmr_amount txnFee; // contains b
template<bool W, template <bool> class Archive>
bool serialize_rctsig_base(Archive<W> &ar, size_t inputs, size_t outputs)
{
FIELD(type)
if (type == RCTTypeNull)
return ar.stream().good();
if (type != RCTTypeFull && type != RCTTypeSimple && type != RCTTypeBulletproof && type != RCTTypeBulletproof2 && type != RCTTypeCLSAG)
return false;
VARINT_FIELD(txnFee)
// inputs/outputs not saved, only here for serialization help
// FIELD(message) - not serialized, it can be reconstructed
// FIELD(mixRing) - not serialized, it can be reconstructed
if (type == RCTTypeSimple) // moved to prunable with bulletproofs
{
ar.tag("pseudoOuts");
ar.begin_array();
PREPARE_CUSTOM_VECTOR_SERIALIZATION(inputs, pseudoOuts);
if (pseudoOuts.size() != inputs)
return false;
for (size_t i = 0; i < inputs; ++i)
{
FIELDS(pseudoOuts[i])
if (inputs - i > 1)
ar.delimit_array();
}
ar.end_array();
}
ar.tag("ecdhInfo");
ar.begin_array();
PREPARE_CUSTOM_VECTOR_SERIALIZATION(outputs, ecdhInfo);
if (ecdhInfo.size() != outputs)
return false;
for (size_t i = 0; i < outputs; ++i)
{
if (type == RCTTypeBulletproof2 || type == RCTTypeCLSAG)
{
ar.begin_object();
if (!typename Archive<W>::is_saving())
memset(ecdhInfo[i].amount.bytes, 0, sizeof(ecdhInfo[i].amount.bytes));
crypto::hash8 &amount = (crypto::hash8&)ecdhInfo[i].amount;
FIELD(amount);
ar.end_object();
}
else
{
FIELDS(ecdhInfo[i])
}
if (outputs - i > 1)
ar.delimit_array();
}
ar.end_array();
ar.tag("outPk");
ar.begin_array();
PREPARE_CUSTOM_VECTOR_SERIALIZATION(outputs, outPk);
if (outPk.size() != outputs)
return false;
for (size_t i = 0; i < outputs; ++i)
{
FIELDS(outPk[i].mask)
if (outputs - i > 1)
ar.delimit_array();
}
ar.end_array();
return ar.stream().good();
}
BEGIN_SERIALIZE_OBJECT()
FIELD(type)
FIELD(message)
FIELD(mixRing)
FIELD(pseudoOuts)
FIELD(ecdhInfo)
FIELD(outPk)
VARINT_FIELD(txnFee)
END_SERIALIZE()
};
struct rctSigPrunable {
std::vector<rangeSig> rangeSigs;
std::vector<Bulletproof> bulletproofs;
std::vector<mgSig> MGs; // simple rct has N, full has 1
std::vector<clsag> CLSAGs;
keyV pseudoOuts; //C - for simple rct
// when changing this function, update cryptonote::get_pruned_transaction_weight
template<bool W, template <bool> class Archive>
bool serialize_rctsig_prunable(Archive<W> &ar, uint8_t type, size_t inputs, size_t outputs, size_t mixin)
{
if (inputs >= 0xffffffff)
return false;
if (outputs >= 0xffffffff)
return false;
if (mixin >= 0xffffffff)
return false;
if (type == RCTTypeNull)
return ar.stream().good();
if (type != RCTTypeFull && type != RCTTypeSimple && type != RCTTypeBulletproof && type != RCTTypeBulletproof2 && type != RCTTypeCLSAG)
return false;
if (type == RCTTypeBulletproof || type == RCTTypeBulletproof2 || type == RCTTypeCLSAG)
{
uint32_t nbp = bulletproofs.size();
if (type == RCTTypeBulletproof2 || type == RCTTypeCLSAG)
VARINT_FIELD(nbp)
else
FIELD(nbp)
ar.tag("bp");
ar.begin_array();
if (nbp > outputs)
return false;
PREPARE_CUSTOM_VECTOR_SERIALIZATION(nbp, bulletproofs);
for (size_t i = 0; i < nbp; ++i)
{
FIELDS(bulletproofs[i])
if (nbp - i > 1)
ar.delimit_array();
}
if (n_bulletproof_max_amounts(bulletproofs) < outputs)
return false;
ar.end_array();
}
else
{
ar.tag("rangeSigs");
ar.begin_array();
PREPARE_CUSTOM_VECTOR_SERIALIZATION(outputs, rangeSigs);
if (rangeSigs.size() != outputs)
return false;
for (size_t i = 0; i < outputs; ++i)
{
FIELDS(rangeSigs[i])
if (outputs - i > 1)
ar.delimit_array();
}
ar.end_array();
}
if (type == RCTTypeCLSAG)
{
ar.tag("CLSAGs");
ar.begin_array();
PREPARE_CUSTOM_VECTOR_SERIALIZATION(inputs, CLSAGs);
if (CLSAGs.size() != inputs)
return false;
for (size_t i = 0; i < inputs; ++i)
{
// we save the CLSAGs contents directly, because we want it to save its
// arrays without the size prefixes, and the load can't know what size
// to expect if it's not in the data
ar.begin_object();
ar.tag("s");
ar.begin_array();
PREPARE_CUSTOM_VECTOR_SERIALIZATION(mixin + 1, CLSAGs[i].s);
if (CLSAGs[i].s.size() != mixin + 1)
return false;
for (size_t j = 0; j <= mixin; ++j)
{
FIELDS(CLSAGs[i].s[j])
if (mixin + 1 - j > 1)
ar.delimit_array();
}
ar.end_array();
ar.tag("c1");
FIELDS(CLSAGs[i].c1)
// CLSAGs[i].I not saved, it can be reconstructed
ar.tag("D");
FIELDS(CLSAGs[i].D)
ar.end_object();
if (inputs - i > 1)
ar.delimit_array();
}
ar.end_array();
}
else
{
ar.tag("MGs");
ar.begin_array();
// we keep a byte for size of MGs, because we don't know whether this is
// a simple or full rct signature, and it's starting to annoy the hell out of me
size_t mg_elements = (type == RCTTypeSimple || type == RCTTypeBulletproof || type == RCTTypeBulletproof2) ? inputs : 1;
PREPARE_CUSTOM_VECTOR_SERIALIZATION(mg_elements, MGs);
if (MGs.size() != mg_elements)
return false;
for (size_t i = 0; i < mg_elements; ++i)
{
// we save the MGs contents directly, because we want it to save its
// arrays and matrices without the size prefixes, and the load can't
// know what size to expect if it's not in the data
ar.begin_object();
ar.tag("ss");
ar.begin_array();
PREPARE_CUSTOM_VECTOR_SERIALIZATION(mixin + 1, MGs[i].ss);
if (MGs[i].ss.size() != mixin + 1)
return false;
for (size_t j = 0; j < mixin + 1; ++j)
{
ar.begin_array();
size_t mg_ss2_elements = ((type == RCTTypeSimple || type == RCTTypeBulletproof || type == RCTTypeBulletproof2) ? 1 : inputs) + 1;
PREPARE_CUSTOM_VECTOR_SERIALIZATION(mg_ss2_elements, MGs[i].ss[j]);
if (MGs[i].ss[j].size() != mg_ss2_elements)
return false;
for (size_t k = 0; k < mg_ss2_elements; ++k)
{
FIELDS(MGs[i].ss[j][k])
if (mg_ss2_elements - k > 1)
ar.delimit_array();
}
ar.end_array();
if (mixin + 1 - j > 1)
ar.delimit_array();
}
ar.end_array();
ar.tag("cc");
FIELDS(MGs[i].cc)
// MGs[i].II not saved, it can be reconstructed
ar.end_object();
if (mg_elements - i > 1)
ar.delimit_array();
}
ar.end_array();
}
if (type == RCTTypeBulletproof || type == RCTTypeBulletproof2 || type == RCTTypeCLSAG)
{
ar.tag("pseudoOuts");
ar.begin_array();
PREPARE_CUSTOM_VECTOR_SERIALIZATION(inputs, pseudoOuts);
if (pseudoOuts.size() != inputs)
return false;
for (size_t i = 0; i < inputs; ++i)
{
FIELDS(pseudoOuts[i])
if (inputs - i > 1)
ar.delimit_array();
}
ar.end_array();
}
return ar.stream().good();
}
BEGIN_SERIALIZE_OBJECT()
FIELD(rangeSigs)
FIELD(bulletproofs)
FIELD(MGs)
FIELD(CLSAGs)
FIELD(pseudoOuts)
END_SERIALIZE()
};
struct rctSig: public rctSigBase {
rctSigPrunable p;
keyV& get_pseudo_outs()
{
return type == RCTTypeBulletproof || type == RCTTypeBulletproof2 || type == RCTTypeCLSAG ? p.pseudoOuts : pseudoOuts;
}
keyV const& get_pseudo_outs() const
{
return type == RCTTypeBulletproof || type == RCTTypeBulletproof2 || type == RCTTypeCLSAG ? p.pseudoOuts : pseudoOuts;
}
BEGIN_SERIALIZE_OBJECT()
FIELDS((rctSigBase&)*this)
FIELD(p)
END_SERIALIZE()
};
//other basepoint H = toPoint(cn_fast_hash(G)), G the basepoint
static const key H = { {0x8b, 0x65, 0x59, 0x70, 0x15, 0x37, 0x99, 0xaf, 0x2a, 0xea, 0xdc, 0x9f, 0xf1, 0xad, 0xd0, 0xea, 0x6c, 0x72, 0x51, 0xd5, 0x41, 0x54, 0xcf, 0xa9, 0x2c, 0x17, 0x3a, 0x0d, 0xd3, 0x9c, 0x1f, 0x94} };
//H2 contains 2^i H in each index, i.e. H, 2H, 4H, 8H, ...
//This is used for the range proof
//You can regenerate this by running python2 Test.py HPow2 in the MiniNero repo
static const key64 H2 = {{{0x8b, 0x65, 0x59, 0x70, 0x15, 0x37, 0x99, 0xaf, 0x2a, 0xea, 0xdc, 0x9f, 0xf1, 0xad, 0xd0, 0xea, 0x6c, 0x72, 0x51, 0xd5, 0x41, 0x54, 0xcf, 0xa9, 0x2c, 0x17, 0x3a, 0x0d, 0xd3, 0x9c, 0x1f, 0x94}},
{{0x8f, 0xaa, 0x44, 0x8a, 0xe4, 0xb3, 0xe2, 0xbb, 0x3d, 0x4d, 0x13, 0x09, 0x09, 0xf5, 0x5f, 0xcd, 0x79, 0x71, 0x1c, 0x1c, 0x83, 0xcd, 0xbc, 0xca, 0xdd, 0x42, 0xcb, 0xe1, 0x51, 0x5e, 0x87, 0x12}},
{{0x12, 0xa7, 0xd6, 0x2c, 0x77, 0x91, 0x65, 0x4a, 0x57, 0xf3, 0xe6, 0x76, 0x94, 0xed, 0x50, 0xb4, 0x9a, 0x7d, 0x9e, 0x3f, 0xc1, 0xe4, 0xc7, 0xa0, 0xbd, 0xe2, 0x9d, 0x18, 0x7e, 0x9c, 0xc7, 0x1d}},
{{0x78, 0x9a, 0xb9, 0x93, 0x4b, 0x49, 0xc4, 0xf9, 0xe6, 0x78, 0x5c, 0x6d, 0x57, 0xa4, 0x98, 0xb3, 0xea, 0xd4, 0x43, 0xf0, 0x4f, 0x13, 0xdf, 0x11, 0x0c, 0x54, 0x27, 0xb4, 0xf2, 0x14, 0xc7, 0x39}},
{{0x77, 0x1e, 0x92, 0x99, 0xd9, 0x4f, 0x02, 0xac, 0x72, 0xe3, 0x8e, 0x44, 0xde, 0x56, 0x8a, 0xc1, 0xdc, 0xb2, 0xed, 0xc6, 0xed, 0xb6, 0x1f, 0x83, 0xca, 0x41, 0x8e, 0x10, 0x77, 0xce, 0x3d, 0xe8}},
{{0x73, 0xb9, 0x6d, 0xb4, 0x30, 0x39, 0x81, 0x9b, 0xda, 0xf5, 0x68, 0x0e, 0x5c, 0x32, 0xd7, 0x41, 0x48, 0x88, 0x84, 0xd1, 0x8d, 0x93, 0x86, 0x6d, 0x40, 0x74, 0xa8, 0x49, 0x18, 0x2a, 0x8a, 0x64}},
{{0x8d, 0x45, 0x8e, 0x1c, 0x2f, 0x68, 0xeb, 0xeb, 0xcc, 0xd2, 0xfd, 0x5d, 0x37, 0x9f, 0x5e, 0x58, 0xf8, 0x13, 0x4d, 0xf3, 0xe0, 0xe8, 0x8c, 0xad, 0x3d, 0x46, 0x70, 0x10, 0x63, 0xa8, 0xd4, 0x12}},
{{0x09, 0x55, 0x1e, 0xdb, 0xe4, 0x94, 0x41, 0x8e, 0x81, 0x28, 0x44, 0x55, 0xd6, 0x4b, 0x35, 0xee, 0x8a, 0xc0, 0x93, 0x06, 0x8a, 0x5f, 0x16, 0x1f, 0xa6, 0x63, 0x75, 0x59, 0x17, 0x7e, 0xf4, 0x04}},
{{0xd0, 0x5a, 0x88, 0x66, 0xf4, 0xdf, 0x8c, 0xee, 0x1e, 0x26, 0x8b, 0x1d, 0x23, 0xa4, 0xc5, 0x8c, 0x92, 0xe7, 0x60, 0x30, 0x97, 0x86, 0xcd, 0xac, 0x0f, 0xed, 0xa1, 0xd2, 0x47, 0xa9, 0xc9, 0xa7}},
{{0x55, 0xcd, 0xaa, 0xd5, 0x18, 0xbd, 0x87, 0x1d, 0xd1, 0xeb, 0x7b, 0xc7, 0x02, 0x3e, 0x1d, 0xc0, 0xfd, 0xf3, 0x33, 0x98, 0x64, 0xf8, 0x8f, 0xdd, 0x2d, 0xe2, 0x69, 0xfe, 0x9e, 0xe1, 0x83, 0x2d}},
{{0xe7, 0x69, 0x7e, 0x95, 0x1a, 0x98, 0xcf, 0xd5, 0x71, 0x2b, 0x84, 0xbb, 0xe5, 0xf3, 0x4e, 0xd7, 0x33, 0xe9, 0x47, 0x3f, 0xcb, 0x68, 0xed, 0xa6, 0x6e, 0x37, 0x88, 0xdf, 0x19, 0x58, 0xc3, 0x06}},
{{0xf9, 0x2a, 0x97, 0x0b, 0xae, 0x72, 0x78, 0x29, 0x89, 0xbf, 0xc8, 0x3a, 0xdf, 0xaa, 0x92, 0xa4, 0xf4, 0x9c, 0x7e, 0x95, 0x91, 0x8b, 0x3b, 0xba, 0x3c, 0xdc, 0x7f, 0xe8, 0x8a, 0xcc, 0x8d, 0x47}},
{{0x1f, 0x66, 0xc2, 0xd4, 0x91, 0xd7, 0x5a, 0xf9, 0x15, 0xc8, 0xdb, 0x6a, 0x6d, 0x1c, 0xb0, 0xcd, 0x4f, 0x7d, 0xdc, 0xd5, 0xe6, 0x3d, 0x3b, 0xa9, 0xb8, 0x3c, 0x86, 0x6c, 0x39, 0xef, 0x3a, 0x2b}},
{{0x3e, 0xec, 0x98, 0x84, 0xb4, 0x3f, 0x58, 0xe9, 0x3e, 0xf8, 0xde, 0xea, 0x26, 0x00, 0x04, 0xef, 0xea, 0x2a, 0x46, 0x34, 0x4f, 0xc5, 0x96, 0x5b, 0x1a, 0x7d, 0xd5, 0xd1, 0x89, 0x97, 0xef, 0xa7}},
{{0xb2, 0x9f, 0x8f, 0x0c, 0xcb, 0x96, 0x97, 0x7f, 0xe7, 0x77, 0xd4, 0x89, 0xd6, 0xbe, 0x9e, 0x7e, 0xbc, 0x19, 0xc4, 0x09, 0xb5, 0x10, 0x35, 0x68, 0xf2, 0x77, 0x61, 0x1d, 0x7e, 0xa8, 0x48, 0x94}},
{{0x56, 0xb1, 0xf5, 0x12, 0x65, 0xb9, 0x55, 0x98, 0x76, 0xd5, 0x8d, 0x24, 0x9d, 0x0c, 0x14, 0x6d, 0x69, 0xa1, 0x03, 0x63, 0x66, 0x99, 0x87, 0x4d, 0x3f, 0x90, 0x47, 0x35, 0x50, 0xfe, 0x3f, 0x2c}},
{{0x1d, 0x7a, 0x36, 0x57, 0x5e, 0x22, 0xf5, 0xd1, 0x39, 0xff, 0x9c, 0xc5, 0x10, 0xfa, 0x13, 0x85, 0x05, 0x57, 0x6b, 0x63, 0x81, 0x5a, 0x94, 0xe4, 0xb0, 0x12, 0xbf, 0xd4, 0x57, 0xca, 0xaa, 0xda}},
{{0xd0, 0xac, 0x50, 0x7a, 0x86, 0x4e, 0xcd, 0x05, 0x93, 0xfa, 0x67, 0xbe, 0x7d, 0x23, 0x13, 0x43, 0x92, 0xd0, 0x0e, 0x40, 0x07, 0xe2, 0x53, 0x48, 0x78, 0xd9, 0xb2, 0x42, 0xe1, 0x0d, 0x76, 0x20}},
{{0xf6, 0xc6, 0x84, 0x0b, 0x9c, 0xf1, 0x45, 0xbb, 0x2d, 0xcc, 0xf8, 0x6e, 0x94, 0x0b, 0xe0, 0xfc, 0x09, 0x8e, 0x32, 0xe3, 0x10, 0x99, 0xd5, 0x6f, 0x7f, 0xe0, 0x87, 0xbd, 0x5d, 0xeb, 0x50, 0x94}},
{{0x28, 0x83, 0x1a, 0x33, 0x40, 0x07, 0x0e, 0xb1, 0xdb, 0x87, 0xc1, 0x2e, 0x05, 0x98, 0x0d, 0x5f, 0x33, 0xe9, 0xef, 0x90, 0xf8, 0x3a, 0x48, 0x17, 0xc9, 0xf4, 0xa0, 0xa3, 0x32, 0x27, 0xe1, 0x97}},
{{0x87, 0x63, 0x22, 0x73, 0xd6, 0x29, 0xcc, 0xb7, 0xe1, 0xed, 0x1a, 0x76, 0x8f, 0xa2, 0xeb, 0xd5, 0x17, 0x60, 0xf3, 0x2e, 0x1c, 0x0b, 0x86, 0x7a, 0x5d, 0x36, 0x8d, 0x52, 0x71, 0x05, 0x5c, 0x6e}},
{{0x5c, 0x7b, 0x29, 0x42, 0x43, 0x47, 0x96, 0x4d, 0x04, 0x27, 0x55, 0x17, 0xc5, 0xae, 0x14, 0xb6, 0xb5, 0xea, 0x27, 0x98, 0xb5, 0x73, 0xfc, 0x94, 0xe6, 0xe4, 0x4a, 0x53, 0x21, 0x60, 0x0c, 0xfb}},
{{0xe6, 0x94, 0x50, 0x42, 0xd7, 0x8b, 0xc2, 0xc3, 0xbd, 0x6e, 0xc5, 0x8c, 0x51, 0x1a, 0x9f, 0xe8, 0x59, 0xc0, 0xad, 0x63, 0xfd, 0xe4, 0x94, 0xf5, 0x03, 0x9e, 0x0e, 0x82, 0x32, 0x61, 0x2b, 0xd5}},
{{0x36, 0xd5, 0x69, 0x07, 0xe2, 0xec, 0x74, 0x5d, 0xb6, 0xe5, 0x4f, 0x0b, 0x2e, 0x1b, 0x23, 0x00, 0xab, 0xcb, 0x42, 0x2e, 0x71, 0x2d, 0xa5, 0x88, 0xa4, 0x0d, 0x3f, 0x1e, 0xbb, 0xbe, 0x02, 0xf6}},
{{0x34, 0xdb, 0x6e, 0xe4, 0xd0, 0x60, 0x8e, 0x5f, 0x78, 0x36, 0x50, 0x49, 0x5a, 0x3b, 0x2f, 0x52, 0x73, 0xc5, 0x13, 0x4e, 0x52, 0x84, 0xe4, 0xfd, 0xf9, 0x66, 0x27, 0xbb, 0x16, 0xe3, 0x1e, 0x6b}},
{{0x8e, 0x76, 0x59, 0xfb, 0x45, 0xa3, 0x78, 0x7d, 0x67, 0x4a, 0xe8, 0x67, 0x31, 0xfa, 0xa2, 0x53, 0x8e, 0xc0, 0xfd, 0xf4, 0x42, 0xab, 0x26, 0xe9, 0xc7, 0x91, 0xfa, 0xda, 0x08, 0x94, 0x67, 0xe9}},
{{0x30, 0x06, 0xcf, 0x19, 0x8b, 0x24, 0xf3, 0x1b, 0xb4, 0xc7, 0xe6, 0x34, 0x60, 0x00, 0xab, 0xc7, 0x01, 0xe8, 0x27, 0xcf, 0xbb, 0x5d, 0xf5, 0x2d, 0xcf, 0xa4, 0x2e, 0x9c, 0xa9, 0xff, 0x08, 0x02}},
{{0xf5, 0xfd, 0x40, 0x3c, 0xb6, 0xe8, 0xbe, 0x21, 0x47, 0x2e, 0x37, 0x7f, 0xfd, 0x80, 0x5a, 0x8c, 0x60, 0x83, 0xea, 0x48, 0x03, 0xb8, 0x48, 0x53, 0x89, 0xcc, 0x3e, 0xbc, 0x21, 0x5f, 0x00, 0x2a}},
{{0x37, 0x31, 0xb2, 0x60, 0xeb, 0x3f, 0x94, 0x82, 0xe4, 0x5f, 0x1c, 0x3f, 0x3b, 0x9d, 0xcf, 0x83, 0x4b, 0x75, 0xe6, 0xee, 0xf8, 0xc4, 0x0f, 0x46, 0x1e, 0xa2, 0x7e, 0x8b, 0x6e, 0xd9, 0x47, 0x3d}},
{{0x9f, 0x9d, 0xab, 0x09, 0xc3, 0xf5, 0xe4, 0x28, 0x55, 0xc2, 0xde, 0x97, 0x1b, 0x65, 0x93, 0x28, 0xa2, 0xdb, 0xc4, 0x54, 0x84, 0x5f, 0x39, 0x6f, 0xfc, 0x05, 0x3f, 0x0b, 0xb1, 0x92, 0xf8, 0xc3}},
{{0x5e, 0x05, 0x5d, 0x25, 0xf8, 0x5f, 0xdb, 0x98, 0xf2, 0x73, 0xe4, 0xaf, 0xe0, 0x84, 0x64, 0xc0, 0x03, 0xb7, 0x0f, 0x1e, 0xf0, 0x67, 0x7b, 0xb5, 0xe2, 0x57, 0x06, 0x40, 0x0b, 0xe6, 0x20, 0xa5}},
{{0x86, 0x8b, 0xcf, 0x36, 0x79, 0xcb, 0x6b, 0x50, 0x0b, 0x94, 0x41, 0x8c, 0x0b, 0x89, 0x25, 0xf9, 0x86, 0x55, 0x30, 0x30, 0x3a, 0xe4, 0xe4, 0xb2, 0x62, 0x59, 0x18, 0x65, 0x66, 0x6a, 0x45, 0x90}},
{{0xb3, 0xdb, 0x6b, 0xd3, 0x89, 0x7a, 0xfb, 0xd1, 0xdf, 0x3f, 0x96, 0x44, 0xab, 0x21, 0xc8, 0x05, 0x0e, 0x1f, 0x00, 0x38, 0xa5, 0x2f, 0x7c, 0xa9, 0x5a, 0xc0, 0xc3, 0xde, 0x75, 0x58, 0xcb, 0x7a}},
{{0x81, 0x19, 0xb3, 0xa0, 0x59, 0xff, 0x2c, 0xac, 0x48, 0x3e, 0x69, 0xbc, 0xd4, 0x1d, 0x6d, 0x27, 0x14, 0x94, 0x47, 0x91, 0x42, 0x88, 0xbb, 0xea, 0xee, 0x34, 0x13, 0xe6, 0xdc, 0xc6, 0xd1, 0xeb}},
{{0x10, 0xfc, 0x58, 0xf3, 0x5f, 0xc7, 0xfe, 0x7a, 0xe8, 0x75, 0x52, 0x4b, 0xb5, 0x85, 0x00, 0x03, 0x00, 0x5b, 0x7f, 0x97, 0x8c, 0x0c, 0x65, 0xe2, 0xa9, 0x65, 0x46, 0x4b, 0x6d, 0x00, 0x81, 0x9c}},
{{0x5a, 0xcd, 0x94, 0xeb, 0x3c, 0x57, 0x83, 0x79, 0xc1, 0xea, 0x58, 0xa3, 0x43, 0xec, 0x4f, 0xcf, 0xf9, 0x62, 0x77, 0x6f, 0xe3, 0x55, 0x21, 0xe4, 0x75, 0xa0, 0xe0, 0x6d, 0x88, 0x7b, 0x2d, 0xb9}},
{{0x33, 0xda, 0xf3, 0xa2, 0x14, 0xd6, 0xe0, 0xd4, 0x2d, 0x23, 0x00, 0xa7, 0xb4, 0x4b, 0x39, 0x29, 0x0d, 0xb8, 0x98, 0x9b, 0x42, 0x79, 0x74, 0xcd, 0x86, 0x5d, 0xb0, 0x11, 0x05, 0x5a, 0x29, 0x01}},
{{0xcf, 0xc6, 0x57, 0x2f, 0x29, 0xaf, 0xd1, 0x64, 0xa4, 0x94, 0xe6, 0x4e, 0x6f, 0x1a, 0xeb, 0x82, 0x0c, 0x3e, 0x7d, 0xa3, 0x55, 0x14, 0x4e, 0x51, 0x24, 0xa3, 0x91, 0xd0, 0x6e, 0x9f, 0x95, 0xea}},
{{0xd5, 0x31, 0x2a, 0x4b, 0x0e, 0xf6, 0x15, 0xa3, 0x31, 0xf6, 0x35, 0x2c, 0x2e, 0xd2, 0x1d, 0xac, 0x9e, 0x7c, 0x36, 0x39, 0x8b, 0x93, 0x9a, 0xec, 0x90, 0x1c, 0x25, 0x7f, 0x6c, 0xbc, 0x9e, 0x8e}},
{{0x55, 0x1d, 0x67, 0xfe, 0xfc, 0x7b, 0x5b, 0x9f, 0x9f, 0xdb, 0xf6, 0xaf, 0x57, 0xc9, 0x6c, 0x8a, 0x74, 0xd7, 0xe4, 0x5a, 0x00, 0x20, 0x78, 0xa7, 0xb5, 0xba, 0x45, 0xc6, 0xfd, 0xe9, 0x3e, 0x33}},
{{0xd5, 0x0a, 0xc7, 0xbd, 0x5c, 0xa5, 0x93, 0xc6, 0x56, 0x92, 0x8f, 0x38, 0x42, 0x80, 0x17, 0xfc, 0x7b, 0xa5, 0x02, 0x85, 0x4c, 0x43, 0xd8, 0x41, 0x49, 0x50, 0xe9, 0x6e, 0xcb, 0x40, 0x5d, 0xc3}},
{{0x07, 0x73, 0xe1, 0x8e, 0xa1, 0xbe, 0x44, 0xfe, 0x1a, 0x97, 0xe2, 0x39, 0x57, 0x3c, 0xfa, 0xe3, 0xe4, 0xe9, 0x5e, 0xf9, 0xaa, 0x9f, 0xaa, 0xbe, 0xac, 0x12, 0x74, 0xd3, 0xad, 0x26, 0x16, 0x04}},
{{0xe9, 0xaf, 0x0e, 0x7c, 0xa8, 0x93, 0x30, 0xd2, 0xb8, 0x61, 0x5d, 0x1b, 0x41, 0x37, 0xca, 0x61, 0x7e, 0x21, 0x29, 0x7f, 0x2f, 0x0d, 0xed, 0x8e, 0x31, 0xb7, 0xd2, 0xea, 0xd8, 0x71, 0x46, 0x60}},
{{0x7b, 0x12, 0x45, 0x83, 0x09, 0x7f, 0x10, 0x29, 0xa0, 0xc7, 0x41, 0x91, 0xfe, 0x73, 0x78, 0xc9, 0x10, 0x5a, 0xcc, 0x70, 0x66, 0x95, 0xed, 0x14, 0x93, 0xbb, 0x76, 0x03, 0x42, 0x26, 0xa5, 0x7b}},
{{0xec, 0x40, 0x05, 0x7b, 0x99, 0x54, 0x76, 0x65, 0x0b, 0x3d, 0xb9, 0x8e, 0x9d, 0xb7, 0x57, 0x38, 0xa8, 0xcd, 0x2f, 0x94, 0xd8, 0x63, 0xb9, 0x06, 0x15, 0x0c, 0x56, 0xaa, 0xc1, 0x9c, 0xaa, 0x6b}},
{{0x01, 0xd9, 0xff, 0x72, 0x9e, 0xfd, 0x39, 0xd8, 0x37, 0x84, 0xc0, 0xfe, 0x59, 0xc4, 0xae, 0x81, 0xa6, 0x70, 0x34, 0xcb, 0x53, 0xc9, 0x43, 0xfb, 0x81, 0x8b, 0x9d, 0x8a, 0xe7, 0xfc, 0x33, 0xe5}},
{{0x00, 0xdf, 0xb3, 0xc6, 0x96, 0x32, 0x8c, 0x76, 0x42, 0x45, 0x19, 0xa7, 0xbe, 0xfe, 0x8e, 0x0f, 0x6c, 0x76, 0xf9, 0x47, 0xb5, 0x27, 0x67, 0x91, 0x6d, 0x24, 0x82, 0x3f, 0x73, 0x5b, 0xaf, 0x2e}},
{{0x46, 0x1b, 0x79, 0x9b, 0x4d, 0x9c, 0xee, 0xa8, 0xd5, 0x80, 0xdc, 0xb7, 0x6d, 0x11, 0x15, 0x0d, 0x53, 0x5e, 0x16, 0x39, 0xd1, 0x60, 0x03, 0xc3, 0xfb, 0x7e, 0x9d, 0x1f, 0xd1, 0x30, 0x83, 0xa8}},
{{0xee, 0x03, 0x03, 0x94, 0x79, 0xe5, 0x22, 0x8f, 0xdc, 0x55, 0x1c, 0xbd, 0xe7, 0x07, 0x9d, 0x34, 0x12, 0xea, 0x18, 0x6a, 0x51, 0x7c, 0xcc, 0x63, 0xe4, 0x6e, 0x9f, 0xcc, 0xe4, 0xfe, 0x3a, 0x6c}},
{{0xa8, 0xcf, 0xb5, 0x43, 0x52, 0x4e, 0x7f, 0x02, 0xb9, 0xf0, 0x45, 0xac, 0xd5, 0x43, 0xc2, 0x1c, 0x37, 0x3b, 0x4c, 0x9b, 0x98, 0xac, 0x20, 0xce, 0xc4, 0x17, 0xa6, 0xdd, 0xb5, 0x74, 0x4e, 0x94}},
{{0x93, 0x2b, 0x79, 0x4b, 0xf8, 0x9c, 0x6e, 0xda, 0xf5, 0xd0, 0x65, 0x0c, 0x7c, 0x4b, 0xad, 0x92, 0x42, 0xb2, 0x56, 0x26, 0xe3, 0x7e, 0xad, 0x5a, 0xa7, 0x5e, 0xc8, 0xc6, 0x4e, 0x09, 0xdd, 0x4f}},
{{0x16, 0xb1, 0x0c, 0x77, 0x9c, 0xe5, 0xcf, 0xef, 0x59, 0xc7, 0x71, 0x0d, 0x2e, 0x68, 0x44, 0x1e, 0xa6, 0xfa, 0xcb, 0x68, 0xe9, 0xb5, 0xf7, 0xd5, 0x33, 0xae, 0x0b, 0xb7, 0x8e, 0x28, 0xbf, 0x57}},
{{0x0f, 0x77, 0xc7, 0x67, 0x43, 0xe7, 0x39, 0x6f, 0x99, 0x10, 0x13, 0x9f, 0x49, 0x37, 0xd8, 0x37, 0xae, 0x54, 0xe2, 0x10, 0x38, 0xac, 0x5c, 0x0b, 0x3f, 0xd6, 0xef, 0x17, 0x1a, 0x28, 0xa7, 0xe4}},
{{0xd7, 0xe5, 0x74, 0xb7, 0xb9, 0x52, 0xf2, 0x93, 0xe8, 0x0d, 0xde, 0x90, 0x5e, 0xb5, 0x09, 0x37, 0x3f, 0x3f, 0x6c, 0xd1, 0x09, 0xa0, 0x22, 0x08, 0xb3, 0xc1, 0xe9, 0x24, 0x08, 0x0a, 0x20, 0xca}},
{{0x45, 0x66, 0x6f, 0x8c, 0x38, 0x1e, 0x3d, 0xa6, 0x75, 0x56, 0x3f, 0xf8, 0xba, 0x23, 0xf8, 0x3b, 0xfa, 0xc3, 0x0c, 0x34, 0xab, 0xdd, 0xe6, 0xe5, 0xc0, 0x97, 0x5e, 0xf9, 0xfd, 0x70, 0x0c, 0xb9}},
{{0xb2, 0x46, 0x12, 0xe4, 0x54, 0x60, 0x7e, 0xb1, 0xab, 0xa4, 0x47, 0xf8, 0x16, 0xd1, 0xa4, 0x55, 0x1e, 0xf9, 0x5f, 0xa7, 0x24, 0x7f, 0xb7, 0xc1, 0xf5, 0x03, 0x02, 0x0a, 0x71, 0x77, 0xf0, 0xdd}},
{{0x7e, 0x20, 0x88, 0x61, 0x85, 0x6d, 0xa4, 0x2c, 0x8b, 0xb4, 0x6a, 0x75, 0x67, 0xf8, 0x12, 0x13, 0x62, 0xd9, 0xfb, 0x24, 0x96, 0xf1, 0x31, 0xa4, 0xaa, 0x90, 0x17, 0xcf, 0x36, 0x6c, 0xdf, 0xce}},
{{0x5b, 0x64, 0x6b, 0xff, 0x6a, 0xd1, 0x10, 0x01, 0x65, 0x03, 0x7a, 0x05, 0x56, 0x01, 0xea, 0x02, 0x35, 0x8c, 0x0f, 0x41, 0x05, 0x0f, 0x9d, 0xfe, 0x3c, 0x95, 0xdc, 0xcb, 0xd3, 0x08, 0x7b, 0xe0}},
{{0x74, 0x6d, 0x1d, 0xcc, 0xfe, 0xd2, 0xf0, 0xff, 0x1e, 0x13, 0xc5, 0x1e, 0x2d, 0x50, 0xd5, 0x32, 0x43, 0x75, 0xfb, 0xd5, 0xbf, 0x7c, 0xa8, 0x2a, 0x89, 0x31, 0x82, 0x8d, 0x80, 0x1d, 0x43, 0xab}},
{{0xcb, 0x98, 0x11, 0x0d, 0x4a, 0x6b, 0xb9, 0x7d, 0x22, 0xfe, 0xad, 0xbc, 0x6c, 0x0d, 0x89, 0x30, 0xc5, 0xf8, 0xfc, 0x50, 0x8b, 0x2f, 0xc5, 0xb3, 0x53, 0x28, 0xd2, 0x6b, 0x88, 0xdb, 0x19, 0xae}},
{{0x60, 0xb6, 0x26, 0xa0, 0x33, 0xb5, 0x5f, 0x27, 0xd7, 0x67, 0x6c, 0x40, 0x95, 0xea, 0xba, 0xbc, 0x7a, 0x2c, 0x7e, 0xde, 0x26, 0x24, 0xb4, 0x72, 0xe9, 0x7f, 0x64, 0xf9, 0x6b, 0x8c, 0xfc, 0x0e}},
{{0xe5, 0xb5, 0x2b, 0xc9, 0x27, 0x46, 0x8d, 0xf7, 0x18, 0x93, 0xeb, 0x81, 0x97, 0xef, 0x82, 0x0c, 0xf7, 0x6c, 0xb0, 0xaa, 0xf6, 0xe8, 0xe4, 0xfe, 0x93, 0xad, 0x62, 0xd8, 0x03, 0x98, 0x31, 0x04}},
{{0x05, 0x65, 0x41, 0xae, 0x5d, 0xa9, 0x96, 0x1b, 0xe2, 0xb0, 0xa5, 0xe8, 0x95, 0xe5, 0xc5, 0xba, 0x15, 0x3c, 0xbb, 0x62, 0xdd, 0x56, 0x1a, 0x42, 0x7b, 0xad, 0x0f, 0xfd, 0x41, 0x92, 0x31, 0x99}},
{{0xf8, 0xfe, 0xf0, 0x5a, 0x3f, 0xa5, 0xc9, 0xf3, 0xeb, 0xa4, 0x16, 0x38, 0xb2, 0x47, 0xb7, 0x11, 0xa9, 0x9f, 0x96, 0x0f, 0xe7, 0x3a, 0xa2, 0xf9, 0x01, 0x36, 0xae, 0xb2, 0x03, 0x29, 0xb8, 0x88}}};
//Debug printing for the above types
//To enable, #define DBG and call DP(value)
void dp(key a);
void dp(bool a);
void dp(const char * a, int l);
void dp(keyV a);
void dp(keyM a);
void dp(xmr_amount vali);
void dp(int vali);
void dp(bits amountb);
void dp(const char * st);
//various conversions
//unsigned long long to 32 byte key
void d2h(key & amounth, xmr_amount val);
key d2h(xmr_amount val);
//unsigned long long to int[64]
void d2b(bits amountb, xmr_amount val);
//32 byte key to unsigned long long
// if the key holds a value > 2^64
// then the value in the first 8 bytes is returned
xmr_amount h2d(const key &test);
//32 byte key to int[64]
void h2b(bits amountb2, const key & test);
//int[64] to 32 byte key
void b2h(key & amountdh, bits amountb2);
//int[64] to unsigned long long
xmr_amount b2d(bits amountb);
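// Illustrative round trip (sketch): for any 64 bit amount v,
//   key k = d2h(v);         // v encoded into the low 8 bytes of a 32 byte key
//   xmr_amount w = h2d(k);  // w == v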
bool is_rct_simple(int type);
bool is_rct_bulletproof(int type);
bool is_rct_borromean(int type);
static inline const rct::key &pk2rct(const crypto::public_key &pk) { return (const rct::key&)pk; }
static inline const rct::key &sk2rct(const crypto::secret_key &sk) { return (const rct::key&)sk; }
static inline const rct::key &ki2rct(const crypto::key_image &ki) { return (const rct::key&)ki; }
static inline const rct::key &hash2rct(const crypto::hash &h) { return (const rct::key&)h; }
static inline const crypto::public_key &rct2pk(const rct::key &k) { return (const crypto::public_key&)k; }
static inline const crypto::secret_key &rct2sk(const rct::key &k) { return (const crypto::secret_key&)k; }
static inline const crypto::key_image &rct2ki(const rct::key &k) { return (const crypto::key_image&)k; }
static inline const crypto::hash &rct2hash(const rct::key &k) { return (const crypto::hash&)k; }
static inline bool operator==(const rct::key &k0, const crypto::public_key &k1) { return !crypto_verify_32(k0.bytes, (const unsigned char*)&k1); }
static inline bool operator!=(const rct::key &k0, const crypto::public_key &k1) { return crypto_verify_32(k0.bytes, (const unsigned char*)&k1); }
}
namespace cryptonote {
static inline bool operator==(const crypto::public_key &k0, const rct::key &k1) { return !crypto_verify_32((const unsigned char*)&k0, k1.bytes); }
static inline bool operator!=(const crypto::public_key &k0, const rct::key &k1) { return crypto_verify_32((const unsigned char*)&k0, k1.bytes); }
static inline bool operator==(const crypto::secret_key &k0, const rct::key &k1) { return !crypto_verify_32((const unsigned char*)&k0, k1.bytes); }
static inline bool operator!=(const crypto::secret_key &k0, const rct::key &k1) { return crypto_verify_32((const unsigned char*)&k0, k1.bytes); }
}
namespace rct {
inline std::ostream &operator <<(std::ostream &o, const rct::key &v) {
epee::to_hex::formatted(o, epee::as_byte_span(v)); return o;
}
}
namespace std
{
template<> struct hash<rct::key> { std::size_t operator()(const rct::key &k) const { return reinterpret_cast<const std::size_t&>(k); } };
}
BLOB_SERIALIZER(rct::key);
BLOB_SERIALIZER(rct::key64);
BLOB_SERIALIZER(rct::ctkey);
BLOB_SERIALIZER(rct::multisig_kLRki);
BLOB_SERIALIZER(rct::boroSig);
VARIANT_TAG(debug_archive, rct::key, "rct::key");
VARIANT_TAG(debug_archive, rct::key64, "rct::key64");
VARIANT_TAG(debug_archive, rct::keyV, "rct::keyV");
VARIANT_TAG(debug_archive, rct::keyM, "rct::keyM");
VARIANT_TAG(debug_archive, rct::ctkey, "rct::ctkey");
VARIANT_TAG(debug_archive, rct::ctkeyV, "rct::ctkeyV");
VARIANT_TAG(debug_archive, rct::ctkeyM, "rct::ctkeyM");
VARIANT_TAG(debug_archive, rct::ecdhTuple, "rct::ecdhTuple");
VARIANT_TAG(debug_archive, rct::mgSig, "rct::mgSig");
VARIANT_TAG(debug_archive, rct::rangeSig, "rct::rangeSig");
VARIANT_TAG(debug_archive, rct::boroSig, "rct::boroSig");
VARIANT_TAG(debug_archive, rct::rctSig, "rct::rctSig");
VARIANT_TAG(debug_archive, rct::Bulletproof, "rct::bulletproof");
VARIANT_TAG(debug_archive, rct::multisig_kLRki, "rct::multisig_kLRki");
VARIANT_TAG(debug_archive, rct::multisig_out, "rct::multisig_out");
VARIANT_TAG(debug_archive, rct::clsag, "rct::clsag");
VARIANT_TAG(binary_archive, rct::key, 0x90);
VARIANT_TAG(binary_archive, rct::key64, 0x91);
VARIANT_TAG(binary_archive, rct::keyV, 0x92);
VARIANT_TAG(binary_archive, rct::keyM, 0x93);
VARIANT_TAG(binary_archive, rct::ctkey, 0x94);
VARIANT_TAG(binary_archive, rct::ctkeyV, 0x95);
VARIANT_TAG(binary_archive, rct::ctkeyM, 0x96);
VARIANT_TAG(binary_archive, rct::ecdhTuple, 0x97);
VARIANT_TAG(binary_archive, rct::mgSig, 0x98);
VARIANT_TAG(binary_archive, rct::rangeSig, 0x99);
VARIANT_TAG(binary_archive, rct::boroSig, 0x9a);
VARIANT_TAG(binary_archive, rct::rctSig, 0x9b);
VARIANT_TAG(binary_archive, rct::Bulletproof, 0x9c);
VARIANT_TAG(binary_archive, rct::multisig_kLRki, 0x9d);
VARIANT_TAG(binary_archive, rct::multisig_out, 0x9e);
VARIANT_TAG(binary_archive, rct::clsag, 0x9f);
VARIANT_TAG(json_archive, rct::key, "rct_key");
VARIANT_TAG(json_archive, rct::key64, "rct_key64");
VARIANT_TAG(json_archive, rct::keyV, "rct_keyV");
VARIANT_TAG(json_archive, rct::keyM, "rct_keyM");
VARIANT_TAG(json_archive, rct::ctkey, "rct_ctkey");
VARIANT_TAG(json_archive, rct::ctkeyV, "rct_ctkeyV");
VARIANT_TAG(json_archive, rct::ctkeyM, "rct_ctkeyM");
VARIANT_TAG(json_archive, rct::ecdhTuple, "rct_ecdhTuple");
VARIANT_TAG(json_archive, rct::mgSig, "rct_mgSig");
VARIANT_TAG(json_archive, rct::rangeSig, "rct_rangeSig");
VARIANT_TAG(json_archive, rct::boroSig, "rct_boroSig");
VARIANT_TAG(json_archive, rct::rctSig, "rct_rctSig");
VARIANT_TAG(json_archive, rct::Bulletproof, "rct_bulletproof");
VARIANT_TAG(json_archive, rct::multisig_kLRki, "rct_multisig_kLR");
VARIANT_TAG(json_archive, rct::multisig_out, "rct_multisig_out");
VARIANT_TAG(json_archive, rct::clsag, "rct_clsag");
#endif /* RCT_TYPES_H */
|
#-*- coding: utf-8 -*-
"""
Copyright (c) 2012 University of Oxford
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
"""Routes configuration
The more specific and detailed routes should be defined first so they
may take precedent over the more generic routes. For more information
refer to the routes manual at http://routes.groovie.org/docs/
"""
from pylons import config
from routes import Mapper
def make_map():
"""Create, configure and return the routes Mapper"""
map = Mapper(directory=config['pylons.paths']['controllers'],
always_scan=config['debug'])
map.minimization = False
# The ErrorController route (handles 404/500 error pages); it should
# likely stay at the top, ensuring it can always be resolved
map.connect('/error/{action}', controller='error')
map.connect('/error/{action}/{id}', controller='error')
# CUSTOM ROUTES HERE
map.redirect('/*(url)/', '/{url}',
_redirect_code='301 Moved Permanently')
map.connect("/login", controller='account', action='login')
map.connect("/logout", controller='account', action='logout')
map.connect("/welcome", controller='account', action='welcome')
map.connect('/', controller='home', action='index')
map.connect('/api', controller='api', action='index')
map.connect('/api/{api_name}', controller='api', action='apiview')
map.connect('/keywords', controller='keywords', action='index')
map.connect('/about', controller='about', action='index')
map.connect('/searching', controller='searching', action='index')
map.connect('/admin', controller='admin', action='index')
map.connect('/users', controller='users', action='index')
map.connect('/users/{username}', controller='users', action='userview')
map.connect('/{silo}/users', controller='users', action='siloview')
map.connect('/{silo}/users/{username}', controller='users', action='silouserview')
map.connect('/{silo}/admin', controller='admin', action='siloview')
map.connect('/silos', controller='silos', action='index')
#map.connect('/{silo}', controller='silos', action='siloview')
map.connect('/{silo}', controller='datasets', action='siloview')
map.connect('/{silo}/datasets', controller='datasets', action='siloview')
map.connect('/{silo}/datasets/{id}', controller='datasets', action='datasetview')
map.connect('/{silo}/datasets/{id}/{path:.*}', controller='datasets', action='itemview')
map.connect('/{silo}/items', controller='items', action='siloview')
map.connect('/{silo}/items/{id}', controller='items', action='datasetview')
map.connect('/{silo}/items/{id}/{path:.*?\.zip}', controller='items', action='itemview')
map.connect('/{silo}/items/{id}/{path:.*?\.zip}/{subpath:.*}', controller='items', action='subitemview')
#map.connect('/{silo}/items/{id}/{path:.*}', controller='items', action='itemview') # Use verb dataset instead
map.connect('/{silo}/states', controller='states', action='siloview')
map.connect('/{silo}/states/{id}', controller='states', action='datasetview')
map.connect('/{silo}/doi/{id}', controller='doi', action='datasetview')
# SWORDv2 Configuration
map.connect('/swordv2/service-document', controller="sword", action="service_document") # From which to retrieve the service document
map.connect('/swordv2/silo/{path:.*?}', controller="sword", action="collection") # Representing a Collection as listed in the service document
map.connect('/swordv2/edit-media/{path:.*?}', controller="sword", action="media_resource") # The URI used in atom:link@rel=edit-media
map.connect('/swordv2/edit/{path:.*?}', controller="sword", action="container") # The URI used in atom:link@rel=edit
map.connect('/swordv2/statement/{path:.*?}', controller="sword", action="statement") # The URI used in atom:link@rel=sword:statement
map.connect('/{controller}')
map.connect('/{controller}/{action}')
map.connect('/{controller}/{action}/{id}')
return map
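# Illustrative sketch (not part of the original routing file): resolving a path
# against the mapper above, assuming the Pylons config has already been loaded:
#   mapper = make_map()
#   mapper.match('/mysilo/datasets/42')
#   # -> {'controller': 'datasets', 'action': 'datasetview', 'silo': 'mysilo', 'id': '42'}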
|
var path = '/tmp/test-level-master'
var path2 = '/tmp/test-level-slave'
require('rimraf').sync(path)
require('rimraf').sync(path2)
var levelup = require('levelup')
var SubLevel = require('level-sublevel')
var db = SubLevel(levelup(path))
var _db = SubLevel(levelup(path2))
var Master = require('../')
var pull = require('pull-stream')
var help = require('./helper')
var test = require('tape')
var master = Master(db, 'master')
var slave = Master.Slave(_db)
test('setup', function (t) {
help.populate(db, 100, function (err) {
t.notOk(err)
t.end()
})
})
//master-slave replication must pull from master.
//connect to master, copy current since value.
test('createStream', function (t) {
slave.since(function (err, since) {
t.equal(since, 0)
console.log('PULL STREAM SINCE:', since)
master.createPullStream({since: 0})
.pipe(pull.through(function (data) {
t.ok(data.ts > since, data.ts + ' > ' + since)
}))
.pipe(slave.createPullStream(function (err) {
//*********************************
//SOME TIMES THIS DOESN"T HAPPEN????
//*********************************
t.end()
}))
})
})
test('updates', function (t) {
help.populate(db, 100, function (err) {
t.notOk(err)
t.end()
})
})
test('createStream2', function (t) {
slave.since(function (err, since) {
t.notEqual(since, 0)
console.log('PULL STREAM SINCE:', since)
master.createPullStream({since: since})
.pipe(pull.through(function (data) {
t.ok(data.ts > since, data.ts + ' > ' + since)
}))
.pipe(slave.createPullStream(function (err) {
if(err) throw err
help.hash(db, function (err, sum) {
help.hash(_db, function (err, _sum) {
console.log(sum, _sum)
t.equal(sum, _sum)
t.end()
})
})
}))
})
})
|
import styled from 'styled-components';
const Wrapper = styled.li`
width: 100%;
height: max-content;
min-height: 3rem;
display: flex;
align-items: center;
position: relative;
border-top: 1px solid #eee;
&:first-child {
border-top: none;
}
`;
export default Wrapper;
|
/*
* blootuoth.h
*
* Created on: Aug 3, 2020
* Author: User
*/
#ifndef INC_BLOOTUOTH_H_
#define INC_BLOOTUOTH_H_
#include "ringbuffer_dma.h"
#define BUF_SIZE_BT 1024
#define CMD_SIZE 128
#define QNH 1014
RingBuffer_DMA rx_bt;
char cmd_bt[CMD_SIZE];
volatile uint32_t cmd_i;
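/* Function declarations; the brief descriptions below are inferred from the
 * names and are assumptions (the implementations live elsewhere):
 *  - blootuoth_int:     set up the DMA ring buffer (rx_bt) for the Bluetooth UART
 *  - parcin_bt_command: accumulate received characters into cmd_bt until a full command arrives
 *  - comand_handling:   dispatch a completed command string
 */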
void blootuoth_int(void);
void parcin_bt_command(void);
void comand_handling(char *input);
#endif /* INC_BLOOTUOTH_H_ */
|
var osmose = require('../');
var test = require('tape');
test('valid request', function (t) {
t.plan(2);
var callback = function (err, url) {
t.notOk(err instanceof Error);
t.ok(url.indexOf("data:") > -1);
};
osmose.generateConfig(validConfig, callback);
});
test('no longer valid request', function (t) {
t.plan(1);
var callback = function (err, resp, body) {
t.ok(err instanceof Error);
};
osmose.generateConfig(groupsOnly, callback);
});
test('invalid request', function (t) {
t.plan(1);
var callback = function (err, resp, body) {
t.ok(err instanceof Error);
};
osmose.generateConfig({ donald: "duck" }, callback);
});
test('invalid request: no background groups', function (t) {
t.plan(1);
var callback = function (err, resp, body) {
t.ok(err instanceof Error);
};
osmose.generateConfig({ timeStepsPerYear: 123, groups: [focalGroup]}, callback);
});
test('invalid request: no focal groups', function (t) {
t.plan(1);
var callback = function (err, resp, body) {
t.ok(err instanceof Error);
};
osmose.generateConfig({ timeStepsPerYear: 123, groups: [backgroundGroup]}, callback);
});
var backgroundGroup = {
name: "Echinoderms and large gastropods",
type: "background",
taxa: [
{
name: "Echinus esculentus",
url: "http://sealifebase.org/summary/49996"
}
]
};
var focalGroup = {
name: "Amberjacks",
type: "focal",
taxa: [
{
name: "Seriola dumerili",
url: "http://fishbase.org/summary/1005"
}
]
};
var groupsOnly = [
focalGroup,
backgroundGroup
];
var validConfig = {
timeStepsPerYear: 123,
groups: groupsOnly
};
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { t } from '@superset-ui/core';
import {
filterNulls,
autozoom,
jsColumns,
jsDataMutator,
jsTooltip,
jsOnclickHref,
lineColumn,
viewport,
lineWidth,
lineType,
reverseLongLat,
mapboxStyle,
} from '../../utilities/Shared_DeckGL';
export default {
controlPanelSections: [
{
label: t('Query'),
expanded: true,
controlSetRows: [
[
lineColumn,
{
...lineType,
choices: [
['polyline', 'Polyline'],
['json', 'JSON'],
],
},
],
['row_limit', filterNulls],
['adhoc_filters'],
],
},
{
label: t('Map'),
expanded: true,
controlSetRows: [
[mapboxStyle, viewport],
['color_picker', lineWidth],
[reverseLongLat, autozoom],
],
},
{
label: t('Advanced'),
controlSetRows: [[jsColumns], [jsDataMutator], [jsTooltip], [jsOnclickHref]],
},
],
};
|
import React from 'react';
import createSvgIcon from './utils/createSvgIcon';
export default createSvgIcon(
<React.Fragment><path d="M20 2H8c-1.1 0-2 .9-2 2v12c0 1.1.9 2 2 2h12c1.1 0 2-.9 2-2V4c0-1.1-.9-2-2-2zm-2 5h-3v5.5a2.5 2.5 0 0 1-5 0 2.5 2.5 0 0 1 2.5-2.5c.57 0 1.08.19 1.5.51V5h4v2zM4 6H2v14c0 1.1.9 2 2 2h14v-2H4V6z" /></React.Fragment>
, 'LibraryMusic');
|
#ifndef PHPSCI_EXT_CARRAY_H
#define PHPSCI_EXT_CARRAY_H
#include "php.h"
#include "common/exceptions.h"
#define CArray_PRIORITY 0.0
typedef struct CArray CArray;
static const int CARRAY_ARRAY_WARN_ON_WRITE = (int)(1U << 31); /* unsigned shift avoids signed-overflow UB */
#define TYPE_INT_STRING "int"
#define TYPE_INT32_STRING "int32"
#define TYPE_INT64_STRING "int64"
#define TYPE_LONG_STRING "long"
#define TYPE_FLOAT_STRING "float"
#define TYPE_FLOAT32_STRING "float32"
#define TYPE_FLOAT64_STRING "float64"
#define TYPE_DOUBLE_STRING "double"
#define CARRAY_NTYPES 7
#define CARRAY_MAXDIMS 100
#define TYPE_INTEGER 'i'
#define TYPE_DOUBLE 'd'
#define TYPE_FLOAT 'f'
#define TYPE_BOOL 'b'
#define TYPE_STRING 's'
#define TYPE_VOID 'v'
#define TYPE_LONG 'l'
#define TYPE_INTEGER_INT 0
#define TYPE_DOUBLE_INT 3
#define TYPE_FLOAT_INT 2
#define TYPE_BOOL_INT 5
#define TYPE_STRING_INT 4
#define TYPE_VOID_INT 6
#define TYPE_LONG_INT 1
#define TYPE_NOTYPE_INT -1
#define TYPE_DEFAULT_INT 0
#define TYPE_DEFAULT 'd'
/* Macros to use for freeing and cloning auxiliary data */
#define CARRAY_AUXDATA_FREE(auxdata) \
do { \
if ((auxdata) != NULL) { \
(auxdata)->free(auxdata); \
} \
} while(0)
typedef enum {
CARRAY_CLIP=0,
CARRAY_WRAP=1,
CARRAY_RAISE=2
} CARRAY_CLIPMODE;
/* For specifying array memory layout or iteration order */
typedef enum {
/* Fortran order if inputs are all Fortran, C otherwise */
CARRAY_ANYORDER=-1,
/* C order */
CARRAY_CORDER=0,
/* Fortran order */
CARRAY_FORTRANORDER=1,
/* An order as close to the inputs as possible */
CARRAY_KEEPORDER=2
} CARRAY_ORDER;
typedef enum {
CARRAY_QUICKSORT=0,
CARRAY_HEAPSORT=1,
CARRAY_MERGESORT=2
} CARRAY_SORTKIND;
#define CARRAY_NSORTS (CARRAY_MERGESORT + 1)
/*
* Means c-style contiguous (last index varies the fastest). The data
* elements right after each other.
*
* This flag may be requested in constructor functions.
*/
#define CARRAY_ARRAY_C_CONTIGUOUS 0x0001
/*
* Set if array is a contiguous Fortran array: the first index varies
* the fastest in memory (strides array is reverse of C-contiguous
* array)
*
* This flag may be requested in constructor functions.
*/
#define CARRAY_ARRAY_F_CONTIGUOUS 0x0002
/*
* Note: all 0-d arrays are C_CONTIGUOUS and F_CONTIGUOUS. If a
* 1-d array is C_CONTIGUOUS it is also F_CONTIGUOUS. Arrays with
 * more than one dimension can be C_CONTIGUOUS and F_CONTIGUOUS
* at the same time if they have either zero or one element.
* If NPY_RELAXED_STRIDES_CHECKING is set, a higher dimensional
* array is always C_CONTIGUOUS and F_CONTIGUOUS if it has zero elements
* and the array is contiguous if carray.squeeze() is contiguous.
* I.e. dimensions for which `carray.shape[dimension] == 1` are
* ignored.
*/
#define CARRAY_ARRAY_OWNDATA 0x0004
/*
* Array data is aligned on the appropriate memory address for the type
* stored according to how the compiler would align things (e.g., an
* array of integers (4 bytes each) starts on a memory address that's
* a multiple of 4)
*
* This flag may be requested in constructor functions.
*/
#define CARRAY_ARRAY_ALIGNED 0x0100
/*
* Array data is writeable
*
* This flag may be requested in constructor functions.
*/
#define CARRAY_ARRAY_WRITEABLE 0x0400
#define CARRAY_ARRAY_WRITEBACKIFCOPY 0x2000
#define CARRAY_ARRAY_BEHAVED (CARRAY_ARRAY_ALIGNED | CARRAY_ARRAY_WRITEABLE)
#define CARRAY_ARRAY_DEFAULT (CARRAY_ARRAY_CARRAY)
#define CARRAY_ARRAY_CARRAY (CARRAY_ARRAY_C_CONTIGUOUS | CARRAY_ARRAY_BEHAVED)
#define CARRAY_ARRAY_CARRAY_RO (CARRAY_ARRAY_C_CONTIGUOUS | CARRAY_ARRAY_ALIGNED)
#define CARRAY_ARRAY_UPDATE_ALL (CARRAY_ARRAY_C_CONTIGUOUS | CARRAY_ARRAY_F_CONTIGUOUS | CARRAY_ARRAY_ALIGNED)
#define CARRAY_ARRAY_UPDATEIFCOPY 0x1000
#define CARRAY_ARRAY_FORCECAST 0x0010
#define CARRAY_ARRAY_ENSURECOPY 0x0020
#define CARRAY_ARRAY_ENSUREARRAY 0x0040
#define CARRAY_ARRAY_FARRAY (CARRAY_ARRAY_F_CONTIGUOUS | CARRAY_ARRAY_BEHAVED)
#define CARRAY_ARRAY_FARRAY_RO (CARRAY_ARRAY_F_CONTIGUOUS | CARRAY_ARRAY_ALIGNED)
#define CARRAY_ARRAY_NOTSWAPPED 0x0200
/*
 * Make sure that the strides are in units of the element size. Needed
* for some operations with record-arrays.
*
* This flag may be requested in constructor functions.
*/
#define CARRAY_ARRAY_ELEMENTSTRIDES 0x0080
/* The item must be reference counted when it is inserted or extracted. */
#define CARRAY_ITEM_REFCOUNT 0x01
/* Same as needing REFCOUNT */
#define CARRAY_ITEM_HASOBJECT 0x01
/* The item is a POINTER */
#define CARRAY_ITEM_IS_POINTER 0x04
/* memory needs to be initialized for this data-type */
#define CARRAY_NEEDS_INIT 0x08
/* Use f.getitem when extracting elements of this data-type */
#define CARRAY_USE_GETITEM 0x20
/* Use f.setitem when setting or creating a 0-d array from this data-type.*/
#define CARRAY_USE_SETITEM 0x40
/* A sticky flag specifically for structured arrays */
#define CARRAY_ALIGNED_STRUCT 0x80
#define CArray_ISONESEGMENT(m) (CArray_NDIM(m) == 0 || \
CArray_CHKFLAGS(m, CARRAY_ARRAY_C_CONTIGUOUS) || \
CArray_CHKFLAGS(m, CARRAY_ARRAY_F_CONTIGUOUS))
#define CArrayDataType_FLAGCHK(dtype, flag) (((dtype)->flags & (flag)) == (flag))
#define CArray_ISFORTRAN(m) (CArray_CHKFLAGS(m, CARRAY_ARRAY_F_CONTIGUOUS) && \
(!CArray_CHKFLAGS(m, CARRAY_ARRAY_C_CONTIGUOUS)))
#define CArray_IS_F_CONTIGUOUS(m) CArray_CHKFLAGS(m, CARRAY_ARRAY_F_CONTIGUOUS)
#define CArray_IS_C_CONTIGUOUS(m) CArray_CHKFLAGS(m, CARRAY_ARRAY_C_CONTIGUOUS)
#define CArray_Copy(obj) CArray_NewCopy(obj, CARRAY_CORDER)
#define CArray_GETPTR2(obj, i, j) ((void *)(CArray_BYTES(obj) + \
(i)*CArray_STRIDES(obj)[0] + \
(j)*CArray_STRIDES(obj)[1]))
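/*
 * Illustrative note (not part of the original header): strides are expressed in
 * bytes, so for a C-contiguous 2-D array of doubles with shape (3, 4) the stride
 * vector is {4 * sizeof(double), sizeof(double)} = {32, 8}, and
 * CArray_GETPTR2(arr, 1, 2) resolves to data + 1*32 + 2*8, i.e. the element at
 * row 1, column 2.
 */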
/**
* Array Functions
*/
typedef int (CArray_FillFunc)(void *, int, struct CArray *);
typedef void * (CArray_GetItemFunc) (void *, struct CArray *);
typedef int (CArray_SetItemFunc)(void *, void *, struct CArray *);
typedef void (CArray_CopySwapNFunc)(void *, int, void *, int,
int, int, struct CArray *);
typedef void (CArray_CopySwapFunc)(void *, void *, int, void *);
typedef void (CArray_VectorUnaryFunc)(void *, void *, int, void *,
void *);
typedef int (CArray_FastTakeFunc)(void *dest, void *src, int *indarray,
int nindarray, int n_outer,
int m_middle, int nelem,
CARRAY_CLIPMODE clipmode);
typedef int (CArray_ArgFunc)(void*, int, int*, void *);
typedef int (CArray_SortFunc)(void *, int, void *);
typedef int (CArray_CompareFunc)(const void *, const void *, void *);
typedef int (CArray_PartitionFunc)(void *, int, int,
int *, int *,
void *);
typedef void (CArray_FastClipFunc)(void *in, int n_in, void *min,
void *max, void *out);
typedef void (CArray_DotFunc)(char *, int, char *, int, char *, int);
typedef struct CArray_ArrFuncs {
CArray_FastClipFunc *fastclip;
/* The next four functions *cannot* be NULL */
CArray_FastTakeFunc *fasttake;
/*
* Functions to get and set items with standard Python types
* -- not array scalars
*/
CArray_GetItemFunc *getitem;
CArray_SetItemFunc *setitem;
/*
* Copy and/or swap data. Memory areas may not overlap
* Use memmove first if they might
*/
CArray_CopySwapNFunc *copyswapn;
CArray_CopySwapFunc *copyswap;
CArray_ArgFunc *argmax;
CArray_ArgFunc *argmin;
/*
* Function to compare items
* Can be NULL
*/
CArray_CompareFunc *compare;
/*
* Sorting functions
* Can be NULL
*/
CArray_SortFunc * sort[CARRAY_NSORTS];
/*
* Array of CArray_CastFuncsItem given cast functions to
 * user defined types. The array is terminated with CArray_NOTYPE.
* Can be NULL.
*/
struct CArray_CastFuncsItem* castfuncs;
/*
* Functions to cast to all other standard types
* Can have some NULL entries
*/
CArray_VectorUnaryFunc *cast[CARRAY_NTYPES];
/*
* Used for arange.
* Can be NULL.
*/
CArray_FillFunc *fill;
CArray_DotFunc *dotfunc;
int cancastto[CARRAY_NTYPES];
} CArray_ArrFuncs;
#define CARRAY_FAIL 0
#define CARRAY_SUCCEED 1
/************************************************************
* CArray Auxiliary Data for inner loops, sort functions, etc.
************************************************************/
typedef struct CArrayAuxData_tag CArrayAuxData;
/* Function pointers for freeing or cloning auxiliary data */
typedef void (CArrayAuxData_FreeFunc) (CArrayAuxData *);
typedef CArrayAuxData *(CArrayAuxData_CloneFunc) (CArrayAuxData *);
struct CArrayAuxData_tag {
CArrayAuxData_FreeFunc *free;
CArrayAuxData_CloneFunc *clone;
/* To allow for a bit of expansion without breaking the ABI */
void *reserved[2];
};
/**
* Casting
**/
/* For specifying allowed casting in operations which support it */
typedef enum {
/* Only allow identical types */
CARRAY_NO_CASTING=0,
/* Allow identical and byte swapped types */
CARRAY_EQUIV_CASTING=1,
/* Only allow safe casts */
CARRAY_SAFE_CASTING=2,
/* Allow safe casts or casts within the same kind */
CARRAY_SAME_KIND_CASTING=3,
/* Allow any casts */
CARRAY_UNSAFE_CASTING=4
} CARRAY_CASTING;
/**
* CArray Descriptor
*/
typedef struct CArrayDescriptor {
    char type; // Type char: 'i' = integer, 'l' = long, 'f' = float, 'd' = double, 's' = string, 'b' = boolean, 'v' = void (see the TYPE_* chars above)
int flags; // Data related flags
    int type_num; // 0 = integer, 1 = long, 2 = float, 3 = double, 4 = string, 5 = boolean, 6 = void (see the TYPE_*_INT defines above)
int elsize; // Datatype size
int numElements; // Number of elements
char byteorder;
int alignment; // Alignment Information
int refcount;
CArray_ArrFuncs *f;
} CArrayDescriptor;
/**
* Stride Sorting
*/
typedef struct {
int perm, stride;
} ca_stride_sort_item;
/**
* CArray
*/
struct CArray {
int uuid; // Buffer UUID
int * strides; // Strides vector
int * dimensions; // Dimensions size vector (Shape)
int ndim; // Number of Dimensions
char * data; // Data Buffer
CArray * base; // Used when sharing memory from other CArray (slices, etc)
int flags; // Describes CArray memory approach (Memory related flags)
CArrayDescriptor * descriptor; // CArray data descriptor
int refcount;
};
typedef void (*strided_copy_func_t)(char *, int, char *, int, int, int, CArrayDescriptor*);
/**
* CArray Dims
**/
typedef struct CArray_Dims {
int * ptr;
int len;
} CArray_Dims;
/**
* Memory Pointer
*/
typedef struct MemoryPointer {
int uuid;
int free;
} MemoryPointer;
/**
* Flags Object
*/
typedef struct CArrayFlags
{
CArray * array;
int flags;
} CArrayFlags;
#define CARRAY_LIKELY(x) (x)
#define CARRAY_UNLIKELY(x) (x)
/**
* CArray Data Macros
**/
#define CHDATA(p) ((char *) CArray_DATA((CArray *)p))
#define SHDATA(p) ((short int *) CArray_DATA((CArray *)p))
#define DDATA(p) ((double *) CArray_DATA((CArray *)p))
#define FDATA(p) ((float *) CArray_DATA((CArray *)p))
#define CDATA(p) ((f2c_complex *) CArray_DATA((CArray *)p))
#define ZDATA(p) ((f2c_doublecomplex *) CArray_DATA((CArray *)p))
#define IDATA(p) ((int *) CArray_DATA((CArray *)p))
/**
* CArrays Func Macros
**/
#define CArray_BYTES(a) (a->data)
#define CArray_DATA(a) ((void *)((a)->data))
#define CArray_ITEMSIZE(a) ((int)((a)->descriptor->elsize))
#define CArray_DIMS(a) ((int *)((a)->dimensions))
#define CArray_STRIDES(a) ((int *)((a)->strides))
#define CArray_DESCR(a) ((a)->descriptor)
#define CArray_SIZE(m) CArray_MultiplyList(CArray_DIMS(m), CArray_NDIM(m))
#define CArray_NBYTES(m) (CArray_ITEMSIZE(m) * CArray_SIZE(m))
#define CArray_DESCR_REPLACE(descr) \
do { \
CArrayDescriptor *_new_; \
_new_ = CArray_DescrNew(descr); \
CArrayDescriptor_DECREF(descr); \
descr = _new_; \
} while(0)
#define CArray_ISCARRAY(m) CArray_FLAGSWAP(m, CARRAY_ARRAY_CARRAY)
#define CArray_ISCARRAY_RO(m) CArray_FLAGSWAP(m, CARRAY_ARRAY_CARRAY_RO)
#define CArray_ISFARRAY(m) CArray_FLAGSWAP(m, CARRAY_ARRAY_FARRAY)
#define CArray_ISFARRAY_RO(m) CArray_FLAGSWAP(m, CARRAY_ARRAY_FARRAY_RO)
#define CArray_ISNOTSWAPPED(m) CArray_ISNBO(CArray_DESCR(m)->byteorder)
#define CArray_FLAGSWAP(m, flags) (CArray_CHKFLAGS(m, flags) && CArray_ISNOTSWAPPED(m))
#define CArray_ISBYTESWAPPED(m) (!CArray_ISNOTSWAPPED(m))
#define CArrayDataType_ISUNSIZED(dtype) ((dtype)->elsize == 0)
#define CArrayTypeNum_ISUNSIGNED(type) (0)
#define CARRAY_BYTE_ORDER __BYTE_ORDER
#define CARRAY_LITTLE_ENDIAN __LITTLE_ENDIAN
#define CARRAY_BIG_ENDIAN __BIG_ENDIAN
#define CARRAY_LITTLE '<'
#define CARRAY_BIG '>'
#define CARRAY_NATIVE '='
#define CARRAY_SWAP 's'
#define CARRAY_IGNORE '|'
#if CARRAY_BYTE_ORDER == CARRAY_BIG_ENDIAN
#define CARRAY_NATBYTE CARRAY_BIG
#define CARRAY_OPPBYTE CARRAY_LITTLE
#else
#define CARRAY_NATBYTE CARRAY_LITTLE
#define CARRAY_OPPBYTE CARRAY_BIG
#endif
#define CArray_ISNBO(arg) ((arg) != CARRAY_OPPBYTE)
static inline int
CArray_TYPE(const CArray *arr)
{
return arr->descriptor->type_num;
}
static inline char
CArray_TYPE_CHAR(const CArray *arr)
{
return arr->descriptor->type;
}
static inline int
CArray_FLAGS(const CArray *arr)
{
return arr->flags;
}
static inline CArray *
CArray_BASE(const CArray *arr)
{
return arr->base;
}
static inline int
CArray_STRIDE(const CArray *arr, int index)
{
return ((arr)->strides[index]);
}
static inline int
CArray_DIM(const CArray *arr, int index)
{
return ((arr)->dimensions[index]);
}
static inline int
CArray_CHKFLAGS(const CArray *arr, int flags) {
return (CArray_FLAGS(arr) & flags) == flags;
}
static inline int
CArray_NDIM(const CArray *arr) {
return arr->ndim;
}
static inline int
CArray_CompareLists(int *l1, int *l2, int n)
{
int i;
for (i = 0; i < n; i++) {
if (l1[i] != l2[i]) {
return 0;
}
}
return 1;
}
static inline int
check_and_adjust_axis_msg(int *axis, int ndim)
{
if (axis == NULL) {
return 0;
}
/* Check that index is valid, taking into account negative indices */
if (CARRAY_UNLIKELY((*axis < -ndim) || (*axis >= ndim))) {
throw_axis_exception("Axis is out of bounds for array dimension");
return -1;
}
/* adjust negative indices */
if (*axis < 0) {
*axis += ndim;
}
return 0;
}
static inline int
CArray_SAMESHAPE(const CArray * a, const CArray * b)
{
return CArray_CompareLists(CArray_DIMS(a), CArray_DIMS(b), CArray_NDIM(a));
}
static inline int
check_and_adjust_axis(int *axis, int ndim)
{
return check_and_adjust_axis_msg(axis, ndim);
}
/* Auxiliary routine: printing a matrix */
static inline void
print_matrix( char* desc, int m, int n, double* a, int lda ) {
int i, j;
printf( "\n %s\n", desc );
for( i = 0; i < m; i++ ) {
for( j = 0; j < n; j++ ) printf( " %6.2f", a[i+j*lda] );
printf( "\n" );
}
}
/*
* Like ceil(value), but check for overflow.
*
* Return 0 on success, -1 on failure
*/
static int _safe_ceil_to_int(double value, int* ret)
{
double ivalue;
ivalue = ceil(value);
if (ivalue < INT_MIN || ivalue > INT_MAX) {
return -1;
}
*ret = (int)ivalue;
return 0;
}
#define CArrayDataType_REFCHK(dtype) \
CArrayDataType_FLAGCHK(dtype, CARRAY_ITEM_REFCOUNT)
#define PHPObject zval
#define CArray_ISBEHAVED(m) CArray_FLAGSWAP(m, CARRAY_ARRAY_BEHAVED)
#define CArrayTypeNum_ISFLEXIBLE(type) (((type) >=TYPE_STRING) && \
((type) <=TYPE_VOID))
#define CArray_ISCONTIGUOUS(m) CArray_CHKFLAGS(m, CARRAY_ARRAY_C_CONTIGUOUS)
#define CArray_ISWRITEABLE(m) CArray_CHKFLAGS(m, CARRAY_ARRAY_WRITEABLE)
#define CArray_ISALIGNED(m) CArray_CHKFLAGS(m, CARRAY_ARRAY_ALIGNED)
#define CArray_ISVARIABLE(obj) CArrayTypeNum_ISFLEXIBLE(CArray_TYPE(obj))
#define CArray_SAFEALIGNEDCOPY(obj) (CArray_ISALIGNED(obj) && \
!CArray_ISVARIABLE(obj))
#define CArray_CheckExact(op) 1
#ifndef __COMP_CARRAY_UNUSED
#if defined(__GNUC__)
#define __COMP_CARRAY_UNUSED __attribute__ ((__unused__))
# elif defined(__ICC)
#define __COMP_CARRAY_UNUSED __attribute__ ((__unused__))
# elif defined(__clang__)
#define __COMP_CARRAY_UNUSED __attribute__ ((unused))
#else
#define __COMP_CARRAY_UNUSED
#endif
#endif
#define CARRAY_UNUSED(x) (__CARRAY_UNUSED_TAGGED ## x) __COMP_CARRAY_UNUSED
void _unaligned_strided_byte_copy(char *dst, int outstrides, char *src,
int instrides, int N, int elsize,
CArrayDescriptor* ignore);
void _strided_byte_swap(void *p, int stride, int n, int size);
int CHAR_TYPE_INT(char CHAR_TYPE);
int CArray_MultiplyList(const int * list, unsigned int size);
void CArray_INIT(MemoryPointer * ptr, CArray * output_ca, int * dims, int ndim, char type);
CArray * CArray_NewFromDescr_int(CArray * self, CArrayDescriptor *descr, int nd,
int *dims, int *strides, void *data,
int flags, CArray *base, int zeroed,
int allow_emptystring);
CArray * CArray_NewLikeArray(CArray *prototype, CARRAY_ORDER order, CArrayDescriptor *dtype, int subok);
CArray * CArray_CheckAxis(CArray * arr, int * axis, int flags);
void CArray_Hashtable_Data_Copy(CArray * target_carray, zval * target_zval, int * first_index);
void CArray_FromZval(zval * php_obj, char type, MemoryPointer * ptr);
void CArray_Dump(CArray * ca);
int * CArray_Generate_Strides(int * dims, int ndims, char type);
void CArray_Print(CArray *array, int force_summary);
CArray * CArray_FromMemoryPointer(MemoryPointer * ptr);
CArray * CArray_FromCArray(CArray * arr, CArrayDescriptor *newtype, int flags);
CArray * CArray_FromAnyUnwrap(CArray *op, CArrayDescriptor *newtype, int min_depth,
int max_depth, int flags, CArray *context);
CArray * CArray_NewFromDescrAndBase(CArray * subtype, CArrayDescriptor * descr, int nd,
int * dims, int * strides, void * data, int flags,
CArray * base);
CArray * CArray_New(CArray *subtype, int nd, int *dims, int type_num,
int *strides, void *data, int itemsize, int flags, CArray * base);
CArray * CArray_NewFromDescr( CArray *subtype, CArrayDescriptor *descr,
int nd, int *dims, int *strides, void *data,
int flags, CArray * base);
CArrayDescriptor * CArray_DescrNew(CArrayDescriptor * base);
int CArray_SetWritebackIfCopyBase(CArray *arr, CArray *base);
int CArray_FailUnlessWriteable(CArray *obj, const char *name);
int array_might_be_written(CArray *obj);
CArrayDescriptor * CArray_DescrFromType(int typenum);
int CArray_ResolveWritebackIfCopy(CArray * self);
int CArray_CompareLists(int *l1, int *l2, int n);
int CArray_EquivTypes(CArrayDescriptor * a, CArrayDescriptor * b);
int CArray_EquivArrTypes(CArray * a, CArray * b);
int CArray_CopyInto(CArray * dest, CArray * src);
int CArray_ElementStrides(CArray *obj);
/**
* Methods
**/
CArray * CArray_NewScalar(char type, MemoryPointer *out);
CArray * CArray_Empty(int nd, int *dims, CArrayDescriptor *type, int fortran, MemoryPointer * ptr);
CArray * CArray_Eye(int n, int m, int k, char * dtype, MemoryPointer * out);
CArray * CArray_CheckFromAny(CArray *op, CArrayDescriptor *descr, int min_depth,
int max_depth, int requires, CArray *context);
CArray * CArray_FromAny(CArray *op, CArrayDescriptor *newtype, int min_depth, int max_depth, int flags);
CArray * CArray_FromArray(CArray *arr, CArrayDescriptor *newtype, int flags);
#define CArray_ContiguousFromAny(op, type, min_depth, max_depth) \
CArray_FromAny(op, type, min_depth, \
max_depth, CARRAY_ARRAY_DEFAULT)
#endif //PHPSCI_EXT_CARRAY_H
|
import asyncio
import atexit
import signal
import sqlite3
from secrets import token_bytes
from typing import Dict, List, Optional
from chives.consensus.constants import ConsensusConstants
from chives.daemon.server import WebSocketServer, create_server_for_daemon, daemon_launch_lock_path, singleton
from chives.full_node.full_node_api import FullNodeAPI
from chives.server.start_farmer import service_kwargs_for_farmer
from chives.server.start_full_node import service_kwargs_for_full_node
from chives.server.start_harvester import service_kwargs_for_harvester
from chives.server.start_introducer import service_kwargs_for_introducer
from chives.server.start_service import Service
from chives.server.start_timelord import service_kwargs_for_timelord
from chives.server.start_wallet import service_kwargs_for_wallet
from chives.simulator.start_simulator import service_kwargs_for_full_node_simulator
from chives.timelord.timelord_launcher import kill_processes, spawn_process
from chives.types.peer_info import PeerInfo
from chives.util.bech32m import encode_puzzle_hash
from tests.block_tools import create_block_tools, create_block_tools_async, test_constants
from tests.util.keyring import TempKeyring
from chives.util.hash import std_hash
from chives.util.ints import uint16, uint32
from chives.util.keychain import bytes_to_mnemonic
from tests.time_out_assert import time_out_assert_custom_interval
def cleanup_keyring(keyring: TempKeyring):
keyring.cleanup()
temp_keyring = TempKeyring()
keychain = temp_keyring.get_keychain()
atexit.register(cleanup_keyring, temp_keyring) # Attempt to cleanup the temp keychain
bt = create_block_tools(constants=test_constants, keychain=keychain)
self_hostname = bt.config["self_hostname"]
def constants_for_dic(dic):
return test_constants.replace(**dic)
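# Each setup_* function below is an async generator: it yields the running
# service(s) once, and the code after the yield performs teardown. Advancing
# every generator one more time therefore shuts the corresponding node down;
# StopAsyncIteration just means that generator had already finished.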
async def _teardown_nodes(node_aiters: List) -> None:
awaitables = [node_iter.__anext__() for node_iter in node_aiters]
for sublist_awaitable in asyncio.as_completed(awaitables):
try:
await sublist_awaitable
except StopAsyncIteration:
pass
async def setup_daemon(btools):
root_path = btools.root_path
config = btools.config
lockfile = singleton(daemon_launch_lock_path(root_path))
crt_path = root_path / config["daemon_ssl"]["private_crt"]
key_path = root_path / config["daemon_ssl"]["private_key"]
ca_crt_path = root_path / config["private_ssl_ca"]["crt"]
ca_key_path = root_path / config["private_ssl_ca"]["key"]
assert lockfile is not None
create_server_for_daemon(btools.root_path)
ws_server = WebSocketServer(root_path, ca_crt_path, ca_key_path, crt_path, key_path)
await ws_server.start()
yield ws_server
await ws_server.stop()
async def setup_full_node(
consensus_constants: ConsensusConstants,
db_name,
port,
local_bt,
introducer_port=None,
simulator=False,
send_uncompact_interval=0,
sanitize_weight_proof_only=False,
connect_to_daemon=False,
db_version=1,
):
db_path = local_bt.root_path / f"{db_name}"
if db_path.exists():
db_path.unlink()
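    # Presumably pre-creates the schema-version marker so the fresh DB file is
    # treated as the requested (v2+) database format rather than a legacy v1 file.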
if db_version > 1:
with sqlite3.connect(db_path) as connection:
connection.execute("CREATE TABLE database_version(version int)")
connection.execute("INSERT INTO database_version VALUES (?)", (db_version,))
connection.commit()
config = local_bt.config["full_node"]
config["database_path"] = db_name
config["send_uncompact_interval"] = send_uncompact_interval
config["target_uncompact_proofs"] = 30
config["peer_connect_interval"] = 50
config["sanitize_weight_proof_only"] = sanitize_weight_proof_only
if introducer_port is not None:
config["introducer_peer"]["host"] = self_hostname
config["introducer_peer"]["port"] = introducer_port
else:
config["introducer_peer"] = None
config["dns_servers"] = []
config["port"] = port
config["rpc_port"] = port + 1000
overrides = config["network_overrides"]["constants"][config["selected_network"]]
updated_constants = consensus_constants.replace_str_to_bytes(**overrides)
if simulator:
kwargs = service_kwargs_for_full_node_simulator(local_bt.root_path, config, local_bt)
else:
kwargs = service_kwargs_for_full_node(local_bt.root_path, config, updated_constants)
kwargs.update(
parse_cli_args=False,
connect_to_daemon=connect_to_daemon,
)
service = Service(**kwargs)
await service.start()
yield service._api
service.stop()
await service.wait_closed()
if db_path.exists():
db_path.unlink()
async def setup_wallet_node(
port,
consensus_constants: ConsensusConstants,
local_bt,
full_node_port=None,
introducer_port=None,
key_seed=None,
starting_height=None,
initial_num_public_keys=5,
):
with TempKeyring() as keychain:
config = bt.config["wallet"]
config["port"] = port
config["rpc_port"] = port + 1000
if starting_height is not None:
config["starting_height"] = starting_height
config["initial_num_public_keys"] = initial_num_public_keys
entropy = token_bytes(32)
if key_seed is None:
key_seed = entropy
keychain.add_private_key(bytes_to_mnemonic(key_seed), "")
first_pk = keychain.get_first_public_key()
assert first_pk is not None
db_path_key_suffix = str(first_pk.get_fingerprint())
db_name = f"test-wallet-db-{port}-KEY.sqlite"
db_path_replaced: str = db_name.replace("KEY", db_path_key_suffix)
db_path = bt.root_path / db_path_replaced
if db_path.exists():
db_path.unlink()
config["database_path"] = str(db_name)
config["testing"] = True
config["introducer_peer"]["host"] = self_hostname
if introducer_port is not None:
config["introducer_peer"]["port"] = introducer_port
config["peer_connect_interval"] = 10
else:
config["introducer_peer"] = None
if full_node_port is not None:
config["full_node_peer"] = {}
config["full_node_peer"]["host"] = self_hostname
config["full_node_peer"]["port"] = full_node_port
else:
del config["full_node_peer"]
kwargs = service_kwargs_for_wallet(local_bt.root_path, config, consensus_constants, keychain)
kwargs.update(
parse_cli_args=False,
connect_to_daemon=False,
)
service = Service(**kwargs)
await service.start()
yield service._node, service._node.server
service.stop()
await service.wait_closed()
if db_path.exists():
db_path.unlink()
keychain.delete_all_keys()
async def setup_harvester(
port, farmer_port, consensus_constants: ConsensusConstants, b_tools, start_service: bool = True
):
kwargs = service_kwargs_for_harvester(b_tools.root_path, b_tools.config["harvester"], consensus_constants)
kwargs.update(
server_listen_ports=[port],
advertised_port=port,
connect_peers=[PeerInfo(self_hostname, farmer_port)],
parse_cli_args=False,
connect_to_daemon=False,
)
service = Service(**kwargs)
if start_service:
await service.start()
yield service
service.stop()
await service.wait_closed()
async def setup_farmer(
port,
consensus_constants: ConsensusConstants,
b_tools,
full_node_port: Optional[uint16] = None,
start_service: bool = True,
):
config = bt.config["farmer"]
config_pool = bt.config["pool"]
config["xcc_target_address"] = encode_puzzle_hash(b_tools.farmer_ph, "xcc")
config["pool_public_keys"] = [bytes(pk).hex() for pk in b_tools.pool_pubkeys]
config["port"] = port
config_pool["xcc_target_address"] = encode_puzzle_hash(b_tools.pool_ph, "xcc")
if full_node_port:
config["full_node_peer"]["host"] = self_hostname
config["full_node_peer"]["port"] = full_node_port
else:
del config["full_node_peer"]
kwargs = service_kwargs_for_farmer(
b_tools.root_path, config, config_pool, consensus_constants, b_tools.local_keychain
)
kwargs.update(
parse_cli_args=False,
connect_to_daemon=False,
)
service = Service(**kwargs)
if start_service:
await service.start()
yield service
service.stop()
await service.wait_closed()
async def setup_introducer(port):
kwargs = service_kwargs_for_introducer(
bt.root_path,
bt.config["introducer"],
)
kwargs.update(
advertised_port=port,
parse_cli_args=False,
connect_to_daemon=False,
)
service = Service(**kwargs)
await service.start()
yield service._api, service._node.server
service.stop()
await service.wait_closed()
async def setup_vdf_client(port):
vdf_task_1 = asyncio.create_task(spawn_process(self_hostname, port, 1, bt.config.get("prefer_ipv6")))
def stop():
asyncio.create_task(kill_processes())
asyncio.get_running_loop().add_signal_handler(signal.SIGTERM, stop)
asyncio.get_running_loop().add_signal_handler(signal.SIGINT, stop)
yield vdf_task_1
await kill_processes()
async def setup_vdf_clients(port):
vdf_task_1 = asyncio.create_task(spawn_process(self_hostname, port, 1, bt.config.get("prefer_ipv6")))
vdf_task_2 = asyncio.create_task(spawn_process(self_hostname, port, 2, bt.config.get("prefer_ipv6")))
vdf_task_3 = asyncio.create_task(spawn_process(self_hostname, port, 3, bt.config.get("prefer_ipv6")))
def stop():
asyncio.create_task(kill_processes())
asyncio.get_running_loop().add_signal_handler(signal.SIGTERM, stop)
asyncio.get_running_loop().add_signal_handler(signal.SIGINT, stop)
yield vdf_task_1, vdf_task_2, vdf_task_3
await kill_processes()
async def setup_timelord(port, full_node_port, sanitizer, consensus_constants: ConsensusConstants, b_tools):
config = b_tools.config["timelord"]
config["port"] = port
config["full_node_peer"]["port"] = full_node_port
config["bluebox_mode"] = sanitizer
config["fast_algorithm"] = False
if sanitizer:
config["vdf_server"]["port"] = 7999
kwargs = service_kwargs_for_timelord(b_tools.root_path, config, consensus_constants)
kwargs.update(
parse_cli_args=False,
connect_to_daemon=False,
)
service = Service(**kwargs)
await service.start()
yield service._api, service._node.server
service.stop()
await service.wait_closed()
async def setup_two_nodes(consensus_constants: ConsensusConstants, db_version: int):
"""
Setup and teardown of two full nodes, with blockchains and separate DBs.
"""
with TempKeyring() as keychain1, TempKeyring() as keychain2:
node_iters = [
setup_full_node(
consensus_constants,
"blockchain_test.db",
21234,
await create_block_tools_async(constants=test_constants, keychain=keychain1),
simulator=False,
db_version=db_version,
),
setup_full_node(
consensus_constants,
"blockchain_test_2.db",
21235,
await create_block_tools_async(constants=test_constants, keychain=keychain2),
simulator=False,
db_version=db_version,
),
]
fn1 = await node_iters[0].__anext__()
fn2 = await node_iters[1].__anext__()
yield fn1, fn2, fn1.full_node.server, fn2.full_node.server
await _teardown_nodes(node_iters)
async def setup_n_nodes(consensus_constants: ConsensusConstants, n: int, db_version: int):
"""
Setup and teardown of n full nodes, with blockchains and separate DBs.
"""
port_start = 21244
node_iters = []
keyrings_to_cleanup = []
for i in range(n):
keyring = TempKeyring()
keyrings_to_cleanup.append(keyring)
node_iters.append(
setup_full_node(
consensus_constants,
f"blockchain_test_{i}.db",
port_start + i,
await create_block_tools_async(constants=test_constants, keychain=keyring.get_keychain()),
simulator=False,
db_version=db_version,
)
)
nodes = []
for ni in node_iters:
nodes.append(await ni.__anext__())
yield nodes
await _teardown_nodes(node_iters)
for keyring in keyrings_to_cleanup:
keyring.cleanup()
async def setup_node_and_wallet(
consensus_constants: ConsensusConstants, starting_height=None, key_seed=None, db_version=1
):
with TempKeyring() as keychain:
btools = await create_block_tools_async(constants=test_constants, keychain=keychain)
node_iters = [
setup_full_node(
consensus_constants, "blockchain_test.db", 21234, btools, simulator=False, db_version=db_version
),
setup_wallet_node(
21235, consensus_constants, btools, None, starting_height=starting_height, key_seed=key_seed
),
]
full_node_api = await node_iters[0].__anext__()
wallet, s2 = await node_iters[1].__anext__()
yield full_node_api, wallet, full_node_api.full_node.server, s2
await _teardown_nodes(node_iters)
async def setup_simulators_and_wallets(
simulator_count: int,
wallet_count: int,
dic: Dict,
starting_height=None,
key_seed=None,
starting_port=50000,
initial_num_public_keys=5,
db_version=1,
):
with TempKeyring() as keychain1, TempKeyring() as keychain2:
simulators: List[FullNodeAPI] = []
wallets = []
node_iters = []
consensus_constants = constants_for_dic(dic)
for index in range(0, simulator_count):
port = starting_port + index
db_name = f"blockchain_test_{port}.db"
bt_tools = await create_block_tools_async(
consensus_constants, const_dict=dic, keychain=keychain1
) # block tools modifies constants
sim = setup_full_node(
bt_tools.constants,
db_name,
port,
bt_tools,
simulator=True,
db_version=db_version,
)
simulators.append(await sim.__anext__())
node_iters.append(sim)
for index in range(0, wallet_count):
if key_seed is None:
seed = std_hash(uint32(index))
else:
seed = key_seed
port = starting_port + 5000 + index
bt_tools = await create_block_tools_async(
consensus_constants, const_dict=dic, keychain=keychain2
) # block tools modifies constants
wlt = setup_wallet_node(
port,
bt_tools.constants,
bt_tools,
None,
key_seed=seed,
starting_height=starting_height,
initial_num_public_keys=initial_num_public_keys,
)
wallets.append(await wlt.__anext__())
node_iters.append(wlt)
yield simulators, wallets
await _teardown_nodes(node_iters)
async def setup_farmer_harvester(consensus_constants: ConsensusConstants, start_services: bool = True):
node_iters = [
setup_harvester(21234, 21235, consensus_constants, bt, start_services),
setup_farmer(21235, consensus_constants, bt, start_service=start_services),
]
harvester_service = await node_iters[0].__anext__()
farmer_service = await node_iters[1].__anext__()
yield harvester_service, farmer_service
await _teardown_nodes(node_iters)
async def setup_full_system(
consensus_constants: ConsensusConstants, b_tools=None, b_tools_1=None, connect_to_daemon=False, db_version=1
):
with TempKeyring() as keychain1, TempKeyring() as keychain2:
if b_tools is None:
b_tools = await create_block_tools_async(constants=test_constants, keychain=keychain1)
if b_tools_1 is None:
b_tools_1 = await create_block_tools_async(constants=test_constants, keychain=keychain2)
node_iters = [
setup_introducer(21233),
setup_harvester(21234, 21235, consensus_constants, b_tools),
setup_farmer(21235, consensus_constants, b_tools, uint16(21237)),
setup_vdf_clients(8000),
setup_timelord(21236, 21237, False, consensus_constants, b_tools),
setup_full_node(
consensus_constants,
"blockchain_test.db",
21237,
b_tools,
21233,
False,
10,
True,
connect_to_daemon,
db_version=db_version,
),
setup_full_node(
consensus_constants,
"blockchain_test_2.db",
21238,
b_tools_1,
21233,
False,
10,
True,
connect_to_daemon,
db_version=db_version,
),
setup_vdf_client(7999),
setup_timelord(21239, 21238, True, consensus_constants, b_tools_1),
]
introducer, introducer_server = await node_iters[0].__anext__()
harvester_service = await node_iters[1].__anext__()
harvester = harvester_service._node
farmer_service = await node_iters[2].__anext__()
farmer = farmer_service._node
async def num_connections():
count = len(harvester.server.all_connections.items())
return count
await time_out_assert_custom_interval(10, 3, num_connections, 1)
vdf_clients = await node_iters[3].__anext__()
timelord, timelord_server = await node_iters[4].__anext__()
node_api_1 = await node_iters[5].__anext__()
node_api_2 = await node_iters[6].__anext__()
vdf_sanitizer = await node_iters[7].__anext__()
sanitizer, sanitizer_server = await node_iters[8].__anext__()
yield (
node_api_1,
node_api_2,
harvester,
farmer,
introducer,
timelord,
vdf_clients,
vdf_sanitizer,
sanitizer,
node_api_1.full_node.server,
)
await _teardown_nodes(node_iters)
|
const PlotCard = require('../../plotcard.js');
class MarchedToTheWall extends PlotCard {
setupCardAbilities() {
this.whenRevealed({
handler: () => {
this.remainingPlayers = this.game.getPlayersInFirstPlayerOrder();
this.selections = [];
this.proceedToNextStep();
}
});
}
cancelSelection(player) {
this.game.addAlert('danger', '{0} cancels the resolution of {1}', player, this);
this.proceedToNextStep();
}
onCardSelected(player, card) {
this.selections.push({ player: player, card: card });
this.game.addMessage('{0} selects {1} to discard for {2}', player, card, this);
this.proceedToNextStep();
return true;
}
doDiscard() {
let cards = this.selections.map(selection => selection.card);
this.game.discardFromPlay(cards, { allowSave: false });
}
proceedToNextStep() {
if(this.remainingPlayers.length > 0) {
let currentPlayer = this.remainingPlayers.shift();
if(!currentPlayer.anyCardsInPlay(card => card.getType() === 'character')) {
this.proceedToNextStep();
return true;
}
this.game.promptForSelect(currentPlayer, {
source: this,
cardCondition: card => card.location === 'play area' && card.controller === currentPlayer && card.getType() === 'character',
onSelect: (player, cards) => this.onCardSelected(player, cards),
onCancel: (player) => this.cancelSelection(player)
});
} else {
this.doDiscard();
}
}
}
MarchedToTheWall.code = '01015';
module.exports = MarchedToTheWall;
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'h:\projects\jukebox-core\src\jukeboxcore\gui\widgets\guerilla\seqcreator.ui'
#
# Created: Tue Jan 13 18:54:58 2015
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_seqcreator_dialog(object):
def setupUi(self, seqcreator_dialog):
seqcreator_dialog.setObjectName("seqcreator_dialog")
seqcreator_dialog.resize(937, 739)
self.gridLayout = QtGui.QGridLayout(seqcreator_dialog)
self.gridLayout.setObjectName("gridLayout")
self.name_lb = QtGui.QLabel(seqcreator_dialog)
self.name_lb.setObjectName("name_lb")
self.gridLayout.addWidget(self.name_lb, 0, 0, 1, 1)
self.name_le = QtGui.QLineEdit(seqcreator_dialog)
self.name_le.setObjectName("name_le")
self.gridLayout.addWidget(self.name_le, 0, 1, 1, 1)
self.desc_lb = QtGui.QLabel(seqcreator_dialog)
self.desc_lb.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.desc_lb.setObjectName("desc_lb")
self.gridLayout.addWidget(self.desc_lb, 1, 0, 1, 1)
self.desc_pte = QtGui.QPlainTextEdit(seqcreator_dialog)
self.desc_pte.setObjectName("desc_pte")
self.gridLayout.addWidget(self.desc_pte, 1, 1, 1, 1)
self.create_pb = QtGui.QPushButton(seqcreator_dialog)
self.create_pb.setObjectName("create_pb")
self.gridLayout.addWidget(self.create_pb, 2, 1, 1, 1)
self.retranslateUi(seqcreator_dialog)
QtCore.QMetaObject.connectSlotsByName(seqcreator_dialog)
def retranslateUi(self, seqcreator_dialog):
seqcreator_dialog.setWindowTitle(QtGui.QApplication.translate("seqcreator_dialog", "Create Sequence", None, QtGui.QApplication.UnicodeUTF8))
self.name_lb.setText(QtGui.QApplication.translate("seqcreator_dialog", "Name", None, QtGui.QApplication.UnicodeUTF8))
self.desc_lb.setText(QtGui.QApplication.translate("seqcreator_dialog", "Description", None, QtGui.QApplication.UnicodeUTF8))
self.create_pb.setText(QtGui.QApplication.translate("seqcreator_dialog", "Create", None, QtGui.QApplication.UnicodeUTF8))
|
import math
def primes(n:int) -> list:
factors = []
if n < 0:
factors.append(-1)
factors.append(1)
n *= -1
more = False
i = 2
while i <= math.sqrt(n):
while n % i == 0:
if not more:
factors.append(i)
factors.append(1)
more = True
else:
factors[-1] += 1
            n //= i  # floor division keeps n an int (plain / would turn it into a float)
more = False
if i == 2:
i += 1
else:
i += 2
if n != 1:
factors.append(int(n))
factors.append(1)
return factors if len(factors) != 0 else [1, 1]
def primes_to_string(p:list) -> str:
ret = ''
for i in range(int(len(p) / 2)):
if p[2 * i] == -1:
ret += '-'
else:
if len(ret) > 0 and ret != '-':
ret += ' * '
ret += str(p[2 * i]) + ('^' + str(p[2 * i + 1]) if p[2 * i + 1] > 1 else '')
return ret
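# Minimal usage sketch (added for illustration; not part of the original module).
# primes() returns a flat [factor, exponent, factor, exponent, ...] list which
# primes_to_string() renders in "p^e * q^f" form.
if __name__ == '__main__':
    print(primes(360))                     # [2, 3, 3, 2, 5, 1] since 360 = 2^3 * 3^2 * 5
    print(primes_to_string(primes(360)))   # "2^3 * 3^2 * 5"
    print(primes_to_string(primes(-12)))   # "-2^2 * 3"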
|
var $ = require('cheerio');
// TODO: support gallery: http://hyderphoto.smugmug.com/Daily-Photos/Daily-Photos/27429648_7XP8jg#!i=4106742422&k=zsmTVn9
module.exports = {
mixins: [
"*"
],
highestPriority: true,
getMeta: function (oembed) {
return {
media: 'player'
}
},
getLink: function (oembed) {
var links = [];
if (oembed.type === "photo") {
var size_M_src = oembed.url;
var size_X_src = size_M_src.replace("/M/", "/X3/");
//thumbnail
links.push({
href: size_M_src,
type: CONFIG.T.image,
rel: CONFIG.R.thumbnail,
width: oembed.width,
height: oembed.height
});
//photo
links.push({
href: size_X_src,
type: CONFIG.T.image,
rel: CONFIG.R.image
});
} else if (oembed.type === "rich") {
// iframe'd gallery
var $container = $('<div>');
try {
$container.html(oembed.html);
} catch(ex) {}
var $iframe = $container.find('iframe');
if ($iframe.length == 1) {
links.push({
href: $iframe.attr('src').replace(/^http:\/\//, '//') + "?width=100%&height=100%",
type: CONFIG.T.text_html,
rel: [CONFIG.R.player, CONFIG.R.oembed]
});
}
        } // else it's an oembed link with no thumbnail or other useful info.
return links;
},
tests: [{
pageWithFeed: "http://www.smugmug.com/popular/today",
getUrl: function(url) {
return url.indexOf('smugmug.com/') > -1 ? url : null;
},
skipMethods: ["getLink"]
},
"http://www.smugmug.com/popular/all#!i=789708429&k=sPdffjw",
"http://cedricwalter.smugmug.com/Computers/Joomla/i-726WRdK/A"
]
}; |
import axios from "axios";
const yo = {
userdata: function () {
return axios.get("https://randomuser.me/api/?results=50");
},
};
export default yo;
|
#!/usr/bin/env python3
import os
import sys
import logging
import argparse
import platform
import subprocess
import hashlib
os.environ["PYTHONUNBUFFERED"] = "y"
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(ZULIP_PATH)
from scripts.lib.zulip_tools import run_as_root, ENDC, WARNING, \
get_dev_uuid_var_path, FAIL, os_families, parse_os_release, \
overwrite_symlink
from scripts.lib.setup_venv import (
get_venv_dependencies, THUMBOR_VENV_DEPENDENCIES,
YUM_THUMBOR_VENV_DEPENDENCIES,
)
from scripts.lib.node_cache import setup_node_modules, NODE_MODULES_CACHE_PATH
from tools.setup import setup_venvs
from typing import List, TYPE_CHECKING
if TYPE_CHECKING:
# typing_extensions might not be installed yet
from typing_extensions import NoReturn
VAR_DIR_PATH = os.path.join(ZULIP_PATH, 'var')
is_travis = 'TRAVIS' in os.environ
is_circleci = 'CIRCLECI' in os.environ
if not os.path.exists(os.path.join(ZULIP_PATH, ".git")):
print(FAIL + "Error: No Zulip git repository present!" + ENDC)
print("To setup the Zulip development environment, you should clone the code")
print("from GitHub, rather than using a Zulip production release tarball.")
sys.exit(1)
# Check the RAM on the user's system, and throw an error if <1.5GB.
# This avoids users getting segfaults running `pip install` that are
# generally more annoying to debug.
with open("/proc/meminfo") as meminfo:
ram_size = meminfo.readlines()[0].strip().split(" ")[-2]
ram_gb = float(ram_size) / 1024.0 / 1024.0
if ram_gb < 1.5:
print("You have insufficient RAM (%s GB) to run the Zulip development environment." % (
round(ram_gb, 2),))
print("We recommend at least 2 GB of RAM, and require at least 1.5 GB.")
sys.exit(1)
try:
UUID_VAR_PATH = get_dev_uuid_var_path(create_if_missing=True)
os.makedirs(UUID_VAR_PATH, exist_ok=True)
if os.path.exists(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')):
os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
os.symlink(
os.path.join(ZULIP_PATH, 'README.md'),
os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')
)
os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
except OSError:
print(FAIL + "Error: Unable to create symlinks."
"Make sure you have permission to create symbolic links." + ENDC)
print("See this page for more information:")
print(" https://zulip.readthedocs.io/en/latest/development/setup-vagrant.html#os-symlink-error")
sys.exit(1)
if platform.architecture()[0] == '64bit':
arch = 'amd64'
elif platform.architecture()[0] == '32bit':
arch = "i386"
else:
logging.critical("Only x86 is supported;"
" ask on chat.zulip.org if you want another architecture.")
# Note: It's probably actually not hard to add additional
# architectures.
sys.exit(1)
distro_info = parse_os_release()
vendor = distro_info['ID']
os_version = distro_info['VERSION_ID']
if vendor == "debian" and os_version == "9": # stretch
POSTGRES_VERSION = "9.6"
elif vendor == "debian" and os_version == "10": # buster
POSTGRES_VERSION = "11"
elif vendor == "ubuntu" and os_version == "16.04": # xenial
POSTGRES_VERSION = "9.5"
elif vendor == "ubuntu" and os_version in ["18.04", "18.10"]: # bionic, cosmic
POSTGRES_VERSION = "10"
elif vendor == "ubuntu" and os_version in ["19.04", "19.10"]: # disco, eoan
POSTGRES_VERSION = "11"
elif vendor == "ubuntu" and os_version == "20.04": # focal
POSTGRES_VERSION = "12"
elif vendor == "fedora" and os_version == "29":
POSTGRES_VERSION = "10"
elif vendor == "rhel" and os_version.startswith("7."):
POSTGRES_VERSION = "10"
elif vendor == "centos" and os_version == "7":
POSTGRES_VERSION = "10"
else:
logging.critical("Unsupported platform: {} {}".format(vendor, os_version))
if vendor == 'ubuntu' and os_version == '14.04':
print()
print("Ubuntu Trusty reached end-of-life upstream and is no longer a supported platform for Zulip")
if os.path.exists('/home/vagrant'):
print("To upgrade, run `vagrant destroy`, and then recreate the Vagrant guest.\n")
print("See: https://zulip.readthedocs.io/en/latest/development/setup-vagrant.html")
sys.exit(1)
VENV_DEPENDENCIES = get_venv_dependencies(vendor, os_version)
COMMON_DEPENDENCIES = [
"memcached",
"rabbitmq-server",
"supervisor",
"git",
"wget",
"ca-certificates", # Explicit dependency in case e.g. wget is already installed
"puppet", # Used by lint (`puppet parser validate`)
"gettext", # Used by makemessages i18n
"transifex-client", # Needed to sync translations from transifex
"curl", # Used for fetching PhantomJS as wget occasionally fails on redirects
"moreutils", # Used for sponge command
"unzip", # Needed for Slack import
# Puppeteer dependencies from here
"gconf-service",
"libgconf-2-4",
"libgtk-3-0",
"libatk-bridge2.0-0",
"libx11-xcb1",
"libxss1",
"fonts-liberation",
"libappindicator1",
"xdg-utils"
# Puppeteer dependencies end here.
]
UBUNTU_COMMON_APT_DEPENDENCIES = COMMON_DEPENDENCIES + [
"redis-server",
"hunspell-en-us",
"puppet-lint",
"netcat", # Used for flushing memcached
"libfontconfig1", # Required by phantomjs
"default-jre-headless", # Required by vnu-jar
] + THUMBOR_VENV_DEPENDENCIES
COMMON_YUM_DEPENDENCIES = COMMON_DEPENDENCIES + [
"redis",
"hunspell-en-US",
"rubygem-puppet-lint",
"nmap-ncat",
"fontconfig", # phantomjs dependencies from here until libstdc++
"freetype",
"freetype-devel",
"fontconfig-devel",
"libstdc++"
] + YUM_THUMBOR_VENV_DEPENDENCIES
BUILD_PGROONGA_FROM_SOURCE = False
if vendor == 'debian' and os_version in [] or vendor == 'ubuntu' and os_version in []:
# For platforms without a pgroonga release, we need to build it
# from source.
BUILD_PGROONGA_FROM_SOURCE = True
SYSTEM_DEPENDENCIES = UBUNTU_COMMON_APT_DEPENDENCIES + [
pkg.format(POSTGRES_VERSION) for pkg in [
"postgresql-{0}",
# Dependency for building pgroonga from source
"postgresql-server-dev-{0}",
"libgroonga-dev",
"libmsgpack-dev",
"clang-9",
"llvm-9-dev"
]
] + VENV_DEPENDENCIES
elif "debian" in os_families():
SYSTEM_DEPENDENCIES = UBUNTU_COMMON_APT_DEPENDENCIES + [
pkg.format(POSTGRES_VERSION) for pkg in [
"postgresql-{0}",
"postgresql-{0}-pgroonga",
]
] + VENV_DEPENDENCIES
elif "rhel" in os_families():
SYSTEM_DEPENDENCIES = COMMON_YUM_DEPENDENCIES + [
pkg.format(POSTGRES_VERSION) for pkg in [
"postgresql{0}-server",
"postgresql{0}",
"postgresql{0}-devel",
"postgresql{0}-pgroonga",
]
] + VENV_DEPENDENCIES
elif "fedora" in os_families():
SYSTEM_DEPENDENCIES = COMMON_YUM_DEPENDENCIES + [
pkg.format(POSTGRES_VERSION) for pkg in [
"postgresql{0}-server",
"postgresql{0}",
"postgresql{0}-devel",
# Needed to build pgroonga from source
"groonga-devel",
"msgpack-devel",
]
] + VENV_DEPENDENCIES
BUILD_PGROONGA_FROM_SOURCE = True
if "fedora" in os_families():
TSEARCH_STOPWORDS_PATH = "/usr/pgsql-%s/share/tsearch_data/" % (POSTGRES_VERSION,)
else:
TSEARCH_STOPWORDS_PATH = "/usr/share/postgresql/%s/tsearch_data/" % (POSTGRES_VERSION,)
REPO_STOPWORDS_PATH = os.path.join(
ZULIP_PATH,
"puppet",
"zulip",
"files",
"postgresql",
"zulip_english.stop",
)
def install_system_deps():
# type: () -> None
# By doing list -> set -> list conversion, we remove duplicates.
deps_to_install = sorted(set(SYSTEM_DEPENDENCIES))
if "fedora" in os_families():
install_yum_deps(deps_to_install)
elif "debian" in os_families():
install_apt_deps(deps_to_install)
else:
raise AssertionError("Invalid vendor")
# For some platforms, there aren't published pgroonga
# packages available, so we build them from source.
if BUILD_PGROONGA_FROM_SOURCE:
run_as_root(["./scripts/lib/build-pgroonga"])
def install_apt_deps(deps_to_install):
# type: (List[str]) -> None
# setup-apt-repo does an `apt-get update` if the sources.list files changed.
run_as_root(["./scripts/lib/setup-apt-repo"])
# But we still need to do our own to make sure we have up-to-date
# data before installing new packages, as the system might not have
# done an apt update in weeks otherwise, which could result in 404s
# trying to download old versions that were already removed from mirrors.
run_as_root(["apt-get", "update"])
run_as_root(
[
"env", "DEBIAN_FRONTEND=noninteractive",
"apt-get", "-y", "install", "--no-install-recommends",
]
+ deps_to_install
)
def install_yum_deps(deps_to_install):
# type: (List[str]) -> None
print(WARNING + "RedHat support is still experimental.")
run_as_root(["./scripts/lib/setup-yum-repo"])
# Hack specific to unregistered RHEL system. The moreutils
# package requires a perl module package, which isn't available in
# the unregistered RHEL repositories.
#
# Error: Package: moreutils-0.49-2.el7.x86_64 (epel)
# Requires: perl(IPC::Run)
yum_extra_flags = [] # type: List[str]
if vendor == "rhel":
exitcode, subs_status = subprocess.getstatusoutput("sudo subscription-manager status")
if exitcode == 1:
            # TODO this might be overkill since `subscription-manager` is already
# called in setup-yum-repo
if 'Status' in subs_status:
# The output is well-formed
yum_extra_flags = ["--skip-broken"]
else:
print("Unrecognized output. `subscription-manager` might not be available")
run_as_root(["yum", "install", "-y"] + yum_extra_flags + deps_to_install)
if "rhel" in os_families():
# This is how a pip3 is installed to /usr/bin in CentOS/RHEL
# for python35 and later.
run_as_root(["python36", "-m", "ensurepip"])
# `python36` is not aliased to `python3` by default
run_as_root(["ln", "-nsf", "/usr/bin/python36", "/usr/bin/python3"])
postgres_dir = 'pgsql-%s' % (POSTGRES_VERSION,)
for cmd in ['pg_config', 'pg_isready', 'psql']:
# Our tooling expects these postgres scripts to be at
# well-known paths. There's an argument for eventually
# making our tooling auto-detect, but this is simpler.
run_as_root(["ln", "-nsf", "/usr/%s/bin/%s" % (postgres_dir, cmd),
"/usr/bin/%s" % (cmd,)])
# From here, we do the first-time setup/initialization for the postgres database.
pg_datadir = "/var/lib/pgsql/%s/data" % (POSTGRES_VERSION,)
pg_hba_conf = os.path.join(pg_datadir, "pg_hba.conf")
# We can't just check if the file exists with os.path, since the
# current user likely doesn't have permission to read the
# pg_datadir directory.
if subprocess.call(["sudo", "test", "-e", pg_hba_conf]) == 0:
# Skip setup if it has been applied previously
return
run_as_root(["/usr/%s/bin/postgresql-%s-setup" % (postgres_dir, POSTGRES_VERSION), "initdb"],
sudo_args = ['-H'])
# Use vendored pg_hba.conf, which enables password authentication.
run_as_root(["cp", "-a", "puppet/zulip/files/postgresql/centos_pg_hba.conf", pg_hba_conf])
# Later steps will ensure postgres is started
# Link in tsearch data files
overwrite_symlink("/usr/share/myspell/en_US.dic", "/usr/pgsql-%s/share/tsearch_data/en_us.dict"
% (POSTGRES_VERSION,))
overwrite_symlink("/usr/share/myspell/en_US.aff", "/usr/pgsql-%s/share/tsearch_data/en_us.affix"
% (POSTGRES_VERSION,))
def main(options):
# type: (argparse.Namespace) -> NoReturn
# yarn and management commands expect to be run from the root of the
# project.
os.chdir(ZULIP_PATH)
# hash the apt dependencies
sha_sum = hashlib.sha1()
for apt_depedency in SYSTEM_DEPENDENCIES:
sha_sum.update(apt_depedency.encode('utf8'))
if "debian" in os_families():
sha_sum.update(open('scripts/lib/setup-apt-repo', 'rb').read())
else:
# hash the content of setup-yum-repo*
sha_sum.update(open('scripts/lib/setup-yum-repo', 'rb').read())
# hash the content of build-pgroonga if pgroonga is built from source
if BUILD_PGROONGA_FROM_SOURCE:
sha_sum.update(open('scripts/lib/build-pgroonga', 'rb').read())
new_apt_dependencies_hash = sha_sum.hexdigest()
last_apt_dependencies_hash = None
apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
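    # Open with 'a+' so the hash file is created on the first run; seek(0) then
    # lets us read whatever hash the previous provision run recorded.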
with open(apt_hash_file_path, 'a+') as hash_file:
hash_file.seek(0)
last_apt_dependencies_hash = hash_file.read()
if (new_apt_dependencies_hash != last_apt_dependencies_hash):
try:
install_system_deps()
except subprocess.CalledProcessError:
# Might be a failure due to network connection issues. Retrying...
print(WARNING + "Installing system dependencies failed; retrying..." + ENDC)
install_system_deps()
with open(apt_hash_file_path, 'w') as hash_file:
hash_file.write(new_apt_dependencies_hash)
else:
print("No changes to apt dependencies, so skipping apt operations.")
# Here we install node.
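    # Pass the host's proxy settings (if any) through to install-node, since it
    # downloads node over the network.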
proxy_env = [
"env",
"http_proxy=" + os.environ.get("http_proxy", ""),
"https_proxy=" + os.environ.get("https_proxy", ""),
"no_proxy=" + os.environ.get("no_proxy", ""),
]
run_as_root(proxy_env + ["scripts/lib/install-node"], sudo_args = ['-H'])
if not os.access(NODE_MODULES_CACHE_PATH, os.W_OK):
run_as_root(["mkdir", "-p", NODE_MODULES_CACHE_PATH])
run_as_root(["chown", "%s:%s" % (os.getuid(), os.getgid()), NODE_MODULES_CACHE_PATH])
# This is a wrapper around `yarn`, which we run last since
# it can often fail due to network issues beyond our control.
try:
setup_node_modules(prefer_offline=True)
except subprocess.CalledProcessError:
print(WARNING + "`yarn install` failed; retrying..." + ENDC)
try:
setup_node_modules()
except subprocess.CalledProcessError:
print(FAIL +
"`yarn install` is failing; check your network connection (and proxy settings)."
+ ENDC)
sys.exit(1)
# Install shellcheck.
run_as_root(["tools/setup/install-shellcheck"])
# Install sgrep.
run_as_root(["tools/setup/install-sgrep"])
setup_venvs.main()
run_as_root(["cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])
if is_circleci or (is_travis and not options.is_production_travis):
run_as_root(["service", "rabbitmq-server", "restart"])
run_as_root(["service", "redis-server", "restart"])
run_as_root(["service", "memcached", "restart"])
run_as_root(["service", "postgresql", "restart"])
elif "fedora" in os_families():
# These platforms don't enable and start services on
# installing their package, so we do that here.
for service in ["postgresql-%s" % (POSTGRES_VERSION,), "rabbitmq-server", "memcached", "redis"]:
run_as_root(["systemctl", "enable", service], sudo_args = ['-H'])
run_as_root(["systemctl", "start", service], sudo_args = ['-H'])
# If we imported modules after activating the virtualenv in this
# Python process, they could end up mismatching with modules we’ve
# already imported from outside the virtualenv. That seems like a
# bad idea, and empirically it can cause Python to segfault on
# certain cffi-related imports. Instead, start a new Python
# process inside the virtualenv.
activate_this = "/srv/zulip-py3-venv/bin/activate_this.py"
provision_inner = os.path.join(ZULIP_PATH, "tools", "lib", "provision_inner.py")
exec(open(activate_this).read(), dict(__file__=activate_this))
os.execvp(
provision_inner,
[
provision_inner,
*(["--force"] if options.is_force else []),
*(["--production-travis"] if options.is_production_travis else []),
]
)
if __name__ == "__main__":
description = ("Provision script to install Zulip")
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--force', action='store_true', dest='is_force',
default=False,
help="Ignore all provisioning optimizations.")
parser.add_argument('--production-travis', action='store_true',
dest='is_production_travis',
default=False,
help="Provision for Travis with production settings.")
options = parser.parse_args()
main(options)
|
print(bin(25))
print(0b0101)
print(oct(25))
print(hex(10))
print(0xf)
|
/*eslint-env node */
var http = require('http');
var db = require('./db');
db.initDB();
var USE_FASTCACHE = false;
/*
* To enable the load generator and 'improved' cache mechanism below:
 * remove the stub 'res.json(...)' response and the 'return' directly below it,
 * so that the rest of the method body runs.
 */
exports.loadTest = function(req, res) {
    res.json({"success": 0, "fail": 0, "time": 0});
    return;
USE_FASTCACHE = true;
var testCount = req.query.count;
testCount = testCount ? parseInt(testCount) : 100;
var successCount = 0, failCount = 0;
var startTime = Date.now();
var callback = function(response) {
if (response.statusCode === 200) {
successCount++;
} else {
failCount++;
}
if (successCount + failCount === testCount) {
var endTime = Date.now();
res.json({"success": successCount, "fail": failCount, "time": endTime - startTime});
}
};
var itemId1 = "1f9e7891bffb03605e3a9b43f996f6ea";
var itemId2 = "9dce21273d13dc1dcb1b47370359e753";
for (var i = 0; i < testCount; i++) {
http.get({
host: req.get('host'),
path: "/items/" + (i % 2 ? itemId1 : itemId2)
}, callback);
}
};
//Create and populate or delete the database.
exports.dbOptions = function(req, res) {
var option = req.params.option.toLowerCase();
if (option === 'create') {
db.cloudant.db.create('items', function(err/*, body*/) {
if (!err) {
db.populateDB();
res.send({msg:'Successfully created database and populated!'});
} else {
res.send({msg:err});
}
});
} else if (option === 'delete') {
db.cloudant.db.destroy('items', function(err/*, body*/) {
if (!err) {
res.send({msg:'Successfully deleted db items!'});
} else {
res.send({msg:'Error deleting db items: ' + err});
}
});
} else {
res.send({msg: 'your option was not understood. Please use "create" or "delete"'});
}
};
//Create an item to add to the database.
exports.create = function(req, res) {
db.itemsDb.insert(req.body, function(err/*, body, headers*/) {
if (!err) {
res.send({msg: 'Successfully created item'});
} else {
res.send({msg: 'Error on insert, maybe the item already exists: ' + err});
}
});
};
//find an item by ID.
exports.find = function(req, res) {
var id = req.params.id;
if (USE_FASTCACHE) {
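        // 'Fast cache' mode used by the load test above: skip the real database
        // lookup and synthesize a response from the id. Ids whose last two hex
        // digits parse to 0 (or not at all) or leave remainder 2 mod 3 get a 500,
        // everything else gets a 200.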
var idAsNumber = parseInt(id.substring(id.length - 2), 16);
if (!idAsNumber || idAsNumber % 3 === 2) {
res.status(500).send({msg: 'server error'});
} else {
res.status(200).send({msg: 'all good'});
}
return;
}
db.itemsDb.get(id, { revs_info: false }, function(err, body) {
if (!err) {
res.send(body);
} else {
res.send({msg:'Error: could not find item: ' + id});
}
});
};
//list all the database contents.
exports.list = function(req, res) {
db.itemsDb.list({include_docs: true}, function(err, body/*, headers*/) {
if (!err) {
res.send(body);
return;
}
res.send({msg:'Error listing items: ' + err});
});
};
//update an item using an ID.
exports.update = function(req, res) {
var id = req.params.id;
var data = req.body;
db.itemsDb.get(id, {revs_info:true}, function(err, body) {
if (!err) {
data._rev = body._rev;
db.itemsDb.insert(data, id, function(err/*, body, headers*/) {
if (!err) {
res.send({msg:'Successfully updated item: ' + JSON.stringify(data)});
} else {
res.send({msg:'Error inserting for update: ' + err});
}
});
}
else {
res.send({msg:'Error getting item for update: ' + err});
}
});
};
//remove an item from the database using an ID.
exports.remove = function(req, res) {
var id = req.params.id;
db.itemsDb.get(id, { revs_info: true }, function(err, body) {
if (!err) {
//console.log('Deleting item: ' + id);
db.itemsDb.destroy(id, body._rev, function(err/*, body*/) {
if (!err) {
res.send({msg:'Successfully deleted item'});
} else {
res.send({msg:'Error in delete: ' + err});
}
});
} else {
res.send({msg:'Error getting item id: ' + err});
}
});
};
//calculate the fibonacci of 20.
var fib = function(n) {
if (n < 2) {
return 1;
}
return fib(n - 2) + fib(n - 1);
};
exports.fib = function(req, res) {
res.send({msg:'Done with fibonacci of 20: ' + fib(20)});
};
exports.getFastCache = function() {
return USE_FASTCACHE;
};
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE78_OS_Command_Injection__wchar_t_environment_w32spawnl_09.c
Label Definition File: CWE78_OS_Command_Injection.strings.label.xml
Template File: sources-sink-09.tmpl.c
*/
/*
* @description
* CWE: 78 OS Command Injection
* BadSource: environment Read input from an environment variable
* GoodSource: Fixed string
* Sink: w32spawnl
* BadSink : execute command with wspawnl
* Flow Variant: 09 Control flow: if(GLOBAL_CONST_TRUE) and if(GLOBAL_CONST_FALSE)
*
* */
#include "std_testcase.h"
#include <wchar.h>
#ifdef _WIN32
#define COMMAND_INT_PATH L"%WINDIR%\\system32\\cmd.exe"
#define COMMAND_INT L"cmd.exe"
#define COMMAND_ARG1 L"/c"
#define COMMAND_ARG2 L"dir "
#define COMMAND_ARG3 data
#else /* NOT _WIN32 */
#include <unistd.h>
#define COMMAND_INT_PATH L"/bin/sh"
#define COMMAND_INT L"sh"
#define COMMAND_ARG1 L"-c"
#define COMMAND_ARG2 L"ls "
#define COMMAND_ARG3 data
#endif
#define ENV_VARIABLE L"ADD"
#ifdef _WIN32
#define GETENV _wgetenv
#else
#define GETENV getenv
#endif
#include <process.h>
#ifndef OMITBAD
void CWE78_OS_Command_Injection__wchar_t_environment_w32spawnl_09_bad()
{
wchar_t * data;
wchar_t dataBuffer[100] = COMMAND_ARG2;
data = dataBuffer;
if(GLOBAL_CONST_TRUE)
{
{
/* Append input from an environment variable to data */
size_t dataLen = wcslen(data);
wchar_t * environment = GETENV(ENV_VARIABLE);
/* If there is data in the environment variable */
if (environment != NULL)
{
/* POTENTIAL FLAW: Read data from an environment variable */
wcsncat(data+dataLen, environment, 100-dataLen-1);
}
}
}
/* wspawnl - specify the path where the command is located */
/* POTENTIAL FLAW: Execute command without validating input possibly leading to command injection */
_wspawnl(_P_WAIT, COMMAND_INT_PATH, COMMAND_INT_PATH, COMMAND_ARG1, COMMAND_ARG3, NULL);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B1() - use goodsource and badsink by changing the GLOBAL_CONST_TRUE to GLOBAL_CONST_FALSE */
static void goodG2B1()
{
wchar_t * data;
wchar_t dataBuffer[100] = COMMAND_ARG2;
data = dataBuffer;
if(GLOBAL_CONST_FALSE)
{
/* INCIDENTAL: CWE 561 Dead Code, the code below will never run */
printLine("Benign, fixed string");
}
else
{
/* FIX: Append a fixed string to data (not user / external input) */
wcscat(data, L"*.*");
}
/* wspawnl - specify the path where the command is located */
/* POTENTIAL FLAW: Execute command without validating input possibly leading to command injection */
_wspawnl(_P_WAIT, COMMAND_INT_PATH, COMMAND_INT_PATH, COMMAND_ARG1, COMMAND_ARG3, NULL);
}
/* goodG2B2() - use goodsource and badsink by reversing the blocks in the if statement */
static void goodG2B2()
{
wchar_t * data;
wchar_t dataBuffer[100] = COMMAND_ARG2;
data = dataBuffer;
if(GLOBAL_CONST_TRUE)
{
/* FIX: Append a fixed string to data (not user / external input) */
wcscat(data, L"*.*");
}
/* wspawnl - specify the path where the command is located */
/* POTENTIAL FLAW: Execute command without validating input possibly leading to command injection */
_wspawnl(_P_WAIT, COMMAND_INT_PATH, COMMAND_INT_PATH, COMMAND_ARG1, COMMAND_ARG3, NULL);
}
void CWE78_OS_Command_Injection__wchar_t_environment_w32spawnl_09_good()
{
goodG2B1();
goodG2B2();
}
#endif /* OMITGOOD */
/* Below is the main(). It is only used when building this testcase on
* its own for testing or for building a binary to use in testing binary
* analysis tools. It is not used when compiling all the testcases as one
* application, which is how source code analysis tools are tested.
*/
#ifdef INCLUDEMAIN
int main(int argc, char * argv[])
{
/* seed randomness */
srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
printLine("Calling good()...");
CWE78_OS_Command_Injection__wchar_t_environment_w32spawnl_09_good();
printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
printLine("Calling bad()...");
CWE78_OS_Command_Injection__wchar_t_environment_w32spawnl_09_bad();
printLine("Finished bad()");
#endif /* OMITBAD */
return 0;
}
#endif
|
const Color = "RANDOM", Random = require("srod-v2");
const Discord = require("discord.js");
module.exports = {
name: "panda",
aliases: [],
category: "Image",
description: "Return A Random Panda!",
usage: "Panda",
run: async (client, message, args) => {
const Data = await Random.GetAnimalImage({ Animal: "panda", Color: Color });
return message.channel.send(Data);
}
};
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'valerio cosentino'
from git import *
import re
from datetime import datetime
import string
from util.date_util import DateUtil
import time
class GitQuerier():
"""
This class collects the data available on Git by using Git python library
"""
# python, java, html, xml, sql, javascript, c, c++, scala, php, ruby, matlab
ALLOWED_EXTENSIONS = ['py', 'java', 'html', 'xml', 'sql', 'js', 'c', 'cpp', 'cc', 'scala', 'php', 'rb', 'm']
def __init__(self, git_repo_path, logger):
"""
:type git_repo_path: str
:param git_repo_path: local path of the Git repository
:type logger: Object
:param logger: logger
"""
try:
self._logger = logger
self._repo = Repo(git_repo_path, odbt=GitCmdObjectDB)
self._gitt = self._repo.git
self._date_util = DateUtil()
except:
self._logger.error("GitQuerier init failed")
raise
def get_ext(self, filepath):
"""
gets the extension of the file
:type filepath: str
:param filepath: local path of the file
"""
ext = None
if filepath:
ext = filepath.split('.')[-1]
return ext
def _get_type(self, str):
# not used, future extension
type = "text"
if str.startswith('Binary files'):
type = "binary"
return type
def _get_diffs_manually(self, parent, commit, retrieve_patch):
# gets diffs using the Git command
diffs = []
content = self._repo.git.execute(["git", "show", commit.hexsha])
lines = content.split('\n')
flag = False
file_a = None
file_b = None
for line in lines:
if flag:
if line.startswith("similarity"):
diff = {"rename_from": file_a, "rename_to": file_b, "renamed": True}
diffs = diffs + [diff]
else:
try:
if retrieve_patch:
diff = parent.diff(commit, paths=file_a, create_patch=True)
else:
diff = parent.diff(commit, paths=file_a, create_patch=False)
except Exception:
self._logger.error("diff not retrieved ", exc_info=True)
diff = []
diffs = diffs + diff
flag = False
if not diff:
self._logger.warning("GitQuerier: diff empty for commit: " +
commit.hexsha + " file_a: " + str(file_a) + " file_b: " + str(file_b))
if re.match(r"^diff --git", line):
try:
line_content = re.sub(r"^diff --git ", "", line).strip().replace("\"", "")
file_a = line_content.split("a/", 1)[1].split(" b/")[0].strip()
file_b = line_content.split(" b/")[1].strip()
flag = True
except Exception:
self._logger.error("Error when parsing diff git ", exc_info=True)
return diffs
def get_diffs(self, commit, files_in_commit, retrieve_patch):
"""
gets the diffs of a commit
:type commit: Object
:param commit: the Object representing the commit
:type files_in_commit: list
:param files_in_commit: files changed in the commit (not used by the current implementation)
:type retrieve_patch: bool
:param retrieve_patch: retrieve patch content
"""
parent = commit.parents[0]
diffs = self._get_diffs_manually(parent, commit, retrieve_patch)
return diffs
def commit_has_no_parents(self, commit):
"""
checks whether a commit has no parents
:type commit: Object
:param commit: the Object representing the commit
"""
flag = False
if not commit.parents:
flag = True
return flag
def get_commit_time(self, string_time):
"""
gets commit time from timestamp
:type string_time: str
:param string_time: timestamp
"""
return self._date_util.get_time_fromtimestamp(string_time, "%Y-%m-%d %H:%M:%S")
def get_files_in_ref(self, ref):
"""
gets files in a given reference
:type ref: str
:param ref: name of the reference
"""
files = []
git = self._repo.git
content = git.execute(["git", "ls-tree", "-r", ref])
for line in content.split("\n"):
files.append(line.split("\t")[-1])
return files
def get_file_content(self, ref, _file):
"""
gets content of a file for a given reference
:type ref: str
:param ref: name of the reference
:type _file: str
:param _file: repo file path
"""
git = self._repo.git
return git.execute(["git", "show", ref + ":" + _file])
def get_diffs_no_parent_commit(self, commit):
"""
gets the diffs of a commit that has no parent
:type commit: Object
:param commit: the Object representing the commit
"""
diffs = []
content = self._repo.git.execute(["git", "show", commit.hexsha])
lines = content.split('\n')
flag = False
file_a = None
for line in lines:
if re.match(r"^diff --git", line):
line_content = re.sub(r"^diff --git ", "", line)
if file_a:
diffs.append((file_a, content))
flag = False
file_a = line_content.split(' ')[0].replace("a/", "", 1)
elif re.match("^@@", line):
if not flag:
flag = True
content = line
else:
diffs.append((file_a, content))
content = line
elif flag:
if line != '\\ No newline at end of file':
content = content + '\n' + line
if file_a:
diffs.append((file_a, content))
else:
self._logger.warning("GitQuerier: diff with first commit not found")
return diffs
def get_file_path(self, diff):
"""
gets the file path from a diff
:type diff: Object
:param diff: the Object representing the diff
"""
file_path = None
try:
if diff.a_blob:
if diff.a_blob.path:
file_path = diff.a_blob.path
else:
file_path = diff.a_path
else:
# if it is a new file
if diff.b_blob.path:
file_path = diff.b_blob.path
else:
file_path = diff.b_path
except:
pass
return file_path
def get_file_current(self, diff):
"""
gets the file name after renaming from a diff
:type diff: Object
:param diff: the Object representing the diff
"""
if isinstance(diff, dict):
file_current = diff.get('rename_to')
else:
if diff.rename_to:
file_current = diff.rename_to
else:
file_current = diff.diff.split('\n')[2].replace('rename to ', '')
return file_current
def get_status_with_diff(self, stats, diff):
"""
gets the status from a diff
:type diff: Object
:param diff: the Object representing the diff
"""
additions = stats[0]
deletions = stats[1]
if additions > 0 and deletions == 0:
status = "added"
elif additions == 0 and deletions > 0:
status = "deleted"
elif additions > 0 and deletions > 0:
status = "modified"
else:
try:
if diff.new_file:
status = "added"
elif diff.deleted_file:
status = "deleted"
elif additions == 0 and deletions == 0:
status = "added"
else:
status = "modified"
except:
status = "modified"
return status
def is_renamed(self, diff):
"""
checks whether a diff is about renaming
:type diff: Object
:param diff: the Object representing the diff
"""
flag = False
if isinstance(diff, dict):
flag = diff.get('renamed')
else:
try:
if diff.renamed:
flag = True
except:
flag = False
if not flag:
try:
# sometimes the library does not set the renamed value to True even if the file is actually renamed
if (not diff.a_blob) and (not diff.b_blob):
if re.match(r"^(.*)\nrename from(.*)\nrename to(.*)$", diff.diff, re.M):
flag = True
except:
flag = False
return flag
def get_stats_for_file(self, commit_stats_files, file_name):
"""
gets stats of a file
:type commit_stats_files: Object
:param commit_stats_files: the Object representing the commit stats
:type file_name: str
:param file_name: name of a file
"""
stats_for_file = ()
for f in commit_stats_files.keys():
if f == file_name:
stats = commit_stats_files.get(f)
stats_for_file = (stats.get('insertions'), stats.get('deletions'), stats.get('lines'))
break
if not stats_for_file:
stats_for_file = (0, 0, 0)
self._logger.warning("GitQuerier: stats for file " + file_name + " not found!")
return stats_for_file
def get_references(self):
"""
gets references
"""
references = []
for ref in self._repo.references:
if all(c in string.printable for c in ref.name):
ref_name = ref.name
if type(ref) == RemoteReference:
if ref_name != "origin/HEAD":
references.append((ref_name, 'branch'))
elif type(ref) == TagReference:
references.append((ref_name, 'tag'))
else:
self._logger.warning("Git2Db: reference: " + ref.name +
" contains unprintable chars and won't be processed!")
return references
def get_commit_property(self, commit, prop):
"""
gets a commit property
:type commit: Object
:param commit: the Object representing the commit
:type prop: str
:param prop: the name of a property
"""
found = None
try:
if prop == "message":
found = commit.message
elif prop == "author.name":
found = commit.author.name
elif prop == "author.email":
found = commit.author.email
elif prop == "committer.name":
found = commit.committer.name
elif prop == "committer.email":
found = commit.committer.email
elif prop == "size":
found = commit.size
elif prop == "hexsha":
found = commit.hexsha
elif prop == "authored_date":
found = commit.authored_date
elif prop == "committed_date":
found = commit.committed_date
except:
# ugly but effective. GitPython may fail in retrieving properties with large content.
# Waiting some seconds seems to fix the problem
try:
time.sleep(5)
found = self.get_commit_property(commit, prop)
except:
found = None
self._logger.error("GitQuerier: something went wrong when trying to retrieve the attribute " +
prop + " from the commit " + str(commit.hexsha))
return found
def get_patch_content(self, diff):
"""
gets patch content from a diff
:type diff: Object
:param diff: the Object representing the diff
"""
return diff.diff
def is_new_file(self, diff):
"""
checks whether a diff contains a new file
:type diff: Object
:param diff: the Object representing the diff
"""
return diff.new_file
def get_rename_from(self, diff):
"""
gets the 'rename from' file path from a diff
:type diff: Object
:param diff: the Object representing the diff
"""
if isinstance(diff, dict):
file_previous = diff.get("rename_from")
else:
if diff.rename_from:
file_previous = diff.rename_from
else:
file_previous = diff.diff.split('\n')[1].replace('rename from ', '')
return file_previous
def _get_commits(self, ref_name):
# gets commits from a reference
commits = []
for commit in self._repo.iter_commits(rev=ref_name):
commits.append(commit)
return commits
def _get_commits_before_date(self, commits, date):
# gets commits before a given date
before_date_object = self._date_util.get_timestamp(date, "%Y-%m-%d")
selected_commits = []
for commit in commits:
committed_date_object = datetime.fromtimestamp(commit.committed_date)
if committed_date_object <= before_date_object:
selected_commits.append(commit)
return selected_commits
def _get_commits_after_sha(self, commits, sha):
# gets commits after a commit with a given SHA
selected_commits = []
for commit in commits:
if commit.hexsha == sha:
break
else:
selected_commits.append(commit)
return selected_commits
def _order_chronologically_commits(self, commits):
# order commits in chronological order
commits.reverse()
return commits
def collect_all_commits(self, ref_name):
"""
gets all commits from a reference
:type ref_name: str
:param ref_name: name of the reference
"""
commits = self._get_commits(ref_name)
ordered = self._order_chronologically_commits(commits)
return ordered
def collect_all_commits_before_date(self, ref_name, date):
"""
gets all commits from a reference before a given date
:type ref_name: str
:param ref_name: name of the reference
:type date: str
:param date: a string representing a date YYYY-mm-dd
"""
commits = self._get_commits(ref_name)
selected_commits = self._get_commits_before_date(commits, date)
ordered = self._order_chronologically_commits(selected_commits)
return ordered
def collect_all_commits_after_sha(self, ref_name, sha):
"""
gets all commits from a reference after a given SHA
:type ref_name: str
:param ref_name: name of the reference
:type sha: str
:param sha: the SHA of a commit
"""
commits = self._get_commits(ref_name)
selected_commits = self._get_commits_after_sha(commits, sha)
ordered = self._order_chronologically_commits(selected_commits)
return ordered
def collect_all_commits_after_sha_before_date(self, ref_name, sha, before_date):
"""
gets all commits from a reference after a given SHA and before a given date
:type ref_name: str
:param ref_name: name of the reference
:type sha: str
:param sha: the SHA of a commit
:type before_date: str
:param before_date: import data before date (YYYY-mm-dd)
"""
commits = self._get_commits(ref_name)
selected_commits = self._get_commits_after_sha(commits, sha)
selected_commits = self._get_commits_before_date(selected_commits, before_date)
ordered = self._order_chronologically_commits(selected_commits)
return ordered
def get_line_details(self, patch_content, file_extension):
"""
gets line details from a patch
:type patch_content: str
:param patch_content: content of a patch
:type file_extension: str
:param file_extension: extension of the file used to identify comments within the patch
"""
details = []
block_comment = False
previous_block_comment = False
lines = patch_content.split('\n')
previous_original_line = 0
previous_new_line = 0
original_line = 0
new_line = 0
for line in lines:
is_commented = False
is_partially_commented = False
# if the line contains diff info
if re.match(r"^@@(\s|\+|\-|\d|,)+@@", line, re.M):
# re-init parameters
begin = self._get_file_modification_begin(line)
original_line = begin[0]
new_line = begin[1]
if block_comment:
previous_block_comment = True
else:
previous_block_comment = False
block_comment = False
# if the line does not contain diff info
else:
# collect content of the line
# check if the line concerns an addition
if re.match(r"^\+.*", line, re.M):
# check if the line is empty
if self._line_is_empty(line):
self._add_to_details(details, "addition", new_line, False, False, True, line)
else:
if file_extension in GitQuerier.ALLOWED_EXTENSIONS:
# calculate if the line is commented
result = self._line_is_commented("addition", previous_block_comment, previous_new_line,
new_line, block_comment, details, line, file_extension)
details = result[0]
previous_block_comment = result[1]
block_comment = result[2]
is_commented = result[3]
is_partially_commented = result[4]
self._add_to_details(details, "addition", new_line, is_commented,
is_partially_commented, False, line)
previous_new_line = new_line
new_line += 1
# check if the line concerns a deletion
elif re.match(r"^\-.*", line, re.M):
# check if the line is empty
if self._line_is_empty(line):
self._add_to_details(details, "deletion", original_line, False, False, True, line)
else:
if file_extension in GitQuerier.ALLOWED_EXTENSIONS:
# calculate if the line is commented
result = self._line_is_commented("deletion", previous_block_comment,
previous_original_line, original_line, block_comment,
details, line, file_extension)
details = result[0]
previous_block_comment = result[1]
block_comment = result[2]
is_commented = result[3]
is_partially_commented = result[4]
self._add_to_details(details, "deletion", original_line, is_commented,
is_partially_commented, False, line)
previous_original_line = original_line
original_line += 1
else:
if line != '\\ No newline at end of file' and line != '':
original_line += 1
new_line += 1
return details
def _line_is_commented(self, type_change, previous_block_comment, previous_line_number, current_line_number,
block_comment, details, line, file_extension):
# checks whether a line is commented
is_commented = False
is_partially_commented = False
# if a comment has been added in the previous block, all the lines between the previous block and the current
# block are marked as commented
# Note that, it is not possible to check whether the lines between two blocks are empty or not. By default,
# all these lines are set as not empty
if previous_block_comment:
for i in range(previous_line_number, current_line_number):
self._add_to_details(details, type_change, i, True, False, False, None)
previous_block_comment = False
block_comment = self._line_is_in_block_comment(block_comment, line, file_extension)
# check if the line is commented or it is inside a block comment
if self._line_is_fully_commented(line, file_extension) or \
block_comment or previous_block_comment or \
self._line_contains_only_close_block_comment(line, file_extension) or \
self._line_ends_with_close_block_comment(line, file_extension):
is_commented = True
elif self._line_is_partially_commented(line, file_extension) or \
self._line_contains_open_block_comment(line, file_extension) or \
self._line_contains_close_block_comment(line, file_extension):
is_partially_commented = True
block_comment = self._line_is_partially_in_block_comment(block_comment, line, file_extension)
return (details, previous_block_comment, block_comment, is_commented, is_partially_commented)
def _get_file_modification_begin(self, line):
# gets the beginning of a file modification
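# worked example (added note): for the hunk header "@@ -12,7 +15,8 @@ def foo():"
# modified_lines becomes ["-12,7", "+15,8"] and the method returns (12, 15)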
modified_lines = line.split("@@")[1].strip().split(" ")
original_starting = modified_lines[0]
original_line = int(original_starting.split(',')[0].replace('-', ''))
new_starting = modified_lines[1]
new_line = int(new_starting.split(',')[0].replace('+', ''))
return original_line, new_line
def _add_to_details(self, details, type, line, is_commented, is_partially_commented, is_empty, line_content):
# stores line details
details.append((type, line, is_commented, is_partially_commented, is_empty, line_content))
return
def _line_is_empty(self, line):
# checks that a line is empty
flag = False
if re.match(r"^(\+|\-)(\s*)$", line, re.M):
flag = True
return flag
def _line_starts_with_open_block_comment(self, line, ext):
# checks that a line starts with an open block comment
flag = False
if ext in ("java", "js", "sql", "c", "cpp", "cc", "scala", "php"):
if re.match(r"^(\+|\-)(\s*)/\*", line) and not re.match(r"^(\+|\-)(\s*)/\*(.*)(\*/)", line):
flag = True
elif ext == "py":
if re.match(r'^(\+|\-)(\s*)"""', line) and not re.match(r'^(\+|\-)(\s*)"""(.*)(""")', line):
flag = True
elif ext in ("xml", "html"):
if re.match(r'^(\+|\-)(\s*)(<\!\-\-)', line) and not re.match(r"^(\+|\-)(\s*)(<\!\-\-)(.*)(\-\->)", line):
flag = True
elif ext in ("rb"):
if re.match(r'^(\+|\-)(\s*)(\=begin)', line) and not re.match(r"^(\+|\-)(\s*)(\=begin)(.*)(\=end)", line):
flag = True
elif ext in ("m"):
if re.match(r'^(\+|\-)(\s*)(%\{)', line) and not re.match(r"^(\+|\-)(\s*)(%\{)(.*)(%\})", line):
flag = True
return flag
def _line_contains_only_open_block_comment(self, line, ext):
# checks that a line contains only an open block comment
flag = False
if ext in ("java", "js", "sql", "c", "cpp", "cc", "scala", "php"):
if re.match(r"^(\+|\-)(\s*)(/\*)(\s*)$", line):
flag = True
elif ext == "py":
if re.match(r'^(\+|\-)(\s*)(""")(\s*)$', line):
flag = True
elif ext in ("xml", "html"):
if re.match(r"^(\+|\-)(\s*)(<\!\-\-)(\s*)$", line):
flag = True
elif ext in ("rb"):
if re.match(r"^(\+|\-)(\s*)(\=begin)(\s*)$", line):
flag = True
elif ext in ("m"):
if re.match(r"^(\+|\-)(\s*)(%\{)(\s*)$", line):
flag = True
return flag
def _line_contains_open_block_comment(self, line, ext):
# checks that a line contains an open block comment
flag = False
if ext in ("java", "js", "sql", "c", "cpp", "cc", "scala", "php"):
if re.match(r"^(\+|\-)(.*)/\*", line) and not re.match(r"^(\+|\-)(.*)/\*(.*)(\*/)", line):
flag = True
elif ext == "py":
if re.match(r'^(\+|\-)(.*)"""', line) and not re.match(r'^(\+|\-)(.*)"""(.*)(""")', line):
flag = True
elif ext in ("xml", "html"):
if re.match(r'^(\+|\-)(.*)(<\!\-\-)', line) and not re.match(r"^(\+|\-)(.*)(<\!\-\-)(.*)(\-\->)", line):
flag = True
elif ext in ("rb"):
if re.match(r'^(\+|\-)(.*)(\=begin)', line) and not re.match(r"^(\+|\-)(.*)(\=begin)(.*)(\=end)", line):
flag = True
elif ext in ("m"):
if re.match(r'^(\+|\-)(.*)(%\{)', line) and not re.match(r"^(\+|\-)(.*)(%\{)(.*)(%\})", line):
flag = True
return flag
def _line_ends_with_close_block_comment(self, line, ext):
# checks that a line ends with a close block comment
flag = False
if ext in ("java", "js", "sql", "c", "cpp", "cc", "scala", "php"):
if re.match(r"^(\+|\-)(.*)\*/(\s*)$", line) and not re.match(r"^(\+|\-)(\s*)(/\*)(.*)\*/(\s*)$", line):
flag = True
elif ext == "py":
if re.match(r'^(\+|\-)(.*)"""(\s*)$', line) and not re.match(r'^(\+|\-)(\s*)(""")(.*)"""(\s*)$', line):
flag = True
elif ext in ("xml", "html"):
if re.match(r"^(\+|\-)(.*)(\-\->(\s*)$)", line) and \
not re.match(r"^(\+|\-)(\s*)(<\!\-\-)(.*)(\-\->)(\s*)$", line):
flag = True
elif ext in ("rb"):
if re.match(r'^(\+|\-)(.*)(\=end)(\s*)$', line) and \
not re.match(r"^(\+|\-)(\s*)(\=begin)(.*)(\=end)(\s*)$", line):
flag = True
elif ext in ("m"):
if re.match(r'^(\+|\-)(.*)(%\})(\s*)$', line) and \
not re.match(r"^(\+|\-)(\s*)(%\{)(.*)(%\})(\s*)$", line):
flag = True
return flag
def _line_starts_with_close_block_comment(self, line, ext):
# checks that a line starts with a close block comment
flag = False
if ext in ("java", "js", "sql", "c", "cpp", "cc", "scala", "php"):
if re.match(r"^(\+|\-)(\s*)\*/", line):
flag = True
elif ext == "py":
if re.match(r'^(\+|\-)(\s*)"""', line) and not re.match(r'^(\+|\-)(\s*)"""(.*)(""")', line):
flag = True
elif ext in ("xml", "html"):
if re.match(r'^(\+|\-)(\s*)(\-\->)', line):
flag = True
elif ext in ("rb"):
if re.match(r'^(\+|\-)(\s*)(\=end)', line):
flag = True
elif ext in ("m"):
if re.match(r'^(\+|\-)(\s*)(%\})', line):
flag = True
return flag
def _line_contains_only_close_block_comment(self, line, ext):
# checks that a line contains only a close block comment
flag = False
if ext in ("java", "js", "sql", "c", "cpp", "cc", "scala", "php"):
if re.match(r"^(\+|\-)(\s*)\*/(\s*)$", line):
flag = True
elif ext == "py":
if re.match(r'^(\+|\-)(\s*)"""(\s*)$', line) and not re.match(r'^(\+|\-)(\s*)(""")(.*)"""(\s*)$', line):
flag = True
elif ext in ("xml", "html"):
if re.match(r"^(\+|\-)(\s*)(\-\->)(\s*)$", line):
flag = True
elif ext in ("rb"):
if re.match(r'^(\+|\-)(\s*)(.*)(\=end)(\s*)$', line):
flag = True
elif ext in ("m"):
if re.match(r'^(\+|\-)(\s*)(.*)(%\})(\s*)$', line):
flag = True
return flag
def _line_contains_close_block_comment(self, line, ext):
# checks that a line contains a close block comment
flag = False
if ext in ("java", "js", "sql", "c", "cpp", "cc", "scala", "php"):
if re.match(r"^(\+|\-)(.*)\*/", line) and not re.match(r"^(\+|\-)(.*)/\*(.*)(\*/)", line):
flag = True
elif ext == "py":
if re.match(r'^(\+|\-)(.*)"""', line) and not re.match(r'^(\+|\-)(.*)"""(.*)(""")', line):
flag = True
elif ext in ("xml", "html"):
if re.match(r'^(\+|\-)(.*)(\-\->)', line) and not re.match(r"^(\+|\-)(.*)(<\!\-\-)(.*)(\-\->)", line):
flag = True
elif ext in ("rb"):
if re.match(r'^(\+|\-)(.*)(\=end)', line) and not re.match(r"^(\+|\-)(.*)(\=begin)(.*)(\=end)", line):
flag = True
elif ext in ("m"):
if re.match(r'^(\+|\-)(.*)(%\})', line) and not re.match(r"^(\+|\-)(.*)(%\{)(.*)(%\})", line):
flag = True
return flag
def _line_is_partially_commented(self, line, ext):
# checks that a line is partially commented
flag = False
if ext in ("java", "js", "c", "cpp", "cc", "scala"):
if re.match(r"^(\+|\-)(.*)(/\*)(.*)\*/", line) or \
re.match(r"^(\+|\-)(.*)//", line):
flag = True
elif ext in ("py", "rb"):
if re.match(r"^(\+|\-)(.*)\#", line):
flag = True
elif ext in ("xml", "html"):
if re.match(r"^(\+|\-)(.*)(<\!\-\-)(.*)(\-\->)", line):
flag = True
elif ext == "sql":
if re.match(r"^(\+|\-)(.*)(/\*)(.*)(\*/)", line) or \
re.match(r"^(\+|\-)(.*)(\-\-\s)", line):
flag = True
elif ext == "php":
if re.match(r"^(\+|\-)(.*)(/\*)(.*)\*/", line) or \
re.match(r"^(\+|\-)(.*)//", line) or \
re.match(r"^(\+|\-)(.*)\#", line):
flag = True
elif ext == "m":
if re.match(r'^(\+|\-)(.*)(%)', line) or \
re.match(r"^(\+|\-)(.*)(%\{)(.*)(%\})", line):
flag = True
else:
self._logger.warning("GitQuerier: impossible to identify comments for extension: " + ext)
return flag
def _line_is_fully_commented(self, line, ext):
# checks that a line is fully commented
flag = False
if ext in ("java", "js", "c", "cpp", "cc", "scala"):
if re.match(r"^(\+|\-)(\s*)(/\*)(.*)\*/(\s*)$", line) or \
re.match(r"^(\+|\-)(\s*)//", line):
flag = True
elif ext in ("py", "rb"):
if re.match(r"^(\+|\-)(\s*)\#", line):
flag = True
elif ext in ("xml", "html"):
if re.match(r"^(\+|\-)(\s*)(<\!\-\-)(.*)(\-\->)(\s*)$", line):
flag = True
elif ext == "sql":
if re.match(r"^(\+|\-)(\s*)(/\*)(.*)(\*/)(\s*)$", line) or \
re.match(r"^(\+|\-)(\s*)(\-\-\s)", line):
flag = True
elif ext == "php":
if re.match(r"^(\+|\-)(\s*)(/\*)(.*)\*/(\s*)$", line) or \
re.match(r"^(\+|\-)(\s*)//", line) or \
re.match(r"^(\+|\-)(\s*)\#", line):
flag = True
elif ext == "m":
if re.match(r'^(\+|\-)(\s*)(%)', line) or \
re.match(r"^(\+|\-)(\s*)(%\{)(.*)(%\})(\s*)$", line):
flag = True
else:
self._logger.warning("GitQuerier: impossible to identify comments for extension: " + ext)
return flag
def _line_is_in_block_comment(self, block_comment, line, ext):
# checks that a line is within a block comment
if not block_comment:
# check if the line starts with a block comment
if self._line_starts_with_open_block_comment(line, ext) or \
self._line_contains_only_open_block_comment(line, ext):
block_comment = True
else:
# check if the line ends with a block comment
if self._line_ends_with_close_block_comment(line, ext) or \
self._line_contains_only_close_block_comment(line, ext) or \
self._line_starts_with_close_block_comment(line, ext):
block_comment = False
return block_comment
def _line_is_partially_in_block_comment(self, block_comment, line, ext):
# checks that a line is partially in a block comment
if not block_comment:
if self._line_contains_open_block_comment(line, ext):
block_comment = True
else:
if self._line_contains_close_block_comment(line, ext):
block_comment = False
return block_comment
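# Hedged usage sketch (not part of the original module): a minimal driver showing how
# GitQuerier is typically exercised. The repository path and branch name below are
# placeholder assumptions; the real callers live elsewhere in the project.
if __name__ == "__main__":
    import logging
    logging.basicConfig(level=logging.INFO)
    demo_logger = logging.getLogger("git_querier_demo")
    querier = GitQuerier("/path/to/local/repo", demo_logger)  # hypothetical repo path
    for ref_name, ref_type in querier.get_references():
        print(ref_type, ref_name)
    commits = querier.collect_all_commits("master")  # assumes a branch named 'master'
    print("number of commits:", len(commits))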
|
import React, { useEffect, useState } from "react";
import Style from "../styles/myModal.module.css";
function Edit(props) {
const closeModal = () => {
props.setEditable(false);
document.getElementById("restForm").reset();
};
return (
<div>
<div id={Style.myModal} className={Style.modal}>
<div className={Style.modalContent}>
<div className={Style.modalHeader}>
<span className={Style.close} onClick={closeModal}>
×
</span>
<h2>{props.modalname}</h2>
</div>
<form id="restForm">
<div className={Style.modalBody}>
{props.modaltype === "text" ? (
<input
type="text"
onChange={(e) => props.changefunc(e.target.value)}
className="form-control"
defaultValue={props.changetype}
placeholder={props.modalname}
required
/>
) : (
<input
type="file"
accept="image/*"
onChange={(e) => props.changefunc(e.target.files[0])}
className="form-control"
placeholder={props.modalname}
required
/>
)}
</div>
<div className={Style.modalFooter}>
<button
type="button"
className="btn btn-default"
data-dismiss="modal"
onClick={closeModal}
>
Save For Preview
</button>
</div>
</form>
</div>
</div>
</div>
);
}
export default Edit;
|
from __future__ import annotations
import pytest
from testing.runner import and_exit
def test_search_wraps(run, ten_lines):
with run(str(ten_lines)) as h, and_exit(h):
h.press("Down")
h.press("Down")
h.await_cursor_position(x=0, y=3)
h.press("^W")
h.await_text("search:")
h.press_and_enter("^line_0$")
h.await_text("search wrapped")
h.await_cursor_position(x=0, y=1)
def test_search_find_next_line(run, ten_lines):
with run(str(ten_lines)) as h, and_exit(h):
h.await_cursor_position(x=0, y=1)
h.press("^W")
h.await_text("search:")
h.press_and_enter("^line_")
h.await_cursor_position(x=0, y=2)
def test_search_find_later_in_line(run):
with run() as h, and_exit(h):
h.press_and_enter("lol")
h.press("Up")
h.press("Right")
h.await_cursor_position(x=1, y=1)
h.press("^W")
h.await_text("search:")
h.press_and_enter("l")
h.await_cursor_position(x=2, y=1)
def test_search_only_one_match_already_at_that_match(run, ten_lines):
with run(str(ten_lines)) as h, and_exit(h):
h.press("Down")
h.await_cursor_position(x=0, y=2)
h.press("^W")
h.await_text("search:")
h.press_and_enter("^line_1$")
h.await_text("this is the only occurrence")
h.await_cursor_position(x=0, y=2)
def test_search_sets_x_hint_properly(run, tmpdir):
f = tmpdir.join("f")
contents = """\
beginning_line
match me!
"""
f.write(contents)
with run(str(f)) as h, and_exit(h):
h.press("^W")
h.await_text("search:")
h.press_and_enter("me!")
h.await_cursor_position(x=6, y=3)
h.press("Up")
h.press("Up")
h.await_cursor_position(x=6, y=1)
def test_search_not_found(run, ten_lines):
with run(str(ten_lines)) as h, and_exit(h):
h.press("^W")
h.await_text("search:")
h.press_and_enter("this will not match")
h.await_text("no matches")
h.await_cursor_position(x=0, y=1)
def test_search_invalid_regex(run, ten_lines):
with run(str(ten_lines)) as h, and_exit(h):
h.press("^W")
h.await_text("search:")
h.press_and_enter("invalid(regex")
h.await_text("invalid regex: 'invalid(regex'")
@pytest.mark.parametrize("key", ("Enter", "^C"))
def test_search_cancel(run, ten_lines, key):
with run(str(ten_lines)) as h, and_exit(h):
h.press("^W")
h.await_text("search:")
h.press(key)
h.await_text("cancelled")
def test_search_repeated_search(run, ten_lines):
with run(str(ten_lines)) as h, and_exit(h):
h.press("^W")
h.press("line")
h.await_text("search: line")
h.press("Enter")
h.await_cursor_position(x=0, y=2)
h.press("^W")
h.await_text("search [line]:")
h.press("Enter")
h.await_cursor_position(x=0, y=3)
def test_search_history_recorded(run):
with run() as h, and_exit(h):
h.press("^W")
h.await_text("search:")
h.press_and_enter("asdf")
h.await_text("no matches")
h.press("^W")
h.press("Up")
h.await_text("search [asdf]: asdf")
h.press("BSpace")
h.press("test")
h.await_text("search [asdf]: asdtest")
h.press("Down")
h.await_text_missing("asdtest")
h.press("Down") # can't go past the end
h.press("Up")
h.await_text("asdtest")
h.press("Up") # can't go past the beginning
h.await_text("asdtest")
h.press("Enter")
h.await_text("no matches")
h.press("^W")
h.press("Up")
h.await_text("search [asdtest]: asdtest")
h.press("Up")
h.await_text("search [asdtest]: asdf")
h.press("^C")
def test_search_history_duplicates_dont_repeat(run):
with run() as h, and_exit(h):
h.press("^W")
h.await_text("search:")
h.press_and_enter("search1")
h.await_text("no matches")
h.press("^W")
h.await_text("search [search1]:")
h.press_and_enter("search2")
h.await_text("no matches")
h.press("^W")
h.await_text("search [search2]:")
h.press_and_enter("search2")
h.await_text("no matches")
h.press("^W")
h.press("Up")
h.await_text("search2")
h.press("Up")
h.await_text("search1")
h.press("Enter")
def test_search_history_is_saved_between_sessions(run, xdg_data_home):
with run() as h, and_exit(h):
h.press("^W")
h.press_and_enter("search1")
h.press("^W")
h.press_and_enter("search2")
contents = xdg_data_home.join("babi/history/search").read()
assert contents == "search1\nsearch2\n"
with run() as h, and_exit(h):
h.press("^W")
h.press("Up")
h.await_text("search: search2")
h.press("Up")
h.await_text("search: search1")
h.press("Enter")
def test_search_multiple_sessions_append_to_history(run, xdg_data_home):
xdg_data_home.join("babi/history/search").ensure().write(
"orig\n" "history\n",
)
with run() as h1, and_exit(h1):
with run() as h2, and_exit(h2):
h2.press("^W")
h2.press_and_enter("h2 history")
h1.press("^W")
h1.press_and_enter("h1 history")
contents = xdg_data_home.join("babi/history/search").read()
assert contents == ("orig\n" "history\n" "h2 history\n" "h1 history\n")
def test_search_default_same_as_prev_history(run, xdg_data_home, ten_lines):
xdg_data_home.join("babi/history/search").ensure().write("line\n")
with run(str(ten_lines)) as h, and_exit(h):
h.press("^W")
h.press_and_enter("line")
h.await_cursor_position(x=0, y=2)
h.press("^W")
h.await_text("search [line]:")
h.press("Enter")
h.await_cursor_position(x=0, y=3)
@pytest.mark.parametrize("key", ("BSpace", "^H"))
def test_search_reverse_search_history_backspace(run, xdg_data_home, key):
xdg_data_home.join("babi/history/search").ensure().write(
"line_5\n" "line_3\n" "line_1\n",
)
with run() as h, and_exit(h):
h.press("^W")
h.press("^R")
h.await_text("search(reverse-search)``:")
h.press("linea")
h.await_text("search(failed reverse-search)`linea`: line_1")
h.press(key)
h.await_text("search(reverse-search)`line`: line_1")
h.press("^C")
def test_search_reverse_search_history(run, xdg_data_home, ten_lines):
xdg_data_home.join("babi/history/search").ensure().write(
"line_5\n" "line_3\n" "line_1\n",
)
with run(str(ten_lines)) as h, and_exit(h):
h.press("^W")
h.press("^R")
h.await_text("search(reverse-search)``:")
h.press("line")
h.await_text("search(reverse-search)`line`: line_1")
h.press("^R")
h.await_text("search(reverse-search)`line`: line_3")
h.press("Enter")
h.await_cursor_position(x=0, y=4)
def test_search_reverse_search_pos_during(run, xdg_data_home, ten_lines):
xdg_data_home.join("babi/history/search").ensure().write(
"line_3\n",
)
with run(str(ten_lines)) as h, and_exit(h):
h.press("^W")
h.press("^R")
h.press("ne")
h.await_text("search(reverse-search)`ne`: line_3")
h.await_cursor_position(y=23, x=30)
h.press("^C")
def test_search_reverse_search_pos_after(run, xdg_data_home, ten_lines):
xdg_data_home.join("babi/history/search").ensure().write(
"line_3\n",
)
with run(str(ten_lines), height=20) as h, and_exit(h):
h.press("^W")
h.press("^R")
h.press("line")
h.await_text("search(reverse-search)`line`: line_3")
h.press("Right")
h.await_text("search: line_3")
h.await_cursor_position(y=19, x=14)
h.press("^C")
def test_search_reverse_search_enter_appends(run, xdg_data_home, ten_lines):
xdg_data_home.join("babi/history/search").ensure().write(
"line_1\n" "line_3\n",
)
with run(str(ten_lines)) as h, and_exit(h):
h.press("^W")
h.press("^R")
h.press("1")
h.await_text("search(reverse-search)`1`: line_1")
h.press("Enter")
h.press("^W")
h.press("Up")
h.await_text("search [line_1]: line_1")
h.press("^C")
def test_search_reverse_search_history_cancel(run):
with run() as h, and_exit(h):
h.press("^W")
h.press("^R")
h.await_text("search(reverse-search)``:")
h.press("^C")
h.await_text("cancelled")
def test_search_reverse_search_resizing(run):
with run() as h, and_exit(h):
h.press("^W")
h.press("^R")
with h.resize(width=24, height=24):
h.await_text("search(reverse-se…:")
h.press("^C")
def test_search_reverse_search_does_not_wrap_around(run, xdg_data_home):
xdg_data_home.join("babi/history/search").ensure().write(
"line_1\n" "line_3\n",
)
with run() as h, and_exit(h):
h.press("^W")
h.press("^R")
# this should not wrap around
for i in range(6):
h.press("^R")
h.await_text("search(reverse-search)``: line_1")
h.press("^C")
def test_search_reverse_search_ctrl_r_on_failed_match(run, xdg_data_home):
xdg_data_home.join("babi/history/search").ensure().write(
"nomatch\n" "line_1\n",
)
with run() as h, and_exit(h):
h.press("^W")
h.press("^R")
h.press("line")
h.await_text("search(reverse-search)`line`: line_1")
h.press("^R")
h.await_text("search(failed reverse-search)`line`: line_1")
h.press("^C")
def test_search_reverse_search_keeps_current_text_displayed(run):
with run() as h, and_exit(h):
h.press("^W")
h.press("ohai")
h.await_text("search: ohai")
h.press("^R")
h.await_text("search(reverse-search)``: ohai")
h.press("^C")
def test_search_history_extra_blank_lines(run, xdg_data_home):
with run() as h, and_exit(h):
h.press("^W")
h.press_and_enter("hello")
with run() as h, and_exit(h):
pass
contents = xdg_data_home.join("babi/history/search").read()
assert contents == "hello\n"
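# Hedged sketch (not part of the original test module): one plausible shape for the
# 'ten_lines' fixture these tests rely on, a temporary file holding the lines
# "line_0" .. "line_9". The real fixture lives in conftest.py and may differ.
@pytest.fixture
def ten_lines_sketch(tmpdir):
    f = tmpdir.join("f")
    f.write("".join(f"line_{i}\n" for i in range(10)))
    return f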
|
import networkx as nx
from pymnet import *
import random
import matplotlib
import cascade as cas
import statistics
import math
import time
import csv
matplotlib.use('TkAgg')
nodes = 500
layers = 3
intra_thres = 0.2
inter_thres = 0.2
attack_size = 10
attack_point = (0.5, 0.5)
attack_type = "spatial_number" # choose one of the "normal", "spatial_number", "spatial_range"
support_type = "random_layers" # choose one of the "random_nodes", "random_layers"
edge_type = "undirected" # choose one of the "undirected", "directed"
coords = {}
dist_array = []
visited = [False for _ in range(nodes)]
in_thres = [False for _ in range(nodes)]
rgg_supp_nodes = {}
rand_supp_nodes = {}
intra_rgg_edges = []
intra_rand_edges = []
inter_rgg_edges = []
inter_rand_edges = []
intra_edges_num = []
inter_edges_num = [] # [for_edge, back_edge, for_supp_edge, back_supp_edge]
def cal_dist(cur_node, target_node):
x1, y1 = coords[cur_node]
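# target_node == -1 denotes the attack centre (attack_point) rather than an actual node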
if target_node == -1:
x2, y2 = attack_point
else:
x2, y2 = coords[target_node]
d = math.sqrt((x1-x2)**2 + (y1-y2)**2)
return d
def find_nearest_node(cur_node, supporting_node, neighbours, target_layers):
candidates = []
for target_node in neighbours:
if target_node[1] in target_layers:
dist = cal_dist(cur_node, target_node[0])
candidates.append((target_node[0], dist))
if len(candidates) != 0:
s_candidates = sorted(candidates, key=lambda dist: dist[1])
supporting_node = s_candidates[0][0]
return supporting_node
def make_interlayer_edges(net, cur_layer, layer_names, intra_type, inter_type):
if (intra_type == 'RGG') and (inter_type == 'RGG'):
if cur_layer != (len(layer_names) - 1):
for_edges = 0
back_edges = 0
for_supp_edges = 0
back_supp_edges = 0
for cur_node in range((cur_layer * nodes), (cur_layer + 1) * nodes):
for target_node in range((cur_layer + 1) * nodes, (cur_layer + 2) * nodes):
d = cal_dist(cur_node, target_node)
if d <= inter_thres:
net[cur_node, target_node, layer_names[cur_layer], layer_names[cur_layer + 1]] = 1
net[target_node, cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]] = 1
inter_rgg_edges.append((cur_node, target_node, layer_names[cur_layer], layer_names[cur_layer + 1]))
inter_rgg_edges.append((target_node, cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]))
if cur_node == rgg_supp_nodes[target_node]:
for_supp_edges += 1
else:
for_edges += 1
if target_node == rgg_supp_nodes[cur_node]:
back_supp_edges += 1
else:
back_edges += 1
inter_edges_num.append([for_edges, back_edges, for_supp_edges, back_supp_edges])
elif (intra_type == 'RGG') and (inter_type == 'Random'):
if cur_layer != (len(layer_names) - 1):
for_edges = 0
back_edges = 0
for_supp_edges = 0
back_supp_edges = 0
cur_nodes = list(range((cur_layer * nodes), (cur_layer + 1) * nodes))
target_nodes = list(range(((cur_layer + 1) * nodes), ((cur_layer + 2) * nodes)))
random.shuffle(target_nodes)
for target_node in target_nodes:
if rand_supp_nodes[target_node] in cur_nodes:
net[rand_supp_nodes[target_node], target_node, layer_names[cur_layer], layer_names[cur_layer + 1]] = 1
inter_rand_edges.append((rand_supp_nodes[target_node], target_node, layer_names[cur_layer], layer_names[cur_layer + 1]))
for_supp_edges += 1
if for_supp_edges >= inter_edges_num[cur_layer][2]:
break
random.shuffle(cur_nodes)
for cur_node in cur_nodes:
if rand_supp_nodes[cur_node] in target_nodes:
net[rand_supp_nodes[cur_node], cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]] = 1
inter_rand_edges.append((rand_supp_nodes[cur_node], cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]))
back_supp_edges += 1
if back_supp_edges >= inter_edges_num[cur_layer][3]:
break
cur_nodes.sort()  # sorted() alone discards its result; sort in place after shuffling
target_nodes.sort()
while for_edges < inter_edges_num[cur_layer][0]:
cur_node = random.choice(cur_nodes)
target_node = random.choice(target_nodes)
if net[cur_node, target_node, layer_names[cur_layer], layer_names[cur_layer + 1]] == 0:
net[cur_node, target_node, layer_names[cur_layer], layer_names[cur_layer + 1]] = 1
inter_rand_edges.append((cur_node, target_node, layer_names[cur_layer], layer_names[cur_layer + 1]))
for_edges += 1
while back_edges < inter_edges_num[cur_layer][1]:
cur_node = random.choice(cur_nodes)
target_node = random.choice(target_nodes)
if net[target_node, cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]] == 0:
net[target_node, cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]] = 1
inter_rand_edges.append((target_node, cur_node, layer_names[cur_layer + 1], layer_names[cur_layer]))
back_edges += 1
elif (intra_type == 'Random') and (inter_type == 'RGG'):
for node_from, node_to, layer_from, layer_to in inter_rgg_edges:
net[node_from, node_to, layer_from, layer_to] = 1
elif (intra_type == 'Random') and (inter_type == 'Random'):
for node_from, node_to, layer_from, layer_to in inter_rand_edges:
net[node_from, node_to, layer_from, layer_to] = 1
return net
def make_intralayer_edges(net, cur_layer, cur_layer_name, intra_type, inter_type):
if (intra_type == 'RGG') and (inter_type == 'RGG'):
edges = 0
for cur_node in range(cur_layer * nodes, (cur_layer + 1) * nodes):
for target_node in range(cur_layer * nodes, (cur_layer + 1) * nodes):
if cur_node != target_node:
d = cal_dist(cur_node, target_node)
if d <= intra_thres:
net[cur_node, target_node, cur_layer_name, cur_layer_name] = 1
intra_rgg_edges.append((cur_node, target_node, cur_layer_name))
edges += 1
intra_edges_num.append(edges)
elif (intra_type == 'RGG') and (inter_type == 'Random'):
for cur_node, target_node, cur_layer_name in intra_rgg_edges:
net[cur_node, target_node, cur_layer_name, cur_layer_name] = 1
elif (intra_type == 'Random') and (inter_type == 'RGG'):
cur_nodes = list(range((cur_layer * nodes), ((cur_layer + 1) * nodes)))
target_nodes = list(range((cur_layer * nodes), ((cur_layer + 1) * nodes)))
edges = 0
while edges < intra_edges_num[cur_layer]:
cur_node = random.choice(cur_nodes)
target_node = random.choice(target_nodes)
if net[cur_node, target_node, cur_layer_name, cur_layer_name] == 0:
net[cur_node, target_node, cur_layer_name, cur_layer_name] = 1
intra_rand_edges.append((cur_node, target_node, cur_layer_name))
edges += 1
elif (intra_type == 'Random') and (inter_type == 'Random'):
for cur_node, target_node, cur_layer_name in intra_rand_edges:
net[cur_node, target_node, cur_layer_name, cur_layer_name] = 1
return net
def make_edges(net, layer_names, intra_type, inter_type):
for cur_layer in range(layers):
net = make_intralayer_edges(net, cur_layer, layer_names[cur_layer], intra_type, inter_type)
net = make_interlayer_edges(net, cur_layer, layer_names, intra_type, inter_type)
return net
def find_mean(index, min_val, sum_val, temp_supp_nodes):
if sum_val > min_val:
return min_val, temp_supp_nodes
if index == nodes:
if sum_val < min_val:
min_val = sum_val
return min_val, temp_supp_nodes
for col in range(nodes):
if not visited[col]:
visited[col] = True
temp_supp_nodes.append(col)
sum_val += dist_array[index][col]
find_mean(index+1, min_val, sum_val, temp_supp_nodes)
sum_val -= dist_array[index][col]
temp_supp_nodes.remove(col)
visited[col] = False
def find_supporting_pair(cur_layer, target_nodes):
min_val = nodes * math.sqrt(2)
supp_nodes = []
for cur_node in range(cur_layer * nodes, (cur_layer + 1) * nodes):
dist_this_layer = []
for target_node in target_nodes:
cur_dist = cal_dist(cur_node, target_node)
dist_this_layer.append(cur_dist)
dist_array.append(dist_this_layer)
sum_val = 0
temp_supp_nodes = []
min_val = find_mean(0, min_val, sum_val, temp_supp_nodes)
return target_nodes
def find_supporting_nodes(layer_names, intra_type, inter_type):
if (intra_type == 'RGG') and (inter_type == 'RGG'):
for cur_layer in range(len(layer_names)):
target_nodes = []
if cur_layer == 0:
target_nodes = list(range(((cur_layer + 1) * nodes), ((cur_layer + 2) * nodes)))
elif cur_layer == len(layer_names) - 1:
target_nodes = list(range(((cur_layer - 1) * nodes), cur_layer * nodes))
else:
if support_type == "random_nodes":
target_nodes = list(range(((cur_layer + 1) * nodes), ((cur_layer + 2) * nodes)))
elif support_type == "random_layers":
choice = random.choice([(cur_layer - 1), (cur_layer + 1)])
target_nodes = list(range((choice * nodes), ((choice + 1) * nodes)))
supp_nodes = find_supporting_pair(cur_layer, target_nodes)
for cur_node in range(cur_layer * nodes, (cur_layer + 1) * nodes):
short_dist = 1
short_node = -1
for target_node in target_nodes:
cur_dist = cal_dist(cur_node, target_node)
if (cur_dist <= inter_thres) and (cur_dist <= short_dist):
short_dist = cur_dist
short_node = target_node
rgg_supp_nodes[cur_node] = short_node
if short_node != -1:
target_nodes.remove(short_node)
elif (intra_type == 'RGG') and (inter_type == 'Random'):
for cur_layer in range(len(layer_names)):
target_nodes = []
if cur_layer == 0:
target_nodes = list(range(((cur_layer + 1) * nodes), ((cur_layer + 2) * nodes)))
elif cur_layer == len(layer_names) - 1:
target_nodes = list(range(((cur_layer - 1) * nodes), cur_layer * nodes))
else:
if support_type == "random_nodes":
target_nodes = list(range(((cur_layer + 1) * nodes), ((cur_layer + 2) * nodes)))
elif support_type == "random_layers":
if inter_edges_num[cur_layer][3] == 0:
choice = cur_layer - 1
else:
choice = cur_layer + 1
target_nodes = list(range((choice * nodes), ((choice + 1) * nodes)))
random.shuffle(target_nodes)
cur_layer_nodes = list(range((cur_layer * nodes), ((cur_layer + 1) * nodes)))
index = 0
for cur_node in cur_layer_nodes:
rand_supp_nodes[cur_node] = target_nodes[index]
index += 1
def make_nodes(net, layer_names, intra_type, inter_type):
for i in range(layers):
for j in range(nodes):
if (intra_type == 'RGG') and (inter_type == 'RGG'):
coords[(i * nodes) + j] = (random.random(), random.random())
net.add_node((i * nodes) + j, layer_names[i])
return net
def make_network_layer(net, layer_names):
for i in range(layers):
layer_name = chr(97 + i)
net.add_layer(layer_name, aspect=0)
layer_names.append(layer_name)
return net, layer_names
def build_network(rep, intra_type, inter_type):
layer_names = []
net = MultilayerNetwork(aspects=1, fullyInterconnected=False, directed=False)
net, layer_names = make_network_layer(net, layer_names)
net = make_nodes(net, layer_names, intra_type, inter_type)
find_supporting_nodes(layer_names, intra_type, inter_type)
net = make_edges(net, layer_names, intra_type, inter_type)
return net
def analyse_initial_network(net, init_data):
layer_names = net.get_layers() # return dictionary
layer_names = sorted(list(layer_names))
stats = { "clustering":[], # Average clustering coefficient
"mean degree":[], # Mean degree
"the most far node":[], # The most far node from the attack centre
"components":[], # Components of the graph in each layers
"largest component":[], # The largest component of the graphs
"size of largest component":[], # The size of the largest component
}
# init_intra_edge, init_inter_edge, init_supp_edge, init_far_node, init_clust, init_mean_deg, init_large_comp
cur_layer = 0
for layer in layer_names:
edges = []
for edge in net.edges:
if edge[2] == edge[3] == layer:
edges.append(edge[:2])
G = nx.Graph()
G.add_edges_from(edges)
components = list(nx.connected_components(G))
far_dist = 0
for cur_node in range(cur_layer * nodes, (cur_layer + 1) * nodes):
d = cal_dist(cur_node, -1)
if d > far_dist:
far_dist = d
stats["clustering"].append(nx.average_clustering(G))
stats["mean degree"].append(len(edges) * 2 / nodes)
stats["the most far node"].append(far_dist)
stats["components"].append(components)
stats["largest component"].append(max(components, key=len))
stats["size of largest component"].append(len(max(components, key=len)))
cur_layer += 1
inter_edge = []
supp_edge = []
for inter_edges in inter_edges_num:
inter_edge.append(sum(inter_edges))
supp_edge.append(inter_edges[2] + inter_edges[3])
init_data.append(statistics.mean(intra_edges_num))
init_data.append(statistics.mean(inter_edge))
init_data.append(statistics.mean(supp_edge))
init_data.append(statistics.mean(stats["the most far node"]))
init_data.append(statistics.mean(stats["clustering"]))
init_data.append(statistics.mean(stats["mean degree"]))
init_data.append(statistics.mean(stats["size of largest component"]))
return init_data
def draw_network(net, type):
fig = draw(net, nodeCoords=coords, nodeLabelRule={}, nodeSizeRule={'rule':'scaled', 'scalecoeff': 0.01}, defaultEdgeWidth=0.5, show=False)
fig.savefig("%s Network.pdf" % type)
def make_data_frame(init_data, cas_data, rep, graph_type):
"""
if graph_type == 'RGG_RGG':
f = open('rgg_rgg_cas_raw_50.csv', 'a', newline='')
wr = csv.writer(f)
elif graph_type == 'RGG_Random':
f = open('rgg_rand_cas_raw_50.csv', 'a', newline='')
wr = csv.writer(f)
elif graph_type == 'Random_RGG':
f = open('rand_rgg_cas_raw_50.csv', 'a', newline='')
wr = csv.writer(f)
else:
f = open('rand_rand_cas_raw_50.csv', 'a', newline='')
wr = csv.writer(f)
"""
f = open('find attack size_10.csv', 'a', newline='')
wr = csv.writer(f)
# init_intra_edge, init_inter_edge, init_supp_edge, init_far_node, init_clust, init_mean_deg, init_large_comp
# fin_intra_edge, fin_inter_edge, fin_supp_edge, alive_nodes, tot_isol_node, tot_unsupp_node, cas_steps, fin_far_node, fin_clust, fin_mean_deg, fin_larg_comp, deg_assort, dist_deg_cent, dist_bet_cent, step.....
data = [rep, init_data[0], init_data[1], init_data[2], cas_data[0], cas_data[1], cas_data[2], cas_data[3], cas_data[4], cas_data[5], cas_data[6], init_data[3], cas_data[7], init_data[4], cas_data[8], init_data[5], cas_data[9], init_data[6], cas_data[10], cas_data[11], cas_data[12], cas_data[13]]
for index in range(len(cas_data[14])):
data.append(cas_data[14][index])
data.append(cas_data[15][index])
wr.writerow(data)
f.close()
if __name__ == "__main__":
"""
Types of attacks/cascades:
1. normal attack: randomly select the nodes that will be attacked initially.
2. spatial_number attack: select the nearest (attack_size) nodes from the attack_point; they will be attacked initially.
3. spatial_range attack: nodes in the circle (centre: attack_point, radius: attack_radius) will be attacked initially.
For "normal" attack, cas.attack_network(network, coords, supporting_nodes, attack_type, attack_size=20)
For "spatial_number" attack, cas.attack_network(network, coords, supporting_nodes, attack_type, attack_size=20, attack_layer='a', attack_point=(0.5, 0.5))
For "spatial_range" attack, cas.attack_network(network, coords, supporting_nodes, attack_type, attack_layer='a', attack_point=(0.5, 0.5), attack_radius=0.1)
attack_size = 20 # number of nodes that will be initially killed
attack_layer = 'a' # the target layer of the attack.
'a', 'b', 'c', ... selects that specific layer; 0 means all nodes are treated as if they were in a single layer.
attack_point = (0.5, 0.5) # attack point for spatial_number and spatial_range attacks
attack_radius = 0.1 # the radius of attack in spatial_range attacks
"""
start = time.time()
print("Start")
# Current number of repeat: 0
rep = 1
for i in range(20):
# init_intra_edge, init_inter_edge, init_supp_edge, init_far_node, init_clust, init_mean_deg, init_large_comp,
init_data = []
# fin_intra_edge, fin_inter_edge, fin_supp_edge, alive_nodes, tot_isol_node, tot_unsupp_node, cas_steps, fin_far_node, fin_clust, fin_mean_deg, fin_larg_comp, deg_assort, dist_deg_cent, dist_bet_cent, step.....
cas_data = []
rgg_rgg_net = build_network(rep, intra_type='RGG', inter_type='RGG')
init_data = analyse_initial_network(rgg_rgg_net, init_data)
# draw_network(rgg_rgg_net, type="intra_RGG, inter_RGG")
# att_rgg_rgg_net = cas.attack_network(rgg_rgg_net, coords, rgg_supp_nodes, attack_type, graph_type="RGG_RGG", attack_size=20)
att_rgg_rgg_net, cas_data = cas.attack_network(rgg_rgg_net, coords, rgg_supp_nodes, cas_data, attack_type, graph_type="RGG_RGG", attack_size=attack_size, attack_point=attack_point)
# att_rgg_rgg_net = cas.attack_network(rgg_rgg_net, coords, rgg_supp_nodes, attack_type, graph_type="RGG_RGG", attack_point=(0.5, 0.5), attack_radius=0.1)
make_data_frame(init_data, cas_data, rep, graph_type='RGG_RGG')
init_data = []
cas_data = []
rgg_rand_net = build_network(rep, intra_type='RGG', inter_type='Random')
init_data = analyse_initial_network(rgg_rand_net, init_data)
# draw_network(rgg_rand_net, type="intra_RGG, inter_Random")
# att_rgg_rand_net = cas.attack_network(rgg_rand_net, coords, rand_supp_nodes, attack_type, graph_type="RGG_Rand", attack_size=20)
att_rgg_rand_net, cas_data = cas.attack_network(rgg_rand_net, coords, rand_supp_nodes, cas_data, attack_type, graph_type="RGG_Rand", attack_size=attack_size, attack_point=attack_point)
# att_rgg_rand_net = cas.attack_network(rgg_rand_net, coords, rand_supp_nodes, attack_type, graph_type="RGG_Rand", attack_point=(0.5, 0.5), attack_radius=0.1)
make_data_frame(init_data, cas_data, rep, graph_type='RGG_Random')
init_data = []
cas_data = []
rand_rgg_net = build_network(rep, intra_type='Random', inter_type='RGG')
init_data = analyse_initial_network(rand_rgg_net, init_data)
# draw_network(rand_rgg_net, type="intra_Random, inter_RGG")
# att_rand_rgg_net = cas.attack_network(rand_rgg_net, coords, rgg_supp_nodes, attack_type, graph_type="Rand_RGG", attack_size=20)
att_rand_rgg_net, cas_data = cas.attack_network(rand_rgg_net, coords, rgg_supp_nodes, cas_data, attack_type, graph_type="Rand_RGG", attack_size=attack_size, attack_point=attack_point)
# att_rand_rgg_net = cas.attack_network(rand_rgg_net, coords, rgg_supp_nodes, attack_type, graph_type="Rand_RGG", attack_point=(0.5, 0.5), attack_radius=0.1)
make_data_frame(init_data, cas_data, rep, graph_type='Random_RGG')
init_data = []
cas_data = []
rand_rand_net = build_network(rep, intra_type='Random', inter_type='Random')
init_data = analyse_initial_network(rand_rand_net, init_data)
# draw_network(rand_rand_net, type="intra_Random, inter_Random")
# att_rand_rand_net = cas.attack_network(rand_rand_net, coords, rand_supp_nodes, attack_type, graph_type="Rand_Rand", attack_size=20)
att_rand_rand_net, cas_data = cas.attack_network(rand_rand_net, coords, rand_supp_nodes, cas_data, attack_type, graph_type="Rand_Rand", attack_size=attack_size, attack_point=attack_point)
# att_rand_rand_net = cas.attack_network(rand_rand_net, coords, rand_supp_nodes, attack_type, graph_type="Rand_Rand", attack_point=(0.5, 0.5), attack_radius=0.1)
make_data_frame(init_data, cas_data, rep, graph_type='Random_Random')
print("Repeat %d is done" % rep)
del intra_rgg_edges[:]
del intra_rand_edges[:]
del inter_rgg_edges[:]
del inter_rand_edges[:]
del intra_edges_num[:]
del inter_edges_num[:]
rep += 1
print("time: ", time.time() - start)
# draw_network(att_rgg_rgg_net, type="Attacked intra_RGG, inter_RGG")
# draw_network(att_rgg_rand_net, type="Attacked intra_RGG, inter_Rand")
# draw_network(att_rand_rgg_net, type="Attacked intra_Rand, inter_RGG")
# draw_network(att_rand_rand_net, type="Attacked intra_Rand, inter_Rand")
print("time: ", time.time() - start)
print("End")
|
// @flow
import * as React from 'react';
import {Button} from 'baseui/button';
import {StatefulPopover} from 'baseui/popover';
import {Paragraph3} from 'baseui/typography';
export default function Example() {
return (
<StatefulPopover
content={
<Paragraph3 padding="scale500">
Server-side rendered (check source!)
</Paragraph3>
}
accessibilityType={'tooltip'}
renderAll
>
<Button>
Always Rendered for SEO / server-side rendering
</Button>
</StatefulPopover>
);
}
|
# -*- coding: utf-8 -*-
#
# Cyclopts documentation build configuration file, created by
# sphinx-quickstart on Mon Jun 2 14:15:20 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
#import cloud_sptheme as csp
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.pngmath',
'sphinx.ext.mathjax',
'sphinx.ext.autosummary', # autosummary doesn't work with numpydoc...
'sphinx.ext.viewcode',
'scisphinx.numpydoc',
'sphinxcontrib.programoutput',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Cyclopts'
copyright = u'2014, Matthew J. Gidden'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
import cyclopts
version = cyclopts.__version__
# The full version, including alpha/beta/rc tags.
release = cyclopts.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# set the html theme
html_theme = "cloud"
# NOTE: there is also a red-colored version named "redcloud"
# set the theme path to point to cloud's theme data
html_theme_path = ["_theme"]
# [optional] set some of the options listed above...
# you can set any cloud conf option here, see
# https://pythonhosted.org/cloud_sptheme/cloud_theme.html#list-of-options
html_theme_options = {
"roottarget": "index",
"popuptoc": "false",
"sidebar_localtoc_title": "Table of Contents",
}
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '_static/uni_logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Cycloptsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Cyclopts.tex', u'Cyclopts Documentation',
u'Matthew J. Gidden', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'cyclopts', u'Cyclopts Documentation',
[u'Matthew J. Gidden'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Cyclopts', u'Cyclopts Documentation',
u'Matthew J. Gidden', 'Cyclopts', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'Cyclopts'
epub_author = u'Matthew J. Gidden'
epub_publisher = u'Matthew J. Gidden'
epub_copyright = u'2014, Matthew J. Gidden'
# The basename for the epub file. It defaults to the project name.
#epub_basename = u'Cyclopts'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Autodocumentation Flags
autodoc_member_order = "groupwise"
autoclass_content = "both"
autosummary_generate = []
# Prevent numpy from making silly tables
numpydoc_show_class_members = False
|
let quoteSample = "The quick brown fox jumps over the lazy dog.";
let alphabetRegex = /[a-z]/gi; // Change this line
let result = quoteSample.match(alphabetRegex); // Change this line
console.log(result,result.length);
|
describe('Application main view', function () {
beforeEach(function () {
browser.get('/');
});
it('should have a title', function () {
expect(browser.getTitle()).toEqual("Star Wars UI proto");
});
it('should have header', function () {
expect(element(by.css('dng-app dng-header')).isPresent()).toEqual(true);
});
it('should have container', function () {
expect(element(by.css('dng-app main')).isPresent()).toEqual(true);
});
});
|
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License'). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the 'license' file accompanying this file. This file is
# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
XGB_MAXIMIZE_METRICS = [
'accuracy',
'auc',
'aucpr',
"cox-nloglik",
'f1',
"gamma-nloglik",
'map',
'ndcg',
"poisson-nloglik",
"tweedie-nloglik"
]
XGB_MINIMIZE_METRICS = [
'error',
"gamma-deviance",
'logloss',
'mae',
'merror',
'mlogloss',
'mse',
'rmse'
]
LOGISTIC_REGRESSION_LABEL_RANGE_ERROR = "label must be in [0,1] for logistic regression"
MULTI_CLASS_LABEL_RANGE_ERROR = "label must be in [0, num_class)"
FEATURE_MISMATCH_ERROR = "feature_names mismatch"
LABEL_PREDICTION_SIZE_MISMATCH = "Check failed: preds.size() == info.labels_.size()"
ONLY_POS_OR_NEG_SAMPLES = "Check failed: !auc_error AUC: the dataset only contains pos or neg samples"
BASE_SCORE_RANGE_ERROR = "Check failed: base_score > 0.0f && base_score < 1.0f base_score must be in (0,1) " \
"for logistic loss"
POISSON_REGRESSION_ERROR = "Check failed: label_correct PoissonRegression: label must be nonnegative"
TWEEDIE_REGRESSION_ERROR = "Check failed: label_correct TweedieRegression: label must be nonnegative"
REG_LAMBDA_ERROR = "Parameter reg_lambda should be greater equal to 0"
CUSTOMER_ERRORS = [
LOGISTIC_REGRESSION_LABEL_RANGE_ERROR,
MULTI_CLASS_LABEL_RANGE_ERROR,
FEATURE_MISMATCH_ERROR,
LABEL_PREDICTION_SIZE_MISMATCH,
ONLY_POS_OR_NEG_SAMPLES,
BASE_SCORE_RANGE_ERROR,
POISSON_REGRESSION_ERROR,
TWEEDIE_REGRESSION_ERROR,
REG_LAMBDA_ERROR
]
_SEPARATOR = ':'
TRAIN_CHANNEL = 'train'
VAL_CHANNEL = 'validation'
|
var structvteprec__arp__sources__remote =
[
[ "header_", "structvteprec__arp__sources__remote.html#a79acbcd5a74adf1d9017ad2ccce0a426", null ],
[ "locator", "structvteprec__arp__sources__remote.html#a607e4c058e128f701ead418d5fc623f7", null ],
[ "src_mac", "structvteprec__arp__sources__remote.html#ac9effbe92ddbb18749a6eef7ee7d93bc", null ]
]; |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkccc.endpoint import endpoint_data
class ListCallEventDetailByContactIdRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'CCC', '2017-07-05', 'ListCallEventDetailByContactId')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_InstanceId(self):
return self.get_query_params().get('InstanceId')
def set_InstanceId(self,InstanceId):
self.add_query_param('InstanceId',InstanceId)
def get_ContactId(self):
return self.get_query_params().get('ContactId')
def set_ContactId(self,ContactId):
self.add_query_param('ContactId',ContactId) |
class Box {
constructor(x, y, width, height) {
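        // Physics options for the rectangle body: restitution (bounciness), friction,
        // and density. Bodies/World appear to come from Matter.js and the drawing
        // helpers (loadImage, push, translate, image, ...) from p5.js.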
var options = {
'restitution':0.8,
'friction':1.0,
'density':0.04,
}
this.body = Bodies.rectangle(x, y, width, height, options);
this.width = width;
this.height = height;
        this.image = loadImage("box.png");
World.add(world, this.body);
}
display(){
        var pos = this.body.position;
var angle = this.body.angle;
push();
translate(pos.x, pos.y);
rotate(angle);
rectMode(CENTER);
imageMode(CENTER);
image(this.image, 0, 0, this.width, this.height);
pop();
}
} |
import React from 'react';
import { connect } from 'react-redux';
// import 'react-dates/initialize';
import { DateRangePicker } from 'react-dates';
// import 'react-dates/lib/css/_datepicker.css';
import { setTextFilter, sortByDate, sortByAmount, setStartDate, setEndDate } from '../actions/filters';
export class ExpenseListFilters extends React.Component {
state = {
calendarFocused: null
};
onDatesChange = ({ startDate, endDate }) => {
this.props.setStartDate(startDate);
this.props.setEndDate(endDate);
};
onCalendarFocusChange = (calendarFocused) => {
this.setState(() => ({ calendarFocused }));
};
onTextChange = (e) => {
this.props.setTextFilter(e.target.value);
};
onSortChange = (e) => {
if (e.target.value === 'date') {
this.props.sortByDate();
} else if (e.target.value === 'amount') {
this.props.sortByAmount();
}
};
render() {
return (
<div className="content-container">
<div className="input-group">
<div className="input-group__item">
<input
type="text"
placeholder="Search expenses"
className="text-input"
value={this.props.filters.text}
onChange={this.onTextChange}
/>
</div>
<div className="input-group__item">
<select
className="select"
value={this.props.filters.sortBy}
onChange={this.onSortChange}
>
<option value="date">Date</option>
<option value="amount">Amount</option>
</select>
</div>
<div className="input-group__item">
<DateRangePicker
startDateId="filterStartDate"
startDate={this.props.filters.startDate}
endDateId="filterEndDate"
endDate={this.props.filters.endDate}
onDatesChange={this.onDatesChange}
focusedInput={this.state.calendarFocused}
onFocusChange={this.onCalendarFocusChange}
showClearDates={true}
numberOfMonths={1}
isOutsideRange={() => false}
/>
</div>
</div>
</div>
);
}
};
const mapStateToProps = (state) => ({
filters: state.filters
});
const mapDispatchToProps = (dispatch) => ({
setTextFilter: (text) => dispatch(setTextFilter(text)),
sortByDate: () => dispatch(sortByDate()),
sortByAmount: () => dispatch(sortByAmount()),
setStartDate: (date) => dispatch(setStartDate(date)),
setEndDate: (date) => dispatch(setEndDate(date))
});
export default connect(mapStateToProps, mapDispatchToProps)(ExpenseListFilters);
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('whitelisted', models.BooleanField(default=False, verbose_name='Whitelisted')),
('homepage', models.CharField(max_length=100, verbose_name='Homepage', blank=True)),
('allow_email', models.BooleanField(default=True, help_text='Show your email on VCS contributions.', verbose_name='Allow email')),
('user', models.ForeignKey(related_name='profile', verbose_name='User', to=settings.AUTH_USER_MODEL, unique=True)),
],
),
]
|
import torch
import copy
from spirl.modules.variational_inference import MultivariateGaussian, mc_kl_divergence
from spirl.rl.components.agent import BaseAgent
from spirl.rl.components.policy import Policy
from spirl.rl.policies.mlp_policies import SplitObsMLPPolicy, MLPPolicy, HybridConvMLPPolicy
from spirl.utils.general_utils import AttrDict, ParamDict
from spirl.utils.pytorch_utils import no_batchnorm_update
class PriorInitializedPolicy(Policy):
"""Initializes policy network with learned prior net."""
def __init__(self, config):
self._hp = self._default_hparams().overwrite(config)
self.update_model_params(self._hp.prior_model_params)
super().__init__()
def _default_hparams(self):
default_dict = ParamDict({
'prior_model': None, # prior model class
'prior_model_params': None, # parameters for the prior model
'prior_model_checkpoint': None, # checkpoint path of the prior model
'prior_model_epoch': 'latest', # epoch that checkpoint should be loaded for (defaults to latest)
'load_weights': True, # optionally allows to *not* load the weights (ie train from scratch)
})
return super()._default_hparams().overwrite(default_dict)
def forward(self, obs):
with no_batchnorm_update(self): # BN updates harm the initialized policy
return super().forward(obs)
def _build_network(self):
net = self._hp.prior_model(self._hp.prior_model_params, None)
if self._hp.load_weights:
BaseAgent.load_model_weights(net, self._hp.prior_model_checkpoint, self._hp.prior_model_epoch)
return net
def _compute_action_dist(self, obs):
return self.net.compute_learned_prior(obs, first_only=True)
def sample_rand(self, obs):
if len(obs.shape) == 1:
output_dict = self.forward(obs[None])
output_dict.action = output_dict.action[0]
return output_dict
return self.forward(obs) # for prior-initialized policy we run policy directly for rand sampling from prior
@staticmethod
def update_model_params(params):
# TODO: the device could be set to cpu even if GPU available
params.device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
params.batch_size = 1 # run only single-element batches for forward pass
class PriorAugmentedPolicy(Policy):
"""Augments policy output with prior divergence."""
def _default_hparams(self):
default_dict = ParamDict({
'max_divergence_range': 100, # range at which prior divergence gets clipped
})
return super()._default_hparams().overwrite(default_dict)
def forward(self, obs):
policy_output = super().forward(obs)
if not self._rollout_mode:
raw_prior_divergence, policy_output.prior_dist = self._compute_prior_divergence(policy_output, obs)
policy_output.prior_divergence = self.clamp_divergence(raw_prior_divergence)
return policy_output
def clamp_divergence(self, divergence):
return torch.clamp(divergence, -self._hp.max_divergence_range, self._hp.max_divergence_range)
def _compute_prior_divergence(self, policy_output, obs):
raise NotImplementedError
class LearnedPriorAugmentedPolicy(PriorAugmentedPolicy):
"""Augments policy output with divergence to learned prior distribution."""
def __init__(self, config):
self._hp = self._default_hparams().overwrite(config)
PriorInitializedPolicy.update_model_params(self._hp.prior_model_params)
PriorAugmentedPolicy.__init__(self) #super().__init__()
if self._hp.prior_batch_size > 0:
self._hp.prior_model_params.batch_size = self._hp.prior_batch_size
self.prior_net = self._hp.prior_model(self._hp.prior_model_params, None)
BaseAgent.load_model_weights(self.prior_net, self._hp.prior_model_checkpoint, self._hp.prior_model_epoch)
def _default_hparams(self):
default_dict = ParamDict({
'prior_model': None, # prior model class
'prior_model_params': None, # parameters for the prior model
'prior_model_checkpoint': None, # checkpoint path of the prior model
'prior_model_epoch': 'latest', # epoch that checkpoint should be loaded for (defaults to latest)
'prior_batch_size': -1, # optional: use separate batch size for prior network
'reverse_KL': False, # if True, computes KL[q||p] instead of KL[p||q] (can be more stable to opt)
'analytic_KL': False, # if True, computes KL divergence analytically, otherwise sampling based
'num_mc_samples': 10, # number of samples for monte-carlo KL estimate
})
return super()._default_hparams().overwrite(default_dict)
def _compute_prior_divergence(self, policy_output, obs):
with no_batchnorm_update(self.prior_net):
prior_dist = self.prior_net.compute_learned_prior(obs, first_only=True).detach()
if self._hp.analytic_KL:
return self._analytic_divergence(policy_output, prior_dist), prior_dist
return self._mc_divergence(policy_output, prior_dist), prior_dist
def _analytic_divergence(self, policy_output, prior_dist):
"""Analytic KL divergence between two Gaussian distributions."""
assert isinstance(prior_dist, MultivariateGaussian) and isinstance(policy_output.dist, MultivariateGaussian)
if self._hp.reverse_KL:
return prior_dist.kl_divergence(policy_output.dist).sum(dim=-1)
else:
return policy_output.dist.kl_divergence(prior_dist).sum(dim=-1)
def _mc_divergence(self, policy_output, prior_dist):
"""Monte-Carlo KL divergence estimate."""
if self._hp.reverse_KL:
return mc_kl_divergence(prior_dist, policy_output.dist, n_samples=self._hp.num_mc_samples)
else:
return mc_kl_divergence(policy_output.dist, prior_dist, n_samples=self._hp.num_mc_samples)
def sample_rand(self, obs):
with torch.no_grad():
with no_batchnorm_update(self.prior_net):
prior_dist = self.prior_net.compute_learned_prior(obs, first_only=True).detach()
action = prior_dist.sample()
action, log_prob = self._tanh_squash_output(action, 0) # ignore log_prob output
return AttrDict(action=action, log_prob=log_prob)
class LearnedPriorAugmentedPIPolicy(PriorInitializedPolicy, LearnedPriorAugmentedPolicy):
def __init__(self, config):
LearnedPriorAugmentedPolicy.__init__(self, config)
def forward(self, obs):
with no_batchnorm_update(self):
return LearnedPriorAugmentedPolicy.forward(self, obs)
class ACPriorInitializedPolicy(PriorInitializedPolicy):
"""PriorInitializedPolicy for case with separate prior obs --> uses prior observation as input only."""
def forward(self, obs):
return super().forward(self.net.unflatten_obs(obs).prior_obs)
class ACLearnedPriorAugmentedPIPolicy(LearnedPriorAugmentedPIPolicy):
"""LearnedPriorAugmentedPIPolicy for case with separate prior obs --> uses prior observation as input only."""
def forward(self, obs):
if obs.shape[0] == 1:
            return super().forward(self.net.unflatten_obs(obs).prior_obs)  # use policy_net for batch-size-1 inputs
return super().forward(self.prior_net.unflatten_obs(obs).prior_obs)
class ACLearnedPriorAugmentedPolicy(LearnedPriorAugmentedPolicy):
"""LearnedPriorAugmentedPolicy for case with separate prior obs."""
def __init__(self, config):
        super().__init__(config)  # for some reason this is necessary to avoid an error
def _compute_prior_divergence(self, policy_output, obs):
return super()._compute_prior_divergence(policy_output, self.prior_net.unflatten_obs(obs).prior_obs)
def sample_rand(self, obs):
return super().sample_rand(self.prior_net.unflatten_obs(obs).prior_obs)
class ACLearnedPriorAugmentedMLPPolicy(ACLearnedPriorAugmentedPolicy, SplitObsMLPPolicy):
"""LearnedPriorAugmentedPolicy for case with separate prior obs using MLP policy net."""
def __init__(self, config):
SplitObsMLPPolicy.__init__(self, config)
        ACLearnedPriorAugmentedPolicy.__init__(self, self._hp.overwrite(config))  # for some reason this is necessary to avoid an error
class ACLearnedPriorAugmentedHybridConvMLPPolicy(ACLearnedPriorAugmentedPolicy, HybridConvMLPPolicy):
"""LearnedPriorAugmentedPolicy for case with separate prior obs using HybridConvMLP policy net."""
def __init__(self, config):
HybridConvMLPPolicy.__init__(self, config)
        ACLearnedPriorAugmentedPolicy.__init__(self, self._hp.overwrite(config))  # for some reason this is necessary to avoid an error
|
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif
FOUNDATION_EXPORT double Pods_cocoaIGODemo_TestsVersionNumber;
FOUNDATION_EXPORT const unsigned char Pods_cocoaIGODemo_TestsVersionString[];
|
// Remove the right-click restriction
window.addEventListener("contextmenu",function(t) {
t.stopImmediatePropagation();
},true);
// Mouse-down handler: when the right button is pressed, save the element currently under the cursor
var clickedEl;
$(document).on('mousedown', (event)=>{
    // The right button is 3
    if (3 == event.which) clickedEl = mouseMoveTarget; // mouseMoveTarget is obtained in keyBind
});
// Mouse-down handler: refresh the annotations after a right-click
$(document).on('mousedown', (event)=>{
    if (3 == event.which) { // the right button is 3
        // fix: if the right button is clicked while the annotation elements are hidden by holding Ctrl,
        // the annotations would not reappear when Ctrl is released without this refresh
        setTimeout(() => {$('.wr_underline,.wr_myNote').css('display','block');}, 500);
    } else if (1 == event.which) { /* the left button is 1 */ }
});
// Listen for messages and send the DOM element to the background page to implement right-click copy
chrome.runtime.onMessage.addListener(function(request, sender, sendResponse) {
if(request == "getClickedEl") {
        // For some reason, passing mouseMoveTarget directly often arrived as {} during testing, so it is converted to an HTML string instead
sendResponse({ clickedEl: clickedEl.outerHTML, originClickedEl: clickedEl });
}
}); |
# coding: utf-8
# Copyright (c) 2008-2011 Volvox Development Team
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author: Konstantin Lepa <[email protected]>
"""ANSII Color formatting for output in terminal."""
from __future__ import print_function
import os
__ALL__ = ["colored", "cprint"]
VERSION = (1, 1, 0)
ATTRIBUTES = dict(
list(
zip(
["bold", "dark", "", "underline", "blink", "", "reverse", "concealed"],
list(range(1, 9)),
)
)
)
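# The two empty names above stand in for ANSI codes 3 (italic) and 6 (rapid blink),
# which are not supported here; the resulting placeholder entry is removed below.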
del ATTRIBUTES[""]
HIGHLIGHTS = dict(
list(
zip(
[
"on_grey",
"on_red",
"on_green",
"on_yellow",
"on_blue",
"on_magenta",
"on_cyan",
"on_white",
],
list(range(40, 48)),
)
)
)
COLORS = dict(
list(
zip(
["grey", "red", "green", "yellow", "blue", "magenta", "cyan", "white"],
list(range(30, 38)),
)
)
)
RESET = "\033[0m"
def colored(text, color=None, on_color=None, attrs=None):
"""Colorize text.
Available text colors:
red, green, yellow, blue, magenta, cyan, white.
Available text highlights:
on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, on_white.
Available attributes:
bold, dark, underline, blink, reverse, concealed.
Example:
colored('Hello, World!', 'red', 'on_grey', ['blue', 'blink'])
colored('Hello, World!', 'green')
"""
if os.getenv("ANSI_COLORS_DISABLED") is None:
fmt_str = "\033[%dm%s"
if color is not None:
text = fmt_str % (COLORS[color], text)
if on_color is not None:
text = fmt_str % (HIGHLIGHTS[on_color], text)
if attrs is not None:
for attr in attrs:
text = fmt_str % (ATTRIBUTES[attr], text)
text += RESET
return text
def cprint(text, color=None, on_color=None, attrs=None, **kwargs):
"""Print colorize text.
It accepts arguments of print function.
"""
print((colored(text, color, on_color, attrs)), **kwargs)
if __name__ == "__main__":
print("Current terminal type: %s" % os.getenv("TERM"))
print("Test basic colors:")
cprint("Grey color", "grey")
cprint("Red color", "red")
cprint("Green color", "green")
cprint("Yellow color", "yellow")
cprint("Blue color", "blue")
cprint("Magenta color", "magenta")
cprint("Cyan color", "cyan")
cprint("White color", "white")
print(("-" * 78))
print("Test highlights:")
cprint("On grey color", on_color="on_grey")
cprint("On red color", on_color="on_red")
cprint("On green color", on_color="on_green")
cprint("On yellow color", on_color="on_yellow")
cprint("On blue color", on_color="on_blue")
cprint("On magenta color", on_color="on_magenta")
cprint("On cyan color", on_color="on_cyan")
cprint("On white color", color="grey", on_color="on_white")
print("-" * 78)
print("Test attributes:")
cprint("Bold grey color", "grey", attrs=["bold"])
cprint("Dark red color", "red", attrs=["dark"])
cprint("Underline green color", "green", attrs=["underline"])
cprint("Blink yellow color", "yellow", attrs=["blink"])
cprint("Reversed blue color", "blue", attrs=["reverse"])
cprint("Concealed Magenta color", "magenta", attrs=["concealed"])
cprint(
"Bold underline reverse cyan color",
"cyan",
attrs=["bold", "underline", "reverse"],
)
cprint(
"Dark blink concealed white color",
"white",
attrs=["dark", "blink", "concealed"],
)
print(("-" * 78))
print("Test mixing:")
cprint("Underline red on grey color", "red", "on_grey", ["underline"])
cprint("Reversed green on red color", "green", "on_red", ["reverse"])
|
class Node {
constructor(element) {
this.element = element;
this.next = null;
this.prev = null;
}
}
class DoublyLinkedList {
constructor() {
this.head = new Node('head');
}
    // Find a node
find(item) {
let currentNode = this.head;
while (currentNode.element !== item) {
currentNode = currentNode.next;
}
return currentNode;
}
findPre(item) {
if (item === 'head') {
            throw new Error('The node you want to delete is the head node');
}
let currentNode = this.head;
while (currentNode.next && currentNode.next.element !== item) {
currentNode = currentNode.next;
}
return currentNode;
}
    // Insert a new node
insert(newElement, item) {
const newNode = new Node(newElement);
if (item.element === 'head') {
this.head.next = newNode;
newNode.prev = this.head;
return;
}
const currentNode = this.find(item);
const currentNodeNextNode = currentNode.next;
if (currentNodeNextNode === null) {
newNode.prev = currentNode;
currentNode.next = newNode;
return;
}
newNode.next = currentNode.next;
currentNode.next = newNode;
currentNodeNextNode.prev = newNode;
newNode.prev = currentNode;
}
    // Remove a node
    remove(item) {
        let preNode = this.findPre(item);
        if (preNode.next && preNode.next.element === item) {
            const nextNode = preNode.next.next;
            // Guard against removing the tail node, whose successor is null
            if (nextNode !== null) {
                nextNode.prev = preNode;
            }
            preNode.next = nextNode;
        }
    }
    // Show all elements of the list
toString() {
let currentNode = this.head;
let arr = [];
while (currentNode.next !== null) {
currentNode = currentNode.next;
arr.push(currentNode.element);
}
return arr;
}
};
// e.g;
// const LL = new DoublyLinkedList();
//
// LL.insert('a1', 'head');
// LL.insert('a2', 'a1');
// LL.insert('a3', 'a2');
// LL.insert('a4', 'a3');
// LL.remove('a2');
//
// console.log(LL);
// console.log(LL.toString()); |
# -*- coding: utf-8 -*-
import unittest
import phandim
import phantom
class TestPhantom(unittest.TestCase):
"""
Unit tests to check phantom dimensions
"""
def test_constructor1(self):
bx = [1,2,3,4,5]
by = [3,2,5,1,6]
bz = [8,5,23,9,4,3]
ph = phantom.phantom(bx, by, bz)
self.assertTrue(phandim.phandim.check_sorted(ph.bx()))
self.assertTrue(phandim.phandim.check_sorted(ph.by()))
self.assertTrue(phandim.phandim.check_sorted(ph.bz()))
def test_constructor2(self):
bx = None
by = [3,2,5,1,2]
bz = [8,5,23,8,4,3]
with self.assertRaises(RuntimeError):
phantom.phantom(bx, by, bz)
def test_constructor3(self):
bx = [3,2,5,1,2]
by = None
bz = [8,5,23,8,4,3]
with self.assertRaises(RuntimeError):
phantom.phantom(bx, by, bz)
def test_constructor5(self):
bx = [3,2,5,1,2]
by = [8,5,23,8,4,3]
bz = None
with self.assertRaises(RuntimeError):
phantom.phantom(bx, by, bz)
def test_access1(self):
bx = [3,2,5,1,2]
by = [8,5,23,8,4,3]
bz = [8,5,23,9,4,3,90]
ph = phantom.phantom(bx, by, bz)
self.assertTrue( ph.nx() == len(bx)-1 )
self.assertTrue( ph.ny() == len(by)-1 )
self.assertTrue( ph.nz() == len(bz)-1 )
if __name__ == '__main__':
unittest.main()
|
const degToRad = (angle) => ((angle * Math.PI) / 180)
class Snake {
constructor(x, y, angle, length, ctx) {
this.x = x
this.y = y
this.angle = angle
this.length = length
this.ctx = ctx
this.coordinates = []
}
draw() {
this.ctx.beginPath()
this.ctx.fillStyle = Snake.COLOR
this.ctx.arc(this.x, this.y, Snake.HEAD_RADIUS, 0, 2 * Math.PI)
this.ctx.fill()
this.ctx.closePath()
}
running(canvasSize, game) {
const radian = degToRad(this.angle)
this.x += Snake.SPEED * Math.cos(radian)
this.y += Snake.SPEED * Math.sin(radian)
this.validationCoordinates(canvasSize, game)
this.pushCoordinates()
this.draw()
    this.findSnakeCollision(game)
}
pushCoordinates() {
this.coordinates.push({
x: this.x,
y: this.y,
})
this.snakeLengthControl()
}
directionControl(e) {
switch(e.keyCode) {
case 37: {
this.turnLeft()
break
}
case 39: {
this.turnRight()
break
}
}
}
turnLeft() {
this.angle -= Snake.ROTATION_SPEED
}
turnRight() {
this.angle += Snake.ROTATION_SPEED
}
snakeLengthControl() {
if (this.coordinates.length > this.length) {
const { x, y } = this.coordinates[0]
this.ctx.beginPath()
this.ctx.fillStyle = '#fff'
this.ctx.arc(x, y, Snake.HEAD_RADIUS + 2, 0, 2 * Math.PI)
this.ctx.fill()
this.ctx.closePath()
this.coordinates.shift()
}
}
validationCoordinates({mapW, mapH}, game) {
if (
(this.x < 0) || (this.x > mapW) ||
(this.y < 0) || (this.y > mapH)
) {
finishGame(game)
}
}
  findSnakeCollision(game) {
this.coordinates.slice(0, -Snake.HEAD_RADIUS).forEach(({x, y}) => {
const distance = Math.sqrt(((x - this.x) ** 2) + ((y - this.y) ** 2))
if (distance < Snake.HEAD_RADIUS + 2) {
finishGame(game)
}
})
}
}
Snake.COLOR = '#ff5050'
Snake.INITIAL_LENGTH = 100
Snake.HEAD_RADIUS = 5
Snake.SPEED = 2
Snake.ROTATION_SPEED = 10
class Food {
constructor(x, y, color, ctx) {
this.x = x
this.y = y
this.color = color
this.draw(ctx)
}
draw(ctx) {
ctx.beginPath()
ctx.fillStyle = this.color
ctx.arc(this.x, this.y, Food.RADIUS, 0, 2 * Math.PI)
ctx.fill()
ctx.closePath()
}
destroy(ctx) {
ctx.beginPath()
ctx.fillStyle = '#fff'
ctx.strokeStyle = '#fff'
ctx.arc(this.x, this.y, Food.RADIUS, 0, 2 * Math.PI)
ctx.fill()
ctx.stroke()
ctx.closePath()
}
}
Food.RADIUS = 6
const maxAmountOfFood = 20
const foodGeneration = (foods = [], ctx) => {
let diff = maxAmountOfFood - foods.length
while (diff > 0) {
const x = (Math.random() * 500) >> 0
const y = (Math.random() * 500) >> 0
const color = '#'+((1 << 24) * Math.random()|0).toString(16)
const food = new Food(x, y, color, ctx)
foods.push(food)
diff--
}
}
const findFoodCollision = (foods, snake, ctx) => {
for (const food of foods) {
if (
(snake.x > food.x - 10) && (snake.x < food.x + 10) &&
(snake.y > food.y - 10) && (snake.y < food.y + 10)
) {
food.destroy(ctx)
foods.splice(foods.indexOf(food), 1)
snake.length += 1
changeScore(snake.length - Snake.INITIAL_LENGTH)
}
}
}
const changeScore = (score) => {
const scoreElem = document.getElementById('score')
scoreElem.innerHTML = `length: ${score}`
}
const startGame = (game, ctx) => {
const { snake, foods } = game
foodGeneration(foods, ctx)
const canvasSize = {mapW: 500, mapH: 500}
game.snakeInterval = setInterval(snake.running.bind(snake), 30, canvasSize, game)
game.foodInterval = setInterval(findFoodCollision, 30, foods, snake, ctx)
addEventListener('keydown', snake.directionControl.bind(snake))
}
const finishGame = (game) => {
if(game.finished) return
const { snake, snakeInterval, foodInterval } = game
clearInterval(snakeInterval)
clearInterval(foodInterval)
game.finished = true
alert('You lose :(')
}
window.onload = () => {
const canvas = document.getElementById('map')
const ctx = canvas.getContext('2d')
const snake = new Snake(100, 100, 0, Snake.INITIAL_LENGTH, ctx)
const game = {
snake,
foods: [],
}
startGame(game, ctx)
}
|
describe("triangleChecker", function() {
it("compares the length of 3 sides and returns false if it's not a valid triangle", function() {
expect(triangleChecker(2, 2, 8)).to.equal(false);
});
it("returns the number of equal sides", function() {
expect(triangleChecker(5, 5, 8)).to.equal(2);
});
});
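// A minimal sketch of an implementation consistent with the specs above; the value
// returned for a valid scalene triangle (0 here) is an assumption the tests do not cover.
function triangleChecker(a, b, c) {
  // Triangle inequality: each side must be shorter than the sum of the other two.
  if (a + b <= c || a + c <= b || b + c <= a) return false;
  if (a === b && b === c) return 3; // equilateral: three equal sides
  if (a === b || b === c || a === c) return 2; // isosceles: two equal sides
  return 0; // scalene (assumed)
}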
|
import { getTimeInRaster } from "./utilTime.js";
/**
* A custom server object providing commonly used information.
*/
export class MyServer {
/**
* Create a custom server object for the given server name.
* @param {import("..").NS} ns
* @param {string} name - The name of the server.
   * @param {number} moneyFactor - If the available money is below this fraction of the
   * maximum money, preparation is recommended before farming.
* @param {number} securityOffset - If the difference between current and minimum
* security is above this threshold preparation is recommended before farming.
*/
constructor(ns, name, moneyFactor = 0.9, securityOffset = 1) {
/**
* The server object provided by the game.
* @type {import("..").Server}
*/
this.server = ns.getServer(name);
/**
     * The name of the server.
* @type {string}
*/
this.name = name;
/**
* The amount of RAM currently available on the server.
* @type {number}
*/
this.ramAvailable = this.server.maxRam - this.server.ramUsed;
/**
* The percentage of the maximum money currently on the server.
* @type {number}
*/
this.moneyPercent =
(this.server.moneyAvailable / this.server.moneyMax) * 100;
/**
* The difference between current and minimum security.
* @type {number}
*/
this.deltaSecurity = this.server.hackDifficulty - this.server.minDifficulty;
/**
     * The current load of the server in percent.
*/
this.load = 100 - (this.ramAvailable / this.server.maxRam) * 100;
/**
* The time it takes to hack the server converted to the 200ms raster.
* @type {number}
*/
this.hackTime = getTimeInRaster(ns.getHackTime(this.name));
/**
* The time it takes to grow the server converted to the 200ms raster.
* @type {number}
*/
this.growTime = getTimeInRaster(ns.getGrowTime(this.name));
/**
* The time it takes to weaken the server converted to the 200ms raster.
* @type {number}
*/
this.weakenTime = getTimeInRaster(ns.getWeakenTime(this.name));
/**
     * If the available money is below this fraction of the
     * maximum money, preparation is recommended before farming.
* @type {number}
*/
this.moneyFactor = moneyFactor * 100;
/**
* If the difference between current and minimum security is
* above this threshold preparation is recommended before farming.
* @type {number}
*/
this.securityOffset = securityOffset;
/**
* The server is ready to be farmed.
* @type {boolean}
*/
this.farming =
this.moneyPercent > this.moneyFactor &&
this.deltaSecurity < this.securityOffset;
/**
* The number of CPU cores available on the server.
* @type {number}
*/
this.cores = this.server.cpuCores;
/**
* The chance for hack to succeed in percent.
* @type {number}
*/
this.successChance = ns.hackAnalyzeChance(this.name) * 100;
}
/**
* Update the server information.
* @param {import("..").NS} ns
*/
update(ns) {
this.server = ns.getServer(this.name);
this.ramAvailable = this.server.maxRam - this.server.ramUsed;
this.moneyPercent =
(this.server.moneyAvailable / this.server.moneyMax) * 100;
this.deltaSecurity = this.server.hackDifficulty - this.server.minDifficulty;
this.load = 100 - (this.ramAvailable / this.server.maxRam) * 100;
this.hackTime = getTimeInRaster(ns.getHackTime(this.name));
this.growTime = getTimeInRaster(ns.getGrowTime(this.name));
this.weakenTime = getTimeInRaster(ns.getWeakenTime(this.name));
this.farming =
this.moneyPercent > this.moneyFactor &&
this.deltaSecurity < this.securityOffset;
this.cores = this.server.cpuCores;
this.successChance = ns.hackAnalyzeChance(this.name) * 100;
}
/**
* Try to gain root access to the server.
* @param {import("..").NS} ns
* @returns {boolean} True if the unlock was successful.
*/
getRootAccess(ns) {
/** open all possible ports if root access is not available */
if (!this.server.hasAdminRights) {
/**
* The number of ports that have been opened.
* @type {number}
*/
let openPorts = 0;
if (ns.fileExists("BruteSSH.exe", "home")) {
ns.brutessh(this.name);
openPorts++;
}
if (ns.fileExists("FTPCrack.exe", "home")) {
ns.ftpcrack(this.name);
openPorts++;
}
if (ns.fileExists("relaySMTP.exe", "home")) {
ns.relaysmtp(this.name);
openPorts++;
}
if (ns.fileExists("HTTPWorm.exe", "home")) {
ns.httpworm(this.name);
openPorts++;
}
if (ns.fileExists("SQLInject.exe", "home")) {
ns.sqlinject(this.name);
openPorts++;
}
/** check if enough ports could be opened */
if (openPorts >= this.server.numOpenPortsRequired) {
/** get root access */
ns.nuke(this.name);
/** update the root access flag */
this.server.hasAdminRights = true;
}
}
return this.server.hasAdminRights;
}
/**
* Calculate a score value for the server to determine its attractiveness
   * as a hack target.
* @param {import("..").NS} ns
*/
calcScore(ns) {
/**
* The score of the server (higher is better).
* @type {number}
*/
var score = 0;
/**
* The player at his current hack level.
* @type {import("..").Player}
*/
var player = ns.getPlayer();
/** Check if the target can be hacked at all */
if (
this.server.requiredHackingSkill <= player.hacking &&
ns.hackAnalyze(this.name) > 0 &&
this.server.hasAdminRights
) {
/**
* A server object that is set to min difficulty to get the weaken time
* for farming mode.
* @type {import("..").Server}
*/
let minSecTarget = ns.getServer(this.name);
minSecTarget.hackDifficulty = minSecTarget.minDifficulty;
/** Check if the player has access to Formulas.exe */
if (ns.fileExists("Formulas.exe", "home")) {
score =
minSecTarget.moneyMax /
ns.formulas.hacking.weakenTime(minSecTarget, player);
/** Consider the success chance of hacking */
score *= ns.formulas.hacking.hackChance(minSecTarget, player);
} else {
/** If the player does not have access to Formulas.exe a simplified score is used */
score = this.server.moneyMax / this.server.minDifficulty;
/** Consider the success chance of hacking */
score *= this.successChance * 0.01;
}
}
return score;
}
}
|
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
def redirect_url(url_name, next_url_name=None, *args, **kwargs):
url = reverse(url_name) + "?next=" + reverse(next_url_name, kwargs=kwargs)
return url
class GradingsLoginRequiredTests(TestCase):
def setUp(self):
self.client = Client()
def test_gradings_list(self):
response = self.client.get(reverse('gradings-list'))
expected_redirect = redirect_url('account_login', 'gradings-list')
self.assertRedirects(response, expected_redirect)
|
"""
HVAC channels module for Zigbee Home Automation.
For more details about this component, please refer to the documentation at
https://home-assistant.io/integrations/zha/
"""
import asyncio
from collections import namedtuple
from typing import Any, Dict, List, Optional, Tuple, Union
from zigpy.exceptions import ZigbeeException
import zigpy.zcl.clusters.hvac as hvac
from zigpy.zcl.foundation import Status
from homeassistant.core import callback
from .. import registries, typing as zha_typing
from ..const import (
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_MIN_INT,
REPORT_CONFIG_OP,
SIGNAL_ATTR_UPDATED,
)
from ..helpers import retryable_req
from .base import ZigbeeChannel
AttributeUpdateRecord = namedtuple("AttributeUpdateRecord", "attr_id, attr_name, value")
REPORT_CONFIG_CLIMATE = (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 25)
REPORT_CONFIG_CLIMATE_DEMAND = (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 5)
REPORT_CONFIG_CLIMATE_DISCRETE = (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 1)
@registries.ZIGBEE_CHANNEL_REGISTRY.register(hvac.Dehumidification.cluster_id)
class Dehumidification(ZigbeeChannel):
"""Dehumidification channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(hvac.Fan.cluster_id)
class FanChannel(ZigbeeChannel):
"""Fan channel."""
_value_attribute = 0
REPORT_CONFIG = ({"attr": "fan_mode", "config": REPORT_CONFIG_OP},)
def __init__(
self, cluster: zha_typing.ZigpyClusterType, ch_pool: zha_typing.ChannelPoolType
):
"""Init Thermostat channel instance."""
super().__init__(cluster, ch_pool)
self._fan_mode = None
@property
def fan_mode(self) -> Optional[int]:
"""Return current fan mode."""
return self._fan_mode
async def async_set_speed(self, value) -> None:
"""Set the speed of the fan."""
try:
await self.cluster.write_attributes({"fan_mode": value})
except ZigbeeException as ex:
self.error("Could not set speed: %s", ex)
return
async def async_update(self) -> None:
"""Retrieve latest state."""
result = await self.get_attribute_value("fan_mode", from_cache=True)
if result is not None:
self._fan_mode = result
self.async_send_signal(
f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", 0, "fan_mode", result
)
@callback
def attribute_updated(self, attrid: int, value: Any) -> None:
"""Handle attribute update from fan cluster."""
attr_name = self.cluster.attributes.get(attrid, [attrid])[0]
self.debug(
"Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value
)
if attrid == self._value_attribute:
self._fan_mode = value
self.async_send_signal(
f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value
)
@registries.ZIGBEE_CHANNEL_REGISTRY.register(hvac.Pump.cluster_id)
class Pump(ZigbeeChannel):
"""Pump channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(hvac.Thermostat.cluster_id)
class ThermostatChannel(ZigbeeChannel):
"""Thermostat channel."""
def __init__(
self, cluster: zha_typing.ZigpyClusterType, ch_pool: zha_typing.ChannelPoolType
) -> None:
"""Init Thermostat channel instance."""
super().__init__(cluster, ch_pool)
self._init_attrs = {
"abs_min_heat_setpoint_limit": True,
"abs_max_heat_setpoint_limit": True,
"abs_min_cool_setpoint_limit": True,
"abs_max_cool_setpoint_limit": True,
"ctrl_seqe_of_oper": False,
"local_temp": False,
"max_cool_setpoint_limit": True,
"max_heat_setpoint_limit": True,
"min_cool_setpoint_limit": True,
"min_heat_setpoint_limit": True,
"occupancy": False,
"occupied_cooling_setpoint": False,
"occupied_heating_setpoint": False,
"pi_cooling_demand": False,
"pi_heating_demand": False,
"running_mode": False,
"running_state": False,
"system_mode": False,
"unoccupied_heating_setpoint": False,
"unoccupied_cooling_setpoint": False,
}
self._abs_max_cool_setpoint_limit = 3200 # 32C
self._abs_min_cool_setpoint_limit = 1600 # 16C
self._ctrl_seqe_of_oper = 0xFF
self._abs_max_heat_setpoint_limit = 3000 # 30C
self._abs_min_heat_setpoint_limit = 700 # 7C
self._running_mode = None
self._max_cool_setpoint_limit = None
self._max_heat_setpoint_limit = None
self._min_cool_setpoint_limit = None
self._min_heat_setpoint_limit = None
self._local_temp = None
self._occupancy = None
self._occupied_cooling_setpoint = None
self._occupied_heating_setpoint = None
self._pi_cooling_demand = None
self._pi_heating_demand = None
self._running_state = None
self._system_mode = None
self._unoccupied_cooling_setpoint = None
self._unoccupied_heating_setpoint = None
self._report_config = [
{"attr": "local_temp", "config": REPORT_CONFIG_CLIMATE},
{"attr": "occupied_cooling_setpoint", "config": REPORT_CONFIG_CLIMATE},
{"attr": "occupied_heating_setpoint", "config": REPORT_CONFIG_CLIMATE},
{"attr": "unoccupied_cooling_setpoint", "config": REPORT_CONFIG_CLIMATE},
{"attr": "unoccupied_heating_setpoint", "config": REPORT_CONFIG_CLIMATE},
{"attr": "running_mode", "config": REPORT_CONFIG_CLIMATE},
{"attr": "running_state", "config": REPORT_CONFIG_CLIMATE_DEMAND},
{"attr": "system_mode", "config": REPORT_CONFIG_CLIMATE},
{"attr": "occupancy", "config": REPORT_CONFIG_CLIMATE_DISCRETE},
{"attr": "pi_cooling_demand", "config": REPORT_CONFIG_CLIMATE_DEMAND},
{"attr": "pi_heating_demand", "config": REPORT_CONFIG_CLIMATE_DEMAND},
]
@property
def abs_max_cool_setpoint_limit(self) -> int:
"""Absolute maximum cooling setpoint."""
return self._abs_max_cool_setpoint_limit
@property
def abs_min_cool_setpoint_limit(self) -> int:
"""Absolute minimum cooling setpoint."""
return self._abs_min_cool_setpoint_limit
@property
def abs_max_heat_setpoint_limit(self) -> int:
"""Absolute maximum heating setpoint."""
return self._abs_max_heat_setpoint_limit
@property
def abs_min_heat_setpoint_limit(self) -> int:
"""Absolute minimum heating setpoint."""
return self._abs_min_heat_setpoint_limit
@property
def ctrl_seqe_of_oper(self) -> int:
"""Control Sequence of operations attribute."""
return self._ctrl_seqe_of_oper
@property
def max_cool_setpoint_limit(self) -> int:
"""Maximum cooling setpoint."""
if self._max_cool_setpoint_limit is None:
return self.abs_max_cool_setpoint_limit
return self._max_cool_setpoint_limit
@property
def min_cool_setpoint_limit(self) -> int:
"""Minimum cooling setpoint."""
if self._min_cool_setpoint_limit is None:
return self.abs_min_cool_setpoint_limit
return self._min_cool_setpoint_limit
@property
def max_heat_setpoint_limit(self) -> int:
"""Maximum heating setpoint."""
if self._max_heat_setpoint_limit is None:
return self.abs_max_heat_setpoint_limit
return self._max_heat_setpoint_limit
@property
def min_heat_setpoint_limit(self) -> int:
"""Minimum heating setpoint."""
if self._min_heat_setpoint_limit is None:
return self.abs_min_heat_setpoint_limit
return self._min_heat_setpoint_limit
@property
def local_temp(self) -> Optional[int]:
"""Thermostat temperature."""
return self._local_temp
@property
def occupancy(self) -> Optional[int]:
"""Is occupancy detected."""
return self._occupancy
@property
def occupied_cooling_setpoint(self) -> Optional[int]:
"""Temperature when room is occupied."""
return self._occupied_cooling_setpoint
@property
def occupied_heating_setpoint(self) -> Optional[int]:
"""Temperature when room is occupied."""
return self._occupied_heating_setpoint
@property
def pi_cooling_demand(self) -> int:
"""Cooling demand."""
return self._pi_cooling_demand
@property
def pi_heating_demand(self) -> int:
"""Heating demand."""
return self._pi_heating_demand
@property
def running_mode(self) -> Optional[int]:
"""Thermostat running mode."""
return self._running_mode
@property
def running_state(self) -> Optional[int]:
"""Thermostat running state, state of heat, cool, fan relays."""
return self._running_state
@property
def system_mode(self) -> Optional[int]:
"""System mode."""
return self._system_mode
@property
def unoccupied_cooling_setpoint(self) -> Optional[int]:
"""Temperature when room is not occupied."""
return self._unoccupied_cooling_setpoint
@property
def unoccupied_heating_setpoint(self) -> Optional[int]:
"""Temperature when room is not occupied."""
return self._unoccupied_heating_setpoint
@callback
def attribute_updated(self, attrid, value):
"""Handle attribute update cluster."""
attr_name = self.cluster.attributes.get(attrid, [attrid])[0]
self.debug(
"Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value
)
setattr(self, f"_{attr_name}", value)
self.async_send_signal(
f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}",
AttributeUpdateRecord(attrid, attr_name, value),
)
async def _chunk_attr_read(self, attrs, cached=False):
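        # Read the requested attributes in chunks of four per read_attributes call.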
chunk, attrs = attrs[:4], attrs[4:]
while chunk:
res, fail = await self.cluster.read_attributes(chunk, allow_cache=cached)
self.debug("read attributes: Success: %s. Failed: %s", res, fail)
for attr in chunk:
self._init_attrs.pop(attr, None)
if attr in fail:
continue
if isinstance(attr, str):
setattr(self, f"_{attr}", res[attr])
self.async_send_signal(
f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}",
AttributeUpdateRecord(None, attr, res[attr]),
)
chunk, attrs = attrs[:4], attrs[4:]
async def configure_reporting(self):
"""Configure attribute reporting for a cluster.
This also swallows DeliveryError exceptions that are thrown when
devices are unreachable.
"""
kwargs = {}
if self.cluster.cluster_id >= 0xFC00 and self._ch_pool.manufacturer_code:
kwargs["manufacturer"] = self._ch_pool.manufacturer_code
chunk, rest = self._report_config[:4], self._report_config[4:]
while chunk:
attrs = {record["attr"]: record["config"] for record in chunk}
try:
res = await self.cluster.configure_reporting_multiple(attrs, **kwargs)
self._configure_reporting_status(attrs, res[0])
except (ZigbeeException, asyncio.TimeoutError) as ex:
self.debug(
"failed to set reporting on '%s' cluster for: %s",
self.cluster.ep_attribute,
str(ex),
)
break
chunk, rest = rest[:4], rest[4:]
def _configure_reporting_status(
self, attrs: Dict[Union[int, str], Tuple], res: Union[List, Tuple]
) -> None:
"""Parse configure reporting result."""
if not isinstance(res, list):
# assume default response
self.debug(
"attr reporting for '%s' on '%s': %s",
attrs,
self.name,
res,
)
return
if res[0].status == Status.SUCCESS and len(res) == 1:
self.debug(
"Successfully configured reporting for '%s' on '%s' cluster: %s",
attrs,
self.name,
res,
)
return
failed = [
self.cluster.attributes.get(r.attrid, [r.attrid])[0]
for r in res
if r.status != Status.SUCCESS
]
attrs = {self.cluster.attributes.get(r, [r])[0] for r in attrs}
self.debug(
"Successfully configured reporting for '%s' on '%s' cluster",
attrs - set(failed),
self.name,
)
self.debug(
"Failed to configure reporting for '%s' on '%s' cluster: %s",
failed,
self.name,
res,
)
@retryable_req(delays=(1, 1, 3))
async def async_initialize_channel_specific(self, from_cache: bool) -> None:
"""Initialize channel."""
cached = [a for a, cached in self._init_attrs.items() if cached]
uncached = [a for a, cached in self._init_attrs.items() if not cached]
await self._chunk_attr_read(cached, cached=True)
await self._chunk_attr_read(uncached, cached=False)
async def async_set_operation_mode(self, mode) -> bool:
"""Set Operation mode."""
if not await self.write_attributes({"system_mode": mode}):
self.debug("couldn't set '%s' operation mode", mode)
return False
self._system_mode = mode
self.debug("set system to %s", mode)
return True
async def async_set_heating_setpoint(
self, temperature: int, is_away: bool = False
) -> bool:
"""Set heating setpoint."""
if is_away:
data = {"unoccupied_heating_setpoint": temperature}
else:
data = {"occupied_heating_setpoint": temperature}
if not await self.write_attributes(data):
self.debug("couldn't set heating setpoint")
return False
if is_away:
self._unoccupied_heating_setpoint = temperature
else:
self._occupied_heating_setpoint = temperature
self.debug("set heating setpoint to %s", temperature)
return True
async def async_set_cooling_setpoint(
self, temperature: int, is_away: bool = False
) -> bool:
"""Set cooling setpoint."""
if is_away:
data = {"unoccupied_cooling_setpoint": temperature}
else:
data = {"occupied_cooling_setpoint": temperature}
if not await self.write_attributes(data):
self.debug("couldn't set cooling setpoint")
return False
if is_away:
self._unoccupied_cooling_setpoint = temperature
else:
self._occupied_cooling_setpoint = temperature
self.debug("set cooling setpoint to %s", temperature)
return True
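# Editorial note (illustrative): ZCL thermostat setpoints are int16 values in
# hundredths of a degree Celsius, so a caller working in degrees would convert
# before using the setters above. The names below are hypothetical:
#
#   zcl_value = int(round(21.5 * 100))            # 21.5 degC -> 2150
#   await thermostat_channel.async_set_heating_setpoint(zcl_value)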
async def get_occupancy(self) -> Optional[bool]:
"""Get unreportable occupancy attribute."""
try:
res, fail = await self.cluster.read_attributes(["occupancy"])
self.debug("read 'occupancy' attr, success: %s, fail: %s", res, fail)
if "occupancy" not in res:
return None
self._occupancy = res["occupancy"]
return bool(self.occupancy)
except ZigbeeException as ex:
self.debug("Couldn't read 'occupancy' attribute: %s", ex)
async def write_attributes(self, data, **kwargs):
"""Write attributes helper."""
try:
res = await self.cluster.write_attributes(data, **kwargs)
except ZigbeeException as exc:
self.debug("couldn't write %s: %s", data, exc)
return False
self.debug("wrote %s attrs, Status: %s", data, res)
return self.check_result(res)
@staticmethod
def check_result(res: list) -> bool:
"""Normalize the result."""
if not isinstance(res, list):
return False
return all([record.status == Status.SUCCESS for record in res[0]])
@registries.ZIGBEE_CHANNEL_REGISTRY.register(hvac.UserInterface.cluster_id)
class UserInterface(ZigbeeChannel):
"""User interface (thermostat) channel."""
|
"use strict";
import React from "react";
import Wprr from "wprr";
import Layout from "wprr/elements/layout/Layout";
// import LoadAdditionalItems from "./LoadAdditionalItems";
export default class LoadAdditionalItems extends Layout {
/**
* Constructor
*/
constructor() {
//console.log("LoadAdditionalItems::constructor");
super();
this._layoutName = "loadAdditionalItems";
this._loaded = Wprr.sourceValue(0);
this._updateCommand = Wprr.commands.callFunction(this, this._updateLoadStatus);
}
_updateLoadStatus() {
//console.log("LoadAdditionalItems::_updateLoadStatus");
let ids = Wprr.utils.array.removeValues(Wprr.utils.array.singleOrArray(this.getFirstInput("ids")), [null, undefined, 0]);
let items = this.getAdditionalLoader().items;
let isLoaded = items.hasItemsWithType(ids, this.getAdditionalLoader()._fieldToCheckFor);
this._loaded.value = isLoaded ? 1 : 0;
}
getAdditionalLoader() {
console.log("getAdditionalLoader");
let additionalLoader = this.getFirstInput(
"loader",
Wprr.sourceReference("loadAdditionalItems/slots/loader"),
Wprr.sourceReference("items", "additionalLoader")
);
return additionalLoader;
}
_prepareInitialRender() {
//console.log("LoadAdditionalItems::_prepareInitialRender");
super._prepareInitialRender();
this.getAdditionalLoader().addCommand(this._updateCommand, "loaded");
//METODO
}
_prepareRender() {
//console.log("LoadAdditionalItems::_prepareRender");
super._prepareRender();
let ids = Wprr.utils.array.removeValues(Wprr.utils.array.singleOrArray(this.getFirstInput("ids")), [null, undefined, 0]);
if(ids) {
this.getAdditionalLoader().loadItems(ids);
this._updateLoadStatus();
}
else {
console.error("No ids set", this);
}
}
_getLayout(aSlots) {
let isDoneSource = Wprr.sourceStatic(this._externalStorage, "loaded");
return React.createElement(React.Fragment, {},
React.createElement(Wprr.HasData, {"check": this._loaded},
aSlots.default(React.createElement("div", {}, "No element set"))
),
React.createElement(Wprr.HasData, {"check": this._loaded, checkType: "invert/default"},
aSlots.slot("loaderDisplay", React.createElement("div", {}, Wprr.translateText("Loading...")))
),
);
}
static createFromRelation(aDirection, aConnectionType, aObjectType, aElement) {
let pointerName = (aDirection === "outgoing") ? "to" : "from";
let idSource = Wprr.sourceReference("item", "multipleRelations." + aDirection + "." + aConnectionType + "." + aObjectType + ".(every)." + pointerName + ".id");
return React.createElement(LoadAdditionalItems, {"ids": idSource},
aElement
);
}
} |
# Copyright 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .func_registry import add_func
from .inner_compiler import compile_func
import numpy
class Var:
def __init__(self, context, ssa_val):
self._context = context
self._ssa_val = ssa_val
@property
def shape(self):
return self._shape(self._context, self._ssa_val)
@property
def dtype(self):
return self._dtype(self._context, self._ssa_val)
@property
def type(self):
return self._type(self._context, self._ssa_val)
def __len__(self):
res = self._len(self._context, self._ssa_val)
if res is None:
raise ValueError('No len')
return res
def __getitem__(self, index):
return self._getitem(self._context, self._ssa_val, index)
def __add__(self, o): return self._binop(self._context, self._ssa_val, o, '+')
def __radd__(self, o): return self._binop(self._context, self._ssa_val, o, '+')
def __sub__(self, o): return self._binop(self._context, self._ssa_val, o, '-')
def __rsub__(self, o): return self._binop(self._context, self._ssa_val, o, 'r-')
def __mul__(self, o): return self._binop(self._context, self._ssa_val, o, '*')
def __rmul__(self, o): return self._binop(self._context, self._ssa_val, o, '*')
def __truediv__(self, o): return self._binop(self._context, self._ssa_val, o, '/')
def __lt__(self, o): return self._binop(self._context, self._ssa_val, o, 'lt')
def __le__(self, o): return self._binop(self._context, self._ssa_val, o, 'le')
def __gt__(self, o): return self._binop(self._context, self._ssa_val, o, 'gt')
def __ge__(self, o): return self._binop(self._context, self._ssa_val, o, 'ge')
def __eq__(self, o): return self._binop(self._context, self._ssa_val, o, 'eq')
def __ne__(self, o): return self._binop(self._context, self._ssa_val, o, 'ne')
def __str__(self): return self._str(self._context, self._ssa_val)
def __repr__(self): return self._str(self._context, self._ssa_val)
class Type:
def __init__(self, mlir_type, eq, printer):
self._mlir_type = mlir_type
self._eq = eq
self._str = printer
def __eq__(self, other):
return self._eq(self._mlir_type, other._mlir_type)
def __str__(self): return self._str(self._mlir_type)
def __repr__(self): return self._str(self._mlir_type)
def is_literal(val):
return not isinstance(val, Var)
DYNAMIC_DIM = -1
class Builder:
def __init__(self, context):
self._context = context
def broadcast(self, *args, result_type):
return self._broadcast(self._context, args, result_type)
def init_tensor(self, shape, dtype, init_val=None):
return self._init_tensor(self._context, shape, dtype, init_val)
def fill_tensor(self, tensor, value):
return self._fill_tensor(self._context, tensor, value)
def linalg_generic(self, inputs, outputs, iterators, maps, body):
return self._linalg_generic(self._context, inputs, outputs, iterators, maps, body)
def linalg_index(self, dim):
return self._linalg_index(self._context, dim)
def from_elements(self, values, dtype):
return self._from_elements(self._context, values, dtype)
def extract(self, value, indices):
return self._extract(self._context, value, indices)
def reshape(self, src, dims):
return self._reshape(self._context, src, dims)
def external_call(self, name, inputs, outputs, decorate=True):
return self._external_call(self._context, name, inputs, outputs, decorate)
def insert(self, src, dst, offsets, sizes, strides):
return self._insert(self._context, src, dst, offsets, sizes, strides)
def inline_func(self, func, res_type, *args): # TODO: kwargs
return self._inline_func(self._context, func, res_type, args)
def cast(self, arg, dtype):
return self._cast(self._context, arg, dtype)
def undef(self, dtype):
return self._undef(self._context, dtype)
def subview(self, src, offset, size=None, strides=None, result_rank=None):
return self._subview(self._context, src, offset, size, strides, result_rank)
def select(self, cond, true_val, false_val):
return self._select(self._context, cond, true_val, false_val)
def force_copy(self, arr):
return self._force_copy(self._context, arr)
def array_type(self, dims, dtype):
return self._array_type(self._context, dims, dtype)
class FuncRegistry:
def __init__(self):
self.funcs = {}
def register_func(self, name, orig_func = None):
def _decorator(func):
mangled_name = name + '()'
assert mangled_name not in self.funcs
self.funcs[mangled_name] = func
if orig_func is not None:
add_func(orig_func, name)
return func
return _decorator
def register_attr(self, name):
def _decorator(func):
assert name not in self.funcs
self.funcs[name] = func
return func
return _decorator
def lookup_func(self, name):
return self.funcs.get(name)
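# Illustrative sketch (not part of the original module): how the registry above is
# typically populated. The implementation signature below is an assumption; only
# the decorator mechanics come from this class.
#
#   registry = FuncRegistry()
#
#   @registry.register_func('numpy.add', numpy.add)
#   def add_impl(builder, lhs, rhs):
#       ...
#
#   registry.lookup_func('numpy.add()')   # -> add_impl (note the mangled '()' suffix)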
def _get_numpy_types(builder):
return [
(builder.bool, numpy.bool_),
(builder.int8, numpy.int8),
(builder.uint8, numpy.uint8),
(builder.int16, numpy.int16),
(builder.uint16, numpy.uint16),
(builder.int32, numpy.int32),
(builder.uint32, numpy.uint32),
(builder.int64, numpy.int64),
(builder.uint64, numpy.uint64),
(builder.float32, numpy.float32),
(builder.float64, numpy.float64),
]
def type_to_numpy(builder, t):
for src, dst in _get_numpy_types(builder):
if t == src: return dst
assert False, f'Cannot convert type: {str(t)}'
def type_from_numpy(builder, t):
for dst, src in _get_numpy_types(builder):
if t == src: return dst
assert False, f'Cannot convert type: {str(t)}'
def broadcast_type(builder, args):
l = len(args)
assert(l > 0)
lhs = args[0]
if l == 1:
return lhs
elif l == 2:
rhs = args[1]
else:
rhs = broadcast_type(builder, args[1:])
lhs = type_to_numpy(builder, lhs)
rhs = type_to_numpy(builder, rhs)
return type_from_numpy(builder, numpy.promote_types(lhs, rhs))
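# For reference, the promotion above delegates to numpy.promote_types, e.g.
#
#   >>> numpy.promote_types(numpy.int32, numpy.float32)
#   dtype('float64')
#
# so broadcast_type over an int32 and a float32 argument yields the builder's float64.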
def get_val_type(builder, a):
if isinstance(a, float):
return builder.float64
elif isinstance(a, int):
return builder.int64
return a.type
def get_array_type(builder, a):
if isinstance(a, float):
return builder.float64
elif isinstance(a, int):
return builder.int64
return a.dtype
def broadcast_type_arrays(builder, args):
return broadcast_type(builder, tuple(get_array_type(builder, a) for a in args))
def eltwise(builder, args, body, res_type = None):
if isinstance(args, tuple):
args = builder.broadcast(*args, result_type=broadcast_type_arrays(builder, args))
else:
args = (args,)
if res_type is None:
res_type = args[0].dtype
shape = args[0].shape
try:
num_dims = len(shape)
except (TypeError, ValueError):  # scalar or opaque shapes have no usable len()
num_dims = 0
if num_dims == 0:
dummy = builder.cast(0, res_type)
return builder.inline_func(body, res_type, *(args + (dummy,)))
else:
iterators = ['parallel' for _ in range(num_dims)]
dims = ','.join(['d%s' % i for i in range(num_dims)])
expr = f'({dims}) -> ({dims})'
maps = [expr for _ in range(len(args) + 1)]
init = builder.init_tensor(shape, res_type)
return builder.linalg_generic(args, init, iterators, maps, body)
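# Illustrative sketch (not part of the original module): eltwise() lowers an
# element-wise op either to an inlined scalar call (rank 0) or to a linalg.generic
# over parallel iterators; the body appears to receive the input element(s) plus
# one output placeholder. A unary negate could look like:
#
#   def negate(builder, arg):
#       return eltwise(builder, arg, lambda a, b: -a)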
def convert_array(builder, arr, dtype):
if arr.dtype == dtype:
return arr
return eltwise(builder, arr, lambda a, b: a, dtype)
def _flatten_tuple(src):
try:
l = len(src)
except (TypeError, ValueError):  # leaf values have no usable len()
l = 0
if l != 0:
shape, elements = _flatten_tuple(src[0])
for i in range(1, l):
shape1, elements1 = _flatten_tuple(src[i])
assert(shape == shape1)
elements += elements1
if shape is None:
shape = [l]
else:
shape = [l] + shape
return (shape, elements)
return (None, [src])
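# Example of the flattening above (plain Python, verifiable):
#
#   >>> _flatten_tuple(((1, 2), (3, 4)))
#   ([2, 2], [1, 2, 3, 4])
#   >>> _flatten_tuple(5)
#   (None, [5])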
def asarray(builder, src, dtype=None):
shape, elements = _flatten_tuple(src)
if shape is None:
return src
if dtype is None:
dtype = broadcast_type_arrays(builder, elements)
arr = builder.from_elements(elements, dtype)
if len(shape) > 1:
arr = builder.reshape(arr, shape)
return arr
def is_int(t, b):
types = [
b.bool,
b.int8,
b.uint8,
b.int16,
b.uint16,
b.int32,
b.uint32,
b.int64,
b.uint64,
]
return t in types
def is_float(t, b):
return t == b.float16 or t == b.float32 or t == b.float64
def dtype_str(builder, dtype):
names = [
(builder.int8, 'int8'),
(builder.int16, 'int16'),
(builder.int32, 'int32'),
(builder.int64, 'int64'),
(builder.uint8, 'uint8'),
(builder.uint16, 'uint16'),
(builder.uint32, 'uint32'),
(builder.uint64, 'uint64'),
(builder.int8_signless, 'int8'),
(builder.int16_signless, 'int16'),
(builder.int32_signless, 'int32'),
(builder.int64_signless, 'int64'),
(builder.float32, 'float32'),
(builder.float64, 'float64'),
]
for t, name in names:
if t == dtype:
return name
assert False, f'dtype_str unhandled type: {dtype}'
|
import functools
import json
from kubernetes import client, config
from datetime import timedelta
from flask import Blueprint, current_app, flash, g, redirect, render_template, request, session, url_for, jsonify
from flask_jwt_extended import JWTManager, jwt_required, jwt_optional, create_access_token, get_jwt_identity
bp = Blueprint("apiv1", __name__, url_prefix="/api/v1")
@bp.before_app_request
def init_api():
"""Creates instances of the incluster config and client API
and stores them in global"""
g.configuration = config.load_incluster_config()
g.apps_v1_api_instance = client.AppsV1Api(client.ApiClient(g.configuration))
g.batch_v1beta1_instance = client.BatchV1beta1Api(client.ApiClient(g.configuration))
g.PD_REGISTRY = current_app.config['PD_REGISTRY']
def list_cron_job(name, namespace):
namespace = "%s" % str(namespace)
name = "metadata.name=%s" % str(name)
api_response = g.batch_v1beta1_instance.list_namespaced_cron_job(
namespace=namespace,
field_selector=name
)
if len(api_response.items) == 1:
return api_response.items[0]
else:
return "CronJob selector not unique enough."
def list_daemon_set(name, namespace):
namespace = "%s" % str(namespace)
name = "metadata.name=%s" % str(name)
api_response = g.apps_v1_api_instance.list_namespaced_daemon_set(
namespace=namespace,
field_selector=name
)
if len(api_response.items) == 1:
return api_response.items[0]
else:
return "DaemonSet selector not unique enough."
def list_deployment(name, namespace):
namespace = "%s" % str(namespace)
name = "metadata.name=%s" % str(name)
api_response = g.apps_v1_api_instance.list_namespaced_deployment(
namespace=namespace,
field_selector=name
)
if len(api_response.items) == 1:
return api_response.items[0]
else:
return "Deployment selector not unique enough."
def patch_cron_job(cron_job_object, image_name, image_tag, name, namespace):
image = "%s/%s:%s" % (g.PD_REGISTRY, image_name, image_tag)
cron_job_object.spec.job_template.spec.template.spec.containers[0].image = image
api_response = g.batch_v1beta1_instance.patch_namespaced_cron_job(
name=name,
namespace=namespace,
body=cron_job_object,
field_manager="push-deploy")
print("CronJob updated. status='%s'" % str(api_response.status))
def patch_daemon_set(daemon_set_object, image_name, image_tag, name, namespace):
image = "%s/%s:%s" % (g.PD_REGISTRY, image_name, image_tag)
daemon_set_object.spec.template.spec.containers[0].image = image
api_response = g.apps_v1_api_instance.patch_namespaced_daemon_set(
name=name,
namespace=namespace,
body=daemon_set_object,
field_manager="push-deploy")
print("DaemonSet updated. status='%s'" % str(api_response.status))
def patch_deployment(deployment_object, image_name, image_tag, name, namespace):
image = "%s/%s:%s" % (g.PD_REGISTRY, image_name, image_tag)
deployment_object.spec.template.spec.containers[0].image = image
api_response = g.apps_v1_api_instance.patch_namespaced_deployment(
name=name,
namespace=namespace,
body=deployment_object,
field_manager="push-deploy")
print("Deployment updated. status='%s'" % str(api_response.status))
@bp.route('/', methods=['GET'])
@jwt_required
def index():
return jsonify(), 200
@bp.route('/auth', methods=['POST'])
def login():
if not request.is_json:
return jsonify({"msg": "Missing JSON in request"}), 400
username = request.json.get('username', None)
password = request.json.get('password', None)
if not username:
return jsonify({"msg": "Missing username parameter"}), 400
if not password:
return jsonify({"msg": "Missing password parameter"}), 400
if username != current_app.config['PD_USER'] or password != current_app.config['PD_PASSWORD']:
return jsonify({"msg": "Bad username or password"}), 401
# Identity can be any data that is json serializable
access_token = create_access_token(identity=username, expires_delta=timedelta(seconds=90))
return jsonify(access_token=access_token), 200
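# Example request/response shape for the /auth endpoint above (values are
# placeholders; the token expires after 90 seconds per expires_delta):
#
#   POST /api/v1/auth   {"username": "...", "password": "..."}
#   200                 {"access_token": "<JWT>"}
#   400 / 401           {"msg": "<error message>"}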
@bp.route('/cronjob', methods=['POST'])
@jwt_required
def cronjob():
image_tag = request.args['image_tag']
image_name = request.args['image_name']
name = request.args['name']
namespace = request.args['namespace']
cronjob = patch_cron_job(
cron_job_object=list_cron_job(name=name, namespace=namespace),
image_name=image_name,
image_tag=image_tag,
name=name,
namespace=namespace)
return jsonify(msg=cronjob), 201
@bp.route('/daemonset', methods=['POST'])
@jwt_required
def daemonset():
image_tag = request.args['image_tag']
image_name = request.args['image_name']
name = request.args['name']
namespace = request.args['namespace']
daemonset = patch_daemon_set(
daemon_set_object=list_daemon_set(name=name, namespace=namespace),
image_name=image_name,
image_tag=image_tag,
name=name,
namespace=namespace)
return jsonify(msg=daemonset), 201
@bp.route('/deploy', methods=['GET'])
@jwt_required
def deploy():
image_tag = request.args['image_tag']
image_name = request.args['image_name']
name = request.args['deployment']
namespace = request.args['namespace']
deploy = patch_deployment(
deployment_object=list_deployment(name=name, namespace=namespace),
image_name=image_name,
image_tag=image_tag,
name=name,
namespace=namespace)
return jsonify(msg=deploy), 201
@bp.route('/deployment', methods=['POST'])
@jwt_required
def deployment():
image_tag = request.args['image_tag']
image_name = request.args['image_name']
name = request.args['name']
namespace = request.args['namespace']
deployment = patch_deployment(
deployment_object=list_deployment(name=name, namespace=namespace),
image_name=image_name,
image_tag=image_tag,
name=name,
namespace=namespace)
return jsonify(msg=deployment), 201
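# Illustrative client sketch (not part of this module): how a CI job might call the
# endpoints above. Host, credentials, and parameter values are placeholders.
#
#   import requests
#
#   def trigger_deploy(base_url, username, password, **params):
#       auth = requests.post(f"{base_url}/api/v1/auth",
#                            json={"username": username, "password": password})
#       token = auth.json()["access_token"]
#       return requests.post(f"{base_url}/api/v1/deployment", params=params,
#                            headers={"Authorization": f"Bearer {token}"})
#
#   # trigger_deploy("https://push-deploy.example", "deployer", "secret",
#   #                name="web", namespace="prod", image_name="web", image_tag="v1.2.3")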
|
function countChar(string, ch) {
var counted = 0;
for (var i = 0; i < string.length; i++)
if (string.charAt(i) == ch)
counted += 1;
return counted;
}
function countBs(string) {
return countChar(string, "B");
}
console.log(countBs("BBC"));
// → 2
console.log(countChar("kakkerlak", "k"));
// → 4
|
/* Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License, version 2.0,
as published by the Free Software Foundation.
This program is also distributed with certain software (including
but not limited to OpenSSL) that is licensed under separate terms,
as designated in a particular file or component or in included license
documentation. The authors of MySQL hereby grant you an additional
permission to link the program and your derivative works with the
separately licensed software that they have included with MySQL.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License, version 2.0, for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */
#include "service_versions.h"
SERVICE_VERSION *plugin_registry_service =
(void **)VERSION_plugin_registry_service;
|
/**
* Provides drag and drop functionality for blocks.
*
* @module moodle-core-blockdraganddrop
*/
var AJAXURL = '/lib/ajax/blocks.php',
CSS = {
BLOCK: 'block',
BLOCKREGION: 'block-region',
BLOCKADMINBLOCK: 'block_adminblock',
EDITINGMOVE: 'editing_move',
HEADER: 'header',
LIGHTBOX: 'lightbox',
REGIONCONTENT: 'region-content',
SKIPBLOCK: 'skip-block',
SKIPBLOCKTO: 'skip-block-to',
MYINDEX: 'page-my-index',
REGIONMAIN: 'region-main',
BLOCKSMOVING: 'blocks-moving'
};
var SELECTOR = {
DRAGHANDLE: '.' + CSS.HEADER + ' .commands .moodle-core-dragdrop-draghandle'
};
/**
* Legacy drag and drop manager.
* This drag and drop manager is specifically designed for themes using side-pre and side-post
* that do not make use of the block output methods introduced by MDL-39824.
*
* @namespace M.core.blockdraganddrop
* @class LegacyManager
* @constructor
* @extends M.core.dragdrop
*/
DRAGBLOCK = function() {
DRAGBLOCK.superclass.constructor.apply(this, arguments);
};
Y.extend(DRAGBLOCK, M.core.dragdrop, {
skipnodetop: null,
skipnodebottom: null,
dragsourceregion: null,
initializer: function() {
// Set group for parent class
this.groups = ['block'];
this.samenodeclass = CSS.BLOCK;
this.parentnodeclass = CSS.REGIONCONTENT;
// Add relevant classes and ID to 'content' block region on Dashboard page.
var myhomecontent = Y.Node.all('body#' + CSS.MYINDEX + ' #' + CSS.REGIONMAIN + ' > .' + CSS.REGIONCONTENT);
if (myhomecontent.size() > 0) {
var contentregion = myhomecontent.item(0);
contentregion.addClass(CSS.BLOCKREGION);
contentregion.set('id', CSS.REGIONCONTENT);
contentregion.one('div').addClass(CSS.REGIONCONTENT);
}
// Initialise blocks dragging
// Find all block regions on the page
var blockregionlist = Y.Node.all('div.' + CSS.BLOCKREGION);
if (blockregionlist.size() === 0) {
return false;
}
// See if we are missing either of the block regions;
// if so, add an empty one to use as a drop target.
if (blockregionlist.size() !== this.get('regions').length) {
var blockregion = Y.Node.create('<div></div>')
.addClass(CSS.BLOCKREGION);
var regioncontent = Y.Node.create('<div></div>')
.addClass(CSS.REGIONCONTENT);
blockregion.appendChild(regioncontent);
var pre = blockregionlist.filter('#region-pre');
var post = blockregionlist.filter('#region-post');
if (pre.size() === 0 && post.size() === 1) {
// pre block is missing, insert it before post
blockregion.setAttrs({id: 'region-pre'});
post.item(0).insert(blockregion, 'before');
blockregionlist.unshift(blockregion);
} else if (post.size() === 0 && pre.size() === 1) {
// post block is missing, insert it after pre
blockregion.setAttrs({id: 'region-post'});
pre.item(0).insert(blockregion, 'after');
blockregionlist.push(blockregion);
}
}
blockregionlist.each(function(blockregionnode) {
// Setting blockregion as droptarget (the case when it is empty)
// The region-post (the right one)
// is very narrow, so add extra padding on the left to drop block on it.
new Y.DD.Drop({
node: blockregionnode.one('div.' + CSS.REGIONCONTENT),
groups: this.groups,
padding: '40 240 40 240'
});
// Make each div element in the list of blocks draggable
var del = new Y.DD.Delegate({
container: blockregionnode,
nodes: '.' + CSS.BLOCK,
target: true,
handles: [SELECTOR.DRAGHANDLE],
invalid: '.block-hider-hide, .block-hider-show, .moveto',
dragConfig: {groups: this.groups}
});
del.dd.plug(Y.Plugin.DDProxy, {
// Don't move the node at the end of the drag
moveOnEnd: false
});
del.dd.plug(Y.Plugin.DDWinScroll);
var blocklist = blockregionnode.all('.' + CSS.BLOCK);
blocklist.each(function(blocknode) {
var move = blocknode.one('a.' + CSS.EDITINGMOVE);
if (move) {
move.replace(this.get_drag_handle(move.getAttribute('title'), '', 'iconsmall', true));
blocknode.one(SELECTOR.DRAGHANDLE).setStyle('cursor', 'move');
}
}, this);
}, this);
},
get_block_id: function(node) {
return Number(node.get('id').replace(/inst/i, ''));
},
get_block_region: function(node) {
var region = node.ancestor('div.' + CSS.BLOCKREGION).get('id').replace(/region-/i, '');
if (Y.Array.indexOf(this.get('regions'), region) === -1) {
// Must be standard side-X
if (window.right_to_left()) {
if (region === 'post') {
region = 'pre';
} else if (region === 'pre') {
region = 'post';
}
}
return 'side-' + region;
}
// Perhaps custom region
return region;
},
get_region_id: function(node) {
return node.get('id').replace(/region-/i, '');
},
drag_start: function(e) {
// Get our drag object
var drag = e.target;
// Store the parent node of original drag node (block)
// we will need it later for show/hide empty regions
this.dragsourceregion = drag.get('node').ancestor('div.' + CSS.BLOCKREGION);
// Determine skipnodes and store them
if (drag.get('node').previous() && drag.get('node').previous().hasClass(CSS.SKIPBLOCK)) {
this.skipnodetop = drag.get('node').previous();
}
if (drag.get('node').next() && drag.get('node').next().hasClass(CSS.SKIPBLOCKTO)) {
this.skipnodebottom = drag.get('node').next();
}
// Add the blocks-moving class so that the theme can respond if need be.
Y.one('body').addClass(CSS.BLOCKSMOVING);
},
drop_over: function(e) {
// Get a reference to our drag and drop nodes
var drag = e.drag.get('node');
var drop = e.drop.get('node');
// We need to fix the case when parent drop over event has determined
// 'goingup' and appended the drag node after admin-block.
if (drop.hasClass(this.parentnodeclass) &&
drop.one('.' + CSS.BLOCKADMINBLOCK) &&
drop.one('.' + CSS.BLOCKADMINBLOCK).next('.' + CSS.BLOCK)) {
drop.prepend(drag);
}
// Block is moved within the same region;
// stop here, no need to modify anything.
if (this.dragsourceregion.contains(drop)) {
return false;
}
// TODO: Hiding-displaying block region only works for base theme blocks
// (region-pre, region-post) at the moment. It should be improved
// to work with custom block regions as well.
// TODO: Fix this for the case when the user drags a block towards an empty section,
// then the section appears, then the user changes their mind and moves back to the
// original section. The opposite section remains open and empty.
var documentbody = Y.one('body');
// Moving block towards hidden region-content, display it
var regionname = this.get_region_id(this.dragsourceregion);
if (documentbody.hasClass('side-' + regionname + '-only')) {
documentbody.removeClass('side-' + regionname + '-only');
}
// Moving from empty region-content towards the opposite one,
// hide empty one (only for region-pre, region-post areas at the moment).
regionname = this.get_region_id(drop.ancestor('div.' + CSS.BLOCKREGION));
if (this.dragsourceregion.all('.' + CSS.BLOCK).size() === 0 &&
this.dragsourceregion.get('id').match(/(region-pre|region-post)/i)) {
if (!documentbody.hasClass('side-' + regionname + '-only')) {
documentbody.addClass('side-' + regionname + '-only');
}
}
},
drag_end: function() {
// clear variables
this.skipnodetop = null;
this.skipnodebottom = null;
this.dragsourceregion = null;
// Remove the blocks moving class once the drag-drop is over.
Y.one('body').removeClass(CSS.BLOCKSMOVING);
},
drag_dropmiss: function(e) {
// Missed the target, but we assume the user intended to drop it
// on the last ghost node location; e.drag and e.drop should be
// prepared by the global_drag_dropmiss parent, so simulate drop_hit(e).
this.drop_hit(e);
},
drop_hit: function(e) {
var drag = e.drag;
// Get a reference to our drag node
var dragnode = drag.get('node');
var dropnode = e.drop.get('node');
// Amend existing skipnodes
if (dragnode.previous() && dragnode.previous().hasClass(CSS.SKIPBLOCK)) {
// the skip node that belongs to the block below is moved below the dragged block
dragnode.insert(dragnode.previous(), 'after');
}
// Move original skipnodes
if (this.skipnodetop) {
dragnode.insert(this.skipnodetop, 'before');
}
if (this.skipnodebottom) {
dragnode.insert(this.skipnodebottom, 'after');
}
// Add a lightbox if it is not there
var lightbox = M.util.add_lightbox(Y, dragnode);
// Prepare request parameters
var params = {
sesskey: M.cfg.sesskey,
courseid: this.get('courseid'),
pagelayout: this.get('pagelayout'),
pagetype: this.get('pagetype'),
subpage: this.get('subpage'),
contextid: this.get('contextid'),
action: 'move',
bui_moveid: this.get_block_id(dragnode),
bui_newregion: this.get_block_region(dropnode)
};
if (this.get('cmid')) {
params.cmid = this.get('cmid');
}
if (dragnode.next('.' + this.samenodeclass) && !dragnode.next('.' + this.samenodeclass).hasClass(CSS.BLOCKADMINBLOCK)) {
params.bui_beforeid = this.get_block_id(dragnode.next('.' + this.samenodeclass));
}
// Do AJAX request
Y.io(M.cfg.wwwroot + AJAXURL, {
method: 'POST',
data: params,
on: {
start: function() {
lightbox.show();
},
success: function(tid, response) {
window.setTimeout(function() {
lightbox.hide();
}, 250);
try {
var responsetext = Y.JSON.parse(response.responseText);
if (responsetext.error) {
new M.core.ajaxException(responsetext);
}
} catch (e) {
// Ignore.
}
},
failure: function(tid, response) {
this.ajax_failure(response);
lightbox.hide();
}
},
context: this
});
}
}, {
NAME: 'core-blocks-dragdrop',
ATTRS: {
courseid: {
value: null
},
cmid: {
value: null
},
contextid: {
value: null
},
pagelayout: {
value: null
},
pagetype: {
value: null
},
subpage: {
value: null
},
regions: {
value: null
}
}
});
M.core = M.core || {};
M.core.blockdraganddrop = M.core.blockdraganddrop || {};
/**
* True if the page is using the new blocks methods.
* @private
* @static
* @property M.core.blockdraganddrop._isusingnewblocksmethod
* @type Boolean
* @default null
*/
M.core.blockdraganddrop._isusingnewblocksmethod = null;
/**
* Returns true if the page is using the new blocks methods.
* @static
* @method M.core.blockdraganddrop.is_using_blocks_render_method
* @return Boolean
*/
M.core.blockdraganddrop.is_using_blocks_render_method = function() {
if (this._isusingnewblocksmethod === null) {
var goodregions = Y.all('.block-region[data-blockregion]').size();
var allregions = Y.all('.block-region').size();
this._isusingnewblocksmethod = (allregions === goodregions);
if (goodregions > 0 && allregions > 0 && goodregions !== allregions) {
Y.log('Both core_renderer::blocks and core_renderer::blocks_for_region have been used.', 'warn', 'moodle-core_blocks');
}
}
return this._isusingnewblocksmethod;
};
/**
* Initialises a drag and drop manager.
* This should only ever be called once for a page.
* @static
* @method M.core.blockdraganddrop.init
* @param {Object} params
* @return Manager
*/
M.core.blockdraganddrop.init = function(params) {
if (this.is_using_blocks_render_method()) {
Y.log('Block drag and drop initialised for the blocks method.', 'info', 'moodle-core_blocks');
new MANAGER(params);
} else {
Y.log('Block drag and drop initialised with the legacy manager (blocks_for_region used).', 'info', 'moodle-core_blocks');
new DRAGBLOCK(params);
}
};
/*
* Legacy code to keep things working.
*/
M.core_blocks = M.core_blocks || {};
M.core_blocks.init_dragdrop = function(params) {
M.core.blockdraganddrop.init(params);
};
|
//initDraw(document.getElementById('imagearea'));
console.log('loading highlight');
function radialDraw(img_id, canvas_id, mode) {
console.log('init radial draw');
var mouse = {
x: 0,
y: 0,
startX: 0,
startY: 0
};
var points = [];
var polygons = [];
mode = mode || 'radial'; // default to radial when no mode argument is given
var imgarea = document.getElementById(canvas_id);
// TODO: only if points is non-empty.
document.onkeydown = function(evt) {
evt = evt || window.event;
if (evt.key === 'Escape') {
console.log('Esc key pressed.');
points = [];
redraw();
}
};
function drawPolygon(points, polymode) {
if (polymode==='radial') {
drawRadialPolygon(points);
} else {
drawBox(points);
}
}
function refresh_polygons(ctx) {
//console.log(polygons);
if (polygons.length>0) {
polygons.forEach(function (p) {
ctx.fillStyle = 'blue';
console.log('drawing '+p);
drawPolygon(p.points, p.mode);
});
}
ctx.fillStyle = "red";
}
var background = null;
function redraw() {
var canvas = document.getElementById(canvas_id);
// use getContext to use the canvas for drawing
var ctx = canvas.getContext('2d');
ctx.clearRect(0, 0, canvas.width, canvas.height);
canvas.width = 640;
if (background===null) {
background = new Image();
// Make sure the image is loaded first otherwise nothing will draw.
background.onload = function(){
canvas.height = canvas.width/background.width * background.height;
console.log('canvas w,h:'+canvas.width+' '+canvas.height+' '+background.width+' '+background.height);
ctx.drawImage(background,0,0, canvas.width, canvas.height);
refresh_polygons(ctx);
}
background.src = "/images/"+img_id;
} else {
ctx.drawImage(background,0,0, canvas.width, canvas.height);
refresh_polygons(ctx);
}
}
function setMousePosition(e) {
// compute the mouse position relative to the canvas element
const boundaries = e.currentTarget.getBoundingClientRect();
mouse.x = e.x-boundaries.left;
mouse.y = e.y-boundaries.top;
}
// registers a new callback when a polygon is added
var add_callbacks = [];
function on_newpolygon(f) {
add_callbacks.push(f);
}
function process_add_callbacks(p) {
console.log('processing callbacks');
add_callbacks.forEach((f) => {f(p)});
}
var clickhandler = function(e, clicktype) {
setMousePosition(e);
if (points.length>0) {
points.pop();
}
var cx = mouse.x/imgarea.width;  // imgarea is the canvas element fetched above
var cy = mouse.y/imgarea.height;
points.push({x: cx, y: cy});
points.push({x: cx, y: cy});
console.log(cx+','+cy);
var threshold = mode === 'radial' ? 3 : 2;
//if (clicktype === 'double') {
if (points.length>threshold) {
points.pop();
polygons.push({'mode': mode, 'points': points});
points = [];
redraw();
process_add_callbacks(polygons[polygons.length-1]);
} else {
redraw();
console.log('clickhandler: ' + points);
drawPolygon(points, mode);
}
}
imgarea.onmouseup = function(e) { return clickhandler(e,'single');}
imgarea.ondblclick = function(e) { return clickhandler(e,'double');}
imgarea.onmousemove = function (e) {
setMousePosition(e);
var cx = mouse.x/imgarea.width;
var cy = mouse.y/imgarea.height;
if (points.length>0) {
points.pop();
points.push({x: cx, y: cy});
redraw();
drawPolygon(points, mode);
}
//console.log(points);
//console.log(e.x+' '+e.y);
/*if (element !== null) {
element.style.width = Math.abs(mouse.x - mouse.startX) + 'px';
element.style.height = Math.abs(mouse.y - mouse.startY) + 'px';
element.style.left = (mouse.x - mouse.startX < 0) ? mouse.x + 'px' : mouse.startX + 'px';
element.style.top = (mouse.y - mouse.startY < 0) ? mouse.y + 'px' : mouse.startY + 'px';
}*/
}
function drawBox(points) {
// get the canvas element using the DOM
var canvas = document.getElementById(canvas_id);
// Make sure we don't execute when canvas isn't supported
if (canvas.getContext) {
// use getContext to use the canvas for drawing
var ctx = canvas.getContext('2d');
var cpoints = points.map((p) => { return {x: p.x*canvas.width, y: p.y*canvas.height}});
ctx.globalAlpha = 0.25;
if (cpoints.length!==2) {
console.log('weird cpoints. exiting');
return;
}
ctx.beginPath();
ctx.rect(cpoints[0].x, cpoints[0].y, cpoints[1].x-cpoints[0].x, cpoints[1].y-cpoints[0].y);
ctx.fill();
ctx.globalAlpha = 1.0;
ctx.lineWidth = 1;
ctx.stroke();
}
}
function drawRadialPolygon(points) {
// get the canvas element using the DOM
var canvas = document.getElementById(canvas_id);
// Make sure we don't execute when canvas isn't supported
if (canvas.getContext) {
// use getContext to use the canvas for drawing
var ctx = canvas.getContext('2d');
var srtPoints = [];
var r1, r2, dx1, dx2, dx3, dy1, dy2, theta1, theta2, dtheta, clockwise;
var cpoints = points.map((p) => { return {x: p.x*canvas.width, y: p.y*canvas.height}});
if (cpoints.length<3) {
cpoints.forEach(function (p) {
srtPoints.push(p);
});
} else {
srtPoints.push(cpoints[0]);
srtPoints.push(cpoints[1]);
srtPoints.push(cpoints[2]);
dx1 = cpoints[1].x-cpoints[0].x;
dy1 = cpoints[1].y-cpoints[0].y;
r1 = Math.sqrt(dx1*dx1+dy1*dy1);
// theta comes from points[2]-points[0] vs points[1]-points[0]
dx2 = cpoints[2].x - cpoints[0].x;
dy2 = cpoints[2].y - cpoints[0].y;
r2 = Math.sqrt(dx2*dx2+dy2*dy2);
srtPoints.push({x: cpoints[0].x + r2 * dx1/r1, y: cpoints[0].y + r2 * dy1/r1});
theta1 = Math.atan2(dy1, dx1);
theta2 = Math.atan2(dy2, dx2);
//console.log("arc "+r1+' '+dx1+' '+dy1+' '+dx2+' '+dy2+' '+dy1/dx1+' '+dy2/dx2);
dtheta = theta2-theta1;
while (dtheta<-Math.PI) dtheta+=2*Math.PI;
while (dtheta>Math.PI) dtheta-=2*Math.PI;
clockwise = dtheta<0;
}
console.log('points length: '+cpoints.length);
if (srtPoints.length==2) {
ctx.globalAlpha = 1.0;
ctx.lineWidth = 2;
} else {
ctx.globalAlpha = 0.1;
ctx.lineWidth = 1;
}
ctx.beginPath();
ctx.moveTo(srtPoints[0].x, srtPoints[0].y);
ctx.lineTo(srtPoints[1].x, srtPoints[1].y);
if (srtPoints.length>2) {
//console.log('arc '+r1+' '+theta1+' '+theta2);
ctx.arc(srtPoints[0].x, srtPoints[0].y, r1, theta1, theta2, clockwise);
}
//ctx.lineTo(srtPoints[0].x, srtPoints[0].y);
ctx.closePath();
ctx.stroke();
ctx.fill();
ctx.globalAlpha = 0.25;
if (srtPoints.length>=4) {
ctx.beginPath();
ctx.moveTo(srtPoints[3].x, srtPoints[3].y);
ctx.lineTo(srtPoints[1].x, srtPoints[1].y);
ctx.arc(srtPoints[0].x, srtPoints[0].y, r1, theta1, theta2, clockwise);
ctx.lineTo(srtPoints[2].x, srtPoints[2].y);
ctx.arc(srtPoints[0].x, srtPoints[0].y, r2, theta2, theta1, !clockwise);
//ctx.lineTo(p5.x, p5.y);
ctx.stroke();
ctx.fill();
}
ctx.globalAlpha = 1.0;
}
}
function add_polygon(polygon, mode) {
polygons.push({'mode': mode, 'points': polygon});
redraw();
// called by external services.
//process_add_callbacks(polygons[polygons.length-1]);
}
function set_polygons(_polygons) {
polygons = _polygons;
redraw();
}
function set_mode(_mode) {
if (_mode !== 'radial' && _mode !== 'box') {
console.log('invalid mode: ' + _mode);
return;
}
mode = _mode;
}
redraw();
return {
on_newpolygon: on_newpolygon,
add_polygon: add_polygon,
set_polygons: set_polygons,
set_mode: set_mode
}
}
|