text
stringlengths 3
1.05M
|
---|
// @flow
import React from 'react'
import { connect } from 'react-redux'
import findIndex from 'lodash/findIndex'
import type { OnboardingState } from 'reducers/onboarding'
import Breadcrumb from 'components/Breadcrumb'
// Select only the onboarding slice of the Redux store for this component.
const mapStateToProps = state => ({
  onboarding: state.onboarding,
})

// Props injected by `connect` below (no own props are used).
type Props = {
  onboarding: OnboardingState,
}
function OnboardingBreadcrumb(props: Props) {
const { onboarding } = props
const { stepName, genuine } = onboarding
const filteredSteps = onboarding.steps
.filter(step => !step.external)
.map(step => ({ ...step, label: step.label })) // TODO: translate
const stepIndex = findIndex(filteredSteps, s => s.name === stepName)
const genuineStepIndex = findIndex(filteredSteps, s => s.name === 'genuineCheck')
return (
<Breadcrumb
stepsErrors={genuine.isGenuineFail ? [genuineStepIndex] : undefined}
currentStep={stepIndex}
items={filteredSteps}
/>
)
}
export default connect(mapStateToProps)(OnboardingBreadcrumb)
|
// Auto-generated Doxygen search index: each entry maps an escaped, lowercased
// file name to [display name, [target html page, open-in-frame flag, anchor]].
var searchData=
[
  ['jacobianfactor_2ecpp',['JacobianFactor.cpp',['../a00442.html',1,'']]],
  ['jacobianfactor_2eh',['JacobianFactor.h',['../a00443.html',1,'']]],
  ['junctiontree_2dinst_2eh',['JunctionTree-inst.h',['../a00395.html',1,'']]],
  ['junctiontree_2eh',['JunctionTree.h',['../a00396.html',1,'']]]
];
|
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
var React = require('react');
var utils = require('primereact/utils');
var ripple = require('primereact/ripple');
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var React__default = /*#__PURE__*/_interopDefaultLegacy(React);
// Babel helper: transpiled classes call this first so that invoking the
// constructor without `new` fails the same way a native class would.
function _classCallCheck(instance, Constructor) {
  var calledWithNew = instance instanceof Constructor;
  if (calledWithNew) {
    return;
  }
  throw new TypeError("Cannot call a class as a function");
}
// Babel helper: install an array of property descriptors onto `target`
// (a prototype or constructor). Descriptors default to non-enumerable but
// configurable, and data descriptors are made writable.
function _defineProperties(target, props) {
  for (var i = 0; i < props.length; i++) {
    var descriptor = props[i];
    descriptor.enumerable = descriptor.enumerable || false;
    descriptor.configurable = true;
    if ("value" in descriptor) descriptor.writable = true;
    Object.defineProperty(target, descriptor.key, descriptor);
  }
}
// Babel helper: attach instance members (protoProps) and static members
// (staticProps) to a transpiled class, then make its `prototype` binding
// non-writable, mirroring native class semantics.
function _createClass(Constructor, protoProps, staticProps) {
  if (protoProps) _defineProperties(Constructor.prototype, protoProps);
  if (staticProps) _defineProperties(Constructor, staticProps);
  Object.defineProperty(Constructor, "prototype", {
    writable: false
  });
  return Constructor;
}
// Babel helper: guard against using `this` in a derived constructor before
// super() has run. Returns `self` unchanged when it is initialised.
function _assertThisInitialized(self) {
  if (self !== void 0) {
    return self;
  }
  throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
}
// Babel helper: set an object's prototype, preferring the native
// Object.setPrototypeOf and falling back to __proto__ assignment.
// Reassigns itself on first call so later calls skip the feature check.
function _setPrototypeOf(o, p) {
  _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
    o.__proto__ = p;
    return o;
  };
  return _setPrototypeOf(o, p);
}
// Babel helper: wire up class inheritance — prototype chain for instance
// members plus constructor-to-constructor link for statics — and freeze the
// subclass's `prototype` binding like a native class.
function _inherits(subClass, superClass) {
  if (typeof superClass !== "function" && superClass !== null) {
    throw new TypeError("Super expression must either be null or a function");
  }
  subClass.prototype = Object.create(superClass && superClass.prototype, {
    constructor: {
      value: subClass,
      writable: true,
      configurable: true
    }
  });
  Object.defineProperty(subClass, "prototype", {
    writable: false
  });
  // Linking the constructors makes static members inherited too.
  if (superClass) _setPrototypeOf(subClass, superClass);
}
// Babel helper: `typeof` that reports "symbol" correctly even when Symbol is
// polyfilled (where typeof would say "object"). Reassigns itself on first
// call so the Symbol feature check runs only once.
function _typeof(obj) {
  "@babel/helpers - typeof";

  return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) {
    return typeof obj;
  } : function (obj) {
    return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
  }, _typeof(obj);
}
// Babel helper: implement the derived-constructor return rule — an explicit
// object/function return wins, any other non-undefined return is an error,
// otherwise `this` (which must already be initialised) is used.
function _possibleConstructorReturn(self, call) {
  if (call && (_typeof(call) === "object" || typeof call === "function")) {
    return call;
  } else if (call !== void 0) {
    throw new TypeError("Derived constructors may only return object or undefined");
  }
  return _assertThisInitialized(self);
}
// Babel helper: read an object's prototype via the native getter, with a
// __proto__ fallback. Reassigns itself so the check happens only once.
function _getPrototypeOf(o) {
  _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) {
    return o.__proto__ || Object.getPrototypeOf(o);
  };
  return _getPrototypeOf(o);
}
function _defineProperty(obj, key, value) {
if (key in obj) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
// Babel helper: build the `super(...)` dispatcher for Derived — uses
// Reflect.construct (preserving new.target) when available, else a plain
// parent .apply — and funnels the result through _possibleConstructorReturn.
function _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }
// Babel helper: detect a working native Reflect.construct (not the sham
// polyfill) by probing it; Proxy presence short-circuits the probe.
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {})); return true; } catch (e) { return false; } }
// PrimeReact Dock (transpiled ES5 class component): a macOS-style dock menu.
// Hover magnification is driven purely by CSS classes computed from
// state.currentIndex relative to each item's index.
var Dock = /*#__PURE__*/function (_Component) {
  _inherits(Dock, _Component);

  var _super = _createSuper(Dock);

  function Dock(props) {
    var _this;

    _classCallCheck(this, Dock);

    _this = _super.call(this, props);
    // -3 is a sentinel "nothing hovered" value: far enough from any real
    // index that none of the index±1/±2 class checks below can match.
    _this.state = {
      currentIndex: -3
    };
    _this.onListMouseLeave = _this.onListMouseLeave.bind(_assertThisInitialized(_this));
    return _this;
  }

  _createClass(Dock, [{
    key: "onListMouseLeave",
    // Reset magnification when the pointer leaves the list.
    value: function onListMouseLeave() {
      this.setState({
        currentIndex: -3
      });
    }
  }, {
    key: "onItemMouseEnter",
    // Track which item is hovered so neighbours get scaled classes.
    value: function onItemMouseEnter(index) {
      this.setState({
        currentIndex: index
      });
    }
  }, {
    key: "onItemClick",
    // Invoke the item's command (if any); default navigation is suppressed.
    value: function onItemClick(e, item) {
      if (item.command) {
        item.command({
          originalEvent: e,
          item: item
        });
      }

      e.preventDefault();
    }
  }, {
    key: "renderItem",
    // Render one dock entry: an anchor with icon + ripple, or a custom
    // template when the item provides one.
    value: function renderItem(item, index) {
      var _this2 = this;

      var disabled = item.disabled,
          _icon = item.icon,
          label = item.label,
          template = item.template,
          url = item.url,
          target = item.target;
      // Neighbour classes implement the magnification falloff around the
      // hovered index.
      var className = utils.classNames('p-dock-item', {
        'p-dock-item-second-prev': this.state.currentIndex - 2 === index,
        'p-dock-item-prev': this.state.currentIndex - 1 === index,
        'p-dock-item-current': this.state.currentIndex === index,
        'p-dock-item-next': this.state.currentIndex + 1 === index,
        'p-dock-item-second-next': this.state.currentIndex + 2 === index
      });
      var contentClassName = utils.classNames('p-dock-action', {
        'p-disabled': disabled
      });
      var iconClassName = utils.classNames('p-dock-action-icon', _icon);
      // String icons become a <span> with icon classes; anything else is
      // resolved as a JSX element/factory.
      var icon = typeof _icon === 'string' ? /*#__PURE__*/React__default["default"].createElement("span", {
        className: iconClassName
      }) : utils.ObjectUtils.getJSXElement(_icon, this.props);
      var content = /*#__PURE__*/React__default["default"].createElement("a", {
        href: url || '#',
        role: "menuitem",
        className: contentClassName,
        target: target,
        "data-pr-tooltip": label,
        onClick: function onClick(e) {
          return _this2.onItemClick(e, item);
        }
      }, icon, /*#__PURE__*/React__default["default"].createElement(ripple.Ripple, null));

      if (template) {
        // The default-rendered anchor is handed to the template so it can
        // wrap or replace it.
        var defaultContentOptions = {
          onClick: function onClick(e) {
            return _this2.onItemClick(e, item);
          },
          className: contentClassName,
          iconClassName: iconClassName,
          element: content,
          props: this.props,
          index: index
        };
        content = utils.ObjectUtils.getJSXElement(template, item, defaultContentOptions);
      }

      return /*#__PURE__*/React__default["default"].createElement("li", {
        key: index,
        className: className,
        role: "none",
        onMouseEnter: function onMouseEnter() {
          return _this2.onItemMouseEnter(index);
        }
      }, content);
    }
  }, {
    key: "renderItems",
    // Map the `model` prop to rendered <li> items (null when no model).
    value: function renderItems() {
      var _this3 = this;

      if (this.props.model) {
        return this.props.model.map(function (item, index) {
          return _this3.renderItem(item, index);
        });
      }

      return null;
    }
  }, {
    key: "renderHeader",
    value: function renderHeader() {
      if (this.props.header) {
        return /*#__PURE__*/React__default["default"].createElement("div", {
          className: "p-dock-header"
        }, utils.ObjectUtils.getJSXElement(this.props.header, {
          props: this.props
        }));
      }

      return null;
    }
  }, {
    key: "renderList",
    value: function renderList() {
      var _this4 = this;

      var items = this.renderItems();
      return /*#__PURE__*/React__default["default"].createElement("ul", {
        ref: function ref(el) {
          return _this4.list = el;
        },
        className: "p-dock-list",
        role: "menu",
        onMouseLeave: this.onListMouseLeave
      }, items);
    }
  }, {
    key: "renderFooter",
    value: function renderFooter() {
      if (this.props.footer) {
        return /*#__PURE__*/React__default["default"].createElement("div", {
          className: "p-dock-footer"
        }, utils.ObjectUtils.getJSXElement(this.props.footer, {
          props: this.props
        }));
      }

      return null;
    }
  }, {
    key: "render",
    value: function render() {
      var className = utils.classNames("p-dock p-component p-dock-".concat(this.props.position), {
        'p-dock-magnification': this.props.magnification
      }, this.props.className);
      var header = this.renderHeader();
      var list = this.renderList();
      var footer = this.renderFooter();
      return /*#__PURE__*/React__default["default"].createElement("div", {
        id: this.props.id,
        className: className,
        style: this.props.style
      }, /*#__PURE__*/React__default["default"].createElement("div", {
        className: "p-dock-container"
      }, header, list, footer));
    }
  }]);

  return Dock;
}(React.Component);
// Defaults: docked to the bottom edge with hover magnification enabled;
// null means "not set".
_defineProperty(Dock, "defaultProps", {
  id: null,
  style: null,
  className: null,
  model: null,
  position: 'bottom',
  magnification: true,
  header: null,
  footer: null
});

exports.Dock = Dock;
|
from adapters.adapter_with_battery import AdapterWithBattery
from devices.switch.selector_switch import SelectorSwitch
class ZYCT202(AdapterWithBattery):
    # Adapter exposing the remote's six button groups as one selector switch
    # each. (Device model presumed from the class name — TODO confirm.)
    def __init__(self):
        super().__init__()
        buttons_count = 6
        for btn_index in range(1, buttons_count + 1):
            self.devices.append(self.create_button(btn_index))

    def create_button(self, index):
        # Build a push-button-style selector; each level maps to a remote action.
        button = SelectorSwitch('btn' + str(index), 'action', ' (Button ' + str(index) + ')')
        button.add_level('Off', None)
        button.add_level('On', 'on')
        button.add_level('Up', 'up-press')
        button.add_level('Down', 'down-press')
        button.add_level('Stop', 'stop')
        button.set_selector_style(SelectorSwitch.SELECTOR_TYPE_BUTTONS)
        # Remote can re-send the same action; don't suppress repeated updates.
        button.disable_value_check_on_update()
        return button

    def handle_command(self, alias, device, command, level, color):
        device_data = self._get_legacy_device_data()
        # NOTE(review): the incoming `device` argument is discarded in favour
        # of an alias lookup — confirm the parameter is intentionally unused.
        device = self.get_device_by_alias(alias)
        device.handle_command(device_data, command, level, color)

    def handle_mqtt_message(self, message):
        # Only button-action messages are handled here.
        if 'action' not in message.raw or 'action_group' not in message.raw:
            return
        device_data = self._get_legacy_device_data()
        converted_message = self.convert_message(message)
        # Presumably action_group values 145..150 map to buttons 1..6
        # (offset 144) — TODO confirm against the device's group numbering.
        btn_index = message.raw['action_group'] - 144
        device = self.get_device_by_alias('btn' + str(btn_index))
        device.handle_message(device_data, converted_message)
        self.update_battery_status(device_data, converted_message)
        self.update_link_quality(device_data, converted_message)
|
from flask import Flask, render_template, redirect, jsonify, request
import json
import re
app = Flask(__name__)
@app.route('/')
def root():
    # NOTE(review): this view returns None, which Flask rejects with a 500 —
    # presumably a render_template/redirect is missing here; confirm the
    # intended landing page.
    return
@app.route('/state/<name>', methods=['GET','POST'])
def get_data(name):
    # Return population data for the named state; unknown names serialize
    # as JSON null (dict.get returns None).
    with open('static/population.json') as dt:
        data = json.load(dt)
    return jsonify(data.get(name))
# Rough http/https URL matcher (allows percent-escapes). Note [$-_@.&+] is a
# character *range* from '$' to '_', not just the listed characters.
URL_PATTERN = r"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+"

@app.route('/current-events', methods=['GET', 'POST'])
def current_events():
    # POST: append the submitted URL to the events file when it looks like a
    # URL; reply with whether it was accepted.
    if request.method == 'POST':
        url = request.form.get('url', '')
        if re.search(URL_PATTERN, url):
            with open('static/current-events.txt', 'a') as news:
                news.write(url + '\n')
            return jsonify(response=True)
        return jsonify(response=False, url=url)
    # GET: return every stored URL (the trailing empty split entry is dropped).
    with open('static/current-events.txt') as news:
        data = news.read().split('\n')[:-1]
    return jsonify(data)
if __name__ == '__main__':
    # Development server; 0.0.0.0 exposes it on all network interfaces.
    app.run(host='0.0.0.0')
|
from ebooklib import epub
import ebooklib
from bs4 import BeautifulSoup
import re
import urllib.request
import ePUBImageGenerator
blacklist = ['[document]', 'noscript', 'header', 'html', 'meta', 'head', 'input', 'script']
def image_support(path):
    '''
    Extract every image in the novel into the novel's images folder,
    numbered by order of appearance (starting at 0). Returns nothing;
    delegates entirely to ePUBImageGenerator.
    '''
    ePUBImageGenerator.generate_images(path)
def chapters_list(epub_path) -> list:
    '''
    Read the epub at `epub_path` and return the raw (byte-string) HTML
    content of every document item, in the order ebooklib yields them.
    '''
    book = epub.read_epub(epub_path)
    chapters = []
    for item in book.get_items():
        if item.get_type() == ebooklib.ITEM_DOCUMENT:
            chapters.append(item.get_content())
    return chapters
def chapter_contents(chap, file_path) -> list:
'''
For a section of the epub file, strip components to just text and image tags.\n
string with byte-data --> string with text and image-tags\n
Needs to be called in a for-loop for the chapters list
'''
output = ''
soup = BeautifulSoup(chap, 'html.parser')
'''Writes to a file if the chapter contains an image'''
#TODO: Deal with multiple images case
images = soup.find_all('img')
file_path = file_path[:file_path.rfind("\\") + 1]
locations_file = f'{file_path}\\images\\locations.txt'
if len(images) > 0:
with open(locations_file,'a+') as f:
f.write("1")
else:
with open(locations_file,'a+') as f:
f.write("0")
'''Cleaning up the mess'''
text = soup.find_all(text=True)
for t in text:
if t.parent.name not in blacklist:
output += '{} '.format(t)
return output
def full_book(thtml, file_path) -> list:
    '''
    Convert every chapter's raw HTML into cleaned text, preserving order.
    Each list entry is one section/chapter; images count as chapters too.
    '''
    return [chapter_contents(html, file_path) for html in thtml]
def _initialize_locations(epub_path) -> tuple:
    '''
    Initial processes:

    - Returns (locations_file, file_path) as a tuple
    - Purges/creates locations.txt for this book
    - 'file_path' is the epub file's directory (Windows separators assumed)
    '''
    file_path = epub_path[:epub_path.rfind("\\") + 1]
    locations_file = f'{file_path}images\\locations.txt'
    # Truncate (or create) the per-book image-locations file.
    with open(locations_file, "w") as f:
        f.write("")
    return locations_file, file_path
def _purge_whitespace(converted_epub_file, locations_file, file_path) -> list:
    '''
    PASS 1: Purge whitespace for each chapter in the book.
    Chapters flagged "1" in the locations file additionally get a
    ` filename="...imageN.jpg"` marker appended to `cleaned`.
    '''
    cleaned = []
    counter = 0
    for index, val in enumerate(converted_epub_file):
        print(index)
        # Re-reads the single flags line ("0"/"1" per chapter) every pass;
        # curr[index] is this chapter's flag.
        with open(locations_file, "r") as f:
            curr = f.readline()
        print("INDEX: ", curr[index])
        if curr[index] == "1":
            converted_epub_file[index] += "\n"
            # Note: '\i' in this non-raw string is a literal backslash+i.
            cleaned.append(" filename=\"{}images\image{}.jpg\"".format(file_path, counter))
            counter += 1
        # Blank sections are dropped (only logged); others get runs of spaces
        # collapsed. NOTE(review): the isspace()/strip()=='' branches are
        # redundant with each other — presumably one was meant to differ.
        if converted_epub_file[index].isspace():
            print(converted_epub_file[index])
            print(f"Section {index} is a blank section. Removing...")
        elif converted_epub_file[index].strip() == "":
            print(converted_epub_file[index])
            print(f"Section {index} is a blank section. Removing...")
        else:
            cleaned.append(re.sub(' +', ' ', val))
    return cleaned
def _check_external_imagelinks(first_pass_data, file_path):
    '''
    Searches for external image links within the epub file. If found, goes to the site and downloads the image.

    If links exist, it is *probably* an unofficial file.
    '''
    image_counter = 0
    for index, section in enumerate(first_pass_data):
        # NOTE(review): this pattern is rebuilt on every iteration (could be
        # hoisted) and appears to have been line-wrapped mid-pattern by
        # extraction ('\<newline>b') — confirm the trailing word boundary is
        # intact in the real source.
        WEB_URL_REGEX = r"""(?i)\b((?:https?:(?:/{1,3}|[a-z0-9%])|[a-z0-9.\-]+[.](?:com|net|org|edu|gov|mil|aero|asia|biz|cat|coop|info|int|jobs|mobi|museum|name|post|pro|tel|travel|xxx|ac|ad|ae|af|ag|ai|al|am|an|ao|aq|ar|as|at|au|aw|ax|az|ba|bb|bd|be|bf|bg|bh|bi|bj|bm|bn|bo|br|bs|bt|bv|bw|by|bz|ca|cc|cd|cf|cg|ch|ci|ck|cl|cm|cn|co|cr|cs|cu|cv|cx|cy|cz|dd|de|dj|dk|dm|do|dz|ec|ee|eg|eh|er|es|et|eu|fi|fj|fk|fm|fo|fr|ga|gb|gd|ge|gf|gg|gh|gi|gl|gm|gn|gp|gq|gr|gs|gt|gu|gw|gy|hk|hm|hn|hr|ht|hu|id|ie|il|im|in|io|iq|ir|is|it|je|jm|jo|jp|ke|kg|kh|ki|km|kn|kp|kr|kw|ky|kz|la|lb|lc|li|lk|lr|ls|lt|lu|lv|ly|ma|mc|md|me|mg|mh|mk|ml|mm|mn|mo|mp|mq|mr|ms|mt|mu|mv|mw|mx|my|mz|na|nc|ne|nf|ng|ni|nl|no|np|nr|nu|nz|om|pa|pe|pf|pg|ph|pk|pl|pm|pn|pr|ps|pt|pw|py|qa|re|ro|rs|ru|rw|sa|sb|sc|sd|se|sg|sh|si|sj|Ja|sk|sl|sm|sn|so|sr|ss|st|su|sv|sx|sy|sz|tc|td|tf|tg|th|tj|tk|tl|tm|tn|to|tp|tr|tt|tv|tw|tz|ua|ug|uk|us|uy|uz|va|vc|ve|vg|vi|vn|vu|wf|ws|ye|yt|yu|za|zm|zw)/)(?:[^\s()<>{}\[\]]+|\([^\s()]*?\([^\s()]+\)[^\s()]*?\)|\([^\s]+?\))+(?:\([^\s()]*?\([^\s()]+\)[^\s()]*?\)|\([^\s]+?\)|[^\s`!()\[\]{};:'".,<>?«»“”‘’])|(?:(?<!@)[a-z0-9]+(?:[.\-][a-z0-9]+)*[.](?:com|net|org|edu|gov|mil|aero|asia|biz|cat|coop|info|int|jobs|mobi|museum|name|post|pro|tel|travel|xxx|ac|ad|ae|af|ag|ai|al|am|an|ao|aq|ar|as|at|au|aw|ax|az|ba|bb|bd|be|bf|bg|bh|bi|bj|bm|bn|bo|br|bs|bt|bv|bw|by|bz|ca|cc|cd|cf|cg|ch|ci|ck|cl|cm|cn|co|cr|cs|cu|cv|cx|cy|cz|dd|de|dj|dk|dm|do|dz|ec|ee|eg|eh|er|es|et|eu|fi|fj|fk|fm|fo|fr|ga|gb|gd|ge|gf|gg|gh|gi|gl|gm|gn|gp|gq|gr|gs|gt|gu|gw|gy|hk|hm|hn|hr|ht|hu|id|ie|il|im|in|io|iq|ir|is|it|je|jm|jo|jp|ke|kg|kh|ki|km|kn|kp|kr|kw|ky|kz|la|lb|lc|li|lk|lr|ls|lt|lu|lv|ly|ma|mc|md|me|mg|mh|mk|ml|mm|mn|mo|mp|mq|mr|ms|mt|mu|mv|mw|mx|my|mz|na|nc|ne|nf|ng|ni|nl|no|np|nr|nu|nz|om|pa|pe|pf|pg|ph|pk|pl|pm|pn|pr|ps|pt|pw|py|qa|re|ro|rs|ru|rw|sa|sb|sc|sd|se|sg|sh|si|sj|Ja|sk|sl|sm|sn|so|sr|ss|st|su|sv|sx|sy|sz|tc|td|tf|tg|th|tj|tk|tl|tm|tn|to|tp|tr|tt|tv|tw|tz|ua|ug|uk|us|uy|uz|va|vc|ve|vg|vi|vn|vu|wf|ws|ye|yt|yu|za|zm|zw)\
b/?(?!@)))"""
        links = re.findall(WEB_URL_REGEX, section)
        if len(links) > 0:
            # Consume links front-to-back; only jpg/png links are downloaded
            # and replaced by a local filename tag.
            for _ in range(len(links)):
                if links[0].find("jpg") >= 0 or links[0].find("png") >= 0:
                    print("Link: ", links[0])
                    section = section.replace(links[0], " filename=\"{}images\image{}.jpg\"\n".format(file_path, image_counter))
                    first_pass_data[index] = section
                    print("\nFound Link!!\n")
                    print("Link: \n", links[0])
                    print("Downloading...")
                    urllib.request.urlretrieve(links[0], f'{file_path}images\\image{image_counter}.jpg')
                    image_counter += 1
                links.pop(0)
    print("\n\nSET FINISHED\n\n")
    return first_pass_data
def _rearrange_chapters(new, data, tag='filename='):
while len(data) > 0:
curr = data[0]
if curr.find(tag) < 0:
print(f"IN SORTER: Didn't find anything for {curr[0:20]}")
new.append(curr)
data.pop(0)
else:
links = re.findall(tag, curr)
if len(links) > 1: #If more than one link
found = curr.find(tag)
new.append(curr[:curr.find("\n", found)]) #Appends everything up to the thing
data[0] = curr[curr.find("\n", found):] #Data is now
else: #Only 1 link
print(curr)
new.append(curr)
data.pop(0)
return new
def convert_epub(epub_path) -> list:
    '''
    Full pipeline: read chapters, strip them to text (recording image flags),
    purge blank sections / inject local image filename tags, download any
    external image links, then split multi-image sections apart.
    '''
    locations_file, file_directory = _initialize_locations(epub_path)
    #locations_file points to a locations.txt file
    chapters = chapters_list(epub_path)
    final = full_book(chapters, epub_path)
    first_pass = _purge_whitespace(final, locations_file, file_directory)
    print("First pass: ", first_pass[0][:20])
    second_pass = _check_external_imagelinks(first_pass, file_directory)
    print("Second pass: ", second_pass[0][:20])
    third_pass = []
    cleaned_epub = _rearrange_chapters(new = third_pass, data = second_pass)
    return cleaned_epub
if __name__ == "__main__":
    # Library module: no CLI entry point.
    pass
'use strict';
const gulp = require('gulp');
const file = require('gulp-file');
const del = require('del');
const path = require('path');
const fs = require('fs');
const { generateComponents } = require('@devextreme-generator/build-helpers');
const { InfernoGenerator } = require('@devextreme-generator/inferno');
const ts = require('gulp-typescript');
const plumber = require('gulp-plumber');
const gulpIf = require('gulp-if');
const babel = require('gulp-babel');
const notify = require('gulp-notify');
const watch = require('gulp-watch');
const transpileConfig = require('../transpile-config');
const env = require('../env-variables');
const cached = require('gulp-cached');
const {
BASE_GENERATOR_OPTIONS,
BASE_GENERATOR_OPTIONS_WITH_JQUERY
} = require('./generator-options');
// Shared Inferno generator instance used by the inferno build tasks below.
const generator = new InfernoGenerator();
// Generated jQuery wrapper components (cleaned and regenerated by tasks below).
const jQueryComponentsGlob = 'js/renovation/**/*.j.tsx';
const esmPackage = env.BUILD_ESM_PACKAGE;
// Renovation sources: all ts/tsx except generated jQuery wrappers, typings,
// tests and test utilities.
const SRC = [
  'js/renovation/**/*.{tsx,ts}',
  `!${jQueryComponentsGlob}`,
  '!js/renovation/**/*.d.ts',
  '!js/renovation/**/__tests__/**/*',
  '!js/renovation/test_utils/**/*'
];
// Extra per-framework exclusion globs applied during generation.
const IGNORE_PATHS_BY_FRAMEWORKS = {
  vue: [],
  react: [],
  angular: [
    '!js/renovation/ui/pager/pager.tsx'
  ]
};
const COMPAT_TESTS_PARTS = 'testing/tests/Renovation/';
const COMMON_SRC = ['js/**/*.d.ts', 'js/**/*.js'];
// Compiler error fragments that are expected and must not fail the build.
const knownErrors = [
  'js/renovation/component_wrapper/',
  'js\\renovation\\component_wrapper\\',
  'Cannot find module \'../../inferno/src\'',
  // #region TODO remove it after fix https://trello.com/c/2LOaxO9F/2704-renovation-some-types-is-missing-on-new-type-defining
  'Cannot find name \'GridBaseView\'',
  'Property \'views\' of exported interface has or is using private name \'GridBaseView\'',
  'Public property \'views\' of exported class has or is using private name \'GridBaseView\''
  // #endregion
];
// Remove all previously generated jQuery wrapper components (*.j.tsx).
function deleteJQueryComponents(cb) {
  del.sync(jQueryComponentsGlob);
  cb();
}
// Generate the jQuery wrapper components from the renovation sources.
// With `isWatch` the source stream stays open and regenerates on change.
function generateJQueryComponents(isWatch) {
  // Local generator instance so this task's options don't leak into the
  // module-level `generator` used by the inferno tasks.
  const generator = new InfernoGenerator();
  generator.options = {
    ...BASE_GENERATOR_OPTIONS_WITH_JQUERY,
    generateJQueryOnly: true
  };
  const pipe = isWatch ?
    watch(SRC).on('ready', () => console.log(
      'generate-jquery-components task is watching for changes...'
    )) : gulp.src(SRC);
  return pipe
    .pipe(generateComponents(generator))
    .pipe(plumber(()=>null)) // swallow stream errors so watch mode survives
    .pipe(gulp.dest('js/renovation/'));
}
const context = require('../context.js');
const { ifEsmPackage } = require('../utils');
const processErrors = (knownErrors, errors = []) => (e) => {
if(!knownErrors.some(i => e.message.includes(i))) {
errors.push(e);
console.log(e.message);
}
};
// Returns a gulp task that generates Inferno components, type-checks them
// with the inferno tsconfig, then babel-transpiles non-d.ts output.
// distPath/babelConfig pick the package flavour (cjs/esm); `dev` enables
// incremental caching for watch rebuilds.
function generateInfernoComponents(distPath = './', babelConfig = transpileConfig.cjs, dev) {
  return function generateInfernoComponents(done) {
    const tsProject = ts.createProject('build/gulp/generator/ts-configs/inferno.tsconfig.json');
    generator.options = BASE_GENERATOR_OPTIONS_WITH_JQUERY;
    const errors = [];
    const isNotDTS = (file) => !file.path.endsWith('.d.ts');
    const isDefault = distPath === './';
    return gulp.src(SRC, { base: 'js' })
      .pipe(gulpIf(dev, cached('generate-inferno-component')))
      .pipe(generateComponents(generator))
      .pipe(plumber(() => null)) // keep the stream alive on generator errors
      .pipe(tsProject({
        error: processErrors(knownErrors, errors),
        finish() {}
      }))
      .pipe(gulpIf(isNotDTS, babel(babelConfig)))
      // Default build fans out to all transpiled destinations.
      .pipe(gulpIf(isDefault, gulp.dest(context.TRANSPILED_PATH)))
      .pipe(gulpIf(isDefault, gulp.dest(context.TRANSPILED_RENOVATION_PATH)))
      .pipe(gulpIf(isDefault, gulp.dest(context.TRANSPILED_PROD_RENOVATION_PATH)))
      .pipe(gulpIf(esmPackage, gulp.dest(path.join(context.TRANSPILED_PROD_ESM_PATH, distPath))))
      .on('end', function() {
        // Collected type errors are currently not propagated (see comment).
        done(/* !dev && errors.length || undefined*/);
      });
  };
}
// Collect metadata for every generated widget registered for jQuery
// (decorator.jQuery.register === 'true') and write it as widgets.json into
// the renovation compat-test folder, with paths made relative to it.
function processRenovationMeta() {
  const widgetsMeta = generator
    .getComponentsMeta()
    .filter(meta =>
      meta.decorator &&
      meta.decorator.jQuery &&
      meta.decorator.jQuery.register === 'true' &&
      fs.existsSync(meta.path));
  const metaJson = JSON.stringify(widgetsMeta.map(meta => ({
    widgetName: `dx${meta.name}`,
    ...meta,
    path: path.relative(COMPAT_TESTS_PARTS, meta.path).replace(/\\/g, '/')
  })), null, 2);
  return file('widgets.json', metaJson, { src: true })
    .pipe(gulp.dest(COMPAT_TESTS_PARTS));
}
// Task wiring: clean + regenerate the jQuery wrappers, then the aggregate
// component-generation pipelines.
gulp.task('generate-jquery-components-clean', deleteJQueryComponents);
gulp.task('generate-jquery-components-run', function generateJQuery() {
  return generateJQueryComponents(false);
});
gulp.task('generate-jquery-components', gulp.series('generate-jquery-components-clean', 'generate-jquery-components-run'));
gulp.task('generate-jquery-components-watch', function watchJQueryComponents() {
  return generateJQueryComponents(true);
});
// Full build: jQuery wrappers, default inferno output, optional esm/cjs
// package flavours, then the renovation test metadata.
gulp.task('generate-components', gulp.series(
  'generate-jquery-components',
  generateInfernoComponents(),
  ifEsmPackage(generateInfernoComponents('./esm', transpileConfig.esm)),
  ifEsmPackage(generateInfernoComponents('./cjs', transpileConfig.cjs)),
  processRenovationMeta
));
// Dev build: single cached inferno pass.
gulp.task('generate-components-dev', gulp.series(
  'generate-jquery-components',
  generateInfernoComponents('./', transpileConfig.cjs, true),
  processRenovationMeta
));
gulp.task('generate-inferno-components-watch', function() {
  gulp
    .watch(SRC, gulp.series(
      generateInfernoComponents('./', transpileConfig.cjs, true)
    ))
    .on('ready', () => console.log(
      'generate-inferno-components task is watching for changes...'
    ));
});
// Register the generation/watch task family for a framework with no
// task-name suffix; thin convenience wrapper over the suffixed variant.
function addGenerationTask(
  frameworkName,
  knownErrors = [],
  compileTs = true,
  copyArtifacts = false,
  babelGeneratedFiles = true
) {
  addGenerationTaskWithSuffix(
    frameworkName,
    '',
    knownErrors,
    compileTs,
    copyArtifacts,
    babelGeneratedFiles
  );
}
// Registers the full gulp task family for one framework generator:
//   `${fw}${suffix}-compilation-check`         – generate + type-check only
//   `generate-${fw}${suffix}-declaration-only` – generate, check, babel, emit
//   `generate-${fw}${suffix}`                  – clean + declaration + common src
//   `generate-${fw}${suffix}-watch`            – the above plus file watchers
function addGenerationTaskWithSuffix(
  frameworkName,
  suffix,
  knownErrors = [],
  compileTs = true,
  copyArtifacts = false,
  babelGeneratedFiles = true
) {
  const frameworkDest = `artifacts/${frameworkName}${suffix}`;
  const generator = require(`@devextreme-generator/${frameworkName}`).default;
  // No-op ts project when type-checking is disabled for this framework.
  let tsProject = () => () => { };
  if(compileTs) {
    tsProject = ts.createProject(`build/gulp/generator/ts-configs/${frameworkName}.tsconfig.json`);
  }
  generator.options = BASE_GENERATOR_OPTIONS;
  // Generate components and (optionally) type-check them; the task fails
  // with the joined messages of any non-whitelisted compiler errors.
  function compileComponents(done) {
    const errors = [];
    const frameworkIgnorePaths = IGNORE_PATHS_BY_FRAMEWORKS[frameworkName];
    return gulp.src([
      ...SRC,
      ...frameworkIgnorePaths,
      '!js/renovation/component_wrapper/**/*.*',
    ], { base: 'js' })
      .pipe(generateComponents(generator))
      .pipe(plumber(() => null))
      .pipe(gulpIf(compileTs, tsProject({
        error: processErrors(knownErrors, errors),
        finish() { }
      }))).on('end', function() {
        done(errors.map(e => e.message).join('\n') || undefined);
      });
  }
  gulp.task(`${frameworkName}${suffix}-compilation-check`, compileComponents);
  gulp.task(`generate-${frameworkName}${suffix}-declaration-only`, function(done) {
    return compileComponents(done)
      .pipe(gulpIf(babelGeneratedFiles, babel(transpileConfig.cjs)))
      .pipe(gulp.dest(frameworkDest));
  });
  const frameworkSrc = `./artifacts/${frameworkName}${suffix}`;
  const artifactsSrc = ['./artifacts/css/**/*', `${frameworkSrc}/**/*`];
  const generateSeries = [
    function cleanFrameworkArtifacts(cb) {
      del.sync(frameworkSrc);
      cb();
    },
    `generate-${frameworkName}${suffix}-declaration-only`,
    // Transpile shared js/d.ts sources into the framework artifact folder.
    function() {
      return gulp.src(COMMON_SRC)
        .pipe(
          gulpIf(
            file => file.extname === '.js',
            babel(transpileConfig.cjs)
          )
        )
        .pipe(gulp.dest(frameworkDest));
    }];
  if(copyArtifacts) {
    // Mirror the artifacts into the framework's playground app.
    const dest = `./playground/${frameworkName}${suffix}/src/artifacts`;
    generateSeries.push(function cleanFrameworkPlayground(cb) {
      del.sync(dest);
      cb();
    });
    generateSeries.push(function copyArtifacts() {
      return gulp.src(artifactsSrc, { base: './artifacts/' })
        .pipe(gulp.dest(dest));
    });
  }
  gulp.task(`generate-${frameworkName}${suffix}`, gulp.series(...generateSeries));
  const watchTasks = [
    function() {
      watch(COMMON_SRC)
        .pipe(plumber({
          errorHandler: notify.onError('Error: <%= error.message %>')
            .bind() // bind call is necessary to prevent firing 'end' event in notify.onError implementation
        }))
        .pipe(
          gulpIf(
            file => file.extname === '.js',
            babel(transpileConfig.cjs)
          )
        )
        .pipe(gulp.dest(frameworkDest));
    },
    function declarationBuild() {
      gulp.watch(SRC, gulp.series(`generate-${frameworkName}${suffix}-declaration-only`));
    }
  ];
  if(copyArtifacts) {
    watchTasks.push(function copyArtifacts() {
      return gulp.src(artifactsSrc, { base: './artifacts/' })
        .pipe(watch(artifactsSrc, { base: './artifacts/', readDelay: 1000 }))
        .pipe(gulp.dest(`./playground/${frameworkName}${suffix}/src/artifacts`));
    });
  }
  gulp.task(`generate-${frameworkName}${suffix}-watch`, gulp.series(
    `generate-${frameworkName}${suffix}`,
    gulp.parallel(...watchTasks)
  ));
}
addGenerationTask('react',
knownErrors,
true,
true,
false
);
addGenerationTaskWithSuffix('react', '-typescript', knownErrors, false, false, false);
const ngErrors = [
'Cannot find module \'@angular/core\'',
'Cannot find module \'@angular/common\'',
'Cannot find module \'@angular/forms\'',
'Cannot find module \'@angular/cdk/portal\'',
'Cannot find module \'inferno\'',
'Cannot find module \'inferno-create-element\'',
].concat(knownErrors);
addGenerationTask('angular', ngErrors);
addGenerationTaskWithSuffix('angular', '-typescript', ngErrors, false, false, false);
addGenerationTask('vue', [], false, true, false);
gulp.task('generate-components-watch', gulp.series('generate-components', function() {
gulp
.watch(SRC, gulp.series('generate-components-dev'))
.on('ready', () => console.log(
'generate-components task is watching for changes...'
));
}));
gulp.task('native-components-compilation-check', gulp.series('react-compilation-check', 'angular-compilation-check'));
|
'use strict';

// AngularJS controller for the wallet-backup screen: lets the user download,
// view, copy or e-mail an export of the currently focused wallet.
angular.module('copayApp.controllers').controller('backupController',
  function($rootScope, $scope, $timeout, backupService, profileService, isMobile, isCordova, notification, go, gettext) {
    this.isSafari = isMobile.Safari();
    this.isCordova = isCordova;
    this.error = null;
    this.success = null;
    var fc = profileService.focusedClient;
    this.isEncrypted = fc.isPrivKeyEncrypted();
    // Save an encrypted backup file, then return to the wallet home screen.
    this.downloadWalletBackup = function() {
      backupService.walletDownload(this.password, function() {
        $rootScope.$emit('Local/BackupDone');
        notification.success(gettext('Backup created'), gettext('Encrypted backup file saved'));
        go.walletHome();
      });
    };
    // Export the focused wallet (encrypted with this.password when set).
    this.getBackup = function() {
      return backupService.walletExport(this.password);
    };
    // Show the backup as plain text; the 100ms delay presumably lets the UI
    // render a busy state before the export runs — TODO confirm.
    this.viewWalletBackup = function() {
      var self = this;
      $timeout(function() {
        self.backupWalletPlainText = self.getBackup();
        $rootScope.$emit('Local/BackupDone');
      }, 100);
    };
    // NOTE(review): relies on Cordova clipboard/toast plugin globals with no
    // guard — confirm this path is only reachable in Cordova builds.
    this.copyWalletBackup = function() {
      var ew = this.getBackup();
      window.cordova.plugins.clipboard.copy(ew);
      window.plugins.toast.showShortCenter('Copied to clipboard');
      $rootScope.$emit('Local/BackupDone');
    };
    // Compose an e-mail containing the backup via the Cordova email plugin.
    this.sendWalletBackup = function() {
      var fc = profileService.focusedClient;
      if (isMobile.Android() || isMobile.Windows()) {
        // Prevent the app pause (triggered by the mail intent) from locking the app.
        window.ignoreMobilePause = true;
      }
      window.plugins.toast.showShortCenter('Preparing backup...');
      var name = (fc.credentials.walletName || fc.credentials.walletId);
      if (fc.alias) {
        name = fc.alias + ' [' + name + ']';
      }
      var ew = this.getBackup();
      var properties = {
        subject: 'StartWallet Backup: ' + name,
        body: 'Here is the encrypted backup of the wallet ' + name + ': \n\n' + ew + '\n\n To import this backup, copy all text between {...}, including the symbols {}',
        isHtml: false
      };
      $rootScope.$emit('Local/BackupDone');
      window.plugin.email.open(properties);
    };
  });
|
/**
* @author Richard Davey <[email protected]>
* @copyright 2020 Photon Storm Ltd.
* @license {@link https://opensource.org/licenses/MIT|MIT License}
*/
var Class = require('../../../utils/Class');
var EventEmitter = require('eventemitter3');
var Events = require('../events');
/**
* @classdesc
* A generic Key object which can be passed to the Process functions (and so on)
* keycode must be an integer
*
* @class Key
* @extends Phaser.Events.EventEmitter
* @memberof Phaser.Input.Keyboard
* @constructor
* @since 3.0.0
*
* @param {Phaser.Input.Keyboard.KeyboardPlugin} plugin - The Keyboard Plugin instance that owns this Key object.
* @param {integer} keyCode - The keycode of this key.
*/
var Key = new Class({
    Extends: EventEmitter,
    initialize:
    function Key (plugin, keyCode)
    {
        EventEmitter.call(this);
        /**
         * The Keyboard Plugin instance that owns this Key object.
         *
         * @name Phaser.Input.Keyboard.Key#plugin
         * @type {Phaser.Input.Keyboard.KeyboardPlugin}
         * @since 3.17.0
         */
        this.plugin = plugin;
        /**
         * The keycode of this key.
         *
         * @name Phaser.Input.Keyboard.Key#keyCode
         * @type {integer}
         * @since 3.0.0
         */
        this.keyCode = keyCode;
        /**
         * The original DOM event.
         *
         * @name Phaser.Input.Keyboard.Key#originalEvent
         * @type {KeyboardEvent}
         * @since 3.0.0
         */
        this.originalEvent = undefined;
        /**
         * Can this Key be processed?
         *
         * @name Phaser.Input.Keyboard.Key#enabled
         * @type {boolean}
         * @default true
         * @since 3.0.0
         */
        this.enabled = true;
        /**
         * The "down" state of the key. This will remain `true` for as long as the keyboard thinks this key is held down.
         *
         * @name Phaser.Input.Keyboard.Key#isDown
         * @type {boolean}
         * @default false
         * @since 3.0.0
         */
        this.isDown = false;
        /**
         * The "up" state of the key. This will remain `true` for as long as the keyboard thinks this key is up.
         *
         * @name Phaser.Input.Keyboard.Key#isUp
         * @type {boolean}
         * @default true
         * @since 3.0.0
         */
        this.isUp = true;
        /**
         * The down state of the ALT key, if pressed at the same time as this key.
         *
         * @name Phaser.Input.Keyboard.Key#altKey
         * @type {boolean}
         * @default false
         * @since 3.0.0
         */
        this.altKey = false;
        /**
         * The down state of the CTRL key, if pressed at the same time as this key.
         *
         * @name Phaser.Input.Keyboard.Key#ctrlKey
         * @type {boolean}
         * @default false
         * @since 3.0.0
         */
        this.ctrlKey = false;
        /**
         * The down state of the SHIFT key, if pressed at the same time as this key.
         *
         * @name Phaser.Input.Keyboard.Key#shiftKey
         * @type {boolean}
         * @default false
         * @since 3.0.0
         */
        this.shiftKey = false;
        /**
         * The down state of the Meta key, if pressed at the same time as this key.
         * On a Mac the Meta Key is the Command key. On Windows keyboards, it's the Windows key.
         *
         * @name Phaser.Input.Keyboard.Key#metaKey
         * @type {boolean}
         * @default false
         * @since 3.16.0
         */
        this.metaKey = false;
        /**
         * The location of the modifier key. 0 for standard (or unknown), 1 for left, 2 for right, 3 for numpad.
         *
         * @name Phaser.Input.Keyboard.Key#location
         * @type {number}
         * @default 0
         * @since 3.0.0
         */
        this.location = 0;
        /**
         * The timestamp when the key was last pressed down.
         *
         * @name Phaser.Input.Keyboard.Key#timeDown
         * @type {number}
         * @default 0
         * @since 3.0.0
         */
        this.timeDown = 0;
        /**
         * The number of milliseconds this key was held down for in the previous down - up sequence.
         * This value isn't updated every game step, only when the Key changes state.
         * To get the current duration use the `getDuration` method.
         *
         * @name Phaser.Input.Keyboard.Key#duration
         * @type {number}
         * @default 0
         * @since 3.0.0
         */
        this.duration = 0;
        /**
         * The timestamp when the key was last released.
         *
         * @name Phaser.Input.Keyboard.Key#timeUp
         * @type {number}
         * @default 0
         * @since 3.0.0
         */
        this.timeUp = 0;
        /**
         * When a key is held down should it continuously fire the `down` event each time it repeats?
         *
         * By default it will emit the `down` event just once, but if you wish to receive the event
         * for each repeat as well, enable this property.
         *
         * @name Phaser.Input.Keyboard.Key#emitOnRepeat
         * @type {boolean}
         * @default false
         * @since 3.16.0
         */
        this.emitOnRepeat = false;
        /**
         * If a key is held down this holds down the number of times the key has 'repeated'.
         *
         * @name Phaser.Input.Keyboard.Key#repeats
         * @type {number}
         * @default 0
         * @since 3.0.0
         */
        this.repeats = 0;
        /**
         * True if the key has just been pressed (NOTE: requires to be reset, see justDown getter)
         *
         * @name Phaser.Input.Keyboard.Key#_justDown
         * @type {boolean}
         * @private
         * @default false
         * @since 3.0.0
         */
        this._justDown = false;
        /**
         * True if the key has just been released (NOTE: requires to be reset, see justUp getter)
         *
         * @name Phaser.Input.Keyboard.Key#_justUp
         * @type {boolean}
         * @private
         * @default false
         * @since 3.0.0
         */
        this._justUp = false;
        /**
         * Internal tick counter.
         *
         * @name Phaser.Input.Keyboard.Key#_tick
         * @type {number}
         * @private
         * @since 3.11.0
         */
        this._tick = -1;
    },
    /**
     * Controls if this Key will continuously emit a `down` event while being held down (true),
     * or emit the event just once, on first press, and then skip future events (false).
     *
     * @method Phaser.Input.Keyboard.Key#setEmitOnRepeat
     * @since 3.16.0
     *
     * @param {boolean} value - Emit `down` events on repeated key down actions, or just once?
     *
     * @return {Phaser.Input.Keyboard.Key} This Key instance.
     */
    setEmitOnRepeat: function (value)
    {
        this.emitOnRepeat = value;
        return this;
    },
    /**
     * Processes the Key Down action for this Key.
     * Called automatically by the Keyboard Plugin.
     *
     * @method Phaser.Input.Keyboard.Key#onDown
     * @fires Phaser.Input.Keyboard.Events#DOWN
     * @since 3.16.0
     *
     * @param {KeyboardEvent} event - The native DOM Keyboard event.
     */
    onDown: function (event)
    {
        this.originalEvent = event;
        if (!this.enabled)
        {
            return;
        }
        //  Modifier and location state are refreshed on every keydown,
        //  including OS auto-repeats.
        this.altKey = event.altKey;
        this.ctrlKey = event.ctrlKey;
        this.shiftKey = event.shiftKey;
        this.metaKey = event.metaKey;
        this.location = event.location;
        //  Counts every native keydown, auto-repeats included; cleared in onUp.
        this.repeats++;
        if (!this.isDown)
        {
            this.isDown = true;
            this.isUp = false;
            this.timeDown = event.timeStamp;
            this.duration = 0;
            this._justDown = true;
            this._justUp = false;
            this.emit(Events.DOWN, this, event);
        }
        else if (this.emitOnRepeat)
        {
            //  Already down: only re-emit for repeats when opted in.
            this.emit(Events.DOWN, this, event);
        }
    },
    /**
     * Processes the Key Up action for this Key.
     * Called automatically by the Keyboard Plugin.
     *
     * @method Phaser.Input.Keyboard.Key#onUp
     * @fires Phaser.Input.Keyboard.Events#UP
     * @since 3.16.0
     *
     * @param {KeyboardEvent} event - The native DOM Keyboard event.
     */
    onUp: function (event)
    {
        this.originalEvent = event;
        if (!this.enabled)
        {
            return;
        }
        this.isDown = false;
        this.isUp = true;
        this.timeUp = event.timeStamp;
        this.duration = this.timeUp - this.timeDown;
        this.repeats = 0;
        this._justDown = false;
        this._justUp = true;
        this._tick = -1;
        this.emit(Events.UP, this, event);
    },
    /**
     * Resets this Key object back to its default un-pressed state.
     *
     * @method Phaser.Input.Keyboard.Key#reset
     * @since 3.6.0
     *
     * @return {Phaser.Input.Keyboard.Key} This Key instance.
     */
    reset: function ()
    {
        //  NOTE(review): 'preventDefault' is never declared or read anywhere
        //  else in this class — presumably a leftover from an older API;
        //  confirm against the KeyboardPlugin before removing.
        this.preventDefault = true;
        this.enabled = true;
        this.isDown = false;
        this.isUp = true;
        this.altKey = false;
        this.ctrlKey = false;
        this.shiftKey = false;
        this.metaKey = false;
        this.timeDown = 0;
        this.duration = 0;
        this.timeUp = 0;
        this.repeats = 0;
        this._justDown = false;
        this._justUp = false;
        this._tick = -1;
        //  Note: 'location' and 'emitOnRepeat' are not reset here.
        return this;
    },
    /**
     * Returns the duration, in ms, that the Key has been held down for.
     *
     * If the key is not currently down it will return zero.
     *
     * The get the duration the Key was held down for in the previous up-down cycle,
     * use the `Key.duration` property value instead.
     *
     * @method Phaser.Input.Keyboard.Key#getDuration
     * @since 3.17.0
     *
     * @return {number} The duration, in ms, that the Key has been held down for if currently down.
     */
    getDuration: function ()
    {
        if (this.isDown)
        {
            return (this.plugin.game.loop.time - this.timeDown);
        }
        else
        {
            return 0;
        }
    },
    /**
     * Removes any bound event handlers and removes local references.
     *
     * @method Phaser.Input.Keyboard.Key#destroy
     * @since 3.16.0
     */
    destroy: function ()
    {
        this.removeAllListeners();
        this.originalEvent = null;
        this.plugin = null;
    }
});
module.exports = Key;
|
import gulp from 'gulp';
import paths from './paths';
// Vigila cambios que se produzcan en el código y ejecuta las tareas específicas
export default () => {
  // Pairs of [files to watch, tasks to run when they change].
  const watchList = [
    [paths.html, ['templates', 'livereloadHTML']],
    [[paths.styles.scss], ['sass', 'livereloadHTML']],
    [[paths.js], ['lint-js', 'inject', 'livereloadHTML']],
    [paths.bower, ['bower']],
  ];
  watchList.forEach(([files, tasks]) => gulp.watch(files, tasks));
};
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[26],{578:function(t,s,e){"use strict";e.r(s);var a=e(46),n=Object(a.a)({},function(){var t=this,s=t.$createElement,e=t._self._c||s;return e("ContentSlotsDistributor",{attrs:{"slot-key":t.$parent.slotKey}},[e("h1",{attrs:{id:"installation"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#installation","aria-hidden":"true"}},[t._v("#")]),t._v(" Installation")]),t._v(" "),e("h2",{attrs:{id:"telechargement-direct-cdn"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#telechargement-direct-cdn","aria-hidden":"true"}},[t._v("#")]),t._v(" Téléchargement direct / CDN")]),t._v(" "),e("p",[e("a",{attrs:{href:"https://unpkg.com/vue-router/dist/vue-router.js",target:"_blank",rel:"noopener noreferrer"}},[t._v("https://unpkg.com/vue-router/dist/vue-router.js"),e("OutboundLink")],1)]),t._v(" "),e("p",[e("a",{attrs:{href:"https://unpkg.com",target:"_blank",rel:"noopener noreferrer"}},[t._v("Unpkg.com"),e("OutboundLink")],1),t._v(" fournit des liens CDN basés sur npm. Le lien ci-dessus pointera toujours vers la dernière version sur npm. 
Vous pouvez aussi utiliser un tag ou une version spécifique via un URL comme "),e("code",[t._v("https://unpkg.com/[email protected]/dist/vue-router.js")]),t._v(".\n")]),t._v(" "),e("p",[t._v("Incluez "),e("code",[t._v("vue-router")]),t._v(" après Vue et l'installation sera automatique :")]),t._v(" "),e("div",{staticClass:"language-html extra-class"},[e("pre",{pre:!0,attrs:{class:"language-html"}},[e("code",[e("span",{pre:!0,attrs:{class:"token tag"}},[e("span",{pre:!0,attrs:{class:"token tag"}},[e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("script")]),t._v(" "),e("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("src")]),e("span",{pre:!0,attrs:{class:"token attr-value"}},[e("span",{pre:!0,attrs:{class:"token punctuation attr-equals"}},[t._v("=")]),e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("/path/to/vue.js"),e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),e("span",{pre:!0,attrs:{class:"token script"}}),e("span",{pre:!0,attrs:{class:"token tag"}},[e("span",{pre:!0,attrs:{class:"token tag"}},[e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("script")]),e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n"),e("span",{pre:!0,attrs:{class:"token tag"}},[e("span",{pre:!0,attrs:{class:"token tag"}},[e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("script")]),t._v(" "),e("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("src")]),e("span",{pre:!0,attrs:{class:"token attr-value"}},[e("span",{pre:!0,attrs:{class:"token punctuation attr-equals"}},[t._v("=")]),e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("/path/to/vue-router.js"),e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),e("span",{pre:!0,attrs:{class:"token script"}}),e("span",{pre:!0,attrs:{class:"token 
tag"}},[e("span",{pre:!0,attrs:{class:"token tag"}},[e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("script")]),e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n")])])]),e("h2",{attrs:{id:"npm"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#npm","aria-hidden":"true"}},[t._v("#")]),t._v(" npm")]),t._v(" "),e("div",{staticClass:"language-bash extra-class"},[e("pre",{pre:!0,attrs:{class:"language-bash"}},[e("code",[e("span",{pre:!0,attrs:{class:"token function"}},[t._v("npm")]),t._v(" "),e("span",{pre:!0,attrs:{class:"token function"}},[t._v("install")]),t._v(" vue-router\n")])])]),e("p",[t._v("Lorsqu'il est utilisé avec un système de module, vous devez explicitement installer le router via "),e("code",[t._v("Vue.use()")]),t._v(" :")]),t._v(" "),e("div",{staticClass:"language-js extra-class"},[e("pre",{pre:!0,attrs:{class:"language-js"}},[e("code",[e("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("import")]),t._v(" Vue "),e("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("from")]),t._v(" "),e("span",{pre:!0,attrs:{class:"token string"}},[t._v("'vue'")]),t._v("\n"),e("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("import")]),t._v(" VueRouter "),e("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("from")]),t._v(" "),e("span",{pre:!0,attrs:{class:"token string"}},[t._v("'vue-router'")]),t._v("\n\nVue"),e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),e("span",{pre:!0,attrs:{class:"token function"}},[t._v("use")]),e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("VueRouter"),e("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v("\n")])])]),e("p",[t._v("Vous n'avez pas besoin de faire cela lors de l'utilisation des balises de script globales ("),e("code",[t._v("<script>")]),t._v(").")]),t._v(" 
"),e("h2",{attrs:{id:"build-de-developpement"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#build-de-developpement","aria-hidden":"true"}},[t._v("#")]),t._v(" Build de développement")]),t._v(" "),e("p",[t._v("Vous aurez besoin de cloner directement "),e("code",[t._v("vue-router")]),t._v(" depuis GitHub et le compiler vous-même si vous souhaitez utiliser le dernier build de développement.")]),t._v(" "),e("div",{staticClass:"language-bash extra-class"},[e("pre",{pre:!0,attrs:{class:"language-bash"}},[e("code",[e("span",{pre:!0,attrs:{class:"token function"}},[t._v("git")]),t._v(" clone https://github.com/vuejs/vue-router.git node_modules/vue-router\n"),e("span",{pre:!0,attrs:{class:"token builtin class-name"}},[t._v("cd")]),t._v(" node_modules/vue-router\n"),e("span",{pre:!0,attrs:{class:"token function"}},[t._v("npm")]),t._v(" "),e("span",{pre:!0,attrs:{class:"token function"}},[t._v("install")]),t._v("\n"),e("span",{pre:!0,attrs:{class:"token function"}},[t._v("npm")]),t._v(" run build\n")])])])])},[],!1,null,null,null);s.default=n.exports}}]); |
import React from 'react'
import PropTypes from 'prop-types'
import { connect } from 'react-redux'
import { fromOracle, fromTheme } from 'store/selectors'
import { oracleAnswer, themeChangeAccent } from 'store/actions'
import { sample } from 'lodash'
import { Oracle } from 'components'
const OracleContainer = props => <Oracle onAnswer={props.onAnswer} response={props.current.response} />
OracleContainer.propTypes = {
availableResponses: PropTypes.array.isRequired,
availableAccents: PropTypes.array.isRequired,
current: PropTypes.object.isRequired,
onAnswer: PropTypes.func.isRequired,
}
// Select the oracle and theme slices this container needs from the store.
const mapStateToProps = (state) => {
  return {
    availableAccents: fromTheme.getAvailableAccents(state),
    availableResponses: fromOracle.getAvailableResponses(state),
    current: fromOracle.getCurrent(state),
  }
}
// Build final props: each answer dispatches a random response, then a random
// accent whose foreground/background may be swapped at random.
const mergeProps = (stateProps, { dispatch }, ownProps) => ({
  ...stateProps,
  ...ownProps,
  onAnswer: () => {
    dispatch(oracleAnswer({ response: sample(stateProps.availableResponses) }))
    let accent = { ...sample(stateProps.availableAccents) }
    if (sample([true, false])) {
      // The swapped object intentionally carries only these two keys.
      accent = { foreground: accent.background, background: accent.foreground }
    }
    dispatch(themeChangeAccent({ ...accent }))
  },
})
export default connect(mapStateToProps, null, mergeProps)(OracleContainer)
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Unit tests for local command-line-interface debug wrapper session."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
import tempfile
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.debug.cli import cli_shared
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.debug.wrappers import local_cli_wrapper
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
# Import resource_variable_ops for the variables-to-tensor implicit conversion.
from tensorflow.python.ops import resource_variable_ops # pylint: disable=unused-import
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
class LocalCLIDebuggerWrapperSessionForTest(
    local_cli_wrapper.LocalCLIDebugWrapperSession):
  """Subclasses the wrapper class for testing.
  Overrides its CLI-related methods for headless testing environments.
  Inserts observer variables for assertions.
  """
  def __init__(self,
               command_sequence,
               sess,
               dump_root=None):
    """Constructor of the for-test subclass.
    Args:
      command_sequence: (list of list of str) A list of command arguments,
        including the command prefix, each element of the list is such as:
        ["run", "-n"],
        ["print_feed", "input:0"].
      sess: See the doc string of LocalCLIDebugWrapperSession.__init__.
      dump_root: See the doc string of LocalCLIDebugWrapperSession.__init__.
    """
    local_cli_wrapper.LocalCLIDebugWrapperSession.__init__(
        self, sess, dump_root=dump_root, log_usage=False)
    self._command_sequence = command_sequence
    # Index of the next scripted command to replay in _launch_cli.
    self._command_pointer = 0
    # Observer variables.
    self.observers = {
        "debug_dumps": [],
        "tf_errors": [],
        "run_start_cli_run_numbers": [],
        "run_end_cli_run_numbers": [],
        "print_feed_responses": [],
        "profiler_py_graphs": [],
        "profiler_run_metadata": [],
    }
  # Headless no-op: skip building the interactive run-start CLI.
  def _prep_cli_for_run_start(self):
    pass
  # Record what the run-end CLI would have shown, instead of showing it.
  def _prep_debug_cli_for_run_end(self, debug_dump, tf_error, passed_filter):
    self.observers["debug_dumps"].append(debug_dump)
    self.observers["tf_errors"].append(tf_error)
  def _prep_profile_cli_for_run_end(self, py_graph, run_metadata):
    self.observers["profiler_py_graphs"].append(py_graph)
    self.observers["profiler_run_metadata"].append(run_metadata)
  def _launch_cli(self):
    # Record which run number triggered this (simulated) CLI launch.
    if self._is_run_start:
      self.observers["run_start_cli_run_numbers"].append(self._run_call_count)
    else:
      self.observers["run_end_cli_run_numbers"].append(self._run_call_count)
    # Replay scripted commands until a handler exits the simulated CLI.
    while True:
      command = self._command_sequence[self._command_pointer]
      self._command_pointer += 1
      try:
        if command[0] == "run":
          self._run_handler(command[1:])
        elif command[0] == "print_feed":
          self.observers["print_feed_responses"].append(
              self._print_feed_handler(command[1:]))
        else:
          raise ValueError("Unrecognized command prefix: %s" % command[0])
      except debugger_cli_common.CommandLineExit as e:
        return e.exit_token
class LocalCLIDebugWrapperSessionTest(test_util.TensorFlowTestCase):
  def setUp(self):
    # NOTE(review): mktemp() only *names* a path without creating it. The
    # wrapper under test needs a nonexistent dump root, and several tests
    # create the directory themselves — so mkdtemp() would not work here,
    # although mktemp() is race-prone and unsuitable for production code.
    self._tmp_dir = tempfile.mktemp()
    self.v = variables.Variable(10.0, name="v")
    self.w = variables.Variable(21.0, name="w")
    self.delta = constant_op.constant(1.0, name="delta")
    self.inc_v = state_ops.assign_add(self.v, self.delta, name="inc_v")
    # w_int depends on inc_v, so running w_int also increments v.
    self.w_int = control_flow_ops.with_dependencies(
        [self.inc_v],
        math_ops.cast(self.w, dtypes.int32, name="w_int_inner"),
        name="w_int_outer")
    self.ph = array_ops.placeholder(dtypes.float32, name="ph")
    self.xph = array_ops.transpose(self.ph, name="xph")
    self.m = constant_op.constant(
        [[0.0, 1.0, 2.0], [-4.0, -1.0, 0.0]], dtype=dtypes.float32, name="m")
    # y = m @ transpose(ph); the runtime-error tests below feed ph with an
    # incompatible shape to make this op fail at run time.
    self.y = math_ops.matmul(self.m, self.xph, name="y")
    self.sparse_ph = array_ops.sparse_placeholder(
        dtypes.float32, shape=([5, 5]), name="sparse_placeholder")
    self.sparse_add = sparse_ops.sparse_add(self.sparse_ph, self.sparse_ph)
    self.sess = session.Session()
    # Initialize variable.
    self.sess.run(variables.global_variables_initializer())
  def tearDown(self):
    # Reset the graph and delete the dump directory, if a test created it.
    ops.reset_default_graph()
    if os.path.isdir(self._tmp_dir):
      shutil.rmtree(self._tmp_dir)
  # Plain construction with no dump root must succeed.
  def testConstructWrapper(self):
    local_cli_wrapper.LocalCLIDebugWrapperSession(
        session.Session(), log_usage=False)
  # An existing but empty dump-root directory is acceptable.
  def testConstructWrapperWithExistingEmptyDumpRoot(self):
    os.mkdir(self._tmp_dir)
    self.assertTrue(os.path.isdir(self._tmp_dir))
    local_cli_wrapper.LocalCLIDebugWrapperSession(
        session.Session(), dump_root=self._tmp_dir, log_usage=False)
  # A pre-existing, non-empty dump root must be rejected.
  def testConstructWrapperWithExistingNonEmptyDumpRoot(self):
    os.mkdir(self._tmp_dir)
    dir_path = os.path.join(self._tmp_dir, "foo")
    os.mkdir(dir_path)
    self.assertTrue(os.path.isdir(dir_path))
    with self.assertRaisesRegexp(
        ValueError, "dump_root path points to a non-empty directory"):
      local_cli_wrapper.LocalCLIDebugWrapperSession(
          session.Session(), dump_root=self._tmp_dir, log_usage=False)
  # A dump root that is a regular file (not a directory) must be rejected.
  def testConstructWrapperWithExistingFileDumpRoot(self):
    os.mkdir(self._tmp_dir)
    file_path = os.path.join(self._tmp_dir, "foo")
    open(file_path, "a").close()  # Create the file
    self.assertTrue(os.path.isfile(file_path))
    with self.assertRaisesRegexp(ValueError, "dump_root path points to a file"):
      local_cli_wrapper.LocalCLIDebugWrapperSession(
          session.Session(), dump_root=file_path, log_usage=False)
  def testRunsUnderDebugMode(self):
    # Three scripted "run" commands: run-start CLI of run 1, run-end CLIs of
    # runs 1 and 2 (per the assertions below).
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
    # run under debug mode twice.
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    # Verify that the assign_add op did take effect.
    self.assertAllClose(12.0, self.sess.run(self.v))
    # Assert correct run call numbers for which the CLI has been launched at
    # run-start and run-end.
    self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
    self.assertEqual([1, 2], wrapped_sess.observers["run_end_cli_run_numbers"])
    # Verify that the dumps have been generated and picked up during run-end.
    self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
    # Verify that the TensorFlow runtime errors are picked up and in this case,
    # they should be both None.
    self.assertEqual([None, None], wrapped_sess.observers["tf_errors"])
  def testRunsWithEmptyStringDumpRootWorks(self):
    # An empty dump_root string must still allow a debug run to complete.
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"]], self.sess, dump_root="")
    # run under debug mode.
    wrapped_sess.run(self.inc_v)
    self.assertAllClose(11.0, self.sess.run(self.v))
  def testRunInfoOutputAtRunEndIsCorrect(self):
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
    wrapped_sess.run(self.inc_v)
    run_info_output = wrapped_sess._run_info_handler([])
    tfdbg_logo = cli_shared.get_tfdbg_logo()
    # The run_info output in the first run() call should contain the tfdbg logo.
    self.assertEqual(tfdbg_logo.lines,
                     run_info_output.lines[:len(tfdbg_logo.lines)])
    menu = run_info_output.annotations[debugger_cli_common.MAIN_MENU_KEY]
    self.assertIn("list_tensors", menu.captions())
    wrapped_sess.run(self.inc_v)
    run_info_output = wrapped_sess._run_info_handler([])
    # The run_info output in the second run() call should NOT contain the logo.
    self.assertNotEqual(tfdbg_logo.lines,
                        run_info_output.lines[:len(tfdbg_logo.lines)])
    menu = run_info_output.annotations[debugger_cli_common.MAIN_MENU_KEY]
    self.assertIn("list_tensors", menu.captions())
  def testRunsUnderNonDebugMode(self):
    # "run -n" runs without debugging: no run-end CLI launches are expected.
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run", "-n"], ["run", "-n"], ["run", "-n"]],
        self.sess, dump_root=self._tmp_dir)
    # run three times.
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    self.assertAllClose(13.0, self.sess.run(self.v))
    self.assertEqual([1, 2, 3],
                     wrapped_sess.observers["run_start_cli_run_numbers"])
    self.assertEqual([], wrapped_sess.observers["run_end_cli_run_numbers"])
  def testRunningWithSparsePlaceholderFeedWorks(self):
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
    # Sparse feed as (indices, values); sparse_add doubles the values.
    sparse_feed = ([[0, 1], [0, 2]], [10.0, 20.0])
    sparse_result = wrapped_sess.run(
        self.sparse_add, feed_dict={self.sparse_ph: sparse_feed})
    self.assertAllEqual([[0, 1], [0, 2]], sparse_result.indices)
    self.assertAllClose([20.0, 40.0], sparse_result.values)
  def testRunsUnderNonDebugThenDebugMode(self):
    # Do two NON_DEBUG_RUNs, followed by DEBUG_RUNs.
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run", "-n"], ["run", "-n"], ["run"], ["run"]],
        self.sess, dump_root=self._tmp_dir)
    # run three times.
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    self.assertAllClose(13.0, self.sess.run(self.v))
    self.assertEqual([1, 2, 3],
                     wrapped_sess.observers["run_start_cli_run_numbers"])
    # Here, the CLI should have been launched only under the third run,
    # because the first and second runs are NON_DEBUG.
    self.assertEqual([3], wrapped_sess.observers["run_end_cli_run_numbers"])
    self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
    self.assertEqual([None], wrapped_sess.observers["tf_errors"])
  def testRunMultipleTimesWithinLimit(self):
    # "run -t 3" repeats three runs before launching the CLI again.
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run", "-t", "3"], ["run"]],
        self.sess, dump_root=self._tmp_dir)
    # run three times.
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    self.assertAllClose(13.0, self.sess.run(self.v))
    self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
    self.assertEqual([3], wrapped_sess.observers["run_end_cli_run_numbers"])
    self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
    self.assertEqual([None], wrapped_sess.observers["tf_errors"])
  def testRunMultipleTimesOverLimit(self):
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run", "-t", "3"]], self.sess, dump_root=self._tmp_dir)
    # run twice, which is less than the number of times specified by the
    # command.
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    self.assertAllClose(12.0, self.sess.run(self.v))
    self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
    self.assertEqual([], wrapped_sess.observers["run_end_cli_run_numbers"])
    self.assertEqual(0, len(wrapped_sess.observers["debug_dumps"]))
    self.assertEqual([], wrapped_sess.observers["tf_errors"])
  # NOTE(review): "Multple" is a typo in this test name; left unchanged
  # because renaming would alter the externally visible test ID.
  def testRunMixingDebugModeAndMultpleTimes(self):
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run", "-n"], ["run", "-t", "2"], ["run"], ["run"]],
        self.sess, dump_root=self._tmp_dir)
    # run four times.
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    self.assertAllClose(14.0, self.sess.run(self.v))
    self.assertEqual([1, 2],
                     wrapped_sess.observers["run_start_cli_run_numbers"])
    self.assertEqual([3, 4], wrapped_sess.observers["run_end_cli_run_numbers"])
    self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
    self.assertEqual([None, None], wrapped_sess.observers["tf_errors"])
  # make_callable() on a tensor must go through the debug wrapper and dump.
  def testDebuggingMakeCallableTensorRunnerWorks(self):
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
    v = variables.Variable(42)
    tensor_runner = wrapped_sess.make_callable(v)
    self.sess.run(v.initializer)
    self.assertAllClose(42, tensor_runner())
    self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
  def testDebuggingMakeCallableTensorRunnerWithCustomRunOptionsWorks(self):
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
    a = constant_op.constant(42)
    tensor_runner = wrapped_sess.make_callable(a)
    # Caller-supplied RunOptions/RunMetadata must be honored by the wrapper.
    run_options = config_pb2.RunOptions(
        trace_level=config_pb2.RunOptions.FULL_TRACE)
    run_metadata = config_pb2.RunMetadata()
    self.assertAllClose(
        42, tensor_runner(options=run_options, run_metadata=run_metadata))
    self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
    self.assertGreater(len(run_metadata.step_stats.dev_stats), 0)
  # make_callable() on an Operation (no fetched value) must also be debugged.
  def testDebuggingMakeCallableOperationRunnerWorks(self):
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
    v = variables.Variable(10.0)
    inc_v = state_ops.assign_add(v, 1.0)
    op_runner = wrapped_sess.make_callable(inc_v.op)
    self.sess.run(v.initializer)
    op_runner()
    self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
    self.assertEqual(11.0, self.sess.run(v))
  def testDebuggingMakeCallableRunnerWithFeedListWorks(self):
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
    ph1 = array_ops.placeholder(dtypes.float32)
    ph2 = array_ops.placeholder(dtypes.float32)
    a = math_ops.add(ph1, ph2)
    tensor_runner = wrapped_sess.make_callable(a, feed_list=[ph1, ph2])
    self.assertAllClose(42.0, tensor_runner(41.0, 1.0))
    self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
  def testRuntimeErrorShouldBeCaught(self):
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
    # Do a run that should lead to an TensorFlow runtime error: ph is fed a
    # 3x1 value, making the matmul in self.y shape-incompatible.
    wrapped_sess.run(self.y, feed_dict={self.ph: [[0.0], [1.0], [2.0]]})
    self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
    self.assertEqual([1], wrapped_sess.observers["run_end_cli_run_numbers"])
    self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
    # Verify that the runtime error is caught by the wrapped session properly.
    self.assertEqual(1, len(wrapped_sess.observers["tf_errors"]))
    tf_error = wrapped_sess.observers["tf_errors"][0]
    self.assertEqual("y", tf_error.op.name)
  def testRuntimeErrorBeforeGraphExecutionIsRaised(self):
    # Use an impossible device name to cause an error before graph execution.
    with ops.device("/gpu:1337"):
      w = variables.Variable([1.0] * 10, name="w")
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"]], self.sess, dump_root=self._tmp_dir)
    # Pre-execution errors must propagate out of the wrapper as exceptions.
    with self.assertRaisesRegexp(errors.OpError, r".*[Dd]evice.*1337.*"):
      wrapped_sess.run(w)
  def testRunTillFilterPassesShouldLaunchCLIAtCorrectRun(self):
    # "run -f <filter>" keeps running until the named tensor filter passes.
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run", "-f", "v_greater_than_twelve"],
         ["run", "-f", "v_greater_than_twelve"],
         ["run"]],
        self.sess,
        dump_root=self._tmp_dir)
    def v_greater_than_twelve(datum, tensor):
      return datum.node_name == "v" and tensor > 12.0
    wrapped_sess.add_tensor_filter("v_greater_than_twelve",
                                   v_greater_than_twelve)
    # run five times.
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    self.assertAllClose(15.0, self.sess.run(self.v))
    self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
    # run-end CLI should NOT have been launched for run #2 and #3, because only
    # starting from run #4 v becomes greater than 12.0.
    self.assertEqual([4, 5], wrapped_sess.observers["run_end_cli_run_numbers"])
    self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
    self.assertEqual([None, None], wrapped_sess.observers["tf_errors"])
def testRunsUnderDebugModeWithWatchFnFilteringNodeNames(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "--node_name_filter", "inc.*"],
["run", "--node_name_filter", "delta"],
["run"]],
self.sess, dump_root=self._tmp_dir)
# run under debug mode twice.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
# Verify that the assign_add op did take effect.
self.assertAllClose(12.0, self.sess.run(self.v))
# Verify that the dumps have been generated and picked up during run-end.
self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
dumps = wrapped_sess.observers["debug_dumps"][0]
self.assertEqual(1, dumps.size)
self.assertEqual("inc_v", dumps.dumped_tensor_data[0].node_name)
dumps = wrapped_sess.observers["debug_dumps"][1]
self.assertEqual(1, dumps.size)
self.assertEqual("delta", dumps.dumped_tensor_data[0].node_name)
def testRunsUnderDebugModeWithWatchFnFilteringOpTypes(self):
    """--op_type_filter should restrict dumps to matching op types."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run", "--node_name_filter", "delta"],
         ["run", "--op_type_filter", "AssignAdd"],
         ["run"]],
        self.sess, dump_root=self._tmp_dir)

    # Two debug-mode runs: first filtered by node name, then by op type.
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)

    # The assign_add op must still have taken effect despite the filtering.
    self.assertAllClose(12.0, self.sess.run(self.v))

    # One dump per run, each holding exactly the node its filter matched.
    dumps_per_run = wrapped_sess.observers["debug_dumps"]
    self.assertEqual(2, len(dumps_per_run))
    for dump, expected_node in zip(dumps_per_run, ["delta", "inc_v"]):
        self.assertEqual(1, dump.size)
        self.assertEqual(expected_node, dump.dumped_tensor_data[0].node_name)
def testRunsUnderDebugModeWithWatchFnFilteringTensorDTypes(self):
    """--op_type_filter and --tensor_dtype_filter should restrict dumps."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run", "--op_type_filter", "Variable.*"],
         ["run", "--tensor_dtype_filter", "int32"],
         ["run"]],
        self.sess, dump_root=self._tmp_dir)

    # Two debug-mode runs with different watch filters.
    wrapped_sess.run(self.w_int)
    wrapped_sess.run(self.w_int)

    all_dumps = wrapped_sess.observers["debug_dumps"]
    self.assertEqual(2, len(all_dumps))

    # First run: only the Variable nodes "v" and "w" (order not guaranteed).
    first = all_dumps[0]
    self.assertEqual(2, first.size)
    self.assertItemsEqual(
        ["v", "w"], [first.dumped_tensor_data[i].node_name for i in [0, 1]])

    # Second run: only the int32 tensors, in a deterministic order.
    second = all_dumps[1]
    self.assertEqual(2, second.size)
    self.assertEqual(
        ["w_int_inner", "w_int_outer"],
        [second.dumped_tensor_data[i].node_name for i in [0, 1]])
def testRunsUnderDebugModeWithWatchFnFilteringOpTypesAndTensorDTypes(self):
    """Combining op-type and dtype filters should intersect the two."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run", "--op_type_filter", "Cast", "--tensor_dtype_filter", "int32"],
         ["run"]],
        self.sess, dump_root=self._tmp_dir)

    wrapped_sess.run(self.w_int)

    # Only the int32 Cast node should have been dumped.
    all_dumps = wrapped_sess.observers["debug_dumps"]
    self.assertEqual(1, len(all_dumps))
    self.assertEqual(1, all_dumps[0].size)
    self.assertEqual("w_int_inner", all_dumps[0].dumped_tensor_data[0].node_name)
def testPrintFeedPrintsFeedValueForTensorFeedKey(self):
    """print_feed with a Tensor object as the feed key prints its value."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["print_feed", "ph:0"], ["run"], ["run"]], self.sess)

    # Feed using the Tensor object itself as the key.
    result = wrapped_sess.run(self.y, feed_dict={self.ph: [[0.0, 1.0, 2.0]]})
    self.assertAllClose([[5.0], [-1.0]], result)

    responses = wrapped_sess.observers["print_feed_responses"]
    self.assertEqual(1, len(responses))
    self.assertEqual(
        ["Tensor \"ph:0 (feed)\":", "", "[[0.0, 1.0, 2.0]]"],
        responses[0].lines)
def testPrintFeedPrintsFeedValueForTensorNameFeedKey(self):
    """print_feed with a tensor-name string as the feed key prints its value."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["print_feed", "ph:0"], ["run"], ["run"]], self.sess)

    # Feed using the tensor's name rather than the Tensor object.
    result = wrapped_sess.run(self.y, feed_dict={"ph:0": [[0.0, 1.0, 2.0]]})
    self.assertAllClose([[5.0], [-1.0]], result)

    responses = wrapped_sess.observers["print_feed_responses"]
    self.assertEqual(1, len(responses))
    self.assertEqual(
        ["Tensor \"ph:0 (feed)\":", "", "[[0.0, 1.0, 2.0]]"],
        responses[0].lines)
def testPrintFeedPrintsErrorForInvalidFeedKey(self):
    """print_feed with a key absent from feed_dict reports an error."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["print_feed", "spam"], ["run"], ["run"]], self.sess)

    result = wrapped_sess.run(self.y, feed_dict={"ph:0": [[0.0, 1.0, 2.0]]})
    self.assertAllClose([[5.0], [-1.0]], result)

    responses = wrapped_sess.observers["print_feed_responses"]
    self.assertEqual(1, len(responses))
    self.assertEqual(
        ["ERROR: The feed_dict of the current run does not contain the key "
         "spam"], responses[0].lines)
def testPrintFeedPrintsErrorWhenFeedDictIsNone(self):
    """print_feed without any feed_dict reports an empty-feed error."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["print_feed", "spam"], ["run"], ["run"]], self.sess)

    # Run without a feed_dict at all.
    wrapped_sess.run(self.w_int)

    responses = wrapped_sess.observers["print_feed_responses"]
    self.assertEqual(1, len(responses))
    self.assertEqual(
        ["ERROR: The feed_dict of the current run is None or empty."],
        responses[0].lines)
def testRunUnderProfilerModeWorks(self):
    """run -p should collect profiler run metadata and the Python graph."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run", "-p"], ["run"]], self.sess)

    wrapped_sess.run(self.w_int)

    run_metadata = wrapped_sess.observers["profiler_run_metadata"]
    self.assertEqual(1, len(run_metadata))
    self.assertTrue(run_metadata[0].step_stats)

    py_graphs = wrapped_sess.observers["profiler_py_graphs"]
    self.assertEqual(1, len(py_graphs))
    self.assertIsInstance(py_graphs[0], ops.Graph)
def testCallingHookDelBeforeAnyRun(self):
    # Deleting the wrapper before any run() call must not raise
    # (e.g. from __del__ touching state only set up on the first run —
    # presumably the regression this test guards against).
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"]], self.sess)
    del wrapped_sess
if __name__ == "__main__":
    # Dispatch to the TensorFlow test runner.
    googletest.main()
|
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from decimal import Decimal as PyDecimal # Qt 5.12 also exports Decimal
import os
import re
import shutil
import threading
import urllib
from .address import Address
from . import bitcoin
from . import networks
from .util import format_satoshis_plain, bh2u, print_error
# Name of the default block explorer used on mainnet.
DEFAULT_EXPLORER = "exploredvt.com"

# Maps explorer name -> (base URL, address format used for 'addr' links,
# URL path fragment per item kind: 'tx', 'addr', 'block').
mainnet_block_explorers = {
    'exploredvt.com': ('https://exploredvt.com/#/VITAE/mainnet',
                       Address.FMT_CASHADDR,
                       {'tx': 'tx', 'addr': 'address', 'block' : 'block'}),
}

# Name of the default block explorer used on testnet.
DEFAULT_EXPLORER_TESTNET = 'exploredvt.com'

# Same structure as mainnet_block_explorers, for testnet.
testnet_block_explorers = {
    'exploredvt.com' : ('https://exploredvt.com/#/VITAE/testnet',
                        Address.FMT_CASHADDR,
                        {'tx': 'tx', 'addr': 'address', 'block' : 'block'}),
}
def BE_info():
    """Return the block-explorer dict for the currently active network."""
    return testnet_block_explorers if networks.net.TESTNET else mainnet_block_explorers
def BE_tuple(config):
    """Resolve the configured block explorer to its info tuple.

    Falls back to the network's default explorer when the name stored in
    the config is unknown (e.g. a stale entry left in the config file).
    """
    infodict = BE_info()
    primary = infodict.get(BE_from_config(config))
    return primary or infodict.get(BE_default_explorer())
def BE_default_explorer():
    """Name of the default block explorer for the active network."""
    if networks.net.TESTNET:
        return DEFAULT_EXPLORER_TESTNET
    return DEFAULT_EXPLORER
def BE_from_config(config):
    # Explorer name stored in the user config, or the network default.
    return config.get('block_explorer', BE_default_explorer())
def BE_URL(config, kind, item):
    """Build a block-explorer URL for `item`.

    kind: one of 'tx', 'addr' or 'block'; for 'addr', item must be an
    Address instance and is rendered in the explorer's address format.
    Returns None when the explorer or the kind is unknown.
    """
    info = BE_tuple(config)
    if not info:
        return None
    url_base, addr_fmt, parts = info
    kind_str = parts.get(kind)
    if kind_str is None:
        return None
    if kind == 'addr':
        assert isinstance(item, Address)
        item = item.to_string(addr_fmt)
    segments = (url_base, kind_str, item)
    return "/".join(seg for seg in segments if seg)
def BE_sorted_list():
    """Alphabetically sorted names of the known block explorers."""
    return sorted(BE_info().keys())
def create_URI(addr, amount, message, *, op_return=None, op_return_raw=None):
    """Construct a payment URI for `addr`.

    addr: an Address instance; anything else yields the empty string.
    amount: amount in satoshis, appended when truthy.
    message: human-readable description, URL-quoted when truthy.
    op_return / op_return_raw: mutually exclusive OP_RETURN payloads.

    Raises ValueError when both op_return and op_return_raw are given.
    """
    if not isinstance(addr, Address):
        return ""
    if op_return is not None and op_return_raw is not None:
        # Fixed message: it used to name a nonexistent 'op_return_hex' kwarg.
        raise ValueError('Must specify exactly one of op_return or op_return_raw as kwargs to create_URI')
    scheme, path = addr.to_URI_components()
    query = []
    if amount:
        query.append('amount=%s'%format_satoshis_plain(amount))
    if message:
        query.append('message=%s'%urllib.parse.quote(message))
    if op_return:
        query.append(f'op_return={str(op_return)}')
    if op_return_raw:
        query.append(f'op_return_raw={str(op_return_raw)}')
    p = urllib.parse.ParseResult(scheme=scheme,
                                 netloc='', path=path, params='',
                                 query='&'.join(query), fragment='')
    return urllib.parse.urlunparse(p)
def urlencode(s):
    """URL-encode: %-quote the characters of `s` that are special in URLs."""
    quoted = urllib.parse.quote(s)
    return quoted
def urldecode(url):
    """Inverse of urlencode: decode the %-escapes in `url`."""
    decoded = urllib.parse.unquote(url)
    return decoded
def parse_URI(uri, on_pr=None):
    """Parse a payment URI into a dict of its components.

    Returned keys may include 'address', 'amount' (converted to satoshis),
    'message', 'memo', 'time', 'exp', 'sig', 'r', 'op_return',
    'op_return_raw'.  A bare address (no scheme) is also accepted.

    on_pr: optional callback invoked from a daemon thread with a
    PaymentRequest when the URI carries one ('r', or 'name' + 'sig').

    Raises for a wrong scheme, duplicate query keys or an invalid address.
    """
    if ':' not in uri:
        # Bare address: validate it, then return it as-is.
        Address.from_string(uri)
        return {'address': uri}

    u = urllib.parse.urlparse(uri)
    # The scheme always comes back in lower case
    if u.scheme != networks.net.CASHADDR_PREFIX:
        raise Exception("Not a {} URI".format(networks.net.CASHADDR_PREFIX))
    address = u.path

    # python for android fails to parse query
    if address.find('?') > 0:
        address, query = u.path.split('?')
        pq = urllib.parse.parse_qs(query, keep_blank_values=True)
    else:
        pq = urllib.parse.parse_qs(u.query, keep_blank_values=True)

    for k, v in pq.items():
        if len(v) != 1:
            raise Exception('Duplicate Key', k)

    out = {k: v[0] for k, v in pq.items()}
    if address:
        Address.from_string(address)  # validate only; keep the raw string
        out['address'] = address
    if 'amount' in out:
        # '<digits>X<exp>' means value * 10**(exp - 8); otherwise the
        # amount is in whole coins and scaled by bitcoin.COIN.
        am = out['amount']
        m = re.match(r'([0-9\.]+)X([0-9])', am)
        if m:
            k = int(m.group(2)) - 8
            amount = PyDecimal(m.group(1)) * pow(10, k)
        else:
            amount = PyDecimal(am) * bitcoin.COIN
        out['amount'] = int(amount)
    if 'message' in out:
        # 'memo' is an alias of 'message' kept for callers that expect it.
        # (The original also had a no-op self-assignment here; removed.)
        out['memo'] = out['message']
    if 'time' in out:
        out['time'] = int(out['time'])
    if 'exp' in out:
        out['exp'] = int(out['exp'])
    if 'sig' in out:
        out['sig'] = bh2u(bitcoin.base_decode(out['sig'], None, base=58))
    if 'op_return_raw' in out and 'op_return' in out:
        del out['op_return_raw']  # allow only 1 of these

    r = out.get('r')
    sig = out.get('sig')
    name = out.get('name')
    if on_pr and (r or (name and sig)):
        def get_payment_request_thread():
            from . import paymentrequest as pr
            if name and sig:
                s = pr.serialize_request(out).SerializeToString()
                request = pr.PaymentRequest(s)
            else:
                request = pr.get_payment_request(r)
            if on_pr:
                on_pr(request)
        t = threading.Thread(target=get_payment_request_thread)
        # Thread.setDaemon() is deprecated (Python 3.10+); set the
        # attribute directly instead.
        t.daemon = True
        t.start()
    return out
def check_www_dir(rdir):
    """Ensure the local www directory exists and holds the required assets.

    Creates `rdir` if missing, copies the bundled index.html into it, and
    downloads the JS/CSS dependencies that index.html expects.
    """
    if not os.path.exists(rdir):
        os.mkdir(rdir)

    index = os.path.join(rdir, 'index.html')
    if not os.path.exists(index):
        print_error("copying index.html")
        src = os.path.join(os.path.dirname(__file__), 'www', 'index.html')
        shutil.copy(src, index)

    files = [
        "https://code.jquery.com/jquery-1.9.1.min.js",
        "https://raw.githubusercontent.com/davidshimjs/qrcodejs/master/qrcode.js",
        "https://code.jquery.com/ui/1.10.3/jquery-ui.js",
        "https://code.jquery.com/ui/1.10.3/themes/smoothness/jquery-ui.css"
    ]
    for URL in files:
        # Download each asset into rdir, keyed by its basename, if absent.
        basename = os.path.basename(urllib.parse.urlsplit(URL).path)
        target = os.path.join(rdir, basename)
        if os.path.exists(target):
            continue
        print_error("downloading ", URL)
        urllib.request.urlretrieve(URL, target)
|
// Auto-generated browser usage-share data (appears to follow the
// caniuse-lite region-usage format: single-letter browser-family codes
// mapping version -> usage percentage — verify against caniuse-lite docs).
// Do not edit by hand.
module.exports={D:{"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0,"38":0,"39":0,"40":0,"41":0,"42":0,"43":0,"44":0,"45":0,"46":0,"47":0,"48":0,"49":0.108186,"50":0,"51":0,"52":0,"53":0.002847,"54":0,"55":0,"56":0.002847,"57":0.008541,"58":0.005694,"59":0,"60":0,"61":0,"62":0.002847,"63":0.059787,"64":0,"65":0.002847,"66":0.002847,"67":0,"68":0.002847,"69":0.011388,"70":0.005694,"71":0.002847,"72":0.005694,"73":0.025623,"74":0.008541,"75":0.008541,"76":0.019929,"77":0.02847,"78":0.079716,"79":4.335981,"80":0.037011,"81":0.002847,"82":0},C:{"2":0,"3":0,"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0.002847,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0,"38":0,"39":0,"40":0.005694,"41":0,"42":0,"43":0,"44":0,"45":0,"46":0,"47":0,"48":0,"49":0,"50":0,"51":0,"52":0,"53":0.019929,"54":0,"55":0,"56":0,"57":0.002847,"58":0,"59":0,"60":0.005694,"61":0,"62":0,"63":0,"64":0,"65":0.048399,"66":0.002847,"67":0,"68":0.022776,"69":0,"70":0,"71":0.119574,"72":0.298935,"73":0,"74":0,"3.5":0,"3.6":0},F:{"9":0,"11":0,"12":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0,"38":0,"39":0,"40":0,"41":0,"42":0,"43":0,"44":0,"45":0,"46":0.002847,"47":0,"48":0,"49":0,"50":0,"51":0,"52":0,"53":0,"54":0,"55":0,"56":0,"57":0,"58":0,"60":0,"62":0,"63":0,"64":0.002847,"9.5-9.6":0,"10.0-10.1":0,"10.5":0,"10.6":0,"11.1":0,"11.5":0,"11.6":0,"12.1":0},E:{"4":0,"5":0,"6":0,"7":0,"8":0,"9":0.002847,"10":0.017082,"11":0.002847,"12":0.011388,"13":0.472602,_:"0","3.1":0,"3.2":0,"5.1":0.159432,"6.1":0,"7.1":0,"9.1":0.002847,"10.1":0.014235,"11.1":0.011388,"12.1":0.045552},G:{"8":
0.0012652865859758,"3.2":0.0012652865859758,"4.0-4.1":0.0025305731719516,"4.2-4.3":0,"5.0-5.1":0.0075917195158548,"6.0-6.1":0.0037958597579274,"7.0-7.1":0.010122292687806,"8.1-8.4":0.018979298789637,"9.0-9.2":0.016448725617685,"9.3":0.156895536661,"10.0-10.2":0.049346176853056,"10.3":0.17334426227868,"11.0-11.2":0.13918152445734,"11.3-11.4":0.23660859157747,"12.0-12.1":0.3188522196659,"12.2-12.4":1.9055215984795,"13.0-13.1":0.88063946383915,"13.2":0.37072896969091,"13.3":8.3559526137841},I:{"3":0.00097820816864295,"4":0.0097820816864295,_:"76","2.1":0,"2.2":0.0029346245059289,"2.3":0.00097820816864295,"4.1":0.01662953886693,"4.2-4.3":0.43236801054018,"4.4":0,"4.4.3-4.4.4":0.27878932806324},B:{"12":0.008541,"13":0.011388,"14":0.002847,"15":0.014235,"16":0.005694,"17":0.037011,"18":0.774384,_:"79"},A:{"6":0,"7":0,"8":0,"9":0,"10":0.0028940578512397,"11":0.34728694214876,"5.5":0},P:{"4":0.073907194610778,"5.0-5.4":0,"6.2-6.4":0.010558170658683,"7.2-7.4":0.32730329041916,"8.2":0.010558170658683,"9.2":0.14781438922156,"10.1":2.8823805898204},K:{_:"0 10 11 12 11.1 11.5 12.1"},N:{"10":0,"11":0.021459},J:{"7":0,"10":0.007153},R:{_:"0"},M:{"0":0.07153},O:{"0":0.057224},Q:{"1.2":0},S:{"2.5":0},H:{"0":0.14221196391753},L:{"0":74.324353}};
|
'use strict';

/**
 * Distance-in-words localization for Slovenian (date-fns v1 style).
 *
 * Slovenian distinguishes four grammatical forms, hence the
 * one (1) / two (dual, 2) / three (3-4) / other (5+) keys per token.
 */
function buildDistanceInWordsLocale() {
  var distanceInWordsLocale = {
    lessThanXSeconds: {
      one: 'manj kot sekunda',
      two: 'manj kot 2 sekundi',
      three: 'manj kot {{count}} sekunde',
      other: 'manj kot {{count}} sekund'
    },

    xSeconds: {
      one: '1 sekunda',
      two: '2 sekundi',
      three: '{{count}} sekunde',
      other: '{{count}} sekund'
    },

    halfAMinute: 'pol minute',

    lessThanXMinutes: {
      one: 'manj kot minuta',
      two: 'manj kot 2 minuti',
      three: 'manj kot {{count}} minute',
      other: 'manj kot {{count}} minut'
    },

    xMinutes: {
      one: '1 minuta',
      two: '2 minuti',
      three: '{{count}} minute',
      other: '{{count}} minut'
    },

    aboutXHours: {
      one: 'približno 1 ura',
      two: 'približno 2 uri',
      three: 'približno {{count}} ure',
      other: 'približno {{count}} ur'
    },

    xHours: {
      one: '1 ura',
      two: '2 uri',
      three: '{{count}} ure',
      other: '{{count}} ur'
    },

    xDays: {
      one: '1 dan',
      two: '2 dni',
      three: '{{count}} dni',
      other: '{{count}} dni'
    },

    aboutXMonths: {
      one: 'približno 1 mesec',
      two: 'približno 2 meseca',
      three: 'približno {{count}} mesece',
      other: 'približno {{count}} mesecev'
    },

    xMonths: {
      one: '1 mesec',
      two: '2 meseca',
      three: '{{count}} meseci',
      other: '{{count}} mesecev'
    },

    aboutXYears: {
      one: 'približno 1 leto',
      two: 'približno 2 leti',
      three: 'približno {{count}} leta',
      other: 'približno {{count}} let'
    },

    xYears: {
      one: '1 leto',
      two: '2 leti',
      three: '{{count}} leta',
      other: '{{count}} let'
    },

    overXYears: {
      one: 'več kot 1 leto',
      two: 'več kot 2 leti',
      three: 'več kot {{count}} leta',
      other: 'več kot {{count}} let'
    },

    almostXYears: {
      one: 'skoraj 1 leto',
      two: 'skoraj 2 leti',
      three: 'skoraj {{count}} leta',
      other: 'skoraj {{count}} let'
    }
  };

  /**
   * Render `token` for `count`. With options.addSuffix the phrase is
   * grammatically adjusted and wrapped as "čez …" (future,
   * options.comparison > 0) or "… nazaj" (past).
   */
  function localize(token, count, options) {
    options = options || {};

    var entry = distanceInWordsLocale[token];
    var result;
    if (typeof entry === 'string') {
      result = entry;
    } else {
      var form;
      if (count === 1) {
        form = 'one';
      } else if (count === 2) {
        form = 'two';
      } else if (count === 3 || count === 4) {
        form = 'three';
      } else {
        form = 'other';
      }
      // 'one'/'two' forms contain no {{count}}; replace is then a no-op.
      result = entry[form].replace('{{count}}', count);
    }

    if (options.addSuffix) {
      // Switch nominative endings to the case required after čez/nazaj.
      result = result.replace(/(minut|sekund|ur)(a)/, '$1o');
      if (token === 'xMonths') {
        result = result.replace(/(mesec)(i)/, '$1e');
      }
      if (options.comparison > 0) {
        return 'čez ' + result;
      }
      return result + ' nazaj';
    }

    return result;
  }

  return {
    localize: localize
  };
}
// CommonJS export of the locale factory (date-fns v1 convention).
module.exports = buildDistanceInWordsLocale;
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# $Id: filearchiver.py $
# pylint: disable=C0301
"""
A cronjob that compresses logs and other files, moving them to the
g_ksZipFileAreaRootDir storage area.
"""
__copyright__ = \
"""
Copyright (C) 2012-2017 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision: 118412 $"
# Standard python imports
import sys
import os
from optparse import OptionParser
import time;
import zipfile;
# Add Test Manager's modules path
g_ksTestManagerDir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(g_ksTestManagerDir)
# Test Manager imports
from common import utils;
from testmanager import config;
from testmanager.core.db import TMDatabaseConnection;
from testmanager.core.testset import TestSetData, TestSetLogic;
class FileArchiverBatchJob(object): # pylint: disable=R0903
    """
    Batch job that compresses the per-testset log files into zip archives,
    moving them from the file area to the zip-file storage area.

    NOTE: Python 2 source (print statements); variable names follow the
    Test Manager Hungarian convention (f=flag, s=string, o=object, i=int,
    as=string list, d=dict).
    """

    def __init__(self, oOptions):
        """
        Parse command line
        """
        self.fVerbose = oOptions.fVerbose;
        # Source area holding the raw 'TestSet-<id>-<name>' files.
        self.sSrcDir = config.g_ksFileAreaRootDir;
        # Destination area receiving the 'TestSet-<id>.zip' archives.
        self.sDstDir = config.g_ksZipFileAreaRootDir;
        #self.oTestSetLogic = TestSetLogic(TMDatabaseConnection(self.dprint if self.fVerbose else None));
        self.oTestSetLogic = TestSetLogic(TMDatabaseConnection(None));
        self.fDryRun = oOptions.fDryRun;

    def dprint(self, sText):
        """ Verbose output. """
        if self.fVerbose:
            print sText;
        return True;

    def warning(self, sText):
        """Prints a warning."""
        print sText;
        return True;

    def _processTestSet(self, idTestSet, asFiles, sCurDir):
        """
        Worker for processDir.
        Same return codes as processDir.
        """
        # sBaseFilename: path of the testset files relative to the area root,
        # without the trailing '-<name>' part.
        sBaseFilename = os.path.join(sCurDir, 'TestSet-%d' % (idTestSet,));
        if sBaseFilename[0:2] == ('.' + os.path.sep):
            sBaseFilename = sBaseFilename[2:];
        sSrcFileBase = os.path.join(self.sSrcDir, sBaseFilename + '-');

        #
        # Skip the file if the test set is still running.
        # But delete them if the testset is not found.
        #
        oTestSet = self.oTestSetLogic.tryFetch(idTestSet);
        if oTestSet is not None and sBaseFilename != oTestSet.sBaseFilename:
            self.warning('TestSet %d: Deleting because sBaseFilename differs: "%s" (disk) vs "%s" (db)' \
                         % (idTestSet, sBaseFilename, oTestSet.sBaseFilename,));
            oTestSet = None;

        if oTestSet is not None:
            if oTestSet.enmStatus == TestSetData.ksTestStatus_Running:
                self.dprint('Skipping test set #%d, still running' % (idTestSet,));
                return True;

            #
            # If we have a zip file already, don't try recreate it as we might
            # have had trouble removing the source files.
            #
            sDstDirPath = os.path.join(self.sDstDir, sCurDir);
            sZipFileNm = os.path.join(sDstDirPath, 'TestSet-%d.zip' % (idTestSet,));
            if not os.path.exists(sZipFileNm):
                #
                # Create zip file with all testset files as members.
                #
                self.dprint('TestSet %d: Creating %s...' % (idTestSet, sZipFileNm,));
                if not self.fDryRun:
                    if not os.path.exists(sDstDirPath):
                        os.makedirs(sDstDirPath, 0o755);

                    # Build the archive as .zip.tmp first so a crash never
                    # leaves a half-written .zip behind.
                    utils.noxcptDeleteFile(sZipFileNm + '.tmp');
                    oZipFile = zipfile.ZipFile(sZipFileNm + '.tmp', 'w', zipfile.ZIP_DEFLATED, allowZip64 = True);
                    for sFile in asFiles:
                        sSuff = os.path.splitext(sFile)[1];
                        if sSuff in [ '.png', '.webm', '.gz', '.bz2', '.zip', '.mov', '.avi', '.mpg', '.gif', '.jpg' ]:
                            ## @todo Consider storing these files outside the zip if they are a little largish.
                            # Already-compressed formats: store without deflating.
                            self.dprint('TestSet %d: Storing %s...' % (idTestSet, sFile));
                            oZipFile.write(sSrcFileBase + sFile, sFile, zipfile.ZIP_STORED);
                        else:
                            self.dprint('TestSet %d: Deflating %s...' % (idTestSet, sFile));
                            oZipFile.write(sSrcFileBase + sFile, sFile, zipfile.ZIP_DEFLATED);
                    oZipFile.close();

                    #
                    # .zip.tmp -> .zip.
                    #
                    utils.noxcptDeleteFile(sZipFileNm);
                    os.rename(sZipFileNm + '.tmp', sZipFileNm);
                #else: Dry run.
            else:
                self.dprint('TestSet %d: zip file exists already (%s)' % (idTestSet, sZipFileNm,));

        #
        # Delete the files.
        #
        fRc = True;
        if self.fVerbose:
            self.dprint('TestSet %d: deleting file: %s' % (idTestSet, asFiles));
        if not self.fDryRun:
            for sFile in asFiles:
                if utils.noxcptDeleteFile(sSrcFileBase + sFile) is False:
                    self.warning('TestSet %d: Failed to delete "%s" (%s)' % (idTestSet, sFile, sSrcFileBase + sFile,));
                    fRc = False;

        return fRc;

    def processDir(self, sCurDir):
        """
        Process the given directory (relative to sSrcDir and sDstDir).
        Returns success indicator.
        """
        if self.fVerbose:
            self.dprint('processDir: %s' % (sCurDir,));

        #
        # Sift thought the directory content, collecting subdirectories and
        # sort relevant files by test set.
        # Generally there will either be subdirs or there will be files.
        #
        asSubDirs = [];
        dTestSets = {};        # idTestSet (int) -> list of file-name suffixes.
        sCurPath = os.path.abspath(os.path.join(self.sSrcDir, sCurDir));
        for sFile in os.listdir(sCurPath):
            if os.path.isdir(os.path.join(sCurPath, sFile)):
                if sFile not in [ '.', '..' ]:
                    asSubDirs.append(sFile);
            elif sFile.startswith('TestSet-'):
                # Parse the file name. ASSUMES 'TestSet-%d-filename' format.
                iSlash1 = sFile.find('-');
                iSlash2 = sFile.find('-', iSlash1 + 1);
                if iSlash2 <= iSlash1:
                    self.warning('Bad filename (1): "%s"' % (sFile,));
                    continue;

                try:    idTestSet = int(sFile[(iSlash1 + 1):iSlash2]);
                except:
                    self.warning('Bad filename (2): "%s"' % (sFile,));
                    if self.fVerbose:
                        self.dprint('\n'.join(utils.getXcptInfo(4)));
                    continue;

                if idTestSet <= 0:
                    self.warning('Bad filename (3): "%s"' % (sFile,));
                    continue;

                if iSlash2 + 2 >= len(sFile):
                    self.warning('Bad filename (4): "%s"' % (sFile,));
                    continue;
                sName = sFile[(iSlash2 + 1):];

                # Add it.
                if idTestSet not in dTestSets:
                    dTestSets[idTestSet] = [];
                asTestSet = dTestSets[idTestSet];
                asTestSet.append(sName);

        #
        # Test sets.
        #
        fRc = True;
        for idTestSet in dTestSets:
            try:
                if self._processTestSet(idTestSet, dTestSets[idTestSet], sCurDir) is not True:
                    fRc = False;
            except:
                self.warning('TestSet %d: Exception in _processTestSet:\n%s' % (idTestSet, '\n'.join(utils.getXcptInfo()),));
                fRc = False;

        #
        # Sub dirs.
        #
        for sSubDir in asSubDirs:
            if self.processDir(os.path.join(sCurDir, sSubDir)) is not True:
                fRc = False;

        #
        # Try Remove the directory iff it's not '.' and it's been unmodified
        # for the last 24h (race protection).
        #
        if sCurDir != '.':
            try:
                fpModTime = float(os.path.getmtime(sCurPath));
                if fpModTime + (24*3600) <= time.time():
                    if utils.noxcptRmDir(sCurPath) is True:
                        self.dprint('Removed "%s".' % (sCurPath,));
            except:
                pass;

        return fRc;

    @staticmethod
    def main():
        """ C-style main(). """
        #
        # Parse options.
        #
        oParser = OptionParser();
        oParser.add_option('-v', '--verbose', dest = 'fVerbose', action = 'store_true', default = False,
                           help = 'Verbose output.');
        oParser.add_option('-q', '--quiet', dest = 'fVerbose', action = 'store_false', default = False,
                           help = 'Quiet operation.');
        oParser.add_option('-d', '--dry-run', dest = 'fDryRun', action = 'store_true', default = False,
                           help = 'Dry run, do not make any changes.');
        (oOptions, asArgs) = oParser.parse_args()
        if asArgs != []:
            oParser.print_help();
            return 1;

        #
        # Do the work.
        #
        oBatchJob = FileArchiverBatchJob(oOptions);
        fRc = oBatchJob.processDir('.');
        return 0 if fRc is True else 1;
if __name__ == '__main__':
    # Archive the whole file area, propagating the exit code to the shell.
    sys.exit(FileArchiverBatchJob.main());
|
$(document).ready(function(){
    var $btn_publish = $('#btn_publish'); // "publish" button
    var $btn_draft = $('#btn_draft');     // "save draft" button
    var $form = $('#form');

    // Initialize the iCheck checkbox skin and log state changes.
    $('#checkbox1').iCheck({
        checkboxClass: 'icheckbox_square-blue',
        radioClass: 'iradio_square-blue'
    }).on('ifChanged', function(e){
        console.log('checkbox1:状态:' + $('#checkbox1').is(':checked'));
    });

    // Declarative validation rules: both fields are required.
    $form.bootstrapValidator({
        fields: {
            mission_name: {
                message: '任务名称验证失败',
                validators: {
                    notEmpty: {
                        message: '请输入任务名称'
                    }
                }
            },
            content: {
                message: '任务内容验证失败',
                validators: {
                    notEmpty: {
                        message: '请输入任务内容'
                    }
                }
            }
        }
    });

    // Run form validation manually; warn the user on failure.
    function checkValidation() {
        var bsValidator = $form.data('bootstrapValidator');
        bsValidator.validate();
        var result = bsValidator.isValid();
        if(!result) {
            toastr.warning('您输入的表单信息验证未通过');
        }
        return result;
    }

    // ===== Treeview dropdown (personnel picker) controls =====
    // Comma-separated names of the initially selected users.
    var selectedUserStr = "黄任勋";
    // Keep the bootstrap dropdown open while clicking inside the tree.
    $(document).on('click.bs.dropdown.data-api', '.dropdown .treeview-select-tree', function (e) { e.stopPropagation() });

    // Bug fix: declared with 'var' — it used to leak as an implicit global.
    var user_list = [
        {
            text: "董事局",
            icon: "fa fa-users",
            type: "组织",
            root: true,
            nodes: [
                {
                    text: "黄任勋",
                    icon: "fa fa-user-circle-o",
                    tags: ['董事局主席'],
                    type: "个人"
                },
                {
                    text: "张部龄",
                    icon: "fa fa-user-circle-o",
                    tags: ['董事,总经理'],
                    type: "个人"
                },
                {
                    text: "付洪涛",
                    icon: "fa fa-user-circle-o",
                    tags: ['董事,副总经理'],
                    type: "个人"
                },
                {
                    text: "肖可峰",
                    icon: "fa fa-user-circle-o",
                    tags: ['董事'],
                    type: "个人"
                },
                {
                    text: "行政部",
                    icon: "fa fa-users",
                    type: "组织",
                    nodes: [
                        {
                            text: "张晓晓",
                            icon: "fa fa-user-circle-o",
                            tags: ['副行政部长'],
                            type: "个人"
                        },
                        {
                            text: "胡星亮",
                            icon: "fa fa-user-circle-o",
                            tags: ['主任'],
                            type: "个人"
                        }
                    ]
                },
                {
                    text: "财政部",
                    icon: "fa fa-users",
                    type: "组织",
                    nodes: [
                        {
                            text: "周一博",
                            icon: "fa fa-user-circle-o",
                            tags: ['财政部长'],
                            type: "个人"
                        },
                        {
                            text: "兰于东",
                            icon: "fa fa-user-circle-o",
                            tags: ['副财政部长'],
                            type: "个人"
                        },
                        {
                            text: "赵西冷",
                            icon: "fa fa-user-circle-o",
                            tags: ['会计'],
                            type: "个人"
                        }
                    ]
                }
            ]
        }
    ];

    $('#tree').treeview({
        data:user_list,
        levels: 1, // expand one level by default
        showTags: false,
        selectable: false,
        showCheckbox : false,
        checkboxFirst : true,
        hierarchicalCheck : true,
        propagateCheckEvent : true,
        collapseIcon: 'fa fa-minus-square',
        expandIcon: 'fa fa-plus-square'
    });

    // Keep the hidden input / trigger button in sync with the selection.
    $('#tree').on('nodeSelected', function(event, data) {
        fillTreeviewSelectList($('#notice_list'), $('#btn_notice_list'), $('#tree').treeview('getSelected'), '请选择人员:');
        console.log($('#notice_list').val());
    });
    $('#tree').on('nodeUnselected', function(event, data) {
        fillTreeviewSelectList($('#notice_list'), $('#btn_notice_list'), $('#tree').treeview('getSelected'), '请选择人员:');
        console.log($('#notice_list').val());
    });

    checkTreeviewSelect($('#tree'), selectedUserStr);
    fillTreeviewSelectList($('#notice_list'), $('#btn_notice_list'), $('#tree').treeview('getSelected'), '请选择人员:');

    // Select the tree nodes whose text appears in the comma-separated string.
    function checkTreeviewSelect($tree, selectedUserStr) {
        if(selectedUserStr.length > 0) {
            var selectedUserArray = selectedUserStr.split(',');
            var nodes = $tree.treeview('getNodes');
            for(var i in nodes) {
                for(var j in selectedUserArray) {
                    if(nodes[i].text == selectedUserArray[j]) {
                        // Using checkNode instead would check children without
                        // putting parents in the "partially checked" state:
                        //$tree.treeview('checkNode',[nodes[i], {silent:false}]);
                        $tree.treeview('selectNode',[nodes[i], {silent:false}]);
                    }
                }
            }
        }
    }

    // Render the selected person nodes into the form input and button label.
    //   $input       hidden input carrying the value on form submit
    //   $btn         dropdown trigger button; shows selection or defaultText
    //   checkedNodes currently selected tree nodes
    //   defaultText  label shown when nothing is selected
    function fillTreeviewSelectList($input, $btn, checkedNodes, defaultText) {
        var defaultStr = defaultText ? defaultText : '请选择';
        if(checkedNodes.length > 0) {
            var listStrArray = [];
            var listStrSeparator = ",";
            for(var i in checkedNodes) {
                if(checkedNodes[i].type == '个人') {
                    listStrArray.push(checkedNodes[i].text);
                }
            }
            if(listStrArray.length > 0) {
                var listStr = listStrArray.join(listStrSeparator);
                $input.val(listStr);
                $btn.text(listStr);
            } else {
                $input.val('');
                $btn.text(defaultStr);
            }
        } else {
            $input.val('');
            $btn.text(defaultStr);
        }
    }

    // Collect the text of nodes whose type is "个人" (person).
    // NOTE(review): currently unused inside this closure.
    function getCheckdPersonal(nodes) {
        var personalArray = [];
        if(nodes.length > 0) {
            for(var i in nodes) {
                if(nodes[i].type == '个人') {
                    personalArray.push(nodes[i].text);
                }
            }
        }
        return personalArray;
    }
    // ===== end treeview dropdown controls =====

    // Publish.
    $btn_publish.click(function(){
        alert('选中的人:' + $('#tree').treeview('getSelected')[0].text);
        if(checkValidation()) {
            // Submit the data here; on success return to the parent page.
            // For this prototype, jump straight to the respond page.
            window.location.href='oa-personal-mission-respond.html';
        }
    });

    // Save draft.
    $btn_draft.click(function(){
        if(checkValidation()) {
            // Submit the data here; on success show the toast below.
            toastr.success('您编辑的内容已暂存');
        }
    });
});
|
# coding: utf-8
"""
flyteidl/service/admin.proto
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: version not set
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from flyteadmin.models.conjunction_expression_logical_operator import ConjunctionExpressionLogicalOperator # noqa: F401,E501
from flyteadmin.models.core_boolean_expression import CoreBooleanExpression # noqa: F401,E501
class CoreConjunctionExpression(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'operator': 'ConjunctionExpressionLogicalOperator',
'left_expression': 'CoreBooleanExpression',
'right_expression': 'CoreBooleanExpression'
}
attribute_map = {
'operator': 'operator',
'left_expression': 'left_expression',
'right_expression': 'right_expression'
}
def __init__(self, operator=None, left_expression=None, right_expression=None): # noqa: E501
"""CoreConjunctionExpression - a model defined in Swagger""" # noqa: E501
self._operator = None
self._left_expression = None
self._right_expression = None
self.discriminator = None
if operator is not None:
self.operator = operator
if left_expression is not None:
self.left_expression = left_expression
if right_expression is not None:
self.right_expression = right_expression
@property
def operator(self):
"""Gets the operator of this CoreConjunctionExpression. # noqa: E501
:return: The operator of this CoreConjunctionExpression. # noqa: E501
:rtype: ConjunctionExpressionLogicalOperator
"""
return self._operator
@operator.setter
def operator(self, operator):
"""Sets the operator of this CoreConjunctionExpression.
:param operator: The operator of this CoreConjunctionExpression. # noqa: E501
:type: ConjunctionExpressionLogicalOperator
"""
self._operator = operator
@property
def left_expression(self):
"""Gets the left_expression of this CoreConjunctionExpression. # noqa: E501
:return: The left_expression of this CoreConjunctionExpression. # noqa: E501
:rtype: CoreBooleanExpression
"""
return self._left_expression
@left_expression.setter
def left_expression(self, left_expression):
"""Sets the left_expression of this CoreConjunctionExpression.
:param left_expression: The left_expression of this CoreConjunctionExpression. # noqa: E501
:type: CoreBooleanExpression
"""
self._left_expression = left_expression
    @property
    def right_expression(self):
        """Gets the right_expression of this CoreConjunctionExpression. # noqa: E501

        :return: The right_expression of this CoreConjunctionExpression. # noqa: E501
        :rtype: CoreBooleanExpression
        """
        return self._right_expression

    @right_expression.setter
    def right_expression(self, right_expression):
        """Sets the right_expression of this CoreConjunctionExpression.

        No validation is performed; the value is stored as given.

        :param right_expression: The right_expression of this CoreConjunctionExpression. # noqa: E501
        :type: CoreBooleanExpression
        """
        self._right_expression = right_expression
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(CoreConjunctionExpression, dict):
for key, value in self.items():
result[key] = value
return result
    def to_str(self):
        """Returns the pretty-printed string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        # Delegate to to_str() so repr and str output match.
        return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CoreConjunctionExpression):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
/**
 * Map client for the MGDS south-polar data browser.
 * Reads all settings from the page-level `configuration` global and layer
 * metadata from the `layer_info` global; both must be defined before this
 * constructor runs.
 */
function MGDSMapClient() {
    var conf = configuration;
    this.maplat = conf.map_lat;
    this.maplon = conf.map_lon;
    this.defaultZoom = conf.defaultZoom;
    this.maxzoom = conf.max_zoom;
    this.minzoom = conf.min_zoom;
    this.mapdiv = conf.default_container;
    // Array literals instead of `new Array()` — same behavior, idiomatic JS.
    this.layers = [];
    this.mtoverlays = [];
    this.qurl = conf.query_url;
    this.markers = [];
    this.markerCluster = [];
    this.baseLayersbool = true;
    this.grat = null;
    this.mapdivobject = null;   // populated with the jQuery node in mapInit()
    this.linkhref = conf.logo_href;
    this.linkclass = conf.logo_class;
    this.imgsrc = conf.logo_url;
    this.imgtitle = conf.logo_title;
    this.imgwidth = conf.logo_width;
    this.epsg = conf.epsg;
    // NOTE(review): both the snake_case (max_zoom/min_zoom) and camelCase
    // (minZoom/maxZoom) pairs are kept; only the camelCase pair is read by
    // mapInit()'s ol.View. Confirm the snake_case pair is still needed.
    this.minZoom = conf.minZoom;
    this.maxZoom = conf.maxZoom;
    this.layer_info = layer_info;
}
// Create the OpenLayers map in EPSG:3031 (Antarctic polar stereographic),
// wire up the drawing buttons, layer switcher, logo and info-window overlay.
// The `hide`, `options` and `off` parameters are not used in this body —
// presumably kept for an older call signature; TODO confirm callers.
MGDSMapClient.prototype.mapInit = function(hide,options,off) {
    var self = this;
    this.mapdivobject = $('#'+this.mapdiv);
    this.vectorSrc = null;
    this.drawingTool = null;
    // Register the polar-stereographic projection with proj4 before asking OL for it.
    proj4.defs('EPSG:3031', '+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs');
    var projection = ol.proj.get(this.epsg);
    projection.setWorldExtent([-180.0000, -90.0000, 180.0000, -60.0000]);
    projection.setExtent([-8200000, -8200000, 8200000, 8200000]);
    //projection.setExtent([-12400000,-12400000,12400000,12400000]);
    // NOTE(review): OL view centers are projected [x, y] coordinates; the
    // [maplat, maplon] ordering here looks swapped — confirm against the
    // values supplied in `configuration`.
    this.map = new ol.Map({
        view: new ol.View({
            center: [this.maplat, this.maplon],
            zoom: this.defaultZoom,
            minZoom: this.minZoom,
            maxZoom: this.maxZoom,
            projection: projection,
            extent: projection.getExtent()
        }),
        target: document.getElementById(this.mapdiv)
    });
    this.graticule = new ol.Graticule();
    this.map.addControl(new LonLatElevControl({mapclient: this}));
    this.addLayers();
    this.map.on('singleclick', this.selectPoint, this)
    // Clicking a drawing button switches the draw mode and rebuilds the tool.
    $('.drawing-button').on('click', function(e) {
        self.setDrawMode($(this).attr('data-mode'));
        if (self.drawingTool) {
            self.map.removeInteraction(self.drawingTool);
        }
        self.addDrawingTool();
    });
    this.addDrawingTool();
    var logo = new ol.control.Control({
        element: document.getElementById('iedaLogo')
    });
    this.map.addControl(logo);
    this.createLayerSwitcher();
    // Reusable popup overlay; selectPoint() swaps in its content per click.
    this.infowin = new ol.Overlay({element: document.createElement('div') });
    this.map.addOverlay(this.infowin);
}
// Build a collapsible legend panel for a layer that declares `legend_html`
// and append it to #content. No-op for layers without legend markup.
MGDSMapClient.prototype.createLegend = function(lyr) {
    var html = lyr.get('legend_html');
    if (!html) return;
    this.legendDiv = document.createElement("div");
    this.legendDiv.className = "legendcontrol";
    this.legendDiv.innerHTML = '<div class="tabwrapper"><div class="tabboxlegend tabbox">Legend</div></div>';
    this.limgDiv = document.createElement("div");
    this.limgDiv.className = "maplegend";
    this.limgDiv.innerHTML = html;
    this.legendDiv.appendChild(this.limgDiv);
    // Re-bind the toggle handler (off first so repeated calls don't stack handlers).
    $(document).off('click','.tabboxlegend');
    $(document).on('click','.tabboxlegend',function(){
        if ($(".legendcontrol .maplegend").is(":visible")) {
            $(".legendcontrol .maplegend").hide();
        } else {
            $(".legendcontrol .maplegend").show();
        }
    });
    $('#content').append(this.legendDiv);
}
// Show the layer-description panel only while at least one switchable
// layer is currently visible; hide it otherwise.
MGDSMapClient.prototype.checkLayerCount = function() {
    var self = this;
    var anyVisible = Object.keys(this.layerMap).some(function(name) {
        var layer = self.layerMap[name];
        return layer.get('switchable') && layer.getVisible();
    });
    var panel = $('#data_layer_descriptions');
    if (anyVisible) {
        panel.show();
    } else {
        panel.hide();
    }
}
// Make the named layer visible, append its description entry, and build its
// legend if it declares one. `'...'.format(...)` presumably comes from a
// String.prototype.format helper defined elsewhere — TODO confirm.
MGDSMapClient.prototype.turnOnLayer = function(title) {
    var lyr = this.layerMap[title];
    var all_descs = $('#data_layer_descriptions');
    var desc = $('<div data-layer="{0}"><b>{0}</b> - {1}</div>'.format(title, lyr.get('description')));
    all_descs.append(desc);
    if (lyr.get('legend_html')) {
        this.createLegend(lyr);
    }
    lyr.setVisible(true);
    this.checkLayerCount();
}
// Hide the named layer and remove its description entry and legend panel.
// Mirror image of turnOnLayer().
MGDSMapClient.prototype.turnOffLayer = function(title) {
    var lyr = this.layerMap[title];
    var all_descs = $('#data_layer_descriptions');
    var desc = all_descs.children('div[data-layer="{0}"]'.format(title));
    desc.remove();
    if (lyr.get('legend_html')) {
        $('.legendcontrol').remove();
    }
    lyr.setVisible(false);
    this.checkLayerCount()
}
/**
 * Build the layer-switcher control: one toggle button per switchable
 * overlay, plus graticule / GMRT-mask / projection image buttons.
 * Cleanup: removed an unused `var lyr = self.layerMap[title]` lookup and
 * unified the mixed `this`/`self` usage inside the forEach callback.
 */
MGDSMapClient.prototype.createLayerSwitcher = function() {
    var self = this;
    this.mapControl = new MapControl();
    // Fixed display order of the switchable overlays.
    var layer_order = ["EarthChem Portal", "MGDS Cruise Tracks", "Geochron", "GMRT High-Res", "SESAR Samples", "USAP-DC", "Seismic Data"];
    layer_order.forEach(function(title) {
        self.mapControl.addLayer(title);
        var button = self.mapControl.getLayerButton(title);
        $(button).on('turn-on', function() {
            self.turnOnLayer(title);
        });
        $(button).on('turn-off', function() {
            // GMRT High-Res stays on while the 'mask' image-button is active.
            var turnOffGMRT = (title === "GMRT High-Res" && !self.mapControl.imgButtonIsOn('mask'));
            if (title !== "GMRT High-Res" || turnOffGMRT) {
                self.turnOffLayer(title);
            }
        });
    });
    var gratBtn = $(this.mapControl.getImgButton('graticule'));
    $(gratBtn).on('turn-on', function() {
        self.graticule.setMap(self.map);
    });
    $(gratBtn).on('turn-off', function() {
        self.graticule.setMap(null);
    });
    // The 'mask' button toggles the GMRT High-Res layer independently of its
    // layer button; turn-off only applies when the layer button is also off.
    $(this.mapControl.getImgButton('mask')).on('turn-on', function() {
        self.turnOnLayer('GMRT High-Res');
    });
    $(this.mapControl.getImgButton('mask')).on('turn-off', function() {
        if (!self.mapControl.layerButtonIsOn('GMRT High-Res')) {
            self.turnOffLayer('GMRT High-Res');
        }
    });
    // South-polar projection indicator is always on and not toggleable.
    $(this.mapControl.getImgButton('south_polar')).addClass('on').removeClass('toggle');
    $('#content').append(this.mapControl.getElement());
}
// Flip the visibility of the named layer.
MGDSMapClient.prototype.toggleLayer = function(title) {
    var layer = this.layerMap[title];
    var visible = layer.getVisible();
    layer.setVisible(!visible);
}
// Current draw mode, read from the active drawing button ('None', 'Box', ...).
MGDSMapClient.prototype.getDrawMode = function() {
    return $('.drawing-button.draw-active').attr('data-mode');
}
// Activate the drawing button whose data-mode matches `str` (and only it).
MGDSMapClient.prototype.setDrawMode = function(str) {
    $('#drawing-buttons .drawing-button').removeClass('draw-active');
    $('#drawing-buttons .drawing-button[data-mode="{0}"]'.format(str)).addClass('draw-active');
}
// This function handles clicks on features on the map,
// creating a popup with info about the feature.
// Fixes applied:
//  * `this.map.getZoom()` -> `this.map.getView().getZoom()` — ol.Map has no
//    getZoom(); the zoom lives on the view.
//  * duplicate `_` parameter names in the forEach callback removed (a
//    SyntaxError under strict mode).
MGDSMapClient.prototype.selectPoint = function(evt) {
    // Ignore clicks while a drawing tool is active.
    if (this.getDrawMode() !== 'None') { return; }
    var self = this;
    var clearInfowin = function() {
        self.iconSrc.clear();
        self.infowin.setPosition(undefined);
    }
    clearInfowin();
    var tabs = $('<div id="tabs">');
    var tab_links = $('<ul />');
    var tab_content = $('<div style="max-height:300px; overflow:auto"/>');
    var infowin_wrapper = $('<div id="infowin-wrapper"/>');
    var close_button = $('<div id="close-button">×</div>');
    infowin_wrapper.append(close_button);
    var coord = evt.coordinate;
    var lonlat = this.toLonLat(coord);
    var pixel = evt.pixel;
    // 5px query box around the click for WMS GetFeatureInfo BBOX requests.
    var ur_coord = this.map.getCoordinateFromPixel([pixel[0]+5, pixel[1]-5]);
    var ll_coord = this.map.getCoordinateFromPixel([pixel[0]-5, pixel[1]+5]);
    var tab_num = 0;
    var requests = [];
    // Fire one info request per visible, clickable overlay; each successful
    // non-empty response becomes a tab in the popup.
    this.map.getLayers().forEach(function(layer) {
        var clickevent = layer.get('clickevent');
        if (layer.getVisible() && layer.get('type') !== 'base' && clickevent) {
            var data = clickevent.qurl_params;
            var qurl = clickevent.qurl;
            data['SRS'] = 'EPSG:3031';
            if (data['SERVICE'] == 'WMS') {
                data['BBOX'] = ll_coord.concat(ur_coord).join(',');
            } else {
                data['lat'] = lonlat[1];
                data['lon'] = lonlat[0];
                // Bug fix: zoom is a property of the view, not the map.
                data['zoom'] = this.map.getView().getZoom();
            }
            var str = decodeURIComponent($.param(data));
            var ajax_defaults = {
                type: "GET",
                url: qurl,
                data: str,
                async: false,
                success: function(msg) {
                    if (clickevent.msg_transform) { msg = clickevent.msg_transform(msg); }
                    if (msg) {
                        tab_num += 1;
                        var new_link = $('<li><a href="#tab{0}">{1}</a></li>'.format(tab_num, layer.get('title')));
                        var new_tab= $('<div id="tab{0}" >{1}</div>'.format(tab_num,msg));
                        tab_links.append(new_link);
                        tab_content.append(new_tab);
                    }
                }
            }
            requests.push($.ajax($.extend({},ajax_defaults,clickevent.ajax_opts)));
        }
    }, this);
    // Once every layer request settles, assemble and show the popup.
    $.when.apply(undefined, requests).done(function() {
        tabs.append(tab_links);
        tabs.append(tab_content);
        infowin_wrapper.append(tabs);
        tabs.tabs();
        if (tab_num > 0) {
            self.infowin.setElement(infowin_wrapper[0]);
            self.infowin.setPosition(coord);
            var iconFeature = new ol.Feature({
                geometry: new ol.geom.Point(coord)
            });
            var iconStyle = new ol.style.Style({
                image: new ol.style.Icon ({
                    src: '/databrowser/img/map-marker-icon.png',
                    anchor: [0.5, 0.8]
                })
            });
            iconFeature.setStyle(iconStyle);
            self.iconSrc.addFeature(iconFeature);
        }
        close_button.on('click', function(e) {
            clearInfowin();
        });
        // Expand/collapse handlers for EarthChem "turndown" rows; content is
        // lazily fetched on the first expansion.
        tab_content.find('.turndown').on("click",function() {
            var aself = this;
            var tbox = $(this).parent();
            var tcontent = tbox.children('.tcontent');
            var img;
            if (tcontent.is(':visible')) {
                img = '/images/arrow_show.gif';
            } else {
                img = '/images/arrow_hide.gif';
            }
            $(this).find('img').attr('src',img);
            if (!tbox.hasClass('has-content')) {
                $.ajax({
                    type: "GET",
                    url: 'http://ecp.iedadata.org/ged/'+$(this).parent().attr('data-uuid'),
                    success: function(msg){
                        tcontent.html(msg);
                        tbox.addClass('has-content');
                        tcontent.toggle();
                    }
                });
            } else {
                tcontent.toggle();
            }
        });
    });
}
// Install an ol.interaction.Draw matching the current draw mode. 'Box' is
// emulated with a two-point LineString plus a geometryFunction that builds
// the rectangle. Dispatches a 'polygon-change' DOM event when a shape lands.
MGDSMapClient.prototype.addDrawingTool = function() {
    var value = this.getDrawMode();
    if (value !== 'None') {
        var maxPoints, geometryFunction;
        if (value === 'Box') {
            value = 'LineString';
            maxPoints = 2;
            // Build a closed rectangle from the two dragged corner points.
            geometryFunction = function(coordinates, geometry) {
                if (!geometry) {
                    geometry = new ol.geom.Polygon(null);
                }
                var start = coordinates[0];
                var end = coordinates[1];
                geometry.setCoordinates([
                    [start, [start[0], end[1]], end, [end[0], start[1]], start]
                ]);
                return geometry;
            };
        }
        this.drawingTool = new ol.interaction.Draw({
            source: this.vectorSrc,
            type: value,
            geometryFunction: geometryFunction,
            maxPoints: maxPoints
        });
        this.map.addInteraction(this.drawingTool);
        var self = this;
        // NOTE(review): ol.interaction.DrawEventType is an OL-internal symbol
        // in most builds; the public equivalent is the 'drawstart' event
        // string — confirm against the bundled OL version.
        this.drawingTool.on(ol.interaction.DrawEventType.DRAWSTART, function() {self.vectorSrc.clear()});
        this.vectorSrc.on('addfeature', function() {
            //self.setDrawMode('None');
            var features = self.vectorSrc.getFeatures();
            if (features.length !== 1) {
                throw "Expected only one feature to be drawn.";
            }
            var coords = features[0].getGeometry().getCoordinates()[0];
            var polygon_change = new CustomEvent('polygon-change', {'detail': coords});
            document.dispatchEvent(polygon_change);
        });
    }
}
/**
 * Custom OL control that shows the cursor's lon/lat continuously and, once
 * the cursor rests for 200 ms, the elevation fetched from the marine-geo
 * point server.
 * Fix: `evt_xy` is now declared with `var` — it previously leaked into the
 * global scope on every mousemove.
 */
LonLatElevControl = function(options) {
    var mapclient = options.mapclient;
    var self = this;
    this.movementTimer = null;   // debounce timer for the elevation lookup
    var element = document.createElement('div');
    var lon = document.createElement('div');
    var lat = document.createElement('div');
    var elev = document.createElement('div');
    element.appendChild(lon);
    element.appendChild(lat);
    element.appendChild(elev);
    element.className = 'latlonelev-control ol-control';
    mapclient.mapdivobject.mousemove(function(evt) {
        var evt_xy = mapclient.map.getEventCoordinate(evt);
        var lonlat = mapclient.toLonLat(evt_xy);
        lon.innerHTML = 'lon: ' + lonlat[0].toFixed(6);
        lat.innerHTML = 'lat: ' + lonlat[1].toFixed(6);
        // Debounce: only query elevation after the cursor stops moving.
        clearTimeout(self.movementTimer);
        self.movementTimer = setTimeout(function(){
            $.ajax({
                type: "GET",
                url: "http://www.marine-geo.org/services/pointserver.php",
                data: {
                    'latitude': lonlat[1].toFixed(6),
                    'longitude': lonlat[0].toFixed(6)
                },
                async: true,
                success: function(msg){
                    elev.innerHTML = 'elev: ' + msg+' m';
                }
            });
        }, 200);
    });
    ol.control.Control.call(this, {
        element: element,
        target: options.target
    });
};
// Make LonLatElevControl a subclass of ol.control.Control (OL3-era API).
ol.inherits(LonLatElevControl, ol.control.Control);
// Convert a projected map coordinate (this.epsg) to [lon, lat] degrees.
MGDSMapClient.prototype.toLonLat = function(pt) {
    return ol.proj.toLonLat(pt, this.epsg);
}
// Register every map layer: two GMRT base layers, the switchable data
// overlays (each with its GetFeatureInfo `clickevent` config), the drawing
// and icon vector layers, then index them all by title in this.layerMap and
// merge in per-layer metadata from the `layer_info` global.
MGDSMapClient.prototype.addLayers = function() {
    // Base bathymetry (always on, not switchable).
    this.map.addLayer(new ol.layer.Tile({
        type: 'base',
        title: 'GMRT',
        source: new ol.source.TileWMS({
            params: {
                'LAYERS': 'South_Polar_Bathymetry'
            },
            url: //"http://www.marine-geo.org/services/wms_SP?",
            "https://www.gmrt.org/services/mapserv/wms_SP?",
            serverType: "mapserver"
        }),
        switchable: false
    }));
    // High-resolution masked bathymetry, toggled by the 'mask' image button.
    this.map.addLayer(new ol.layer.Tile({
        type: 'base',
        title: 'GMRT High-Res',
        source: new ol.source.TileWMS({
            params: {
                'LAYERS': 'South_Polar_Bathymetry'
            },
            url: //"http://www.marine-geo.org/services/wms_SP_mask?",
            "https://www.gmrt.org/services/mapserv/wms_SP_mask?",
            serverType: "mapserver"
        }),
        visible: false,
        switchable: true
    }));
    this.map.addLayer(new ol.layer.Tile({
        title: 'EarthChem Portal',
        source: new ol.source.TileWMS({
            params: {'LAYERS': 'ECPoints',
                'VERSION': '1.1.1',
                'reaspect': 'false',
                'BGCOLOR': '0xFFFFFF',
                'FORMAT': 'image/gif'
            },
            url: "http://www.earthchemportal.org/ecpointswms?"
        }),
        visible: false,
        clickevent: {
            qurl: "/databrowser/inc/ecp_select_point.php?",
            qurl_params: {
                SERVICE: 'WMS',
                REQUEST: 'GetFeatureInfo',
            }
        },
        switchable: true,
        legend_html: '<img src="http://ecp.iedadata.org/legend.jpg" alt="Map Legend" style="width:300px" />'
    }));
    this.map.addLayer(new ol.layer.Tile({
        title: 'MGDS Cruise Tracks',
        source: new ol.source.TileWMS({
            params: {'LAYERS': 'TracksAll',
                'VERSION': '1.1.1',
                'reaspect': 'false',
                'BGCOLOR': '0xFFFFFF',
                'FORMAT': 'image/png'
            },
            url: "http://www.marine-geo.org/tools/new_search/databrowser_wms.php?"
        }),
        visible: false,
        clickevent: {
            qurl: "http://www.marine-geo.org/tools/new_search/databrowser_wms.php?",
            qurl_params: {
                SERVICE: 'WMS',
                REQUEST: 'GetFeatureInfo',
            }
        },
        switchable: true
    }));
    this.map.addLayer(new ol.layer.Tile({
        title: 'Geochron',
        source: new ol.source.TileWMS({
            params: {'LAYERS': 'GeochronPoints',
                'VERSION': '1.1.1',
                'reaspect': 'false',
                'BGCOLOR': '0xFFFFFF',
                'FORMAT': 'image/png'
            },
            url: "http://www.geochron.org/cgi-bin/mapserv?map=/var/www/geochronmap.map&"
        }),
        visible: false,
        clickevent:{
            qurl: "/databrowser/inc/geochron_select_point.php?",
            qurl_params: {
                SERVICE: 'WMS',
                REQUEST: 'GetFeatureInfo',
            }
        },
        switchable: true
    }));
    this.map.addLayer(new ol.layer.Tile({
        title: 'USAP-DC',
        source: new ol.source.TileWMS({
            params: {'LAYERS': 'Astro-Geo,Earth,Glacier,Integrated,Ocean-Atmosphere,Bio',
                'VERSION': '1.1.0',
                'FORMAT': 'image/png'
            },
            url: "http://api.usap-dc.org:81/wfs?"
        }),
        clickevent: {
            qurl: "/databrowser/inc/usap_select_point.php?",
            qurl_params: {
                SERVICE: 'WMS',
                REQUEST: 'GetFeatureInfo',
                INFO_FORMAT: 'text/html'
            }
        },
        visible: false,
        switchable: true
    }));
    this.map.addLayer(new ol.layer.Tile({
        title: 'Seismic Data',
        source: new ol.source.TileWMS({
            params: {
                'LAYERS' : 'MGDS-DataSetsLines,MGDS-DataSets,MGDS-DataSets-Points,MGDS-DataObjects-OBS,MGDS-DataObjects-Points-OBS,MGDS-DataStations-OBS,UTIG-DataSet',
                'VERSION' : '1.1.0',
                'FORMAT' : 'image/png',
                'SLD' : 'http://dev.marine-geo.org/services/sld/databrowser_sld.xml'
            },
            url: 'http://www.marine-geo.org/services/mapserv7/seismic_data?'
        }),
        clickevent: {
            qurl: "/databrowser/inc/seismicClickEvent.php?",
            qurl_params: {
                SERVICE : "WMS",
                REQUEST : "GetFeatureInfo",
                SRS : "EPSG:3031",
                QUERY_LAYERS: 'MGDS-DataSetsLines,MGDS-DataSets,MGDS-DataSets-Points,MGDS-DataObjects-OBS,MGDS-DataObjects-Points-OBS,MGDS-DataStations-OBS,UTIG-DataSet',
                WIDTH : 4,
                HEIGHT : 4,
                X : 2,
                Y : 2,
                VERSION : "1.0.0",
                INFO_FORMAT : "gml",
                SLD : "http://dev.marine-geo.org/services/sld/databrowser_sld.xml"
            }
        },
        visible: false,
        switchable: true
    }));
    var self = this;
    // SESAR responses come back as JSON and are rendered client-side via
    // msg_transform (unlike the HTML-returning layers above).
    this.map.addLayer(new ol.layer.Tile({
        title: 'SESAR Samples',
        source: new ol.source.TileWMS({
            params: {'LAYERS': 'SESAR:wfs_samples',
                'VERSION': '1.1.1',
                'reaspect': 'false',
                'BGCOLOR': '0xFFFFFF',
                'FORMAT': 'image/png'
            },
            url: "http://prod-app.earthchem.org:8989/geoserver/SESAR/wms?SLD=http://" + document.location.hostname + "/sesar_sld.xml&"
        }),
        visible: false,
        clickevent: {
            ajax_opts: { dataType: "json" },
            msg_transform: function(msg) {
                if (msg.features.length === 0) {return "";}
                var content = '<div style="min-width:200px;text-align:left;">';
                var k;
                for (k in msg.features) {
                    content += "<div style=\"margin-bottom:2px;\"><b>"+msg.features[k].properties.object_type+"</b> <a target=\"_blank\" href=\""+msg.features[k].properties.url+"\">"+msg.features[k].properties.object_name+"</a></div>";
                }
                content+="</div>";
                return content;
            },
            qurl: 'http://' + document.location.hostname + "/databrowser/inc/sesarwrapper.php?",
            qurl_params: {
                SERVICE: 'WMS',
                REQUEST: 'GetFeatureInfo',
                VERSION: '1.0.0',
                WIDTH: '4',
                HEIGHT: '4',
                X: '2',
                Y: '2',
                INFO_FORMAT: 'application/json',
                QUERY_LAYERS: 'SESAR:wfs_samples',
                LAYERS: 'SESAR:wfs_samples',
                FEATURE_COUNT: '50',
            }
        },
        switchable: true
    }));
    // Vector scratch layers: user drawings and the popup marker icon.
    this.vectorSrc = new ol.source.Vector();
    this.map.addLayer(new ol.layer.Vector({
        title: "Drawing Layer",
        source: this.vectorSrc
    }));
    this.iconSrc = new ol.source.Vector();
    this.map.addLayer(new ol.layer.Vector({
        title: "Icon Layer",
        source: this.iconSrc
    }));
    // Index layers by title and fold in external metadata (descriptions,
    // legends, ...) from the `layer_info` global.
    this.layerMap = {};
    this.map.getLayers().forEach(function(lyr) {
        var title = lyr.get('title');
        this.layerMap[title] = lyr;
        for (var k in layer_info[title]) {
            lyr.set(k, layer_info[title][k]);
        }
    },this);
}
/**
 * Densify each edge of the projected polygon, convert the samples to
 * lon/lat, and return the geographic bounding box {w, e, s, n} containing
 * them all. Latitude is clamped at min_lat since the projection degrades
 * toward the pole.
 * Fix: the interpolated points are now stored as NUMBERS — the original
 * pushed the strings returned by toFixed(), which breaks coordinate math
 * (`+` concatenates strings) inside the projection transform.
 */
MGDSMapClient.prototype.getContainingBBOX = function(poly) {
    var interpolated = [];
    var min_lat = -85;        // southern clamp for the returned bbox
    var resolution = 10;      // interpolation samples per polygon edge
    for (var i = 0; i < poly.length-1; i++) {
        var pt1 = poly[i];
        var pt2 = poly[i+1];
        for (var j = 0; j < resolution; j++) {
            var midpt_x = pt1[0] + (j*(pt2[0] - pt1[0])/resolution);
            var midpt_y = pt1[1] + (j*(pt2[1] - pt1[1])/resolution);
            // Round to 6 decimals but keep numeric type.
            interpolated.push([Number(midpt_x.toFixed(6)), Number(midpt_y.toFixed(6))]);
        }
    }
    var self = this;
    interpolated = interpolated.map(function(pt) { return self.toLonLat(pt); });
    var interpolated_x = interpolated.map(function(pt) { return pt[0]; });
    var interpolated_y = interpolated.map(function(pt) { return pt[1]; });
    var w = Math.min.apply(undefined, interpolated_x);
    var e = Math.max.apply(undefined, interpolated_x);
    var s = Math.max(min_lat, Math.min.apply(undefined, interpolated_y));
    var n = Math.max.apply(undefined, interpolated_y);
    return {w:w, e:e, s:s, n:n};
}
|
/**
* Copyright IBM Corp. 2017
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const assert = require('assert');
const nock = require('nock');
// Fake the Cloud Functions action name so the code under test can derive
// namespace/package names from the environment, as it would when deployed.
process.env.__OW_ACTION_NAME = `/${process.env.__OW_NAMESPACE}/pipeline_pkg/action-to-test`;
const actionLoadContext = require('../../../context/load-context.js');
const paramsJson = require('../../resources/payloads/test.unit.context.json').loadContextJson;
const Cloudant = require('cloudant');
const invalidCloudantUrl = 'invalid-url';
// Error messages expected from validateParams().
const errorNoRawInputData = 'params.raw_input_data absent in params.';
const errorNoCloudantContextKey = 'cloudant_context_key absent in params.raw_input_data.';
const errorNoConversationObj = 'conversation object absent in params.';
// Tests for validateParams(). Bug fix: each try/catch now fails the test
// when validateParams() does NOT throw — previously a non-throwing call
// passed silently because nothing ran after func(params).
describe('Load Context Unit Tests: validateParams()', () => {
  let params = {};
  let func; // Function to test
  beforeEach(() => {
    // merge the two objects, deep copying packageBindings so it doesn't get changed between tests
    // and we only have to read it once
    params = Object.assign({}, JSON.parse(JSON.stringify(paramsJson)));
    func = actionLoadContext.validateParams;
  });
  it('validate error when no raw_input_data', () => {
    // Use request params for main function
    params = params.main.request;
    delete params.raw_input_data;
    try {
      func(params);
      assert.fail('validateParams() should have thrown.');
    } catch (e) {
      assert.equal('AssertionError', e.name);
      assert.equal(e.message, errorNoRawInputData);
    }
  });
  it('validate error when no cloudant_context_key', () => {
    // Use request params for main function
    params = params.main.request;
    delete params.raw_input_data.cloudant_context_key;
    try {
      func(params);
      assert.fail('validateParams() should have thrown.');
    } catch (e) {
      assert.equal('AssertionError', e.name);
      assert.equal(e.message, errorNoCloudantContextKey);
    }
  });
  it('validate error when no conversation object', () => {
    // Use request params for main function
    params = params.main.request;
    delete params.conversation;
    try {
      func(params);
      assert.fail('validateParams() should have thrown.');
    } catch (e) {
      assert.equal('AssertionError', e.name);
      assert.equal(e.message, errorNoConversationObj);
    }
  });
});
// Tests for deleteCloudantFields(). Cleanup: dropped the redundant
// `func = actionLoadContext.deleteCloudantFields;` at the top of every `it`
// — beforeEach already assigns it.
describe('Load Context Unit Tests: deleteCloudantFields()', () => {
  let params = {};
  let func; // Function to test
  beforeEach(() => {
    // merge the two objects, deep copying packageBindings so it doesn't get changed between tests
    // and we only have to read it once
    params = Object.assign({}, JSON.parse(JSON.stringify(paramsJson)));
    func = actionLoadContext.deleteCloudantFields;
  });
  it('deleteCloudantFields should delete cloudant revision _id', () => {
    // The params for func.
    const p = params.deleteCloudantFields.withCloudantDocId.params;
    const response = func(p);
    assert(!response._id, '_id present in Cloudant response.');
  });
  it('deleteCloudantFields should delete cloudant _rev', () => {
    // The params for func.
    const p = params.deleteCloudantFields.withCloudantRev.params;
    const response = func(p);
    assert(!response._rev, '_rev present in Cloudant response.');
  });
  it('deleteCloudantFields should delete cloudant _revs_info', () => {
    // The params for func.
    const p = params.deleteCloudantFields.withCloudantRevsInfo.params;
    const response = func(p);
    assert(!response._revs_info, '_revs_info present in Cloudant response.');
  });
});
// createCloudantObj() must reject (not resolve) when handed a malformed URL.
describe('Load Context Unit Tests: createCloudantObj()', () => {
  const func = actionLoadContext.createCloudantObj;
  it('validate Cloudant url should be proper', () => {
    return func(invalidCloudantUrl).then(
      response => {
        // Resolving here means the invalid URL was accepted — fail the test.
        assert(false, response);
      },
      e => {
        assert.equal(
          e,
          'Cloudant object creation failed. Error from Cloudant: Error: invalid url.',
          'Should fail complaining about invalid url.'
        );
      }
    );
  });
});
// Tests for getContext() against a nock-mocked Cloudant. Consistency fix:
// beforeEach previously assigned the WRONG function
// (actionLoadContext.deleteCloudantFields) and every `it` had to re-assign
// `func` before use; beforeEach now assigns getContext directly and the
// redundant per-test re-assignments are removed.
describe('Load Context Unit Tests: getContext()', () => {
  let params = {};
  let func; // Function to test
  beforeEach(() => {
    // merge the two objects, deep copying packageBindings so it doesn't get changed between tests
    // and we only have to read it once
    params = Object.assign({}, JSON.parse(JSON.stringify(paramsJson)));
    func = actionLoadContext.getContext;
  });
  it('get context all ok', () => {
    // The expected response for getContext function when all ok.
    const expected = params.getContext.allOk.response;
    // Use request params for getContext function when all ok.
    params = params.getContext.allOk.request;
    const cloudant = Cloudant({
      url: params.config.cloudant_url,
      plugin: 'retry',
      retryAttempts: 5,
      retryTimeout: 1000
    });
    const dbname = params.config.db;
    const key = params.key;
    const db = cloudant.use(dbname);
    const mock = nock(params.config.cloudant_url)
      .get(`/${dbname}/${key}`)
      .query(() => {
        return true;
      })
      .reply(200, expected);
    return func(db, key).then(
      response => {
        if (!mock.isDone()) {
          nock.cleanAll();
          assert(false, 'Mock server did not get called.');
        }
        nock.cleanAll();
        assert.deepEqual(response, expected);
      },
      e => {
        nock.cleanAll();
        assert(false, e);
      }
    );
  });
  it('should return empty context for new user', () => {
    // The expected response for getContext function when a new user starts chatting.
    const expected = params.getContext.missingContext.response;
    // Use request params for getContext function when a new user starts chatting.
    params = params.getContext.missingContext.request;
    const cloudant = Cloudant({
      url: params.config.cloudant_url,
      plugin: 'retry',
      retryAttempts: 5,
      retryTimeout: 1000
    });
    const dbname = params.config.db;
    const key = params.key;
    const db = cloudant.use(dbname);
    // 404 from Cloudant means "no stored context yet" and must not reject.
    const mock = nock(params.config.cloudant_url)
      .get(`/${dbname}/${key}`)
      .query(() => {
        return true;
      })
      .reply(404, expected);
    return func(db, key).then(
      response => {
        if (!mock.isDone()) {
          nock.cleanAll();
          assert(false, 'Mock server did not get called.');
        }
        nock.cleanAll();
        assert.deepEqual(response, expected);
      },
      e => {
        nock.cleanAll();
        assert(false, e);
      }
    );
  });
  it('should return error when a non-404 error occurs', () => {
    // The expected response for getContext function when a non-404 occurs.
    const expected = params.getContext.missingContext.response;
    // Use request params for getContext function when a non-404 occurs.
    params = params.getContext.missingContext.request;
    const cloudant = Cloudant({
      url: params.config.cloudant_url,
      plugin: 'retry',
      retryAttempts: 5,
      retryTimeout: 1000
    });
    const dbname = params.config.db;
    const key = params.key;
    const db = cloudant.use(dbname);
    const mock = nock(params.config.cloudant_url)
      .get(`/${dbname}/${key}`)
      .query(() => {
        return true;
      })
      .reply(500, expected);
    return func(db, key).then(
      response => {
        if (!mock.isDone()) {
          nock.cleanAll();
          assert(false, 'Mock server did not get called.');
        }
        nock.cleanAll();
        assert.deepEqual(response, expected);
      },
      e => {
        nock.cleanAll();
        assert.equal(e.statusCode, 500);
      }
    );
  });
});
// End-to-end test of main(): mocks both Cloudant (context document) and the
// Cloud Functions package-annotations endpoint, then checks the merged result.
describe('Load Context Unit Tests: main()', () => {
  let params = Object.assign({}, JSON.parse(JSON.stringify(paramsJson)));
  const func = actionLoadContext.main;
  it('All OK', () => {
    // The expected response for main function when all ok.
    const expected = params.main.response;
    // Use request params for main function when all ok.
    params = params.main.request;
    const cloudantUrl = 'https://pinkunicorns.cloudant.com';
    const cloudantContextDbName = 'conversation-context';
    // This is what the mock call should return - the conversation context.
    const nockResponseCloudant = expected.conversation.context;
    // Package annotations main() reads to locate its Cloudant database.
    const mockResponseCloudFunctions = {
      annotations: [
        {
          key: 'cloudant_url',
          value: cloudantUrl
        },
        {
          key: 'cloudant_context_dbname',
          value: cloudantContextDbName
        }
      ]
    };
    const mockCloudant = nock(cloudantUrl)
      .get(
        `/${cloudantContextDbName}/${params.raw_input_data.cloudant_context_key}`
      )
      .query(() => {
        return true;
      })
      .reply(200, nockResponseCloudant);
    const apiHost = process.env.__OW_API_HOST;
    const namespace = process.env.__OW_NAMESPACE;
    const packageName = process.env.__OW_ACTION_NAME.split('/')[2];
    const cloudFunctionsUrl = `https://${apiHost}/api/v1/namespaces`;
    const mockCloudFunctions = nock(cloudFunctionsUrl)
      .get(`/${namespace}/packages/${packageName}`)
      .reply(200, mockResponseCloudFunctions);
    return func(params).then(
      response => {
        if (!mockCloudant.isDone()) {
          nock.cleanAll();
          assert(false, 'Mock Cloudant server did not get called.');
        }
        if (!mockCloudFunctions.isDone()) {
          nock.cleanAll();
          assert(false, 'Mock Cloud Functions server did not get called.');
        }
        nock.cleanAll();
        assert.deepEqual(response, expected);
      },
      e => {
        nock.cleanAll();
        assert(false, e);
      }
    );
  });
});
// getCloudantCreds() should extract the Cloudant URL and database name from
// the (mocked) package annotations.
describe('Load Context Unit Tests: getCloudantCreds()', () => {
  const func = actionLoadContext.getCloudantCreds; // function to test
  it('All OK', () => {
    const cloudantUrl = 'https://pinkunicorns.cloudant.com';
    const cloudantContextDbName = 'conversation-context';
    const apiHost = process.env.__OW_API_HOST;
    const namespace = process.env.__OW_NAMESPACE;
    const packageName = process.env.__OW_ACTION_NAME.split('/')[2];
    const cloudFunctionsUrl = `https://${apiHost}/api/v1/namespaces`;
    const mockResponseCloudFunctions = {
      annotations: [
        {
          key: 'cloudant_url',
          value: cloudantUrl
        },
        {
          key: 'cloudant_context_dbname',
          value: cloudantContextDbName
        }
      ]
    };
    const expected = {
      cloudant_url: cloudantUrl,
      cloudant_context_dbname: cloudantContextDbName
    };
    const mockCloudFunctions = nock(cloudFunctionsUrl)
      .get(`/${namespace}/packages/${packageName}`)
      .reply(200, mockResponseCloudFunctions);
    return func().then(
      response => {
        if (!mockCloudFunctions.isDone()) {
          nock.cleanAll();
          assert(false, 'Mock Cloud Functions server did not get called.');
        }
        nock.cleanAll();
        assert.deepEqual(response, expected);
      },
      e => {
        nock.cleanAll();
        assert(false, e);
      }
    );
  });
});
|
import test from 'ava';
import Calculator from '../frontend/js/CalculatorCore';
// Ava tests for Calculator input tokenisation. A single shared instance is
// reset to [''] before each test.
const calc = new Calculator(['']);
test.beforeEach(() => {
  calc.setInputs(['']);
});
// --- digit accumulation ---------------------------------------------------
test('on 1 input should get an array of ["1"]', (t) => {
  calc.addToInputsLogic('1');
  t.is(calc.getInputs()[0], '1');
});
test('on 1 and 1 input should get an array of ["11"]', (t) => {
  calc.addToInputsLogic('1');
  calc.addToInputsLogic('1');
  t.is(calc.getInputs()[0], '11');
});
test('on 2 and 3 and 1 input should get an array of ["231"]', (t) => {
  calc.addToInputsLogic('2');
  calc.addToInputsLogic('3');
  calc.addToInputsLogic('1');
  t.is(calc.getInputs()[0], '231');
});
// --- operators split tokens -----------------------------------------------
test('on 2 and + and 1 input should get an array of ["2", "+", "1"]', (t) => {
  calc.addToInputsLogic('2');
  calc.addToInputsLogic('+');
  calc.addToInputsLogic('1');
  t.is(calc.getInputs()[0], '2');
  t.is(calc.getInputs()[1], '+');
  t.is(calc.getInputs()[2], '1');
});
test('on 2 and + and 1 and - and 5 input should get an array of ["2", "+", "1", "-", "5"]', (t) => {
  calc.addToInputsLogic('2');
  calc.addToInputsLogic('+');
  calc.addToInputsLogic('1');
  calc.addToInputsLogic('-');
  calc.addToInputsLogic('5');
  t.is(calc.getInputs()[0], '2');
  t.is(calc.getInputs()[1], '+');
  t.is(calc.getInputs()[2], '1');
  t.is(calc.getInputs()[3], '-');
  t.is(calc.getInputs()[4], '5');
});
// --- malformed input is normalised ----------------------------------------
test('on 2 and + and + and 1 input should get an array of ["2", "+", "1"]', (t) => {
  calc.addToInputsLogic('2');
  calc.addToInputsLogic('+');
  calc.addToInputsLogic('+');
  calc.addToInputsLogic('1');
  t.is(calc.getInputs()[0], '2');
  t.is(calc.getInputs()[1], '+');
  t.is(calc.getInputs()[2], '1');
});
test('on + and + and 1 input should get an array of ["1"]', (t) => {
  calc.addToInputsLogic('+');
  calc.addToInputsLogic('+');
  calc.addToInputsLogic('1');
  t.is(calc.getInputs()[0], '1');
});
test('on 0 and 0 and 1 input should get an array of ["1"]', (t) => {
  calc.addToInputsLogic('0');
  calc.addToInputsLogic('0');
  calc.addToInputsLogic('1');
  t.is(calc.getInputs()[0], '1');
});
test('on 0 and 0 and 1 and + and 0 and 0 and 2 input should get an array of ["1", "+", "2"]', (t) => {
  calc.addToInputsLogic('0');
  calc.addToInputsLogic('0');
  calc.addToInputsLogic('1');
  calc.addToInputsLogic('+');
  calc.addToInputsLogic('0');
  calc.addToInputsLogic('0');
  calc.addToInputsLogic('2');
  t.is(calc.getInputs()[0], '1');
  t.is(calc.getInputs()[1], '+');
  t.is(calc.getInputs()[2], '2');
});
// create float numbers
test('on 0 and . and 0 and 1 input should get an array of ["0.01"]', (t) => {
  calc.addToInputsLogic('0');
  calc.addToInputsLogic('.');
  calc.addToInputsLogic('0');
  calc.addToInputsLogic('1');
  t.is(calc.getInputs()[0], '0.01');
});
test('on . and + and . and 1 input should get an array of ["0", "+", "0.1"]', (t) => {
  calc.addToInputsLogic('.');
  calc.addToInputsLogic('+');
  calc.addToInputsLogic('.');
  calc.addToInputsLogic('1');
  t.is(calc.getInputs()[0], '0');
  t.is(calc.getInputs()[1], '+');
  t.is(calc.getInputs()[2], '0.1');
});
test('on 0 and . and 0 and 1 and + and . and . and 1 input should get an array of ["0.01", "+" "0.1"]', (t) => {
  calc.addToInputsLogic('0');
  calc.addToInputsLogic('.');
  calc.addToInputsLogic('0');
  calc.addToInputsLogic('1');
  calc.addToInputsLogic('+');
  calc.addToInputsLogic('.');
  calc.addToInputsLogic('.');
  calc.addToInputsLogic('1');
  t.is(calc.getInputs()[0], '0.01');
  t.is(calc.getInputs()[1], '+');
  t.is(calc.getInputs()[2], '0.1');
});
|
var path = require('path')
var webpack = require('webpack')
module.exports = {
context: path.join(__dirname, './app'),
entry: {
jsx: './index.js',
html: './index.html'
},
output: {
path: path.join(__dirname, './dist'),
filename: 'bundle.js'
},
resolve: {
extensions: ['', '.js', '.jsx'],
alias: {
webworkify: 'webworkify-webpack'
}
},
node: {
console: true,
fs: 'empty'
},
resolveLoader: {
root: path.join(__dirname, 'node_modules')
},
module: {
loaders: [
{ test: /\.js$/, include: path.resolve(__dirname, 'node_modules/mapbox-gl/js/render/painter/use_program.js'), loader: 'transform/cacheable?brfs' },
{ test: /\.scss$/, loaders: ["style", "css", "sass"] },
{ test: /\.jsx?$/, loader: 'babel-loader', exclude: /node_modules/ },
{ test: /\.json$/, loader: 'json' },
{ test: /\.css$/, loader: 'style-loader!css-loader' },
{ test: /\.(woff|woff2)$/, loader: 'url-loader' },
{ test: /\.(ttf|eot|svg|)$/, loader: 'url-loader' },
{ test: /\.(html|ico|txt)$/, loader: 'file?name=[name].[ext]' },
{ test: /\.(png|jpg|jpeg|gif)$/, loader: 'url-loader' }
]
},
externals: {
fs: '{}',
tls: '{}',
net: '{}',
console: '{}'
},
postLoaders: [
{
include: /node_modules\/mapbox-gl/,
loader: 'transform',
query: 'brfs'
}
],
plugins: [
new webpack.optimize.UglifyJsPlugin({compress: { warnings: false }, comments: false}),
new webpack.optimize.DedupePlugin(),
new webpack.DefinePlugin({
'process.env': { NODE_ENV: JSON.stringify(process.env.NODE_ENV || 'production') },
__DEV__: false
}),
new webpack.ProvidePlugin({
'Promise': 'es6-promise', // Thanks Aaron (https://gist.github.com/Couto/b29676dd1ab8714a818f#gistcomment-1584602)
'fetch': 'imports?this=>global!exports?global.fetch!whatwg-fetch'
})
]
}
|
/*!
 * @websanova/vue-auth v4.1.2
 * https://websanova.com/docs/vue-auth
 * Released under the MIT License.
 */
// NOTE(review): vendored, minified third-party auth driver (devise-style
// multi-header token handling for @websanova/vue-auth). Do not hand-edit;
// update by re-vendoring the published build.
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):e.VueAuth=t()}(this,function(){"use strict";return{tokens:["Token-Type","Access-Token","Client","Uid","Expiry","token-type","access-token","client","uid","expiry"],request:function(e,t){var s={},i=t.split("|");(this.drivers.deviseAuth||this.drivers.auth).tokens.forEach(function(e,t){i[t]&&(s[e]=i[t])}),this.drivers.http.setHeaders.call(this,e,s)},response:function(e){var t=[],s=this.drivers.http.getHeaders.call(this,e);if((s["access-token"]||s["Access-Token"])&&((this.drivers.deviseAuth||this.drivers.auth).tokens.forEach(function(e){s[e]&&t.push(s[e])}),!this.token()||parseInt(t[4],10)>=parseInt(this.token().split("|")[4],10)))return t.join("|")}}});
import bodyParser from 'body-parser'
import chalk from 'chalk'
import cors from 'cors'
import dotenv from 'dotenv'
import express from 'express'
import mongoose from 'mongoose'
import passport from 'passport'
import passportService from './services/passport'
import aboutRoute from './routes/aboutRoute'
import indexRoute from './routes/indexRoute'
import v1Routes from './routes/v1/index'

dotenv.config()

const {PORT = 3000, MONGO_HOST, APP_ENV, MOCHA_TEST = false} = process.env

// FIX: environment variables are always strings when set, so the old
// strict comparison `MOCHA_TEST === false` was only true when the
// variable was *unset* — exporting MOCHA_TEST=false wrongly selected the
// test database. Treat both the unset default and the string "false" as
// "not a mocha test run".
const isMochaTest = MOCHA_TEST !== false && MOCHA_TEST !== 'false'
const databaseEndpoint = isMochaTest ? `${MONGO_HOST}/mnet-test` : `${MONGO_HOST}/mnet`

mongoose.connect(databaseEndpoint, {useCreateIndex: true, useNewUrlParser: true})
mongoose.connection.on('connected', () => {
  console.log(`${chalk.black.bgGreen(' INFO ')} connected to the database ${databaseEndpoint}`)
})
mongoose.connection.on('error', err => {
  console.log(`${chalk.black.bgRed(' FAIL ')} cannot connect to the database: ${err}`)
})
mongoose.set('useFindAndModify', false)

const server = express()

// Body parsing, static bucket, CORS and passport auth middleware.
server.use(bodyParser.json())
server.use(bodyParser.urlencoded({extended: true}))
server.use('/bucket', express.static('bucket'))
server.use(cors())
server.use(passport.initialize())
server.use(passport.session())
passportService(passport)

// Override the X-Powered-By header on every response.
server.use((req, res, next) => {
  res.setHeader('X-Powered-By', 'rayriffy')
  next()
})

server.use('/', indexRoute)
server.use('/about', aboutRoute)
server.use('/api/v1', v1Routes)

// Catch-all 404 for unknown routes.
server.all('*', (req, res) => {
  res.status(404).send({
    status: 'failure',
    code: 704,
    response: {
      message: 'route not found',
    },
  })
})

server.listen(PORT, () => {
  console.log(`${chalk.black.bgGreen(' INFO ')} app is running on port ${PORT}`)
  if (APP_ENV !== 'production') {
    console.log(`${chalk.black.bgYellow(' WARN ')} this app is running on ${APP_ENV} environment!`)
  }
})

export default server
|
const webpack = require("webpack");
const path = require("path");
const fs = require("fs");
const WebpackBar = require("webpackbar");
const babelRc = fs.readFileSync(path.resolve(__dirname, "../.babelrc"));
const FormatStats = require("./build");
module.exports = (args, name) => {
let source = "";
let env = "production";
const isProd = args.NODE_ENV === "production";
const isDev = !isProd;
if (isDev) {
source = "src";
env = "development";
}
if (args.NODE_ENV === "test") {
env = "test";
}
const config = {
mode: env,
devtool: "#source-map",
stats: "errors-only",
entry: {
[source + "/client/themes/" + args.theme + "/public/dist/client"]: [
path.join(__dirname, "../src/client/ClientApp"),
],
[source + "/admin/public/dist/admin"]: [
path.join(__dirname, "../src/admin/app"),
],
},
resolve: {
alias: {
admin: path.resolve(__dirname, "../src/admin"),
client: path.resolve(__dirname, "../src/client"),
shared: path.resolve(__dirname, "../src/shared"),
config: path.resolve(__dirname, "../src/config"),
"styled-components$": path.resolve(
__dirname,
"../",
"./node_modules/styled-components",
),
react: path.resolve(__dirname, "../", "./node_modules/react"),
},
extensions: [".tsx", ".ts", ".js"],
},
plugins: [
new WebpackBar({ name: name }),
new FormatStats({ env }),
new webpack.DefinePlugin({
"process.env": {
NODE_ENV: JSON.stringify(env),
API_URL: "process.env.API_URL",
UPLOAD_URL: "process.env.UPLOAD_URL",
ROOT_URL: "process.env.ROOT_URL",
APP_PORT: "process.env.APP_PORT",
BASE_NAME: "process.env.BASE_NAME",
THEME: JSON.stringify(args.theme),
},
}),
new webpack.ContextReplacementPlugin(/moment[/\\]locale$/, /en/),
],
module: {
rules: [
{
test: /\.ts(x?)$/,
exclude: /node_modules/,
use: [
{
loader: "babel-loader",
options: {
cacheDirectory: true,
...JSON.parse(babelRc),
},
},
{
loader: "ts-loader",
options: {
transpileOnly: true,
experimentalWatchApi: true,
},
},
],
},
{
test: /\.(graphql|gql)$/,
exclude: /node_modules/,
loader: "graphql-tag/loader",
},
{
test: /\.html$/,
use: {
loader: "html-loader",
},
},
{
test: /\.md$/,
use: [
{
loader: "html-loader",
},
{
loader: "markdown-loader",
options: {
/* your options here */
},
},
],
},
{
test: /\.(png|jpe?g|gif|woff(2)?|ttf|eot|svg)$/i,
use: [
{
loader: "url-loader",
options: {
name: "[name].[ext]",
limit: 9000,
},
},
],
},
],
},
};
return config;
};
|
const express = require('express');
const app = express();
const bodyParser = require('body-parser');
const mongoose = require('mongoose');
const userRoutes = require('./routes/user-routes');
const adminRoutes = require('./routes/admin-routes');
const HttpError = require('./utils/http-error');

// Port the API listens on.
const port = 3001;

app.use(bodyParser.json());
app.use('/api/v1/user', userRoutes);
app.use('/api/v1/admin', adminRoutes);

// Catch-all: any request that reached this point matched no route.
app.use((req, res, next) => {
  const error = new HttpError('Page not found', 404);
  throw error;
});

// Central error handler.
app.use((error, req, res, next) => {
  // FIX: delegate to Express's default handler once headers are sent —
  // writing a status after a partial response would crash here.
  if (res.headersSent) {
    return next(error);
  }
  // FIX: default to 500 — res.status(undefined) is invalid when an
  // error carries no `code` (e.g. a plain thrown Error).
  res.status(error.code || 500);
  res.json({message: error.message || 'Unknown error occured', code: error.code || 500});
});

// NOTE(review): the connection string is empty — presumably redacted.
// Supply the MongoDB URI via configuration/environment instead of
// hard-coding it here.
mongoose.connect('',
  {
    useUnifiedTopology: true,
    useNewUrlParser: true
  }).then(() => {
    app.listen(port, () => {
      console.log(`App running on http://localhost:${port}`)
    });
  }).catch(err => {
    console.log(err);
  });
|
// Cached element references for the attack page.
// NOTE(review): getE/getResponse/escapeHTML/showMessage are helpers
// defined elsewhere in this UI's scripts — getE is presumably shorthand
// for document.getElementById; confirm.
var selectedAPs = getE("selectedAPs");
var selectedClients = getE("selectedClients");
// First <table> on the page lists attacks; the second lists beacon SSIDs.
var table = document.getElementsByTagName("table")[0];
var ssidList = document.getElementsByTagName("table")[1];
var saved = getE("saved");
var ssidCounter = getE("ssidCounter");
var ssid = getE("ssid");
var num = getE("num");
var enc = getE("enc");
var randomIntrvl = getE("randomIntrvl");
var randomBtn = getE("randomBtn");
// Handle for the 2 s polling timer started at the bottom of this file.
var resultInterval;
// Latest SSID list received from attackInfo.json.
var data = {};
// Polls attackInfo.json and redraws the page: selected AP/client lists,
// the random-mode button, the attack table and (when the device reports
// one) the beacon SSID list.
function getResults() {
    getResponse("attackInfo.json", function(responseText) {
        var res = JSON.parse(responseText);
        var aps = "";
        var clients = "";
        var tr = "<tr><th>Attack</th><th>Status</th><th>Start/Stop</th></tr>";
        for (var i = 0; i < res.aps.length; i++) aps += "<li>" + escapeHTML(res.aps[i]) + "</li>";
        for (var i = 0; i < res.clients.length; i++) clients += "<li>" + escapeHTML(res.clients[i]) + "</li>";
        selectedAPs.innerHTML = aps;
        selectedClients.innerHTML = clients;
        if(res.randomMode == 1) randomBtn.innerHTML = "Disable Random";
        else randomBtn.innerHTML = "Enable Random";
        // NOTE(review): attack names are inserted without escapeHTML(),
        // unlike AP/client names above — safe only if they are trusted
        // firmware constants; confirm.
        for (var i = 0; i < res.attacks.length; i++) {
            if (res.attacks[i].running) tr += "<tr class='selected'>";
            else tr += "<tr>";
            tr += "<td>" + res.attacks[i].name + "</td>";
            if (res.attacks[i].status == "ready") tr += "<td class='green status' id='status"+i+"'>" + res.attacks[i].status + "</td>";
            else tr += "<td class='red status' id='status"+i+"'>" + res.attacks[i].status + "</td>";
            if (res.attacks[i].running) tr += "<td><button class='select' onclick='startStop(" + i + ")'>stop</button></td>";
            else tr += "<td><button class='select' onclick='startStop(" + i + ")'>start</button></td>";
            tr += "</tr>";
        }
        table.innerHTML = tr;
        if(typeof res.ssid != 'undefined'){
            data = res.ssid;
            ssidCounter.innerHTML = data.length + "/48";
            var tr = "<tr><th>Name</th><th></th><th>Del.</th></tr>";
            for (var i = 0; i < data.length; i++) {
                tr += "<tr>";
                tr += "<td>" + escapeHTML(data[i][0]) + "</td>";
                // second tuple element flags a WPA2 ("encrypted") beacon
                if(data[i][1] == 1) tr += "<td>🔒</td>";
                else tr += "<td></td>";
                tr += '<td><button class="button-warn" onclick="deleteSSID(' + i + ')">x</button></td>';
                tr += "</tr>";
            }
            ssidList.innerHTML = tr;
        }
    }, function() {
        // on error, stop polling so we don't hammer an unreachable device
        clearInterval(resultInterval);
        showMessage("error loading attackInfo.json");
    });
}
// Toggles attack `num` on the device, then refreshes the attack table.
function startStop(num) {
    var url = "attackStart.json?num=" + num;
    getResponse(url, function (body) {
        var statusCell = getE("status" + num);
        statusCell.innerHTML = "loading";
        if (body == "true") {
            getResults();
        } else {
            showMessage("response error attackStart.json");
        }
    });
}
// Adds the SSID typed into the form to the device's beacon SSID list.
function addSSID() {
    var ssidName = ssid.value;
    if (ssidName.length > 0) {
        // NOTE(review): the counter label above shows "/48" but this guard
        // allows 64 entries — one of the two limits is wrong; confirm the
        // firmware's actual list capacity.
        if (data.length >= 64) showMessage("SSID list full :(", 2500);
        else {
            saved.innerHTML = "";
            // FIX: the SSID is now URL-encoded — names containing '&',
            // '#', '+' or spaces previously corrupted the query string.
            getResponse("addSSID.json?ssid=" + encodeURIComponent(ssidName) + "&num=" + num.value + "&enc=" + enc.checked, getResults);
        }
    }
}
// Clones the SSIDs of all currently selected access points into the list.
function cloneSelected() {
    getResponse("cloneSelected.json", getResults);
}
// Removes SSID at index `num`; clears the "saved" badge first.
function deleteSSID(num) {
    saved.innerHTML = "";
    getResponse("deleteSSID.json?num=" + num, getResults);
}
// Fills the list with randomly generated SSIDs.
function randomSSID() {
    saved.innerHTML = "";
    getResponse("randomSSID.json", getResults);
}
// Empties the SSID list.
function clearSSID() {
    saved.innerHTML = "";
    getResponse("clearSSID.json", getResults);
}
// Persists the current SSID list to the device's flash.
function saveSSID() {
    saved.innerHTML = "saved";
    getResponse("saveSSID.json", getResults);
}
// Restores the device's default SSID list.
function resetSSID() {
    saved.innerHTML = "saved";
    getResponse("resetSSID.json", getResults);
}
// Toggles random-beacon mode with the interval from the form.
function random(){
    getResponse("enableRandom.json?interval="+randomIntrvl.value, getResults);
}
// Initial load, then poll the device every 2 seconds.
getResults();
resultInterval = setInterval(getResults, 2000);
"""
The part that deals with merging PDF files into one.
(message handlers)
"""
import logging
from os import listdir
from typing import List
from aiogram import types
from aiogram.dispatcher import FSMContext
from loader import bot, dp, input_path, output_path
from PyPDF2 import PdfFileMerger
from states.all_states import MergingStates
from utils.clean_up import reset
@dp.message_handler(commands="done", state=MergingStates.waiting_for_files_to_merge)
async def get_confirmation(message: types.Message, state: FSMContext):
    """
    This handler will be called when user sends `/done` command.
    Gets confirmation on the files that need to be merged.

    Lists the uploaded PDFs (in upload order) and shows a Yes/No inline
    keyboard; "Yes" proceeds to naming the merged file, "No" lets the
    user modify the list.
    """
    await state.finish()
    # sorted is called since the file names have corresponding file counts
    # this is done to maintain the order of the files
    # (the files will be merged in the order that the user sends the files in)
    files = sorted(listdir(f"{input_path}/{message.chat.id}"))
    if not files:
        await message.reply("You didn't send any PDF files.")
    elif len(files) == 1:
        await message.reply(
            "You sent only one file. What am I supposed to merge it with?"
        )
    else:
        # since file names are in this format: number_name ("01_cool.pdf")
        # to provide a list of pdfs for the user, we make the list with a
        # list comprehension, not displaying the number part of the file
        # ("01_" in case of "01_cool.pdf")
        file_list = [
            f"{index}. {value[3:]}" for index, value in enumerate(files, start=1)
        ]
        file_list = "\n".join(file_list)
        keyboard = types.InlineKeyboardMarkup()
        buttons = [
            types.InlineKeyboardButton(text="Yes", callback_data="ask_for_name"),
            types.InlineKeyboardButton(text="No", callback_data="modify_files"),
        ]
        keyboard.add(*buttons)
        await message.reply(
            (
                "<b><u>Are these the files that you want to merge?</u></b>\n\n"
                + file_list
            ),
            reply_markup=keyboard,
        )
@dp.message_handler(
    is_media_group=True,
    content_types=types.ContentType.DOCUMENT,
    state=MergingStates.waiting_for_files_to_merge,
)
async def handle_albums(message: types.Message, album: List[types.Message]):
    """
    This handler will be called when user sends a group of files
    as an album for merging. Checks if the files are PDF files and asks
    if there are any more files that need to be merged.
    """
    await message.answer("Downloading files, please wait")
    for obj in album:
        name = obj.document.file_name
        # replacing empty spaces in the file name with underscores
        # if there are spaces in the file name, some of the code does not work
        # there definitely should be a better way of doing this, but i'm dumb
        if " " in name:
            name = name.replace(" ", "_")
        # NOTE(review): a non-PDF anywhere in the album aborts the whole
        # loop but keeps any files already downloaded before it — confirm
        # that is intended.
        if not name.lower().endswith(".pdf"):
            return await message.answer("That's not a PDF file.")
        # initially there should be no files in this directory,
        # so to start with "1" for the first file, we add 1
        # the whole reason why we have the file count is so that the order
        # of files is maintained and can be changed around later.
        file_count = len(listdir(f"{input_path}/{message.chat.id}")) + 1
        # to have file counts like "01", "02", etc so that the order is still
        # maintained if the user sends more than 9 files
        if file_count < 10:
            file_count = "0" + str(file_count)
        await bot.download_file_by_id(
            obj.document.file_id,
            destination=f"{input_path}/{message.chat.id}/{file_count}_{name}",
        )
        logging.info("File downloaded.")
    await message.answer(
        "Great, if you have any more PDF files you want to merge, "
        "send them now. Once you are done, send /done"
    )
@dp.message_handler(
    is_media_group=False,
    content_types=types.message.ContentType.DOCUMENT,
    state=MergingStates.waiting_for_files_to_merge,
)
async def merge_file_received(message: types.Message):
    """
    This handler will be called when user provides a file for merging.
    Checks if the file is a PDF and asks if there are any more files
    that need to be merged.
    """
    name = message.document.file_name
    # FIX: case-insensitive extension check, consistent with handle_albums —
    # previously "REPORT.PDF" was rejected here but accepted in albums.
    if name.lower().endswith(".pdf"):
        # replacing empty spaces in the file name with underscores
        # if there are spaces in the file name, some of the code does not work
        if " " in name:
            name = name.replace(" ", "_")
        # initially there should be no files in this directory,
        # so to start with "1" for the first file, we add 1
        # the file count prefix keeps the user's upload order stable
        file_count = len(listdir(f"{input_path}/{message.chat.id}")) + 1
        # zero-pad ("01", "02", ...) so lexicographic sorting stays correct
        # past 9 files
        if file_count < 10:
            file_count = "0" + str(file_count)
        await message.answer("Downloading the file, please wait")
        await bot.download_file_by_id(
            message.document.file_id,
            destination=f"{input_path}/{message.chat.id}/{file_count}_{name}",
        )
        logging.info("File downloaded")
        await message.reply(
            "Great, if you have any more PDF files you want to merge, "
            "send them now. Once you are done, send /done"
        )
    else:
        await message.reply(
            "That's not a PDF file.",
        )
@dp.message_handler(
    is_media_group=False,
    content_types=types.message.ContentType.DOCUMENT,
    state=MergingStates.waiting_for_specific_file,
)
async def specific_file_received(message: types.Message, state: FSMContext):
    """
    This handler will be called when user sends a file of type `Document`
    that has to be added to a certain position in the list of files (Merging).
    Checks if the file is a PDF and adds it to the desired position in the
    list of files. This is done by naming the file with the appropriate
    file count number.
    After the file is added, triggers the get confirmation function to
    confirm the new list of files.
    """
    name = message.document.file_name
    # FIX: case-insensitive extension check, consistent with handle_albums —
    # previously "REPORT.PDF" was rejected by this handler.
    if name.lower().endswith(".pdf"):
        logging.info("Adding a file")
        # replacing empty spaces in the file name with underscores
        # if there are spaces in the file name, some of the code does not work
        if " " in name:
            name = name.replace(" ", "_")
        # the desired position of the file is stored in the FSM state
        file_count = await state.get_data()
        file_count = file_count["num"]
        # zero-pad ("01", "02", ...) so lexicographic sorting stays correct
        # past 9 files
        if file_count < 10:
            file_count = "0" + str(file_count)
        await message.answer("Downloading the file, please wait")
        await bot.download_file_by_id(
            message.document.file_id,
            destination=f"{input_path}/{message.chat.id}/{file_count}_{name}",
        )
        logging.info("File downloaded")
        await state.finish()
        # getting confirmation on the new list of files
        await get_confirmation(message, state)
    else:
        await message.reply(
            "That's not a PDF file.",
        )
@dp.message_handler(state=MergingStates.waiting_for_a_name)
async def merge_files(message: types.Message, state: FSMContext):
    """
    This handler will be called when user provides a name for the merged PDF.
    Merges all the input files into one output PDF and sends it to the user.
    """
    await message.answer("Working on it")
    # sorted is called since the file names have corresponding file counts
    # this is done to maintain the order of the files
    # (the files will be merged in the order that the user sends the files in)
    files = sorted(listdir(f"{input_path}/{message.chat.id}"))
    logging.info("Merging started")
    # strict=False lets PyPDF2 tolerate mildly malformed input PDFs
    merger = PdfFileMerger(strict=False)
    for file in files:
        merger.append(f"{input_path}/{message.chat.id}/{file}")
    # replace the white space with underscores if there are spaces
    # otherwise some stuff doesn't work, im too dumb to figure out why for now
    merged_pdf_name = message.text.replace(" ", "_")
    # make sure the output file name carries a .pdf extension
    if not message.text.lower().endswith(".pdf"):
        merged_pdf_name = merged_pdf_name + ".pdf"
    output = f"{output_path}/{message.chat.id}/{merged_pdf_name}"
    merger.write(output)
    merger.close()
    with open(output, "rb") as result:
        await message.answer_chat_action(action="upload_document")
        await message.reply_document(result, caption="Here you go")
        logging.info("Sent the document")
    # clean up the per-chat input/output directories and reset FSM state
    await reset(message, state)
|
import sys, re, math, time
import numpy as np
#import matplotlib.pyplot as plt
import json
import pickle
import collections
from collections import OrderedDict
#from matplotlib.pyplot import cm
#from keras.preprocessing.sequence import pad_sequences
## ######################## ##
#
# Define CHARSET, CHARLEN
#
## ######################## ##
# CHARPROTSET = { 'A': 0, 'C': 1, 'D': 2, 'E': 3, 'F': 4, 'G': 5, 'H': 6, \
# 'I': 7, 'K': 8, 'L': 9, 'M': 10, 'N': 11, 'P': 12, 'Q': 13, \
# 'R': 14, 'S': 15, 'T': 16, 'V': 17, 'W': 18, 'Y': 19, 'X': 20, \
# 'O': 20, 'U': 20,
# 'B': (2, 11),
# 'Z': (3, 13),
# 'J': (7, 9) }
# CHARPROTLEN = 21
# Protein sequence alphabet: 1-based integer code per amino-acid letter
# (includes ambiguity codes B/Z/X and rare residues O/U).
CHARPROTSET = { "A": 1, "C": 2, "B": 3, "E": 4, "D": 5, "G": 6,
                "F": 7, "I": 8, "H": 9, "K": 10, "M": 11, "L": 12,
                "O": 13, "N": 14, "Q": 15, "P": 16, "S": 17, "R": 18,
                "U": 19, "T": 20, "W": 21,
                "V": 22, "Y": 23, "X": 24,
                "Z": 25 }
CHARPROTLEN = 25
# Secondary-structure alphabet: Coil / Helix / strand (Extended).
CHARPROTSTRUSET = {"C": 1,"H": 2,"E": 3}
CHARPROTSTRULEN =3
# Canonical-SMILES character alphabet (1-based codes).
CHARCANSMISET = { "#": 1, "%": 2, ")": 3, "(": 4, "+": 5, "-": 6,
                  ".": 7, "1": 8, "0": 9, "3": 10, "2": 11, "5": 12,
                  "4": 13, "7": 14, "6": 15, "9": 16, "8": 17, "=": 18,
                  "A": 19, "C": 20, "B": 21, "E": 22, "D": 23, "G": 24,
                  "F": 25, "I": 26, "H": 27, "K": 28, "M": 29, "L": 30,
                  "O": 31, "N": 32, "P": 33, "S": 34, "R": 35, "U": 36,
                  "T": 37, "W": 38, "V": 39, "Y": 40, "[": 41, "Z": 42,
                  "]": 43, "_": 44, "a": 45, "c": 46, "b": 47, "e": 48,
                  "d": 49, "g": 50, "f": 51, "i": 52, "h": 53, "m": 54,
                  "l": 55, "o": 56, "n": 57, "s": 58, "r": 59, "u": 60,
                  "t": 61, "y": 62}
CHARCANSMILEN = 62
# Isomeric-SMILES character alphabet (adds /, \, @ stereo markers).
CHARISOSMISET = {"#": 29, "%": 30, ")": 31, "(": 1, "+": 32, "-": 33, "/": 34, ".": 2,
                 "1": 35, "0": 3, "3": 36, "2": 4, "5": 37, "4": 5, "7": 38, "6": 6,
                 "9": 39, "8": 7, "=": 40, "A": 41, "@": 8, "C": 42, "B": 9, "E": 43,
                 "D": 10, "G": 44, "F": 11, "I": 45, "H": 12, "K": 46, "M": 47, "L": 13,
                 "O": 48, "N": 14, "P": 15, "S": 49, "R": 16, "U": 50, "T": 17, "W": 51,
                 "V": 18, "Y": 52, "[": 53, "Z": 19, "]": 54, "\\": 20, "a": 55, "c": 56,
                 "b": 21, "e": 57, "d": 22, "g": 58, "f": 23, "i": 59, "h": 24, "m": 60,
                 "l": 25, "o": 61, "n": 26, "s": 62, "r": 27, "u": 63, "t": 28, "y": 64}
CHARISOSMILEN = 64
## ######################## ##
#
# Encoding Helpers
#
## ######################## ##
# Y = -(np.log10(Y/(math.pow(math.e,9))))
def one_hot_smiles(line, MAX_SMI_LEN, smi_ch_ind):
    """One-hot encode a SMILES string.

    Each of the first MAX_SMI_LEN characters of `line` sets a 1 in the
    column given by its (1-based) code in `smi_ch_ind`; remaining rows
    stay zero-padded.
    """
    grid = np.zeros((MAX_SMI_LEN, len(smi_ch_ind)))
    for row, symbol in enumerate(line[:MAX_SMI_LEN]):
        grid[row][smi_ch_ind[symbol] - 1] = 1
    return grid
def one_hot_sequence(line, MAX_SEQ_LEN, smi_ch_ind):
    """One-hot encode a protein sequence (codes in smi_ch_ind are 1-based)."""
    grid = np.zeros((MAX_SEQ_LEN, len(smi_ch_ind)))
    truncated = line[:MAX_SEQ_LEN]
    for row in range(len(truncated)):
        grid[row, smi_ch_ind[truncated[row]] - 1] = 1
    return grid
def one_hot_structure(line, MAX_SEQ_LEN, smi_ch_ind):
    """One-hot encode a secondary-structure string (e.g. the C/H/E alphabet)."""
    grid = np.zeros((MAX_SEQ_LEN, len(smi_ch_ind)))
    for row, state in zip(range(MAX_SEQ_LEN), line):
        grid[row, smi_ch_ind[state] - 1] = 1
    return grid
def label_smiles(line, MAX_SMI_LEN, smi_ch_ind):
    """Integer-encode a SMILES string, zero-padded/truncated to MAX_SMI_LEN."""
    codes = np.zeros(MAX_SMI_LEN)
    for pos, symbol in enumerate(line[:MAX_SMI_LEN]):
        codes[pos] = smi_ch_ind[symbol]
    return codes
def label_sequence(line, MAX_SEQ_LEN, smi_ch_ind):
    """Integer-encode a protein sequence, zero-padded/truncated to MAX_SEQ_LEN."""
    encoded = np.zeros(MAX_SEQ_LEN)
    clipped = line[:MAX_SEQ_LEN]
    for position in range(len(clipped)):
        encoded[position] = smi_ch_ind[clipped[position]]
    return encoded
def label_structure(line, MAX_SEQ_LEN, smi_ch_ind):
    """Integer-encode a secondary-structure string (zero padding past the end)."""
    encoded = np.zeros(MAX_SEQ_LEN)
    for position, state in zip(range(MAX_SEQ_LEN), line):
        encoded[position] = smi_ch_ind[state]
    return encoded
def pure_ligands_structure(line, MAX_SEQ_LEN):
    """Parse a comma-separated fingerprint string into a fixed-length vector.

    Values past MAX_SEQ_LEN are dropped; missing positions stay zero.
    """
    vector = np.zeros(MAX_SEQ_LEN)
    tokens = line.split(",")
    for slot, token in enumerate(tokens[:MAX_SEQ_LEN]):
        vector[slot] = int(token)
    return vector
## ######################## ##
#
# DATASET Class
#
## ######################## ##
# works for large dataset
class DataSet(object):
    """Loader/encoder for a drug-target affinity dataset (e.g. KIBA/Davis).

    Holds the encoding alphabets and maximum lengths used to turn SMILES
    strings, protein sequences and secondary-structure strings into
    fixed-size vectors via the label_*/one_hot_* helpers above.
    """
    def __init__(self, fpath, setting_no, seqlen, smilen, smistrulen, need_shuffle = False):
        # NOTE(review): fpath and need_shuffle are accepted but never
        # stored — read_sets/parse_data take the path from FLAGS instead;
        # confirm the parameters are kept only for API compatibility.
        self.SEQLEN = seqlen
        self.SMILEN = smilen
        self.SMISTRULEN = smistrulen
        #self.NCLASSES = n_classes
        self.charseqset = CHARPROTSET
        self.charseqset_size = CHARPROTLEN
        self.charsmiset = CHARISOSMISET ###HERE CAN BE EDITED
        self.charsmiset_size = CHARISOSMILEN
        self.charseqstruset = CHARPROTSTRUSET
        self.charseqstruset_size = CHARPROTSTRULEN
        self.PROBLEMSET = setting_no
        # read raw file
        # self._raw = self.read_sets( FLAGS)
        # iteration flags
        # self._num_data = len(self._raw)
    def read_sets(self, FLAGS): ### fpath should be the dataset folder /kiba/ or /davis/
        """Return (test_fold, train_folds) index lists for the CV setting."""
        fpath = FLAGS.dataset_path
        setting_no = FLAGS.problem_type
        print("Reading %s start" % fpath)
        test_fold = json.load(open(fpath + "folds/test_fold_setting" + str(setting_no)+".txt"))
        train_folds = json.load(open(fpath + "folds/train_fold_setting" + str(setting_no)+".txt"))
        return test_fold, train_folds
    def parse_data(self, FLAGS, with_label=True):
        """Load and encode ligands, proteins, structures and the affinity matrix.

        Returns (XD, XD_STRU, XT, XT_STRU, Y): integer-label encodings when
        with_label is True, one-hot encodings otherwise.
        """
        fpath = FLAGS.dataset_path
        print("Read %s start" % fpath)
        # OrderedDict preserves file order so rows of Y line up with keys
        ligands = json.load(open(fpath+"ligands_can.txt"), object_pairs_hook=OrderedDict)
        proteins = json.load(open(fpath+"proteins.txt"), object_pairs_hook=OrderedDict)
        proteins_structure = json.load(open(fpath+"protein_structure.txt"), object_pairs_hook=OrderedDict)
        ligands_structure = json.load(open(fpath+"morgan_fingerprint.txt"), object_pairs_hook=OrderedDict)
        Y = pickle.load(open(fpath + "Y","rb"), encoding='latin1') ### TODO: read from raw
        if FLAGS.is_log:
            # convert nM affinities to pKd-style log scale
            Y = -(np.log10(Y/(math.pow(10,9))))
        XD = []
        XT = []
        XT_STRU = []
        XD_STRU = []
        if with_label:
            for d in ligands.keys():
                XD.append(label_smiles(ligands[d], self.SMILEN, self.charsmiset))
                XD_STRU.append(pure_ligands_structure(ligands_structure[d], self.SMISTRULEN))
            for t in proteins.keys():
                XT.append(label_sequence(proteins[t], self.SEQLEN, self.charseqset))
                XT_STRU.append(label_structure(proteins_structure[t], self.SEQLEN, self.charseqstruset))
        else:
            for d in ligands.keys():
                XD.append(one_hot_smiles(ligands[d], self.SMILEN, self.charsmiset))
                XD_STRU.append(pure_ligands_structure(ligands_structure[d], self.SMISTRULEN))
            for t in proteins.keys():
                XT.append(one_hot_sequence(proteins[t], self.SEQLEN, self.charseqset))
                XT_STRU.append(one_hot_structure(proteins_structure[t], self.SEQLEN, self.charseqstruset))
        return XD, XD_STRU, XT, XT_STRU, Y
|
# Write results to this file
OUTFILE = 'runs/bro/10KB/src2-tgt1/http-par-noids-iter00500.result.csv'
# Source computers for the request
SOURCE = ['10.0.0.1', '10.0.0.3']
# Target machines for the requests (aka server)
TARGET = ['10.0.0.2']
# IDS Mode. (ATM: noids, min, max, http, ssl, ftp, icmp, mysql)
IDSMODE = 'noids'
# Connection mode (par = parallel, seq = sequential)
MODE = 'par'
# Number of evaluation repetitions to run
EPOCHS = 100
# Number of iterations to be run in each evaluation repetition
ITER = 500
# Size of the file to be downloaded from target (in Bytes * 10^SIZE,
# e.g. SIZE = 4 -> 10 KB, matching the "10KB" segment of OUTFILE above)
SIZE = 4
# Protocol to be used e.g. HTTP, SSL, FTP, MYSQL
PROTOCOL = 'http'
/**
* Copyright 2014 IBM Corp. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*global $:false */
'use strict';
// Text-to-Speech demo page: wires up the sample text, the Speak/Download
// buttons and the <audio> element's result/error states.
$(document).ready(function() {
  var audio = $('.audio').get(0),
      textArea = $('#textArea');

  var textChanged = false,
      spanishText = 'El servicio de Voz a Texto utiliza la tecnología de síntesis de voz de IBM para convertir texto en Inglés o Español en una señal de audio. El audio es enviado de vuelta al cliente con un retraso mínimo. El servicio puede ser accedido a través de una interfaz REST.',
      englishText = 'The Text to Speech service uses IBM\'s speech synthesis capabilities to convert English or Spanish text to an audio signal. The audio is streamed back to the client with minimal delay. The service can be accessed via a REST interface.';

  $('#textArea').val(englishText);

  // Swap the sample text with the voice's language — but only while the
  // user hasn't typed their own text.
  $('#voice').change(function(){
    if (!textChanged) {
      if ($(this).val() === 'VoiceEsEsEnrique')
        $('#textArea').val(spanishText);
      else
        $('#textArea').val(englishText);
    }
  });

  $('#textArea').change(function(){
    textChanged = true;
  });

  // IE and Safari not supported disabled Speak button
  if ($('body').hasClass('ie') || $('body').hasClass('safari')) {
    $('.speak-button').prop('disabled', true);
  }
  if ($('.speak-button').prop('disabled')) {
    $('.ie-speak .arrow-box').show();
  }

  $('.audio').on('error', function () {
    $('.result').hide();
    // FIX: was $('errorMgs') — a typo'd *tag* selector that matched no
    // element, so the error text never appeared; target the .errorMsg
    // element like the rest of this file does.
    $('.errorMsg').text('Error processing the request.');
    $('.errorMsg').css('color','red');
    $('.error').show();
  });

  $('.audio').on('loadeddata', function () {
    $('.result').show();
    $('.error').hide();
  });

  // Download triggers a normal navigation so the browser saves the file.
  $('.download-button').click(function() {
    textArea.focus();
    if (validText(textArea.val())) {
      window.location.href = '/synthesize?download=true&' + $('.speech-form').serialize();
    }
  });

  // Speak streams the synthesized audio into the <audio> element.
  $('.speak-button').click(function() {
    $('.result').hide();
    audio.pause();
    $('#textArea').focus();
    if (validText(textArea.val())) {
      audio.setAttribute('src','/synthesize?' + $('.speech-form').serialize());
    }
  });

  // Returns true when the text is non-blank; otherwise shows a hint.
  function validText(text) {
    if ($.trim(text)) {
      $('.error').hide();
      return true;
    } else {
      $('.errorMsg').text('Please enter the text you would like to synthesize in the text window.');
      $('.errorMsg').css('color','#00b2ef');
      $('.error').show();
      return false;
    }
  }
});
|
# -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urllib2,urlparse,time
from resources.lib.libraries import cleantitle
from resources.lib.libraries import client
from resources.lib.libraries import control
from resources.lib import resolvers
from resources.lib.libraries import workers
from resources.lib.libraries import control
from resources.lib.resolvers import cloudzilla
from resources.lib.resolvers import openload
from resources.lib.resolvers import uptobox
from resources.lib.resolvers import zstream
from resources.lib.resolvers import streamin
class NoRedirection(urllib2.HTTPErrorProcessor):
    # Opener processor that disables urllib2's default redirect/error
    # handling: returning the response untouched lets callers inspect
    # 3xx responses directly.
    def http_response(self, request, response):
        return response
class source:
    def __init__(self):
        # Primary domain plus mirrors tried in order by the scrapers below.
        self.base_link = 'http://watchseries.ag'
        self.link_1 = 'http://watchseries.ag'
        # Google Translate proxy used to reach the site when it is blocked.
        self.link_2 = 'http://translate.googleusercontent.com/translate_c?anno=2&hl=en&sl=mt&tl=en&u=http://watchseries.ag'
        self.link_3 = 'https://watchseries.unblocked.pw'
        # %s slots: (year-1, year+1, URL-quoted show title)
        self.search_link = '/AdvancedSearch/%s-%s/by_popularity/%s'
        # %s slots: (show slug, season, episode)
        self.episode_link = '/episode/%s_s%s_e%s.html'
        self.headers = {}
    def get_show(self, imdb, tvdb, tvshowtitle, year):
        """Resolve a TV show to its site-relative URL path.

        Tries each mirror until a search result page is returned, filters
        candidates by cleaned title and a one-year window around `year`,
        and as a fallback fetches candidate pages looking for the IMDB id.
        Returns the URL path (UTF-8), or None on any failure.
        """
        try:
            query = self.search_link % (str(int(year)-1), str(int(year)+1), urllib.quote_plus(tvshowtitle))
            result = ''
            links = [self.link_1, self.link_2, self.link_3]
            for base_link in links:
                result = client.source(urlparse.urljoin(base_link, query), headers=self.headers)
                if 'episode-summary' in str(result): break
            result = result.decode('iso-8859-1').encode('utf-8')
            result = client.parseDOM(result, 'div', attrs = {'class': 'episode-summary'})[0]
            result = client.parseDOM(result, 'tr')
            tvshowtitle = cleantitle.tv(tvshowtitle)
            years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
            # (href, link text) pairs, with markup stripped from the text
            result = [(re.compile('href=[\'|\"|\s|\<]*(.+?)[\'|\"|\s|\>]').findall(i)[0], client.parseDOM(i, 'a')[-1]) for i in result]
            result = [(i[0], re.sub('<.+?>|</.+?>','', i[1])) for i in result]
            result = [i for i in result if any(x in i[1] for x in years)]
            result = [(client.replaceHTMLCodes(i[0]), i[1]) for i in result]
            # google-translate links wrap the real URL in a ?u= parameter
            try: result = [(urlparse.parse_qs(urlparse.urlparse(i[0]).query)['u'][0], i[1]) for i in result]
            except: pass
            result = [(urlparse.urlparse(i[0]).path, i[1]) for i in result]
            match = [i[0] for i in result if tvshowtitle == cleantitle.tv(i[1])]
            match2 = [i[0] for i in result]
            # de-duplicate while preserving order
            match2 = [x for y,x in enumerate(match2) if x not in match2[:y]]
            if match2 == []: return
            for i in match2[:5]:
                try:
                    if len(match) > 0:
                        url = match[0]
                        break
                    # NOTE(review): base_link is whatever mirror the search
                    # loop ended on — confirm that is intentional.
                    result = client.source(base_link + i, headers=self.headers)
                    if str(imdb) in str(result):
                        url = i
                        break
                except:
                    pass
            url = url.encode('utf-8')
            return url
        except:
            return
def get_episode(self, url, imdb, tvdb, title, date, season, episode):
if url == None: return
url = url.rsplit('/', 1)[-1]
url = self.episode_link % (url, season, episode)
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
def get_sources(self, url, hosthdDict, hostDict, locDict):
    """Scrape hoster links from an episode page and verify them concurrently.

    Candidate links are handed to check() worker threads, which append
    verified entries to self.sources. Returns self.sources (possibly
    empty, or partially filled if the ~20s polling budget runs out).
    """
    try:
        self.sources =[]
        mylinks = []
        if url == None: return self.sources
        url = url.replace('/json/', '/')
        result = ''
        links = [self.link_1, self.link_2, self.link_3]
        # Try each mirror until the page containing the link list loads.
        for base_link in links:
            result = client.source(urlparse.urljoin(base_link, url), headers=self.headers)
            if 'lang_1' in str(result): break
        result = result.replace('\n','')
        result = result.decode('iso-8859-1').encode('utf-8')
        # 'lang_1' appears to be the first-language links container -- TODO confirm.
        result = client.parseDOM(result, 'div', attrs = {'id': 'lang_1'})[0]
        # (href, title) pairs, de-duplicated while preserving order.
        links = re.compile('href=[\'|\"|\s|\<]*(.+?)[\'|\"|\s|\>].+?title=[\'|\"|\s|\<]*(.+?)[\'|\"|\s|\>]').findall(result)
        links = [x for y,x in enumerate(links) if x not in links[:y]]
        for i in links:
            try:
                # Hoster name: first domain label of the link title.
                host = i[1]
                host = host.split('.', 1)[0]
                host = host.strip().lower()
                if not host in hostDict: raise Exception()
                host = client.replaceHTMLCodes(host)
                host = host.encode('utf-8')
                url = i[0]
                url = client.replaceHTMLCodes(url)
                # Unwrap google-translate style ?u= redirects if present.
                try: url = urlparse.parse_qs(urlparse.urlparse(url).query)['u'][0]
                except: pass
                # NOTE(review): self.base_link is not assigned anywhere visible; if it
                # is missing, the AttributeError is swallowed and this link is skipped.
                if not url.startswith('http'): url = urlparse.urljoin(self.base_link, url)
                if not '/cale/' in url: raise Exception()
                url = url.encode('utf-8')
                url = url.replace('/json/', '/')
                url = urlparse.urlparse(url).path
                #sources.append({'source': host, 'quality': 'SD', 'provider': 'Watchseries', 'url': url})
                mylinks.append([url, 'SD'])
            except:
                pass
        # Verify candidates in parallel; check() appends to self.sources.
        threads = []
        for i in mylinks: threads.append(workers.Thread(self.check, i))
        [i.start() for i in threads]
        # Poll for up to 20 seconds (10 * 2 one-second sleeps) for workers to finish.
        for i in range(0, 10 * 2):
            is_alive = [x.is_alive() for x in threads]
            if all(x == False for x in is_alive): break
            time.sleep(1)
        return self.sources
    except:
        return self.sources
def check(self, i):
    """Worker: follow one candidate link and record the real hoster URL.

    i is a [url, quality] pair produced by get_sources(). On success a
    source dict is appended to self.sources; every failure is silent
    (best-effort, runs on a worker thread).
    """
    try:
        url = client.replaceHTMLCodes(i[0])
        url = url.encode('utf-8')
        result = ''
        links = [self.link_1, self.link_2, self.link_3]
        for base_link in links:
            try:
                # Fetch without following redirects; the mobile UA makes the
                # site serve the simple page containing the 'myButton' link.
                opener = urllib2.build_opener(NoRedirection)
                # BUG FIX: the original assigned opener.addheaders twice, so the
                # second assignment discarded the User-Agent header entirely.
                # Set both headers in a single list instead.
                opener.addheaders = [('User-Agent', 'Apple-iPhone'), ('Referer', base_link + url)]
                response = opener.open(base_link + url)
                result = response.read()
                response.close()
            except:
                result = ''
            if 'myButton' in result: break
        # The 'myButton' anchor points at the external hoster page.
        url = re.compile('class=[\'|\"]*myButton.+?href=[\'|\"|\s|\<]*(.+?)[\'|\"|\s|\>]').findall(result)[0]
        url = client.replaceHTMLCodes(url)
        # Unwrap ?u= / ?url= redirect wrappers when present.
        try:
            url = urlparse.parse_qs(urlparse.urlparse(url).query)['u'][0]
        except:
            pass
        try:
            url = urlparse.parse_qs(urlparse.urlparse(url).query)['url'][0]
        except:
            pass
        # Hoster name: hostname minus common prefixes and the TLD.
        host = urlparse.urlparse(url).netloc
        host = host.replace('www.', '').replace('embed.', '')
        host = host.rsplit('.', 1)[0]
        host = host.lower()
        host = client.replaceHTMLCodes(host)
        host = host.encode('utf-8')
        # Hosts known to be dead/unusable are rejected.
        if host == 'up2stream': raise Exception()
        if host == 'mightyupload': raise Exception()
        self.sources.append({'source': host, 'quality': i[1], 'provider': 'Watchseries', 'url': url})
    except:
        pass
def resolve(self, url):
    """Resolve a hoster page URL into a playable URL via the shared resolvers module."""
    try:
        return resolvers.request(url)
    except:
        return
|
import { GUI } from "three-addons/libs/lil-gui.module.min.js"
class GuiManager {
    /**
     * Wraps a lil-gui root panel plus a fixed set of topical sub-folders.
     * Every folder (and the root panel) starts closed so the dev panel
     * stays compact until the user opens it.
     */
    constructor() {
        this.gui = new GUI({ title: 'v6.02 dev' })
        this.gui.close()
        this.xrFolder = this.gui.addFolder(" X R ")
        this.xrFolder.close()
        this.materialsFolder = this.gui.addFolder("Materials")
        this.materialsFolder.close()
        this.meshFolder = this.gui.addFolder("Meshes")
        this.meshFolder.close()
        this.captureFolder = this.gui.addFolder("Photo Mode")
        this.captureFolder.close()
        this.curveFolder = this.gui.addFolder("Track Curves")
        this.curveFolder.close()
    }

    /** Adds a visibility toggle for `mesh` under the Meshes folder, labelled `name`. */
    mesh(mesh, name) {
        const folder = this.meshFolder
        folder.add(mesh, 'visible').name(name)
    }

    /**
     * Adds a generic control bound to `obj[key]`.
     * @param {object} obj target object
     * @param {string} key property name to bind
     * @param {{folder?: object}} [settings] optional target folder; defaults to the root panel
     */
    add(obj, key, settings = {}) {
        // BUG FIX: the original read `options.folder`, but the parameter is named
        // `settings` — `options` is undefined, so passing a folder threw a
        // ReferenceError. Read from `settings` instead.
        const folder = settings.folder ? settings.folder : this.gui
        folder.add(obj, key)
    }
}
const guiManager = new GuiManager()
export { guiManager } |
/*
 * jQuery Flot "symbols" plugin: lets series.points.symbol name one of several
 * extra marker shapes (square, diamond, triangle, cross) instead of the default,
 * by swapping the symbol name for the matching canvas draw callback.
 */
(function ($) {
    function processRawData(plot, series, datapoints) {
        // Each handler traces one marker shape onto ctx, centered at (x, y),
        // sized from the given radius.
        var handlers = {
            square: function (ctx, x, y, radius, shadow) {
                var size = radius * Math.sqrt(Math.PI) / 2;
                ctx.rect(x - size, y - size, size + size, size + size);
            },
            diamond: function (ctx, x, y, radius, shadow) {
                var size = radius * Math.sqrt(Math.PI / 2);
                ctx.moveTo(x - size, y);
                ctx.lineTo(x, y - size);
                ctx.lineTo(x + size, y);
                ctx.lineTo(x, y + size);
                ctx.lineTo(x - size, y);
            },
            triangle: function (ctx, x, y, radius, shadow) {
                var size = radius * Math.sqrt(2 * Math.PI / Math.sin(Math.PI / 3));
                var height = size * Math.sin(Math.PI / 3);
                ctx.moveTo(x - size / 2, y + height / 2);
                ctx.lineTo(x + size / 2, y + height / 2);
                // the shadow pass only draws the bottom edge
                if (!shadow) {
                    ctx.lineTo(x, y - height / 2);
                    ctx.lineTo(x - size / 2, y + height / 2);
                }
            },
            cross: function (ctx, x, y, radius, shadow) {
                var size = radius * Math.sqrt(Math.PI) / 2;
                ctx.moveTo(x - size, y - size);
                ctx.lineTo(x + size, y + size);
                ctx.moveTo(x - size, y + size);
                ctx.lineTo(x + size, y - size);
            }
        };
        var symbolName = series.points.symbol;
        if (handlers[symbolName]) {
            series.points.symbol = handlers[symbolName];
        }
    }
    function init(plot) {
        plot.hooks.processDatapoints.push(processRawData);
    }
    $.plot.plugins.push({ init: init, name: "symbols", version: "1.0" });
})(jQuery);
/*!
FullCalendar v5.3.2
Docs & License: https://fullcalendar.io/
(c) 2020 Adam Shaw
*/
var FullCalendarDayGrid = (function (exports, common) {
'use strict';
/*! *****************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */
/* global Reflect, Promise */
var extendStatics = function(d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
function __extends(d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
}
var __assign = function() {
__assign = Object.assign || function __assign(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
function __spreadArrays() {
for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
for (var r = Array(s), k = 0, i = 0; i < il; i++)
for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
r[k] = a[j];
return r;
}
/* An abstract class for the daygrid views, as well as month view. Renders one or more rows of day cells.
----------------------------------------------------------------------------------------------------------------------*/
// It is a manager for a Table subcomponent, which does most of the heavy lifting.
// It is responsible for managing width/height.
var TableView = /** @class */ (function (_super) {
    __extends(TableView, _super);
    function TableView() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        // Ref to the column-header table element, shared by both layout variants.
        _this.headerElRef = common.createRef();
        return _this;
    }
    // Layout used when no horizontal scrolling is needed: a SimpleScrollGrid
    // with an optional (possibly sticky) header section and a liquid body.
    TableView.prototype.renderSimpleLayout = function (headerRowContent, bodyContent) {
        var _a = this, props = _a.props, context = _a.context;
        var sections = [];
        var stickyHeaderDates = common.getStickyHeaderDates(context.options);
        if (headerRowContent) {
            sections.push({
                type: 'header',
                key: 'header',
                isSticky: stickyHeaderDates,
                chunk: {
                    elRef: this.headerElRef,
                    tableClassName: 'fc-col-header',
                    rowContent: headerRowContent,
                },
            });
        }
        sections.push({
            type: 'body',
            key: 'body',
            liquid: true,
            chunk: { content: bodyContent },
        });
        return (common.createElement(common.ViewRoot, { viewSpec: context.viewSpec }, function (rootElRef, classNames) { return (common.createElement("div", { ref: rootElRef, className: ['fc-daygrid'].concat(classNames).join(' ') },
            common.createElement(common.SimpleScrollGrid, { liquid: !props.isHeightAuto && !props.forPrint, cols: [] /* TODO: make optional? */, sections: sections }))); }));
    };
    // Layout used when horizontal scrolling is required (a day min-width is
    // set); requires a ScrollGrid implementation supplied via pluginHooks.
    TableView.prototype.renderHScrollLayout = function (headerRowContent, bodyContent, colCnt, dayMinWidth) {
        var ScrollGrid = this.context.pluginHooks.scrollGridImpl;
        if (!ScrollGrid) {
            throw new Error('No ScrollGrid implementation');
        }
        var _a = this, props = _a.props, context = _a.context;
        // Sticky behavior is disabled for print rendering.
        var stickyHeaderDates = !props.forPrint && common.getStickyHeaderDates(context.options);
        var stickyFooterScrollbar = !props.forPrint && common.getStickyFooterScrollbar(context.options);
        var sections = [];
        if (headerRowContent) {
            sections.push({
                type: 'header',
                key: 'header',
                isSticky: stickyHeaderDates,
                chunks: [{
                        key: 'main',
                        elRef: this.headerElRef,
                        tableClassName: 'fc-col-header',
                        rowContent: headerRowContent,
                    }],
            });
        }
        sections.push({
            type: 'body',
            key: 'body',
            liquid: true,
            chunks: [{
                    key: 'main',
                    content: bodyContent,
                }],
        });
        // Optional sticky footer hosting a horizontal scrollbar shim.
        if (stickyFooterScrollbar) {
            sections.push({
                type: 'footer',
                key: 'footer',
                isSticky: true,
                chunks: [{
                        key: 'main',
                        content: common.renderScrollShim,
                    }],
            });
        }
        return (common.createElement(common.ViewRoot, { viewSpec: context.viewSpec }, function (rootElRef, classNames) { return (common.createElement("div", { ref: rootElRef, className: ['fc-daygrid'].concat(classNames).join(' ') },
            common.createElement(ScrollGrid, { liquid: !props.isHeightAuto && !props.forPrint, colGroups: [{ cols: [{ span: colCnt, minWidth: dayMinWidth }] }], sections: sections }))); }));
    };
    return TableView;
}(common.DateComponent));
function splitSegsByRow(segs, rowCnt) {
var byRow = [];
for (var i = 0; i < rowCnt; i += 1) {
byRow[i] = [];
}
for (var _i = 0, segs_1 = segs; _i < segs_1.length; _i++) {
var seg = segs_1[_i];
byRow[seg.row].push(seg);
}
return byRow;
}
function splitSegsByFirstCol(segs, colCnt) {
var byCol = [];
for (var i = 0; i < colCnt; i += 1) {
byCol[i] = [];
}
for (var _i = 0, segs_2 = segs; _i < segs_2.length; _i++) {
var seg = segs_2[_i];
byCol[seg.firstCol].push(seg);
}
return byCol;
}
function splitInteractionByRow(ui, rowCnt) {
var byRow = [];
if (!ui) {
for (var i = 0; i < rowCnt; i += 1) {
byRow[i] = null;
}
}
else {
for (var i = 0; i < rowCnt; i += 1) {
byRow[i] = {
affectedInstances: ui.affectedInstances,
isEvent: ui.isEvent,
segs: [],
};
}
for (var _i = 0, _a = ui.segs; _i < _a.length; _i++) {
var seg = _a[_i];
byRow[seg.row].segs.push(seg);
}
}
return byRow;
}
// Renders the top strip of a day cell: the (optionally nav-linked) day number.
var TableCellTop = /** @class */ (function (_super) {
    __extends(TableCellTop, _super);
    function TableCellTop() {
        return _super !== null && _super.apply(this, arguments) || this;
    }
    TableCellTop.prototype.render = function () {
        var props = this.props;
        // When navLinks is enabled, clicking the day number navigates to that day.
        var navLinkAttrs = this.context.options.navLinks
            ? { 'data-navlink': common.buildNavLinkData(props.date), tabIndex: 0 }
            : {};
        // Wrapper is rendered only when there is inner content or forceDayTop is
        // set; a non-breaking space placeholder keeps the strip's height.
        return (common.createElement(common.DayCellContent, { date: props.date, dateProfile: props.dateProfile, todayRange: props.todayRange, showDayNumber: props.showDayNumber, extraHookProps: props.extraHookProps, defaultContent: renderTopInner }, function (innerElRef, innerContent) { return ((innerContent || props.forceDayTop) && (common.createElement("div", { className: "fc-daygrid-day-top", ref: innerElRef },
            common.createElement("a", __assign({ className: "fc-daygrid-day-number" }, navLinkAttrs), innerContent || common.createElement(common.Fragment, null, "\u00A0"))))); }));
    };
    return TableCellTop;
}(common.BaseComponent));
function renderTopInner(props) {
return props.dayNumberText;
}
// Default formatter for in-cell week numbers ('narrow' week form), used by
// TableCell unless a weekNumberFormat option overrides it.
var DEFAULT_WEEK_NUM_FORMAT = common.createFormatter({ week: 'narrow' });
// One day cell <td>: renders an optional week number (first column), the
// day-number top strip, the foreground events area with an optional
// "+N more" link, and the background fills.
var TableCell = /** @class */ (function (_super) {
    __extends(TableCell, _super);
    function TableCell() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        _this.handleRootEl = function (el) {
            _this.rootEl = el;
            // also forward the root element to the parent-supplied ref
            common.setRef(_this.props.elRef, el);
        };
        // "+N more" click: report all segs for this day plus the currently
        // hidden subset back to the parent via onMoreClick.
        _this.handleMoreLinkClick = function (ev) {
            var props = _this.props;
            if (props.onMoreClick) {
                var allSegs = props.segsByEachCol;
                var hiddenSegs = allSegs.filter(function (seg) { return props.segIsHidden[seg.eventRange.instance.instanceId]; });
                props.onMoreClick({
                    date: props.date,
                    allSegs: allSegs,
                    hiddenSegs: hiddenSegs,
                    moreCnt: props.moreCnt,
                    dayEl: _this.rootEl,
                    ev: ev,
                });
            }
        };
        return _this;
    }
    TableCell.prototype.render = function () {
        var _this = this;
        var _a = this.context, options = _a.options, viewApi = _a.viewApi;
        var props = this.props;
        var date = props.date, dateProfile = props.dateProfile;
        // hook props handed to the user-configurable more-link renderer
        var hookProps = {
            num: props.moreCnt,
            text: props.buildMoreLinkText(props.moreCnt),
            view: viewApi,
        };
        // week-number anchor navigates to week view when navLinks is enabled
        var navLinkAttrs = options.navLinks
            ? { 'data-navlink': common.buildNavLinkData(date, 'week'), tabIndex: 0 }
            : {};
        return (common.createElement(common.DayCellRoot, { date: date, dateProfile: dateProfile, todayRange: props.todayRange, showDayNumber: props.showDayNumber, extraHookProps: props.extraHookProps, elRef: this.handleRootEl }, function (dayElRef, dayClassNames, rootDataAttrs, isDisabled) { return (common.createElement("td", __assign({ ref: dayElRef, className: ['fc-daygrid-day'].concat(dayClassNames, props.extraClassNames || []).join(' ') }, rootDataAttrs, props.extraDataAttrs),
            common.createElement("div", { className: "fc-daygrid-day-frame fc-scrollgrid-sync-inner", ref: props.innerElRef /* different from hook system! RENAME */ },
                props.showWeekNumber && (common.createElement(common.WeekNumberRoot, { date: date, defaultFormat: DEFAULT_WEEK_NUM_FORMAT }, function (weekElRef, weekClassNames, innerElRef, innerContent) { return (common.createElement("a", __assign({ ref: weekElRef, className: ['fc-daygrid-week-number'].concat(weekClassNames).join(' ') }, navLinkAttrs), innerContent)); })),
                !isDisabled && (common.createElement(TableCellTop, { date: date, dateProfile: dateProfile, showDayNumber: props.showDayNumber, forceDayTop: props.forceDayTop, todayRange: props.todayRange, extraHookProps: props.extraHookProps })),
                common.createElement("div", { className: "fc-daygrid-day-events", ref: props.fgContentElRef, style: { paddingBottom: props.fgPaddingBottom } },
                    props.fgContent,
                    Boolean(props.moreCnt) && (common.createElement("div", { className: "fc-daygrid-day-bottom", style: { marginTop: props.moreMarginTop } },
                        common.createElement(common.RenderHook, { hookProps: hookProps, classNames: options.moreLinkClassNames, content: options.moreLinkContent, defaultContent: renderMoreLinkInner, didMount: options.moreLinkDidMount, willUnmount: options.moreLinkWillUnmount }, function (rootElRef, classNames, innerElRef, innerContent) { return (common.createElement("a", { ref: rootElRef, className: ['fc-daygrid-more-link'].concat(classNames).join(' '), onClick: _this.handleMoreLinkClick }, innerContent)); })))),
                common.createElement("div", { className: "fc-daygrid-day-bg" }, props.bgContent)))); }));
    };
    return TableCell;
}(common.DateComponent));
function renderMoreLinkInner(props) {
return props.text;
}
// Default time format for daygrid events (numeric hour, 2-digit minutes with
// zero minutes omitted, narrow meridiem). Used unless the eventTimeFormat
// option overrides it.
var DEFAULT_TABLE_EVENT_TIME_FORMAT = common.createFormatter({
    hour: 'numeric',
    minute: '2-digit',
    omitZeroMinute: true,
    meridiem: 'narrow',
});
function hasListItemDisplay(seg) {
var display = seg.eventRange.ui.display;
return display === 'list-item' || (display === 'auto' &&
!seg.eventRange.def.allDay &&
seg.firstCol === seg.lastCol && // can't be multi-day
seg.isStart && // "
seg.isEnd // "
);
}
// Dot-style ("list item") event renderer, used for events that satisfy
// hasListItemDisplay(). Renders an anchor (never resizable) whose default
// inner content comes from renderInnerContent.
var TableListItemEvent = /** @class */ (function (_super) {
    __extends(TableListItemEvent, _super);
    function TableListItemEvent() {
        return _super !== null && _super.apply(this, arguments) || this;
    }
    TableListItemEvent.prototype.render = function () {
        var _a = this, props = _a.props, context = _a.context;
        var timeFormat = context.options.eventTimeFormat || DEFAULT_TABLE_EVENT_TIME_FORMAT;
        var timeText = common.buildSegTimeText(props.seg, timeFormat, context, true, props.defaultDisplayEventEnd);
        return (common.createElement(common.EventRoot, { seg: props.seg, timeText: timeText, defaultContent: renderInnerContent, isDragging: props.isDragging, isResizing: false, isDateSelecting: false, isSelected: props.isSelected, isPast: props.isPast, isFuture: props.isFuture, isToday: props.isToday }, function (rootElRef, classNames, innerElRef, innerContent) { return ( // we don't use styles!
            common.createElement("a", __assign({ className: ['fc-daygrid-event', 'fc-daygrid-dot-event'].concat(classNames).join(' '), ref: rootElRef }, getSegAnchorAttrs(props.seg)), innerContent)); }));
    };
    return TableListItemEvent;
}(common.BaseComponent));
// Inner content of a dot-style event: colored dot, optional time text, and the
// event title (non-breaking space when the title is empty, to keep height).
function renderInnerContent(innerProps) {
    return (common.createElement(common.Fragment, null,
        common.createElement("div", { className: "fc-daygrid-event-dot", style: { borderColor: innerProps.borderColor || innerProps.backgroundColor } }),
        innerProps.timeText && (common.createElement("div", { className: "fc-event-time" }, innerProps.timeText)),
        common.createElement("div", { className: "fc-event-title" }, innerProps.event.title || common.createElement(common.Fragment, null, "\u00A0"))));
}
function getSegAnchorAttrs(seg) {
var url = seg.eventRange.def.url;
return url ? { href: url } : {};
}
// Solid block-style event renderer (multi-day / all-day / forced block
// display). Delegates to StandardEvent; resizing is disabled for timed
// (non-allDay) events.
var TableBlockEvent = /** @class */ (function (_super) {
    __extends(TableBlockEvent, _super);
    function TableBlockEvent() {
        return _super !== null && _super.apply(this, arguments) || this;
    }
    TableBlockEvent.prototype.render = function () {
        var props = this.props;
        return (common.createElement(common.StandardEvent, __assign({}, props, { extraClassNames: ['fc-daygrid-event', 'fc-daygrid-block-event', 'fc-h-event'], defaultTimeFormat: DEFAULT_TABLE_EVENT_TIME_FORMAT, defaultDisplayEventEnd: props.defaultDisplayEventEnd, disableResizing: !props.seg.eventRange.def.allDay })));
    };
    return TableBlockEvent;
}(common.BaseComponent));
function computeFgSegPlacement(// for one row. TODO: print mode?
cellModels, segs, dayMaxEvents, dayMaxEventRows, eventHeights, maxContentHeight, colCnt, eventOrderSpecs) {
var colPlacements = []; // if event spans multiple cols, its present in each col
var moreCnts = []; // by-col
var segIsHidden = {};
var segTops = {}; // always populated for each seg
var segMarginTops = {}; // simetimes populated for each seg
var moreTops = {};
var paddingBottoms = {}; // for each cell's inner-wrapper div
for (var i = 0; i < colCnt; i += 1) {
colPlacements.push([]);
moreCnts.push(0);
}
segs = common.sortEventSegs(segs, eventOrderSpecs);
for (var _i = 0, segs_1 = segs; _i < segs_1.length; _i++) {
var seg = segs_1[_i];
var instanceId = seg.eventRange.instance.instanceId;
var eventHeight = eventHeights[instanceId + ':' + seg.firstCol];
placeSeg(seg, eventHeight || 0); // will keep colPlacements sorted by top
}
if (dayMaxEvents === true || dayMaxEventRows === true) {
limitByMaxHeight(moreCnts, segIsHidden, colPlacements, maxContentHeight); // populates moreCnts/segIsHidden
}
else if (typeof dayMaxEvents === 'number') {
limitByMaxEvents(moreCnts, segIsHidden, colPlacements, dayMaxEvents); // populates moreCnts/segIsHidden
}
else if (typeof dayMaxEventRows === 'number') {
limitByMaxRows(moreCnts, segIsHidden, colPlacements, dayMaxEventRows); // populates moreCnts/segIsHidden
}
// computes segTops/segMarginTops/moreTops/paddingBottoms
for (var col = 0; col < colCnt; col += 1) {
var placements = colPlacements[col];
var currentNonAbsBottom = 0;
var currentAbsHeight = 0;
for (var _a = 0, placements_1 = placements; _a < placements_1.length; _a++) {
var placement = placements_1[_a];
var seg = placement.seg;
if (!segIsHidden[seg.eventRange.instance.instanceId]) {
segTops[seg.eventRange.instance.instanceId] = placement.top; // from top of container
if (seg.firstCol === seg.lastCol && seg.isStart && seg.isEnd) { // TODO: simpler way? NOT DRY
segMarginTops[seg.eventRange.instance.instanceId] =
placement.top - currentNonAbsBottom; // from previous seg bottom
currentAbsHeight = 0;
currentNonAbsBottom = placement.bottom;
}
else { // multi-col event, abs positioned
currentAbsHeight = placement.bottom - currentNonAbsBottom;
}
}
}
if (currentAbsHeight) {
if (moreCnts[col]) {
moreTops[col] = currentAbsHeight;
}
else {
paddingBottoms[col] = currentAbsHeight;
}
}
}
function placeSeg(seg, segHeight) {
if (!tryPlaceSegAt(seg, segHeight, 0)) {
for (var col = seg.firstCol; col <= seg.lastCol; col += 1) {
for (var _i = 0, _a = colPlacements[col]; _i < _a.length; _i++) { // will repeat multi-day segs!!!!!!! bad!!!!!!
var placement = _a[_i];
if (tryPlaceSegAt(seg, segHeight, placement.bottom)) {
return;
}
}
}
}
}
function tryPlaceSegAt(seg, segHeight, top) {
if (canPlaceSegAt(seg, segHeight, top)) {
for (var col = seg.firstCol; col <= seg.lastCol; col += 1) {
var placements = colPlacements[col];
var insertionIndex = 0;
while (insertionIndex < placements.length &&
top >= placements[insertionIndex].top) {
insertionIndex += 1;
}
placements.splice(insertionIndex, 0, {
seg: seg,
top: top,
bottom: top + segHeight,
});
}
return true;
}
return false;
}
function canPlaceSegAt(seg, segHeight, top) {
for (var col = seg.firstCol; col <= seg.lastCol; col += 1) {
for (var _i = 0, _a = colPlacements[col]; _i < _a.length; _i++) {
var placement = _a[_i];
if (top < placement.bottom && top + segHeight > placement.top) { // collide?
return false;
}
}
}
return true;
}
// what does this do!?
for (var instanceIdAndFirstCol in eventHeights) {
if (!eventHeights[instanceIdAndFirstCol]) {
segIsHidden[instanceIdAndFirstCol.split(':')[0]] = true;
}
}
var segsByFirstCol = colPlacements.map(extractFirstColSegs); // operates on the sorted cols
var segsByEachCol = colPlacements.map(function (placements, col) {
var segsForCols = extractAllColSegs(placements);
segsForCols = resliceDaySegs(segsForCols, cellModels[col].date, col);
return segsForCols;
});
return {
segsByFirstCol: segsByFirstCol,
segsByEachCol: segsByEachCol,
segIsHidden: segIsHidden,
segTops: segTops,
segMarginTops: segMarginTops,
moreCnts: moreCnts,
moreTops: moreTops,
paddingBottoms: paddingBottoms,
};
}
function extractFirstColSegs(oneColPlacements, col) {
var segs = [];
for (var _i = 0, oneColPlacements_1 = oneColPlacements; _i < oneColPlacements_1.length; _i++) {
var placement = oneColPlacements_1[_i];
if (placement.seg.firstCol === col) {
segs.push(placement.seg);
}
}
return segs;
}
function extractAllColSegs(oneColPlacements) {
var segs = [];
for (var _i = 0, oneColPlacements_2 = oneColPlacements; _i < oneColPlacements_2.length; _i++) {
var placement = oneColPlacements_2[_i];
segs.push(placement.seg);
}
return segs;
}
// Hide segs whose bottom edge would overflow the cell's max content height.
function limitByMaxHeight(hiddenCnts, segIsHidden, colPlacements, maxContentHeight) {
    var fitsWithinHeight = function (placement) {
        return placement.bottom <= maxContentHeight;
    };
    limitEvents(hiddenCnts, segIsHidden, colPlacements, true, fitsWithinHeight);
}
// Hide segs beyond a fixed number of events; the "+N more" link does NOT
// consume a level in this mode.
function limitByMaxEvents(hiddenCnts, segIsHidden, colPlacements, dayMaxEvents) {
    var withinEventLimit = function (placement, levelIndex) {
        return levelIndex < dayMaxEvents;
    };
    limitEvents(hiddenCnts, segIsHidden, colPlacements, false, withinEventLimit);
}
// Hide segs beyond a fixed number of levels; the "+N more" link itself
// consumes one of those levels.
function limitByMaxRows(hiddenCnts, segIsHidden, colPlacements, dayMaxEventRows) {
    var withinRowLimit = function (placement, levelIndex) {
        return levelIndex < dayMaxEventRows;
    };
    limitEvents(hiddenCnts, segIsHidden, colPlacements, true, withinRowLimit);
}
/*
populates the given hiddenCnts/segIsHidden, which are supplied empty.
TODO: return them instead
*/
// Generic overflow limiter: walks each column's placements top-to-bottom,
// keeping those the predicate accepts and hiding the rest. When
// _moreLinkConsumesLevel is true, the "+N more" link occupies a level of its
// own, so a column's first hidden seg also evicts the last visible level.
function limitEvents(hiddenCnts, segIsHidden, colPlacements, _moreLinkConsumesLevel, isPlacementInBounds) {
    var colCnt = hiddenCnts.length;
    var segIsVisible = {}; // TODO: instead, use segIsHidden with true/false?
    var visibleColPlacements = []; // will mirror colPlacements
    for (var col = 0; col < colCnt; col += 1) {
        visibleColPlacements.push([]);
    }
    for (var col = 0; col < colCnt; col += 1) {
        var placements = colPlacements[col];
        var level = 0;
        for (var _i = 0, placements_2 = placements; _i < placements_2.length; _i++) {
            var placement = placements_2[_i];
            if (isPlacementInBounds(placement, level)) {
                recordVisible(placement);
            }
            else {
                recordHidden(placement, level, _moreLinkConsumesLevel);
            }
            // only considered a level if the seg had height
            if (placement.top !== placement.bottom) {
                level += 1;
            }
        }
    }
    // Marks a seg visible (at most once) and mirrors its placement into
    // every column it spans.
    function recordVisible(placement) {
        var seg = placement.seg;
        var instanceId = seg.eventRange.instance.instanceId;
        if (!segIsVisible[instanceId]) {
            segIsVisible[instanceId] = true;
            for (var col = seg.firstCol; col <= seg.lastCol; col += 1) {
                visibleColPlacements[col].push(placement);
            }
        }
    }
    // Marks a seg hidden (at most once) and bumps each spanned column's
    // hidden count. If this is the column's FIRST hidden seg and the
    // more-link consumes a level, recursively evicts visible placements
    // beyond the doomed level to make room for the link.
    function recordHidden(placement, currentLevel, moreLinkConsumesLevel) {
        var seg = placement.seg;
        var instanceId = seg.eventRange.instance.instanceId;
        if (!segIsHidden[instanceId]) {
            segIsHidden[instanceId] = true;
            for (var col = seg.firstCol; col <= seg.lastCol; col += 1) {
                hiddenCnts[col] += 1;
                var hiddenCnt = hiddenCnts[col];
                if (moreLinkConsumesLevel && hiddenCnt === 1 && currentLevel > 0) {
                    var doomedLevel = currentLevel - 1;
                    while (visibleColPlacements[col].length > doomedLevel) {
                        recordHidden(visibleColPlacements[col].pop(), // removes
                        visibleColPlacements[col].length, // will execute after the pop. will be the index of the removed placement
                        false);
                    }
                }
            }
        }
    }
}
// Given the events within an array of segment objects, reslice them to be in
// a single day: the day at `dayDate` / column `colIndex`. Segs that don't
// intersect that day are dropped; slices get duration-editing disabled and
// isStart/isEnd recomputed against the original range.
function resliceDaySegs(segs, dayDate, colIndex) {
    var dayStart = dayDate;
    var dayEnd = common.addDays(dayStart, 1);
    var dayRange = { start: dayStart, end: dayEnd };
    var newSegs = [];
    for (var _i = 0, segs_2 = segs; _i < segs_2.length; _i++) {
        var seg = segs_2[_i];
        var eventRange = seg.eventRange;
        var origRange = eventRange.range;
        var slicedRange = common.intersectRanges(origRange, dayRange);
        if (slicedRange) {
            newSegs.push(__assign(__assign({}, seg), { firstCol: colIndex, lastCol: colIndex, eventRange: {
                    def: eventRange.def,
                    ui: __assign(__assign({}, eventRange.ui), { durationEditable: false }),
                    instance: eventRange.instance,
                    range: slicedRange,
                }, isStart: seg.isStart && slicedRange.start.valueOf() === origRange.start.valueOf(), isEnd: seg.isEnd && slicedRange.end.valueOf() === origRange.end.valueOf() }));
        }
    }
    return newSegs;
}
var TableRow = /** @class */ (function (_super) {
__extends(TableRow, _super);
function TableRow() {
    var _this = _super !== null && _super.apply(this, arguments) || this;
    // RefMaps collect per-cell DOM refs, keyed by cell key (seg harnesses
    // are keyed by "instanceId:firstCol" instead).
    _this.cellElRefs = new common.RefMap(); // the <td>
    _this.frameElRefs = new common.RefMap(); // the fc-daygrid-day-frame
    _this.fgElRefs = new common.RefMap(); // the fc-daygrid-day-events
    _this.segHarnessRefs = new common.RefMap(); // indexed by "instanceId:firstCol"
    _this.rootElRef = common.createRef();
    // Measured layout state, populated by updateSizing after mount/update.
    _this.state = {
        framePositions: null,
        maxContentHeight: null,
        segHeights: {},
    };
    return _this;
}
// Renders one <tr> of day cells: splits every seg collection out per-column,
// computes fg placement from the last measured heights (state), then emits a
// TableCell per cell with its fg/bg/mirror content.
TableRow.prototype.render = function () {
    var _this = this;
    var _a = this, props = _a.props, state = _a.state, context = _a.context;
    var colCnt = props.cells.length;
    var businessHoursByCol = splitSegsByFirstCol(props.businessHourSegs, colCnt);
    var bgEventSegsByCol = splitSegsByFirstCol(props.bgEventSegs, colCnt);
    var highlightSegsByCol = splitSegsByFirstCol(this.getHighlightSegs(), colCnt);
    var mirrorSegsByCol = splitSegsByFirstCol(this.getMirrorSegs(), colCnt);
    var _b = computeFgSegPlacement(props.cells, props.fgEventSegs, props.dayMaxEvents, props.dayMaxEventRows, state.segHeights, state.maxContentHeight, colCnt, context.options.eventOrder), paddingBottoms = _b.paddingBottoms, segsByFirstCol = _b.segsByFirstCol, segsByEachCol = _b.segsByEachCol, segIsHidden = _b.segIsHidden, segTops = _b.segTops, segMarginTops = _b.segMarginTops, moreCnts = _b.moreCnts, moreTops = _b.moreTops;
    // instances affected by an active drag/resize get hidden in favor of mirrors
    var selectedInstanceHash = // TODO: messy way to compute this
    (props.eventDrag && props.eventDrag.affectedInstances) ||
        (props.eventResize && props.eventResize.affectedInstances) ||
        {};
    return (common.createElement("tr", { ref: this.rootElRef },
        props.renderIntro && props.renderIntro(),
        props.cells.map(function (cell, col) {
            var normalFgNodes = _this.renderFgSegs(segsByFirstCol[col], segIsHidden, segTops, segMarginTops, selectedInstanceHash, props.todayRange);
            var mirrorFgNodes = _this.renderFgSegs(mirrorSegsByCol[col], {}, segTops, // use same tops as real rendering
            {}, {}, props.todayRange, Boolean(props.eventDrag), Boolean(props.eventResize), false);
            return (common.createElement(TableCell, { key: cell.key, elRef: _this.cellElRefs.createRef(cell.key), innerElRef: _this.frameElRefs.createRef(cell.key) /* FF <td> problem, but okay to use for left/right. TODO: rename prop */, dateProfile: props.dateProfile, date: cell.date, showDayNumber: props.showDayNumbers, showWeekNumber: props.showWeekNumbers && col === 0, forceDayTop: props.showWeekNumbers /* even displaying weeknum for row, not necessarily day */, todayRange: props.todayRange, extraHookProps: cell.extraHookProps, extraDataAttrs: cell.extraDataAttrs, extraClassNames: cell.extraClassNames, moreCnt: moreCnts[col], buildMoreLinkText: props.buildMoreLinkText, onMoreClick: props.onMoreClick, segIsHidden: segIsHidden, moreMarginTop: moreTops[col] /* rename */, segsByEachCol: segsByEachCol[col], fgPaddingBottom: paddingBottoms[col], fgContentElRef: _this.fgElRefs.createRef(cell.key), fgContent: ( // Fragment scopes the keys
                common.createElement(common.Fragment, null,
                    common.createElement(common.Fragment, null, normalFgNodes),
                    common.createElement(common.Fragment, null, mirrorFgNodes))), bgContent: ( // Fragment scopes the keys
                common.createElement(common.Fragment, null,
                    _this.renderFillSegs(highlightSegsByCol[col], 'highlight'),
                    _this.renderFillSegs(businessHoursByCol[col], 'non-business'),
                    _this.renderFillSegs(bgEventSegsByCol[col], 'bg-event'))) }));
        })));
};
TableRow.prototype.componentDidMount = function () {
    // Initial sizing pass: positions/heights can only be measured after mount.
    this.updateSizing(true);
};
TableRow.prototype.componentDidUpdate = function (prevProps, prevState) {
    var currentProps = this.props;
    // Only treat this as an external sizing change when props actually
    // changed; state-only updates (measured heights) skip the full re-measure.
    this.updateSizing(!common.isPropsEqual(prevProps, currentProps));
};
// Segs to paint with the "highlight" fill: an active drag takes precedence,
// then an active resize, otherwise the current date-selection segs.
TableRow.prototype.getHighlightSegs = function () {
    var _a = this.props, eventDrag = _a.eventDrag, eventResize = _a.eventResize, dateSelectionSegs = _a.dateSelectionSegs;
    if (eventDrag && eventDrag.segs.length) { // messy check
        return eventDrag.segs;
    }
    if (eventResize && eventResize.segs.length) { // messy check
        return eventResize.segs;
    }
    return dateSelectionSegs;
};
// Mirror ("ghost") segs only exist while an event is being resized.
TableRow.prototype.getMirrorSegs = function () {
    var eventResize = this.props.eventResize;
    return (eventResize && eventResize.segs.length) ? eventResize.segs : [];
};
// Renders foreground event harnesses for one column. Single-cell in-flow segs
// get a marginTop; hidden/mirror/multi-col/clipped segs are absolutely
// positioned, with left/right widths derived from the row's measured frame
// positions. Renders nothing until framePositions has been measured.
TableRow.prototype.renderFgSegs = function (segs, segIsHidden, // does NOT mean display:hidden
segTops, segMarginTops, selectedInstanceHash, todayRange, isDragging, isResizing, isDateSelecting) {
    var context = this.context;
    var eventSelection = this.props.eventSelection;
    var framePositions = this.state.framePositions;
    var defaultDisplayEventEnd = this.props.cells.length === 1; // colCnt === 1
    var nodes = [];
    if (framePositions) {
        for (var _i = 0, segs_1 = segs; _i < segs_1.length; _i++) {
            var seg = segs_1[_i];
            var instanceId = seg.eventRange.instance.instanceId;
            var isMirror = isDragging || isResizing || isDateSelecting;
            var isSelected = selectedInstanceHash[instanceId];
            // selected segs are hidden because a mirror copy is shown instead
            var isInvisible = segIsHidden[instanceId] || isSelected;
            // TODO: simpler way? NOT DRY
            var isAbsolute = segIsHidden[instanceId] || isMirror || seg.firstCol !== seg.lastCol || !seg.isStart || !seg.isEnd;
            var marginTop = void 0;
            var top_1 = void 0;
            var left = void 0;
            var right = void 0;
            if (isAbsolute) {
                top_1 = segTops[instanceId];
                // horizontal extent across the spanned columns, RTL-aware
                if (context.isRtl) {
                    right = 0;
                    left = framePositions.lefts[seg.lastCol] - framePositions.lefts[seg.firstCol];
                }
                else {
                    left = 0;
                    right = framePositions.rights[seg.firstCol] - framePositions.rights[seg.lastCol];
                }
            }
            else {
                marginTop = segMarginTops[instanceId];
            }
            /*
            known bug: events that are force to be list-item but span multiple days still take up space in later columns
            */
            nodes.push(common.createElement("div", { className: 'fc-daygrid-event-harness' + (isAbsolute ? ' fc-daygrid-event-harness-abs' : ''), key: instanceId,
                // in print mode when in mult cols, could collide
                ref: isMirror ? null : this.segHarnessRefs.createRef(instanceId + ':' + seg.firstCol), style: {
                    visibility: isInvisible ? 'hidden' : '',
                    marginTop: marginTop || '',
                    top: top_1 || '',
                    left: left || '',
                    right: right || '',
                } }, hasListItemDisplay(seg) ? (common.createElement(TableListItemEvent, __assign({ seg: seg, isDragging: isDragging, isSelected: instanceId === eventSelection, defaultDisplayEventEnd: defaultDisplayEventEnd }, common.getSegMeta(seg, todayRange)))) : (common.createElement(TableBlockEvent, __assign({ seg: seg, isDragging: isDragging, isResizing: isResizing, isDateSelecting: isDateSelecting, isSelected: instanceId === eventSelection, defaultDisplayEventEnd: defaultDisplayEventEnd }, common.getSegMeta(seg, todayRange))))));
        }
    }
    return nodes;
};
// Renders background fill segs (bg events, highlights, etc.) for this row,
// absolutely positioned against the measured column frame positions.
// fillType selects between a real bg-event render and a generic fill.
TableRow.prototype.renderFillSegs = function (segs, fillType) {
    var isRtl = this.context.isRtl;
    var todayRange = this.props.todayRange;
    var framePositions = this.state.framePositions;
    var nodes = [];
    if (framePositions) { // nothing renders until columns have been measured
        for (var _i = 0, segs_2 = segs; _i < segs_2.length; _i++) {
            var seg = segs_2[_i];
            // horizontal offsets computed from column edges, direction-aware
            var leftRightCss = isRtl ? {
                right: 0,
                left: framePositions.lefts[seg.lastCol] - framePositions.lefts[seg.firstCol],
            } : {
                left: 0,
                right: framePositions.rights[seg.firstCol] - framePositions.rights[seg.lastCol],
            };
            nodes.push(common.createElement("div", { key: common.buildEventRangeKey(seg.eventRange), className: "fc-daygrid-bg-harness", style: leftRightCss }, fillType === 'bg-event' ?
                common.createElement(common.BgEvent, __assign({ seg: seg }, common.getSegMeta(seg, todayRange))) :
                common.renderFill(fillType)));
        }
    }
    return common.createElement.apply(void 0, __spreadArrays([common.Fragment, {}], nodes));
};
// Re-measures layout after a render. When the change came from outside
// (isExternalSizingChange), rebuilds the horizontal PositionCache of cell
// frames; always recomputes per-seg heights and (when day-max limiting is
// height-based) the max content height available in a cell.
TableRow.prototype.updateSizing = function (isExternalSizingChange) {
    var _a = this, props = _a.props, frameElRefs = _a.frameElRefs;
    if (props.clientWidth !== null) { // positioning ready?
        if (isExternalSizingChange) {
            var frameEls = props.cells.map(function (cell) { return frameElRefs.currentMap[cell.key]; });
            if (frameEls.length) {
                var originEl = this.rootElRef.current;
                this.setState({
                    framePositions: new common.PositionCache(originEl, frameEls, true, // isHorizontal
                    false),
                });
            }
        }
        // dayMaxEvents/dayMaxEventRows === true means "fit to available height"
        var limitByContentHeight = props.dayMaxEvents === true || props.dayMaxEventRows === true;
        this.setState({
            segHeights: this.computeSegHeights(),
            maxContentHeight: limitByContentHeight ? this.computeMaxContentHeight() : null,
        });
    }
};
// Measures the rendered pixel height of every event-harness element,
// returning a hash keyed the same way as segHarnessRefs.
TableRow.prototype.computeSegHeights = function () {
    var harnessEls = this.segHarnessRefs.currentMap;
    return common.mapHash(harnessEls, function (harnessEl) {
        return harnessEl.getBoundingClientRect().height;
    });
};
// Vertical space available for events in a cell: distance from the top of
// the first cell's fg-event container down to the bottom of that cell.
TableRow.prototype.computeMaxContentHeight = function () {
    var firstKey = this.props.cells[0].key;
    var cellBottom = this.cellElRefs.currentMap[firstKey].getBoundingClientRect().bottom;
    var contentTop = this.fgElRefs.currentMap[firstKey].getBoundingClientRect().top;
    return cellBottom - contentTop;
};
// Returns this row's cell elements, in the same order as props.cells.
TableRow.prototype.getCellEls = function () {
    var refMap = this.cellElRefs.currentMap;
    return this.props.cells.map(function (cell) {
        return refMap[cell.key];
    });
};
return TableRow;
}(common.DateComponent));
// Deep-compare the segHeights hash so re-measuring identical heights
// doesn't trigger a needless re-render.
TableRow.addStateEquality({
    segHeights: common.isPropsEqual,
});
var PADDING_FROM_VIEWPORT = 10; // min px gap kept between popover and viewport edges
var SCROLL_DEBOUNCE = 10; // ms debounce before repositioning after a document scroll
// Generic position:fixed popover with a themed header/close button,
// auto-hide on outside mousedown, and debounced repositioning on scroll.
var Popover = /** @class */ (function (_super) {
    __extends(Popover, _super);
    function Popover() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        // debounces updateSize() calls triggered by document scrolling
        _this.repositioner = new common.DelayedRunner(_this.updateSize.bind(_this));
        _this.handleRootEl = function (el) {
            _this.rootEl = el;
            if (_this.props.elRef) {
                common.setRef(_this.props.elRef, el);
            }
        };
        // Triggered when the user clicks *anywhere* in the document, for the autoHide feature
        _this.handleDocumentMousedown = function (ev) {
            var onClose = _this.props.onClose;
            // only hide the popover if the click happened outside the popover
            if (onClose && !_this.rootEl.contains(ev.target)) {
                onClose();
            }
        };
        _this.handleDocumentScroll = function () {
            _this.repositioner.request(SCROLL_DEBOUNCE);
        };
        _this.handleCloseClick = function () {
            var onClose = _this.props.onClose;
            if (onClose) {
                onClose();
            }
        };
        return _this;
    }
    Popover.prototype.render = function () {
        var theme = this.context.theme;
        var props = this.props;
        var classNames = [
            'fc-popover',
            theme.getClass('popover'),
        ].concat(props.extraClassNames || []);
        return (common.createElement("div", __assign({ className: classNames.join(' ') }, props.extraAttrs, { ref: this.handleRootEl }),
            common.createElement("div", { className: 'fc-popover-header ' + theme.getClass('popoverHeader') },
                common.createElement("span", { className: "fc-popover-title" }, props.title),
                common.createElement("span", { className: 'fc-popover-close ' + theme.getIconClass('close'), onClick: this.handleCloseClick })),
            common.createElement("div", { className: 'fc-popover-body ' + theme.getClass('popoverContent') }, props.children)));
    };
    Popover.prototype.componentDidMount = function () {
        // document-level listeners power autoHide + scroll repositioning
        document.addEventListener('mousedown', this.handleDocumentMousedown);
        document.addEventListener('scroll', this.handleDocumentScroll);
        this.updateSize();
    };
    Popover.prototype.componentWillUnmount = function () {
        document.removeEventListener('mousedown', this.handleDocumentMousedown);
        document.removeEventListener('scroll', this.handleDocumentScroll);
    };
    // TODO: adjust on window resize
    /*
    NOTE: the popover is position:fixed, so coordinates are relative to the viewport
    NOTE: the PARENT calls this as well, on window resize. we would have wanted to use the repositioner,
    but need to ensure that all other components have updated size first (for alignmentEl)
    */
    Popover.prototype.updateSize = function () {
        var _a = this.props, alignmentEl = _a.alignmentEl, topAlignmentEl = _a.topAlignmentEl;
        var rootEl = this.rootEl;
        if (!rootEl) {
            return; // not sure why this was null, but we shouldn't let external components call updateSize() anyway
        }
        var dims = rootEl.getBoundingClientRect(); // only used for width,height
        var alignment = alignmentEl.getBoundingClientRect();
        // clamp top/left so the popover stays PADDING_FROM_VIEWPORT px inside the viewport
        var top = topAlignmentEl ? topAlignmentEl.getBoundingClientRect().top : alignment.top;
        top = Math.min(top, window.innerHeight - dims.height - PADDING_FROM_VIEWPORT);
        top = Math.max(top, PADDING_FROM_VIEWPORT);
        var left;
        if (this.context.isRtl) {
            left = alignment.right - dims.width;
        }
        else {
            left = alignment.left;
        }
        left = Math.min(left, window.innerWidth - dims.width - PADDING_FROM_VIEWPORT);
        left = Math.max(left, PADDING_FROM_VIEWPORT);
        common.applyStyle(rootEl, { top: top, left: left });
    };
    return Popover;
}(common.BaseComponent));
// Popover that lists the events hidden behind a "+N more" link for one day.
// Registers itself as an interactive component so drags/clicks inside it
// hit-test against the popover's date.
var MorePopover = /** @class */ (function (_super) {
    __extends(MorePopover, _super);
    function MorePopover() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        _this.handlePopoverEl = function (popoverEl) {
            _this.popoverEl = popoverEl;
            if (popoverEl) {
                _this.context.registerInteractiveComponent(_this, {
                    el: popoverEl,
                    useEventCenter: false,
                });
            }
            else {
                _this.context.unregisterInteractiveComponent(_this);
            }
        };
        return _this;
    }
    MorePopover.prototype.render = function () {
        var _a = this.context, options = _a.options, dateEnv = _a.dateEnv;
        var props = this.props;
        var date = props.date, hiddenInstances = props.hiddenInstances, todayRange = props.todayRange, dateProfile = props.dateProfile, selectedInstanceId = props.selectedInstanceId;
        var title = dateEnv.format(date, options.dayPopoverFormat);
        return (common.createElement(common.DayCellRoot, { date: date, dateProfile: dateProfile, todayRange: todayRange, elRef: this.handlePopoverEl }, function (rootElRef, dayClassNames, dataAttrs) { return (common.createElement(Popover, { elRef: rootElRef, title: title, extraClassNames: ['fc-more-popover'].concat(dayClassNames), extraAttrs: dataAttrs, onClose: props.onCloseClick, alignmentEl: props.alignmentEl, topAlignmentEl: props.topAlignmentEl },
            common.createElement(common.DayCellContent, { date: date, dateProfile: dateProfile, todayRange: todayRange }, function (innerElRef, innerContent) { return (innerContent &&
                common.createElement("div", { className: "fc-more-popover-misc", ref: innerElRef }, innerContent)); }),
            props.segs.map(function (seg) {
                var instanceId = seg.eventRange.instance.instanceId;
                return (common.createElement("div", { className: "fc-daygrid-event-harness", key: instanceId, style: {
                        visibility: hiddenInstances[instanceId] ? 'hidden' : '',
                    } }, hasListItemDisplay(seg) ? (common.createElement(TableListItemEvent, __assign({ seg: seg, isDragging: false, isSelected: instanceId === selectedInstanceId, defaultDisplayEventEnd: false }, common.getSegMeta(seg, todayRange)))) : (common.createElement(TableBlockEvent, __assign({ seg: seg, isDragging: false, isResizing: false, isDateSelecting: false, isSelected: instanceId === selectedInstanceId, defaultDisplayEventEnd: false }, common.getSegMeta(seg, todayRange))))));
            }))); }));
    };
    // Hit-tests a pointer position; the whole popover maps to its single all-day date.
    MorePopover.prototype.queryHit = function (positionLeft, positionTop, elWidth, elHeight) {
        var date = this.props.date;
        if (positionLeft < elWidth && positionTop < elHeight) {
            return {
                component: this,
                dateSpan: {
                    allDay: true,
                    range: { start: date, end: common.addDays(date, 1) },
                },
                dayEl: this.popoverEl,
                rect: {
                    left: 0,
                    top: 0,
                    right: elWidth,
                    bottom: elHeight,
                },
                layer: 1,
            };
        }
        return null;
    };
    MorePopover.prototype.isPopover = function () {
        return true; // gross
    };
    return MorePopover;
}(common.DateComponent));
// The whole day-grid body: splits segs/interactions by row, renders a
// TableRow per week, manages the "+N more" popover, and provides the
// row/column hit-testing used by drag-n-drop.
var Table = /** @class */ (function (_super) {
    __extends(Table, _super);
    function Table() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        // memoized per-row splitters so unchanged inputs reuse prior results
        _this.splitBusinessHourSegs = common.memoize(splitSegsByRow);
        _this.splitBgEventSegs = common.memoize(splitSegsByRow);
        _this.splitFgEventSegs = common.memoize(splitSegsByRow);
        _this.splitDateSelectionSegs = common.memoize(splitSegsByRow);
        _this.splitEventDrag = common.memoize(splitInteractionByRow);
        _this.splitEventResize = common.memoize(splitInteractionByRow);
        _this.buildBuildMoreLinkText = common.memoize(buildBuildMoreLinkText);
        _this.rowRefs = new common.RefMap();
        _this.state = {
            morePopoverState: null,
        };
        _this.handleRootEl = function (rootEl) {
            _this.rootEl = rootEl;
            common.setRef(_this.props.elRef, rootEl);
        };
        // Resolves the moreLinkClick option: function, 'popover', or a view name.
        _this.handleMoreLinkClick = function (arg) {
            var context = _this.context;
            var dateEnv = context.dateEnv;
            var clickOption = context.options.moreLinkClick;
            // converts an internal seg to the public shape handed to user callbacks
            function segForPublic(seg) {
                var _a = seg.eventRange, def = _a.def, instance = _a.instance, range = _a.range;
                return {
                    event: new common.EventApi(context, def, instance),
                    start: dateEnv.toDate(range.start),
                    end: dateEnv.toDate(range.end),
                    isStart: seg.isStart,
                    isEnd: seg.isEnd,
                };
            }
            if (typeof clickOption === 'function') {
                clickOption = clickOption({
                    date: dateEnv.toDate(arg.date),
                    allDay: true,
                    allSegs: arg.allSegs.map(segForPublic),
                    hiddenSegs: arg.hiddenSegs.map(segForPublic),
                    jsEvent: arg.ev,
                    view: context.viewApi,
                }); // hack to handle void
            }
            if (!clickOption || clickOption === 'popover') {
                // remember the fg segs the popover was opened with, so it auto-closes on data change
                _this.setState({
                    morePopoverState: __assign(__assign({}, arg), { currentFgEventSegs: _this.props.fgEventSegs }),
                });
            }
            else if (typeof clickOption === 'string') { // a view name
                context.calendarApi.zoomTo(arg.date, clickOption);
            }
        };
        _this.handleMorePopoverClose = function () {
            _this.setState({
                morePopoverState: null,
            });
        };
        return _this;
    }
    Table.prototype.render = function () {
        var _this = this;
        var props = this.props;
        var dateProfile = props.dateProfile, dayMaxEventRows = props.dayMaxEventRows, dayMaxEvents = props.dayMaxEvents, expandRows = props.expandRows;
        var morePopoverState = this.state.morePopoverState;
        var rowCnt = props.cells.length;
        var businessHourSegsByRow = this.splitBusinessHourSegs(props.businessHourSegs, rowCnt);
        var bgEventSegsByRow = this.splitBgEventSegs(props.bgEventSegs, rowCnt);
        var fgEventSegsByRow = this.splitFgEventSegs(props.fgEventSegs, rowCnt);
        var dateSelectionSegsByRow = this.splitDateSelectionSegs(props.dateSelectionSegs, rowCnt);
        var eventDragByRow = this.splitEventDrag(props.eventDrag, rowCnt);
        var eventResizeByRow = this.splitEventResize(props.eventResize, rowCnt);
        var buildMoreLinkText = this.buildBuildMoreLinkText(this.context.options.moreLinkText);
        var limitViaBalanced = dayMaxEvents === true || dayMaxEventRows === true;
        // if rows can't expand to fill fixed height, can't do balanced-height event limit
        // TODO: best place to normalize these options?
        if (limitViaBalanced && !expandRows) {
            limitViaBalanced = false;
            dayMaxEventRows = null;
            dayMaxEvents = null;
        }
        var classNames = [
            'fc-daygrid-body',
            limitViaBalanced ? 'fc-daygrid-body-balanced' : 'fc-daygrid-body-unbalanced',
            expandRows ? '' : 'fc-daygrid-body-natural',
        ];
        return (common.createElement("div", { className: classNames.join(' '), ref: this.handleRootEl, style: {
                // these props are important to give this wrapper correct dimensions for interactions
                // TODO: if we set it here, can we avoid giving to inner tables?
                width: props.clientWidth,
                minWidth: props.tableMinWidth,
            } },
            common.createElement(common.NowTimer, { unit: "day" }, function (nowDate, todayRange) { return (common.createElement(common.Fragment, null,
                common.createElement("table", { className: "fc-scrollgrid-sync-table", style: {
                        width: props.clientWidth,
                        minWidth: props.tableMinWidth,
                        height: expandRows ? props.clientHeight : '',
                    } },
                    props.colGroupNode,
                    common.createElement("tbody", null, props.cells.map(function (cells, row) { return (common.createElement(TableRow, { ref: _this.rowRefs.createRef(row), key: cells.length
                        ? cells[0].date.toISOString() /* best? or put key on cell? or use diff formatter? */
                        : row // in case there are no cells (like when resource view is loading)
                    , showDayNumbers: rowCnt > 1, showWeekNumbers: props.showWeekNumbers, todayRange: todayRange, dateProfile: dateProfile, cells: cells, renderIntro: props.renderRowIntro, businessHourSegs: businessHourSegsByRow[row], eventSelection: props.eventSelection, bgEventSegs: bgEventSegsByRow[row].filter(isSegAllDay) /* hack */, fgEventSegs: fgEventSegsByRow[row], dateSelectionSegs: dateSelectionSegsByRow[row], eventDrag: eventDragByRow[row], eventResize: eventResizeByRow[row], dayMaxEvents: dayMaxEvents, dayMaxEventRows: dayMaxEventRows, clientWidth: props.clientWidth, clientHeight: props.clientHeight, buildMoreLinkText: buildMoreLinkText, onMoreClick: _this.handleMoreLinkClick })); }))),
                (!props.forPrint && morePopoverState && morePopoverState.currentFgEventSegs === props.fgEventSegs) && (common.createElement(MorePopover, { date: morePopoverState.date, dateProfile: dateProfile, segs: morePopoverState.allSegs, alignmentEl: morePopoverState.dayEl, topAlignmentEl: rowCnt === 1 ? props.headerAlignElRef.current : null, onCloseClick: _this.handleMorePopoverClose, selectedInstanceId: props.eventSelection, hiddenInstances: // yuck
                    (props.eventDrag ? props.eventDrag.affectedInstances : null) ||
                    (props.eventResize ? props.eventResize.affectedInstances : null) ||
                    {}, todayRange: todayRange })))); })));
    };
    // Hit System
    // ----------------------------------------------------------------------------------------------------
    // Builds the row/column PositionCaches used by positionToHit().
    Table.prototype.prepareHits = function () {
        this.rowPositions = new common.PositionCache(this.rootEl, this.rowRefs.collect().map(function (rowObj) { return rowObj.getCellEls()[0]; }), // first cell el in each row. TODO: not optimal
        false, true);
        this.colPositions = new common.PositionCache(this.rootEl, this.rowRefs.currentMap[0].getCellEls(), // cell els in first row
        true, // horizontal
        false);
    };
    // Maps pixel coords (relative to rootEl) to a {row, col, dateSpan, ...} hit, or null.
    Table.prototype.positionToHit = function (leftPosition, topPosition) {
        var _a = this, colPositions = _a.colPositions, rowPositions = _a.rowPositions;
        var col = colPositions.leftToIndex(leftPosition);
        var row = rowPositions.topToIndex(topPosition);
        if (row != null && col != null) {
            return {
                row: row,
                col: col,
                dateSpan: {
                    range: this.getCellRange(row, col),
                    allDay: true,
                },
                dayEl: this.getCellEl(row, col),
                relativeRect: {
                    left: colPositions.lefts[col],
                    right: colPositions.rights[col],
                    top: rowPositions.tops[row],
                    bottom: rowPositions.bottoms[row],
                },
            };
        }
        return null;
    };
    Table.prototype.getCellEl = function (row, col) {
        return this.rowRefs.currentMap[row].getCellEls()[col]; // TODO: not optimal
    };
    // One-day range for the cell at (row, col).
    Table.prototype.getCellRange = function (row, col) {
        var start = this.props.cells[row][col].date;
        var end = common.addDays(start, 1);
        return { start: start, end: end };
    };
    return Table;
}(common.DateComponent));
// Normalizes the moreLinkText option into a function of the hidden-event
// count. A user-supplied function is passed through untouched; a plain
// string becomes "+{num} {string}".
function buildBuildMoreLinkText(moreLinkTextInput) {
    return typeof moreLinkTextInput === 'function'
        ? moreLinkTextInput
        : function (num) { return "+" + num + " " + moreLinkTextInput; };
}
// True when the seg's underlying event definition is an all-day event.
function isSegAllDay(seg) {
    var eventDef = seg.eventRange.def;
    return eventDef.allDay;
}
// Slicer that projects date ranges onto a DayTableModel's row/col cells.
var DayTableSlicer = /** @class */ (function (_super) {
    __extends(DayTableSlicer, _super);
    function DayTableSlicer() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        // day-grid forces list-item-display events onto whole-day slots
        _this.forceDayIfListItem = true;
        return _this;
    }
    DayTableSlicer.prototype.sliceRange = function (dateRange, dayTableModel) {
        return dayTableModel.sliceRange(dateRange);
    };
    return DayTableSlicer;
}(common.Slicer));
// Connects a DayTableModel to the Table component: slices event data into
// row segs and registers itself for drag-n-drop hit-testing.
var DayTable = /** @class */ (function (_super) {
    __extends(DayTable, _super);
    function DayTable() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        _this.slicer = new DayTableSlicer();
        _this.tableRef = common.createRef();
        // register/unregister as an interactive component as the root el mounts/unmounts
        _this.handleRootEl = function (rootEl) {
            if (rootEl) {
                _this.context.registerInteractiveComponent(_this, { el: rootEl });
            }
            else {
                _this.context.unregisterInteractiveComponent(_this);
            }
        };
        return _this;
    }
    DayTable.prototype.render = function () {
        var _a = this, props = _a.props, context = _a.context;
        return (common.createElement(Table, __assign({ ref: this.tableRef, elRef: this.handleRootEl }, this.slicer.sliceProps(props, props.dateProfile, props.nextDayThreshold, context, props.dayTableModel), { dateProfile: props.dateProfile, cells: props.dayTableModel.cells, colGroupNode: props.colGroupNode, tableMinWidth: props.tableMinWidth, renderRowIntro: props.renderRowIntro, dayMaxEvents: props.dayMaxEvents, dayMaxEventRows: props.dayMaxEventRows, showWeekNumbers: props.showWeekNumbers, expandRows: props.expandRows, headerAlignElRef: props.headerAlignElRef, clientWidth: props.clientWidth, clientHeight: props.clientHeight, forPrint: props.forPrint })));
    };
    DayTable.prototype.prepareHits = function () {
        this.tableRef.current.prepareHits();
    };
    // Delegates hit-testing to the inner Table, reshaping its result for the interaction system.
    DayTable.prototype.queryHit = function (positionLeft, positionTop) {
        var rawHit = this.tableRef.current.positionToHit(positionLeft, positionTop);
        if (rawHit) {
            return {
                component: this,
                dateSpan: rawHit.dateSpan,
                dayEl: rawHit.dayEl,
                rect: {
                    left: rawHit.relativeRect.left,
                    right: rawHit.relativeRect.right,
                    top: rawHit.relativeRect.top,
                    bottom: rawHit.relativeRect.bottom,
                },
                layer: 0,
            };
        }
        return null;
    };
    return DayTable;
}(common.DateComponent));
// The top-level day-grid view: builds the day-table model from the date
// profile and lays out an optional header row above the DayTable body.
var DayTableView = /** @class */ (function (_super) {
    __extends(DayTableView, _super);
    function DayTableView() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        _this.buildDayTableModel = common.memoize(buildDayTableModel);
        _this.headerRef = common.createRef();
        _this.tableRef = common.createRef();
        return _this;
    }
    DayTableView.prototype.render = function () {
        var _this = this;
        var _a = this.context, options = _a.options, dateProfileGenerator = _a.dateProfileGenerator;
        var props = this.props;
        var dayTableModel = this.buildDayTableModel(props.dateProfile, dateProfileGenerator);
        var headerContent = options.dayHeaders && (common.createElement(common.DayHeader, { ref: this.headerRef, dateProfile: props.dateProfile, dates: dayTableModel.headerDates, datesRepDistinctDays: dayTableModel.rowCnt === 1 }));
        var bodyContent = function (contentArg) { return (common.createElement(DayTable, { ref: _this.tableRef, dateProfile: props.dateProfile, dayTableModel: dayTableModel, businessHours: props.businessHours, dateSelection: props.dateSelection, eventStore: props.eventStore, eventUiBases: props.eventUiBases, eventSelection: props.eventSelection, eventDrag: props.eventDrag, eventResize: props.eventResize, nextDayThreshold: options.nextDayThreshold, colGroupNode: contentArg.tableColGroupNode, tableMinWidth: contentArg.tableMinWidth, dayMaxEvents: options.dayMaxEvents, dayMaxEventRows: options.dayMaxEventRows, showWeekNumbers: options.weekNumbers, expandRows: !props.isHeightAuto, headerAlignElRef: _this.headerElRef, clientWidth: contentArg.clientWidth, clientHeight: contentArg.clientHeight, forPrint: props.forPrint })); };
        // dayMinWidth forces a horizontally-scrollable layout
        return options.dayMinWidth
            ? this.renderHScrollLayout(headerContent, bodyContent, dayTableModel.colCnt, options.dayMinWidth)
            : this.renderSimpleLayout(headerContent, bodyContent);
    };
    return DayTableView;
}(TableView));
// Builds the DayTableModel for the view's render range. Day cells are
// broken into week rows whenever the current range spans at least a week.
function buildDayTableModel(dateProfile, dateProfileGenerator) {
    var breakOnWeeks = /year|month|week/.test(dateProfile.currentRangeUnit);
    var daySeries = new common.DaySeriesModel(dateProfile.renderRange, dateProfileGenerator);
    return new common.DayTableModel(daySeries, breakOnWeeks);
}
// DateProfileGenerator specialized for day-grid views: week-aligns the
// render range of year/month views and pads month views to 6 weeks when
// fixedWeekCount is enabled.
var TableDateProfileGenerator = /** @class */ (function (_super) {
    __extends(TableDateProfileGenerator, _super);
    function TableDateProfileGenerator() {
        return _super !== null && _super.apply(this, arguments) || this;
    }
    // Computes the date range that will be rendered.
    TableDateProfileGenerator.prototype.buildRenderRange = function (currentRange, currentRangeUnit, isRangeAllDay) {
        var dateEnv = this.props.dateEnv;
        var renderRange = _super.prototype.buildRenderRange.call(this, currentRange, currentRangeUnit, isRangeAllDay);
        var start = renderRange.start;
        var end = renderRange.end;
        var endOfWeek;
        // year and month views should be aligned with weeks. this is already done for week
        if (/^(year|month)$/.test(currentRangeUnit)) {
            start = dateEnv.startOfWeek(start);
            // make end-of-week if not already
            endOfWeek = dateEnv.startOfWeek(end);
            if (endOfWeek.valueOf() !== end.valueOf()) {
                end = common.addWeeks(endOfWeek, 1);
            }
        }
        // ensure 6 weeks
        if (this.props.monthMode &&
            this.props.fixedWeekCount) {
            var rowCnt = Math.ceil(// could be partial weeks due to hiddenDays
            common.diffWeeks(start, end));
            end = common.addWeeks(end, 6 - rowCnt);
        }
        return { start: start, end: end };
    };
    return TableDateProfileGenerator;
}(common.DateProfileGenerator));
// Refiners for this plugin's calendar options; all accept values as-is.
var OPTION_REFINERS = {
    moreLinkClick: common.identity,
    moreLinkClassNames: common.identity,
    moreLinkContent: common.identity,
    moreLinkDidMount: common.identity,
    moreLinkWillUnmount: common.identity,
};
// Plugin definition: registers the dayGrid view family.
// dayGridDay/Week/Month derive from the base 'dayGrid' view with fixed durations.
var plugin = common.createPlugin({
    initialView: 'dayGridMonth',
    optionRefiners: OPTION_REFINERS,
    views: {
        dayGrid: {
            component: DayTableView,
            dateProfileGeneratorClass: TableDateProfileGenerator,
        },
        dayGridDay: {
            type: 'dayGrid',
            duration: { days: 1 },
        },
        dayGridWeek: {
            type: 'dayGrid',
            duration: { weeks: 1 },
        },
        dayGridMonth: {
            type: 'dayGrid',
            duration: { months: 1 },
            monthMode: true,
            fixedWeekCount: true, // always render 6 week rows
        },
    },
});
// auto-register the plugin when this bundle is loaded as a global script
common.globalPlugins.push(plugin);
exports.DayGridView = DayTableView; // alias kept for backward compatibility
exports.DayTable = DayTable;
exports.DayTableSlicer = DayTableSlicer;
exports.Table = Table;
exports.TableView = TableView;
exports.buildDayTableModel = buildDayTableModel;
exports.default = plugin;
Object.defineProperty(exports, '__esModule', { value: true });
return exports;
}({}, FullCalendar));
|
// Integration spec for Plague Rat: each copy in play deals damage to every
// non-Rat creature when a Plague Rat enters play, and the damage stacks.
describe('Plague Rat', function() {
    integration(function() {
        describe('Plague Rat\'s ability', function() {
            beforeEach(function() {
                this.setupTest({
                    player1: {
                        house: 'shadows',
                        inPlay: ['mighty-tiger', 'dextre'],
                        hand: ['plague-rat', 'plague-rat']
                    },
                    player2: {
                        inPlay: ['horseman-of-famine', 'horseman-of-death']
                    }
                });
            });
            it('should deal 1 damage to all non-rat creatures', function() {
                this.player1.play(this.plagueRat);
                expect(this.mightyTiger.tokens.damage).toBe(1);
                expect(this.dextre.tokens.damage).toBe(1);
                // the rat itself must be unharmed by its own ability
                expect(this.plagueRat.hasToken('damage')).toBe(false);
                expect(this.horsemanOfFamine.tokens.damage).toBe(1);
                expect(this.horsemanOfDeath.tokens.damage).toBe(1);
            });
            it('second Plague Rat should deal 2 damage to all non-rat-creatures', function() {
                this.player1.play(this.plagueRat);
                this.plagueRat2 = this.player1.findCardByName('plague-rat', 'hand');
                this.player1.play(this.plagueRat2);
                // 1 damage from the first play + 2 from the second = 3 total
                expect(this.mightyTiger.tokens.damage).toBe(3);
                expect(this.player1.player.deck[0]).toBe(this.dextre); // Dextre's behaviour when destroyed.
                expect(this.plagueRat.hasToken('damage')).toBe(false);
                expect(this.horsemanOfFamine.tokens.damage).toBe(3);
                expect(this.horsemanOfDeath.tokens.damage).toBe(3);
            });
        });
    });
});
|
// Auto-generated icon module: SVG body (inner markup only, fill inherits
// currentColor) plus the viewBox dimensions in px.
var data = {
	"body": "<g fill=\"currentColor\"><path d=\"M21.9 12c0-.11-.06-.22-.09-.33a4.17 4.17 0 0 0-.18-.57c-.05-.12-.12-.24-.18-.37s-.15-.3-.24-.44S21 10.08 21 10s-.2-.25-.31-.37s-.21-.2-.32-.3L20 9l-.36-.24a3.68 3.68 0 0 0-.44-.23l-.39-.18a4.13 4.13 0 0 0-.5-.15a3 3 0 0 0-.41-.09L17.67 8A6 6 0 0 0 6.33 8l-.18.05a3 3 0 0 0-.41.09a4.13 4.13 0 0 0-.5.15l-.39.18a3.68 3.68 0 0 0-.44.23l-.36.3l-.37.31c-.11.1-.22.19-.32.3s-.21.25-.31.37s-.18.23-.26.36s-.16.29-.24.44s-.13.25-.18.37a4.17 4.17 0 0 0-.18.57c0 .11-.07.22-.09.33A5.23 5.23 0 0 0 2 13a5.5 5.5 0 0 0 .09.91c0 .1.05.19.07.29a5.58 5.58 0 0 0 .18.58l.12.29a5 5 0 0 0 .3.56l.14.22a.56.56 0 0 0 .05.08L3 16a5 5 0 0 0 4 2h3v-1.37a2 2 0 0 1-1 .27a2.05 2.05 0 0 1-1.44-.61a2 2 0 0 1 .05-2.83l3-2.9A2 2 0 0 1 12 10a2 2 0 0 1 1.41.59l3 3a2 2 0 0 1 0 2.82A2 2 0 0 1 15 17a1.92 1.92 0 0 1-1-.27V18h3a5 5 0 0 0 4-2l.05-.05a.56.56 0 0 0 .05-.08l.14-.22a5 5 0 0 0 .3-.56l.12-.29a5.58 5.58 0 0 0 .18-.58c0-.1.05-.19.07-.29A5.5 5.5 0 0 0 22 13a5.23 5.23 0 0 0-.1-1z\"/><path d=\"M12.71 11.29a1 1 0 0 0-1.4 0l-3 2.9a1 1 0 1 0 1.38 1.44L11 14.36V20a1 1 0 0 0 2 0v-5.59l1.29 1.3a1 1 0 0 0 1.42 0a1 1 0 0 0 0-1.42z\"/></g>",
	"width": 24,
	"height": 24
};
exports.__esModule = true;
exports.default = data;
|
import json
from telethon.errors import ChatAdminRequiredError
from telethon.sync import TelegramClient
from telethon.tl import functions
from common import logger, config
class SyncTelegramClient:
    """Thin synchronous wrapper around Telethon's ``TelegramClient``.

    Credentials (``api_id``/``api_hash``) are read from the shared ``config``
    mapping; the Telethon session is stored in a local file named ``session``.
    """

    def __init__(self):
        self._client = TelegramClient("session", config["api_id"], config["api_hash"])

    def fetch_messages(self, channel, size=100, max_id=None, min_id=None):
        """Fetch up to `size` messages from a specific channel / group.

        ``max_id``/``min_id`` bound the message-id range. The telethon module
        has issues if a keyword passed is None, so each keyword is forwarded
        only when it is not None. (Explicit checks replace the previous
        fragile ``locals()`` lookup, which breaks under renaming.)
        """
        logger.debug("Fetching up to %d messages from channel %s" % (size, channel))
        kwargs = {}
        if max_id is not None:
            kwargs["max_id"] = max_id
        if min_id is not None:
            kwargs["min_id"] = min_id
        with self._client as client:
            data = client.get_messages(channel, size, **kwargs)
        return data

    def get_channel_info(self, channel):
        """Return the full channel metadata as a plain dict (parsed JSON)."""
        with self._client as client:
            data = client(functions.channels.GetFullChannelRequest(channel=channel)).to_json()
        return json.loads(data)

    def get_channel_users(self, channel, limit=1000):
        """method to get participants from channel (we might not have privileges to get this data)
        getting some errors about permissions"""
        with self._client as client:
            try:
                participants = client.get_participants(channel, limit)
            except ChatAdminRequiredError as e:
                # TODO: surface a friendlier error once the permission model is decided
                raise e
        return participants
|
from unittest import mock
from google.appengine.ext import ndb, testbed
from werkzeug.test import Client
from backend.common.consts.event_type import EventType
from backend.common.helpers.event_team_status_helper import EventTeamStatusHelper
from backend.common.helpers.firebase_pusher import FirebasePusher
from backend.common.helpers.playoff_advancement_helper import (
PlayoffAdvancement,
PlayoffAdvancementHelper,
)
from backend.common.models.event import Event
from backend.common.models.event_details import EventDetails
from backend.common.models.event_playoff_advancement import EventPlayoffAdvancement
from backend.common.models.event_team import EventTeam
from backend.common.models.event_team_status import EventTeamStatus
from backend.common.models.team import Team
@mock.patch.object(FirebasePusher, "update_live_events")
def test_update_live_events(update_mock: mock.Mock, tasks_client: Client) -> None:
    """The task endpoint returns 200 and pushes live events to Firebase exactly once."""
    resp = tasks_client.get("/tasks/do/update_live_events")
    assert resp.status_code == 200
    update_mock.assert_called_once()
def test_enqueue_eventteam_status_bad_year(
    tasks_client: Client, taskqueue_stub: testbed.taskqueue_stub.TaskQueueServiceStub
) -> None:
    """A non-numeric year segment should 404."""
    resp = tasks_client.get("/tasks/math/enqueue/event_team_status/asdf")
    assert resp.status_code == 404
def test_enqueue_eventteam_status_no_events(
    tasks_client: Client, taskqueue_stub: testbed.taskqueue_stub.TaskQueueServiceStub
) -> None:
    """With no events for the year, nothing is enqueued and the body lists an empty set."""
    resp = tasks_client.get("/tasks/math/enqueue/event_team_status/2020")
    assert resp.status_code == 200
    assert resp.data == b"Enqueued for: []"
    tasks = taskqueue_stub.get_filtered_tasks(queue_names="default")
    assert len(tasks) == 0
def test_enqueue_eventteam_status_no_output_in_taskqueue(
    tasks_client: Client, taskqueue_stub: testbed.taskqueue_stub.TaskQueueServiceStub
) -> None:
    """When invoked by the taskqueue itself (X-Appengine-Taskname set), the response body is empty."""
    resp = tasks_client.get(
        "/tasks/math/enqueue/event_team_status/2020",
        headers={"X-Appengine-Taskname": "test"},
    )
    assert resp.status_code == 200
    assert resp.data == b""
    tasks = taskqueue_stub.get_filtered_tasks(queue_names="default")
    assert len(tasks) == 0
def test_enqueue_eventteam_status(
    tasks_client: Client, taskqueue_stub: testbed.taskqueue_stub.TaskQueueServiceStub
) -> None:
    """Each event in the requested year gets one status-calc task enqueued."""
    Event(
        id="2020test",
        year=2020,
        event_short="test",
        event_type_enum=EventType.REGIONAL,
    ).put()

    resp = tasks_client.get("/tasks/math/enqueue/event_team_status/2020")
    assert resp.status_code == 200
    assert resp.data == b"Enqueued for: ['2020test']"

    tasks = taskqueue_stub.get_filtered_tasks(queue_names="default")
    assert len(tasks) == 1
def test_do_eventteam_status_not_found(
    tasks_client: Client, taskqueue_stub: testbed.taskqueue_stub.TaskQueueServiceStub
) -> None:
    """An unknown event key 404s and enqueues nothing."""
    resp = tasks_client.get("/tasks/math/do/event_team_status/asdf")
    assert resp.status_code == 404
    tasks = taskqueue_stub.get_filtered_tasks(queue_names="default")
    assert len(tasks) == 0
@mock.patch.object(EventTeamStatusHelper, "generate_team_at_event_status")
def test_do_eventteam_status(
    status_mock: mock.Mock,
    tasks_client: Client,
    taskqueue_stub: testbed.taskqueue_stub.TaskQueueServiceStub,
) -> None:
    """The status computed by EventTeamStatusHelper is persisted onto each EventTeam."""
    Event(
        id="2020test",
        year=2020,
        event_short="test",
        event_type_enum=EventType.REGIONAL,
    ).put()
    EventTeam(
        id="2020test_frc254",
        year=2020,
        event=ndb.Key(Event, "2020test"),
        team=ndb.Key(Team, "frc254"),
    ).put()

    # sentinel status the mocked helper will hand back for every team
    status = EventTeamStatus(
        qual=None,
        playoff=None,
        alliance=None,
        last_match_key=None,
        next_match_key=None,
    )
    status_mock.return_value = status

    resp = tasks_client.get("/tasks/math/do/event_team_status/2020test")
    assert resp.status_code == 200
    assert resp.data == b"Finished calculating event team statuses for: 2020test"

    et = EventTeam.get_by_id("2020test_frc254")
    assert et is not None
    assert et.status == status
def test_enqueue_playoff_advancement_no_event(
    tasks_client: Client, taskqueue_stub: testbed.taskqueue_stub.TaskQueueServiceStub
) -> None:
    """Enqueueing for a nonexistent event 404s and enqueues nothing."""
    resp = tasks_client.get("/tasks/math/enqueue/playoff_advancement_update/asdf")
    assert resp.status_code == 404
    tasks = taskqueue_stub.get_filtered_tasks(queue_names="default")
    assert len(tasks) == 0
def test_enqueue_playoff_advancement(
    tasks_client: Client, taskqueue_stub: testbed.taskqueue_stub.TaskQueueServiceStub
) -> None:
    """A valid event gets exactly one playoff-advancement task enqueued."""
    Event(
        id="2020test",
        year=2020,
        event_short="test",
        event_type_enum=EventType.REGIONAL,
    ).put()

    resp = tasks_client.get("/tasks/math/enqueue/playoff_advancement_update/2020test")
    assert resp.status_code == 200
    assert resp.data == b"Enqueued playoff advancement calc for 2020test"

    tasks = taskqueue_stub.get_filtered_tasks(queue_names="default")
    assert len(tasks) == 1
def test_calc_playoff_advancement_no_event(tasks_client: Client) -> None:
    """Calculating for a nonexistent event 404s."""
    resp = tasks_client.get("/tasks/math/do/playoff_advancement_update/asdf")
    assert resp.status_code == 404
@mock.patch.object(PlayoffAdvancementHelper, "generate_playoff_advancement")
def test_calc_playoff_advancement(calc_mock: mock.Mock, tasks_client: Client) -> None:
    """The computed advancement is written to the event's EventDetails record."""
    Event(
        id="2020test",
        year=2020,
        event_short="test",
        event_type_enum=EventType.REGIONAL,
    ).put()

    # minimal advancement object returned by the mocked helper
    advancement = PlayoffAdvancement(
        bracket_table={},
        playoff_advancement={},
        double_elim_matches={},
        playoff_template=None,
    )
    calc_mock.return_value = advancement

    resp = tasks_client.get("/tasks/math/do/playoff_advancement_update/2020test")
    assert resp.status_code == 200
    assert len(resp.data) > 0

    # Make sure we set the EventDetails
    ed = EventDetails.get_by_id("2020test")
    assert ed is not None
    assert ed.playoff_advancement == EventPlayoffAdvancement(
        advancement={},
        bracket={},
    )
|
import React from 'react'
import styled from 'react-emotion'
import {
MDXProvider
} from '@mdx-js/react'
import ThemeProvider from './themeProvider'
import mdxComponents from './mdxComponents'
import Sidebar from './sidebar'
import RightSidebar from './rightSidebar'
// Emotion styled-component primitives for the docs layout. The CSS inside
// each template literal is kept verbatim; only the surrounding JS formatting
// (mangled by an auto-formatter) has been cleaned up.
const Wrapper = styled('div')`
display: flex;
justify-content: space-between;
@media only screen and (max-width: 767px) {
  display: block;
}
`

// Main column; flexes to fill the space between the two sidebars.
const Content = styled('main')`
display: flex;
flex-grow: 1;
margin: 0px 88px;
margin-top: 3rem;
@media only screen and (max-width: 1023px) {
  padding-left: 0;
  margin: 0 10px;
  margin-top: 3rem;
}
`

// Constrains content width on narrow viewports.
const MaxWidth = styled('div')`
@media only screen and (max-width: 50rem) {
  width: 100%;
  position: relative;
}
`

// Fixed-width columns for the two sidebars.
const LeftSideBarWidth = styled('div')`
width: 298px;
`

const RightSideBarWidth = styled('div')`
width: 224px;
`
const Layout = ( {
children,
location
} ) => ( <
ThemeProvider location = {
location
} >
<
MDXProvider components = {
mdxComponents
} >
<
Wrapper >
<
LeftSideBarWidth className = {
'hidden-xs'
} >
<
Sidebar location = {
location
}
/> < /
LeftSideBarWidth > <
Content >
<
MaxWidth > {
children
} < /MaxWidth> < /
Content > <
RightSideBarWidth className = {
'hidden-xs'
} >
<
RightSidebar location = {
location
}
/> < /
RightSideBarWidth > <
/Wrapper> < /
MDXProvider > <
/ThemeProvider>
)
export default Layout
|
// Minified, generated Facebook module bundle (CavalryLogger + __d module
// definitions). Do not hand-edit the lines below; they are build output.
if (self.CavalryLogger) { CavalryLogger.start_js(["L2zt9"]); }
// Route controller for the async link-click logging endpoint.
__d("XLynxAsyncCallbackController",["XController"],(function(a,b,c,d,e,f){e.exports=b("XController").create("/si/linkclick/ajax_callback/",{lynx_uri:{type:"String"}})}),null);
// Fire-and-forget logger that POSTs a lynx URI; bumps ODS counters on failure.
__d("FBLynxLogging",["AsyncRequest","AsyncResponse","BanzaiODS","XLynxAsyncCallbackController"],(function(a,b,c,d,e,f){"use strict";a={log:function(a){var c=b("XLynxAsyncCallbackController").getURIBuilder().getURI();new(b("AsyncRequest"))(c).setData({lynx_uri:a}).setErrorHandler(function(a){a=a.getError();b("BanzaiODS").bumpEntityKey("linkshim","click_log.post.fail."+a)}).setTransportErrorHandler(function(a){a=a.getError();b("BanzaiODS").bumpEntityKey("linkshim","click_log.post.transport_fail."+a)}).send()}};e.exports=a}),null);
// Predicate: is the URI a Facebook/Messenger/Bonfire link-shim URL?
__d("isLinkshimURI",["isBonfireURI","isFacebookURI","isMessengerDotComURI"],(function(a,b,c,d,e,f){"use strict";function a(a){var c=a.getPath();return(c==="/l.php"||c.indexOf("/si/ajax/l/")===0||c.indexOf("/l/")===0||c.indexOf("l/")===0)&&(b("isFacebookURI")(a)||b("isMessengerDotComURI")(a)||b("isBonfireURI")(a))?!0:!1}e.exports=a}),null);
// Helpers that swap shimmed hrefs for their real targets (and back) and log clicks.
__d("FBLynxBase",["$","FBLynxLogging","LinkshimHandlerConfig","isLinkshimURI","URI"],(function(a,b,c,d,e,f){"use strict";__p&&__p();function g(a){if(!b("isLinkshimURI")(a))return null;a=a.getQueryData().u;return!a?null:a}var h={logAsyncClick:function(a){h.swapLinkWithUnshimmedLink(a);a=a.getAttribute("data-lynx-uri");if(!a)return;b("FBLynxLogging").log(a)},originReferrerPolicyClick:function(a){var c=b("$")("meta_referrer");c.content=b("LinkshimHandlerConfig").switched_meta_referrer_policy;h.logAsyncClick(a);setTimeout(function(){c.content=b("LinkshimHandlerConfig").default_meta_referrer_policy},100)},swapLinkWithUnshimmedLink:function(a){var c=a.href,d=g(new(b("URI"))(c));if(!d)return;a.setAttribute("data-lynx-uri",c);a.href=d},revertSwapIfLynxURIPresent:function(a){var b=a.getAttribute("data-lynx-uri");if(!b)return;a.removeAttribute("data-lynx-uri");a.href=b}};e.exports=h}),null);
// Document-level event delegation for data-lynx-mode links (click/mouseover/contextmenu).
__d("FBLynx",["Event","FBLynxBase","LinkshimHandlerConfig","Parent"],(function(a,b,c,d,e,f){"use strict";__p&&__p();var g={alreadySetup:!1,setupDelegation:function(){__p&&__p();var a=arguments.length<=0||arguments[0]===undefined?!1:arguments[0];if(document.body==null){if(a)return;setTimeout(function(){g.setupDelegation(!0)},100);return}if(g.alreadySetup)return;g.alreadySetup=!0;var c=function(event){__p&&__p();var a=g.getMaybeLynxLink(event.target);if(!a)return;var c=a[0];a=a[1];switch(c){case"async":case"asynclazy":b("FBLynxBase").logAsyncClick(a);break;case"origin":b("FBLynxBase").originReferrerPolicyClick(a);break;case"hover":g.hoverClick(a);break}};b("Event").listen(document.body,"click",c);b("LinkshimHandlerConfig").middle_click_requires_event&&b("Event").listen(document.body,"mouseup",function(event){event.button==1&&c(event)});b("Event").listen(document.body,"mouseover",function(event){var a=g.getMaybeLynxLink(event.target);if(!a)return;var b=a[0];a=a[1];switch(b){case"async":case"asynclazy":case"origin":case"hover":g.mouseover(a);break}});b("Event").listen(document.body,"contextmenu",function(event){var a=g.getMaybeLynxLink(event.target);if(!a)return;var b=a[0];a=a[1];switch(b){case"async":case"hover":case"origin":g.contextmenu(a);break;case"asynclazy":break}})},getMaybeLynxLink:function(a){a=b("Parent").byAttribute(a,"data-lynx-mode");if(a instanceof HTMLAnchorElement){var c=a.getAttribute("data-lynx-mode");switch(c){case"async":case"asynclazy":case"hover":case"origin":return[c,a];default:return null}}return null},hoverClick:function(a){b("FBLynxBase").revertSwapIfLynxURIPresent(a)},mouseover:function(a){b("FBLynxBase").swapLinkWithUnshimmedLink(a)},contextmenu:function(a){b("FBLynxBase").revertSwapIfLynxURIPresent(a)}};e.exports=g}),null);
// Names of the available completion-criteria strategies exposed by this
// module. NOTE(review): the strings look intentionally unsorted — presumably
// display order; confirm before reordering.
module.exports = [
  "do_all",
  "num_correct_in_a_row_2",
  "num_correct_in_a_row_10",
  "num_correct_in_a_row_3",
  "num_correct_in_a_row_5",
  "m_of_n",
];
|
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// Node core test: fs.existsSync / fs.exists on a very deep directory path.
var common = require('../common.js'),
    assert = require('assert'),
    fs = require('fs'),
    path = require('path');
var dir = path.resolve(common.fixturesDir),
    dirs = [];
// Make a long path.
for (var i = 0; i < 50; i++) {
  dir = dir + '/123456790';
  try {
    fs.mkdirSync(dir, '0777');
  } catch (e) {
    if (e.code == 'EEXIST') {
      // Ignore;
    } else {
      // Any other error: remove what we created so far, then fail loudly.
      cleanup();
      throw e;
    }
  }
  dirs.push(dir);
}
// Test existsSync
var r = fs.existsSync(dir);
if (r !== true) {
  cleanup();
  throw new Error('fs.existsSync returned false');
}
// Text exists
// NOTE(review): fs.exists is deprecated in modern Node (callback takes no
// error argument) — kept here because it is the API under test.
fs.exists(dir, function(r) {
  // cleanup() must run before the assertion so the fixture tree is removed
  // even when the check fails.
  cleanup();
  if (r !== true) {
    throw new Error('fs.exists reported false');
  }
});
// Remove all created directories
function cleanup() {
  // Remove deepest-first; rmdirSync only removes empty directories.
  for (var i = dirs.length - 1; i >= 0; i--) {
    fs.rmdirSync(dirs[i]);
  }
}
|
import { Fragment, useEffect } from "react";
import Router from "next/router";
import Error from "next/error";
import Loader from "~/components/Loader/Loader";
import { formatPaperSlug } from "~/config/utils";

// Redux
import { PaperActions } from "~/redux/paper";
/**
 * Redirect-only page: resolves a paper id to its canonical slug URL
 * (computed server-side in getInitialProps) and client-redirects to it.
 * Renders a 404 error page when the paper was not found.
 */
function Paper(props) {
  // TODO: Does this need to be a dynamic route or hard refresh?
  const { error, paper, redirectPath } = props;

  // Side effects must not run during render: the original called
  // Router.push in the render body, which can fire on every re-render and
  // triggers React's "cannot update during render" warning. useEffect only
  // runs on the client after mount, which also preserves the original
  // `typeof window !== "undefined"` guard.
  useEffect(() => {
    if (redirectPath) {
      Router.push(redirectPath);
    }
  }, [redirectPath]);

  if (error || (paper && paper.status === 404)) {
    return <Error statusCode={404} />;
  }

  return (
    <Error title={<Fragment>Redirecting to page</Fragment>} statusCode={301} />
  );
}
// Fetches the paper into the Redux store, then either flags a 404 or builds
// the canonical /paper/:id/:slug path for the component to redirect to.
Paper.getInitialProps = async (ctx) => {
  const { store, res, query } = ctx;
  await store.dispatch(PaperActions.getPaper(query.paperId));
  const paper = store.getState().paper;
  if (paper.status === 404) {
    // Propagate the status to the HTTP response for server-side renders.
    res.statusCode = 404;
    return { error: true, paper: store.getState().paper };
  }
  // Prefer the stored slug; otherwise derive one from the paper/plain title.
  const paperName = paper.slug
    ? paper.slug
    : formatPaperSlug(paper.paper_title ? paper.paper_title : paper.title);
  const redirectPath = `/paper/${paper.id}/${paperName}`;
  return { redirectPath, paper };
};

export default Paper;
|
// AngularJS $resource wrapper for the per-endpoint Docker volumes API.
// NOTE(review): `genericHandler` is referenced but not defined in this file —
// presumably a global response-transform helper defined elsewhere; verify.
angular.module('dockm.rest')
.factory('Volume', ['$resource', 'API_ENDPOINT_ENDPOINTS', 'EndpointProvider', function VolumeFactory($resource, API_ENDPOINT_ENDPOINTS, EndpointProvider) {
  'use strict';
  return $resource(API_ENDPOINT_ENDPOINTS + '/:endpointId/docker/volumes/:id/:action',
  {
    // Bound lazily so each call targets the currently selected endpoint.
    endpointId: EndpointProvider.endpointID
  },
  {
    query: { method: 'GET' },
    get: { method: 'GET', params: {id: '@id'} },
    create: {method: 'POST', params: {action: 'create'}, transformResponse: genericHandler},
    remove: {
      method: 'DELETE', transformResponse: genericHandler, params: {id: '@id'}
    }
  });
}]);
|
// @flow
import React, { Component } from 'react';
import { observer, inject } from 'mobx-react';
import { intlShape } from 'react-intl';
import { get } from 'lodash';
import WalletSendForm from '../../components/wallet/WalletSendForm';
import type { InjectedProps } from '../../types/injectedPropsType';
import globalMessages from '../../i18n/global-messages';
import {
DECIMAL_PLACES_IN_ADA,
MAX_INTEGER_PLACES_IN_ADA,
} from '../../config/numbersConfig';
import { WalletSyncStateTags } from '../../domains/Wallet';
type Props = InjectedProps;
@inject('stores', 'actions')
@observer
export default class WalletSendPage extends Component<Props> {
  // Injected by mobx; declared null so the component renders in isolation.
  static defaultProps = { actions: null, stores: null };

  static contextTypes = {
    intl: intlShape.isRequired,
  };

  render() {
    const { intl } = this.context;
    const { uiDialogs, wallets, transactions, app } = this.props.stores;
    const { actions } = this.props;
    const { isValidAddress } = wallets;
    const { calculateTransactionFee, validateAmount } = transactions;
    const activeWallet = wallets.active;

    // Guard against potential null values
    if (!activeWallet)
      throw new Error('Active wallet required for WalletSendPage.');

    // Sending is disabled while the wallet is still restoring.
    const isRestoreActive =
      get(activeWallet, 'syncState.tag') === WalletSyncStateTags.RESTORING;

    return (
      <WalletSendForm
        currencyUnit={intl.formatMessage(globalMessages.unitAda)}
        currencyMaxIntegerDigits={MAX_INTEGER_PLACES_IN_ADA}
        currencyMaxFractionalDigits={DECIMAL_PLACES_IN_ADA}
        validateAmount={validateAmount}
        calculateTransactionFee={(address: string, amount: number) =>
          calculateTransactionFee({
            walletId: activeWallet.id,
            address,
            amount,
          })
        }
        addressValidator={isValidAddress}
        isDialogOpen={uiDialogs.isOpen}
        openDialogAction={actions.dialogs.open.trigger}
        isRestoreActive={isRestoreActive}
        onExternalLinkClick={app.openExternalLink}
      />
    );
  }
}
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Nccmp(Package):
    """Compare NetCDF Files"""
    homepage = "http://nccmp.sourceforge.net/"
    url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"

    version('1.8.2.0', sha256='7f5dad4e8670568a71f79d2bcebb08d95b875506d3d5faefafe1a8b3afa14f18')

    depends_on('netcdf-c')

    def install(self, spec, prefix):
        # Configure says: F90 and F90FLAGS are replaced by FC and
        # FCFLAGS respectively in this configure, please unset
        # F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
        # again.
        # `env` here is Spack's global build-environment dict.
        env.pop('F90', None)
        env.pop('F90FLAGS', None)

        configure('--prefix=%s' % prefix)
        make()
        # Run the upstream test suite before installing.
        make("check")
        make("install")
|
'''
The disparate session (Session) is for making requests to multiple locations.
'''
from abc import ABCMeta, abstractmethod
from copy import copy
from functools import partialmethod
from urllib.parse import urlparse, urlunparse
from h11 import RemoteProtocolError
from multio import asynclib
from .cookie_utils import CookieTracker
from .errors import RequestTimeout, BadHttpResponse
from .req_structs import SocketQ
from .request_object import Request
from .utils import get_netloc_port
__all__ = ['Session']
class BaseSession(metaclass=ABCMeta):
    '''
    The base class for asks' sessions.
    Contains methods for creating sockets, figuring out which type of
    socket to create, and all of the HTTP methods ('GET', 'POST', etc.)

    Concrete subclasses must supply ``sema`` (a semaphore-like async
    context manager), ``_make_url``, ``_grab_connection`` and
    ``_replace_connection``.
    '''

    def __init__(self, headers=None):
        '''
        Args:
            headers (dict): Headers to be applied to all requests.
                headers set by http method call will take precedence and
                overwrite headers set by the headers arg.
        '''
        if headers is not None:
            self.headers = headers
        else:
            self.headers = {}

        # Response-body codec hint; subclasses may set a default.
        self.encoding = None
        # Optional local address to bind outgoing sockets to.
        self.source_address = None
        # CookieTracker instance (or None) shared by all requests.
        self._cookie_tracker_obj = None

    @property
    @abstractmethod
    def sema(self):
        """
        A semaphore-like context manager.
        """
        ...

    async def _open_connection_http(self, location):
        '''
        Creates a normal async socket, returns it.

        Args:
            location (tuple(str, int)): A tuple of net location (eg
                '127.0.0.1' or 'example.org') and port (eg 80 or 25000).
        '''
        sock = await asynclib.open_connection(location[0],
                                              location[1],
                                              ssl=False,
                                              source_addr=self.source_address)
        # Mark the socket as pool-reusable.
        sock._active = True
        return sock

    async def _open_connection_https(self, location):
        '''
        Creates an async SSL socket, returns it.

        Args:
            location (tuple(str, int)): A tuple of net location (eg
                '127.0.0.1' or 'example.org') and port (eg 80 or 25000).
        '''
        sock = await asynclib.open_connection(location[0],
                                              location[1],
                                              ssl=True,
                                              server_hostname=location[0],
                                              source_addr=self.source_address)
        sock._active = True
        return sock

    async def _connect(self, host_loc):
        '''
        Simple enough stuff to figure out where we should connect, and creates
        the appropriate connection (plain for http, SSL for anything else).
        '''
        scheme, netloc, path, parameters, query, fragment = urlparse(
            host_loc)
        if parameters or query or fragment:
            raise ValueError('Supplied info beyond scheme, netloc.' +
                             ' Host should be top level only: ', path)

        netloc, port = get_netloc_port(scheme, netloc)
        if scheme == 'http':
            return await self._open_connection_http(
                (netloc, int(port))), port
        else:
            return await self._open_connection_https(
                (netloc, int(port))), port

    async def request(self, method, url=None, *, path='', retries=1, **kwargs):
        '''
        This is the template for all of the `http method` methods for
        the Session.

        Args:
            method (str): A http method, such as 'GET' or 'POST'.
            url (str): The url the request should be made to.
            path (str): An optional kw-arg for use in Session method calls,
                for specifying a particular path. Usually to be used in
                conjunction with the base_location/endpoint paradigm.
            kwargs: Any number of the following:
                data (dict or str): Info to be processed as a
                    body-bound query.
                params (dict or str): Info to be processed as a
                    url-bound query.
                headers (dict): User HTTP headers to be used in the
                    request.
                encoding (str): The str representation of the codec to
                    process the request under.
                json (dict): A dict to be formatted as json and sent in
                    the request body.
                files (dict): A dict of `filename:filepath`s to be sent
                    as multipart.
                cookies (dict): A dict of `name:value` cookies to be
                    passed in request.
                callback (func): A callback function to be called on
                    each bytechunk of of the response body.
                timeout (int or float): A numeric representation of the
                    longest time to wait on a complete response once a
                    request has been sent.
                max_redirects (int): The maximum number of redirects
                    allowed.
                persist_cookies (True or None): Passing True
                    instantiates a CookieTracker object to manage the
                    return of cookies to the server under the relevant
                    domains.
                auth (child of AuthBase): An object for handling auth
                    construction.

        When you call something like Session.get() or asks.post(), you're
        really calling a partial method that has the 'method' argument
        pre-completed.
        '''
        async with self._sema:
            timeout = kwargs.get('timeout', None)
            req_headers = kwargs.pop('headers', None)

            if url is None:
                url = self._make_url() + path

            # BUGFIX: merge session headers with per-request headers *before*
            # the try block. Previously the merge ran only after
            # _grab_connection() succeeded, so a ConnectionError raised while
            # connecting reached the retry branch below with `headers` still
            # unbound, turning the intended retry into a NameError.
            headers = req_headers
            if self.headers is not None:
                headers = copy(self.headers)
                if req_headers is not None:
                    headers.update(req_headers)
                req_headers = headers

            retry = False

            try:
                sock = await self._grab_connection(url)
                port = sock.port

                req_obj = Request(self,
                                  method,
                                  url,
                                  port,
                                  headers=req_headers,
                                  encoding=self.encoding,
                                  sock=sock,
                                  persist_cookies=self._cookie_tracker_obj,
                                  **kwargs)

                if timeout is None:
                    sock, r = await req_obj.make_request()
                else:
                    sock, r = await self.timeout_manager(timeout, req_obj)

                if sock is not None:
                    # If the server asked us to close, drop the socket from
                    # the pool rather than recycling it.
                    try:
                        if r.headers['connection'].lower() == 'close':
                            await sock.close()
                            sock._active = False
                    except KeyError:
                        pass
                    await self._replace_connection(sock)

            except RemoteProtocolError as e:
                # Server broke HTTP framing; the socket is unusable.
                await sock.close()
                sock._active = False
                await self._replace_connection(sock)
                raise BadHttpResponse('Invalid HTTP response from server.') from e

            except ConnectionError as e:
                if retries > 0:
                    retry = True
                    retries -= 1
                else:
                    raise e

            if retry:
                return (await self.request(method,
                                           url,
                                           path=path,
                                           retries=retries,
                                           headers=headers,
                                           **kwargs))

            return r

    # These be the actual http methods!
    # They are partial methods of `request`. See the `request` docstring
    # above for information.
    get = partialmethod(request, 'GET')
    head = partialmethod(request, 'HEAD')
    post = partialmethod(request, 'POST')
    put = partialmethod(request, 'PUT')
    delete = partialmethod(request, 'DELETE')
    options = partialmethod(request, 'OPTIONS')

    async def timeout_manager(self, timeout, req_obj):
        # Runs the request under the backend's timeout, translating the
        # backend-specific timeout exception into asks' RequestTimeout.
        try:
            async with asynclib.timeout_after(timeout):
                sock, r = await req_obj.make_request()
        except asynclib.TaskTimeout as e:
            raise RequestTimeout from e
        return sock, r

    @abstractmethod
    def _make_url(self):
        """
        A method who's result is concated with a uri path.
        """
        ...

    @abstractmethod
    async def _grab_connection(self, url):
        """
        A method that will return a socket-like object.
        """
        ...

    @abstractmethod
    async def _replace_connection(self, sock):
        """
        A method that will accept a socket-like object.
        """
        ...
class Session(BaseSession):
    '''
    The Session class, for handling piles of requests.

    This class inherits from BaseSession, where all of the 'http method'
    methods are defined.
    '''

    def __init__(self,
                 base_location=None,
                 endpoint=None,
                 headers=None,
                 encoding='utf-8',
                 persist_cookies=None,
                 connections=1):
        '''
        Args:
            encoding (str): The encoding asks'll try to use on response bodies.
            persist_cookies (bool): Passing True turns on browserishlike
                stateful cookie behaviour, returning cookies to the host when
                appropriate.
            connections (int): The max number of concurrent connections to the
                host asks will allow its self to have. The default number of
                connections is 1. You may increase this value as you see fit.
        '''
        super().__init__(headers)
        self.encoding = encoding
        self.base_location = base_location
        self.endpoint = endpoint

        if persist_cookies is True:
            self._cookie_tracker_obj = CookieTracker()
        else:
            # None (off) or a caller-supplied tracker object.
            self._cookie_tracker_obj = persist_cookies

        # Idle pooled sockets, and sockets currently lent out to requests.
        self._conn_pool = SocketQ()
        self._checked_out_sockets = SocketQ()
        # Bounds concurrent requests to `connections`.
        self._sema = asynclib.Semaphore(connections)

    @property
    def sema(self):
        return self._sema

    def _checkout_connection(self, host_loc):
        # Pull an idle socket for this host out of the pool, if any,
        # and record it as checked out. Returns None when no match.
        try:
            index = self._conn_pool.index(host_loc)
        except ValueError:
            return None
        sock = self._conn_pool.pull(index)
        self._checked_out_sockets.append(sock)
        return sock

    async def _replace_connection(self, sock):
        # Reusable sockets go back to the front of the pool; dead ones
        # are simply forgotten.
        if sock._active:
            self._checked_out_sockets.remove(sock)
            self._conn_pool.appendleft(sock)
        else:
            self._checked_out_sockets.remove(sock)

    async def _make_connection(self, host_loc):
        # Open a fresh connection and tag it with its host/port so the
        # pool can match it later.
        sock, port = await self._connect(host_loc)
        sock.host, sock.port = host_loc, port
        return sock

    async def _grab_connection(self, url):
        '''
        The connection pool handler. Returns a connection
        to the caller. If there are no connections ready, and
        as many connections checked out as there are available total,
        we yield control to the event loop.

        If there is a connection ready or space to create a new one, we
        pop/create it, register it as checked out, and return it.

        Args:
            url (str): breaks the url down and uses the top level location
                info to see if we have any connections to the location already
                lying around.
        '''
        scheme, netloc, _, _, _, _ = urlparse(url)
        host_loc = urlunparse((scheme, netloc, '', '', '', ''))

        sock = self._checkout_connection(host_loc)
        if sock is not None:
            return sock
        else:
            sock = await self._make_connection(host_loc)
            self._checked_out_sockets.append(sock)
        return sock

    def _make_url(self):
        '''
        Puts together the hostloc and current endpoint for use in request uri.
        '''
        return (self.base_location or '') + (self.endpoint or '')

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        # Close every pooled socket when the session context exits.
        await self._conn_pool.free_pool()
|
import { configureStore } from '@reduxjs/toolkit'
import counterReducer from './counterSlice';
// Application store: a single `counter` slice managed by Redux Toolkit.
const store = configureStore({
  reducer: { counter: counterReducer },
})

export default store
|
'use strict';
/*
* @Author: yurui
* @Date: 2021-06-01
*/
// Sequelize migration for the `users` table. The Chinese strings inside the
// `comment:` options are database column comments (runtime values) and are
// intentionally left untouched.
module.exports = {
    // Called on migrate-up: create the `users` table.
    up: async (queryInterface, Sequelize) => {
        const { INTEGER, DATE, STRING } = Sequelize;
        await queryInterface.createTable('users', {
            id: {
                allowNull: false,
                autoIncrement: true,
                primaryKey: true,
                type: INTEGER
            },
            deptId: {
                allowNull: false,
                type: INTEGER,
                comment: '部门deptId'
            },
            userName: {
                allowNull: false,
                unique: true,
                type: STRING,
                comment: '用户名'
            },
            nickName: {
                type: STRING,
                defaultValue: null,
                comment: '昵称'
            },
            sex: {
                type: STRING,
                defaultValue: '1',
                comment: '性别(0代表女 1代表男)'
            },
            password: {
                allowNull: false,
                type: STRING,
                comment: '密码'
            },
            avatar: {
                allowNull: true,
                type: STRING,
                defaultValue: null,
                comment: '头像'
            },
            email: {
                allowNull: true,
                type: STRING,
                comment: '邮箱'
            },
            mobile: {
                allowNull: true,
                type: STRING,
                comment: '手机号'
            },
            isDelete: {
                type: STRING,
                defaultValue: '0',
                comment: '删除标志(0代表存在 1代表删除)'
            },
            status: {
                type: STRING,
                defaultValue: '1',
                comment: '帐号状态(1正常 0停用)'
            },
            remark: {
                type: STRING,
                comment: '备注'
            },
            createdAt: {
                allowNull: true,
                type: DATE,
                comment: '创建时间'
            },
            createdBy: {
                allowNull: true,
                type: STRING,
                comment: '创建者'
            },
            updatedAt: {
                allowNull: true,
                type: DATE,
                comment: '更新时间'
            },
            updatedBy: {
                allowNull: true,
                type: STRING,
                comment: '更新者'
            }
        });
    },
    // Called on migrate-down: drop the `users` table.
    down: async queryInterface => {
        await queryInterface.dropTable('users');
    },
};
var Buffer = require('buffer').Buffer
var test = require('tape')
var http = require('../..')
// Verifies that a GET with Basic-auth credentials reaches the test server
// and that the success body comes back intact.
test('authentication', function (t) {
  http.get({
    path: '/auth',
    auth: 'TestUser:trustno1'
  }, function (res) {
    var buffers = []
    res.on('end', function () {
      // Buffer.from replaces the deprecated, unsafe `new Buffer(string)`
      // constructor; byte content and .equals semantics are identical.
      t.ok(Buffer.from('You\'re in!').equals(Buffer.concat(buffers)), 'authentication succeeded')
      t.end()
    })
    res.on('data', function (data) {
      buffers.push(data)
    })
  })
})
// Data back Explorer
// import Freezeframe from 'freezeframe';
// prepare docs
// Paths to the three JSON documents that drive the explorer UI.
var DA_doc = "./assets/dist/js/DA_collection.json";
var card_doc = "./assets/dist/js/AIE_card_collection.json";
var DP_doc = "./assets/dist/js/DP_collection.json";
// Build the page from the JSON docs first, then wire up all interactions.
$(document).ready(function() {
    loadData(DA_doc, DP_doc, card_doc);
    setupInteraction();
});
// load two json documents and update the panel
// Builds the navigator, the filter buttons and the card display in order;
// panelLayout() returns the responsive class suffix used by both panels.
function loadData(DA_doc, DP_doc, card_doc) {
    var classStr = panelLayout();
    // create NS navigator
    createDA(DA_doc, classStr);
    // create EL filter button group
    createDP(DP_doc, classStr);
    // load card data
    createDisplay(card_doc);
}
// activate all the interactive components
// Binds every event handler for the page: responsive re-layout, search box,
// navigator scrolling, zoom modal and carousel syncing. Large regions below
// are intentionally commented-out legacy/lazy-loading experiments — kept
// verbatim.
function setupInteraction() {
    console.log("Now binding interactions");
    // activate responsive responsive header + filter panel layout
    $(window).resize(function() {
        var classStr = panelLayout();
        // Short class string => narrow viewport => switch to "-sm" variants.
        if(classStr.length < 5){
            $("div.btn-primary-group").removeClass("btn-primary-group list-group").addClass("btn-primary-group-sm");
            $("div.btn-secondary-group").removeClass("btn-secondary-group").addClass("btn-secondary-group-sm");
            $("div.btn-secondary-group-sm > .btn").removeClass("btn-block text-left").addClass("text-center");
        } else {
            $("div.btn-primary-group-sm").removeClass("btn-primary-group-sm").addClass("btn-primary-group" + classStr);
            $("div.btn-secondary-group-sm").removeClass("btn-secondary-group-sm").addClass("btn-secondary-group" + classStr.replace(" list-group", ""));
            $("div.btn-secondary-group > .btn").removeClass("text-center").addClass("btn-block text-left");
        }
    });
    // On small screens the header title toggles the filter panel.
    $("header .title-bold").click(function () {
        if($(window).outerWidth() < 768) {
            $("#filter-panel").slideToggle(180);
        }
        if($(window).outerWidth() < 576) {
            $(".img-overlay").off("hover", "**" );
            $(".img-overlay").tooltip("disable");
        }
    });
    // activate search box ("Search" doubles as placeholder text)
    $("input.form-control").focus( function() {
        $(".search-result").text("");
        if($(this).val().trim() == "Search")
            $(this).val("");
    });
    $("input.form-control").blur( function() {
        if($(this).val().trim() == "")
            $(this).val("Search");
    });
    $(".nav-button").click(searchFunc);
    // Enter key triggers search as well.
    $(".form-control").bind('keydown', function(eve){
        var keyCode = eve.which || arguments.callee.caller.arguments[0];
        if (keyCode == 13) { searchFunc(); $(".form-control").blur();} //ignore space button
    });
    // activate NS navigator
    $(".btn-primary-group > .btn").click(DA_scroller);
    $(".btn-primary-group-sm > .btn").click(DA_scroller);
    // activate top info reminders
    // $(window).scroll(reminderSpy);
    // activate scroll spy
    $(window).scroll(displaySpy);
    //activate the first part
    $(".btn-primary-group > .btn").first().addClass("active");
    $(".btn-primary-group-sm > .btn").first().addClass("active");
    // activate EL filter button group
    // $(".btn-secondary-group > .btn").click(DP_filter);
    // $(".btn-secondary-group-sm > .btn").click(DP_filter);
    // $(".btn-sub-list > li").click(DP_sub_filter);
    // image lazy loading
    // $(".modal-body").ImgLoading({timeout: 1000});
    // activate front-back transition buttons within cards
    // $(".card-bottom > button").click(cardOver);
    // $(".card-footer > button").click(cardTrans);
    // open new example tag
    // $(".card-footer a").click(function(){
    //     window.open($(this).attr("href"));
    // });
    // card footer url
    // $(".card-footer a").tooltip({title: "Click to watch full video in a new window", placement: "top"});
    // hover front gif
    // image lazy loading
    // $(".card-img").load($(this).attr("data-target"), function() {
    //     $(this).attr("src", $(this).attr("data-target"));
    //     if($($(this).parent()[0]).attr("class") == "card-img-box") {
    //         $($(this).next()[0]).tooltip({title: "Click to zoom in", placement: "top"});
    //     } else {
    //         const logo = new Freezeframe($(this));
    //     }
    // });
    // function ImgLoading(status) {
    //     status = status || "front";
    //     var parent = $(this).parent()[0];
    //     var cls = $(this).attr("class");
    //     var img = new Image();
    //     img.src = $(this).attr("data-echo");
    //     $(img).attr("onerror", $(this).attr("onerror"));
    //     loading
    //     if(img.complete) {
    //         callback.call(img);
    //         return;
    //     }
    //     loaded
    //     if(img.onload) {
    //         $(img).addClass(cls);
    //         $(this).replaceWith(img);
    //         if(status == "front") {
    //             const logo = new Freezeframe($(this));
    //             return;
    //         }
    //         if(status == "back") {
    //             $($(this).next()[0]).tooltip({title: "Click to zoom in", placement: "top"});
    //             $(parent).hover(fullScreenOver, fullScreenOut);
    //             return;
    //         }
    //     };
    // }
    // $(window).on("load", function(){
    //     $(".front > img").each(function(){ImgLoading("front");});
    //     $(".back > img").each(function(){ImgLoading("back");});
    // });
    // echo.init({
    //     offset: 0,
    //     throttle: 250,
    //     unload: false,
    //     callback: function(element, op) {
    //         var status = ($($(element).parent()[0]).attr("class") == "card-img-box" ? "back" : "front");
    //         if(op === 'load' && status === "front"){
    //             $(element).prev(".card-frontPrev")[0].src = $($(element).prev(".card-frontPrev")[0]).attr("data-target");
    //             $($(element).parent()[0]).hover(function(){
    //                 $($(this).children(".card-frontPrev")[0]).fadeOut(10);
    //             }, function() {
    //                 $($(this).children(".card-frontPrev")[0]).fadeIn(160);
    //             });
    //         }
    //         if(op === 'load' && status === "back") {
    //             $($(element).next()[0]).tooltip({title: "Click to zoom in", placement: "top"});
    //             $($(element).parents(".card-img-box")[0]).hover(fullScreenOver, fullScreenOut);
    //             return;
    //         }
    //         if(op === 'load' && $($(element).parent()[0]).attr("class") !== "modal-body") {
    //             const logo = new Freezeframe($(element));
    //             return;
    //         }
    //         if(op === 'unload') {
    //             element.src = "assets/media/fail_loading_light.svg";
    //         }
    //     }
    // });
    // const frontImg = new Freezeframe('.card-deck .card-img');
    // $(".card-deck .card-img").onload(function(){
    //     var parentsSet = $(this).parentsUntil(".card-deck");
    //     var name = $(parentsSet[parentsSet.length - 1]).attr("name");
    //     const logo = new Freezeframe("[name=\'" + name + "\'] .front > .card-img");
    //     const logo = new Freezeframe(this);
    // });
    // hover full-screen button on card image
    $(".card-img-box").hover(fullScreenOver, fullScreenOut);
    // $(".img-overlay").tooltip({title: "Click to zoom in", placement: "top"});
    // toggle modal
    $(".img-overlay")
        .click(modalInfo)
        .click(function () {
            $("#zooming-modal").modal({
                backdrop: false,
                keyboard: false,
                focus: true,
                show: true
            });
        });
    $("a.modal-title").tooltip({title: "Click to watch full video in a new window", placement: "top"});
    $("a.modal-title").click(function(){
        window.open($(this).attr("href"));
        $("a.modal-title").tooltip("hide");
    });
    // Remember which card opened the modal so its carousel can be kept in
    // sync with the modal's carousel below.
    let data_provider = "";
    $('#zooming-modal').on('shown.bs.modal', function() {
        let modalWindowCarousel = $("#carouselModal").get(0);
        data_provider = $(modalWindowCarousel).attr("data-provider");
    });
    $(".modal .carousel").on("slide.bs.carousel", function(event) {
        let aimCard = $(`[name="${data_provider}"]`).get(0);
        // console.log(data_provider);
        let aimCarousel = $(aimCard).find(".carousel").get(0);
        if(event.direction === "right") {
            $(aimCarousel).find("a.carousel-control-prev").click();
        } else if(event.direction === "left") {
            $(aimCarousel).find("a.carousel-control-next").click();
        }
    })
    // abandon right mouse click.
    // ** From here
    // if (window.Event) {document.captureEvents(Event.MOUSEUP); }
    // function nocontextmenu() {
    //     event.cancelBubble = true
    //     event.returnValue = false;
    //     return false;
    // }
    // function norightclick(e) {
    //     if (window.Event) {
    //         if (e.which == 2 || e.which == 3)
    //             return false;
    //     } else if (event.button == 2 || event.button == 3) {
    //         event.cancelBubble = true
    //         event.returnValue = false;
    //         return false;
    //     }
    // }
    // for IE5+
    // document.oncontextmenu = nocontextmenu;
    // for all others
    // document.onmousedown = norightclick;
    // End **
}
// create NS components & display NS frame
// Builds the navigator button group and one display section per DA entry.
function createDA(DA_doc, classStr) {
    // calc panel's position $ screen width measuring
    classStr = "btn-primary-group" + classStr;
    // create NS part
    var DA_Group = $("<div></div>").addClass(classStr)
        .attr("id", "display-scroll");
    // Synchronous ajax so DA_Group is fully populated before it is appended
    // below. NOTE(review): $.ajaxSettings.async is set globally and never
    // restored — later ajax calls on the page inherit it; verify intent.
    $.ajaxSettings.async = false;
    $.getJSON(DA_doc, function (json) {
        $.each(json, function (i, item){
            let DA_single = new DA_Nav(item);
            let DA_nav_btn = DA_single.drawDANav(); // create spy btn
            let DA_top = DA_single.drawDATop(); // create display part
            let DA_joint_tag = DA_single.getJointTag();
            let currentDisplayPart = $("<div></div>").attr("id", DA_joint_tag); // create spy panel
            DA_Group.append(DA_nav_btn);
            currentDisplayPart
                .append(DA_top)
                .append($("<div></div>").addClass("row row-cols-1 row-cols-sm-2 row-cols-lg-3").addClass("card-deck")); // create card deck
            currentDisplayPart.appendTo("#card-display");
            DA_single.DACreatingComplete();
        });
    });
    $("#filter-panel > .btn-panel").last().append(DA_Group);
}
// construct DA_Nav class
// "DA_id": 100,
// "DA_num": 10,
// "DA_nav_tag": "biology",
// "DA_nav_color": "#8180DF",
// "DA_desc": "",
// "DA_class_object": [
//     {
//         "DA_class_id": "101",
//         "DA_class_tag": "whole body movement",
//         "DA_class_color": "#EB63BD"
//     }, ......
// Constructor: normalizes one DA JSON entry (shape documented above) and
// registers its color in the shared ColorHash. Every field falls back to a
// placeholder default when missing.
function DA_Nav(DA_object) {
    // color method
    this._color_hash = DA_Nav.ColorHash.init();
    this._created = 0; // nothing created: 0; both created: 1
    this._DA_id = DA_object["DA_id"] || 500;
    this._DA_num = DA_object["DA_num"] || 0;
    this._DA_nav_tag = DA_object["DA_nav_tag"] || "navigator";
    this._DA_desc = DA_object["DA_desc"] || "Interpretation for Dynamic Approaches. Interpretation for Dynamic Approaches. Interpretation for Dynamic Approaches. Interpretation for Dynamic Approaches. Interpretation for Dynamic Approaches.";
    this._DA_sub_arr = DA_object["DA_class_object"] || [{"DA_class_id": "501", "DA_class_tag": "navigator sub class example", "DA_class_color": "#EBEDF6"}];
    // Spaces are illegal in DOM ids/anchors, so join the tag with underscores.
    this._DA_joint_tag = this._DA_nav_tag.split(" ").join("_");
    this._DA_nav_color = DA_object["DA_nav_color"] || "#EBEDF6";
    this._color_hash.set_color(this._DA_id, this._DA_nav_color);
}
// public color hash data sharing
// Shared id -> "#rgb"/"#rrggbb" registry; both DA_Nav and AIE_Card read
// from the same underlying _data table through init() accessors.
DA_Nav.ColorHash = {
    _data: { 500: "#EBEDF6" },
    // Returns an accessor object whose set/get operate on the shared table.
    init: function () {
        return {
            // Register a colour for key_id (default 500). A missing "#" is
            // prepended; only total lengths 3, 4 or 7 are accepted.
            // Returns true when stored, false when rejected.
            set_color: function (key_id, color_str) {
                key_id = key_id || 500;
                color_str = color_str || "";
                if (!color_str)
                    return false;
                if (color_str.indexOf("#") < 0)
                    color_str = "#" + color_str;
                if ([3, 4, 7].indexOf(color_str.length) < 0)
                    return false;
                DA_Nav.ColorHash._data[key_id] = color_str;
                return true;
            },
            // Look up a registered colour; undefined when unknown.
            get_color: function (key_id) {
                key_id = key_id || 500;
                const table = DA_Nav.ColorHash._data;
                return table.hasOwnProperty(key_id) ? table[key_id] : undefined;
            }
        };
    }
}
// Accessor: the underscore-joined nav tag used as this DA's element id.
DA_Nav.prototype.getJointTag = function() {
    const { _DA_joint_tag } = this;
    return _DA_joint_tag;
}
// Builds the scroll-spy nav button (<a>) for this DA: capitalised tag,
// item count, colour dot on the left and colour sign on the right.
DA_Nav.prototype.drawDANav = function() {
    const capitalized =
        this._DA_nav_tag.replace(this._DA_nav_tag[0], this._DA_nav_tag[0].toUpperCase());
    const colorDot = $("<span></span>").addClass("btn-id").css("background-color", this._DA_nav_color);
    const colorSign = $("<span></span>").addClass("btn-sign").css("background", this._DA_nav_color);
    return $("<a></a>")
        .addClass("btn btn-block text-left " + this._DA_joint_tag)
        .text(`${capitalized} (${this._DA_num})`)
        .attr({type: "button", href: "#" + this._DA_joint_tag})
        .prepend(colorDot)
        .append(colorSign);
}
// Builds this DA's sticky "deck-reminder" header (title, description
// paragraph and a colour legend for every sub class) and registers each
// sub-class colour in the shared ColorHash table.
DA_Nav.prototype.drawDATop = function() {
    let thisDA_Nav = this;
    // display color reminder
    let sub_label = $("<ul></ul>").addClass("display-sub-label");
    // BUG FIX: `index` was referenced below but never declared as a
    // forEach callback parameter, so any record missing DA_class_id threw
    // a ReferenceError instead of receiving a fallback id.
    thisDA_Nav._DA_sub_arr.forEach((eg_object, index) => {
        // add to color hash list
        let DA_class_id = eg_object["DA_class_id"] || (500 + index + 1);
        let DA_class_color = eg_object["DA_class_color"] || "#EBEDF6";
        thisDA_Nav._color_hash.set_color(DA_class_id, DA_class_color);
        // add sub label to display
        let DA_class_tag = eg_object.DA_class_tag;
        let DA_class_label = $("<li></li>")
            .text(DA_class_tag)
            .prepend($("<span></span>").css("background-color", DA_class_color));
        DA_class_label.appendTo(sub_label);
    });
    // display title
    let DA_display_tag = thisDA_Nav._DA_nav_tag ? "approaches: " + thisDA_Nav._DA_nav_tag.toLowerCase() : "approaches: dynamic approaches";
    let display_title = $("<h2></h2>").addClass("display-title")
        .text(DA_display_tag + " (" + thisDA_Nav._DA_num + ")")
        .prepend($("<span></span>").css("background-color", thisDA_Nav._DA_nav_color));
    // integrated display top, anchored at the card display's current top
    let display_top = $("<div></div>").addClass("deck-reminder")
        .css({
            "top": document.querySelector("#card-display").getBoundingClientRect().top,
        })
        .append(display_title)
        .append($("<p></p>").addClass("display-desc").text(thisDA_Nav._DA_desc))
        .append(sub_label);
    return display_top;
}
// Stamps this DA as created (timestamp) and wires its event listeners.
// Returns false (with a warning) when called more than once.
DA_Nav.prototype.DACreatingComplete = function() {
    if(this._created > 0) {
        console.warn(`DA tab & sticky top for "${this._DA_nav_tag}" have already been created before.`);
        return false;
    }
    this._created = new Date().getTime();
    this._interactionInit();
    return true;
}
// Caches the DOM nodes belonging to this DA and binds the scroll/sticky
// listeners. No-op until DACreatingComplete has stamped _created.
DA_Nav.prototype._interactionInit = function () {
    if(!this._created) return;
    const jointTag = this.getJointTag();
    this._DA_btn = document.querySelector(`.btn-primary-group > .${jointTag}`);
    this._sticky_top = document.querySelector(`#${this._DA_joint_tag} > .deck-reminder`);
    this._display_desc = this._sticky_top.querySelector(".display-desc");
    // record in/out of scroll Y position
    this._in_sticky = false;
    // bind event listeners
    this._topEventBinding();
    this._scrollEventBinding();
}
// Sets or toggles the "currently sticky" flag.
// option: true forces true; undefined (or any falsy value, which the
// first line normalises to undefined) toggles; any other truthy value
// forces false. Callers only pass false while the flag is true (see
// _topEventBinding), so the net effect matches forcing false.
// Runs `callback` after a successful update. Returns false when the
// flag was never initialised by _interactionInit.
DA_Nav.prototype._stickyToggle = function(option, callback) {
    option = option || undefined;
    callback = callback || undefined;
    if(!this.hasOwnProperty("_in_sticky")) {
        console.log("Either sticky top or DA button has been deployed yet.");
        return false;
    }
    if(option === undefined) {
        this._in_sticky = !this._in_sticky;
    } else {
        this._in_sticky = (option === true);
    }
    if(callback !== undefined)
        callback();
    return true;
}
// Whether this DA's header is currently pinned (flag set by
// _stickyToggle; initialised to false in _interactionInit).
DA_Nav.prototype._isSticky = function() {
    return this._in_sticky;
}
// listen to sticky top
// Flips the sticky flag when the window scroll position lines up exactly
// with this DA's sticky header offset, and clears it otherwise.
// (Two fade-animation helpers formerly declared here were never invoked —
// dead locals — and have been removed.)
DA_Nav.prototype._topEventBinding = function () {
    let thisDA_Nav = this;
    $(window).scroll(function(){
        // mark DA state
        let atStickyTop = parseInt(Math.round(window.scrollY)) === thisDA_Nav._sticky_top.offsetTop;
        if(atStickyTop && !thisDA_Nav._isSticky()) {
            thisDA_Nav._stickyToggle(true);
        } else if(!atStickyTop && thisDA_Nav._isSticky()) {
            thisDA_Nav._stickyToggle(false);
        }
    });
}
// listen to scroll action
// Placeholder — intentionally does nothing yet; _interactionInit calls it
// so the hook point already exists.
DA_Nav.prototype._scrollEventBinding = function () {}
// construct a sticky top class
// Clones the title and colour legend out of an existing .deck-reminder
// node into a fresh, detached <div class="deck-reminder">.
function StickyTop(DA_sticky_top) {
    const sticky_top = document.createElement("div");
    sticky_top.appendChild(DA_sticky_top.querySelector(".display-title").cloneNode(true));
    sticky_top.appendChild(DA_sticky_top.querySelector(".display-sub-label").cloneNode(true));
    sticky_top.classList.add("deck-reminder");
    this._sticky_top = sticky_top;
}
// Appends the cloned sticky header to `container` (defaults to
// #card-display).
// NOTE(review): `top` is computed (header height fallback) but never
// used below — presumably it was meant to position the sticky element;
// confirm intent before relying on this parameter.
StickyTop.prototype.appendToDisplay = function(container, top) {
    container = container || document.getElementById("card-display");
    top = top || document.querySelector("header").getBoundingClientRect()["height"];
    // append to container
    container.appendChild(this._sticky_top);
}
// create EL components
// x > 0 .active
// x < 0 :not(.active)
// x == 0 .disabled
// Builds the DP (purpose) filter button group from a JSON document and
// appends it to the first .btn-panel; each record becomes one primary
// button plus a <ul> of sub filters.
function createDP(DP_doc, classStr) {
    classStr = "btn-secondary-group" + classStr.replace(" list-group", "");
    // Small ("sm") layout centres the button text; default keeps it left.
    const btnClassStr = classStr.indexOf("sm") > 0 ? "text-center" : "text-left btn-block";
    const DP_Group = $("<div></div>").addClass(classStr);
    $("#filter-panel > .btn-panel").first().append(DP_Group);
    $.getJSON(DP_doc, function(json) {
        // create EL components
        $.each(json, function(i, item) {
            const DP_single = new DP_Tab(item);
            const { DP_primary_btn, DP_sub_ul } = DP_single.drawDP(btnClassStr);
            DP_Group.append(DP_primary_btn).append(DP_sub_ul);
            DP_single.DPCreatingComplete();
        });
    });
}
// construct DP_filter class
// "DP_id": 1,
// "DP_tag": "illustrate characteristic",
// "DP_sub_tag": "Depict Reality, Exaggerate Reality"
function DP_Tab(DP_object) {
this._created = 0; // if displayed on screen: > 0, if not: 0
this._DP_id = DP_object["DP_id"];
this._DP_tag = DP_object["DP_tag"];
let DP_sub_tag = DP_object["DP_sub_tag"].split(",");
this._DP_sub_tags = DP_sub_tag.map(tag => tag = tag.trim());; // array
}
// Public method
// Builds this DP's primary toggle button plus its <ul> of sub-tag
// filter items (all initially active, each id'ed by its abbreviation
// code). Returns both as { DP_primary_btn, DP_sub_ul }.
DP_Tab.prototype.drawDP = function(btnClassStr) {
    btnClassStr = btnClassStr || "text-left btn-block";
    const DP_primary_btn = $("<button></button>")
        .addClass("btn " + btnClassStr)
        .addClass("active")
        .text(this._DP_tag)
        .prepend($("<span></span>"));
    const DP_sub_ul = $("<ul></ul>").addClass("btn-sub-list");
    for (const tag of this._DP_sub_tags) {
        $("<li></li>")
            .addClass("active")
            .attr("id", DP_Tab.DP_abr(tag))
            .text(tag)
            .appendTo(DP_sub_ul);
    }
    return { DP_primary_btn, DP_sub_ul };
}
// Public method
// Stamps this DP tab as created (timestamp) and wires its handlers.
// Returns false (with a warning) when called more than once.
DP_Tab.prototype.DPCreatingComplete = function() {
    if(this._created > 0) {
        console.warn(`DP tab ${this._DP_id} has already been created before.`);
        return false;
    }
    this._created = new Date().getTime();
    this._interactionInit();
    return true;
}
// Private method
// Abbreviation codes for every sub tag of this DP.
// CONSISTENCY FIX: delegates to DP_Tab.DP_abr instead of duplicating the
// identical "first two chars + word count + last two chars" expression
// inline, so the abbreviation scheme lives in exactly one place.
DP_Tab.prototype._DP_abr_list = function() {
    let DP_sub_tags = this._DP_sub_tags || [];
    return DP_sub_tags.map(tag => DP_Tab.DP_abr(tag));
}
// Private method
// Caches this tab's DOM nodes (position-matched by _DP_id order) and
// binds the filter click handlers. No-op until DPCreatingComplete ran.
DP_Tab.prototype._interactionInit = function() {
    if(!this._created) return;
    const idx = this._DP_id - 1;
    this._DP_primary_btn = document.querySelectorAll(".btn-secondary-group > .btn")[idx];
    this._DP_sub_ul = document.querySelectorAll(".btn-secondary-group > .btn-sub-list")[idx];
    this._DP_sub_li = this._DP_sub_ul.querySelectorAll("li"); // NodeList object
    // bind event listener
    this._eventBinding();
}
// Private method
// Wires the show/hide behaviour of this DP tab:
//  - each sub <li> toggles the cards carrying its abbreviation class
//  - the primary button toggles every sub tag at once and slides the
//    sub list open/closed
// Hidden cards get .screened-out and any flipped card is turned back to
// its front face once its fade-out finishes.
DP_Tab.prototype._eventBinding = function() {
    let thisDPTag = this;
    let DP_abr_list = thisDPTag._DP_abr_list();
    let DP_sub_li = thisDPTag._DP_sub_li;
    // bind hide/visible event to sub buttons
    thisDPTag._DP_sub_li.forEach(li => {
        let this_DP_abr = li.getAttribute("id");
        li.addEventListener("click", function() {
            let targetCards;
            if(this.classList.contains("active")) {
                this.classList.toggle("active", false);
                targetCards = document.querySelectorAll(`.${this_DP_abr}:not(.screened-out)`);
                targetCards.forEach(node => {
                    node.classList.add("screened-out");
                    // BUG FIX: this used to animate $(targetCards) (the
                    // whole set) once per node, stacking duplicate
                    // animations; animate each card exactly once, matching
                    // the primary-button branch below.
                    $(node).fadeTo(400, 0).hide(1, () => {
                        if(node.querySelector(".card-inner").classList.contains("trans-3d"))
                            node.querySelector(".card-inner").classList.remove("trans-3d");
                    });
                });
            } else {
                this.classList.toggle("active", true);
                targetCards = document.querySelectorAll(`.${this_DP_abr}.screened-out`);
                targetCards.forEach(node => node.classList.remove("screened-out"));
                $(targetCards).show(1).fadeTo(600, 1);
            }
        });
    });
    // bind hide/visible event to primary buttons
    thisDPTag._DP_primary_btn.addEventListener("click", function () {
        let targetCards, this_joint_DP_abr;
        if(this.classList.contains("active")){
            this.classList.toggle("active", false);
            DP_sub_li.forEach(li => li.classList.toggle("active", false));
            this_joint_DP_abr = DP_abr_list.map(DP_abr => "." + DP_abr + ":not(.screened-out)").join(",");
            targetCards = document.querySelectorAll(this_joint_DP_abr);
            // slide duration scales with the number of sub filters
            $(DP_sub_li).slideToggle(160 + 120 * (DP_sub_li.length/1.75), "easeInOutSine");
            targetCards.forEach(node => {
                node.classList.add("screened-out");
                $(node).fadeTo(400, 0).hide(1, () => {
                    if(node.querySelector(".card-inner").classList.contains("trans-3d"))
                        node.querySelector(".card-inner").classList.remove("trans-3d");
                });
            });
        } else {
            this.classList.toggle("active", true);
            DP_sub_li.forEach(li => li.classList.toggle("active", true));
            this_joint_DP_abr = DP_abr_list.map(DP_abr => "." + DP_abr + ".screened-out").join(",");
            targetCards = document.querySelectorAll(this_joint_DP_abr);
            $(DP_sub_li).slideToggle(160 + 160 * (DP_sub_li.length/1.75), "easeInOutSine");
            targetCards.forEach(node => node.classList.remove("screened-out"));
            $(targetCards).show(1).fadeTo(600, 1);
        }
    })
}
// Static method
// Builds a short, class-name-safe code from a tag: first two characters
// + word count (split on '-' or whitespace) + last two characters,
// e.g. "Depict Reality" -> "de2ty".
DP_Tab.DP_abr = function(str) {
    // ROBUSTNESS FIX: fall back before lowercasing so an undefined tag no
    // longer throws; "" still maps to the same default as before.
    str = (str || "dynamic purpose").toLowerCase();
    return str.substr(0, 2) + (str.split(/-|\s/)).length + str.substr(str.length-2);
}
//create card display
// void return
// Loads the card JSON document, appends one AIE_Card per record to the
// card deck of its DA section, then scrolls back to the top of the page.
function createDisplay(cards_doc) {
    console.log('start loading cards');
    $.getJSON(cards_doc, function(json) {
        $.each(json, function(id, card_doc) {
            let card_DA = card_doc.DA_nav_tag.toLowerCase();
            let card_DA_joint = $.trim(card_DA).split(" ").join("_");
            let card = new AIE_Card(card_doc);
            $(`#${card_DA_joint} > .card-deck`).append(card.drawCard());
            card.cardCreatingComplete();
        });
        // BUG FIX: the completion check used to compare each index against
        // the length of the URL string (cards_doc.length), so it never
        // fired. $.each is synchronous, so logging here means all done.
        console.log("All cards are loaded.");
    });
    // deckDisplay();
    scrollToTop();
}
// construct card class
// input card_object:Object()
// card_id card_title DA_nav_tag DA_class_id DA_class_tag DA_desc DP_tag DP_sub_tag DP_desc eg_arr
// Data holder + renderer for one flip card. The defaults below cover
// every missing/falsy field so a sparse record still renders a
// placeholder card.
function AIE_Card(card_object) {
    this._created = 0; // if displayed on screen: > 0, if not: 0
    this._current_eg_id = 0; // value range: 0, 1, 2
    this._card_id = card_object.card_id || 0;
    this._card_title = card_object.card_title || "Design Space";
    // NOTE(review): reuses card_id as the approach number shown on the
    // card front — the documented input has no DA_nav_id field; confirm
    // this is intended rather than a copy-paste slip.
    this._DA_nav_id = card_object.card_id || 0;
    this._DA_nav_tag = card_object.DA_nav_tag || "dynamic approaches tag";
    this._DA_class_id = card_object.DA_class_id || 500;
    this._DA_class_tag = card_object.DA_class_tag || "dynamic approaches sub tag";
    this._DA_desc = card_object.DA_desc || "Approach Interpretation";
    // this.DA_nav_color = this._colorSync(this.DA_class_id, DA_color_hash);
    this._DP_tag = card_object.DP_tag || "dynamic purposes";
    // assumes DP_sub_tag is present and shaped like "NN Tag Text" —
    // throws if the field is missing (TODO confirm upstream guarantee)
    this._DP_sub_id = card_object.DP_sub_tag.substr(0, 2).trim() || "00";
    this._DP_sub_tag = card_object.DP_sub_tag.substr(3).trim() || "Dynamic Purposes Sub Tag";
    this._DP_desc = card_object.DP_desc || "Purpose Interpretation";
    // this.DP_code = this._DP_abr(DP_sub_tag);
    this._eg_arr = card_object.eg_arr || [{"eg_id":"1000", "eg_source":"Video.com", "eg_year":"2020", "eg_designer":"Mr. Designer", "eg_url":"https://www.dribbble.com"},{"eg_id":"1001", "eg_source":"Video.com", "eg_year":"2020", "eg_designer":"Miss Designer", "eg_url":"https://www.dribbble.com"},{"eg_id":"1002", "eg_source":"Video.com", "eg_year":"2020", "eg_designer":"Ms. Designer", "eg_url":"https://www.dribbble.com"}];
    // header colour comes from the shared hash (registered by DA_Nav)
    this._color_hash = DA_Nav.ColorHash.init();
    this._card_color = this._color_hash.get_color(this._DA_class_id);
}
// Private method
// calc card header bg-color
// AIE_Card.prototype._colorSync = function() {
// let DA_class_id = this._DA_class_id || 500;
// let get_color = DA_Nav._color_hash.get_color;
// let card_color = get_color(DA_class_id)
// console.log(card_color);
// return card_color || "#999999";
// }
// AIE_Card.prototype._colorSync = function(hash_list) {
// let DA_class_id = this._DA_class_id || 500;
// hash_list = hash_list || { 500 : "#999999" };
// return hash_list[DA_class_id] || "#999999";
// }
// Private method
// Abbreviation code for this card's DP sub tag.
// CONSISTENCY FIX: delegates to the shared DP_Tab.DP_abr helper instead
// of duplicating the identical expression inline, so the abbreviation
// scheme is defined in exactly one place.
AIE_Card.prototype._DP_abr = function() {
    return DP_Tab.DP_abr(this._DP_sub_tag);
}
// Private method
// Number of example records attached to this card (the `|| 0` only
// matters if `length` were ever falsy, i.e. an empty array).
AIE_Card.prototype._getEgLength = function() {
    return this._eg_arr.length || 0;
}
// Private method
// record a card as 'created' after being put on screen
// Stamps this card as created (timestamp) and wires its interactive
// handlers. Returns false (with a warning) when called a second time.
AIE_Card.prototype.cardCreatingComplete = function() {
    if(this._created > 0) {
        console.warn(`Card No.${this._card_id} has already been created before.`);
        return false;
    }
    this._created = new Date().getTime();
    this._interactionInit();
    return true;
}
// Private method
// initiate interactive parts
// Caches this card's interactive DOM nodes (front gif container, back
// carousel + caption + controls, flip buttons) and binds its handlers.
// No-op until cardCreatingComplete has stamped _created.
AIE_Card.prototype._interactionInit = function() {
    if(!this._created) return;
    this._Card = $(`[name='card_${this._card_id}']`).get(0);
    const CardFront = $(this._Card).find(".front").get(0);
    const CardBack = $(this._Card).find(".back").get(0);
    this._FrontGif = $(CardFront).find(".card-frontImg").get(0);
    this._BackCarousel = $(CardBack).find(".carousel").get(0);
    this._BackCaption = $(CardBack).find(".caption").get(0);
    this._CarouselControlPrev = $(this._BackCarousel).find(".carousel-control-prev").get(0);
    this._CarouselControlNext = $(this._BackCarousel).find(".carousel-control-next").get(0);
    this._CarouselFullScreen = $(CardBack).find(".img-overlay").get(0);
    this._CardTurningBtns = $(this._Card).find(".card-footer > .btn");
    // bind event listener
    this._eventBinding();
}
// Binds all interactive behaviour for one card:
//  - footer buttons flip the card (toggle the 3d-transform class)
//  - the back carousel's slide event rewrites the caption
//  - the full-screen overlay fills the shared modal carousel
//  - hovering the front image swaps the static preview for the gif
AIE_Card.prototype._eventBinding = function() {
    let thisCard = this; // data object
    let Card = thisCard._Card; // DOM object
    let CardInner = $(thisCard._Card).find(".card-inner").get(0);
    let modalWindowCarousel = $("#carouselModal").get(0); // carousel in modal frame
    let frontImg = $(thisCard._FrontGif).get(0);
    // bind with footer buttons
    $(thisCard._CardTurningBtns).click(function() {
        $(CardInner).toggleClass("trans-3d");
    });
    // bind with carousel
    $(thisCard._BackCarousel).on("slide.bs.carousel", function(event) {
        // event.direction = "left" / "right"
        // advance/rewind the tracked example index, then rebuild the
        // caption for the example about to be shown
        let aim_eg_id = thisCard._carouselChangeId(event.direction);
        let aim_eg_info = thisCard._eg_arr[aim_eg_id];
        let aim_eg_designer = thisCard.__appendCaption("Designer", aim_eg_info["eg_designer"]);
        let aim_eg_year = thisCard.__appendCaption("Year", aim_eg_info["eg_year"]);
        let aim_eg_url = $("<a></a>").attr({"href": aim_eg_info["eg_url"], "target": "_blank"}).addClass("text-decoration-none").text("URL");
        let caption = thisCard._BackCaption;
        // swap the caption content inside a fade-out / fade-in pair
        $(caption).fadeOut("fast", function() {
            $(caption).empty();
            $(caption)
                .append(aim_eg_designer)
                .append(aim_eg_year)
                .append($("<div></div>").append(aim_eg_url));
            $(caption).fadeIn("normal");
        });
    })
    // bind with modal window
    $(thisCard._CarouselFullScreen).on("click", function(event) {
        // tag the shared modal with this card's name attribute, then
        // rebuild its slides from this card's examples, keeping the
        // currently shown example active
        $(modalWindowCarousel).attr("data-provider", $(Card).attr("name"));
        let eg_info = thisCard._eg_arr;
        let current_eg_id = thisCard._current_eg_id;
        let carouselInner = $(modalWindowCarousel).find(".carousel-inner").get(0);
        $(carouselInner).empty();
        eg_info.forEach(function(eg, index, arr) {
            let gif_ori_path = `./assets/back_gif_compressed/back_${eg["eg_id"]}.gif`;
            let carouselImg = $("<img />")
                .addClass("d-block")
                .attr("src", gif_ori_path);
            let carouselItem = $("<div></div>").addClass("carousel-item").append(carouselImg);
            if(index === current_eg_id)
                carouselItem.addClass("active");
            carouselItem.appendTo(carouselInner);
        });
    });
    // bind front img preview: reveal the gif while hovered; on mouse-out
    // fade the static preview back in and re-assign the gif's src
    // (presumably to restart the animation — confirm)
    $(frontImg).hover(
        // hover in
        function() {
            $(frontImg).children(".front-prev").css("opacity", 0);
            $(frontImg).removeClass("inactive");
        },
        // hover out
        function() {
            let gif_path = $($(frontImg).children(".front-gif").get(0)).attr("src");
            $(frontImg).children(".front-prev").fadeTo("fast", 1, function() {
                $(frontImg).addClass("inactive");
                $($(frontImg).children(".front-gif").get(0)).attr( "src", gif_path );
            });
        }
    );
}
// Public method
// get carousel gif doc name array
// AIE_Card.prototype.getEgGifArray = function() {
// let eg_gif_array = this._eg_arr.map(eg => {
// let eg_id = eg["eg_id"] || 0;
// return `back_${eg_id}.gif`;
// });
// return eg_gif_array;
// }
// Private method
// carousel backward/forward button response
// direction: "right" selects the previous example, "left" the next one
// (both wrap around); any other value — including the legacy numeric
// fallback — leaves the current example unchanged.
// Updates _current_eg_id and returns the new index.
AIE_Card.prototype._carouselChangeId = function(direction) {
    // NOTE: the old header comment claimed "right -> 1, left -> 0" but the
    // comparisons below have always been against strings; the numeric
    // fallback just means "no movement".
    direction = direction || 1;
    // get current example ID
    let current_eg_id = this._current_eg_id;
    let eg_length = this._getEgLength();
    let aim_eg_id = current_eg_id;
    // (dropped the redundant parseInt — the modulo of two integers is
    // already an integer)
    if(direction === "right")
        // prev_eg_id (wraps)
        aim_eg_id = (current_eg_id + eg_length - 1) % eg_length;
    else if(direction === "left")
        // next_eg_id (wraps)
        aim_eg_id = (current_eg_id + 1) % eg_length;
    this._current_eg_id = aim_eg_id;
    return aim_eg_id;
}
// *** CARD DRAWING PROCESS ***
// Public method
// Assembles the complete flip-card element: a positioned column wrapper
// (tagged with the card's DP abbreviation class and a unique name) that
// contains .card-inner with the front and back faces.
AIE_Card.prototype.drawCard = function() {
    const wrapper = $("<div></div>")
        .addClass("col mb-5 position-relative")
        .addClass(this._DP_abr())
        .attr("name", "card_" + this._card_id);
    const innerCard = $("<div></div>").addClass("card-inner")
        .append(this._cardFront())
        .append(this._cardBack());
    return wrapper.append(innerCard);
}
// Private method
// Builds the card's front face: header, hover-to-play gif with its
// static preview image, the description body and the "Examples" footer.
AIE_Card.prototype._cardFront = function() {
    const front_gif = $("<img />")
        .addClass("card-img front-gif")
        .attr({ src: `./assets/front_gif_preview/front_${this._card_id}.gif` });
    const front_gif_prev = $("<img />")
        .addClass("card-img front-prev")
        .attr({ src: `./assets/front_prev/static_${this._card_id}.jpg` });
    const front_card_img = $("<div></div>")
        .addClass("card-frontImg inactive")
        .append(front_gif)
        .append(front_gif_prev);
    // return card front part
    return $("<div></div>").addClass("card shadow front")
        .append(this.__cardHeader())
        .append(front_card_img)
        .append(this.__cardFrontBody())
        .append(this.__cardFooter(1));
}
// Private method
// Builds the card's back face: header, example gif carousel, caption
// body and the "Back to Front" footer button.
AIE_Card.prototype._cardBack = function() {
    const parts = [
        this.__cardHeader(),
        this.__cardBackCarousel(this._current_eg_id),
        this.__cardBackBody(this._current_eg_id),
        this.__cardFooter(-1),
    ];
    const back_elem = $("<div></div>").addClass("card shadow back");
    parts.forEach(part => back_elem.append(part));
    // return card back part
    return back_elem;
}
// Private method
// Card header: title + DP sub tag on the class colour registered in the
// shared ColorHash table.
AIE_Card.prototype.__cardHeader = function() {
    return $("<div></div>").addClass("card-header")
        .css("background", this._card_color)
        .append($("<h4></h4>").text(this._card_title))
        .append($("<p></p>").text(this._DP_sub_tag));
}
// Private method
// x: >0 -> front, <=0 -> back
// Card footer with a single pill button: "Examples" on the front face,
// "Back to Front" on the back face.
AIE_Card.prototype.__cardFooter = function(x) {
    x = x || 1;
    const label = x > 0 ? "Examples" : "Back to Front";
    const card_footer_button = $("<button></button>")
        .addClass("btn btn-sm rounded-pill")
        .text(label);
    // return card footer
    return $("<div></div>").addClass("card-footer").append(card_footer_button);
}
// Private method
// Front body: zero-padded "Approach : NN …" and "Purpose : NN …"
// subtitles, each followed by its description paragraph.
AIE_Card.prototype.__cardFrontBody = function() {
    const approach_title = `Approach : ${_prefixZero(this._DA_nav_id, 2)} ${this._card_title}`;
    const purpose_title = `Purpose : ${_prefixZero(this._DP_sub_id, 2)} ${this._DP_sub_tag}`;
    const subtitle = text => $("<div></div>").addClass("card-subtitle").text(text);
    const paragraph = text => $("<p></p>").addClass("card-text").text(text);
    // return card front body
    return $("<div></div>").addClass("card-body")
        .append(subtitle(approach_title))
        .append(paragraph(this._DA_desc))
        .append(subtitle(purpose_title))
        .append(paragraph(this._DP_desc));
}
// Private method
// Back body: designer / year / URL caption for the example currently
// shown in the carousel (index defaults to 0).
AIE_Card.prototype.__cardBackBody = function(current_eg_id) {
    current_eg_id = current_eg_id || 0;
    const eg = this._eg_arr[current_eg_id];
    const designer = eg["eg_designer"] || "Mr. Designer";
    const year = eg["eg_year"] || "2020";
    const url = eg["eg_url"] || "https://www.dribbble.com";
    const super_link = $("<a></a>")
        .attr({"href": url, "target": "_blank"})
        .addClass("text-decoration-none")
        .text("URL");
    const caption = $("<div></div>").addClass("caption")
        .append(this.__appendCaption("Designer", designer))
        .append(this.__appendCaption("Year", year))
        .append($("<div></div>").append(super_link));
    // return card back body
    return $("<div></div>").addClass("card-body").append(caption);
}
// *** CARD BACK DRAWING ***
// Private method
// current_eg_id -> start index : 0, 1, 2 ... ...
// Builds the Bootstrap carousel for this card's example gifs plus the
// mask/overlay cover used to open the full-screen modal. The slide at
// current_eg_id starts active; prev/next controls target this card's
// unique carousel id.
AIE_Card.prototype.__cardBackCarousel = function(current_eg_id) {
    current_eg_id = current_eg_id || 0;
    let back_img = $("<div></div>").addClass("card-img-box position-relative");
    // data-interval "false" disables auto-cycling; slides change only via
    // the controls below
    let carousel = $("<div></div>")
        .addClass("card-img carousel slide")
        .attr({
            "id": "eg-carousel-" + this._card_id,
            "data-ride": "carousel",
            "data-interval": "false"
        });
    // mask + clickable overlay that opens the modal (see _eventBinding)
    let cover = $("<div></div>")
        .addClass("img-cover")
        .append(
            $("<div></div>").addClass("mask position-absolute")
        ).append(
            $("<span></span>").addClass("img-overlay").attr("type", "button")
        );
    let carousel_inner = $("<div></div>").addClass("carousel-inner");
    this._eg_arr.forEach(function(eg, index, arr) {
        let eg_id = eg["eg_id"];
        // let eg_gif_path = `./assets/back_gif/back_${eg_id}.gif`;
        let eg_gif_path = `./assets/back_gif_compressed/back_${eg_id}.gif`;
        let carousel_item = $("<div></div>")
            .addClass("carousel-item")
            .append($("<img />").addClass("d-block").attr("src", eg_gif_path));
        if(index === current_eg_id)
            carousel_item.addClass("active");
        carousel_item.appendTo(carousel_inner);
    });
    carousel.append(carousel_inner);
    // direction: previous / next;
    // Builds one control anchor. The first four characters ("prev"/"next")
    // match Bootstrap's carousel-control-* class and data-slide values.
    let carousel_control = function(direction, card_id) {
        direction = direction.toLowerCase() || "next";
        let direc = direction.substr(0, 4);
        return $("<a></a>")
            .addClass("carousel-control-" + direc)
            .attr({
                "href": "#eg-carousel-" + card_id,
                "role": "button",
                "data-slide": direc
            }).append(
                $("<span></span>").addClass(`carousel-control-${direc}-icon`).attr("aria-hidden", "true")
            ).append(
                $("<span></span>").addClass("sr-only").text(direction)
            );
    }
    let carousel_control_prev = carousel_control("previous", this._card_id);
    let carousel_control_next = carousel_control("next", this._card_id);
    // return all gif within one carousel
    return back_img.append(
        carousel.append(carousel_control_prev).append(carousel_control_next)
    ).append(cover);
}
// Private method
// Returns one "<div><span>Key: </span>value</div>" caption row, as an
// HTML string, for the back of the card.
AIE_Card.prototype.__appendCaption = function(key, content) {
    key = key || "Caption keyword";
    content = content || "Caption content.";
    // return a single caption to the back of the card
    return `<div><span>${key}: </span>${content}</div>`;
}
// make 9 to 09 — left-pads `num` with zeros to exactly `n` characters,
// truncating to the last n characters when the number is longer.
function _prefixZero(num, n) {
    num = num || 0;
    n = n || 2;
    const padded = "0".repeat(n - 1) + num;
    return padded.slice(-n);
}
// activate / inactivate DP primary filter
// Click handler for a DP primary button (`this` is the button). Slides
// its sub list open/closed, flips its .active state, then re-triggers a
// click on each affected sub filter so card visibility follows.
function DP_filter() {
    const DP_btn = this;
    // When the button is active, only the currently-active sub filters
    // need toggling off; otherwise every sub filter gets (re)activated.
    const DP_sub_chosen = $(DP_btn).hasClass("active") ? ".active" : "";
    const DP_sub_ul = $(DP_btn).next().get(0);
    const DP_sub_btn = $(DP_sub_ul).children(DP_sub_chosen);
    // slide duration scales with the number of sub filters
    $(DP_sub_ul).slideToggle(140 + 120 * (DP_sub_btn.length/1.75), function() {
        $(DP_btn).toggleClass("active");
    });
    $(DP_sub_btn).each(function(index, btn) {
        $(btn).trigger("click");
    });
}
//activate / inactivate DP sub filter
// Click handler for one DP sub filter <li> (`this` is the <li>). Toggles
// its .active state and shows/hides the matching cards via scrollCheck
// (x = -1 hides, x = 1 shows).
function DP_sub_filter() {
    console.log("sub_filter:", new Date().getTime());
    let DP_sub_tag = $(this).attr("id");
    if($(this).hasClass("active")){
        $(this).removeClass("active");
        //check scroll panel
        if(DP_sub_tag) {
            console.log(-1);
            scrollCheck(DP_sub_tag, -1);
        }
    } else {
        // BUG FIX (was marked "need rectification"): only one of the two
        // nav-group variants exists in the DOM at a time, so the absent
        // one always reports zero active buttons, and the old `||` made
        // this branch run on every click. With `&&` the first nav button
        // is re-activated only when the group actually present has no
        // active button left.
        if($(".btn-primary-group > .btn.active").length == 0 && $(".btn-primary-group-sm > .btn.active").length == 0) {
            $(".btn-primary-group > .btn:first-child").addClass("active");
            $(".btn-primary-group-sm > .btn:first-child").addClass("active");
        }
        $(this).addClass("active");
        //check scroll panel
        if(DP_sub_tag) {
            console.log(1);
            scrollCheck(DP_sub_tag, 1);
        }
    }
    // deckDisplay();
}
// check scroll panel and para descriptions
// Shows/hides the cards matching DP_sub_tag and keeps the DA nav buttons
// and sections in sync. x < 0: the sub filter was switched OFF (fade the
// cards out, disabling a whole DA section when its deck empties);
// x > 0 (default): the filter was switched ON (fade cards in and
// re-enable their sections).
function scrollCheck(DP_sub_tag, x) {
    DP_sub_tag = DP_sub_tag || "";
    x = x || 1;
    if(x < 0) {
        // Mark the outgoing cards first so each deck can test whether
        // anything else would remain visible after the fade.
        $(`#card-display .${DP_sub_tag}:visible`).addClass("to-fade");
        // $(".to-fade").each(function(index, elem) {
        //     console.log($(elem).attr("name"));
        // })
        $(".card-deck").each(function(index, elem){
            // elem: a single card deck
            let DA_tag = $($(elem).parent().get(0)).attr("id");
            if($(elem).children(':visible:not(.to-fade)').length === 0) {
                // Whole deck is going empty: hide the entire DA section
                // and grey out its matching nav button(s).
                console.log("Here for ", DA_tag);
                $("#" + DA_tag).fadeOut("normal", () => {
                    DP_fitting();
                    // arrow fn: `this` is still `elem` (the .each receiver
                    // bound by jQuery), so this un-flips cards in this deck
                    $(this).find(".card-inner.trans-3d").removeClass("trans-3d");
                });
                $("." + DA_tag).addClass("disabled");
            } else {
                // Deck keeps other visible cards: fade out only the
                // filtered ones and flip them back to their front face.
                $("#card-display ." + DP_sub_tag).fadeOut(400, function() {
                    $(this).find(".card-inner.trans-3d").removeClass("trans-3d");
                });
            }
        });
        $(".to-fade").removeClass("to-fade");
    } else {
        $("#card-display ." + DP_sub_tag).each(function(index, elem){
            // elem: a single card
            // Walk up to the DA section element, re-enable its nav button
            // and queue the (possibly hidden) section for fade-in.
            let targetSet = $(elem).parentsUntil("#card-display");
            let NS_tag = $(targetSet[targetSet.length-1]).attr("id");
            $(".disabled." + NS_tag).removeClass("disabled");
            $(`#${NS_tag}:hidden:not(.to-show)`).addClass("to-show");
            $(elem).fadeIn("slow");
        });
        DP_fitting();
        $(".to-show").fadeIn("normal", function(){
            $("#card-display > .to-show").removeClass("to-show");
        });
    }
    // NS_active_fitting();
}
// make DA fitting to display pattern
// When no DA section remains visible, clear every active nav button and
// show the "search failed" notice; otherwise keep the notice hidden.
function DP_fitting() {
    const anyVisible = $("#card-display > div:visible").length > 0;
    if(anyVisible) {
        $(".search-fail").css("display", "none");
        return;
    }
    $(".btn-primary-group > .btn.active").removeClass("active");
    $(".btn-primary-group-sm > .btn.active").removeClass("active");
    $(".search-fail").fadeIn("normal");
}
// avoid NS .disabled.active
// If the currently active DA nav button has become .disabled (its whole
// deck was filtered out), move the .active highlight to the nearest
// enabled sibling — preferring the previous one — or clear it entirely.
function DA_active_fitting() {
    // BUG FIX: `$(a).find(x) || $(b).find(x)` never consulted the second
    // selector — a jQuery object is truthy even when empty. Query both
    // group variants in a single call instead.
    var targetSet = $(".btn-primary-group, .btn-primary-group-sm").find(".disabled.active");
    // at most one disabled+active button per group variant; only the
    // first match is rehomed below
    if(targetSet.length > 0) {
        $(targetSet[0]).removeClass("active");
        var nextSet = $(targetSet[0]).nextAll(".btn:not(.disabled)");
        var preSet = $(targetSet[0]).prevAll(".btn:not(.disabled)");
        if(preSet.length > 0) {
            $(preSet[0]).trigger("click");
            return ;
        } else if(nextSet.length > 0) {
            console.log("next");
            $(nextSet[0]).trigger("click");
            return ;
        } else {
            // no enabled sibling left: clear every active state
            $(".btn-primary-group > .btn").removeClass("active");
            $(".btn-primary-group-sm > .btn").removeClass("active");
        }
    }
}
// NS buttons control #card-display
// Click handler for a DA nav button (`this` is the anchor): smooth-
// scrolls the page to the DA section linked in its href.
function DA_scroller() {
    const targetId = $(this).attr("href");
    const display = $("#card-display");
    // offset by the display's padding/border (height vs outerHeight)
    const target = $(targetId).position().top + display.height() - display.outerHeight();
    $('html, body').animate({scrollTop: target}, 800, "easeInOutQuart");
}
// spy on display scrolling action
// Scroll-spy: highlights the nav button of whichever DA section
// currently straddles the vertical middle of the visible display area.
function displaySpy() {
    let screenH = $(window).height() - $("#card-display").offset().top; // if screen height is very limited - > bug $("#card-display").outerHeight() + $("#card-display").height();
    // fall back to the small-layout group when the default one is absent
    let DA_class = ".btn-primary-group";
    if($(DA_class).length <= 0)
        DA_class = ".btn-primary-group-sm";
    $("#card-display").children(":not(.search-fail)").each(function(i, item){
        let currentPosition = $(item).position().top - $(window).scrollTop();
        // a section "owns" the highlight when its button is not already
        // active, its top is above the screen midline, and its bottom is
        // still at or below it
        if($("." + $(item).attr("id")).is(":not(.active)") && (currentPosition < 0.5*screenH) && (($(item).height() + currentPosition) >= 0.5*screenH)) {
            $(`${DA_class} > .btn.active`).removeClass("active");
            $(`${DA_class} > .btn:not(.disabled).` + $(item).attr("id")).addClass("active");
            // deck-reminder info preloading
            // $(".deck-reminder").empty();
            // $($(item).find(".display-title").get(0)).clone(false).appendTo(".deck-reminder");
            // $($(item).find(".display-sub-label").get(0)).clone(false).appendTo(".deck-reminder");
            // console.log("once")
        }
    });
}
// listen to reminder div beneath each card-deck
// listen to reminder div beneath each card-deck
// Scroll-spy for the sticky ".deck-reminder" bars. On each call:
//  - the reminder whose top is flush with the header's bottom edge becomes
//    the single "active-sticky" (its long description slides up),
//  - a reminder pushed away by the following section loses the class and its
//    description slides back down.
// `.some` is used so the scan stops as soon as a new sticky is promoted.
function reminderSpy() {
    // const windowTop = parseInt(Math.round(window.pageYOffset));
    let nav = document.querySelector("header");
    // let displayHeight = window.innerHeight - nav.offsetHeight;
    let current_active_sticky =document.querySelector(".deck-reminder.active-sticky");
    let allReminders = Array.from(document.querySelectorAll(".deck-reminder"));
    allReminders.some(function(sticky, index, nodeList) {
        // Distance from this reminder's top to the bottom edge of the header.
        let reminderToHeader = parseInt(Math.round(sticky.getBoundingClientRect().top)) - nav.offsetHeight;
        if(sticky.classList.contains("active-sticky")) {
            // The active sticky is being pushed out by the next section:
            // release it and re-expand its description.
            if(sticky.getBoundingClientRect().bottom <= sticky.nextElementSibling.getBoundingClientRect().top + 5) {
                console.log("A");
                // console.log(index+1, reminderToHeader);
                sticky.classList.remove("active-sticky");
                $($(sticky).find(".display-desc").get(0)).slideDown(360);
            }
            return false;
        }
        // A non-active reminder that sits at least 1px below the header while
        // some other sticky is active: make sure its description is expanded.
        // if(current_active_sticky && (reminderToHeader > (current_active_sticky.offsetHeight + sticky.offsetHeight))) {
        if(current_active_sticky && (reminderToHeader >= 1)) {
            // console.log("A");
            // sticky.classList.remove("active-sticky");
            // console.log(index+1, reminderToHeader);
            // console.log("B");
            $($(sticky).find(".display-desc").get(0)).slideDown(360);
            // return false;
        }
        // Within 1px of the header edge: promote this reminder to the active
        // sticky, collapse its description, and demote the previous one.
        // if(Math.abs(reminderToHeader) < 5) {
        if(Math.abs(reminderToHeader) < 1) {
            // console.log(index+1, reminderToHeader);
            // console.log("C");
            $($(sticky).find(".display-desc").get(0)).slideUp(360);
            sticky.classList.add("active-sticky");
            if(current_active_sticky) {
                current_active_sticky.classList.remove("active-sticky");
            }
            return true;
        }
    });
}
// Filter the card decks by the free-text query typed into the search box.
// Tokenizes the query, builds a case-insensitive OR-regex, matches it
// against every card document fetched from `card_doc` (declared elsewhere),
// then reveals the matching cards and disables the navigation buttons of
// decks with no hits. An empty query restores the unfiltered view.
function searchFunc() {
    var show_list = [];
    console.log("Ready to search.");
    var read = $("input.form-control").val().toString() || "";
    if(read.toLowerCase() == "search") read = ""; // ignore placeholder text
    // Replace punctuation with spaces, collapse whitespace runs, then split
    // into trimmed tokens.
    // BUGFIX: this previously assigned an undeclared global (`readRegOrigin`)
    // and misused $.each as a map — $.each passes (index, value) and ignores
    // the callback's return value, so the old "trim" never did anything.
    var tokens = $.map(read.replace(/[.,:;·'"\(\)\[\]\{\}\\\/\|]/g, " ").replace(/\s+/g, " ").split(" "), function(item) {
        return $.trim(item);
    });
    // De-duplicate tokens while keeping their first-seen order.
    var readReg = tokens.filter(function(item, index, arr) {
        return arr.indexOf(item, 0) === index;
    });
    console.log("Search for:", readReg);
    // BUGFIX: the old guard compared against ("" | " "), which is the NUMBER
    // 0 (bitwise OR coerces strings), so a literal search for "0" was
    // silently treated as an empty query.
    if(readReg.length > 0 && readReg[0] !== "") {
        //transform string to regexExp
        var rex = new RegExp(readReg.join("|"), "ig");
        $.getJSON(card_doc, function(json) {
            const doc_length = json.length;
            let flag = false;
            // Collect the names of all matching cards.
            $.each(json, function(i, item) {
                // Strip bookkeeping fields so they cannot produce matches.
                delete item.card_id;
                delete item.eg_arr;
                delete item.DA_class_id;
                let itemDoc = (Object.values(item)).join(" ");
                if(itemDoc.search(rex) > -1) {
                    show_list.push(`card_${i+1}`);
                }
                if(i === (doc_length-1)) {
                    flag = true;
                    console.log("Search finished");
                }
            });
            if(flag && (show_list.length > 0)) {
                console.log(`${show_list.length} results were found: `, show_list);
                show_list.forEach(card_name => $(`[name="${card_name}"]`).addClass("as-result"));
                $(".btn-sub-list:hidden").slideDown(function() {
                    $(".btn-secondary-group > .btn").addClass("active"); //activate DP
                    $(".btn-sub-list > li").addClass("active"); //activate DP
                });
                $("#card-display > div").fadeOut("normal", function() {
                    // Re-layout once, after the LAST deck finishes fading.
                    if($(this).is($("#card-display > div").last())) {
                        searchResultDisplay();
                    }
                    $(".search-result").text(`${show_list.length} result${show_list.length > 1 ? "s" : ""}`);
                });
            } else {
                console.log("Nothing found.");
                $("#card-display > div").fadeOut("normal", function() {
                    $(".search-fail").fadeIn("fast");
                    $(".card-deck > div").fadeOut("normal");
                });
                $(".search-result").text("0 result");
                $(".btn-primary-group > .btn").removeClass("active").addClass("disabled");
                $(".btn-primary-group-sm > .btn").removeClass("active").addClass("disabled");
            }
        });
    } else {
        // Empty query: restore the full, unfiltered display.
        $(".search-fail").fadeOut("normal");
        if($(".card-deck > div:visible").length == $(".card-deck > div").length) return ;
        $("#card-display > div").fadeOut("normal", function() {
            $(".card-deck > div").css("display", "block");
            $("#card-display > div").fadeIn("normal");
        });
        $(".btn-primary-group > .btn").removeClass("disabled");
        $(".btn-primary-group-sm > .btn").removeClass("disabled");
        $(".btn-secondary-group > .btn").addClass("active");
        $(".btn-secondary-group-sm > .btn").addClass("active");
        $(".search-result").text("");
    }
    scrollToTop();
}
// layout after searching
function searchResultDisplay() {
$(".card-deck > div").css("display", "none");
$(".card-deck > .as-result").each(function(index, elem) {
let targetSet = $(elem).parentsUntil("#card-display");
let NS_tag = $(targetSet[targetSet.length-1]).attr("id");
$(".disabled." + NS_tag).removeClass("disabled");
$(`#${NS_tag}:not(.as-result)`).addClass("as-result");
$(elem).css("display", "block");
});
$("#card-display > .as-result").fadeIn("normal", function(){
$("#card-display .as-result").removeClass("as-result");
$("#card-display > div:hidden").each(function(index, NS_elem) {
let NS_tag = $(NS_elem).attr("id");
$("." + NS_tag).removeClass("active").addClass("disabled");
});
});
}
// set filter panel
function panelLayout() {
let bannerHeight = $("header").outerHeight();
let panel = $("#filter-panel");
panel.css({
// "position": "sticky",
// "overflow-y": "auto",
// "z-index": 500,
"top": bannerHeight + 1
});
if($(window).outerWidth() >= 768) {
panel.css("height", ($(window).outerHeight() - bannerHeight -1));
return " list-group";
} else {
panel.css("height", "100%");
return "-sm";
}
}
// check NS - Card display relationship
function deckDisplay(list, idString) {
idString = idString || "";
list = list || [];
$("#card-display > div").slideDown(1);
// $(".trans-3d").hide(1);
// $.map(list, function(num) {
// $(idString + " [name=\'card_" + num + "\']").show("fast");
// });
// $("#card-display > div").each(function(i, part) {
// if($(part).find(".trans-3d:visible").length == 0) {
// $(part).slideUp("fast");
// $("." + $(part).attr("id") + ":not(disabled)").addClass("disabled");
// } else {
// $("." + $(part).attr("id")).removeClass("disabled");
// }
// });
// $(".btn-primary-group a").removeClass("active");
// $(".btn-primary-group a:not(.disabled):first-child").addClass("active");
}
// fade in full-screen button
function fullScreenOver(){
$($(this).children(".img-cover")[0]).fadeIn(180);
}
// fade out full-screen button
function fullScreenOut() {
$($(this).children(".img-cover")[0]).fadeOut(240);
}
// Jump back to the first deck by re-triggering the first primary nav button.
function scrollToTop() {
    $(".btn-primary-group > .btn").eq(0).trigger("click");
}
// fill modal window info
function modalInfo() {
var untilMain = $(this).parentsUntil(".card-deck");
var thisCard = $(untilMain[untilMain.length - 1]);
// var bgColor = $(thisCard.find(".card-header")[0]).css("background");
// var modalTitle = $(thisCard.find("h6")[0]).text();
// var modalURL = $(thisCard.find("a")[0]).attr("href");
var modalNum = $(thisCard).attr("name").substr(5);
// var modalNum = $(thisCard.find(".card-num")[0]).text().substr(4);
// var modalSource = $($(thisCard.find(".caption")[0]).children()[0]).text().replace("Source:", " - ");
// $(".modal-content").css("background", bgColor);
// $(".modal-title").text(modalTitle).attr("href", modalURL);
// $(".modal-header > span").text(modalSource);
// $(".modal-body > img").attr({
// src: "assets/media/loading_light.svg",
// "data-echo": "assets/back_gif/back_" + modalNum + ".gif",
// "onerror": "assets/media/fail_loading_light.svg"
// src: "./assets/back_gif/" + modalNum + ".gif"
// });
}
// action easing for scrolling
// action easing for scrolling
// Robert Penner-style easing equations registered on jQuery.easing.
// Signature: (x, t, b, c, d) = (unused fraction, elapsed time, begin value,
// total change, duration). "In" accelerates, "Out" decelerates, "InOut"
// does both halves.
jQuery.extend( jQuery.easing,
{
    easeInSine: function (x, t, b, c, d) {
        return -c * Math.cos(t/d * (Math.PI/2)) + c + b;
    },
    easeOutSine: function (x, t, b, c, d) {
        return c * Math.sin(t/d * (Math.PI/2)) + b;
    },
    easeInOutSine: function (x, t, b, c, d) {
        return -c/2 * (Math.cos(Math.PI*t/d) - 1) + b;
    },
    easeInQuart: function (x, t, b, c, d) {
        return c*(t/=d)*t*t*t + b;
    },
    easeOutQuart: function (x, t, b, c, d) {
        return -c * ((t=t/d-1)*t*t*t - 1) + b;
    },
    easeInOutQuart: function (x, t, b, c, d) {
        // First half eases in, second half (t shifted by 2) eases out.
        if ((t/=d/2) < 1) return c/2*t*t*t*t + b;
        return -c/2 * ((t-=2)*t*t*t - 2) + b;
    },
});
|
// Two Sum II: given an array sorted in ASCENDING order and a target, return
// the 1-based indices of the two entries that sum to the target, or [] when
// no such pair exists. Classic two-pointer scan: O(n) time, O(1) space.
let twoSum = (numbers, target) => {
    let lo = 0;
    let hi = numbers.length - 1;
    while (lo < hi) {
        const sum = numbers[lo] + numbers[hi];
        if (sum === target) {
            return [lo + 1, hi + 1];
        }
        // Too small: advance the left pointer over larger values;
        // too large: pull the right pointer in over smaller ones.
        if (sum < target) {
            lo++;
        } else {
            hi--;
        }
    }
    // No pair sums to the target.
    return [];
}
console.log(twoSum([2,7,11,15], 9))//[1,2];
def isPanagram(string):
    """Return True if `string` contains every letter a-z (i.e. is a pangram).

    The check is case-insensitive; digits, punctuation and whitespace are
    ignored. The name is kept as-is for existing callers, although the usual
    spelling is "pangram".
    """
    # Set containment replaces the original O(n*26) list.index/pop scan and
    # drops the bare `except:` that silently swallowed ValueError for
    # non-alphabet characters.
    return set("abcdefghijklmnopqrstuvwxyz") <= set(string.lower())
|
import json
import time
import unittest
from app import app, storage, url_scheme_and_hostname
from config import build_requests_session
from data import model
from data.registry_model import registry_model
from data.database import Image, ManifestLegacyImage
from initdb import setup_database_for_testing, finished_database_for_testing
from util.secscan.secscan_util import get_blob_download_uri_getter
from util.secscan.v4.api import ClairSecurityScannerAPI, APIRequestFailure
from util.secscan.v4.fake import fake_security_scanner
from util.secscan.blob import BlobURLRetriever
from util.security.instancekeys import InstanceKeys
# Fixture coordinates: the seeded admin user and its "simple" test repository.
ADMIN_ACCESS_USER = "devtable"
SIMPLE_REPO = "simple"
def _get_legacy_image(namespace, repo, tag, include_storage=True):
    """Resolve the legacy (v1) image backing the manifest of ``repo:tag``.

    ``include_storage`` is currently unused here and is kept only for
    call-site compatibility.
    """
    repository = registry_model.lookup_repository(namespace, repo)
    tag_ref = registry_model.get_repo_tag(repository, tag)
    manifest_ref = registry_model.get_manifest_for_tag(tag_ref)
    return ManifestLegacyImage.get(manifest_id=manifest_ref._db_id).image
class TestSecurityScanner(unittest.TestCase):
    # Integration tests for the Clair v4 security-scanner API wrapper,
    # exercised against the in-process fake scanner and fake storage.

    def setUp(self):
        # Enable direct download in fake storage.
        storage.put_content(["local_us"], "supports_direct_download", b"true")
        # Have fake storage say all files exist for the duration of the test.
        storage.put_content(["local_us"], "all_files_exist", b"true")
        # Setup the database with fake storage.
        setup_database_for_testing(self)
        self.app = app.test_client()
        # Push a request context so registry_model/storage calls that expect
        # an active Flask request work inside test bodies.
        self.ctx = app.test_request_context()
        self.ctx.__enter__()
        instance_keys = InstanceKeys(app)
        retriever = BlobURLRetriever(storage, instance_keys, app)
        # API client pointed at the fake scanner host (intercepted in-process).
        self.api = ClairSecurityScannerAPI(
            "http://fakesecurityscanner", build_requests_session(), retriever
        )

    def tearDown(self):
        # Undo the fake-storage flags and database fixtures set up in setUp,
        # then pop the request context.
        storage.remove(["local_us"], "supports_direct_download")
        storage.remove(["local_us"], "all_files_exist")
        finished_database_for_testing(self)
        self.ctx.__exit__(True, None, None)

    def assertAnalyzed(self, layer, security_scanner, isAnalyzed, engineVersion):
        # Assert that `layer` carries the expected security-indexed flag and
        # engine version; when analyzed, the layer and all of its parents must
        # also be known to the fake scanner.
        self.assertEqual(isAnalyzed, layer.security_indexed)
        self.assertEqual(engineVersion, layer.security_indexed_engine)
        if isAnalyzed:
            self.assertTrue(security_scanner.has_layer(security_scanner.layer_id(layer)))
            # Ensure all parent layers are marked as analyzed.
            parents = model.image.get_parent_images(ADMIN_ACCESS_USER, SIMPLE_REPO, layer)
            for parent in parents:
                self.assertTrue(parent.security_indexed)
                self.assertEqual(engineVersion, parent.security_indexed_engine)
                self.assertTrue(security_scanner.has_layer(security_scanner.layer_id(parent)))

    def test_get_layer(self):
        """
        Test for basic retrieval of layers from the security scanner.
        """
        repo_ref = registry_model.lookup_repository(ADMIN_ACCESS_USER, SIMPLE_REPO)
        repo_tag = registry_model.get_repo_tag(repo_ref, "latest")
        manifest = registry_model.get_manifest_for_tag(repo_tag)
        layers = registry_model.list_manifest_layers(manifest, storage, True)
        registry_model.populate_legacy_images_for_testing(manifest, storage)
        with fake_security_scanner() as security_scanner:
            # Ensure the layer doesn't exist yet.
            self.assertIsNone(self.api.index_report(manifest.digest))
            # Add the layer.
            self.api.index(manifest, layers)
            # Retrieve the results.
            result = self.api.vulnerability_report(manifest.digest)
            self.assertIsNotNone(result)
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
|
"""FocusSeq2Seq
Copyright (c) 2019-present NAVER Corp.
MIT license
"""
import time
import multiprocessing
import numpy as np
import torch
from utils import bleu, rouge
from utils.tensor_utils import repeat
from utils.data_utils import split_sentences
# Select the compute device once at import time; all tensors in evaluate()
# are moved here via `.to(device)`.
if torch.cuda.is_available():
    device = 'cuda'
else:
    device = 'cpu'
# Worker budget for the multiprocessing-based BLEU/ROUGE computations below.
n_cpus = multiprocessing.cpu_count()
def evaluate(loader, model, epoch, config, test=False):
    """Decode the whole `loader` with `model` and score the hypotheses.

    Supports two tasks (`config.task`): 'QG' (question generation, BLEU-based
    metrics) and 'SM' (summarization, ROUGE-based metrics). When the model
    produces multiple hypotheses per input (either `config.n_mixture > 1` or
    `config.decode_k > 1`), the highest-scoring one per example is tracked as
    `best_hypothesis` and oracle/self/average variants of the metric are also
    reported (full metric suite only when `test=True`).

    Returns (metric_result, hypotheses, best_hypothesis, hyp_focus,
    hyp_attention), where the last two hold per-example focus probabilities
    and attention maps for visualization.
    """
    start = time.time()
    print('Evaluation start!')
    model.eval()
    # Ground-truth references come straight from the dataset dataframe.
    if config.task == 'QG':
        references = loader.dataset.df.target_WORD.tolist()
    elif config.task == 'SM':
        # references = loader.dataset.df.target_tagged.tolist()
        references = loader.dataset.df.target_multiref.tolist()
        # references = loader.dataset.df.target.tolist()
    # One bucket per hypothesis slot (mixture component or beam candidate).
    hypotheses = [[] for _ in range(max(config.n_mixture, config.decode_k))]
    hyp_focus = [[] for _ in range(max(config.n_mixture, config.decode_k))]
    hyp_attention = [[] for _ in range(max(config.n_mixture, config.decode_k))]
    # Exactly one of n_mixture / decode_k may exceed 1.
    if config.n_mixture > 1:
        assert config.decode_k == 1
        use_multiple_hypotheses = True
        best_hypothesis = []
    elif config.decode_k > 1:
        assert config.n_mixture == 1
        use_multiple_hypotheses = True
        best_hypothesis = []
    else:
        use_multiple_hypotheses = False
        best_hypothesis = None
    word2id = model.word2id
    id2word = model.id2word
    # PAD_ID = word2id['<pad>']
    vocab_size = len(word2id)
    n_iter = len(loader)
    temp_time_start = time.time()
    with torch.no_grad():
        for batch_idx, batch in enumerate(loader):
            # Unpack the task-specific batch tuple, moving tensors to device.
            if config.task == 'QG':
                source_WORD_encoding, source_len, \
                    target_WORD_encoding, target_len, \
                    source_WORD, target_WORD, \
                    answer_position_BIO_encoding, answer_WORD, \
                    ner, ner_encoding, \
                    pos, pos_encoding, \
                    case, case_encoding, \
                    focus_WORD, focus_mask, \
                    focus_input, answer_WORD_encoding, \
                    source_WORD_encoding_extended, oovs \
                    = [b.to(device) if isinstance(b, torch.Tensor) else b for b in batch]
            elif config.task == 'SM':
                source_WORD_encoding, source_len, \
                    target_WORD_encoding, target_len, \
                    source_WORD, target_WORD, \
                    focus_WORD, focus_mask, \
                    focus_input, \
                    source_WORD_encoding_extended, oovs \
                    = [b.to(device) if isinstance(b, torch.Tensor) else b for b in batch]
                # SM batches carry no QG-only features.
                answer_position_BIO_encoding = answer_WORD = ner_encoding = pos_encoding = case_encoding = None
                answer_WORD_encoding = None
            B, L = source_WORD_encoding.size()
            if config.use_focus:
                if config.eval_focus_oracle:
                    # Use ground-truth focus instead of the selector's output.
                    generated_focus_mask = focus_mask
                    input_mask = focus_mask
                else:
                    # [B * n_mixture, L]
                    focus_p = model.selector(
                        source_WORD_encoding,
                        answer_position_BIO_encoding,
                        ner_encoding,
                        pos_encoding,
                        case_encoding,
                        # mixture_id=mixture_id,
                        # focus_input=focus_input,
                        train=False)
                    # Binarize the selector's probabilities at the threshold.
                    generated_focus_mask = (focus_p > config.threshold).long()
                # Repeat for Focus Selector
                if config.n_mixture > 1:
                    source_WORD_encoding = repeat(
                        source_WORD_encoding, config.n_mixture)
                    if config.feature_rich:
                        answer_position_BIO_encoding = repeat(
                            answer_position_BIO_encoding, config.n_mixture)
                        ner_encoding = repeat(ner_encoding, config.n_mixture)
                        pos_encoding = repeat(pos_encoding, config.n_mixture)
                        case_encoding = repeat(case_encoding, config.n_mixture)
                    if config.model == 'PG':
                        source_WORD_encoding_extended = repeat(
                            source_WORD_encoding_extended, config.n_mixture)
                        assert source_WORD_encoding.size(0) \
                            == source_WORD_encoding_extended.size(0)
                input_mask = generated_focus_mask
            else:
                input_mask = None
                generated_focus_mask = focus_mask
            # [B*n_mixturre, K, max_len]
            prediction, score = model.seq2seq(
                source_WORD_encoding,
                answer_WORD_encoding=answer_WORD_encoding,
                answer_position_BIO_encoding=answer_position_BIO_encoding,
                ner_encoding=ner_encoding,
                pos_encoding=pos_encoding,
                case_encoding=case_encoding,
                focus_mask=input_mask,
                target_WORD_encoding=None,
                source_WORD_encoding_extended=source_WORD_encoding_extended,
                train=False,
                decoding_type=config.decoding,
                beam_k=config.beam_k,
                max_dec_len=30 if config.task == 'QG' else 120 if config.task == 'SM' else exit(),
                temperature=config.temperature,
                diversity_lambda=config.diversity_lambda)
            # Keep only the top decode_k beam candidates per mixture.
            prediction = prediction.view(B, config.n_mixture, config.beam_k, -1)
            prediction = prediction[:, :, 0:config.decode_k, :].tolist()
            if use_multiple_hypotheses:
                score = score.view(B, config.n_mixture, config.beam_k)
                score = score[:, :, :config.decode_k].view(B, -1)
                # [B]
                best_hyp_idx = score.argmax(dim=1).tolist()
            # Word IDs => Words
            for batch_j, (predicted_word_ids, source_words, target_words) \
                    in enumerate(zip(prediction, source_WORD, target_WORD)):
                if config.n_mixture > 1:
                    assert config.decode_k == 1
                    for n in range(config.n_mixture):
                        predicted_words = []
                        # [n_mixture, decode_k=1, dec_len]
                        for word_id in predicted_word_ids[n][0]:
                            # Generate
                            if word_id < vocab_size:
                                word = id2word[word_id]
                                # End of sequence
                                if word == '<eos>':
                                    break
                            # Copy
                            else:
                                # Ids beyond the vocab point back into the
                                # source (NQG) or the per-example OOV list (PG).
                                pointer_idx = word_id - vocab_size
                                if config.model == 'NQG':
                                    word = source_words[pointer_idx]
                                elif config.model == 'PG':
                                    try:
                                        word = oovs[batch_j][pointer_idx]
                                    except IndexError:
                                        import ipdb
                                        ipdb.set_trace()
                            predicted_words.append(word)
                        hypotheses[n].append(predicted_words)
                        if use_multiple_hypotheses and best_hyp_idx[batch_j] == n:
                            best_hypothesis.append(predicted_words)
                elif config.n_mixture == 1:
                    for k in range(config.decode_k):
                        predicted_words = []
                        # [n_mixture=1, decode_k, dec_len]
                        for word_id in predicted_word_ids[0][k]:
                            # Generate
                            if word_id < vocab_size:
                                word = id2word[word_id]
                                # End of sequence
                                if word == '<eos>':
                                    break
                            # Copy
                            else:
                                pointer_idx = word_id - vocab_size
                                if config.model == 'NQG':
                                    word = source_words[pointer_idx]
                                elif config.model == 'PG':
                                    try:
                                        word = oovs[batch_j][pointer_idx]
                                    except IndexError:
                                        import ipdb
                                        ipdb.set_trace()
                            predicted_words.append(word)
                        hypotheses[k].append(predicted_words)
                        if use_multiple_hypotheses and best_hyp_idx[batch_j] == k:
                            best_hypothesis.append(predicted_words)
            # For visualization
            if config.use_focus:
                # [B * n_mixture, L] => [B, n_mixture, L]
                focus_p = focus_p.view(B, config.n_mixture, L)
                generated_focus_mask = generated_focus_mask.view(B, config.n_mixture, L)
                # target_L x [B * n_mixture, L]
                # => [B * n_mixture, L, target_L]
                # => [B, n_mixture, L, target_L]
                attention_list = torch.stack(model.seq2seq.decoder.attention_list, dim=2).view(
                    B, config.n_mixture, L, -1)
                # n_mixture * [B, L]
                for n, focus_n in enumerate(focus_p.split(1, dim=1)):
                    # [B, 1, L] => [B, L]
                    focus_n = focus_n.squeeze(1).tolist()
                    # B x [L]
                    for f_n in focus_n:
                        hyp_focus[n].append(f_n)  # [L]
                # n_mixture * [B, L, target_L]
                for n, attention in enumerate(attention_list.split(1, dim=1)):
                    # [B, 1, L, target_L] => [B, L, target_L]
                    attention = attention.squeeze(1).tolist()
                    # B x [L, target_L]
                    for at in attention:
                        hyp_attention[n].append(np.array(at))  # [L, target_L]
            # During validation, print a handful of samples from the first
            # batch for manual inspection.
            if (not test) and batch_idx == 0:
                # if batch_idx > 260:
                n_samples_to_print = min(10, len(source_WORD))
                for i in range(n_samples_to_print):
                    s = source_WORD[i]  # [L]
                    g_m = generated_focus_mask[i].tolist()  # [n_mixture, L]
                    f_p = focus_p[i].tolist()  # [n_mixture, L]
                    print(f'[{i}]')
                    # print(f'(focus {k})')
                    # print(f"length of focus k; " + str(k))
                    # print(f"length of focus f_p; " + str(len(f_p)))
                    # print(f"length of focus s; " + str(len(s)))
                    print(f"Source Sequence: {' '.join(source_WORD[i])}")
                    if config.task == 'QG':
                        print(f"Answer: {' '.join(answer_WORD[i])}")
                    if config.use_focus:
                        print(f"Oracle Focus: {' '.join(focus_WORD[i])}")
                    if config.task == 'QG':
                        print(f"Target Question: {' '.join(target_WORD[i])}")
                    elif config.task == 'SM':
                        print(f"Target Summary: {' '.join(target_WORD[i])}")
                    if config.n_mixture > 1:
                        for n in range(config.n_mixture):
                            print(n)
                            if config.use_focus:
                                print(f'(focus {n})')
                                print(
                                    f"Focus Prob: {' '.join([f'({w}/{p:.2f})' for (w, p) in zip(s, f_p[n])])}")
                                print(
                                    f"Generated Focus: {' '.join([w for w, m in zip(s, g_m[n]) if m == 1])}")
                            if config.task == 'QG':
                                print(
                                    f"Generated Question: {' '.join(hypotheses[n][B * batch_idx + i])}\n")
                            elif config.task == 'SM':
                                print(
                                    f"Generated Summary: {' '.join(hypotheses[n][B * batch_idx + i])}\n")
                    else:
                        for k in range(config.decode_k):
                            if config.use_focus:
                                print(
                                    f"Focus Prob: {' '.join([f'({w}/{p:.2f})' for (w, p) in zip(s, f_p[k])])}")
                                print(
                                    f"Generated Focus: {' '.join([w for w, m in zip(s, g_m[k]) if m == 1])}")
                            if config.task == 'QG':
                                print(
                                    f"Generated Question: {' '.join(hypotheses[k][B * batch_idx + i])}\n")
                            elif config.task == 'SM':
                                print(
                                    f"Generated Summary: {' '.join(hypotheses[k][B * batch_idx + i])}\n")
            # Periodic progress log.
            if batch_idx % 100 == 0 or (batch_idx + 1) == n_iter:
                log_str = f'Evaluation | Epoch [{epoch}/{config.epochs}]'
                log_str += f' | Iteration [{batch_idx}/{n_iter}]'
                time_taken = time.time() - temp_time_start
                log_str += f' | Time taken: : {time_taken:.2f}'
                print(log_str)
                temp_time_start = time.time()
    time_taken = time.time() - start
    print(f"Generation Done! It took {time_taken:.2f}s")
    if test:
        print('Test Set Evaluation Result')
    score_calc_start = time.time()
    # Multi-hypothesis path: report the metric on the best hypothesis plus
    # oracle / self / average variants across all hypothesis slots.
    if not config.eval_focus_oracle and use_multiple_hypotheses:
        if config.task == 'QG':
            nested_references = [[r] for r in references]
            flat_hypothesis = best_hypothesis
            # bleu_4 = bleu.corpus_bleu(nested_references, flat_hypothesis,
            #                           smoothing_function=bleu.cm.method2) * 100
            bleu_4 = bleu.corpus_bleu(nested_references, flat_hypothesis) * 100
            print(f"BLEU-4: {bleu_4:.3f}")
            oracle_bleu_4 = bleu.oracle_bleu(hypotheses, references,
                                             n_process=min(4, n_cpus)) * 100
            print(f"Oracle BLEU-4: {oracle_bleu_4:.3f}")
            self_bleu = bleu.self_bleu(hypotheses,
                                       n_process=min(4, n_cpus)) * 100
            print(f"Self BLEU-4: {self_bleu:.3f}")
            avg_bleu = bleu.avg_bleu(hypotheses, references) * 100
            print(f"Average BLEU-4: {avg_bleu:.3f}")
            metric_result = {
                'BLEU-4': bleu_4,
                'Oracle_BLEU-4': oracle_bleu_4,
                'Self_BLEU-4': self_bleu,
                'Average_BLEU-4': avg_bleu}
        elif config.task == 'SM':
            flat_hypothesis = best_hypothesis
            # summaries = [split_sentences(remove_tags(words))
            #              for words in flat_hypothesis]
            summaries = [split_sentences(words)
                         for words in flat_hypothesis]
            # references = [split_tagged_sentences(ref) for ref in references]
            # summaries = [[" ".join(words)]
            #              for words in flat_hypothesis]
            # references = [[ref] for ref in references]
            rouge_eval_start = time.time()
            rouge_dict = rouge.corpus_rouge(summaries, references,
                                            n_process=min(4, n_cpus))
            print(f'ROUGE calc time: {time.time() - rouge_eval_start:.3f}s')
            for metric_name, score in rouge_dict.items():
                print(f"{metric_name}: {score * 100:.3f}")
            ##################
            hypotheses_ = [[split_sentences(words) for words in hypothesis]
                           for hypothesis in hypotheses]
            # references = [split_tagged_sentences(ref) for ref in references]
            # hypotheses_ = [[[" ".join(words)] for words in hypothesis]
            #                for hypothesis in hypotheses]
            # references = [[ref] for ref in references]
            oracle_rouge_eval_start = time.time()
            oracle_rouge = rouge.oracle_rouge(hypotheses_, references,
                                              n_process=min(4, n_cpus))
            print(f'Oracle ROUGE calc time: {time.time() - oracle_rouge_eval_start:.3f}s')
            for metric_name, score in oracle_rouge.items():
                print(f"Oracle {metric_name}: {score * 100:.3f}")
            self_rouge_eval_start = time.time()
            self_rouge = rouge.self_rouge(hypotheses_,
                                          n_process=min(4, n_cpus))
            print(f'Self ROUGE calc time: {time.time() - self_rouge_eval_start:.3f}s')
            for metric_name, score in self_rouge.items():
                print(f"Self {metric_name}: {score * 100:.3f}")
            avg_rouge_eval_start = time.time()
            avg_rouge = rouge.avg_rouge(hypotheses_, references,
                                        n_process=min(4, n_cpus))
            print(f'Average ROUGE calc time: {time.time() - avg_rouge_eval_start:.3f}s')
            for metric_name, score in avg_rouge.items():
                print(f"Average {metric_name}: {score * 100:.3f}")
            metric_result = {
                'ROUGE-1': rouge_dict['ROUGE-1'],
                'ROUGE-2': rouge_dict['ROUGE-2'],
                'ROUGE-L': rouge_dict['ROUGE-L'],
                'Oracle_ROUGE-1': oracle_rouge['ROUGE-1'],
                'Oracle_ROUGE-2': oracle_rouge['ROUGE-2'],
                'Oracle_ROUGE-L': oracle_rouge['ROUGE-L'],
                'Self_ROUGE-1': self_rouge['ROUGE-1'],
                'Self_ROUGE-2': self_rouge['ROUGE-2'],
                'Self_ROUGE-L': self_rouge['ROUGE-L'],
                'Average_ROUGE-1': avg_rouge['ROUGE-1'],
                'Average_ROUGE-2': avg_rouge['ROUGE-2'],
                'Average_ROUGE-L': avg_rouge['ROUGE-L'],
            }
            metric_result = {k: v * 100 for k, v in metric_result.items()}
    # Single-hypothesis path: report only the base metric.
    else:
        if config.task == 'QG':
            nested_references = [[r] for r in references]
            flat_hypothesis = hypotheses[0]
            # bleu_4 = bleu.corpus_bleu(nested_references, flat_hypothesis,
            #                           smoothing_function=bleu.cm.method2) * 100
            bleu_4 = bleu.corpus_bleu(nested_references, flat_hypothesis)
            # print(f"BLEU-4: {100 * bleu_4:.3f}")
            metric_result = {'BLEU-4': bleu_4}
            metric_result = {k: v * 100 for k, v in metric_result.items()}
            for metric_name, score in metric_result.items():
                print(f"{metric_name}: {score:.3f}")
        elif config.task == 'SM':
            flat_hypothesis = hypotheses[0]
            # summaries = [split_sentences(remove_tags(words))
            #              for words in flat_hypothesis]
            summaries = [split_sentences(words)
                         for words in flat_hypothesis]
            # references = [split_tagged_sentences(ref) for ref in references]
            # summaries = [[" ".join(words)]
            #              for words in flat_hypothesis]
            # references = [[ref] for ref in references]
            metric_result = rouge.corpus_rouge(summaries, references,
                                               n_process=min(4, n_cpus))
            metric_result = {k: v * 100 for k, v in metric_result.items()}
            for metric_name, score in metric_result.items():
                print(f"{metric_name}: {score:.3f}")
    score_calc_time_taken = time.time() - score_calc_start
    print(f'Score calculation Done! It took {score_calc_time_taken:.2f}s')
    return metric_result, hypotheses, best_hypothesis, hyp_focus, hyp_attention
# Standalone entry point: load a trained checkpoint, run evaluate() on the
# test split, and write the decoded hypotheses back into the checkpoint file.
if __name__ == '__main__':
    from pathlib import Path
    current_dir = Path(__file__).resolve().parent
    import configs
    config = configs.get_config()
    print(config)
    from build_utils import get_loader, build_model, get_ckpt_name
    # Build Data Loader
    data_dir = current_dir.joinpath(config.data + '_out')
    _, _, test_loader, word2id, id2word = get_loader(
        config, data_dir)
    # Build Model
    model = build_model(config, word2id, id2word)
    model.to(device)
    # Load Model from checkpoint
    ckpt_dir = Path(f"./ckpt/{config.model}/").resolve()
    filename = get_ckpt_name(config)
    filename += f"_epoch{config.load_ckpt}.pkl"
    ckpt_path = ckpt_dir.joinpath(filename)
    ckpt = torch.load(ckpt_path)
    model.load_state_dict(ckpt['model'])
    print('Loaded model from', ckpt_path)
    # Run Evaluation
    metric_result, hypotheses, best_hypothesis, hyp_focus, hyp_attention = evaluate(
        test_loader, model, config.load_ckpt, config, test=True)
    # Save evaluation results at the same checkpoint
    ckpt['best_hypothesis'] = best_hypothesis
    ckpt['hypotheses'] = hypotheses
    ckpt['test_df'] = test_loader.dataset.df
    ckpt['focus_p'] = hyp_focus
    ckpt['attention'] = hyp_attention
    torch.save(ckpt, ckpt_path)
|
"use strict";
// Sequelize models loader: connects to the database described by
// config/config.json (per NODE_ENV), imports every model file in this
// directory, wires up associations, and exports the populated `db` registry.
const fs = require("fs");
const path = require("path");
const Sequelize = require("sequelize");
const basename = path.basename(__filename);
const env = process.env.NODE_ENV || "development";
const config = require(__dirname + "/../config/config.json")[env];
const db = {};
let sequelize;
// Either a single connection-string env var, or discrete credentials.
if (config.use_env_variable) {
  sequelize = new Sequelize(process.env[config.use_env_variable], config);
} else {
  sequelize = new Sequelize(
    config.database,
    config.username,
    config.password,
    config
  );
}
// Register every sibling .js model file (skipping dotfiles and this loader).
// NOTE(review): sequelize["import"] was removed in Sequelize v6 — confirm the
// pinned Sequelize major version before upgrading.
fs.readdirSync(__dirname)
  .filter(file => {
    return (
      file.indexOf(".") !== 0 && file !== basename && file.slice(-3) === ".js"
    );
  })
  .forEach(file => {
    const model = sequelize["import"](path.join(__dirname, file));
    db[model.name] = model;
  });
// Let each model declare its relations once every model is registered.
Object.keys(db).forEach(modelName => {
  if (db[modelName].associate) {
    db[modelName].associate(db);
  }
});
db.sequelize = sequelize;
db.Sequelize = Sequelize;
module.exports = db;
|
// Minified webpack build artifact (webpackJsonp runtime + compiled Vue graphics modules:
// Timer/Game/Category/Playername components, Vuex store wired to nodecg-speedcontrol
// replicants). Generated output — do not edit by hand; change the source modules and rebuild.
!function(t){function e(e){for(var r,a,o=e[0],c=e[1],f=e[2],l=0,p=[];l<o.length;l++)a=o[l],Object.prototype.hasOwnProperty.call(u,a)&&u[a]&&p.push(u[a][0]),u[a]=0;for(r in c)Object.prototype.hasOwnProperty.call(c,r)&&(t[r]=c[r]);for(s&&s(e);p.length;)p.shift()();return i.push.apply(i,f||[]),n()}function n(){for(var t,e=0;e<i.length;e++){for(var n=i[e],r=!0,o=1;o<n.length;o++){var c=n[o];0!==u[c]&&(r=!1)}r&&(i.splice(e--,1),t=a(a.s=n[0]))}return t}var r={},u={6:0},i=[];function a(e){if(r[e])return r[e].exports;var n=r[e]={i:e,l:!1,exports:{}};return t[e].call(n.exports,n,n.exports,a),n.l=!0,n.exports}a.m=t,a.c=r,a.d=function(t,e,n){a.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:n})},a.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},a.t=function(t,e){if(1&e&&(t=a(t)),8&e)return t;if(4&e&&"object"==typeof t&&t&&t.__esModule)return t;var n=Object.create(null);if(a.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:t}),2&e&&"string"!=typeof t)for(var r in t)a.d(n,r,function(e){return t[e]}.bind(null,r));return n},a.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return a.d(e,"a",e),e},a.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},a.p="";var o=window.webpackJsonp=window.webpackJsonp||[],c=o.push.bind(o);o.push=e,o=o.slice();for(var f=0;f<o.length;f++)e(o[f]);var s=c;i.push([159,0]),n()}({101:function(t,e,n){"use strict";n.r(e);var r=n(102),u=n.n(r);for(var i in r)"default"!==i&&function(t){n.d(e,t,(function(){return r[t]}))}(i);e.default=u.a},102:function(t,e,n){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var r=n(1),u=n(2),i=r.__importDefault(n(84)),a=r.__importDefault(n(116)),o=r.__importDefault(n(118)),c=r.__importDefault(n(30)),f=function(t){function e(){var e=null!==t&&t.apply(this,arguments)||this;return e.text="Example",e.background=n(34),e}return 
r.__extends(e,t),e=r.__decorate([u.Component({components:{Timer:i.default,Game:a.default,Category:o.default,Playername:c.default}})],e)}(u.Vue);e.default=f},103:function(t,e,n){},116:function(t,e,n){"use strict";n.r(e);var r=n(80),u=n(69);for(var i in u)"default"!==i&&function(t){n.d(e,t,(function(){return u[t]}))}(i);n(117);var a=n(0),o=Object(a.a)(u.default,r.a,r.b,!1,null,"3d975238",null);e.default=o.exports},117:function(t,e,n){"use strict";var r=n(71);n.n(r).a},118:function(t,e,n){"use strict";n.r(e);var r=n(81),u=n(72);for(var i in u)"default"!==i&&function(t){n.d(e,t,(function(){return u[t]}))}(i);n(119);var a=n(0),o=Object(a.a)(u.default,r.a,r.b,!1,null,"08e03e60",null);e.default=o.exports},119:function(t,e,n){"use strict";var r=n(74);n.n(r).a},127:function(t,e,n){"use strict";var r=function(){var t=this.$createElement,e=this._self._c||t;return e("div",{staticClass:"container",style:{"background-image":"url("+this.background+")","clip-path":"polygon(0px 0px, 664px 0px, 664px 3px, 664px 937px, 1910px 937px, 1910px 3px, 664px 3px, 664px 0px, 1920px 0px, 1920px 1080px, 0px 1080px, 0px 0px)"}},[e("Timer",{style:{left:"1138px",top:"842px",width:"762px",height:"220px",margin:"10px"}}),this._v(" "),e("Playername",{style:{left:"54px",top:"316px",width:"336px",height:"154px",margin:"10px"}}),this._v(" "),e("Game",{style:{left:"0px",top:"960px",width:"1114px",height:"60px",margin:"0px"},attrs:{fontsize:"2em"}}),this._v(" "),e("Category",{style:{left:"0px",top:"1014px",width:"1114px",height:"64px",margin:"10px"}})],1)},u=[];n.d(e,"a",(function(){return r})),n.d(e,"b",(function(){return u}))},159:function(t,e,n){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var r=n(1),u=r.__importDefault(n(8)),i=r.__importDefault(n(160));r.__importDefault(n(162)).default().then((function(t){new u.default({store:t,el:"#App",render:function(t){return t(i.default)}})}))},160:function(t,e,n){"use strict";n.r(e);var r=n(127),u=n(101);for(var i in 
u)"default"!==i&&function(t){n.d(e,t,(function(){return u[t]}))}(i);n(161);var a=n(0),o=Object(a.a)(u.default,r.a,r.b,!1,null,null,null);e.default=o.exports},161:function(t,e,n){"use strict";var r=n(103);n.n(r).a},162:function(t,e,n){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var r=n(1),u=r.__importDefault(n(68)),i=r.__importDefault(n(8)),a=r.__importDefault(n(28));i.default.use(a.default);var o={timer:nodecg.Replicant("timer","nodecg-speedcontrol"),runDataActiveRun:nodecg.Replicant("runDataActiveRun","nodecg-speedcontrol")},c=new a.default.Store({mutations:{setState:function(t,e){var n=e.name,r=e.val;i.default.set(t,n,r)}}});Object.keys(o).forEach((function(t){o[t].on("change",(function(e){c.commit("setState",{name:t,val:u.default(e)})}))})),e.default=function(){return r.__awaiter(this,void 0,Promise,(function(){return r.__generator(this,(function(t){return[2,NodeCG.waitForReplicants.apply(NodeCG,Object.keys(o).map((function(t){return o[t]}))).then((function(){return c}))]}))}))}},25:function(t,e,n){"use strict";n.r(e);var r=n(26),u=n.n(r);for(var i in r)"default"!==i&&function(t){n.d(e,t,(function(){return r[t]}))}(i);e.default=u.a},26:function(t,e,n){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var r=n(1),u=n(2),i=n(3),a=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return r.__extends(e,t),r.__decorate([i.State],e.prototype,"timer",void 0),r.__decorate([i.State],e.prototype,"runDataActiveRun",void 0),e=r.__decorate([u.Component],e)}(u.Vue);e.default=a},27:function(t,e,n){},30:function(t,e,n){"use strict";n.r(e);var r=n(7),u=n(4);for(var i in u)"default"!==i&&function(t){n.d(e,t,(function(){return u[t]}))}(i);n(31);var a=n(0),o=Object(a.a)(u.default,r.a,r.b,!1,null,"401ed018",null);e.default=o.exports},31:function(t,e,n){"use strict";var r=n(6);n.n(r).a},34:function(t,e,n){t.exports=n.p+"img/4x3-1p-1c-sample-7ec7b62e75d58b66b8689a952dbb3b0f.png"},38:function(t,e,n){"use strict";var r=function(){var 
t=this,e=t.$createElement,n=t._self._c||e;return n("div",{staticClass:"flex",style:{"flex-direction":"column"}},[n("div",{staticClass:"timerText"},[n("span",{class:t.timer.state},[t._v(t._s(t.timer.time))])]),t._v(" "),t.runDataActiveRun?n("div",{staticClass:"estText"},[n("span",[t._v("予定タイム "+t._s(t.runDataActiveRun.estimate))])]):t._e()])},u=[];n.d(e,"a",(function(){return r})),n.d(e,"b",(function(){return u}))},4:function(t,e,n){"use strict";n.r(e);var r=n(5),u=n.n(r);for(var i in r)"default"!==i&&function(t){n.d(e,t,(function(){return r[t]}))}(i);e.default=u.a},5:function(t,e,n){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var r=n(1),u=n(2),i=n(3),a=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return r.__extends(e,t),r.__decorate([i.State],e.prototype,"runDataActiveRun",void 0),r.__decorate([u.Prop({type:String,default:"4.0em"})],e.prototype,"fontsize",void 0),r.__decorate([u.Prop({type:Number,default:0})],e.prototype,"teamid",void 0),e=r.__decorate([u.Component],e)}(u.Vue);e.default=a},6:function(t,e,n){},69:function(t,e,n){"use strict";n.r(e);var r=n(70),u=n.n(r);for(var i in r)"default"!==i&&function(t){n.d(e,t,(function(){return r[t]}))}(i);e.default=u.a},7:function(t,e,n){"use strict";var r=function(){var t=this.$createElement,e=this._self._c||t;return e("div",{staticClass:"flex",style:{"flex-direction":"column"}},[this.runDataActiveRun?e("div",{staticClass:"playerName",style:{"font-size":this.fontsize}},[this._v("\n "+this._s(this.runDataActiveRun.teams[this.teamid].players[0].name)+"\n ")]):this._e()])},u=[];n.d(e,"a",(function(){return r})),n.d(e,"b",(function(){return u}))},70:function(t,e,n){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var r=n(1),u=n(2),i=n(3),a=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return r.__extends(e,t),r.__decorate([i.State],e.prototype,"runDataActiveRun",void 
0),r.__decorate([u.Prop({type:String,default:"3.5em"})],e.prototype,"fontsize",void 0),e=r.__decorate([u.Component],e)}(u.Vue);e.default=a},71:function(t,e,n){},72:function(t,e,n){"use strict";n.r(e);var r=n(73),u=n.n(r);for(var i in r)"default"!==i&&function(t){n.d(e,t,(function(){return r[t]}))}(i);e.default=u.a},73:function(t,e,n){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var r=n(1),u=n(2),i=n(3),a=function(t){function e(){return null!==t&&t.apply(this,arguments)||this}return r.__extends(e,t),r.__decorate([i.State],e.prototype,"runDataActiveRun",void 0),e=r.__decorate([u.Component],e)}(u.Vue);e.default=a},74:function(t,e,n){},80:function(t,e,n){"use strict";var r=function(){var t=this.$createElement,e=this._self._c||t;return e("div",{staticClass:"flex",style:{"flex-direction":"column"}},[this.runDataActiveRun?e("div",{staticClass:"game",style:{"font-size":this.fontsize}},[this._v("\n "+this._s(this.runDataActiveRun.game)+"\n ")]):this._e()])},u=[];n.d(e,"a",(function(){return r})),n.d(e,"b",(function(){return u}))},81:function(t,e,n){"use strict";var r=function(){var t=this.$createElement,e=this._self._c||t;return e("div",{staticClass:"flex",style:{"flex-direction":"column"}},[this.runDataActiveRun?e("div",{staticClass:"category"},[this._v("\n "+this._s(this.runDataActiveRun.category)+"\n ")]):this._e()])},u=[];n.d(e,"a",(function(){return r})),n.d(e,"b",(function(){return u}))},84:function(t,e,n){"use strict";n.r(e);var r=n(38),u=n(25);for(var i in u)"default"!==i&&function(t){n.d(e,t,(function(){return u[t]}))}(i);n(85);var a=n(0),o=Object(a.a)(u.default,r.a,r.b,!1,null,"603be41c",null);e.default=o.exports},85:function(t,e,n){"use strict";var r=n(27);n.n(r).a}});
import { withRouter } from 'next/router';
import PropTypes from 'prop-types';
import React from 'react';
import routes from '../../routes';

// Route the search form falls back to when the current page is not searchable.
const defaultRoute = 'policies';
// Maps a searchable route name to the query-string parameter that carries the
// search term for that route's backend endpoint. The keys double as the set of
// routes that support search at all (see Search.actionPath / Search.query).
const inputNameMapping = {
  policies: 'requirements__req_text__search',
  requirements: 'req_text__search',
};
export class Search extends React.Component {
constructor(props) {
super(props);
this.state = { term: '' };
this.handleChange = this.handleChange.bind(this);
this.handleSubmit = this.handleSubmit.bind(this);
}
currentRoute() {
const route = routes.match(this.props.router.pathname).route;
if (route) {
return route.name;
}
return null;
}
actionPath() {
const route = this.currentRoute();
if (Object.keys(inputNameMapping).includes(route)) {
return `/${route}`;
}
return `/${defaultRoute}`;
}
hiddenFields() {
const modifiedQuery = this.query();
return Object.keys(modifiedQuery).map(k => (
<input type="hidden" key={k} name={k} value={modifiedQuery[k]} />
));
}
query() {
const route = this.currentRoute();
if (Object.keys(inputNameMapping).includes(route)) {
const query = { ...this.props.router.query };
delete query.page;
delete query[this.inputName()];
return query;
}
return {};
}
inputName() {
const route = this.currentRoute();
return inputNameMapping[route] || inputNameMapping[defaultRoute];
}
handleChange(e) {
const { value } = e.target;
this.setState({ term: value });
}
handleSubmit(e) {
e.preventDefault();
const { router } = this.props;
const { term } = this.state;
const pathname = this.actionPath();
const query = Object.assign({}, this.query(), { [this.inputName()]: term });
router.push({ pathname, query });
}
render() {
return (
<form
method="GET"
action={this.actionPath()}
onSubmit={this.handleSubmit}
>
<input
aria-label="Search term"
name={this.inputName()}
type="text"
placeholder={this.props.placeholder}
className="search-input"
onChange={this.handleChange}
value={this.state.term}
/>
{this.hiddenFields()}
<button type="submit" className="search-submit">
{this.props.buttonContent}
</button>
</form>
);
}
}
// Runtime prop validation. `router` is injected by the `withRouter` wrapper
// below, so consumers of the default export never pass it themselves.
Search.propTypes = {
  buttonContent: PropTypes.node,
  placeholder: PropTypes.string,
  router: PropTypes.shape({
    pathname: PropTypes.string.isRequired,
    query: PropTypes.shape({}).isRequired,
  }).isRequired,
};

// Defaults for the optional presentation props.
Search.defaultProps = {
  buttonContent: null,
  placeholder: 'Search',
};

// Default export is router-aware; the named export exists for direct testing.
export default withRouter(Search);
|
def is_not_empty(s):
    """Return True when ``s`` contains at least one non-whitespace character."""
    return bool(s.strip())
sl = ['a', ' ', ' ', 'b', 'c']
# This form needs a separate function definition, and the statement below on
# its own does not tell the reader exactly what the predicate checks.
print(', '.join(filter(is_not_empty, sl)))
# Lambda expression = the function definition written inline.
print(', '.join(filter(lambda s: s.strip() != '', sl)))
print(', '.join(filter(lambda a: a.strip() != '', sl)))
# The two print statements above are completely identical in behaviour
# (only the lambda's parameter name differs).
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ApiIamBindingArgs', 'ApiIamBinding']
@pulumi.input_type
class ApiIamBindingArgs:
    # Input-argument bundle consumed by ApiIamBinding's overloaded constructor.
    # Generated by tfgen — keep edits to comments/docs only.
    def __init__(__self__, *,
                 api: pulumi.Input[str],
                 members: pulumi.Input[Sequence[pulumi.Input[str]]],
                 role: pulumi.Input[str],
                 condition: Optional[pulumi.Input['ApiIamBindingConditionArgs']] = None,
                 project: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a ApiIamBinding resource.
        :param pulumi.Input[str] role: The role that should be applied. Only one
               `apigateway.ApiIamBinding` can be used per role. Note that custom roles must be of the format
               `[projects|organizations]/{parent-name}/roles/{role-name}`.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
        """
        # Required arguments are always stored; optional ones only when given,
        # so an unset value never shadows the provider-side default.
        pulumi.set(__self__, "api", api)
        pulumi.set(__self__, "members", members)
        pulumi.set(__self__, "role", role)
        if condition is not None:
            pulumi.set(__self__, "condition", condition)
        if project is not None:
            pulumi.set(__self__, "project", project)

    @property
    @pulumi.getter
    def api(self) -> pulumi.Input[str]:
        # Required. Value captured verbatim from the constructor argument.
        return pulumi.get(self, "api")

    @api.setter
    def api(self, value: pulumi.Input[str]):
        pulumi.set(self, "api", value)

    @property
    @pulumi.getter
    def members(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        # Required. Identities the role is granted to.
        return pulumi.get(self, "members")

    @members.setter
    def members(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "members", value)

    @property
    @pulumi.getter
    def role(self) -> pulumi.Input[str]:
        """
        The role that should be applied. Only one
        `apigateway.ApiIamBinding` can be used per role. Note that custom roles must be of the format
        `[projects|organizations]/{parent-name}/roles/{role-name}`.
        """
        return pulumi.get(self, "role")

    @role.setter
    def role(self, value: pulumi.Input[str]):
        pulumi.set(self, "role", value)

    @property
    @pulumi.getter
    def condition(self) -> Optional[pulumi.Input['ApiIamBindingConditionArgs']]:
        # Optional IAM condition attached to the binding.
        return pulumi.get(self, "condition")

    @condition.setter
    def condition(self, value: Optional[pulumi.Input['ApiIamBindingConditionArgs']]):
        pulumi.set(self, "condition", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)
@pulumi.input_type
class _ApiIamBindingState:
    # State bundle used when looking up / refreshing an existing ApiIamBinding
    # (see ApiIamBinding.get). Every field is optional here, unlike the
    # constructor-args class, because partial state is valid during lookup.
    # Generated by tfgen — keep edits to comments/docs only.
    def __init__(__self__, *,
                 api: Optional[pulumi.Input[str]] = None,
                 condition: Optional[pulumi.Input['ApiIamBindingConditionArgs']] = None,
                 etag: Optional[pulumi.Input[str]] = None,
                 members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 role: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering ApiIamBinding resources.
        :param pulumi.Input[str] etag: (Computed) The etag of the IAM policy.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
        :param pulumi.Input[str] role: The role that should be applied. Only one
               `apigateway.ApiIamBinding` can be used per role. Note that custom roles must be of the format
               `[projects|organizations]/{parent-name}/roles/{role-name}`.
        """
        # Only store fields that were actually supplied.
        if api is not None:
            pulumi.set(__self__, "api", api)
        if condition is not None:
            pulumi.set(__self__, "condition", condition)
        if etag is not None:
            pulumi.set(__self__, "etag", etag)
        if members is not None:
            pulumi.set(__self__, "members", members)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if role is not None:
            pulumi.set(__self__, "role", role)

    @property
    @pulumi.getter
    def api(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "api")

    @api.setter
    def api(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api", value)

    @property
    @pulumi.getter
    def condition(self) -> Optional[pulumi.Input['ApiIamBindingConditionArgs']]:
        return pulumi.get(self, "condition")

    @condition.setter
    def condition(self, value: Optional[pulumi.Input['ApiIamBindingConditionArgs']]):
        pulumi.set(self, "condition", value)

    @property
    @pulumi.getter
    def etag(self) -> Optional[pulumi.Input[str]]:
        """
        (Computed) The etag of the IAM policy.
        """
        return pulumi.get(self, "etag")

    @etag.setter
    def etag(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "etag", value)

    @property
    @pulumi.getter
    def members(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "members")

    @members.setter
    def members(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "members", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)

    @property
    @pulumi.getter
    def role(self) -> Optional[pulumi.Input[str]]:
        """
        The role that should be applied. Only one
        `apigateway.ApiIamBinding` can be used per role. Note that custom roles must be of the format
        `[projects|organizations]/{parent-name}/roles/{role-name}`.
        """
        return pulumi.get(self, "role")

    @role.setter
    def role(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "role", value)
class ApiIamBinding(pulumi.CustomResource):
    # Generated by tfgen — keep edits to comments/docs only.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 api: Optional[pulumi.Input[str]] = None,
                 condition: Optional[pulumi.Input[pulumi.InputType['ApiIamBindingConditionArgs']]] = None,
                 members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 role: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Three different resources help you manage your IAM policy for API Gateway Api. Each of these resources serves a different use case:

        * `apigateway.ApiIamPolicy`: Authoritative. Sets the IAM policy for the api and replaces any existing policy already attached.
        * `apigateway.ApiIamBinding`: Authoritative for a given role. Updates the IAM policy to grant a role to a list of members. Other roles within the IAM policy for the api are preserved.
        * `apigateway.ApiIamMember`: Non-authoritative. Updates the IAM policy to grant a role to a new member. Other members for the role for the api are preserved.

        > **Note:** `apigateway.ApiIamPolicy` **cannot** be used in conjunction with `apigateway.ApiIamBinding` and `apigateway.ApiIamMember` or they will fight over what your policy should be.

        > **Note:** `apigateway.ApiIamBinding` resources **can be** used in conjunction with `apigateway.ApiIamMember` resources **only if** they do not grant privilege to the same role.

        ## google\_api\_gateway\_api\_iam\_policy

        ```python
        import pulumi
        import pulumi_gcp as gcp

        admin = gcp.organizations.get_iam_policy(bindings=[gcp.organizations.GetIAMPolicyBindingArgs(
            role="roles/apigateway.viewer",
            members=["user:[email protected]"],
        )])
        policy = gcp.apigateway.ApiIamPolicy("policy",
            project=google_api_gateway_api["api"]["project"],
            api=google_api_gateway_api["api"]["api_id"],
            policy_data=admin.policy_data,
            opts=pulumi.ResourceOptions(provider=google_beta))
        ```

        ## google\_api\_gateway\_api\_iam\_binding

        ```python
        import pulumi
        import pulumi_gcp as gcp

        binding = gcp.apigateway.ApiIamBinding("binding",
            project=google_api_gateway_api["api"]["project"],
            api=google_api_gateway_api["api"]["api_id"],
            role="roles/apigateway.viewer",
            members=["user:[email protected]"],
            opts=pulumi.ResourceOptions(provider=google_beta))
        ```

        ## google\_api\_gateway\_api\_iam\_member

        ```python
        import pulumi
        import pulumi_gcp as gcp

        member = gcp.apigateway.ApiIamMember("member",
            project=google_api_gateway_api["api"]["project"],
            api=google_api_gateway_api["api"]["api_id"],
            role="roles/apigateway.viewer",
            member="user:[email protected]",
            opts=pulumi.ResourceOptions(provider=google_beta))
        ```

        ## Import

        For all import syntaxes, the "resource in question" can take any of the following forms* projects/{{project}}/locations/global/apis/{{api}} * {{project}}/{{api}} * {{api}} Any variables not passed in the import command will be taken from the provider configuration. API Gateway api IAM resources can be imported using the resource identifiers, role, and member. IAM member imports use space-delimited identifiersthe resource in question, the role, and the member identity, e.g.

        ```sh
        $ pulumi import gcp:apigateway/apiIamBinding:ApiIamBinding editor "projects/{{project}}/locations/global/apis/{{api}} roles/apigateway.viewer user:[email protected]"
        ```

        IAM binding imports use space-delimited identifiersthe resource in question and the role, e.g.

        ```sh
        $ pulumi import gcp:apigateway/apiIamBinding:ApiIamBinding editor "projects/{{project}}/locations/global/apis/{{api}} roles/apigateway.viewer"
        ```

        IAM policy imports use the identifier of the resource in question, e.g.

        ```sh
        $ pulumi import gcp:apigateway/apiIamBinding:ApiIamBinding editor projects/{{project}}/locations/global/apis/{{api}}
        ```

        -> **Custom Roles**If you're importing a IAM resource with a custom role, make sure to use the
        full name of the custom role, e.g. `[projects/my-project|organizations/my-org]/roles/my-custom-role`.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
        :param pulumi.Input[str] role: The role that should be applied. Only one
               `apigateway.ApiIamBinding` can be used per role. Note that custom roles must be of the format
               `[projects|organizations]/{parent-name}/roles/{role-name}`.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ApiIamBindingArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Three different resources help you manage your IAM policy for API Gateway Api. Each of these resources serves a different use case:

        * `apigateway.ApiIamPolicy`: Authoritative. Sets the IAM policy for the api and replaces any existing policy already attached.
        * `apigateway.ApiIamBinding`: Authoritative for a given role. Updates the IAM policy to grant a role to a list of members. Other roles within the IAM policy for the api are preserved.
        * `apigateway.ApiIamMember`: Non-authoritative. Updates the IAM policy to grant a role to a new member. Other members for the role for the api are preserved.

        > **Note:** `apigateway.ApiIamPolicy` **cannot** be used in conjunction with `apigateway.ApiIamBinding` and `apigateway.ApiIamMember` or they will fight over what your policy should be.

        > **Note:** `apigateway.ApiIamBinding` resources **can be** used in conjunction with `apigateway.ApiIamMember` resources **only if** they do not grant privilege to the same role.

        ## google\_api\_gateway\_api\_iam\_policy

        ```python
        import pulumi
        import pulumi_gcp as gcp

        admin = gcp.organizations.get_iam_policy(bindings=[gcp.organizations.GetIAMPolicyBindingArgs(
            role="roles/apigateway.viewer",
            members=["user:[email protected]"],
        )])
        policy = gcp.apigateway.ApiIamPolicy("policy",
            project=google_api_gateway_api["api"]["project"],
            api=google_api_gateway_api["api"]["api_id"],
            policy_data=admin.policy_data,
            opts=pulumi.ResourceOptions(provider=google_beta))
        ```

        ## google\_api\_gateway\_api\_iam\_binding

        ```python
        import pulumi
        import pulumi_gcp as gcp

        binding = gcp.apigateway.ApiIamBinding("binding",
            project=google_api_gateway_api["api"]["project"],
            api=google_api_gateway_api["api"]["api_id"],
            role="roles/apigateway.viewer",
            members=["user:[email protected]"],
            opts=pulumi.ResourceOptions(provider=google_beta))
        ```

        ## google\_api\_gateway\_api\_iam\_member

        ```python
        import pulumi
        import pulumi_gcp as gcp

        member = gcp.apigateway.ApiIamMember("member",
            project=google_api_gateway_api["api"]["project"],
            api=google_api_gateway_api["api"]["api_id"],
            role="roles/apigateway.viewer",
            member="user:[email protected]",
            opts=pulumi.ResourceOptions(provider=google_beta))
        ```

        ## Import

        For all import syntaxes, the "resource in question" can take any of the following forms* projects/{{project}}/locations/global/apis/{{api}} * {{project}}/{{api}} * {{api}} Any variables not passed in the import command will be taken from the provider configuration. API Gateway api IAM resources can be imported using the resource identifiers, role, and member. IAM member imports use space-delimited identifiersthe resource in question, the role, and the member identity, e.g.

        ```sh
        $ pulumi import gcp:apigateway/apiIamBinding:ApiIamBinding editor "projects/{{project}}/locations/global/apis/{{api}} roles/apigateway.viewer user:[email protected]"
        ```

        IAM binding imports use space-delimited identifiersthe resource in question and the role, e.g.

        ```sh
        $ pulumi import gcp:apigateway/apiIamBinding:ApiIamBinding editor "projects/{{project}}/locations/global/apis/{{api}} roles/apigateway.viewer"
        ```

        IAM policy imports use the identifier of the resource in question, e.g.

        ```sh
        $ pulumi import gcp:apigateway/apiIamBinding:ApiIamBinding editor projects/{{project}}/locations/global/apis/{{api}}
        ```

        -> **Custom Roles**If you're importing a IAM resource with a custom role, make sure to use the
        full name of the custom role, e.g. `[projects/my-project|organizations/my-org]/roles/my-custom-role`.

        :param str resource_name: The name of the resource.
        :param ApiIamBindingArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # ApiIamBindingArgs bundle, or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(ApiIamBindingArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       api: Optional[pulumi.Input[str]] = None,
                       condition: Optional[pulumi.Input[pulumi.InputType['ApiIamBindingConditionArgs']]] = None,
                       members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       project: Optional[pulumi.Input[str]] = None,
                       role: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (no existing provider id supplied).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ApiIamBindingArgs.__new__(ApiIamBindingArgs)
            # api/members/role are required unless the resource is being
            # adopted by URN.
            if api is None and not opts.urn:
                raise TypeError("Missing required property 'api'")
            __props__.__dict__["api"] = api
            __props__.__dict__["condition"] = condition
            if members is None and not opts.urn:
                raise TypeError("Missing required property 'members'")
            __props__.__dict__["members"] = members
            __props__.__dict__["project"] = project
            if role is None and not opts.urn:
                raise TypeError("Missing required property 'role'")
            __props__.__dict__["role"] = role
            # etag is output-only; it is populated by the provider.
            __props__.__dict__["etag"] = None
        super(ApiIamBinding, __self__).__init__(
            'gcp:apigateway/apiIamBinding:ApiIamBinding',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            api: Optional[pulumi.Input[str]] = None,
            condition: Optional[pulumi.Input[pulumi.InputType['ApiIamBindingConditionArgs']]] = None,
            etag: Optional[pulumi.Input[str]] = None,
            members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            project: Optional[pulumi.Input[str]] = None,
            role: Optional[pulumi.Input[str]] = None) -> 'ApiIamBinding':
        """
        Get an existing ApiIamBinding resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] etag: (Computed) The etag of the IAM policy.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
        :param pulumi.Input[str] role: The role that should be applied. Only one
               `apigateway.ApiIamBinding` can be used per role. Note that custom roles must be of the format
               `[projects|organizations]/{parent-name}/roles/{role-name}`.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ApiIamBindingState.__new__(_ApiIamBindingState)
        __props__.__dict__["api"] = api
        __props__.__dict__["condition"] = condition
        __props__.__dict__["etag"] = etag
        __props__.__dict__["members"] = members
        __props__.__dict__["project"] = project
        __props__.__dict__["role"] = role
        return ApiIamBinding(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def api(self) -> pulumi.Output[str]:
        return pulumi.get(self, "api")

    @property
    @pulumi.getter
    def condition(self) -> pulumi.Output[Optional['outputs.ApiIamBindingCondition']]:
        return pulumi.get(self, "condition")

    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[str]:
        """
        (Computed) The etag of the IAM policy.
        """
        return pulumi.get(self, "etag")

    @property
    @pulumi.getter
    def members(self) -> pulumi.Output[Sequence[str]]:
        return pulumi.get(self, "members")

    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
        """
        return pulumi.get(self, "project")

    @property
    @pulumi.getter
    def role(self) -> pulumi.Output[str]:
        """
        The role that should be applied. Only one
        `apigateway.ApiIamBinding` can be used per role. Note that custom roles must be of the format
        `[projects|organizations]/{parent-name}/roles/{role-name}`.
        """
        return pulumi.get(self, "role")
|
#!/usr/bin/python3
import sys
class Literal():
    """A literal-value packet body: groups of 5 bits, each prefixed with a
    continuation bit ('1' = more groups follow, '0' = this is the last group).
    The remaining 4 bits of every group are concatenated to form the value.
    """

    def __init__(self, packet):
        # Defect fixed: if every group carried a continuation bit (malformed
        # input), ``self.residual`` was never assigned and later reads raised
        # AttributeError. Initialise it up front so the attribute always exists.
        # (A leftover debug print was also removed.)
        self.residual = ''
        bin_value = ''
        for i in range(0, len(packet), 5):
            bin_value += packet[i + 1:i + 5]
            if packet[i] != '1':
                # Last group: everything after it belongs to the caller.
                if i + 5 < len(packet):
                    self.residual = packet[i + 5:]
                break
        self.value = int(bin_value, 2)

    def get_version_sums(self):
        # Literals contain no sub-packets, so they add nothing beyond the
        # version counted by the enclosing Packet.
        return 0

    def get_value(self, typeid):
        # ``typeid`` is accepted only for interface parity with Operator.
        return self.value

    def __str__(self):
        return str(self.value)
class Operator():
    """An operator packet body: a 1-bit length-type header followed by either
    a 15-bit total bit-length or an 11-bit sub-packet count, then sub-packets.
    """

    def __init__(self, packet):
        self.subpackets = []
        lengthtypeid = packet[0]
        if lengthtypeid == '0':
            # Next 15 bits give the total bit-length of the sub-packet region.
            subpacketslength = int(packet[1:16], 2)
            self.operator = 'subpackets length %d' % subpacketslength
            subpackets = packet[16:16 + subpacketslength]
            self.subpackets = extractPackets(subpackets)
            self.residual = packet[16 + subpacketslength:]
        else:
            # Next 11 bits give the number of immediate sub-packets.
            subpacketcount = int(packet[1:12], 2)
            self.operator = 'subpackets count %d' % subpacketcount
            residual = packet[12:]
            for i in range(subpacketcount):
                subpackets = extractPackets(residual)
                if subpackets is None:
                    # Defect fixed: the original fell through here and then
                    # executed ``self.subpackets += None`` (TypeError).
                    # Nothing is left to parse, so stop.
                    residual = ''
                    break
                residual = subpackets[-1].residual
                # NOTE(review): extractPackets parses greedily, so one
                # iteration may already consume every remaining packet;
                # the count is not enforced per-iteration — confirm this is
                # acceptable for the puzzle inputs this script targets.
                self.subpackets += subpackets
            self.residual = residual

    def get_version_sums(self):
        # Versions of this operator's sub-tree (the enclosing Packet adds its own).
        sums = 0
        for packet in self.subpackets:
            sums += packet.get_version_sums()
        return sums

    def get_value(self, typeid):
        # Evaluate this operator over its sub-packet values. Leftover debug
        # prints in the equality branch were removed.
        value = 0
        if typeid == 0:  # sum
            for packet in self.subpackets:
                value += packet.get_value()
        elif typeid == 1:  # product
            value = 1
            for packet in self.subpackets:
                value *= packet.get_value()
        elif typeid == 2:  # minimum
            value = sys.maxsize
            for packet in self.subpackets:
                pval = packet.get_value()
                if value > pval:
                    value = pval
        elif typeid == 3:  # maximum
            for packet in self.subpackets:
                pval = packet.get_value()
                if value < pval:
                    value = pval
        elif typeid == 5:  # greater-than (exactly two sub-packets)
            value = int(self.subpackets[0].get_value() > self.subpackets[1].get_value())
        elif typeid == 6:  # less-than
            value = int(self.subpackets[0].get_value() < self.subpackets[1].get_value())
        elif typeid == 7:  # equal-to
            value = int(self.subpackets[0].get_value() == self.subpackets[1].get_value())
        return value

    def __str__(self):
        out = self.operator
        for subpacket in self.subpackets:
            out += '\n' + str(subpacket)
        return out
class Packet():
    """One BITS packet: 3-bit version, 3-bit type id, then a body that is a
    Literal (type id 4) or an Operator (any other type id).
    Defect fixed: a leftover debug print of the residual on every packet was
    removed.
    """

    def __init__(self, bin_str):
        self.version = int(bin_str[0:3], 2)
        self.typeid = int(bin_str[3:6], 2)
        contents = bin_str[6:]
        if self.typeid == 4:
            self.packet = Literal(contents)
        else:
            self.packet = Operator(contents)
        # Bits left over after this packet; the caller continues parsing there.
        self.residual = self.packet.residual

    def get_version_sums(self):
        # This packet's version plus all versions in its sub-tree.
        return self.version + self.packet.get_version_sums()

    def get_value(self):
        # The body needs the type id to know which operation to apply.
        return self.packet.get_value(self.typeid)

    def __str__(self) -> str:
        out = ''
        out += 'version %d' % self.version
        out += ', typeid %d' % self.typeid
        out += ', value %s' % self.packet
        return out
# given binary string, return the list of packets parsed from it
def extractPackets(bin_str):
    """Parse every packet in ``bin_str`` and return them as a list.

    Returns None when nothing remains to parse (empty string or all-zero
    trailing padding), matching the original contract.

    Defects fixed: the original recursed once per packet, risking
    RecursionError on long transmissions — rewritten iteratively; the
    non-idiomatic ``== None`` comparison is gone with the recursion.
    """
    packets = []
    remaining = bin_str
    # All-zero bits are trailing padding, not a packet. The length guard must
    # come first: int('', 2) would raise ValueError.
    while len(remaining) >= 1 and int(remaining, 2) != 0:
        packet = Packet(remaining)
        packets.append(packet)
        remaining = packet.residual
    return packets if packets else None
def hex_to_bin(hex_str):
    """Expand a hex string into its binary representation, 4 bits per digit,
    preserving leading zeros (e.g. '0' -> '0000').

    The original re-derived each bit with modulo/division arithmetic;
    ``format(d, '04b')`` is the zero-padded standard-library equivalent.
    """
    return ''.join(format(int(h, 16), '04b') for h in hex_str)
# Driver: read hex transmissions (one per line), decode, and print each
# packet tree plus its evaluated value.
# Defect fixed: the accumulator was named ``input``, shadowing the built-in.
hex_lines = []
with open('16/input.txt', 'r') as f:
    for line in f.readlines():
        hex_lines.append(line.strip())

for hexa in hex_lines:
    binary = hex_to_bin(hexa)
    packets = extractPackets(binary)
    print(hexa)
    for packet in packets:
        print(packet)
        print(packet.get_value())
|
import matplotlib.pyplot as plt
import matplotlib.markers  # NOTE(review): appears unused below — confirm before removing.

# Serif font settings applied to the axis labels.
hfont = {'fontname':'serif'}

# --- Figure 1: HCV dataset, MSD to the single closest centroid ---
# x holds the evaluated numbers of centroids k; az/ehyy/sy1 hold the
# corresponding errors for each algorithm, in the same order as x.
x = [10,50,100,200,300]
az = [6424.927458699188,5256.961421300812,4824.796510406505,4397.427268292684, 4197.789814796751]
ehyy = [2687.6435760975614,703.1398154471545,395.1273873170729,176.83760829268292,96.1307951219512]
sy1 = [5950.18102292683,5429.365800162603,5357.318395284554,3713.241357886179, 1030.814495934959]
plt.plot(x,az, label = "Algorithm 1", linestyle = '--', marker = '^')
plt.plot(x,ehyy, label = "K means ++",linestyle = '-.', marker = 'o')
plt.plot(x,sy1, label = "Alg_g (Li et al (2021))",alpha = 1,lw = 1,linestyle = ':', marker = 's')
plt.title("HCV dataset MSD to closest 1 centroid")
plt.xlabel("k",**hfont)
plt.ylabel("Mean Squared Error",**hfont)
plt.legend()
plt.show()

# --- Figure 2: HCV dataset, MSD to the closest k/4 centroids ---
# Same structure as Figure 1, with errors measured against k/4 centroids.
x = [10,50,100,200,300]
az = [22599.34653902439,97812.89526000009,187036.21684812993,378262.2334604881, 566806.8513998397]
ehyy = [36212.48968162593,82880.21971105676, 158791.86747674804,332972.78833723604,517099.0014863425]
sy1 = [21334.92608000001,94810.5209027642,187488.04654211353,370964.4004476427, 546504.439231384]
plt.plot(x,az, label = "Algorithm 1", linestyle = '--', marker = '^')
plt.plot(x,ehyy, label = "K means ++",linestyle = '-.', marker = 'o')
plt.plot(x,sy1, label = "Alg_g (Li et al (2021))",alpha = 1,lw = 1,linestyle = ':', marker = 's')
plt.title("HCV dataset MSD to closest k/4 centroids")
plt.xlabel("k",**hfont)
plt.ylabel("Mean Squared Error",**hfont)
plt.legend()
plt.show()
x = [10,50,100,200,300]
az = [40646.39921203251,210525.6406530079,420680.6237029261, 845481.3138406522,1269583.7324663412]
ehyy = [124083.99631951218,250374.9208278043,456286.41397723474, 836773.9932816271,1244285.030423903]
sy1 = [39792.15618878047,204902.3652287808,417640.0921939826, 834974.7590915464,1253336.3079403285]
plt.plot(x,az, label = "Algorithm 1", linestyle = '--', marker = '^')
plt.plot(x,ehyy, label = "K means ++",linestyle = '-.', marker = 'o')
plt.plot(x,sy1, label = "Alg_g (Li et al (2021))",alpha = 1,lw = 1,linestyle = ':', marker = 's')
plt.title("HCV dataset MSD to closest k/2 centroids")
plt.xlabel("k",**hfont)
plt.ylabel("Mean Squared Error",**hfont)
plt.legend()
plt.show()
x = [10,50,100,200,300]
az = [72695.57247934972,352127.7569616262,687734.710200163, 1376151.7893491015,2068289.1842294286]
ehyy = [546103.3082318715,760492.1637795137,1049141.8591156001, 1599424.9191099254,2194639.542871545]
sy1 = [78556.98748878039,342924.19081626035, 679900.7061354463, 1369637.1033647123,2075300.3888624338]
plt.plot(x,az, label = "Algorithm 1", linestyle = '--', marker = '^')
plt.plot(x,ehyy, label = "K means ++",linestyle = '-.', marker = 'o')
plt.plot(x,sy1, label = "Alg_g (Li et al (2021))",alpha = 1,lw = 1,linestyle = ':', marker = 's')
plt.title("HCV dataset MSD to closest 3k/4 centroids")
plt.xlabel("k",**hfont)
plt.ylabel("Mean Squared Error",**hfont)
plt.legend()
plt.show()
x = [10,50,100,200,300]
az = [186052.680976585,542861.3294081311,1087538.5383058526,2167896.5648702616, 3293183.467976578]
ehyy = [1767480.8965390252,3788476.3237751373,5046475.590147337,6293318.208938536,7396493.75954102]
sy1 = [117938.62972097572, 535679.1609764224,1071021.4954299207,2434362.110899358, 5088374.6236155545]
plt.plot(x,az, label = "Algorithm 1", linestyle = '--', marker = '^')
plt.plot(x,ehyy, label = "K means ++",linestyle = '-.', marker = 'o')
plt.plot(x,sy1, label = "Alg_g (Li et al (2021))",alpha = 1,lw = 1,linestyle = ':', marker = 's')
plt.title("HCV dataset MSD to closest k centroids")
plt.xlabel("k",**hfont)
plt.ylabel("Mean Squared Error",**hfont)
plt.legend()
plt.show()
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
class ModuleDocFragment(object):
    # Shared Ansible documentation fragment for workload-manager (scheduler)
    # job modules.  The YAML below is merged into the DOCUMENTATION of every
    # module that declares this fragment; it describes options only and has
    # no runtime behaviour of its own.  Do not edit the string casually --
    # it is parsed by ansible-doc.
    DOCUMENTATION = """
options:
  job_id:
    description:
      - String identifier of the job.
      - Required if P(state) is C(paused) or C(cancelled).
    type: str
  state:
    description:
      - Target state of the job.
    type: str
    default: queued
    choices: [queued, paused, cancelled]
  job_name:
    description:
      - Name of the job.
      - Required if P(state) is C(queued).
    type: str
  job_options:
    description:
      - Optional workload manager instructions.
    type: str
  job_contents:
    description:
      - Contents of the job.
    type: str
  job_src:
    description:
      - Local path to the job script file.
    type: str
  job_workspace:
    description:
      - Location of the sources and the job script.
    type: str
  account:
    description:
      - Charge resources used by this job to specified account.
    type: str
  queue:
    description:
      - Request a specific queue (partition) for the resource allocation.
    type: str
  wall_time_limit:
    description:
      - Set a limit on the total run time of the job allocation.
      - A time limit of zero requests that no time limit be imposed.
      - Acceptable time formats include "minutes", "minutes:seconds", "hours:minutes:seconds",
        "days-hours", "days-hours:minutes" and "days-hours:minutes:seconds".
    type: str
  node_count:
    description:
      - A number of nodes to be allocated to this job.
    type: int
  core_count:
    description:
      - A number of tasks(cores) to be allocated to this job.
    type: int
  process_count_per_node:
    description:
      - Request this number of processes be invoked on each node.
    type: int
  core_count_per_process:
    description:
      - Advise that ensuing job steps will require this number of processors per task.
    type: int
  memory_limit:
    description:
      - Specify the real memory required per node.
    type: str
  minimum_memory_per_processor:
    description:
      - Minimum memory required per allocated CPU.
    type: str
  request_gpus:
    description:
      - Specifies a list of GPU resources.
    type: str
  request_specific_nodes:
    description:
      - Request a specific list of hosts.
    type: str
  job_array:
    description:
      - Submit a job array, multiple jobs to be executed with identical parameters.
    type: str
  standard_output_file:
    description:
      - Connect the job standard output directly to the file name specified.
    type: str
  standard_error_file:
    description:
      - Connect the job standard error directly to the file name specified.
    type: str
  combine_stdout_stderr:
    description:
      - Combine the job standard output with standard error.
    type: bool
  architecture_constraint:
    description:
      - Specify which features are required by the job.
    type: str
  copy_environment:
    description:
      - Propagate all environment variables from the submission environment to the job.
    type: bool
  copy_environment_variable:
    description:
      - Identify which environment variables from the submission environment are propagated to the job.
    type: str
  job_dependency:
    description:
      - Defer the start of this job until the specified dependencies have been completed.
    type: str
  request_event_notification:
    description:
      - Notify user by email when certain event types occur.
    type: list
    elements: str
    default: 'none'
    choices: ['begin', 'end', 'fail', 'invalid_depend', 'time_limit', 'none', 'all']
  email_address:
    description:
      - Email address to receive notifications of state changes.
    type: str
  defer_job:
    description:
      - Defer the allocation of the job until the specified time.
      - "Acceptable time formats are: now+60 (seconds by default) or 2010-01-20T12:34:00"
    type: str
  node_exclusive:
    description:
      - The job allocation can not share nodes with other running jobs.
    type: bool
  keep_job_script:
    description:
      - Keep job script file after execution.
    type: bool
    default: True
"""
|
require('dotenv').config();
const envs = require('envs');
/**
 * Collects TestCafe report lines and posts them to a Slack incoming webhook.
 * Webhook URL and bot name come from the TESTCAFE_SLACK_* env vars.
 */
export default class SlackMessage {
    constructor() {
        // Required lazily so the dependency is only loaded when a reporter
        // instance is actually created.
        let slackNode = require('slack-node');
        this.slack = new slackNode();
        this.slack.setWebhook(envs('TESTCAFE_SLACK_WEBHOOK', 'http://example.com'));
        this.message = [];
        this.errorMessage = [];
    }

    // Queue one plain line for the report body.
    addMessage(message) {
        this.message.push(message)
    }

    // Queue one error line; errors are rendered in a fenced block at the end.
    addErrorMessage(message) {
        this.errorMessage.push(message)
    }

    /**
     * Post a message through the configured webhook.
     * @param message text to post
     * @param slackProperties optional extra webhook fields (e.g. attachments)
     */
    sendMessage(message, slackProperties = null) {
        this.slack.webhook(Object.assign({
            username: envs('TESTCAFE_SLACK_BOT', 'testcafebot'),
            text: message
        }, slackProperties), function (err, response) {
            if(err) {
                console.log('Unable to send a message to slack');
                // Log the error itself, not just the (possibly empty) response.
                console.log(err);
                console.log(response);
            } else {
                // Fixed grammar of the success message ("is send" -> "was sent").
                console.log(`The following message was sent to slack: \n ${message}`);
            }
        })
    }

    // Send the accumulated report, attaching a red banner when tests failed.
    sendTestReport(nrFailedTests) {
        this.sendMessage(this.getTestReportMessage(), nrFailedTests > 0
            ? {
                "attachments": [{
                    color: 'danger',
                    text: `${nrFailedTests} test failed`
                }]
            }
            : null
        )
    }

    // Report body: normal lines, then (if any) errors in a ``` code fence.
    getTestReportMessage() {
        let message = this.getSlackMessage();
        let errorMessage = this.getErrorMessage();
        if(errorMessage.length > 0) {
            message = message + "\n\n\n```" + this.getErrorMessage() + '```';
        }
        return message;
    }

    getErrorMessage() {
        return this.errorMessage.join("\n\n\n");
    }

    getSlackMessage() {
        return this.message.join("\n");
    }
}
import setuptools

# The PyPI long description mirrors the repository README verbatim.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

# Package metadata for the deep-brats distribution.
setuptools.setup(
    name="deep-brats",
    version="0.0.1",
    author="Nabil Jabareen",
    author_email="[email protected]",
    description="Deep learning for brats data",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/NabJa/deep-brats",
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    packages=setuptools.find_packages(),
    python_requires=">=3.6",
)
|
// Auto-generated Babel runtime interop (do not edit by hand): loads the
// core-js-stable Symbol.iterator shim.
var _Symbol$iterator = require("@gerhobbelt/babel-runtime-corejs3/core-js-stable/symbol/iterator");
// Bare reference emitted by the transform; presumably keeps the binding
// alive in the generated module -- no runtime effect beyond evaluating it.
_Symbol$iterator;
|
import styled from 'styled-components';
// Styled <form> for quiz alternatives. The CSS keys off data attributes set
// by the quiz screen: data-selected marks the chosen label, data-status
// ("SUCCESS" / "ERROR") recolors it after the answer is confirmed.
// NOTE: the rules live inside a template literal (runtime data), so they are
// documented here rather than inline.
const AlternativesForm = styled.form`
  label {
    &[data-selected="true"] {
      background-color: ${({ theme }) => theme.colors.primary};
      &[data-status="SUCCESS"] {
        background-color: ${({ theme }) => theme.colors.success};
      }
      &[data-status="ERROR"] {
        background-color: ${({ theme }) => theme.colors.wrong};
      }
    }
    &:focus {
      opacity: 1;
    }
  }
  button {
    margin-top: 24px;
  }
`;
export default AlternativesForm;
|
import localStores from './store'
import createConnect from './core/createConnect'
import renderManagement from './core/renderManagement'
import makeCapsule from './capsule/makeCapsule'
import toolsCapsule from './capsule/toolsCapsule'
import States from './store/states'
/**
 * @version 1.0.0-alpha.1
 * Redity library
 */
// Public entry object; the API surface is attached to it below so consumers
// can use either `Redity.connect(...)` or the named exports.
export default function Redity () {}
// Shared stores are created once per module load; everything else closes
// over them, so creation order here matters.
const { registersOfConnections, hideChildrens, templateChildrens } = localStores()
const { render } = renderManagement(registersOfConnections)
const { getCapsuleByKeyName, getProps, setProps, getPayload } = toolsCapsule(registersOfConnections, hideChildrens, templateChildrens)
/**
 * Register a component connection under a key.
 * @param {string|number} keyName
 * @param {OptionConnect|function(object):OptionConnect=} optionControl
 */
function connect (keyName, optionControl) {
  const connection = createConnect(registersOfConnections, hideChildrens, templateChildrens)
  return connection(keyName, optionControl)
}
// Capsule factory needs `connect`, so it is built after it.
const { Capsule, createCapsule } = makeCapsule(registersOfConnections, connect)
// Attach the whole API to the default export as well.
Redity.connect = connect
Redity.render = render
Redity.getProps = getProps
Redity.Capsule = Capsule
Redity.createCapsule = createCapsule
Redity.getCapsuleByKeyName = getCapsuleByKeyName
Redity.getPayload = getPayload
Redity.setProps = setProps
Redity.States = States
export {
  connect,
  render,
  getProps,
  Capsule,
  createCapsule,
  getCapsuleByKeyName,
  getPayload,
  setProps,
  States
}
|
#!/usr/bin/env node
'use strict';
/*eslint-disable no-console*/
// stdlib
var fs = require('fs');
// 3rd-party
var argparse = require('argparse');
// internal
var yaml = require('..');
////////////////////////////////////////////////////////////////////////////////
// Command-line definition for the js-yaml inspector tool.
var cli = new argparse.ArgumentParser({
  prog: 'js-yaml',
  version: require('../package.json').version,
  addHelp: true
});
cli.addArgument([ '-c', '--compact' ], {
  help: 'Display errors in compact mode',
  action: 'storeTrue'
});
// deprecated (not needed after we removed output colors)
// option suppressed, but not completely removed for compatibility
cli.addArgument([ '-j', '--to-json' ], {
  help: argparse.Const.SUPPRESS,
  dest: 'json',
  action: 'storeTrue'
});
cli.addArgument([ '-t', '--trace' ], {
  help: 'Show stack trace on error',
  action: 'storeTrue'
});
// Positional file argument; '-' (the default) means "read stdin".
cli.addArgument([ 'file' ], {
  help: 'File to read, utf-8 encoded without BOM',
  nargs: '?',
  defaultValue: '-'
});
////////////////////////////////////////////////////////////////////////////////
var options = cli.parseArgs();
/**
 * Read `filename` (or all of stdin when it is '-') and invoke
 * callback(error, text).
 *
 * Fix: the '-' check previously consulted the global `options.file` instead
 * of the `filename` parameter, so the helper only worked for the one global
 * value it happened to be called with.
 */
function readFile(filename, encoding, callback) {
  if (filename === '-') {
    // read from stdin
    var chunks = [];

    process.stdin.on('data', function (chunk) {
      chunks.push(chunk);
    });

    process.stdin.on('end', function () {
      return callback(null, Buffer.concat(chunks).toString(encoding));
    });
  } else {
    fs.readFile(filename, encoding, callback);
  }
}
// Main flow: read the input, try JSON first, fall back to YAML, then echo the
// document back out in the other format. Exit codes: 2 = missing file,
// 1 = parse/other failure.
readFile(options.file, 'utf8', function (error, input) {
  var output, isYaml;
  if (error) {
    if (error.code === 'ENOENT') {
      console.error('File not found: ' + options.file);
      process.exit(2);
    }
    console.error(
      options.trace && error.stack ||
      error.message ||
      String(error));
    process.exit(1);
  }
  // First attempt: treat the input as JSON.
  try {
    output = JSON.parse(input);
    isYaml = false;
  } catch (err) {
    // A SyntaxError from JSON.parse means "probably YAML"; anything else is fatal.
    if (err instanceof SyntaxError) {
      try {
        output = [];
        yaml.loadAll(input, function (doc) { output.push(doc); }, {});
        isYaml = true;
        // Unwrap: no documents -> null, one document -> the document itself.
        if (output.length === 0) output = null;
        else if (output.length === 1) output = output[0];
      } catch (e) {
        if (options.trace && err.stack) console.error(e.stack);
        else console.error(e.toString(options.compact));
        process.exit(1);
      }
    } else {
      console.error(
        options.trace && err.stack ||
        err.message ||
        String(err));
      process.exit(1);
    }
  }
  // YAML in -> JSON out; JSON in -> YAML out.
  if (isYaml) console.log(JSON.stringify(output, null, '  '));
  else console.log(yaml.dump(output));
});
|
// @flow
const { sendToLagoonLogs } = require('@lagoon/commons/src/logs');
const { getProject } = require('@lagoon/commons/src/gitlabApi');
const { addProject } = require('@lagoon/commons/src/api');
import type { WebhookRequestData } from '../types';
async function gitlabProjectCreate(webhook: WebhookRequestData) {
const { webhooktype, event, uuid, body } = webhook;
try {
const project = await getProject(body.project_id);
const { id, path: name, ssh_url_to_repo: gitUrl, namespace } = project;
// TODO: figure out openshift id
const openshift = 1;
// set production environment to default master
const productionenvironment = "master";
const meta = {
data: project,
project: name
};
if (namespace.kind != 'group') {
sendToLagoonLogs(
'info',
'',
uuid,
`${webhooktype}:${event}:unhandled`,
meta,
`Skipping creation of project ${name}: not in group namespace`
);
return;
}
await addProject(name, namespace.id, gitUrl, openshift, productionenvironment, id);
sendToLagoonLogs(
'info',
'',
uuid,
`${webhooktype}:${event}:handled`,
meta,
`Created project ${name}`
);
return;
} catch (error) {
sendToLagoonLogs(
'error',
'',
uuid,
`${webhooktype}:${event}:unhandled`,
{ data: body },
`Could not create project, reason: ${error}`
);
return;
}
}
module.exports = gitlabProjectCreate;
|
const express = require('express')
const google = require('googleapis').google
const youtube = google.youtube({ version: 'v3'})
const OAuth2 = google.auth.OAuth2
const state = require('./state.js')
const fs = require('fs')
/**
 * YouTube publishing robot: authenticates with Google via OAuth2, uploads the
 * rendered video from ./content, then sets its custom thumbnail. All helpers
 * are nested so they share the robot's scope.
 */
async function robot() {
  console.log('> [youtube-robot] Starting...')
  const content = state.load()
  await authenticateWithOAuth()
  const videoInformation = await uploadVideo(content)
  await uploadThumbnail(videoInformation)
  // Full "installed app" OAuth2 dance: a throwaway local web server receives
  // Google's redirect carrying the authorization code.
  async function authenticateWithOAuth() {
    const webServer = await startWebServer()
    const OAuthClient = await createOAuthClient()
    requestUserConsent(OAuthClient)
    const authorizationToken = await waitForGoogleCallback(webServer)
    await requestGoogleForAccessTokens(OAuthClient, authorizationToken)
    await setGlobalGoogleAuthentication(OAuthClient)
    await stopWebServer(webServer)
    // Local callback server on port 5000; resolves once listening.
    async function startWebServer() {
      return new Promise((resolve, reject) => {
        const port = 5000
        const app = express()
        const server = app.listen(port, () => {
          console.log(`> [youtube-robot] Listening on http://localhost:${port}`)
          resolve({
            app,
            server
          })
        })
      })
    }
    // Build the OAuth2 client from the checked-out credentials file.
    async function createOAuthClient() {
      const credentials = require('../credentials/google-youtube.json')
      const OAuthClient = new OAuth2(
        credentials.web.client_id,
        credentials.web.client_secret,
        credentials.web.redirect_uris[0]
      )
      return OAuthClient
    }
    // Print the consent URL; the user opens it manually in a browser.
    function requestUserConsent(OAuthClient) {
      const consentUrl = OAuthClient.generateAuthUrl({
        access_type: 'offline',
        scope: ['https://www.googleapis.com/auth/youtube']
      })
      console.log(`> [youtube-robot] Please give your consent: ${consentUrl}`)
    }
    // Resolves with the authorization code Google appends to the redirect.
    async function waitForGoogleCallback(webServer) {
      return new Promise((resolve, reject) => {
        console.log('> [youtube-robot] Waiting for user consent...')
        webServer.app.get('/oauth2callback', (req, res) => {
          const authCode = req.query.code
          console.log(`> [youtube-robot] Consent given: ${authCode}`)
          res.send('<h1>Thank you!</h1><p>Now close this tab.</p>')
          resolve(authCode)
        })
      })
    }
    // Exchange the authorization code for access tokens.
    async function requestGoogleForAccessTokens(OAuthClient, authorizationToken) {
      return new Promise((resolve, reject) => {
        OAuthClient.getToken(authorizationToken, (error, tokens) => {
          if (error) {
            return reject(error)
          }
          console.log('> [youtube-robot] Access tokens received!')
          OAuthClient.setCredentials(tokens)
          resolve()
        })
      })
    }
    // Make the authenticated client the default for all googleapis calls.
    function setGlobalGoogleAuthentication(OAuthClient) {
      google.options({
        auth: OAuthClient
      })
    }
    async function stopWebServer(webServer) {
      return new Promise((resolve, reject) => {
        webServer.server.close(() => {
          resolve()
        })
      })
    }
  }
  // Upload ./content/output.mov as an unlisted video; title/description/tags
  // are derived from the loaded content state.
  async function uploadVideo(content) {
    const videoFilePath = './content/output.mov'
    const videoFileSize = fs.statSync(videoFilePath).size
    const videoTitle = `${content.prefix} ${content.searchTerm}`
    const videoTags = [content.searchTerm, ...content.sentences[0].keywords]
    const videoDescription = content.sentences.map((sentence) => {
      return sentence.text
    }).join('\n\n')
    const requestParameters = {
      part: 'snippet, status',
      requestBody: {
        snippet: {
          title: videoTitle,
          description: videoDescription,
          tags: videoTags
        },
        status: {
          privacyStatus: 'unlisted'
        }
      },
      media: {
        body: fs.createReadStream(videoFilePath)
      }
    }
    console.log('> [youtube-robot] Starting to upload the video to YouTube')
    const youtubeResponse = await youtube.videos.insert(requestParameters, {
      onUploadProgress: onUploadProgress
    })
    console.log(`> [youtube-robot] Video available at: https://youtu.be/${youtubeResponse.data.id}`)
    return youtubeResponse.data
    // Progress callback: percentage of bytes read from the source file.
    function onUploadProgress(event) {
      const progress = Math.round( (event.bytesRead / videoFileSize) * 100 )
      console.log(`> [youtube-robot] ${progress}% completed`)
    }
  }
  // Attach the pre-rendered JPEG thumbnail to the uploaded video.
  async function uploadThumbnail(videoInformation) {
    const videoId = videoInformation.id
    const videoThumbnailFilePath = './content/youtube-thumbnail.jpg'
    const requestParameters = {
      videoId: videoId,
      media: {
        mimeType: 'image/jpeg',
        body: fs.createReadStream(videoThumbnailFilePath)
      }
    }
    const youtubeResponse = await youtube.thumbnails.set(requestParameters)
    console.log(`> [youtube-robot] Thumbnail uploaded!`)
  }
}

module.exports = robot
import React from 'react'
import ReactCountryFlag from 'react-country-flag'
import { formatHotspotName } from '../Hotspots/utils'
import { StatusCircle } from '../Hotspots'
/**
 * One row of hotspot search results: status dot and formatted hotspot name on
 * the left; "City, State, Country" plus a flag emoji on the right (the right
 * side renders only when geocode.shortCountry is present).
 * Props are fields of a hotspot record: name, geocode, status.
 */
const SearchResultHotspot = ({ name, geocode, status }) => (
  <div
    style={{
      display: 'flex',
      justifyContent: 'space-between',
      flexWrap: 'wrap',
    }}
  >
    <span>
      <StatusCircle status={status} />
      {formatHotspotName(name)}
    </span>
    <span
      style={{
        display: 'flex',
        alignItems: 'center',
      }}
    >
      {geocode.shortCountry && (
        <>
          {[geocode.longCity, geocode.shortState, geocode.shortCountry].join(
            ', ',
          )}
          <ReactCountryFlag
            countryCode={geocode.shortCountry}
            style={{
              fontSize: '1.3em',
              marginLeft: '6px',
              lineHeight: '1.3em',
            }}
          />
        </>
      )}
    </span>
  </div>
)

export default SearchResultHotspot
|
// vue.config.js
// Inline component CSS into the JS bundle instead of emitting separate .css
// files (extract: false) -- typical for library builds consumed as a single file.
module.exports = {
  css: {
    extract: false
  }
}
|
//>>built
// Generated Dojo build artifact: French locale strings for dijit/form/Textarea.
// Do not edit by hand; regenerate via the Dojo build instead.
define("dijit/form/nls/fr/Textarea",{iframeEditTitle:"zone d'\u00e9dition",iframeFocusTitle:"cadre de la zone d'\u00e9dition"});
//# sourceMappingURL=Textarea.js.map
//// [tests/cases/compiler/requireOfAnEmptyFile1.ts] ////
//// [requireOfAnEmptyFile1_a.ts]
//requireOfAnEmptyFile1
import fs = require('./requireOfAnEmptyFile1_b');
//// [requireOfAnEmptyFile1_b.ts]
//// [requireOfAnEmptyFile1_b.js]
//// [requireOfAnEmptyFile1_a.js]
"use strict";
//requireOfAnEmptyFile1
exports.__esModule = true;
|
#!/usr/bin/env python
# https://www.microchip.com/wwwproducts/en/ATSAMD21E18
# import attr
import time
from collections import deque
import numpy as np
# import cv2
from slurm.rate import Rate
from math import pi
import pickle
# from opencv_camera import ThreadedCamera
# from opencv_camera.color_space import ColorSpace
from imu_driver import IMUDriver
from imu_driver import AGMPT, agmpt_t
# ImageIMU = namedtuple("ImageIMU","image accel gyro temperature timestamp")
# Angle conversions. NOTE(review): deg2rad and DEG2RAD are duplicates; kept
# for compatibility with existing callers.
deg2rad = pi / 180.0
RAD2DEG = 180/pi
DEG2RAD = pi/180
FT2M = 0.3048 # feet to meters
MI2M = 1609.34 # miles to meters
PACKET_LEN = 7
# Ring buffer of IMU samples; maxlen bounds memory if the run is long.
data = deque(maxlen=10000)
def savePickle(data, filename):
    """Serialize `data` to `filename` as a binary pickle."""
    with open(filename, 'wb') as fd:
        pickle.dump(data, fd)
# Serial port of the IMU board -- adjust per machine.
port = "/dev/tty.usbmodem14501"
# port = "/dev/tty.usbmodem14501"
s = IMUDriver(port, AGMPT())
# Target sampling cadence: 200 Hz.
rate = Rate(200)
try:
    start = time.monotonic()
    cnt = 1
    while True:
        # One (accel, gyro, mag, pressure, temperature) sample, or None on a bad read.
        agmpt = s.read()
        if agmpt is None:
            print(f"oops: {agmpt}")
        else:
            # Print a heartbeat roughly once per second (every 200th sample).
            if (cnt % 200) == 0:
                a,g,m,p,t = agmpt
                print(f">> {a}")
            # Append the elapsed time so samples can be re-timed offline.
            dt = time.monotonic() - start
            # m = agmpt_t(a,g,m,p,t, dt)
            # print(f">> {m}")
            agmpt += (dt,)
            data.append(agmpt)
        # time.sleep(0.001)
        cnt += 1
        rate.sleep()
except KeyboardInterrupt:
    print("ctrl-C")
finally:
    # Always release the serial port, then persist whatever was captured.
    s.close()
    # camera.close()
    # cv2.destroyAllWindows()
    if len(data) > 0:
        savePickle(data, "data.pickle")
    print("\n\nbye ...\n")
|
import Vue from 'vue'
import App from './App.vue'

// Silence the "running in development mode" console banner.
Vue.config.productionTip = false

import '@fortawesome/fontawesome-free/css/all.css'
import '@fortawesome/fontawesome-free/js/all.js'

import TaskList from "./components/TaskList.vue"
import TaskItem from "./components/TaskItem.vue"

// Register the task components globally so templates can use them anywhere.
Vue.component("task-list", TaskList)
Vue.component("task-item", TaskItem)

// Bootstrap the root instance onto the #app element.
new Vue({
  render: h => h(App),
}).$mount('#app')
|
"use strict";
const express = require("express");
const bearerHandler = require("../auth/bearer.js");
const aclHandler = require("../auth/acl.js");
const dataModules = require("../models");
const router = express.Router();
// Resolve the :model URL segment to its data module; unknown model names are
// routed to the error handler via next("Invalid Model").
router.param("model", (req, res, next) => {
  const modelName = req.params.model;
  if (dataModules[modelName]) {
    req.model = dataModules[modelName];
    next();
  } else {
    next("Invalid Model");
  }
});

// CRUD routes: every route requires a bearer token and the matching ACL
// capability before the handler runs.
router.post(
  "/:model",
  bearerHandler,
  aclHandler("create"),
  handleCreatePosting
);
router.put(
  "/:model/:id",
  bearerHandler,
  aclHandler("update"),
  handleUpdatePosting
);
router.delete(
  "/:model/:id",
  bearerHandler,
  aclHandler("delete"),
  handleDeletePosting
);
/**
 * POST /:model -- create a posting owned by the authenticated user.
 * Stamps the caller's username as `seller`, emits a model-named event so
 * listeners hear about the new item, and answers 201 with the record.
 * Fix: failures previously only logged, leaving the HTTP request hanging;
 * they now answer 500.
 */
async function handleCreatePosting(req, res) {
  try {
    let obj = req.body;
    let category = req.params.model;
    // The authenticated user becomes the seller regardless of the body.
    let sellerObj = { seller: `${req.user.dataValues.username}`, ...obj };
    let newPosting = await req.model.create(sellerObj);
    req.emitEvent(category, {
      message: "New item for sale",
      ...newPosting.dataValues,
    });
    res.status(201).json(newPosting);
  } catch (err) {
    console.error(err);
    res.status(500).send("Server error");
  }
}
/**
 * PUT /:model/:id -- update a posting, allowed only for its original seller.
 * Fixes: a missing record previously crashed with a TypeError on
 * `oldPost.dataValues` (findOne resolves to null) and errors left the
 * request hanging; we now answer 404 and 500 respectively.
 */
async function handleUpdatePosting(req, res) {
  try {
    const id = req.params.id;
    const obj = req.body;
    let updator = req.user.dataValues.username;
    let oldPost = await req.model.model.findOne({ where: { id } });
    if (!oldPost) {
      return res.status(404).send("Not found");
    }
    let poster = oldPost.dataValues.seller;
    // Only the original poster may modify the record.
    if (updator === poster) {
      let updatedPosting = await req.model.model.update(obj, { where: { id } });
      res.status(200).json(updatedPosting);
    } else {
      res.status(403).send("Invalid user");
    }
  } catch (err) {
    console.error(err);
    res.status(500).send("Server error");
  }
}
/**
 * DELETE /:model/:id -- delete a posting, allowed only for its original seller.
 * Fixes: removed a leftover debug `console.log(req.model)`; a missing record
 * previously crashed with a TypeError (findOne resolves to null) and errors
 * left the request hanging -- we now answer 404 and 500 respectively.
 */
async function handleDeletePosting(req, res) {
  try {
    let id = req.params.id;
    let deleter = req.user.dataValues.username;
    let oldPost = await req.model.model.findOne({ where: { id } });
    if (!oldPost) {
      return res.status(404).send("Not found");
    }
    let poster = oldPost.dataValues.seller;
    // Only the original poster may delete the record.
    if (deleter === poster) {
      let deletedPosting = await req.model.model.destroy({ where: { id } });
      res.status(200).json(deletedPosting);
    } else {
      res.status(403).send("Invalid user");
    }
  } catch (err) {
    console.error(err);
    res.status(500).send("Server error");
  }
}
module.exports = router;
|
import queue
import datetime
class WebsocketProcessor(object):
    """Bridges redis state to websocket clients.

    Each connected socket runs run() in its own loop; update() wakes every
    loop so it re-sends its data. Client replies of the form 'target,value'
    are written back into redis with a short TTL.
    """

    def __init__(self, redis):
        # One wake-up queue per connected websocket.
        self._qs = []
        self._redis = redis

    def dumpRedis(self):
        """Render every redis key/value pair as an HTML string.

        Values that are not valid UTF-8 are shown as the literal "BINARY".
        """
        redisData = ''
        for key in self._redis.scan_iter():
            value = self._redis.get(key)
            if value is not None:
                valueStr = "BINARY"
                try:
                    valueStr = value.decode("utf-8")
                except UnicodeError:
                    pass
                redisData += key.decode("utf-8") + '->' + valueStr + '<br>'
        return redisData

    def update(self):
        """Wake every connected websocket loop."""
        for q in self._qs:
            q.put('updatea')

    def processResult(self, id, result):
        """Store a 'target,value' client reply in redis with a 10-second TTL.

        Replies without a comma (including the literal string 'None') are ignored.
        """
        if result == 'None' or ',' not in result:
            return
        target, value = result.split(',', 1)
        self._redis.setex(target, datetime.timedelta(seconds=10), value)

    def run(self, ws, id):
        """Per-connection loop: send data for `id` (or a full redis dump when
        id is None), feed any client reply back via processResult, then block
        on the wake-up queue. A None queue item ends the loop."""
        q = queue.Queue()
        self._qs.append(q)
        while True:
            dataToSend = None
            if id is not None:
                # Consume-once semantics: the key is deleted after reading.
                if self._redis.exists(id):
                    value = self._redis.get(id)
                    self._redis.delete(id)
                    if value is not None:
                        try:
                            dataToSend = value.decode('utf-8')
                        except UnicodeError:
                            pass
            else:
                dataToSend = self.dumpRedis()
            if dataToSend is not None:
                result = None
                try:
                    ws.send(dataToSend)
                    result = ws.receive()
                except Exception:
                    # Socket went away: leave the loop. Catching Exception
                    # (not a bare except) lets KeyboardInterrupt/SystemExit
                    # propagate instead of being swallowed.
                    break
                if result is not None:
                    self.processResult(id, result)
            item = q.get()
            if item is None:
                break
|
import { LineSegmentsGeometry } from './LineSegmentsGeometry.js';
/**
 * A polyline geometry built on LineSegmentsGeometry: consecutive points are
 * duplicated into overlapping (start, end) segment pairs so the fat-line
 * shaders can expand each segment into screen-space quads.
 */
class LineGeometry extends LineSegmentsGeometry {

	constructor() {

		super();
		this.type = 'LineGeometry';

	}

	setPositions( array ) {

		// converts [ x1, y1, z1,  x2, y2, z2, ... ] to pairs format
		// Each interior point is written twice (as the end of one segment and
		// the start of the next), hence the 2x-sized buffer.
		var length = array.length - 3;
		var points = new Float32Array( 2 * length );

		for ( var i = 0; i < length; i += 3 ) {

			points[ 2 * i ] = array[ i ];
			points[ 2 * i + 1 ] = array[ i + 1 ];
			points[ 2 * i + 2 ] = array[ i + 2 ];
			points[ 2 * i + 3 ] = array[ i + 3 ];
			points[ 2 * i + 4 ] = array[ i + 4 ];
			points[ 2 * i + 5 ] = array[ i + 5 ];

		}

		super.setPositions( points );

		return this;

	}

	setColors( array ) {

		// converts [ r1, g1, b1,  r2, g2, b2, ... ] to pairs format
		// Same duplication scheme as setPositions, applied to RGB triples.
		var length = array.length - 3;
		var colors = new Float32Array( 2 * length );

		for ( var i = 0; i < length; i += 3 ) {

			colors[ 2 * i ] = array[ i ];
			colors[ 2 * i + 1 ] = array[ i + 1 ];
			colors[ 2 * i + 2 ] = array[ i + 2 ];
			colors[ 2 * i + 3 ] = array[ i + 3 ];
			colors[ 2 * i + 4 ] = array[ i + 4 ];
			colors[ 2 * i + 5 ] = array[ i + 5 ];

		}

		super.setColors( colors );

		return this;

	}

	fromLine( line ) {

		var geometry = line.geometry;

		if ( geometry.isGeometry ) {

			console.error( 'THREE.LineGeometry no longer supports Geometry. Use THREE.BufferGeometry instead.' );
			return;

		} else if ( geometry.isBufferGeometry ) {

			this.setPositions( geometry.attributes.position.array ); // assumes non-indexed

		}

		// set colors, maybe

		return this;

	}

}

LineGeometry.prototype.isLineGeometry = true;
export { LineGeometry };
|
class Solution(object):
    def maxProfit(self, prices):
        """
        :type prices: List[int]
        :rtype: int
        """
        # Greedy: summing every positive day-over-day rise equals the best
        # total achievable with unlimited buy/sell transactions.
        return sum(
            max(today - yesterday, 0)
            for yesterday, today in zip(prices, prices[1:])
        )
|
# Exercise driver: invoke the homework helper with a sample pair of numbers.
from homework.b_in_proc_out.output import multiply_numbers
multiply_numbers(6, 6)
// reset button?
// add Hue and Hex options
// dark theme?
// --- Color guessing game bootstrap ---
// n is taken from the active difficulty button's text, so it is a string;
// the numeric contexts below rely on JS coercion.
var diffEls = document.querySelectorAll(".diff__btn");
var diffEl = document.querySelector(".diff__btn.active").innerHTML;
var n = diffEl;
var colorsEl = document.querySelector(".colors");
var colorsBlocks;
var rgbEl = document.querySelector(".rgb");
var statusEl = document.querySelector(".status");
var colors = [];
// Build the initial board, then start the first round.
createBlocks(n);
resetGame();
// Click handler for a color block: a correct guess repaints everything in the
// winning color and schedules a new round; a wrong guess hides the block.
function checkColors(e) {
    if (colors[pickedColor] === e.target.style.backgroundColor) {
        statusEl.innerHTML = "Good job!<br>A new game will start right now.";
        document.body.style.color = colors[pickedColor];
        for (var i=0; i<colorsBlocks.length; i++) {
            colorsBlocks[i].style.backgroundColor = colors[pickedColor];
        }
        resetGame();
    }
    else {
        e.target.style.backgroundColor = "transparent";
        statusEl.innerHTML = "Try again!";
    }
}
// Start a fresh round after a 1s pause: rebuild the board, pick new random
// colors, choose the answer, and show its RGB value as the clue.
function resetGame() {
    setTimeout(function() {
        createBlocks(n);
        document.body.style.color = "black";
        colors = [];
        pickColors();
        pickedColor = random(n);
        rgbEl.innerHTML = colors[pickedColor];
        setColors();
        statusEl.innerHTML = "Try to guess the right color based on the RGB value by clicking on the blocks.";
    }, 1000);
}
// Paint each board block with its color from the global `colors` array.
function setColors() {
    for (var i=0; i<colorsBlocks.length; i++) {
        colorsBlocks[i].style.backgroundColor = colors[i];
    }
}
// Fill `colors` with n random rgb() strings (n may be a string; the `<`
// comparison coerces it).
function pickColors() {
    for (var i=0; i<n; i++) {
        colors.push(randomColor());
    }
}
// Build a random CSS color string. Fix: random(255) could never produce the
// value 255, so pure white (and any 255 channel) was unreachable; random(256)
// makes each channel span the full 0-255 range.
function randomColor() {
    return "rgb(" + random(256) + ", " + random(256) + ", " + random(256) + ")";
}

// Random integer in [0, r).
function random(r) {
    return Math.floor(Math.random()*r);
}
// Wire every difficulty button to setN.
for (var i=0; i<diffEls.length; i++) {
    diffEls[i].addEventListener("click", setN);
}

// Difficulty click handler: move the "active" class to the clicked button,
// read the new block count from its text, and restart the game.
function setN(e) {
    for (var i=0; i<diffEls.length; i++) {
        diffEls[i].classList.remove("active");
    }
    e.target.classList.add("active");
    diffEl = document.querySelector(".diff__btn.active").innerHTML;
    n = diffEl;
    resetGame();
}
// Rebuild the board with `num` blocks (num may be a string; the loop bound
// coerces it) and attach the guess handler to each block.
function createBlocks(num) {
    colorsEl.innerHTML = "";
    for (var i=0; i<num; i++) {
        var block = document.createElement("div");
        block.classList.add("colors__block");
        colorsEl.appendChild(block);
    }
    colorsBlocks = document.querySelectorAll(".colors__block");
    for (var i=0; i<colorsBlocks.length; i++) {
        colorsBlocks[i].addEventListener("click", checkColors);
    }
}
|
/**
* SelectBox.js
*
* Released under LGPL License.
* Copyright (c) 1999-2017 Ephox Corp. All rights reserved
*
* License: http://www.tinymce.com/license
* Contributing: http://www.tinymce.com/contributing
*/
/**
* Creates a new select box control.
*
* @-x-less SelectBox.less
* @class tinymce.ui.SelectBox
* @extends tinymce.ui.Widget
*/
define(
  'tinymce.ui.SelectBox',
  [
    "tinymce.ui.Widget"
  ],
  function (Widget) {
    "use strict";

    // Render an array of option strings as <option> markup.
    // NOTE(review): values are interpolated without HTML escaping; safe only
    // while options come from trusted editor configuration, not user input.
    function createOptions(options) {
      var strOptions = '';
      if (options) {
        for (var i = 0; i < options.length; i++) {
          strOptions += '<option value="' + options[i] + '">' + options[i] + '</option>';
        }
      }
      return strOptions;
    }

    return Widget.extend({
      Defaults: {
        classes: "selectbox",
        role: "selectbox",
        options: []
      },

      /**
       * Constructs a instance with the specified settings.
       *
       * @constructor
       * @param {Object} settings Name/value object with settings.
       * @setting {Array} options Array with options to add to the select box.
       */
      init: function (settings) {
        var self = this;

        self._super(settings);

        if (self.settings.size) {
          self.size = self.settings.size;
        }

        if (self.settings.options) {
          self._options = self.settings.options;
        }

        // Enter submits the enclosing form-like ancestor.
        self.on('keydown', function (e) {
          var rootControl;

          if (e.keyCode == 13) {
            e.preventDefault();

            // Find root control that we can do toJSON on
            self.parents().reverse().each(function (ctrl) {
              if (ctrl.toJSON) {
                rootControl = ctrl;
                return false;
              }
            });

            // Fire event on current text box with the serialized data of the whole form
            self.fire('submit', { data: rootControl.toJSON() });
          }
        });
      },

      /**
       * Getter/setter function for the options state.
       *
       * @method options
       * @param {Array} [state] State to be set.
       * @return {Array|tinymce.ui.SelectBox} Array of string options.
       */
      options: function (state) {
        if (!arguments.length) {
          return this.state.get('options');
        }

        this.state.set('options', state);

        return this;
      },

      // Build the <select> markup from the configured options and size.
      renderHtml: function () {
        var self = this, options, size = '';

        options = createOptions(self._options);

        if (self.size) {
          size = ' size = "' + self.size + '"';
        }

        return (
          '<select id="' + self._id + '" class="' + self.classes + '"' + size + '>' +
          options +
          '</select>'
        );
      },

      // Re-render the <option> list whenever the options state changes.
      bindStates: function () {
        var self = this;

        self.state.on('change:options', function (e) {
          self.getEl().innerHTML = createOptions(e.value);
        });

        return self._super();
      }
    });
  }
);
|
import { pick } from "lodash";
import * as types from "./actionTypes";
// State shape for the profile "comments" scene.
const initialState = {
  commentsById: {},        // fetched comment records keyed by id
  commentIds: null,        // ids for the current page; null until first fetch
  pageCount: 0,            // total pages at the current pageLimit
  pageLimit: 10,           // comments per page
  pageOffset: 0,           // current page offset
  pageProjectFilter: [],   // active project filter selections
  pageDocumentFilter: [],  // active document filter selections
  checked: []              // sidebar filter checkbox state
};
/**
 * Reducer for the profile comments scene: stores fetched comments and the
 * paging / filtering UI state. Unknown actions return the state unchanged.
 */
export default function reduce(state = initialState, action = {}) {
  const { type } = action;
  if (type === types.USER_COMMENTS_FETCH_SUCCESS) {
    return {
      ...state,
      commentsById: action.commentsById,
      commentIds: action.commentIds,
      // Re-derive how many pages the new total occupies at the current size.
      pageCount: Math.ceil(action.commentCount / state.pageLimit)
    };
  }
  if (type === types.PAGE_LIMIT_UPDATED) {
    return { ...state, pageLimit: action.pageLimit };
  }
  if (type === types.PAGE_OFFSET_UPDATED) {
    return { ...state, pageOffset: action.pageOffset };
  }
  if (type === types.PAGE_PROJECT_FILTER_UPDATED) {
    return { ...state, pageProjectFilter: action.pageProjectFilter };
  }
  if (type === types.PAGE_SURVEY_FILTER_UPDATED) {
    return { ...state, pageDocumentFilter: action.pageDocumentFilter };
  }
  if (type === types.SIDEBAR_FILTER_CHECKED) {
    return { ...state, checked: action.checked };
  }
  return state;
}
// Selector: pull the comment map and id list out of the (deeply nested)
// profile-comments slice of the store.
export const getUserComments = state => {
  const data = state.scenes.profile.scenes.comments.data;
  return {
    commentsById: data.commentsById,
    commentIds: data.commentIds
  };
};
// Selector: expose paging and filter fields of the profile-comments slice.
// lodash `pick` is kept (rather than destructuring) so keys absent from the
// slice are omitted from the result instead of appearing as undefined.
export const getPageAndFilter = state => {
  const commentsData = state.scenes.profile.scenes.comments.data;
  return pick(commentsData, [
    "pageLimit",
    "pageOffset",
    "pageCount",
    "pageProjectFilter",
    "pageDocumentFilter",
    "checked"
  ]);
};
|
// Re-export the package's root entry point so this nested path can be
// required directly.
module.exports = require('../../../../index'); |
// NOTE(review): this is compiled, minified build output of
// react-navigation-stack's StackViewLayout (see the _jsxFileName constant and
// the sourceMappingURL comment at the bottom). Do not edit it by hand —
// change the TypeScript source (src/views/StackView/StackViewLayout.tsx) and
// rebuild instead.
import _objectWithoutProperties from"@babel/runtime/helpers/objectWithoutProperties";import _extends from"@babel/runtime/helpers/extends";import _defineProperty from"@babel/runtime/helpers/defineProperty";import _classCallCheck from"@babel/runtime/helpers/classCallCheck";import _createClass from"@babel/runtime/helpers/createClass";import _possibleConstructorReturn from"@babel/runtime/helpers/possibleConstructorReturn";import _getPrototypeOf from"@babel/runtime/helpers/getPrototypeOf";import _inherits from"@babel/runtime/helpers/inherits";var _jsxFileName="/Users/satya/Workspace/Callstack/react-navigation-stack/src/views/StackView/StackViewLayout.tsx";function ownKeys(object,enumerableOnly){var keys=Object.keys(object);if(Object.getOwnPropertySymbols){var symbols=Object.getOwnPropertySymbols(object);if(enumerableOnly)symbols=symbols.filter(function(sym){return Object.getOwnPropertyDescriptor(object,sym).enumerable;});keys.push.apply(keys,symbols);}return keys;}function _objectSpread(target){for(var i=1;i<arguments.length;i++){var source=arguments[i]!=null?arguments[i]:{};if(i%2){ownKeys(source,true).forEach(function(key){_defineProperty(target,key,source[key]);});}else if(Object.getOwnPropertyDescriptors){Object.defineProperties(target,Object.getOwnPropertyDescriptors(source));}else{ownKeys(source).forEach(function(key){Object.defineProperty(target,key,Object.getOwnPropertyDescriptor(source,key));});}}return target;}import*as React from'react';import{Animated,StyleSheet,Platform,View,I18nManager,Easing,Dimensions}from'react-native';import{SceneView,StackActions,NavigationActions,NavigationProvider,ThemeContext,withOrientation}from'react-navigation';import{ScreenContainer}from'react-native-screens';import{PanGestureHandler,State as GestureState}from'react-native-gesture-handler';import Card from'./StackViewCard';import Header from'../Header/Header';import TransitionConfigs from'./StackViewTransitionConfigs';import HeaderStyleInterpolator 
from'../Header/HeaderStyleInterpolator';import StackGestureContext from'../../utils/StackGestureContext';import clamp from'../../utils/clamp';import{supportsImprovedSpringAnimation}from'../../utils/ReactNativeFeatures';var IPHONE_XS_HEIGHT=812;var IPHONE_XR_HEIGHT=896;var _Dimensions$get=Dimensions.get('window'),WINDOW_WIDTH=_Dimensions$get.width,WINDOW_HEIGHT=_Dimensions$get.height;var IS_IPHONE_X=Platform.OS==='ios'&&!Platform.isPad&&!Platform.isTVOS&&(WINDOW_HEIGHT===IPHONE_XS_HEIGHT||WINDOW_WIDTH===IPHONE_XS_HEIGHT||WINDOW_HEIGHT===IPHONE_XR_HEIGHT||WINDOW_WIDTH===IPHONE_XR_HEIGHT);var EaseInOut=Easing.inOut(Easing.ease);var HEADER_LAYOUT_PRESET=['center','left'];var HEADER_TRANSITION_PRESET=['fade-in-place','uikit'];var HEADER_BACKGROUND_TRANSITION_PRESET=['toggle','fade','translate'];var ANIMATION_DURATION=500;var POSITION_THRESHOLD=1/2;var GESTURE_RESPONSE_DISTANCE_HORIZONTAL=50;var GESTURE_RESPONSE_DISTANCE_VERTICAL=135;var USE_NATIVE_DRIVER=Platform.OS==='android'||Platform.OS==='ios';var getDefaultHeaderHeight=function getDefaultHeaderHeight(isLandscape){if(Platform.OS==='ios'){if(isLandscape&&!Platform.isPad){return 32;}else if(IS_IPHONE_X){return 88;}else{return 64;}}else if(Platform.OS==='android'){return 56;}else{return 64;}};var StackViewLayout=function(_React$Component){_inherits(StackViewLayout,_React$Component);function StackViewLayout(props){var _this;_classCallCheck(this,StackViewLayout);_this=_possibleConstructorReturn(this,_getPrototypeOf(StackViewLayout).call(this,props));_this.immediateIndex=null;_this.handleFloatingHeaderLayout=function(e){var height=e.nativeEvent.layout.height;if(height!==_this.state.floatingHeaderHeight){_this.setState({floatingHeaderHeight:height});}};_this.handlePanGestureStateChange=function(_ref){var 
nativeEvent=_ref.nativeEvent;if(nativeEvent.oldState===GestureState.ACTIVE){if(_this.positionSwitch.__getValue()===1){return;}if(_this.isMotionVertical()){_this.handleReleaseVertical(nativeEvent);}else{_this.handleReleaseHorizontal(nativeEvent);}}else if(nativeEvent.state===GestureState.ACTIVE){_this.props.onGestureBegin&&_this.props.onGestureBegin();_this.positionSwitch.setValue(0);}};_this.renderCard=function(scene){var _this$props=_this.props,transitionProps=_this$props.transitionProps,shadowEnabled=_this$props.shadowEnabled,cardOverlayEnabled=_this$props.cardOverlayEnabled,transparentCard=_this$props.transparentCard,cardStyle=_this$props.cardStyle;var _ref2=_this.transitionConfig,screenInterpolator=_ref2.screenInterpolator;var style=screenInterpolator&&screenInterpolator(_objectSpread({},transitionProps,{shadowEnabled:shadowEnabled,cardOverlayEnabled:cardOverlayEnabled,position:_this.position,scene:scene}));var options=scene.descriptor.options;var hasHeader=options.header!==null;var headerMode=_this.getHeaderMode();var floatingContainerStyle=StyleSheet.absoluteFill;if(hasHeader&&headerMode==='float'&&!options.headerTransparent){floatingContainerStyle=_objectSpread({},Platform.select({web:{},default:StyleSheet.absoluteFillObject}),{paddingTop:_this.state.floatingHeaderHeight});}return React.createElement(Card,_extends({},transitionProps,{key:"card_"+scene.key,position:_this.position,realPosition:transitionProps.position,animatedStyle:style,transparent:transparentCard,style:[floatingContainerStyle,cardStyle],scene:scene,__source:{fileName:_jsxFileName,lineNumber:984}}),_this.renderInnerScene(scene));};_this.panGestureRef=React.createRef();_this.gestureX=new Animated.Value(0);_this.gestureY=new Animated.Value(0);_this.positionSwitch=new 
Animated.Value(1);if(Animated.subtract){_this.gestureSwitch=Animated.subtract(1,_this.positionSwitch);}else{_this.gestureSwitch=Animated.add(1,Animated.multiply(-1,_this.positionSwitch));}_this.gestureEvent=Animated.event([{nativeEvent:{translationX:_this.gestureX,translationY:_this.gestureY}}],{useNativeDriver:USE_NATIVE_DRIVER});_this.state={floatingHeaderHeight:getDefaultHeaderHeight(props.isLandscape)};return _this;}_createClass(StackViewLayout,[{key:"renderHeader",value:function renderHeader(scene,headerMode){var options=scene.descriptor.options;var header=options.header;if(__DEV__&&typeof header==='string'){throw new Error("Invalid header value: \""+header+"\". The header option must be a valid React component or null, not a string.");}if(header===null&&headerMode==='screen'){return null;}if(React.isValidElement(header)){return header;}var renderHeader=header||function(props){return React.createElement(Header,_extends({},props,{__source:{fileName:_jsxFileName,lineNumber:226}}));};var _ref3=this.transitionConfig,headerLeftInterpolator=_ref3.headerLeftInterpolator,headerTitleInterpolator=_ref3.headerTitleInterpolator,headerRightInterpolator=_ref3.headerRightInterpolator,headerBackgroundInterpolator=_ref3.headerBackgroundInterpolator;var backgroundTransitionPresetInterpolator=this.getHeaderBackgroundTransitionPreset();if(backgroundTransitionPresetInterpolator){headerBackgroundInterpolator=backgroundTransitionPresetInterpolator;}var _this$props2=this.props,transitionProps=_this$props2.transitionProps,passProps=_objectWithoutProperties(_this$props2,["transitionProps"]);return 
React.createElement(NavigationProvider,{value:scene.descriptor.navigation,__source:{fileName:_jsxFileName,lineNumber:243}},renderHeader(_objectSpread({},passProps,{},transitionProps,{position:this.position,scene:scene,mode:headerMode,transitionPreset:this.getHeaderTransitionPreset(),layoutPreset:this.getHeaderLayoutPreset(),backTitleVisible:this.getHeaderBackTitleVisible(),leftInterpolator:headerLeftInterpolator,titleInterpolator:headerTitleInterpolator,rightInterpolator:headerRightInterpolator,backgroundInterpolator:headerBackgroundInterpolator})));}},{key:"reset",value:function reset(resetToIndex,duration){if(Platform.OS==='ios'&&supportsImprovedSpringAnimation()){Animated.spring(this.props.transitionProps.position,{toValue:resetToIndex,stiffness:6000,damping:100,mass:3,overshootClamping:true,restDisplacementThreshold:0.01,restSpeedThreshold:0.01,useNativeDriver:USE_NATIVE_DRIVER}).start();}else{Animated.timing(this.props.transitionProps.position,{toValue:resetToIndex,duration:duration,easing:EaseInOut,useNativeDriver:USE_NATIVE_DRIVER}).start();}}},{key:"goBack",value:function goBack(backFromIndex,duration){var _this2=this;var _this$props$transitio=this.props.transitionProps,navigation=_this$props$transitio.navigation,position=_this$props$transitio.position,scenes=_this$props$transitio.scenes;var toValue=Math.max(backFromIndex-1,0);this.immediateIndex=toValue;var onCompleteAnimation=function onCompleteAnimation(){_this2.immediateIndex=null;var backFromScene=scenes.find(function(s){return 
s.index===toValue+1;});if(backFromScene){navigation.dispatch(NavigationActions.back({key:backFromScene.route.key,immediate:true}));navigation.dispatch(StackActions.completeTransition());}};if(Platform.OS==='ios'&&supportsImprovedSpringAnimation()){Animated.spring(position,{toValue:toValue,stiffness:7000,damping:300,mass:3,overshootClamping:true,restDisplacementThreshold:0.01,restSpeedThreshold:0.01,useNativeDriver:USE_NATIVE_DRIVER}).start(onCompleteAnimation);}else{Animated.timing(position,{toValue:toValue,duration:duration,easing:EaseInOut,useNativeDriver:USE_NATIVE_DRIVER}).start(onCompleteAnimation);}}},{key:"prepareAnimated",value:function prepareAnimated(){if(this.props===this.prevProps){return;}this.prevProps=this.props;this.prepareGesture();this.preparePosition();this.prepareTransitionConfig();}},{key:"render",value:function render(){this.prepareAnimated();var transitionProps=this.props.transitionProps;var index=transitionProps.navigation.state.index,scenes=transitionProps.scenes;var headerMode=this.getHeaderMode();var floatingHeader=null;if(headerMode==='float'){var scene=transitionProps.scene;floatingHeader=React.createElement(View,{style:styles.floatingHeader,pointerEvents:"box-none",onLayout:this.handleFloatingHeaderLayout,__source:{fileName:_jsxFileName,lineNumber:365}},this.renderHeader(scene,headerMode));}return 
React.createElement(PanGestureHandler,_extends({},this.gestureActivationCriteria(),{ref:this.panGestureRef,onGestureEvent:this.gestureEvent,onHandlerStateChange:this.handlePanGestureStateChange,enabled:index>0&&this.isGestureEnabled(),__source:{fileName:_jsxFileName,lineNumber:376}}),React.createElement(Animated.View,{style:[styles.container,this.transitionConfig.containerStyle,this.context==='light'?this.transitionConfig.containerStyleLight:this.transitionConfig.containerStyleDark],__source:{fileName:_jsxFileName,lineNumber:383}},React.createElement(StackGestureContext.Provider,{value:this.panGestureRef,__source:{fileName:_jsxFileName,lineNumber:392}},React.createElement(ScreenContainer,{style:styles.scenes,__source:{fileName:_jsxFileName,lineNumber:393}},scenes.map(this.renderCard)),floatingHeader)));}},{key:"componentDidUpdate",value:function componentDidUpdate(prevProps){var prevState=prevProps.transitionProps.navigation.state;var state=this.props.transitionProps.navigation.state;if(prevState.index!==state.index){this.maybeCancelGesture();}}},{key:"getGestureResponseDistance",value:function getGestureResponseDistance(){var scene=this.props.transitionProps.scene;var options=scene.descriptor.options;var _options$gestureRespo=options.gestureResponseDistance,userGestureResponseDistance=_options$gestureRespo===void 0?{}:_options$gestureRespo;return this.isModal()?userGestureResponseDistance.vertical||GESTURE_RESPONSE_DISTANCE_VERTICAL:userGestureResponseDistance.horizontal||GESTURE_RESPONSE_DISTANCE_HORIZONTAL;}},{key:"gestureActivationCriteria",value:function gestureActivationCriteria(){var layout=this.props.transitionProps.layout;var gestureResponseDistance=this.getGestureResponseDistance();var isMotionInverted=this.isMotionInverted();if(this.isMotionVertical()){var height=layout.height.__getValue();return{maxDeltaX:15,minOffsetY:isMotionInverted?-5:5,hitSlop:isMotionInverted?{top:-height+gestureResponseDistance}:{bottom:-height+gestureResponseDistance}};}else{var 
width=layout.width.__getValue();var hitSlop=-width+gestureResponseDistance;return{minOffsetX:isMotionInverted?-5:5,maxDeltaY:20,hitSlop:isMotionInverted?{left:hitSlop}:{right:hitSlop}};}}},{key:"isGestureEnabled",value:function isGestureEnabled(){var gesturesEnabled=this.props.transitionProps.scene.descriptor.options.gesturesEnabled;return typeof gesturesEnabled==='boolean'?gesturesEnabled:Platform.OS==='ios';}},{key:"isMotionVertical",value:function isMotionVertical(){return this.isModal();}},{key:"isModal",value:function isModal(){return this.props.mode==='modal';}},{key:"isMotionInverted",value:function isMotionInverted(){var scene=this.props.transitionProps.scene;var options=scene.descriptor.options;var gestureDirection=options.gestureDirection;if(this.isModal()){return gestureDirection==='inverted';}else{return typeof gestureDirection==='string'?gestureDirection==='inverted':I18nManager.isRTL;}}},{key:"computeHorizontalGestureValue",value:function computeHorizontalGestureValue(_ref4){var translationX=_ref4.translationX;var _this$props$transitio2=this.props.transitionProps,navigation=_this$props$transitio2.navigation,layout=_this$props$transitio2.layout;var index=navigation.state.index;var distance=layout.width.__getValue();var x=this.isMotionInverted()?-1*translationX:translationX;var value=index-x/distance;return clamp(index-1,value,index);}},{key:"computeVerticalGestureValue",value:function computeVerticalGestureValue(_ref5){var translationY=_ref5.translationY;var _this$props$transitio3=this.props.transitionProps,navigation=_this$props$transitio3.navigation,layout=_this$props$transitio3.layout;var index=navigation.state.index;var distance=layout.height.__getValue();var y=this.isMotionInverted()?-1*translationY:translationY;var value=index-y/distance;return clamp(index-1,value,index);}},{key:"maybeCancelGesture",value:function maybeCancelGesture(){this.positionSwitch.setValue(1);}},{key:"prepareGesture",value:function 
prepareGesture(){if(!this.isGestureEnabled()){if(this.positionSwitch.__getValue()!==1){this.positionSwitch.setValue(1);}this.gesturePosition=undefined;return;}if(this.props.transitionProps.layout.width.__getValue()===0||this.props.transitionProps.layout.height.__getValue()===0){return;}if(this.isMotionVertical()){this.prepareGestureVertical();}else{this.prepareGestureHorizontal();}}},{key:"prepareGestureHorizontal",value:function prepareGestureHorizontal(){var index=this.props.transitionProps.navigation.state.index;if(this.isMotionInverted()){this.gesturePosition=Animated.add(index,Animated.divide(this.gestureX,this.props.transitionProps.layout.width)).interpolate({inputRange:[index-1,index],outputRange:[index-1,index],extrapolate:'clamp'});}else{this.gesturePosition=Animated.add(index,Animated.multiply(-1,Animated.divide(this.gestureX,this.props.transitionProps.layout.width))).interpolate({inputRange:[index-1,index],outputRange:[index-1,index],extrapolate:'clamp'});}}},{key:"prepareGestureVertical",value:function prepareGestureVertical(){var index=this.props.transitionProps.navigation.state.index;if(this.isMotionInverted()){this.gesturePosition=Animated.add(index,Animated.divide(this.gestureY,this.props.transitionProps.layout.height)).interpolate({inputRange:[index-1,index],outputRange:[index-1,index],extrapolate:'clamp'});}else{this.gesturePosition=Animated.add(index,Animated.multiply(-1,Animated.divide(this.gestureY,this.props.transitionProps.layout.height))).interpolate({inputRange:[index-1,index],outputRange:[index-1,index],extrapolate:'clamp'});}}},{key:"handleReleaseHorizontal",value:function handleReleaseHorizontal(nativeEvent){var _this$props$transitio4=this.props.transitionProps,navigation=_this$props$transitio4.navigation,position=_this$props$transitio4.position,layout=_this$props$transitio4.layout;var index=navigation.state.index;var immediateIndex=this.immediateIndex==null?index:this.immediateIndex;var distance=layout.width.__getValue();var 
movementDirection=this.isMotionInverted()?-1:1;var movedDistance=movementDirection*nativeEvent.translationX;var gestureVelocity=movementDirection*nativeEvent.velocityX;var defaultVelocity=distance/ANIMATION_DURATION;var velocity=Math.max(Math.abs(gestureVelocity),defaultVelocity);var resetDuration=this.isMotionInverted()?(distance-movedDistance)/velocity:movedDistance/velocity;var goBackDuration=this.isMotionInverted()?movedDistance/velocity:(distance-movedDistance)/velocity;var value=this.computeHorizontalGestureValue(nativeEvent);position.setValue(value);this.positionSwitch.setValue(1);if(gestureVelocity<-50){this.props.onGestureCanceled&&this.props.onGestureCanceled();this.reset(immediateIndex,resetDuration);return;}if(gestureVelocity>50){this.props.onGestureEnd&&this.props.onGestureEnd();this.goBack(immediateIndex,goBackDuration);return;}if(value<=index-POSITION_THRESHOLD){this.props.onGestureEnd&&this.props.onGestureEnd();this.goBack(immediateIndex,goBackDuration);}else{this.props.onGestureCanceled&&this.props.onGestureCanceled();this.reset(immediateIndex,resetDuration);}}},{key:"handleReleaseVertical",value:function handleReleaseVertical(nativeEvent){var _this$props$transitio5=this.props.transitionProps,navigation=_this$props$transitio5.navigation,position=_this$props$transitio5.position,layout=_this$props$transitio5.layout;var index=navigation.state.index;var immediateIndex=this.immediateIndex==null?index:this.immediateIndex;var distance=layout.height.__getValue();var isMotionInverted=this.isMotionInverted();var movementDirection=isMotionInverted?-1:1;var movedDistance=movementDirection*nativeEvent.translationY;var gestureVelocity=movementDirection*nativeEvent.velocityY;var defaultVelocity=distance/ANIMATION_DURATION;var velocity=Math.max(Math.abs(gestureVelocity),defaultVelocity);var resetDuration=isMotionInverted?(distance-movedDistance)/velocity:movedDistance/velocity;var 
goBackDuration=isMotionInverted?movedDistance/velocity:(distance-movedDistance)/velocity;var value=this.computeVerticalGestureValue(nativeEvent);position.setValue(value);this.positionSwitch.setValue(1);if(gestureVelocity<-50){this.props.onGestureCanceled&&this.props.onGestureCanceled();this.reset(immediateIndex,resetDuration);return;}if(gestureVelocity>50){this.props.onGestureEnd&&this.props.onGestureEnd();this.goBack(immediateIndex,goBackDuration);return;}if(value<=index-POSITION_THRESHOLD){this.props.onGestureEnd&&this.props.onGestureEnd();this.goBack(immediateIndex,goBackDuration);}else{this.props.onGestureCanceled&&this.props.onGestureCanceled();this.reset(immediateIndex,resetDuration);}}},{key:"getHeaderMode",value:function getHeaderMode(){if(this.props.headerMode){return this.props.headerMode;}if(Platform.OS==='android'||this.props.mode==='modal'){return'screen';}return'float';}},{key:"getHeaderBackgroundTransitionPreset",value:function getHeaderBackgroundTransitionPreset(){var headerBackgroundTransitionPreset=this.props.headerBackgroundTransitionPreset;if(headerBackgroundTransitionPreset){if(HEADER_BACKGROUND_TRANSITION_PRESET.includes(headerBackgroundTransitionPreset)){if(headerBackgroundTransitionPreset==='fade'){return HeaderStyleInterpolator.forBackgroundWithFade;}else if(headerBackgroundTransitionPreset==='translate'){return HeaderStyleInterpolator.forBackgroundWithTranslation;}else if(headerBackgroundTransitionPreset==='toggle'){return HeaderStyleInterpolator.forBackgroundWithInactiveHidden;}}else if(__DEV__){console.error("Invalid configuration applied for headerBackgroundTransitionPreset - expected one of "+HEADER_BACKGROUND_TRANSITION_PRESET.join(', ')+" but received "+JSON.stringify(headerBackgroundTransitionPreset));}}return null;}},{key:"getHeaderLayoutPreset",value:function getHeaderLayoutPreset(){var 
headerLayoutPreset=this.props.headerLayoutPreset;if(headerLayoutPreset){if(__DEV__){if(this.getHeaderTransitionPreset()==='uikit'&&headerLayoutPreset==='left'&&Platform.OS==='ios'){console.warn("headerTransitionPreset with the value 'uikit' is incompatible with headerLayoutPreset 'left'");}}if(HEADER_LAYOUT_PRESET.includes(headerLayoutPreset)){return headerLayoutPreset;}if(__DEV__){console.error("Invalid configuration applied for headerLayoutPreset - expected one of "+HEADER_LAYOUT_PRESET.join(', ')+" but received "+JSON.stringify(headerLayoutPreset));}}if(Platform.OS!=='ios'){return'left';}else{return'center';}}},{key:"getHeaderTransitionPreset",value:function getHeaderTransitionPreset(){if(Platform.OS!=='ios'||this.getHeaderMode()==='screen'){return'fade-in-place';}var headerTransitionPreset=this.props.headerTransitionPreset;if(headerTransitionPreset){if(HEADER_TRANSITION_PRESET.includes(headerTransitionPreset)){return headerTransitionPreset;}if(__DEV__){console.error("Invalid configuration applied for headerTransitionPreset - expected one of "+HEADER_TRANSITION_PRESET.join(', ')+" but received "+JSON.stringify(headerTransitionPreset));}}return'fade-in-place';}},{key:"getHeaderBackTitleVisible",value:function getHeaderBackTitleVisible(){var headerBackTitleVisible=this.props.headerBackTitleVisible;var layoutPreset=this.getHeaderLayoutPreset();var enabledByDefault=!(layoutPreset==='left'||Platform.OS!=='ios');return typeof headerBackTitleVisible==='boolean'?headerBackTitleVisible:enabledByDefault;}},{key:"renderInnerScene",value:function renderInnerScene(scene){var _scene$descriptor=scene.descriptor,navigation=_scene$descriptor.navigation,getComponent=_scene$descriptor.getComponent;var SceneComponent=getComponent();var screenProps=this.props.screenProps;var headerMode=this.getHeaderMode();if(headerMode==='screen'){return 
React.createElement(View,{style:styles.container,__source:{fileName:_jsxFileName,lineNumber:896}},React.createElement(View,{style:styles.scenes,__source:{fileName:_jsxFileName,lineNumber:897}},React.createElement(SceneView,{screenProps:screenProps,navigation:navigation,component:SceneComponent,__source:{fileName:_jsxFileName,lineNumber:898}})),this.renderHeader(scene,headerMode));}return React.createElement(SceneView,{screenProps:screenProps,navigation:navigation,component:SceneComponent,__source:{fileName:_jsxFileName,lineNumber:909}});}},{key:"prepareTransitionConfig",value:function prepareTransitionConfig(){this.transitionConfig=TransitionConfigs.getTransitionConfig(this.props.transitionConfig,_objectSpread({},this.props.transitionProps,{position:this.position}),this.props.lastTransitionProps,this.isModal());}},{key:"preparePosition",value:function preparePosition(){if(this.gesturePosition){this.position=Animated.add(Animated.multiply(this.props.transitionProps.position,this.positionSwitch),Animated.multiply(this.gesturePosition,this.gestureSwitch));}else{this.position=this.props.transitionProps.position;}}}]);return StackViewLayout;}(React.Component);StackViewLayout.contextType=ThemeContext;var styles=StyleSheet.create({container:{flex:1,flexDirection:'column-reverse',overflow:'hidden'},scenes:{flex:1},floatingHeader:{position:Platform.select({default:'absolute',web:'fixed'}),left:0,top:0,right:0}});export default withOrientation(StackViewLayout);
//# sourceMappingURL=StackViewLayout.js.map |
import EmailSvg from './email-svg'
// Smoke test: the module's default export must be recognized by the
// project-defined custom Jest matcher `toBeAComponent`.
describe('@components/email-svg', () => {
  it('exports a valid component', () => {
    expect(EmailSvg).toBeAComponent()
  })
})
|
const fetch = require('node-fetch');
const cookie = require('cookie');
exports.handler = async (event) => {
const {code} = JSON.parse(event.body);
const endpoint = 'https://stackoverflow.com/oauth/access_token/json';
const params = new URLSearchParams();
params.append('client_id', process.env.STACK_CLIENT_ID);
params.append('client_secret', process.env.STACK_CLIENT_SECRET);
params.append('code', code);
params.append('redirect_uri', process.env.STACK_REDIRECT_URI);
try {
const response = await fetch(endpoint, {
method: 'POST',
headers: {'Content-Type': 'application/x-www-form-urlencoded'},
body: params
});
if(!response.ok) {
console.error('Auth response error', response);
return {
statusCode: 400,
body: JSON.stringify(response.error)
};
}
const responseJson = await response.json();
console.log('Auth response', responseJson);
// The response can return a 200 success but still have an error message
// so I'll return an error status from the function.
if(responseJson.error_message || !responseJson.access_token) {
return {
statusCode: 400,
body: JSON.stringify(responseJson)
};
}
// Store the token in a cookie and return the 'expires'
// attribute to notify the client.
const expires = new Date(Date.now() + responseJson.expires * 1000);
const authCookie = cookie.serialize('dewy.auth', responseJson.access_token, {
secure: true,
httpOnly: true,
path: '/',
sameSite: 'strict',
expires: expires
});
return {
statusCode: 200,
headers: {
'Set-Cookie': authCookie
},
body: JSON.stringify({expires: expires})
};
} catch(error) {
console.error('Error making auth request', error);
return {
statusCode: 500,
body: JSON.stringify(error)
};
}
}; |
// On double-click, read the `name` of the clicked element and ask the main
// process (via IPC) to open it for editing.
ul.addEventListener('dblclick', function (event) {
  var selectedName = event.target.name;
  console.log(selectedName);
  ipcRenderer.send("editRolaRequest", selectedName);
});
|
"""This module contains helper functions and utilities for nelpy."""
# Public API: names exported by `from nelpy.utils import *`.
# Keep this list in sync with the public helpers defined in this module.
__all__ = ['spatial_information',
           'frange',
           'swap_cols',
           'swap_rows',
           'pairwise',
           'is_sorted',
           'linear_merge',
           'PrettyDuration',
           'ddt_asa',
           'get_contiguous_segments',
           'get_events_boundaries',
           'get_threshold_crossing_epochs',
           '_bst_get_bins']
import numpy as np
import logging
from itertools import tee, repeat
from collections import namedtuple
from math import floor
from scipy.signal import hilbert
import scipy.ndimage.filters #import gaussian_filter1d, gaussian_filter
from numpy import log, ceil
import copy
import sys
import ctypes
from multiprocessing import Array, cpu_count
from multiprocessing.pool import Pool
import pdb
from . import core # so that core.RegularlySampledAnalogSignalArray is exposed
from . import auxiliary # so that auxiliary.TuningCurve1D is epxosed
from . import filtering
from .utils_.decorators import keyword_deprecation
# Prefer the modern scipy.fft location; fall back for old SciPy versions.
# Catch only ImportError — a bare `except:` would also swallow unrelated
# failures (including KeyboardInterrupt/SystemExit).
try:
    from scipy.fft import next_fast_len   # scipy 1.*
except ImportError:
    from scipy.fftpack import next_fast_len  # scipy 0.*
# def sub2ind(array_shape, rows, cols):
# ind = rows*array_shape[1] + cols
# ind[ind < 0] = -1
# ind[ind >= array_shape[0]*array_shape[1]] = -1
# return ind
# def ind2sub(array_shape, ind):
# # see also np.unravel_index(ind, array.shape)
# ind[ind < 0] = -1
# ind[ind >= array_shape[0]*array_shape[1]] = -1
# rows = (ind.astype('int') / array_shape[1])
# cols = ind % array_shape[1]
# return (rows, cols)
def ragged_array(arr):
    """Takes a list of arrays, and returns a ragged array.

    See https://github.com/numpy/numpy/issues/12468
    """
    # Allocate a 1-D object array first so numpy never tries to broadcast
    # the (possibly different-length) member arrays into a 2-D block.
    out = np.empty(len(arr), dtype=object)
    for idx, member in enumerate(arr):
        out[idx] = member
    return out
def asa_indices_within_epochs(asa, intervalarray):
    """Return indices of ASA within epochs.

    [[start, stop]
     ...
     [start, stop]]

    so that data can be associated with asa._data[:,start:stop] for each
    epoch.
    """
    # Restrict the epochs to the ASA's support before locating indices.
    intervalarray = intervalarray[asa.support]
    bounds = []
    for epoch in intervalarray.merge().data:
        # searchsorted maps the epoch's (start, stop) times onto sample
        # indices along the ASA's abscissa.
        frm, to = np.searchsorted(asa._abscissa_vals, (epoch[0], epoch[1]))
        bounds.append((frm, to))
    return np.array(bounds, ndmin=2)
def frange(start, stop, step):
    """arange with floating point step"""
    # TODO: this function is not very general; it does not handle reverse
    # ranges (stop < start with positive step), empty ranges, or default
    # args, and has odd edge cases. See
    # https://stackoverflow.com/questions/7267226/range-for-floats
    # for better alternatives.
    # NOTE: the values are produced by linspace over [start, stop), so when
    # `step` does not evenly divide the span, the effective step differs
    # slightly from the one requested.
    n_steps = int(floor((stop - start) / step))
    return np.linspace(start, stop, num=n_steps, endpoint=False)
def spatial_information(ratemap):
    r"""Compute the spatial information rate of each unit's tuning curve.

    The specificity index examines the amount of information (in bits)
    that a single spike conveys about the animal's location (i.e., how
    well cell firing predicts the animal's location). The spatial
    information content of cell discharge was calculated using the
    formula:

        information content = \sum_i P_i (R_i/R) \log_2(R_i/R)

    where i is the bin number, P_i is the probability for occupancy of
    bin i, R_i is the mean firing rate for bin i, and R is the overall
    mean firing rate.

    NOTE: this implementation assumes uniform occupancy, i.e.
    P_i = 1 / n_bins, applied by dividing the summed information by the
    number of spatial bins at the end.

    Reference(s)
    ------------
    Markus, E. J., Barnes, C. A., McNaughton, B. L., Gladden, V. L.,
        and Skaggs, W. E. (1994). "Spatial information content and
        reliability of hippocampal CA1 neurons: effects of visual
        input", Hippocampus, 4(4), 410-421.

    Parameters
    ----------
    ratemap : array of shape (n_units, n_x) or (n_units, n_x, n_y)
        Rate map in Hz.

    Returns
    -------
    si : array of shape (n_units,)
        Spatial information (in bits) per unit.

    Raises
    ------
    ValueError
        If the ratemap contains no positive firing rates.
    TypeError
        If the ratemap is not 2- or 3-dimensional.
    """
    ratemap = copy.deepcopy(ratemap)  # don't mutate the caller's array
    # Clamp all rates up to the smallest positive rate so that the log2
    # terms below never see zeros (which would produce NaNs/-inf):
    positive_rates = ratemap[ratemap > 0]
    if positive_rates.size == 0:
        raise ValueError(
            "ratemap contains no positive firing rates; cannot compute "
            "spatial information")
    bkg_rate = positive_rates.min()
    ratemap[ratemap < bkg_rate] = bkg_rate

    number_of_spatial_bins = np.prod(ratemap.shape[1:])
    # Uniform occupancy assumed: P_i is applied via the final division by
    # number_of_spatial_bins, so the per-bin weight here is 1.
    Pi = 1

    if len(ratemap.shape) == 3:
        # 2D tuning curves, shape (n_units, n_x, n_y)
        R = ratemap.mean(axis=1).mean(axis=1)  # mean firing rate per unit
        Ri = np.transpose(ratemap, (2, 1, 0))
        si = np.sum(np.sum((Pi*((Ri / R)*np.log2(Ri / R)).T), axis=1), axis=1)
    elif len(ratemap.shape) == 2:
        # 1D tuning curves, shape (n_units, n_x)
        R = ratemap.mean(axis=1)  # mean firing rate per unit
        Ri = ratemap.T
        si = np.sum((Pi*((Ri / R)*np.log2(Ri / R)).T), axis=1)
    else:
        raise TypeError("rate map shape not supported / understood!")

    return si/number_of_spatial_bins
def spatial_sparsity(ratemap):
    """Compute the spatial sparsity of firing for each unit.

    NOTE(review): the previous docstring was duplicated from the spatial
    information function and documented a nonexistent `occupancy`
    parameter and an `si` return value; it has been corrected to match
    the actual signature and return value.

    Sparsity is computed here assuming uniform occupancy, i.e. every
    spatial bin is weighted equally (P_i == 1 below, with a final
    division by the number of bins).

    Reference(s)
    ------------
    Markus, E. J., Barnes, C. A., McNaughton, B. L., Gladden, V. L.,
        and Skaggs, W. E. (1994). "Spatial information content and
        reliability of hippocampal CA1 neurons: effects of visual
        input", Hippocampus, 4(4), 410-421.

    Parameters
    ----------
    ratemap : array of shape (n_units, n_bins) or (n_units, n_x, n_y)
        Rate map in Hz.

    Returns
    -------
    sparsity : array of shape (n_units,)
        Sparsity for each unit.

    NOTE(review): as written, the expression reduces to
    mean(R_i) / R**2 == 1/R, not the Skaggs sparsity
    (sum P_i R_i)**2 / sum(P_i R_i**2); the squaring of R_i looks like
    it may be missing — confirm against the reference before relying on
    these values.
    """
    # total number of spatial bins (product over all spatial dimensions)
    number_of_spatial_bins = np.prod(ratemap.shape[1:])
    # uniform occupancy weight per bin (currently unused; Pi == 1 below)
    weight_per_bin = 1/number_of_spatial_bins
    Pi = 1
    if len(ratemap.shape) == 3:
        # we have 2D tuning curve, (n_units, n_x, n_y)
        R = ratemap.mean(axis=1).mean(axis=1) # mean firing rate
        Ri = ratemap
        sparsity = np.sum(np.sum((Ri*Pi), axis=1), axis=1)/(R**2)
    elif len(ratemap.shape) == 2:
        # we have 1D tuning curve, (n_units, n_x)
        R = ratemap.mean(axis=1) # mean firing rate
        Ri = ratemap.T
        sparsity = np.sum((Pi*Ri.T), axis=1)/(R**2)
    else:
        raise TypeError("rate map shape not supported / understood!")
    # normalize the per-bin sum by the number of bins (uniform occupancy)
    return sparsity/number_of_spatial_bins
def _bst_get_bins_inside_interval(interval, ds, w=1):
"""(np.array) Return bin edges entirely contained inside an interval.
Bin edges always start at interval.start, and continue for as many
bins as would fit entirely inside the interval.
NOTE 1: there are (n+1) bin edges associated with n bins.
WARNING: if an interval is smaller than ds, then no bin will be
associated with the particular interval.
NOTE 2: nelpy uses half-open intervals [a,b), but if the bin
width divides b-a, then the bins will cover the entire
range. For example, if interval = [0,2) and ds = 1, then
bins = [0,1,2], even though [0,2] is not contained in
[0,2). There might be numerical precision deviations from this?
Parameters
----------
interval : EpochArray
EpochArray containing a single interval with a start, and stop
ds : float
Time bin width, in seconds.
w : number of bins to use in a sliding window mode. Default is 1 (no sliding window).
For example, 40 ms bins, with a stride of 5 ms, can be achieved by using (ds=0.005, w=8)
For now, w has to be an integer, and therefore 5 second bins, with a stride of 2 seconds
are not supported within this framework.
Returns
-------
bins : array
Bin edges in an array of shape (n+1,) where n is the number
of bins
centers : array
Bin centers in an array of shape (n,) where n is the number
of bins
"""
if interval.length < ds:
return None, None
n_bins = int(np.floor(interval.length / ds)) # number of bins
# linspace is better than arange for non-integral steps
bins = np.linspace(interval.start, interval.start + n_bins*ds, n_bins+1)
if w > 1:
wn_bins = np.max((1, n_bins - w + 1))
wn_bins = bins[:wn_bins+1] + w/2*ds - ds/2
bins = wn_bins
centers = bins[:-1] + (ds / 2)
return bins, centers
def _bst_get_bins(intervalArray, ds, w=1):
    """Compute bin edges/centers for every interval in an interval array.

    For each interval, only bins wholly contained inside the interval
    are generated (see _bst_get_bins_inside_interval). Intervals too
    short to hold a single bin contribute nothing.

    Parameters
    ----------
    intervalArray : EpochArray-like
        Iterable of intervals, each with a start and stop.
    ds : float
        Bin width, in seconds.
    w : int, optional
        Number of bins per sliding window (default 1; no sliding).

    Returns
    -------
    bins : np.ndarray
        Concatenated bin edges across all intervals.
    bin_centers : np.ndarray
        Concatenated bin centers across all intervals.
    binned_support : np.ndarray of shape (n_intervals_kept, 2)
        [first, last] bin-center indices (inclusive) per kept interval.
    support : same type as intervalArray
        The TRUE support actually covered by the bins (may be smaller
        than the original support, since partial bins are dropped).
    """
    b = [] # bin list
    c = [] # centers list
    left_edges = []
    right_edges = []
    # `counter` indexes into the flat list of bin centers; each kept
    # interval claims a contiguous run [left_edge, right_edge] of centers
    counter = 0
    for interval in intervalArray:
        bins, centers = _bst_get_bins_inside_interval(interval=interval, ds=ds, w=w)
        if bins is not None:
            left_edges.append(counter)
            counter += len(centers) - 1
            right_edges.append(counter)
            counter += 1
            b.extend(bins.tolist())
            c.extend(centers.tolist())
    bins = np.array(b)
    bin_centers = np.array(c)
    le = np.array(left_edges)
    le = le[:, np.newaxis]
    re = np.array(right_edges)
    re = re[:, np.newaxis]
    binned_support = np.hstack((le, re))
    lengths = np.atleast_1d((binned_support[:,1] - binned_support[:,0] + 1).squeeze())
    # each interval contributes (length+1) edges; cumsum recovers the
    # offsets of the first/last edge of every interval within `bins`
    support_starts = bins[np.insert(np.cumsum(lengths+1),0,0)[:-1]]
    support_stops = bins[np.insert(np.cumsum(lengths+1)-1,0,0)[1:]]
    supportdata = np.vstack([support_starts, support_stops]).T
    support = type(intervalArray)(supportdata) # set support to TRUE bin support
    return bins, bin_centers, binned_support, support
@keyword_deprecation(replace_x_with_y={'bw':'truncate'})
def get_mua(st, ds=None, sigma=None, truncate=None, _fast=True):
    """Compute the multiunit activity (MUA) from a spike train.

    Parameters
    ----------
    st : SpikeTrainArray
        SpikeTrainArray containing one or more units.
     -- OR --
    st : BinnedSpikeTrainArray
        BinnedSpikeTrainArray containing multiunit activity.
    ds : float, optional
        Time step in which to bin spikes. Default is 1 ms. Ignored
        (overridden by the existing bin size) when st is already binned.
    sigma : float, optional
        Standard deviation (in seconds) of Gaussian smoothing kernel.
        Default is 10 ms. If sigma==0 then no smoothing is applied.
    truncate : float, optional
        Bandwidth of the Gaussian filter. Default is 6.
    _fast : bool, optional
        If True (default), build the output AnalogSignalArray by writing
        its private attributes directly, skipping constructor-side
        validation for speed.

    Returns
    -------
    mua : AnalogSignalArray
        AnalogSignalArray with MUA, in Hz.
    """
    if ds is None:
        ds = 0.001 # 1 ms bin size
    if sigma is None:
        sigma = 0.01 # 10 ms standard deviation
    if truncate is None:
        truncate = 6
    if isinstance(st, core.EventArray):
        # bin spikes, so that we can count the spikes
        mua_binned = st.bin(ds=ds).flatten()
    elif isinstance(st, core.BinnedEventArray):
        mua_binned = st.flatten()
        # respect the existing bin size rather than the requested ds
        ds = mua_binned.ds
    else:
        raise TypeError('st has to be one of (SpikeTrainArray, BinnedSpikeTrainArray)')
    # make sure data type is float, so that smoothing works, and convert to rate
    mua_binned._data = mua_binned._data.astype(float) / ds
    # TODO: now that we can simply cast from BST to ASA and back, the following logic could be simplified:
    # put mua rate inside an AnalogSignalArray
    if _fast:
        # fast path: populate private fields directly (no validation)
        mua = core.AnalogSignalArray([], empty=True)
        mua._data = mua_binned.data
        mua._abscissa_vals = mua_binned.bin_centers
        mua._abscissa.support = mua_binned.support
    else:
        mua = core.AnalogSignalArray(mua_binned.data, timestamps=mua_binned.bin_centers, fs=1/ds)
    mua._fs = 1/ds
    if (sigma != 0) and (truncate > 0):
        mua = gaussian_filter(mua, sigma=sigma, truncate=truncate)
    return mua
def is_odd(n):
    """Return True when integer ``n`` is odd, False when it is even."""
    return n % 2 == 1
def swap_cols(arr, frm, to):
    """Swap two columns of a 2D np.array in place.

    For a 1D array, the two elements at positions ``frm`` and ``to``
    are swapped instead.
    """
    if arr.ndim == 1:
        arr[to], arr[frm] = arr[frm], arr[to]
    else:
        arr[:, [frm, to]] = arr[:, [to, frm]]
def swap_rows(arr, frm, to):
    """Swap two rows of a 2D np.array in place.

    For a 1D array, the two elements at positions ``frm`` and ``to``
    are swapped instead.
    """
    if arr.ndim == 1:
        arr[to], arr[frm] = arr[frm], arr[to]
    else:
        arr[[frm, to], :] = arr[[to, frm], :]
def pairwise(iterable):
    """Return a zip over all neighboring pairs of ``iterable``.

    Helper for is_sorted-style monotonicity checks.

    Example
    -------
    >>> list(pairwise([2, 3, 6, 8, 7]))
    [(2, 3), (3, 6), (6, 8), (8, 7)]
    """
    left, right = tee(iterable)
    # advance the second iterator by one so the streams are offset
    next(right, None)
    return zip(left, right)
def argsort(seq):
    """Return the list of indices that would sort ``seq``.

    See http://stackoverflow.com/questions/3071415/efficient-method-to-calculate-the-rank-vector-of-a-list-in-python
    """
    return sorted(range(len(seq)), key=lambda idx: seq[idx])
def is_sorted_general(iterable, key=lambda a, b: a <= b):
    """Check whether ``iterable`` is monotonic under the binary predicate ``key``.

    The default predicate (a <= b) tests for a non-decreasing sequence.
    Evaluation is lazy and short-circuits on the first violating pair.
    """
    left, right = tee(iterable)
    next(right, None)
    return all(key(a, b) for a, b in zip(left, right))
def is_sorted(x, chunk_size=None):
    """Return True if ``x`` is monotonic non-decreasing (sorted).

    NOTE: intended for a 1D array, list or tuple; higher-dimensional
    input raises ValueError.

    The check is performed chunk-by-chunk so that only ``chunk_size``
    elements need to be compared at a time; chunk_size = 100000 is
    probably a good choice.
    """
    if not isinstance(x, (tuple, list, np.ndarray)):
        raise TypeError("Unsupported type {}".format(type(x)))
    arr = np.atleast_1d(np.array(x).squeeze())
    if arr.ndim > 1:
        raise ValueError("Input x must be 1-dimensional")
    if chunk_size is None:
        chunk_size = 500000
    n = arr.size
    start = 0
    while start < n:
        # overlap consecutive chunks by one element so that the pair
        # straddling a chunk boundary is also checked
        end = int(min(n, start + chunk_size + 1))
        window = arr[start:end]
        if not np.all(window[:-1] <= window[1:]):
            return False
        start += chunk_size
    return True
def linear_merge(list1, list2):
    """Merge two SORTED lists in linear time.
    UPDATED TO WORK WITH PYTHON 3.7+ (see https://stackoverflow.com/questions/51700960/runtimeerror-generator-raised-stopiteration-every-time-i-try-to-run-app)
    Returns a generator of the merged result.

    NOTE: the try/except StopIteration wrappers throughout are PEP 479
    guards — under Python 3.7+ a StopIteration escaping a generator body
    becomes a RuntimeError, so every next() (and defensively, each
    yield) is wrapped to convert StopIteration into a clean return.

    Examples
    --------
    >>> a = [1, 3, 5, 7]
    >>> b = [2, 4, 6, 8]
    >>> [i for i in linear_merge(a, b)]
    [1, 2, 3, 4, 5, 6, 7, 8]
    >>> [i for i in linear_merge(b, a)]
    [1, 2, 3, 4, 5, 6, 7, 8]
    >>> a = [1, 2, 2, 3]
    >>> b = [2, 2, 4, 4]
    >>> [i for i in linear_merge(a, b)]
    [1, 2, 2, 2, 2, 3, 4, 4]
    """
    # if any of the lists are empty, return the other (possibly also
    # empty) list: (this is necessary because having either list1 or
    # list2 be empty makes this quite a bit more complicated...)
    if isinstance(list1, (list, np.ndarray)):
        if len(list1) == 0:
            list2 = iter(list2)
            while True:
                try:
                    yield next(list2)
                except StopIteration:
                    return
    if isinstance(list2, (list, np.ndarray)):
        if len(list2) == 0:
            list1 = iter(list1)
            while True:
                try:
                    yield next(list1)
                except StopIteration:
                    return
    list1 = iter(list1)
    list2 = iter(list2)
    value1 = next(list1)
    value2 = next(list2)
    # We'll normally exit this loop from a next() call raising
    # StopIteration, which is how a generator function exits anyway.
    while True:
        if value1 <= value2:
            # Yield the lower value.
            try:
                yield value1
            except StopIteration:
                return
            try:
                # Grab the next value from list1.
                value1 = next(list1)
            except StopIteration:
                # list1 is empty.  Yield the last value we received from list2, then
                # yield the rest of list2.
                try:
                    yield value2
                except StopIteration:
                    return
                while True:
                    try:
                        yield next(list2)
                    except StopIteration:
                        return
        else:
            try:
                yield value2
            except StopIteration:
                return
            try:
                value2 = next(list2)
            except StopIteration:
                # list2 is empty.
                try:
                    yield value1
                except StopIteration:
                    return
                while True:
                    try:
                        yield next(list1)
                    except StopIteration:
                        return
def get_mua_events(mua, fs=None, minLength=None, maxLength=None, PrimaryThreshold=None, minThresholdLength=None, SecondaryThreshold=None):
    """Determine MUA/PBEs from multiunit activity.

    MUA : multiunit activity
    PBE : population burst event

    Parameters
    ----------
    mua : AnalogSignalArray
        AnalogSignalArray with one signal, namely the multiunit firing rate [in Hz].
    fs : float, optional
        Sampling frequency of mua, in Hz. If not specified, it will be inferred from
        mua.fs
    minLength : float, optional
        Minimum event duration in seconds. Default is 50 ms.
    maxLength : float, optional
        Maximum event duration in seconds. Default is 750 ms.
    PrimaryThreshold : float, optional
        Default is mua.mean() + 3*mua.std().
    SecondaryThreshold : float, optional
        Default is mua.mean().
    minThresholdLength : float, optional
        Minimum time above PrimaryThreshold. Default is 0 s.

    Returns
    -------
    mua_epochs : EpochArray
        EpochArray containing all the MUA events / PBEs (empty if none
        were detected).

    Example
    -------
    mua = get_mua(spiketrain)
    mua_epochs = get_mua_events(mua)
    PBEs = get_PBEs(spiketrain, min_active=5)
        = get_PBEs(get_mua_events(get_mua(*)), spiketrain, min_active=5)
    """
    if fs is None:
        fs = mua.fs
    if fs is None:
        raise ValueError("fs must either be specified, or must be contained in mua!")
    if PrimaryThreshold is None:
        PrimaryThreshold = mua.mean() + 3*mua.std()
    if SecondaryThreshold is None:
        SecondaryThreshold = mua.mean()
    if minLength is None:
        minLength = 0.050 # 50 ms minimum event duration
    if maxLength is None:
        maxLength = 0.750 # 750 ms maximum event duration
    if minThresholdLength is None:
        minThresholdLength = 0.0
    # determine MUA event bounds:
    mua_bounds_idx, maxes, _ = get_events_boundaries(
        x = mua.data,
        PrimaryThreshold = PrimaryThreshold,
        SecondaryThreshold = SecondaryThreshold,
        minThresholdLength = minThresholdLength,
        minLength = minLength,
        maxLength = maxLength,
        ds = 1/fs
    )
    if len(mua_bounds_idx) == 0:
        logging.warning("no mua events detected")
        return core.EpochArray(empty=True)
    # store MUA bounds in an EpochArray
    mua_epochs = core.EpochArray(mua.time[mua_bounds_idx])
    return mua_epochs
@keyword_deprecation(replace_x_with_y={'bw':'truncate'})
def get_PBEs(data, fs=None, ds=None, sigma=None, truncate=None, unsorted_id=0,
             min_active=None, minLength=None, maxLength=None,
             PrimaryThreshold=None, minThresholdLength=None,
             SecondaryThreshold=None):
    """Determine PBEs from multiunit activity or spike trains.

    Definitions
    -----------
    MUA : multiunit activity
    PBE : population burst event

    Summary
    -------
    This function can be used to identify PBE epochs from spike trains, binned
    spike trains, or multiunit activity (in the form of an AnalogSignalArray).

    It is recommended to either pass in a SpikeTrainArray or a
    BinnedSpikeTrainArray, so that a `min_active` number of sorted units can be
    set.

    It is also recommended that the unsorted units (but not noise artifacts!)
    should be included in the spike train that is used to estimate the PBEs. By
    default, unit_id=0 is assumed to be unsorted, but this can be changed, or if
    no unsorted units are present, you can set unsorted_id=None. Equivalently,
    if min_active=0, then no restriction will apply, and the unsorted_id will
    have no effect on the final PBE epochs.

    Examples
    --------
    PBE_epochs = get_PBEs(mua_asa)
    PBE_epochs = get_PBEs(spiketrain, min_active=5)
    PBE_epochs = get_PBEs(binnedspiketrain, min_active=5)

    Parameters
    ----------
    data : AnalogSignalArray
        AnalogSignalArray with one signal, namely the multiunit firing rate [in Hz].
     -- OR --
    data : SpikeTrainArray
        SpikeTrainArray with multiple units, including unsorted unit(s), but
        excluding any noise artifects.
     -- OR --
    data : BinnedSpikeTrainArray
        BinnedSpikeTrainArray containing multiunit activity.
    fs : float, optional
        Sampling frequency of mua, in Hz. If not specified, it will be inferred
        from data.
    ds : float, optional
        Time step in which to bin spikes. Default is 1 ms.
    sigma : float, optional
        Standard deviation (in seconds) of Gaussian smoothing kernel.
        Default is 10 ms. If sigma==0 then no smoothing is applied.
    truncate : float, optional
        Bandwidth of the Gaussian filter. Default is 6.
    unsorted_id : int, optional
        unit_id of the unsorted unit. Default is 0. If no unsorted unit is
        present, then set unsorted_id = None
    min_active : int, optional
        Minimum number of active units per event, excluding unsorted unit.
        Default is 5.
    minLength : float, optional
        Minimum event duration in seconds. Default is 50 ms.
    maxLength : float, optional
        Maximum event duration in seconds. Default is 750 ms.
    PrimaryThreshold : float, optional
        Primary threshold to exceed. Default is mean() + 3*std()
    SecondaryThreshold : float, optional
        Secondary threshold to fall back to. Default is mean().
    minThresholdLength : float, optional
        Minimum duration to stay above PrimaryThreshold. Default is 0 ms.

    Returns
    -------
    PBE_epochs : EpochArray
        EpochArray containing all the PBEs.

    Future improvements
    -------------------
    As of now, it is possible, but not easy to specify the Primary and Secondary
    thresholds for event detection. A slight change in API might be needed to
    make this specification more flexible.
    """
    if sigma is None:
        sigma = 0.01 # 10 ms standard deviation
    if truncate is None:
        truncate = 6
    if isinstance(data, core.AnalogSignalArray):
        # if we have only mua, then we cannot set (ds, unsorted_id, min_active)
        if ds is not None:
            raise ValueError('if data is an AnalogSignalArray then ds cannot be specified!')
        if unsorted_id:
            raise ValueError('if data is an AnalogSignalArray then unsorted_id cannot be specified!')
        if min_active is not None:
            raise ValueError('if data is an AnalogSignalArray then min_active cannot be specified!')
        mua = data
        mua._data = mua._data.astype(float)
        if (sigma != 0) and (truncate > 0):
            mua = gaussian_filter(mua, sigma=sigma, truncate=truncate)
    elif isinstance(data, (core.EventArray, core.BinnedEventArray)):
        # set default parameter values:
        if ds is None:
            ds = 0.001 # default 1 ms
        if min_active is None:
            min_active = 5
        mua = get_mua(data, ds=ds, sigma=sigma, truncate=truncate, _fast=True)
    else:
        raise TypeError('data has to be one of (AnalogSignalArray, SpikeTrainArray, BinnedSpikeTrainArray)')
    # set default parameter values:
    if fs is None:
        fs = mua.fs
    if minLength is None:
        minLength = 0.050 # 50 ms minimum event duration
    if maxLength is None:
        maxLength = 0.750 # 750 ms maximum event duration
    if minThresholdLength is None:
        minThresholdLength = 0.0
    # if PrimaryThreshold is None:
    #     PrimaryThreshold =
    # if SecondaryThreshold is None:
    #     SecondaryThreshold =
    PBE_epochs = get_mua_events(mua=mua,
                                fs=fs,
                                minLength=minLength,
                                maxLength=maxLength,
                                PrimaryThreshold=PrimaryThreshold,
                                minThresholdLength=minThresholdLength,
                                SecondaryThreshold=SecondaryThreshold)
    # now require min_active number of sorted cells
    if isinstance(data, (core.EventArray, core.BinnedEventArray)):
        if min_active > 0:
            if unsorted_id is not None:
                # remove unsorted unit, if present:
                unit_ids = copy.deepcopy(data.unit_ids)
                try:
                    unit_ids.remove(unsorted_id)
                except ValueError:
                    pass
                # data_ = data._unit_subset(unit_ids)
                data_ = data.loc[:,unit_ids]
            else:
                data_ = data
            # determine number of active units per epoch:
            n_active = np.array([snippet.n_active for snippet in data_[PBE_epochs]])
            # NOTE(review): this keeps epochs with STRICTLY MORE than
            # min_active units, although the docstring calls min_active a
            # "minimum" — confirm whether `>=` was intended.
            active_epochs_idx = np.argwhere(n_active > min_active).squeeze()
            # only keep those epochs where sufficiently many units are active:
            PBE_epochs = PBE_epochs[active_epochs_idx]
    return PBE_epochs
def get_contiguous_segments(data, *, step=None, assume_sorted=None,
                            in_core=True, index=False, inclusive=False,
                            fs=None, sort=None, in_memory=None):
    """Compute contiguous segments (seperated by step) in a list.

    Note! This function requires that a sorted list is passed.
    It first checks if the list is sorted O(n), and only sorts O(n log(n))
    if necessary. But if you know that the list is already sorted,
    you can pass assume_sorted=True, in which case it will skip
    the O(n) check.

    Returns an array of size (n_segments, 2), with each row
    being of the form ([start, stop]) [inclusive, exclusive].

    NOTE: when possible, use assume_sorted=True, and step=1 as explicit
          arguments to function call.

    WARNING! Step is robustly computed in-core (i.e., when in_core is
         True), but is assumed to be 1 when out-of-core.

    Example
    -------
    >>> data = [1,2,3,4,10,11,12]
    >>> get_contiguous_segments(data)
    ([1,5], [10,13])
    >>> get_contiguous_segments(data, index=True)
    ([0,4], [4,7])

    Parameters
    ----------
    data : array-like
        1D array of sequential data, typically assumed to be integral (sample
        numbers).
    step : float, optional
        Expected step size for neighboring samples. Default uses numpy to find
        the median, but it is much faster and memory efficient to explicitly
        pass in step=1.
    assume_sorted : bool, optional
        If assume_sorted == True, then data is not inspected or re-ordered. This
        can be significantly faster, especially for out-of-core computation, but
        it should only be used when you are confident that the data is indeed
        sorted, otherwise the results from get_contiguous_segments will not be
        reliable.
    in_core : bool, optional
        If True, then we use np.diff which requires all the data to fit
        into memory simultaneously, otherwise we use groupby, which uses
        a generator to process potentially much larger chunks of data,
        but also much slower.
    index : bool, optional
        If True, the indices of segment boundaries will be returned. Otherwise,
        the segment boundaries will be returned in terms of the data itself.
        Default is False.
    inclusive : bool, optional
        If True, the boundaries are returned as [(inclusive idx, inclusive idx)]
        Default is False, and can only be used when index==True.

    Deprecated
    ----------
    in_memory : bool, optional
        This is equivalent to the new 'in-core'.
    sort : bool, optional
        This is equivalent to the new 'assume_sorted'
    fs : sampling rate (Hz) used to extend half-open interval support by 1/fs
    """
    # handle deprecated API calls:
    # NOTE(review): these truthiness checks mean an EXPLICIT False for
    # in_memory / sort is silently ignored (treated as "not passed") —
    # confirm whether `is not None` was intended.
    if in_memory:
        in_core = in_memory
        logging.warning("'in_memory' has been deprecated; use 'in_core' instead")
    if sort:
        assume_sorted = sort
        logging.warning("'sort' has been deprecated; use 'assume_sorted' instead")
    if fs:
        step = 1/fs
        logging.warning("'fs' has been deprecated; use 'step' instead")

    if inclusive:
        assert index, "option 'inclusive' can only be used with 'index=True'"
    if in_core:
        data = np.asarray(data)

        if not assume_sorted:
            if not is_sorted(data):
                data = np.sort(data)  # algorithm assumes sorted list

        if step is None:
            step = np.median(np.diff(data))

        # assuming that data(t1) is sampled somewhere on [t, t+1/fs) we have a 'continuous' signal as long as
        # data(t2 = t1+1/fs) is sampled somewhere on [t+1/fs, t+2/fs). In the most extreme case, it could happen
        # that t1 = t and t2 = t + 2/fs, i.e. a difference of 2 steps.

        if np.any(np.diff(data) < step):
            logging.warning("some steps in the data are smaller than the requested step size.")

        # a "break" occurs wherever consecutive samples are >= 2 steps apart
        breaks = np.argwhere(np.diff(data)>=2*step)
        starts = np.insert(breaks+1, 0, 0)
        stops = np.append(breaks, len(data)-1)
        bdries = np.vstack((data[starts], data[stops] + step)).T
        if index:
            if inclusive:
                indices = np.vstack((starts, stops)).T
            else:
                indices = np.vstack((starts, stops + 1)).T
            return indices
    else:
        from itertools import groupby
        from operator import itemgetter

        if not assume_sorted:
            if not is_sorted(data):
                # data = np.sort(data)  # algorithm assumes sorted list
                raise NotImplementedError("out-of-core sorting has not been implemented yet...")

        if step is None:
            step = 1

        bdries = []

        if not index:
            # group consecutive samples by (index - value), which is
            # constant within a run of step-1 contiguous values
            for k, g in groupby(enumerate(data), lambda ix: (ix[0] - ix[1])):
                f = itemgetter(1)
                gen = (f(x) for x in g)
                start = next(gen)
                stop = start
                for stop in gen:
                    pass
                bdries.append([start, stop + step])
        else:
            counter = 0
            for k, g in groupby(enumerate(data), lambda ix: (ix[0] - ix[1])):
                f = itemgetter(1)
                gen = (f(x) for x in g)
                _ = next(gen)
                start = counter
                stop = start
                for _ in gen:
                    stop +=1
                if inclusive:
                    bdries.append([start, stop])
                else:
                    bdries.append([start, stop + 1])
                counter = stop + 1

    return np.asarray(bdries)
def get_direction(asa, *, sigma=None):
    """Return epochs during which an animal was running left to right, or right
    to left.

    Parameters
    ----------
    asa : AnalogSignalArray 1D
        AnalogSignalArray containing the 1D position data.
    sigma : float, optional
        Smoothing to apply to position (x) before computing gradient estimate.
        Default is 0.

    Returns
    -------
    l2r, r2l : EpochArrays
        EpochArrays corresponding to left-to-right and right-to-left movement.
    """
    if sigma is None:
        sigma = 0
    if not isinstance(asa, core.AnalogSignalArray):
        raise TypeError('AnalogSignalArray expected!')
    assert asa.n_signals == 1, "1D AnalogSignalArray expected!"

    # sign of the (optionally smoothed) velocity: +1 = rightward, -1 = leftward
    direction = dxdt_AnalogSignalArray(asa.smooth(sigma=sigma),
                                       rectify=False).data
    direction[direction>=0] = 1
    direction[direction<0] = -1
    direction = direction.squeeze()

    # runs of consecutive samples with the same sign become epochs
    l2r = get_contiguous_segments(np.argwhere(direction>0).squeeze(), step=1)
    l2r[:,1] -= 1 # change bounds from [inclusive, exclusive] to [inclusive, inclusive]
    l2r = core.EpochArray(asa.abscissa_vals[l2r])

    r2l = get_contiguous_segments(np.argwhere(direction<0).squeeze(), step=1)
    r2l[:,1] -= 1 # change bounds from [inclusive, exclusive] to [inclusive, inclusive]
    r2l = core.EpochArray(asa.abscissa_vals[r2l])

    return l2r, r2l
class PrettyBytes(int):
    """int subclass that prints a byte count in human-readable units.

    Behaves like an int in every other respect; only str()/repr() are
    overridden to render bytes / kilobytes / megabytes / gigabytes /
    terabytes with three decimal places (bytes are shown exactly).
    """
    def __init__(self, val):
        self.val = val

    def __str__(self):
        if self.val < 1024:
            return '{} bytes'.format(self.val)
        elif self.val < 1024**2:
            return '{:.3f} kilobytes'.format(self.val/1024)
        elif self.val < 1024**3:
            return '{:.3f} megabytes'.format(self.val/1024**2)
        elif self.val < 1024**4:
            return '{:.3f} gigabytes'.format(self.val/1024**3)
        # bug fix: previously there was no final branch, so __str__
        # returned None for values >= 1 TB and str() raised TypeError
        return '{:.3f} terabytes'.format(self.val/1024**4)

    def __repr__(self):
        return self.__str__()
class PrettyInt(int):
    """int subclass whose str()/repr() use thousands separators."""
    def __init__(self, val):
        # keep a copy of the value used for formatting
        self.val = val

    def __str__(self):
        return format(self.val, ',')

    def __repr__(self):
        return format(self.val, ',')
class PrettyDuration(float):
    """Time duration with pretty print.

    Behaves like a float, and can always be cast to a float.
    """

    def __init__(self, seconds):
        self.duration = seconds

    def __str__(self):
        return self.time_string(self.duration)

    def __repr__(self):
        return self.time_string(self.duration)

    @staticmethod
    def to_dhms(seconds):
        """convert seconds into hh:mm:ss:ms

        Returns a namedtuple Time(pos, dd, hh, mm, ss, ms) where `pos`
        is True for non-negative input and `ms` is in milliseconds
        (rounded to 0.1 ms).
        """
        pos = seconds >= 0
        if not pos:
            seconds = -seconds
        # fractional part -> milliseconds, rounded to one decimal place
        ms = seconds % 1; ms = round(ms*10000)/10
        seconds = floor(seconds)
        m, s = divmod(seconds, 60)
        h, m = divmod(m, 60)
        d, h = divmod(h, 24)
        Time = namedtuple('Time', 'pos dd hh mm ss ms')
        time = Time(pos=pos, dd=d, hh=h, mm=m, ss=s, ms=ms)
        return time

    @staticmethod
    def time_string(seconds):
        """returns a formatted time string."""
        if np.isinf(seconds):
            return 'inf'
        # NOTE: the ms field of to_dhms is unpacked into `s` here, so
        # within this method `s` holds MILLISECONDS and `ss` seconds
        pos, dd, hh, mm, ss, s = PrettyDuration.to_dhms(seconds)
        if s > 0:
            if mm == 0:
                # in this case, represent milliseconds in terms of
                # seconds (i.e. a decimal)
                sstr = str(s/1000).lstrip('0')
                if s >= 999.5:
                    # rounded up to a full second; carry into ss and
                    # propagate through minutes/hours/days
                    ss += 1
                    s = 0
                    sstr = ""
                    # now propagate the carry:
                    if ss == 60:
                        mm += 1
                        ss = 0
                    if mm == 60:
                        hh +=1
                        mm = 0
                    if hh == 24:
                        dd += 1
                        hh = 0
            else:
                # for all other cases, milliseconds will be represented
                # as an integer
                if s >= 999.5:
                    # same carry-propagation as above, but without the
                    # decimal-seconds representation
                    ss += 1
                    s = 0
                    sstr = ""
                    # now propagate the carry:
                    if ss == 60:
                        mm += 1
                        ss = 0
                    if mm == 60:
                        hh +=1
                        mm = 0
                    if hh == 24:
                        dd += 1
                        hh = 0
                else:
                    sstr = ":{:03d}".format(int(s))
        else:
            sstr = ""
        if dd > 0:
            daystr = "{:01d} days ".format(dd)
        else:
            daystr = ""
        if hh > 0:
            timestr = daystr + "{:01d}:{:02d}:{:02d}{} hours".format(hh, mm, ss, sstr)
        elif mm > 0:
            timestr = daystr + "{:01d}:{:02d}{} minutes".format(mm, ss, sstr)
        elif ss > 0:
            timestr = daystr + "{:01d}{} seconds".format(ss, sstr)
        else:
            timestr = daystr +"{} milliseconds".format(s)
        if not pos:
            timestr = "-" + timestr
        return timestr

    def __add__(self, other):
        """a + b"""
        return PrettyDuration(self.duration + other)

    def __radd__(self, other):
        """b + a"""
        return self.__add__(other)

    def __sub__(self, other):
        """a - b"""
        return PrettyDuration(self.duration - other)

    def __rsub__(self, other):
        """b - a"""
        return other - self.duration

    def __mul__(self, other):
        """a * b"""
        return PrettyDuration(self.duration * other)

    def __rmul__(self, other):
        """b * a"""
        return self.__mul__(other)

    def __truediv__(self, other):
        """a / b"""
        return PrettyDuration(self.duration / other)
def shrinkMatColsTo(mat, numCols):
    """Shrink an (N, M1) matrix down to an (N, M2) matrix, where M2 <= M1.

    Each row is resampled independently with linear interpolation
    (scipy.ndimage.zoom, order=1).

    Parameters
    ----------
    mat : np.ndarray of shape (N, M1)
        Input matrix.
    numCols : int
        Desired number of columns M2.

    Returns
    -------
    np.ndarray of shape (N, numCols)
        The column-shrunk matrix.
    """
    import scipy.ndimage
    numCells = mat.shape[0]
    numColsMat = mat.shape[1]
    a = np.zeros((numCells, numCols))
    for row in np.arange(numCells):
        # fix: use scipy.ndimage.zoom — the scipy.ndimage.interpolation
        # namespace was deprecated and removed in modern SciPy
        a[row, :] = scipy.ndimage.zoom(input=mat[row, :],
                                       zoom=(numCols/numColsMat),
                                       order=1)
    return a
def find_threshold_crossing_events(x, threshold, *, mode='above'):
    """Find threshold crossing events. INCLUSIVE

    Parameters
    ----------
    x : numpy array
        Input data
    threshold : float
        The value whose crossing triggers an event
    mode : string, optional in ['above', 'below']; default 'above'
        event triggering above, or below threshold

    Returns
    -------
    eventlist : np.ndarray of shape (n_events, 2)
        [first, last] index (both inclusive) of each threshold-crossing run
    eventmax : np.ndarray of shape (n_events,)
        Extreme value of x within each event (its max; for mode='below'
        this is still the maximum of the sub-threshold run)
    """
    from itertools import groupby
    from operator import itemgetter

    if mode == 'below':
        cross_threshold = np.where(x <= threshold, 1, 0)
    elif mode == 'above':
        cross_threshold = np.where(x >= threshold, 1, 0)
    else:
        raise NotImplementedError(
            "mode {} not understood for find_threshold_crossing_events".format(str(mode)))
    eventlist = []
    eventmax = []
    # group consecutive samples by whether they cross the threshold;
    # truthy groups (k == 1) are events
    for k, v in groupby(enumerate(cross_threshold), key=itemgetter(1)):
        if k:
            v = list(v)
            first, last = v[0][0], v[-1][0]
            eventlist.append([first, last])
            # the slice is guaranteed non-empty (first <= last), so max()
            # cannot fail; the old try/except-with-print debugging scaffold
            # was removed — it could silently skip an eventmax entry and
            # misalign the two returned arrays
            eventmax.append(x[first:last + 1].max())
    eventmax = np.asarray(eventmax)
    eventlist = np.asarray(eventlist)
    return eventlist, eventmax
def get_events_boundaries(x, *, PrimaryThreshold=None,
                          SecondaryThreshold=None,
                          minThresholdLength=None, minLength=None,
                          maxLength=None, ds=None, mode='above'):
    """get event boundaries such that event.max >= PrimaryThreshold
    and the event extent is defined by SecondaryThreshold.

    Note that when PrimaryThreshold==SecondaryThreshold, then this is a
    simple threshold crossing algorithm.

    NB. minLength and maxLength are applied to the SecondaryThreshold
        events, whereas minThresholdLength is applied to the
        PrimaryThreshold events.

    Parameters
    ----------
    x : numpy array
        Input data
    mode : string, optional in ['above', 'below']; default 'above'
        event triggering above, or below threshold
    PrimaryThreshold : float, optional
        If mode=='above', requires that event.max >= PrimaryThreshold
        If mode=='below', requires that event.min <= PrimaryThreshold
    SecondaryThreshold : float, optional
        The value that defines the event extent
    minThresholdLength : float, optional
        Minimum duration for which the PrimaryThreshold is crossed
    minLength : float, optional
        Minimum duration for which the SecondaryThreshold is crossed
    maxLength : float, optional
        Maximum duration for which the SecondaryThreshold is crossed
    ds : float, optional
        Time step of the input data x

    Returns
    -------
    returns bounds, maxes, events
        where bounds <==> SecondaryThreshold to SecondaryThreshold, inclusive
              maxes  <==> maximum value during each event
              events <==> PrimaryThreshold to PrimaryThreshold, inclusive
    """
    # TODO: x must be a numpy array
    # TODO: ds is often used, but we have no default, and no check for when
    #       it is left as None.
    # TODO: the Docstring should equally be improved.

    x = x.squeeze()
    if x.ndim > 1:
        raise TypeError("multidimensional arrays not supported!")

    if PrimaryThreshold is None: # by default, threshold is 3 SDs above mean of x
        PrimaryThreshold = np.mean(x) + 3*np.std(x)

    if SecondaryThreshold is None: # by default, revert back to mean of x
        SecondaryThreshold = np.mean(x) # + 0*np.std(x)

    events, _ = \
        find_threshold_crossing_events(x=x,
                                       threshold=PrimaryThreshold,
                                       mode=mode)

    # apply minThresholdLength criterion:
    # NOTE: boolean masks are applied directly (events[mask]); the previous
    # events[[mask]] form — a boolean array wrapped in a list — was
    # deprecated and later removed from NumPy fancy indexing
    if minThresholdLength is not None and len(events) > 0:
        durations = (events[:,1] - events[:,0] + 1) * ds
        events = events[durations >= minThresholdLength]

    if len(events) == 0:
        bounds, maxes, events = [], [], []
        logging.warning("no events satisfied criteria")
        return bounds, maxes, events

    # Find periods where value is > SecondaryThreshold; note that the previous periods should be within these!
    if mode == 'above':
        assert SecondaryThreshold <= PrimaryThreshold, \
            "Secondary Threshold by definition should include more data than Primary Threshold"
    elif mode == 'below':
        assert SecondaryThreshold >= PrimaryThreshold, \
            "Secondary Threshold by definition should include more data than Primary Threshold"
    else:
        raise NotImplementedError(
            "mode {} not understood for find_threshold_crossing_events".format(str(mode)))

    bounds, broader_maxes = \
        find_threshold_crossing_events(x=x,
                                       threshold=SecondaryThreshold,
                                       mode=mode)

    # Find corresponding big windows for potential events
    #  Specifically, look for closest left edge that is just smaller
    outer_boundary_indices = np.searchsorted(bounds[:,0], events[:,0], side='right')
    #  searchsorted finds the index after, so subtract one to get index before
    outer_boundary_indices = outer_boundary_indices - 1

    # Find extended boundaries for events by pairing to larger windows
    #   (Note that there may be repeats if the larger window contains multiple > 3SD sections)
    bounds = bounds[outer_boundary_indices,:]
    maxes = broader_maxes[outer_boundary_indices]

    if minLength is not None and len(events) > 0:
        durations = (bounds[:,1] - bounds[:,0] + 1) * ds
        # all three arrays are filtered with the same mask to stay aligned
        keep = durations >= minLength
        bounds = bounds[keep]
        maxes = maxes[keep]
        events = events[keep]

    if maxLength is not None and len(events) > 0:
        durations = (bounds[:,1] - bounds[:,0] + 1) * ds
        keep = durations <= maxLength
        bounds = bounds[keep]
        maxes = maxes[keep]
        events = events[keep]

    if len(events) == 0:
        bounds, maxes, events = [], [], []
        logging.warning("no events satisfied criteria")
        return bounds, maxes, events

    # Now, since all that we care about are the larger windows, so we should get rid of repeats
    _, unique_idx = np.unique(bounds[:,0], return_index=True)
    bounds = bounds[unique_idx,:] # SecondaryThreshold to SecondaryThreshold
    maxes = maxes[unique_idx]     # maximum value during event
    events = events[unique_idx,:] # PrimaryThreshold to PrimaryThreshold

    return bounds, maxes, events
def signal_envelope1D(data, *, sigma=None, fs=None):
    """Deprecated alias for signal_envelope_1d; see that function for details."""
    # BUG FIX: the logging module has no 'warnings' attribute; the original
    # call `logging.warnings(...)` raised AttributeError instead of warning.
    logging.warning("'signal_envelope1D' is deprecated; use 'signal_envelope_1d' instead!")
    return signal_envelope_1d(data, sigma=sigma, fs=fs)
def signal_envelope_1d(data, *, sigma=None, fs=None):
    """Finds the signal envelope by taking the absolute value
    of the Hilbert transform
    Parameters
    ----------
    data : numpy array, list, or RegularlySampledAnalogSignalArray
        Input data
        If data is a numpy array, it is expected to have shape
        (n_signals, n_samples)
        If data is a list, it is expected to have length n_signals,
        where each sublist has length n_samples, i.e. data is not
        jagged
    sigma : float, optional
        Standard deviation of the Gaussian kernel used to
        smooth the envelope after applying the Hilbert transform.
        Units of seconds. Default is 4 ms
    fs : float, optional
        Sampling rate of the signal
    Returns
    -------
    out : same type as the input object
        An object containing the signal envelope
    TODO: this is not yet epoch-aware!
    UPDATE: this is actually epoch-aware by now!
    """
    if sigma is None:
        sigma = 0.004 # 4 ms standard deviation
    if fs is None:
        # raw arrays/lists carry no sampling rate, so fs is mandatory for them
        if isinstance(data, (np.ndarray, list)):
            raise ValueError("sampling frequency must be specified!")
        elif isinstance(data, core.RegularlySampledAnalogSignalArray):
            fs = data.fs
    if isinstance(data, (np.ndarray, list)):
        data_array = np.array(data)
        n_dims = np.array(data).ndim
        assert n_dims <= 2, "Only 1D signals supported!"
        # normalize shape to (n_signals, n_samples)
        # NOTE(review): for 1D input the envelope is returned with this 2D
        # shape (1, n_samples) — it is not squeezed back; confirm callers expect that.
        if n_dims == 1:
            input_data = data_array.reshape((1, data_array.size))
        else:
            input_data = data_array
        n_signals, n_samples = input_data.shape
        # Compute number of samples to compute fast FFTs
        padlen = next_fast_len(n_samples) - n_samples
        # Pad data
        paddeddata = np.hstack( (input_data, np.zeros((n_signals, padlen))) )
        # Use hilbert transform to get an envelope
        envelope = np.absolute(hilbert(paddeddata, axis=-1))
        # free up memory
        del paddeddata
        # Truncate results back to original length
        envelope = envelope[..., :n_samples]
        if sigma:
            # Smooth envelope with a gaussian (sigma = 4 ms default)
            # sigma (seconds) * fs (samples/second) -> kernel SD in samples
            EnvelopeSmoothingSD = sigma*fs
            smoothed_envelope = scipy.ndimage.filters.gaussian_filter1d(envelope, EnvelopeSmoothingSD,
                                                                        mode='constant', axis=-1)
            envelope = smoothed_envelope
        if isinstance(data, list):
            # mirror the input container type
            envelope = envelope.tolist()
        return envelope
    elif isinstance(data, core.RegularlySampledAnalogSignalArray):
        # Only ASA data of shape (n_signals, n_timepoints) -> 2D currently supported
        assert data.data.ndim == 2
        cum_lengths = np.insert(np.cumsum(data.lengths), 0, 0)
        newasa = data.copy()
        # for segment in data:
        # process each epoch independently so the Hilbert transform does not
        # bleed across epoch boundaries (this is what makes it epoch-aware)
        for idx in range(data.n_epochs):
            # print('hilberting epoch {}/{}'.format(idx+1, data.n_epochs))
            segment_data = data._data[:,cum_lengths[idx]:cum_lengths[idx+1]]
            n_signals, n_samples = segment_data.shape
            # Compute number of samples to compute fast FFTs:
            padlen = next_fast_len(n_samples) - n_samples
            # Pad data
            paddeddata = np.hstack( (segment_data, np.zeros((n_signals, padlen))) )
            # Use hilbert transform to get an envelope
            envelope = np.absolute(hilbert(paddeddata, axis=-1))
            # free up memory
            del paddeddata
            # Truncate results back to original length
            envelope = envelope[..., :n_samples]
            if sigma:
                # Smooth envelope with a gaussian (sigma = 4 ms default)
                EnvelopeSmoothingSD = sigma*fs
                smoothed_envelope = scipy.ndimage.filters.gaussian_filter1d(envelope, EnvelopeSmoothingSD,
                                                                            mode='constant', axis=-1)
                envelope = smoothed_envelope
            newasa._data[:,cum_lengths[idx]:cum_lengths[idx+1]] = np.atleast_2d(envelope)
        return newasa
def nextpower(n, base=2.0):
    """Return the next integral power of `base` greater than or equal to `n`.
    Specifically, return m such that
        m >= n
        m == base**x
    where x is an integer. Use base argument to specify a base other than 2.
    This is useful for ensuring fast FFT sizes.
    From https://gist.github.com/bhawkins/4479607 (Brian Hawkins)

    Parameters
    ----------
    n : scalar or np.ndarray
        Value(s) to round up to the next power of `base`.
    base : float, optional
        Base of the power. Default is 2.0.

    Returns
    -------
    int or np.ndarray of int
        Next power(s) of `base`, matching the scalar/array nature of `n`.
    """
    x = base**ceil (log (n) / log (base))
    # IDIOM: isinstance (rather than `type(n) ==`) also accepts ndarray subclasses
    if isinstance(n, np.ndarray):
        return np.asarray (x, dtype=int)
    else:
        return int (x)
def nextfastpower(n):
    """Return the next integral power of small factors greater than the given
    number. Specifically, return m such that
        m >= n
        m == 2**x * 3**y * 5**z
    where x, y, and z are integers.
    This is useful for ensuring fast FFT sizes.
    From https://gist.github.com/bhawkins/4479607 (Brian Hawkins)
    See also http://scipy.github.io/devdocs/generated/scipy.fftpack.next_fast_len.html
    """
    # small inputs: every integer >= 1 up to 6 is already 5-smooth
    if n < 7:
        return max (n, 1)
    # x, y, and z are all bounded from above by the formula of nextpower.
    # Compute all possible combinations for powers of 3 and 5.
    # (Not too many for reasonable FFT sizes.)
    def power_series (x, base):
        # all powers base**0 .. base**nmax, where base**nmax first reaches x
        # (returned as floats via np.logspace)
        nmax = ceil (log (x) / log (base))
        return np.logspace (0.0, nmax, num=nmax+1, base=base)
    n35 = np.outer (power_series (n, 3.0), power_series (n, 5.0))
    # keep only the products 3**y * 5**z that do not already exceed n
    n35 = n35[n35<=n]
    # Lump the powers of 3 and 5 together and solve for the powers of 2.
    n2 = nextpower (n / n35)
    # smallest candidate 2**x * 3**y * 5**z that is >= n
    return int (min (n2 * n35))
@keyword_deprecation(replace_x_with_y={'bw':'truncate'})
def gaussian_filter(obj, *, fs=None, sigma=None, truncate=None, inplace=False, mode=None, cval=None, within_intervals=False):
    """Smooths with a Gaussian kernel.
    Smoothing is applied along the abscissa, and the same smoothing is applied to each
    signal in the RegularlySampledAnalogSignalArray, or to each unit in a BinnedSpikeTrainArray.
    Smoothing is applied ACROSS intervals, but smoothing WITHIN intervals is also supported.
    Parameters
    ----------
    obj : RegularlySampledAnalogSignalArray or BinnedSpikeTrainArray.
    fs : float, optional
        Sampling rate (in obj.base_unit^-1) of obj. If not provided, it will
        be inferred.
    sigma : float, optional
        Standard deviation of Gaussian kernel, in obj.base_units. Default is 0.05
        (50 ms if base_unit=seconds).
    truncate : float, optional
        Bandwidth outside of which the filter value will be zero. Default is 4.0.
    inplace : bool
        If True the data will be replaced with the smoothed data.
        Default is False.
    mode : {'reflect', 'constant', 'nearest', 'mirror', 'wrap'}, optional
        The mode parameter determines how the array borders are handled,
        where cval is the value when mode is equal to 'constant'. Default is
        'reflect'.
    cval : scalar, optional
        Value to fill past edges of input if mode is 'constant'. Default is 0.0.
    within_intervals : boolean, optional
        If True, then smooth within each epoch. Otherwise smooth across epochs.
        Default is False.
        Note that when mode = 'wrap', then smoothing within epochs aren't affected
        by wrapping.
    Returns
    -------
    out : same type as obj
        An object with smoothed data is returned.
    """
    # defaults: 50 ms kernel, truncate at 4 SDs, 'reflect' border handling
    if sigma is None:
        sigma = 0.05
    if truncate is None:
        truncate = 4
    if mode is None:
        mode = 'reflect'
    if cval is None:
        cval = 0.0
    if not inplace:
        out = copy.deepcopy(obj)
    else:
        out = obj
    # infer the sampling rate from the object when not given explicitly
    if isinstance(out, core.RegularlySampledAnalogSignalArray):
        if fs is None:
            fs = out.fs
        if fs is None:
            raise ValueError("fs must either be specified, or must be contained in the {}!".format(out.type_name))
    elif isinstance(out, core.BinnedEventArray):
        bst = out
        if fs is None:
            fs = 1/bst.ds
        if fs is None:
            raise ValueError("fs must either be specified, or must be contained in the {}!".format(out.type_name))
    else:
        raise NotImplementedError("gaussian_filter for {} is not yet supported!".format(str(type(out))))
    # convert sigma from base units (e.g. seconds) into number of samples
    sigma = sigma * fs
    if not within_intervals:
        # see https://stackoverflow.com/questions/18697532/gaussian-filtering-a-image-with-nan-in-python
        # (1) if smoothing across intervals, we work on a merged support
        # (2) build abscissa_vals, including existing ones, and out-of-support ones
        # (3) to smooth U, build auxiliary arrays V and W, with (V=U).nan=0, and (W=1).nan=0
        # (4) Z = smooth(V)/smooth(W)
        # (5) only keep original support, and original abscissa_vals
        if isinstance(out, (core.RegularlySampledAnalogSignalArray, core.BinnedEventArray)):
            support = out._abscissa.support.merge()
            if not support.domain.is_finite:
                support.domain = (support.start, support.stop) #TODO: #FIXME might come from abscissa definition, and not from support
            # enumerate abscissa values that fall in the gaps between intervals
            missing_abscissa_vals = []
            for interval in (~support):
                missing_vals = frange(interval.start, interval.stop, 1/fs)
                missing_abscissa_vals.extend(missing_vals)
            if isinstance(out, core.RegularlySampledAnalogSignalArray):
                n_signals = out.n_signals
                n_samples = out.n_samples
            elif isinstance(out, core.BinnedEventArray):
                n_signals = out.n_series
                n_samples = out.n_bins
            # V holds the data (zeros where missing); W is an indicator that
            # is 1 at real samples and 0 at the padded abscissa values
            V = np.zeros((n_signals, n_samples + len(missing_abscissa_vals)))
            W = np.ones(V.shape)
            all_abscissa_vals = np.sort(np.append(out._abscissa_vals, missing_abscissa_vals))
            data_idx = np.searchsorted(all_abscissa_vals, out._abscissa_vals)
            missing_idx = np.searchsorted(all_abscissa_vals, missing_abscissa_vals)
            V[:, data_idx] = out.data
            W[:, missing_idx] = 0
            # smooth data and indicator separately, then normalize:
            # Z = smooth(V)/smooth(W) corrects for the zero-filled gaps
            VV = scipy.ndimage.filters.gaussian_filter(V, sigma=(0,sigma), truncate=truncate, mode=mode, cval=cval)
            WW = scipy.ndimage.filters.gaussian_filter(W, sigma=(0,sigma), truncate=truncate, mode=mode, cval=cval)
            Z = VV[:,data_idx]/WW[:,data_idx]
            out._data = Z
        else:
            raise NotImplementedError("gaussian_filter across intervals for {} is not yet supported!".format(str(type(out))))
    else: # within intervals:
        cum_lengths = np.insert(np.cumsum(out.lengths), 0, 0)
        out._data = out._data.astype(float)
        if isinstance(out, core.RegularlySampledAnalogSignalArray):
            # now smooth each interval separately
            for idx in range(out.n_intervals):
                out._data[:,cum_lengths[idx]:cum_lengths[idx+1]] = scipy.ndimage.filters.gaussian_filter(out._data[:,cum_lengths[idx]:cum_lengths[idx+1]], sigma=(0,sigma), truncate=truncate)
        elif isinstance(out, core.BinnedSpikeTrainArray):
            # now smooth each interval separately
            for idx in range(out.n_epochs):
                out._data[:,cum_lengths[idx]:cum_lengths[idx+1]] = scipy.ndimage.filters.gaussian_filter(out._data[:,cum_lengths[idx]:cum_lengths[idx+1]], sigma=(0,sigma), truncate=truncate)
                # out._data[:,cum_lengths[idx]:cum_lengths[idx+1]] = self._smooth_array(out._data[:,cum_lengths[idx]:cum_lengths[idx+1]], w=w)
    return out
@keyword_deprecation(replace_x_with_y={'bw':'truncate'})
def ddt_asa(asa, *, fs=None, smooth=False, rectify=True, sigma=None, truncate=None, norm=False):
    """Numerical differentiation of a regularly sampled AnalogSignalArray.
    Optionally also smooths result with a Gaussian kernel.
    Smoothing is applied in time, and the same smoothing is applied to each
    signal in the AnalogSignalArray.
    Differentiation, (and if requested, smoothing) is applied within each epoch.
    Parameters
    ----------
    asa : nelpy.RegularlySampledAnalogSignalArray
        Input object.
    fs : float, optional
        Sampling rate (in Hz) of input RSASA. If not provided, it will be obtained
        from asa.fs.
    smooth : bool, optional
        If true, result will be smoothed. Default is False
    rectify : bool, optional
        If True, absolute value of derivative is computed. Default is True.
    sigma : float, optional
        Standard deviation of Gaussian kernel, in seconds. Default is 0.05
        (50 ms).
    truncate : float, optional
        Bandwidth outside of which the filter value will be zero. Default is 4.0
    norm: boolean, optional
        If True, then apply the L2 norm to the result.
    Returns
    -------
    out : nelpy.RegularlySampledAnalogSignalArray
        A RegularlySampledAnalogSignalArray with derivative data (in units
        per second) is returned.
    Notes
    -----
    Central differences are used here.
    """
    if not isinstance(asa, core.RegularlySampledAnalogSignalArray):
        raise TypeError("Input object must be a RegularlySampledAnalogSignalArray!")
    if fs is None:
        fs = asa.fs
    if sigma is None:
        sigma = 0.05 # 50 ms default
    out = asa.copy()
    cum_lengths = np.insert(np.cumsum(asa.lengths), 0, 0)
    # ensure that datatype is float
    # TODO: this will break complex data
    out._data = out.data.astype(float)
    # now obtain the derivative for each epoch separately
    # (epochs shorter than 2 samples get a zero derivative, since np.gradient
    # needs at least two points)
    for idx in range(asa.n_epochs):
        # if 1D:
        if asa.n_signals == 1:
            if (cum_lengths[idx+1]-cum_lengths[idx]) < 2:
                # only single sample
                out._data[[0],cum_lengths[idx]:cum_lengths[idx+1]] = 0
            else:
                out._data[[0],cum_lengths[idx]:cum_lengths[idx+1]] = np.gradient(asa._data[[0],cum_lengths[idx]:cum_lengths[idx+1]], axis=1)
        else:
            if (cum_lengths[idx+1]-cum_lengths[idx]) < 2:
                # only single sample
                out._data[:,cum_lengths[idx]:cum_lengths[idx+1]] = 0
            else:
                out._data[:,cum_lengths[idx]:cum_lengths[idx+1]] = np.gradient(asa._data[:,cum_lengths[idx]:cum_lengths[idx+1]], axis=1)
    # np.gradient yields units per sample; scale by fs to get units per second
    out._data = out._data * fs
    if norm:
        # collapse all signals into a single L2-norm magnitude trace
        out._data = np.atleast_2d(np.linalg.norm(out._data, axis=0))
    if rectify:
        out._data = np.abs(out._data)
    if smooth:
        out = gaussian_filter(out, fs=fs, sigma=sigma, truncate=truncate)
    return out
@keyword_deprecation(replace_x_with_y={'bw':'truncate'})
def dxdt_AnalogSignalArray(asa, *, fs=None, smooth=False, rectify=True, sigma=None, truncate=None):
    """Numerical differentiation of a regularly sampled AnalogSignalArray.
    Optionally also smooths result with a Gaussian kernel.
    Smoothing is applied in time, and the same smoothing is applied to each
    signal in the AnalogSignalArray.
    Differentiation, (and if requested, smoothing) is applied within each epoch.
    .. deprecated::
        Use ddt_asa instead; calling this function always raises.
    Parameters
    ----------
    asa : AnalogSignalArray
    fs : float, optional
        Sampling rate (in Hz) of AnalogSignalArray. If not provided, it will
        be obtained from asa.fs
    smooth : bool, optional
        If true, result will be smoothed. Default is False
    rectify : bool, optional
        If True, absolute value of derivative is computed. Default is True.
    sigma : float, optional
        Standard deviation of Gaussian kernel, in seconds. Default is 0.05
        (50 ms).
    truncate : float, optional
        Bandwidth outside of which the filter value will be zero. Default is 4.0
    Returns
    -------
    out : AnalogSignalArray
        An AnalogSignalArray with derivative data (in units per second) is returned.
    """
    # NOTE(review): this raise is unconditional, so everything below it is
    # unreachable dead code, retained only for reference. Use ddt_asa.
    raise DeprecationWarning('use ddt_asa instead!')
    if fs is None:
        fs = asa.fs
    if fs is None:
        raise ValueError("fs must either be specified, or must be contained in the AnalogSignalArray!")
    if sigma is None:
        sigma = 0.05 # 50 ms default
    out = copy.deepcopy(asa)
    cum_lengths = np.insert(np.cumsum(asa.lengths), 0, 0)
    # ensure that datatype is float
    out._data = out.data.astype(float)
    if asa.n_signals == 2:
        out._data = out._data[[0],:]
    # now obtain the derivative for each epoch separately
    for idx in range(asa.n_epochs):
        # if 1D:
        if asa.n_signals == 1:
            if (cum_lengths[idx+1]-cum_lengths[idx]) < 2:
                # only single sample
                out._data[[0],cum_lengths[idx]:cum_lengths[idx+1]] = 0
            else:
                out._data[[0],cum_lengths[idx]:cum_lengths[idx+1]] = np.gradient(asa._data[[0],cum_lengths[idx]:cum_lengths[idx+1]], axis=1)
        elif asa.n_signals == 2:
            if (cum_lengths[idx+1]-cum_lengths[idx]) < 2:
                # only single sample
                out._data[[0],cum_lengths[idx]:cum_lengths[idx+1]] = 0
            else:
                out._data[[0],cum_lengths[idx]:cum_lengths[idx+1]] = np.linalg.norm(np.gradient(asa._data[:,cum_lengths[idx]:cum_lengths[idx+1]], axis=1), axis=0)
        else:
            raise TypeError("more than 2D not currently supported!")
    out._data = out._data * fs
    if rectify:
        out._data = np.abs(out._data)
    if smooth:
        out = gaussian_filter(out, fs=fs, sigma=sigma, truncate=truncate)
    return out
def get_threshold_crossing_epochs(asa, t1=None, t2=None, mode='above'):
    """Return epochs where a signal crosses a compound threshold specified by t1
    and t2.

    Parameters
    ----------
    asa : AnalogSignalArray
        AnalogSignalArray containing a single channel.
    t1 : float, optional
        Primary threshold: minimum signal value that has to be reached /
        exceeded during an event. Defaults to 3 standard deviations above
        the signal mean.
    t2 : float, optional
        Secondary threshold: signal value that defines the event boundaries.
        Defaults to the signal mean.
    mode : string, optional
        One of ['above', 'below']: return epochs where the signal exceeds,
        respectively falls below, the compound threshold. Default is 'above'.

    Returns
    -------
    epochs : EpochArray
        All the epochs where the signal satisfied the criteria.
    """
    if asa.n_signals > 1:
        raise TypeError("multidimensional AnalogSignalArrays not supported!")
    signal = asa.data.squeeze()
    if t1 is None:
        # default primary threshold: mean + 3 SD of the signal
        t1 = np.mean(signal) + 3 * np.std(signal)
    if t2 is None:
        # default secondary threshold: signal mean
        t2 = np.mean(signal)
    # sample-index bounds of periods exceeding the compound threshold
    sample_bounds, _, _ = get_events_boundaries(
        x=signal,
        PrimaryThreshold=t1,
        SecondaryThreshold=t2,
        mode=mode
    )
    # translate sample indices into times (in seconds)
    time_bounds = asa.time[sample_bounds]
    EpochClass = type(asa._abscissa.support)
    if len(time_bounds) == 0:
        return EpochClass(empty=True)
    # extend every stop by one sample period so intervals are open at the end
    time_bounds[:, 1] += 1 / asa.fs
    return EpochClass(time_bounds)
def get_run_epochs(speed, v1=10, v2=8):
    """Return epochs where the animal is running at least as fast as
    specified by v1 and v2.

    Parameters
    ----------
    speed : AnalogSignalArray
        Single-channel speed signal, in units/sec.
    v1 : float, optional
        Minimum speed that must be reached / exceeded during an event.
        Default is 10 [units/sec].
    v2 : float, optional
        Speed that defines the event boundaries. Default is 8 [units/sec].

    Returns
    -------
    run_epochs : EpochArray
        All the epochs where the speed criteria were satisfied.
    """
    # thin wrapper: compound-threshold crossing in 'above' mode
    return get_threshold_crossing_epochs(asa=speed, t1=v1, t2=v2, mode='above')
def get_inactive_epochs(speed, v1=5, v2=7):
    """Return epochs where animal is running no faster than specified by
    v1 and v2.
    Parameters
    ----------
    speed : AnalogSignalArray
        AnalogSignalArray containing single channel speed, in units/sec
    v1 : float, optional
        Primary (lower) speed threshold that the signal must fall below
        during an event. Default is 5 [units/sec]
    v2 : float, optional
        Speed that defines the event boundaries. Default is 7 [units/sec]
    Returns
    -------
    inactive_epochs : EpochArray
        EpochArray with all the epochs where speed satisfied the criteria.
    """
    # mode='below' inverts the compound-threshold logic: epochs where speed
    # drops below v1, extended out to where it crosses v2
    inactive_epochs = get_threshold_crossing_epochs(asa=speed, t1=v1, t2=v2, mode='below')
    return inactive_epochs
def spiketrain_union(st1, st2):
    """Return the union of two spike trains.

    The supports are joined and the per-unit spike times concatenated.
    WARNING! This function should be improved a lot!
    """
    assert st1.n_units == st2.n_units
    joined_support = st1.support.join(st2.support)
    # concatenate each unit's spike times from both trains
    merged_times = [np.append(st1.time[unit], st2.time[unit])
                    for unit in range(st1.n_units)]
    # only propagate the sampling rate when both inputs agree on it
    shared_fs = st1.fs if st1.fs == st2.fs else None
    return core.SpikeTrainArray(merged_times, support=joined_support, fs=shared_fs)
########################################################################
# uncurated below this line!
########################################################################
def find_nearest_idx(array, val):
    """Return the index of the element of `array` closest to `val`.

    Parameters
    ----------
    array : np.array
        Values to search through (need not be sorted).
    val : float
        Target value.

    Returns
    -------
    Index into `array` of the entry with the smallest absolute difference
    to `val`; on ties, the first such entry wins.

    TODO: for sorted arrays a vectorized np.searchsorted-based version
    (see http://stackoverflow.com/questions/2566412) would be faster.
    """
    distances = np.abs(array - val)
    return distances.argmin()
def find_nearest_indices(array, vals):
    """Vectorized wrapper around find_nearest_idx().

    Parameters
    ----------
    array : np.array
        The array you wish to index into.
    vals : np.array
        The values whose nearest indices are sought.

    Returns
    -------
    np.array of int
        For each entry of `vals`, the index of the closest entry of `array`.
    """
    nearest = (find_nearest_idx(array, target) for target in vals)
    return np.fromiter(nearest, dtype=int)
def get_sort_idx(tuning_curves):
    """Finds indices to sort neurons by max firing in tuning curve.

    Parameters
    ----------
    tuning_curves : list of lists
        Where each inner list is the tuning curves for an individual
        neuron.

    Returns
    -------
    sorted_idx : list
        List of integers that correspond to the neuron in sorted order:
        neurons ordered by the position of their tuning-curve peak. Ties
        preserve the original neuron order (stable sort).
    """
    # IDIOM: np.argmax returns the first index of the maximum, which is
    # exactly what np.where(tc == np.max(tc))[0][0] computed by hand.
    peak_locations = [int(np.argmax(neuron_tc)) for neuron_tc in tuning_curves]
    # sorted() is stable, matching the original tie-breaking behavior
    return sorted(range(len(tuning_curves)), key=lambda i: peak_locations[i])
def collapse_time(obj, gap=0):
    """Collapse all epochs in a SpikeTrainArray and collapse them into a single, contiguous SpikeTrainArray"""
    # TODO: redo SpikeTrainArray so as to keep the epochs separate!, and to support gaps!
    # We'll have to ajust all the spikes per epoch... and we'll have to compute a new support. Also set a flag!
    # If it's a SpikeTrainArray, then we left-shift the spike times. If it's an AnalogSignalArray, then we
    # left-shift the time and tdata.
    # Also set a new attribute, with the boundaries in seconds.
    if isinstance(obj, core.RegularlySampledAnalogSignalArray):
        new_obj = type(obj)(empty=True)
        new_obj._data = obj._data
        # build a new contiguous support: epochs laid end-to-end,
        # optionally separated by `gap`
        durations = obj.support.durations
        starts = np.insert(np.cumsum(durations + gap),0,0)[:-1]
        stops = starts + durations
        newsupport = type(obj._abscissa.support)(np.vstack((starts, stops)).T)
        new_obj._support = newsupport
        new_time = obj.time.astype(float) # fast copy
        time_idx = np.insert(np.cumsum(obj.lengths),0,0)
        new_offset = 0
        # left-shift each epoch's timestamps so epochs become contiguous;
        # the running offset accumulates previous durations (+ gap)
        for epidx in range(obj.n_epochs):
            if epidx > 0:
                new_time[time_idx[epidx]:time_idx[epidx+1]] = new_time[time_idx[epidx]:time_idx[epidx+1]] - obj.time[time_idx[epidx]] + new_offset + gap
                new_offset += durations[epidx] + gap
            else:
                new_time[time_idx[epidx]:time_idx[epidx+1]] = new_time[time_idx[epidx]:time_idx[epidx+1]] - obj.time[time_idx[epidx]] + new_offset
                new_offset += durations[epidx]
        new_obj._time = new_time
        new_obj._fs = obj._fs
    elif isinstance(obj, core.EventArray):
        if gap > 0:
            raise ValueError("gaps not supported for SpikeTrainArrays yet!")
        new_obj = type(obj)(empty=True)
        new_time = [[] for _ in range(obj.n_series)]
        duration = 0
        # shift every epoch's event times left by that epoch's start (le),
        # then right by the total duration consumed so far
        for st_ in obj:
            le = st_.support.start
            for unit_ in range(obj.n_series):
                new_time[unit_].extend(st_._data[unit_] - le + duration)
            duration += st_.support.duration
        new_time = np.asanyarray([np.asanyarray(unittime) for unittime in new_time])
        new_obj._data = new_time
        new_obj.support = type(obj._abscissa.support)([0, duration])
        # carry over series metadata unchanged
        new_obj._series_ids = obj._series_ids
        new_obj._series_labels = obj._series_labels
        new_obj._series_tags = obj._series_tags
    elif isinstance(obj, core.BinnedEventArray):
        raise NotImplementedError("BinnedEventArrays are not yet supported, but bst.data is essentially already collapsed!")
    else:
        raise TypeError("unsupported type for collapse_time")
    return new_obj
def cartesian(xcenters, ycenters):
    """Return every (x, y) combination of the two input arrays.

    Parameters
    ----------
    xcenters : np.array
    ycenters : np.array

    Returns
    -------
    cartesian : np.array
        With shape (len(xcenters) * len(ycenters), 2); x varies fastest.
    """
    x_column = np.tile(xcenters, len(ycenters))
    y_column = np.repeat(ycenters, len(xcenters))
    return np.transpose([x_column, y_column])
|
//*******************//
// Karma Test Runner //
//*******************//
// Parsed .babelrc config.
// NOTE(review): `babelrc` is not referenced anywhere in this file —
// presumably kept for loaders that read it implicitly; confirm before removing.
var babelrc = JSON.parse(require('fs').readFileSync('.babelrc').toString())
// We use webpack to resolve import/require statements
var webpackConfig = require('./webpack.config.js')
// Karma supplies the entry files itself, so clear the app's own entries.
webpackConfig.entry = {}
// inline the source map instead of a separate file
webpackConfig.devtool = 'inline-source-map'
// instrumentation for coverage
if (!webpackConfig.module.preLoaders) webpackConfig.module.preLoaders = []
webpackConfig.module.preLoaders.push({
  test: /\.jsx?$/,
  include: /src/,
  exclude: /(node_modules)/,
  loader: 'babel-istanbul',
  query: {
    cacheDirectory: true
  }})
// Swap the real fetch implementation for a mock during tests.
webpackConfig.resolve = {
  alias: {
    'isomorphic-fetch': 'mock-fetch',
  }
}
// Stub node-only modules so the browser bundle does not try to load them.
webpackConfig.externals = {
  'jsdom': 'window',
  'mockery': 'window',
}
// Karma configuration entry point: runs mocha specs in Chrome via webpack.
module.exports = function(config) {
  config.set({
    // watch files and re-run tests on change; keep the browser open
    autoWatch: true,
    singleRun: false,
    browsers: ['Chrome'],
    frameworks: ['mocha'],
    logLevel: config.LOG_INFO,
    // single webpack-built bundle that requires every test file
    files: [ 'tests.webpack.js' ],
    preprocessors: {
      'tests.webpack.js': ['webpack', 'sourcemap']
    },
    webpack: webpackConfig,
    webpackMiddleware: { noInfo: true },
    // coverage output: browsable HTML plus lcov for CI tooling
    coverageReporter: {
      reporters: [
        { type: 'html', subdir: 'html' },
        { type: 'lcovonly', subdir: '.' },
      ],
    },
    reporters: ['progress', 'coverage'],
  })
}
|
import torch
from tqdm import tqdm
import torch.nn as nn
import torch.optim as optim
import torchvision.transforms as transforms
from torch.utils.tensorboard import SummaryWriter # For TensorBoard
from utils import save_checkpoint, load_checkpoint, print_examples
from dataset import get_loader
from model import SeqToSeq
from tabulate import tabulate # To tabulate loss and epoch
import argparse
import json
def main(args):
    """Train the SeqToSeq image-captioning model.

    Builds the data loader, model, loss and optimizer from the parsed CLI
    arguments, then runs the training loop, logging batch loss to
    TensorBoard and checkpointing whenever a new best (lowest) loss is seen.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed command-line arguments (see the __main__ block).
    """
    transform = transforms.Compose(
        [
            transforms.Resize((356, 356)),
            transforms.RandomCrop((299, 299)),
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
        ]
    )
    train_loader, _ = get_loader(
        root_folder = args.root_dir,
        annotation_file = args.csv_file,
        transform=transform,
        # CONSISTENCY FIX: honor the --batch_size flag (it was parsed by
        # argparse but a hard-coded 64 was used here).
        batch_size = args.batch_size,
        num_workers=2,
    )
    vocab = json.load(open('vocab.json'))
    torch.backends.cudnn.benchmark = True
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    load_model = False
    save_model = True
    train_CNN = False
    # Hyperparameters
    embed_size = args.embed_size
    hidden_size = args.hidden_size
    vocab_size = len(vocab['stoi'])
    num_layers = args.num_layers
    learning_rate = args.lr
    num_epochs = args.num_epochs
    # for tensorboard
    writer = SummaryWriter(args.log_dir)
    step = 0
    model_params = {'embed_size': embed_size, 'hidden_size': hidden_size, 'vocab_size':vocab_size, 'num_layers':num_layers}
    # initialize model, loss etc
    model = SeqToSeq(**model_params, device = device).to(device)
    # padding tokens contribute nothing to the loss
    criterion = nn.CrossEntropyLoss(ignore_index = vocab['stoi']["<PAD>"])
    optimizer = optim.Adam(model.parameters(), lr=learning_rate)
    # Only finetune the CNN's final fully-connected layer; freeze the rest
    # unless train_CNN is set.
    for name, param in model.encoder.inception.named_parameters():
        if "fc.weight" in name or "fc.bias" in name:
            param.requires_grad = True
        else:
            param.requires_grad = train_CNN
    # load from a saved checkpoint
    if load_model:
        step = load_checkpoint(torch.load(args.save_path), model, optimizer)
    model.train()
    best_loss, best_epoch = 10, 0  # NOTE(review): assumes losses drop below 10 — confirm
    for epoch in range(num_epochs):
        print_examples(model, device, vocab['itos'])
        for idx, (imgs, captions) in tqdm(
            enumerate(train_loader), total=len(train_loader), leave=False):
            imgs = imgs.to(device)
            captions = captions.to(device)
            # teacher forcing: feed all caption tokens except the last
            outputs = model(imgs, captions[:-1])
            loss = criterion(
                outputs.reshape(-1, outputs.shape[2]), captions.reshape(-1)
            )
            writer.add_scalar("Training loss", loss.item(), global_step=step)
            step += 1
            optimizer.zero_grad()
            # BUG FIX: `loss.backward(loss)` passed the loss tensor as the
            # `gradient` argument, scaling every gradient by the loss value.
            # A scalar loss needs a plain backward() call.
            loss.backward()
            optimizer.step()
        # loss of the final batch of the epoch
        train_loss = loss.item()
        if train_loss < best_loss:
            best_loss = train_loss
            best_epoch = epoch + 1
            if save_model:
                checkpoint = {
                    "model_params": model_params,
                    "state_dict": model.state_dict(),
                    "optimizer": optimizer.state_dict(),
                    "step": step
                }
                save_checkpoint(checkpoint, args.save_path)
        table = [["Loss:", train_loss],
                 ["Step:", step],
                 ["Epoch:", epoch + 1],
                 ["Best Loss:", best_loss],
                 ["Best Epoch:", best_epoch]]
        print(tabulate(table))
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Data / output locations
    parser.add_argument('--root_dir', type = str, default = './flickr30k/flickr30k_images', help = 'path to images folder')
    parser.add_argument('--csv_file', type = str, default = './flickr30k/results.csv', help = 'path to captions csv file')
    parser.add_argument('--log_dir', type = str, default = './drive/MyDrive/TensorBoard/', help = 'path to save tensorboard logs')
    parser.add_argument('--save_path', type = str, default = './drive/MyDrive/checkpoints/Seq2Seq.pt', help = 'path to save checkpoint')
    # Model Params
    parser.add_argument('--batch_size', type = int, default = 64)
    parser.add_argument('--num_epochs', type = int, default = 100)
    parser.add_argument('--embed_size', type = int, default=256)
    parser.add_argument('--hidden_size', type = int, default=512)
    parser.add_argument('--lr', type = float, default= 0.001)
    parser.add_argument('--num_layers', type = int, default = 3, help = 'number of lstm layers')
    args = parser.parse_args()
    main(args)
// Load PICTURES_ROOT_DIR (etc.) from .env before anything reads process.env.
require('dotenv').config()
const express = require('express')
const app = express()
var exif = require('fast-exif');
const fs = require('fs');
var glob = require('glob');
const path = require('path');
// Flat list of picture file paths; populated asynchronously at startup.
var pictureArray = [];
initPicturesArray();
// GET / — pick a random picture and return its base64-encoded path plus
// EXIF metadata as JSON.
app.get("/", function(request, response) {
    // BUG FIX: randomIntFromInterval is inclusive of its upper bound, so the
    // original upper bound of pictureArray.length could index one past the
    // end of the array; use length - 1.
    var selectedIndex = randomIntFromInterval(0, pictureArray.length - 1);
    var selectedFilePath = pictureArray[selectedIndex];
    console.log(selectedFilePath);
    var outObj = {};
    // Buffer.from replaces the deprecated (and removal-slated) new Buffer().
    outObj.photoPathEncoded = Buffer.from(selectedFilePath).toString('base64');
    response.setHeader('Access-Control-Allow-Origin', '*');
    response.setHeader('Content-Type', 'application/json');
    // BUG FIX: exif.read returns a promise, so a synchronous try/catch could
    // never observe its rejection and no response would be sent on failure.
    // Attach a .catch to guarantee the client always gets a reply.
    exif.read(selectedFilePath)
        .then(function(info) {
            outObj.exifInfo = info;
            response.send(outObj);
        })
        .catch(function() {
            // EXIF extraction failed; still return the encoded path.
            response.send(outObj);
        });
});
// GET /photo/:pathEncoded — stream the JPEG whose path is base64-encoded in
// the URL parameter.
app.get("/photo/:pathEncoded", function(request, response) {
    // Buffer.from replaces the deprecated new Buffer() constructor.
    var decodedFilePath = Buffer.from(request.params.pathEncoded, 'base64').toString('ascii');
    var fileStream = fs.createReadStream(decodedFilePath);
    fileStream.on('open', function() {
        response.setHeader('Access-Control-Allow-Origin', '*');
        response.setHeader('Content-Type', 'image/jpeg');
        fileStream.pipe(response);
    });
    // BUG FIX: createReadStream reports missing/unreadable files via an
    // asynchronous 'error' event, which the original try/catch could never
    // observe; also end the response so the client is not left hanging.
    fileStream.on('error', function() {
        response.setHeader('Content-Type', 'text/plain');
        response.end('Please Try Again.');
    });
});
// Start the HTTP server.
// NOTE(review): port is hard-coded; consider honoring process.env.PORT.
app.listen(8000, function() {
    console.log('Listening on port 8000');
})
// Populate the global pictureArray with every .jpg/.jpeg found (case-
// insensitively) under year-named ("20xx") folders beneath the configured root.
function initPicturesArray() {
    var pattern = process.env.PICTURES_ROOT_DIR + "20*/**/*.+(jpg|jpeg)";
    glob(pattern, {nocase: true}, function(err, files) {
        if (err) {
            console.log(err);
            return;
        }
        pictureArray = files;
        console.log('pictureArray initialized with ' + pictureArray.length + ' pictures.');
    });
}
// NOTE(review): this function appears unused and has several issues to confirm:
//  - `picturesDictionary` is never declared in this file (implicit global,
//    ReferenceError under strict mode);
//  - the loop's `<` bound excludes the current year — verify intent;
//  - the active glob pattern hard-codes "200*" and ignores `year`, so every
//    dictionary entry would receive the same file list.
function initPicturesDictionary() {
    for (var year = process.env.PICTURES_START_YEAR; year < new Date().getFullYear(); year++) {
        console.log('initializing ' + year);
        // picturesDictionary[year] = walkSync(process.env.PICTURES_ROOT_DIR + year).filter(fileName => typeof fileName === 'string' && (fileName.toLowerCase().endsWith("jpeg") || fileName.toLowerCase().endsWith("jpg")));
        //picturesDictionary[year] = glob.sync(process.env.PICTURES_ROOT_DIR + year + "/**/*.+(jpg|jpeg)", {nocase: true});
        picturesDictionary[year] = glob.sync(process.env.PICTURES_ROOT_DIR + "200*/**/*.+(jpg|jpeg)", {nocase: true});
    }
}
// Uniform random integer in the inclusive range [min, max].
// Arguments are coerced to integers with parseInt first.
function randomIntFromInterval(min, max) {
    var lo = parseInt(min);
    var hi = parseInt(max);
    return lo + Math.floor(Math.random() * (hi - lo + 1));
}
# ----------------------------------------------------------------------------
# Gimel Studio Copyright 2019-2021 by Noah Rahm and contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
import os
import wx
from gswidgetkit import (Button, EVT_BUTTON, NumberField, EVT_NUMBERFIELD,
Label, DropDown, EVT_DROPDOWN)
import gimelstudio.constants as const
class ExportOptionsDialog(wx.Dialog):
    def __init__(self, parent, window):
        wx.Dialog.__init__(self, window)
        # `parent` is the panel that owns Filetype/Filepath/Image;
        # `window` is the wx parent window for this dialog.
        self.parent = parent
        self.title = _("Image Export Options")
        self.filetype = self.parent.Filetype
        self.filepath = self.parent.Filepath
        # Per-format export settings, edited via the UI built below
        self.jpeg_quality = 90
        self.png_compression = 6
        self.pixel_datatype = "uint8"
        self.comment_meta = ""
        self.SetSize((400, 300))
        self.SetTitle("{} {}".format(self.filetype.upper(), self.title))
        self.SetBackgroundColour(wx.Colour(const.PROP_BG_COLOR))
        self.Center()
        # Different settings for different filetypes; formats without
        # options skip the dialog entirely via BypassUI
        if self.filetype.lower() == ".png":
            self.InitPngUI()
        elif self.filetype.lower() in [".jpg", ".jpeg"]:
            self.InitJpegUI()
        else:
            self.BypassUI()
@property
def Image(self):
return self.parent.Image
def BypassUI(self):
""" Bypass the dialog. """
self.OnExport(None)
def InitPngUI(self):
pnl = wx.Panel(self)
pnl.SetBackgroundColour(const.PROP_BG_COLOR)
vbox = wx.BoxSizer(wx.VERTICAL)
inner_sizer = wx.BoxSizer(wx.VERTICAL)
# Compression level
self.png_compression_field = NumberField(pnl, default_value=self.png_compression,
label=_("Compression Level"),
min_value=0, max_value=9,
suffix="")
inner_sizer.Add(self.png_compression_field, flag=wx.EXPAND | wx.ALL, border=6)
# Spacing
inner_sizer.Add((0, 0), flag=wx.EXPAND | wx.ALL, border=6)
# Pixel datatype
px_datatype_lbl = Label(pnl, label=_("Export pixel datatype:"))
self.px_datatype_dropdown = DropDown(pnl, items=["uint8", "uint16", "float"],
default=self.pixel_datatype)
hbox1 = wx.BoxSizer(wx.HORIZONTAL)
hbox1.Add(px_datatype_lbl, flag=wx.EXPAND | wx.LEFT | wx.TOP | wx.RIGHT, border=6)
hbox1.Add(self.px_datatype_dropdown, flag=wx.EXPAND | wx.LEFT | wx.RIGHT, border=6)
inner_sizer.Add(hbox1)
# Spacing
inner_sizer.Add((0, 0), flag=wx.EXPAND | wx.ALL, border=6)
pnl.SetSizer(inner_sizer)
# Dialog buttons
buttons_sizer = wx.BoxSizer(wx.HORIZONTAL)
export_btn = Button(self, label=_("Export"))
cancel_btn = Button(self, label=_("Cancel"))
buttons_sizer.Add(export_btn)
buttons_sizer.Add(cancel_btn, flag=wx.LEFT, border=5)
vbox.Add(pnl, proportion=1, flag=wx.ALL | wx.EXPAND, border=5)
vbox.Add(buttons_sizer, flag=wx.ALIGN_RIGHT | wx.TOP | wx.BOTTOM | wx.RIGHT, border=10)
self.SetSizer(vbox)
export_btn.Bind(EVT_BUTTON, self.OnExport)
cancel_btn.Bind(EVT_BUTTON, self.OnCancel)
self.png_compression_field.Bind(EVT_NUMBERFIELD, self.OnPngCompressionChange)
self.px_datatype_dropdown.Bind(EVT_DROPDOWN, self.OnPixelDatatypeChange)
def InitJpegUI(self):
pnl = wx.Panel(self)
pnl.SetBackgroundColour(const.PROP_BG_COLOR)
vbox = wx.BoxSizer(wx.VERTICAL)
inner_sizer = wx.BoxSizer(wx.VERTICAL)
# Image Quality
self.img_quality_field = NumberField(pnl, default_value=self.jpeg_quality,
label=_("Image Quality"),
min_value=0, max_value=100,
suffix="%")
inner_sizer.Add(self.img_quality_field, flag=wx.EXPAND | wx.ALL, border=6)
# Spacing
inner_sizer.Add((0, 0), flag=wx.EXPAND | wx.ALL, border=6)
# Pixel datatype
px_datatype_lbl = Label(pnl, label=_("Export pixel datatype:"))
self.px_datatype_dropdown = DropDown(pnl, items=["uint8", "uint16", "float"],
default=self.pixel_datatype)
hbox1 = wx.BoxSizer(wx.HORIZONTAL)
hbox1.Add(px_datatype_lbl, flag=wx.EXPAND | wx.LEFT | wx.TOP | wx.RIGHT, border=6)
hbox1.Add(self.px_datatype_dropdown, flag=wx.EXPAND | wx.LEFT | wx.RIGHT, border=6)
inner_sizer.Add(hbox1)
# Spacing
inner_sizer.Add((0, 0), flag=wx.EXPAND | wx.ALL, border=6)
# Comment metadata
# comment_meta_lbl = Label(pnl, label=_("Comment metadata:"), font_bold=True)
# inner_sizer.Add(comment_meta_lbl, flag=wx.EXPAND | wx.TOP | wx.LEFT | wx.RIGHT, border=6)
# self.comment_meta_txtctrl = TextCtrl(pnl, default=self.comment_meta, size=(-1, 50))
# inner_sizer.Add(self.comment_meta_txtctrl, flag=wx.EXPAND | wx.ALL, border=6)
pnl.SetSizer(inner_sizer)
# Dialog buttons
buttons_sizer = wx.BoxSizer(wx.HORIZONTAL)
export_btn = Button(self, label=_("Export"))
cancel_btn = Button(self, label=_("Cancel"))
buttons_sizer.Add(export_btn)
buttons_sizer.Add(cancel_btn, flag=wx.LEFT, border=5)
vbox.Add(pnl, proportion=1, flag=wx.ALL | wx.EXPAND, border=5)
vbox.Add(buttons_sizer, flag=wx.ALIGN_RIGHT | wx.TOP | wx.BOTTOM | wx.RIGHT, border=10)
self.SetSizer(vbox)
export_btn.Bind(EVT_BUTTON, self.OnExport)
cancel_btn.Bind(EVT_BUTTON, self.OnCancel)
self.img_quality_field.Bind(EVT_NUMBERFIELD, self.OnJPEGQualityChange)
self.px_datatype_dropdown.Bind(EVT_DROPDOWN, self.OnPixelDatatypeChange)
#self.comment_meta_txtctrl.Bind(stc.EVT_STC_MODIFIED, self.OnCommentMetaChange)
def OnExport(self, event):
self.ExportImage()
def ExportImage(self):
# Export the image with the export options
img = self.Image
if self.filetype in [".jpg", ".jpeg"]:
img.specmod().attribute("quality", self.jpeg_quality)
img.specmod().attribute("ImageDescription", self.comment_meta)
elif self.filetype in [".png"]:
img.specmod().attribute("png:compressionLevel", self.png_compression)
img.specmod().attribute("Software", "Gimel Studio")
img.write(self.filepath, self.pixel_datatype)
if img.has_error:
print("Error writing image: ", img.geterror())
# Destroy the dialog
self.Destroy()
def OnCancel(self, event):
self.Destroy()
def OnJPEGQualityChange(self, event):
self.jpeg_quality = event.value
def OnPixelDatatypeChange(self, event):
self.pixel_datatype = event.value
def OnCommentMetaChange(self, event):
self.comment_meta = self.comment_meta_txtctrl.GetText()
def OnPngCompressionChange(self, event):
self.png_compression = event.value
class ExportImageHandler(object):
    """Drives the image export flow: prompts for a destination path, then
    opens the per-filetype options dialog which performs the actual write.
    """

    def __init__(self, parent, image):
        self.parent = parent
        self.image = image
        self.filepath = ""
        self.filetype = ""

    @property
    def Image(self):
        """Image object to be exported."""
        return self.image

    @property
    def Filepath(self):
        """Destination path chosen by the user ("" until chosen)."""
        return self.filepath

    @property
    def Filetype(self):
        """Destination file extension including the dot (e.g. ".png")."""
        return self.filetype

    def RunExport(self):
        """Entry point: ask for a path; if one was chosen, show the options."""
        self.SelectFilePathDialog()
        if self.filepath != "":
            self.ExportOptionsDialog()

    def SelectFilePathDialog(self):
        """Prompt for a save location and validate the chosen extension."""
        wildcard = const.SUPPORTED_FT_SAVE_WILDCARD
        dlg = wx.FileDialog(self.parent, message=_("Export image as…"),
                            defaultDir=os.getcwd(), defaultFile="untitled.png",
                            wildcard=wildcard, style=wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT)
        # This sets the default filter that the user will initially see.
        # Otherwise, the first filter in the list will be used by default.
        dlg.SetFilterIndex(11)
        if dlg.ShowModal() == wx.ID_OK:
            self.filepath = dlg.GetPath()
            self.filetype = os.path.splitext(self.filepath)[1]
            if self.filetype not in const.SUPPORTED_FT_SAVE_LIST:
                warn = wx.MessageDialog(None,
                                        _("That file type isn't currently supported!"),
                                        _("Cannot Save Image!"), style=wx.ICON_EXCLAMATION)
                warn.ShowModal()
                warn.Destroy()
                # BUGFIX: reset the selection, otherwise RunExport would
                # continue and attempt to export the unsupported filetype.
                self.filepath = ""
                self.filetype = ""
        # BUGFIX: the file dialog itself was never destroyed.
        dlg.Destroy()

    def ExportOptionsDialog(self):
        """Open the modal export-options dialog for the selected filetype."""
        dlg = ExportOptionsDialog(self, self.parent)
        dlg.ShowModal()
|
// flow-typed signature: 169f41bc5af42845e6303aad8d21be43
// flow-typed version: <<STUB>>/react-stripe-elements_v1.4.1/flow_v0.63.1
/**
* This is an autogenerated libdef stub for:
*
* 'react-stripe-elements'
*
* Fill this stub out by replacing all the `any` types.
*
* Once filled out, we encourage you to share your work with the
* community by sending a pull request to:
* https://github.com/flowtype/flow-typed
*/
// Auto-generated flow-typed stub: every module resolves to `any` until the
// stub is filled out with real types (see the header comment above).
declare module 'react-stripe-elements' {
  declare module.exports: any;
}
/**
 * We include stubs for each file inside this npm package in case you need to
 * require those files directly. Feel free to delete any files that aren't
 * needed.
 */
declare module 'react-stripe-elements/dist/react-stripe-elements' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/dist/react-stripe-elements.min' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/components/Element' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/components/Element.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/components/Elements' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/components/Elements.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/components/inject' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/components/inject.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/components/PaymentRequestButtonElement' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/components/PaymentRequestButtonElement.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/components/Provider' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/components/Provider.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/decls/Stripe' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/index' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/index.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/utils/shallowEqual' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/es/utils/shallowEqual.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/components/Element' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/components/Element.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/components/Elements' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/components/Elements.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/components/inject' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/components/inject.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/components/PaymentRequestButtonElement' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/components/PaymentRequestButtonElement.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/components/Provider' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/components/Provider.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/decls/Stripe' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/index' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/index.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/utils/shallowEqual' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/lib/utils/shallowEqual.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/components/Element' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/components/Element.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/components/Elements' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/components/Elements.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/components/inject' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/components/inject.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/components/PaymentRequestButtonElement' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/components/PaymentRequestButtonElement.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/components/Provider' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/components/Provider.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/decls/Stripe' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/index' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/index.test' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/utils/shallowEqual' {
  declare module.exports: any;
}
declare module 'react-stripe-elements/src/utils/shallowEqual.test' {
  declare module.exports: any;
}
// Filename aliases
declare module 'react-stripe-elements/dist/react-stripe-elements.js' {
  declare module.exports: $Exports<'react-stripe-elements/dist/react-stripe-elements'>;
}
declare module 'react-stripe-elements/dist/react-stripe-elements.min.js' {
  declare module.exports: $Exports<'react-stripe-elements/dist/react-stripe-elements.min'>;
}
declare module 'react-stripe-elements/es/components/Element.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/components/Element'>;
}
declare module 'react-stripe-elements/es/components/Element.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/components/Element.test'>;
}
declare module 'react-stripe-elements/es/components/Elements.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/components/Elements'>;
}
declare module 'react-stripe-elements/es/components/Elements.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/components/Elements.test'>;
}
declare module 'react-stripe-elements/es/components/inject.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/components/inject'>;
}
declare module 'react-stripe-elements/es/components/inject.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/components/inject.test'>;
}
declare module 'react-stripe-elements/es/components/PaymentRequestButtonElement.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/components/PaymentRequestButtonElement'>;
}
declare module 'react-stripe-elements/es/components/PaymentRequestButtonElement.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/components/PaymentRequestButtonElement.test'>;
}
declare module 'react-stripe-elements/es/components/Provider.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/components/Provider'>;
}
declare module 'react-stripe-elements/es/components/Provider.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/components/Provider.test'>;
}
declare module 'react-stripe-elements/es/decls/Stripe.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/decls/Stripe'>;
}
declare module 'react-stripe-elements/es/index.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/index'>;
}
declare module 'react-stripe-elements/es/index.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/index.test'>;
}
declare module 'react-stripe-elements/es/utils/shallowEqual.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/utils/shallowEqual'>;
}
declare module 'react-stripe-elements/es/utils/shallowEqual.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/es/utils/shallowEqual.test'>;
}
declare module 'react-stripe-elements/lib/components/Element.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/components/Element'>;
}
declare module 'react-stripe-elements/lib/components/Element.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/components/Element.test'>;
}
declare module 'react-stripe-elements/lib/components/Elements.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/components/Elements'>;
}
declare module 'react-stripe-elements/lib/components/Elements.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/components/Elements.test'>;
}
declare module 'react-stripe-elements/lib/components/inject.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/components/inject'>;
}
declare module 'react-stripe-elements/lib/components/inject.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/components/inject.test'>;
}
declare module 'react-stripe-elements/lib/components/PaymentRequestButtonElement.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/components/PaymentRequestButtonElement'>;
}
declare module 'react-stripe-elements/lib/components/PaymentRequestButtonElement.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/components/PaymentRequestButtonElement.test'>;
}
declare module 'react-stripe-elements/lib/components/Provider.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/components/Provider'>;
}
declare module 'react-stripe-elements/lib/components/Provider.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/components/Provider.test'>;
}
declare module 'react-stripe-elements/lib/decls/Stripe.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/decls/Stripe'>;
}
declare module 'react-stripe-elements/lib/index.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/index'>;
}
declare module 'react-stripe-elements/lib/index.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/index.test'>;
}
declare module 'react-stripe-elements/lib/utils/shallowEqual.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/utils/shallowEqual'>;
}
declare module 'react-stripe-elements/lib/utils/shallowEqual.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/lib/utils/shallowEqual.test'>;
}
declare module 'react-stripe-elements/src/components/Element.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/components/Element'>;
}
declare module 'react-stripe-elements/src/components/Element.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/components/Element.test'>;
}
declare module 'react-stripe-elements/src/components/Elements.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/components/Elements'>;
}
declare module 'react-stripe-elements/src/components/Elements.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/components/Elements.test'>;
}
declare module 'react-stripe-elements/src/components/inject.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/components/inject'>;
}
declare module 'react-stripe-elements/src/components/inject.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/components/inject.test'>;
}
declare module 'react-stripe-elements/src/components/PaymentRequestButtonElement.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/components/PaymentRequestButtonElement'>;
}
declare module 'react-stripe-elements/src/components/PaymentRequestButtonElement.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/components/PaymentRequestButtonElement.test'>;
}
declare module 'react-stripe-elements/src/components/Provider.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/components/Provider'>;
}
declare module 'react-stripe-elements/src/components/Provider.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/components/Provider.test'>;
}
declare module 'react-stripe-elements/src/decls/Stripe.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/decls/Stripe'>;
}
declare module 'react-stripe-elements/src/index.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/index'>;
}
declare module 'react-stripe-elements/src/index.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/index.test'>;
}
declare module 'react-stripe-elements/src/utils/shallowEqual.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/utils/shallowEqual'>;
}
declare module 'react-stripe-elements/src/utils/shallowEqual.test.js' {
  declare module.exports: $Exports<'react-stripe-elements/src/utils/shallowEqual.test'>;
}
|
/*
Copyright (c) 2004-2016, The JS Foundation All Rights Reserved.
Available via Academic Free License >= 2.1 OR the modified BSD license.
see: http://dojotoolkit.org/license for details
*/
//>>built
// Auto-generated (minified) Dojo CLDR bundle: generic-calendar date/time
// format strings for Catalan ("ca"). Do not edit by hand — regenerate from
// CLDR data instead.
define("dojo/cldr/nls/ca/generic",{"field-second-relative+0":"ara","field-weekday":"dia de la setmana","field-wed-relative+0":"aquest dimecres","dateFormatItem-GyMMMEd":"E, d MMM y G","dateFormatItem-MMMEd":"E, d MMM","field-wed-relative+1":"dimecres que ve","field-tue-relative+-1":"dimarts passat","dateFormat-long":"d MMMM 'de' y G","field-fri-relative+-1":"divendres passat","field-wed-relative+-1":"dimecres passat","dateFormatItem-yyyyQQQ":"QQQ y G","dateTimeFormat-medium":"{1}, {0}","dateFormat-full":"EEEE d MMMM 'de' y G",
"dateFormatItem-yyyyMEd":"E, d.M.y G","field-thu-relative+-1":"dijous passat","dateFormatItem-Md":"d/M","dateFormatItem-GyMMMM":"LLLL 'de' y G","field-era":"era","field-year":"any","dateFormatItem-yyyyMMMM":"LLLL 'de' y G","field-hour":"hora","field-sat-relative+0":"aquest dissabte","field-sat-relative+1":"dissabte que ve","field-day-relative+0":"avui","field-day-relative+1":"dem\u00e0","field-thu-relative+0":"aquest dijous","dateFormatItem-GyMMMd":"d MMM y G","field-day-relative+2":"dem\u00e0 passat",
"field-thu-relative+1":"dijous que ve","dateFormatItem-H":"H","dateFormatItem-Gy":"y G","dateFormatItem-yyyyMMMEd":"E, d MMM y G","dateFormatItem-M":"L","dateFormatItem-yyyyMMM":"LLL y G","dateFormatItem-yyyyMMMd":"d MMM y G","dateFormatItem-MMMMd":"d MMMM","field-sun-relative+0":"aquest diumenge","dateFormatItem-Hm":"H:mm","field-sun-relative+1":"diumenge que ve","field-minute":"minut","field-dayperiod":"a. m./p. m.","dateFormatItem-d":"d","dateFormatItem-ms":"mm:ss","field-day-relative+-1":"ahir",
"dateFormatItem-h":"h a","dateTimeFormat-long":"{1}, {0}","field-day-relative+-2":"abans-d\u2019ahir","dateFormatItem-MMMd":"d MMM","dateFormatItem-MEd":"E d/M","dateTimeFormat-full":"{1}, {0}","field-fri-relative+0":"aquest divendres","field-fri-relative+1":"divendres que ve","field-day":"dia","field-zone":"zona","dateFormatItem-y":"y G","field-year-relative+-1":"l\u2019any passat","field-month-relative+-1":"el mes passat","dateFormatItem-hm":"h:mm a","dateFormatItem-yyyyMd":"d/M/y G","field-month":"mes",
"dateFormatItem-MMM":"LLL","field-tue-relative+0":"aquest dimarts","field-tue-relative+1":"dimarts que ve","dateFormatItem-MMMMEd":"E, d MMMM","field-mon-relative+0":"aquest dilluns","field-mon-relative+1":"dilluns que ve","dateFormat-short":"dd/MM/yy GGGGG","field-second":"segon","field-sat-relative+-1":"dissabte passat","field-sun-relative+-1":"diumenge passat","field-month-relative+0":"aquest mes","field-month-relative+1":"el mes que ve","dateFormatItem-Ed":"E d","field-week":"setmana","dateFormat-medium":"dd/MM/y G",
"field-year-relative+0":"enguany","field-week-relative+-1":"la setmana passada","dateFormatItem-yyyyM":"M/y G","field-year-relative+1":"l\u2019any que ve","dateFormatItem-yyyyQQQQ":"QQQQ y G","dateTimeFormat-short":"{1}, {0}","dateFormatItem-Hms":"H:mm:ss","dateFormatItem-hms":"h:mm:ss a","dateFormatItem-GyMMM":"LLL y G","field-mon-relative+-1":"dilluns passat","dateFormatItem-yyyy":"y G","field-week-relative+0":"aquesta setmana","field-week-relative+1":"la setmana que ve"});
'''
█ █▄ █ █▄█ ▄▀▄ █▄ ▄█ ██▀ █▀▄ █▀▄ █▀
█ █ ▀█ █ █ █▀█ █ ▀ █ █▄▄ █▀ █▄▀ █▀
Dev : IlhamGUD
'''
import time
import fitz
import shutil
from pdf import PROCESS
from pyrogram import filters
from Configs.dm import Config
from plugins.checkPdf import checkPdf
from plugins.progress import progress
from pyrogram import Client as InHamePDF
from plugins.fileSize import get_size_format as gSF
from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup
#--------------->
#--------> LOCAL VARIABLES
#------------------->
pdfInfoMsg = """`Apa yang ingin saya lakukan dengan file ini?`
Nama FIle: `{}`
Ukuran File: `{}`
`Jumlah Halaman: {}`✌️
"""
PDF_THUMBNAIL = Config.PDF_THUMBNAIL
#--------------->
#--------> VARIABLES
#------------------->
"""
______VARIABLES______
M = text message
T = text file
H = html file
J = Json file
'K' for pg no known pdfs
"""
#--------------->
#--------> PDF TO TEXT
#------------------->
# Callback-data filters routing the "PDF -> text" menu. The plain codes
# ("M" message, "T" txt file, "J" json, "H" html) are used when the PDF's
# page count is unknown; the "K"-prefixed variants when it is known.
M = filters.create(lambda _, __, query: query.data in ["M", "KM"])
T = filters.create(lambda _, __, query: query.data in ["T", "KT"])
J = filters.create(lambda _, __, query: query.data in ["J", "KJ"])
H = filters.create(lambda _, __, query: query.data in ["H", "KH"])
# Menu entry points: "toText" (page count unknown) and "KtoText|<pages>".
toText = filters.create(lambda _, __, query: query.data == "toText")
KtoText = filters.create(lambda _, __, query: query.data.startswith("KtoText|"))
# pdf to images (with tidak diketahui pdf page number)
@InHamePDF.on_callback_query(toText)
async def _toText(bot, callbackQuery):
    # Show the output-format menu for "PDF -> text" when the PDF's page
    # count is unknown. The chosen format is handled by the "M"/"T"/"H"/"J"
    # callbacks; "BTPM" returns to the previous menu.
    try:
        await callbackQuery.edit_message_text(
            "__Pdf » Text\nTotal halaman: Tidak diketahui \nNow, Specify the format:__",
            reply_markup=InlineKeyboardMarkup(
                [
                    [
                        InlineKeyboardButton(
                            "Messages 📜",
                            callback_data="M"
                        ),
                        InlineKeyboardButton(
                            "Txt file 🧾",
                            callback_data="T"
                        )
                    ],
                    [
                        InlineKeyboardButton(
                            "Html 🌐",
                            callback_data="H"
                        ),
                        InlineKeyboardButton(
                            "Json 🎀",
                            callback_data="J"
                        )
                    ],
                    [
                        InlineKeyboardButton(
                            "« Back «",
                            callback_data="BTPM"
                        )
                    ]
                ]
            )
        )
    except Exception:
        # Editing can fail (e.g. message unchanged or too old); best-effort.
        pass
# pdf to images (with known page Number)
@InHamePDF.on_callback_query(KtoText)
async def _KtoText(bot, callbackQuery):
    # Show the output-format menu for "PDF -> text" when the page count is
    # known; callback data is "KtoText|<number_of_pages>". The chosen format
    # is handled by the "KM"/"KT"/"KH"/"KJ" callbacks.
    try:
        _, number_of_pages = callbackQuery.data.split("|")
        await callbackQuery.edit_message_text(
            f"__Pdf » Text\nTotal halaman: {number_of_pages} 🌟 \nNow, Specify the format:__",
            reply_markup=InlineKeyboardMarkup(
                [
                    [
                        InlineKeyboardButton(
                            "Messages 📜",
                            callback_data="KM"
                        ),
                        InlineKeyboardButton(
                            "Txt file 🧾",
                            callback_data="KT"
                        )
                    ],
                    [
                        InlineKeyboardButton(
                            "Html 🌐",
                            callback_data="KH"
                        ),
                        InlineKeyboardButton(
                            "Json 🎀",
                            callback_data="KJ"
                        )
                    ],
                    [
                        InlineKeyboardButton(
                            "« Back «",
                            callback_data=f"KBTPM|{number_of_pages}"
                        )
                    ]
                ]
            )
        )
    except Exception:
        # Editing can fail (e.g. message unchanged or too old); best-effort.
        pass
# to Text file (with tidak diketahui pdf page number)
@InHamePDF.on_callback_query(T)
async def _T(bot, callbackQuery):
    """Convert the replied-to PDF to a plain-text (.txt) document.

    Handles the "T" callback (page count unknown): downloads the PDF,
    extracts every page's text with PyMuPDF and uploads the result as a
    .txt file, separating pages with a form feed (0x0C).
    """
    chat_id = callbackQuery.message.chat.id
    # Per-request working directory, named after the menu message id.
    work_dir = f"{callbackQuery.message.message_id}"
    try:
        # Allow only one running job per chat.
        if chat_id in PROCESS:
            await callbackQuery.answer(
                "⏳ - Sedang dalam proses"
            )
            return
        PROCESS.append(chat_id)
        data = callbackQuery.data
        downloadMessage = await callbackQuery.message.reply_text(
            "`📥 - Mendownload PDF`", quote=True
        )
        file_id = callbackQuery.message.reply_to_message.document.file_id
        fileSize = callbackQuery.message.reply_to_message.document.file_size
        c_time = time.time()
        downloadLoc = await bot.download_media(
            message = file_id,
            file_name = f"{work_dir}/pdf.pdf",
            progress = progress,
            progress_args = (
                fileSize,
                downloadMessage,
                c_time
            )
        )
        if downloadLoc is None:
            # Download cancelled/failed: release the slot and clean up.
            PROCESS.remove(chat_id)
            shutil.rmtree(work_dir, ignore_errors=True)
            return
        await downloadMessage.edit(
            "`Downloading Completed..` 🥱"
        )
        if data == "T":
            checked = await checkPdf(f'{work_dir}/pdf.pdf', callbackQuery)
            if not(checked == "pass"):
                # BUGFIX: downloadMessage is itself a Message, so delete it
                # directly — the old `downloadMessage.message.message_id`
                # raised AttributeError. Also release the PROCESS slot and
                # remove the temp dir, both previously leaked on this path.
                await downloadMessage.delete()
                PROCESS.remove(chat_id)
                shutil.rmtree(work_dir, ignore_errors=True)
                return
        with fitz.open(f'{work_dir}/pdf.pdf') as doc:
            with open(f'{work_dir}/pdf.txt', "wb") as out:
                for page in doc:                        # iterate the document pages
                    text = page.get_text().encode("utf8")  # plain text, UTF-8
                    out.write(text)
                    out.write(bytes((12,)))             # page delimiter (form feed 0x0C)
        await bot.send_chat_action(
            chat_id,
            "upload_document"
        )
        await bot.send_document(
            chat_id = chat_id,
            reply_to_message_id = callbackQuery.message.reply_to_message.message_id,
            thumb = PDF_THUMBNAIL,
            document = f"{work_dir}/pdf.txt",
            caption = "__Txt file__"
        )
        await downloadMessage.delete()
        PROCESS.remove(chat_id)
        shutil.rmtree(work_dir)
    except Exception as e:
        # Log and clean up; inner try guards against partial state
        # (e.g. the chat was never added to PROCESS).
        try:
            print("Text/T: ", e)
            PROCESS.remove(chat_id)
            shutil.rmtree(work_dir)
        except Exception:
            pass
# to Text message (with tidak diketahui pdf page number)
@InHamePDF.on_callback_query(M)
async def _M(bot, callbackQuery):
    """Convert the replied-to PDF to a series of text messages.

    Handles the "M" callback (page count unknown): downloads the PDF and
    sends each page's extracted text as a chat message (pages whose text
    is empty or longer than 1048 characters are skipped).
    """
    chat_id = callbackQuery.message.chat.id
    # Per-request working directory, named after the menu message id.
    work_dir = f"{callbackQuery.message.message_id}"
    try:
        # Allow only one running job per chat.
        if chat_id in PROCESS:
            await callbackQuery.answer(
                "⏳ - Sedang dalam proses"
            )
            return
        PROCESS.append(chat_id)
        data = callbackQuery.data
        downloadMessage = await bot.send_message(
            chat_id = chat_id,
            reply_to_message_id = callbackQuery.message.reply_to_message.message_id,
            text = "`📥 - Mendownload PDF`"
        )
        file_id = callbackQuery.message.reply_to_message.document.file_id
        fileSize = callbackQuery.message.reply_to_message.document.file_size
        c_time = time.time()
        downloadLoc = await bot.download_media(
            message = file_id,
            file_name = f"{work_dir}/pdf.pdf",
            progress = progress,
            progress_args = (
                fileSize,
                downloadMessage,
                c_time
            )
        )
        if downloadLoc is None:
            # Download cancelled/failed: release the slot and clean up.
            PROCESS.remove(chat_id)
            shutil.rmtree(work_dir, ignore_errors=True)
            return
        await downloadMessage.edit(
            "`Downloading Completed..` 🥱"
        )
        if data == "M":
            checked = await checkPdf(f'{work_dir}/pdf.pdf', callbackQuery)
            if not(checked == "pass"):
                # BUGFIX: downloadMessage is itself a Message, so delete it
                # directly — the old `downloadMessage.message.message_id`
                # raised AttributeError. Also release the PROCESS slot and
                # remove the temp dir, both previously leaked on this path.
                await downloadMessage.delete()
                PROCESS.remove(chat_id)
                shutil.rmtree(work_dir, ignore_errors=True)
                return
        with fitz.open(f'{work_dir}/pdf.pdf') as doc:
            for page in doc:  # iterate the document pages
                # BUGFIX: send the page text as str — the old code sent the
                # UTF-8-encoded bytes, which Telegram's text API rejects.
                pdfText = page.get_text()
                if 1 <= len(pdfText) <= 1048:
                    await bot.send_chat_action(
                        chat_id, "typing"
                    )
                    await bot.send_message(
                        chat_id, pdfText
                    )
        # Consistency with the other handlers: remove the status message.
        await downloadMessage.delete()
        PROCESS.remove(chat_id)
        shutil.rmtree(work_dir)
    except Exception as e:
        # Log and clean up; inner try guards against partial state
        # (e.g. the chat was never added to PROCESS).
        try:
            print("Text/M: ", e)
            PROCESS.remove(chat_id)
            shutil.rmtree(work_dir)
        except Exception:
            pass
# to Html file (with tidak diketahui pdf page number)
@InHamePDF.on_callback_query(H)
async def _H(bot, callbackQuery):
    """Convert the replied-to PDF to an HTML document.

    Handles the "H" callback (page count unknown): downloads the PDF,
    extracts each page as HTML with PyMuPDF and uploads the result as a
    .html file, separating pages with a form feed (0x0C).
    """
    chat_id = callbackQuery.message.chat.id
    # Per-request working directory, named after the menu message id.
    work_dir = f"{callbackQuery.message.message_id}"
    try:
        # Allow only one running job per chat.
        if chat_id in PROCESS:
            await callbackQuery.answer(
                "⏳ - Sedang dalam proses"
            )
            return
        PROCESS.append(chat_id)
        data = callbackQuery.data
        downloadMessage = await bot.send_message(
            chat_id = chat_id,
            reply_to_message_id = callbackQuery.message.reply_to_message.message_id,
            text = "`📥 - Mendownload PDF`"
        )
        file_id = callbackQuery.message.reply_to_message.document.file_id
        fileSize = callbackQuery.message.reply_to_message.document.file_size
        c_time = time.time()
        downloadLoc = await bot.download_media(
            message = file_id,
            file_name = f"{work_dir}/pdf.pdf",
            progress = progress,
            progress_args = (
                fileSize,
                downloadMessage,
                c_time
            )
        )
        if downloadLoc is None:
            # Download cancelled/failed: release the slot and clean up.
            PROCESS.remove(chat_id)
            shutil.rmtree(work_dir, ignore_errors=True)
            return
        await downloadMessage.edit(
            "`Downloading Completed..` 🥱"
        )
        if data == "H":
            checked = await checkPdf(f'{work_dir}/pdf.pdf', callbackQuery)
            if not(checked == "pass"):
                # BUGFIX: downloadMessage is itself a Message, so delete it
                # directly — the old `downloadMessage.message.message_id`
                # raised AttributeError. Also release the PROCESS slot and
                # remove the temp dir, both previously leaked on this path.
                await downloadMessage.delete()
                PROCESS.remove(chat_id)
                shutil.rmtree(work_dir, ignore_errors=True)
                return
        with fitz.open(f'{work_dir}/pdf.pdf') as doc:
            with open(f'{work_dir}/pdf.html', "wb") as out:
                for page in doc:                              # iterate the document pages
                    text = page.get_text("html").encode("utf8")  # page as HTML, UTF-8
                    out.write(text)
                    out.write(bytes((12,)))                   # page delimiter (form feed 0x0C)
        await bot.send_chat_action(
            chat_id,
            "upload_document"
        )
        await bot.send_document(
            chat_id = chat_id,
            reply_to_message_id = callbackQuery.message.reply_to_message.message_id,
            thumb = PDF_THUMBNAIL,
            document = f"{work_dir}/pdf.html",
            caption = "__Html file : helps to view pdf on any browser..__ 😉"
        )
        await downloadMessage.delete()
        PROCESS.remove(chat_id)
        shutil.rmtree(work_dir)
    except Exception as e:
        # BUGFIX: the exception was previously caught without `as e`, so the
        # print below raised NameError and the real error was never logged.
        try:
            print("Text/H: ", e)
            PROCESS.remove(chat_id)
            shutil.rmtree(work_dir)
        except Exception:
            pass
# to Text file (with tidak diketahui pdf page number)
@InHamePDF.on_callback_query(J)
async def _J(bot, callbackQuery):
    """Convert the replied-to PDF into a JSON text dump and send it back.

    Triggered by the "J" callback button: downloads the PDF the user replied
    to, extracts every page as JSON via PyMuPDF, writes pages separated by a
    form-feed byte (0x0C), uploads the resulting ``pdf.json`` document, then
    removes the per-message work directory.

    NOTE(review): on a failed ``checkPdf`` this returns without removing the
    chat id from ``PROCESS`` — presumably ``checkPdf`` handles that itself;
    behavior kept as-is, verify against ``checkPdf``.
    """
    try:
        # Allow only one conversion per chat at a time.
        if callbackQuery.message.chat.id in PROCESS:
            await callbackQuery.answer(
                "⏳ - Sedang dalam proses"
            )
            return
        PROCESS.append(callbackQuery.message.chat.id)
        data = callbackQuery.data
        # Progress/status message shown while the PDF downloads.
        downloadMessage = await bot.send_message(
            chat_id = callbackQuery.message.chat.id,
            reply_to_message_id = callbackQuery.message.reply_to_message.message_id,
            text = "`📥 - Mendownload PDF`"
        )
        file_id = callbackQuery.message.reply_to_message.document.file_id
        fileSize = callbackQuery.message.reply_to_message.document.file_size
        c_time = time.time()
        # Download into a work dir named after the triggering message id.
        downloadLoc = await bot.download_media(
            message = file_id,
            file_name = f"{callbackQuery.message.message_id}/pdf.pdf",
            progress = progress,
            progress_args = (
                fileSize,
                downloadMessage,
                c_time
            )
        )
        if downloadLoc is None:
            # Download failed or was cancelled; release the chat lock.
            PROCESS.remove(callbackQuery.message.chat.id)
            return
        await downloadMessage.edit(
            "`Downloading Completed..` 🥱"
        )
        if data == "J":
            checked = await checkPdf(f'{callbackQuery.message.message_id}/pdf.pdf', callbackQuery)
            if not(checked == "pass"):
                await bot.delete_messages(
                    chat_id = callbackQuery.message.chat.id,
                    # FIX: downloadMessage is itself a Message object, so its
                    # id is .message_id — the original .message.message_id
                    # raised AttributeError on this cleanup path.
                    message_ids = downloadMessage.message_id
                )
                return
            # Dump each page as JSON, delimited by a form feed (0x0C).
            with fitz.open(f'{callbackQuery.message.message_id}/pdf.pdf') as doc:
                with open(f'{callbackQuery.message.message_id}/pdf.json', "wb") as out:
                    for page in doc:  # iterate the document pages
                        text = page.get_text("json").encode("utf8")  # page content as UTF-8 JSON
                        out.write(text)
                        out.write(bytes((12,)))  # page delimiter (form feed 0x0C)
            await bot.send_chat_action(
                callbackQuery.message.chat.id,
                "upload_document"
            )
            await bot.send_document(
                chat_id = callbackQuery.message.chat.id,
                reply_to_message_id = callbackQuery.message.reply_to_message.message_id,
                thumb = PDF_THUMBNAIL,
                document = f"{callbackQuery.message.message_id}/pdf.json",
                caption = "__Json File__"
            )
            await downloadMessage.delete()
            PROCESS.remove(callbackQuery.message.chat.id)
            shutil.rmtree(f"{callbackQuery.message.message_id}")
    # FIX: bind the exception as ``e`` — the original bare ``except Exception:``
    # left ``e`` undefined, so the print below raised NameError (swallowed by
    # the inner try) and the real error was never logged.
    except Exception as e:
        try:
            print("Text/J: ", e)
            # Best-effort cleanup; inner except guards against the chat id /
            # work dir already being gone.
            PROCESS.remove(callbackQuery.message.chat.id)
            shutil.rmtree(f"{callbackQuery.message.message_id}")
        except Exception:
            pass
# Copyright InHame Dev
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.