#!/usr/bin/env python
# MIT License
# (c) baltasar 2017
import webapp2
from google.appengine.api import users
from google.appengine.ext import ndb
import model.character
from model.character import Character
class AddCharacter(webapp2.RequestHandler):
    def get(self):
        try:
            story_id = self.request.GET['story_id']
        except KeyError:
            self.redirect("/error?msg=missing key")
            return

        user = users.get_current_user()

        if user:
            try:
                story = ndb.Key(urlsafe=story_id).get()
            except Exception:
                self.redirect("/error?msg=key was not found")
                return

            # Count existing characters to build a default name.
            num_characters = Character.query().count() + 1

            character = Character()
            character.story = story.key.id()
            character.name = "John Doe " + str(num_characters)
            character.summary = "An awesome character."
            key = model.character.update(character)

            self.redirect("/characters/modify?character_id=" + key.urlsafe())
        else:
            self.redirect("/")
            return


app = webapp2.WSGIApplication([
    ("/characters/add", AddCharacter),
], debug=True)
|
/**
@module "ui/template-object-cell.reel"
@requires montage
@requires montage/ui/component
*/
var Component = require("montage/ui/component").Component,
Promise = require("montage/core/promise").Promise,
MimeTypes = require("core/mime-types"),
MenuModule = require("core/menu");
/**
Cell displaying a single template object card in the template explorer.
@class module:"ui/template-object-cell.reel".TemplateObjectCell
@extends module:montage/ui/component.Component
*/
exports.TemplateObjectCell = Component.specialize({
constructor: {
value: function TemplateObjectCell () {
this.super();
}
},
templateExplorer: {
value: null
},
_contextualMenu: {
value: null
},
contextualMenu: {
get: function () {
if (this._contextualMenu) {
return this._contextualMenu;
}
var deleteItem,
menu = new MenuModule.Menu();
deleteItem = MenuModule.makeMenuItem("Delete", "delete", "", false, "");
menu.insertItem(deleteItem);
this._contextualMenu = menu;
return this._contextualMenu;
}
},
handleContextualMenuValidate: {
value: function (evt) {
var menuItem = evt.detail,
identifier = menuItem.identifier;
switch (identifier) {
case "delete":
evt.stop();
menuItem.enabled = true;
break;
}
}
},
handleContextualMenuAction: {
value: function (evt) {
var menuItem = evt.detail,
identifier = menuItem.identifier;
switch (identifier) {
case "delete":
this.deleteTemplateObject();
break;
}
}
},
enterDocument: {
value: function (firstTime) {
if (!firstTime) {
return;
}
// Allow dropping events anywhere on the card
this.element.addEventListener("dragover", this, false);
this.element.addEventListener("dragleave", this, false);
this.element.addEventListener("drop", this, false);
// hover component in the stage
this.element.addEventListener("mouseover", this, false);
this.element.addEventListener("mouseout", this, false);
// selection
this.element.addEventListener("click", this, false);
// save toggle state
this.toggle.addEventListener("action", this, false);
// contextualMenu
this.addEventListener("contextualMenuValidate", this, false);
this.addEventListener("contextualMenuAction", this, false);
}
},
_willAcceptDrop: {
value: false
},
_templateObject: {
value: null
},
templateObject: {
get: function() {
return this._templateObject;
},
set: function(value) {
if (this._templateObject === value) {
return;
}
this._templateObject = value;
if (value) {
var self = this;
this.canDrawGate.setField("needsObjectDescription", false);
this._describeTemplateObject()
.spread(function (templateObject, description) {
// Only accept values if the templateObject hasn't changed
// since we went off to describe it
if (templateObject === self._templateObject) {
var keys = Object.keys(description);
keys.forEach(function (key) {
self[key] = description[key];
});
self.canDrawGate.setField("needsObjectDescription", true);
}
})
.done();
}
}
},
_describeTemplateObject: {
value: function () {
var templateObject = this.templateObject,
description = {};
// Determine if this object is provided by the project's own package
// TODO not restrict this to components within the ui directory
description.isInProjectPackage = /^ui\//.test(templateObject.moduleId);
description.isTemplateObjectComponent = /\.reel$/.test(templateObject.moduleId);
return Promise.resolve([templateObject, description]);
}
},
isInProjectPackage: {
value: null
},
isTemplateObjectComponent: {
value: null
},
handleDragover: {
value: function (event) {
var availableTypes = event.dataTransfer.types;
if (!availableTypes) {
event.dataTransfer.dropEffect = "none";
this._willAcceptDrop = false;
} else if (availableTypes.has(MimeTypes.MONTAGE_EVENT_TARGET)) {
// allows us to drop
event.preventDefault();
event.stopPropagation();
event.dataTransfer.dropEffect = "copy";
this._willAcceptDrop = true;
}
}
},
handleDragleave: {
value: function () {
this._willAcceptDrop = false;
}
},
handleDrop: {
value: function (event) {
var availableTypes = event.dataTransfer.types,
listenerModel;
// Always accept Events
if (availableTypes.has(MimeTypes.MONTAGE_EVENT_TARGET)) {
event.stopPropagation();
var eventTargetData = JSON.parse(event.dataTransfer.getData(MimeTypes.MONTAGE_EVENT_TARGET));
listenerModel = Object.create(null);
listenerModel.targetObject = this.templateObject.editingDocument.editingProxyMap[eventTargetData.targetLabel];
listenerModel.type = eventTargetData.eventType;
listenerModel.listener = this.templateObject;
this.dispatchEventNamed("addListenerForObject", true, false, {
listenerModel: listenerModel
});
}
this._willAcceptDrop = false;
}
},
handleHeaderAction: {
value: function () {
if (this.isInProjectPackage) {
this.dispatchEventNamed("openModuleId", true ,true, {
moduleId: this.templateObject.moduleId
});
}
}
},
handleMouseover: {
value: function () {
var proxy = this.templateObject,
editingDocument,
nodeProxy;
if (proxy) {
editingDocument = proxy._editingDocument;
nodeProxy = editingDocument.nodeProxyForComponent(proxy);
this.dispatchEventNamed("highlightComponent", true, true, {
component: proxy,
element: nodeProxy,
highlight: true
});
}
}
},
handleMouseout: {
value: function () {
var proxy = this.templateObject;
if (proxy) {
this.dispatchEventNamed("highlightComponent", true, true, {
component: proxy,
highlight: false
});
}
}
},
handleObjectLabelAction: {
value: function (event) {
var proxy = this.templateObject,
editingDocument = proxy._editingDocument;
event.stopPropagation();
if (!editingDocument.setOwnedObjectLabel(proxy, event.target.value)) {
event.preventDefault();
}
}
},
handleHiddenToggleButtonAction: {
value: function (evt) {
var proxy = this.templateObject,
editingDocument = proxy._editingDocument,
hidden = !this.templateObjects.hiddenToggleButton.pressed;
editingDocument.setOwnedObjectEditorMetadata(proxy, "isHidden", hidden);
}
},
handleToggle: {
value: function (evt) {
var reelProxy = this.templateObject,
editingDocument = reelProxy._editingDocument,
expanded = this.expanded.checked;
editingDocument.templateObjectsTreeToggleStates.set(reelProxy, expanded);
}
},
canSelect: {
value: function (evt) {
// ignore clicks on the expand/collapse toggle and on the hide checkbox
return !(
this.toggle.element === evt.target ||
(
evt.target.component &&
(evt.target.component.identifier === "hiddenToggleButton")
)
);
}
},
deleteTemplateObject: {
value: function () {
var reelProxy = this.templateObject,
editingDocument = reelProxy._editingDocument;
editingDocument.removeObject(reelProxy);
}
},
handleClick: {
value: function (evt) {
var reelProxy = this.templateObject,
editingDocument = reelProxy._editingDocument;
if (!this.canSelect(evt)) {
return;
}
// FIXME: Add support for multiple selection
editingDocument.clearSelectedObjects();
editingDocument.selectObject(reelProxy);
}
}
});
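// For reference, a sketch of the drag payload that handleDrop above expects.
// The field names (targetLabel, eventType) come from the handler itself; the
// sample values and the dragstart wiring are hypothetical, not part of this file:
//
// handleDragstart: {
//     value: function (event) {
//         var payload = {targetLabel: "owner", eventType: "action"};
//         event.dataTransfer.setData(MimeTypes.MONTAGE_EVENT_TARGET, JSON.stringify(payload));
//     }
// }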
|
# MIT License
#
# Copyright (c) 2017 Anders S. Christensen and Felix A. Faber
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .fchl import *
|
from distutils.core import setup, Extension
setup(name='factors',
ext_modules=[Extension('factors', ['factors.cpp', 'factorization.cpp'])]
)
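# Note: distutils has been deprecated and was removed from the standard
# library in Python 3.12. A minimal setuptools equivalent (assuming the same
# two C++ sources) would look like:
#
#   from setuptools import setup, Extension
#   setup(name='factors',
#         ext_modules=[Extension('factors', ['factors.cpp', 'factorization.cpp'])])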
|
var convert = require('./convert');
module.exports = convert(require('../date'));
|
/**
* Author: CodeLai
* Email: [email protected]
* DateTime: 2016/7/18 15:49
*/
import React from 'react';
import FilterLink from '../containers/FilterLink';
import { todoFilters } from '../actions/index';
const Footer = () => (
  <p ref={() => console.log('Footer')}>
    Show:
    {" "}
    <FilterLink filter={todoFilters.SHOW_ALL}>
      All
    </FilterLink>
    {", "}
    <FilterLink filter={todoFilters.SHOW_ACTIVE}>
      Active
    </FilterLink>
    {", "}
    <FilterLink filter={todoFilters.SHOW_COMPLETED}>
      Completed
    </FilterLink>
  </p>
);
export default Footer;
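// A plausible sketch of the imported FilterLink container, assuming the
// conventional Redux todos wiring; connect, setVisibilityFilter, and the Link
// presentational component are assumptions, not code from this repo:
//
// import { connect } from 'react-redux';
// import { setVisibilityFilter } from '../actions';
// import Link from '../components/Link';
//
// export default connect(
//   (state, ownProps) => ({ active: ownProps.filter === state.visibilityFilter }),
//   (dispatch, ownProps) => ({ onClick: () => dispatch(setVisibilityFilter(ownProps.filter)) })
// )(Link);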
|
/**
* Copyright 2020 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Values} from './values';
import {devAssert} from '../log';
import {getMode} from '../mode';
import {pushIfNotExist, removeItem} from '../utils/array';
import {startsWith} from '../string';
import {throttleTail} from './scheduler';
// Properties set on the DOM nodes to track the context state.
const NODE_PROP = '__AMP_NODE';
const ASSIGNED_SLOT_PROP = '__AMP_ASSIGNED_SLOT';
const AMP_PREFIX = 'AMP-';
// Relevant node types.
// See https://developer.mozilla.org/en-US/docs/Web/API/Node/nodeType.
const ELEMENT_NODE = 1;
const DOCUMENT_NODE = 9;
// Includes shadow root, template, etc.
const FRAGMENT_NODE = 11;
/**
* The context node is a sparse tree over the DOM tree. Any node that needs
* to manage and compute state can be attached to the context node tree. The
* tree (mostly) automatically self-manages: the new nodes and DOM mutations
* are auto-discovered or prompted.
*
* @package
*/
export class ContextNode {
/**
* Returns the existing context node or creates a new one.
*
* @param {!Node} node
* @return {!ContextNode}
*/
static get(node) {
let contextNode = /** @type {!ContextNode|undefined} */ (node[NODE_PROP]);
if (!contextNode) {
contextNode = new ContextNode(node);
if (getMode().localDev || getMode().test) {
// The `Object.defineProperty({enumerable: false})` helps tests, but
// hurts performance. So this is only done in dev/test modes.
Object.defineProperty(node, NODE_PROP, {
value: contextNode,
writable: false,
enumerable: false,
configurable: false,
});
} else {
node[NODE_PROP] = contextNode;
}
}
return contextNode;
}
/**
* Returns the closest available context node to the one specified. If the
* `node` has the context node, it's returned unless `includeSelf` is set as
* `false`.
*
* The DOM traversal goes at most as far as the root node (document,
* shadow root, document fragment) or as far as the DOM tree allows. The
* traversal follows the assigned shadow slots.
*
* Root nodes (document or shadow root) and AMP elements are auto-created
* during the traversal.
*
* @param {!Node} node The node from which to perform the search.
* @param {boolean=} includeSelf Whether the specified node itself should
* be included in the search. Defaults to `true`.
* @return {?ContextNode}
*/
static closest(node, includeSelf = true) {
let n = node;
while (n) {
// Check if a node is a candidate to be returned.
if (n != node || includeSelf) {
if (n[NODE_PROP]) {
// Already a discovered node.
return /** @type {!ContextNode} */ (n[NODE_PROP]);
}
const {nodeType} = n;
if (nodeType == DOCUMENT_NODE || nodeType == FRAGMENT_NODE) {
// A context node is always created for a root. Due to this, a
// non-root element is always at least attached to a root. This
// allows for quick discovery and reattachment when new information
// becomes available.
return ContextNode.get(n);
}
if (nodeType == ELEMENT_NODE && startsWith(n.tagName, AMP_PREFIX)) {
// An AMP node will always have a context node backing it at some
// point.
return ContextNode.get(n);
}
}
// Navigate up the DOM tree. Notice that we do not automatically go over
// a root node boundary.
const assignedSlot =
/** @type {?Node|undefined} */ (n[ASSIGNED_SLOT_PROP]) ||
n.assignedSlot;
if (assignedSlot) {
n = assignedSlot;
} else {
n = n.parentNode;
}
}
// Only disconnected nodes will return `null` here.
return null;
}
/**
* Direct slot assignment. Works the same way as shadow slots, but does not
* require a shadow root. Automatically starts the discovery phase for the
* affected nodes.
*
* See `Element.assignedSlot` API.
*
* @param {!Node} node The target node.
* @param {!Node} slot The slot to which the target node is assigned.
*/
static assignSlot(node, slot) {
if (node[ASSIGNED_SLOT_PROP] == slot) {
return;
}
node[ASSIGNED_SLOT_PROP] = slot;
forEachContained(node, (cn) => cn.discover());
}
/**
* Unassigns the direct slot previously assigned by the `assignSlot` call.
* Automatically starts the discovery phase for the affected nodes.
*
* @param {!Node} node The target node.
* @param {!Node} slot The slot from which the target node is unassigned.
*/
static unassignSlot(node, slot) {
if (node[ASSIGNED_SLOT_PROP] != slot) {
return;
}
node[ASSIGNED_SLOT_PROP] = undefined;
forEachContained(node, (cn) => cn.discover());
}
/**
* Creates the context node and automatically starts the discovery process.
*
* @param {!Node} node
*/
constructor(node) {
/** @const {!Node} */
this.node = node;
/**
* Whether this node is a root. The Document DOM nodes are automatically
* considered as roots. But other nodes can become roots as well
* (e.g. shadow roots) via `setIsRoot()` API.
*
* @package {boolean}
*/
this.isRoot = node.nodeType == DOCUMENT_NODE;
/**
* The root context node. Always available for a DOM node connected to a
* root node after the discovery phase.
*
* @package {?ContextNode}
*/
this.root = this.isRoot ? this : null;
/**
* Parent should be mostly meaningless to most API clients, because
* it's an async concept: a parent context node can be instantiated at
* any time and it doesn't mean that this node has to change. This is
* why the API is declared as package-private. However, it needs to be
* unobfuscated to avoid cross-binary issues.
*
* @package {?ContextNode}
*/
this.parent = null;
/**
* See `parent` description.
*
* @package {?Array<!ContextNode>}
*/
this.children = null;
/** @package {!Values} */
this.values = new Values(this);
/** @private {?Map<*, !./component.Component>} */
this.components_ = null;
/** @private {boolean} */
this.parentOverridden_ = false;
/** @const @private {function()} */
this.scheduleDiscover_ = throttleTail(
this.discover_.bind(this),
setTimeout
);
this.discover();
}
/**
* Requests the discovery phase. Asynchronously finds the nearest parent for
* this node and its root. Roots and parents set directly via `setParent()`
* API are not discoverable.
*/
discover() {
if (this.isDiscoverable()) {
this.scheduleDiscover_();
}
}
/**
* @return {boolean}
* @protected Used cross-binary.
*/
isDiscoverable() {
return !this.isRoot && !this.parentOverridden_;
}
/**
* Sets (or unsets) the direct parent. If the parent is set, the node will no
* longer try to discover itself.
*
* @param {!ContextNode|!Node|null} parent
*/
setParent(parent) {
const parentContext =
parent && parent.nodeType
? ContextNode.get(/** @type {!Node} */ (parent))
: /** @type {?ContextNode} */ (parent);
this.updateTree_(parentContext, /* parentOverridden */ parent != null);
}
/**
* Designates (or undesignates) the node as a root node. If the node is
* designated as a root, it will no longer discover itself.
*
* @param {boolean} isRoot
*/
setIsRoot(isRoot) {
this.isRoot = isRoot;
const newRoot = isRoot ? this : this.parent ? this.parent.root : null;
this.updateRoot(newRoot);
}
/**
* @param {?ContextNode} root
* @protected Used cross-binary.
*/
updateRoot(root) {
devAssert(!root || root.isRoot);
const oldRoot = this.root;
if (root != oldRoot) {
// The root has changed.
this.root = root;
// Make sure the tree changes have been reflected for values.
this.values.rootUpdated();
// Make sure the tree changes have been reflected for components.
const components = this.components_;
if (components) {
components.forEach((comp) => {
comp.rootUpdated();
});
}
// Propagate the root to the subtree.
if (this.children) {
this.children.forEach((child) => child.updateRoot(root));
}
}
}
/**
* Add or update a component with a specified ID. If the component doesn't
* yet exist, it will be created using the specified factory. The use of a
* factory is important to reduce bundling costs for the context node.
*
* @param {*} id
* @param {./component.ComponentFactoryDef} factory
* @param {!Function} func
* @param {!Array<!ContextProp>} deps
* @param {*} input
*/
setComponent(id, factory, func, deps, input) {
const components = this.components_ || (this.components_ = new Map());
let comp = components.get(id);
if (!comp) {
comp = factory(id, this, func, deps);
components.set(id, comp);
}
comp.set(input);
}
/**
* Removes the component previously set with `setComponent`.
*
* @param {*} id
*/
removeComponent(id) {
const components = this.components_;
const comp = components && components.get(id);
if (comp) {
comp.dispose();
components.delete(id);
}
}
/**
* Discovers the parent and the root. Runs asynchronously via scheduler.
* @private
*/
discover_() {
if (!this.isDiscoverable()) {
// The discoverability might have changed while this task was in the
// queue.
return;
}
const parent = ContextNode.closest(this.node, /* includeSelf */ false);
this.updateTree_(parent, /* parentOverridden */ false);
}
/**
* @param {?ContextNode} parent
* @param {boolean} parentOverridden
* @private
*/
updateTree_(parent, parentOverridden) {
this.parentOverridden_ = parentOverridden;
const oldParent = this.parent;
if (parent != oldParent) {
// The parent has changed.
this.parent = parent;
// Remove from the old parent.
if (oldParent && oldParent.children) {
removeItem(oldParent.children, this);
}
// Add to the new parent.
if (parent) {
const parentChildren = parent.children || (parent.children = []);
pushIfNotExist(parentChildren, this);
// Check if this node has been inserted in between the parent and
// its other children.
// Since the new parent (`this`) is already known, this is a very
// fast operation.
for (let i = 0; i < parentChildren.length; i++) {
const child = parentChildren[i];
if (
child != this &&
child.isDiscoverable() &&
this.node.contains(child.node)
) {
child.discover();
}
}
}
this.values.parentUpdated();
}
// Check the root.
this.updateRoot(parent ? parent.root : null);
}
}
/**
* Iterates over all context nodes that are contained within the specified
* `node`. Only iterates over known context nodes.
*
* @param {!Node} node
* @param {function(!ContextNode)} callback
* @param {boolean=} includeSelf
*/
function forEachContained(node, callback, includeSelf = true) {
const closest = ContextNode.closest(node, includeSelf);
if (!closest) {
return;
}
if (closest.node == node) {
callback(closest);
} else if (closest.children) {
closest.children.forEach((child) => {
if (node.contains(child.node)) {
callback(child);
}
});
}
}
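// A minimal usage sketch of the API above (names in CAPS are hypothetical
// placeholders for real DOM nodes):
//
// const cn = ContextNode.get(AMP_ELEMENT);         // create or fetch the context node
// cn.setParent(CONTAINER_ELEMENT);                 // pin the parent; disables discovery
// cn.setIsRoot(true);                              // or designate it a root explicitly
// ContextNode.assignSlot(CHILD_NODE, SLOT_NODE);   // manual slotting, no shadow root needed
// ContextNode.closest(CHILD_NODE);                 // nearest discovered/derivable context node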
|
//from gulp-concat README.md
/*
This will concatenate files using your operating system's newline. It will take the base directory from the first file that passes through it.
*/
var gulp = require("gulp");
var concat = require('gulp-concat');
gulp.task('scripts', function() {
return gulp.src('./lib/*.js')
.pipe(concat('all.js'))
.pipe(gulp.dest('./dist/'));
});
/*
Files will be concatenated in the order that they are specified in the gulp.src function. For example, to concat ./lib/file3.js, ./lib/file1.js and ./lib/file2.js in that order, the following code will create a task to do that:
*/
gulp.task('scripts2', function() {
return gulp.src(['./lib/file3.js', './lib/file1.js', './lib/file2.js'])
.pipe(concat('all.js'))
.pipe(gulp.dest('./dist/'));
});
/*
To specify cwd, path, and other vinyl properties, gulp-concat accepts an Object as its first argument:
*/
gulp.task('scripts3', function() {
return gulp.src(['./lib/file3.js', './lib/file1.js', './lib/file2.js'])
.pipe(concat({ path: 'new.js', stat: { mode: 0666 }}))
.pipe(gulp.dest('./dist'));
});
|
/*
JTSK-320112
Problem 6.4
Digdarshan Kunwar
[email protected]
*/
#include<stdio.h>
#include<stdlib.h>
#include<string.h>
#define MAXCHAR 50
struct account{
char username[MAXCHAR];
int size;
struct account *next;
};
void dispose_list( struct account *my_list);
int main(){
FILE *fp;
int count=0;
char filename[50];
char username[50]="";   /* initialized: strcmp checks it before the first read */
char password[50];
char line[MAXCHAR];
struct account *all=NULL;
scanf("%[^\n]",filename);
getchar();
fp=fopen(filename,"r");
//Error opening the file
if (!fp){
printf("Error Opening the file.\n");
exit(0);
}
//Put the username in the linked list
while(fgets(line,MAXCHAR,fp)!=NULL){
if (count % 2==0){
struct account *newel,*current;
newel=(struct account*)malloc(sizeof(struct account));
current=all;   /* no malloc needed: current only walks the list (the old one leaked) */
line[strcspn(line,"\n")]='\0';   /* strip the trailing newline safely */
strcpy(newel->username,line);
newel->size=ftell(fp);   /* file offset of the password line that follows */
newel->next=NULL;
if (all==NULL){
all=newel;
}else{
while(current->next!=NULL){
current=current->next;
}
current->next=newel;
}
}
count++;
}
while(strcmp(username,"exit")){
//Asking the user the username and password
int status=0;
scanf("%[^\n]",username);
getchar();
if (strcmp(username,"exit")!=0){
scanf("%[^\n]",password);
getchar();
struct account *current=all;   /* walks the list; the malloc that was here leaked */
//Going throught the linked list for username search
while(current!=NULL){
if (strcmp(username,current->username)==0){
status=1;
char pass[50];
fseek(fp,current->size,SEEK_SET);
fscanf(fp,"%s",pass);
if (strcmp(pass,password)==0){
printf("Access to user %s is granted.\n",username);
}else
{
printf("Access to user %s is denied.\n",username);
}
}
current=current->next;
}
//If there is no username
if(!status){
printf("No Username Found\n");
}
}
}
printf("Exiting ...\n");
//Free memory form the linked list
dispose_list(all);
//Closing file
fclose(fp);
return 0;
}
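/*
Assumed input file layout, inferred from the count % 2 logic above: usernames
on even-numbered lines, each followed by its password on the next line, e.g.

    alice
    secret1
    bob
    hunter2

After loading, the program reads username/password pairs from stdin and
answers until the literal username "exit" is entered.
*/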
//Free the memory
void dispose_list(struct account *my_list){
struct account *nextelem;
while (my_list != NULL) {
nextelem = my_list->next;
free(my_list);
my_list = nextelem;
}
}
|
import React from 'react';
import { Divider, Drawer } from '@material-ui/core';
import { HelpOutline as HelpOutlineIcon } from '@material-ui/icons';
import theme from '../../themes/mender-theme';
import DeviceConfiguration from './eventdetails/deviceconfiguration';
import FileTransfer from './eventdetails/filetransfer';
import PortForward from './eventdetails/portforward';
import TerminalSession from './eventdetails/terminalsession';
import UserChange from './eventdetails/userchange';
const FallbackComponent = ({ item }) => {
let content = '';
try {
content = JSON.stringify(item, null, 2);
} catch (error) {
content = `error parsing the logged event:\n${error}`;
}
return (
<div className="code" style={{ whiteSpace: 'pre' }}>
{content}
</div>
);
};
const changeTypes = {
user: 'user',
device: 'device'
};
const configChangeDescriptor = {
set_configuration: 'definition',
deploy_configuration: 'deployment'
};
const mapChangeToContent = item => {
const { type } = item.object || {};
let content = { title: 'Entry details', content: FallbackComponent };
if (type === changeTypes.user) {
content = { title: `${item.action}d user`, content: UserChange };
} else if (type === changeTypes.device && item.action.includes('terminal')) {
content = { title: 'Remote session log', content: TerminalSession };
} else if (type === changeTypes.device && item.action.includes('file')) {
content = { title: 'File transfer', content: FileTransfer };
} else if (type === changeTypes.device && item.action.includes('portforward')) {
content = { title: 'Port forward', content: PortForward };
} else if (type === changeTypes.device && item.action.includes('configuration')) {
content = { title: `Device configuration ${configChangeDescriptor[item.action] || ''}`, content: DeviceConfiguration };
} else if (type === changeTypes.device) {
content = { title: 'Device change', content: FallbackComponent };
}
return content;
};
export const EventDetailsDrawer = ({ eventItem = {}, onClose, open }) => {
const { title, content: Component } = mapChangeToContent(eventItem);
return (
<Drawer className={`${eventItem ? 'fadeIn' : 'fadeOut'}`} anchor="right" open={open} onClose={onClose}>
<div className="flexbox space-between margin-top-small margin-bottom">
<b className="capitalized">{title}</b>
<HelpOutlineIcon />
</div>
<Divider />
<Component item={eventItem} onClose={onClose} />
<Divider light style={{ marginTop: theme.spacing(2) }} />
</Drawer>
);
};
export default EventDetailsDrawer;
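// Example usage; the eventItem shape is an assumption based on the mapping
// above (an object.type plus an action string), and setSelectedEvent is a
// hypothetical state setter:
//
// <EventDetailsDrawer
//   open
//   onClose={() => setSelectedEvent(undefined)}
//   eventItem={{ action: 'open_terminal', object: { type: 'device' } }}
// />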
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "iree/task/scope.h"
void iree_task_scope_initialize(iree_string_view_t name,
iree_task_scope_t* out_scope) {
IREE_TRACE_ZONE_BEGIN(z0);
memset(out_scope, 0, sizeof(*out_scope));
iree_host_size_t name_length =
iree_min(name.size, IREE_ARRAYSIZE(out_scope->name) - 1);
memcpy(out_scope->name, name.data, name_length);
out_scope->name[name_length] = 0;
// TODO(benvanik): pick trace colors based on name hash.
IREE_TRACE(out_scope->task_trace_color = 0xFFFF0000u);
iree_slim_mutex_initialize(&out_scope->mutex);
iree_notification_initialize(&out_scope->idle_notification);
IREE_TRACE_ZONE_END(z0);
}
void iree_task_scope_deinitialize(iree_task_scope_t* scope) {
IREE_TRACE_ZONE_BEGIN(z0);
IREE_ASSERT(
iree_task_scope_is_idle(scope),
"pending submissions must be aborted prior to deinitializing their "
"scope");
// Makes it easier to see if we were incorrectly using the name even after the
// scope is deinitialized. Since scopes may be stack allocated we don't want
// to have anyone trying to access them (like tracy).
memset(scope->name, 0xCD, sizeof(scope->name));
// In most cases the status will have been consumed by the scope owner.
iree_status_t status = (iree_status_t)iree_atomic_exchange_intptr(
&scope->permanent_status, (intptr_t)NULL, iree_memory_order_acquire);
IREE_IGNORE_ERROR(status);
iree_notification_deinitialize(&scope->idle_notification);
iree_slim_mutex_deinitialize(&scope->mutex);
IREE_TRACE_ZONE_END(z0);
}
iree_string_view_t iree_task_scope_name(iree_task_scope_t* scope) {
return iree_make_cstring_view(scope->name);
}
iree_task_dispatch_statistics_t iree_task_scope_consume_statistics(
iree_task_scope_t* scope) {
iree_task_dispatch_statistics_t result = scope->dispatch_statistics;
memset(&scope->dispatch_statistics, 0, sizeof(scope->dispatch_statistics));
return result;
}
iree_status_t iree_task_scope_consume_status(iree_task_scope_t* scope) {
iree_status_t old_status = iree_ok_status();
iree_status_t new_status = iree_ok_status();
while (!iree_atomic_compare_exchange_strong_intptr(
&scope->permanent_status, (intptr_t*)&old_status, (intptr_t)new_status,
iree_memory_order_seq_cst, iree_memory_order_seq_cst)) {
// Previous status was not OK; we have it now though and can try again.
new_status = iree_status_from_code(iree_status_code(old_status));
}
return old_status;
}
static void iree_task_scope_try_set_status(iree_task_scope_t* scope,
iree_status_t new_status) {
if (IREE_UNLIKELY(iree_status_is_ok(new_status))) return;
IREE_TRACE_ZONE_BEGIN(z0);
IREE_TRACE_ZONE_APPEND_TEXT(z0, "failed: ");
IREE_TRACE_ZONE_APPEND_TEXT(
z0, iree_status_code_string(iree_status_code(new_status)));
iree_status_t old_status = iree_ok_status();
if (!iree_atomic_compare_exchange_strong_intptr(
&scope->permanent_status, (intptr_t*)&old_status,
(intptr_t)new_status, iree_memory_order_seq_cst,
iree_memory_order_seq_cst)) {
// Previous status was not OK; drop our new status.
IREE_IGNORE_ERROR(new_status);
}
IREE_TRACE_ZONE_END(z0);
}
void iree_task_scope_abort(iree_task_scope_t* scope) {
iree_status_t status =
iree_make_status(IREE_STATUS_ABORTED, "entire scope aborted by user");
iree_task_scope_try_set_status(scope, status);
}
void iree_task_scope_fail(iree_task_scope_t* scope, iree_task_t* task,
iree_status_t status) {
// TODO(benvanik): logging/tracing based on task.
iree_task_scope_try_set_status(scope, status);
}
void iree_task_scope_begin(iree_task_scope_t* scope) {
iree_slim_mutex_lock(&scope->mutex);
++scope->pending_submissions;
iree_slim_mutex_unlock(&scope->mutex);
}
void iree_task_scope_end(iree_task_scope_t* scope) {
iree_slim_mutex_lock(&scope->mutex);
if (--scope->pending_submissions == 0) {
// All submissions have completed in this scope - notify any waiters.
iree_notification_post(&scope->idle_notification, IREE_ALL_WAITERS);
}
iree_slim_mutex_unlock(&scope->mutex);
}
bool iree_task_scope_is_idle(iree_task_scope_t* scope) {
iree_slim_mutex_lock(&scope->mutex);
bool is_idle = scope->pending_submissions == 0;
iree_slim_mutex_unlock(&scope->mutex);
return is_idle;
}
iree_status_t iree_task_scope_wait_idle(iree_task_scope_t* scope,
iree_time_t deadline_ns) {
IREE_TRACE_ZONE_BEGIN(z0);
iree_status_t status = iree_ok_status();
if (deadline_ns == IREE_TIME_INFINITE_PAST) {
// Polling for idle.
if (iree_task_scope_is_idle(scope)) {
status = iree_ok_status();
} else {
status = iree_status_from_code(IREE_STATUS_DEADLINE_EXCEEDED);
}
} else if (deadline_ns == IREE_TIME_INFINITE_FUTURE) {
// Wait for the scope to enter the idle state.
iree_notification_await(&scope->idle_notification,
(iree_condition_fn_t)iree_task_scope_is_idle,
scope);
} else {
// NOTE: we are currently ignoring |deadline_ns|.
// We need to support timeouts on iree_notification_t to support this.
status = iree_make_status(IREE_STATUS_UNIMPLEMENTED,
"scope-based waits do not yet support timeouts");
}
IREE_TRACE_ZONE_END(z0);
return status;
}
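// A minimal lifecycle sketch using only the functions defined above
// (error handling elided; not taken from the IREE sources):
//
//   iree_task_scope_t scope;
//   iree_task_scope_initialize(iree_make_cstring_view("my_scope"), &scope);
//   iree_task_scope_begin(&scope);  // a submission enters the scope
//   // ... tasks run and may call iree_task_scope_fail() on error ...
//   iree_task_scope_end(&scope);    // last submission posts idle_notification
//   IREE_IGNORE_ERROR(
//       iree_task_scope_wait_idle(&scope, IREE_TIME_INFINITE_FUTURE));
//   iree_task_scope_deinitialize(&scope);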
|
from string import ascii_lowercase as asc_l


def get_normalize_block_name(idx) -> str:
    ''' returns names starting from a,b,c ... z, aa, ab ... zz
    args :
        idx: index of the block name, e.g. 1:'a', 2:'b' ... 27:'aa'
        idx starts from 1
    returns:
        normalized block name with double quotes
    '''
    if idx <= 0:
        raise ValueError("idx must be >= 1")
    if idx <= 26:
        return '"' + asc_l[idx - 1] + '"'
    elif idx <= 26 + 26 * 26:
        # Two-letter names run from 27:'aa' to 702:'zz'. Using divmod on
        # (idx - 27) avoids the off-by-one the old floor(idx/26)-1 / idx%26-1
        # arithmetic had at multiples of 26 (it mapped 52 to "bz", not "az").
        first, second = divmod(idx - 27, 26)
        return '"' + asc_l[first] + asc_l[second] + '"'
    else:
        raise ValueError("idx out of range: names above 'zz' (702) are not supported")
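# Quick sanity check of the boundary cases (the old arithmetic broke at
# multiples of 26, e.g. idx 52 returned "bz" instead of "az"):
if __name__ == '__main__':
    for i in (1, 26, 27, 52, 53, 702):
        print(i, get_normalize_block_name(i))  # "a" "z" "aa" "az" "ba" "zz"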
# lst = []
# st = set()
# for i in range(1,26*26):
# a =get_normalize_block_name(i)
# if a == '"z"':
# print(i)
# if a in lst:
# print(a,i)
# lst.append(a)
# st.add(a)
# print(len(lst))
# print(len(st))
# print(26*26)
|
/*!
* Bootstrap v3.3.7 (http://getbootstrap.com)
* Copyright 2011-2016 Twitter, Inc.
* Licensed under the MIT license
*/
if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" ")[0].split(".");if(b[0]<2&&b[1]<9||1==b[0]&&9==b[1]&&b[2]<1||b[0]>3)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4")}(jQuery),+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){if(a(b.target).is(this))return b.handleObj.handler.apply(this,arguments)}})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new d(this)),"string"==typeof b&&e[b].call(c)})}var c="[data-dismiss=\"alert\"]",d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.7",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function c(){g.detach().trigger("closed.bs.alert").remove()}var e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&&f.replace(/.*(?=#[^\s]*$)/,""));var g=a("#"===f?[]:f);b&&b.preventDefault(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&&g.hasClass("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof b&&b;e||d.data("bs.button",e=new c(this,f)),"toggle"==b?e.toggle():b&&e.setState(b)})}var c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.7",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&&d.data("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c).prop(c,!0)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c).prop(c,!1))},this),0)},c.prototype.toggle=function(){var a=!0,b=this.$element.closest("[data-toggle=\"buttons\"]");if(b.length){var c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&&c.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return 
a.fn.button=d,this},a(document).on("click.bs.button.data-api","[data-toggle^=\"button\"]",function(c){var d=a(c.target).closest(".btn");b.call(d,"toggle"),a(c.target).is("input[type=\"radio\"], input[type=\"checkbox\"]")||(c.preventDefault(),d.is("input,button")?d.trigger("focus"):d.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api blur.bs.button.data-api","[data-toggle^=\"button\"]",function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof b&&b),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new c(this,f)),"number"==typeof b?e.to(b):g?e[g]():f.interval&&e.pause().cycle()})}var c=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",a.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",a.proxy(this.pause,this)).on("mouseleave.bs.carousel",a.proxy(this.cycle,this))};c.VERSION="3.3.7",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(a){if(!/input|textarea/i.test(a.target.tagName)){switch(a.which){case 37:this.prev();break;case 39:this.next();break;default:return}a.preventDefault()}},c.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(a){return this.$items=a.parent().children(".item"),this.$items.index(a||this.$active)},c.prototype.getItemForDirection=function(a,b){var c=this.getItemIndex(b),d="prev"==a&&0===c||"next"==a&&c==this.$items.length-1;if(d&&!this.options.wrap)return b;var e="prev"==a?-1:1,f=(c+e)%this.$items.length;return this.$items.eq(f)},c.prototype.to=function(a){var b=this,c=this.getItemIndex(this.$active=this.$element.find(".item.active"));if(!(a>this.$items.length-1||a<0))return this.sliding?this.$element.one("slid.bs.carousel",function(){b.to(a)}):c==a?this.pause().cycle():this.slide(a>c?"next":"prev",this.$items.eq(a))},c.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){if(!this.sliding)return this.slide("next")},c.prototype.prev=function(){if(!this.sliding)return this.slide("prev")},c.prototype.slide=function(b,d){var e=this.$element.find(".item.active"),f=d||this.getItemForDirection(b,e),g=this.interval,h="next"==b?"left":"right",i=this;if(f.hasClass("active"))return this.sliding=!1;var j=f[0],k=a.Event("slide.bs.carousel",{relatedTarget:j,direction:h});if(this.$element.trigger(k),!k.isDefaultPrevented()){if(this.sliding=!0,g&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var l=a(this.$indicators.children()[this.getItemIndex(f)]);l&&l.addClass("active")}var m=a.Event("slid.bs.carousel",{relatedTarget:j,direction:h});return 
a.support.transition&&this.$element.hasClass("slide")?(f.addClass(b),f[0].offsetWidth,e.addClass(h),f.addClass(h),e.one("bsTransitionEnd",function(){f.removeClass([b,h].join(" ")).addClass("active"),e.removeClass(["active",h].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger(m)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(e.removeClass("active"),f.addClass("active"),this.sliding=!1,this.$element.trigger(m)),g&&this.cycle(),this}};var d=a.fn.carousel;a.fn.carousel=b,a.fn.carousel.Constructor=c,a.fn.carousel.noConflict=function(){return a.fn.carousel=d,this};var e=function(c){var d,e=a(this),f=a(e.attr("data-target")||(d=e.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,""));if(f.hasClass("carousel")){var g=a.extend({},f.data(),e.data()),h=e.attr("data-slide-to");h&&(g.interval=!1),b.call(f,g),h&&f.data("bs.carousel").to(h),c.preventDefault()}};a(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),a(window).on("load",function(){a("[data-ride=\"carousel\"]").each(function(){var c=a(this);b.call(c,c.data())})})}(jQuery),+function(a){"use strict";function b(b){var c,d=b.attr("data-target")||(c=b.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"");return a(d)}function c(b){return this.each(function(){var c=a(this),e=c.data("bs.collapse"),f=a.extend({},d.DEFAULTS,c.data(),"object"==typeof b&&b);!e&&f.toggle&&/show|hide/.test(b)&&(f.toggle=!1),e||c.data("bs.collapse",e=new d(this,f)),"string"==typeof b&&e[b]()})}var d=function(b,c){this.$element=a(b),this.options=a.extend({},d.DEFAULTS,c),this.$trigger=a("[data-toggle=\"collapse\"][href=\"#"+b.id+"\"],[data-toggle=\"collapse\"][data-target=\"#"+b.id+"\"]"),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};d.VERSION="3.3.7",d.TRANSITION_DURATION=350,d.DEFAULTS={toggle:!0},d.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},d.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(b=e.data("bs.collapse"),b&&b.transitioning))){var f=a.Event("show.bs.collapse");if(this.$element.trigger(f),!f.isDefaultPrevented()){e&&e.length&&(c.call(e,"hide"),b||e.data("bs.collapse",null));var g=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[g](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var h=function(){this.$element.removeClass("collapsing").addClass("collapse in")[g](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return h.call(this);var i=a.camelCase(["scroll",g].join("-"));this.$element.one("bsTransitionEnd",a.proxy(h,this)).emulateTransitionEnd(d.TRANSITION_DURATION)[g](this.$element[0][i])}}}},d.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var e=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return a.support.transition?void 
this.$element[c](0).one("bsTransitionEnd",a.proxy(e,this)).emulateTransitionEnd(d.TRANSITION_DURATION):e.call(this)}}},d.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},d.prototype.getParent=function(){return a(this.options.parent).find("[data-toggle=\"collapse\"][data-parent=\""+this.options.parent+"\"]").each(a.proxy(function(c,d){var e=a(d);this.addAriaAndCollapsedClass(b(e),e)},this)).end()},d.prototype.addAriaAndCollapsedClass=function(a,b){var c=a.hasClass("in");a.attr("aria-expanded",c),b.toggleClass("collapsed",!c).attr("aria-expanded",c)};var e=a.fn.collapse;a.fn.collapse=c,a.fn.collapse.Constructor=d,a.fn.collapse.noConflict=function(){return a.fn.collapse=e,this},a(document).on("click.bs.collapse.data-api","[data-toggle=\"collapse\"]",function(d){var e=a(this);e.attr("data-target")||d.preventDefault();var f=b(e),g=f.data("bs.collapse"),h=g?"toggle":e.data();c.call(f,h)})}(jQuery),+function(a){"use strict";function b(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#[A-Za-z]/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}function c(c){c&&3===c.which||(a(e).remove(),a(f).each(function(){var d=a(this),e=b(d),f={relatedTarget:this};e.hasClass("open")&&(c&&"click"==c.type&&/input|textarea/i.test(c.target.tagName)&&a.contains(e[0],c.target)||(e.trigger(c=a.Event("hide.bs.dropdown",f)),c.isDefaultPrevented()||(d.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",f)))))}))}function d(b){return this.each(function(){var c=a(this),d=c.data("bs.dropdown");d||c.data("bs.dropdown",d=new g(this)),"string"==typeof b&&d[b].call(c)})}var e=".dropdown-backdrop",f="[data-toggle=\"dropdown\"]",g=function(b){a(b).on("click.bs.dropdown",this.toggle)};g.VERSION="3.3.7",g.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=b(e),g=f.hasClass("open");if(c(),!g){"ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",c);var h={relatedTarget:this};if(f.trigger(d=a.Event("show.bs.dropdown",h)),d.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),f.toggleClass("open").trigger(a.Event("shown.bs.dropdown",h))}return!1}},g.prototype.keydown=function(c){if(/(38|40|27|32)/.test(c.which)&&!/input|textarea/i.test(c.target.tagName)){var d=a(this);if(c.preventDefault(),c.stopPropagation(),!d.is(".disabled, :disabled")){var e=b(d),g=e.hasClass("open");if(!g&&27!=c.which||g&&27==c.which)return 27==c.which&&e.find(f).trigger("focus"),d.trigger("click");var h=" li:not(.disabled):visible a",i=e.find(".dropdown-menu"+h);if(i.length){var j=i.index(c.target);38==c.which&&j>0&&j--,40==c.which&&j<i.length-1&&j++,~j||(j=0),i.eq(j).trigger("focus")}}}};var h=a.fn.dropdown;a.fn.dropdown=d,a.fn.dropdown.Constructor=g,a.fn.dropdown.noConflict=function(){return a.fn.dropdown=h,this},a(document).on("click.bs.dropdown.data-api",c).on("click.bs.dropdown.data-api",".dropdown form",function(a){a.stopPropagation()}).on("click.bs.dropdown.data-api",f,g.prototype.toggle).on("keydown.bs.dropdown.data-api",f,g.prototype.keydown).on("keydown.bs.dropdown.data-api",".dropdown-menu",g.prototype.keydown)}(jQuery),+function(a){"use strict";function b(b,d){return this.each(function(){var e=a(this),f=e.data("bs.modal"),g=a.extend({},c.DEFAULTS,e.data(),"object"==typeof b&&b);f||e.data("bs.modal",f=new c(this,g)),"string"==typeof b?f[b](d):g.show&&f.show(d)})}var 
c=function(b,c){this.options=c,this.$body=a(document.body),this.$element=a(b),this.$dialog=this.$element.find(".modal-dialog"),this.$backdrop=null,this.isShown=null,this.originalBodyPad=null,this.scrollbarWidth=0,this.ignoreBackdropClick=!1,this.options.remote&&this.$element.find(".modal-content").load(this.options.remote,a.proxy(function(){this.$element.trigger("loaded.bs.modal")},this))};c.VERSION="3.3.7",c.TRANSITION_DURATION=300,c.BACKDROP_TRANSITION_DURATION=150,c.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},c.prototype.toggle=function(a){return this.isShown?this.hide():this.show(a)},c.prototype.show=function(b){var d=this,e=a.Event("show.bs.modal",{relatedTarget:b});this.$element.trigger(e),this.isShown||e.isDefaultPrevented()||(this.isShown=!0,this.checkScrollbar(),this.setScrollbar(),this.$body.addClass("modal-open"),this.escape(),this.resize(),this.$element.on("click.dismiss.bs.modal","[data-dismiss=\"modal\"]",a.proxy(this.hide,this)),this.$dialog.on("mousedown.dismiss.bs.modal",function(){d.$element.one("mouseup.dismiss.bs.modal",function(b){a(b.target).is(d.$element)&&(d.ignoreBackdropClick=!0)})}),this.backdrop(function(){var e=a.support.transition&&d.$element.hasClass("fade");d.$element.parent().length||d.$element.appendTo(d.$body),d.$element.show().scrollTop(0),d.adjustDialog(),e&&d.$element[0].offsetWidth,d.$element.addClass("in"),d.enforceFocus();var f=a.Event("shown.bs.modal",{relatedTarget:b});e?d.$dialog.one("bsTransitionEnd",function(){d.$element.trigger("focus").trigger(f)}).emulateTransitionEnd(c.TRANSITION_DURATION):d.$element.trigger("focus").trigger(f)}))},c.prototype.hide=function(b){b&&b.preventDefault(),b=a.Event("hide.bs.modal"),this.$element.trigger(b),this.isShown&&!b.isDefaultPrevented()&&(this.isShown=!1,this.escape(),this.resize(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").off("click.dismiss.bs.modal").off("mouseup.dismiss.bs.modal"),this.$dialog.off("mousedown.dismiss.bs.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one("bsTransitionEnd",a.proxy(this.hideModal,this)).emulateTransitionEnd(c.TRANSITION_DURATION):this.hideModal())},c.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(a){document===a.target||this.$element[0]===a.target||this.$element.has(a.target).length||this.$element.trigger("focus")},this))},c.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keydown.dismiss.bs.modal",a.proxy(function(a){27==a.which&&this.hide()},this)):this.isShown||this.$element.off("keydown.dismiss.bs.modal")},c.prototype.resize=function(){this.isShown?a(window).on("resize.bs.modal",a.proxy(this.handleUpdate,this)):a(window).off("resize.bs.modal")},c.prototype.hideModal=function(){var a=this;this.$element.hide(),this.backdrop(function(){a.$body.removeClass("modal-open"),a.resetAdjustments(),a.resetScrollbar(),a.$element.trigger("hidden.bs.modal")})},c.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},c.prototype.backdrop=function(b){var d=this,e=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var f=a.support.transition&&e;if(this.$backdrop=a(document.createElement("div")).addClass("modal-backdrop "+e).appendTo(this.$body),this.$element.on("click.dismiss.bs.modal",a.proxy(function(a){return 
this.ignoreBackdropClick?void(this.ignoreBackdropClick=!1):void(a.target===a.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus():this.hide()))},this)),f&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!b)return;f?this.$backdrop.one("bsTransitionEnd",b).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):b()}else if(!this.isShown&&this.$backdrop){this.$backdrop.removeClass("in");var g=function(){d.removeBackdrop(),b&&b()};a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one("bsTransitionEnd",g).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):g()}else b&&b()},c.prototype.handleUpdate=function(){this.adjustDialog()},c.prototype.adjustDialog=function(){var a=this.$element[0].scrollHeight>document.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&a?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!a?this.scrollbarWidth:""})},c.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},c.prototype.checkScrollbar=function(){var a=window.innerWidth;if(!a){var b=document.documentElement.getBoundingClientRect();a=b.right-Math.abs(b.left)}this.bodyIsOverflowing=document.body.clientWidth<a,this.scrollbarWidth=this.measureScrollbar()},c.prototype.setScrollbar=function(){var a=parseInt(this.$body.css("padding-right")||0,10);this.originalBodyPad=document.body.style.paddingRight||"",this.bodyIsOverflowing&&this.$body.css("padding-right",a+this.scrollbarWidth)},c.prototype.resetScrollbar=function(){this.$body.css("padding-right",this.originalBodyPad)},c.prototype.measureScrollbar=function(){var a=document.createElement("div");a.className="modal-scrollbar-measure",this.$body.append(a);var b=a.offsetWidth-a.clientWidth;return this.$body[0].removeChild(a),b};var d=a.fn.modal;a.fn.modal=b,a.fn.modal.Constructor=c,a.fn.modal.noConflict=function(){return a.fn.modal=d,this},a(document).on("click.bs.modal.data-api","[data-toggle=\"modal\"]",function(c){var d=a(this),e=d.attr("href"),f=a(d.attr("data-target")||e&&e.replace(/.*(?=#[^\s]+$)/,"")),g=f.data("bs.modal")?"toggle":a.extend({remote:!/#/.test(e)&&e},f.data(),d.data());d.is("a")&&c.preventDefault(),f.one("show.bs.modal",function(a){a.isDefaultPrevented()||f.one("hidden.bs.modal",function(){d.is(":visible")&&d.trigger("focus")})}),b.call(f,g,this)})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tooltip"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.tooltip",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",a,b)};c.VERSION="3.3.7",c.TRANSITION_DURATION=150,c.DEFAULTS={animation:!0,placement:"top",selector:!1,template:"<div class=\"tooltip\" role=\"tooltip\"><div class=\"tooltip-arrow\"></div><div class=\"tooltip-inner\"></div></div>",trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},c.prototype.init=function(b,c,d){if(this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d),this.$viewport=this.options.viewport&&a(a.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when 
initializing "+this.type+" on the window.document object!");for(var e=this.options.trigger.split(" "),f=e.length;f--;){var g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var h="hover"==g?"mouseenter":"focusin",i="hover"==g?"mouseleave":"focusout";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},c.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},c.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusin"==b.type?"focus":"hover"]=!0),c.tip().hasClass("in")||"in"==c.hoverState?void(c.hoverState="in"):(clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?void(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show)):c.show())},c.prototype.isInStateTrue=function(){for(var a in this.inState)if(this.inState[a])return!0;return!1},c.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);if(c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusout"==b.type?"focus":"hover"]=!1),!c.isInStateTrue())return clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?void(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide)):c.hide()},c.prototype.show=function(){var b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(b);var d=a.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(b.isDefaultPrevented()||!d)return;var e=this,f=this.tip(),g=this.getUID(this.type);this.setContent(),f.attr("id",g),this.$element.attr("aria-describedby",g),this.options.animation&&f.addClass("fade");var h="function"==typeof this.options.placement?this.options.placement.call(this,f[0],this.$element[0]):this.options.placement,i=/\s?auto?\s?/i,j=i.test(h);j&&(h=h.replace(i,"")||"top"),f.detach().css({top:0,left:0,display:"block"}).addClass(h).data("bs."+this.type,this),this.options.container?f.appendTo(this.options.container):f.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var k=this.getPosition(),l=f[0].offsetWidth,m=f[0].offsetHeight;if(j){var n=h,o=this.getPosition(this.$viewport);h="bottom"==h&&k.bottom+m>o.bottom?"top":"top"==h&&k.top-m<o.top?"bottom":"right"==h&&k.right+l>o.width?"left":"left"==h&&k.left-l<o.left?"right":h,f.removeClass(n).addClass(h)}var p=this.getCalculatedOffset(h,k,l,m);this.applyPlacement(p,h);var q=function(){var 
a=e.hoverState;e.$element.trigger("shown.bs."+e.type),e.hoverState=null,"out"==a&&e.leave(e)};a.support.transition&&this.$tip.hasClass("fade")?f.one("bsTransitionEnd",q).emulateTransitionEnd(c.TRANSITION_DURATION):q()}},c.prototype.applyPlacement=function(b,c){var d=this.tip(),e=d[0].offsetWidth,f=d[0].offsetHeight,g=parseInt(d.css("margin-top"),10),h=parseInt(d.css("margin-left"),10);isNaN(g)&&(g=0),isNaN(h)&&(h=0),b.top+=g,b.left+=h,a.offset.setOffset(d[0],a.extend({using:function(a){d.css({top:Math.round(a.top),left:Math.round(a.left)})}},b),0),d.addClass("in");var i=d[0].offsetWidth,j=d[0].offsetHeight;"top"==c&&j!=f&&(b.top=b.top+f-j);var k=this.getViewportAdjustedDelta(c,b,i,j);k.left?b.left+=k.left:b.top+=k.top;var l=/top|bottom/.test(c),m=l?2*k.left-e+i:2*k.top-f+j,n=l?"offsetWidth":"offsetHeight";d.offset(b),this.replaceArrow(m,d[0][n],l)},c.prototype.replaceArrow=function(a,b,c){this.arrow().css(c?"left":"top",50*(1-a/b)+"%").css(c?"top":"left","")},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle();a.find(".tooltip-inner")[this.options.html?"html":"text"](b),a.removeClass("fade in top bottom left right")},c.prototype.hide=function(b){function d(){"in"!=e.hoverState&&f.detach(),e.$element&&e.$element.removeAttr("aria-describedby").trigger("hidden.bs."+e.type),b&&b()}var e=this,f=a(this.$tip),g=a.Event("hide.bs."+this.type);if(this.$element.trigger(g),!g.isDefaultPrevented())return f.removeClass("in"),a.support.transition&&f.hasClass("fade")?f.one("bsTransitionEnd",d).emulateTransitionEnd(c.TRANSITION_DURATION):d(),this.hoverState=null,this},c.prototype.fixTitle=function(){var a=this.$element;(a.attr("title")||"string"!=typeof a.attr("data-original-title"))&&a.attr("data-original-title",a.attr("title")||"").attr("title","")},c.prototype.hasContent=function(){return this.getTitle()},c.prototype.getPosition=function(b){b=b||this.$element;var c=b[0],d="BODY"==c.tagName,e=c.getBoundingClientRect();null==e.width&&(e=a.extend({},e,{width:e.right-e.left,height:e.bottom-e.top}));var f=window.SVGElement&&c instanceof window.SVGElement,g=d?{top:0,left:0}:f?null:b.offset(),h={scroll:d?document.documentElement.scrollTop||document.body.scrollTop:b.scrollTop()},i=d?{width:a(window).width(),height:a(window).height()}:null;return a.extend({},e,h,i,g)},c.prototype.getCalculatedOffset=function(a,b,c,d){return"bottom"==a?{top:b.top+b.height,left:b.left+b.width/2-c/2}:"top"==a?{top:b.top-d,left:b.left+b.width/2-c/2}:"left"==a?{top:b.top+b.height/2-d/2,left:b.left-c}:{top:b.top+b.height/2-d/2,left:b.left+b.width}},c.prototype.getViewportAdjustedDelta=function(a,b,c,d){var e={top:0,left:0};if(!this.$viewport)return e;var f=this.options.viewport&&this.options.viewport.padding||0,g=this.getPosition(this.$viewport);if(/right|left/.test(a)){var h=b.top-f-g.scroll,i=b.top+f-g.scroll+d;h<g.top?e.top=g.top-h:i>g.top+g.height&&(e.top=g.top+g.height-i)}else{var j=b.left-f,k=b.left+f+c;j<g.left?e.left=g.left-j:k>g.right&&(e.left=g.left+g.width-k)}return e},c.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},c.prototype.getUID=function(a){do a+=~~(1e6*Math.random());while(document.getElementById(a));return a},c.prototype.tip=function(){if(!this.$tip&&(this.$tip=a(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},c.prototype.arrow=function(){return 
this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},c.prototype.enable=function(){this.enabled=!0},c.prototype.disable=function(){this.enabled=!1},c.prototype.toggleEnabled=function(){this.enabled=!this.enabled},c.prototype.toggle=function(b){var c=this;b&&(c=a(b.currentTarget).data("bs."+this.type),c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c))),b?(c.inState.click=!c.inState.click,c.isInStateTrue()?c.enter(c):c.leave(c)):c.tip().hasClass("in")?c.leave(c):c.enter(c)},c.prototype.destroy=function(){var a=this;clearTimeout(this.timeout),this.hide(function(){a.$element.off("."+a.type).removeData("bs."+a.type),a.$tip&&a.$tip.detach(),a.$tip=null,a.$arrow=null,a.$viewport=null,a.$element=null})};var d=a.fn.tooltip;a.fn.tooltip=b,a.fn.tooltip.Constructor=c,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=d,this}}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.popover",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires tooltip.js");c.VERSION="3.3.7",c.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:"<div class=\"popover\" role=\"tooltip\"><div class=\"arrow\"></div><h3 class=\"popover-title\"></h3><div class=\"popover-content\"></div></div>"}),c.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),c.prototype.constructor=c,c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof c?"html":"append":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},c.prototype.hasContent=function(){return this.getTitle()||this.getContent()},c.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var d=a.fn.popover;a.fn.popover=b,a.fn.popover.Constructor=c,a.fn.popover.noConflict=function(){return a.fn.popover=d,this}}(jQuery),+function(a){"use strict";function b(c,d){this.$body=a(document.body),this.$scrollElement=a(a(c).is(document.body)?window:c),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",a.proxy(this.process,this)),this.refresh(),this.process()}function c(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})}b.VERSION="3.3.7",b.DEFAULTS={offset:10},b.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},b.prototype.refresh=function(){var b=this,c="offset",d=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),a.isWindow(this.$scrollElement[0])||(c="position",d=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var 
b=a(this),e=b.data("target")||b.attr("href"),f=/^#./.test(e)&&a(e);return f&&f.length&&f.is(":visible")&&[[f[c]().top+d,e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){b.offsets.push(this[0]),b.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.getScrollHeight(),d=this.options.offset+c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(this.scrollHeight!=c&&this.refresh(),b>=d)return g!=(a=f[f.length-1])&&this.activate(a);if(g&&b<e[0])return this.activeTarget=null,this.clear();for(a=e.length;a--;)g!=f[a]&&b>=e[a]&&(void 0===e[a+1]||b<e[a+1])&&this.activate(f[a])},b.prototype.activate=function(b){
this.activeTarget=b,this.clear();var c=this.selector+"[data-target=\""+b+"\"],"+this.selector+"[href=\""+b+"\"]",d=a(c).parents("li").addClass("active");d.parent(".dropdown-menu").length&&(d=d.closest("li.dropdown").addClass("active")),d.trigger("activate.bs.scrollspy")},b.prototype.clear=function(){a(this.selector).parentsUntil(this.options.target,".active").removeClass("active")};var d=a.fn.scrollspy;a.fn.scrollspy=c,a.fn.scrollspy.Constructor=b,a.fn.scrollspy.noConflict=function(){return a.fn.scrollspy=d,this},a(window).on("load.bs.scrollspy.data-api",function(){a("[data-spy=\"scroll\"]").each(function(){var b=a(this);c.call(b,b.data())})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tab");e||d.data("bs.tab",e=new c(this)),"string"==typeof b&&e[b]()})}var c=function(b){this.element=a(b)};c.VERSION="3.3.7",c.TRANSITION_DURATION=150,c.prototype.show=function(){var b=this.element,c=b.closest("ul:not(.dropdown-menu)"),d=b.data("target");if(d||(d=b.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,"")),!b.parent("li").hasClass("active")){var e=c.find(".active:last a"),f=a.Event("hide.bs.tab",{relatedTarget:b[0]}),g=a.Event("show.bs.tab",{relatedTarget:e[0]});if(e.trigger(f),b.trigger(g),!g.isDefaultPrevented()&&!f.isDefaultPrevented()){var h=a(d);this.activate(b.closest("li"),c),this.activate(h,h.parent(),function(){e.trigger({type:"hidden.bs.tab",relatedTarget:b[0]}),b.trigger({type:"shown.bs.tab",relatedTarget:e[0]})})}}},c.prototype.activate=function(b,d,e){function f(){g.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find("[data-toggle=\"tab\"]").attr("aria-expanded",!1),b.addClass("active").find("[data-toggle=\"tab\"]").attr("aria-expanded",!0),h?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu").length&&b.closest("li.dropdown").addClass("active").end().find("[data-toggle=\"tab\"]").attr("aria-expanded",!0),e&&e()}var g=d.find("> .active"),h=e&&a.support.transition&&(g.length&&g.hasClass("fade")||!!d.find("> .fade").length);g.length&&h?g.one("bsTransitionEnd",f).emulateTransitionEnd(c.TRANSITION_DURATION):f(),g.removeClass("in")};var d=a.fn.tab;a.fn.tab=b,a.fn.tab.Constructor=c,a.fn.tab.noConflict=function(){return a.fn.tab=d,this};var e=function(c){c.preventDefault(),b.call(a(this),"show")};a(document).on("click.bs.tab.data-api","[data-toggle=\"tab\"]",e).on("click.bs.tab.data-api","[data-toggle=\"pill\"]",e)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof b&&b;e||d.data("bs.affix",e=new c(this,f)),"string"==typeof b&&e[b]()})}var c=function(b,d){this.options=a.extend({},c.DEFAULTS,d),this.$target=a(this.options.target).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(b),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};c.VERSION="3.3.7",c.RESET="affix affix-top affix-bottom",c.DEFAULTS={offset:0,target:window},c.prototype.getState=function(a,b,c,d){var e=this.$target.scrollTop(),f=this.$element.offset(),g=this.$target.height();if(null!=c&&"top"==this.affixed)return e<c&&"top";if("bottom"==this.affixed)return null!=c?!(e+this.unpin<=f.top)&&"bottom":!(e+g<=a-d)&&"bottom";var h=null==this.affixed,i=h?e:f.top,j=h?g:b;return null!=c&&e<=c?"top":null!=d&&i+j>=a-d&&"bottom"},c.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return 
this.pinnedOffset;this.$element.removeClass(c.RESET).addClass("affix");var a=this.$target.scrollTop(),b=this.$element.offset();return this.pinnedOffset=b.top-a},c.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},c.prototype.checkPosition=function(){if(this.$element.is(":visible")){var b=this.$element.height(),d=this.options.offset,e=d.top,f=d.bottom,g=Math.max(a(document).height(),a(document.body).height());"object"!=typeof d&&(f=e=d),"function"==typeof e&&(e=d.top(this.$element)),"function"==typeof f&&(f=d.bottom(this.$element));var h=this.getState(g,b,e,f);if(this.affixed!=h){null!=this.unpin&&this.$element.css("top","");var i="affix"+(h?"-"+h:""),j=a.Event(i+".bs.affix");if(this.$element.trigger(j),j.isDefaultPrevented())return;this.affixed=h,this.unpin="bottom"==h?this.getPinnedOffset():null,this.$element.removeClass(c.RESET).addClass(i).trigger(i.replace("affix","affixed")+".bs.affix")}"bottom"==h&&this.$element.offset({top:g-b-f})}};var d=a.fn.affix;a.fn.affix=b,a.fn.affix.Constructor=c,a.fn.affix.noConflict=function(){return a.fn.affix=d,this},a(window).on("load",function(){a("[data-spy=\"affix\"]").each(function(){var c=a(this),d=c.data();d.offset=d.offset||{},null!=d.offsetBottom&&(d.offset.bottom=d.offsetBottom),null!=d.offsetTop&&(d.offset.top=d.offsetTop),b.call(c,d)})})}(jQuery); |
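// Usage sketch for the minified Bootstrap 3.3.7 plugin suite above (tooltip,
// popover, scrollspy, tab, affix). Tooltips and popovers are opt-in and must
// be initialized explicitly; the selectors below are the documented data-api
// hooks, the option values are illustrative.
$(function () {
  $('[data-toggle="tooltip"]').tooltip({ container: 'body' });
  $('[data-toggle="popover"]').popover({ trigger: 'hover' });
});
 |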
import fs from "fs";
import matter from "gray-matter";
import path from "path";
import remark from "remark";
import html from "remark-html";
const postsDirectory = path.join(process.cwd(), "posts");
export function getSortedPostsData() {
// Get file names under /posts
const fileNames = fs.readdirSync(postsDirectory);
const allPostsData = fileNames.map((fileName) => {
// Remove ".md" from file name to get id
const id = fileName.replace(/\.md$/, "");
// Read markdown file as string
const fullPath = path.join(postsDirectory, fileName);
const fileContents = fs.readFileSync(fullPath, "utf8");
// Use gray-matter to parse the post metadata section
const matterResult = matter(fileContents);
// Combine the data with the id
return {
id,
...matterResult.data,
};
});
// Sort posts by date
return allPostsData.sort((a, b) => {
if (a.date < b.date) {
return 1;
} else {
return -1;
}
});
}
export function getAllPostIds() {
const fileNames = fs.readdirSync(postsDirectory);
return fileNames.map((fileName) => {
return {
params: {
id: fileName.replace(/\.md$/, ""),
},
};
});
}
export async function getPostData(id) {
const fullPath = path.join(postsDirectory, `${id}.md`);
const fileContents = fs.readFileSync(fullPath, "utf8");
// Use gray-matter to parse the post metadata section
const matterResult = matter(fileContents);
// Use remark to convert markdown into HTML string
const processedContent = await remark()
.use(html)
.process(matterResult.content);
const contentHtml = processedContent.toString();
// Combine the data with the id and contentHtml
return {
id,
contentHtml,
...matterResult.data,
};
}
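// --- Usage sketch: a hypothetical pages/posts/[id].js consumer of the ---
// --- helpers above; the route and prop names are assumptions.         ---
// In a real page these would be imported:
// import { getAllPostIds, getPostData } from '../../lib/posts';
export async function getStaticPaths() {
  // Pre-render one route per markdown file in /posts.
  return { paths: getAllPostIds(), fallback: false };
}
export async function getStaticProps({ params }) {
  // params.id corresponds to the [id] dynamic route segment.
  const postData = await getPostData(params.id);
  return { props: { postData } };
}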
|
import React from 'react'
import styled from 'styled-components'
import { Container, Flex } from '../../styles/globalStyles'
// HomeAboutSection and About are referenced below but were never imported or
// defined; minimal styled-components so the component actually renders.
const HomeAboutSection = styled.section`
  margin-top: 10rem;
`
const About = styled.div`
  width: 100%;
`
const HomeAbout = () => {
return (
<HomeAboutSection>
<Container>
<Flex>
<About>
<h2>
Furrow is an integrated, full-service creative studio offering
video production, creative development, and post-production
services.
</h2>
</About>
</Flex>
</Container>
</HomeAboutSection>
)
}
export default HomeAbout
|
from torch.utils import data
from networks.pspnet_combine import Res_pspnet, BasicBlock, Bottleneck
from networks.evaluate import evaluate_main
from dataset.datasets import CSDataTestSet
from utils.train_options import TrainOptionsForTest
import torch
if __name__ == '__main__':
args = TrainOptionsForTest().initialize()
testloader = data.DataLoader(
CSDataTestSet(args.data_dir, './dataset/list/cityscapes/test.lst', crop_size=(1024, 2048)),
batch_size=1, shuffle=False, pin_memory=True
)
    # Student network: ResNet-18-style PSPNet (BasicBlock, depths [2, 2, 2, 2]).
    student = Res_pspnet(BasicBlock, [2, 2, 2, 2], num_classes=19)
    student.load_state_dict(torch.load(args.resume_from))
    # Evaluate on the Cityscapes test split (19 classes).
    evaluate_main(student, testloader, '512,512', 19, True, type='test')
|
# -*- coding: utf-8 -*-
# Import python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
class StdTest(ModuleCase):
'''
Test standard client calls
'''
def test_cli(self):
'''
Test cli function
'''
cmd_iter = self.client.cmd_cli(
'minion',
'test.arg',
['foo', 'bar', 'baz'],
kwarg={'qux': 'quux'}
)
for ret in cmd_iter:
data = ret['minion']['ret']
self.assertEqual(data['args'], ['foo', 'bar', 'baz'])
self.assertEqual(data['kwargs']['qux'], 'quux')
def test_iter(self):
'''
test cmd_iter
'''
cmd_iter = self.client.cmd_iter(
'minion',
'test.arg',
['foo', 'bar', 'baz'],
kwarg={'qux': 'quux'}
)
for ret in cmd_iter:
data = ret['minion']['ret']
self.assertEqual(data['args'], ['foo', 'bar', 'baz'])
self.assertEqual(data['kwargs']['qux'], 'quux')
def test_iter_no_block(self):
'''
test cmd_iter_no_block
'''
cmd_iter = self.client.cmd_iter_no_block(
'minion',
'test.arg',
['foo', 'bar', 'baz'],
kwarg={'qux': 'quux'}
)
for ret in cmd_iter:
if ret is None:
continue
data = ret['minion']['ret']
self.assertEqual(data['args'], ['foo', 'bar', 'baz'])
self.assertEqual(data['kwargs']['qux'], 'quux')
def test_full_returns(self):
'''
        test cmd_full_return
'''
ret = self.client.cmd_full_return(
'minion',
'test.arg',
['foo', 'bar', 'baz'],
kwarg={'qux': 'quux'}
)
data = ret['minion']['ret']
self.assertEqual(data['args'], ['foo', 'bar', 'baz'])
self.assertEqual(data['kwargs']['qux'], 'quux')
def test_kwarg_type(self):
'''
Test that kwargs end up on the client as the same type
'''
terrible_yaml_string = 'foo: ""\n# \''
ret = self.client.cmd_full_return(
'minion',
'test.arg_type',
['a', 1],
kwarg={'outer': {'a': terrible_yaml_string},
'inner': 'value'}
)
data = ret['minion']['ret']
self.assertIn('str', data['args'][0])
self.assertIn('int', data['args'][1])
self.assertIn('dict', data['kwargs']['outer'])
self.assertIn('str', data['kwargs']['inner'])
def test_full_return_kwarg(self):
ret = self.client.cmd('minion', 'test.ping', full_return=True)
for mid, data in ret.items():
self.assertIn('retcode', data)
|
from nose.tools import assert_equal
from pyecharts.commons import utils
def test_utils_produce_require_dict():
cfg = utils.produce_require_dict(utils.OrderedSet("echarts"), "https://example.com")
assert_equal(cfg["config_items"], ["'echarts':'https://example.comecharts.min'"])
assert_equal(cfg["libraries"], ["'echarts'"])
def test_js_code():
fn = "function() { console.log('test_js_code') }"
js_code = utils.JsCode(fn)
assert_equal(js_code.js_code, "--x_x--0_0--{}--x_x--0_0--".format(fn))
def test_ordered_set():
s = utils.OrderedSet()
s.add("a", "b", "c")
assert_equal(s.items, ["a", "b", "c"])
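# For context, JsCode lets raw JavaScript (e.g. a label formatter) survive
# pyecharts' JSON serialization; the marker pair asserted in test_js_code is
# stripped again at render time. A minimal usage sketch, guarded so this test
# module stays import-safe (the chart data is made up):
if __name__ == "__main__":
    from pyecharts import options as opts
    from pyecharts.charts import Bar

    bar = (
        Bar()
        .add_xaxis(["a", "b", "c"])
        .add_yaxis(
            "series",
            [1, 2, 3],
            label_opts=opts.LabelOpts(formatter=utils.JsCode("function(x){ return x.value; }")),
        )
    )
    bar.render("bar.html")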
|
#
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from datetime import datetime, timedelta
from typing import Any, Tuple, Union, Sequence, Dict, Optional, TYPE_CHECKING
from azure.core.exceptions import HttpResponseError
from azure.core.tracing.decorator_async import distributed_trace_async
from .._generated.aio._monitor_query_client import MonitorQueryClient
from .._generated.models import BatchRequest, QueryBody as LogsQueryBody
from .._helpers import process_error, construct_iso8601, order_results
from .._models import LogsQueryResult, LogsBatchQuery, LogsBatchQueryResult
from ._helpers_asyc import get_authentication_policy
if TYPE_CHECKING:
from azure.core.credentials_async import AsyncTokenCredential
class LogsQueryClient(object):
"""LogsQueryClient
:param credential: The credential to authenticate the client
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:keyword endpoint: The endpoint to connect to. Defaults to 'https://api.loganalytics.io/v1'.
:paramtype endpoint: str
"""
def __init__(self, credential: "AsyncTokenCredential", **kwargs: Any) -> None:
self._endpoint = kwargs.pop('endpoint', 'https://api.loganalytics.io/v1')
self._client = MonitorQueryClient(
credential=credential,
authentication_policy=get_authentication_policy(credential),
base_url=self._endpoint,
**kwargs
)
self._query_op = self._client.query
@distributed_trace_async
async def query(
self,
workspace_id: str,
query: str,
timespan: Optional[Union[timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]]] = None,
**kwargs: Any) -> LogsQueryResult:
"""Execute an Analytics query.
Executes an Analytics query for data.
:param workspace_id: ID of the workspace. This is Workspace ID from the Properties blade in the
Azure portal.
:type workspace_id: str
:param query: The Analytics query. Learn more about the `Analytics query syntax
<https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/>`_.
:type query: str
:param timespan: The timespan for which to query the data. This can be a timedelta,
a timedelta and a start datetime, or a start datetime/end datetime.
:type timespan: ~datetime.timedelta or tuple[~datetime.datetime, ~datetime.timedelta]
or tuple[~datetime.datetime, ~datetime.datetime]
:keyword int server_timeout: the server timeout. The default timeout is 3 minutes,
and the maximum timeout is 10 minutes.
:keyword bool include_statistics: To get information about query statistics.
:keyword bool include_visualization: In the query language, it is possible to specify different
visualization options. By default, the API does not return information regarding the type of
         visualization to show. If your client requires this information, specify the preference.
:keyword additional_workspaces: A list of workspaces that are included in the query.
         These can be qualified workspace names, workspace IDs, or Azure resource IDs.
:paramtype additional_workspaces: list[str]
:return: QueryResults, or the result of cls(response)
:rtype: ~azure.monitor.query.LogsQueryResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
timespan = construct_iso8601(timespan)
include_statistics = kwargs.pop("include_statistics", False)
include_visualization = kwargs.pop("include_visualization", False)
server_timeout = kwargs.pop("server_timeout", None)
additional_workspaces = kwargs.pop("additional_workspaces", None)
prefer = ""
if server_timeout:
prefer += "wait=" + str(server_timeout)
if include_statistics:
if len(prefer) > 0:
prefer += ","
prefer += "include-statistics=true"
if include_visualization:
if len(prefer) > 0:
prefer += ","
prefer += "include-render=true"
body = LogsQueryBody(
query=query,
timespan=timespan,
workspaces=additional_workspaces,
**kwargs
)
try:
return LogsQueryResult._from_generated(await self._query_op.execute( # pylint: disable=protected-access
workspace_id=workspace_id,
body=body,
prefer=prefer,
**kwargs
))
except HttpResponseError as e:
process_error(e)
@distributed_trace_async
async def query_batch(
self,
queries: Union[Sequence[Dict], Sequence[LogsBatchQuery]],
**kwargs: Any
) -> Sequence[LogsBatchQueryResult]:
"""Execute a list of analytics queries. Each request can be either a LogQueryRequest
object or an equivalent serialized model.
The response is returned in the same order as that of the requests sent.
:param queries: The list of queries that should be processed
:type queries: list[dict] or list[~azure.monitor.query.LogsBatchQuery]
:return: list of LogsBatchQueryResult objects, or the result of cls(response)
:rtype: ~list[~azure.monitor.query.LogsBatchQueryResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
try:
queries = [LogsBatchQuery(**q) for q in queries]
except (KeyError, TypeError):
pass
queries = [q._to_generated() for q in queries] # pylint: disable=protected-access
try:
request_order = [req.id for req in queries]
except AttributeError:
request_order = [req['id'] for req in queries]
batch = BatchRequest(requests=queries)
generated = await self._query_op.batch(batch, **kwargs)
return order_results(
request_order,
[
LogsBatchQueryResult._from_generated(rsp) for rsp in generated.responses # pylint: disable=protected-access
])
async def __aenter__(self) -> "LogsQueryClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *args: "Any") -> None:
await self._client.__aexit__(*args)
async def close(self) -> None:
"""Close the :class:`~azure.monitor.query.aio.LogsQueryClient` session."""
await self._client.__aexit__()
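# Usage sketch (hypothetical; the workspace ID and KQL query below are
# placeholders). Guarded so importing this module stays side-effect free.
if __name__ == "__main__":
    import asyncio
    from azure.identity.aio import DefaultAzureCredential

    async def _demo() -> None:
        credential = DefaultAzureCredential()
        async with LogsQueryClient(credential) as client:
            result = await client.query(
                "<workspace-id>",
                "AppRequests | take 5",
                timespan=timedelta(hours=1),
            )
            print(result)
        await credential.close()

    asyncio.run(_demo())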
|
import time
import os
import csv
import airsim
from tensorflow.keras.models import load_model as tf_load_model
import numpy as np
MIN_ALTITUDE = 10
MAX_ALTITUDE = 15
NUM_EPISODES = 100
# Load the pre-trained landing model.
def load_trained_model():
loaded_model = tf_load_model("./models/modelVerticalNN_2.h5")
print("Model restored.")
return loaded_model
def save_on_file(state, action):
    # Append one (state, action) sample to the landing dataset.
    try:
        with open("data/landingData.csv", "a") as dataF:
            spamwriter = csv.writer(dataF, delimiter=',',
                                    quotechar='|', quoting=csv.QUOTE_MINIMAL)
            spamwriter.writerow([state, action])
    except IOError:
        print("Could not write data/landingData.csv.")
def go_back(client, home):
    # Return above the home position at a random altitude for the next episode.
    client.moveToPositionAsync(home.x_val, home.y_val, -1 * np.random.randint(MIN_ALTITUDE, MAX_ALTITUDE + 1), 5).join()
    time.sleep(0.5)
    # Zero-velocity command so the drone settles before descending.
    client.moveByVelocityAsync(0, 0, -0.0, 5).join()
def interpret_action(action):
    # Map a discrete action index to a (vx, vy, vz, trigger) tuple.
    if action == 0:
        quad_offset = (0, 0, 1, 0)
    elif action == 1:
        quad_offset = (0, 0, 0.3, 0)
    elif action == 2:
        quad_offset = (0, 0, 2, 0)
    elif action == 3:
        quad_offset = (0, 0, 0, 1)  # zero velocity, trigger landing
    elif action == 4:
        quad_offset = (0, 0, 0.1, 0)
    else:
        # Previously an unknown index left quad_offset unbound (UnboundLocalError).
        raise ValueError("unknown action index: %s" % action)
    return quad_offset
def get_action_data(curr_state):
if curr_state <= 0.1:
action_index = 3
elif curr_state >= 4:
action_index = 2
elif curr_state >= 1.8:
action_index = 0
elif curr_state < 1.8 and curr_state >= 0.7:
action_index = 1
else:
action_index = 4
return action_index
def get_action_test(curr_state):
actions_index = my_model.predict(np.array([[curr_state]]))[0][0]
print("Actions: ",actions_index)
return round(actions_index)
# Load model if you are testing
my_model = load_trained_model()
# init drone
client = airsim.MultirotorClient()
client.confirmConnection()
client.enableApiControl(True)
client.armDisarm(True)
# Record the home position so each episode can return to it.
home = client.getMultirotorState().kinematics_estimated.position
cnt = 1
while cnt <= NUM_EPISODES:
print("### NEW EPISODE: ",cnt)
go_back(client, home)
curr_state = 10.0
while curr_state > 0.1:
curr_state = np.abs(client.getLidarData().pose.position.z_val)
## Test
action_index = get_action_test(curr_state)
#take data
#action_index = get_action_data(curr_state)
next_action = interpret_action(action_index)
new_vel_x = next_action[0]
new_vel_y = next_action[1]
new_vel_z = next_action[2]
trigger = next_action[3]
print(" ====== moving at (" + str(new_vel_x) + " " + str(new_vel_y) + " " + str(new_vel_z) + "), trigger ",trigger)
client.moveByVelocityAsync(new_vel_x, new_vel_y, new_vel_z, 1).join()
# Test -- Not working very well
new_state = np.abs(client.getLidarData().pose.position.z_val)
if new_state <= 0.1:
if trigger:
print("Landed.")
break
elif new_vel_z <= 0.3:
print("Moving near ground.")
else:
print("Collision.")
break
elif new_state > 0.1 and trigger:
print("Error, not landed.")
break
time.sleep(0.01)
#save_on_file(curr_state, action_index)
cnt += 1
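# The script above assumes "./models/modelVerticalNN_2.h5" maps one altitude
# reading to one action index. A hypothetical training sketch for a compatible
# model follows; it is defined but never called, and the architecture and
# synthetic data are assumptions -- only the 1-in/1-out shape is implied by
# get_action_test().
def train_compatible_model():
    from tensorflow.keras.models import Sequential
    from tensorflow.keras.layers import Dense

    # Label random altitudes with the rule-based policy in get_action_data().
    states = np.random.uniform(0.0, 10.0, size=(1000, 1))
    actions = np.array([[get_action_data(s)] for s in states[:, 0]])
    model = Sequential([
        Dense(16, activation="relu", input_shape=(1,)),
        Dense(16, activation="relu"),
        Dense(1),  # regression over the action index, rounded at inference
    ])
    model.compile(optimizer="adam", loss="mse")
    model.fit(states, actions, epochs=20, verbose=0)
    model.save("./models/modelVerticalNN_2.h5")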
|
/*!
* jQuery JavaScript Library v2.0.2 -sizzle,-css,-ajax,-ajax/script,-ajax/jsonp,-ajax/xhr,-effects,-offset,-dimensions
* http://jquery.com/
*
* Includes Sizzle.js
* http://sizzlejs.com/
*
* Copyright 2005, 2013 jQuery Foundation, Inc. and other contributors
* Released under the MIT license
* http://jquery.org/license
*
* Date: 2013-06-03T15:27Z
*/
(function( window, undefined ) {
// Can't do this because several apps including ASP.NET trace
// the stack via arguments.caller.callee and Firefox dies if
// you try to trace through "use strict" call chains. (#13335)
// Support: Firefox 18+
//"use strict";
var
// A central reference to the root jQuery(document)
rootjQuery,
// The deferred used on DOM ready
readyList,
// Support: IE9
// For `typeof xmlNode.method` instead of `xmlNode.method !== undefined`
core_strundefined = typeof undefined,
	// Use the correct document according to the window argument (sandbox)
location = window.location,
document = window.document,
docElem = document.documentElement,
// Map over jQuery in case of overwrite
_jQuery = window.jQuery,
// Map over the $ in case of overwrite
_$ = window.$,
// [[Class]] -> type pairs
class2type = {},
// List of deleted data cache ids, so we can reuse them
core_deletedIds = [],
core_version = "2.0.2 -sizzle,-css,-ajax,-ajax/script,-ajax/jsonp,-ajax/xhr,-effects,-offset,-dimensions",
// Save a reference to some core methods
core_concat = core_deletedIds.concat,
core_push = core_deletedIds.push,
core_slice = core_deletedIds.slice,
core_indexOf = core_deletedIds.indexOf,
core_toString = class2type.toString,
core_hasOwn = class2type.hasOwnProperty,
core_trim = core_version.trim,
// Define a local copy of jQuery
jQuery = function( selector, context ) {
// The jQuery object is actually just the init constructor 'enhanced'
return new jQuery.fn.init( selector, context, rootjQuery );
},
// Used for matching numbers
core_pnum = /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,
// Used for splitting on whitespace
core_rnotwhite = /\S+/g,
// A simple way to check for HTML strings
// Prioritize #id over <tag> to avoid XSS via location.hash (#9521)
// Strict HTML recognition (#11290: must start with <)
rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,
// Match a standalone tag
rsingleTag = /^<(\w+)\s*\/?>(?:<\/\1>|)$/,
// Matches dashed string for camelizing
rmsPrefix = /^-ms-/,
rdashAlpha = /-([\da-z])/gi,
// Used by jQuery.camelCase as callback to replace()
fcamelCase = function( all, letter ) {
return letter.toUpperCase();
},
// The ready event handler and self cleanup method
completed = function() {
document.removeEventListener( "DOMContentLoaded", completed, false );
window.removeEventListener( "load", completed, false );
jQuery.ready();
};
jQuery.fn = jQuery.prototype = {
// The current version of jQuery being used
jquery: core_version,
constructor: jQuery,
init: function( selector, context, rootjQuery ) {
var match, elem;
// HANDLE: $(""), $(null), $(undefined), $(false)
if ( !selector ) {
return this;
}
// Handle HTML strings
if ( typeof selector === "string" ) {
if ( selector.charAt(0) === "<" && selector.charAt( selector.length - 1 ) === ">" && selector.length >= 3 ) {
// Assume that strings that start and end with <> are HTML and skip the regex check
match = [ null, selector, null ];
} else {
match = rquickExpr.exec( selector );
}
// Match html or make sure no context is specified for #id
if ( match && (match[1] || !context) ) {
// HANDLE: $(html) -> $(array)
if ( match[1] ) {
context = context instanceof jQuery ? context[0] : context;
// scripts is true for back-compat
jQuery.merge( this, jQuery.parseHTML(
match[1],
context && context.nodeType ? context.ownerDocument || context : document,
true
) );
// HANDLE: $(html, props)
if ( rsingleTag.test( match[1] ) && jQuery.isPlainObject( context ) ) {
for ( match in context ) {
// Properties of context are called as methods if possible
if ( jQuery.isFunction( this[ match ] ) ) {
this[ match ]( context[ match ] );
// ...and otherwise set as attributes
} else {
this.attr( match, context[ match ] );
}
}
}
return this;
// HANDLE: $(#id)
} else {
elem = document.getElementById( match[2] );
// Check parentNode to catch when Blackberry 4.6 returns
// nodes that are no longer in the document #6963
if ( elem && elem.parentNode ) {
// Inject the element directly into the jQuery object
this.length = 1;
this[0] = elem;
}
this.context = document;
this.selector = selector;
return this;
}
// HANDLE: $(expr, $(...))
} else if ( !context || context.jquery ) {
return ( context || rootjQuery ).find( selector );
// HANDLE: $(expr, context)
// (which is just equivalent to: $(context).find(expr)
} else {
return this.constructor( context ).find( selector );
}
// HANDLE: $(DOMElement)
} else if ( selector.nodeType ) {
this.context = this[0] = selector;
this.length = 1;
return this;
// HANDLE: $(function)
// Shortcut for document ready
} else if ( jQuery.isFunction( selector ) ) {
return rootjQuery.ready( selector );
}
if ( selector.selector !== undefined ) {
this.selector = selector.selector;
this.context = selector.context;
}
return jQuery.makeArray( selector, this );
},
// Start with an empty selector
selector: "",
// The default length of a jQuery object is 0
length: 0,
toArray: function() {
return core_slice.call( this );
},
// Get the Nth element in the matched element set OR
// Get the whole matched element set as a clean array
get: function( num ) {
return num == null ?
// Return a 'clean' array
this.toArray() :
// Return just the object
( num < 0 ? this[ this.length + num ] : this[ num ] );
},
// Take an array of elements and push it onto the stack
// (returning the new matched element set)
pushStack: function( elems ) {
// Build a new jQuery matched element set
var ret = jQuery.merge( this.constructor(), elems );
// Add the old object onto the stack (as a reference)
ret.prevObject = this;
ret.context = this.context;
// Return the newly-formed element set
return ret;
},
// Execute a callback for every element in the matched set.
// (You can seed the arguments with an array of args, but this is
// only used internally.)
each: function( callback, args ) {
return jQuery.each( this, callback, args );
},
ready: function( fn ) {
// Add the callback
jQuery.ready.promise().done( fn );
return this;
},
slice: function() {
return this.pushStack( core_slice.apply( this, arguments ) );
},
first: function() {
return this.eq( 0 );
},
last: function() {
return this.eq( -1 );
},
eq: function( i ) {
var len = this.length,
j = +i + ( i < 0 ? len : 0 );
return this.pushStack( j >= 0 && j < len ? [ this[j] ] : [] );
},
map: function( callback ) {
return this.pushStack( jQuery.map(this, function( elem, i ) {
return callback.call( elem, i, elem );
}));
},
end: function() {
return this.prevObject || this.constructor(null);
},
// For internal use only.
// Behaves like an Array's method, not like a jQuery method.
push: core_push,
sort: [].sort,
splice: [].splice
};
// Give the init function the jQuery prototype for later instantiation
jQuery.fn.init.prototype = jQuery.fn;
jQuery.extend = jQuery.fn.extend = function() {
var options, name, src, copy, copyIsArray, clone,
target = arguments[0] || {},
i = 1,
length = arguments.length,
deep = false;
// Handle a deep copy situation
if ( typeof target === "boolean" ) {
deep = target;
target = arguments[1] || {};
// skip the boolean and the target
i = 2;
}
// Handle case when target is a string or something (possible in deep copy)
if ( typeof target !== "object" && !jQuery.isFunction(target) ) {
target = {};
}
// extend jQuery itself if only one argument is passed
if ( length === i ) {
target = this;
--i;
}
for ( ; i < length; i++ ) {
// Only deal with non-null/undefined values
if ( (options = arguments[ i ]) != null ) {
// Extend the base object
for ( name in options ) {
src = target[ name ];
copy = options[ name ];
// Prevent never-ending loop
if ( target === copy ) {
continue;
}
// Recurse if we're merging plain objects or arrays
if ( deep && copy && ( jQuery.isPlainObject(copy) || (copyIsArray = jQuery.isArray(copy)) ) ) {
if ( copyIsArray ) {
copyIsArray = false;
clone = src && jQuery.isArray(src) ? src : [];
} else {
clone = src && jQuery.isPlainObject(src) ? src : {};
}
// Never move original objects, clone them
target[ name ] = jQuery.extend( deep, clone, copy );
// Don't bring in undefined values
} else if ( copy !== undefined ) {
target[ name ] = copy;
}
}
}
}
// Return the modified object
return target;
};
jQuery.extend({
// Unique for each copy of jQuery on the page
expando: "jQuery" + ( core_version + Math.random() ).replace( /\D/g, "" ),
noConflict: function( deep ) {
if ( window.$ === jQuery ) {
window.$ = _$;
}
if ( deep && window.jQuery === jQuery ) {
window.jQuery = _jQuery;
}
return jQuery;
},
// Is the DOM ready to be used? Set to true once it occurs.
isReady: false,
// A counter to track how many items to wait for before
// the ready event fires. See #6781
readyWait: 1,
// Hold (or release) the ready event
holdReady: function( hold ) {
if ( hold ) {
jQuery.readyWait++;
} else {
jQuery.ready( true );
}
},
// Handle when the DOM is ready
ready: function( wait ) {
// Abort if there are pending holds or we're already ready
if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) {
return;
}
// Remember that the DOM is ready
jQuery.isReady = true;
// If a normal DOM Ready event fired, decrement, and wait if need be
if ( wait !== true && --jQuery.readyWait > 0 ) {
return;
}
		// If there are functions bound, execute them
readyList.resolveWith( document, [ jQuery ] );
// Trigger any bound ready events
if ( jQuery.fn.trigger ) {
jQuery( document ).trigger("ready").off("ready");
}
},
// See test/unit/core.js for details concerning isFunction.
// Since version 1.3, DOM methods and functions like alert
// aren't supported. They return false on IE (#2968).
isFunction: function( obj ) {
return jQuery.type(obj) === "function";
},
isArray: Array.isArray,
isWindow: function( obj ) {
return obj != null && obj === obj.window;
},
isNumeric: function( obj ) {
return !isNaN( parseFloat(obj) ) && isFinite( obj );
},
type: function( obj ) {
if ( obj == null ) {
return String( obj );
}
// Support: Safari <= 5.1 (functionish RegExp)
return typeof obj === "object" || typeof obj === "function" ?
class2type[ core_toString.call(obj) ] || "object" :
typeof obj;
},
isPlainObject: function( obj ) {
// Not plain objects:
// - Any object or value whose internal [[Class]] property is not "[object Object]"
// - DOM nodes
// - window
if ( jQuery.type( obj ) !== "object" || obj.nodeType || jQuery.isWindow( obj ) ) {
return false;
}
// Support: Firefox <20
// The try/catch suppresses exceptions thrown when attempting to access
// the "constructor" property of certain host objects, ie. |window.location|
// https://bugzilla.mozilla.org/show_bug.cgi?id=814622
try {
if ( obj.constructor &&
!core_hasOwn.call( obj.constructor.prototype, "isPrototypeOf" ) ) {
return false;
}
} catch ( e ) {
return false;
}
// If the function hasn't returned already, we're confident that
// |obj| is a plain object, created by {} or constructed with new Object
return true;
},
isEmptyObject: function( obj ) {
var name;
for ( name in obj ) {
return false;
}
return true;
},
error: function( msg ) {
throw new Error( msg );
},
// data: string of html
// context (optional): If specified, the fragment will be created in this context, defaults to document
// keepScripts (optional): If true, will include scripts passed in the html string
parseHTML: function( data, context, keepScripts ) {
if ( !data || typeof data !== "string" ) {
return null;
}
if ( typeof context === "boolean" ) {
keepScripts = context;
context = false;
}
context = context || document;
var parsed = rsingleTag.exec( data ),
scripts = !keepScripts && [];
// Single tag
if ( parsed ) {
return [ context.createElement( parsed[1] ) ];
}
parsed = jQuery.buildFragment( [ data ], context, scripts );
if ( scripts ) {
jQuery( scripts ).remove();
}
return jQuery.merge( [], parsed.childNodes );
},
parseJSON: JSON.parse,
// Cross-browser xml parsing
parseXML: function( data ) {
var xml, tmp;
if ( !data || typeof data !== "string" ) {
return null;
}
// Support: IE9
try {
tmp = new DOMParser();
xml = tmp.parseFromString( data , "text/xml" );
} catch ( e ) {
xml = undefined;
}
if ( !xml || xml.getElementsByTagName( "parsererror" ).length ) {
jQuery.error( "Invalid XML: " + data );
}
return xml;
},
noop: function() {},
// Evaluates a script in a global context
globalEval: function( code ) {
var script,
indirect = eval;
code = jQuery.trim( code );
if ( code ) {
// If the code includes a valid, prologue position
// strict mode pragma, execute code by injecting a
// script tag into the document.
if ( code.indexOf("use strict") === 1 ) {
script = document.createElement("script");
script.text = code;
document.head.appendChild( script ).parentNode.removeChild( script );
} else {
// Otherwise, avoid the DOM node creation, insertion
// and removal by using an indirect global eval
indirect( code );
}
}
},
// Convert dashed to camelCase; used by the css and data modules
// Microsoft forgot to hump their vendor prefix (#9572)
camelCase: function( string ) {
return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase );
},
nodeName: function( elem, name ) {
return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase();
},
// args is for internal usage only
each: function( obj, callback, args ) {
var value,
i = 0,
length = obj.length,
isArray = isArraylike( obj );
if ( args ) {
if ( isArray ) {
for ( ; i < length; i++ ) {
value = callback.apply( obj[ i ], args );
if ( value === false ) {
break;
}
}
} else {
for ( i in obj ) {
value = callback.apply( obj[ i ], args );
if ( value === false ) {
break;
}
}
}
// A special, fast, case for the most common use of each
} else {
if ( isArray ) {
for ( ; i < length; i++ ) {
value = callback.call( obj[ i ], i, obj[ i ] );
if ( value === false ) {
break;
}
}
} else {
for ( i in obj ) {
value = callback.call( obj[ i ], i, obj[ i ] );
if ( value === false ) {
break;
}
}
}
}
return obj;
},
trim: function( text ) {
return text == null ? "" : core_trim.call( text );
},
// results is for internal usage only
makeArray: function( arr, results ) {
var ret = results || [];
if ( arr != null ) {
if ( isArraylike( Object(arr) ) ) {
jQuery.merge( ret,
typeof arr === "string" ?
[ arr ] : arr
);
} else {
core_push.call( ret, arr );
}
}
return ret;
},
inArray: function( elem, arr, i ) {
return arr == null ? -1 : core_indexOf.call( arr, elem, i );
},
merge: function( first, second ) {
var l = second.length,
i = first.length,
j = 0;
if ( typeof l === "number" ) {
for ( ; j < l; j++ ) {
first[ i++ ] = second[ j ];
}
} else {
while ( second[j] !== undefined ) {
first[ i++ ] = second[ j++ ];
}
}
first.length = i;
return first;
},
grep: function( elems, callback, inv ) {
var retVal,
ret = [],
i = 0,
length = elems.length;
inv = !!inv;
// Go through the array, only saving the items
// that pass the validator function
for ( ; i < length; i++ ) {
retVal = !!callback( elems[ i ], i );
if ( inv !== retVal ) {
ret.push( elems[ i ] );
}
}
return ret;
},
// arg is for internal usage only
map: function( elems, callback, arg ) {
var value,
i = 0,
length = elems.length,
isArray = isArraylike( elems ),
ret = [];
		// Go through the array, translating each of the items to their new values
if ( isArray ) {
for ( ; i < length; i++ ) {
value = callback( elems[ i ], i, arg );
if ( value != null ) {
ret[ ret.length ] = value;
}
}
// Go through every key on the object,
} else {
for ( i in elems ) {
value = callback( elems[ i ], i, arg );
if ( value != null ) {
ret[ ret.length ] = value;
}
}
}
// Flatten any nested arrays
return core_concat.apply( [], ret );
},
// A global GUID counter for objects
guid: 1,
// Bind a function to a context, optionally partially applying any
// arguments.
proxy: function( fn, context ) {
var tmp, args, proxy;
if ( typeof context === "string" ) {
tmp = fn[ context ];
context = fn;
fn = tmp;
}
// Quick check to determine if target is callable, in the spec
// this throws a TypeError, but we will just return undefined.
if ( !jQuery.isFunction( fn ) ) {
return undefined;
}
// Simulated bind
args = core_slice.call( arguments, 2 );
proxy = function() {
return fn.apply( context || this, args.concat( core_slice.call( arguments ) ) );
};
// Set the guid of unique handler to the same of original handler, so it can be removed
proxy.guid = fn.guid = fn.guid || jQuery.guid++;
return proxy;
},
// Multifunctional method to get and set values of a collection
// The value/s can optionally be executed if it's a function
access: function( elems, fn, key, value, chainable, emptyGet, raw ) {
var i = 0,
length = elems.length,
bulk = key == null;
// Sets many values
if ( jQuery.type( key ) === "object" ) {
chainable = true;
for ( i in key ) {
jQuery.access( elems, fn, i, key[i], true, emptyGet, raw );
}
// Sets one value
} else if ( value !== undefined ) {
chainable = true;
if ( !jQuery.isFunction( value ) ) {
raw = true;
}
if ( bulk ) {
// Bulk operations run against the entire set
if ( raw ) {
fn.call( elems, value );
fn = null;
// ...except when executing function values
} else {
bulk = fn;
fn = function( elem, key, value ) {
return bulk.call( jQuery( elem ), value );
};
}
}
if ( fn ) {
for ( ; i < length; i++ ) {
fn( elems[i], key, raw ? value : value.call( elems[i], i, fn( elems[i], key ) ) );
}
}
}
return chainable ?
elems :
// Gets
bulk ?
fn.call( elems ) :
length ? fn( elems[0], key ) : emptyGet;
},
now: Date.now,
// A method for quickly swapping in/out CSS properties to get correct calculations.
// Note: this method belongs to the css module but it's needed here for the support module.
// If support gets modularized, this method should be moved back to the css module.
swap: function( elem, options, callback, args ) {
var ret, name,
old = {};
// Remember the old values, and insert the new ones
for ( name in options ) {
old[ name ] = elem.style[ name ];
elem.style[ name ] = options[ name ];
}
ret = callback.apply( elem, args || [] );
// Revert the old values
for ( name in options ) {
elem.style[ name ] = old[ name ];
}
return ret;
}
});
jQuery.ready.promise = function( obj ) {
if ( !readyList ) {
readyList = jQuery.Deferred();
// Catch cases where $(document).ready() is called after the browser event has already occurred.
// we once tried to use readyState "interactive" here, but it caused issues like the one
// discovered by ChrisS here: http://bugs.jquery.com/ticket/12282#comment:15
if ( document.readyState === "complete" ) {
// Handle it asynchronously to allow scripts the opportunity to delay ready
setTimeout( jQuery.ready );
} else {
// Use the handy event callback
document.addEventListener( "DOMContentLoaded", completed, false );
// A fallback to window.onload, that will always work
window.addEventListener( "load", completed, false );
}
}
return readyList.promise( obj );
};
// Populate the class2type map
jQuery.each("Boolean Number String Function Array Date RegExp Object Error".split(" "), function(i, name) {
class2type[ "[object " + name + "]" ] = name.toLowerCase();
});
function isArraylike( obj ) {
var length = obj.length,
type = jQuery.type( obj );
if ( jQuery.isWindow( obj ) ) {
return false;
}
if ( obj.nodeType === 1 && length ) {
return true;
}
return type === "array" || type !== "function" &&
( length === 0 ||
typeof length === "number" && length > 0 && ( length - 1 ) in obj );
}
// All jQuery objects should point back to these
rootjQuery = jQuery(document);
/*
* Optional (non-Sizzle) selector module for custom builds.
*
* Note that this DOES NOT SUPPORT many documented jQuery
* features in exchange for its smaller size:
*
* Attribute not equal selector
* Positional selectors (:first; :eq(n); :odd; etc.)
* Type selectors (:input; :checkbox; :button; etc.)
* State-based selectors (:animated; :visible; :hidden; etc.)
* :has(selector)
* :not(complex selector)
* custom selectors via Sizzle extensions
* Leading combinators (e.g., $collection.find("> *"))
* Reliable functionality on XML fragments
* Requiring all parts of a selector to match elements under context
* (e.g., $div.find("div > *") now matches children of $div)
* Matching against non-elements
* Reliable sorting of disconnected nodes
* querySelectorAll bug fixes (e.g., unreliable :focus on WebKit)
*
* If any of these are unacceptable tradeoffs, either use Sizzle or
* customize this stub for the project's specific needs.
*/
var selector_hasDuplicate,
matches = docElem.webkitMatchesSelector ||
docElem.mozMatchesSelector ||
docElem.oMatchesSelector ||
docElem.msMatchesSelector,
selector_sortOrder = function( a, b ) {
// Flag for duplicate removal
if ( a === b ) {
selector_hasDuplicate = true;
return 0;
}
var compare = b.compareDocumentPosition && a.compareDocumentPosition && a.compareDocumentPosition( b );
if ( compare ) {
// Disconnected nodes
if ( compare & 1 ) {
// Choose the first element that is related to our document
if ( a === document || jQuery.contains(document, a) ) {
return -1;
}
if ( b === document || jQuery.contains(document, b) ) {
return 1;
}
// Maintain original order
return 0;
}
return compare & 4 ? -1 : 1;
}
// Not directly comparable, sort on existence of method
return a.compareDocumentPosition ? -1 : 1;
};
jQuery.extend({
find: function( selector, context, results, seed ) {
var elem, nodeType,
i = 0;
results = results || [];
context = context || document;
// Same basic safeguard as Sizzle
if ( !selector || typeof selector !== "string" ) {
return results;
}
// Early return if context is not an element or document
if ( (nodeType = context.nodeType) !== 1 && nodeType !== 9 ) {
return [];
}
if ( seed ) {
while ( (elem = seed[i++]) ) {
if ( jQuery.find.matchesSelector(elem, selector) ) {
results.push( elem );
}
}
} else {
jQuery.merge( results, context.querySelectorAll(selector) );
}
return results;
},
unique: function( results ) {
var elem,
duplicates = [],
i = 0,
j = 0;
selector_hasDuplicate = false;
results.sort( selector_sortOrder );
if ( selector_hasDuplicate ) {
while ( (elem = results[i++]) ) {
if ( elem === results[ i ] ) {
j = duplicates.push( i );
}
}
while ( j-- ) {
results.splice( duplicates[ j ], 1 );
}
}
return results;
},
text: function( elem ) {
var node,
ret = "",
i = 0,
nodeType = elem.nodeType;
if ( !nodeType ) {
// If no nodeType, this is expected to be an array
while ( (node = elem[i++]) ) {
// Do not traverse comment nodes
ret += jQuery.text( node );
}
} else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) {
// Use textContent for elements
return elem.textContent;
} else if ( nodeType === 3 || nodeType === 4 ) {
return elem.nodeValue;
}
// Do not include comment or processing instruction nodes
return ret;
},
contains: function( a, b ) {
var adown = a.nodeType === 9 ? a.documentElement : a,
bup = b && b.parentNode;
return a === bup || !!( bup && bup.nodeType === 1 && adown.contains(bup) );
},
isXMLDoc: function( elem ) {
return (elem.ownerDocument || elem).documentElement.nodeName !== "HTML";
},
expr: {
attrHandle: {},
match: {
bool: /^(?:checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped)$/i,
needsContext: /^[\x20\t\r\n\f]*[>+~]/
}
}
});
jQuery.extend( jQuery.find, {
matches: function( expr, elements ) {
return jQuery.find( expr, null, null, elements );
},
matchesSelector: function( elem, expr ) {
return matches.call( elem, expr );
},
attr: function( elem, name ) {
return elem.getAttribute( name );
}
});
// String to Object options format cache
var optionsCache = {};
// Convert String-formatted options into Object-formatted ones and store in cache
function createOptions( options ) {
var object = optionsCache[ options ] = {};
jQuery.each( options.match( core_rnotwhite ) || [], function( _, flag ) {
object[ flag ] = true;
});
return object;
}
/*
* Create a callback list using the following parameters:
*
* options: an optional list of space-separated options that will change how
* the callback list behaves or a more traditional option object
*
* By default a callback list will act like an event callback list and can be
* "fired" multiple times.
*
* Possible options:
*
* once: will ensure the callback list can only be fired once (like a Deferred)
*
* memory: will keep track of previous values and will call any callback added
* after the list has been fired right away with the latest "memorized"
* values (like a Deferred)
*
* unique: will ensure a callback can only be added once (no duplicate in the list)
*
* stopOnFalse: interrupt callings when a callback returns false
*
*/
jQuery.Callbacks = function( options ) {
// Convert options from String-formatted to Object-formatted if needed
// (we check in cache first)
options = typeof options === "string" ?
( optionsCache[ options ] || createOptions( options ) ) :
jQuery.extend( {}, options );
var // Last fire value (for non-forgettable lists)
memory,
// Flag to know if list was already fired
fired,
// Flag to know if list is currently firing
firing,
// First callback to fire (used internally by add and fireWith)
firingStart,
// End of the loop when firing
firingLength,
// Index of currently firing callback (modified by remove if needed)
firingIndex,
// Actual callback list
list = [],
// Stack of fire calls for repeatable lists
stack = !options.once && [],
// Fire callbacks
fire = function( data ) {
memory = options.memory && data;
fired = true;
firingIndex = firingStart || 0;
firingStart = 0;
firingLength = list.length;
firing = true;
for ( ; list && firingIndex < firingLength; firingIndex++ ) {
if ( list[ firingIndex ].apply( data[ 0 ], data[ 1 ] ) === false && options.stopOnFalse ) {
memory = false; // To prevent further calls using add
break;
}
}
firing = false;
if ( list ) {
if ( stack ) {
if ( stack.length ) {
fire( stack.shift() );
}
} else if ( memory ) {
list = [];
} else {
self.disable();
}
}
},
// Actual Callbacks object
self = {
// Add a callback or a collection of callbacks to the list
add: function() {
if ( list ) {
// First, we save the current length
var start = list.length;
(function add( args ) {
jQuery.each( args, function( _, arg ) {
var type = jQuery.type( arg );
if ( type === "function" ) {
if ( !options.unique || !self.has( arg ) ) {
list.push( arg );
}
} else if ( arg && arg.length && type !== "string" ) {
// Inspect recursively
add( arg );
}
});
})( arguments );
// Do we need to add the callbacks to the
// current firing batch?
if ( firing ) {
firingLength = list.length;
// With memory, if we're not firing then
// we should call right away
} else if ( memory ) {
firingStart = start;
fire( memory );
}
}
return this;
},
// Remove a callback from the list
remove: function() {
if ( list ) {
jQuery.each( arguments, function( _, arg ) {
var index;
while( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) {
list.splice( index, 1 );
// Handle firing indexes
if ( firing ) {
if ( index <= firingLength ) {
firingLength--;
}
if ( index <= firingIndex ) {
firingIndex--;
}
}
}
});
}
return this;
},
// Check if a given callback is in the list.
// If no argument is given, return whether or not list has callbacks attached.
has: function( fn ) {
return fn ? jQuery.inArray( fn, list ) > -1 : !!( list && list.length );
},
// Remove all callbacks from the list
empty: function() {
list = [];
firingLength = 0;
return this;
},
// Have the list do nothing anymore
disable: function() {
list = stack = memory = undefined;
return this;
},
// Is it disabled?
disabled: function() {
return !list;
},
// Lock the list in its current state
lock: function() {
stack = undefined;
if ( !memory ) {
self.disable();
}
return this;
},
// Is it locked?
locked: function() {
return !stack;
},
// Call all callbacks with the given context and arguments
fireWith: function( context, args ) {
args = args || [];
args = [ context, args.slice ? args.slice() : args ];
if ( list && ( !fired || stack ) ) {
if ( firing ) {
stack.push( args );
} else {
fire( args );
}
}
return this;
},
// Call all the callbacks with the given arguments
fire: function() {
self.fireWith( this, arguments );
return this;
},
// To know if the callbacks have already been called at least once
fired: function() {
return !!fired;
}
};
return self;
};
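// Usage sketch for the Callbacks options documented above ("once memory");
// illustrative only -- the function is defined but never invoked, so the
// library's behavior is unchanged.
function callbacksUsageSketch() {
	var cb = jQuery.Callbacks( "once memory" );
	cb.add(function( msg ) { console.log( msg ); });          // runs on fire
	cb.fire( "hello" );                                       // logs "hello"
	cb.add(function( msg ) { console.log( "late", msg ); });  // "memory": replays "hello" immediately
	cb.fire( "again" );                                       // "once": ignored
}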
jQuery.extend({
Deferred: function( func ) {
var tuples = [
// action, add listener, listener list, final state
[ "resolve", "done", jQuery.Callbacks("once memory"), "resolved" ],
[ "reject", "fail", jQuery.Callbacks("once memory"), "rejected" ],
[ "notify", "progress", jQuery.Callbacks("memory") ]
],
state = "pending",
promise = {
state: function() {
return state;
},
always: function() {
deferred.done( arguments ).fail( arguments );
return this;
},
then: function( /* fnDone, fnFail, fnProgress */ ) {
var fns = arguments;
return jQuery.Deferred(function( newDefer ) {
jQuery.each( tuples, function( i, tuple ) {
var action = tuple[ 0 ],
fn = jQuery.isFunction( fns[ i ] ) && fns[ i ];
// deferred[ done | fail | progress ] for forwarding actions to newDefer
deferred[ tuple[1] ](function() {
var returned = fn && fn.apply( this, arguments );
if ( returned && jQuery.isFunction( returned.promise ) ) {
returned.promise()
.done( newDefer.resolve )
.fail( newDefer.reject )
.progress( newDefer.notify );
} else {
newDefer[ action + "With" ]( this === promise ? newDefer.promise() : this, fn ? [ returned ] : arguments );
}
});
});
fns = null;
}).promise();
},
// Get a promise for this deferred
// If obj is provided, the promise aspect is added to the object
promise: function( obj ) {
return obj != null ? jQuery.extend( obj, promise ) : promise;
}
},
deferred = {};
// Keep pipe for back-compat
promise.pipe = promise.then;
// Add list-specific methods
jQuery.each( tuples, function( i, tuple ) {
var list = tuple[ 2 ],
stateString = tuple[ 3 ];
// promise[ done | fail | progress ] = list.add
promise[ tuple[1] ] = list.add;
// Handle state
if ( stateString ) {
list.add(function() {
// state = [ resolved | rejected ]
state = stateString;
// [ reject_list | resolve_list ].disable; progress_list.lock
}, tuples[ i ^ 1 ][ 2 ].disable, tuples[ 2 ][ 2 ].lock );
}
// deferred[ resolve | reject | notify ]
deferred[ tuple[0] ] = function() {
deferred[ tuple[0] + "With" ]( this === deferred ? promise : this, arguments );
return this;
};
deferred[ tuple[0] + "With" ] = list.fireWith;
});
// Make the deferred a promise
promise.promise( deferred );
// Call given func if any
if ( func ) {
func.call( deferred, deferred );
}
// All done!
return deferred;
},
// Deferred helper
when: function( subordinate /* , ..., subordinateN */ ) {
var i = 0,
resolveValues = core_slice.call( arguments ),
length = resolveValues.length,
// the count of uncompleted subordinates
remaining = length !== 1 || ( subordinate && jQuery.isFunction( subordinate.promise ) ) ? length : 0,
// the master Deferred. If resolveValues consist of only a single Deferred, just use that.
deferred = remaining === 1 ? subordinate : jQuery.Deferred(),
// Update function for both resolve and progress values
updateFunc = function( i, contexts, values ) {
return function( value ) {
contexts[ i ] = this;
values[ i ] = arguments.length > 1 ? core_slice.call( arguments ) : value;
if( values === progressValues ) {
deferred.notifyWith( contexts, values );
} else if ( !( --remaining ) ) {
deferred.resolveWith( contexts, values );
}
};
},
progressValues, progressContexts, resolveContexts;
// add listeners to Deferred subordinates; treat others as resolved
if ( length > 1 ) {
progressValues = new Array( length );
progressContexts = new Array( length );
resolveContexts = new Array( length );
for ( ; i < length; i++ ) {
if ( resolveValues[ i ] && jQuery.isFunction( resolveValues[ i ].promise ) ) {
resolveValues[ i ].promise()
.done( updateFunc( i, resolveContexts, resolveValues ) )
.fail( deferred.reject )
.progress( updateFunc( i, progressContexts, progressValues ) );
} else {
--remaining;
}
}
}
// if we're not waiting on anything, resolve the master
if ( !remaining ) {
deferred.resolveWith( resolveContexts, resolveValues );
}
return deferred.promise();
}
});
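// Deferred/when usage sketch (illustrative only, never invoked): jQuery.when
// resolves once every subordinate promise has resolved, passing each result
// as a separate argument.
function whenUsageSketch() {
	function wait( ms ) {
		var dfd = jQuery.Deferred();
		setTimeout(function() { dfd.resolve( ms ); }, ms );
		return dfd.promise();
	}
	jQuery.when( wait( 100 ), wait( 200 ) ).done(function( a, b ) {
		console.log( a, b ); // 100 200
	});
}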
jQuery.support = (function( support ) {
var input = document.createElement("input"),
fragment = document.createDocumentFragment(),
div = document.createElement("div"),
select = document.createElement("select"),
opt = select.appendChild( document.createElement("option") );
// Finish early in limited environments
if ( !input.type ) {
return support;
}
input.type = "checkbox";
// Support: Safari 5.1, iOS 5.1, Android 4.x, Android 2.3
// Check the default checkbox/radio value ("" on old WebKit; "on" elsewhere)
support.checkOn = input.value !== "";
// Must access the parent to make an option select properly
// Support: IE9, IE10
support.optSelected = opt.selected;
// Will be defined later
support.reliableMarginRight = true;
support.boxSizingReliable = true;
support.pixelPosition = false;
// Make sure checked status is properly cloned
// Support: IE9, IE10
input.checked = true;
support.noCloneChecked = input.cloneNode( true ).checked;
// Make sure that the options inside disabled selects aren't marked as disabled
// (WebKit marks them as disabled)
select.disabled = true;
support.optDisabled = !opt.disabled;
// Check if an input maintains its value after becoming a radio
// Support: IE9, IE10
input = document.createElement("input");
input.value = "t";
input.type = "radio";
support.radioValue = input.value === "t";
// #11217 - WebKit loses check when the name is after the checked attribute
input.setAttribute( "checked", "t" );
input.setAttribute( "name", "t" );
fragment.appendChild( input );
// Support: Safari 5.1, Android 4.x, Android 2.3
// old WebKit doesn't clone checked state correctly in fragments
support.checkClone = fragment.cloneNode( true ).cloneNode( true ).lastChild.checked;
// Support: Firefox, Chrome, Safari
// Beware of CSP restrictions (https://developer.mozilla.org/en/Security/CSP)
support.focusinBubbles = "onfocusin" in window;
div.style.backgroundClip = "content-box";
div.cloneNode( true ).style.backgroundClip = "";
support.clearCloneStyle = div.style.backgroundClip === "content-box";
// Run tests that need a body at doc ready
jQuery(function() {
var container, marginDiv,
// Support: Firefox, Android 2.3 (Prefixed box-sizing versions).
divReset = "padding:0;margin:0;border:0;display:block;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box",
body = document.getElementsByTagName("body")[ 0 ];
if ( !body ) {
// Return for frameset docs that don't have a body
return;
}
container = document.createElement("div");
container.style.cssText = "border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px";
// Check box-sizing and margin behavior.
body.appendChild( container ).appendChild( div );
div.innerHTML = "";
// Support: Firefox, Android 2.3 (Prefixed box-sizing versions).
div.style.cssText = "-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%";
// Workaround failing boxSizing test due to offsetWidth returning wrong value
// with some non-1 values of body zoom, ticket #13543
jQuery.swap( body, body.style.zoom != null ? { zoom: 1 } : {}, function() {
support.boxSizing = div.offsetWidth === 4;
});
// Use window.getComputedStyle because jsdom on node.js will break without it.
if ( window.getComputedStyle ) {
support.pixelPosition = ( window.getComputedStyle( div, null ) || {} ).top !== "1%";
support.boxSizingReliable = ( window.getComputedStyle( div, null ) || { width: "4px" } ).width === "4px";
// Support: Android 2.3
// Check if div with explicit width and no margin-right incorrectly
// gets computed margin-right based on width of container. (#3333)
// WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right
marginDiv = div.appendChild( document.createElement("div") );
marginDiv.style.cssText = div.style.cssText = divReset;
marginDiv.style.marginRight = marginDiv.style.width = "0";
div.style.width = "1px";
support.reliableMarginRight =
!parseFloat( ( window.getComputedStyle( marginDiv, null ) || {} ).marginRight );
}
body.removeChild( container );
});
return support;
})( {} );
/*
Implementation Summary
1. Enforce API surface and semantic compatibility with 1.9.x branch
2. Improve the module's maintainability by reducing the storage
paths to a single mechanism.
3. Use the same single mechanism to support "private" and "user" data.
4. _Never_ expose "private" data to user code (TODO: Drop _data, _removeData)
5. Avoid exposing implementation details on user objects (eg. expando properties)
6. Provide a clear path for implementation upgrade to WeakMap in 2014
*/
var data_user, data_priv,
rbrace = /(?:\{[\s\S]*\}|\[[\s\S]*\])$/,
rmultiDash = /([A-Z])/g;
function Data() {
// Support: Android < 4,
// Old WebKit does not have Object.preventExtensions/freeze method,
// return new empty object instead with no [[set]] accessor
Object.defineProperty( this.cache = {}, 0, {
get: function() {
return {};
}
});
this.expando = jQuery.expando + Math.random();
}
Data.uid = 1;
Data.accepts = function( owner ) {
// Accepts only:
// - Node
// - Node.ELEMENT_NODE
// - Node.DOCUMENT_NODE
// - Object
// - Any
return owner.nodeType ?
owner.nodeType === 1 || owner.nodeType === 9 : true;
};
Data.prototype = {
key: function( owner ) {
// We can accept data for non-element nodes in modern browsers,
// but we should not, see #8335.
// Always return the key for a frozen object.
if ( !Data.accepts( owner ) ) {
return 0;
}
var descriptor = {},
// Check if the owner object already has a cache key
unlock = owner[ this.expando ];
// If not, create one
if ( !unlock ) {
unlock = Data.uid++;
// Secure it in a non-enumerable, non-writable property
try {
descriptor[ this.expando ] = { value: unlock };
Object.defineProperties( owner, descriptor );
// Support: Android < 4
// Fallback to a less secure definition
} catch ( e ) {
descriptor[ this.expando ] = unlock;
jQuery.extend( owner, descriptor );
}
}
// Ensure the cache object
if ( !this.cache[ unlock ] ) {
this.cache[ unlock ] = {};
}
return unlock;
},
set: function( owner, data, value ) {
var prop,
// There may be an unlock assigned to this node,
// if there is no entry for this "owner", create one inline
// and set the unlock as though an owner entry had always existed
unlock = this.key( owner ),
cache = this.cache[ unlock ];
// Handle: [ owner, key, value ] args
if ( typeof data === "string" ) {
cache[ data ] = value;
// Handle: [ owner, { properties } ] args
} else {
// Fresh assignments by object are shallow copied
if ( jQuery.isEmptyObject( cache ) ) {
jQuery.extend( this.cache[ unlock ], data );
// Otherwise, copy the properties one-by-one to the cache object
} else {
for ( prop in data ) {
cache[ prop ] = data[ prop ];
}
}
}
return cache;
},
get: function( owner, key ) {
// Either a valid cache is found, or one will be created.
// New caches will be created and the unlock returned,
// allowing direct access to the newly created
// empty data object. A valid owner object must be provided.
var cache = this.cache[ this.key( owner ) ];
return key === undefined ?
cache : cache[ key ];
},
access: function( owner, key, value ) {
// In cases where either:
//
// 1. No key was specified
// 2. A string key was specified, but no value provided
//
// Take the "read" path and allow the get method to determine
// which value to return, respectively either:
//
// 1. The entire cache object
// 2. The data stored at the key
//
if ( key === undefined ||
((key && typeof key === "string") && value === undefined) ) {
return this.get( owner, key );
}
// [*]When the key is not a string, or both a key and value
// are specified, set or extend (existing objects) with either:
//
// 1. An object of properties
// 2. A key and value
//
this.set( owner, key, value );
// Since the "set" path can have two possible entry points
// return the expected data based on which path was taken[*]
return value !== undefined ? value : key;
},
remove: function( owner, key ) {
var i, name, camel,
unlock = this.key( owner ),
cache = this.cache[ unlock ];
if ( key === undefined ) {
this.cache[ unlock ] = {};
} else {
// Support array or space separated string of keys
if ( jQuery.isArray( key ) ) {
// If "name" is an array of keys...
// When data is initially created, via ("key", "val") signature,
// keys will be converted to camelCase.
// Since there is no way to tell _how_ a key was added, remove
// both plain key and camelCase key. #12786
// This will only penalize the array argument path.
name = key.concat( key.map( jQuery.camelCase ) );
} else {
camel = jQuery.camelCase( key );
// Try the string as a key before any manipulation
if ( key in cache ) {
name = [ key, camel ];
} else {
// If a key with the spaces exists, use it.
// Otherwise, create an array by matching non-whitespace
name = camel;
name = name in cache ?
[ name ] : ( name.match( core_rnotwhite ) || [] );
}
}
i = name.length;
while ( i-- ) {
delete cache[ name[ i ] ];
}
}
},
hasData: function( owner ) {
return !jQuery.isEmptyObject(
this.cache[ owner[ this.expando ] ] || {}
);
},
discard: function( owner ) {
if ( owner[ this.expando ] ) {
delete this.cache[ owner[ this.expando ] ];
}
}
};
// These may be used throughout the jQuery core codebase
data_user = new Data();
data_priv = new Data();
jQuery.extend({
acceptData: Data.accepts,
hasData: function( elem ) {
return data_user.hasData( elem ) || data_priv.hasData( elem );
},
data: function( elem, name, data ) {
return data_user.access( elem, name, data );
},
removeData: function( elem, name ) {
data_user.remove( elem, name );
},
// TODO: Now that all calls to _data and _removeData have been replaced
// with direct calls to data_priv methods, these can be deprecated.
_data: function( elem, name, data ) {
return data_priv.access( elem, name, data );
},
_removeData: function( elem, name ) {
data_priv.remove( elem, name );
}
});
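// Illustrative sketch: the public data API above delegates to the two Data
// stores; user data and private data never collide because each store keys
// the element through its own expando:
//
//   var el = document.createElement("div");
//   jQuery.data( el, "answer", 42 );   // stored in data_user
//   jQuery._data( el, "events", {} );  // stored in data_priv
//   jQuery.data( el, "answer" );       // 42
//   jQuery.hasData( el );              // true (either store counts)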
jQuery.fn.extend({
data: function( key, value ) {
var attrs, name,
elem = this[ 0 ],
i = 0,
data = null;
// Gets all values
if ( key === undefined ) {
if ( this.length ) {
data = data_user.get( elem );
if ( elem.nodeType === 1 && !data_priv.get( elem, "hasDataAttrs" ) ) {
attrs = elem.attributes;
for ( ; i < attrs.length; i++ ) {
name = attrs[ i ].name;
if ( name.indexOf( "data-" ) === 0 ) {
name = jQuery.camelCase( name.slice(5) );
dataAttr( elem, name, data[ name ] );
}
}
data_priv.set( elem, "hasDataAttrs", true );
}
}
return data;
}
// Sets multiple values
if ( typeof key === "object" ) {
return this.each(function() {
data_user.set( this, key );
});
}
return jQuery.access( this, function( value ) {
var data,
camelKey = jQuery.camelCase( key );
// The calling jQuery object (element matches) is not empty
// (and therefore has an element at this[ 0 ]) and the
// `value` parameter was not undefined. An empty jQuery object
// will result in `undefined` for elem = this[ 0 ] which will
// throw an exception if an attempt to read a data cache is made.
if ( elem && value === undefined ) {
// Attempt to get data from the cache
// with the key as-is
data = data_user.get( elem, key );
if ( data !== undefined ) {
return data;
}
// Attempt to get data from the cache
// with the key camelized
data = data_user.get( elem, camelKey );
if ( data !== undefined ) {
return data;
}
// Attempt to "discover" the data in
// HTML5 custom data-* attrs
data = dataAttr( elem, camelKey, undefined );
if ( data !== undefined ) {
return data;
}
// We tried really hard, but the data doesn't exist.
return;
}
// Set the data...
this.each(function() {
// First, attempt to store a copy or reference of any
// data that might've been stored with a camelCased key.
var data = data_user.get( this, camelKey );
// For HTML5 data-* attribute interop, we have to
// store property names with dashes in a camelCase form.
// This might not apply to all properties...*
data_user.set( this, camelKey, value );
// *... In the case of properties that might _actually_
// have dashes, we need to also store a copy of that
// unchanged property.
if ( key.indexOf("-") !== -1 && data !== undefined ) {
data_user.set( this, key, value );
}
});
}, null, value, arguments.length > 1, null, true );
},
removeData: function( key ) {
return this.each(function() {
data_user.remove( this, key );
});
}
});
function dataAttr( elem, key, data ) {
var name;
// If nothing was found internally, try to fetch any
// data from the HTML5 data-* attribute
if ( data === undefined && elem.nodeType === 1 ) {
name = "data-" + key.replace( rmultiDash, "-$1" ).toLowerCase();
data = elem.getAttribute( name );
if ( typeof data === "string" ) {
try {
data = data === "true" ? true :
data === "false" ? false :
data === "null" ? null :
// Only convert to a number if it doesn't change the string
+data + "" === data ? +data :
rbrace.test( data ) ? JSON.parse( data ) :
data;
} catch( e ) {}
// Make sure we set the data so it isn't changed later
data_user.set( elem, key, data );
} else {
data = undefined;
}
}
return data;
}
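// Illustrative sketch: dataAttr() lazily imports HTML5 data-* attributes into
// the cache on first read and coerces obvious scalar types (hypothetical
// markup: <div id="d" data-count="5" data-opts='{"a":1}' data-flag="true">):
//
//   jQuery( "#d" ).data( "count" ); // 5        (number)
//   jQuery( "#d" ).data( "opts" );  // { a: 1 } (parsed JSON)
//   jQuery( "#d" ).data( "flag" );  // true     (boolean)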
jQuery.extend({
queue: function( elem, type, data ) {
var queue;
if ( elem ) {
type = ( type || "fx" ) + "queue";
queue = data_priv.get( elem, type );
// Speed up dequeue by getting out quickly if this is just a lookup
if ( data ) {
if ( !queue || jQuery.isArray( data ) ) {
queue = data_priv.access( elem, type, jQuery.makeArray(data) );
} else {
queue.push( data );
}
}
return queue || [];
}
},
dequeue: function( elem, type ) {
type = type || "fx";
var queue = jQuery.queue( elem, type ),
startLength = queue.length,
fn = queue.shift(),
hooks = jQuery._queueHooks( elem, type ),
next = function() {
jQuery.dequeue( elem, type );
};
// If the fx queue is dequeued, always remove the progress sentinel
if ( fn === "inprogress" ) {
fn = queue.shift();
startLength--;
}
if ( fn ) {
// Add a progress sentinel to prevent the fx queue from being
// automatically dequeued
if ( type === "fx" ) {
queue.unshift( "inprogress" );
}
// clear up the last queue stop function
delete hooks.stop;
fn.call( elem, next, hooks );
}
if ( !startLength && hooks ) {
hooks.empty.fire();
}
},
// not intended for public consumption - generates a queueHooks object, or returns the current one
_queueHooks: function( elem, type ) {
var key = type + "queueHooks";
return data_priv.get( elem, key ) || data_priv.access( elem, key, {
empty: jQuery.Callbacks("once memory").add(function() {
data_priv.remove( elem, [ type + "queue", key ] );
})
});
}
});
jQuery.fn.extend({
queue: function( type, data ) {
var setter = 2;
if ( typeof type !== "string" ) {
data = type;
type = "fx";
setter--;
}
if ( arguments.length < setter ) {
return jQuery.queue( this[0], type );
}
return data === undefined ?
this :
this.each(function() {
var queue = jQuery.queue( this, type, data );
// ensure hooks exist for this queue
jQuery._queueHooks( this, type );
if ( type === "fx" && queue[0] !== "inprogress" ) {
jQuery.dequeue( this, type );
}
});
},
dequeue: function( type ) {
return this.each(function() {
jQuery.dequeue( this, type );
});
},
// Based off of the plugin by Clint Helfers, with permission.
// http://blindsignals.com/index.php/2009/07/jquery-delay/
delay: function( time, type ) {
time = jQuery.fx ? jQuery.fx.speeds[ time ] || time : time;
type = type || "fx";
return this.queue( type, function( next, hooks ) {
var timeout = setTimeout( next, time );
hooks.stop = function() {
clearTimeout( timeout );
};
});
},
clearQueue: function( type ) {
return this.queue( type || "fx", [] );
},
// Get a promise resolved when queues of a certain type
// are emptied (fx is the type by default)
promise: function( type, obj ) {
var tmp,
count = 1,
defer = jQuery.Deferred(),
elements = this,
i = this.length,
resolve = function() {
if ( !( --count ) ) {
defer.resolveWith( elements, [ elements ] );
}
};
if ( typeof type !== "string" ) {
obj = type;
type = undefined;
}
type = type || "fx";
while( i-- ) {
tmp = data_priv.get( elements[ i ], type + "queueHooks" );
if ( tmp && tmp.empty ) {
count++;
tmp.empty.add( resolve );
}
}
resolve();
return defer.promise( obj );
}
});
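// Illustrative sketch: the fx queue auto-dequeues via the "inprogress"
// sentinel, .delay() queues a timer whose hooks.stop clears it, and
// .promise() resolves once every queued function has run ("#box" is a
// hypothetical element):
//
//   jQuery( "#box" ).fadeOut().delay( 200 ).fadeIn()
//       .promise().done(function() {
//           // all queued fx, including the 200ms delay, have finished
//       });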
var nodeHook, boolHook,
rclass = /[\t\r\n\f]/g,
rreturn = /\r/g,
rfocusable = /^(?:input|select|textarea|button)$/i;
jQuery.fn.extend({
attr: function( name, value ) {
return jQuery.access( this, jQuery.attr, name, value, arguments.length > 1 );
},
removeAttr: function( name ) {
return this.each(function() {
jQuery.removeAttr( this, name );
});
},
prop: function( name, value ) {
return jQuery.access( this, jQuery.prop, name, value, arguments.length > 1 );
},
removeProp: function( name ) {
return this.each(function() {
delete this[ jQuery.propFix[ name ] || name ];
});
},
addClass: function( value ) {
var classes, elem, cur, clazz, j,
i = 0,
len = this.length,
proceed = typeof value === "string" && value;
if ( jQuery.isFunction( value ) ) {
return this.each(function( j ) {
jQuery( this ).addClass( value.call( this, j, this.className ) );
});
}
if ( proceed ) {
// The disjunction here is for better compressibility (see removeClass)
classes = ( value || "" ).match( core_rnotwhite ) || [];
for ( ; i < len; i++ ) {
elem = this[ i ];
cur = elem.nodeType === 1 && ( elem.className ?
( " " + elem.className + " " ).replace( rclass, " " ) :
" "
);
if ( cur ) {
j = 0;
while ( (clazz = classes[j++]) ) {
if ( cur.indexOf( " " + clazz + " " ) < 0 ) {
cur += clazz + " ";
}
}
elem.className = jQuery.trim( cur );
}
}
}
return this;
},
removeClass: function( value ) {
var classes, elem, cur, clazz, j,
i = 0,
len = this.length,
proceed = arguments.length === 0 || typeof value === "string" && value;
if ( jQuery.isFunction( value ) ) {
return this.each(function( j ) {
jQuery( this ).removeClass( value.call( this, j, this.className ) );
});
}
if ( proceed ) {
classes = ( value || "" ).match( core_rnotwhite ) || [];
for ( ; i < len; i++ ) {
elem = this[ i ];
// This expression is here for better compressibility (see addClass)
cur = elem.nodeType === 1 && ( elem.className ?
( " " + elem.className + " " ).replace( rclass, " " ) :
""
);
if ( cur ) {
j = 0;
while ( (clazz = classes[j++]) ) {
// Remove *all* instances
while ( cur.indexOf( " " + clazz + " " ) >= 0 ) {
cur = cur.replace( " " + clazz + " ", " " );
}
}
elem.className = value ? jQuery.trim( cur ) : "";
}
}
}
return this;
},
toggleClass: function( value, stateVal ) {
var type = typeof value,
isBool = typeof stateVal === "boolean";
if ( jQuery.isFunction( value ) ) {
return this.each(function( i ) {
jQuery( this ).toggleClass( value.call(this, i, this.className, stateVal), stateVal );
});
}
return this.each(function() {
if ( type === "string" ) {
// toggle individual class names
var className,
i = 0,
self = jQuery( this ),
state = stateVal,
classNames = value.match( core_rnotwhite ) || [];
while ( (className = classNames[ i++ ]) ) {
// check each className given, space separated list
state = isBool ? state : !self.hasClass( className );
self[ state ? "addClass" : "removeClass" ]( className );
}
// Toggle whole class name
} else if ( type === core_strundefined || type === "boolean" ) {
if ( this.className ) {
// store className if set
data_priv.set( this, "__className__", this.className );
}
// If the element has a class name or if we're passed "false",
// then remove the whole classname (if there was one, the above saved it).
// Otherwise bring back whatever was previously saved (if anything),
// falling back to the empty string if nothing was stored.
this.className = this.className || value === false ? "" : data_priv.get( this, "__className__" ) || "";
}
});
},
hasClass: function( selector ) {
var className = " " + selector + " ",
i = 0,
l = this.length;
for ( ; i < l; i++ ) {
if ( this[i].nodeType === 1 && (" " + this[i].className + " ").replace(rclass, " ").indexOf( className ) >= 0 ) {
return true;
}
}
return false;
},
val: function( value ) {
var hooks, ret, isFunction,
elem = this[0];
if ( !arguments.length ) {
if ( elem ) {
hooks = jQuery.valHooks[ elem.type ] || jQuery.valHooks[ elem.nodeName.toLowerCase() ];
if ( hooks && "get" in hooks && (ret = hooks.get( elem, "value" )) !== undefined ) {
return ret;
}
ret = elem.value;
return typeof ret === "string" ?
// handle most common string cases
ret.replace(rreturn, "") :
// handle cases where value is null/undef or number
ret == null ? "" : ret;
}
return;
}
isFunction = jQuery.isFunction( value );
return this.each(function( i ) {
var val;
if ( this.nodeType !== 1 ) {
return;
}
if ( isFunction ) {
val = value.call( this, i, jQuery( this ).val() );
} else {
val = value;
}
// Treat null/undefined as ""; convert numbers to string
if ( val == null ) {
val = "";
} else if ( typeof val === "number" ) {
val += "";
} else if ( jQuery.isArray( val ) ) {
val = jQuery.map(val, function ( value ) {
return value == null ? "" : value + "";
});
}
hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ];
// If set returns undefined, fall back to normal setting
if ( !hooks || !("set" in hooks) || hooks.set( this, val, "value" ) === undefined ) {
this.value = val;
}
});
}
});
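// Illustrative sketch of the class and value helpers above, against a
// hypothetical form field:
//
//   var $el = jQuery( "#field" );
//   $el.addClass( "a b" ).toggleClass( "b" ); // className becomes "a"
//   $el.hasClass( "a" );                      // true
//   $el.val(function( i, old ) { return old + "!"; });
//   jQuery( "select[multiple]" ).val();       // array of selected values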
jQuery.extend({
valHooks: {
option: {
get: function( elem ) {
// attributes.value is undefined in Blackberry 4.7 but
// uses .value. See #6932
var val = elem.attributes.value;
return !val || val.specified ? elem.value : elem.text;
}
},
select: {
get: function( elem ) {
var value, option,
options = elem.options,
index = elem.selectedIndex,
one = elem.type === "select-one" || index < 0,
values = one ? null : [],
max = one ? index + 1 : options.length,
i = index < 0 ?
max :
one ? index : 0;
// Loop through all the selected options
for ( ; i < max; i++ ) {
option = options[ i ];
// IE6-9 doesn't update selected after form reset (#2551)
if ( ( option.selected || i === index ) &&
// Don't return options that are disabled or in a disabled optgroup
( jQuery.support.optDisabled ? !option.disabled : option.getAttribute("disabled") === null ) &&
( !option.parentNode.disabled || !jQuery.nodeName( option.parentNode, "optgroup" ) ) ) {
// Get the specific value for the option
value = jQuery( option ).val();
// We don't need an array for select-one elements
if ( one ) {
return value;
}
// Multi-Selects return an array
values.push( value );
}
}
return values;
},
set: function( elem, value ) {
var optionSet, option,
options = elem.options,
values = jQuery.makeArray( value ),
i = options.length;
while ( i-- ) {
option = options[ i ];
if ( (option.selected = jQuery.inArray( jQuery(option).val(), values ) >= 0) ) {
optionSet = true;
}
}
// force browsers to behave consistently when non-matching value is set
if ( !optionSet ) {
elem.selectedIndex = -1;
}
return values;
}
}
},
attr: function( elem, name, value ) {
var hooks, ret,
nType = elem.nodeType;
// don't get/set attributes on text, comment and attribute nodes
if ( !elem || nType === 3 || nType === 8 || nType === 2 ) {
return;
}
// Fallback to prop when attributes are not supported
if ( typeof elem.getAttribute === core_strundefined ) {
return jQuery.prop( elem, name, value );
}
// All attributes are lowercase
// Grab necessary hook if one is defined
if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) {
name = name.toLowerCase();
hooks = jQuery.attrHooks[ name ] ||
( jQuery.expr.match.bool.test( name ) ? boolHook : nodeHook );
}
if ( value !== undefined ) {
if ( value === null ) {
jQuery.removeAttr( elem, name );
} else if ( hooks && "set" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ) {
return ret;
} else {
elem.setAttribute( name, value + "" );
return value;
}
} else if ( hooks && "get" in hooks && (ret = hooks.get( elem, name )) !== null ) {
return ret;
} else {
ret = jQuery.find.attr( elem, name );
// Non-existent attributes return null, we normalize to undefined
return ret == null ?
undefined :
ret;
}
},
removeAttr: function( elem, value ) {
var name, propName,
i = 0,
attrNames = value && value.match( core_rnotwhite );
if ( attrNames && elem.nodeType === 1 ) {
while ( (name = attrNames[i++]) ) {
propName = jQuery.propFix[ name ] || name;
// Boolean attributes get special treatment (#10870)
if ( jQuery.expr.match.bool.test( name ) ) {
// Set corresponding property to false
elem[ propName ] = false;
}
elem.removeAttribute( name );
}
}
},
attrHooks: {
type: {
set: function( elem, value ) {
if ( !jQuery.support.radioValue && value === "radio" && jQuery.nodeName(elem, "input") ) {
// Setting the type on a radio button after the value resets the value in IE6-9
// Reset value to default in case type is set after value during creation
var val = elem.value;
elem.setAttribute( "type", value );
if ( val ) {
elem.value = val;
}
return value;
}
}
}
},
propFix: {
"for": "htmlFor",
"class": "className"
},
prop: function( elem, name, value ) {
var ret, hooks, notxml,
nType = elem.nodeType;
// don't get/set properties on text, comment and attribute nodes
if ( !elem || nType === 3 || nType === 8 || nType === 2 ) {
return;
}
notxml = nType !== 1 || !jQuery.isXMLDoc( elem );
if ( notxml ) {
// Fix name and attach hooks
name = jQuery.propFix[ name ] || name;
hooks = jQuery.propHooks[ name ];
}
if ( value !== undefined ) {
return hooks && "set" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ?
ret :
( elem[ name ] = value );
} else {
return hooks && "get" in hooks && (ret = hooks.get( elem, name )) !== null ?
ret :
elem[ name ];
}
},
propHooks: {
tabIndex: {
get: function( elem ) {
return elem.hasAttribute( "tabindex" ) || rfocusable.test( elem.nodeName ) || elem.href ?
elem.tabIndex :
-1;
}
}
}
});
// Hooks for boolean attributes
boolHook = {
set: function( elem, value, name ) {
if ( value === false ) {
// Remove boolean attributes when set to false
jQuery.removeAttr( elem, name );
} else {
elem.setAttribute( name, name );
}
return name;
}
};
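// Illustrative sketch: boolHook normalizes boolean attributes, so setting one
// to false removes it rather than writing checked="false" (`input` is a
// hypothetical checkbox element):
//
//   jQuery( input ).attr( "checked", true );  // checked="checked"
//   jQuery( input ).attr( "checked", false ); // attribute removed and the
//                                             // checked property set to false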
jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( i, name ) {
var getter = jQuery.expr.attrHandle[ name ] || jQuery.find.attr;
jQuery.expr.attrHandle[ name ] = function( elem, name, isXML ) {
var fn = jQuery.expr.attrHandle[ name ],
ret = isXML ?
undefined :
/* jshint eqeqeq: false */
// Temporarily disable this handler to check existence
(jQuery.expr.attrHandle[ name ] = undefined) !=
getter( elem, name, isXML ) ?
name.toLowerCase() :
null;
// Restore handler
jQuery.expr.attrHandle[ name ] = fn;
return ret;
};
});
// Support: IE9+
// Selectedness for an option in an optgroup can be inaccurate
if ( !jQuery.support.optSelected ) {
jQuery.propHooks.selected = {
get: function( elem ) {
var parent = elem.parentNode;
if ( parent && parent.parentNode ) {
// Reading selectedIndex is not dead code: the property access
// forces the browser to recompute each option's selectedness
parent.parentNode.selectedIndex;
}
return null;
}
};
}
jQuery.each([
"tabIndex",
"readOnly",
"maxLength",
"cellSpacing",
"cellPadding",
"rowSpan",
"colSpan",
"useMap",
"frameBorder",
"contentEditable"
], function() {
jQuery.propFix[ this.toLowerCase() ] = this;
});
// Radios and checkboxes getter/setter
jQuery.each([ "radio", "checkbox" ], function() {
jQuery.valHooks[ this ] = {
set: function( elem, value ) {
if ( jQuery.isArray( value ) ) {
return ( elem.checked = jQuery.inArray( jQuery(elem).val(), value ) >= 0 );
}
}
};
if ( !jQuery.support.checkOn ) {
jQuery.valHooks[ this ].get = function( elem ) {
// Support: Webkit
// "" is returned instead of "on" if a value isn't specified
return elem.getAttribute("value") === null ? "on" : elem.value;
};
}
});
var rkeyEvent = /^key/,
rmouseEvent = /^(?:mouse|contextmenu)|click/,
rfocusMorph = /^(?:focusinfocus|focusoutblur)$/,
rtypenamespace = /^([^.]*)(?:\.(.+)|)$/;
function returnTrue() {
return true;
}
function returnFalse() {
return false;
}
function safeActiveElement() {
try {
return document.activeElement;
} catch ( err ) { }
}
/*
* Helper functions for managing events -- not part of the public interface.
* Props to Dean Edwards' addEvent library for many of the ideas.
*/
jQuery.event = {
global: {},
add: function( elem, types, handler, data, selector ) {
var handleObjIn, eventHandle, tmp,
events, t, handleObj,
special, handlers, type, namespaces, origType,
elemData = data_priv.get( elem );
// Don't attach events to noData or text/comment nodes (but allow plain objects)
if ( !elemData ) {
return;
}
// Caller can pass in an object of custom data in lieu of the handler
if ( handler.handler ) {
handleObjIn = handler;
handler = handleObjIn.handler;
selector = handleObjIn.selector;
}
// Make sure that the handler has a unique ID, used to find/remove it later
if ( !handler.guid ) {
handler.guid = jQuery.guid++;
}
// Init the element's event structure and main handler, if this is the first
if ( !(events = elemData.events) ) {
events = elemData.events = {};
}
if ( !(eventHandle = elemData.handle) ) {
eventHandle = elemData.handle = function( e ) {
// Discard the second event of a jQuery.event.trigger() and
// when an event is called after a page has unloaded
return typeof jQuery !== core_strundefined && (!e || jQuery.event.triggered !== e.type) ?
jQuery.event.dispatch.apply( eventHandle.elem, arguments ) :
undefined;
};
// Add elem as a property of the handle fn to prevent a memory leak with IE non-native events
eventHandle.elem = elem;
}
// Handle multiple events separated by a space
types = ( types || "" ).match( core_rnotwhite ) || [""];
t = types.length;
while ( t-- ) {
tmp = rtypenamespace.exec( types[t] ) || [];
type = origType = tmp[1];
namespaces = ( tmp[2] || "" ).split( "." ).sort();
// There *must* be a type, no attaching namespace-only handlers
if ( !type ) {
continue;
}
// If event changes its type, use the special event handlers for the changed type
special = jQuery.event.special[ type ] || {};
// If selector defined, determine special event api type, otherwise given type
type = ( selector ? special.delegateType : special.bindType ) || type;
// Update special based on newly reset type
special = jQuery.event.special[ type ] || {};
// handleObj is passed to all event handlers
handleObj = jQuery.extend({
type: type,
origType: origType,
data: data,
handler: handler,
guid: handler.guid,
selector: selector,
needsContext: selector && jQuery.expr.match.needsContext.test( selector ),
namespace: namespaces.join(".")
}, handleObjIn );
// Init the event handler queue if we're the first
if ( !(handlers = events[ type ]) ) {
handlers = events[ type ] = [];
handlers.delegateCount = 0;
// Only use addEventListener if the special events handler returns false
if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) {
if ( elem.addEventListener ) {
elem.addEventListener( type, eventHandle, false );
}
}
}
if ( special.add ) {
special.add.call( elem, handleObj );
if ( !handleObj.handler.guid ) {
handleObj.handler.guid = handler.guid;
}
}
// Add to the element's handler list, delegates in front
if ( selector ) {
handlers.splice( handlers.delegateCount++, 0, handleObj );
} else {
handlers.push( handleObj );
}
// Keep track of which events have ever been used, for event optimization
jQuery.event.global[ type ] = true;
}
// Nullify elem to prevent memory leaks in IE
elem = null;
},
// Detach an event or set of events from an element
remove: function( elem, types, handler, selector, mappedTypes ) {
var j, origCount, tmp,
events, t, handleObj,
special, handlers, type, namespaces, origType,
elemData = data_priv.hasData( elem ) && data_priv.get( elem );
if ( !elemData || !(events = elemData.events) ) {
return;
}
// Once for each type.namespace in types; type may be omitted
types = ( types || "" ).match( core_rnotwhite ) || [""];
t = types.length;
while ( t-- ) {
tmp = rtypenamespace.exec( types[t] ) || [];
type = origType = tmp[1];
namespaces = ( tmp[2] || "" ).split( "." ).sort();
// Unbind all events (on this namespace, if provided) for the element
if ( !type ) {
for ( type in events ) {
jQuery.event.remove( elem, type + types[ t ], handler, selector, true );
}
continue;
}
special = jQuery.event.special[ type ] || {};
type = ( selector ? special.delegateType : special.bindType ) || type;
handlers = events[ type ] || [];
tmp = tmp[2] && new RegExp( "(^|\\.)" + namespaces.join("\\.(?:.*\\.|)") + "(\\.|$)" );
// Remove matching events
origCount = j = handlers.length;
while ( j-- ) {
handleObj = handlers[ j ];
if ( ( mappedTypes || origType === handleObj.origType ) &&
( !handler || handler.guid === handleObj.guid ) &&
( !tmp || tmp.test( handleObj.namespace ) ) &&
( !selector || selector === handleObj.selector || selector === "**" && handleObj.selector ) ) {
handlers.splice( j, 1 );
if ( handleObj.selector ) {
handlers.delegateCount--;
}
if ( special.remove ) {
special.remove.call( elem, handleObj );
}
}
}
// Remove generic event handler if we removed something and no more handlers exist
// (avoids potential for endless recursion during removal of special event handlers)
if ( origCount && !handlers.length ) {
if ( !special.teardown || special.teardown.call( elem, namespaces, elemData.handle ) === false ) {
jQuery.removeEvent( elem, type, elemData.handle );
}
delete events[ type ];
}
}
// Remove the expando if it's no longer used
if ( jQuery.isEmptyObject( events ) ) {
delete elemData.handle;
data_priv.remove( elem, "events" );
}
},
trigger: function( event, data, elem, onlyHandlers ) {
var i, cur, tmp, bubbleType, ontype, handle, special,
eventPath = [ elem || document ],
type = core_hasOwn.call( event, "type" ) ? event.type : event,
namespaces = core_hasOwn.call( event, "namespace" ) ? event.namespace.split(".") : [];
cur = tmp = elem = elem || document;
// Don't do events on text and comment nodes
if ( elem.nodeType === 3 || elem.nodeType === 8 ) {
return;
}
// focus/blur morphs to focusin/out; ensure we're not firing them right now
if ( rfocusMorph.test( type + jQuery.event.triggered ) ) {
return;
}
if ( type.indexOf(".") >= 0 ) {
// Namespaced trigger; create a regexp to match event type in handle()
namespaces = type.split(".");
type = namespaces.shift();
namespaces.sort();
}
ontype = type.indexOf(":") < 0 && "on" + type;
// Caller can pass in a jQuery.Event object, Object, or just an event type string
event = event[ jQuery.expando ] ?
event :
new jQuery.Event( type, typeof event === "object" && event );
// Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true)
event.isTrigger = onlyHandlers ? 2 : 3;
event.namespace = namespaces.join(".");
event.namespace_re = event.namespace ?
new RegExp( "(^|\\.)" + namespaces.join("\\.(?:.*\\.|)") + "(\\.|$)" ) :
null;
// Clean up the event in case it is being reused
event.result = undefined;
if ( !event.target ) {
event.target = elem;
}
// Clone any incoming data and prepend the event, creating the handler arg list
data = data == null ?
[ event ] :
jQuery.makeArray( data, [ event ] );
// Allow special events to draw outside the lines
special = jQuery.event.special[ type ] || {};
if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) {
return;
}
// Determine event propagation path in advance, per W3C events spec (#9951)
// Bubble up to document, then to window; watch for a global ownerDocument var (#9724)
if ( !onlyHandlers && !special.noBubble && !jQuery.isWindow( elem ) ) {
bubbleType = special.delegateType || type;
if ( !rfocusMorph.test( bubbleType + type ) ) {
cur = cur.parentNode;
}
for ( ; cur; cur = cur.parentNode ) {
eventPath.push( cur );
tmp = cur;
}
// Only add window if we got to document (e.g., not plain obj or detached DOM)
if ( tmp === (elem.ownerDocument || document) ) {
eventPath.push( tmp.defaultView || tmp.parentWindow || window );
}
}
// Fire handlers on the event path
i = 0;
while ( (cur = eventPath[i++]) && !event.isPropagationStopped() ) {
event.type = i > 1 ?
bubbleType :
special.bindType || type;
// jQuery handler
handle = ( data_priv.get( cur, "events" ) || {} )[ event.type ] && data_priv.get( cur, "handle" );
if ( handle ) {
handle.apply( cur, data );
}
// Native handler
handle = ontype && cur[ ontype ];
if ( handle && jQuery.acceptData( cur ) && handle.apply && handle.apply( cur, data ) === false ) {
event.preventDefault();
}
}
event.type = type;
// If nobody prevented the default action, do it now
if ( !onlyHandlers && !event.isDefaultPrevented() ) {
if ( (!special._default || special._default.apply( eventPath.pop(), data ) === false) &&
jQuery.acceptData( elem ) ) {
// Call a native DOM method on the target with the same name as the event.
// Don't do default actions on window, that's where global variables be (#6170)
if ( ontype && jQuery.isFunction( elem[ type ] ) && !jQuery.isWindow( elem ) ) {
// Don't re-trigger an onFOO event when we call its FOO() method
tmp = elem[ ontype ];
if ( tmp ) {
elem[ ontype ] = null;
}
// Prevent re-triggering of the same event, since we already bubbled it above
jQuery.event.triggered = type;
elem[ type ]();
jQuery.event.triggered = undefined;
if ( tmp ) {
elem[ ontype ] = tmp;
}
}
}
}
return event.result;
},
dispatch: function( event ) {
// Make a writable jQuery.Event from the native event object
event = jQuery.event.fix( event );
var i, j, ret, matched, handleObj,
handlerQueue = [],
args = core_slice.call( arguments ),
handlers = ( data_priv.get( this, "events" ) || {} )[ event.type ] || [],
special = jQuery.event.special[ event.type ] || {};
// Use the fix-ed jQuery.Event rather than the (read-only) native event
args[0] = event;
event.delegateTarget = this;
// Call the preDispatch hook for the mapped type, and let it bail if desired
if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) {
return;
}
// Determine handlers
handlerQueue = jQuery.event.handlers.call( this, event, handlers );
// Run delegates first; they may want to stop propagation beneath us
i = 0;
while ( (matched = handlerQueue[ i++ ]) && !event.isPropagationStopped() ) {
event.currentTarget = matched.elem;
j = 0;
while ( (handleObj = matched.handlers[ j++ ]) && !event.isImmediatePropagationStopped() ) {
// Triggered event must either 1) have no namespace, or
// 2) have namespace(s) a subset or equal to those in the bound event (both can have no namespace).
if ( !event.namespace_re || event.namespace_re.test( handleObj.namespace ) ) {
event.handleObj = handleObj;
event.data = handleObj.data;
ret = ( (jQuery.event.special[ handleObj.origType ] || {}).handle || handleObj.handler )
.apply( matched.elem, args );
if ( ret !== undefined ) {
if ( (event.result = ret) === false ) {
event.preventDefault();
event.stopPropagation();
}
}
}
}
}
// Call the postDispatch hook for the mapped type
if ( special.postDispatch ) {
special.postDispatch.call( this, event );
}
return event.result;
},
handlers: function( event, handlers ) {
var i, matches, sel, handleObj,
handlerQueue = [],
delegateCount = handlers.delegateCount,
cur = event.target;
// Find delegate handlers
// Black-hole SVG <use> instance trees (#13180)
// Avoid non-left-click bubbling in Firefox (#3861)
if ( delegateCount && cur.nodeType && (!event.button || event.type !== "click") ) {
for ( ; cur !== this; cur = cur.parentNode || this ) {
// Don't process clicks on disabled elements (#6911, #8165, #11382, #11764)
if ( cur.disabled !== true || event.type !== "click" ) {
matches = [];
for ( i = 0; i < delegateCount; i++ ) {
handleObj = handlers[ i ];
// Don't conflict with Object.prototype properties (#13203)
sel = handleObj.selector + " ";
if ( matches[ sel ] === undefined ) {
matches[ sel ] = handleObj.needsContext ?
jQuery( sel, this ).index( cur ) >= 0 :
jQuery.find( sel, this, null, [ cur ] ).length;
}
if ( matches[ sel ] ) {
matches.push( handleObj );
}
}
if ( matches.length ) {
handlerQueue.push({ elem: cur, handlers: matches });
}
}
}
}
// Add the remaining (directly-bound) handlers
if ( delegateCount < handlers.length ) {
handlerQueue.push({ elem: this, handlers: handlers.slice( delegateCount ) });
}
return handlerQueue;
},
// Includes some event props shared by KeyEvent and MouseEvent
props: "altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),
fixHooks: {},
keyHooks: {
props: "char charCode key keyCode".split(" "),
filter: function( event, original ) {
// Add which for key events
if ( event.which == null ) {
event.which = original.charCode != null ? original.charCode : original.keyCode;
}
return event;
}
},
mouseHooks: {
props: "button buttons clientX clientY offsetX offsetY pageX pageY screenX screenY toElement".split(" "),
filter: function( event, original ) {
var eventDoc, doc, body,
button = original.button;
// Calculate pageX/Y if missing and clientX/Y available
if ( event.pageX == null && original.clientX != null ) {
eventDoc = event.target.ownerDocument || document;
doc = eventDoc.documentElement;
body = eventDoc.body;
event.pageX = original.clientX + ( doc && doc.scrollLeft || body && body.scrollLeft || 0 ) - ( doc && doc.clientLeft || body && body.clientLeft || 0 );
event.pageY = original.clientY + ( doc && doc.scrollTop || body && body.scrollTop || 0 ) - ( doc && doc.clientTop || body && body.clientTop || 0 );
}
// Add which for click: 1 === left; 2 === middle; 3 === right
// Note: button is not normalized, so don't use it
if ( !event.which && button !== undefined ) {
event.which = ( button & 1 ? 1 : ( button & 2 ? 3 : ( button & 4 ? 2 : 0 ) ) );
}
return event;
}
},
fix: function( event ) {
if ( event[ jQuery.expando ] ) {
return event;
}
// Create a writable copy of the event object and normalize some properties
var i, prop, copy,
type = event.type,
originalEvent = event,
fixHook = this.fixHooks[ type ];
if ( !fixHook ) {
this.fixHooks[ type ] = fixHook =
rmouseEvent.test( type ) ? this.mouseHooks :
rkeyEvent.test( type ) ? this.keyHooks :
{};
}
copy = fixHook.props ? this.props.concat( fixHook.props ) : this.props;
event = new jQuery.Event( originalEvent );
i = copy.length;
while ( i-- ) {
prop = copy[ i ];
event[ prop ] = originalEvent[ prop ];
}
// Support: Cordova 2.5 (WebKit) (#13255)
// All events should have a target; Cordova deviceready doesn't
if ( !event.target ) {
event.target = document;
}
// Support: Safari 6.0+, Chrome < 28
// Target should not be a text node (#504, #13143)
if ( event.target.nodeType === 3 ) {
event.target = event.target.parentNode;
}
return fixHook.filter ? fixHook.filter( event, originalEvent ) : event;
},
special: {
load: {
// Prevent triggered image.load events from bubbling to window.load
noBubble: true
},
focus: {
// Fire native event if possible so blur/focus sequence is correct
trigger: function() {
if ( this !== safeActiveElement() && this.focus ) {
this.focus();
return false;
}
},
delegateType: "focusin"
},
blur: {
trigger: function() {
if ( this === safeActiveElement() && this.blur ) {
this.blur();
return false;
}
},
delegateType: "focusout"
},
click: {
// For checkbox, fire native event so checked state will be right
trigger: function() {
if ( this.type === "checkbox" && this.click && jQuery.nodeName( this, "input" ) ) {
this.click();
return false;
}
},
// For cross-browser consistency, don't fire native .click() on links
_default: function( event ) {
return jQuery.nodeName( event.target, "a" );
}
},
beforeunload: {
postDispatch: function( event ) {
// Support: Firefox 20+
// Firefox doesn't alert if the returnValue field is not set.
if ( event.result !== undefined ) {
event.originalEvent.returnValue = event.result;
}
}
}
},
simulate: function( type, elem, event, bubble ) {
// Piggyback on a donor event to simulate a different one.
// Fake originalEvent to avoid donor's stopPropagation, but if the
// simulated event prevents default then we do the same on the donor.
var e = jQuery.extend(
new jQuery.Event(),
event,
{
type: type,
isSimulated: true,
originalEvent: {}
}
);
if ( bubble ) {
jQuery.event.trigger( e, null, elem );
} else {
jQuery.event.dispatch.call( elem, e );
}
if ( e.isDefaultPrevented() ) {
event.preventDefault();
}
}
};
jQuery.removeEvent = function( elem, type, handle ) {
if ( elem.removeEventListener ) {
elem.removeEventListener( type, handle, false );
}
};
jQuery.Event = function( src, props ) {
// Allow instantiation without the 'new' keyword
if ( !(this instanceof jQuery.Event) ) {
return new jQuery.Event( src, props );
}
// Event object
if ( src && src.type ) {
this.originalEvent = src;
this.type = src.type;
// Events bubbling up the document may have been marked as prevented
// by a handler lower down the tree; reflect the correct value.
this.isDefaultPrevented = ( src.defaultPrevented ||
src.getPreventDefault && src.getPreventDefault() ) ? returnTrue : returnFalse;
// Event type
} else {
this.type = src;
}
// Put explicitly provided properties onto the event object
if ( props ) {
jQuery.extend( this, props );
}
// Create a timestamp if incoming event doesn't have one
this.timeStamp = src && src.timeStamp || jQuery.now();
// Mark it as fixed
this[ jQuery.expando ] = true;
};
// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding
// http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html
jQuery.Event.prototype = {
isDefaultPrevented: returnFalse,
isPropagationStopped: returnFalse,
isImmediatePropagationStopped: returnFalse,
preventDefault: function() {
var e = this.originalEvent;
this.isDefaultPrevented = returnTrue;
if ( e && e.preventDefault ) {
e.preventDefault();
}
},
stopPropagation: function() {
var e = this.originalEvent;
this.isPropagationStopped = returnTrue;
if ( e && e.stopPropagation ) {
e.stopPropagation();
}
},
stopImmediatePropagation: function() {
this.isImmediatePropagationStopped = returnTrue;
this.stopPropagation();
}
};
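// Illustrative sketch: jQuery.Event objects can be constructed directly and
// handed to .trigger(), carrying custom properties through to handlers
// ("#btn" and `synthetic` are hypothetical):
//
//   var e = jQuery.Event( "click", { synthetic: true } );
//   jQuery( "#btn" ).on( "click", function( ev ) {
//       if ( ev.synthetic ) { ev.preventDefault(); }
//   }).trigger( e );
//   e.isDefaultPrevented(); // true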
// Create mouseenter/leave events using mouseover/out and event-time checks
// Support: Chrome 15+
jQuery.each({
mouseenter: "mouseover",
mouseleave: "mouseout"
}, function( orig, fix ) {
jQuery.event.special[ orig ] = {
delegateType: fix,
bindType: fix,
handle: function( event ) {
var ret,
target = this,
related = event.relatedTarget,
handleObj = event.handleObj;
// For mouseenter/leave call the handler if related is outside the target.
// NB: No relatedTarget if the mouse left/entered the browser window
if ( !related || (related !== target && !jQuery.contains( target, related )) ) {
event.type = handleObj.origType;
ret = handleObj.handler.apply( this, arguments );
event.type = fix;
}
return ret;
}
};
});
// Create "bubbling" focus and blur events
// Support: Firefox, Chrome, Safari
if ( !jQuery.support.focusinBubbles ) {
jQuery.each({ focus: "focusin", blur: "focusout" }, function( orig, fix ) {
// Attach a single capturing handler while someone wants focusin/focusout
var attaches = 0,
handler = function( event ) {
jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ), true );
};
jQuery.event.special[ fix ] = {
setup: function() {
if ( attaches++ === 0 ) {
document.addEventListener( orig, handler, true );
}
},
teardown: function() {
if ( --attaches === 0 ) {
document.removeEventListener( orig, handler, true );
}
}
};
});
}
jQuery.fn.extend({
on: function( types, selector, data, fn, /*INTERNAL*/ one ) {
var origFn, type;
// Types can be a map of types/handlers
if ( typeof types === "object" ) {
// ( types-Object, selector, data )
if ( typeof selector !== "string" ) {
// ( types-Object, data )
data = data || selector;
selector = undefined;
}
for ( type in types ) {
this.on( type, selector, data, types[ type ], one );
}
return this;
}
if ( data == null && fn == null ) {
// ( types, fn )
fn = selector;
data = selector = undefined;
} else if ( fn == null ) {
if ( typeof selector === "string" ) {
// ( types, selector, fn )
fn = data;
data = undefined;
} else {
// ( types, data, fn )
fn = data;
data = selector;
selector = undefined;
}
}
if ( fn === false ) {
fn = returnFalse;
} else if ( !fn ) {
return this;
}
if ( one === 1 ) {
origFn = fn;
fn = function( event ) {
// Can use an empty set, since event contains the info
jQuery().off( event );
return origFn.apply( this, arguments );
};
// Use same guid so caller can remove using origFn
fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ );
}
return this.each( function() {
jQuery.event.add( this, types, fn, data, selector );
});
},
one: function( types, selector, data, fn ) {
return this.on( types, selector, data, fn, 1 );
},
off: function( types, selector, fn ) {
var handleObj, type;
if ( types && types.preventDefault && types.handleObj ) {
// ( event ) dispatched jQuery.Event
handleObj = types.handleObj;
jQuery( types.delegateTarget ).off(
handleObj.namespace ? handleObj.origType + "." + handleObj.namespace : handleObj.origType,
handleObj.selector,
handleObj.handler
);
return this;
}
if ( typeof types === "object" ) {
// ( types-object [, selector] )
for ( type in types ) {
this.off( type, selector, types[ type ] );
}
return this;
}
if ( selector === false || typeof selector === "function" ) {
// ( types [, fn] )
fn = selector;
selector = undefined;
}
if ( fn === false ) {
fn = returnFalse;
}
return this.each(function() {
jQuery.event.remove( this, types, fn, selector );
});
},
trigger: function( type, data ) {
return this.each(function() {
jQuery.event.trigger( type, data, this );
});
},
triggerHandler: function( type, data ) {
var elem = this[0];
if ( elem ) {
return jQuery.event.trigger( type, data, elem, true );
}
}
});
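// Illustrative sketch of direct vs. delegated binding through the API above;
// a selector argument routes the handler through the delegate queue built in
// dispatch()/handlers() (`handler` is a hypothetical function):
//
//   jQuery( "#list" )
//       .on( "click", "li", handler )   // delegated: fires for any <li> inside
//       .one( "mouseover", handler )    // direct: removed after first call
//       .off( "click", "li", handler ); // detach the delegated binding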
var isSimple = /^.[^:#\[\.,]*$/,
rparentsprev = /^(?:parents|prev(?:Until|All))/,
rneedsContext = jQuery.expr.match.needsContext,
// methods guaranteed to produce a unique set when starting from a unique set
guaranteedUnique = {
children: true,
contents: true,
next: true,
prev: true
};
jQuery.fn.extend({
find: function( selector ) {
var i,
ret = [],
self = this,
len = self.length;
if ( typeof selector !== "string" ) {
return this.pushStack( jQuery( selector ).filter(function() {
for ( i = 0; i < len; i++ ) {
if ( jQuery.contains( self[ i ], this ) ) {
return true;
}
}
}) );
}
for ( i = 0; i < len; i++ ) {
jQuery.find( selector, self[ i ], ret );
}
// Needed because $( selector, context ) becomes $( context ).find( selector )
ret = this.pushStack( len > 1 ? jQuery.unique( ret ) : ret );
ret.selector = this.selector ? this.selector + " " + selector : selector;
return ret;
},
has: function( target ) {
var targets = jQuery( target, this ),
l = targets.length;
return this.filter(function() {
var i = 0;
for ( ; i < l; i++ ) {
if ( jQuery.contains( this, targets[i] ) ) {
return true;
}
}
});
},
not: function( selector ) {
return this.pushStack( winnow(this, selector || [], true) );
},
filter: function( selector ) {
return this.pushStack( winnow(this, selector || [], false) );
},
is: function( selector ) {
return !!winnow(
this,
// If this is a positional/relative selector, check membership in the returned set
// so $("p:first").is("p:last") won't return true for a doc with two "p".
typeof selector === "string" && rneedsContext.test( selector ) ?
jQuery( selector ) :
selector || [],
false
).length;
},
closest: function( selectors, context ) {
var cur,
i = 0,
l = this.length,
matched = [],
pos = ( rneedsContext.test( selectors ) || typeof selectors !== "string" ) ?
jQuery( selectors, context || this.context ) :
0;
for ( ; i < l; i++ ) {
for ( cur = this[i]; cur && cur !== context; cur = cur.parentNode ) {
// Always skip document fragments
if ( cur.nodeType < 11 && (pos ?
pos.index(cur) > -1 :
// Don't pass non-elements to Sizzle
cur.nodeType === 1 &&
jQuery.find.matchesSelector(cur, selectors)) ) {
cur = matched.push( cur );
break;
}
}
}
return this.pushStack( matched.length > 1 ? jQuery.unique( matched ) : matched );
},
// Determine the position of an element within
// the matched set of elements
index: function( elem ) {
// No argument, return index in parent
if ( !elem ) {
return ( this[ 0 ] && this[ 0 ].parentNode ) ? this.first().prevAll().length : -1;
}
// index in selector
if ( typeof elem === "string" ) {
return core_indexOf.call( jQuery( elem ), this[ 0 ] );
}
// Locate the position of the desired element
return core_indexOf.call( this,
// If it receives a jQuery object, the first element is used
elem.jquery ? elem[ 0 ] : elem
);
},
add: function( selector, context ) {
var set = typeof selector === "string" ?
jQuery( selector, context ) :
jQuery.makeArray( selector && selector.nodeType ? [ selector ] : selector ),
all = jQuery.merge( this.get(), set );
return this.pushStack( jQuery.unique(all) );
},
addBack: function( selector ) {
return this.add( selector == null ?
this.prevObject : this.prevObject.filter(selector)
);
}
});
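// Illustrative sketch: .closest() walks up from each element and, unlike
// .parents(), can match the element itself; .index() with no argument
// reports the position among previous siblings. Inside a handler:
//
//   jQuery( event.target ).closest( "li" ); // nearest <li>, self included
//   jQuery( "li.active" ).index();          // position within its parent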
function sibling( cur, dir ) {
while ( (cur = cur[dir]) && cur.nodeType !== 1 ) {}
return cur;
}
jQuery.each({
parent: function( elem ) {
var parent = elem.parentNode;
return parent && parent.nodeType !== 11 ? parent : null;
},
parents: function( elem ) {
return jQuery.dir( elem, "parentNode" );
},
parentsUntil: function( elem, i, until ) {
return jQuery.dir( elem, "parentNode", until );
},
next: function( elem ) {
return sibling( elem, "nextSibling" );
},
prev: function( elem ) {
return sibling( elem, "previousSibling" );
},
nextAll: function( elem ) {
return jQuery.dir( elem, "nextSibling" );
},
prevAll: function( elem ) {
return jQuery.dir( elem, "previousSibling" );
},
nextUntil: function( elem, i, until ) {
return jQuery.dir( elem, "nextSibling", until );
},
prevUntil: function( elem, i, until ) {
return jQuery.dir( elem, "previousSibling", until );
},
siblings: function( elem ) {
return jQuery.sibling( ( elem.parentNode || {} ).firstChild, elem );
},
children: function( elem ) {
return jQuery.sibling( elem.firstChild );
},
contents: function( elem ) {
return elem.contentDocument || jQuery.merge( [], elem.childNodes );
}
}, function( name, fn ) {
jQuery.fn[ name ] = function( until, selector ) {
var matched = jQuery.map( this, fn, until );
if ( name.slice( -5 ) !== "Until" ) {
selector = until;
}
if ( selector && typeof selector === "string" ) {
matched = jQuery.filter( selector, matched );
}
if ( this.length > 1 ) {
// Remove duplicates
if ( !guaranteedUnique[ name ] ) {
jQuery.unique( matched );
}
// Reverse order for parents* and prev-derivatives
if ( rparentsprev.test( name ) ) {
matched.reverse();
}
}
return this.pushStack( matched );
};
});
jQuery.extend({
filter: function( expr, elems, not ) {
var elem = elems[ 0 ];
if ( not ) {
expr = ":not(" + expr + ")";
}
return elems.length === 1 && elem.nodeType === 1 ?
jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : [] :
jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) {
return elem.nodeType === 1;
}));
},
dir: function( elem, dir, until ) {
var matched = [],
truncate = until !== undefined;
while ( (elem = elem[ dir ]) && elem.nodeType !== 9 ) {
if ( elem.nodeType === 1 ) {
if ( truncate && jQuery( elem ).is( until ) ) {
break;
}
matched.push( elem );
}
}
return matched;
},
sibling: function( n, elem ) {
var matched = [];
for ( ; n; n = n.nextSibling ) {
if ( n.nodeType === 1 && n !== elem ) {
matched.push( n );
}
}
return matched;
}
});
// Implement the identical functionality for filter and not
function winnow( elements, qualifier, not ) {
if ( jQuery.isFunction( qualifier ) ) {
return jQuery.grep( elements, function( elem, i ) {
/* jshint -W018 */
return !!qualifier.call( elem, i, elem ) !== not;
});
}
if ( qualifier.nodeType ) {
return jQuery.grep( elements, function( elem ) {
return ( elem === qualifier ) !== not;
});
}
if ( typeof qualifier === "string" ) {
if ( isSimple.test( qualifier ) ) {
return jQuery.filter( qualifier, elements, not );
}
qualifier = jQuery.filter( qualifier, elements );
}
return jQuery.grep( elements, function( elem ) {
return ( core_indexOf.call( qualifier, elem ) >= 0 ) !== not;
});
}
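// Illustrative sketch: .filter()/.not() both route through winnow(), so a
// function, a DOM node, or a selector all work as the qualifier:
//
//   jQuery( "li" ).filter(function( i ) { return i % 2 === 0; }); // even items
//   jQuery( "li" ).not( document.getElementById( "skip" ) );     // drop one node
//   jQuery( "li" ).filter( ".active" );                          // simple selector fast path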
var rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,
rtagName = /<([\w:]+)/,
rhtml = /<|&#?\w+;/,
rnoInnerhtml = /<(?:script|style|link)/i,
manipulation_rcheckableType = /^(?:checkbox|radio)$/i,
// checked="checked" or checked
rchecked = /checked\s*(?:[^=]|=\s*.checked.)/i,
rscriptType = /^$|\/(?:java|ecma)script/i,
rscriptTypeMasked = /^true\/(.*)/,
rcleanScript = /^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,
// We have to close these tags to support XHTML (#13200)
wrapMap = {
// Support: IE 9
option: [ 1, "<select multiple='multiple'>", "</select>" ],
thead: [ 1, "<table>", "</table>" ],
col: [ 2, "<table><colgroup>", "</colgroup></table>" ],
tr: [ 2, "<table><tbody>", "</tbody></table>" ],
td: [ 3, "<table><tbody><tr>", "</tr></tbody></table>" ],
_default: [ 0, "", "" ]
};
// Support: IE 9
wrapMap.optgroup = wrapMap.option;
wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead;
wrapMap.th = wrapMap.td;
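// Illustrative sketch: wrapMap supplies the parent context the HTML parser
// requires, so table fragments parse correctly on their own; a <tr> string,
// for example, is wrapped in "<table><tbody>"..."</tbody></table>" and
// unwrapped two levels deep before insertion ("#scores" is hypothetical):
//
//   jQuery( "<tr><td>x</td></tr>" ).appendTo( "#scores tbody" );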
jQuery.fn.extend({
text: function( value ) {
return jQuery.access( this, function( value ) {
return value === undefined ?
jQuery.text( this ) :
this.empty().append( ( this[ 0 ] && this[ 0 ].ownerDocument || document ).createTextNode( value ) );
}, null, value, arguments.length );
},
append: function() {
return this.domManip( arguments, function( elem ) {
if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) {
var target = manipulationTarget( this, elem );
target.appendChild( elem );
}
});
},
prepend: function() {
return this.domManip( arguments, function( elem ) {
if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) {
var target = manipulationTarget( this, elem );
target.insertBefore( elem, target.firstChild );
}
});
},
before: function() {
return this.domManip( arguments, function( elem ) {
if ( this.parentNode ) {
this.parentNode.insertBefore( elem, this );
}
});
},
after: function() {
return this.domManip( arguments, function( elem ) {
if ( this.parentNode ) {
this.parentNode.insertBefore( elem, this.nextSibling );
}
});
},
// keepData is for internal use only--do not document
remove: function( selector, keepData ) {
var elem,
elems = selector ? jQuery.filter( selector, this ) : this,
i = 0;
for ( ; (elem = elems[i]) != null; i++ ) {
if ( !keepData && elem.nodeType === 1 ) {
jQuery.cleanData( getAll( elem ) );
}
if ( elem.parentNode ) {
if ( keepData && jQuery.contains( elem.ownerDocument, elem ) ) {
setGlobalEval( getAll( elem, "script" ) );
}
elem.parentNode.removeChild( elem );
}
}
return this;
},
empty: function() {
var elem,
i = 0;
for ( ; (elem = this[i]) != null; i++ ) {
if ( elem.nodeType === 1 ) {
// Prevent memory leaks
jQuery.cleanData( getAll( elem, false ) );
// Remove any remaining nodes
elem.textContent = "";
}
}
return this;
},
clone: function( dataAndEvents, deepDataAndEvents ) {
dataAndEvents = dataAndEvents == null ? false : dataAndEvents;
deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents;
return this.map( function () {
return jQuery.clone( this, dataAndEvents, deepDataAndEvents );
});
},
html: function( value ) {
return jQuery.access( this, function( value ) {
var elem = this[ 0 ] || {},
i = 0,
l = this.length;
if ( value === undefined && elem.nodeType === 1 ) {
return elem.innerHTML;
}
// See if we can take a shortcut and just use innerHTML
if ( typeof value === "string" && !rnoInnerhtml.test( value ) &&
!wrapMap[ ( rtagName.exec( value ) || [ "", "" ] )[ 1 ].toLowerCase() ] ) {
value = value.replace( rxhtmlTag, "<$1></$2>" );
try {
for ( ; i < l; i++ ) {
elem = this[ i ] || {};
// Remove element nodes and prevent memory leaks
if ( elem.nodeType === 1 ) {
jQuery.cleanData( getAll( elem, false ) );
elem.innerHTML = value;
}
}
elem = 0;
// If using innerHTML throws an exception, use the fallback method
} catch( e ) {}
}
if ( elem ) {
this.empty().append( value );
}
}, null, value, arguments.length );
},
replaceWith: function() {
var
// Snapshot the DOM in case .domManip sweeps something relevant into its fragment
args = jQuery.map( this, function( elem ) {
return [ elem.nextSibling, elem.parentNode ];
}),
i = 0;
// Make the changes, replacing each context element with the new content
this.domManip( arguments, function( elem ) {
var next = args[ i++ ],
parent = args[ i++ ];
if ( parent ) {
// Don't use the snapshot next if it has moved (#13810)
if ( next && next.parentNode !== parent ) {
next = this.nextSibling;
}
jQuery( this ).remove();
parent.insertBefore( elem, next );
}
// Allow new content to include elements from the context set
}, true );
// Force removal if there was no new content (e.g., from empty arguments)
return i ? this : this.remove();
},
detach: function( selector ) {
return this.remove( selector, true );
},
domManip: function( args, callback, allowIntersection ) {
// Flatten any nested arrays
args = core_concat.apply( [], args );
var fragment, first, scripts, hasScripts, node, doc,
i = 0,
l = this.length,
set = this,
iNoClone = l - 1,
value = args[ 0 ],
isFunction = jQuery.isFunction( value );
// We can't cloneNode fragments that contain checked, in WebKit
if ( isFunction || !( l <= 1 || typeof value !== "string" || jQuery.support.checkClone || !rchecked.test( value ) ) ) {
return this.each(function( index ) {
var self = set.eq( index );
if ( isFunction ) {
args[ 0 ] = value.call( this, index, self.html() );
}
self.domManip( args, callback, allowIntersection );
});
}
if ( l ) {
fragment = jQuery.buildFragment( args, this[ 0 ].ownerDocument, false, !allowIntersection && this );
first = fragment.firstChild;
if ( fragment.childNodes.length === 1 ) {
fragment = first;
}
if ( first ) {
scripts = jQuery.map( getAll( fragment, "script" ), disableScript );
hasScripts = scripts.length;
// Use the original fragment for the last item instead of the first because it can end up
// being emptied incorrectly in certain situations (#8070).
for ( ; i < l; i++ ) {
node = fragment;
if ( i !== iNoClone ) {
node = jQuery.clone( node, true, true );
// Keep references to cloned scripts for later restoration
if ( hasScripts ) {
// Support: QtWebKit
// jQuery.merge because core_push.apply(_, arraylike) throws
jQuery.merge( scripts, getAll( node, "script" ) );
}
}
callback.call( this[ i ], node, i );
}
if ( hasScripts ) {
doc = scripts[ scripts.length - 1 ].ownerDocument;
// Reenable scripts
jQuery.map( scripts, restoreScript );
// Evaluate executable scripts on first document insertion
for ( i = 0; i < hasScripts; i++ ) {
node = scripts[ i ];
if ( rscriptType.test( node.type || "" ) &&
!data_priv.access( node, "globalEval" ) && jQuery.contains( doc, node ) ) {
if ( node.src ) {
// Hope ajax is available...
jQuery._evalUrl( node.src );
} else {
jQuery.globalEval( node.textContent.replace( rcleanScript, "" ) );
}
}
}
}
}
}
return this;
}
});
jQuery.each({
appendTo: "append",
prependTo: "prepend",
insertBefore: "before",
insertAfter: "after",
replaceAll: "replaceWith"
}, function( name, original ) {
jQuery.fn[ name ] = function( selector ) {
var elems,
ret = [],
insert = jQuery( selector ),
last = insert.length - 1,
i = 0;
for ( ; i <= last; i++ ) {
elems = i === last ? this : this.clone( true );
jQuery( insert[ i ] )[ original ]( elems );
// Support: QtWebKit
// .get() because core_push.apply(_, arraylike) throws
core_push.apply( ret, elems.get() );
}
return this.pushStack( ret );
};
});
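// Illustrative: each reversed method delegates to its counterpart, so
// jQuery( "<li/>" ).appendTo( "ul" ) behaves like jQuery( "ul" ).append( "<li/>" ),
// except that it returns the inserted elements (cloned for every target but the last).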
jQuery.extend({
clone: function( elem, dataAndEvents, deepDataAndEvents ) {
var i, l, srcElements, destElements,
clone = elem.cloneNode( true ),
inPage = jQuery.contains( elem.ownerDocument, elem );
// Support: IE >= 9
// Fix Cloning issues
if ( !jQuery.support.noCloneChecked && ( elem.nodeType === 1 || elem.nodeType === 11 ) && !jQuery.isXMLDoc( elem ) ) {
// We eschew Sizzle here for performance reasons: http://jsperf.com/getall-vs-sizzle/2
destElements = getAll( clone );
srcElements = getAll( elem );
for ( i = 0, l = srcElements.length; i < l; i++ ) {
fixInput( srcElements[ i ], destElements[ i ] );
}
}
// Copy the events from the original to the clone
if ( dataAndEvents ) {
if ( deepDataAndEvents ) {
srcElements = srcElements || getAll( elem );
destElements = destElements || getAll( clone );
for ( i = 0, l = srcElements.length; i < l; i++ ) {
cloneCopyEvent( srcElements[ i ], destElements[ i ] );
}
} else {
cloneCopyEvent( elem, clone );
}
}
// Preserve script evaluation history
destElements = getAll( clone, "script" );
if ( destElements.length > 0 ) {
setGlobalEval( destElements, !inPage && getAll( elem, "script" ) );
}
// Return the cloned set
return clone;
},
buildFragment: function( elems, context, scripts, selection ) {
var elem, tmp, tag, wrap, contains, j,
i = 0,
l = elems.length,
fragment = context.createDocumentFragment(),
nodes = [];
for ( ; i < l; i++ ) {
elem = elems[ i ];
if ( elem || elem === 0 ) {
// Add nodes directly
if ( jQuery.type( elem ) === "object" ) {
// Support: QtWebKit
// jQuery.merge because core_push.apply(_, arraylike) throws
jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem );
// Convert non-html into a text node
} else if ( !rhtml.test( elem ) ) {
nodes.push( context.createTextNode( elem ) );
// Convert html into DOM nodes
} else {
tmp = tmp || fragment.appendChild( context.createElement("div") );
// Deserialize a standard representation
tag = ( rtagName.exec( elem ) || ["", ""] )[ 1 ].toLowerCase();
wrap = wrapMap[ tag ] || wrapMap._default;
tmp.innerHTML = wrap[ 1 ] + elem.replace( rxhtmlTag, "<$1></$2>" ) + wrap[ 2 ];
// Descend through wrappers to the right content
j = wrap[ 0 ];
while ( j-- ) {
tmp = tmp.firstChild;
}
// Support: QtWebKit
// jQuery.merge because core_push.apply(_, arraylike) throws
jQuery.merge( nodes, tmp.childNodes );
// Remember the top-level container
tmp = fragment.firstChild;
// Fixes #12346
// Support: Webkit, IE
tmp.textContent = "";
}
}
}
// Remove wrapper from fragment
fragment.textContent = "";
i = 0;
while ( (elem = nodes[ i++ ]) ) {
// #4087 - If origin and destination elements are the same, and this is
// that element, do not do anything
if ( selection && jQuery.inArray( elem, selection ) !== -1 ) {
continue;
}
contains = jQuery.contains( elem.ownerDocument, elem );
// Append to fragment
tmp = getAll( fragment.appendChild( elem ), "script" );
// Preserve script evaluation history
if ( contains ) {
setGlobalEval( tmp );
}
// Capture executables
if ( scripts ) {
j = 0;
while ( (elem = tmp[ j++ ]) ) {
if ( rscriptType.test( elem.type || "" ) ) {
scripts.push( elem );
}
}
}
}
return fragment;
},
cleanData: function( elems ) {
var data, elem, events, type, key, j,
special = jQuery.event.special,
i = 0;
for ( ; (elem = elems[ i ]) !== undefined; i++ ) {
if ( Data.accepts( elem ) ) {
key = elem[ data_priv.expando ];
if ( key && (data = data_priv.cache[ key ]) ) {
events = Object.keys( data.events || {} );
if ( events.length ) {
for ( j = 0; (type = events[j]) !== undefined; j++ ) {
if ( special[ type ] ) {
jQuery.event.remove( elem, type );
// This is a shortcut to avoid jQuery.event.remove's overhead
} else {
jQuery.removeEvent( elem, type, data.handle );
}
}
}
if ( data_priv.cache[ key ] ) {
// Discard any remaining `private` data
delete data_priv.cache[ key ];
}
}
}
// Discard any remaining `user` data
delete data_user.cache[ elem[ data_user.expando ] ];
}
},
_evalUrl: function( url ) {
return jQuery.ajax({
url: url,
type: "GET",
dataType: "script",
async: false,
global: false,
"throws": true
});
}
});
// Support: 1.x compatibility
// Manipulating tables requires a tbody
function manipulationTarget( elem, content ) {
return jQuery.nodeName( elem, "table" ) &&
jQuery.nodeName( content.nodeType === 1 ? content : content.firstChild, "tr" ) ?
elem.getElementsByTagName("tbody")[0] ||
elem.appendChild( elem.ownerDocument.createElement("tbody") ) :
elem;
}
// Replace/restore the type attribute of script elements for safe DOM manipulation
function disableScript( elem ) {
elem.type = (elem.getAttribute("type") !== null) + "/" + elem.type;
return elem;
}
function restoreScript( elem ) {
var match = rscriptTypeMasked.exec( elem.type );
if ( match ) {
elem.type = match[ 1 ];
} else {
elem.removeAttribute("type");
}
return elem;
}
// Mark scripts as having already been evaluated
function setGlobalEval( elems, refElements ) {
var l = elems.length,
i = 0;
for ( ; i < l; i++ ) {
data_priv.set(
elems[ i ], "globalEval", !refElements || data_priv.get( refElements[ i ], "globalEval" )
);
}
}
function cloneCopyEvent( src, dest ) {
var i, l, type, pdataOld, pdataCur, udataOld, udataCur, events;
if ( dest.nodeType !== 1 ) {
return;
}
// 1. Copy private data: events, handlers, etc.
if ( data_priv.hasData( src ) ) {
pdataOld = data_priv.access( src );
pdataCur = data_priv.set( dest, pdataOld );
events = pdataOld.events;
if ( events ) {
delete pdataCur.handle;
pdataCur.events = {};
for ( type in events ) {
for ( i = 0, l = events[ type ].length; i < l; i++ ) {
jQuery.event.add( dest, type, events[ type ][ i ] );
}
}
}
}
// 2. Copy user data
if ( data_user.hasData( src ) ) {
udataOld = data_user.access( src );
udataCur = jQuery.extend( {}, udataOld );
data_user.set( dest, udataCur );
}
}
function getAll( context, tag ) {
var ret = context.getElementsByTagName ? context.getElementsByTagName( tag || "*" ) :
context.querySelectorAll ? context.querySelectorAll( tag || "*" ) :
[];
return tag === undefined || tag && jQuery.nodeName( context, tag ) ?
jQuery.merge( [ context ], ret ) :
ret;
}
// Support: IE >= 9
function fixInput( src, dest ) {
var nodeName = dest.nodeName.toLowerCase();
// Fails to persist the checked state of a cloned checkbox or radio button.
if ( nodeName === "input" && manipulation_rcheckableType.test( src.type ) ) {
dest.checked = src.checked;
// Fails to return the selected option to the default selected state when cloning options
} else if ( nodeName === "input" || nodeName === "textarea" ) {
dest.defaultValue = src.defaultValue;
}
}
jQuery.fn.extend({
wrapAll: function( html ) {
var wrap;
if ( jQuery.isFunction( html ) ) {
return this.each(function( i ) {
jQuery( this ).wrapAll( html.call(this, i) );
});
}
if ( this[ 0 ] ) {
// The elements to wrap the target around
wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true );
if ( this[ 0 ].parentNode ) {
wrap.insertBefore( this[ 0 ] );
}
wrap.map(function() {
var elem = this;
while ( elem.firstElementChild ) {
elem = elem.firstElementChild;
}
return elem;
}).append( this );
}
return this;
},
wrapInner: function( html ) {
if ( jQuery.isFunction( html ) ) {
return this.each(function( i ) {
jQuery( this ).wrapInner( html.call(this, i) );
});
}
return this.each(function() {
var self = jQuery( this ),
contents = self.contents();
if ( contents.length ) {
contents.wrapAll( html );
} else {
self.append( html );
}
});
},
wrap: function( html ) {
var isFunction = jQuery.isFunction( html );
return this.each(function( i ) {
jQuery( this ).wrapAll( isFunction ? html.call(this, i) : html );
});
},
unwrap: function() {
return this.parent().each(function() {
if ( !jQuery.nodeName( this, "body" ) ) {
jQuery( this ).replaceWith( this.childNodes );
}
}).end();
}
});
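// Illustrative: jQuery( "p" ).wrap( "<div class='box'/>" ) clones the wrapper for
// each paragraph, wrapAll() places the whole set inside a single wrapper, and
// wrapInner() wraps each element's contents instead of the element itself.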
var r20 = /%20/g,
rbracket = /\[\]$/,
rCRLF = /\r?\n/g,
rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i,
rsubmittable = /^(?:input|select|textarea|keygen)/i;
jQuery.fn.extend({
serialize: function() {
return jQuery.param( this.serializeArray() );
},
serializeArray: function() {
return this.map(function(){
// Can add propHook for "elements" to filter or add form elements
var elements = jQuery.prop( this, "elements" );
return elements ? jQuery.makeArray( elements ) : this;
})
.filter(function(){
var type = this.type;
// Use .is(":disabled") so that fieldset[disabled] works
return this.name && !jQuery( this ).is( ":disabled" ) &&
rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) &&
( this.checked || !manipulation_rcheckableType.test( type ) );
})
.map(function( i, elem ){
var val = jQuery( this ).val();
return val == null ?
null :
jQuery.isArray( val ) ?
jQuery.map( val, function( val ){
return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) };
}) :
{ name: elem.name, value: val.replace( rCRLF, "\r\n" ) };
}).get();
}
});
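// Illustrative: on a form containing <input name="q" value="a b">,
// .serializeArray() yields [ { name: "q", value: "a b" } ] and
// .serialize() yields "q=a+b" (spaces become "+" via jQuery.param).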
//Serialize an array of form elements or a set of
//key/values into a query string
jQuery.param = function( a, traditional ) {
var prefix,
s = [],
add = function( key, value ) {
// If value is a function, invoke it and return its value
value = jQuery.isFunction( value ) ? value() : ( value == null ? "" : value );
s[ s.length ] = encodeURIComponent( key ) + "=" + encodeURIComponent( value );
};
// Set traditional to true for jQuery <= 1.3.2 behavior.
if ( traditional === undefined ) {
traditional = jQuery.ajaxSettings && jQuery.ajaxSettings.traditional;
}
// If an array was passed in, assume that it is an array of form elements.
if ( jQuery.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) {
// Serialize the form elements
jQuery.each( a, function() {
add( this.name, this.value );
});
} else {
// If traditional, encode the "old" way (the way 1.3.2 or older
// did it), otherwise encode params recursively.
for ( prefix in a ) {
buildParams( prefix, a[ prefix ], traditional, add );
}
}
// Return the resulting serialization
return s.join( "&" ).replace( r20, "+" );
};
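// Illustrative serializations (assuming the default ajaxSettings):
//   jQuery.param({ a: [ 1, 2 ] })        // "a%5B%5D=1&a%5B%5D=2"  i.e. a[]=1&a[]=2
//   jQuery.param({ a: [ 1, 2 ] }, true)  // "a=1&a=2"              (traditional)
//   jQuery.param({ b: { c: 3 } })        // "b%5Bc%5D=3"           i.e. b[c]=3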
function buildParams( prefix, obj, traditional, add ) {
var name;
if ( jQuery.isArray( obj ) ) {
// Serialize array item.
jQuery.each( obj, function( i, v ) {
if ( traditional || rbracket.test( prefix ) ) {
// Treat each array item as a scalar.
add( prefix, v );
} else {
// Item is non-scalar (array or object), encode its numeric index.
buildParams( prefix + "[" + ( typeof v === "object" ? i : "" ) + "]", v, traditional, add );
}
});
} else if ( !traditional && jQuery.type( obj ) === "object" ) {
// Serialize object item.
for ( name in obj ) {
buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add );
}
} else {
// Serialize scalar item.
add( prefix, obj );
}
}
jQuery.each( ("blur focus focusin focusout load resize scroll unload click dblclick " +
"mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " +
"change select submit keydown keypress keyup error contextmenu").split(" "), function( i, name ) {
// Handle event binding
jQuery.fn[ name ] = function( data, fn ) {
return arguments.length > 0 ?
this.on( name, null, data, fn ) :
this.trigger( name );
};
});
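// Illustrative: the generated shorthands make these calls equivalent:
//   elem.click( fn )  ~  elem.on( "click", null, fn )   (.on() shifts the arguments)
//   elem.click()      ~  elem.trigger( "click" )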
jQuery.fn.extend({
hover: function( fnOver, fnOut ) {
return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver );
},
bind: function( types, data, fn ) {
return this.on( types, null, data, fn );
},
unbind: function( types, fn ) {
return this.off( types, null, fn );
},
delegate: function( selector, types, data, fn ) {
return this.on( types, selector, data, fn );
},
undelegate: function( selector, types, fn ) {
// ( namespace ) or ( selector, types [, fn] )
return arguments.length === 1 ? this.off( selector, "**" ) : this.off( types, selector || "**", fn );
}
});
// Limit scope pollution from any deprecated API
// (function() {
// The number of elements contained in the matched element set
jQuery.fn.size = function() {
return this.length;
};
jQuery.fn.andSelf = jQuery.fn.addBack;
// })();
if ( typeof module === "object" && module && typeof module.exports === "object" ) {
// Expose jQuery as module.exports in loaders that implement the Node
// module pattern (including browserify). Do not create the global, since
// the user will be storing it themselves locally, and globals are frowned
// upon in the Node module world.
module.exports = jQuery;
} else {
// Register as a named AMD module, since jQuery can be concatenated with other
// files that may use define, but not via a proper concatenation script that
// understands anonymous AMD modules. A named AMD is safest and most robust
// way to register. Lowercase jquery is used because AMD module names are
// derived from file names, and jQuery is normally delivered in a lowercase
// file name. Do this after creating the global so that if an AMD module wants
// to call noConflict to hide this version of jQuery, it will work.
if ( typeof define === "function" && define.amd ) {
define( "jquery", [], function () { return jQuery; } );
}
}
// If there is a window object, that at least has a document property,
// define jQuery and $ identifiers
if ( typeof window === "object" && typeof window.document === "object" ) {
window.jQuery = window.$ = jQuery;
}
})( window );
|
import os
import io
from . import _common
from ._common import as_file, files
from .abc import ResourceReader
from contextlib import suppress
from importlib.abc import ResourceLoader
from importlib.machinery import ModuleSpec
from io import BytesIO, TextIOWrapper
from pathlib import Path
from types import ModuleType
from typing import ContextManager, Iterable, Union
from typing import cast, BinaryIO, TextIO
from collections.abc import Sequence
from functools import singledispatch
__all__ = [
'Package',
'Resource',
'ResourceReader',
'as_file',
'contents',
'files',
'is_resource',
'open_binary',
'open_text',
'path',
'read_binary',
'read_text',
]
Package = Union[str, ModuleType]
Resource = Union[str, os.PathLike]
def open_binary(package: Package, resource: Resource) -> BinaryIO:
"""Return a file-like object opened for binary reading of the resource."""
resource = _common.normalize_path(resource)
package = _common.get_package(package)
reader = _common.get_resource_reader(package)
if reader is not None:
return reader.open_resource(resource)
spec = cast(ModuleSpec, package.__spec__)
# Using pathlib doesn't work well here due to the lack of 'strict'
# argument for pathlib.Path.resolve() prior to Python 3.6.
    if spec.submodule_search_locations is not None:
        paths = spec.submodule_search_locations
    elif spec.origin is not None:
        paths = [os.path.dirname(os.path.abspath(spec.origin))]
    else:
        # Neither search locations nor an origin: fall through to the final
        # FileNotFoundError rather than raising a NameError on 'paths'.
        paths = []
for package_path in paths:
full_path = os.path.join(package_path, resource)
try:
return open(full_path, mode='rb')
except OSError:
# Just assume the loader is a resource loader; all the relevant
# importlib.machinery loaders are and an AttributeError for
# get_data() will make it clear what is needed from the loader.
loader = cast(ResourceLoader, spec.loader)
data = None
if hasattr(spec.loader, 'get_data'):
with suppress(OSError):
data = loader.get_data(full_path)
if data is not None:
return BytesIO(data)
raise FileNotFoundError(f'{resource!r} resource not found in {spec.name!r}')
def open_text(
package: Package,
resource: Resource,
encoding: str = 'utf-8',
errors: str = 'strict',
) -> TextIO:
"""Return a file-like object opened for text reading of the resource."""
return TextIOWrapper(
open_binary(package, resource), encoding=encoding, errors=errors
)
def read_binary(package: Package, resource: Resource) -> bytes:
"""Return the binary contents of the resource."""
with open_binary(package, resource) as fp:
return fp.read()
def read_text(
package: Package,
resource: Resource,
encoding: str = 'utf-8',
errors: str = 'strict',
) -> str:
"""Return the decoded string of the resource.
The decoding-related arguments have the same semantics as those of
bytes.decode().
"""
with open_text(package, resource, encoding, errors) as fp:
return fp.read()
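# Illustrative usage (a sketch; assumes a package 'mypkg' that bundles 'data.txt'):
#     raw = read_binary('mypkg', 'data.txt')
#     text = read_text('mypkg', 'data.txt', encoding='utf-8')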
def path(
package: Package,
resource: Resource,
) -> 'ContextManager[Path]':
"""A context manager providing a file path object to the resource.
If the resource does not already exist on its own on the file system,
a temporary file will be created. If the file was created, the file
will be deleted upon exiting the context manager (no exception is
raised if the file was deleted prior to the context manager
exiting).
"""
reader = _common.get_resource_reader(_common.get_package(package))
return (
_path_from_reader(reader, _common.normalize_path(resource))
if reader
else _common.as_file(
_common.files(package).joinpath(_common.normalize_path(resource))
)
)
def _path_from_reader(reader, resource):
return _path_from_resource_path(reader, resource) or _path_from_open_resource(
reader, resource
)
def _path_from_resource_path(reader, resource):
with suppress(FileNotFoundError):
return Path(reader.resource_path(resource))
def _path_from_open_resource(reader, resource):
saved = io.BytesIO(reader.open_resource(resource).read())
return _common._tempfile(saved.read, suffix=resource)
def is_resource(package: Package, name: str) -> bool:
"""True if 'name' is a resource inside 'package'.
Directories are *not* resources.
"""
package = _common.get_package(package)
_common.normalize_path(name)
reader = _common.get_resource_reader(package)
if reader is not None:
return reader.is_resource(name)
package_contents = set(contents(package))
if name not in package_contents:
return False
return (_common.from_package(package) / name).is_file()
def contents(package: Package) -> Iterable[str]:
"""Return an iterable of entries in 'package'.
Note that not all entries are resources. Specifically, directories are
not considered resources. Use `is_resource()` on each entry returned here
to check if it is a resource or not.
"""
package = _common.get_package(package)
reader = _common.get_resource_reader(package)
if reader is not None:
return _ensure_sequence(reader.contents())
    traversable = _common.from_package(package)
    if traversable.is_dir():
        return list(item.name for item in traversable.iterdir())
return []
@singledispatch
def _ensure_sequence(iterable):
return list(iterable)
@_ensure_sequence.register(Sequence)
def _(iterable):
return iterable
|
//===--- MemoryLifetime.h ---------------------------------------*- C++ -*-===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2019 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
///
/// \file Contains utilities for calculating and verifying memory lifetime.
///
//===----------------------------------------------------------------------===//
#ifndef SWIFT_SIL_MEMORY_LIFETIME_H
#define SWIFT_SIL_MEMORY_LIFETIME_H
#include "swift/SIL/SILBasicBlock.h"
#include "swift/SIL/SILFunction.h"
namespace swift {
/// The MemoryLocations utility provides functions to analyze memory locations.
///
/// Memory locations are limited to addresses which are guaranteed not to be
/// aliased, like @in/inout parameters and alloc_stack.
/// Currently only a certain set of address instructions are supported:
/// Specifically those instructions which are going to be included when SIL
/// supports opaque values.
/// TODO: Support more address instructions, like cast instructions.
///
/// The MemoryLocations utility works well together with MemoryDataflow, which can be
/// used to calculate global dataflow of location information.
class MemoryLocations {
public:
using Bits = llvm::SmallBitVector;
/// Represents a not-aliased memory location: either an indirect function
/// parameter or an alloc_stack.
///
  /// Each location has a unique number which is the index in the
/// MemoryLifetime::locations array and the bit number in the bit sets.
///
/// Locations can have sub-locations in case the parent location is a struct
/// or tuple with fields/elements. So, each top-level location forms a
/// tree-like data structure. Sub-locations are only created lazily, i.e. if
/// struct/tuple elements are really accessed with struct/tuple_element_addr.
///
/// As most alloc_stack locations only live within a single block, such
/// single-block locations are not included in the "regular" data flow
/// analysis (to not blow up the bit vectors). They are handled separately
/// with a simple single-block data flow analysis, which runs independently
/// for each block.
struct Location {
/// The SIL value of the memory location.
///
/// For top-level locations this is either a function argument or an
/// alloc_stack. For sub-locations it's the struct/tuple_element_addr.
/// In case there are multiple struct/tuple_element_addr for a single
/// field, this is only one representative instruction out of the set.
SILValue representativeValue;
/// All tracked sub-locations.
///
/// If all tracked sub-locations cover the whole memory location, the "self"
/// bit is not set. In other words: the "self" bit represents all
/// sublocations, which are not explicitly tracked as locations.
/// For example:
/// \code
/// struct Inner {
/// var a: T
/// var b: T
/// }
/// struct Outer {
/// var x: T
/// var y: Inner
/// var z: T // not accessed in the analyzed function
/// }
/// \endcode
///
/// If the analyzed function contains:
/// \code
/// %a = alloc_stack $Outer // = location 0
  ///   %ox = struct_element_addr %a, #Outer.x   // = location 1
  ///   %oy = struct_element_addr %a, #Outer.y   // = location 2
  ///   %ia = struct_element_addr %oy, #Inner.a  // = location 3
  ///   %ib = struct_element_addr %oy, #Inner.b  // = location 4
/// \endcode
///
/// the ``subLocations`` bits are:
/// \code
/// location 0 (alloc_stack): [0, 1, 3, 4]
/// location 1 (Outer.x): [ 1 ]
/// location 2 (Outer.y): [ 3, 4]
/// location 3 (Inner.a): [ 3 ]
/// location 4 (Inner.b): [ 4]
/// \endcode
///
  /// Bit 2 is never set because Inner is completely represented by its
/// sub-locations 3 and 4. But bit 0 is set in location 0 (the "self" bit),
/// because it represents the untracked field ``Outer.z``.
Bits subLocations;
/// The accumulated parent bits, including the "self" bit.
///
/// For the example given for ``subLocations``, the ``selfAndParents`` bits
/// are:
/// \code
/// location 0 (alloc_stack): [0 ]
/// location 1 (Outer.x): [0, 1 ]
/// location 2 (Outer.y): [0, 2 ]
/// location 3 (Inner.a): [0, 2, 3 ]
/// location 4 (Inner.b): [0, 2, 4]
/// \endcode
Bits selfAndParents;
/// The location index of the parent, or -1 if it's a top-level location.
///
/// For the example given for ``subLocations``, the ``parentIdx`` indices
/// are:
/// \code
/// location 0 (alloc_stack): -1
/// location 1 (Outer.x): 0
/// location 2 (Outer.y): 0
/// location 3 (Inner.a): 2
/// location 4 (Inner.b): 2
/// \endcode
int parentIdx;
/// Used to decide if a location is completely covered by its sub-locations.
///
/// -1 means: not yet initialized.
int numFieldsNotCoveredBySubfields = -1;
Location(SILValue val, unsigned index, int parentIdx = -1);
};
private:
/// The array of locations.
llvm::SmallVector<Location, 64> locations;
/// Mapping from SIL values (function arguments and alloc_stack) to location
/// indices.
///
/// In case there are multiple struct/tuple_element_addr for a single
/// field, this map contains multiple entries mapping to the same location.
llvm::DenseMap<SILValue, unsigned> addr2LocIdx;
/// Memory locations (e.g. alloc_stack) which live in a single basic block.
///
/// Those locations are excluded from the locations to keep the bit sets
/// small. They can be handled separately with handleSingleBlockLocations().
llvm::SmallVector<SingleValueInstruction *, 16> singleBlockLocations;
public:
MemoryLocations() {}
MemoryLocations(const MemoryLocations &) = delete;
MemoryLocations &operator=(const MemoryLocations &) = delete;
/// Returns the number of collected locations, except single-block locations.
unsigned getNumLocations() const { return locations.size(); }
/// Returns the location index corresponding to a memory address or -1, if
/// \p addr is not associated with a location.
int getLocationIdx(SILValue addr) const;
/// Returns the location corresponding to a memory address or null, if
/// \p addr is not associated with a location.
const Location *getLocation(SILValue addr) const {
int locIdx = getLocationIdx(addr);
if (locIdx >= 0)
return &locations[locIdx];
return nullptr;
}
/// Returns the location with a given \p index.
const Location *getLocation(unsigned index) const {
return &locations[index];
}
  /// Sets the location bits of \p addr in \p bits, if \p addr is associated
/// with a location.
void setBits(Bits &bits, SILValue addr) {
if (auto *loc = getLocation(addr))
bits |= loc->subLocations;
}
  /// Clears the location bits of \p addr in \p bits, if \p addr is associated
/// with a location.
void clearBits(Bits &bits, SILValue addr) {
if (auto *loc = getLocation(addr))
bits.reset(loc->subLocations);
}
/// Analyzes all locations in a function.
///
/// Single-block locations are not analyzed, but added to singleBlockLocations.
void analyzeLocations(SILFunction *function);
/// Analyze a single top-level location.
///
/// If all uses of \p loc are okay, the location and its sub-locations are
/// added to the data structures.
void analyzeLocation(SILValue loc);
/// Do a block-local processing for all locations in singleBlockLocations.
///
/// First, initializes all locations which are alive in a block and then
/// calls \p handlerFunc for the block.
void handleSingleBlockLocations(
std::function<void (SILBasicBlock *block)> handlerFunc);
/// Debug dump the MemoryLifetime internals.
void dump() const;
  /// Debug dump a bit set.
static void dumpBits(const Bits &bits);
private:
  /// Clears all data structures, except singleBlockLocations.
void clear();
// (locationIdx, fieldNr) -> subLocationIdx
using SubLocationMap = llvm::DenseMap<std::pair<unsigned, unsigned>, unsigned>;
/// Helper function called by analyzeLocation to check all uses of the
/// location recursively.
///
/// The \p subLocationMap is a temporary cache to speed up sub-location lookup.
bool analyzeLocationUsesRecursively(SILValue V, unsigned locIdx,
SmallVectorImpl<SILValue> &collectedVals,
SubLocationMap &subLocationMap);
/// Helper function called by analyzeLocation to create a sub-location for
  /// an address projection and check all of its uses.
bool analyzeAddrProjection(
SingleValueInstruction *projection, unsigned parentLocIdx,unsigned fieldNr,
SmallVectorImpl<SILValue> &collectedVals, SubLocationMap &subLocationMap);
/// Calculates Location::numFieldsNotCoveredBySubfields
void initFieldsCounter(Location &loc);
/// Only memory locations which store a non-trivial type are considered.
bool shouldTrackLocation(SILType type, SILFunction *inFunction) {
return !type.isTrivial(*inFunction);
}
};
/// The MemoryDataflow utility calculates global dataflow of memory locations.
///
/// The MemoryDataflow utility works well together with MemoryLocations, which can be
/// used to analyze locations as input to the dataflow.
/// TODO: Actually this utility can be used for any kind of dataflow, not just
/// for memory locations. Consider renaming it.
class MemoryDataflow {
public:
using Bits = MemoryLocations::Bits;
/// Basic-block specific information used for dataflow analysis.
struct BlockState {
/// The backlink to the SILBasicBlock.
SILBasicBlock *block;
/// The bits valid at the entry (i.e. the first instruction) of the block.
Bits entrySet;
/// The bits valid at the exit (i.e. after the terminator) of the block.
Bits exitSet;
/// Generated bits of the block.
Bits genSet;
/// Killed bits of the block.
Bits killSet;
/// True, if this block is reachable from the entry block, i.e. is not an
/// unreachable block.
///
/// This flag is only computed if entryReachabilityAnalysis is called.
bool reachableFromEntry = false;
/// True, if any function-exit block can be reached from this block, i.e. is
/// not a block which eventually ends in an unreachable instruction.
///
/// This flag is only computed if exitReachableAnalysis is called.
bool exitReachable = false;
BlockState(SILBasicBlock *block = nullptr) : block(block) { }
// Utility functions for setting and clearing gen- and kill-bits.
void genBits(SILValue addr, const MemoryLocations &locs) {
if (auto *loc = locs.getLocation(addr)) {
killSet.reset(loc->subLocations);
genSet |= loc->subLocations;
}
}
void killBits(SILValue addr, const MemoryLocations &locs) {
if (auto *loc = locs.getLocation(addr)) {
genSet.reset(loc->subLocations);
killSet |= loc->subLocations;
}
}
};
private:
/// All block states.
std::vector<BlockState> blockStates;
/// Getting from SILBasicBlock to BlockState.
llvm::DenseMap<SILBasicBlock *, BlockState *> block2State;
public:
  /// Sets up the BlockState data structures and associates all basic blocks with
/// a state.
MemoryDataflow(SILFunction *function, unsigned numLocations);
MemoryDataflow(const MemoryDataflow &) = delete;
MemoryDataflow &operator=(const MemoryDataflow &) = delete;
using iterator = std::vector<BlockState>::iterator;
iterator begin() { return blockStates.begin(); }
iterator end() { return blockStates.end(); }
/// Returns the state of a block.
BlockState *getState(SILBasicBlock *block) {
return block2State[block];
}
/// Calculates the BlockState::reachableFromEntry flags.
void entryReachabilityAnalysis();
/// Calculates the BlockState::exitReachable flags.
void exitReachableAnalysis();
/// Derives the block exit sets from the entry sets by applying the gen and
/// kill sets.
void solveDataflowForward();
/// Derives the block entry sets from the exit sets by applying the gen and
/// kill sets.
void solveDataflowBackward();
/// Debug dump the MemoryLifetime internals.
void dump() const;
};
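/// Illustrative driver (a sketch; real passes wire this up differently):
/// \code
///   MemoryLocations locations;
///   locations.analyzeLocations(function);
///   MemoryDataflow dataflow(function, locations.getNumLocations());
///   for (MemoryDataflow::BlockState &state : dataflow) {
///     // populate state.genSet / state.killSet, e.g. via genBits()/killBits()
///   }
///   dataflow.solveDataflowForward();
/// \endcode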
/// Verifies the lifetime of memory locations in a function.
void verifyMemoryLifetime(SILFunction *function);
} // end swift namespace
#endif
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
""" EvolutionOp Class """
from typing import Optional, Union, Set, List, cast
import logging
import numpy as np
import scipy
from qiskit.circuit import ParameterExpression, Instruction
from ..operator_base import OperatorBase
from ..primitive_ops.primitive_op import PrimitiveOp
from ..primitive_ops.matrix_op import MatrixOp
from ..list_ops import ListOp
from ..list_ops.summed_op import SummedOp
from ..list_ops.composed_op import ComposedOp
from ..list_ops.tensored_op import TensoredOp
logger = logging.getLogger(__name__)
class EvolvedOp(PrimitiveOp):
r"""
Class for wrapping Operator Evolutions for compilation (``convert``) by an EvolutionBase
method later, essentially acting as a placeholder. Note that EvolvedOp is a weird case of
PrimitiveOp. It happens to be that it fits into the PrimitiveOp interface nearly perfectly,
and it essentially represents a placeholder for a PrimitiveOp later, even though it doesn't
actually hold a primitive object. We could have chosen for it to be an OperatorBase,
but would have ended up copying and pasting a lot of code from PrimitiveOp."""
def __init__(self,
primitive: OperatorBase,
coeff: Optional[Union[int, float, complex, ParameterExpression]] = 1.0) -> None:
"""
Args:
primitive: The operator being wrapped to signify evolution later.
coeff: A coefficient multiplying the operator
"""
super().__init__(primitive, coeff=coeff)
def primitive_strings(self) -> Set[str]:
return self.primitive.primitive_strings() # type: ignore
@property
def num_qubits(self) -> int:
return self.primitive.num_qubits # type: ignore
def add(self, other: OperatorBase) -> OperatorBase:
if not self.num_qubits == other.num_qubits:
raise ValueError(
'Sum over operators with different numbers of qubits, {} and {}, is not well '
'defined'.format(self.num_qubits, other.num_qubits))
if isinstance(other, EvolvedOp) and self.primitive == other.primitive:
return EvolvedOp(self.primitive, coeff=self.coeff + other.coeff) # type: ignore
if isinstance(other, SummedOp):
op_list = [self] + cast(SummedOp, other).oplist # type: ignore
return SummedOp(op_list) # type: ignore
return SummedOp([self, other])
def adjoint(self) -> OperatorBase:
return EvolvedOp(self.primitive.adjoint() * -1, coeff=np.conj(self.coeff)) # type: ignore
def equals(self, other: OperatorBase) -> bool:
if not isinstance(other, EvolvedOp) or not self.coeff == other.coeff:
return False
return self.primitive == other.primitive
def tensor(self, other: OperatorBase) -> OperatorBase:
if isinstance(other, TensoredOp):
return TensoredOp([self] + other.oplist) # type: ignore
return TensoredOp([self, other])
def compose(self, other: OperatorBase) -> OperatorBase:
other = self._check_zero_for_composition_and_expand(other)
if isinstance(other, ComposedOp):
return ComposedOp([self] + other.oplist) # type: ignore
return ComposedOp([self, other])
def __str__(self) -> str:
prim_str = str(self.primitive)
if self.coeff == 1.0:
return 'e^(-i*{})'.format(prim_str)
else:
return "{} * e^(-i*{})".format(self.coeff, prim_str)
def __repr__(self) -> str:
return "EvolvedOp({}, coeff={})".format(repr(self.primitive), self.coeff)
def reduce(self) -> OperatorBase:
return EvolvedOp(self.primitive.reduce(), coeff=self.coeff) # type: ignore
def assign_parameters(self, param_dict: dict) -> OperatorBase:
param_value = self.coeff
if isinstance(self.coeff, ParameterExpression):
unrolled_dict = self._unroll_param_dict(param_dict)
if isinstance(unrolled_dict, list):
return ListOp([self.assign_parameters(param_dict) for param_dict in unrolled_dict])
if self.coeff.parameters <= set(unrolled_dict.keys()):
binds = {param: unrolled_dict[param] for param in self.coeff.parameters}
param_value = float(self.coeff.bind(binds))
return EvolvedOp(
self.primitive.bind_parameters(param_dict), coeff=param_value) # type: ignore
    def eval(self,
             front: Optional[Union[str, dict, np.ndarray,
                                   OperatorBase]] = None) -> Union[OperatorBase, float, complex]:
return cast(Union[OperatorBase, float, complex], self.to_matrix_op().eval(front=front))
def to_matrix(self, massive: bool = False) -> Union[np.ndarray, List[np.ndarray]]:
if self.primitive.__class__.__name__ == ListOp.__name__:
return [op.exp_i().to_matrix() * self.primitive.coeff * self.coeff # type: ignore
for op in self.primitive.oplist] # type: ignore
prim_mat = -1.j * self.primitive.to_matrix() # type: ignore
# pylint: disable=no-member
return scipy.linalg.expm(prim_mat) * self.coeff
def to_matrix_op(self, massive: bool = False) -> OperatorBase:
""" Returns a ``MatrixOp`` equivalent to this Operator. """
if self.primitive.__class__.__name__ == ListOp.__name__:
return ListOp(
[op.exp_i().to_matrix_op() for op in self.primitive.oplist], # type: ignore
coeff=self.primitive.coeff * self.coeff) # type: ignore
prim_mat = EvolvedOp(self.primitive).to_matrix(massive=massive) # type: ignore
return MatrixOp(prim_mat, coeff=self.coeff)
def log_i(self, massive: bool = False) -> OperatorBase:
return self.primitive * self.coeff # type: ignore
# pylint: disable=arguments-differ
def to_instruction(self, massive: bool = False) -> Instruction:
return self.primitive.to_matrix_op(massive=massive).exp_i() # type: ignore
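# Illustrative usage (a sketch; `op` stands for any OperatorBase, e.g. a Pauli sum):
#     evo = EvolvedOp(op, coeff=0.5)   # placeholder for 0.5 * e^(-i*op)
#     mat = evo.to_matrix()            # dense matrix via scipy.linalg.expm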
|
import styles from './template.css';
import template from './template';
import AoflElement from '@aofl/web-components/aofl-element';
/**
* @summary IconOutlineVpnLockElement
* @class IconOutlineVpnLockElement
* @extends {AoflElement}
*/
class IconOutlineVpnLockElement extends AoflElement {
/**
* Creates an instance of IconOutlineVpnLockElement.
*/
constructor() {
super();
}
/**
* @readonly
*/
static get is() {
return 'icon-outline-vpn-lock';
}
/**
*
* @return {Object}
*/
render() {
return super.render(template, [styles]);
}
}
window.customElements.define(IconOutlineVpnLockElement.is, IconOutlineVpnLockElement);
export default IconOutlineVpnLockElement;
|
"""
Filter for detecting multiple salient regions.
The detection works by first extracting the full saliency map
using an object extraction algorithm. The saliency map is binarized
using a threshold which is calculated individually for each image
as the weighted average of 3/4 of the biggest saliency values.
Using this threshold, the image is binarized into solid regions.
All regions and their sizes are calculated using OpenCV's contour
detection. The actual prediction is constructed by dividing the
sum of the areas of all the regions by the area of the largest
region, squaring the result, and shifting it down by one so that
a single dominant region scores zero; the result is then clamped
to the range [0, 1]. This way, if the saliency map contains only
some small independent areas, the whole image is not considered
to have multiple salient regions.
"""
import cv2
import numpy
from ..utils.image_utils import read_image
from ..utils.object_extraction import extract_object
from .filter import Filter
def count_threshold(saliency_map):
"""Calculates the threshold used for the binarization.
Calculated as the weighted average of 3/4 of the biggest
saliency values.
:param saliency_map: the full saliency map
:type saliency_map: numpy.ndarray
:returns: int -- the threshold
"""
rounded_saliency_map = numpy.around(saliency_map, decimals=-1)
unique, count = numpy.unique(rounded_saliency_map, return_counts=True)
    # floor division keeps the slice index an integer under Python 3
    smallest_large_index = unique.shape[0] * 3 // 4
    return numpy.average(unique[-smallest_large_index:], axis=0,
                         weights=count[-smallest_large_index:])
def count_areas(saliency_map):
"""Returns a list of areas of all coherent white regions produced by
binarization using a calculated threshold.
:param saliency_map: the full saliency map
:type saliency_map: numpy.ndarray
:returns: numpy.ndarray -- list of the areas of the regions
"""
# count threshold and use it to binarize the saliency map
limit = count_threshold(saliency_map)
_, thresh = cv2.threshold(saliency_map, limit, 255, cv2.THRESH_BINARY)
# find coherent regions
contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE,
cv2.CHAIN_APPROX_SIMPLE)
return numpy.array([cv2.contourArea(contour) for contour in contours])
class MultipleSalientRegions(Filter):
"""Filter for detecting images with multiple salient regions"""
name = 'multiple_salient_regions'
speed = 5
def __init__(self, threshold=0.5, invert_threshold=False,
is_saliency_map=False):
"""Initializes a multiple salient regions filter
:param threshold: threshold at which the given prediction is changed
from negative to positive
:type threshold: float
:param invert_threshold: whether the result should be greater than
the given threshold (default) or lower
for an image to be considered positive
:type invert_threshold: bool
        :param is_saliency_map: whether the given image is already a
saliency map
:type is_saliency_map: bool
"""
super(MultipleSalientRegions, self).__init__(threshold,
invert_threshold)
self.is_saliency_map = is_saliency_map
def predict(self, image_path, return_boolean=True, ROI=None):
"""Predict if a given image has multiple salient regions
:param image_path: path to the image
:type image_path: str
:param return_boolean: whether to return the result as a
float between 0 and 1 or as a boolean
(threshold is given to the class)
:type return_boolean: bool
:param ROI: possible region of interest as a 4-tuple
(x0, y0, width, height), None if not needed
:returns: the prediction as a bool or float depending on the
return_boolean parameter
"""
if self.is_saliency_map:
saliency_map = read_image(image_path, ROI)
else:
saliency_map, _ = extract_object(image_path)
areas = count_areas(saliency_map)
# if areas is empty, there are no salient regions in the image
if not areas.shape[0]:
return False if return_boolean else 0.
prediction = (numpy.sum(areas) / numpy.amax(areas)) ** 2 - 1.0
prediction = min(prediction, 1) # limit prediction to range [0, 1]
if return_boolean:
return self.boolean_result(prediction)
return prediction
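# Illustrative usage (hypothetical image path):
#     filt = MultipleSalientRegions(threshold=0.5)
#     filt.predict('photo.jpg')                        # -> bool
#     filt.predict('photo.jpg', return_boolean=False)  # -> float in [0, 1]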
|
# Copyright 2019 Intel Corporation.
import functools
import inspect
import logging
import math
import os
from collections import defaultdict
from contextlib import contextmanager
import six
import numpy as np
import scipy.stats
import plaidml2 as plaidml
import plaidml2.edsl as edsl
import plaidml2.exec as plaidml_exec
import plaidml2.op as plaidml_op
import plaidml2.settings as plaidml_settings
from keras.backend.common import epsilon, floatx, image_data_format
from keras.backend.common import set_floatx as keras_set_floatx
logger = logging.getLogger(__name__)
# Keras needs us to keep track of unique IDs for prefix strings
# (for use with get_uid and reset_uids)
_UID_PREFIX_DICT = defaultdict(int)
_NAME_SCOPE_STACK = []
_CONV_DATA_FORMAT = ['channels_first', 'channels_last']
_in_train_phase = None # Will be initialized on first use
_device = plaidml_settings.get('PLAIDML_DEVICE')
def _prepend_name_scope(name, default):
if name:
r = '_'.join(_NAME_SCOPE_STACK + [name])
else:
r = '_'.join(_NAME_SCOPE_STACK + [default])
r += '_' + str(get_uid(r))
return r
def _normalize_axis(axis, ndims, name=''):
negative_axis_given = False
normalized_axis = axis + ndims if axis < 0 else axis
if normalized_axis < 0 or ndims <= normalized_axis:
name_str = 'for {} op '.format(name) if name else ''
raise RuntimeError(
'Axis out of range {}(axis {} requested for tensors with {} dimensions)'.format(
name_str, axis, ndims))
return normalized_axis
def _normalize_data_format(data_format):
if data_format is None:
data_format = image_data_format()
if data_format == 'channels_last':
return 'nxc'
if data_format == 'channels_first':
return 'ncx'
if data_format in ['nxc', 'ncx']:
return data_format
raise ValueError('Unrecognized data_format "{}"'.format(data_format))
def _normalize_padding(padding):
if padding == 'same':
return 'same_upper'
if padding in ['same_lower', 'same_upper', 'valid', 'full']:
return padding
raise ValueError('Unrecognized padding type "{}"'.format(padding))
def _log_call(func):
    '''A decorator that logs the call of the wrapped function'''

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Construct a string logging the call if logging is turned on
        if logger.isEnabledFor(logging.DEBUG):
            sig = inspect.signature(func)
            arg_str_list = list()
            for i, arg in enumerate(args):
                # Use the full parameter name, not just its first character
                arg_str_list.append('{}: {}'.format(list(sig.parameters)[i], arg))
            for k, v in kwargs.items():
                arg_str_list.append('{}: {}'.format(k, v))
            logger.debug('{}({})'.format(func.__name__, ', '.join(arg_str_list)))
        # Call the requested function regardless
        return func(*args, **kwargs)

    return wrapper
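# Illustrative effect: with DEBUG logging enabled, a decorated call such as
# clip(x, 0.0, 6.0) emits a line like "clip(x: ..., min_val: 0.0, max_val: 6.0)".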
class _Function(object):
def __init__(self, inputs, outputs, updates, name):
self._name = name
self._inputs = inputs
self._outputs = outputs
self._updates = updates
self._cache = {}
def __call__(self, inputs):
logger.debug('_Function: {}({})'.format(self._name, inputs))
inputs = [
np.array(inp) if isinstance(inp, (six.integer_types, float)) else inp for inp in inputs
]
input_shapes = tuple([x.shape for x in inputs])
# logger.debug('_Function: {}({})'.format(self._name, input_shapes))
exe = self._cache.get(input_shapes)
if not exe:
exe = self._compile(inputs)
self._cache[input_shapes] = exe
return [x.as_ndarray() for x in exe(inputs)]
def _compile(self, inputs):
for node, data in zip(self._inputs, inputs):
dtype = node.tensor.shape.dtype
shape = edsl.LogicalShape(dtype, data.shape)
node.tensor.bind(shape)
outputs = [x.tensor for x in self._outputs]
updates = [(x[0].tensor, x[1].tensor) for x in self._updates]
program = edsl.Program(self._name, outputs, updates)
return plaidml_exec.Executable(program, [x.tensor for x in self._inputs])
def _create_var(name, value):
dtype = plaidml.DType.from_numpy(value.dtype)
shape = edsl.LogicalShape(dtype, value.shape)
tensor_shape = plaidml.TensorShape(dtype, value.shape)
buffer = plaidml.Buffer(_device, tensor_shape)
buffer.copy_from_ndarray(value)
return edsl.Tensor(shape=shape, name=name, buffer=buffer)
class _KerasNode(object):
def __init__(self, opname, name=None, shape=None, tensor=None, value=None):
self.opname = opname
self.name = _prepend_name_scope(name, opname)
if value is not None:
tensor = _create_var(self.name, value)
elif tensor is None:
tensor = edsl.Tensor(shape=shape, name=self.name)
# logger.debug('_KerasNode({})'.format(tensor))
self.tensor = tensor
def __repr__(self):
return str(self.tensor)
def __str__(self):
return '{}|{}'.format(self.name, self.tensor.shape)
def eval(self):
return get_value(self)
@property
def _keras_shape(self):
try:
return self.__keras_shape
except AttributeError:
return int_shape(self)
@_keras_shape.setter
def _keras_shape(self, shape):
self.__keras_shape = shape
def __getitem__(self, key):
logger.debug('__getitem__(self: {}, key: {})'.format(self, key))
        if isinstance(key, (slice, int, type(Ellipsis))):
key = (key,)
if not isinstance(key, tuple):
raise ValueError('Cannot index PlaidML tensors using type {}'.format(type(key)))
if key.count(Ellipsis) > 1:
raise ValueError('Cannot use multiple ellipses in a slice (given {})'.format(key))
# Fill in ellipsis
try:
ellipsis_idx = key.index(Ellipsis)
except ValueError:
ellipsis_idx = None
I = self.tensor
ndims = I.shape.ndims
extension_length = ndims - len(key)
if ellipsis_idx is not None:
# The ellipsis is counted in the length of the key, but does not persist as an axis for slicing, so undo that count
extension_length += 1
if extension_length < 0:
raise ValueError('Slice key too long. Tensor has {} dimensions, key is {}'.format(
ndims, key))
key = tuple(
list(key[:ellipsis_idx]) + [slice(None, None, None)] * extension_length +
list(key[ellipsis_idx + 1:]))
else:
key = tuple(list(key) + [slice(None, None, None)] * extension_length)
return _KerasNode('slice', tensor=plaidml_op.slice_of(I, key))
def __neg__(self):
return _KerasNode('neg', tensor=-self.tensor)
def __add__(self, other):
return self.__binary_op('add', other, lambda x, y: x + y)
def __radd__(self, other):
return self.__binary_op('add', other, lambda x, y: y + x)
def __sub__(self, other):
return self.__binary_op('sub', other, lambda x, y: x - y)
def __rsub__(self, other):
return self.__binary_op('sub', other, lambda x, y: y - x)
def __mul__(self, other):
return self.__binary_op('mul', other, lambda x, y: x * y)
def __rmul__(self, other):
return self.__binary_op('mul', other, lambda x, y: y * x)
def __div__(self, other):
return self.__binary_op('div', other, lambda x, y: x / y)
def __rdiv__(self, other):
return self.__binary_op('div', other, lambda x, y: y / x)
def __truediv__(self, other):
return self.__binary_op('div', other, lambda x, y: x / y)
def __rtruediv__(self, other):
return self.__binary_op('div', other, lambda x, y: y / x)
def __ge__(self, other):
return self.__binary_op('cmp_ge', other, lambda x, y: x >= y)
def __gt__(self, other):
return self.__binary_op('cmp_gt', other, lambda x, y: x > y)
def __le__(self, other):
return self.__binary_op('cmp_le', other, lambda x, y: x <= y)
def __lt__(self, other):
return self.__binary_op('cmp_lt', other, lambda x, y: x < y)
def __binary_op(self, op, other, fn):
logger.debug('{}(self: {}, other: {})'.format(op, self, other))
if isinstance(other, _KerasNode):
other = other.tensor
if isinstance(other, np.ndarray):
other = variable(other).tensor
return _KerasNode(op, tensor=fn(self.tensor, other))
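# Illustrative: _KerasNode's operator overloads build the EDSL graph lazily, so
#     z = x + y * 2    # returns a new _KerasNode; nothing is computed yet
#     z.eval()         # compiles and runs the program to produce an ndarray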
_k_rng_size = 2048
def _make_rng_state(seed=None):
if seed:
np.random.seed(seed)
rng_init = np.empty((3, _k_rng_size), dtype=np.uint32)
rng_init[0] = np.random.randint(1, 2**32, (_k_rng_size,), dtype=np.uint32)
rng_init[1] = np.random.randint(7, 2**32, (_k_rng_size,), dtype=np.uint32)
rng_init[2] = np.random.randint(15, 2**32, (_k_rng_size,), dtype=np.uint32)
rng_state = variable(rng_init, dtype='uint32')
return rng_state
def _report_unimplemented(name):
report = (
'The Keras backend function \'{}\' is not yet implemented in ' +
'plaidml2. You can help us prioritize by letting us know if this ' +
'function is important to you, and as always, contributions are welcome!').format(name)
raise NotImplementedError(report)
class PlaidMLKerasException(Exception):
pass
@_log_call
def abs(x):
return _KerasNode('abs', tensor=plaidml_op.abs(x.tensor))
@_log_call
def all(x, axis=None, keepdims=False):
return _KerasNode('all', tensor=plaidml_op.all(x.tensor, axis, keepdims))
@_log_call
def any(x, axis=None, keepdims=False):
return _KerasNode('any', tensor=plaidml_op.any(x.tensor, axis, keepdims))
@_log_call
def arange(start, stop=None, step=1, dtype='int32'):
if isinstance(dtype, plaidml.DType):
dtype = dtype.into_numpy()
return variable(np.arange(start, stop, step, dtype), dtype=dtype)
@_log_call
def argmax(x, axis=-1):
return _KerasNode('argmax', tensor=plaidml_op.argmax(x.tensor, axis))
@_log_call
def argmin(x, axis=-1):
return argmax(-x, axis=axis)
@_log_call
def backend():
return 'plaidml2'
@_log_call
def batch_dot(x, y, axes=None, name=None):
X = x.tensor
Y = y.tensor
if isinstance(axes, six.integer_types):
axes = (axes, axes)
if axes is None:
axes = (X.shape.ndims - 1, Y.shape.ndims - 2)
PLAIDML_BATCHDOT_TF_BEHAVIOR = os.getenv('PLAIDML_BATCHDOT_TF_BEHAVIOR')
if PLAIDML_BATCHDOT_TF_BEHAVIOR:
_report_unimplemented('batch_dot')
else:
# replicate theano/documentation-specified behavior
first_dim = edsl.TensorDim()
first_idx = edsl.TensorIndex()
batch_dim = edsl.TensorDim()
batch_idx = edsl.TensorIndex()
xdims = edsl.TensorDims(X.shape.ndims)
xdims[0] = first_dim
xdims[axes[0]] = batch_dim
xidxs = edsl.TensorIndexes(X.shape.ndims)
xidxs[0] = first_idx
xidxs[axes[0]] = batch_idx
ydims = edsl.TensorDims(Y.shape.ndims)
ydims[0] = first_dim
ydims[axes[1]] = batch_dim
yidxs = edsl.TensorIndexes(Y.shape.ndims)
yidxs[0] = first_idx
yidxs[axes[1]] = batch_idx
odims = [xdims[N] for N in range(len(xdims)) if N != axes[0]
] + [ydims[N] for N in range(1, len(ydims)) if N != axes[1]]
oidxs = [xidxs[N] for N in range(len(xidxs)) if N != axes[0]
] + [yidxs[N] for N in range(1, len(yidxs)) if N != axes[1]]
X.bind_dims(*xdims)
Y.bind_dims(*ydims)
O = edsl.TensorOutput(*odims)
O[oidxs] += X[xidxs] * Y[yidxs]
if len(odims) == 1:
O = plaidml_op.expand_dims(O, 1)
return _KerasNode('batch_dot', tensor=O)
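# Illustrative shapes (Theano-style behavior): for x of shape (B, M, K) and y of
# shape (B, K, N), batch_dot(x, y, axes=(2, 1)) contracts axis 2 of x with axis 1
# of y and shares the leading batch axis, yielding shape (B, M, N).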
@_log_call
def batch_flatten(x):
I = x.tensor
I_dims = edsl.TensorDims(I.shape.ndims)
I.bind_dims(*I_dims)
if len(I_dims) == 1:
return reshape(x, [I_dims[0], 1])
if len(I_dims) == 2:
return x
return reshape(x, [I_dims[0]] + [functools.reduce((lambda x, y: x * y), I_dims[1:])])
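# Illustrative: batch_flatten keeps the batch axis and collapses the rest, so a
# tensor of shape (B, 2, 3, 4) becomes (B, 24); a 1-D input becomes (B, 1).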
@_log_call
def batch_get_value(xs):
return [get_value(x) for x in xs]
@_log_call
def batch_set_value(tuples):
for pair in tuples:
set_value(pair[0], pair[1])
@_log_call
def batch_normalization(x, mean, var, beta, gamma, axis=-1, epsilon=1e-3):
# gamma == scale
# beta == offset
# The `axis` parameter is only used to tell TF the format of a fused batchnorm,
# so we ignore it.
denom = sqrt(var + epsilon)
if gamma is not None and beta is not None:
return ((x - mean) * gamma / denom) + beta
elif gamma is not None:
return ((x - mean) * gamma / denom)
elif beta is not None:
return ((x - mean) / denom) + beta
else:
return ((x - mean) / denom)
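# Illustrative formula: every branch above computes a variant of
#     out = gamma * (x - mean) / sqrt(var + epsilon) + beta
# with the gamma and/or beta terms dropped when they are None.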
@_log_call
def bias_add(x, bias, data_format=None):
if data_format is None:
data_format = image_data_format()
if data_format not in _CONV_DATA_FORMAT:
raise PlaidMLKerasException(
'Unrecognized data_format given to bias_add: "{}"; '.format(data_format) +
'only "channels_first" and "channels_last" recognized.')
if ndim(x) > 2:
if data_format == 'channels_first':
try:
bias_dims = bias.tensor.shape.dims
except AttributeError:
bias_dims = bias.shape
x += reshape(bias, (1, bias_dims[0]) + (1,) * (ndim(x) - 2))
elif data_format == 'channels_last':
x += bias
else:
x += bias
return x
@_log_call
def binary_crossentropy(target, output, from_logits=False):
if from_logits:
output = sigmoid(output)
return _KerasNode('binary_crossentropy',
tensor=plaidml_op.binary_crossentropy(target.tensor, output.tensor,
epsilon()))
@_log_call
def cast(x, dtype):
    # Not clear what datatypes Keras supports.
    # Each backend appears to implement support for its own subset of some assumed
    # but undocumented pool of possible numeric types. Perhaps this pool may be
    # the array element scalar type names defined by Numpy?
    # Tensorflow supports:
    #   float16, float32, float64, int16, int32, int64, uint8, uint16
    # Scipy offers additional scalar names; not sure where plain 'bool' comes
    # from, since scipy uses 'bool_' and 'bool8'.
    # TODO: deal with arbitrary python values
    # x = ptile.Value.from_python_value(x)
try:
dtype = plaidml.DType.from_numpy(dtype)
except ValueError:
raise PlaidMLKerasException('Unsupported cast (%s -> %s)' % (x.shape.dtype, dtype))
if x.tensor.shape.dtype == dtype:
return x
return _KerasNode('cast', tensor=edsl.cast(x.tensor, dtype))
@_log_call
def categorical_crossentropy(target, output, from_logits=False):
if from_logits:
output = softmax(output)
elif output.opname != 'softmax':
output /= sum(output, axis=(-1,), keepdims=True)
output = clip(output, epsilon(), 1.0 - epsilon())
T = target.tensor
O = output.tensor
ndims = O.shape.ndims
fixed_dims = edsl.TensorDims(ndims - 1)
fixed_idxs = edsl.TensorIndexes(ndims - 1)
Y = edsl.TensorDim()
y = edsl.TensorIndex()
input_dims = fixed_dims + [Y]
O.bind_dims(*input_dims)
T.bind_dims(*input_dims)
LO = edsl.log(O)
TR = edsl.TensorOutput(*fixed_dims)
TR[fixed_idxs] += T[fixed_idxs + [y]] * LO[fixed_idxs + [y]]
R = -TR
return _KerasNode('categorical_crossentropy', tensor=R)
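# The contraction above computes, per sample, -sum_y target[..., y] * log(output[..., y]).
# A NumPy equivalent for intuition (hedged sketch; `_example_categorical_crossentropy_numpy`
# is illustrative only and not part of the backend API):
def _example_categorical_crossentropy_numpy(target, output):
    import numpy as np
    # assumes `output` already holds probabilities that sum to 1 on the last axis
    return -np.sum(np.asarray(target) * np.log(np.asarray(output)), axis=-1)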
@_log_call
def ceil(x):
return _KerasNode('ceil', tensor=edsl.ceil(x.tensor))
@_log_call
def clear_session():
global _in_train_phase
_in_train_phase = None
@_log_call
def clip(x, min_val, max_val):
return _KerasNode('clip',
tensor=plaidml_op.clip(x.tensor,
variable(min_val).tensor,
variable(max_val).tensor))
@_log_call
def concatenate(tensors, axis=-1):
tensor_vals = [x.tensor for x in tensors]
return _KerasNode('concatenate', tensor=plaidml_op.concatenate(tensor_vals, axis))
@_log_call
def constant(value, dtype=None, shape=None, name=None):
if shape is None:
if isinstance(value, np.ndarray):
shape = value.shape
elif isinstance(value, list) or isinstance(value, tuple):
shape = (len(value),)
else:
shape = (1,)
np_value = np.full(shape, value, dtype=dtype or floatx())
return _KerasNode('constant', name=name, value=np_value)
@_log_call
def cos(x):
return _KerasNode('cos', tensor=edsl.cos(x.tensor))
@_log_call
def conv(x,
kernel,
strides=None,
padding='valid',
data_format=None,
dilation_rate=None,
channelwise=False):
if channelwise:
group_layout = 'in_C'
autogroup_mode = 'max'
else:
group_layout = 'none'
autogroup_mode = 'ungrouped'
rank = x.tensor.shape.ndims - 2
if strides is None:
strides = tuple(1 for _ in range(rank))
if dilation_rate is None:
dilation_rate = tuple(1 for _ in range(rank))
return _KerasNode(
'conv',
tensor=plaidml_op.convolution(
x.tensor,
kernel.tensor,
strides,
dilation_rate,
[1] * len(strides),
[],
1,
_normalize_padding(padding),
[],
_normalize_data_format(data_format),
'xck',
group_layout,
False, # winograd_allowed
cur_name(),
autogroup_mode,
'none',
[]))
@_log_call
def conv_transpose(x, kernel, output_shape, strides, padding, data_format, dilation_rate):
# Keras gives every dim on the output_shape, but PlaidML expects to infer the channel dims; so restrict to spatial dims
data_format = _normalize_data_format(data_format)
if data_format == 'nxc':
output_shape = output_shape[1:-1]
elif data_format == 'ncx':
output_shape = output_shape[2:]
else:
raise ValueError('Could not parse data_format "{}"'.format(data_format))
rank = x.tensor.shape.ndims - 2
if strides is None:
strides = tuple(1 for _ in range(rank))
if dilation_rate is None:
dilation_rate = tuple(1 for _ in range(rank))
return _KerasNode(
'conv',
tensor=plaidml_op.convolution(
x.tensor,
kernel.tensor,
strides,
dilation_rate,
[1] * len(strides),
[],
1,
_normalize_padding(padding),
[],
data_format,
'xck',
'none',
False, # winograd_allowed
cur_name(),
'ungrouped',
'data',
output_shape))
@_log_call
def conv1d(x, kernel, strides=1, padding='valid', data_format=None, dilation_rate=1):
if padding == 'causal':
left_pad = dilation_rate * (int_shape(kernel)[0] - 1)
x = temporal_padding(x, (left_pad, 0))
padding = 'valid'
return conv(x, kernel, (strides,), padding, data_format, (dilation_rate,))
@_log_call
def conv2d(x, kernel, strides=(1, 1), padding='valid', dilation_rate=(1, 1), data_format=None):
if isinstance(strides, six.integer_types):
strides = (strides,) * 2
if isinstance(dilation_rate, six.integer_types):
dilation_rate = (dilation_rate,) * 2
return conv(x, kernel, strides, padding, data_format, dilation_rate)
@_log_call
def conv2d_transpose(x,
kernel,
output_shape,
strides=(1, 1),
padding='valid',
data_format=None,
dilation_rate=(1, 1)):
if isinstance(strides, six.integer_types):
strides = (strides,) * 2
if isinstance(dilation_rate, six.integer_types):
dilation_rate = (dilation_rate,) * 2
return conv_transpose(x, kernel, output_shape, strides, padding, data_format, dilation_rate)
@_log_call
def conv3d(x,
kernel,
strides=(1, 1, 1),
padding='valid',
dilation_rate=(1, 1, 1),
data_format=None):
if isinstance(strides, six.integer_types):
strides = (strides,) * 3
if isinstance(dilation_rate, six.integer_types):
dilation_rate = (dilation_rate,) * 3
return conv(x, kernel, strides, padding, data_format, dilation_rate)
@_log_call
def conv3d_transpose(x,
kernel,
output_shape,
strides=(1, 1, 1),
padding='valid',
data_format=None,
dilation_rate=(1, 1, 1)):
if isinstance(strides, six.integer_types):
strides = (strides,) * 3
if isinstance(dilation_rate, six.integer_types):
dilation_rate = (dilation_rate,) * 3
return conv_transpose(x, kernel, output_shape, strides, padding, data_format, dilation_rate)
@_log_call
def count_params(x):
result = 1
for dim in x.tensor.shape.int_dims:
result *= dim
return result
@_log_call
def ctc_batch_cost(y_true, y_pred, input_length, label_length):
_report_unimplemented('ctc_batch_cost')
@_log_call
def ctc_decode(y_pred, input_length, greedy=True, beam_width=100, top_paths=1):
_report_unimplemented('ctc_decode')
@_log_call
def ctc_label_dense_to_sparse(labels, label_lengths):
_report_unimplemented('ctc_label_dense_to_sparse')
@_log_call
def cumprod(x, axis=0):
return _KerasNode('cumprod', tensor=plaidml_op.cumprod(x.tensor, axis))
@_log_call
def cumsum(x, axis=0):
return _KerasNode('cumsum', tensor=plaidml_op.cumsum(x.tensor, axis))
@_log_call
def cur_name():
if len(_NAME_SCOPE_STACK):
return _NAME_SCOPE_STACK[0]
return ''
@_log_call
def depthwise_conv2d(x,
kernel,
strides=(1, 1),
padding='valid',
data_format=None,
dilation_rate=(1, 1)):
return conv(x, kernel, strides, padding, data_format, dilation_rate, channelwise=True)
@_log_call
def dot(x, y, name=None):
return _KerasNode('dot', tensor=plaidml_op.dot(x.tensor, y.tensor), name=name)
@_log_call
def dropout(x, level, noise_shape=None, seed=None):
I = x.tensor
if noise_shape is not None and len(noise_shape) != I.shape.ndims:
raise ValueError('noise_shape ndims doesn\'t match input ndims')
if noise_shape is None:
shape = I.shape.dims
else:
shape = noise_shape
rng_state = _make_rng_state(seed)
R = 1.0 - level
M = 1.0 / R
T = edsl.prng(rng_state.tensor, shape)
O = edsl.select(T < R, I * M, 0.0)
return _KerasNode('dropout', tensor=O)
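# The select above implements "inverted dropout": surviving activations are
# scaled by 1 / keep_prob so the expected activation is unchanged and no
# rescaling is needed at inference time. A NumPy equivalent (hedged sketch;
# `_example_inverted_dropout_numpy` is illustrative only):
def _example_inverted_dropout_numpy(x, level, seed=None):
    import numpy as np
    rng = np.random.RandomState(seed)
    keep = 1.0 - level
    mask = rng.uniform(size=np.shape(x)) < keep
    return np.where(mask, np.asarray(x) / keep, 0.0)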
@_log_call
def dtype(x):
return x.tensor.shape.dtype.into_numpy()
@_log_call
def elu(x, alpha=1.0):
return _KerasNode('elu', name='elu', tensor=plaidml_op.elu(x.tensor, alpha))
@_log_call
def equal(x, y):
if isinstance(x, _KerasNode):
x = x.tensor
if isinstance(x, np.ndarray):
x = variable(x).tensor
if isinstance(y, _KerasNode):
y = y.tensor
if isinstance(y, np.ndarray):
y = variable(y).tensor
return _KerasNode('equal', tensor=(x == y))
@_log_call
def exp(x):
return _KerasNode('exp', tensor=edsl.exp(x.tensor))
@_log_call
def eval(x):
return get_value(x)
@_log_call
def expand_dims(x, axis=-1, name=None):
return _KerasNode('expand_dims', name=name, tensor=plaidml_op.expand_dims(x.tensor, axis))
@_log_call
def eye(size, dtype=None, name=None):
if dtype is None:
dtype = floatx()
elif isinstance(dtype, plaidml.DType):
dtype = dtype.into_numpy()
return variable(np.eye(size, dtype=dtype), name=name, dtype=dtype)
@_log_call
def flatten(x):
I = x.tensor
I_dims = edsl.TensorDims(I.shape.ndims)
I.bind_dims(*I_dims)
O_dim = functools.reduce(lambda x, y: x * y, I_dims)
return reshape(x, [O_dim])
@_log_call
def floor(x):
return _KerasNode('floor', tensor=edsl.floor(x.tensor))
@_log_call
def foldl(fn, elems, initializer=None, name=None):
_report_unimplemented('foldl')
@_log_call
def foldr(fn, elems, initializer=None, name=None):
_report_unimplemented('foldr')
# No _log_call as this does specialized logging
def function(inputs, outputs, updates=None, name=None):
logger.debug('function(name: {})'.format(name))
logger.debug(' inputs:')
for input in inputs:
logger.debug(' {}'.format(input))
logger.debug(' outputs:')
for output in outputs:
logger.debug(' {}'.format(output))
if updates:
logger.debug(' updates:')
for update in updates:
logger.debug(' {}'.format(update))
if updates is None:
updates = []
if name is None:
name = ''
return _Function(inputs, outputs, updates, name)
@_log_call
def gather(x, indices):
    return _KerasNode('gather', tensor=edsl.gather(x.tensor, indices.tensor))
@_log_call
def get_uid(prefix=''):
_UID_PREFIX_DICT[prefix] += 1
return _UID_PREFIX_DICT[prefix]
@_log_call
def get_value(x):
inputs = []
fn = _Function(inputs, [x], [], name='get_value')
outputs = fn(inputs)
return outputs[0]
@_log_call
def get_variable_shape(x):
return x._keras_shape
@_log_call
def gradients(loss, variables):
grads = edsl.gradients(loss.tensor, [x.tensor for x in variables])
return [_KerasNode('gradients', tensor=x) for x in grads]
@_log_call
def greater(x, y):
return x > y
@_log_call
def greater_equal(x, y):
return x >= y
@_log_call
def hard_sigmoid(x):
return _KerasNode('hard_sigmoid',
name='hard_sigmoid',
tensor=plaidml_op.hard_sigmoid(x.tensor, 0.2))
@_log_call
def identity(x):
return _KerasNode('identity', tensor=edsl.ident(x.tensor))
@_log_call
def in_test_phase(x, alt, training=None):
# Note that this flips 'alt' and 'x'
return in_train_phase(alt, x, training=training)
@_log_call
def in_top_k(predictions, targets, k):
_report_unimplemented('in_top_k')
@_log_call
def in_train_phase(x, alt, training=None):
if training is None:
training = learning_phase()
uses_learning_phase = True
else:
uses_learning_phase = False
cx = x() if callable(x) else x
calt = alt() if callable(alt) else alt
    if training == 1 or training is True:
        return cx
    elif training == 0 or training is False:
return calt
else:
o = switch(training, cx, calt)
if uses_learning_phase:
o._uses_learning_phase = True
return o
@_log_call
def int_shape(x):
return tuple(None if x == 0 else x for x in x.tensor.shape.int_dims)
@_log_call
def is_keras_tensor(x):
if not is_tensor(x):
raise ValueError()
return hasattr(x, '_keras_history')
@_log_call
def is_placeholder(x):
_report_unimplemented('is_placeholder')
@_log_call
def is_sparse(x):
return False
@_log_call
def is_tensor(x):
return isinstance(x, _KerasNode)
@_log_call
def l2_normalize(x, axis):
norm = sqrt(sum(square(x), axis=axis, keepdims=True))
return x / norm
@_log_call
def learning_phase():
# Initialize _in_train_phase if this is the first use
global _in_train_phase
if _in_train_phase is None:
_in_train_phase = placeholder(ndim=0, dtype='bool')
return _in_train_phase
@_log_call
def less(x, y):
return x < y
@_log_call
def less_equal(x, y):
return x <= y
@_log_call
def local_conv1d(inputs, kernel, kernel_size, strides, data_format=None):
_report_unimplemented('local_conv1d')
@_log_call
def local_conv2d(inputs, kernel, kernel_size, strides, output_shape, data_format=None):
_report_unimplemented('local_conv2d')
@_log_call
def log(x):
return _KerasNode('log', tensor=edsl.log(x.tensor))
@_log_call
def logsumexp(x, axis=None, keepdims=False):
return log(sum(exp(x), axis=axis, keepdims=keepdims))
@_log_call
def manual_variable_initialization(value):
_report_unimplemented('manual_variable_initialization')
@_log_call
def map_fn(fn, elems, name=None, dtype=None):
_report_unimplemented('map_fn')
@_log_call
def max(x, axis=None, keepdims=False):
return _KerasNode('max', tensor=plaidml_op.max(x.tensor, axis, keepdims))
@_log_call
def maximum(x, y):
return _KerasNode('maximum', tensor=plaidml_op.maximum(x.tensor, y.tensor))
@_log_call
def mean(x, axis=None, keepdims=False):
return _KerasNode('mean', tensor=plaidml_op.mean(x.tensor, axis, keepdims))
@_log_call
def min(x, axis=None, keepdims=False):
return _KerasNode('min', tensor=plaidml_op.min(x.tensor, axis, keepdims))
@_log_call
def minimum(x, y):
return _KerasNode('minimum', tensor=plaidml_op.minimum(x.tensor, y.tensor))
@_log_call
def moving_average_update(x, value, momentum):
return (x, x * momentum + value * (1. - momentum))
# No _log_call as this manages logging specially
@contextmanager
def name_scope(name):
_NAME_SCOPE_STACK.append(name)
logger.debug('name_scope({}), push: {}'.format(name, _NAME_SCOPE_STACK))
yield
_NAME_SCOPE_STACK.pop()
logger.debug('name_scope({}), pop: {}'.format(name, _NAME_SCOPE_STACK))
@_log_call
def ndim(x):
return len(x._keras_shape)
@_log_call
def not_equal(lhs, rhs):
if isinstance(lhs, _KerasNode):
lhs = lhs.tensor
if isinstance(lhs, np.ndarray):
lhs = variable(lhs).tensor
if isinstance(rhs, _KerasNode):
rhs = rhs.tensor
if isinstance(rhs, np.ndarray):
rhs = variable(rhs).tensor
return _KerasNode('not_equal', tensor=(lhs != rhs))
@_log_call
def normalize_batch_in_training(x, gamma, beta, reduction_axes, epsilon=1e-3):
I = x.tensor
ndims = I.shape.ndims
    if reduction_axes is None:
raw_axes = [ndims - 1]
else:
raw_axes = reduction_axes
axes = [_normalize_axis(x, ndims, 'normalize_batch_in_training') for x in raw_axes]
m = mean(x, axis=axes, keepdims=True)
v = var(x, axis=axes, keepdims=True)
normalized_tensor = batch_normalization(x=x,
mean=m,
var=v,
beta=beta,
gamma=gamma,
epsilon=epsilon)
# Tensorflow and Theano disagree on whether mean and var should be squeezed
# here. For now, going with Theano for simplicity (i.e. don't squeeze).
return normalized_tensor, m, v
@_log_call
def one_hot(indices, num_classes):
    # Note: does not error-check for entries in indices that are >= num_classes
count = variable(np.array(range(num_classes)), dtype='int32').tensor
I = indices.tensor
I_ndims = I.shape.ndims
I_dims = edsl.TensorDims(I_ndims)
I_idxs = edsl.TensorIndexes(I_ndims)
C = edsl.TensorDim()
c = edsl.TensorIndex()
O_dims = I_dims + [C]
O_idxs = I_idxs + [c]
I.bind_dims(*I_dims)
count.bind_dims(C)
O = edsl.TensorOutput(*O_dims)
O[O_idxs] = I[I_idxs] == count[c]
return _KerasNode('one_hot', name='one_hot', tensor=O)
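# The contraction above compares each index against the arange [0, num_classes)
# to build the one-hot tensor. A NumPy equivalent (hedged sketch;
# `_example_one_hot_numpy` is illustrative only and not part of the backend API):
def _example_one_hot_numpy(indices, num_classes):
    import numpy as np
    return (np.asarray(indices)[..., None] == np.arange(num_classes)).astype(int)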
@_log_call
def ones(shape, dtype=None, name=None):
value = np.full(shape, 1, dtype=dtype or floatx())
return _KerasNode('ones', name=name, value=value)
@_log_call
def ones_like(x, dtype=None, name=None):
    value = np.full((1,), 1, dtype=dtype or floatx())
one = _create_var('a_one', value)
I = x.tensor
ndim = I.shape.ndims
dims = edsl.TensorDims(ndim)
idxs = edsl.TensorIndexes(ndim)
I.bind_dims(*dims)
O = edsl.TensorOutput(*dims)
O[idxs] = one[0]
return _KerasNode('ones_like', name=name, tensor=O)
@_log_call
def permute_dimensions(x, pattern=None):
return _KerasNode('permute_dimensions', tensor=plaidml_op.transpose(x.tensor, pattern))
@_log_call
def placeholder(shape=None, ndim=None, dtype=None, sparse=False, name=None):
dtype = plaidml.DType.from_numpy(dtype or floatx())
# TODO: Need to support empty shapes; once supported, convert below to `if _ is not None`
if shape is not None:
return _KerasNode('placeholder', shape=edsl.LogicalShape(dtype, shape), name=name)
if ndim is not None:
return _KerasNode('placeholder', shape=edsl.LogicalShape(dtype, [0] * ndim), name=name)
raise ValueError()
@_log_call
def pool(x, pool_size, strides=None, padding='valid', data_format=None, pool_mode='max'):
return _KerasNode('pool',
tensor=plaidml_op.pool(
x.tensor,
pool_mode,
pool_size,
strides,
_normalize_padding(padding),
tuple(),
_normalize_data_format(data_format),
False,
False,
))
@_log_call
def pool2d(x, pool_size, strides=(1, 1), padding='valid', data_format=None, pool_mode='max'):
return pool(x=x,
pool_size=pool_size,
strides=strides,
padding=padding,
data_format=data_format,
pool_mode=pool_mode)
@_log_call
def pool3d(x, pool_size, strides=(1, 1, 1), padding='valid', data_format=None, pool_mode='max'):
return pool(x=x,
pool_size=pool_size,
strides=strides,
padding=padding,
data_format=data_format,
pool_mode=pool_mode)
@_log_call
def pow(x, a):
return _KerasNode('pow', tensor=edsl.pow(x.tensor, a))
@_log_call
def print_tensor(x, message=''):
_report_unimplemented('print_tensor')
@_log_call
def prod(value, axis=None, keepdims=False):
if isinstance(value, (tuple, list)):
# In this case, a product of the elements of the tuple/list is being requested,
# rather than a within-tensor product
return functools.reduce(lambda x, y: x * y, value)
return _KerasNode('prod', tensor=plaidml_op.prod(value.tensor, axis, keepdims))
@_log_call
def random_binomial(shape, p=0.0, dtype=None, seed=None):
_report_unimplemented('random_binomial')
@_log_call
def random_normal(shape, mean=0.0, stddev=1.0, dtype=None, seed=None):
if dtype is None:
dtype = floatx()
if seed:
np.random.seed(seed)
# TODO: We only use half of the Box-Muller here
u1 = random_uniform(shape, dtype='float32')
u2 = random_uniform(shape, dtype='float32')
z0 = sqrt(-2.0 * log(u1 + (1.0 / (2**33)))) * cos(2.0 * math.pi * u2)
z0 = stddev * z0
z0 = z0 + mean
if dtype != 'float32':
z0 = cast(z0, dtype)
return z0
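# Box-Muller maps two independent uniforms to a standard normal via
# z0 = sqrt(-2 ln u1) * cos(2 pi u2); the sin branch (z1) is discarded above,
# and a small constant is added to u1 to avoid log(0). A NumPy cross-check
# (hedged sketch; `_example_box_muller_numpy` is illustrative only):
def _example_box_muller_numpy(shape, seed=None):
    import numpy as np
    rng = np.random.RandomState(seed)
    u1, u2 = rng.uniform(size=shape), rng.uniform(size=shape)
    return np.sqrt(-2.0 * np.log(u1 + 2.0**-33)) * np.cos(2.0 * np.pi * u2)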
@_log_call
def random_normal_variable(shape, mean, scale, dtype=None, name=None, seed=None):
if dtype is None:
dtype = floatx()
elif isinstance(dtype, plaidml.DType):
        # `ptile` is the legacy API; use the DType conversion used elsewhere in this module
        dtype = dtype.into_numpy()
if seed:
np.random.seed(seed)
data = np.random.normal(mean, scale, shape).astype(dtype)
return variable(data, dtype=dtype, name=name)
@_log_call
def random_uniform(shape, minval=0.0, maxval=1.0, dtype=None, seed=None):
rng_state = _make_rng_state(seed)
R = edsl.prng(rng_state.tensor, shape)
dtype = dtype or floatx()
if dtype != 'float32':
R = edsl.cast(R, plaidml.DType.from_numpy(dtype))
O = (maxval - minval) * R + minval
return _KerasNode('random_uniform', tensor=O)
@_log_call
def random_uniform_variable(shape, low, high, dtype=None, name=None, seed=None):
if seed:
np.random.seed(seed)
val = np.random.uniform(low=low, high=high, size=shape)
return variable(val, dtype=dtype)
@_log_call
def relu(x, alpha=None, max_value=None, threshold=0.):
return _KerasNode('relu', tensor=plaidml_op.relu(x.tensor, alpha, max_value, threshold))
@_log_call
def repeat(x, n):
y = expand_dims(x, 1, name='repeat')
return repeat_elements(y, n, 1)
@_log_call
def repeat_elements(x, rep, axis):
return _KerasNode('repeat_elements',
name='repeat_elements',
tensor=plaidml_op.repeat(x.tensor, rep, axis))
@_log_call
def reset_uids():
global _UID_PREFIX_DICT
_UID_PREFIX_DICT.clear()
@_log_call
def reshape(x, dims):
# TODO: This needs to be more thoroughly tested with symbolic shapes
dims = list(dims)
I = x.tensor
I_dims = edsl.TensorDims(I.shape.ndims)
I.bind_dims(*I_dims)
neg_idx = None
for idx, dim in enumerate(dims):
if isinstance(dim, edsl.TensorDim):
continue
if dim == 0 or dim is None:
dims[idx] = I_dims[idx] # TODO: Fix how we manage shape
elif dim == -1:
            if neg_idx is not None:
raise RuntimeError('At most one dimension of size -1 may be provided in Reshape')
neg_idx = idx
dims[idx] = 1 # Just to simplify the size computation later
if neg_idx is not None:
# Compute the value to use for the -1 dimension in the
# output shape, by making it what it needs to be in order
# to preserve the correct number of elements in the
# tensor.
#
# This code is a little tricky because symbolic values
# (e.g. the batch size in a typical neural network) may
# appear in both the original shape and the target shape.
# Naively multiplying the original shape's dimensions and
# dividing by the target shape's dimensions (excluding the
# -1 dimension) would produce a symbolic value.
#
# So:
#
# We scan the input dimensions, counting the number of
# instances of each symbolic size encountered and
# multiplying together the non-symbolic sizes into the
# numerator.
#
# We then scan the output dimensions. Where there's a
# symbolic size, we check and see if we have a count for
# it, and decrement the count if we do. Otherwise -- if
# we don't have a count for it, or if it's not symbolic --
# we multiply it into the denominator.
#
# We then take the remaining symbolic input dimensions,
# and multiply them into the numerator -- these are the
# dimensions that haven't been cancelled out.
#
# And then the size of the -1 dimension is just numerator
# / denominator; if there are any remaining uncancelled
# symbolic dimension sizes, the output will be symbolic,
# but otherwise we'll come out with a concrete dimension
# size.
num = 1
syms = defaultdict(int)
for idx, dim in enumerate(I.shape.int_dims):
if dim is None:
syms[I_dims[idx]] += 1
else:
num *= dim
den = 1
for dim in dims:
if isinstance(dim, edsl.TensorDim) and syms[dim] > 0:
syms[dim] -= 1
else:
den *= dim
for sym, count in syms.items():
for _ in range(count):
num *= sym
dims[neg_idx] = num // den
return _KerasNode('reshape', tensor=edsl.reshape(I, dims))
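# Worked example of the -1 inference above (hedged): reshaping an input of
# symbolic shape (N, 4, 6) to [N, -1]. Scanning the input counts the symbolic
# N once and multiplies 4 * 6 = 24 into the numerator; scanning the target
# cancels that count of N and leaves the denominator at 1 (the -1 slot was
# temporarily set to 1); no symbolic counts remain, so dims[neg_idx] = 24 // 1
# = 24 and the output shape is the concrete (N, 24).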
@_log_call
def resize_images(x, height_factor, width_factor, data_format, interpolation='nearest'):
return _KerasNode('resize_images',
tensor=plaidml_op.image_resize(x.tensor, (height_factor, width_factor),
interpolation,
_normalize_data_format(data_format)))
@_log_call
def resize_volumes(x, depth_factor, height_factor, width_factor, data_format):
data_format = _normalize_data_format(data_format)
if data_format == 'ncx':
ret = repeat_elements(x, depth_factor, axis=2)
ret = repeat_elements(ret, height_factor, axis=3)
ret = repeat_elements(ret, width_factor, axis=4)
elif data_format == 'nxc':
ret = repeat_elements(x, depth_factor, axis=1)
ret = repeat_elements(ret, height_factor, axis=2)
ret = repeat_elements(ret, width_factor, axis=3)
else:
raise ValueError('Invalid data_format {}'.format(data_format))
return ret
@_log_call
def reverse(x, axes):
return _KerasNode('reverse', name='reverse', tensor=plaidml_op.flip(x.tensor, axes))
@_log_call
def reverse_gradient(x, coeff=1.0):
_report_unimplemented('reverse_gradient')
@_log_call
def rnn(step_function,
inputs,
initial_states,
go_backwards=False,
mask=None,
constants=None,
unroll=False,
input_length=None):
if input_length is None:
input_length = inputs.tensor.shape.int_dims[1]
if not isinstance(input_length, six.integer_types):
raise NotImplementedError('rnn is not implemented for variable sized inputs')
if mask is not None:
raise NotImplementedError('rnn is not implemented with mask support')
if constants is None:
constants = list()
def time_expand(val, ii, t, prev):
I = val.tensor
ndmo = I.shape.ndims - 1
if (ndmo < 0):
raise PlaidMLKerasException('output values must have a batch size dimension')
dims = edsl.TensorDims(ndmo)
idxs = edsl.TensorIndexes(ndmo)
batch_dim = edsl.TensorDim()
batch_idx = edsl.TensorIndex()
I_dims = [batch_dim] + dims
I_idxs = [batch_idx] + idxs
I.bind_dims(*I_dims)
O_dims = [batch_dim] + [t] + dims
O = edsl.TensorOutput(*O_dims)
O_idxs = [batch_idx] + [ii] + idxs
O[O_idxs] = I[I_idxs]
if prev is None:
if ii != 0:
raise RuntimeError(
'Generating RNN at time step {} with no previous time step'.format(ii))
else:
O.use_default(prev.tensor)
return _KerasNode('time_expand', name='time_expand', tensor=O)
states = initial_states
output = None
for i in range(input_length):
if go_backwards:
input_val = inputs[:, input_length - 1 - i]
else:
input_val = inputs[:, i]
output_val, new_states = step_function(input_val, states + constants)
output = time_expand(output_val, i, input_length, output)
states = new_states
return (output_val, output, states)
@_log_call
def round(x):
return _KerasNode('round', tensor=edsl.round(x.tensor))
@_log_call
def separable_conv(x,
depthwise_kernel,
pointwise_kernel,
strides=None,
padding='valid',
data_format=None,
dilation_rate=None):
data_format = _normalize_data_format(data_format)
if int_shape(pointwise_kernel
)[-2] != int_shape(depthwise_kernel)[-1] * int_shape(depthwise_kernel)[-2]:
raise ValueError(
('Shape mismatch in separable convolution. Depthwise kernel input ' +
'channel count must match pointwise kernel channel count times channel ' +
'multiplier.\nReceived {} v {} * {} (from full shapes {} and ' + '{})').format(
pointwise_kernel.tensor.shape.dims[-2], depthwise_kernel.tensor.shape.dims[-2],
depthwise_kernel.tensor.shape.dims[-1], pointwise_kernel.tensor.shape,
depthwise_kernel.tensor.shape))
intermediate = conv(x,
depthwise_kernel,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
channelwise=True)
rank = x.tensor.shape.ndims - 2
ones = tuple(1 for _ in range(rank))
return conv(intermediate,
pointwise_kernel,
strides=ones,
padding='valid',
data_format=data_format,
dilation_rate=ones)
@_log_call
def separable_conv2d(x,
depthwise_kernel,
pointwise_kernel,
strides=(1, 1),
padding='valid',
data_format=None,
dilation_rate=(1, 1)):
return separable_conv(x, depthwise_kernel, pointwise_kernel, strides, padding, data_format,
dilation_rate)
@_log_call
def set_floatx(dtype):
keras_set_floatx(dtype)
# plaidml.set_floatx(ptile.convert_np_dtype_to_pml(dtype))
@_log_call
def set_learning_phase(value):
if value != 0 and value != 1:
raise ValueError('May only set_learning_phase to 0 or 1')
value = int(value)
global _in_train_phase
_in_train_phase = value
@_log_call
def set_value(x, value):
dtype = plaidml.DType.from_numpy(value.dtype)
tensor_shape = plaidml.TensorShape(dtype, value.shape)
buffer = plaidml.Buffer(_device, tensor_shape)
buffer.copy_from_ndarray(value)
x.tensor.set_param_value(buffer)
@_log_call
def shape(x):
return _KerasNode('shape', tensor=edsl.shape(x.tensor))
@_log_call
def sigmoid(x):
return _KerasNode('sigmoid', tensor=plaidml_op.sigmoid(x.tensor))
@_log_call
def sign(x):
intermediate = _KerasNode('sign_intermediate', tensor=edsl.select((x > 0).tensor, 1., -1.))
return _KerasNode('sign', tensor=edsl.select((x.tensor == 0.), 0., intermediate.tensor))
@_log_call
def sin(x):
return _KerasNode('sin', tensor=edsl.sin(x.tensor))
@_log_call
def softmax(x, axis=None, name=None):
I = x.tensor
if name is None:
name = 'softmax'
if axis is None:
axis = I.shape.ndims - 1
y = plaidml_op.softmax(I, axis=axis)
return _KerasNode('softmax', name=name, tensor=y)
@_log_call
def softplus(x):
return log(1. + exp(x))
@_log_call
def softsign(x):
return x / (1. + abs(x))
@_log_call
def sparse_categorical_crossentropy(target, output, from_logits=False):
dims = edsl.TensorDims(output.tensor.shape.ndims)
output.tensor.bind_dims(*dims)
return categorical_crossentropy(
reshape(one_hot(target, output.tensor.shape.int_dims[-1]), dims), output, from_logits)
@_log_call
def spatial_2d_padding(x, padding=((1, 1), (1, 1)), data_format=None):
data_format = _normalize_data_format(data_format)
lo_pads = [padding[i][0] for i in range(2)]
hi_pads = [padding[i][1] for i in range(2)]
return _KerasNode('spatial_2d_padding',
tensor=plaidml_op.spatial_padding(x.tensor,
lo_pads=lo_pads,
hi_pads=hi_pads,
data_layout=data_format))
@_log_call
def spatial_3d_padding(x, padding=((1, 1), (1, 1), (1, 1)), data_format=None):
data_format = _normalize_data_format(data_format)
lo_pads = [padding[i][0] for i in range(3)]
hi_pads = [padding[i][1] for i in range(3)]
    return _KerasNode('spatial_3d_padding',
tensor=plaidml_op.spatial_padding(x.tensor,
lo_pads=lo_pads,
hi_pads=hi_pads,
data_layout=data_format))
@_log_call
def sqrt(x):
return _KerasNode('sqrt', tensor=edsl.sqrt(x.tensor))
@_log_call
def square(x):
return _KerasNode('square', tensor=plaidml_op.square(x.tensor))
@_log_call
def squeeze(x, axis):
return _KerasNode('squeeze', tensor=plaidml_op.squeeze(x.tensor, axis))
@_log_call
def stack(x, axis=0):
return concatenate([expand_dims(item, axis) for item in x], axis=axis)
@_log_call
def std(x, axis=None, keepdims=False):
return sqrt(var(x, axis=axis, keepdims=keepdims))
@_log_call
def stop_gradient(variables):
_report_unimplemented('stop_gradient')
@_log_call
def sum(x, axis=None, keepdims=False):
if isinstance(x, (tuple, list)):
# In this case, a sum of the elements of the tuple/list is being requested,
# rather than a within-tensor sum
return functools.reduce(lambda a, b: a + b, x)
return _KerasNode('sum', tensor=plaidml_op.sum(x.tensor, axis, keepdims))
@_log_call
def switch(condition, then_expression, else_expression):
return _KerasNode('switch',
tensor=edsl.select(condition.tensor, then_expression.tensor,
else_expression.tensor))
@_log_call
def tanh(x):
return _KerasNode('tanh', tensor=edsl.tanh(x.tensor))
@_log_call
def temporal_padding(x, padding=(1, 1)):
data_format = _normalize_data_format(None) # uses image_data_format()
lo_pads = [padding[0]]
hi_pads = [padding[1]]
return _KerasNode('temporal_padding',
tensor=plaidml_op.spatial_padding(x.tensor,
lo_pads=lo_pads,
hi_pads=hi_pads,
data_layout=data_format))
@_log_call
def tile(x, n):
return _KerasNode('tile', tensor=plaidml_op.tile(x.tensor, n))
@_log_call
def to_dense(tensor):
_report_unimplemented('to_dense')
@_log_call
def transpose(x):
return _KerasNode('transpose', tensor=plaidml_op.transpose(x.tensor))
@_log_call
def truncated_normal(shape, mean=0.0, stddev=1.0, dtype=None, seed=None):
if dtype is None:
dtype = floatx()
if seed:
np.random.seed(seed)
return variable(stddev * scipy.stats.truncnorm.rvs(-2.0, 2.0, size=shape) + mean, dtype)
@_log_call
def update(x, new_x):
return (x, new_x)
@_log_call
def update_add(x, increment):
return (x, x + increment)
@_log_call
def update_sub(x, decrement):
return (x, x - decrement)
@_log_call
def var(x, axis=None, keepdims=False):
return _KerasNode('var', tensor=plaidml_op.variance(x.tensor, axis, keepdims))
@_log_call
def variable(value, dtype=None, name=None, constraint=None):
if name is None:
name = 'anon'
dtype = dtype or floatx()
if isinstance(value, _KerasNode):
value = value.eval()
if isinstance(value, float) or isinstance(value, six.integer_types):
value = np.array(value, dtype=dtype)
if isinstance(value, list) or isinstance(value, tuple):
value = np.array(value, dtype=dtype)
if isinstance(value, np.ndarray):
if dtype != value.dtype:
logger.debug(
'Casting to requested dtype in variable, received {} and requested {}'.format(
value.dtype, dtype))
value = value.astype(dtype)
return _KerasNode('variable', name=name, value=value)
raise TypeError('Unknown type for variable: {}'.format(type(value)))
@_log_call
def zeros(shape, dtype=None, name=None):
value = np.full(shape, 0, dtype=dtype or floatx())
return _KerasNode('zeros', name=name, value=value)
@_log_call
def zeros_like(x, dtype=None, name=None):
    value = np.full((1,), 0, dtype=dtype or floatx())
zero = _create_var('a_zero', value)
I = x.tensor
ndim = I.shape.ndims
dims = edsl.TensorDims(ndim)
idxs = edsl.TensorIndexes(ndim)
I.bind_dims(*dims)
O = edsl.TensorOutput(*dims)
O[idxs] = zero[0]
return _KerasNode('zeros_like', name=name, tensor=O)
|
import json
from pathlib import Path
from pprint import pprint
from typing import Any, Type, TypeVar
import cattr
from dfi.config import FileGroup, Settings # type: ignore # noqa
from dfi.dotfile import LinkData # type: ignore # noqa
from .conftest import FixturePaths
BASE_DIR = Path("/home/foo/settings")
FG = FileGroup(
base_dir=BASE_DIR,
dirs=[Path("/home/foo/settings"), Path("/home/bar/settings")],
globs=["xyz", "abc"],
excludes=["*bad", ".ignore*"],
target_dir=Path("/home/foo")
)
SETTINGS = Settings(
conflicting_file_strategy='backup',
conflicting_symlink_strategy='replace',
base_dir=BASE_DIR,
dotfiles_file_group=FG,
binfiles_file_group=FG,
)
T = TypeVar('T')
def do_roundtrip(obj: T, cls: Type[T]):
return cattr.structure(json.loads(json.dumps(cattr.unstructure(obj))), cls)
def test_FileGroup_json_round_trip():
assert FG == do_roundtrip(FG, FileGroup)
def test_Settings_round_trip():
assert SETTINGS == do_roundtrip(SETTINGS, Settings)
def test_Settings_vpaths(df_paths: FixturePaths):
s = Settings.mk_default(df_paths.base_dir)
pprint(s.vpaths)
bins = ['ctags', 'pants', 'pip']
dotfiles = ['bash_profile', 'bashrc', 'inputrc', 'vimrc']
vpaths = [
*[df_paths.bin_dir.joinpath(b) for b in bins],
*[df_paths.dotfiles_dir.joinpath(d) for d in dotfiles]
]
assert s.vpaths == vpaths
def test_Settings_link_data(df_paths: FixturePaths):
s = Settings.mk_default(df_paths.base_dir)
bins = ['ctags', 'pants', 'pip']
ld_bins = [
LinkData(
vpath=df_paths.bin_dir.joinpath(b),
link_path=df_paths.home_dir / '.local/bin' / b,
link_data=Path("../..", df_paths.base_dir.name) / 'bin' / b,
) for b in bins
]
dotfiles = ['bash_profile', 'bashrc', 'inputrc', 'vimrc']
ld_df = [
LinkData(
vpath=df_paths.dotfiles_dir.joinpath(df),
link_path=df_paths.home_dir / f".{df}",
link_data=Path(df_paths.base_dir.name) / 'dotfiles' / df,
) for df in dotfiles
]
assert s.link_data == [*ld_bins, *ld_df]
pprint(cattr.unstructure(s.link_data))
def test_Settings_with_globs(df_paths: FixturePaths):
s = Settings(
base_dir=df_paths.base_dir,
dotfiles_file_group=FileGroup(
base_dir=df_paths.base_dir,
target_dir=df_paths.home_dir,
dirs=[df_paths.dotfiles_dir],
globs=[str(g.relative_to(df_paths.base_dir)) for g in df_paths.dotfile_extras],
excludes=['.*', 'gnome'],
link_prefix='.',
),
binfiles_file_group=FileGroup(
base_dir=df_paths.base_dir,
target_dir=df_paths.home_dir.joinpath('.local', 'bin'),
dirs=[],
globs=None,
excludes=None,
),
)
    # TODO: add real assertions; the ZeroDivisionError here was leftover debug code
    assert s.link_data is not None
|
import copy
import pickle
from typing import Callable, Optional, Tuple
from contextlib import contextmanager
from torchcontrib.optim import SWA
from pathlib import Path
import sys
import itertools
import argparse
import torch.utils.data as torchdata
import torch
import torchvision as tv
from alr.utils import stratified_partition, _map_device, manual_seed
from alr.utils._type_aliases import _DeviceType
from alr.training.samplers import RandomFixedLengthSampler, MinLabelledSampler
from alr.training.utils import PLPredictionSaver
import numpy as np
from torch.nn import functional as F
from torch import nn
from torch.nn.utils import weight_norm
from ignite.engine import create_supervised_evaluator
class IndexMarker(torchdata.Dataset):
PSEUDO_LABELLED = True
LABELLED = False
def __init__(self, dataset: torchdata.Dataset, mark):
self.dataset = dataset
self.mark = mark
def __len__(self):
return len(self.dataset)
def __getitem__(self, idx):
# returns (x, y), idx, mark
return self.dataset[idx], idx, self.mark
class PDS(torchdata.Dataset):
def __init__(
self,
dataset: IndexMarker,
transform: Callable[[torch.Tensor], torch.Tensor],
augmentation: Optional[Callable[[torch.Tensor], torch.Tensor]] = None,
):
self.dataset = dataset
self._augmentation = augmentation
self._transform = transform
self._with_metadata = True
self._new_targets = None
def __getitem__(self, idx):
(img_raw, target), idx, mark = self.dataset[idx]
# override target
if self._new_targets is not None:
target = self._new_targets[idx]
if self._augmentation:
img_aug = self._augmentation(img_raw)
img_raw, img_aug = map(self._transform, [img_raw, img_aug])
else:
img_raw = self._transform(img_raw)
img_aug = img_raw
if self._with_metadata:
return img_raw, img_aug, target, idx, mark
return img_aug, target
def __len__(self):
return len(self.dataset)
@contextmanager
def no_fluff(self):
if self._with_metadata:
self._with_metadata = False
yield self
self._with_metadata = True
else:
yield self
@contextmanager
def no_augmentation(self):
if self._augmentation:
store = self._augmentation
self._augmentation = None
yield self
self._augmentation = store
else:
yield self
def override_targets(self, new_targets: torch.Tensor):
assert new_targets.size(0) == len(self.dataset)
self._new_targets = new_targets
@property
def override_accuracy(self):
assert self._new_targets is not None
correct = 0
for i in range(len(self)):
overridden_target = self._new_targets[i]
original_target = self.dataset[i][0][-1]
correct += (
overridden_target.argmax(dim=-1).item()
== original_target.argmax(dim=-1).item()
)
return correct / len(self)
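# Pseudo-labelling flow with PDS (hedged sketch; names are illustrative):
#   pool = PDS(IndexMarker(pool_ds, mark=IndexMarker.PSEUDO_LABELLED),
#              transform=test_transform)
#   pool.override_targets(softmax_preds)   # (len(pool), C) model probabilities
#   pool.override_accuracy                 # agreement with the true labels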
def evaluate(model, loader, device):
correct = 0
total = 0
with torch.no_grad():
model.eval()
for x, y in loader:
x, y = _map_device([x, y], device)
preds = model(x)
correct += torch.eq(preds.argmax(dim=-1), y.argmax(dim=-1)).float().sum()
total += y.size(0)
return correct / total
# from https://github.com/facebookresearch/mixup-cifar10/blob/master/train.py#L119
def mixup(
x: torch.Tensor, y: torch.Tensor, alpha: float = 1.0, device: _DeviceType = None
):
"""Returns mixed inputs, pairs of targets, and lambda"""
if alpha > 0:
lam = np.random.beta(alpha, alpha)
else:
lam = 1
batch_size = x.size()[0]
index = torch.randperm(batch_size)
if device:
index = index.to(device)
mixed_x = lam * x + (1 - lam) * x[index, :]
y_a, y_b = y, y[index]
return mixed_x, y_a, y_b, lam
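# Usage sketch (hedged; shapes and `criterion` are illustrative):
#   x: (B, C, H, W) images, y: (B, num_classes) soft targets.
#   mixed_x, y_a, y_b, lam = mixup(x, y, alpha=1.0, device=device)
#   pred = model(mixed_x)
#   loss = lam * criterion(pred, y_a) + (1 - lam) * criterion(pred, y_b)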
def reg_nll_loss(coef: Optional[Tuple[float, float]] = (0.8, 0.4)):
def _reg_nll_loss(pred: torch.Tensor, target: torch.Tensor):
C = target.size(-1)
prob = pred.exp()
# heuristic: empirical mean of mini-batch
prob_avg = prob.mean(dim=0)
# uniform prior
prior = target.new_ones(C) / C
# standard cross entropy loss: H[target, pred]
ce_loss = -torch.mean(torch.sum(target * pred, dim=1))
# prior loss: KL(prior || empirical mean) = sum c=1..C of prior * log[prior/emp. mean]
# note, this is simplified, the full prior loss is:
# sum(prior * log[prior] - prior * log[prob_avg])
# but since the first term is a constant, we drop it.
prior_loss = -torch.sum(prior * torch.log(prob_avg))
# entropy loss: neg. mean of sum c=1..C of p(y=c|x)log[p(y=c|x)]
entropy_loss = -torch.mean(torch.sum(prob * pred, dim=1))
return ce_loss + coef[0] * prior_loss + coef[1] * entropy_loss
return _reg_nll_loss
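# Minimal sanity check of the three terms above (hedged sketch;
# `_check_reg_nll_loss` is an illustrative helper, not used by the training loop):
def _check_reg_nll_loss():
    logits = torch.randn(8, 10)
    pred = F.log_softmax(logits, dim=-1)  # log-probabilities, as the loss expects
    target = torch.eye(10)[torch.randint(0, 10, (8,))]  # one-hot targets
    loss = reg_nll_loss()(pred, target)
    assert loss.ndim == 0 and torch.isfinite(loss)
    return loss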
def reg_mixup_loss(coef: Optional[Tuple[float, float]] = (0.8, 0.4)):
def _reg_mixup_loss(
pred: torch.Tensor, y1: torch.Tensor, y2: torch.Tensor, lamb: int
):
"""
pred is log_softmax,
y1 and y2 are softmax probabilities
"""
C = y1.size(-1)
assert y2.size(-1) == C
# NxC
prob = pred.exp()
# C
prob_avg = prob.mean(dim=0)
prior = y2.new_ones(C) / C
# term1, term2, [1,]
term1 = -torch.mean(torch.sum(y1 * pred, dim=1))
term2 = -torch.mean(torch.sum(y2 * pred, dim=1))
mixup_loss = lamb * term1 + (1 - lamb) * term2
prior_loss = -torch.sum(prior * torch.log(prob_avg))
entropy_loss = -torch.mean(torch.sum(prob * pred, dim=1))
return mixup_loss + coef[0] * prior_loss + coef[1] * entropy_loss
return _reg_mixup_loss
# from: https://github.com/EricArazo/PseudoLabeling/blob/2fbbbd3ca648cae453e3659e2e2ed44f71be5906/utils_pseudoLab/ssl_networks.py
class Net(nn.Module):
"""
CNN from Mean Teacher paper
"""
def __init__(self, num_classes=10, dropRatio=0.5):
super(Net, self).__init__()
self.activation = nn.LeakyReLU(0.1)
self.conv1a = weight_norm(nn.Conv2d(3, 128, 3, padding=1))
self.bn1a = nn.BatchNorm2d(128)
self.conv1b = weight_norm(nn.Conv2d(128, 128, 3, padding=1))
self.bn1b = nn.BatchNorm2d(128)
self.conv1c = weight_norm(nn.Conv2d(128, 128, 3, padding=1))
self.bn1c = nn.BatchNorm2d(128)
self.mp1 = nn.MaxPool2d(2, stride=2, padding=0)
self.drop = nn.Dropout(dropRatio)
self.conv2a = weight_norm(nn.Conv2d(128, 256, 3, padding=1))
self.bn2a = nn.BatchNorm2d(256)
self.conv2b = weight_norm(nn.Conv2d(256, 256, 3, padding=1))
self.bn2b = nn.BatchNorm2d(256)
self.conv2c = weight_norm(nn.Conv2d(256, 256, 3, padding=1))
self.bn2c = nn.BatchNorm2d(256)
self.mp2 = nn.MaxPool2d(2, stride=2, padding=0)
self.conv3a = weight_norm(nn.Conv2d(256, 512, 3, padding=0))
self.bn3a = nn.BatchNorm2d(512)
self.conv3b = weight_norm(nn.Conv2d(512, 256, 1, padding=0))
self.bn3b = nn.BatchNorm2d(256)
self.conv3c = weight_norm(nn.Conv2d(256, 128, 1, padding=0))
self.bn3c = nn.BatchNorm2d(128)
self.ap3 = nn.AvgPool2d(6, stride=2, padding=0)
self.fc1 = weight_norm(nn.Linear(128, num_classes))
def forward(self, x):
x = self.activation(self.bn1a(self.conv1a(x)))
x = self.activation(self.bn1b(self.conv1b(x)))
x = self.activation(self.bn1c(self.conv1c(x)))
x = self.mp1(x)
x = self.drop(x)
x = self.activation(self.bn2a(self.conv2a(x)))
x = self.activation(self.bn2b(self.conv2b(x)))
x = self.activation(self.bn2c(self.conv2c(x)))
x = self.mp2(x)
x = self.drop(x)
x = self.activation(self.bn3a(self.conv3a(x)))
x = self.activation(self.bn3b(self.conv3b(x)))
x = self.activation(self.bn3c(self.conv3c(x)))
x = self.ap3(x)
x = x.view(-1, 128)
return F.log_softmax(self.fc1(x), dim=-1)
def onehot_transform(n):
def _onehot_transform(x):
return torch.eye(n)[x]
return _onehot_transform
def main(
optim="SGD",
alpha=1,
batch_size=100,
min_label_prop=16,
rfls_len=20_000,
val_size=5_000,
warm_up_epochs=10,
epochs=400,
start_size=1000,
dropout=0.1,
swa=False,
augment=True,
):
root = Path("results")
template = f"{optim}_alpha_{alpha}_drop_{dropout}_size_{start_size}"
if augment:
template += "_aug"
if swa:
template += "_swa"
root = root / template
root.mkdir(parents=True)
with open(root / "params.txt", "w") as fp:
fp.write(str(locals()))
accs = {
"test_acc1": 0,
"test_acc2": 0,
"val_acc": [],
"override_acc": [],
}
# reset optimiser between stage 1 and 2
RESET_OPTIM = True
manual_seed(42)
if torch.cuda.is_available():
device = torch.device("cuda:0")
kwargs = dict(num_workers=4, pin_memory=True)
else:
device = torch.device("cpu")
kwargs = {}
if optim == "SGD":
OPTIM_KWARGS = dict(lr=0.1, momentum=0.9, weight_decay=1e-4)
elif optim == "Adam":
OPTIM_KWARGS = dict(lr=0.1, weight_decay=1e-4)
# log error every 20 iterations
LOG_EVERY = 20
if swa:
        swa_params = dict(swa_lr=0.01, swa_freq=5, swa_start=350)  # SWA LR must be positive
train_transform = tv.transforms.Compose(
[
tv.transforms.ToTensor(),
tv.transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
]
)
test_transform = tv.transforms.Compose(
[
tv.transforms.ToTensor(),
tv.transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
]
)
if augment:
data_augmentation = tv.transforms.Compose(
[
tv.transforms.Pad(2, padding_mode="reflect"),
tv.transforms.ColorJitter(
brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1
),
tv.transforms.RandomCrop(32),
tv.transforms.RandomHorizontalFlip(),
]
)
else:
data_augmentation = None
cifar_train = tv.datasets.CIFAR10(
root="data",
train=True, # leave transform for PDS
download=True,
target_transform=onehot_transform(10),
)
cifar_test = tv.datasets.CIFAR10(
root="data",
train=False,
transform=test_transform,
download=True,
target_transform=onehot_transform(10),
)
test_loader = torchdata.DataLoader(
cifar_test,
shuffle=False,
batch_size=512,
**kwargs,
)
train, pool = stratified_partition(cifar_train, 10, size=start_size)
pool, val = torchdata.random_split(pool, (len(pool) - val_size, val_size))
train = IndexMarker(train, mark=IndexMarker.LABELLED)
train = PDS(train, transform=train_transform, augmentation=data_augmentation)
model = Net(dropRatio=dropout).to(device)
optimiser = getattr(torch.optim, optim)(model.parameters(), **OPTIM_KWARGS)
optimiser_init_state = copy.deepcopy(optimiser.state_dict())
scheduler = torch.optim.lr_scheduler.MultiStepLR(
optimiser, milestones=[250, 350], gamma=0.1
)
val = PDS(IndexMarker(val, mark=None), transform=test_transform, augmentation=None)
val._with_metadata = False
print("Stage 1")
with train.no_fluff():
train_loader = torchdata.DataLoader(
train,
batch_size=batch_size,
sampler=RandomFixedLengthSampler(train, rfls_len, shuffle=True),
**kwargs,
)
val_loader = torchdata.DataLoader(
val,
batch_size=512,
shuffle=False,
**kwargs,
)
for e in range(1, warm_up_epochs + 1):
print(f"Epoch {e}/{warm_up_epochs}")
for i, (x, y) in enumerate(train_loader, 1):
model.train()
x, y = _map_device([x, y], device)
pred = model(x)
loss = reg_nll_loss()(pred, y)
optimiser.zero_grad()
loss.backward()
optimiser.step()
if i % LOG_EVERY == 0:
print(
f"\tIteration {i}/{len(train_loader)}:"
f" loss = {loss.item():.4f}"
)
val_acc = evaluate(model, val_loader, device=device)
print(f"\tval_acc = {val_acc}")
accs["val_acc"].append(val_acc)
test_acc = evaluate(model, test_loader, device=device)
print(f"Stage 1 over, test acc = {test_acc}")
accs["test_acc1"] = test_acc
if RESET_OPTIM:
optimiser.load_state_dict(optimiser_init_state)
if swa:
optimiser = SWA(optimiser, swa_lr=swa_params["swa_lr"])
# pseudo-label pool
pool = PDS(
IndexMarker(pool, mark=IndexMarker.PSEUDO_LABELLED),
transform=train_transform,
augmentation=data_augmentation,
)
# DO NOT use augmentation when obtaining pseudo-labels
with pool.no_augmentation():
with pool.no_fluff():
pool_loader = torchdata.DataLoader(
pool,
batch_size=512,
shuffle=False,
**kwargs,
)
pseudo_labels = []
with torch.no_grad():
model.eval()
for x, _ in pool_loader:
x = x.to(device)
# note: add probability, NOT log_softmax!
pseudo_labels.append(model(x).exp().detach().cpu())
# update pseudo-labels
pool.override_targets(torch.cat(pseudo_labels))
pool_lab_acc = pool.override_accuracy
print(f"Overridden label's accuracy = {pool_lab_acc:.4f}")
accs["override_acc"].append(pool_lab_acc)
# get full dataset
full_dataset = torchdata.ConcatDataset((train, pool))
fds_loader = torchdata.DataLoader(
full_dataset,
batch_sampler=MinLabelledSampler(
train,
pool,
batch_size=batch_size,
min_labelled=min_label_prop,
),
**kwargs,
)
print("Stage 2")
pseudo_labels = torch.empty(size=(len(pool), 10))
for e in range(1, epochs + 1):
print(f"Epoch {e}/{epochs}")
for i, (img_raw, img_aug, target, idx, mark) in enumerate(fds_loader, 1):
img_raw, img_aug, target, idx, mark = _map_device(
[img_raw, img_aug, target, idx, mark], device
)
# train
model.train()
xp, y1, y2, lamb = mixup(img_aug, target, alpha=alpha)
preds = model(xp)
loss = reg_mixup_loss()(preds, y1, y2, lamb)
optimiser.zero_grad()
loss.backward()
optimiser.step()
# update pseudo-labels
with torch.no_grad():
model.eval()
pld_mask = mark == IndexMarker.PSEUDO_LABELLED
# unaugmented, raw, pseudo-labelled images
pld_img = img_raw[pld_mask]
# get *softmax* predictions -- exponentiate the output!
new_pld = model(pld_img).exp().detach().cpu()
pseudo_labels[idx[pld_mask]] = new_pld
if i % LOG_EVERY == 0:
print(
f"\tIteration {i}/{len(fds_loader)}:" f" loss = {loss.item():.4f}"
)
pool.override_targets(pseudo_labels)
pool_lab_acc = pool.override_accuracy
scheduler.step()
print(f"Overridden label's accuracy = {pool_lab_acc:.4f}")
accs["override_acc"].append(pool_lab_acc)
val_acc = evaluate(model, val_loader, device=device)
print(f"\tval_acc = {val_acc}")
accs["val_acc"].append(val_acc)
if swa:
if e > swa_params["swa_start"] and e % swa_params["swa_freq"] == 0:
optimiser.update_swa()
if swa:
with train.no_fluff():
with train.no_augmentation():
optimiser.swap_swa_sgd()
optimiser.bn_update(train_loader, model, device)
test_acc = evaluate(model, test_loader, device=device)
print(f"Stage 2 over, test acc = {test_acc}")
accs["test_acc2"] = test_acc
torch.save(model.state_dict(), root / "weights.pt")
with open(root / "metrics.pkl", "wb") as fp:
pickle.dump(accs, fp)
save_pl_metrics = create_supervised_evaluator(model, metrics=None, device=device)
pps = PLPredictionSaver(log_dir=(root / "calib_metrics"))
pps.attach(save_pl_metrics)
ds = tv.datasets.CIFAR10(
root="data",
train=False,
transform=test_transform,
download=True,
)
save_pl_metrics.run(
torchdata.DataLoader(ds, batch_size=512, shuffle=False, **kwargs)
)
if __name__ == "__main__":
sizes = [20, 50]
alphas = [0.4, 1, 4, 8]
configs = list(itertools.product(sizes, alphas))
idx = int(sys.argv[1])
config = {
"start_size": configs[idx][0],
"alpha": configs[idx][1],
}
main(dropout=0.5, augment=True, **config)
|
import logging
import logging.config
import os
from django.conf import settings
def configure_logging(config_file, log_file):
if os.path.exists(config_file):
logging.config.fileConfig(config_file)
else:
logging.basicConfig()
logging.getLogger().setLevel(logging.INFO)
    _file_handler = logging.FileHandler(os.path.join(settings.LOG_LOCATION, log_file))
_formatter = logging.Formatter("""[%(asctime)s %(levelname)s %(name)s] %(message)s""")
_file_handler.setFormatter(_formatter)
logging.getLogger().addHandler(_file_handler)
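# Usage sketch (hedged; paths are illustrative):
#   configure_logging(config_file="/etc/myapp/logging.ini", log_file="myapp.log")
# If the config file exists its settings win; otherwise the root logger is set
# to INFO and a file handler is attached under settings.LOG_LOCATION.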
|
/******************************************************************************
* Copyright 2017-2018 Baidu Robotic Vision Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#ifndef LINEARALGEBRA_VECTOR3_H_
#define LINEARALGEBRA_VECTOR3_H_
#include "Vector2.h"
#include "Utility.h"
#include <string>
#include <algorithm>
//#define CFG_DEBUG_EIGEN
namespace LA {
class AlignedVector3f {
public:
inline AlignedVector3f() {}
inline AlignedVector3f(const float *v) { Set(v); }
inline AlignedVector3f(const float v0, const float v1, const float v2) {
v012r().vset_all_lane(v0, v1, v2, 0.0f);
}
inline AlignedVector3f(const Vector2f &x, const float z) {
xyzr().vset_all_lane(x.x() * z, x.y() * z, z, 0.0f);
}
inline operator const float* () const { return (const float *) this; }
inline operator float* () { return ( float *) this; }
inline AlignedVector3f(const xp128f &v) : m_data(v) {} // NOLINT
inline const xp128f &v012r() const { return m_data; } inline xp128f &v012r() { return m_data; }
inline const xp128f &xyzr () const { return m_data; } inline xp128f &xyzr () { return m_data; }
inline const float& v0() const { return m_data[0]; } inline float& v0() { return m_data[0]; }
inline const float& v1() const { return m_data[1]; } inline float& v1() { return m_data[1]; }
inline const float& v2() const { return m_data[2]; } inline float& v2() { return m_data[2]; }
inline const float& r () const { return m_data[3]; } inline float& r () { return m_data[3]; }
inline const float& x () const { return m_data[0]; } inline float& x () { return m_data[0]; }
inline const float& y () const { return m_data[1]; } inline float& y () { return m_data[1]; }
inline const float& z () const { return m_data[2]; } inline float& z () { return m_data[2]; }
inline const float& operator() (const int row, const int col = 0) const {
return m_data[row];
}
inline float& operator() (const int row, const int col = 0) {
return m_data[row];
}
inline void operator = (const xp128f &v) {
v012r() = v;
}
inline bool operator == (const AlignedVector3f &v) const {
// TODO(yanghongtian) : rooms for optimizations
return v0() == v.v0() && v1() == v.v1() && v2() == v.v2();
}
inline void operator += (const AlignedVector3f &v) {
v012r() += v.v012r();
}
inline void operator -= (const AlignedVector3f &v) {
v012r() -= v.v012r();
}
inline void operator *= (const float s) { Scale(s); }
inline void operator *= (const xp128f &s) { v012r() *= s; }
inline void operator *= (const AlignedVector3f &s) { v012r() *= s.v012r(); }
inline void operator /= (const float d) { Scale(1.0f / d); }
inline AlignedVector3f operator + (const AlignedVector3f &b) const {
AlignedVector3f _apb;
apb(*this, b, _apb);
return _apb;
}
inline AlignedVector3f operator + (const float b) const {
AlignedVector3f apb;
apb.v012r() = v012r() + b;
return apb;
}
inline AlignedVector3f operator - (const AlignedVector3f &b) const {
AlignedVector3f _amb;
amb(*this, b, _amb);
return _amb;
}
inline AlignedVector3f operator - (const float b) const {
AlignedVector3f amb;
amb.v012r() = v012r() - b;
return amb;
}
inline AlignedVector3f operator * (const float s) const {
AlignedVector3f v;
GetScaled(s, v);
return v;
}
inline AlignedVector3f operator * (const xp128f &s) const {
AlignedVector3f v;
GetScaled(s, v);
return v;
}
  inline void Set(const float *v) {
    memcpy(&m_data[0], v, 12);  // copy three floats (12 bytes); the r lane is untouched
  }
inline void Set(const double *v) {
v012r().vset_all_lane(static_cast<float>(v[0]),
static_cast<float>(v[1]),
static_cast<float>(v[2]),
0.0f);
}
inline void Set(const float v0, const float v1, const float v2) {
v012r().vset_all_lane(v0, v1, v2, 0.0f);
}
inline void Set(const Vector2f &x, const float z) {
xyzr().vset_all_lane(x.x() * z, x.y() * z, z, 0.0f);
}
inline void Get(float *v) const { memcpy(v, this, 12); }
inline void Get(double *v) const {
v[0] = static_cast<double>(v0());
v[1] = static_cast<double>(v1());
v[2] = static_cast<double>(v2());
}
inline void GetMinus(AlignedVector3f &v) const {
const xp128f zero = xp128f::get(0.0f);
v.v012r() = zero - v012r();
}
inline AlignedVector3f GetMinus() const { AlignedVector3f v; GetMinus(v); return v; }
inline void MakeZero() { memset(this, 0, 12); }
inline void MakeMinus() { v012r().vmake_minus();}
inline bool Valid() const { return v0() != FLT_MAX; }
inline bool Invalid() const { return v0() == FLT_MAX; }
inline void Invalidate() { v0() = FLT_MAX; }
  inline void Project(Vector2f &x) const {
    // perspective division: (x, y, z) -> (x / z, y / z); x.y() temporarily
    // holds 1 / z before being overwritten with the projected y.
    x.y() = 1.f / z();
    x.x() = this->x() * x.y();
    x.y() = this->y() * x.y();
  }
inline const Vector2f GetProjected() const { Vector2f x; Project(x); return x; }
inline void Normalize() {
v012r() *= (1.0f / sqrtf(SquaredLength()));
}
inline AlignedVector3f GetNormalized() const {
AlignedVector3f v;
v.v012r() = v012r() * (1.0f / sqrtf(SquaredLength()));
return v;
}
inline void Scale(const float s) {
v012r() *= s;
}
inline void GetScaled(const float s, AlignedVector3f &v) const {
v.v012r() = v012r() * s;
}
inline void GetScaled(const xp128f &s, AlignedVector3f &v) const {
v.v012r() = v012r() * s;
}
inline void GetScaled(const LA::AlignedVector3f &s, AlignedVector3f &v) const {
v.v012r() = v012r() * s.v012r();
}
inline AlignedVector3f GetScaled(const float s) const {
AlignedVector3f v;
GetScaled(s, v);
return v;
}
inline void GetSquareRoot(AlignedVector3f &v) const {
v.v012r().vset_sqrt_from_vec(v012r());
}
inline AlignedVector3f GetSquareRoot() const {
AlignedVector3f v; GetSquareRoot(v); return v;
}
inline void MakeSquareRoot() {
v012r().vset_sqrt_from_vec(v012r());
}
inline float Sum() const { return v012r().vsum_012(); }
inline float SquaredLength() const {
return (v012r() * v012r()).vsum_012();
}
inline float Dot(const AlignedVector3f &v) const {
return (v012r() * v.v012r()).vsum_012();
}
  inline float Dot(const float *v) const {
    // caller should ensure v is suitably aligned: it is reinterpreted as an xp128f
    return (v012r() * *(reinterpret_cast<const xp128f *>(v))).vsum_012();
  }
inline AlignedVector3f Cross(const AlignedVector3f &v) const {
AlignedVector3f c;
c.x() = y() * v.z() - z() * v.y();
c.y() = z() * v.x() - x() * v.z();
c.z() = x() * v.y() - y() * v.x();
return c;
}
inline void Interpolate(const float w1, const AlignedVector3f &v1, const AlignedVector3f &v2) {
v012r() = v1.v012r() * w1 + v2.v012r() * (1.f - w1);
}
inline void Interpolate(const AlignedVector3f &v1, const AlignedVector3f &v2,
const float t1, const float t2, const float t) {
const float w1 = (t2 - t) / (t2 - t1);
Interpolate(w1, v1, v2);
}
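  // Usage sketch (hedged): with t1 = 0, t2 = 10, t = 2.5 the weight on v1 is
  // w1 = (t2 - t) / (t2 - t1) = 0.75, so the result is 0.75 * v1 + 0.25 * v2.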
inline void Print(const bool e = false) const {
if (e) {
UT::Print("%e %e %e\n", v0(), v1(), v2());
} else {
UT::Print("%f %f %f\n", v0(), v1(), v2());
}
}
inline void Print(const std::string str, const bool e, const bool l, const bool n) const {
UT::Print("%s", str.c_str());
if (e) {
UT::Print("%e %e %e", v0(), v1(), v2());
} else {
UT::Print("%f %f %f", v0(), v1(), v2());
}
if (l) {
if (e) {
UT::Print(" = %e", sqrtf(SquaredLength()));
} else {
UT::Print(" = %f", sqrtf(SquaredLength()));
}
}
if (n) {
UT::Print("\n");
}
}
inline void Save(FILE *fp, const bool e = false) const {
if (e) {
fprintf(fp, "%e %e %e\n", v0(), v1(), v2());
} else {
fprintf(fp, "%f %f %f\n", v0(), v1(), v2());
}
}
inline void Load(FILE *fp) {
#ifdef CFG_DEBUG
const int N = fscanf(fp, "%f %f %f", &v0(), &v1(), &v2());
UT_ASSERT(N == 3);
#else
fscanf(fp, "%f %f %f", &v0(), &v1(), &v2());
#endif
}
inline bool AssertEqual(const AlignedVector3f &v,
const int verbose = 1, const std::string str = "",
const float epsAbs = 0.0f, const float epsRel = 0.0f) const {
if (UT::VectorAssertEqual(&v0(), &v.v0(), 3, verbose, str, epsAbs, epsRel)) {
return true;
} else if (verbose) {
UT::PrintSeparator();
Print(verbose > 1);
v.Print(verbose > 1);
const AlignedVector3f e = *this - v;
e.Print(verbose > 1);
}
return false;
}
inline bool AssertZero(const int verbose = 1, const std::string str = "",
const float epsAbs = 0.0f, const float epsRel = 0.0f) const {
if (UT::VectorAssertZero(&v0(), 3, verbose, str, epsAbs, epsRel)) {
return true;
} else if (verbose) {
UT::PrintSeparator();
Print(verbose > 1);
}
return false;
}
inline void SetInfinite() {
v012r().vdup_all_lane(FLT_MAX);
}
inline void AssertInfinite() const {
UT_ASSERT(v0() == FLT_MAX);
UT_ASSERT(v1() == FLT_MAX);
UT_ASSERT(v2() == FLT_MAX);
UT_ASSERT(r() == FLT_MAX);
}
inline void AssertFinite() const {
UT_ASSERT(v0() != FLT_MAX);
UT_ASSERT(v1() != FLT_MAX);
UT_ASSERT(v2() != FLT_MAX);
UT_ASSERT(r() != FLT_MAX);
}
inline void Random(const float vMax) { Random(-vMax, vMax); }
inline void Random(const float vMin, const float vMax) { UT::Random(&v0(), 3, vMin, vMax); }
static inline AlignedVector3f GetRandom(const float vMax) {
AlignedVector3f v;
v.Random(vMax);
return v;
}
static inline AlignedVector3f GetRandom(const float vMin, const float vMax) {
AlignedVector3f v;
v.Random(vMin, vMax);
return v;
}
static inline void apb(const AlignedVector3f &a,
const AlignedVector3f &b, AlignedVector3f &apb) {
apb.v012r() = a.v012r() + b.v012r();
}
static inline void amb(const AlignedVector3f &a,
const AlignedVector3f &b, AlignedVector3f &amb) {
amb.v012r() = a.v012r() - b.v012r();
}
protected:
xp128f m_data;
};
template<typename TYPE> class Vector3 {
public:
//inline Vector3<TYPE>() {}
//inline Vector3<TYPE>(const TYPE *v) { Set(v); }
//inline Vector3<TYPE>(const TYPE v0, const TYPE v1, const TYPE v2) {
// this->v0() = v0;
// this->v1() = v1;
// this->v2() = v2;
//}
//inline Vector3<TYPE>(const Vector2<TYPE> &v0, const TYPE v1) {
// this->v0() = v0.v0();
// this->v1() = v0.v1();
// this->v2() = v1;
//}
static inline Vector3<TYPE> Get(const TYPE v0, const TYPE v1, const TYPE v2) {
Vector3<TYPE> v;
v.Set(v0, v1, v2);
return v;
}
inline const TYPE& v0() const { return m_data[0]; } inline TYPE& v0() { return m_data[0]; }
inline const TYPE& v1() const { return m_data[1]; } inline TYPE& v1() { return m_data[1]; }
inline const TYPE& v2() const { return m_data[2]; } inline TYPE& v2() { return m_data[2]; }
inline const TYPE& x () const { return m_data[0]; } inline TYPE& x () { return m_data[0]; }
inline const TYPE& y () const { return m_data[1]; } inline TYPE& y () { return m_data[1]; }
inline const TYPE& z () const { return m_data[2]; } inline TYPE& z () { return m_data[2]; }
inline operator const TYPE* () const { return m_data; }
inline operator TYPE* () { return m_data; }
inline void operator = (const TYPE *v) { Set(v); }
inline void operator += (const Vector3<TYPE> &v) {
v0() = v.v0() + v0();
v1() = v.v1() + v1();
v2() = v.v2() + v2();
}
inline void operator -= (const Vector3<TYPE> &v) {
v0() = -v.v0() + v0();
v1() = -v.v1() + v1();
v2() = -v.v2() + v2();
}
inline void operator += (const AlignedVector3f &v) {
v0() = v.v0() + v0();
v1() = v.v1() + v1();
v2() = v.v2() + v2();
}
inline void operator *= (const TYPE s) { Scale(s); }
inline Vector3<TYPE> operator + (const Vector3<TYPE> &v) const {
return Vector3<TYPE>::Get(v0() + v.v0(), v1() + v.v1(), v2() + v.v2());
}
inline Vector3<TYPE> operator - (const Vector3<TYPE> &v) const {
return Vector3<TYPE>::Get(v0() - v.v0(), v1() - v.v1(), v2() - v.v2());
}
inline Vector3<TYPE> operator * (const TYPE s) const {
return Vector3<TYPE>::Get(v0() * s, v1() * s, v2() * s);
}
inline void Set(const TYPE v0, const TYPE v1, const TYPE v2) {
this->v0() = v0;
this->v1() = v1;
this->v2() = v2;
}
inline void Set(const float *v);
inline void Set(const double *v);
inline void Get(TYPE *v) const { memcpy(v, this, sizeof(Vector3<TYPE>)); }
inline AlignedVector3f GetAlignedVector3f() const { AlignedVector3f v; Get(v); return v; }
inline void GetMinus(Vector3<TYPE> &v) const {
v.v0() = -v0(); v.v1() = -v1(); v.v2() = -v2();
}
inline void MakeMinus() { v0() = -v0(); v1() = -v1(); v2() = -v2(); }
inline void MakeZero() { memset(this, 0, sizeof(Vector3<TYPE>)); }
inline void Scale(const TYPE s) { v0() *= s; v1() *= s; v2() *= s; }
inline TYPE Dot(const Vector3<TYPE> &v) const {
return v0() * v.v0() + v1() * v.v1() + v2() * v.v2();
}
  inline void Normalize() { Scale(1 / std::sqrt(SquaredLength())); }  // std::sqrt keeps full precision for double vectors
inline TYPE SquaredLength() const { return v0() * v0() + v1() * v1() + v2() * v2(); }
inline TYPE Sum() const { return v0() + v1() + v2(); }
inline TYPE Maximal() const { return std::max(std::max(v0(), v1()), v2()); }
inline TYPE Minimal() const { return std::min(std::min(v0(), v1()), v2()); }
inline void Invalidate() { v0() = UT::Invalid<TYPE>(); }
inline bool Valid() const { return v0() != UT::Invalid<TYPE>(); }
inline bool Invalid() const { return v0() == UT::Invalid<TYPE>(); }
inline void Print(const bool e = false) const {
if (e) {
UT::Print("%e %e %e\n", v0(), v1(), v2());
} else {
UT::Print("%f %f %f\n", v0(), v1(), v2());
}
}
inline void Print(const std::string str, const bool e) const {
UT::Print("%s", str.c_str());
Print(e);
}
inline bool AssertEqual(const Vector3<TYPE> &v,
const int verbose = 1, const std::string str = "",
const TYPE epsAbs = 0, const TYPE epsRel = 0) const {
if (UT::VectorAssertEqual<TYPE>(&v0(), &v.v0(), 3, verbose, str, epsAbs, epsRel)) {
return true;
} else if (verbose) {
UT::PrintSeparator();
Print(verbose > 1);
v.Print(verbose > 1);
const Vector3<TYPE> e = *this - v;
e.Print(verbose > 1);
}
return false;
}
inline void Random(const TYPE vMin, const TYPE vMax) { UT::Random(&v0(), 3, vMin, vMax); }
static inline Vector3<TYPE> GetRandom(const TYPE vMin, const TYPE vMax) {
Vector3<TYPE> v;
v.Random(vMin, vMax);
return v;
}
protected:
TYPE m_data[3];
};
typedef Vector3<int> Vector3i;
typedef Vector3<float> Vector3f;
typedef Vector3<double> Vector3d;
typedef Vector3<ubyte> Vector3ub;
template<> inline void Vector3f::Set(const float *v) { memcpy(this, v, sizeof(Vector3f)); }
template<> inline void Vector3d::Set(const double *v) { memcpy(this, v, sizeof(Vector3d)); }
template<> inline void Vector3f::Set(const double *v) {
v0() = static_cast<float>(v[0]);
v1() = static_cast<float>(v[1]);
v2() = static_cast<float>(v[2]);
}
template<> inline void Vector3d::Set(const float *v) {
v0() = static_cast<double>(v[0]);
v1() = static_cast<double>(v[1]);
v2() = static_cast<double>(v[2]);
}
} // namespace LA
#ifdef CFG_DEBUG_EIGEN
class EigenVector3f : public Eigen::Vector3f {
public:
inline EigenVector3f() : Eigen::Vector3f() {}
inline EigenVector3f(const Eigen::Vector3f &e_v) : Eigen::Vector3f(e_v) {}
inline EigenVector3f(const float *v) : Eigen::Vector3f(v[0], v[1], v[2]) {}
inline EigenVector3f(const LA::AlignedVector3f &v)
: Eigen::Vector3f(v.v0(), v.v1(), v.v2()) { }
inline EigenVector3f(const LA::Vector3f &v) : Eigen::Vector3f(v.v0(), v.v1(), v.v2()) {}
inline EigenVector3f(const float v0, const float v1, const float v2) : Eigen::Vector3f(v0, v1,
v2) {}
inline EigenVector3f(const EigenVector2f &e_v0, const float v1)
: Eigen::Vector3f(e_v0(0), e_v0(1), v1) { }
inline void operator = (const Eigen::Vector3f &e_v) { *((Eigen::Vector3f *) this) = e_v; }
inline LA::AlignedVector3f GetAlignedVector3f() const {
LA::AlignedVector3f v;
const Eigen::Vector3f &e_v = *this;
v.v0() = e_v(0);
v.v1() = e_v(1);
v.v2() = e_v(2);
return v;
}
inline LA::Vector3f GetVector3f() const {
LA::Vector3f v;
const Eigen::Vector3f &e_v = *this;
v.v0() = e_v(0);
v.v1() = e_v(1);
v.v2() = e_v(2);
return v;
}
inline EigenVector2f Project() const {
EigenVector2f e_x;
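    // Compute 1/z once into e_x.y(), use it to scale x, then overwrite it
    // with the scaled y; this saves a temporary and a second division.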
e_x.y() = 1.0f / z();
e_x.x() = x() * e_x.y();
e_x.y() = y() * e_x.y();
return e_x;
}
inline float SquaredLength() const { return GetAlignedVector3f().SquaredLength(); }
inline void Print(const bool e = false) const { GetAlignedVector3f().Print(e); }
static inline EigenVector3f GetRandom(const float vMax) {
return EigenVector3f(LA::AlignedVector3f::GetRandom(vMax));
}
inline bool AssertEqual(const LA::AlignedVector3f &v,
const int verbose = 1, const std::string str = "",
const float epsAbs = 0.0f, const float epsRel = 0.0f) const {
return GetAlignedVector3f().AssertEqual(v, verbose, str, epsAbs, epsRel);
}
inline bool AssertEqual(const LA::Vector3f &v, const int verbose = 1, const std::string str = "",
const float epsAbs = 0.0f, const float epsRel = 0.0f) const {
return GetVector3f().AssertEqual(v, verbose, str, epsAbs, epsRel);
}
inline bool AssertEqual(const EigenVector3f &e_v,
const int verbose = 1, const std::string str = "",
const float epsAbs = 0.0f, const float epsRel = 0.0f) const {
return AssertEqual(e_v.GetAlignedVector3f(), verbose, str, epsAbs, epsRel);
}
static inline EigenVector3f Zero() { return EigenVector3f(Eigen::Vector3f::Zero()); }
};
#endif
#endif // LINEARALGEBRA_VECTOR3_H_
|
const purgecss = require('@fullhuman/postcss-purgecss')({
// Specify the paths to all of the template files in your project
content: [
'./src/**/*.js',
// etc.
],
// Include any special characters you're using in this regular expression
defaultExtractor: content => content.match(/[\w-/:]+(?<!:)/g) || []
})
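// A sketch of what the extractor keeps: for content like 'class="md:flex"',
// the regex yields tokens such as 'class' and 'md:flex', while the (?<!:)
// lookbehind drops any token that ends in ':'.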
module.exports = {
plugins: [
require('tailwindcss'),
require('autoprefixer'),
    purgecss
]
}
|
import { TriggerAnimationStep } from './TriggerStep';
import * as tools from '../../../tools/tools';
import makeFigure from '../../../__mocks__/makeFigure';
import {
Point,
} from '../../../tools/g2';
tools.isTouchDevice = jest.fn();
jest.mock('../../Gesture');
jest.mock('../../webgl/webgl');
jest.mock('../../DrawContext2D');
describe('Animation Trigger', () => {
let elem1;
let elem2;
let trigger1;
let trigger2;
let triggerFlag1;
let triggerFlag2;
beforeEach(() => {
const figure = makeFigure();
elem1 = figure.collections.line();
elem1.setPosition(new Point(0, 0));
elem2 = figure.collections.line();
elem2.setPosition(new Point(0, 0));
triggerFlag1 = 0;
triggerFlag2 = 0;
trigger1 = () => { triggerFlag1 = 1; };
trigger2 = () => { triggerFlag2 = 1; };
});
test('Instantiation', () => {
const onFinish = () => {};
const completeOnCancel = false;
const step = new TriggerAnimationStep({
onFinish,
completeOnCancel,
callback: trigger1,
});
expect(step.onFinish).toBe(onFinish);
expect(step.callback).toBe(trigger1);
expect(step.completeOnCancel).toBe(completeOnCancel);
});
test('Delay then move', () => {
elem1.animations.new()
.delay(1)
.trigger(trigger1)
.position({ target: new Point(1, 1), duration: 1, progression: 'linear' })
.trigger(trigger2)
.delay(1)
.start();
elem1.animations.nextFrame(0);
expect(triggerFlag1).toBe(0);
expect(triggerFlag2).toBe(0);
elem1.animations.nextFrame(0.5);
expect(triggerFlag1).toBe(0);
expect(triggerFlag2).toBe(0);
elem1.animations.nextFrame(1);
expect(triggerFlag1).toBe(1);
expect(triggerFlag2).toBe(0);
// elem1.animations.nextFrame(1.01);
// expect(triggerFlag1).toBe(1);
// expect(triggerFlag2).toBe(0);
});
test('Delay in trigger', () => {
elem1.animations.new()
.trigger({ callback: trigger1, delay: 1 })
.start();
elem1.animations.nextFrame(0);
expect(triggerFlag1).toBe(0);
elem1.animations.nextFrame(0.5);
expect(triggerFlag1).toBe(0);
elem1.animations.nextFrame(1);
expect(triggerFlag1).toBe(1);
});
test('Zero duration', () => {
expect(triggerFlag1).toBe(0);
expect(triggerFlag2).toBe(0);
elem1.animations.new()
.trigger({ callback: trigger1, duration: 0 })
.trigger({ callback: trigger2, duration: 0 })
.start();
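    // Triggers with zero duration fire synchronously inside start(), before any frame.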
expect(triggerFlag1).toBe(1);
expect(triggerFlag2).toBe(1);
});
test('Some duration', () => {
expect(triggerFlag1).toBe(0);
expect(triggerFlag2).toBe(0);
elem1.animations.new()
.trigger({ callback: trigger1, duration: 2 })
.trigger({ callback: trigger2, duration: 0 })
.start();
expect(triggerFlag1).toBe(0);
expect(triggerFlag2).toBe(0);
elem1.animations.nextFrame(0);
expect(triggerFlag1).toBe(1);
expect(triggerFlag2).toBe(0);
elem1.animations.nextFrame(0.5);
expect(triggerFlag1).toBe(1);
expect(triggerFlag2).toBe(0);
elem1.animations.nextFrame(1.5);
expect(triggerFlag1).toBe(1);
expect(triggerFlag2).toBe(0);
elem1.animations.nextFrame(2);
expect(triggerFlag1).toBe(1);
expect(triggerFlag2).toBe(1);
});
test('Some More duration', () => {
expect(triggerFlag1).toBe(0);
expect(triggerFlag2).toBe(0);
elem1.animations.new()
.trigger({ callback: () => {}, duration: 1 })
.trigger({ callback: trigger1, duration: 2 })
.trigger({ callback: trigger2, duration: 0 })
.start();
expect(triggerFlag1).toBe(0);
expect(triggerFlag2).toBe(0);
elem1.animations.nextFrame(0);
expect(triggerFlag1).toBe(0);
expect(triggerFlag2).toBe(0);
elem1.animations.nextFrame(0.5);
expect(triggerFlag1).toBe(0);
expect(triggerFlag2).toBe(0);
elem1.animations.nextFrame(1);
expect(triggerFlag1).toBe(1);
expect(triggerFlag2).toBe(0);
elem1.animations.nextFrame(3);
expect(triggerFlag1).toBe(1);
expect(triggerFlag2).toBe(1);
});
});
|
exports.run = async (client, message, args, level) => {// eslint-disable-line no-unused-vars
try {
if (!args || args.length < 1) return message.reply('You must provide a command to load!');
let response;
response = client.loadCommand(args[0]);
if (response) return message.reply(`Error Loading: ${response}`);
client.logger.log(`Loading Command: ${args[0]}`);
message.reply(`The command \`${args[0]}\` has been loaded`);
} catch (err) {
    message.channel.send('There was an error!\n' + err).catch(() => {}); // ignore send failures
}
};
exports.conf = {
enabled: true,
guildOnly: false,
aliases: [],
permLevel: 'Bot Moderator'
};
exports.help = {
name: 'load',
category: 'System',
description: 'Loads a command',
usage: 'load [command]'
}; |
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django.contrib import messages
from django.shortcuts import render, redirect
from django.urls import reverse
from django.views.generic import TemplateView, DetailView, ListView
from project_core.forms.funding_instrument import FundingInstrumentForm
from project_core.models import FundingInstrument
from variable_templates.forms.template_variables import TemplateVariableItemFormSet
from variable_templates.utils import get_template_variables_for_funding_instrument
FUNDING_INSTRUMENT_FORM_NAME = 'funding_instrument_form'
TEMPLATE_VARIABLES_FORM_NAME = 'template_variables_form'
class FundingInstrumentList(ListView):
model = FundingInstrument
context_object_name = 'funding_instruments'
template_name = 'logged/funding_instrument-list.tmpl'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({'active_section': 'calls',
'active_subsection': 'funding-instrument-list',
'sidebar_template': 'logged/_sidebar-calls.tmpl'})
context['breadcrumb'] = [{'name': 'Funding instruments'}]
return context
class FundingInstrumentMixin:
fields = ['long_name', 'short_name', 'description']
@property
def success_msg(self):
return NotImplemented
class AddCrispySubmitButtonMixin:
def get_form(self, form_class=None):
form = super().get_form(form_class)
form.helper = FormHelper()
form.helper.add_input(Submit('submit', 'Submit'))
return form
class FundingInstrumentView(TemplateView):
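    """Create or edit a funding instrument.

    A single view serves both flows: when 'pk' is present in the URL kwargs
    the existing FundingInstrument is edited, otherwise a new one is created.
    """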
@staticmethod
def _cancel_url(kwargs):
if 'pk' in kwargs:
return reverse('logged-funding-instrument-detail', kwargs={'pk': kwargs['pk']})
else:
return reverse('logged-funding-instrument-list')
def get(self, request, *args, **kwargs):
context = super().get_context_data(**kwargs)
context['cancel_url'] = FundingInstrumentView._cancel_url(kwargs)
if 'pk' in kwargs:
funding_instrument = FundingInstrument.objects.get(pk=kwargs['pk'])
context[FUNDING_INSTRUMENT_FORM_NAME] = FundingInstrumentForm(instance=funding_instrument,
prefix=FUNDING_INSTRUMENT_FORM_NAME)
context[TEMPLATE_VARIABLES_FORM_NAME] = TemplateVariableItemFormSet(funding_instrument=funding_instrument,
prefix=TEMPLATE_VARIABLES_FORM_NAME)
context['action_url'] = reverse('logged-funding-instrument-update', kwargs={'pk': kwargs['pk']})
context.update({'active_section': 'calls',
'active_subsection': 'funding-instrument-list',
'sidebar_template': 'logged/_sidebar-calls.tmpl'})
breadcrumb = f'Edit ({funding_instrument.short_name})'
context['action_submit_button'] = 'Save Funding Instrument'
context['action_title'] = 'Edit'
else:
context[FUNDING_INSTRUMENT_FORM_NAME] = FundingInstrumentForm(prefix=FUNDING_INSTRUMENT_FORM_NAME)
context[TEMPLATE_VARIABLES_FORM_NAME] = TemplateVariableItemFormSet(prefix=TEMPLATE_VARIABLES_FORM_NAME)
context.update({'active_section': 'calls',
'active_subsection': 'funding-instrument-add',
'sidebar_template': 'logged/_sidebar-calls.tmpl'})
breadcrumb = 'Create'
context['action_submit_button'] = 'Create Funding Instrument'
context['action_title'] = 'Create'
context['breadcrumb'] = [{'name': 'Funding instruments', 'url': reverse('logged-funding-instrument-list')},
{'name': breadcrumb}]
return render(request, 'logged/funding_instrument-form.tmpl', context)
def post(self, request, *args, **kwargs):
context = super().get_context_data(**kwargs)
context['cancel_url'] = FundingInstrumentView._cancel_url(kwargs)
if 'pk' in kwargs:
            funding_instrument = FundingInstrument.objects.get(pk=kwargs['pk'])
funding_instrument_form = FundingInstrumentForm(request.POST, instance=funding_instrument,
prefix=FUNDING_INSTRUMENT_FORM_NAME)
context['action_submit_button'] = 'Save Funding Instrument'
context['action_title'] = 'Edit'
else:
funding_instrument_form = FundingInstrumentForm(request.POST, prefix=FUNDING_INSTRUMENT_FORM_NAME)
context['action_submit_button'] = 'Create Funding Instrument'
context['action_title'] = 'Create'
template_variables_form = TemplateVariableItemFormSet(request.POST, prefix=TEMPLATE_VARIABLES_FORM_NAME)
if funding_instrument_form.is_valid() and template_variables_form.is_valid():
funding_instrument = funding_instrument_form.save()
template_variables_form.save_into_funding_instrument(funding_instrument)
messages.success(request, 'Funding instrument has been saved')
return redirect(reverse('logged-funding-instrument-detail', kwargs={'pk': funding_instrument.pk}))
context[FUNDING_INSTRUMENT_FORM_NAME] = funding_instrument_form
context[TEMPLATE_VARIABLES_FORM_NAME] = template_variables_form
        if 'pk' in kwargs:
            context['action_url'] = reverse('logged-funding-instrument-update', kwargs={'pk': kwargs['pk']})
context.update({'active_section': 'calls',
'active_subsection': 'funding-instrument-list',
'sidebar_template': 'logged/_sidebar-calls.tmpl'})
messages.error(request, 'Funding Instrument not saved. Please correct the errors in the form and try again.')
return render(request, 'logged/funding_instrument-form.tmpl', context)
class FundingInstrumentDetailView(FundingInstrumentMixin, DetailView):
template_name = 'logged/funding_instrument-detail.tmpl'
context_object_name = 'funding_instrument'
model = FundingInstrument
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({'active_section': 'calls',
'active_subsection': 'funding-instrument-list',
'sidebar_template': 'logged/_sidebar-calls.tmpl'})
context['template_variables'] = get_template_variables_for_funding_instrument(kwargs['object'])
context['breadcrumb'] = [{'name': 'Funding instruments', 'url': reverse('logged-funding-instrument-list')},
{'name': f'Details ({context["funding_instrument"].short_name})'}]
return context
|
from videos_search.src import video_url
from videos_search.src.firebase_database import Database
from videos_search.settings import MAX_SCRAPED_VIDEOS
class VideoManager:
_db = Database("videos")
    def create_video(self, save, *args, **kwargs):
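        # Allocate with object.__new__ and call __init__ by hand: going
        # through Video(...) would re-enter Video.__new__ (which delegates
        # back to this manager) and always save; 'save' keeps that explicit.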
video_obj = object.__new__(Video)
video_obj.__init__(*args, **kwargs)
if save:
video_obj.save(self._db)
return video_obj
def all(self):
objects_dict = self._db.read()
for obj_dict in objects_dict:
yield self.create_video(False, **obj_dict)
def filter(self):
pass
class Video:
objects = VideoManager()
def __init__(self, url, page_title, page_url, addition_date, keywords,
                 video_info, likes=0, deslikes=0, visualizations=0, number_querys=0):
self.url = url
self.page_title = page_title
self.page_url = page_url
self.addition_date = addition_date
self.keywords = keywords
self.video_info = video_info
self.likes = likes
self.deslikes = deslikes
self.visualizations = visualizations
self.number_querys = number_querys
def save(self, db):
db.insert(self.page_title, self.json())
def json(self):
return {
"url" : self.url,
"page_title" : self.page_title,
"page_url" : self.page_url,
"addition_date" : self.addition_date,
"keywords" : self.keywords,
"video_info" : self.video_info,
"likes" : self.likes,
"deslikes" : self.deslikes,
"visualizations" : self.visualizations,
"number_querys" : self.number_querys
}
def __new__(cls, *args, **kwargs):
return cls.objects.create_video(True, *args, **kwargs)
@classmethod
    def search_videos(cls, search_text, max_videos=MAX_SCRAPED_VIDEOS):
        for video_dict in video_url.get_videos(search_text, max_videos):
yield Video(**video_dict)
if __name__ == "__main__":
test = Video("https://video.google.com", "Overlord ep 3", "https://google.com","1/1/2013",
["overlord","ep", "1"], {"duration":"10min","title":"overlord ep 1"})
print(len(list(test.objects.all()))) |
from __future__ import print_function
import base64
import collections
try:
    from collections import abc as collections_abc  # Python 3
except ImportError:  # Python 2
    collections_abc = collections
import contextlib
import copy
import errno
import getpass
import grp
import hashlib
import itertools
import json
import logging
import mmap
import os
import platform
import pprint
import psutil
import pwd
import re
import shlex
import shutil
import socket
import subprocess
import sys
import tempfile
import time
from . import six
from .exceptions import IrodsError, IrodsWarning
from . import execute
from . import paths
# get the fully qualified domain name
#(no, really, getfqdn() is insufficient)
def get_hostname():
return socket.getaddrinfo(
socket.gethostname(), 0, 0, 0, 0, socket.AI_CANONNAME)[0][3]
indent = execute.indent
safe_shlex_split_for_2_6 = execute.safe_shlex_split_for_2_6
execute_command_nonblocking = execute.execute_command_nonblocking
execute_command_timeout = execute.execute_command_timeout
execute_command_permissive = execute.execute_command_permissive
execute_command = execute.execute_command
def get_pids_executing_binary_file(binary_file_path):
def get_exe(process):
if psutil.version_info >= (2,0):
return process.exe()
return process.exe
abspath = os.path.abspath(binary_file_path)
pids = []
for p in psutil.process_iter():
try:
if abspath == get_exe(p):
pids.append(p.pid)
except (psutil.NoSuchProcess, psutil.AccessDenied):
pass
return pids
def kill_pid(pid):
p = psutil.Process(pid)
p.suspend()
p.terminate()
p.kill()
def find_shared_object(so_name, regex=False, additional_directories=[]):
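    # Search order: LD_PRELOAD entries, LD_LIBRARY_PATH directories, the
    # directories reported by 'ldconfig -vNX', and finally a recursive walk
    # of any additional_directories plus /usr/lib/, skipping duplicates.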
paths = []
if regex:
so_regex = re.compile(so_name)
if 'LD_PRELOAD' in os.environ:
for path in os.environ['LD_PRELOAD'].split(':'):
if path not in paths and os.path.exists(path) and (
(regex and so_regex.match(os.path.basename(path))) or
(not regex and os.path.basename(path) == so_name)):
paths.append(path)
if 'LD_LIBRARY_PATH' in os.environ:
for directory in os.environ['LD_LIBRARY_PATH'].split(':'):
if regex and os.path.exists(directory):
for name in os.listdir(directory):
if so_regex.match(name) and os.path.join(directory, name) not in paths:
paths.append(os.path.join(directory, name))
elif os.path.exists(os.path.join(directory, so_name)) and os.path.join(directory, so_name) not in paths:
paths.append(os.path.join(directory, so_name))
env = os.environ.copy()
env['PATH'] = ':'.join([env['PATH'], '/sbin'])
out, _ = execute_command(['ldconfig', '-vNX'], env=env)
for directory in [d.rstrip(':') for d in out.splitlines() if d and d[0] == '/']:
if regex and os.path.exists(directory):
for name in os.listdir(directory):
if so_regex.match(name) and os.path.join(directory, name) not in paths:
paths.append(os.path.join(directory, name))
elif os.path.exists(os.path.join(directory, so_name)) and os.path.join(directory, so_name) not in paths:
paths.append(os.path.join(directory, so_name))
for (directory, _, names) in itertools.chain(*[os.walk(d) for d in itertools.chain(additional_directories, ['/usr/lib/'])]):
if regex:
for name in names:
if so_regex.match(os.path.basename(name)) and os.path.join(directory, name) not in paths:
paths.append(os.path.join(directory, name))
elif os.path.exists(os.path.join(directory, so_name)) and os.path.join(directory, so_name) not in paths:
paths.append(os.path.join(directory, so_name))
return paths
def file_digest(filename, hash_type, encoding='hex'):
block_size = pow(2, 20)
hasher = hashlib.new(hash_type)
with open(filename, 'rb') as f:
while True:
data = f.read(block_size)
if not data:
break
hasher.update(data)
if encoding == 'hex':
return hasher.hexdigest()
if encoding == 'base64':
return base64.b64encode(hasher.digest()).decode()
if encoding is None or encoding == 'none':
return hasher.digest()
raise IrodsError('Unknown encoding %s for digest' % (encoding))
def re_shm_exists():
possible_shm_locations = ['/var/run/shm', '/dev/shm']
for l in possible_shm_locations:
try:
files = os.listdir(l)
for f in files:
if 'irods' in f.lower():
return os.path.join(l, f)
except OSError:
pass
return False
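# Python 2 compatibility hooks for json.load: re-encode unicode keys and
# values to ascii byte strings; under Python 3, six.text_type is str, so
# these are effectively no-ops.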
def json_object_hook_list(l):
rv = []
for i in l:
if not isinstance(i, str) and isinstance(i, six.text_type):
i = i.encode('ascii')
elif isinstance(i, list):
i = json_object_hook_list(i)
rv.append(i)
return rv
def json_object_hook_dict(d):
rv = {}
for k, v in d.items():
if not isinstance(k, str) and isinstance(k, six.text_type):
k = k.encode('ascii')
if not isinstance(v, str) and isinstance(v, six.text_type):
v = v.encode('ascii')
elif isinstance(v, list):
v = json_object_hook_list(v)
rv[k] = v
return rv
def open_and_load_json(filename):
with open(filename, 'rt') as f:
return json.load(f, object_hook=json_object_hook_dict)
def update_json_file_from_dict(filename, update_dict):
env = open_and_load_json(filename)
env.update(update_dict)
with open(filename, 'wt') as f:
json.dump(env, f, indent=4)
def create_directory_of_small_files(directory_name_suffix, file_count):
if not os.path.exists(directory_name_suffix):
os.mkdir(directory_name_suffix)
for i in range(file_count):
with open('{0}/{1}'.format(directory_name_suffix, i), 'wt') as f:
print("iglkg3fqfhwpwpo-" + "A" * i, file=f, end='')
def create_local_testfile(filename):
filepath = os.path.abspath(filename)
with open(filepath, 'wt') as f:
print('TESTFILE -- [' + filepath + ']', file=f, end='')
return filepath
def touch(fname, times=None):
with open(fname, 'at'):
os.utime(fname, times)
def cat(fname, string):
with open(fname, 'at') as f:
print(string, file=f, end='')
def make_file(f_name, f_size, contents='zero', block_size_in_bytes=1000):
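    # For example, make_file('junk.bin', 2500) runs dd twice: two 1000-byte
    # blocks from /dev/zero, then the remaining 500 bytes appended.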
if contents not in ['arbitrary', 'random', 'zero']:
raise AssertionError
if contents == 'arbitrary' or f_size == 0:
execute_command(['truncate', '-s', str(f_size), f_name])
return
source = {'zero': '/dev/zero',
'random': '/dev/urandom'}[contents]
    count = f_size // block_size_in_bytes  # integer division so dd gets a whole block count
if count > 0:
execute_command(['dd', 'if='+source, 'of='+f_name, 'count='+str(count), 'bs='+str(block_size_in_bytes)])
leftover_size = f_size % block_size_in_bytes
if leftover_size > 0:
execute_command(['dd', 'if='+source, 'of='+f_name, 'count=1', 'bs='+str(leftover_size), 'oflag=append', 'conv=notrunc'])
else:
execute_command(['dd', 'if='+source, 'of='+f_name, 'count=1', 'bs='+str(f_size)])
def make_dir_p(directory):
try:
os.makedirs(directory)
except OSError as e:
if e.errno == errno.EEXIST and os.path.isdir(directory):
pass
else:
raise
def make_large_local_tmp_dir(dir_name, file_count, file_size):
os.makedirs(dir_name)
for i in range(file_count):
make_file(os.path.join(dir_name, "junk" + str(i).zfill(4)),
file_size)
local_files = os.listdir(dir_name)
if len(local_files) != file_count:
raise AssertionError("dd loop did not make all " + str(file_count) + " files")
return local_files
def make_deep_local_tmp_dir(root_name, depth=10, files_per_level=50, file_size=100):
# output
directories = {}
current_dir_name = root_name
for d in range(depth):
# make subdir and files
files = make_large_local_tmp_dir(current_dir_name, files_per_level, file_size)
# add to output
directories[current_dir_name] = files
# next level down
current_dir_name = os.path.join(current_dir_name, 'sub'+str(d))
return directories
def files_in_dir(path):
for file in os.listdir(path):
if os.path.isfile(os.path.join(path, file)):
yield file
def get_user_env(user):
out, _ = execute_command(['su', '-', user, '-c',
'python -c "from __future__ import print_function; import os; import json; print(json.dumps(dict(os.environ)))"'])
return json.loads(out)
def switch_user(user, group=None):
user_env = get_user_env(user)
pw_record = pwd.getpwnam(user)
os.environ['HOME'] = pw_record.pw_dir
os.environ['LOGNAME'] = pw_record.pw_name
os.environ['USER'] = pw_record.pw_name
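    # Drop the group before the user: once setuid() gives up root, the
    # process would no longer be allowed to call setgid().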
os.setgid(pw_record.pw_gid if group is None else grp.getgrnam(group).gr_gid)
os.setuid(pw_record.pw_uid)
@contextlib.contextmanager
def file_backed_up(filename):
with tempfile.NamedTemporaryFile(prefix=os.path.basename(filename)) as f:
shutil.copyfile(filename, f.name)
try:
yield filename
finally:
shutil.copyfile(f.name, filename)
@contextlib.contextmanager
def directory_deleter(dirname):
try:
yield dirname
finally:
shutil.rmtree(dirname)
def is_jsonschema_installed():
try:
import jsonschema
return True
except ImportError:
return False
def prepend_string_to_file(string, filename):
with open(filename, 'r') as f:
contents = f.read()
with open(filename, 'wt') as f:
print(string, file=f, end='')
print(contents, file=f, end='')
def make_environment_dict(username, hostname, zone_name, use_ssl=True):
irods_home = os.path.join('/', zone_name, 'home', username)
environment = {
'irods_host': hostname,
'irods_port': 1247,
'irods_default_resource': 'demoResc',
'irods_home': irods_home,
'irods_cwd': irods_home,
'irods_user_name': username,
'irods_zone_name': zone_name,
'irods_client_server_negotiation': 'request_server_negotiation',
'irods_client_server_policy': 'CS_NEG_REFUSE',
'irods_encryption_key_size': 32,
'irods_encryption_salt_size': 8,
'irods_encryption_num_hash_rounds': 16,
'irods_encryption_algorithm': 'AES-256-CBC',
'irods_default_hash_scheme': 'SHA256',
'irods_maximum_size_for_single_buffer_in_megabytes': 32,
'irods_default_number_of_transfer_threads': 4,
'irods_maximum_number_of_transfer_threads': 64,
'irods_transfer_buffer_size_for_parallel_transfer_in_megabytes': 4,
'irods_connection_pool_refresh_time_in_seconds': 300
}
if use_ssl:
environment.update({
'irods_client_server_policy': 'CS_NEG_REQUIRE',
'irods_ssl_verify_server': 'cert',
'irods_ssl_ca_certificate_file': '/etc/irods/server.crt',
})
return environment
def get_os_distribution():
return platform.linux_distribution()[0]
def get_os_distribution_version_major():
return platform.linux_distribution()[1].split('.')[0]
def get_object_names_from_entries(ils_out):
if isinstance(ils_out, six.string_types):
ils_out = ils_out.strip().split()
for item in ils_out:
# strip collections
if not item.startswith('C- /'):
yield item
def get_file_size_by_path(path):
return os.stat(path).st_size
def write_to_log(log_path, message):
pass
def count_occurrences_of_regexp_in_log(log_path, pattern, start_index=0):
occurrences = []
target = None
    if isinstance(pattern, (tuple, list)):
        pattern = [pattern[0].encode('ascii')] + list(pattern[1:])
    elif isinstance(pattern, type(re.compile(''))):  # portable check; re._pattern_type was removed in Python 3.7
        target = pattern
    else:
        pattern = [pattern.encode('ascii')]  # assume string-like
    if target is None:
        target = re.compile(*pattern)
with open(log_path) as f:
m = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
occurrences.extend(j for j in target.finditer(m[start_index:]))
m.close()
return occurrences
def count_occurrences_of_string_in_log(log_path, string, start_index=0):
with open(log_path) as f:
m = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
n = 0
target = string.encode('ascii')
i = m.find(target, start_index)
while i != -1:
n += 1
i = m.find(target, i + 1)
m.close()
return n
def version_string_to_tuple(version_string):
return tuple(map(int, version_string.split('.')))
def hostname_resolves_to_local_address(hostname):
_, err, ret = execute_command_permissive([
os.path.join( paths.server_bin_directory(), 'hostname_resolves_to_local_address'),
hostname])
if ret == 0:
return True
elif ret == 1:
return False
raise IrodsError('Error encountered in hostname_resolves_to_local_address for hostname [{0}]:\n{1}'.format(hostname, err))
def get_header(message):
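    # For example, get_header('hello') returns (with surrounding blank lines):
    # +-------+
    # | hello |
    # +-------+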
lines = [l.strip() for l in message.splitlines()]
length = 0
for line in lines:
length = max(length, len(line))
edge = '+' + '-' * (length + 2) + '+'
format_string = '{0:<' + str(length) + '}'
header_lines = ['', edge]
for line in lines:
header_lines.append('| ' + format_string.format(line) + ' |')
header_lines.append(edge)
header_lines.append('')
return '\n'.join(header_lines)
def nested_update(d, u):
for k, v in u.items():
        d[k] = nested_update(d.get(k, {}), v) if isinstance(v, collections_abc.Mapping) else u[k]
return d
def prompt(*args, **kwargs):
echo = kwargs.get('echo', True)
end = kwargs.get('end', ': ')
input_filter = kwargs.get('input_filter', lambda x: x)
l = logging.getLogger(__name__)
message = ''.join([args[0] % tuple(args[1:]), end])
while True:
l.debug(message)
if echo:
print(message, end='')
sys.stdout.flush()
user_input = sys.stdin.readline().rstrip('\n')
l.debug('User input: %s', user_input)
else:
if sys.stdin.isatty():
user_input = getpass.getpass(message)
else:
print('Warning: Cannot control echo output on the terminal (stdin is not a tty). Input may be echoed.', file=sys.stderr)
user_input = sys.stdin.readline().rstrip('\n')
if not sys.stdin.isatty():
print('\n', end='')
        try:
return input_filter(user_input)
except InputFilterError as e:
l.debug('Error encountered in user input:', exc_info=sys.exc_info())
l.warning(e.args[0] if len(e.args) else "User input error.")
def default_prompt(*args, **kwargs):
l = logging.getLogger(__name__)
default = kwargs.pop('default', [])
input_filter = kwargs.pop('input_filter', lambda x: x)
while True:
if default:
if len(default) == 1:
message = ''.join([
args[0] % tuple(args[1:]),
' [%s]' % (default[0])])
user_input = prompt(message, **kwargs)
if not user_input:
user_input = default[0]
else:
message = ''.join([
args[0] % tuple(args[1:]), ':\n',
'\n'.join(['%d. %s' % (i + 1, default[i]) for i in range(0, len(default))]),
'\nPlease select a number or choose 0 to enter a new value'])
user_input = default_prompt(message, default=[1], **kwargs)
try:
i = int(user_input) - 1
except (TypeError, ValueError):
i = -1
if i in range(0, len(default)):
user_input = default[i]
else:
user_input = prompt('New value', **kwargs)
else:
user_input = prompt(*args, **kwargs)
        try:
return input_filter(user_input)
except InputFilterError as e:
l.debug('Error encountered in user input:', exc_info=sys.exc_info())
l.warning(e.args[0] if len(e.args) else "User input error.")
def int_filter(field='Input'):
def f(x):
try:
return int(x)
except ValueError as e:
six.reraise(InputFilterError, InputFilterError('%s must be an integer.' % (field)), sys.exc_info()[2])
return f
def set_filter(set_, field='Input'):
def f(x):
if x in set_:
return x
raise InputFilterError('%s must be chosen from %s' % (field, list(set_)))
return f
def character_count_filter(minimum=None, maximum=None, field='Input'):
def f(x):
if (minimum is None or len(x) >= minimum) and (maximum is None or len(x) <= maximum):
return x
if minimum is not None and minimum < 0:
new_minimum = 0
else:
new_minimum = minimum
if new_minimum is not None and maximum is not None:
if new_minimum == maximum:
raise InputFilterError('%s must be exactly %s character%s in length.' % (field, maximum, '' if maximum == 1 else 's'))
if new_minimum < maximum:
raise InputFilterError('%s must be between %s and %s characters in length.' % (field, new_minimum, maximum))
raise IrodsError('Minimum character count %s must not be greater than maximum character count %s.' % (new_minimum, maximum))
if new_minimum is not None:
raise InputFilterError('%s must be at least %s character%s in length.' % (field, new_minimum, '' if new_minimum == 1 else 's'))
raise InputFilterError('%s may be at most %s character%s in length.' % (field, maximum, '' if maximum == 1 else 's'))
return f
class InputFilterError(Exception):
pass
class callback_on_change_dict(dict):
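    """dict subclass that fires self.callback after every mutating operation."""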
def __init__(self, *args, **kwargs):
if args:
self.callback = args[0]
args = args[1:]
else:
self.callback = lambda: None
super(callback_on_change_dict, self).__init__(*args, **kwargs)
def __setitem__(self, *args, **kwargs):
super(callback_on_change_dict, self).__setitem__(*args, **kwargs)
self.callback()
def __delitem__(self, *args, **kwargs):
super(callback_on_change_dict, self).__delitem__(*args, **kwargs)
self.callback()
def update(self, *args, **kwargs):
super(callback_on_change_dict, self).update(*args, **kwargs)
self.callback()
def clear(self, *args, **kwargs):
super(callback_on_change_dict, self).clear(*args, **kwargs)
self.callback()
    def pop(self, *args, **kwargs):
        rv = super(callback_on_change_dict, self).pop(*args, **kwargs)
        self.callback()
        return rv
    def popitem(self, *args, **kwargs):
        rv = super(callback_on_change_dict, self).popitem(*args, **kwargs)
        self.callback()
        return rv
    def setdefault(self, *args, **kwargs):
        rv = super(callback_on_change_dict, self).setdefault(*args, **kwargs)
        self.callback()
        return rv
def delayAssert(a, interval=1, maxrep=100):
for _ in range(maxrep):
time.sleep(interval) # wait for test to fire
if a():
break
if not a():
raise AssertionError
def log_message_occurrences_equals_count(msg, count=1, server_log_path=None, start_index=0):
if server_log_path is None:
server_log_path=paths.server_log_path()
return count == count_occurrences_of_string_in_log(server_log_path, msg, start_index)
def log_message_occurrences_greater_than_count(msg, count=1, server_log_path=None, start_index=0):
if server_log_path is None:
server_log_path=paths.server_log_path()
return count_occurrences_of_string_in_log(server_log_path, msg, start_index) > count
def log_message_occurrences_fewer_than_count(msg, count=1, server_log_path=None, start_index=0):
if server_log_path is None:
server_log_path=paths.server_log_path()
return count_occurrences_of_string_in_log(server_log_path, msg, start_index) < count
def log_message_occurrences_is_one_of_list_of_counts(msg, expected_value_list=None, server_log_path=None, start_index=0):
if server_log_path is None:
server_log_path=paths.server_log_path()
return count_occurrences_of_string_in_log(server_log_path, msg, start_index) in expected_value_list
def metadata_attr_with_value_exists(session, attr, value):
print('looking for attr:[{0}] value:[{1}]'.format(attr, value))
out, _, _ = session.run_icommand(['iquest', '%s',
'"select META_DATA_ATTR_VALUE where META_DATA_ATTR_NAME = \'{}\'"'.format(attr)])
print(out)
return value in out
def create_ufs_resource(resource_name, user, hostname=None):
vault_name = resource_name + '_vault'
vault_directory = os.path.join(user.local_session_dir, vault_name)
if not hostname:
hostname = socket.gethostname()
vault = hostname + ':' + vault_directory
user.assert_icommand(['iadmin', 'mkresc', resource_name, 'unixfilesystem', vault], 'STDOUT', [resource_name])
def create_replication_resource(resource_name, user):
user.assert_icommand(['iadmin', 'mkresc', resource_name, 'replication'], 'STDOUT', [resource_name])
def create_passthru_resource(resource_name, user):
user.assert_icommand(['iadmin', 'mkresc', resource_name, 'passthru'], 'STDOUT', [resource_name])
def remove_resource(resource_name, user):
user.assert_icommand(['iadmin', 'rmresc', resource_name])
def add_child_resource(parent_resource_name, child_resource_name, user):
user.assert_icommand(['iadmin', 'addchildtoresc', parent_resource_name, child_resource_name])
def remove_child_resource(parent_resource_name, child_resource_name, user):
user.assert_icommand(['iadmin', 'rmchildfromresc', parent_resource_name, child_resource_name])
|
/*
* Copyright (c) 2016 Nordic Semiconductor ASA
* Copyright (c) 2016 Vinayak Kariappa Chettimada
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <soc.h>
#include <errno.h>
#include <atomic.h>
#include <device.h>
#include <clock_control.h>
#include <misc/__assert.h>
#include "nrf_clock.h"
#if defined(CONFIG_USB) && defined(CONFIG_SOC_NRF52840)
#include <nrf_power.h>
#include <drivers/clock_control/nrf5_clock_control.h>
#endif
static u8_t m16src_ref;
static u8_t m16src_grd;
static u8_t k32src_initialized;
static int _m16src_start(struct device *dev, clock_control_subsys_t sub_system)
{
bool blocking;
u32_t imask;
u32_t stat;
/* If the clock is already started then just increment refcount.
* If the start and stop don't happen in pairs, a rollover will
	 * be caught, in which case the system asserts.
*/
/* Test for reference increment from zero and resource guard not taken.
*/
imask = irq_lock();
if (m16src_ref++) {
irq_unlock(imask);
goto hf_already_started;
}
if (m16src_grd) {
m16src_ref--;
irq_unlock(imask);
return -EAGAIN;
}
m16src_grd = 1;
irq_unlock(imask);
/* If blocking then spin-wait in CPU sleep until 16MHz clock settles. */
blocking = POINTER_TO_UINT(sub_system);
if (blocking) {
u32_t intenset;
irq_disable(POWER_CLOCK_IRQn);
NRF_CLOCK->EVENTS_HFCLKSTARTED = 0;
intenset = NRF_CLOCK->INTENSET;
nrf_clock_int_enable(NRF_CLOCK_INT_HF_STARTED_MASK);
nrf_clock_task_trigger(NRF_CLOCK_TASK_HFCLKSTART);
while (NRF_CLOCK->EVENTS_HFCLKSTARTED == 0) {
__WFE();
__SEV();
__WFE();
}
NRF_CLOCK->EVENTS_HFCLKSTARTED = 0;
if (!(intenset & CLOCK_INTENSET_HFCLKSTARTED_Msk)) {
nrf_clock_int_disable(NRF_CLOCK_INT_HF_STARTED_MASK);
}
NVIC_ClearPendingIRQ(POWER_CLOCK_IRQn);
irq_enable(POWER_CLOCK_IRQn);
} else {
NRF_CLOCK->EVENTS_HFCLKSTARTED = 0;
nrf_clock_task_trigger(NRF_CLOCK_TASK_HFCLKSTART);
}
/* release resource guard */
m16src_grd = 0;
hf_already_started:
/* rollover should not happen as start and stop shall be
* called in pairs.
*/
__ASSERT_NO_MSG(m16src_ref);
stat = CLOCK_HFCLKSTAT_SRC_Xtal | CLOCK_HFCLKSTAT_STATE_Msk;
if ((NRF_CLOCK->HFCLKSTAT & stat) == stat) {
return 0;
} else {
return -EINPROGRESS;
}
}
static int _m16src_stop(struct device *dev, clock_control_subsys_t sub_system)
{
u32_t imask;
ARG_UNUSED(sub_system);
/* Test for started resource, if so, decrement reference and acquire
* resource guard.
*/
imask = irq_lock();
if (!m16src_ref) {
irq_unlock(imask);
return -EALREADY;
}
if (--m16src_ref) {
irq_unlock(imask);
return -EBUSY;
}
if (m16src_grd) {
m16src_ref++;
irq_unlock(imask);
return -EAGAIN;
}
m16src_grd = 1;
irq_unlock(imask);
	/* Re-entrant and multi-context safe; reference count is zero. */
nrf_clock_task_trigger(NRF_CLOCK_TASK_HFCLKSTOP);
/* release resource guard */
m16src_grd = 0;
return 0;
}
static int _k32src_start(struct device *dev, clock_control_subsys_t sub_system)
{
u32_t lf_clk_src;
u32_t imask;
u32_t stat;
#if defined(CONFIG_CLOCK_CONTROL_NRF5_K32SRC_BLOCKING)
u32_t intenset;
#endif /* CONFIG_CLOCK_CONTROL_NRF5_K32SRC_BLOCKING */
/* If the LF clock is already started, but wasn't initialized with
* this function, allow it to run once. This is needed because if a
* soft reset is triggered while watchdog is active, the LF clock will
* already be running, but won't be configured yet (watchdog forces LF
* clock to be running).
*
* That is, a hardware check won't work here, because even if the LF
* clock is already running it might not be initialized. We need an
* initialized flag.
*/
imask = irq_lock();
if (k32src_initialized) {
irq_unlock(imask);
goto lf_already_started;
}
k32src_initialized = 1;
irq_unlock(imask);
/* Clear events if any */
NRF_CLOCK->EVENTS_LFCLKSTARTED = 0;
/* Set LF Clock Source */
lf_clk_src = POINTER_TO_UINT(sub_system);
NRF_CLOCK->LFCLKSRC = lf_clk_src;
#if defined(CONFIG_CLOCK_CONTROL_NRF5_K32SRC_BLOCKING)
irq_disable(POWER_CLOCK_IRQn);
intenset = NRF_CLOCK->INTENSET;
nrf_clock_int_enable(NRF_CLOCK_INT_LF_STARTED_MASK);
/* Start and spin-wait until clock settles */
nrf_clock_task_trigger(NRF_CLOCK_TASK_LFCLKSTART);
while (NRF_CLOCK->EVENTS_LFCLKSTARTED == 0) {
__WFE();
__SEV();
__WFE();
}
NRF_CLOCK->EVENTS_LFCLKSTARTED = 0;
if (!(intenset & CLOCK_INTENSET_LFCLKSTARTED_Msk)) {
nrf_clock_int_disable(NRF_CLOCK_INT_LF_STARTED_MASK);
}
NVIC_ClearPendingIRQ(POWER_CLOCK_IRQn);
irq_enable(POWER_CLOCK_IRQn);
#else /* !CONFIG_CLOCK_CONTROL_NRF5_K32SRC_BLOCKING */
/* NOTE: LFCLK will initially start running from the LFRC if LFXO is
* selected.
*/
nrf_clock_task_trigger(NRF_CLOCK_TASK_LFCLKSTART);
#endif /* !CONFIG_CLOCK_CONTROL_NRF5_K32SRC_BLOCKING */
/* If RC selected, calibrate and start timer for consecutive
* calibrations.
*/
nrf_clock_int_disable(NRF_CLOCK_INT_DONE_MASK |
NRF_CLOCK_INT_CTTO_MASK);
NRF_CLOCK->EVENTS_DONE = 0;
NRF_CLOCK->EVENTS_CTTO = 0;
if ((lf_clk_src & CLOCK_LFCLKSRC_SRC_Msk) == CLOCK_LFCLKSRC_SRC_RC) {
int err;
/* Set the Calibration Timer Initial Value */
NRF_CLOCK->CTIV = 16; /* 4s in 0.25s units */
/* Enable DONE and CTTO IRQs */
nrf_clock_int_enable(NRF_CLOCK_INT_DONE_MASK |
NRF_CLOCK_INT_CTTO_MASK);
/* Start HF clock, if already started then explicitly
* assert IRQ.
* NOTE: The INTENSET is used as state flag to start
* calibration in ISR.
*/
nrf_clock_int_enable(NRF_CLOCK_INT_HF_STARTED_MASK);
err = _m16src_start(dev, false);
if (!err) {
NVIC_SetPendingIRQ(POWER_CLOCK_IRQn);
} else {
__ASSERT_NO_MSG(err == -EINPROGRESS);
}
}
lf_already_started:
stat = (NRF_CLOCK->LFCLKSRCCOPY & CLOCK_LFCLKSRCCOPY_SRC_Msk) |
CLOCK_LFCLKSTAT_STATE_Msk;
if ((NRF_CLOCK->LFCLKSTAT & stat) == stat) {
return 0;
} else {
return -EINPROGRESS;
}
}
#if defined(CONFIG_USB) && defined(CONFIG_SOC_NRF52840)
static inline void power_event_cb(nrf_power_event_t event)
{
extern void usb_dc_nrfx_power_event_callback(nrf_power_event_t event);
usb_dc_nrfx_power_event_callback(event);
}
#endif
static void _power_clock_isr(void *arg)
{
u8_t pof, hf_intenset, hf_stat, hf, lf, done, ctto;
#if defined(CONFIG_USB) && defined(CONFIG_SOC_NRF52840)
bool usb_detected, usb_pwr_rdy, usb_removed;
#endif
struct device *dev = arg;
pof = (NRF_POWER->EVENTS_POFWARN != 0);
hf_intenset =
((NRF_CLOCK->INTENSET & CLOCK_INTENSET_HFCLKSTARTED_Msk) != 0);
hf_stat = ((NRF_CLOCK->HFCLKSTAT & CLOCK_HFCLKSTAT_STATE_Msk) != 0);
hf = (NRF_CLOCK->EVENTS_HFCLKSTARTED != 0);
lf = (NRF_CLOCK->EVENTS_LFCLKSTARTED != 0);
done = (NRF_CLOCK->EVENTS_DONE != 0);
ctto = (NRF_CLOCK->EVENTS_CTTO != 0);
#if defined(CONFIG_USB) && defined(CONFIG_SOC_NRF52840)
usb_detected = nrf_power_event_check(NRF_POWER_EVENT_USBDETECTED);
usb_pwr_rdy = nrf_power_event_check(NRF_POWER_EVENT_USBPWRRDY);
usb_removed = nrf_power_event_check(NRF_POWER_EVENT_USBREMOVED);
__ASSERT_NO_MSG(pof || hf || hf_intenset || lf || done || ctto ||
usb_detected || usb_pwr_rdy || usb_removed);
#else
__ASSERT_NO_MSG(pof || hf || hf_intenset || lf || done || ctto);
#endif
if (pof) {
NRF_POWER->EVENTS_POFWARN = 0;
}
if (hf) {
NRF_CLOCK->EVENTS_HFCLKSTARTED = 0;
}
if (hf_intenset && hf_stat) {
/* INTENSET is used as state flag to start calibration,
* hence clear it here.
*/
NRF_CLOCK->INTENCLR = CLOCK_INTENCLR_HFCLKSTARTED_Msk;
/* Start Calibration */
NRF_CLOCK->TASKS_CAL = 1;
}
if (lf) {
NRF_CLOCK->EVENTS_LFCLKSTARTED = 0;
__ASSERT_NO_MSG(0);
}
if (done) {
int err;
NRF_CLOCK->EVENTS_DONE = 0;
/* Calibration done, stop 16M Xtal. */
err = _m16src_stop(dev, NULL);
__ASSERT_NO_MSG(!err);
/* Start timer for next calibration. */
NRF_CLOCK->TASKS_CTSTART = 1;
}
if (ctto) {
int err;
NRF_CLOCK->EVENTS_CTTO = 0;
/* Start HF clock, if already started
* then explicitly assert IRQ; we use the INTENSET
* as a state flag to start calibration.
*/
NRF_CLOCK->INTENSET = CLOCK_INTENSET_HFCLKSTARTED_Msk;
err = _m16src_start(dev, false);
if (!err) {
NVIC_SetPendingIRQ(POWER_CLOCK_IRQn);
} else {
__ASSERT_NO_MSG(err == -EINPROGRESS);
}
}
#if defined(CONFIG_USB) && defined(CONFIG_SOC_NRF52840)
if (usb_detected) {
nrf_power_event_clear(NRF_POWER_EVENT_USBDETECTED);
power_event_cb(NRF_POWER_EVENT_USBDETECTED);
}
if (usb_pwr_rdy) {
nrf_power_event_clear(NRF_POWER_EVENT_USBPWRRDY);
power_event_cb(NRF_POWER_EVENT_USBPWRRDY);
}
if (usb_removed) {
nrf_power_event_clear(NRF_POWER_EVENT_USBREMOVED);
power_event_cb(NRF_POWER_EVENT_USBREMOVED);
}
#endif
}
static int _clock_control_init(struct device *dev)
{
/* TODO: Initialization will be called twice, once for 32KHz and then
* for 16 MHz clock. The vector is also shared for other power related
	 * features. Hence, design a better way to init the IRQ ISR when adding
* power peripheral driver and/or new SoC series.
* NOTE: Currently the operations here are idempotent.
*/
IRQ_CONNECT(NRF5_IRQ_POWER_CLOCK_IRQn,
CONFIG_CLOCK_CONTROL_NRF5_IRQ_PRIORITY,
_power_clock_isr, 0, 0);
irq_enable(POWER_CLOCK_IRQn);
return 0;
}
static const struct clock_control_driver_api _m16src_clock_control_api = {
.on = _m16src_start,
.off = _m16src_stop,
.get_rate = NULL,
};
DEVICE_AND_API_INIT(clock_nrf5_m16src,
CONFIG_CLOCK_CONTROL_NRF5_M16SRC_DRV_NAME,
_clock_control_init, NULL, NULL, PRE_KERNEL_1,
CONFIG_KERNEL_INIT_PRIORITY_DEVICE,
&_m16src_clock_control_api);
static const struct clock_control_driver_api _k32src_clock_control_api = {
.on = _k32src_start,
.off = NULL,
.get_rate = NULL,
};
DEVICE_AND_API_INIT(clock_nrf5_k32src,
CONFIG_CLOCK_CONTROL_NRF5_K32SRC_DRV_NAME,
_clock_control_init, NULL, NULL, PRE_KERNEL_1,
CONFIG_KERNEL_INIT_PRIORITY_DEVICE,
&_k32src_clock_control_api);
#if defined(CONFIG_USB) && defined(CONFIG_SOC_NRF52840)
void nrf5_power_usb_power_int_enable(bool enable)
{
u32_t mask;
mask = NRF_POWER_INT_USBDETECTED_MASK |
NRF_POWER_INT_USBREMOVED_MASK |
NRF_POWER_INT_USBPWRRDY_MASK;
if (enable) {
nrf_power_int_enable(mask);
irq_enable(POWER_CLOCK_IRQn);
} else {
nrf_power_int_disable(mask);
}
}
#endif
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""IO helper functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os
import re
import six
import tensorflow as tf
_ESCAPE_GLOB_CHARACTERS_REGEX = re.compile('([*?[])')
# TODO(chihuahua): Rename this method to use camel-case for GCS (Gcs).
def IsGCSPath(path):
return path.startswith("gs://")
def IsCnsPath(path):
return path.startswith("/cns/")
def IsTensorFlowEventsFile(path):
"""Check the path name to see if it is probably a TF Events file.
Args:
path: A file path to check if it is an event file.
Raises:
ValueError: If the path is an empty string.
Returns:
If path is formatted like a TensorFlowEventsFile.
"""
if not path:
raise ValueError('Path must be a nonempty string')
return 'tfevents' in tf.compat.as_str_any(os.path.basename(path))
def ListDirectoryAbsolute(directory):
"""Yields all files in the given directory. The paths are absolute."""
return (os.path.join(directory, path)
for path in tf.gfile.ListDirectory(directory))
def _EscapeGlobCharacters(path):
"""Escapes the glob characters in a path.
Python 3 has a glob.escape method, but python 2 lacks it, so we manually
implement this method.
Args:
path: The absolute path to escape.
Returns:
The escaped path string.
"""
drive, path = os.path.splitdrive(path)
return '%s%s' % (drive, _ESCAPE_GLOB_CHARACTERS_REGEX.sub(r'[\1]', path))
def ListRecursivelyViaGlobbing(top):
"""Recursively lists all files within the directory.
  This method lists subdirectories (in addition to regular files), and the
  file paths are all absolute. If the directory does not exist, this yields
  nothing.
  This method does so by glob-ing deeper and deeper directories, i.e.
  foo/*, foo/*/*, foo/*/*/* and so on, until all files are listed.
For certain file systems, globbing via this method may prove significantly
faster than recursively walking a directory. Specifically, TF file systems
that implement TensorFlow's FileSystem.GetMatchingPaths method could save
costly disk reads by using this method. However, for other file systems, this
method might prove slower because the file system performs a walk per call to
glob (in which case it might as well just perform 1 walk).
Args:
top: A path to a directory.
Yields:
A (dir_path, file_paths) tuple for each directory/subdirectory.
"""
current_glob_string = os.path.join(_EscapeGlobCharacters(top), '*')
level = 0
while True:
tf.logging.info('GlobAndListFiles: Starting to glob level %d', level)
glob = tf.gfile.Glob(current_glob_string)
tf.logging.info(
'GlobAndListFiles: %d files glob-ed at level %d', len(glob), level)
if not glob:
# This subdirectory level lacks files. Terminate.
return
# Map subdirectory to a list of files.
pairs = collections.defaultdict(list)
for file_path in glob:
pairs[os.path.dirname(file_path)].append(file_path)
for dir_name, file_paths in six.iteritems(pairs):
yield (dir_name, tuple(file_paths))
if len(pairs) == 1:
# If at any point the glob returns files that are all in a single
# directory, replace the current globbing path with that directory as the
# literal prefix. This should improve efficiency in cases where a single
      # subdir is significantly deeper than the rest of the subdirs.
current_glob_string = os.path.join(list(pairs.keys())[0], '*')
# Iterate to the next level of subdirectories.
current_glob_string = os.path.join(current_glob_string, '*')
level += 1
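# A sketch of the traversal above: for a tree holding top/a.txt and
# top/sub/b.txt, level 0 globs top/* and yields ('top', ('top/a.txt',
# 'top/sub')) (note the subdirectory itself is listed), then level 1 globs
# top/*/* and yields ('top/sub', ('top/sub/b.txt',)).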
def ListRecursivelyViaWalking(top):
"""Walks a directory tree, yielding (dir_path, file_paths) tuples.
For each of `top` and its subdirectories, yields a tuple containing the path
to the directory and the path to each of the contained files. Note that
unlike os.Walk()/tf.gfile.Walk()/ListRecursivelyViaGlobbing, this does not
list subdirectories. The file paths are all absolute. If the directory does
not exist, this yields nothing.
Walking may be incredibly slow on certain file systems.
Args:
top: A path to a directory.
Yields:
A (dir_path, file_paths) tuple for each directory/subdirectory.
"""
for dir_path, _, filenames in tf.gfile.Walk(top):
yield (dir_path, (os.path.join(dir_path, filename)
for filename in filenames))
def GetLogdirSubdirectories(path):
"""Obtains all subdirectories with events files.
The order of the subdirectories returned is unspecified. The internal logic
that determines order varies by scenario.
Args:
path: The path to a directory under which to find subdirectories.
Returns:
A tuple of absolute paths of all subdirectories each with at least 1 events
file directly within the subdirectory.
Raises:
ValueError: If the path passed to the method exists and is not a directory.
"""
if not tf.gfile.Exists(path):
# No directory to traverse.
return ()
if not tf.gfile.IsDirectory(path):
raise ValueError('GetLogdirSubdirectories: path exists and is not a '
'directory, %s' % path)
if IsGCSPath(path) or IsCnsPath(path):
# Glob-ing for files can be significantly faster than recursively
# walking through directories for some file systems.
tf.logging.info(
'GetLogdirSubdirectories: Starting to list directories via glob-ing.')
traversal_method = ListRecursivelyViaGlobbing
else:
# For other file systems, the glob-ing based method might be slower because
# each call to glob could involve performing a recursive walk.
tf.logging.info(
'GetLogdirSubdirectories: Starting to list directories via walking.')
traversal_method = ListRecursivelyViaWalking
return (
subdir
for (subdir, files) in traversal_method(path)
if any(IsTensorFlowEventsFile(f) for f in files)
)
|
// Copyright 2009 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview A table for showing the results of performance testing.
*
* {@see goog.testing.benchmark} for an easy way to use this functionality.
*
* @author [email protected] (Attila Bodis)
* @author [email protected] (Nick Santos)
*/
goog.setTestOnly('goog.testing.PerformanceTable');
goog.provide('goog.testing.PerformanceTable');
goog.require('goog.asserts');
goog.require('goog.dom');
goog.require('goog.dom.TagName');
goog.require('goog.testing.PerformanceTimer');
/**
* A UI widget that runs performance tests and displays the results.
* @param {Element} root The element where the table should be attached.
* @param {goog.testing.PerformanceTimer=} opt_timer A timer to use for
* executing functions and profiling them.
* @param {number=} opt_precision Number of digits of precision to include in
* results. Defaults to 0.
* @param {number=} opt_numSamples The number of samples to take. Defaults to 5.
* @constructor
* @final
*/
goog.testing.PerformanceTable = function(
root, opt_timer, opt_precision, opt_numSamples) {
/**
* Where the table should be attached.
* @private {Element}
*/
this.root_ = root;
/**
* Number of digits of precision to include in results.
* Defaults to 0.
* @private {number}
*/
this.precision_ = opt_precision || 0;
var timer = opt_timer;
if (!timer) {
timer = new goog.testing.PerformanceTimer();
timer.setNumSamples(opt_numSamples || 5);
timer.setDiscardOutliers(true);
}
/**
* A timer for running the tests.
* @private {goog.testing.PerformanceTimer}
*/
this.timer_ = timer;
this.initRoot_();
};
/**
* @return {goog.testing.PerformanceTimer} The timer being used.
*/
goog.testing.PerformanceTable.prototype.getTimer = function() {
return this.timer_;
};
/**
* Render the initial table.
* @private
*/
goog.testing.PerformanceTable.prototype.initRoot_ = function() {
this.root_.innerHTML = '<table class="test-results" cellspacing="1">' +
' <thead>' +
' <tr>' +
' <th rowspan="2">Test Description</th>' +
' <th rowspan="2">Runs</th>' +
' <th colspan="4">Results (ms)</th>' +
' </tr>' +
' <tr>' +
' <th>Average</th>' +
' <th>Std Dev</th>' +
' <th>Minimum</th>' +
' <th>Maximum</th>' +
' </tr>' +
' </thead>' +
' <tbody>' +
' </tbody>' +
'</table>';
};
/**
* @return {Element} The body of the table.
* @private
*/
goog.testing.PerformanceTable.prototype.getTableBody_ = function() {
return goog.dom.getElementsByTagName(
goog.dom.TagName.TBODY, goog.asserts.assert(this.root_))[0];
};
/**
* Round to the specified precision.
* @param {number} num The number to round.
* @return {string} The rounded number, as a string.
* @private
*/
goog.testing.PerformanceTable.prototype.round_ = function(num) {
var factor = Math.pow(10, this.precision_);
return String(Math.round(num * factor) / factor);
};
/**
* Run the given function with the performance timer, and show the results.
* @param {Function} fn The function to run.
* @param {string=} opt_desc A description to associate with this run.
*/
goog.testing.PerformanceTable.prototype.run = function(fn, opt_desc) {
this.runTask(
new goog.testing.PerformanceTimer.Task(/** @type {function()} */ (fn)),
opt_desc);
};
/**
* Run the given task with the performance timer, and show the results.
* @param {goog.testing.PerformanceTimer.Task} task The performance timer task
* to run.
* @param {string=} opt_desc A description to associate with this run.
*/
goog.testing.PerformanceTable.prototype.runTask = function(task, opt_desc) {
var results = this.timer_.runTask(task);
this.recordResults(results, opt_desc);
};
/**
* Record a performance timer results object to the performance table. See
* {@code goog.testing.PerformanceTimer} for details of the format of this
* object.
* @param {Object} results The performance timer results object.
* @param {string=} opt_desc A description to associate with these results.
*/
goog.testing.PerformanceTable.prototype.recordResults = function(
results, opt_desc) {
var average = results['average'];
var standardDeviation = results['standardDeviation'];
var isSuspicious = average < 0 || standardDeviation > average * .5;
var resultsRow = goog.dom.createDom(
goog.dom.TagName.TR, null, goog.dom.createDom(
goog.dom.TagName.TD, 'test-description',
opt_desc || 'No description'),
goog.dom.createDom(
goog.dom.TagName.TD, 'test-count', String(results['count'])),
goog.dom.createDom(
goog.dom.TagName.TD, 'test-average', this.round_(average)),
goog.dom.createDom(
goog.dom.TagName.TD, 'test-standard-deviation',
this.round_(standardDeviation)),
goog.dom.createDom(
goog.dom.TagName.TD, 'test-minimum', String(results['minimum'])),
goog.dom.createDom(
goog.dom.TagName.TD, 'test-maximum', String(results['maximum'])));
if (isSuspicious) {
resultsRow.className = 'test-suspicious';
}
this.getTableBody_().appendChild(resultsRow);
};
/**
* Report an error in the table.
* @param {*} reason The reason for the error.
*/
goog.testing.PerformanceTable.prototype.reportError = function(reason) {
this.getTableBody_().appendChild(
goog.dom.createDom(
goog.dom.TagName.TR, null,
goog.dom.createDom(
goog.dom.TagName.TD, {'class': 'test-error', 'colSpan': 5},
String(reason))));
};
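// Usage sketch (the element id is illustrative):
//   var table = new goog.testing.PerformanceTable(goog.dom.getElement('perf'));
//   table.run(function() { /* code under test */ }, 'my benchmark');
// Each run appends a row showing run count, average, std dev, min and max in ms.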
|
var topic = [
    "尚未開學",   // school not yet in session
    "國定假日",   // national holiday
    "環境準備",   // environment setup
    "隨機性",     // randomness
    "重複性",     // repetition
    "條件判斷"    // conditional logic
];
var startDate = new Date();
function setMonthAndDay(startMonth, startDay) {
    // set the month and the day in a single call
startDate.setMonth(startMonth - 1, startDay);
startDate.setHours(0);
startDate.setMinutes(0);
startDate.setSeconds(0);
}
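// Example: setMonthAndDay(4, 1) moves startDate to April 1 at 00:00:00 of the
// current year, because JavaScript Date months are 0-indexed (3 === April).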
var month = 4;
var date = 1;
function Tonchange() {
var sdate = document.querySelector('input[name="pwd"]');
var month = sdate.value.slice(5, 7);
var date = sdate.value.slice(8, 10);
alert(month);
alert(date);
} |
"""Define reusable pytest fixtures."""
import tempfile
from types import SimpleNamespace
import pytest
from yamlpath.wrappers import ConsolePrinter
from yamlpath.eyaml import EYAMLProcessor
# Implied constants
EYAML_PRIVATE_KEY_FILENAME = "private_key.pkcs7.pem"
EYAML_PUBLIC_KEY_FILENAME = "public_key.pkcs7.pem"
# pylint: disable=locally-disabled,invalid-name
requireseyaml = pytest.mark.skipif(
EYAMLProcessor.get_eyaml_executable("eyaml") is None
, reason="The 'eyaml' command must be installed and accessible on the PATH"
+ " to test and use EYAML features. Try: 'gem install hiera-eyaml'"
+ " after intalling ruby and rubygems."
)
@pytest.fixture
def quiet_logger():
"""Returns a quiet ConsolePrinter."""
args = SimpleNamespace(verbose=False, quiet=True, debug=False)
return ConsolePrinter(args)
@pytest.fixture(scope="session")
def old_eyaml_keys(tmp_path_factory):
"""Creates temporary keys for encryption/decryption tests."""
old_key_path_name = "old-keys"
old_key_dir = tmp_path_factory.mktemp(old_key_path_name)
old_private_key_file = old_key_dir / EYAML_PRIVATE_KEY_FILENAME
old_public_key_file = old_key_dir / EYAML_PUBLIC_KEY_FILENAME
old_private_key = r"""-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEA1BuytnsdHdt6NkNfLoGJIlf9hrWux8raPP3W57cONh2MrQ6d
aoAX+L+igTSjvSTI6oxsO0dqdYXZO1+rOK3gI9OnZQhkCjq9IRoWx7AIvM7skaD0
Lne9YsvA7mGY/z9lm3IALI6OBVV5k6xnBR2PVi6A7FnDm0CRLit2Bn9eHLN3k4oL
S/ynxgXBmWWgnKtJNJwGmeD5PwzJfXCcJ3kPItiktFizJZoPmAlBP7LIzamlfSXV
VoniRs45aGrTGpmZSdvossL41KBCYJGjP+lIL/UpDJHBeiuqVQoDl4/UZqb5xF9S
C2p2Of21fmZmj4uUAT5FPtKMKCspmLWQeUEfiwIDAQABAoIBAEyXR9wu7p+mbiYE
A+22Jr+5CDpJhrhsXovhmWWIq2ANIYwoF92qLX3MLTD8whd9nfNcC4UIT7/qOjv/
WsOXvbUSK4MHGaC7/ylh01H+Fdmf2rrnZOUWpdN0AdHSej3JNbaA3uE4BL6WU9Vo
TrcBKo4TMsilzUVVdlc2qGLGQUSZPLnIJWMLQIdCe2kZ9YvUlGloD4uGT3RH6+vH
TOtXqDgLGS/79K0rnflnBsUBkXpukxzOcTRHxR0s7SJ2XCB0JfdLWfR6X1nzM4mh
rn/m2nzEOG9ICe5hoHqAEZ/ezKd/jwxMg1YMZnGAzDMw7/UPWo87wgVdxxOHOsHG
v/pK+okCgYEA/SToT82qtvWIiUTbkdDislGhTet2eEu2Bi9wCxAUQn045Y812r9d
TvJyfKJyvvpxzejaecJb8oOXyypMDay7aPOVBI1E2OqfImxF8pJ0QqejAUCinXrj
KnV72L/hjTavivWq1vHZYXSxufAMG0C7UeztwkOfk85N3wuuYYWYrc0CgYEA1oBG
2fQ0PXDyionE3c4bpRGZMJxD+3RFRMCJiE+xheRR22ObSDLUH123ZGmU0m2FTS9O
M+GJbZgdV1Oe0EJ5rWfzFYUmVJIreH+oQWaY/HMkoe705LAMcPyd0bSjRVWWiz+l
anIGjj5HaPSI7XFqdQu7j3ww67k4BBAca8++arcCgYA/cIhnt3sY7t+QxxjfqiGl
3p82D9RYwWCUnD7QBu+M2iTwIru0XlDcABaA9ZUcF1d96uUVroesdx4LZEY7BxbQ
bnrh8SVX1zSaQ9gjumA4dBp9rd0S6kET2u12nF/CK/pCMN7njySTL9N6bZYbHlXT
ajULgjbzq7gINb01420n4QKBgQCqu0epy9qY3QHwi2ALPDZ82NkZ/AeQaieIZcgS
m3wtmmIdQdcjTHHS1YFXh0JRi6MCoJiaavY8KUuRapmKIp8/CvJNOsIbpoy7SMDf
7Y3vwqZxzgVW0VnVxPzJIgKi+VDuXSaI52GYbrHgNGOYuyGFMGWF+8/kkHSppzk4
Bw8FWQKBgQCo/7cV19n3e7ZlZ/aGhIOtSCTopMBV8/9PIw+s+ZDiQ7vRSj9DwkAQ
+x97V0idgh16tnly0xKvGCGTQR7qDsDTjHmmF4LZUGjcq7pHsTi/umCM/toE+BCk
7ayr+G0DWr5FjhQ7uCt2Rz1NKcj6EkDcM1WZxkDLAvBXjlj+T+eqtQ==
-----END RSA PRIVATE KEY-----
"""
old_public_key = r"""-----BEGIN CERTIFICATE-----
MIIC2TCCAcGgAwIBAgIBATANBgkqhkiG9w0BAQsFADAAMCAXDTE5MDUwNzE4MDAw
NVoYDzIwNjkwNDI0MTgwMDA1WjAAMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
CgKCAQEA1BuytnsdHdt6NkNfLoGJIlf9hrWux8raPP3W57cONh2MrQ6daoAX+L+i
gTSjvSTI6oxsO0dqdYXZO1+rOK3gI9OnZQhkCjq9IRoWx7AIvM7skaD0Lne9YsvA
7mGY/z9lm3IALI6OBVV5k6xnBR2PVi6A7FnDm0CRLit2Bn9eHLN3k4oLS/ynxgXB
mWWgnKtJNJwGmeD5PwzJfXCcJ3kPItiktFizJZoPmAlBP7LIzamlfSXVVoniRs45
aGrTGpmZSdvossL41KBCYJGjP+lIL/UpDJHBeiuqVQoDl4/UZqb5xF9SC2p2Of21
fmZmj4uUAT5FPtKMKCspmLWQeUEfiwIDAQABo1wwWjAPBgNVHRMBAf8EBTADAQH/
MB0GA1UdDgQWBBTUHb3HX8dBfYFL1J1sCv+uCum5AzAoBgNVHSMEITAfgBTUHb3H
X8dBfYFL1J1sCv+uCum5A6EEpAIwAIIBATANBgkqhkiG9w0BAQsFAAOCAQEAcw+0
dfHSLNAZD95G2pDnT2qShjmdLdbrDQhAXWhLeoWpXsKvC0iUyQaOF9ckl++tHM2g
ejm1vEOrZ+1uXK3qnMXPF99Wet686OhyoDt262Mt3wzGHNijAHEvQtjap8ZIwfOM
zFTvjmOlUScqF/Yg+htcGnJdQhWIrsD+exiY5Kz2IMtuW+yWLLP8bY5vPg6qfrp2
4VVJ3Md1gdSownd1Au5tqPXm6VfSgLiCm9iDPVsjDII9h8ydate1d2TBHPup+4tN
JZ5/muctimydC+S2oCn7ucsilxZD89N7rJjKXNfoUOGHjOEVQMa8RtZLzH2sUEaS
FktE6rH8a+8SwO+TGw==
-----END CERTIFICATE-----
"""
with open(old_private_key_file, 'w') as key_file:
key_file.write(old_private_key)
with open(old_public_key_file, 'w') as key_file:
key_file.write(old_public_key)
return (old_private_key_file, old_public_key_file)
@requireseyaml
@pytest.fixture(scope="session")
def new_eyaml_keys(tmp_path_factory):
"""Creates temporary keys for encryption/decryption tests."""
from subprocess import run
new_key_path_name = "new-keys"
new_key_dir = tmp_path_factory.mktemp(new_key_path_name)
new_private_key_file = new_key_dir / EYAML_PRIVATE_KEY_FILENAME
new_public_key_file = new_key_dir / EYAML_PUBLIC_KEY_FILENAME
run(
"{} createkeys --pkcs7-private-key={} --pkcs7-public-key={}"
.format(
EYAMLProcessor.get_eyaml_executable("eyaml"),
new_private_key_file,
new_public_key_file
)
.split()
)
return (new_private_key_file, new_public_key_file)
def create_temp_yaml_file(tmp_path_factory, content):
"""Creates a test YAML input file."""
fhnd = tempfile.NamedTemporaryFile(mode='w',
dir=tmp_path_factory.getbasetemp(),
suffix='.yaml',
delete=False)
    fhnd.write(content)
    fhnd.close()  # flush and release the handle so tests can read the file by name
    return fhnd.name
@pytest.fixture(scope="session")
def imparsible_yaml_file(tmp_path_factory):
"""
Creates a YAML file that causes a ParserError when read by ruamel.yaml.
"""
content = '''{"json": "is YAML", "but_bad_json": "isn't anything!"'''
return create_temp_yaml_file(tmp_path_factory, content)
@pytest.fixture(scope="session")
def badsyntax_yaml_file(tmp_path_factory):
"""
Creates a YAML file that causes a ScannerError when read by ruamel.yaml.
"""
content = """---
# This YAML content contains a critical syntax error
& bad_anchor: is bad
"""
return create_temp_yaml_file(tmp_path_factory, content)
@pytest.fixture(scope="session")
def badcmp_yaml_file(tmp_path_factory):
"""
Creates a YAML file that causes a ComposerError when read by ruamel.yaml.
"""
content = """---
# This YAML file is improperly composed
this is a parsing error: *no such capability
"""
return create_temp_yaml_file(tmp_path_factory, content)
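# Usage sketch (hypothetical test; pytest injects fixtures by parameter name):
#
#     @requireseyaml
#     def test_roundtrip(quiet_logger, old_eyaml_keys):
#         private_key_file, public_key_file = old_eyaml_keys
#         ...  # build an EYAMLProcessor against these key files and assert on it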
|
/*
* Test to ensure that (dump/restore/export/import/oplog) works with a replica set connection string
* 1. Start a replica set.
* 2. Add data to a collection.
* 3. Take a dump of the database.
* 4. Drop the db.
* 5. Restore the db.
* 6. Export a collection.
* 7. Drop the collection.
* 8. Import the collection.
*/
(function() {
"use strict";
// Skip this test if running with --nojournal and WiredTiger.
if (jsTest.options().noJournal &&
(!jsTest.options().storageEngine || jsTest.options().storageEngine === "wiredTiger")) {
print("Skipping test because running WiredTiger without journaling isn't a valid" +
" replica set configuration");
return;
}
var replTest =
new ReplSetTest({name: 'tool_replset', nodes: 2, oplogSize: 5, nodeOptions: {"vvvvv": ""}});
var nodes = replTest.startSet();
var config = replTest.getReplSetConfig();
config.members[0].priority = 3;
config.members[1].priority = 0;
replTest.initiate(config);
var master = replTest.getPrimary();
assert.eq(nodes[0], master, "incorrect master elected");
for (var i = 0; i < 100; i++) {
assert.commandWorked(master.getDB("foo").bar.insert({a: i}));
}
replTest.awaitReplication();
var replSetConnString =
"tool_replset/127.0.0.1:" + replTest.ports[0] + ",127.0.0.1:" + replTest.ports[1];
// Test with mongodump/mongorestore
var data = MongoRunner.dataDir + "/tool_replset-dump1/";
print("using mongodump to dump the db to " + data);
var exitCode = MongoRunner.runMongoTool("mongodump", {
host: replSetConnString,
out: data,
});
assert.eq(0, exitCode, "mongodump failed to dump from the replica set");
print("db successfully dumped to " + data +
". dropping collection before testing the restore process");
assert(master.getDB("foo").bar.drop());
replTest.awaitReplication();
print("using mongorestore to restore the db from " + data);
exitCode = MongoRunner.runMongoTool("mongorestore", {
host: replSetConnString,
dir: data,
});
assert.eq(0, exitCode, "mongorestore failed to restore data to the replica set");
print("db successfully restored, checking count");
var x = master.getDB("foo").getCollection("bar").count();
assert.eq(x, 100, "mongorestore should have successfully restored the collection");
replTest.awaitReplication();
// Test with mongoexport/mongoimport
print("export the collection");
var extFile = MongoRunner.dataDir + "/tool_replset/export";
exitCode = MongoRunner.runMongoTool("mongoexport", {
host: replSetConnString,
out: extFile,
db: "foo",
collection: "bar",
});
assert.eq(0, exitCode, "mongoexport failed to export collection 'foo.bar' from the replica set");
print("collection successfully exported, dropping now");
master.getDB("foo").getCollection("bar").drop();
replTest.awaitReplication();
print("import the collection");
exitCode = MongoRunner.runMongoTool("mongoimport", {
host: replSetConnString,
file: extFile,
db: "foo",
collection: "bar",
});
assert.eq(0, exitCode, "mongoimport failed to import collection 'foo.bar' into the replica set");
var x = master.getDB("foo").getCollection("bar").count();
assert.eq(x, 100, "mongoimport should have successfully imported the collection");
print("all tests successful, stopping replica set");
replTest.stopSet();
print("replica set stopped, test complete");
}());
|
"""
The MIT License (MIT)
Copyright (c) 2015-2021 Rapptz
Copyright (c) 2021-present Disnake Development
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
import asyncio
import os
import sys
import time
import traceback
from functools import partial
from itertools import groupby
from typing import (
TYPE_CHECKING,
Any,
Callable,
ClassVar,
Dict,
Iterator,
List,
Optional,
Sequence,
Tuple,
cast,
)
from ..components import (
ActionRow as ActionRowComponent,
Button as ButtonComponent,
Component,
NestedComponent,
SelectMenu as SelectComponent,
_component_factory,
)
from ..enums import ComponentType, try_enum_to_int
from .item import Item
__all__ = ("View",)
if TYPE_CHECKING:
from ..interactions import MessageInteraction
from ..message import Message
from ..state import ConnectionState
from ..types.components import Component as ComponentPayload
from .item import ItemCallbackType
def _walk_all_components(components: List[Component]) -> Iterator[NestedComponent]:
for item in components:
if isinstance(item, ActionRowComponent):
yield from item.children
else:
yield cast(NestedComponent, item)
def _component_to_item(component: NestedComponent) -> Item:
if isinstance(component, ButtonComponent):
from .button import Button
return Button.from_component(component)
if isinstance(component, SelectComponent):
from .select import Select
return Select.from_component(component)
return Item.from_component(component)
class _ViewWeights:
__slots__ = ("weights",)
def __init__(self, children: List[Item]):
self.weights: List[int] = [0, 0, 0, 0, 0]
key = lambda i: sys.maxsize if i.row is None else i.row
children = sorted(children, key=key)
for row, group in groupby(children, key=key):
for item in group:
self.add_item(item)
def find_open_space(self, item: Item) -> int:
for index, weight in enumerate(self.weights):
if weight + item.width <= 5:
return index
raise ValueError("could not find open space for item")
def add_item(self, item: Item) -> None:
if item.row is not None:
total = self.weights[item.row] + item.width
if total > 5:
raise ValueError(f"item would not fit at row {item.row} ({total} > 5 width)")
self.weights[item.row] = total
item._rendered_row = item.row
else:
index = self.find_open_space(item)
self.weights[index] += item.width
item._rendered_row = index
def remove_item(self, item: Item) -> None:
if item._rendered_row is not None:
self.weights[item._rendered_row] -= item.width
item._rendered_row = None
def clear(self) -> None:
self.weights = [0, 0, 0, 0, 0]
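# Illustration: each row has a width budget of 5; a Button occupies width 1 and
# a Select occupies width 5, so a row fits up to five buttons or one select menu.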
class View:
"""Represents a UI view.
This object must be inherited to create a UI within Discord.
.. versionadded:: 2.0
Parameters
----------
timeout: Optional[:class:`float`]
Timeout in seconds from last interaction with the UI before no longer accepting input.
If ``None`` then there is no timeout.
Attributes
----------
timeout: Optional[:class:`float`]
Timeout from last interaction with the UI before no longer accepting input.
If ``None`` then there is no timeout.
children: List[:class:`Item`]
The list of children attached to this view.
"""
__discord_ui_view__: ClassVar[bool] = True
__view_children_items__: ClassVar[List[ItemCallbackType]] = []
def __init_subclass__(cls) -> None:
children: List[ItemCallbackType] = []
for base in reversed(cls.__mro__):
for member in base.__dict__.values():
if hasattr(member, "__discord_ui_model_type__"):
children.append(member)
if len(children) > 25:
raise TypeError("View cannot have more than 25 children")
cls.__view_children_items__ = children
def __init__(self, *, timeout: Optional[float] = 180.0):
self.timeout = timeout
self.children: List[Item] = []
for func in self.__view_children_items__:
item: Item = func.__discord_ui_model_type__(**func.__discord_ui_model_kwargs__)
item.callback = partial(func, self, item)
item._view = self
setattr(self, func.__name__, item)
self.children.append(item)
self.__weights = _ViewWeights(self.children)
loop = asyncio.get_running_loop()
self.id: str = os.urandom(16).hex()
self.__cancel_callback: Optional[Callable[[View], None]] = None
self.__timeout_expiry: Optional[float] = None
self.__timeout_task: Optional[asyncio.Task[None]] = None
self.__stopped: asyncio.Future[bool] = loop.create_future()
def __repr__(self) -> str:
return f"<{self.__class__.__name__} timeout={self.timeout} children={len(self.children)}>"
async def __timeout_task_impl(self) -> None:
while True:
# Guard just in case someone changes the value of the timeout at runtime
if self.timeout is None:
return
if self.__timeout_expiry is None:
return self._dispatch_timeout()
# Check if we've elapsed our currently set timeout
now = time.monotonic()
if now >= self.__timeout_expiry:
return self._dispatch_timeout()
# Wait N seconds to see if timeout data has been refreshed
await asyncio.sleep(self.__timeout_expiry - now)
def to_components(self) -> List[Dict[str, Any]]:
def key(item: Item) -> int:
return item._rendered_row or 0
children = sorted(self.children, key=key)
components: List[Dict[str, Any]] = []
for _, group in groupby(children, key=key):
children = [item.to_component_dict() for item in group]
if not children:
continue
components.append(
{
"type": 1,
"components": children,
}
)
return components
@classmethod
def from_message(cls, message: Message, /, *, timeout: Optional[float] = 180.0) -> View:
"""Converts a message's components into a :class:`View`.
The :attr:`.Message.components` of a message are read-only
and separate types from those in the ``disnake.ui`` namespace.
In order to modify and edit message components they must be
converted into a :class:`View` first.
Parameters
----------
message: :class:`disnake.Message`
The message with components to convert into a view.
timeout: Optional[:class:`float`]
The timeout of the converted view.
Returns
-------
:class:`View`
The converted view. This always returns a :class:`View` and not
one of its subclasses.
"""
view = View(timeout=timeout)
for component in _walk_all_components(message.components):
view.add_item(_component_to_item(component))
return view
@property
def _expires_at(self) -> Optional[float]:
if self.timeout:
return time.monotonic() + self.timeout
return None
def add_item(self, item: Item) -> None:
"""Adds an item to the view.
Parameters
----------
item: :class:`Item`
The item to add to the view.
Raises
------
TypeError
An :class:`Item` was not passed.
ValueError
Maximum number of children has been exceeded (25)
or the row the item is trying to be added to is full.
"""
        if len(self.children) >= 25:
            raise ValueError("maximum number of children exceeded")
if not isinstance(item, Item):
raise TypeError(f"expected Item not {item.__class__!r}")
self.__weights.add_item(item)
item._view = self
self.children.append(item)
def remove_item(self, item: Item) -> None:
"""Removes an item from the view.
Parameters
----------
item: :class:`Item`
The item to remove from the view.
"""
try:
self.children.remove(item)
except ValueError:
pass
else:
self.__weights.remove_item(item)
def clear_items(self) -> None:
"""Removes all items from the view."""
self.children.clear()
self.__weights.clear()
async def interaction_check(self, interaction: MessageInteraction) -> bool:
"""|coro|
A callback that is called when an interaction happens within the view
that checks whether the view should process item callbacks for the interaction.
This is useful to override if, for example, you want to ensure that the
interaction author is a given user.
The default implementation of this returns ``True``.
.. note::
If an exception occurs within the body then the check
is considered a failure and :meth:`on_error` is called.
Parameters
----------
interaction: :class:`.MessageInteraction`
The interaction that occurred.
Returns
-------
:class:`bool`
Whether the view children's callbacks should be called.
"""
return True
async def on_timeout(self) -> None:
"""|coro|
A callback that is called when a view's timeout elapses without being explicitly stopped.
"""
pass
async def on_error(self, error: Exception, item: Item, interaction: MessageInteraction) -> None:
"""|coro|
A callback that is called when an item's callback or :meth:`interaction_check`
fails with an error.
The default implementation prints the traceback to stderr.
Parameters
----------
error: :class:`Exception`
The exception that was raised.
item: :class:`Item`
The item that failed the dispatch.
interaction: :class:`.MessageInteraction`
The interaction that led to the failure.
"""
print(f"Ignoring exception in view {self} for item {item}:", file=sys.stderr)
traceback.print_exception(error.__class__, error, error.__traceback__, file=sys.stderr)
async def _scheduled_task(self, item: Item, interaction: MessageInteraction):
try:
if self.timeout:
self.__timeout_expiry = time.monotonic() + self.timeout
allow = await self.interaction_check(interaction)
if not allow:
return
await item.callback(interaction)
except Exception as e:
return await self.on_error(e, item, interaction)
def _start_listening_from_store(self, store: ViewStore) -> None:
self.__cancel_callback = partial(store.remove_view)
if self.timeout:
loop = asyncio.get_running_loop()
if self.__timeout_task is not None:
self.__timeout_task.cancel()
self.__timeout_expiry = time.monotonic() + self.timeout
self.__timeout_task = loop.create_task(self.__timeout_task_impl())
def _dispatch_timeout(self):
if self.__stopped.done():
return
self.__stopped.set_result(True)
asyncio.create_task(self.on_timeout(), name=f"disnake-ui-view-timeout-{self.id}")
def _dispatch_item(self, item: Item, interaction: MessageInteraction):
if self.__stopped.done():
return
asyncio.create_task(
self._scheduled_task(item, interaction), name=f"disnake-ui-view-dispatch-{self.id}"
)
def refresh(self, components: List[Component]):
# TODO: this is pretty hacky at the moment
old_state: Dict[Tuple[int, str], Item] = {
(item.type.value, item.custom_id): item # type: ignore
for item in self.children
if item.is_dispatchable()
}
children: List[Item] = []
for component in _walk_all_components(components):
older: Optional[Item] = None
try:
older = old_state[(component.type.value, component.custom_id)] # type: ignore
except (KeyError, AttributeError):
# workaround for url buttons, since they're not part of `old_state`
if isinstance(component, ButtonComponent):
for child in self.children:
if (
child.type is ComponentType.button
and child.label == component.label # type: ignore
and child.url == component.url # type: ignore
):
older = child
break
if older:
older.refresh_component(component)
children.append(older)
else:
children.append(_component_to_item(component))
self.children = children
def stop(self) -> None:
"""Stops listening to interaction events from this view.
This operation cannot be undone.
"""
if not self.__stopped.done():
self.__stopped.set_result(False)
self.__timeout_expiry = None
if self.__timeout_task is not None:
self.__timeout_task.cancel()
self.__timeout_task = None
if self.__cancel_callback:
self.__cancel_callback(self)
self.__cancel_callback = None
def is_finished(self) -> bool:
"""Whether the view has finished interacting.
        :rtype: :class:`bool`
"""
return self.__stopped.done()
def is_dispatching(self) -> bool:
"""Whether the view has been added for dispatching purposes.
        :rtype: :class:`bool`
"""
return self.__cancel_callback is not None
def is_persistent(self) -> bool:
"""Whether the view is set up as persistent.
        A persistent view has a ``custom_id`` set on all of its components and
        a :attr:`timeout` of ``None``.

        :rtype: :class:`bool`
"""
return self.timeout is None and all(item.is_persistent() for item in self.children)
async def wait(self) -> bool:
"""Waits until the view has finished interacting.
A view is considered finished when :meth:`stop` is called
or it times out.
Returns
-------
:class:`bool`
If ``True``, then the view timed out. If ``False`` then
the view finished normally.
"""
return await self.__stopped
class ViewStore:
def __init__(self, state: ConnectionState):
# (component_type, message_id, custom_id): (View, Item)
self._views: Dict[Tuple[int, Optional[int], str], Tuple[View, Item]] = {}
# message_id: View
self._synced_message_views: Dict[int, View] = {}
self._state: ConnectionState = state
@property
def persistent_views(self) -> Sequence[View]:
# fmt: off
views = {
view.id: view
for (_, (view, _)) in self._views.items()
if view.is_persistent()
}
# fmt: on
return list(views.values())
def __verify_integrity(self):
to_remove: List[Tuple[int, Optional[int], str]] = []
for (k, (view, _)) in self._views.items():
if view.is_finished():
to_remove.append(k)
for k in to_remove:
del self._views[k]
def add_view(self, view: View, message_id: Optional[int] = None):
self.__verify_integrity()
view._start_listening_from_store(self)
for item in view.children:
if item.is_dispatchable():
self._views[(item.type.value, message_id, item.custom_id)] = (view, item) # type: ignore
if message_id is not None:
self._synced_message_views[message_id] = view
    def remove_view(self, view: View):
        # _views keys are (component_type, message_id, custom_id); match on type
        # and custom_id so message-bound entries are removed as well
        ids = {(i.type.value, i.custom_id) for i in view.children if i.is_dispatchable()}  # type: ignore
        for key in [k for k in self._views if (k[0], k[2]) in ids]:
            del self._views[key]
for key, value in self._synced_message_views.items():
if value.id == view.id:
del self._synced_message_views[key]
break
def dispatch(self, interaction: MessageInteraction):
self.__verify_integrity()
message_id: Optional[int] = interaction.message and interaction.message.id
component_type = try_enum_to_int(interaction.data.component_type)
custom_id = interaction.data.custom_id
key = (component_type, message_id, custom_id)
# Fallback to None message_id searches in case a persistent view
# was added without an associated message_id
value = self._views.get(key) or self._views.get((component_type, None, custom_id))
if value is None:
return
view, item = value
item.refresh_state(interaction)
view._dispatch_item(item, interaction)
def is_message_tracked(self, message_id: int):
return message_id in self._synced_message_views
def remove_message_tracking(self, message_id: int) -> Optional[View]:
return self._synced_message_views.pop(message_id, None)
def update_from_message(self, message_id: int, components: List[ComponentPayload]):
# pre-req: is_message_tracked == true
view = self._synced_message_views[message_id]
view.refresh([_component_factory(d) for d in components])
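# Usage sketch (hedged; the `button` decorator lives in the companion
# disnake.ui button module, which registers items via __discord_ui_model_type__):
#
#     class Confirm(View):
#         @disnake.ui.button(label="Confirm")
#         async def confirm(self, button, interaction):
#             await interaction.response.send_message("Confirmed")
#             self.stop()
#
# An instance is then passed when sending a message, e.g. send(..., view=Confirm()).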
|
import argparse
import os
import sys
sys.path.append("..")
import yaml
from tensorflow.keras import Model
from tensorflow.keras.applications.inception_v3 import InceptionV3
from tensorflow.keras.layers import Embedding, LSTM, Dense, Input, Bidirectional, RepeatVector, Concatenate, Dropout, Add
from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau
from datasets.googlecc import PreProcessing, get_line_count
from datasets.common import get_dataset_metadata_cfg
from preprocessing import utils
from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras.callbacks import CSVLogger
from tensorflow.keras.backend import clear_session
from utils.performance import PerformanceMetrics
if __name__ == "__main__":
clear_session()
parser = argparse.ArgumentParser(description="config")
parser.add_argument(
"--config",
nargs="?",
type=str,
default="../configs/inception_lstm_preprocessed1.yaml",
help="Configuration file to use",
)
args = parser.parse_args()
with open(args.config) as fp:
        cfg = yaml.safe_load(fp)  # yaml.load() without an explicit Loader is deprecated in PyYAML 5+
dataset_cfg = get_dataset_metadata_cfg()
model_workspace_dir = os.path.join(cfg["workspace"]["directory"], cfg["dataset"]["name"], cfg["model"]["arch"])
utils.make_directories(model_workspace_dir)
img_model = InceptionV3(weights='imagenet')
dataset_preprocessor = PreProcessing(cfg, "inception", True, False)
dataset_preprocessor.run_one_time_encoding(img_model)
# Load train, validation sets from the pre-processor
training_generator, validation_generator, test_generator = dataset_preprocessor.get_keras_generators("inception")
MAX_LEN = 40
EMBEDDING_DIM = 300
IMAGE_ENC_DIM = 300
vocab_size = get_line_count(
os.path.join(cfg["workspace"]["directory"], cfg["dataset"]["name"], "word_dictionary.txt")
)
img_input = Input(shape=(2048,))
    img_enc = Dense(IMAGE_ENC_DIM, activation="relu")(img_input)
images = RepeatVector(MAX_LEN)(img_enc)
# Text input
text_input = Input(shape=(MAX_LEN,))
embedding = Embedding(vocab_size, EMBEDDING_DIM, input_length=MAX_LEN)(text_input)
x = Concatenate()([images, embedding])
y = Bidirectional(LSTM(256, return_sequences=False))(x)
pred = Dense(vocab_size, activation='softmax')(y)
model = Model(inputs=[img_input, text_input], outputs=pred)
model.compile(loss='categorical_crossentropy', optimizer="RMSProp", metrics=['accuracy'])
model.summary()
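    # Shape sketch per sample: image (2048,) -> Dense -> (300,) -> RepeatVector -> (40, 300);
    # text (40,) -> Embedding -> (40, 300); Concatenate -> (40, 600);
    # Bidirectional LSTM(256) -> (512,); Dense softmax -> (vocab_size,) scores for the next word.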
callbacks = [
EarlyStopping(patience=10, verbose=1),
ReduceLROnPlateau(factor=0.1, patience=3, min_lr=0.00001, verbose=1),
ModelCheckpoint(
            os.path.join(model_workspace_dir, 'weights_best.hdf5'),
verbose=1,
save_best_only=False,
save_weights_only=True
),
CSVLogger(os.path.join(model_workspace_dir, 'training.csv')),
PerformanceMetrics(os.path.join(model_workspace_dir, 'performance.csv')),
]
model.fit_generator(
generator=training_generator,
validation_data=validation_generator,
epochs=100,
callbacks=callbacks
)
|
from ..widgets.FileDialog import FileDialog
from ..Qt import QtGui, QtCore, QtSvg
from ..python2_3 import asUnicode, basestring
from ..GraphicsScene import GraphicsScene
from ..graphicsItems.GraphicsLayout import GraphicsLayout
from ..graphicsItems.ScatterPlotItem import ScatterPlotItem
from ..graphicsItems.PlotItem import PlotItem
import os, re
LastExportDirectory = None
class Exporter(object):
"""
Abstract class used for exporting graphics to file / printer / whatever.
"""
allowCopy = False # subclasses set this to True if they can use the copy buffer
Exporters = []
@classmethod
def register(cls):
"""
Used to register Exporter classes to appear in the export dialog.
"""
Exporter.Exporters.append(cls)
def __init__(self, item):
"""
Initialize with the item to be exported.
Can be an individual graphics item or a scene.
"""
object.__init__(self)
self.item = item
        # tell ScatterPlotItems in the scene how they are being exported so they can adapt their rendering
scene = self.getScene()
for it in scene.items():
if isinstance(it, ScatterPlotItem):
if isinstance(item, PlotItem):
it.export_mode = "single"
else:
it.export_mode = "layout"
def parameters(self):
"""Return the parameters used to configure this exporter."""
raise Exception("Abstract method must be overridden in subclass.")
def export(self, fileName=None, toBytes=False, copy=False):
"""
If *fileName* is None, pop-up a file dialog.
If *toBytes* is True, return a bytes object rather than writing to file.
If *copy* is True, export to the copy buffer rather than writing to file.
"""
raise Exception("Abstract method must be overridden in subclass.")
def fileSaveDialog(self, filter=None, opts=None):
## Show a file dialog, call self.export(fileName) when finished.
if opts is None:
opts = {}
self.fileDialog = FileDialog()
self.fileDialog.setFileMode(QtGui.QFileDialog.AnyFile)
self.fileDialog.setAcceptMode(QtGui.QFileDialog.AcceptSave)
if filter is not None:
if isinstance(filter, basestring):
self.fileDialog.setNameFilter(filter)
elif isinstance(filter, list):
self.fileDialog.setNameFilters(filter)
global LastExportDirectory
exportDir = LastExportDirectory
if exportDir is not None:
self.fileDialog.setDirectory(exportDir)
self.fileDialog.show()
self.fileDialog.opts = opts
self.fileDialog.fileSelected.connect(self.fileSaveFinished)
return
def fileSaveFinished(self, fileName):
fileName = asUnicode(fileName)
global LastExportDirectory
LastExportDirectory = os.path.split(fileName)[0]
## If file name does not match selected extension, append it now
ext = os.path.splitext(fileName)[1].lower().lstrip('.')
selectedExt = re.search(r'\*\.(\w+)\b', asUnicode(self.fileDialog.selectedNameFilter()))
if selectedExt is not None:
selectedExt = selectedExt.groups()[0].lower()
if ext != selectedExt:
fileName = fileName + '.' + selectedExt.lstrip('.')
self.export(fileName=fileName, **self.fileDialog.opts)
def getScene(self):
if isinstance(self.item, GraphicsScene):
return self.item
else:
return self.item.scene()
def getSourceRect(self):
if isinstance(self.item, GraphicsScene):
w = self.item.getViewWidget()
return w.viewportTransform().inverted()[0].mapRect(w.rect())
else:
return self.item.sceneBoundingRect()
def getTargetRect(self):
if isinstance(self.item, GraphicsScene):
return self.item.getViewWidget().rect()
else:
return self.item.mapRectToDevice(self.item.boundingRect())
def setExportMode(self, export, opts=None):
"""
Call setExportMode(export, opts) on all items that will
be painted during the export. This informs the item
that it is about to be painted for export, allowing it to
alter its appearance temporarily
*export* - bool; must be True before exporting and False afterward
*opts* - dict; common parameters are 'antialias' and 'background'
"""
if opts is None:
opts = {}
for item in self.getPaintItems():
if hasattr(item, 'setExportMode'):
item.setExportMode(export, opts)
def getPaintItems(self, root=None):
"""Return a list of all items that should be painted in the correct order."""
if root is None:
root = self.item
preItems = []
postItems = []
if isinstance(root, QtGui.QGraphicsScene):
childs = [i for i in root.items() if i.parentItem() is None]
rootItem = []
else:
childs = root.childItems()
rootItem = [root]
childs.sort(key=lambda a: a.zValue())
while len(childs) > 0:
ch = childs.pop(0)
tree = self.getPaintItems(ch)
if int(ch.flags() & ch.ItemStacksBehindParent) > 0 or (ch.zValue() < 0 and int(ch.flags() & ch.ItemNegativeZStacksBehindParent) > 0):
preItems.extend(tree)
else:
postItems.extend(tree)
return preItems + rootItem + postItems
def render(self, painter, targetRect, sourceRect, item=None):
self.getScene().render(painter, QtCore.QRectF(targetRect), QtCore.QRectF(sourceRect))
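# Usage sketch (hedged; names are illustrative): concrete exporters subclass
# Exporter, implement parameters() and export(), and register themselves so
# they appear in the export dialog:
#
#     class MyExporter(Exporter):
#         Name = "My format"
#         def parameters(self): ...
#         def export(self, fileName=None, toBytes=False, copy=False): ...
#     MyExporter.register()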
|
"""
Django settings for src project.
Generated by 'django-admin startproject' using Django 3.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-_hyf+ngr9@jyaig(4#)i-&84&gi5u=3jh!*m@*s9$rd)d*!7r6'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'src.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'src.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
#!c:\users\behrensdeluna\documents\github\doctut\docs\venv\scripts\python.exe
# $Id: rst2odt_prepstyles.py 8346 2019-08-26 12:11:32Z milde $
# Author: Dave Kuhlman <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
Fix a word-processor-generated styles.odt for odtwriter use: Drop page size
specifications from styles.xml in STYLE_FILE.odt.
"""
# Author: Michael Schutte <[email protected]>
from __future__ import print_function
from lxml import etree
import sys
import zipfile
from tempfile import mkstemp
import shutil
import os
NAMESPACES = {
"style": "urn:oasis:names:tc:opendocument:xmlns:style:1.0",
"fo": "urn:oasis:names:tc:opendocument:xmlns:xsl-fo-compatible:1.0"
}
def prepstyle(filename):
zin = zipfile.ZipFile(filename)
styles = zin.read("styles.xml")
root = etree.fromstring(styles)
for el in root.xpath("//style:page-layout-properties",
namespaces=NAMESPACES):
for attr in el.attrib:
if attr.startswith("{%s}" % NAMESPACES["fo"]):
del el.attrib[attr]
tempname = mkstemp()
    # open the temp file in binary mode; ZipFile writes bytes, not text
    zout = zipfile.ZipFile(os.fdopen(tempname[0], "wb"), "w",
                           zipfile.ZIP_DEFLATED)
for item in zin.infolist():
if item.filename == "styles.xml":
zout.writestr(item, etree.tostring(root))
else:
zout.writestr(item, zin.read(item.filename))
zout.close()
zin.close()
shutil.move(tempname[1], filename)
def main():
args = sys.argv[1:]
if len(args) != 1:
print(__doc__, file=sys.stderr)
print("Usage: %s STYLE_FILE.odt\n" % sys.argv[0], file=sys.stderr)
sys.exit(1)
filename = args[0]
prepstyle(filename)
if __name__ == '__main__':
main()
|
import { shallow } from 'vue-test-utils';
import { testClassFromProps } from 'test/utils';
import Checkbox from 'src/modules/Checkbox/Checkbox';
describe('Checkbox', () => {
testClassFromProps(Checkbox, ['ui', 'checkbox']);
it('should create a SUI Checkbox with label', () => {
const checkbox = shallow(Checkbox, { propsData: { label: 'check' } });
expect(checkbox.find('input').element.getAttribute('type')).to.equal('checkbox');
expect(checkbox.find('label').element.textContent).to.equal('check');
});
it('should create a SUI Radio button with label', () => {
const checkbox = shallow(Checkbox, { propsData: { label: 'check', radio: true } });
expect(checkbox.find('input').element.getAttribute('type')).to.equal('radio');
expect(checkbox.find('label').element.textContent).to.equal('check');
});
it('should create a SUI Checkbox in Disabled mode', () => {
const checkbox = shallow(Checkbox, { propsData: { label: 'check', disabled: true } });
expect(checkbox.classes()).to.include('disabled');
});
it('should create a SUI Radio button in Disabled mode', () => {
const checkbox = shallow(Checkbox, { propsData: { label: 'check', radio: true, disabled: true } });
expect(checkbox.classes()).to.include('disabled');
});
});
|
/*
* Copyright (c) 2021, krishpranav
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
void pic_initialize();
void pic_ack(int intno);
void pic_disable(); |
import React from 'react'
import { Link } from 'react-router-dom'
export default ({ className = 'o-app__top-right', text = 'Cancel', ...props }) => (
<Link to='/' className={className} {...props}>
{ text }
</Link>
)
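// Usage sketch (the import name is an assumption; the export is anonymous):
//   import CancelLink from './CancelLink'
//   <CancelLink />               renders <Link to="/">Cancel</Link>
//   <CancelLink text="Back" />   overrides the label, keeping the default class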
|
const { Menu, BrowserWindow } = require('electron')
// application menu bar
var template = [
{
        label: '项目', // "Project"; on macOS the first menu's label defaults to the current app name
submenu: [
{
                label: '关于', // "About"
                accelerator: 'command+Q', // keyboard shortcut
click: function () {
var win = new BrowserWindow({
width: 500,
height: 500,
webPreferences: {
                            nodeIntegration: true, // enable Node integration in the renderer
}
                    }) // event handler: opens the "About" child window
win.loadFile('view/subView.html')
win.on('close',()=>{
win = null;
})
}
},
            { label: '检查更新' }, // "Check for Updates"
]
},
    {
        label: '选择', // "Selection"
        submenu: [
            { label: '全选' },           // "Select All"
            { label: '展开选定内容' },   // "Expand Selection"
        ]
    },
]
var m = Menu.buildFromTemplate(template)
Menu.setApplicationMenu(m)
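// Note: this module is expected to run in Electron's main process (after the
// app's 'ready' event) for Menu.setApplicationMenu to take effect; using
// 'CmdOrCtrl+Q' instead of 'command+Q' would make the accelerator
// cross-platform rather than macOS-only.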
|
int func() {
static int i = 0;
return i++;
}
int main() {
int *arr;
arr = (int[]){ 1, func(), func() };
return arr[0] + arr[1] + arr[2] + func();
}
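/* Walkthrough: the compound literal's two func() calls return 0 and 1 (their
 * order is unspecified, but the sum is the same either way), arr[0] is 1, and
 * the final func() in the return yields 2, so main returns 1 + 0 + 1 + 2 == 4. */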
|
define(["jquery", "knockout", "everest"], function($, ko, ê){
"use strict";
var host = "api.github.com",
defaultResourcePath = "/repos/PulsarBlow/everest.js",
$loader,
restApi = new ê.RestClient({
host: host,
useSSL: true // Set SSL on because github requires it
});
function ViewModel() {
this.host = ko.observable(host);
this.resourcePath = ko.observable(defaultResourcePath);
this.result = ko.observable("");
this.canPost = ko.computed(function(){
return this.host() && this.resourcePath()
}, this);
}
ViewModel.prototype.readResource = function () {
var that = this;
// Reset the host (in case your changed it in the input field)
restApi.setConfiguration({host: that.host()});
// Triggers the read and handles the outcome
$loader.removeClass("hidden");
restApi.read(that.resourcePath())
.done(function (data) {
that.result(JSON.stringify(data));
console.log("ResApiClient read success", data);
// Highlight response
$('pre.highlight').each(function (i, block) {
hljs.highlightBlock(block);
});
})
.fail(function () {
console.log("RestClient read fail", arguments);
})
.always(function () {
console.log("RestClient read completed");
$loader.addClass("hidden");
});
};
$(document).ready(function () {
$loader = $("#loader");
// Databinds the viewModel. Just some Knockout stuff here, nothing related
// to EverestJs
ko.applyBindings(new ViewModel());
});
}); |
/* -*- Mode: C; c-basic-offset:4 ; indent-tabs-mode:nil -*- */
/*
* Copyright (c) 2004-2007 The Trustees of Indiana University and Indiana
* University Research and Technology
* Corporation. All rights reserved.
* Copyright (c) 2004-2016 The University of Tennessee and The University
* of Tennessee Research Foundation. All rights
* reserved.
* Copyright (c) 2004-2008 High Performance Computing Center Stuttgart,
* University of Stuttgart. All rights reserved.
* Copyright (c) 2004-2005 The Regents of the University of California.
* All rights reserved.
* Copyright (c) 2006 Cisco Systems, Inc. All rights reserved.
* Copyright (c) 2013 Los Alamos National Security, LLC. All rights
* reserved.
* Copyright (c) 2015 Research Organization for Information Science
* and Technology (RIST). All rights reserved.
* $COPYRIGHT$
*
* Additional copyrights may follow
*
* $HEADER$
*/
#include "ompi_config.h"
#include <stdio.h>
#include "ompi/mpi/c/bindings.h"
#include "ompi/runtime/params.h"
#include "ompi/communicator/communicator.h"
#include "ompi/errhandler/errhandler.h"
#include "ompi/mca/pml/pml.h"
#include "ompi/mca/pml/base/pml_base_bsend.h"
#include "ompi/memchecker.h"
#if OMPI_BUILD_MPI_PROFILING
#if OPAL_HAVE_WEAK_SYMBOLS
#pragma weak MPI_Bsend_init = PMPI_Bsend_init
#endif
#define MPI_Bsend_init PMPI_Bsend_init
#endif
static const char FUNC_NAME[] = "MPI_Bsend_init";
int MPI_Bsend_init(const void *buf, int count, MPI_Datatype type,
int dest, int tag, MPI_Comm comm, MPI_Request *request)
{
int rc;
MEMCHECKER(
memchecker_datatype(type);
memchecker_call(&opal_memchecker_base_isaddressable, buf, count, type);
memchecker_comm(comm);
);
if ( MPI_PARAM_CHECK ) {
rc = MPI_SUCCESS;
OMPI_ERR_INIT_FINALIZE(FUNC_NAME);
if (ompi_comm_invalid(comm)) {
return OMPI_ERRHANDLER_INVOKE(MPI_COMM_WORLD, MPI_ERR_COMM, FUNC_NAME);
} else if (count < 0) {
rc = MPI_ERR_COUNT;
} else if (type == MPI_DATATYPE_NULL) {
rc = MPI_ERR_TYPE;
} else if (tag < 0 || tag > mca_pml.pml_max_tag) {
rc = MPI_ERR_TAG;
} else if (ompi_comm_peer_invalid(comm, dest) &&
(MPI_PROC_NULL != dest)) {
rc = MPI_ERR_RANK;
} else if (request == NULL) {
rc = MPI_ERR_REQUEST;
}
OMPI_ERRHANDLER_CHECK(rc, comm, rc, FUNC_NAME);
}
if (MPI_PROC_NULL == dest) {
*request = OBJ_NEW(ompi_request_t);
/* Other fields were initialized by the constructor for
ompi_request_t */
(*request)->req_type = OMPI_REQUEST_NOOP;
(*request)->req_status = ompi_request_empty.req_status;
(*request)->req_complete = REQUEST_COMPLETED;
(*request)->req_state = OMPI_REQUEST_INACTIVE;
(*request)->req_persistent = true;
(*request)->req_free = ompi_request_persistent_proc_null_free;
return MPI_SUCCESS;
}
/*
* Here, we just initialize the request -- memchecker should set the buffer in MPI_Start.
*/
rc = MCA_PML_CALL(isend_init(buf, count, type, dest, tag,
MCA_PML_BASE_SEND_BUFFERED, comm, request));
OMPI_ERRHANDLER_RETURN(rc, comm, rc, FUNC_NAME);
}
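/* Usage sketch (the standard MPI persistent-request pattern; not part of this file):
 *     MPI_Buffer_attach(space, size);              -- buffered sends need an attached buffer
 *     MPI_Bsend_init(data, n, MPI_INT, dest, tag, comm, &req);
 *     MPI_Start(&req);                             -- each start performs one buffered send
 *     MPI_Wait(&req, MPI_STATUS_IGNORE);
 *     MPI_Request_free(&req);
 */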
|
import {SUBMIT_FORM, CLEAR_FORM} from '../actions/index';
export default(state = false, action) => {
switch (action.type) {
        case SUBMIT_FORM:
            return true;
        case CLEAR_FORM:
            return false;
default:
return state;
}
}
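// Usage sketch (store shape assumed): mount this reducer under a boolean flag,
// e.g. combineReducers({ formSubmitted: reducer }); dispatching
// { type: SUBMIT_FORM } flips it to true, { type: CLEAR_FORM } resets it.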
|
#!/usr/bin/env python
#
# Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""binary_cache_builder.py: read perf.data, collect binaries needed by
it, and put them in binary_cache.
"""
from __future__ import print_function
import argparse
import os
import os.path
import re
import shutil
import subprocess
import sys
import time
from simpleperf_report_lib import *
from utils import *
class BinaryCacheBuilder(object):
"""Collect all binaries needed by perf.data in binary_cache."""
def __init__(self, config):
config_names = ['perf_data_path', 'symfs_dirs', 'ndk_path']
for name in config_names:
if name not in config:
log_exit('config for "%s" is missing' % name)
self.perf_data_path = config.get('perf_data_path')
if not os.path.isfile(self.perf_data_path):
log_exit("can't find file %s" % self.perf_data_path)
self.symfs_dirs = config.get('symfs_dirs')
for symfs_dir in self.symfs_dirs:
if not os.path.isdir(symfs_dir):
log_exit("symfs_dir '%s' is not a directory" % symfs_dir)
self.adb = AdbHelper(enable_switch_to_root=not config['disable_adb_root'])
self.readelf = ReadElf(config.get('ndk_path'))
self.binary_cache_dir = 'binary_cache'
if not os.path.isdir(self.binary_cache_dir):
os.makedirs(self.binary_cache_dir)
def build_binary_cache(self):
self._collect_used_binaries()
self._copy_binaries_from_symfs_dirs()
self._pull_binaries_from_device()
self._pull_kernel_symbols()
def _collect_used_binaries(self):
"""read perf.data, collect all used binaries and their build id (if available)."""
# A dict mapping from binary name to build_id
binaries = dict()
lib = ReportLib()
lib.SetRecordFile(self.perf_data_path)
lib.SetLogSeverity('error')
while True:
sample = lib.GetNextSample()
if sample is None:
lib.Close()
break
symbols = [lib.GetSymbolOfCurrentSample()]
callchain = lib.GetCallChainOfCurrentSample()
for i in range(callchain.nr):
symbols.append(callchain.entries[i].symbol)
for symbol in symbols:
dso_name = symbol.dso_name
if dso_name not in binaries:
binaries[dso_name] = lib.GetBuildIdForPath(dso_name)
self.binaries = binaries
def _copy_binaries_from_symfs_dirs(self):
"""collect all files in symfs_dirs."""
if not self.symfs_dirs:
return
# It is possible that the path of the binary in symfs_dirs doesn't match
# the one recorded in perf.data. For example, a file in symfs_dirs might
# be "debug/arm/obj/armeabi-v7a/libsudo-game-jni.so", but the path in
# perf.data is "/data/app/xxxx/lib/arm/libsudo-game-jni.so". So we match
# binaries if they have the same filename (like libsudo-game-jni.so)
# and same build_id.
# Map from filename to binary paths.
filename_dict = dict()
for binary in self.binaries:
index = binary.rfind('/')
filename = binary[index+1:]
paths = filename_dict.get(filename)
if paths is None:
filename_dict[filename] = paths = []
paths.append(binary)
# Walk through all files in symfs_dirs, and copy matching files to build_cache.
for symfs_dir in self.symfs_dirs:
for root, _, files in os.walk(symfs_dir):
for file in files:
paths = filename_dict.get(file)
if paths is not None:
build_id = self._read_build_id(os.path.join(root, file))
if not build_id:
continue
for binary in paths:
expected_build_id = self.binaries.get(binary)
if expected_build_id == build_id:
self._copy_to_binary_cache(os.path.join(root, file),
expected_build_id, binary)
def _copy_to_binary_cache(self, from_path, expected_build_id, target_file):
if target_file[0] == '/':
target_file = target_file[1:]
target_file = target_file.replace('/', os.sep)
target_file = os.path.join(self.binary_cache_dir, target_file)
if (os.path.isfile(target_file) and self._read_build_id(target_file) == expected_build_id
and self._file_has_symbol_table(target_file)):
# The existing file in binary_cache can provide more information, so no
# need to copy.
return
target_dir = os.path.dirname(target_file)
if not os.path.isdir(target_dir):
os.makedirs(target_dir)
log_info('copy to binary_cache: %s to %s' % (from_path, target_file))
shutil.copy(from_path, target_file)
def _pull_binaries_from_device(self):
"""pull binaries needed in perf.data to binary_cache."""
for binary in self.binaries:
build_id = self.binaries[binary]
if binary[0] != '/' or binary == "//anon" or binary.startswith("/dev/"):
# [kernel.kallsyms] or unknown, or something we can't find binary.
continue
binary_cache_file = binary[1:].replace('/', os.sep)
binary_cache_file = os.path.join(self.binary_cache_dir, binary_cache_file)
self._check_and_pull_binary(binary, build_id, binary_cache_file)
def _check_and_pull_binary(self, binary, expected_build_id, binary_cache_file):
"""If the binary_cache_file exists and has the expected_build_id, there
is no need to pull the binary from device. Otherwise, pull it.
"""
need_pull = True
if os.path.isfile(binary_cache_file):
need_pull = False
if expected_build_id:
build_id = self._read_build_id(binary_cache_file)
if expected_build_id != build_id:
need_pull = True
if need_pull:
target_dir = os.path.dirname(binary_cache_file)
if not os.path.isdir(target_dir):
os.makedirs(target_dir)
if os.path.isfile(binary_cache_file):
os.remove(binary_cache_file)
log_info('pull file to binary_cache: %s to %s' % (binary, binary_cache_file))
self._pull_file_from_device(binary, binary_cache_file)
else:
log_info('use current file in binary_cache: %s' % binary_cache_file)
def _read_build_id(self, file):
"""read build id of a binary on host."""
return self.readelf.get_build_id(file)
def _file_has_symbol_table(self, file):
"""Test if an elf file has symbol table section."""
return '.symtab' in self.readelf.get_sections(file)
def _pull_file_from_device(self, device_path, host_path):
if self.adb.run(['pull', device_path, host_path]):
return True
# In non-root device, we can't pull /data/app/XXX/base.odex directly.
# Instead, we can first copy the file to /data/local/tmp, then pull it.
filename = device_path[device_path.rfind('/')+1:]
if (self.adb.run(['shell', 'cp', device_path, '/data/local/tmp']) and
self.adb.run(['pull', '/data/local/tmp/' + filename, host_path])):
self.adb.run(['shell', 'rm', '/data/local/tmp/' + filename])
return True
log_warning('failed to pull %s from device' % device_path)
return False
def _pull_kernel_symbols(self):
file = os.path.join(self.binary_cache_dir, 'kallsyms')
if os.path.isfile(file):
os.remove(file)
if self.adb.switch_to_root():
self.adb.run(['shell', '"echo 0 >/proc/sys/kernel/kptr_restrict"'])
self.adb.run(['pull', '/proc/kallsyms', file])
def main():
parser = argparse.ArgumentParser(description=
"""Pull binaries needed by perf.data from device to binary_cache directory.""")
parser.add_argument('-i', '--perf_data_path', default='perf.data', help=
"""The path of profiling data.""")
parser.add_argument('-lib', '--native_lib_dir', nargs='+', help=
"""Path to find debug version of native shared libraries used in the app.""",
action='append')
parser.add_argument('--disable_adb_root', action='store_true', help=
"""Force adb to run in non root mode.""")
parser.add_argument('--ndk_path', nargs=1, help='Find tools in the ndk path.')
args = parser.parse_args()
config = {}
config['perf_data_path'] = args.perf_data_path
config['symfs_dirs'] = flatten_arg_list(args.native_lib_dir)
config['disable_adb_root'] = args.disable_adb_root
config['ndk_path'] = None if not args.ndk_path else args.ndk_path[0]
builder = BinaryCacheBuilder(config)
builder.build_binary_cache()
if __name__ == '__main__':
main() |
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, April 28, 2021 at 9:03:15 PM Mountain Standard Time
* Operating System: Version 14.5 (Build 18L204)
* Image Source: /System/Library/Frameworks/Intents.framework/Intents
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos. Updated by Kevin Bradley.
*/
#import <Foundation/NSValueTransformer.h>
@interface INIntentWellnessValueSlotValueTransformer : NSValueTransformer
+(BOOL)allowsReverseTransformation;
+(Class)transformedValueClass;
+(Class)reverseTransformedValueClass;
-(id)transformedValue:(id)arg1 ;
-(id)reverseTransformedValue:(id)arg1 ;
@end
|
import numpy as np
from os import path
import json
import time
import datetime
from epics import PV
from pyemittance.saving_io import numpy_save, save_config
this_dir, this_filename = path.split(__file__)
CONFIG_PATH = path.join(this_dir, "configs")
# Measurement PVs
with open(path.join(CONFIG_PATH, "meas_pv_info.json")) as _fp:
    meas_pv_info = json.load(_fp)
# in meters for emittance calc
scan_pv = PV(meas_pv_info['diagnostic']['pv']['scan'])
x_size_pv = PV(meas_pv_info['diagnostic']['pv']['xsize'])
y_size_pv = PV(meas_pv_info['diagnostic']['pv']['ysize'])
def get_beamsizes_wire(online=False, save_summary=True):
"""Main function imported by beam_io
Returns xrms, yrms, xrms_err, yrms_err
"""
# run wire scans
get_beamsize(online=online)
# read in PVs
xrms = x_size_pv.get()*1e-6
yrms = y_size_pv.get()*1e-6
# add some error estimate
xrms_err = xrms*0.02
yrms_err = yrms*0.02
if save_summary:
timestamp = (datetime.datetime.now()).strftime("%Y-%m-%d_%H-%M-%S-%f")
save_config(xrms, yrms, xrms_err, yrms_err, timestamp)
numpy_save(xrms, yrms, xrms_err, yrms_err, timestamp)
return xrms, yrms, xrms_err, yrms_err
def get_beamsize(online):
if online:
scan_pv.put(1)
time.sleep(1)
status = scan_pv.get()
if status == 2:
            while scan_pv.get() != 0:
time.sleep(5)
time.sleep(3) # to not break the wire scanner
else:
print(f"WS did not run. Status {status}.") |
/*
* Copyright (c) 2017-2019 THL A29 Limited, a Tencent company. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef TENCENTCLOUD_TIA_V20180226_MODEL_CREATEMODELREQUEST_H_
#define TENCENTCLOUD_TIA_V20180226_MODEL_CREATEMODELREQUEST_H_
#include <string>
#include <vector>
#include <map>
#include <tencentcloud/core/AbstractModel.h>
namespace TencentCloud
{
namespace Tia
{
namespace V20180226
{
namespace Model
{
/**
* CreateModel request parameter structure
*/
class CreateModelRequest : public AbstractModel
{
public:
CreateModelRequest();
~CreateModelRequest() = default;
std::string ToJsonString() const;
    /**
     * Get the model name
     * @return Name The model name
     */
    std::string GetName() const;
    /**
     * Set the model name
     * @param Name The model name
     */
    void SetName(const std::string& _name);
    /**
     * Check whether the parameter Name has been set
     * @return whether Name has been set
     */
    bool NameHasBeenSet() const;
    /**
     * Get the path of the model file to deploy
     * @return Model The path of the model file to deploy
     */
    std::string GetModel() const;
    /**
     * Set the path of the model file to deploy
     * @param Model The path of the model file to deploy
     */
    void SetModel(const std::string& _model);
    /**
     * Check whether the parameter Model has been set
     * @return whether Model has been set
     */
    bool ModelHasBeenSet() const;
    /**
     * Get the description of the model
     * @return Description The description of the model
     */
    std::string GetDescription() const;
    /**
     * Set the description of the model
     * @param Description The description of the model
     */
    void SetDescription(const std::string& _description);
    /**
     * Check whether the parameter Description has been set
     * @return whether Description has been set
     */
    bool DescriptionHasBeenSet() const;
    /**
     * Get the name of the target deployment cluster; required in `cluster mode`
     * @return Cluster The name of the target deployment cluster; required in `cluster mode`
     */
    std::string GetCluster() const;
    /**
     * Set the name of the target deployment cluster; required in `cluster mode`
     * @param Cluster The name of the target deployment cluster; required in `cluster mode`
     */
    void SetCluster(const std::string& _cluster);
    /**
     * Check whether the parameter Cluster has been set
     * @return whether Cluster has been set
     */
    bool ClusterHasBeenSet() const;
    /**
     * Get the tag of the runtime environment image; see [Serving environments](https://cloud.tencent.com/document/product/851/17320#serving-.E7.8E.AF.E5.A2.83)
     * @return RuntimeVersion The tag of the runtime environment image; see [Serving environments](https://cloud.tencent.com/document/product/851/17320#serving-.E7.8E.AF.E5.A2.83)
     */
    std::string GetRuntimeVersion() const;
    /**
     * Set the tag of the runtime environment image; see [Serving environments](https://cloud.tencent.com/document/product/851/17320#serving-.E7.8E.AF.E5.A2.83)
     * @param RuntimeVersion The tag of the runtime environment image; see [Serving environments](https://cloud.tencent.com/document/product/851/17320#serving-.E7.8E.AF.E5.A2.83)
     */
    void SetRuntimeVersion(const std::string& _runtimeVersion);
    /**
     * Check whether the parameter RuntimeVersion has been set
     * @return whether RuntimeVersion has been set
     */
    bool RuntimeVersionHasBeenSet() const;
    /**
     * Get the number of model replicas to deploy; optional in `cluster mode`
     * @return Replicas The number of model replicas to deploy; optional in `cluster mode`
     */
    uint64_t GetReplicas() const;
    /**
     * Set the number of model replicas to deploy; optional in `cluster mode`
     * @param Replicas The number of model replicas to deploy; optional in `cluster mode`
     */
    void SetReplicas(const uint64_t& _replicas);
    /**
     * Check whether the parameter Replicas has been set
     * @return whether Replicas has been set
     */
    bool ReplicasHasBeenSet() const;
    /**
     * Get whether the service is exposed on the public or the private network; defaults to public; optional in `cluster mode`
     * @return Expose Whether the service is exposed on the public or the private network; defaults to public; optional in `cluster mode`
     */
    std::string GetExpose() const;
    /**
     * Set whether the service is exposed on the public or the private network; defaults to public; optional in `cluster mode`
     * @param Expose Whether the service is exposed on the public or the private network; defaults to public; optional in `cluster mode`
     */
    void SetExpose(const std::string& _expose);
    /**
     * Check whether the parameter Expose has been set
     * @return whether Expose has been set
     */
    bool ExposeHasBeenSet() const;
    /**
     * Get the deployment mode; the value `serverless` selects `serverless mode`; any other value is the service scale under `cluster mode`, in a form such as `2U4G1P`; see [custom training scales](https://cloud.tencent.com/document/product/851/17319#.E8.87.AA.E5.AE.9A.E4.B9.89.E7.9A.84.E8.AE.AD.E7.BB.83.E8.A7.84.E6.A8.A1)
     * @return ServType The deployment mode; `serverless` selects `serverless mode`; any other value is the service scale under `cluster mode`, e.g. `2U4G1P`; see [custom training scales](https://cloud.tencent.com/document/product/851/17319#.E8.87.AA.E5.AE.9A.E4.B9.89.E7.9A.84.E8.AE.AD.E7.BB.83.E8.A7.84.E6.A8.A1)
     */
    std::string GetServType() const;
    /**
     * Set the deployment mode; the value `serverless` selects `serverless mode`; any other value is the service scale under `cluster mode`, in a form such as `2U4G1P`; see [custom training scales](https://cloud.tencent.com/document/product/851/17319#.E8.87.AA.E5.AE.9A.E4.B9.89.E7.9A.84.E8.AE.AD.E7.BB.83.E8.A7.84.E6.A8.A1)
     * @param ServType The deployment mode; `serverless` selects `serverless mode`; any other value is the service scale under `cluster mode`, e.g. `2U4G1P`; see [custom training scales](https://cloud.tencent.com/document/product/851/17319#.E8.87.AA.E5.AE.9A.E4.B9.89.E7.9A.84.E8.AE.AD.E7.BB.83.E8.A7.84.E6.A8.A1)
     */
    void SetServType(const std::string& _servType);
    /**
     * Check whether the parameter ServType has been set
     * @return whether ServType has been set
     */
    bool ServTypeHasBeenSet() const;
    /**
     * Get additional optional configuration for `serverless mode`; see [deploying with serverless functions](https://cloud.tencent.com/document/product/851/17049#.E5.88.A9.E7.94.A8.E6.97.A0.E6.9C.8D.E5.8A.A1.E5.99.A8.E5.87.BD.E6.95.B0.E9.83.A8.E7.BD.B2)
     * @return RuntimeConf Additional optional configuration for `serverless mode`; see [deploying with serverless functions](https://cloud.tencent.com/document/product/851/17049#.E5.88.A9.E7.94.A8.E6.97.A0.E6.9C.8D.E5.8A.A1.E5.99.A8.E5.87.BD.E6.95.B0.E9.83.A8.E7.BD.B2)
     */
    std::vector<std::string> GetRuntimeConf() const;
    /**
     * Set additional optional configuration for `serverless mode`; see [deploying with serverless functions](https://cloud.tencent.com/document/product/851/17049#.E5.88.A9.E7.94.A8.E6.97.A0.E6.9C.8D.E5.8A.A1.E5.99.A8.E5.87.BD.E6.95.B0.E9.83.A8.E7.BD.B2)
     * @param RuntimeConf Additional optional configuration for `serverless mode`; see [deploying with serverless functions](https://cloud.tencent.com/document/product/851/17049#.E5.88.A9.E7.94.A8.E6.97.A0.E6.9C.8D.E5.8A.A1.E5.99.A8.E5.87.BD.E6.95.B0.E9.83.A8.E7.BD.B2)
     */
    void SetRuntimeConf(const std::vector<std::string>& _runtimeConf);
    /**
     * Check whether the parameter RuntimeConf has been set
     * @return whether RuntimeConf has been set
     */
    bool RuntimeConfHasBeenSet() const;
private:
    /**
     * The model name
     */
    std::string m_name;
    bool m_nameHasBeenSet;
    /**
     * The path of the model file to deploy
     */
    std::string m_model;
    bool m_modelHasBeenSet;
    /**
     * The description of the model
     */
    std::string m_description;
    bool m_descriptionHasBeenSet;
    /**
     * The name of the target deployment cluster; required in `cluster mode`
     */
    std::string m_cluster;
    bool m_clusterHasBeenSet;
    /**
     * The tag of the runtime environment image; see [Serving environments](https://cloud.tencent.com/document/product/851/17320#serving-.E7.8E.AF.E5.A2.83)
     */
    std::string m_runtimeVersion;
    bool m_runtimeVersionHasBeenSet;
    /**
     * The number of model replicas to deploy; optional in `cluster mode`
     */
    uint64_t m_replicas;
    bool m_replicasHasBeenSet;
    /**
     * Whether the service is exposed on the public or the private network; defaults to public; optional in `cluster mode`
     */
    std::string m_expose;
    bool m_exposeHasBeenSet;
    /**
     * The deployment mode; `serverless` selects `serverless mode`; any other value is the service scale under `cluster mode`, e.g. `2U4G1P`; see [custom training scales](https://cloud.tencent.com/document/product/851/17319#.E8.87.AA.E5.AE.9A.E4.B9.89.E7.9A.84.E8.AE.AD.E7.BB.83.E8.A7.84.E6.A8.A1)
     */
    std::string m_servType;
    bool m_servTypeHasBeenSet;
    /**
     * Additional optional configuration for `serverless mode`; see [deploying with serverless functions](https://cloud.tencent.com/document/product/851/17049#.E5.88.A9.E7.94.A8.E6.97.A0.E6.9C.8D.E5.8A.A1.E5.99.A8.E5.87.BD.E6.95.B0.E9.83.A8.E7.BD.B2)
     */
    std::vector<std::string> m_runtimeConf;
    bool m_runtimeConfHasBeenSet;
};
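// Minimal usage sketch (hypothetical values; uses only the members declared above):
//   CreateModelRequest req;
//   req.SetName("my-model");
//   req.SetModel("cos://my-bucket/models/saved_model");
//   req.SetServType("serverless");
//   std::string payload = req.ToJsonString();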
}
}
}
}
#endif // !TENCENTCLOUD_TIA_V20180226_MODEL_CREATEMODELREQUEST_H_
|
"""
Copyright (c) 2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from copy import deepcopy
from typing import Set, List
import numpy as np
import tensorflow as tf
from nncf import NNCFConfig
from nncf.common.graph.transformations.commands import TransformationPriority
from nncf.common.sparsity.schedulers import SPARSITY_SCHEDULERS
from nncf.common.sparsity.schedulers import SparsityScheduler
from nncf.common.sparsity.statistics import RBSparsityStatistics
from nncf.common.statistics import NNCFStatistics
from nncf.config.extractors import extract_algo_specific_config
from nncf.tensorflow.algorithm_selector import TF_COMPRESSION_ALGORITHMS
from nncf.tensorflow.api.compression import TFCompressionAlgorithmBuilder
from nncf.tensorflow.graph.converter import TFModelConverterFactory
from nncf.tensorflow.graph.transformations.commands import TFInsertionCommand
from nncf.tensorflow.graph.transformations.commands import TFLayerWeight
from nncf.tensorflow.graph.transformations.layout import TFTransformationLayout
from nncf.tensorflow.graph.utils import get_original_name_and_instance_idx
from nncf.tensorflow.graph.utils import get_nncf_operations
from nncf.tensorflow.sparsity.base_algorithm import BaseSparsityController
from nncf.tensorflow.sparsity.base_algorithm import SPARSITY_LAYER_METATYPES
from nncf.tensorflow.sparsity.rb.loss import SparseLoss
from nncf.tensorflow.sparsity.rb.operation import RBSparsifyingWeight
from nncf.tensorflow.sparsity.collector import TFSparseModelStatisticsCollector
from nncf.common.utils.helpers import should_consider_scope
from nncf.common.accuracy_aware_training.training_loop import ADAPTIVE_COMPRESSION_CONTROLLERS
from nncf.common.schedulers import StubCompressionScheduler
@TF_COMPRESSION_ALGORITHMS.register('rb_sparsity')
class RBSparsityBuilder(TFCompressionAlgorithmBuilder):
def __init__(self, config: NNCFConfig, should_init: bool = True):
super().__init__(config, should_init)
self.ignored_scopes = self._algo_config.get('ignored_scopes', [])
self._op_names = []
def get_transformation_layout(self, model: tf.keras.Model) -> TFTransformationLayout:
converter = TFModelConverterFactory.create(model)
nncf_graph = converter.convert()
transformations = TFTransformationLayout()
processed_shared_layer_names = set() # type: Set[str]
for node in nncf_graph.get_all_nodes():
if node.is_shared():
target_layer_name, _ = get_original_name_and_instance_idx(node.node_name)
if target_layer_name in processed_shared_layer_names:
continue
processed_shared_layer_names.add(target_layer_name)
if not (node.metatype in SPARSITY_LAYER_METATYPES and
should_consider_scope(node.node_name, ignored_scopes=self.ignored_scopes)):
continue
_, layer_info = converter.get_layer_info_for_node(node.node_name)
for weight_def in node.metatype.weight_definitions:
op_name = self._get_rb_sparsity_operation_name(node.node_name,
weight_def.weight_attr_name)
self._op_names.append(op_name)
transformations.register(
TFInsertionCommand(
target_point=TFLayerWeight(layer_info.layer_name, weight_def.weight_attr_name),
callable_object=RBSparsifyingWeight(op_name),
priority=TransformationPriority.SPARSIFICATION_PRIORITY
))
return transformations
def _get_rb_sparsity_operation_name(self, layer_name: str, weight_attr_name: str) -> str:
return f'{layer_name}_{weight_attr_name}_rb_sparsity_weight'
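    # e.g. a node named 'conv2d' with weight attribute 'kernel' yields the op name
    # 'conv2d_kernel_rb_sparsity_weight'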
def _build_controller(self, model: tf.keras.Model) -> 'RBSparsityController':
"""
        Simple implementation that builds the controller without setting the builder state or loading the controller's state.
Should be called once the compressed model target_model is fully constructed.
:param model: The model with additional modifications necessary to enable
algorithm-specific compression during fine-tuning.
:return: The instance of the `RBSparsityController`.
"""
return RBSparsityController(model, self.config, self._op_names)
def initialize(self, model: tf.keras.Model) -> None:
pass
@ADAPTIVE_COMPRESSION_CONTROLLERS.register('tf_rb_sparsity')
class RBSparsityController(BaseSparsityController):
def __init__(self, target_model, config: NNCFConfig, op_names: List[str]):
super().__init__(target_model, op_names)
algo_config = extract_algo_specific_config(config, "rb_sparsity")
sparsity_init = algo_config.get('sparsity_init', 0)
params = deepcopy(algo_config.get('params', {}))
params['sparsity_init'] = sparsity_init
sparsity_level_mode = params.get('sparsity_level_setting_mode', 'global')
if sparsity_level_mode == 'local':
            raise NotImplementedError('RB sparsity algorithm does not support local sparsity loss')
target_ops = []
for wrapped_layer, _, op in get_nncf_operations(self.model, self._op_names):
target_ops.append(
(op, wrapped_layer.get_operation_weights(op.name))
)
self._loss = SparseLoss(target_ops)
schedule_type = params.get('schedule', 'exponential')
if schedule_type == 'adaptive':
            raise NotImplementedError('RB sparsity algorithm does not support an adaptive scheduler')
scheduler_cls = SPARSITY_SCHEDULERS.get(schedule_type)
self._scheduler = scheduler_cls(self, params)
self.set_sparsity_level(sparsity_init)
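    # Illustrative algo section of an NNCFConfig that exercises this controller;
    # the keys are the ones read above, the values are hypothetical:
    #   {"algorithm": "rb_sparsity", "sparsity_init": 0.05,
    #    "params": {"schedule": "exponential", "sparsity_level_setting_mode": "global"}}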
@property
def scheduler(self) -> SparsityScheduler:
return self._scheduler
@property
def loss(self) -> SparseLoss:
return self._loss
def set_sparsity_level(self, sparsity_level):
self._loss.set_target_sparsity_loss(sparsity_level)
def freeze(self):
self._loss.disable()
def statistics(self, quickly_collected_only: bool = False) -> NNCFStatistics:
collector = TFSparseModelStatisticsCollector(self.model, self._op_names)
model_stats = collector.collect()
sparse_prob_sum = 0.0
num_weights = 0
for wrapped_layer, _, op in get_nncf_operations(self.model, self._op_names):
operation_weights = wrapped_layer.get_operation_weights(op.name)
mask = op.get_mask(operation_weights)
sparse_prob_sum += tf.math.reduce_sum(tf.math.sigmoid(mask)).numpy().item()
num_weights += np.prod(mask.shape.as_list()).item()
mean_sparse_prob = 1.0 - (sparse_prob_sum / num_weights)
target_sparsity_level = self.scheduler.current_sparsity_level
stats = RBSparsityStatistics(model_stats, target_sparsity_level, mean_sparse_prob)
nncf_stats = NNCFStatistics()
nncf_stats.register('rb_sparsity', stats)
return nncf_stats
@property
def compression_rate(self) -> float:
return self._loss.target_sparsity_rate
@compression_rate.setter
def compression_rate(self, compression_rate: float) -> None:
self.set_sparsity_level(compression_rate)
def disable_scheduler(self):
self._scheduler = StubCompressionScheduler()
|
"""
This part reuses code from https://github.com/MandyMo/pytorch_HMR/blob/master/src/util.py
which is part of a PyTorch port of SMPL.
Thanks to Zhang Xiong (MandyMo) for making this great code available on GitHub!
"""
import argparse
from torch.autograd import gradcheck
import torch
from torch.autograd import Variable
from manopth import argutils
def quat2mat(quat):
"""Convert quaternion coefficients to rotation matrix.
Args:
quat: size = [batch_size, 4] 4 <===>(w, x, y, z)
Returns:
Rotation matrix corresponding to the quaternion -- size = [batch_size, 3, 3]
"""
    norm_quat = quat / quat.norm(p=2, dim=1, keepdim=True)
    w, x, y, z = norm_quat[:, 0], norm_quat[:, 1], norm_quat[:, 2], norm_quat[:, 3]
batch_size = quat.size(0)
w2, x2, y2, z2 = w.pow(2), x.pow(2), y.pow(2), z.pow(2)
wx, wy, wz = w * x, w * y, w * z
xy, xz, yz = x * y, x * z, y * z
rotMat = torch.stack([
w2 + x2 - y2 - z2, 2 * xy - 2 * wz, 2 * wy + 2 * xz, 2 * wz + 2 * xy,
w2 - x2 + y2 - z2, 2 * yz - 2 * wx, 2 * xz - 2 * wy, 2 * wx + 2 * yz,
w2 - x2 - y2 + z2
],
dim=1).view(batch_size, 3, 3)
return rotMat
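# e.g. the identity quaternion maps to the identity rotation:
#   quat2mat(torch.tensor([[1., 0., 0., 0.]]))  # -> a batch of one 3x3 identity matrix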
def batch_rodrigues(axisang):
#axisang N x 3
axisang_norm = torch.norm(axisang + 1e-8, p=2, dim=1)
angle = torch.unsqueeze(axisang_norm, -1)
axisang_normalized = torch.div(axisang, angle)
angle = angle * 0.5
v_cos = torch.cos(angle)
v_sin = torch.sin(angle)
quat = torch.cat([v_cos, v_sin * axisang_normalized], dim=1)
rot_mat = quat2mat(quat)
rot_mat = rot_mat.view(rot_mat.shape[0], 9)
return rot_mat
def th_get_axis_angle(vector):
angle = torch.norm(vector, 2, 1)
axes = vector / angle.unsqueeze(1)
return axes, angle
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--batch_size', default=1, type=int)
parser.add_argument('--cuda', action='store_true')
args = parser.parse_args()
argutils.print_args(args)
n_components = 6
rot = 3
inputs = torch.rand(args.batch_size, rot)
inputs_var = Variable(inputs.double(), requires_grad=True)
if args.cuda:
inputs = inputs.cuda()
# outputs = batch_rodrigues(inputs)
test_function = gradcheck(batch_rodrigues, (inputs_var, ))
    print('batch test passed!')
    # The gradchecks below exercise OpenCV-style Rodrigues autograd functions
    # (th_cv2_rod_sub_id, th_cv2_rod) that are not defined in this module; they
    # are kept for reference but disabled so the script runs as-is.
    # inputs = torch.rand(rot)
    # inputs_var = Variable(inputs.double(), requires_grad=True)
    # test_function = gradcheck(th_cv2_rod_sub_id.apply, (inputs_var, ))
    # print('th_cv2_rod_sub_id test passed!')
    # inputs = torch.rand(rot)
    # inputs_var = Variable(inputs.double(), requires_grad=True)
    # test_th = gradcheck(th_cv2_rod.apply, (inputs_var, ))
    # print('th_cv2_rod test passed!')
|
const test = require("ava");
const StudentAPI = require("../");
const { isObject } = require("lodash");
test("basic", (t) => {
const config = {
baseURL: "http://dkh.tlu.edu.vn",
};
const api = new StudentAPI();
api.config(config);
t.is(api.isAuthenticated, false);
t.is(api.user, null);
});
test("authenicated", async (t) => {
const config = {
baseURL: "http://dkh.tlu.edu.vn",
};
const api = new StudentAPI();
api.config(config);
const user = { idUser: "", passwordUser: "" };
await api.login(user);
t.is(api.isAuthenticated, true);
t.is(api.user, user);
});
test("getStudentTimeTable", async (t) => {
const config = {
baseURL: "http://dkh.tlu.edu.vn",
};
const api = new StudentAPI();
api.config(config);
const user = { idUser: "", passwordUser: "" };
await api.login(user);
const lich = await api.studentTimeTable("2_2019_2020");
t.is(api.isAuthenticated, true);
t.is(api.user, user);
t.is(!!lich.length, true);
});
test("getMarks", async (t) => {
const config = {
baseURL: "http://dkh.tlu.edu.vn",
};
const api = new StudentAPI();
api.config(config);
const user = { idUser: "", passwordUser: "" };
await api.login(user);
const lich = await api.getMarks();
t.is(api.isAuthenticated, true);
t.is(api.user, user);
t.is(isObject(lich), true);
});
|
"use strict";
var editor_1 = require("./editor");
var core_coderoad_1 = require("core-coderoad");
module.exports = core_coderoad_1.default(editor_1.default);
//# sourceMappingURL=index.js.map |
var m = require('mithril');
module.exports = m.trust('<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" baseProfile="full" width="24" height="24" viewBox="0 0 24.00 24.00" enable-background="new 0 0 24.00 24.00" xml:space="preserve"><path fill="#000000" fill-opacity="1" stroke-width="0.2" stroke-linejoin="round" d="M 9,22C 8.44771,22 8,21.5523 8,21L 7.99999,18L 4,18C 2.89543,18 2,17.1046 2,16L 2.0094,3.99808C 2.0094,2.89308 2.89538,1.99809 3.99939,1.99809L 19.9994,1.99809C 21.1034,1.99809 21.9994,2.89308 21.9994,3.99808L 21.9994,15.9981C 21.9994,17.1021 21.1034,17.9981 19.9994,17.9981L 13.904,17.9997L 10.1979,21.7058C 10.0053,21.8984 9.75367,21.996 9.50123,21.9986L 9.49999,22L 9,22 Z M 13,10L 13,6L 11,6L 11,10L 13,10 Z M 13,14L 13,12L 11,12L 11,14L 13,14 Z "/></svg>');
|
# -*- coding: utf-8 -*-
"""
Source: https://github.com/awesto/django-shop/blob/7378f024b0982ba5bf48fd07ffb18b247cba02a5/shop/models/order.py
"""
from __future__ import unicode_literals
from six import with_metaclass
from decimal import Decimal
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.db import models, transaction
from django.db.models.aggregates import Sum
try:
from django.urls import NoReverseMatch
except ImportError:
from django.core.urlresolvers import NoReverseMatch
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _, pgettext_lazy, get_language_from_request
from django.utils.six.moves.urllib.parse import urljoin
from django.utils.encoding import force_text
from django.utils.module_loading import import_string
from rest_framework.exceptions import PermissionDenied
from django_fsm import FSMField, transition
from ipware.ip import get_ip
from edw import deferred
from edw.models.entity import EntityModel, BaseEntityManager
from edw.models.mixins.entity.fsm import FSMMixin
from edw.models.data_mart import DataMartModel
from edw.models.term import TermModel
from edw_shop.conf import app_settings
from edw_shop.models.cart import CartItemModel
from edw_shop.models.fields import JSONField
from edw_shop.money.fields import MoneyField, MoneyMaker
from .product import BaseProduct, ProductModel
from sid.rest.serializers.relation import ObjRelationSerializer
_shared_system_flags_term_restriction = (
TermModel.system_flags.delete_restriction
| TermModel.system_flags.change_parent_restriction
| TermModel.system_flags.change_slug_restriction
| TermModel.system_flags.change_semantic_rule_restriction
| TermModel.system_flags.has_child_restriction
)
_shared_system_flags_datamart_restriction = (
DataMartModel.system_flags.delete_restriction
| DataMartModel.system_flags.change_parent_restriction
| DataMartModel.system_flags.change_slug_restriction
| DataMartModel.system_flags.change_terms_restriction
)
class OrderQuerySet(models.QuerySet):
def _filter_or_exclude(self, negate, *args, **kwargs):
"""
Emulate filter queries on the Order model using a pseudo slug attribute.
        This allows order numbers, formatted by the method `Order.get_number()`, to be used as slugs.
"""
lookup_kwargs = {}
for key, lookup in kwargs.items():
try:
index = key.index('__')
field_name, lookup_type = key[:index], key[index:]
except ValueError:
field_name, lookup_type = key, ''
if field_name == 'slug':
key, lookup = self.model.resolve_number(lookup).popitem()
lookup_kwargs.update({key + lookup_type: lookup})
else:
lookup_kwargs.update({key: lookup})
return super(OrderQuerySet, self)._filter_or_exclude(negate, *args, **lookup_kwargs)
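    # e.g. Order.objects.filter(slug='42') is rewritten via Order.resolve_number('42')
    # into a primary-key lookup, i.e. filter(pk='42').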
class OrderManager(BaseEntityManager):
#_queryset_class = OrderQuerySet
def create_from_cart(self, cart, request):
"""
This creates a new empty Order object with a valid order number. This order is not
populated with any cart items yet. This must be performed in the next step by calling
``order.populate_from_cart(cart, request)``.
If this method is not invoked, the order object remains in state ``new``.
"""
cart.update(request)
cart.customer.get_or_assign_number()
order = self.model(customer=cart.customer, _subtotal=Decimal(0), _total=Decimal(0), stored_request=self.stored_request(request))
order.get_or_assign_number()
order.save()
return order
def stored_request(self, request):
"""
Extract useful information about the request to be used for emulating a Django request
during offline rendering.
"""
return {
'language': get_language_from_request(request),
'absolute_base_uri': request.build_absolute_uri('/'),
'remote_ip': get_ip(request),
'user_agent': request.META.get('HTTP_USER_AGENT'),
}
#todo: delete
def filter_from_request(self, request):
"""
Return a queryset containing the orders for the customer associated with the given
request object.
"""
if request.customer.is_visitor():
detail = _("Only signed in customers can view their orders")
raise PermissionDenied(detail=detail)
return self.get_queryset().filter(customer=request.customer).order_by('-updated_at', )
def get_summary_url(self):
# """
# Returns the URL of the page with the list view for all orders related to the current customer
# """
# if hasattr(self, '_summary_url'):
# return self._summary_url
# try: # via CMS pages
# page = Page.objects.public().get(reverse_id='shop-order')
# except Page.DoesNotExist:
# page = Page.objects.public().filter(application_urls='OrderApp').first()
# if page:
# self._summary_url = page.get_absolute_url()
# else:
# try: # through hardcoded urlpatterns
# self._summary_url = reverse('shop-order')
# except NoReverseMatch:
# self._summary_url = 'cms-page_or_view_with_reverse_id=shop-order_does_not_exist/'
# return self._summary_url
return "/"
def get_latest_url(self):
"""
Returns the URL of the page with the detail view for the latest order related to the
current customer. This normally is the thank-you view.
"""
return "/"
@python_2_unicode_compatible
class BaseOrder(FSMMixin, EntityModel.materialized):
"""
An Order is the "in process" counterpart of the shopping cart, which freezes the state of the
cart on the moment of purchase. It also holds stuff like the shipping and billing addresses,
and keeps all the additional entities, as determined by the cart modifiers.
"""
DATA_MART_NAME_PATTERN = '{}-dm'
TRANSITION_TARGETS = {
'new': _("New order"),
'processed': _("Processed by manager"),
'in_work': _("In work"), # Проведен
'shipped': _("Shipped"), # Отгружен
'completed': _("Completed"),
'canceled': _("Canceled"),
}
VIEW_COMPONENT_LIST = 'order_list'
VIEW_COMPONENTS = (
(VIEW_COMPONENT_LIST, _('List')),
)
decimalfield_kwargs = {
'max_digits': 30,
'decimal_places': 2,
}
decimal_exp = Decimal('.' + '0' * decimalfield_kwargs['decimal_places'])
customer = deferred.ForeignKey(
'BaseCustomer',
verbose_name=_("Customer"),
related_name='orders',
)
status = FSMField(
default='new',
protected=True,
verbose_name=_("Status"),
)
currency = models.CharField(
max_length=7,
editable=False,
help_text=_("Currency in which this order was concluded"),
)
_subtotal = models.DecimalField(
_("Subtotal"),
**decimalfield_kwargs
)
_total = models.DecimalField(
_("Total"),
**decimalfield_kwargs
)
extra = JSONField(
verbose_name=_("Extra fields"),
help_text=_("Arbitrary information for this order object on the moment of purchase."),
blank=True,
null=True
)
stored_request = JSONField(
verbose_name=_("Stored request"),
help_text=_("Parts of the Request objects on the moment of purchase."),
)
objects = OrderManager()
class Meta:
abstract = True
verbose_name = pgettext_lazy('order_models', "Order")
verbose_name_plural = pgettext_lazy('order_models', "Orders")
def __str__(self):
return self.get_number()
def __repr__(self):
return "<{}(pk={})>".format(self.__class__.__name__, self.pk)
class RESTMeta:
lookup_fields = ('id',)
#validators = []
        exclude = ['_subtotal', '_total', 'stored_request', 'images', 'files', 'status']  # todo: make 'status' read-only
include = {
'transition': ('rest_framework.serializers.CharField', {
'write_only': True,
'required': False
}),
'state': ('rest_framework.serializers.CharField', {
'source': 'get_state',
'read_only': True
}),
'subtotal': ('rest_framework.serializers.DecimalField', {
'max_digits': 10,
'decimal_places': 2,
'required': False
#'read_only': True
}),
'total': ('rest_framework.serializers.DecimalField', {
'max_digits': 10,
'decimal_places': 2,
'required': False
# 'read_only': True
}),
'amount_paid': ('rest_framework.serializers.DecimalField', {
'max_digits': 10,
'decimal_places': 2,
'read_only': True
}),
'outstanding_amount': ('rest_framework.serializers.DecimalField', {
'max_digits': 10,
'decimal_places': 2,
'read_only': True
}), #app_settings.ORDER_ITEM_SERIALIZER
'items': ('edw_shop.serializers.defaults.OrderItemSerializer', {
'read_only': True,
'many': True
}),
'cancelable': ('rest_framework.serializers.BooleanField', {
'read_only': True,
}),
'rels': ('rest_framework.serializers.ListField', {
'child': ObjRelationSerializer(),
'required': False,
'write_only': True
}),
#todo: delete
#'cancel': ('rest_framework.serializers.BooleanField', {
# 'write_only': True,
# 'default':False,
#})
}
filters = {
'order_status': ("edw_shop.rest.filters.order.stateFilter", {
'name': 'status'
}),
}
"""
'shipped': _("Shipped"), # Отгружен
'completed': _("Completed"),
'canceled': _("Canceled"),
"""
def do_transition(self, transition_name):
trans_func = getattr(self, transition_name)
return trans_func()
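    # e.g. order.do_transition('new_to_processed') invokes the corresponding
    # django-fsm transition method declared below.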
@transition(
field=status, source='new', target='processed',
custom=dict(admin=True, button_name=_("New to processed"))
)
def new_to_processed(self):
pass
@transition(
field=status, source='new', target='in_work',
custom=dict(admin=True, button_name=_("New to in work"))
)
def new_to_in_work(self):
pass
@transition(
field=status, source='new', target='shipped',
custom=dict(admin=True, button_name=_("New to shipped"))
)
def new_to_shipped(self):
pass
@transition(
field=status, source='new', target='completed',
custom=dict(admin=True, button_name=_("New to completed"))
)
def new_to_completed(self):
pass
@transition(
field=status, source='new', target='canceled',
custom=dict(admin=True, button_name=_("New to canceled"))
)
def new_to_canceled(self):
pass
@transition(
field=status, source='processed', target='canceled',
custom=dict(admin=True, button_name=_("Processed to canceled"))
)
def processed_to_canceled(self):
pass
@transition(
field=status, source='processed', target='in_work',
custom=dict(admin=True, button_name=_("Processed to in_work"))
)
def processed_to_in_work(self):
pass
@transition(
field=status, source='processed', target='shipped',
custom=dict(admin=True, button_name=_("Processed to in_work"))
)
def processed_to_shipped(self):
pass
@transition(
field=status, source='processed', target='completed',
custom=dict(admin=True, button_name=_("Processed to completed"))
)
def processed_to_completed(self):
pass
@transition(
field=status, source='in_work', target='shipped',
custom=dict(admin=True, button_name=_("In work to shipped"))
)
def in_work_to_shipped(self):
pass
@transition(
field=status, source='in_work', target='completed',
custom=dict(admin=True, button_name=_("In work to completed"))
)
def in_work_to_completed(self):
pass
@transition(
field=status, source='in_work', target='canceled',
custom=dict(admin=True, button_name=_("In work to canceled"))
)
def in_work_to_canceled(self):
pass
@transition(
field=status, source='shipped', target='completed',
custom=dict(admin=True, button_name=_("Shipped to completed"))
)
def shipped_to_completed(self):
pass
@transition(
field=status, source='shipped', target='canceled',
custom=dict(admin=True, button_name=_("Shipped to canceled"))
)
def shipped_to_canceled(self):
pass
@transition(
field=status, source='canceled', target='new',
custom=dict(admin=True, button_name=_("Canceled to new"))
)
def canceled_to_new(self):
pass
def get_summary_extra(self, context):
extra = {
'url': self.get_detail_url(),
'number': self.get_number(),
'status': self.status_name(),
'cancelable': self.cancelable(),
'subtotal': self.subtotal,
'total': self.total,
'created_at': self.created_at,
'updated_at': self.updated_at
}
return extra
def get_state(self):
return self.status
@classmethod
def validate_data_mart_model(cls):
        '''
        Create the data mart structure corresponding to the subject model of the objects. Descendant models
        also use this method to build the data mart hierarchy.
        :return:
        '''
class_name = 'order'
with transaction.atomic():
root_cls_dm, is_created = DataMartModel.objects.get_or_create(
slug=cls.DATA_MART_NAME_PATTERN.format(class_name),
parent=None,
defaults={
'name': force_text(cls._meta.verbose_name_plural)
}
)
cls_dm = root_cls_dm
if is_created:
try:
dm_term = cls.get_entities_types()[class_name]
except KeyError:
dm_term = cls.get_entities_types(from_cache=False)[class_name]
cls_dm.terms.add(dm_term.id)
cls_dm.system_flags = _shared_system_flags_datamart_restriction
cls_dm.save()
def get_or_assign_number(self):
"""
Hook to get or to assign the order number. It shall be invoked, every time an Order
object is created. If you prefer to use an order number which differs from the primary
key, then override this method.
"""
return self.get_number()
def get_number(self):
"""
Hook to get the order number.
A class inheriting from Order may transform this into a string which is better readable.
"""
return str(self.pk)
@classmethod
def resolve_number(cls, number):
"""
Return a lookup pair used to filter down a queryset.
It should revert the effect from the above method `get_number`.
"""
return dict(pk=number)
@property
def subtotal(self):
"""
The summed up amount for all ordered items excluding extra order lines.
"""
# MoneyMaker(self.currency)(self._subtotal)
return self._subtotal
@property
def total(self):
"""
The final total to charge for this order.
"""
# MoneyMaker(self.currency)(self._total)
return self._total
@classmethod
def round_amount(cls, amount):
if amount and amount.is_finite():
return Decimal(amount).quantize(cls.decimal_exp)
return Decimal('0').quantize(cls.decimal_exp)
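    # e.g. with decimal_places=2: round_amount(Decimal('12.345')) -> Decimal('12.34')
    # (quantize uses Decimal's default ROUND_HALF_EVEN rounding)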
def get_detail_url(self, data_mart=None):
return reverse('order_detail', args=[self.pk])
#def get_absolute_url(self, request=None, format=None):
# """
# Returns the URL for the detail view of this order.
# """
# return urljoin(OrderModel.objects.get_summary_url(), self.get_number())
def populate_dialog_forms(self, cart, request):
dialog_forms = set([import_string(fc) for fc in app_settings.DIALOG_FORMS])
if dialog_forms:
for form_class in dialog_forms:
form_class.populate_from_cart(request, cart, self)
@transaction.atomic
def populate_from_cart(self, cart, request):
"""
Populate the order object with the fields from the given cart.
For each cart item a corresponding order item is created populating its fields and removing
that cart item.
        Override this method in case a customized cart has some fields which have to be
        transferred to the order.
"""
for cart_item in cart.items.active():
cart_item.update(request)
order_item = OrderItemModel(order=self)
try:
order_item.populate_from_cart_item(cart_item, request)
order_item.save()
cart_item.delete()
except CartItemModel.DoesNotExist:
pass
self._subtotal = Decimal(cart.subtotal)
self._total = Decimal(cart.total)
self.extra = dict(cart.extra)
self.extra.update(rows=[(modifier, extra_row.data) for modifier, extra_row in cart.extra_rows.items()])
self.save()
self.populate_dialog_forms(cart, request)
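    # Typical checkout sequence (sketch; `cart` and `request` come from the caller):
    #   order = OrderModel.objects.create_from_cart(cart, request)
    #   order.populate_from_cart(cart, request)
    # Afterwards the cart's active items have been moved onto the order.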
@transaction.atomic
def readd_to_cart(self, cart):
"""
Re-add the items of this order back to the cart.
"""
for order_item in self.items.all():
extra = dict(order_item.extra)
extra.pop('rows', None)
extra.update(product_code=order_item.product_code)
cart_item = order_item.product.is_in_cart(cart, **extra)
if cart_item:
cart_item.quantity = max(cart_item.quantity, order_item.quantity)
else:
cart_item = CartItemModel(cart=cart, product=order_item.product,
product_code=order_item.product_code,
quantity=order_item.quantity, extra=extra)
cart_item.save()
def save(self, **kwargs):
"""
        The status of an Order object may change if auto transitions are specified.
"""
# round the total to the given decimal_places
self._subtotal = BaseOrder.round_amount(self._subtotal)
self._total = BaseOrder.round_amount(self._total)
super(BaseOrder, self).save(**kwargs)
@cached_property
def amount_paid(self):
"""
        The amount paid is the sum of the related order payments.
"""
amount = self.orderpayment_set.aggregate(amount=Sum('amount'))['amount']
if amount is None:
amount = Decimal(0.0)#MoneyMaker(self.currency)()
return amount
@property
def outstanding_amount(self):
"""
Return the outstanding amount paid for this order
"""
return self.total - self.amount_paid
def is_fully_paid(self):
return self.amount_paid >= self.total
@transition(field='status', source='*', target='payment_confirmed', conditions=[is_fully_paid])
def acknowledge_payment(self, by=None):
"""
Change status to `payment_confirmed`. This status code is known globally and can be used
by all external plugins to check, if an Order object has been fully paid.
"""
def cancelable(self):
"""
Returns True if the current Order is cancelable.
This method is just a hook and must be overridden by a mixin class
managing Order cancellations.
"""
return False
def refund_payment(self):
"""
Hook to handle payment refunds.
"""
@classmethod
def get_all_transitions(cls):
"""
Returns a generator over all transition objects for this Order model.
"""
return cls.status.field.get_all_transitions(OrderModel)
@classmethod
def get_transition_name(cls, target):
"""Return the human readable name for a given transition target"""
return cls.TRANSITION_TARGETS.get(target, target)
def status_name(self):
"""Return the human readable name for the current transition state"""
return self.TRANSITION_TARGETS.get(self.status, self.status)
status_name.short_description = pgettext_lazy('order_models', "State")
OrderModel = deferred.MaterializedModel(BaseOrder)
@python_2_unicode_compatible
class OrderPayment(with_metaclass(deferred.ForeignKeyBuilder, models.Model)):
"""
A model to hold received payments for a given order.
"""
order = deferred.ForeignKey(
BaseOrder,
verbose_name=_("Order"),
)
amount = MoneyField(
_("Amount paid"),
help_text=_("How much was paid with this particular transfer."),
)
transaction_id = models.CharField(
_("Transaction ID"),
max_length=255,
help_text=_("The transaction processor's reference"),
)
created_at = models.DateTimeField(
_("Received at"),
auto_now_add=True,
)
payment_method = models.CharField(
_("Payment method"),
max_length=50,
help_text=_("The payment backend used to process the purchase"),
)
class Meta(EntityModel.RESTMeta):
verbose_name = pgettext_lazy('order_models', "Order payment")
verbose_name_plural = pgettext_lazy('order_models', "Order payments")
def __str__(self):
return _("Payment ID: {}").format(self.id)
@python_2_unicode_compatible
class BaseOrderItem(with_metaclass(deferred.ForeignKeyBuilder, models.Model)):
"""
An item for an order.
"""
order = deferred.ForeignKey(
BaseOrder,
related_name='items',
verbose_name=_("Order"),
)
product_name = models.CharField(
_("Product name"),
max_length=255,
null=True,
blank=True,
help_text=_("Product name at the moment of purchase."),
)
product_code = models.CharField(
_("Product code"),
max_length=255,
null=True,
blank=True,
help_text=_("Product code at the moment of purchase."),
)
product = deferred.ForeignKey(
'BaseProduct',
null=True,
blank=True,
on_delete=models.SET_NULL,
verbose_name=_("Product"),
)
step = models.DecimalField(verbose_name=_('addition step'), default=1, max_digits=10, decimal_places=3)
_unit_price = models.DecimalField(
_("Unit price"),
null=True, # may be NaN
help_text=_("Products unit price at the moment of purchase."),
**BaseOrder.decimalfield_kwargs
)
_line_total = models.DecimalField(
_("Line Total"),
null=True, # may be NaN
help_text=_("Line total on the invoice at the moment of purchase."),
**BaseOrder.decimalfield_kwargs
)
extra = JSONField(
verbose_name=_("Extra fields"),
help_text=_("Arbitrary information for this order item"),
)
class Meta:
abstract = True
verbose_name = _("Order item")
verbose_name_plural = _("Order items")
def __str__(self):
return self.product_name
@classmethod
def perform_model_checks(cls):
try:
cart_field = [f for f in CartItemModel._meta.fields if f.attname == 'quantity'][0]
order_field = [f for f in cls._meta.fields if f.attname == 'quantity'][0]
if order_field.get_internal_type() != cart_field.get_internal_type():
msg = "Field `{}.quantity` must be of one same type `{}.quantity`."
raise ImproperlyConfigured(msg.format(cls.__name__, CartItemModel.__name__))
except IndexError:
msg = "Class `{}` must implement a field named `quantity`."
raise ImproperlyConfigured(msg.format(cls.__name__))
@property
def unit_price(self):
# MoneyMaker(self.order.currency)(self._unit_price)
return self._unit_price
@property
def line_total(self):
# MoneyMaker(self.order.currency)(self._line_total)
return self._line_total
@property
def absolute_quantity(self):
if self.step:
return self.quantity * self.step
return self.quantity
def populate_from_cart_item(self, cart_item, request):
"""
From a given cart item, populate the current order item.
If the operation was successful, the given item shall be removed from the cart.
If a CartItem.DoesNotExist exception is raised, discard the order item.
"""
if cart_item.quantity == 0:
raise CartItemModel.DoesNotExist("Cart Item is on the Wish List")
self.product = cart_item.product
self.product_name = cart_item.product.product_name
self.product_code = cart_item.product_code
self._unit_price = Decimal(cart_item.unit_price)
self._line_total = Decimal(cart_item.line_total)
self.quantity = cart_item.quantity
self.step = cart_item.product.get_step
self.extra = dict(cart_item.extra)
extra_rows = [(modifier, extra_row.data) for modifier, extra_row in cart_item.extra_rows.items()]
self.extra.update(rows=extra_rows)
def save(self, *args, **kwargs):
"""
Before saving the OrderItem object to the database, round the amounts to the given decimal places
"""
self._unit_price = BaseOrder.round_amount(self._unit_price)
self._line_total = BaseOrder.round_amount(self._line_total)
super(BaseOrderItem, self).save(*args, **kwargs)
OrderItemModel = deferred.MaterializedModel(BaseOrderItem)
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getRules = exports.isJSONType = void 0;
const _jsonTypes = ["string", "number", "integer", "boolean", "null", "object", "array"];
const jsonTypes = new Set(_jsonTypes);
function isJSONType(x) {
return typeof x == "string" && jsonTypes.has(x);
}
exports.isJSONType = isJSONType;
function getRules() {
const groups = {
number: { type: "number", rules: [] },
string: { type: "string", rules: [] },
array: { type: "array", rules: [] },
object: { type: "object", rules: [] },
};
return {
types: { ...groups, integer: true, boolean: true, null: true },
rules: [{ rules: [] }, groups.number, groups.string, groups.array, groups.object],
post: { rules: [] },
all: { type: true, $comment: true },
keywords: { type: true, $comment: true },
};
}
exports.getRules = getRules;
//# sourceMappingURL=rules.js.map |
(self.webpackChunkdocusaurus_template=self.webpackChunkdocusaurus_template||[]).push([[47236],{13919:function(e,n,t){"use strict";function i(e){return!0===/^(\w*:|\/\/)/.test(e)}function o(e){return void 0!==e&&!i(e)}t.d(n,{b:function(){return i},Z:function(){return o}})},44996:function(e,n,t){"use strict";t.d(n,{C:function(){return a},Z:function(){return r}});var i=t(52263),o=t(13919);function a(){var e=(0,i.Z)().siteConfig,n=(e=void 0===e?{}:e).baseUrl,t=void 0===n?"/":n,a=e.url;return{withBaseUrl:function(e,n){return function(e,n,t,i){var a=void 0===i?{}:i,r=a.forcePrependBaseUrl,s=void 0!==r&&r,l=a.absolute,d=void 0!==l&&l;if(!t)return t;if(t.startsWith("#"))return t;if((0,o.b)(t))return t;if(s)return n+t;var u=t.startsWith(n)?t:n+t.replace(/^\//,"");return d?e+u:u}(a,t,e,n)}}}function r(e,n){return void 0===n&&(n={}),(0,a().withBaseUrl)(e,n)}},32527:function(e,n,t){"use strict";t.d(n,{Z:function(){return p}});var i=t(67294),o=t(21140),a=t.n(o),r="graph_1lrJ",s="pointer_3d8u",l="overlay_2fuY",d="visible_2Z3U",u="backdrop_2z5L",h=t(94184),c=t.n(h);a().initialize({startOnLoad:!0,logLevel:"fatal",securityLevel:"strict",arrowMarkerAbsolute:!1,theme:"neutral",flowchart:{useMaxWidth:!0,htmlLabels:!0,rankSpacing:65,nodeSpacing:30,curve:"basis"},sequence:{useMaxWidth:!0},gantt:{useMaxWidth:!0}});var p=function(e){var n,t=e.chart,o=(0,i.useState)(!1),h=o[0],p=o[1],m=(0,i.useState)(void 0),g=m[0],f=m[1],k=(0,i.useState)("mermaid-"+Math.random().toString(36).substr(2,-1))[0],v=function(){return p(!h)};return(0,i.useEffect)((function(){a().render(k,t,(function(e){f(e)}))}),[]),i.createElement(i.Fragment,null,i.createElement("div",{onClick:v,className:c()(r,s),dangerouslySetInnerHTML:{__html:g}}),i.createElement("div",{onClick:v,className:c()(l,s,r,(n={},n[d]=h,n))},i.createElement("div",{onClick:function(e){return e.stopPropagation()},className:c()(u,r),dangerouslySetInnerHTML:{__html:g}})))}},25770:function(e,n,t){"use strict";t.r(n),t.d(n,{frontMatter:function(){return h},contentTitle:function(){return c},metadata:function(){return p},toc:function(){return m},default:function(){return f}});var i=t(22122),o=t(19756),a=(t(67294),t(3905)),r=t(44996),s=t(32527),l=t(55064),d=t(58215),u=["components"],h={id:"login",title:"Login Flow"},c=void 0,p={unversionedId:"concepts/login",id:"version-v1.8/concepts/login",isDocsHomePage:!1,title:"Login Flow",description:"OAuth2 and OpenID Connect require an authenticated End-User session for all",source:"@site/versioned_docs/version-v1.8/concepts/login.mdx",sourceDirName:"concepts",slug:"/concepts/login",permalink:"/hydra/docs/v1.8/concepts/login",editUrl:"https://github.com/ory/hydra/edit/master/docs/versioned_docs/version-v1.8/concepts/login.mdx",version:"v1.8",lastUpdatedBy:"aeneasr",lastUpdatedAt:1601676726,formattedLastUpdatedAt:"10/2/2020",frontMatter:{id:"login",title:"Login Flow"},sidebar:"version-v1.8/docs",previous:{title:"OpenID Connect",permalink:"/hydra/docs/v1.8/concepts/openid-connect-oidc"},next:{title:"Consent Flow",permalink:"/hydra/docs/v1.8/concepts/consent"}},m=[{value:"Initiating the OAuth 2.0 / OpenID Connect Flow",id:"initiating-the-oauth-20--openid-connect-flow",children:[]},{value:"Redirection to the Login Endpoint",id:"redirection-to-the-login-endpoint",children:[{value:"Login Sessions, <code>prompt</code>, <code>max_age</code>, <code>id_token_hint</code>",id:"login-sessions-prompt-max_age-id_token_hint",children:[]}]},{value:"The Login Endpoint",id:"the-login-endpoint",children:[{value:"Accepting the Login 
Flow",id:"accepting-the-login-flow",children:[]},{value:"Rejecting the Login Flow",id:"rejecting-the-login-flow",children:[]}]},{value:"Redirection to the Consent Endpoint",id:"redirection-to-the-consent-endpoint",children:[]},{value:"Revoking ORY Hydra Login Sessions",id:"revoking-ory-hydra-login-sessions",children:[]}],g={toc:m};function f(e){var n=e.components,t=(0,o.Z)(e,u);return(0,a.kt)("wrapper",(0,i.Z)({},g,t,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("p",null,"OAuth2 and OpenID Connect require an authenticated End-User session for all\nOAuth2 / OpenID Connect flows except the ",(0,a.kt)("inlineCode",{parentName:"p"},"client_credentials")," flow which does\nnot involve End-Users."),(0,a.kt)("p",null,'ORY Hydra does not contain a database with End-Users but instead uses HTTP\nRedirection to "delegate" the login flow to another app - we call this the Login\n& Consent App.'),(0,a.kt)("p",null,"The following short video shows the flow from an End-User's perspective - it\nincludes both login and consent."),(0,a.kt)("iframe",{width:"560",height:"315",src:"https://www.youtube.com/embed/txUmfORzu8Y",frameborder:"0",allowfullscreen:!0}),(0,a.kt)("p",null,"The following sequence diagram describes the different API calls and HTTP\nRedirects when performing the OAuth2 flow:"),(0,a.kt)(s.Z,{chart:"sequenceDiagram\n OAuth2 Client->>ORY Hydra: Initiates OAuth2 Authorize Code or Implicit Flow\n ORY Hydra--\x3e>ORY Hydra: No end user session available (not authenticated)\n ORY Hydra->>Login Endpoint: Redirects end user with login challenge\n Login Endpoint--\x3eORY Hydra: Fetches login info\n Login Endpoint--\x3e>Login Endpoint: Authenticates user with credentials\n Login Endpoint--\x3eORY Hydra: Transmits login info and receives redirect url with login verifier\n Login Endpoint->>ORY Hydra: Redirects end user to redirect url with login verifier\n ORY Hydra--\x3e>ORY Hydra: First time that client asks user for permissions\n ORY Hydra->>Consent Endpoint: Redirects end user with consent challenge\n Consent Endpoint--\x3eORY Hydra: Fetches consent info (which user, what app, what scopes)\n Consent Endpoint--\x3e>Consent Endpoint: Asks for end user's permission to grant application access\n Consent Endpoint--\x3eORY Hydra: Transmits consent result and receives redirect url with consent verifier\n Consent Endpoint->>ORY Hydra: Redirects to redirect url with consent verifier\n ORY Hydra--\x3e>ORY Hydra: Verifies grant\n ORY Hydra->>OAuth2 Client: Transmits authorization code/token",mdxType:"Mermaid"}),(0,a.kt)("h2",{id:"initiating-the-oauth-20--openid-connect-flow"},"Initiating the OAuth 2.0 / OpenID Connect Flow"),(0,a.kt)("p",null,"The OAuth2 2.0 / OpenID Connect Flow is initiated by pointing the End-User's\nbrowser to the ",(0,a.kt)("inlineCode",{parentName:"p"},"/oauth2/auth"),' endpoint. Depending on which flow ("Authorize Code\nFlow", "Implicit Flow", ...) 
you want to use some of the query parameters (e.g.\n',(0,a.kt)("inlineCode",{parentName:"p"},"/oauth2/auth?response_type=code"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"/oauth2/auth?response_type=token"),", ...)\nmight change but the overall initiation works always by sending the browser to\nthat URL."),(0,a.kt)("div",{className:"admonition admonition-note alert alert--secondary"},(0,a.kt)("div",{parentName:"div",className:"admonition-heading"},(0,a.kt)("h5",{parentName:"div"},(0,a.kt)("span",{parentName:"h5",className:"admonition-icon"},(0,a.kt)("svg",{parentName:"span",xmlns:"http://www.w3.org/2000/svg",width:"14",height:"16",viewBox:"0 0 14 16"},(0,a.kt)("path",{parentName:"svg",fillRule:"evenodd",d:"M6.3 5.69a.942.942 0 0 1-.28-.7c0-.28.09-.52.28-.7.19-.18.42-.28.7-.28.28 0 .52.09.7.28.18.19.28.42.28.7 0 .28-.09.52-.28.7a1 1 0 0 1-.7.3c-.28 0-.52-.11-.7-.3zM8 7.99c-.02-.25-.11-.48-.31-.69-.2-.19-.42-.3-.69-.31H6c-.27.02-.48.13-.69.31-.2.2-.3.44-.31.69h1v3c.02.27.11.5.31.69.2.2.42.31.69.31h1c.27 0 .48-.11.69-.31.2-.19.3-.42.31-.69H8V7.98v.01zM7 2.3c-3.14 0-5.7 2.54-5.7 5.68 0 3.14 2.56 5.7 5.7 5.7s5.7-2.55 5.7-5.7c0-3.15-2.56-5.69-5.7-5.69v.01zM7 .98c3.86 0 7 3.14 7 7s-3.14 7-7 7-7-3.12-7-7 3.14-7 7-7z"}))),"note")),(0,a.kt)("div",{parentName:"div",className:"admonition-content"},(0,a.kt)("p",{parentName:"div"},"This guide uses URLs from the ",(0,a.kt)("a",{parentName:"p",href:"../5min-tutorial"},"5 Minute Tutorial"),":"),(0,a.kt)("ul",{parentName:"div"},(0,a.kt)("li",{parentName:"ul"},"ORY Hydra Public Endpoint: ",(0,a.kt)("a",{parentName:"li",href:"http://127.0.0.1:4444"},"http://127.0.0.1:4444")),(0,a.kt)("li",{parentName:"ul"},"ORY Hydra Admin Endpoint: ",(0,a.kt)("a",{parentName:"li",href:"http://127.0.0.1:4445"},"http://127.0.0.1:4445"))),(0,a.kt)("p",{parentName:"div"},"When translating this guide into your own environment, make sure to use the\ncorrect endpoint for your interactions."))),(0,a.kt)(l.Z,{defaultValue:"ui",values:[{label:"UI",value:"ui"},{label:"HTML",value:"html"},{label:"JavaScript",value:"js"}],mdxType:"Tabs"},(0,a.kt)(d.Z,{value:"ui",mdxType:"TabItem"},(0,a.kt)("img",{src:(0,r.Z)("/img/docs/oauth2-consumer.png")})),(0,a.kt)(d.Z,{value:"html",mdxType:"TabItem"},(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-html"},'<a\n href="https://<hydra-public>/oauth2/auth?client_id=...&response_type=...&scope=..."/>\nLogin in with ORY Hydra</a>\n'))),(0,a.kt)(d.Z,{value:"js",mdxType:"TabItem"},(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-js"},"// ...\nwindow.location.href =\n 'https://<hydra-public>/oauth2/auth?client_id=...&response_type=...&scope=...'\n")))),(0,a.kt)("h2",{id:"redirection-to-the-login-endpoint"},"Redirection to the Login Endpoint"),(0,a.kt)("p",null,"The next task for ORY Hydra is to know the user of the request. To achieve that,\nORY Hydra checks if a session cookie is set containing information about a\npreviously successful login. Additionally, OpenID Connect parameters\n",(0,a.kt)("inlineCode",{parentName:"p"},"id_token_hint"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"prompt"),", and ",(0,a.kt)("inlineCode",{parentName:"p"},"max_age")," are evaluated and processed. 
Depending\non their values and the login state, the user might need to re-authenticate or\nthe flow will fail completely."),(0,a.kt)("p",null,'To authenticate the user (this happens regardless of whether a session exists\nfor the user or not), ORY Hydra redirects the browser to the "Login Endpoint"\nestablished in your config:'),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-yaml",metastring:'title="hydra serve all -c path/to/hydra/config.yml',title:'"hydra',serve:!0,all:!0,"-c":!0,"path/to/hydra/config.yml":!0},"# Can also be set using the environment variable:\n# URLS_LOGIN=https://login-app/login\nurls:\n login: https://login-app/login\n")),(0,a.kt)("p",null,"ORY Hydra appends a ",(0,a.kt)("inlineCode",{parentName:"p"},"login_challenge")," query parameter to the url. The value is a\nID which should later be used by the Login Endpoint to fetch important\ninformation about the request."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre"},"https://login-app/login?login_challenge=7bb518c4eec2454dbb289f5fdb4c0ee2\n")),(0,a.kt)("h3",{id:"login-sessions-prompt-max_age-id_token_hint"},"Login Sessions, ",(0,a.kt)("inlineCode",{parentName:"h3"},"prompt"),", ",(0,a.kt)("inlineCode",{parentName:"h3"},"max_age"),", ",(0,a.kt)("inlineCode",{parentName:"h3"},"id_token_hint")),(0,a.kt)("p",null,"ORY Hydra keeps track of user sessions. It does so by issuing a cookie which\ncontains the user ID. On subsequent OAuth2 / OpenID Connect flows, the session\nwill be checked and the Login Endpoint will be instructed to, for example, show\nthe Login HTML Form or skip the Login HTML Form."),(0,a.kt)("p",null,"ORY Hydra supports the following OpenID Connect query parameters:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"prompt")," (optional). Space delimited, case sensitive list of ASCII string\nvalues that specifies whether the Authorization Server prompts the End-User\nfor reauthentication and consent.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"prompt=none")," instructs ORY Hydra to not display the login or consent user\ninterface pages. An error is returned if an End-User is not already\nauthenticated or the Client does not have pre-configured consent for the\nrequested Claims or does not fulfill other conditions for processing the\nrequest. The error code will typically be ",(0,a.kt)("inlineCode",{parentName:"li"},"login_required"),",\n",(0,a.kt)("inlineCode",{parentName:"li"},"interaction_required"),", or another code. This can be used as a method to\ncheck for existing authentication and/or consent."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"prompt=login")," instructs ORY Hydra to force the End-User to log in using the\nLogin HTML Form in the Login Endpoint."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"prompt=consent")," instructs ORY Hydra to force the End-User to re-authorize\n(give consent) the OAuth2 Client using the\n",(0,a.kt)("a",{parentName:"li",href:"./consent"},"Consent HTML Form in the Consent Endpoint"),"."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"prompt=select_account")," is currently not supported in ORY Hydra, see ","[#]","."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"max_age")," (optional) specifies the allowable elapsed time in seconds since the\nlast time the End-User was actively authenticated by ORY Hydra. 
If the elapsed\ntime is greater than this value, the Login HTML Form must be shown and the\nEnd-User must re-authenticate."),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"id_token_hint")," (optional) - ID Token previously issued by ORY Hydra being\npassed as a hint about the End-User's current or past authenticated session\nwith the Client. If the End-User identified by the ID Token is logged in or is\nlogged in by the request, then the Authorization Server returns a positive\nresponse; otherwise, it returns an error, typically ",(0,a.kt)("inlineCode",{parentName:"li"},"login_required"),". It does\nnot matter if the ID Token is expired or not.")),(0,a.kt)("p",null,"To specify these parameters add them to the OAuth2 Auth Endpoint URL Query:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre"},"https://<hydra-public>/oauth2/auth?prompt=login&max_age=60&id_token_hint=...'\n")),(0,a.kt)("h2",{id:"the-login-endpoint"},"The Login Endpoint"),(0,a.kt)("p",null,"The Login Endpoint (set by ",(0,a.kt)("inlineCode",{parentName:"p"},"urls.login"),") is an application written by you. You\ncan find an exemplary\n",(0,a.kt)("a",{parentName:"p",href:"https://github.com/ory/hydra-login-consent-node"},"NodeJS reference implementation on our GitHub"),"."),(0,a.kt)("p",null,"The Login Endpoint uses the ",(0,a.kt)("inlineCode",{parentName:"p"},"login_challenge")," value in the URL to fetch\ninformation about the login request by making a HTTP GET request to:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre"},"http(s)://<HYDRA_ADMIN_URL>/oauth2/auth/requests/login?login_challenge=<challenge>\n")),(0,a.kt)("p",null,'The response (see below in "Login Challenge Response" tab) contains information\nabout the login request. The body contains a ',(0,a.kt)("inlineCode",{parentName:"p"},"skip")," value. If the value is\n",(0,a.kt)("inlineCode",{parentName:"p"},"false"),", the user interface must be shown. If ",(0,a.kt)("inlineCode",{parentName:"p"},"skip")," is true, you should not\nshow the user interface but instead just accept or reject the login request! 
For\nmore details about the implementation check the\n",(0,a.kt)("a",{parentName:"p",href:"../guides/login"},'"Implementing the Login Endpoint" Guide'),"."),(0,a.kt)(l.Z,{defaultValue:"ui",values:[{label:"UI",value:"ui"},{label:"curl",value:"curl"},{label:"Login Challenge Response",value:"json"}],mdxType:"Tabs"},(0,a.kt)(d.Z,{value:"ui",mdxType:"TabItem"},(0,a.kt)("img",{src:(0,r.Z)("/img/docs/login-endpoint.png")})),(0,a.kt)(d.Z,{value:"curl",mdxType:"TabItem"},(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre"},'curl "http://127.0.0.1:4445/oauth2/auth/requests/login?login_challenge=7bb518c4eec2454dbb289f5fdb4c0ee2"\n')),(0,a.kt)("p",null,"Check the ",(0,a.kt)("a",{parentName:"p",href:"../guides/login"},'"Implementing the Login Endpoint" Guide')," for\nexamples using the ORY Hydra SDK in different languages.")),(0,a.kt)(d.Z,{value:"json",mdxType:"TabItem"},(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-json"},'{\n "challenge": "7bb518c4eec2454dbb289f5fdb4c0ee2",\n "requested_scope": ["openid", "offline"],\n "requested_access_token_audience": null,\n "skip": false,\n "subject": "",\n "oidc_context": {},\n "client": {\n "client_id": "auth-code-client",\n "client_name": "",\n "redirect_uris": ["http://127.0.0.1:5555/callback"],\n "grant_types": ["authorization_code", "refresh_token"],\n "response_types": ["code", "id_token"],\n "scope": "openid offline",\n "audience": null,\n "owner": "",\n "policy_uri": "",\n "allowed_cors_origins": null,\n "tos_uri": "",\n "client_uri": "",\n "logo_uri": "",\n "contacts": null,\n "client_secret_expires_at": 0,\n "subject_type": "public",\n "token_endpoint_auth_method": "client_secret_basic",\n "userinfo_signed_response_alg": "none",\n "created_at": "2020-07-08T12:31:47Z",\n "updated_at": "2020-07-08T12:31:47Z"\n },\n "request_url": "http://127.0.0.1:4444/oauth2/auth?audience=&client_id=auth-code-client&max_age=0&nonce=hognfveoohhddoralbeygsjg&prompt=&redirect_uri=http%3A%2F%2F127.0.0.1%3A5555%2Fcallback&response_type=code&scope=openid+offline&state=imnweycejbfpyrmnahgqzcmm",\n "session_id": "d3c98aa6-67ae-478b-bc30-f7887b58f630"\n}\n')))),(0,a.kt)("p",null,"The way you authenticate the End-User is up to you. In most cases, you will show\nan HTML form similar to:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-html"},'<form action="/login" method="post">\n <input type="hidden" name="csrf_token" value="...." />\n \x3c!-- Use CSRF tokens in your HTML forms! --\x3e\n <input\n type="email"\n name="login_email"\n placeholder="Please enter your email address to log in"\n />\n <input type="password" name="login_password" />\n <input type="checkbox" name="remember" value="Remember me on this device" />\n <input type="submit" value="Log in" />\n</form>\n')),(0,a.kt)("p",null,"Once the End-User authenticated successfully, you either ",(0,a.kt)("strong",{parentName:"p"},"accept")," the login\nchallenge, or you ",(0,a.kt)("strong",{parentName:"p"},"reject")," (e.g. 
the user is not allowed to perform OAuth2\nflows) the login challenge."),(0,a.kt)("h3",{id:"accepting-the-login-flow"},"Accepting the Login Flow"),(0,a.kt)("p",null,"To accept the Login Challenge, make a HTTP PUT request with\n",(0,a.kt)("inlineCode",{parentName:"p"},"Content-Type: application/json")," and a JSON payload (see\n",(0,a.kt)("a",{parentName:"p",href:"../reference/api#schemaacceptloginrequest"},"Accept Login Request HTTP API Reference"),")"),(0,a.kt)("div",{className:"admonition admonition-warning alert alert--danger"},(0,a.kt)("div",{parentName:"div",className:"admonition-heading"},(0,a.kt)("h5",{parentName:"div"},(0,a.kt)("span",{parentName:"h5",className:"admonition-icon"},(0,a.kt)("svg",{parentName:"span",xmlns:"http://www.w3.org/2000/svg",width:"12",height:"16",viewBox:"0 0 12 16"},(0,a.kt)("path",{parentName:"svg",fillRule:"evenodd",d:"M5.05.31c.81 2.17.41 3.38-.52 4.31C3.55 5.67 1.98 6.45.9 7.98c-1.45 2.05-1.7 6.53 3.53 7.7-2.2-1.16-2.67-4.52-.3-6.61-.61 2.03.53 3.33 1.94 2.86 1.39-.47 2.3.53 2.27 1.67-.02.78-.31 1.44-1.13 1.81 3.42-.59 4.78-3.42 4.78-5.56 0-2.84-2.53-3.22-1.25-5.61-1.52.13-2.03 1.13-1.89 2.75.09 1.08-1.02 1.8-1.86 1.33-.67-.41-.66-1.19-.06-1.78C8.18 5.31 8.68 2.45 5.05.32L5.03.3l.02.01z"}))),"warning")),(0,a.kt)("div",{parentName:"div",className:"admonition-content"},(0,a.kt)("p",{parentName:"div"},"The subject must be an immutable user ID, it should never be an email address, a\nusername, or something else that may change over time."))),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-json"},'{\n // Subject is the user ID of the end-user that authenticated.\n "subject": "string", // required!\n\n // All values below are optional!\n\n // Remember, if set to true, tells ORY Hydra to remember this user by telling the user agent (browser) to store\n // a cookie with authentication data. If the same user performs another OAuth 2.0 Authorization Request, he/she will not be asked to log in again.\n "remember": true,\n\n // RememberFor sets how long the authentication should be remembered for in seconds. If set to 0,\n // the authorization will be remembered for the duration of the browser session (using a session cookie).\n "remember_for": 0,\n\n // ACR sets the Authentication AuthorizationContext Class Reference value for this authentication session. You can use it to express that, for example, a user authenticated using two factor authentication.\n "acr": "string",\n\n // Context is an optional object which can hold arbitrary data. The data will be made available when fetching the\n // consent request under the "context" field. This is useful in scenarios where login and consent endpoints share\n // data.\n "context": {\n // "foo": "bar"\n },\n\n // ForceSubjectIdentifier forces the "pairwise" user ID of the end-user that authenticated. The "pairwise"\n // user ID refers to the (Pairwise Identifier Algorithm)[http://openid.net/specs/openid-connect-core-1_0.html#PairwiseAlg]\n // of the OpenID Connect specification. It allows you to set an obfuscated subject ("user") identifier that is unique\n // to the client. Please note that this changes the user ID on endpoint /userinfo and sub claim of the ID Token.\n // It does not change the sub claim in the OAuth 2.0 Introspection. Per default, ORY Hydra handles this value with its own algorithm.\n // In case you want to set this yourself you can use this field. 
Please note that setting this field has no effect if pairwise is not\n // configured in ORY Hydra or the OAuth 2.0 Client does not expect a pairwise identifier (set via subject_type key in the client\'s configuration).\n // Please also be aware that ORY Hydra is unable to properly compute this value during authentication. This implies that\n // you have to compute this value on every authentication process (probably depending on the client ID or some other unique value).\n // If you fail to compute the proper value, then authentication processes which have id_token_hint set might fail.\n "force_subject_identifier": "string"\n}\n')),(0,a.kt)("p",null,"With ",(0,a.kt)("inlineCode",{parentName:"p"},"curl")," this might look like the following request:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-shell",metastring:"script",script:!0},'$ curl --location --request PUT \'http://127.0.0.1:4445/oauth2/auth/requests/login/accept?login_challenge=7bb518c4eec2454dbb289f5fdb4c0ee2\' \\\n--header \'Content-Type: application/json\' \\\n--data-raw \'{\n "subject": "the-user-id-that-just-logged-in",\n "remember": true,\n "remember_for": 3600\n}\'\n')),(0,a.kt)("p",null,"The server responds with JSON"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre"},'{\n "redirect_to": "http://127.0.0.1:4445/oauth2/auth..."\n}\n')),(0,a.kt)("p",null,"which is the URL your application must redirect the End-User's browser to."),(0,a.kt)("p",null,"Check the ",(0,a.kt)("a",{parentName:"p",href:"../guides/login"},'"Implementing the Login Endpoint" Guide')," for\nexamples using the ORY Hydra SDK in different languages."),(0,a.kt)("h3",{id:"rejecting-the-login-flow"},"Rejecting the Login Flow"),(0,a.kt)("p",null,"To reject the Login Challenge, make a HTTP PUT request with\n",(0,a.kt)("inlineCode",{parentName:"p"},"Content-Type: application/json")," and a JSON payload (see\n",(0,a.kt)("a",{parentName:"p",href:"../reference/api#schemarejectrequest"},"Reject Login Request HTTP API Reference"),")"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-json"},'{\n // The error should follow the OAuth2 error format (e.g. `invalid_request`, `login_required`).\n "error": "user_banned",\n\n // Description of the error in a human readable format.\n "error_description": "You are banned!",\n\n // Hint to help resolve the error.\n "error_hint": "Contact the site administrator.",\n\n // Debug contains information to help resolve the problem as a developer. Usually not exposed\n // to the public but only in the server logs.\n "error_debug": "The user was marked banned in the database.",\n\n // Represents the HTTP status code of the error (e.g. 
401 or 403)\n //\n // Defaults to 400\n "status_code": 403\n}\n')),(0,a.kt)("p",null,"With ",(0,a.kt)("inlineCode",{parentName:"p"},"curl")," this might look like the following request:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-shell",metastring:"script",script:!0},'$ curl --location --request PUT \'http://127.0.0.1:4445/oauth2/auth/requests/login/reject?login_challenge=7bb518c4eec2454dbb289f5fdb4c0ee2\' \\\n--header \'Content-Type: application/json\' \\\n--data-raw \'{\n "error": "user_banned",\n "error_debug": "The user was marked banned in the database.",\n "error_description": "You are banned!",\n "error_hint": "Contact the site administrator.",\n "status_code": 403\n}\'\n')),(0,a.kt)("p",null,"The server responds with JSON"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-json"},'{\n "redirect_to": "http://127.0.0.1:4445/oauth2/auth..."\n}\n')),(0,a.kt)("p",null,"which is the URL your application must redirect the End-User's browser to."),(0,a.kt)("p",null,"Check the ",(0,a.kt)("a",{parentName:"p",href:"../guides/login"},'"Implementing the Login Endpoint" Guide')," for\nexamples using the ORY Hydra SDK in different languages."),(0,a.kt)("h2",{id:"redirection-to-the-consent-endpoint"},"Redirection to the Consent Endpoint"),(0,a.kt)("p",null,"Please head over to ",(0,a.kt)("a",{parentName:"p",href:"./consent"},"Consent Flow"),"!"),(0,a.kt)("h2",{id:"revoking-ory-hydra-login-sessions"},"Revoking ORY Hydra Login Sessions"),(0,a.kt)("p",null,"Revoking a login session will remove all of the user's cookies at ORY Hydra and\nwill require the user to re-authenticate when performing the next OAuth 2.0\nAuthorize Code Flow. Be aware that this option will remove all cookies from all\ndevices."),(0,a.kt)("div",{className:"admonition admonition-info alert alert--info"},(0,a.kt)("div",{parentName:"div",className:"admonition-heading"},(0,a.kt)("h5",{parentName:"div"},(0,a.kt)("span",{parentName:"h5",className:"admonition-icon"},(0,a.kt)("svg",{parentName:"span",xmlns:"http://www.w3.org/2000/svg",width:"14",height:"16",viewBox:"0 0 14 16"},(0,a.kt)("path",{parentName:"svg",fillRule:"evenodd",d:"M7 2.3c3.14 0 5.7 2.56 5.7 5.7s-2.56 5.7-5.7 5.7A5.71 5.71 0 0 1 1.3 8c0-3.14 2.56-5.7 5.7-5.7zM7 1C3.14 1 0 4.14 0 8s3.14 7 7 7 7-3.14 7-7-3.14-7-7-7zm1 3H6v5h2V4zm0 6H6v2h2v-2z"}))),"info")),(0,a.kt)("div",{parentName:"div",className:"admonition-content"},(0,a.kt)("p",{parentName:"div"},"Revoking a login session does not invalidate any Access, Refresh, or ID Tokens!\nIf you log out of GitHub, you will not be logged out of CircleCI/TravisCI."))),(0,a.kt)("p",null,"Revoking the login sessions of a user is as easy as sending ",(0,a.kt)("inlineCode",{parentName:"p"},"DELETE")," to\n",(0,a.kt)("inlineCode",{parentName:"p"},"/oauth2/auth/sessions/login?subject={subject}")," (see\n",(0,a.kt)("a",{parentName:"p",href:"../reference/api#opIdrevokeAuthenticationSession"},"full API documentation"),")."),(0,a.kt)("p",null,"This endpoint is not compatible with OpenID Connect Front-/Backchannel logout\nand does not revoke any tokens."))}f.isMDXComponent=!0},11748:function(e,n,t){var i={"./locale":89234,"./locale.js":89234};function o(e){var n=a(e);return t(n)}function a(e){if(!t.o(i,e)){var n=new Error("Cannot find module '"+e+"'");throw n.code="MODULE_NOT_FOUND",n}return i[e]}o.keys=function(){return Object.keys(i)},o.resolve=a,e.exports=o,o.id=11748}}]); |
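The compiled docs above walk through accepting and rejecting a login challenge with curl. As a companion, here is a minimal sketch of the same two admin calls in Python using the requests library; the admin base URL, payload values, and helper names are assumptions mirroring the curl examples, not part of the ORY SDK.

# Hypothetical helpers mirroring the curl examples above (not the ORY SDK).
import requests

ADMIN_URL = "http://127.0.0.1:4445"  # assumed admin endpoint, as in the examples

def accept_login(challenge, subject):
    # PUT /oauth2/auth/requests/login/accept; "subject" is the only required field.
    resp = requests.put(
        ADMIN_URL + "/oauth2/auth/requests/login/accept",
        params={"login_challenge": challenge},
        json={"subject": subject, "remember": True, "remember_for": 3600},
    )
    resp.raise_for_status()
    # Redirect the End-User's browser to this URL.
    return resp.json()["redirect_to"]

def reject_login(challenge):
    # PUT /oauth2/auth/requests/login/reject with an OAuth2-style error payload.
    resp = requests.put(
        ADMIN_URL + "/oauth2/auth/requests/login/reject",
        params={"login_challenge": challenge},
        json={"error": "user_banned", "error_description": "You are banned!",
              "status_code": 403},
    )
    resp.raise_for_status()
    return resp.json()["redirect_to"]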
import React from 'react'
import BaseComponent from '../../components/BaseComponent';
import { login } from '../../components/helper';
class Login extends BaseComponent {
constructor(props) {
super(props);
this.username = React.createRef();
this.password = React.createRef();
this.state = { isLoading: false }
}
render() {
const { isLoading } = this.state
const handleOnSubmit = async (e) => {
e.preventDefault();
this.setState({ isLoading: true })
const username = this.username.current.value
const password = this.password.current.value
login(username, password, this);
}
return (
<div className="login-root">
<div className="login-background">
<div className="login-background-effect">
</div>
</div>
        <main className={isLoading ? 'loading' : ''}>
<div className="login-texto">
<span> Aplicación OLSoftware </span>
<p> Prueba práctica Front-end senior </p>
</div>
<div className="login-form">
<span> Inicio de sesión </span>
<form onSubmit={handleOnSubmit}>
<div id="input_container">
<input type="text" placeholder="Usuario" ref={this.username} defaultValue="user" autoComplete="off" />
<img src={BaseComponent.Constantes.loginUserIcon} id="input_img" />
</div>
<div id="input_container">
<input type="password" placeholder="Contraseña" ref={this.password} defaultValue="user" autoComplete="off" />
<img src={BaseComponent.Constantes.loginPasswordIcon} id="input_img" />
</div>
<button type="submit"> Iniciar sesión </button>
</form>
</div>
<div className="modal">
<div className="modal-vista">
              <h1> Estamos preparando todo para ti </h1>
<img src="/loading.gif" />
</div>
</div>
</main>
<footer className="footer-login">
OLSoftware - 2020
</footer>
</div>
);
}
}
//<TablaUsuarios />
export default Login;
|
# ----------------------------------------------------------------------------
# - Open3D: www.open3d.org -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2018-2021 www.open3d.org
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# ----------------------------------------------------------------------------
import open3d as o3d
import numpy as np
import matplotlib.pyplot as plt
if __name__ == '__main__':
tum_data = o3d.data.SampleTUMRGBDImage()
depth = o3d.t.io.read_image(tum_data.depth_path)
intrinsic = o3d.core.Tensor([[535.4, 0, 320.1], [0, 539.2, 247.6],
[0, 0, 1]])
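    # TUM RGB-D depth PNGs encode depth in units of 1/5000 m, hence depth_scale=5000.0 below.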
pcd = o3d.t.geometry.PointCloud.create_from_depth_image(depth,
intrinsic,
depth_scale=5000.0,
depth_max=10.0)
o3d.visualization.draw([pcd])
depth_reproj = pcd.project_to_depth_image(640,
480,
intrinsic,
depth_scale=5000.0,
depth_max=10.0)
fig, axs = plt.subplots(1, 2)
axs[0].imshow(np.asarray(depth.to_legacy()))
axs[1].imshow(np.asarray(depth_reproj.to_legacy()))
plt.show()
|
module.exports = {
plugins: [
...(process.env.PIKA
? [[require(`babel-plugin-import-graphql`), { runtime: true }]]
: []),
require(`@babel/plugin-transform-runtime`),
require(`@babel/plugin-proposal-class-properties`),
require(`@babel/plugin-transform-object-assign`),
require(`@babel/plugin-proposal-object-rest-spread`)
],
presets: [
require(`@babel/preset-typescript`),
[
require(`@babel/preset-env`),
{
targets: { node: true },
useBuiltIns: `usage`,
corejs: 3,
bugfixes: true
}
]
],
env: {
test: {
sourceMaps: `inline`,
plugins: [require(`@babel/plugin-transform-runtime`)],
presets: [
[
require(`@babel/preset-env`),
{
targets: { node: true },
modules: `commonjs`,
useBuiltIns: `usage`,
corejs: 3,
bugfixes: true
}
]
]
}
}
};
|
from samcli.lib.utils.osutils import stderr
from unittest import TestCase
from parameterized import parameterized
from subprocess import STDOUT, Popen, TimeoutExpired, PIPE
import os
import shutil
import tempfile
from samcli.lib.utils.packagetype import IMAGE, ZIP
from pathlib import Path
TIMEOUT = 300
class TestBasicInitCommand(TestCase):
def test_init_command_passes_and_dir_created(self):
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--runtime",
"nodejs10.x",
"--dependency-manager",
"npm",
"--architecture",
"arm64",
"--app-template",
"hello-world",
"--name",
"sam-app",
"--no-interactive",
"-o",
temp,
]
)
try:
process.communicate(timeout=TIMEOUT)
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 0)
self.assertTrue(Path(temp, "sam-app").is_dir())
def test_init_command_passes_and_dir_created_image(self):
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--package-type",
IMAGE,
"--base-image",
"amazon/nodejs10.x-base",
"--dependency-manager",
"npm",
"--name",
"sam-app",
"--no-interactive",
"-o",
temp,
]
)
try:
process.communicate(timeout=TIMEOUT)
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 0)
self.assertTrue(Path(temp, "sam-app").is_dir())
def test_init_new_app_template(self):
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--runtime",
"nodejs10.x",
"--dependency-manager",
"npm",
"--app-template",
"quick-start-from-scratch",
"--name",
"qs-scratch",
"--no-interactive",
"-o",
temp,
]
)
try:
process.communicate(timeout=TIMEOUT)
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 0)
self.assertTrue(Path(temp, "qs-scratch").is_dir())
def test_init_command_java_maven(self):
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--runtime",
"java8",
"--dependency-manager",
"maven",
"--app-template",
"hello-world",
"--name",
"sam-app-maven",
"--no-interactive",
"-o",
temp,
]
)
try:
process.communicate(timeout=TIMEOUT)
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 0)
self.assertTrue(Path(temp, "sam-app-maven").is_dir())
def test_init_command_java_gradle(self):
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--runtime",
"java8",
"--dependency-manager",
"gradle",
"--app-template",
"hello-world",
"--name",
"sam-app-gradle",
"--no-interactive",
"-o",
temp,
]
)
try:
process.communicate(timeout=TIMEOUT)
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 0)
self.assertTrue(Path(temp, "sam-app-gradle").is_dir())
def test_init_command_with_extra_context_parameter(self):
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--runtime",
"java8",
"--dependency-manager",
"maven",
"--app-template",
"hello-world",
"--name",
"sam-app-maven",
"--no-interactive",
"--extra-context",
'{"schema_name": "codedeploy", "schema_type": "aws"}',
"-o",
temp,
]
)
try:
process.communicate(timeout=TIMEOUT)
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 0)
self.assertTrue(Path(temp, "sam-app-maven").is_dir())
def test_init_command_passes_with_arm_architecture(self):
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--runtime",
"nodejs14.x",
"--dependency-manager",
"npm",
"--app-template",
"hello-world",
"--name",
"sam-app",
"--no-interactive",
"-o",
temp,
"--architecture",
"arm64",
]
)
try:
process.communicate(timeout=TIMEOUT)
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 0)
self.assertTrue(Path(temp, "sam-app").is_dir())
def test_init_command_passes_with_x86_64_architecture(self):
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--runtime",
"nodejs14.x",
"--dependency-manager",
"npm",
"--app-template",
"hello-world",
"--name",
"sam-app",
"--no-interactive",
"-o",
temp,
"--architecture",
"x86_64",
]
)
try:
process.communicate(timeout=TIMEOUT)
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 0)
self.assertTrue(Path(temp, "sam-app").is_dir())
def test_init_command_passes_with_unknown_architecture(self):
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--runtime",
"nodejs14.x",
"--dependency-manager",
"npm",
"--app-template",
"hello-world",
"--name",
"sam-app",
"--no-interactive",
"-o",
temp,
"--architecture",
"unknown_arch",
                ],
                stdout=PIPE,
                # merge stderr into stdout so click's usage error can be asserted on
                stderr=STDOUT,
            )
            capture_output = ""
            try:
                stdout_data, _ = process.communicate(timeout=TIMEOUT)
                capture_output = stdout_data.decode("utf-8")
            except TimeoutExpired:
                process.kill()
                raise
            self.assertEqual(process.returncode, 2)
            msg = "Invalid value for '-a' / '--architecture': invalid choice: unknown_arch. (choose from arm64, x86_64)"
            self.assertIn(msg, capture_output)
class TestInitForParametersCompatibility(TestCase):
def test_init_command_no_interactive_missing_name(self):
stderr = None
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--runtime",
"nodejs10.x",
"--dependency-manager",
"npm",
"--app-template",
"hello-world",
"--no-interactive",
"-o",
temp,
],
stdout=PIPE,
stderr=PIPE,
)
try:
stdout_data, stderr_data = process.communicate(timeout=TIMEOUT)
stderr = stderr_data.decode("utf-8")
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 2)
errmsg = """
Error:
Missing required parameters, with --no-interactive set.
Must provide one of the following required parameter combinations:
--name and --runtime and --dependency-manager and --app-template
--name and --package-type and --base-image and --dependency-manager
--location
You can also re-run without the --no-interactive flag to be prompted for required values.
"""
self.assertEqual(errmsg.strip(), "\n".join(stderr.strip().splitlines()))
def test_init_command_no_interactive_apptemplate_location(self):
stderr = None
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--app-template",
"hello-world",
"--no-interactive",
"--location",
"some_location",
"-o",
temp,
],
stdout=PIPE,
stderr=PIPE,
)
try:
stdout_data, stderr_data = process.communicate(timeout=TIMEOUT)
stderr = stderr_data.decode("utf-8")
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 2)
errmsg = """
Error:
You must not provide both the --app-template and --location parameters.
You can run 'sam init' without any options for an interactive initialization flow, or you can provide one of the following required parameter combinations:
--name and --runtime and --app-template and --dependency-manager
--name and --package-type and --base-image
--location
"""
self.assertEqual(errmsg.strip(), "\n".join(stderr.strip().splitlines()))
def test_init_command_no_interactive_runtime_location(self):
stderr = None
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--runtime",
"nodejs10.x",
"--no-interactive",
"--location",
"some_location",
"-o",
temp,
],
stdout=PIPE,
stderr=PIPE,
)
try:
stdout_data, stderr_data = process.communicate(timeout=TIMEOUT)
stderr = stderr_data.decode("utf-8")
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 2)
errmsg = """
Error:
You must not provide both the --runtime and --location parameters.
You can run 'sam init' without any options for an interactive initialization flow, or you can provide one of the following required parameter combinations:
--name and --runtime and --app-template and --dependency-manager
--name and --package-type and --base-image
--location
"""
self.assertEqual(errmsg.strip(), "\n".join(stderr.strip().splitlines()))
def test_init_command_no_interactive_base_image_location(self):
stderr = None
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--base-image",
"amazon/nodejs10.x-base",
"--no-interactive",
"--location",
"some_location",
"-o",
temp,
],
stdout=PIPE,
stderr=PIPE,
)
try:
stdout_data, stderr_data = process.communicate(timeout=TIMEOUT)
stderr = stderr_data.decode("utf-8")
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 2)
errmsg = """
Error:
You must not provide both the --base-image and --location parameters.
You can run 'sam init' without any options for an interactive initialization flow, or you can provide one of the following required parameter combinations:
--name and --runtime and --app-template and --dependency-manager
--name and --package-type and --base-image
--location
"""
self.assertEqual(errmsg.strip(), "\n".join(stderr.strip().splitlines()))
def test_init_command_no_interactive_base_image_no_dependency(self):
stderr = None
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--package-type",
IMAGE,
"--base-image",
"amazon/nodejs10.x-base",
"--no-interactive",
"-o",
temp,
],
stdout=PIPE,
stderr=PIPE,
)
try:
stdout_data, stderr_data = process.communicate(timeout=TIMEOUT)
stderr = stderr_data.decode("utf-8")
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 2)
errmsg = """
Error:
Missing required parameters, with --no-interactive set.
Must provide one of the following required parameter combinations:
--name and --runtime and --dependency-manager and --app-template
--name and --package-type and --base-image and --dependency-manager
--location
You can also re-run without the --no-interactive flag to be prompted for required values.
"""
self.assertEqual(errmsg.strip(), "\n".join(stderr.strip().splitlines()))
def test_init_command_no_interactive_packagetype_location(self):
stderr = None
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--package-type",
ZIP,
"--no-interactive",
"--location",
"some_location",
"-o",
temp,
],
stdout=PIPE,
stderr=PIPE,
)
try:
stdout_data, stderr_data = process.communicate(timeout=TIMEOUT)
stderr = stderr_data.decode("utf-8")
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 2)
errmsg = """
Error:
You must not provide both the --package-type and --location parameters.
You can run 'sam init' without any options for an interactive initialization flow, or you can provide one of the following required parameter combinations:
--name and --runtime and --app-template and --dependency-manager
--name and --package-type and --base-image
--location
"""
self.assertEqual(errmsg.strip(), "\n".join(stderr.strip().splitlines()))
def test_init_command_no_interactive_base_image_no_packagetype(self):
stderr = None
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--base-image",
"amazon/nodejs10.x-base",
"--no-interactive",
"-o",
temp,
],
stdout=PIPE,
stderr=PIPE,
)
try:
stdout_data, stderr_data = process.communicate(timeout=TIMEOUT)
stderr = stderr_data.decode("utf-8")
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 2)
errmsg = """
Error:
Missing required parameters, with --no-interactive set.
Must provide one of the following required parameter combinations:
--name and --runtime and --dependency-manager and --app-template
--name and --package-type and --base-image and --dependency-manager
--location
You can also re-run without the --no-interactive flag to be prompted for required values.
"""
self.assertEqual(errmsg.strip(), "\n".join(stderr.strip().splitlines()))
def test_init_command_wrong_packagetype(self):
stderr = None
with tempfile.TemporaryDirectory() as temp:
process = Popen(
[
_get_command(),
"init",
"--package-type",
"WrongPT",
"-o",
temp,
],
stdout=PIPE,
stderr=PIPE,
)
try:
stdout_data, stderr_data = process.communicate(timeout=TIMEOUT)
stderr = stderr_data.decode("utf-8")
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 2)
errmsg = """
Usage: {0} init [OPTIONS]
Try '{0} init -h' for help.
Error: Invalid value for '-p' / '--package-type': invalid choice: WrongPT. (choose from Zip, Image)
""".format(
_get_command()
)
self.assertEqual(errmsg.strip(), "\n".join(stderr.strip().splitlines()))
class TestInitWithArbitraryProject(TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
zipdata_folder = Path(self.tempdir, "zipdata")
zipdata_folder.mkdir(parents=True)
Path(zipdata_folder, "test.txt").write_text("hello world")
zip_path_no_extension = str(Path(self.tempdir, "myfile"))
self.zip_path = shutil.make_archive(zip_path_no_extension, "zip", root_dir=self.tempdir, base_dir="zipdata")
def tearDown(self):
shutil.rmtree(self.tempdir)
@parameterized.expand([(None,), ("project_name",)])
def test_arbitrary_project(self, project_name):
with tempfile.TemporaryDirectory() as temp:
args = [_get_command(), "init", "--location", self.zip_path, "-o", temp]
if project_name:
args.extend(["--name", project_name])
process = Popen(args)
try:
process.communicate(timeout=TIMEOUT)
except TimeoutExpired:
process.kill()
raise
expected_output_folder = Path(temp, project_name) if project_name else Path(temp)
self.assertEqual(process.returncode, 0)
self.assertTrue(expected_output_folder.exists())
self.assertEqual(os.listdir(str(expected_output_folder)), ["test.txt"])
self.assertEqual(Path(expected_output_folder, "test.txt").read_text(), "hello world")
def test_zip_not_exists(self):
with tempfile.TemporaryDirectory() as temp:
args = [_get_command(), "init", "--location", str(Path("invalid", "zip", "path")), "-o", temp]
process = Popen(args)
try:
process.communicate(timeout=TIMEOUT)
except TimeoutExpired:
process.kill()
raise
self.assertEqual(process.returncode, 1)
def _get_command():
command = "sam"
if os.getenv("SAM_CLI_DEV"):
command = "samdev"
return command
|
from logging import getLogger
from selenium.webdriver import Chrome
from selenium.webdriver.common.by import By
from .humanlike import randsleep
logger = getLogger(__name__)
CAPTCHA_ATTEMPTS = 4
def solve_captcha(driver: Chrome) -> bool:
    """Try to click through a reCAPTCHA interstitial, delegating the actual
    challenge to the Buster browser extension. Returns True if the challenge
    page is still present afterwards (i.e. solving failed)."""
    driver.switch_to.default_content()
iframe = driver.find_element(value="main-iframe")
driver.switch_to.frame(iframe)
iframe = driver.find_element(
By.CSS_SELECTOR,
"iframe[name*='a-'][src*='https://www.google.com/recaptcha/api2/anchor?']",
)
driver.switch_to.frame(iframe)
randsleep(0.2)
driver.find_element(By.XPATH, "//span[@id='recaptcha-anchor']").click()
driver.switch_to.default_content()
randsleep(0.2)
iframe = driver.find_element(value="main-iframe")
driver.switch_to.frame(iframe)
if "Why am I seeing this page" in driver.page_source:
logger.info("Completing catpcha 1")
randsleep(0.2)
iframe = driver.find_element(
By.CSS_SELECTOR,
"iframe[title*='recaptcha challenge'][src*='https://www.google.com/recaptcha/api2/bframe?']",
)
driver.switch_to.frame(iframe)
randsleep(0.2)
for _ in range(CAPTCHA_ATTEMPTS):
logger.info("Completing catpcha")
# let buster do it for us:
driver.find_elements(By.CLASS_NAME, "help-button-holder")[0].click()
randsleep(5)
if "Multiple correct solutions required" not in driver.page_source:
break
driver.switch_to.default_content()
randsleep(0.5)
return "Why am I seeing this page" in driver.page_source
|
import logging
import re
from dateutil.parser import parse
from sympy.parsing.sympy_parser import parse_expr
from sympy import symbols
from volttron.platform.agent.utils import setup_logging
__version__ = "0.2"
setup_logging()
_log = logging.getLogger(__name__)
def parse_sympy(data, condition=False):
"""
:param condition:
:param data:
:return:
"""
def clean_text(text, rep={" ": ""}):
        rep = dict((re.escape(k), v) for k, v in rep.items())
pattern = re.compile("|".join(rep.keys()))
new_key = pattern.sub(lambda m: rep[re.escape(m.group(0))], text)
return new_key
if isinstance(data, dict):
return_data = {}
for key, value in data.items():
new_key = clean_text(key)
return_data[new_key] = value
elif isinstance(data, list):
if condition:
return_data = ""
for item in data:
parsed_string = clean_text(item)
parsed_string = "(" + clean_text(item) + ")" if parsed_string not in ("&", "|") else parsed_string
return_data += parsed_string
else:
return_data = []
for item in data:
return_data.append(clean_text(item))
else:
return_data = clean_text(data)
return return_data
def init_schedule(schedule):
_schedule = {}
if schedule:
for day_str, schedule_info in schedule.items():
_day = parse(day_str).weekday()
if schedule_info not in ["always_on", "always_off"]:
start = parse(schedule_info["start"]).time()
end = parse(schedule_info["end"]).time()
_schedule[_day] = {"start": start, "end": end}
else:
_schedule[_day] = schedule_info
return _schedule
def check_schedule(dt, schedule):
if not schedule:
occupied = True
return occupied
current_schedule = schedule[dt.weekday()]
if "always_on" in current_schedule:
occupied = True
return occupied
if "always_off" in current_schedule:
occupied = False
return occupied
_start = current_schedule["start"]
_end = current_schedule["end"]
    occupied = _start < dt.time() < _end
return occupied
class ClusterContainer(object):
def __init__(self):
self.clusters = []
self.devices = {}
def add_curtailment_cluster(self, cluster):
self.clusters.append(cluster)
self.devices.update(cluster.devices)
def get_device_name_list(self):
return self.devices.keys()
def get_device(self, device_name):
return self.devices[device_name]
def get_power_bounds(self):
positive_power = []
negative_power = []
for cluster in self.clusters:
pos_power, neg_power = cluster.get_power_values()
positive_power.extend(pos_power)
negative_power.extend(neg_power)
_log.debug("power_adders: pos {} - neg {}".format(positive_power, negative_power))
return positive_power, negative_power
class DeviceClusters(object):
def __init__(self, cluster_config, load_type):
self.devices = {}
for device_name, device_config in cluster_config.items():
if load_type == "discreet":
self.devices[device_name] = DiscreetLoadManager(device_config)
elif load_type == "continuous":
self.devices[device_name] = ContinuousLoadManager(device_config)
def get_power_values(self):
positive_power = []
negative_power = []
for device_id, device in self.devices.items():
pos_power, neg_power = device.get_power_values()
positive_power.extend(pos_power)
negative_power.extend(neg_power)
return positive_power, negative_power
class DiscreetLoadManager(object):
def __init__(self, device_config):
self.command_status = {}
self.device_power = {}
self.device_status_args = {}
self.sop_args = {}
self.sop_expr = {}
self.expr = {}
self.condition = {}
self.sop_condition = {}
self.points = {}
self.sop_points = {}
self.rated_power = {}
self.positive_power = {}
self.negative_power = {}
for device_id, config in device_config.items():
rated_power = config['rated_power']
device_dict = config.pop('parameters')
device_status_args = parse_sympy(device_dict['discreet_on_condition_args'])
condition = device_dict['discreet_on_condition']
self.device_status_args[device_id] = device_status_args
self.condition[device_id] = parse_sympy(condition, condition=True)
self.points[device_id] = symbols(device_status_args)
self.expr[device_id] = parse_expr(self.condition[device_id])
pos_sop_condition = device_dict.get("pos_sop", "")
neg_sop_condition = device_dict.get("neg_sop", "")
sop_args = parse_sympy(device_dict['sop_args'])
self.sop_args[device_id] = sop_args
self.sop_condition[device_id] = [parse_sympy(pos_sop_condition), parse_sympy(neg_sop_condition)]
self.sop_points[device_id] = symbols(sop_args)
self.sop_expr[device_id] = [parse_expr(sop_cond) if sop_cond else False for sop_cond in self.sop_condition[device_id]]
self.command_status[device_id] = False
self.device_power[device_id] = 0.
self.rated_power[device_id] = rated_power
self.negative_power[device_id] = 0.
self.positive_power[device_id] = 0.
def ingest_data(self, data):
for device_id in self.rated_power:
conditional_points = []
sop_points = []
for item in self.device_status_args[device_id]:
conditional_points.append((item, data[item]))
for item in self.sop_args[device_id]:
sop_points.append((item, data[item]))
conditional_value = False
sop_values = []
if conditional_points:
conditional_value = self.expr[device_id].subs(conditional_points)
for expr in self.sop_expr[device_id]:
                if (sop_points and expr) or not self.sop_args[device_id]:
sop_values.append(expr.subs(sop_points))
elif not expr:
sop_values.append(0.)
_log.debug('{} - {} (device status) evaluated to {}'.format(device_id, self.condition[device_id], conditional_value))
_log.debug('{} - {} (device power) evaluated to {}'.format(device_id, self.sop_condition[device_id], sop_values))
try:
self.command_status[device_id] = bool(conditional_value)
except TypeError:
self.command_status[device_id] = False
self.determine_power_adders(device_id, sop_values)
def get_power_values(self):
return self.positive_power.values(), self.negative_power.values()
def determine_power_adders(self, device_id, sop):
sop = [min(max(0.0, value), 1.0) for value in sop]
status = self.command_status[device_id]
if status:
self.positive_power[device_id] = 0
self.negative_power[device_id] = float(sop[1]) * self.rated_power[device_id]
else:
self.positive_power[device_id] = float(sop[0]) * self.rated_power[device_id]
self.negative_power[device_id] = 0
_log.debug("{} - Negative Power: {} - sop: {}".format(device_id, self.negative_power, sop))
_log.debug("{} - Positive Power: {} - sop: {}".format(device_id, self.positive_power, sop))
class ContinuousLoadManager(object):
def __init__(self, device_config):
self.device_power = {}
self.sop_args = {}
self.condition = {}
self.sop_condition = {}
self.points = {}
self.sop_points = {}
self.rated_power = {}
self.positive_power = {}
self.negative_power = {}
self.sop_expr = {}
for device_id, config in device_config.items():
rated_power = config['rated_power']
device_dict = config.pop('parameters')
pos_sop_condition = device_dict.get("pos_sop", "")
neg_sop_condition = device_dict.get("neg_sop", "")
sop_args = parse_sympy(device_dict['sop_args'])
self.sop_args[device_id] = sop_args
self.sop_condition[device_id] = [parse_sympy(pos_sop_condition), parse_sympy(neg_sop_condition)]
self.sop_points[device_id] = symbols(sop_args)
self.sop_expr[device_id] = [parse_expr(sop_cond) if sop_cond else False for sop_cond in self.sop_condition[device_id]]
self.device_power[device_id] = 0.
self.rated_power[device_id] = rated_power
self.negative_power[device_id] = 0.
self.positive_power[device_id] = 0.
def ingest_data(self, data):
for device_id in self.rated_power:
sop_points = []
for item in self.sop_args[device_id]:
sop_points.append((item, data[item]))
sop_values = []
for expr in self.sop_expr[device_id]:
                if (sop_points and expr) or not self.sop_args[device_id]:
sop_values.append(expr.subs(sop_points))
elif not expr:
sop_values.append(0)
_log.debug('{} (device power) evaluated to {}'.format(self.sop_condition[device_id], sop_values))
self.determine_power_adders(device_id, sop_values)
def get_power_values(self):
return self.positive_power.values(), self.negative_power.values()
def determine_power_adders(self, device_id, sop):
sop = [min(max(0.0, value), 1.0) for value in sop]
self.negative_power[device_id] = float(sop[1]) * self.rated_power[device_id]
self.positive_power[device_id] = float(sop[0]) * self.rated_power[device_id]
_log.debug("{} - Negative Power: {} - sop: {}".format(device_id, self.negative_power, sop))
_log.debug("{} - Positive Power: {} - sop: {}".format(device_id, self.positive_power, sop))
|
(function ($) {
'use strict';
/**
* All of the code for your admin-facing JavaScript source
* should reside in this file.
*
* Note: It has been assumed you will write jQuery code here, so the
* $ function reference has been prepared for usage within the scope
* of this function.
*
* This enables you to define handlers, for when the DOM is ready:
*
* $(function() {
*
* });
*
* When the window is loaded:
*
* $( window ).load(function() {
*
* });
*
* ...and/or other possibilities.
*
	 * Ideally, it is not considered best practice to attach more than a
* single DOM-ready or window-load handler for a particular page.
* Although scripts in the WordPress core, Plugins and Themes may be
* practising this, we should strive to set a better example in our own work.
*/
})(jQuery);
|
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from pylab import bone, pcolor, colorbar, plot, show
from sklearn.preprocessing import MinMaxScaler
from minisom import MiniSom
dataset = pd.read_csv('Credit_Card_Applications.csv')
x = dataset.iloc[:, :-1].values
y = dataset.iloc[:, -1].values
scaler = MinMaxScaler()
x = scaler.fit_transform(x)
# SOM
som = MiniSom(x = 10, y = 10, input_len = 15, sigma = 1.0, learning_rate = 0.5)
som.random_weights_init(x)
som.train_random(x, 100)
# Visualization
bone()
pcolor(som.distance_map().T)
colorbar()
markers = ['o', 's']
colors = ['r', 'g']
for i, j in enumerate(x):
winning_node = som.winner(j)
plot(winning_node[0] + 0.5,
winning_node[1] + 0.5,
markers[y[i]],
markeredgecolor = colors[y[i]],
markerfacecolor = 'None',
markersize = 10,
markeredgewidth = 2)
show()
# Find the frauds
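# NOTE: the (7, 8) / (8, 7) coordinates below are the outlier neurons read off
# the distance map above; they are specific to this dataset and training run,
# so adjust them after inspecting the plot.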
mappings = som.win_map(x)
frauds = np.concatenate((mappings[(7, 8)], mappings[(8, 7)]), axis = 0)
frauds = scaler.inverse_transform(frauds) |
#include "be_var.h"
#include "be_vm.h"
#include "be_vector.h"
#include "be_string.h"
#include "be_map.h"
#include "be_gc.h"
#define global(vm) ((vm)->gbldesc.global)
#define builtin(vm) ((vm)->gbldesc.builtin)
void be_globalvar_init(bvm *vm)
{
global(vm).vtab = be_map_new(vm);
be_gc_fix(vm, gc_object(global(vm).vtab));
be_vector_init(vm, &global(vm).vlist, sizeof(bvalue));
#if !BE_USE_PRECOMPILED_OBJECT
builtin(vm).vtab = be_map_new(vm);
be_vector_init(vm, &builtin(vm).vlist, sizeof(bvalue));
be_gc_fix(vm, gc_object(builtin(vm).vtab));
#endif
}
void be_globalvar_deinit(bvm *vm)
{
global(vm).vtab = NULL;
be_vector_delete(vm, &global(vm).vlist);
#if !BE_USE_PRECOMPILED_OBJECT
builtin(vm).vtab = NULL;
be_vector_delete(vm, &builtin(vm).vlist);
#endif
}
static int global_find(bvm *vm, bstring *name)
{
bvalue *res = be_map_findstr(global(vm).vtab, name);
if (res) {
return var_toidx(res) + be_builtin_count(vm);
}
return -1; /* not found */
}
int be_global_find(bvm *vm, bstring *name)
{
int res = global_find(vm, name);
return res != -1 ? res : be_builtin_find(vm, name);
}
int be_global_new(bvm *vm, bstring *name)
{
int idx = global_find(vm, name);
if (idx == -1) {
bvalue *desc;
idx = be_map_count(global(vm).vtab);
desc = be_map_insertstr(vm, global(vm).vtab, name, NULL);
var_setint(desc, idx);
be_vector_resize(vm, &global(vm).vlist, idx + 1);
/* set the new variable to nil */
var_setnil((bvalue *)global(vm).vlist.end);
return idx + be_builtin_count(vm);
}
return idx;
}
bvalue* be_global_var(bvm *vm, int index)
{
int bcnt = be_builtin_count(vm);
if (index < bcnt) {
return be_vector_at(&builtin(vm).vlist, index);
}
index -= bcnt;
return be_vector_at(&global(vm).vlist, index);
}
void be_global_release_space(bvm *vm)
{
be_map_release(vm, global(vm).vtab);
be_vector_release(vm, &global(vm).vlist);
}
int be_builtin_find(bvm *vm, bstring *name)
{
bvalue *res = be_map_findstr(builtin(vm).vtab, name);
if (res) {
return var_toidx(res);
}
return -1; /* not found */
}
bstring* be_builtin_name(bvm *vm, int index)
{
bmap *map = builtin(vm).vtab;
bmapnode *end, *node = map->slots;
for (end = node + map->size; node < end; ++node) {
if (var_isstr(&node->key) && node->value.v.i == index) {
return node->key.v.s;
}
}
return NULL;
}
#if !BE_USE_PRECOMPILED_OBJECT
int be_builtin_new(bvm *vm, bstring *name)
{
int idx = be_builtin_find(vm, name);
if (idx == -1) {
bvalue *desc;
idx = be_map_count(builtin(vm).vtab);
desc = be_map_insertstr(vm, builtin(vm).vtab, name, NULL);
var_setint(desc, idx);
be_vector_resize(vm, &builtin(vm).vlist, idx + 1);
/* set the new variable to nil */
var_setnil((bvalue*)(builtin(vm).vlist.end));
}
return idx;
}
void be_bulitin_release_space(bvm *vm)
{
be_map_release(vm, builtin(vm).vtab);
be_vector_release(vm, &builtin(vm).vlist);
}
#else
void be_const_builtin_set(bvm *vm, const bmap *map, const bvector *vec)
{
builtin(vm).vtab = cast(bmap*, map);
builtin(vm).vlist = *vec;
}
#endif
|
#!/usr/bin/env python3
"""
Generate Ruby code with URLs and file hashes for packages from PyPi
(i.e., httpie itself as well as its dependencies) to be included
in the Homebrew formula after a new release of HTTPie has been published
on PyPi.
<https://github.com/Homebrew/homebrew-core/blob/master/Formula/httpie.rb>
"""
import hashlib
import requests
PACKAGES = [
('kongctl', 'https://pypi.org/pypi/{}/json'),
('requests', 'https://pypi.org/pypi/{}/json'),
('certifi', 'https://pypi.org/pypi/{}/json'),
('termcolor', 'https://pypi.org/pypi/{}/json'),
('urllib3', 'https://pypi.org/pypi/{}/json'),
('idna', 'https://pypi.org/pypi/{}/json'),
('chardet', 'https://pypi.org/pypi/{}/json'),
('PySocks', 'https://pypi.org/pypi/{}/json'),
('PyYAML', 'https://pypi.org/pypi/{}/json'),
]
def get_package_meta(package_name):
api_url = package_name[1].format(package_name[0])
resp = requests.get(api_url).json()
hasher = hashlib.sha256()
for release in resp['urls']:
download_url = release['url']
if download_url.endswith('.tar.gz'):
hasher.update(requests.get(download_url).content)
return {
'name': package_name[0],
'url': download_url,
'sha256': hasher.hexdigest(),
}
else:
raise RuntimeError(
'{}: download not found: {}'.format(package_name, resp))
def main():
package_meta_map = {
package_name[0]: get_package_meta(package_name)
for package_name in PACKAGES
}
kongctl_meta = package_meta_map.pop('kongctl')
print()
print(' url "{url}"'.format(url=kongctl_meta['url']))
print(' sha256 "{sha256}"'.format(sha256=kongctl_meta['sha256']))
print()
for dep_meta in package_meta_map.values():
print(' resource "{name}" do'.format(name=dep_meta['name']))
print(' url "{url}"'.format(url=dep_meta['url']))
print(' sha256 "{sha256}"'.format(sha256=dep_meta['sha256']))
print(' end')
print('')
if __name__ == '__main__':
main()
|
//export from Engineer file
const Employee = require('./Employee');
class Engineer extends Employee {
constructor (name, id, email, github) {
super(name, id, email);
this.github = github
}
getGithub() {
return this.github;
}
getRole() {
return 'Engineer';
}
}
module.exports = Engineer |
/*
LaTeXMathML.js
==============
This file, in this form, is due to Douglas Woodall, June 2006.
It contains JavaScript functions to convert (most simple) LaTeX
math notation to Presentation MathML. It was obtained by
downloading the file ASCIIMathML.js from
http://www1.chapman.edu/~jipsen/mathml/asciimathdownload/
and modifying it so that it carries out ONLY those conversions
that would be carried out in LaTeX. A description of the original
file, with examples, can be found at
www1.chapman.edu/~jipsen/mathml/asciimath.html
ASCIIMathML: Math on the web for everyone
Here is the header notice from the original file:
ASCIIMathML.js
==============
This file contains JavaScript functions to convert ASCII math notation
to Presentation MathML. The conversion is done while the (X)HTML page
loads, and should work with Firefox/Mozilla/Netscape 7+ and Internet
Explorer 6+MathPlayer (http://www.dessci.com/en/products/mathplayer/).
Just add the next line to your (X)HTML page with this file in the same folder:
<script type="text/javascript" src="ASCIIMathML.js"></script>
This is a convenient and inexpensive solution for authoring MathML.
Version 1.4.7 Dec 15, 2005, (c) Peter Jipsen http://www.chapman.edu/~jipsen
Latest version at http://www.chapman.edu/~jipsen/mathml/ASCIIMathML.js
For changes see http://www.chapman.edu/~jipsen/mathml/asciimathchanges.txt
If you use it on a webpage, please send the URL to [email protected]
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License (at http://www.gnu.org/copyleft/gpl.html)
for more details.
LaTeXMathML.js (ctd)
==============
The instructions for use are the same as for the original
ASCIIMathML.js, except that of course the line you add to your
file should be
<script type="text/javascript" src="LaTeXMathML.js"></script>
Or use absolute path names if the file is not in the same folder
as your (X)HTML page.
*/
var checkForMathML = true; // check if browser can display MathML
var notifyIfNoMathML = true; // display note if no MathML capability
var alertIfNoMathML = false; // show alert box if no MathML capability
// was "red":
var mathcolor = ""; // change it to "" (to inherit) or any other color
// was "serif":
var mathfontfamily = ""; // change to "" to inherit (works in IE)
// or another family (e.g. "arial")
var showasciiformulaonhover = true; // helps students learn ASCIIMath
/*
// Commented out by DRW -- not now used -- see DELIMITERS (twice) near the end
var displaystyle = false; // puts limits above and below large operators
var decimalsign = "."; // change to "," if you like, beware of `(1,2)`!
var AMdelimiter1 = "`", AMescape1 = "\\\\`"; // can use other characters
var AMdelimiter2 = "$", AMescape2 = "\\\\\\$", AMdelimiter2regexp = "\\$";
var doubleblankmathdelimiter = false; // if true, x+1 is equal to `x+1`
// for IE this works only in <!-- -->
//var separatetokens;// has been removed (email me if this is a problem)
*/
var isIE = document.createElementNS==null;
if (document.getElementById==null)
alert("This webpage requires a recent browser such as\
\nMozilla/Netscape 7+ or Internet Explorer 6+MathPlayer")
// all further global variables start with "AM"
function AMcreateElementXHTML(t) {
if (isIE) return document.createElement(t);
else return document.createElementNS("http://www.w3.org/1999/xhtml",t);
}
function AMnoMathMLNote() {
var nd = AMcreateElementXHTML("h3");
nd.setAttribute("align","center")
nd.appendChild(AMcreateElementXHTML("p"));
nd.appendChild(document.createTextNode("To view the "));
var an = AMcreateElementXHTML("a");
an.appendChild(document.createTextNode("LaTeXMathML"));
an.setAttribute("href","http://www.maths.nott.ac.uk/personal/drw/lm.html");
nd.appendChild(an);
nd.appendChild(document.createTextNode(" notation use Internet Explorer 6+"));
an = AMcreateElementXHTML("a");
an.appendChild(document.createTextNode("MathPlayer"));
an.setAttribute("href","http://www.dessci.com/en/products/mathplayer/download.htm");
nd.appendChild(an);
nd.appendChild(document.createTextNode(" or Netscape/Mozilla/Firefox"));
nd.appendChild(AMcreateElementXHTML("p"));
return nd;
}
function AMisMathMLavailable() {
if (navigator.appName.slice(0,8)=="Netscape")
if (navigator.appVersion.slice(0,1)>="5") return null;
else return AMnoMathMLNote();
else if (navigator.appName.slice(0,9)=="Microsoft")
try {
var ActiveX = new ActiveXObject("MathPlayer.Factory.1");
return null;
} catch (e) {
return AMnoMathMLNote();
}
else return AMnoMathMLNote();
}
// character lists for Mozilla/Netscape fonts
var AMcal = [0xEF35,0x212C,0xEF36,0xEF37,0x2130,0x2131,0xEF38,0x210B,0x2110,0xEF39,0xEF3A,0x2112,0x2133,0xEF3B,0xEF3C,0xEF3D,0xEF3E,0x211B,0xEF3F,0xEF40,0xEF41,0xEF42,0xEF43,0xEF44,0xEF45,0xEF46];
var AMfrk = [0xEF5D,0xEF5E,0x212D,0xEF5F,0xEF60,0xEF61,0xEF62,0x210C,0x2111,0xEF63,0xEF64,0xEF65,0xEF66,0xEF67,0xEF68,0xEF69,0xEF6A,0x211C,0xEF6B,0xEF6C,0xEF6D,0xEF6E,0xEF6F,0xEF70,0xEF71,0x2128];
var AMbbb = [0xEF8C,0xEF8D,0x2102,0xEF8E,0xEF8F,0xEF90,0xEF91,0x210D,0xEF92,0xEF93,0xEF94,0xEF95,0xEF96,0x2115,0xEF97,0x2119,0x211A,0x211D,0xEF98,0xEF99,0xEF9A,0xEF9B,0xEF9C,0xEF9D,0xEF9E,0x2124];
var CONST = 0, UNARY = 1, BINARY = 2, INFIX = 3, LEFTBRACKET = 4,
RIGHTBRACKET = 5, SPACE = 6, UNDEROVER = 7, DEFINITION = 8,
TEXT = 9, BIG = 10, LONG = 11, STRETCHY = 12, MATRIX = 13; // token types
var AMsqrt = {input:"\\sqrt", tag:"msqrt", output:"sqrt", ttype:UNARY},
AMroot = {input:"\\root", tag:"mroot", output:"root", ttype:BINARY},
AMfrac = {input:"\\frac", tag:"mfrac", output:"/", ttype:BINARY},
AMover = {input:"\\stackrel", tag:"mover", output:"stackrel", ttype:BINARY},
AMatop = {input:"\\atop", tag:"mfrac", output:"", ttype:INFIX},
AMchoose = {input:"\\choose", tag:"mfrac", output:"", ttype:INFIX},
AMsub = {input:"_", tag:"msub", output:"_", ttype:INFIX},
AMsup = {input:"^", tag:"msup", output:"^", ttype:INFIX},
AMtext = {input:"\\mathrm", tag:"mtext", output:"text", ttype:TEXT},
AMmbox = {input:"\\mbox", tag:"mtext", output:"mbox", ttype:TEXT};
// Commented out by DRW to prevent 1/2 turning into a 2-line fraction
// AMdiv = {input:"/", tag:"mfrac", output:"/", ttype:INFIX},
// Commented out by DRW so that " prints literally in equations
// AMquote = {input:"\"", tag:"mtext", output:"mbox", ttype:TEXT};
var AMsymbols = [
//Greek letters
{input:"\\alpha", tag:"mi", output:"\u03B1", ttype:CONST},
{input:"\\beta", tag:"mi", output:"\u03B2", ttype:CONST},
{input:"\\gamma", tag:"mi", output:"\u03B3", ttype:CONST},
{input:"\\delta", tag:"mi", output:"\u03B4", ttype:CONST},
{input:"\\epsilon", tag:"mi", output:"\u03B5", ttype:CONST},
{input:"\\varepsilon", tag:"mi", output:"\u025B", ttype:CONST},
{input:"\\zeta", tag:"mi", output:"\u03B6", ttype:CONST},
{input:"\\eta", tag:"mi", output:"\u03B7", ttype:CONST},
{input:"\\theta", tag:"mi", output:"\u03B8", ttype:CONST},
{input:"\\vartheta", tag:"mi", output:"\u03D1", ttype:CONST},
{input:"\\iota", tag:"mi", output:"\u03B9", ttype:CONST},
{input:"\\kappa", tag:"mi", output:"\u03BA", ttype:CONST},
{input:"\\lambda", tag:"mi", output:"\u03BB", ttype:CONST},
{input:"\\mu", tag:"mi", output:"\u03BC", ttype:CONST},
{input:"\\nu", tag:"mi", output:"\u03BD", ttype:CONST},
{input:"\\xi", tag:"mi", output:"\u03BE", ttype:CONST},
{input:"\\pi", tag:"mi", output:"\u03C0", ttype:CONST},
{input:"\\varpi", tag:"mi", output:"\u03D6", ttype:CONST},
{input:"\\rho", tag:"mi", output:"\u03C1", ttype:CONST},
{input:"\\varrho", tag:"mi", output:"\u03F1", ttype:CONST},
{input:"\\varsigma", tag:"mi", output:"\u03C2", ttype:CONST},
{input:"\\sigma", tag:"mi", output:"\u03C3", ttype:CONST},
{input:"\\tau", tag:"mi", output:"\u03C4", ttype:CONST},
{input:"\\upsilon", tag:"mi", output:"\u03C5", ttype:CONST},
{input:"\\phi", tag:"mi", output:"\u03C6", ttype:CONST},
{input:"\\varphi", tag:"mi", output:"\u03D5", ttype:CONST},
{input:"\\chi", tag:"mi", output:"\u03C7", ttype:CONST},
{input:"\\psi", tag:"mi", output:"\u03C8", ttype:CONST},
{input:"\\omega", tag:"mi", output:"\u03C9", ttype:CONST},
{input:"\\Gamma", tag:"mo", output:"\u0393", ttype:CONST},
{input:"\\Delta", tag:"mo", output:"\u0394", ttype:CONST},
{input:"\\Theta", tag:"mo", output:"\u0398", ttype:CONST},
{input:"\\Lambda", tag:"mo", output:"\u039B", ttype:CONST},
{input:"\\Xi", tag:"mo", output:"\u039E", ttype:CONST},
{input:"\\Pi", tag:"mo", output:"\u03A0", ttype:CONST},
{input:"\\Sigma", tag:"mo", output:"\u03A3", ttype:CONST},
{input:"\\Upsilon", tag:"mo", output:"\u03A5", ttype:CONST},
{input:"\\Phi", tag:"mo", output:"\u03A6", ttype:CONST},
{input:"\\Psi", tag:"mo", output:"\u03A8", ttype:CONST},
{input:"\\Omega", tag:"mo", output:"\u03A9", ttype:CONST},
//fractions
{input:"\\frac12", tag:"mo", output:"\u00BD", ttype:CONST},
{input:"\\frac14", tag:"mo", output:"\u00BC", ttype:CONST},
{input:"\\frac34", tag:"mo", output:"\u00BE", ttype:CONST},
{input:"\\frac13", tag:"mo", output:"\u2153", ttype:CONST},
{input:"\\frac23", tag:"mo", output:"\u2154", ttype:CONST},
{input:"\\frac15", tag:"mo", output:"\u2155", ttype:CONST},
{input:"\\frac25", tag:"mo", output:"\u2156", ttype:CONST},
{input:"\\frac35", tag:"mo", output:"\u2157", ttype:CONST},
{input:"\\frac45", tag:"mo", output:"\u2158", ttype:CONST},
{input:"\\frac16", tag:"mo", output:"\u2159", ttype:CONST},
{input:"\\frac56", tag:"mo", output:"\u215A", ttype:CONST},
{input:"\\frac18", tag:"mo", output:"\u215B", ttype:CONST},
{input:"\\frac38", tag:"mo", output:"\u215C", ttype:CONST},
{input:"\\frac58", tag:"mo", output:"\u215D", ttype:CONST},
{input:"\\frac78", tag:"mo", output:"\u215E", ttype:CONST},
//binary operation symbols
{input:"\\pm", tag:"mo", output:"\u00B1", ttype:CONST},
{input:"\\mp", tag:"mo", output:"\u2213", ttype:CONST},
{input:"\\triangleleft",tag:"mo", output:"\u22B2", ttype:CONST},
{input:"\\triangleright",tag:"mo",output:"\u22B3", ttype:CONST},
{input:"\\cdot", tag:"mo", output:"\u22C5", ttype:CONST},
{input:"\\star", tag:"mo", output:"\u22C6", ttype:CONST},
{input:"\\ast", tag:"mo", output:"\u002A", ttype:CONST},
{input:"\\times", tag:"mo", output:"\u00D7", ttype:CONST},
{input:"\\div", tag:"mo", output:"\u00F7", ttype:CONST},
{input:"\\circ", tag:"mo", output:"\u2218", ttype:CONST},
//{input:"\\bullet", tag:"mo", output:"\u2219", ttype:CONST},
{input:"\\bullet", tag:"mo", output:"\u2022", ttype:CONST},
{input:"\\oplus", tag:"mo", output:"\u2295", ttype:CONST},
{input:"\\ominus", tag:"mo", output:"\u2296", ttype:CONST},
{input:"\\otimes", tag:"mo", output:"\u2297", ttype:CONST},
{input:"\\bigcirc", tag:"mo", output:"\u25CB", ttype:CONST},
{input:"\\oslash", tag:"mo", output:"\u2298", ttype:CONST},
{input:"\\odot", tag:"mo", output:"\u2299", ttype:CONST},
{input:"\\land", tag:"mo", output:"\u2227", ttype:CONST},
{input:"\\wedge", tag:"mo", output:"\u2227", ttype:CONST},
{input:"\\lor", tag:"mo", output:"\u2228", ttype:CONST},
{input:"\\vee", tag:"mo", output:"\u2228", ttype:CONST},
{input:"\\cap", tag:"mo", output:"\u2229", ttype:CONST},
{input:"\\cup", tag:"mo", output:"\u222A", ttype:CONST},
{input:"\\sqcap", tag:"mo", output:"\u2293", ttype:CONST},
{input:"\\sqcup", tag:"mo", output:"\u2294", ttype:CONST},
{input:"\\uplus", tag:"mo", output:"\u228E", ttype:CONST},
{input:"\\amalg", tag:"mo", output:"\u2210", ttype:CONST},
{input:"\\bigtriangleup",tag:"mo",output:"\u25B3", ttype:CONST},
{input:"\\bigtriangledown",tag:"mo",output:"\u25BD", ttype:CONST},
{input:"\\dag", tag:"mo", output:"\u2020", ttype:CONST},
{input:"\\dagger", tag:"mo", output:"\u2020", ttype:CONST},
{input:"\\ddag", tag:"mo", output:"\u2021", ttype:CONST},
{input:"\\ddagger", tag:"mo", output:"\u2021", ttype:CONST},
{input:"\\lhd", tag:"mo", output:"\u22B2", ttype:CONST},
{input:"\\rhd", tag:"mo", output:"\u22B3", ttype:CONST},
{input:"\\unlhd", tag:"mo", output:"\u22B4", ttype:CONST},
{input:"\\unrhd", tag:"mo", output:"\u22B5", ttype:CONST},
//BIG Operators
{input:"\\sum", tag:"mo", output:"\u2211", ttype:UNDEROVER},
{input:"\\prod", tag:"mo", output:"\u220F", ttype:UNDEROVER},
{input:"\\bigcap", tag:"mo", output:"\u22C2", ttype:UNDEROVER},
{input:"\\bigcup", tag:"mo", output:"\u22C3", ttype:UNDEROVER},
{input:"\\bigwedge", tag:"mo", output:"\u22C0", ttype:UNDEROVER},
{input:"\\bigvee", tag:"mo", output:"\u22C1", ttype:UNDEROVER},
{input:"\\bigsqcap", tag:"mo", output:"\u2A05", ttype:UNDEROVER},
{input:"\\bigsqcup", tag:"mo", output:"\u2A06", ttype:UNDEROVER},
{input:"\\coprod", tag:"mo", output:"\u2210", ttype:UNDEROVER},
{input:"\\bigoplus", tag:"mo", output:"\u2A01", ttype:UNDEROVER},
{input:"\\bigotimes", tag:"mo", output:"\u2A02", ttype:UNDEROVER},
{input:"\\bigodot", tag:"mo", output:"\u2A00", ttype:UNDEROVER},
{input:"\\biguplus", tag:"mo", output:"\u2A04", ttype:UNDEROVER},
{input:"\\int", tag:"mo", output:"\u222B", ttype:CONST},
{input:"\\oint", tag:"mo", output:"\u222E", ttype:CONST},
//binary relation symbols
{input:":=", tag:"mo", output:":=", ttype:CONST},
{input:"\\lt", tag:"mo", output:"<", ttype:CONST},
{input:"\\gt", tag:"mo", output:">", ttype:CONST},
{input:"\\ne", tag:"mo", output:"\u2260", ttype:CONST},
{input:"\\neq", tag:"mo", output:"\u2260", ttype:CONST},
{input:"\\le", tag:"mo", output:"\u2264", ttype:CONST},
{input:"\\leq", tag:"mo", output:"\u2264", ttype:CONST},
{input:"\\leqslant", tag:"mo", output:"\u2264", ttype:CONST},
{input:"\\ge", tag:"mo", output:"\u2265", ttype:CONST},
{input:"\\geq", tag:"mo", output:"\u2265", ttype:CONST},
{input:"\\geqslant", tag:"mo", output:"\u2265", ttype:CONST},
{input:"\\equiv", tag:"mo", output:"\u2261", ttype:CONST},
{input:"\\ll", tag:"mo", output:"\u226A", ttype:CONST},
{input:"\\gg", tag:"mo", output:"\u226B", ttype:CONST},
{input:"\\doteq", tag:"mo", output:"\u2250", ttype:CONST},
{input:"\\prec", tag:"mo", output:"\u227A", ttype:CONST},
{input:"\\succ", tag:"mo", output:"\u227B", ttype:CONST},
{input:"\\preceq", tag:"mo", output:"\u227C", ttype:CONST},
{input:"\\succeq", tag:"mo", output:"\u227D", ttype:CONST},
{input:"\\subset", tag:"mo", output:"\u2282", ttype:CONST},
{input:"\\supset", tag:"mo", output:"\u2283", ttype:CONST},
{input:"\\subseteq", tag:"mo", output:"\u2286", ttype:CONST},
{input:"\\supseteq", tag:"mo", output:"\u2287", ttype:CONST},
{input:"\\sqsubset", tag:"mo", output:"\u228F", ttype:CONST},
{input:"\\sqsupset", tag:"mo", output:"\u2290", ttype:CONST},
{input:"\\sqsubseteq", tag:"mo", output:"\u2291", ttype:CONST},
{input:"\\sqsupseteq", tag:"mo", output:"\u2292", ttype:CONST},
{input:"\\sim", tag:"mo", output:"\u223C", ttype:CONST},
{input:"\\simeq", tag:"mo", output:"\u2243", ttype:CONST},
{input:"\\approx", tag:"mo", output:"\u2248", ttype:CONST},
{input:"\\cong", tag:"mo", output:"\u2245", ttype:CONST},
{input:"\\Join", tag:"mo", output:"\u22C8", ttype:CONST},
{input:"\\bowtie", tag:"mo", output:"\u22C8", ttype:CONST},
{input:"\\in", tag:"mo", output:"\u2208", ttype:CONST},
{input:"\\ni", tag:"mo", output:"\u220B", ttype:CONST},
{input:"\\owns", tag:"mo", output:"\u220B", ttype:CONST},
{input:"\\propto", tag:"mo", output:"\u221D", ttype:CONST},
{input:"\\vdash", tag:"mo", output:"\u22A2", ttype:CONST},
{input:"\\dashv", tag:"mo", output:"\u22A3", ttype:CONST},
{input:"\\models", tag:"mo", output:"\u22A8", ttype:CONST},
{input:"\\perp", tag:"mo", output:"\u22A5", ttype:CONST},
{input:"\\smile", tag:"mo", output:"\u2323", ttype:CONST},
{input:"\\frown", tag:"mo", output:"\u2322", ttype:CONST},
{input:"\\asymp", tag:"mo", output:"\u224D", ttype:CONST},
{input:"\\notin", tag:"mo", output:"\u2209", ttype:CONST},
//matrices
{input:"\\begin{eqnarray}", output:"X", ttype:MATRIX, invisible:true},
{input:"\\begin{array}", output:"X", ttype:MATRIX, invisible:true},
{input:"\\\\", output:"}&{", ttype:DEFINITION},
{input:"\\end{eqnarray}", output:"}}", ttype:DEFINITION},
{input:"\\end{array}", output:"}}", ttype:DEFINITION},
//grouping and literal brackets -- ieval is for IE
{input:"\\big", tag:"mo", output:"X", atval:"1.2", ieval:"2.2", ttype:BIG},
{input:"\\Big", tag:"mo", output:"X", atval:"1.6", ieval:"2.6", ttype:BIG},
{input:"\\bigg", tag:"mo", output:"X", atval:"2.2", ieval:"3.2", ttype:BIG},
{input:"\\Bigg", tag:"mo", output:"X", atval:"2.9", ieval:"3.9", ttype:BIG},
{input:"\\left", tag:"mo", output:"X", ttype:LEFTBRACKET},
{input:"\\right", tag:"mo", output:"X", ttype:RIGHTBRACKET},
{input:"{", output:"{", ttype:LEFTBRACKET, invisible:true},
{input:"}", output:"}", ttype:RIGHTBRACKET, invisible:true},
{input:"(", tag:"mo", output:"(", atval:"1", ttype:STRETCHY},
{input:"[", tag:"mo", output:"[", atval:"1", ttype:STRETCHY},
{input:"\\lbrack", tag:"mo", output:"[", atval:"1", ttype:STRETCHY},
{input:"\\{", tag:"mo", output:"{", atval:"1", ttype:STRETCHY},
{input:"\\lbrace", tag:"mo", output:"{", atval:"1", ttype:STRETCHY},
{input:"\\langle", tag:"mo", output:"\u2329", atval:"1", ttype:STRETCHY},
{input:"\\lfloor", tag:"mo", output:"\u230A", atval:"1", ttype:STRETCHY},
{input:"\\lceil", tag:"mo", output:"\u2308", atval:"1", ttype:STRETCHY},
// rtag:"mi" causes space to be inserted before a following sin, cos, etc.
// (see function AMparseExpr() )
{input:")", tag:"mo",output:")", rtag:"mi",atval:"1",ttype:STRETCHY},
{input:"]", tag:"mo",output:"]", rtag:"mi",atval:"1",ttype:STRETCHY},
{input:"\\rbrack",tag:"mo",output:"]", rtag:"mi",atval:"1",ttype:STRETCHY},
{input:"\\}", tag:"mo",output:"}", rtag:"mi",atval:"1",ttype:STRETCHY},
{input:"\\rbrace",tag:"mo",output:"}", rtag:"mi",atval:"1",ttype:STRETCHY},
{input:"\\rangle",tag:"mo",output:"\u232A", rtag:"mi",atval:"1",ttype:STRETCHY},
{input:"\\rfloor",tag:"mo",output:"\u230B", rtag:"mi",atval:"1",ttype:STRETCHY},
{input:"\\rceil", tag:"mo",output:"\u2309", rtag:"mi",atval:"1",ttype:STRETCHY},
// "|", "\\|", "\\vert" and "\\Vert" modified later: lspace = rspace = 0em
{input:"|", tag:"mo", output:"\u2223", atval:"1", ttype:STRETCHY},
{input:"\\|", tag:"mo", output:"\u2225", atval:"1", ttype:STRETCHY},
{input:"\\vert", tag:"mo", output:"\u2223", atval:"1", ttype:STRETCHY},
{input:"\\Vert", tag:"mo", output:"\u2225", atval:"1", ttype:STRETCHY},
{input:"\\mid", tag:"mo", output:"\u2223", atval:"1", ttype:STRETCHY},
{input:"\\parallel", tag:"mo", output:"\u2225", atval:"1", ttype:STRETCHY},
{input:"/", tag:"mo", output:"/", atval:"1.01", ttype:STRETCHY},
{input:"\\backslash", tag:"mo", output:"\u2216", atval:"1", ttype:STRETCHY},
{input:"\\setminus", tag:"mo", output:"\\", ttype:CONST},
//miscellaneous symbols
{input:"\\!", tag:"mspace", atname:"width", atval:"-0.167em", ttype:SPACE},
{input:"\\,", tag:"mspace", atname:"width", atval:"0.167em", ttype:SPACE},
{input:"\\>", tag:"mspace", atname:"width", atval:"0.222em", ttype:SPACE},
{input:"\\:", tag:"mspace", atname:"width", atval:"0.222em", ttype:SPACE},
{input:"\\;", tag:"mspace", atname:"width", atval:"0.278em", ttype:SPACE},
{input:"~", tag:"mspace", atname:"width", atval:"0.333em", ttype:SPACE},
{input:"\\quad", tag:"mspace", atname:"width", atval:"1em", ttype:SPACE},
{input:"\\qquad", tag:"mspace", atname:"width", atval:"2em", ttype:SPACE},
//{input:"{}", tag:"mo", output:"\u200B", ttype:CONST}, // zero-width
{input:"\\prime", tag:"mo", output:"\u2032", ttype:CONST},
{input:"'", tag:"mo", output:"\u02B9", ttype:CONST},
{input:"''", tag:"mo", output:"\u02BA", ttype:CONST},
{input:"'''", tag:"mo", output:"\u2034", ttype:CONST},
{input:"''''", tag:"mo", output:"\u2057", ttype:CONST},
{input:"\\ldots", tag:"mo", output:"\u2026", ttype:CONST},
{input:"\\cdots", tag:"mo", output:"\u22EF", ttype:CONST},
{input:"\\vdots", tag:"mo", output:"\u22EE", ttype:CONST},
{input:"\\ddots", tag:"mo", output:"\u22F1", ttype:CONST},
{input:"\\forall", tag:"mo", output:"\u2200", ttype:CONST},
{input:"\\exists", tag:"mo", output:"\u2203", ttype:CONST},
{input:"\\Re", tag:"mo", output:"\u211C", ttype:CONST},
{input:"\\Im", tag:"mo", output:"\u2111", ttype:CONST},
{input:"\\aleph", tag:"mo", output:"\u2135", ttype:CONST},
{input:"\\hbar", tag:"mo", output:"\u210F", ttype:CONST},
{input:"\\ell", tag:"mo", output:"\u2113", ttype:CONST},
{input:"\\wp", tag:"mo", output:"\u2118", ttype:CONST},
{input:"\\emptyset", tag:"mo", output:"\u2205", ttype:CONST},
{input:"\\infty", tag:"mo", output:"\u221E", ttype:CONST},
{input:"\\surd", tag:"mo", output:"\\sqrt{}", ttype:DEFINITION},
{input:"\\partial", tag:"mo", output:"\u2202", ttype:CONST},
{input:"\\nabla", tag:"mo", output:"\u2207", ttype:CONST},
{input:"\\triangle", tag:"mo", output:"\u25B3", ttype:CONST},
{input:"\\therefore", tag:"mo", output:"\u2234", ttype:CONST},
{input:"\\angle", tag:"mo", output:"\u2220", ttype:CONST},
//{input:"\\\\ ", tag:"mo", output:"\u00A0", ttype:CONST},
{input:"\\diamond", tag:"mo", output:"\u22C4", ttype:CONST},
//{input:"\\Diamond", tag:"mo", output:"\u25CA", ttype:CONST},
{input:"\\Diamond", tag:"mo", output:"\u25C7", ttype:CONST},
{input:"\\neg", tag:"mo", output:"\u00AC", ttype:CONST},
{input:"\\lnot", tag:"mo", output:"\u00AC", ttype:CONST},
{input:"\\bot", tag:"mo", output:"\u22A5", ttype:CONST},
{input:"\\top", tag:"mo", output:"\u22A4", ttype:CONST},
{input:"\\square", tag:"mo", output:"\u25AB", ttype:CONST},
{input:"\\Box", tag:"mo", output:"\u25A1", ttype:CONST},
{input:"\\wr", tag:"mo", output:"\u2240", ttype:CONST},
//standard functions
//Note UNDEROVER *must* have tag:"mo" to work properly
{input:"\\arccos", tag:"mi", output:"arccos", ttype:UNARY, func:true},
{input:"\\arcsin", tag:"mi", output:"arcsin", ttype:UNARY, func:true},
{input:"\\arctan", tag:"mi", output:"arctan", ttype:UNARY, func:true},
{input:"\\arg", tag:"mi", output:"arg", ttype:UNARY, func:true},
{input:"\\cos", tag:"mi", output:"cos", ttype:UNARY, func:true},
{input:"\\cosh", tag:"mi", output:"cosh", ttype:UNARY, func:true},
{input:"\\cot", tag:"mi", output:"cot", ttype:UNARY, func:true},
{input:"\\coth", tag:"mi", output:"coth", ttype:UNARY, func:true},
{input:"\\csc", tag:"mi", output:"csc", ttype:UNARY, func:true},
{input:"\\deg", tag:"mi", output:"deg", ttype:UNARY, func:true},
{input:"\\det", tag:"mi", output:"det", ttype:UNARY, func:true},
{input:"\\dim", tag:"mi", output:"dim", ttype:UNARY, func:true}, //CONST?
{input:"\\exp", tag:"mi", output:"exp", ttype:UNARY, func:true},
{input:"\\gcd", tag:"mi", output:"gcd", ttype:UNARY, func:true}, //CONST?
{input:"\\hom", tag:"mi", output:"hom", ttype:UNARY, func:true},
{input:"\\inf", tag:"mo", output:"inf", ttype:UNDEROVER},
{input:"\\ker", tag:"mi", output:"ker", ttype:UNARY, func:true},
{input:"\\lg", tag:"mi", output:"lg", ttype:UNARY, func:true},
{input:"\\lim", tag:"mo", output:"lim", ttype:UNDEROVER},
{input:"\\liminf", tag:"mo", output:"liminf", ttype:UNDEROVER},
{input:"\\limsup", tag:"mo", output:"limsup", ttype:UNDEROVER},
{input:"\\ln", tag:"mi", output:"ln", ttype:UNARY, func:true},
{input:"\\log", tag:"mi", output:"log", ttype:UNARY, func:true},
{input:"\\max", tag:"mo", output:"max", ttype:UNDEROVER},
{input:"\\min", tag:"mo", output:"min", ttype:UNDEROVER},
{input:"\\Pr", tag:"mi", output:"Pr", ttype:UNARY, func:true},
{input:"\\sec", tag:"mi", output:"sec", ttype:UNARY, func:true},
{input:"\\sin", tag:"mi", output:"sin", ttype:UNARY, func:true},
{input:"\\sinh", tag:"mi", output:"sinh", ttype:UNARY, func:true},
{input:"\\sup", tag:"mo", output:"sup", ttype:UNDEROVER},
{input:"\\tan", tag:"mi", output:"tan", ttype:UNARY, func:true},
{input:"\\tanh", tag:"mi", output:"tanh", ttype:UNARY, func:true},
//arrows
{input:"\\gets", tag:"mo", output:"\u2190", ttype:CONST},
{input:"\\leftarrow", tag:"mo", output:"\u2190", ttype:CONST},
{input:"\\to", tag:"mo", output:"\u2192", ttype:CONST},
{input:"\\rightarrow", tag:"mo", output:"\u2192", ttype:CONST},
{input:"\\leftrightarrow", tag:"mo", output:"\u2194", ttype:CONST},
{input:"\\uparrow", tag:"mo", output:"\u2191", ttype:CONST},
{input:"\\downarrow", tag:"mo", output:"\u2193", ttype:CONST},
{input:"\\updownarrow", tag:"mo", output:"\u2195", ttype:CONST},
{input:"\\Leftarrow", tag:"mo", output:"\u21D0", ttype:CONST},
{input:"\\Rightarrow", tag:"mo", output:"\u21D2", ttype:CONST},
{input:"\\Leftrightarrow", tag:"mo", output:"\u21D4", ttype:CONST},
{input:"\\iff", tag:"mo", output:"~\\Longleftrightarrow~", ttype:DEFINITION},
{input:"\\Uparrow", tag:"mo", output:"\u21D1", ttype:CONST},
{input:"\\Downarrow", tag:"mo", output:"\u21D3", ttype:CONST},
{input:"\\Updownarrow", tag:"mo", output:"\u21D5", ttype:CONST},
{input:"\\mapsto", tag:"mo", output:"\u21A6", ttype:CONST},
{input:"\\longleftarrow", tag:"mo", output:"\u2190", ttype:LONG},
{input:"\\longrightarrow", tag:"mo", output:"\u2192", ttype:LONG},
{input:"\\longleftrightarrow", tag:"mo", output:"\u2194", ttype:LONG},
{input:"\\Longleftarrow", tag:"mo", output:"\u21D0", ttype:LONG},
{input:"\\Longrightarrow", tag:"mo", output:"\u21D2", ttype:LONG},
{input:"\\Longleftrightarrow", tag:"mo", output:"\u21D4", ttype:LONG},
{input:"\\longmapsto", tag:"mo", output:"\u21A6", ttype:CONST},
// disaster if LONG
//commands with argument
AMsqrt, AMroot, AMfrac, AMover, AMsub, AMsup, AMtext, AMmbox, AMatop, AMchoose,
//AMdiv, AMquote,
//diacritical marks
{input:"\\acute", tag:"mover", output:"\u00B4", ttype:UNARY, acc:true},
//{input:"\\acute", tag:"mover", output:"\u0317", ttype:UNARY, acc:true},
//{input:"\\acute", tag:"mover", output:"\u0301", ttype:UNARY, acc:true},
//{input:"\\grave", tag:"mover", output:"\u0300", ttype:UNARY, acc:true},
//{input:"\\grave", tag:"mover", output:"\u0316", ttype:UNARY, acc:true},
{input:"\\grave", tag:"mover", output:"\u0060", ttype:UNARY, acc:true},
{input:"\\breve", tag:"mover", output:"\u02D8", ttype:UNARY, acc:true},
{input:"\\check", tag:"mover", output:"\u02C7", ttype:UNARY, acc:true},
{input:"\\dot", tag:"mover", output:".", ttype:UNARY, acc:true},
{input:"\\ddot", tag:"mover", output:"..", ttype:UNARY, acc:true},
//{input:"\\ddot", tag:"mover", output:"\u00A8", ttype:UNARY, acc:true},
{input:"\\mathring", tag:"mover", output:"\u00B0", ttype:UNARY, acc:true},
{input:"\\vec", tag:"mover", output:"\u20D7", ttype:UNARY, acc:true},
{input:"\\overrightarrow",tag:"mover",output:"\u20D7", ttype:UNARY, acc:true},
{input:"\\overleftarrow",tag:"mover", output:"\u20D6", ttype:UNARY, acc:true},
{input:"\\hat", tag:"mover", output:"\u005E", ttype:UNARY, acc:true},
{input:"\\widehat", tag:"mover", output:"\u0302", ttype:UNARY, acc:true},
{input:"\\tilde", tag:"mover", output:"~", ttype:UNARY, acc:true},
//{input:"\\tilde", tag:"mover", output:"\u0303", ttype:UNARY, acc:true},
{input:"\\widetilde", tag:"mover", output:"\u02DC", ttype:UNARY, acc:true},
{input:"\\bar", tag:"mover", output:"\u203E", ttype:UNARY, acc:true},
{input:"\\overbrace", tag:"mover", output:"\u23B4", ttype:UNARY, acc:true},
{input:"\\overline", tag:"mover", output:"\u00AF", ttype:UNARY, acc:true},
{input:"\\underbrace", tag:"munder", output:"\u23B5", ttype:UNARY, acc:true},
{input:"\\underline", tag:"munder", output:"\u00AF", ttype:UNARY, acc:true},
//{input:"underline", tag:"munder", output:"\u0332", ttype:UNARY, acc:true},
//typestyles and fonts
{input:"\\displaystyle",tag:"mstyle",atname:"displaystyle",atval:"true", ttype:UNARY},
{input:"\\textstyle",tag:"mstyle",atname:"displaystyle",atval:"false", ttype:UNARY},
{input:"\\scriptstyle",tag:"mstyle",atname:"scriptlevel",atval:"1", ttype:UNARY},
{input:"\\scriptscriptstyle",tag:"mstyle",atname:"scriptlevel",atval:"2", ttype:UNARY},
{input:"\\textrm", tag:"mstyle", output:"\\mathrm", ttype: DEFINITION},
{input:"\\mathbf", tag:"mstyle", atname:"mathvariant", atval:"bold", ttype:UNARY},
{input:"\\textbf", tag:"mstyle", atname:"mathvariant", atval:"bold", ttype:UNARY},
{input:"\\mathit", tag:"mstyle", atname:"mathvariant", atval:"italic", ttype:UNARY},
{input:"\\textit", tag:"mstyle", atname:"mathvariant", atval:"italic", ttype:UNARY},
{input:"\\mathtt", tag:"mstyle", atname:"mathvariant", atval:"monospace", ttype:UNARY},
{input:"\\texttt", tag:"mstyle", atname:"mathvariant", atval:"monospace", ttype:UNARY},
{input:"\\mathsf", tag:"mstyle", atname:"mathvariant", atval:"sans-serif", ttype:UNARY},
{input:"\\mathbb", tag:"mstyle", atname:"mathvariant", atval:"double-struck", ttype:UNARY, codes:AMbbb},
{input:"\\mathcal",tag:"mstyle", atname:"mathvariant", atval:"script", ttype:UNARY, codes:AMcal},
{input:"\\mathfrak",tag:"mstyle",atname:"mathvariant", atval:"fraktur",ttype:UNARY, codes:AMfrk}
];
function compareNames(s1,s2) {
  if (s1.input > s2.input) return 1;
else return -1;
}
var AMnames = []; //list of input symbols
function AMinitSymbols() {
AMsymbols.sort(compareNames);
  for (var i=0; i<AMsymbols.length; i++) AMnames[i] = AMsymbols[i].input;
}
var AMmathml = "http://www.w3.org/1998/Math/MathML";
function AMcreateElementMathML(t) {
if (isIE) return document.createElement("m:"+t);
else return document.createElementNS(AMmathml,t);
}
function AMcreateMmlNode(t,frag) {
// var node = AMcreateElementMathML(name);
if (isIE) var node = document.createElement("m:"+t);
else var node = document.createElementNS(AMmathml,t);
node.appendChild(frag);
return node;
}
function newcommand(oldstr,newstr) {
AMsymbols = AMsymbols.concat([{input:oldstr, tag:"mo", output:newstr,
ttype:DEFINITION}]);
}
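// Usage sketch (hypothetical macro name): after registering a shorthand with
// newcommand, AMinitSymbols() must run again so the re-sorted AMnames lookup
// table can find it:
//   newcommand("\\RR", "\\mathbb{R}");
//   AMinitSymbols();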
function AMremoveCharsAndBlanks(str,n) {
//remove n characters and any following blanks
var st;
st = str.slice(n);
for (var i=0; i<st.length && st.charCodeAt(i)<=32; i=i+1);
return st.slice(i);
}
function AMposition(arr, str, n) {
// return position >=n where str appears or would be inserted
// assumes arr is sorted
if (n==0) {
var h,m;
n = -1;
h = arr.length;
while (n+1<h) {
m = (n+h) >> 1;
if (arr[m]<str) n = m; else h = m;
}
return h;
} else
for (var i=n; i<arr.length && arr[i]<str; i++);
return i; // i=arr.length || arr[i]>=str
}
function AMgetSymbol(str) {
//return maximal initial substring of str that appears in names
//return null if there is none
var k = 0; //new pos
var j = 0; //old pos
var mk; //match pos
var st;
var tagst;
var match = "";
var more = true;
for (var i=1; i<=str.length && more; i++) {
st = str.slice(0,i); //initial substring of length i
j = k;
k = AMposition(AMnames, st, j);
if (k<AMnames.length && str.slice(0,AMnames[k].length)==AMnames[k]){
match = AMnames[k];
mk = k;
i = match.length;
}
more = k<AMnames.length && str.slice(0,AMnames[k].length)>=AMnames[k];
}
AMpreviousSymbol=AMcurrentSymbol;
if (match!=""){
AMcurrentSymbol=AMsymbols[mk].ttype;
return AMsymbols[mk];
}
AMcurrentSymbol=CONST;
k = 1;
st = str.slice(0,1); //take 1 character
if ("0"<=st && st<="9") tagst = "mn";
else tagst = (("A">st || st>"Z") && ("a">st || st>"z")?"mo":"mi");
/*
// Commented out by DRW (not fully understood, but probably to do with
// use of "/" as an INFIX version of "\\frac", which we don't want):
//}
//if (st=="-" && AMpreviousSymbol==INFIX) {
// AMcurrentSymbol = INFIX; //trick "/" into recognizing "-" on second parse
// return {input:st, tag:tagst, output:st, ttype:UNARY, func:true};
//}
*/
return {input:st, tag:tagst, output:st, ttype:CONST};
}
/*Parsing ASCII math expressions with the following grammar
v ::= [A-Za-z] | greek letters | numbers | other constant symbols
u ::= sqrt | text | bb | other unary symbols for font commands
b ::= frac | root | stackrel binary symbols
l ::= { | \left left brackets
r ::= } | \right right brackets
S ::= v | lEr | uS | bSS Simple expression
I ::= S_S | S^S | S_S^S | S Intermediate expression
E ::= IE | I/I Expression
Each terminal symbol is translated into a corresponding mathml node.*/
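// Worked example against the grammar above: "\\frac{a}{b}" matches b S S --
// AMparseSexpr consumes \frac (a BINARY symbol), recursively parses the two
// brace-delimited simple expressions {a} and {b}, and appends both results to
// the node named by the symbol's tag (presumably an mfrac element, since
// AMfrac is defined outside this excerpt).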
var AMpreviousSymbol,AMcurrentSymbol;
function AMparseSexpr(str) { //parses str and returns [node,tailstr,(node)tag]
var symbol, node, result, result2, i, st,// rightvert = false,
newFrag = document.createDocumentFragment();
str = AMremoveCharsAndBlanks(str,0);
symbol = AMgetSymbol(str); //either a token or a bracket or empty
if (symbol == null || symbol.ttype == RIGHTBRACKET)
return [null,str,null];
if (symbol.ttype == DEFINITION) {
str = symbol.output+AMremoveCharsAndBlanks(str,symbol.input.length);
symbol = AMgetSymbol(str);
if (symbol == null || symbol.ttype == RIGHTBRACKET)
return [null,str,null];
}
str = AMremoveCharsAndBlanks(str,symbol.input.length);
switch (symbol.ttype) {
case SPACE:
node = AMcreateElementMathML(symbol.tag);
node.setAttribute(symbol.atname,symbol.atval);
return [node,str,symbol.tag];
case UNDEROVER:
if (isIE) {
if (symbol.input.substr(0,4) == "\\big") { // botch for missing symbols
str = "\\"+symbol.input.substr(4)+str; // make \bigcup = \cup etc.
symbol = AMgetSymbol(str);
symbol.ttype = UNDEROVER;
str = AMremoveCharsAndBlanks(str,symbol.input.length);
}
}
return [AMcreateMmlNode(symbol.tag,
document.createTextNode(symbol.output)),str,symbol.tag];
case CONST:
var output = symbol.output;
if (isIE) {
if (symbol.input == "'")
output = "\u2032";
else if (symbol.input == "''")
output = "\u2033";
else if (symbol.input == "'''")
output = "\u2033\u2032";
else if (symbol.input == "''''")
output = "\u2033\u2033";
else if (symbol.input == "\\square")
output = "\u25A1"; // same as \Box
else if (symbol.input.substr(0,5) == "\\frac") {
// botch for missing fractions
var denom = symbol.input.substr(6,1);
if (denom == "5" || denom == "6") {
str = symbol.input.replace(/\\frac/,"\\frac ")+str;
return [node,str,symbol.tag];
}
}
}
node = AMcreateMmlNode(symbol.tag,document.createTextNode(output));
return [node,str,symbol.tag];
case LONG: // added by DRW
node = AMcreateMmlNode(symbol.tag,document.createTextNode(symbol.output));
node.setAttribute("minsize","1.5");
node.setAttribute("maxsize","1.5");
node = AMcreateMmlNode("mover",node);
node.appendChild(AMcreateElementMathML("mspace"));
return [node,str,symbol.tag];
case STRETCHY: // added by DRW
if (isIE && symbol.input == "\\backslash")
symbol.output = "\\"; // doesn't expand, but then nor does "\u2216"
node = AMcreateMmlNode(symbol.tag,document.createTextNode(symbol.output));
if (symbol.input == "|" || symbol.input == "\\vert" ||
symbol.input == "\\|" || symbol.input == "\\Vert") {
node.setAttribute("lspace","0em");
node.setAttribute("rspace","0em");
}
node.setAttribute("maxsize",symbol.atval); // don't allow to stretch here
if (symbol.rtag != null)
return [node,str,symbol.rtag];
else
return [node,str,symbol.tag];
case BIG: // added by DRW
var atval = symbol.atval;
if (isIE)
atval = symbol.ieval;
symbol = AMgetSymbol(str);
if (symbol == null)
return [null,str,null];
str = AMremoveCharsAndBlanks(str,symbol.input.length);
node = AMcreateMmlNode(symbol.tag,document.createTextNode(symbol.output));
if (isIE) { // to get brackets to expand
var space = AMcreateElementMathML("mspace");
space.setAttribute("height",atval+"ex");
node = AMcreateMmlNode("mrow",node);
node.appendChild(space);
} else { // ignored in IE
node.setAttribute("minsize",atval);
node.setAttribute("maxsize",atval);
}
return [node,str,symbol.tag];
case LEFTBRACKET: //read (expr+)
if (symbol.input == "\\left") { // left what?
symbol = AMgetSymbol(str);
if (symbol != null) {
if (symbol.input == ".")
symbol.invisible = true;
str = AMremoveCharsAndBlanks(str,symbol.input.length);
}
}
result = AMparseExpr(str,true,false);
if (symbol==null ||
(typeof symbol.invisible == "boolean" && symbol.invisible))
node = AMcreateMmlNode("mrow",result[0]);
else {
node = AMcreateMmlNode("mo",document.createTextNode(symbol.output));
node = AMcreateMmlNode("mrow",node);
node.appendChild(result[0]);
}
return [node,result[1],result[2]];
case MATRIX: //read (expr+)
if (symbol.input == "\\begin{array}") {
var mask = "";
symbol = AMgetSymbol(str);
str = AMremoveCharsAndBlanks(str,0);
if (symbol == null)
mask = "l";
else {
str = AMremoveCharsAndBlanks(str,symbol.input.length);
if (symbol.input != "{")
mask = "l";
else do {
symbol = AMgetSymbol(str);
if (symbol != null) {
str = AMremoveCharsAndBlanks(str,symbol.input.length);
if (symbol.input != "}")
mask = mask+symbol.input;
}
} while (symbol != null && symbol.input != "" && symbol.input != "}");
}
result = AMparseExpr("{"+str,true,true);
// if (result[0]==null) return [AMcreateMmlNode("mo",
// document.createTextNode(symbol.input)),str];
node = AMcreateMmlNode("mtable",result[0]);
mask = mask.replace(/l/g,"left ");
mask = mask.replace(/r/g,"right ");
mask = mask.replace(/c/g,"center ");
node.setAttribute("columnalign",mask);
node.setAttribute("displaystyle","false");
if (isIE)
return [node,result[1],null];
// trying to get a *little* bit of space around the array
// (IE already includes it)
var lspace = AMcreateElementMathML("mspace");
lspace.setAttribute("width","0.167em");
var rspace = AMcreateElementMathML("mspace");
rspace.setAttribute("width","0.167em");
var node1 = AMcreateMmlNode("mrow",lspace);
node1.appendChild(node);
node1.appendChild(rspace);
return [node1,result[1],null];
} else { // eqnarray
result = AMparseExpr("{"+str,true,true);
node = AMcreateMmlNode("mtable",result[0]);
if (isIE)
node.setAttribute("columnspacing","0.25em"); // best in practice?
else
node.setAttribute("columnspacing","0.167em"); // correct (but ignored?)
node.setAttribute("columnalign","right center left");
node.setAttribute("displaystyle","true");
node = AMcreateMmlNode("mrow",node);
return [node,result[1],null];
}
case TEXT:
if (str.charAt(0)=="{") i=str.indexOf("}");
else i = 0;
if (i==-1)
i = str.length;
st = str.slice(1,i);
if (st.charAt(0) == " ") {
node = AMcreateElementMathML("mspace");
node.setAttribute("width","0.33em"); // was 1ex
newFrag.appendChild(node);
}
newFrag.appendChild(
AMcreateMmlNode(symbol.tag,document.createTextNode(st)));
if (st.charAt(st.length-1) == " ") {
node = AMcreateElementMathML("mspace");
node.setAttribute("width","0.33em"); // was 1ex
newFrag.appendChild(node);
}
str = AMremoveCharsAndBlanks(str,i+1);
return [AMcreateMmlNode("mrow",newFrag),str,null];
case UNARY:
result = AMparseSexpr(str);
if (result[0]==null) return [AMcreateMmlNode(symbol.tag,
document.createTextNode(symbol.output)),str];
if (typeof symbol.func == "boolean" && symbol.func) { // functions hack
st = str.charAt(0);
// if (st=="^" || st=="_" || st=="/" || st=="|" || st==",") {
if (st=="^" || st=="_" || st==",") {
return [AMcreateMmlNode(symbol.tag,
document.createTextNode(symbol.output)),str,symbol.tag];
} else {
node = AMcreateMmlNode("mrow",
AMcreateMmlNode(symbol.tag,document.createTextNode(symbol.output)));
if (isIE) {
var space = AMcreateElementMathML("mspace");
space.setAttribute("width","0.167em");
node.appendChild(space);
}
node.appendChild(result[0]);
return [node,result[1],symbol.tag];
}
}
if (symbol.input == "\\sqrt") { // sqrt
if (isIE) { // set minsize, for \surd
var space = AMcreateElementMathML("mspace");
space.setAttribute("height","1.2ex");
space.setAttribute("width","0em"); // probably no effect
node = AMcreateMmlNode(symbol.tag,result[0])
// node.setAttribute("minsize","1"); // ignored
// node = AMcreateMmlNode("mrow",node); // hopefully unnecessary
node.appendChild(space);
return [node,result[1],symbol.tag];
} else
return [AMcreateMmlNode(symbol.tag,result[0]),result[1],symbol.tag];
} else if (typeof symbol.acc == "boolean" && symbol.acc) { // accent
node = AMcreateMmlNode(symbol.tag,result[0]);
var output = symbol.output;
if (isIE) {
if (symbol.input == "\\hat")
output = "\u0302";
else if (symbol.input == "\\widehat")
output = "\u005E";
else if (symbol.input == "\\bar")
output = "\u00AF";
else if (symbol.input == "\\grave")
output = "\u0300";
else if (symbol.input == "\\tilde")
output = "\u0303";
}
var node1 = AMcreateMmlNode("mo",document.createTextNode(output));
if (symbol.input == "\\vec" || symbol.input == "\\check")
// don't allow to stretch
node1.setAttribute("maxsize","1.2");
// why doesn't "1" work? \vec nearly disappears in firefox
if (isIE && symbol.input == "\\bar")
node1.setAttribute("maxsize","0.5");
if (symbol.input == "\\underbrace" || symbol.input == "\\underline")
node1.setAttribute("accentunder","true");
else
node1.setAttribute("accent","true");
node.appendChild(node1);
if (symbol.input == "\\overbrace" || symbol.input == "\\underbrace")
node.ttype = UNDEROVER;
return [node,result[1],symbol.tag];
} else { // font change or displaystyle command
if (!isIE && typeof symbol.codes != "undefined") {
for (i=0; i<result[0].childNodes.length; i++)
if (result[0].childNodes[i].nodeName=="mi" || result[0].nodeName=="mi") {
st = (result[0].nodeName=="mi"?result[0].firstChild.nodeValue:
result[0].childNodes[i].firstChild.nodeValue);
var newst = [];
for (var j=0; j<st.length; j++)
if (st.charCodeAt(j)>64 && st.charCodeAt(j)<91) newst = newst +
String.fromCharCode(symbol.codes[st.charCodeAt(j)-65]);
else newst = newst + st.charAt(j);
if (result[0].nodeName=="mi")
result[0]=AMcreateElementMathML("mo").
appendChild(document.createTextNode(newst));
else result[0].replaceChild(AMcreateElementMathML("mo").
appendChild(document.createTextNode(newst)),result[0].childNodes[i]);
}
}
node = AMcreateMmlNode(symbol.tag,result[0]);
node.setAttribute(symbol.atname,symbol.atval);
if (symbol.input == "\\scriptstyle" ||
symbol.input == "\\scriptscriptstyle")
node.setAttribute("displaystyle","false");
return [node,result[1],symbol.tag];
}
case BINARY:
result = AMparseSexpr(str);
if (result[0]==null) return [AMcreateMmlNode("mo",
document.createTextNode(symbol.input)),str,null];
result2 = AMparseSexpr(result[1]);
if (result2[0]==null) return [AMcreateMmlNode("mo",
document.createTextNode(symbol.input)),str,null];
if (symbol.input=="\\root" || symbol.input=="\\stackrel")
newFrag.appendChild(result2[0]);
newFrag.appendChild(result[0]);
if (symbol.input=="\\frac") newFrag.appendChild(result2[0]);
return [AMcreateMmlNode(symbol.tag,newFrag),result2[1],symbol.tag];
case INFIX:
str = AMremoveCharsAndBlanks(str,symbol.input.length);
return [AMcreateMmlNode("mo",document.createTextNode(symbol.output)),
str,symbol.tag];
default:
  return [AMcreateMmlNode(symbol.tag, // it's a constant
document.createTextNode(symbol.output)),str,symbol.tag];
}
}
function AMparseIexpr(str) {
var symbol, sym1, sym2, node, result, tag, underover;
str = AMremoveCharsAndBlanks(str,0);
sym1 = AMgetSymbol(str);
result = AMparseSexpr(str);
node = result[0];
str = result[1];
tag = result[2];
symbol = AMgetSymbol(str);
if (symbol.ttype == INFIX) {
str = AMremoveCharsAndBlanks(str,symbol.input.length);
result = AMparseSexpr(str);
if (result[0] == null) // show box in place of missing argument
result[0] = AMcreateMmlNode("mo",document.createTextNode("\u25A1"));
str = result[1];
tag = result[2];
if (symbol.input == "_" || symbol.input == "^") {
sym2 = AMgetSymbol(str);
tag = null; // no space between x^2 and a following sin, cos, etc.
// This is for \underbrace and \overbrace
underover = ((sym1.ttype == UNDEROVER) || (node.ttype == UNDEROVER));
// underover = (sym1.ttype == UNDEROVER);
if (symbol.input == "_" && sym2.input == "^") {
str = AMremoveCharsAndBlanks(str,sym2.input.length);
var res2 = AMparseSexpr(str);
str = res2[1];
tag = res2[2]; // leave space between x_1^2 and a following sin etc.
node = AMcreateMmlNode((underover?"munderover":"msubsup"),node);
node.appendChild(result[0]);
node.appendChild(res2[0]);
} else if (symbol.input == "_") {
node = AMcreateMmlNode((underover?"munder":"msub"),node);
node.appendChild(result[0]);
} else {
node = AMcreateMmlNode((underover?"mover":"msup"),node);
node.appendChild(result[0]);
}
node = AMcreateMmlNode("mrow",node); // so sum does not stretch
} else {
node = AMcreateMmlNode(symbol.tag,node);
if (symbol.input == "\\atop" || symbol.input == "\\choose")
node.setAttribute("linethickness","0ex");
node.appendChild(result[0]);
if (symbol.input == "\\choose")
node = AMcreateMmlNode("mfenced",node);
}
}
return [node,str,tag];
}
function AMparseExpr(str,rightbracket,matrix) {
var symbol, node, result, i, tag,
newFrag = document.createDocumentFragment();
do {
str = AMremoveCharsAndBlanks(str,0);
result = AMparseIexpr(str);
node = result[0];
str = result[1];
tag = result[2];
symbol = AMgetSymbol(str);
if (node!=undefined) {
if ((tag == "mn" || tag == "mi") && symbol!=null &&
typeof symbol.func == "boolean" && symbol.func) {
// Add space before \sin in 2\sin x or x\sin x
var space = AMcreateElementMathML("mspace");
space.setAttribute("width","0.167em");
node = AMcreateMmlNode("mrow",node);
node.appendChild(space);
}
newFrag.appendChild(node);
}
} while ((symbol.ttype != RIGHTBRACKET)
&& symbol!=null && symbol.output!="");
tag = null;
if (symbol.ttype == RIGHTBRACKET) {
if (symbol.input == "\\right") { // right what?
str = AMremoveCharsAndBlanks(str,symbol.input.length);
symbol = AMgetSymbol(str);
if (symbol != null && symbol.input == ".")
symbol.invisible = true;
if (symbol != null)
tag = symbol.rtag;
}
if (symbol!=null)
str = AMremoveCharsAndBlanks(str,symbol.input.length); // ready to return
var len = newFrag.childNodes.length;
if (matrix &&
len>0 && newFrag.childNodes[len-1].nodeName == "mrow" && len>1 &&
newFrag.childNodes[len-2].nodeName == "mo" &&
newFrag.childNodes[len-2].firstChild.nodeValue == "&") { //matrix
var pos = []; // positions of ampersands
var m = newFrag.childNodes.length;
for (i=0; matrix && i<m; i=i+2) {
pos[i] = [];
node = newFrag.childNodes[i];
for (var j=0; j<node.childNodes.length; j++)
if (node.childNodes[j].firstChild.nodeValue=="&")
pos[i][pos[i].length]=j;
}
var row, frag, n, k, table = document.createDocumentFragment();
for (i=0; i<m; i=i+2) {
row = document.createDocumentFragment();
frag = document.createDocumentFragment();
node = newFrag.firstChild; // <mrow> -&-&...&-&- </mrow>
n = node.childNodes.length;
k = 0;
for (j=0; j<n; j++) {
if (typeof pos[i][k] != "undefined" && j==pos[i][k]){
node.removeChild(node.firstChild); //remove &
row.appendChild(AMcreateMmlNode("mtd",frag));
k++;
} else frag.appendChild(node.firstChild);
}
row.appendChild(AMcreateMmlNode("mtd",frag));
if (newFrag.childNodes.length>2) {
newFrag.removeChild(newFrag.firstChild); //remove <mrow> </mrow>
newFrag.removeChild(newFrag.firstChild); //remove <mo>&</mo>
}
table.appendChild(AMcreateMmlNode("mtr",row));
}
return [table,str];
}
if (typeof symbol.invisible != "boolean" || !symbol.invisible) {
node = AMcreateMmlNode("mo",document.createTextNode(symbol.output));
newFrag.appendChild(node);
}
}
return [newFrag,str,tag];
}
function AMparseMath(str) {
var result, node = AMcreateElementMathML("mstyle");
if (mathcolor != "") node.setAttribute("mathcolor",mathcolor);
if (mathfontfamily != "") node.setAttribute("fontfamily",mathfontfamily);
node.appendChild(AMparseExpr(str.replace(/^\s+/g,""),false,false)[0]);
node = AMcreateMmlNode("math",node);
if (showasciiformulaonhover) //fixed by djhsu so newline
node.setAttribute("title",str.replace(/\s+/g," "));//does not show in Gecko
if (mathfontfamily != "" && (isIE || mathfontfamily != "serif")) {
var fnode = AMcreateElementXHTML("font");
fnode.setAttribute("face",mathfontfamily);
fnode.appendChild(node);
return fnode;
}
return node;
}
function AMstrarr2docFrag(arr, linebreaks) {
var newFrag=document.createDocumentFragment();
var expr = false;
for (var i=0; i<arr.length; i++) {
if (expr) newFrag.appendChild(AMparseMath(arr[i]));
else {
var arri = (linebreaks ? arr[i].split("\n\n") : [arr[i]]);
newFrag.appendChild(AMcreateElementXHTML("span").
appendChild(document.createTextNode(arri[0])));
for (var j=1; j<arri.length; j++) {
newFrag.appendChild(AMcreateElementXHTML("p"));
newFrag.appendChild(AMcreateElementXHTML("span").
appendChild(document.createTextNode(arri[j])));
}
}
expr = !expr;
}
return newFrag;
}
function AMprocessNodeR(n, linebreaks) {
var mtch, str, arr, frg, i;
if (n.childNodes.length == 0) {
if ((n.nodeType!=8 || linebreaks) &&
n.parentNode.nodeName!="form" && n.parentNode.nodeName!="FORM" &&
n.parentNode.nodeName!="textarea" && n.parentNode.nodeName!="TEXTAREA" &&
n.parentNode.nodeName!="pre" && n.parentNode.nodeName!="PRE") {
str = n.nodeValue;
if (!(str == null)) {
str = str.replace(/\r\n\r\n/g,"\n\n");
str = str.replace(/\x20+/g," ");
str = str.replace(/\s*\r\n/g," ");
// DELIMITERS:
mtch = (str.indexOf("\$")==-1 ? false : true);
str = str.replace(/([^\\])\$/g,"$1 \$");
str = str.replace(/^\$/," \$"); // in case \$ at start of string
arr = str.split(" \$");
for (i=0; i<arr.length; i++)
arr[i]=arr[i].replace(/\\\$/g,"\$");
if (arr.length>1 || mtch) {
if (checkForMathML) {
checkForMathML = false;
var nd = AMisMathMLavailable();
AMnoMathML = nd != null;
if (AMnoMathML && notifyIfNoMathML)
if (alertIfNoMathML)
alert("To view the ASCIIMathML notation use Internet Explorer 6 +\nMathPlayer (free from www.dessci.com)\n\
or Firefox/Mozilla/Netscape");
else AMbody.insertBefore(nd,AMbody.childNodes[0]);
}
if (!AMnoMathML) {
frg = AMstrarr2docFrag(arr,n.nodeType==8);
var len = frg.childNodes.length;
n.parentNode.replaceChild(frg,n);
return len-1;
} else return 0;
}
}
} else return 0;
} else if (n.nodeName!="math") {
for (i=0; i<n.childNodes.length; i++)
i += AMprocessNodeR(n.childNodes[i], linebreaks);
}
return 0;
}
function AMprocessNode(n, linebreaks, spanclassAM) {
var frag,st;
if (spanclassAM!=null) {
    frag = document.getElementsByTagName("span");
for (var i=0;i<frag.length;i++)
if (frag[i].className == "AM")
AMprocessNodeR(frag[i],linebreaks);
} else {
try {
st = n.innerHTML;
} catch(err) {}
// DELIMITERS:
if (st==null || st.indexOf("\$")!=-1)
AMprocessNodeR(n,linebreaks);
}
if (isIE) { //needed to match size and font of formula to surrounding text
frag = document.getElementsByTagName('math');
    for (var i=0;i<frag.length;i++) frag[i].update();
}
}
var AMbody;
var AMnoMathML = false, AMtranslated = false;
function translate(spanclassAM) {
if (!AMtranslated) { // run this only once
AMtranslated = true;
AMinitSymbols();
AMbody = document.getElementsByTagName("body")[0];
AMprocessNode(AMbody, false, spanclassAM);
}
}
if (isIE) { // avoid adding MathPlayer info explicitly to each webpage
document.write("<object id=\"mathplayer\"\
classid=\"clsid:32F66A20-7614-11D4-BD11-00104BD3F987\"></object>");
document.write("<?import namespace=\"m\" implementation=\"#mathplayer\"?>");
}
// GO1.1 Generic onload by Brothercake
// http://www.brothercake.com/
//onload function (replaces the onload="translate()" in the <body> tag)
function generic()
{
translate();
};
//setup onload function
if(typeof window.addEventListener != 'undefined')
{
//.. gecko, safari, konqueror and standard
window.addEventListener('load', generic, false);
}
else if(typeof document.addEventListener != 'undefined')
{
//.. opera 7
document.addEventListener('load', generic, false);
}
else if(typeof window.attachEvent != 'undefined')
{
//.. win/ie
window.attachEvent('onload', generic);
}
//** remove this condition to degrade older browsers
else
{
//.. mac/ie5 and anything else that gets this far
//if there's an existing onload function
if(typeof window.onload == 'function')
{
//store it
var existing = onload;
//add new onload handler
window.onload = function()
{
//call existing onload function
existing();
//call generic onload function
generic();
};
}
else
{
//setup onload function
window.onload = generic;
}
}
|
/**
 * Order model
 */
let uniqid = require('uniqid'); // generates unique ids
const mongoose = require('mongoose');
mongoose.Promise = global.Promise; // Use native promises
let Order = mongoose.Schema({
Id: {
type: String,
required: true,
index: true
    }, // order id
BuyInfor: {
type: String,
required: true,
    }, // purchased book information
UserId: {
type: String,
required: true,
    }, // member Id
Nick: {
type: String,
required: true,
    }, // nickname
Freight: {
type: Number,
required: true,
},
Total: {
type: Number,
required: true,
    }, // order total
Name: {
type: String,
default: "",
    }, // recipient name
Mobile: {
type: String,
default: "",
    }, // recipient mobile number
Address: {
type: String,
default: "",
    }, // delivery address
Note: {
type: String,
default: "",
},
DeliveryTime: {
type: String,
default: ""
    }, // delivery time
InvoiceInfor: {
type: String,
default: ""
    }, // invoice information
CreateDate: {
type: Number,
required: true,
},
UpdateDate: {
type: Number,
required: true,
},
IsApplyRefund: {
type: Number,
default: 0
    }, // after-sales service requested? 0: no, 1: yes
Status: {
type: Number,
default: 1
    }, // order status: -1 deleted, 0 expired, 1 awaiting confirmation, 2 in delivery, 3 signed for, 4 under review, 5 refunded, 6 reviewed, 7 exchange in progress, 8 exchanged, 9 awaiting review
});
// get a user's order list, paged
Order.statics.getOrderList = function(index, size, usrid) {
// console.log(index, size, usrid)
return new Promise((resolve, reject) => {
let query = this.find({ UserId: usrid, Status: { $gt: -1 } });
let total = this.find({ UserId: usrid, Status: { $gt: -1 } }).count();
        query.sort({ UpdateDate: -1 }); // sort by update date, newest first
        query.skip(index * size); // skip records from earlier pages
        query.limit(size); // return at most `size` records
query.exec((error, result) => {
if (!error) {
total.exec((err, res) => {
if (!err) {
resolve({
Data: result,
TotalCount: res
});
} else {
reject(err);
}
})
} else {
reject(error);
}
})
})
}
Order.statics.getOrderByStatus = function(usrid, status) {
return new Promise((resolve, reject) => {
let query = this.find({ UserId: usrid, Status: status, IsApplyRefund: 0 });
query.sort({ UpdateDate: -1 });
query.exec((error, result) => {
if (!error) {
resolve(result)
} else {
reject(error);
}
})
})
}
// get order details by Id
Order.statics.getOrderById = function(Id) {
return new Promise((resolve, reject) => {
let query = this.findOne({ Id: Id });
query.exec((error, result) => {
if (!error) {
resolve(result);
} else {
reject(error);
}
})
})
}
// add a new order (place the order)
Order.statics.addOrder = function(json) {
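    // note: `json` must be an Orders document instance (e.g. new Orders({...})),
    // not a plain object, otherwise the .save() call below will not exist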
return new Promise((resolve, reject) => {
json.CreateDate = Date.now();
json.UpdateDate = Date.now();
json.save((error, result) => {
if (result) {
resolve(result);
} else {
reject(error);
}
})
})
}
// edit an order
Order.statics.editOrder = function(json) {
return new Promise((resolve, reject) => {
let query = this.findOne({ Id: json.Id });
query.exec((error, result) => {
if (result) {
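                // note: BookId/BookName/Image/Count below are not declared in the
                // schema above, so Mongoose (default strict mode) will not persist them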
result.BookId = json.BookId;
result.BookName = json.BookName;
result.Image = json.Image;
result.Count = json.Count;
result.Freight = json.Freight;
result.Total = json.Total;
result.Name = json.Name;
result.Mobile = json.Mobile;
result.Address = json.Address;
result.Note = json.Note;
result.DeliveryTime = json.DeliveryTime;
result.InvoiceInfor = json.InvoiceInfor;
result.Status = json.Status;
result.UpdateDate = Date.now();
result.save((err, res) => {
if (res) {
resolve(res);
} else {
reject(err);
}
})
} else {
reject(error);
}
})
})
}
// update the order status
Order.statics.setOrderStatus = function(json) {
return new Promise((resolve, reject) => {
let query = this.findOne({ Id: json.Id });
query.exec((error, result) => {
            if (!error) {
                if (result) {
                    result.Status = json.Status;
                    result.UpdateDate = Date.now();
                    result.save((err, res) => {
                        if (res) {
                            resolve(res); // the updated document
                        } else {
                            reject(err);
                        }
                    })
                } else {
                    reject(new Error('order not found')); // otherwise the promise never settles
                }
            } else {
                reject(error);
            }
})
})
}
Order.statics.setApplyRefund = function(json) {
return new Promise((resolve, reject) => {
let query = this.findOne({ Id: json.Id });
query.exec((error, result) => {
            if (!error) {
                if (result) {
                    result.IsApplyRefund = json.IsApplyRefund;
                    result.UpdateDate = Date.now();
                    result.save((err, res) => {
                        if (res) {
                            resolve(res); // the updated document
                        } else {
                            reject(err);
                        }
                    })
                } else {
                    reject(new Error('order not found')); // otherwise the promise never settles
                }
            } else {
                reject(error);
            }
})
})
}
let Orders = mongoose.model('Orders', Order);
module.exports = Orders;
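// Usage sketch (hypothetical caller; assumes mongoose.connect() has already
// been called elsewhere):
//   const Orders = require('./order');
//   Orders.getOrderList(0, 10, someUserId)
//       .then(({ Data, TotalCount }) => console.log(TotalCount, Data))
//       .catch(console.error);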
|
/*BEGIN_LEGAL
Intel Open Source License
Copyright (c) 2002-2016 Intel Corporation. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer. Redistributions
in binary form must reproduce the above copyright notice, this list of
conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution. Neither the name of
the Intel Corporation nor the names of its contributors may be used to
endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE INTEL OR
ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
END_LEGAL */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
/* Simple string test
*/
/* Windows 32 bit implementation */
# if (defined(_WIN32))
# if (!defined (_M_X64))
# define WIN_CODE(...) __VA_ARGS__
# define WIN32_CODE(...) __VA_ARGS__
# define WIN64_CODE(...)
# define LINUX_CODE(...)
# define LINUX32_CODE(...)
# define LINUX64_CODE(...)
# else
/* Windows 64 bit implementation */
# define WIN_CODE(...) __VA_ARGS__
# define WIN32_CODE(...)
# define WIN64_CODE(...) __VA_ARGS__
# define LINUX_CODE(...)
# define LINUX32_CODE(...)
# define LINUX64_CODE(...)
# endif
# else
/* Not windows, presumably a GNU compiler and (likely) Linux */
# if (!defined (__x86_64__))
# define WIN_CODE(...)
# define WIN32_CODE(...)
# define WIN64_CODE(...)
# define LINUX_CODE(...) __VA_ARGS__
# define LINUX32_CODE(...) __VA_ARGS__
# define LINUX64_CODE(...)
# else
/* Linux 64 bit implementations */
# define WIN_CODE(...)
# define WIN32_CODE(...)
# define WIN64_CODE(...)
# define LINUX_CODE(...) __VA_ARGS__
# define LINUX32_CODE(...)
# define LINUX64_CODE(...) __VA_ARGS__
# endif
# endif
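/* movsb copies len bytes with rep movsb. When decrement is non-zero the
   direction flag (DF) is set first, so the copy runs backwards and both
   pointers must then reference the last byte of their buffers (see the
   reversed-copy call in doTest below). */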
static void movsb (char * dest, const char * src, int len, int decrement)
{
if (decrement)
{
WIN_CODE ( __asm std )
LINUX_CODE (__asm__ volatile ("std");)
}
WIN32_CODE (
__asm mov esi, src
__asm mov edi, dest
__asm mov ecx, len
__asm rep movsb
)
WIN64_CODE (
__asm mov rsi, src
__asm mov rdi, dest
__asm mov rcx, len
__asm rep movsb
)
LINUX_CODE(
__asm__ volatile ("rep movsb" :: "S"(src),"D"(dest),"c"(len) : "memory");
)
if (decrement)
{
WIN_CODE ( __asm cld )
LINUX_CODE (__asm__ volatile ("cld");)
}
}
static void movsd (void * dest, const void * src, int len, int decrement)
{
if (decrement)
{
WIN_CODE ( __asm std )
LINUX_CODE (__asm__ volatile ("std");)
}
WIN32_CODE (
__asm mov esi, src
__asm mov edi, dest
__asm mov ecx, len
__asm rep movsd
)
WIN64_CODE (
__asm mov rsi, src
__asm mov rdi, dest
__asm mov rcx, len
__asm rep movsd
)
LINUX_CODE(
__asm__ volatile ("rep movsd" :: "S"(src),"D"(dest),"c"(len) : "memory");
)
if (decrement)
{
WIN_CODE ( __asm cld )
LINUX_CODE (__asm__ volatile ("cld");)
}
}
/* Use repne scasb to calculate length of a string.
*/
static int length(const char * src, int maxlen)
{
const char * s = src;
WIN32_CODE (
__asm mov edi, src
__asm mov al, 0
__asm mov ecx, maxlen
__asm repne scasb
__asm mov src, edi
)
WIN64_CODE (
__asm mov rdi, src
__asm mov al, 0
__asm mov rcx, maxlen
__asm repne scasb
__asm mov src, rdi
)
LINUX_CODE(
__asm__ volatile ("xor %%al,%%al; repne scasb" : "=D"(src) :"0"(src),"c"(maxlen): "%eax");
)
return src-s;
}
static int cmps (const char * s1, const char * s2, int maxlen, int decrement)
{
int res = 1;
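    /* Note: unlike movsb/movsd above, only the Linux inline-asm path is
       implemented for cmps; in a Windows build res keeps its initial
       value of 1. */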
if (decrement)
{
WIN_CODE ( __asm std )
LINUX_CODE (__asm__ volatile ("std");)
}
LINUX_CODE(
__asm__ volatile ("repz cmpsb\n"
"cmovz %2,%0\n"
"jge 1f\n"
"neg %0\n"
"1:" : "=r"(res) :"0"(res),"r"(0), "S"(s1),"D"(s2),"c"(maxlen) );
)
if (decrement)
{
WIN_CODE ( __asm cld )
LINUX_CODE (__asm__ volatile ("cld");)
}
return res;
}
/* Use rep stosb to fill a chunk of store. */
static void fill(char * target, char value, int count)
{
WIN32_CODE (
__asm mov edi, target
__asm mov al, value
__asm mov ecx, count
__asm rep stosb
)
WIN64_CODE (
__asm mov rdi, target
__asm mov al, value
__asm mov rcx, count
__asm rep stosb
)
LINUX_CODE(
__asm__ volatile ("mov %1,%%al; rep stosb" : "=D"(target) :"m"(value),"c"(count),"0"(target): "%eax", "memory");
)
}
int copyAndTest (int * dest, int * src, int len,int df)
{
int i;
int failed = 0;
memset(dest, 0xff, (len+2)*sizeof(int));
for (i=0;i<len;i++)
src[i] = i;
dest = dest + 1;
if (df)
movsd (dest+len-1, src+len-1, len, df);
else
movsd (dest, src, len, df);
// Compare the results
for (i=0;i<len;i++)
{
if (src[i] != dest[i])
{
failed++;
printf ("***Failed : copy %p to %p for %d %swards, at dest[%d] see %d not %d\n",
src,dest,len,df ?"back":"for",i, dest[i], src[i]);
}
}
if (dest[-1] != -1)
{
failed++;
printf ("***Failed : %swards, overwrote below base (%d)\n",
df ?"back":"for", dest[-1]);
}
if (dest[len] != -1)
{
failed++;
printf ("***Failed : %swards, overwrote above top (%d)\n",
df ?"back":"for", dest[len]);
}
return failed;
}
int testMovsd()
{
enum {
length = 121
};
int * s1 = (int *)malloc (length * sizeof(int));
int * d1 = (int *)malloc ((length+2) * sizeof (int));
int df;
int failures = 0;
for (df=0; df<2; df++)
{
failures += copyAndTest(d1, s1, length, df);
    }
    free(s1);
    free(d1);
    printf ("%d failures in testMovsd\n", failures);
    return failures;
}
int doCmpsTests()
{
const char * s1 = "abcdefg";
const char * s2 = "abcdefh";
const char * s3 = &s2[1];
int df = 0;
int results[4];
int failures = 0;
printf ("s1 : '%s' @%p\n"
"s2 : '%s' @%p\n", s1,s1,s2,s2);
for (df = 0; df <2; df++)
{
int d1 = df ? 6 : 0;
int d2 = df ? 5 : 0;
printf ("DF = %d\n", df);
results[0] = cmps (s1+d1,s1+d1,7,df);
results[1] = cmps (s1+d1,s2+d1,7,df);
results[2] = cmps (s2+d1,s1+d1,7,df);
results[3] = cmps (s3+d2,s2+d2,6,df);
printf ("cmps (%s,%s) = %d, should be 0\n",s1,s1,results[0]);
printf ("cmps (%s,%s) = %d, should be -1\n",s1,s2,results[1]);
printf ("cmps (%s,%s) = %d, should be 1\n",s2,s1,results[2]);
printf ("cmps (%s,%s) = %d, should be 1\n",s3,s2,results[3]);
failures += results[0] != 0;
failures += results[1] != -1;
failures += results[2] != 1;
failures += results[3] != 1;
}
return failures;
}
int doTest()
{
const char * src = "Hello World";
char dest[50];
char d1[101];
int len = strlen(src)+1;
int i;
int failures = 0;
movsb (&dest[0],src,len,0);
printf ("Result after copy '%s'\n", dest);
if (strcmp(dest,src) != 0)
{
printf ("*** Copy failed ***\n");
failures++;
}
memset (dest, 0, sizeof(dest));
movsb (&dest[len], &src[len], len+1, 1);
printf ("Result after reversed copy '%s'\n", dest);
if (strcmp(dest,src) != 0)
{
printf ("*** Copy failed ***\n");
failures++;
}
printf ("src = '%s'\n", src);
i = length(src, 1024);
printf ("Length gives %d\n", i);
movsb (&dest[0],src,len,0);
printf ("dest = '%s'\n", dest);
movsb (&dest[0],&dest[6],len-6,0);
printf ("Result after overlapping copy '%s'\n", dest);
memset (d1, -1, 100);
d1[100] = 99;
fill (d1, (char)1, 100);
printf ("Filled\n");
for (i=0;i<100; i++)
if (d1[i] != 1)
{
printf ("Fill failed d1[%d] == 0x%02x\n",i, d1[i]);
failures++;
}
if (d1[100] != 99)
{
printf ("Fill failed d1[101] == 0x%02x\n",d1[100]);
failures++;
}
return failures;
}
int main (int argc, char ** argv)
{
int failures = 0;
printf("Simple tests\n");
failures += doTest();
printf ("movsd tests\n");
failures += testMovsd();
printf("\n\ncmps tests\n");
    failures += doCmpsTests();
return failures;
}
|
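// These helpers swap page sections by copying the innerHTML of hidden template
// elements (assumed to exist in the page, e.g. <div id="addDoctor">) into the
// matching page_content* container; replaceContentToAddP instead activates an
// existing tab by simulating a click on it.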
function replaceContentToAddD() {
document.getElementById('page_contentD').innerHTML = document.getElementById('addDoctor').innerHTML;
}
function replaceContentToAddN() {
document.getElementById('page_contentN').innerHTML = document.getElementById('addNurse').innerHTML;
}
function replaceContentToAddP() {
document.getElementById('profile-tabb').click();
}
function replaceContentToAddS() {
document.getElementById('page_contentS').innerHTML = document.getElementById('addSec').innerHTML;
}
function replaceContentToEditN() {
document.getElementById('page_contentN').innerHTML = document.getElementById('editNurse').innerHTML;
} |
#! /usr/bin/env python
# Copyright 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import numpy as np
import numba_dppy as dppy
import dpctl
@dppy.kernel(
access_types={"read_only": ["a", "b"], "write_only": ["c"], "read_write": []}
)
def data_parallel_sum(a, b, c):
i = dppy.get_global_id(0)
c[i] = a[i] + b[i]
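# Each work-item adds the single pair of elements at its global id; the
# [global_size, local_size] launch syntax below supplies the ND-range.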
global_size = 64
local_size = 32
N = global_size * local_size
a = np.array(np.random.random(N), dtype=np.float32)
b = np.array(np.random.random(N), dtype=np.float32)
c = np.ones_like(a)
def main():
    ran = False  # track whether any device actually ran the kernel
    if dpctl.has_gpu_queues():
        with dpctl.device_context("opencl:gpu"):
            print("----Running in GPU----")
            print("before A: ", a)
            print("before B: ", b)
            data_parallel_sum[global_size, local_size](a, b, c)
            print("after C: ", c)
        ran = True
    if dpctl.has_cpu_queues():
        with dpctl.device_context("opencl:cpu"):
            print("----Running in CPU----")
            print("before A: ", a)
            print("before B: ", b)
            data_parallel_sum[global_size, local_size](a, b, c)
            print("after C: ", c)
        ran = True
    if not ran:
        print("No device found")
        exit()
    print("Done...")
if __name__ == "__main__":
main()
|
from ophyd.scaler import ScalerCH
from ophyd import Component, Device
from ophyd import EpicsSignal, EpicsSignalRO
from apstools.devices import SRS570_PreAmplifier
import logging
logger = logging.getLogger(__name__)
class Keithley6485(Device):
value = Component(EpicsSignalRO, ":read")
# refresh = Component(EpicsSignal, ":read.SCAN", write_pv=":read.SCAN")
rate = Component(EpicsSignal, ":rate", write_pv=":rateSet", kind="config", string=True)
    range = Component(EpicsSignalRO, ":range", kind="config", string=True)
    autorange = Component(EpicsSignal, ":rangeAuto", write_pv=":rangeAutoSet", kind="config", string=True)
    autoulimit = Component(EpicsSignal, ":rangeAutoUlimit", write_pv=":rangeAutoUlimitSet", kind="config", string=True)
    autollimit = Component(EpicsSignal, ":rangeAutoLlimit", write_pv=":rangeAutoLlimitSet", kind="config", string=True)
    zerocheck = Component(EpicsSignal, ":zeroCheck", write_pv=":zeroCheckSet", kind="config", string=True)
    zerocorrect = Component(EpicsSignal, ":zeroCorrect", write_pv=":zeroCorrectSet", kind="config", string=True)
    medianfilter = Component(EpicsSignal, ":medianFilter", write_pv=":medianFilterSet", kind="config", string=True)
    medianfilterrank = Component(EpicsSignal, ":medianFilterRank", write_pv=":medianFilterRankSet", kind="config", string=True)
    digitalfilter = Component(EpicsSignal, ":digitalFilter", write_pv=":digitalFilterSet", kind="config", string=True)
    filtercount = Component(EpicsSignal, ":digitalFilterCount", write_pv=":digitalFilterCountSet", kind="config", string=True)
    filtercontrol = Component(EpicsSignal, ":digitalFilterControl", write_pv=":digitalFilterControlSet", kind="config", string=True)
class MyKeithley(Device):
ca1 = Component(Keithley6485, "1")
ca2 = Component(Keithley6485, "2")
ca3 = Component(Keithley6485, "3")
ca4 = Component(Keithley6485, "4")
ca5 = Component(Keithley6485, "5")
ca6 = Component(Keithley6485, "6")
ca7 = Component(Keithley6485, "7")
ca8 = Component(Keithley6485, "8")
ca9 = Component(Keithley6485, "9")
ca11 = Component(Keithley6485, "11")
ca12 = Component(Keithley6485, "12")
ca13 = Component(Keithley6485, "13")
ca14 = Component(Keithley6485, "14")
ca15 = Component(Keithley6485, "15")
keithley_objects = MyKeithley("29idb:ca", name="keithley_objects")
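# Usage sketch (hypothetical interactive session; requires a reachable EPICS
# IOC serving the 29idb:ca* PVs):
#   keithley_objects.ca1.value.get()        # read the current on channel 1
#   keithley_objects.ca1.rate.put("Slow")   # set the integration rate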
# from epics import caput, caget
# from IEX_29id.utils.strings import ClearStringSeq
# from IEX_29id.utils.exp import BL_Mode_Read, BL_ioc
# from IEX_29id.utils.misc import RangeUp
# from time import sleep
# import numpy as np
# def ca2flux(ca,hv=None,p=1):
# curve=LoadResponsivityCurve()
# responsivity=curve[:,0]
# energy=curve[:,1]
# charge = 1.602e-19
# if hv is None:
# hv=caget('29idmono:ENERGY_SP')
# print("\nCalculating flux for:")
# print(" hv = %.1f eV" % hv)
# print(" ca = %.3e Amp" % ca)
# eff=np.interp(hv,energy,responsivity)
# flux = ca/(eff*hv*charge)
# if p is not None:
# print("Flux = %.3e ph/s\n" % flux)
# return flux
# def LoadResponsivityCurve():
# FilePath='/home/beams/29IDUSER/Documents/User_Macros/Macros_29id/IEX_Dictionaries/'
# FileName="DiodeResponsivityCurve"
# data = np.loadtxt(FilePath+FileName, delimiter=' ', skiprows=1)
# return data
# def reset_keithley(keithley_ioc,keithley_num,rate="Slow"):
# pv="29id"+keithley_ioc+":ca"+str(keithley_num)
# caput(pv+":reset.PROC",1)
# caput(pv+":digitalFilterSet","Off")
# caput(pv+":medianFilterSet","Off")
# caput(pv+":zeroCheckSet",0)
# caput(pv+":rangeAuto",1)
# caput(pv+":rateSet",rate)
# caput(pv+":rangeAutoUlimit","20mA")
# caput(pv+":read.SCAN",".5 second")
# def reset_all_keithley(rate="Slow"):
# for i in [1,2,3,4,5,9,10,12,13,14,15]:
# reset_keithley("b",i,rate)
# for i in [1,2]:
# reset_keithley("c",i,rate)
# #for i in [1,2,3,4,5]:
# for i in [2,3,4]:
# reset_keithley("d",i,rate)
# caput("29idb:ca5:read.SCAN","Passive") # CA5 in passive
# print("\nAll the current amplifiers have been reset; ca5 set to passive.")
# def keithley_live_strseq(scanIOC): # do we need to add 29idb:ca5 ???
# n=7
# pvstr="29id"+scanIOC+":userStringSeq"+str(n)
# ClearStringSeq(scanIOC,n)
# caput(pvstr+".DESC","CA_Live_"+scanIOC)
# n=len(Detector_List(scanIOC))
# for (i,list) in enumerate(Detector_List(scanIOC)):
# pvCA_read='29id'+list[0]+':ca'+str(list[1])+':read.SCAN CA NMS'
# pvCA_avg='29id'+list[0]+':ca'+str(list[1])+':digitalFilterSet PP NMS'
# caput(pvstr+".LNK"+str(i+1),pvCA_avg)
# caput(pvstr+".STR" +str(i+1),"Off")
# if n+1+i < 10:
# caput(pvstr+".LNK" +str(n+1+i),pvCA_read)
# caput(pvstr+".STR" +str(n+1+i),".5 second")
# caput(pvstr+".WAIT"+str(n+1+i),"After"+str(n))
# elif n+1+i == 10:
# caput(pvstr+".LNKA",pvCA_read)
# caput(pvstr+".STRA",".5 second")
# caput(pvstr+".WAITA","After"+str(n))
# # if scanIOC == 'Kappa':
# # caput(pvstr+".LNK" +str(2*n+1),'29idMZ0:scaler1.CONT CA NMS')
# # caput(pvstr+".STR" +str(2*n+1),"AutoCount")
# # caput(pvstr+".WAIT"+str(2*n+1),"After"+str(2*n))
# return pvstr+".PROC"
# def Detector_List(scanIOC):
# """
# Define the detector used for:
# keithley_live_strseq()
# Detector_Triggers_StrSeq()
# BeforeScan_StrSeq() => puts everybody in passive
# CA_Average()
# WARNING: can't have more than 5 otherwise keithley_live_strseq gets angry.
# """
# BL_mode=BL_Mode_Read()[0]
# if scanIOC == "ARPES":
# CA_list=[["c",1],["b",15],["b",4],["b",13]]
# elif scanIOC == "Kappa":
# CA_list=[["d",2],["d",3],["d",4],["b",14]]
# elif scanIOC == "RSoXS":
# CA_list=[["d",3],["d",4],["d",5],["b",14],]
# else:
# CA_list=[]
# # if BL_mode == 1:
# # CA_list=[["b",1],["b",2],["b",3],["b",4],["b",5]] #JM was here
# # CA_list=[["b",15],["d",2],["d",3],["d",4],["b",14]]
# return CA_list
# def CA_Name(ca_ioc,ca_num): #{motor,position}
# ca={}
# ca["b"] = {4:'Slit1A',13:'Slit3C',14:'MeshD',15:'DiodeC'}
# ca["c"] = {1:'TEY' ,2:'Diode'}
# ca["d"] = {1:'APD' ,2:'TEY', 3:'D-3', 4:'D-4',5:'RSoXS Diode'}
# try:
# name=ca[ca_ioc][ca_num]
# except:
# name=""
# return name
# def CA_Autoscale(ca_ioc,ca_num,On_Off='On',gain=7):
# """
# On_Off= 'On' => Turns On the Autoscale; gain is irrelevant.
# On_Off= 'Off' => Turns Off the Autoscale with gain below:
# 0 = 2nA
# 1 = 20nA
# 2 = 200nA
# 3 = 2uA
# 4 = 20uA
# 5 = 200uA
# 6 = 2mA
# 7 = 20mA
# """
# pv="29id"+ca_ioc+":ca"+str(ca_num)
# caput(pv+":rangeAutoSet",On_Off)
# sleep(0.5)
# caput(pv+":rangeSet",gain)
# print(pv,"Autoscale",On_Off)
# if On_Off == 'Off':
# sleep(1)
# print("Gain set to:",caget(pv+":range",as_string=True))
|
/**
* jQuery EasyUI 1.2.4
*
* Licensed under the GPL terms
* To use it on other terms please contact us
*
* Copyright(c) 2009-2011 stworthy [ [email protected] ]
*
*/
(function($){
function _1(_2){
var _3=$.data(_2,"combogrid").options;
var _4=$.data(_2,"combogrid").grid;
$(_2).addClass("combogrid-f");
$(_2).combo(_3);
var _5=$(_2).combo("panel");
if(!_4){
_4=$("<table></table>").appendTo(_5);
$.data(_2,"combogrid").grid=_4;
}
_4.datagrid($.extend({},_3,{border:false,fit:true,singleSelect:(!_3.multiple),onLoadSuccess:function(_6){
var _7=$.data(_2,"combogrid").remainText;
var _8=$(_2).combo("getValues");
_1c(_2,_8,_7);
_3.onLoadSuccess.apply(_2,arguments);
},onClickRow:_9,onSelect:function(_a,_b){
_c();
_3.onSelect.call(this,_a,_b);
},onUnselect:function(_d,_e){
_c();
_3.onUnselect.call(this,_d,_e);
},onSelectAll:function(_f){
_c();
_3.onSelectAll.call(this,_f);
},onUnselectAll:function(_10){
if(_3.multiple){
_c();
}
_3.onUnselectAll.call(this,_10);
}}));
function _9(_11,row){
$.data(_2,"combogrid").remainText=false;
_c();
if(!_3.multiple){
$(_2).combo("hidePanel");
}
_3.onClickRow.call(this,_11,row);
};
function _c(){
var _12=$.data(_2,"combogrid").remainText;
var _13=_4.datagrid("getSelections");
var vv=[],ss=[];
for(var i=0;i<_13.length;i++){
vv.push(_13[i][_3.idField]);
ss.push(_13[i][_3.textField]);
}
if(!_3.multiple){
$(_2).combo("setValues",(vv.length?vv:[""]));
}else{
$(_2).combo("setValues",vv);
}
if(!_12){
$(_2).combo("setText",ss.join(_3.separator));
}
};
};
function _14(_15,_16){
var _17=$.data(_15,"combogrid").options;
var _18=$.data(_15,"combogrid").grid;
var _19=_18.datagrid("getRows").length;
$.data(_15,"combogrid").remainText=false;
var _1a;
var _1b=_18.datagrid("getSelections");
if(_1b.length){
_1a=_18.datagrid("getRowIndex",_1b[_1b.length-1][_17.idField]);
_1a+=_16;
if(_1a<0){
_1a=0;
}
if(_1a>=_19){
_1a=_19-1;
}
}else{
if(_16>0){
_1a=0;
}else{
if(_16<0){
_1a=_19-1;
}else{
_1a=-1;
}
}
}
if(_1a>=0){
_18.datagrid("clearSelections");
_18.datagrid("selectRow",_1a);
}
};
function _1c(_1d,_1e,_1f){
var _20=$.data(_1d,"combogrid").options;
var _21=$.data(_1d,"combogrid").grid;
var _22=_21.datagrid("getRows");
var ss=[];
for(var i=0;i<_1e.length;i++){
var _23=_21.datagrid("getRowIndex",_1e[i]);
if(_23>=0){
_21.datagrid("selectRow",_23);
ss.push(_22[_23][_20.textField]);
}else{
ss.push(_1e[i]);
}
}
if($(_1d).combo("getValues").join(",")==_1e.join(",")){
return;
}
$(_1d).combo("setValues",_1e);
if(!_1f){
$(_1d).combo("setText",ss.join(_20.separator));
}
};
function _24(_25,q){
var _26=$.data(_25,"combogrid").options;
var _27=$.data(_25,"combogrid").grid;
$.data(_25,"combogrid").remainText=true;
if(_26.multiple&&!q){
_1c(_25,[],true);
}else{
_1c(_25,[q],true);
}
if(_26.mode=="remote"){
_27.datagrid("clearSelections");
_27.datagrid("load",{q:q});
}else{
if(!q){
return;
}
var _28=_27.datagrid("getRows");
for(var i=0;i<_28.length;i++){
if(_26.filter.call(_25,q,_28[i])){
_27.datagrid("clearSelections");
_27.datagrid("selectRow",i);
return;
}
}
}
};
$.fn.combogrid=function(_29,_2a){
if(typeof _29=="string"){
var _2b=$.fn.combogrid.methods[_29];
if(_2b){
return _2b(this,_2a);
}else{
return $.fn.combo.methods[_29](this,_2a);
}
}
_29=_29||{};
return this.each(function(){
var _2c=$.data(this,"combogrid");
if(_2c){
$.extend(_2c.options,_29);
}else{
_2c=$.data(this,"combogrid",{options:$.extend({},$.fn.combogrid.defaults,$.fn.combogrid.parseOptions(this),_29)});
}
_1(this);
});
};
$.fn.combogrid.methods={options:function(jq){
return $.data(jq[0],"combogrid").options;
},grid:function(jq){
return $.data(jq[0],"combogrid").grid;
},setValues:function(jq,_2d){
return jq.each(function(){
_1c(this,_2d);
});
},setValue:function(jq,_2e){
return jq.each(function(){
_1c(this,[_2e]);
});
},clear:function(jq){
return jq.each(function(){
$(this).combogrid("grid").datagrid("clearSelections");
$(this).combo("clear");
});
}};
$.fn.combogrid.parseOptions=function(_2f){
var t=$(_2f);
return $.extend({},$.fn.combo.parseOptions(_2f),$.fn.datagrid.parseOptions(_2f),{idField:(t.attr("idField")||undefined),textField:(t.attr("textField")||undefined),mode:t.attr("mode")});
};
$.fn.combogrid.defaults=$.extend({},$.fn.combo.defaults,$.fn.datagrid.defaults,{loadMsg:null,idField:null,textField:null,mode:"local",keyHandler:{up:function(){
_14(this,-1);
},down:function(){
_14(this,1);
},enter:function(){
_14(this,0);
$(this).combo("hidePanel");
},query:function(q){
_24(this,q);
}},filter:function(q,row){
var _30=$(this).combogrid("options");
return row[_30.textField].indexOf(q)==0;
}});
})(jQuery);
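// Hedged usage sketch (illustrative, not shipped with EasyUI): a combogrid is
// typically initialized on an <input> with datagrid-style column definitions,
// where idField/textField map grid rows to the combo value/text. The field
// names and URL below are assumptions made for the example.
// $('#cc').combogrid({
//     panelWidth: 450,
//     idField: 'itemid',
//     textField: 'productname',
//     url: 'combogrid_data.json',
//     columns: [[
//         {field: 'itemid', title: 'Item ID', width: 60},
//         {field: 'productname', title: 'Product', width: 120}
//     ]]
// });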
|
# ******************************************************************************
#
# DSF2Blender: Python script for Blender that allows import of X-Plane DSF files
# Checked with Blender 3.0 but should work from 2.8 up
#
# For more details refer to GitHub: https://github.com/nofaceinbook/DSF2Blender
#
# WARNING: This code is still under development and may still have some errors.
#
# Copyright (C) 2022 by schmax (Max Schmidt)
#
# This code is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE.
# ******************************************************************************
from xplnedsf2 import *
import bpy
from bpy.props import BoolProperty, EnumProperty, StringProperty, IntProperty, FloatProperty
from bpy.types import Operator
from bpy_extras.io_utils import ExportHelper, ImportHelper
#### IMPORTANT: Requires xplnedsf2.py from https://github.com/nofaceinbook/muxp in your Blender python/lib directory
#### For the moment the dsf file must be unzipped or you install pylzma in Blender Python
#### Rendering a complete O4XP tile probably causes an out-of-memory fault
################ ENTER BELOW your X-Plane default unzipped DSF File and path to X-Plane #################################
dsf_file = 'X:/X-Plane/steamapps/common/X-Plane 11/Custom Scenery/zzzz_MUXP_default_mesh_updates/Earth nav data/+10-070/+10-067.dsf'
#dsf_file = 'X:/X-Plane/steamapps/common/X-Plane 11/Custom Scenery/zzzz_MUXP_default_mesh_updates/Earth nav data/+30+040/+39+046.dsf'
#dsf_file = 'X:/X-Plane/steamapps/common/X-Plane 11/Custom Scenery/zOrtho4XP_+50+007/Earth nav data/+50+000/+50+007.dsf'
#dsf_file = 'X:/X-Plane/steamapps/common/X-Plane 11/Custom Scenery/zOrtho4XP_-08-015/Earth nav data/-10-020/-08-015.dsf'
#dsf_file = 'X:/X-Plane/steamapps/common/X-Plane 11/Custom Scenery/zOrtho4XP_+32-017/Earth nav data/+30-020/+32-017.dsf'
class DSF_loader:
def __init__(self, wb, eb, sb, nb, scl, lp_overlay):
self.AREA_W = wb # define area from west to east and south to north to be extracted
self.AREA_E = eb # 0 to 1 extracts the full one by one degree grid
self.AREA_S = sb # you could also use the full coordinates like 50.21 or -7.4
self.AREA_N = nb
self.SCALING = scl
self.LAYER_PER_OVERLAY = lp_overlay # if this is true each overlay terrain will be defined as an individual object
def read_ter_file(self, terpath, xppath, dsf_path):
"""
Reads X-Plane terrain file (.ter) in terpath and returns values as dictionary.
In case of errors the dict contains key ERROR with value containing description of error.
To read default terrains the path for X-Plane (xppath) is needed.
dsf_path is the path of the dsf file that contains the terrain definition. Needed to read dsf specific terrain.
"""
ter = dict()
dsfpath = dsf_path.replace("\\", "/") ############ NEW ######## TBD: USE FILE DELIMITER INDEPENDENT COMMANDS ###########
if terpath == 'terrain_Water': # No terrain file for Water
return ter
if terpath.endswith('_OVL'): #### TBD: Can probably be removed as function is called with terrain name now
overlay = True
terpath = terpath[:-4] # remove _OVL now from terrain name
### TBD: handle different path delimiters in given paths like \ by replacing them ? ####
if terpath.startswith("lib/g10"): # global XP 10 terrain definition
#### TBD: Build path correct for every file system ###
filename = xppath + "/Resources/default scenery/1000 world terrain" + terpath[7:] # remove lib/g10
elif terpath.startswith("terrain/"): # local dsf terrain definition
filename = dsfpath[:dsfpath.rfind("Earth nav data")] + terpath # remove part for dsf location
### TBD: Error check that terrain file exists
else:
ter["ERROR"] = "Unknown Terrain definition: " + terpath
return ter
##### TBD: Build filename for local .ter files starting with ../ using dsfpath #######
try:
with open(filename, encoding="utf8") as f:
for line in f: ### TBD: Check that first three lines contain A 800 TERRAIN #####
values = line.split()
#print(values)
if len(values) > 0: # skip empty line
key = values.pop(0)
if len(values) > 0 and values[0].startswith("../"): # replace relative path with full path
filepath = filename[:filename.rfind("/")] # get just path without name of file in path
values[0] = filepath[:filepath.rfind("/") + 1] + values[0][3:]
#print("###", filename, values[0])
### TBD: Handle ERROR when '/' is not found; when other delimiters are used
ter[key] = values
### TBD: in case of multiple keys in files append new values to existing key
except IOError:
ter["ERROR"] = "Error reading terrain file: " + filename
return ter
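# Hedged example (illustrative, not from the original source): for a typical
# non-water terrain the returned dict might look like
# {'BASE_TEX': ['<XP>/Resources/.../grass.dds'], 'PROJECTED': ['25', '25']},
# while failures surface as {'ERROR': '...'}. Keys are the first token of
# each .ter line; values are the remaining tokens of that line.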
def add_material(self, matName, ter, bpy):
m = bpy.data.materials.new(matName)
if matName.find('terrain_Water') < 0: # this is no water
if "BASE_TEX" in ter:
teximagefile = ter["BASE_TEX"][0].replace("/", "\\\\")
elif "BASE_TEX_NOWRAP" in ter:
teximagefile = ter["BASE_TEX_NOWRAP"][0].replace("/", "\\\\")
#print("Loading texture image: {}".format(teximagefile))
m.use_nodes = True
bsdf = m.node_tree.nodes["Principled BSDF"]
texImage = m.node_tree.nodes.new('ShaderNodeTexImage')
texImage.location = (-400,280)
texImage.image = bpy.data.images.load(teximagefile, check_existing=True)
m.node_tree.links.new(bsdf.inputs['Base Color'], texImage.outputs['Color'])
bsdf.inputs[7].default_value = 0.01 # This is setting the specular intensity
### TBD: increase roughness
if matName.endswith( "_O"): # add border texture for overlay
if "BORDER_TEX" in ter:
borderimagefile = ter["BORDER_TEX"][0].replace("/", "\\\\")
borderImage = m.node_tree.nodes.new('ShaderNodeTexImage')
borderImage.location = (-400,0)
############### TBD: Check that existing images are used - if it works as below use everywhere where image is loaded
borderImage.image = bpy.data.images.load(borderimagefile, check_existing=True)
borderImage.image.colorspace_settings.name = 'Non-Color'
m.node_tree.links.new(bsdf.inputs['Alpha'], borderImage.outputs['Color'])
m.blend_method = 'CLIP'
node = m.node_tree.nodes.new('ShaderNodeUVMap')
node.location = (-700,0)
node.uv_map = "borderUV"
m.node_tree.links.new(node.outputs[0], borderImage.inputs[0]) # add link from new uv map to image texture
else:
print("WARNING: No texture file found for this terrain overlay/material!\n")
else: ### TBD: don't double everything below
teximagefile = self.xp_path + "/Resources/bitmaps/world/water/any.png"
teximagefile.replace("/", "\\\\")
#print("Loading texture image: {}".format(teximagefile))
m.use_nodes = True
bsdf = m.node_tree.nodes["Principled BSDF"]
texImage = m.node_tree.nodes.new('ShaderNodeTexImage')
texImage.image = bpy.data.images.load(teximagefile, check_existing=True)
m.node_tree.links.new(bsdf.inputs['Base Color'], texImage.outputs['Color'])
### TBD: Change specular, roughness, transmission to good values for water
return m
def execute(self, dsf_file):
print("------------ Starting to use DSF ------------------")
print("Reading DSF file: {}".format(dsf_file))
self.xp_path = 'X:/X-Plane/steamapps/common/X-Plane 11' ########### TBD: be retrieved from dsf file ##########
dsf = XPLNEDSF()
dsf.read(dsf_file)
grid_west = int(dsf.Properties["sim/west"])
grid_south = int(dsf.Properties["sim/south"])
print("Importing Mesh and setting west={} and south={} to origin.".format(grid_west, grid_south))
if 0 <= self.AREA_W <= 1 and 0 <= self.AREA_S <= 1:
self.AREA_W += grid_west
self.AREA_E += grid_west
self.AREA_S += grid_south
self.AREA_N += grid_south
print("But extracting just from west {} to east {} and south {} to north {}".format(self.AREA_W, self.AREA_E, self.AREA_S, self.AREA_N))
#### Load all terrain files referenced in the dsf file into a dictionary ###
terrain_details = dict() # containing per terrain index the details of .ter-file in dict
for id in dsf.DefTerrains:
print("Loading Terrain {}".format(dsf.DefTerrains[id]))
terrain_details[id] = self.read_ter_file(dsf.DefTerrains[id], self.xp_path, dsf_file)
if "ERROR" in terrain_details[id]:
print(terrain_details[id]["ERROR"])
print("Loaded {} terrain details".format(len(terrain_details)))
# SORT mesh patches so that the physical mesh is the bottom layer and all overlays are above
# All layers are sorted based on the flag and id of the terrain in the list, so that they get a higher z-value to avoid same-z-layer artefacts
# Also sort by near and far values to store them later in material name
# In addition this sorting allows to switch materials with every layer
######## TBD: Give option to avoid loading of overlays
ter_layers = dict()
for p in dsf.Patches:
#print("TerIndex {}: Flag: {} Near: {} Far: {}".format(p.defIndex, p.flag, p.near, p.far))
ter_type = (p.flag, p.defIndex, p.near, p.far)
if ter_type in ter_layers:
ter_layers[ter_type].append(p)
else:
ter_layers[ter_type] = [p]
print("Sorted {} mesh patches into {} different types".format(len(dsf.Patches), len(ter_layers)))
verts = []
edges = [] # will not be filled, as Blender derives the edges from the faces when edges are empty
normals = [] # normals stored per vertex
faces = [[]]
uvs = [[]]
uvs2 = [[]] # second uv coordinates given for borders in case of non projected mesh
coords = dict() # existing coordinates per object as key and index of them in verts as value
tria_layer = dict() # maps a tria (tuple of vertex indices) to its current layer (how many trias are stacked above each other)
materials = [] # list containing all information for all materials for all layers used
matIndexPerTria = [[]] # list of material index for each tria of mesh
used_materials = [[]] # list containing for each layer the used materials
for ter_layer_id in sorted(ter_layers.keys()):
projected_uv = ("PROJECTED" in terrain_details[ter_layer_id[1]]) # 2nd valud in ter_layer_id includes defintionIndex of patch
water = (dsf.DefTerrains[ter_layer_id[1]] == "terrain_Water")
if self.LAYER_PER_OVERLAY:
if ter_layer_id[0] == 1: # for basemesh we use layer 0
layer = 0
else:
layer += 1 # this requires that there was a base-mesh before setting layer=0
for p in ter_layers[ter_layer_id]:
trias = p.triangles()
if water and len(trias) and len(dsf.V[trias[0][0][0]][trias[0][0][1]]) <= 5:
projected_uv = True
### TBD: create own material / ter_layer_id for projected Water to give it different name ###################
##### or make sure that uv coordinates are outside [0:1] in case of projection
else:
projected_uv = False
# whether water is projected depends on whether uv coordinates are given, taken from the first vertex of the first tria
for t in trias:
if not (self.AREA_W <= dsf.V[t[0][0]][t[0][1]][0] <= self.AREA_E and self.AREA_S <= dsf.V[t[0][0]][t[0][1]][1] <= self.AREA_N) \
and not (self.AREA_W <= dsf.V[t[1][0]][t[1][1]][0] <= self.AREA_E and self.AREA_S <= dsf.V[t[1][0]][t[1][1]][1] <= self.AREA_N) \
and not (self.AREA_W <= dsf.V[t[2][0]][t[2][1]][0] <= self.AREA_E and self.AREA_S <= dsf.V[t[2][0]][t[2][1]][1] <= self.AREA_N):
continue
ti = [] # index list of vertices of tria that will be added to faces
tuvs = [] # uvs for that triangle
tuvs2 = [] # 2nd uves for triangle e.g. for borders if existent
for v in t: # this is now index to Pool and to vertex in Pool
#### TBD: Scale to Mercator in order to have same east/west and north/south dimension #####
vx = round((dsf.V[v[0]][v[1]][0] - grid_west) * self.SCALING, 3)
vy = round((dsf.V[v[0]][v[1]][1] - grid_south) * self.SCALING, 3)
vz = dsf.getVertexElevation(dsf.V[v[0]][v[1]][0], dsf.V[v[0]][v[1]][1], dsf.V[v[0]][v[1]][2])
vz = round(vz / (100000/self.SCALING), 3) ### TBD: Make stretching of height configurable
if (vx, vy) in coords:
vi = coords[(vx, vy)]
#### TBD: check if new normal is equal to existing one ###############
else:
vi = len(coords) # index in verts is the last one, as coords will now be added
coords[(vx, vy)] = vi
verts.append([vx, vy, vz])
nx = round(dsf.V[v[0]][v[1]][3], 4) #### TBD: Rounding and if below can be removed; just checking if existent
ny = round(dsf.V[v[0]][v[1]][4], 4)
normals.append([nx, ny, round(sqrt(1 - nx*nx - ny*ny), 4)])
#if normals[-1] != [0.0, 0.0, 1.0]:
# print(normals[-1])
ti.insert(0, vi) # winding in Blender is just opposite as in X-Plane
if len(dsf.V[v[0]][v[1]]) == 7: # in case of projection we do the first uvs by our own unwrapping and use the given ones as second, e.g. for the border
if not projected_uv and p.flag == 1: # for projected physical mesh; for overlay we would need second uvs for border
########### TBD: when projected then map tuvs to vx and vy --> if NOT projected, CORRECT ????????? ################################
tuvs.insert(0, (dsf.V[v[0]][v[1]][5], dsf.V[v[0]][v[1]][6]))
tuvs2.insert(0, (vx/100, vy/100)) # add this uv, even if not needed in order to get full uv-mesh for that layer
else: # should only be the case if projected and we have overlay to get uv-map for border
tuvs.insert(0, (vx/100, vy/100)) # By this definition uvs exceed the [0:1] range, but this should scale the texture to 10 times the size
tuvs2.insert(0, (dsf.V[v[0]][v[1]][5], dsf.V[v[0]][v[1]][6])) # uvs are defined for every vertex of every face / loop
elif len(dsf.V[v[0]][v[1]]) == 9: # first uvs for mesh 2nd for border
tuvs.insert(0, (dsf.V[v[0]][v[1]][5], dsf.V[v[0]][v[1]][6])) # uvs are defined for every vertex of every face / loop
tuvs2.insert(0, (dsf.V[v[0]][v[1]][7], dsf.V[v[0]][v[1]][8])) # uvs are defined for every vertex of every face / loop
else: # we don't have uvs so we unwrap our own ones
tuvs.insert(0, (vx/100, vy/100)) # By this definition uvs exceed the [0:1] range, but this should scale the texture to 10 times the size
tuvs2.insert(0, (vx/100, vy/100)) # Add uvs even if not needed for that tria but to have full uvs for that layer
### Identifiy layer for material ###
if not self.LAYER_PER_OVERLAY:
smallest_index = min(ti) # make sure that smallest index is first in the list, but keep winding of tria
if smallest_index == ti[1]:
ti_match = (ti[1], ti[2], ti[0]) ######## CHANGING ORDER WOULD ALSO MEAN CHANGING ORDER FOR UV --> created ti_match just for matching in dict !!!!!!! ##########
elif smallest_index == ti[2]:
ti_match = (ti[2], ti[0], ti[1])
else:
ti_match = (ti[0], ti[1], ti[2])
if ti_match in tria_layer: # this tria is already existing
tria_layer[ti_match] += 1 # this new tria has to be put on the next layer
layer = tria_layer[ti_match]
else:
tria_layer[ti_match] = 0 # this is first tria which is layer 0 (base mesh)
layer = 0
if layer >= len(faces): # We need an additional layer so extend lists
faces.append([])
uvs.append([])
uvs2.append([])
matIndexPerTria.append([])
used_materials.append([])
faces[layer].append(ti)
uvs[layer].extend(tuvs) # uvs added with correct order because of tria different winding
if tuvs2 != []:
uvs2[layer].extend(tuvs2)
if len(materials) == 0 or ter_layer_id != materials[-1]: # as materials are already sorted just check if this ter_layer_id is already at the end of materials
materials.append(ter_layer_id)
mat_id = len(materials) - 1
if len(used_materials[layer]) == 0 or mat_id != used_materials[layer][-1]: # as materials are sorted per layer_id we need just to check if required material is at end of the list
used_materials[layer].append(mat_id)
matIndexPerTria[layer].append(len(used_materials[layer]) - 1)
print("Arranged mesh into {} layers with {} materials".format(len(faces), len(materials)))
### Create materials ###
created_materials = [] # list containing references to all created blender materials
for ter_layer_id in materials:
terrain_name = str(ter_layer_id[1]) + '_' # include terrain definition index to allow correct sorting for a later import
terrain_name += dsf.DefTerrains[ter_layer_id[1]] # add name of terrain
terrain_name = terrain_name + "_" + str(ter_layer_id[2]) + "_" + str(ter_layer_id[3]) # add near and far values for a later import
######### IDEA: STORE VALUES in material properties or special nodes ###########################
if "PROJECTED" in terrain_details[ter_layer_id[1]]:
terrain_name += "_P" # add if base mesh is projected
if ter_layer_id[0] > 1: # this is an overlay
terrain_name += "_O"
m = self.add_material(terrain_name, terrain_details[ter_layer_id[1]], bpy) # add material to Blender materials
created_materials.append(m)
print("Created {} materials".format(len(created_materials)))
# Create own collection for basemesh and overlays
main_collection = bpy.data.collections.new("XPDSF")
bpy.context.scene.collection.children.link(main_collection)
ol_collection = bpy.data.collections.new("Overlays")
main_collection.children.link(ol_collection)
for layer in range(len(faces)):
if layer == 0:
mesh_name = "Basemesh"
else:
mesh_name = "Overlay_" + str(layer)
### TBD Group overlays in own group
mesh = bpy.data.meshes.new(mesh_name) # add the new mesh
obj = bpy.data.objects.new(mesh.name, mesh)
if mesh_name.startswith("Base"):
col = bpy.data.collections.get("XPDSF")
else:
col = bpy.data.collections.get("Overlays")
col.objects.link(obj)
bpy.context.view_layer.objects.active = obj
##### Delete loose vertices ####
if layer > 0:
verts_layer = []
faces_layer = []
normals_layer = []
verts_index = dict()
for t in faces[layer]:
faces_layer.append([])
for v in t:
if v in verts_index:
faces_layer[-1].append(verts_index[v])
else:
verts_layer.append(verts[v])
normals_layer.append(normals[v])
verts_index[v] = len(verts_layer) - 1
faces_layer[-1].append(len(verts_layer) - 1)
else:
verts_layer = verts
faces_layer = faces[layer]
normals_layer = normals #faces[layer] = [] # free memory (if this helps) ...
mesh.from_pydata(verts_layer, edges, faces_layer)
mesh.use_auto_smooth = True # needed to make use of imported normals split
#mesh.normals_split_custom_set([(0, 0, 0) for l in mesh.loops])
mesh.normals_split_custom_set_from_vertices(normals_layer) # set imported normals as custom split vertex normals
# ADDING MATERIALS PER LAYER
for m in used_materials[layer]:
bpy.context.object.data.materials.append(created_materials[m])
for i, tria in enumerate(bpy.context.object.data.polygons): #### Use obj instead of context ??
tria.material_index = matIndexPerTria[layer][i]
new_uv = bpy.context.active_object.data.uv_layers.new(name='baseUV') #### Use obj instead of context ??
for loop in bpy.context.active_object.data.loops:
new_uv.data[loop.index].uv = uvs[layer][loop.index]
bpy.context.object.data.uv_layers["baseUV"].active_render = True
######## ADDING BORDER UVS ###########
if layer > 0: # we have an overlay
border_uv = bpy.context.active_object.data.uv_layers.new(name='borderUV') #### Use obj instead of context ??
for loop in bpy.context.active_object.data.loops:
border_uv.data[loop.index].uv = uvs2[layer][loop.index]
### Move overlays along z-axis
obj.location.z += layer * 0.01
return {"FINISHED"}
bl_info = {
"name": "X-Plane Distribuation Scenery Format importer (.dsf)",
"author": "schmax",
"version": (0, 0, 1),
"blender": (3, 0, 0),
"location": "File > Import",
"description": "Import-Export OBJ, Import OBJ mesh, UV's, materials and textures",
"warning": "",
"doc_url": "https://github.com/nofaceinbook/DSF2Blender",
"support": 'none / in development',
"category": "Import",
}
class ImportDSF(Operator, ImportHelper):
"""Load a X-Plane mesh from dsf file"""
bl_idname = "import_mesh.dsf" # important since its how bpy.ops.import_test.some_data is constructed
bl_label = "Import DSF"
# ImportHelper mixin class uses this
filename_ext = ".dsf"
filter_glob: StringProperty(
default="*.dsf",
options={'HIDDEN'},
)
west_bound: FloatProperty(
name="West bound",
description="West boundary, relative to tile (e.g. 0.4) or absolute (-29.6) in degree (0.0 for any west tile border)",
min=-180.0, max=180.0,
soft_min=0.0, soft_max=1.0,
default=0.0,
)
east_bound: FloatProperty(
name="East bound",
description="East boundary, relative to tile (e.g. 0.6) or absolute (-29.4) in degree (1.0 for any east tile border)",
min=-180.0, max=180.0,
soft_min=0.0, soft_max=1.0,
default=1.0,
)
south_bound: FloatProperty(
name="South bound",
description="South boundary, relative to tile (e.g. 0.4) or absolute (50.4) in degree (0.0 for any south tile border)",
min=-90.0, max=90.0,
soft_min=0.0, soft_max=1.0,
default=0.0,
)
north_bound: FloatProperty(
name="North bound",
description="North boundary, relative to tile (e.g. 0.6) or absolute (50.6) in degree (1.0 for any nord tile border)",
min=-90.0, max=90.0,
soft_min=0.0, soft_max=1.0,
default=1.0,
)
scaling: IntProperty(
name="Scaleing",
default=1000,
description="Multiplier for degree tile",
min=1,
max=100000,
)
separate_overlays: BoolProperty(
name="Overlay per Terrain Type",
description="Create separate overlays per terrain type",
default=False
)
def execute(self, context):
"""Executes the import process """
importer = DSF_loader(self.west_bound, self.east_bound, self.south_bound, self.north_bound, self.scaling, self.separate_overlays)
return importer.execute(self.filepath)
def menu_func_import(self, context):
self.layout.operator(ImportDSF.bl_idname, text="X-Plane DSF mesh (.dsf)")
def register():
bpy.utils.register_class(ImportDSF)
bpy.types.TOPBAR_MT_file_import.append(menu_func_import)
def unregister():
bpy.types.TOPBAR_MT_file_import.remove(menu_func_import)
bpy.utils.unregister_class(ImportDSF)
if __name__ == "__main__":
register()
#unregister() #### TBD: Issue with unregister as some bl_rna attribute not found ##############
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _reactAddonsPureRenderMixin = require('react-addons-pure-render-mixin');
var _reactAddonsPureRenderMixin2 = _interopRequireDefault(_reactAddonsPureRenderMixin);
var _svgIcon = require('../../svg-icon');
var _svgIcon2 = _interopRequireDefault(_svgIcon);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var DeviceSettingsSystemDaydream = _react2.default.createClass({
displayName: 'DeviceSettingsSystemDaydream',
mixins: [_reactAddonsPureRenderMixin2.default],
render: function render() {
return _react2.default.createElement(
_svgIcon2.default,
this.props,
_react2.default.createElement('path', { d: 'M9 16h6.5c1.38 0 2.5-1.12 2.5-2.5S16.88 11 15.5 11h-.05c-.24-1.69-1.69-3-3.45-3-1.4 0-2.6.83-3.16 2.02h-.16C7.17 10.18 6 11.45 6 13c0 1.66 1.34 3 3 3zM21 3H3c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h18c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zm0 16.01H3V4.99h18v14.02z' })
);
}
});
exports.default = DeviceSettingsSystemDaydream;
module.exports = exports['default'];
|
"""Implements the base rule class.
Rules crawl through the trees returned by the parser and evaluate particular
rules.
The intent is that it should be possible for the rules to be expressed
as simply as possible, with as much of the complexity abstracted away.
The evaluation function should take enough arguments that it can evaluate
the position of the given segment in relation to its neighbors, and that
the segment which finally "triggers" the error, should be the one that would
be corrected OR if the rule relates to something that is missing, then it
should flag on the segment FOLLOWING the place that the desired element is
missing.
"""
import bdb
import copy
import fnmatch
import logging
import pathlib
import regex
from typing import Iterable, Optional, List, Set, Tuple, Union, Any
from collections import namedtuple
from dataclasses import dataclass
from sqlfluff.core.cached_property import cached_property
from sqlfluff.core.linter import LintedFile
from sqlfluff.core.parser import BaseSegment, RawSegment
from sqlfluff.core.dialects import Dialect
from sqlfluff.core.errors import SQLLintError
from sqlfluff.core.rules.functional import Segments
from sqlfluff.core.templaters.base import RawFileSlice, TemplatedFile
# The ghost of a rule (mostly used for testing)
RuleGhost = namedtuple("RuleGhost", ["code", "description"])
# Instantiate the rules logger
rules_logger = logging.getLogger("sqlfluff.rules")
linter_logger: logging.Logger = logging.getLogger("sqlfluff.linter")
class RuleLoggingAdapter(logging.LoggerAdapter):
"""A LoggingAdapter for rules which adds the code of the rule to it."""
def process(self, msg, kwargs):
"""Add the code element to the logging message before emit."""
return "[{}] {}".format(self.extra["code"], msg), kwargs
class LintResult:
"""A class to hold the results of a rule evaluation.
Args:
anchor (:obj:`BaseSegment`, optional): A segment which represents
the *position* of the a problem. NB: Each fix will also hold
its own reference to position, so this position is mostly for
alerting the user to where the *problem* is.
fixes (:obj:`list` of :obj:`LintFix`, optional): An array of any
fixes which would correct this issue. If not present then it's
assumed that this issue will have to be manually fixed.
memory (:obj:`dict`, optional): An object which stores any working
memory for the rule. The `memory` returned in any `LintResult`
will be passed as an input to the next segment to be crawled.
description (:obj:`str`, optional): A description of the problem
identified as part of this result. This will override the
description of the rule as what gets reported to the user
with the problem if provided.
"""
def __init__(
self,
anchor: Optional[BaseSegment] = None,
fixes: Optional[List["LintFix"]] = None,
memory=None,
description=None,
):
# An anchor of none, means no issue
self.anchor = anchor
# Fixes might be blank
self.fixes = fixes or []
# When instantiating the result, we filter any fixes which are "trivial".
self.fixes = [f for f in self.fixes if not f.is_trivial()]
# Memory is passed back in the linting result
self.memory = memory
# store a description_override for later
self.description = description
def to_linting_error(self, rule) -> Optional[SQLLintError]:
"""Convert a linting result to a :exc:`SQLLintError` if appropriate."""
if self.anchor:
# Allow description override from the LintResult
description = self.description or rule.description
return SQLLintError(
rule=rule,
segment=self.anchor,
fixes=self.fixes,
description=description,
)
else:
return None
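# Hedged usage sketch (illustrative): a rule's _eval() typically returns a
# LintResult anchored on the offending segment; the description argument,
# when given, overrides the rule's own description in the report:
#   return LintResult(
#       anchor=context.segment,
#       description="Trailing whitespace found.",
#   )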
class LintFix:
"""A class to hold a potential fix to a linting violation.
Args:
edit_type (:obj:`str`): One of `create_before`, `create_after`,
`replace`, `delete` to indicate the kind of fix this represents.
anchor (:obj:`BaseSegment`): A segment which represents
the *position* that this fix should be applied at. For deletions
it represents the segment to delete, for creations it implies the
position to create at (with the existing element at this position
to be moved *after* the edit), for a `replace` it implies the
segment to be replaced.
edit (:obj:`BaseSegment`, optional): For `replace` and `create` fixes,
this holds the iterable of segments to create or replace at the
given `anchor` point.
source (:obj:`BaseSegment`, optional): For `replace` and `create` fixes,
this holds the iterable of segments that provided the code. IMPORTANT: The
linter uses this to prevent copying material from templated areas.
"""
def __init__(
self,
edit_type: str,
anchor: BaseSegment,
edit: Optional[Iterable[BaseSegment]] = None,
source: Optional[Iterable[BaseSegment]] = None,
) -> None:
if edit_type not in (
"create_before",
"create_after",
"replace",
"delete",
): # pragma: no cover
raise ValueError(f"Unexpected edit_type: {edit_type}")
self.edit_type = edit_type
if not anchor: # pragma: no cover
raise ValueError("Fixes must provide an anchor.")
self.anchor = anchor
self.edit: Optional[List[BaseSegment]] = None
if edit is not None:
# Coerce edit iterable to list
edit = list(edit)
# Copy all the elements of edit to stop contamination.
# We're about to start stripping the position markers
# off some of the elements and we don't want to end up
# stripping the positions of the original elements of
# the parsed structure.
self.edit = copy.deepcopy(edit)
# Check that any edits don't have a position marker set.
# We should rely on realignment to make position markers.
# Strip position markers of anything enriched, otherwise things can get
# blurry
for seg in self.edit:
if seg.pos_marker:
# Developer warning.
rules_logger.debug(
"Developer Note: Edit segment found with preset position "
"marker. These should be unset and calculated later."
)
seg.pos_marker = None # type: ignore
# Once stripped, we shouldn't replace any markers because
# later code may rely on them being accurate, which we
# can't guarantee with edits.
self.source = [seg for seg in source if seg.pos_marker] if source else []
def is_trivial(self):
"""Return true if the fix is trivial.
Trivial edits are:
- Anything of zero length.
- Any edits which result in themselves.
Removing these makes the routines which process fixes much faster.
"""
if self.edit_type in ("create_before", "create_after"):
if isinstance(self.edit, BaseSegment):
if len(self.edit.raw) == 0: # pragma: no cover TODO?
return True
elif all(len(elem.raw) == 0 for elem in self.edit):
return True
elif self.edit_type == "replace" and self.edit == self.anchor:
return True # pragma: no cover TODO?
return False
def __repr__(self):
if self.edit_type == "delete":
detail = f"delete:{self.anchor.raw!r}"
elif self.edit_type in ("replace", "create_before", "create_after"):
if hasattr(self.edit, "raw"):
new_detail = self.edit.raw # pragma: no cover TODO?
else:
new_detail = "".join(s.raw for s in self.edit)
if self.edit_type == "replace":
detail = f"edt:{self.anchor.raw!r}->{new_detail!r}"
else:
detail = f"create:{new_detail!r}"
else:
detail = "" # pragma: no cover TODO?
return "<LintFix: {} @{} {}>".format(
self.edit_type, self.anchor.pos_marker, detail
)
def __eq__(self, other):
"""Compare equality with another fix.
A fix is equal to another if it is in the same place (position), with the
same type and (if appropriate) the same edit values.
"""
if not self.edit_type == other.edit_type:
return False
if not self.anchor == other.anchor:
return False
if not self.edit == other.edit:
return False
return True # pragma: no cover TODO?
@classmethod
def delete(cls, anchor_segment: BaseSegment) -> "LintFix":
"""Delete supplied anchor segment."""
return cls("delete", anchor_segment)
@classmethod
def replace(
cls,
anchor_segment: BaseSegment,
edit_segments: Iterable[BaseSegment],
source: Optional[Iterable[BaseSegment]] = None,
) -> "LintFix":
"""Replace supplied anchor segment with the edit segments."""
return cls("replace", anchor_segment, edit_segments, source)
@classmethod
def create_before(
cls,
anchor_segment: BaseSegment,
edit_segments: Iterable[BaseSegment],
source: Optional[Iterable[BaseSegment]] = None,
) -> "LintFix":
"""Create edit segments before the supplied anchor segment."""
return cls("create_before", anchor_segment, edit_segments, source)
@classmethod
def create_after(
cls,
anchor_segment: BaseSegment,
edit_segments: Iterable[BaseSegment],
source: Optional[Iterable[BaseSegment]] = None,
) -> "LintFix":
"""Create edit segments after the supplied anchor segment."""
return cls("create_after", anchor_segment, edit_segments, source)
def has_template_conflicts(self, templated_file: TemplatedFile) -> bool:
"""Does this fix conflict with (i.e. touch) templated code?"""
# Goal: Find the raw slices touched by the fix. Two cases, based on
# edit type:
# 1. "delete", "replace": Raw slices touching the anchor segment. If
# ANY are templated, discard the fix.
# 2. "create_before", "create_after": Raw slices encompassing the two
# character positions surrounding the insertion point (**NOT** the
# whole anchor segment, because we're not *touching* the anchor
# segment, we're inserting **RELATIVE** to it). If ALL are templated,
# discard the fix.
anchor_slice = self.anchor.pos_marker.templated_slice
templated_slices = [anchor_slice]
check_fn = any
if self.edit_type == "create_before":
# Consider the first position of the anchor segment and the
# position just before it.
templated_slices = [
slice(anchor_slice.start, anchor_slice.start + 1),
slice(anchor_slice.start - 1, anchor_slice.start),
]
check_fn = all
elif self.edit_type == "create_after":
# Consider the last position of the anchor segment and the
# character just after it.
templated_slices = [
slice(anchor_slice.stop - 1, anchor_slice.stop),
slice(anchor_slice.stop, anchor_slice.stop + 1),
]
check_fn = all
# TRICKY: For creations at the end of the file, there won't be an
# existing slice. In this case, the function adds file_end_slice to the
# result, as a sort of placeholder or sentinel value. We pass a literal
# slice for "file_end_slice" so that later in this function, the LintFix
# is interpreted as literal code. Otherwise, it could be interpreted as
# a fix to *templated* code and incorrectly discarded.
fix_slices = self._raw_slices_from_templated_slices(
templated_file,
templated_slices,
file_end_slice=RawFileSlice("", "literal", -1),
)
# We have the fix slices. Now check for conflicts.
result = check_fn(fs.slice_type == "templated" for fs in fix_slices)
if result or not self.source:
return result
# Fix slices were okay. Now check template safety of the "source" field.
templated_slices = [source.pos_marker.templated_slice for source in self.source]
raw_slices = self._raw_slices_from_templated_slices(
templated_file, templated_slices
)
return any(fs.slice_type == "templated" for fs in raw_slices)
@staticmethod
def _raw_slices_from_templated_slices(
templated_file: TemplatedFile,
templated_slices: List[slice],
file_end_slice: Optional[RawFileSlice] = None,
) -> Set[RawFileSlice]:
raw_slices: Set[RawFileSlice] = set()
for templated_slice in templated_slices:
try:
raw_slices.update(
templated_file.raw_slices_spanning_source_slice(
templated_file.templated_slice_to_source_slice(templated_slice)
)
)
except (IndexError, ValueError):
# These errors will happen with "create_before" at the beginning
# of the file or "create_after" at the end of the file. By
# default, we ignore this situation. If the caller passed
# "file_end_slice", add that to the result. In effect,
# file_end_slice serves as a placeholder or sentinel value.
if file_end_slice is not None:
raw_slices.add(file_end_slice)
return raw_slices
EvalResultType = Union[LintResult, List[LintResult], None]
@dataclass
class RuleContext:
"""Class for holding the context passed to rule eval functions."""
segment: BaseSegment
parent_stack: Tuple[BaseSegment, ...]
siblings_pre: Tuple[BaseSegment, ...]
siblings_post: Tuple[BaseSegment, ...]
raw_stack: Tuple[RawSegment, ...]
memory: Any
dialect: Dialect
path: Optional[pathlib.Path]
templated_file: Optional[TemplatedFile]
@cached_property
def functional(self):
"""Returns a Surrogates object that simplifies writing rules."""
return FunctionalRuleContext(self)
class FunctionalRuleContext:
"""RuleContext written in a "functional" style; simplifies writing rules."""
def __init__(self, context: RuleContext):
self.context = context
@cached_property
def segment(self) -> "Segments":
"""Returns a Segments object for context.segment."""
return Segments(
self.context.segment, templated_file=self.context.templated_file
)
@property
def parent_stack(self) -> "Segments": # pragma: no cover
"""Returns a Segments object for context.parent_stack."""
return Segments(
*self.context.parent_stack, templated_file=self.context.templated_file
)
@property
def siblings_pre(self) -> "Segments": # pragma: no cover
"""Returns a Segments object for context.siblings_pre."""
return Segments(
*self.context.siblings_pre, templated_file=self.context.templated_file
)
@property
def siblings_post(self) -> "Segments": # pragma: no cover
"""Returns a Segments object for context.siblings_post."""
return Segments(
*self.context.siblings_post, templated_file=self.context.templated_file
)
@cached_property
def raw_stack(self) -> "Segments":
"""Returns a Segments object for context.raw_stack."""
return Segments(
*self.context.raw_stack, templated_file=self.context.templated_file
)
@cached_property
def raw_segments(self):
"""Returns a Segments object for all the raw segments in the file."""
file_segment = self.context.parent_stack[0]
return Segments(
*file_segment.get_raw_segments(), templated_file=self.context.templated_file
)
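# Hedged usage sketch (illustrative): inside a rule, these wrappers allow
# chained queries over segments, e.g.
#   commas = context.functional.segment.children(sp.is_type("comma"))
# where sp refers to sqlfluff.core.rules.functional.segment_predicates
# (an assumption of this example, not imported in this module).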
class BaseRule:
"""The base class for a rule.
Args:
code (:obj:`str`): The identifier for this rule, used in inclusion
or exclusion.
description (:obj:`str`): A human readable description of what this
rule does. It will be displayed when any violations are found.
"""
_check_docstring = True
_works_on_unparsable = True
targets_templated = False
def __init__(self, code, description, **kwargs):
self.description = description
self.code = code
# kwargs represents the config passed to the rule. Add all kwargs as class
# attributes so they can be accessed in rules which inherit from this class
for key, value in kwargs.items():
self.__dict__[key] = value
# We also define a custom logger here, which also includes the code
# of the rule in the logging.
self.logger = RuleLoggingAdapter(rules_logger, {"code": code})
# Validate that declared configuration options exist
try:
for keyword in self.config_keywords:
if keyword not in kwargs.keys():
raise ValueError(
(
"Unrecognized config '{}' for Rule {}. If this "
"is a new option, please add it to "
"`default_config.cfg`"
).format(keyword, code)
)
except AttributeError:
self.logger.info(f"No config_keywords defined for {code}")
def _eval(self, context: RuleContext) -> EvalResultType:
"""Evaluate this rule against the current context.
This should indicate whether a linting violation has occurred and/or
whether there is something to remember from this evaluation.
Note that an evaluate function should always accept `**kwargs`, but
if it relies on any available kwargs, it should explicitly call
them out at definition.
Returns:
:obj:`LintResult`, list of :obj:`LintResult` or :obj:`None`.
The reason that this method is called :meth:`_eval` and not `eval` is
a bit of a hack with sphinx autodoc, to make it so that the rule
documentation auto-generates nicely.
"""
raise NotImplementedError(
(
"{} has not had its `eval` function defined. This is a problem "
"with the rule setup."
).format(self.__class__.__name__)
) # pragma: no cover
def crawl(
self,
segment,
ignore_mask,
dialect,
parent_stack=None,
siblings_pre=None,
siblings_post=None,
raw_stack=None,
memory=None,
fname=None,
templated_file: Optional["TemplatedFile"] = None,
):
"""Recursively perform the crawl operation on a given segment.
Returns:
A tuple of (vs, raw_stack, fixes, memory)
"""
# parent stack should be a tuple if it exists
# Rules should evaluate on segments FIRST, before evaluating on their
# children. They should also return a list of violations.
parent_stack = parent_stack or ()
raw_stack = raw_stack or ()
siblings_post = siblings_post or ()
siblings_pre = siblings_pre or ()
memory = memory or {}
vs: List[SQLLintError] = []
fixes: List[LintFix] = []
# First, check whether we're looking at an unparsable and whether
# this rule will still operate on that.
if not self._works_on_unparsable and segment.is_type("unparsable"):
# Abort here if it doesn't. Otherwise we'll get odd results.
return vs, raw_stack, [], memory
# TODO: Document what options are available to the evaluation function.
try:
res = self._eval(
context=RuleContext(
segment=segment,
parent_stack=parent_stack,
siblings_pre=siblings_pre,
siblings_post=siblings_post,
raw_stack=raw_stack,
memory=memory,
dialect=dialect,
path=pathlib.Path(fname) if fname else None,
templated_file=templated_file,
)
)
except (bdb.BdbQuit, KeyboardInterrupt): # pragma: no cover
raise
# Any exception at this point would halt the linter and
# cause the user to get no results
except Exception as e:
self.logger.critical(
f"Applying rule {self.code} threw an Exception: {e}", exc_info=True
)
exception_line, _ = segment.pos_marker.source_position()
vs.append(
SQLLintError(
rule=self,
segment=segment,
fixes=[],
description=(
f"Unexpected exception: {str(e)};\n"
"Could you open an issue at "
"https://github.com/sqlfluff/sqlfluff/issues ?\n"
"You can ignore this exception for now, by adding "
f"'-- noqa: {self.code}' at the end\n"
f"of line {exception_line}\n"
),
)
)
return vs, raw_stack, fixes, memory
new_lerrs: List[SQLLintError] = []
new_fixes: List[LintFix] = []
if res is None:
# Assume this means no problems (also means no memory)
pass
elif isinstance(res, LintResult):
# Extract any memory
memory = res.memory
self._process_lint_result(
res, templated_file, ignore_mask, new_lerrs, new_fixes
)
elif isinstance(res, list) and all(
isinstance(elem, LintResult) for elem in res
):
# Extract any memory from the *last* one, assuming
# it was the last to be added
memory = res[-1].memory
for elem in res:
self._process_lint_result(
elem, templated_file, ignore_mask, new_lerrs, new_fixes
)
else: # pragma: no cover
raise TypeError(
"Got unexpected result [{!r}] back from linting rule: {!r}".format(
res, self.code
)
)
for lerr in new_lerrs:
self.logger.debug("!! Violation Found: %r", lerr.description)
for fix in new_fixes:
self.logger.debug("!! Fix Proposed: %r", fix)
# Consume the new results
vs += new_lerrs
fixes += new_fixes
# The raw stack only keeps track of the previous raw segments
if len(segment.segments) == 0:
raw_stack += (segment,)
# Parent stack keeps track of all the parent segments
parent_stack += (segment,)
for idx, child in enumerate(segment.segments):
dvs, raw_stack, child_fixes, memory = self.crawl(
segment=child,
ignore_mask=ignore_mask,
parent_stack=parent_stack,
siblings_pre=segment.segments[:idx],
siblings_post=segment.segments[idx + 1 :],
raw_stack=raw_stack,
memory=memory,
dialect=dialect,
fname=fname,
templated_file=templated_file,
)
vs += dvs
fixes += child_fixes
return vs, raw_stack, fixes, memory
# HELPER METHODS --------
def _process_lint_result(
self, res, templated_file, ignore_mask, new_lerrs, new_fixes
):
self.discard_unsafe_fixes(res, templated_file)
lerr = res.to_linting_error(rule=self)
ignored = False
if lerr:
if ignore_mask:
filtered = LintedFile.ignore_masked_violations([lerr], ignore_mask)
if not filtered:
lerr = None
ignored = True
if lerr:
new_lerrs.append(lerr)
if not ignored:
new_fixes.extend(res.fixes)
@cached_property
def indent(self) -> str:
"""String for a single indent, based on configuration."""
self.tab_space_size: int
self.indent_unit: str
tab = "\t"
space = " "
return space * self.tab_space_size if self.indent_unit == "space" else tab
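# For example, with indent_unit="space" and tab_space_size=4 this yields
# four spaces; with indent_unit="tab" it yields "\t" (illustrative values,
# taken from the rule's configuration at runtime).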
def _is_final_segment_helper(self, context: RuleContext):
if len(self.filter_meta(context.siblings_post)) > 0:
# This can only fail on the last segment
return False
elif len(context.segment.segments) > 0:
# This can only fail on the last base segment
return False
elif context.segment.is_meta:
# We can't fail on a meta segment
return False
return True
def is_final_segment(self, context: RuleContext) -> bool:
"""Is the current segment the final segment in the parse tree."""
if not self._is_final_segment_helper(context):
return False
# We know we are at a leaf of the tree but not necessarily at the end of the
# tree. Therefore we look backwards up the parent stack and ask if any of
# the parent segments have another non-meta child segment after the current
# one.
child_segment = context.segment
for parent_segment in context.parent_stack[::-1]:
possible_children = [s for s in parent_segment.segments if not s.is_meta]
if len(possible_children) > possible_children.index(child_segment) + 1:
return False
child_segment = parent_segment
return True
def closing_ancestors(
self, context: RuleContext, types: Iterable[str]
) -> List[BaseSegment]:
"""Returns ancestors of specified types closing at this segment.
Useful, for example, to find the statements ending at a segment.
"""
result: List[BaseSegment] = []
if not self._is_final_segment_helper(context):
return result
# Look backwards up the parent stack until we find a parent segment that has
# another non-meta child segment after the current one, returning a list of
# matching "type" segments we encounter along the way.
child_segment = context.segment
for parent_segment in context.parent_stack[::-1]:
possible_children = [s for s in parent_segment.segments if not s.is_meta]
if len(possible_children) > possible_children.index(child_segment) + 1:
return result
elif parent_segment.is_type(*types):
result.append(parent_segment)
child_segment = parent_segment
return result
@staticmethod
def filter_meta(segments, keep_meta=False):
"""Filter the segments to non-meta.
Or optionally the opposite if keep_meta is True.
"""
buff = []
for elem in segments:
if elem.is_meta is keep_meta:
buff.append(elem)
return tuple(buff)
@classmethod
def get_parent_of(cls, segment, root_segment): # pragma: no cover TODO?
"""Return the segment immediately containing segment.
NB: This is recursive.
Args:
segment: The segment to look for.
root_segment: Some known parent of the segment
we're looking for (although likely not the
direct parent in question).
"""
if segment in root_segment.segments:
return root_segment
elif root_segment.segments:
# try each of the subsegments
for sub in root_segment.segments:
p = cls.get_parent_of(segment, sub)
if p:
return p
# Not directly in the segment and
# no subsegments to check. Return None.
return None
@staticmethod
def matches_target_tuples(seg: BaseSegment, target_tuples: List[Tuple[str, str]]):
"""Does the given segment match any of the given type tuples."""
if seg.name in [elem[1] for elem in target_tuples if elem[0] == "name"]:
return True
elif seg.is_type(*[elem[1] for elem in target_tuples if elem[0] == "type"]):
return True
return False
@staticmethod
def discard_unsafe_fixes(
lint_result: LintResult, templated_file: Optional[TemplatedFile]
):
"""Remove (discard) LintResult fixes if they are "unsafe".
By removing its fixes, a LintResult will still be reported, but it
will be treated as _unfixable_.
"""
if not lint_result.fixes or not templated_file:
return
# Get the set of slices touched by any of the fixes.
fix_slices: Set[RawFileSlice] = set()
for fix in lint_result.fixes:
if fix.anchor:
fix_slices.update(
templated_file.raw_slices_spanning_source_slice(
fix.anchor.pos_marker.source_slice
)
)
# Compute the set of block IDs affected by the fixes. If it's more than
# one, discard the fixes. Rationale: Fixes that span block boundaries
# may corrupt the file, e.g. by moving code in or out of a template
# loop.
block_info = templated_file.raw_slice_block_info
fix_block_ids = set(block_info.block_ids[slice_] for slice_ in fix_slices)
if len(fix_block_ids) > 1:
linter_logger.info(
" * Discarding fixes that span blocks: %s",
lint_result.fixes,
)
lint_result.fixes = []
return
# If the fixes touch a literal-only loop, discard the fixes.
# Rationale: Fixes to a template loop that contains only literals are:
# - Difficult to map correctly back to source code, so there's a risk of
# accidentally "expanding" the loop body if we apply them.
# - Highly unusual (In practice, templated loops in SQL are usually for
# expanding the same code using different column names, types, etc.,
# in which case the loop body contains template variables.)
for block_id in fix_block_ids:
if block_id in block_info.literal_only_loops:
linter_logger.info(
" * Discarding fixes to literal-only loop: %s",
lint_result.fixes,
)
lint_result.fixes = []
return
for fix in lint_result.fixes:
if fix.has_template_conflicts(templated_file):
linter_logger.info(
" * Discarding fixes that touch templated code: %s",
lint_result.fixes,
)
lint_result.fixes = []
return
@staticmethod
def split_comma_separated_string(raw_str: str) -> List[str]:
"""Converts comma separated string to List, stripping whitespace."""
return [s.strip() for s in raw_str.split(",") if s.strip()]
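# Hedged sketch of a minimal rule built on BaseRule (the name and logic are
# illustrative; real rules are registered on a RuleSet as defined below):
#
# class Rule_L999(BaseRule):
#     """Files must not be empty."""
#
#     def _eval(self, context: RuleContext) -> EvalResultType:
#         if not context.segment.raw.strip():
#             return LintResult(anchor=context.segment)
#         return None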
class RuleSet:
"""Class to define a ruleset.
A rule set is instantiated on module load, but the references
to each of its classes are instantiated at runtime. This means
that configuration values can be passed to those rules live
and be responsive to any changes in configuration from the
path that the file is in.
Rules should be fetched using the :meth:`get_rulelist` command which
also handles any filtering (i.e. allowlisting and denylisting).
New rules should be added to the instance of this class using the
:meth:`register` decorator. That decorator registers the class, but also
performs basic type and name-convention checks.
The code for the rule will be parsed from the name, the description
from the docstring. The eval function is assumed to be overridden by
the subclass; the parent class raises an error from this function if it
is not overridden.
"""
def __init__(self, name, config_info):
self.name = name
self.config_info = config_info
self._register = {}
def _validate_config_options(self, config, rule=None):
"""Ensure that all config options are valid.
Config options can also be checked for a specific rule, e.g. L010.
"""
rule_config = config.get_section("rules")
for config_name, info_dict in self.config_info.items():
config_option = (
rule_config.get(config_name)
if not rule
else rule_config.get(rule).get(config_name)
)
valid_options = info_dict.get("validation")
if (
valid_options
and config_option not in valid_options
and config_option is not None
):
raise ValueError(
(
"Invalid option '{}' for {} configuration. Must be one of {}"
).format(
config_option,
config_name,
valid_options,
)
)
@property
def valid_rule_name_regex(self):
"""Defines the accepted pattern for rule names.
The first group captures the plugin name (optional), which
must be capitalized.
The second group captures the rule code.
Examples of valid rule names:
* Rule_PluginName_L001
* Rule_L001
"""
return regex.compile(r"Rule_?([A-Z]{1}[a-zA-Z]+)?_([A-Z][0-9]{3})")
def register(self, cls, plugin=None):
"""Decorate a class with this to add it to the ruleset.
.. code-block:: python
@myruleset.register
class Rule_L001(BaseRule):
"Description of rule."
def eval(self, **kwargs):
return LintResult()
We expect that rules are defined as classes with the name `Rule_XXXX`
where `XXXX` is of the form `LNNN`, where L is a letter (literally L for
*linting* by default) and N is a three digit number.
If this receives classes by any other name, then it will raise a
:exc:`ValueError`.
"""
rule_name_match = self.valid_rule_name_regex.match(cls.__name__)
# Validate the name
if not rule_name_match: # pragma: no cover
raise ValueError(
(
"Tried to register rule on set {!r} with unexpected "
"format: {}, format should be: Rule_PluginName_L123 (for plugins) "
"or Rule_L123 (for core rules)."
).format(self.name, cls.__name__)
)
plugin_name, code = rule_name_match.groups()
# If the docstring is multiline, then we extract just summary.
description = cls.__doc__.replace("``", "'").split("\n")[0]
if plugin_name:
code = f"{plugin_name}_{code}"
# Keep track of the *class* in the register. Don't instantiate yet.
if code in self._register: # pragma: no cover
raise ValueError(
"Rule {!r} has already been registered on RuleSet {!r}!".format(
code, self.name
)
)
self._register[code] = dict(code=code, description=description, cls=cls)
# Make sure we actually return the original class
return cls
def _expand_config_rule_glob_list(self, glob_list: List[str]) -> List[str]:
"""Expand a list of rule globs into a list of rule codes.
Returns:
:obj:`list` of :obj:`str` rule codes.
"""
expanded_glob_list = []
for r in glob_list:
expanded_glob_list.extend(
[
x
for x in fnmatch.filter(self._register, r)
if x not in expanded_glob_list
]
)
return expanded_glob_list
def get_rulelist(self, config) -> List[BaseRule]:
"""Use the config to return the appropriate rules.
        We use the config both for allowlisting and denylisting, and also
        for configuring each rule with any settings provided in the config.
Returns:
:obj:`list` of instantiated :obj:`BaseRule`.
"""
# Validate all generic rule configs
self._validate_config_options(config)
# default the allowlist to all the rules if not set
allowlist = config.get("rule_allowlist") or list(self._register.keys())
denylist = config.get("rule_denylist") or []
allowlisted_unknown_rule_codes = [
r for r in allowlist if not fnmatch.filter(self._register, r)
]
if any(allowlisted_unknown_rule_codes):
rules_logger.warning(
"Tried to allowlist unknown rules: {!r}".format(
allowlisted_unknown_rule_codes
)
)
denylisted_unknown_rule_codes = [
r for r in denylist if not fnmatch.filter(self._register, r)
]
if any(denylisted_unknown_rule_codes): # pragma: no cover
rules_logger.warning(
"Tried to denylist unknown rules: {!r}".format(
denylisted_unknown_rule_codes
)
)
keylist = sorted(self._register.keys())
# First we expand the allowlist and denylist globs
expanded_allowlist = self._expand_config_rule_glob_list(allowlist)
expanded_denylist = self._expand_config_rule_glob_list(denylist)
# Then we filter the rules
keylist = [
r for r in keylist if r in expanded_allowlist and r not in expanded_denylist
]
# Construct the kwargs for instantiation before we actually do it.
rule_kwargs = {}
for k in keylist:
kwargs = {}
generic_rule_config = config.get_section("rules")
specific_rule_config = config.get_section(
("rules", self._register[k]["code"])
)
if generic_rule_config:
kwargs.update(generic_rule_config)
if specific_rule_config:
# Validate specific rule config before adding
self._validate_config_options(config, self._register[k]["code"])
kwargs.update(specific_rule_config)
kwargs["code"] = self._register[k]["code"]
# Allow variable substitution in making the description
kwargs["description"] = self._register[k]["description"].format(**kwargs)
rule_kwargs[k] = kwargs
# Instantiate in the final step
return [self._register[k]["cls"](**rule_kwargs[k]) for k in keylist]
def copy(self):
"""Return a copy of self with a separate register."""
new_ruleset = copy.copy(self)
new_ruleset._register = self._register.copy()
return new_ruleset
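# --- Illustrative usage sketch (not part of the original module) ---
# Assuming a RuleSet instance and a loaded config exist, registration and
# retrieval would look roughly like this; ``std_rule_set`` and ``Rule_L999``
# are hypothetical names:
#
#   std_rule_set = RuleSet(name="standard", config_info=config_info)
#
#   @std_rule_set.register
#   class Rule_L999(BaseRule):
#       """Example rule summary used as the description."""
#       def eval(self, **kwargs):
#           return LintResult()
#
#   rules = std_rule_set.get_rulelist(config)  # instantiated, filtered rules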
|
let eventGuid = 0
// let todayStr = new Date().toISOString().replace(/T.*$/, '') // YYYY-MM-DD of today
// initial event values
// ids auto-increment
// POST
// request by year || request the events whose start/end range includes that year
// response: id, title, start, end
export const INITIAL_EVENTS = [
{
id: createEventId(),
title: 'event 4',
start: '2022-03-16T13:00:00',
end: '2022-03-20T18:00:00',
},
{
id: createEventId(),
    title: 'Festival',
start: '2022-03-11',
end: '2022-03-18T23:59:00',
},
{
id: createEventId(),
title: 'event 2',
start: '2022-03-16',
end: '2022-03-16',
},
{
id: createEventId(),
    title: 'Lunch',
start: '2022-03-17',
    end: '2022-03-18',
  },
]
export function createEventId() {
return String(eventGuid++)
}
|
#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import logging
import numpy as np
import isceobj
from isceobj.Alos2Proc.runGeocode import geocode
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo
logger = logging.getLogger('isce.alos2insar.runGeocodeSd')
def runGeocodeSd(self):
'''geocode final products
'''
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
self.updateParamemetersFromUser()
masterTrack = self._insar.loadTrack(master=True)
#slaveTrack = self._insar.loadTrack(master=False)
demFile = os.path.abspath(self._insar.demGeo)
sdDir = 'sd'
if not os.path.exists(sdDir):
os.makedirs(sdDir)
os.chdir(sdDir)
    if self.geocodeListSd is None:
geocodeList = self._insar.multilookCoherenceSd + self._insar.azimuthDeformationSd + self._insar.maskedAzimuthDeformationSd
else:
geocodeList = self.geocodeListSd
    if self.bbox is None:
bbox = getBboxGeo(masterTrack)
else:
bbox = self.bbox
catalog.addItem('geocode bounding box', bbox, 'runGeocodeSd')
numberRangeLooks = self._insar.numberRangeLooks1 * self._insar.numberRangeLooksSd
numberAzimuthLooks = self._insar.numberAzimuthLooks1 * self._insar.numberAzimuthLooksSd
for inputFile in geocodeList:
if self.geocodeInterpMethodSd == None:
img = isceobj.createImage()
img.load(inputFile + '.xml')
if img.dataType.upper() == 'CFLOAT':
interpMethod = 'sinc'
else:
interpMethod = 'bilinear'
else:
interpMethod = self.geocodeInterpMethodSd.lower()
geocode(masterTrack, demFile, inputFile, bbox, numberRangeLooks, numberAzimuthLooks, interpMethod, 0, 0)
os.chdir('../')
catalog.printToLog(logger, "runGeocodeSd")
self._insar.procDoc.addAllFromCatalog(catalog)
|
import unittest
from pathlib import Path
from cuticle_analysis import datasets
class TestSamples(unittest.TestCase):
@classmethod
def setUpClass(cls):
all_ids = range(0, 2877)
to_test = [
1, 2, 3, 5, 6,
7, 8, 10, 16, 18,
22, 24, 26, 501, 502,
505, 1658, 1659, 1661, 1666,
1676
]
excludes = list(set(all_ids) - set(to_test))
cls.d = datasets.RoughSmoothSub(
(16, 16), excludes=excludes, rebuild=True)
def test_rough(self):
'Test individual samples known to be rough (1) from the original dataset.'
for i in [2, 6, 16, 18, 22, 24, 26]:
self.assertEqual(self.d.get_label(i), 1, "Should be 1 - Rough")
def test_smooth(self):
'Test individual samples known to be smooth (2) from the original dataset.'
for i in [1, 3, 5, 7, 8, 10]:
self.assertEqual(self.d.get_label(i), 2, "Should be 2 - Smooth")
def test_na(self):
'Test individual samples known to be NA from the original dataset.'
for i in [501, 502, 505]:
with self.assertRaises(ValueError):
self.d.get_label(i)
def test_duplicate(self):
# there is a duplicate row at 1654 which can cause issues for
# images above 1654
# Rough
for i in [1658]:
self.assertEqual(self.d.get_label(i), 1, "Should be 1 - Rough")
# Smooth
for i in [1676]:
self.assertEqual(self.d.get_label(i), 2, "Should be 2 - Smooth")
# NA
for i in [1659, 1661, 1666]:
with self.assertRaises(ValueError):
self.d.get_label(i)
class TestDataset(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.excludes = list(range(100, 2877))
def test_datasets(self):
'Test each dataset configuration.'
# sub
datasets.RoughSmoothSub(
(16, 16), rebuild=True)
# full
datasets.RoughSmoothFull(
(16, 16), rebuild=True)
def test_subimage_files(self):
'Test that files are created when expected for subimage dataset.'
# save files
d = datasets.RoughSmoothSub(
(16, 16), excludes=self.excludes, rebuild=True, save=True)
paths = [
Path(d.img_meta_path),
Path(d.images_path),
Path(d.labels_path),
Path(d.ids_path)
]
for path in paths:
self.assertTrue(path.exists())
path.unlink()
# don't save files
d = datasets.RoughSmoothSub(
(16, 16), excludes=self.excludes, rebuild=True, save=False)
paths = [
Path(d.img_meta_path),
Path(d.images_path),
Path(d.labels_path),
Path(d.ids_path)
]
for path in paths:
self.assertFalse(path.exists())
def test_full_files(self):
'Test that files are created when expected for full dataset.'
# save files
d = datasets.RoughSmoothFull(
(64, 64), excludes=self.excludes, rebuild=True, save=True)
paths = [
Path(d.img_meta_path),
Path(d.images_path),
Path(d.labels_path),
Path(d.ids_path)
]
for path in paths:
self.assertTrue(path.exists())
path.unlink()
# don't save files
d = datasets.RoughSmoothFull(
(64, 64), excludes=self.excludes, rebuild=True, save=False)
paths = [
Path(d.img_meta_path),
Path(d.images_path),
Path(d.labels_path),
Path(d.ids_path)
]
for path in paths:
self.assertFalse(path.exists())
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python2
# Copyright 2012 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.
from __future__ import print_function
import os
def generate(env, emscripten_path=None, **kw):
""" SCons tool entry point """
if emscripten_path is None:
emscripten_path = os.environ.get('EMSCRIPTEN_ROOT')
if not emscripten_path:
        raise Exception('Unable to find emscripten. Please set EMSCRIPTEN_ROOT')
# SCons does not by default invoke the compiler with the
    # environment variables from the parent calling process,
# so manually route all environment variables referenced
# by Emscripten to the child.
for var in ['EM_CACHE', 'EMCC_DEBUG', 'EMTEST_BROWSER',
'EMMAKEN_JUST_CONFIGURE', 'EMCC_CFLAGS', 'EMCC_TEMP_DIR',
'EMCC_AUTODEBUG', 'EM_COMPILER_WRAPPER',
'EMMAKEN_COMPILER', 'EMMAKEN_CFLAGS',
'MOZ_DISABLE_AUTO_SAFE_MODE', 'EMCC_STDERR_FILE',
'EMSCRIPTEN_SUPPRESS_USAGE_WARNING', 'NODE_PATH', 'EMCC_JSOPT_MIN_CHUNK_SIZE',
'EMCC_JSOPT_MAX_CHUNK_SIZE', 'EMCC_SAVE_OPT_TEMP', 'EMCC_CORES', 'EMCC_NO_OPT_SORT',
'EMCC_BUILD_DIR', 'EMCC_DEBUG_SAVE', 'EMCC_SKIP_SANITY_CHECK',
'EMMAKEN_NO_SDK', 'EM_PKG_CONFIG_PATH', 'EMCC_CLOSURE_ARGS', 'JAVA_HEAP_SIZE',
'EMCC_FORCE_STDLIBS', 'EMCC_ONLY_FORCED_STDLIBS', 'EM_PORTS', 'IDL_CHECKS', 'IDL_VERBOSE',
'EMTEST_SAVE_DIR']:
if os.environ.get(var):
env['ENV'][var] = os.environ.get(var)
    # emscripten_path may be a SCons Dir node (with .abspath) or a plain string
    try:
        emscPath = emscripten_path.abspath
    except AttributeError:
        emscPath = emscripten_path
env.Replace(CC=os.path.join(emscPath, "emcc"))
env.Replace(CXX=os.path.join(emscPath, "em++"))
    # LINK uses smart_link by default which will choose
# either emcc or em++ depending on if there are any C++ sources
# in the program, so no need to change that.
# SHLINK and LDMODULE should use LINK so no
# need to change them here
env.Replace(AR=os.path.join(emscPath, "emar"))
env.Replace(RANLIB=os.path.join(emscPath, "emranlib"))
env.Replace(OBJSUFFIX=[".js", ".bc", ".o"][2])
env.Replace(LIBSUFFIX=[".js", ".bc", ".o"][2])
env.Replace(PROGSUFFIX=[".html", ".js"][1])
def exists(env):
""" NOOP method required by SCons """
return 1
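# --- Illustrative usage sketch (assumes this module is installed as an SCons
# tool named "emscripten"; the toolpath below is hypothetical) ---
#
#   # SConstruct
#   env = Environment(tools=['emscripten'], toolpath=['scons-tools'])
#   env.Program('hello', 'hello.c')  # PROGSUFFIX '.js' => builds hello.js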
|
'use strict'
module.exports = function assertFunction (value) {
if (typeof value !== 'function') {
throw new TypeError('Expected function, got: ' + value)
}
}
|
# coding: utf-8
"""
convertapi
Convert API lets you effortlessly convert file formats and types. # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import cloudmersive_convert_api_client
from cloudmersive_convert_api_client.models.get_xlsx_cell_by_identifier_request import GetXlsxCellByIdentifierRequest # noqa: E501
from cloudmersive_convert_api_client.rest import ApiException
class TestGetXlsxCellByIdentifierRequest(unittest.TestCase):
"""GetXlsxCellByIdentifierRequest unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testGetXlsxCellByIdentifierRequest(self):
"""Test GetXlsxCellByIdentifierRequest"""
# FIXME: construct object with mandatory attributes with example values
# model = cloudmersive_convert_api_client.models.get_xlsx_cell_by_identifier_request.GetXlsxCellByIdentifierRequest() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
// SPDX-License-Identifier: GPL-2.0+
/*
* vimc-streamer.c Virtual Media Controller Driver
*
* Copyright (C) 2018 Lucas A. M. Magalhães <[email protected]>
*
*/
#include <linux/init.h>
#include <linux/freezer.h>
#include <linux/kthread.h>
#include "vimc-streamer.h"
/**
* vimc_get_source_entity - get the entity connected with the first sink pad
*
* @ent: reference media_entity
*
* Helper function that returns the media entity containing the source pad
* linked with the first sink pad from the given media entity pad list.
*
 * Return: The source entity or NULL, if it wasn't found.
*/
static struct media_entity *vimc_get_source_entity(struct media_entity *ent)
{
struct media_pad *pad;
int i;
for (i = 0; i < ent->num_pads; i++) {
if (ent->pads[i].flags & MEDIA_PAD_FL_SOURCE)
continue;
pad = media_entity_remote_pad(&ent->pads[i]);
return pad ? pad->entity : NULL;
}
return NULL;
}
/**
 * vimc_streamer_pipeline_terminate - Disable the stream in every ved of the pipeline
*
* @stream: the pointer to the stream structure with the pipeline to be
* disabled.
*
* Calls s_stream to disable the stream in each entity of the pipeline
*
*/
static void vimc_streamer_pipeline_terminate(struct vimc_stream *stream)
{
struct vimc_ent_device *ved;
struct v4l2_subdev *sd;
while (stream->pipe_size) {
stream->pipe_size--;
ved = stream->ved_pipeline[stream->pipe_size];
stream->ved_pipeline[stream->pipe_size] = NULL;
if (!is_media_entity_v4l2_subdev(ved->ent))
continue;
sd = media_entity_to_v4l2_subdev(ved->ent);
v4l2_subdev_call(sd, video, s_stream, 0);
}
}
/**
* vimc_streamer_pipeline_init - Initializes the stream structure
*
* @stream: the pointer to the stream structure to be initialized
* @ved: the pointer to the vimc entity initializing the stream
*
* Initializes the stream structure. Walks through the entity graph to
* construct the pipeline used later on the streamer thread.
 * Calls s_stream (via v4l2_subdev_call()) to enable the stream in each
 * subdevice entity of the pipeline.
*
* Return: 0 if success, error code otherwise.
*/
static int vimc_streamer_pipeline_init(struct vimc_stream *stream,
struct vimc_ent_device *ved)
{
struct media_entity *entity;
struct video_device *vdev;
struct v4l2_subdev *sd;
int ret = 0;
stream->pipe_size = 0;
while (stream->pipe_size < VIMC_STREAMER_PIPELINE_MAX_SIZE) {
if (!ved) {
vimc_streamer_pipeline_terminate(stream);
return -EINVAL;
}
stream->ved_pipeline[stream->pipe_size++] = ved;
if (is_media_entity_v4l2_subdev(ved->ent)) {
sd = media_entity_to_v4l2_subdev(ved->ent);
ret = v4l2_subdev_call(sd, video, s_stream, 1);
if (ret && ret != -ENOIOCTLCMD) {
dev_err(ved->dev, "subdev_call error %s\n",
ved->ent->name);
vimc_streamer_pipeline_terminate(stream);
return ret;
}
}
entity = vimc_get_source_entity(ved->ent);
/* Check if the end of the pipeline was reached */
if (!entity) {
/* the first entity of the pipe should be source only */
if (!vimc_is_source(ved->ent)) {
dev_err(ved->dev,
"first entity in the pipe '%s' is not a source\n",
ved->ent->name);
vimc_streamer_pipeline_terminate(stream);
return -EPIPE;
}
return 0;
}
/* Get the next device in the pipeline */
if (is_media_entity_v4l2_subdev(entity)) {
sd = media_entity_to_v4l2_subdev(entity);
ved = v4l2_get_subdevdata(sd);
} else {
vdev = container_of(entity,
struct video_device,
entity);
ved = video_get_drvdata(vdev);
}
}
vimc_streamer_pipeline_terminate(stream);
return -EINVAL;
}
/**
* vimc_streamer_thread - Process frames through the pipeline
*
* @data: vimc_stream struct of the current stream
*
 * From the source to the sink, gets a frame from each subdevice and sends it
 * to the next one in the pipeline at a fixed framerate.
*
* Return:
* Always zero (created as ``int`` instead of ``void`` to comply with
* kthread API).
*/
static int vimc_streamer_thread(void *data)
{
struct vimc_stream *stream = data;
u8 *frame = NULL;
int i;
set_freezable();
for (;;) {
try_to_freeze();
if (kthread_should_stop())
break;
for (i = stream->pipe_size - 1; i >= 0; i--) {
frame = stream->ved_pipeline[i]->process_frame(
stream->ved_pipeline[i], frame);
if (!frame || IS_ERR(frame))
break;
}
		/* pace the loop at roughly 60 frames per second */
set_current_state(TASK_UNINTERRUPTIBLE);
schedule_timeout(HZ / 60);
}
return 0;
}
/**
* vimc_streamer_s_stream - Start/stop the streaming on the media pipeline
*
* @stream: the pointer to the stream structure of the current stream
* @ved: pointer to the vimc entity of the entity of the stream
* @enable: flag to determine if stream should start/stop
*
 * When starting, checks whether ``stream->kthread`` is already allocated,
 * which indicates that a stream is already running, and returns early if so.
 * Otherwise, it initializes the pipeline, then creates and runs a kthread to
 * consume buffers through the pipeline.
* When stopping, analogously check if there is a stream running, stop the
* thread and terminates the pipeline.
*
* Return: 0 if success, error code otherwise.
*/
int vimc_streamer_s_stream(struct vimc_stream *stream,
struct vimc_ent_device *ved,
int enable)
{
int ret;
if (!stream || !ved)
return -EINVAL;
if (enable) {
if (stream->kthread)
return 0;
ret = vimc_streamer_pipeline_init(stream, ved);
if (ret)
return ret;
stream->kthread = kthread_run(vimc_streamer_thread, stream,
"vimc-streamer thread");
if (IS_ERR(stream->kthread)) {
ret = PTR_ERR(stream->kthread);
dev_err(ved->dev, "kthread_run failed with %d\n", ret);
vimc_streamer_pipeline_terminate(stream);
stream->kthread = NULL;
return ret;
}
} else {
if (!stream->kthread)
return 0;
ret = kthread_stop(stream->kthread);
/*
* kthread_stop returns -EINTR in cases when streamon was
	 * immediately followed by streamoff, and the thread didn't have
	 * a chance to run. Ignore the error so that the stream in the
	 * pipeline is still stopped.
*/
if (ret)
dev_dbg(ved->dev, "kthread_stop returned '%d'\n", ret);
stream->kthread = NULL;
vimc_streamer_pipeline_terminate(stream);
}
return 0;
}
|
'''After escaping the pirate's cave without drowning, you stumble upon a
field where it's rumored a lot of gold can be found. You even have a map that
shows where all of the biggest hauls are located!
Unfortunately, the sun is going down, so you don't have a ton of time to
search. You decide to take one quick pass through the field. You choose
only to move one of three ways:
-diagonally northeast
-diagonally southeast
-straight east
If you start at the northwest corner of the field, how should you move to
maximize the gold you collect?
Can you write a function that finds the best path through a square
field of any size?
Ex.
N
Input = [[2, 4, 1],
W [0, 3, 2], E
[1, 2, 6]
]
S
Output = '11.000 gold can be acquired if we move
('se', 'se')'
(based on the Gold Mine Problem at
https://www.geeksforgeeks.org/gold-mine-problem/?ref=lbp)
'''
import random
import time
from itertools import product
def naive_scavenging(field):
    '''This solution generates all possible sequences of directions we may
    move. For each sequence it traverses the field, sums up the gold
    collected along the way, and keeps track of the sequence that yields
    the maximum total.
    '''
# generate all possible permutations of 'ne', 'e' or 'se' movements
# that get a person across the field
all_move_combos = list(product(['se', 'e', 'ne'], repeat=len(field) - 1))
# for each combo of moves, try to traverse, and calculate the total gold collected
output = ''
max_gold = 0
for move_combo in all_move_combos:
# start at the top left corner
total_gold = field[0][0]
col = 0
row = 0
for direction in move_combo:
if direction == 'se':
                # try to move southeast (down a row); stop at the bottom edge
                if row >= len(field) - 1:
break
else:
row += 1
elif direction == 'ne':
                # try to move northeast (up a row); stop at the top edge
if row <= 0:
break
else:
row -= 1
            # every move also advances one column east
col += 1
total_gold += field[row][col]
# total gold will be equal to the gold we grabbed using this move combo
if total_gold > max_gold:
max_gold = total_gold
            output = f'{max_gold:.3f} gold can be acquired if we move {move_combo}'
return output
def dp_scavenging(field):
    '''This function utilizes dynamic programming to reduce the number of
    duplicate calculations that are performed (compared to the naive
    approach). After a coordinate is visited, we cache the maximum amount
    of gold that can be picked up from that coordinate onward.
    Subpaths on the eastern side of the field that were visited multiple
    times in the naive approach are only computed once using dynamic
    programming.
    '''
gold_cache = [[0 for _ in range(len(field))] for _ in range(len(field))]
    # start at the east edge of the field and work back west
for col in range(len(field) - 1, -1, -1):
for row in range(len(field)):
# take all possible directions, and figure out max gold per direction
e_gold = 0
ne_gold = 0
se_gold = 0
# gold collected if we choose to go E
if (col != len(field) - 1):
e_gold = gold_cache[row][col+1]
# gold collected if we choose to go NE
if (row != 0 and col != len(field) - 1):
ne_gold = gold_cache[row-1][col+1]
# gold collected if we choose to go SE
if (row != len(field) - 1 and col != len(field) - 1):
se_gold = gold_cache[row+1][col+1]
current_gold = field[row][col]
# add the current gold + the BEST amount from E/NE/SE
# save it in cache
gold_cache[row][col] = current_gold + max(e_gold, ne_gold, se_gold)
best_gold = gold_cache[0][0]
return f'{best_gold:.3f} gold is the most we can get'
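# Illustrative trace (not in the original): running dp_scavenging on the 3x3
# field from the module docstring, [[2, 4, 1], [0, 3, 2], [1, 2, 6]], the
# gold_cache fills from east to west:
#   col 2: [1, 2, 6]
#   col 1: [4 + max(1, 2), 3 + max(1, 2, 6), 2 + max(2, 6)] = [6, 9, 8]
#   col 0: [2 + max(6, 9), 0 + max(6, 9, 8), 1 + max(9, 8)] = [11, 9, 10]
# best_gold = gold_cache[0][0] = 11, i.e. the path ('se', 'se')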
def print_field(field, label):
'''Helper function to display 2D fields
with gold at different coordinates
'''
print(label)
for row in field:
output = ''
for r in row:
output += f' {r}'
print(f'{output}\n')
print()
# TESTS -
# Below are a series of tests that can be utilized to demonstrate the
# improvements achieved through dynamic programming. Timing is included
# to give students an idea of how poorly some approaches scale.
# However, efficiency should also be formalized using Big O notation.
small_field = []
size = 5
for _ in range(size):
row = []
for _ in range(size):
row.append(round(random.random()*random.randint(1, 9), 3))
small_field.append(row)
print_field(small_field, 'Small field')
large_field = []
size = 16
for _ in range(size):
row = []
for _ in range(size):
row.append(round(random.random()*random.randint(1, 9), 3))
    large_field.append(row)
# print_field(large_field, 'Large field')
# Test 1 - Naive
print('Starting test 1, naive approach...\ncrossing small field...\n')
start = time.time()
print(f'{naive_scavenging(small_field)}')
print(f'\nResult calculated in {time.time()-start:.5f} seconds')
print('\n--------------------------------\n')
# Test 2 - Naive
# print('Starting test 2, naive approach...\ncrossing large field...\n')
# start = time.time()
# print(f'\n{naive_scavenging(large_field)}')
# print(f'\nResult calculated in {time.time()-start:.5f} seconds')
# print('\n--------------------------------\n')
# Test 3 - Dynamic Programming
print('Starting test 3, dynamic programming...\ncrossing small field...\n')
start = time.time()
print(f'\n{dp_scavenging(small_field)}')
print(f'\nResult calculated in {time.time()-start:.5f} seconds')
print('\n--------------------------------\n')
# Test 4 - Dynamic Programming
print('Starting test 4, dynamic programming...\ncrossing large field...\n')
start = time.time()
print(f'\n{dp_scavenging(large_field)}')
print(f'\nResult calculated in {time.time()-start:.5f} seconds')
print('\n--------------------------------\n')
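# Rough complexity comparison (n = side length of the field, matching the
# tests above): the naive approach enumerates 3**(n-1) move sequences and
# walks each one, roughly O(n * 3**n) work, while the dynamic-programming
# version fills each of the n*n cache cells once, i.e. O(n**2).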
|
//
// Copyright (c) .NET Foundation and Contributors
// Portions Copyright (c) 2006..2015 Giovanni Di Sirio. All rights reserved.
// See LICENSE file in the project root for full license information.
//
#ifndef MCUCONF_H
#define MCUCONF_H
/*
* STM32F4xx drivers configuration.
* The following settings override the default settings present in
* the various device driver implementation headers.
* Note that the settings for each driver only have effect if the whole
* driver is enabled in halconf.h.
*
* IRQ priorities:
* 15...0 Lowest...Highest.
*
* DMA priorities:
* 0...3 Lowest...Highest.
*/
#define STM32F4xx_MCUCONF
/*
* HAL driver system settings.
*/
#define STM32_NO_INIT FALSE
#define STM32_HSI_ENABLED TRUE
#define STM32_LSI_ENABLED TRUE
#define STM32_HSE_ENABLED TRUE
#define STM32_LSE_ENABLED FALSE
#define STM32_CLOCK48_REQUIRED TRUE
#define STM32_SW STM32_SW_PLL
#define STM32_PLLSRC STM32_PLLSRC_HSE
#define STM32_PLLM_VALUE 25
#define STM32_PLLN_VALUE 336
#define STM32_PLLP_VALUE 2
#define STM32_PLLQ_VALUE 7
#define STM32_HPRE STM32_HPRE_DIV1
#define STM32_PPRE1 STM32_PPRE1_DIV4
#define STM32_PPRE2 STM32_PPRE2_DIV2
#define STM32_RTCSEL STM32_RTCSEL_LSI
#define STM32_RTCPRE_VALUE 8
#define STM32_MCO1SEL STM32_MCO1SEL_HSI
#define STM32_MCO1PRE STM32_MCO1PRE_DIV1
#define STM32_MCO2SEL STM32_MCO2SEL_SYSCLK
#define STM32_MCO2PRE STM32_MCO2PRE_DIV1
#define STM32_I2SSRC STM32_I2SSRC_PLLI2S
#define STM32_PLLI2SN_VALUE 192
#define STM32_PLLI2SR_VALUE 2
#define STM32_PVD_ENABLE FALSE
#define STM32_PLS STM32_PLS_LEV0
#define STM32_BKPRAM_ENABLE FALSE
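/*
 * Resulting clock tree with the PLL settings above (assuming a 25 MHz HSE
 * crystal, which matches PLLM = 25): VCO = 25 MHz / 25 * 336 = 336 MHz,
 * SYSCLK = 336 / 2 = 168 MHz, the 48 MHz clock (USB OTG FS/SDIO) = 336 / 7
 * = 48 MHz, AHB = 168 MHz, APB1 = 42 MHz, APB2 = 84 MHz.
 */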
/*
* IRQ system settings.
*/
#define STM32_IRQ_EXTI0_PRIORITY 6
#define STM32_IRQ_EXTI1_PRIORITY 6
#define STM32_IRQ_EXTI2_PRIORITY 6
#define STM32_IRQ_EXTI3_PRIORITY 6
#define STM32_IRQ_EXTI4_PRIORITY 6
#define STM32_IRQ_EXTI5_9_PRIORITY 6
#define STM32_IRQ_EXTI10_15_PRIORITY 6
#define STM32_IRQ_EXTI16_PRIORITY 6
#define STM32_IRQ_EXTI17_PRIORITY 15
#define STM32_IRQ_EXTI18_PRIORITY 6
#define STM32_IRQ_EXTI19_PRIORITY 6
#define STM32_IRQ_EXTI20_PRIORITY 6
#define STM32_IRQ_EXTI21_PRIORITY 15
#define STM32_IRQ_EXTI22_PRIORITY 15
/*
* ADC driver system settings.
*/
#define STM32_ADC_ADCPRE ADC_CCR_ADCPRE_DIV4
#define STM32_ADC_USE_ADC1 FALSE
#define STM32_ADC_USE_ADC2 FALSE
#define STM32_ADC_USE_ADC3 FALSE
#define STM32_ADC_ADC1_DMA_STREAM STM32_DMA_STREAM_ID(2, 4)
#define STM32_ADC_ADC2_DMA_STREAM STM32_DMA_STREAM_ID(2, 2)
#define STM32_ADC_ADC3_DMA_STREAM STM32_DMA_STREAM_ID(2, 1)
#define STM32_ADC_ADC1_DMA_PRIORITY 2
#define STM32_ADC_ADC2_DMA_PRIORITY 2
#define STM32_ADC_ADC3_DMA_PRIORITY 2
#define STM32_ADC_IRQ_PRIORITY 6
#define STM32_ADC_ADC1_DMA_IRQ_PRIORITY 6
#define STM32_ADC_ADC2_DMA_IRQ_PRIORITY 6
#define STM32_ADC_ADC3_DMA_IRQ_PRIORITY 6
/*
* CAN driver system settings.
*/
#define STM32_CAN_USE_CAN1 FALSE
#define STM32_CAN_USE_CAN2 FALSE
#define STM32_CAN_CAN1_IRQ_PRIORITY 11
#define STM32_CAN_CAN2_IRQ_PRIORITY 11
/*
* DAC driver system settings.
*/
#define STM32_DAC_DUAL_MODE FALSE
#define STM32_DAC_USE_DAC1_CH1 FALSE
#define STM32_DAC_USE_DAC1_CH2 FALSE
#define STM32_DAC_DAC1_CH1_IRQ_PRIORITY 10
#define STM32_DAC_DAC1_CH2_IRQ_PRIORITY 10
#define STM32_DAC_DAC1_CH1_DMA_PRIORITY 2
#define STM32_DAC_DAC1_CH2_DMA_PRIORITY 2
#define STM32_DAC_DAC1_CH1_DMA_STREAM STM32_DMA_STREAM_ID(1, 5)
#define STM32_DAC_DAC1_CH2_DMA_STREAM STM32_DMA_STREAM_ID(1, 6)
/*
* GPT driver system settings.
*/
#define STM32_GPT_USE_TIM1 FALSE
#define STM32_GPT_USE_TIM2 FALSE
#define STM32_GPT_USE_TIM3 FALSE
#define STM32_GPT_USE_TIM4 FALSE
#define STM32_GPT_USE_TIM5 FALSE
#define STM32_GPT_USE_TIM6 FALSE
#define STM32_GPT_USE_TIM7 FALSE
#define STM32_GPT_USE_TIM8 FALSE
#define STM32_GPT_USE_TIM9 FALSE
#define STM32_GPT_USE_TIM11 FALSE
#define STM32_GPT_USE_TIM12 FALSE
#define STM32_GPT_USE_TIM14 FALSE
#define STM32_GPT_TIM1_IRQ_PRIORITY 7
#define STM32_GPT_TIM2_IRQ_PRIORITY 7
#define STM32_GPT_TIM3_IRQ_PRIORITY 7
#define STM32_GPT_TIM4_IRQ_PRIORITY 7
#define STM32_GPT_TIM5_IRQ_PRIORITY 7
#define STM32_GPT_TIM6_IRQ_PRIORITY 7
#define STM32_GPT_TIM7_IRQ_PRIORITY 7
#define STM32_GPT_TIM8_IRQ_PRIORITY 7
#define STM32_GPT_TIM9_IRQ_PRIORITY 7
#define STM32_GPT_TIM11_IRQ_PRIORITY 7
#define STM32_GPT_TIM12_IRQ_PRIORITY 7
#define STM32_GPT_TIM14_IRQ_PRIORITY 7
/*
* I2C driver system settings.
*/
#define STM32_I2C_USE_I2C1 FALSE
#define STM32_I2C_USE_I2C2 FALSE
#define STM32_I2C_USE_I2C3 FALSE
#define STM32_I2C_BUSY_TIMEOUT 50
#define STM32_I2C_I2C1_RX_DMA_STREAM STM32_DMA_STREAM_ID(1, 0)
#define STM32_I2C_I2C1_TX_DMA_STREAM STM32_DMA_STREAM_ID(1, 7)
#define STM32_I2C_I2C2_RX_DMA_STREAM STM32_DMA_STREAM_ID(1, 2)
#define STM32_I2C_I2C2_TX_DMA_STREAM STM32_DMA_STREAM_ID(1, 7)
#define STM32_I2C_I2C3_RX_DMA_STREAM STM32_DMA_STREAM_ID(1, 2)
#define STM32_I2C_I2C3_TX_DMA_STREAM STM32_DMA_STREAM_ID(1, 4)
#define STM32_I2C_I2C1_IRQ_PRIORITY 5
#define STM32_I2C_I2C2_IRQ_PRIORITY 5
#define STM32_I2C_I2C3_IRQ_PRIORITY 5
#define STM32_I2C_I2C1_DMA_PRIORITY 3
#define STM32_I2C_I2C2_DMA_PRIORITY 3
#define STM32_I2C_I2C3_DMA_PRIORITY 3
#define STM32_I2C_DMA_ERROR_HOOK(i2cp) osalSysHalt("DMA failure")
/*
* I2S driver system settings.
*/
#define STM32_I2S_USE_SPI2 FALSE
#define STM32_I2S_USE_SPI3 FALSE
#define STM32_I2S_SPI2_IRQ_PRIORITY 10
#define STM32_I2S_SPI3_IRQ_PRIORITY 10
#define STM32_I2S_SPI2_DMA_PRIORITY 1
#define STM32_I2S_SPI3_DMA_PRIORITY 1
#define STM32_I2S_SPI2_RX_DMA_STREAM STM32_DMA_STREAM_ID(1, 3)
#define STM32_I2S_SPI2_TX_DMA_STREAM STM32_DMA_STREAM_ID(1, 4)
#define STM32_I2S_SPI3_RX_DMA_STREAM STM32_DMA_STREAM_ID(1, 0)
#define STM32_I2S_SPI3_TX_DMA_STREAM STM32_DMA_STREAM_ID(1, 7)
#define STM32_I2S_DMA_ERROR_HOOK(i2sp) osalSysHalt("DMA failure")
/*
* ICU driver system settings.
*/
#define STM32_ICU_USE_TIM1 FALSE
#define STM32_ICU_USE_TIM2 FALSE
#define STM32_ICU_USE_TIM3 FALSE
#define STM32_ICU_USE_TIM4 FALSE
#define STM32_ICU_USE_TIM5 FALSE
#define STM32_ICU_USE_TIM8 FALSE
#define STM32_ICU_USE_TIM9 FALSE
#define STM32_ICU_TIM1_IRQ_PRIORITY 7
#define STM32_ICU_TIM2_IRQ_PRIORITY 7
#define STM32_ICU_TIM3_IRQ_PRIORITY 7
#define STM32_ICU_TIM4_IRQ_PRIORITY 7
#define STM32_ICU_TIM5_IRQ_PRIORITY 7
#define STM32_ICU_TIM8_IRQ_PRIORITY 7
#define STM32_ICU_TIM9_IRQ_PRIORITY 7
/*
* MAC driver system settings.
*/
#define STM32_MAC_TRANSMIT_BUFFERS 2
#define STM32_MAC_RECEIVE_BUFFERS 4
#define STM32_MAC_BUFFERS_SIZE 1522
#define STM32_MAC_PHY_TIMEOUT 100
#define STM32_MAC_ETH1_CHANGE_PHY_STATE TRUE
#define STM32_MAC_ETH1_IRQ_PRIORITY 13
#define STM32_MAC_IP_CHECKSUM_OFFLOAD 0
/*
* PWM driver system settings.
*/
#define STM32_PWM_USE_ADVANCED FALSE
#define STM32_PWM_USE_TIM1 FALSE
#define STM32_PWM_USE_TIM2 FALSE
#define STM32_PWM_USE_TIM3 FALSE
#define STM32_PWM_USE_TIM4 FALSE
#define STM32_PWM_USE_TIM5 FALSE
#define STM32_PWM_USE_TIM8 FALSE
#define STM32_PWM_USE_TIM9 FALSE
#define STM32_PWM_TIM1_IRQ_PRIORITY 7
#define STM32_PWM_TIM2_IRQ_PRIORITY 7
#define STM32_PWM_TIM3_IRQ_PRIORITY 7
#define STM32_PWM_TIM4_IRQ_PRIORITY 7
#define STM32_PWM_TIM5_IRQ_PRIORITY 7
#define STM32_PWM_TIM8_IRQ_PRIORITY 7
#define STM32_PWM_TIM9_IRQ_PRIORITY 7
/*
* SDC driver system settings.
*/
#define STM32_SDC_SDIO_DMA_PRIORITY 3
#define STM32_SDC_SDIO_IRQ_PRIORITY 9
#define STM32_SDC_WRITE_TIMEOUT_MS 250
#define STM32_SDC_READ_TIMEOUT_MS 25
#define STM32_SDC_CLOCK_ACTIVATION_DELAY 10
#define STM32_SDC_SDIO_UNALIGNED_SUPPORT TRUE
#define STM32_SDC_SDIO_DMA_STREAM STM32_DMA_STREAM_ID(2, 3)
/*
* SERIAL driver system settings.
*/
#define STM32_SERIAL_USE_USART1 FALSE
#define STM32_SERIAL_USE_USART2 TRUE
#define STM32_SERIAL_USE_USART3 FALSE
#define STM32_SERIAL_USE_UART4 FALSE
#define STM32_SERIAL_USE_UART5 FALSE
#define STM32_SERIAL_USE_USART6 FALSE
#define STM32_SERIAL_USART1_PRIORITY 12
#define STM32_SERIAL_USART2_PRIORITY 12
#define STM32_SERIAL_USART3_PRIORITY 12
#define STM32_SERIAL_UART4_PRIORITY 12
#define STM32_SERIAL_UART5_PRIORITY 12
#define STM32_SERIAL_USART6_PRIORITY 12
/*
* SPI driver system settings.
*/
#define STM32_SPI_USE_SPI1 FALSE
#define STM32_SPI_USE_SPI2 FALSE
#define STM32_SPI_USE_SPI3 FALSE
#define STM32_SPI_SPI1_RX_DMA_STREAM STM32_DMA_STREAM_ID(2, 0)
#define STM32_SPI_SPI1_TX_DMA_STREAM STM32_DMA_STREAM_ID(2, 3)
#define STM32_SPI_SPI2_RX_DMA_STREAM STM32_DMA_STREAM_ID(1, 3)
#define STM32_SPI_SPI2_TX_DMA_STREAM STM32_DMA_STREAM_ID(1, 4)
#define STM32_SPI_SPI3_RX_DMA_STREAM STM32_DMA_STREAM_ID(1, 2)
#define STM32_SPI_SPI3_TX_DMA_STREAM STM32_DMA_STREAM_ID(1, 5)
#define STM32_SPI_SPI1_DMA_PRIORITY 1
#define STM32_SPI_SPI2_DMA_PRIORITY 1
#define STM32_SPI_SPI3_DMA_PRIORITY 1
#define STM32_SPI_SPI1_IRQ_PRIORITY 10
#define STM32_SPI_SPI2_IRQ_PRIORITY 10
#define STM32_SPI_SPI3_IRQ_PRIORITY 10
#define STM32_SPI_DMA_ERROR_HOOK(spip) osalSysHalt("DMA failure")
/*
* ST driver system settings.
*/
#define STM32_ST_IRQ_PRIORITY 8
#define STM32_ST_USE_TIMER 2
/*
* UART driver system settings.
*/
#define STM32_UART_USE_USART1 FALSE
#define STM32_UART_USE_USART2 FALSE
#define STM32_UART_USE_USART3 FALSE
#define STM32_UART_USE_UART4 FALSE
#define STM32_UART_USE_UART5 FALSE
#define STM32_UART_USE_USART6 FALSE
#define STM32_UART_USART1_RX_DMA_STREAM STM32_DMA_STREAM_ID(2, 2)
#define STM32_UART_USART1_TX_DMA_STREAM STM32_DMA_STREAM_ID(2, 7)
#define STM32_UART_USART2_RX_DMA_STREAM STM32_DMA_STREAM_ID(1, 5)
#define STM32_UART_USART2_TX_DMA_STREAM STM32_DMA_STREAM_ID(1, 6)
#define STM32_UART_USART3_RX_DMA_STREAM STM32_DMA_STREAM_ID(1, 1)
#define STM32_UART_USART3_TX_DMA_STREAM STM32_DMA_STREAM_ID(1, 3)
#define STM32_UART_UART4_RX_DMA_STREAM STM32_DMA_STREAM_ID(1, 2)
#define STM32_UART_UART4_TX_DMA_STREAM STM32_DMA_STREAM_ID(1, 4)
#define STM32_UART_UART5_RX_DMA_STREAM STM32_DMA_STREAM_ID(1, 0)
#define STM32_UART_UART5_TX_DMA_STREAM STM32_DMA_STREAM_ID(1, 7)
#define STM32_UART_USART6_RX_DMA_STREAM STM32_DMA_STREAM_ID(2, 1)
#define STM32_UART_USART6_TX_DMA_STREAM STM32_DMA_STREAM_ID(2, 6)
#define STM32_UART_USART1_IRQ_PRIORITY 12
#define STM32_UART_USART2_IRQ_PRIORITY 12
#define STM32_UART_USART3_IRQ_PRIORITY 12
#define STM32_UART_UART4_IRQ_PRIORITY 12
#define STM32_UART_UART5_IRQ_PRIORITY 12
#define STM32_UART_USART6_IRQ_PRIORITY 12
#define STM32_UART_USART1_DMA_PRIORITY 0
#define STM32_UART_USART2_DMA_PRIORITY 0
#define STM32_UART_USART3_DMA_PRIORITY 0
#define STM32_UART_UART4_DMA_PRIORITY 0
#define STM32_UART_UART5_DMA_PRIORITY 0
#define STM32_UART_USART6_DMA_PRIORITY 0
#define STM32_UART_DMA_ERROR_HOOK(uartp) osalSysHalt("DMA failure")
/*
* USB driver system settings.
*/
#define STM32_USB_USE_OTG1 TRUE
#define STM32_USB_USE_OTG2 FALSE
#define STM32_USB_OTG1_IRQ_PRIORITY 14
#define STM32_USB_OTG2_IRQ_PRIORITY 14
#define STM32_USB_OTG1_RX_FIFO_SIZE 512
#define STM32_USB_OTG2_RX_FIFO_SIZE 1024
#define STM32_USB_OTG_THREAD_PRIO LOWPRIO
#define STM32_USB_OTG_THREAD_STACK_SIZE 128
#define STM32_USB_OTGFIFO_FILL_BASEPRI 0
/*
* WDG driver system settings.
*/
#define STM32_WDG_USE_IWDG FALSE
// header for nanoFramework overlay drivers
#include "mcuconf_nf.h"
#endif /* MCUCONF_H */
|
import dash
import dash_core_components as dcc
import dash_html_components as html
print(dcc.__version__) # 0.6.0 or above is required
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
# Since we're adding callbacks to elements that don't exist in the app.layout,
# Dash will raise an exception to warn us that we might be
# doing something wrong.
# In this case, we're adding the elements through a callback, so we can ignore
# the exception.
app.config.suppress_callback_exceptions = True
app.layout = html.Div([
dcc.Location(id='url', refresh=False),
html.Div(id='page-content')
])
index_page = html.Div([
dcc.Link('Go to Page 1', href='/page-1'),
html.Br(),
dcc.Link('Go to Page 2', href='/page-2'),
])
page_1_layout = html.Div([
html.H1('Page 1'),
dcc.Dropdown(
id='page-1-dropdown',
options=[{'label': i, 'value': i} for i in ['LA', 'NYC', 'MTL']],
value='LA'
),
html.Div(id='page-1-content'),
html.Br(),
dcc.Link('Go to Page 2', href='/page-2'),
html.Br(),
dcc.Link('Go back to home', href='/'),
])
@app.callback(dash.dependencies.Output('page-1-content', 'children'),
[dash.dependencies.Input('page-1-dropdown', 'value')])
def page_1_dropdown(value):
return 'You have selected "{}"'.format(value)
page_2_layout = html.Div([
html.H1('Page 2'),
dcc.RadioItems(
id='page-2-radios',
options=[{'label': i, 'value': i} for i in ['Orange', 'Blue', 'Red']],
value='Orange'
),
html.Div(id='page-2-content'),
html.Br(),
dcc.Link('Go to Page 1', href='/page-1'),
html.Br(),
dcc.Link('Go back to home', href='/')
])
@app.callback(dash.dependencies.Output('page-2-content', 'children'),
[dash.dependencies.Input('page-2-radios', 'value')])
def page_2_radios(value):
return 'You have selected "{}"'.format(value)
# Update the index
@app.callback(dash.dependencies.Output('page-content', 'children'),
[dash.dependencies.Input('url', 'pathname')])
def display_page(pathname):
if pathname == '/page-1':
return page_1_layout
elif pathname == '/page-2':
return page_2_layout
else:
return index_page
# You could also return a 404 "URL not found" page here
if __name__ == '__main__':
app.run_server(debug=True)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# PyEUPI documentation build configuration file, created by
# sphinx-quickstart on Mon Apr 18 17:08:15 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
def skip(app, what, name, obj, skip, options):
if name == "__init__":
return False
return skip
def setup(app):
app.connect("autodoc-skip-member", skip)
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
# 'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'PyEUPI'
copyright = u'2016, Raphaël Vinot'
author = u'Raphaël Vinot'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#html_title = 'PyEUPI v1.0'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'PyEUPIdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'PyEUPI.tex', 'PyEUPI Documentation',
u'Raphaël Vinot', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pyeupi', 'PyEUPI Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'PyEUPI', 'PyEUPI Documentation',
author, 'PyEUPI', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|